[KINECT 2.0] Getting joint coordinates and saving them to a txt file

1. Introduction
I have recently been working on motion-sensing projects that use a Kinect to capture the positions and orientations of human joints. This code is aimed at research beginners like me: you can change which joint indices are written out in the `outfile <<` block (line 244 in the original post) to record different joints. The joint-index map is given in the figure at the end. The code is adapted from this post: https://blog.csdn.net/zmdsjtu/article/details/52275879?utm_medium=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-1.nonecase&depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromMachineLearnPai2-1.nonecase
During my time on CSDN I have met many people working hard on robotics, and I hope we can solve problems together!
2. Code

// 2020-9-14: read the coordinates of each joint, compute the arm angle, and compute the position and orientation of the arm end point
// Note: this program assumes the Kinect SDK 2.0 and OpenCV are already configured. Platform: Kinect 2.0 / VS2019 ——豆奶豆豆奶
// The header names were stripped in the original post; these are the ones this code needs
// (Kinect SDK 2.0, OpenCV, and the standard library; adjust the legacy OpenCV header for your version)
#include <Windows.h>
#include <Kinect.h>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/imgproc_c.h>   // legacy C API used below (CvPoint, cvScalar, CV_AA)
#include <iostream>
#include <fstream>

using namespace std;
using namespace cv;
/**************** 0. Helper for releasing COM interfaces ************************/
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
	if (pInterfaceToRelease != NULL) {
		pInterfaceToRelease->Release();
		pInterfaceToRelease = NULL;
	}
}


void DrawBone(Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone, JointType joint0, JointType joint1);

void drawSkeleton(Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone);

int main(int argc, char** argv)
{
	// Enable OpenCV's optimized (CPU hardware instruction) code paths
	setUseOptimized(true);
/**************** 1. Open the Kinect 2.0 sensor ************************/
	// HRESULT is used to check whether each call succeeded
	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor(&pSensor);
	hResult = pSensor->Open();
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}
/**************** 2. Open the color-frame reader and the body-frame reader ************************/
	// The usual sequence is Source -> Reader -> Description
	//Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource(&pColorSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IBodyFrameSource* pBodySource;
	hResult = pSensor->get_BodyFrameSource(&pBodySource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader(&pColorReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IBodyFrameReader* pBodyReader;
	hResult = pBodySource->OpenReader(&pBodyReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription(&pDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width(&width); // 1920
	pDescription->get_Height(&height); // 1080
	unsigned int bufferSize = width * height * 4 * sizeof(unsigned char);

	// Image buffers: bufferMat holds the full-resolution 1920x1080 BGRA color frame; bodyMat is a half-size copy used for display
	cv::Mat bufferMat(height, width, CV_8UC4);
	cv::Mat bodyMat(height / 2, width / 2, CV_8UC4);
	cv::namedWindow("Body");

	// Color Table
	cv::Vec3b color[BODY_COUNT];
	color[0] = cv::Vec3b(255, 0, 0);   // blue (OpenCV uses BGR order)
	color[1] = cv::Vec3b(0, 255, 0);   // green
	color[2] = cv::Vec3b(0, 0, 255);   // red
	color[3] = cv::Vec3b(255, 255, 0); // cyan
	color[4] = cv::Vec3b(255, 0, 255); // magenta
	color[5] = cv::Vec3b(0, 255, 255); // yellow


	// Joint positions: up to 6 bodies, 25 joints per body, 3 coordinates (X, Y, Z) per joint
	float Skeletons[6][25][3];

	// Coordinate mapper: used below to map joint positions from camera space into color-image space
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}
	/**************** 3. Save the joint data to a txt file ************************/
	// "E:\\Project_VS\\2020-9-14\\grab1.txt" is the save path; change it as needed
	ofstream outfile("E:\\Project_VS\\2020-9-14\\grab1.txt", ios::out);
	if (!outfile.is_open())
	{
		cout << "Failed to open the output file" << endl;
		exit(1);
	}
	while (1) {
			// Frame
			IColorFrame* pColorFrame = nullptr;
			hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
			if (SUCCEEDED(hResult)) {
				hResult = pColorFrame->CopyConvertedFrameDataToArray(bufferSize, reinterpret_cast<BYTE*>(bufferMat.data), ColorImageFormat::ColorImageFormat_Bgra);
				// CopyConvertedFrameDataToArray copies the converted frame into the buffer;
				// the first argument is the buffer size: width * height * 4 channels (BGRA)
				if (SUCCEEDED(hResult)) {
					cv::resize(bufferMat, bodyMat, cv::Size(), 0.5, 0.5);
				}

			}

			// Update the body (skeleton) frame
			IBodyFrame* pBodyFrame = nullptr;
			hResult = pBodyReader->AcquireLatestFrame(&pBodyFrame); // AcquireLatestFrame returns the most recent frame; it fails if no new frame is available
			if (SUCCEEDED(hResult)) {
				IBody* pBody[BODY_COUNT] = { 0 };
				// Refresh the skeleton data for all 6 possible bodies;
				// pBody is an array of 6 IBody pointers, and IBody stores the tracked skeleton data
				hResult = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, pBody);
				if (SUCCEEDED(hResult))
				{
					for (int count = 0; count < BODY_COUNT; count++)
					{
						BOOLEAN bTracked = false;
						hResult = pBody[count]->get_IsTracked(&bTracked); // check whether this body is tracked
						if (SUCCEEDED(hResult) && bTracked)
						{
							// Get joint positions
							Joint joint[JointType::JointType_Count];

							// Get joint orientations
							JointOrientation aOrientations[JointType::JointType_Count];

							hResult = pBody[count]->GetJoints(JointType::JointType_Count, joint); // fill the joint array
							if (pBody[count]->GetJointOrientations(JointType::JointType_Count, aOrientations) != S_OK)
							{
								cerr << "GetJointOrientations failed" << endl;
							}

							if (SUCCEEDED(hResult))
							{
								// Left hand state
								HandState leftHandState = HandState::HandState_Unknown; // unknown until queried
								hResult = pBody[count]->get_HandLeftState(&leftHandState); // query the left-hand state
								if (SUCCEEDED(hResult)) {
									ColorSpacePoint colorSpacePoint = { 0 }; // ColorSpacePoint: a 2D location in color space
									hResult = pCoordinateMapper->MapCameraPointToColorSpace(joint[JointType::JointType_HandLeft].Position, &colorSpacePoint);
									// Map the left-hand joint from camera space into color space
									if (SUCCEEDED(hResult)) {
										int x = static_cast<int>(colorSpacePoint.X);
										int y = static_cast<int>(colorSpacePoint.Y);


										if ((x >= 0) && (x < width) && (y >= 0) && (y < height)) {
											// The circle color encodes the hand state (BGR): green = open, dark red = closed, dark cyan = lasso
											if (leftHandState == HandState::HandState_Open) {
												cv::circle(bufferMat, cv::Point(x, y), 75, cv::Scalar(0, 128, 0), 5, CV_AA);
											}
											else if (leftHandState == HandState::HandState_Closed) {
												cv::circle(bufferMat, cv::Point(x, y), 75, cv::Scalar(0, 0, 128), 5, CV_AA);
											}
											else if (leftHandState == HandState::HandState_Lasso) {
												cv::circle(bufferMat, cv::Point(x, y), 75, cv::Scalar(128, 128, 0), 5, CV_AA);
											}
										}
									}
								}

								HandState rightHandState = HandState::HandState_Unknown;
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pBody[count]->get_HandRightState(&rightHandState);
								if (SUCCEEDED(hResult)) {
									hResult = pCoordinateMapper->MapCameraPointToColorSpace(joint[JointType::JointType_HandRight].Position, &colorSpacePoint);
									if (SUCCEEDED(hResult)) {
										int x = static_cast<int>(colorSpacePoint.X);
										int y = static_cast<int>(colorSpacePoint.Y);

										if ((x >= 0) && (x < width) && (y >= 0) && (y < height)) {
											if (rightHandState == HandState::HandState_Open) {
												cv::circle(bufferMat, cv::Point(x, y), 75, cv::Scalar(0, 128, 0), 5, CV_AA);
											}
											else if (rightHandState == HandState::HandState_Closed) {
												cv::circle(bufferMat, cv::Point(x, y), 75, cv::Scalar(0, 0, 128), 5, CV_AA);
											}
											else if (rightHandState == HandState::HandState_Lasso) {
												cv::circle(bufferMat, cv::Point(x, y), 75, cv::Scalar(128, 128, 0), 5, CV_AA);
											}
										}
									}
								}
								CvPoint skeletonPoint[BODY_COUNT][JointType_Count] = { cvPoint(0, 0) };
								// CvPoint is a simple 2D point with integer members x and y
								// Project every joint into color-image coordinates and draw it
								for (int type = 0; type < JointType::JointType_Count; type++) {
									ColorSpacePoint colorSpacePoint = { 0 };
									pCoordinateMapper->MapCameraPointToColorSpace(joint[type].Position, &colorSpacePoint);
									int x = static_cast<int>(colorSpacePoint.X); // static_cast: compile-time conversion, float -> int here
									int y = static_cast<int>(colorSpacePoint.Y);
									skeletonPoint[count][type].x = x;
									skeletonPoint[count][type].y = y;
									if ((x >= 0) && (x < width) && (y >= 0) && (y < height)) {
										cv::circle(bufferMat, cv::Point(x, y), 5, static_cast<cv::Scalar>(color[count]), -1, CV_AA);
									}
								}
								// Copy all 25 joint positions (camera space, in meters) for this body
								for (int i = 0; i < 25; i++)
								{
									Skeletons[count][i][0] = joint[i].Position.X;
									Skeletons[count][i][1] = joint[i].Position.Y;
									Skeletons[count][i][2] = joint[i].Position.Z;
									// Joint position:
									//cout << count << " joint " << i << " " << joint[i].Position.X << " " << joint[i].Position.Y << " " << joint[i].Position.Z << endl;
									// Joint orientation (e.g. right wrist, index 10):
									//cout << count << " right wrist orientation " << aOrientations[10].Orientation.w << " " << aOrientations[10].Orientation.x << " " << aOrientations[10].Orientation.y << " " << aOrientations[10].Orientation.z << endl;
								}

								// One line per tracked body: body index, then label + X Y Z for each recorded joint.
								// Change the joint indices below to record different joints (see the joint map in section 3).
								outfile << count << " " <<
									"SpineShoulder" << " " << joint[20].Position.X << " " << joint[20].Position.Y << " " << joint[20].Position.Z << " " <<
									"ShoulderLeft" << " " << joint[4].Position.X << " " << joint[4].Position.Y << " " << joint[4].Position.Z << " " <<
									"ElbowLeft" << " " << joint[5].Position.X << " " << joint[5].Position.Y << " " << joint[5].Position.Z << " " <<
									"WristLeft" << " " << joint[6].Position.X << " " << joint[6].Position.Y << " " << joint[6].Position.Z << " " <<
									"HandLeft" << " " << joint[7].Position.X << " " << joint[7].Position.Y << " " << joint[7].Position.Z << " " <<
									"HandTipLeft" << " " << joint[21].Position.X << " " << joint[21].Position.Y << " " << joint[21].Position.Z << " " <<
									"ThumbLeft" << " " << joint[22].Position.X << " " << joint[22].Position.Y << " " << joint[22].Position.Z << " " <<
									"ShoulderRight" << " " << joint[8].Position.X << " " << joint[8].Position.Y << " " << joint[8].Position.Z << " " <<
									"ElbowRight" << " " << joint[9].Position.X << " " << joint[9].Position.Y << " " << joint[9].Position.Z << " " <<
									"WristRight" << " " << joint[10].Position.X << " " << joint[10].Position.Y << " " << joint[10].Position.Z << " " <<
									"HandRight" << " " << joint[11].Position.X << " " << joint[11].Position.Y << " " << joint[11].Position.Z << " " <<
									"HandTipRight" << " " << joint[23].Position.X << " " << joint[23].Position.Y << " " << joint[23].Position.Z << " " <<
									"ThumbRight" << " " << joint[24].Position.X << " " << joint[24].Position.Y << " " << joint[24].Position.Z << endl;
								drawSkeleton(bufferMat, skeletonPoint[count], joint, count);
							}
						}
					}
					cv::resize(bufferMat, bodyMat, cv::Size(), 0.5, 0.5);
				}
				for (int count = 0; count < BODY_COUNT; count++) {
					SafeRelease(pBody[count]);
				}
			}
			SafeRelease(pColorFrame);
			SafeRelease(pBodyFrame);

			cv::imshow("Body", bodyMat);
			if (cv::waitKey(1) == 27) { // press Esc to stop capturing and release everything
				break;
			}

		}
		SafeRelease(pColorSource);
		SafeRelease(pColorReader);
		SafeRelease(pDescription);
		SafeRelease(pBodySource);
		// done with body frame reader
		SafeRelease(pBodyReader);

		// done with coordinate mapper
		SafeRelease(pCoordinateMapper);

		if (pSensor) {
			pSensor->Close();
		}
		SafeRelease(pSensor);

		outfile.close();

		return 0;

}
void DrawBone(Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone, JointType joint0, JointType joint1)
{
	TrackingState joint0State = pJoints[joint0].TrackingState;
	TrackingState joint1State = pJoints[joint1].TrackingState;

	// If we can't find either of these joints, exit
	if ((joint0State == TrackingState_NotTracked) || (joint1State == TrackingState_NotTracked))
	{
		return;
	}

	// Don't draw if both points are inferred
	if ((joint0State == TrackingState_Inferred) && (joint1State == TrackingState_Inferred))
	{
		return;
	}


	CvScalar color;
	switch (whichone) // a different color for each tracked person
	{
	case 0:
		color = cvScalar(255);
		break;
	case 1:
		color = cvScalar(0, 255);
		break;
	case 2:
		color = cvScalar(0, 0, 255);
		break;
	case 3:
		color = cvScalar(255, 255, 0);
		break;
	case 4:
		color = cvScalar(255, 0, 255);
		break;
	case 5:
		color = cvScalar(0, 255, 255);
		break;
	}


	// Draw bones between two tracked joints with a thicker line; if one joint is only inferred, use a thin line
	if ((joint0State == TrackingState_Tracked) && (joint1State == TrackingState_Tracked))
	{
		line(SkeletonImage, pointSet[joint0], pointSet[joint1], color, 2);
	}
	else
	{
		line(SkeletonImage, pointSet[joint0], pointSet[joint1], color, 1);
	}
}

void drawSkeleton(Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone)
{

	// Draw the bones

	// Torso
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_Head, JointType_Neck);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_Neck, JointType_SpineShoulder);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_SpineShoulder, JointType_SpineMid);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_SpineMid, JointType_SpineBase);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_SpineShoulder, JointType_ShoulderRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_SpineShoulder, JointType_ShoulderLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_SpineBase, JointType_HipRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_SpineBase, JointType_HipLeft);

	// Right Arm    
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_ShoulderRight, JointType_ElbowRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_ElbowRight, JointType_WristRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_WristRight, JointType_HandRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_HandRight, JointType_HandTipRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_WristRight, JointType_ThumbRight);

	// Left Arm
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_ShoulderLeft, JointType_ElbowLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_ElbowLeft, JointType_WristLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_WristLeft, JointType_HandLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_HandLeft, JointType_HandTipLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_WristLeft, JointType_ThumbLeft);

	// Right Leg
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_HipRight, JointType_KneeRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_KneeRight, JointType_AnkleRight);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_AnkleRight, JointType_FootRight);

	// Left Leg
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_HipLeft, JointType_KneeLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_KneeLeft, JointType_AnkleLeft);
	DrawBone(SkeletonImage, pointSet, pJoints, whichone, JointType_AnkleLeft, JointType_FootLeft);
}
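
Each line that the program writes starts with the body index, followed by a label and an X Y Z triple (camera-space coordinates, in meters) for every recorded joint. As a minimal sketch of how the saved file can be read back for offline processing (the path, the JointSample struct and the variable names are only illustrative, not part of the original code):

// Illustrative sketch: read grab1.txt back into memory.
// Assumes the line format produced above: bodyIndex, then repeated "Label X Y Z" groups.
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct JointSample {          // hypothetical helper type
	std::string name;
	float x, y, z;
};

int main()
{
	std::ifstream infile("E:\\Project_VS\\2020-9-14\\grab1.txt");
	std::string line;
	while (std::getline(infile, line)) {
		std::istringstream iss(line);
		int bodyIndex;
		if (!(iss >> bodyIndex)) continue;          // skip malformed lines

		std::vector<JointSample> joints;
		JointSample j;
		while (iss >> j.name >> j.x >> j.y >> j.z)  // read "Label X Y Z" groups until the line ends
			joints.push_back(j);

		std::cout << "body " << bodyIndex << ": read " << joints.size() << " joints" << std::endl;
	}
	return 0;
}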


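The comment at the top of the listing also mentions computing an arm angle (臂型角), but that step is not in the code above. As a rough illustration only (not the author's method), the sketch below computes the elbow flexion angle of the left arm from the camera-space positions of the shoulder, elbow and wrist; the function name is mine. The arm angle used for redundant 7-DOF arms is usually defined as the rotation of the shoulder-elbow-wrist plane about the shoulder-wrist axis relative to a reference plane, which needs one extra reference vector but the same style of vector arithmetic.

// Illustrative only: angle at the elbow between upper arm and forearm,
// computed from three CameraSpacePoints (Kinect camera-space joint positions).
#include <Kinect.h>
#include <cmath>

static float ElbowAngleDeg(const CameraSpacePoint& shoulder,
                           const CameraSpacePoint& elbow,
                           const CameraSpacePoint& wrist)
{
	// Vectors from the elbow to the shoulder and from the elbow to the wrist
	float ux = shoulder.X - elbow.X, uy = shoulder.Y - elbow.Y, uz = shoulder.Z - elbow.Z;
	float vx = wrist.X - elbow.X, vy = wrist.Y - elbow.Y, vz = wrist.Z - elbow.Z;

	float dot = ux * vx + uy * vy + uz * vz;
	float normU = std::sqrt(ux * ux + uy * uy + uz * uz);
	float normV = std::sqrt(vx * vx + vy * vy + vz * vz);
	if (normU == 0.0f || normV == 0.0f) return 0.0f;   // degenerate case: joints coincide

	float c = dot / (normU * normV);
	if (c > 1.0f) c = 1.0f;                            // clamp against rounding error
	if (c < -1.0f) c = -1.0f;
	return std::acos(c) * 180.0f / 3.14159265f;        // angle in degrees
}

// Example use inside the tracking loop of the program above:
//   float angle = ElbowAngleDeg(joint[JointType_ShoulderLeft].Position,
//                               joint[JointType_ElbowLeft].Position,
//                               joint[JointType_WristLeft].Position);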

3. Joint index map

(Figure: Kinect 2.0 joint index map, joints numbered 0 to 24 as used by joint[] in the code)
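
If the image does not load, the indices used by joint[...] in the code follow the Kinect SDK 2.0 JointType enumeration (0 to 24). A small lookup table in that order (the array name is my own):

// Index -> joint name, in Kinect SDK 2.0 JointType order.
static const char* kJointNames[25] = {
	"SpineBase",     // 0
	"SpineMid",      // 1
	"Neck",          // 2
	"Head",          // 3
	"ShoulderLeft",  // 4
	"ElbowLeft",     // 5
	"WristLeft",     // 6
	"HandLeft",      // 7
	"ShoulderRight", // 8
	"ElbowRight",    // 9
	"WristRight",    // 10
	"HandRight",     // 11
	"HipLeft",       // 12
	"KneeLeft",      // 13
	"AnkleLeft",     // 14
	"FootLeft",      // 15
	"HipRight",      // 16
	"KneeRight",     // 17
	"AnkleRight",    // 18
	"FootRight",     // 19
	"SpineShoulder", // 20
	"HandTipLeft",   // 21
	"ThumbLeft",     // 22
	"HandTipRight",  // 23
	"ThumbRight",    // 24
};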
