// 转载: http://blog.csdn.net/chenxin_130/article/details/6703301
// 小斤的博客贴出了源码: 手势识别
// 转载: http://viml.nchc.org.tw/blog/paper_info.php?CLASS_ID=1&SUB_ID=1&PAPER_ID=234
// heresy 详细描述了原理
// 可通过 xn::GestureGenerator 的 EnumerateGestures() 函数取得可识别的手势名称。
#include <stdlib.h> #include <iostream> #include "opencv/cv.h" #include "opencv/highgui.h" #include <XnCppWrapper.h> using namespace std; using namespace cv; // output for XnPoint3D ostream& operator<<( ostream& out, const XnPoint3D& rPoint ) { out << "(" << rPoint.X << "," << rPoint.Y << "," << rPoint.Z << ")"; return out; } //【4】手势识别callback 函数,可为空 // callback function for gesture recognized void XN_CALLBACK_TYPE gestureRecog( xn::GestureGenerator &generator, const XnChar *strGesture, const XnPoint3D *pIDPosition, const XnPoint3D *pEndPosition, void *pCookie ) { cout << strGesture<<" from "<<*pIDPosition<<" to "<<*pEndPosition << endl; //画出手势识别时的点 int imgStartX=0; int imgStartY=0; int imgEndX=0; int imgEndY=0; char locationinfo[100]; imgStartX=(int)(640/2-(pIDPosition->X)); imgStartY=(int)(480/2-(pIDPosition->Y)); imgEndX=(int)(640/2-(pEndPosition->X)); imgEndY=(int)(480/2-(pEndPosition->Y)); IplImage* refimage=(IplImage*)pCookie; if(strcmp(strGesture,"RaiseHand")==0) { cvCircle(refimage,cvPoint(imgStartX,imgStartY),1,CV_RGB(255,0,0),2); } else if(strcmp(strGesture,"Wave")==0) { cvLine(refimage,cvPoint(imgStartX,imgStartY),cvPoint(imgEndX,imgEndY),CV_RGB(255,255,0),6); } else if(strcmp(strGesture,"Click")==0) { cvCircle(refimage,cvPoint(imgStartX,imgStartY),6,CV_RGB(0,0,255),12); } cvSetImageROI(refimage,cvRect(40,450,640,30)); CvFont font; cvInitFont( &font, CV_FONT_VECTOR0,1, 1, 0, 3, 5); cvSet(refimage, cvScalar(255,255,255)); sprintf(locationinfo,"From: %d,%d to %d,%d",(int)pIDPosition->X,(int)pIDPosition->Y,(int)(pEndPosition->X),(int)(pEndPosition->Y)); cvPutText(refimage, locationinfo ,cvPoint(30, 30), &font, CV_RGB(0,0,0)); cvResetImageROI(refimage); } void clearImg(IplImage* inputimg) { CvFont font; cvInitFont( &font, CV_FONT_VECTOR0,1, 1, 0, 3, 5); memset(inputimg->imageData,255,640*480*3); cvPutText(inputimg, "Hand Raise!" ,cvPoint(20, 20), &font, CV_RGB(255,0,0)); cvPutText(inputimg, "Hand Wave!" 
, cvPoint(20, 50), &font, CV_RGB(255,255,0)); cvPutText(inputimg, "Hand Push!" , cvPoint(20, 80), &font, CV_RGB(0,0,255)); } //【5】手势过程callback // callback function for gesture progress void XN_CALLBACK_TYPE gestureProgress( xn::GestureGenerator &generator, const XnChar *strGesture, const XnPoint3D *pPosition, XnFloat fProgress, void *pCookie ) { cout << strGesture << ":" << fProgress << " at " << *pPosition << endl; } int main( int argc, char** argv ) { IplImage* drawPadImg=cvCreateImage(cvSize(640,480),IPL_DEPTH_8U,3); IplImage* cameraImg=cvCreateImage(cvSize(640,480),IPL_DEPTH_16U,1); IplImage* depthshow = cvCreateImage(cvSize(640,480),IPL_DEPTH_8U,1); cvNamedWindow("Gesture",1); cvNamedWindow("Camera",1); clearImg(drawPadImg); XnStatus res; char key=0; // context初始化context xn::Context context; res = context.Init(); xn::DepthMetaData depthMD; // create generator 生产节点 // xn::ImageGenerator imageGenerator; xn::DepthGenerator depthGenerator; //res = imageGenerator.Create( context ); res = depthGenerator.Create(context); //【1】 xn::GestureGenerator gestureGenerator; res = gestureGenerator.Create( context ); /*检查有多少种手势 XnUInt16 uNum = 10; XnChar** asName = new XnChar*[uNum]; for (int i=0; i<uNum;i++) { asName[i] = new XnChar[100]; } res = gestureGenerator.EnumerateGestures(*asName,uNum); cout<<"there are"<<uNum<<"gestures"<<endl; for (int i=0;i<uNum;i++) { cout<<asName[i]<<endl; }*/ //【2】设置手势 // Add gesture //gestureGenerator.AddGesture( "MovingHand", NULL ); gestureGenerator.AddGesture( "Wave", NULL ); gestureGenerator.AddGesture( "Click", NULL ); // gestureGenerator.AddGesture( "RaiseHand", NULL ); // gestureGenerator.AddGesture("MovingHand",NULL); // /【3】callback函数 // 6. 
Register callback functions of gesture generator XnCallbackHandle handle; gestureGenerator.RegisterGestureCallbacks( gestureRecog, gestureProgress, (void*)drawPadImg, handle ); //start generate data context.StartGeneratingAll(); res = context.WaitAndUpdateAll(); while( (key!=27) && !(res = context.WaitAndUpdateAll()) ) { if(key=='c') { clearImg(drawPadImg); } depthGenerator.GetMetaData(depthMD); memcpy(cameraImg->imageData,depthMD.Data(),640*480*2); cvConvertScale(cameraImg,depthshow,255/4096.0,0); cvShowImage("Gesture",drawPadImg); cvShowImage("Camera",depthshow); key=cvWaitKey(20); } cvDestroyWindow("Gesture"); cvDestroyWindow("Camera"); cvReleaseImage(&drawPadImg); cvReleaseImage(&cameraImg); context.StopGeneratingAll(); context.Shutdown(); return 0; }