Hand Gesture Recognition with OpenCV Template Matching (Modified Version)
Original source could not be located. http://blog.ednchina.com/opencv2008/202330/message.aspx
//Modified version that compiles and runs, but the results do not seem very good.
//My machine is a Celeron 600 overclocked to 900 MHz with 256 MB SDRAM; while testing just now
//it actually ran out of memory. Many things still need improvement; if I find time I will upload a revised version.
//One more issue: it occasionally locks onto my face, presumably because my face looks too much like a hand : )
//Uses template matching to recognize hand gestures.
//This revision fixes the bug that made memory consumption grow rapidly, though usage still creeps up slightly;
//as long as a test run stays within half an hour, the out-of-memory warning generally does not appear.
//Fixed the problem in the original version where display errors occurred after reading 10 images.
//Please download the test images from the "[资料]OpenCV模板匹配的方法识别手势" post.
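//Pipeline overview:
// 1. Segment skin-colored pixels in HSV using hue/saturation ranges.
// 2. Clean the binary mask with dilation/erosion and extract contours.
// 3. For each sufficiently large contour, scale the open- and closed-hand
//    templates to the contour's bounding box and compare them pixel-by-pixel
//    against the contour mask.
// 4. Draw a red rectangle for an open hand, or a green one for a closed hand,
//    around the best-matching contour.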
#ifdef _CH_
#define WIN32
#error "The file needs cvaux, which is not wrapped yet. Sorry"
#endif
#ifndef _EiC
#include "cv.h"
#include "cxcore.h"
#include "cvaux.h"
#include "highgui.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <math.h>
#include <float.h>
#include <limits.h>
#include <time.h>
#include <ctype.h>
#endif
static CvMemStorage* storage;
//static double color;
static CvSize imgSize;
static CvSize tmplSize;
static CvSeq* contour;
static CvSeq* biggest;
static CvCapture* capture = 0;
static int isHandOpen = 1;
//static double color;
static CvScalar color;
static IplImage *img;
static IplImage *tmpImg;
static IplImage *conv;
static IplImage *H;
static IplImage *S;
static IplImage *V;
static IplImage *tmpH1;
static IplImage *tmpS1;
static IplImage *tmpH2;
static IplImage *tmpS2;
static IplImage *tmpH3;
static IplImage *tmpS3;
static IplImage *openHandTmpl, *closedHandTmpl;
static IplImage *openGrayHandTmpl, *closedGrayHandTmpl;
static IplImage *openscaledTmpl, *closedscaledTmpl;
static IplImage *openMatchResult, *closedMatchResult;
FILE *output;
void handDetect( IplImage *img);
static int numImg = 10;
char *names[] = { "./images/test01.jpg", "./images/test02.jpg", "./images/test03.jpg", "./images/test04.jpg", "./images/test05.jpg",
"../images/test06.jpg", "./images/test07.jpg", "./images/test08.jpg", "./images/test09.jpg", "./images/test10.jpg"};
char *resultsimage[] = {"result01.jpg","result02.jpg","result03.jpg","result04.jpg","result05.jpg","result06.jpg","result07.jpg",
"result08.jpg","result09.jpg","result10.jpg"};
int main( int argc, char** argv )
{
int i = 0;
char filename[80];
storage = cvCreateMemStorage(0);
#ifdef DEBUG
cvNamedWindow( "Template1", 1 );
cvNamedWindow( "Template2", 1 );
cvNamedWindow( "Comp Results1", 1 );
cvNamedWindow( "Comp Results2", 1 );
cvNamedWindow( "Skin Detection", 1 );
#endif
openHandTmpl = cvLoadImage("openHandTmpl.jpg",1);
closedHandTmpl = cvLoadImage("closedHandTmpl.jpg",1);
// Convert from BGR to grayscale
tmplSize.width = openHandTmpl->width;
tmplSize.height = openHandTmpl->height;
openGrayHandTmpl = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
cvCvtColor( openHandTmpl, openGrayHandTmpl, CV_BGR2GRAY );
tmplSize.width = closedHandTmpl->width;
tmplSize.height = closedHandTmpl->height;
closedGrayHandTmpl = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
cvCvtColor( closedHandTmpl, closedGrayHandTmpl, CV_BGR2GRAY );
cvNamedWindow("source",1);
cvMoveWindow("source",0,0);
cvNamedWindow( "Results", 1 );
cvMoveWindow("Results",400,300);
cvAddSearchPath("./images/");
// Open the Output File
output = fopen( "results.log", "w" );
// Color for contour drawing
color = CV_RGB( 255,255,255 );
if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
{
capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 );
}
else if( argc == 2 )
{
capture = cvCaptureFromAVI( argv[1] );
}
if(capture != 0)
{
for(;;)
{
IplImage *frame;//, *frame_copy;
if( !cvGrabFrame( capture ))
break;
frame = cvRetrieveFrame( capture );
if( !frame )
break;
imgSize.width = frame->width;
imgSize.height = frame->height;
// frame_copy = cvCloneImage( frame );
handDetect( frame );
if( cvWaitKey( 10 ) >= 0 )
break;
}
cvReleaseCapture( &capture );
}
else
{
for ( i = 0; i < numImg; i++ )
{
fprintf(output, "Image %i %s ************************/n", i+1, names);
strcpy(filename, "result_");
// cvNamedWindow( names, 1 );
// cvNamedWindow("source",1);
// cvMoveWindow("source",0,0);
img = cvLoadImage(names[i],1);
cvShowImage("source",img);
cvWaitKey(0);
if( img )
{
imgSize.width = img->width;
imgSize.height = img->height;
handDetect( img );
// strcat(filename, names);
cvSaveImage(resultsimage[i], img);
cvReleaseImage(&img);
}
}
cvWaitKey(0);
// for ( i = 0; i < numImg; i++ )
// {
// cvDestroyWindow( names);
// }
}
cvReleaseImage(&tmpImg);
cvReleaseImage(&conv);
cvReleaseImage(&H);
cvReleaseImage(&S);
cvReleaseImage(&V);
cvReleaseImage(&tmpH1);
cvReleaseImage(&tmpH2);
cvReleaseImage(&tmpH3);
cvReleaseImage(&tmpS1);
cvReleaseImage(&tmpS2);
cvReleaseImage(&tmpS3);
cvReleaseImage(&openHandTmpl);
cvReleaseImage(&closedHandTmpl);
cvReleaseImage(&openGrayHandTmpl);
cvReleaseImage(&closedGrayHandTmpl);
cvReleaseImage(&openscaledTmpl);
cvReleaseImage(&closedscaledTmpl);
cvReleaseImage(&openMatchResult);
cvReleaseImage(&closedMatchResult);
cvReleaseMemStorage(&storage);
fclose( output );
// cvDestroyWindow( "Result");
#ifdef DEBUG
cvDestroyWindow( "Template1");
cvDestroyWindow( "Template2" );
cvDestroyWindow( "Comp Results1" );
cvDestroyWindow( "Comp Results2" );
cvDestroyWindow( "Skin Detection" );
#endif
return 0;
}
void handDetect( IplImage *img)
{
// CvSize tmplSize;
CvSize resultSize;
IplConvKernel *erosionElement, *dilationElement;
double contArea, imgArea, maxRatio = 0.0;
CvRect bndRect = cvRect(0,0,0,0);
CvPoint pt1, pt2;
float scaleFactor;
CvScalar averageValue;
int startCount = 0;
int openCount = 0;
int closedCount = 0;
float openCompRatio = 0.0;
float closedCompRatio = 0.0;
float bestRatio = 0.0;
imgArea = imgSize.width * imgSize.height;
// Open Template files
// openHandTmpl = cvLoadImage("openHandTmpl.jpg",1);
// closedHandTmpl = cvLoadImage("closedHandTmpl.jpg",1);
// // Convert form RGB to GRAY
// tmplSize.width = openHandTmpl->width;
// tmplSize.height = openHandTmpl->height;
//
// openGrayHandTmpl = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
// cvCvtColor( openHandTmpl, openGrayHandTmpl, CV_BGR2GRAY );
//
// tmplSize.width = closedHandTmpl->width;
// tmplSize.height = closedHandTmpl->height;
//
// closedGrayHandTmpl = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
// cvCvtColor( closedHandTmpl, closedGrayHandTmpl, CV_BGR2GRAY );
// Generate processing images
tmpImg = cvCloneImage(img);
conv = cvCreateImage( imgSize, IPL_DEPTH_8U, 3);
tmpH1 = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
tmpS1 = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
tmpH2 = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
tmpS2 = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
tmpH3 = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
tmpS3 = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
H = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
S = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
V = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
// Flip the frame if in capture mode; flip both the display image and the
// working copy so detection coordinates stay aligned with the display
if ( capture )
{
cvFlip(img, img, 0);
cvFlip(tmpImg, tmpImg, 0);
}
// Image smoothing: write the smoothed result into the working copy
// so that the HSV conversion below actually uses it
cvSmooth(img, tmpImg, CV_GAUSSIAN, 3, 3);
// Convert to HSV
cvCvtColor( tmpImg, conv, CV_BGR2HSV );
// Split to HSV planes
cvCvtPixToPlane(conv,H,S,V,0);
// Average Illumination
averageValue = cvAvg( V , 0);
fprintf(output, "Illumination Level = %f/n", averageValue.val[0]);
// Detect skin tone Hues and Saturations
// NOTE: Hue values need to be doubled for actual
// Red to Orange Hue with High Saturation
// Hue 0 to 28 degree and Sat 190 to 200
cvInRangeS(H,cvScalar(0.0,0.0,0,0),cvScalar(14.0,0.0,0,0),tmpH1);
cvInRangeS(S,cvScalar(75.0,0.0,0,0),cvScalar(200.,0.0,0,0),tmpS1);
cvAnd(tmpH1,tmpS1,tmpH1,0);
// Red Hue with Low Saturation
// Hue 0 to 26 degree and Sat 20 to 90
cvInRangeS(H,cvScalar(0.0,0.0,0,0),cvScalar(13.0,0.0,0,0),tmpH2);
cvInRangeS(S,cvScalar(20.0,0.0,0,0),cvScalar(90.0,0.0,0,0),tmpS2);
cvAnd(tmpH2,tmpS2,tmpH2,0);
// Red Hue to Pink with Low Saturation
// Hue 340 to 360 degree and Sat 15 to 90
cvInRangeS(H,cvScalar(170.0,0.0,0,0),cvScalar(180.0,0.0,0,0),tmpH3);
cvInRangeS(S,cvScalar(15.0,0.0,0,0),cvScalar(90.,0.0,0,0),tmpS3);
cvAnd(tmpH3,tmpS3,tmpH3,0);
// Combine the Hue and Sat detections
cvOr(tmpH3,tmpH2,tmpH2,0);
cvOr(tmpH1,tmpH2,tmpH1,0);
// Dilation and Erosion
// Structuring Element Generation
dilationElement = cvCreateStructuringElementEx( 5,5,3,3, CV_SHAPE_RECT , 0 );
erosionElement = cvCreateStructuringElementEx( 5,5,3,3, CV_SHAPE_RECT , 0 );
// Dilation adds a layer on, and returns things to the correct size.
cvDilate(tmpH1,tmpH2,dilationElement,1);
// Erosion peels a layer of pixels off, and makes small regions disappear
cvErode(tmpH1,tmpH3,erosionElement,1);
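// Note: contours are extracted from the eroded mask tmpH3 below;
// the dilated mask tmpH2 is not used after this point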
// Find the contours of all remaining objects
contour = 0;
biggest = 0;
cvFindContours( tmpH3, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
cvZero(tmpH1);
// contour now contains a CvSequence of all of the contours
for( ; contour != 0; contour = contour->h_next )
{
contArea = fabs(cvContourArea( contour, CV_WHOLE_SEQ ));
// Ignore very small contours
if ( contArea/imgArea >= 0.015 )
{
cvDrawContours( tmpH1, contour, color, color, 0, -1, 8 );
bndRect = cvBoundingRect( contour, 0 );
// Scale the templates and result images
scaleFactor = ((float)bndRect.width / (float)openHandTmpl->width);
tmplSize.width = scaleFactor * openHandTmpl->width;
tmplSize.height = scaleFactor * openHandTmpl->height;
openscaledTmpl = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
closedscaledTmpl = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
openMatchResult = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
closedMatchResult = cvCreateImage( tmplSize, IPL_DEPTH_8U, 1);
cvResize( openGrayHandTmpl,openscaledTmpl, CV_INTER_LINEAR );
cvResize( closedGrayHandTmpl,closedscaledTmpl, CV_INTER_LINEAR );
// Set ROI for image
bndRect.width = tmplSize.width;
bndRect.height = tmplSize.height;
cvSetImageROI(tmpH1, bndRect);
// Check that the ROI matches the template sizes.
// If the ROI is too near the edge of the image it may be
// truncated when it is set and will not match bndRect.
if ( tmpH1->roi->width != bndRect.width || tmpH1->roi->height != bndRect.height )
{
// Truncated ROI: zero the result images so the match counts below stay at zero
cvZero( openMatchResult );
cvZero( closedMatchResult );
}
else
{
// Compare the templates to the image ROI: CV_CMP_EQ marks pixels where the
// contour mask equals the scaled template image, so the non-zero count
// measures how well the contour overlaps each template
cvCmp( tmpH1, openscaledTmpl, openMatchResult, CV_CMP_EQ );
cvCmp( tmpH1, closedscaledTmpl, closedMatchResult, CV_CMP_EQ );
}
// Analyze the results
startCount = cvCountNonZero( tmpH1 );
openCount = cvCountNonZero( openMatchResult );
closedCount = cvCountNonZero( closedMatchResult );
openCompRatio = (float)openCount/(float)startCount;
closedCompRatio = (float)closedCount/(float)startCount;
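// The ratios are the fraction of pixels in the contour ROI that agree with
// each template; the largest ratio across all contours is compared against
// the 0.60 drawing threshold after the loop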
bestRatio = 0.0;
// Small regions may give erroneous results
if ( openCompRatio > 1.0 ) openCompRatio = 0.0;
if ( closedCompRatio > 1.0 ) closedCompRatio = 0.0;
// Compare the Open and closed hand results
if ( openCompRatio > closedCompRatio )
{
if ( openCompRatio > maxRatio )
{
maxRatio = openCompRatio;
biggest = contour;
isHandOpen = 1;
}
}
else
{
if ( closedCompRatio > maxRatio )
{
maxRatio = closedCompRatio;
biggest = contour;
isHandOpen = 0;
}
}
// Reset ROI
cvResetImageROI( tmpH1 );
// Output the results
fprintf(output, "Contour Non-zero Count = %i/n", startCount);
fprintf(output, "Open Hand Matched Non-zero Count = %i Open Hand Ratio = %f/n", openCount, openCompRatio);
fprintf(output, "Clased Hand Matched Non-zero Count = %i Clased Hand Ratio = %f/n/n", closedCount, closedCompRatio);
#ifdef DEBUG
cvShowImage("Comp Results1",openMatchResult);
cvShowImage("Comp Results2",closedMatchResult);
cvShowImage("Template1",openscaledTmpl);
cvShowImage("Template2",closedscaledTmpl);
cvShowImage("Skin Detection",tmpH1);
cvWaitKey(0);
#endif
// Release the per-contour images so repeated allocations inside the
// loop do not leak memory
cvReleaseImage(&openscaledTmpl);
cvReleaseImage(&closedscaledTmpl);
cvReleaseImage(&openMatchResult);
cvReleaseImage(&closedMatchResult);
}
cvZero(tmpH1);
}
// Draw Detection Rectangle
// Red for Open Hand
// Green for Closed Hand
if ( biggest && maxRatio > 0.60 )
{
bndRect = cvBoundingRect( biggest, 0 );
cvResetImageROI( img );
pt1.x = bndRect.x;
pt1.y = bndRect.y;
pt2.x = bndRect.x + bndRect.width;
pt2.y = bndRect.y + bndRect.height;
if ( isHandOpen )
{
cvRectangle( img, pt1, pt2, CV_RGB(255,0,0), 5 );
}
else
{
cvRectangle( img, pt1, pt2, CV_RGB(0,255,0), 5 );
}
}
// Display the results
cvShowImage("Results",img);
cvWaitKey(0);
// If we found any contours then free the memory they use.
if (contour!=NULL)
{
cvClearSeq(contour);
}
cvClearMemStorage(storage);
cvReleaseImage(&tmpImg);
cvReleaseImage(&conv);
cvReleaseImage(&tmpH1);
cvReleaseImage(&tmpS1);
cvReleaseImage(&tmpH2);
cvReleaseImage(&tmpS2);
cvReleaseImage(&tmpH3);
cvReleaseImage(&tmpS3);
cvReleaseImage(&H);
cvReleaseImage(&S);
cvReleaseImage(&V);
cvReleaseImage(&openscaledTmpl);
cvReleaseImage(&closedscaledTmpl);
cvReleaseImage(&openMatchResult);
cvReleaseImage(&closedMatchResult);
}
#ifdef _EiC
main(1,"HandDetect.c");
#endif