The six methods are: RGB-based segmentation, segmentation based on the RG channels, YCrCb plus Otsu thresholding (for Otsu see http://blog.csdn.net/onezeros/article/details/6136770 and http://wenku.baidu.com/view/05c47e03bed5b9f3f90f1ce4.html), the YCrCb space, the YUV space, and the HSV space. The next step is to port these detectors to Android through JNI; the end goal is an Android smartphone that can be switched on and off with a palm print.
The development environment is Qt; add the following to the .pro file:
INCLUDEPATH += /usr/include/opencv
LIBS += /usr/lib/libcv.so \
/usr/lib/libcvaux.so \
/usr/lib/libcxcore.so \
/usr/lib/libhighgui.so \
/usr/lib/libml.so
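If your OpenCV installation ships a pkg-config file (typical for OpenCV 2.x packages) rather than the old libcv/libhighgui libraries listed above, an alternative .pro setup is the sketch below; the package name opencv is an assumption, so confirm it with `pkg-config --list-all` first:
CONFIG    += link_pkgconfig
PKGCONFIG += opencv
The full test program follows.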
#include <iostream>
#include <cstring>   // for memcpy
#include <cassert>   // for assert
#include "cv.h"
#include "highgui.h"
void SkinRGB(IplImage* rgb,IplImage* _dst);
void cvSkinRG(IplImage* rgb,IplImage* gray);
void cvThresholdOtsu(IplImage* src, IplImage* dst);
void cvSkinOtsu(IplImage* src, IplImage* dst);
void cvSkinYCbCr(IplImage* img, IplImage* mask);
void cvSkinYUV(IplImage* src,IplImage* dst);
void cvSkinHSV(IplImage* src,IplImage* dst);
using namespace std;
// skin region location using rgb limitation
int main()
{
IplImage *srcImg = cvLoadImage("/home/yan/download/testPalm4.jpg", 1);
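// defensive check (an addition for illustration, not part of the original listing):
// cvLoadImage returns NULL when the path is wrong, and the cvCreateImage calls
// below would then crash on the null pointer
if (srcImg == NULL) {
    cout << "failed to load the test image" << endl;
    return -1;
}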
IplImage *dstRGB = cvCreateImage(cvGetSize(srcImg), 8, 3);
IplImage *dstRG = cvCreateImage(cvGetSize(srcImg), 8, 1);
IplImage* dst_crotsu=cvCreateImage(cvGetSize(srcImg),8,1);
IplImage* dst_ycbcr=cvCreateImage(cvGetSize(srcImg),8,1);
IplImage* dst_yuv=cvCreateImage(cvGetSize(srcImg),8,3);
IplImage* dst_hsv=cvCreateImage(cvGetSize(srcImg),8,3);
SkinRGB(srcImg, dstRGB);
cvSaveImage("/home/yan/download/1_dstRGB.jpg", dstRGB);
cvSkinRG(srcImg, dstRG);
cvSaveImage("/home/yan/download/2_dstRG.jpg", dstRG);
cvSkinOtsu(srcImg, dst_crotsu);
cvSaveImage("/home/yan/download/3_dst_crotsu.jpg", dst_crotsu);
cvSkinYCbCr(srcImg, dst_ycbcr);
cvSaveImage("/home/yan/download/4_dst_ycbcr.jpg", dst_ycbcr);
cvSkinYUV(srcImg, dst_yuv);
cvSaveImage("/home/yan/download/5_dst_yuv.jpg", dst_yuv);
cvSkinHSV(srcImg, dst_hsv);
cvSaveImage("/home/yan/download/6_dst_hsv.jpg", dst_hsv);
cvNamedWindow("srcImg", 1);
cvShowImage("srcImg", srcImg);
cvNamedWindow("dstRGB", 1);
cvShowImage("dstRGB", dstRGB);
cvNamedWindow("dstRG", 1);
cvShowImage("dstRG", dstRG);
cvNamedWindow("dstcrotsu", 1);
cvShowImage("dstcrotsu", dst_crotsu);
cvNamedWindow("dst_ycbcr", 1);
cvShowImage("dst_ycbcr", dst_ycbcr);
cvNamedWindow("dst_yuv", 1);
cvShowImage("dst_yuv", dst_yuv);
cvNamedWindow("dst_hsv", 1);
cvShowImage("dst_hsv", dst_hsv);
cvWaitKey(0);
cout << "Hello World!" << endl;
return 0;
}
void SkinRGB(IplImage* rgb,IplImage* _dst)
{
cout<<"111"<nChannels==3&& _dst->nChannels==3);
static const int R=2;
static const int G=1;
static const int B=0;
IplImage* dst=cvCreateImage(cvGetSize(_dst),8,3);
cvZero(dst);
for (int h=0;h<rgb->height;h++) {
unsigned char* prgb=(unsigned char*)rgb->imageData+h*rgb->widthStep;
unsigned char* pdst=(unsigned char*)dst->imageData+h*dst->widthStep;
for (int w=0;w<rgb->width;w++) {
if ((prgb[R]>95 && prgb[G]>40 && prgb[B]>20 &&
prgb[R]-prgb[B]>15 && prgb[R]-prgb[G]>15/*&&
!(prgb[R]>170&&prgb[G]>170&&prgb[B]>170)*/)||//uniform illumination
(prgb[R]>200 && prgb[G]>210 && prgb[B]>170 &&
abs(prgb[R]-prgb[B])<=15 && prgb[R]>prgb[B]&& prgb[G]>prgb[B])//lateral illumination
) {
memcpy(pdst,prgb,3);
}
prgb+=3;
pdst+=3;
}
}
cvCopyImage(dst,_dst);
cvReleaseImage(&dst);
}
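To make the first branch of the rule above easier to read in isolation, here is a small hypothetical helper together with two made-up sample pixels; it only restates the thresholds already used in SkinRGB:
// Hypothetical helper (a sketch, not part of the original code): the
// "uniform illumination" branch of SkinRGB reduced to a single predicate.
static bool isSkinRGBUniform(int r, int g, int b)
{
    return r > 95 && g > 40 && b > 20 && (r - b) > 15 && (r - g) > 15;
}
// e.g. isSkinRGBUniform(180, 120, 90) is true, isSkinRGBUniform(90, 90, 90) is false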
void cvSkinRG(IplImage* rgb,IplImage* gray)
{
assert(rgb->nChannels==3&&gray->nChannels==1);
const int R=2;
const int G=1;
const int B=0;
double Aup=-1.8423;
double Bup=1.5294;
double Cup=0.0422;
double Adown=-0.7279;
double Bdown=0.6066;
double Cdown=0.1766;
for (int h=0; h<rgb->height; h++)
{
unsigned char* pGray=(unsigned char*)gray->imageData+h*gray->widthStep;
unsigned char* pRGB=(unsigned char* )rgb->imageData+h*rgb->widthStep;
for (int w=0; w<rgb->width; w++)
{
int s=pRGB[R]+pRGB[G]+pRGB[B];
double r=(double)pRGB[R]/s;
double g=(double)pRGB[G]/s;
double Gup=Aup*r*r+Bup*r+Cup;
double Gdown=Adown*r*r+Bdown*r+Cdown;
double Wr=(r-0.33)*(r-0.33)+(g-0.33)*(g-0.33);
if (g<Gup && g>Gdown && Wr>0.004)
{
*pGray=255;
}
else
{
*pGray=0;
}
pGray++;
pRGB+=3;
}
}
}
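To get a feel for what the two parabolas accept (a worked example of my own, not from the original post): for normalized red r = 0.45, Gup is about 0.357 and Gdown about 0.302, while Wr >= (0.45 - 0.33)^2 = 0.0144 > 0.004 for any g, so such a pixel is classified as skin exactly when its normalized green lies between roughly 0.302 and 0.357. A minimal standalone check:
#include <cstdio>
// Prints the accepted normalized-green band for one normalized-red value,
// using the same coefficients as cvSkinRG above (sketch for illustration only).
int main()
{
    double r = 0.45;
    double Gup   = -1.8423 * r * r + 1.5294 * r + 0.0422;
    double Gdown = -0.7279 * r * r + 0.6066 * r + 0.1766;
    printf("for r=%.2f skin requires %.4f < g < %.4f\n", r, Gdown, Gup);
    return 0;
}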
void cvThresholdOtsu(IplImage* src, IplImage* dst)
{
int height=src->height;
int width=src->width;
//histogram
float histogram[256]= {0};
for(int i=0; i<height; i++)
{
unsigned char* p=(unsigned char*)src->imageData+src->widthStep*i;
for(int j=0; j<width; j++)
{
histogram[*p++]++;
}
}
//normalize the histogram and compute the global mean gray value
int size=height*width;
float avgValue=0;
for(int i=0; i<256; i++)
{
histogram[i]=histogram[i]/size;
avgValue+=i*histogram[i];
}
//find the threshold that maximizes the between-class variance
int threshold=0;
float maxVariance=0;
float w=0,u=0;
for(int i=0; i<256; i++)
{
w+=histogram[i];
u+=i*histogram[i];
float t=avgValue*w-u;
float variance=t*t/(w*(1-w));
if(variance>maxVariance)
{
maxVariance=variance;
threshold=i;
}
}
cvThreshold(src,dst,threshold,255,CV_THRESH_BINARY);
}
void cvSkinOtsu(IplImage* src, IplImage* dst)
{
assert(dst->nChannels==1&& src->nChannels==3);
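// convert to YCrCb, isolate the Cr channel, and let the Otsu threshold separate skin from background on it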
IplImage* ycrcb=cvCreateImage(cvGetSize(src),8,3);
IplImage* cr=cvCreateImage(cvGetSize(src),8,1);
cvCvtColor(src,ycrcb,CV_BGR2YCrCb);
cvSplit(ycrcb,0,cr,0,0);
cvThresholdOtsu(cr,cr);
cvCopyImage(cr,dst);
cvReleaseImage(&cr);
cvReleaseImage(&ycrcb);
}
void cvSkinYCbCr(IplImage* img, IplImage* mask)
{
CvSize imageSize = cvSize(img->width, img->height);
IplImage *imgY = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
IplImage *imgCr = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
IplImage *imgCb = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
IplImage *imgYCrCb = cvCreateImage(imageSize, img->depth, img->nChannels);
cvCvtColor(img,imgYCrCb,CV_BGR2YCrCb);
cvSplit(imgYCrCb, imgY, imgCr, imgCb, 0);
int y, cr, cb, l, x1, y1, value;
unsigned char *pY, *pCr, *pCb, *pMask;
pY = (unsigned char *)imgY->imageData;
pCr = (unsigned char *)imgCr->imageData;
pCb = (unsigned char *)imgCb->imageData;
pMask = (unsigned char *)mask->imageData;
cvSetZero(mask);
l = img->height * img->width;
for (int i = 0; i < l; i++){
y = *pY;
cr = *pCr;
cb = *pCb;
cb -= 109;
cr -= 152;
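// shift (Cr,Cb) toward the centre of the skin cluster and test an ellipse-like boundary;
// darker pixels (y<100) get the tighter radius (700), brighter ones the looser one (850)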
x1 = (819*cr-614*cb)/32 + 51;
y1 = (819*cr+614*cb)/32 + 77;
x1 = x1*41/1024;
y1 = y1*73/1024;
value = x1*x1+y1*y1;
if(y<100) (*pMask)=(value<700) ? 255:0;
else (*pMask)=(value<850)? 255:0;
pY++;
pCr++;
pCb++;
pMask++;
}
cvReleaseImage(&imgY);
cvReleaseImage(&imgCr);
cvReleaseImage(&imgCb);
cvReleaseImage(&imgYCrCb);
}
void cvSkinYUV(IplImage* src,IplImage* dst)
{
IplImage* ycrcb=cvCreateImage(cvGetSize(src),8,3);
//IplImage* cr=cvCreateImage(cvGetSize(src),8,1);
//IplImage* cb=cvCreateImage(cvGetSize(src),8,1);
cvCvtColor(src,ycrcb,CV_BGR2YCrCb);
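// note: despite the "YUV" name, this method thresholds the Cr/Cb channels of the
// YCrCb image (133<=Cr<=173, 77<=Cb<=127)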
//cvSplit(ycrcb,0,cr,cb,0);
static const int Cb=2;
static const int Cr=1;
static const int Y=0;
//IplImage* dst=cvCreateImage(cvGetSize(_dst),8,3);
cvZero(dst);
for (int h=0; h<src->height; h++)
{
unsigned char* pycrcb=(unsigned char*)ycrcb->imageData+h*ycrcb->widthStep;
unsigned char* psrc=(unsigned char*)src->imageData+h*src->widthStep;
unsigned char* pdst=(unsigned char*)dst->imageData+h*dst->widthStep;
for (int w=0; w<src->width; w++)
{
if (pycrcb[Cr]>=133&&pycrcb[Cr]<=173&&pycrcb[Cb]>=77&&pycrcb[Cb]<=127)
{
memcpy(pdst,psrc,3);
}
pycrcb+=3;
psrc+=3;
pdst+=3;
}
}
//cvCopyImage(dst,_dst);
//cvReleaseImage(&dst);
cvReleaseImage(&ycrcb); // free the temporary YCrCb image
}
void cvSkinHSV(IplImage* src,IplImage* dst)
{
IplImage* hsv=cvCreateImage(cvGetSize(src),8,3);
//IplImage* cr=cvCreateImage(cvGetSize(src),8,1);
//IplImage* cb=cvCreateImage(cvGetSize(src),8,1);
cvCvtColor(src,hsv,CV_BGR2HSV);
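// in 8-bit images OpenCV stores hue as H/2 (range 0..180), so the 7..29 test below
// corresponds to roughly 14 to 58 degrees; only hue is checked, S and V are ignored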
//cvSplit(ycrcb,0,cr,cb,0);
static const int V=2;
static const int S=1;
static const int H=0;
//IplImage* dst=cvCreateImage(cvGetSize(_dst),8,3);
cvZero(dst);
for (int h=0; h<src->height; h++)
{
unsigned char* phsv=(unsigned char*)hsv->imageData+h*hsv->widthStep;
unsigned char* psrc=(unsigned char*)src->imageData+h*src->widthStep;
unsigned char* pdst=(unsigned char*)dst->imageData+h*dst->widthStep;
for (int w=0; w<src->width; w++)
{
if (phsv[H]>=7&&phsv[H]<=29)
{
memcpy(pdst,psrc,3);
}
phsv+=3;
psrc+=3;
pdst+=3;
}
}
//cvCopyImage(dst,_dst);
//cvReleaseImage(&dst);
cvReleaseImage(&hsv); // free the temporary HSV image
}
The result images are shown below.
Test image:
The images that follow correspond, in order, to the six methods above:
The comparison above shows clearly that YCrCb plus Otsu gives the best result, followed by the RGB and YUV methods. The results look this good largely because the test photo was shot against a white background. Unfortunately, once the background is no longer a single color, for example when it contains both red and black regions, the results degrade badly. In my experiments the methods work best when the background is a solid color, ideally white or black.
References:
http://blog.sina.com.cn/s/blog_9ce5a1b501017otq.html
http://blog.csdn.net/scyscyao/article/details/5468577
http://wenku.baidu.com/view/05c47e03bed5b9f3f90f1ce4.html
http://blog.csdn.net/onezeros/article/details/6136770
-------------------------- The palm print in the test image is the author's own; please credit the author yanzi1225627 when reposting.