Skin Detection on Android Based on Skin Color

Several technical points for implementing skin-color-based skin detection on Android:

(1) Camera preview on Android, covering use of the Camera API and SurfaceView.

(2) The color format delivered by the Android camera preview, NV21.

(3) NV21 is a YCrCb format, so you need to understand the YCrCb color space and how YCrCb relates to YUV.

The YUV color model is derived from the RGB model; its defining property is that luminance and chrominance are stored separately, which makes it well suited to image processing. The YCbCr model is in turn derived from the YUV model.

(4) The characteristics of human skin color expressed in the YCrCb space: 133 ≤ Cr ≤ 173 and 77 ≤ Cb ≤ 127. Experiments show that Cr in the range [140, 160] matches East Asian skin tones.
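
To make the thresholds concrete, the sketch below reads the Cr/Cb pair for one pixel straight out of an NV21 preview buffer and applies the 133–173 / 77–127 test. The class and method names are illustrative only, not part of the original project; the same indexing reappears in the detectface() method in the source further down, which additionally tightens the Cr upper bound to 160.

// Minimal sketch: classify one pixel of an NV21 (YCrCb 4:2:0 semi-planar) preview frame as skin,
// using the thresholds 133 <= Cr <= 173 and 77 <= Cb <= 127 quoted above.
final class SkinTest {
    static boolean isSkinNV21(byte[] nv21, int width, int height, int x, int y) {
        int frameSize = width * height;                    // size of the Y plane
        int uvp = frameSize + (y >> 1) * width + (x & ~1); // each 2x2 block shares one V/U pair
        int cr = nv21[uvp] & 0xff;                         // NV21 stores V (Cr) first ...
        int cb = nv21[uvp + 1] & 0xff;                     // ... then U (Cb)
        return cr >= 133 && cr <= 173 && cb >= 77 && cb <= 127;
    }
}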

Conversion between YUV and RGB (strictly speaking, the formulas below, with their +128 chroma offsets, are the full-range YCbCr conversion used by JPEG rather than analog YUV):

 Y = 0.299 R + 0.587 G + 0.114 B

U = -0.1687 R - 0.3313 G + 0.5 B + 128

V = 0.5 R - 0.4187 G - 0.0813 B + 128

 R = Y + 1.402 (V-128)

G = Y - 0.34414 (U-128) - 0.71414 (V-128)

B = Y + 1.772 (U-128)
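
A direct transcription of these full-range formulas into Java could look like the following. The class ColorConvert and the helpers rgbToYCbCr and clamp are illustrative names introduced here, not part of the code further down.

// Minimal sketch: full-range (JPEG-style) RGB -> YCbCr, following the formulas above.
final class ColorConvert {
    static int clamp(int v) { return v < 0 ? 0 : (v > 255 ? 255 : v); }

    /** Returns {Y, Cb, Cr} for 8-bit R, G, B inputs. */
    static int[] rgbToYCbCr(int r, int g, int b) {
        int y  = clamp((int) Math.round( 0.299  * r + 0.587  * g + 0.114  * b));
        int cb = clamp((int) Math.round(-0.1687 * r - 0.3313 * g + 0.5    * b + 128));
        int cr = clamp((int) Math.round( 0.5    * r - 0.4187 * g - 0.0813 * b + 128));
        return new int[] { y, cb, cr };
    }
}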

For a long time I did not understand the difference between YUV and YCbCr, and I suspect some readers have the same question, so I am writing down what I learned here.

1. The difference in the conversion formulas to and from RGB

YUV <--> RGB

Y'= 0.299*R' + 0.587*G' + 0.114*B'

U'= -0.147*R' - 0.289*G' + 0.436*B' = 0.492*(B'- Y')

V'= 0.615*R' - 0.515*G' - 0.100*B' = 0.877*(R'- Y')

R' = Y' + 1.140*V'

G' = Y' - 0.394*U' - 0.581*V'

B' = Y' + 2.032*U'

YCbCr <--> RGB

Y'  = 0.257*R' + 0.504*G' + 0.098*B' + 16

Cb' = -0.148*R' - 0.291*G' + 0.439*B' + 128

Cr' = 0.439*R' - 0.368*G' - 0.071*B' + 128

R' = 1.164*(Y'-16) + 1.596*(Cr'-128)

G' = 1.164*(Y'-16) - 0.813*(Cr'-128) - 0.392*(Cb'-128)

B' = 1.164*(Y'-16) + 2.017*(Cb'-128)

Note: every symbol above carries a prime ('), indicating that the value has been gamma corrected.
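
For reference, the integer routine decodeYUV420SP() in the source below implements exactly these studio-range formulas in 10-bit fixed point: 1.164 x 1024 ≈ 1192, 1.596 x 1024 ≈ 1634, 0.813 x 1024 ≈ 833, 0.392 x 1024 ≈ 400 and 2.017 x 1024 ≈ 2066. The intermediate products are clamped to 262143 (2^18 - 1) and shifted back down when the ARGB pixel is assembled.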

 

The source code is as follows:

package com.example.hearrate;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Size;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.res.Configuration;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.graphics.PorterDuff;
import android.graphics.PorterDuff.Mode;
public class MainActivity extends Activity implements SurfaceHolder.Callback, Camera.PreviewCallback {
SurfaceHolder mHolder;
SurfaceView mView;
SurfaceView mLayer;
SurfaceHolder mLayerHolder;
private Camera mCamera =null;
private boolean bIfPreview =false;
private int mPreviewHeight;
private int mPreviewWidth;
private Canvas canvas; 
private Paint paint; 
private int facex=0,facey=0;
private boolean bprocessing=false;
private int[] RGBData; 
private byte[] mYUVData;   
private boolean bfront=false;
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_main);
		mView=(SurfaceView)findViewById(R.id.layer0);
		paint = new Paint(); 
		paint.setColor(Color.RED); 
		paint.setAntiAlias(true); 

		mPreviewWidth=320;
		mPreviewHeight=400;
		mHolder=mView.getHolder();
		mHolder.addCallback(this);
		mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
		
		mLayer=(SurfaceView)findViewById(R.id.layer1);
		mLayer.setZOrderOnTop(true);
		//mLayer.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
	
		
		mLayerHolder=mLayer.getHolder();
		mLayerHolder.setFormat(PixelFormat.TRANSPARENT);
		mLayerHolder.addCallback(this);
		mLayerHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
	}
void drawlayer1()
{
	canvas = mLayerHolder.lockCanvas();
	canvas.save();
	Bitmap bmp = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
	// draw the marker bitmap at the detected face position
	drawImage(canvas, bmp, facex, facey, 72, 72, 0, 0);
	canvas.restore();
	bmp = null;
	mLayerHolder.unlockCanvasAndPost(canvas);
}
	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.activity_main, menu);
		return true;
	}

	@Override
	public void surfaceChanged(SurfaceHolder arg0, int arg1, int width, int height) {
		// TODO Auto-generated method stub
		mPreviewWidth=width;
		mPreviewHeight=height;
		if(arg0.equals(mLayerHolder))
		{
			//drawlayer1();
			return;
		}
		
		RGBData= new int[mPreviewHeight* mPreviewWidth];
		mYUVData= new byte[mPreviewHeight* mPreviewWidth+(mPreviewHeight/2)* (mPreviewWidth/2)+(mPreviewHeight/2)* (mPreviewWidth/2)];
			initCamera(); 
	}

	@SuppressLint("NewApi")
	@Override
	public void surfaceCreated(SurfaceHolder arg0) {
		// TODO Auto-generated method stub
		if(arg0.equals(mLayerHolder))
			return;
		// On API 9+ (Gingerbread) look for a front-facing camera; otherwise fall back to the default camera.
		if(Build.VERSION.SDK_INT>=Build.VERSION_CODES.GINGERBREAD)
		{
			int numCameras = Camera.getNumberOfCameras();
			for(int i=0;i<numCameras;i++)
			{
				CameraInfo info = new CameraInfo();
				Camera.getCameraInfo(i, info);
				if(info.facing == CameraInfo.CAMERA_FACING_FRONT)
				{
					mCamera = Camera.open(i);
					bfront = true;
					break;
				}
			}
		}
		if(mCamera == null)
		{
			mCamera = Camera.open();
		}
		try
		{
			mCamera.setPreviewDisplay(mHolder);
		} catch (IOException e)
		{
			Log(e.getMessage());
		}
	}

	@Override
	public void surfaceDestroyed(SurfaceHolder arg0) {
		if(arg0.equals(mLayerHolder))
			return;
		if(mCamera != null)
		{
			mCamera.setPreviewCallback(null);
			mCamera.stopPreview();
			mCamera.release();
			mCamera = null;
			bIfPreview = false;
		}
	}

	@Override
	public void onPreviewFrame(byte[] data, Camera camera) {
		// Copy the NV21 frame and hand it to a background task; drop the frame if the previous one is still being processed.
		if(bprocessing || data == null)
			return;
		System.arraycopy(data, 0, mYUVData, 0, Math.min(data.length, mYUVData.length));
		new ProcessTask().execute(mYUVData);
	}

	// Draw the skin mask held in RGBData onto the transparent overlay surface.
	void drawdetect()
	{
		canvas = mLayerHolder.lockCanvas();
		if(canvas == null)
			return;
		Paint clear = new Paint();
		clear.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
		canvas.drawPaint(clear); // wipe the previous mask
		canvas.drawBitmap(RGBData, 0, mPreviewWidth, 0, 0, mPreviewWidth, mPreviewHeight, true, null);
		mLayerHolder.unlockCanvasAndPost(canvas);
	}

	// Draw a width x height region of bmp, taken from (bx, by), at position (x, y) on the canvas.
	void drawImage(Canvas c, Bitmap bmp, int x, int y, int width, int height, int bx, int by)
	{
		Rect src = new Rect(bx, by, bx + width, by + height);
		Rect dst = new Rect(x, y, x + width, y + height);
		c.drawBitmap(bmp, src, dst, null);
	}

	// Mark very bright pixels (luma above 250) in RGBData; the counterpart of detectface() for white regions.
	public void detectwhite(byte[] yuv420sp, int width, int height)
	{
        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp]));
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]);
                u = (0xff & yuv420sp[uvp++]);
            }
            if (y > 250)
            {
                RGBData[yp] = Color.RED;
            } else
            {
                RGBData[yp] = Color.TRANSPARENT;
            }

        }

    }

	}
	public void detectface(byte[] yuv420sp, int width, int height)
	{
		
        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
    
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]);
                u = (0xff & yuv420sp[uvp++]);
            }
            ///133≤Cr≤173,77≤Cb≤127 
if((v)>133&&(v)<160&&(u>77)&&(u<127))
{
	RGBData[yp]=Color.RED;
	// canvas.drawPoint(i, j, paint);
	}else
	{
		RGBData[yp]=Color.TRANSPARENT;
	}

        }
        
    }

	}
	 public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
        final int frameSize = width * height;
        canvas=mLayerHolder.lockCanvas();
        Paint paint1 = new Paint();  
        paint1.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));  
        canvas.drawPaint(paint1);
        canvas.save();
        for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            // Skin-tone test: 133 ≤ Cr ≤ 173, 77 ≤ Cb ≤ 127. At this point u and v already have
            // 128 subtracted, so add it back before comparing against the 0-255 thresholds.
            if ((v + 128) > 133 && (v + 128) < 160 && (u + 128) > 77 && (u + 128) < 127)
            {
                canvas.drawPoint(i, j, paint);
            }
            /*
             * Standard studio-range YCbCr to RGB conversion in 10-bit fixed point.
             */
            int y1192 = 1192 * y;
            int r = (y1192 + 1634 * v);
            int g = (y1192 - 833 * v - 400 * u);
            int b = (y1192 + 2066 * u);

            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;

            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            
             r = (rgb[yp] >> 16)&0xff;
                  g = (rgb[yp] >> 8) & 0xff;
                   b = rgb[yp] & 0xff;
                 //  if(r==255&&g==255&&b==255)
                  // canvas.drawPoint(i, j, paint);

    
        }
        
    }
   	 canvas.restore(); 
   	 mLayerHolder.unlockCanvasAndPost(canvas);
}


	private void initCamera()
	{
		if (bIfPreview)
		{
			mCamera.stopPreview();
		}
		if(null != mCamera)
		{
			try{
				Camera.Parameters parameters = mCamera.getParameters();
				// parameters.setFlashMode("off"); // no flash
				parameters.setPictureFormat(PixelFormat.JPEG); // set the still-picture format to JPEG
				parameters.setPreviewFormat(PixelFormat.YCbCr_420_SP); // set the preview format (the default is NV21)
				
				mCamera.setPreviewCallback(this);
				// [debug] query the picture sizes, preview sizes, preview formats and frame rates the camera supports
				List pictureSizes = mCamera.getParameters().getSupportedPictureSizes();
				List previewSizes = mCamera.getParameters().getSupportedPreviewSizes();
				List previewFormats = mCamera.getParameters().getSupportedPreviewFormats();
				List previewFrameRates = mCamera.getParameters().getSupportedPreviewFrameRates();
				
				  Size psize = null;
				 for (int i = 0; i < pictureSizes.size(); i++)
				 {
				 psize = (Size) pictureSizes.get(i);
				
				 }
				  for (int i = 0; i < previewSizes.size(); i++)
				 {
				 psize = (Size) previewSizes.get(i);
				
				  }
				 Integer pf = null;
				 for (int i = 0; i < previewFormats.size(); i++)
				 {
				pf = (Integer) previewFormats.get(i);
			
				 }
				
				// Set the picture and preview sizes.
				parameters.setPictureSize(640, 480); // size of still captures
				parameters.setPreviewSize(mPreviewWidth, mPreviewHeight); // preview size
				// If these sizes do not match what the device actually supports, setParameters() will throw.
				if(bfront)
				{
					parameters.set("orientation", "landscape");
					parameters.set("rotation", 0); // rotation of the lens output (the camera shoots in landscape by default)
					mCamera.setDisplayOrientation(0); // available on Android 2.2 and later
				}
				// Automatic portrait/landscape adjustment:
			/*	if (this.getResources().getConfiguration().orientation != Configuration.ORIENTATION_LANDSCAPE) 
				 {
				 parameters.set("orientation", "portrait");
				 parameters.set("rotation", 90); // rotate the lens output 90 degrees (the camera shoots in landscape by default)
				 mCamera.setDisplayOrientation(90); // available on Android 2.2 and later
				 } else // landscape
			 {
				 parameters.set("orientation", "landscape");
				 mCamera.setDisplayOrientation(0); // available on Android 2.2 and later
			 } 
			  */
				// Apply the parameters and start the preview.
				 mCamera.setParameters(parameters);
				 mCamera.startPreview();
				 bIfPreview = true;
				 // [debug] read back the picture and preview sizes actually in effect
				 Camera.Size csize = mCamera.getParameters().getPreviewSize();
				 mPreviewHeight = csize.height; //
				mPreviewWidth = csize.width;
				 
				 csize = mCamera.getParameters().getPictureSize();
				
				
			}catch(Exception e)
			{
				Log(e.getMessage());
			}
			
		}
	}
	void Log(String msg)
	{
		System.out.println("LOG:"+msg);
	}
	int[] g_v_table,g_u_table,y_table;
	int[][] r_yv_table,b_yu_table;
	int inited = 0;
	 

	void initTable()
	{
		g_v_table=new int[256];
		g_u_table=new int[256];
		y_table=new int[256];
		r_yv_table=new int[256][256];
		b_yu_table=new int[256][256];
		if (inited == 0)
		{
			inited = 1;
			int m = 0,n=0;
			for (; m < 256; m++)
			{
				g_v_table[m] = 833 * (m - 128);
				g_u_table[m] = 400 * (m - 128);
				y_table[m] = 1192 * (m - 16);
			}
			int temp = 0;
			for (m = 0; m < 256; m++)
				for (n = 0; n < 256; n++)
				{
					temp = 1192 * (m - 16) + 1634 * (n - 128);
					if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;
					r_yv_table[m][n] = temp;
	 
					temp = 1192 * (m - 16) + 2066 * (n - 128);
					if (temp < 0) temp = 0; else if (temp > 262143) temp = 262143;
					b_yu_table[m][n] = temp;
				}
		}
	}
	public class ProcessTask extends AsyncTask<byte[], Void, Void>
	{

		@Override
		protected void onPostExecute(Void result) {
			// TODO Auto-generated method stub
			super.onPostExecute(result);
			drawdetect();
			bprocessing=false;
		}

		@Override
		protected void onPreExecute() {
			// TODO Auto-generated method stub
			super.onPreExecute();
			if(bprocessing)
				this.cancel(true);
		
		}

		@Override
		protected Void doInBackground(byte[]... params) {
			// TODO Auto-generated method stub
			bprocessing=true;
			byte[] data=	params[0];
			
			// skin detection
			detectface(data, mPreviewWidth, mPreviewHeight);
			// white (bright) region detection
			//detectwhite(data, mPreviewWidth, mPreviewHeight);
	  //  publishProgress(null);
			return null;
		}
		
	}
}

The layout is as follows:
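
The code above only requires two stacked SurfaceViews with the ids layer0 (the camera preview) and layer1 (the transparent overlay drawn on top). A minimal layout consistent with that, with the FrameLayout container and the size attributes as assumptions, could look like this:

<?xml version="1.0" encoding="utf-8"?>
<!-- Minimal sketch: layer0 shows the camera preview, layer1 is drawn over it as a transparent overlay. -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <SurfaceView
        android:id="@+id/layer0"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

    <SurfaceView
        android:id="@+id/layer1"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</FrameLayout>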

The manifest is as follows:
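
At a minimum the app has to declare the CAMERA permission and register MainActivity as the launcher activity. A minimal manifest consistent with the package name in the code, with the SDK levels, version numbers and feature declaration as assumptions, could look like this:

<?xml version="1.0" encoding="utf-8"?>
<!-- Minimal sketch: package and activity names taken from the code; permissions and SDK levels are assumptions. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.hearrate"
    android:versionCode="1"
    android:versionName="1.0">

    <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="17" />

    <uses-permission android:name="android.permission.CAMERA" />
    <uses-feature android:name="android.hardware.camera" />

    <application
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name">
        <activity
            android:name=".MainActivity"
            android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>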

Reposted from: https://my.oschina.net/u/948588/blog/124445
