【iOS】A Simple Face Detection Setup

1. Import the header

#import <AVFoundation/AVFoundation.h>

The class that receives the video frames must also adopt the AVCaptureVideoDataOutputSampleBufferDelegate protocol.
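A minimal class-extension sketch (the class name ViewController is an assumption; use whichever controller hosts the capture code):

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate> // hypothetical host class
@end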


2. Declare the properties

@property (strong, nonatomic) AVCaptureSession *captureSession; // coordinates the data flow between input and output

@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput; // supplies the input data

@property (strong, nonatomic) AVCaptureVideoDataOutput *captureVideoDataOutput; // video output

@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer; // camera preview layer; add it as a sublayer of some view's layer to display it
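The later steps also reference three views on the controller: self.showView hosts the live preview, while self.headImage and self.yuanImage display the cropped face and the full frame. A sketch of the assumed declarations (whether they are storyboard outlets or created in code is an assumption):

@property (weak, nonatomic) IBOutlet UIView *showView; // container for the preview layer
@property (weak, nonatomic) IBOutlet UIImageView *headImage; // shows the cropped face
@property (weak, nonatomic) IBOutlet UIImageView *yuanImage; // shows the current full frame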


3. Initialize and configure the session


// (1) Initialize the session


_captureSession = [[AVCaptureSession alloc] init];
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
}

// (2) Initialize the input: get the back camera (this uses the pre-iOS 10 API)


AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
if (!captureDevice) {
    NSLog(@"Failed to get the back camera.");
    return;
}

/*
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}
*/
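devicesWithMediaType: is deprecated as of iOS 10. A sketch of the modern replacement using AVCaptureDeviceDiscoverySession (the method name cameraWithPosition: is made up for illustration):

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // Discover built-in wide-angle cameras at the requested position (iOS 10+).
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    return discovery.devices.firstObject;
}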

// Create the device-input object from the capture device; it supplies the input data

NSError *error = nil;
_captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
if (error) {
    NSLog(@"Failed to create the device input object, error: %@", error.localizedDescription);
    return;
}
if ([_captureSession canAddInput:_captureDeviceInput]) {
    [_captureSession addInput:_captureDeviceInput];
}

// (3) Initialize the output


_captureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
if ([_captureSession canAddOutput:_captureVideoDataOutput]) {
    [_captureSession addOutput:_captureVideoDataOutput];
    // Set the sample-buffer delegate. AVFoundation requires a serial queue here
    // so that frames are delivered in order.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
    [_captureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    _captureVideoDataOutput.videoSettings =
        [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    AVCaptureConnection *captureConnection = [_captureVideoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    // Cap the frame rate at 15 fps if the device's active format supports it.
    NSError *lockError = nil;
    CMTime frameDuration = CMTimeMake(1, 15);
    NSArray *supportedFrameRateRanges = [captureDevice.activeFormat videoSupportedFrameRateRanges];
    BOOL frameRateSupported = NO;
    for (AVFrameRateRange *range in supportedFrameRateRanges) {
        if (CMTIME_COMPARE_INLINE(frameDuration, >=, range.minFrameDuration) &&
            CMTIME_COMPARE_INLINE(frameDuration, <=, range.maxFrameDuration)) {
            frameRateSupported = YES;
        }
    }
    if (frameRateSupported && [captureDevice lockForConfiguration:&lockError]) {
        [captureDevice setActiveVideoMaxFrameDuration:frameDuration];
        [captureDevice setActiveVideoMinFrameDuration:frameDuration];
        [captureDevice unlockForConfiguration];
    }
    // Enable video stabilization where supported.
    if ([captureConnection isVideoStabilizationSupported]) {
        captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
}
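For live detection it can also help to drop frames that arrive while the delegate is still busy. AVCaptureVideoDataOutput exposes alwaysDiscardsLateVideoFrames for this; it already defaults to YES, so the line below only makes the intent explicit:

// Drop late frames instead of queueing them, keeping preview latency low.
_captureVideoDataOutput.alwaysDiscardsLateVideoFrames = YES;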

// (4) Set up the preview layer


_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
CALayer *layer = self.showView.layer;
layer.masksToBounds = YES;
_captureVideoPreviewLayer.frame = layer.bounds;
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
[layer addSublayer:_captureVideoPreviewLayer];

/*
// Keep the preview layer sized to the view after layout changes
- (void)viewDidLayoutSubviews {
    CALayer *layer = self.showView.layer;
    _captureVideoPreviewLayer.frame = layer.bounds;
}
*/
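Note that no frames are delivered until the session is started, and on iOS 10 and later the app must also declare an NSCameraUsageDescription entry in Info.plist or capture will fail. A minimal sketch for starting the session at the end of the setup code:

// startRunning is synchronous and can take a moment, so call it off the main thread.
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    [self.captureSession startRunning];
});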


4. Process the frames in the delegate


- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    // Unlock only after the pixel data has been consumed by the bitmap context.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight]; // the current frame

    // Face detection
    CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
    // Orientation 6 tells Core Image how the raw buffer is rotated relative to the display.
    NSArray *features = [faceDetector featuresInImage:[CIImage imageWithCGImage:image.CGImage] options:@{CIDetectorImageOrientation : @6}];
    CIFaceFeature *feature = [features firstObject];
    if (feature) {
        CGFloat x = 0;
        CGFloat y = 0;
        CGFloat x1 = 0;
        CGFloat y1 = 0;
        CGFloat hwidth = 40;
        CGFloat hheight = 40;
        CGFloat offSetY = 0;
        CGFloat offSetX = 0;
        CGFloat offSetHeight = 0;
        if (feature.hasLeftEyePosition) {
            CGPoint leftEyePos = feature.leftEyePosition;
            NSLog(@"leftX = %f  leftY = %f", leftEyePos.x, leftEyePos.y);
            x1 = leftEyePos.x;
            y1 = leftEyePos.y;
        }
        if (feature.hasRightEyePosition) {
            CGPoint rightEyePos = feature.rightEyePosition;
            NSLog(@"rightX = %f  rightY = %f", rightEyePos.x, rightEyePos.y);
            x = rightEyePos.x;
            y = rightEyePos.y;
            hheight = (y - y1);
        }
        if (feature.hasMouthPosition) {
            CGPoint mouthPos = feature.mouthPosition;
            NSLog(@"mouthX = %f mouthY = %f", mouthPos.x, mouthPos.y);
            hwidth = mouthPos.x - x;
            // These offsets are computed but not applied to the crop rect below.
            offSetHeight = hheight / 4.0;
            offSetY = offSetHeight;
            offSetX = hwidth / 2.0;
        }
        // Build a crop rect from the eye positions (Core Image uses a bottom-left
        // origin, hence the image.size.width - y flip for the rotated frame).
        CGRect rect = CGRectMake(x, image.size.width - y, hwidth, hheight);
        CGImageRef subImageRef = CGImageCreateWithImageInRect(image.CGImage, rect);
        UIImage *smallImage = [UIImage imageWithCGImage:subImageRef scale:1.0 orientation:UIImageOrientationRight]; // the cropped face image
        // UI updates must happen on the main queue.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.headImage.image = smallImage;
            self.yuanImage.image = image;
        });
        CGImageRelease(subImageRef);
    }
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(newImage);
}
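The delegate above builds a new CIDetector for every frame, which is relatively expensive. A minimal sketch of caching it in a lazily-initialized property instead (the faceDetector property is hypothetical, not part of the original):

@property (strong, nonatomic) CIDetector *faceDetector; // hypothetical cached detector

- (CIDetector *)faceDetector {
    if (!_faceDetector) {
        // Low accuracy trades precision for speed, which suits live video;
        // CIDetectorTracking hints that features should be tracked across frames.
        _faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                           context:nil
                                           options:@{CIDetectorAccuracy : CIDetectorAccuracyLow,
                                                     CIDetectorTracking : @YES}];
    }
    return _faceDetector;
}

The delegate can then use self.faceDetector in place of the per-frame detectorOfType: call.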




