1.在控制器中添加如下属性
/** Capture device (camera) used as the input source. */
@property (nonatomic,strong) AVCaptureDevice *device;
/** Session input wrapping the capture device. */
@property (nonatomic,strong) AVCaptureDeviceInput *deviceInput;
/** Output that vends raw video frames for processing. */
@property (nonatomic,strong) AVCaptureVideoDataOutput *videoDataOutput;
/** Capture session wiring the input to the output. */
@property (nonatomic,strong) AVCaptureSession *session;
/** Serial queue on which sample buffers are delivered. */
@property (nonatomic,strong) dispatch_queue_t videoDataOutputQueue;
/** Layer rendering the live camera preview in the view. */
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
/** Lazily-created Core Image face detector (see the faceDetector getter). */
@property (nonatomic,strong) CIDetector *faceDetector;
/// Lazy getter: builds the Core Image face detector on first access.
/// Low accuracy is chosen deliberately — it is fast enough for per-frame,
/// real-time detection.
- (CIDetector *)faceDetector {
    if (!_faceDetector) {
        NSDictionary *detectorOptions = @{CIDetectorAccuracy : CIDetectorAccuracyLow};
        _faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                           context:nil
                                           options:detectorOptions];
    }
    return _faceDetector;
}
/**
 * Builds the full face-detection capture pipeline:
 * front camera -> device input -> session -> video data output,
 * plus a preview layer inserted into the view's layer hierarchy,
 * and finally starts the session.
 *
 * Must be called on the main thread (it touches the view's layer).
 * Returns early (after logging) if no suitable camera exists or the
 * device input cannot be created, instead of continuing with a nil
 * device as before.
 */
- (void)setupDetector {
    // 1. Face detection wants the front-facing camera.
    self.devicePosition = AVCaptureDevicePositionFront;

    // 2. Find the video capture device at the requested position.
    //    NOTE(review): -devicesWithMediaType: is deprecated since iOS 10;
    //    consider AVCaptureDeviceDiscoverySession when the deployment
    //    target allows it.
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == self.devicePosition) {
            self.device = device;
            break;
        }
    }
    // Bail out instead of passing a nil device to AVCaptureDeviceInput below.
    if (!self.device) {
        NSLog(@"设备不存在");
        return;
    }

    // 3. Wrap the device in a session input. Per Cocoa convention, check the
    //    returned object for failure — not the NSError pointer.
    NSError *error = nil;
    self.deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&error];
    if (!self.deviceInput) {
        NSLog(@"输入流初始化异常: %@", error);
        return;
    }

    // 4. Serial queue on which frames will be delivered to the delegate.
    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);

    // 5. Video data output: vends frames from the camera for processing.
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // BGRA pixel format — what Core Image consumes most efficiently.
    NSDictionary *rgbOutputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCMPixelFormat_32BGRA)};
    [self.videoDataOutput setVideoSettings:rgbOutputSettings];
    // Drop frames that arrive while the delegate is still busy, so slow
    // detection never backs up the capture pipeline.
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];

    // 6. Assemble the session.
    self.session = [[AVCaptureSession alloc] init];
    // 1> Capture quality preset.
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];
    // 2> Attach the input.
    if ([self.session canAddInput:self.deviceInput]) {
        [self.session addInput:self.deviceInput];
    } else {
        NSLog(@"无法添加输入流");
    }
    // 3> Attach the output.
    if ([self.session canAddOutput:self.videoDataOutput]) {
        [self.session addOutput:self.videoDataOutput];
    } else {
        NSLog(@"无法添加输出流");
    }

    // 7. Preview layer showing the live camera feed.
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.previewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];

    // 8. Insert the preview layer at the back of the view's layer stack.
    CALayer *rootLayer = [self.view layer];
    [rootLayer setMasksToBounds:YES];
    self.previewLayer.frame = rootLayer.bounds;
    [rootLayer insertSublayer:self.previewLayer atIndex:0];

    // 9. Start capturing.
    [self.session startRunning];
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

/**
 * Delivered on videoDataOutputQueue for every captured frame.
 *
 * Wraps the frame's pixel buffer in a CIImage and runs the face detector
 * against it once per device orientation (the detector needs a hint for
 * which way "up" is), then branches on whether any face was found.
 *
 * @param captureOutput The output that produced the frame.
 * @param sampleBuffer  The frame; its image buffer may be NULL, which we
 *                      now guard against instead of handing NULL to CIImage.
 * @param connection    The connection the frame arrived on.
 */
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // CVPixelBuffer: the frame's raw pixel data in main memory, directly
    // consumable by Core Image.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) {
        // A sample buffer is not guaranteed to carry image data.
        return;
    }

    // Copy the buffer's propagatable attachments (color space etc.) so the
    // CIImage is interpreted correctly. CMCopy... follows the CF Create/Copy
    // rule, hence the CFRelease below.
    CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
    CIImage *frameImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary *)(attachments)];
    if (attachments) {
        CFRelease(attachments);
    }

    // Detect in all four orientations so a face is found no matter how the
    // device is held. (One detector pass per orientation, as before.)
    NSArray<NSNumber *> *orientations = @[@(UIDeviceOrientationPortrait),
                                          @(UIDeviceOrientationPortraitUpsideDown),
                                          @(UIDeviceOrientationLandscapeLeft),
                                          @(UIDeviceOrientationLandscapeRight)];
    NSMutableArray *allFeatures = [NSMutableArray array];
    for (NSNumber *orientation in orientations) {
        NSNumber *exifOrientation = [self getImageOrientationByDeviceOrientation:(UIDeviceOrientation)orientation.integerValue];
        NSDictionary *imageOptions = @{CIDetectorImageOrientation : exifOrientation};
        [allFeatures addObjectsFromArray:[self.faceDetector featuresInImage:frameImage options:imageOptions]];
    }

    if (allFeatures.count) {
        // Face(s) detected.
    } else {
        // No face detected.
    }
}
/**
 * Maps a UIDeviceOrientation to the EXIF image-orientation code (1-8)
 * expected by CIDetectorImageOrientation, boxed as an NSNumber.
 */
- (NSNumber *)getImageOrientationByDeviceOrientation:(UIDeviceOrientation)deviceOrientation {
    // EXIF orientation codes, per the TIFF/EXIF specification.
    enum {
        PHOTOS_EXIF_0ROW_TOP_0COL_LEFT = 1,     // 0th row top, 0th column left (default)
        PHOTOS_EXIF_0ROW_TOP_0COL_RIGHT = 2,    // 0th row top, 0th column right
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT = 3, // 0th row bottom, 0th column right
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_LEFT = 4,  // 0th row bottom, 0th column left
        PHOTOS_EXIF_0ROW_LEFT_0COL_TOP = 5,     // 0th row left, 0th column top
        PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP = 6,    // 0th row right, 0th column top
        PHOTOS_EXIF_0ROW_RIGHT_0COL_BOTTOM = 7, // 0th row right, 0th column bottom
        PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM = 8   // 0th row left, 0th column bottom
    };

    switch (deviceOrientation) {
        case UIDeviceOrientationPortraitUpsideDown: // home button on the top
            return @(PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM);
        case UIDeviceOrientationLandscapeLeft:      // home button on the right
            // NOTE(review): original sample code distinguished front/back
            // camera here; this version assumes the front camera.
            return @(PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT);
        case UIDeviceOrientationLandscapeRight:     // home button on the left
            return @(PHOTOS_EXIF_0ROW_TOP_0COL_LEFT);
        case UIDeviceOrientationPortrait:           // home button on the bottom
        default:
            return @(PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP);
    }
}
/**
 * Tears down everything setupDetector built.
 *
 * Order matters: the session is stopped FIRST so no further sample buffers
 * are delivered while the graph is being dismantled (the original removed
 * inputs/outputs from a still-running session). The sample-buffer delegate
 * is also cleared so the output drops its reference to self and the queue.
 */
- (void)teardownAVCapture {
    // Stop frame delivery before touching the graph.
    [self.session stopRunning];

    // Break the delegate link (output -> self / videoDataOutputQueue).
    [self.videoDataOutput setSampleBufferDelegate:nil queue:NULL];

    for (AVCaptureInput *input in self.session.inputs) {
        [self.session removeInput:input];
    }
    for (AVCaptureOutput *output in self.session.outputs) {
        [self.session removeOutput:output];
    }

    // Release every pipeline object built in setupDetector.
    self.videoDataOutput = nil;
    self.videoDataOutputQueue = nil;
    self.deviceInput = nil;
    self.device = nil;

    [self.previewLayer removeFromSuperlayer];
    self.previewLayer = nil;
    self.session = nil;
}