1. Overview
Starting with iOS 4, the AVFoundation framework added several classes, such as AVCaptureDevice and AVCaptureSession, that give you access to the camera's data without presenting an ImagePicker-style interface. The captured data can be turned into individual images, which you can display on screen in real time, or feed to FFMPEG or another video encoder to compose a video.
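Note: everything below assumes a view controller that imports AVFoundation (plus CoreMedia/CoreVideo for the buffer functions), adopts the sample-buffer delegate protocol, and keeps a retained reference to the session; the class name CameraViewController and the property name session are my assumptions, chosen to match the [self setSession:session] call used later. A minimal interface sketch:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>

// Hypothetical controller interface the following code could live in
@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

// Retained so the session stays alive while capturing (MRC style, matching the code below)
@property (nonatomic, retain) AVCaptureSession *session;

@end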
2. Steps
Step 1: Create the AVCaptureSession and add the input and output
// Create and configure a capture session, then start it running.
- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session
    AVCaptureSession *session = [[AVCaptureSession alloc] init];

    // Choose the capture quality
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input and add it to the session
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (!input) {
        // Handle the error (input is nil here)
    }
    [session addInput:input];

    // Create a video data output and add it to the session
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    [session addOutput:output];

    // Configure the output: deliver sample buffers on a serial dispatch queue
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format
    output.videoSettings = [NSDictionary dictionaryWithObject:
                                [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // Limit the frame rate (here 15 fps)
    output.minFrameDuration = CMTimeMake(1, 15);

    // Start the session
    [session startRunning];

    // Keep a reference to the session on the controller
    [self setSession:session];
}
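With the session stored on the controller, one possible way to drive it is to start capturing when the view loads and stop delivering frames when the view goes away. A minimal sketch; where exactly you start and stop the session depends on your app:

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setupCaptureSession];   // creates, configures and starts the session
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [self.session stopRunning];   // stop the capture session and frame delivery
}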
Step 2: Implement the AVCaptureVideoDataOutputSampleBufferDelegate protocol method
// Called whenever the output writes a new sample buffer
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Extract the frame data and build a UIImage from it
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];

    // ... your own processing of the image goes here ...
}
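Keep in mind that this delegate method runs on the serial queue created in setupCaptureSession, not on the main thread, so any UI work such as showing the frame in a view must be dispatched back to the main queue. A minimal sketch of the "processing" step, assuming a hypothetical imageView property on the controller (if the image is used after the callback returns, also see the copy-based note after imageFromSampleBuffer: below):

    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        // UI updates must happen on the main thread
        self.imageView.image = image;
    });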
// Create a UIImage from the pixel data in a sample buffer
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the number of bytes per row
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (!colorSpace) {
        NSLog(@"CGColorSpaceCreateDeviceRGB failure");
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the data size of the pixel buffer
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);

    // Create a CGDataProviderRef backed by the pixel data
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL);

    // Create a CGImageRef from the data provider
    CGImageRef cgImage = CGImageCreate(width,
                                       height,
                                       8,
                                       32,
                                       bytesPerRow,
                                       colorSpace,
                                       kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                                       provider,
                                       NULL,
                                       true,
                                       kCGRenderingIntentDefault);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    // Create the UIImage
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    // Unlock the base address of the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}
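One caveat with this version: the CGDataProvider points directly at the pixel buffer's memory, and that memory may be reused once the sample buffer is released after the delegate returns, so the resulting image should not be assumed to stay valid beyond the callback. If the image needs to outlive the callback (for example when it is displayed asynchronously), a copy-based variant along the lines of Apple's Technical Q&A QA1702, which draws the pixels into a bitmap context and takes a copied image from it, is safer. A sketch (the method name is hypothetical):

- (UIImage *)imageByCopyingSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Wrap the BGRA pixels in a bitmap context; CGBitmapContextCreateImage
    // returns a copy, so the result no longer references the pixel buffer.
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return image;
}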