iOS 使用AVFoundation,捕捉静态图像

一、引入AVFoundation框架

添加头文件

#import <AVFoundation/AVFoundation.h>

二、定义以下对象:

AVCaptureDevice * device;  // hardware capture device (the camera)

AVCaptureDeviceInput * input; // input stream wrapping the capture device

AVCaptureStillImageOutput * output; // still-image output stream (NOTE(review): deprecated since iOS 10; AVCapturePhotoOutput is the modern replacement)

 AVCaptureSession * session; // session coordinating data flow from input to output

AVCaptureVideoPreviewLayer * preview; // preview layer showing the live camera feed

 UIButton * cameraButton;  // shutter button

三、初始化对象

// 1. Create the capture session.
self.session = [[AVCaptureSession alloc] init];

// Use the photo preset for high-resolution still capture.
self.session.sessionPreset = AVCaptureSessionPresetPhoto;

// 2. Pick the capture device — the back camera here.
self.device = [self backCamera];

// 3. Wrap the device in an input, surfacing any failure instead of passing error:nil.
NSError *inputError = nil;
self.input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&inputError];
if (!self.input) {
    NSLog(@"Failed to create camera input: %@", inputError);
}

// Attach the input to the session only when both exist and the session accepts it.
if (self.input && [self.session canAddInput:self.input]) {
    [self.session addInput:self.input];
}

// 4. Create the still-image output configured to emit JPEG data.
self.output = [[AVCaptureStillImageOutput alloc] init];
[self.output setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];

// 5. Attach the output to the session.
if ([self.session canAddOutput:self.output]) {
    [self.session addOutput:self.output];
}

// 6. Build the preview layer from the session and fill the layer, cropping as needed.
self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;

// Size the layer for portrait: width = shorter screen side, height = longer side.
CGFloat max = MAX([UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
CGFloat min = MIN([UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
self.preview.frame = CGRectMake(0, 0, min, max);

// Insert at index 0 (the BOTTOM of the sublayer stack) so controls added to
// self.view later render above the camera preview.
[self.view.layer insertSublayer:self.preview atIndex:0];



// Start the capture session when the view becomes visible.
- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];  // lifecycle overrides must call super
    if (self.session) {
        [self.session startRunning];
    }
}


// Stop the capture session when the view goes off screen to save power.
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];  // lifecycle overrides must call super
    if (self.session) {
        [self.session stopRunning];
    }
}

四、初始化设备时,可选择前置或后置摄像头;

// 根据位置获取前后摄像头设备

// Returns the built-in wide-angle camera at the requested position, or nil if none exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    for (AVCaptureDevice *candidate in discovery.devices) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return nil;
}

// 获取前置摄像头

// Convenience accessor for the front-facing camera (nil when unavailable).
- (AVCaptureDevice *)frontCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}

// 获取后置摄像头

// Convenience accessor for the rear camera (nil when unavailable).
- (AVCaptureDevice *)backCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}

// 切换摄像头

// Toggles between the front and back cameras.
// Guards against a nil replacement input (the original would call addInput:nil
// and leave the session with no video input) and restores the old input when
// the new one cannot be attached.
- (void)switchDevice {
    for (AVCaptureDeviceInput *currentInput in self.session.inputs) {
        AVCaptureDevice *currentDevice = currentInput.device;
        if (![currentDevice hasMediaType:AVMediaTypeVideo]) {
            continue;
        }

        // Pick the camera on the opposite side.
        AVCaptureDevice *newCamera = (currentDevice.position == AVCaptureDevicePositionFront)
            ? [self cameraWithPosition:AVCaptureDevicePositionBack]
            : [self cameraWithPosition:AVCaptureDevicePositionFront];
        if (!newCamera) {
            NSLog(@"switchDevice: no camera on the other side; keeping current input");
            return;
        }

        NSError *error = nil;
        AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
        if (!newInput) {
            NSLog(@"switchDevice: could not create input for new camera: %@", error);
            return;
        }

        [self.session beginConfiguration];
        [self.session removeInput:currentInput];
        if ([self.session canAddInput:newInput]) {
            [self.session addInput:newInput];
        } else {
            // Put the original input back so the session is never left without video.
            [self.session addInput:currentInput];
        }
        [self.session commitConfiguration];
        break;
    }
}

五、在拍照按钮的点击事件中获取捕捉到的图像,并可将其转成base64字符串

// Captures a still frame from the output's video connection, halves it,
// and encodes the JPEG as a base64 string.
// Fixes: the completion handler's error parameter was ignored, undecodable
// image data was not guarded, and the base64 result was computed then dropped.
- (void)captureStillImage {
    AVCaptureConnection *videoConnection = [self.output connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        NSLog(@"take photo failed!");
        return;
    }

    [self.output captureStillImageAsynchronouslyFromConnection:videoConnection
                                             completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        // Surface capture failures instead of silently returning.
        if (imageDataSampleBuffer == NULL || error) {
            NSLog(@"Still image capture failed: %@", error);
            return;
        }

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:imageData];
        if (!image) {
            return;  // JPEG data could not be decoded into an image
        }

        // Shrink to half size before encoding to keep the base64 payload small.
        UIImage *smallImage = [self scaleImage:image toScale:0.5];
        NSData *resultImageData = UIImageJPEGRepresentation(smallImage, 1.0);
        NSString *imageBase64String = [resultImageData base64EncodedStringWithOptions:NSDataBase64Encoding64CharacterLineLength];
        NSLog(@"captured image, base64 length: %lu", (unsigned long)imageBase64String.length);
    }];
}

P.S. 这里可以将获取到的图片进行缩小;

// 等比率缩放图片

// Returns a copy of `image` scaled uniformly by `scaleSize` (e.g. 0.5 halves each side).
// Fixes: the original created a zero-size context for nil images or
// non-positive scales, which logs errors and returns nil.
- (UIImage *)scaleImage:(UIImage *)image toScale:(float)scaleSize {
    if (!image || scaleSize <= 0) {
        return image;  // nothing sensible to draw; hand back the input unchanged
    }
    CGSize targetSize = CGSizeMake(image.size.width * scaleSize, image.size.height * scaleSize);
    // Explicit scale of 1.0 keeps the pixel dimensions identical to the
    // original UIGraphicsBeginImageContext behavior; NO preserves alpha.
    UIGraphicsBeginImageContextWithOptions(targetSize, NO, 1.0);
    [image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return scaledImage;
}

六、横屏时,调整预览图层frame

// Resizes the preview layer to match the destination interface orientation.
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration {
    [super willRotateToInterfaceOrientation:toInterfaceOrientation duration:duration];

    CGRect screenBounds = [UIScreen mainScreen].bounds;
    CGFloat longSide = MAX(screenBounds.size.width, screenBounds.size.height);
    CGFloat shortSide = MIN(screenBounds.size.width, screenBounds.size.height);

    if (toInterfaceOrientation == UIInterfaceOrientationLandscapeLeft ||
        toInterfaceOrientation == UIInterfaceOrientationLandscapeRight) {
        self.preview.frame = CGRectMake(0, 0, longSide, shortSide);
    } else if (toInterfaceOrientation == UIInterfaceOrientationPortrait) {
        self.preview.frame = CGRectMake(0, 0, shortSide, longSide);
    }
    // PortraitUpsideDown is intentionally unhandled, matching the original behavior.
}

你可能感兴趣的:(iOS 使用AVFoundation,捕捉静态图像)