I recently wanted to dig into AVFoundation, which led to this post. Corrections are welcome.
Reference: another developer's blog post (link)
General steps for taking photos and recording video with AVFoundation
1. Create an AVCaptureSession object. AVCaptureSession: the media capture session, responsible for routing captured audio/video data to the output objects; a single AVCaptureSession can have multiple inputs and outputs.
2. Obtain the device you need via the AVCaptureDevice class methods. AVCaptureDevice: an input device such as the microphone or a camera; through this object you can configure physical device properties such as focus and white balance.
3. Initialize an AVCaptureDeviceInput with that AVCaptureDevice. AVCaptureDeviceInput: the input data management object; it is created from an AVCaptureDevice and then added to the AVCaptureSession.
4. Initialize the output data management object, a subclass of AVCaptureOutput: AVCapturePhotoOutput for still photos, AVCaptureMovieFileOutput for video recording. AVCaptureOutput receives the various kinds of output data; in practice you use a concrete subclass such as AVCaptureAudioDataOutput, AVCapturePhotoOutput, AVCaptureVideoDataOutput, or AVCaptureFileOutput. This object is also added to the AVCaptureSession.
5. Add the input object (AVCaptureDeviceInput) and the output object (AVCaptureOutput) to the AVCaptureSession.
6. Create an AVCaptureVideoPreviewLayer bound to the session, add the layer to the view hierarchy, and call the session's startRunning method to begin capturing. AVCaptureVideoPreviewLayer: the camera preview layer, a CALayer subclass, which lets you watch the photo or video feed live; it must be created with the corresponding AVCaptureSession. Note that no frames will arrive unless the user has granted camera access; see the permission sketch after this list.
7. Write the captured audio or video data to the specified file.
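Before the session can deliver frames, the app needs camera permission (and, for this demo, permission to add photos to the album). The following is a minimal sketch and not part of the original code: it assumes the Info.plist already contains NSCameraUsageDescription (plus NSPhotoLibraryAddUsageDescription for saving), and that the check runs before startRunning is called.

// Sketch: request camera access before starting the capture session.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            // Safe to configure the AVCaptureSession and call startRunning.
        } else {
            // Point the user to Settings to enable camera access.
        }
    });
}];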
The core code is as follows.
.h file
#import <UIKit/UIKit.h>
@interface PhotoView : UIView
- (instancetype)initWithFrame:(CGRect)frame completeHandle:(void (^)(UIImage * photo))completeHandle;
- (void)start;
- (void)stop;
@end
.m file
#import "PhotoView.h"
#import <AVFoundation/AVFoundation.h>
typedef void(^CompleteHandle)(UIImage * photo);
@interface PhotoView () <AVCapturePhotoCaptureDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
/** Input that wraps the capture device */
@property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
/** Output that produces the still photos */
@property (nonatomic, strong) AVCapturePhotoOutput *photoOutput;
@property (nonatomic, strong) AVCaptureDevice *device;
/** Block used to hand the captured photo back to the caller */
@property (nonatomic, copy) CompleteHandle completeHandle;
@end
@implementation PhotoView
- (instancetype)initWithFrame:(CGRect)frame completeHandle:(void (^)(UIImage *))completeHandle {
    if (self = [super initWithFrame:frame]) {
        self.completeHandle = completeHandle;
        [self configCamera];
        [self setUserInterface];
    }
    return self;
}
- (void)setUserInterface
{
    // Left column: torch mode buttons (auto / on / off).
    NSArray *leftBtnTitles = @[@"auto", @"open", @"close"];
    CGFloat btnSize = 30.f;
    CGFloat width = self.bounds.size.width;
    CGFloat height = self.bounds.size.height;
    CGFloat margin = 30.f;
    for (int i = 0; i < leftBtnTitles.count; i++) {
        UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(10, (margin + btnSize) * i + margin, btnSize, btnSize)];
        [button setImage:[UIImage imageNamed:leftBtnTitles[i]] forState:UIControlStateNormal];
        [button addTarget:self action:@selector(btnClick:) forControlEvents:UIControlEventTouchUpInside];
        button.tag = 100 + i;
        [self addSubview:button];
    }
    // Right column: switch between front and back cameras.
    NSArray *rightBtnTitles = @[@"change"];
    for (int i = 0; i < rightBtnTitles.count; i++) {
        UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(width - btnSize - 10, (margin + btnSize) * i + margin, btnSize, btnSize)];
        [button setImage:[UIImage imageNamed:rightBtnTitles[i]] forState:UIControlStateNormal];
        [button addTarget:self action:@selector(btnClick:) forControlEvents:UIControlEventTouchUpInside];
        button.tag = 200 + i;
        [self addSubview:button];
    }
    // Bottom center: shutter button.
    UIButton *takeButton = [UIButton buttonWithType:UIButtonTypeCustom];
    takeButton.frame = CGRectMake((width - 60) / 2, height - 70, 60, 60);
    [takeButton setImage:[UIImage imageNamed:@"carema"] forState:UIControlStateNormal];
    [takeButton addTarget:self action:@selector(takePhoto) forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:takeButton];
}
- (void)configCamera
{
    // 1. Create the capture session and pick a preset.
    self.captureSession = [[AVCaptureSession alloc] init];
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
    }
    // 2. Grab the back camera and enable continuous autofocus.
    self.device = [self cameraDeviceWithPosition:AVCaptureDevicePositionBack];
    [self deviceFocusModeForType:AVCaptureFocusModeContinuousAutoFocus];
    // 3. Wrap the device in an input and create the photo output.
    self.deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
    self.photoOutput = [[AVCapturePhotoOutput alloc] init];
    // 4. Attach input and output to the session.
    if ([self.captureSession canAddInput:self.deviceInput]) {
        [self.captureSession addInput:self.deviceInput];
    }
    if ([self.captureSession canAddOutput:self.photoOutput]) {
        [self.captureSession addOutput:self.photoOutput];
    }
    // 5. Create the preview layer and add it to this view's layer.
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    previewLayer.frame = CGRectMake(0, 0, self.bounds.size.width, self.bounds.size.height);
    [self.layer addSublayer:previewLayer];
}
/**
 * Get the AVCaptureDevice (built-in wide-angle camera) at the given position.
 */
- (AVCaptureDevice *)cameraDeviceWithPosition:(AVCaptureDevicePosition)position
{
    AVCaptureDeviceDiscoverySession *dev_dis_session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
    if (dev_dis_session.devices.count > 0) {
        return [dev_dis_session.devices firstObject];
    }
    return nil;
}
- (void)start
{
    [self.captureSession startRunning];
}

- (void)stop
{
    [self.captureSession stopRunning];
}
/**
 * Take a photo.
 */
- (void)takePhoto
{
    // Request JPEG output. AVVideoCodecJPEG is the iOS 10 constant; iOS 11 renamed it AVVideoCodecTypeJPEG.
    NSDictionary *output_set = @{AVVideoCodecKey : AVVideoCodecJPEG};
    AVCapturePhotoSettings *photoset = [AVCapturePhotoSettings photoSettingsWithFormat:output_set];
    [self.photoOutput capturePhotoWithSettings:photoset delegate:self];
}
// AVCapturePhotoCaptureDelegate (iOS 10 callback; see the iOS 11+ variant after the class).
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error {
    if (!error) {
        NSData *data = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
        UIImage *image = [UIImage imageWithData:data];
        // Called after the photo has been captured; save it to the photo album.
        UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
    }
}
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (!error) {
        self.completeHandle(image); // Hand the photo back to the owning controller once it has been saved.
    }
}
- (void)btnClick:(UIButton *)sender
{
    if (sender.tag == 100) {
        // Torch (continuous light, not the per-shot flash) - auto
        [self deviceTorchModeForType:AVCaptureTorchModeAuto];
    }
    if (sender.tag == 101) {
        [self deviceTorchModeForType:AVCaptureTorchModeOn];  // Torch - on
    }
    if (sender.tag == 102) {
        [self deviceTorchModeForType:AVCaptureTorchModeOff]; // Torch - off
    }
    if (sender.tag == 200) {
        // Switch between front and back cameras.
        [self switchingCamera];
    }
}
/**
 * Switch between the front and back cameras.
 */
- (void)switchingCamera
{
    AVCaptureDevicePosition currentPosition = self.deviceInput.device.position;
    AVCaptureDevice *device;
    if (currentPosition == AVCaptureDevicePositionBack) {
        // The front camera may not support 1080p, so drop to 720p.
        if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
            [self.captureSession setSessionPreset:AVCaptureSessionPreset1280x720];
        }
        device = [self cameraDeviceWithPosition:AVCaptureDevicePositionFront];
    }
    if (currentPosition == AVCaptureDevicePositionFront) {
        if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
            [self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
        }
        device = [self cameraDeviceWithPosition:AVCaptureDevicePositionBack];
    }
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.deviceInput];
    if ([self.captureSession canAddInput:deviceInput]) {
        [self.captureSession addInput:deviceInput];
        self.deviceInput = deviceInput;
        self.device = device;
    }
    [self deviceFocusModeForType:AVCaptureFocusModeContinuousAutoFocus];
    [self.captureSession commitConfiguration];
}
/**
 * Set the torch mode (the continuous light, not the per-shot flash).
 */
- (void)deviceTorchModeForType:(AVCaptureTorchMode)type
{
    if ([self.device hasTorch]) {
        BOOL locked = [self.device lockForConfiguration:nil];
        if (locked) {
            self.device.torchMode = type;
            [self.device unlockForConfiguration];
        }
    }
}
/**
 * Set the focus mode.
 */
- (void)deviceFocusModeForType:(AVCaptureFocusMode)type
{
    if ([self.device isFocusModeSupported:type]) {
        BOOL locked = [self.device lockForConfiguration:nil];
        if (locked) {
            // Apply the focus mode while the device is locked for configuration.
            self.device.focusMode = type;
            [self.device unlockForConfiguration];
        }
    }
}
@end
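A note on API versions: the delegate callback above and AVCapturePhotoOutput's JPEGPhotoDataRepresentationForJPEGSampleBuffer:previewPhotoSampleBuffer: are the iOS 10 API and are deprecated from iOS 11, where AVCapturePhoto replaces the sample-buffer pair (and AVVideoCodecTypeJPEG replaces AVVideoCodecJPEG). A minimal sketch of the newer callback, kept functionally equivalent to the one above:

// iOS 11+ replacement for the sample-buffer based callback.
- (void)captureOutput:(AVCapturePhotoOutput *)output
didFinishProcessingPhoto:(AVCapturePhoto *)photo
                error:(nullable NSError *)error {
    if (error) {
        return;
    }
    NSData *data = [photo fileDataRepresentation]; // encoded photo data (JPEG here)
    UIImage *image = [UIImage imageWithData:data];
    UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
}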
The view controller
#import "ViewController.h"
#import "PhotoView.h"
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setUserInterface];
}

- (void)setUserInterface
{
    PhotoView *view = [[PhotoView alloc] initWithFrame:self.view.bounds completeHandle:^(UIImage *photo) {
        NSLog(@"%@", photo); // The captured photo comes back through the block.
    }];
    [self.view addSubview:view];
    [view start];
}
@end
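One refinement worth mentioning: startRunning blocks the calling thread while the session starts up, so Apple recommends invoking it off the main queue. A small sketch, assuming a dedicated serial queue (the queue name is illustrative and not part of the original code):

// Sketch: start the session on a background queue so the UI stays responsive.
dispatch_queue_t sessionQueue = dispatch_queue_create("photoview.session.queue", DISPATCH_QUEUE_SERIAL);
dispatch_async(sessionQueue, ^{
    [view start]; // -start just calls -[AVCaptureSession startRunning]
});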
The demo is available (link); stars are welcome.