AV Foundation ⑫ Understanding Media Capture
This article covers the essentials of capturing media: the capture session AVCaptureSession, capture devices AVCaptureDevice, device inputs AVCaptureDeviceInput, capture outputs AVCaptureOutput, and the AVCaptureVideoPreviewLayer layer used to preview video content.
iOS camera applications let users capture photos and movies with the front and rear cameras. This sample code project demonstrates how to implement these capture features in your own camera app using the built-in iPhone cameras.
Developing a camera app requires building and testing on a physical device, and the privacy requests must be handled before writing any capture code.
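Concretely, Info.plist must contain NSCameraUsageDescription (and NSMicrophoneUsageDescription when recording audio), and access should be requested before the session is configured. A minimal sketch, assuming those keys are in place:

#import <AVFoundation/AVFoundation.h>

// Request camera access up front; session setup should only proceed once
// access is granted. Microphone access works the same way with
// AVMediaTypeAudio.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            // Safe to configure and start the capture session here.
        } else {
            // Direct the user to Settings to enable camera access.
        }
    });
}];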
Setting Up the Preview View
The camera app needs a view that shows a live preview. Its backing layer is an AVCaptureVideoPreviewLayer, which is associated with an AVCaptureSession and kept in sync with it. The view also converts screen touch points into capture-device coordinates for focus and exposure.
- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self setupView];
    }
    return self;
}

- (id)initWithCoder:(NSCoder *)coder {
    self = [super initWithCoder:coder];
    if (self) {
        [self setupView];
    }
    return self;
}

+ (Class)layerClass {
    // Overriding the layerClass class method on a UIView subclass lets the
    // view use a custom backing layer. Override it to return the
    // AVCaptureVideoPreviewLayer class object.
    return [AVCaptureVideoPreviewLayer class];
}
- (AVCaptureSession *)session {
    // Override the session getter to return the capture session attached
    // to the preview layer.
    return [(AVCaptureVideoPreviewLayer *)self.layer session];
}

- (void)setSession:(AVCaptureSession *)session {
    // Override the session setter to access the view's layer property, an
    // AVCaptureVideoPreviewLayer instance, and hand it the
    // AVCaptureSession so captured data is rendered directly into the
    // layer and stays in sync with session state.
    [(AVCaptureVideoPreviewLayer *)self.layer setSession:session];
}
// UI setup: gesture recognizers — a single tap to focus, a double tap to
// expose, and a two-finger double tap to reset focus and exposure.
- (void)setupView {
    [(AVCaptureVideoPreviewLayer *)self.layer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    _singleTapRecognizer =
        [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleSingleTap:)];
    _doubleTapRecognizer =
        [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleDoubleTap:)];
    _doubleTapRecognizer.numberOfTapsRequired = 2;
    _doubleDoubleTapRecognizer =
        [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleDoubleDoubleTap:)];
    _doubleDoubleTapRecognizer.numberOfTapsRequired = 2;
    _doubleDoubleTapRecognizer.numberOfTouchesRequired = 2;
    [self addGestureRecognizer:_singleTapRecognizer];
    [self addGestureRecognizer:_doubleTapRecognizer];
    [self addGestureRecognizer:_doubleDoubleTapRecognizer];
    [_singleTapRecognizer requireGestureRecognizerToFail:_doubleTapRecognizer];
    _focusBox = [self viewWithColor:[UIColor colorWithRed:0.102 green:0.636 blue:1.000 alpha:1.000]];
    _exposureBox = [self viewWithColor:[UIColor colorWithRed:1.000 green:0.421 blue:0.054 alpha:1.000]];
    [self addSubview:_focusBox];
    [self addSubview:_exposureBox];
}
- (void)handleSingleTap:(UIGestureRecognizer *)recognizer {
    CGPoint point = [recognizer locationInView:self];
    [self runBoxAnimationOnView:self.focusBox point:point];
    if (self.delegate) {
        [self.delegate tappedToFocusAtPoint:[self captureDevicePointForPoint:point]];
    }
}

// Private helper shared by the touch-handling methods: converts a touch
// point in the view's coordinate space into a point in the capture
// device's coordinate space.
- (CGPoint)captureDevicePointForPoint:(CGPoint)point {
    AVCaptureVideoPreviewLayer *layer =
        (AVCaptureVideoPreviewLayer *)self.layer;
    return [layer captureDevicePointOfInterestForPoint:point];
}
- (void)handleDoubleTap:(UIGestureRecognizer *)recognizer {
    CGPoint point = [recognizer locationInView:self];
    [self runBoxAnimationOnView:self.exposureBox point:point];
    if (self.delegate) {
        [self.delegate tappedToExposeAtPoint:[self captureDevicePointForPoint:point]];
    }
}

- (void)handleDoubleDoubleTap:(UIGestureRecognizer *)recognizer {
    [self runResetAnimation];
    if (self.delegate) {
        [self.delegate tappedToResetFocusAndExposure];
    }
}

// Flash the focus/exposure box at the tapped point, then hide it again.
- (void)runBoxAnimationOnView:(UIView *)view point:(CGPoint)point {
    view.center = point;
    view.hidden = NO;
    [UIView animateWithDuration:0.15f
                          delay:0.0f
                        options:UIViewAnimationOptionCurveEaseInOut
                     animations:^{
                         view.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1.0);
                     }
                     completion:^(BOOL complete) {
                         double delayInSeconds = 0.5f;
                         dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
                         dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
                             view.hidden = YES;
                             view.transform = CGAffineTransformIdentity;
                         });
                     }];
}
// Reset animation: center both boxes on screen, flash them, then hide them.
- (void)runResetAnimation {
    if (!self.tapToFocusEnabled && !self.tapToExposeEnabled) {
        return;
    }
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    // Convert the device-space center (0.5, 0.5) back into layer coordinates.
    CGPoint centerPoint = [previewLayer pointForCaptureDevicePointOfInterest:CGPointMake(0.5f, 0.5f)];
    self.focusBox.center = centerPoint;
    self.exposureBox.center = centerPoint;
    self.exposureBox.transform = CGAffineTransformMakeScale(1.2f, 1.2f);
    self.focusBox.hidden = NO;
    self.exposureBox.hidden = NO;
    [UIView animateWithDuration:0.15f
                          delay:0.0f
                        options:UIViewAnimationOptionCurveEaseInOut
                     animations:^{
                         self.focusBox.layer.transform = CATransform3DMakeScale(0.5, 0.5, 1.0);
                         self.exposureBox.layer.transform = CATransform3DMakeScale(0.7, 0.7, 1.0);
                     }
                     completion:^(BOOL complete) {
                         double delayInSeconds = 0.5f;
                         dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
                         dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
                             self.focusBox.hidden = YES;
                             self.exposureBox.hidden = YES;
                             self.focusBox.transform = CGAffineTransformIdentity;
                             self.exposureBox.transform = CGAffineTransformIdentity;
                         });
                     }];
}
- (void)setTapToFocusEnabled:(BOOL)enabled {
    _tapToFocusEnabled = enabled;
    self.singleTapRecognizer.enabled = enabled;
}

- (void)setTapToExposeEnabled:(BOOL)enabled {
    _tapToExposeEnabled = enabled;
    self.doubleTapRecognizer.enabled = enabled;
}

// Build a bordered, initially hidden box view. BOX_BOUNDS is a frame
// macro defined elsewhere in the project.
- (UIView *)viewWithColor:(UIColor *)color {
    UIView *view = [[UIView alloc] initWithFrame:BOX_BOUNDS];
    view.backgroundColor = [UIColor clearColor];
    view.layer.borderColor = color.CGColor;
    view.layer.borderWidth = 5.0f;
    view.hidden = YES;
    return view;
}
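The tappedToFocusAtPoint:, tappedToExposeAtPoint:, and tappedToResetFocusAndExposure messages sent above imply a delegate protocol on the preview view. A plausible declaration, reconstructed from those calls (the protocol name itself is an assumption):

@protocol PreviewViewDelegate <NSObject>
// Single tap: focus at the converted device point.
- (void)tappedToFocusAtPoint:(CGPoint)point;
// Double tap: expose at the converted device point.
- (void)tappedToExposeAtPoint:(CGPoint)point;
// Two-finger double tap: reset focus and exposure.
- (void)tappedToResetFocusAndExposure;
@end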
Setting Up the Capture Session, Inputs, and Outputs
The capture-session code lives in a camera controller class, CameraController. This class configures and manages the various capture devices, and also controls and interacts with the capture outputs. Start by writing a session setup method, - (BOOL)setupSession:(NSError **)error:

- Create the AVCaptureSession instance.
- Add the camera and microphone inputs: get the AVMediaTypeVideo and AVMediaTypeAudio capture devices and wrap them in AVCaptureDeviceInput objects.
- Finally, add the output instances: AVCaptureStillImageOutput for still images and AVCaptureMovieFileOutput for QuickTime movies.
- (BOOL)setupSession:(NSError **)error {
    // Create the capture session. AVCaptureSession is the central hub of
    // the capture scene.
    self.captureSession = [[AVCaptureSession alloc] init];
    /*
     AVCaptureSessionPresetHigh
     AVCaptureSessionPresetMedium
     AVCaptureSessionPresetLow
     AVCaptureSessionPreset640x480
     AVCaptureSessionPreset1280x720
     AVCaptureSessionPresetPhoto
     */
    // Set the session's quality/resolution preset.
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
    // Get the default video capture device; iOS returns the rear camera.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Wrap the capture device in an AVCaptureDeviceInput.
    // Note: to add a capture device to a session, the device must first be
    // wrapped in an AVCaptureDeviceInput object.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    // Check that videoInput is valid.
    if (videoInput) {
        // canAddInput: tests whether the input can be added to the session.
        if ([self.captureSession canAddInput:videoInput]) {
            // Add videoInput to the captureSession.
            [self.captureSession addInput:videoInput];
            self.activeVideoInput = videoInput;
        }
    } else {
        return NO;
    }
    // Get the default audio capture device, i.e. the built-in microphone.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    // Create a capture device input for this device.
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
    // Check that audioInput is valid.
    if (audioInput) {
        // canAddInput: tests whether the input can be added to the session.
        if ([self.captureSession canAddInput:audioInput]) {
            // Add audioInput to the captureSession.
            [self.captureSession addInput:audioInput];
        }
    } else {
        return NO;
    }
    // AVCaptureStillImageOutput instance for capturing still images from
    // the camera.
    self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
    // Settings dictionary: capture images in JPEG format.
    self.imageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
    // Test whether the output can be added; if so, add it to the session.
    if ([self.captureSession canAddOutput:self.imageOutput]) {
        [self.captureSession addOutput:self.imageOutput];
    }
    // Create an AVCaptureMovieFileOutput instance to record QuickTime
    // movies to the file system.
    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    // Test whether the output can be added; if so, add it to the session.
    if ([self.captureSession canAddOutput:self.movieOutput]) {
        [self.captureSession addOutput:self.movieOutput];
    }
    self.videoQueue = dispatch_queue_create("example.VideoQueue", NULL);
    return YES;
}
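A sketch of how a hosting view controller might drive this method (the property names are assumptions consistent with the code above; startSession is defined in the next section):

NSError *error;
self.cameraController = [[CameraController alloc] init];
if ([self.cameraController setupSession:&error]) {
    // Hand the configured session to the preview view, then start it.
    [self.previewView setSession:self.cameraController.captureSession];
    [self.cameraController startSession];
} else {
    NSLog(@"Error setting up session: %@", [error localizedDescription]);
}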
Starting and Stopping the Session
Calling setupSession: puts the capture session's object graph in place, but before the session can be used it has to be started, and where there is starting there must also be stopping. Define two corresponding methods, startSession and stopSession, for callers to use:
- (void)startSession {
    // Check whether the session is already running.
    if (![self.captureSession isRunning]) {
        // startRunning is a blocking call that takes time, so dispatch it
        // asynchronously onto the video queue.
        dispatch_async(self.videoQueue, ^{
            [self.captureSession startRunning];
        });
    }
}

- (void)stopSession {
    // Check whether the session is running.
    if ([self.captureSession isRunning]) {
        // Stop it asynchronously as well.
        dispatch_async(self.videoQueue, ^{
            [self.captureSession stopRunning];
        });
    }
}
Switching Cameras
Essentially all iOS devices have both a front and a rear camera. The first feature to build lets the user switch between them:

- cameraWithPosition: looks up the camera matching the given position in the device array and returns it;
- activeCamera returns the camera behind the capture session's current input;
- inactiveCamera returns the camera opposite the currently active one;
- canSwitchCameras returns a BOOL indicating whether more than one camera is available;
- cameraCount returns the number of available video capture devices;
- finally, switchCameras performs the switch to the inactive camera.
#pragma mark - Device Configuration — camera support methods
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // Get the available video devices.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // Iterate over them and return the one matching the position argument.
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}

- (AVCaptureDevice *)activeCamera {
    // Return the device property of the session's active video input.
    return self.activeVideoInput.device;
}

// Return the camera that is not currently active.
- (AVCaptureDevice *)inactiveCamera {
    // Found by looking up the opposite of the active camera; returns nil
    // if the device has only one camera.
    AVCaptureDevice *device = nil;
    if (self.cameraCount > 1) {
        if ([self activeCamera].position == AVCaptureDevicePositionBack) {
            device = [self cameraWithPosition:AVCaptureDevicePositionFront];
        } else {
            device = [self cameraWithPosition:AVCaptureDevicePositionBack];
        }
    }
    return device;
}

// Whether more than one camera is available.
- (BOOL)canSwitchCameras {
    return self.cameraCount > 1;
}

// The number of available video capture devices.
- (NSUInteger)cameraCount {
    return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
}
// Switch cameras.
- (BOOL)switchCameras {
    // Make sure more than one camera is available.
    if (![self canSwitchCameras]) {
        return NO;
    }
    // Get the device opposite the current one.
    NSError *error;
    AVCaptureDevice *videoDevice = [self inactiveCamera];
    // Wrap the device in an AVCaptureDeviceInput.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    // Check whether videoInput is non-nil.
    if (videoInput) {
        // Mark the start of the configuration change.
        [self.captureSession beginConfiguration];
        // Remove the session's existing capture input.
        [self.captureSession removeInput:self.activeVideoInput];
        // Test whether the new input can be added.
        if ([self.captureSession canAddInput:videoInput]) {
            // If so, add videoInput and make it the active video input.
            [self.captureSession addInput:videoInput];
            self.activeVideoInput = videoInput;
        } else {
            // If the new input cannot be added, put the original video
            // input back into the session.
            [self.captureSession addInput:self.activeVideoInput];
        }
        // When configuration is done, commitConfiguration batches all the
        // changes together and applies them as a group.
        [self.captureSession commitConfiguration];
    } else {
        // Creating the AVCaptureDeviceInput failed; notify the delegate
        // to handle the error.
        [self.delegate deviceConfigurationFailedWithError:error];
        return NO;
    }
    return YES;
}
Configuring the Capture Device
Configuring Focus

- cameraSupportsTapToFocus reports whether tap-to-focus is supported;
- focusAtPoint: sets the focus point.
#pragma mark - Focus Methods — tap-to-focus implementation
- (BOOL)cameraSupportsTapToFocus {
    // Ask the active camera whether it supports focusing on a point of interest.
    return [[self activeCamera] isFocusPointOfInterestSupported];
}

- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    // Check for point-of-interest focus support and autofocus-mode support.
    if (device.isFocusPointOfInterestSupported &&
        [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        NSError *error;
        // Lock the device for configuration; proceed if the lock is acquired.
        if ([device lockForConfiguration:&error]) {
            // Set focusPointOfInterest to the CGPoint.
            device.focusPointOfInterest = point;
            // Set focusMode to AVCaptureFocusModeAutoFocus.
            device.focusMode = AVCaptureFocusModeAutoFocus;
            // Release the lock.
            [device unlockForConfiguration];
        } else {
            // On error, hand it to the error-handling delegate.
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
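On the view-controller side, the preview view's delegate callbacks simply forward to the camera controller. A minimal sketch of that glue, assuming the view controller adopts the preview view's delegate protocol (exposeAtPoint: and resetFocusAndExposureModes are defined below):

// Preview-view delegate methods forwarding into CameraController.
- (void)tappedToFocusAtPoint:(CGPoint)point {
    [self.cameraController focusAtPoint:point];
}

- (void)tappedToExposeAtPoint:(CGPoint)point {
    [self.cameraController exposeAtPoint:point];
}

- (void)tappedToResetFocusAndExposure {
    [self.cameraController resetFocusAndExposureModes];
}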
Configuring Exposure

- cameraSupportsTapToExpose reports whether tap-to-expose is supported;
- exposeAtPoint: sets the exposure point;
- observe the adjustingExposure property so that once exposure settles, the exposure can be locked and the observer removed.
#pragma mark - Exposure Methods — tap-to-expose implementation
- (BOOL)cameraSupportsTapToExpose {
    // Ask the device whether it supports exposing on a point of interest.
    return [[self activeCamera] isExposurePointOfInterestSupported];
}

static const NSString *THCameraAdjustingExposureContext;

- (void)exposeAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // Check point-of-interest exposure support and whether
    // AVCaptureExposureModeContinuousAutoExposure is supported.
    if (device.isExposurePointOfInterestSupported &&
        [device isExposureModeSupported:exposureMode]) {
        NSError *error;
        // Lock the device for configuration.
        if ([device lockForConfiguration:&error]) {
            // Set the desired values.
            device.exposurePointOfInterest = point;
            device.exposureMode = exposureMode;
            // Check whether the device supports locked exposure mode.
            if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
                // If so, use KVO to track the state of the device's
                // adjustingExposure property.
                [device addObserver:self
                         forKeyPath:@"adjustingExposure"
                            options:NSKeyValueObservingOptionNew
                            context:&THCameraAdjustingExposureContext];
            }
            // Release the lock.
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    // Check whether the context is THCameraAdjustingExposureContext.
    if (context == &THCameraAdjustingExposureContext) {
        // Get the device.
        AVCaptureDevice *device = (AVCaptureDevice *)object;
        // If the device is no longer adjusting exposure, confirm that its
        // exposureMode can be set to AVCaptureExposureModeLocked.
        if (!device.isAdjustingExposure &&
            [device isExposureModeSupported:AVCaptureExposureModeLocked]) {
            // Remove self as observer of adjustingExposure so no further
            // change notifications arrive.
            [object removeObserver:self
                        forKeyPath:@"adjustingExposure"
                           context:&THCameraAdjustingExposureContext];
            // Hop back to the main queue asynchronously.
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error;
                if ([device lockForConfiguration:&error]) {
                    // Switch exposureMode to locked.
                    device.exposureMode = AVCaptureExposureModeLocked;
                    // Release the lock.
                    [device unlockForConfiguration];
                } else {
                    [self.delegate deviceConfigurationFailedWithError:error];
                }
            });
        }
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
Resetting Focus and Exposure
// Reset focus and exposure.
- (void)resetFocusAndExposureModes {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    // Check whether point-of-interest focus and continuous autofocus are supported.
    BOOL canResetFocus = [device isFocusPointOfInterestSupported] &&
                         [device isFocusModeSupported:focusMode];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    // Confirm that the exposure can be reset.
    BOOL canResetExposure = [device isExposurePointOfInterestSupported] &&
                            [device isExposureModeSupported:exposureMode];
    // Recall that in device coordinate space the top-left corner is (0,0),
    // the bottom-right is (1,1), and the center is (0.5,0.5).
    CGPoint centerPoint = CGPointMake(0.5f, 0.5f);
    NSError *error;
    // Lock the device for configuration.
    if ([device lockForConfiguration:&error]) {
        // If focus can be reset, do so.
        if (canResetFocus) {
            device.focusMode = focusMode;
            device.focusPointOfInterest = centerPoint;
        }
        // If exposure can be reset, set the desired exposure mode.
        if (canResetExposure) {
            device.exposureMode = exposureMode;
            device.exposurePointOfInterest = centerPoint;
        }
        // Release the lock.
        [device unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
Configuring Flash and Torch
#pragma mark - Flash and Torch Modes
// Whether the camera has a flash.
- (BOOL)cameraHasFlash {
    return [[self activeCamera] hasFlash];
}

// The current flash mode.
- (AVCaptureFlashMode)flashMode {
    return [[self activeCamera] flashMode];
}

// Set the flash mode.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    // Get the active device.
    AVCaptureDevice *device = [self activeCamera];
    // Check whether the flash mode is supported.
    if ([device isFlashModeSupported:flashMode]) {
        // If so, lock the device.
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            // Change the flash mode.
            device.flashMode = flashMode;
            // Done configuring; unlock the device.
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}

// Whether the camera has a torch.
- (BOOL)cameraHasTorch {
    return [[self activeCamera] hasTorch];
}

// The current torch mode.
- (AVCaptureTorchMode)torchMode {
    return [[self activeCamera] torchMode];
}

// Turn the torch on or off.
- (void)setTorchMode:(AVCaptureTorchMode)torchMode {
    AVCaptureDevice *device = [self activeCamera];
    if ([device isTorchModeSupported:torchMode]) {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.torchMode = torchMode;
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
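A small usage sketch, e.g. wired to a toolbar button (the action and property names are hypothetical):

// Toggle the torch from a UI action.
- (void)toggleTorch:(id)sender {
    if ([self.cameraController cameraHasTorch]) {
        AVCaptureTorchMode mode = [self.cameraController torchMode];
        [self.cameraController setTorchMode:(mode == AVCaptureTorchModeOn) ?
                                             AVCaptureTorchModeOff : AVCaptureTorchModeOn];
    }
}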
Configuring Video Zoom

- First, cameraSupportsZoom is implemented by asking the currently selected AVCaptureDevice for its active AVCaptureDeviceFormat; if the format's videoMaxZoomFactor is greater than 1.0, the device supports zooming.
- maxZoomFactor determines the maximum zoom factor to allow.
- setZoomValue: changes the zoom level immediately.
- rampZoomToValue: ramps from the current value to zoomValue over time.
- cancelZoom cancels an in-flight ramp.
- Observe videoZoomFactor and rampingVideoZoom to keep the UI updated.
- (BOOL)cameraSupportsZoom {
    // A device supports zoom if its active format allows a factor above 1.0.
    return self.activeCamera.activeFormat.videoMaxZoomFactor > 1.0f;
}

- (CGFloat)maxZoomFactor {
    // Cap the allowed zoom at 4x even if the format permits more.
    return MIN(self.activeCamera.activeFormat.videoMaxZoomFactor, 4.0f);
}

// zoomValue is a normalized 0..1 value from the UI.
- (void)setZoomValue:(CGFloat)zoomValue {
    if (!self.activeCamera.isRampingVideoZoom) {
        NSError *error;
        if ([self.activeCamera lockForConfiguration:&error]) {
            // Exponentiate so the zoom slider feels linear.
            CGFloat zoomFactor = pow([self maxZoomFactor], zoomValue);
            self.activeCamera.videoZoomFactor = zoomFactor;
            [self.activeCamera unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}

- (void)rampZoomToValue:(CGFloat)zoomValue {
    CGFloat zoomFactor = pow([self maxZoomFactor], zoomValue);
    NSError *error;
    if ([self.activeCamera lockForConfiguration:&error]) {
        // Ramp toward the target factor. THZoomRate is a rate constant
        // (zoom factors per second) defined elsewhere in the project.
        [self.activeCamera rampToVideoZoomFactor:zoomFactor
                                        withRate:THZoomRate];
        [self.activeCamera unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}

- (void)cancelZoom {
    NSError *error;
    if ([self.activeCamera lockForConfiguration:&error]) {
        // Cancel any in-flight ramp, holding the current zoom factor.
        [self.activeCamera cancelVideoZoomRamp];
        [self.activeCamera unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}

- (void)addVideoZoomFactorObserver {
    // Observe both the current factor and whether a ramp is in progress.
    [self.activeCamera addObserver:self
                        forKeyPath:@"videoZoomFactor"
                           options:0
                           context:NULL];
    [self.activeCamera addObserver:self
                        forKeyPath:@"rampingVideoZoom"
                           options:0
                           context:NULL];
}

// Note: in a real class this logic would be merged with the exposure
// observer shown earlier into a single observeValueForKeyPath: method.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if ([keyPath isEqualToString:@"videoZoomFactor"]) {
        [self updateZoomingDelegate];
    } else if ([keyPath isEqualToString:@"rampingVideoZoom"]) {
        if (self.activeCamera.isRampingVideoZoom) {
            [self updateZoomingDelegate];
        }
    } else {
        [super observeValueForKeyPath:keyPath
                             ofObject:object
                               change:change
                              context:context];
    }
}

- (void)updateZoomingDelegate {
    CGFloat curZoomFactor = self.activeCamera.videoZoomFactor;
    CGFloat maxZoomFactor = [self maxZoomFactor];
    // Invert the exponential mapping to recover the normalized 0..1 value.
    CGFloat value = log(curZoomFactor) / log(maxZoomFactor);
    [self.zoomingDelegate rampedZoomToValue:value];
}
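A sketch of driving this from a UISlider whose value runs 0..1, assuming the view controller acts as the zooming delegate (the outlet and action names are hypothetical):

// Slider action: push the normalized value straight to the controller.
- (IBAction)zoomSliderChanged:(UISlider *)sender {
    [self.cameraController setZoomValue:sender.value];
}

// rampedZoomToValue: comes back from the controller during a ramp,
// keeping the slider in sync with the actual zoom factor.
- (void)rampedZoomToValue:(CGFloat)value {
    self.zoomSlider.value = value;
}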
Capturing Still Images
AVCaptureStillImageOutput defines the captureStillImageAsynchronouslyFromConnection:completionHandler: method to perform the actual capture. Define a captureStillImage method for the shutter button to invoke:

- Get an AVCaptureConnection pointer via connectionWithMediaType:.
- Set the connection's videoOrientation.
- Define a completion handler block that receives a valid CMSampleBuffer, calls AVCaptureStillImageOutput's jpegStillImageNSDataRepresentation: to get the NSData for the image bytes, and converts it into a UIImage instance.
#pragma mark - Image Capture Methods — capturing still images
// Map the device orientation to a video orientation.
- (AVCaptureVideoOrientation)currentVideoOrientation {
    AVCaptureVideoOrientation orientation;
    // Read the UIDevice orientation. Note the landscape cases are
    // intentionally crossed: device and video orientations are mirrored.
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortrait:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationLandscapeRight:
            orientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            orientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        default:
            orientation = AVCaptureVideoOrientationLandscapeRight;
            break;
    }
    return orientation;
}
/*
 AVCaptureStillImageOutput is a subclass of AVCaptureOutput used for
 capturing still images.
 */
- (void)captureStillImage {
    // Get the connection.
    AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
    // The app only supports portrait UI, but if the user shoots in
    // landscape, the resulting photo's orientation must be adjusted.
    // Check whether setting the video orientation is supported.
    if (connection.isVideoOrientationSupported) {
        // Apply the current orientation.
        connection.videoOrientation = [self currentVideoOrientation];
    }
    // Define a handler block that yields the image's NSData.
    id handler = ^(CMSampleBufferRef sampleBuffer, NSError *error) {
        if (sampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
            UIImage *image = [[UIImage alloc] initWithData:imageData];
            // The capture succeeded; write the image out here.
        } else {
            NSLog(@"NULL sampleBuffer: %@", [error localizedDescription]);
        }
    };
    // Capture the still image.
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:connection
                                                   completionHandler:handler];
}
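The handler above leaves saving the image as a placeholder. A minimal sketch of writing the UIImage to the user's photo library with the Photos framework, assuming photo-library permission has been granted (writeImageToLibrary: is a hypothetical helper name):

#import <Photos/Photos.h>

- (void)writeImageToLibrary:(UIImage *)image {
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        // Create an asset from the captured image.
        [PHAssetChangeRequest creationRequestForAssetFromImage:image];
    } completionHandler:^(BOOL success, NSError *error) {
        if (!success) {
            NSLog(@"Error saving image: %@", [error localizedDescription]);
        }
    }];
}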
Video Capture
AVCaptureMovieFileOutput inherits most of its core functionality from its parent class, AVCaptureFileOutput, which defines a number of practical features, such as recording up to a maximum duration or up to a specific file size. It can also be configured to preserve a minimum amount of free disk space, which matters when recording video on mobile devices with limited storage.
Normally, when a QuickTime movie is ready for distribution, its header metadata sits at the start of the file. This lets a video player quickly read the header to determine the file's contents, its structure, and the locations of the many samples it contains. When recording a QuickTime movie, however, the header cannot be created until all the samples have been captured; when recording finishes, the header data is created and appended at the end of the file.
Creating the header only after all samples are captured poses a problem, especially on mobile devices: if a crash or other interruption occurs, such as an incoming phone call, the header is never written correctly, leaving an unreadable movie file on disk. A core feature AVCaptureMovieFileOutput provides is capturing QuickTime movies in fragments.
When recording starts, a minimal header is written at the front of the file, and as recording progresses, fragments are written at a set interval, gradually building the complete header. By default a fragment is written every 10 seconds, but the interval can be changed through the capture output's movieFragmentInterval property.
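For example, the interval could plausibly be shortened like this (a sketch; the 5-second value is an arbitrary choice, not from the original project):

#import <CoreMedia/CoreMedia.h>

// Write a movie fragment every 5 seconds instead of the default 10.
self.movieOutput.movieFragmentInterval = CMTimeMake(5, 1);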
- isRecording reports whether recording is in progress.
- startRecording fetches the current video capture connection to configure it: it turns on enablesVideoStabilizationWhenAvailable to improve video stability, enables the camera's smoothAutoFocusEnabled mode to slow the rate of focus operations, and then calls startRecordingToOutputFileURL:recordingDelegate: with a unique file path to begin recording.
- stopRecording stops the recording.
- Implement the AVCaptureFileOutputRecordingDelegate protocol's captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method to receive and handle the result.
#pragma mark - Video Capture Methods
// Whether recording is in progress.
- (BOOL)isRecording {
    return self.movieOutput.isRecording;
}

// Start recording.
- (void)startRecording {
    if (![self isRecording]) {
        // Get the current video capture connection so some core
        // properties of the captured video can be configured.
        AVCaptureConnection *videoConnection = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
        // Check whether setting the videoOrientation property is supported.
        if ([videoConnection isVideoOrientationSupported]) {
            // If so, apply the current orientation.
            videoConnection.videoOrientation = [self currentVideoOrientation];
        }
        // Check whether video stabilization is supported; it can
        // noticeably improve quality and only applies when recording to a file.
        if ([videoConnection isVideoStabilizationSupported]) {
            videoConnection.enablesVideoStabilizationWhenAvailable = YES;
        }
        AVCaptureDevice *device = [self activeCamera];
        // Enable smooth autofocus if supported. It slows the lens's focus
        // movements; otherwise the camera tries to refocus quickly while
        // the user moves during a shot.
        if (device.isSmoothAutoFocusSupported) {
            NSError *error;
            if ([device lockForConfiguration:&error]) {
                device.smoothAutoFocusEnabled = YES;
                [device unlockForConfiguration];
            } else {
                [self.delegate deviceConfigurationFailedWithError:error];
            }
        }
        // Find a unique file-system URL to write the captured video to.
        self.outputURL = [self uniqueURL];
        // Start recording: argument 1 is the destination path, argument 2
        // the delegate.
        [self.movieOutput startRecordingToOutputFileURL:self.outputURL
                                       recordingDelegate:self];
    }
}
- (CMTime)recordedDuration {
    return self.movieOutput.recordedDuration;
}

// Build a unique file-system URL for the movie.
- (NSURL *)uniqueURL {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    // temporaryDirectoryWithTemplateString: is a custom NSFileManager
    // category method (not a system API) that creates a uniquely named
    // directory to write the file into.
    NSString *dirPath = [fileManager temporaryDirectoryWithTemplateString:@"kamera.XXXXXX"];
    if (dirPath) {
        NSString *filePath = [dirPath stringByAppendingPathComponent:@"kamera_movie.mov"];
        return [NSURL fileURLWithPath:filePath];
    }
    return nil;
}
// Stop recording.
- (void)stopRecording {
    // Only if currently recording.
    if ([self isRecording]) {
        [self.movieOutput stopRecording];
    }
}

#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error {
    // Handle errors.
    if (error) {
        [self.delegate mediaCaptureFailedWithError:error];
    } else {
        // Write the movie to the photo library here.
    }
    self.outputURL = nil;
}
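As with still images, writing the movie out is left as a placeholder above. A minimal Photos-framework sketch (writeVideoToLibraryAtURL: is a hypothetical helper name) could look like this:

#import <Photos/Photos.h>

- (void)writeVideoToLibraryAtURL:(NSURL *)videoURL {
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        // Create a video asset from the recorded movie file.
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:videoURL];
    } completionHandler:^(BOOL success, NSError *error) {
        if (!success) {
            NSLog(@"Error saving movie: %@", [error localizedDescription]);
        }
    }];
}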