一:前言
AVFoundation 是一个用于处理和创建基于时间的视听媒体的框架,它提供了操作基于时间的视听数据的底层 Objective-C 接口。
二:相关代码在Github
三: AVFoundation相关 挂代理AVCaptureVideoDataOutputSampleBufferDelegate(视频数据代理)和AVCaptureAudioDataOutputSampleBufferDelegate(音频数据代理)
<1>相关的一些类
@property (nonatomic, strong) AVCaptureSession *captureSession;//capture session coordinating inputs and outputs
//Still-photo capture
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;//camera device input
@property (nonatomic, strong) AVCaptureStillImageOutput *captureStillImageOutput;//still image (JPEG) output
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;//live camera preview layer
//Video recording
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;//video sample-buffer output
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInputAudio;//microphone device input
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioOutput;//audio sample-buffer output
@property (nonatomic, strong) AVCaptureMetadataOutput *metaDataOut;//metadata output (face detection)
//Asset writing
@property (nonatomic, strong) NSURL *videoUrl;//local file URL the movie is written to
@property (nonatomic, strong) AVAssetWriter *writer;//media writer that muxes audio + video
@property (nonatomic, strong) AVAssetWriterInput *videoInput;//writer input for video samples
@property (nonatomic, strong) AVAssetWriterInput *audioInput;//writer input for audio samples
<2>懒加载
// Lazily creates the asset writer that muxes captured audio/video into a
// QuickTime (.mov) file at self.videoUrl.
- (AVAssetWriter *)writer{
    if (!_writer) {
        NSError *error = nil;
        _writer = [AVAssetWriter assetWriterWithURL:self.videoUrl fileType:AVFileTypeQuickTimeMovie error:&error];
        if (!_writer) {
            // The original passed error:nil and hid creation failures.
            NSLog(@"Failed to create AVAssetWriter: %@", error.localizedDescription);
        }
        // Interleave samples so the file is playable/streamable sooner.
        _writer.shouldOptimizeForNetworkUse = YES;
    }
    return _writer;
}
// Lazily creates the audio writer input: AAC, stereo, 44.1 kHz, 128 kbps.
// Uses modern literal syntax instead of dictionaryWithObjectsAndKeys:.
- (AVAssetWriterInput *)audioInput{
    if (!_audioInput) {
        NSDictionary *settings = @{
            AVFormatIDKey: @(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey: @2,
            AVSampleRateKey: @44100.0f,
            AVEncoderBitRateKey: @128000,
        };
        _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings];
        // Required for live capture: the input must not stall waiting for data.
        _audioInput.expectsMediaDataInRealTime = YES;
    }
    return _audioInput;
}
// Lazily creates the audio data output and wires its sample-buffer delegate.
- (AVCaptureAudioDataOutput *)audioOutput{
    if (_audioOutput == nil) {
        AVCaptureAudioDataOutput *output = [[AVCaptureAudioDataOutput alloc] init];
        // NOTE(review): callbacks are delivered on the main queue here; a dedicated
        // serial queue is usually preferred for capture callbacks — confirm before
        // changing, since the delegate touches UIApplication (main-thread-only).
        [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        _audioOutput = output;
    }
    return _audioOutput;
}
// Lazily builds a unique .mov file URL in the temp directory for the next
// recording, removing any stale file at the same path first.
- (NSURL *)videoUrl{
    if (!_videoUrl) {
        // Millisecond timestamp keeps successive recordings from colliding.
        NSTimeInterval nowMs = [[NSDate date] timeIntervalSince1970] * 1000;
        // %lu matches the (unsigned long) cast; the original used %ld, a
        // signed/unsigned format-specifier mismatch.
        NSString *fileName = [NSString stringWithFormat:@"%lu.mov",(unsigned long)nowMs];
        NSString *cachePath = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
        // AVAssetWriter refuses to overwrite an existing file.
        if ([[NSFileManager defaultManager] fileExistsAtPath:cachePath]) {
            [[NSFileManager defaultManager] removeItemAtPath:cachePath error:nil];
        }
        _videoUrl = [NSURL fileURLWithPath:cachePath];
    }
    return _videoUrl;
}
// Lazy preview layer bound to the capture session; fills its bounds,
// cropping the video as needed (aspect-fill).
- (AVCaptureVideoPreviewLayer *)previewLayer {
    if (_previewLayer == nil) {
        _previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
    return _previewLayer;
}
// Lazy camera input, defaulting to the back camera.
- (AVCaptureDeviceInput *)captureDeviceInput{
    if (_captureDeviceInput == nil) {
        AVCaptureDevice *backCamera = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
        _captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:backCamera error:nil];
    }
    return _captureDeviceInput;
}
// Lazy capture session configured for 1280x720 output.
- (AVCaptureSession *)captureSession{
    if (_captureSession == nil) {
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPreset1280x720; // default output resolution
        _captureSession = session;
    }
    return _captureSession;
}
// Lazy still-image output producing JPEG data.
// NOTE(review): AVCaptureStillImageOutput is deprecated since iOS 10;
// AVCapturePhotoOutput is the modern replacement — confirm deployment target.
- (AVCaptureStillImageOutput *)captureStillImageOutput{
    if (_captureStillImageOutput == nil) {
        _captureStillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        _captureStillImageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
    }
    return _captureStillImageOutput;
}
// Lazy microphone input.
- (AVCaptureDeviceInput *)captureDeviceInputAudio{
    if (_captureDeviceInputAudio == nil) {
        AVCaptureDevice *microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        _captureDeviceInputAudio = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil];
    }
    return _captureDeviceInputAudio;
}
// Lazily creates the video data output delivering 32BGRA frames to the delegate.
// Uses literal/dictionary syntax instead of dictionaryWithObjectsAndKeys:.
- (AVCaptureVideoDataOutput *)videoOutput{
    if (!_videoOutput) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        // Drop frames that arrive while the delegate is still busy instead of queueing.
        _videoOutput.alwaysDiscardsLateVideoFrames = YES;
        [_videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        // 32BGRA is convenient for CoreGraphics conversion (see imageFromSampleBuffer:).
        _videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
    }
    return _videoOutput;
}
<3>开启和停止捕捉会话
// Starts the capture session: camera/microphone begin delivering data.
- (void)startUp{
    AVCaptureSession *session = self.captureSession;
    [session startRunning];
}
// Stops the capture session. Messaging nil is a no-op in Objective-C, so the
// original's explicit nil check was redundant and has been removed.
- (void)shutdown{
    [self.captureSession stopRunning];
}
<4>拍照相关
// Reconfigures the session for still photography: camera input plus
// still-image and video-data outputs.
- (void)setupPhotoSession{
    [self.captureSession beginConfiguration];
    // Enumerate snapshots: removing from the live collections while
    // fast-enumerating them risks undefined behavior.
    for (AVCaptureInput *input in [self.captureSession.inputs copy]) {
        [self.captureSession removeInput:input];
    }
    for (AVCaptureOutput *output in [self.captureSession.outputs copy]) {
        [self.captureSession removeOutput:output];
    }
    if ([self.captureSession canAddInput:self.captureDeviceInput]) {
        [self.captureSession addInput:self.captureDeviceInput];
    }
    if ([self.captureSession canAddOutput:self.captureStillImageOutput]) {
        [self.captureSession addOutput:self.captureStillImageOutput];
    }
    // Video data output (live frames while in photo mode).
    if ([self.captureSession canAddOutput:self.videoOutput]) {
        [self.captureSession addOutput:self.videoOutput];
    }
    [self.captureSession commitConfiguration];
}
// Captures a single still image, matching its orientation to the UI.
- (void)takePhoto{
    AVCaptureConnection *captureConnection = [self.captureStillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoOrientationSupported]) {
        // Orient the output image to the current interface orientation.
        [captureConnection setVideoOrientation:(AVCaptureVideoOrientation)[UIApplication sharedApplication].statusBarOrientation];
    }
    [self.captureStillImageOutput captureStillImageAsynchronouslyFromConnection:captureConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (!imageDataSampleBuffer) {
            // Surface capture failures instead of silently dropping them.
            NSLog(@"takePhoto failed: %@", error.localizedDescription);
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [[UIImage imageWithData:imageData] fixOrientation];
        // `image` is the captured photo — persist it to the photo library here.
        (void)image;
    }];
}
<5>录视频相关
// Reconfigures the session for movie recording: camera + microphone inputs,
// video + audio data outputs.
-(void)setupVideoSession{
    [self.captureSession beginConfiguration];
    // Enumerate snapshots: removing from the live collections while
    // fast-enumerating them risks undefined behavior.
    for (AVCaptureInput *input in [self.captureSession.inputs copy]) {
        [self.captureSession removeInput:input];
    }
    for (AVCaptureOutput *output in [self.captureSession.outputs copy]) {
        [self.captureSession removeOutput:output];
    }
    // Camera input
    if ([self.captureSession canAddInput:self.captureDeviceInput]) {
        [self.captureSession addInput:self.captureDeviceInput];
    }
    // Microphone input
    if ([self.captureSession canAddInput:self.captureDeviceInputAudio]) {
        [self.captureSession addInput:self.captureDeviceInputAudio];
    }
    // Video data output
    if ([self.captureSession canAddOutput:self.videoOutput]) {
        [self.captureSession addOutput:self.videoOutput];
    }
    // Audio data output
    if ([self.captureSession canAddOutput:self.audioOutput]) {
        [self.captureSession addOutput:self.audioOutput];
    }
    [self.captureSession commitConfiguration];
}
PS:需要先声明一个 BOOL 类型的属性 isStart,用于标记当前是否正在录制(代理方法中据此决定是否写入数据)。
// Start recording: flips the flag the sample-buffer delegate checks before
// appending frames (the writer session itself starts on the first video frame).
- (void)startRecoard{
self.isStart = YES;
}
// Stop recording and finalize the movie file.
- (void)stopRecoard{
    self.isStart = NO;
    // markAsFinished / finishWritingWithCompletionHandler: raise an exception
    // unless the writer is actually writing, so guard against stopping before
    // the first frame ever arrived. Read the ivar directly to avoid the lazy
    // getter creating a fresh writer here.
    if (_writer.status != AVAssetWriterStatusWriting) {
        self.videoUrl = nil;
        self.writer = nil;
        return;
    }
    [self.videoInput markAsFinished];
    [self.audioInput markAsFinished];
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.writer finishWritingWithCompletionHandler:^{
            // self.videoUrl now points at the finished movie — save it to the
            // photo library here, then reset for the next recording.
            self.videoUrl = nil;
            self.writer = nil;
        }];
    });
}
#pragma mark =============== 视频数据代理 ===============
// Delegate for every captured audio and video sample buffer.
// On the first video frame after recording starts it configures the asset
// writer (orientation-dependent dimensions) and starts the writing session;
// afterwards it appends each buffer to the matching writer input.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @synchronized(self) {
        if (captureOutput == self.videoOutput) {
            AVCaptureConnection *videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
            if ([videoConnection isVideoOrientationSupported]) {
                // Keep recorded frames aligned with the current interface orientation.
                [videoConnection setVideoOrientation:(AVCaptureVideoOrientation)[UIApplication sharedApplication].statusBarOrientation];
            }
            if (self.isStart && self.writer.status == AVAssetWriterStatusUnknown) {
                // Session preset is 1280x720, so swap width/height for portrait.
                // (The original passed NSString placeholders to numberWithInteger:.)
                NSInteger width = 1280, height = 720;
                if (videoConnection.videoOrientation == AVCaptureVideoOrientationPortrait ||
                    videoConnection.videoOrientation == AVCaptureVideoOrientationPortraitUpsideDown) {
                    width = 720;
                    height = 1280;
                }
                NSDictionary *settings = @{AVVideoCodecKey: AVVideoCodecH264,
                                           AVVideoWidthKey: @(width),
                                           AVVideoHeightKey: @(height)};
                self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
                self.videoInput.expectsMediaDataInRealTime = YES;
                // Writer inputs are configured on the writer, not on the capture
                // session — the original begin/commitConfiguration pair was a no-op here.
                if ([self.writer canAddInput:self.videoInput]) {
                    [self.writer addInput:self.videoInput];
                }
                if ([self.writer canAddInput:self.audioInput]) {
                    [self.writer addInput:self.audioInput];
                }
                CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                [self.writer startWriting];
                [self.writer startSessionAtSourceTime:startTime];
                NSLog(@"写入数据");
            }
        }
        if (self.isStart && self.writer.status == AVAssetWriterStatusFailed) {
            NSLog(@"%@",self.writer.error.localizedDescription);
        }
        if (self.isStart) {
            // NOTE(review): in the original the audio branch was nested inside the
            // video-output check and could never run, producing silent movies;
            // hoisting it here fixes that.
            if (captureOutput == self.videoOutput && [self.videoInput isReadyForMoreMediaData]) {
                [self.videoInput appendSampleBuffer:sampleBuffer];
            }
            if (captureOutput == self.audioOutput && [self.audioInput isReadyForMoreMediaData]) {
                [self.audioInput appendSampleBuffer:sampleBuffer];
            }
        }
    }
}
// Converts a 32BGRA video sample buffer into a UIImage.
// Returns nil when the buffer carries no pixel data (e.g. an audio buffer).
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        // Guard added: the original dereferenced a NULL buffer for non-video samples.
        return nil;
    }
    // Lock the base address before touching pixel memory (read-only access).
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // BGRA little-endian with premultiplied alpha matches the videoOutput
    // pixel format (kCVPixelFormatType_32BGRA).
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    // Release CoreGraphics objects we own (Create rule).
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    return image;
}
<6>相机功能设置
// Re-centers the point of interest and restores continuous auto focus/exposure.
// Returns NO when the device could not be locked for configuration.
- (BOOL)resetFocusAndExposureModes{
    AVCaptureDevice *device = [self.captureDeviceInput device];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    BOOL supportsFocusReset = [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode];
    BOOL supportsExposureReset = [device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode];
    CGPoint center = CGPointMake(0.5f, 0.5f);
    NSError *error;
    if (![device lockForConfiguration:&error]) {
        return NO;
    }
    if (supportsFocusReset) {
        device.focusMode = focusMode;
        device.focusPointOfInterest = center;
    }
    if (supportsExposureReset) {
        device.exposureMode = exposureMode;
        device.exposurePointOfInterest = center;
    }
    [device unlockForConfiguration];
    return YES;
}
// Single tap: focus at the tapped point when the camera supports it.
-(void)tapAction:(CGPoint )point{
    if (![self cameraSupportsTapToFocus]) {
        return;
    }
    [self focusAtPoint:point];
}
// One-shot autofocus at the given point of interest.
- (void)focusAtPoint:(CGPoint)point{
    AVCaptureDevice *device = [self.captureDeviceInput device];
    BOOL canFocus = [self cameraSupportsTapToFocus] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus];
    if (!canFocus) {
        return;
    }
    NSError *error;
    if ([device lockForConfiguration:&error]) {
        device.focusPointOfInterest = point;
        device.focusMode = AVCaptureFocusModeAutoFocus;
        [device unlockForConfiguration];
    } else {
        NSLog(@"error");
    }
}
// YES when the current camera supports a focus point of interest.
- (BOOL)cameraSupportsTapToFocus {
    AVCaptureDevice *device = [self.captureDeviceInput device];
    return [device isFocusPointOfInterestSupported];
}
// Double tap: set exposure at the tapped point when supported.
-(void)doubleTapAction:(CGPoint )point{
    if (![self cameraSupportsTapToExpose]) {
        return;
    }
    [self exposeAtPoint:point];
}
// YES when the current camera supports an exposure point of interest.
- (BOOL)cameraSupportsTapToExpose {
    AVCaptureDevice *device = [self.captureDeviceInput device];
    return [device isExposurePointOfInterestSupported];
}
// KVO context for observing `adjustingExposure` (see observeValueForKeyPath:).
static const NSString *CameraAdjustingExposureContext;
// Continuous auto-exposure at the given point; once the device settles,
// the KVO callback locks the exposure.
- (void)exposeAtPoint:(CGPoint)point{
    AVCaptureDevice *device = [self.captureDeviceInput device];
    BOOL canExpose = [self cameraSupportsTapToExpose] && [device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure];
    if (!canExpose) {
        return;
    }
    NSError *error;
    if (![device lockForConfiguration:&error]) {
        NSLog(@"error");
        return;
    }
    device.exposurePointOfInterest = point;
    device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
        // Watch for the auto-exposure sweep to finish.
        [device addObserver:self
                 forKeyPath:@"adjustingExposure"
                    options:NSKeyValueObservingOptionNew
                    context:&CameraAdjustingExposureContext];
    }
    [device unlockForConfiguration];
}
// Locks exposure once the device has finished its auto-exposure sweep,
// then removes the one-shot observer added in exposeAtPoint:.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if (context != &CameraAdjustingExposureContext) {
        // Not our context — forward to the superclass implementation.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
        return;
    }
    AVCaptureDevice *device = (AVCaptureDevice *)object;
    if (device.isAdjustingExposure || ![device isExposureModeSupported:AVCaptureExposureModeLocked]) {
        return;
    }
    [object removeObserver:self
                forKeyPath:@"adjustingExposure"
                   context:&CameraAdjustingExposureContext];
    dispatch_async(dispatch_get_main_queue(), ^{
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.exposureMode = AVCaptureExposureModeLocked;
            NSLog(@"曝光完成");
            [device unlockForConfiguration];
        } else {
            NSLog(@"error");
        }
    });
}
// Enable cinematic video stabilization when the active format supports it.
- (void)cameraBackgroundDidClickOpenAntiShake{
    AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *device = self.captureDeviceInput.device;
    if ([device.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeCinematic]) {
        connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
    }
}
// Disable video stabilization.
// The stray debug NSLog from the original (logged the connection on every
// invocation) has been removed — it was production logging noise.
- (void)cameraBackgroundDidClickCloseAntiShake{
    AVCaptureConnection *captureConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
    if ([videoDevice.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeOff]) {
        captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeOff;
    }
}
// Turn the flash and torch on.
// Fixes the original's mixed enums: flashMode was compared against
// AVCaptureTorchMode values (they happen to share raw values, but the
// comparison was semantically wrong). Also checks the lock result instead
// of ignoring it.
- (void)openFlashLight{
    AVCaptureDevice *backCamera = [self backCamera];
    if (backCamera.flashMode == AVCaptureFlashModeOff || backCamera.flashMode == AVCaptureFlashModeAuto) {
        if ([backCamera lockForConfiguration:nil]) {
            backCamera.flashMode = AVCaptureFlashModeOn;
            backCamera.torchMode = AVCaptureTorchModeOn;
            [backCamera unlockForConfiguration];
        }
    }
}
// Turn the flash and torch off.
// Fixes the original's mixed enums: it compared and assigned
// AVCaptureTorchMode values to `flashMode`; the correct constants are the
// AVCaptureFlashMode ones. Also checks the lock result.
- (void)closeFlashLight{
    AVCaptureDevice *backCamera = [self backCamera];
    if (backCamera.flashMode == AVCaptureFlashModeOn || backCamera.flashMode == AVCaptureFlashModeAuto) {
        if ([backCamera lockForConfiguration:nil]) {
            backCamera.flashMode = AVCaptureFlashModeOff;
            backCamera.torchMode = AVCaptureTorchModeOff;
            [backCamera unlockForConfiguration];
        }
    }
}
// Toggle between the front and back cameras, keeping the session configured.
- (void)changeCameraInputDeviceis{
    NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
    if (cameraCount <= 1) {
        return;
    }
    AVCaptureDevicePosition currentPosition = [[self.captureDeviceInput device] position];
    AVCaptureDevicePosition targetPosition = (currentPosition == AVCaptureDevicePositionFront)
        ? AVCaptureDevicePositionBack
        : AVCaptureDevicePositionFront;
    AVCaptureDevice *newCamera = [self cameraWithPosition:targetPosition];
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:nil];
    if (newInput == nil) {
        return;
    }
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.captureDeviceInput];
    if ([self.captureSession canAddInput:newInput]) {
        [self.captureSession addInput:newInput];
        self.captureDeviceInput = newInput;
    } else {
        // Restore the previous input when the new one is rejected
        // (e.g. the current preset is unsupported by that camera).
        [self.captureSession addInput:self.captureDeviceInput];
        [MBProgressHUD showTipMessageInWindow:NSLocalizedString(@"请切换视频分辨率",nil)];
    }
    [self.captureSession commitConfiguration];
}
// Returns the video capture device at the given position, or nil.
// Delegates to cameraWithPosition: — the two methods previously duplicated
// the same device-lookup loop.
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
    return [self cameraWithPosition:position];
}
// Apply a flash mode, if supported, inside a device-configuration lock.
- (void)setFlashMode:(AVCaptureFlashMode )flashMode{
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if (![captureDevice isFlashModeSupported:flashMode]) {
            return;
        }
        [captureDevice setFlashMode:flashMode];
    }];
}
// Runs a device-mutating block between lockForConfiguration / unlockForConfiguration.
// The original silently swallowed lock failures (empty else); they are now logged.
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    // Device properties must only be changed between lock/unlock calls.
    if ([captureDevice lockForConfiguration:&error]) {
        propertyChange(captureDevice);
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"lockForConfiguration failed: %@", error.localizedDescription);
    }
}
// Convenience accessor for the rear camera device.
- (AVCaptureDevice *)backCamera {
return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
// Returns the video capture device at `position` (front or back), or nil
// when no camera matches.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // All default devices capable of recording video.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSUInteger matchIndex = [devices indexOfObjectPassingTest:^BOOL(AVCaptureDevice *device, NSUInteger idx, BOOL *stop) {
        return [device position] == position;
    }];
    return (matchIndex == NSNotFound) ? nil : devices[matchIndex];
}
// Apply the requested focus/exposure modes at a point of interest.
// Fixes the original bug where the `focusMode` and `exposureMode` parameters
// were validated but hard-coded modes (AutoFocus / AutoExpose) were applied
// instead, making the parameters dead.
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
        if ([captureDevice isFocusPointOfInterestSupported]) {
            [captureDevice setFocusPointOfInterest:point];
        }
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
        if ([captureDevice isExposurePointOfInterestSupported]) {
            [captureDevice setExposurePointOfInterest:point];
        }
    }];
}
// Smoothly ramp the digital zoom.
// The original passed an NSString placeholder (@"变焦倍数") where a CGFloat is
// required; use a 2x factor clamped to the device's supported maximum.
- (void)cameraBackgroundDidChangeZoom{
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        CGFloat zoomFactor = MIN(2.0, captureDevice.activeFormat.videoMaxZoomFactor);
        [captureDevice rampToVideoZoomFactor:zoomFactor withRate:50];
        [captureDevice unlockForConfiguration];
    }
}