A camera with a custom UI & segmented recording
Usage:
Drag the XDVideoCamera folder from the demo into your project.
Import the header:
#import "XDVideocamera.h"
Then present the camera from your tap handler:
XDVideocamera *video = [[XDVideocamera alloc]init];
[self presentViewController:video animated:YES completion:^{
NSLog(@"进入摄像机");
}];
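Note: on iOS 10 and later the host app's Info.plist also needs NSCameraUsageDescription and NSMicrophoneUsageDescription entries; without them the system terminates the app when the capture session first requests camera or microphone access.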
Demo: https://github.com/Xiexingda/XDVideoCamera.git
The UI is implemented separately in the VideoUI class; to customize the interface, just modify that class.
If you like it, please give it a star on GitHub!
Below is a detailed walkthrough of the implementation.
1. Implementing video recording
Relevant properties
@property (strong,nonatomic) AVCaptureSession *session; //session management
@property (strong,nonatomic) AVCaptureDeviceInput *deviceInput; //obtains input data from the AVCaptureDevice
@property (strong,nonatomic) AVCaptureMovieFileOutput *movieFileOutput; //movie file output
@property (strong,nonatomic) AVCaptureVideoPreviewLayer *videoPreviewLayer; //camera preview layer
@property (nonatomic, strong) NSMutableArray *videoArray; //URLs of the recorded segments
@property (strong,nonatomic) CALayer *previewLayer; //layer that hosts the video preview
@property (strong,nonatomic) UIView *focusView; //focus indicator
@property (assign,nonatomic) BOOL enableRotation; //whether rotation is allowed (rotation must be disabled while recording)
@property (assign,nonatomic) CGRect lastBounds; //bounds before rotation
@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;//background task identifier
Start and stop the session when entering and leaving the view
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
[self.session startRunning];
}
- (void)viewDidDisappear:(BOOL)animated {
[super viewDidDisappear:animated];
[self.session stopRunning];
}
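One note on the session lifecycle: -startRunning and -stopRunning block the calling thread until they finish, so Apple's guidance is to call them off the main thread. A minimal sketch of that, assuming a dedicated serial queue (the queue label here is only an example, not part of the demo):
dispatch_queue_t sessionQueue = dispatch_queue_create("com.xdvideocamera.sessionQueue", DISPATCH_QUEUE_SERIAL);
dispatch_async(sessionQueue, ^{
//-startRunning is synchronous and can take a noticeable amount of time
[self.session startRunning];
});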
Adjusting the orientation
//Adjust the preview layer when the screen rotates
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration {
AVCaptureConnection *connection = [self.videoPreviewLayer connection];
connection.videoOrientation = (AVCaptureVideoOrientation)toInterfaceOrientation;
}
//Reset the frame after rotation
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation {
_videoPreviewLayer.frame = self.previewLayer.bounds;
}
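Both rotation callbacks above were deprecated in iOS 8. If you need the newer API, a sketch of the equivalent using viewWillTransitionToSize: could look like this (reading statusBarOrientation here is an assumption; the demo itself keeps the older callbacks, and the cast mirrors the one used above):
- (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
[super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];
[coordinator animateAlongsideTransition:^(id<UIViewControllerTransitionCoordinatorContext> context) {
//Keep the preview orientation and frame in sync with the new size
AVCaptureConnection *connection = [self.videoPreviewLayer connection];
connection.videoOrientation = (AVCaptureVideoOrientation)[UIApplication sharedApplication].statusBarOrientation;
self.videoPreviewLayer.frame = self.previewLayer.bounds;
} completion:nil];
}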
Initializing the session & related devices
- (void)configSessionManager {
//Initialize the session
_session = [[AVCaptureSession alloc]init];
[self changeConfigurationWithSession:_session block:^(AVCaptureSession *session) {
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
[session setSessionPreset:AVCaptureSessionPresetHigh];
}
//Get the video input device
AVCaptureDevice *device = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
if (!device) {
NSLog(@"Failed to get the back camera");
return;
}
//Get an audio input device
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]firstObject];
if (!audioDevice) {
NSLog(@"Failed to get the microphone");
}
//Create the video input from the current device
NSError *error = nil;
_deviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:device error:&error];
if (error) {
NSLog(@"Failed to create the video input: %@",error.localizedDescription);
return;
}
//Create the audio input from the audio device
AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc]initWithDevice:audioDevice error:&error];
if (error) {
NSLog(@"Failed to create the audio input: %@",error.localizedDescription);
}
//Create the movie file output
_movieFileOutput = [[AVCaptureMovieFileOutput alloc]init];
//Add the inputs and the output to the session
if ([session canAddInput:_deviceInput]) {
[session addInput:_deviceInput];
}
if (audioInput && [session canAddInput:audioInput]) {
[session addInput:audioInput];
}
if ([session canAddOutput:_movieFileOutput]) {
[session addOutput:_movieFileOutput];
}
//Create the preview layer that shows the live camera feed
_videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
_videoPreviewLayer.frame = _previewLayer.bounds;
_videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[_previewLayer insertSublayer:_videoPreviewLayer below:_focusView.layer];
_enableRotation = YES;
[self addNotificationToDevice:device];
}];
}
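Before configSessionManager runs it is also worth checking camera authorization; with access denied the session silently produces black frames. A minimal sketch of such a check (the method name checkCameraAuthorization: is hypothetical and not part of the demo):
- (void)checkCameraAuthorization:(void (^)(BOOL granted))completion {
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
//First launch: ask the user for permission
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
dispatch_async(dispatch_get_main_queue(), ^{
if (completion) completion(granted);
});
}];
} else {
if (completion) completion(status == AVAuthorizationStatusAuthorized);
}
}
The same check applies to AVMediaTypeAudio for the microphone.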
Adding & removing notifications
/**
Add notifications for the input device
*/
-(void)addNotificationToDevice:(AVCaptureDevice *)captureDevice{
//Note: subject-area-change monitoring must be enabled on the device before observing the area-change notification
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
captureDevice.subjectAreaChangeMonitoringEnabled=YES;
}];
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
//The subject area changed
[notificationCenter addObserver:self selector:@selector(areaChanged:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
//The device was connected
[notificationCenter addObserver:self selector:@selector(deviceConnected:) name:AVCaptureDeviceWasConnectedNotification object:captureDevice];
//The device was disconnected
[notificationCenter addObserver:self selector:@selector(deviceDisconnected:) name:AVCaptureDeviceWasDisconnectedNotification object:captureDevice];
//The session hit a runtime error (this notification is posted by the session, not the device)
[notificationCenter addObserver:self selector:@selector(sessionError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session];
}
-(void)removeNotificationFromDevice:(AVCaptureDevice *)captureDevice{
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
[notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
[notificationCenter removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:captureDevice];
[notificationCenter removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:captureDevice];
}
/**
Remove all notifications
*/
-(void)removeNotification{
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
[notificationCenter removeObserver:self];
}
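The observers also need to come off when the controller is destroyed; if the demo does not already do this elsewhere, a dealloc along these lines covers it:
- (void)dealloc {
[self removeNotification];
}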
Notification handlers
/**
The device was connected
@param notification the notification object
*/
-(void)deviceConnected:(NSNotification *)notification{
NSLog(@"Device connected...");
}
/**
The device was disconnected
@param notification the notification object
*/
-(void)deviceDisconnected:(NSNotification *)notification{
NSLog(@"Device disconnected.");
}
/**
The subject area changed
@param notification the notification object
*/
-(void)areaChanged:(NSNotification *)notification{
NSLog(@"Subject area changed...");
CGPoint cameraPoint = [self.videoPreviewLayer captureDevicePointOfInterestForPoint:self.view.center];
[self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}
/**
The session raised a runtime error
@param notification the notification object
*/
-(void)sessionError:(NSNotification *)notification{
NSLog(@"Session error.");
}
Getting a camera device
/**
Get the camera at the given position
@param position the camera position
@return the camera device
*/
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *camera in cameras) {
if ([camera position]==position) {
return camera;
}
}
return nil;
}
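devicesWithMediaType: was deprecated in iOS 10. On newer systems the same lookup can be done with AVCaptureDeviceDiscoverySession; a sketch, assuming only the built-in wide-angle camera is needed (the method name cameraWithPosition: is hypothetical):
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
AVCaptureDeviceDiscoverySession *discoverySession =
[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
mediaType:AVMediaTypeVideo
position:position];
//The discovery session returns all matching devices; take the first one
return discoverySession.devices.firstObject;
}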
Utility methods
/**
Unified helper for changing device properties
@param propertyChange block that performs the property change
*/
-(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
AVCaptureDevice *captureDevice= [self.deviceInput device];
NSError *error;
//Note: always call lockForConfiguration: before changing device properties, and unlockForConfiguration afterwards
if ([captureDevice lockForConfiguration:&error]) {
if (propertyChange) {
propertyChange(captureDevice);
}
[captureDevice unlockForConfiguration];
}else{
NSLog(@"出错了,错误信息:%@",error.localizedDescription);
}
}
/**
Unified helper for changing the session configuration
@param currentSession self.session
@param block block that performs the session changes
*/
- (void)changeConfigurationWithSession:(AVCaptureSession *)currentSession block:(void (^)(AVCaptureSession *session))block {
[currentSession beginConfiguration];
if (block) {
block(currentSession);
}
[currentSession commitConfiguration];
}
/**
Get the current date
@return a date string used to name the video file
*/
- (NSString *)getCurrentDate {
//Use the date as the video file name
NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
formatter.dateFormat = @"yyyyMMddHHmmss";
NSString *dateStr = [formatter stringFromDate:[NSDate date]];
return dateStr;
}
/**
Alert helper
@param title the message to show
@param btn the cancel button title
@return the alert
*/
- (UIAlertView *)noticeAlertTitle:(NSString *)title cancel:(NSString *)btn {
UIAlertView *alert = [[UIAlertView alloc]initWithTitle:title message:nil delegate:self cancelButtonTitle:btn otherButtonTitles:nil, nil];
[alert show];
return alert;
}
/**
Clear the cached files at the video URLs
@param urlArray _videoArray
*/
- (void)freeArrayAndItemsInUrlArray:(NSArray *)urlArray {
if (urlArray.count <= 0) {
return;
}
for (NSURL *url in urlArray) {
[[StoreFileManager defaultManager] removeItemAtUrl:url];
}
}
Switching cameras
/**
Switch between the front and back cameras
@return a BOOL used to update the button state
*/
- (BOOL)changeBtClick {
BOOL isBackground;
//Get the current device
AVCaptureDevice *currentDevice = [self.deviceInput device];
AVCaptureDevicePosition currentPosition = [currentDevice position];
[self removeNotificationFromDevice:currentDevice];
AVCaptureDevice *toDevice;
AVCaptureDevicePosition toPosition;
if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
toPosition = AVCaptureDevicePositionBack;
isBackground = YES;
} else {
toPosition = AVCaptureDevicePositionFront;
isBackground = NO;
}
toDevice = [self getCameraDeviceWithPosition:toPosition];
[self addNotificationToDevice:toDevice];
//Create the input for the device we are switching to
NSError *error = nil;
AVCaptureDeviceInput *toDeviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:toDevice error:&error];
if (error) {
NSLog(@"Failed to create the new device input");
}
[self changeConfigurationWithSession:_session block:^(AVCaptureSession *session) {
//Remove the old input
[session removeInput:self.deviceInput];
self.deviceInput = nil;
//Add the new input
if ([session canAddInput:toDeviceInput]) {
[session addInput:toDeviceInput];
self.deviceInput = toDeviceInput;
}
}];
return isBackground;
}
Tapping the record button
/**
Start or stop recording
@return a BOOL used to update the button state
*/
- (BOOL)videoBtClick {
//Get the connection from the movie file output
AVCaptureConnection *connection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
//If we are not recording yet, start; otherwise stop
if (![self.movieFileOutput isRecording]) {
self.enableRotation = NO;
//Start a background task if multitasking is supported
if ([[UIDevice currentDevice] isMultitaskingSupported]) {
self.backgroundTaskIdentifier=[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
}
//Keep the recording orientation in sync with the preview layer
connection.videoOrientation = [self.videoPreviewLayer connection].videoOrientation;
//Video stabilization
if ([connection isVideoStabilizationSupported]) {
connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
//Use the date as the video file name
NSString *str = [self getCurrentDate];
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingString:[NSString stringWithFormat:@"%@%@",str,@"myMovie.mov"]];
NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];
[self.movieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
return YES;
}
[self.movieFileOutput stopRecording];
return NO;
}
Focus
Tapping the screen moves the focus and exposure point to the tapped position.
/**
Tap-to-focus
@param view the view the gesture is attached to
@param gesture the tap gesture
*/
- (void)videoLayerClick:(SelectView *)view gesture:(UITapGestureRecognizer *)gesture {
CGPoint point = [gesture locationInView:view];
NSLog(@"位置:%f",point.y);
CGPoint cameraPoint = [self.videoPreviewLayer captureDevicePointOfInterestForPoint:point];
[self setFocusViewWithPoint:point];
[self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}
/**
Position the focus indicator
@param point the indicator position
*/
-(void)setFocusViewWithPoint:(CGPoint)point{
self.focusView.center=point;
self.focusView.transform=CGAffineTransformMakeScale(1.5, 1.5);
self.focusView.alpha=1.0;
[UIView animateWithDuration:1.0 animations:^{
self.focusView.transform=CGAffineTransformIdentity;
} completion:^(BOOL finished) {
self.focusView.alpha=0;
}];
}
/**
Set the focus and exposure point
@param focusMode the focus mode
@param exposureMode the exposure mode
@param point the point of interest
*/
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isFocusModeSupported:focusMode]) {
[captureDevice setFocusMode:focusMode];
}
if ([captureDevice isFocusPointOfInterestSupported]) {
[captureDevice setFocusPointOfInterest:point];
}
if ([captureDevice isExposureModeSupported:exposureMode]) {
[captureDevice setExposureMode:exposureMode];
}
if ([captureDevice isExposurePointOfInterestSupported]) {
[captureDevice setExposurePointOfInterest:point];
}
}];
}
AVFoundation delegate methods
In the delegate callbacks, each recorded segment's URL is added to an array in preparation for merging the videos later.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
NSLog(@"Recording started...");
[_videoArray addObject:fileURL];
NSLog(@"%@",fileURL);
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
NSLog(@"视频录制完成");
self.enableRotation = YES;
NSLog(@"%@",outputFileURL);
}
2. Implementing video merging
/**
Start merging the recorded segments
*/
- (void)mergeClick {
if (_videoArray.count <= 0) {
[self noticeAlertTitle:@"Please record a video first, then merge" cancel:@"OK"];
return;
}
if ([self.movieFileOutput isRecording]) {
NSLog(@"Please finish recording before merging");
[self noticeAlertTitle:@"Please finish recording before merging" cancel:@"OK"];
return;
}
UIAlertView *alert = [self noticeAlertTitle:@"Processing..." cancel:nil];
NSString *pathStr = [self getCurrentDate];
[[XDVideoManager defaultManager]
mergeVideosToOneVideo:_videoArray
toStorePath:pathStr
WithStoreName:@"xiaoxie"
backGroundTask:_backgroundTaskIdentifier
success:^(NSString *info){
NSLog(@"%@",info);
[_videoArray removeAllObjects];
[alert dismissWithClickedButtonIndex:-1 animated:YES];
} failure:^(NSString *error){
NSLog(@"%@", error);
[_videoArray removeAllObjects];
}];
}
The XDVideoManager method being called
- (void)mergeVideosToOneVideo:(NSArray *)tArray toStorePath:(NSString *)storePath WithStoreName:(NSString *)storeName backGroundTask:(UIBackgroundTaskIdentifier)task success:(void (^)(NSString *info))successBlock failure:(void (^)(NSString *error))failureBlcok
{
AVMutableComposition *mixComposition = [self mergeVideostoOnevideo:tArray];
NSURL *outputFileUrl = [self joinStorePaht:storePath togetherStoreName:storeName];
[self storeAVMutableComposition:mixComposition withStoreUrl:outputFileUrl WihtName:storeName backGroundTask:(UIBackgroundTaskIdentifier)task filesArray:tArray success:successBlock failure:failureBlcok];
}
The core video-merging code
/**
Merge multiple videos into one
@param array NSURLs of the videos to merge
@return the resulting AVMutableComposition
*/
- (AVMutableComposition *)mergeVideostoOnevideo:(NSArray*)array
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
Float64 tmpDuration =0.0f;
for (NSURL *videoUrl in array)
{
AVURLAsset *asset = [[AVURLAsset alloc]initWithURL:videoUrl options:nil];
AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,asset.duration);
[a_compositionVideoTrack setPreferredTransform:videoAssetTrack.preferredTransform];
[a_compositionAudioTrack setPreferredTransform:audioAssetTrack.preferredTransform];
/**
Insert each asset in turn.
timeRange: the duration of the asset being inserted
ofTrack: the source track being inserted (video or audio)
atTime: the point at which to insert, here CMTimeMakeWithSeconds(tmpDuration, 0)
*/
NSError *error;
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:videoAssetTrack atTime:CMTimeMakeWithSeconds(tmpDuration, 0) error:&error];
[a_compositionAudioTrack insertTimeRange:video_timeRange ofTrack:audioAssetTrack atTime:CMTimeMakeWithSeconds(tmpDuration, 0) error:&error];
tmpDuration += CMTimeGetSeconds(asset.duration);
}
return mixComposition;
}
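One caveat worth noting: objectAtIndex:0 throws if a segment has no audio track (for example when microphone access was denied), so a defensive check before inserting the audio range is worth considering, roughly:
NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0) {
AVAssetTrack *audioAssetTrack = audioTracks.firstObject;
//Insert at the same atTime as the video range above
[a_compositionAudioTrack insertTimeRange:video_timeRange ofTrack:audioAssetTrack atTime:CMTimeMakeWithSeconds(tmpDuration, 0) error:&error];
}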
Storage path for the merged video
/**
Build the storage URL
@param sPath folder name inside the sandbox
@param sName file name
@return the assembled file URL
*/
- (NSURL *)joinStorePaht:(NSString *)sPath togetherStoreName:(NSString *)sName
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentPath = [paths objectAtIndex:0];
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString *storePath = [documentPath stringByAppendingPathComponent:sPath];
BOOL isExist = [fileManager fileExistsAtPath:storePath];
if(!isExist){
[fileManager createDirectoryAtPath:storePath withIntermediateDirectories:YES attributes:nil error:nil];
}
NSString *realName = [NSString stringWithFormat:@"%@.mov", sName];
storePath = [storePath stringByAppendingPathComponent:realName];
[[StoreFileManager defaultManager] removeItemAtPath:storePath];
NSURL *outputFileUrl = [NSURL fileURLWithPath:storePath];
return outputFileUrl;
}
Saving the merged video
/**
Export and save the merged composition
@param mixComposition the composition to export
@param storeUrl the export destination
@param aName the video name
@param task the background task identifier
@param files the array of segment URLs
@param successBlock success callback
@param failureBlcok failure callback
*/
- (void)storeAVMutableComposition:(AVMutableComposition*)mixComposition withStoreUrl:(NSURL *)storeUrl WihtName:(NSString *)aName backGroundTask:(UIBackgroundTaskIdentifier)task filesArray:(NSArray *)files success:(void (^)(NSString *outPath))successBlock failure:(void (^)(NSString *error))failureBlcok
{
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = storeUrl;
__block typeof(task) blockTask = task;
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
//Write the exported file to the system photo album
ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc]init];
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:storeUrl completionBlock:^(NSURL *assetURL, NSError *error) {
[[StoreFileManager defaultManager] removeItemAtPath:[storeUrl path]];
[self freeFilesInArray:files];
//End the background task
if (blockTask != UIBackgroundTaskInvalid) {
[[UIApplication sharedApplication] endBackgroundTask:blockTask];
blockTask = UIBackgroundTaskInvalid;
}
if (error) {
if (failureBlcok) {
NSString *errorStr = [NSString stringWithFormat:@"Failed to save to the photo album: %@",error.localizedDescription];
failureBlcok(errorStr);
}
} else {
if (successBlock) {
NSString *successStr = [NSString stringWithFormat:@"Video saved successfully, album URL: %@",assetURL];
successBlock(successStr);
}
}
}];
});
}];
}
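ALAssetsLibrary was deprecated in iOS 9. If the save step ever needs updating, a sketch of the same write via the Photos framework (requires #import <Photos/Photos.h>; the completion handling here is only illustrative) might look like this:
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
//Create a new asset in the user's photo library from the exported file
[PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:storeUrl];
} completionHandler:^(BOOL success, NSError *error) {
//Clean up and invoke the same success/failure callbacks as above
NSLog(@"Saved to Photos: %d, error: %@", success, error);
}];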
Clearing the cache
/**
Delete the cached segment files
@param filesArray the array of segment URLs
*/
- (void)freeFilesInArray:(NSArray *)filesArray {
for (NSURL *fileUrl in filesArray) {
[[StoreFileManager defaultManager] removeItemAtUrl:fileUrl];
}
}
That completes the segmented-recording camera; see the demo code for full details.
Demo: https://github.com/Xiexingda/XDVideoCamera.git
If you like it, please give it a star on GitHub!