使用AVFoundation 录制视频 并拼接起来
视频录制工具
@property (nonatomic, strong, readonly) AVCaptureSession *captureSession;
//录制视频完成回调
@property (copy, nonatomic) void (^videoCompleteBlock)(NSURL *url);
@property (copy, nonatomic) void (^startRecord)(void);
//拍照回调
@property (copy, nonatomic) void (^picCompleteBlock)(UIImage *image);
//Session设置
- (BOOL)setupSession:(NSError **)error;
- (void)startSession;
- (void)stopSession;
//摄像头
- (BOOL)switchCameras;
- (BOOL)canSwitchCameras;
//拍照
- (void)captureStillImage;
//闪光灯和手电筒
@property (nonatomic, assign) BOOL cameraHasFlash;
@property (nonatomic, assign) BOOL cameraHasTorch;
@property (nonatomic) AVCaptureTorchMode torchMode;
@property (nonatomic) AVCaptureFlashMode flashMode;
//视频录制
- (void)startRecording;
- (void)stopRecording;
- (BOOL)isRecording;
//记录时间
- (CMTime)recordedDuration;
//焦点曝光
- (void)focusAtPoint:(CGPoint)point;
- (void)exposeAtPoint:(CGPoint)point;
//重置对焦、曝光模式 暂时没用
- (void)resetFocusAndExposureModes;
.m实现
//
// AWTCaptureVideoTool.m
// AWT_iOS
//
// Created by apple on 2019/10/26.
// Copyright © 2019年 wtoip_mac. All rights reserved.
//
#import "AWTCaptureVideoTool.h"
#import
//#import "NSFileManager+Additions.h"
#import "DeviceDirection.h"
// Private class extension.
// Fix: declare AVCaptureFileOutputRecordingDelegate conformance — `self` is
// passed to -startRecordingToOutputFileURL:recordingDelegate: and implements
// the delegate callbacks, but the original never declared the protocol.
@interface AWTCaptureVideoTool () <AVCaptureFileOutputRecordingDelegate>
@property (strong, nonatomic) AVCaptureSession *captureSession;       // carries data between inputs and outputs
@property (strong, nonatomic) AVCaptureDeviceInput *activeVideoInput; // current camera input
@property (strong, nonatomic) AVCaptureMovieFileOutput *movieOutput;  // movie file output
@property (strong, nonatomic) AVCaptureStillImageOutput *imageOutput; // still image output
@property (copy, nonatomic) NSURL *outputURL;                         // destination of the in-flight recording
@property (nonatomic, copy) NSString *degress;                        // device-orientation string ("up"/"down"/"left"/"right"); copy, not strong, for NSString
@end
@implementation AWTCaptureVideoTool
// Plain initializer; the "DeviceDegress" notification observer that used to
// feed self.degress is intentionally left disabled.
- (instancetype)init {
    self = [super init];
    if (self == nil) {
        return nil;
    }
    return self;
}
/// Configures the capture session: camera + microphone inputs, JPEG still
/// output and movie-file output, at AVCaptureSessionPresetHigh.
/// @param error Out-parameter filled by AVFoundation when a device input
///              cannot be created.
/// @return YES on success; NO when a required input could not be created or
///         attached. (Fix: the original returned YES even when canAddInput:
///         failed, leaving the session with no active input.)
- (BOOL)setupSession:(NSError **)error {
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    // Camera input.
    AVCaptureDevice *videoDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *videoInput =
        [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    if (!videoInput || ![self.captureSession canAddInput:videoInput]) {
        return NO;
    }
    [self.captureSession addInput:videoInput];
    self.activeVideoInput = videoInput;

    // Microphone input.
    AVCaptureDevice *audioDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioInput =
        [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
    if (!audioInput || ![self.captureSession canAddInput:audioInput]) {
        return NO;
    }
    [self.captureSession addInput:audioInput];

    // Still-image output (JPEG).
    self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
    self.imageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    if ([self.captureSession canAddOutput:self.imageOutput]) {
        [self.captureSession addOutput:self.imageOutput];
    }

    // Movie-file output.
    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.captureSession canAddOutput:self.movieOutput]) {
        [self.captureSession addOutput:self.movieOutput];
    }
    return YES;
}
// Starts the session asynchronously: -startRunning blocks, so it must not
// run on the main thread.
- (void)startSession {
    if ([self.captureSession isRunning]) {
        return;
    }
    dispatch_async([self globalQueue], ^{
        [self.captureSession startRunning];
    });
}
// Background queue used for starting/stopping the capture session.
- (dispatch_queue_t)globalQueue {
    return dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
}
// Stops the session asynchronously; -stopRunning also blocks.
- (void)stopSession {
    if (![self.captureSession isRunning]) {
        return;
    }
    dispatch_async([self globalQueue], ^{
        [self.captureSession stopRunning];
    });
}
#pragma mark - Cameras
// YES when the device has more than one camera to switch between.
- (BOOL)canSwitchCameras {
    return [self cameraCount] > 1;
}
// Number of video-capable capture devices on this hardware.
- (NSUInteger)cameraCount {
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    return videoDevices.count;
}
/// Swaps the session's camera input for the currently inactive camera
/// (front <-> back).
/// @return NO when there is only one camera or the replacement input could
///         not be created; YES otherwise.
- (BOOL)switchCameras {
if (![self canSwitchCameras]) {
return NO;
}
NSError *error;
AVCaptureDevice *videoDevice = [self inactiveCamera];
AVCaptureDeviceInput *videoInput =
[AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (videoInput) {
// Wrap the swap in begin/commitConfiguration so it is applied atomically.
[self.captureSession beginConfiguration];
// Remove the previous input first — canAddInput: would otherwise refuse
// the second video input.
[self.captureSession removeInput:self.activeVideoInput];
if ([self.captureSession canAddInput:videoInput]) {
// Attach the new camera input.
[self.captureSession addInput:videoInput];
self.activeVideoInput = videoInput;
} else {
// The new input was rejected: restore the previous one so the session
// keeps a working camera.
[self.captureSession addInput:self.activeVideoInput];
}
[self.captureSession commitConfiguration];
} else {
NSLog(@"==%s====%@",__func__,error);
return NO;
}
return YES;
}
// The camera opposite the active one, or nil on single-camera devices.
- (AVCaptureDevice *)inactiveCamera {
    if (self.cameraCount <= 1) {
        return nil;
    }
    AVCaptureDevicePosition activePosition = self.activeVideoInput.device.position;
    AVCaptureDevicePosition wantedPosition =
        (activePosition == AVCaptureDevicePositionBack)
            ? AVCaptureDevicePositionFront
            : AVCaptureDevicePositionBack;
    return [self cameraWithPosition:wantedPosition];
}
// First video device at the requested position, or nil if none matches.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return nil;
}
#pragma mark - Still capture
/// Captures a still JPEG from the image output, writes it to the saved-photos
/// album, and on success delivers the UIImage through picCompleteBlock on the
/// main queue. Failures are logged only.
- (void)captureStillImage {
    AVCaptureConnection *connection =
        [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
    // Orient the capture from the externally supplied direction string
    // (fed by DeviceDirection) so the produced JPEG is upright.
    if (connection.isVideoOrientationSupported) {
        if ([self.degress isEqualToString:@"right"]) {
            connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        } else if ([self.degress isEqualToString:@"down"]) {
            connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        } else if ([self.degress isEqualToString:@"up"]) {
            connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
        } else if ([self.degress isEqualToString:@"left"]) {
            connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        }
    }
    // Fix: the handler was declared as a bare `id`, discarding all
    // compile-time checking of the block signature.
    void (^handler)(CMSampleBufferRef, NSError *) =
        ^(CMSampleBufferRef sampleBuffer, NSError *error) {
        if (sampleBuffer == NULL) {
            NSLog(@"NULL sampleBuffer: %@", [error localizedDescription]);
            return;
        }
        NSData *imageData =
            [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        // Fix: the orientation parameter is ALAssetOrientation (its values
        // mirror UIImageOrientation), not a plain NSInteger cast.
        [library writeImageToSavedPhotosAlbum:image.CGImage
                                  orientation:(ALAssetOrientation)image.imageOrientation
                              completionBlock:^(NSURL *assetURL, NSError *writeError) {
            NSLog(@"写入完成=====:%@",assetURL);
            if (writeError) {
                NSLog(@"Error: %@", [writeError localizedDescription]);
                return;
            }
            // Deliver the image to the owner on the main queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (self.picCompleteBlock) {
                    self.picCompleteBlock(image);
                }
            });
        }];
    };
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:connection
                                                  completionHandler:handler];
}
#pragma mark - 视频录制
/// Starts a movie-file recording if one is not already in progress.
/// The clip is written to a timestamped file under NSTemporaryDirectory();
/// progress/completion arrive through the AVCaptureFileOutputRecordingDelegate
/// callbacks implemented below.
- (void)startRecording {
if (![self isRecording]) {
AVCaptureConnection *videoConnection =
[self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
// Orient the recorded track from the externally supplied direction string.
if ([videoConnection isVideoOrientationSupported]) {
if ([self.degress isEqualToString:@"right"]) {
videoConnection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}else if ([self.degress isEqualToString:@"down"]){
videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
}else if ([self.degress isEqualToString:@"up"]){
videoConnection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
}else if ([self.degress isEqualToString:@"left"]){
videoConnection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
}
}
// Enable automatic video stabilization when the connection supports it.
if ([videoConnection isVideoStabilizationSupported]) {
videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
AVCaptureDevice *device = [self activeCamera];
// Smooth autofocus is turned off for the recording.
// NOTE(review): presumably to avoid visible slow focus ramps in the clip —
// confirm before changing.
if (device.isSmoothAutoFocusSupported) {
NSError *error;
if ([device lockForConfiguration:&error]) {
device.smoothAutoFocusEnabled = NO;
[device unlockForConfiguration];
} else {
NSLog(@"==%s====%@",__func__,error);
}
}
// Create the destination URL and begin writing; `self` is the recording
// delegate.
self.outputURL = [self joinStorePaht:@"caputerVideo"];
[self.movieOutput startRecordingToOutputFileURL:self.outputURL
recordingDelegate:self];
}
}
/// Builds a writable file URL of the form
/// <NSTemporaryDirectory()>/<yyyyMMddHHmmss>/<fileName>.mp4, creating the
/// per-timestamp directory when needed.
/// NOTE: the selector keeps its historical typo ("Paht") because existing
/// callers use it.
/// @param fileName Base name of the movie file (without extension).
/// @return File URL for the new .mp4.
- (NSURL *)joinStorePaht:(NSString *)fileName
{
    NSString *documentPath = NSTemporaryDirectory();
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyyMMddHHmmss";
    NSString *nowTimeStr = [formatter stringFromDate:[NSDate date]];
    NSString *storePath = [documentPath stringByAppendingPathComponent:nowTimeStr];
    if (![fileManager fileExistsAtPath:storePath]) {
        NSError *dirError = nil;
        // Fix: the original discarded directory-creation failures, which made
        // the subsequent recording fail with no diagnostic.
        if (![fileManager createDirectoryAtPath:storePath
                    withIntermediateDirectories:YES
                                     attributes:nil
                                          error:&dirError]) {
            NSLog(@"==%s====%@", __func__, dirError);
        }
    }
    NSString *realName = [NSString stringWithFormat:@"%@.mp4", fileName];
    storePath = [storePath stringByAppendingPathComponent:realName];
    return [NSURL fileURLWithPath:storePath];
}
// Device backing the current video input.
- (AVCaptureDevice *)activeCamera {
    return [[self activeVideoInput] device];
}
// Stops an in-flight recording; no-op when nothing is recording.
- (void)stopRecording {
    if (self.isRecording) {
        [self.movieOutput stopRecording];
    }
}
// YES while the movie output is writing a file.
- (BOOL)isRecording {
    return [self.movieOutput isRecording];
}
// Duration recorded so far in the current clip.
- (CMTime)recordedDuration {
    return [self.movieOutput recordedDuration];
}
// Maps the physical device orientation to a capture orientation.
// Device-landscape-left corresponds to video-landscape-right and vice versa
// (the home-button reference flips); every non-landscape orientation —
// including face-down and upside-down, as before — falls back to portrait.
- (AVCaptureVideoOrientation)currentVideoOrientation {
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        default:
            return AVCaptureVideoOrientationPortrait;
    }
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
/// Recording has actually started writing to disk; notify the owner via the
/// startRecord block, if set.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
NSLog(@"开始录制...");
if (self.startRecord) {
self.startRecord();
}
}
// NOTE(review): this selector belongs to
// AVCaptureVideoDataOutputSampleBufferDelegate, not to
// AVCaptureFileOutputRecordingDelegate, so AVCaptureMovieFileOutput never
// invokes it here — it appears to be leftover debugging code; confirm before
// relying on it.
- (void)captureOutput:(AVCaptureFileOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
NSLog(@"====nuffer==:%@",sampleBuffer);
}
/// Recording finished (or failed). On success the output URL is forwarded to
/// videoCompleteBlock; either way the stored URL is cleared afterwards.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error {
    NSLog(@"视频录制完成.");
    if (error) {
        NSLog(@"==%s====%@",__func__,error);
    } else if (self.videoCompleteBlock) {
        // Fix: invoking a nil block crashes; guard before calling (the
        // original called it unconditionally).
        self.videoCompleteBlock(self.outputURL);
    }
    self.outputURL = nil;
}
#pragma mark - Focus & exposure
// Runs a one-shot autofocus at the given point of interest
// (normalized camera coordinates, {0,0}..{1,1}).
- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *camera = [self activeCamera];
    BOOL supported = camera.isFocusPointOfInterestSupported &&
                     [camera isFocusModeSupported:AVCaptureFocusModeAutoFocus];
    if (!supported) {
        return;
    }
    NSError *lockError;
    if (![camera lockForConfiguration:&lockError]) {
        NSLog(@"==%s====%@",__func__,lockError);
        return;
    }
    camera.focusPointOfInterest = point;
    camera.focusMode = AVCaptureFocusModeAutoFocus;
    [camera unlockForConfiguration];
}
// KVO context used to identify observations of `adjustingExposure`.
static const NSString *THCameraAdjustingExposureContext;
/// Sets continuous auto-exposure at the given point of interest. When the
/// device also supports locked exposure, a KVO observer on
/// `adjustingExposure` is installed so the exposure can be frozen once it
/// settles (handled in -observeValueForKeyPath:... below).
/// NOTE(review): each call adds an observer that is only removed in the KVO
/// callback; rapid repeated calls could leave observers unbalanced — verify
/// before reusing this pattern.
- (void)exposeAtPoint:(CGPoint)point{
AVCaptureDevice *device = [self activeCamera];
AVCaptureExposureMode exposureMode =
AVCaptureExposureModeContinuousAutoExposure;
if (device.isExposurePointOfInterestSupported &&
[device isExposureModeSupported:exposureMode]) {
NSError *error;
if ([device lockForConfiguration:&error]) {
device.exposurePointOfInterest = point;
device.exposureMode = exposureMode;
// Watch for the auto-exposure ramp to finish so it can be locked.
if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
[device addObserver:self
forKeyPath:@"adjustingExposure"
options:NSKeyValueObservingOptionNew
context:&THCameraAdjustingExposureContext];
}
[device unlockForConfiguration];
} else {
NSLog(@"==%s====%@",__func__,error);
}
}
}
/// KVO callback paired with -exposeAtPoint:. Once the device has finished
/// adjusting exposure, the observer is removed and the exposure is locked on
/// the main queue. Unrelated observations are forwarded to super.
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
if (context == &THCameraAdjustingExposureContext) {
AVCaptureDevice *device = (AVCaptureDevice *)object;
// Only act when the exposure ramp has settled and locking is supported.
if (!device.isAdjustingExposure &&
[device isExposureModeSupported:AVCaptureExposureModeLocked]) {
// Remove the observer before the async hop so it fires exactly once.
[object removeObserver:self
forKeyPath:@"adjustingExposure"
context:&THCameraAdjustingExposureContext];
dispatch_async(dispatch_get_main_queue(), ^{
NSError *error;
if ([device lockForConfiguration:&error]) {
device.exposureMode = AVCaptureExposureModeLocked;
[device unlockForConfiguration];
} else {
NSLog(@"==%s====%@",__func__,error);
}
});
}
} else {
// Not our context — let the superclass handle it.
[super observeValueForKeyPath:keyPath
ofObject:object
change:change
context:context];
}
}
#pragma mark - Reset focus / exposure
// Restores continuous autofocus and auto-exposure centered in the frame,
// where the hardware supports each.
- (void)resetFocusAndExposureModes {
    AVCaptureDevice *camera = [self activeCamera];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
    BOOL resetFocus = [camera isFocusPointOfInterestSupported] &&
                      [camera isFocusModeSupported:focusMode];
    BOOL resetExposure = [camera isExposurePointOfInterestSupported] &&
                         [camera isExposureModeSupported:exposureMode];
    CGPoint center = CGPointMake(0.5f, 0.5f);
    NSError *lockError;
    if (![camera lockForConfiguration:&lockError]) {
        NSLog(@"==%s====%@",__func__,lockError);
        return;
    }
    if (resetFocus) {
        camera.focusMode = focusMode;
        camera.focusPointOfInterest = center;
    }
    if (resetExposure) {
        camera.exposureMode = exposureMode;
        camera.exposurePointOfInterest = center;
    }
    [camera unlockForConfiguration];
}
#pragma mark - Flash & torch
// Whether the active camera has a flash unit.
- (BOOL)cameraHasFlash {
    return [self activeCamera].hasFlash;
}
// Current flash mode of the active camera.
- (AVCaptureFlashMode)flashMode {
    return [self activeCamera].flashMode;
}
/// Applies the requested flash mode when it differs from the current one and
/// the hardware supports it.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    AVCaptureDevice *device = [self activeCamera];
    if (device.flashMode != flashMode &&
        [device isFlashModeSupported:flashMode]) {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.flashMode = flashMode;
            [device unlockForConfiguration];
        } else {
            // Fix: the lock failure was silently swallowed (empty else);
            // log it like the other configuration methods in this class.
            NSLog(@"==%s====%@",__func__,error);
        }
    }
}
// Whether the active camera has a torch (continuous light).
- (BOOL)cameraHasTorch {
    return [self activeCamera].hasTorch;
}
// Current torch mode of the active camera.
- (AVCaptureTorchMode)torchMode {
    return [self activeCamera].torchMode;
}
/// Applies the requested torch mode when it differs from the current one and
/// the hardware supports it.
- (void)setTorchMode:(AVCaptureTorchMode)torchMode {
    AVCaptureDevice *device = [self activeCamera];
    if (device.torchMode != torchMode &&
        [device isTorchModeSupported:torchMode]) {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.torchMode = torchMode;
            [device unlockForConfiguration];
        } else {
            // Fix: the lock failure was silently swallowed (empty else);
            // log it like the other configuration methods in this class.
            NSLog(@"==%s====%@",__func__,error);
        }
    }
}
@end
录制的视频 保存到本地 是旋转90度的,如果需要正的话需要旋转
视频旋转 拼接工具类
//拼接视频
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray completion:(void (^)(NSURL *outputPath))completion;
//旋转视频
-(void)startExportVideoWithVideoAssetForMX:(NSURL *)videolUrl degrees:(int)degrees completion:(void (^)(NSString *outputPath))completion;
//获取视频的方向
- (int)degressFromVideoFileWithUrl:(NSURL *)videolUrl;
//获取视频第一帧的截图
- (UIImage*) getVideoPreViewImage:(NSURL *)path;
//旋转图片
- (UIImage *)fixOrientation:(UIImage *)aImage;
.m
//
// AWTVideoEditorTool.m
// AWT_iOS
//
// Created by apple on 2019/10/30.
// Copyright © 2019年 wtoip_mac. All rights reserved.
//
#import "AWTVideoEditorTool.h"
#import "AppDelegate.h"
// Private class extension; currently declares no private members.
@interface AWTVideoEditorTool()
@end
@implementation AWTVideoEditorTool
/// Concatenates the movies at fileURLArray back-to-back (video + audio) into
/// one MP4 under ~/tmp and reports the result via `completion` on the main
/// queue: the merged file URL on success, nil on failure or cancellation.
/// @param fileURLArray NSURLs of the clips, in playback order.
/// @param completion   Called on the main queue with the merged URL or nil.
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray completion:(void (^)(NSURL *outputPath))completion{
    NSError *error = nil;
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // One composition track per media type; each clip is appended at the
    // running total duration.
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime totalDuration = kCMTimeZero;
    NSLog(@"=====%lu", (unsigned long)fileURLArray.count); // fix: %ld with NSUInteger
    for (NSURL *clipURL in fileURLArray) {
        AVAsset *asset = [AVAsset assetWithURL:clipURL];
        CMTime clipDuration = asset.duration;
        CMTimeRange clipRange = CMTimeRangeMake(kCMTimeZero, clipDuration);
        AVAssetTrack *srcVideo = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        // Fix: check the insert result instead of silently discarding it.
        if (srcVideo &&
            ![videoTrack insertTimeRange:clipRange ofTrack:srcVideo atTime:totalDuration error:&error]) {
            NSLog(@"==%s==video==%@", __func__, error);
        }
        // Fix: a clip without an audio track used to pass nil to
        // insertTimeRange:ofTrack:..., which raises an exception.
        AVAssetTrack *srcAudio = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (srcAudio &&
            ![audioTrack insertTimeRange:clipRange ofTrack:srcAudio atTime:totalDuration error:&error]) {
            NSLog(@"==%s==audio==%@", __func__, error);
        }
        totalDuration = CMTimeAdd(totalDuration, clipDuration);
    }
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPreset640x480];
    NSDateFormatter *formater = [[NSDateFormatter alloc] init];
    [formater setDateFormat:@"yyyyMMddHHmmss"];
    NSString *outputPath = [NSHomeDirectory() stringByAppendingFormat:@"/tmp/%@.mp4", [formater stringFromDate:[NSDate date]]];
    NSLog(@"video outputPath = %@",outputPath);
    NSURL *videoPath = [NSURL fileURLWithPath:outputPath];
    exporter.outputURL = videoPath;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            if (exporter.status == AVAssetExportSessionStatusCompleted) {
                NSLog(@"======合成成功=====");
                if (completion) {
                    completion(videoPath);
                }
            } else if (exporter.status == AVAssetExportSessionStatusFailed) {
                NSLog(@"======合成失败==:%@",exporter.error);
                if (completion) {
                    completion(nil);
                }
            } else if (exporter.status == AVAssetExportSessionStatusCancelled) {
                NSLog(@"===cancle===");
                if (completion) {
                    completion(nil);
                }
            }
        });
    }];
}
// Reads the preferred transform of the first video track and converts it to
// a clockwise rotation in degrees (0 / 90 / 180 / 270). Returns 0 when there
// is no video track or the transform is not one of the four recognized ones.
- (int)degressFromVideoFileWithUrl:(NSURL *)videolUrl {
    AVAsset *asset = [AVAsset assetWithURL:videolUrl];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil) {
        return 0;
    }
    CGAffineTransform t = videoTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        return 90;   // portrait
    }
    if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        return 270;  // portrait upside-down
    }
    if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        return 180;  // landscape left
    }
    return 0;        // landscape right (identity) or unrecognized
}
/// Rotates the video at `videolUrl` clockwise by `degrees` (supported:
/// 90, 180, 270; any other value exports unrotated) and writes an MP4 under
/// ~/tmp, reporting the output path via `completion` on the main queue.
/// NOTE: as in the original implementation, `completion` is only invoked on
/// success — failure/cancel are logged but never reported, so callers must
/// not block on it unconditionally.
-(void)startExportVideoWithVideoAssetForMX:(NSURL *)videolUrl degrees:(int)degrees completion:(void (^)(NSString *outputPath))completion {
    AVAsset *videoAsset = [AVAsset assetWithURL:videolUrl];
    NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
    if (![presets containsObject:AVAssetExportPreset640x480]) {
        return;
    }
    AVAssetExportSession *session =
        [[AVAssetExportSession alloc] initWithAsset:videoAsset presetName:AVAssetExportPreset640x480];
    NSDateFormatter *formater = [[NSDateFormatter alloc] init];
    [formater setDateFormat:@"yyyy-MM-dd-HH:mm:ss"];
    NSString *outputPath = [NSHomeDirectory() stringByAppendingFormat:@"/tmp/output-%@.mp4", [formater stringFromDate:[NSDate date]]];
    NSLog(@"video outputPath = %@",outputPath);
    session.outputURL = [NSURL fileURLWithPath:outputPath];
    session.shouldOptimizeForNetworkUse = true;
    // Prefer MP4; otherwise fall back to the first supported container type.
    NSArray *supportedTypeArray = session.supportedFileTypes;
    if ([supportedTypeArray containsObject:AVFileTypeMPEG4]) {
        session.outputFileType = AVFileTypeMPEG4;
    } else if (supportedTypeArray.count == 0) {
        NSLog(@"No supported file types 视频类型暂不支持导出");
        return;
    } else {
        session.outputFileType = [supportedTypeArray objectAtIndex:0];
    }
    // Make sure ~/tmp exists before exporting into it.
    NSString *tmpDir = [NSHomeDirectory() stringByAppendingFormat:@"/tmp"];
    if (![[NSFileManager defaultManager] fileExistsAtPath:tmpDir]) {
        [[NSFileManager defaultManager] createDirectoryAtPath:tmpDir withIntermediateDirectories:YES attributes:nil error:nil];
    }
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGSize naturalSize = videoAssetTrack.naturalSize;
    int d = [self degressFromVideoFileWithUrl:videolUrl];
    NSLog(@"===视频的宽高==:%f===%f==方向==:%d", naturalSize.width, naturalSize.height, d);
    // Only the three known rotations build a video composition.
    // Fix: the original tested `degrees != 0` and left translateToCenter /
    // mixedTransform uninitialized for any other non-zero value (e.g. 45),
    // which was undefined behavior when building the layer instruction.
    if (degrees == 90 || degrees == 180 || degrees == 270) {
        AVMutableVideoComposition *rotateComposition = [AVMutableVideoComposition videoComposition];
        rotateComposition.frameDuration = CMTimeMake(1, 30);
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        CGSize trackSize = videoTrack.naturalSize;
        CGAffineTransform translateToCenter = CGAffineTransformIdentity;
        CGAffineTransform mixedTransform = CGAffineTransformIdentity;
        if (degrees == 90) {
            // Clockwise 90°: the canvas becomes height x width.
            translateToCenter = CGAffineTransformMakeTranslation(trackSize.height, 0.0);
            mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2);
            rotateComposition.renderSize = CGSizeMake(trackSize.height, trackSize.width);
        } else if (degrees == 180) {
            // Clockwise 180°: the canvas keeps its dimensions.
            translateToCenter = CGAffineTransformMakeTranslation(trackSize.width, trackSize.height);
            mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI);
            rotateComposition.renderSize = CGSizeMake(trackSize.width, trackSize.height);
        } else {
            // Clockwise 270°: the canvas becomes height x width.
            translateToCenter = CGAffineTransformMakeTranslation(0.0, trackSize.width);
            mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2 * 3.0);
            rotateComposition.renderSize = CGSizeMake(trackSize.height, trackSize.width);
        }
        AVMutableVideoCompositionInstruction *roateInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        roateInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);
        AVMutableVideoCompositionLayerInstruction *roateLayerInstruction =
            [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
        [roateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
        roateInstruction.layerInstructions = @[roateLayerInstruction];
        // Install the rotation into the export pipeline.
        rotateComposition.instructions = @[roateInstruction];
        session.videoComposition = rotateComposition;
    }
    // Export asynchronously; only Completed delivers a callback.
    [session exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (session.status) {
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown"); break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"AVAssetExportSessionStatusWaiting"); break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionStatusExporting"); break;
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) {
                        completion(outputPath);
                    }
                });
            } break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed==:%@",session.error); break;
            default: break;
        }
    }];
}
/// Returns a snapshot of the first frame of the movie at `path`, honoring the
/// track's rotation metadata, or nil when no frame could be decoded.
- (UIImage *)getVideoPreViewImage:(NSURL *)path
{
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:path options:nil];
    AVAssetImageGenerator *assetGen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    assetGen.appliesPreferredTrackTransform = YES; // deliver the frame upright
    CMTime time = CMTimeMakeWithSeconds(0.0, 600);
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [assetGen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (image == NULL) {
        // Fix: the original wrapped a NULL CGImage in a UIImage, producing a
        // broken image object instead of surfacing the failure.
        NSLog(@"==%s====%@", __func__, error);
        return nil;
    }
    UIImage *videoImage = [[UIImage alloc] initWithCGImage:image];
    CGImageRelease(image);
    return videoImage;
}
/// Re-renders `aImage` so that its pixel data is upright and its orientation
/// metadata becomes UIImageOrientationUp. Returns the input unchanged when it
/// is already upright.
- (UIImage *)fixOrientation:(UIImage *)aImage {
// No-op if the orientation is already correct
if (aImage.imageOrientation == UIImageOrientationUp)
return aImage;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
// Step 1: rotation, keyed on the orientation family.
switch (aImage.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
// Step 2: horizontal flip for the mirrored variants.
switch (aImage.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
default:
break;
}
// Draw the source image into a context carrying the computed transform,
// reusing the source's pixel format.
CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
CGImageGetBitsPerComponent(aImage.CGImage), 0,
CGImageGetColorSpace(aImage.CGImage),
CGImageGetBitmapInfo(aImage.CGImage));
CGContextConcatCTM(ctx, transform);
switch (aImage.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
// 90°/270° rotations: width and height swap when drawing.
CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
@end
控制器中视频拼接
//多个视频拼接
[self.videoEditorTool mergeAndExportVideosAtFileURLs:self.urlArray completion:^(NSURL * _Nonnull outputPath) {
if (outputPath) {
//默认以第一个视频方向为主
int degrees = [self.videoEditorTool degressFromVideoFileWithUrl:self.urlArray[0]];
[self.videoEditorTool startExportVideoWithVideoAssetForMX:outputPath degrees:degrees completion:^(NSString * _Nonnull outputPath) {
[IWTAlertView hideLoadingView];
self.videoPath = [NSURL fileURLWithPath:outputPath];
if (self.videoPath != nil && self.urlArray.count > 0) {
ALAssetsLibrary *library=[[ALAssetsLibrary alloc]init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:self.videoPath]) {
ALAssetsLibraryWriteVideoCompletionBlock completionBlock;
completionBlock = ^(NSURL *assetURL, NSError *error){
if (error) {
NSLog(@"==%s====%@",__func__,error);
} else {
}
};
[library writeVideoAtPathToSavedPhotosAlbum:self.videoPath completionBlock:completionBlock];
}
AWTSendVideoViewController *senvideoVC = [[AWTSendVideoViewController alloc]init];
senvideoVC.videoUrl = self.videoPath;
senvideoVC.firstVideoUrl = self.urlArray[0];
[mbHUD hideAnimated:true afterDelay:1.0f];
senvideoVC.remallobjectsBlock = ^{
[self.urlArray removeAllObjects];
};
[self presentViewController:senvideoVC animated:true completion:^{
}];
}
}];
计算进度条
CMTime duration = CMTimeAdd(totalTime, self.captureVideoTool.recordedDuration);
NSUInteger time = ceil((NSUInteger)CMTimeGetSeconds(duration));
if (time >= 30) {
time = 30;
}
// NSInteger hours = (time / 3600);
NSInteger minutes = (time / 60) % 60;
NSInteger seconds = ceil(time % 60);
NSString *format = @"%02i:%02i";
NSString *timeString = [NSString stringWithFormat:format, minutes, seconds];
self.tilmeL.text = timeString;
// if (seconds >= 1) {
self.photoView.progress += 0.01;
// }
完整示例代码地址：
[视频录制 拼接](https://github.com/hengHaibo/VideoRecode/tree/master/%E8%A7%86%E9%A2%91%E5%BD%95%E5%88%B6%20%E6%8B%BC%E6%8E%A5)