First, video merging, which here means merging video and audio. Recording the Unity screen with the shareAEC SDK conflicted with EasyAR when facing a tracked object and caused a black screen, so we switched to Everyplay, the screen recorder recommended by Unity. Everyplay, however, cannot capture the audio Unity itself plays, so we record the audio ourselves while the screen recording runs. The key recording code is below.
- (void)setAudioSession
{
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    // Use the play-and-record category so audio keeps playing while we record
    // (alternatives: AVAudioSessionCategoryAmbient, AVAudioSessionCategoryPlayAndRecord)
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];
    // The lines below keep output on the speaker while recording,
    // so the model's sounds stay audible and end up in the recording
    UInt32 doChangeDefaultRoute = 1;
    // kAudioSessionProperty_OverrideAudioRoute / kAudioSessionProperty_OverrideCategoryDefaultToSpeaker
    AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryDefaultToSpeaker,
                            sizeof(doChangeDefaultRoute),
                            &doChangeDefaultRoute);
}
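AudioSessionSetProperty is a deprecated C API and was removed in later SDKs. A minimal sketch of the same speaker override using the modern AVAudioSession methods (iOS 6+), assuming the deployment target allows it:

- (void)setAudioSessionModern
{
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;
    // DefaultToSpeaker keeps playback audible on the speaker while recording
    [session setCategory:AVAudioSessionCategoryPlayAndRecord
             withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
                   error:&error];
    [session setActive:YES error:&error];
}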
/**
 *  Recording settings
 *
 *  @return the recording settings dictionary
 */
- (NSDictionary *)getAudioSetting
{
    NSMutableDictionary *dic = [NSMutableDictionary dictionary];
    [dic setObject:@(kAudioFormatLinearPCM) forKey:AVFormatIDKey]; // recording format
    [dic setObject:@(44100) forKey:AVSampleRateKey]; // sample rate; 8000 would also do for voice
    [dic setObject:@(2) forKey:AVNumberOfChannelsKey]; // channel count: 1 = mono, 2 = stereo
    [dic setObject:@(16) forKey:AVLinearPCMBitDepthKey]; // bits per sample: 8, 16, 24 or 32
    [dic setObject:@(NO) forKey:AVLinearPCMIsFloatKey]; // float samples require 32-bit depth, so use integer here
    [dic setObject:@(128000) forKey:AVEncoderBitRateKey]; // note: ignored for linear PCM, only applies to compressed formats
    // AVEncoderAudioQualityKey: @(AVAudioQualityMax)
    return dic;
}
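Linear PCM produces large files. A sketch of an alternative settings dictionary, assuming AAC output into an .m4a file is acceptable, where the bit-rate key actually takes effect:

- (NSDictionary *)getCompressedAudioSetting
{
    // AAC keeps files small; AVEncoderBitRateKey applies to compressed formats
    return @{ AVFormatIDKey          : @(kAudioFormatMPEG4AAC),
              AVSampleRateKey        : @(44100),
              AVNumberOfChannelsKey  : @(2),
              AVEncoderBitRateKey    : @(128000),
              AVEncoderAudioQualityKey : @(AVAudioQualityMedium) };
}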
/**
 *  Recording file path
 *
 *  @return the path to record into
 */
- (NSString *)getSavePath
{
    NSString *url = [[UtinityHelper tmpPath] stringByAppendingPathComponent:[NSString stringWithFormat:@"aa.%@", MusicType]];
    return url;
}
- (AVAudioRecorder *)audioRecorder
{
    if (!_audioRecorder) {
        NSError *error = nil;
        // must be a file URL, not a plain string URL
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:[NSURL fileURLWithPath:[self getSavePath]]
                                                     settings:[self getAudioSetting]
                                                        error:&error];
        _audioRecorder.delegate = self;
        // _audioRecorder.meteringEnabled = YES; // enables metering so decibel levels can be read while recording
        if (![_audioRecorder prepareToRecord]) {
            SL_Log(@"Failed to start the recorder");
        }
        [self setAudioSession];
        if (error) {
            NSLog(@"Error creating the recorder: %@", error.localizedDescription);
            return nil;
        }
    }
    return _audioRecorder;
}
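For context, a minimal sketch of how the recorder is driven around the screen recording. The two hook names are hypothetical stand-ins for wherever your Everyplay recording actually starts and stops:

// Hypothetical hooks: call these where screen recording begins/ends
- (void)screenRecordingDidStart
{
    [self.audioRecorder record]; // start capturing while the screen is recorded
}

- (void)screenRecordingDidStop
{
    [self.audioRecorder stop]; // closes the file at [self getSavePath]
    [self merge];              // then mux it with the Everyplay video
}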
// Mix the audio into the video
- (void)merge
{
    NSFileManager *manger = [NSFileManager defaultManager];
    // paths
    // NSString *documents = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    // audio source
    NSString *strMusicPath = [self getSavePath];
    if (![manger fileExistsAtPath:strMusicPath]) {
        // the audio file does not exist
        return;
    }
    NSURL *audioInputUrl = [NSURL fileURLWithPath:strMusicPath];
    // video source
    NSString *strVideoPath = [[UtinityHelper tmpPath] stringByAppendingPathComponent:@"Everyplay/session"];
    NSArray *pathlList = [manger subpathsAtPath:strVideoPath];
    if (pathlList.count <= 0) {
        NSLog(@"The video does not exist");
        [SVProgressHUD showErrorWithStatus:@"Screen recording failed"];
        return;
    } else {
        strVideoPath = [strVideoPath stringByAppendingPathComponent:pathlList.firstObject];
        NSArray *pathlList1 = [manger subpathsAtPath:strVideoPath];
        if (pathlList1.count <= 0) {
            return;
        } else {
            for (NSString *strPath in pathlList1) {
                if ([strPath.pathExtension isEqualToString:@"mp4"]) {
                    strVideoPath = [strVideoPath stringByAppendingPathComponent:strPath];
                    break;
                }
            }
        }
    }
    // strVideoPath = [[NSBundle mainBundle] pathForResource:@"screen-001" ofType:@"mp4"];
    NSURL *videoInputUrl = [NSURL fileURLWithPath:strVideoPath];
    // final output path
    NSString *videoPath = [[UtinityHelper docmentPath] stringByAppendingPathComponent:VideoFolder];
    [UtinityHelper creatFolderWithPath:videoPath];
    NSString *strVideoName = [NSString stringWithFormat:@"recoder_%@.mp4", [UtinityHelper switchStrDateWithDate:[NSDate date] format:@"yyyyMMddHHmmss"]];
    NSString *outPutFilePath = [videoPath stringByAppendingPathComponent:strVideoName];
    if ([UtinityHelper isHasPath:outPutFilePath]) {
        [UtinityHelper removeFileWithPath:outPutFilePath];
    }
    // output URL
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];
    // insertion point on the timeline
    CMTime nextClistartTime = kCMTimeZero;
    // mutable audio/video composition
    AVMutableComposition *comosition = [AVMutableComposition composition];
    // video asset
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    // video time range
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    // video composition track (kCMPersistentTrackID_Invalid == 0 lets the framework assign an ID)
    AVMutableCompositionTrack *videoTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // source video track
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // insert the source track into the mutable track
    [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClistartTime error:nil];
    // the merged video comes out rotated 180 degrees, so rotate 180 degrees up front
    CGAffineTransform preferredTransform = videoTrack.preferredTransform;
    // CGAffineTransform trans = CGAffineTransformTranslate(preferredTransform, 0.0, -videoTrack.naturalSize.height);
    CGAffineTransform transNew = CGAffineTransformRotate(preferredTransform, M_PI);
    videoTrack.preferredTransform = transNew;
    // audio asset
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    // the video is the shorter one here, so its range is reused directly; add a real comparison if you automate this
    CMTimeRange audioTimeRange = videoTimeRange;
    // audio composition track
    AVMutableCompositionTrack *audioTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // source audio track
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // insert it into the composition
    [audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClistartTime error:nil];
    // create the export session
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:comosition presetName:AVAssetExportPresetMediumQuality];
    // output type
    // assetExport.outputFileType = AVFileType3GPP;
    assetExport.outputFileType = AVFileTypeMPEG4;
    // output URL
    assetExport.outputURL = outputFileUrl;
    // optimize for streaming
    assetExport.shouldOptimizeForNetworkUse = YES;
    // export
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        SL_Log(@"Merge finished");
        // back to the main thread
        dispatch_async(dispatch_get_main_queue(), ^{
        });
    }];
}
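The completion handler above logs success unconditionally, but the export can still fail. A minimal sketch of checking the session status before touching the output file (same assetExport and outputFileUrl as above):

[assetExport exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (assetExport.status == AVAssetExportSessionStatusCompleted) {
            SL_Log(@"Merge finished");
            // safe to use outputFileUrl here
        } else {
            NSLog(@"Export failed (%ld): %@", (long)assetExport.status, assetExport.error);
        }
    });
}];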
Some code related to video rotation:
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray
{
    // NSLog(@"the getVideoCount is %lu", (unsigned long)[self getVideoCount]);
    NSLog(@"the fileURLArray is %@", fileURLArray);
    if (fileURLArray.count <= 0) {
#warning merging started before the short clips were successfully generated
        NSLog(@"Serious error: no clips to merge!");
        return;
    } else {
        for (NSURL *fileURL in fileURLArray) {
            NSString *path = fileURL.resourceSpecifier;
            if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
                NSUInteger size;
                NSDictionary *attr = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:nil];
                size = [attr[NSFileSize] unsignedIntegerValue];
                NSLog(@"mergeAndExportVideosAtFileURLs fileSize is %lu MB", size / (1024 * 1024));
            }
        }
    }
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSError *error = nil;
        CGSize renderSize = CGSizeMake(0, 0);
        NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] init];
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
        CMTime totalDuration = kCMTimeZero;
        // collect every video track first, also to compute renderSize
        NSMutableArray *assetTrackArray = [[NSMutableArray alloc] init];
        NSMutableArray *assetAudioTrackArray = [NSMutableArray arrayWithCapacity:0];
        NSMutableArray *assetArray = [[NSMutableArray alloc] init];
        for (NSURL *fileURL in fileURLArray) {
            AVAsset *asset = [AVAsset assetWithURL:fileURL];
            if (!asset) {
                continue;
            }
            [assetArray addObject:asset];
            AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            [assetTrackArray addObject:assetTrack];
            // AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            // [assetAudioTrackArray addObject:assetAudioTrack];
            renderSize.width = MAX(renderSize.width, assetTrack.naturalSize.height);
            renderSize.height = MAX(renderSize.height, assetTrack.naturalSize.width);
        }
        NSLog(@"the assetAudioTrackArray is %@", assetAudioTrackArray);
        CGFloat renderW = MIN(renderSize.width, renderSize.height);
        NSLog(@"the renderW is %f", renderW);
        NSLog(@"the assetArray count is %lu", (unsigned long)[assetArray count]);
        NSLog(@"the assetTrackArray count is %lu", (unsigned long)[assetTrackArray count]);
        for (int i = 0; i < [assetArray count] && i < [assetTrackArray count]; i++) {
            AVAsset *asset = [assetArray objectAtIndex:i];
            AVAssetTrack *assetTrack = [assetTrackArray objectAtIndex:i];
#warning guard needed here: [asset tracksWithMediaType:AVMediaTypeAudio] may be empty when a clip has no audio
            NSArray *arr = [asset tracksWithMediaType:AVMediaTypeAudio];
            if (arr.count <= 0) {
                NSLog(@"This clip has no audio track!");
            }
            if (arr.count > 0) {
                AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
                [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[arr objectAtIndex:0] atTime:totalDuration error:nil];
            }
            AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            NSLog(@"the asset duration is %lld s", asset.duration.value / asset.duration.timescale);
            [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                ofTrack:assetTrack
                                 atTime:totalDuration
                                  error:&error];
            // fix orientation issue
            AVMutableVideoCompositionLayerInstruction *layerInstruciton = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
            totalDuration = CMTimeAdd(totalDuration, asset.duration);
            CGFloat rate;
            NSLog(@"the renderW is %f", renderW);
            NSLog(@"assetTrack.naturalSize.width is %f", assetTrack.naturalSize.width);
            NSLog(@"assetTrack.naturalSize.height is %f", assetTrack.naturalSize.height);
            rate = renderW / MIN(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
            NSLog(@"the rate is %f", rate);
            NSLog(@"preferredTransform.a is %f", assetTrack.preferredTransform.a);
            NSLog(@"preferredTransform.b is %f", assetTrack.preferredTransform.b);
            NSLog(@"preferredTransform.c is %f", assetTrack.preferredTransform.c);
            NSLog(@"preferredTransform.d is %f", assetTrack.preferredTransform.d);
            NSLog(@"preferredTransform.tx is %f", assetTrack.preferredTransform.tx);
            NSLog(@"preferredTransform.ty is %f", assetTrack.preferredTransform.ty);
            CGAffineTransform translateToCenter;
            CGAffineTransform mixedTransform;
            // the helper takes the clip's file URL, not the track
            int degrees = (int)[self degressFromVideoFileWithURL:[fileURLArray objectAtIndex:i]];
            if (degrees == 0) {
                // no rotation needed
            } else {
                if (degrees == 90) {
                    // rotate 90 degrees clockwise
                    NSLog(@"Video rotated 90 degrees, home button on the left");
                    translateToCenter = CGAffineTransformMakeTranslation(assetTrack.naturalSize.height, 0.0);
                    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2);
                } else if (degrees == 180) {
                    // rotate 180 degrees clockwise
                    NSLog(@"Video rotated 180 degrees, home button on the top");
                    translateToCenter = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
                    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI);
                } else if (degrees == 270) {
                    // rotate 270 degrees clockwise
                    NSLog(@"Video rotated 270 degrees, home button on the right");
                    translateToCenter = CGAffineTransformMakeTranslation(0.0, assetTrack.naturalSize.width);
                    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2 * 3.0);
                }
            }
            CGAffineTransform preferredTransform = assetTrack.preferredTransform;
            CGAffineTransform trans = CGAffineTransformTranslate(preferredTransform, 0.0, -assetTrack.naturalSize.height);
            CGAffineTransform transNew = CGAffineTransformRotate(preferredTransform, M_PI_2 * 3);
            transNew = CGAffineTransformTranslate(transNew, 0, -(assetTrack.naturalSize.width - assetTrack.naturalSize.height) / 2.0); // shift up to take the middle of the frame
            transNew = CGAffineTransformConcat(trans, transNew);
            transNew = CGAffineTransformScale(transNew, rate, rate); // scale to even out front/back camera frame sizes
            // alternative: build the transform by hand; for a portrait clip it comes out as
            // (a = 0, b = 1, c = -1, d = 0, tx = 1080, ty = 0)
            // CGAffineTransform layerTransform = CGAffineTransformMake(assetTrack.preferredTransform.a, assetTrack.preferredTransform.b, assetTrack.preferredTransform.c, assetTrack.preferredTransform.d, assetTrack.naturalSize.height * rate, assetTrack.preferredTransform.ty * rate);
            // [layerInstruciton setTransform:layerTransform atTime:kCMTimeZero];
            // [layerInstruciton setOpacity:0.0 atTime:totalDuration];
            [layerInstruciton setTransform:transNew atTime:kCMTimeZero];
            // [layerInstruciton setTransform:mixedTransform atTime:kCMTimeZero];
            // [layerInstruciton setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
            [layerInstructionArray addObject:layerInstruciton];
        }
        // get save path
        NSURL *mergeFileURL = [NSURL fileURLWithPath:[[self class] getVideoMergeFilePathString]];
        // export
        AVMutableVideoCompositionInstruction *mainInstruciton = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        mainInstruciton.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
        mainInstruciton.layerInstructions = layerInstructionArray;
        AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
        mainCompositionInst.instructions = @[mainInstruciton];
        mainCompositionInst.frameDuration = CMTimeMake(1, 30);
        // mainCompositionInst.frameDuration = CMTimeMake(1, 24);
        NSLog(@"the renderSize is %@", NSStringFromCGSize(CGSizeMake(renderW, renderW)));
        mainCompositionInst.renderSize = CGSizeMake(renderW, renderW);
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
        exporter.videoComposition = mainCompositionInst;
        exporter.outputURL = mergeFileURL;
        exporter.outputFileType = AVFileTypeMPEG4;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            if ([exporter status] != AVAssetExportSessionStatusCompleted) {
                NSLog(@"the status is %ld", (long)[exporter status]);
                NSLog(@"the outPutPath is %@", [exporter.outputURL absoluteString]);
                NSLog(@"the error is %@", [exporter error].userInfo);
                NSLog(@"the error is %@", [exporter error]);
            }
            NSString *path = mergeFileURL.resourceSpecifier;
            NSLog(@"theVideoPath is %@", path);
            NSLog(@"outputFileURL is %@", mergeFileURL);
            if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
                NSLog(@"theVideoPath is %@", path);
                NSUInteger size;
                NSDictionary *attr = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:nil];
                size = [attr[NSFileSize] unsignedIntegerValue];
                NSLog(@"didFinishRecordingToOutputFileAtURL fileSize is %lu MB", size / (1024 * 1024));
            }
            NSLog(@"the outputFile is %@", mergeFileURL);
            dispatch_async(dispatch_get_main_queue(), ^{
                // hand the merged file to a preview controller or a delegate here, e.g.
                // if ([_delegate respondsToSelector:@selector(videoRecorder:didFinishMergingVideosToOutPutFileAtURL:)]) {
                //     [_delegate videoRecorder:self didFinishMergingVideosToOutPutFileAtURL:mergeFileURL];
                // }
            });
        }];
    });
}
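The translate-then-rotate pairs above are easy to get wrong, so here is the underlying idea as a worked example: a 90-degree rotation maps a point (x, y) to (-y, x), which pushes the whole frame off-screen to the left, and the translation shifts it back into view. A sketch consolidating the three cases, under the same naturalSize convention as above:

// Returns the display transform for a track recorded `degrees` clockwise.
// Sketch only: assumes degrees is one of 0/90/180/270 as detected above.
static CGAffineTransform transformForRotation(AVAssetTrack *track, int degrees)
{
    CGSize size = track.naturalSize;
    switch (degrees) {
        case 90:  // (x, y) -> (-y, x); shift right by the rotated width (= height)
            return CGAffineTransformRotate(CGAffineTransformMakeTranslation(size.height, 0), M_PI_2);
        case 180: // (x, y) -> (-x, -y); shift right and down
            return CGAffineTransformRotate(CGAffineTransformMakeTranslation(size.width, size.height), M_PI);
        case 270: // (x, y) -> (y, -x); shift down by the rotated height (= width)
            return CGAffineTransformRotate(CGAffineTransformMakeTranslation(0, size.width), M_PI_2 * 3.0);
        default:
            return CGAffineTransformIdentity;
    }
}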
- (NSUInteger)degressFromVideoFileWithURL:(NSURL *)url
{
    NSUInteger degress = 0;
    AVAsset *asset = [AVAsset assetWithURL:url];
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            // Portrait
            degress = 90;
        } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            // PortraitUpsideDown
            degress = 270;
        } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            // LandscapeRight
            degress = 0;
        } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            // LandscapeLeft
            degress = 180;
        }
    }
    return degress;
}
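The four exact matrix comparisons only cover the standard orientations. An equivalent, slightly more tolerant sketch derives the angle directly from the rotation part of the matrix; the normalization into [0, 360) is my own addition:

- (NSUInteger)rotationDegreesForTrack:(AVAssetTrack *)videoTrack
{
    CGAffineTransform t = videoTrack.preferredTransform;
    // for a pure rotation the matrix is [cos sin; -sin cos], so atan2(b, a) recovers the angle
    CGFloat radians = atan2(t.b, t.a);
    NSInteger degrees = (NSInteger)round(radians * 180.0 / M_PI);
    return (NSUInteger)((degrees + 360) % 360); // normalize into [0, 360)
}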
- (UIImage *)extractImageFromVideoFileWithUrl:(NSURL *)url
{
    NSDictionary *opts = @{AVURLAssetPreferPreciseDurationAndTimingKey : @(NO)};
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:opts];
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // apply the track's preferred transform so the thumbnail comes out upright
    gen.appliesPreferredTrackTransform = YES;
    CMTime time = CMTimeMakeWithSeconds(1, 60);
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (error) {
        SL_Log(@"error %@", error);
        return nil;
    }
    UIImage *thumb = [[UIImage alloc] initWithCGImage:image];
    CGImageRelease(image);
    return thumb;
}
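For example, once the merge export completes, you could grab a thumbnail of the finished file (mergeFileURL as above) for a preview cell; saving it next to the video is just one possible use:

UIImage *thumb = [self extractImageFromVideoFileWithUrl:mergeFileURL];
if (thumb) {
    // e.g. show it in a preview cell, or persist it alongside the video
    NSData *jpeg = UIImageJPEGRepresentation(thumb, 0.8);
    [jpeg writeToFile:[mergeFileURL.path stringByAppendingString:@".jpg"] atomically:YES];
}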