// Related classes: AVMutableComposition / AVMutableAudioMix / AVAssetExportSession
/// Test: composes one video track plus two audio tracks (the movie's own
/// audio and a background-music file) into an AVMutableComposition, applies
/// crossed volume ramps through an AVAudioMix, and exports the result as a
/// QuickTime movie. Reads self.movieFile1 / self.musicFile1 (set elsewhere)
/// and hands the session to -startExport:.
- (void)testCom3 {
    // Container composition with one video channel and one audio channel.
    AVMutableComposition *mainComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *compositionVideoTrack =
        [mainComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack =
        [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                     preferredTrackID:kCMPersistentTrackID_Invalid];

#pragma mark - Source tracks
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.movieFile1 options:nil]; // URLHookmark
    // NOTE(review): -tracksWithMediaType: loads synchronously; acceptable in a test.
    AVAssetTrack *video_track = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!video_track) {
        NSLog(@"数据获取失败");
        return;
    }
    AVAssetTrack *audio_track = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (!audio_track) {
        NSLog(@"获取音频数据失败");
        return;
    }

#pragma mark - Insert the video track
    compositionVideoTrack.preferredTransform = video_track.preferredTransform;
    NSError *error = nil;
    // BUGFIX: check the BOOL return value, not the error pointer — Cocoa may
    // leave *error non-nil even when the call succeeds.
    if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                        ofTrack:video_track
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"error;%@", error);
        return;
    }

#pragma mark - Insert the primary audio track
    // Express the video duration in the audio track's timescale.
    // BUGFIX: CMTimeConvertScale rounds correctly instead of the previous
    // float -> int64 truncation (video_times * audio_time_scale).
    CMTime audio_duration = CMTimeConvertScale(asset.duration,
                                               audio_track.naturalTimeScale,
                                               kCMTimeRoundingMethod_Default);
    if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audio_duration)
                                        ofTrack:audio_track
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"音轨error:%@", error);
        return;
    }

#pragma mark - Insert a second audio track (background music)
    AVURLAsset *mixAsset = [[AVURLAsset alloc] initWithURL:self.musicFile1 options:nil];
    AVAssetTrack *mix_track = [mixAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (!mix_track) {
        NSLog(@"获取第二音轨资源失败");
        return;
    }
    AVMutableCompositionTrack *mixAudioTrack =
        [mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                     preferredTrackID:kCMPersistentTrackID_Invalid]; // second audio track
    // BUGFIX: this insert was previously unchecked.
    if (![mixAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audio_duration)
                                ofTrack:mix_track
                                 atTime:kCMTimeZero
                                  error:&error]) {
        NSLog(@"音轨error:%@", error);
        return;
    }
    NSLog(@"tracks : %@", mainComposition.tracks);

#pragma mark - Audio mix (volume ramps)
    // Duck the movie's own audio (0 -> 0.1) over the whole duration …
    AVMutableAudioMixInputParameters *firstAudioParam =
        [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
    [firstAudioParam setVolumeRampFromStartVolume:0.0f
                                      toEndVolume:0.1f
                                        timeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)];
    // … while the background music ramps up (0.3 -> 1.0).
    AVMutableAudioMixInputParameters *secondAudioParam =
        [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:mixAudioTrack];
    [secondAudioParam setVolumeRampFromStartVolume:0.3f
                                       toEndVolume:1.0f
                                         timeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)];
    AVMutableAudioMix *videoAudioMixTools = [AVMutableAudioMix audioMix];
    videoAudioMixTools.inputParameters = @[ firstAudioParam, secondAudioParam ];

#pragma mark - Export
    // BUGFIX: the output type is a QuickTime movie, so the file extension must
    // be "mov" — the previous "m4a" extension contradicts the container type
    // and makes the export fail.
    NSURL *composedMovieUrl = [self getSaveURLWithExtension:@"mov"];
    AVAssetExportSession *assetExportSession =
        [[AVAssetExportSession alloc] initWithAsset:mainComposition
                                         presetName:AVAssetExportPreset1280x720];
    if (!assetExportSession) { // preset can be unsupported for this asset
        NSLog(@"error;%@", error);
        return;
    }
    assetExportSession.outputFileType = AVFileTypeQuickTimeMovie;
    assetExportSession.outputURL = composedMovieUrl;
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    assetExportSession.audioMix = videoAudioMixTools;
    [self startExport:assetExportSession];
    return;

#pragma mark - Playback (intentionally unreachable — remove the return above to enable)
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:mainComposition];
    [item setAudioMix:videoAudioMixTools];
    self.player = [AVPlayer playerWithPlayerItem:item];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    playerLayer.frame = NSMakeRect(0, 0, 300, 300);
    playerLayer.videoGravity = AVLayerVideoGravityResize;
    self.window.contentView.wantsLayer = YES;
    [self.window.contentView.layer addSublayer:playerLayer];
    [self.player play];
}
/// Builds a unique file URL on the user's Desktop whose name is the current
/// reference-date timestamp followed by the given path extension (e.g. "mov").
- (NSURL *)getSaveURLWithExtension:(NSString *)extension {
    // NOTE(review): despite the historical "docs" naming elsewhere, this
    // resolves the Desktop directory.
    NSString *desktopDir =
        NSSearchPathForDirectoriesInDomains(NSDesktopDirectory, NSUserDomainMask, YES).firstObject;
    NSString *uniqueName = [NSString stringWithFormat:@"%f.%@",
                            [NSDate timeIntervalSinceReferenceDate], extension];
    NSURL *composedMovieUrl =
        [NSURL fileURLWithPath:[desktopDir stringByAppendingPathComponent:uniqueName]];
    NSLog(@"composedMovieUrl : %@", composedMovieUrl);
    return composedMovieUrl;
}
/// Exports the given composition to a uniquely-named QuickTime movie on the
/// Desktop via -startExport:. Fire-and-forget; progress/errors are handled
/// by -startExport: (defined elsewhere in this class).
- (void)saveComposition:(AVMutableComposition *)composition {
    // BUGFIX: the output container is a QuickTime movie, so the extension
    // must be "mov" — "m4a" contradicts AVFileTypeQuickTimeMovie and makes
    // the export session fail.
    NSURL *composedMovieUrl = [self getSaveURLWithExtension:@"mov"];
    AVAssetExportSession *assetExportSession =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:AVAssetExportPreset1280x720];
    // BUGFIX: the initializer returns nil when the preset is unsupported for
    // this asset; bail out instead of configuring a nil session silently.
    if (!assetExportSession) {
        NSLog(@"error;%@", @"export session creation failed");
        return;
    }
    assetExportSession.outputFileType = AVFileTypeQuickTimeMovie;
    assetExportSession.outputURL = composedMovieUrl;
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    [self startExport:assetExportSession];
}