I recently helped a friend build a small feature: adding background music to a video, with control over where the audio starts, how loud it plays, and so on. I wrapped it up in a utility class; the code is below, with a short usage sketch at the end.
The XXWVideoEditor.h file:
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface XXWVideoEditor : NSObject
/**
 Add an audio track to a video

 @param videoUrl         video URL
 @param audioUrl         audio URL
 @param startTime        time (in seconds) at which the inserted audio starts
 @param endTime          time (in seconds) at which the inserted audio ends
 @param isOriginal       whether to keep the video's original sound
 @param oriVolume        volume of the original sound
 @param newVolume        volume of the inserted audio
 @param completionHandle completion callback with the output path and a success flag
 */
+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl
                                  audioUrl:(NSURL *)audioUrl
                                     start:(CGFloat)startTime
                                       end:(CGFloat)endTime
                           isOriginalSound:(BOOL)isOriginal
                                 oriVolume:(float)oriVolume
                                 newVolume:(float)newVolume
                                completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle;
@end
The XXWVideoEditor.m file:
#import "XXWVideoEditor.h"
@implementation XXWVideoEditor
+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl
                                  audioUrl:(NSURL *)audioUrl
                                     start:(CGFloat)startTime
                                       end:(CGFloat)endTime
                           isOriginalSound:(BOOL)isOriginal
                                 oriVolume:(float)oriVolume
                                 newVolume:(float)newVolume
                                completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle
{
    // Source audio path
    NSURL *audio_inputFileUrl = audioUrl;
    // Source video path
    NSURL *video_inputFileUrl = videoUrl;
    // Export path
    NSString *outputFilePath = [XXWVideoEditor fileSavePath];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    CMTime nextClipStartTime = kCMTimeZero;
    // Create a mutable composition that will hold the video and audio tracks
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // Load the video asset and insert its video track into the composition
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:nextClipStartTime
                                       error:nil];
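    // Build the inserted audio's time range from the caller's start/end seconds,
    // expressed in the video's timescale so it lines up with the video track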
    CMTime start = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
    CMTime duration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
    CMTimeRange audio_timeRange = CMTimeRangeMake(start, duration);
    // Create the audio mix that carries the per-track volume settings
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    // Load the new audio asset and insert its audio track into the composition
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    AVMutableCompositionTrack *newAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                           preferredTrackID:kCMPersistentTrackID_Invalid];
    [newAudioTrack insertTimeRange:audio_timeRange
                           ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                            atTime:start
                             error:nil];
    // Hold the inserted audio at a constant volume
    AVMutableAudioMixInputParameters *newAudioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:newAudioTrack];
    [newAudioInputParams setVolume:newVolume atTime:kCMTimeZero];
    [newAudioInputParams setTrackID:newAudioTrack.trackID];
    // Original audio track of the video file
    NSArray<AVAssetTrack *> *videoAudioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
    if (isOriginal && videoAudioTracks.count > 0) {
        // Copy the video's own audio track into the composition; skip this block
        // and the merged file will not contain the video's original sound
        AVMutableCompositionTrack *originVoiceTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
        [originVoiceTrack insertTimeRange:videoTimeRange ofTrack:videoAudioTracks.firstObject atTime:kCMTimeZero error:nil];
        // Hold the original sound at a constant volume as well
        AVMutableAudioMixInputParameters *originAudioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:originVoiceTrack];
        [originAudioInputParams setVolume:oriVolume atTime:kCMTimeZero];
        [originAudioInputParams setTrackID:originVoiceTrack.trackID];
        audioMix.inputParameters = @[newAudioInputParams, originAudioInputParams];
    } else {
        audioMix.inputParameters = @[newAudioInputParams];
    }
    // Create an export session for the mixed composition
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    // Export as MPEG-4 to match the .mp4 output path
    assetExport.outputFileType = AVFileTypeMPEG4;
    assetExport.outputURL = outputFileUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;
    // Attach the audio mix so the volume settings are applied during export
    assetExport.audioMix = audioMix;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        // Deliver the result on the main queue so callers can update UI directly
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (assetExport.status) {
                case AVAssetExportSessionStatusFailed: {
                    NSLog(@"Export failed: %@", assetExport.error);
                    completionHandle(outputFilePath, NO);
                } break;
                case AVAssetExportSessionStatusCancelled: {
                    completionHandle(outputFilePath, NO);
                } break;
                case AVAssetExportSessionStatusCompleted: {
                    completionHandle(outputFilePath, YES);
                } break;
                default: {
                    completionHandle(outputFilePath, NO);
                } break;
            }
        });
    }];
}
+ (NSString *)fileSavePath
{
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    // Final output path; remove any previous export so the new file can be written
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"output.mp4"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    }
    return outputFilePath;
}
@end
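To wrap up, here is a minimal sketch of how the class might be called. The demo.mp4 and bgm.mp3 bundle resources are placeholders, and the 0-to-10-second range, 30% original volume, and full-volume music are just example values; adjust them to your own material.

#import "XXWVideoEditor.h"

NSURL *videoUrl = [[NSBundle mainBundle] URLForResource:@"demo" withExtension:@"mp4"];
NSURL *audioUrl = [[NSBundle mainBundle] URLForResource:@"bgm" withExtension:@"mp3"];
[XXWVideoEditor addBackgroundMiusicWithVideoUrlStr:videoUrl
                                          audioUrl:audioUrl
                                             start:0
                                               end:10
                                   isOriginalSound:YES
                                         oriVolume:0.3f
                                         newVolume:1.0f
                                        completion:^(NSString *outPath, BOOL isSuccess) {
    // The callback arrives on the main queue, so UI work is safe here
    if (isSuccess) {
        NSLog(@"Merged file written to %@", outPath);
    } else {
        NSLog(@"Merge failed");
    }
}];

Note that the output always goes to Documents/output.mp4 and overwrites the previous result, so move or rename the file if you need to keep more than one export.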