Composing Images into a Video
First, link the three frameworks this depends on:
AVKit.framework, MediaPlayer.framework, AVFoundation.framework
The comments in the code below explain each step, so without further discussion, here is the code.
#import "ViewController.h"
#import
#import
#import
@interfaceViewController ()
{
NSMutableArray*imageArr;//未压缩的图片
NSMutableArray*imageArray;//经过压缩的图片
}
@property(nonatomic,strong)NSString*theVideoPath;
@end
@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    imageArray = [[NSMutableArray alloc] init];
    imageArr = [[NSMutableArray alloc] initWithObjects:
                [UIImage imageNamed:@"IMG_3811.jpg"], [UIImage imageNamed:@"IMG_3812.jpg"], [UIImage imageNamed:@"IMG_3813.jpg"], [UIImage imageNamed:@"IMG_3814.jpg"], [UIImage imageNamed:@"IMG_3815.jpg"], [UIImage imageNamed:@"IMG_3816.jpg"],
                [UIImage imageNamed:@"IMG_3817.jpg"], [UIImage imageNamed:@"IMG_3818.jpg"], [UIImage imageNamed:@"IMG_3820.jpg"], [UIImage imageNamed:@"IMG_3821.jpg"], [UIImage imageNamed:@"IMG_3822.jpg"], [UIImage imageNamed:@"IMG_3823.jpg"],
                [UIImage imageNamed:@"IMG_3824.jpg"], [UIImage imageNamed:@"IMG_3825.jpg"], [UIImage imageNamed:@"IMG_3826.jpg"], [UIImage imageNamed:@"IMG_3827.jpg"], [UIImage imageNamed:@"IMG_3828.jpg"], [UIImage imageNamed:@"IMG_3829.jpg"],
                [UIImage imageNamed:@"IMG_3830.jpg"], [UIImage imageNamed:@"IMG_3831.jpg"], [UIImage imageNamed:@"IMG_3832.jpg"], [UIImage imageNamed:@"IMG_3833.jpg"], [UIImage imageNamed:@"IMG_3834.jpg"], [UIImage imageNamed:@"IMG_3835.jpg"],
                [UIImage imageNamed:@"IMG_3836.jpg"], [UIImage imageNamed:@"IMG_3837.jpg"], [UIImage imageNamed:@"IMG_3838.jpg"], [UIImage imageNamed:@"IMG_3839.jpg"], [UIImage imageNamed:@"IMG_3840.jpg"], [UIImage imageNamed:@"IMG_3841.jpg"],
                [UIImage imageNamed:@"IMG_3842.jpg"], [UIImage imageNamed:@"IMG_3843.jpg"], [UIImage imageNamed:@"IMG_3844.jpg"], [UIImage imageNamed:@"IMG_3845.jpg"], [UIImage imageNamed:@"IMG_3846.jpg"], nil];
    for (int i = 0; i < imageArr.count; i++)
    {
        UIImage *imageNew = imageArr[i];
        // Target frame size
        CGSize imagesize = imageNew.size;
        imagesize.height = 408;
        imagesize.width = 306;
        // Scale the image down to that size
        imageNew = [self imageWithImage:imageNew scaledToSize:imagesize];
        [imageArray addObject:imageNew];
    }
    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button setFrame:CGRectMake(100, 100, 100, 50)];
    [button setTitle:@"视频合成" forState:UIControlStateNormal];
    [button addTarget:self action:@selector(testCompressionSession) forControlEvents:UIControlEventTouchUpInside];
    button.backgroundColor = [UIColor redColor];
    [self.view addSubview:button];

    UIButton *button1 = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button1 setFrame:CGRectMake(100, 200, 100, 50)];
    [button1 setTitle:@"视频播放" forState:UIControlStateNormal];
    [button1 addTarget:self action:@selector(playAction) forControlEvents:UIControlEventTouchUpInside];
    button1.backgroundColor = [UIColor redColor];
    [self.view addSubview:button1];
}
// Scale an image down to a new size
- (UIImage *)imageWithImage:(UIImage *)image scaledToSize:(CGSize)newSize
{
    // Create a new bitmap context of the target size
    UIGraphicsBeginImageContext(newSize);
    // Draw the image into the resized rect
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    // Grab the resized image back out of the current context
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    // Return the new image.
    return newImage;
}
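
// Side note (not from the original post): UIGraphicsBeginImageContext is an older,
// scale-unaware API. On iOS 10+ the same resize could be sketched with
// UIGraphicsImageRenderer, roughly like this:
//
//     UIGraphicsImageRenderer *renderer = [[UIGraphicsImageRenderer alloc] initWithSize:newSize];
//     UIImage *newImage = [renderer imageWithActions:^(UIGraphicsImageRendererContext *ctx) {
//         [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
//     }];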
- (void)testCompressionSession
{
    // Build the .mov output path inside Documents
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [[paths objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", @"test"]];
    self.theVideoPath = moviePath;
    // Output video size (320 x 480, or a multiple of it)
    CGSize size = CGSizeMake(320, 480);
    // [self writeImages:imageArr ToMovieAtPath:moviePath withSize:size inDuration:4 byFPS:30]; // alternative approach, not shown in this post
    NSError *error = nil;
    // Delete any file already at this path (unlink takes a C string, hence UTF8String)
    unlink([moviePath UTF8String]);
    NSLog(@"path->%@", moviePath);
    // AVFoundation's AVAssetWriter makes it straightforward to write images (and audio) out as a complete video file
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);
    // Output settings for the .mov: codec, width, and height
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    // AVAssetWriterInputPixelBufferAdaptor exposes a CVPixelBufferPool;
    // allocating pixel buffers from that pool for writing to the output file
    // is usually more efficient than allocating them from a separate pool
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    if ([videoWriter canAddInput:writerInput])
    {
        NSLog(@"writer input can be added");
    }
    else
    {
        NSLog(@"writer input cannot be added");
    }
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // Compose the images into a single video file
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            // 10 frames per image; stop once every image has been written
            if (++frame >= [imageArray count] * 10)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                // [videoWriter finishWritingWithCompletionHandler:nil]; // preferred on newer iOS, see the sketch after the listing
                break;
            }
            CVPixelBufferRef buffer = NULL;
            int idx = frame / 10;
            NSLog(@"idx == %d", idx);
            buffer = [self pixelBufferFromCGImage:[[imageArray objectAtIndex:idx] CGImage] size:size];
            if (buffer)
            {
                // Each frame lasts 1/30 s, so 10 frames per image is about 1/3 s per image
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 30)])
                {
                    NSLog(@"FAIL");
                }
                else
                {
                    NSLog(@"OK");
                }
                CFRelease(buffer);
            }
        }
    }];
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // CGBitmapContextCreate builds a bitmap drawing environment (a bitmap context): anything
    // drawn into it is rendered as bitmap data into the supplied memory block. The pixel
    // format of the new context is determined by the bits per component, the color space,
    // and the alpha option. Using the buffer's own bytes-per-row respects any row padding.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    // Draw the image with CGContextDrawImage; if this is set up incorrectly the video frames come out upside down.
    // When drawing a UIImage's CGImageRef into a context with CGContextDrawImage, the image is flipped vertically,
    // because UIKit and Core Graphics use opposite y-axis directions.
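    // One common fix (not part of the original post; enable only if your output really is flipped):
    // CGContextTranslateCTM(context, 0, size.height);
    // CGContextScaleCTM(context, 1.0, -1.0);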
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    // Release the color space
    CGColorSpaceRelease(rgbColorSpace);
    // Release the context
    CGContextRelease(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
// Playback
- (void)playAction
{
    NSLog(@"************%@", self.theVideoPath);
    NSURL *sourceMovieURL = [NSURL fileURLWithPath:self.theVideoPath];
    AVAsset *movieAsset = [AVURLAsset URLAssetWithURL:sourceMovieURL options:nil];
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:movieAsset];
    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.view.layer.bounds;
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.view.layer addSublayer:playerLayer];
    [player play];
    // An AVKit-based alternative is sketched after the listing
}

@end
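
A note on finishing the writer: finishWriting is deprecated on newer iOS versions in favor of the asynchronous finishWritingWithCompletionHandler:. A minimal sketch, assuming the same writerInput, videoWriter, and moviePath as in testCompressionSession above, could look like this:

[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    if (videoWriter.status == AVAssetWriterStatusCompleted) {
        NSLog(@"video written to %@", moviePath);
    } else {
        NSLog(@"writing failed: %@", videoWriter.error);
    }
}];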
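
Since AVKit.framework is already in the dependency list, playback can also be handled by AVPlayerViewController instead of adding an AVPlayerLayer by hand. A minimal sketch (the method name playWithAVKit is mine, not from the original code):

- (void)playWithAVKit
{
    NSURL *movieURL = [NSURL fileURLWithPath:self.theVideoPath];
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:movieURL];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play];
    }];
}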