iOS多张图片合成一个视频

iOS多张图片合成一个视频

- (void)viewDidLoad
{
    [super viewDidLoad];  // always call super in lifecycle overrides
    // Do any additional setup after loading the view.

    // Build the list of source images. The bundle's asset names are
    // inconsistent: images 2–14 carry an explicit ".jpg" extension,
    // while 1 and 15–31 are looked up by bare name.
    imageArr = [[NSMutableArray alloc] init];
    for (int i = 1; i <= 31; i++) {
        NSString *name = (i >= 2 && i <= 14)
            ? [NSString stringWithFormat:@"%d.jpg", i]
            : [NSString stringWithFormat:@"%d", i];
        UIImage *image = [UIImage imageNamed:name];
        // imageNamed: returns nil for a missing asset; inserting nil into
        // a collection would either crash or silently truncate the list.
        if (image) {
            [imageArr addObject:image];
        }
    }

    // "合成" (compose) button — kicks off the image → video export.
    UIButton *composeButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [composeButton setFrame:CGRectMake(100, 100, 100, 100)];
    [composeButton setTitle:@"合成" forState:UIControlStateNormal];
    [composeButton addTarget:self
                      action:@selector(testCompressionSession)
            forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:composeButton];

    // "播放" (play) button — plays back the exported movie file.
    UIButton *playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [playButton setFrame:CGRectMake(100, 200, 100, 100)];
    [playButton setTitle:@"播放" forState:UIControlStateNormal];
    [playButton addTarget:self
                   action:@selector(playAction)
         forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:playButton];
}

/// Composes the images in `imageArr` into a QuickTime movie at
/// Documents/test.mov. Each image is held for 10 frames at a timescale
/// of 10 (i.e. one second per image).
- (void)testCompressionSession
{
    NSLog(@"开始");
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [[paths firstObject] stringByAppendingPathComponent:
                              [NSString stringWithFormat:@"%@.mov", @"test"]];
    self.theVideoPath = moviePath;
    CGSize size = CGSizeMake(320, 400);  // output video dimensions

    // [self writeImages:imageArr ToMovieAtPath:moviePath withSize:size inDuration:4 byFPS:30]; // alternative approach

    NSError *error = nil;

    // AVAssetWriter refuses to write over an existing file — remove it first.
    unlink([moviePath UTF8String]);
    NSLog(@"path->%@", moviePath);

    // ---- initialize compression engine ----
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (!videoWriter) {
        // Check the returned object, not the error pointer: the error may be
        // untouched on success, and is only meaningful when init fails.
        NSLog(@"error = %@", [error localizedDescription]);
        return;
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];

    // Pixel format must match what pixelBufferFromCGImage:size: produces (32ARGB).
    NSDictionary *sourcePixelBufferAttributesDictionary =
        [NSDictionary dictionaryWithObjectsAndKeys:
         [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
         kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor
            assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                       sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Feed pixel buffers on a background serial queue as the input drains.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            // 10 frames per image; stop once every image has been emitted.
            if (++frame >= (int)[imageArr count] * 10) {
                [writerInput markAsFinished];
                // -finishWriting is deprecated since iOS 6; use the
                // asynchronous completion-handler variant instead.
                [videoWriter finishWritingWithCompletionHandler:^{
                    NSLog(@"finishWriting status=%ld", (long)videoWriter.status);
                }];
                break;
            }

            int idx = frame / 10;  // index of the source image for this frame
            NSLog(@"idx==%d", idx);

            CVPixelBufferRef buffer =
                [self pixelBufferFromCGImage:[[imageArr objectAtIndex:idx] CGImage] size:size];

            if (buffer) {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)]) {
                    NSLog(@"FAIL");
                } else {
                    NSLog(@"OK");
                }
                // Balance the +1 create in pixelBufferFromCGImage:size:.
                // Braced explicitly — in the original this release sat after a
                // brace-less else, which is a classic dangling-statement trap.
                CVPixelBufferRelease(buffer);
            }
        }
    }];
}

/// Renders a CGImage into a newly created 32ARGB CVPixelBuffer of the given
/// size. Returns a +1-retained buffer — the CALLER must CVPixelBufferRelease
/// it — or NULL if the buffer could not be created.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        // NSParameterAssert compiles out in release builds; fail gracefully.
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's real bytes-per-row: CoreVideo may pad each row for
    // alignment, so hard-coding 4 * width can skew every scan line.
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    // NOTE(review): draws at the image's native size, not scaled to `size`,
    // so oversized images are cropped and small ones leave an unfilled
    // area — confirm this is the intended framing.
    CGContextDrawImage(context,
                       CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)),
                       image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

/// Plays back the movie previously exported to self.theVideoPath.
- (void)playAction
{
    // NOTE(review): MPMoviePlayerViewController was deprecated in iOS 9 —
    // migrate to AVPlayerViewController when the deployment target allows.
    MPMoviePlayerViewController *theMovie =
        [[MPMoviePlayerViewController alloc]
            initWithContentURL:[NSURL fileURLWithPath:self.theVideoPath]];
    // Configure the player before presenting it, not after.
    theMovie.moviePlayer.movieSourceType = MPMovieSourceTypeFile;
    [self presentMoviePlayerViewControllerAnimated:theMovie];
    [theMovie.moviePlayer play];
}


// Second approach: spread a fixed total duration evenly across the images.
/// Synchronously writes `imagesArray` into a QuickTime movie at `path`.
/// Each image occupies duration/count seconds at the given frame rate.
- (void)writeImages:(NSArray *)imagesArray
      ToMovieAtPath:(NSString *)path
           withSize:(CGSize)size
         inDuration:(float)duration
              byFPS:(int32_t)fps
{
    // Guard the divide below; nothing to write for an empty array.
    int imagesCount = (int)[imagesArray count];
    if (imagesCount == 0) {
        NSLog(@"writeImages: empty image array, nothing to write");
        return;
    }

    // Wire the writer:
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (!videoWriter) {
        // Check the returned object, not the error pointer.
        NSLog(@"writer error = %@", [error localizedDescription]);
        return;
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor
            assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                       sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Write the samples. Each image is one sample whose presentation time
    // advances by averageFrame frames (duration/imagesCount seconds).
    int frameCount = 0;
    float averageTime = duration / imagesCount;
    int averageFrame = (int)(averageTime * fps);

    for (UIImage *img in imagesArray) {
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[img CGImage] size:size];
        BOOL append_ok = NO;
        int j = 0;
        // Retry up to 30 times while the input is not ready for data.
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);

                CMTime frameTime = CMTimeMake(frameCount, fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d,kRecordingFPS:%d,frameSeconds:%f",
                      frameCount, fps, frameSeconds);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                if (buffer) {
                    [NSThread sleepForTimeInterval:0.05];
                }
            } else {
                printf("adaptor not ready %d,%d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }

        // Balance the +1 retain from pixelBufferFromCGImage:size: —
        // the original leaked one pixel buffer per image.
        if (buffer) {
            CVPixelBufferRelease(buffer);
        }

        frameCount = frameCount + averageFrame;
    }

    // Finish the session (asynchronously; -finishWriting is deprecated).
    [videoWriterInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"finishWriting");
    }];
}

推荐一个别人写好的示例项目:BearsG(原文链接似乎在此处被截断,待补充)

你可能感兴趣的:(ios,视频,图片)