最近做了个视频项目,分享下源码。
掌握一个主线:视频原始数据 --> FFmpeg --> YUV --> RGB --> UIImage
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// .h文件
#import <AVFoundation/AVFoundation.h>
#include "libavformat/avformat.h"
#include <libavutil/opt.h>
#include <libavutil/audioconvert.h>
#include <libavutil/common.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>
#include <libswscale/swscale.h>
@interface VideioDecoder : NSObject {
AVCodecContext *codetext;    // H.264 decoder context (owned; freed in releaseFFMPEG)
struct SwsContext *scxt420;  // YUV420P -> RGBA scaler (owned; freed in releaseFFMPEG)
AVFrame *frame420;           // holds the decoded YUV420P picture
AVFrame *frameRgba;          // holds the RGBA-converted picture
NSRecursiveLock* _fLock;     // serializes access to the decode state
int _nOldWidth;              // last configured frame width
int _nOldHeight;             // last configured frame height
}
/// Shared singleton decoder.
/// Fixed: the original declared the nonexistent type `VideioFrameDecoder *`.
+ (VideioDecoder *)shareInstance;
/// One-time FFmpeg setup; call once before feeding any data.
- (void)initFFMPEG;
/// Decodes one H.264 unit contained in the first nLen bytes of data.
- (void)didFinish:(NSData *)data Len:(int)nLen;
@end
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// .m文件
// 以单例形式处理
// Singleton storage for the shared decoder.
static VideioDecoder *instance = nil;

/// Returns the shared decoder, created exactly once.
/// Uses dispatch_once — the canonical thread-safe lazy singleton — instead of
/// taking a @synchronized lock on every call.
+ (VideioDecoder *)shareInstance {
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
    instance = [[self alloc] init];
});
return instance;
}
/// Designated initializer: creates the recursive lock that guards all
/// decode state. FFmpeg itself is set up separately via -initFFMPEG.
- (id)init {
if ((self = [super init])) {
    _fLock = [NSRecursiveLock new];
}
return self;
}
// MRC teardown (this file predates ARC): release owned objects, free the
// FFmpeg state, then invoke super — required under manual reference counting.
- (void)dealloc {
[_fLock release]; // balances [NSRecursiveLock new] in -init
[self releaseFFMPEG]; // frees scaler, frames and codec context
[super dealloc]; // mandatory under MRC (forbidden under ARC)
}
/// One-time FFmpeg setup: registers codecs and opens an H.264 decoder
/// context with nominal 320x240 @ 25fps parameters (the decoder updates the
/// real dimensions from the bitstream), plus the two frames used while
/// decoding. Call exactly once before -didFinish:Len:.
///
/// Fixes vs. the original: the missing ';' after `width = 320` and the stray
/// `#endif` (both compile errors), and the context leak when avcodec_open2
/// fails.
- (void)initFFMPEG {
// Register all formats and codecs (required before any avcodec_* call
// in this FFmpeg generation).
av_register_all();
avcodec_register_all();

AVCodec *pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (pCodec == NULL) {
    av_log(NULL, AV_LOG_ERROR, "Unsupported codec!\n");
    NSLog(@"init failed");
    return;
}

codetext = avcodec_alloc_context3(pCodec);
if (!codetext) {
    NSLog(@"avcodec_alloc_context is false");
    return;
}

codetext->bit_rate = 20;
codetext->codec_id = AV_CODEC_ID_H264;
codetext->width = 320;   // fixed: semicolon was missing in the original
codetext->height = 240;
_nOldWidth = codetext->width;
_nOldHeight = codetext->height;

AVRational rate;
rate.num = 1;
rate.den = 25;           // 25 fps time base
codetext->time_base = rate;
codetext->pix_fmt = PIX_FMT_YUV420P;

if (avcodec_open2(codetext, pCodec, NULL) < 0) {
    NSLog(@"不能打开编码库");
    av_free(codetext);   // fixed: don't leak the context on open failure
    codetext = NULL;
    return;
}

frame420 = avcodec_alloc_frame();
frameRgba = avcodec_alloc_frame();
NSLog(@"init success");
}
/// Frees every FFmpeg object owned by this decoder. Safe to call repeatedly:
/// each pointer is checked before freeing and cleared afterwards.
- (void)releaseFFMPEG {
// Scaler context, if one was cached.
if (scxt420 != NULL) {
    sws_freeContext(scxt420);
    scxt420 = NULL;
}

// The two working frames.
if (frame420 != NULL) {
    avcodec_free_frame(&frame420);
    frame420 = NULL;
}
if (frameRgba != NULL) {
    avcodec_free_frame(&frameRgba);
    frameRgba = NULL;
}

// Codec context last: close the codec, then free the allocation itself.
if (codetext != NULL) {
    avcodec_close(codetext);
    av_free(codetext);
    codetext = NULL;
}
}
/// Decodes one H.264 packet and converts the resulting YUV420P picture to a
/// UIImage (RGBA, orientation-right). Returns nil when the packet is empty or
/// no picture was produced.
///
/// Fixes vs. the original: av_malloc'd buffers were released with free()
/// (allocator mismatch — av_malloc memory must go back through av_free);
/// yuv_buff was leaked on the decode-failure path; and a fresh SwsContext was
/// built for every frame even though the scxt420 ivar exists to cache one
/// (releaseFFMPEG already frees it).
-(UIImage *)dataPacket:( AVPacket*)pack{
if (pack->data == NULL)
    return nil;

[_fLock lock];

UIImage *image = nil;
int outbuf_size = (codetext->width * codetext->height * 3) / 2; // YUV420P byte size
UInt8 *yuv_buff = (unsigned char *)av_malloc(outbuf_size);
avpicture_fill((AVPicture *)frame420, yuv_buff, PIX_FMT_YUV420P,
               codetext->width, codetext->height);

int got_picture_ptr = 0;
int nImageSize = avcodec_decode_video2(codetext, frame420, &got_picture_ptr, pack);

if ((nImageSize > 0) && 1 == got_picture_ptr) {
    int outRgbSize = codetext->width * codetext->height * 4; // RGBA byte size
    UInt8 *rgbout = (unsigned char *)av_malloc(outRgbSize);
    avpicture_fill((AVPicture *)frameRgba, (uint8_t *)rgbout, PIX_FMT_RGBA,
                   codetext->width, codetext->height);

    // Build the YUV420P -> RGBA scaler once and reuse it; the stream
    // dimensions are fixed at init time.
    if (scxt420 == NULL) {
        scxt420 = sws_getContext(codetext->width, codetext->height, PIX_FMT_YUV420P,
                                 codetext->width, codetext->height, PIX_FMT_RGBA,
                                 SWS_POINT, NULL, NULL, NULL);
    }
    sws_scale(scxt420, (const uint8_t **)frame420->data, frame420->linesize,
              0, codetext->height, frameRgba->data, frameRgba->linesize);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(frameRgba->data[0],
                                                    codetext->width, codetext->height, 8,
                                                    frameRgba->linesize[0], colorSpace,
                                                    kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(newImage);

    av_free(rgbout);   // fixed: was free(rgbout)
}
else {
    NSLog(@"dataPacket failed.........");
}

av_free(yuv_buff);     // fixed: was free()'d on success only, leaked on failure
[_fLock unlock];
return image;
}
/// Feeds one received H.264 unit (the first nLen bytes of data) through the
/// decoder. The packet borrows data's bytes — NSData keeps them alive for the
/// duration of this call.
- (void)didFinish:(NSData *)data Len:(int)nLen {
AVPacket pack;
av_init_packet(&pack);
pack.data = (uint8_t *)[data bytes];
pack.size = nLen;

UIImage *image = [self dataPacket:&pack];
if (image != nil) {
    // Handle the decoded frame here (display, buffering, ...).
}

av_free_packet(&pack);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
调用部分
初始化部分 (只调用一次的地方)
[[VideioDecoder shareInstance] initFFMPEG];
接收数据的地方
- (void)recvdata {
......
[[VideioDecoder shareInstance] didFinish:data Len:len];
.......
}
祝你们好运。