Saving YUV data after decoding with FFmpeg
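The code below opens an input file with libavformat, finds the first video stream, decodes it with libavcodec, and dumps every decoded frame into out.yuv as planar YUV 4:2:0, writing the Y, U and V planes row by row so that the alignment padding in linesize is skipped.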

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
};


int main(int argc, char* argv[])
{
    AVFormatContext *pFormatCtx;
    int i, videoindex;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameYUV;
    uint8_t *out_buffer;
    AVPacket *packet;
    int ret, got_picture;
    struct SwsContext *img_convert_ctx;
    // Input file path
    char filepath[] = "Titanic.ts";

    int frame_cnt;

    av_log_set_level(AV_LOG_DEBUG);
    av_register_all();
    pFormatCtx = avformat_alloc_context();


    if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0) {
        printf("Couldn't open input stream.\n");
        av_log(NULL, AV_LOG_ERROR, "Failed to open the input file!\n");
        return -1;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        printf("Couldn't find stream information.\n");
        return -1;
    }
    // Locate the first video stream
    videoindex = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++)
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    if (videoindex == -1) {
        printf("Didn't find a video stream.\n");
        return -1;
    }


    // Open a decoder for the video stream
    pCodecCtx = pFormatCtx->streams[videoindex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        printf("Codec not found.\n");
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        printf("Could not open codec.\n");
        return -1;
    }
    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();
    // Buffer for the converted YUV420P frame, wired to pFrameYUV's data pointers
    out_buffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
    packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    // Conversion context: decoder's native pixel format -> YUV420P at the same resolution
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
        pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);


    frame_cnt = 0;

    // Output file for the raw YUV dump ("wb" so each run starts with a fresh file)
    FILE *pf = fopen("out.yuv", "wb");


    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoindex) {
            /*
             * Code that dumps the H.264 bitstream could be added here:
             * take the data from packet and write it out with fwrite().
             */
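            /*
             * A minimal sketch of that idea, assuming a hypothetical
             * FILE *pf_h264 opened elsewhere with fopen("out.h264", "wb").
             * For MPEG-TS input the packets are already in Annex B format;
             * for MP4/FLV input they would first need the h264_mp4toannexb
             * bitstream filter before the raw dump is playable:
             *
             *     if (pf_h264)
             *         fwrite(packet->data, 1, packet->size, pf_h264);
             */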
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0) {
                printf("Decode Error.\n");
                return -1;
            }
            if (got_picture) {
                // Save the YUV data; pFrame is written directly, which assumes the
                // decoder's native pixel format is planar YUV 4:2:0
                if (pf)
                {
                    int nWidth = pFrame->width;
                    int nHeight = pFrame->height;

                    // Write the planes in Y, U, V order: swapping the U and V planes
                    // would give the dumped video a color cast.
                    // Each plane is written row by row because linesize may be larger
                    // than the visible width (alignment padding).
                    for (i = 0; i < nHeight; i++)
                    {
                        fwrite(pFrame->data[0] + i * pFrame->linesize[0], 1, nWidth, pf);
                    }
                    // U and V planes are subsampled to half width and half height
                    for (i = 0; i < nHeight / 2; i++)
                    {
                        fwrite(pFrame->data[1] + i * pFrame->linesize[1], 1, nWidth / 2, pf);
                    }
                    for (i = 0; i < nHeight / 2; i++)
                    {
                        fwrite(pFrame->data[2] + i * pFrame->linesize[2], 1, nWidth / 2, pf);
                    }
                }


                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                          pFrameYUV->data, pFrameYUV->linesize);
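                /*
                 * After sws_scale() pFrameYUV holds the same picture as tightly
                 * packed YUV420P (avpicture_fill() laid it out with no row
                 * padding), so an alternative to the row-by-row dump above
                 * would be three plain fwrite() calls: width*height bytes of Y
                 * followed by width*height/4 bytes each of U and V.
                 */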
printf("Decoded frame index: %d\n",frame_cnt);


frame_cnt++;


}
}
av_free_packet(packet);
}
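    /*
     * Note: a fully complete dump would also flush the decoder at this point
     * by feeding it empty packets (packet->data = NULL, packet->size = 0)
     * until got_picture comes back 0, so that frames still buffered inside
     * the codec are written out as well; that step is omitted here.
     */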
    if (pf)
    {
        fclose(pf);
    }


    // Release everything that was allocated
    sws_freeContext(img_convert_ctx);
    av_free(out_buffer);
    av_free(packet);
    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}
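To check the result, the raw dump can be played back with ffplay; since a .yuv file carries no header, the pixel format and the actual resolution of the stream have to be supplied on the command line (the 640x360 below is only a placeholder):

ffplay -f rawvideo -pixel_format yuv420p -video_size 640x360 out.yuv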
