On muxing H.264 into MP4 with FFmpeg

          Following the muxing example that ships with FFmpeg to wrap H.264 into MP4 is the right general approach, but a few extra pieces still have to be added.

Here are some notes from earlier research:

    An H.264 elementary stream comes in one of two packagings:
    one is Annex B mode, the traditional format: it has start codes, and the SPS and PPS are carried inside the elementary stream itself;
    the other is MP4 (AVCC) mode, usually found in MP4/MKV: there are no start codes, the SPS, PPS and other parameter data are stored in the container, and every frame is prefixed with its length.
Many decoders only accept Annex B, so MP4-mode data needs to be converted:
in FFmpeg the h264_mp4toannexb bitstream filter performs this conversion (a quick way to tell the two packagings apart is sketched right below).
Implementation:
Register the filter:
avcbsfc = av_bitstream_filter_init("h264_mp4toannexb");
Convert the bitstream:
av_bitstream_filter_filter(AVBitStreamFilterContext *bsfc,
                           AVCodecContext *avctx, const char *args,
                           uint8_t **poutbuf, int *poutbuf_size,
                           const uint8_t *buf, int buf_size, int keyframe)
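For reference, a minimal per-packet loop with this legacy API could look like the sketch below (ifmt_ctx and pkt are illustrative names for an opened input context and an AVPacket; note that when the filter allocates a new output buffer, that buffer has to be freed separately or it leaks):

AVBitStreamFilterContext *bsfc = av_bitstream_filter_init("h264_mp4toannexb");
AVPacket pkt;
while (av_read_frame(ifmt_ctx, &pkt) >= 0) {
    uint8_t *out_data = NULL;
    int      out_size = 0;
    int ret = av_bitstream_filter_filter(bsfc,
                                         ifmt_ctx->streams[pkt.stream_index]->codec,
                                         NULL, &out_data, &out_size,
                                         pkt.data, pkt.size,
                                         pkt.flags & AV_PKT_FLAG_KEY);
    /* ret > 0 means the filter allocated a new buffer in out_data */
    if (ret >= 0) {
        /* ... decode or write out_data / out_size here ... */
    }
    if (ret > 0 && out_data != pkt.data)
        av_free(out_data);              /* free the filter's own buffer */
    av_free_packet(&pkt);
}
av_bitstream_filter_close(bsfc);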



Below is my actual muxing code:




#include "stdafx.h"
#ifndef INT64_C 
#define INT64_C(c) (c ## LL) 
#define UINT64_C(c) (c ## ULL) 
#endif 
#ifdef __MINGW32__
#undef main /* Prevents SDL from overriding main() */
#endif
#ifdef __cplusplus 
extern "C" {
#endif
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h" 
#include "libavutil/imgutils.h"  
#include "libavutil/opt.h"    
#include "libavutil/mathematics.h"   
#include "libavutil/samplefmt.h"
#include "SDL\SDL.h"
#include "SDL\SDL_thread.h"
#ifdef __cplusplus 
}
#endif 
#include <iostream>
#include <cstdio>
using namespace std;
 #pragma comment(lib,"ws2_32.lib")
int main(int argc, char *argv[]) {
  AVFormatContext *pFormatCtx,*ofmt=NULL;
  pFormatCtx =avformat_alloc_context();
  int             i, videoStream,ret;
  AVCodecContext  *pCodecCtx;
  AVCodec         *pCodec;
  AVFrame         *pFrame,*pFrameYUV,*pFrameRGB; 
  SDL_Overlay     *bmp;
  SDL_Surface     *screen;
  SDL_Rect        rect;
  SDL_Event       event;
  av_register_all();
  avformat_network_init();
 // char *infile="udp://127.0.0.1:6666";
  char *infile="jl.h264";
  char *outname="lk.mp4";


  if(avformat_open_input(&pFormatCtx,infile,NULL,NULL)!=0)
  {
  cout<<"open file fails"<<endl;
  return -1;
  }
  if(avformat_find_stream_info(pFormatCtx,NULL)<0)
  {
  cout<<"find stream fails"<<endl;
  return -1;
  }
   videoStream=-1;
   av_dump_format(pFormatCtx, 0, infile, false);
   for(i=0;i<pFormatCtx->nb_streams;i++)
   {
  if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
  {
  videoStream=i;
  break;
  }
   }
   if(videoStream==-1)
   {
  cout<<"no video stream"<    }
   pCodecCtx=pFormatCtx->streams[videoStream]->codec;
    //av_opt_set( pCodecCtx->priv_data, "preset", "superfast", 0);
// av_opt_set( pCodecCtx->priv_data, "tune", "zerolatency", 0);
   pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
   if(pCodec==NULL)
   {
  cout<<"no find decoder"<  return -1;
   }
   if(avcodec_open2(pCodecCtx,pCodec,NULL)<0)
   {
  cout<< "open decoder fails"<  return -1;
   }
   //allocate frames for decoding and conversion
   pFrame=avcodec_alloc_frame();
   pFrameYUV=avcodec_alloc_frame();
   pFrameRGB=avcodec_alloc_frame();
   //get the buffer size required for the given pixel format and resolution
   uint8_t *out_buffer,*rgb_buffer;
   out_buffer=new uint8_t[avpicture_get_size(PIX_FMT_YUV420P,pCodecCtx->width,pCodecCtx->height)];
    rgb_buffer=new uint8_t[avpicture_get_size(PIX_FMT_RGB24,pCodecCtx->width,pCodecCtx->height)];
   avpicture_fill((AVPicture*)pFrameYUV,out_buffer,PIX_FMT_YUV420P,pCodecCtx->width,pCodecCtx->height);
    avpicture_fill((AVPicture*)pFrameRGB,rgb_buffer,PIX_FMT_RGB24,pCodecCtx->width,pCodecCtx->height);
   bool quit=false;
//   SDL_Event event;
   if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER))
   {
  cout<<"init SDL fails"<  return -1;
   }
   //create a window with the given width and height
//   SDL_Surface *screen;
   screen=SDL_SetVideoMode(pCodecCtx->width,pCodecCtx->height,0,0);
   if(!screen)
   {
  cout<<"build screen fails"<  return -1;
   }
   //create a YUV overlay on the screen so we can feed video into it
//   SDL_Overlay *bmp;
   bmp=SDL_CreateYUVOverlay(pCodecCtx->width,pCodecCtx->height,SDL_YV12_OVERLAY,screen);
   //allocate the packet
   int y_size=pCodecCtx->width*pCodecCtx->height;
   AVPacket* packet=(AVPacket*)av_malloc(sizeof(AVPacket));
   if(av_new_packet(packet,y_size))
   {
  cout<<"av_new_packet fails"<  return -1;
   }
   //set up scaling contexts based on the codec parameters
   struct SwsContext *img_convert_ctx;
   img_convert_ctx=sws_getContext(pCodecCtx->width,pCodecCtx->height,pCodecCtx->pix_fmt,pCodecCtx->width,pCodecCtx->height,PIX_FMT_YUV420P,SWS_BICUBIC,NULL,NULL,NULL);
    struct SwsContext *img_convert_rgb;
   img_convert_rgb=sws_getContext(pCodecCtx->width,pCodecCtx->height,pCodecCtx->pix_fmt,pCodecCtx->width,pCodecCtx->height,PIX_FMT_RGB24,SWS_BICUBIC,NULL,NULL,NULL);
   
   //keep pulling frames from the stream into the packet, and check whether the user wants to quit
   int got_picture;
   int kk=0;




     AVStream *in_stream,*out_stream;


   //output setup
   avformat_alloc_output_context2(&ofmt,NULL,"mp4",outname);
   if(!ofmt)
   {
  cout<<"creat output fails"<  exit(1);
   }


   
  in_stream=pFormatCtx->streams[0];
  
  out_stream=avformat_new_stream(ofmt,in_stream->codec->codec);
  if(!out_stream)
  {
  cout<<"out_stream build fails"<  exit(1);
  }
  int ret1=avcodec_copy_context(out_stream->codec,in_stream->codec);
  if(ret1<0)
  {
  cout<<"copy_context fails"<  }
  out_stream->codec->codec_tag=0;
  out_stream->r_frame_rate.num=25;
  out_stream->r_frame_rate.den=1;
  if(ofmt->oformat->flags & AVFMT_GLOBALHEADER)
 {
out_stream->codec->flags|=CODEC_FLAG_GLOBAL_HEADER;
 }     
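 //note: codec_tag=0 lets the MP4 muxer choose a compatible tag on its own;
 //AVFMT_GLOBALHEADER means this muxer wants SPS/PPS stored in the stream's
 //extradata (the avcC box) instead of being repeated in-band in every packet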
      
ret=avio_open(&ofmt->pb,outname,AVIO_FLAG_WRITE);
if(ret<0)
{
cout<<"could not open outfile"<}
       
//av_dump_format(ofmt, 0, outname, false);
int frame_index=0;
//FILE *fp=fopen("df.h264","ab");
ret =avformat_write_header(ofmt,NULL);
if(ret<0)
 {
  cout<<"avformat_write_header fails"< }
int key=0;
AVBitStreamFilterContext* h264bsfc =  av_bitstream_filter_init("h264_mp4toannexb"); 
 while(av_read_frame(pFormatCtx,packet)>=0 && quit==false)
      {
 //in=pFormatCtx->streams[packet->stream_index];
// out=ofmt->streams[packet->stream_index];
// ret=av_interleaved_write_frame(ofmt,packet);


 av_bitstream_filter_filter(h264bsfc, in_stream->codec, NULL, &(packet->data), &(packet->size), packet->data, packet->size, 0);
 if(packet->stream_index==videoStream)
 {
 ret=avcodec_decode_video2(pCodecCtx,pFrame,&got_picture,packet);


 
//  cout<<"receive:"<pts< if(ret<0)
 {
 cout<<"decode fails"< //return -1;
 }
 if((got_picture)&&(ret>0))
 {
/* pFrame->data[0] += pFrame->linesize[0] * (pCodecCtx->height - 1);
                 pFrame->linesize[0] *= -1; 
                 pFrame->data[1] += pFrame->linesize[1] * (pCodecCtx->height / 2 - 1);
                 pFrame->linesize[1] *= -1;
                pFrame->data[2] += pFrame->linesize[2] * (pCodecCtx->height / 2 - 1);
             pFrame->linesize[2] *= -1;*/
 sws_scale(img_convert_ctx,(const uint8_t *const*)pFrame->data,pFrame->linesize,0,pCodecCtx->height,pFrameYUV->data,pFrameYUV->linesize);
 sws_scale(img_convert_rgb,(const uint8_t *const*)pFrame->data,pFrame->linesize,0,pCodecCtx->height,pFrameRGB->data,pFrameRGB->linesize);
 SDL_LockYUVOverlay(bmp);
 bmp->pixels[0]=pFrameYUV->data[0];
 bmp->pixels[2]=pFrameYUV->data[1];
 bmp->pixels[1]=pFrameYUV->data[2];
 bmp->pitches[0]=pFrameYUV->linesize[0];
 bmp->pitches[2]=pFrameYUV->linesize[1];
 bmp->pitches[1]=pFrameYUV->linesize[2];
 SDL_UnlockYUVOverlay(bmp);
 //rect gives the position where the SDL_Overlay is displayed
 rect.x=0;
 rect.y=0;
 rect.w=pCodecCtx->width;
 rect.h=pCodecCtx->height;
 SDL_DisplayYUVOverlay(bmp,&rect);
SDL_Delay(40);//delay 40 ms, roughly 25 frames per second
 while(SDL_PollEvent(&event))
 {
 if(event.type==SDL_QUIT)
 quit=true;
 }
 

//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
packet->pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
packet->dts=packet->pts;
packet->duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
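//worked example (assuming a raw H.264 input, for which the demuxer typically
//reports time_base = 1/1200000, and r_frame_rate = 25/1):
//  calc_duration    = 1000000/25                              = 40000 us per frame
//  packet->pts      = frame_index*40000/((1.0/1200000)*1000000) = frame_index*48000
//  packet->duration = 40000/((1.0/1200000)*1000000)             = 48000 input time-base ticks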
       
/* copy packet */
//convert PTS/DTS into the output stream's time base
packet->pts = av_rescale_q_rnd(packet->pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
packet->dts = av_rescale_q_rnd(packet->dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF);
packet->duration = av_rescale_q(packet->duration, in_stream->time_base, out_stream->time_base);
packet->pos = -1;
packet->stream_index=0;
/*
FILE *fp1=fopen("ff.h264","wb");
fwrite(packet->data,1,packet->size,fp1);
fclose(fp1);



FILE *fp2=fopen("ff1.h264","wb");
fwrite(packet->data,1,packet->size,fp2);
fclose(fp2);*/
packet->flags |=AV_PKT_FLAG_KEY;

                 //with this line the bitstream filter (AVBitStreamFilterContext) would not be needed
//写入(Write)
if (av_interleaved_write_frame(ofmt, packet) < 0)
{
printf( "Error muxing packet\n");
break;
}

printf("Write 1 Packet. size:%5d\tpts:%8d\n",packet->size,packet->pts);


 
}
}
 av_free_packet(packet);


   }
   //fclose(fp);
  av_write_trailer(ofmt);
  av_bitstream_filter_close(h264bsfc);
  avio_close(ofmt->pb);
  avformat_free_context(ofmt);
  SDL_Quit();
  sws_freeContext(img_convert_ctx);
  sws_freeContext(img_convert_rgb);
  delete []out_buffer;
  delete []rgb_buffer;
  av_free(pFrameYUV);
  av_free(pFrameRGB);
  av_free(pFrame);
  avcodec_close(pCodecCtx);
  avformat_close_input(&pFormatCtx);
return 0;
}

   //***************output configuration start
   
  /*
   avformat_alloc_output_context2(&ofmt,NULL,"h264",outname);
   if(!ofmt)
   {
  cout<<"create output context fails"<  exit(1);
   }
   for(int j=0;j<pFormatCtx->nb_streams;j++)
   { 
  AVStream *in_stream=pFormatCtx->streams[j];
  AVStream *out_stream=avformat_new_stream(ofmt,in_stream->codec->codec);
  if(!out_stream)
  {
  cout<<"out_stream build fails"<   }
  ret=avcodec_copy_context(out_stream->codec,in_stream->codec);
  if(ret<0)
  {
  cout<<"copy_context fails"<  }
  out_stream->codec->codec_tag=0;
  out_stream->r_frame_rate.num=25;
  out_stream->r_frame_rate.den=1;
  if(ofmt->oformat->flags & AVFMT_GLOBALHEADER)
  {
  out_stream->codec->flags|=CODEC_FLAG_GLOBAL_HEADER;
  }     
   }
   ret=avio_open(&ofmt->pb,outname,AVIO_FLAG_WRITE);
   if(ret<0)
   {
  cout<<"could not open outfile"<    }
   //av_dump_format(ofmt, 0, outname, false);
   ret =avformat_write_header(ofmt,NULL);
   if(ret<0)
     {
     cout<<"avformat_write_header fails"<      }
//   AVStream *in,*out;
*/


   //**************output configuration end
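A side note: in newer FFmpeg releases the av_bitstream_filter_* functions used above have been deprecated and removed; the same conversion is done through AVBSFContext. A rough sketch of the replacement, assuming FFmpeg 3.1 or later (error handling omitted, stream/packet names are illustrative):

const AVBitStreamFilter *filter = av_bsf_get_by_name("h264_mp4toannexb");
AVBSFContext *bsf_ctx = NULL;
av_bsf_alloc(filter, &bsf_ctx);
//copy the input stream's parameters into the filter, then initialize it
avcodec_parameters_copy(bsf_ctx->par_in, in_stream->codecpar);
av_bsf_init(bsf_ctx);

//per packet: send the packet in, then drain every filtered packet
av_bsf_send_packet(bsf_ctx, packet);
while (av_bsf_receive_packet(bsf_ctx, packet) == 0) {
    //... write or decode the filtered packet here ...
    av_packet_unref(packet);
}

av_bsf_free(&bsf_ctx);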

