Under FFmpeg-full-SDK-3.2, Tutorial01 is modified as follows:
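The substantive change from the original tutorial01 is that the img_convert() conversion step is handled with libswscale instead. A minimal sketch of just that replacement, using the same variable names as the full listing below:

// Old call from the original tutorial01 (commented out in the listing below):
// img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24,
//             (AVPicture *)pFrame, pCodecCtx->pix_fmt,
//             pCodecCtx->width, pCodecCtx->height);

// New: create a scaler context once, then convert each decoded frame with it
struct SwsContext *ctx = sws_getContext(
    pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,   // source size/format
    pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,        // destination size/format
    SWS_BICUBIC, NULL, NULL, NULL);
sws_scale(ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
          pFrameRGB->data, pFrameRGB->linesize);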

// Headers (paths may need adjusting to match how this SDK is packaged)
#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>   // required for sws_getContext/sws_scale

// SaveFrame is unchanged from the original tutorial01; its definition follows main()
void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame);

int main(int argc, char *argv[]) {
  AVFormatContext *pFormatCtx;
  int             i, videoStream;
  AVCodecContext  *pCodecCtx;
  AVCodec         *pCodec;
  AVFrame         *pFrame;
  AVFrame         *pFrameRGB;
  AVPacket        packet;
  int             frameFinished;
  int             numBytes;
  uint8_t         *buffer;
  struct SwsContext      *ctx;
 
  // Hard-coded test input for convenience; remove these two lines to take
  // the path from the command line instead
  argc = 2;
  argv[1] = "f:\\output.avi";

  if(argc < 2) {
    printf("Please provide a movie file\n");
    return -1;
  }
  // Register all formats and codecs
  av_register_all();
 
  // Open video file
  if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0)
    return -1; // Couldn't open file
 
  // Retrieve stream information
  if(av_find_stream_info(pFormatCtx)<0)
    return -1; // Couldn't find stream information
 
  // Dump information about file onto standard error
  dump_format(pFormatCtx, 0, argv[1], 0);
 
  // Find the first video stream
  videoStream=-1;
  for(i=0; i<pFormatCtx->nb_streams; i++)
    if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {
      videoStream=i;
      break;
    }
  if(videoStream==-1)
    return -1; // Didn't find a video stream
 
  // Get a pointer to the codec context for the video stream
  pCodecCtx=pFormatCtx->streams[videoStream]->codec;
 
  // Find the decoder for the video stream
  pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
  if(pCodec==NULL) {
    fprintf(stderr, "Unsupported codec!\n");
    return -1; // Codec not found
  }
  // Open codec
  if(avcodec_open(pCodecCtx, pCodec)<0)
    return -1; // Could not open codec
 
  // Allocate video frame
  pFrame=avcodec_alloc_frame();
 
  // Allocate an AVFrame structure
  pFrameRGB=avcodec_alloc_frame();
  if(pFrameRGB==NULL)
    return -1;
 
  // Determine required buffer size and allocate buffer
  numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
         pCodecCtx->height);
  buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
 
  // Assign appropriate parts of buffer to image planes in pFrameRGB
  // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
  // of AVPicture
  avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
   pCodecCtx->width, pCodecCtx->height);
  // Note: PIX_FMT_RGB24 here determines the output picture format
  ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
   pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
   PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
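  // (The scaler context is created once here, outside the read loop, and is
  //  released with sws_freeContext() after decoding finishes.)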
 
  // Read frames and save first five frames to disk
  i=0;
  while(av_read_frame(pFormatCtx, &packet)>=0) {
    // Is this a packet from the video stream?
    if(packet.stream_index==videoStream) {
      // Decode video frame
      avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
      packet.data, packet.size);
    
      // Did we get a video frame?
      if(frameFinished) {
    // Convert the image from its native format to RGB
    // img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24,
    //                   (AVPicture*)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width,
    //                  pCodecCtx->height);
  
    sws_scale(ctx, pFrame->data, pFrame->linesize,
              0, pCodecCtx->height, pFrameRGB->data,
              pFrameRGB->linesize);
  
    // Save the frame to disk
    if(++i<=5)
     SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height,
      i);
      }
    }
  
    // Free the packet that was allocated by av_read_frame
    av_free_packet(&packet);
  }
 
  // Free the scaler context
  sws_freeContext(ctx);

  // Free the RGB image
  av_free(buffer);
  av_free(pFrameRGB);
 
  // Free the YUV frame
  av_free(pFrame);
 
  // Close the codec
  avcodec_close(pCodecCtx);
 
  // Close the video file
  av_close_input_file(pFormatCtx);
 
  return 0;
}
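
The listing calls SaveFrame(), which is unchanged from the original tutorial01; it is reproduced below so the program is self-contained (it writes each frame out as a PPM image):

void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
  FILE *pFile;
  char szFilename[32];
  int  y;

  // Open file
  sprintf(szFilename, "frame%d.ppm", iFrame);
  pFile=fopen(szFilename, "wb");
  if(pFile==NULL)
    return;

  // Write header
  fprintf(pFile, "P6\n%d %d\n255\n", width, height);

  // Write pixel data
  for(y=0; y<height; y++)
    fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, pFile);

  // Close file
  fclose(pFile);
}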

 

Note:

The FFmpeg source tree ships a few example programs worth consulting:

for encoding see output_example.c, for decoding see apiexample.c.
