Following http://m.oschina.net/blog/56616, I got the FFmpeg sample program working.
gcc test_decode_frames.c -o test_decode_frames -I/usr/local/include -L/usr/local/lib -lavformat -lavcodec -lswscale -lavutil
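If FFmpeg was installed with its pkg-config files (an assumption; it depends on how the libraries were built and installed), the include and library flags can be pulled in automatically instead of hard-coding the /usr/local paths:

gcc test_decode_frames.c -o test_decode_frames $(pkg-config --cflags --libs libavformat libavcodec libswscale libavutil)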
#include <stdlib.h>
#include <stdio.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>

static void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame)
{
    FILE *pFile;
    char szFilename[32];
    int y;

    sprintf(szFilename, "frame%d.ppm", iFrame);
    pFile = fopen(szFilename, "wb");
    if (!pFile)
        return;

    /* PPM (P6) header: width, height, maximum color value */
    fprintf(pFile, "P6\n%d %d\n255\n", width, height);

    /* Write the RGB24 rows one by one; linesize may include padding */
    for (y = 0; y < height; y++)
        fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, width * 3, pFile);

    fclose(pFile);
}

int main(int argc, char *argv[])
{
    AVFormatContext *pFormatCtx = NULL;
    int i = 0;
    int videoStream = -1;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    AVFrame *pFrame = NULL;
    AVFrame *pFrameRGB = NULL;
    AVPacket packet;
    int gotPic = 0;
    int bytesUsed = 0;
    int numBytes = 0;
    uint8_t *buffer = NULL;
    struct SwsContext *img_convert_ctx = NULL;

    if (argc < 2) {
        fprintf(stderr, "Usage: %s <input file>\n", argv[0]);
        return -1;
    }

    // Register all muxers/demuxers and encoders/decoders
    av_register_all();

    // Open the file and read its header
    if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0)
        return -1;

    // Retrieve the stream information in the file
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
        return -1;

    // Dump the file information to the terminal
    av_dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if (videoStream == -1)
        return -1;

    // Open the decoder
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL)
        return -1;
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
        return -1;

    // Allocate the frame buffers
    pFrame = avcodec_alloc_frame();
    if (pFrame == NULL)
        return -1;
    pFrameRGB = avcodec_alloc_frame();
    if (pFrameRGB == NULL)
        return -1;
    numBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
    buffer = av_malloc(numBytes);
    avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
                   pCodecCtx->width, pCodecCtx->height);

    // Read frame data ==> decode ==> convert color space ==> save as PPM files
    i = 0;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStream) {
            gotPic = 0;
            bytesUsed = avcodec_decode_video2(pCodecCtx, pFrame, &gotPic, &packet);
            if (gotPic && bytesUsed > 0) {
                // Keeping the context outside the loop lets sws_getCachedContext
                // actually reuse it instead of allocating a new one per frame
                img_convert_ctx = sws_getCachedContext(img_convert_ctx,
                        pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                        pCodecCtx->width, pCodecCtx->height, PIX_FMT_RGB24,
                        SWS_BICUBIC, NULL, NULL, NULL);
                if (!img_convert_ctx) {
                    fprintf(stderr, "Cannot initialize sws conversion context\n");
                    exit(1);
                }
                sws_scale(img_convert_ctx,
                          (const uint8_t * const *)pFrame->data, pFrame->linesize,
                          0, pCodecCtx->height,
                          pFrameRGB->data, pFrameRGB->linesize);
                if (i++ < 50)
                    SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);
            }
        }
        // Release the packet allocated by av_read_frame to avoid a leak
        av_free_packet(&packet);
    }

    sws_freeContext(img_convert_ctx);
    av_free(buffer);
    av_free(pFrameRGB);
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    return 0;
}
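To try it out, pass any file FFmpeg can demux and decode (video.mp4 below is just a placeholder name, not from the original post). The program writes the first 50 decoded frames as frame1.ppm through frame50.ppm in the current directory; PPM opens in most image viewers, or can be converted with ImageMagick:

./test_decode_frames video.mp4
convert frame1.ppm frame1.png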