FFmpeg version: ffmpeg-3.4-win32-dev
SDL version: SDL2-2.0.7
References:
various related articles by Lei Xiaohua (雷神)...
Project files: FFMpeg + SDL2 playing a local AVI file
I hope it is of some help as a reference...
// FFmpeg_playTest.cpp : Defines the entry point for the console application.
//
#include "stdafx.h"
#ifdef __cplusplus
extern "C" {
#endif
#include "include/libavcodec/avcodec.h"
#include "include/libswscale/swscale.h"
#include "include/libavutil/imgutils.h"
#include "SDL2-2.0.7/include/SDL.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#ifdef __cplusplus
}
#endif
#pragma comment(lib, "lib/avcodec.lib")
#pragma comment(lib, "lib/avutil.lib")
#pragma comment(lib, "lib/swscale.lib")
#pragma comment(lib, "SDL2-2.0.7/lib/x86/SDL2.lib")
#define INBUF_SIZE 4096
int ShowFrame(struct SwsContext *pSwsContext, SDL_Renderer* pRenderer, SDL_Texture* pSDLTexture, AVCodecContext* pCodecContext, AVFrame* pSrcFrame, AVFrame* pDestFrame);
// Send one packet to the decoder and drain every frame it produces.
// Passing a NULL pkt puts the decoder into flush mode, which drains the
// frames still buffered inside it (used once at end of file).
static void decode(AVCodecContext *dec_ctx, AVFrame *frame, AVPacket *pkt)
{
int ret;
ret = avcodec_send_packet(dec_ctx, pkt);
if (ret < 0) {
fprintf(stderr, "Error sending a packet for decoding\n");
exit(1);
}
while (ret >= 0)
{
ret = avcodec_receive_frame(dec_ctx, frame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
return;
else if (ret < 0) {
fprintf(stderr, "Error during decoding\n");
exit(1);
}
printf("decoded frame %3d\n", dec_ctx->frame_number);
fflush(stdout);
}
}
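/*
 * Not called by this program: a minimal sketch of what actually "saving" a
 * decoded frame could look like, in the spirit of FFmpeg's decode_video
 * example. It dumps only the Y (luma) plane of an AVFrame as a grayscale PGM
 * image. The function name SaveGrayFrame and the "frame_%03d.pgm" pattern are
 * illustrative choices, not part of the original code.
 */
static void SaveGrayFrame(const AVFrame* frame, int index)
{
FILE* f = NULL;
char szName[64] = { 0 };
sprintf_s(szName, sizeof(szName), "frame_%03d.pgm", index);
fopen_s(&f, szName, "wb");
if (!f)
return;
// PGM header: magic number, width, height, maximum gray value
fprintf(f, "P5\n%d %d\n255\n", frame->width, frame->height);
// linesize[0] can be larger than width, so copy the luma plane line by line
for (int y = 0; y < frame->height; y++)
fwrite(frame->data[0] + y * frame->linesize[0], 1, frame->width, f);
fclose(f);
}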
int _tmain(int argc, _TCHAR* argv[])
{
FILE* pFile = NULL;
const char* chFileName = NULL;
//chFileName = "./test.avi";
chFileName = "./bigbuckbunny_480x272.h264";
fopen_s(&pFile, chFileName, "rb");
if (!pFile)
{
fprintf(stderr, "Could not open %s\n", chFileName);
exit(1);
}
const AVCodec *pCodec = NULL;
AVCodecParserContext* pParserCtx = NULL;
AVCodecContext* pCodecCtx = NULL;
AVFrame* pFram = NULL;
uint8_t inbuf[INBUF_SIZE + AV_INPUT_BUFFER_PADDING_SIZE] = {0};
uint8_t *pData = NULL;
size_t data_size = 0;
int iRet = 0;
AVPacket *pPkt = NULL;
avcodec_register_all();
pPkt = av_packet_alloc();
if (!pPkt)
{
printf("av_packet_alloc failed.\n");
exit(1);
}
memset(inbuf + INBUF_SIZE, 0, AV_INPUT_BUFFER_PADDING_SIZE);
//find the video decoder
//pCodec = avcodec_find_decoder(AV_CODEC_ID_MPEG4);
pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!pCodec)
{
fprintf(stderr, "Codec not found.\n");
exit(1);
}
pParserCtx = av_parser_init(pCodec->id);
if (pParserCtx == NULL)
{
fprintf(stderr, "parser not found.\n");
exit(1);
}
pCodecCtx = avcodec_alloc_context3(pCodec);
if (pCodecCtx == NULL)
{
fprintf(stderr, "could not allocate video contex \n");
exit(1);
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
fprintf(stderr, "could not open codec\n");
exit(1);
}
pFram = av_frame_alloc();
if (pFram == NULL)
{
fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
// Initialize SDL
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
{
printf("Could not initialize SDL --%s\n", SDL_GetError());
exit(1);
}
SDL_Window* pScreen = NULL;
SDL_Renderer* pSdlRenderer = NULL;
SDL_Texture* pSdlTexture = NULL;
SDL_Event sdlEvent;
struct SwsContext *img_convert_ctx = NULL;
AVFrame* pFrameYUV = NULL;
uint8_t* out_buffer = NULL;
while (!feof(pFile))
{
data_size = fread(inbuf, 1, INBUF_SIZE, pFile);
if (!data_size)
{
printf("read file failed.\n");
break;
}
pData = inbuf;
while (data_size > 0)
{
iRet = av_parser_parse2(pParserCtx, pCodecCtx, &pPkt->data, &pPkt->size,
pData, data_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
if (iRet < 0)
{
fprintf(stderr, "Error while parsing\n");
exit(1);
}
pData += iRet;
data_size -= iRet;
if (pPkt->size)
{
printf("[packet] size = %6d\n", pPkt->size);
switch (pParserCtx->pict_type)
{
case AV_PICTURE_TYPE_I: printf("Type:I\t"); break;
case AV_PICTURE_TYPE_P: printf("Type:P\t"); break;
case AV_PICTURE_TYPE_B: printf("Type:B\t"); break;
default: printf("Type:Other\t"); break;
}
//--------------------------------------------DECODE------------------------------
int ret;
ret = avcodec_send_packet(pCodecCtx, pPkt);
if (ret < 0) {
fprintf(stderr, "Error sending a packet for decoding\n");
exit(1);
}
while (ret >= 0)
{
ret = avcodec_receive_frame(pCodecCtx, pFram);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
{
//fprintf(stderr, "(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF\n");
break;
}
else if (ret < 0)
{
fprintf(stderr, "Error during decoding\n");
exit(1);
}
// The image width/height are only known after a frame has been decoded
// successfully, so this initialization can only be done here.
if (pFrameYUV == NULL)
{
pFrameYUV = av_frame_alloc(); // holds the converted YUV420P frame
out_buffer = new uint8_t[av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1)];
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
}
if (pScreen == NULL)
{
//SDL init-----------------------------------------
pScreen = SDL_CreateWindow("RTSP Client Demo",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
pCodecCtx->width,
pCodecCtx->height,
SDL_WINDOW_RESIZABLE | SDL_WINDOW_OPENGL);
if (pScreen == NULL)
{
printf("SDL: could not set video mode -exit.\n");
exit(1);
}
pSdlRenderer = SDL_CreateRenderer(pScreen, -1, 0);
pSdlTexture = SDL_CreateTexture(pSdlRenderer,
SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING,
pCodecCtx->width,
pCodecCtx->height);
}
ShowFrame(img_convert_ctx, pSdlRenderer, pSdlTexture, pCodecCtx, pFram, pFrameYUV); // render one frame
printf("displayed frame %3d, width = %d, height = %d\n", pCodecCtx->frame_number, pCodecCtx->width, pCodecCtx->height);
fflush(stdout);
}
//-------------------------------------END Decode-----------------------------------
}
else
{
//printf("pPkt->size = 0\n");
}
av_packet_unref(pPkt);
SDL_PollEvent(&sdlEvent);
switch (sdlEvent.type)
{
case SDL_QUIT:
SDL_Quit();
exit(0);
break;
default:
break;
}
}
}
decode(pCodecCtx, pFram, NULL); // flush the decoder to drain any buffered frames
if (pFile)
{
fclose(pFile);
pFile = NULL;
}
if (pSdlTexture)
{
SDL_DestroyTexture(pSdlTexture);
pSdlTexture = NULL;
}
if (pSdlRenderer)
{
SDL_DestroyRenderer(pSdlRenderer);
pSdlRenderer = NULL;
}
if (pScreen)
{
SDL_DestroyWindow(pScreen);
pScreen = NULL;
}
av_parser_close(pParserCtx);
avcodec_free_context(&pCodecCtx);
av_frame_free(&pFram);
if (out_buffer)
{
delete[] out_buffer;
out_buffer = NULL;
}
if (pFrameYUV)
{
av_frame_free(&pFrameYUV);
}
av_packet_free(&pPkt); // frees the packet itself, not just its payload
SDL_Quit();
return 0;
}
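/*
 * Sketch only (not called above): the commented-out "./test.avi" path would
 * need a container demuxer rather than the raw-bitstream parser used in
 * _tmain. A minimal libavformat read loop could look like this, assuming
 * avformat.h sits in the same include directory and avformat.lib is linked;
 * the function name DemuxVideoPackets is an illustrative choice.
 */
extern "C" {
#include "include/libavformat/avformat.h"
}
#pragma comment(lib, "lib/avformat.lib")
static int DemuxVideoPackets(const char* chFileName)
{
AVFormatContext* pFormatCtx = NULL;
AVCodec* pVideoCodec = NULL;
int iVideoIndex = -1;
int iPacketCount = 0;
av_register_all();
if (avformat_open_input(&pFormatCtx, chFileName, NULL, NULL) < 0)
{
fprintf(stderr, "Could not open %s\n", chFileName);
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
fprintf(stderr, "Could not find stream info\n");
avformat_close_input(&pFormatCtx);
return -1;
}
// Pick the best video stream and the decoder that goes with it
iVideoIndex = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pVideoCodec, 0);
if (iVideoIndex < 0)
{
fprintf(stderr, "No video stream found\n");
avformat_close_input(&pFormatCtx);
return -1;
}
AVPacket pkt;
av_init_packet(&pkt);
// Each packet read here already holds one frame's worth of data and could
// be passed straight to avcodec_send_packet; no AVCodecParser is needed.
while (av_read_frame(pFormatCtx, &pkt) >= 0)
{
if (pkt.stream_index == iVideoIndex)
iPacketCount++;
av_packet_unref(&pkt);
}
printf("read %d video packets from %s\n", iPacketCount, chFileName);
avformat_close_input(&pFormatCtx);
return 0;
}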
int ShowFrame(struct SwsContext *pSwsContext, SDL_Renderer* pRenderer, SDL_Texture* pSDLTexture, AVCodecContext* pCodecContext, AVFrame* pSrcFrame, AVFrame* pDestFrame)
{
if ( !pRenderer || !pSDLTexture || !pCodecContext || !pSrcFrame)
{
printf("Show frame failed, param is null.\n");
return -1;
}
// Convert pSrcFrame (decoder pixel format) to pDestFrame (YUV420P).
// Note: the SwsContext is created and freed on every call for simplicity;
// the pSwsContext parameter passed in is not reused.
pSwsContext = sws_getContext(pCodecContext->width,
pCodecContext->height,
pCodecContext->pix_fmt,
pCodecContext->width,
pCodecContext->height,
AV_PIX_FMT_YUV420P,
SWS_BICUBIC,
NULL,
NULL,
NULL);
sws_scale(pSwsContext, (const uint8_t* const*)pSrcFrame->data, pSrcFrame->linesize, 0, pSrcFrame->height, pDestFrame->data, pDestFrame->linesize);
sws_freeContext(pSwsContext);
// SDL rendering ----------------------------
SDL_Rect sdlRect;
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = pCodecContext->width;
sdlRect.h = pCodecContext->height;
// Passing only data[0] works here because pDestFrame was filled from one
// contiguous buffer (align = 1), so the U and V planes follow the Y plane.
SDL_UpdateTexture(pSDLTexture, &sdlRect, pDestFrame->data[0], pDestFrame->linesize[0]);
SDL_RenderClear(pRenderer);
SDL_RenderCopy(pRenderer, pSDLTexture, &sdlRect, &sdlRect);
SDL_RenderPresent(pRenderer);
// Delay 20 ms (crude frame pacing, not tied to the stream's frame rate)
SDL_Delay(20);
return 0;
}
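A possible refinement of ShowFrame, sketched below under the same data layout: sws_getCachedContext reuses the scaler between calls instead of rebuilding it for every frame, and SDL_UpdateYUVTexture passes the three planes with their own pitches, so it does not rely on the converted buffer being contiguous. The name ShowFrameCached is an illustrative choice and is not wired into the code above.
int ShowFrameCached(struct SwsContext** ppSwsContext, SDL_Renderer* pRenderer, SDL_Texture* pSDLTexture, AVCodecContext* pCodecContext, AVFrame* pSrcFrame, AVFrame* pDestFrame)
{
if (!ppSwsContext || !pRenderer || !pSDLTexture || !pCodecContext || !pSrcFrame || !pDestFrame)
return -1;
// Reuses the existing context when the conversion parameters are unchanged,
// otherwise frees it and creates a new one.
*ppSwsContext = sws_getCachedContext(*ppSwsContext,
pCodecContext->width, pCodecContext->height, pCodecContext->pix_fmt,
pCodecContext->width, pCodecContext->height, AV_PIX_FMT_YUV420P,
SWS_BICUBIC, NULL, NULL, NULL);
if (!*ppSwsContext)
return -1;
sws_scale(*ppSwsContext, (const uint8_t* const*)pSrcFrame->data, pSrcFrame->linesize, 0, pSrcFrame->height, pDestFrame->data, pDestFrame->linesize);
// Update the Y, U and V planes separately, each with its own pitch
SDL_UpdateYUVTexture(pSDLTexture, NULL,
pDestFrame->data[0], pDestFrame->linesize[0],
pDestFrame->data[1], pDestFrame->linesize[1],
pDestFrame->data[2], pDestFrame->linesize[2]);
SDL_RenderClear(pRenderer);
SDL_RenderCopy(pRenderer, pSDLTexture, NULL, NULL);
SDL_RenderPresent(pRenderer);
return 0;
}
The caller would own the cached context (the img_convert_ctx already declared in _tmain would do) and free it once with sws_freeContext after the playback loop ends.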