The complete code below compiles under VC2005. Once the program runs, the video does play back, but because no playback delay has been added yet, the video races by far too fast. Proper frame timing will be added in Tutorial 5; for now you can simply have the program sleep for a short while after displaying each frame to improve the behavior (see the sketch after the listing).
// ffmpegExe.cpp : main project file.

#include "stdafx.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"

//#include <windows.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <SDL/SDL.h>

#ifdef main
#undef main
#endif

#define SDL_AUDIO_BUFFER_SIZE 1024
static int sws_flags = SWS_BICUBIC;

int main(int argc, char *argv[])
{
    AVFormatContext *pFormatCtx;
    int i, videoStream(-1);
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame;
    AVPacket packet;
    int frameFinished;
    float aspect_ratio;
    AVCodecContext *aCodecCtx;
    SDL_Overlay *bmp;
    SDL_Surface *screen;
    SDL_Rect rect;
    SDL_Event event;

    if(argc < 2)
    {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }

    av_register_all();

    pFormatCtx = av_alloc_format_context();
    if (!pFormatCtx) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0)
        return -1; // Couldn't open file
    if(av_find_stream_info(pFormatCtx)<0)
        return -1; // Couldn't find stream information

    // Dump information about the file onto standard error
    dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    for(i=0; i<pFormatCtx->nb_streams; i++)
    {
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO && videoStream<0)
        {
            videoStream=i;
        }
    }
    if(videoStream==-1)
        return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL)
    {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }

    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
        return -1; // Could not open codec

    // Allocate video frame
    pFrame=avcodec_alloc_frame();

    uint8_t *buffer;
    int numBytes;
    // Determine required buffer size and allocate buffer
    numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
                                pCodecCtx->height);
    buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

#ifndef __DARWIN__
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
    if(!screen)
    {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        exit(1);
    }

    bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,
                               SDL_YV12_OVERLAY, screen);

    static struct SwsContext *img_convert_ctx;
    if (img_convert_ctx == NULL)
    {
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                                         pCodecCtx->pix_fmt,
                                         pCodecCtx->width, pCodecCtx->height,
                                         PIX_FMT_YUV420P,
                                         sws_flags, NULL, NULL, NULL);
        if (img_convert_ctx == NULL)
        {
            fprintf(stderr, "Cannot initialize the conversion context\n");
            exit(1);
        }
    }

    i=0;
    while(av_read_frame(pFormatCtx, &packet)>=0)
    {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream)
        {
            // Decode video frame
            avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
                                 packet.data, packet.size);
            // Did we get a complete video frame?
            if(frameFinished)
            {
                SDL_LockYUVOverlay(bmp);

                AVPicture pict;
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2];
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];

                // Convert the image into the YUV format that SDL uses
                sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
                          0, pCodecCtx->height, pict.data, pict.linesize);

                SDL_UnlockYUVOverlay(bmp);

                rect.x = 0;
                rect.y = 0;
                rect.w = pCodecCtx->width;
                rect.h = pCodecCtx->height;
                SDL_DisplayYUVOverlay(bmp, &rect);
                //Sleep(60);
            }
        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);

        SDL_PollEvent(&event);
        switch(event.type)
        {
        case SDL_QUIT:
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }
    };

    // Free the RGB buffer
    av_free(buffer);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    av_close_input_file(pFormatCtx);

    return 0;
}
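As a stopgap until Tutorial 5 introduces real A/V synchronization, here is a minimal sketch of the per-frame sleep mentioned above. It assumes you simply uncomment the pacing line right after SDL_DisplayYUVOverlay; the 40 ms value is an assumption (roughly 25 fps material), and SDL_Delay is used instead of the Windows-only Sleep(60) that the original listing left commented out.

                SDL_DisplayYUVOverlay(bmp, &rect);
                // Crude pacing: block ~40 ms per displayed frame (assumes ~25 fps source).
                // SDL_Delay() is portable across platforms; on Windows, Sleep(40) from
                // <windows.h> would do the same job. Proper timing based on packet
                // timestamps is the subject of Tutorial 5.
                SDL_Delay(40);

With this in place the playback rate is at least watchable, although it will still drift because the decode and display time of each frame is not accounted for.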
This article comes from: http://blog.csdn.net/mu399/article/details/5814859