《FFmpeg Learning (1)》
《FFmpeg Learning (2)》
《FFmpeg Learning (3)》
FFmpeg is an open-source program suite for recording and converting digital audio and video, and for turning them into streams. It is licensed under the LGPL or GPL and provides a complete solution for recording, converting, and streaming audio/video. It contains the very advanced audio/video codec library libavcodec; to guarantee high portability and codec quality, much of the code in libavcodec was developed from scratch.
FFmpeg official site: [link]
Download page for all FFmpeg versions: [link]
FFmpeg learning resources:
Dr. Lei Xiaohua's blog: [link]
Column: ffmpeg hands-on tutorials
Organized documentation for ffmpeg 2.5.6
Organized documentation for ffmpeg 3.4.2
Organized documentation for ffmpeg 4.0
Organized documentation for SDL 2.0.8
A Zhihu discussion interpreting FFmpeg: [link]
Now to my own FFmpeg development setup: VS2013 + ffmpeg-4.0 + SDL 2.0.8
Fixing the VS2013 linker error "module is unsafe for SAFESEH image" (with prebuilt FFmpeg libraries this is typically resolved via Linker → Advanced → Image Has Safe Exception Handlers → No, i.e. /SAFESEH:NO)
common.h
#ifndef COMMON_H_
#define COMMON_H_

#include <stdio.h>

// FFmpeg and SDL are C libraries; wrap their headers in extern "C"
// so the C++ compiler does not mangle the imported symbol names.
#ifdef __cplusplus
extern "C" {
#endif

#include "libavcodec/avcodec.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "SDL2/SDL.h"
#include "SDL2/SDL_thread.h"

#ifdef __cplusplus
}
#endif

#endif // COMMON_H_
main.cpp
#include <stdio.h>
#include "common.h"

// SDL.h redefines main as a macro; undo that so our own main() links.
#undef main

int main()
{
	av_register_all(); // register all formats and codecs (deprecated since FFmpeg 4.0, where registration is automatic)
	unsigned int nVersion = avcodec_version();
	printf("libavcodec version: %u.%u.%u\n", nVersion >> 16, (nVersion >> 8) & 0xFF, nVersion & 0xFF);
	return 0;
}
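If the project links cleanly, the program prints the libavcodec version; with ffmpeg-4.0 this should start with major version 58 (avcodec_version() packs major/minor/micro into one integer, decoded by the shifts above).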
For Git, see my post: 《Uploading Code to GitHub with Git》
Complete project: https://gitee.com/harray/MyFFmpegStudy
My code repositories: https://gitee.com/harray/
The testyuv branch is dedicated to YUV experiments.
Branch URL: https://gitee.com/harray/MyFFmpegStudy/tree/testyuv
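A quick size check before the code: a 320x180 YUV420P frame carries 320 * 180 = 57600 bytes of luma (Y) plus 57600 / 4 = 14400 bytes each of chroma (U and V), 86400 bytes per frame in total. These are exactly the three buffer sizes allocated below.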
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "common.h"
#undef main
int main()
{
	// Input: raw YUV420P video, 320x180 per frame
	FILE* fp_yuv = fopen("test_yuv420p_320x180.yuv", "rb");
	// Output: the processed frames
	FILE* fp_frame = fopen("frame_320x180_out.yuv", "wb");
	if (fp_yuv == NULL || fp_frame == NULL)
	{
		printf("cannot open input/output file\n");
		return -1;
	}
	// One YUV420P frame: W*H bytes of Y, W*H/4 bytes each of U and V
	char* buffer_y = (char*)malloc(320 * 180);
	char* buffer_u = (char*)malloc(320 * 180 / 4);
	char* buffer_v = (char*)malloc(320 * 180 / 4);
	// Read the first 30 frames; keep Y but overwrite both chroma planes
	// with 128 (the neutral value), which turns the frames grayscale
	for (int i = 0; i < 30; i++)
	{
		fread(buffer_y, 320 * 180, 1, fp_yuv);
		fread(buffer_u, 320 * 180 / 4, 1, fp_yuv);
		fread(buffer_v, 320 * 180 / 4, 1, fp_yuv);
		memset(buffer_u, 128, 320 * 180 / 4);
		memset(buffer_v, 128, 320 * 180 / 4);
		fwrite(buffer_y, 320 * 180, 1, fp_frame);
		fwrite(buffer_u, 320 * 180 / 4, 1, fp_frame);
		fwrite(buffer_v, 320 * 180 / 4, 1, fp_frame);
	}
	free(buffer_y);
	free(buffer_u);
	free(buffer_v);
	fclose(fp_yuv);
	fclose(fp_frame);
	return 0;
}
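To check the result, raw YUV can be played with ffplay by specifying the geometry explicitly (the parameters simply mirror the file written above):
ffplay -f rawvideo -pixel_format yuv420p -video_size 320x180 frame_320x180_out.yuv
The first 30 frames should appear in grayscale, since both chroma planes were overwritten with the neutral value 128.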
decoder branch URL: https://gitee.com/harray/MyFFmpegStudy/tree/decoder
#include <stdio.h>
#define __STDC_CONSTANT_MACROS // must precede the FFmpeg headers to take effect
#include "common.h"
#undef main
int main()
{
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameYUV;
uint8_t *out_buffer;
AVPacket *packet;
int y_size;
int ret, got_picture;
struct SwsContext *img_convert_ctx;
// input file path
char filepath[] = "Titanic.ts";
int frame_cnt;
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0)
{
printf("Couldn't open input stream.\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL)<0)
{
printf("Couldn't find stream information.\n");
return -1;
}
videoindex = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++)
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
if (videoindex == -1)
{
printf("Didn't find a video stream.\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL)
{
printf("Codec not found.\n");
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL)<0)
{
printf("Could not open codec.\n");
return -1;
}
/*
 * Exercise: add code here that prints the video information,
 * taken from pFormatCtx, using fprintf().
 */
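// A minimal sketch for the exercise above (my own fill-in, not from the
// original post): print a few standard libavformat fields to stderr.
fprintf(stderr, "format  : %s\n", pFormatCtx->iformat->name);
fprintf(stderr, "duration: %lld us\n", (long long)pFormatCtx->duration);
fprintf(stderr, "streams : %u\n", pFormatCtx->nb_streams);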
pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
packet = (AVPacket *)av_malloc(sizeof(AVPacket));
//Output Info-----------------------------
printf("--------------- File Information ----------------\n");
av_dump_format(pFormatCtx, 0, filepath, 0);
printf("-------------------------------------------------\n");
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
frame_cnt = 0;
while (av_read_frame(pFormatCtx, packet) >= 0)
{
if (packet->stream_index == videoindex)
{
/*
 * Exercise: add code here that writes out the H.264 bitstream,
 * taken from packet, using fwrite().
 */
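// Sketch for the exercise above (my fill-in): dump the raw compressed
// bitstream. It assumes a hypothetical FILE* fp_h264 opened earlier with
// fopen("output.h264", "wb"); since the original code never declares it,
// the call is left commented out:
// fwrite(packet->data, 1, packet->size, fp_h264);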
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0)
{
printf("Decode Error.\n");
return -1;
}
if (got_picture){
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameYUV->data, pFrameYUV->linesize);
printf("Decoded frame index: %d\n", frame_cnt);
/*
 * Exercise: add code here that writes out the YUV data,
 * taken from pFrameYUV, using fwrite().
 */
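// Sketch for the exercise above (my fill-in): append one YUV420P frame
// plane by plane. It assumes a hypothetical FILE* fp_out opened earlier
// in "wb" mode; since the original code never declares it, the calls are
// left commented out:
// y_size = pCodecCtx->width * pCodecCtx->height;
// fwrite(pFrameYUV->data[0], 1, y_size, fp_out);     // Y
// fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_out); // U
// fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_out); // V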
frame_cnt++;
}
}
av_free_packet(packet);
}
sws_freeContext(img_convert_ctx);
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}
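A note on API age: the example above keeps the original tutorial's calls (av_register_all, avcodec_decode_video2, avpicture_get_size, avpicture_fill, av_free_packet, and the streams[i]->codec field), all of which are deprecated in FFmpeg 4.0 but still compile. For reference, a minimal sketch of the send/receive API that replaces avcodec_decode_video2 in FFmpeg 4.0, usable in place of the decode call inside the read loop (error paths trimmed):
if (avcodec_send_packet(pCodecCtx, packet) == 0)
{
	while (avcodec_receive_frame(pCodecCtx, pFrame) == 0)
	{
		// one decoded frame is now in pFrame; convert and consume it here
		sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
			0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
	}
}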
play_video branch URL: https://gitee.com/harray/MyFFmpegStudy/tree/play_video
#include <stdio.h>
#include "common.h"
const int bpp = 12; // bits per pixel of YUV420P (12 bits = 1.5 bytes per pixel)
int screen_w = 640, screen_h = 360;
const int pixel_w = 640, pixel_h = 360;
unsigned char buffer[pixel_w*pixel_h*bpp / 8]; // holds one full YUV420P frame
//Refresh Event
#define REFRESH_EVENT (SDL_USEREVENT + 1)
//Break
#define BREAK_EVENT (SDL_USEREVENT + 2)
int thread_exit = 0;
int refresh_video(void *opaque)
{
thread_exit = 0;
while (thread_exit == 0)
{
SDL_Event event;
event.type = REFRESH_EVENT;
SDL_PushEvent(&event);
SDL_Delay(40); // push a refresh event roughly every 40 ms (~25 fps)
}
thread_exit = 0;
//Break
SDL_Event event;
event.type = BREAK_EVENT;
SDL_PushEvent(&event);
return 0;
}
#undef main
int main()
{
if (SDL_Init(SDL_INIT_VIDEO))
{
printf("Could not initialize SDL - %s\n", SDL_GetError());
return -1;
}
SDL_Window *screen = SDL_CreateWindow("Simplest Video Play SDL2", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
screen_w, screen_h, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
if (!screen)
{
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
return -1;
}
SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
Uint32 pixformat = 0;
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
pixformat = SDL_PIXELFORMAT_IYUV;
SDL_Texture* sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat, SDL_TEXTUREACCESS_STREAMING, pixel_w, pixel_h);
FILE *fp = NULL;
fp = fopen("sintel_640_360.yuv", "rb+");
if (fp == NULL)
{
printf("cannot open this file\n");
return -1;
}
SDL_Rect sdlRect;
SDL_Thread *refresh_thread = SDL_CreateThread(refresh_video, NULL, NULL);
SDL_Event event;
while (1)
{
//Wait
SDL_WaitEvent(&event);
if (event.type == REFRESH_EVENT){
if (fread(buffer, 1, pixel_w*pixel_h*bpp / 8, fp) != pixel_w*pixel_h*bpp / 8)
{
// Loop
fseek(fp, 0, SEEK_SET);
fread(buffer, 1, pixel_w*pixel_h*bpp / 8, fp);
}
SDL_UpdateTexture(sdlTexture, NULL, buffer, pixel_w);
//FIX: if the window was resized, render at the current window size
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
SDL_RenderClear(sdlRenderer);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
SDL_RenderPresent(sdlRenderer);
}
else if (event.type == SDL_WINDOWEVENT)
{
// the window was resized: update the cached dimensions
SDL_GetWindowSize(screen, &screen_w, &screen_h);
}
else if (event.type == SDL_QUIT)
{
thread_exit = 1;
}
else if (event.type == BREAK_EVENT)
{
break;
}
}
SDL_Quit();
return 0;
}
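The clip sintel_640_360.yuv is raw YUV420P data. If you only have an ordinary video file, a command along these lines (standard ffmpeg options; the input name is just an example) produces a matching raw file:
ffmpeg -i sintel.mp4 -vf scale=640:360 -pix_fmt yuv420p -f rawvideo sintel_640_360.yuv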
play_su branch URL: https://gitee.com/harray/MyFFmpegStudy/tree/play_su
#include <stdio.h>
#include "common.h"
//Refresh Event
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)
int thread_exit = 0;
int sfp_refresh_thread(void *opaque)
{
thread_exit = 0;
while (!thread_exit)
{
SDL_Event event;
event.type = SFM_REFRESH_EVENT;
SDL_PushEvent(&event);
SDL_Delay(40);
}
thread_exit = 0;
//Break
SDL_Event event;
event.type = SFM_BREAK_EVENT;
SDL_PushEvent(&event);
return 0;
}
#undef main
int main(int argc, char* argv[])
{
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameYUV;
uint8_t *out_buffer;
AVPacket *packet;
int ret, got_picture;
//------------SDL----------------
int screen_w, screen_h;
SDL_Window *screen;
SDL_Renderer* sdlRenderer;
SDL_Texture* sdlTexture;
SDL_Rect sdlRect;
SDL_Thread *video_tid;
SDL_Event event;
struct SwsContext *img_convert_ctx;
char filepath[] = "input.mov"; // the original post used a Chinese filename that was garbled in transfer; substitute any local video
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0)
{
printf("Couldn't open input stream.\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
printf("Couldn't find stream information.\n");
return -1;
}
videoindex = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++)
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
if (videoindex == -1){
printf("Didn't find a video stream.\n");
return -1;
}
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL)
{
printf("Codec not found.\n");
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
printf("Could not open codec.\n");
return -1;
}
pFrame = av_frame_alloc();
pFrameYUV = av_frame_alloc();
out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER))
{
printf("Could not initialize SDL - %s\n", SDL_GetError());
return -1;
}
//SDL 2.0 supports multiple windows
screen_w = pCodecCtx->width;
screen_h = pCodecCtx->height;
screen = SDL_CreateWindow("ffmpeg player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
screen_w, screen_h, SDL_WINDOW_OPENGL);
if (!screen)
{
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
return -1;
}
sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
packet = (AVPacket *)av_malloc(sizeof(AVPacket));
video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);
//------------SDL End------------
//Event Loop
for (;;)
{
//Wait
SDL_WaitEvent(&event);
if (event.type == SFM_REFRESH_EVENT)
{
//------------------------------
if (av_read_frame(pFormatCtx, packet) >= 0)
{
if (packet->stream_index == videoindex)
{
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0)
{
printf("Decode Error.\n");
return -1;
}
if (got_picture)
{
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
//SDL---------------------------
SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
SDL_RenderClear(sdlRenderer);
//SDL_RenderCopy( sdlRenderer, sdlTexture, &sdlRect, &sdlRect );
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, NULL);
SDL_RenderPresent(sdlRenderer);
//SDL End-----------------------
}
}
av_free_packet(packet);
}
else
{
//Exit Thread
thread_exit = 1;
}
}
else if (event.type == SDL_QUIT)
{
thread_exit = 1;
}
else if (event.type == SFM_BREAK_EVENT)
{
break;
}
}
sws_freeContext(img_convert_ctx);
SDL_Quit();
//--------------
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}
The code above is adapted from Dr. Lei Xiaohua's 《Making a Video Player Based on FFmpeg + SDL》.
Download link for the related videos: [link]
Password: n4ji
Starting from the next chapter I will move on to Qt + FFmpeg examples.
This series mainly records and shares my own learning notes.
Everyone is welcome to join my QQ group: 460952208