Demo environment: CentOS 7
FFmpeg's H.264 encoding requires the x264 library, so build and install it first:
[root@localhost ~]# wget ftp://ftp.videolan.org/pub/x264/snapshots/last_x264.tar.bz2
[root@localhost ~]# tar xf last_x264.tar.bz2
[root@localhost ~]# cd x264-snapshot-20171020-2245/
[root@localhost x264-snapshot-20171020-2245]# ./configure --prefix=/home/x264_dev --enable-shared --enable-static --disable-asm
[root@localhost x264-snapshot-20171020-2245]# make && make install
[root@localhost x264-snapshot-20171020-2245]# echo '/home/x264_dev/lib/' >> /etc/ld.so.conf && ldconfig -v   # add the x264 library directory to the system library path
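Before moving on, it may be worth confirming that pkg-config can see the freshly installed x264, since the FFmpeg configure step below relies on it. A quick check (not part of the original steps), assuming the install prefix above:
[root@localhost ~]# export PKG_CONFIG_PATH=/home/x264_dev/lib/pkgconfig/:$PKG_CONFIG_PATH
[root@localhost ~]# pkg-config --modversion x264   # prints the x264 version if the library's .pc file is found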
Build and install FFmpeg:
[root@localhost ~]# wget http://ffmpeg.org/releases/ffmpeg-3.4.tar.bz2
[root@localhost ~]# tar xf ffmpeg-3.4.tar.bz2
[root@localhost ~]# cd ffmpeg-3.4
[root@localhost ffmpeg-3.4]# export PKG_CONFIG_PATH=/home/x264_dev/lib/pkgconfig/:$PKG_CONFIG_PATH   # lets configure locate the x264 library via PKG_CONFIG_PATH
[root@localhost ffmpeg-3.4]# ./configure \
  --prefix=/home/ffmpeg_dev \
  --enable-shared \
  --enable-static \
  --disable-optimizations \
  --disable-x86asm \
  --enable-libx264 \
  --enable-gpl \
  --disable-cuvid \
  --disable-cuda
[root@localhost ffmpeg-3.4]# make && make install
[root@localhost ffmpeg-3.4]# echo '/home/ffmpeg_dev/lib/' >> /etc/ld.so.conf && ldconfig -v   # add the FFmpeg libraries to the system library path
# Notes:
#   x264 support requires the x264 library plus: --enable-libx264 --enable-gpl
#   GPU hardware acceleration is disabled with: --disable-cuda --disable-cuvid
#   Both shared and static libraries are built with: --enable-shared --enable-static
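Once make install finishes, a quick way to confirm that the new binary was really built with libx264 support (a sanity check, not part of the original steps):
[root@localhost ffmpeg-3.4]# /home/ffmpeg_dev/bin/ffmpeg -version | grep x264               # the configuration line should contain --enable-libx264
[root@localhost ffmpeg-3.4]# /home/ffmpeg_dev/bin/ffmpeg -encoders 2>/dev/null | grep 264   # libx264 should be listed among the H.264 encoders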
Download a test video and convert a clip of it to raw YUV (starting at second 5, 20 seconds long):
[root@localhost ~]# cd /home/
[root@localhost home]# wget http://sh.yinyuetai.com/uploads/videos/common/0E3E014EBF3448D901AF3519C4A1D4E0.mp4
[root@localhost home]# /home/ffmpeg_dev/bin/ffmpeg -ss 5 -t 20 -i 0E3E014EBF3448D901AF3519C4A1D4E0.mp4 -s 1280x720 -pix_fmt yuv420p 1280x720_yuv420p.yuv
[root@localhost home]# /home/ffmpeg_dev/bin/ffmpeg -ss 5 -t 20 -i 0E3E014EBF3448D901AF3519C4A1D4E0.mp4 -s 1280x720 -pix_fmt yuv422p 1280x720_yuv422p.yuv
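The raw file has no header, so the number of frames it holds has to be derived from its size: one YUV420P frame at 1280x720 takes 1280 * 720 * 3 / 2 = 1382400 bytes, and a 20-second clip at an assumed 25 fps (the source's real frame rate may differ) is roughly 500 frames. A quick check of how many complete frames the file actually contains:
[root@localhost home]# stat -c %s 1280x720_yuv420p.yuv                                   # size of the raw file in bytes
[root@localhost home]# echo $(( $(stat -c %s 1280x720_yuv420p.yuv) / (1280*720*3/2) ))   # number of complete YUV420P frames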
Common FFmpeg structures
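The program below only touches a handful of FFmpeg structures; a brief summary of their roles (descriptive only, the full definitions live in the FFmpeg 3.4 headers):
#include "libavcodec/avcodec.h"
AVCodec *codec;           /* static description of a codec, here the libx264 H.264 encoder */
AVCodecContext *codecCtx; /* encoder state: resolution, bit rate, time base, GOP size, pixel format */
AVFrame *frame;           /* one uncompressed picture: data[] plane pointers plus per-plane linesize[] */
AVPacket pkt;             /* one unit of compressed output: encoded bytes plus size and timestamps */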
Source program that converts YUV420P to H.264:
[root@localhost ffmpeg_dev]# cat main.c
#include <stdio.h>
#include <stdlib.h>
#include "libavutil/imgutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/opt.h"
#include "libavformat/avformat.h"
const char *inputFileName = NULL;
const char *outputFileName = NULL;
int frameWidth = 0;
int frameHeight = 0;
int bitRate = 0;
int frameTotal = 0;
FILE *pFileInput = NULL;
FILE *pFileOutput = NULL;
AVCodec *codec = NULL;
AVCodecContext *codecCtx = NULL;
AVFrame *frame = NULL;
AVPacket pkt;
static int parse_input_parameters(int argc, char **argv)
{
if(argc < 7)
{
printf("Usage: %s input.yuv output.h264 width height bitrate frames\n", argv[0]);
return -1;
}
inputFileName = argv[1];
outputFileName = argv[2];
pFileInput = fopen(inputFileName, "rb");
if(NULL == pFileInput)
{
printf("open file ERR: [%s] \n", inputFileName);
return -1;
}
pFileOutput = fopen(outputFileName, "wb+");
if(NULL == pFileOutput)
{
printf("open file ERR: [%s] \n", outputFileName);
return -1;
}
frameWidth = atoi(argv[3]);
frameHeight = atoi(argv[4]);
bitRate = atoi(argv[5]);
frameTotal = atoi(argv[6]);
return 0;
}
static int read_yuv_data(int color)
{
//color = 0 -> Y
//color = 1 -> U
//color = 2 -> V
int color_height = color == 0 ? frameHeight : frameHeight / 2;
int color_width = color == 0 ? frameWidth : frameWidth / 2;
int color_size = color_height * color_width;
int color_stride = frame->linesize[color];
if(color_width == color_stride)
{
//the plane is tightly packed, read it in one call
fread(frame->data[color], color_size, 1, pFileInput);
}
else
{
//the plane has row padding (linesize > width), read it line by line
int i;
for(i = 0; i < color_height; i++)
{
fread(frame->data[color] + i * color_stride, color_width, 1, pFileInput);
}
}
return color_size;
}
int main(int argc, char **argv)
{
if(parse_input_parameters(argc, argv) == 0)
{
printf("inputFile:%s \n", inputFileName);
printf("outputFile:%s \n", outputFileName);
printf("Frame resolution::[%d*%d] \n", frameWidth, frameHeight);
printf("freamToEncode=%d\n", frameTotal);
printf("rate:%d \n", bitRate);
}
else
{
printf("init ERROR\n");
return -1;
}
avcodec_register_all();//register all codecs
codec = avcodec_find_encoder(AV_CODEC_ID_H264);//find the H.264 encoder
if(NULL == codec)
{
printf("find AV_CODEC_ID_H264 fail! \n");
return -1;
}
//allocate an AVCodecContext instance
codecCtx = avcodec_alloc_context3(codec);
if(NULL == codecCtx)
{
printf("avcodec_alloc_context3 ERROR\n");
return -1;
}
//set encoder parameters
codecCtx->width = frameWidth;//frame width
codecCtx->height = frameHeight;//frame height
codecCtx->bit_rate = bitRate;//target bit rate
AVRational r = {1, 25};//time base of 1/25 second per tick, i.e. 25 fps
codecCtx->time_base = r;
codecCtx->gop_size = 12;//at most 12 frames between I-frames
codecCtx->max_b_frames = 1;
codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
av_opt_set(codecCtx->priv_data, "preset", "slow", 0);//x264 encoding preset
//open the encoder
if(avcodec_open2(codecCtx, codec, NULL) < 0)
{
printf("avcodec_open2 ERR\n");
return -1;
}
frame = av_frame_alloc();
if(NULL == frame)
{
printf("av_frame_alloc err \n");
return -1;
}
//set the frame properties and allocate its pixel buffers
frame->width = codecCtx->width;
frame->height = codecCtx->height;
frame->format = codecCtx->pix_fmt;
if(av_image_alloc(frame->data, frame->linesize, frame->width, frame->height, frame->format, 32) < 0)//32-byte alignment
{
printf("ERROR av_image_alloc\n");
return -1;
}
int i;
for(i = 0; i < frameTotal; i++)
{
av_init_packet(&pkt);
pkt.data = NULL;
pkt.size = 0;
read_yuv_data(0);
read_yuv_data(1);
read_yuv_data(2);
frame->pts = i;
int got_packet;
//int avcodec_encode_video2(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr);
if(avcodec_encode_video2(codecCtx, &pkt, (const AVFrame *)frame, &got_packet) < 0)
{
printf("avcodec_encodec_video2 ERR \n");
return -1;
}
if(got_packet)
{
printf("Write packet of frame [%d], size=[%d] \n", i, pkt.size);
//size_t fwrite(const void *ptr, size_t size, size_t nmemb, FILE *stream);
fwrite(pkt.data, 1, pkt.size, pFileOutput);
av_packet_unref(&pkt);
}
}
//flush the encoder: pass a NULL frame until no more delayed packets are returned
int got_packet = 1;
while(got_packet)
{
if(avcodec_encode_video2(codecCtx, &pkt, NULL, &got_packet) < 0)
{
printf("avcodec_encodec_video2 ERR \n");
return -1;
}
if(got_packet)
{
printf("Write cache packet of frame [%d], size=[%d] \n", i, pkt.size);
fwrite(pkt.data, 1, pkt.size, pFileOutput);
av_packet_unref(&pkt);
}
}
fclose(pFileInput);
fclose(pFileOutput);
avcodec_close(codecCtx);
av_free(codecCtx);
av_freep(&frame->data[0]);
av_frame_free(&frame);
return 0;
}
[root@localhost ffmpeg_dev]#
The makefile:
[root@localhost ffmpeg_dev]# cat makefile
FLAGS = -Wall -g
INCLUDEPATH = -I /home/ffmpeg_dev/include/
LIBPATH = -L /home/ffmpeg_dev/lib/
LIBS = -l avcodec \
       -l pthread \
       -l avutil \
       -l m \
       -l dl \
       -l swresample
exe = yuv2h264

$(exe):
	gcc main.c ${FLAGS} ${INCLUDEPATH} ${LIBPATH} ${LIBS} -o $@

clean:
	rm -rf ${exe}
[root@localhost ffmpeg_dev]#
Compile and run to convert the YUV420P file to H.264:
[root@localhost ffmpeg_dev]# cat auto.sh
make clean
make
# Note: PotPlayer can play a raw H264 file directly
# Arguments: YUV420P input file, H264 output file, YUV width and height, target bit rate, number of frames to encode (must not exceed the number of frames in the YUV file)
./yuv2h264 1280x720_yuv420p.yuv 1280x720.h264 1280 720 819200 250
[root@localhost ffmpeg_dev]#
Opening the H.264 file in a player:
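Any player that understands a raw H.264 elementary stream (PotPlayer, VLC, ffplay) can open 1280x720.h264 directly. If only the command line is available, the tools built above can also inspect the stream or wrap it into MP4 for ordinary players; a quick sketch, assuming ffprobe was built (it is by default):
[root@localhost ffmpeg_dev]# /home/ffmpeg_dev/bin/ffprobe 1280x720.h264                          # should report a Video stream: h264, yuv420p, 1280x720
[root@localhost ffmpeg_dev]# /home/ffmpeg_dev/bin/ffmpeg -i 1280x720.h264 -c copy 1280x720.mp4   # remux the raw stream into an MP4 container without re-encoding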