一 FFmpeg installation:
Reference link: https://blog.csdn.net/danfengw/article/details/62887997
Note: FFmpeg does not come with x264 encoding (it does come with H.264 decoding), so x264 has to be installed yourself; the link above provides the installation commands.
Once everything is installed, add the environment variables:
vim ~/.bashrc
and append at the end:
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/wping/ffmpeg_build/lib/
export PKG_CONFIG_PATH=/home/wping/ffmpeg_build/lib/pkgconfig/
then run source ~/.bashrc (or open a new terminal) so the changes take effect.
二 Main part
1. CMakeLists.txt
There is a pitfall here: at first the project would not build no matter what; in the end it only compiled by going through pkg-config.
The ffmpeg_build/lib directory contains a pkgconfig folder with the .pc files for the libraries built above.
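Before running cmake it is worth checking that pkg-config can actually see the freshly built libraries; a quick test, assuming the ffmpeg_build prefix from section 一:
export PKG_CONFIG_PATH=/home/wping/ffmpeg_build/lib/pkgconfig/
pkg-config --modversion libavcodec libavformat libswscale x264
pkg-config --cflags --libs libavcodec
If the pkg-config calls report "not found", the pkg_check_modules steps in the CMakeLists.txt below will fail the same way.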
CMAKE_MINIMUM_REQUIRED(VERSION 3.0)
project(demo)
set(CMAKE_CXX_COMPILER "g++")
add_definitions(`pkg-config --cflags --libs x264 libswscale libavformat libavutil libavcodec`)
include(FindPkgConfig)
pkg_check_modules(AVCODEC REQUIRED libavcodec)
pkg_check_modules(AVFORMAT REQUIRED libavformat)
pkg_check_modules(AVSCALE REQUIRED libswscale)
pkg_check_modules(AVDEV REQUIRED libavdevice)
pkg_check_modules(AVUTIL REQUIRED libavutil)
pkg_check_modules(AVX REQUIRED x264)
pkg_check_modules(AVPOST REQUIRED libpostproc)
set(INC_DIR /home/wping/ffmpeg_build/include/)   # mind the path here
set(LINK_DIR /home/wping/ffmpeg_build/lib/pkgconfig)
include_directories(${INC_DIR} ./include)
link_directories(${AVX_LIBRARY_DIRS} ${AVSCALE_LIBRARY_DIRS} ${AVFORMAT_LIBRARY_DIRS} ${AVUTIL_LIBRARY_DIRS} ${AVCODEC_LIBRARY_DIRS})
aux_source_directory(src SOURCE_FILES)
set(SOURCE ${SOURCE_FILES})
add_executable(demo ${SOURCE})
target_link_libraries(demo ${AVX_LDFLAGS} ${AVSCALE_LDFLAGS} ${AVFORMAT_LDFLAGS} ${AVUTIL_LDFLAGS} ${AVCODEC_LDFLAGS} )
2. main.cpp
#include <stdio.h>
#include "rtspSource.h"
int main(){
char fileName[]= "rtsp://184.72.239.149/vod/mp4://BigBuckBunny_175k.mov"; // pick whatever source stream you like
rtspSource(fileName);
return 0;
}
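A small variation, not part of the original code, that lets you pass the stream URL on the command line instead of recompiling:
#include <stdio.h>
#include "rtspSource.h"
int main(int argc, char *argv[]){
    // fall back to the Big Buck Bunny test stream when no URL is given
    char defaultUrl[]= "rtsp://184.72.239.149/vod/mp4://BigBuckBunny_175k.mov";
    rtspSource(argc > 1 ? argv[1] : defaultUrl);
    return 0;
}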
3. rtspSource.cpp
void rtspSource(char* fileName){
FILE *fpSaveH264;
if((fpSaveH264= fopen("saveH264.h264", "ab"))== NULL) return;
FILE *fpSaveYUV;
if(( fpSaveYUV= fopen("saveYUV.yuv", "ab")) == NULL) return;
ffmpegCommonSet *decodePro= new ffmpegCommonSet();
ffmpegCommonSet *encodePro= new ffmpegCommonSet();
h264decodeInit(decodePro, fileName);
h264encodeInit(encodePro, decodePro->pCodecCtx, decodePro->pCodecCtx->width, decodePro->pCodecCtx->height);
for(; ;){
av_init_packet(&decodePro->packet);
if(av_read_frame(decodePro->pFormatCtx, &decodePro->packet)>= 0){
if(decodePro->packet.stream_index== decodePro->videoIndex){
avcodec_decode_video2(decodePro->pCodecCtx, decodePro->pFrame, & decodePro->gotPicture, &decodePro->packet); // the contents of packet could also be written out here as a raw H.264 stream
if(decodePro->gotPicture){
sws_scale(decodePro->img_convert_ctx, (const uint8_t * const *)decodePro->pFrame->data,\
decodePro->pFrame->linesize, 0, decodePro->pCodecCtx->height,\
decodePro->pFrameYUV->data, decodePro->pFrameYUV->linesize);
uint8_t *bufferYUV=(uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P,\
decodePro->pCodecCtx->width, decodePro->pCodecCtx->height));
singleFrameSave(decodePro->pFrame, bufferYUV, decodePro->pCodecCtx->width, decodePro->pCodecCtx->height);
fwrite(bufferYUV, 1, decodePro->pCodecCtx->height* decodePro->pCodecCtx->width *3/2, fpSaveYUV); // save one frame of YUV
avpicture_fill((AVPicture *)encodePro->pFrame, bufferYUV, AV_PIX_FMT_YUV420P, decodePro->pCodecCtx->width, decodePro->pCodecCtx->height);
encodePro->pFrame->pts= encodePro->frameCount++; // advance pts so it stays monotonically increasing
av_init_packet(&encodePro->packet);
YUVSaveToH264(encodePro->pFrame, bufferYUV, decodePro->pCodecCtx->width, decodePro->pCodecCtx->height);
int ret= avcodec_encode_video2(encodePro->pCodecCtx, &encodePro->packet, encodePro->pFrame, &encodePro->gotPicture);
if(ret< 0){
printf("fail to encode \n");
return;
}
if(encodePro->gotPicture){
fwrite(encodePro->packet.data, 1, encodePro->packet.size, fpSaveH264);
av_packet_unref(&encodePro->packet);
}
av_free(bufferYUV);
av_free_packet(&encodePro->packet);
}
}
av_free_packet(&decodePro->packet);
}
else break;
}
for(encodePro->gotPicture=1; encodePro->gotPicture; ){
fflush(stdout);
encodePro->pFrame->pts=encodePro->frameCount;
int ret= avcodec_encode_video2(encodePro->pCodecCtx, &encodePro->packet, NULL, &encodePro->gotPicture);
if(ret< 0){printf("fail2 to encode \n"); break;}
if(encodePro->gotPicture){
fwrite(encodePro->packet.data, 1, encodePro->packet.size, fpSaveH264);
av_packet_unref(&encodePro->packet);
}
}
fclose(fpSaveYUV);
fclose(fpSaveH264);
av_free(decodePro->outbuffer);
delete encodePro;
delete decodePro;
}
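The code above uses the older avcodec_decode_video2/avcodec_encode_video2 calls, which are deprecated from FFmpeg 3.1 onwards. For reference only (not what is used here), a rough sketch of the decode step with the newer send/receive API would look like this:
// sketch only: decode one AVPacket with the send/receive API (FFmpeg 3.1+)
int decodeOnePacket(AVCodecContext *ctx, AVPacket *pkt, AVFrame *frame){
    int ret= avcodec_send_packet(ctx, pkt);        // feed compressed data to the decoder
    if(ret < 0) return ret;
    while(ret >= 0){
        ret= avcodec_receive_frame(ctx, frame);    // drain every frame the decoder has ready
        if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) return 0;
        if(ret < 0) return ret;
        // a decoded frame is available here: sws_scale it, re-encode it, etc.
    }
    return 0;
}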
4. h264decodeInit.cpp
#pragma once
#include "h264decodeInit.h"
void h264decodeInit(ffmpegCommonSet *decodePro, char *fileName){
if(avformat_open_input(&decodePro->pFormatCtx, fileName, NULL, NULL )!= 0){
printf("can not open stream info \n");
return;
}
if(avformat_find_stream_info(decodePro->pFormatCtx, NULL)< 0){
printf("can not find stream info \n");
return;
}
for(int i= 0; i< decodePro->pFormatCtx->nb_streams; ++i){
if(decodePro->pFormatCtx->streams[i]->codec->codec_type== AVMEDIA_TYPE_VIDEO){
decodePro->videoIndex= i;
break;
}
}
if(decodePro->videoIndex == -1){
printf("do not find a video stream \n");
return;
}
decodePro->pCodecCtx= decodePro->pFormatCtx->streams[decodePro->videoIndex]->codec;
decodePro->pCodec= avcodec_find_decoder(decodePro->pCodecCtx->codec_id);
if(!decodePro->pCodec){
printf("can not find codec \n");
return;
}
if(avcodec_open2(decodePro->pCodecCtx, decodePro->pCodec, NULL)< 0){
printf("can not open codec \n");
return;
}
decodePro->outbuffer= (uint8_t *)(av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, \
decodePro->pCodecCtx->width, decodePro->pCodecCtx->height)));
avpicture_fill((AVPicture*) decodePro->pFrameYUV, decodePro->outbuffer, AV_PIX_FMT_YUV420P,\
decodePro->pCodecCtx->width, decodePro->pCodecCtx->height);
decodePro->img_convert_ctx= sws_getContext(decodePro->pCodecCtx->width, decodePro->pCodecCtx->height,\
decodePro->pCodecCtx->pix_fmt, decodePro->pCodecCtx->width, decodePro->pCodecCtx->height,\
AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
printf("---------------------------file info--------------------------- \n");
av_dump_format(decodePro->pFormatCtx, 0, fileName, 0);
printf("--------------------------------------------------------------- \n");
}
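Side note: if the RTSP stream stutters or loses packets over UDP, avformat_open_input accepts an options dictionary. A minimal sketch (not used in the code above; option names can vary between FFmpeg versions) that forces TCP transport:
AVDictionary *opts= NULL;
av_dict_set(&opts, "rtsp_transport", "tcp", 0);   // pull the stream over TCP instead of UDP
if(avformat_open_input(&decodePro->pFormatCtx, fileName, NULL, &opts)!= 0){
    printf("can not open stream \n");
}
av_dict_free(&opts);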
#include"h264encodeInit.h"
#include
void h264encodeInit(ffmpegCommonSet *encodePro, AVCodecContext *pCodecCtxOri, int width, int height){
encodePro->pFrame->width= width;
encodePro->pFrame->height= height;
encodePro->pFrame->format= AV_PIX_FMT_YUV420P;
encodePro->pCodec= avcodec_find_encoder(AV_CODEC_ID_H264);
if(!encodePro->pCodec){
printf("can not find encoder \n");
return;
}
encodePro->pCodecCtx= avcodec_alloc_context3(encodePro->pCodec);
if(!encodePro->pCodecCtx){
printf("can not alloc video context \n");
return;
}
encodePro->pCodecCtx->bit_rate= pCodecCtxOri->bit_rate;
encodePro->pCodecCtx->width= width;
encodePro->pCodecCtx->height= height;
encodePro->pCodecCtx->gop_size= pCodecCtxOri->gop_size;
encodePro->pCodecCtx->qmin= pCodecCtxOri->qmin;
encodePro->pCodecCtx->qmax= pCodecCtxOri->qmax;
encodePro->pCodecCtx->max_b_frames= pCodecCtxOri->max_b_frames;
encodePro->pCodecCtx->pix_fmt= AV_PIX_FMT_YUV420P;
encodePro->pCodecCtx->time_base.num= pCodecCtxOri->time_base.num;
encodePro->pCodecCtx->time_base.den= pCodecCtxOri->time_base.den;
av_opt_set(encodePro->pCodecCtx->priv_data, "preset", "slow", 0);
printf("bit_rate=%d \n", encodePro->pCodecCtx->bit_rate);
printf("gopsize=%d \n", encodePro->pCodecCtx->gop_size);
if(avcodec_open2(encodePro->pCodecCtx, encodePro->pCodec, NULL)< 0){
printf("can not open encodecccc \n");
return ;
}
}
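Other libx264 options can be set through priv_data in the same way as the preset; for example, for a low-latency live pipeline one could instead set (optional, not used above):
av_opt_set(encodePro->pCodecCtx->priv_data, "preset", "ultrafast", 0);   // trade compression efficiency for speed
av_opt_set(encodePro->pCodecCtx->priv_data, "tune", "zerolatency", 0);   // no look-ahead / B-frame buffering delay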
6. singleFrameSave.cpp
#pragma once
#include "singleFrameSave.h"
void singleFrameSave(AVFrame *pFrame, uint8_t *buffer, int width, int height){
int i, a= 0;
for(i= 0; i< height; ++i){ memcpy(buffer+ a, pFrame->data[0]+ i*pFrame->linesize[0], width); a+= width;}       // Y plane
for(i= 0; i< height/2; ++i){ memcpy(buffer+ a, pFrame->data[1]+ i*pFrame->linesize[1], width/2); a+= width/2;} // U plane
for(i= 0; i< height/2; ++i){ memcpy(buffer+ a, pFrame->data[2]+ i*pFrame->linesize[2], width/2); a+= width/2;} // V plane
}
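The same copy (stripping the linesize padding plane by plane) can also be done with one call to libavutil's av_image_copy_to_buffer; a sketch of an equivalent body, assuming libavutil/imgutils.h is included:
int size= av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1);
av_image_copy_to_buffer(buffer, size,
        (const uint8_t * const *)pFrame->data, pFrame->linesize,
        AV_PIX_FMT_YUV420P, width, height, 1);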
7. YUVSaveToH264.cpp
#pragma once
#include "YUVSaveToH264.h"
#include "commonSet.h"
void YUVSaveToH264(AVFrame* pFrame, uint8_t *buffer, int width, int height){
pFrame->data[0]= buffer;
pFrame->data[1]= buffer+ width*height;
pFrame->data[2]= buffer+ width*height*5/4;
}
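Note that this only assigns the three data pointers; pFrame->linesize stays whatever avpicture_fill already set in rtspSource.cpp. The equivalent libavutil call that fills both the pointers and the linesizes in one step is av_image_fill_arrays:
av_image_fill_arrays(pFrame->data, pFrame->linesize, buffer,
        AV_PIX_FMT_YUV420P, width, height, 1);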
8. commonSet.h
#pragma once
#ifdef __cplusplus
extern "C"
{
#endif
// C standard headers and the FFmpeg C API (hence the extern "C" wrapper)
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/mem.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libpostproc/postprocess.h>
#ifdef __cplusplus
}
#endif
class ffmpegCommonSet{
public:
AVCodecContext *pCodecCtx;
AVFormatContext *pFormatCtx;
AVFrame *pFrame, *pFrameYUV;
uint8_t * outbuffer;
AVPacket packet;
AVCodec *pCodec;
int videoIndex;
int gotPicture;
struct SwsContext *img_convert_ctx;
int frameCount;
ffmpegCommonSet(){
av_register_all();
avformat_network_init();
pFormatCtx= avformat_alloc_context();
pFrame= av_frame_alloc();
pFrameYUV= av_frame_alloc();
videoIndex= -1;
gotPicture= 0;
frameCount= 0;
}
~ffmpegCommonSet(){
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
}
};
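One caveat with this class: the constructor never initializes pCodecCtx or img_convert_ctx, so if decoder setup fails early the destructor calls avcodec_close on a garbage pointer. A slightly hardened sketch of the constructor/destructor (optional, not part of the original):
ffmpegCommonSet(){
    av_register_all();
    avformat_network_init();
    pFormatCtx= avformat_alloc_context();
    pFrame= av_frame_alloc();
    pFrameYUV= av_frame_alloc();
    pCodecCtx= NULL;            // let the destructor test before closing
    pCodec= NULL;
    outbuffer= NULL;
    img_convert_ctx= NULL;
    videoIndex= -1;
    gotPicture= 0;
    frameCount= 0;
}
~ffmpegCommonSet(){
    if(img_convert_ctx) sws_freeContext(img_convert_ctx);
    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    if(pCodecCtx) avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
}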
9. rtspSource.h
#pragma once
#include "commonSet.h"
void rtspSource(char *fileName);
10. h264decodeInit.h
#pragma once
#include "commonSet.h"
void h264decodeInit(ffmpegCommonSet* decodePro, char *fileName);
11. h264encodeInit.h
#pragma once
#include "commonSet.h"
void h264encodeInit(ffmpegCommonSet* encodePro, AVCodecContext *pCodecCtxOri, int width, int height);
12. singleFrameSave.h
#pragma once
#include "commonSet.h"
void singleFrameSave(AVFrame *pFrame, uint8_t *buffer, int width, int height);
13. YUVSaveToH264.h
#pragma once
#include "commonSet.h"
void YUVSaveToH264(AVFrame *pFrame, uint8_t *buffer, int width, int height);
三 Summary
cmake .
make
./demo
When the program finishes, a .yuv file and a .h264 file will have been produced.
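To sanity-check the results you can play both files back with ffplay; for the raw YUV you have to pass the resolution that av_dump_format printed for the source stream (WIDTHxHEIGHT below is a placeholder):
ffplay saveH264.h264
ffplay -f rawvideo -pixel_format yuv420p -video_size WIDTHxHEIGHT saveYUV.yuv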