#include <iostream>
extern "C" {
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavutil/time.h"
#include "libavcodec/avcodec.h"
}
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avutil.lib")
using std::cout;
using std::endl;
// Print a human-readable description of an FFmpeg error code.
//   info: context message printed as a prefix (e.g. the failing call's name)
//   ret:  negative AVERROR code returned by an FFmpeg API call
// Pauses the console afterwards so the message stays visible on Windows.
// NOTE: parameter is const char* — callers pass string literals, and binding
// a literal to non-const char* is ill-formed in standard C++.
void printErr(const char *info, int ret)
{
    char err[1024] = { 0 };
    // av_strerror fills err with the text for ret (or a generic message).
    av_strerror(ret, err, sizeof(err));
    std::cout << info << ": " << err << std::endl;
    system("pause");
}
// Convert an AVRational (num/den) to double.
// Returns 0 when the denominator is 0, avoiding division by zero for
// unset/invalid rationals.
double r2d(AVRational r)
{
    if (r.den == 0) {
        return 0.0;
    }
    return static_cast<double>(r.num) / static_cast<double>(r.den);
}
int main(int argv, char *argc[])
{
//打开输入封装格式
char *inPath = "D:/video/Bilby.mkv";
AVFormatContext *inFormatCtx = nullptr;
int ret = avformat_open_input(&inFormatCtx, inPath, NULL, NULL);
if (ret != 0) {
printErr("avformat_open_input failed", ret);
return -1;
}
//读取packet,获取stream信息
avformat_find_stream_info(inFormatCtx, NULL);
av_dump_format(inFormatCtx, 0, NULL, 0);
//创建输出的封装格式,输出为rtmp流
char *outPath = "rtmp://xxx.xxx.xxx/livetv/chcatv";
AVFormatContext *outFormatCtx = nullptr;
ret = avformat_alloc_output_context2(&outFormatCtx, NULL, "flv", outPath);
if (ret != 0) {
printErr("avformat_alloc_output_context2 failed", ret);
return -1;
}
int videoIndex = av_find_best_stream(inFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
int audioIndex = av_find_best_stream(inFormatCtx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
//为输出封装格式创建stream
for (int i = 0; i < inFormatCtx->nb_streams; i++) {
AVStream *inStream = inFormatCtx->streams[i];
AVStream *outStream = avformat_new_stream(outFormatCtx, NULL);
ret = avcodec_parameters_copy(outStream->codecpar, inStream->codecpar);
if (ret != 0) {
printErr("avcodec_parameters_copy failed", ret);
return -1;
}
//codec_tag=0 ffmpeg自动设置封装格式支持的数据流
outStream->codecpar->codec_tag = 0;
}
av_dump_format(outFormatCtx, 0, NULL, 1);
//打开io,进行rtmp链接
ret = avio_open(&outFormatCtx->pb, outPath, AVIO_FLAG_WRITE);
if (ret != 0) {
printErr("avio_open failed", ret);
return -1;
}
//写入stream头,会改变输出stream中的time_base
ret = avformat_write_header(outFormatCtx, NULL);
if (ret != 0) {
printErr("avformat_write_header failed", ret);
return -1;
}
AVPacket pkt;
av_init_packet(&pkt);
int64_t startTime = av_gettime();
//遍历输入文件中的packet
while (1) {
//读取packet
ret = av_read_frame(inFormatCtx, &pkt);
if (ret < 0) {
break;
}
//pts,dts,duration以输出format的time_base转换
AVRational srcTb = inFormatCtx->streams[pkt.stream_index]->time_base;
AVRational dstTb = outFormatCtx->streams[pkt.stream_index]->time_base;
pkt.pts = av_rescale_q_rnd(pkt.pts, srcTb, dstTb, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, srcTb, dstTb, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, srcTb, dstTb);
//使用视频的pts与实际时间进行同步
if (pkt.stream_index == videoIndex) {
int64_t now = av_gettime() - startTime;
int64_t pts = pkt.pts * 1000 * 1000 * r2d(dstTb);
if (pts > now) {
av_usleep(pts - now);
}
}
//发送packet
av_interleaved_write_frame(outFormatCtx, &pkt);
}
av_write_trailer(outFormatCtx);
//关闭输入输出
avformat_close_input(&inFormatCtx);
avio_close(outFormatCtx->pb);
avformat_free_context(outFormatCtx);
system("pause");
return 0;
}
推流和转封装的代码基本一致，但需要注意以下几点：
1.avformat_alloc_output_context2创建输出封装格式,输出路径就是指定推流地址。
2.ffmpeg可能无法从推流地址分析出封装格式,在调用avformat_alloc_output_context2时,需要手动指定一下,本文指定的是flv格式。
3.在读取packet推流时需要与当前时间进行同步。因为ffmpeg本地读取文件速度很快,而播放速度相对较慢。如果不同步有可能出现40分钟的文件10分钟就读取发送完成,造成一些问题。