This article implements stream publishing (pushing) with the FFmpeg API.
AV_TIME_BASE
FFmpeg's internal timing unit (time base). All times inside FFmpeg are expressed in this unit; for example, AVFormatContext's duration means the container is duration AV_TIME_BASE units long. AV_TIME_BASE is defined as:
#define AV_TIME_BASE 1000000
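For example, a minimal sketch (assuming an AVFormatContext named fmt_ctx that has already been opened and probed; the function name is hypothetical) converting the container duration into seconds:

#include <stdio.h>
#include <libavformat/avformat.h>

// fmt_ctx is assumed to come from avformat_open_input() +
// avformat_find_stream_info().
void print_duration(AVFormatContext *fmt_ctx)
{
    // AVFormatContext.duration is in AV_TIME_BASE units (microseconds),
    // so dividing by AV_TIME_BASE yields seconds.
    printf("duration: %.3f s\n", (double)fmt_ctx->duration / AV_TIME_BASE);
}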
Function declaration:
int64_t av_rescale_rnd(int64_t a, int64_t b, int64_t c, enum AVRounding rnd)
Reading the code directly, it computes "a * b / c" and rounds the result in one of five ways.
In FFmpeg, it is typically used to convert a value a expressed in time base c into a value expressed in time base b.
The AVRounding enum lists the five rounding modes:
enum AVRounding {
AV_ROUND_ZERO = 0, ///< Round toward zero.
AV_ROUND_INF = 1, ///< Round away from zero.
AV_ROUND_DOWN = 2, ///< Round toward -infinity.
AV_ROUND_UP = 3, ///< Round toward +infinity.
AV_ROUND_NEAR_INF = 5, ///< Round to nearest and halfway cases away from zero.
/**
* Flag telling rescaling functions to pass `INT64_MIN`/`MAX` through
* unchanged, avoiding special cases for #AV_NOPTS_VALUE.
*
* Unlike other values of the enumeration AVRounding, this value is a
* bitmask that must be used in conjunction with another value of the
* enumeration through a bitwise OR, in order to set behavior for normal
* cases.
*
* @code{.c}
* av_rescale_rnd(3, 1, 2, AV_ROUND_UP | AV_ROUND_PASS_MINMAX);
* // Rescaling 3:
* // Calculating 3 * 1 / 2
* // 3 / 2 is rounded up to 2
* // => 2
*
* av_rescale_rnd(AV_NOPTS_VALUE, 1, 2, AV_ROUND_UP | AV_ROUND_PASS_MINMAX);
* // Rescaling AV_NOPTS_VALUE:
* // AV_NOPTS_VALUE == INT64_MIN
* // AV_NOPTS_VALUE is passed through
* // => AV_NOPTS_VALUE
* @endcode
*/
AV_ROUND_PASS_MINMAX = 8192,
};
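A quick sketch showing the five modes on the same inputs; the expected results follow directly from the a * b / c rule (3 * 1 / 2 = 1.5 and -3 * 1 / 2 = -1.5):

#include <stdio.h>
#include <libavutil/mathematics.h>

int main(void)
{
    // 1.5 rounded five ways:
    printf("%lld\n", (long long)av_rescale_rnd(3, 1, 2, AV_ROUND_ZERO));     // 1: toward zero
    printf("%lld\n", (long long)av_rescale_rnd(3, 1, 2, AV_ROUND_INF));      // 2: away from zero
    printf("%lld\n", (long long)av_rescale_rnd(3, 1, 2, AV_ROUND_DOWN));     // 1: toward -infinity
    printf("%lld\n", (long long)av_rescale_rnd(3, 1, 2, AV_ROUND_UP));       // 2: toward +infinity
    printf("%lld\n", (long long)av_rescale_rnd(3, 1, 2, AV_ROUND_NEAR_INF)); // 2: nearest, ties away from zero
    // -1.5 shows how the modes differ by sign:
    printf("%lld\n", (long long)av_rescale_rnd(-3, 1, 2, AV_ROUND_DOWN));    // -2: toward -infinity
    printf("%lld\n", (long long)av_rescale_rnd(-3, 1, 2, AV_ROUND_ZERO));    // -1: toward zero
    return 0;
}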
av_rescale_q_rnd converts timestamp a from time base bq to time base cq, rounding according to rnd. Since time base bq represents bq.num/bq.den seconds per tick, the conversion is a * (bq.num * cq.den) / (cq.num * bq.den), which is exactly what the definition delegates to av_rescale_rnd.
Function definition:
int64_t av_rescale_q_rnd(int64_t a, AVRational bq, AVRational cq,
enum AVRounding rnd)
{
int64_t b = bq.num * (int64_t)cq.den;
int64_t c = cq.num * (int64_t)bq.den;
return av_rescale_rnd(a, b, c, rnd);
}
Function definition:
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
{
return av_rescale_q_rnd(a, bq, cq, AV_ROUND_NEAR_INF);
}
Usage example:
Convert a PTS/DTS value a expressed in a "1 MHz time base" into a "90 kHz time base". Tracing the call with the values substituted in:
//The call being traced
int64_t av_rescale_q(a=10949117256, bq={num=1, den=1000000}, cq={num=1, den=90000})
{
return av_rescale_q_rnd(a, bq, cq, AV_ROUND_NEAR_INF);
}
int64_t av_rescale_q_rnd(int64_t a, AVRational bq, AVRational cq,
enum AVRounding rnd)
{
int64_t b = bq.num * (int64_t)cq.den;// = 1 * 90000 = 90000;
int64_t c = cq.num * (int64_t)bq.den; // = 1 * 1000000 = 1000000
return av_rescale_rnd(a, b, c, 5);
}
int64_t av_rescale_rnd(a=10949117256, b=90000, c=1000000, rnd=5)
{
if (rnd==5)
r = c / 2; // r =500000;
if (b<=INT_MAX && c<=INT_MAX)
{
if (a<=INT_MAX)
return (a * b + r)/c;
else
return a/c*b + (a%c*b + r)/c; // = 10949117256 / 1000000 * 90000 +
// (10949117256 % 1000000 * 90000 + 500000) / 1000000
// = 985420553
}
else
{
...
}
}
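The walkthrough can be verified with a few lines (a sketch; it should print the 985420553 computed above):

#include <stdio.h>
#include <libavutil/mathematics.h>

int main(void)
{
    AVRational src_tb = {1, 1000000}; // 1 MHz time base
    AVRational dst_tb = {1, 90000};   // 90 kHz time base
    printf("%lld\n", (long long)av_rescale_q(10949117256LL, src_tb, dst_tb)); // 985420553
    return 0;
}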
Video is easy to understand: the PTS increments frame by frame. If the video is 25 fps, the time base is the reciprocal of the fps, 1/25, and the PTS simply counts up. The formula is:
pts of frame n = n * ((1 / time_base) / fps);
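As a concrete sketch of this formula (the 25 fps and 1/90000 time base below are assumptions for illustration, and the function name is hypothetical):

#include <stdint.h>
#include <libavutil/rational.h>

// pts of frame n = n * ((1 / time_base) / fps)
int64_t video_pts(int64_t n, AVRational time_base, int fps)
{
    // 1 / time_base = time_base.den / time_base.num ticks per second;
    // dividing by fps gives ticks per frame.
    int64_t ticks_per_frame = time_base.den / (time_base.num * fps);
    return n * ticks_per_frame;
}
// e.g. video_pts(n, (AVRational){1, 90000}, 25) == n * 3600;
// with time_base = 1/25 it reduces to pts = n.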
Audio is harder to understand, because one audio packet contains more than one sample, so it is not obvious how many packets there are in a second, let alone how to compute the PTS.
Suppose one second of audio contains num_pkt packets; what is num_pkt?
This starts with audio basics. Audio has a sample rate: the number of samples taken per second. Many streams use 44100 Hz, and 8 kHz is also common. So how does the sample rate relate to num_pkt?
AVFrame has an important field named nb_samples, the number of samples in the frame. Combined with the sample format it tells you how large frame->data is; combined with the sample rate it also tells you how many packets one second of audio contains.
The formula is:
num_pkt = sample_rate / nb_samples;
With the number of packets per second known (you can loosely think of them as frames), computing the PTS works exactly like video:
pts of packet n = n * ((1 / time_base) / num_pkt);
For many audio streams the time base is the reciprocal of the sample rate; in that case the formula reduces to a PTS that simply increments by nb_samples, as the sketch after this list shows:
packet 1: pts = 0 * nb_samples;
packet 2: pts = 1 * nb_samples;
packet 3: pts = 2 * nb_samples;
...
packet n: pts = (n - 1) * nb_samples;
Create a new project with Qt Creator and add the FFmpeg references to the .pro file.
#-------------------------------------------------
#
# Project created by QtCreator 2020-05-13T09:08:20
#
#-------------------------------------------------
QT += core gui
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
TARGET = OutputSample
TEMPLATE = app
SOURCES += main.cpp\
mainwindow.cpp
HEADERS += mainwindow.h
FORMS += mainwindow.ui
INCLUDEPATH +="D:\\tools\\ffmpeg\\win32\\dev\\include"
LIBS += -LD:\tools\ffmpeg\win32\dev\lib -lavutil -lavformat -lavcodec -lavdevice -lavfilter -lpostproc -lswresample -lswscale
#define __STDC_CONSTANT_MACROS
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
}
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
#ifdef __cplusplus
};
#endif
#endif
int main(int argc, char* argv[])
{
AVOutputFormat *ofmt = NULL;
AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
AVPacket pkt;
const char *in_filename, *out_filename;
int ret, i;
int videoindex=-1;
int frame_index=0;
int64_t start_time=0;
in_filename = "D:/1.mp4"; // Input file URL
out_filename = "rtmp://<server>:<port>/flv/test_1_1"; // Output URL [RTMP]
//out_filename = "rtp://233.233.233.233:6666"; // Output URL [UDP]
// Initialize FFmpeg (register all muxers/demuxers)
av_register_all();
// Initialize the network library
avformat_network_init();
// Open the input
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) {
printf( "Could not open input file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
printf( "Failed to retrieve input stream information");
goto end;
}
for(i=0; i<ifmt_ctx->nb_streams; i++)
if(ifmt_ctx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
videoindex=i;
break;
}
// Dump input stream information
av_dump_format(ifmt_ctx, 0, in_filename, 0);
// Allocate the output context
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);//UDP
if (!ofmt_ctx) {
printf( "Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
for (i = 0; i < ifmt_ctx->nb_streams; i++) {
//Create output AVStream according to input AVStream
AVStream *in_stream = ifmt_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
if (!out_stream) {
printf( "Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
//Copy the settings of AVCodecContext
ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
if (ret < 0) {
printf( "Failed to copy context from input to output stream codec context\n");
goto end;
}
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
// Dump output stream information
av_dump_format(ofmt_ctx, 0, out_filename, 1);
// Open the output with avio_open
if (!(ofmt->flags & AVFMT_NOFILE)) {
ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0) {
printf( "Could not open output URL '%s'", out_filename);
goto end;
}
}
// Write the file header
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
printf( "Error occurred when opening output URL\n");
goto end;
}
start_time=av_gettime();
while (1) {
AVStream *in_stream, *out_stream;
//Read one packet
ret = av_read_frame(ifmt_ctx, &pkt);
if (ret < 0)
break;
// If there is no PTS (e.g. a raw H.264 stream), generate one ourselves
//Simple test: pkt.pts == AV_NOPTS_VALUE
if(pkt.pts==AV_NOPTS_VALUE){
//Write PTS
AVRational time_base1=ifmt_ctx->streams[videoindex]->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
//PTS calculation formula
pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts=pkt.pts;
pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
}
//Important: delay so the stream is pushed at the original playback rate
if(pkt.stream_index==videoindex){
AVRational time_base=ifmt_ctx->streams[videoindex]->time_base;
AVRational time_base_q={1,AV_TIME_BASE};
int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[pkt.stream_index];
// Rescale the packet to the output stream's time base
//Convert PTS/DTS
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
// Print progress log
if(pkt.stream_index==videoindex){
printf("Send %8d video frames to output URL\n",frame_index);
frame_index++;
}
//ret = av_write_frame(ofmt_ctx, &pkt);
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0) {
printf( "Error muxing packet\n");
break;
}
av_free_packet(&pkt);
}
//Write file trailer
av_write_trailer(ofmt_ctx);
end:
avformat_close_input(&ifmt_ctx);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
if (ret < 0 && ret != AVERROR_EOF) {
printf( "Error occurred.\n");
return -1;
}
return 0;
}
Open rtmp://<server>:<port>/flv/test_1_1 in VLC to view the video.