This follows Lei Xiaohua (雷神)'s post 《最简单的基于FFmpeg的推流器(以推送RTMP为例)》 (the simplest FFmpeg-based streamer, pushing RTMP as an example)
and another post, 《使用ffmpeg将实时流保存为AVI》 (saving a live stream as AVI with ffmpeg), and then compares the two implementations.
The first implementation is mainly based on Lei Xiaohua's. On top of it, it reads an rtsp/rtmp stream and re-pushes it as rtsp/rtmp, and it can also read data from memory and push that. RTSP can carry H.264/H.265 streams; FLV can only carry H.264, so H.265 is not supported over RTMP for now.
When reading from memory, the data must already be available before avformat_open_input(&ifmt_ctx, NULL, NULL, NULL) is called, otherwise the open fails, and the first frame must be an IDR frame.
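The Read_Data helper used by the memory-read path is not shown here. Below is a minimal sketch of what it could look like, assuming it simply wraps a local file (cuc_ieschool.flv) to stand in for a memory source; only the class name, the constructor argument and the read_buffer callback are taken from the call sites, everything else is an assumption. In a real deployment read_buffer would copy encoded data out of your own buffer or queue instead of calling fread.

#include <cstdio>
#include <cstdint>
extern "C" {
#include <libavutil/error.h> /* for AVERROR_EOF */
}

/* Minimal sketch of the Read_Data helper passed to avio_alloc_context().
   Assumption: it wraps a local file to simulate a memory source. */
class Read_Data
{
public:
    explicit Read_Data(const char *filename) { fp_ = fopen(filename, "rb"); }
    ~Read_Data() { if (fp_) fclose(fp_); }

    /* Signature required by avio_alloc_context(): return the number of bytes
       written into buf, or AVERROR_EOF when no more data is available. */
    static int read_buffer(void *opaque, uint8_t *buf, int buf_size)
    {
        Read_Data *self = static_cast<Read_Data *>(opaque);
        if (!self || !self->fp_)
            return -1;
        size_t n = fread(buf, 1, (size_t)buf_size, self->fp_);
        return (n == 0) ? AVERROR_EOF : (int)n;
    }

private:
    FILE *fp_ = nullptr;
};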
/* Enable reading data directly from memory and then pushing it */
#define READ_FROME_MEM_BUFFER (1)
/* Push test: can push rtsp/rtmp streams */
int test_pusher_main( )
{
AVOutputFormat *ofmt = NULL;
//Input AVFormatContext and Output AVFormatContext
AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
AVPacket pkt;
const char *in_filename, *out_filename;
int ret, i;
int videoindex = -1;
int frame_index = 0;
int64_t start_time = 0;
#ifndef READ_FROME_MEM_BUFFER
in_filename = "rtsp://localhost:556/test";//输入URL(Input file URL)
#else
Read_Data read_handle("cuc_ieschool.flv");
#endif
out_filename = "rtmp://localhost:1936/live/livestream";//输出 URL(Output URL)[RTMP]
//out_filename = "rtp://233.233.233.233:6666";//输出 URL(Output URL)[UDP]
//avfilter_register_all();
av_log_set_level(AV_LOG_DEBUG);
avcodec_register_all();
av_register_all();
//Network
avformat_network_init();
#ifdef READ_FROME_MEM_BUFFER
ifmt_ctx = avformat_alloc_context();
unsigned char* aviobuffer = (unsigned char *)av_malloc(32768);
AVIOContext* avio = avio_alloc_context(aviobuffer, 32768, 0, &read_handle, Read_Data::read_buffer, NULL, NULL);
/* Open an input stream and read the header. */
ifmt_ctx->pb = avio;
if (avformat_open_input(&ifmt_ctx, NULL, NULL, NULL) != 0)
{
printf("Couldn't open input stream.\n");
return -1;
}
#else
//Input
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0)
{
printf("Could not open input file.");
goto end;
}
#endif
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
{
printf("Failed to retrieve input stream information");
goto end;
}
for (i = 0; i < ifmt_ctx->nb_streams; i++)
if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
if (videoindex == -1)
{
printf("Didn't find a video stream.\n");
return -1;
}
#ifndef READ_FROME_MEM_BUFFER
av_dump_format(ifmt_ctx, 0, in_filename, 0);
#endif
//Output
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);//UDP
if (!ofmt_ctx)
{
printf("Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
for (i = 0; i < ifmt_ctx->nb_streams; i++)
{
//Create output AVStream according to input AVStream
AVStream *in_stream = ifmt_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
if (!out_stream)
{
printf("Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
//Copy the settings of AVCodecContext
ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
if (ret < 0)
{
printf("Failed to copy context from input to output stream codec context\n");
goto end;
}
out_stream->codec->codec_tag = 0;
//if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
//{
//printf("###### set AV_CODEC_FLAG_GLOBAL_HEADER ######\n");
//out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
//}
}
//Dump Format------------------
av_dump_format(ofmt_ctx, 0, out_filename, 1);
//Open output URL
if (!(ofmt->flags & AVFMT_NOFILE))
{
ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0)
{
printf("Could not open output URL '%s'", out_filename);
goto end;
}
}
//Write file header
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0)
{
printf("Error occurred when opening output URL\n");
goto end;
}
start_time = av_gettime();
while (1)
{
AVStream *in_stream, *out_stream;
//Get an AVPacket
ret = av_read_frame(ifmt_ctx, &pkt);
if (ret < 0)
break;
//FIX:No PTS (Example: Raw H.264)
//Simple Write PTS
if (pkt.pts == AV_NOPTS_VALUE)
{
//Write PTS
AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
//Duration between 2 frames (us)
int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
//Parameters
pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts = pkt.pts;
pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
}
//Important:Delay
if (pkt.stream_index == videoindex)
{
AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
AVRational time_base_q = { 1, AV_TIME_BASE };
int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[pkt.stream_index];
/* copy packet */
//Convert PTS/DTS
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
(AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
(AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
//Print to Screen
if (pkt.stream_index == videoindex)
{
printf("Send %8d video frames to output URL\n", frame_index);
frame_index++;
}
//ret = av_write_frame(ofmt_ctx, &pkt);
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0)
{
printf("Error muxing packet ret = %d\n", ret);
char buf[128];
memset(buf, 0, 128);
av_strerror(ret, buf, 128);
printf("error: %s\n", buf);
break;
}
av_free_packet(&pkt);
}
//Write file trailer
av_write_trailer(ofmt_ctx);
getchar();
end:
#ifdef READ_FROME_MEM_BUFFER
av_freep(&avio->buffer);
av_freep(&avio);
#endif
avformat_close_input(&ifmt_ctx);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
if (ret < 0 && ret != AVERROR_EOF)
{
printf("Error occurred.\n");
return -1;
}
getchar();
return 0;
}
The second implementation follows the second blog post; that post only gives a code skeleton, so the details have to be filled in yourself.
With this method, some of the output stream parameters you configure do not actually take effect: the bitrate, frame rate and resolution of the pushed stream end up equal to those of the input.
The key point is detecting H.264 I frames; if that detection fails, the push itself succeeds but the client cannot decode the stream.
This method is also suitable for pushing rtsp/rtmp, supports H.264/H.265 streams, and does not support audio for now.
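H264_GetFrameType() is not given in the referenced post either. The sketch below shows one possible implementation, assuming the third argument is the Annex-B start-code length (4 or 3 bytes, as in the calls below) and treating an IDR slice or an SPS/PPS at the head of the access unit as an I frame. The H264_FRAME_* values are placeholders; proper SI-frame detection would need slice-header parsing and is omitted here.

#include <cstdint>

/* Placeholder return codes; the original post does not show them. */
enum
{
    H264_FRAME_P  = 0,
    H264_FRAME_I  = 1,
    H264_FRAME_SI = 2, /* not detected by this sketch */
};

/* Sketch: decide whether the buffer starts with a keyframe.
   start_code_len is assumed to be the Annex-B start-code length (3 or 4).
   nal_unit_type 5 = IDR slice, 7 = SPS, 8 = PPS; an access unit beginning with
   one of these is reported as an I frame, everything else as a P frame. */
static int H264_GetFrameType(const unsigned char *data, int size, int start_code_len)
{
    if (!data || size <= start_code_len)
        return H264_FRAME_P;
    /* Verify the Annex-B start code (00 00 01 or 00 00 00 01) before trusting it. */
    if (start_code_len == 4)
    {
        if (data[0] != 0 || data[1] != 0 || data[2] != 0 || data[3] != 1)
            return H264_FRAME_P;
    }
    else if (start_code_len == 3)
    {
        if (data[0] != 0 || data[1] != 0 || data[2] != 1)
            return H264_FRAME_P;
    }
    else
    {
        return H264_FRAME_P;
    }
    int nal_unit_type = data[start_code_len] & 0x1F;
    if (nal_unit_type == 5 || nal_unit_type == 7 || nal_unit_type == 8)
        return H264_FRAME_I;
    return H264_FRAME_P;
}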
/* Read buffer data directly, fill it into an AVPacket, then push the stream */
int test_pusher_rtmp_main2()
{
int ret;
char buf[1024] = "";
AVOutputFormat *ofmt = NULL;
AVFormatContext *ofmt_ctx = NULL;
AVStream *out_stream = NULL;
AVPacket pkt = { 0 };
AVPacket in_pkt = { 0 };
AVDictionary *dic = NULL;
AVCodecContext *avctx = NULL;
avcodec_register_all();
av_register_all();
av_log_set_level(AV_LOG_DEBUG);
avformat_network_init();
int frame_cnt = 0;
//Read_Data read_handle("test.h265");
const int buffer_size = 1024 * 1024 * 3;
uint8_t* frame_buf = new uint8_t[buffer_size];
memset(frame_buf, 0, buffer_size);
//const char* in_filename = "cuc_ieschool.flv";
const char* in_filename = "test.h264";
//const char* in_filename = "4K.h264";
AVFormatContext *ifmt_ctx = NULL;
int videoindex = -1;
//Input
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0)
{
printf("Could not open input file.");
memset(buf, 0, 1024);
av_strerror(ret, buf, 1024);
printf("Couldn't open file %s with error[%s]\n", in_filename, buf);
return -1;
}
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0)
{
printf("Failed to retrieve input stream information");
return -1;
}
int i = 0;
for (i = 0; i < ifmt_ctx->nb_streams; i++)
if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
if (videoindex == -1)
{
printf("Didn't find a video stream.\n");
return -1;
}
const char* out_filename = "rtmp://localhost:1936/live/livestream";
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename);
//const char* out_filename = "rtsp://localhost:556/livestream";
/* allocate the output media context */
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "rtsp", out_filename);
if (!ofmt_ctx)
{
printf("Could not create output context\n");
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "avi", out_filename);
ret = AVERROR_UNKNOWN;
goto exit;
}
ofmt = ofmt_ctx->oformat;
out_stream = avformat_new_stream(ofmt_ctx, NULL);
if (!out_stream)
{
printf("Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto exit;
}
avctx = out_stream->codec;
avctx->codec_type = AVMEDIA_TYPE_VIDEO;
ret = av_dict_set(&dic, "qp", "0", 0);
ret = av_dict_set(&dic, "bufsize", "1524000", 0);
if (ret < 0)
{
printf("av_dict_set bufsize fail \n");
return -1;
}
/* Here the resolution, frame rate and bitrate of the encoded H.264 data would need to be specified */
/* avctx->codec_id = AV_CODEC_ID_H264;
avctx->codec_type = AVMEDIA_TYPE_VIDEO;
avctx->bit_rate = 2000000;
avctx->width = 1280;
avctx->height = 720;
avctx->time_base.num = 1;
avctx->time_base.den = 25;
avctx->qmin = 10;
avctx->qmax = 60;
avctx->codec_tag = 0;
avctx->has_b_frames = 0;*/
/* These settings do not actually take effect, but they must be set or the push will not start */
out_stream->codecpar->codec_id = AV_CODEC_ID_H264;
out_stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
out_stream->codecpar->width = 1920;
out_stream->codecpar->height = 1080;
out_stream->codecpar->codec_tag = 0;
out_stream->codecpar->bit_rate = 8000000;
out_stream->codecpar->format = AV_PIX_FMT_YUV420P;
//out_stream->r_frame_rate = (AVRational){ 25, 1 };
/* print output stream information*/
av_dump_format(ofmt_ctx, 0, out_filename, 1);
if (!(ofmt->flags & AVFMT_NOFILE))
{
ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0)
{
printf("Could not open output file '%s'\n", out_filename);
goto exit;
}
printf("Open output file success!\n");
}
//Write file header
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0)
{
printf("write avi file header failed\n");
goto exit;
}
while (1)
{
av_init_packet(&pkt);
ret = av_read_frame(ifmt_ctx, &in_pkt);
if (ret < 0)
{
av_init_packet(&pkt);
avformat_close_input(&ifmt_ctx);
ret = avformat_open_input(&ifmt_ctx, in_filename, NULL, NULL);
if (ret < 0)
{
return -1;
}
if (av_read_frame(ifmt_ctx, &in_pkt) != 0)
{
return -1;
}
}
memcpy(frame_buf, in_pkt.data, in_pkt.size);
printf("size = %d \n", in_pkt.size);
if (in_pkt.size <= 4)
{
av_packet_unref(&in_pkt);
continue;
}
int frame_type = H264_GetFrameType(frame_buf, in_pkt.size, 4);
if (frame_type == H264_FRAME_I || frame_type == H264_FRAME_SI)
{
// This H.264 frame was identified as an I frame
printf("####I frame ######\n");
pkt.flags |= AV_PKT_FLAG_KEY;
//getchar();
}
else
{
frame_type = H264_GetFrameType((unsigned char*)frame_buf, in_pkt.size, 3);
if (frame_type == H264_FRAME_I || frame_type == H264_FRAME_SI)
{
// This H.264 frame was identified as an I frame
printf("11111####I frame ######\n");
pkt.flags |= AV_PKT_FLAG_KEY;
}
else
{
/* p frame*/
pkt.flags = 0;
}
}
//pkt.dts = pkt.pts = AV_NOPTS_VALUE;
pkt.dts = pkt.pts = frame_cnt;
pkt.size = in_pkt.size; /* frame size */
pkt.data = frame_buf; /* frame data */
if (!pkt.data)
{
printf("no data\n");
continue;
}
//Write
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0)
{
av_strerror(ret, buf, 1024);
}
if (frame_cnt % 2)
printf("Send frame[%d] \n", frame_cnt);
frame_cnt++;
av_packet_unref(&pkt);
av_packet_unref(&in_pkt); /* release the packet returned by av_read_frame() */
memset(frame_buf, 0, buffer_size);
av_usleep(40000);
}
av_write_trailer(ofmt_ctx);
exit:
if (frame_buf)
{
delete[] frame_buf;
frame_buf = NULL;
}
/* close output */
if (ofmt_ctx && !(ofmt_ctx->oformat->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
return 0;
}
The demo was implemented on Ubuntu 14.