一 遇到这样一个问题
本来录制的是hls,但是用户需要下载录制文件,一般需要mp4格式。
二 如何合并ts列表为mp4?
用两个io上下文,一个用于输入,一个用于输出。输出上下文在整个过程中都是同一个,
而输入上下文是一个个ts文件各自对应的(每处理一个ts文件就新开一个输入上下文)。
详见代码:
/*
 * Append one MPEG-TS segment to a growing MP4 file.
 *
 * The MP4 output context lives in merge_ctx and is created lazily on the
 * first call; every call opens its own input context for the given ts
 * segment, copies all packets into the shared muxer, then closes the input.
 * NOTE(review): av_write_trailer() and teardown of merge_ctx are assumed to
 * happen elsewhere after the last segment — confirm against the caller.
 *
 * input_filename  - path of the ts segment to append (must not be NULL)
 * output_filename - path of the target mp4 (must not be NULL; only consumed
 *                   when the output context is first created)
 * merge_ctx       - shared merge state carried across calls (must not be NULL)
 *
 * Returns 0 on success, NGX_ERROR on failure.
 */
int ConvertTsFilesToMp4(const char *input_filename, const char *output_filename, MergeTsContext *merge_ctx)
{
    AVFormatContext *input_format_ctx_ = NULL;
    AVPacket packet = {0};
    int rc = NGX_ERROR;
    int error;

    if (input_filename == NULL || output_filename == NULL || merge_ctx == NULL)
    {
        return NGX_ERROR;
    }

    error = avformat_open_input(&input_format_ctx_, input_filename, NULL, NULL);
    if (error != 0)
    {
        printf("avformat_open_input file error\n");
        return NGX_ERROR;
    }

    error = avformat_find_stream_info(input_format_ctx_, NULL);
    if (error < 0)
    {
        printf("avformat_find_stream_info error\n");
        goto out; /* the original leaked input_format_ctx_ on this and all later error paths */
    }

    /* First segment: create and open the shared MP4 output context. */
    if (merge_ctx->has_opened == false || merge_ctx->output_format_ctx == NULL)
    {
        error = avformat_alloc_output_context2(&merge_ctx->output_format_ctx, NULL, "mp4", output_filename);
        if (error != 0)
        {
            printf("avformat_alloc_output_context2 error\n");
            goto out;
        }
        av_dump_format(input_format_ctx_, 0, input_filename, 0);

        /* Mirror every input stream into the output, copying codec parameters. */
        for (unsigned int i = 0; i < input_format_ctx_->nb_streams; i++)
        {
            merge_ctx->in_stream = input_format_ctx_->streams[i];
            merge_ctx->out_stream = avformat_new_stream(merge_ctx->output_format_ctx,
                                                        merge_ctx->in_stream->codec->codec);
            if (!merge_ctx->out_stream) {
                printf("avformat_new_stream out stream failed ##\n");
                goto out;
            }
            if (avcodec_copy_context(merge_ctx->out_stream->codec, merge_ctx->in_stream->codec) < 0) {
                printf("Failed to copy context from input to output stream codec context\n");
                goto out;
            }
            /* Let the mp4 muxer choose its own codec tag; ts tags are not valid in mp4. */
            merge_ctx->out_stream->codec->codec_tag = 0;
            if (merge_ctx->output_format_ctx->oformat->flags & AVFMT_GLOBALHEADER)
                merge_ctx->out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
        }

        error = avio_open(&(merge_ctx->output_format_ctx->pb), output_filename, AVIO_FLAG_WRITE);
        if (error < 0)
        {
            printf("avio_open output file error\n");
            goto out;
        }

        /* aac_adtstoasc strips the ADTS headers that mp4 cannot carry; the h264
         * filter is initialized as before for other callers of merge_ctx. */
        merge_ctx->video = av_bitstream_filter_init("h264_mp4toannexb");
        merge_ctx->audio = av_bitstream_filter_init("aac_adtstoasc");
        if (merge_ctx->video == NULL || merge_ctx->audio == NULL)
        {
            printf("av_bitstream_filter_init failed ##\n");
            goto out;
        }

        /* Write the header exactly once, when the output is opened — the
         * original called this from inside the packet loop. */
        error = avformat_write_header(merge_ctx->output_format_ctx, NULL);
        if (error < 0)
        {
            printf("avformat_write_header error\n");
            goto out;
        }

        merge_ctx->has_opened = true;
    }

    /* Copy every packet of this segment into the shared output. */
    while (av_read_frame(input_format_ctx_, &packet) == 0)
    {
        AVStream *in_st  = input_format_ctx_->streams[packet.stream_index];
        AVStream *out_st = merge_ctx->output_format_ctx->streams[packet.stream_index];
        uint8_t *orig_data = packet.data;
        int orig_size = packet.size;
        int filtered = 0;

        /* The ADTS->ASC conversion must run on EVERY audio packet, not only the
         * first one, and with the matching stream's codec context — the original
         * ran both filters once, against whatever stream out_stream last held. */
        if (in_st->codec->codec_type == AVMEDIA_TYPE_AUDIO && merge_ctx->audio != NULL)
        {
            filtered = av_bitstream_filter_filter(merge_ctx->audio, out_st->codec, NULL,
                                                  &packet.data, &packet.size,
                                                  orig_data, orig_size,
                                                  packet.flags & AV_PKT_FLAG_KEY);
            if (filtered < 0)
            {
                /* Best effort: fall back to the unfiltered payload. */
                packet.data = orig_data;
                packet.size = orig_size;
                filtered = 0;
            }
        }

        /* Rescale from the input stream clock to the muxer's stream timebase.
         * The original built a {num=1000, den=0} timebase — den == 0 would have
         * divided by zero had the commented-out rescale ever been enabled. */
        packet.pts = av_rescale_q_rnd(packet.pts, in_st->time_base, out_st->time_base,
                                      (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        packet.dts = av_rescale_q_rnd(packet.dts, in_st->time_base, out_st->time_base,
                                      (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        packet.duration = av_rescale_q(packet.duration, in_st->time_base, out_st->time_base);
        packet.pos = -1;

        error = av_write_frame(merge_ctx->output_format_ctx, &packet);
        if (error < 0)
        {
            printf("av_write_frame error\n");
        }

        if (filtered > 0)
        {
            /* The bitstream filter allocated a replacement buffer; free it and
             * restore the packet's own buffer before unreffing. */
            av_free(packet.data);
            packet.data = orig_data;
            packet.size = orig_size;
        }
        av_packet_unref(&packet); /* the original leaked every packet */
    }

    rc = 0; /* NGX_OK — the original fell off the end with no return value */

out:
    avformat_close_input(&input_format_ctx_);
    return rc;
}