Using FFmpeg to read a local file and push it as a stream

Use FFmpeg to read a local file, push it out as a stream, and optionally seek to a specified time and play from there.
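
For the seek-to-time part, a minimal sketch (ifmt_ctx is the opened input context, and the 180-second offset is only an example) that jumps to a given position right after the input has been opened:

// Illustrative: seek to roughly 180 s into the input before entering the read loop.
// With stream_index == -1 the timestamp is interpreted in AV_TIME_BASE units.
int64_t seek_target = ifmt_ctx->start_time + (int64_t)(180.0 * AV_TIME_BASE);
if (av_seek_frame(ifmt_ctx, -1, seek_target, AVSEEK_FLAG_BACKWARD) < 0)
	printf("Seek failed, playing from the beginning\n");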

Note: for MP4 files, mind the h264_mp4toannexb and aac_adtstoasc bitstream filters (H.264 in MP4 needs Annex-B start codes when remuxing to MPEG-TS, and ADTS AAC must be converted to an AudioSpecificConfig when pushing to FLV/RTMP).
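
If the input is an MP4 instead of a TS, a minimal sketch of applying h264_mp4toannexb with the legacy bitstream-filter API (the same API level the code below uses; the variable names are illustrative, and aac_adtstoasc is applied the same way to audio packets when pushing FLV/RTMP):

// Illustrative: convert length-prefixed H.264 from MP4 to Annex-B for MPEG-TS output.
AVBitStreamFilterContext *h264bsf = av_bitstream_filter_init("h264_mp4toannexb");

// For each video packet "pkt" read from the MP4 input, before writing it out:
uint8_t *out_data = NULL;
int out_size = 0;
av_bitstream_filter_filter(h264bsf, ifmt_ctx->streams[videoindex]->codec, NULL,
	&out_data, &out_size, pkt.data, pkt.size, pkt.flags & AV_PKT_FLAG_KEY);
// out_data/out_size now hold the Annex-B payload to mux; free it after writing.
av_free(out_data);

// After the last packet:
av_bitstream_filter_close(h264bsf);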

To implement loop playback, two points matter (a sketch follows this list):

1. When playback reaches the end, seek back to the start position and play again;

2. Timestamps must be re-stamped (offset so they keep increasing), otherwise the muxer reports errors;
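
A minimal sketch of that loop logic, assuming it replaces the plain break-on-EOF inside the read loop below (ts_offset and the 8-stream cap are illustrative, not part of the original code):

// Illustrative: on EOF, accumulate the file duration into a per-stream offset
// (kept in each stream's own time_base), seek back to the start and keep reading.
AVRational time_base_q = { 1, AV_TIME_BASE };
int64_t ts_offset[8] = { 0 };	// assumes no more than 8 input streams

ret = av_read_frame(ifmt_ctx, &pkt);
if (ret == AVERROR_EOF)
{
	for (i = 0; i < ifmt_ctx->nb_streams; i++)
		ts_offset[i] += av_rescale_q(ifmt_ctx->duration, time_base_q, ifmt_ctx->streams[i]->time_base);
	av_seek_frame(ifmt_ctx, -1, ifmt_ctx->start_time, AVSEEK_FLAG_BACKWARD);
	m_sys_base_time = -1;	// also reset the real-time pacing baseline
	// continue;           // the next av_read_frame starts over from the beginning
}

// Re-stamp before rescaling to the output time_base so timestamps keep increasing:
if (pkt.dts != AV_NOPTS_VALUE) pkt.dts += ts_offset[pkt.stream_index];
if (pkt.pts != AV_NOPTS_VALUE) pkt.pts += ts_offset[pkt.stream_index];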

 

Straight to the code:

#define __STDC_CONSTANT_MACROS

#ifdef __cplusplus
extern "C"
{
#endif

#include "libavutil/mathematics.h"
#include "libavutil/avutil.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/audio_fifo.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavfilter/avfiltergraph.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libavutil/time.h"


#include <stdio.h>
#include <stdlib.h>
#ifdef __cplusplus
}
#endif

#pragma  comment(lib, "avcodec.lib")
#pragma  comment(lib, "avdevice.lib")
#pragma  comment(lib, "avfilter.lib")
#pragma  comment(lib, "avformat.lib")
#pragma  comment(lib, "avutil.lib")
#pragma  comment(lib, "postproc.lib")
#pragma  comment(lib, "swresample.lib")
#pragma  comment(lib, "swscale.lib")


int64_t				m_base_dts = -1;
int64_t				m_sys_base_time = -1;

int64_t GetCurSysTime()
{
	return av_gettime() / 1000;
}

void SetTime(const int64_t sys_base_time, const int64_t base_dts)
{
	m_sys_base_time = sys_base_time;
	m_base_dts = base_dts;
}


int main()
{
	// Register everything (formats, filters, network)
	av_register_all();
	avfilter_register_all();
	avformat_network_init();
	
	
	AVOutputFormat *ofmt = NULL;
	//Input AVFormatContext and Output AVFormatContext
	AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
	AVPacket pkt;
	int ret, i;
	int videoindex = -1;
	int frame_index = 0;
	int64_t start_time = 0;

	const char *in_filename = "E:\\video\\1.ts";	// Input file URL
	const char *out_filename = "udp://@231.0.16.100:7001";	// Output URL (UDP)
	//const char *out_filename = "rtmp://172.17.36.15:1935/live/1234";	// Output URL (RTMP)


	// Open the input file
	ifmt_ctx = NULL;
	if ((ret = avformat_open_input(&ifmt_ctx, in_filename, NULL, NULL)) < 0)
	{
		return ret;
	}

	if ((ret = avformat_find_stream_info(ifmt_ctx, NULL)) < 0)
	{
		return ret;
	}
	AVCodec *dec;

	/* select the video stream */
	ret = av_find_best_stream(ifmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0);

	for (i = 0; i < ifmt_ctx->nb_streams; i++)
	{
		// Only process video and audio streams
		if (ifmt_ctx->streams[i]->codec->codec_type != AVMEDIA_TYPE_VIDEO && ifmt_ctx->streams[i]->codec->codec_type != AVMEDIA_TYPE_AUDIO)
			continue;

		AVStream *stream = ifmt_ctx->streams[i];
		dec = avcodec_find_decoder(stream->codecpar->codec_id);
		if (!dec)
			return AVERROR_DECODER_NOT_FOUND;

		AVCodecContext *codec_ctx;
		codec_ctx = stream->codec;
		if (!codec_ctx)
			return AVERROR(ENOMEM);

		ret = avcodec_parameters_to_context(codec_ctx, stream->codecpar);
		if (ret < 0)
		{
			return ret;
		}

		if (codec_ctx->codec_type == AVMEDIA_TYPE_VIDEO || codec_ctx->codec_type == AVMEDIA_TYPE_AUDIO)
		{
			/* Open decoder */
			if (codec_ctx->codec_type == AVMEDIA_TYPE_VIDEO)
			{
				videoindex = i;
				codec_ctx->framerate = av_guess_frame_rate(ifmt_ctx, stream, NULL);
			}

			ret = avcodec_open2(codec_ctx, dec, NULL);
			if (ret < 0)
			{
				return ret;
			}
		}
	}
	av_dump_format(ifmt_ctx, 0, in_filename, 0);

		
	// Output: open the output path
	ofmt_ctx = NULL;
	//avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);
	//avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP
	avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);//UDP

	ofmt = ofmt_ctx->oformat;
	for (i = 0; i < ifmt_ctx->nb_streams; i++)
	{
		// Only process video and audio streams
		if (ifmt_ctx->streams[i]->codec->codec_type != AVMEDIA_TYPE_VIDEO && ifmt_ctx->streams[i]->codec->codec_type != AVMEDIA_TYPE_AUDIO)
			continue;

		//Create output AVStream according to input AVStream
		AVStream *in_stream = ifmt_ctx->streams[i];
		AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
		if (!out_stream)
		{
			printf("Failed allocating output stream\n");
			ret = AVERROR_UNKNOWN;
			goto end;
		}

		//Copy the settings of AVCodecContext
		ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
		if (ret < 0)
		{
			printf("Failed to copy context from input to output stream codec context\n");
			goto end;
		}
		out_stream->codec->codec_tag = 0;
		if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
			out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
	}


	if (!(ofmt_ctx->oformat->flags & AVFMT_NOFILE))
	{
		ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
		if (ret < 0)
			return ret;
	}

	//av_opt_set(ofmt_ctx->priv_data, "-bsf:a", "aac_adtstoasc", 0);

	AVDictionary* opt = NULL;
	ret = avformat_write_header(ofmt_ctx, &opt);
	int nVPtsIncrement = 0;
	int nAPtsIncrement = 0;
	if (ret < 0)
		return ret;

	start_time = av_gettime();
	while (1)
 	{
		AVStream *in_stream, *out_stream;
		//Get an AVPacket
		ret = av_read_frame(ifmt_ctx, &pkt);
		if (ret < 0)
			break;

		in_stream = ifmt_ctx->streams[pkt.stream_index];
		out_stream = ofmt_ctx->streams[pkt.stream_index];

		//Important:Delay
#if 0
		AVRational time_base = ifmt_ctx->streams[pkt.stream_index]->time_base;
		AVRational time_base_q = { 1, AV_TIME_BASE };
		int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
		int64_t now_time = av_gettime() - start_time;
		int64_t diff = pts_time - ifmt_ctx->start_time;
		if (diff > now_time)
			av_usleep(diff - now_time);
#else
		int64_t cur_dts;
		int64_t cur_dts_ms;
		cur_dts = pkt.dts;
		cur_dts_ms = cur_dts * av_q2d(in_stream->time_base) * 1000;
		if (pkt.dts == AV_NOPTS_VALUE)
		{
			//m_lock_time.Lock();
			m_sys_base_time = -1;
			//m_lock_time.Unlock();
			continue;
		}
		else
		{
			//m_lock_time.Lock();
			if (m_sys_base_time < 0 && pkt.dts != AV_NOPTS_VALUE) // set the time reference baseline
			{
				SetTime(GetCurSysTime(), cur_dts_ms);
			}
			//m_lock_time.Unlock();

			while (GetCurSysTime() - m_sys_base_time < cur_dts_ms - m_base_dts)
			{
				av_usleep(1000);
			}
		}
#endif

		/* copy packet */
		//Convert PTS/DTS
#if 1
		pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
		pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
		pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
#else
		av_packet_rescale_ts(&pkt, in_stream->time_base, out_stream->time_base);
#endif

		ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
		if (ret < 0)
 		{
			printf("Error muxing packet\n");
			break;
		}

		av_free_packet(&pkt);
		static bool bFlag = true;
		if (bFlag)
		{
			// Fast-forward (seek ahead)
			//int nRet = av_seek_frame(ifmt_ctx, -1, ifmt_ctx->start_time+(180.0) * AV_TIME_BASE, AVSEEK_FLAG_BACKWARD);
			bFlag = false;
		}
	}

	//Write file trailer
	av_write_trailer(ofmt_ctx);

end:
	avformat_close_input(&ifmt_ctx);
	/* close output */
	if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
		avio_close(ofmt_ctx->pb);
	avformat_free_context(ofmt_ctx);
	if (ret < 0 && ret != AVERROR_EOF)
 	{
		printf("Error occurred.\n");
		return -1;
	}

	return 0;
}
