Using ffmpeg to probe a GB28181 PS stream

GB28181

GB28181 is a Chinese national standard. After the 2016 revision it has become fairly mature and has many merits. It is, of course, still built on top of the SIP protocol; compared with RTMP, its advantage is that it reuses the RTP and SDP protocols, which is an excellent design decision and solves many problems.

PS stream

Strictly speaking, a PS stream is a file-style stream packaged for transmission, for example the GB28181 PS streams produced by Hikvision and Dahua devices. In fact, ffmpeg can probe a PS stream as well. Prepare a class called IOThread that inherits from the TThread class (given later in this article); it merely starts a thread in a simple way. It does not have to be written like this; it is purely an example.

class IOThread :public TThread
{
	uint8_t* pIObuffer = NULL;
	AVIOContext* pb = NULL;
	AVInputFormat* piFmt = NULL;
	AVFormatContext* pFormatContext = NULL;
	unsigned int video_stream_index;
	queue<RTPPacket*> _queue;
	void *_RTPSession = NULL;
	std::mutex  _mutex;
	int64_t framebytes = 0;
	class Lock {
	private:
		std::lock_guard<std::mutex> m_lock;
	public:
		inline Lock(IOThread* parent) : m_lock(parent->_mutex) {}
	};
public:
	static int fill_iobuffer(void* opaque, uint8_t* buf, int bufSize);
public:


	/*void AddBytes(int len)
	{
		framebytes += len;
	}*/
	int64_t GetBytes()
	{
		return framebytes;
	}

	void InitProbe(void *param);
	int  ReadNetPacket(uint8_t *buf, int buf_size);
	void Push(RTPPacket * pkt);
	void Run();
};
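
The declaration above also depends on a few headers that the original post does not show. Roughly the following are needed; this is a sketch only, and the include paths and the jrtplib headers are assumptions to adjust to your environment:

// Assumed includes for the IOThread declaration above; paths are illustrative.
extern "C" {
#include <libavformat/avformat.h>   // AVFormatContext, AVInputFormat, avio_alloc_context
#include <libavutil/mem.h>          // av_malloc / av_free
}
#include <queue>
#include <mutex>
#include <thread>
#include <chrono>
#include "rtpsession.h"             // jrtplib (assumption): RTPSession, RTPPacket
#include "rtppacket.h"
#include "TThread.h"                // the thread base class given at the end of this article
using namespace std;                // the class uses queue<> unqualified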

The concrete implementation follows. This is only a technical demo for illustration; for real use, please harden it and adapt it yourself.



//Because several IOThread instances may run at once, opaque identifies which one this callback belongs to
int IOThread::fill_iobuffer(void* opaque, uint8_t* buf, int bufSize)
{
	IOThread * IOB = (IOThread*)opaque;

	return  IOB->ReadNetPacket(buf, bufSize);
}

//Initialize the prober; every stream has its own prober
void IOThread::InitProbe(void *param)
{
	_RTPSession = (MyRTPSession*)param;
}
void IOThread::Push(RTPPacket * pkt)
{
	//Lock lock(this);
	if (IsStop())
		return;
	Lock lock(this);
	_queue.push(pkt);
}

int IOThread::ReadNetPacket(uint8_t *buf, int buf_size)
{
	//RTP optimization: read two to a few packets per callback
#define READ_PKT_NUM 3
	int nsize = 0;
	

	for (int i = 0; i < READ_PKT_NUM; i++)
	{
		std::this_thread::sleep_for(std::chrono::milliseconds(5));

		Lock lock(this);
		if (_queue.empty())
			break;
		RTPPacket * pkt = _queue.front();
		_queue.pop();
		int plen = pkt->GetPayloadLength();
		
		memcpy(buf+nsize, pkt->GetPayloadData(), plen);
		nsize += plen;
		((MyRTPSession*)(_RTPSession))->DeletePacket(pkt);
	}
	return nsize;
}
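
For context, Push() has to be fed from the RTP receive side, which the original post does not show. Below is a minimal sketch of that feeding step, assuming jrtplib (which the RTPPacket/DeletePacket calls suggest) with MyRTPSession derived from RTPSession; the function name PollOnce and the ioThread pointer are made up for illustration:

// Sketch only: move received RTP packets from a jrtplib session into the IOThread queue.
// Typically called from the session's poll thread (e.g. inside OnPollThreadStep()).
void PollOnce(MyRTPSession* session, IOThread* ioThread)
{
	session->BeginDataAccess();
	if (session->GotoFirstSourceWithData())
	{
		do
		{
			RTPPacket* pkt;
			while ((pkt = session->GetNextPacket()) != NULL)
			{
				//IOThread::ReadNetPacket() calls DeletePacket() once the payload is consumed
				ioThread->Push(pkt);
			}
		} while (session->GotoNextSourceWithData());
	}
	session->EndDataAccess();
}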


void IOThread::Run()
{
	if (_RTPSession == NULL) {
		printf("session is null\n");
		return;
	}
	//To fix later: handle the case where the data received at the start is not a keyframe
	//(the thread was started earlier and never stopped). For now, simply wait in 5 ms steps
	//until enough packets have been queued.
	while (1)
	{
		if (IsStop())
			return;
		if (_queue.size() < READ_PKT_NUM)
		{
			std::this_thread::sleep_for(std::chrono::milliseconds(5));
		}
		else
			break;
	}
	//First, start timing the probe
	clock_t start, end,end2;
	start = clock();
	//Buffer size; see the READ_PKT_NUM definition above
#define BUFFER_IO_LEN (READ_PKT_NUM * 1500)
	pIObuffer = (uint8_t*)av_malloc(BUFFER_IO_LEN);
	pb = avio_alloc_context(
		pIObuffer,
		BUFFER_IO_LEN,
		0,
		this,
		fill_iobuffer,
		NULL,
		NULL);

	if (av_probe_input_buffer(pb, &piFmt, "", NULL, 0, 0) < 0)//probe the format of the media stream read from memory
	{
		printf("Error: probe format failed\n");
		return;
	}

	//printf("input format:%s[%s]\n", piFmt->name, piFmt->long_name);

	pFormatContext = avformat_alloc_context();
	pFormatContext->pb = pb;
	pFormatContext->flags = AVFMT_FLAG_CUSTOM_IO;
	pFormatContext->max_analyze_duration = 1000000;
	pFormatContext->fps_probe_size = 5;

	//AVInputFormat* pInputFormat = NULL;
	//pInputFormat = av_find_input_format("h264");
	//int ret = avformat_open_input(&pFormatContext, "", pInputFormat, NULL);
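	// Alternative (an assumption, not part of the original code): for a GB28181 PS
	// stream the demuxer can also be forced instead of relying on probing, e.g.
	//   AVInputFormat* psFmt = av_find_input_format("mpeg"); // FFmpeg's MPEG-PS demuxer
	//   avformat_open_input(&pFormatContext, "", psFmt, NULL);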
	int ret = avformat_open_input(&pFormatContext, "", piFmt, NULL);
	if (ret < 0)
	{
		printf("Error: avformat_open_input failed----------------- \n");
		//ASSERT(0);
		return;
	}


	end = clock();
	printf("avformat_open_input() used time: %ld ms \n", start - end);

	ret = avformat_find_stream_info(pFormatContext, NULL);
	if (ret < 0)
	{
		printf("Error: avformat_find_stream_info failed----------------- \n");
		return;
	}
	for (int i = 0; i < pFormatContext->nb_streams; i++)
	{
		if (AVMEDIA_TYPE_VIDEO == pFormatContext->streams[i]->codecpar->codec_type)
		{
			video_stream_index = i;
			//avCodecID = pFormatContext->streams[video_stream_index]->codec->codec_id;
			break;
		}
	}
	end2 = clock();
	printf("avformat_find_stream_info() used time: %ld ms\n", end2- start);

#if 1
	//char filename[MAX_PATH];
	//_snprintf(filename, 128, "qianbo_ffmpeg.264");
	FILE * fp = fopen("qianbo_ffmpeg.264", "wb");
#endif
	//fwrite(data, 1, len, fp);
	AVPacket packet;
	while (1)
	{
		if (IsStop())
			break;
		std::this_thread::sleep_for(std::chrono::milliseconds(20));
		av_init_packet(&packet);
		if (av_read_frame(pFormatContext, &packet) == 0) { //read succeeded
			if (packet.stream_index == video_stream_index)
			{
				uint8_t * data = packet.data;
				//char buffer[256];
				//sprintf(buffer, "%02x %02x %02x %02x %02x %02x len : %d", data[0], data[1], data[2], data[3], data[4], data[5], packet.size);
				//printf("%s\n",buffer);
				fwrite(data, 1, packet.size, fp);
			}
			av_packet_unref(&packet);
		}
	}
	//Note: libavformat may reallocate pb->buffer internally, so free pb->buffer
	//rather than the original pIObuffer pointer, and close the demuxer as well.
	avformat_close_input(&pFormatContext);
	av_freep(&pb->buffer);
	avio_context_free(&pb);
#if 1
	fclose(fp);

#endif
}
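
Putting it together, one IOThread acts as the prober for one incoming stream. A minimal usage sketch built only on the class's own interface (the session variable is assumed to be the already-created MyRTPSession for this GB28181 channel):

// Sketch: one prober per stream.
IOThread* probe = new IOThread();
probe->InitProbe(session);   // hand the RTP session to the prober
probe->Start();              // TThread::Start() spawns the thread, which enters Run()

// ... RTP packets are pushed into the prober as they arrive ...

probe->Stop();               // ask Run() to exit
probe->Join();               // wait for the thread to finish
delete probe;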

The thread class

#ifndef _TTHREAD_RUN_ABLE_H_
#define _TTHREAD_RUN_ABLE_H_


#include <thread>
#include <mutex>
#include <condition_variable>
#include <functional>
using namespace std;

class TThread
{
private:

	//the worker thread
	thread _thread;
	//signal waiting (mutex + condition variable)
	std::mutex _signal_mutex;
	std::condition_variable _cond;
protected:
	volatile char _stop = true;
	//lock protecting the running state
	std::mutex _mutex;
public:
	TThread()
	{}
	virtual ~TThread()
	{}

public:
	
	void Join()
	{
		if (_thread.joinable())
			_thread.join();
	}
	bool  IsStop()
	{
		return _stop == 1 ? true : false;
	}
	void WaitForSignal()
	{
		std::unique_lock<std::mutex> ul(_signal_mutex);
		_cond.wait(ul);
	}
	void Notify()
	{
		_cond.notify_one();
	}

	virtual int Start()
	{
		if (_stop == 0)
			return -1;
		_stop = 0;
		_thread = std::thread(std::bind(&TThread::Run, this));
		return 0;
	}	
	
	virtual void Stop()
	{
		//notify first, then set the stop flag
		Notify();
		_stop = 1; // true;
	}

	virtual void Run() = 0;

};
#endif

The above is the whole process of probing a stream with ffmpeg. I will not explain it in more detail; just try it hands-on. This approach works quite well, because ffmpeg hides some of the details for us. To get good results, though, you still need to study the PS stream carefully and understand in detail how PS streams differ between vendors.
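
As a small aid to understanding the PS stream, the incoming payloads can be sanity-checked against the MPEG-PS start codes before handing them to ffmpeg. A minimal sketch (the function name is illustrative):

// Sketch: check whether a payload begins with an MPEG-PS pack header (0x000001BA).
// Other common start codes: 0x000001BB system header, 0x000001BC program stream map,
// 0x000001E0..0x000001EF video PES. In GB28181 PS encapsulation a frame normally
// starts at a pack header, and key frames typically also carry the system header and PSM.
#include <cstdint>
#include <cstddef>

static bool StartsWithPsPackHeader(const uint8_t* data, size_t len)
{
	return len >= 4 && data[0] == 0x00 && data[1] == 0x00 &&
	       data[2] == 0x01 && data[3] == 0xBA;
}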

Thank you for reading my article.
