Displaying an RTSP video stream in Qt with FFmpeg

The simplest approach is to render onto a QLabel: a worker thread decodes the RTSP stream with FFmpeg, converts each frame to RGB32 with libswscale, and the GUI thread displays it via QLabel::setPixmap(). The header below declares the decoding thread.


#ifndef QWIDEGETPLAY_H
#define QWIDEGETPLAY_H

#include <QWidget>
#include <QThread>
#include <QImage>
#include <QPainter>
#include <QDebug>
#include <QLabel>
extern "C" {
// MSVC-only: link the FFmpeg import libraries here. With qmake/MinGW, declare the
// libraries in the .pro file instead (see the note after this header).
#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"swscale.lib")


#include "libavutil/avutil.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
}

QStringList ffGetStreamInfo(QString url);


class QWidegetRender : public QThread
{
	Q_OBJECT

public:
	QWidegetRender(QLabel* toRender,QObject *parent=0);
	~QWidegetRender();

	void startRender(QString playUrl);
	void stopRender();

public slots:
	void onImage();
	void onFScreen();
signals:
	void imageReady();
	
protected:
	bool eventFilter(QObject *obj, QEvent *event);
protected:
	virtual void run();
	int create_swsContext(int iFFPixFmt);
	int decode_packet(int *got_frame, int cached);
	int open_codec_context(int *stream_idx,	AVFormatContext *fmt_ctx, enum AVMediaType type);
	static int ffInterrupt_callback(void* param);

private:
	QLabel* m_Widget;
	bool m_bFullSrceen;
	bool m_bStop;

	AVFormatContext *fmt_ctx ;
	AVCodecContext *video_dec_ctx ;
	AVStream *video_stream ;
	QString src_filename ;
	SwsContext* sws_cxt;
	uint8_t *video_dst_data[4] ;
	int video_dst_linesize[4];
	int video_dst_bufsize;
	int video_stream_idx ;
	AVFrame *frame ;
	AVPacket pkt;
};

#endif // QWIDEGETPLAY_H
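A note on linking: the #pragma comment(lib, ...) directives above only work with the MSVC toolchain. With qmake (including MinGW builds) the FFmpeg headers and libraries are normally declared in the .pro file; a minimal sketch, assuming FFmpeg is unpacked under C:/ffmpeg (the path is a placeholder for your own install):

# hypothetical .pro excerpt; adjust the FFmpeg path to your installation
INCLUDEPATH += C:/ffmpeg/include
LIBS += -LC:/ffmpeg/lib -lavformat -lavcodec -lswscale -lavutil

The corresponding implementation (qwidegetplay.cpp) follows.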

#include "qwidegetplay.h"
#include <QEvent>
#include "qproxyserver.h"
#include <QMessageBox>
QWidegetRender::QWidegetRender(QLabel* toRender,QObject *parent)
	: QThread(parent)
{
	m_bStop = true;
	m_bFullSrceen = false;
	m_Widget = toRender;
	// BlockingQueuedConnection: the decode thread waits until the GUI thread has drawn the frame.
	connect(this,SIGNAL(imageReady()),this,SLOT(onImage()),Qt::BlockingQueuedConnection);
	//connect(m_Widget,SIGNAL(triggered()),this,SLOT(onFScreen()));
	//m_Widget->installEventFilter(this);
}

QWidegetRender::~QWidegetRender()
{
	if (!m_bStop)
	{
		stopRender();
	}
}

void QWidegetRender::startRender(QString playUrl)
{
	CRtspCheck rtspCheck;
	if (playUrl.isEmpty() || !rtspCheck.openUrl(playUrl))
	{
		QMessageBox::information(NULL,"Warning",QStringLiteral("This URL cannot be played"));
		return ;
	}
	src_filename = playUrl;	
	if (!m_bStop)
	{
		stopRender();
	}
	m_bStop = false;
	
	start();
}

void QWidegetRender::stopRender()
{
	
	// Disconnect first: with a BlockingQueuedConnection the decode thread would otherwise
	// deadlock emitting imageReady() while the GUI thread is blocked in wait().
	disconnect(this,SIGNAL(imageReady()),this,SLOT(onImage()));
	m_bStop = true;
	if (isRunning() && !wait(10000))
	{
		qDebug()<<"wait render thread failed";
	}
	else
	{
		qDebug()<<"wait render thread ok";
	}

	connect(this,SIGNAL(imageReady()),this,SLOT(onImage()),Qt::BlockingQueuedConnection);
	QPixmap pixBlack(1,1);
	pixBlack.fill(Qt::black);
	m_Widget->setPixmap(pixBlack.scaled(m_Widget->width(),m_Widget->height()));
}
bool QWidegetRender::eventFilter(QObject *obj, QEvent *event)
{
	if (obj == m_Widget && event->type()==QEvent::MouseButtonDblClick)
	{
		onFScreen();
		return true;
	}
	else
	{
		return QObject::eventFilter(obj, event);
	}
}
void QWidegetRender::onFScreen()
{
	if (m_bStop)
	{
		return;
	}
	static Qt::WindowFlags flags;
	if (!m_bFullSrceen)
	{
		flags = m_Widget->windowFlags();
		m_Widget->setWindowFlags(Qt::Dialog);
		m_Widget->showFullScreen();
	}
	else
	{
		m_Widget->setWindowFlags(flags);
		m_Widget->showNormal();
		m_Widget->setGeometry(9,9,431,221);
	}
	m_bFullSrceen = !m_bFullSrceen;

}
struct st_dev_streamInfo
{
	QString width;
	QString height;
};



void QWidegetRender::run()
{
	
	qDebug()<<"render thread starting";
	fmt_ctx = avformat_alloc_context();
	video_dec_ctx=NULL ;
	video_stream=NULL ;
	sws_cxt=NULL;
	for (int i = 0; i < 4; i++)
	{
		video_dst_data[i] = NULL;
		video_dst_linesize[i] = 0;
	}

	video_dst_bufsize = 0;
	video_stream_idx = 0;
	frame = NULL;
	int ret = 0, got_frame = 0;
	AVDictionary* options = NULL;

	//src_filename = "rtsp://192.168.0.134:8554/Video640x480@60fps";
	//src_filename = "rtsp://admin:[email protected]:1025/av_stream/ch1/main"; 
	QByteArray arr = src_filename.toLatin1();
	const char* playUrl = arr.data();
	//QStringList wh = ffGetStreamInfo(src_filename);
	/* register all formats and codecs */

	//av_register_all();	
	//avformat_network_init();

	fmt_ctx->interrupt_callback.callback = QWidegetRender::ffInterrupt_callback;
	fmt_ctx->interrupt_callback.opaque = this;
	av_dict_set(&options, "rtsp_transport", "tcp", 0);
	if (avformat_open_input(&fmt_ctx, playUrl, NULL, &options) < 0) {
		fprintf(stderr, "Could not open source file %s\n", playUrl);
		goto end;
	}
	//void ** context ;
	//const AVOption* retOpt = av_opt_find2(&fmt_ctx,"udp","",0,AV_OPT_SEARCH_FAKE_OBJ,0);
	

	if (open_codec_context(&video_stream_idx, fmt_ctx, AVMEDIA_TYPE_VIDEO) >= 0) {
		video_stream = fmt_ctx->streams[video_stream_idx];
		video_dec_ctx = video_stream->codec;

	}

	/* dump input information to stderr */
	av_dump_format(fmt_ctx, 0, playUrl, 0);

	if (!video_stream) {
		fprintf(stderr, "Could not find video stream in the input, aborting\n");
		ret = 1;
		goto end;
	}

	frame = av_frame_alloc();
	if (!frame) {
		fprintf(stderr, "Could not allocate frame\n");
		ret = AVERROR(ENOMEM);
		goto end;
	}

	/* initialize packet, set data to NULL, let the demuxer fill it */
	av_init_packet(&pkt);
	pkt.data = NULL;
	pkt.size = 0;

	/* read frames from the stream */
	while (av_read_frame(fmt_ctx, &pkt) >= 0) {

		decode_packet(&got_frame, 0);

		av_free_packet(&pkt);

		if (m_bStop)
		{
			qDebug()<<"render play stop, break loop...";
			break;
		}
	}

	qDebug()<<"out of loop...";
	/* flush cached frames */
	pkt.data = NULL;
	pkt.size = 0;
	do {
		qDebug()<<"flush cached frames";
		decode_packet(&got_frame, 1);
	} while (got_frame);

end:
	qDebug()<<"play end, free resource...";
	if (video_dec_ctx)
		avcodec_close(video_dec_ctx);
	if (fmt_ctx)
		avformat_close_input(&fmt_ctx);

	if (frame)
		av_frame_free(&frame);

	if (video_dst_data[0])
		av_freep(&video_dst_data[0]);

	if (sws_cxt) {
		sws_freeContext(sws_cxt);
		sws_cxt = NULL;
	}

	qDebug()<<"render thread exit";
	return ;
}

int QWidegetRender::create_swsContext(int iFFPixFmt)
{
	int ret = 0;
	/* allocate image where the decoded image will be put */
	ret = av_image_alloc(video_dst_data, video_dst_linesize,
		video_dec_ctx->width,video_dec_ctx->height,
		(AVPixelFormat)iFFPixFmt, 4);
	if (ret < 0) {
		fprintf(stderr, "Could not allocate raw video buffer\n");
		return -1;
	}
	video_dst_bufsize = ret;

	sws_cxt = sws_getContext(video_dec_ctx->width, video_dec_ctx->height, video_dec_ctx->pix_fmt,
		video_dec_ctx->width, video_dec_ctx->height, (AVPixelFormat)iFFPixFmt, SWS_BILINEAR, NULL, NULL, NULL);
	if (!sws_cxt) {
		fprintf(stderr, "Could not create swscale context\n");
		return -1;
	}

	return 0;
}

int QWidegetRender::decode_packet(int *got_frame, int cached)
{
	int ret = 0;

	if (pkt.stream_index == video_stream_idx) {
		/* decode video frame */
		ret = avcodec_decode_video2(video_dec_ctx, frame, got_frame, &pkt);
		if (ret < 0) {
			fprintf(stderr, "Error decoding video frame\n");
			return ret;
		}

		if (*got_frame) {
			if(!sws_cxt){
				create_swsContext(AV_PIX_FMT_RGB32);
			}
			sws_scale(sws_cxt, frame->data,frame->linesize,0,video_dec_ctx->height,
				video_dst_data,video_dst_linesize);
			 emit imageReady();
		}
	} 

	return ret;
}
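The decode path above uses avcodec_decode_video2() and AVStream::codec, which have been deprecated since FFmpeg 3.1 and are removed in newer releases. If you build against a recent FFmpeg, the equivalent step uses avcodec_send_packet()/avcodec_receive_frame(). The following is a minimal sketch only: it assumes video_dec_ctx was created with avcodec_alloc_context3() and filled via avcodec_parameters_to_context(), and decode_packet_send_receive() is a hypothetical member that is not declared in the header above.

// Hypothetical replacement for decode_packet() on FFmpeg 4.x and later.
int QWidegetRender::decode_packet_send_receive(const AVPacket* packet)
{
	int ret = avcodec_send_packet(video_dec_ctx, packet);   // packet == NULL puts the decoder into draining mode
	if (ret < 0)
		return ret;

	// One packet can yield zero, one or several frames.
	while ((ret = avcodec_receive_frame(video_dec_ctx, frame)) >= 0) {
		if (!sws_cxt)
			create_swsContext(AV_PIX_FMT_RGB32);
		sws_scale(sws_cxt, frame->data, frame->linesize, 0, video_dec_ctx->height,
			video_dst_data, video_dst_linesize);
		emit imageReady();
	}
	// AVERROR(EAGAIN): decoder needs more input; AVERROR_EOF: fully drained.
	if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
		return 0;
	return ret;
}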

int QWidegetRender::open_codec_context(int *stream_idx,
	AVFormatContext *fmt_ctx, enum AVMediaType type)
{
	int ret;
	AVStream *st;
	AVCodecContext *dec_ctx = NULL;
	AVCodec *dec = NULL;

	ret = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
	if (ret < 0) {
		fprintf(stderr, "Could not find %s stream in input file '%s'\n",
			av_get_media_type_string(type), qPrintable(src_filename));
		return ret;
	} else {
		*stream_idx = ret;
		st = fmt_ctx->streams[*stream_idx];

		/* find decoder for the stream */
		dec_ctx = st->codec;
		dec = avcodec_find_decoder(dec_ctx->codec_id);
		if (!dec) {
			fprintf(stderr, "Failed to find %s codec\n",
				av_get_media_type_string(type));
			return AVERROR(EINVAL);
		}

		if ((ret = avcodec_open2(dec_ctx, dec, NULL)) < 0) {
			fprintf(stderr, "Failed to open %s codec\n",
				av_get_media_type_string(type));
			return ret;
		}
	}

	return 0;
}


void QWidegetRender::onImage()
{
	// Pass the linesize explicitly: the buffer from av_image_alloc() may have padded rows.
	QImage image(video_dst_data[0], video_dec_ctx->width, video_dec_ctx->height,
		video_dst_linesize[0], QImage::Format_RGB32);
	QImage destImage = image.scaled(m_Widget->width(), m_Widget->height(), Qt::IgnoreAspectRatio);
	m_Widget->setPixmap(QPixmap::fromImage(destImage));

}

int QWidegetRender::ffInterrupt_callback(void* param)
{
	QWidegetRender* pThis = (QWidegetRender*)param;
	if (pThis)
	{
		// Returning 1 aborts blocking FFmpeg calls, so stopRender() can interrupt
		// avformat_open_input() and av_read_frame().
		return pThis->m_bStop ? 1 : 0;
	}
	return 1;
}


QStringList ffGetStreamInfo(QString url)
{
	QStringList result;
	result<<"0"<<"0";
	AVFormatContext* fmt_ctx = NULL;
	AVDictionary* options = NULL;
	QByteArray arr = url.toLatin1();
	const char* playUrl = arr.data();
	av_dict_set(&options, "rtsp_transport", "tcp", 0);
	if (avformat_open_input(&fmt_ctx, playUrl, NULL, &options) < 0) {
		qDebug()<<"avformat_open_input error!";
		return result;
	}

	if (avformat_find_stream_info(fmt_ctx,NULL) < 0)
	{
		qDebug()<<"avformat_find_stream_info error!";
		if (fmt_ctx)
			avformat_close_input(&fmt_ctx);
		return result;
	}
	for (unsigned int i = 0; i < fmt_ctx->nb_streams; i++)
	{
		AVStream* pstream =  fmt_ctx->streams[i];
		if (pstream)
		{
			AVCodecContext* pCodec = pstream->codec;
			if (pCodec && pCodec->codec_type == AVMEDIA_TYPE_VIDEO)
			{				
				QString width;
				QString height;
				width.setNum(pCodec->width);
				height.setNum(pCodec->height);
				result[0] = width;
				result[1] = height;
			}
		}
	}
	if (fmt_ctx)
		avformat_close_input(&fmt_ctx);
	return result ;

}
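To drive the class, construct it with the QLabel that should show the video, then call startRender() and stopRender(). A minimal sketch of a caller, assuming a Designer form with a QLabel named labelVideo (the widget name and the URL are placeholders); the event filter must be installed by the caller because the installEventFilter() call in the constructor is commented out:

// hypothetical caller, e.g. inside a QMainWindow subclass
QWidegetRender* render = new QWidegetRender(ui->labelVideo, this);
ui->labelVideo->installEventFilter(render);               // enables the double-click full-screen toggle
render->startRender("rtsp://192.168.0.100:554/stream");   // placeholder URL

// ... later, e.g. in closeEvent():
render->stopRender();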


 


