main.cpp
#include "frmmain.h"
#include <QApplication>
#include <QTextCodec>
#include <QFile>
#include <QDesktopWidget>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
//Depending on the Qt version: on versions below Qt 5 the codec must be set to UTF-8 for Chinese text to display correctly
#if (QT_VERSION < QT_VERSION_CHECK(5,0,0))
QTextCodec *codec=QTextCodec::codecForName("UTF-8");
QTextCodec::setCodecForLocale(codec);
QTextCodec::setCodecForCStrings(codec);
QTextCodec::setCodecForTr(codec);
#endif
//Load and apply the dark gray theme style sheet
QFile qssFile(":/style.qss");
qssFile.open(QFile::ReadOnly);
if(qssFile.isOpen()){
qApp->setStyleSheet(QLatin1String(qssFile.readAll()));
qssFile.close();
}
frmMain w;
//Center the window on the screen and make it non-resizable
QDesktopWidget desktop;
int screenX=desktop.availableGeometry().width();
int screenY=desktop.availableGeometry().height();
int frmX=w.width();
int frmY=w.height();
QPoint movePoint(screenX/2-frmX/2,screenY/2-frmY/2);
w.move(movePoint);
//Fix the window size so it cannot be resized
w.setFixedSize(frmX,frmY);
w.show();
return a.exec();
}
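Note: QDesktopWidget is deprecated since Qt 5.11 and removed in Qt 6. If you build against a newer Qt, the same centering can be done through QScreen; a minimal sketch (not part of the original project, just an equivalent under the newer API):

#include <QScreen>
//... inside main(), after constructing frmMain w;
QRect screen = QGuiApplication::primaryScreen()->availableGeometry();
w.setFixedSize(w.width(), w.height());        //keep the window non-resizable
w.move(screen.center() - w.rect().center());  //center the window on the primary screen
w.show();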
frmmain.cpp
#include "frmmain.h"
#include "ui_frmmain.h"
#include "qffmpeg.h"
#include "rtspthread.h"
#include <QEvent>
#include <QPixmap>
frmMain::frmMain(QWidget *parent) :
QWidget(parent),
ui(new Ui::frmMain)
{
ui->setupUi(this);
tempWidth=320;
tempHeight=180;
video1Max=false;
video2Max=false;
video3Max=false;
all=false;
ui->labVideo1->installEventFilter(this);
ui->labVideo2->installEventFilter(this);
ui->labVideo3->installEventFilter(this);
}
frmMain::~frmMain()
{
delete ui;
}
//Handle double-clicks: maximize or restore the corresponding video channel
bool frmMain::eventFilter(QObject *obj, QEvent *event)
{
if (event->type()==QEvent::MouseButtonDblClick){
if (obj==ui->labVideo1){
if (video1Max){
tempWidth=320;
tempHeight=180;
ui->labVideo2->setVisible(true);
ui->labVideo3->setVisible(true);
ui->labImage->setVisible(true);
}else{
tempWidth=645;
tempHeight=370;
ui->labVideo2->setVisible(false);
ui->labVideo3->setVisible(false);
ui->labImage->setVisible(false);
}
video1Max=!video1Max;
}else if (obj==ui->labVideo2){
if (video2Max){
tempWidth=320;
tempHeight=180;
ui->labVideo1->setVisible(true);
ui->labVideo3->setVisible(true);
ui->labImage->setVisible(true);
}else{
tempWidth=645;
tempHeight=370;
ui->labVideo1->setVisible(false);
ui->labVideo3->setVisible(false);
ui->labImage->setVisible(false);
}
video2Max=!video2Max;
}else if (obj==ui->labVideo3){
if (video3Max){
tempWidth=320;
tempHeight=180;
ui->labVideo1->setVisible(true);
ui->labVideo2->setVisible(true);
ui->labImage->setVisible(true);
}else{
tempWidth=645;
tempHeight=370;
ui->labVideo1->setVisible(false);
ui->labVideo2->setVisible(false);
ui->labImage->setVisible(false);
}
video3Max=!video3Max;
}
}
return QObject::eventFilter(obj,event);
}
void frmMain::on_btnOpen_clicked()
{
QFFmpeg *ffmpeg=new QFFmpeg(this);
connect(ffmpeg,SIGNAL(GetImage(QImage)),this,SLOT(SetImage(QImage)));
ffmpeg->SetUrl(ui->txtUrl->text());
if (ffmpeg->Init()){
RtspThread *rtsp=new RtspThread(this);
rtsp->setffmpeg(ffmpeg);
rtsp->start();
}
}
void frmMain::on_btnGetImage_clicked()
{
ui->labImage->clear();
int index=ui->cboxVideo->currentIndex();
if (index==0){
if (ui->labVideo1->pixmap()!=0x0)
ui->labImage->setPixmap(*ui->labVideo1->pixmap());
}else if (index==1){
if (ui->labVideo2->pixmap()!=0x0)
ui->labImage->setPixmap(*ui->labVideo2->pixmap());
}else if (index==2){
if (ui->labVideo3->pixmap()!=0x0)
ui->labImage->setPixmap(*ui->labVideo3->pixmap());
}
}
void frmMain::SetImage(const QImage &image)
{
if (image.height()>0){
QPixmap pix = QPixmap::fromImage(image.scaled(tempWidth,tempHeight));
ui->labVideo1->setPixmap(pix);
if (all){//Three-channel sync is enabled
ui->labVideo2->setPixmap(pix);
ui->labVideo3->setPixmap(pix);
}
}
}
void frmMain::on_ckAll_stateChanged(int arg1)
{
all=(arg1!=0);
}
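In SetImage() the frame is scaled with image.scaled(tempWidth,tempHeight), which ignores the aspect ratio, so streams whose aspect ratio differs from the label get stretched. A variant that preserves the aspect ratio (a sketch, not in the original code):

QPixmap pix = QPixmap::fromImage(image.scaled(tempWidth, tempHeight, Qt::KeepAspectRatio, Qt::SmoothTransformation));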
frmmain.h
#ifndef FRMMAIN_H
#define FRMMAIN_H
#include <QWidget>
#include <QImage>
namespace Ui {
class frmMain;
}
class frmMain : public QWidget
{
Q_OBJECT
public:
explicit frmMain(QWidget *parent = 0);
~frmMain();
private slots:
void SetImage(const QImage &image);
void on_btnOpen_clicked();
void on_btnGetImage_clicked();
void on_ckAll_stateChanged(int arg1);
protected:
bool eventFilter(QObject *obj, QEvent *event);
private:
Ui::frmMain *ui;
int tempWidth;
int tempHeight;
bool video1Max;
bool video2Max;
bool video3Max;
bool all;
};
#endif // FRMMAIN_H
qffmpeg.cpp
#include "qffmpeg.h"
#include <QDebug>
#include <QImage>
QFFmpeg::QFFmpeg(QObject *parent) :
QObject(parent)
{
videoStreamIndex=-1;
av_register_all();//Register all available file formats and codecs
avformat_network_init();//Initialize network streaming support; required before opening RTSP streams
pAVFormatContext = avformat_alloc_context();//Allocate an AVFormatContext and perform basic initialization
pAVFrame=av_frame_alloc();
}
QFFmpeg::~QFFmpeg()
{
avformat_close_input(&pAVFormatContext);//Close the input opened in Init() and free the context
av_frame_free(&pAVFrame);
sws_freeContext(pSwsContext);
}
bool QFFmpeg::Init()
{
//Open the video stream
int result=avformat_open_input(&pAVFormatContext, url.toStdString().c_str(),NULL,NULL);
if (result<0){
qDebug()<<"打开视频流失败";
return false;
}
//Retrieve the stream information
result=avformat_find_stream_info(pAVFormatContext,NULL);
if (result<0){
qDebug()<<"获取视频流信息失败";
return false;
}
//Find the index of the video stream
videoStreamIndex = -1;
for (uint i = 0; i < pAVFormatContext->nb_streams; i++) {
if (pAVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex==-1){
qDebug()<<"获取视频流索引失败";
return false;
}
//Get the resolution of the video stream
pAVCodecContext = pAVFormatContext->streams[videoStreamIndex]->codec;
videoWidth=pAVCodecContext->width;
videoHeight=pAVCodecContext->height;
avpicture_alloc(&pAVPicture,PIX_FMT_RGB24,videoWidth,videoHeight);
AVCodec *pAVCodec;
//Find the decoder for the video stream
pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
pSwsContext = sws_getContext(videoWidth,videoHeight,PIX_FMT_YUV420P,videoWidth,videoHeight,PIX_FMT_RGB24,SWS_BICUBIC,0,0,0);
//Open the decoder
result=avcodec_open2(pAVCodecContext,pAVCodec,NULL);
if (result<0){
qDebug()<<"打开解码器失败";
return false;
}
qDebug()<<"初始化视频流成功";
return true;
}
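//Note (not part of the original code): stream->codec, avpicture_alloc() and the PIX_FMT_* names used in
//Init() are deprecated in FFmpeg 3.x and removed in 4.x. A rough sketch of the modern equivalent of the
//same steps, assuming the member types are adjusted to match:
//    pAVCodecContext = avcodec_alloc_context3(NULL);
//    avcodec_parameters_to_context(pAVCodecContext, pAVFormatContext->streams[videoStreamIndex]->codecpar);
//    AVCodec *pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
//    avcodec_open2(pAVCodecContext, pAVCodec, NULL);
//    uint8_t *rgbData[4]; int rgbLinesize[4];
//    av_image_alloc(rgbData, rgbLinesize, videoWidth, videoHeight, AV_PIX_FMT_RGB24, 1);   //needs libavutil/imgutils.h
//    pSwsContext = sws_getContext(videoWidth, videoHeight, pAVCodecContext->pix_fmt,
//                                 videoWidth, videoHeight, AV_PIX_FMT_RGB24, SWS_BICUBIC, 0, 0, 0);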
void QFFmpeg::Play()
{
//Read the video one frame at a time
int frameFinished=0;
while (true){
if (av_read_frame(pAVFormatContext, &pAVPacket) >= 0){
if(pAVPacket.stream_index==videoStreamIndex){
qDebug()<<"开始解码"<data,pAVFrame->linesize,0,videoHeight,pAVPicture.data,pAVPicture.linesize);
//Emit a signal carrying the decoded frame
QImage image(pAVPicture.data[0],videoWidth,videoHeight,QImage::Format_RGB888);
emit GetImage(image);
mutex.unlock();
}
}
}
av_free_packet(&pAVPacket);//Free the packet; otherwise memory usage keeps growing
}
}
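One caveat in Play(): the QImage passed to GetImage() wraps pAVPicture.data[0] directly (the QImage(uchar*,...) constructor does not copy the pixel buffer), and the signal reaches the GUI thread through a queued connection, so the decoder thread may overwrite that buffer before SetImage() runs. A minimal fix, assuming nothing else changes, is to emit a deep copy:

QImage image(pAVPicture.data[0],videoWidth,videoHeight,QImage::Format_RGB888);
emit GetImage(image.copy());   //image.copy() detaches the pixels so the GUI thread owns its own buffer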
qffmpeg.h
#ifndef QFFMPEG_H
#define QFFMPEG_H
//Required so the FFmpeg headers compile under C++ (C99 stdint constant macros)
#ifndef INT64_C
#define INT64_C(c) (c ## LL)
#define UINT64_C(c) (c ## ULL)
#endif
//The FFmpeg headers are plain C, so wrap them in extern "C"
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/pixfmt.h>
#include <libavutil/frame.h>
}
#include <QObject>
#include <QMutex>
#include <QImage>
class QFFmpeg : public QObject
{
Q_OBJECT
public:
explicit QFFmpeg(QObject *parent = 0);
~QFFmpeg();
bool Init();
void Play();
void SetUrl(QString url){this->url=url;}
QString Url()const{return url;}
int VideoWidth()const{return videoWidth;}
int VideoHeight()const{return videoHeight;}
private:
QMutex mutex;
AVPicture pAVPicture;
AVFormatContext *pAVFormatContext;
AVCodecContext *pAVCodecContext;
AVFrame *pAVFrame;
SwsContext * pSwsContext;
AVPacket pAVPacket;
QString url;
int videoWidth;
int videoHeight;
int videoStreamIndex;
signals:
void GetImage(const QImage &image);
public slots:
};
#endif // QFFMPEG_H
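The INT64_C/UINT64_C block above exists because the FFmpeg headers use the C99 stdint constant macros, which older C++ compilers only expose on request. An alternative (a sketch, assuming your toolchain honors the feature-test macro) is to define __STDC_CONSTANT_MACROS before anything includes stdint.h:

//at the very top of qffmpeg.h, before any other include
#define __STDC_CONSTANT_MACROS
#include <stdint.h>

or project-wide in the qmake project file:

DEFINES += __STDC_CONSTANT_MACROS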
rtspthread.cpp
#include "rtspthread.h"
RtspThread::RtspThread(QObject *parent) :
QThread(parent)
{
}
void RtspThread::run()
{
ffmpeg->Play();
}
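As written, QFFmpeg::Play() loops forever, so RtspThread never finishes and there is no clean way to stop a stream or quit while one is open. A minimal sketch of a stop mechanism (the names stopped/Stop/stop are illustrative and not in the original code):

//qffmpeg.h: add a flag and a setter
//    volatile bool stopped;            //initialize to false in the constructor
//    void Stop() { stopped = true; }
//qffmpeg.cpp: in Play(), replace "while (true)" with "while (!stopped)"
//rtspthread.h: add a method that asks the loop to exit and waits for run() to return
void stop()
{
    ffmpeg->Stop();   //request the decode loop to exit
    wait();           //QThread::wait() blocks until run() has returned
}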
rtspthread.h
#ifndef RTSPTHREAD_H
#define RTSPTHREAD_H
#include <QThread>
#include "qffmpeg.h"
class RtspThread : public QThread
{
Q_OBJECT
public:
explicit RtspThread(QObject *parent = 0);
void run();
void setffmpeg(QFFmpeg *f){ffmpeg=f;}
private:
QFFmpeg * ffmpeg;
signals:
public slots:
};
#endif // RTSPTHREAD_H
Source code plus library files: https://download.csdn.net/download/gaodes/10769146
Matching live555 server: https://download.csdn.net/download/gaodes/10769151