Qt版本:5.11 64位
ffmpeg版本:>3.0 64位
ffmpeg拉流、解码,因为Qt不支持显示yuv数据,需要ffmpeg转换为rgb数据,再用QImage显示,这种方式性能会不好,暂时先实现这种简单的。一般情况下会用opengl渲染。
拉流cctv1:http://ivi.bupt.edu.cn/hls/cctv1hd.m3u8,此例子只解码视频数据,不处理音频。
代码:解码线程
#pragma once
#include <QThread>
#include <QObject>
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/dict.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
};
// Worker thread that pulls a stream (e.g. HLS) with ffmpeg, decodes the
// video track and converts each frame to RGB32 for display by the UI.
// Audio is deliberately not handled.
class DecodeThread : public QThread
{
Q_OBJECT
public:
DecodeThread(QObject *parent);
~DecodeThread();
Q_SIGNALS:
// Emitted once per decoded frame. rgbBuffer is an av_malloc()ed RGB32
// pixel buffer; ownership passes to the receiver, which must av_free() it.
void sigData(uint8_t* rgbBuffer);
public:
// Set the stream URL; used by run() on the next start().
void setUrl(QString url);
// Stop-request flag for the decode loop.
// NOTE(review): written from the GUI thread and read from the worker
// without synchronization — consider std::atomic<bool>.
void setStoped(bool stop);
protected:
// QThread entry point: opens the stream, sets up the decoder, loops.
void run();
protected:
// Convert an AVRational to double (0 when the denominator is 0).
double r2d(AVRational r);
// Packet-read / decode / colorspace-convert loop.
void decodeStream();
private:
QString m_url;
bool m_isStop = false;
private:
AVFormatContext* m_pFormatCtx = NULL;
AVCodecContext* m_pCodecCtx = NULL;
AVPacket* m_avpacket = NULL;
AVFrame *m_frame = NULL;
// index of the video stream inside the container
int m_videoIndex = -1;
// total video duration in ms
int64_t m_totalTime = 0;
// video width in pixels
int m_width = 0;
// video height in pixels
int m_height = 0;
// video frame rate (frames per second)
int m_fps = 0;
};
#include "DecodeThread.h"
DecodeThread::DecodeThread(QObject *parent)
    : QThread(parent)
{
    // All members carry in-class initializers; nothing further to set up.
}
DecodeThread::~DecodeThread()
{
    // Ask the worker loop to stop and give it a bounded grace period before
    // tearing down the ffmpeg objects it may still be using — destroying a
    // running QThread (or freeing contexts under it) is undefined behaviour.
    m_isStop = true;
    wait(3000);
    if (m_pFormatCtx != NULL)
    {
        // avformat_close_input() closes, frees the context and NULLs the
        // pointer; a separate avformat_free_context() call is redundant.
        avformat_close_input(&m_pFormatCtx);
    }
    if (m_pCodecCtx != NULL)
    {
        // avcodec_free_context() closes the codec internally, so the
        // deprecated avcodec_close() is not needed first.
        avcodec_free_context(&m_pCodecCtx);
    }
    if (m_avpacket != NULL)
    {
        // Drop any payload still referenced, then release the struct itself
        // (allocated with new in run(), so delete is the matching release).
        av_packet_unref(m_avpacket);
        delete m_avpacket;
        m_avpacket = NULL;
    }
    if (m_frame != NULL)
    {
        // av_frame_free() also resets the pointer to NULL.
        av_frame_free(&m_frame);
    }
}
void DecodeThread::setUrl(QString url)
{
    // Remember the stream URL; it is consumed by run() on the next start().
    m_url = url;
}
void DecodeThread::setStoped(bool stop)
{
    // Record the caller's stop request for the worker to observe.
    m_isStop = stop;
}
void DecodeThread::run()
{
//注册所有组件 新版本已弃用
av_register_all();
//打开输入视频文件
if (avformat_open_input(&m_pFormatCtx, m_url.toStdString().c_str(), NULL, NULL) != 0)
{
printf("Couldn't open input stream.\n");
}
if (avformat_find_stream_info(m_pFormatCtx, NULL) < 0)
{
printf("Couldn't find stream information.\n");
}
for (int i = 0; i < m_pFormatCtx->nb_streams/*视音频流的个数*/; i++)
{
//查找视频
if (m_pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
m_videoIndex = i;
break;
}
}
if (m_videoIndex == -1)
{
printf("Couldn't find a video stream.\n");
}
/**
* 不赞成这样使用
* pCodecCtx = pFormatCtx->streams[videoIndex]->codec; //指向AVCodecContext的指针
*/
m_pCodecCtx = avcodec_alloc_context3(NULL);
if (m_pCodecCtx == NULL)
{
printf("Could not allocate AVCodecContext\n");
}
avcodec_parameters_to_context(m_pCodecCtx, m_pFormatCtx->streams[m_videoIndex]->codecpar);
//指向AVCodec的指针.查找解码器
AVCodec *pCodec = avcodec_find_decoder(m_pCodecCtx->codec_id);
if (pCodec == NULL)
{
printf("Codec not found pCodec\n");
}
//打开解码器
if (avcodec_open2(m_pCodecCtx, pCodec, NULL) < 0)
{
printf("Could not open codec.\n");
}
//视频宽
m_width = m_pFormatCtx->streams[m_videoIndex]->codecpar->width;
//视频高
m_height = m_pFormatCtx->streams[m_videoIndex]->codecpar->height;
//获取帧率;
m_fps = r2d(m_pFormatCtx->streams[m_videoIndex]->avg_frame_rate);
if (m_fps == 0)
{
m_fps = 25;
}
//初始化AVPacket
m_avpacket = new AVPacket;
av_init_packet(m_avpacket);
m_avpacket->data = NULL;
//初始化frame,
m_frame = av_frame_alloc();
if (!m_frame)
{
printf("av_frame_alloc fail\n");
}
//开始解码
decodeStream();
}
double DecodeThread::r2d(AVRational r)
{
    // Rational -> double, guarding against a zero denominator.
    if (r.den == 0)
    {
        return 0;
    }
    return (double)r.num / (double)r.den;
}
void DecodeThread::decodeStream()
{
while (av_read_frame(m_pFormatCtx, m_avpacket) >= 0)
{
if (m_avpacket->stream_index == m_videoIndex)
{
int ret = avcodec_send_packet(m_pCodecCtx, m_avpacket);
if (ret >= 0)
{
ret = avcodec_receive_frame(m_pCodecCtx, m_frame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
{
return;
}
else if (ret < 0)
{
return;
}
switch (m_pCodecCtx->pix_fmt)
{
case AV_PIX_FMT_YUV420P:
{
int width = m_frame->width;
int height = m_frame->height;
AVFrame *pFrameRGB = av_frame_alloc();
int numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, width, height);
int nBGRFrameSize = av_image_get_buffer_size(AV_PIX_FMT_RGB32, width, height, 1);
uint8_t* rgbBuffer = (uint8_t*)av_malloc(nBGRFrameSize);
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, rgbBuffer, AV_PIX_FMT_RGB32, width, height, 1);
//改变像素格式
SwsContext *img_convert_ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P, width, height, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
//颜色空间转换 yuv420p --> rgb32
sws_scale(img_convert_ctx,
(uint8_t const *const *)m_frame->data,
m_frame->linesize, 0, height, pFrameRGB->data,
pFrameRGB->linesize);
//发送信号,转换后的rgb buffer
emit sigData(rgbBuffer);
//释放内存
av_frame_free(&pFrameRGB);
}break;
default:
{
printf("default format:%d\n", m_pCodecCtx->pix_fmt);
return;
}
}
}
}
}
}
代码:ui界面
#pragma once
#include <QtWidgets>
#include "ui_QtGuiApplication1.h"
#include "DecodeThread.h"
// Minimal player window: a line edit for the stream URL, a button to start
// decoding, and a label that shows the decoded frames.
class QtGuiApplication1 : public QWidget
{
Q_OBJECT
public:
QtGuiApplication1(QWidget *parent = Q_NULLPTR);
~QtGuiApplication1();
private:
Ui::QtGuiApplication1Class ui;
private Q_SLOTS:
// Reads the URL from the line edit and starts the decode thread.
void slotBtnClicked();
// Receives one av_malloc()ed RGB32 frame buffer from the decode thread,
// displays it, and frees it (this slot owns the buffer).
void slotData(uint8_t* data);
private:
// Decode worker; parented to this widget, so Qt manages its deletion.
DecodeThread *m_thead = nullptr;
};
#include "QtGuiApplication1.h"
QtGuiApplication1::QtGuiApplication1(QWidget *parent)
    : QWidget(parent)
{
    // Build the Designer-generated UI first so the widgets referenced in
    // the connections below exist.
    ui.setupUi(this);

    // The decode worker is parented to the window; Qt owns its lifetime.
    m_thead = new DecodeThread(this);

    // Frames arrive from the worker thread; the button kicks it off.
    connect(m_thead, &DecodeThread::sigData, this, &QtGuiApplication1::slotData);
    connect(ui.pushButton, &QPushButton::clicked, this, &QtGuiApplication1::slotBtnClicked);
}
QtGuiApplication1::~QtGuiApplication1()
{
    // Nothing to do: m_thead is a child QObject and is deleted by Qt.
}
void QtGuiApplication1::slotBtnClicked()
{
QString url = ui.lineEdit->text();
m_thead->setUrl(url);
m_thead->start();
}
void QtGuiApplication1::slotData(uint8_t* data)
{
// Wrap the decoder's RGB32 buffer in a QImage and display it.
// NOTE(review): the 1920x1080 size is hard-coded and must match the actual
// stream resolution or this reads out of bounds — the decode thread should
// pass width/height along with the buffer. TODO confirm stream size.
QImage image(data, 1920, 1080, QImage::Format_ARGB32);
// QPixmap::fromImage() copies the pixel data, so the source buffer can be
// released right after.
ui.lbImage->setPixmap(QPixmap::fromImage(image));
// Buffer was av_malloc()ed by the decode thread; this slot owns and frees it.
av_free(data);
}