先看一个效果吧,由于这个抓图软件只有7.5fps,所以看其来很卡,但真实的程序运行起来比这个流畅多了。不能上传mp4,所以就弄了gif
我用的是Nvidia的硬解码SDK,并在其中加入了ffmpeg拉流或视频。解码出来的图片格式是nv12,这里我在shader里面实现的nv12转rgb的纹理,公式也是网上找的了。
看一下工程结构吧
要封装成qml的接口,要有opengl渲染,有图像显示,所以我选择了继承QQuickItem。qml控件类如下
#ifndef VIDEOTHREADRENDER_H
#define VIDEOTHREADRENDER_H
#include
QT_FORWARD_DECLARE_CLASS(RenderThread)
class VideoThreadRender : public QQuickItem
{
Q_OBJECT
Q_PROPERTY(QString videoSource READ videoSource WRITE setVideoSource NOTIFY videoSourceChanged)
public:
VideoThreadRender(QQuickItem *parent = nullptr);
static QList threads;
public Q_SLOTS:
void ready();
signals:
void videoSourceChanged();
protected:
QSGNode *updatePaintNode(QSGNode *, UpdatePaintNodeData *) override;
private:
RenderThread *m_renderThread{nullptr};
QString m_videoSource;
void setVideoSource(QString);
QString videoSource();
};
#endif // VIDEOTHREADRENDER_H
声明没啥说的,没有多余的函数,留了一个设置视频来源的QString属性。
#include "videothreadrender.h"
#include "renderthread.h"
#include "texturenode.h"

// Qt headers used by this translation unit.
// NOTE(review): the original include on this line was emptied by extraction;
// these are the types the .cpp actually references.
#include <QOpenGLContext>
#include <QOffscreenSurface>
#include <QQuickWindow>
#include <QThread>

// Definition of the static thread registry declared in the header.
// NOTE(review): template argument lost in extraction; presumably QThread*.
QList<QThread *> VideoThreadRender::threads;
/// Constructs the item and its decode/render worker thread.
/// @param parent standard QQuickItem parent.
VideoThreadRender::VideoThreadRender(QQuickItem *parent)
    : QQuickItem(parent) // base class first; the original listed a member
                         // before the base (wrong init order, -Wreorder)
{
    // The item paints content, so the scene graph must call updatePaintNode().
    setFlag(ItemHasContents, true);
    // Worker thread that renders frames into a 512x512 FBO texture. This is
    // NOT the Qt Quick scene-graph render thread — it is a separate thread.
    m_renderThread = new RenderThread(QSize(512, 512));
}
// Runs on the GUI thread (queued from updatePaintNode) to finish the
// worker's setup: create its off-screen surface, hand over the video
// source, move the worker into its own event loop, and start it.
void VideoThreadRender::ready()
{
// Off-screen surface: behaves like an invisible window so the worker's
// OpenGL context has something to makeCurrent() against.
m_renderThread->surface = new QOffscreenSurface();
m_renderThread->surface->setFormat(m_renderThread->context->format());
// Per the Qt docs, QOffscreenSurface::create() must be called on the GUI
// thread — that is why construction and init happen here, not in the worker.
m_renderThread->surface->create();
m_renderThread->videoSource = m_videoSource;
// Move the worker QObject into its own thread's event loop
// (RenderThread is both the QThread and the thread-affine object here).
m_renderThread->moveToThread(m_renderThread);
// When the scene graph is invalidated, release the worker's resources.
connect(window(), &QQuickWindow::sceneGraphInvalidated, m_renderThread, &RenderThread::shutDown, Qt::QueuedConnection);
// Start the worker thread.
m_renderThread->start();
// Schedule another updatePaintNode() pass: the first pass returned early
// right after creating the context, so the TextureNode was never built.
update();
}
//此函数是由渲染线程调用的,不是在GUI线程
QSGNode *VideoThreadRender::updatePaintNode(QSGNode *oldNode, UpdatePaintNodeData *)
{
TextureNode *node = static_cast(oldNode);
if (!m_renderThread->context) {
QOpenGLContext *current = window()->openglContext();
current->doneCurrent(); //取消opengl在当前上下文中的绑定,因为下面要设置shareContext,即将sharedContext移动到子线程
m_renderThread->context = new QOpenGLContext();
m_renderThread->context->setFormat(current->format());
m_renderThread->context->setShareContext(current);
m_renderThread->context->create();
m_renderThread->context->moveToThread(m_renderThread); //context有线程归属性,一个context只能被它关联的线程调用makeCurrent,不能被其它线程调用;也只能有一个对应的surface
//一个线程在同一时刻也只能有一个context
current->makeCurrent(window()); //恢复绑定
QMetaObject::invokeMethod(this, "ready"); //跨线程调用
return 0;
}
if (!node) {
node = new TextureNode(window()); //实例化自定义的纹理结点
//当纹理在子线程渲染好后,将纹理id、大小设置到自定义的QSimpleTextureNode结构中
connect(m_renderThread, &RenderThread::textureReady, node, &TextureNode::newTexture, Qt::DirectConnection);
//update函数调用后,渲染线程会在适当的时候发出beforRendering信号
connect(node, &TextureNode::pendingNewTexture, window(), &QQuickWindow::update, Qt::QueuedConnection);
//在开始渲染之前,把子线程渲染好的纹理设置到QSimpletTextureNode中,以便渲染线程使用
connect(window(), &QQuickWindow::beforeRendering, node, &TextureNode::prepareNode, Qt::DirectConnection);
//渲染好的纹理被使用后,通知子线程渲染下一帧
connect(node, &TextureNode::textureInUse, m_renderThread, &RenderThread::renderNext, Qt::QueuedConnection);
// Get the production of FBO textures started..
QMetaObject::invokeMethod(m_renderThread, "renderNext", Qt::QueuedConnection);
}
node->setRect(boundingRect());设置显示区域,为qml分配的整个区域
return node;
}
/// Property setter for videoSource.
/// Stores the new value *before* emitting videoSourceChanged(), so any
/// connected slot reading the property observes the new value — the
/// original emitted first and then assigned, exposing the stale value.
/// Emits only on an actual change.
void VideoThreadRender::setVideoSource(QString s)
{
    if (m_videoSource != s) {
        m_videoSource = s;
        emit videoSourceChanged();
    }
}
// Property getter for videoSource.
// NOTE(review): could be declared const; left non-const to match the header.
QString VideoThreadRender::videoSource()
{
return m_videoSource;
}
以上就是离屏渲染的一半知识要点了,另外一半就是子线程的操作和自定义的TextureNode结点。下次再讲解吧,有些累了。