海思Hi3536使用QOpenGLWidget预览yuv420sp视频

参考文章:

                 《ffmpeg opengl 硬解视频并使用opengl在qt中显示nv12》

                 《qt 使用opengl显示yuv实时视频流》

上述参考文章中《qt 使用opengl显示yuv实时视频流》主要是介绍yuv420p存储方式的openGL渲染,Hi3536可以利用vi_dump/vpss_chn_dump工具dump出yuv420p文件来测试;而《ffmpeg opengl 硬解视频并使用opengl在qt中显示nv12》则是介绍yuv420sp的。

        照搬《ffmpeg opengl 硬解视频并使用opengl在qt中显示nv12》提供的代码,视频严重偏蓝,查看《HiMPP V3.0 媒体处理软件开发参考.pdf》知道海思Hi3536解码输出的是yuv420sp存储格式,但它的存储顺序为yyyyy...,vuvuvuvu...。所以还需要修改一下shader代码:

// Fragment shader for Hi3536 yuv420sp. The article notes Hi3536 stores the
// chroma plane VU-interleaved (vuvu...), so U is sampled from the .g channel
// and V from the .r channel of the chroma texture — the opposite of the usual
// NV12 shader, which is what caused the blue tint with the original code.
// The leading "precision mediump float;" is mandatory on this GLES2 driver.
const char *fsrc = "\
            precision mediump float; \
    varying mediump vec4 textureOut; \
    uniform sampler2D textureY; \
    uniform sampler2D textureUV; \
    void main(void) \
    {\
        vec3 yuv; \
        vec3 rgb; \
        yuv.x = texture2D(textureY, textureOut.st).r - 0.0625; \
        yuv.y = texture2D(textureUV, textureOut.st).g - 0.5; \
        yuv.z = texture2D(textureUV, textureOut.st).r - 0.5; \
        rgb = mat3( 1,       1,         1, \
                    0,       -0.39465,  2.03211, \
                    1.13983, -0.58060,  0) * yuv; \
        gl_FragColor = vec4(rgb, 1); \
    }";

下面是修改过的yuv420sp openGL渲染类:

//nv12render.h
#ifndef NV12RENDER_H
#define NV12RENDER_H
#include 
#include 
#include 

class Nv12Render : public QOpenGLFunctions
{
public:
    Nv12Render() = default;
    Nv12Render(const Nv12Render&) = delete;
    void initialize();
    void render(uchar*nv12Ptr, int w, int h);

private:
    QOpenGLShaderProgram program;
    GLuint idY,idUV;
    QOpenGLBuffer vbo;
};

#endif // NV12RENDER_H
//nv12render.cpp
#include "nv12render.h"
#include 
#include 

// One-time GL setup: compile and link the shader program, upload the static
// quad geometry, and create the Y / chroma texture objects. Must run with a
// current GL context (QOpenGLWidget::initializeGL()).
void Nv12Render::initialize()
{
    initializeOpenGLFunctions();

    // Pass-through vertex shader: forwards clip-space position and the
    // texture coordinate to the fragment stage.
    const char *vsrc = " \
            attribute vec4 vertexIn; \
    attribute vec4 textureIn; \
    varying vec4 textureOut;  \
    void main(void)           \
    {                         \
        gl_Position = vertexIn; \
        textureOut = textureIn; \
    }";

    // Hi3536's OpenGL ES2 shader compiler requires an explicit default
    // precision, hence the leading "precision mediump float;". Chroma is
    // VU-interleaved on this platform, so U is read from .g and V from .r.
    const char *fsrc = "\
            precision mediump float; \
    varying mediump vec4 textureOut; \
    uniform sampler2D textureY; \
    uniform sampler2D textureUV; \
    void main(void) \
    {\
        vec3 yuv; \
        vec3 rgb; \
        yuv.x = texture2D(textureY, textureOut.st).r - 0.0625; \
        yuv.y = texture2D(textureUV, textureOut.st).g - 0.5; \
        yuv.z = texture2D(textureUV, textureOut.st).r - 0.5; \
        rgb = mat3( 1,       1,         1, \
                    0,       -0.39465,  2.03211, \
                    1.13983, -0.58060,  0) * yuv; \
        gl_FragColor = vec4(rgb, 1); \
    }";

    // The compile/link results were previously ignored, so a shader error
    // failed silently and rendered garbage. Bail out early on failure; the
    // compiler output is then available via program.log().
    if (!program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex, vsrc) ||
        !program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment, fsrc) ||
        !program.link()) {
        return;
    }

    // Interleaved buffer layout: 4 quad vertices (first 8 floats) followed
    // by their texture coordinates (last 8 floats); drawn as a triangle fan.
    GLfloat points[]{
        -1.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, -1.0f,
        -1.0f, -1.0f,

        0.0f,0.0f,
        1.0f,0.0f,
        1.0f,1.0f,
        0.0f,1.0f
    };

    vbo.create();
    vbo.bind();
    vbo.allocate(points, sizeof(points));

    // One texture for the Y plane, one for the interleaved chroma plane.
    GLuint ids[2];
    glGenTextures(2, ids);
    idY = ids[0];
    idUV = ids[1];
}

// Uploads one yuv420sp frame and draws it as a textured full-viewport quad.
// nv12Ptr: w*h luma bytes followed by (w/2)*(h/2) interleaved chroma pairs.
// NOTE(review): GL_RED/GL_RG texture formats are not part of core GLES2;
// this presumably relies on GLES3 or a driver extension on Hi3536 — confirm
// on the target GPU.
void Nv12Render::render(uchar *nv12Ptr, int w, int h)
{
    if(!nv12Ptr)return;

    glClearColor(0.5f, 0.5f, 0.7f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);

    program.bind();
    vbo.bind();
    // Vertices occupy the first 8 floats of the VBO, texcoords the next 8.
    program.enableAttributeArray("vertexIn");
    program.enableAttributeArray("textureIn");
    program.setAttributeBuffer("vertexIn",GL_FLOAT, 0, 2, 2*sizeof(GLfloat));
    program.setAttributeBuffer("textureIn",GL_FLOAT,2 * 4 * sizeof(GLfloat),2,2*sizeof(GLfloat));

    // Upload the full-resolution luma plane to texture unit 1.
    glActiveTexture(GL_TEXTURE0 + 1); // NOTE: empirically, "+ 0" misbehaved here on this platform — cause unconfirmed
    glBindTexture(GL_TEXTURE_2D,idY);
    glTexImage2D(GL_TEXTURE_2D,0,GL_RED,w,h,0,GL_RED,GL_UNSIGNED_BYTE,nv12Ptr);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Upload the half-resolution interleaved chroma plane to unit 0; it
    // starts w*h bytes into the frame buffer.
    glActiveTexture(GL_TEXTURE0 + 0);
    glBindTexture(GL_TEXTURE_2D,idUV);
    glTexImage2D(GL_TEXTURE_2D,0,GL_RG,w >> 1,h >> 1,0,GL_RG,GL_UNSIGNED_BYTE,nv12Ptr + w*h);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Bind samplers to their texture units, then draw the quad.
    program.setUniformValue("textureUV",0);
    program.setUniformValue("textureY",1);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    program.disableAttributeArray("vertexIn");
    program.disableAttributeArray("textureIn");
    program.release();
}

继承QOpenGLWidget同时加入yuv420sp转RGB功能的GLYuvWidget类

//glyuvwidget.h
#ifndef GLYUVWIDGET_H
#define GLYUVWIDGET_H

#include 

class Nv12Render;

class GLYuvWidget : public QOpenGLWidget
{
    Q_OBJECT
public:
    GLYuvWidget(QWidget *parent =nullptr);
    ~GLYuvWidget();

public slots:
    void slotShowYuv(uchar *ptr,int width,int height); //update一帧yuv图像

protected:
    void initializeGL() override;
    void paintGL() override;

private:
    Nv12Render *m_render;
    uchar *m_ptr;
    int m_width,m_height;

    //for debug
private:
    uint64_t ddwPreTime;
    uint64_t GetUnixTimeInMsec();
};

#endif // GLYUVWIDGET_H
//glyuvwidget.cpp
#include "glyuvwidget.h"
#include "yuv420sp/nv12render.h"
#include <QDebug>     // qDebug()
#include <sys/time.h> // gettimeofday()

// Current wall-clock time as milliseconds since the Unix epoch.
uint64_t GLYuvWidget::GetUnixTimeInMsec()
{
    struct timeval now;
    gettimeofday(&now, nullptr);
    uint64_t msec = (uint64_t)now.tv_sec * 1000u;
    msec += now.tv_usec / 1000u;
    return msec;
}

// Creates the render helper up front; its GL resources are only created
// later, in initializeGL(), once a context exists.
GLYuvWidget::GLYuvWidget(QWidget *parent):
    QOpenGLWidget(parent),
    m_render(new Nv12Render)
{
}

GLYuvWidget::~GLYuvWidget()
{
    // NOTE(review): Nv12Render has no destructor releasing its GL textures;
    // presumably they are reclaimed with the GL context — confirm.
    delete m_render;
}

// Slot: receives one yuv420sp frame and schedules a repaint.
// Only the pointer is stored — NOTE(review): the producer must keep the
// frame buffer alive until paintGL() has consumed it; confirm with the
// reader-thread implementation.
void GLYuvWidget::slotShowYuv(uchar *ptr, int width, int height)
{
    // Debug: time between received frames. qDebug() already appends a
    // newline, so the original trailing "<< endl" printed blank lines
    // (and unqualified endl is deprecated in newer Qt) — dropped.
    uint64_t ddwCurTime = GetUnixTimeInMsec();
    qDebug() << "Deta time =" << ddwCurTime - ddwPreTime << "ms";
    ddwPreTime = ddwCurTime;

    m_ptr = ptr;
    m_width = width;
    m_height = height;
    update();   // triggers paintGL() on the GUI thread
}

// Called by Qt once the GL context is current: set up shaders/buffers/textures.
void GLYuvWidget::initializeGL()
{
    m_render->initialize();
}

// Draws the most recently received frame; render() itself rejects a null
// frame pointer, so this is safe once m_ptr starts out null.
void GLYuvWidget::paintGL()
{
    m_render->render(m_ptr, m_width, m_height);
}

同样假设Hi3536的SDK路径为/home/default/work/Hi3536_SDK_V2.0.6.0

.pro项目文件需要加入mpp库路径,注意添加静态库的先后顺序(下面添加的静态库只作演示用,实际根据需要添加):

# Keep qmake/moc/uic intermediates out of the source tree.
MOC_DIR=./tmp/moc
OBJECTS_DIR=./tmp/obj
UI_DIR = ./tmp/ui
# HiMPP SDK headers and static libraries. Static archive link order matters:
# libmpi.a must come before the libraries it depends on.
INCLUDEPATH += /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/include
LIBS+= /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/lib/libmpi.a
LIBS+= /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/lib/libtde.a
LIBS+= /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/lib/libhdmi.a
LIBS+= /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/lib/libdnvqe.a
LIBS+= /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/lib/libupvqe.a
LIBS+= /home/default/work/Hi3536_SDK_V2.0.6.0/mpp_master/lib/libVoiceEngine.a
LIBS+= -ldl

使用qt598_hi3536/bin/qmake -spec linux-hi3536-g++ xxxx.pro生成Makefile,同时生成tmp目录,及其子目录moc、obj和ui。这些目录用于保存make过程中的中间文件,删除不影响。

工程中加入上述两个类,再写一个继承QThread、用于获取yuv420sp视频流的类,并获取到帧时emit一个更新帧的信号过去GLYuvWidget就可以测试了。

// String-based SIGNAL/SLOT connect: matched only at runtime, so a signature
// typo fails silently. NOTE(review): the pointer-to-member connect overload
// would give compile-time checking once the reader class type is known.
connect(pYuvReader[i], SIGNAL(sigNewYuvFrame(uchar*,int,int)), \
                                 pGlWin[i], SLOT(slotShowYuv(uchar*,int,int)));

 

你可能感兴趣的:(海思视频处理器,QT,QOpenGLWidget,Hi3536,QT)