A Video Player Based on QOpenGLWidget + FFMpeg

FFMpeg is used to open and parse the media file and extract the video stream; QOpenGLWidget is used to display the YUV frames, which are converted to RGB in the fragment shader. That is the whole playback path.

1. Playing the Video File

Video frames are extracted on a dedicated thread, which exposes three events:
(1) the file has been opened;
(2) the first frame has been read, reporting the video dimensions;
(3) a complete frame has been read; each frame carries the three YUV color planes.
The raw FFMpeg API is cumbersome, so it is not used directly here; instead the player goes through ffms2, a file-reading wrapper library built on top of FFMpeg.
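ffms2 is a plain C library on top of the FFmpeg libraries, so the project has to link against it. A minimal qmake sketch; the install paths below are placeholders for wherever ffms2 lives on your machine, not part of the original project:

INCLUDEPATH += $$PWD/third_party/ffms2/include
LIBS += -L$$PWD/third_party/ffms2/lib -lffms2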

qffmpegreader.h

#ifndef QFFMPEGREADER_H
#define QFFMPEGREADER_H
#include <QThread>
#include <QByteArray>
#include <QPoint>
#include <memory>
#include <atomic>
#include <ffms.h>

struct FFFrame;
typedef std::shared_ptr<FFFrame> FFFramePtr;

struct FFFrame
{
    FFFrame() = default;

    // Raw bytes of the three YUV planes
    QByteArray TextureY;
    QByteArray TextureU;
    QByteArray TextureV;

    // Create a new frame object
    static FFFramePtr MakeFrame()
    {
        FFFramePtr FFrame = std::make_shared<FFFrame>();
        return FFrame;
    }
};

class QFFMpegReader : public QThread
{
    Q_OBJECT
public:
    explicit QFFMpegReader(QObject *parent = nullptr);

    void Open(const QString& Url);
    void Close();

    // QThread interface
protected:
    void run();

private:
    bool OpenImpl(FFMS_VideoSource* &videosource, int& num_frames);

signals:
    void OnOpen();
    void OnUpdate(const QPoint VideoSize);
    void OnFrame(FFFramePtr Frame);

private:
    QString Url_;
    std::atomic_bool Running_;  // written from the GUI thread (Close), read in run()
    QPoint DisplaySize_;
};

#endif // QFFMPEGREADER_H

qffmpegreader.cpp

#include "qffmpegreader.h"
#include <QDateTime>
#include <QDebug>
#include <thread>
#include <chrono>

QFFMpegReader::QFFMpegReader(QObject *parent) : QThread(parent), Running_(false)
{

}

void QFFMpegReader::Open(const QString& Url)
{
    Url_ = Url;
    Running_ = true;
    start();
}

void QFFMpegReader::Close()
{
    Running_ = false;
    wait();
}

bool QFFMpegReader::OpenImpl(FFMS_VideoSource* &videosource, int& num_frames)
{
    //https://github.com/FFMS/ffms2/blob/master/doc/ffms2-api.md
    /* Index the source file. Note that this example does not index any audio tracks. */
    char errmsg[1024];
    FFMS_ErrorInfo errinfo;
    errinfo.Buffer = errmsg;
    errinfo.BufferSize = sizeof(errmsg);
    errinfo.ErrorType = FFMS_ERROR_SUCCESS;
    errinfo.SubType = FFMS_ERROR_SUCCESS;

    FFMS_Indexer* indexer = FFMS_CreateIndexer(Url_.toStdString().c_str(), &errinfo);
    if (indexer == nullptr) {
        return false;
    }

    // Both FFMS_DoIndexing2 and FFMS_CancelIndexing destroy the indexer object and free its memory.
    FFMS_Index* index = FFMS_DoIndexing2(indexer, FFMS_IEH_ABORT, &errinfo);
    if (index == nullptr) {
        FFMS_CancelIndexing(indexer);
        return false;
    }

    // Find the first video track
    int trackno = FFMS_GetFirstTrackOfType(index, FFMS_TYPE_VIDEO, &errinfo);
    if (trackno < 0) {
        FFMS_DestroyIndex(index);
        return false;
    }

    // Create the video source
    videosource = FFMS_CreateVideoSource(Url_.toStdString().c_str(), trackno, index, 1, FFMS_SEEK_NORMAL, &errinfo);
    if (videosource == nullptr) {
        FFMS_DestroyIndex(index);
        return false;
    }

    // The index is no longer needed once the video source exists
    FFMS_DestroyIndex(index);

    // Notify listeners that the file is open
    emit OnOpen();

    // Query the video properties
    const FFMS_VideoProperties* videoprops = FFMS_GetVideoProperties(videosource);
    num_frames = videoprops->NumFrames;

    // Read the first frame to learn the video dimensions
    const FFMS_Frame* propframe = FFMS_GetFrame(videosource, 0, &errinfo);
    if (propframe == nullptr) {
        return false;
    }
    DisplaySize_ = QPoint(propframe->EncodedWidth, propframe->EncodedHeight);
    emit OnUpdate(DisplaySize_);

    // Set the output pixel format, size, and resizer
    int pixfmts[2];
    pixfmts[0] = FFMS_GetPixFmt("yuv420p");
    pixfmts[1] = -1;
    if (FFMS_SetOutputFormatV2(videosource, pixfmts, DisplaySize_.x(), DisplaySize_.y(), FFMS_RESIZER_BICUBIC, &errinfo))
    {
        return false;
    }

    return true;
}


void QFFMpegReader::run()
{
    char errmsg[1024];
    FFMS_ErrorInfo errinfo;
    errinfo.Buffer = errmsg;
    errinfo.BufferSize = sizeof(errmsg);
    errinfo.ErrorType = FFMS_ERROR_SUCCESS;
    errinfo.SubType = FFMS_ERROR_SUCCESS;


    // Open the file
    FFMS_VideoSource* VideoSource = nullptr;
    int NumFrames = 0;
    if (!OpenImpl(VideoSource, NumFrames))
    {
        qDebug("Open File Failed [%s]", Url_.toStdString().c_str());
        return;
    }

    // Process frame by frame
    FFMS_Track* VideoTrack = FFMS_GetTrackFromVideo(VideoSource);
    const FFMS_TrackTimeBase* TrackTimeBase = FFMS_GetTimeBase(VideoTrack);

    // Initialize the playback clock
    QDateTime StartTime = QDateTime::currentDateTime();

    // The first PTS is not necessarily zero, so the first frame's PTS is subtracted from every timestamp
    int64_t StartPTS = 0;
    int FrameNum = 0;

    while (Running_ && (FrameNum < NumFrames))
    {
        // Fetch the frame
        const FFMS_Frame* Frame = FFMS_GetFrame(VideoSource, FrameNum, &errinfo);
        const FFMS_FrameInfo* FrameInfo = FFMS_GetFrameInfo(VideoTrack, FrameNum);

        if (Frame)
        {
            // Record the PTS of the first frame
            if (FrameNum == 0)
            {
                StartPTS = FrameInfo->PTS;
            }

            // Copy the frame. The plane buffers returned by FFMS_GetFrame are
            // owned by the video source and are reused on the next call, so the
            // data must be deep-copied before it crosses the thread boundary.
            FFFramePtr FFrame = FFFrame::MakeFrame();
            FFrame->TextureY = QByteArray((const char *)Frame->Data[0], Frame->Linesize[0] * Frame->ScaledHeight);
            FFrame->TextureU = QByteArray((const char *)Frame->Data[1], Frame->Linesize[1] * (Frame->ScaledHeight / 2));
            FFrame->TextureV = QByteArray((const char *)Frame->Data[2], Frame->Linesize[2] * (Frame->ScaledHeight / 2));

            // Wait millisecond by millisecond until this frame's presentation
            // time is due; a single long sleep would keep Close() blocked
            while (Running_)
            {
                int64_t PTS = (int64_t)(((FrameInfo->PTS - StartPTS) * TrackTimeBase->Num) / (double)TrackTimeBase->Den);
                int64_t CurPTS = StartTime.msecsTo(QDateTime::currentDateTime());
                if (CurPTS < PTS)
                {
                    std::this_thread::sleep_for(std::chrono::milliseconds(1));
                }
                else
                {
                    break;
                }
            }

            // Deliver the frame for display
            emit OnFrame(FFrame);
        }
        // Advance to the next frame
        FrameNum++;

        // When looping the single file, reset the start time
        if (FrameNum >= NumFrames)
        {
            StartTime = QDateTime::currentDateTime();
            FrameNum = 0;
        }
    }

    // Close the file
    if (VideoSource)
    {
        FFMS_DestroyVideoSource(VideoSource);
        VideoSource = nullptr;
    }
}

2. Rendering the Video Data

The decoded data is YUV. It could of course be converted to RGBA/BGRA on the CPU and then uploaded to OpenGL, but that has two clear drawbacks:
(1) RGBA takes far more upload bandwidth and video memory; yuv420p needs only 3/8 as much data as RGBA;
(2) converting YUV to RGBA costs a lot of CPU time; the CPU is poorly suited to this per-pixel work, while the GPU excels at it.
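As a concrete check of (1): a 1920×1080 frame in RGBA is 1920 × 1080 × 4 ≈ 8.3 MB, while the same frame in yuv420p is 1920 × 1080 × 1.5 ≈ 3.1 MB (a full-resolution Y plane plus quarter-resolution U and V planes), i.e. 1.5 versus 4 bytes per pixel, exactly the 3/8 ratio.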
The QFFMpegReader events fire on the reader thread, so when binding them to slots on the GUI thread the connection type must be set to Qt::ConnectionType::QueuedConnection. The three reader events are then handled as described in 2.1 to 2.3 below.
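One prerequisite the listings rely on but do not show: FFFramePtr is a custom type, and Qt can only marshal it through a queued connection once it has been registered with the meta-type system; otherwise the connection fails at runtime with a "Cannot queue arguments of type 'FFFramePtr'" warning. A minimal sketch (the placement is a choice; it just has to run before the first connect):

// e.g. at the bottom of qffmpegreader.h, after the FFFrame definition
Q_DECLARE_METATYPE(FFFramePtr)

// e.g. at the top of the QYUVWidegt constructor, before the connect() calls
qRegisterMetaType<FFFramePtr>("FFFramePtr");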

2.1 File opened

Any initialization work the application needs can be done here.

2.2 First frame read, reporting the video dimensions

The video dimensions are used to create the three textures. With yuv420p the chroma planes are subsampled 2×2, so the U and V textures are half the width and half the height of the Y texture (hence the VideoSize / 2 below).

2.3 Complete frame read, carrying the three YUV color planes

The frame data is used to update the contents of the three textures; note the row-padding subtlety sketched below.
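FFMS may pad each row of a plane out to Linesize bytes, which can be wider than the visible width, and OpenGL's default unpack alignment is 4 bytes; if stride and width differ, the picture shears. A hedged sketch of a stride-aware upload using QOpenGLPixelTransferOptions; it assumes FFFrame is extended with the plane's stride (a hypothetical LinesizeY member that is not in the listings above):

// requires #include <QOpenGLPixelTransferOptions>
// hypothetical FFFrame member: int LinesizeY, the Y plane's stride in bytes
QOpenGLPixelTransferOptions options;
options.setAlignment(1);                  // rows are tightly packed bytes
options.setRowLength(Frame->LinesizeY);   // stride in pixels (1 byte per pixel for R8)
texture_y->setData(0, 0, QOpenGLTexture::PixelFormat::Red,
                   QOpenGLTexture::PixelType::UInt8,
                   Frame->TextureY.data(), &options);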

qyuvwidegt.h

#ifndef QYUVWIDEGT_H
#define QYUVWIDEGT_H
#include <QOpenGLWidget>
#include <QOpenGLFunctions_4_5_Core>
#include <QOpenGLShaderProgram>
#include <QOpenGLBuffer>
#include <QOpenGLVertexArrayObject>
#include <QOpenGLTexture>
#include "qffmpegreader.h"

class QYUVWidegt : public QOpenGLWidget, protected QOpenGLFunctions_4_5_Core
{
    Q_OBJECT
public:
    explicit QYUVWidegt(QWidget *parent = nullptr);
    virtual ~QYUVWidegt();

signals:


    // QOpenGLWidget interface
protected:
    void initializeGL();
    void resizeGL(int w, int h);
    void paintGL();

private:
    QOpenGLTexture* createTexture(const QPoint VideoSize);

private:
    QFFMpegReader reader;
    QOpenGLShaderProgram program;

    QOpenGLBuffer VBO, EBO;
    QOpenGLVertexArrayObject VAO;

    QOpenGLTexture* texture_y = nullptr;
    QOpenGLTexture* texture_u = nullptr;
    QOpenGLTexture* texture_v = nullptr;
};

#endif // QYUVWIDEGT_H

qyuvwidegt.cpp

#include "qyuvwidegt.h"
#include <QDir>

QYUVWidegt::QYUVWidegt(QWidget *parent) : QOpenGLWidget(parent),
    VBO(QOpenGLBuffer::Type::VertexBuffer),
    EBO(QOpenGLBuffer::Type::IndexBuffer)
{
    connect(&reader, &QFFMpegReader::OnOpen, this,
            [&]{
        qDebug("OnOpen");
    }, Qt::ConnectionType::QueuedConnection);

    connect(&reader, &QFFMpegReader::OnUpdate, this, [&](const QPoint VideoSize){
        // Creating textures and setting uniforms requires the widget's GL context
        makeCurrent();

        texture_y = createTexture(VideoSize);
        texture_u = createTexture(VideoSize / 2); // chroma planes are subsampled 2x2
        texture_v = createTexture(VideoSize / 2);

        // Point each sampler uniform at its texture unit
        program.bind();
        int textureUniformY = program.uniformLocation("tex_y");
        int textureUniformU = program.uniformLocation("tex_u");
        int textureUniformV = program.uniformLocation("tex_v");

        program.setUniformValue(textureUniformY, 0);
        program.setUniformValue(textureUniformU, 1);
        program.setUniformValue(textureUniformV, 2);
        program.release();

        doneCurrent();

        qDebug("OnUpdate");

    }, Qt::ConnectionType::QueuedConnection);

    connect(&reader, &QFFMpegReader::OnFrame, this, [&](FFFramePtr Frame){
        // Uploading texture data also requires a current GL context
        makeCurrent();
        texture_y->setData(0, 0, QOpenGLTexture::PixelFormat::Red, QOpenGLTexture::PixelType::UInt8, Frame->TextureY.data());
        texture_u->setData(0, 0, QOpenGLTexture::PixelFormat::Red, QOpenGLTexture::PixelType::UInt8, Frame->TextureU.data());
        texture_v->setData(0, 0, QOpenGLTexture::PixelFormat::Red, QOpenGLTexture::PixelType::UInt8, Frame->TextureV.data());
        doneCurrent();

        update();
    }, Qt::ConnectionType::QueuedConnection);
}

QYUVWidegt::~QYUVWidegt()
{
    reader.Close();

    makeCurrent();

    // The textures only exist once a file has been opened
    if (texture_y) { texture_y->destroy(); delete texture_y; }
    if (texture_u) { texture_u->destroy(); delete texture_u; }
    if (texture_v) { texture_v->destroy(); delete texture_v; }

    VAO.destroy();
    VBO.destroy();
    EBO.destroy();

    doneCurrent();
}

QOpenGLTexture* QYUVWidegt::createTexture(const QPoint VideoSize)
{
    QOpenGLTexture* texture = new QOpenGLTexture(QOpenGLTexture::Target::Target2D);
    texture->create();
    texture->setFormat(QOpenGLTexture::TextureFormat::R8_UNorm);
    texture->setSize(VideoSize.x(), VideoSize.y());
    texture->allocateStorage();
    // Only mip level 0 is ever uploaded, so use a non-mipmap minification filter
    texture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
    texture->setWrapMode(QOpenGLTexture::ClampToEdge);

    return texture;
}

void QYUVWidegt::initializeGL()
{
    initializeOpenGLFunctions();
    glDisable(GL_DEPTH_TEST);
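
    // NOTE: the absolute shader paths below are specific to the author's machine;
    // shipping the shaders in a Qt resource file (e.g. ":/YUV.vert") would be more portable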

    QDir CurrentPath = QDir(R"(D:\work\OpenGL\QtWidget\Base)");
    if(!program.addShaderFromSourceFile(QOpenGLShader::Vertex, CurrentPath.absoluteFilePath(R"(YUV.vert)")) ||
        !program.addShaderFromSourceFile(QOpenGLShader::Fragment, CurrentPath.absoluteFilePath(R"(YUV.frag)")))
    {
        return;
    }
    program.link();

    float vertices[] = {
            // positions          // colors           // texture coords
             1.0f,  1.0f, 0.0f,   1.0f, 0.0f, 0.0f,   1.0f, 0.0f, // top right
             1.0f, -1.0f, 0.0f,   0.0f, 1.0f, 0.0f,   1.0f, 1.0f, // bottom right
            -1.0f, -1.0f, 0.0f,   0.0f, 0.0f, 1.0f,   0.0f, 1.0f, // bottom left
            -1.0f,  1.0f, 0.0f,   1.0f, 1.0f, 0.0f,   0.0f, 0.0f  // top left
        };

    unsigned int indices[] = {
        0, 1, 3, // first triangle
        1, 2, 3  // second triangle
    };

    // Geometry: a full-screen quad
    QOpenGLVertexArrayObject::Binder vaoBind(&VAO);

    VBO.create();
    VBO.bind();
    VBO.allocate(vertices, sizeof(vertices));

    EBO.create();
    EBO.bind();
    EBO.allocate(indices, sizeof(indices));

    int vertex = program.attributeLocation("vertex");
    program.setAttributeBuffer(vertex, GL_FLOAT, 0, 3, sizeof(GLfloat) * 8);
    program.enableAttributeArray(vertex);

    int color = program.attributeLocation("color");
    program.setAttributeBuffer(color, GL_FLOAT, sizeof(GLfloat) * 3, 3,  sizeof(GLfloat) * 8);
    program.enableAttributeArray(color);

    int uv = program.attributeLocation("uv");
    program.setAttributeBuffer(uv, GL_FLOAT, sizeof(GLfloat) * 6, 2, sizeof(GLfloat) * 8);
    program.enableAttributeArray(uv);

    VBO.release();

    reader.Open(R"(D:\work\OpenGL\QtWidget\Media\123.mp4)");
}

void QYUVWidegt::resizeGL(int w, int h)
{
    glViewport(0, 0, w, h);
}

void QYUVWidegt::paintGL()
{
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);//GL_LINE GL_FILL

    if(texture_y && texture_u && texture_v)
    {
        glActiveTexture(GL_TEXTURE0);
        texture_y->bind();
        glActiveTexture(GL_TEXTURE1);
        texture_u->bind();
        glActiveTexture(GL_TEXTURE2);
        texture_v->bind();

        QOpenGLVertexArrayObject::Binder binder(&VAO);

        program.bind();
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
        program.release();

        texture_y->release();
        texture_u->release();
        texture_v->release();
    }
}

3. Shader Code

YUV.vert

#version 450 core
layout(location=0) in vec3 vertex;
layout(location=1) in vec3 color;
layout(location=2) in vec2 uv;

out vec3 VertexColor;
out vec2 VertexUV;

void main(void)
{
    gl_Position = vec4(vertex, 1.0);
    VertexColor = color;
    VertexUV = uv;
}

YUV.frag

#version 450 core
out vec4 FragColor;

in vec3 VertexColor;
in vec2 VertexUV;

uniform sampler2D tex_y;
uniform sampler2D tex_u;
uniform sampler2D tex_v;

void main(void)
{
    vec3 yuv;
    vec3 rgb;
    yuv.x = texture(tex_y, VertexUV).r;
    yuv.y = texture(tex_u, VertexUV).r - 0.5;
    yuv.z = texture(tex_v, VertexUV).r - 0.5;
    rgb = mat3( 1,       1,         1,
                0,       -0.39465,  2.03211,
                1.13983, -0.58060,  0) * yuv;
    //FragColor = vec4(rgb * VertexColor, 1);
    FragColor = vec4(rgb, 1);
}
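
GLSL's mat3 constructor is column-major, so the multiply above expands to the classic full-range YUV→RGB equations:

R = Y + 1.13983 * V
G = Y - 0.39465 * U - 0.58060 * V
B = Y + 2.03211 * U

These coefficients assume full-range samples; much real-world video is limited-range (luma 16 to 235), and with this shader such files will look slightly washed out unless the range is expanded first.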
