I've previously covered decoding video with FFmpeg and rendering the YUV data with OpenGL in Qt; this time I'll walk through FFmpeg hardware decoding.
Most of the material you find online covers FFmpeg software decoding, where the decoded output is YUV. We know that NVIDIA's hardware decoder outputs NV12 instead, so how do we handle that in code?
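Before diving in, it helps to picture the NV12 layout: a full-resolution Y plane followed by a single half-resolution plane of interleaved U/V bytes. A small helper (hypothetical, purely for illustration) makes the offsets explicit:
#include <cstddef>
#include <cstdint>
// NV12: [ w*h bytes of Y ][ w*h/2 bytes of interleaved U,V,U,V,... ]
struct Nv12View {
    uint8_t *y;      // w * h luma bytes
    uint8_t *uv;     // (w * h) / 2 interleaved chroma bytes
    size_t   total;  // w * h * 3 / 2 bytes per frame
};
Nv12View nv12View(uint8_t *buffer, int w, int h)
{
    const size_t lumaSize = static_cast<size_t>(w) * h;
    return { buffer, buffer + lumaSize, lumaSize * 3 / 2 };
}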
Later, watching someone play video with the ffplay tool, I noticed you can explicitly name the decoder, and a search on 360 Search turned up FFmpeg's other decoder-lookup function, avcodec_find_decoder_by_name. What do you pass in? Running ffplay with the -decoders option on the command line lists every available decoder, and the name at the front of each entry is exactly what this function accepts.
My ffplay lists h264_cuvid, which means my FFmpeg build supports NVIDIA hardware decoding. The code follows.
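If you'd rather verify this in code than by reading the decoder list, a minimal runtime check (assuming the same pre-4.0 FFmpeg API used in the rest of this post) looks like this:
extern "C" {
#include <libavcodec/avcodec.h>
}
#include <cstdio>
int main()
{
    avcodec_register_all(); // required before lookups on FFmpeg < 4.0
    AVCodec *dec = avcodec_find_decoder_by_name("h264_cuvid");
    std::printf("h264_cuvid is %s\n", dec ? "available" : "not available");
    return 0;
}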
1. Start a thread to fetch the video data
#ifndef VIDEODATA_H
#define VIDEODATA_H
#include <QThread>
class VideoData : public QThread
{
Q_OBJECT
public:
VideoData(QObject *parent = nullptr);
void setUrl(QString);
signals:
void sigNewFrame();
void sigStarted(uchar*,int,int);
protected:
void run() override;
private:
QString m_url;
bool is_started = false;
};
#endif // VIDEODATA_H
Its run() function looks like this:
av_register_all(); // note: this code targets the pre-4.0 FFmpeg API throughout
avformat_network_init();
AVFormatContext *pAVFomatContext = avformat_alloc_context();
AVCodecContext* pAVCodecContext = nullptr;
AVFrame *pAVFrame = av_frame_alloc();
AVDictionary *opt = nullptr;
// av_dict_set(&opt,"probesize","4096",0);
// av_dict_set(&opt,"buffer_size","1024000",0);
// av_dict_set(&opt,"stimeout","5000000",0); //wei miao 5000000
// av_dict_set(&opt,"max_delay","0",0);
av_dict_set(&opt,"rtsp_transport","udp",0);
int result = avformat_open_input(&pAVFomatContext,m_url.toStdString().data(),nullptr,&opt); // pass the options set above
if(result < 0){
qDebug() << "failed to open the video";
return;
}
result = avformat_find_stream_info(pAVFomatContext,nullptr);
if(result < 0){
qDebug() << "failed to get video stream info";
return;
}
int videoStreamIndex = -1;
for(uint i = 0; i < pAVFomatContext->nb_streams; i++){
if(pAVFomatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
videoStreamIndex = i;
break;
}
}
if(videoStreamIndex == -1){
qDebug() << "failed to find a video stream";
return;
}
int vnum = pAVFomatContext->streams[videoStreamIndex]->avg_frame_rate.num; // frame rate from the stream
int vden = pAVFomatContext->streams[videoStreamIndex]->avg_frame_rate.den;
int fps;
if(vden <= 0 || vnum <= 0){
fps = 25;
qDebug() << "get video fps error";
qDebug() << "use default " << fps;
}else{
fps = vnum/vden;
qDebug() << "video fps:" << fps;
}
pAVCodecContext = pAVFomatContext->streams[videoStreamIndex]->codec;
int videoWidth = pAVCodecContext->width;
int videoHeight = pAVCodecContext->height;
AVCodec *pAVCodec;
// pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
pAVCodec = avcodec_find_decoder_by_name("h264_cuvid"); // look up the NVIDIA decoder
if(!pAVCodec){
qDebug() << "find h264_cuvid failed";
return;
}
int numBytes = avpicture_get_size(AV_PIX_FMT_NV12,videoWidth,videoHeight);
uint8_t *out_buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
int y_size = pAVCodecContext->width * pAVCodecContext->height;
AVPacket* pAVPacket = (AVPacket*) malloc(sizeof(AVPacket));
av_new_packet(pAVPacket,y_size);
av_dump_format(pAVFomatContext,0,m_url.toStdString().data(),0);
result = avcodec_open2(pAVCodecContext,pAVCodec,nullptr);
if(result < 0){
qDebug() << "failed to open the decoder";
return;
}
qDebug() << "video stream initialized successfully";
int got_picture = 0;
while (!isInterruptionRequested()) {
if(av_read_frame(pAVFomatContext,pAVPacket) <0){
break;
}
avcodec_decode_video2(pAVCodecContext,pAVFrame,&got_picture,pAVPacket);
if(got_picture){
if(!is_started){
emit sigStarted(out_buffer,videoWidth,videoHeight);
is_started = true;
}
int bytes = 0;
for(int i = 0; i < videoHeight; i++){ // copy the Y plane row by row (linesize may include padding)
::memcpy(out_buffer + bytes,pAVFrame->data[0] + pAVFrame->linesize[0] * i,videoWidth);
bytes += videoWidth;
}
int uv = videoHeight >> 1;
for(int i = 0; i < uv; i++){ // copy the interleaved UV plane
::memcpy(out_buffer + bytes,pAVFrame->data[1] + pAVFrame->linesize[1] * i,videoWidth);
bytes += videoWidth;
}
emit sigNewFrame(); // trigger a repaint
msleep(1000/fps); // crude pacing: wait roughly one frame interval
}
av_free_packet(pAVPacket);
}
is_started = false;
av_frame_free(&pAVFrame);
free(pAVPacket);
avcodec_close(pAVCodecContext);
avformat_close_input(&pAVFomatContext);
av_free(out_buffer);
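One caveat about the code above: sigStarted() hands the GUI thread a raw pointer into out_buffer, which the decode thread keeps overwriting, so a repaint can catch a half-written frame. A simple hardening (my sketch, not part of the original project) would send each frame by value instead:
// Hypothetical replacement signal on VideoData, shown for illustration:
//     void sigNewFrame(QByteArray frame, int w, int h);
// In run(), after the plane copies:
emit sigNewFrame(QByteArray(reinterpret_cast<const char*>(out_buffer), numBytes),
                 videoWidth, videoHeight);
// QByteArray is implicitly shared, so the queued copy is cheap, and the GUI
// thread renders its own snapshot instead of the live decode buffer.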
2. Display, as before, with a QOpenGLWidget
#ifndef WIDGET_H
#define WIDGET_H
#include <QOpenGLWidget>
QT_FORWARD_DECLARE_CLASS(Nv12Render)
QT_FORWARD_DECLARE_CLASS(VideoData)
class Widget : public QOpenGLWidget
{
Q_OBJECT
public:
Widget(QWidget *parent = nullptr);
~Widget();
public slots:
void play(QString);
void stop();
protected:
void initializeGL() override;
void paintGL() override;
private:
Nv12Render *m_render;
uchar *m_ptr;
int m_width,m_height;
VideoData *m_videoData;
};
#endif // WIDGET_H
#include "widget.h"
#include "nv12render.h"
#include "videodata.h"
Widget::Widget(QWidget *parent)
    : QOpenGLWidget(parent), m_ptr(nullptr), m_width(0), m_height(0)
{
m_render = new Nv12Render;
m_videoData = new VideoData(this);
connect(m_videoData,SIGNAL(sigNewFrame()),this,SLOT(update()));
connect(m_videoData,&VideoData::sigStarted,this,[this](uchar* p, int w, int h){
m_ptr = p;
m_width = w;
m_height = h;
});
}
Widget::~Widget()
{
m_videoData->requestInterruption();
m_videoData->quit();
m_videoData->wait();
m_videoData->deleteLater();
makeCurrent(); // the renderer's GL objects need a current context to be released
delete m_render;
doneCurrent();
}
void Widget::play(QString s)
{
m_videoData->setUrl(s);
m_videoData->start();
}
void Widget::stop()
{
m_videoData->requestInterruption();
}
void Widget::initializeGL()
{
m_render->initialize();
}
void Widget::paintGL()
{
m_render->render(m_ptr,m_width,m_height);
}
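For completeness, here is one way to drive the widget (a hypothetical main(), assuming a plain Qt Widgets project; the URL is an example, replace it with your own stream):
#include <QApplication>
#include "widget.h"
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    Widget w;
    w.resize(800, 600);
    w.show();
    w.play("rtsp://192.168.1.100/stream"); // example URL
    return a.exec();
}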
3. Write an NV12 renderer
#ifndef NV12RENDER_H
#define NV12RENDER_H
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <QOpenGLBuffer>
class Nv12Render : public QOpenGLFunctions
{
public:
Nv12Render() = default;
Nv12Render(const Nv12Render&) = delete;
void initialize();
void render(uchar*nv12Ptr, int w, int h);
private:
QOpenGLShaderProgram program;
GLuint idY,idUV;
QOpenGLBuffer vbo;
};
#endif // NV12RENDER_H
#include "nv12render.h"
void Nv12Render::initialize()
{
initializeOpenGLFunctions();
const char *vsrc =
"attribute vec4 vertexIn; \
attribute vec4 textureIn; \
varying vec4 textureOut; \
void main(void) \
{ \
gl_Position = vertexIn; \
textureOut = textureIn; \
}";
const char *fsrc =
"varying mediump vec4 textureOut;\n"
"uniform sampler2D textureY;\n"
"uniform sampler2D textureUV;\n"
"void main(void)\n"
"{\n"
"vec3 yuv; \n"
"vec3 rgb; \n"
"yuv.x = texture2D(textureY, textureOut.st).r - 0.0625; \n"
"yuv.y = texture2D(textureUV, textureOut.st).r - 0.5; \n"
"yuv.z = texture2D(textureUV, textureOut.st).g - 0.5; \n"
"rgb = mat3( 1, 1, 1, \n"
"0, -0.39465, 2.03211, \n"
"1.13983, -0.58060, 0) * yuv; \n"
"gl_FragColor = vec4(rgb, 1); \n"
"}\n";
program.addCacheableShaderFromSourceCode(QOpenGLShader::Vertex,vsrc);
program.addCacheableShaderFromSourceCode(QOpenGLShader::Fragment,fsrc);
program.link();
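// First 8 floats are the quad's vertex positions; the last 8 are the matching texture coordinates.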
GLfloat points[]{
-1.0f, 1.0f,
1.0f, 1.0f,
1.0f, -1.0f,
-1.0f, -1.0f,
0.0f,0.0f,
1.0f,0.0f,
1.0f,1.0f,
0.0f,1.0f
};
vbo.create();
vbo.bind();
vbo.allocate(points,sizeof(points));
GLuint ids[2];
glGenTextures(2,ids);
idY = ids[0];
idUV = ids[1];
}
void Nv12Render::render(uchar *nv12Ptr, int w, int h)
{
if(!nv12Ptr)return;
glClearColor(0.5f, 0.5f, 0.7f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
program.bind();
vbo.bind();
program.enableAttributeArray("vertexIn");
program.enableAttributeArray("textureIn");
program.setAttributeBuffer("vertexIn",GL_FLOAT, 0, 2, 2*sizeof(GLfloat));
program.setAttributeBuffer("textureIn",GL_FLOAT,2 * 4 * sizeof(GLfloat),2,2*sizeof(GLfloat));
glActiveTexture(GL_TEXTURE0 + 1);
glBindTexture(GL_TEXTURE_2D,idY);
glTexImage2D(GL_TEXTURE_2D,0,GL_RED,w,h,0,GL_RED,GL_UNSIGNED_BYTE,nv12Ptr);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glActiveTexture(GL_TEXTURE0 + 0);
glBindTexture(GL_TEXTURE_2D,idUV);
glTexImage2D(GL_TEXTURE_2D,0,GL_RG,w >> 1,h >> 1,0,GL_RG,GL_UNSIGNED_BYTE,nv12Ptr + w*h);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
program.setUniformValue("textureUV",0);
program.setUniformValue("textureY",1);
glDrawArrays(GL_QUADS,0,4);
program.disableAttributeArray("vertexIn");
program.disableAttributeArray("textureIn");
program.release();
}
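Two OpenGL details worth noting. glTexImage2D unpacks rows with 4-byte alignment by default, so a width that is not a multiple of 4 will skew the image; setting the unpack alignment once in initialize() avoids that. Also, GL_QUADS only exists in the compatibility profile; on a core profile the same quad can be drawn as a triangle fan:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1); // tolerate widths not divisible by 4
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);   // core-profile alternative to GL_QUADS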
Result:
Source project: click here to download.