Qt Multithreaded Camera Capture, Processing, and Display (QVideoProbe + QCamera + QThread)

1. Environment

Operating systems: Ubuntu 18.04, Windows, Android

Qt version: 5.12.6

Camera: USB camera, or the host machine's built-in camera attached to a virtual machine

2. Functionality

In a worker thread, QVideoProbe captures each camera frame; after processing (adding a time watermark), the frame is sent to the main thread through the signal/slot mechanism and displayed in the UI.

The worker thread is implemented with moveToThread rather than by subclassing QThread: QVideoProbe delivers frames through a slot, which requires a running event loop in the worker thread, and that is awkward to arrange when subclassing. Moving a worker QObject to a QThread keeps the thread's default event loop available, as the sketch below shows.
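A condensed sketch of the wiring (the class, signal, and member names follow the full code listed later in this article; this is only the skeleton, not the complete implementation):

// Inside Widget's constructor (see widget.cpp below):
work_class  = new VideoReadThread_0;      // plain QObject worker, no QThread subclass
work_thread = new QThread(this);

// With the default AutoConnection, run() executes in whatever thread work_class lives in.
connect(this, SIGNAL(StartWorkThread()), work_class, SLOT(run()));
work_class->moveToThread(work_thread);    // the worker's slots now run in work_thread

// When the "open camera" button is pressed:
work_thread->start();                     // starts the worker thread's event loop
emit StartWorkThread();                   // run() -> Camear_Init() executes in work_thread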

[Figure 1]

[Figure 2]

3. Core Code

main.cpp

#include "widget.h"

#include <QApplication>

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    Widget w;
    w.show();
    return a.exec();
}

Camera.cpp

#include "Camera.h"

/*
Function: convert packed YUYV data to RGB888
Parameters:
unsigned char *yuv_buffer: YUYV source data
unsigned char *rgb_buffer: converted RGB output
int iWidth, int iHeight  : image width and height
*/
void yuyv_to_rgb(unsigned char *yuv_buffer,unsigned char *rgb_buffer,int iWidth,int iHeight)
{
    int x;
    int z=0;
    unsigned char *ptr = rgb_buffer;
    unsigned char *yuyv= yuv_buffer;
    /*Each 4-byte YUYV macropixel (Y0 U Y1 V) produces two RGB pixels*/
    for (x = 0; x < iWidth*iHeight; x++)
    {
        int r, g, b;
        int y, u, v;

        if (!z)
        y = yuyv[0] << 8;
        else
        y = yuyv[2] << 8;
        u = yuyv[1] - 128;
        v = yuyv[3] - 128;

        r = (y + (359 * v)) >> 8;
        g = (y - (88 * u) - (183 * v)) >> 8;
        b = (y + (454 * u)) >> 8;

        *(ptr++) = (r > 255) ? 255 : ((r < 0) ? 0 : r);
        *(ptr++) = (g > 255) ? 255 : ((g < 0) ? 0 : g);
        *(ptr++) = (b > 255) ? 255 : ((b < 0) ? 0 : b);

        if(z++)
        {
            z = 0;
            yuyv += 4;
        }
    }
}

void NV21_TO_RGB24(unsigned char *yuyv, unsigned char *rgb, int width, int height)
{
    const int nv_start = width * height ;
    int  index = 0, rgb_index = 0;
    uint8_t y, u, v;
    int r, g, b, nv_index = 0,i, j;

    for(i = 0; i < height; i++){
        for(j = 0; j < width; j ++){
            //nv_index = (rgb_index / 2 - width / 2 * ((i + 1) / 2)) * 2;
            nv_index = i / 2  * width + j - j % 2;

            y = yuyv[rgb_index];
            u = yuyv[nv_start + nv_index ];
            v = yuyv[nv_start + nv_index + 1];

            r = y + (140 * (v-128))/100;  //r
            g = y - (34 * (u-128))/100 - (71 * (v-128))/100; //g
            b = y + (177 * (u-128))/100; //b

            if(r > 255)   r = 255;
            if(g > 255)   g = 255;
            if(b > 255)   b = 255;
            if(r < 0)     r = 0;
            if(g < 0)     g = 0;
            if(b < 0)     b = 0;

            index = rgb_index % width + (height - i - 1) * width;
            //rgb[index * 3+0] = b;
            //rgb[index * 3+1] = g;
            //rgb[index * 3+2] = r;

            //vertically flipped output (alternative)
            //rgb[height * width * 3 - i * width * 3 - 3 * j - 1] = b;
            //rgb[height * width * 3 - i * width * 3 - 3 * j - 2] = g;
            //rgb[height * width * 3 - i * width * 3 - 3 * j - 3] = r;

            //normal (upright) output
            rgb[i * width * 3 + 3 * j + 0] = b;
            rgb[i * width * 3 + 3 * j + 1] = g;
            rgb[i * width * 3 + 3 * j + 2] = r;

            rgb_index++;
        }
    }
}

static unsigned char video_yuv420p_buff[VIDEO_WIDTH*VIDEO_HEIGHT*3/2];
static unsigned char video_yuv420p_buff_temp[VIDEO_WIDTH*VIDEO_HEIGHT*3/2];
class VideoAudioEncode videoaudioencode_0;

void VideoReadThread_0::Camear_Init()
{
    /*Create the camera object for the selected device*/
    camera = new QCamera(videoaudioencode_0.camera);
    m_pProbe = new QVideoProbe;
    if(m_pProbe != nullptr)
    {
        m_pProbe->setSource(camera); //returns true if this camera supports frame probing
        connect(m_pProbe, SIGNAL(videoFrameProbed(QVideoFrame)),this, SLOT(slotOnProbeFrame(QVideoFrame)), Qt::QueuedConnection);
    }

    /*Configure the camera capture mode*/
    //camera->setCaptureMode(QCamera::CaptureStillImage);  //use this setting when running on Linux
    camera->setCaptureMode(QCamera::CaptureVideo); //use this setting when running on Android

    /*Start the camera*/
    camera->start();

    /*Set the capture pixel format and resolution*/
    QCameraViewfinderSettings settings;
    settings.setPixelFormat(QVideoFrame::Format_YUYV); //pixel format; Android only supports NV21
    settings.setResolution(QSize(VIDEO_WIDTH,VIDEO_HEIGHT)); //camera resolution
    camera->setViewfinderSettings(settings);

}

void VideoReadThread_0::slotOnProbeFrame(const QVideoFrame &frame)
{
    //qDebug()<<"Thread ID of the probe slot:"<<QThread::currentThreadId();

    /*Map the frame and convert it to RGB888 (YUYV on the desktop, NV21 on Android)*/
    QVideoFrame cloneFrame(frame);
    cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
    static unsigned char rgb_buffer[VIDEO_WIDTH*VIDEO_HEIGHT*3];
    if(cloneFrame.pixelFormat() == QVideoFrame::Format_NV21)
        NV21_TO_RGB24(cloneFrame.bits(), rgb_buffer, VIDEO_WIDTH, VIDEO_HEIGHT);
    else
        yuyv_to_rgb(cloneFrame.bits(), rgb_buffer, VIDEO_WIDTH, VIDEO_HEIGHT);
    cloneFrame.unmap();

    /*Draw the time watermark and hand the image to the UI thread*/
    QImage image(rgb_buffer, VIDEO_WIDTH, VIDEO_HEIGHT, VIDEO_WIDTH*3, QImage::Format_RGB888);
    QPainter painter(&image);
    painter.setPen(Qt::red);
    painter.drawText(10, 20, QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss"));
    emit VideoDataOutput(image.copy()); //copy() detaches from the static buffer before crossing threads
}

//Stop capturing and release the camera
void VideoReadThread_0::stop()
{
    if(camera)
    {
        camera->stop();
        delete camera;
        camera=nullptr;
    }
    if(m_pProbe)
    {
        delete  m_pProbe;
        m_pProbe=nullptr;
    }
}

//Worker entry slot: stop any previous capture, then initialize the camera
void VideoReadThread_0::run()
{
    stop();
    Camear_Init();
    qDebug()<<"Camera capture started";
    qDebug()<<"Thread ID of the run() slot:"<<QThread::currentThreadId();
}

Camera.h

#ifndef CAMERA_H
#define CAMERA_H
#include <QObject>
#include <QThread>
#include <QDebug>
#include <QMutex>
#include <QWaitCondition>
#include <QQueue>
#include <QByteArray>
#include <QImage>
#include <QPainter>
#include <QDateTime>
#include <QCamera>
#include <QCameraInfo>
#include <QCameraViewfinderSettings>
#include <QVideoProbe>
#include <QVideoFrame>
#include <QAbstractVideoBuffer>
#include <QAudioDeviceInfo>   //these five are the Qt audio headers
#include <QAudioInput>
#include <QAudioOutput>
#include <QAudioFormat>
#include <QAudioBuffer>

//Video output size
#define VIDEO_WIDTH  640
#define VIDEO_HEIGHT 480

class VideoReadThread_0:public QObject
{
    Q_OBJECT
public:
    QCamera *camera;
    QVideoProbe *m_pProbe;

    VideoReadThread_0(QObject* parent=nullptr):QObject(parent){camera=nullptr;m_pProbe=nullptr;}
    ~VideoReadThread_0(){}
    void Camear_Init(void);
public slots:
    void slotOnProbeFrame(const QVideoFrame &frame);
    void run();
    void stop();
signals:
    void VideoDataOutput(QImage ); //output signal carrying the processed frame

};

//Video/audio encoding helper class
class VideoAudioEncode
{
public:
    /*keep-capturing flag*/
    bool run_flag;
    bool mode; //0: save video to a file, 1: push the stream
    /*video*/
    QMutex  video_encode_mutex;
    QWaitCondition video_WaitConditon;
    QCameraInfo camera; //currently selected camera
    /*audio*/
    QAudioDeviceInfo audio;
    QMutex  audio_encode_mutex;
    QQueue<QByteArray> audio_data_queue; //queued audio chunks (element type assumed)
};
extern class VideoAudioEncode videoaudioencode_0;
#endif // CAMERA_H

widget.cpp

#include "widget.h"
#include "ui_widget.h"

Widget::Widget(QWidget *parent)
    : QWidget(parent)
    , ui(new Ui::Widget)
{
    ui->setupUi(this);
    qDebug()<<"主线程的ID:"<setWindowTitle("监控设备");

    //启动摄像头的信号
    connect(this,SIGNAL(StartWorkThread()),work_class,SLOT(run()));
    //释放资源-释放摄像头
    connect(this,SIGNAL(Stop_VideoAudioEncode_0()),work_class,SLOT(stop()));

    //连接摄像头采集信号,在主线程实时显示视频画面
    connect(work_class,SIGNAL(VideoDataOutput(QImage)),this,SLOT(VideoDataDisplay_0(QImage)));

    //将类移动到子线程工作
    work_class->moveToThread(work_thread);
}


Widget::~Widget()
{
    delete ui;
}

//Cab view: refresh the video display
void Widget::VideoDataDisplay_0(QImage image)
{
    QPixmap my_pixmap;
    my_pixmap.convertFromImage(image.copy());
    ui->label->setPixmap(my_pixmap);
}


//Scan for camera devices and fill the combo box
void Widget::on_pushButton_clicked()
{
    /*Refresh the list of available cameras*/
    video_dev_list.clear();
    ui->comboBox->clear();
    video_dev_list=QCameraInfo::availableCameras();
    for(int i=0;i<video_dev_list.size();i++)
    {
        ui->comboBox->addItem(video_dev_list.at(i).deviceName());
    }
}

//Open the selected camera device
void Widget::on_pushButton_2_clicked()
{
    //Remember the currently selected camera
    videoaudioencode_0.camera=video_dev_list.at(ui->comboBox->currentIndex());
    work_thread->start();
    emit StartWorkThread();
}


//Close the camera device
void Widget::on_pushButton_3_clicked()
{
    emit Stop_VideoAudioEncode_0(); //release the camera
//    QThread::msleep(10); //wait for the camera to close
    //stop the worker thread
 //   work_thread->quit();
  //  work_thread->wait();
}

widget.h

#ifndef WIDGET_H
#define WIDGET_H

#include <QWidget>
#include "Camera.h"

QT_BEGIN_NAMESPACE
namespace Ui { class Widget; }
QT_END_NAMESPACE

class Widget : public QWidget
{
    Q_OBJECT

public:
    Widget(QWidget *parent = nullptr);
    ~Widget();
    QList<QCameraInfo> video_dev_list; //available cameras
    VideoReadThread_0 *work_class;
    QThread *work_thread;

private slots:
    void on_pushButton_clicked();
    void VideoDataDisplay_0(QImage image);
    void on_pushButton_2_clicked();
    void on_pushButton_3_clicked();

signals:
    void StartWorkThread();
    void Stop_VideoAudioEncode_0(); //stop capture and release the camera
private:
    Ui::Widget *ui;
};
#endif // WIDGET_H

4. Runtime Notes

The program works on Windows, Android, and Ubuntu.

One thing to watch: on Windows, some cameras do not accept the YUYV output format, so the pixel-format setting in the code may need a small adjustment for your device, for example by querying the formats the camera actually supports, as sketched below.
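A minimal sketch of that check, assuming the camera object has already been created as in Camear_Init(); the fallback of simply taking the first reported format is illustrative, not part of the original code:

    camera->load(); //populate the supported-settings lists without starting capture
    QList<QVideoFrame::PixelFormat> formats = camera->supportedViewfinderPixelFormats();
    qDebug() << "Supported pixel formats:" << formats;

    QCameraViewfinderSettings settings;
    settings.setResolution(QSize(VIDEO_WIDTH, VIDEO_HEIGHT));
    if (formats.contains(QVideoFrame::Format_YUYV))
        settings.setPixelFormat(QVideoFrame::Format_YUYV);
    else if (!formats.isEmpty())
        settings.setPixelFormat(formats.first()); //fall back to whatever the driver offers
    camera->setViewfinderSettings(settings);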

 
