Writing a Multi-Channel Video Surveillance Client with Qt

I recently finished a multi-channel video surveillance client written in Qt, specifically a four-channel monitoring interface. This post summarizes the overall approach.

The project builds on a single-channel TCP network video player client that a predecessor had already completed; my main work was turning one channel into four. This is done with Qt's QThread: four TCP clients and four decoders are created, one per thread, with each TCP client paired with its own decoder so that the four decoders can run simultaneously. The images produced by the decoders are then shown in a Qt-designed window: the window contains four QLabel widgets, and showing each decoded QImage on its corresponding label is all that is needed for playback.
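As a rough sketch of the display side (the window, label, and slot names here are illustrative assumptions, not taken from the original project), each channel's thread emits its decoded QImage through a signal and the main window shows it on the matching QLabel:

// monitor_window.h -- illustrative sketch only
#include <QWidget>
#include <QLabel>
#include <QGridLayout>
#include <QPixmap>
#include <QImage>

class MonitorWindow : public QWidget
{
    Q_OBJECT
public:
    explicit MonitorWindow(QWidget *parent = 0) : QWidget(parent)
    {
        QGridLayout *grid = new QGridLayout(this);
        for (int i = 0; i < 4; ++i) {
            labels[i] = new QLabel(this);
            labels[i]->setScaledContents(true);        // stretch each frame to fill its label
            grid->addWidget(labels[i], i / 2, i % 2);  // 2x2 grid of video panes
        }
    }

public slots:
    // One display slot per channel, connected to that channel's decordeframe(QImage) signal.
    void showFrame0(QImage img) { labels[0]->setPixmap(QPixmap::fromImage(img)); }
    void showFrame1(QImage img) { labels[1]->setPixmap(QPixmap::fromImage(img)); }
    void showFrame2(QImage img) { labels[2]->setPixmap(QPixmap::fromImage(img)); }
    void showFrame3(QImage img) { labels[3]->setPixmap(QPixmap::fromImage(img)); }

private:
    QLabel *labels[4];
};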

The multithreading pattern is simply to create a new class that inherits from QThread and override its run() method.
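In its most stripped-down form the pattern looks like this (a generic sketch, not the project's actual class):

#include <QThread>

class worker_thread : public QThread
{
    Q_OBJECT
public:
    worker_thread() : stopped(false) {}
    volatile bool stopped;          // set to true from the GUI thread to end the loop

protected:
    void run()                      // runs in the new thread once start() is called
    {
        while (!stopped) {
            // receive one frame from the socket, decode it, emit a QImage ...
        }
    }
};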

I have seen many people asking for the code. The principle is actually simple: if single-channel playback already works, you only need to add a few more threads, each creating its own socket and decoder. The main implementation code follows.

First, create the socket thread header, socket_thread.h:

#ifndef SOCKET_THREAD
#define SOCKET_THREAD

#include <QThread>
#include <QMutex>
#include <QImage>
#include <cstdio>
/* Project-specific headers (tcpClient, DEC_HANDLE, MT_VID_DEC_INIT, MT_VID_DEC_INPUT,
   SIZE_S, MK_MODIFY, SAMPLE_RECV_PARA_S, FFmpeg wrappers, ...) are assumed to be included here. */

/* Thread that receives one video stream over TCP and decodes it */
class socket_thread : public QThread
{
    Q_OBJECT

public:
    explicit socket_thread();
    ~socket_thread();

public:
    void dec_create(MT_VID_DEC_INIT *dec_init); // create the decoder
    int tcp_connect(char save_ip[16], u_short port, SAMPLE_RECV_PARA_S *hisi);
    int dec_frame_decoder(); // decode one frame and emit it for display
    void stop();
    void close();
    void start_thread(MT_VID_DEC_INIT *dec_init);
    int  tcp_func(unsigned char func[6]);
    int  tcp_func_r(unsigned char func_r[6]);
    int send_key(int pos, unsigned char value, int press_release); // keyboard event from the main window, packed and sent
    int send_mouse(int pos, unsigned char value, int press_release);

    int mouse_move_get(int pos, unsigned char value, int press_release);
    int mouse_move_send();
    void save_file(char* fileName); // save the raw bitstream to a file
signals:
    void decordeframe(QImage); // decoded frame, ready for display
    void socketframe(QImage);  // current frame, for snapshot saving

protected:
    void run();

public:
    DEC_HANDLE* handle;              // decoder state needed during decoding
    MT_VID_DEC_INPUT * mt_dec_input; // address and size of one complete frame of bitstream
    tcpClient *         tcp;
    SIZE_S *resolution;              // video resolution reported by the board
    MK_MODIFY       *mk_modification;

private:
    volatile bool stopped;
    QMutex mutex;
    FILE *fp;
    volatile bool save_flag;
    // FILE *fprgb;
};

#endif // SOCKET_THREAD

Next, the implementation, socket_thread.cpp:

#include "socket_thread.h"

unsigned char recv_buf[1024*1024*2];    // buffer for received packets
unsigned char str_frame_buf[0x200000];  // buffer holding one complete frame of bitstream

socket_thread::socket_thread()
{
    stopped = false;
    save_flag = false;
    av_register_all();      // initialize the ffmpeg library and register container formats
    avcodec_register_all(); // register the codecs
    avdevice_register_all();// register the devices
  //  fp = fopen("new","ab");
    //fprgb = fopen("save.rgb","ab");
    tcp = new tcpClient;             // TCP connection object, used to send and receive data
    mk_modification = new MK_MODIFY; // keyboard/mouse message helper
    mk_modification->init_function();

    handle = (DEC_HANDLE *)malloc(sizeof(DEC_HANDLE));
    mt_dec_input = (MT_VID_DEC_INPUT *)malloc(sizeof(MT_VID_DEC_INPUT));
    resolution = (SIZE_S *)malloc(sizeof(SIZE_S));
}

socket_thread::~socket_thread()
{
    stopped = true;
    save_flag = false;
    delete mk_modification;
    delete tcp;
}

/**** Thread start helper: apply the negotiated resolution and create the decoder ****/
void socket_thread::start_thread(MT_VID_DEC_INIT *dec_init)
{
    dec_init->create_height = resolution->u32Height;
    dec_init->create_width  = resolution->u32Width;
    dec_create(dec_init);

    if (isRunning()) {
        qDebug("dec-thread-is-running");
        return;
    }
    else
        start();

}

/*** Thread body: keep receiving the bitstream and hand each frame to the decoder for display ****/
void socket_thread::run()
{
    tcp->send_flag();
    int ret;
    unsigned int nal_Size = 0;
    unsigned int recv_buf_cnt = 0;
    unsigned int frame_type = 0;
    unsigned int recv_iframe_flag = 0;

    while(!stopped) {
      //  mutex.lock();
        // receive one frame of bitstream
         ret = tcp->bitstrm_recv_package(&recv_buf_cnt, recv_buf,str_frame_buf,&nal_Size, &frame_type);
         if(ret < 0)
         {
            qDebug("bitstrm_recv_package err\n");
            continue;  /* go back and receive the next frame */
         }

         if(frame_type == 1)   /* an I-frame was received */
          {
             recv_iframe_flag = 1;  /* remember that an I-frame has arrived */
          }

          if(recv_iframe_flag == 0)
          {
              continue;  /* discard frames until the first I-frame arrives */
          }

          mt_dec_input->pnal_stream = str_frame_buf;
          mt_dec_input->nal_size  = nal_Size;

          if(save_flag){
            fwrite(str_frame_buf,1,nal_Size,fp);
          }

         ret = dec_frame_decoder();
#if 0
        if(ret != 0){
            qDebug("解码显示错误");
            break;
        }
#endif

       memset(str_frame_buf,0,sizeof(str_frame_buf));

     //  mutex.unlock();
    }
    qDebug("结束线程");
    stopped = false;

}

/***** Decoder creation *****/
void socket_thread::dec_create(MT_VID_DEC_INIT *dec_init)
{
    handle->videoCodec = NULL;
    handle->videoCodecContext = NULL;

    // look up the FFmpeg decoder by codec ID and return it
    if(dec_init->codec_type== 0)
    {
        handle->videoCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    }
    else if(dec_init->codec_type == 1)
    {
        handle->videoCodec = avcodec_find_decoder(AV_CODEC_ID_H265);
    }
    else
    {
        qDebug("find h264 or h265 codec error");
    }

    if(handle->videoCodec == NULL)
    {
        qDebug("avcodec find codec error\n");
        return;
    }

    // allocate the AVCodecContext that holds the video stream's codec parameters
    handle->videoCodecContext = avcodec_alloc_context3(handle->videoCodec);
      if(handle->videoCodecContext == NULL)
      {
          qDebug("Could not allocate video codec context\n");
          return;
      }

        handle->videoFrame = av_frame_alloc();// frame holding the decoded raw YUV420 picture
        handle->FrameRGB = av_frame_alloc();  // frame holding the picture after conversion to RGB

        handle->videoCodecContext->time_base.den = dec_init->fps;
        handle->videoCodecContext->time_base.num = 1;

        handle->videoCodecContext->bit_rate = dec_init->bitRate;
        handle->videoCodecContext->frame_number = 1;
        handle->videoCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
        handle->videoCodecContext->pix_fmt = AV_PIX_FMT_RGB24;
        handle->videoCodecContext->width = dec_init->create_width;
        handle->videoCodecContext->height = dec_init->create_height;

    // get the picture buffer size for the given pixel format and resolution
   // handle->videoFrameSize = avpicture_get_size(AV_PIX_FMT_RGB24, dec_init->create_width,dec_init->create_height);
    handle->videoFrameSize =av_image_get_buffer_size(AV_PIX_FMT_RGB24, dec_init->create_width, dec_init->create_height,1);

    handle->rgb_buf = (uint8_t *)av_malloc(handle->videoFrameSize*sizeof(uint8_t));

    // sws_getContext: prepare the context (handle->scxt) that converts YUV420P frames to RGB24
    handle->scxt =sws_getContext(handle ->videoCodecContext->width,handle ->videoCodecContext->height,AV_PIX_FMT_YUV420P,
                                   handle ->videoCodecContext->width,handle ->videoCodecContext->height,AV_PIX_FMT_RGB24,
                                   SWS_X,NULL,NULL,NULL);
    // open the decoder
    if ( avcodec_open2(handle->videoCodecContext,handle->videoCodec,NULL) < 0)
    {
        qDebug("Could not open the video codec.");
        return;
    }

    av_init_packet(&handle->pkt);

    qDebug("success");


}

/**** After a complete frame has been received, decode it and emit it for display (currently shown on a QLabel) ***/
int socket_thread::dec_frame_decoder()
{

    int decodedSize = 0;
    int gotPicture = 0;

    if( (mt_dec_input->pnal_stream == NULL) || (mt_dec_input->nal_size < 0) )
    {
        qDebug("没有数据内容");
        return -1;
    }

    handle ->pkt.data = mt_dec_input->pnal_stream;
    handle ->pkt.size = mt_dec_input->nal_size;

    decodedSize = avcodec_decode_video2((handle->videoCodecContext), (handle ->videoFrame), &gotPicture,&(handle->pkt));

    if (decodedSize < 0)
    {
        printf("Fail to decode a video frame.");
        return -1;
    }

    if (gotPicture != 0)
    {

       // attach the allocated RGB buffer to FrameRGB
       av_image_fill_arrays(handle->FrameRGB->data,handle->FrameRGB->linesize, handle->rgb_buf,AV_PIX_FMT_RGB24,
                            handle->videoCodecContext->width,handle->videoCodecContext->height,1);

        if(handle->scxt != NULL){
            sws_scale( handle->scxt,(const uint8_t*  const*)handle->videoFrame->data,handle->videoFrame->linesize,0,handle ->videoCodecContext->height,
                      handle->FrameRGB->data,handle->FrameRGB->linesize);

        }

//        for(int i = 0; i< handle ->videoCodecContext->height; i++){
  //          fwrite(handle->FrameRGB->data[0] + i * handle->FrameRGB->linesize[0], 1 , handle ->videoCodecContext->width *3,fprgb);
    //    }

        QImage tmpImage((uchar *)handle->rgb_buf,handle->videoCodecContext->width,handle->videoCodecContext->height,QImage::Format_RGB888);
        QImage image  = tmpImage.copy();


       // emit the signals; you can also define your own signals here
        emit decordeframe(image);
        emit socketframe(image);

        av_packet_unref(&handle->pkt);
        return 0;
     }

     else{
        qDebug("解码错误");
        return -1;
     }


}

/**** Run the sequence of TCP calls that establishes the connection ****/
int socket_thread::tcp_connect(char save_ip[16], u_short port,SAMPLE_RECV_PARA_S *hisi)
{
   int ret = tcp->tcp_Connect(save_ip,port);

   if(ret == 0){
       resolution = tcp->tcp_start(hisi);
       qDebug("hei=%d,wid=%d",resolution->u32Width,resolution->u32Height);
   }
   else
       return -1;
   return 0;
}

/**** Use the TCP object to send device debug commands ***/
int socket_thread::tcp_func(unsigned char func[6])
{
    tcp->SendData((char*)func,6);
    return 0;
}

/**** Use the TCP object to receive device debug replies ***/
int socket_thread::tcp_func_r(unsigned char func_r[6])
{

    tcp->recvData((char*)func_r, 6); // an array parameter decays to a pointer, so sizeof(func_r) would not be 6 here
    return 0;
}
/**** Pack a keyboard event received from the main window and send it ****/
int socket_thread::send_key(int pos,unsigned char value,int press_release)
{
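    /* Control packet layout, as used below and in send_mouse()/mouse_move_send():
       4-byte header 0xff 0xfe 0xfd 0xfc, one type byte (0x01 = keyboard,
       0x02 = mouse), one payload-length byte, then the payload itself. */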
    unsigned char key_buf[14];
    key_buf[0] = 0xff;
    key_buf[1] = 0xfe;
    key_buf[2] = 0xfd;
    key_buf[3] = 0xfc;
    key_buf[4] = 0x01;
    key_buf[5] = mk_modification->key_len;
    mk_modification->key_m(pos,value,press_release);
    memcpy(&key_buf[6],mk_modification->key_buf,mk_modification->key_len);

    tcp->SendData((char*)key_buf,sizeof(key_buf));
    return 0;
}

int socket_thread::send_mouse(int pos,unsigned char value,int press_release)
{
    mk_modification->mouse_m(pos,value,press_release);

    unsigned char mouse_buf[10];
    mouse_buf[0] = 0xff;
    mouse_buf[1] = 0xfe;
    mouse_buf[2] = 0xfd;
    mouse_buf[3] = 0xfc;
    mouse_buf[4] = 0x02;
    mouse_buf[5] = 5;
    memcpy(&mouse_buf[6],mk_modification->mouse_buf,5);

    qDebug("%x %x %x %x %x %x %x %x %x %x",mouse_buf[0],mouse_buf[1],mouse_buf[2],mouse_buf[3],mouse_buf[4],
            mouse_buf[5],mouse_buf[6],mouse_buf[7],mouse_buf[8],mouse_buf[9]);

    tcp->SendData((char*)mouse_buf,sizeof(mouse_buf));

    return 0;
}

int socket_thread::mouse_move_get(int pos,unsigned char value,int press_release)
{
    mk_modification->mouse_m(pos,value,press_release);
    return 0;
}

int socket_thread::mouse_move_send()
{
    unsigned char mouse_buf[10];
    mouse_buf[0] = 0xff;
    mouse_buf[1] = 0xfe;
    mouse_buf[2] = 0xfd;
    mouse_buf[3] = 0xfc;
    mouse_buf[4] = 0x02;
    mouse_buf[5] = 5;
    memcpy(&mouse_buf[6],mk_modification->mouse_buf,5);

    tcp->SendData((char*)mouse_buf,sizeof(mouse_buf));
    return 0;
}

/**** Save the bitstream to a file ****/
void socket_thread::save_file(char *fileName)
{
    qDebug("码流保存");
    fp = fopen(fileName,"ab");

    if(fp == NULL){
        qDebug("文件打开失败");
        return;
    }
    else{
        save_flag = true;
        return;
    }
}
/**** Stop the thread and shut the socket down ***/
void socket_thread::stop()
{
    tcp->stop();
    stopped = true;
    /* wait for the decoding thread to exit and be cleaned up */
       while(1)
       {
           msleep(5);

           if(stopped == false){
               terminate();
             //  quit();
               wait();
               close();// release the decoder resources
               tcp->Close();
               /* run() resets stopped to false when it exits, which ends this wait loop */
               break;
           }

       }
}

/**** Release the decoder resources *****/
void socket_thread::close()
{

    if(handle == NULL)
    {
        qDebug("数据错误");
        return;
    }

    if (handle->videoCodecContext != NULL)
    {
        avcodec_close(handle->videoCodecContext);
        qDebug("videoCodecContext释放");
    }
    if(handle ->videoFrame != NULL)
    {
        av_frame_free(&handle->videoFrame);
        qDebug("videoFrame释放");
    }
    if(handle ->FrameRGB != NULL)
    {
        av_frame_free(&handle->FrameRGB);
        qDebug("FrameRGB释放");
    }
    if(handle ->scxt != NULL)
    {
       sws_freeContext(handle->scxt);
        qDebug("scxt释放");
    }

    handle->videoCodecContext = NULL;
    handle->videoCodec = NULL;
    handle->videoFrame = NULL;
    handle ->FrameRGB = NULL;
    handle->scxt = NULL;
    av_free(handle->rgb_buf);
    free(handle);
    free(mt_dec_input);
   // fclose(fp);
    //fclose(fprgb);
    qDebug("释放解码资源");

}
That is how one thread is built. In the same way, create multiple socket threads, feed the bitstream each one receives to its own decoder, and then show the decoded raw data on screen.
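A rough sketch of how the four channels might then be started from the main window (the array names, slot names, IP addresses and port below are illustrative assumptions, not taken from the original project):

// Somewhere in the main window's setup code -- illustrative sketch only.
const char *board_ips[4]   = {"192.168.1.10", "192.168.1.11",
                              "192.168.1.12", "192.168.1.13"};       // example addresses
const char *frame_slots[4] = {SLOT(showFrame0(QImage)), SLOT(showFrame1(QImage)),
                              SLOT(showFrame2(QImage)), SLOT(showFrame3(QImage))};

for (int i = 0; i < 4; ++i) {
    threads[i] = new socket_thread;                                  // one thread per channel

    // The QImage is emitted from the worker thread, so a queued connection
    // lets the display slot run in the GUI thread.
    connect(threads[i], SIGNAL(decordeframe(QImage)), this, frame_slots[i],
            Qt::QueuedConnection);

    if (threads[i]->tcp_connect((char *)board_ips[i], 8000, &recv_para[i]) == 0)
        threads[i]->start_thread(&dec_init[i]);                      // create decoder, start run()
}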

The decoding part: the VideoPlayer1 class below opens a local video file with FFmpeg's demuxer and decodes it in its own thread.

#include "videoplayer.h"


/***
 *** Members of the DecodeVideo class
 ***/
//static QMutex decodeVideoMutex;


/***
 *** Members of the VideoPlayer class
 ***/
VideoPlayer1::VideoPlayer1()
{
    initAvcodec1();

    aCodecCtx1  = NULL;
    pCodecCtx1  = NULL;
    pFormatCtx1 = NULL;
    pFrame1     = NULL;  // keep the frame pointers NULL so closeVideo1() is safe before any decoding
    pFrameRGB1  = NULL;
    eventloop1  = NULL;
    curState1 = StoppedState1;
    curType1 = NoneType1;

}

VideoPlayer1::~VideoPlayer1()
{

}

void VideoPlayer1::run()
{
    //qDebug("234234234353");
    eventloop1 = new QEventLoop;
    QTimer playtimer1; //控制播放的定时器
    connect(&playtimer1,SIGNAL(timeout()),this,SLOT(readPacket1()),Qt::DirectConnection);
    playtimer1.start(35);
    eventloop1->exec();           //循环进入等待,直到exec()被再次触发
    delete eventloop1;
    eventloop1 = NULL;
}

void VideoPlayer1::initAvcodec1()        // library initialization
{
    av_register_all();      // initialize the ffmpeg library and register container formats
    avcodec_register_all(); // register the codecs
    avdevice_register_all();// register the devices
}

bool VideoPlayer1::openVideo1(char *filename1)
{
    videoStream1 = -1;
    audioStream1 = -1;
    unsigned int i;

    // open the media source -- a local file
    if(avformat_open_input(&pFormatCtx1, filename1, NULL, NULL)!=0)
    {
        fprintf(stderr, "Couldn't open file\n");
        return false;  //Couldn't open file
    }
    qDebug("222222");
    // read the stream information from the file
    if(avformat_find_stream_info(pFormatCtx1,NULL)<0)
    {
        fprintf(stderr, "Couldn't find stream information\n");
        return false ; // Couldn't find stream information
    }

    //dump_format(pFormatCtx, 0, filename, 0);  // print stream info to the terminal

    // Walk the streams in the file and record the first video stream and the first audio stream.
    for(i=0; i < pFormatCtx1->nb_streams; i++)
    {
        if(pFormatCtx1->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO && videoStream1==-1)
        {
            videoStream1=i;
        }
        if(pFormatCtx1->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO && audioStream1==-1)
        {
            audioStream1=i;
        }
    }

    if(audioStream1==-1 && videoStream1==-1)
    {
        closeVideo1();
        fprintf(stderr, "Didn't find a audio stream or Didn't find a video stream\n");
        return false; // Didn't find a audio stream or Didn't find a video stream
    }

    if (videoStream1 != -1)
    {
        // get the codec context of the video stream
        pCodecCtx1=pFormatCtx1->streams[videoStream1]->codec;

        // find the FFmpeg decoder for this codec ID
        AVCodec *pCodec=avcodec_find_decoder(pCodecCtx1->codec_id);
        if(pCodec==NULL)
        {
            fprintf(stderr, "Unsupported codec!\n");
            return false; // Codec not found
        }
        // open the decoder
        if(avcodec_open2(pCodecCtx1, pCodec,NULL)<0)
        {
            fprintf(stderr, "Could not open audio codec!\n");
            return false; // Could not open audio codec
        }
        curType1 = VideoType1;
    }
    else
    {
        curType1 = AudioType1;
    }

    if (audioStream1 != -1)
    {
        aCodecCtx1 = pFormatCtx1->streams[audioStream1]->codec;
        AVCodec *aCodec = avcodec_find_decoder(aCodecCtx1->codec_id);
        if(!aCodec)
        {
            fprintf(stderr, "Unsupported codec!\n");
            return false;
        }
        if(avcodec_open2(aCodecCtx1, aCodec,NULL)<0)
        {
            fprintf(stderr, "Could not open video codec!\n");
            return false; // Could not open video codec
        }
    }
    qDebug("3333333");

    return true;
}

void VideoPlayer1::readPacket1()
{
    qDebug("22234433222");
    if (pFormatCtx1 == NULL) return;
    mutex1.lock();
    packet1 = nextPacket1;
  //  qDebug("%d:[%02x][%02x][%02x][%02x][%02x][%02x]",
 //          packet.size, packet.data[0], packet.data[1],packet.data[2], packet.data[3], packet.data[4], packet.data[5]);

    int frameFinished = 0;

    // allocate a frame to hold the decoded raw picture
    pFrame1 = av_frame_alloc();

   // decodeVideoMutex.lock();
    // decode this packet
    avcodec_decode_video2(pCodecCtx1, pFrame1, &frameFinished, &packet1);
   // decodeVideoMutex.unlock();

    // allocate a frame for the picture converted to RGB
    pFrameRGB1 = av_frame_alloc();

    // attach the allocated RGB buffer to pFrameRGB
   // avpicture_fill((AVPicture *)pFrameRGB, bufferRGB, AV_PIX_FMT_RGB24,pCodecCtx->width, pCodecCtx->height);
    av_image_fill_arrays(pFrameRGB1->data,pFrameRGB1->linesize, bufferRGB1,AV_PIX_FMT_RGB24,
                         pCodecCtx1->width,pCodecCtx1->height,1);


    // SwsContext (from sws_getContext) handles pixel-format conversion and scaling;
    // here it converts the decoded frame to RGB24 using the stream's width, height, and pixel format.
    SwsContext *convert_ctx = sws_getContext(width1,height1,pix_fmt1,width1,height1,AV_PIX_FMT_RGB24,SWS_X, NULL,NULL,NULL);
    sws_scale(convert_ctx,(const uint8_t*  const*)pFrame1->data,pFrame1->linesize,0,height1,pFrameRGB1->data,pFrameRGB1->linesize);

    QImage tmpImage((uchar *)bufferRGB1,width1,height1,QImage::Format_RGB888);
    QImage image1  = tmpImage.copy();

     qDebug("2222");
    // emit the signals; you can also define your own signals here
    emit readOneframe1(image1);
    emit saveframe1(image1);

    av_frame_free(&pFrameRGB1);  // av_frame_free (rather than av_free) also releases any refcounted buffers
    av_frame_free(&pFrame1);
    sws_freeContext(convert_ctx);
    av_packet_unref(&packet1);

    if(av_read_frame(pFormatCtx1, &nextPacket1) < 0)
    {   // the whole file has been read
        stop1();

    }
    mutex1.unlock();
}

void VideoPlayer1::closeVideo1()
{
    if (aCodecCtx1 != NULL)
    {
        avcodec_close(aCodecCtx1);
    }
    if (pCodecCtx1 != NULL)
    {
        avcodec_close(pCodecCtx1);  // close the video codec even when there is no audio stream
    }
    if (pFormatCtx1 != NULL)
    {
        avformat_close_input(&pFormatCtx1);
    }

    av_frame_free(&pFrameRGB1);
    av_frame_free(&pFrame1);
    aCodecCtx1  = NULL;
    pCodecCtx1  = NULL;
    pFormatCtx1 = NULL;
    curType1 = NoneType1;
}



void VideoPlayer1::setSource1(QString str)
{
    stop1();
    char ch[1024];
    strcpy(ch,(const char*)str.toLocal8Bit());
    if (openVideo1(ch))
    {
        // read the first packet of compressed video data
        av_read_frame(pFormatCtx1, &nextPacket1);
        if (curType1 == VideoType1)
        {
            width1 = pCodecCtx1->width;
            height1= pCodecCtx1->height;
            pix_fmt1 = pCodecCtx1->pix_fmt;
            // get the picture buffer size for the given pixel format and resolution
           // int numBytes = avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width,pCodecCtx->height);
            int numBytes =av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx1->width, pCodecCtx1->height,1);
            bufferRGB1 = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
        }
    }
    else
    {
        qDebug("open %s erro!\n",ch);
    }
}

void VideoPlayer1::play1()
{
    if (isRunning()) return;
    if (pFormatCtx1 != NULL)
    {
        start();
        //run1();
        curState1 = PlayingState1;
        emit stateChanged1(curState1);

    }
}

void VideoPlayer1::pause1()
{
    if (eventloop1 == NULL) return;
    eventloop1->exit();
    curState1 = PausedState1;
    emit stateChanged1(curState1);
}

void VideoPlayer1::stop1()
{
    if (eventloop1 == NULL) return;
    eventloop1->exit();
    closeVideo1();// close the file and release the codecs
    curState1 = StoppedState1;
    emit stateChanged1(curState1);
}

