WebRTC学习之十:最简单的视频聊天(使用WebRtcVideoEngine2)

        这篇在上篇WebRTC学习之九:摄像头的捕捉和显示 的基础上修改而来。上篇中主要使用了WebRtcVideoEngine2中的WebRtcVideoCapturer类,而本篇中主要使用了WebRtcVideoEngine2中的WebRtcVideoChannel2类。

一.环境

参考:WebRTC学习之三:录音和播放

二.实现

        在WebRTC学习之四:最简单的语音聊天 中我们是通过实现Transport类的两个纯虚函数SendRtp和SendRtcp,并在它们的实现中调用通信协议的发送函数将数据发送出去。然而WebRtcVideoChannel2类继承自Transport,实现了SendRtp和SendRtcp,并在实现中分别调用了MediaChannel类的SendPacket和SendRtcp函数。MediaChannel类也是WebRtcVideoChannel2的基类。SendPacket和SendRtcp函数最终调用的是MediaChannel类中NetworkInterface接口的同名函数SendPacket和SendRtcp。因此我们只需要去继承NetworkInterface接口,实现其中的函数SendPacket和SendRtcp,并在它们的实现中调用通信协议的发送函数将数据发送出去。

mynetworkinterface.h

#ifndef MYNETWORKINTERFACE_H
#define MYNETWORKINTERFACE_H

#include 
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"

class MyNetworkInterface:public QObject,public cricket::MediaChannel::NetworkInterface
{
    Q_OBJECT
public:
    MyNetworkInterface();
    ~MyNetworkInterface();
    void setLocalReceiver(int port);
    void stopRecieve();
    void setSendDestination(QString ip, int port);
    void stopSend();
    // NetworkInterface functions override
    bool SendPacket(rtc::CopyOnWriteBuffer* packet,const rtc::PacketOptions& options) override;
    bool SendRtcp(rtc::CopyOnWriteBuffer* packet,const rtc::PacketOptions& options) override;
    int SetOption(SocketType type, rtc::Socket::Option opt,int option) override
    {
        return 0;
    }
private:
    QUdpSocket * udpsocketSendRTP;
    QUdpSocket * udpsocketSendRTCP;
    QUdpSocket * udpSocketRecvRTP;
    QUdpSocket * udpSocketRecvRTCP;

    QString destIP;
    int destPort;

    bool sendFlag;
    bool recvFlag;

signals:
    void signalRecvRTPData(char *data,int length);
    void signalRecvRTCPData(char *data,int length);
    void signalSendRTPData(char *data,int length);
    void signalSendRTCPData(char *data,int length);

private slots:
    void slotRTPReadPendingDatagrams();
    void slotRTCPReadPendingDatagrams();
    void slotSendRTPData(char *data,int length);
    void slotSendRTCPData(char *data,int length);
};

#endif // MYNETWORKINTERFACE_H

mynetworkinterface.cpp

#include "mynetworkinterface.h"

#include "QDebug"
#include <QHostAddress>
// Creates the four UDP sockets (RTP/RTCP, send/receive) and wires up the
// signal/slot plumbing. Send and receive start enabled.
MyNetworkInterface::MyNetworkInterface()
    :destIP(""),
    destPort(0),
    sendFlag(true),
    recvFlag(true)
{
    udpsocketSendRTP  = new QUdpSocket();
    udpSocketRecvRTP  = new QUdpSocket();
    udpsocketSendRTCP = new QUdpSocket();
    udpSocketRecvRTCP = new QUdpSocket();

    // Incoming datagrams are drained as soon as the sockets signal readiness.
    connect(udpSocketRecvRTP,  SIGNAL(readyRead()), this, SLOT(slotRTPReadPendingDatagrams()));
    connect(udpSocketRecvRTCP, SIGNAL(readyRead()), this, SLOT(slotRTCPReadPendingDatagrams()));

    // Self-connections: SendPacket/SendRtcp run on a WebRTC worker thread, so
    // the actual writeDatagram calls are deferred to the slots via signals.
    connect(this, SIGNAL(signalSendRTPData(char *,int)),  this, SLOT(slotSendRTPData(char *,int)));
    connect(this, SIGNAL(signalSendRTCPData(char *,int)), this, SLOT(slotSendRTCPData(char *,int)));
}

// Schedules all four sockets for deletion on the next event-loop pass.
// deleteLater() is used (rather than delete) so destruction is safe even if
// a readyRead notification is still queued for one of the sockets.
MyNetworkInterface::~MyNetworkInterface()
{
   udpsocketSendRTP->deleteLater();
   udpSocketRecvRTP->deleteLater();
   udpsocketSendRTCP->deleteLater();
   udpSocketRecvRTCP->deleteLater();
}

void MyNetworkInterface::setLocalReceiver(int port)
{
    udpSocketRecvRTP->bind(port, QUdpSocket::ShareAddress);
    udpSocketRecvRTCP->bind(port+1, QUdpSocket::ShareAddress);
    recvFlag=true;
}
// Aborts both receive sockets (closing them immediately) and clears the
// receive gate so any still-queued readyRead handlers drop their data.
void MyNetworkInterface::stopRecieve()
{
    recvFlag = false;
    udpSocketRecvRTP->abort();
    udpSocketRecvRTCP->abort();
}
// Records the remote peer's address and RTP port (RTCP is sent to port + 1)
// and enables outgoing traffic.
void MyNetworkInterface::setSendDestination(QString ip, int port)
{
    destIP   = ip;
    destPort = port;
    sendFlag = true;
}
// Disables outgoing traffic; SendPacket/SendRtcp become no-ops until
// setSendDestination() re-enables the gate.
void MyNetworkInterface::stopSend()
{
    sendFlag = false;
}
// NetworkInterface override: called by WebRTC (on a worker thread) with an
// outgoing RTP packet. We do not call udpsocketSendRTP->writeDatagram here
// directly — SendPacket runs on a non-GUI thread, so the write is marshalled
// through signalSendRTPData to the thread that owns the socket.
// Reconstructed from a garbled line; the sendFlag gate mirrors SendRtcp.
bool MyNetworkInterface::SendPacket(rtc::CopyOnWriteBuffer* packet,const rtc::PacketOptions& options)
{
    Q_UNUSED(options);
    // Debug aid — the sender SSRC sits at byte offset 8 of the RTP header:
    //   uint32_t ssrc = webrtc::ByteReader<uint32_t>::ReadBigEndian(&packet->data()[8]);
    //   qDebug() << "Send SSRC:" << ssrc;
    if(sendFlag)
        emit signalSendRTPData(packet->data(),packet->size());
    return true;
}
// NetworkInterface override: called by WebRTC (on a worker thread) with an
// outgoing RTCP packet. The socket write is deferred through a signal rather
// than calling udpsocketSendRTCP->writeDatagram directly, because SendRtcp
// runs on a different thread from the one owning the socket.
bool MyNetworkInterface::SendRtcp(rtc::CopyOnWriteBuffer* packet,const rtc::PacketOptions& options)
{
   Q_UNUSED(options);

   if(sendFlag)
   {
       emit signalSendRTCPData(packet->data(),packet->size());
   }
   return true;
}

void MyNetworkInterface::slotSendRTPData(char *data,int length)
{
    //测试发送的ssrc
//    uint8_t* packet1=(uint8_t*)data;
//    uint32_t ssrc = webrtc::ByteReader::ReadBigEndian(&packet1[8]);
//    qDebug()<<"Send SSRC:"<writeDatagram(data, length,QHostAddress(destIP), destPort);
}
// Slot (runs on the socket-owning thread, queued from SendRtcp):
// ships one RTCP packet. The RTCP port is the RTP port + 1.
void MyNetworkInterface::slotSendRTCPData(char *data,int length)
{
    const int rtcpPort = destPort + 1;
    udpsocketSendRTCP->writeDatagram(data, length, QHostAddress(destIP), rtcpPort);
}

// Drains every pending datagram on the RTP receive socket and republishes the
// payload through signalRecvRTPData (to be fed into the video channel, e.g.
// via OnPacketReceived).
// Reconstructed: the extraction lost the end of this function (its emit) and
// the signature of the RTCP twin below, fusing the two functions together.
void MyNetworkInterface::slotRTPReadPendingDatagrams()
{
   QByteArray datagram;
   while (udpSocketRecvRTP->hasPendingDatagrams()&&recvFlag)
   {
          datagram.resize(udpSocketRecvRTP->pendingDatagramSize());
          QHostAddress sender;
          quint16 senderPort;

          int size=udpSocketRecvRTP->readDatagram(
          datagram.data(),
          datagram.size(),
          &sender,
          &senderPort);

          if(size>0)
          {
              // Debug aid — received SSRC at byte offset 8:
              //   uint32_t ssrc = webrtc::ByteReader<uint32_t>::ReadBigEndian(...);
              //   qDebug() << "Receive SSRC:" << ssrc;
              emit signalRecvRTPData(datagram.data(),datagram.size());
          }
    }
}

// Same as above for the RTCP receive socket (bound on the RTP port + 1);
// payloads are republished through signalRecvRTCPData.
void MyNetworkInterface::slotRTCPReadPendingDatagrams()
{
   QByteArray datagram;
   while (udpSocketRecvRTCP->hasPendingDatagrams()&&recvFlag)
   {
          datagram.resize(udpSocketRecvRTCP->pendingDatagramSize());
          QHostAddress sender;
          quint16 senderPort;

          int size=udpSocketRecvRTCP->readDatagram(
          datagram.data(),
          datagram.size(),
          &sender,
          &senderPort);

          if(size>0)
          {
              emit signalRecvRTCPData(datagram.data(),datagram.size());
          }
    }
}

        WebRtcVideoChannel2中有个专门设置网络接口的函数SetInterface,其参数就是NetworkInterface,这样视频数据发送就与WebRtcVideoChannel2关联起来了。

        至于视频数据接收,需要调用通信协议的接收函数,将接收到的数据传递到WebRtcVideoChannel2的OnPacketReceived和OnRtcpReceived函数,这两个函数是其基类对应纯虚函数的实现。那么解码后的frame我们如何获取呢?在WebRtcVideoChannel2中有个函数SetSink,它的第二个参数是rtc::VideoSinkInterface&lt;cricket::VideoFrame&gt;*,因此我们只需要继承rtc::VideoSinkInterface,并实现其中的void OnFrame(const VideoFrameT&amp; frame) = 0;纯虚函数,就能得到解码后的frame,然后转换成RGB格式,就可以显示到Qt界面了。

// VideoSinkInterface callback: receives each decoded frame, converts it to
// ARGB into a pre-allocated buffer, wraps it in a QImage and hands it to the
// GUI thread via a signal.
// NOTE(review): assumes receivedVideoImageData was pre-sized to at least
// width*height*4 bytes for the incoming frames — TODO confirm at the
// allocation site.
void MainWindow::OnFrame(const cricket::VideoFrame& frame)
{
    // Get a rotation-applied copy of the frame (ownership/caching semantics
    // are internal to cricket::VideoFrame — presumably we must not delete it;
    // verify against the WebRTC headers in use).
    const cricket::VideoFrame* frameTemp = frame.GetCopyWithRotationApplied();
    // Convert to 32-bit ARGB: buffer size = w*h*4 bytes, stride = w*4 bytes.
    frameTemp->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
                             receivedVideoImageData.get(),
                             frameTemp->width()*frameTemp->height()*32/8,
                             frameTemp->width()*32/8);

    // QImage wraps the buffer without copying; Format_RGB32 matches the
    // 4-byte-per-pixel ARGB layout produced above (alpha ignored).
    QImage image(receivedVideoImageData.get(), frameTemp->width(), frameTemp->height(), QImage::Format_RGB32);
    // OnFrame runs on a non-GUI thread, so instead of painting directly we
    // relay the image through a signal and let the GUI (main) thread display it.
    if(!image.isNull())
        emit signalReceivedFrame(image);
}

void MainWindow::slotReceivedFrame(const QImage &image)
{
     ui->labelReceivedVideo->setPixmap(QPixmap::fromImage(image));
}
三.效果

        本人光荣上镜,长相一般,用个口罩挡一下。大窗口是摄像头捕捉和显示的图像,小窗口是通过网络接收到并显示的图像。

WebRTC学习之十:最简单的视频聊天(使用WebRtcVideoEngine2)_第1张图片



你可能感兴趣的:(WebRTC,WebRTC学习)