RTP parsing in WebRTC



A previous post covered parsing of the RTP header in WebRTC.

This post walks through RTP payload depacketization in WebRTC, focusing on H264. For VP8 and VP9, WebRTC has the corresponding class implementations and test files as well.




For H264 RTP payload parsing, WebRTC implements three packetization types:

enum H264PacketizationTypes {
  kH264SingleNalu,  // This packet contains a single NAL unit.
  kH264StapA,       // This packet contains STAP-A (single time
                    // aggregation) packets. If this packet has an
                    // associated NAL unit type, it'll be for the
                    // first such aggregated packet.
  kH264FuA,         // This packet contains a FU-A (fragmentation
                    // unit) packet, meaning it is a part of a frame
                    // that was too large to fit into a single packet.
};
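
These three values correspond to the packetization modes of RFC 6184: single NAL unit packets, STAP-A aggregation packets, and FU-A fragmentation units. For FU-A in particular, the payload begins with a one-byte FU indicator followed by a one-byte FU header whose S and E bits mark the first and last fragment of the NAL unit. A minimal sketch of that bit layout (the helper names are mine, not WebRTC's):

#include <cstdint>

// FU-A payload layout per RFC 6184:
//   byte 0: FU indicator -> F(1 bit) | NRI(2 bits) | type(5 bits), type == 28 for FU-A
//   byte 1: FU header    -> S(1) | E(1) | R(1) | original NAL type(5)
inline bool FuaIsStart(const uint8_t* payload)    { return (payload[1] & 0x80) != 0; }  // S: first fragment
inline bool FuaIsEnd(const uint8_t* payload)      { return (payload[1] & 0x40) != 0; }  // E: last fragment
inline uint8_t FuaNalType(const uint8_t* payload) { return payload[1] & 0x1F; }         // original NAL type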


For reference, see rtp_format_h264_unittest.cc.
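
The core pattern in that test is simple: create an H264 depacketizer, hand it one RTP payload (the bytes after the RTP header), and inspect the parsed header. A rough sketch of just the call shape (the function name is mine; the include path follows the older webrtc/ tree layout and may differ in your checkout):

#include <cstdint>
#include <memory>

#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"

// Parses a single H264 RTP payload and exposes the packetization type.
void InspectH264Payload(const uint8_t* rtp_payload, size_t length)
{
    std::unique_ptr<RtpDepacketizer> depacketizer(RtpDepacketizer::Create(kRtpVideoH264));

    RtpDepacketizer::ParsedPayload parsed;
    if (!depacketizer->Parse(&parsed, rtp_payload, length))
        return;  // malformed payload

    const RTPVideoHeaderH264& h264 = parsed.type.Video.codecHeader.H264;
    // h264.packetization_type is one of kH264SingleNalu / kH264StapA / kH264FuA,
    // and h264.nalu_type gives the (first) NAL unit type.
}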



Wrapping this up in a simple helper class, the code looks like this:

// Needs <cstdint>, <cstdio>, <memory>, <vector> plus the WebRTC headers that
// declare RtpHeaderParser, RtpDepacketizer, RTPVideoHeaderH264 and rtc::Buffer
// (exact include paths depend on the WebRTC revision in use).
#include <cstdint>
#include <cstdio>
#include <memory>
#include <vector>

class CRtpDepacketizerH264
{
public:
    CRtpDepacketizerH264()
        : depacketizer_(RtpDepacketizer::Create(kRtpVideoH264))
    {
        m_rtcpBuf = NULL;
        pVectorBuf = NULL;
    }

    ~CRtpDepacketizerH264()
    {
        // depacketizer_ is a unique_ptr and cleans up after itself.
        if (m_rtcpBuf)
        {
            delete m_rtcpBuf;
            m_rtcpBuf = NULL;
        }

        // Release any partially assembled frame as well.
        clear_vector(pVectorBuf);
    }


private:
    // Allocates m_rtcpBuf primed with a 3-byte Annex-B start code (00 00 01).
    // Returns NULL if a buffer is already being assembled.
    rtc::Buffer* create_H264Buffer()
    {
        if (m_rtcpBuf != NULL)
        {
            return NULL;
        }

        uint8_t StartCode[3] = { 0 };
        StartCode[2] = 0x01;

        m_rtcpBuf = new rtc::Buffer(StartCode, 3);
        return m_rtcpBuf;
    }

    // Same, but with the 4-byte start code (00 00 00 01).
    rtc::Buffer* create_H264Buffer_4Byte()
    {
        if (m_rtcpBuf != NULL)
        {
            return NULL;
        }

        uint8_t StartCode[4] = { 0 };
        StartCode[3] = 0x01;

        m_rtcpBuf = new rtc::Buffer(StartCode, 4);
        return m_rtcpBuf;
    }


public:
    static int file_size(const char* filename)
    {
        FILE* fp = fopen(filename, "r");
        if (!fp) return -1;
        fseek(fp, 0L, SEEK_END);
        int size = (int)ftell(fp);
        fclose(fp);
        return size;
    }

    // Deletes every buffer owned by the vector, then the vector itself.
    static void clear_vector(std::vector<rtc::Buffer*>*& pVector_Buf)
    {
        if (pVector_Buf)
        {
            int nSize = (int)pVector_Buf->size();
            for (int n = 0; n < nSize; ++n)
            {
                delete pVector_Buf->at(n);
            }
            delete pVector_Buf;
        }

        pVector_Buf = NULL;
    }


public:

    // Feeds one RTP packet. Returns a vector of Annex-B framed NAL units
    // (ownership passes to the caller) once a complete frame is available,
    // otherwise returns NULL.
    std::vector<rtc::Buffer*>* add_rtpPacket_for_frame(const uint8_t* packet, size_t length)
    {
        if (pVectorBuf == NULL)
        {
            pVectorBuf = new std::vector<rtc::Buffer*>;
        }

        RTPHeader header;
        std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());

        // Ignore RTCP packets.
        if (parser->IsRtcp(packet, length))
        {
            return NULL;
        }

        if (!parser->Parse(packet, length, &header))
        {
            return NULL;
        }

        size_t nRtpHeaderLen = header.headerLength;
        const uint8_t* pbufPayload = packet + nRtpHeaderLen;
        size_t nLenPayload = length - nRtpHeaderLen;

        //------------------------------------------------------------

        RtpDepacketizer::ParsedPayload payload;
        depacketizer_->Parse(&payload, pbufPayload, nLenPayload);

        // The unittest checks fields such as:
        // EXPECT_EQ(kVideoFrameKey, payload.frame_type);
        // EXPECT_EQ(kRtpVideoH264, payload.type.Video.codec);
        // EXPECT_TRUE(payload.type.Video.is_first_packet_in_frame);
        // EXPECT_EQ(kH264SingleNalu, payload.type.Video.codecHeader.H264.packetization_type);
        // EXPECT_EQ(kIdr, payload.type.Video.codecHeader.H264.nalu_type);

        const RTPVideoHeaderH264& h264 = payload.type.Video.codecHeader.H264;

        switch (h264.packetization_type)
        {
        case kH264SingleNalu:
        {
            // One complete NAL unit per packet: drop any partial state, then
            // emit the unit with a start code prepended.
            clear_vector(pVectorBuf);
            pVectorBuf = new std::vector<rtc::Buffer*>;

            if (m_rtcpBuf)
            {
                delete m_rtcpBuf;
                m_rtcpBuf = NULL;
            }

            rtc::Buffer* tempBuf = NULL;

            if (h264.nalu_type == kSlice)
            {
                tempBuf = create_H264Buffer_4Byte();
            }
            else
            {
                tempBuf = create_H264Buffer();
            }

            tempBuf->AppendData(payload.payload, payload.payload_length);  // same bytes as pbufPayload / nLenPayload

            pVectorBuf->push_back(tempBuf);

            std::vector<rtc::Buffer*>* pVectorBufTemp = pVectorBuf;
            pVectorBuf = NULL;
            m_rtcpBuf = NULL;

            return pVectorBufTemp;
        }
        break;


        case kH264FuA:
        {
            // FU-A: one NAL unit fragmented over several packets. The first
            // fragment allocates the pending buffer (with start code); every
            // fragment appends its parsed payload to it.
            if (h264.nalu_type == kSlice)
            {
                create_H264Buffer_4Byte();
            }
            else
            {
                create_H264Buffer();
            }

            // The FU header is the second payload byte; its E bit (0x40)
            // marks the last fragment of the NAL unit (RFC 6184).
            bool fu_end = (pbufPayload[1] & 0x40) != 0;

            m_rtcpBuf->AppendData(payload.payload, payload.payload_length);

            if (fu_end || header.markerBit)
            {
                // NAL unit complete: hand the assembled frame to the caller.
                pVectorBuf->push_back(m_rtcpBuf);
                m_rtcpBuf = NULL;

                std::vector<rtc::Buffer*>* pVectorBufTemp = pVectorBuf;
                pVectorBuf = NULL;
                return pVectorBufTemp;
            }
        }
        break;


        case kH264StapA:
        {
            // STAP-A: several NAL units aggregated into one packet. The parsed
            // header lists each unit's offset and size within the payload.
            clear_vector(pVectorBuf);
            pVectorBuf = new std::vector<rtc::Buffer*>;

            if (m_rtcpBuf)
            {
                delete m_rtcpBuf;
                m_rtcpBuf = NULL;
            }

            LOG(INFO) << "kH264StapA";

            for (size_t i = 0; i < h264.nalus_length; ++i)
            {
                LOG(INFO) << "h264.nalus[i].type " << (int)h264.nalus[i].type;
                LOG(INFO) << "h264.nalus[i].offset " << h264.nalus[i].offset;
                LOG(INFO) << "h264.nalus[i].size " << h264.nalus[i].size;

                uint8_t StartCode[4] = { 0 };
                StartCode[3] = 0x01;

                rtc::Buffer* ptempbuf = new rtc::Buffer(StartCode, 4);
                ptempbuf->AppendData(payload.payload + h264.nalus[i].offset, h264.nalus[i].size);

                pVectorBuf->push_back(ptempbuf);
            }

            std::vector<rtc::Buffer*>* pVectorBufTemp = pVectorBuf;
            pVectorBuf = NULL;
            return pVectorBufTemp;
        }
        break;

        default:
        {
            LOG(INFO) << "default:";
        }
        break;
        }

        return NULL;
    }


private:
    rtc::Buffer* m_rtcpBuf;                 // NAL unit currently being assembled (FU-A)
    std::vector<rtc::Buffer*>* pVectorBuf;  // NAL units of the frame in progress

public:
    std::unique_ptr<RtpDepacketizer> depacketizer_;
};
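
Usage then looks roughly like this; where the RTP packets come from (a socket, a pcap, a file) is up to the caller, and OnRtpPacket / out are placeholder names:

// Feed each received RTP packet in; whenever a complete frame comes back,
// write its Annex-B NAL units to an output file.
void OnRtpPacket(CRtpDepacketizerH264* depack, FILE* out, const uint8_t* packet, size_t length)
{
    std::vector<rtc::Buffer*>* nalus = depack->add_rtpPacket_for_frame(packet, length);
    if (nalus == NULL)
        return;  // frame not complete yet, or an RTCP/unparsable packet

    for (size_t i = 0; i < nalus->size(); ++i)
    {
        fwrite(nalus->at(i)->data(), 1, nalus->at(i)->size(), out);
    }

    // The caller owns the returned vector and its buffers.
    CRtpDepacketizerH264::clear_vector(nalus);
}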





