metaRTC通用H264文件帧获取发送方法

      最近杨总的metaRTC很火(gitee.com/metaRTC/metaRTC),也决定将以前的一些应用移植过去,拥抱国产生态。在适配过程中,发现测试还是比较麻烦:不是依赖ffmpeg,就是需要IPC的硬件板子,因此能在PC上实现初步调试是非常有必要的。于是通过一番查找和恶补H264编码格式的相关知识,做了一个通用的获取H264帧的函数。话不多说,直接上代码:获取某帧的段地址指针及相应长度偏移量。

   

/* Return 1 if Buf begins with the 3-byte Annex-B start code 0x000001, else 0.
 * Caller must guarantee at least 3 readable bytes at Buf. */
static int FindStartCode2(unsigned char *Buf)
{
	return (Buf[0] == 0 && Buf[1] == 0 && Buf[2] == 1) ? 1 : 0;
}
 
/* Return 1 if Buf begins with the 4-byte Annex-B start code 0x00000001, else 0.
 * Caller must guarantee at least 4 readable bytes at Buf. */
static int FindStartCode3(unsigned char *Buf)
{
	return (Buf[0] == 0 && Buf[1] == 0 && Buf[2] == 0 && Buf[3] == 1) ? 1 : 0;
}
/* Convert a 100-ns based timestamp to RTP clock ticks. */
#define CONVERT_TIMESTAMP_TO_RTP(clockRate, pts) ((UINT64) ((DOUBLE) (pts) * ((DOUBLE) (clockRate) / HUNDREDS_OF_NANOS_IN_A_SECOND)))
#define VIDEO_CLOCKRATE (UINT64) 90000
/* NAL unit header bit masks (header byte = forbidden_zero_bit | nal_ref_idc | nal_unit_type). */
#define NALU_TYPE_MASK 0x1f //00011111  nal_unit_type
#define NALU_FORB_MASK 0x80 //10000000  forbidden_bit
#define NALU_REF_MASK 0x60 //01100000  nal_reference_bit
/* NOTE: the values below are WHOLE header bytes (nal_ref_idc bits included),
 * matched directly against the first byte after the start code in GetNaluSlice. */
#define NALU_TYPE_SPS 0x67 //SPS (sequence parameter set)
#define NALU_TYPE_PPS 0x68 //PPS (picture parameter set)
#define NALU_TYPE_IDR 0x65 //IDR key frame slice
#define NALU_TYPE_PFRAME 0x61 //non-IDR (P) slice, nal_ref_idc = 3
#define NALU_TYPE_PFRAME2 0x41 //non-IDR (P) slice, nal_ref_idc = 2
// #define NALU_TYPE_BFRAME 0x01
#define NALU_TYPE_SEI 0x06

/**
 * Locate the first NAL unit in an Annex-B H.264 buffer.
 *
 * scr        - byte stream that must begin with a 3- or 4-byte start code
 * scrlen     - number of valid bytes at scr
 * packetlen  - out: NALU length in bytes, start code excluded
 * nalutype   - out: the whole NAL header byte (e.g. 0x67 for SPS)
 * endframe   - out: TRUE when the NALU runs to the very end of the buffer
 * pscroffset - out: length of the leading start code (3 or 4)
 *
 * Returns 0 on success, -1 when scr is NULL, too short, or does not begin
 * with a start code.  (The original was declared void yet executed
 * `return -1;`, which is a C constraint violation.)
 */
int GetNaluSlice(PBYTE scr, uint32_t scrlen, uint32_t *packetlen, uint32_t *nalutype, BOOL *endframe, uint32_t *pscroffset) {
    uint32_t sc = 0;   /* start-code length */
    uint32_t pos;      /* scan cursor, offset from scr */

    /* Need at least a 3-byte start code plus the NAL header byte. */
    if (scr == NULL || scrlen < 4) return -1;

    if (FindStartCode2(scr))      sc = 3;
    else if (FindStartCode3(scr)) sc = 4;
    else return -1;

    *nalutype   = scr[sc];
    *pscroffset = sc;
    *endframe   = FALSE;

    pos = sc;
    for (;;) {
      /* Probe for the next start code, but only while the full pattern fits
       * inside the buffer -- the original lookahead could read up to three
       * bytes past scr + scrlen. */
      if ((pos + 3 <= scrlen && FindStartCode2(scr + pos)) ||
          (pos + 4 <= scrlen && FindStartCode3(scr + pos))) {
        *packetlen = pos - sc;
        if (debuglog)
          printf("get nalu type 0x%x len %u\r\n", *nalutype, pos - sc);
        break;
      }
      if (++pos >= scrlen) {
        *endframe  = TRUE;
        *packetlen = pos - sc;
        if (debuglog)
          printf("get nalu type 0x%x len %u frame is end\r\n", *nalutype, pos - sc);
        break;
      }
    }
    return 0;
}

 发送至meta peer rtc session缓冲器

/**
 * Split one H.264 Annex-B access unit out of pframe and push it into the
 * metaRTC video buffer.
 *
 * Key frames (SPS/PPS/IDR) are repacked as: be32(spsLen) SPS be32(ppsLen)
 * PPS followed by the remaining Annex-B data (a 3-byte start code is padded
 * to 4 bytes).  P frames are sent as a bare NALU with the start code
 * stripped.  SEI-only input is dropped.  Returns 0 in every case.
 *
 * Fixes vs. original: removed unused locals (spsLen/ppsLen/spsPos/ppsPos --
 * spsPos was even assigned a pointer into an int32_t, invalid C) and the
 * unsigned `scrlen <= 0` test that could follow an underflowing subtraction.
 */
int yang_IPCEncoder_save_stream(YangVideoEncoderBuffer2* buf, YangFrame* frame, PFrame pframe) {
    uint32_t scrindex = 0;            /* start-code length of the current NALU */
    uint32_t scrlen = pframe->size;   /* bytes remaining in the source */
    int32_t  isKeyframe = YANG_Frametype_P;
    uint8_t *data = pframe->frameData;
    uint8_t *vbuffer = NULL;
    PBYTE    pscr = pframe->frameData;

    frame->frametype = YANG_Frametype_P;

    uint32_t destindex = 0;           /* NALU payload length from GetNaluSlice */
    uint32_t nalutype;
    BOOL ready = FALSE, endframe = FALSE;

    /* Scan forward until the first video NALU (SPS/PPS/IDR/P); skip SEI. */
    do {
        GetNaluSlice(pscr, scrlen, &destindex, &nalutype, &endframe, &scrindex);
        switch (nalutype) {
            case NALU_TYPE_PPS:
            case NALU_TYPE_SPS:
            case NALU_TYPE_IDR:
                 isKeyframe = YANG_Frametype_I;
                 frame->frametype = YANG_Frametype_I;
                 /* fallthrough */
            case NALU_TYPE_PFRAME:
            case NALU_TYPE_PFRAME2:
                 ready = TRUE;
                 data = pscr;
                 break;
            case NALU_TYPE_SEI:
                 return 0;            /* SEI-only input: nothing to send */
        }
        if (ready) break;
        if (endframe) return 0;
        /* Advance past this NALU; bail out before the unsigned subtraction
         * can underflow (original compared `scrlen <= 0` after it). */
        if (scrlen <= scrindex + destindex) return 0;
        pscr   += scrindex + destindex;
        scrlen -= scrindex + destindex;
    } while (1);

    frame->pts = frame->dts = pframe->presentationTs;

    if (isKeyframe == YANG_Frametype_I) {
        int32_t ipos = 0;             /* write cursor in frame->payload */

        /* SPS: 4-byte big-endian length prefix, then the raw NALU bytes. */
        yang_put_be32((char*) frame->payload, destindex);
        ipos += 4;
        memcpy(frame->payload + ipos, data + scrindex, destindex);
        ipos += destindex;

        vbuffer = data + scrindex + destindex;
        scrlen -= destindex + scrindex;
        destindex = 0;
        scrindex = 0;

        /* PPS: same length-prefixed layout. */
        GetNaluSlice(vbuffer, scrlen, &destindex, &nalutype, &endframe, &scrindex);
        yang_put_be32((char*) frame->payload + ipos, destindex);
        ipos += 4;
        memcpy(frame->payload + ipos, vbuffer + scrindex, destindex);
        ipos += destindex;
        vbuffer += scrindex + destindex;
        scrlen -= destindex + scrindex;

        /* Remainder (IDR slice) keeps its Annex-B start code; a 3-byte code
         * is padded to 4 bytes with a leading zero. */
        uint32_t destcount = 0;
        if (FindStartCode2(vbuffer)) destcount = 3;
        else if (FindStartCode3(vbuffer)) destcount = 4;
        else return 0;
        if (destcount == 3) {
            frame->payload[ipos] = 0x00;
            ipos += 1;
        }
        memcpy(frame->payload + ipos, vbuffer, scrlen);
        ipos += scrlen;
        frame->nb = ipos;
    } else {
        /* P frame: strip the start code and send the bare NALU. */
        uint32_t dataLength = scrlen - scrindex;
        memcpy(frame->payload, data + scrindex, dataLength);
        /* Normalize nal_ref_idc so the header always reads 0x61. */
        if (frame->payload[0] == 0x41) frame->payload[0] = 0x61;
        frame->nb = dataLength;
    }

    buf->putEVideo(&buf->mediaBuffer, frame);

    if (debuglog) {
        printf("ts-> %lld send %d bytes to rtp:", frame->dts, frame->nb);
        dumphex(frame->payload, frame->nb < 200 ? frame->nb : 200);
        printf("\n");
    }
    return 0;
}

循环发送H264文件帧线程

/* Scratch buffers used by the sender thread (single concurrent sender assumed). */
uint8_t buffer[2*1024*1024] = {0};         /* packed payload handed to metaRTC */
uint8_t framebuffer[2*1024*1024] = {0};    /* one reassembled access unit */
uint8_t h264filebuffer[5*1024*1024] = {0}; /* entire .h264 file image */

/**
 * Thread entry point: read an Annex-B .h264 file into memory, walk it NALU
 * by NALU, group SPS/PPS with the following slice into one access unit, and
 * hand each unit to the metaRTC session at a fixed frame cadence.  Rewinds
 * to the start of the file when it reaches the end.
 *
 * Fixes vs. original: `endframe` was read uninitialized on the first loop
 * iteration (UB); the error path out of the send section leaked the session
 * list mutex; the NULL-manager path returned STATUS_SUCCESS; unused locals
 * removed.
 */
PVOID sendVideoPackets(PVOID args)
{
    STATUS retStatus = STATUS_SUCCESS;
    pStreamManage pstreammanage = gpStreamManage;  /* args only carries the session */
    EncoderSession *session = (EncoderSession *) args;
    Frame frame;
    UINT64 startTime, lastFrameTime, elapsed;

    if (pstreammanage == NULL) {
        retStatus = STATUS_NULL_ARG;  /* original printed the error but returned success */
        printf("[metaRTC Master] sendVideoPackets(): operation returned status code: 0x%08x \n", STATUS_NULL_ARG);
        goto CleanUp;
    }

    YangFrame videoFrame;
    memset(&videoFrame, 0, sizeof(YangFrame));
    videoFrame.payload = buffer;

    memset(&frame, 0, sizeof(Frame));
    frame.frameData = framebuffer;
    startTime = GETTIME();
    lastFrameTime = startTime;
    frame.presentationTs = 0;
    pstreammanage->videoBufferSize = 0;
    printf("sendVideoPackets starting ...\n");

    UINT32 h264filebuffersize = 0;
    if (!strlen(filePath) || !strstr(filePath, ".h264")) {
        snprintf(filePath, MAX_PATH_LEN, "%s", "../video/test.h264");
        printf("filepath is not exist use default %s \n", filePath);
    }
    /* First call with NULL sizes the file, second call loads it. */
    retStatus = readFrameFromDisk(NULL, &h264filebuffersize, filePath);
    if (retStatus != STATUS_SUCCESS) {
        printf("[metaRTC Master] readFrameFromDisk(): operation returned status code: 0x%08x \n", retStatus);
        goto CleanUp;
    }
    retStatus = readFrameFromDisk(h264filebuffer, &h264filebuffersize, filePath);
    if (retStatus != STATUS_SUCCESS) {
        printf("[metaRTC Master] readFrameFromDisk(): operation returned status code: 0x%08x \n", retStatus);
        goto CleanUp;
    }

    PBYTE scrpos = h264filebuffer, scr = h264filebuffer, pframeindex = frame.frameData;
    uint32_t scrlen = h264filebuffersize;
    uint32_t packetlen;
    uint32_t nalutype;
    BOOL endframe = FALSE;  /* was read uninitialized in the original */
    uint32_t pscroffset = 0, ipos = 0, framelen = 0;

    while (!ATOMIC_LOAD_BOOL(&pstreammanage->appTerminateFlag) && (session->isConvert == 1)) {
        if (endframe) {
            /* File exhausted: rewind and start a fresh access unit. */
            scrpos = h264filebuffer;
            scr = h264filebuffer;
            scrlen = h264filebuffersize;
            pframeindex = frame.frameData;
            frame.size = 0;
        }
        GetNaluSlice(scr, scrlen, &packetlen, &nalutype, &endframe, &pscroffset);
        switch (nalutype) {
            case NALU_TYPE_PPS:
            case NALU_TYPE_SPS:
                /* Parameter sets: stage them ahead of the next slice. */
                ipos += pscroffset + packetlen;
                memcpy(pframeindex, scrpos, ipos);
                pframeindex += ipos;
                framelen += ipos;
                scrpos += ipos;
                ipos = 0;
                break;
            case NALU_TYPE_IDR:
            case NALU_TYPE_PFRAME:
            case NALU_TYPE_PFRAME2:
                /* A slice completes the access unit; frame.size > 0 triggers the send. */
                ipos += pscroffset + packetlen;
                memcpy(pframeindex, scrpos, ipos);
                pframeindex += ipos;
                frame.size += ipos + framelen;
                scrpos += ipos;
                ipos = 0;
                framelen = 0;
                break;
            case NALU_TYPE_SEI:
                /* Drop SEI without copying it. */
                scrpos += pscroffset + packetlen;
                break;
            default:
                /* Unknown NALU: keep accumulating.  NOTE(review): scrpos is
                 * deliberately left unchanged here, matching the original --
                 * confirm that this is the intended behavior. */
                ipos += pscroffset + packetlen;
                break;
        }

        if (frame.size > 0) {
            frame.presentationTs = GETTIME();
            MUTEX_LOCK(pstreammanage->streamingSessionListReadLock);
            retStatus = yang_IPCEncoder_save_stream(session->out_videoBuffer, &videoFrame, &frame);
            if (retStatus < 0) {
                MUTEX_UNLOCK(pstreammanage->streamingSessionListReadLock); /* original leaked the lock here */
                goto CleanUp;
            }
            MUTEX_UNLOCK(pstreammanage->streamingSessionListReadLock);

            /* Pace output to the nominal frame duration; sleeping at least
             * the remainder guarantees no too-early frame. */
            elapsed = lastFrameTime - startTime;
            THREAD_SLEEP(SAMPLE_VIDEO_FRAME_DURATION - elapsed % SAMPLE_VIDEO_FRAME_DURATION);
            lastFrameTime = GETTIME();
            pframeindex = frame.frameData;
            frame.size = 0;
            ipos = 0;
        }
        scr += pscroffset + packetlen;
        scrlen -= pscroffset + packetlen;
    }

CleanUp:

    CHK_LOG_ERR(retStatus);

    return (PVOID) (ULONG_PTR) retStatus;
}

metaRTC通用H264文件帧获取发送方法_第1张图片

metaRTC通用H264文件帧获取发送方法_第2张图片

 感谢大佬liuping36131997的启发。

最后膜拜杨大侠,一己之力造出了metaRTC,也希望他的愿景成真,大家可以关注他的个人博客

https://blog.csdn.net/m0_56595685/category_11474470.html

 

你可能感兴趣的:(笔记,metaRTC,实时音视频,webrtc)