metaRTC Performance Test

The embedded version of metaRTC is implemented in pure C on top of several well-known communication libraries. It recently achieved hardware-encoded video transmission, plus PCM audio capture and transmission, on the RV1126, with results on par with the mainstream pion and kvs stacks. The current code uses a multi-threaded model with repeated memcpy calls; adopting zero-copy buffer hand-off where possible and switching to mbedtls should leave further room for improvement. End-to-end latency on a local LAN is around 87 ms, which is a solid result.
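As a rough illustration of the zero-copy direction, here is a minimal hand-off sketch in C (the types and names are hypothetical, not metaRTC's actual API): instead of memcpy-ing each encoded frame into an intermediate buffer, the sender keeps a pointer to the encoder's output buffer and releases it only after the network layer is done with it.

#include <stdint.h>

/* hypothetical send entry point */
void rtp_send(const uint8_t* data, uint32_t size);

/* A frame that borrows the encoder's output buffer instead of copying it. */
typedef struct {
    uint8_t* data;                /* points directly at the encoder output */
    uint32_t size;
    void (*release)(void* owner); /* returns the buffer to the encoder pool */
    void* owner;
} ZeroCopyFrame;

static void send_frame_zero_copy(ZeroCopyFrame* f) {
    rtp_send(f->data, f->size);   /* no intermediate memcpy */
    f->release(f->owner);         /* hand the buffer back to the encoder */
}

The RV1126 video and audio send threads used in the test are shown below.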

PVOID RV1126_Send_VideoPackets(PVOID args)
{
    STATUS retStatus = STATUS_SUCCESS;
    pStreamManage pstreammanage = gpStreamManage; // (pStreamManage) args;
    EncoderSession* session = (EncoderSession*) args;
    Frame frame;
    UINT32 frameSize = 0;
    UINT32 ret;
    UINT64 startTime, lastFrameTime, elapsed;
    tstreamInfo* pstreaminfo = NULL;
    if (pstreammanage == NULL) {
        printf("[metaRTC Master] sendVideoPackets(): operation returned status code: 0x%08x \n", STATUS_NULL_ARG);
        goto CleanUp;
    }
    YangFrame videoFrame;
    memset(&videoFrame, 0, sizeof(YangFrame));
    videoFrame.payload = buffer; // 'buffer' is a frame payload buffer defined elsewhere in the source
    memset(&frame, 0, sizeof(Frame));
    frame.presentationTs = 0;
    pstreammanage->videoBufferSize = 0;
    startTime = GETTIME();
    lastFrameTime = startTime;
    printf("sendVideoPackets thread open ...\n");
    if (sample_video_init(0))
    {
        printf("%s(): Failed to initialize video stream\n", __FUNCTION__);
        retStatus = STATUS_INVALID_HANDLE_ERROR;
        goto CleanUp;
    }
    if (sample_video_start(0))
    {
        printf("%s(): Failed to start video stream\n", __FUNCTION__);
        retStatus = STATUS_INVALID_HANDLE_ERROR;
        goto CleanUp;
    }
    while (!ATOMIC_LOAD_BOOL(&pstreammanage->appTerminateFlag) && (session->isConvert == 1)) {
        ret = receivekvsvideostream(&pstreaminfo);
        if (pstreaminfo == NULL || pstreaminfo->a == NULL) {
            continue;
        }
#if !USE_RK_DEVICE
        frameSize = pstreaminfo->size;
        frame.frameData = pstreaminfo->a;
        frame.size = frameSize;
        frame.presentationTs = pstreaminfo->timestamp;
#endif
        MUTEX_LOCK(pstreammanage->streamingSessionListReadLock);
#if USE_RK_DEVICE
        retStatus = yang_rkEncoder_save_stream(session->out_videoBuffer, &videoFrame, pstreaminfo->mb);
#else
        retStatus = yang_IPCEncoder_save_stream(session->out_videoBuffer, &videoFrame, &frame);
#endif
        if (retStatus < 0) {
            printf("Error saving video frame, error: %d\r\n", retStatus);
        }
        if (pstreaminfo->a != NULL)
            releaseH264Buffer(pstreaminfo);
        MUTEX_UNLOCK(pstreammanage->streamingSessionListReadLock);
 
        // Adjust the sleep in case the sleep itself plus writeFrame take longer than expected. Since sleep
        // guarantees the thread is paused at least for the given amount, there is no too-early frame scenario.
        // A delay greater than SAMPLE_VIDEO_FRAME_DURATION is very unlikely, so for simplicity the logic
        // assumes this is always true. The sleep is left commented out in this test build.
        elapsed = lastFrameTime - startTime;
        // THREAD_SLEEP(SAMPLE_VIDEO_FRAME_DURATION - elapsed % SAMPLE_VIDEO_FRAME_DURATION);
        lastFrameTime = GETTIME();
    }

CleanUp:
    sample_video_exit(0);
    CHK_LOG_ERR(retStatus);

    return (PVOID) (ULONG_PTR) retStatus;
}
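The pacing formula in the commented-out sleep is worth a closer look: sleeping for SAMPLE_VIDEO_FRAME_DURATION - elapsed % SAMPLE_VIDEO_FRAME_DURATION realigns the thread to the next frame slot instead of letting delay accumulate. A self-contained sketch of the same arithmetic (the 40 ms frame duration is an illustrative value, not the SDK's constant):

#include <stdint.h>
#include <stdio.h>

#define FRAME_DURATION 40000ULL /* one frame slot in microseconds (25 fps); illustrative */

/* Sleep only for the remainder of the current frame slot, so a slow
 * writeFrame delays one frame instead of every frame after it. */
static uint64_t pacing_sleep(uint64_t now, uint64_t start) {
    uint64_t elapsed = now - start;
    return FRAME_DURATION - elapsed % FRAME_DURATION;
}

int main(void) {
    /* 3 full frame slots plus 5 ms of overrun: sleep 35 ms to realign */
    printf("%llu\n", (unsigned long long) pacing_sleep(125000ULL, 0ULL)); /* prints 35000 */
    return 0;
}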

PVOID RV1126_Send_AudioPackets(PVOID args)
{
    STATUS retStatus = STATUS_SUCCESS;
    pStreamManage pstreammanage = gpStreamManage; // (pStreamManage) args;
    EncoderSession* session = (EncoderSession*) args;
    YangFrame audioFrame;
    UINT32 frameSize;
    UINT32 ret;
    UINT64 startTime, lastFrameTime, elapsed, sleeps, sleepmod;
    tstreamInfo* pstreaminfo = NULL;
    if (pstreammanage == NULL) {
        printf("[metaRTC Master] sendAudioPackets(): operation returned status code: 0x%08x \n", STATUS_NULL_ARG);
        goto CleanUp;
    }
    printf("[metaRTC Master] sendAudioPackets(): thread start...\r\n");

    if (StartRealAudioCapturePlayer() != 0) {
        printf("%s(): StartRealAudioCapturePlayer() failed\n", __FUNCTION__);
        retStatus = STATUS_INVALID_HANDLE_ERROR;
        goto CleanUp;
    }
    if (RealAudioCaptureStart() != 0) {
        printf("%s(): RealAudioCaptureStart() failed\n", __FUNCTION__);
        retStatus = STATUS_INVALID_HANDLE_ERROR;
        goto CleanUp;
    }
    startTime = GETTIME();
    lastFrameTime = startTime;
    printf("[metaRTC Master] sendAudioPackets(): starting...\r\n");
    // THREAD_SLEEP(100*HUNDREDS_OF_NANOS_IN_A_MILLISECOND);
    while (!ATOMIC_LOAD_BOOL(&pstreammanage->appTerminateFlag) && (session->isConvert == 1)) {
        ret = receivekvsaudiostream(&pstreaminfo);
        if (pstreaminfo == NULL || pstreaminfo->a == NULL) {
            continue;
        }
        frameSize = pstreaminfo->size;
        if ((INT32) frameSize < 0) { // size is unsigned, so cast before the negative-size check
            printf(" frameSize(%d) < 0, frame dropped\n", (INT32) frameSize);
            continue;
        }
        audioFrame.frametype = YangFrameTypeAudio;
        audioFrame.nb = frameSize;
        audioFrame.payload = pstreaminfo->a;
        audioFrame.dts = audioFrame.pts = pstreaminfo->timestamp;
        MUTEX_LOCK(pstreammanage->streamingSessionListReadLock);
        session->out_audioBuffer->putEAudio(&session->out_audioBuffer->mediaBuffer, &audioFrame);
        MUTEX_UNLOCK(pstreammanage->streamingSessionListReadLock);
        lastFrameTime = GETTIME();
        elapsed = lastFrameTime - startTime;
        sleepmod = elapsed % SAMPLE_AUDIO_FRAME_DURATION;
        sleeps = SAMPLE_AUDIO_FRAME_DURATION - sleepmod;
        // THREAD_SLEEP(sleeps); // pacing left disabled in this test build
    }

CleanUp:
    RealAudioExit();
    printf("audio thread leaving, err: %d\n", retStatus);
    return (PVOID) (ULONG_PTR) retStatus;
}
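Both threads hand frames to the session's media buffer while holding streamingSessionListReadLock, so the consumer never observes a half-written frame. A minimal sketch of that producer-side pattern (pthread-based, with a hypothetical buffer type; metaRTC's MUTEX_LOCK/MUTEX_UNLOCK macros wrap the same idea):

#include <pthread.h>
#include <stdint.h>
#include <string.h>

/* Hypothetical shared frame slot guarded by a mutex, mirroring the
 * MUTEX_LOCK / putEAudio / MUTEX_UNLOCK sequence above. */
typedef struct {
    pthread_mutex_t lock;
    uint8_t data[4096];
    uint32_t size; /* 0 means empty */
} SharedFrameSlot;

static void produce_frame(SharedFrameSlot* slot, const uint8_t* payload, uint32_t nb) {
    pthread_mutex_lock(&slot->lock);   /* consumer is blocked while we write */
    if (nb <= sizeof(slot->data)) {
        memcpy(slot->data, payload, nb);
        slot->size = nb;
    }
    pthread_mutex_unlock(&slot->lock); /* the frame becomes visible atomically */
}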
#define SENDFILE 0
void yang_IPCEncoder_start(EncoderSession* session) {
    if (session->isStart) return;
    session->isStart = 1; // keep the flag set so the threads cannot be started twice
    printf("yang_IPCEncoder_start\r\n");
#if SENDFILE
    if (pthread_create(&session->videothreadId, 0, sendVideoPackets, session)) {
        yang_error("YangThread::start could not start thread");
    }
    if (pthread_create(&session->audiothreadId, 0, sendAudioPackets, session)) {
        yang_error("YangThread::start could not start thread");
    }
#else
    if (pthread_create(&session->videothreadId, 0, RV1126_Send_VideoPackets, session)) {
        yang_error("YangThread::start could not start thread");
    }
    if (pthread_create(&session->audiothreadId, 0, RV1126_Send_AudioPackets, session)) {
        yang_error("YangThread::start could not start thread");
    }
#endif
}
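A hypothetical usage sketch (relying only on the EncoderSession fields used above, not a complete metaRTC setup): start the two send threads, let them stream, then signal them to stop via isConvert and join them.

/* Hypothetical driver code; assumes an EncoderSession wired up elsewhere. */
EncoderSession session;
memset(&session, 0, sizeof(session));
session.isConvert = 1;            /* both send loops check this flag */
yang_IPCEncoder_start(&session);  /* spawns the video and audio threads */

/* ... stream for a while ... */

session.isConvert = 0;            /* ask both loops to exit */
pthread_join(session.videothreadId, NULL);
pthread_join(session.audiothreadId, NULL);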

[Figure 1: metaRTC performance test screenshot]

[Figure 2: metaRTC performance test screenshot]
