When playing back a recorded video file, the tricky part is controlling the playback timing. Comparing two relative intervals is enough to keep the timing reasonably accurate: the timestamp gap between two consecutive frames in the file is checked against the wall-clock time that has elapsed since the previous frame was played.
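Stripped of the file reading and rendering, the pacing idea looks roughly like the sketch below. This is only a minimal illustration, not the playback source's actual code: the frame-timestamp vector, the 5 ms polling interval, the speed factor and the 2000 ms gap limit are assumptions that mirror the function that follows.

#include <chrono>
#include <cstdint>
#include <thread>
#include <vector>

// Current wall-clock time in milliseconds (monotonic clock).
static int64_t NowMs()
{
    using namespace std::chrono;
    return duration_cast<milliseconds>(steady_clock::now().time_since_epoch()).count();
}

// Render frames whose file timestamps (in ms) are given in frameTimesMs.
// A frame is shown only after the wall-clock time elapsed since the previous
// rendered frame, scaled by the playback speed, has caught up with the
// timestamp gap stored in the file.
void PlayFrames(const std::vector<int64_t>& frameTimesMs, float speed)
{
    const int64_t kMaxGapMs = 2000;                 // very large gaps are not waited out
    int64_t lastFrameTick  = frameTimesMs.empty() ? 0 : frameTimesMs[0];
    int64_t lastSystemTick = NowMs();

    for (int64_t frameTick : frameTimesMs)
    {
        if (frameTick < lastFrameTick)              // timestamps jumped backwards: resync
            lastFrameTick = frameTick;

        // Poll, as the real loop does with Sleep(5), until it is time to render.
        while ((NowMs() - lastSystemTick) * speed < frameTick - lastFrameTick
               && frameTick - lastFrameTick < kMaxGapMs)
        {
            std::this_thread::sleep_for(std::chrono::milliseconds(5));
        }

        lastFrameTick  = frameTick;
        lastSystemTick = NowMs();
        // RenderFrame(frameTick) would be called here.
    }
}

Because both sides of the comparison are relative deltas, a change of speed or a jump in either clock only affects the next interval instead of accumulating drift. The full function below applies the same check while also handling audio frames, GPS payloads and the different playback speeds.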
void playbacksource::PlaybackFile(void* param)
{
AV_FRAME frame;
int skip_audio = 0;
int64_t iNowSystemTicket = 0;
int64_t iLastSystemTicket = 0;
float fSpeed = 0.0f;
int64_t iLastSecondTick = 0; //Only used to send the current frame time to the client once per second
memset( &frame,0,sizeof(AV_FRAME) );
frame.Buffer = (char *)malloc( MAX_FRAME_SIZE );
if (frame.Buffer == NULL)
{
return ;
}
m_pGPSBuffer = (char *)malloc( MAX_GPS_MSG_LENGTH );
if (m_pGPSBuffer == NULL)
{
//Do not leak the frame buffer on this error path
free(frame.Buffer);
frame.Buffer = NULL;
return;
}
m_bNextVideoFrameReady = false;
int iFPS = 0;
DATETIME FrameTime;
while (!GetEndingState())
{
int rc = 0;
fSpeed = GetSpeedValue();
Sleep(5);
int play_state = GetPlayingState();
int NextFrame = GetFrameByFrame();
//if (!GetPlayingState() && !GetFrameByFrame())
if (!play_state && !NextFrame)
{
m_bNextVideoFrameReady = false;
memset(&FrameTime,0,sizeof(DATETIME));
continue;
}
//n_now_system_time = GetTickCount();
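//Step 1: if no frame is pending, fetch the next one from the record file
//according to the current playback speed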
if (!m_bNextVideoFrameReady)
{
switch(GetPlayingSpeed())
{
case PLAY_SPEED_X16:
case PLAY_SPEED_X8:
//At x8/x16 only key frames are read so decoding can keep up
rc = YMHX_RECORDFILE_GetNextKeyFrame(&m_SRF,&frame);
break;
case PLAY_SPEED_X1:
//Normal speed: every frame is read, audio included
rc = YMHX_RECORDFILE_GetNextFrame(&m_SRF,&frame);
skip_audio = 0;
break;
case PLAY_SPEED_X4:
case PLAY_SPEED_X2:
case PLAY_SPEED_X1_2:
case PLAY_SPEED_X1_4:
case PLAY_SPEED_X1_8:
case PLAY_SPEED_X1_16:
//Every other speed: every frame is read but audio is dropped
rc = YMHX_RECORDFILE_GetNextFrame(&m_SRF,&frame);
skip_audio = 1;
break;
default:
break;
}
if (rc == 1)
{
m_bNextVideoFrameReady = true;
OutputStream(&frame);
}
else if (rc == 0)
{
//End of file: stop playing and request a key frame for the next start
m_bPlaying = 0;
m_bNeedKeyFrame = 1;
m_bNextVideoFrameReady = false;
if(m_pFilterAV)
m_pFilterAV->NotifyEvent(WM_GRAPHNOTIFY, EV_VIDEOEOF, NULL);
continue;
}
}//if (!m_bNextVideoFrameReady)
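//Step 2: a frame is pending; dispatch it by type (audio straight to the
//renderer, video paced against the wall clock, GPS forwarded as an event)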
if (m_bNextVideoFrameReady)
{
if (frame.FrameType == A_FRAME && !skip_audio && !m_bMute)
{
CheckAudioFrameFormat(&frame);
if (m_pFilterAV)
m_pFilterAV->ProcessAudioBuffer(frame.FrameSize,frame.Buffer);
}
else if (frame.FrameType == I_FRAME || frame.FrameType == P_FRAME)
{
//Only video frame need control speed
DateTimeToMsSeconds(&m_now_frame_tick,&(frame.FrameTime));
iNowSystemTicket = GetNowMsSecondsTime();
//send frame time changed event once per second
if (iNowSystemTicket - iLastSecondTick > 1000)
{
if(m_pFilterAV)
m_pFilterAV->NotifyEvent(WM_GRAPHNOTIFY, EV_UPDATESTAMP, (LONG_PTR)(m_now_frame_tick/1000));
iLastSecondTick = iNowSystemTicket;
}
//The recorded timestamp jumped backwards: resync the reference
if (m_now_frame_tick < m_last_frame_tick)
{
m_last_frame_tick = m_now_frame_tick;
}
//The local clock jumped backwards: resync the reference
if (iNowSystemTicket < iLastSystemTicket)
{
iLastSystemTicket = 0;
}
//Pace the video: keep waiting while the elapsed wall-clock time, scaled by the
//playback speed, has not yet caught up with the gap between frame timestamps;
//gaps of MAX_PLAY_DELAY or more are not waited out
if ( (iNowSystemTicket - iLastSystemTicket) * fSpeed < m_now_frame_tick - m_last_frame_tick
&& m_now_frame_tick - m_last_frame_tick < MAX_PLAY_DELAY)
{
continue;
}
m_last_frame_tick = m_now_frame_tick;
iLastSystemTicket = iNowSystemTicket;
CheckVideoFrameFormat(&frame);
if (m_pFilterAV)
m_pFilterAV->ProcessVideoBuffer(frame.FrameSize,frame.Buffer,frame.FrameType);
//Clear frame by frame play mode
SetFrameByFrame(0);
//DBG_MSG("frame type:%d frame sequence:%u video sequence:%d\n",frame.FrameType,frame.SequenceNumber,frame.VideoSequence);
}
else if (frame.FrameType == G_FRAME && m_pGPSBuffer)
{
//Check the size before copying so an oversized payload cannot overflow m_pGPSBuffer
if (frame.FrameSize < MAX_GPS_MSG_LENGTH)
{
memcpy(m_pGPSBuffer,frame.Buffer,frame.FrameSize);
if (m_pFilterAV)
m_pFilterAV->NotifyEvent(WM_GRAPHNOTIFY, EV_GPS_NOTIFY, (LONG_PTR)(frame.FrameSize));
}
}
//end of frame-type dispatch
m_bNextVideoFrameReady = false; //frame consumed; fetch the next one on the next pass
}//if (m_bNextVideoFrameReady)
}//while (!GetEndingState())
if (frame.Buffer)
free(frame.Buffer);
frame.Buffer = NULL;
if (m_pGPSBuffer)
free(m_pGPSBuffer);
m_pGPSBuffer = NULL;
}