It's the May 1st holiday; yesterday I worked through the PLAY part of the LIVE555 source code, and rather than keep it to myself I'm sharing the notes here.
The call stack:
BasicTaskScheduler0::doEventLoop()
{
// Repeatedly loop, handling readable sockets and timed events:
while (1)
{
SingleStep();
}
}
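For context, a server reaches this loop from a main() in the style of testOnDemandRTSPServer. The sketch below is my own minimal setup (the "test.264" file name, the "h264Stream" stream name and port 8554 are placeholders, not from the original trace); it shows the path that eventually leads to the PLAY handling traced here:
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) { *env << "Failed to create RTSP server\n"; return 1; }

  // Register an H.264 subsession; its startStream() is what handleCmd_PLAY calls below:
  ServerMediaSession* sms = ServerMediaSession::createNew(*env, "h264Stream");
  sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(*env, "test.264", False));
  rtspServer->addServerMediaSession(sms);

  env->taskScheduler().doEventLoop(); // enters the loop shown above; never returns
  return 0;
}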
void BasicTaskScheduler::SingleStep()
{
// handler->handlerProc == RTSPServer::RTSPClientSession::incomingRequestHandler
(*handler->handlerProc)(handler->clientData, resultConditionSet);
}
void RTSPServer::RTSPClientSession::incomingRequestHandler1()
{
handleRequestBytes(bytesRead);
}
void RTSPServer::RTSPClientSession::handleRequestBytes(int newBytesRead)
{
handleCmd_withinSession("PLAY", urlPreSuffix, urlSuffix, cseq,
(char const*)fRequestBuffer);
}
RTSPServer::RTSPClientSession::handleCmd_PLAY(ServerMediaSubsession* subsession,
char const* cseq,
char const* fullRequestStr)
{
fStreamStates[i].subsession->startStream(fOurSessionId,
fStreamStates[i].streamToken,
(TaskFunc*)noteClientLiveness, this,
rtpSeqNum, rtpTimestamp,
handleAlternativeRequestByte, this);
}
void OnDemandServerMediaSubsession::startStream(unsigned clientSessionId,
void* streamToken,
TaskFunc* rtcpRRHandler, void* rtcpRRHandlerClientData,
unsigned short& rtpSeqNum,
unsigned& rtpTimestamp,
ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
void* serverRequestAlternativeByteHandlerClientData)
{
// At the PLAY stage, startStream() is called. It calls StreamState::startPlaying(), which in
// turn calls fRTPSink->startPlaying() (defined in MediaSink::startPlaying()); that sets the
// sink's fSource to the fMediaSource passed in and then runs continuePlaying().
streamState->startPlaying(destinations,
rtcpRRHandler, rtcpRRHandlerClientData,
serverRequestAlternativeByteHandler,
serverRequestAlternativeByteHandlerClientData);
}
StreamState::startPlaying(Destinations* dests,
TaskFunc* rtcpRRHandler,
void* rtcpRRHandlerClientData,
ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
void* serverRequestAlternativeByteHandlerClientData)
{
fRTPSink->startPlaying(*fMediaSource,
afterPlayingStreamState,
this);
}
MediaSink::startPlaying(MediaSource& source,
afterPlayingFunc* afterFunc,
void* afterClientData)
{
fSource = (FramedSource*)&source;
fAfterFunc = afterFunc;
fAfterClientData = afterClientData;
return continuePlaying();
}
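To see this startPlaying()/continuePlaying() handshake outside the RTSP server, here is a stand-alone sketch in the style of the LIVE555 test programs that drives a sink from a source directly (the file names "in.264"/"out.264" are placeholders of my own):
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

static void afterPlaying(void* clientData) {
  // reached once the source signals end of stream
  MediaSink* sink = (MediaSink*)clientData;
  sink->envir() << "...done\n";
}

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  FramedSource* source = ByteStreamFileSource::createNew(*env, "in.264");
  if (source == NULL) { *env << "Cannot open in.264\n"; return 1; }
  MediaSink* sink = FileSink::createNew(*env, "out.264");

  // Exactly the MediaSink::startPlaying() shown above: it stores &source in fSource,
  // records afterPlaying, then calls the sink's continuePlaying():
  sink->startPlaying(*source, afterPlaying, sink);
  env->taskScheduler().doEventLoop();
  return 0;
}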
H264VideoRTPSink::continuePlaying()
{
if (fOurFragmenter == NULL) {
fOurFragmenter = new H264FUAFragmenter(envir(),
fSource,
OutPacketBuffer::maxSize,
ourMaxPacketSize() - 12);
// fSource is now replaced by fOurFragmenter (an H264FUAFragmenter); the original fSource
// becomes the fragmenter's fInputSource. Execution then continues through
// MultiFramedRTPSink::continuePlaying() -> buildAndSendPacket(True) -> packFrame().
fSource = fOurFragmenter;
}
return MultiFramedRTPSink::continuePlaying();
}
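The wrapping trick used here (a filter takes the sink's place as fSource while the real source becomes the filter's fInputSource) is the generic FramedFilter pattern. Below is a bare-bones pass-through filter I sketched to make the indirection visible; the class name is mine, not part of LIVE555:
#include "FramedFilter.hh"

class PassThroughFilter : public FramedFilter {
public:
  static PassThroughFilter* createNew(UsageEnvironment& env, FramedSource* inputSource) {
    return new PassThroughFilter(env, inputSource);
  }

protected:
  PassThroughFilter(UsageEnvironment& env, FramedSource* inputSource)
    : FramedFilter(env, inputSource) {} // FramedFilter stores inputSource in fInputSource

private:
  // Ask the wrapped source to deliver straight into our caller's buffer (fTo),
  // then complete delivery the same way the real source would:
  virtual void doGetNextFrame() {
    fInputSource->getNextFrame(fTo, fMaxSize,
                               afterGettingFrame, this,
                               FramedSource::handleClosure, this);
  }

  static void afterGettingFrame(void* clientData, unsigned frameSize,
                                unsigned numTruncatedBytes,
                                struct timeval presentationTime,
                                unsigned durationInMicroseconds) {
    PassThroughFilter* filter = (PassThroughFilter*)clientData;
    filter->fFrameSize = frameSize;
    filter->fNumTruncatedBytes = numTruncatedBytes;
    filter->fPresentationTime = presentationTime;
    filter->fDurationInMicroseconds = durationInMicroseconds;
    FramedSource::afterGetting(filter); // hands the frame back to the sink
  }
};
H264FUAFragmenter is wired the same way, but instead of passing frames through unchanged it buffers a whole NAL unit in fInputBuffer and hands it out in FU-A sized pieces.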
H264FUAFragmenter::H264FUAFragmenter(UsageEnvironment& env,
FramedSource* inputSource,
unsigned inputBufferMax,
unsigned maxOutputPacketSize)
: FramedFilter(env, inputSource)
{
}
MultiFramedRTPSink::continuePlaying()
{
buildAndSendPacket(True);
}
MultiFramedRTPSink::buildAndSendPacket(Boolean isFirstPacket)
{
// set up the RTP header, then
// pack as many (complete) frames into the packet as possible:
packFrame();
}
MultiFramedRTPSink::packFrame()
{
// H264FUAFragmenter inherits from FramedFilter (itself a FramedSource). In the fragmenter's
// constructor above, FramedFilter::fInputSource was set to the original fSource.
// H264FUAFragmenter::doGetNextFrame() points fInputSource's fTo at &fInputBuffer[1] and later
// copies that data out, i.e. memmove(fTo, &fInputBuffer[1], fNumValidDataBytes - 1);
// the start code should be able to go in there together with the rest, so fInputBuffer is
// where our start code + NAL header + NAL payload end up.
//
// If the previous frame was too large, its leftover bytes are still in the buffer and are
// used first; otherwise fSource's fTo pointer is set to fOutBuf->curPtr() and the next
// frame is requested:
fSource->getNextFrame(fOutBuf->curPtr(),
fOutBuf->totalBytesAvailable(),
afterGettingFrame,
this,
ourHandleClosure,
this);
}
FramedSource::getNextFrame(unsigned char* to,
unsigned maxSize,
afterGettingFunc* afterGettingFunc,
void* afterGettingClientData,
onCloseFunc* onCloseFunc,
void* onCloseClientData)
{
// set the various delivery parameters:
fTo = to;
fMaxSize = maxSize;
fNumTruncatedBytes = 0; // by default; could be changed by doGetNextFrame()
fDurationInMicroseconds = 0; // by default; could be changed by doGetNextFrame()
fAfterGettingFunc = afterGettingFunc;
fAfterGettingClientData = afterGettingClientData;
fOnCloseFunc = onCloseFunc;
fOnCloseClientData = onCloseClientData;
fIsCurrentlyAwaitingData = True;
// since packFrame() is the caller here, fSource's fAfterGettingFunc == MultiFramedRTPSink::afterGettingFrame
// and fOnCloseFunc == MultiFramedRTPSink::ourHandleClosure
doGetNextFrame();
}
H264FUAFragmenter::doGetNextFrame()
{
// If we have no NAL unit data in the buffer (i.e. fNumValidDataBytes == 1), read a new one:
if (fNumValidDataBytes == 1)
{
FramedFilter::fInputSource->getNextFrame(&fInputBuffer[1],
fInputBufferSize - 1,
afterGettingFrame,
this,
FramedSource::handleClosure,
this);
// here fInputSource's fAfterGettingFunc == H264FUAFragmenter::afterGettingFrame
}
else
{
......
// Complete delivery to the client:
FramedSource::afterGetting(this);
}
}
FramedSource::afterGetting(FramedSource* source) {
if (source->fAfterGettingFunc != NULL) {
(*(source->fAfterGettingFunc))(source->fAfterGettingClientData,
source->fFrameSize, source->fNumTruncatedBytes,
source->fPresentationTime,
source->fDurationInMicroseconds);
}
}
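This getNextFrame()/doGetNextFrame()/afterGetting() handshake is the contract every source must follow. A toy source, sketched only to illustrate the protocol (the class name and the canned frame bytes are made up, not from LIVE555):
#include "FramedSource.hh"
#include <cstring>
#include <sys/time.h>

class DummyFrameSource : public FramedSource {
public:
  static DummyFrameSource* createNew(UsageEnvironment& env) { return new DummyFrameSource(env); }

protected:
  DummyFrameSource(UsageEnvironment& env) : FramedSource(env) {}

private:
  virtual void doGetNextFrame() {
    // Pretend this frame came from a camera or a file:
    static unsigned char const frame[] = { 0x67, 0x42, 0x00, 0x1e };
    unsigned size = sizeof frame;

    // fTo/fMaxSize were set by getNextFrame(); honour them and note any truncation:
    if (size > fMaxSize) { fNumTruncatedBytes = size - fMaxSize; size = fMaxSize; }
    else fNumTruncatedBytes = 0;
    memmove(fTo, frame, size);
    fFrameSize = size;
    gettimeofday(&fPresentationTime, NULL);
    fDurationInMicroseconds = 0;

    // Invokes fAfterGettingFunc, i.e. MultiFramedRTPSink::afterGettingFrame when an
    // RTP sink is the consumer:
    FramedSource::afterGetting(this);
  }
};
A real source usually arranges for afterGetting() to be called from the event loop rather than synchronously inside doGetNextFrame(), to avoid deep recursion when data is always immediately available.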
MultiFramedRTPSink::afterGettingFrame(void* clientData,
unsigned numBytesRead, unsigned numTruncatedBytes,
struct timeval presentationTime,
unsigned durationInMicroseconds)
{
MultiFramedRTPSink* sink = (MultiFramedRTPSink*)clientData;
sink->afterGettingFrame1(numBytesRead, numTruncatedBytes,
presentationTime, durationInMicroseconds);
}
MultiFramedRTPSink::afterGettingFrame1(unsigned frameSize,
unsigned numTruncatedBytes,
struct timeval presentationTime,
unsigned durationInMicroseconds)
{
if (numFrameBytesToUse == 0) {
// Send our packet now, because we have filled it up:
sendPacketIfNecessary();
}
else
{
// There's room for more frames; try getting another:
packFrame();
}
}
MultiFramedRTPSink::sendPacketIfNecessary()
{
// Delay this amount of time:
nextTask() = envir().taskScheduler()
.scheduleDelayedTask(uSecondsToGo, (TaskFunc*)sendNext, this);
}
// The following is called after each delay between packet sends:
MultiFramedRTPSink::sendNext(void* firstArg)
{
MultiFramedRTPSink* sink = (MultiFramedRTPSink*)firstArg;
sink->buildAndSendPacket(False);
}
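So packet pacing is nothing more than the scheduler's delayed-task mechanism feeding back into itself. Stripped of the RTP details, the same pattern looks like the sketch below; the tick() function and the 40 ms period are illustrative values of mine, not LIVE555 code:
#include "BasicUsageEnvironment.hh"

static void tick(void* clientData) {
  UsageEnvironment* env = (UsageEnvironment*)clientData;
  *env << "send one packet\n"; // stand-in for buildAndSendPacket(False)
  env->taskScheduler().scheduleDelayedTask(40000 /* us */, (TaskFunc*)tick, env);
}

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
  tick(env); // prime the first "packet"; each call reschedules the next
  env->taskScheduler().doEventLoop(); // SingleStep() fires the delayed tasks
  return 0;
}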