前言
当前安防设备众多,比如海康、大华、宇视等厂家设备,有一个共同点就是基本都支持RTSP协议,在VLC中输入RTSP地址即可播放。
用vlc测试时候,抓包发现vlc其实用的也是live555。
基于live555开发RTSPClient。下载live555源码编译,然后根据自带demo改写。下面贴代码
rrourrtspclient.h如下
#ifndef _RR_OUR_RTSPCLIENT_H_
#define _RR_OUR_RTSPCLIENT_H_
#include <string>
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "rrourbase.h"
#include "rr_rtspclient_interface.h"
// Superseded by pfnRRRtspDataCallBack from rr_rtspclient_interface.h, hence commented out.
//typedef void (* pfnRtspDataCallBack)(unsigned char *pbuf, unsigned int len, void *param);
// Result-notification callback: ret is a status code, pUser is opaque caller context.
// NOTE(review): not referenced anywhere in the code shown — possibly dead.
typedef void (* pfnResultCallBack)(int ret, void *pUser);
// RTSP pull-stream client built on live555's RTSPClient.
// Lifecycle: createNew() -> StartPlay() (blocks up to 2 s for the PLAY
// handshake) -> frames arrive through the registered callback ->
// StopPlay() (tears the stream down and destroys this object via
// Medium::close(); the pointer must not be used afterwards).
// A worker thread created in the constructor runs the live555 event loop;
// m_flag is the loop's "watch" variable (any non-zero value stops it).
class rrourRtspClient : public RTSPClient
{
public:
    // Factory: builds a private TaskScheduler + UsageEnvironment and the client.
    static rrourRtspClient* createNew(char const* rtspURL);
    rrourRtspClient(UsageEnvironment *penv, char const* rtspURL,
        int verbosityLevel = 0, char const* applicationName = NULL, portNumBits tunnelOverHTTPPortNum = 0);
    virtual ~rrourRtspClient();
    // DESCRIBE response handler: parses the SDP and starts the SETUP chain.
    void AfterDescribe(RTSPClient* rtspClient, int resultCode, char* resultString);
    // SETUPs the next subsession, or sends PLAY once all are set up.
    void setupNextSubsession(RTSPClient* rtspClient);
    // SETUP response handler. (The "Stpup" typo is kept intentionally: the
    // trampolines in the .cpp call the method by this exact name.)
    void AfterStpup(RTSPClient* rtspClient, int resultCode, char* resultString);
    // PLAY response handler: posts m_sema on success to unblock StartPlay().
    void AfterPlay(RTSPClient* rtspClient, int resultCode, char* resultString);
    // Tears down all subsessions and closes (deletes) the client object.
    void shutdownStream(RTSPClient* rtspClient, int exitCode = 0);
    // Starts streaming. Optional absolute start/end times (both required to
    // take effect) select playback of recorded footage.
    // Returns 0 on success, -1 on timeout/failure.
    int StartPlay(pfnRRRtspDataCallBack pfn, void *param, const char *pStartTime=NULL, const char *pEndTime=NULL);
    void StopPlay();
    // Heartbeat task body run on the live555 task scheduler.
    void KeepAlive();
    // Worker-thread body: idles until playback starts, then runs doEventLoop().
    void LoopEvent();
private:
    UsageEnvironment *m_env;                // live555 environment (one per client)
    MediaSubsessionIterator* m_iterSeesion; // iterates subsessions during SETUP
    MediaSession* m_session;                // session parsed from the SDP
    MediaSubsession* m_subsession;          // subsession currently being set up
    pfnRRRtspDataCallBack m_pfn;            // frame-delivery callback
    void *m_pParam;                         // opaque user context for m_pfn
    rrour_sema m_sema;                      // StartPlay() waits on this until PLAY succeeds
    bool m_brun;                            // worker thread keeps running while true
    bool m_bplay;                           // set once StartPlay() has been issued
    char m_flag;                            // doEventLoop watch variable (non-zero = stop)
    rr_htread_t m_thand;                    // worker thread handle
    TaskToken m_taskToken;                  // pending keep-alive task token
    std::string m_startTime;                // absolute playback start ("" = live)
    std::string m_endTime;                  // absolute playback end
};
// Thin wrapper owning one rrourRtspClient for a single RTSP URL.
// StopRtspPlay() releases the client (the client deletes itself inside
// StopPlay()); afterwards the wrapper holds no resources.
class rrourRtspSession
{
public:
    rrourRtspSession(char *rtspUrl);
    ~rrourRtspSession();
    // Starts pulling the stream; startTime/endTime may be NULL for live play.
    int StartRtspPlay(pfnRRRtspDataCallBack pfn, void *param, const char *startTime, const char *endTime);
    void StopRtspPlay();
private:
    rrourRtspClient *m_prtspcli;  // owned client; NULL after StopRtspPlay()
    pfnRRRtspDataCallBack m_pfn;  // NOTE(review): never assigned or read in the code shown
    void *m_pParam;               // NOTE(review): never assigned or read in the code shown
};
#endif
重写rrourRtspClient,rrourRtspSession
rrourrtspclient.cpp如下
#include "rrourrtspclient.h"
#include "rrplaysink.h"
// Non-zero: ask for RTP-over-TCP (interleaved) transport in the SETUP command.
#define REQUEST_STREAMING_OVER_TCP 1
// Static trampoline for DESCRIBE responses: routes the live555 callback
// into the owning rrourRtspClient instance.
void CALLBACK_AfterDescribe(RTSPClient* rtspClient, int resultCode, char* resultString)
{
    rrourRtspClient *pClient = static_cast<rrourRtspClient *>(rtspClient);
    if (pClient != NULL)
    {
        pClient->AfterDescribe(rtspClient, resultCode, resultString);
    }
}
// Static trampoline for SETUP responses: routes the live555 callback
// into the owning rrourRtspClient instance.
void CALLBACK_AfterSetup(RTSPClient* rtspClient, int resultCode, char* resultString)
{
    rrourRtspClient *pClient = static_cast<rrourRtspClient *>(rtspClient);
    if (pClient != NULL)
    {
        pClient->AfterStpup(rtspClient, resultCode, resultString);
    }
}
// Static trampoline for PLAY responses: routes the live555 callback
// into the owning rrourRtspClient instance.
void CALLBACK_AfterPlay(RTSPClient* rtspClient, int resultCode, char* resultString)
{
    rrourRtspClient *pClient = static_cast<rrourRtspClient *>(rtspClient);
    if (pClient != NULL)
    {
        pClient->AfterPlay(rtspClient, resultCode, resultString);
    }
}
// Invoked when one subsession's sink finishes playing (directly, or via a
// RTCP "BYE"). Closes that subsession's sink; if every subsession of the
// parent session now has a NULL sink, the whole client would be shut down —
// but that final step is currently disabled (see the commented-out call).
void subsessionAfterPlaying(void* clientData)
{
MediaSubsession* subsession = (MediaSubsession*)clientData;
RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr); // miscPtr is set in AfterStpup()
// Begin by closing this subsession's stream:
Medium::close(subsession->sink);
subsession->sink = NULL;
// Next, check whether *all* subsessions' streams have now been closed:
MediaSession& session = subsession->parentSession();
MediaSubsessionIterator iter(session);
while ((subsession = iter.next()) != NULL)
{
if (subsession->sink != NULL)
return; // this subsession is still active
}
// All subsessions' streams have now been closed, so shutdown the client:
if(NULL == rtspClient)
{
return;
}
rrourRtspClient *prtsp = (rrourRtspClient *)rtspClient; // NOTE(review): unused while shutdown is disabled
//prtsp->shutdownStream(rtspClient);
}
// RTCP "BYE" handler for a subsession: treats the BYE as end-of-stream and
// funnels into subsessionAfterPlaying(). The live555 demo's logging is kept
// below, commented out.
void subsessionByeHandler(void* clientData, char const* reason)
{
MediaSubsession* subsession = (MediaSubsession*)clientData;
RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
UsageEnvironment& env = rtspClient->envir(); // alias (only used by the disabled logging below)
//env << *rtspClient << "Received RTCP \"BYE\"";
//if (reason != NULL) {
//  env << " (reason:\"" << reason << "\")";
//  delete[] reason;
//}
//env << " on \"" << *subsession << "\" subsession\n";
// Now act as if the subsession had closed:
subsessionAfterPlaying(subsession);
}
void getParamFun(void* pParam)
{
if(pParam)
{
rrourRtspClient *prtsp = (rrourRtspClient *)pParam;
prtsp->KeepAlive();
}
}
void *rtspclient_thread_func(void *pParam)
{
if(NULL == pParam)
{
return 0;
}
rrourRtspClient *psession = (rrourRtspClient *)pParam;
if(psession)
{
psession->LoopEvent();
}
return 0;
}
// Factory: each client gets its own scheduler + environment so multiple
// clients can run independent event loops on their own threads.
// NOTE(review): the scheduler/environment pair is never reclaimed — the
// destructor closes the session but does not call env->reclaim() or delete
// the scheduler, so each client leaks them. Confirm and fix for long-running
// processes that create/destroy many clients.
rrourRtspClient* rrourRtspClient::createNew(char const* rtspURL)
{
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
return new rrourRtspClient(env, rtspURL);
}
// Constructs the base RTSPClient (last argument -1: let live555 open its own
// connection to the server) and immediately spawns the worker thread, which
// idles in LoopEvent() until StartPlay() sets m_bplay.
rrourRtspClient::rrourRtspClient(UsageEnvironment *penv, char const* rtspURL,
int verbosityLevel, char const* applicationName, portNumBits tunnelOverHTTPPortNum)
: RTSPClient(*penv, rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1)
{
m_env = penv;
m_taskToken = NULL;
m_pfn = NULL;
m_pParam = NULL;
m_iterSeesion = NULL;
m_session = NULL;
m_subsession = NULL;
m_brun = true;   // worker thread runs until the destructor clears this
m_bplay = false; // playback not started yet; worker thread just polls
m_flag = 0;      // event-loop watch variable: 0 = keep looping
m_startTime = "";
m_endTime = "";
// Worker thread executes rtspclient_thread_func -> LoopEvent().
rr_rtsp_thread_create(rtspclient_thread_func, this, NULL, &m_thand);
}
// Cancels the pending keep-alive task, stops the event loop, destroys the
// worker thread, then releases the subsession iterator and the media session.
// NOTE(review): m_env and its TaskScheduler are not reclaimed here — see the
// leak note on createNew().
rrourRtspClient::~rrourRtspClient()
{
m_env->taskScheduler().unscheduleDelayedTask(m_taskToken);
m_brun = false; // stop the worker thread's outer loop
m_flag = 1;     // makes doEventLoop() return in the worker thread
if(NULL != m_thand)
{
rr_rtsp_thread_destroy(m_thand);
m_thand = NULL;
}
delete m_iterSeesion;
if (m_session != NULL)
{
Medium::close(m_session);
m_session = NULL;
}
}
// DESCRIBE response handler. On success, builds a MediaSession from the SDP
// and kicks off the SETUP chain via setupNextSubsession(). On any failure it
// logs and returns (the full shutdown path is disabled below), leaving
// StartPlay() to time out on its semaphore.
void rrourRtspClient::AfterDescribe(RTSPClient* rtspClient, int resultCode, char* resultString)
{
do
{
UsageEnvironment& env = rtspClient->envir(); // alias
if (resultCode != 0)
{
printf("Failed to get a SDP error:%d description:%s\n", resultCode, resultString);
delete[] resultString; // live555 transfers ownership of resultString to the handler
break;
}
char* const sdpDescription = resultString;
// Create a media session object from this SDP description:
m_session = MediaSession::createNew(env, sdpDescription);
delete[] sdpDescription; // because we don't need it anymore
if (m_session == NULL)
{
printf("Failed to create a MediaSession object from the SDP description:%s\n", env.getResultMsg());
break;
}
else if (!m_session->hasSubsessions())
{
printf("This session has no media subsessions\n");
break;
}
// Then, create and set up our data source objects for the session. We do this by iterating over the session's 'subsessions',
// calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
// (Each 'subsession' will have its own data source.)
m_iterSeesion = new MediaSubsessionIterator(*m_session);
setupNextSubsession(rtspClient);
return;
} while (0);
// An unrecoverable error occurred with this stream.
//shutdownStream(rtspClient);
}
// Advances the subsession iterator: sends SETUP (over TCP, per
// REQUEST_STREAMING_OVER_TCP) for the next subsession, skipping any that fail
// to initiate. When the iterator is exhausted, sends PLAY — with an absolute
// time range when m_startTime was supplied (recorded playback), otherwise a
// plain live PLAY.
void rrourRtspClient::setupNextSubsession(RTSPClient* rtspClient)
{
UsageEnvironment& env = rtspClient->envir(); // alias (only used by the disabled logging)
m_subsession = m_iterSeesion->next();
if (m_subsession != NULL)
{
if (!m_subsession->initiate())
{
printf("Failed to initiate the \n");
setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
}
else
{
if (m_subsession->rtcpIsMuxed())
{
//env << "client port " << scs.subsession->clientPortNum();
}
else
{
//env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
}
//env << ")\n";
// Continue setting up this subsession, by sending a RTSP "SETUP" command:
rtspClient->sendSetupCommand(*m_subsession, CALLBACK_AfterSetup, False, REQUEST_STREAMING_OVER_TCP);
}
return;
}
// We've finished setting up all of the subsessions. Now, send a RTSP "PLAY" command to start the streaming:
if (!m_startTime.empty())
{
// Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
rtspClient->sendPlayCommand(*m_session, CALLBACK_AfterPlay, m_startTime.c_str(), m_endTime.c_str());
//rtspClient->sendPlayCommand(*m_session, continueAfterPLAY, "20191010T100000Z", "20191010T100100Z");
}
else
{
//scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
rtspClient->sendPlayCommand(*m_session, CALLBACK_AfterPlay);
}
}
// SETUP response handler (the "Stpup" spelling is kept — the header and the
// trampoline both use it). On success, attaches a rrPlaySink to the
// subsession, starts it playing, and installs the RTCP "BYE" handler.
// In all cases the response string is released and the next subsession is
// set up.
void rrourRtspClient::AfterStpup(RTSPClient* rtspClient, int resultCode, char* resultString)
{
do
{
UsageEnvironment& env = rtspClient->envir(); // alias
//StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
if (resultCode != 0)
{
printf("Faild to setup err:%d msg:%s\n", resultCode, resultString);
break;
}
//env << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession (";
if (m_subsession->rtcpIsMuxed())
{
//env << "client port " << scs.subsession->clientPortNum();
}
else
{
//env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
}
//env << ")\n";
// Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it.
// (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
// after we've sent a RTSP "PLAY" command.)
m_subsession->sink = rrPlaySink::createNew(env, *m_subsession, m_pfn, m_pParam, rtspClient->url());
// perhaps use your own custom "MediaSink" subclass instead
if (m_subsession->sink == NULL)
{
//env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
//  << "\" subsession: " << env.getResultMsg() << "\n";
printf("Faild to create data sink\n");
break;
}
//env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n";
m_subsession->miscPtr = rtspClient; // a hack to let subsession handler functions get the "RTSPClient" from the subsession
m_subsession->sink->startPlaying(*(m_subsession->readSource()),
subsessionAfterPlaying, m_subsession);
// Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
if (m_subsession->rtcpInstance() != NULL)
{
m_subsession->rtcpInstance()->setByeWithReasonHandler(subsessionByeHandler, m_subsession);
}
} while (0);
delete[] resultString; // handler owns the response string
// Set up the next subsession, if any:
setupNextSubsession(rtspClient);
}
// PLAY response handler: on success posts m_sema so the blocked StartPlay()
// returns 0. The duration timer and keep-alive scheduling from the live555
// demo remain commented out.
void rrourRtspClient::AfterPlay(RTSPClient* rtspClient, int resultCode, char* resultString)
{
Boolean success = False;
do
{
UsageEnvironment& env = rtspClient->envir(); // alias (only used by the disabled logging)
if (resultCode != 0) {
//env << *rtspClient << "Failed to start playing session: " << resultString << "\n";
printf("Faild to start playing session err:%d msg:%s\n", resultCode, resultString);
break;
}
// Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end
// using a RTCP "BYE"). This is optional. If, instead, you want to keep the stream active - e.g., so you can later
// 'seek' back within it and do another RTSP "PLAY" - then you can omit this code.
// (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.)
//if (scs.duration > 0)
//{
//  unsigned const delaySlop = 2; // number of seconds extra to delay, after the stream's expected duration. (This is optional.)
//  scs.duration += delaySlop;
//  unsigned uSecsToDelay = (unsigned)(scs.duration*1000000);
//  scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
//}
/*env << *rtspClient << "Started playing session";
if (scs.duration > 0) {
env << " (for up to " << scs.duration << " seconds)";
}
env << "...\n";*/
//m_taskToken = m_env->taskScheduler().scheduleDelayedTask(30*1000*1000, (TaskFunc *)getParamFun, this);
success = True;
m_sema.post(); // unblock StartPlay()'s m_sema.wait(2000)
} while (0);
delete[] resultString; // handler owns the response string
if (!success)
{
// An unrecoverable error occurred with this stream.
//shutdownStream(rtspClient);
}
}
//void streamTimerHandler(void* clientData) {
// ourRTSPClient* rtspClient = (ourRTSPClient*)clientData;
// StreamClientState& scs = rtspClient->scs; // alias
//
// scs.streamTimerTask = NULL;
//
// // Shut down the stream:
// shutdownStream(rtspClient);
//}
// Tears down the stream: closes every active subsession sink, sends TEARDOWN
// if any were active, then destroys the client object itself.
// WARNING: the final Medium::close(rtspClient) deletes the rrourRtspClient.
// When invoked as shutdownStream(this) (see StopPlay()), this is effectively
// "delete this" — no member may be touched afterwards and callers must drop
// their pointer.
void rrourRtspClient::shutdownStream(RTSPClient* rtspClient, int exitCode)
{
m_flag = 1; // stop the worker thread's doEventLoop()
UsageEnvironment& env = rtspClient->envir(); // alias (only used by the disabled logging)
// First, check whether any subsessions have still to be closed:
if (m_session != NULL)
{
Boolean someSubsessionsWereActive = False;
MediaSubsessionIterator iter(*m_session);
MediaSubsession* subsession;
while ((subsession = iter.next()) != NULL)
{
if (subsession->sink != NULL)
{
Medium::close(subsession->sink);
subsession->sink = NULL;
if (subsession->rtcpInstance() != NULL)
{
subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
}
someSubsessionsWereActive = True;
}
}
if (someSubsessionsWereActive)
{
// Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
// Don't bother handling the response to the "TEARDOWN".
rtspClient->sendTeardownCommand(*m_session, NULL);
}
}
//env << *rtspClient << "Closing the stream.\n";
Medium::close(rtspClient); // deletes the client (runs ~rrourRtspClient)
// Note that this will also cause this stream's "StreamClientState" structure to get reclaimed.
}
// Registers the frame callback, optionally records an absolute playback
// window (both bounds must be non-NULL to take effect), issues DESCRIBE, and
// releases the worker thread into the event loop. Blocks up to 2000 ms for
// AfterPlay() to post m_sema; on timeout the event loop is stopped and -1 is
// returned. Returns 0 on success.
int rrourRtspClient::StartPlay(pfnRRRtspDataCallBack pfn, void *param, const char *pStartTime, const char *pEndTime)
{
m_pfn = pfn;
m_pParam = param;
if(NULL != pStartTime && NULL != pEndTime)
{
m_startTime = pStartTime;
m_endTime = pEndTime;
}
sendDescribeCommand(CALLBACK_AfterDescribe);
m_bplay = true; // worker thread now enters doEventLoop()
int ret = m_sema.wait(2000); // wait for the PLAY handshake to complete
if(0 != ret)
{
m_flag = 1; // handshake timed out: stop the event loop
printf("Faild to rtsp play\n");
return -1;
}
return 0;
}
// Stops streaming. shutdownStream() ends with Medium::close(this), which
// destroys this object — the pointer is invalid after this call returns.
void rrourRtspClient::StopPlay()
{
shutdownStream(this);
}
void rrourRtspClient::KeepAlive()
{
m_taskToken = m_env->taskScheduler().scheduleDelayedTask(30*1000*1000, (TaskFunc *)getParamFun, this);
return;
}
// Worker-thread body: idle-polls until StartPlay() flips m_bplay, then hands
// control to the live555 event loop. doEventLoop() blocks until m_flag (the
// watch variable) becomes non-zero.
void rrourRtspClient::LoopEvent()
{
    while (m_brun)
    {
        if (!m_bplay)
        {
            // Playback not requested yet: sleep 10 ms and re-check.
#ifdef WIN32
            Sleep(10);
#else
            usleep(10000);
#endif
            continue;
        }
        m_env->taskScheduler().doEventLoop(&m_flag);
        break; // loop ended (m_flag set) — thread is done
    }
}
#if 0
#endif
// Creates the underlying client for the given URL immediately.
// NOTE(review): m_pfn/m_pParam are left uninitialized here; they are never
// read in the code shown, but initialize them if that changes.
rrourRtspSession::rrourRtspSession(char *rtspUrl)
{
m_prtspcli = rrourRtspClient::createNew(rtspUrl);
}
// Ensures the stream is stopped and the client released on destruction.
rrourRtspSession::~rrourRtspSession()
{
StopRtspPlay();
}
// Delegates playback start to the wrapped client.
// Returns -1 when no client exists, otherwise the client's StartPlay result.
int rrourRtspSession::StartRtspPlay(pfnRRRtspDataCallBack pfn, void *param, const char *startTime, const char *endTime)
{
    if (m_prtspcli != NULL)
    {
        return m_prtspcli->StartPlay(pfn, param, startTime, endTime);
    }
    printf("The rtspcli is null as StartRtspPlay\n");
    return -1;
}
// Stops playback and drops the client pointer. Safe to call repeatedly.
void rrourRtspSession::StopRtspPlay()
{
if(NULL == m_prtspcli)
{
printf("The rtspcli is null as StopRtspPlay\n");
return;
}
m_prtspcli->StopPlay(); // StopPlay() destroys the m_prtspcli object internally
m_prtspcli = NULL; // pointer is dangling after StopPlay(); clear it
}
//rrplaysink.h如下
#ifndef _RR_PLAYSINK_H_
#define _RR_PLAYSINK_H_
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "rr_rtspclient_interface.h"
//typedef void (* pfnplayerSinkCallback)(unsigned char *pbuf, unsigned int len, void *param);
// MediaSink that receives frames from one subsession and forwards them to a
// user callback. The receive buffer is prefixed with a 4-byte 0x00000001
// start code so video frames can be handed out in Annex-B form without a copy.
class rrPlaySink: public MediaSink
{
public:
    static rrPlaySink* createNew(UsageEnvironment& env,
        MediaSubsession& subsession, // identifies the kind of data that's being received
        pfnRRRtspDataCallBack pfn = NULL,
        void *pParam = NULL,
        char const* streamId = NULL); // identifies the stream itself (optional)
    rrPlaySink(UsageEnvironment& env, MediaSubsession& subsession,
        pfnRRRtspDataCallBack pfn, void *pParam, char const* streamId);
    // called only by "createNew()"
    virtual ~rrPlaySink();
    // Frame-arrival handler; entered via the CallBack_afterGettingFrame trampoline.
    void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
        struct timeval presentationTime, unsigned durationInMicroseconds);
private:
    // redefined virtual functions:
    virtual Boolean continuePlaying();
private:
    unsigned char* m_pbuf;          // owned buffer: [4-byte start code][payload]
    unsigned char* m_recvbuf;       // = m_pbuf + 4; where live555 writes each frame
    MediaSubsession& m_fSubsession; // source subsession (video/audio/...)
    char* m_fStreamId;              // strDup'ed stream id; freed in the destructor
    pfnRRRtspDataCallBack m_pfn;    // frame-delivery callback (may be NULL)
    void *m_param;                  // opaque user context for m_pfn
};
#endif
//rrplaysink.cpp如下
#include "rrplaysink.h"
// Per-sink receive buffer size (512 KiB); raise if frames arrive truncated.
#define RR_SINK_RECEIVE_BUFFER_SIZE 512*1024
// SDP medium-name prefix used to classify a subsession as video.
#define RR_VIDEO_TYPE "video"
void CallBack_afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes,
struct timeval presentationTime, unsigned durationInMicroseconds)
{
rrPlaySink* sink = (rrPlaySink*)clientData;
if(sink)
{
sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
}
}
// Factory kept for symmetry with live555 sink classes; simply forwards to the
// constructor.
rrPlaySink* rrPlaySink::createNew(UsageEnvironment& env, MediaSubsession& subsession,
pfnRRRtspDataCallBack pfn, void *pParam,char const* streamId)
{
return new rrPlaySink(env, subsession, pfn, pParam, streamId);
}
// Allocates the receive buffer and pre-writes an Annex-B style 0x00000001
// start code into its first four bytes; frame payloads are received directly
// after it (m_recvbuf).
rrPlaySink::rrPlaySink(UsageEnvironment& env, MediaSubsession& subsession,
    pfnRRRtspDataCallBack pfn, void *pParam, char const* streamId)
    : MediaSink(env)
    , m_fSubsession(subsession)
    , m_pfn(pfn)
    , m_param(pParam)
{
    m_fStreamId = strDup(streamId);
    const unsigned bufLen = RR_SINK_RECEIVE_BUFFER_SIZE + 4;
    m_pbuf = new unsigned char[bufLen];
    memset(m_pbuf, 0, bufLen); // bytes 0-2 stay 0x00 — start-code prefix
    m_pbuf[3] = 0x01;          // start-code terminator
    m_recvbuf = m_pbuf + 4;    // frames land just past the start code
}
// Releases the frame buffer and the duplicated stream id.
rrPlaySink::~rrPlaySink()
{
delete[] m_pbuf;
delete[] m_fStreamId; // allocated by strDup()
}
#define DEBUG_PRINT_EACH_RECEIVED_FRAME 1
// Called by live555 after one complete frame has been written into m_recvbuf
// (which is 4 bytes past the start of m_pbuf).
// - video subsessions: deliver m_pbuf so the pre-written 00 00 00 01 start
//   code precedes the payload (Annex-B form); length = frameSize + 4.
// - any other medium (e.g. audio): deliver the raw payload without the prefix.
// Finally re-arms the source for the next frame via continuePlaying().
// Fixes vs. original: removed the unused local holding mediumName(); frame
// info struct is now zero-initialized so fields this function does not set
// are no longer indeterminate; dead commented-out debug code dropped.
void rrPlaySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
    struct timeval presentationTime, unsigned durationInMicroseconds)
{
    (void)numTruncatedBytes;
    (void)presentationTime;
    (void)durationInMicroseconds;
    if (m_pfn)
    {
        RR_RTSP_FRAME_INFO_S frame = {}; // zero any fields we don't fill below
        const char *medium = m_fSubsession.mediumName();
        if (0 == strncmp(RR_VIDEO_TYPE, medium, strlen(RR_VIDEO_TYPE)))
        {
            frame.type = 0;          // video
            frame.pBuf = m_pbuf;     // include the 4-byte start code
            frame.len = frameSize + 4;
        }
        else
        {
            frame.type = 1;          // non-video
            frame.pBuf = m_pbuf + 4; // skip the start-code prefix
            frame.len = frameSize;
        }
        m_pfn(&frame, m_param);
    }
    // Request the next frame; without this the stream stalls after one frame.
    continuePlaying();
}
// Arms the source to deliver the next frame into m_recvbuf (4 bytes past the
// buffer start, preserving the prepended start code). Returns False only when
// the source has gone away.
Boolean rrPlaySink::continuePlaying()
{
if (fSource == NULL)
{
return False; // sanity check (should not happen)
}
// Request the next frame of data from our input source. "afterGettingFrame()" will get called later, when it arrives:
fSource->getNextFrame(m_recvbuf, RR_SINK_RECEIVE_BUFFER_SIZE,
CallBack_afterGettingFrame, this, onSourceClosure, this);
return True;
}
核心封装完成,就可以提供调用接口完成RTSP拉流了