lvrs.h // interface header
#ifndef _LVRS_H_
#define _LVRS_H_
#ifdef __cplusplus
#define EXTERN extern "C"
#define EXTERN_BEGIN extern "C" {
#define EXTERN_END }
#else
#define EXTERN extern
#define EXTERN_BEGIN
#define EXTERN_END
#endif
EXTERN_BEGIN
typedef int (*myGetFrameCB)(int chId,int srcId,char* buf,int size);
EXTERN void* liveVideoServerStart(myGetFrameCB cb);
EXTERN_END
#endif
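Since liveVideoServerStart() blocks in the live555 event loop and never returns, a typical application drives it from a dedicated thread. A minimal sketch, assuming a hypothetical getEncodedFrame() that copies one encoded frame from the encoder's output queue into buf (the callback runs on the server thread, so that function must be thread-safe):

// Sketch only: liveVideoServerStart() blocks in the live555 event loop,
// so run it on its own thread. getEncodedFrame() is a hypothetical
// application-side function, not part of this library.
#include <pthread.h>
#include "lvrs.h"

extern int getEncodedFrame(int chId, int srcId, char* buf, int size); // hypothetical

static int frameCb(int chId, int srcId, char* buf, int size) {
    return getEncodedFrame(chId, srcId, buf, size); // bytes copied into buf
}

static void* serverThread(void*) {
    liveVideoServerStart(frameCb); // never returns
    return NULL;
}

int main() {
    pthread_t tid;
    pthread_create(&tid, NULL, serverThread, NULL);
    // ... the application keeps feeding its encoder here ...
    pthread_join(tid, NULL); // blocks forever; the server owns that thread
    return 0;
}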
The streaming server class: liveVideoRTSPServer.h
#ifndef _LIVE_VIDEO_RTSP_SERVER_H
#define _LIVE_VIDEO_RTSP_SERVER_H
#ifndef _RTSP_SERVER_SUPPORTING_HTTP_STREAMING_HH
#include "RTSPServerSupportingHTTPStreaming.hh"
#endif
#include "ByteFrameLiveVideoSource.hh" // for the GetFrameCB typedef
class liveVideoRTSPServer: public RTSPServerSupportingHTTPStreaming {
public:
static liveVideoRTSPServer* createNew(Port ourPort,UserAuthenticationDatabase* authDatabase,
GetFrameCB cb,unsigned reclamationTestSeconds = 65);
protected:
liveVideoRTSPServer(UsageEnvironment& env, int ourSocket, Port ourPort,
UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds);
// called only by createNew();
virtual ~liveVideoRTSPServer();
protected: // redefined virtual functions
virtual ServerMediaSession* lookupServerMediaSession(char const* streamName);
private:
GetFrameCB readFrameCb; // frame-fetch callback handed to each subsession
public:
static UsageEnvironment* s_env;
static UsageEnvironment* getEnv();
};
#endif
liveVideoRTSPServer.cpp: implementation
#include "liveVideoRTSPServer.h"
#include "BasicUsageEnvironment.hh"
#include "lvrs.h"
#include <iostream>
#include <cstring>  // strrchr
#include <cstdio>   // fprintf
#include <cstdlib>  // atoi, exit
#include "MatroskaFileServerDemux.hh" // only for the unused Matroska leftovers below
using namespace std;
UsageEnvironment* liveVideoRTSPServer::s_env = NULL;
UsageEnvironment* liveVideoRTSPServer::getEnv(){
  if(s_env == NULL){
    cout << "create s_env" << endl;
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    s_env = BasicUsageEnvironment::createNew(*scheduler);
  }
  return s_env;
}
liveVideoRTSPServer* liveVideoRTSPServer::createNew(Port ourPort, UserAuthenticationDatabase* authDatabase,
                                                    GetFrameCB cb, unsigned reclamationTestSeconds){
  int ourSocket = setUpOurSocket(*getEnv(), ourPort); // standard RTSPServer socket setup
  if (ourSocket == -1) return NULL;
  liveVideoRTSPServer* lvrs = new liveVideoRTSPServer(*getEnv(), ourSocket, ourPort,
                                                      authDatabase, reclamationTestSeconds);
  if (lvrs != NULL) lvrs->readFrameCb = cb;
  return lvrs;
}
liveVideoRTSPServer::liveVideoRTSPServer(UsageEnvironment& env,
int ourSocket,Port ourPort,
UserAuthenticationDatabase* authDatabase, unsigned reclamationTestSeconds)
: RTSPServerSupportingHTTPStreaming(env, ourSocket, ourPort, authDatabase, reclamationTestSeconds){
}
liveVideoRTSPServer::~liveVideoRTSPServer(){
}
static ServerMediaSession* createNewSMS(UsageEnvironment& env,
char const* streamName, GetFrameCB cb); // forward
ServerMediaSession*
liveVideoRTSPServer::lookupServerMediaSession(char const* streamName) {
cout<<"liveVideoRTSPServer::lookupServerMediaSession: "< \r\n");
// Create a new "ServerMediaSession" object for streaming from the named file.
sms = createNewSMS(envir(), streamName,readFreamCb);
addServerMediaSession(sms);
}
return sms;
}
// Special code for handling Matroska files (unused here; leftover from live555's DynamicRTSPServer):
static char newMatroskaDemuxWatchVariable;
static MatroskaFileServerDemux* demux;
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
demux = newDemux;
newMatroskaDemuxWatchVariable = 1;
}
// END Special code for handling Matroska files:
#define NEW_SMS(description) do {\
char const* descStr = description\
", streamed by the LIVE555 Media Server";\
sms = ServerMediaSession::createNew(env, streamName, streamName, descStr);\
} while(0)
static ServerMediaSession* createNewSMS(UsageEnvironment& env,
char const* streamName, GetFrameCB cb) {
  // Parse the stream name ("<chId>/<srcId>") to select the channel and sub-stream:
  int chId, srcId;
  int i;
  ServerMediaSession* sms = NULL;
  Boolean const reuseSource = False;
  char const* slash = strrchr(streamName, '/');
  char const* pstr = streamName;
  char chStr[10] = {0};
  if (slash == NULL) return NULL;
  for (i = 0; i < (int)(slash - streamName) && i < (int)sizeof(chStr) - 1; i++) {
    chStr[i] = *pstr++;
  }
  chId = atoi(chStr);      // channel number, before the '/'
  srcId = atoi(slash + 1); // 0 = main stream, 1 = sub stream
  NEW_SMS("H.264 live video");
  OutPacketBuffer::maxSize = 1920*1080*3/2; // YUV4:2:0 bound; allow for some possibly large H.264 frames
  sms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(env, cb, chId, srcId, reuseSource));
  return sms;
}
void* liveVideoServerStart(myGetFrameCB cb){
RTSPServer* rtspServer;
portNumBits rtspServerPortNum = 554;
// Begin by setting up our usage environment:
//TaskScheduler* scheduler = BasicTaskScheduler::createNew();
//UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum,NULL,(GetFrameCB)cb);
if (rtspServer == NULL) {
rtspServerPortNum = 8554;
rtspServer = liveVideoRTSPServer::createNew(rtspServerPortNum, NULL,(GetFrameCB)cb);
}
if (rtspServer == NULL) {
*liveVideoRTSPServer::getEnv() << "Failed to create RTSP server: " << liveVideoRTSPServer::getEnv()->getResultMsg() << "\n";
exit(1);
}
char* urlPrefix = rtspServer->rtspURLPrefix();
fprintf(stdout, "use like this:%s", urlPrefix);
fprintf(stdout, "channel/srcch \n");
liveVideoRTSPServer::getEnv()->taskScheduler().doEventLoop(); // does not return
return NULL;
}
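With the server running, the stream name encodes channel and source as parsed in createNewSMS(): for example rtsp://192.168.1.100:554/0/0 requests channel 0's main stream and rtsp://192.168.1.100:554/0/1 its sub stream (use port 8554 if binding to 554 failed, which is common when not running as root). Opening such a URL in any RTSP client (VLC, ffplay, etc.) triggers lookupServerMediaSession(), which creates the session on first use.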
The H264LiveVideoServerMediaSubsession class (header and implementation)
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH_
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH_
#include "ByteFrameLiveVideoSource.hh"
#ifndef _ON_DEMAND_SERVER_MEDIA_SUBSESSION_HH
#include "OnDemandServerMediaSubsession.hh"
#endif
class H264LiveVideoServerMediaSubsession : public OnDemandServerMediaSubsession {
public:
static H264LiveVideoServerMediaSubsession*
createNew(UsageEnvironment& env,GetFrameCB cb, int mchId,int msrcId,Boolean reuseFirstSource);
// Used to implement "getAuxSDPLine()":
void checkForAuxSDPLine1();
void afterPlayingDummy1();
private: // redefined virtual functions
virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
unsigned& estBitrate);
virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
unsigned char rtpPayloadTypeIfDynamic,
FramedSource* inputSource);
protected:
H264LiveVideoServerMediaSubsession(UsageEnvironment& env,
GetFrameCB cb, int mchId,int msrcId, Boolean reuseFirstSource);
// called only by createNew();
virtual ~H264LiveVideoServerMediaSubsession();
void setDoneFlag() { fDoneFlag = ~0; }
protected: // redefined virtual functions
virtual char const* getAuxSDPLine(RTPSink* rtpSink,
FramedSource* inputSource);
protected:
//virtual char const* sdpLines();
GetFrameCB tempCb;
int chId; //0-1
int srcId;//0 main,1 sub
char* fAuxSDPLine;
char fDoneFlag; // used when setting up "fAuxSDPLine"
RTPSink* fDummyRTPSink; // ditto
};
#endif
#include "H264LiveVideoServerMediaSubsession.hh"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
#include <iostream>
using namespace std;
static void afterPlayingDummy(void* clientData);
static void checkForAuxSDPLine(void* clientData);
H264LiveVideoServerMediaSubsession* H264LiveVideoServerMediaSubsession ::
createNew(UsageEnvironment& env,GetFrameCB cb,int mchId,int msrcId, Boolean reuseFirstSource){
return new H264LiveVideoServerMediaSubsession(env,cb,mchId,msrcId,reuseFirstSource);
}
FramedSource* H264LiveVideoServerMediaSubsession ::
createNewStreamSource(unsigned clientSessionId,unsigned & estBitrate){
estBitrate = 500; // kbps, estimate
// Create the video source:
  if (tempCb != NULL) cout << "create new stream source ------------------>" << endl;
ByteFrameLiveVideoSource* liveVideoSource = ByteFrameLiveVideoSource::createNew(envir(),tempCb,chId,srcId);
return H264VideoStreamFramer::createNew(envir(), liveVideoSource);
}
RTPSink* H264LiveVideoServerMediaSubsession::
createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic,
                 FramedSource* /*inputSource*/) {
  cout << "H264LiveVideoServerMediaSubsession::createNewRTPSink --------------->" << endl;
  return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}
H264LiveVideoServerMediaSubsession::
H264LiveVideoServerMediaSubsession(UsageEnvironment& env, GetFrameCB cb,
                                   int mchId, int msrcId, Boolean reuseFirstSource)
  : OnDemandServerMediaSubsession(env, reuseFirstSource),
    tempCb(cb), chId(mchId), srcId(msrcId),
    fAuxSDPLine(NULL), fDoneFlag(0), fDummyRTPSink(NULL) {
}
H264LiveVideoServerMediaSubsession::~H264LiveVideoServerMediaSubsession() {
  delete[] fAuxSDPLine;
}
char const* H264LiveVideoServerMediaSubsession::
getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // already set up (for a previous client)
  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // For H.264, the SDP "sprop-parameter-sets" aren't known until SPS/PPS NALs
    // have been read from the stream, so start 'playing' into a dummy sink:
    fDummyRTPSink = rtpSink;
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
  }
  envir().taskScheduler().doEventLoop(&fDoneFlag);
  cout << "getAuxSDPLine: ok ? " << (fAuxSDPLine != NULL) << endl;
  return fAuxSDPLine;
}
static void afterPlayingDummy(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->afterPlayingDummy1();
}
void H264LiveVideoServerMediaSubsession::afterPlayingDummy1() {
// Unschedule any pending 'checking' task:
envir().taskScheduler().unscheduleDelayedTask(nextTask());
// Signal the event loop that we're done:
setDoneFlag();
}
static void checkForAuxSDPLine(void* clientData) {
H264LiveVideoServerMediaSubsession* subsess = (H264LiveVideoServerMediaSubsession*)clientData;
subsess->checkForAuxSDPLine1();
}
void H264LiveVideoServerMediaSubsession::checkForAuxSDPLine1() {
char const* dasl;
if (fAuxSDPLine != NULL) {
// Signal the event loop that we're done:
setDoneFlag();
} else if (fDummyRTPSink != NULL && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
fAuxSDPLine = strDup(dasl);
fDummyRTPSink = NULL;
// Signal the event loop that we're done:
setDoneFlag();
} else {
// try again after a brief delay:
int uSecsToDelay = 100000; // 100 ms
nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
(TaskFunc*)checkForAuxSDPLine, this);
}
}
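Note how the aux-SDP machinery above fits together: getAuxSDPLine() starts playing into the newly created RTPSink, re-checks auxSDPLine() every 100 ms via the scheduled task, and blocks in a nested doEventLoop(&fDoneFlag) until either checkForAuxSDPLine1() (parameter sets found) or afterPlayingDummy1() (stream ended) sets the flag. In practice this means the first frames delivered by GetFrameCB must contain SPS/PPS NAL units, otherwise every new client will spin in this loop.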
ByteFrameLiveVideoSource (header and implementation)
#ifndef _BYTE_FRAME_LIVE_VIDEO_SOURCE_HH_
#define _BYTE_FRAME_LIVE_VIDEO_SOURCE_HH_
#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif
typedef int (*GetFrameCB)(int chId,int srcId,unsigned char* buf,int size);
class ByteFrameLiveVideoSource: public FramedSource{
public:
static ByteFrameLiveVideoSource* createNew(UsageEnvironment& env,
GetFrameCB funcCb,int chId=0,int srcId =0,
unsigned preferredFrameSize = 0,
unsigned playTimePerFrame = 0);
//void seekToByteAbsolute(u_int64_t byteNumber, u_int64_t numBytesToStream = 0);
// if "numBytesToStream" is >0, then we limit the stream to that number of bytes, before treating it as EOF
protected:
ByteFrameLiveVideoSource(UsageEnvironment& env,
int mchId,int msrcId,
unsigned preferredFrameSize,
unsigned playTimePerFrame);
// called only by createNew()
virtual ~ByteFrameLiveVideoSource();
static void getFrameableHandler(ByteFrameLiveVideoSource* source, int mask);
void doGetNextFrameFormEncoder();
private:
// redefined virtual functions:
virtual void doGetNextFrame();
virtual void doStopGettingFrames();
GetFrameCB getFrame;
private:
int chId;
int srcId;
unsigned fPreferredFrameSize;
unsigned fPlayTimePerFrame;
Boolean fFidIsSeekable;
unsigned fLastPlayTime;
Boolean fHaveStartedReading;
Boolean fLimitNumBytesToStream;
u_int64_t fNumBytesToStream; // used iff "fLimitNumBytesToStream" is True
};
#endif
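One wart worth noting before the implementation: this header's GetFrameCB takes unsigned char* while lvrs.h's myGetFrameCB takes char*, which is why liveVideoServerStart() has to force-cast the callback. If both headers can be touched, a single shared typedef removes the cast; a sketch:

// Hypothetical unified callback type, replacing both myGetFrameCB (lvrs.h)
// and GetFrameCB (ByteFrameLiveVideoSource.hh):
typedef int (*GetFrameCB)(int chId, int srcId, unsigned char* buf, int size);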
#include "ByteFrameLiveVideoSource.hh"
#include "GroupsockHelper.hh"
#include <iostream>
using namespace std;
#define GETFRAME_HANDLER_ID 5006
#define READ_FROM_FILES_SYNCHRONOUSLY
ByteFrameLiveVideoSource*
ByteFrameLiveVideoSource::createNew(UsageEnvironment& env,
GetFrameCB funcCb,int chId,int srcId,
unsigned preferredFrameSize,
unsigned playTimePerFrame) {
ByteFrameLiveVideoSource* newSource = new ByteFrameLiveVideoSource(env, chId, srcId,preferredFrameSize, playTimePerFrame);
newSource->getFrame = funcCb;
return newSource;
}
ByteFrameLiveVideoSource::ByteFrameLiveVideoSource(UsageEnvironment& env,
int mchId,int msrcId,
unsigned preferredFrameSize,
unsigned playTimePerFrame)
: FramedSource(env),chId(mchId),srcId(msrcId),fPreferredFrameSize(preferredFrameSize),
fPlayTimePerFrame(playTimePerFrame), fLastPlayTime(0),
fHaveStartedReading(False), fLimitNumBytesToStream(False), fNumBytesToStream(0) {
fMaxSize = 1920*1080*3/2;
}
ByteFrameLiveVideoSource::~ByteFrameLiveVideoSource(){
}
void ByteFrameLiveVideoSource::getFrameableHandler(ByteFrameLiveVideoSource* source, int /*mask*/) {
if (!source->isCurrentlyAwaitingData()) {
source->doStopGettingFrames(); // we're not ready for the data yet
return;
}//
//cout<<"[debug_msg]#"<<"do read from file"<doGetNextFrameFormEncoder();
}
void ByteFrameLiveVideoSource:: doGetNextFrameFormEncoder(){
//printf("doGetNextFrameFormEncoder================== \r\n");
// Try to read as many bytes as will fit in the buffer provided (or "fPreferredFrameSize" if less)
if (fLimitNumBytesToStream && fNumBytesToStream < (u_int64_t)fMaxSize) {
fMaxSize = (unsigned)fNumBytesToStream;
}
if (fPreferredFrameSize > 0 && fPreferredFrameSize < fMaxSize) {
fMaxSize = fPreferredFrameSize;
}
cout<<"famxSize: "< 0 && fPreferredFrameSize > 0) {
if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0) {
// This is the first frame, so use the current time:
gettimeofday(&fPresentationTime, NULL);
} else {
// Increment by the play time of the previous data:
unsigned uSeconds = fPresentationTime.tv_usec + fLastPlayTime;
fPresentationTime.tv_sec += uSeconds/1000000;
fPresentationTime.tv_usec = uSeconds%1000000;
}
// Remember the play time of this data:
fLastPlayTime = (fPlayTimePerFrame*fFrameSize)/fPreferredFrameSize;
fDurationInMicroseconds = fLastPlayTime;
} else {
// We don't know a specific play time duration for this data,
// so just record the current time as being the 'presentation time':
gettimeofday(&fPresentationTime, NULL);
}
// Inform the reader that he has data:
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
// To avoid possible infinite recursion, we need to return to the event loop to do this:
// Note: GETFRAME_HANDLER_ID (5006) is used here as the delay in microseconds (~5 ms):
nextTask() = envir().taskScheduler().scheduleDelayedTask(GETFRAME_HANDLER_ID,
    (TaskFunc*)FramedSource::afterGetting, this);
#else
// Because the file read was done from the event loop, we can call the
// 'after getting' function directly, without risk of infinite recursion:
FramedSource::afterGetting(this);
#endif
}
void ByteFrameLiveVideoSource:: doGetNextFrame()
{
// printf("ByteFrameLiveVideoSource doGetNextFrame : \r\n");
if(fLimitNumBytesToStream && fNumBytesToStream == 0) {
handleClosure(this);
return;
}
// printf("ByteFrameLiveVideoSource doGetNextFrame 1: \r\n");
#ifdef READ_FROM_FILES_SYNCHRONOUSLY
//cout<<"[debug_msg]#"<<"do read from file"<
Test
#include "lvrs.h"
#include <stdio.h>
int readFrame(int chId, int srcId, char* buf, int size){
  printf("get frame ------------\n");
  return 1024; // stub: pretend 1024 bytes of frame data were copied into buf
}
int main(){
  liveVideoServerStart(readFrame);
  return 0;
}
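The stub above only proves the pipeline runs; it reports 1024 bytes without ever filling buf, so no client will decode anything. A slightly more realistic sketch feeds the server from a raw Annex-B H.264 file (the file name test.264 and the fixed 4 KB chunking are assumptions for illustration; H264VideoStreamFramer re-parses NAL units from the byte stream, so exact frame boundaries are not required):

#include <stdio.h>
#include "lvrs.h"

/* Hypothetical test source: serve successive chunks of an Annex-B H.264
 * file, looping at EOF. Returning 0 would make the video source close
 * the stream. */
static int readFileFrame(int chId, int srcId, char* buf, int size) {
    static FILE* fp = NULL;
    int want = size < 4096 ? size : 4096;
    int n;
    if (fp == NULL) fp = fopen("test.264", "rb"); /* assumed test file */
    if (fp == NULL) return 0;
    n = (int)fread(buf, 1, want, fp);
    if (n <= 0) { /* EOF: rewind and loop the file */
        rewind(fp);
        n = (int)fread(buf, 1, want, fp);
    }
    return n; /* number of bytes placed in buf */
}

int main() {
    liveVideoServerStart(readFileFrame);
    return 0;
}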