Building a streaming media server with live555 (1)

The server reads the elementary-stream file test.264, and a client can then play it back with ffplay.exe rtsp://xxx (a concrete example command is given after the listing).

The code below links directly against the pre-built live555 libraries and is adapted from testOnDemandRTSPServer.cpp.

#include "liveMedia.hh"

#include "BasicUsageEnvironment.hh"

 

#pragma comment (lib, "Ws2_32.lib")  

#pragma comment (lib, "BasicUsageEnvironment.lib")

#pragma comment (lib, "groupsock.lib")

#pragma comment (lib, "liveMedia.lib")

#pragma comment (lib, "UsageEnvironment.lib")

UsageEnvironment* env;



// To make the second and subsequent client for each stream reuse the same

// input stream as the first client (rather than playing the file from the

// start for each client), change the following "False" to "True":

Boolean reuseFirstSource = False;



// To stream *only* MPEG-1 or 2 video "I" frames

// (e.g., to reduce network bandwidth),

// change the following "False" to "True":

Boolean iFramesOnly = False;



static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,

               char const* streamName, char const* inputFileName); // fwd



static char newMatroskaDemuxWatchVariable;

static MatroskaFileServerDemux* demux;

static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {

  demux = newDemux;

  newMatroskaDemuxWatchVariable = 1;

}



int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
  // To implement client access control to the RTSP server, do the following:
  authDB = new UserAuthenticationDatabase;
  authDB->addUserRecord("username1", "password1"); // replace these with real strings
  // Repeat the above with each <username>, <password> that you wish to allow
  // access to the server.
#endif

  // Create the RTSP server:
  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }

  char const* descriptionString
    = "Session streamed by \"testOnDemandRTSPServer\"";

  // Set up each of the possible streams that can be served by the
  // RTSP server.  Each such stream is implemented using a
  // "ServerMediaSession" object, plus one or more
  // "ServerMediaSubsession" objects for each audio/video substream.

  // A H.264 video elementary stream:
  {
    char const* streamName = "h264ESVideoTest";
    char const* inputFileName = "test.264";
    ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, streamName, streamName,
                                      descriptionString);
    sms->addSubsession(H264VideoFileServerMediaSubsession
                       ::createNew(*env, inputFileName, reuseFirstSource));
    rtspServer->addServerMediaSession(sms);

    announceStream(rtspServer, sms, streamName, inputFileName);
  }
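
  // Additional streams can be registered the same way, each with its own
  // ServerMediaSession and subsession type.  As a sketch, mirroring the
  // AAC/ADTS example in the original testOnDemandRTSPServer.cpp (the file
  // "test.aac" is just a placeholder), an audio stream could be added here:
  //
  //   ServerMediaSession* aacSms
  //     = ServerMediaSession::createNew(*env, "aacAudioTest", "aacAudioTest",
  //                                     descriptionString);
  //   aacSms->addSubsession(ADTSAudioFileServerMediaSubsession
  //                         ::createNew(*env, "test.aac", reuseFirstSource));
  //   rtspServer->addServerMediaSession(aacSms);
  //   announceStream(rtspServer, aacSms, "aacAudioTest", "test.aac");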

  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).

  //if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
  //  *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
  //} else {
  //  *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
  //}

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}



static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName) {
  char* url = rtspServer->rtspURL(sms);
  UsageEnvironment& env = rtspServer->envir();
  env << "\n\"" << streamName << "\" stream, from the file \""
      << inputFileName << "\"\n";
  env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
}
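
When the server is running, announceStream() prints the exact RTSP URL to the console. Assuming the server is started on the local machine with the settings above (port 8554, stream name h264ESVideoTest, test.264 in the server's working directory), playback from the same machine would look like:

    ffplay rtsp://127.0.0.1:8554/h264ESVideoTest

Replace 127.0.0.1 with the server's IP address when playing from another host.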

 
