博客前面两篇基本介绍了live555的一些入门知识以及大致的工作流程框架。
下面就是代码的实现,介绍如何通过自己实现的子类来实现文件流的播放。
主要实现两个子类即可:FramedSource 和 FileServerMediaSubsession。
Subsession来建立任务,Source获取视频源数据,然后subsession新建rtpsink来发送视频数据到client即可。
实现的文件如下:
头文件
cpp文件
main.cpp文件主要是建立rtsp服务,然后把session添加到链表中。
具体实现如下:
/*
* =====================================================================================
*
* Filename: main.cpp
*
* Description: 初始化live555的工作环境,并建立RTSPServer,添加Subsession
*
* Version: 1.0
* Created: 2015年09月07日 23时03分31秒
* Revision: none
* Compiler: gcc
*
* Author: max_min_,
* Organization:
*
* =====================================================================================
*/
#include <stdio.h>

#include "BasicUsageEnvironment.hh"
#include "RTSPServer.hh"
#include "DemoH264MediaSubsession.h"
// Set up the live555 environment, create an RTSPServer on port 554,
// and register one H.264 file-streaming subsession, then run the event loop.
int main(int argc, char* argv[])
{
	printf(" live555 stream start\n");
	// Begin by setting up the live555 usage environment.
	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
	UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
	UserAuthenticationDatabase* authDB = NULL;
#if ACCESS_CONTROL // optional client authentication
	// NOTE(review): argv[1]/argv[2] are used unchecked -- verify argc >= 3
	// before building with ACCESS_CONTROL enabled.
	authDB = new UserAuthenticationDatabase;
	authDB->addUserRecord(argv[1], argv[2]);
#endif
	RTSPServer* rtspServer = NULL;
	portNumBits rtspServerPortNum = 554; // standard RTSP port (usually needs root)
	// Create the RTSP server.
	rtspServer = RTSPServer::createNew(*env, rtspServerPortNum, authDB);
	if (rtspServer == NULL)
	{
		*env << " create RTSPServer Failed:" << env->getResultMsg() << "\n";
		return 1; // FIX: signal failure to the caller (was: return 0)
	}
	const char* description = " Session Test By live555 Stream";
	// H264 subsession
	const char* streamName = "h264_streaming";
	const char* inputName = "tc10.264";
	ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, description);
	// Add our derived MediaSubsession to the ServerMediaSession.
	// When a client connects, the server looks up the subsession by this streamName.
	sms->addSubsession(DemoH264MediaSubsession::createNew(*env, inputName, false));
	rtspServer->addServerMediaSession(sms);
	char* url = rtspServer->rtspURL(sms);
	*env << "URL:" << url << "\n";
	delete[] url; // FIX: rtspURL() returns a heap-allocated string the caller must free
	// Enter the event loop; this call does not return.
	env->taskScheduler().doEventLoop();
	return 0;
}
FramedSource子类的实现:
#include <stdio.h>
#include "DemoH264FrameSource.h"
// Open the media file this source will deliver from. For a live (real-time)
// source, this constructor is where capture setup would go instead.
DemoH264FrameSource::DemoH264FrameSource(UsageEnvironment& env, const char* fileName,
		unsigned int preferredFrameSize, unsigned int playTimePerFrame) : FramedSource(env)
{
	fp = fopen(fileName, "rb");
	if (fp == NULL)
	{
		// FIX: report the failure instead of silently keeping a NULL FILE*
		// (doGetNextFrame() would otherwise crash on the first read).
		env << "DemoH264FrameSource: failed to open \"" << fileName << "\"\n";
	}
	// NOTE(review): preferredFrameSize / playTimePerFrame are currently unused.
}
DemoH264FrameSource::~DemoH264FrameSource()
{
	// FIX: close the file opened in the constructor (was leaked).
	if (fp != NULL)
	{
		fclose(fp);
		fp = NULL;
	}
}
// Factory method -- the conventional live555 way to construct a FramedSource.
DemoH264FrameSource* DemoH264FrameSource::createNew(UsageEnvironment& env, const char* fileName,
		unsigned preferredFrameSize, unsigned playTimePerFrame)
{
	DemoH264FrameSource* newSource =
			new DemoH264FrameSource(env, fileName, preferredFrameSize, playTimePerFrame);
	return newSource;
}
/* 获取需要读取文件的总长度,live555对每次数据的发送有长度限制 */
/* Return the total length of the file in bytes while preserving the current
 * file position. live555 caps how much data may be delivered per call, so
 * callers compare this length against fMaxSize. */
long filesize(FILE* stream)
{
	const long savedPos = ftell(stream);
	fseek(stream, 0L, SEEK_END);
	const long total = ftell(stream);
	fseek(stream, savedPos, SEEK_SET);
	return total;
}
// Deliver the next chunk of file data into fTo (at most fMaxSize bytes),
// then schedule FramedSource::afterGetting on the event loop.
void DemoH264FrameSource::doGetNextFrame()
{
	// FIX: the original compared the file's TOTAL size against fMaxSize, so a
	// file larger than fMaxSize was never rewound: once fread() hit EOF it
	// returned 0 forever and the stream stalled. Compare the bytes REMAINING
	// from the current position instead, and rewind once the whole file has
	// been delivered (simple looping playback, matching the small-file case).
	long remaining = filesize(fp) - ftell(fp);
	if (remaining <= 0)
	{
		fseek(fp, 0, SEEK_SET);
		remaining = filesize(fp);
	}
	if ((unsigned long)remaining > fMaxSize)
	{
		// Deliver at most fMaxSize bytes per call (live555's per-delivery limit).
		fFrameSize = fread(fTo, 1, fMaxSize, fp);
	}
	else
	{
		// The tail of the file fits in one delivery; the next call starts over.
		fFrameSize = fread(fTo, 1, (size_t)remaining, fp);
		fseek(fp, 0, SEEK_SET);
	}
	// NOTE(review): fPresentationTime / fDurationInMicroseconds are not set
	// here -- confirm the downstream H264VideoStreamFramer supplies timing.
	// Schedule afterGetting with zero delay, i.e. run it on the next event-loop
	// iteration rather than calling it directly (avoids unbounded recursion).
	nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
			(TaskFunc*)FramedSource::afterGetting, this);
}
FileServerMediaSubsession子类的实现:
#include "DemoH264MediaSubsession.h"
#include "DemoH264FrameSource.h"
#include "H264VideoStreamFramer.hh"
#include "H264VideoRTPSink.hh"
// Forward everything to the FileServerMediaSubsession base class.
DemoH264MediaSubsession::DemoH264MediaSubsession(UsageEnvironment& env, const char* fileName, bool reuseFirstSource)
	: FileServerMediaSubsession(env, fileName, reuseFirstSource)
{
	// FIX: removed `strcpy(fFileName, fileName)`. The FileServerMediaSubsession
	// base constructor already duplicates fileName into fFileName (via strDup),
	// so the strcpy was redundant -- and unsafe, since fFileName is not a
	// writable buffer owned by this subclass.
}
// No subclass-owned resources to release.
DemoH264MediaSubsession::~DemoH264MediaSubsession() {}
// Factory method following the live555 createNew convention.
DemoH264MediaSubsession*
DemoH264MediaSubsession::createNew(UsageEnvironment& env, const char* fileName, bool reuseFirstSource)
{
	return new DemoH264MediaSubsession(env, fileName, reuseFirstSource);
}
// Called by the framework per client session to build the media source chain:
// our raw file source wrapped in an H.264 framer. Returns NULL on failure.
FramedSource* DemoH264MediaSubsession::createNewStreamSource(unsigned clientsessionId, unsigned& estBitrate)
{
	estBitrate = 1000; // rough estimate (kbps) reported to the framework
	// Create our raw byte source for the file. (For a live stream this is
	// where a real-time source would be created instead.)
	DemoH264FrameSource* source = DemoH264FrameSource::createNew(envir(), fFileName);
	if (source == NULL)
	{
		envir() << " new source failed!\n";
		// FIX: bail out instead of handing a NULL source to the framer
		// (the original fell through and the framer dereferenced NULL).
		return NULL;
	}
	// Wrap the raw source in a framer that parses H.264 NAL units.
	return H264VideoStreamFramer::createNew(envir(), source);
}
// Create the RTP sink -- the consumer of the source chain built in
// createNewStreamSource(). An H.264 sink needs only the groupsock and
// payload type; inputSource is not required for its construction.
RTPSink* DemoH264MediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource* inputSource)
{
	RTPSink* videoSink = H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
	return videoSink;
}
主要就是继承FramedSource和FileServerMediaSubsession类,然后实现自己的source获取和session的处理。
下图是我运行起来的一个效果图:
后面会继续更新一下如何获取并发送实时流数据到客户端,这也是现在很多嵌入式设备经常需要实现的功能。用C开发时可能就需要自己实现RTSP了~~~