I recently built a complete pipeline (AR video capture, push streaming, and playback) with LFLiveKit and ijkPlayer. These notes record the workflow and my understanding and use of the LFLiveKit code. My abilities are limited, so corrections are welcome if you spot mistakes.
Capture side
Audio and video capture
On iOS, audio and video can be captured either through the camera APIs in AVFoundation or through the ARKit or RealityKit frameworks; ARKit and RealityKit also let you add virtual effects, so ARKit is used for capture here. If you are new to ARKit, look up the relevant material first. Straight to the code:
@property (nonatomic, strong) ARSCNView * scnView;
@property (nonatomic, strong) SCNScene * scene;
@property (nonatomic, strong) SCNNode * sunNode;
@property (nonatomic, strong) ARSession * session;
@property (nonatomic, strong) ARWorldTrackingConfiguration * config;
@property (nonatomic, strong) PFLiveSession * videoSession;
- (void)loadScnView
{
self.scnView = [[ARSCNView alloc] initWithFrame:CGRectMake(0, 0, ScreenSize.width, ScreenSize.height)];
[self.view addSubview:self.scnView];
// self.scnView.allowsCameraControl = YES;
self.scnView.showsStatistics = YES;
self.scnView.delegate = self;
self.session = [[ARSession alloc] init];
self.scnView.session = self.session;
self.scnView.session.delegate = self;
self.session.delegate = self;
[self loadMode];
self.config = [[ARWorldTrackingConfiguration alloc] init];
self.config.planeDetection = ARPlaneDetectionHorizontal; // detect horizontal planes
self.config.lightEstimationEnabled = YES; // enable real-world light estimation
self.config.providesAudioData = YES; // also capture audio
[self.session runWithConfiguration:self.config];
}
// Add an AR sphere
- (void)loadMode
{
SCNSphere * sunSphere = [SCNSphere sphereWithRadius:0.2];
sunSphere.firstMaterial.multiply.contents = @"art.scnassets/earth/sun.jpg";
sunSphere.firstMaterial.diffuse.contents = @"art.scnassets/earth/sun.jpg";
sunSphere.firstMaterial.multiply.intensity = 0.5;
sunSphere.firstMaterial.lightingModelName = SCNLightingModelConstant;
self.sunNode = [[SCNNode alloc] init];
self.sunNode.geometry = sunSphere;
self.sunNode.position = SCNVector3Make(0, 0, -2);
[self.scnView.scene.rootNode addChildNode:self.sunNode];
SCNAction * act = [SCNAction repeatActionForever:[SCNAction rotateByX:0 y:1 z:0 duration:1]];
[_sunNode runAction:act];
}
// Delegate callback that delivers the captured audio
- (void)session:(ARSession *)session didOutputAudioSampleBuffer:(CMSampleBufferRef)audioSampleBuffer
{
[self.videoSession captureOutputAudioData:audioSampleBuffer];
}
// This callback is used to read each frame after ARKit has rendered it; self.session.currentFrame.capturedImage only contains the camera image without the AR content
- (void)renderer:(id<SCNSceneRenderer>)renderer updateAtTime:(NSTimeInterval)time
{
if (renderer.currentRenderPassDescriptor.colorAttachments[0].texture == nil) {
return;
}
CVPixelBufferRef pixelBuffer = nil;
if (renderer.currentRenderPassDescriptor.colorAttachments[0].texture.iosurface == nil) {
return;
}
CVPixelBufferCreateWithIOSurface(kCFAllocatorDefault, renderer.currentRenderPassDescriptor.colorAttachments[0].texture.iosurface, nil, &pixelBuffer);
[self.videoSession captureOutputPixelBuffer:pixelBuffer];
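// Note: CVPixelBufferCreateWithIOSurface follows the create rule, so the buffer created above is owned by this method; once the session no longer needs it, it should be released with CVPixelBufferRelease, otherwise every rendered frame leaks.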
}
In LFLiveKit the camera capture is implemented with GPUImage (which is built on AVFoundation), and the captured data is exchanged through LFLiveSession, so all that is needed here is to replace the GPUImage data source inside LFLiveSession with our own ARKit data source.
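To make that replacement concrete, here is a minimal sketch of what the two forwarding methods used above (captureOutputPixelBuffer: and captureOutputAudioData:) could look like inside the session. The encoder properties (videoEncoder, audioEncoder) and the uploading flag are assumptions modelled on LFLiveSession, not the exact implementation:
// Sketch only: hand externally captured ARKit data to the encoders
- (void)captureOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    if (!self.uploading || pixelBuffer == NULL) return; // uploading flag is an assumption
    uint64_t timeStamp = (uint64_t)(CACurrentMediaTime() * 1000); // millisecond timestamp
    [self.videoEncoder encodeVideoData:pixelBuffer timeStamp:timeStamp];
}
- (void)captureOutputAudioData:(CMSampleBufferRef)sampleBuffer {
    if (!self.uploading || sampleBuffer == NULL) return;
    // Extract the raw PCM bytes before handing them to the AAC encoder
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t totalLength = 0;
    char *dataPointer = NULL;
    if (CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &totalLength, &dataPointer) != kCMBlockBufferNoErr) return;
    NSData *pcmData = [NSData dataWithBytes:dataPointer length:totalLength];
    [self.audioEncoder encodeAudioData:pcmData timeStamp:(uint64_t)(CACurrentMediaTime() * 1000)];
}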
Encoding
Video is encoded as H.264. LFLiveKit chooses between software and hardware encoding depending on the system version; since the systems targeted today are essentially all iOS 8 or later, the software path is dropped here and hardware encoding is used directly. The code:
- Configure the audio and video parameters
// Audio configuration
PFLiveAudioConfiguration *audioConfiguration = [PFLiveAudioConfiguration new];
audioConfiguration.numberOfChannels = 2; // number of channels
audioConfiguration.audioBitrate = PFLiveAudioBitRate_128Kbps; // audio bitrate
audioConfiguration.audioSampleRate = PFLiveAudioSampleRate_44100Hz; // audio sample rate
// Video configuration
PFLiveVideoConfiguration *videoConfiguration = [PFLiveVideoConfiguration new];
videoConfiguration.videoSize = ScreenSize; // video size
videoConfiguration.videoBitRate = 800*1024; // video bitrate: the amount of video (or audio) data per unit of time, in bps (bits per second), usually quoted as kbps or Mbps
videoConfiguration.videoMaxBitRate = 1000*1024; // maximum bitrate
videoConfiguration.videoMinBitRate = 500*1024; // minimum bitrate
videoConfiguration.videoFrameRate = 15; // frame rate (fps)
videoConfiguration.videoMaxKeyframeInterval = 30; // maximum keyframe interval, typically 2x the fps; determines the GOP size
videoConfiguration.outputImageOrientation = UIInterfaceOrientationPortrait; // video output orientation
videoConfiguration.sessionPreset = PFCaptureSessionPreset360x640; // capture resolution (all 16:9; if the device does not support this preset, the next lower one is used)
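With the configurations in place, the session can be created and the push started. A minimal sketch, assuming the PF-prefixed session mirrors LFLiveKit's LFLiveSession API (initWithAudioConfiguration:videoConfiguration:, startLive:, stopLive); the push URL is a placeholder:
self.videoSession = [[PFLiveSession alloc] initWithAudioConfiguration:audioConfiguration videoConfiguration:videoConfiguration];
self.videoSession.delegate = self;
PFLiveStreamInfo *streamInfo = [PFLiveStreamInfo new];
streamInfo.url = @"rtmp://your-server-ip:1935/live/stream"; // placeholder push URL
[self.videoSession startLive:streamInfo];
// ... and when finished:
// [self.videoSession stopLive];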
- Video encoding
// Create the video compression session and configure its properties
- (void)resetCompressionSession {
if (compressionSession) {
// To stop encoding on demand, this call forces the encoder to finish all pending frames
VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
// Invalidate the compression session and release its resources
VTCompressionSessionInvalidate(compressionSession);
// CFRelease(compressionSession);
compressionSession = NULL;
}
// Create the compression session
OSStatus status = VTCompressionSessionCreate(NULL, _configuration.videoSize.width, _configuration.videoSize.height, kCMVideoCodecType_H264, NULL, NULL, NULL, VideoCompressonOutputCallback, (__bridge void *)self, &compressionSession);
if (status != noErr) {
return;
}
_currentVideoBitRate = _configuration.videoBitRate;
// Maximum interval between keyframes, also known as the keyframe rate
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
// Maximum duration from one keyframe to the next
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, (__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval/_configuration.videoFrameRate));
// Expected frame rate
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)@(_configuration.videoFrameRate));
// Desired average bitrate, in bits per second
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(_configuration.videoBitRate));
// Hard limit on the data rate: [bytes, seconds]
NSArray *limit = @[@(_configuration.videoBitRate * 1.5/8), @(1)];
// upper bound on the data rate
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)limit);
// Whether the encoder is advised to compress in real time
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
// Profile and level of the encoded bitstream
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Main_AutoLevel);
// Whether frame reordering (B-frames) is enabled
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanTrue);
// Entropy coding mode for H.264 compression: CAVLC or CABAC
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_H264EntropyMode, kVTH264EntropyMode_CABAC);
// Resources needed for encoding can be allocated up front with the call below; if it is not called manually, this happens automatically on the first encode
VTCompressionSessionPrepareToEncodeFrames(compressionSession);
}
// Set the bitrate
- (void)setVideoBitRate:(NSInteger)videoBitRate {
if(_isBackGround) return;
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(videoBitRate));
// DataRateLimits is specified in bytes (per second)
NSArray *limit = @[@(videoBitRate * 1.5/8), @(1)];
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)limit);
_currentVideoBitRate = videoBitRate;
}
- (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
if(_isBackGround) return;
frameCount++;
CMTime presentationTimeStamp = CMTimeMake(frameCount, (int32_t)_configuration.videoFrameRate);
VTEncodeInfoFlags flags;
CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);
NSDictionary *properties = nil;
if (frameCount % (int32_t)_configuration.videoMaxKeyframeInterval == 0) {
properties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @YES};
}
NSNumber *timeNumber = @(timeStamp);
// Encode the frame; on success the compressed frame is delivered asynchronously
// to the VTCompressionOutputCallback registered when the session was created.
OSStatus status = VTCompressionSessionEncodeFrame(compressionSession, pixelBuffer, presentationTimeStamp, duration, (__bridge CFDictionaryRef)properties, (__bridge_retained void *)timeNumber, &flags);
if(status != noErr){
[self resetCompressionSession];
}
}
static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer){
if (!sampleBuffer) return;
// Get the sample attachments array from the encoded CMSampleBufferRef
CFArrayRef array = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
if (!array) return;
//
CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
if (!dic) return;
// kCMSampleAttachmentKey_NotSync: a sample is a keyframe when this key is absent
BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);
uint64_t timeStamp = [((__bridge_transfer NSNumber *)VTFrameRef) longLongValue];
PFHardwareVideoEncoder *videoEncoder = (__bridge PFHardwareVideoEncoder *)VTref;
if (status != noErr) {
return;
}
// keyframe marks a keyframe; videoEncoder->sps checks whether the SPS has already been captured
if (keyframe && !videoEncoder->sps) {
NSLog(@"got sps data");
// Get the format description; NULL is returned on error
CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
// Will receive the SPS length and the parameter-set count
size_t sparameterSetSize, sparameterSetCount;
// Pointer that will receive the SPS bytes
const uint8_t *sparameterSet;
// Returns the NAL unit at the given index from the format description; these NAL units are the parameter sets (SPS, PPS). Index 0 is the SPS
OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
if (statusCode == noErr) {
// Fetch the PPS the same way (index 1)
size_t pparameterSetSize, pparameterSetCount;
const uint8_t *pparameterSet;
OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
if (statusCode == noErr) {
// Store the SPS and PPS on the encoder instance
videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
// Concatenate the SPS and PPS
if (videoEncoder->enabledWriteVideoFile) {
// Prepend Annex-B start codes to the SPS and PPS and write them to the file as NAL units
NSMutableData *data = [[NSMutableData alloc] init];
uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
[data appendBytes:header length:4];
[data appendData:videoEncoder->sps];
[data appendBytes:header length:4];
[data appendData:videoEncoder->pps];
fwrite(data.bytes, 1, data.length, videoEncoder->fp);
}
}
}
}
// The encoded data
CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t length, totalLength;
char *dataPointer;
// Get a pointer to the data held in dataBuffer
OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
if (statusCodeRet == noErr) {
size_t bufferOffset = 0;
static const int AVCCHeaderLength = 4;
// Loop over the NAL units contained in dataBuffer
while (bufferOffset < totalLength - AVCCHeaderLength) {
// Read the NAL unit length
uint32_t NALUnitLength = 0;
// Copy AVCCHeaderLength bytes starting at dataPointer + bufferOffset into NALUnitLength
memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
// Convert from big-endian to host byte order
NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
// Build the frame object
PFVideoFrame *videoFrame = [PFVideoFrame new];
videoFrame.timestamp = timeStamp;
videoFrame.data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
videoFrame.isKeyFrame = keyframe;
videoFrame.sps = videoEncoder->sps;
videoFrame.pps = videoEncoder->pps;
if (videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]) {
[videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame];
}
// Write the data to the file as NAL units
if (videoEncoder->enabledWriteVideoFile) {
NSMutableData *data = [[NSMutableData alloc] init];
if (keyframe) { // keyframe handling
uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
[data appendBytes:header length:4];
} else {
// non-keyframe handling
uint8_t header[] = {0x00, 0x00, 0x01};
[data appendBytes:header length:3];
}
// Append the NAL unit payload
[data appendData:videoFrame.data];
fwrite(data.bytes, 1, data.length, videoEncoder->fp);
}
// Advance the offset past this NAL unit
bufferOffset += AVCCHeaderLength + NALUnitLength;
}
}
}
- Audio encoding
- (void)encodeAudioData:(nullable NSData*)audioData timeStamp:(uint64_t)timeStamp {
if (![self createAudioConvert]) {
return;
}
if(leftLength + audioData.length >= self.configuration.bufferLength){
///< enough data accumulated: encode and send
NSInteger totalSize = leftLength + audioData.length;
NSInteger encodeCount = totalSize/self.configuration.bufferLength;
char *totalBuf = malloc(totalSize);
char *p = totalBuf;
memset(totalBuf, 0, totalSize);
memcpy(totalBuf, leftBuf, leftLength);
memcpy(totalBuf + leftLength, audioData.bytes, audioData.length);
for(NSInteger index = 0;index < encodeCount;index++){
[self encodeBuffer:p timeStamp:timeStamp];
p += self.configuration.bufferLength;
}
// Keep the leftover bytes for the next call
leftLength = totalSize%self.configuration.bufferLength;
memset(leftBuf, 0, self.configuration.bufferLength);
memcpy(leftBuf, totalBuf + (totalSize -leftLength), leftLength);
free(totalBuf);
}else{
///< not enough data yet: accumulate
memcpy(leftBuf+leftLength, audioData.bytes, audioData.length);
leftLength = leftLength + audioData.length;
}
}
- (void)encodeBuffer:(char*)buf timeStamp:(uint64_t)timeStamp{
AudioBuffer inBuffer;
inBuffer.mNumberChannels = 1;
inBuffer.mData = buf;
inBuffer.mDataByteSize = (UInt32)self.configuration.bufferLength;
AudioBufferList buffers;
buffers.mNumberBuffers = 1;
buffers.mBuffers[0] = inBuffer;
AudioBufferList outBufferList;
outBufferList.mNumberBuffers = 1;
outBufferList.mBuffers[0].mNumberChannels = inBuffer.mNumberChannels;
outBufferList.mBuffers[0].mDataByteSize = inBuffer.mDataByteSize; // output buffer size
outBufferList.mBuffers[0].mData = aacBuf; // buffer that receives the AAC output
UInt32 outputDataPacketSize = 1;
if (AudioConverterFillComplexBuffer(m_converter, inputDataProc, &buffers, &outputDataPacketSize, &outBufferList, NULL) != noErr) {
return;
}
PFAudioFrame *audioFrame = [PFAudioFrame new];
audioFrame.timestamp = timeStamp;
audioFrame.data = [NSData dataWithBytes:aacBuf length:outBufferList.mBuffers[0].mDataByteSize];
char exeData[2];
exeData[0] = _configuration.asc[0];
exeData[1] = _configuration.asc[1];
audioFrame.audioInfo = [NSData dataWithBytes:exeData length:2];
if (self.aacDeleage && [self.aacDeleage respondsToSelector:@selector(audioEncoder:audioFrame:)]) {
[self.aacDeleage audioEncoder:self audioFrame:audioFrame];
}
if (self->enabledWriteVideoFile) {
NSData *adts = [self adtsData:_configuration.numberOfChannels rawDataLength:audioFrame.data.length];
fwrite(adts.bytes, 1, adts.length, self->fp);
fwrite(audioFrame.data.bytes, 1, audioFrame.data.length, self->fp);
}
}
- (void)stopEncoder {
}
#pragma mark -- CustomMethod
- (BOOL)createAudioConvert { // create an audio converter for the given input format
if (m_converter != nil) {
return TRUE;
}
AudioStreamBasicDescription inputFormat = {0};
inputFormat.mSampleRate = _configuration.audioSampleRate;
inputFormat.mFormatID = kAudioFormatLinearPCM;
inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
inputFormat.mFramesPerPacket = 1;
inputFormat.mBitsPerChannel = 16;
inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame;
inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket;
AudioStreamBasicDescription outputFormat; // the output audio format starts here
memset(&outputFormat, 0, sizeof(outputFormat));
outputFormat.mSampleRate = inputFormat.mSampleRate; // keep the same sample rate
outputFormat.mFormatID = kAudioFormatMPEG4AAC; // AAC encoding: kAudioFormatMPEG4AAC or kAudioFormatMPEG4AAC_HE_V2
outputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
outputFormat.mFramesPerPacket = 1024; // one AAC packet carries 1024 frames (samples per channel)
const OSType subtype = kAudioFormatMPEG4AAC;
AudioClassDescription requestedCodecs[2] = {
{
kAudioEncoderComponentType,
subtype,
kAppleSoftwareAudioCodecManufacturer
},
{
kAudioEncoderComponentType,
subtype,
kAppleHardwareAudioCodecManufacturer
}
};
OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter);
UInt32 outputBitrate = _configuration.audioBitrate;
UInt32 propSize = sizeof(outputBitrate);
if(result == noErr) {
result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate);
}
return YES;
}
#pragma mark -- AudioCallBack
OSStatus inputDataProc(AudioConverterRef inConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription * *outDataPacketDescription, void *inUserData) { // during AudioConverterFillComplexBuffer this callback is asked to supply the input data, i.e. the raw PCM
AudioBufferList bufferList = *(AudioBufferList *)inUserData;
ioData->mBuffers[0].mNumberChannels = 1;
ioData->mBuffers[0].mData = bufferList.mBuffers[0].mData;
ioData->mBuffers[0].mDataByteSize = bufferList.mBuffers[0].mDataByteSize;
return noErr;
}
Streaming (push)
The push part uses pili-librtmp from LFLiveKit; the annotations on the librtmp API below come mainly from the article 使用librtmp库进行推流与拉流. The code:
- Bitrate control
#import "PFStreamingBuffer.h"
#import "NSMutableArray+PFAdd.h"
static const NSUInteger defaultSortBufferMaxCount = 5;///< keep at most 5 frames in the sorting buffer
static const NSUInteger defaultUpdateInterval = 1;///< sampling interval: 1s
static const NSUInteger defaultCallBackInterval = 5;///< one network-monitoring period is 5 samples (5s)
static const NSUInteger defaultSendBufferMaxCount = 600;///< maximum send buffer length: 600 frames
@interface PFStreamingBuffer (){
dispatch_semaphore_t _lock;
}
@property (nonatomic, strong) NSMutableArray *sortList;
@property (nonatomic, strong, readwrite) NSMutableArray *list;
@property (nonatomic, strong) NSMutableArray *thresholdList;
/** Buffer-state handling */
@property (nonatomic, assign) NSInteger currentInterval; // time accumulated in the current monitoring period
@property (nonatomic, assign) NSInteger callBackInterval; // length of one monitoring period
@property (nonatomic, assign) NSInteger updateInterval; // sampling interval
@property (nonatomic, assign) BOOL startTimer; // whether the sampling timer has started
@end
@implementation PFStreamingBuffer
- (instancetype)init {
if (self = [super init]) {
_lock = dispatch_semaphore_create(1);
self.updateInterval = defaultUpdateInterval;
self.callBackInterval = defaultCallBackInterval;
self.maxCount = defaultSendBufferMaxCount;
self.lastDropFrames = 0;
self.startTimer = NO;
}
return self;
}
#pragma mark -- Custom
- (void)appendObject:(PFFrame *)frame {
if (!frame) return;
if (!_startTimer) {
_startTimer = YES;
[self tick]; // start monitoring
}
dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
if (self.sortList.count < defaultSortBufferMaxCount) { // while the sorting buffer is below its maximum, just append the new frame
[self.sortList addObject:frame];
} else {
///< sort
[self.sortList addObject:frame];
[self.sortList sortUsingFunction:frameDataCompare context:nil]; // sort the frames by timestamp
/// drop frames if necessary
[self removeExpireFrame];
/// move one frame into the send buffer
PFFrame *firstFrame = [self.sortList pfPopFirstObject];
if (firstFrame) [self.list addObject:firstFrame];
}
dispatch_semaphore_signal(_lock);
}
- (PFFrame *)popFirstObject {
dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
PFFrame *firstFrame = [self.list pfPopFirstObject];
dispatch_semaphore_signal(_lock);
return firstFrame;
}
- (void)removeAllObject {
dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
[self.list removeAllObjects];
dispatch_semaphore_signal(_lock);
}
// Drop expired frames once the send buffer is full
- (void)removeExpireFrame {
if (self.list.count < self.maxCount) return; // the buffer is still below the configured maximum
NSArray *pFrames = [self expirePFrames];///< the P frames between the first P frame and the first I frame
self.lastDropFrames += [pFrames count];
if (pFrames && pFrames.count > 0) {
[self.list removeObjectsInArray:pFrames];
return;
}
NSArray *iFrames = [self expireIFrames];///< drop one I frame (a single I frame may span several NAL units)
self.lastDropFrames += [iFrames count];
if (iFrames && iFrames.count > 0) {
[self.list removeObjectsInArray:iFrames];
return;
}
[self.list removeAllObjects];
}
// Collect expired frames: if the first frame is an I frame, the data between it and the next I frame is dropped; if it is not, everything before the first I frame is dropped
- (NSArray *)expirePFrames {
NSMutableArray *pframes = [[NSMutableArray alloc] init];
for (NSInteger index = 0; index < self.list.count; index++) {
PFFrame *frame = [self.list objectAtIndex:index];
if ([frame isKindOfClass:[PFVideoFrame class]]) {
PFVideoFrame *videoFrame = (PFVideoFrame *)frame;
if (videoFrame.isKeyFrame && pframes.count > 0) {
break;
} else if (!videoFrame.isKeyFrame) {
[pframes addObject:frame];
}
}
}
return pframes;
}
//
- (NSArray *)expireIFrames {
NSMutableArray *iframes = [[NSMutableArray alloc] init];
uint64_t timeStamp = 0;
for (NSInteger index = 0; index < self.list.count; index++) {
PFFrame *frame = [self.list objectAtIndex:index];
// collect the leading I frame(s)
if ([frame isKindOfClass:[PFVideoFrame class]] && ((PFVideoFrame *)frame).isKeyFrame) {
if (timeStamp != 0 && timeStamp != frame.timestamp) {
break;
}
[iframes addObject:frame];
timeStamp = frame.timestamp;
}
}
return iframes;
}
//
NSInteger frameDataCompare(id obj1, id obj2, void *context){
PFFrame *frame1 = (PFFrame *)obj1;
PFFrame *frame2 = (PFFrame *)obj2;
if (frame1.timestamp == frame2.timestamp) {
return NSOrderedSame;
}else if (frame1.timestamp > frame2.timestamp){
return NSOrderedDescending;
}
return NSOrderedAscending;
}
// Compares the queue length across five samples of self.list: if the amount of buffered data keeps growing, increaseCount rises and the bitrate should be lowered;
// if the amount keeps shrinking, decreaseCount rises and the bitrate can be raised
- (PFLiveBuffferState)currentBufferState {
NSInteger currentCount = 0;
NSInteger increaseCount = 0;
NSInteger decreaseCount = 0;
NSLog(@"sample count:%ld", self.thresholdList.count);
for (NSNumber *number in self.thresholdList) {
NSLog(@"number:%ld--currentCount:%ld--increaseCount:%ld--decreaseCount:%ld", number.integerValue, currentCount, increaseCount, decreaseCount);
if (number.integerValue > currentCount) {
// buffer growing: the bitrate should be lowered
increaseCount++;
} else{
// buffer shrinking: the bitrate can be raised
decreaseCount++;
}
currentCount = [number integerValue];
}
if (increaseCount >= self.callBackInterval) {
// lower the bitrate
NSLog(@"lower the bitrate");
return PFLiveBuffferIncrease;
}
if (decreaseCount >= self.callBackInterval) {
// raise the bitrate
NSLog(@"raise the bitrate");
return PFLiveBuffferDecline;
}
return PFLiveBuffferUnknown;
}
#pragma mark -- Setter Getter
- (NSMutableArray *)list {
if (!_list) {
_list = [[NSMutableArray alloc] init];
}
return _list;
}
- (NSMutableArray *)sortList {
if (!_sortList) {
_sortList = [[NSMutableArray alloc] init];
}
return _sortList;
}
- (NSMutableArray *)thresholdList {
if (!_thresholdList) {
_thresholdList = [[NSMutableArray alloc] init];
}
return _thresholdList;
}
#pragma mark -- Sampling
- (void)tick {
/** Sampling: if the network is consistently good or consistently bad across one period, notify the delegate */
_currentInterval += self.updateInterval;
dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
[self.thresholdList addObject:@(self.list.count)];
dispatch_semaphore_signal(_lock);
// NSLog(@"currentInterval:%ld--callBackInterval:%ld--updateInterval:%ld", self.currentInterval, self.callBackInterval, self.updateInterval);
if (self.currentInterval >= self.callBackInterval) { // once a full monitoring period (5s) has elapsed
PFLiveBuffferState state = [self currentBufferState];
if (state == PFLiveBuffferIncrease) {
if (self.delegate && [self.delegate respondsToSelector:@selector(streamingBuffer:bufferState:)]) {
[self.delegate streamingBuffer:self bufferState:PFLiveBuffferIncrease];
}
} else if (state == PFLiveBuffferDecline) {
if (self.delegate && [self.delegate respondsToSelector:@selector(streamingBuffer:bufferState:)]) {
// report the buffer state back to the session so it can adjust the bitrate
[self.delegate streamingBuffer:self bufferState:PFLiveBuffferDecline];
}
}
self.currentInterval = 0;
[self.thresholdList removeAllObjects];
}
__weak typeof(self) _self = self;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(self.updateInterval * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
__strong typeof(_self) self = _self;
[self tick];
});
}
@end
- The push socket
- (nullable instancetype)initWithStream:(nullable PFLiveStreamInfo *)stream{
return [self initWithStream:stream reconnectInterval:0 reconnectCount:0];
}
- (nullable instancetype)initWithStream:(nullable PFLiveStreamInfo *)stream reconnectInterval:(NSInteger)reconnectInterval reconnectCount:(NSInteger)reconnectCount{
if (!stream) @throw [NSException exceptionWithName:@"LFStreamRtmpSocket init error" reason:@"stream is nil" userInfo:nil];
if (self = [super init]) {
_stream = stream;
if (reconnectInterval > 0) _reconnectInterval = reconnectInterval;
else _reconnectInterval = RetryTimesMargin;
if (reconnectCount > 0) _reconnectCount = reconnectCount;
else _reconnectCount = RetryTimesBreaken;
[self addObserver:self forKeyPath:@"isSending" options:NSKeyValueObservingOptionNew context:nil];// isSending is observed via KVO mainly so that sending can continue even after a send error
}
return self;
}
- (void)dealloc{
[self removeObserver:self forKeyPath:@"isSending"];
}
- (void)start {
dispatch_async(self.rtmpSendQueue, ^{
[self _start];
});
}
- (void)_start {
if (!_stream) return;
if (_isConnecting) return;
if (_rtmp != NULL) return;
self.debugInfo.streamId = self.stream.streamId;
self.debugInfo.uploadUrl = self.stream.url;
self.debugInfo.isRtmp = YES;
if (_isConnecting) return;
_isConnecting = YES;
if (self.delegate && [self.delegate respondsToSelector:@selector(socketStatus:status:)]) {
// report the connection state: pending
[self.delegate socketStatus:self status:PFLivePending];
}
if (_rtmp != NULL) { // if a connection already exists, tear it down first
PILI_RTMP_Close(_rtmp, &_error);
PILI_RTMP_Free(_rtmp);
}
// connect to the remote server
[self RTMP264_Connect:(char *)[_stream.url cStringUsingEncoding:NSASCIIStringEncoding]];
}
// Stop pushing
- (void)stop {
dispatch_async(self.rtmpSendQueue, ^{
[self _stop];
[NSObject cancelPreviousPerformRequestsWithTarget:self];
});
}
- (void)_stop {
if (self.delegate && [self.delegate respondsToSelector:@selector(socketStatus:status:)]) {
[self.delegate socketStatus:self status:PFLiveStop];
}
if (_rtmp != NULL) {
PILI_RTMP_Close(_rtmp, &_error);
PILI_RTMP_Free(_rtmp);
_rtmp = NULL;
}
[self clean];
}
- (void)sendFrame:(PFFrame *)frame {
if (!frame) return;
// append the frame to the buffer queue
[self.buffer appendObject:frame];
if(!self.isSending){
[self sendFrame];
}
}
- (void)setDelegate:(id)delegate {
_delegate = delegate;
}
#pragma mark -- CustomMethod
- (void)sendFrame {
__weak typeof(self) _self = self;
dispatch_async(self.rtmpSendQueue, ^{
if (!_self.isSending && _self.buffer.list.count > 0) {
_self.isSending = YES;
if (!_self.isConnected || _self.isReconnecting || _self.isConnecting || !_rtmp){ // bail out unless usable: not connected / reconnecting / still connecting / no rtmp handle
_self.isSending = NO;
return;
}
// pop the first frame from the buffer
PFFrame *frame = [_self.buffer popFirstObject];
if ([frame isKindOfClass:[PFVideoFrame class]]) { // 如果是视频数据
// if the video header has not been sent yet, send it first
if (!_self.sendVideoHead) {
_self.sendVideoHead = YES;
if(!((PFVideoFrame*)frame).sps || !((PFVideoFrame*)frame).pps){
_self.isSending = NO;
return;
}
// send the header first
[_self sendVideoHeader:(PFVideoFrame *)frame];
} else {
// send a regular video frame
[_self sendVideo:(PFVideoFrame *)frame];
}
} else { // audio data
if (!_self.sendAudioHead) {
_self.sendAudioHead = YES;
if(!((PFAudioFrame*)frame).audioInfo){
_self.isSending = NO;
return;
}
[_self sendAudioHeader:(PFAudioFrame *)frame];
} else {
[_self sendAudio:frame];
}
}
// update the debug info
_self.debugInfo.totalFrame++;
_self.debugInfo.dropFrame += _self.buffer.lastDropFrames;
_self.buffer.lastDropFrames = 0;
_self.debugInfo.dataFlow += frame.data.length;
_self.debugInfo.elapsedMilli = CACurrentMediaTime() * 1000 - _self.debugInfo.timeStamp;
if (_self.debugInfo.elapsedMilli < 1000) {
_self.debugInfo.bandwidth += frame.data.length;
if ([frame isKindOfClass:[PFAudioFrame class]]) {
_self.debugInfo.capturedAudioCount++;
} else {
_self.debugInfo.capturedVideoCount++;
}
_self.debugInfo.unSendCount = _self.buffer.list.count;
} else {
_self.debugInfo.currentBandwidth = _self.debugInfo.bandwidth;
_self.debugInfo.currentCapturedAudioCount = _self.debugInfo.capturedAudioCount;
_self.debugInfo.currentCapturedVideoCount = _self.debugInfo.capturedVideoCount;
if (_self.delegate && [_self.delegate respondsToSelector:@selector(socketDebug:debugInfo:)]) {
[_self.delegate socketDebug:_self debugInfo:_self.debugInfo];
}
_self.debugInfo.bandwidth = 0;
_self.debugInfo.capturedAudioCount = 0;
_self.debugInfo.capturedVideoCount = 0;
_self.debugInfo.timeStamp = CACurrentMediaTime() * 1000;
}
// reset the sending flag
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
//< dispatched async purely so that sendFrame is not called recursively; the current call returns before the next one starts
_self.isSending = NO;
});
}
});
}
- (void)clean {
_isConnecting = NO;
_isReconnecting = NO;
_isSending = NO;
_isConnected = NO;
_sendAudioHead = NO;
_sendVideoHead = NO;
self.debugInfo = nil;
[self.buffer removeAllObject];
self.retryTimes4netWorkBreaken = 0;
}
// Establish the connection
- (NSInteger)RTMP264_Connect:(char *)push_url {
_rtmp = PILI_RTMP_Alloc();
PILI_RTMP_Init(_rtmp);
// set the session parameters (URL)
if (PILI_RTMP_SetupURL(_rtmp, push_url, &_error) == FALSE) {
//log(LOG_ERR, "RTMP_SetupURL() failed!");
goto Failed;
}
// set the error and connection-time callbacks
_rtmp->m_errorCallback = RTMPErrorCallback;
_rtmp->m_connCallback = ConnectionTimeCallback;
_rtmp->m_userData = (__bridge void *)self;
_rtmp->m_msgCounter = 1;
_rtmp->Link.timeout = RTMP_RECEIVE_TIMEOUT; // connection timeout
// calling this enables publishing (push); without it the session would be used for playback (pull)
PILI_RTMP_EnableWrite(_rtmp);
// establish the NetConnection of the RTMP session
if (PILI_RTMP_Connect(_rtmp, NULL, &_error) == FALSE) {
goto Failed;
}
// establish the NetStream of the RTMP session
if (PILI_RTMP_ConnectStream(_rtmp, 0, &_error) == FALSE) {
goto Failed;
}
// report to the delegate that streaming has started
if (self.delegate && [self.delegate respondsToSelector:@selector(socketStatus:status:)]) {
[self.delegate socketStatus:self status:PFLiveStart];
}
[self sendMetaData];
_isConnected = YES;
_isConnecting = NO;
_isReconnecting = NO;
_isSending = NO;
return 0;
Failed:
PILI_RTMP_Close(_rtmp, &_error);
PILI_RTMP_Free(_rtmp);
_rtmp = NULL;
[self reconnect];
return -1;
}
#pragma mark -- Rtmp Send
- (void)sendMetaData {
PILI_RTMPPacket packet;
char pbuf[2048], *pend = pbuf + sizeof(pbuf);
packet.m_nChannel = 0x03; // control channel (invoke)
packet.m_headerType = RTMP_PACKET_SIZE_LARGE; // full (type 0) chunk header
packet.m_packetType = RTMP_PACKET_TYPE_INFO; // packet type: metadata
packet.m_nTimeStamp = 0; // timestamp
packet.m_nInfoField2 = _rtmp->m_stream_id; // stream id
packet.m_hasAbsTimestamp = TRUE; // absolute timestamp
packet.m_body = pbuf + RTMP_MAX_HEADER_SIZE;
char *enc = packet.m_body;
enc = AMF_EncodeString(enc, pend, &av_setDataFrame);
enc = AMF_EncodeString(enc, pend, &av_onMetaData);
*enc++ = AMF_OBJECT;
enc = AMF_EncodeNamedNumber(enc, pend, &av_duration, 0.0);
enc = AMF_EncodeNamedNumber(enc, pend, &av_fileSize, 0.0);
// videosize
enc = AMF_EncodeNamedNumber(enc, pend, &av_width, _stream.videoConfiguration.videoSize.width);
enc = AMF_EncodeNamedNumber(enc, pend, &av_height, _stream.videoConfiguration.videoSize.height);
// video
enc = AMF_EncodeNamedString(enc, pend, &av_videocodecid, &av_avc1);
enc = AMF_EncodeNamedNumber(enc, pend, &av_videodatarate, _stream.videoConfiguration.videoBitRate / 1000.f);
enc = AMF_EncodeNamedNumber(enc, pend, &av_framerate, _stream.videoConfiguration.videoFrameRate);
// audio
enc = AMF_EncodeNamedString(enc, pend, &av_audiocodecid, &av_mp4a);
enc = AMF_EncodeNamedNumber(enc, pend, &av_audiodatarate, _stream.audioConfiguration.audioBitrate);
enc = AMF_EncodeNamedNumber(enc, pend, &av_audiosamplerate, _stream.audioConfiguration.audioSampleRate);
enc = AMF_EncodeNamedNumber(enc, pend, &av_audiosamplesize, 16.0);
enc = AMF_EncodeNamedBoolean(enc, pend, &av_stereo, _stream.audioConfiguration.numberOfChannels == 2);
// sdk version
enc = AMF_EncodeNamedString(enc, pend, &av_encoder, &av_SDKVersion);
*enc++ = 0;
*enc++ = 0;
*enc++ = AMF_OBJECT_END;
packet.m_nBodySize = (uint32_t)(enc - packet.m_body);
if (!PILI_RTMP_SendPacket(_rtmp, &packet, FALSE, &_error)) {
return;
}
}
- (void)sendVideoHeader:(PFVideoFrame *)videoFrame {
unsigned char *body = NULL;
NSInteger iIndex = 0;
NSInteger rtmpLength = 1024;
const char *sps = videoFrame.sps.bytes;
const char *pps = videoFrame.pps.bytes;
NSInteger sps_len = videoFrame.sps.length;
NSInteger pps_len = videoFrame.pps.length;
body = (unsigned char *)malloc(rtmpLength);
memset(body, 0, rtmpLength);
body[iIndex++] = 0x17;
body[iIndex++] = 0x00;
body[iIndex++] = 0x00;
body[iIndex++] = 0x00;
body[iIndex++] = 0x00;
body[iIndex++] = 0x01;
body[iIndex++] = sps[1];
body[iIndex++] = sps[2];
body[iIndex++] = sps[3];
body[iIndex++] = 0xff;
// the parameter-set lengths below are written in big-endian
/*sps*/
body[iIndex++] = 0xe1;
body[iIndex++] = (sps_len >> 8) & 0xff;
body[iIndex++] = sps_len & 0xff;
memcpy(&body[iIndex], sps, sps_len);
iIndex += sps_len;
/*pps*/
body[iIndex++] = 0x01;
body[iIndex++] = (pps_len >> 8) & 0xff;
body[iIndex++] = (pps_len) & 0xff;
memcpy(&body[iIndex], pps, pps_len);
iIndex += pps_len;
[self sendPacket:RTMP_PACKET_TYPE_VIDEO data:body size:iIndex nTimestamp:0];
free(body);
}
- (void)sendVideo:(PFVideoFrame *)frame {
NSInteger i = 0;
NSInteger rtmpLength = frame.data.length + 9;
unsigned char *body = (unsigned char *)malloc(rtmpLength);
memset(body, 0, rtmpLength);
if (frame.isKeyFrame) {
body[i++] = 0x17; // 1:Iframe 7:AVC
} else {
body[i++] = 0x27; // 2:Pframe 7:AVC
}
body[i++] = 0x01; // AVC NALU
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = (frame.data.length >> 24) & 0xff;
body[i++] = (frame.data.length >> 16) & 0xff;
body[i++] = (frame.data.length >> 8) & 0xff;
body[i++] = (frame.data.length) & 0xff;
memcpy(&body[i], frame.data.bytes, frame.data.length);
[self sendPacket:RTMP_PACKET_TYPE_VIDEO data:body size:(rtmpLength) nTimestamp:frame.timestamp];
free(body);
}
// Wrap the data in a PILI_RTMPPacket and send it
- (NSInteger)sendPacket:(unsigned int)nPacketType data:(unsigned char *)data size:(NSInteger)size nTimestamp:(uint64_t)nTimestamp {
// create the RTMPPacket
NSInteger rtmpLength = size;
PILI_RTMPPacket rtmp_pack;
PILI_RTMPPacket_Reset(&rtmp_pack);
PILI_RTMPPacket_Alloc(&rtmp_pack, (uint32_t)rtmpLength);
rtmp_pack.m_nBodySize = (uint32_t)size; // body length
memcpy(rtmp_pack.m_body, data, size);
rtmp_pack.m_hasAbsTimestamp = 0; // is the timestamp absolute or relative?
rtmp_pack.m_packetType = nPacketType; // message type ID (1-7 protocol control; 8,9 audio/video; 10 and above AMF-encoded messages)
if (_rtmp) rtmp_pack.m_nInfoField2 = _rtmp->m_stream_id;
rtmp_pack.m_nChannel = 0x04; // chunk stream id
rtmp_pack.m_headerType = RTMP_PACKET_SIZE_LARGE; // full (type 0) chunk header
if (RTMP_PACKET_TYPE_AUDIO == nPacketType && size != 4) {
rtmp_pack.m_headerType = RTMP_PACKET_SIZE_MEDIUM;
}
rtmp_pack.m_nTimeStamp = (uint32_t)nTimestamp;
NSInteger nRet = [self RtmpPacketSend:&rtmp_pack];
PILI_RTMPPacket_Free(&rtmp_pack);
return nRet;
}
// Send the packet
- (NSInteger)RtmpPacketSend:(PILI_RTMPPacket *)packet {
if (_rtmp && PILI_RTMP_IsConnected(_rtmp)) {
// send the packet and return the result
int success = PILI_RTMP_SendPacket(_rtmp, packet, 0, &_error);
return success;
}
return -1;
}
// Wrap and send the audio sequence header
- (void)sendAudioHeader:(PFAudioFrame *)audioFrame {
NSInteger rtmpLength = audioFrame.audioInfo.length + 2; /* spec data length, usually 2 */
unsigned char *body = (unsigned char *)malloc(rtmpLength);
memset(body, 0, rtmpLength);
/*AF 00 + AAC RAW data*/
body[0] = 0xAF;
body[1] = 0x00;
memcpy(&body[2], audioFrame.audioInfo.bytes, audioFrame.audioInfo.length); /* spec_buf is the AAC sequence header data */
[self sendPacket:RTMP_PACKET_TYPE_AUDIO data:body size:rtmpLength nTimestamp:0];
free(body);
}
// Wrap and send audio data
- (void)sendAudio:(PFFrame *)frame {
NSInteger rtmpLength = frame.data.length + 2; /* spec data length, usually 2 */
unsigned char *body = (unsigned char *)malloc(rtmpLength);
memset(body, 0, rtmpLength);
/*AF 01 + AAC RAW data*/
body[0] = 0xAF;
body[1] = 0x01;
memcpy(&body[2], frame.data.bytes, frame.data.length);
[self sendPacket:RTMP_PACKET_TYPE_AUDIO data:body size:rtmpLength nTimestamp:frame.timestamp];
free(body);
}
// Reconnect after the connection drops
- (void)reconnect {
dispatch_async(self.rtmpSendQueue, ^{
// the retry count is still below reconnectCount and no reconnect is currently in progress
if (self.retryTimes4netWorkBreaken++ < self.reconnectCount && !self.isReconnecting) {
self.isConnected = NO;
self.isConnecting = NO;
self.isReconnecting = YES;
dispatch_async(dispatch_get_main_queue(), ^{
// call the reconnect method again after the configured delay
[self performSelector:@selector(_reconnect) withObject:nil afterDelay:self.reconnectInterval];
});
} else if (self.retryTimes4netWorkBreaken >= self.reconnectCount) {
// once the retry count reaches reconnectCount, report the reconnect failure
if (self.delegate && [self.delegate respondsToSelector:@selector(socketStatus:status:)]) {
[self.delegate socketStatus:self status:PFLiveError];
}
if (self.delegate && [self.delegate respondsToSelector:@selector(socketDidError:errorCode:)]) {
[self.delegate socketDidError:self errorCode:PFLiveSocketError_ReConnectTimeOut];
}
}
});
}
// Perform the actual reconnect
- (void)_reconnect{
[NSObject cancelPreviousPerformRequestsWithTarget:self];
_isReconnecting = NO;
if (_isConnected) return;
if (_rtmp != NULL) {
PILI_RTMP_Close(_rtmp, &_error);
PILI_RTMP_Free(_rtmp);
_rtmp = NULL;
}
_sendAudioHead = NO;
_sendVideoHead = NO;
if (self.delegate && [self.delegate respondsToSelector:@selector(socketStatus:status:)]) {
[self.delegate socketStatus:self status:PFLiveRefresh];
}
if (_rtmp != NULL) {
PILI_RTMP_Close(_rtmp, &_error);
PILI_RTMP_Free(_rtmp);
}
[self RTMP264_Connect:(char *)[_stream.url cStringUsingEncoding:NSASCIIStringEncoding]];
}
#pragma mark -- CallBack
void RTMPErrorCallback(RTMPError *error, void *userData) {
PFStreamRTMPSocket *socket = (__bridge PFStreamRTMPSocket *)userData;
if (error->code < 0) {
[socket reconnect];
}
}
void ConnectionTimeCallback(PILI_CONNECTION_TIME *conn_time, void *userData) {
}
#pragma mark -- LFStreamingBufferDelegate
- (void)streamingBuffer:(nullable PFStreamingBuffer *)buffer bufferState:(PFLiveBuffferState)state{
if(self.delegate && [self.delegate respondsToSelector:@selector(socketBufferStatus:status:)]){
[self.delegate socketBufferStatus:self status:state];
}
}
- The librtmp header
typedef struct PILI_RTMPChunk {
int c_headerSize;
int c_chunkSize;
char *c_chunk;
char c_header[RTMP_MAX_HEADER_SIZE];
} PILI_RTMPChunk;
typedef struct PILI_RTMPPacket {
uint8_t m_headerType; // chunk header type
uint8_t m_packetType; // payload format (message type)
uint8_t m_hasAbsTimestamp; // whether the timestamp is absolute
int m_nChannel; // chunk stream ID
uint32_t m_nTimeStamp; // timestamp
int32_t m_nInfoField2; // message stream ID
uint32_t m_nBodySize; // payload size
uint32_t m_nBytesRead; // number of payload bytes read so far
PILI_RTMPChunk *m_chunk; // if non-NULL when RTMP_ReadPacket() is called, the raw chunk info is wanted; normally set to NULL
char *m_body; // payload pointer
} PILI_RTMPPacket;
typedef struct PILI_RTMPSockBuf {
int sb_socket;
int sb_size; /* number of unprocessed bytes in buffer */
char *sb_start; /* pointer into sb_pBuffer of next byte to process */
char sb_buf[RTMP_BUFFER_CACHE_SIZE]; /* data read from socket */
int sb_timedout;
void *sb_ssl;
} PILI_RTMPSockBuf;
// Reset a packet
void PILI_RTMPPacket_Reset(PILI_RTMPPacket *p);
void PILI_RTMPPacket_Dump(PILI_RTMPPacket *p);
// Allocate the packet's payload buffer
int PILI_RTMPPacket_Alloc(PILI_RTMPPacket *p, int nSize);
// Free the payload buffer
void PILI_RTMPPacket_Free(PILI_RTMPPacket *p);
// Check whether the packet is complete; a packet split across chunks is not ready until fully received
#define RTMPPacket_IsReady(a) ((a)->m_nBytesRead == (a)->m_nBodySize)
typedef struct PILI_RTMP_LNK {
AVal hostname;
AVal domain;
AVal sockshost;
AVal playpath0; /* parsed from URL */
AVal playpath; /* passed in explicitly */
AVal tcUrl;
AVal swfUrl;
AVal pageUrl;
AVal app;
AVal auth;
AVal flashVer;
AVal subscribepath;
AVal token;
AMFObject extras;
int edepth;
int seekTime;
int stopTime;
#define RTMP_LF_AUTH 0x0001 /* using auth param */
#define RTMP_LF_LIVE 0x0002 /* stream is live */
#define RTMP_LF_SWFV 0x0004 /* do SWF verification */
#define RTMP_LF_PLST 0x0008 /* send playlist before play */
#define RTMP_LF_BUFX 0x0010 /* toggle stream on BufferEmpty msg */
#define RTMP_LF_FTCU 0x0020 /* free tcUrl on close */
int lFlags;
int swfAge;
int protocol;
int timeout; /* connection timeout in seconds */
int send_timeout; /* send data timeout */
unsigned short socksport;
unsigned short port;
#ifdef CRYPTO
#define RTMP_SWF_HASHLEN 32
void *dh; /* for encryption */
void *rc4keyIn;
void *rc4keyOut;
uint32_t SWFSize;
uint8_t SWFHash[RTMP_SWF_HASHLEN];
char SWFVerificationResponse[RTMP_SWF_HASHLEN + 10];
#endif
} PILI_RTMP_LNK;
/* state for read() wrapper */
typedef struct PILI_RTMP_READ {
char *buf;
char *bufpos;
unsigned int buflen;
uint32_t timestamp;
uint8_t dataType;
uint8_t flags;
#define RTMP_READ_HEADER 0x01
#define RTMP_READ_RESUME 0x02
#define RTMP_READ_NO_IGNORE 0x04
#define RTMP_READ_GOTKF 0x08
#define RTMP_READ_GOTFLVK 0x10
#define RTMP_READ_SEEKING 0x20
int8_t status;
#define RTMP_READ_COMPLETE -3
#define RTMP_READ_ERROR -2
#define RTMP_READ_EOF -1
#define RTMP_READ_IGNORE 0
/* if bResume == TRUE */
uint8_t initialFrameType;
uint32_t nResumeTS;
char *metaHeader;
char *initialFrame;
uint32_t nMetaHeaderSize;
uint32_t nInitialFrameSize;
uint32_t nIgnoredFrameCounter;
uint32_t nIgnoredFlvFrameCounter;
} PILI_RTMP_READ;
typedef struct PILI_RTMP_METHOD {
AVal name;
int num;
} PILI_RTMP_METHOD;
typedef void (*PILI_RTMPErrorCallback)(RTMPError *error, void *userData);
typedef struct PILI_CONNECTION_TIME {
uint32_t connect_time;
uint32_t handshake_time;
} PILI_CONNECTION_TIME;
typedef void (*PILI_RTMP_ConnectionTimeCallback)(
PILI_CONNECTION_TIME *conn_time, void *userData);
typedef struct PILI_RTMP {
int m_inChunkSize; // maximum receive chunk size
int m_outChunkSize; // maximum send chunk size
int m_nBWCheckCounter; // bandwidth-check counter
int m_nBytesIn; // received-bytes counter
int m_nBytesInSent; // bytes already acknowledged
int m_nBufferMS; // current buffer length, in ms
int m_stream_id; // stream ID of the current connection
int m_mediaChannel; // chunk stream ID used by the media on this connection
uint32_t m_mediaStamp; // latest media timestamp on this connection
uint32_t m_pauseStamp; // media timestamp when the connection was paused
int m_pausing; // whether the stream is paused
int m_nServerBW; // server bandwidth
int m_nClientBW; // client bandwidth
uint8_t m_nClientBW2; // client bandwidth limit type
uint8_t m_bPlaying; // whether currently publishing/playing or connected
uint8_t m_bSendEncoding; // send encoding when connecting to the server
uint8_t m_bSendCounter; // whether to send bytes-received acknowledgements to the server
int m_numInvokes; // counter of 0x14 remote procedure calls
int m_numCalls; // number of queued 0x14 remote procedure requests
PILI_RTMP_METHOD *m_methodCalls; // queue of remote procedure call requests
PILI_RTMPPacket *m_vecChannelsIn[RTMP_CHANNELS]; // last packet received on each chunk stream ID
PILI_RTMPPacket *m_vecChannelsOut[RTMP_CHANNELS]; // last packet sent on each chunk stream ID
int m_channelTimestamp[RTMP_CHANNELS]; // latest media timestamp for each chunk stream ID
double m_fAudioCodecs; // audio codec code
double m_fVideoCodecs; // video codec code
double m_fEncoding; /* AMF0 or AMF3 */
double m_fDuration; // duration of the current media
int m_msgCounter; // counter of requests sent when tunnelling over HTTP
int m_polling; // position while receiving a message body over HTTP
int m_resplen; // unread bytes of the message body while receiving over HTTP
int m_unackd; // number of unanswered requests when tunnelling over HTTP
AVal m_clientID; // client ID used when tunnelling over HTTP
PILI_RTMP_READ m_read; // context for RTMP_Read()
PILI_RTMPPacket m_write; // reusable packet used by RTMP_Write()
PILI_RTMPSockBuf m_sb; // context for RTMP_ReadPacket()
PILI_RTMP_LNK Link; // RTMP connection context
PILI_RTMPErrorCallback m_errorCallback; // called when the RTMP connection fails or is closed
PILI_RTMP_ConnectionTimeCallback m_connCallback; // connection-time callback
RTMPError *m_error; //
void *m_userData;
int m_is_closing;
int m_tcp_nodelay;
uint32_t ip;
} PILI_RTMP;
// Parse a stream URL
int PILI_RTMP_ParseURL(const char *url, int *protocol, AVal *host,
unsigned int *port, AVal *playpath, AVal *app);
int PILI_RTMP_ParseURL2(const char *url, int *protocol, AVal *host,
unsigned int *port, AVal *playpath, AVal *app, AVal *domain);
void PILI_RTMP_ParsePlaypath(AVal *in, AVal *out);
// Before connecting: set how much media (in ms) the server should buffer for the client
void PILI_RTMP_SetBufferMS(PILI_RTMP *r, int size);
// After connecting: update the server-side buffer length
void PILI_RTMP_UpdateBufferMS(PILI_RTMP *r, RTMPError *error);
// Update an option on the RTMP context
int PILI_RTMP_SetOpt(PILI_RTMP *r, const AVal *opt, AVal *arg,
RTMPError *error);
// Set the stream URL
int PILI_RTMP_SetupURL(PILI_RTMP *r, const char *url, RTMPError *error);
// Set the RTMP context's play path and related options; anything you don't care about can be NULL
void PILI_RTMP_SetupStream(PILI_RTMP *r, int protocol, AVal *hostname,
unsigned int port, AVal *sockshost, AVal *playpath,
AVal *tcUrl, AVal *swfUrl, AVal *pageUrl, AVal *app,
AVal *auth, AVal *swfSHA256Hash, uint32_t swfSize,
AVal *flashVer, AVal *subscribepath, int dStart,
int dStop, int bLiveStream, long int timeout);
// Client connection and handshake
int PILI_RTMP_Connect(PILI_RTMP *r, PILI_RTMPPacket *cp, RTMPError *error);
struct sockaddr;
int PILI_RTMP_Connect0(PILI_RTMP *r, struct addrinfo *ai, unsigned short port,
RTMPError *error);
int PILI_RTMP_Connect1(PILI_RTMP *r, PILI_RTMPPacket *cp, RTMPError *error);
// Server-side handshake
int PILI_RTMP_Serve(PILI_RTMP *r, RTMPError *error);
// Receive one packet
int PILI_RTMP_ReadPacket(PILI_RTMP *r, PILI_RTMPPacket *packet);
// Send one packet; queue=1 means a 0x14 packet is queued to wait for its response
int PILI_RTMP_SendPacket(PILI_RTMP *r, PILI_RTMPPacket *packet, int queue,
RTMPError *error);
// Send a chunk directly
int PILI_RTMP_SendChunk(PILI_RTMP *r, PILI_RTMPChunk *chunk, RTMPError *error);
// Check whether the connection is up
int PILI_RTMP_IsConnected(PILI_RTMP *r);
// Return the socket
int PILI_RTMP_Socket(PILI_RTMP *r);
// Check whether the connection has timed out
int PILI_RTMP_IsTimedout(PILI_RTMP *r);
// Get the duration of the current media
double PILI_RTMP_GetDuration(PILI_RTMP *r);
// Toggle between pause and play
int PILI_RTMP_ToggleStream(PILI_RTMP *r, RTMPError *error);
// Connect the stream and specify the playback start position
int PILI_RTMP_ConnectStream(PILI_RTMP *r, int seekTime, RTMPError *error);
// Recreate the stream
int PILI_RTMP_ReconnectStream(PILI_RTMP *r, int seekTime, RTMPError *error);
// Delete the current stream
void PILI_RTMP_DeleteStream(PILI_RTMP *r, RTMPError *error);
// Get the next media packet
int PILI_RTMP_GetNextMediaPacket(PILI_RTMP *r, PILI_RTMPPacket *packet);
// Handle client-side packet interaction, i.e. packet dispatching
int PILI_RTMP_ClientPacket(PILI_RTMP *r, PILI_RTMPPacket *packet);
// Initialize the RTMP context with default values
void PILI_RTMP_Init(PILI_RTMP *r);
// Close the RTMP context
void PILI_RTMP_Close(PILI_RTMP *r, RTMPError *error);
// Allocate an RTMP context
PILI_RTMP *PILI_RTMP_Alloc(void);
// Free the RTMP context
void PILI_RTMP_Free(PILI_RTMP *r);
// Enable the client's RTMP write switch, used for publishing
void PILI_RTMP_EnableWrite(PILI_RTMP *r);
// Return the librtmp version
int PILI_RTMP_LibVersion(void);
// Interrupt RTMP processing
void PILI_RTMP_UserInterrupt(void); /* user typed Ctrl-C */
// Send a control message (message type 0x04)
int PILI_RTMP_SendCtrl(PILI_RTMP *r, short nType, unsigned int nObject,
unsigned int nTime, RTMPError *error);
/* caller probably doesn't know current timestamp, should
* just use RTMP_Pause instead
*/
// Send pause via a 0x14 remote call
int PILI_RTMP_SendPause(PILI_RTMP *r, int DoPause, int dTime, RTMPError *error);
int PILI_RTMP_Pause(PILI_RTMP *r, int DoPause, RTMPError *error);
// Recursively search an object for the named property
int PILI_RTMP_FindFirstMatchingProperty(AMFObject *obj, const AVal *name,
AMFObjectProperty *p);
// Low-level socket read, send and close operations
int PILI_RTMPSockBuf_Fill(PILI_RTMPSockBuf *sb);
int PILI_RTMPSockBuf_Send(PILI_RTMPSockBuf *sb, const char *buf, int len);
int PILI_RTMPSockBuf_Close(PILI_RTMPSockBuf *sb);
// Send createStream
int PILI_RTMP_SendCreateStream(PILI_RTMP *r, RTMPError *error);
// Send a seek to a media position
int PILI_RTMP_SendSeek(PILI_RTMP *r, int dTime, RTMPError *error);
// Send the window acknowledgement size (server bandwidth)
int PILI_RTMP_SendServerBW(PILI_RTMP *r, RTMPError *error);
// Send the set peer bandwidth (client bandwidth)
int PILI_RTMP_SendClientBW(PILI_RTMP *r, RTMPError *error);
// Remove a request from the 0x14 remote-call queue
void PILI_RTMP_DropRequest(PILI_RTMP *r, int i, int freeit);
// Read data in FLV format
int PILI_RTMP_Read(PILI_RTMP *r, char *buf, int size);
// Write (send) data in FLV format
int PILI_RTMP_Write(PILI_RTMP *r, const char *buf, int size, RTMPError *error);
/* hashswf.c */
int PILI_RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
int age);
Server setup
There are plenty of articles online about setting up an RTMP server, so I won't repeat them here; the one problem I ran into was a failing connection, which went away after I turned off the firewall.
Playback side
For playback I used Bilibili's ijkPlayer directly. There are also many articles about building it; I followed the steps on GitHub and it compiled on the first try. Besides the libraries listed on GitHub I additionally had to link libc++.tbd. The playback code can be found here.
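For reference, a minimal playback sketch using IJKFFMoviePlayerController; the stream URL is a placeholder and the options are left at their defaults:
#import <IJKMediaFramework/IJKMediaFramework.h>
IJKFFOptions *options = [IJKFFOptions optionsByDefault];
IJKFFMoviePlayerController *player = [[IJKFFMoviePlayerController alloc] initWithContentURL:[NSURL URLWithString:@"rtmp://your-server-ip:1935/live/stream"] withOptions:options];
player.view.frame = self.view.bounds;
[self.view addSubview:player.view];
[player prepareToPlay];
// when done: [player shutdown];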
Capture and streaming project repository
ijkPlayer player project repository
References:
使用librtmp库进行推流与拉流 (pushing and pulling streams with librtmp)