I've been wanting to build a simple live-streaming service recently. I read a number of articles on how to convert a stream to RTMP; https://blog.csdn.net/scnu20142005027/article/details/60623670 explains the librtmp API functions in good detail. I then studied 雷霄骅 (Lei Xiaohua)'s Simplest LibRTMP Example and wrote a small RTMP stream-packaging wrapper.
The flow is: a worker thread keeps writing data (H.264 packets) into the library, the library packages the H.264 stream into RTMP packets and sends them to the server, and clients can watch the stream with VLC.
H.264 packet management: a queue is used.
Efficiency still needs improvement, because the current design copies the data around. Please leave a comment if you spot problems; I will keep improving it.
You need to build the RTMP library (librtmp) yourself. The wrapper code is as follows:
#define RTMP_BUFFER_SIZE 8192
#define RTMP_HEAD_SIZE (sizeof(RTMPPacket)+RTMP_MAX_HEADER_SIZE)
typedef struct _NaluUnit
{
int type;
int size;
unsigned char *data;
}NaluUnit;
typedef struct _RTMPMetadata
{
unsigned int nWidth;
unsigned int nHeight;
unsigned int nFrameRate;
unsigned int nSpsLen;
unsigned char * Sps;
unsigned int nPpsLen;
unsigned char * Pps;
} RTMPMetadata,*LPRTMPMetadata;
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cstdint>
#include <cmath>
#include "librtmp/rtmp_sys.h"
#include "librtmp/rtmp.h"
#include "librtmp/amf.h"
typedef unsigned int UINT;
typedef unsigned char BYTE;
typedef unsigned long DWORD;
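// ue(v): unsigned Exp-Golomb decode -- count leading zero bits up to the first '1',
// then read that many trailing bits; the value is 2^zeros - 1 + trailing bits.
// Se() builds the signed se(v) mapping on top of it, and u(n) reads n raw bits.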
UINT Ue(BYTE *pBuff, UINT nLen, UINT &nStartBit)
{
UINT nZeroNum = 0;
while (nStartBit < nLen * 8)
{
if (pBuff[nStartBit / 8] & (0x80 >> (nStartBit % 8)))
{
break;
}
nZeroNum++;
nStartBit++;
}
nStartBit++;
DWORD dwRet = 0;
for (UINT i = 0; i < nZeroNum; i++)
{
dwRet <<= 1;
if (pBuff[nStartBit / 8] & (0x80 >> (nStartBit % 8)))
{
dwRet += 1;
}
nStartBit++;
}
return (1 << nZeroNum) - 1 + dwRet;
}
int Se(BYTE *pBuff, UINT nLen, UINT &nStartBit)
{
int UeVal = Ue(pBuff, nLen, nStartBit);
double k = UeVal;
int nValue = ceil(k / 2);
if (UeVal % 2 == 0) nValue = -nValue;
return nValue;
}
DWORD u(UINT BitCount, BYTE * buf, UINT &nStartBit)
{
DWORD dwRet = 0;
for (UINT i = 0; i < BitCount; i++)
{
dwRet <<= 1;
if (buf[nStartBit / 8] & (0x80 >> (nStartBit % 8)))
{
dwRet += 1;
}
nStartBit++;
}
return dwRet;
}
void de_emulation_prevention(BYTE* buf, unsigned int* buf_size)
{
int i = 0, j = 0;
BYTE* tmp_ptr = NULL;
unsigned int tmp_buf_size = 0;
int val = 0;
tmp_ptr = buf;
tmp_buf_size = *buf_size;
for (i = 0; i < (tmp_buf_size - 2); i++)
{
val = (tmp_ptr[i] ^ 0x00) + (tmp_ptr[i + 1] ^ 0x00) + (tmp_ptr[i + 2] ^ 0x03);
if (val == 0)
{
for (j = i + 2; j < tmp_buf_size - 1; j++)
{
tmp_ptr[j] = tmp_ptr[j + 1];
}
(*buf_size)--;
}
}
return;
}
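// h264_decode_sps: parse an SPS NALU (after stripping emulation-prevention bytes)
// to recover the picture width/height and, when timing info is present, the fps.
// Width/height here ignore frame cropping, so e.g. 1920x1080 reads back as 1920x1088.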
int h264_decode_sps(BYTE * buf, unsigned int nLen, int &width, int &height, int &fps)
{
UINT StartBit = 0;
fps = 0;
de_emulation_prevention(buf, &nLen);
int forbidden_zero_bit = u(1, buf, StartBit);
int nal_ref_idc = u(2, buf, StartBit);
int nal_unit_type = u(5, buf, StartBit);
if (nal_unit_type == 7)
{
int profile_idc = u(8, buf, StartBit);
int constraint_set0_flag = u(1, buf, StartBit);//(buf[1] & 0x80)>>7;
int constraint_set1_flag = u(1, buf, StartBit);//(buf[1] & 0x40)>>6;
int constraint_set2_flag = u(1, buf, StartBit);//(buf[1] & 0x20)>>5;
int constraint_set3_flag = u(1, buf, StartBit);//(buf[1] & 0x10)>>4;
int reserved_zero_4bits = u(4, buf, StartBit);
int level_idc = u(8, buf, StartBit);
int seq_parameter_set_id = Ue(buf, nLen, StartBit);
if (profile_idc == 100 || profile_idc == 110 ||
profile_idc == 122 || profile_idc == 144)
{
int chroma_format_idc = Ue(buf, nLen, StartBit);
if (chroma_format_idc == 3)
{
int residual_colour_transform_flag = u(1, buf, StartBit);
}
int bit_depth_luma_minus8 = Ue(buf, nLen, StartBit);
int bit_depth_chroma_minus8 = Ue(buf, nLen, StartBit);
int qpprime_y_zero_transform_bypass_flag = u(1, buf, StartBit);
int seq_scaling_matrix_present_flag = u(1, buf, StartBit);
int seq_scaling_list_present_flag[8];
if (seq_scaling_matrix_present_flag)
{
for (int i = 0; i < 8; i++) {
seq_scaling_list_present_flag[i] = u(1, buf, StartBit);
}
}
}
int log2_max_frame_num_minus4 = Ue(buf, nLen, StartBit);
int pic_order_cnt_type = Ue(buf, nLen, StartBit);
if (pic_order_cnt_type == 0)
{
int log2_max_pic_order_cnt_lsb_minus4 = Ue(buf, nLen, StartBit);
}
else if (pic_order_cnt_type == 1)
{
int delta_pic_order_always_zero_flag = u(1, buf, StartBit);
int offset_for_non_ref_pic = Se(buf, nLen, StartBit);
int offset_for_top_to_bottom_field = Se(buf, nLen, StartBit);
int num_ref_frames_in_pic_order_cnt_cycle = Ue(buf, nLen, StartBit);
int *offset_for_ref_frame = new int[num_ref_frames_in_pic_order_cnt_cycle];
for (int i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++)
{
offset_for_ref_frame[i] = Se(buf, nLen, StartBit);
}
delete[] offset_for_ref_frame;
}
int num_ref_frames = Ue(buf, nLen, StartBit);
int gaps_in_frame_num_value_allowed_flag = u(1, buf, StartBit);
int pic_width_in_mbs_minus1 = Ue(buf, nLen, StartBit);
int pic_height_in_map_units_minus1 = Ue(buf, nLen, StartBit);
width = (pic_width_in_mbs_minus1 + 1) * 16;
height = (pic_height_in_map_units_minus1 + 1) * 16;
int frame_mbs_only_flag = u(1, buf, StartBit);
if (!frame_mbs_only_flag)
{
int mb_adaptive_frame_field_flag = u(1, buf, StartBit);
}
int direct_8x8_inference_flag = u(1, buf, StartBit);
int frame_cropping_flag = u(1, buf, StartBit);
if (frame_cropping_flag)
{
int frame_crop_left_offset = Ue(buf, nLen, StartBit);
int frame_crop_right_offset = Ue(buf, nLen, StartBit);
int frame_crop_top_offset = Ue(buf, nLen, StartBit);
int frame_crop_bottom_offset = Ue(buf, nLen, StartBit);
}
int vui_parameter_present_flag = u(1, buf, StartBit);
if (vui_parameter_present_flag)
{
int aspect_ratio_info_present_flag = u(1, buf, StartBit);
if (aspect_ratio_info_present_flag)
{
int aspect_ratio_idc = u(8, buf, StartBit);
if (aspect_ratio_idc == 255)
{
int sar_width = u(16, buf, StartBit);
int sar_height = u(16, buf, StartBit);
}
}
int overscan_info_present_flag = u(1, buf, StartBit);
if (overscan_info_present_flag)
{
int overscan_appropriate_flag = u(1, buf, StartBit);
}
int video_signal_type_present_flag = u(1, buf, StartBit);
if (video_signal_type_present_flag)
{
int video_format = u(3, buf, StartBit);
int video_full_range_flag = u(1, buf, StartBit);
int colour_description_present_flag = u(1, buf, StartBit);
if (colour_description_present_flag)
{
int colour_primaries = u(8, buf, StartBit);
int transfer_characteristics = u(8, buf, StartBit);
int matrix_coefficients = u(8, buf, StartBit);
}
}
int chroma_loc_info_present_flag = u(1, buf, StartBit);
if (chroma_loc_info_present_flag)
{
int chroma_sample_loc_type_top_field = Ue(buf, nLen, StartBit);
int chroma_sample_loc_type_bottom_field = Ue(buf, nLen, StartBit);
}
int timing_info_present_flag = u(1, buf, StartBit);
if (timing_info_present_flag)
{
int num_units_in_tick = u(32, buf, StartBit);
int time_scale = u(32, buf, StartBit);
fps = time_scale / (2 * num_units_in_tick);
}
}
return true;
}
else
{
return false;
}
}
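// Big-endian write helpers and small AMF encoders used when assembling RTMP/FLV
// packet bodies and metadata fields.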
char * put_byte( char *output, uint8_t nVal )
{
output[0] = nVal;
return output+1;
}
char * put_be16(char *output, uint16_t nVal )
{
output[1] = nVal & 0xff;
output[0] = nVal >> 8;
return output+2;
}
char * put_be24(char *output,uint32_t nVal )
{
output[2] = nVal & 0xff;
output[1] = nVal >> 8;
output[0] = nVal >> 16;
return output+3;
}
char * put_be32(char *output, uint32_t nVal )
{
output[3] = nVal & 0xff;
output[2] = nVal >> 8;
output[1] = nVal >> 16;
output[0] = nVal >> 24;
return output+4;
}
char * put_be64( char *output, uint64_t nVal )
{
output=put_be32( output, nVal >> 32 );
output=put_be32( output, nVal );
return output;
}
char * put_amf_string( char *c, const char *str )
{
uint16_t len = strlen( str );
c = put_be16( c, len );
memcpy(c,str,len);
return c+len;
}
char * put_amf_double( char *c, double d )
{
*c++ = AMF_NUMBER;
{
unsigned char *ci, *co;
ci = (unsigned char *)&d;
co = (unsigned char *)c;
co[0] = ci[7];
co[1] = ci[6];
co[2] = ci[5];
co[3] = ci[4];
co[4] = ci[3];
co[5] = ci[2];
co[6] = ci[1];
co[7] = ci[0];
}
return c+8;
}
#include <mutex>
#include <condition_variable>
#include <queue>
#include <atomic>
#include <future>
#include <chrono>
#include <thread>
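// h264DataNode: a simple growable byte buffer; the producer pushes raw Annex-B
// H.264 data in these nodes and the sender thread appends them into one parse buffer.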
struct h264DataNode
{
char * _buffer;
unsigned _size;
unsigned _capacity;
~h264DataNode() { if(_buffer) delete[]_buffer; }
h264DataNode() : _buffer(NULL), _size(0), _capacity(0){ ; }
h264DataNode(char * data, int size, bool donew)
{
if (donew)
{
_capacity = size + 1;
_buffer = new char[size + 1];
memcpy(_buffer, data, size);
_size = size;
}
else
{
_buffer = data;
_capacity = size;
_size = size;
}
}
h264DataNode(unsigned size)
{
_capacity = size + 1;
_buffer = new char[size + 1];
_size = size;
}
unsigned char * data()
{
return (unsigned char *)_buffer;
}
long size()
{
return _size;
}
void reset()
{
if (_buffer)
{
memset(_buffer, 0, _capacity);
}
_size = 0;
}
bool move(int off)
{
if (_size < off) return false;
if (_buffer == nullptr) return false;
memmove(_buffer, _buffer + off, _size - off);
_size -= off;
return true;
}
bool copy(const char * data, int len)
{
if (_size + len > _capacity) return false;
memcpy(_buffer + _size, data, len);
_size += len;
return true;
}
void append(const h264DataNode * data)
{
if (_capacity <= data->_size + _size + 1)
{
_capacity = data->_size + _size + 1;
char * da = new char[_capacity];
memcpy(da, _buffer, _size);
delete[]_buffer;
_buffer = da;
}
memcpy(_buffer + _size, data->_buffer, data->_size);
_size += data->_size;
}
};
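// securityDataQueue: mutex + condition-variable protected FIFO of h264DataNode*,
// shared between the producer (RTMP_Push) and the sender thread (fetchRTMPData).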
class securityDataQueue
{
protected:
std::mutex _mutex;
std::condition_variable _condition;
std::queue<h264DataNode*> _queues;
std::atomic<int> _count;
std::atomic<int> _stop;
securityDataQueue(const securityDataQueue&) = delete;
securityDataQueue& operator=(const securityDataQueue&) = delete;
public:
securityDataQueue()
{
_stop = 0;
_count = 0;
}
~securityDataQueue()
{
clear();
}
bool isstoped()
{
if (_stop.load(std::memory_order_relaxed) == 1)
{
return true;
}
return false;
}
int counts()
{
return _count;
}
void stop()
{
_stop.store(1);
_condition.notify_one();
}
void clear()
{
std::lock_guard<std::mutex> lock(_mutex);
while (!_queues.empty())
{
h264DataNode* value = _queues.front();
_queues.pop();
delete value;
}
}
uint16_t push(h264DataNode* data)
{
if (isstoped())
{
delete data;
std::unique_lock<std::mutex> lock(_mutex);
return (uint16_t)_queues.size();
}
else
{
std::unique_lock<std::mutex> lock(_mutex);
bool emp = _queues.empty();
_queues.push(data);
if (emp)
{
_condition.notify_one();
}
_count.fetch_add(1);
return _queues.size();
}
}
h264DataNode* timePop(short milliSecond)
{
std::unique_lock<std::mutex> lock(_mutex);
if (_queues.empty())
{
uint32_t timeout = milliSecond;
if (!_condition.wait_for(lock,
std::chrono::milliseconds(timeout),
[this]() { return !_queues.empty(); }))
{
return nullptr;
}
}
if (isstoped())
{
return nullptr;
}
h264DataNode* value = nullptr;
if (!_queues.empty())
{
value = _queues.front();
_queues.pop();
_count.fetch_sub(1);
}
return value;
}
};
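// The pusher object itself: owns the librtmp session, the inbound queue, the parse
// buffer and the std::async sender thread.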
typedef struct _RTMP_H264_Node
{
RTMP* m_pRtmp;
FILE *pFileOutput;
RTMPMetadata metaData;
securityDataQueue m_pushBuffer;
h264DataNode * m_pBuffer;
long n_nalHeadPos;
std::atomic<long> m_pushSize;
std::future<int> m_results;
std::atomic<bool> m_stop;
std::atomic<bool> m_running;
_RTMP_H264_Node()
{
#ifdef WIN32
WORD version;
WSADATA wsaData;
version = MAKEWORD(1, 1);
(WSAStartup(version, &wsaData) == 0);
#endif
pFileOutput = NULL;
m_running = false;
m_stop = false;
m_pRtmp = NULL;
m_pushSize = 0;
m_pBuffer = NULL;
n_nalHeadPos = 0;
}
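// Send the AVC sequence header (AVCDecoderConfigurationRecord) carrying SPS and PPS;
// players need it before they can decode the first key frame.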
int SendVideoSpsPps(unsigned char *pps, int pps_len, unsigned char * sps, int sps_len)
{
RTMPPacket * packet = NULL;
unsigned char * body = NULL;
int i;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE + 1024);
memset(packet, 0, RTMP_HEAD_SIZE + 1024);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
body = (unsigned char *)packet->m_body;
i = 0;
body[i++] = 0x17;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x01;
body[i++] = sps[1];
body[i++] = sps[2];
body[i++] = sps[3];
body[i++] = 0xff;
body[i++] = 0xe1;
body[i++] = (sps_len >> 8) & 0xff;
body[i++] = sps_len & 0xff;
memcpy(&body[i], sps, sps_len);
i += sps_len;
body[i++] = 0x01;
body[i++] = (pps_len >> 8) & 0xff;
body[i++] = (pps_len) & 0xff;
memcpy(&body[i], pps, pps_len);
i += pps_len;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = i;
packet->m_nChannel = 0x04;
packet->m_nTimeStamp = 0;
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
int nRet = RTMP_SendPacket(m_pRtmp, packet, TRUE);
free(packet);
return nRet;
}
int ReadFirstNaluFromBuf(NaluUnit &nalu)
{
if (m_pBuffer == NULL) return FALSE;
int naltail_pos = n_nalHeadPos;
unsigned char * buffer = m_pBuffer->data();
long bufSize = m_pBuffer->size();
while (n_nalHeadPos < bufSize)
{
if (buffer[n_nalHeadPos++] == 0x00 &&
buffer[n_nalHeadPos++] == 0x00)
{
if (buffer[n_nalHeadPos++] == 0x01)
{
goto gotnal_head;
}
else
{
n_nalHeadPos--;
if (buffer[n_nalHeadPos++] == 0x00 &&
buffer[n_nalHeadPos++] == 0x01)
{
goto gotnal_head;
}
else
{
continue;
}
}
}
else
{
continue;
}
gotnal_head:
naltail_pos = n_nalHeadPos;
while (naltail_pos < bufSize)
{
if (buffer[naltail_pos++] == 0x00 &&
buffer[naltail_pos++] == 0x00)
{
if (buffer[naltail_pos++] == 0x01)
{
nalu.size = (naltail_pos - 3) - n_nalHeadPos;
break;
}
else
{
naltail_pos--;
if (buffer[naltail_pos++] == 0x00 &&
buffer[naltail_pos++] == 0x01)
{
nalu.size = (naltail_pos - 4) - n_nalHeadPos;
break;
}
}
}
}
nalu.type = buffer[n_nalHeadPos] & 0x1f;
nalu.data = buffer + n_nalHeadPos;
n_nalHeadPos = naltail_pos;
return TRUE;
}
return FALSE;
}
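// Wrap an already-built FLV video/audio tag body into an RTMPPacket and send it.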
int SendPacket(unsigned int nPacketType, unsigned char *data,
unsigned int size, unsigned int nTimestamp)
{
RTMPPacket* packet;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE + size);
memset(packet, 0, RTMP_HEAD_SIZE);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
packet->m_nBodySize = size;
memcpy(packet->m_body, data, size);
packet->m_hasAbsTimestamp = 0;
packet->m_nTimeStamp = nTimestamp;
packet->m_packetType = nPacketType;
packet->m_nInfoField2 = m_pRtmp ? m_pRtmp->m_stream_id : 0;
packet->m_nChannel = 0x04;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
if (RTMP_PACKET_TYPE_AUDIO == nPacketType && size != 4)
{
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
}
int nRet = 0;
if (m_pRtmp && RTMP_IsConnected(m_pRtmp))
{
nRet = RTMP_SendPacket(m_pRtmp, packet, FALSE);
}
else
{
printf("RTMP stopped \n");
}
free(packet);
return nRet;
}
bool Connect(const char * url)
{
n_nalHeadPos = 0;
m_pRtmp = RTMP_Alloc();
RTMP_Init(m_pRtmp);
if (RTMP_SetupURL(m_pRtmp, (char*)url) == FALSE)
{
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
return false;
}
RTMP_EnableWrite(m_pRtmp);
if (RTMP_Connect(m_pRtmp, NULL) == FALSE)
{
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
return false;
}
if (RTMP_ConnectStream(m_pRtmp, 0) == FALSE)
{
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
return false;
}
m_results = std::async(std::launch::async, fetchRTMPData, this);
m_running.store(true);
return true;
}
static int fetchRTMPData(_RTMP_H264_Node * nodes)
{
if (!nodes->Available())
{
nodes->m_running.store(false);
return 0;
}
NaluUnit naluUnit;
int width = 0, height = 0, fps = 0;
memset(&nodes->metaData, 0, sizeof(RTMPMetadata));
nodes->ReadFirstNaluFromBuf(naluUnit);
nodes->metaData.nSpsLen = naluUnit.size;
nodes->metaData.Sps = NULL;
nodes->metaData.Sps = (unsigned char*)malloc(naluUnit.size);
memcpy(nodes->metaData.Sps, naluUnit.data, naluUnit.size);
nodes->ReadOneNaluFromBuf(naluUnit);
nodes->metaData.nPpsLen = naluUnit.size;
nodes->metaData.Pps = NULL;
nodes->metaData.Pps = (unsigned char*)malloc(naluUnit.size);
memcpy(nodes->metaData.Pps, naluUnit.data, naluUnit.size);
h264_decode_sps(nodes->metaData.Sps, nodes->metaData.nSpsLen, width, height, fps);
if (fps) nodes->metaData.nFrameRate = fps;
else nodes->metaData.nFrameRate = 25;
unsigned int tick = 0, idx = 0;
unsigned int tick_gap = 1000 / nodes->metaData.nFrameRate;
auto last = std::chrono::system_clock::now();
nodes->ReadOneNaluFromBuf(naluUnit);
std::chrono::milliseconds dur;
int bKeyframe = (naluUnit.type == 0x05) ? TRUE : FALSE;
while (nodes->SendH264Packet(naluUnit.data, naluUnit.size, bKeyframe, tick)
&& !nodes->m_stop.load())
{
nodes->m_running.store(true);
got_sps_pps:
if (!nodes->ReadOneNaluFromBuf(naluUnit)) goto end;
if (naluUnit.type == 0x07 || naluUnit.type == 0x08) goto got_sps_pps;
bKeyframe = (naluUnit.type == 0x05) ? TRUE : FALSE;
tick += tick_gap;
auto now = std::chrono::system_clock::now();
dur = std::chrono::duration_cast<std::chrono::milliseconds>(now - last);
if (dur.count() < tick_gap) nodes->sleeping(tick_gap - dur.count());
#if defined(_DEBUG)
printf("NALU %04u, %8lld, %8d\n", idx++, (long long)dur.count(), naluUnit.size);
#endif
nodes->m_running.store(false);
last = now;
}
end:
free(nodes->metaData.Sps);
free(nodes->metaData.Pps);
return 0;
}
void sleeping(int sz)
{
using namespace std::chrono;
duration<int, std::milli> freq(sz);
std::this_thread::sleep_for(freq);
}
void stopping()
{
m_stop.store(true);
m_pushBuffer.stop();
m_results.wait();//
printf("RTMP close \n");
}
bool Available()
{
do{
if (m_stop.load()) return false;
h264DataNode* node = m_pushBuffer.timePop(50);
if (m_pBuffer == NULL && node != NULL) m_pBuffer = node;
else if (m_pBuffer != NULL && node != NULL)
{
m_pushSize.fetch_sub(node->size());
m_pBuffer->append(node);
delete node;
}
} while ((m_pBuffer == NULL) || (m_pBuffer->size() < RTMP_BUFFER_SIZE));
return true;
}
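// Build the FLV VideoTagHeader (0x17 key frame / 0x27 inter frame, AVC NALU type 1,
// zero composition time) followed by a 4-byte big-endian NALU length and the NALU data.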
int SendH264Packet(unsigned char *data, unsigned int size,
int bIsKeyFrame, unsigned int nTimeStamp)
{
if (data == NULL || size < 11)
{
return false;
}
unsigned char *body = (unsigned char*)malloc(size + 9);
memset(body, 0, size + 9);
int i = 0;
if (bIsKeyFrame)
{
body[i++] = 0x17;
body[i++] = 0x01;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = size >> 24 & 0xff;
body[i++] = size >> 16 & 0xff;
body[i++] = size >> 8 & 0xff;
body[i++] = size & 0xff;
memcpy(&body[i], data, size);
SendVideoSpsPps(metaData.Pps, metaData.nPpsLen, metaData.Sps, metaData.nSpsLen);
}
else
{
body[i++] = 0x27;
body[i++] = 0x01;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = size >> 24 & 0xff;
body[i++] = size >> 16 & 0xff;
body[i++] = size >> 8 & 0xff;
body[i++] = size & 0xff;
memcpy(&body[i], data, size);
}
//static long sizelocate = 0;
//if (pFileOutput == NULL) pFileOutput = fopen("rtmpcmp1.output", "wb+");
int bRet = SendPacket(RTMP_PACKET_TYPE_VIDEO, body, i + size, nTimeStamp);
//fwrite(body, 1, i + size, pFileOutput);
//sizelocate += i + size;
free(body);
return bRet;
}
bool RTMP_Push(unsigned char * buffer, int buf_size, bool donew)
{
m_pushBuffer.push(new h264DataNode((char*)buffer, buf_size, donew));
m_pushSize.fetch_add(buf_size);
return true;
}
int ReadOneNaluFromBuf(NaluUnit &nalu)
{
int ret;
int nalustart;
long naltail_pos = n_nalHeadPos;
nalu.size = 0;
while (1)
{
if (m_stop.load()) return FALSE;
unsigned char * buffer = m_pBuffer->data();
while (naltail_pos < m_pBuffer->size() - 1)
{
if (buffer[naltail_pos++] == 0x00 &&
buffer[naltail_pos++] == 0x00)
{
if (buffer[naltail_pos++] == 0x01)
{
nalustart = 3;
goto gotnal;
}
else
{
naltail_pos--;
if (buffer[naltail_pos++] == 0x00 &&
buffer[naltail_pos++] == 0x01)
{
nalustart = 4;
goto gotnal;
}
else
{
continue;
}
}
}
else
{
continue;
}
gotnal:
nalu.type = buffer[n_nalHeadPos] & 0x1f;
nalu.size = naltail_pos - n_nalHeadPos - nalustart;
if (nalu.type == 0x06)
{
n_nalHeadPos = naltail_pos;
continue;
}
nalu.data = buffer + n_nalHeadPos;
n_nalHeadPos = naltail_pos;
return TRUE;
}
m_pBuffer->move(n_nalHeadPos);
naltail_pos -= n_nalHeadPos;
n_nalHeadPos = 0;
Available();
}
return FALSE;
}
void close()
{
m_pushBuffer.stop();
if (pFileOutput)
{
fclose(pFileOutput);
pFileOutput = NULL;
}
if (m_pRtmp)
{
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
}
}
~_RTMP_H264_Node()
{
close();
#ifdef WIN32
WSACleanup();
#endif
if (m_pBuffer) delete m_pBuffer;
m_pBuffer = NULL;
}
}RTMP_H264_Node;
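For reference, here is a minimal sketch of how the wrapper might be driven. The URL, file name and chunk size are placeholders, and a real source would push packets as it produces them instead of reading a file in one loop:

// Hypothetical driver: push a local Annex-B .h264 file to an RTMP server.
// "rtmp://127.0.0.1/live/test" and "test.h264" are placeholders.
int main()
{
    RTMP_H264_Node node;
    if (!node.Connect("rtmp://127.0.0.1/live/test"))
        return -1;

    FILE *fp = fopen("test.h264", "rb");
    if (!fp)
        return -1;

    unsigned char chunk[RTMP_BUFFER_SIZE];
    size_t n;
    while ((n = fread(chunk, 1, sizeof(chunk), fp)) > 0)
    {
        // donew = true: the wrapper copies the chunk into its own queue node.
        node.RTMP_Push(chunk, (int)n, true);
    }
    fclose(fp);

    node.stopping();   // stop the queue and wait for the sender thread to finish
    return 0;
}

Note that stopping() ends the sender loop even if data is still queued, so a long-running application would keep pushing for as long as the stream should stay live.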