librtmp + H.264 (MediaCodec) + AAC (MediaCodec) + AMS5
Demo: https://github.com/wangzuxing/myrtmplive
Install AMS5 (Adobe Media Server 5) on a PC as the streaming server; the Android client establishes the connection (NetConnection) to it via the PC's IP address and the AMS5 RTMP port.
H.264: camera preview data encoded by MediaCodec ("video/avc")
AAC: AudioRecord(MediaRecorder.AudioSource.MIC, …) + MediaCodec encoding ("audio/mp4a-latm")
Pushing AAC/H.264
The stream pushed over RTMP is packaged much like FLV: the payload of every audio and video RTMP message is an audio/video packet in FLV-TAG format.
To publish an H.264/AAC live stream to AMS, the "AVC sequence header" and the "AAC sequence header" must be sent first.
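For reference, the first two body bytes of each message type, as built in rtmplive.cpp below:
0x17 0x00   key frame + AVC, AVCPacketType 0: AVC sequence header (AVCDecoderConfigurationRecord)
0x17 0x01   key frame + AVC, AVCPacketType 1: AVC NALU
0x27 0x01   inter frame + AVC, AVCPacketType 1: AVC NALU
0xAF 0x00   AAC, AACPacketType 0: AAC sequence header (AudioSpecificConfig)
0xAF 0x01   AAC, AACPacketType 1: AAC raw data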
packet->m_nTimeStamp:
A key playback parameter. Both audio and video timestamps are zero before sending begins; as long as the timestamps of outgoing messages increase monotonically at even intervals, the stream plays back correctly.
The timestamps are derived from the frame rate and the audio parameters:
Video frames accumulate on a common time base according to the frame rate; e.g. at 25 fps the per-frame step is 1000/25 = 40 ms, added onto the first frame's PTS.
Audio accumulates according to the sample rate and the samples per frame; e.g. with 1024 samples per frame (1024 samples = one AAC frame) at a 44100 Hz sample rate (44100 samples per second),
the per-frame step is 1000*1024/44100 ≈ 23.22 ms.
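A minimal sketch of this accumulation (a hypothetical helper, not part of the demo); keeping the audio step in floating point avoids the drift that a truncated integer step such as 23 ms introduces:

// Hypothetical timestamp generator illustrating the accumulation above.
class StreamClock {
    private final double videoStepMs; // e.g. 1000.0/25 = 40 ms per video frame
    private final double audioStepMs; // e.g. 1000.0*1024/44100 ≈ 23.22 ms per AAC frame
    private double videoMs = 0, audioMs = 0;

    StreamClock(int fps, int samplesPerFrame, int sampleRate) {
        videoStepMs = 1000.0 / fps;
        audioStepMs = 1000.0 * samplesPerFrame / sampleRate;
    }

    long nextVideoTimestamp() { // RTMP timestamp for the next video frame
        long ts = (long) videoMs;
        videoMs += videoStepMs;
        return ts;
    }

    long nextAudioTimestamp() { // RTMP timestamp for the next AAC frame
        long ts = (long) audioMs;
        audioMs += audioStepMs;
        return ts;
    }
}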
RTMP URL format:
rtmp[t][e|s]://hostname[:port][/app[/playpath]]
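For example, the address used later in this demo, rtmp://192.168.10.113:1935/live/livestream, uses the default RTMP port 1935 with app "live" and playpath "livestream".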
Java side:
1. RtmpActivity.java:
...
static {
System.loadLibrary("rtmp"); // librtmp
System.loadLibrary("rtmplive");
}
public static native void RtmpLiveS(String url_s); // set the RTMP server URL
public static native void RtmpLiveE(); // stop pushing and free the allocated buffers
public static native void RtmpLiveSpsAndPps(byte[] sps, int sps_len, byte[] pps, int pps_len); // pass SPS/PPS down into the native metadata struct
public static native void RtmpLiveWHFreqR(int video_w, int video_h, int video_fps); // set the video width, height and frame rate
public static native void RtmpLiveSendNalU(byte[] nalu, int nalu_size, boolean isKeyFrame); // pass one NALU to JNI
public static native void RtmpLiveSendAacD(byte[] aac, int aac_size); // pass AAC raw data to JNI
public static native void RtmpLiveSendAacSequnceHder(byte[] aac, int aac_size); // pass the AAC sequence header to JNI
...
// open and configure the camera
private void openCamera(SurfaceHolder holder) {
releaseCamera();
try {
camera = getCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
} catch (Exception e) {
camera = null;
e.printStackTrace();
}
if(camera != null){
camera.setDisplayOrientation(90);
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(width, height);
parameters.setFlashMode("off");
parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
parameters.setPreviewFormat(ImageFormat.YV12); //NV21 / YV12
parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
List<int[]> fpsRange = parameters.getSupportedPreviewFpsRange();
for (int[] temp3 : fpsRange) {
System.out.println(Arrays.toString(temp3));
}
parameters.setPreviewFpsRange(15000, 15000); // must match one of the supported ranges printed above
camera.setParameters(parameters); // apply everything, including the fps range, in one call
buf = new byte[width*height*3/2]; // one YV12 frame: Y plane + two quarter-size chroma planes
camera.addCallbackBuffer(buf);
camera.setPreviewCallbackWithBuffer(this);
camera.startPreview();
}
}
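The configuration of the video MediaCodec fed by onFrame below is not shown in this excerpt; a minimal sketch consistent with its I420 input (method name, bitrate and I-frame interval are illustrative assumptions):

// Hypothetical setup of the H.264 encoder used by onFrame (values are assumptions).
private void prepareVideoEncoder() throws IOException {
    mediaCodec = MediaCodec.createEncoderByType("video/avc");
    MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
    videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); // I420, matching swapYV12toI420's output
    videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, 500000); // illustrative bitrate
    videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15); // matches the 15 fps preview range above
    videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2); // request an IDR frame every 2 s
    mediaCodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();
}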
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
onFrame(data, data.length); // synchronously encode the camera preview data with MediaCodec
camera.addCallbackBuffer(buf);
}
// YV12 => I420 (yuv420p): reorder the chroma planes from Y,V,U to Y,U,V
private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height) {
System.arraycopy(yv12bytes, 0, i420bytes, 0,width*height); // Y plane
System.arraycopy(yv12bytes, width*height+width*height/4, i420bytes, width*height,width*height/4); // U plane (second chroma plane in YV12)
System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/4,width*height/4); // V plane (first chroma plane in YV12)
}
public void onFrame(byte[] buf, int length) {
swapYV12toI420(buf, h264, width, height); // this encoder instance is configured for I420 (planar YUV420) input
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1); // -1: block until an input buffer is available
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(h264, 0, length);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,0);
while (outputBufferIndex >= 0) {
...
// Strip the start code (0x00 0x00 0x00 0x01 or 0x00 0x00 0x01) from the encoder output,
// save the SPS/PPS, and make sure the librtmp push of H.264 NALUs starts from an IDR frame
// (see the sketch after this method).
...
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
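The step elided inside the loop above, as a hypothetical sketch (handleEncodedFrame and findStartCode are illustrative names, not from the demo): split the output on Annex-B start codes, hand SPS/PPS to the native layer once, then push frame NALUs beginning with an IDR frame:

// Hypothetical sketch of the elided step above.
private boolean gotIdr = false;
private void handleEncodedFrame(byte[] outData, MediaCodec.BufferInfo info) {
    int offset = (outData[2] == 0x01) ? 3 : 4; // start code is 00 00 01 or 00 00 00 01
    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // the codec-config buffer carries SPS then PPS, each behind its own start code
        int ppsStart = findStartCode(outData, offset);
        byte[] sps = Arrays.copyOfRange(outData, offset, ppsStart);
        int ppsOffset = (outData[ppsStart + 2] == 0x01) ? ppsStart + 3 : ppsStart + 4;
        byte[] pps = Arrays.copyOfRange(outData, ppsOffset, outData.length);
        RtmpLiveSpsAndPps(sps, sps.length, pps, pps.length);
        return;
    }
    boolean isKeyFrame = (outData[offset] & 0x1F) == 5; // NALU type 5 = IDR slice
    if (isKeyFrame) gotIdr = true;
    if (gotIdr) { // push only once the stream has started on an IDR frame
        byte[] nalu = Arrays.copyOfRange(outData, offset, outData.length);
        RtmpLiveSendNalU(nalu, nalu.length, isKeyFrame);
    }
}
private int findStartCode(byte[] d, int from) { // index of the next 00 00 (00) 01
    for (int i = from; i + 3 < d.length; i++) {
        if (d[i] == 0 && d[i+1] == 0 && (d[i+2] == 1 || (d[i+2] == 0 && d[i+3] == 1))) return i;
    }
    return d.length;
}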
public void AudioEncoder() throws IOException {
File f = new File(Environment.getExternalStorageDirectory(), "audioencoded_1.aac");
try {
if(!f.exists()){
f.createNewFile();
}else{
if(f.delete()){
f.createNewFile();
}
}
} catch (IOException e) {
e.printStackTrace();
}
try {
outputStreamAAC = new BufferedOutputStream(new FileOutputStream(f)); // save the encoded AAC to a file, for testing only
} catch (Exception e){
e.printStackTrace();
}
isRunning = false;
count_tt0 = 0;
aacFlag = true;
bufferSize = AudioRecord.getMinBufferSize(44100,
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, // capture from the MIC
44100,
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT, //AudioFormat.ENCODING_DEFAULT
bufferSize);
mediaCodecAAC = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
format.setInteger(MediaFormat.KEY_BIT_RATE, 96000); //64 * 1024
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);
format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC); //AACObjectHE
mediaCodecAAC.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodecAAC.start();
isRecording = true;
recorder.startRecording();
new Thread() {
public void run() {
byte[] audioBuffer = new byte[bufferSize]; // a plain byte[]: a direct ByteBuffer has no accessible backing array
int read = 0;
while (isRecording) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
read = recorder.read(audioBuffer, 0, bufferSize); // pull PCM data from the AudioRecord
if (read > 0) { // read returns the byte count, or a negative error code
AACEncoder(audioBuffer, read); // synchronously encode the PCM with MediaCodec and push the AAC raw data via librtmp; the frames can also be written to a .aac file for testing, but keep that disabled while pushing, for performance
}
}
recorder.stop();
mediaCodecAAC.stop();
mediaCodecAAC.release();
try {
outputStreamAAC.flush();
outputStreamAAC.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}.start();
}
private void addADTStoPacket(byte[] packet, int packetLen) {
int profile = 2; // AAC LC (the ADTS profile field stores profile-1)
int freqIdx = 4; // 44.1 kHz (MPEG-4 sampling-frequency index)
int chanCfg = 2; // 2-channel configuration (stereo CPE)
// fill in the ADTS header
packet[0] = (byte)0xFF; // syncword, high 8 bits
packet[1] = (byte)0xF9; // syncword low 4 bits + MPEG-2 ID + layer 00 + protection_absent (0xF1 marks the stream as MPEG-4 instead)
packet[2] = (byte)(((profile-1)<<6) + (freqIdx<<2) +(chanCfg>>2));
packet[3] = (byte)(((chanCfg&3)<<6) + (packetLen>>11));
packet[4] = (byte)((packetLen&0x7FF) >> 3);
packet[5] = (byte)(((packetLen&7)<<5) + 0x1F); // low 3 bits of frame length + buffer-fullness high bits
packet[6] = (byte)0xFC; // buffer-fullness low bits + number_of_raw_data_blocks_in_frame == 0
}
// called with the PCM captured by AudioRecord's read
public synchronized void AACEncoder(byte[] input, int length) {
try {
ByteBuffer[] inputBuffers = mediaCodecAAC.getInputBuffers();
ByteBuffer[] outputBuffers = mediaCodecAAC.getOutputBuffers();
int inputBufferIndex = mediaCodecAAC.dequeueInputBuffer(-1); // -1: block until a buffer is available; 0: return immediately
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(input);
mediaCodecAAC.queueInputBuffer(inputBufferIndex, 0, length, 0, 0); //input.length
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodecAAC.dequeueOutputBuffer(bufferInfo,0);
while (outputBufferIndex >= 0) {
int outBitsSize = bufferInfo.size;
int outPacketSize = outBitsSize + 7; // 7 is ADTS size
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
//Without an ADTS header: the encoder output is raw AAC data
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
/*
//With ADTS header
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + outBitsSize);
byte[] outData = new byte[outPacketSize];
addADTStoPacket(outData, outPacketSize);
outputBuffer.get(outData, 7, outBitsSize);
outputBuffer.position(bufferInfo.offset);
//to save as .aac, every raw AAC frame must carry an ADTS header, or players cannot play the file
outputStreamAAC.write(outData, 0, outData.length);
*/
if(bufferInfo.size == 2){ // the AAC encoder's first output is the 2-byte AAC sequence header (AudioSpecificConfig); see the flag-based check after this method
RtmpLiveSendAacSequnceHder(outData, bufferInfo.size);
}else{
RtmpLiveSendAacD(outData, bufferInfo.size); // pass the raw AAC frame to JNI
}
outputBuffer.clear(); // reset the buffer for reuse
mediaCodecAAC.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodecAAC.dequeueOutputBuffer(bufferInfo, 0);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
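A note on the size == 2 test above: it works because AAC-LC's AudioSpecificConfig happens to be 2 bytes, but the API-level signal for the config buffer is the codec-config flag; a minimal sketch of the same dispatch:

// Hypothetical flag-based version of the dispatch above.
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    RtmpLiveSendAacSequnceHder(outData, bufferInfo.size); // AudioSpecificConfig
} else {
    RtmpLiveSendAacD(outData, bufferInfo.size); // one raw AAC frame
}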
JNI side:
2. rtmplive.cpp
Uses librtmp to send the packets to the AMS server.
// initialize and connect to the server
int RTMP264_Connect(const char* url)
{
m_pRtmp = RTMP_Alloc();
RTMP_Init(m_pRtmp);
LOGI(" RTMP264_Connect %s ", url);
/* set the URL */
if (RTMP_SetupURL(m_pRtmp,(char*)url) == FALSE)
{
RTMP_Free(m_pRtmp);
return false;
}
LOGI(" RTMP_SetupURL ok ");
/* enable write, i.e. publish a stream; must be called before connecting, otherwise it has no effect */
RTMP_EnableWrite(m_pRtmp);
/* connect to the server */
if (RTMP_Connect(m_pRtmp, NULL) == FALSE) // NetConnection struct sockaddr_in service (struct sockaddr*)&service
{
RTMP_Free(m_pRtmp);
return false;
}
LOGI(" RTMP_Connect ok ");
/* connect the stream */
if (RTMP_ConnectStream(m_pRtmp,0) == FALSE) // NetStream
{
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
return false;
}
LOGI(" RTMP_ConnectStream ok ");
return true;
}
// disconnect and release the associated resources
void RTMP264_Close()
{
if(m_pRtmp) {
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
}
LOGI(" RTMP264_Close ");
}
// send an RTMP packet
int SendPacket(unsigned int nPacketType,unsigned char *data,unsigned int size,unsigned int nTimestamp)
{
RTMPPacket* packet;
/* allocate and initialize the packet; the body (size bytes) lives in the same block */
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+size);
memset(packet,0,RTMP_HEAD_SIZE);
/* the body starts right after the reserved header area */
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
packet->m_nBodySize = size;
memcpy(packet->m_body,data,size);
packet->m_hasAbsTimestamp = 0;
packet->m_packetType = nPacketType; /* packet type: either audio or video */
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
packet->m_nChannel = 0x04;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
if (RTMP_PACKET_TYPE_AUDIO ==nPacketType && size !=4)
{
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
}
packet->m_nTimeStamp = nTimestamp;
/* send */
int nRet =0;
if (RTMP_IsConnected(m_pRtmp))
{
nRet = RTMP_SendPacket(m_pRtmp,packet,TRUE); /* TRUE: put the packet on the send queue; FALSE: bypass the queue and send directly */
}
/* free the memory */
free(packet);
return nRet;
}
//send the video SPS and PPS
//AVC sequence header
int SendVideoSpsPps(unsigned char *sps,int sps_len, unsigned char *pps,int pps_len,unsigned int nTimeStamp)
{
RTMPPacket * packet=NULL; // RTMP packet struct
unsigned char * body=NULL;
int i;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+1024);
//RTMPPacket_Reset(packet); // reset the packet state
memset(packet,0,RTMP_HEAD_SIZE+1024);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
body = (unsigned char *)packet->m_body;
i = 0;
body[i++] = 0x17; //FrameType: 1 (key frame) + CodecID: 7 (AVC)
body[i++] = 0x00; //AVCPacketType == 0: AVC sequence header
//Composition Time
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
//AVCDecoderConfigurationRecord
body[i++] = 0x01; // configurationVersion
body[i++] = sps[1]; // AVCProfileIndication
body[i++] = sps[2]; // profile_compatibility
body[i++] = sps[3]; // AVCLevelIndication
body[i++] = 0xff; // 6 reserved bits + lengthSizeMinusOne == 3 (4-byte NALU length fields)
//sps
body[i++] = 0xe1; // 3 reserved bits + numOfSequenceParameterSets == 1
body[i++] = (sps_len >> 8) & 0xff;
body[i++] = sps_len & 0xff;
memcpy(&body[i],sps,sps_len);
i += sps_len;
//pps
body[i++] = 0x01; // numOfPictureParameterSets == 1
body[i++] = (pps_len >> 8) & 0xff;
body[i++] = (pps_len) & 0xff;
memcpy(&body[i],pps,pps_len);
i += pps_len;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = i;
packet->m_nChannel = 0x04;
packet->m_nTimeStamp = nTimeStamp;//0;
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
/* send it */
int nRet = RTMP_SendPacket(m_pRtmp,packet,TRUE);
free(packet); // free the memory
return nRet;
}
/*
 * Send one H.264 frame
 * @param data        frame data
 * @param size        frame size
 * @param bIsKeyFrame whether the frame is a key frame
 * @param nTimeStamp  timestamp of the frame
 * @return 1 on success, 0 on failure
 */
int SendH264Packet(unsigned char *data,unsigned int size,int bIsKeyFrame,unsigned int nTimeStamp)
{
if(data == NULL || size < 11){
LOGI(" SendH264Packet data NULL, %d ", size);
return false;
}
unsigned char *body = (unsigned char*)malloc(size+9);
memset(body,0,size+9);
int i = 0;
if(bIsKeyFrame){
body[i++] = 0x17;// FrameType: 1 (key frame) + CodecID: 7 (AVC)
body[i++] = 0x01;// AVCPacketType == 1: AVC NALU
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
// NALU size
body[i++] = size>>24 &0xff;
body[i++] = size>>16 &0xff;
body[i++] = size>>8 &0xff;
body[i++] = size&0xff;
// NALU data
memcpy(&body[i],data,size);
SendVideoSpsPps(metaData.Sps, metaData.nSpsLen, metaData.Pps, metaData.nPpsLen, nTimeStamp); // resend the AVC sequence header before every key frame
tick +=tick_gap;
nTimeStamp = tick;
}else{
body[i++] = 0x27;// FrameType: 2 (inter frame) + CodecID: 7 (AVC)
body[i++] = 0x01;// AVCPacketType == 1: AVC NALU
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
// NALU size
body[i++] = size>>24 &0xff;
body[i++] = size>>16 &0xff;
body[i++] = size>>8 &0xff;
body[i++] = size&0xff;
// NALU data
memcpy(&body[i],data,size);
}
int bRet = SendPacket(RTMP_PACKET_TYPE_VIDEO,body,i+size,nTimeStamp);
free(body);
return bRet;
}
// AAC sequence header == AAC AudioSpecificConfig (DecoderSpecificInfo)
int SendAacSequenceHderPacket0(unsigned char *spec_buf,int spec_len)
{
RTMPPacket * packet;
unsigned char * body;
//int audio_specific_config = 0;
int len;
len = spec_len; /* length of the AudioSpecificConfig data, normally 2 */
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+len+2);
memset(packet,0,RTMP_HEAD_SIZE);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
body = (unsigned char *)packet->m_body;
/*AF 00 => AAC sequence header*/
body[0] = 0xAF;
body[1] = 0x00; //AACPacketType == 0x00: AAC sequence header
memcpy(&body[2],spec_buf,len); //spec_buf holds the AAC sequence header data
/*
//alternatively, build the AudioSpecificConfig by hand:
audio_specific_config |= ((2<<11)&0xF800); // 2: AAC LC
audio_specific_config |= ((4<<7)&0x0780); // 4: 44.1 kHz
audio_specific_config |= ((2<<3)&0x78); // 2: stereo
audio_specific_config |= 0&0x07; // padding: 000
body[3] = (audio_specific_config>>8)&0xFF;
body[4] = audio_specific_config&0xFF;
*/
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nBodySize = len+2;
packet->m_nChannel = 0x04;
packet->m_nTimeStamp = 0;
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
/* send it */
RTMP_SendPacket(m_pRtmp,packet,TRUE);
free(packet); // the packet must be freed after sending
return TRUE;
}
int SendAacRawPacket(unsigned char *buf, int len, unsigned int nTimeStamp)
{
if (len > 0) {
RTMPPacket * packet;
unsigned char * body;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE+len+2);
memset(packet,0,RTMP_HEAD_SIZE);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
body = (unsigned char *)packet->m_body;
/*AF 01 => AAC RAW data*/
body[0] = 0xAF;
body[1] = 0x01; //AACPacketType == 0x01: AAC raw data
//aac raw data
memcpy(&body[2],buf,len);
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nBodySize = len+2;
packet->m_nChannel = 0x04;
packet->m_nTimeStamp = nTimeStamp;//timeoffset;
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
/* send it */
RTMP_SendPacket(m_pRtmp,packet,TRUE);
free(packet);
}
return 0;
}
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveSpsAndPps(JNIEnv *env, jclass clz,
jbyteArray data, jint size, jbyteArray data0, jint size0)
{
LOGI(" RtmpLiveSpsAndPps ");
unsigned char *sps = (unsigned char *)env->GetByteArrayElements(data, JNI_FALSE);
unsigned char *pps = (unsigned char *)env->GetByteArrayElements(data0, JNI_FALSE);
metaData.nSpsLen = size;
metaData.Sps=(unsigned char*)malloc(size);
memcpy(metaData.Sps, sps, size);
metaData.nPpsLen = size0;
metaData.Pps=(unsigned char*)malloc(size0);
memcpy(metaData.Pps, pps, size0);
env->ReleaseByteArrayElements(data, (jbyte *)sps, 0);
env->ReleaseByteArrayElements(data0, (jbyte *)pps, 0);
}
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveWHFreqR(JNIEnv *env, jclass clz,
jint w, jint h, jint freq) // jint, matching the Java declaration (int video_fps)
{
LOGI(" RtmpLiveWHFreqR ");
metaData.nWidth = w;
metaData.nHeight = h;
metaData.nFrameRate = freq;
tick = 0;
tick_gap = 1000/metaData.nFrameRate;
//tick_gap = inc++ * 90000/fps;
}
int count;
int first_s=1;
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveSendNalU(JNIEnv *env, jclass clz,
jbyteArray data, jint size, jboolean iskeyframe)
{
unsigned char *nalu = (unsigned char *)env->GetByteArrayElements(data, JNI_FALSE);
if(++count>15){
count = 0;
LOGI(" RtmpLiveSendNalU %d ",size);
}
SendH264Packet(nalu, size, iskeyframe, tick);
tick +=tick_gap;
env->ReleaseByteArrayElements(data, (jbyte *)nalu, 0);
}
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveSendAacSequnceHder(JNIEnv *env, jclass clz,
jbyteArray data, jint size)
{
unsigned char *aac = (unsigned char *)env->GetByteArrayElements(data, JNI_FALSE);
if(++count>15){
count = 0;
//LOGI(" RtmpLiveSendAacD %d ",size);
}
tick0 = 0;
tick_gap0 = 23; // 1000*1024/44100 ≈ 23.22 ms, truncated to an integer step
SendAacSequenceHderPacket0(aac, size);
env->ReleaseByteArrayElements(data, (jbyte *)aac, 0);
}
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveSendAacD(JNIEnv *env, jclass clz,
jbyteArray data, jint size)
{
unsigned char *aac = (unsigned char *)env->GetByteArrayElements(data, JNI_FALSE);
if(++count>15){
count = 0;
LOGI(" RtmpLiveSendAacD %d ",size);
}
SendAacRawPacket(aac, size, tick0);
tick0 +=tick_gap0;
env->ReleaseByteArrayElements(data, (jbyte *)aac, 0);
}
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveS(JNIEnv *env, jclass clz, jstring url_s)
{
char const* url = env->GetStringUTFChars(url_s, NULL);
LOGI(" RtmpLive start ");
LOGI(" url %s ", url);
first_s = 1;
RTMP264_Connect(url); // e.g. rtmp://192.168.10.113:1935/live/livestream; RTMP URL format: rtmp[t][e|s]://hostname[:port][/app[/playpath]]
env->ReleaseStringUTFChars(url_s, url);
}
JNIEXPORT void JNICALL Java_com_example_myrtmplive_RtmpActivity_RtmpLiveE(JNIEnv *env, jclass clz)
{
//disconnect and release the associated resources
RTMP264_Close();
if(metaData.Sps != NULL){
free(metaData.Sps);
metaData.Sps = NULL; // avoid a double free on the next stop
}
if(metaData.Pps != NULL){
free(metaData.Pps);
metaData.Pps = NULL;
}
}
3. Android.mk:
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := rtmp
LOCAL_SRC_FILES := librtmp.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_C_INCLUDES += \
$(LOCAL_PATH)\
$(LOCAL_PATH)/librtmp
LOCAL_SHARED_LIBRARIES := rtmp
LOCAL_MODULE := rtmplive
LOCAL_SRC_FILES := rtmplive.cpp
LOCAL_LDLIBS += -llog -lc -lz
include $(BUILD_SHARED_LIBRARY)
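With this Android.mk in place (and librtmp.so prebuilt for the target ABI), running the NDK's ndk-build produces librtmp.so and librtmplive.so under libs/, where the System.loadLibrary calls in RtmpActivity pick them up.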