Many developers have asked how to implement multi-instance publishing on the Android platform, and "multi-instance publishing" can be understood in several ways.
Most designs currently on the market are not flexible enough. Taking "callback of encoded audio/video data on Android" as an example, the idea is: push one stream of encoded data out over RTMP, have the encoded data called back to the application layer, then start a new Publisher instance and push it to a second RTMP address (this is only a demonstration of the workflow; in practice the callback can be used to integrate with third-party systems such as GB28181 or other services):
The concrete workflow is walked through below, starting with the corresponding interfaces:
/**
* Set Audio Encoded Data Callback.
*
* @param audio_encoded_data_callback: Audio Encoded Data Callback.
*
* @return {0} if successful
*/
public native int SmartPublisherSetAudioEncodedDataCallback(long handle, Object audio_encoded_data_callback);
/**
* Set Video Encoded Data Callback.
*
* @param video_encoded_data_callback: Video Encoded Data Callback.
*
* @return {0} if successful
*/
public native int SmartPublisherSetVideoEncodedDataCallback(long handle, Object video_encoded_data_callback);
Set the callbacks:
libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());
libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());
class PublisherAudioEncodedDataCallback implements NTAudioDataCallback
{
    private int audio_buffer_size = 0;
    private int param_info_size = 0;

    private ByteBuffer audio_buffer_ = null;
    private ByteBuffer parameter_info_ = null;

    @Override
    public ByteBuffer getAudioByteBuffer(int size)
    {
        //Log.i("getAudioByteBuffer", "size: " + size);

        if (size < 1)
        {
            return null;
        }

        // Reuse the existing direct buffer if it is already big enough.
        if (size <= audio_buffer_size && audio_buffer_ != null)
        {
            return audio_buffer_;
        }

        // Grow with 512 bytes of headroom, rounded up to a multiple of 16.
        audio_buffer_size = size + 512;
        audio_buffer_size = (audio_buffer_size + 0xf) & (~0xf);

        audio_buffer_ = ByteBuffer.allocateDirect(audio_buffer_size);
        //Log.i("getAudioByteBuffer", "size: " + size + " buffer_size:" + audio_buffer_size);

        return audio_buffer_;
    }

    @Override
    public ByteBuffer getAudioParameterInfo(int size)
    {
        //Log.i("getAudioParameterInfo", "size: " + size);

        if (size < 1)
        {
            return null;
        }

        if (size <= param_info_size && parameter_info_ != null)
        {
            return parameter_info_;
        }

        // Grow with 32 bytes of headroom, rounded up to a multiple of 16.
        param_info_size = size + 32;
        param_info_size = (param_info_size + 0xf) & (~0xf);

        parameter_info_ = ByteBuffer.allocateDirect(param_info_size);
        //Log.i("getAudioParameterInfo", "size: " + size + " buffer_size:" + param_info_size);

        return parameter_info_;
    }

    public void onAudioDataCallback(int ret, int audio_codec_id, int sample_size, int is_key_frame, long timestamp, int sample_rate, int channel, int parameter_info_size, long reserve)
    {
        Log.i("onAudioDataCallback", "ret: " + ret + ", audio_codec_id: " + audio_codec_id + ", sample_size: " + sample_size + ", timestamp: " + timestamp +
                ", sample_rate: " + sample_rate + ", chn: " + channel + ", parameter_info_size: " + parameter_info_size);

        if (audio_buffer_ == null)
            return;

        audio_buffer_.rewind();

        // Forward the encoded audio frame to the second publisher instance.
        if (ret == 0 && publisherHandle2 != 0) {
            libPublisher.SmartPublisherPostAudioEncodedData(publisherHandle2, audio_codec_id, audio_buffer_, sample_size, is_key_frame, timestamp, parameter_info_, parameter_info_size);
        }
    }
}
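A quick note on the buffer-growth arithmetic above: the expression (n + 0xf) & (~0xf) rounds n up to the next multiple of 16. For example, if the SDK requests 4,100 bytes for an audio frame, the buffer grows to 4,100 + 512 = 4,612 bytes, which is then aligned up to 4,624; any later frame that fits in this buffer reuses it without reallocating, so the direct ByteBuffer handed to the native layer stays stable until a larger frame arrives.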
class PublisherVideoEncodedDataCallback implements NTVideoDataCallback
{
    private int video_buffer_size = 0;
    private ByteBuffer video_buffer_ = null;

    @Override
    public ByteBuffer getVideoByteBuffer(int size)
    {
        //Log.i("getVideoByteBuffer", "size: " + size);

        if (size < 1)
        {
            return null;
        }

        // Reuse the existing direct buffer if it is already big enough.
        if (size <= video_buffer_size && video_buffer_ != null)
        {
            return video_buffer_;
        }

        // Grow with 1024 bytes of headroom, rounded up to a multiple of 16.
        video_buffer_size = size + 1024;
        video_buffer_size = (video_buffer_size + 0xf) & (~0xf);

        video_buffer_ = ByteBuffer.allocateDirect(video_buffer_size);
        //Log.i("getVideoByteBuffer", "size: " + size + " buffer_size:" + video_buffer_size);

        return video_buffer_;
    }

    public void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame, long timestamp, int width, int height, long presentation_timestamp)
    {
        Log.i("onVideoDataCallback", "ret: " + ret + ", video_codec_id: " + video_codec_id + ", sample_size: " + sample_size + ", is_key_frame: " + is_key_frame + ", timestamp: " + timestamp +
                ", width: " + width + ", height: " + height + ", presentation_timestamp: " + presentation_timestamp);

        if (video_buffer_ == null)
            return;

        video_buffer_.rewind();

        // Forward the encoded video frame to the second publisher instance.
        if (ret == 0 && publisherHandle2 != 0) {
            libPublisher.SmartPublisherPostVideoEncodedData(publisherHandle2, video_codec_id, video_buffer_, sample_size, is_key_frame, timestamp, presentation_timestamp);
        }
    }
}
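For reference, judging from the two implementations above, the callback interfaces presumably look roughly like the sketch below. The authoritative declarations of NTAudioDataCallback and NTVideoDataCallback ship with the SDK's Java bindings, so treat this only as a reading aid:

import java.nio.ByteBuffer;

// Sketch only: interface shapes inferred from the implementations above.
interface NTAudioDataCallback
{
    ByteBuffer getAudioByteBuffer(int size);
    ByteBuffer getAudioParameterInfo(int size);
    void onAudioDataCallback(int ret, int audio_codec_id, int sample_size, int is_key_frame, long timestamp,
                             int sample_rate, int channel, int parameter_info_size, long reserve);
}

interface NTVideoDataCallback
{
    ByteBuffer getVideoByteBuffer(int size);
    void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame, long timestamp,
                             int width, int height, long presentation_timestamp);
}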
/**
* Start output Encoded Data (used to enable the encoded audio/video data callback)
*
* @return {0} if successful
*/
public native int SmartPublisherStartOutputEncodedData(long handle);
/**
* Stop output Encoded Data
*
* @return {0} if successful
*/
public native int SmartPublisherStopOutputEncodedData(long handle);
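Note that SmartPublisherStartOutputEncodedData and SmartPublisherStopOutputEncodedData are called on the original instance (publisherHandle); the relay instance (publisherHandle2) is managed separately with SmartPublisherOpen / SmartPublisherSetURL / SmartPublisherStartPublisher and torn down with SmartPublisherStopPublisher / SmartPublisherClose, as the button handler below shows.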
class ButtonEncodedDataCallbackListener implements OnClickListener {
    public void onClick(View v) {
        // Stop path: tear down the callback and the relay instance.
        if (isEncodedDatacallbackRunning) {
            stopEncodedDataCallback();

            if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
                ConfigControlEnable(true);
            }

            btnEncodedDataCallback.setText("Start encoded data callback");
            isEncodedDatacallbackRunning = false;

            if (publisherHandle2 != 0) {
                libPublisher.SmartPublisherStopPublisher(publisherHandle2);
                libPublisher.SmartPublisherClose(publisherHandle2);
                publisherHandle2 = 0;
            }

            return;
        }

        Log.i(TAG, "onClick start encoded data callback..");

        if (libPublisher == null)
            return;

        if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
            InitAndSetConfig();
        }

        // Register the encoded-data callbacks on the original publisher instance.
        libPublisher.SmartPublisherSetAudioEncodedDataCallback(publisherHandle, new PublisherAudioEncodedDataCallback());
        libPublisher.SmartPublisherSetVideoEncodedDataCallback(publisherHandle, new PublisherVideoEncodedDataCallback());

        int startRet = libPublisher.SmartPublisherStartOutputEncodedData(publisherHandle);
        if (startRet != 0) {
            isEncodedDatacallbackRunning = false;
            Log.e(TAG, "Failed to start encoded data callback.");
            return;
        }

        if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
            if (pushType == 0 || pushType == 1) {
                CheckInitAudioRecorder();   //enable pure video publisher..
            }

            ConfigControlEnable(false);
        }

        btnEncodedDataCallback.setText("Stop encoded data callback");
        isEncodedDatacallbackRunning = true;

        // Open a second publisher instance and relay the encoded data to a new RTMP URL.
        int audio_opt = 2;
        int video_opt = 2;

        publisherHandle2 = libPublisher.SmartPublisherOpen(myContext, audio_opt, video_opt,
                videoWidth, videoHeight);

        if (publisherHandle2 == 0) {
            Log.e(TAG, "sdk open failed!");
            return;
        }

        String relayUrl = "rtmp://player.daniulive.com:1935/hls/stream8888";
        libPublisher.SmartPublisherSetURL(publisherHandle2, relayUrl);
        libPublisher.SmartPublisherStartPublisher(publisherHandle2);
    }
}
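The demo does not show where this listener is attached to the button; presumably it is wired up in the standard Android way, for example:

// Assumed wiring (not shown in the original demo code):
btnEncodedDataCallback.setOnClickListener(new ButtonEncodedDataCallbackListener());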
// Stop the encoded data callback
private void stopEncodedDataCallback() {
    if (!isEncodedDatacallbackRunning) {
        return;
    }

    if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
        if (audioRecord_ != null) {
            Log.i(TAG, "stopEncodedDataCallback, call audioRecord_.Stop()..");

            audioRecord_.Stop();

            if (audioRecordCallback_ != null) {
                audioRecord_.RemoveCallback(audioRecordCallback_);
                audioRecordCallback_ = null;
            }

            audioRecord_ = null;
        }
    }

    if (libPublisher != null) {
        libPublisher.SmartPublisherStopOutputEncodedData(publisherHandle);
    }

    if (!isPushing && !isRTSPPublisherRunning && !isRecording) {
        if (publisherHandle != 0) {
            if (libPublisher != null) {
                libPublisher.SmartPublisherClose(publisherHandle);
                publisherHandle = 0;
            }
        }
    }
}
To make the multi-instance effect easy to demonstrate, the demo above opens a second publishing instance (with its own handle, publisherHandle2). The encoded audio and video data is handed to this new instance through the encoded-data posting interfaces and pushed out again over RTMP, which is how multi-instance publishing is achieved.
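To recap, the relay path through the second instance boils down to the calls below, all of which appear in the demo above (a condensed sketch; the teardown mirrors the stop branch of the button handler):

// Open a second publisher instance that carries the relayed stream.
publisherHandle2 = libPublisher.SmartPublisherOpen(myContext, 2 /*audio_opt*/, 2 /*video_opt*/, videoWidth, videoHeight);
libPublisher.SmartPublisherSetURL(publisherHandle2, "rtmp://player.daniulive.com:1935/hls/stream8888");
libPublisher.SmartPublisherStartPublisher(publisherHandle2);

// The encoded-data callbacks then forward every frame:
//   SmartPublisherPostAudioEncodedData(publisherHandle2, ...)
//   SmartPublisherPostVideoEncodedData(publisherHandle2, ...)

// Teardown when the callback is stopped:
libPublisher.SmartPublisherStopOutputEncodedData(publisherHandle);
libPublisher.SmartPublisherStopPublisher(publisherHandle2);
libPublisher.SmartPublisherClose(publisherHandle2);
publisherHandle2 = 0;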