package com.qiniu.pili.droid.rtcstreaming.demo.activity.streaming;
import android.app.ProgressDialog;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.TextView;
import android.widget.Toast;
import com.qiniu.pili.droid.rtcstreaming.RTCAudioSource;
import com.qiniu.pili.droid.rtcstreaming.RTCConferenceOptions;
import com.qiniu.pili.droid.rtcstreaming.RTCConferenceState;
import com.qiniu.pili.droid.rtcstreaming.RTCConferenceStateChangedListener;
import com.qiniu.pili.droid.rtcstreaming.RTCMediaStreamingManager;
import com.qiniu.pili.droid.rtcstreaming.RTCRemoteWindowEventListener;
import com.qiniu.pili.droid.rtcstreaming.RTCStartConferenceCallback;
import com.qiniu.pili.droid.rtcstreaming.RTCSurfaceView;
import com.qiniu.pili.droid.rtcstreaming.RTCVideoWindow;
import com.qiniu.pili.droid.rtcstreaming.demo.R;
import com.qiniu.pili.droid.rtcstreaming.demo.core.QiniuAppServer;
import com.qiniu.pili.droid.streaming.AVCodecType;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting;
import com.qiniu.pili.droid.streaming.StreamStatusCallback;
import com.qiniu.pili.droid.streaming.StreamingProfile;
import com.qiniu.pili.droid.streaming.StreamingSessionListener;
import com.qiniu.pili.droid.streaming.StreamingState;
import com.qiniu.pili.droid.streaming.StreamingStateChangedListener;
import com.qiniu.pili.droid.streaming.WatermarkSetting;
import java.net.URISyntaxException;
import java.util.List;
/**
 * 演示 PK 模式下的主播端代码(anchor side of PK mode)。
 * PK 模式只支持一个主播和一个副主播进行连麦,主播的布局配置方法如下:
 * 左右两个 GLSurfaceView,左边显示主播的预览,右边显示副主播的画面。
 * 主播需要配置合流画面的参数:把主播配置为 50%,位于左边,副主播为 50%,位于右边。
 * NOTE(review): 原注释写的是"横屏"且标注为"副主播端",但本类名为 PKAnchorActivity、负责合流与推流
 * (即主播端),且 onCreate 中请求的是竖屏 (SCREEN_ORIENTATION_PORTRAIT) —— 横竖屏意图待确认。
 */
public class PKAnchorActivity extends AppCompatActivity {
    private static final String TAG = "PKAnchorActivity";
    /** Message id used by {@link #mHandler} to schedule a streaming reconnect attempt. */
    private static final int MESSAGE_ID_RECONNECTING = 0x01;

    private TextView mStatusTextView;
    private TextView mStatTextView;
    private Button mControlButton;
    private CheckBox mMuteCheckBox;
    private CheckBox mConferenceCheckBox;
    private Toast mToast = null;
    private ProgressDialog mProgressDialog;

    private RTCMediaStreamingManager mRTCStreamingManager;
    private StreamingProfile mStreamingProfile;

    // Lifecycle / state flags. All are read from background threads and callbacks,
    // so they intentionally stay simple booleans set on well-defined transitions.
    private boolean mIsActivityPaused = true;
    private boolean mIsPublishStreamStarted = false;
    private boolean mIsConferenceStarted = false;
    private boolean mIsInReadyState = false;

    private int mCurrentCamFacingIndex;
    private String mRoomName;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_pk_anchor);

        /**
         * Step 1: find & init views
         */
        GLSurfaceView cameraPreviewFrameView = (GLSurfaceView) findViewById(R.id.cameraPreview_surfaceView);
        boolean isSwCodec = getIntent().getBooleanExtra("swcodec", true);
        mRoomName = getIntent().getStringExtra("roomName");
        // NOTE(review): portrait is requested here although the RTC profile below uses
        // ENCODING_ORIENTATION.LAND — confirm the intended orientation for PK mode.
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        boolean isBeautyEnabled = getIntent().getBooleanExtra("beauty", false);
        boolean isWaterMarkEnabled = getIntent().getBooleanExtra("watermark", false);
        boolean isDebugModeEnabled = getIntent().getBooleanExtra("debugMode", false);
        mControlButton = (Button) findViewById(R.id.ControlButton);
        mStatusTextView = (TextView) findViewById(R.id.StatusTextView);
        mStatTextView = (TextView) findViewById(R.id.StatTextView);
        mMuteCheckBox = (CheckBox) findViewById(R.id.MuteCheckBox);
        mMuteCheckBox.setOnClickListener(mMuteButtonClickListener);
        mConferenceCheckBox = (CheckBox) findViewById(R.id.ConferenceCheckBox);
        mConferenceCheckBox.setOnClickListener(mConferenceButtonClickListener);
        mConferenceCheckBox.setVisibility(View.VISIBLE);
        CameraStreamingSetting.CAMERA_FACING_ID facingId = chooseCameraFacingId();
        mCurrentCamFacingIndex = facingId.ordinal();

        /**
         * Step 2: config camera & microphone settings
         */
        CameraStreamingSetting cameraStreamingSetting = new CameraStreamingSetting();
        cameraStreamingSetting.setCameraFacingId(facingId)
                .setContinuousFocusModeEnabled(true)
                .setRecordingHint(false)
                .setResetTouchFocusDelayInMs(3000)
                .setFocusMode(CameraStreamingSetting.FOCUS_MODE_CONTINUOUS_PICTURE)
                .setCameraPrvSizeLevel(CameraStreamingSetting.PREVIEW_SIZE_LEVEL.MEDIUM)
                .setCameraPrvSizeRatio(CameraStreamingSetting.PREVIEW_SIZE_RATIO.RATIO_4_3);
        if (isBeautyEnabled) {
            // Using sdk built in face beauty algorithm
            cameraStreamingSetting.setBuiltInFaceBeautyEnabled(true);
            // sdk built in face beauty settings
            cameraStreamingSetting.setFaceBeautySetting(new CameraStreamingSetting.FaceBeautySetting(0.8f, 0.8f, 0.6f));
            // set the beauty on/off
            cameraStreamingSetting.setVideoFilter(CameraStreamingSetting.VIDEO_FILTER_TYPE.VIDEO_FILTER_BEAUTY);
        }

        /**
         * Step 3: Must disable this options in PK mode
         */
        cameraStreamingSetting.setPreviewAdaptToEncodingSize(false);

        /**
         * Step 4: create streaming manager and set listeners
         */
        AVCodecType codecType = isSwCodec ? AVCodecType.SW_VIDEO_WITH_SW_AUDIO_CODEC
                : AVCodecType.HW_VIDEO_YUV_AS_INPUT_WITH_HW_AUDIO_CODEC;
        mRTCStreamingManager = new RTCMediaStreamingManager(getApplicationContext(), cameraPreviewFrameView, codecType);
        mRTCStreamingManager.setDebugLoggingEnabled(isDebugModeEnabled);
        mRTCStreamingManager.setConferenceStateListener(mRTCStreamingStateChangedListener);
        mRTCStreamingManager.setRemoteWindowEventListener(mRTCRemoteWindowEventListener);
        mRTCStreamingManager.setStreamStatusCallback(mStreamStatusCallback);
        mRTCStreamingManager.setStreamingStateListener(mStreamingStateChangedListener);
        mRTCStreamingManager.setStreamingSessionListener(mStreamingSessionListener);

        /**
         * Step 5: set conference options
         */
        RTCConferenceOptions options = new RTCConferenceOptions();
        // RATIO_4_3 & VIDEO_ENCODING_SIZE_HEIGHT_480 means the output size is 640 x 480
        options.setVideoEncodingSizeRatio(RTCConferenceOptions.VIDEO_ENCODING_SIZE_RATIO.RATIO_4_3);
        options.setVideoEncodingSizeLevel(RTCConferenceOptions.VIDEO_ENCODING_SIZE_HEIGHT_480);
        options.setVideoEncodingOrientation(RTCConferenceOptions.VIDEO_ENCODING_ORIENTATION.PORT);
        options.setVideoBitrateRange(300 * 1024, 800 * 1024);
        // 15 fps is enough
        options.setVideoEncodingFps(15);
        mRTCStreamingManager.setConferenceOptions(options);

        /**
         * Step 6: Set position of local window
         * This must be called before RTCMediaStreamingManager.prepare() or it won't work.
         */
        // mRTCStreamingManager.setLocalWindowPosition(0, 0, options.getVideoEncodingWidth(), options.getVideoEncodingHeight());
        // NOTE(review): 480x640 is hard-coded to match the PORT 4:3 encoding size above;
        // if the conference options change, this must change with them.
        mRTCStreamingManager.setLocalWindowPosition(0, 0, 480, 640);

        /**
         * Step 7: create the remote windows
         */
        RTCVideoWindow windowA = new RTCVideoWindow((RTCSurfaceView) findViewById(R.id.RemoteGLSurfaceViewA));

        /**
         * Step 8: configure the mix stream position and size (only anchor)
         * set mix overlay params with relative value
         */
        // windowA.setAbsoluteMixOverlayRect(options.getVideoEncodingWidth(), 0, options.getVideoEncodingWidth(), options.getVideoEncodingHeight());
        // Remote (sub-anchor) picture occupies the right half of the mixed frame.
        windowA.setAbsoluteMixOverlayRect(480, 0, 480, 640);

        /**
         * Step 9: add the remote windows
         */
        mRTCStreamingManager.addRemoteWindow(windowA);

        /**
         * Step 10: config streaming profile(only anchor)
         */
        mStreamingProfile = new StreamingProfile();
        mStreamingProfile.setVideoQuality(StreamingProfile.VIDEO_QUALITY_MEDIUM2)
                .setAudioQuality(StreamingProfile.AUDIO_QUALITY_MEDIUM1)
                .setEncoderRCMode(StreamingProfile.EncoderRCModes.QUALITY_PRIORITY)
                .setEncodingOrientation(StreamingProfile.ENCODING_ORIENTATION.LAND)
                // .setPreferredVideoEncodingSize(options.getVideoEncodingHeight(),options.getVideoEncodingWidth() * 2); 320*480
                .setPreferredVideoEncodingSize(960, 640);
        WatermarkSetting watermarksetting = null;
        if (isWaterMarkEnabled) {
            watermarksetting = new WatermarkSetting(this);
            watermarksetting.setResourceId(R.drawable.qiniu_logo)
                    .setSize(WatermarkSetting.WATERMARK_SIZE.MEDIUM)
                    .setAlpha(100)
                    .setCustomPosition(0.5f, 0.5f);
        }

        /**
         * Step 11: do prepare
         */
        mRTCStreamingManager.prepare(cameraStreamingSetting, null, watermarksetting, mStreamingProfile);
        mProgressDialog = new ProgressDialog(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mIsActivityPaused = false;
        /**
         * Step 12: You must start capture before conference or streaming
         * You will receive `Ready` state callback when capture started success
         */
        mRTCStreamingManager.startCapture();
    }

    @Override
    protected void onPause() {
        super.onPause();
        mIsActivityPaused = true;
        /**
         * Step 13: You must stop capture, stop conference, stop streaming when activity paused
         */
        mRTCStreamingManager.stopCapture();
        stopConference();
        stopPublishStreaming();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Drop any pending reconnect messages: the anonymous Handler below holds an
        // implicit reference to this Activity, so a queued MESSAGE_ID_RECONNECTING
        // would both leak the Activity and call startStreaming() after destruction.
        mHandler.removeCallbacksAndMessages(null);
        /**
         * Step 14: You must call destroy to release some resources when activity destroyed
         */
        mRTCStreamingManager.destroy();
    }

    /**
     * Cycles BACK -> FRONT -> 3RD camera facing and switches the capture source.
     * Wired to a layout onClick attribute.
     */
    public void onClickSwitchCamera(View v) {
        mCurrentCamFacingIndex = (mCurrentCamFacingIndex + 1) % CameraStreamingSetting.getNumberOfCameras();
        CameraStreamingSetting.CAMERA_FACING_ID facingId;
        if (mCurrentCamFacingIndex == CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_BACK.ordinal()) {
            facingId = CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_BACK;
        } else if (mCurrentCamFacingIndex == CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_FRONT.ordinal()) {
            facingId = CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_FRONT;
        } else {
            facingId = CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_3RD;
        }
        Log.i(TAG, "switchCamera:" + facingId);
        mRTCStreamingManager.switchCamera(facingId);
    }

    /** Wired to a layout onClick attribute. */
    public void onClickExit(View v) {
        finish();
    }

    /**
     * Starts joining the conference room on a background thread.
     *
     * @return false if the network is unavailable, true otherwise (including the
     *         already-started case; the actual join result arrives via callback).
     */
    private boolean startConference() {
        if (!QiniuAppServer.isNetworkAvailable(this)) {
            Toast.makeText(PKAnchorActivity.this, "network is unavailable!!!", Toast.LENGTH_SHORT).show();
            return false;
        }
        if (mIsConferenceStarted) {
            return true;
        }
        mProgressDialog.setMessage("正在加入连麦 ... ");
        mProgressDialog.show();
        // Room-token fetching does network I/O, so it must leave the UI thread.
        new Thread(new Runnable() {
            @Override
            public void run() {
                startConferenceInternal();
            }
        }).start();
        return true;
    }

    /**
     * Requests a room token from the app server and joins the conference.
     * Must be called off the UI thread.
     */
    private boolean startConferenceInternal() {
        String roomToken = QiniuAppServer.getInstance().requestRoomToken(QiniuAppServer.getTestUserId(this), mRoomName);
        if (roomToken == null) {
            dismissProgressDialog();
            showToast("无法获取房间信息 !", Toast.LENGTH_SHORT);
            return false;
        }
        mRTCStreamingManager.startConference(QiniuAppServer.getTestUserId(this), mRoomName, roomToken, new RTCStartConferenceCallback() {
            @Override
            public void onStartConferenceSuccess() {
                dismissProgressDialog();
                showToast(getString(R.string.start_conference), Toast.LENGTH_SHORT);
                updateControlButtonText();
                mIsConferenceStarted = true;
                /**
                 * Because `startConference` is called in child thread
                 * So we should check if the activity paused.
                 */
                if (mIsActivityPaused) {
                    stopConference();
                }
            }

            @Override
            public void onStartConferenceFailed(int errorCode) {
                setConferenceBoxChecked(false);
                dismissProgressDialog();
                showToast(getString(R.string.failed_to_start_conference) + errorCode, Toast.LENGTH_SHORT);
            }
        });
        return true;
    }

    /**
     * Leaves the conference if joined.
     *
     * @return true always (no-op when not in a conference).
     */
    private boolean stopConference() {
        if (!mIsConferenceStarted) {
            return true;
        }
        mRTCStreamingManager.stopConference();
        mIsConferenceStarted = false;
        setConferenceBoxChecked(false);
        showToast(getString(R.string.stop_conference), Toast.LENGTH_SHORT);
        updateControlButtonText();
        return true;
    }

    /**
     * Starts RTMP publishing on a background thread. Requires the capture
     * pipeline to have reached the READY state first.
     */
    private boolean startPublishStreaming() {
        if (!QiniuAppServer.isNetworkAvailable(this)) {
            Toast.makeText(PKAnchorActivity.this, "network is unavailable!!!", Toast.LENGTH_SHORT).show();
            return false;
        }
        if (mIsPublishStreamStarted) {
            return true;
        }
        if (!mIsInReadyState) {
            showToast(getString(R.string.stream_state_not_ready), Toast.LENGTH_SHORT);
            return false;
        }
        mProgressDialog.setMessage("正在准备推流... ");
        mProgressDialog.show();
        new Thread(new Runnable() {
            @Override
            public void run() {
                startPublishStreamingInternal();
            }
        }).start();
        return true;
    }

    /**
     * Fetches the publish address and starts streaming.
     * Must be called off the UI thread (network I/O + blocking start).
     */
    private boolean startPublishStreamingInternal() {
        // Fetch the publish address from the app server. (A previous revision
        // hard-coded an RTMP URL with an embedded stream key here — that leaked a
        // credential into source control and made the null-check below dead code.)
        String publishAddr = QiniuAppServer.getInstance().requestPublishAddress(mRoomName);
        if (publishAddr == null) {
            dismissProgressDialog();
            showToast("无法获取房间信息/推流地址 !", Toast.LENGTH_SHORT);
            return false;
        }
        try {
            mStreamingProfile.setPublishUrl(publishAddr);
        } catch (URISyntaxException e) {
            e.printStackTrace();
            dismissProgressDialog();
            showToast("无效的推流地址 !", Toast.LENGTH_SHORT);
            return false;
        }
        mRTCStreamingManager.setStreamingProfile(mStreamingProfile);
        if (!mRTCStreamingManager.startStreaming()) {
            dismissProgressDialog();
            showToast(getString(R.string.failed_to_start_streaming), Toast.LENGTH_SHORT);
            return false;
        }
        dismissProgressDialog();
        showToast(getString(R.string.start_streaming), Toast.LENGTH_SHORT);
        updateControlButtonText();
        mIsPublishStreamStarted = true;
        /**
         * Because `startPublishStreaming` need a long time in some weak network
         * So we should check if the activity paused.
         */
        if (mIsActivityPaused) {
            stopPublishStreaming();
        }
        return true;
    }

    /**
     * Stops RTMP publishing if started.
     *
     * @return true always (no-op when not streaming) — consistent with
     *         {@link #stopConference()}; a previous revision returned false
     *         even after a successful stop.
     */
    private boolean stopPublishStreaming() {
        if (!mIsPublishStreamStarted) {
            return true;
        }
        mRTCStreamingManager.stopStreaming();
        mIsPublishStreamStarted = false;
        showToast(getString(R.string.stop_streaming), Toast.LENGTH_SHORT);
        updateControlButtonText();
        return true;
    }

    /** Tracks publisher state transitions; drives the status label and reconnect logic. */
    private StreamingStateChangedListener mStreamingStateChangedListener = new StreamingStateChangedListener() {
        @Override
        public void onStateChanged(final StreamingState state, Object o) {
            switch (state) {
                case PREPARING:
                    setStatusText(getString(R.string.preparing));
                    Log.d(TAG, "onStateChanged state:" + "preparing");
                    break;
                case READY:
                    // Capture pipeline is up; streaming may now be started.
                    mIsInReadyState = true;
                    setStatusText(getString(R.string.ready));
                    Log.d(TAG, "onStateChanged state:" + "ready");
                    break;
                case CONNECTING:
                    Log.d(TAG, "onStateChanged state:" + "connecting");
                    break;
                case STREAMING:
                    setStatusText(getString(R.string.streaming));
                    Log.d(TAG, "onStateChanged state:" + "streaming");
                    break;
                case SHUTDOWN:
                    // After shutdown the pipeline is back in the ready state.
                    mIsInReadyState = true;
                    setStatusText(getString(R.string.ready));
                    Log.d(TAG, "onStateChanged state:" + "shutdown");
                    break;
                case UNKNOWN:
                    Log.d(TAG, "onStateChanged state:" + "unknown");
                    break;
                case SENDING_BUFFER_EMPTY:
                    Log.d(TAG, "onStateChanged state:" + "sending buffer empty");
                    break;
                case SENDING_BUFFER_FULL:
                    Log.d(TAG, "onStateChanged state:" + "sending buffer full");
                    break;
                case AUDIO_RECORDING_FAIL:
                    Log.d(TAG, "onStateChanged state:" + "audio recording failed");
                    showToast(getString(R.string.failed_open_microphone), Toast.LENGTH_SHORT);
                    stopPublishStreaming();
                    break;
                case OPEN_CAMERA_FAIL:
                    Log.d(TAG, "onStateChanged state:" + "open camera failed");
                    showToast(getString(R.string.failed_open_camera), Toast.LENGTH_SHORT);
                    stopPublishStreaming();
                    break;
                case IOERROR:
                    /**
                     * Network-connection is unavailable when `startStreaming`.
                     * You can do reconnecting or just finish the streaming
                     */
                    Log.d(TAG, "onStateChanged state:" + "io error");
                    showToast(getString(R.string.io_error), Toast.LENGTH_SHORT);
                    stopPublishStreaming();
                    sendReconnectMessage();
                    break;
                case DISCONNECTED:
                    /**
                     * Network-connection is broken after `startStreaming`.
                     * Reconnection is handled in `onRestartStreamingHandled`,
                     * so nothing is stopped here.
                     */
                    Log.d(TAG, "onStateChanged state:" + "disconnected");
                    setStatusText(getString(R.string.disconnected));
                    break;
            }
        }
    };

    private StreamingSessionListener mStreamingSessionListener = new StreamingSessionListener() {
        @Override
        public boolean onRecordAudioFailedHandled(int code) {
            return false;
        }

        /**
         * When the network-connection is broken, StreamingState#DISCONNECTED will notified first,
         * and then invoked this method if the environment of restart streaming is ready.
         *
         * @return true means you handled the event; otherwise, given up and then StreamingState#SHUTDOWN
         * will be notified.
         */
        @Override
        public boolean onRestartStreamingHandled(int code) {
            Log.d(TAG, "onRestartStreamingHandled, reconnect ...");
            return mRTCStreamingManager.startStreaming();
        }

        @Override
        public Camera.Size onPreviewSizeSelected(List list) {
            // Pick the first preview size tall enough for the 480p encoding target.
            for (Camera.Size size : list) {
                if (size.height >= 480) {
                    return size;
                }
            }
            return null;
        }

        @Override
        public int onPreviewFpsSelected(List list) {
            // -1 lets the SDK choose the preview fps range.
            return -1;
        }
    };

    /** Retries `startStreaming` every 500 ms while the network is down (see sendReconnectMessage). */
    private Handler mHandler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message msg) {
            // Ignore stale messages once paused or after streaming was stopped.
            if (msg.what != MESSAGE_ID_RECONNECTING || mIsActivityPaused || !mIsPublishStreamStarted) {
                return;
            }
            if (!QiniuAppServer.isNetworkAvailable(PKAnchorActivity.this)) {
                sendReconnectMessage();
                return;
            }
            Log.d(TAG, "do reconnecting ...");
            mRTCStreamingManager.startStreaming();
        }
    };

    /** Schedules a single reconnect attempt 500 ms from now, replacing any pending one. */
    private void sendReconnectMessage() {
        showToast("正在重连...", Toast.LENGTH_SHORT);
        mHandler.removeCallbacksAndMessages(null);
        mHandler.sendMessageDelayed(mHandler.obtainMessage(MESSAGE_ID_RECONNECTING), 500);
    }

    /** Surfaces RTC conference state changes as toasts; fatal ones finish the activity. */
    private RTCConferenceStateChangedListener mRTCStreamingStateChangedListener = new RTCConferenceStateChangedListener() {
        @Override
        public void onConferenceStateChanged(RTCConferenceState state, int extra) {
            switch (state) {
                case READY:
                    // You must `StartConference` after `Ready`
                    showToast(getString(R.string.ready), Toast.LENGTH_SHORT);
                    break;
                case RECONNECTING:
                    showToast(getString(R.string.reconnecting), Toast.LENGTH_SHORT);
                    break;
                case RECONNECTED:
                    showToast(getString(R.string.reconnected), Toast.LENGTH_SHORT);
                    break;
                case RECONNECT_FAIL:
                    showToast(getString(R.string.reconnect_failed), Toast.LENGTH_SHORT);
                    break;
                case VIDEO_PUBLISH_FAILED:
                case AUDIO_PUBLISH_FAILED:
                    showToast(getString(R.string.failed_to_publish_av_to_rtc) + extra, Toast.LENGTH_SHORT);
                    finish();
                    break;
                case VIDEO_PUBLISH_SUCCESS:
                    showToast(getString(R.string.success_publish_video_to_rtc), Toast.LENGTH_SHORT);
                    break;
                case AUDIO_PUBLISH_SUCCESS:
                    showToast(getString(R.string.success_publish_audio_to_rtc), Toast.LENGTH_SHORT);
                    break;
                case USER_JOINED_AGAIN:
                    // Same user joined from another device; this session must exit.
                    showToast(getString(R.string.user_join_other_where), Toast.LENGTH_SHORT);
                    finish();
                    break;
                case USER_KICKOUT_BY_HOST:
                    showToast(getString(R.string.user_kickout_by_host), Toast.LENGTH_SHORT);
                    finish();
                    break;
                case OPEN_CAMERA_FAIL:
                    showToast(getString(R.string.failed_open_camera), Toast.LENGTH_SHORT);
                    break;
                case AUDIO_RECORDING_FAIL:
                    showToast(getString(R.string.failed_open_microphone), Toast.LENGTH_SHORT);
                    break;
                default:
                    return;
            }
        }
    };

    /** Logs remote-window lifecycle events for the sub-anchor's video. */
    private RTCRemoteWindowEventListener mRTCRemoteWindowEventListener = new RTCRemoteWindowEventListener() {
        @Override
        public void onRemoteWindowAttached(RTCVideoWindow window, String remoteUserId) {
            Log.d(TAG, "onRemoteWindowAttached: " + remoteUserId);
        }

        @Override
        public void onRemoteWindowDetached(RTCVideoWindow window, String remoteUserId) {
            Log.d(TAG, "onRemoteWindowDetached: " + remoteUserId);
        }

        @Override
        public void onFirstRemoteFrameArrived(String remoteUserId) {
            Log.d(TAG, "onFirstRemoteFrameArrived: " + remoteUserId);
        }
    };

    /** Toggles microphone mute according to the checkbox state. */
    private View.OnClickListener mMuteButtonClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mMuteCheckBox.isChecked()) {
                mRTCStreamingManager.mute(RTCAudioSource.MIC);
            } else {
                mRTCStreamingManager.unMute(RTCAudioSource.MIC);
            }
        }
    };

    /** Joins/leaves the conference according to the checkbox state. */
    private View.OnClickListener mConferenceButtonClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mConferenceCheckBox.isChecked()) {
                startConference();
            } else {
                stopConference();
            }
        }
    };

    /** Toggles publishing; wired to a layout onClick attribute. */
    public void onClickStreaming(View v) {
        if (!mIsPublishStreamStarted) {
            startPublishStreaming();
        } else {
            stopPublishStreaming();
        }
    }

    /** Updates the status label; safe to call from any thread. */
    private void setStatusText(final String status) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mStatusTextView.setText(status);
            }
        });
    }

    /** Syncs the control button text with the publishing state; safe from any thread. */
    private void updateControlButtonText() {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (mIsPublishStreamStarted) {
                    mControlButton.setText(getString(R.string.stop_streaming));
                } else {
                    mControlButton.setText(getString(R.string.start_streaming));
                }
            }
        });
    }

    /** Sets the conference checkbox state; safe to call from any thread. */
    private void setConferenceBoxChecked(final boolean enabled) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mConferenceCheckBox.setChecked(enabled);
            }
        });
    }

    /** Renders live bitrate/fps stats into the stats label. */
    private StreamStatusCallback mStreamStatusCallback = new StreamStatusCallback() {
        @Override
        public void notifyStreamStatusChanged(final StreamingProfile.StreamStatus streamStatus) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    String stat = "bitrate: " + streamStatus.totalAVBitrate / 1024 + " kbps"
                            + "\naudio: " + streamStatus.audioFps + " fps"
                            + "\nvideo: " + streamStatus.videoFps + " fps";
                    mStatTextView.setText(stat);
                }
            });
        }
    };

    /** Dismisses the progress dialog; safe to call from any thread. */
    private void dismissProgressDialog() {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mProgressDialog.dismiss();
            }
        });
    }

    /**
     * Shows a toast, cancelling the previous one so messages don't queue up.
     * No-op while the activity is paused. Safe to call from any thread.
     */
    private void showToast(final String text, final int duration) {
        if (mIsActivityPaused) {
            return;
        }
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if (mToast != null) {
                    mToast.cancel();
                }
                mToast = Toast.makeText(PKAnchorActivity.this, text, duration);
                mToast.show();
            }
        });
    }

    /** Prefers an external (3rd) camera, then front, falling back to back. */
    private CameraStreamingSetting.CAMERA_FACING_ID chooseCameraFacingId() {
        if (CameraStreamingSetting.hasCameraFacing(CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_3RD)) {
            return CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_3RD;
        } else if (CameraStreamingSetting.hasCameraFacing(CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_FRONT)) {
            return CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_FRONT;
        } else {
            return CameraStreamingSetting.CAMERA_FACING_ID.CAMERA_FACING_BACK;
        }
    }
}