webrtc 代码方面做了一下小的整理
1、依赖
implementation 'org.webrtc:google-webrtc:1.0.+'
implementation('io.socket:socket.io-client:0.8.3') {
// excluding org.json which is provided by Android
exclude group: 'org.json', module: 'json'
}
2、权限
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
3、webrtc代码部分
(1)初始化PeerConnectionFactory
PeerConnectionFactory.InitializationOptions initializationOptions =
PeerConnectionFactory.InitializationOptions.builder(this)
.setEnableVideoHwAcceleration(true)
.createInitializationOptions();
PeerConnectionFactory.initialize(initializationOptions);
使用硬编解码
//Create a new PeerConnectionFactory instance - using Hardware encoder and decoder.
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
DefaultVideoEncoderFactory defaultVideoEncoderFactory = new DefaultVideoEncoderFactory(
rootEglBase.getEglBaseContext(), /* enableIntelVp8Encoder */true, /* enableH264HighProfile */true);
DefaultVideoDecoderFactory defaultVideoDecoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
peerConnectionFactory = new PeerConnectionFactory(options, defaultVideoEncoderFactory, defaultVideoDecoderFactory);
(2)video Capturer——使用camera
webrtc 提供了开始捕获视频的步骤,因此你不需要知道camera的使用,通过webrtc调用就可以了
VideoCapturer videoCapturerAndroid;
videoCapturerAndroid = createCameraCapturer(new Camera1Enumerator(false));
createCameraCapturer()方法——获取前置摄像头
// Picks a camera device and returns a VideoCapturer for it.
// Front-facing cameras are tried first; if none can be opened, any
// remaining (non-front) device is tried. Returns null when no camera
// at all could be opened.
private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();
    // Pass 1: front-facing devices only.
    Logging.d(TAG, "Looking for front facing cameras.");
    VideoCapturer capturer = openFirstMatching(enumerator, deviceNames, true);
    if (capturer != null) {
        return capturer;
    }
    // Pass 2: fall back to every non-front device.
    Logging.d(TAG, "Looking for other cameras.");
    return openFirstMatching(enumerator, deviceNames, false);
}

// Creates a capturer for the first device whose facing matches wantFront,
// or returns null when none of the matching devices can be opened.
private VideoCapturer openFirstMatching(CameraEnumerator enumerator, String[] deviceNames, boolean wantFront) {
    for (String deviceName : deviceNames) {
        if (enumerator.isFrontFacing(deviceName) == wantFront) {
            Logging.d(TAG, wantFront
                    ? "Creating front facing camera capturer."
                    : "Creating other camera capturer.");
            VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
            if (videoCapturer != null) {
                return videoCapturer;
            }
        }
    }
    return null;
}
(3)Local media stream
在webrtc中,我们有media stream 媒体流的概念
//Create MediaConstraints - Will be useful for specifying video and audio constraints.
audioConstraints = new MediaConstraints(); //audio一些媒体限制,下面会用到
videoConstraints = new MediaConstraints();//video一些媒体限制,暂时没用到,以前版本api会用到
//Create a VideoSource instance
if (videoCapturerAndroid != null) {
videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);
}
localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);//100:VIDEO_TRACK_ID
//create an AudioSource instance
audioSource = peerConnectionFactory.createAudioSource(audioConstraints);
localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);//101:AUDIO_TRACK_ID
if (videoCapturerAndroid != null) {
videoCapturerAndroid.startCapture(1024, 720, 30);
}
添加stream track(audio/video track)
// Bundles the local audio and video tracks into a single MediaStream
// and attaches it to the local PeerConnection so it gets sent to the
// remote peer during negotiation.
private void addStreamToLocalPeer() {
    // "102" is the LOCAL_STREAM_ID shared with the signalling code.
    MediaStream localStream = peerConnectionFactory.createLocalMediaStream("102");
    localStream.addTrack(localAudioTrack);
    localStream.addTrack(localVideoTrack);
    localPeer.addStream(localStream);
}
(4)使用SurfaceViewRenderer
有了上面几步,我们可以实现在本地预览,为了实现预览效果需要在移动端的布局中添加控件
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical">
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/activity_main"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="1"
android:orientation="vertical"
tools:context="xyz.vivekc.webrtccodelab.MainActivity">
<org.webrtc.SurfaceViewRenderer
android:id="@+id/remote_gl_surface_view"
android:layout_width="match_parent"
android:layout_weight="1"
android:visibility="gone"
android:layout_height="match_parent" />
<org.webrtc.SurfaceViewRenderer
android:id="@+id/local_gl_surface_view"
android:layout_width="match_parent"
android:layout_gravity="bottom|end"
android:layout_height="match_parent" />
</FrameLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="bottom"
android:orientation="horizontal">
<Button
android:layout_width="wrap_content"
android:layout_margin="5dp"
android:layout_height="wrap_content"
android:id="@+id/end_call"
android:text="Hangup" />
</LinearLayout>
</FrameLayout>
代码
SurfaceViewRenderer localVideoView;
SurfaceViewRenderer remoteVideoView;
// Creates the shared EGL context and prepares both SurfaceViewRenderers
// (local preview and remote video) for rendering.
private void initVideos() {
    rootEglBase = EglBase.create();
    // Both renderers share the same EGL context.
    localVideoView.init(rootEglBase.getEglBaseContext(), null);
    localVideoView.setZOrderMediaOverlay(true);
    remoteVideoView.init(rootEglBase.getEglBaseContext(), null);
    remoteVideoView.setZOrderMediaOverlay(true);
    // NOTE(review): enabling the media overlay on BOTH views can make
    // their relative z-order unpredictable; typically only the small
    // local preview needs it — confirm the intended layering.
}
localVideoView.setVisibility(View.VISIBLE);
//create a videoRenderer based on SurfaceViewRenderer instance
localRenderer = new VideoRenderer(localVideoView);
// And finally, with our VideoRenderer ready, we
// can add our renderer to the VideoTrack.
localVideoTrack.addRenderer(localRenderer);
localVideoView.setMirror(true);
remoteVideoView.setMirror(true);
(5)PeerConnection
为了创建peerconnection 我们需要提供stun 和turn server
List<PeerConnection.IceServer> peerIceServers = new ArrayList<>();
iceserver
// Registers the STUN and TURN servers used during ICE candidate gathering.
// NOTE(review): "user"/"pass" are placeholder TURN credentials — replace
// them with real ones before shipping.
private void initIceServer() {
    // Google's public STUN server — discovers our public address only.
    peerIceServers.add(
            PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
    // TURN relays media when no direct peer-to-peer path can be established.
    peerIceServers.add(
            PeerConnection.IceServer.builder("turn:turn.tokbox.com:443")
                    .setUsername("user")
                    .setPassword("pass")
                    .createIceServer());
}
4、SDP 会话
(1)跟js api中的基本一样
(2)这里需要实现PeerConnectionObserver 和SDPObserver
(3)SDP over socket.IO
(4)这是媒体流的最后一步
(5)记得添加你的renderer
/**
 * PeerConnection.Observer that logs every callback to logcat.
 * Each line is tagged "&lt;canonical class name&gt; &lt;logTag suffix&gt;", which is
 * handy while wiring up signalling.
 *
 * Fixes vs. the original: the IceCandidate[] and MediaStream[] callbacks
 * logged the array reference itself (prints only an identity hash like
 * "[Lorg.webrtc.IceCandidate;@1a2b3c"); they now log the array contents
 * via java.util.Arrays.toString.
 */
class CustomPeerConnectionObserver implements PeerConnection.Observer {
    // Canonical class name plus the caller-supplied suffix, fixed at construction.
    private final String logTag;

    CustomPeerConnectionObserver(String logTag) {
        this.logTag = getClass().getCanonicalName() + " " + logTag;
    }

    @Override
    public void onSignalingChange(PeerConnection.SignalingState signalingState) {
        Log.d(logTag, "onSignalingChange() called with: signalingState = [" + signalingState + "]");
    }

    @Override
    public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
        Log.d(logTag, "onIceConnectionChange() called with: iceConnectionState = [" + iceConnectionState + "]");
    }

    @Override
    public void onIceConnectionReceivingChange(boolean b) {
        Log.d(logTag, "onIceConnectionReceivingChange() called with: b = [" + b + "]");
    }

    @Override
    public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
        Log.d(logTag, "onIceGatheringChange() called with: iceGatheringState = [" + iceGatheringState + "]");
    }

    @Override
    public void onIceCandidate(IceCandidate iceCandidate) {
        Log.d(logTag, "onIceCandidate() called with: iceCandidate = [" + iceCandidate + "]");
    }

    @Override
    public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
        // Arrays.toString renders the candidates; logging the array directly
        // would only print its identity hash.
        Log.d(logTag, "onIceCandidatesRemoved() called with: iceCandidates = [" + java.util.Arrays.toString(iceCandidates) + "]");
    }

    @Override
    public void onAddStream(MediaStream mediaStream) {
        Log.d(logTag, "onAddStream() called with: mediaStream = [" + mediaStream + "]");
    }

    @Override
    public void onRemoveStream(MediaStream mediaStream) {
        Log.d(logTag, "onRemoveStream() called with: mediaStream = [" + mediaStream + "]");
    }

    @Override
    public void onDataChannel(DataChannel dataChannel) {
        Log.d(logTag, "onDataChannel() called with: dataChannel = [" + dataChannel + "]");
    }

    @Override
    public void onRenegotiationNeeded() {
        Log.d(logTag, "onRenegotiationNeeded() called");
    }

    @Override
    public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
        // Arrays.toString renders the stream list; logging the array directly
        // would only print its identity hash.
        Log.d(logTag, "onAddTrack() called with: rtpReceiver = [" + rtpReceiver + "], mediaStreams = [" + java.util.Arrays.toString(mediaStreams) + "]");
    }
}
/**
 * SdpObserver that simply logs every SDP create/set callback,
 * tagged "&lt;canonical class name&gt; &lt;logTag suffix&gt;".
 */
class CustomSdpObserver implements SdpObserver {
    // Canonical class name plus the caller-supplied suffix, fixed at construction.
    private final String tag;

    CustomSdpObserver(String logTag) {
        this.tag = getClass().getCanonicalName() + " " + logTag;
    }

    @Override
    public void onCreateSuccess(SessionDescription sessionDescription) {
        Log.d(tag, "onCreateSuccess() called with: sessionDescription = [" + sessionDescription + "]");
    }

    @Override
    public void onSetSuccess() {
        Log.d(tag, "onSetSuccess() called");
    }

    @Override
    public void onCreateFailure(String s) {
        Log.d(tag, "onCreateFailure() called with: s = [" + s + "]");
    }

    @Override
    public void onSetFailure(String s) {
        Log.d(tag, "onSetFailure() called with: s = [" + s + "]");
    }
}