Android Studio: 3.5.2
org.webrtc:google-webrtc:1.0.28513
WebRTC,名称源自网页即时通信(英语:Web Real-Time Communication)的缩写,是一个支持网页浏览器进行实时语音对话或视频对话的API。它于2011年6月1日开源并在Google、Mozilla、Opera支持下被纳入万维网联盟的W3C推荐标准
点对点音频通话,视频通话,数据共享
/**
 * Creates the WebRTC {@link PeerConnectionFactory} used to build local media
 * streams and peer connections.
 *
 * @return a factory configured with the default VP8/H.264 video codec
 *         factories (sharing the root EGL context) and the Java audio device module
 */
private PeerConnectionFactory createPeerConnectionFactory() {
    // Global initialization with default options.
    PeerConnectionFactory.InitializationOptions initOptions =
            PeerConnectionFactory.InitializationOptions.builder(context).createInitializationOptions();
    PeerConnectionFactory.initialize(initOptions);

    // Default codec factories; the two booleans enable VP8 and H.264 support respectively.
    final VideoEncoderFactory encoderFactory =
            new DefaultVideoEncoderFactory(rootGLContext.getEglBaseContext(), true, true);
    final VideoDecoderFactory decoderFactory =
            new DefaultVideoDecoderFactory(rootGLContext.getEglBaseContext());

    return PeerConnectionFactory.builder()
            .setOptions(new PeerConnectionFactory.Options())
            .setAudioDeviceModule(JavaAudioDeviceModule.builder(context).createAudioDeviceModule())
            .setVideoEncoderFactory(encoderFactory)
            .setVideoDecoderFactory(decoderFactory)
            .createPeerConnectionFactory();
}
// Create the local media stream; "ARDAMS" is the stream label sent to peers.
localStream = factory.createLocalMediaStream("ARDAMS");
// Constraint keys applied to the audio source (legacy "goog"-prefixed WebRTC
// media-constraint names).
// googEchoCancellation: acoustic echo cancellation
private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
// googNoiseSuppression: noise suppression
private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
// googAutoGainControl: automatic gain control
private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
// googHighpassFilter: high-pass filter
private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
/**
 * Builds the mandatory constraints applied to the local audio source.
 *
 * @return constraints enabling echo cancellation, noise suppression and the
 *         high-pass filter, with automatic gain control disabled
 */
private MediaConstraints createAudioConstraints() {
    MediaConstraints constraints = new MediaConstraints();
    constraints.mandatory.add(
            new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "true"));
    constraints.mandatory.add(
            new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "true"));
    constraints.mandatory.add(
            new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
    constraints.mandatory.add(
            new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "true"));
    return constraints;
}
// Audio source built from the constraints above.
AudioSource audioSource = factory.createAudioSource(createAudioConstraints());
// Audio track; "ARDAMSa0" is the track id peers see.
AudioTrack audioTrack = factory.createAudioTrack("ARDAMSa0", audioSource);
// Attach the audio track to the local stream.
localStream.addTrack(audioTrack);
/**
 * Creates a camera capturer, preferring the Camera2 API when the device
 * supports it.
 *
 * @return a capturer backed by Camera2 if supported, otherwise Camera1
 */
private VideoCapturer createVideoCapturer() {
    CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
            ? new Camera2Enumerator(context)  // Camera2 is available
            : new Camera1Enumerator(true);    // fall back to Camera1
    return createVideoCapturer(enumerator);
}
/**
 * Picks a camera device and creates a capturer for it.
 * Prefers a front-facing camera; falls back to a back-facing one if no
 * front-facing camera yields a capturer.
 *
 * @param enumerator camera enumerator (Camera1 or Camera2)
 * @return a capturer for the chosen camera, or {@code null} if none could be created
 */
private VideoCapturer createVideoCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();
    // Try front-facing cameras first, then back-facing ones.
    VideoCapturer capturer = firstWorkingCapturer(enumerator, deviceNames, true);
    if (capturer == null) {
        capturer = firstWorkingCapturer(enumerator, deviceNames, false);
    }
    return capturer;
}

/** Returns the first capturer that can be created for a camera with the requested facing, or null. */
private VideoCapturer firstWorkingCapturer(CameraEnumerator enumerator, String[] deviceNames, boolean front) {
    for (String deviceName : deviceNames) {
        boolean facingMatches =
                front ? enumerator.isFrontFacing(deviceName) : enumerator.isBackFacing(deviceName);
        if (facingMatches) {
            VideoCapturer capturer = enumerator.createCapturer(deviceName, null);
            if (capturer != null) {
                return capturer;
            }
        }
    }
    return null;
}
VideoCapturer videoCapturer = createVideoCapturer();
// Video source; isScreencast() tells the factory whether this capturer is a screen share.
VideoSource videoSource = factory.createVideoSource(videoCapturer.isScreencast());
// Capture thread sharing the root EGL context, so frames go straight to GL.
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
        "CaptureThread", rootGLContext.getEglBaseContext()
);
// Hook the capturer up to the video source; rendering happens via the shared
// GL context, so no manual encode/decode is needed here.
videoCapturer.initialize(surfaceTextureHelper, context, videoSource.getCapturerObserver());
// Start capturing: width, height, frames per second.
// NOTE(review): 300 is an unusual capture width (320x240 QVGA is common) — confirm intent.
videoCapturer.startCapture(300, 240, 10);
// Video track; "ARDAMSv0" is the track id peers see.
VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
// Attach the video track to the local stream.
localStream.addTrack(videoTrack);
/**
 * Called once the local stream has been created; posts to the UI thread to
 * display the local preview.
 *
 * @param localStream the freshly created local media stream
 * @param userId      identifier of the local user
 */
public void onSetLocalStream(final MediaStream localStream, final String userId) {
    Runnable showStream = new Runnable() {
        @Override
        public void run() {
            addView(localStream, userId);
        }
    };
    runOnUiThread(showStream);
}
/**
 * Adds a renderer for {@code stream} to the FrameLayout and re-lays-out every
 * participant's view in the room (1 + N people).
 *
 * @param stream media stream whose first video track (if any) is rendered
 * @param userId key under which this participant's renderer is tracked
 */
private void addView(MediaStream stream, String userId) {
    // SurfaceViewRenderer is WebRTC's self-rendering view; no manual drawing needed.
    SurfaceViewRenderer renderer = new SurfaceViewRenderer(this);
    renderer.init(rootEglBase.getEglBaseContext(), null);
    // SCALE_ASPECT_FIT fits the frame inside the view bounds
    // (SCALE_ASPECT_FILL would scale to the captured frame size instead).
    renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    // Mirror the rendered image.
    renderer.setMirror(true);
    // Feed the first video track, when present, into the renderer.
    if (stream.videoTracks.size() > 0) {
        stream.videoTracks.get(0).addSink(renderer);
    }
    // Track the renderer for this participant.
    videoViews.put(userId, renderer);
    persons.add(userId);
    // Add to the FrameLayout; real size/position are assigned below.
    mFrameLayout.addView(renderer);

    // Re-layout all participant views as squares positioned by Utils.
    int size = videoViews.size();
    for (int i = 0; i < size; i++) {
        String peerId = persons.get(i);
        SurfaceViewRenderer peerRenderer = videoViews.get(peerId);
        if (peerRenderer != null) {
            // Fix: the original constructed MATCH_PARENT params and immediately
            // overwrote width/height — pass the real dimensions directly.
            int side = Utils.getWidth(this, size);
            FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(side, side);
            layoutParams.leftMargin = Utils.getX(this, size, i);
            layoutParams.topMargin = Utils.getY(this, size, i);
            peerRenderer.setLayoutParams(layoutParams);
        }
    }
}
// Create the peer connection with the ICE server list and the observer callback.
peerConnection = factory.createPeerConnection(servers, callback);
/**
 * Builds the constraints used when creating an offer or answer toward other
 * members of the room.
 *
 * <p>Audio reception is always requested; video reception follows
 * {@code isVideoOpen}.
 *
 * @return the mandatory offer/answer media constraints
 */
private MediaConstraints offerOrAnswerConstraint() {
    MediaConstraints mediaConstraints = new MediaConstraints();
    // Audio must always be received.
    mediaConstraints.mandatory.add(
            new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    // Video reception is toggled by isVideoOpen.
    // Fix: dropped the raw-typed ArrayList intermediate (unchecked warning);
    // pairs are added straight to the mandatory list.
    mediaConstraints.mandatory.add(
            new MediaConstraints.KeyValuePair("OfferToReceiveVideo", String.valueOf(isVideoOpen)));
    return mediaConstraints;
}
// Kick off SDP negotiation: create the local offer with the constraints above.
peerConnection.createOffer(mPeer, offerOrAnswerConstraint());
// SdpObserver callback: the local SDP (offer) was created successfully.
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
    Log.i(TAG, "onCreateSuccess: ");
    Log.d(TAG, "onCreateSuccess" + Peer.this.toString());
    // Apply it as the local description; onSetSuccess fires if this succeeds.
    peerConnection.setLocalDescription(this, sessionDescription);
}
@Override
public void onSetSuccess() {
Log.d(TAG, "onSetSuccess" + Peer.this.toString());t
javaWebSocket.sendOffer(socketId, peerConnection.getLocalDescription());
}
/**
 * Sends the local SDP offer to the remote peer through the signaling WebSocket.
 * Fix: raw {@code HashMap} types replaced with {@code HashMap<String, Object>}
 * (removes unchecked warnings; runtime behavior unchanged).
 *
 * @param socketId id of the remote peer's socket
 * @param sdp      local session description to deliver
 */
public void sendOffer(String socketId, SessionDescription sdp) {
    // {"type":"offer","sdp":"..."}
    HashMap<String, Object> sdpPayload = new HashMap<>();
    sdpPayload.put("type", "offer");
    sdpPayload.put("sdp", sdp.description);
    // {"socketId":"...","sdp":{...}}
    HashMap<String, Object> data = new HashMap<>();
    data.put("socketId", socketId);
    data.put("sdp", sdpPayload);
    // {"eventName":"__offer","data":{...}}
    HashMap<String, Object> map = new HashMap<>();
    map.put("eventName", "__offer");
    map.put("data", data);
    JSONObject object = new JSONObject(map);
    String jsonString = object.toString();
    Log.d(TAG, "send-->" + jsonString);
    mWebSocketClient.send(jsonString);
}
// Incoming offer: apply it as this connection's remote description.
peerConnection.setRemoteDescription(mPeer, sdp);
// Remote offer applied successfully: create the answer.
@Override
public void onSetSuccess() {
    Log.d(TAG, "onSetSuccess" + Peer.this.toString());
    peerConnection.createAnswer(Peer.this, offerOrAnswerConstraint());
}
// SdpObserver callback: the local SDP (answer) was created successfully.
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
    Log.i(TAG, "onCreateSuccess: ");
    Log.d(TAG, "onCreateSuccess" + Peer.this.toString());
    // Apply it as the local description; onSetSuccess fires if this succeeds.
    peerConnection.setLocalDescription(this, sessionDescription);
}
// Local answer applied successfully: send it back over the signaling channel.
@Override
public void onSetSuccess() {
    Log.d(TAG, "onSetSuccess" + Peer.this.toString());
    javaWebSocket.sendAnswer(socketId, peerConnection.getLocalDescription().description);
}
// Caller side: apply the received answer as the remote description.
peerConnection.setRemoteDescription(mPeer, sessionDescription);
// Remote answer applied; SDP negotiation is complete on this side.
@Override
public void onSetSuccess() {
    Log.d(TAG, "onSetSuccess" + Peer.this.toString());
}
// PeerConnection.Observer callback: a local ICE candidate has been gathered.
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
    // Relay the candidate to the remote peer via the signaling socket.
    Log.d(TAG, "onIceCandidate" + Peer.this.toString());
    javaWebSocket.sendIceCandidate(socketId, iceCandidate);
}
/**
 * Sends a locally gathered ICE candidate to the signaling server.
 * Fix: raw {@code HashMap} types replaced with {@code HashMap<String, Object>}
 * (removes unchecked warnings; runtime behavior unchanged).
 *
 * @param userId       id of the remote peer's socket
 * @param iceCandidate candidate to deliver
 */
public void sendIceCandidate(String userId, IceCandidate iceCandidate) {
    // {"id":...,"label":...,"candidate":...,"socketId":...}
    HashMap<String, Object> data = new HashMap<>();
    data.put("id", iceCandidate.sdpMid);
    data.put("label", iceCandidate.sdpMLineIndex);
    data.put("candidate", iceCandidate.sdp);
    data.put("socketId", userId);
    // {"eventName":"__ice_candidate","data":{...}}
    HashMap<String, Object> map = new HashMap<>();
    map.put("eventName", "__ice_candidate");
    map.put("data", data);
    JSONObject object = new JSONObject(map);
    String jsonString = object.toString();
    Log.d(TAG, "send-->" + jsonString);
    mWebSocketClient.send(jsonString);
}
/**
 * Handles an ICE candidate received from the remote peer.
 *
 * @param socketId     id of the peer that sent the candidate (unused in this snippet)
 * @param iceCandidate the remote candidate
 */
public void onRemoteIceCandidate(String socketId, IceCandidate iceCandidate) {
    // NOTE(review): the original comment said "look up the connection by socketId",
    // but this snippet adds to a single peerConnection field — confirm how
    // multiple peers are handled.
    peerConnection.addIceCandidate(iceCandidate);
}
// Observer callback (runs on a worker thread) fired once the P2P link is up
// and the remote media stream (video + audio) arrives.
@Override
public void onAddStream(MediaStream mediaStream) {
    context.onAddRemoteStream(mediaStream, socketId);
    Log.d(TAG, "onAddStream" + Peer.this.toString());
}
/**
 * Called when another participant's stream arrives; shows it on the UI thread.
 *
 * @param stream the remote media stream
 * @param userId id of the remote participant
 */
public void onAddRemoteStream(MediaStream stream, String userId) {
    Runnable showStream = new Runnable() {
        @Override
        public void run() {
            addView(stream, userId);
        }
    };
    runOnUiThread(showStream);
}
/**
 * Adds a renderer for {@code stream} to the FrameLayout and re-lays-out every
 * participant's view in the room (1 + N people).
 *
 * @param stream media stream whose first video track (if any) is rendered
 * @param userId key under which this participant's renderer is tracked
 */
private void addView(MediaStream stream, String userId) {
    // SurfaceViewRenderer is WebRTC's self-rendering view; no manual drawing needed.
    SurfaceViewRenderer renderer = new SurfaceViewRenderer(this);
    renderer.init(rootEglBase.getEglBaseContext(), null);
    // SCALE_ASPECT_FIT fits the frame inside the view bounds
    // (SCALE_ASPECT_FILL would scale to the captured frame size instead).
    renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    // Mirror the rendered image.
    renderer.setMirror(true);
    // Feed the first video track, when present, into the renderer.
    if (stream.videoTracks.size() > 0) {
        stream.videoTracks.get(0).addSink(renderer);
    }
    // Track the renderer for this participant.
    videoViews.put(userId, renderer);
    persons.add(userId);
    // Add to the FrameLayout; real size/position are assigned below.
    mFrameLayout.addView(renderer);

    // Re-layout all participant views as squares positioned by Utils.
    int size = videoViews.size();
    for (int i = 0; i < size; i++) {
        String peerId = persons.get(i);
        SurfaceViewRenderer peerRenderer = videoViews.get(peerId);
        if (peerRenderer != null) {
            // Fix: the original constructed MATCH_PARENT params and immediately
            // overwrote width/height — pass the real dimensions directly.
            int side = Utils.getWidth(this, size);
            FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(side, side);
            layoutParams.leftMargin = Utils.getX(this, size, i);
            layoutParams.topMargin = Utils.getY(this, size, i);
            peerRenderer.setLayoutParams(layoutParams);
        }
    }
}
感谢博主们写这么好的博客,给了我很多的帮助。
https://rtcdeveloper.com/t/topic/13777
https://www.jianshu.com/p/2a760b56e3a9