Android-Web Video Calling with an SRS Server (3): Pushing a WebRTC Stream from Android to SRS

Android-Web Video Calling with an SRS Server (1): Enabling HTTPS on the SRS Server
Android-Web Video Calling with an SRS Server (2): Pulling a WebRTC Stream from SRS on Android
Android-Web Video Calling with an SRS Server (3): Pushing a WebRTC Stream from Android to SRS

Result

[GIF: publish.gif]

Dependency

implementation 'org.webrtc:google-webrtc:1.0.32006'

For other versions, see:

Publish flow

createPeerConnectionFactory -> createPeerConnection(addTransceiver) -> createOffer -> setLocalDescription(OFFER) -> get remote sdp(network request) -> setRemoteDescription(ANSWER)

Implementation

Initialization

//Load and initialize WebRTC; must be called at least once before creating a PeerConnectionFactory
PeerConnectionFactory.initialize(
    PeerConnectionFactory.InitializationOptions
        .builder(applicationContext).createInitializationOptions()
)

private val eglBaseContext = EglBase.create().eglBaseContext

createPeerConnectionFactory

private val peerConnectionFactory: PeerConnectionFactory = createPeerConnectionFactory()

private fun createPeerConnectionFactory(): PeerConnectionFactory {
    //Use the default configuration for now; there is a pitfall here that we will hit later
    val options = PeerConnectionFactory.Options()
    val encoderFactory = DefaultVideoEncoderFactory(eglBaseContext, true, true)
    val decoderFactory = DefaultVideoDecoderFactory(eglBaseContext)
    return PeerConnectionFactory.builder()
        .setOptions(options)
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory()
}

createPeerConnection(addTransceiver)

private fun initPeerConnection() {
    val createAudioSource = peerConnectionFactory.createAudioSource(createAudioConstraints())
    val audioTrack =
        peerConnectionFactory.createAudioTrack("local_audio_track", createAudioSource)

    cameraVideoCapturer = createVideoCapture(this)
    cameraVideoCapturer?.let { capture ->
        val videoSource = peerConnectionFactory.createVideoSource(capture.isScreencast)
        videoTrack =
            peerConnectionFactory.createVideoTrack("local_video_track", videoSource).apply {
                //Render to the local preview
                addSink(mBinding.svr)
            }
        surfaceTextureHelper =
            SurfaceTextureHelper.create("surface_texture_thread", eglBaseContext)
        capture.initialize(surfaceTextureHelper, this, videoSource.capturerObserver)
        //Start capturing: width, height, frame rate
        capture.startCapture(640, 480, 20)
    }

    val rtcConfig = PeerConnection.RTCConfiguration(emptyList())
    /*
    For users who wish to send multiple audio/video streams and need to stay interoperable with legacy WebRTC implementations, specify PLAN_B.
    For users who wish to send multiple audio/video streams and/or wish to use the new RtpTransceiver API, specify UNIFIED_PLAN.
    */
    //Use PeerConnection.SdpSemantics.UNIFIED_PLAN
    rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN

    peerConnection = peerConnectionFactory.createPeerConnection(
        rtcConfig,
        PeerConnectionObserver()
    )?.apply {
        //Do not use addStream() here, otherwise an error will be thrown
        //The call order of addTransceiver() determines the order of the video/audio m-lines in the generated sdp; this matters later on
        videoTrack?.let {
            //Add the video track, send-only
            addTransceiver(
                it,
                RtpTransceiver.RtpTransceiverInit(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY)
            )
        }
        //Add the audio track, send-only
        addTransceiver(
            audioTrack,
            RtpTransceiver.RtpTransceiverInit(RtpTransceiver.RtpTransceiverDirection.SEND_ONLY)
        )
    }
}

private fun createAudioConstraints(): MediaConstraints {
    val audioConstraints = MediaConstraints()
    //Echo cancellation
    audioConstraints.mandatory.add(
        MediaConstraints.KeyValuePair("googEchoCancellation", "true")
    )
    //Auto gain control
    audioConstraints.mandatory.add(MediaConstraints.KeyValuePair("googAutoGainControl", "true"))
    //High-pass filter
    audioConstraints.mandatory.add(MediaConstraints.KeyValuePair("googHighpassFilter", "true"))
    //Noise suppression
    audioConstraints.mandatory.add(
        MediaConstraints.KeyValuePair("googNoiseSuppression", "true")
    )
    return audioConstraints
}

private fun createVideoCapture(context: Context): CameraVideoCapturer? {
    val enumerator: CameraEnumerator = if (Camera2Enumerator.isSupported(context)) {
        Camera2Enumerator(context)
    } else {
        Camera1Enumerator()
    }
    //Prefer the front camera, fall back to the back camera
    for (name in enumerator.deviceNames) {
        if (enumerator.isFrontFacing(name)) {
            return enumerator.createCapturer(name, null)
        }
    }
    for (name in enumerator.deviceNames) {
        if (enumerator.isBackFacing(name)) {
            return enumerator.createCapturer(name, null)
        }
    }
    return null
}
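The PeerConnectionObserver passed to createPeerConnection above is not shown in the post; a minimal sketch that only logs the callbacks (the class name comes from the call site, the body is an assumed placeholder):

open class PeerConnectionObserver : PeerConnection.Observer {
    override fun onSignalingChange(state: PeerConnection.SignalingState?) { println("onSignalingChange: $state") }
    override fun onIceConnectionChange(state: PeerConnection.IceConnectionState?) { println("onIceConnectionChange: $state") }
    override fun onIceConnectionReceivingChange(receiving: Boolean) { println("onIceConnectionReceivingChange: $receiving") }
    override fun onIceGatheringChange(state: PeerConnection.IceGatheringState?) { println("onIceGatheringChange: $state") }
    override fun onIceCandidate(candidate: IceCandidate?) { println("onIceCandidate: $candidate") }
    override fun onIceCandidatesRemoved(candidates: Array<out IceCandidate>?) { println("onIceCandidatesRemoved") }
    override fun onAddStream(stream: MediaStream?) { println("onAddStream") }
    override fun onRemoveStream(stream: MediaStream?) { println("onRemoveStream") }
    override fun onDataChannel(channel: DataChannel?) { println("onDataChannel") }
    override fun onRenegotiationNeeded() { println("onRenegotiationNeeded") }
    override fun onAddTrack(receiver: RtpReceiver?, streams: Array<out MediaStream>?) { println("onAddTrack") }
}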

createOffer && setLocalDescription

peerConnection.createOffer(object : SdpAdapter("createOffer") {
    override fun onCreateSuccess(description: SessionDescription?) {
        description?.let {
            if (it.type == SessionDescription.Type.OFFER) {
                peerConnection.setLocalDescription(SdpAdapter("setLocalDescription"), it)
                //This offerSdp is what gets sent to the SRS server in the network request below
                val offerSdp = it.description
                getRemoteSdp(offerSdp)
            }
        }
    }
}, MediaConstraints())
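The SdpAdapter used here is not defined in the snippets; it is just a tag-logging SdpObserver so call sites only override the callbacks they care about. A minimal sketch (only the class name and its String constructor come from the call sites; the logging body is an assumption):

open class SdpAdapter(private val tag: String) : SdpObserver {
    override fun onCreateSuccess(description: SessionDescription?) { println("$tag onCreateSuccess") }
    override fun onSetSuccess() { println("$tag onSetSuccess") }
    override fun onCreateFailure(error: String?) { println("$tag onCreateFailure: $error") }
    override fun onSetFailure(error: String?) { println("$tag onSetFailure: $error") }
}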

get remote sdp(network request)

Basic configuration; adjust it to your own environment.

object Constant {
    /**
     * SRS server IP
     */
    const val SRS_SERVER_IP = "192.168.2.91"

    /**
     * SRS HTTP API port, default 1985
     */
    const val SRS_SERVER_HTTP_PORT = "1985"

    /**
     * SRS HTTPS API port, default 1990
     */
    const val SRS_SERVER_HTTPS_PORT = "1990"

    const val SRS_SERVER_HTTP = "$SRS_SERVER_IP:$SRS_SERVER_HTTP_PORT"

    const val SRS_SERVER_HTTPS = "$SRS_SERVER_IP:$SRS_SERVER_HTTPS_PORT"
}

Request Body (application/json)

data class SrsRequestBean(
    /**
     * The sdp returned by [PeerConnection.createOffer]
     */
    @Json(name = "sdp")
    val sdp: String?,
    /**
     * The WebRTC stream URL to publish to
     */
    @Json(name = "streamurl")
    val streamUrl: String?
)

Response Body (application/json)

data class SrsResponseBean(
    /**
     * 0: success
     */
    @Json(name = "code")
    val code: Int,
    /**
     * Used for [PeerConnection.setRemoteDescription]
     */
    @Json(name = "sdp")
    val sdp: String?,
    @Json(name = "server")
    val server: String?,
    @Json(name = "sessionid")
    val sessionId: String?
)

Request URL
HTTP: http://ip:port/rtc/v1/publish/
HTTPS: https://ip:port/rtc/v1/publish/
Method: POST

On devices running Android P (API 28) and above, apps are blocked from making cleartext (unencrypted plain HTTP) network requests by default.
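If you really need the plain-HTTP port 1985 on Android 9+ during development, one common workaround (standard Android configuration, not from the original post) is to allow cleartext traffic in the manifest; otherwise use the HTTPS port set up in part (1):

<!-- AndroidManifest.xml: allow cleartext HTTP app-wide (development only; a networkSecurityConfig scoped to the SRS host is the finer-grained option) -->
<application
    android:usesCleartextTraffic="true">
</application>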

Retrofit example

interface ApiService {

    @POST("/rtc/v1/publish/")
    suspend fun publish(@Body body: SrsRequestBean): SrsResponseBean
}
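The retrofitClient referenced in the next snippet is not shown in the post. A minimal sketch, assuming Moshi (for the @Json annotations above) plus the retrofit2 Moshi converter; the class name and the choice of the HTTPS base URL are assumptions:

//Minimal Retrofit wrapper; note the trailing "/" on baseUrl, which Retrofit requires
class RetrofitClient {
    private val moshi: Moshi = Moshi.Builder()
        .addLast(KotlinJsonAdapterFactory())
        .build()

    private val retrofit: Retrofit = Retrofit.Builder()
        .baseUrl("https://${Constant.SRS_SERVER_HTTPS}/")
        .addConverterFactory(MoshiConverterFactory.create(moshi))
        .build()

    val apiService: ApiService = retrofit.create(ApiService::class.java)
}

//e.g. in the Activity:
//private val retrofitClient = RetrofitClient()

Note that if you use the self-signed certificate from part (1), the underlying OkHttp client would additionally need to be configured to trust it.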

getRemoteSdp

private fun getRemoteSdp(offerSdp: String) {
    //WebRTC stream URL
    val webrtcUrl = "webrtc://${Constant.SRS_SERVER_IP}/live/camera"
    val srsBean = SrsRequestBean(offerSdp, webrtcUrl)
    lifecycleScope.launch {
        val result = try {
            withContext(Dispatchers.IO) {
                retrofitClient.apiService.publish(srsBean)
            }
        } catch (e: Exception) {
            e.printStackTrace()
            println("Network request failed: ${e.message}")
            toastError("Network request failed: ${e.message}")
            null
        }

        result?.let { bean ->
            if (bean.code == 0) {
                println("Request succeeded, code: ${bean.code}")
                setRemoteDescription(bean.sdp ?: "")
            } else {
                println("Request failed, code: ${bean.code}")
            }
        }
    }
}

Note: this request may return code: 400. If it doesn't, you can skip this part.

[Image: publish api failure.png]

Possible causes:

1. The stream URL is already being published to; the fix is simply to switch to a different stream URL.
2. The SRS log prints create session : create session : add publisher : publish negotiate : no found valid H.264 payload type. Cause: the offer sdp in the request has no H.264-related payload under m=video, i.e. the sdp returned by WebRTC's createOffer contains no H.264 information; SRS's WebRTC implementation only supports H.264 for video.

[Image: server warning.png]

For the second case, the fix is described in my other post.
createPeerConnectionFactory then needs to change as follows.
Add the WebRTCExtension library:

private val peerConnectionFactory: PeerConnectionFactory = createPeerConnectionFactory()

private fun createPeerConnectionFactory(): PeerConnectionFactory {
    val options = PeerConnectionFactory.Options()
    //Swap in a custom VideoEncoderFactory
    val encoderFactory = createCustomVideoEncoderFactory(eglBaseContext, true, true,
            object : VideoEncoderSupportedCallback {
                override fun isSupportedH264(info: MediaCodecInfo): Boolean {
                    //Decide here which MediaCodecInfo should count as an H.264 encoder
                    return true
                }
            })
    val decoderFactory = DefaultVideoDecoderFactory(eglBaseContext)
    return PeerConnectionFactory.builder()
        .setOptions(options)
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory()
}
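Returning true unconditionally treats every codec as H.264-capable, which defeats the purpose of the check. A slightly stricter sketch is shown below; the prefix list is purely illustrative (an assumption, not from the original post) and should be adapted to the devices you actually target:

object : VideoEncoderSupportedCallback {
    override fun isSupportedH264(info: MediaCodecInfo): Boolean {
        //Treat codecs whose names match common hardware H.264 encoder prefixes as supported (illustrative list)
        val hardwarePrefixes = listOf("OMX.qcom.", "OMX.Exynos.", "OMX.hisi.", "OMX.MTK.", "c2.qti.")
        return hardwarePrefixes.any { prefix -> info.name.startsWith(prefix) }
    }
}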

setRemoteDescription

private fun setRemoteDescription(answerSdp: String) {
    val remoteSdp = SessionDescription(SessionDescription.Type.ANSWER, /*key point*/answerSdp)
    //Note: this step may fail with: Failed to set remote answer sdp: The order of m-lines in answer doesn't match order in offer. Rejecting answer.
    peerConnection.setRemoteDescription(SdpAdapter("setRemoteDescription"), remoteSdp)
}

If you hit this error:
Failed to set remote answer sdp: The order of m-lines in answer doesn't match order in offer. Rejecting answer.

see my other post for details.
The root cause is that the order of the video/audio m-lines in the offer sdp does not match their order in the answer sdp; see #3179.
There are currently three ways to deal with it:
1. Upgrade the SRS server. As of 2022-09-16, v4.0.265 and later handle this compatibly on the server side.
2. If you cannot upgrade SRS, adjust the addTransceiver() call order mentioned above so that it matches the video/audio order in the sdp returned by the SRS API.

ps: Many thanks to the SRS author 杨成立 for pointing out the first two solutions!

3. Use the fix for the first cause from that post and swap the sections manually (this also works as a last-resort safeguard):

/**
 * Reorder the answer sdp
 * @param offerSdp the sdp generated when the offer was created
 * @param answerSdp the sdp returned by the SRS server
 * @return the reordered answer sdp
 */
private fun convertAnswerSdp(offerSdp: String, answerSdp: String?): String {
    if (answerSdp.isNullOrBlank()) {
        return ""
    }
    val indexOfOfferVideo = offerSdp.indexOf("m=video")
    val indexOfOfferAudio = offerSdp.indexOf("m=audio")
    if (indexOfOfferVideo == -1 || indexOfOfferAudio == -1) {
        return answerSdp
    }
    val indexOfAnswerVideo = answerSdp.indexOf("m=video")
    val indexOfAnswerAudio = answerSdp.indexOf("m=audio")
    if (indexOfAnswerVideo == -1 || indexOfAnswerAudio == -1) {
        return answerSdp
    }

    val isFirstOfferVideo = indexOfOfferVideo < indexOfOfferAudio
    val isFirstAnswerVideo = indexOfAnswerVideo < indexOfAnswerAudio
    return if (isFirstOfferVideo == isFirstAnswerVideo) {
        //The order already matches
        answerSdp
    } else {
        //Swap the two media sections
        buildString {
            //Everything before the first m-line
            append(answerSdp.substring(0, indexOfAnswerVideo.coerceAtMost(indexOfAnswerAudio)))
            //The second media section through to the end, moved to the front
            append(
                answerSdp.substring(
                    indexOfAnswerVideo.coerceAtLeast(indexOfAnswerAudio),
                    answerSdp.length
                )
            )
            //The first media section, moved to the back
            append(
                answerSdp.substring(
                    indexOfAnswerVideo.coerceAtMost(indexOfAnswerAudio),
                    indexOfAnswerVideo.coerceAtLeast(indexOfAnswerAudio)
                )
            )
        }
    }
}

The modified method:

private fun setRemoteDescription(offerSdp: String, answerSdp: String){
    val remoteSdp = SessionDescription(SessionDescription.Type.ANSWER, /*key point*/convertAnswerSdp(offerSdp, answerSdp))
    peerConnection.setRemoteDescription(SdpAdapter("setRemoteDescription"), remoteSdp)
}
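Accordingly, the success branch in getRemoteSdp has to pass the offer sdp along as well; roughly (a sketch, with offerSdp being the string captured in onCreateSuccess):

//In getRemoteSdp's success branch, hand both the original offer sdp and the answer from SRS to setRemoteDescription
if (bean.code == 0) {
    setRemoteDescription(offerSdp, bean.sdp ?: "")
}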

Cleanup

Release resources to avoid memory leaks:

mBinding.svr.release()
cameraVideoCapturer?.dispose()
surfaceTextureHelper?.dispose()
videoTrack?.dispose()
peerConnection?.dispose()
peerConnectionFactory.dispose()
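One small addition that is not in the original list: it is usually safer to stop the camera before disposing the capturer (a hedged sketch):

//Stop capturing before disposing the capturer; stopCapture() declares InterruptedException
try {
    cameraVideoCapturer?.stopCapture()
} catch (e: InterruptedException) {
    e.printStackTrace()
}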

That completes the publish flow. If you spot any mistakes, corrections are welcome.

GitHub repo
