WebRTC 在移动端(Android、iOS)上的视频编码器创建过程

目前 WebRTC 在移动端的编码器实现会先检测硬件编码器:如果支持,就可以通过硬件编码器进行 H264 编码,当然也包括 VP8 和 VP9 的硬件编码检测;

如果不支持,则默认使用 VP8 软件视频编码器;




1:Android

在 Android 端,默认编译不包括 OpenH264 和 FFmpeg,需要通过编译参数设置才能将这两个库编译进 Android 版本;


1:

peerconnectionfactory_jni.cc

JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
    JNIEnv* jni, jclass, jobject joptions) {
  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
  // ThreadManager only WrapCurrentThread()s the thread where it is first
  // created.  Since the semantics around when auto-wrapping happens in
  // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
  // about ramifications of auto-wrapping there.
  rtc::ThreadManager::Instance()->WrapCurrentThread();
  webrtc::Trace::CreateTrace();

  // NOTE(review): the <rtc::Thread> template arguments below were stripped by
  // the blog's HTML formatting; restored to match the upstream WebRTC source.
  std::unique_ptr<rtc::Thread> network_thread =
      rtc::Thread::CreateWithSocketServer();
  network_thread->SetName("network_thread", nullptr);
  RTC_CHECK(network_thread->Start()) << "Failed to start thread";

  std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
  worker_thread->SetName("worker_thread", nullptr);
  RTC_CHECK(worker_thread->Start()) << "Failed to start thread";

  std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
  signaling_thread->SetName("signaling_thread", nullptr);
  RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";

  // External (hardware) codec factories. They stay null when HW acceleration
  // is not enabled; the software path is used instead further down.
  WebRtcVideoEncoderFactory* encoder_factory = nullptr;
  WebRtcVideoDecoderFactory* decoder_factory = nullptr;
  rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;

  PeerConnectionFactoryInterface::Options options;
  bool has_options = joptions != nullptr;
  if (has_options) {
    options = ParseOptionsFromJava(jni, joptions);
  }

  // NOTE(review): video_hw_acceleration_enabled is a parameter of the full
  // upstream function; this snippet is truncated in the original post.
  if (video_hw_acceleration_enabled) {
    encoder_factory = new MediaCodecVideoEncoderFactory();
    decoder_factory = new MediaCodecVideoDecoderFactory();
  }






2:

//构造函数,直接获取判断Android手机是否支持硬件视频编码;(这里硬件编码包括,H264,VP8,VP9,不仅仅是H264;大部分手机只支持H264硬件编码)

MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
    : egl_context_(nullptr) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
  supported_codecs_.clear();


  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp8_hw_supported) {
    ALOGD << "VP8 HW Encoder supported.";
    supported_codecs_.push_back(cricket::VideoCodec("VP8"));
  }


  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_vp9_hw_supported) {
    ALOGD << "VP9 HW Encoder supported.";
    supported_codecs_.push_back(cricket::VideoCodec("VP9"));
  }
  supported_codecs_with_h264_hp_ = supported_codecs_;


  // Check if high profile is supported by decoder. If yes, encoder can always
  // fall back to baseline profile as a subset as high profile.
  bool is_h264_high_profile_hw_supported = jni->CallStaticBooleanMethod(
      j_decoder_class,
      GetStaticMethodID(jni, j_decoder_class, "isH264HighProfileHwSupported",
                        "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_high_profile_hw_supported) {
    ALOGD << "H.264 High Profile HW Encoder supported.";
    // TODO(magjed): Enumerate actual level instead of using hardcoded level
    // 3.1. Level 3.1 is 1280x720@30fps which is enough for now.
    cricket::VideoCodec constrained_high(cricket::kH264CodecName);
    const webrtc::H264::ProfileLevelId constrained_high_profile(
        webrtc::H264::kProfileConstrainedHigh, webrtc::H264::kLevel3_1);
    constrained_high.SetParam(
        cricket::kH264FmtpProfileLevelId,
        *webrtc::H264::ProfileLevelIdToString(constrained_high_profile));
    constrained_high.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
    constrained_high.SetParam(cricket::kH264FmtpPacketizationMode, "1");
    supported_codecs_with_h264_hp_.push_back(constrained_high);
  }


  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
      j_encoder_class,
      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
  CHECK_EXCEPTION(jni);
  if (is_h264_hw_supported) {                               //对了,就在这里判断;如果支持H264硬件编码,那么创建Android端的MediaCodec,同JNI实现MediaCodecVideoEncoder
    ALOGD << "H.264 HW Encoder supported.";
    // TODO(magjed): Push Constrained High profile as well when negotiation is
    // ready, http://crbug/webrtc/6337. We can negotiate Constrained High
    // profile as long as we have decode support for it and still send Baseline
    // since Baseline is a subset of the High profile.
    cricket::VideoCodec constrained_baseline(cricket::kH264CodecName);
    const webrtc::H264::ProfileLevelId constrained_baseline_profile(
        webrtc::H264::kProfileConstrainedBaseline, webrtc::H264::kLevel3_1);
    constrained_baseline.SetParam(
        cricket::kH264FmtpProfileLevelId,
        *webrtc::H264::ProfileLevelIdToString(constrained_baseline_profile));
    constrained_baseline.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
    constrained_baseline.SetParam(cricket::kH264FmtpPacketizationMode, "1");
    supported_codecs_.push_back(constrained_baseline);
    supported_codecs_with_h264_hp_.push_back(constrained_baseline);
  }
}












3:
// Returns a MediaCodec-backed hardware encoder for |codec|, or nullptr when
// the device offers no matching hardware support.
webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    const cricket::VideoCodec& codec) {
  if (supported_codecs().empty()) {
    ALOGW << "No HW video encoder for codec " << codec.name;
    return nullptr;
  }
  if (!FindMatchingCodec(supported_codecs(), codec)) {
    ALOGW << "Can not find HW video encoder for type " << codec.name;
    return nullptr;
  }
  ALOGD << "Create HW video encoder for " << codec.name;
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  // This is where the Android-side external video encoder (the hardware
  // MediaCodec implementation) is created; it can be replaced with any
  // custom encoder implementation.
  return new MediaCodecVideoEncoder(jni, codec, egl_context_);
}









4:

WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
    webrtc::Call* call,
    const StreamParams& sp,
    webrtc::VideoSendStream::Config config,
    const VideoOptions& options,
    WebRtcVideoEncoderFactory* external_encoder_factory,
    bool enable_cpu_overuse_detection,
    int max_bitrate_bps,
    const rtc::Optional& codec_settings,
    const rtc::Optional>& rtp_extensions,
    // TODO(deadbeef): Don't duplicate information between send_params,
    // rtp_extensions, options, etc.
    const VideoSendParameters& send_params)
    : worker_thread_(rtc::Thread::Current()),
      ssrcs_(sp.ssrcs),
      ssrc_groups_(sp.ssrc_groups),
      call_(call),
      enable_cpu_overuse_detection_(enable_cpu_overuse_detection),
      source_(nullptr),
      external_encoder_factory_(external_encoder_factory),    //这里保存外部的external_encoder_factory,这里通常就是Android端的MediaCodec,和IOS端的VideoToolBox;
      internal_encoder_factory_(new InternalEncoderFactory()), //这里“创建InternalEncoderFactory,InternalEncoderFactory就是软件视频编码部分,通常webrtc编译的时候默认只有VP8,VP9,可将OPENH264和FFmpeg通过设置编译参数,编译到webrtc中;

      stream_(nullptr),
      encoder_sink_(nullptr),
      parameters_(std::move(config), options, max_bitrate_bps, codec_settings),
      rtp_parameters_(CreateRtpParametersWithOneEncoding()),
      allocated_encoder_(nullptr, cricket::VideoCodec(), false),
      sending_(false) {








5:

//是的,这个函数很重要;

//如果 external_encoder_factory_ 存在并且支持所请求的编码格式,创建成功后就直接返回了;

//只有当没有 external_encoder_factory_(或它不支持 / 创建失败)的时候,才使用 internal_encoder_factory_ 中的视频编码器;前面已经讲了 internal_encoder_factory_;

WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoder(
    const VideoCodec& codec,
    bool force_encoder_allocation) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  // Do not re-create encoders of the same type.
  if (!force_encoder_allocation && codec == allocated_encoder_.codec &&
      allocated_encoder_.encoder != nullptr) {
    return allocated_encoder_;
  }


  // Try creating external encoder.
  if (external_encoder_factory_ != nullptr &&
      FindMatchingCodec(external_encoder_factory_->supported_codecs(), codec)) {
    webrtc::VideoEncoder* encoder =
        external_encoder_factory_->CreateVideoEncoder(codec);
    if (encoder != nullptr)
      return AllocatedEncoder(encoder, codec, true /* is_external */);
  }



  // Try creating internal encoder.
  if (FindMatchingCodec(internal_encoder_factory_->supported_codecs(), codec)) {
    if (parameters_.encoder_config.content_type ==
            webrtc::VideoEncoderConfig::ContentType::kScreen &&
        parameters_.conference_mode && UseSimulcastScreenshare()) {
      // TODO(sprang): Remove this adapter once libvpx supports simulcast with
      // same-resolution substreams.
      WebRtcSimulcastEncoderFactory adapter_factory(
          internal_encoder_factory_.get());
      return AllocatedEncoder(adapter_factory.CreateVideoEncoder(codec), codec,
                              false /* is_external */);

    }
    return AllocatedEncoder(
        internal_encoder_factory_->CreateVideoEncoder(codec), codec,
        false /* is_external */);

  }



  // This shouldn't happen, we should not be trying to create something we don't
  // support.
  RTC_NOTREACHED();
  return AllocatedEncoder(NULL, cricket::VideoCodec(), false);
}




对上述CreateVideoEncoder的描述:

在Android中的外部VideoEncoderFactory创建:

// Android's external factory: returns a MediaCodec-backed hardware encoder
// when |codec| matches an entry in supported_codecs(); otherwise returns
// nullptr so the caller can fall back to the internal (software) factory.
webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    const cricket::VideoCodec& codec) {
  if (supported_codecs().empty()) {
    ALOGW << "No HW video encoder for codec " << codec.name;
    return nullptr;
  }
  if (FindMatchingCodec(supported_codecs(), codec)) {
    ALOGD << "Create HW video encoder for " << codec.name;
    JNIEnv* jni = AttachCurrentThreadIfNeeded();
    ScopedLocalRefFrame local_ref_frame(jni);
    // The JNI-backed hardware encoder is created here.
    return new MediaCodecVideoEncoder(jni, codec, egl_context_);
  }
  ALOGW << "Can not find HW video encoder for type " << codec.name;
  return nullptr;
}




webrtc内部的VideoEncoderFactory创建;

// WebRtcVideoEncoderFactory implementation.
// Creates a software encoder for |codec|, or nullptr for unknown payloads.
webrtc::VideoEncoder* InternalEncoderFactory::CreateVideoEncoder(
    const cricket::VideoCodec& codec) {
  // Map the negotiated payload name onto a codec type; names that fail to
  // parse become kVideoCodecUnknown and fall through to the default branch.
  switch (webrtc::PayloadNameToCodecType(codec.name)
              .value_or(webrtc::kVideoCodecUnknown)) {
    case webrtc::kVideoCodecVP8:
      return webrtc::VP8Encoder::Create();
    case webrtc::kVideoCodecVP9:
      return webrtc::VP9Encoder::Create();
    case webrtc::kVideoCodecH264:
      return webrtc::H264Encoder::Create(codec);
    default:
      return nullptr;
  }
}


默认情况下,是没有OpenH264软编码的;需要添加编译参数;

Android 上层 SDK 接口已经有设置是否使用软编码的接口参数了,所以不需要在 WebRTC 内部代码中修改;

当然也可以手动替换上述设置;








2:iOS:













你可能感兴趣的:(WebRTC)