My main focus is the congestion control algorithm in WebRTC (Google Congestion Control, GCC). Here I run the GCC algorithm through simulation tests on the ns-3 platform.
The WebRTC code version I analyze is m84; m84 can be built with the GCC compiler, and mediasoup gives detailed instructions.
To download the WebRTC code, I rented a server in Hong Kong. At 35 yuan a month, it is still quite affordable for a poor student like me.
mkdir webrtc-checkout
cd webrtc-checkout
fetch --nohooks webrtc
gclient sync
cd src
git checkout -b m84 refs/remotes/branch-heads/4147
gclient sync
Add a compile flag to basic_types.h in the yuv library (third_party/libyuv/include/libyuv):
#define LIBYUV_LEGACY_TYPES
Compile with GCC:
gn gen out/m84 --args='is_debug=false is_component_build=false is_clang=false rtc_include_tests=false rtc_use_h264=true rtc_enable_protobuf=false use_rtti=true use_custom_libcxx=false treat_warnings_as_errors=false use_ozone=true'
Build libwebrtc:
ninja -C out/m84
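With the static library built, an external test program can be linked against it. A minimal sketch, assuming a typical Linux setup (the defines, include paths, and extra libraries are my assumptions, not from the mediasoup guide; adjust to your tree):
g++ -std=c++14 my_gcc_test.cc \
  -DWEBRTC_POSIX -DWEBRTC_LINUX \
  -I./src -I./src/third_party/abseil-cpp \
  ./src/out/m84/obj/libwebrtc.a -lpthread -ldl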
webrtc ships with a simple discrete-event simulator (webrtc/test/scenario), but it seems to only be able to simulate two nodes (EmulatedNetworkNode).
//third_party/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc
CallClient* CreateVideoSendingClient(
Scenario* s,
CallClientConfig config,
std::vector<EmulatedNetworkNode*> send_link,
std::vector<EmulatedNetworkNode*> return_link) {
auto* client = s->CreateClient("send", std::move(config));
auto* route = s->CreateRoutes(client, send_link,
s->CreateClient("return", CallClientConfig()),
return_link);
s->CreateVideoStream(route->forward(), VideoStreamConfig());
return client;
}
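For context, this is roughly how the unit tests in that file drive the helper. Treat it as a sketch: the NetworkSimulationConfig fields follow the scenario framework, but the values are made up.
Scenario s;
NetworkSimulationConfig net;
net.bandwidth = DataRate::KilobitsPerSec(300);  // bottleneck capacity
net.delay = TimeDelta::Millis(100);             // one-way propagation delay
std::vector<EmulatedNetworkNode*> send_link{s.CreateSimulationNode(net)};
std::vector<EmulatedNetworkNode*> return_link{
    s.CreateSimulationNode(NetworkSimulationConfig())};
CallClient* client =
    CreateVideoSendingClient(&s, CallClientConfig(), send_link, return_link);
s.RunFor(TimeDelta::Seconds(30));               // advance simulated time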
The analysis starts from CreateVideoSendingClient above.
This article proceeds in the order of video data generation, encoding, and transmission.
Generally there is no need for a simulation to read data from a camera. The simplest approach is a FakeFrameGenerator driven by the configured width, height, and frame rate, combined with a FakeEncoder that produces the data stream directly at the rate dictated by congestion control. Alternatively, YUV frames can be read from a file and run through a real encoder.
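As a concrete illustration, both variants are selected through VideoStreamConfig. A sketch using only field names that appear in the code below (the values are arbitrary):
s->CreateVideoStream(route->forward(), [](VideoStreamConfig* c) {
  // fake frame source: square-pattern generator, 640x480 at 30 fps
  c->source.capture = VideoStreamConfig::Source::Capture::kGenerator;
  c->source.generator.width = 640;
  c->source.generator.height = 480;
  c->source.framerate = 30;
  // fake encoder that simply obeys the rate from congestion control
  c->encoder.implementation = VideoStreamConfig::Encoder::Implementation::kFake;
  c->encoder.codec = VideoStreamConfig::Encoder::Codec::kVideoCodecVP8;
});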
//third_party/webrtc/test/scenario/scenario.cc
VideoStreamPair* Scenario::CreateVideoStream(
std::pair<CallClient*, CallClient*> clients,
std::function<void(VideoStreamConfig*)> config_modifier) {
VideoStreamConfig config;
config_modifier(&config);
return CreateVideoStream(clients, config);
}
VideoStreamPair* Scenario::CreateVideoStream(
std::pair<CallClient*, CallClient*> clients,
VideoStreamConfig config) {
video_streams_.emplace_back(
new VideoStreamPair(clients.first, clients.second, config));
return video_streams_.back().get();
}
//third_party/webrtc/test/scenario/video_stream.cc
VideoStreamPair::VideoStreamPair(CallClient* sender,
CallClient* receiver,
VideoStreamConfig config)
: config_(config),
matcher_(config.hooks.frame_pair_handlers),
send_stream_(sender, config, sender->transport_.get(), &matcher_),
receive_stream_(receiver,
config,
&send_stream_,
/*chosen_stream=*/0,
receiver->transport_.get(),
&matcher_) {}
//third_party/webrtc/test/scenario/video_stream.cc
SendVideoStream::SendVideoStream(CallClient* sender,
VideoStreamConfig config,
Transport* send_transport,
VideoFrameMatcher* matcher){
// video_capturer_ is a FrameGeneratorCapturer
video_capturer_ = std::make_unique<FrameGeneratorCapturer>(
sender_->clock_, CreateFrameGenerator(sender_->clock_, config.source),
config.source.framerate,
*sender->time_controller_->GetTaskQueueFactory());
video_capturer_->Init();
using Encoder = VideoStreamConfig::Encoder;
using Codec = VideoStreamConfig::Encoder::Codec;
switch (config.encoder.implementation) {
// configure the encoder factory
case Encoder::Implementation::kFake:
encoder_factory_ =
std::make_unique<FunctionVideoEncoderFactory>([this]() {
MutexLock lock(&mutex_);
std::unique_ptr<FakeEncoder> encoder;
if (config_.encoder.codec == Codec::kVideoCodecVP8) {
encoder = std::make_unique<FakeVp8Encoder>(sender_->clock_);
} else if (config_.encoder.codec == Codec::kVideoCodecGeneric) {
encoder = std::make_unique<FakeEncoder>(sender_->clock_);
} else {
RTC_NOTREACHED();
}
fake_encoders_.push_back(encoder.get());
if (config_.encoder.fake.max_rate.IsFinite())
encoder->SetMaxBitrate(config_.encoder.fake.max_rate.kbps());
return encoder;
});
break;
case VideoStreamConfig::Encoder::Implementation::kSoftware:
encoder_factory_.reset(new InternalEncoderFactory());
break;
case VideoStreamConfig::Encoder::Implementation::kHardware:
encoder_factory_ = CreateHardwareEncoderFactory();
break;
}
// analyzed in Part 2: encoding
}
Configuring the video source:
//third_party/webrtc/test/scenario/video_stream.cc
std::unique_ptr<FrameGeneratorInterface> CreateFrameGenerator(
Clock* clock,
VideoStreamConfig::Source source) {
using Capture = VideoStreamConfig::Source::Capture;
switch (source.capture) {
case Capture::kGenerator:
return CreateSquareFrameGenerator(
source.generator.width, source.generator.height,
source.generator.pixel_format, /*num_squares*/ absl::nullopt);
case Capture::kVideoFile:
RTC_CHECK(source.video_file.width && source.video_file.height);
return CreateFromYuvFileFrameGenerator(
{TransformFilePath(source.video_file.name)}, source.video_file.width,
source.video_file.height, /*frame_repeat_count*/ 1);
case Capture::kGenerateSlides:
return CreateSlideFrameGenerator(
source.slides.generator.width, source.slides.generator.height,
source.slides.change_interval.seconds() * source.framerate);
case Capture::kImageSlides:
return CreateImageSlideGenerator(clock, source.slides, source.framerate);
}
}
A fake frame generator:
//third_party/webrtc/api/test/create_frame_generator.cc
std::unique_ptr<FrameGeneratorInterface> CreateSquareFrameGenerator(
int width,
int height,
absl::optional<FrameGeneratorInterface::OutputType> type,
absl::optional<int> num_squares) {
return std::make_unique<SquareGenerator>(
width, height, type.value_or(FrameGeneratorInterface::OutputType::kI420),
num_squares.value_or(10));
}
The video capturer keeps producing fake frames at the configured frame rate.
void FrameGeneratorCapturer::Start() {
{
rtc::CritScope cs(&lock_);
sending_ = true;
}
if (!frame_task_.Running()) {
frame_task_ = RepeatingTaskHandle::Start(task_queue_.Get(), [this] {
InsertFrame();
return TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate();
});
}
}
void FrameGeneratorCapturer::InsertFrame() {
if (sending_) {
FrameGeneratorInterface::VideoFrameData frame_data =
frame_generator_->NextFrame();
int decimation =
std::round(static_cast<double>(source_fps_) / target_capture_fps_);
for (int i = 1; i < decimation; ++i)
frame_data = frame_generator_->NextFrame();
VideoFrame frame = VideoFrame::Builder()
.set_video_frame_buffer(frame_data.buffer)
.set_rotation(fake_rotation_)
.set_timestamp_us(clock_->TimeInMicroseconds())
.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds())
.set_update_rect(frame_data.update_rect)
.set_color_space(fake_color_space_)
.build();
if (first_frame_capture_time_ == -1) {
first_frame_capture_time_ = frame.ntp_time_ms();
}
TestVideoCapturer::OnFrame(frame);
}
}
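The decimation logic adapts a faster source to the capture rate: with source_fps_ = 60 and target_capture_fps_ = 30, decimation is 2, so every second generated frame is read and discarded.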
void TestVideoCapturer::OnFrame(const VideoFrame& original_frame) {
// preprocessing (cropping/scaling) elided
VideoFrame frame = original_frame;
// the frame is delivered to the VideoStreamEncoder via the broadcaster
broadcaster_.OnFrame(frame);
}
How does a frame travel from the capturer to the encoder? The second half of SendVideoStream::SendVideoStream answers this.
//third_party/webrtc/test/scenario/video_stream.cc
SendVideoStream::SendVideoStream(CallClient* sender,
VideoStreamConfig config,
Transport* send_transport,
VideoFrameMatcher* matcher){
video_capturer_ = std::make_unique<FrameGeneratorCapturer>(
sender_->clock_, CreateFrameGenerator(sender_->clock_, config.source),
config.source.framerate,
*sender->time_controller_->GetTaskQueueFactory());
video_capturer_->Init();
//...
bitrate_allocator_factory_ = CreateBuiltinVideoBitrateAllocatorFactory();
VideoSendStream::Config send_config =
CreateVideoSendStreamConfig(config, ssrcs_, rtx_ssrcs_, send_transport);
// analysis
send_config.encoder_settings.encoder_factory = encoder_factory_.get();
send_config.encoder_settings.bitrate_allocator_factory =
bitrate_allocator_factory_.get();
//sender_ type defined in third_party/webrtc/test/scenario/call_client.cc
//Part 4 analyzes the encoder-to-transport path
sender_->SendTask([&] {
if (config.stream.fec_controller_factory) {
send_stream_ = sender_->call_->CreateVideoSendStream(
std::move(send_config), std::move(encoder_config),
config.stream.fec_controller_factory->CreateFecController());
} else {
send_stream_ = sender_->call_->CreateVideoSendStream(
std::move(send_config), std::move(encoder_config));
}
if (matcher->Active()) {
frame_tap_ = std::make_unique<ForwardingCapturedFrameTap>(
sender_->clock_, matcher, video_capturer_.get());
send_stream_->SetSource(frame_tap_.get(),
config.encoder.degradation_preference);
} else {
send_stream_->SetSource(video_capturer_.get(),
config.encoder.degradation_preference);
}
});
}
send_config.encoder_settings.encoder_factory = encoder_factory_.get() wires in the video encoder factory.
Next, create the VideoSendStream, which builds the encoder according to encoder_settings:
//third_party/webrtc/call/call.cc
webrtc::VideoSendStream* Call::CreateVideoSendStream(
webrtc::VideoSendStream::Config config,
VideoEncoderConfig encoder_config,
std::unique_ptr<FecController> fec_controller) {
VideoSendStream* send_stream = new VideoSendStream(
clock_, num_cpu_cores_, module_process_thread_->process_thread(),
task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_ptr_,
bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_,
std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_,
suspended_video_payload_states_, std::move(fec_controller));
// ... stream registration bookkeeping elided ...
return send_stream;
}
VideoSendStream::VideoSendStream(
Clock* clock,
int num_cpu_cores,
ProcessThread* module_process_thread,
TaskQueueFactory* task_queue_factory,
RtcpRttStats* call_stats,
RtpTransportControllerSendInterface* transport,
BitrateAllocatorInterface* bitrate_allocator,
SendDelayStats* send_delay_stats,
RtcEventLog* event_log,
VideoSendStream::Config config,
VideoEncoderConfig encoder_config,
const std::map<uint32_t, RtpState>& suspended_ssrcs,
const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
std::unique_ptr fec_controller)
: worker_queue_(transport->GetWorkerQueue()),
stats_proxy_(clock, config, encoder_config.content_type),
config_(std::move(config)),
content_type_(encoder_config.content_type) {
video_stream_encoder_ =
CreateVideoStreamEncoder(clock, task_queue_factory, num_cpu_cores,
&stats_proxy_, config_.encoder_settings);
worker_queue_->PostTask(ToQueuedTask(
[this, clock, call_stats, transport, bitrate_allocator, send_delay_stats,
event_log, &suspended_ssrcs, &encoder_config, &suspended_payload_states,
&fec_controller]() {
send_stream_.reset(new VideoSendStreamImpl(
clock, &stats_proxy_, worker_queue_, call_stats, transport,
bitrate_allocator, send_delay_stats, video_stream_encoder_.get(),
event_log, &config_, encoder_config.max_bitrate_bps,
encoder_config.bitrate_priority, suspended_ssrcs,
suspended_payload_states, encoder_config.content_type,
std::move(fec_controller)));
},
[this]() { thread_sync_event_.Set(); }));
//VideoSendStreamImpl registers itself as the sink of video_stream_encoder_
//to receive the encoded frames
ReconfigureVideoEncoder(std::move(encoder_config));
}
//third_party/webrtc/api/video/video_stream_encoder_create.cc
std::unique_ptr<VideoStreamEncoderInterface> CreateVideoStreamEncoder(
Clock* clock,
TaskQueueFactory* task_queue_factory,
uint32_t number_of_cores,
VideoStreamEncoderObserver* encoder_stats_observer,
const VideoStreamEncoderSettings& settings) {
return std::make_unique<VideoStreamEncoder>(
clock, number_of_cores, encoder_stats_observer, settings,
std::make_unique<OveruseFrameDetector>(encoder_stats_observer),
task_queue_factory);
}
VideoStreamEncoder::VideoStreamEncoder(
Clock* clock,
uint32_t number_of_cores,
VideoStreamEncoderObserver* encoder_stats_observer,
const VideoStreamEncoderSettings& settings,
std::unique_ptr<OveruseFrameDetector> overuse_detector,
TaskQueueFactory* task_queue_factory)
: shutdown_event_(true /* manual_reset */, false),
number_of_cores_(number_of_cores),
quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()),
sink_(nullptr),
settings_(settings){
// settings_ holds, among other things, the encoder factory
}
Creating and configuring the encoder:
//third_party/webrtc/video/video_send_stream.cc
void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
// TODO(perkj): Some test cases in VideoSendStreamTest call
// ReconfigureVideoEncoder from the network thread.
// RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(content_type_ == config.content_type);
video_stream_encoder_->ConfigureEncoder(
std::move(config),
config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp));
}
//third_party/webrtc/video/video_stream_encoder.cc
void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config,
size_t max_data_payload_length) {
encoder_queue_.PostTask(
[this, config = std::move(config), max_data_payload_length]() mutable {
RTC_DCHECK_RUN_ON(&encoder_queue_);
RTC_DCHECK(sink_);
RTC_LOG(LS_INFO) << "ConfigureEncoder requested.";
pending_encoder_creation_ =
(!encoder_ || encoder_config_.video_format != config.video_format ||
max_data_payload_length_ != max_data_payload_length);
encoder_config_ = std::move(config);
max_data_payload_length_ = max_data_payload_length;
pending_encoder_reconfiguration_ = true;
// Reconfigure the encoder now if the encoder has an internal source or
// if the frame resolution is known. Otherwise, the reconfiguration is
// deferred until the next frame to minimize the number of
// reconfigurations. The codec configuration depends on incoming video
// frame size.
if (last_frame_info_) {
ReconfigureEncoder();
} else {
codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
encoder_config_.video_format);
if (HasInternalSource()) {
last_frame_info_ = VideoFrameInfo(kDefaultInputPixelsWidth,
kDefaultInputPixelsHeight, false);
ReconfigureEncoder();
}
}
});
}
Creating the encoder:
//third_party/webrtc/video/video_stream_encoder.cc
void VideoStreamEncoder::ReconfigureEncoder() {
if (pending_encoder_creation_) {
// Destroy existing encoder instance before creating a new one. Otherwise
// attempt to create another instance will fail if encoder factory
// supports only single instance of encoder of given type.
encoder_.reset();
encoder_ = settings_.encoder_factory->CreateVideoEncoder(
encoder_config_.video_format);
// TODO(nisse): What to do if creating the encoder fails? Crash,
// or just discard incoming frames?
RTC_CHECK(encoder_);
if (encoder_selector_) {
encoder_selector_->OnCurrentEncoder(encoder_config_.video_format);
}
encoder_->SetFecControllerOverride(fec_controller_override_);
codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
encoder_config_.video_format);
encoder_reset_required = true;
}
}
Assume the simulation uses test::FakeEncoder. The encoded output is generated according to the settings stored in current_rate_settings_.
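For example, at a 1 Mbps target and 25 fps, each frame comes out to roughly 1,000,000 / 25 / 8 = 5000 bytes, assuming the target bitrate is spread evenly across frames.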
int32_t FakeEncoder::Encode(const VideoFrame& input_image,
const std::vector<VideoFrameType>* frame_types) {
rates = current_rate_settings_;
FrameInfo frame_info =
NextFrame(frame_types, keyframe, num_simulcast_streams, rates.bitrate,
simulcast_streams, static_cast<int>(rates.framerate_fps + 0.5));
}
The rate-setting interface; SetRates is invoked from VideoStreamEncoder.
void FakeEncoder::SetRates(const RateControlParameters& parameters) {
rtc::CritScope cs(&crit_sect_);
SetRatesLocked(parameters);
}
void FakeEncoder::SetRatesLocked(const RateControlParameters& parameters) {
current_rate_settings_ = parameters;
int allocated_bitrate_kbps = parameters.bitrate.get_sum_kbps();
// Scale bitrate allocation to not exceed the given max target bitrate.
if (max_target_bitrate_kbps_ > 0 &&
allocated_bitrate_kbps > max_target_bitrate_kbps_) {
for (uint8_t spatial_idx = 0; spatial_idx < kMaxSpatialLayers;
++spatial_idx) {
for (uint8_t temporal_idx = 0; temporal_idx < kMaxTemporalStreams;
++temporal_idx) {
if (current_rate_settings_.bitrate.HasBitrate(spatial_idx,
temporal_idx)) {
uint32_t bitrate = current_rate_settings_.bitrate.GetBitrate(
spatial_idx, temporal_idx);
bitrate = static_cast(
(bitrate * int64_t{max_target_bitrate_kbps_}) /
allocated_bitrate_kbps);
current_rate_settings_.bitrate.SetBitrate(spatial_idx, temporal_idx,
bitrate);
}
}
}
}
}
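The scaling is proportional: if the allocation sums to 1200 kbps but max_target_bitrate_kbps_ is 800, every spatial/temporal layer bitrate is multiplied by 800/1200.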
The previous section showed the encoder's rate-setting interface. This section traces the flow from the congestion controller's rate estimate to the configuration of the encoder rate. RtpTransportControllerSend obtains the rate information from the congestion controller. An encoder rate update is triggered from two places: when the congestion controller's target rate changes, and periodically, when the pacer queue state is polled.
//third_party/webrtc/call/rtp_transport_controller_send.cc
//one
void RtpTransportControllerSend::PostUpdates(NetworkControlUpdate update) {
if (update.target_rate) {
control_handler_->SetTargetRate(*update.target_rate);
UpdateControlState();
}
}
//two
//constexpr TimeDelta kPacerQueueUpdateInterval = TimeDelta::Millis(25);
void RtpTransportControllerSend::StartProcessPeriodicTasks() {
if (!pacer_queue_update_task_.Running()) {
pacer_queue_update_task_ = RepeatingTaskHandle::DelayedStart(
task_queue_.Get(), kPacerQueueUpdateInterval, [this]() {
RTC_DCHECK_RUN_ON(&task_queue_);
TimeDelta expected_queue_time = pacer()->ExpectedQueueTime();
control_handler_->SetPacerQueue(expected_queue_time);
UpdateControlState();
return kPacerQueueUpdateInterval;
});
}
}
Starting from UpdateControlState:
void RtpTransportControllerSend::UpdateControlState() {
absl::optional<TargetTransferRate> update = control_handler_->GetUpdate();
if (!update)
return;
retransmission_rate_limiter_.SetMaxRate(update->target_rate.bps());
observer_->OnTargetTransferRate(*update);
}
//third_party/webrtc/call/call.cc
void Call::OnTargetTransferRate(TargetTransferRate msg) {
uint32_t target_bitrate_bps = msg.target_rate.bps();
// For controlling the rate of feedback messages.
receive_side_cc_.OnBitrateChanged(target_bitrate_bps);
bitrate_allocator_->OnNetworkEstimateChanged(msg);
/*
worker_thread_->PostTask(
ToQueuedTask(task_safety_, [this, target_bitrate_bps]() {
RTC_DCHECK_RUN_ON(worker_thread_);
last_bandwidth_bps_ = target_bitrate_bps;
// Ignore updates if bitrate is zero (the aggregate network state is
// down) or if we're not sending video.
if (target_bitrate_bps == 0 || video_send_streams_.empty()) {
estimated_send_bitrate_kbps_counter_.ProcessAndPause();
pacer_bitrate_kbps_counter_.ProcessAndPause();
return;
}
estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000);
// Pacer bitrate may be higher than bitrate estimate if enforcing min
// bitrate.
uint32_t pacer_bitrate_bps =
std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_);
pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000);
}));
*/
}
//third_party/webrtc/call/bitrate_allocator.cc
void BitrateAllocator::OnNetworkEstimateChanged(TargetTransferRate msg) {
// called for each allocatable track (loop elided):
uint32_t protection_bitrate = config.observer->OnBitrateUpdated(update);
}
//third_party/webrtc/video/video_send_stream_impl.cc
uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) {
rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_->GetSendFrameRate());
encoder_target_rate_bps_ = rtp_video_sender_->GetPayloadBitrateBps();
const uint32_t protection_bitrate_bps =
rtp_video_sender_->GetProtectionBitrateBps();
DataRate link_allocation = DataRate::Zero();
if (encoder_target_rate_bps_ > protection_bitrate_bps) {
link_allocation =
DataRate::BitsPerSec(encoder_target_rate_bps_ - protection_bitrate_bps);
}
DataRate overhead =
update.target_bitrate - DataRate::BitsPerSec(encoder_target_rate_bps_);
DataRate encoder_stable_target_rate = update.stable_target_bitrate;
if (encoder_stable_target_rate > overhead) {
encoder_stable_target_rate = encoder_stable_target_rate - overhead;
} else {
encoder_stable_target_rate = DataRate::BitsPerSec(encoder_target_rate_bps_);
}
encoder_target_rate_bps_ =
std::min(encoder_max_bitrate_bps_, encoder_target_rate_bps_);
encoder_stable_target_rate =
std::min(DataRate::BitsPerSec(encoder_max_bitrate_bps_),
encoder_stable_target_rate);
DataRate encoder_target_rate = DataRate::BitsPerSec(encoder_target_rate_bps_);
link_allocation = std::max(encoder_target_rate, link_allocation);
video_stream_encoder_->OnBitrateUpdated(
encoder_target_rate, encoder_stable_target_rate, link_allocation,
rtc::dchecked_cast<uint8_t>(update.packet_loss_ratio * 256),
update.round_trip_time.ms(), update.cwnd_reduce_ratio);
return protection_bitrate_bps;
}
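To make the arithmetic concrete: if the controller reports a 1000 kbps target and the RTP sender reserves 100 kbps of that for protection (FEC/RTX), the encoder payload target becomes 900 kbps; the 100 kbps difference is treated as overhead and is likewise subtracted from the stable target rate before both are capped at encoder_max_bitrate_bps_.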
//third_party/webrtc/video/video_stream_encoder.cc
void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate,
DataRate stable_target_bitrate,
DataRate link_allocation,
uint8_t fraction_lost,
int64_t round_trip_time_ms,
double cwnd_reduce_ratio) {
EncoderRateSettings new_rate_settings{
VideoBitrateAllocation(), static_cast<double>(framerate_fps),
link_allocation, target_bitrate, stable_target_bitrate};
SetEncoderRates(UpdateBitrateAllocationAndNotifyObserver(new_rate_settings));
}
//third_party/webrtc/video/video_stream_encoder.cc
void VideoStreamEncoder::SetEncoderRates(
const EncoderRateSettings& rate_settings) {
encoder_->SetRates(rate_settings.rate_control);
}
VideoSendStreamImpl registers itself with the BitrateAllocator.
EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
auto enable_padding_task = [this]() {
if (disable_padding_) {
RTC_DCHECK_RUN_ON(worker_queue_);
disable_padding_ = false;
// To ensure that padding bitrate is propagated to the bitrate allocator.
SignalEncoderActive();
}
};
if (!worker_queue_->IsCurrent()) {
worker_queue_->PostTask(enable_padding_task);
} else {
enable_padding_task();
}
EncodedImageCallback::Result result(EncodedImageCallback::Result::OK);
result = rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info,
fragmentation);
return result;
}
void VideoSendStreamImpl::SignalEncoderActive() {
RTC_DCHECK_RUN_ON(worker_queue_);
if (rtp_video_sender_->IsActive()) {
RTC_LOG(LS_INFO) << "SignalEncoderActive, Encoder is active.";
bitrate_allocator_->AddObserver(this, GetAllocationConfig());
}
}
void VideoSendStreamImpl::StartupVideoSendStream() {
bitrate_allocator_->AddObserver(this, GetAllocationConfig());
}
void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer,
MediaStreamAllocationConfig config) {
auto it = absl::c_find_if(
allocatable_tracks_,
[observer](const auto& config) { return config.observer == observer; });
// Update settings if the observer already exists, create a new one otherwise.
if (it != allocatable_tracks_.end()) {
it->config = config;
} else {
allocatable_tracks_.push_back(AllocatableTrack(observer, config));
}
}
//third_party/webrtc/test/scenario/video_stream.cc
SendVideoStream::SendVideoStream(CallClient* sender,
VideoStreamConfig config,
Transport* send_transport,
VideoFrameMatcher* matcher){
if (config.stream.fec_controller_factory) {
send_stream_ = sender_->call_->CreateVideoSendStream(
std::move(send_config), std::move(encoder_config),
config.stream.fec_controller_factory->CreateFecController());
} else {
send_stream_ = sender_->call_->CreateVideoSendStream(
std::move(send_config), std::move(encoder_config));
}
send_stream_->SetSource(video_capturer_.get(),
config.encoder.degradation_preference);
}
The FrameGeneratorCapturer produces frames, hands them to the encoder, and the encoder delivers the encoded data to its sink (VideoSendStreamImpl).
void VideoSendStream::SetSource(
rtc::VideoSourceInterface<VideoFrame>* source,
const DegradationPreference& degradation_preference) {
RTC_DCHECK_RUN_ON(&thread_checker_);
video_stream_encoder_->SetSource(source, degradation_preference);
}
int32_t FakeEncoder::Encode(const VideoFrame& input_image,
const std::vector<VideoFrameType>* frame_types) {
if (callback->OnEncodedImage(encoded, &codec_specific, fragmentation.get())
.error != EncodedImageCallback::Result::OK) {
return -1;
}
}
EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation){
EncodedImageCallback::Result result = sink_->OnEncodedImage(
image_copy, codec_specific_info,
fragmentation_copy ? fragmentation_copy.get() : fragmentation);
}
EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation){
result = rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info,
fragmentation);
}
Next, the analysis moves into rtp_video_sender_.
EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
uint32_t rtp_timestamp =
encoded_image.Timestamp() +
rtp_streams_[stream_index].rtp_rtcp->StartTimestamp();
bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage(
rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image,
fragmentation,
params_[stream_index].GetRtpVideoHeader(
encoded_image, codec_specific_info, shared_frame_id_),
expected_retransmission_time_ms);
}
bool RTPSenderVideo::SendEncodedImage(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
const EncodedImage& encoded_image,
const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
return SendVideo(payload_type, codec_type, rtp_timestamp,
encoded_image.capture_time_ms_, encoded_image, fragmentation,
video_header, expected_retransmission_time_ms);
}
bool RTPSenderVideo::SendVideo(
int payload_type,
absl::optional<VideoCodecType> codec_type,
uint32_t rtp_timestamp,
int64_t capture_time_ms,
rtc::ArrayView<const uint8_t> payload,
const RTPFragmentationHeader* fragmentation,
RTPVideoHeader video_header,
absl::optional<int64_t> expected_retransmission_time_ms) {
AddRtpHeaderExtensions(video_header, absolute_capture_time,
/*first_packet=*/true, /*last_packet=*/true,
single_packet.get());
AddRtpHeaderExtensions(video_header, absolute_capture_time,
/*first_packet=*/true, /*last_packet=*/false,
first_packet.get());
AddRtpHeaderExtensions(video_header, absolute_capture_time,
/*first_packet=*/false, /*last_packet=*/false,
middle_packet.get());
AddRtpHeaderExtensions(video_header, absolute_capture_time,
/*first_packet=*/false, /*last_packet=*/true,
last_packet.get());
std::unique_ptr<RtpPacketizer> packetizer = RtpPacketizer::Create(
codec_type, payload, limits, video_header, fragmentation);
const size_t num_packets = packetizer->NumPackets();
/*
// No FEC protection for upper temporal layers, if used.
if (fec_type_.has_value() &&
(temporal_id == 0 || temporal_id == kNoTemporalIdx)) {
if (fec_generator_) {
fec_generator_->AddPacketAndGenerateFec(*packet);
} else {
// Deferred FEC generation, just mark packet.
packet->set_fec_protect_packet(true);
}
}
if (red_enabled()) {
std::unique_ptr<RtpPacketToSend> red_packet(new RtpPacketToSend(*packet));
BuildRedPayload(*packet, red_packet.get());
red_packet->SetPayloadType(*red_payload_type_);
red_packet->set_is_red(true);
// Send |red_packet| instead of |packet| for allocated sequence number.
red_packet->set_packet_type(RtpPacketMediaType::kVideo);
red_packet->set_allow_retransmission(packet->allow_retransmission());
rtp_packets.emplace_back(std::move(red_packet));
} else {
packet->set_packet_type(RtpPacketMediaType::kVideo);
rtp_packets.emplace_back(std::move(packet));
}
*/
LogAndSendToNetwork(std::move(rtp_packets), unpacketized_payload_size);
}
void RTPSenderVideo::LogAndSendToNetwork(
std::vector<std::unique_ptr<RtpPacketToSend>> packets,
size_t unpacketized_payload_size) {
{
MutexLock lock(&stats_mutex_);
size_t packetized_payload_size = 0;
for (const auto& packet : packets) {
if (*packet->packet_type() == RtpPacketMediaType::kVideo) {
video_bitrate_.Update(packet->size(), now_ms);
packetized_payload_size += packet->payload_size();
}
}
// AV1 packetizer may produce less packetized bytes than unpacketized.
if (packetized_payload_size >= unpacketized_payload_size) {
packetization_overhead_bitrate_.Update(
packetized_payload_size - unpacketized_payload_size,
clock_->TimeInMilliseconds());
}
}
rtp_sender_->EnqueuePackets(std::move(packets));
}
void RTPSender::EnqueuePackets(
std::vector<std::unique_ptr<RtpPacketToSend>> packets) {
RTC_DCHECK(!packets.empty());
int64_t now_ms = clock_->TimeInMilliseconds();
for (auto& packet : packets) {
RTC_DCHECK(packet);
RTC_CHECK(packet->packet_type().has_value())
<< "Packet type must be set before sending.";
if (packet->capture_time_ms() <= 0) {
packet->set_capture_time_ms(now_ms);
}
}
paced_sender_->EnqueuePackets(std::move(packets));
}
At the end of this path, the packets are not sent into the network immediately; they are handed to the pacer, which drains its queue on its own schedule and eventually reaches PacketRouter::SendPacket (analyzed further below).
Back to the simulator's initialization, where the article started.
//third_party/webrtc/test/scenario/call_client.cc
CallClient::CallClient(
TimeController* time_controller,
std::unique_ptr<LogWriterFactoryInterface> log_writer_factory,
CallClientConfig config)
: time_controller_(time_controller),
clock_(time_controller->GetClock()),
log_writer_factory_(std::move(log_writer_factory)),
network_controller_factory_(log_writer_factory_.get(), config.transport),
header_parser_(RtpHeaderParser::CreateForTest()),
task_queue_(time_controller->GetTaskQueueFactory()->CreateTaskQueue(
"CallClient",
TaskQueueFactory::Priority::NORMAL)) {
call_.reset(CreateCall(time_controller_, event_log_.get(), config,
&network_controller_factory_,
fake_audio_setup_.audio_state, module_thread_));
transport_ = std::make_unique<NetworkNodeTransport>(clock_, call_.get());
}
Call* CreateCall(TimeController* time_controller,
RtcEventLog* event_log,
CallClientConfig config,
LoggingNetworkControllerFactory* network_controller_factory,
rtc::scoped_refptr<AudioState> audio_state,
rtc::scoped_refptr<SharedModuleThread> call_thread) {
CallConfig call_config(event_log);
call_config.bitrate_config.max_bitrate_bps =
config.transport.rates.max_rate.bps_or(-1);
call_config.bitrate_config.min_bitrate_bps =
config.transport.rates.min_rate.bps();
call_config.bitrate_config.start_bitrate_bps =
config.transport.rates.start_rate.bps();
call_config.task_queue_factory = time_controller->GetTaskQueueFactory();
call_config.network_controller_factory = network_controller_factory;
call_config.audio_state = audio_state;
call_config.trials = config.field_trials;
return Call::Create(call_config, time_controller->GetClock(),
std::move(call_thread),
time_controller->CreateProcessThread("Pacer"));
}
//third_party/webrtc/call/call.cc
Call* Call::Create(const Call::Config& config,
Clock* clock,
rtc::scoped_refptr<SharedModuleThread> call_thread,
std::unique_ptr<ProcessThread> pacer_thread) {
RTC_DCHECK(config.task_queue_factory);
return new internal::Call(
clock, config,
std::make_unique<RtpTransportControllerSend>(
clock, config.event_log, config.network_state_predictor_factory,
config.network_controller_factory, config.bitrate_config,
std::move(pacer_thread), config.task_queue_factory, config.trials),
std::move(call_thread), config.task_queue_factory);
}
Registering the transport that sends the data (sender->transport_.get()):
VideoStreamPair* Scenario::CreateVideoStream(
std::pair<CallClient*, CallClient*> clients,
VideoStreamConfig config) {
video_streams_.emplace_back(
new VideoStreamPair(clients.first, clients.second, config));
return video_streams_.back().get();
}
VideoStreamPair::VideoStreamPair(CallClient* sender,
CallClient* receiver,
VideoStreamConfig config)
: config_(config),
matcher_(config.hooks.frame_pair_handlers),
send_stream_(sender, config, sender->transport_.get(), &matcher_),
receive_stream_(receiver,
config,
&send_stream_,
/*chosen_stream=*/0,
receiver->transport_.get(),
&matcher_) {}
//third_party/webrtc/test/scenario/video_stream.cc
SendVideoStream::SendVideoStream(CallClient* sender,
VideoStreamConfig config,
Transport* send_transport,
VideoFrameMatcher* matcher){
VideoSendStream::Config send_config =
CreateVideoSendStreamConfig(config, ssrcs_, rtx_ssrcs_, send_transport);
send_stream_ = sender_->call_->CreateVideoSendStream(
std::move(send_config), std::move(encoder_config));
}
//send_transport
VideoSendStreamImpl::VideoSendStreamImpl(/* ... */)
    : rtp_video_sender_(transport_->CreateRtpVideoSender(
          suspended_ssrcs,
          suspended_payload_states,
          config_->rtp,
          config_->rtcp_report_interval_ms,
          config_->send_transport,
          CreateObservers(call_stats,
                          &encoder_feedback_,
                          stats_proxy_,
                          send_delay_stats),
          /* ... remaining arguments elided ... */)) {
}
RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender(
std::map<uint32_t, RtpState> suspended_ssrcs,
const std::map<uint32_t, RtpPayloadState>& states,
const RtpConfig& rtp_config,
int rtcp_report_interval_ms,
Transport* send_transport,
const RtpSenderObservers& observers,
RtcEventLog* event_log,
std::unique_ptr<FecController> fec_controller,
const RtpSenderFrameEncryptionConfig& frame_encryption_config,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
video_rtp_senders_.push_back(std::make_unique<RtpVideoSender>(
clock_, suspended_ssrcs, states, rtp_config, rtcp_report_interval_ms,
send_transport, observers,
// TODO(holmer): Remove this circular dependency by injecting
// the parts of RtpTransportControllerSendInterface that are really used.
this, event_log, &retransmission_rate_limiter_, std::move(fec_controller),
frame_encryption_config.frame_encryptor,
frame_encryption_config.crypto_options, std::move(frame_transformer)));
return video_rtp_senders_.back().get();
}
RtpVideoSender::RtpVideoSender(
Clock* clock,
std::map<uint32_t, RtpState> suspended_ssrcs,
const std::map<uint32_t, RtpPayloadState>& states,
const RtpConfig& rtp_config,
int rtcp_report_interval_ms,
Transport* send_transport,
const RtpSenderObservers& observers,
RtpTransportControllerSendInterface* transport,
RtcEventLog* event_log,
RateLimiter* retransmission_limiter,
std::unique_ptr<FecController> fec_controller,
FrameEncryptorInterface* frame_encryptor,
const CryptoOptions& crypto_options,
rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
: send_side_bwe_with_overhead_(absl::StartsWith(
field_trials_.Lookup("WebRTC-SendSideBwe-WithOverhead"),
"Enabled")),
account_for_packetization_overhead_(!absl::StartsWith(
field_trials_.Lookup("WebRTC-SubtractPacketizationOverhead"),
"Disabled")),
use_early_loss_detection_(!absl::StartsWith(
field_trials_.Lookup("WebRTC-UseEarlyLossDetection"),
"Disabled")),
has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)),
use_deferred_fec_(
absl::StartsWith(field_trials_.Lookup("WebRTC-DeferredFecGeneration"),
"Enabled")),
active_(false),
module_process_thread_(nullptr),
suspended_ssrcs_(std::move(suspended_ssrcs)),
fec_controller_(std::move(fec_controller)),
fec_allowed_(true),
rtp_streams_(CreateRtpStreamSenders(clock,
rtp_config,
observers,
rtcp_report_interval_ms,
send_transport,
transport->GetBandwidthObserver(),
transport,
suspended_ssrcs_,
event_log,
retransmission_limiter,
frame_encryptor,
crypto_options,
std::move(frame_transformer),
use_deferred_fec_,
field_trials_)),
rtp_config_(rtp_config),
codec_type_(GetVideoCodecType(rtp_config)),
transport_(transport),
transport_overhead_bytes_per_packet_(0),
encoder_target_rate_bps_(0),
frame_counts_(rtp_config.ssrcs.size()),
frame_count_observer_(observers.frame_count_observer) {
for (const RtpStreamSender& stream : rtp_streams_) {
constexpr bool remb_candidate = true;
transport->packet_router()->AddSendRtpModule(stream.rtp_rtcp.get(),
remb_candidate);
}
}
void PacketRouter::AddSendRtpModule(RtpRtcpInterface* rtp_module,
bool remb_candidate) {
// body elided; each of the module's SSRCs is registered via
// AddSendRtpModuleToMap()
}
void PacketRouter::AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module,
uint32_t ssrc) {
RTC_DCHECK(send_modules_map_.find(ssrc) == send_modules_map_.end());
// Always keep the audio modules at the back of the list, so that when we
// iterate over the modules in order to find one that can send padding we
// will prioritize video. This is important to make sure they are counted
// into the bandwidth estimate properly.
if (rtp_module->IsAudioConfigured()) {
send_modules_list_.push_back(rtp_module);
} else {
send_modules_list_.push_front(rtp_module);
}
send_modules_map_[ssrc] = rtp_module;
}
Sending data out through the transport:
void PacketRouter::SendPacket(std::unique_ptr<RtpPacketToSend> packet,
const PacedPacketInfo& cluster_info) {
uint32_t ssrc = packet->Ssrc();
auto kv = send_modules_map_.find(ssrc);
RtpRtcpInterface* rtp_module = kv->second;
if (!rtp_module->TrySendPacket(packet.get(), cluster_info)) {
RTC_LOG(LS_WARNING) << "Failed to send packet, rejected by RTP module.";
return;
}
}
bool ModuleRtpRtcpImpl2::TrySendPacket(RtpPacketToSend* packet,
const PacedPacketInfo& pacing_info) {
RTC_DCHECK(rtp_sender_);
// TODO(sprang): Consider if we can remove this check.
if (!rtp_sender_->packet_generator.SendingMedia()) {
return false;
}
rtp_sender_->packet_sender.SendPacket(packet, pacing_info);
return true;
}
void RtpSenderEgress::SendPacket(RtpPacketToSend* packet,
const PacedPacketInfo& pacing_info) {
const bool send_success = SendPacketToNetwork(*packet, options, pacing_info);
}
bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet,
const PacketOptions& options,
const PacedPacketInfo& pacing_info) {
if (transport_) {
bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options)
? static_cast<int>(packet.size())
: -1;
if (event_log_ && bytes_sent > 0) {
event_log_->Log(std::make_unique<RtcEventRtpPacketOutgoing>(
packet, pacing_info.probe_cluster_id));
}
}
}
ModuleRtpRtcpImpl2 holds both rtcp_sender_ and rtp_sender_; all data leaves through the transport.
The transport implemented in the simulator:
bool NetworkNodeTransport::SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) {
int64_t send_time_ms = sender_clock_->TimeInMilliseconds();
rtc::SentPacket sent_packet;
sent_packet.packet_id = options.packet_id;
sent_packet.info.included_in_feedback = options.included_in_feedback;
sent_packet.info.included_in_allocation = options.included_in_allocation;
sent_packet.send_time_ms = send_time_ms;
sent_packet.info.packet_size_bytes = length;
sent_packet.info.packet_type = rtc::PacketType::kData;
sender_call_->OnSentPacket(sent_packet);
MutexLock lock(&mutex_);
if (!endpoint_)
return false;
rtc::CopyOnWriteBuffer buffer(packet, length);
endpoint_->SendPacket(local_address_, remote_address_, buffer,
packet_overhead_.bytes());
return true;
}
bool NetworkNodeTransport::SendRtcp(const uint8_t* packet, size_t length) {
rtc::CopyOnWriteBuffer buffer(packet, length);
MutexLock lock(&mutex_);
if (!endpoint_)
return false;
endpoint_->SendPacket(local_address_, remote_address_, buffer,
packet_overhead_.bytes());
return true;
}
Only the data-receiving flow inside the simulator is analyzed here.
CallClientPair* Scenario::CreateRoutes(
CallClient* first,
std::vector<EmulatedNetworkNode*> send_link,
DataSize first_overhead,
CallClient* second,
std::vector<EmulatedNetworkNode*> return_link,
DataSize second_overhead) {
CallClientPair* client_pair = new CallClientPair(first, second);
ChangeRoute(client_pair->forward(), send_link, first_overhead);
ChangeRoute(client_pair->reverse(), return_link, second_overhead);
client_pairs_.emplace_back(client_pair);
return client_pair;
}
void Scenario::ChangeRoute(std::pair<CallClient*, CallClient*> clients,
std::vector<EmulatedNetworkNode*> over_nodes,
DataSize overhead) {
EmulatedRoute* route = network_manager_.CreateRoute(over_nodes);
uint16_t port = clients.second->Bind(route->to);
auto addr = rtc::SocketAddress(route->to->GetPeerLocalAddress(), port);
clients.first->transport_->Connect(route->from, addr, overhead);
}
uint16_t CallClient::Bind(EmulatedEndpoint* endpoint) {
uint16_t port = endpoint->BindReceiver(0, this).value();
endpoints_.push_back({endpoint, port});
return port;
}
After the endpoint receives a packet, it invokes the receive callback on CallClient.
void CallClient::OnPacketReceived(EmulatedIpPacket packet) {
MediaType media_type = MediaType::ANY;
if (!RtpHeaderParser::IsRtcp(packet.cdata(), packet.data.size())) {
auto ssrc = RtpHeaderParser::GetSsrc(packet.cdata(), packet.data.size());
RTC_CHECK(ssrc.has_value());
media_type = ssrc_media_types_[*ssrc];
}
task_queue_.PostTask(
[call = call_.get(), media_type, packet = std::move(packet)]() mutable {
call->Receiver()->DeliverPacket(media_type, packet.data,
packet.arrival_time.us());
});
}
After a week of work, the code has been ported to ns-3 and a number of bugs are resolved.
A few lines of webrtc code had to be commented out; this check was preventing packets from being sent:
//third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
bool ModuleRtpRtcpImpl::TrySendPacket(RtpPacketToSend* packet,
const PacedPacketInfo& pacing_info) {
RTC_DCHECK(rtp_sender_);
// TODO(sprang): Consider if we can remove this check.
//if (!rtp_sender_->packet_generator.SendingMedia()) {
// return false;
// }
rtp_sender_->packet_sender.SendPacket(packet, pacing_info);
return true;
}
Another pitfall: ns-3 Packet objects must not be used outside ns-3's own thread. To reduce the number of memory allocations, Buffer::Data blocks are recycled through g_freeList, which is not thread-safe.
Buffer::Data *
Buffer::Create (uint32_t dataSize)
{
NS_LOG_FUNCTION (dataSize);
/* try to find a buffer correctly sized. */
if (IS_UNINITIALIZED (g_freeList))
{
g_freeList = new Buffer::FreeList ();
}
else if (IS_INITIALIZED (g_freeList))
{
while (!g_freeList->empty ())
{
struct Buffer::Data *data = g_freeList->back ();
g_freeList->pop_back ();
if (data->m_size >= dataSize)
{
data->m_count = 1;
return data;
}
Buffer::Deallocate (data);
}
}
struct Buffer::Data *data = Buffer::Allocate (dataSize);
NS_ASSERT (data->m_count == 1);
return data;
}
Some hints from the resulting crashes:
assert failed. cond="m_data->m_count == 1 || m_start == m_data->m_dirtyStart", file=../src/network/model/buffer.cc, line=321
terminate called without an active exception
0x00007fffef89619f in ns3::Buffer::Create (dataSize=1438)
at ../src/network/model/buffer.cc:133
133 if (data->m_size >= dataSize)
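One workaround consistent with this constraint (a sketch of the approach, not the code from the repo; the helper names and the delivery hook are hypothetical): copy the raw bytes on the webrtc thread, and build the ns3::Packet only inside an event scheduled onto the ns-3 main loop.
#include <cstring>
#include "ns3/packet.h"
#include "ns3/simulator.h"
// Runs on the ns-3 thread, so creating the Packet here is safe.
static void DeliverBytes(uint8_t* data, uint32_t size) {
  ns3::Ptr<ns3::Packet> packet = ns3::Create<ns3::Packet>(data, size);
  // ... hand |packet| to the sending socket/NetDevice here ...
  delete[] data;
}
// Called from a webrtc task-queue thread.
void SendToNs3(uint32_t nodeId, const uint8_t* buf, size_t size) {
  uint8_t* copy = new uint8_t[size];
  std::memcpy(copy, buf, size);
  ns3::Simulator::ScheduleWithContext(nodeId, ns3::Seconds(0), &DeliverBytes,
                                      copy, static_cast<uint32_t>(size));
}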
code-repo