A recent project needed audio and video transmission over WebRTC, with the video source being a capture of the Windows desktop.
The screen-capture path is implemented on top of the PeerConnectionClient demo. The video_track_ created in Conductor::AddTracks has to be changed so that its source is a DesktopCapturer instead of the VcmCapturer used by the demo.
void Conductor::AddTracks() {
if (!peer_connection_->GetSenders().empty()) {
return; // Already added tracks.
}
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
peer_connection_factory_->CreateAudioTrack(
kAudioLabel, peer_connection_factory_->CreateAudioSource(
cricket::AudioOptions())));
auto result_or_error = peer_connection_->AddTrack(audio_track, {kStreamId});
if (!result_or_error.ok()) {
RTC_LOG(LS_ERROR) << "Failed to add audio track to PeerConnection: "
<< result_or_error.error().message();
}
// The video source is replaced with a capture of the Windows desktop.
rtc::scoped_refptr<MyCapturer> video_device = new rtc::RefCountedObject<MyCapturer>();
if (video_device) {
video_device->startCapturer();
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
main_wnd_->StartLocalRenderer(video_track_);
result_or_error = peer_connection_->AddTrack(video_track_, {kStreamId});
if (!result_or_error.ok()) {
RTC_LOG(LS_ERROR) << "Failed to add video track to PeerConnection: "
<< result_or_error.error().message();
}
} else {
RTC_LOG(LS_ERROR) << "OpenVideoCaptureDevice failed";
}
}
CreateVideoTrack takes a VideoTrackSourceInterface* source parameter. We capture the screen through the webrtc::DesktopCapturer::Callback interface (OnCaptureResult), and then the converted YUV data is handed to WebRTC for encoding, packetization and transmission via AdaptedVideoTrackSource's OnFrame.
Straight to the code:
class MyCapturer : public rtc::AdaptedVideoTrackSource,
public rtc::MessageHandler,
public webrtc::DesktopCapturer::Callback {
public:
MyCapturer();
void startCapturer();
void CaptureFrame();
bool is_screencast() const override;
absl::optional<bool> needs_denoising() const override;
webrtc::MediaSourceInterface::SourceState state() const override;
bool remote() const override;
void OnCaptureResult(webrtc::DesktopCapturer::Result result,
std::unique_ptr<webrtc::DesktopFrame> frame) override;
void OnMessage(rtc::Message* msg) override;
private:
std::unique_ptr<webrtc::DesktopCapturer> capturer_;
rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer_;
//mutable volatile int ref_count_;
};
MyCapturer::MyCapturer() {
}
void MyCapturer::startCapturer() {
auto options = webrtc::DesktopCaptureOptions::CreateDefault();
options.set_allow_directx_capturer(true);
capturer_ = webrtc::DesktopCapturer::CreateScreenCapturer(options);
capturer_->Start(this);
CaptureFrame();
}
webrtc::MediaSourceInterface::SourceState MyCapturer::state() const {
return webrtc::MediaSourceInterface::kLive;
}
bool MyCapturer::remote() const {
return false;
}
bool MyCapturer::is_screencast() const {
return true;
}
absl::optional<bool> MyCapturer::needs_denoising() const {
return false;
}
void MyCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
std::unique_ptr<webrtc::DesktopFrame> frame) {
if (result != webrtc::DesktopCapturer::Result::SUCCESS)
return;
int width = frame->size().width();
int height = frame->size().height();
if (!i420_buffer_.get() ||
i420_buffer_->width() * i420_buffer_->height() < width * height) {
i420_buffer_ = webrtc::I420Buffer::Create(width, height);
}
libyuv::ConvertToI420(frame->data(), 0, i420_buffer_->MutableDataY(),
i420_buffer_->StrideY(), i420_buffer_->MutableDataU(),
i420_buffer_->StrideU(), i420_buffer_->MutableDataV(),
i420_buffer_->StrideV(), 0, 0, width, height, width,
height, libyuv::kRotate0, libyuv::FOURCC_ARGB);
OnFrame(webrtc::VideoFrame(i420_buffer_, 0, 0, webrtc::kVideoRotation_0));
}
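// Note: the frame above is delivered with timestamp 0 and without frame
// adaptation. On this WebRTC branch (an assumption), AdaptedVideoTrackSource
// also exposes AdaptFrame(); a variant could pass rtc::TimeMicros() as the
// capture time and skip frames that AdaptFrame() rejects before calling
// OnFrame(). Shown here only as a suggestion, not part of the original demo.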
void MyCapturer::OnMessage(rtc::Message* msg) {
if (msg->message_id == 0)
CaptureFrame();
}
void MyCapturer::CaptureFrame() {
capturer_->CaptureFrame();
// Schedule the next capture in 33 ms (about 30 fps).
rtc::Location loc(__FUNCTION__, __FILE__);
rtc::Thread::Current()->PostDelayed(loc, 33, this, 0);
}
Demo entry point:
If you have any questions, please contact [email protected].