webrtc屏幕共享

概述:通过webrtc/modules/desktop_capture/模块实现屏幕采集(参照:https://www.jianshu.com/p/484edbd3311f)

原理:窗口采集主要使用GDI或者DirectX实现。编译webrtc_modules模块将desktop_capture包含,此时会涉及到winsock2.h和windows.h的冲突问题,只需要添加WIN32_LEAN_AND_MEAN预编译选项即可。实现屏幕共享类,其中VideoCapturer类主要是用于实现数据源。Callback类主要用于屏幕采集后的数据回调。MessageHandler用于定时进行屏幕采集。

// Screen-share source: cricket::VideoCapturer supplies frames to the pipeline,
// DesktopCapturer::Callback receives captured desktop frames, and
// rtc::MessageHandler drives the periodic capture timer.
class windowscapture : public cricket::VideoCapturer,
                       public webrtc::DesktopCapturer::Callback,
                       public rtc::MessageHandler
{
};  // NOTE: the original snippet was missing this semicolon.

修改peerconnection_client,使数据源为windowscapture模块,并添加静态库d3d11.lib和dxgi.lib的引用。

  rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
      peer_connection_factory_->CreateVideoTrack(
          kVideoLabel,
          peer_connection_factory_->CreateVideoSource(std::unique_ptr<cricket::VideoCapturer>(new windowscapture())/*OpenVideoCaptureDevice()*/,
                                                      NULL)));

代码实现

#include"windowscaputre.h"
#include"webrtc\modules\desktop_capture\desktop_capture_options.h"
#include"third_party\libyuv\x86-windows-cl14\include\libyuv.h"
#include"webrtc\base\messagequeue.h"
#include"webrtc\base\thread.h"

// Constructor: advertise the single source format this capturer supports
// (800x600 @ 30 fps, I420). The original line lost its template argument
// during extraction ("std::vectorformats;") and did not compile.
windowscaputre::windowscaputre()
{
	std::vector<cricket::VideoFormat> formats;
	formats.push_back(cricket::VideoFormat(800, 600, cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));

	SetSupportedFormats(formats);
}
// Destructor: nothing to release explicitly here. NOTE(review): `capture` is
// assigned from DesktopCapturer::CreateScreenCapturer(), which returns a
// smart pointer, so it presumably cleans itself up — confirm the member's
// declared type in the header.
windowscaputre::~windowscaputre()
{

}

// Begin capturing: negotiate the closest supported format, create the screen
// capturer (DirectX/DXGI preferred), and kick off the periodic capture loop.
// Returns CS_RUNNING on success, CS_FAILED if the capturer cannot be created.
cricket::CaptureState windowscaputre::Start(const cricket::VideoFormat & capture_format)
{
	cricket::VideoFormat supported;
	if (GetBestCaptureFormat(capture_format, &supported))
	{
		SetCaptureFormat(&supported);
	}

	auto options = webrtc::DesktopCaptureOptions::CreateDefault();
	// Prefer the DirectX (DXGI duplication) capturer; WebRTC falls back to
	// GDI internally when it is unavailable.
	options.set_allow_directx_capturer(true);

	capture = webrtc::DesktopCapturer::CreateScreenCapturer(options);
	if (!capture)
	{
		// The original dereferenced a null capturer here when creation failed.
		SetCaptureState(cricket::CS_FAILED);
		return cricket::CS_FAILED;
	}

	// Only report RUNNING once the capturer actually exists.
	SetCaptureState(cricket::CS_RUNNING);
	capture->Start(this);

	// Grab the first frame now; CaptureFrame() re-posts itself every 33 ms.
	CaptureFrame();

	return cricket::CS_RUNNING;
}

void windowscaputre::Stop()
{
	SetCaptureState(cricket::CS_STOPPED);
	SetCaptureFormat(NULL);
}

// Reports whether the capture loop is active.
bool windowscaputre::IsRunning()
{
	// Capturing is considered active exactly while the capturer state
	// machine reports CS_RUNNING (set in Start(), cleared in Stop()).
	const bool running = (capture_state() == cricket::CS_RUNNING);
	return running;
}

// This source produces desktop/screen content rather than camera video;
// always true for this capturer.
bool windowscaputre::IsScreencast() const
{
	return true;
}

// Report the pixel formats this capturer can deliver, preferred first.
// Fixes two defects: the extraction-garbled parameter type (restored to the
// cricket::VideoCapturer signature, std::vector<uint32_t>*) and the return
// value — the original returned false even on success, which callers of
// GetPreferredFourccs treat as failure.
bool windowscaputre::GetPreferredFourccs(std::vector<uint32_t>* fourccs)
{
	if (!fourccs)
		return false;

	fourccs->push_back(cricket::FOURCC_I420);
	fourccs->push_back(cricket::FOURCC_MJPG);

	return true;
}

void windowscaputre::OnCaptureResult(webrtc::DesktopCapturer::Result result, std::unique_ptr frame)
{
	if (result != webrtc::DesktopCapturer::Result::SUCCESS)
		return;

	int width = frame->size().width();
	int height = frame->size().height();

	if (!i420_buffer.get())
	{
		i420_buffer = webrtc::I420Buffer::Create(width, height);
	}
	libyuv::ConvertToI420(frame->data(),0,i420_buffer->MutableDataY(),
		i420_buffer->StrideY(),i420_buffer->MutableDataU(),
		i420_buffer->StrideU(),i420_buffer->MutableDataV(),
		i420_buffer->StrideV(),0,0,width,height,width,height,
		libyuv::kRotate0,libyuv::FOURCC_ABGR);
	OnFrame(webrtc::VideoFrame(i420_buffer, 0, 0, webrtc::kVideoRotation_0), width, height);
}

// Timer callback: message_id 0 is the periodic capture tick that
// CaptureFrame() posts to the current thread every 33 ms.
void windowscaputre::OnMessage(rtc::Message * msg)
{
	if (msg->message_id != 0)
		return;

	CaptureFrame();
}

// Requests one desktop frame (delivered asynchronously via OnCaptureResult)
// and re-arms the ~30 fps timer (33 ms, message_id 0, handled in OnMessage).
// Fixes: the original dereferenced `capture` unconditionally, crashing if a
// queued tick arrived before Start() created the capturer or after creation
// failed.
void windowscaputre::CaptureFrame()
{
	if (!capture)
		return;

	capture->CaptureFrame();

	rtc::Location loc(__FUNCTION__, __FILE__);
	rtc::Thread::Current()->PostDelayed(loc, 33, this, 0);
}

最终效果如下:

你可能感兴趣的:(WebRTC)