} else {
// Find the ChannelMember of the peer (call it B) this client wants to talk to; returns NULL if the server cannot find it.
ChannelMember* target = clients.IsTargetedRequest(s);
if (target) {
// Forward the request containing client A's SDP information to B.
member->ForwardRequestToPeer(s, target);
} else if (s->PathEquals("/sign_out")) {
s->Send("200 OK", true, "text/plain", "", "");
}
}
}
}
}
When the client clicks connect, it sends a GET request for "/sign_in" to the server, and the server then sends the new user's information to every client currently in its list.
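For reference, the exchange looks roughly like this (reconstructed from the example's peer_channel.cc, so treat the exact layout as approximate): the client issues a request such as GET /sign_in?name HTTP/1.0, and the server replies with one line per known peer in the form name,id,connected (connected is 1 or 0), with the requester's own peer id carried in the Pragma response header.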
Client logic
The client is somewhat more complex: compared with the server it adds the multimedia handling. Yet the WebRTC example is quite elegant, implementing a complete video-call application with a UI in relatively little code.
Conductor wraps the two main classes, the window and the PeerConnection, and is the core of this project. Conductor creates the implementation object of PeerConnectionFactoryInterface via CreatePeerConnectionFactory, and through that interface creates the PeerConnectionInterface object at the heart of the WebRTC protocol. PeerConnectionFactoryInterface also provides interfaces for creating the local audio and video, and Conductor receives its callbacks through the PeerConnectionObserver interface.
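To make that wiring concrete, here is a minimal sketch, not the real conductor.h and heavily trimmed, of how one class can combine both roles (the class name MiniConductor and the comment bodies are illustrative only):
#include "api/peer_connection_interface.h"

class MiniConductor : public webrtc::PeerConnectionObserver {
 public:
  // Pure-virtual PeerConnectionObserver callbacks; real code reacts here.
  void OnSignalingChange(
      webrtc::PeerConnectionInterface::SignalingState new_state) override {}
  void OnDataChannel(
      rtc::scoped_refptr<webrtc::DataChannelInterface> channel) override {}
  void OnIceGatheringChange(
      webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
    // Serialize the candidate and relay it to the peer via the signaling server.
  }

 private:
  // Created through CreatePeerConnectionFactory / CreatePeerConnection below.
  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory_;
  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
};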
Linux UI
The Linux UI layer uses the GTK display framework. Click, input, and other events are tied to the actions they should trigger through GTK's signal mechanism; joining a conference, for example, involves creating and connecting the PeerConnection. Here clicked is the signal emitted by the Button, while row-activated is emitted when an entry in the peer list is activated, so both callbacks below get registered: OnClickedCallback reads the login server IP address and port the user entered and issues the HTTP login request to the server, while the row-activated callback initializes the PeerConnection and creates and sends the SDP. In the code, PeerConnection and the window are kept as two independent components, managed by the Conductor class.
g_signal_connect(button, "clicked", G_CALLBACK(OnClickedCallback), this);
g_signal_connect(peer_list_, "row-activated",
                 G_CALLBACK(OnRowActivatedCallback), this);
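The callbacks themselves are plain C functions; the shape GTK expects looks like the following sketch, which mirrors main_wnd.cc in the example (simplified here):
// "data" is the user-data pointer passed to g_signal_connect (here: this);
// it lets the static C callback bounce back into the owning C++ object.
static void OnClickedCallback(GtkWidget* widget, gpointer data) {
  reinterpret_cast<GtkMainWnd*>(data)->OnClicked(widget);
}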
The P2P setup is somewhat long-winded; the steps are listed below:
peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
nullptr /* network_thread */, nullptr /* worker_thread */,
nullptr /* signaling_thread */, nullptr /* default_adm */,
webrtc::CreateBuiltinAudioEncoderFactory(),
webrtc::CreateBuiltinAudioDecoderFactory(),
webrtc::CreateBuiltinVideoEncoderFactory(),
webrtc::CreateBuiltinVideoDecoderFactory(), nullptr /* audio_mixer */,
nullptr /* audio_processing */);
// Some of the settings in config are flags such as whether DTLS is enabled.
peer_connection_ = peer_connection_factory_->CreatePeerConnection(
config, nullptr, nullptr, this);
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
    peer_connection_factory_->CreateAudioTrack(
        kAudioLabel, peer_connection_factory_->CreateAudioSource(
                         cricket::AudioOptions())));
auto result_or_error = peer_connection_->AddTrack(audio_track, {kStreamId});
rtc::scoped_refptr<CapturerTrackSource> video_device =
    CapturerTrackSource::Create();
if (video_device) {
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
    peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
main_wnd_->StartLocalRenderer(video_track_);
result_or_error = peer_connection_->AddTrack(video_track_, {kStreamId});
}
After the login request and the SDP have been sent, the network thread keeps watching for data from the network and triggers the corresponding handlers, e.g. updating the peer list or handling data sent by the peer; data from the peer is ultimately processed in OnMessageFromPeer. If the peer is new, InitializePeerConnection is called to create the PeerConnection object, as we already saw when initiating a call; if this callback fires on the side that initiated the call, that object is not created again, but the peer's SDP, which is in JSON format, still has to be parsed.
std::unique_ptr<webrtc::SessionDescriptionInterface> session_description =
    webrtc::CreateSessionDescription(type, sdp, &error);
peer_connection_->SetRemoteDescription(
DummySetSessionDescriptionObserver::Create(),
session_description.release());
if (type == webrtc::SdpType::kOffer) {
peer_connection_->CreateAnswer(
this, webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
}
Besides SDP, the received message may instead be hole-punching information (an ICE candidate), which also needs to be stored:
std::unique_ptr<webrtc::IceCandidateInterface> candidate(
    webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdp, &error));
if (!peer_connection_->AddIceCandidate(candidate.get())) {
RTC_LOG(WARNING) << "Failed to apply the received candidate";
return;
}
Mac UI
This section uses the Mac platform as the example of how to use WebRTC. The figure below shows the interface of the application, written in Objective-C, i.e. the UI-layer content of the previous section:
The program's entry point is in main.m in the examples/objc/AppRTCMobile/mac directory (Objective-C source files end in .m; Objective-C++ files end in .mm). The main function is as follows:
APPRTCAppDelegate.h:
@interface APPRTCAppDelegate : NSObject <NSApplicationDelegate>
@end
APPRTCAppDelegate.m:
@implementation APPRTCAppDelegate {
APPRTCViewController* _viewController;
NSWindow* _window;
}
main.m:
#import <AppKit/AppKit.h>
#import "APPRTCAppDelegate.h"
int main(int argc, char* argv[]) {
@autoreleasepool {
[NSApplication sharedApplication];
APPRTCAppDelegate* delegate = [[APPRTCAppDelegate alloc] init];
[NSApp setDelegate:delegate];
[NSApp run];
}
}
This is Objective-C syntax. NSApplicationDelegate is the application-delegate protocol in the Cocoa library, and the application object itself defines many methods, such as run. The @interface line declares that APPRTCAppDelegate conforms to the NSApplicationDelegate protocol. In its implementation (@implementation) this class defines two components, APPRTCViewController and NSWindow: NSWindow is the graphical window seen in the figure above (its width, height and so on are set here), while APPRTCViewController handles user interaction such as the input fields and buttons. When the application reaches run in main.m, applicationDidFinishLaunching in APPRTCAppDelegate.m is triggered (this follows from the Cocoa application life cycle).
- (void)applicationDidFinishLaunching:(NSNotification*)notification {
RTCInitializeSSL();
NSScreen* screen = [NSScreen mainScreen];
NSRect visibleRect = [screen visibleFrame];
NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
NSMidY(visibleRect),
1320,
1140);
NSUInteger styleMask = NSTitledWindowMask | NSClosableWindowMask;
_window = [[NSWindow alloc] initWithContentRect:windowRect
styleMask:styleMask
backing:NSBackingStoreBuffered
defer:NO];
_window.delegate = self;
[_window makeKeyAndOrderFront:self];
[_window makeMainWindow];
_viewController = [[APPRTCViewController alloc] initWithNibName:nil
bundle:nil];
[_window setContentView:[_viewController view]];
}
The leading - denotes an instance method. This one just sets the display window's parameters and initializes the controller component (APPRTCViewController). APPRTCViewController is responsible for displaying the local and remote video streams, the room number, and the call controls. When the user clicks to join a conference, the following method in APPRTCViewController.m runs:
- (void)startCall:(id)sender {
NSString* roomString = _roomField.stringValue;
// Generate room id for loopback options.
if (_loopbackButton.intValue && [roomString isEqualToString:@""]) {
roomString = [NSUUID UUID].UUIDString;
roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
}
[self.delegate appRTCMainView:self
didEnterRoomId:roomString
loopback:_loopbackButton.intValue];
[self setNeedsUpdateConstraints:YES];
}
roomString is the room-number string entered earlier; if it is empty, a loopback test room id is generated.
@interface APPRTCViewController () <ARDAppClientDelegate, APPRTCMainViewDelegate>
@property(nonatomic, readonly) APPRTCMainView* mainView;
@end
Because this controller has to manage both signaling plus media transport and state plus media display, ARDAppClientDelegate handles the former while APPRTCMainViewDelegate handles the state and media display; the angle brackets indicate that APPRTCViewController conforms to these two protocols. Clicking start call eventually reaches ARDAppClient's connectToRoomWithId method; the code fragment below is where the mac directory calls into the core code of its parent directory, AppRTCMobile.
mac/APPRTCViewController.m
ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self];
[client connectToRoomWithId:roomId
settings:[[ARDSettingsModel alloc] init] // Use default settings.
isLoopback:isLoopback];
This method is declared in src/examples/objc/AppRTCMobile/ARDAppClient.h:
- (void)connectToRoomWithId:(NSString *)roomId
settings:(ARDSettingsModel *)settings
isLoopback:(BOOL)isLoopback {
NSParameterAssert(roomId.length);
NSParameterAssert(_state == kARDAppClientStateDisconnected);
_settings = settings;
_isLoopback = isLoopback;
self.state = kARDAppClientStateConnecting;
// Initialize the video codec settings.
RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init];
RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
_factory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory
decoderFactory:decoderFactory];
#if defined(WEBRTC_IOS)
if (kARDAppClientEnableTracing) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
RTCStartInternalCapture(filePath);
}
#endif
// Request TURN servers.
__weak ARDAppClient *weakSelf = self;
[_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
NSError *error) {
if (error) {
RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription);
}
ARDAppClient *strongSelf = weakSelf;
[strongSelf.iceServers addObjectsFromArray:turnServers];
strongSelf.isTurnComplete = YES;
[strongSelf startSignalingIfReady]; // The key step.
}];
// Join the room on the room server.
[_roomServerClient joinRoomWithRoomId:roomId
isLoopback:isLoopback
completionHandler:^(ARDJoinResponse *response, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *joinError =
[[strongSelf class] errorForJoinResultType:response.result];
if (joinError) {
RTCLogError(@"Failed to join room:%@ on room server.", roomId);
[strongSelf disconnect];
[strongSelf.delegate appClient:strongSelf didError:joinError];
return;
}
RTCLog(@"Joined room:%@ on room server.", roomId);
strongSelf.roomId = response.roomId;
strongSelf.clientId = response.clientId;
strongSelf.isInitiator = response.isInitiator;
for (ARDSignalingMessage *message in response.messages) {
if (message.type == kARDSignalingMessageTypeOffer ||
message.type == kARDSignalingMessageTypeAnswer) {
strongSelf.hasReceivedSdp = YES;
[strongSelf.messageQueue insertObject:message atIndex:0];
} else {
[strongSelf.messageQueue addObject:message];
}
}
strongSelf.webSocketURL = response.webSocketURL;
strongSelf.webSocketRestURL = response.webSocketRestURL;
[strongSelf registerWithColliderIfReady];
[strongSelf startSignalingIfReady];
}];
}
SDK bridging layer
The SDK layer connects the UI's window controls with the related components of the native layer (audio/video/network). This connection has two aspects: one is invoking the corresponding native-layer functionality, the other is aggregating the native-layer classes and methods for combined use (e.g. composing audio and video into one MediaStream). In the P2P scenario the SDK layer mainly consists of video source, video track, audio source, audio track, DataChannel, PeerConnection, RTP, ICE, SessionDescription and so on. Their names speak for themselves; PeerConnection is the core of the core, and nearly everything the SDK layer provides for audio, video and networking relates to it in some way.
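As a sketch of that aggregation (assuming a factory and two tracks created as in the Linux example earlier; the stream id "stream_id" is an arbitrary placeholder, and the exact AddTrack signature varies slightly across WebRTC revisions):
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
    peer_connection_factory_->CreateLocalMediaStream("stream_id");
stream->AddTrack(audio_track.get());  // compose audio...
stream->AddTrack(video_track.get());  // ...and video into one MediaStream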
The state of a P2P call is managed by the PeerConnection: the signaling state governs the signaling handshake, and the ICE connection state governs NAT traversal:
/** Represents the signaling state of the peer connection. */
typedef NS_ENUM(NSInteger, RTCSignalingState) {
RTCSignalingStateStable,
RTCSignalingStateHaveLocalOffer,
RTCSignalingStateHaveLocalPrAnswer,
RTCSignalingStateHaveRemoteOffer,
RTCSignalingStateHaveRemotePrAnswer,
// Not an actual state, represents the total number of states.
RTCSignalingStateClosed,
};
/** Represents the ice connection state of the peer connection. */
typedef NS_ENUM(NSInteger, RTCIceConnectionState) {
RTCIceConnectionStateNew,
RTCIceConnectionStateChecking,
RTCIceConnectionStateConnected,
RTCIceConnectionStateCompleted,
RTCIceConnectionStateFailed,
RTCIceConnectionStateDisconnected,
RTCIceConnectionStateClosed,
RTCIceConnectionStateCount,
};
/** Represents the combined ice+dtls connection state of the peer connection. */
typedef NS_ENUM(NSInteger, RTCPeerConnectionState) {
RTCPeerConnectionStateNew,
RTCPeerConnectionStateConnecting,
RTCPeerConnectionStateConnected,
RTCPeerConnectionStateDisconnected,
RTCPeerConnectionStateFailed,
RTCPeerConnectionStateClosed,
};
/** Represents the ice gathering state of the peer connection. */
typedef NS_ENUM(NSInteger, RTCIceGatheringState) {
RTCIceGatheringStateNew,
RTCIceGatheringStateGathering,
RTCIceGatheringStateComplete,
};
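In the C++ API the same states surface through PeerConnectionObserver callbacks. Continuing the earlier MiniConductor sketch (the override must also be declared in the class body; the failure handling is illustrative, not from the example):
// Called on the signaling thread whenever the ICE connection state changes.
void MiniConductor::OnIceConnectionChange(
    webrtc::PeerConnectionInterface::IceConnectionState new_state) {
  if (new_state == webrtc::PeerConnectionInterface::kIceConnectionFailed) {
    // E.g. tear the call down, or attempt an ICE restart.
  }
}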
Native API
The WebRTC example applications use the API layer throughout. The core object for a P2P connection is peer_connection_factory_, and the factory function that creates it is declared as follows:
RTC_EXPORT rtc::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
    rtc::Thread* network_thread,
    rtc::Thread* worker_thread,
    rtc::Thread* signaling_thread,
    rtc::scoped_refptr<AudioDeviceModule> default_adm,
    rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
    rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
    std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
    std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
    rtc::scoped_refptr<AudioMixer> audio_mixer,
    rtc::scoped_refptr<AudioProcessing> audio_processing);
} // namespace webrtc
The three threads are the network thread, the worker thread, and the signaling thread; the parameters after them cover audio and video. This section lays out the multimedia essentials at the API level; how these classes manage and implement the multimedia functionality is described in detail in the video and audio chapters.
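If the application wants to own these threads rather than pass nullptr (in which case the factory creates them internally), a sketch looks like this:
// The network thread needs a socket server; the other two are plain threads.
std::unique_ptr<rtc::Thread> network_thread = rtc::Thread::CreateWithSocketServer();
std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
network_thread->Start();
worker_thread->Start();
signaling_thread->Start();
// Their raw pointers are then passed as the first three factory arguments.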
adm
adm is short for audio device module. The class is defined in ./modules/audio_device/include/audio_device.h. Because the ADM has to adapt to different operating systems, an enum is defined to represent this; it lists the audio APIs the OS supports on each platform:
enum AudioLayer {
kPlatformDefaultAudio = 0,
kWindowsCoreAudio,
kWindowsCoreAudio2,
kLinuxAlsaAudio,
kLinuxPulseAudio,
kAndroidJavaAudio,
kAndroidOpenSLESAudio,
kAndroidJavaInputAndOpenSLESOutputAudio,
kAndroidAAudioAudio,
kAndroidJavaInputAndAAudioOutputAudio,
kDummyAudio,
};
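A sketch of requesting a specific layer when creating the ADM; note the Create signature varies across WebRTC revisions, and the TaskQueueFactory argument below assumes a fairly recent one:
// kPlatformDefaultAudio lets WebRTC pick the appropriate audio API per OS.
std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory =
    webrtc::CreateDefaultTaskQueueFactory();
rtc::scoped_refptr<webrtc::AudioDeviceModule> adm =
    webrtc::AudioDeviceModule::Create(
        webrtc::AudioDeviceModule::kPlatformDefaultAudio,
        task_queue_factory.get());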
WebRTC threading model
The WebRTC Native API uses two global threads, the signaling thread and the worker thread. The application may provide these threads itself, or WebRTC creates them internally. Calls to the Stream API and the PeerConnection API are proxied to the signaling thread, which means the application can invoke these APIs from any thread.
The Stream API is defined in media_stream_interface.h.
The PeerConnection API is defined in peer_connection_interface.h.
Since the GUI implementations differ across platforms (macOS/iOS uses the Cocoa SDK, Android uses the Android SDK, Linux uses GTK, and Windows uses the Windows SDK), their programming models and mechanics differ considerably; to make it convenient for each platform to call the Stream API and PeerConnection API and use the native-layer components, each wraps a layer of code on top of this native base.
P2P native-layer APIs
1. MediaStream: captures the synchronized audio/video media stream from the local microphone and camera;
2. RTCPeerConnection: the component for building stable, efficient streaming between peers;
3. RTCDataChannel: a high-throughput, low-latency channel between the peers for transferring arbitrary data (a creation sketch follows below).
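For the third item, a creation sketch (the label "chat" is an arbitrary placeholder; newer revisions return the channel through CreateDataChannelOrError instead):
webrtc::DataChannelInit config;
config.ordered = true;  // reliable, ordered delivery (the default)
rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel =
    peer_connection_->CreateDataChannel("chat", &config);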