====准备部分====
*首先在 [Agora.io 注册](https://dashboard.agora.io/cn/signup/) 注册账号,并创建自己的测试项目,获取到 AppID
*然后在 [Agora.io SDK](https://www.agora.io/cn/download/) 下载 **AMG 游戏 SDK**,解压后
*#把SDK中 **libs/Android/** 下的内容,复制到项目的 **Assets/Plugins/Android/AgoraRtcEngineKit.plugin/libs/** 文件夹下
*#把SDK中 **libs/iOS/** 下的内容,复制到项目的 **Assets/Plugins/iOS/** 文件夹下
*#把SDK中 **libs/Scripts/AgoraGamingSDK/** 下的内容,复制到项目的 **Assets/Scripts/AgoraGamingSDK/** 文件夹下
====接入部分====
*初始化Agora
public static void loadEngine(string appid)
{
    // Initialize the Agora RTC engine. Refuses to re-initialize when an
    // engine instance already exists — unload it first.
    Debug.Log("initializeEngine");
    if (mRtcEngine != null)
    {
        Debug.Log("Engine exists. Please unload it first!");
        return;
    }
    // Create the engine singleton with the AppID obtained from the Agora dashboard.
    mRtcEngine = IRtcEngine.getEngine(appid);
    // Enable every log level so problems surface during integration.
    mRtcEngine.SetLogFilter(LOG_FILTER.DEBUG | LOG_FILTER.INFO | LOG_FILTER.WARNING | LOG_FILTER.ERROR | LOG_FILTER.CRITICAL);
    // Register our subset of the SDK callbacks (see the Agora API docs for the full list).
    LoadEngineCallbacks();
}
*部分的回调函数如下(这里只列出我暂时用到的)
// Fired when the local user has successfully joined a channel.
static void EngineOnJoinChannelSuccess(string channelName, uint uid, int elapsed)
{
    // Fixed: the original concatenation had no separator, so the log read
    // e.g. "uid = 5channelName=room1" with the values run together.
    Debug.Log(string.Format("JoinChannelSuccessHandler: uid = {0}, channelName = {1}", uid, channelName));
}
// Fired after the local user leaves the channel; stats summarize the session.
static void EngineOnLeaveChannel(RtcStats stats)
{
    // Log the session summary directly instead of building an intermediate string.
    Debug.Log(string.Format(
        "leaveChannel callback duration {0}, tx: {1}, rx: {2}, tx kbps: {3}, rx kbps: {4}",
        stats.duration, stats.txBytes, stats.rxBytes, stats.txKBitRate, stats.rxKBitRate));
}
// Fired when a remote user joins the channel.
static void EngineOnUserJoined(uint uid, int elapsed)
{
    // Log the remote uid and the elapsed join time reported by the SDK.
    Debug.Log(string.Format("onUserJoined callback uid {0} {1}", uid, elapsed));
}
// Fired when a remote user leaves the channel (or drops offline).
private static void EngineOnUserOffline(uint uid, USER_OFFLINE_REASON reason)
{
    // The caller is expected to remove that user's video stream.
    Debug.Log(string.Format("onUserOffline: uid = {0}", uid));
}
// Invoked whenever the audio output route changes (e.g. headset plugged/unplugged).
public static void OnAudioRouteChanged(AUDIO_ROUTE route)
{
    Debug.Log("语音路由发生变化回调" + (int)route);
    // Route value 1 can remain after unplugging a headset, leaving the quiet
    // earpiece path active — force speakerphone in that case (see pitfalls section).
    if ((int)route != 1)
    {
        return;
    }
    Debug.Log("强制修改到语音路由的外放模式");
    SetEnableSpeakerphone(true);
}
*加入频道
// Joins the given channel with the given uid. Requires loadEngine() to have run.
public static void JoinChannel(string channelName, int uid)
{
    Debug.Log("videoMode" + videoMode);
    if (mRtcEngine == null)
    {
        Debug.Log("no mRtcEngine!");
        return;
    }
    // Turn on the video module and allow camera frames to be delivered to Unity.
    mRtcEngine.EnableVideo();
    mRtcEngine.EnableVideoObserver();
    // Users that join the same channel name can communicate with each other.
    int result = mRtcEngine.JoinChannel(channelName, null, (uint)uid);
    Debug.Log("joinChannel" + result);
}
*离开频道
// Leaves the current channel and fully tears the engine down.
public static void LeaveChannel()
{
    if (mRtcEngine == null)
    {
        return;
    }
    // Leave the channel, stop the video observer, detach our callbacks,
    // then destroy the engine singleton and clear our reference.
    mRtcEngine.LeaveChannel();
    mRtcEngine.DisableVideoObserver();
    UnLoadEngineCallbacks();
    IRtcEngine.Destroy();
    mRtcEngine = null;
}
*相关操作的api方法
// Mutes (or unmutes) the audio of every remote user in the channel.
public static void MuteAllRemoteAudioStreams(bool mute)
{
    if (mRtcEngine != null)
    {
        mRtcEngine.MuteAllRemoteAudioStreams(mute);
    }
}
// Mutes (or unmutes) the audio of the single remote user identified by uid.
public static void MuteRemoteAudioStream(int uid, bool mute)
{
    if (mRtcEngine != null)
    {
        mRtcEngine.MuteRemoteAudioStream((uint)uid, mute);
    }
}
// Enables the video module.
public static void EnableVideo()
{
    if (mRtcEngine != null)
    {
        mRtcEngine.EnableVideo();
    }
}
// Disables the video module.
public static void DisableVideo()
{
    if (mRtcEngine != null)
    {
        mRtcEngine.DisableVideo();
    }
}
// Pauses (or resumes) playback of every remote VIDEO stream.
// (The original comment wrongly referenced MuteAllRemoteAudioStreams.)
public static void MuteAllRemoteVideoStreams(bool mute)
{
    if (mRtcEngine != null)
    {
        mRtcEngine.MuteAllRemoteVideoStreams(mute);
    }
}
// Pauses (or resumes) playback of the video stream of the remote user uid.
public static void MuteRemoteVideoStream(uint uid, bool mute)
{
    if (mRtcEngine != null)
    {
        mRtcEngine.MuteRemoteVideoStream(uid, mute);
    }
}
*具体的接入流程
*#初始化Agora,设置相关回调函数
*#加入频道,需要带uid,我们这边的频道号 是gameid与房间号的组合,只要用户加入的是同一频道号就可以进行通信
====视频的渲染部分====
关于渲染部分,官方的demo的例子是在3D的gameObject上渲染的,我们这边是在rawImage上渲染的,具体的原理差不多:
*下面是渲染脚本的代码:
// Renders an Agora video stream onto the RawImage this script is attached to.
// mUid == 0 renders the local preview; set a remote uid after joining succeeds.
public class VideoSurface : MonoBehaviour {
    agora_gaming_rtc.IRtcEngine engine;
    RawImage rend;

    void Start () {
        Debug.Log("VideoSurface Start");
        engine = agora_gaming_rtc.IRtcEngine.QueryEngine();
        // BUG FIX: the original called GetComponent() with no type argument,
        // which does not compile; we need the RawImage on this GameObject.
        rend = GetComponent<RawImage>();
    }

    // Update is called once per frame
    void Update () {
        if (engine == null)
            return;
        // Pump queued SDK messages so callbacks fire on the Unity main thread.
        while (engine.GetMessageCount() > 0)
            engine.Poll ();
#if UNITY_IOS || UNITY_ANDROID
        uint uid = mUid;
        if (mEnable) {
            // Create the external texture on first use.
            if (rend.texture == null) {
                System.IntPtr texPtr = (System.IntPtr)engine.GenerateNativeTexture ();
                Texture2D nativeTexture = Texture2D.CreateExternalTexture(640, 360, TextureFormat.ARGB32, false, false, texPtr); // FIXME! texture size is subject to change
                rend.texture = nativeTexture;
            }
            // Push the latest frame for this uid into the native texture.
            if (rend.texture != null && rend.texture is Texture2D) {
                Texture2D tex = rend.texture as Texture2D;
                int texId = (int)tex.GetNativeTexturePtr ();
                // update texture (possible size changing)
                uint texWidth = 0;
                uint texHeight = 0;
                if (engine.UpdateTexture (texId, uid, ref texWidth, ref texHeight) == 0) {
                    // TODO: process texture then render
                }
            }
        }
        else {
            // Rendering disabled: release the native texture and clear the RawImage.
            if (rend.texture != null && rend.texture is Texture2D) {
                Texture2D tex = rend.texture as Texture2D;
                int texId = (int)tex.GetNativeTexturePtr ();
                engine.DeleteTexture(texId);
                rend.texture = null;
            }
        }
#endif
    }

    // Selects which user's stream this surface renders (0 = local preview).
    public void SetForUser(uint uid) {
        mUid = uid;
        Debug.Log ("Set uid " + uid + " for " + gameObject.name);
    }

    // Enables/disables rendering; typically enabled after joining the channel.
    public void SetEnable(bool enable) {
        mEnable = enable;
    }

    private uint mUid = 0;
    private bool mEnable = true; // if disabled, then no rendering happens
}
*这里有部分注意事项
*#这个脚本需要绑定在需要渲染的rawImage物体上
*#此脚本的 muid=0 时是代表渲染的自己的本地预览, 如果渲染其它人时,必须在加入频道成功后设置对面用户的muid才行
*#mEnable 是用来开启和禁用渲染的 一般是在加入频道成功后进行开启
====IOS和安卓 打包过程的配置====
*安卓需要配置 AndroidManifest.xml 文件
*#获取摄像机权限
*#获取声音配置修改权限
*IOS配置
*#在 Info.plist 里面添加 Camera Usage Description(NSCameraUsageDescription)获取摄像机权限
*#需要添加 libresolv.tbd、VideoToolbox.framework、CoreMotion.framework 这 3 个依赖库
====接入过程中的采坑点====
*iOS 报错 UnityGfxDeviceWorker(14):EXC_BAD_ACCESS(code = 1, address = 0x1)。这是因为 SDK 只支持 OpenGL ES 2。解决办法是在 Player Settings 里取消勾选 Auto Graphics API,然后去掉 Metal 即可
*打包 iOS 报错:"AssemblyResolutionException: Failed to resolve assembly: 'UnityEngine, Version=0.0.0.0, Culture=neutral'"(报错信息在此被截断)。解决办法是把 Unity 也安装(或复制)到项目所在的磁盘,并运行该磁盘上的 Unity
*在接入过程中,sdk方面在获取手机当前语音路由时会出现错误,sdk方面会有一个获取当前语音回调的函数
// Audio-route change callback used to work around the headset-unplug issue
// described in this section: after unplugging, the route can stay at 1 and
// leave the quiet earpiece path active, so we force speakerphone.
public static void OnAudioRouteChanged(AUDIO_ROUTE route)
{
Debug.Log("语音路由发生变化回调"+(int)route);
// route value 1 appears to be the earpiece/headset path — confirm against the SDK enum.
if ((int)route == 1)
{
SetEnableSpeakerphone(true);
}
}
在手机插入耳机、拔出耳机时都会走这个回调。插入耳机时 route 会变为 0,即耳机模式;但是拔出耳机时 route 会变为 1,也有一定概率是 3。为 1 时仍然是耳机模式,导致声音很小或者听不清。所以这里做了如下操作:将其强制改变到外放模式。
*在接入实时视频的过程中,视频语音和我们的游戏音效会有冲突:两者同时播放时,SDK 会把游戏音量变小。SDK 官方有指定的 API 去解决这个冲突(即下面的 API),但它需要通过绝对路径获取本地游戏的声音资源;我们项目的声音资源放在 Resources 文件夹下,而移动平台上取不到 Resources 文件夹下的绝对路径,所以暂时不使用
public abstract int StartAudioMixing (string filePath, bool loopback, bool replace, int cycle, int playTime = 0);
指定本地音频文件来和麦克风采集的音频流进行混音和替换(用音频文件替换麦克风采集的音频流),可以通过参数选择是否让对方听到本地播放的音频和指定循环播放的次数。