// Zed真是一个精品商品,提供的api的解释真的很齐全。 代码的可读性很高。能感觉到写代码的人在代码的规范度上下的功夫。
// zed-unity的插件中,有几个比较核心的代码脚本。最主要的代码脚本有3500多行,里面已经提供了英文的注释,包括每一个变量,不得不说真的很良心。 因为自己本身也在通读学习,所以顺便把汉化注释和自己的理解放了进去,这样方便以后师门的师弟师妹们看。
// 考虑到性价比的问题,并不是专业汉化和专业解释,只是比较粗糙地注释了一下。
// 这是插件中的核心代码,是我们使用相机最优先的脚本。
using UnityEngine;
using System;
using System.Threading;
using UnityEngine.XR;
using System.Collections;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
#endif
/// 注释者:lance
/// QQ:261983626
/// 日期:2020/8/27
///
/// The central script of the ZED Unity plugin, and the way a developer can interact with the camera.
/// 插件的核心脚本,是开发者与相机交互的优先渠道
/// It sets up and closes connection to the ZED, adjusts parameters based on user settings, enables/disables/handles
/// 它可以设置和关闭与zed相机的链接,并基于用户的设置调整参数
/// features like tracking, and holds numerous useful properties, methods, and callbacks.
/// 例如像追踪功能,以及大量的有用的属性、方法、回调函数
///
///
/// ZEDManager is attached to the root objects in the ZED_Rig_Mono and ZED_Rig_Stereo prefabs.
/// 这个脚本需要附属在ZED_Rig_Mono 和 ZED_Rig_Stereo 预形体上
/// If using ZED_Rig_Stereo, it will set isStereoRig to true, which triggers several behaviors unique to stereo pass-through AR.
/// 如果它用在 ZED_Rig_Stereo,它将会赋值 isStereoRig 为true,将会触发AR功能的专有的一些功能
///
public class ZEDManager : MonoBehaviour
{
/// <summary>
/// Static lock object shared across the plugin.
/// NOTE(review): its usage is outside this excerpt — presumably guards camera grab
/// calls across threads; confirm before relying on it.
/// </summary>
public static object grabLock;
/// <summary>
/// Per-camera-ID ZEDManager instances, indexed by (int)sl.ZED_CAMERA_ID.
/// Null until populated (registration happens outside this excerpt).
/// </summary>
static ZEDManager[] ZEDManagerInstance = null;
/// <summary>
/// Static function to get the ZEDManager instance registered for a given camera ID.
/// See sl.ZED_CAMERA_ID for the available choices.
/// </summary>
/// <param name="_id">ID of the camera whose manager should be returned.</param>
/// <returns>The registered ZEDManager, or null if no instances have been registered
/// yet or the ID is out of range.</returns>
public static ZEDManager GetInstance(sl.ZED_CAMERA_ID _id)
{
    if (ZEDManagerInstance == null)
        return null;

    int index = (int)_id;
    // Guard against an invalid ID instead of throwing IndexOutOfRangeException.
    if (index < 0 || index >= ZEDManagerInstance.Length)
        return null;

    return ZEDManagerInstance[index];
}
/// <summary>
/// Static function to get all ZEDManagers that have been properly instantiated.
/// Cameras may not necessarily be connected: they may not have finished connecting,
/// may have disconnected, or no camera may be available at all.
/// </summary>
/// <returns>List of every non-null registered ZEDManager.</returns>
public static List<ZEDManager> GetInstances()
{
    List<ZEDManager> result = new List<ZEDManager>();
    int maxCameras = (int)sl.Constant.MAX_CAMERA_PLUGIN;
    for (int id = 0; id < maxCameras; id++)
    {
        ZEDManager manager = GetInstance((sl.ZED_CAMERA_ID)id);
        if (manager != null)
        {
            result.Add(manager);
        }
    }
    return result;
}
/// <summary>
/// For advanced debugging. Default false. Set true for the Unity wrapper to log all SDK calls
/// to a new file at C:/ProgramData/stereolabs/SL_Unity_wrapper.txt. This helps find issues that
/// may occur within the protected .dll, but can decrease performance.
/// </summary>
private bool wrapperVerbose = false;
/// <summary>
/// Current instance of the ZED Camera, which handles calls to the Unity wrapper .dll.
/// This is the main object through which user code reaches SDK functionality.
/// </summary>
public sl.ZEDCamera zedCamera = null;
///////////////////////////////////////////
/////////// Camera Settings ///////////////
///////////////////////////////////////////
///
/// Resolution setting for all images retrieved from the camera. Higher resolution means lower framerate.
/// 从相机检索到的所有图像的分辨率设置。更高的分辨率意味着更低的帧率
/// HD720 is strongly recommended for pass-through AR.
/// HD720分辨率在透视AR功能中是被强烈推荐的
///
///
/// Camera ID
/// 相机的序号
///
[HideInInspector] public sl.ZED_CAMERA_ID cameraID = sl.ZED_CAMERA_ID.CAMERA_ID_01;
///
/// The accuracy of depth calculations. Higher settings mean more accurate occlusion and lighting but costs performance.
/// 深度计算的准确性。较高的设置意味着更精确的遮挡和照明,但具有性价比
/// Lance 理解: 就是计算精度越高,运算代价越高
/// Note there's a significant jump in performance cost between QUALITY and ULTRA modes.
/// 请注意,在“QUALITY”和“ULTRA”模式之间,性能成本有很大的提高
/// Lance 理解: 两种模式的运算成本相差较大,应该是ULTRA比较高
///
/*[Tooltip("The accuracy of depth calculations. Higher settings mean more accurate occlusion and lighting but costs performance.")]*/
[HideInInspector] public sl.DEPTH_MODE depthMode = sl.DEPTH_MODE.PERFORMANCE; //预定义深度数据的模式
///
/// Input Type in SDK (USB, SVO or Stream)
/// Lance 理解:输入类型 一般都用USB连接
///
[HideInInspector] public sl.INPUT_TYPE inputType = sl.INPUT_TYPE.INPUT_TYPE_USB;
///
/// Camera Resolution
/// 相机分辨率
///
[HideInInspector] public sl.RESOLUTION resolution = sl.RESOLUTION.HD720; //AR中推荐720
///
/// Targeted FPS, based on the resolution. VGA = 100, HD720 = 60, HD1080 = 30, HD2K = 15.
/// 数据采集频率,与分辨率有关, VGA = 100, HD720 = 60, HD1080 = 30, HD2K = 15
///
[HideInInspector] public int FPS = 60;
#region SVO 自己用不上的
///
/// SVO Input FileName
/// Lance 理解:是不是离线视频文件? 不知道,该部分估计用不到,先不注释
///
[HideInInspector] public string svoInputFileName = "";
/// <summary>
/// Whether SVO playback loops back to the start when it reaches the end of the file.
/// </summary>
[HideInInspector] public bool svoLoopBack = true;
/// <summary>
/// SVO real-time playback option.
/// NOTE(review): the original comment was a copy-paste of the loop-back option;
/// presumably this plays the SVO at recording speed rather than as fast as frames
/// can be read — confirm against the SDK documentation.
/// </summary>
[HideInInspector] public bool svoRealTimeMode = false;
///
/// Current frame being read from the SVO. Doesn't apply when recording.
/// 正在从SVO读取当前帧。录制时不适用
///
[HideInInspector] [SerializeField] private int currentFrame = 0;
/// <summary>
/// Current frame being read from the SVO. Doesn't apply when recording.
/// </summary>
public int CurrentFrame
{
    get
    {
        return currentFrame;
    }
    set
    {
        currentFrame = value;
    }
}
///
/// Total number of frames in a loaded SVO.
///
[HideInInspector] [SerializeField] private int numberFrameMax = 0;
/// <summary>
/// Total number of frames in a loaded SVO.
/// </summary>
public int NumberFrameMax
{
    get
    {
        return numberFrameMax;
    }
    set
    {
        numberFrameMax = value;
    }
}
[HideInInspector] [SerializeField] public bool pauseSVOReading = false;
[HideInInspector] public bool pauseLiveReading = false;
///
/// Ask a new frame is in pause (SVO only)
///
[HideInInspector] public bool NeedNewFrameGrab = false;
#endregion
///
/// Streaming Input IP (v2.8)
///
[HideInInspector] public string streamInputIP = "127.0.0.1";
///
/// Streaming Input Port (v2.8)
///
[HideInInspector] public int streamInputPort = 30000;
#if ZED_HDRP
///////////////////////////////////////////
/////////// SRP Lighting //////////////////
///////////////////////////////////////////
/// <summary>
/// Shader variants available for rendering the ZED image when using a scriptable
/// render pipeline (HDRP). DontChange leaves whatever shader is already assigned.
/// </summary>
public enum shaderType
{
Lit,
Unlit,
Greenscreen_Lit,
Greenscreen_Unlit,
DontChange
}
///
///
///
[HideInInspector]
public shaderType srpShaderType = shaderType.Lit;
///
/// How much the ZED image should light itself via emission.
/// Setting to zero is most realistic, but requires you to emulate the real-world lighting conditions within Unity. Higher settings cause the image\
/// to be uniformly lit, but light and shadow effects are less visible.
///
[HideInInspector]
public float selfIllumination = 0.5f;
///
///
///
[HideInInspector]
public bool applyZEDNormals = false;
#endif
///////////////////////////////////////////
/////////// Motion Tracking ///////////////
///////////////////////////////////////////
///
/// If enabled, the ZED will move/rotate itself using its own inside-out tracking.
/// 如果启用,ZED将使用其自己的由内而外的跟踪进行移动/旋转
/// If false, the camera tracking will move with the VR HMD if connected and available.
/// 如果为false,则摄像机跟踪将随VR HMD(如果已连接且可用)一起移动。
/// Normally, ZEDManager's GameObject will move according to the tracking. But if in AR pass-through mode,
/// then the Camera_eyes object in ZED_Rig_Stereo will move while this object stays still.
/// 通常,ZEDManager的GameObject将根据跟踪移动。但是,如果在AR模式下,则ZED_Rig_Stereo中的Camera_eyes对象将移动,而该对象保持静止
///
[HideInInspector] public bool enableTracking = true; // 相机自身追踪使能
///
/// Enables the spatial memory. Will detect and correct tracking drift by remembering features and anchors in the environment,
/// but may cause visible jumps when it happens.
/// 启用空间内存。将通过记住环境中的特征和锚点来检测并纠正跟踪漂移,但在纠正的时候会导致可见的跳跃
/// Lance理解: 就是保存一些旧的的空间锚点,然后检测当前的相机位置是否准确,
///
[HideInInspector] public bool enableSpatialMemory = true; // 空间内存使能
///
/// If using Spatial Memory, you can specify a path to an existing .area file to start with some memory already loaded.
/// .area files are created by scanning a scene with ZEDSpatialMappingManager and saving the scan.
/// 如果使用空间内存,则可以指定现有.area文件的路径,以从已加载的某些内存开始。 .area文件是通过使用ZEDSpatialMappingManager扫描场景并保存扫描来创建的。
/// 意思就是说,先用相机扫描一下环境,并自动检测出里面的锚点,然后保存在离线的文件中, 然后在实际运行的时候加载这些离线锚点,用于校正相机跟踪的结果
///
[HideInInspector] public string pathSpatialMemory; // 离线锚点文件路径
///
/// Estimate initial position by detecting the floor.
/// 通过检测地面来计算初始的位置
///
[HideInInspector] public bool estimateInitialPosition = true; //检测初始位置使能
///
/// If true, tracking is enabled but doesn't move after initializing.
/// 如果为true,则启用跟踪,但初始化后不会移动
/// Lance 理解: 大概的意思是不是只在初始化的时候调用一下 知道相机的初始位置就可以了。 但是后面的不再使用内部的追踪器
///
[HideInInspector] public bool trackingIsStatic = false;
///////////////////////////////////////////
/////////// Spatial Mapping ///////////////
///////////////////////////////////////////
///
/// Resolution setting for the scan. A higher resolution creates more submeshes and uses more memory, but is more accurate.
/// 扫描的分辨率设置。较高的分辨率会创建更多的子网格并使用更多的内存,但更为准确。
/// Lance 理解: 就是前面说的,扫描一段时间,计算出大量的锚点特征,然后保存在离线文件中以便运行的时候使用,这边设置的是扫描的分辨率,就是直接影响到锚点的精度问题
///
[HideInInspector] public ZEDSpatialMapping.RESOLUTION
mappingResolutionPreset = ZEDSpatialMapping.RESOLUTION.MEDIUM; // 初始分辨率为中等,这边的分辨率应该和depth分辨率是相似的,估计背后调用的是同一个接口
///
/// Maximum distance geometry can be from the camera to be scanned. Geometry scanned from farther away will be less accurate.
/// 指的是设置相机扫描环境的时候,扫描的最大深度设置。 越远的几何形状越不准确。
///
[HideInInspector] public ZEDSpatialMapping.RANGE mappingRangePreset = ZEDSpatialMapping.RANGE.MEDIUM;
///
/// Whether mesh filtering is needed.
/// 是否需要网格过滤,
/// Lance 理解: 应该指的是要不要把一些同一个平面的网格整合在一起,形成大的网格,这样可以减少网格的个数
///
[HideInInspector] public bool isMappingFilteringEnable = false;
///
/// Whether surface textures will be scanned and applied. Note that texturing will add further delay to the post-scan finalizing period.
/// 是否将扫描和应用表面纹理。请注意,纹理化将进一步延长扫描后的定稿时间。
///
[HideInInspector] public bool isMappingTextured = false;
///
/// Whether to save the mesh .obj and .area files once the scan is finished.
/// 扫描完成后,是否要保存网格和区域文件
///
[HideInInspector] public bool saveMeshWhenOver = false;
///
/// Path to save the .obj and .area files.
/// 保存网格和区域文件的路径
///
[HideInInspector] public string meshPath = "Assets/ZEDMesh.obj";
///
/// Filtering setting. More filtering results in fewer faces in the mesh, reducing both file size and accuracy.
/// 过滤设置。更多的过滤可以减少网格中的面,从而减小文件大小和准确性
///
[HideInInspector] public sl.FILTER meshFilterParameters; //网格滤波参数
/// <summary>
/// Instance of the ZEDSpatialMapping class that handles the actual spatial mapping
/// implementation within Unity.
/// </summary>
[HideInInspector] private ZEDSpatialMapping spatialMapping = null;
/// <summary>
/// Read-only accessor for the ZEDSpatialMapping instance.
/// </summary>
public ZEDSpatialMapping GetSpatialMapping
{
    get
    {
        return spatialMapping;
    }
}
/// <summary>
/// Whether the spatial mapping is currently scanning.
/// False when no spatial mapping instance exists.
/// </summary>
public bool IsMappingRunning
{
    get
    {
        if (spatialMapping == null) return false;
        return spatialMapping.IsRunning();
    }
}
/// <summary>
/// List of the processed submeshes. This list isn't filled until StopSpatialMapping()
/// is called. Null when no spatial mapping instance exists.
/// </summary>
public List<ZEDSpatialMapping.Chunk> MappingChunkList
{
    get
    {
        if (spatialMapping == null) return null;
        return spatialMapping.ChunkList;
    }
}
/// <summary>
/// Whether the mesh update thread is running.
/// False when no spatial mapping instance exists.
/// </summary>
public bool IsMappingUpdateThreadRunning
{
    get
    {
        if (spatialMapping == null) return false;
        return spatialMapping.IsUpdateThreadRunning;
    }
}
/// <summary>
/// Whether the spatial mapping was running but has been paused (not stopped) by the user.
/// False when no spatial mapping instance exists.
/// </summary>
public bool IsMappingPaused
{
    get
    {
        if (spatialMapping == null) return false;
        return spatialMapping.IsPaused;
    }
}
/// <summary>
/// Whether the mesh is in the texturing stage of finalization.
/// False when no spatial mapping instance exists.
/// </summary>
public bool IsMappingTexturingRunning
{
    get
    {
        if (spatialMapping == null) return false;
        return spatialMapping.IsTexturingRunning;
    }
}
/// <summary>
/// Whether the spatial mapping display is enabled.
/// False when no spatial mapping instance exists.
/// </summary>
public bool IsSpatialMappingDisplay
{
    get
    {
        if (spatialMapping == null) return false;
        return spatialMapping.display;
    }
}
/// <summary>
/// Whether the spatial mapping currently holds at least one mesh chunk (submesh).
/// False when no spatial mapping instance exists.
/// </summary>
public bool SpatialMappingHasChunks
{
    get
    {
        if (spatialMapping == null) return false;
        return spatialMapping.Chunks.Count > 0;
    }
}
///////////////////////////////////////////
/////////// Object Detection //////////////
///////////////////////////////////////////
///
/// Sync the Object on the image . It's recommended to leave it false for Real-Time applications.
/// 在图像上同步对象。对于实时应用程序,建议将其保留为false。
/// Lance 理解:不知道这边在图像上同步是什么意思,就是在图像里面框出来吗,这个地方可能是自己比较有用的地方
///
[HideInInspector] public bool objectDetectionImageSyncMode = false;
///
/// Whether to track objects across multiple frames using the ZED's position relative to the floor.
/// Requires tracking to be on. It's also recommended to enable Estimate Initial Position to find the floor.
/// 是否使用ZED相对于地板的位置跨多个帧跟踪对象。 需要启用跟踪。还建议启用“Estimate Initial Position”以查找地板。\
/// 就是在识别物品后,需不需要对它进行追踪,这样可以降低识别算力的需求
///
[HideInInspector] public bool objectDetectionTracking = true;
///
/// Whether to calculate 2D masks for each object, showing exactly which pixels within the 2D bounding box are the object.
/// Requires more performance, so do not enable unless needed.
/// 是否为每个对象计算2D遮罩,以准确显示2D边界框内的哪些像素是对象。 需要更高的性能,因此除非需要,否则不要启用。 这个相当于语言分割,应该对自己来说挺有用
///
[HideInInspector] public bool objectDetection2DMask = false; // 物品检测的二维蒙板
///
/// Detection sensitivity. Represents how sure the SDK must be that an object exists to report it. Ex: If the threshold is 80, then only objects
/// where the SDK is 80% sure or greater will appear in the list of detected objects.
/// 检测灵敏度。表示SDK是否必须确保存在对象才能报告它。例:如果阈值是80,则只有SDK 确定为80%或更高的对象才会出现在检测到的对象列表中
///
[HideInInspector] public float objectDetectionConfidenceThreshold = 50;
///
/// Whether to detect people during object detection.
/// 在物品检测的过程中要不要检测人
///
[HideInInspector] public bool objectClassPersonFilter = true;
///
/// Whether to detect vehicles during object detection.
/// 物品检测的时候要不要检测交通工具
///
[HideInInspector] public bool objectClassVehicleFilter = true;
/// <summary>
/// Whether the object detection module has been activated successfully.
/// </summary>
private bool objectDetectionRunning = false;
/// <summary>
/// Read-only: whether the object detection module has been activated successfully.
/// </summary>
public bool IsObjectDetectionRunning
{
    get
    {
        return objectDetectionRunning;
    }
}
///
/// Set to true when there is not a fresh frame of detected objects waiting for processing, meaning we can retrieve the next one.
/// 当没有新的检测到的对象等待处理时将其设置为true,这意味着我们可以检索下一个对象。
/// Lance 理解:似乎是说,已经完成当前帧的物品检测
///
private bool requestobjectsframe = true;
///
/// Set to true when a new frame of detected objects has been retrieved in the image acquisition thread, ready for the main thread to process.
/// 当已在图像采集线程中检索到新的检测对象帧,并准备好处理主线程时,将其设置为true。
///
private bool newobjectsframeready = false;
///
/// Last object detection frame detected by the SDK. This data comes straight from the C++ SDK; see detectionFrame for an abstracted version
/// with many helper functions for use inside Unity.
/// SDK检测到的最后一个对象检测帧。这些数据直接来自C ++ SDK。参见detectionFrame中的抽象版本,其中包含许多可在Unity中使用的辅助函数。
/// Lance 理解: 是值返回的直接是底层C++做了物品检测后重新处理的图像数据吗,速度比较快?下面的函数在ZEDCommon文件中
///
private sl.ObjectsFrameSDK objectsFrameSDK = new sl.ObjectsFrameSDK();
/// <summary>
/// Last object detection frame detected by the SDK. This data comes straight from the
/// C++ SDK; see GetDetectionFrame for an abstracted version with many helper functions
/// for use inside Unity.
/// </summary>
public sl.ObjectsFrameSDK GetSDKObjectsFrame
{
    get
    {
        return objectsFrameSDK;
    }
}
///
/// Timestamp of the most recent object frame fully processed. This is used to calculate the FPS of the object detection module.
/// 最新对象帧的时间戳已完全处理。这用于计算对象检测模块的FPS。
/// Lance 理解: 应该是作为某个时间周期的起始点,同时也是上一个周期的终点
///
private ulong lastObjectFrameTimeStamp = 0;
///
/// Frame rate at which the object detection module is running. Only reports performance; changing this value has no effect on detection.
/// 对象检测模块运行的帧速率。仅报告效果;更改此值不会影响检测。
/// Lance 理解: 这个是物品检测的帧率,应该会在每一次检测的末尾被重新赋值,所以我们修改它没有效果
///
private float objDetectionModuleFPS = 15.0f;
///
/// Last object detection frame detected by the SDK, in the form of a DetectionFrame instance which has many helper functions for use in Unity.
/// SDK检测到的最后一个对象检测帧,形式为DetectionFrame实例,该实例具有许多可在Unity中使用的辅助函数
///
private DetectionFrame detectionFrame;
/// <summary>
/// Last object detection frame detected by the SDK, in the form of a DetectionFrame
/// instance which has many helper functions for use in Unity.
/// </summary>
public DetectionFrame GetDetectionFrame
{
    get
    {
        return detectionFrame;
    }
}
#region 与检测帧事件相关,自己并不是很理解
///
/// Delegate for events that take an object detection frame straight from the SDK (not abstracted).
/// 委托直接从SDK(未抽象)获取对象检测框架的事件。
///
public delegate void onNewDetectionTriggerSDKDelegate(sl.ObjectsFrameSDK objFrame);
///
/// Event that's called whenever the Object Detection module detects a new frame.
/// Includes data straight from the C++ SDK. See OnObjectDetection/DetectionFrame for an abstracted version that has many helper functions
/// that makes it easier to use in Unity.
/// 每当“对象检测”模块检测到新帧时调用的事件。 包含直接来自C ++ SDK的数据。有关具有许多辅助函数的抽象版本,请参见OnObjectDetection / DetectionFrame,使在Unity中更容易使用。
///
public event onNewDetectionTriggerSDKDelegate OnObjectDetection_SDKData;
///
/// Delegate for events that take an object detection frame, in the form of a DetectionFrame object which has helper functions.
/// 委托带有对象检测框架的事件,形式为具有辅助功能的DetectionFrame对象。
///
public delegate void onNewDetectionTriggerDelegate(DetectionFrame objFrame);
///
/// Event that's called whenever the Object Detection module detects a new frame.
/// Supplies data in the form of a DetectionFrame instance, which has many helper functions for use in Unity.
/// 每当“对象检测”模块检测到新帧时调用的事件。 以DetectionFrame实例的形式提供数据,该实例具有许多可在Unity中使用的辅助函数。
///
public event onNewDetectionTriggerDelegate OnObjectDetection;
private sl.dll_ObjectDetectionRuntimeParameters od_runtime_params = new sl.dll_ObjectDetectionRuntimeParameters();
#endregion
///////////////////////////////////////////
/////////// Rendering /////////////////////
///////////////////////////////////////////
/// <summary>
/// Rendering paths available to the ZED, mapped to the corresponding Unity rendering path.
/// </summary>
public enum ZEDRenderingMode
{
FORWARD = RenderingPath.Forward,
DEFERRED = RenderingPath.DeferredShading
};
///
/// When enabled, the real world can occlude (cover up) virtual objects that are behind it.
/// Otherwise, virtual objects will appear in front.
/// 启用后,现实世界可以遮挡(隐藏)其背后的虚拟对象。 否则,虚拟对象将出现在前面。
/// Lance:这个功能在AR中特别有用,是增强现实的必须功能,就是把虚拟的物品的空间位置与实际物品的空间位置联系起来。就像虚拟物品真的在现实环境中一样
///
[HideInInspector] public bool depthOcclusion = true; //这个地方应该描述为空间视线遮挡更符合场景
///
/// Enables post-processing effects on virtual objects that blends them in with the real world.
/// 在虚拟对象上启用后处理效果,将它们与真实世界融合在一起。
/// Lance:这个含义还不是很理解,但是好像在做AR交互的时候应该很有用
///
[HideInInspector] public bool postProcessing = true;
///
/// Field version of CameraBrightness property.
/// CameraBrightness,属性的字段版本
///
[SerializeField] [HideInInspector] private int m_cameraBrightness = 100; //应该是亮度
/// <summary>
/// Brightness of the final real-world image. Default is 100. Lower to darken the
/// environment in a realistic-looking way. This is a rendering setting that doesn't
/// affect the raw input from the camera. Fires OnCamBrightnessChange so shaders can
/// pick up the new value.
/// </summary>
public int CameraBrightness
{
    get
    {
        return m_cameraBrightness;
    }
    set
    {
        if (value == m_cameraBrightness)
        {
            return;
        }
        m_cameraBrightness = value;
        onCamBrightnessChangeDelegate handler = OnCamBrightnessChange;
        if (handler != null)
        {
            handler(m_cameraBrightness);
        }
    }
}
///
/// Whether to enable the new color/gamma curve added to the ZED SDK in v3.0. Exposes more detail in darker regions
/// and removes a slight red bias.
/// 是否启用v3.0中添加到ZED SDK的新颜色/伽玛曲线。 在较暗的区域中显示更多细节,并消除了轻微的红色偏差。
///
[HideInInspector] [SerializeField] public bool enableImageEnhancement = true;
/// Field version of MaxDepthRange property.
/// 最大深度范围
///
[SerializeField] private float m_maxDepthRange = 40f; //40 米?
/// <summary>
/// Maximum depth at which the camera will display the real world, in meters.
/// Pixels further than this value will be invisible. Fires OnMaxDepthChange so
/// shaders can pick up the new value.
/// </summary>
[HideInInspector]
public float MaxDepthRange
{
    get
    {
        return m_maxDepthRange;
    }
    set
    {
        if (value == m_maxDepthRange)
        {
            return;
        }
        m_maxDepthRange = value;
        onMaxDepthChangeDelegate handler = OnMaxDepthChange;
        if (handler != null)
        {
            handler(m_maxDepthRange);
        }
    }
}
#region 录制视频模块,感觉自己应该不会用到
///////////////////////////////////////////
/////////// Recording Module //////////////
///////////////////////////////////////////
///
/// SVO Output file name
///
[HideInInspector] public string svoOutputFileName = "Assets/Recording.svo";
///
/// SVO Compression mode used for recording
///
[HideInInspector] public sl.SVO_COMPRESSION_MODE svoOutputCompressionMode = sl.SVO_COMPRESSION_MODE.H264_BASED;
///
/// Indicates if frame must be recorded
///
[HideInInspector] public bool needRecordFrame = false;
#endregion
///////////////////////////////////////////
/////////// Streaming Module //////////////
///////////////////////////////////////////
///
/// Enable/Disable Streaming module
/// 流模式使能
/// Lance: 流模式指的啥
///
[HideInInspector] public bool enableStreaming = false;
///
/// Status of streaming request
/// 流模式请求状态
///
private bool isStreamingEnable = false;
///
/// Codec used for Streaming
/// 用于流的编解码器
/// Lance:可能使用原始图像数据传输的话会比较慢,比较编码压缩?
///
[HideInInspector] public sl.STREAMING_CODEC streamingCodec = sl.STREAMING_CODEC.AVCHD_BASED;
///
/// port used for Streaming
/// 用于流传输的端口
///
[HideInInspector] public int streamingPort = 30000;
///
/// bitrate used for Streaming
/// 用于流式传输的比特率
///
[HideInInspector] public int bitrate = 8000;
///
/// gop size used for Streaming
/// 用于流的gop大小
///
[HideInInspector] public int gopSize = -1;
///
/// Enable/Disable adaptative bitrate
/// 启用/禁用自适应比特率
///
[HideInInspector] public bool adaptativeBitrate = false;
///
/// Enable/Disable adaptative bitrate
/// 启用/禁用自适应比特率
///
[HideInInspector] public int chunkSize = 32768;
///////////////////////////////////////////
/////////// Advanced control //////////////
///////////////////////////////////////////
///
///
/// True to make the ZED image fade from black when the application starts.
/// 应用程序启动时为True,以使ZED图像从黑色褪色
/// Lance:应该是缓慢过渡的视觉感,才不会突然在AR中刺激人眼
///
[HideInInspector] public bool fadeInOnStart = true;
///
/// True to apply DontDestroyOnLoad() on the ZED rig in Awake(), preserving it between scenes.
/// 将DontDestroyOnLoad()应用于Awake()中的ZED绑定,并在场景之间保留它,则为True。
///
[HideInInspector] public bool dontDestroyOnLoad = false;
///
/// Grey Out Skybox on Start", "True to set the background to a neutral gray when the scene starts.
/// Recommended for AR so that lighting on virtual objects better matches the real world.
/// 开始时将Skybox变灰,在场景开始时正确地将背景设置为中性灰色。 推荐用于增强现实,以使虚拟对象上的光照更好地匹配现实世界。
///
[HideInInspector] public bool greySkybox = true;
///
/// Field version of confidenceThreshold property.
/// 置信度阈值属性的字段版本。
///
[SerializeField] [HideInInspector] private int m_confidenceThreshold = 100;
/// <summary>
/// How tolerant the ZED SDK is to low depth-confidence values. Lower values filter
/// more pixels. The value is clamped to [0, 100]; when the camera is running, the
/// runtime parameters are updated so the change takes effect on the next grab.
/// </summary>
public int confidenceThreshold
{
    get { return m_confidenceThreshold; }
    set
    {
        // Clamp BEFORE comparing: the original compared the raw value first, so
        // out-of-range input that clamps to the current value (e.g. 150 while at 100)
        // still re-assigned the field and re-applied the runtime parameters.
        int clamped = Mathf.RoundToInt(Mathf.Clamp(value, 0, 100));
        if (clamped == m_confidenceThreshold) return;
        m_confidenceThreshold = clamped;
        if (Application.isPlaying && zedReady)
        {
            runtimeParameters.confidenceThreshold = m_confidenceThreshold;
        }
    }
}
/// <summary>
/// Field version of the textureConfidenceThreshold property.
/// </summary>
[SerializeField] [HideInInspector] private int m_textureConfidenceThreshold = 100;
/// <summary>
/// How tolerant the ZED SDK is to low texture-confidence values. Lower values filter
/// more pixels. The value is clamped to [0, 100]; when the camera is running, the
/// runtime parameters are updated so the change takes effect on the next grab.
/// </summary>
public int textureConfidenceThreshold
{
    get { return m_textureConfidenceThreshold; }
    set
    {
        // Clamp BEFORE comparing (same fix as confidenceThreshold): avoids a redundant
        // runtime-parameter update when out-of-range input clamps to the current value.
        int clamped = Mathf.RoundToInt(Mathf.Clamp(value, 0, 100));
        if (clamped == m_textureConfidenceThreshold) return;
        m_textureConfidenceThreshold = clamped;
        if (Application.isPlaying && zedReady)
        {
            runtimeParameters.textureConfidenceThreshold = m_textureConfidenceThreshold;
        }
    }
}
/// <summary>
/// Options for enabling the depth measurement map for the right camera.
/// Costs performance if on, even if not used.
/// </summary>
public enum RightDepthEnabledMode
{
/// <summary>
/// Right depth measure will be enabled if a ZEDRenderingPlane component set to the right eye
/// is detected as a child of ZEDManager's GameObject, as in the ZED rig prefabs.
/// </summary>
AUTO,
/// <summary>
/// Right depth measure is disabled.
/// </summary>
OFF,
/// <summary>
/// Right depth measure is enabled.
/// </summary>
ON
}
///
/// Whether to enable depth measurements from the right camera. Required for depth effects in AR pass-through, but requires performance even if not used.
/// Auto enables it only if a ZEDRenderingPlane component set to the right eye is detected as a child of ZEDManager's GameObject (as in the ZED rig prefabs.)
/// 是否从正确的相机启用深度测量。对于AR直通中的深度效果而言是必需的,但即使不使用也需要性能。
/// 仅当检测到设置为右眼的ZEDRenderingPlane组件是ZEDManager的GameObject的子代时(如ZED装备预制件一样),Auto才启用它。
///
[HideInInspector] public RightDepthEnabledMode enableRightDepthMeasure = RightDepthEnabledMode.AUTO; //初始值先定义为自动吧
///
/// Delegate for OnCamBrightnessChange, which is used to update shader properties when the brightness setting changes.
/// 托管OnCamBrightnessChange,当亮度设置更改时,该代理用于更新着色器属性。
///
public delegate void onCamBrightnessChangeDelegate(int newVal);
///
/// Event fired when the camera brightness setting is changed. Used to update shader properties.
/// 更改相机亮度设置时触发事件。用于更新着色器属性。
///
public event onCamBrightnessChangeDelegate OnCamBrightnessChange;
///
/// Delegate for OnCamBrightnessChange, which is used to update shader properties when the max depth setting changes.
/// 代表OnCamBrightnessChange,该对象用于在最大深度设置更改时更新着色器属性。
///
public delegate void onMaxDepthChangeDelegate(float newVal);
///
/// Event fired when the max depth setting is changed. Used to update shader properties.
/// 更改最大深度设置时触发事件。用于更新着色器属性。
///
public event onMaxDepthChangeDelegate OnMaxDepthChange;
///
/// Whether to show the hidden camera rig used in stereo AR mode to prepare images for HMD output.
/// 是否显示用于立体AR模式的隐藏摄像机装备,以准备用于HMD输出的图像。
///
[SerializeField] [HideInInspector] private bool showarrig = false;
/// <summary>
/// Whether to show the hidden camera rig used in stereo AR mode to prepare images for
/// HMD output. This is rarely needed, but can be useful for understanding how the ZED
/// output works. Changing it at runtime updates the rig's HideFlags immediately.
/// </summary>
public bool showARRig
{
    get
    {
        return showarrig;
    }
    set
    {
        bool toggledAtRuntime = Application.isPlaying && showarrig != value && zedRigDisplayer != null;
        if (toggledAtRuntime)
        {
            zedRigDisplayer.hideFlags = value ? HideFlags.None : HideFlags.HideInHierarchy;
        }
        showarrig = value;
    }
}
/// <summary>
/// Field version of the maxDepthRange property.
/// </summary>
private float maxdepthrange = 40f;
/// <summary>
/// Maximum depth range in meters, clamped to [0, 40]. Setting this while playing
/// reapplies the rendering settings so the change takes effect immediately.
/// NOTE(review): this overlaps with the MaxDepthRange property above, which uses a
/// separate backing field (m_maxDepthRange) — confirm which one callers should use.
/// </summary>
public float maxDepthRange
{
    get { return maxdepthrange; }
    set
    {
        float clamped = Mathf.Clamp(value, 0, 40);
        // Skip the rendering-settings refresh when nothing actually changed,
        // matching the early-return pattern of the other setters in this class.
        if (clamped == maxdepthrange) return;
        maxdepthrange = clamped;
        if (Application.isPlaying)
        {
            setRenderingSettings();
        }
    }
}
///
/// If true, and you are using a ZED2 or ZED Mini, IMU fusion uses data from the camera's IMU to improve tracking results.
/// 如果为true,并且您使用的是ZED2或ZED Mini,则IMU融合会使用来自摄像机IMU的数据来改善跟踪结果。
///
public bool enableIMUFusion = true;
///
/// If true, the ZED SDK will subtly adjust the ZED's calibration during runtime to account for heat and other factors.
/// Reasons to disable this are rare.
/// 如果为true,则ZED SDK将在运行时巧妙地调整ZED的校准,以考虑热量和其他因素。 ///禁用此功能的原因很少
///
[HideInInspector] public bool enableSelfCalibration = true;
///////////////////////////////////////////
/////////// Video Settings ////////////////
///////////////////////////////////////////
//Controls for the ZED's video settings (brightness, saturation, exposure, etc.)
#region 相机的视频设置,亮度、曝光度等,但是我最后用的HoloLens的全息显示,这部分以后应该不会去修改
/// <summary>
/// Behavior options for how the ZED's video settings (brightness, saturation, etc.)
/// are applied when the ZED first connects.
/// </summary>
public enum VideoSettingsInitMode
{
/// <summary>
/// Camera will be assigned the video settings set in ZEDManager's Inspector before
/// running the scene.
/// </summary>
Custom,
/// <summary>
/// Camera will load the settings last applied to the ZED, which may have come from a
/// source outside Unity. This is the default behavior in the ZED SDK and most ZED apps.
/// </summary>
LoadFromSDK,
/// <summary>
/// Camera will load default video settings.
/// </summary>
Default
}
///
/// How the ZED's video settings (brightness, saturation, etc.) are applied when the ZED first connects.
/// ZED首次连接时如何应用ZED的视频设置(亮度,饱和度等)。
///
public VideoSettingsInitMode videoSettingsInitMode = VideoSettingsInitMode.Custom;
///
/// Brightness setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
/// ZED相机本身的亮度设置。 当videoSettingsInitMode设置为Custom时,序列化值将在启动时应用于摄像机
///
[SerializeField] private int videoBrightness = 4;
///
/// Contrast setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
/// ZED相机本身的对比度设置。 当videoSettingsInitMode设置为Custom时,序列化值将在启动时应用于摄像机。
///
[SerializeField] private int videoContrast = 4;
///
/// Hue setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
/// ZED相机本身的色相设置。 当videoSettingsInitMode设置为Custom时,序列化值将在启动时应用于摄像机。
///
[SerializeField] private int videoHue = 0;
///
/// Saturation setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
/// ZED相机本身的饱和度设置。 当videoSettingsInitMode设置为Custom时,序列化值将在启动时应用于摄像机。
///
[SerializeField] private int videoSaturation = 4;
///
/// Auto gain/exposure setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
/// ZED相机本身的自动增益/曝光设置。
/// 当videoSettingsInitMode设置为Custom时,序列化值将在启动时应用于摄像机。
///
[SerializeField] private bool videoAutoGainExposure = true;
///
/// Gain setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom and videoAutoGainExposure is false.
/// ZED相机本身的增益设置。
/// 当videoSettingsInitMode设置为Custom且videoAutoGainExposure为false时,序列化值将在启动时应用于摄像机
///
[SerializeField] private int videoGain = 10;
///
/// Exposure setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom and videoAutoGainExposure is false.
/// ZED相机本身的曝光设置。 当videoSettingsInitMode设置为Custom且videoAutoGainExposure为false时,序列化值将在启动时应用于摄像机。
///
[SerializeField] public int videoExposure = 100;
///
/// Auto White Balance setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField] private bool videoAutoWhiteBalance = true;
///
/// White Balance temperature setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom and videoAutoWhiteBalance is false.
///
[SerializeField] private int videoWhiteBalance = 3200;
///
/// Sharpness setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField] private int videoSharpness = 3;
///
/// Sharpness setting for the ZED camera itself.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField] private int videoGamma = 5;
///
/// Whether the LED on the ZED camera is on.
/// Serialized value is applied to the camera on start when videoSettingsInitMode is set to Custom.
///
[SerializeField] private bool videoLEDStatus = true;
#endregion
///////////////////////////////////////////
/////////// Status Report /////////////////
///////////////////////////////////////////
//Strings used for the Status display in the Inspector.
[Header("Status")] public string Header;
/// <summary>
/// The camera model (e.g. ZED or ZED-M), for display in the Inspector.
/// NOTE(review): the ReadOnly label "Camera S/N" looks like a copy-paste from the
/// serial-number field below — it should probably read "Camera Model".
/// </summary>
[ReadOnly("Camera S/N")] [HideInInspector]
public string cameraModel = "-";
///
/// The camera serial number, for display in the Inspector.
///
[ReadOnly("Camera S/N")] [HideInInspector]
public string cameraSerialNumber = "-";
///
/// The camera firmware version (camera firmware + sensors firmware), for display in the Inspector.
///
[ReadOnly("Camera Firmware")] [HideInInspector]
public string cameraFirmware = "-";
///
/// Version of the installed ZED SDK, for display in the Inspector.
///
[ReadOnly("Version")] [HideInInspector]
public string versionZED = "-";
///
/// How many frames per second the engine is rendering, for display in the Inspector.
///
[ReadOnly("Engine FPS")] [HideInInspector]
public string engineFPS = "-";
///
/// How many images per second are received from the ZED, for display in the Inspector.
///
[ReadOnly("Camera FPS")] [HideInInspector]
public string cameraFPS = "-";
///
/// The connected VR headset, if any, for display in the Inspector.
///
[ReadOnly("HMD Device")] [HideInInspector]
public string HMDDevice = "-";
///
/// Whether the ZED's tracking is on, off, or searching (lost position, trying to recover), for display in the Inspector.
///
[ReadOnly("Tracking State")] [HideInInspector]
public string trackingState = "-";
///
/// Object detection framerate, for display in the Inspector.
///
[ReadOnly("Object Detection FPS")] [HideInInspector]
public string objectDetectionFPS = "-";
#region Private members — internal state not accessible from outside this class; avoid modifying.
////////////////////////////////////////////
///
/// Initialization parameters used to start the ZED. Holds settings that can't be changed at runtime
/// (resolution, depth mode, .SVO path, etc.).
///
private sl.InitParameters initParameters;
///
/// Runtime parameters used to grab a new image. Settings can change each frame, but are lower level
/// (sensing mode, point cloud, if depth is enabled, etc.).
///
private sl.RuntimeParameters runtimeParameters;
///
/// Enables the ZED SDK's depth stabilizer, which improves depth accuracy and stability. There's rarely a reason to disable this.
///
private bool depthStabilizer = true;
///
/// Indicates if sensors (IMU, ...) are needed/required. For most applications, they are.
/// Sensors are transmitted through USB 2.0 lines. If USB 2.0 is not available (a USB 3.0-only extension, for example), set this to false.
///
private bool sensorsRequired = false;
///
/// Sets the camera in flip mode.
///
private bool cameraFlipMode = false;
///
/// Whether the camera is currently being tracked using the ZED's inside-out tracking.
///
private bool isZEDTracked = false;
///
/// Whether the ZED's inside-out tracking has been activated.
///
private bool isTrackingEnable = false;
///
/// Whether the camera is tracked in any way (ZED's tracking or a VR headset's tracking).
///
private bool isCameraTracked = false;
///
/// Public accessor for whether the camera is tracked by any means (ZED's tracking or a VR headset's tracking).
///
public bool IsCameraTracked => isCameraTracked;
///
/// Whether the camera has a new frame available.
///
private bool isNewFrameGrabbed = false;
///
/// Public accessor for whether the camera has a new frame available.
///
public bool IsNewFrameGrabbed => isNewFrameGrabbed;
///
/// Orientation last returned by the ZED's tracking.
///
private Quaternion zedOrientation = Quaternion.identity;
///
/// Position last returned by the ZED's tracking.
///
private Vector3 zedPosition = Vector3.zero;
///
/// If Estimate Initial Position is true and we're in SVO mode with Loop enabled, we'll want to cache our first pose to initialPosition and initialRotation.
/// This flag lets us know if we've done that yet, so we only assign them on the first tracked frame.
///
private bool initialPoseCached = false;
///
/// Position of the camera (zedRigRoot) when the scene starts. Not used in Stereo AR.
///
private Vector3 initialPosition = new Vector3();
///
/// Orientation of the camera (zedRigRoot) when the scene starts. Not used in Stereo AR.
///
private Quaternion initialRotation = Quaternion.identity;
///
/// Sensing mode: STANDARD or FILL. FILL corrects for missing depth values.
/// Almost always better to use FILL, since we need depth without holes for proper occlusion.
///
[SerializeField] [HideInInspector] public sl.SENSING_MODE sensingMode = sl.SENSING_MODE.FILL;
///
/// Rotation offset used to retrieve the tracking with a rotational offset.
///
private Quaternion rotationOffset;
///
/// Position offset used to retrieve the tracking with a positional offset.
///
private Vector3 positionOffset;
///
/// Enables pose smoothing during drift correction. Leave it set to true.
///
private bool enablePoseSmoothing = true;
///
/// The engine FPS, updated every frame.
///
private float fps_engine = 90.0f;
///
/// Recording state (true while an SVO recording is in progress).
///
private bool isRecording = false;
#endregion
///
///////// Static States /////////
///
///
/// Whether AR pass-through mode is activated.
///
private bool isStereoRig = false;
///
/// Whether AR pass-through mode is activated. Assigned by CheckStereoMode() in Awake().
/// Will be true if the ZED_Rig_Stereo prefab (or a similarly-structured prefab) is used.
///
public bool IsStereoRig => isStereoRig;
///
/// Whether the ZED has finished initializing.
///
private bool zedReady = false;
///
/// Public read-only accessor for whether the ZED has finished initializing.
///
public bool IsZEDReady => zedReady;
///
/// Flag set to true if the camera was connected and then wasn't anymore.
/// Causes ZEDDisconnected() to be called each frame, which attempts to restart it.
///
private bool isDisconnected = false;
///
/// Current state of tracking: On, Off, or Searching (lost tracking, trying to recover). Used by anti-drift.
///
private sl.TRACKING_STATE zedtrackingState = sl.TRACKING_STATE.TRACKING_OFF;
///
/// Current state of tracking: On, Off, or Searching (lost tracking, trying to recover). Used by anti-drift.
///
public sl.TRACKING_STATE ZEDTrackingState => zedtrackingState;
///
/// First position registered after the tracking has started (whether via ZED or a VR HMD).
/// Often useful as a world anchor/reference point.
///
public Vector3 OriginPosition { get; private set; }
///
/// First rotation/orientation registered after the tracking has started (whether via ZED or a VR HMD).
///
public Quaternion OriginRotation { get; private set; }
///
/// In AR pass-through mode, whether to compare the ZED's IMU data against the reported position of
/// the VR headset. This helps compensate for drift and should usually be left on.
/// However, in some setups, like when using a custom mount, this can cause tracking errors.
///
/// Read more about the potential errors here: https://support.stereolabs.com/hc/en-us/articles/360026482413
///
public bool setIMUPriorInAR = true;
///
/// If true, the ZED rig will enter 'pass-through' mode if it detects a stereo rig (at least two cameras as children with ZEDRenderingPlane
/// components, each with a different eye) and a VR headset is connected. If false, it will never enter pass-through mode.
///
public bool allowARPassThrough = true;
///
/// Gravity-aligned rotation plus the synced ZED/HMD poses used by the AR anti-drift logic.
[HideInInspector] public Quaternion gravityRotation = Quaternion.identity;
[HideInInspector] public Vector3 ZEDSyncPosition;
[HideInInspector] public Vector3 HMDSyncPosition;
[HideInInspector] public Quaternion ZEDSyncRotation;
[HideInInspector] public Quaternion HMDSyncRotation;
///
/// Image acquisition thread.
///
private Thread threadGrab = null;
///
/// State of the image acquisition thread (true while the grab loop should keep running).
///
private bool running = false;
///
/// Initialization thread.
///
private Thread threadOpening = null;
///
/// Result of the latest attempt to initialize the ZED.
///
private sl.ERROR_CODE lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
///
/// Public accessor for the result of the latest attempt to initialize the ZED.
///
public sl.ERROR_CODE LastInitStatus => lastInitStatus;
///
/// State of the ZED initialization thread (true once OpenZEDInBackground() has been launched).
///
private bool openingLaunched;
///
/// Wait handle used to safely tell the init thread to shut down.
///
EventWaitHandle initQuittingHandle;
///
/// When true, the init thread will close early instead of completing all its connection attempts.
/// Set to true when the application is closed before a camera finishes its initialization.
///
private bool forceCloseInit = false;
///
/// Tracking initialization thread. Used as the tracking takes some time to start.
///
private Thread trackerThread = null;
///
// Camera and Player Transforms //
///
///
/// Transform of the left camera in the ZED rig.
///
private Transform camLeftTransform = null;
///
/// Transform of the right camera in the ZED rig. Only exists in a stereo rig (like ZED_Rig_Stereo).
///
private Transform camRightTransform = null;
///
/// Contains the position of the player's head, which is different from the ZED's position in AR mode.
/// But its position relative to the ZED does not change during use (it's a rigid transform).
/// In ZED_Rig_Mono, this will be the root ZED_Rig_Mono object. In ZED_Rig_Stereo, this is Camera_eyes.
///
private Transform zedRigRoot = null;
///
/// Left camera in the ZED rig. Also the "main" camera if in ZED_Rig_Mono.
///
private Camera cameraLeft;
///
/// Right camera of the ZED rig. Only exists in a stereo rig (like ZED_Rig_Stereo).
///
private Camera cameraRight;
///
/// Gets the center transform, which is the transform moved by the tracker in AR mode.
/// This is the root object in ZED_Rig_Mono, and Camera_eyes in ZED_Rig_Stereo.
/// (The method name keeps the original "Tansform" typo for compatibility with existing callers.)
///
public Transform GetZedRootTansform() => zedRigRoot;
///
/// Returns the left ZED camera transform. If there is no left camera but there is a right camera,
/// returns the right camera transform instead; null when neither is assigned.
///
public Transform GetMainCameraTransform()
{
    if (camLeftTransform) return camLeftTransform;
    return camRightTransform ? camRightTransform : null;
}
///
/// Gets the left camera transform in the ZED rig. It's best to use this one, as it's available in all configurations.
///
public Transform GetLeftCameraTransform() => camLeftTransform;
///
/// Gets the right camera transform in the ZED rig. Only available in the stereo rig (ZED_Rig_Stereo).
///
public Transform GetRightCameraTransform() => camRightTransform;
///
/// Returns the left ZED camera. If there is no left camera but there is a right camera,
/// returns the right camera instead; null when neither is assigned.
///
public Camera GetMainCamera()
{
    Camera main = null;
    if (cameraLeft) main = cameraLeft;
    else if (cameraRight) main = cameraRight;
    return main;
}
///
/// Gets the left camera in the ZED rig, lazily resolving it from its transform the first time.
/// Both ZED_Rig_Mono and ZED_Rig_Stereo have a left camera by default.
///
public Camera GetLeftCamera()
{
    bool needsLookup = cameraLeft == null && camLeftTransform != null;
    if (needsLookup)
    {
        cameraLeft = camLeftTransform.GetComponent<Camera>();
    }
    return cameraLeft;
}
///
/// Gets the right camera in the ZED rig, lazily resolving it from its transform the first time.
/// Only available in the stereo rig (ZED_Rig_Stereo) unless configured otherwise.
///
public Camera GetRightCamera()
{
    bool needsLookup = cameraRight == null && camRightTransform != null;
    if (needsLookup)
    {
        cameraRight = camRightTransform.GetComponent<Camera>();
    }
    return cameraRight;
}
#region Inspector foldout states and timestamps
///
/// Saved Inspector foldout states, persisted so each panel reopens the way it was left.
///
[SerializeField] [HideInInspector] private bool advancedPanelOpen = false;
[SerializeField] [HideInInspector] private bool spatialMappingFoldoutOpen = false;
[SerializeField] [HideInInspector] private bool objectDetectionFoldoutOpen = false;
[SerializeField] [HideInInspector] private bool recordingFoldoutOpen = false;
[SerializeField] [HideInInspector] private bool streamingOutFoldoutOpen = false;
[SerializeField] [HideInInspector] private bool camControlFoldoutOpen = false;
//////////////////////////////////////
/////////// Timestamps ///////////////
//////////////////////////////////////
///
/// Timestamp of the last ZED image grabbed. Textures from this grab may not have updated yet.
///
private ulong cameraTimeStamp = 0;
///
/// Public accessor for the timestamp of the last ZED image grabbed.
///
public ulong CameraTimeStamp => cameraTimeStamp;
///
/// Timestamp of the images used to create the current textures.
///
private ulong imageTimeStamp = 0;
///
/// Public accessor for the timestamp of the images used to create the current textures.
///
public ulong ImageTimeStamp => imageTimeStamp;
///
/// Whether the grabbing thread should grab a new frame from the ZED SDK.
/// True unless the last grabbed frame hasn't been applied yet, or the ZED isn't initialized.
///
private bool requestNewFrame = false;
///
/// Whether a new frame has been grabbed from the ZED SDK that still needs to be applied to the textures.
///
private bool newFrameAvailable = false;
#endregion
//////////////////////////////////////
///////// Layers for ZED /////////////
//////////////////////////////////////
///
/// Layer assigned to the cameras and objects of the (normally hidden) AR camera rig created to handle
/// pass-through AR. This allows those cameras to see nothing but the two canvas objects carrying the final MR images.
///
[HideInInspector]
public int arLayer => ZEDLayers.arlayer;
//NOTE(review): the field these attributes decorated ('arlayer') was commented out, but the
//attributes were left behind. [SerializeField] is only valid on fields, so the orphaned
//attributes would attach to the next declared member and fail to compile. Commented out to match.
//[SerializeField]
//[HideInInspector]
//private int arlayer = 30;
//////////////////////////////////////
//////// ZED specific events /////////
//////////////////////////////////////
///
/// Delegate for OnZEDReady.
///
public delegate void OnZEDManagerReady();
///
/// Called when the ZED has finished initializing successfully.
/// Used by many scripts to run startup logic that requires that the ZED is active.
///
public event OnZEDManagerReady OnZEDReady;
///
/// Delegate for OnZEDDisconnected.
///
public delegate void OnZEDManagerDisconnected();
///
/// Event called when the ZED was running but became disconnected.
/// Subscribe handlers with the usual event += handler syntax.
///
public event OnZEDManagerDisconnected OnZEDDisconnected;
///
/// Delegate for a newly grabbed frame, used for external module updates.
///
public delegate void OnGrabAction();
///
/// Event called when the ZED has grabbed a new frame.
///
public event OnGrabAction OnGrab;
#region CHECK_AR
///
/// True when an XR (VR) device is present.
///
private bool hasXRDevice() => XRDevice.isPresent;
///
/// Checks if this GameObject is a stereo rig. Requires a child object called 'Camera_eyes' and
/// two cameras as children of that object, one with stereoTargetEye set to Left, the other to Right.
/// Regardless, sets references to leftCamera and (if relevant) rightCamera.
///
private void CheckStereoMode()
{
//The object moved by tracking. By default it's this Transform. May get changed below.
zedRigRoot =
gameObject.transform;
bool devicePresent = hasXRDevice(); //May not need.
//Pass 1: find cameras explicitly targeting the left/right stereo eyes.
Component[] cams = gameObject.GetComponentsInChildren<Camera>();
//Camera firstmonocam = null;
List<Camera> monocams = new List<Camera>();
foreach (Camera cam in cams)
{
switch (cam.stereoTargetEye)
{
case StereoTargetEyeMask.Left:
if (!cameraLeft) //Keep only the first left-eye camera found.
{
cameraLeft = cam;
camLeftTransform = cam.transform;
}
break;
case StereoTargetEyeMask.Right:
if (!cameraRight) //Keep only the first right-eye camera found.
{
cameraRight = cam;
camRightTransform = cam.transform;
}
break;
case StereoTargetEyeMask.None: //Not a stereo eye: candidate for the ZEDRenderingPlane-based fallback below.
monocams.Add(cam);
break;
case StereoTargetEyeMask.Both:
default:
break;
}
}
//If the left camera or right camera haven't been assigned via stereo target eyes, search the monocams
//based on their ZEDRenderingPlane assignments.
//This won't affect whether the rig is in stereo mode, but allows the cameras to be accessed via GetLeftCamera() and GetRightCamera().
if (cameraLeft == null || cameraRight == null)
{
foreach (Camera cam in monocams)
{
ZEDRenderingPlane rendplane = cam.gameObject.GetComponent<ZEDRenderingPlane>();
if (!rendplane) continue; //Skip cameras that don't have a ZEDRenderingPlane component.
//Left camera not yet found and this camera's viewSide is one of the two LEFT variants.
if (!cameraLeft && (rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.LEFT ||
rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.LEFT_FORCE)
)
{
cameraLeft = cam;
camLeftTransform = cam.transform;
}
//Same lookup for the right camera.
else if (!cameraRight && (rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT ||
rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT_FORCE))
{
cameraRight = cam;
camRightTransform = cam.transform;
}
}
}
if (camLeftTransform && camRightTransform && cameraLeft.stereoTargetEye == StereoTargetEyeMask.Left
) //We found both a left- and right-eye camera.
{
if (camLeftTransform.transform.parent != null)
{
zedRigRoot =
camLeftTransform
.parent; //Make the camera's parent object (Camera_eyes in the ZED_Rig_Stereo prefab) the new zedRigRoot to be tracked.
}
if (UnityEngine.XR.XRDevice.isPresent && allowARPassThrough) //AR pass-through needs both a headset and permission.
{
isStereoRig = true;
}
else
{
isStereoRig = false;
//If there's no VR headset, then cameras set to Left and Right won't display in Unity. Set them both to None.
if (cameraLeft) cameraLeft.stereoTargetEye = StereoTargetEyeMask.None;
if (cameraRight) cameraRight.stereoTargetEye = StereoTargetEyeMask.None;
}
}
else //Not all conditions for a stereo rig were met.
{
isStereoRig = false;
if (camLeftTransform)
{
Camera caml = camLeftTransform.gameObject.GetComponent<Camera>();
cameraLeft = caml;
if (camLeftTransform.transform.parent != null)
zedRigRoot = camLeftTransform.parent;
}
else
{
zedRigRoot = transform;
}
}
}
#endregion
///
/// Sets the target GameObject and all of its children (including inactive ones) to the specified layer.
///
/// <param name="go">Target GameObject; ignored when null.</param>
/// <param name="layerNumber">Layer index applied to the whole hierarchy.</param>
public static void SetLayerRecursively(GameObject go, int layerNumber)
{
    if (go == null) return;
    foreach (Transform child in go.GetComponentsInChildren<Transform>(true))
    {
        child.gameObject.layer = layerNumber;
    }
}
///
/// Stops the initialization and grabbing threads, and spatial mapping if it's running.
///
public void Destroy()
{
running = false; //Signals the grab loop to stop.
//In case the opening thread is still running, tell it to abort early and wait for it.
if (threadOpening != null)
{
initQuittingHandle.Reset();
forceCloseInit = true;
initQuittingHandle.Set();
threadOpening.Join();
threadOpening = null;
}
//Shut down the image grabbing thread (it exits on its own once 'running' is false).
if (threadGrab != null)
{
threadGrab.Join();
threadGrab = null;
}
if (IsMappingRunning)
StopSpatialMapping();
Thread.Sleep(10); //Brief pause to let background work settle before the camera is torn down.
}
///
/// Called by Unity when the application is closed.
/// Also called by Reset() to properly start from a 'clean slate.'
///
void OnApplicationQuit()
{
CloseManager();
//sl.ZEDCamera.UnloadPlugin();
//If this was the last camera to close, make sure all instances are closed.
bool notlast = false;
foreach (ZEDManager manager in ZEDManagerInstance)
{
if (manager != null && manager.IsZEDReady == true)
{
notlast = true;
break;
}
}
//Only unload the native plugin once no manager still has a ready camera.
if (notlast == false)
{
sl.ZEDCamera.UnloadPlugin();
}
}
///
/// Shuts down modules, threads and the camera itself, and restores hidden editor layers.
///
private void CloseManager()
{
if (spatialMapping != null)
spatialMapping.Dispose();
#if !ZED_LWRP && !ZED_HDRP
ClearRendering();
#endif
zedReady = false;
//Unsubscribe our own handlers so they don't fire during/after teardown.
OnCamBrightnessChange -= SetCameraBrightness;
OnMaxDepthChange -= SetMaxDepthRange;
Destroy(); //Close the grab and initialization threads.
if (zedCamera != null)
{
if (isRecording)
{
zedCamera.DisableRecording();
}
zedCamera.Destroy();
zedCamera = null;
}
#if UNITY_EDITOR //Prevents building the app otherwise.
//Restore the AR layers that were hidden, if necessary.
if (!showarrig)
{
LayerMask layerNumberBinary = (1 << arLayer); //Convert layer index into binary number.
UnityEditor.Tools.visibleLayers |= (layerNumberBinary);
}
#endif
sl.ZEDCamera.UnloadInstance((int) cameraID);
}
#if !ZED_LWRP && !ZED_HDRP
///
/// Clears the ZEDRenderingPlane overlays on the left camera and, in a stereo rig, on the right camera.
///
private void ClearRendering()
{
if (camLeftTransform != null)
{
ZEDRenderingPlane leftRenderingPlane = camLeftTransform.GetComponent<ZEDRenderingPlane>();
if (leftRenderingPlane)
{
leftRenderingPlane.Clear();
}
}
if (IsStereoRig)
{
ZEDRenderingPlane rightRenderingPlane = GetRightCameraTransform().GetComponent<ZEDRenderingPlane>();
rightRenderingPlane.Clear();
}
}
#endif
#region Execution entry points (Awake, Update)
///
/// Sets up starting properties and starts the ZED initialization coroutine.
///
void Awake()
{
// If never initialized, init the array of instances linked to each ZEDManager that could be created.
if (ZEDManagerInstance == null)
{
print(sl.Constant.MAX_CAMERA_PLUGIN);
ZEDManagerInstance = new ZEDManager[(int) sl.Constant.MAX_CAMERA_PLUGIN];
for (int i = 0; i < (int) sl.Constant.MAX_CAMERA_PLUGIN; i++)
ZEDManagerInstance[i] = null;
}
//The starting pose is this object's local pose when the scene loads.
initialPosition = transform.localPosition;
initialRotation = transform.localRotation;
zedReady = false;
ZEDManagerInstance[(int) cameraID] = this;
zedCamera = new sl.ZEDCamera(); //Main object used to interact with the ZED SDK.
if (dontDestroyOnLoad)
//If you want the ZED rig not to be destroyed when loading a scene.
DontDestroyOnLoad(transform.root);
#region 相机初始参数设置
//Set first few parameters for initialization. This will get passed to the ZED SDK when initialized.
initParameters = new sl.InitParameters();
initParameters.resolution = resolution;
initParameters.cameraFPS = FPS;
initParameters.cameraDeviceID = (int) cameraID;
initParameters.depthMode = depthMode;
initParameters.depthStabilization = depthStabilizer; //Almost always left true.
initParameters.sensorsRequired = sensorsRequired;
// 40 meters should be enough for all applications
initParameters.depthMaximumDistance = 40.0f;
initParameters.cameraImageFlip = cameraFlipMode;
initParameters.enableImageEnhancement = enableImageEnhancement;
initParameters.cameraDisableSelfCalib = !enableSelfCalibration;
//Check if this rig is a stereo rig. Will set isStereoRig accordingly.
CheckStereoMode();
//Set initialization parameters that may change depending on what was done in CheckStereoMode().
isZEDTracked = enableTracking;
zedPosition = initialPosition;
zedOrientation = initialRotation;
lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST; //Sentinel: no init attempt has completed yet.
bool res = zedCamera.CreateCamera((int) cameraID, wrapperVerbose); //Claim the camera; fails if already taken.
if (!res)
{
Debug.LogError("ZEDManager on " + gameObject.name + " couldn't connect to camera: " + cameraID +
". Check if another ZEDManager is already connected.");
this.gameObject.SetActive(false);
return;
}
initParameters.inputType = inputType;
if (inputType == sl.INPUT_TYPE.INPUT_TYPE_USB) //USB input needs no extra parameters.
{
}
else if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO) //Playback from an .SVO file.
{
initParameters.pathSVO = svoInputFileName;
initParameters.svoRealTimeMode = svoRealTimeMode;
}
else if (inputType == sl.INPUT_TYPE.INPUT_TYPE_STREAM) //Input from a network stream.
{
initParameters.ipStream = streamInputIP;
initParameters.portStream = (ushort) streamInputPort;
}
versionZED = "[SDK]: " + sl.ZEDCamera.GetSDKVersion().ToString() + " [Plugin]: " +
sl.ZEDCamera.PluginVersion.ToString(); //SDK/plugin versions, shown in the Inspector.
#region AR模式下的功能,这部分我不关心,因为我最终是在hololens上实现AR不是透过这个设备
//Behavior specific to AR pass-through mode.
if (isStereoRig)
{
//Creates a hidden camera rig that handles final output to the headset.
GameObject o = CreateZEDRigDisplayer();
if (!showarrig) o.hideFlags = HideFlags.HideInHierarchy;
o.transform.parent = transform;
initParameters.depthMinimumDistance = 0.1f; //Allow depth calculation to very close objects.
//For the Game/output window, mirror the headset view using a custom script that avoids stretching.
CreateMirror();
}
#endregion
//Determine if we should enable the right depth measurement, which costs performance but is needed for pass-through AR.
switch (enableRightDepthMeasure)
{
case RightDepthEnabledMode.AUTO:
default:
if (isStereoRig
) //If so, we've already determined we have both a left and right ZEDRenderingPlane, so skip the lookups.
{
initParameters.enableRightSideMeasure = true;
}
else
{
foreach (ZEDRenderingPlane renderplane in GetComponentsInChildren<ZEDRenderingPlane>())
{
//If we have any ZEDRenderingPlanes that are looking through the right side, enable the measurements.
if (renderplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT ||
renderplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT_FORCE)
{
initParameters.enableRightSideMeasure = true;
break;
}
}
}
break;
case RightDepthEnabledMode.OFF:
initParameters.enableRightSideMeasure = false;
break;
case RightDepthEnabledMode.ON:
initParameters.enableRightSideMeasure = true;
break;
}
#endregion
//Starts a coroutine that initializes the ZED without freezing the game.
lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
openingLaunched = false;
StartCoroutine(InitZED());
OnCamBrightnessChange +=
SetCameraBrightness; //Subscribe event for adjusting brightness setting.
OnMaxDepthChange += SetMaxDepthRange; //Subscribe event for adjusting the max depth range.
//Create the spatial mapping module object (even if not necessarily used).
spatialMapping = new ZEDSpatialMapping(transform, this);
}
///
/// Intentionally empty; multi-camera layer adjustment is currently disabled.
///
void Start()
{
//adjust layers for multiple camera
//setLayersForMultiCamera ();
}
#endregion
#region INITIALIZATION
//const int MAX_OPENING_TRIES = 10;
private uint numberTriesOpening = 0;
/// Counter of tries to open the ZED.
///
/// ZED opening function. Should be called in the initialization thread (threadOpening).
/// NOTE(review): retries forever until Init() succeeds — 'timeout' is incremented but never
/// checked (the MAX_OPENING_TRIES cap above is commented out); the loop only exits early via forceCloseInit.
///
private void OpenZEDInBackground()
{
openingLaunched = true;
int timeout = 0;
do
{
//Makes sure we haven't been turned off early, which only happens in Destroy() from OnApplicationQuit().
initQuittingHandle
.WaitOne(0);
if (forceCloseInit) break;
lastInitStatus = zedCamera.Init(ref initParameters); //Attempt to open the camera; records the result.
timeout++;
numberTriesOpening++;
} while (lastInitStatus != sl.ERROR_CODE.SUCCESS); //Keep retrying until the camera opens.
}
///
/// Initialization coroutine. Launches the background opening thread, waits until the camera opens,
/// then starts tracking/streaming/grabbing and configures rendering.
///
private System.Collections.IEnumerator InitZED()
{
zedReady = false;
if (!openingLaunched) //Launch the background opening thread only once.
{
initQuittingHandle = new EventWaitHandle(true, EventResetMode.ManualReset);
threadOpening = new Thread(new ThreadStart(OpenZEDInBackground)); //Assign thread.
threadOpening.Start();
}
while (lastInitStatus != sl.ERROR_CODE.SUCCESS) //Poll until the camera has opened.
{
yield return new WaitForSeconds(0.3f);
}
//ZED has initialized successfully.
if (lastInitStatus == sl.ERROR_CODE.SUCCESS)
{
threadOpening.Join();
//Initialize the tracking thread, AR initial transforms and SVO read/write as needed.
ZEDReady();
//If using tracking, wait until the tracking thread has been initialized.
while (enableTracking && !isTrackingEnable)
{
yield return new WaitForSeconds(0.5f);
}
//Tells all the listeners that the ZED is ready! :)
if (OnZEDReady != null)
{
OnZEDReady();
}
//Make sure the screen is at 16:9 aspect ratio or close. Warn the user otherwise.
float ratio = (float) Screen.width / (float) Screen.height;
float target = 16.0f / 9.0f;
if (Mathf.Abs(ratio - target) > 0.01)
{
Debug.LogWarning(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.SCREEN_RESOLUTION));
}
//Get information from the camera (S/N, firmware, model...) for the Inspector status display.
cameraModel = zedCamera.GetCameraModel().ToString();
cameraFirmware = zedCamera.GetCameraFirmwareVersion().ToString() + "-" +
zedCamera.GetSensorsFirmwareVersion().ToString();
cameraSerialNumber = zedCamera.GetZEDSerialNumber().ToString();
if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
{
numberFrameMax = zedCamera.GetSVONumberOfFrames();
}
// If streaming has been switched on before play.
if (enableStreaming && !isStreamingEnable)
{
lock (zedCamera.grabLock)
{
sl.ERROR_CODE err = zedCamera.EnableStreaming(streamingCodec, (uint) bitrate,
(ushort) streamingPort, gopSize, adaptativeBitrate, chunkSize);
if (err == sl.ERROR_CODE.SUCCESS)
{
isStreamingEnable = true;
}
else
{
enableStreaming = false;
isStreamingEnable = false;
}
}
}
//If not already launched, launch the image grabbing thread.
if (!running)
{
running = true;
requestNewFrame = true;
threadGrab = new Thread(new ThreadStart(ThreadedZEDGrab));
threadGrab.Start();
}
zedReady = true;
isDisconnected = false; //In case we just regained connection.
setRenderingSettings(); //Find the ZEDRenderingPlanes in the rig and configure them.
AdjustZEDRigCameraPosition(); //If in AR mode, move cameras to proper offset relative to zedRigRoot.
}
}
///
/// Adjust camera(s) relative to the zedRigRoot transform, which is what is moved each frame. Called at start of tracking.
/// In AR mode, offset is each camera's position relative to the center of the user's head. Otherwise, cameras are just spaced
/// by the camera's baseline/IPD, or no offset is applied if there's just one camera.
///
void AdjustZEDRigCameraPosition()
{
//Vector3 rightCameraOffset = new Vector3(zedCamera.Baseline, 0.0f, 0.0f);
if (isStereoRig && hasXRDevice()) //Using AR pass-through mode.
{
//zedRigRoot transform (origin of the global camera) is placed on the HMD headset. Therefore, we move the
//camera in front of it by offsetHmdZEDPosition to compensate for the ZED's position on the headset.
//If values are wrong, tweak calibration file created in ZEDMixedRealityPlugin.
camLeftTransform.localPosition = arRig.HmdToZEDCalibration.translation;
camLeftTransform.localRotation = arRig.HmdToZEDCalibration.rotation;
if (camRightTransform)
camRightTransform.localPosition =
camLeftTransform.localPosition +
new Vector3(zedCamera.Baseline, 0.0f, 0.0f); //Space the eyes apart.
if (camRightTransform) camRightTransform.localRotation = camLeftTransform.localRotation;
}
else if (camLeftTransform && camRightTransform) //Using stereo rig, but no VR headset.
{
//When no VR HMD is available, simply put the origin at the left camera.
camLeftTransform.localPosition = Vector3.zero;
camLeftTransform.localRotation = Quaternion.identity;
camRightTransform.localPosition = new Vector3(zedCamera.Baseline, 0.0f, 0.0f); //Space the eyes apart.
camRightTransform.localRotation = Quaternion.identity;
}
else //Using mono rig (ZED_Rig_Mono). No offset needed.
{
if (GetMainCameraTransform())
{
GetMainCameraTransform().localPosition = Vector3.zero;
GetMainCameraTransform().localRotation = Quaternion.identity;
}
}
}
///
/// Find the ZEDRenderingPlane components in the ZED rig and set their rendering settings
/// (rendering path, shader values, etc.) for left and right cameras. Also activate/deactivate depth occlusions.
///
void setRenderingSettings()
{
    //Configure the left eye: post-processing, rendering path, brightness and culling mask.
    ZEDRenderingPlane leftRenderingPlane = null;
    if (GetLeftCameraTransform() != null)
    {
        leftRenderingPlane = GetLeftCameraTransform().GetComponent<ZEDRenderingPlane>();
        if (leftRenderingPlane)
        {
            leftRenderingPlane.SetPostProcess(postProcessing);
            GetLeftCameraTransform().GetComponent<Camera>().renderingPath = RenderingPath.UsePlayerSettings;
            SetCameraBrightness(m_cameraBrightness);
            //Hide objects on the "invisible to ZED" layer from the left camera.
            cameraLeft.cullingMask &= ~(1 << zedCamera.TagInvisibleToZED);
        }
    }
    //Configure the right eye the same way (only present on stereo rigs).
    ZEDRenderingPlane rightRenderingPlane = null;
    if (GetRightCameraTransform() != null)
    {
        rightRenderingPlane = GetRightCameraTransform().GetComponent<ZEDRenderingPlane>();
        if (rightRenderingPlane)
        {
            rightRenderingPlane.SetPostProcess(postProcessing);
            cameraRight.renderingPath = RenderingPath.UsePlayerSettings;
            cameraRight.cullingMask &= ~(1 << zedCamera.TagInvisibleToZED);
        }
    }
    //Apply global image settings (brightness is re-applied here unconditionally).
    SetCameraBrightness(m_cameraBrightness);
    SetMaxDepthRange(m_maxDepthRange);
#if ZED_HDRP
    SetSelfIllumination(selfIllumination);
    SetBoolValueOnPlaneMaterials("_ApplyZEDNormals", applyZEDNormals);
#endif
    Camera maincam = GetMainCamera();
    if (maincam != null)
    {
        ZEDRenderingMode renderingPath = (ZEDRenderingMode) maincam.actualRenderingPath;
        //Make sure we're in either forward or deferred rendering. Default to forward otherwise.
        if (renderingPath != ZEDRenderingMode.FORWARD && renderingPath != ZEDRenderingMode.DEFERRED)
        {
            Debug.LogError("[ZED Plugin] Only Forward and Deferred Shading rendering path are supported");
            if (cameraLeft) cameraLeft.renderingPath = RenderingPath.Forward;
            if (cameraRight) cameraRight.renderingPath = RenderingPath.Forward;
        }
        //Toggle depth occlusion via the shader keyword appropriate for the active rendering path.
        if (renderingPath == ZEDRenderingMode.FORWARD)
        {
            if (leftRenderingPlane)
                leftRenderingPlane.ManageKeywordPipe(!depthOcclusion, "NO_DEPTH_OCC");
            if (rightRenderingPlane)
                rightRenderingPlane.ManageKeywordPipe(!depthOcclusion, "NO_DEPTH_OCC");
        }
        else if (renderingPath == ZEDRenderingMode.DEFERRED)
        {
            if (leftRenderingPlane)
                leftRenderingPlane.ManageKeywordDeferredMat(!depthOcclusion, "NO_DEPTH_OCC");
            if (rightRenderingPlane)
                rightRenderingPlane.ManageKeywordDeferredMat(!depthOcclusion, "NO_DEPTH_OCC");
        }
    }
}
#endregion
#region IMAGE_ACQUIZ
///
/// Continuously grabs images from the ZED. Runs on its own thread.
///
private void ThreadedZEDGrab()
{
    //Build the runtime parameters once; only sensingMode may change between grabs.
    runtimeParameters = new sl.RuntimeParameters()
    {
        sensingMode = sensingMode,
        enableDepth = true,
        confidenceThreshold = confidenceThreshold,
        textureConfidenceThreshold = textureConfidenceThreshold,
        //Don't change this reference frame. If we need normals in the world frame, better to do the conversion ourselves.
        measure3DReferenceFrame = sl.REFERENCE_FRAME.CAMERA
    };
    while (running)
    {
        //Stop if the camera handle has gone away (e.g. manager shut down).
        if (zedCamera == null)
        {
            return;
        }
        //Pick up any sensing-mode change made from the main thread.
        if (runtimeParameters.sensingMode != sensingMode)
        {
            runtimeParameters.sensingMode = sensingMode;
        }
        AcquireImages();
    }
}
///
/// Grabs images from the ZED SDK and updates tracking, FPS and timestamp values.
/// Called from ThreadedZEDGrab() in a separate thread.
///
private void AcquireImages()
{
    //Only grab when the main thread has consumed the previous frame and the camera is ready.
    if (requestNewFrame && zedReady)
    {
        sl.ERROR_CODE e = sl.ERROR_CODE.FAILURE;
        if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
        {
            //SVO playback: honor pause, but still allow a single forced grab (e.g. after seeking).
            if (NeedNewFrameGrab && pauseSVOReading)
            {
                e = zedCamera.Grab(ref runtimeParameters);
                NeedNewFrameGrab = false;
            }
            else if (!pauseSVOReading)
                e = zedCamera.Grab(ref runtimeParameters);
            currentFrame = zedCamera.GetSVOPosition();
        }
        else if (!pauseLiveReading)
        {
            e = zedCamera.Grab(ref runtimeParameters);
        }
        lock (zedCamera.grabLock) //Protect shared frame/tracking state from the main thread.
        {
            if (e == sl.ERROR_CODE.CAMERA_NOT_DETECTED)
            {
                Debug.Log("Camera not detected or disconnected.");
                isDisconnected = true;
                Thread.Sleep(10);
                requestNewFrame = false;
            }
            else if (e == sl.ERROR_CODE.SUCCESS)
            {
#if UNITY_EDITOR
                //Editor-only: refresh the FPS string shown in the Inspector.
                float camera_fps = zedCamera.GetCameraFPS();
                cameraFPS = camera_fps.ToString() + " FPS";
#endif
                //Update object detection here if using image-synced detection mode.
                if (objectDetectionRunning && objectDetectionImageSyncMode == true && requestobjectsframe)
                {
                    RetrieveObjectDetectionFrame();
                }
                //Get the camera's pose for this frame if inside-out tracking is enabled.
                if (isTrackingEnable)
                {
                    zedtrackingState = zedCamera.GetPosition(ref zedOrientation, ref zedPosition,
                        sl.TRACKING_FRAME.LEFT_EYE);
                    //When looping an SVO, cache the very first pose so tracking can be reset to it on loop.
                    if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO && svoLoopBack == true &&
                        initialPoseCached == false)
                    {
                        initialPosition = zedPosition;
                        initialRotation = zedOrientation;
                        initialPoseCached = true;
                    }
                }
                else
                {
                    zedtrackingState = sl.TRACKING_STATE.TRACKING_OFF;
                }
                // Indicate that a new frame is available and pause the thread until a new request is called.
                newFrameAvailable = true;
                requestNewFrame = false;
            }
            else
                Thread.Sleep(1); //Grab failed for another reason; back off briefly and retry.
        }
    }
    else
    {
        //No frame requested yet; sleep briefly to avoid spinning ("overheating").
        Thread.Sleep(1);
    }
}
#endregion
///
/// Initialize the SVO, and launch the thread to initialize tracking. Called once the ZED
/// is initialized successfully.
/// 初始化SVO,然后启动线程以初始化跟踪。
/// ZED成功初始化后调用。
///
private void ZEDReady()
{
    //Apply camera settings based on user preference.
    InitVideoSettings(videoSettingsInitMode);
    FPS = (int) zedCamera.GetRequestedCameraFPS(); //Requested capture rate.
    if (enableTracking)
    {
        //Inside-out tracking is enabled: initialize it on its own thread, since it can take a while.
        trackerThread = new Thread(EnableTrackingThreaded);
        trackerThread.Start();
    }
    else if (estimateInitialPosition)
    {
        //Tracking is off, but we still want a one-shot estimate of the camera's initial pose.
        sl.ERROR_CODE err = zedCamera.EstimateInitialPosition(ref initialRotation, ref initialPosition);
        if (zedCamera.GetCameraModel() == sl.MODEL.ZED_M)
            zedCamera.GetInternalIMUOrientation(ref initialRotation, sl.TIME_REFERENCE.IMAGE);
        if (err != sl.ERROR_CODE.SUCCESS)
            Debug.LogWarning("Failed to estimate initial camera position");
    }
    if (enableTracking)
        trackerThread.Join(); //Wait for tracking init to finish before continuing setup.
    if (isStereoRig && hasXRDevice())
    {
        //AR pass-through: the rig's origin comes from the HMD's current tracking pose.
        ZEDMixedRealityPlugin.Pose pose = arRig.InitTrackingAR();
        OriginPosition = pose.translation;
        OriginRotation = pose.rotation;
        if (!zedCamera.IsHmdCompatible && zedCamera.IsCameraReady)
            Debug.LogWarning(
                "WARNING: AR Passtrough with a ZED is not recommended. Consider using ZED Mini, designed for this purpose.");
    }
    else
    {
        OriginPosition = initialPosition;
        OriginRotation = initialRotation;
    }
    //Set the original transform for the rig.
    zedRigRoot.localPosition = OriginPosition;
    zedRigRoot.localRotation = OriginRotation;
#if UNITY_EDITOR
    //NOTE(review): this assigns the deprecated playmodeStateChanged field, overwriting any
    //previously-registered handler; newer Unity versions prefer the playModeStateChanged event — confirm target Unity version.
    EditorApplication.playmodeStateChanged = HandleOnPlayModeChanged;
#endif
}
///
/// Initializes the ZED's inside-out tracking. Started as a separate thread in OnZEDReady.
/// 初始化ZED的由内而外的跟踪。在OnZEDReady中作为单独的线程启动。
///
void EnableTrackingThreaded()
{
    lock (zedCamera.grabLock) //Hold the grab lock so the acquisition thread can't grab mid-setup.
    {
        //If using spatial memory and given a path to an .area file, make sure that path is valid.
        bool hasAreaPath = enableSpatialMemory && pathSpatialMemory != "";
        if (hasAreaPath && !System.IO.File.Exists(pathSpatialMemory))
        {
            Debug.Log("Specified path to .area file '" + pathSpatialMemory + "' does not exist. Ignoring.");
            pathSpatialMemory = "";
        }
        //Now enable the tracking with the proper parameters.
        sl.ERROR_CODE trackingResult = zedCamera.EnableTracking(ref zedOrientation, ref zedPosition,
            enableSpatialMemory, enablePoseSmoothing, estimateInitialPosition, trackingIsStatic,
            enableIMUFusion, pathSpatialMemory);
        enableTracking = trackingResult == sl.ERROR_CODE.SUCCESS;
        if (!enableTracking)
        {
            //SDK-level tracking failed to start; surface it as an exception.
            throw new Exception(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.TRACKING_NOT_INITIALIZED));
        }
        isTrackingEnable = true; //Tracking is now live.
    }
}
#if ZED_HDRP
public bool GetChosenSRPMaterial(out Material srpMat)
{
    //Map the chosen shader type to the Resources path of the matching material, then load it once.
    string path;
    switch (srpShaderType)
    {
        case shaderType.Lit:
            path = "Materials/Lighting/Mat_ZED_HDRP_Lit";
            break;
        case shaderType.Unlit:
            path = "Materials/Unlit/Mat_ZED_Unlit_RawInput";
            break;
        case shaderType.Greenscreen_Lit:
            path = "Materials/Lighting/Mat_ZED_Greenscreen_HDRP_Lit";
            break;
        case shaderType.Greenscreen_Unlit:
            path = "Materials/Unlit/Mat_ZED_Greenscreen_Unlit";
            break;
        case shaderType.DontChange:
        default:
            //No material swap requested.
            srpMat = null;
            return false;
    }
    srpMat = Resources.Load<Material>(path);
    if (srpMat == null)
    {
        Debug.LogError("Couldn't find material in Resources. Path: " + path);
        return false;
    }
    return true;
}
#endif
#if UNITY_EDITOR
///
/// Handler for playmodeStateChanged.
///
void HandleOnPlayModeChanged()
{
    //Nothing to do without an open camera.
    if (zedCamera == null) return;
#if UNITY_EDITOR
    //Re-register this handler. NOTE(review): playmodeStateChanged is the deprecated field form,
    //and plain assignment replaces any other handlers — confirm against the target Unity version.
    EditorApplication.playmodeStateChanged = HandleOnPlayModeChanged;
#endif
}
#endif
///
// ENGINE UPDATE REGION /
///
#region ENGINE_UPDATE unity引擎更新
///
/// If a new frame is available, this function retrieves the images and updates the Unity textures. Called in Update().
/// 如果有新的框架可用,此函数将检索图像并更新Unity纹理。在Update()中调用。
/// Lance:这种代码规范,在函数前面加上Update来提示说是在Update()里面调用的
///
public void UpdateImages()
{
    if (zedCamera == null) //No camera, nothing to update.
        return;
    //ThreadedZEDGrab()/AcquireImages() grabbed images we haven't updated yet.
    if (newFrameAvailable)
    {
        lock (zedCamera.grabLock) //Keep the grab thread from touching the frame while we copy it.
        {
            zedCamera.RetrieveTextures(); //Tell the wrapper to compute the textures.
            zedCamera.UpdateTextures(); //Tell the wrapper to update the textures.
            imageTimeStamp = zedCamera.GetImagesTimeStamp(); //Timestamp used later for AR latency compensation.
        }
        //For external modules: trigger the capture-done event for any subscribers.
        if (OnGrab != null)
            OnGrab();
        //SVO playback with looping enabled: rewind and reset tracking when the end is reached.
        if (zedCamera.GetInputType() == sl.INPUT_TYPE.INPUT_TYPE_SVO && svoLoopBack)
        {
            int maxSVOFrame = zedCamera.GetSVONumberOfFrames();
            if (zedCamera.GetSVOPosition() >= maxSVOFrame - (svoRealTimeMode ? 2 : 1))
            {
                zedCamera.SetSVOPosition(0);
                if (enableTracking)
                {
                    //Reset tracking to the cached initial pose; disable tracking if the reset fails.
                    if (!(enableTracking = (zedCamera.ResetTracking(initialRotation, initialPosition) ==
                                            sl.ERROR_CODE.SUCCESS)))
                    {
                        Debug.LogError(
                            "ZED Tracking disabled: Not available during SVO playback when Loop is enabled.");
                    }
                }
                zedRigRoot.localPosition = initialPosition;
                zedRigRoot.localRotation = initialRotation;
            }
        }
        //Lets ThreadedZEDGrab()/AcquireImages() start grabbing again.
        requestNewFrame = true;
        newFrameAvailable = false; //This frame has been consumed.
    }
}
///
/// Gets the tracking position from the ZED and updates zedRigRoot's position. Also updates the AR tracking if enabled.
/// Only called in Live (not SVO playback) mode. Called in Update().
/// 仅在实时(不是SVO播放)模式下调用。在Update()中调用。
///
private void UpdateTracking()
{
    if (!zedReady) //Camera must be fully initialized before poses can be applied.
        return;
    if (isZEDTracked) //ZED inside-out tracking is enabled and initialized.
    {
        Quaternion r; //Rotation to apply to zedRigRoot this frame.
        Vector3 v; //Position to apply to zedRigRoot this frame.
        isCameraTracked = true;
        if (hasXRDevice() && isStereoRig) //AR pass-through mode.
        {
            //If the HMD offset calibration file changed during runtime, re-apply the ZED's
            //offset from the VR headset. (Fix: this note was previously a bare, un-commented
            //line of text, which broke compilation.)
            if (calibrationHasChanged)
            {
                AdjustZEDRigCameraPosition();
                calibrationHasChanged = false;
            }
            arRig.ExtractLatencyPose(
                imageTimeStamp); //Find what HMD's pose was at ZED image's timestamp for latency compensation.
            arRig.AdjustTrackingAR(zedPosition, zedOrientation, out r, out v, setIMUPriorInAR);
            zedRigRoot.localRotation = r;
            zedRigRoot.localPosition = v;
            //Debug.DrawLine(new Vector3(0, 0.05f, 0), (r * Vector3.one * 5) + new Vector3(0, 0.05f, 0), Color.red);
            //Debug.DrawLine(Vector3.zero, zedOrientation * Vector3.one * 5, Color.green);
            ZEDSyncPosition = v;
            ZEDSyncRotation = r;
            HMDSyncPosition = arRig.LatencyPose().translation;
            HMDSyncRotation = arRig.LatencyPose().rotation;
        }
        else //Not AR pass-through mode: apply the ZED pose directly, skipping NaN positions.
        {
            zedRigRoot.localRotation = zedOrientation;
            if (!ZEDSupportFunctions.IsVector3NaN(zedPosition))
                zedRigRoot.localPosition = zedPosition;
        }
    }
    else if (hasXRDevice() && isStereoRig) //ZED tracking is off but HMD tracking is on. Fall back to that.
    {
        isCameraTracked = true;
        arRig.ExtractLatencyPose(
            imageTimeStamp); //Find what HMD's pose was at ZED image's timestamp for latency compensation.
        zedRigRoot.localRotation = arRig.LatencyPose().rotation;
        zedRigRoot.localPosition = arRig.LatencyPose().translation;
    }
    else //The ZED is not tracked by itself or an HMD.
        isCameraTracked = false;
}
///
/// Stores the HMD's current pose. Used in AR mode for latency compensation.
/// Pose will be applied to final canvases when a new image's timestamp matches
/// the time when this is called.
///
void UpdateHmdPose()
{
    //Only relevant in stereo AR pass-through with a headset present.
    if (!IsStereoRig || !hasXRDevice())
    {
        return;
    }
    arRig.CollectPose(); //Save headset pose with current timestamp.
}
///
/// Updates images, collects HMD poses for latency correction, and applies tracking.
/// Called by Unity each frame.
/// 更新图像,收集HMD姿势以进行延迟校正,并应用跟踪。
///由Unity每一帧调用。
///
void Update()
{
    //Check if ZED is disconnected; invoke the event and attempt a reset if so.
    if (isDisconnected)
    {
        if (OnZEDDisconnected != null)
            //Invoke event. Used for GUI message and pausing ZEDRenderingPlanes.
            OnZEDDisconnected();
        //Tries to reset the camera (caches tracking, closes everything and reopens).
        ZEDDisconnected();
        return;
    }
    // Then update all modules.
    UpdateImages(); //Image is updated first so we have its timestamp for latency compensation.
    UpdateHmdPose(); //Store the HMD's pose at the current timestamp.
    UpdateTracking(); //Apply position/rotation changes to zedRigRoot.
    UpdateObjectsDetection(); //Update object detection if activated.
    UpdateMapping(); //Update spatial mapping if activated.
    //If in Unity Editor, update the ZEDManager status list.
#if UNITY_EDITOR
    //Update strings used for displaying stats in the Inspector.
    if (zedCamera != null)
    {
        float frame_drop_count = zedCamera.GetFrameDroppedPercent();
        float CurrentTickFPS = 1.0f / Time.deltaTime;
        fps_engine = (fps_engine + CurrentTickFPS) / 2.0f; //Running average of the engine framerate.
        engineFPS = fps_engine.ToString("F0") + " FPS";
        if (frame_drop_count > 30 && fps_engine < 45)
            engineFPS += "WARNING: Low engine framerate detected";
        if (isZEDTracked)
            trackingState = ZEDTrackingState.ToString();
        else if (hasXRDevice() && isStereoRig)
            trackingState = "HMD Tracking";
        else
            trackingState = "Camera Not Tracked";
    }
#endif
}
public void LateUpdate()
{
    if (IsStereoRig)
    {
        //Update textures on the final AR rig for output to the headset.
        //(Fix: this explanatory note was previously a bare, un-commented line of text,
        //which broke compilation.)
        arRig.LateUpdateHmdRendering();
    }
}
#endregion
///
/// Event called when camera is disconnected
/// 相机断开连接时调用的事件
///
void ZEDDisconnected()
{
    //Mark the camera as gone and surface it in the Inspector FPS readout.
    cameraFPS = "Disconnected";
    isDisconnected = true;
    //If initialization had never finished, there is nothing to restart.
    if (!zedReady)
    {
        return;
    }
    //Cache tracking, turn everything off and turn it back on again.
    Reset();
}
private void OnDestroy()
{
    //Release the camera and stop all helper threads when this component is destroyed.
    CloseManager();
}
///
// SPATIAL MAPPING REGION //
///
#region MAPPING_MODULE
///
/// Tells ZEDSpatialMapping to begin a new scan. This clears the previous scan from the scene if there is one.
/// 告诉ZEDSpatialMapping开始新的扫描。如果存在,这将从场景中清除先前的扫描。
///
public void StartSpatialMapping()
{
    //Reset this object's pose so the new scan starts from the origin.
    transform.position = Vector3.zero;
    transform.rotation = Quaternion.identity;
    //Kick off the scan with the configured resolution/range/texturing presets.
    spatialMapping.StartStatialMapping(mappingResolutionPreset, mappingRangePreset, isMappingTextured);
}
///
/// Ends the current spatial mapping. Once called, the current mesh will be filtered, textured (if enabled) and saved (if enabled),
/// and a mesh collider will be added.
/// 结束当前的空间映射。调用后,将对当前网格物体进行过滤,纹理化(如果启用)并保存(如果启用),
/// 并将添加一个网格碰撞器。
///
public void StopSpatialMapping()
{
    if (spatialMapping == null)
    {
        return;
    }
    //Optionally persist the mesh before shutting the module down.
    if (saveMeshWhenOver)
    {
        SaveMesh(meshPath);
    }
    spatialMapping.StopStatialMapping();
}
///
/// Updates the filtering parameters and call the ZEDSpatialMapping instance's Update() function.
/// 更新过滤参数并调用ZEDSpatialMapping实例的Update()函数。
///
private void UpdateMapping()
{
    //Nothing to do unless the module exists and a scan is in progress.
    if (spatialMapping == null || !spatialMapping.IsRunning())
    {
        return;
    }
    //Push the latest filter settings, then advance the mapping module.
    spatialMapping.filterParameters = meshFilterParameters;
    spatialMapping.Update();
}
///
/// Toggles whether to display the mesh or not.
/// 切换是否显示网格。
///
/// True to make the mesh visible, false to make it invisible.
public void SwitchDisplayMeshState(bool state)
{
    //Forward the visibility toggle to the mapping module, if it exists.
    if (spatialMapping == null)
        return;
    spatialMapping.SwitchDisplayMeshState(state);
}
public void ClearAllMeshes()
{
    //Remove every mesh chunk created by a previous scan, if the module exists.
    if (spatialMapping == null)
        return;
    spatialMapping.ClearAllMeshes();
}
///
/// Pauses the current scan.
/// 暂停当前扫描。
///
/// True to pause the scanning, false to unpause it.
public void SwitchPauseState(bool state)
{
    //Forward the pause toggle to the mapping module, if it exists.
    if (spatialMapping == null)
        return;
    spatialMapping.SwitchPauseState(state);
}
///
/// Saves the mesh into a 3D model (.obj, .ply or .bin) file. Also saves an .area file for spatial memory for better tracking.
/// Calling this will end the spatial mapping if it's running. Note it can take a significant amount of time to finish.
///
/// Path where the mesh and .area files will be saved.
public void SaveMesh(string meshPath = "ZEDMeshObj.obj")
{
    //Guard against the mapping module never having been created — every other mapping
    //method here null-checks spatialMapping; without this, calling SaveMesh before a
    //scan throws a NullReferenceException.
    if (spatialMapping == null)
    {
        Debug.LogWarning("Tried to save a mesh, but spatial mapping has not been initialized.");
        return;
    }
    spatialMapping.RequestSaveMesh(meshPath);
}
///
/// Loads a mesh and spatial memory data from a file.
/// If scanning is running, it will be stopped. Existing scans in the scene will be cleared.
///
/// Path to the 3D mesh file (.obj, .ply or .bin) to load.
/// True if successfully loaded, false otherwise.
public bool LoadMesh(string meshPath = "ZEDMeshObj.obj")
{
    //Guard against the mapping module never having been created — consistent with the
    //other mapping methods; previously this would throw a NullReferenceException.
    if (spatialMapping == null)
    {
        Debug.LogWarning("Tried to load a mesh, but spatial mapping has not been initialized.");
        return false;
    }
    //Cache the save setting and set to false, to avoid overwriting the mesh file during the load.
    bool oldSaveWhenOver = saveMeshWhenOver;
    saveMeshWhenOver = false;
    gravityRotation = Quaternion.identity;
    spatialMapping.SetMeshRenderer();
    bool loadresult = spatialMapping.LoadMesh(meshPath);
    saveMeshWhenOver = oldSaveWhenOver; //Restoring old setting.
    return loadresult;
}
#endregion
///
// OBJECT DETECTION REGION //
///
#region OBJECT_DETECTION
///
/// True while the object detection coroutine is in the process of starting.
/// Used to prevent object detection from being launched multiple times at once,
/// which causes instability.
///
private bool odIsStarting = false;
///
/// Starts the ZED object detection.
/// Note: This will lock the main thread for a moment, which may appear to be a freeze.
/// 开始ZED对象检测。
/// 注意:这将锁定主线程一会儿,这似乎是冻结。
/// Lance:这边指的是,当前要初次初始化物品检测的模型,可能考虑到要加载模型或者使用gpu加速设置,会导致游戏暂停一会
///
public void StartObjectDetection()
{
    //Defer the actual start by a couple of frames via a coroutine: enabling the module
    //locks the main thread for a while (which looks like a freeze), so the delay lets us
    //log a message first telling the user the hang is expected.
    StartCoroutine(startObjectDetection());
}
///
/// Starts the object detection module after a two-frame delay, allowing us to deliver a log message
/// to the user indicating that what appears to be a freeze is actually expected and will pass.
/// 在两帧延迟后启动对象检测模块,使我们可以向用户发送
/// 消息,表明实际上是预期的冻结并将通过。
/// Lance:大概的意思就是说,系统当前要进行物品检测了,我先发送消息到界面上提示用户说一会要卡住了,才不会引起不适
///
///
private IEnumerator startObjectDetection()
{
    //Refuse re-entrant starts: launching the module twice at once causes instability.
    if (odIsStarting == true)
    {
        Debug.LogError(
            "Tried to start Object Detection while it was already starting. Do you have two scripts trying to start it?");
        yield break;
    }
    if (objectDetectionRunning)
    {
        //Already running; warn but continue.
        Debug.LogWarning("Tried to start Object Detection while it was already running.");
    }
    if (zedCamera != null)
    {
        odIsStarting = true;
        Debug.LogWarning("Starting Object Detection. This may take a moment.");
        //The two-frame delay below would drop SVO frames during playback, unless we pause reading.
        bool oldpausestate = pauseSVOReading;
        pauseSVOReading = true;
        yield return null;
        yield return null;
        pauseSVOReading = oldpausestate;
        //Build the module parameters from the user-facing settings.
        sl.dll_ObjectDetectionParameters od_param = new sl.dll_ObjectDetectionParameters();
        od_param.imageSync = objectDetectionImageSyncMode; //Sync detection to image grabs vs. run async.
        od_param.enableObjectTracking = objectDetectionTracking; //Track detected objects across frames.
        od_param.enable2DMask = objectDetection2DMask; //Request 2D masks for detected objects.
        od_runtime_params.detectionConfidenceThreshold = objectDetectionConfidenceThreshold;
        //Class filter array indexed by sl.OBJECT_CLASS. Per the original notes, a value of 1 (true)
        //appears to exclude that class from results — TODO confirm against the ZED SDK docs.
        od_runtime_params.objectClassFilter = new int[(int) sl.OBJECT_CLASS.LAST];
        od_runtime_params.objectClassFilter[(int) sl.OBJECT_CLASS.PERSON] =
            Convert.ToInt32(objectClassPersonFilter);
        od_runtime_params.objectClassFilter[(int) sl.OBJECT_CLASS.VEHICLE] =
            Convert.ToInt32(objectClassVehicleFilter);
        //Time how long the loading takes so we can tell the user.
        System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
        watch.Start();
        sl.ERROR_CODE err = zedCamera.EnableObjectsDetection(ref od_param);
        if (err == sl.ERROR_CODE.SUCCESS)
        {
            Debug.Log("Object Detection module started in " + watch.Elapsed.Seconds + " seconds.");
            objectDetectionRunning = true;
        }
        else
        {
            Debug.Log("Object Detection failed to start. (Error: " + err + " )");
            objectDetectionRunning = false;
        }
        watch.Stop();
        odIsStarting = false; //Start sequence finished (whether it succeeded or not).
    }
}
///
/// Stops the object detection.
/// 停止对象检测。
///
public void StopObjectDetection()
{
    //Only meaningful while the camera and grab thread are alive.
    if (zedCamera == null || !running)
    {
        return;
    }
    zedCamera.DisableObjectsDetection();
    objectDetectionRunning = false;
}
///
/// Updates the objects detection by triggering the detection event
/// 通过触发检测事件来更新对象检测,
/// Lance:这部分可能以后我们会需要用到,例如某个时间段我们需要更新一下
///
public void UpdateObjectsDetection()
{
    if (!objectDetectionRunning) return; //Module not active; nothing to do.
    //Update the runtime parameters in case the user made changes.
    od_runtime_params.detectionConfidenceThreshold = objectDetectionConfidenceThreshold;
    od_runtime_params.objectClassFilter = new int[(int) sl.OBJECT_CLASS.LAST];
    od_runtime_params.objectClassFilter[(int) sl.OBJECT_CLASS.PERSON] = Convert.ToInt32(objectClassPersonFilter);
    od_runtime_params.objectClassFilter[(int) sl.OBJECT_CLASS.VEHICLE] = Convert.ToInt32(objectClassVehicleFilter);
    if (objectDetectionImageSyncMode == false)
        //If true, this is called in the AcquireImages function in the image acquisition thread instead.
        RetrieveObjectDetectionFrame();
    if (newobjectsframeready)
    {
        lock (zedCamera.grabLock) //Don't let the grab thread overwrite the frame while we publish it.
        {
            //Running average of the module framerate, from consecutive frame timestamps
            //(the 1e9 factor suggests nanosecond timestamps — TODO confirm in the SDK docs).
            float objdetect_fps = 1000000000.0f / (objectsFrameSDK.timestamp - lastObjectFrameTimeStamp);
            objDetectionModuleFPS = (objDetectionModuleFPS + objdetect_fps) / 2.0f;
            objectDetectionFPS = objDetectionModuleFPS.ToString("F1") + " FPS";
            //Trigger the event that holds the raw data, and pass the whole objects frame.
            if (OnObjectDetection_SDKData != null)
            {
                OnObjectDetection_SDKData(objectsFrameSDK);
            }
            //If there are any subscribers to the non-raw data, create that data and publish the event.
            if (OnObjectDetection != null)
            {
                DetectionFrame
                    oldoframe =
                        detectionFrame; //Cache so we can clean it up once we're done setting up the new one.
                detectionFrame = new DetectionFrame(objectsFrameSDK, this);
                OnObjectDetection(detectionFrame);
                if (oldoframe != null) oldoframe.CleanUpAllObjects();
            }
            //Now that all events have been sent out, it's safe to let the image acquisition thread detect more objects.
            requestobjectsframe = true;
            newobjectsframeready = false;
        }
    }
}
///
/// Requests the latest object detection frame information. If it's new, it'll fill the objectsFrame object
/// with the new frame info, set requestobjectsframe to true, and set newobjectsframeready to true.
/// 请求最新的物体检测框架信息。如果是新的,它将用新的帧信息填充objectsFrame对象
/// 将requestobjectsframe设置为true,并将newobjectsframeready设置为true。
///
private void RetrieveObjectDetectionFrame()
{
    sl.ObjectsFrameSDK oframebuffer = new sl.ObjectsFrameSDK();
    sl.ERROR_CODE res = zedCamera.RetrieveObjectsDetectionData(ref od_runtime_params, ref oframebuffer);
    //Only adopt the frame if retrieval succeeded and it is not older than the one we already have.
    if (res == sl.ERROR_CODE.SUCCESS && oframebuffer.timestamp >= objectsFrameSDK.timestamp)
    {
        //Release the native memory held by the previous frame's 2D masks before replacing it.
        for (int i = 0; i < objectsFrameSDK.numObject; i++)
        {
            sl.ZEDMat oldmat = new sl.ZEDMat(objectsFrameSDK.objectData[i].mask);
            oldmat.Free();
        }
        objectsFrameSDK = oframebuffer; //Adopt the new frame.
        requestobjectsframe = false; //Stop requesting until this frame has been consumed.
        newobjectsframeready = true; //Signal UpdateObjectsDetection() that new results are ready.
    }
}
///
/// Switchs the state of the object detection pause.
/// 切换对象检测暂停的状态。
/// Lance:如何传入的state为true的话,就会暂停物品检测的功能。相当于一个暂停开关
///
/// If set to true state, the object detection will pause. It will resume otherwise
public void SwitchObjectDetectionPauseState(bool state)
{
    //Only forward the pause request when the camera exists and the module is running.
    if (zedCamera != null && objectDetectionRunning)
    {
        zedCamera.PauseObjectsDetection(state);
    }
}
#endregion
///
// AR REGION //
///
#region AR_CAMERAS
///
/// Stereo rig that adjusts images from ZED_Rig_Stereo to look correct in the HMD.
/// Hidden by default as it rarely needs to be changed.
///
[HideInInspector] public GameObject zedRigDisplayer;
//Mixed-reality helper component attached to zedRigDisplayer; wired up in CreateZEDRigDisplayer().
private ZEDMixedRealityPlugin arRig;
///
/// Create a GameObject to display the ZED in an headset (ZED-M Only).
/// 创建一个GameObject以在耳机中显示ZED(仅限ZED-M)。
///
///
private GameObject CreateZEDRigDisplayer()
{
    //Make sure we don't already have one, such as if the camera disconnected and reconnected.
    if (zedRigDisplayer != null) Destroy(zedRigDisplayer);
    zedRigDisplayer = new GameObject("ZEDRigDisplayer");
    arRig = zedRigDisplayer.AddComponent<ZEDMixedRealityPlugin>();
    /*Screens left and right: quads that display the final ZED image for each eye. */
    GameObject leftScreen = GameObject.CreatePrimitive(PrimitiveType.Quad);
    leftScreen.name = "Quad - Left";
    MeshRenderer meshLeftScreen = leftScreen.GetComponent<MeshRenderer>();
    //Disable all lighting/probe/shadow features: the quad just shows a raw camera image.
    meshLeftScreen.lightProbeUsage = UnityEngine.Rendering.LightProbeUsage.Off;
    meshLeftScreen.reflectionProbeUsage = UnityEngine.Rendering.ReflectionProbeUsage.Off;
    meshLeftScreen.receiveShadows = false;
    meshLeftScreen.motionVectorGenerationMode = MotionVectorGenerationMode.ForceNoMotion;
    meshLeftScreen.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
    meshLeftScreen.sharedMaterial = Resources.Load("Materials/Unlit/Mat_ZED_Unlit") as Material;
    leftScreen.layer = arLayer;
    GameObject.Destroy(leftScreen.GetComponent<MeshCollider>()); //No physics needed on a display quad.
    GameObject rightScreen = GameObject.CreatePrimitive(PrimitiveType.Quad);
    rightScreen.name = "Quad - Right";
    MeshRenderer meshRightScreen = rightScreen.GetComponent<MeshRenderer>();
    meshRightScreen.lightProbeUsage = UnityEngine.Rendering.LightProbeUsage.Off;
    meshRightScreen.reflectionProbeUsage = UnityEngine.Rendering.ReflectionProbeUsage.Off;
    meshRightScreen.receiveShadows = false;
    meshRightScreen.motionVectorGenerationMode = MotionVectorGenerationMode.ForceNoMotion;
    meshRightScreen.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
    GameObject.Destroy(rightScreen.GetComponent<MeshCollider>());
    meshRightScreen.sharedMaterial = Resources.Load("Materials/Unlit/Mat_ZED_Unlit") as Material;
    rightScreen.layer = arLayer;
    /*Cameras left and right: render only the AR layer containing the quads above. */
    GameObject camLeft = new GameObject("cameraLeft");
    camLeft.transform.SetParent(zedRigDisplayer.transform);
    Camera camL = camLeft.AddComponent<Camera>();
    camL.stereoTargetEye = StereoTargetEyeMask.Both; //Temporary setting to fix loading screen issue.
    camL.renderingPath = RenderingPath.Forward; //Minimal overhead
    camL.clearFlags = CameraClearFlags.Color;
    camL.backgroundColor = Color.black;
    camL.cullingMask = 1 << arLayer; //Only sees the AR-layer display quads.
    camL.allowHDR = false;
    camL.allowMSAA = false;
    camL.depth = camLeftTransform.GetComponent<Camera>().depth; //Match the source eye camera's render order.
    GameObject camRight = new GameObject("cameraRight");
    camRight.transform.SetParent(zedRigDisplayer.transform);
    Camera camR = camRight.AddComponent<Camera>();
    camR.renderingPath = RenderingPath.Forward; //Minimal overhead
    camR.clearFlags = CameraClearFlags.Color;
    camR.backgroundColor = Color.black;
    camR.stereoTargetEye = StereoTargetEyeMask.Both; //Temporary setting to fix loading screen issue.
    camR.cullingMask = 1 << arLayer;
    camR.allowHDR = false;
    camR.allowMSAA = false;
    camR.depth = camRightTransform.GetComponent<Camera>().depth;
    //Make each quad visible only to its own eye's camera.
    HideFromWrongCameras.RegisterZEDCam(camL);
    HideFromWrongCameras lhider = leftScreen.AddComponent<HideFromWrongCameras>();
    lhider.SetRenderCamera(camL);
    lhider.showInNonZEDCameras = false;
    HideFromWrongCameras.RegisterZEDCam(camR);
    HideFromWrongCameras rhider = rightScreen.AddComponent<HideFromWrongCameras>();
    rhider.SetRenderCamera(camR);
    rhider.showInNonZEDCameras = false;
    SetLayerRecursively(camRight, arLayer);
    SetLayerRecursively(camLeft, arLayer);
    //Hide the AR rig's layer in the editor's scene view unless showarrig is set.
#if UNITY_EDITOR
    if (!showarrig)
    {
        LayerMask layerNumberBinary = (1 << arLayer); //Convert layer index into binary number.
        LayerMask flippedVisibleLayers = ~UnityEditor.Tools.visibleLayers;
        UnityEditor.Tools.visibleLayers = ~(flippedVisibleLayers | layerNumberBinary);
    }
#endif
    leftScreen.transform.SetParent(zedRigDisplayer.transform);
    rightScreen.transform.SetParent(zedRigDisplayer.transform);
    //Wire everything into the mixed-reality plugin.
    arRig.finalCameraLeft = camLeft;
    arRig.finalCameraRight = camRight;
    arRig.ZEDEyeLeft = camLeftTransform.gameObject;
    arRig.ZEDEyeRight = camRightTransform.gameObject;
    arRig.quadLeft = leftScreen.transform;
    arRig.quadRight = rightScreen.transform;
    ZEDMixedRealityPlugin.OnHmdCalibChanged += CalibrationHasChanged; //Re-apply offsets when calibration changes.
    if (hasXRDevice())
        HMDDevice = XRDevice.model;
    return zedRigDisplayer;
}
#endregion
#region MIRROR
private ZEDMirror mirror = null; //Fills the mirror camera's output via a Graphics.Blit (see CreateMirror()).
private GameObject mirrorContainer = null; //Hidden parent object holding the mirror camera.
/// <summary>
/// Creates (or reuses) a hidden camera whose output is a mirror view of the ZED's left image.
/// The camera renders nothing itself; ZEDMirror fills in its final image.
/// </summary>
void CreateMirror()
{
    GameObject mirrorCamObject;
    Camera mirrorCam;

    if (mirrorContainer != null)
    {
        //Already built on a previous call: reuse the existing camera.
        mirrorCamObject = mirror.gameObject;
        mirrorCam = mirrorCamObject.GetComponent<Camera>();
    }
    else
    {
        //First call: build the hidden container and the mirror camera inside it.
        mirrorContainer = new GameObject("Mirror");
        mirrorContainer.hideFlags = HideFlags.HideInHierarchy;

        mirrorCamObject = new GameObject("MirrorCamera");
        mirrorCamObject.hideFlags = HideFlags.HideInHierarchy;
        mirror = mirrorCamObject.AddComponent<ZEDMirror>();
        mirror.manager = this;
        mirrorCam = mirrorCamObject.AddComponent<Camera>();
    }

    mirrorCamObject.transform.parent = mirrorContainer.transform;

    mirrorCam.stereoTargetEye = StereoTargetEyeMask.None;
    mirrorCam.renderingPath = RenderingPath.Forward; //Minimal overhead.
    mirrorCam.clearFlags = CameraClearFlags.Color;
    mirrorCam.backgroundColor = Color.black;
    //It should see nothing. It gets its final image entirely from a Graphics.Blit call in ZEDMirror.
    mirrorCam.cullingMask = 0;
    mirrorCam.allowHDR = false;
    mirrorCam.allowMSAA = false;
    mirrorCam.useOcclusionCulling = false;
    //Make sure it renders after the left cam so we can copy texture from latest frame.
    mirrorCam.depth = cameraLeft.GetComponent<Camera>().depth;
}
#endregion
/// <summary>
/// Closes out the current stream, then starts it up again while maintaining tracking data.
/// Used when the ZED becomes unplugged, or you want to change a setting at runtime that
/// requires re-initializing the camera.
/// </summary>
public void Reset()
{
    //Save the current tracking pose so it survives the restart.
    if (enableTracking && isTrackingEnable)
    {
        zedCamera.GetPosition(ref zedOrientation, ref zedPosition);
    }

    CloseManager(); //Shut down the current camera connection first.

    //Clear the flags that gate the opening sequence so startup can run again from scratch.
    openingLaunched = false;
    running = false;
    numberTriesOpening = 0;
    forceCloseInit = false;

    Awake(); //Re-run initialization as if the component had just loaded.
}
/// <summary>
/// Applies the initial camera video settings according to the chosen initialization mode.
/// Logs an error and does nothing if the ZED camera is not yet ready.
/// </summary>
/// <param name="mode">How the starting video settings should be determined.</param>
public void InitVideoSettings(VideoSettingsInitMode mode)
{
    if (!zedCamera.IsCameraReady)
    {
        Debug.LogError("Tried to apply camera settings before ZED camera was ready.");
        return;
    }

    if (mode == VideoSettingsInitMode.Custom)
    {
        //Push the values currently set on this component onto the camera.
        ApplyLocalVideoSettingsToZED();
    }
    else if (mode == VideoSettingsInitMode.Default)
    {
        //Reset the camera to its default settings, then mirror those values locally.
        zedCamera.ResetCameraSettings();
        GetCurrentVideoSettings();
    }
    else
    {
        //VideoSettingsInitMode.LoadFromSDK (and any other value): this is the SDK's
        //default behavior, so nothing to specify — just read the ZED's values locally.
        GetCurrentVideoSettings();
    }
}
/// <summary>
/// Reads the video settings currently applied to the ZED camera into this component's
/// local fields, so local state matches the hardware.
/// </summary>
private void GetCurrentVideoSettings()
{
    videoBrightness = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.BRIGHTNESS);
    //Fixed: this previously queried SATURATION, so videoContrast silently received the saturation value.
    videoContrast = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.CONTRAST);
    videoHue = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.HUE);
    videoSaturation = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.SATURATION);
    videoSharpness = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.SHARPNESS);
    videoGamma = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.GAMMA);

    //Gain/exposure values are only read when auto gain/exposure is off,
    //matching how ApplyLocalVideoSettingsToZED only writes them in that case.
    videoAutoGainExposure = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.AEC_AGC) == 1;
    if (!videoAutoGainExposure)
    {
        videoGain = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.GAIN);
        videoExposure = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.EXPOSURE);
    }

    //Likewise, manual white balance is only read when auto white balance is off.
    videoAutoWhiteBalance = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.AUTO_WHITEBALANCE) == 1;
    if (!videoAutoWhiteBalance)
    {
        videoWhiteBalance = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.WHITEBALANCE);
    }

    videoLEDStatus = zedCamera.GetCameraSettings(sl.CAMERA_SETTINGS.LED_STATUS) == 1;
}
/// <summary>
/// Pushes this component's local video setting fields onto the ZED camera.
/// Counterpart of GetCurrentVideoSettings().
/// </summary>
private void ApplyLocalVideoSettingsToZED()
{
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.BRIGHTNESS, videoBrightness);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.CONTRAST, videoContrast);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.HUE, videoHue);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.SATURATION, videoSaturation);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.SHARPNESS, videoSharpness);
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.GAMMA, videoGamma);

    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.AEC_AGC, videoAutoGainExposure ? 1 : 0);
    if (!videoAutoGainExposure)
    {
        //Manual gain/exposure only apply when auto gain/exposure is off.
        zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.GAIN, videoGain);
        zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.EXPOSURE, videoExposure);
    }

    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.AUTO_WHITEBALANCE, videoAutoWhiteBalance ? 1 : 0);
    if (!videoAutoWhiteBalance)
    {
        //Manual white balance only applies when auto white balance is off.
        zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.WHITEBALANCE, videoWhiteBalance);
    }

    //Fixed: LED status was hard-coded to 1 (on), ignoring the local videoLEDStatus field
    //that GetCurrentVideoSettings() reads back — "apply local settings" should apply it.
    zedCamera.SetCameraSettings(sl.CAMERA_SETTINGS.LED_STATUS, videoLEDStatus ? 1 : 0);
}
#region EventHandler
/// <summary>
/// Changes the real-world brightness by setting the brightness value in the shaders
/// rendering the ZED image.
/// </summary>
/// <param name="newVal">New brightness value to be applied. Should be between 0 and 100.</param>
public void SetCameraBrightness(int newVal)
{
    //The shader property takes a 0-1 factor, so normalize the 0-100 input.
    float brightnessFactor = newVal / 100f;
    SetFloatValueOnPlaneMaterials("_ZEDFactorAffectReal", brightnessFactor);
}
#if ZED_HDRP
/// <summary>
/// Sets the self-illumination value on the materials rendering the ZED image (HDRP pipeline only).
/// </summary>
/// <param name="newVal">New value for the shader's _SelfIllumination property.</param>
public void SetSelfIllumination(float newVal)
{
    SetFloatValueOnPlaneMaterials("_SelfIllumination", newVal);
}
#endif
/// <summary>
/// Sets the maximum depth range of real-world objects. Pixels further than this range are discarded.
/// </summary>
/// <param name="newVal">Furthest distance, in meters, that the camera will display pixels for.
/// Values outside 0-40 are clamped (with a warning).</param>
public void SetMaxDepthRange(float newVal)
{
    bool outOfRange = newVal < 0 || newVal > 40;
    if (outOfRange)
    {
        //Clamp to the supported 0-40m window rather than silently ignoring the request.
        Debug.LogWarning("Tried to set max depth range to " + newVal + "m. Must be within 0m and 40m.");
        newVal = Mathf.Clamp(newVal, 0, 40);
    }

    SetFloatValueOnPlaneMaterials("_MaxDepth", newVal);
}
/// <summary>
/// Sets a float property on the material(s) rendering the ZED image.
/// Used to set things like brightness and maximum depth.
/// </summary>
/// <param name="propertyname">Name of the value/property within the shader.</param>
/// <param name="newvalue">New value for the specified property.</param>
private void SetFloatValueOnPlaneMaterials(string propertyname, float newvalue)
{
    foreach (ZEDRenderingPlane renderPlane in GetComponentsInChildren<ZEDRenderingPlane>())
    {
        Material targetMaterial;
        switch (renderPlane.ActualRenderingPath)
        {
            case RenderingPath.Forward:
                //Forward rendering: the property lives on the canvas quad's material.
                targetMaterial = renderPlane.canvas.GetComponent<Renderer>().material;
                break;
            case RenderingPath.DeferredShading:
                //Deferred rendering: the property lives on the dedicated deferred material.
                targetMaterial = renderPlane.deferredMat;
                break;
            default:
                Debug.LogError("Can't set " + propertyname + " value for Rendering Path " +
                               renderPlane.ActualRenderingPath +
                               ": only Forward and DeferredShading are supported.");
                return;
        }

        targetMaterial.SetFloat(propertyname, newvalue);
    }
}
/// <summary>
/// Sets a bool property (stored as a 0/1 int) on the canvas material(s) rendering the ZED image.
/// Planes whose canvas has no MeshRenderer are skipped.
/// </summary>
/// <param name="propertyname">Name of the value/property within the shader.</param>
/// <param name="newvalue">New value for the specified property.</param>
private void SetBoolValueOnPlaneMaterials(string propertyname, bool newvalue)
{
    int shaderValue = newvalue ? 1 : 0; //Shaders have no bool type; encode as 0/1.
    foreach (ZEDRenderingPlane renderPlane in GetComponentsInChildren<ZEDRenderingPlane>())
    {
        MeshRenderer screenRenderer = renderPlane.canvas.GetComponent<MeshRenderer>();
        if (screenRenderer)
        {
            screenRenderer.material.SetInt(propertyname, shaderValue);
        }
    }
}
/// <summary>
/// Flag set to true when the HMD-to-ZED calibration file has changed during runtime.
/// Causes values from the new file to be applied during Update().
/// </summary>
private bool calibrationHasChanged = false;
/// <summary>
/// Sets the calibrationHasChanged flag to true, which causes the next Update() to
/// re-apply the HMD-to-ZED offsets. Subscribed to ZEDMixedRealityPlugin.OnHmdCalibChanged.
/// </summary>
private void CalibrationHasChanged()
{
    calibrationHasChanged = true;
}
#endregion
#if UNITY_EDITOR
/// <summary>
/// Handles tracking/streaming/graphics settings changed from the Inspector at runtime.
/// Enables or disables tracking and streaming on the live camera so its actual state
/// matches the toggles just edited, then reapplies rendering settings.
/// </summary>
void OnValidate()
{
    if (zedCamera != null)
    {
        // If tracking has been switched on
        if (zedCamera.IsCameraReady && !isTrackingEnable && enableTracking)
        {
            //Enables tracking and initializes the first position of the camera.
            //NOTE: enableTracking is reassigned inside the condition to EnableTracking's success/failure,
            //so a failed call also reverts the Inspector toggle.
            if (!(enableTracking = (zedCamera.EnableTracking(ref zedOrientation, ref zedPosition,
                enableSpatialMemory, enablePoseSmoothing, estimateInitialPosition,
                trackingIsStatic,
                enableIMUFusion, pathSpatialMemory) == sl.ERROR_CODE.SUCCESS)))
            {
                isZEDTracked = false;
                throw new Exception(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.TRACKING_NOT_INITIALIZED));
            }
            else
            {
                isZEDTracked = true;
                isTrackingEnable = true;
            }
        }

        // If tracking has been switched off
        if (isTrackingEnable && !enableTracking)
        {
            isZEDTracked = false;
            lock (zedCamera.grabLock) //Synchronize with the grab via the camera's grabLock.
            {
                zedCamera.DisableTracking();
            }
            isTrackingEnable = false;
        }

        // If streaming has been switched on
        if (enableStreaming && !isStreamingEnable)
        {
            lock (zedCamera.grabLock)
            {
                sl.ERROR_CODE err = zedCamera.EnableStreaming(streamingCodec, (uint) bitrate,
                    (ushort) streamingPort, gopSize, adaptativeBitrate, chunkSize);
                if (err == sl.ERROR_CODE.SUCCESS)
                {
                    isStreamingEnable = true;
                }
                else
                {
                    //Streaming failed to start: revert the Inspector toggle as well.
                    enableStreaming = false;
                    isStreamingEnable = false;
                }
            }
        }

        // If streaming has been switched off
        if (!enableStreaming && isStreamingEnable)
        {
            lock (zedCamera.grabLock)
            {
                zedCamera.DisableStreaming();
                isStreamingEnable = false;
            }
        }

        //Reapplies graphics settings based on current values.
        setRenderingSettings();
    }
}
#endif
}