实现效果:
Unity版本:5.3.5
项目包下载地址:点击打开链接
关键原理:
1.利用得到的Kinect的Body信息
2.利用Body信息来检测面部的创建和移除,面部追踪ID和Body追踪ID相同
3.新建面部识别源
4.从源中得到面部的各种信息,比如表情状态、旋转、五官点位等等
主要知识点:
1.FaceFrameSource.Create
2.FaceFrameFeatures
3.FaceFrameReader和FaceFrameResult
主要脚本:
KinectFaceControl用于获取Kinect检测到的身体信息并根据其创建面部信息
CustomKinectFace用于测试面部各种信息的获取和显示,提供事件监听接口
CamerColorView用于UGUI显示Kinect摄像头内容
using UnityEngine;
using System.Collections;
using Windows.Kinect;
using UnityEngine.UI;
/// <summary>
/// Camera color view: pulls RGB pixel data from the Kinect color stream every
/// frame and uploads it into a Texture2D displayed on a UGUI RawImage.
/// Same acquire/copy/dispose pattern as reading body data.
/// </summary>
public class CamerColorView : MonoBehaviour
{
    // Kinect sensor handle
    private KinectSensor _kinectSensor;
    // Reader for the color frame stream
    private ColorFrameReader _colorReader;
    // Texture the color bytes are uploaded into (reused, never reallocated)
    private Texture2D _textureInfo;
    // Raw RGBA byte buffer, reused every frame to avoid per-frame allocation
    private byte[] _colorData;
    // UGUI target that shows the texture
    private RawImage _cameraMaterial;

    // Use this for initialization
    void Start()
    {
        _cameraMaterial = this.gameObject.GetComponent<RawImage>();
        // Kinect delivers rows top-down; flip the V axis so the image is upright.
        _cameraMaterial.uvRect = new Rect(0, 0, 1, -1);
        _kinectSensor = KinectSensor.GetDefault();
        if (_kinectSensor == null)
            return;
        _colorReader = _kinectSensor.ColorFrameSource.OpenReader();
        // Describe the frame in RGBA so buffer size and texture format agree.
        FrameDescription color = _kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        _textureInfo = new Texture2D(color.Width, color.Height, TextureFormat.RGBA32, false);
        _colorData = new byte[color.BytesPerPixel * color.LengthInPixels];
        // Assign once here: the texture object never changes, only its contents.
        _cameraMaterial.texture = _textureInfo;
        if (!_kinectSensor.IsOpen)
            _kinectSensor.Open();
    }

    // Update is called once per frame
    void Update()
    {
        if (_colorReader == null)
            return;
        ColorFrame frame = _colorReader.AcquireLatestFrame();
        if (frame == null)
            return;
        try
        {
            // Copy this frame's pixels (converted to RGBA) into the reusable buffer.
            frame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Rgba);
            // Upload the bytes and push them to the GPU.
            _textureInfo.LoadRawTextureData(_colorData);
            _textureInfo.Apply();
        }
        finally
        {
            // Color frames are pooled by the SDK; dispose promptly or the
            // stream stops delivering new frames.
            frame.Dispose();
        }
    }

    void OnDestroy()
    {
        // Release the reader so the sensor stream is not leaked when this
        // component is destroyed.
        if (_colorReader != null)
        {
            _colorReader.Dispose();
            _colorReader = null;
        }
    }
}
using UnityEngine;
using System.Collections;
using Windows.Kinect;
using System.Collections.Generic;
using Microsoft.Kinect.Face;
/// <summary>
/// Body-driven face controller: reads tracked bodies from the Kinect every
/// frame and keeps exactly one CustomKinectFace per tracked body — creating
/// faces when a person appears and disposing them when tracking is lost.
/// Face tracking IDs are identical to body tracking IDs.
/// </summary>
public class KinectFaceControl : MonoBehaviour
{
    // Kinect sensor handle
    private KinectSensor _kinectSensor;
    // Reader for the body frame stream
    private BodyFrameReader _bodyReader;
    // Body data refreshed from the sensor every frame
    private Body[] _bodyDatas = null;
    // Face logic objects in the scene, keyed by body tracking ID
    private Dictionary<ulong, CustomKinectFace> _faces = new Dictionary<ulong, CustomKinectFace>();
    // Tracking IDs seen during the current frame
    private List<ulong> _nowTrackedIDs = new List<ulong>();
    // Snapshot of IDs that already have a face object
    private List<ulong> _nowCreatedBodyID = new List<ulong>();

    void Start()
    {
        // Get the Kinect device and open the body stream.
        _kinectSensor = KinectSensor.GetDefault();
        if (_kinectSensor != null)
        {
            _bodyReader = _kinectSensor.BodyFrameSource.OpenReader();
            if (!_kinectSensor.IsOpen)
                _kinectSensor.Open();
        }
    }

    void Update()
    {
        // Pull the latest body data, then sync the face objects to it.
        GetBodyInfos();
        UpdateFaseObject();
    }

    /// <summary>
    /// Fetches the latest body data from the sensor into _bodyDatas.
    /// </summary>
    private void GetBodyInfos()
    {
        if (_bodyReader == null)
            return;
        BodyFrame frame = _bodyReader.AcquireLatestFrame();
        if (frame == null)
            return;
        try
        {
            // Lazily size the array to the sensor's body capacity.
            if (_bodyDatas == null)
                _bodyDatas = new Body[_kinectSensor.BodyFrameSource.BodyCount];
            // Refresh body data for this frame.
            frame.GetAndRefreshBodyData(_bodyDatas);
        }
        finally
        {
            // Body frames are pooled; dispose promptly or the stream stalls.
            frame.Dispose();
        }
    }

    /// <summary>
    /// Creates faces for newly tracked bodies, updates every tracked face,
    /// and disposes faces whose body is no longer tracked.
    /// </summary>
    private void UpdateFaseObject()
    {
        // Nothing to do until we have received body data at least once.
        if (_bodyDatas == null)
            return;
        _nowTrackedIDs.Clear();
        // Record the IDs tracked this frame (TrackingId is unique per person).
        foreach (Body body in _bodyDatas)
        {
            if (body == null)
                continue;
            if (body.IsTracked)
                _nowTrackedIDs.Add(body.TrackingId);
        }
        // Snapshot the existing keys so we can remove from _faces while iterating.
        _nowCreatedBodyID = new List<ulong>(_faces.Keys);
        foreach (ulong trackingID in _nowCreatedBodyID)
        {
            // The sensor no longer sees this person: tear down its face and
            // clear the record.
            if (!_nowTrackedIDs.Contains(trackingID))
            {
                _faces[trackingID].Dispose();
                _faces.Remove(trackingID);
            }
        }
        // Create faces for new bodies and update every tracked one.
        foreach (Body body in _bodyDatas)
        {
            if (body == null)
                continue;
            if (body.IsTracked)
            {
                // No face yet for this body: create one.
                if (!_faces.ContainsKey(body.TrackingId))
                    CreateFaceObject(body.TrackingId);
                // Update this face from its latest face frame.
                _faces[body.TrackingId].UpdateFace();
            }
        }
    }

    /// <summary>
    /// Creates the face-tracking logic object for one body.
    /// </summary>
    /// <param name="id">Body tracking ID (shared with the face source).</param>
    private void CreateFaceObject(ulong id)
    {
        CustomKinectFace info = new CustomKinectFace();
        info.CreateKinectFace(_kinectSensor, id);
        // Subscribe to landmark-point updates; the other face events can be
        // hooked here the same way for custom logic.
        info.handleFacePointInfo += HandleFacePointInfo;
        _faces.Add(id, info);
    }

    /// <summary>
    /// Callback for face landmark points; put point-driven logic here.
    /// </summary>
    private void HandleFacePointInfo(Dictionary<FacePointType, Point> pointInfoDic)
    {
        // Intentionally empty: extension point for consumers.
    }

    private int index = 0;

    /// <summary>
    /// Shows per-face debug information on screen.
    /// </summary>
    void OnGUI()
    {
        index = 0;
        foreach (CustomKinectFace faceLogic in _faces.Values)
        {
            faceLogic.DebugGUIInfo(index);
            ++index;
        }
    }

    void OnDestroy()
    {
        // Release all face objects and the body reader when this component
        // goes away, so sensor streams are not leaked.
        foreach (CustomKinectFace faceLogic in _faces.Values)
            faceLogic.Dispose();
        _faces.Clear();
        if (_bodyReader != null)
        {
            _bodyReader.Dispose();
            _bodyReader = null;
        }
    }
}
using UnityEngine;
using System.Collections;
using Windows.Kinect;
using Microsoft.Kinect.Face;
using System.Collections.Generic;
// Delegate for face state properties (happy, glasses, eyes closed, ...).
public delegate void HandleFaceProperty(Dictionary<FaceProperty, DetectionResult> property);
// Delegate for the face rotation.
public delegate void HandleFaceQuaternion(Quaternion quat);
// Delegate for the face bounding rectangle in color space.
public delegate void HandleFaceRect(RectI rect);
// Delegate for the five face landmark points in color space.
public delegate void HandleFacePointInfo(Dictionary<FacePointType, Point> pointInfoDic);
/// <summary>
/// Custom face class: wraps a FaceFrameSource/FaceFrameReader for one tracked
/// body and exposes events for the landmark points, bounding box, rotation and
/// state properties. Also provides debug visualization (landmark spheres,
/// bounding-box lines, on-screen GUI labels).
/// </summary>
public class CustomKinectFace
{
    // Face tracking ID; identical to the body's TrackingId.
    private ulong _trackingId = 0;
    // Reader for the face frame stream.
    private FaceFrameReader _faceReader;
    // Result of the most recently processed face frame.
    private FaceFrameResult _nowResult;
    // Debug object that parents the landmark spheres.
    private GameObject _faceObject;
    // Debug line used to draw the face bounding box.
    private LineRenderer _faceLine;
    // First-face flag: used once to center the camera on the nose for viewing.
    private static bool _isFirst = true;
    // Toggles the debug visualization.
    private static bool _isDebug = true;

    /// <summary>Raised each frame with the face landmark points.</summary>
    public event HandleFacePointInfo handleFacePointInfo;
    /// <summary>Raised each frame with the face bounding box.</summary>
    public event HandleFaceRect handleFaceRect;
    /// <summary>Raised each frame with the face rotation.</summary>
    public event HandleFaceQuaternion handleFaceQuaternion;
    /// <summary>Raised each frame with the face state properties.</summary>
    public event HandleFaceProperty handleFaceProperty;

    /// <summary>
    /// Creates a Kinect face source bound to one body and opens its reader.
    /// </summary>
    /// <param name="sensor">Kinect device.</param>
    /// <param name="initialTrackingId">Body tracking ID; identifies whose face this is.</param>
    public void CreateKinectFace(KinectSensor sensor, ulong initialTrackingId)
    {
        _trackingId = initialTrackingId;
        // 1 - Create a face source from the device.
        // 2 - The tracking ID comes from body detection and distinguishes
        //     different people's faces.
        // 3 - The third argument selects which features the source computes;
        //     OR together whatever you need.
        FaceFrameSource source = FaceFrameSource.Create(sensor, _trackingId,
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.RightEyeClosed |
            FaceFrameFeatures.LookingAway |
            FaceFrameFeatures.MouthMoved |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.RotationOrientation);
        _faceReader = source.OpenReader();
    }

    /// <summary>
    /// Releases the face reader and removes the debug object from the scene.
    /// </summary>
    public void Dispose()
    {
        // Stop reading face frames so the sensor stream is not leaked.
        if (_faceReader != null)
        {
            _faceReader.Dispose();
            _faceReader = null;
        }
        if (_faceObject != null)
            GameObject.Destroy(_faceObject);
    }

    /// <summary>
    /// Pulls the latest face frame, stores its result, refreshes the debug
    /// visualization and fires the events.
    /// </summary>
    public void UpdateFace()
    {
        if (_faceReader == null)
            return;
        FaceFrame faceFrame = _faceReader.AcquireLatestFrame();
        if (faceFrame == null)
            return;
        try
        {
            // The source may momentarily lose the body; skip invalid frames.
            if (!faceFrame.IsTrackingIdValid)
                return;
            // Extract the concrete result from the frame.
            _nowResult = faceFrame.FaceFrameResult;
            if (_nowResult == null)
                return;
            // Debug visualization (only active while _isDebug is true).
            DebugInfo();
            // Notify subscribers.
            HandleEvent();
        }
        finally
        {
            // Face frames are pooled; dispose on every path (the original code
            // leaked the frame on the early returns above).
            faceFrame.Dispose();
        }
    }

    /// <summary>
    /// Fires every subscribed event with data from the current result.
    /// </summary>
    public void HandleEvent()
    {
        if (handleFacePointInfo != null)
            handleFacePointInfo(_nowResult.FacePointsInColorSpace);
        if (handleFaceProperty != null)
            handleFaceProperty(_nowResult.FaceProperties);
        if (handleFaceQuaternion != null)
            handleFaceQuaternion(GetQuaternionFromeFace(_nowResult.FaceRotationQuaternion));
        if (handleFaceRect != null)
            handleFaceRect(_nowResult.FaceBoundingBoxInColorSpace);
    }

    /// <summary>
    /// Converts a face point to a scene position. Kinect color-space values
    /// are large, so divide by 100; the Y axis is inverted relative to Unity.
    /// </summary>
    public Vector3 GetVector3FromFacePoint(Point point)
    {
        return new Vector3(point.X / 100f, -point.Y / 100f, 0);
    }

    /// <summary>
    /// Converts a Kinect rotation vector into a Unity quaternion.
    /// </summary>
    public Quaternion GetQuaternionFromeFace(Windows.Kinect.Vector4 v4)
    {
        return new Quaternion(v4.X, v4.Y, v4.Z, v4.W);
    }

    /// <summary>
    /// Refreshes the debug visualization: landmark spheres plus a line loop
    /// around the face bounding box.
    /// </summary>
    private void DebugInfo()
    {
        if (!_isDebug)
            return;
        if (_faceObject == null)
            CreateDebugFaceObject();
        RefreshDebugFaceObject();
        // Draw the bounding box as a closed loop (5 points, same scale/flip
        // convention as GetVector3FromFacePoint).
        RectI box = _nowResult.FaceBoundingBoxInColorSpace;
        float left = box.Left / 100f;
        float right = box.Right / 100f;
        float top = -box.Top / 100f;
        float bottom = -box.Bottom / 100f;
        _faceLine.SetPosition(0, new Vector3(left, top, 0));
        _faceLine.SetPosition(1, new Vector3(right, top, 0));
        _faceLine.SetPosition(2, new Vector3(right, bottom, 0));
        _faceLine.SetPosition(3, new Vector3(left, bottom, 0));
        _faceLine.SetPosition(4, new Vector3(left, top, 0));
    }

    /// <summary>
    /// Draws debug GUI labels; call from a MonoBehaviour's OnGUI.
    /// </summary>
    /// <param name="index">Column index so multiple faces don't overlap.</param>
    public void DebugGUIInfo(int index)
    {
        if (_nowResult == null || !_isDebug)
            return;
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 0, 140, 50), "是否开心:" + _nowResult.FaceProperties[FaceProperty.Happy].ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 20, 140, 50), "是否戴眼镜:" + _nowResult.FaceProperties[FaceProperty.WearingGlasses].ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 40, 140, 50), "左眼是否关闭:" + _nowResult.FaceProperties[FaceProperty.LeftEyeClosed].ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 60, 140, 50), "右眼是否关闭:" + _nowResult.FaceProperties[FaceProperty.RightEyeClosed].ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 80, 140, 50), "嘴是否打开:" + _nowResult.FaceProperties[FaceProperty.MouthOpen].ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 100, 140, 50), "嘴是否移动:" + _nowResult.FaceProperties[FaceProperty.MouthMoved].ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 120, 140, 50), "是否看四周:" + _nowResult.FaceProperties[FaceProperty.LookingAway].ToString());
        Quaternion q = GetQuaternionFromeFace(_nowResult.FaceRotationQuaternion);
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 140, 140, 50), "X轴角度" + q.eulerAngles.x.ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 160, 140, 50), "y轴角度" + q.eulerAngles.y.ToString());
        GUI.Label(new Rect(Screen.width - 120 * (index + 1), 180, 140, 50), "z轴角度" + q.eulerAngles.z.ToString());
    }

    /// <summary>
    /// Creates the debug scene objects: a line renderer for the bounding box
    /// and one sphere per face landmark (named after its FacePointType).
    /// </summary>
    private void CreateDebugFaceObject()
    {
        _faceObject = new GameObject("Face:" + _trackingId);
        _faceLine = _faceObject.AddComponent<LineRenderer>();
        _faceLine.useWorldSpace = true;
        // Unity 5.x LineRenderer API.
        _faceLine.SetVertexCount(5);
        _faceLine.SetWidth(0.1f, 0.1f);
        foreach (FacePointType type in _nowResult.FacePointsInColorSpace.Keys)
        {
            GameObject pointObj = GameObject.CreatePrimitive(PrimitiveType.Sphere);
            pointObj.transform.localScale = Vector3.one * 0.3f;
            // Named by point type so RefreshDebugFaceObject can find it.
            pointObj.name = type.ToString();
            pointObj.transform.parent = _faceObject.transform;
        }
    }

    /// <summary>
    /// Moves each landmark sphere to its current position; on the very first
    /// update also centers the main camera on the nose for easier viewing.
    /// </summary>
    private void RefreshDebugFaceObject()
    {
        foreach (FacePointType type in _nowResult.FacePointsInColorSpace.Keys)
        {
            Transform pointObj = _faceObject.transform.Find(type.ToString());
            pointObj.transform.localPosition = GetVector3FromFacePoint(_nowResult.FacePointsInColorSpace[type]);
            // One-time camera placement for convenience.
            if (_isFirst && type == FacePointType.Nose)
            {
                Camera.main.transform.position = new Vector3(pointObj.transform.position.x, pointObj.transform.position.y, Camera.main.transform.position.z);
                _isFirst = false;
            }
        }
    }
}
额外知识点,如何得到Kinect的高清面部信息
关键用法:
//高清面部信息源
HighDefinitionFaceFrameSource source = HighDefinitionFaceFrameSource.Create(sensor);
source.TrackingId = _trackingId;
//信息流
HighDefinitionFaceFrameReader _faceHDReader = source.OpenReader();
//顶点信息
FaceAlignment _faceAlignment = FaceAlignment.Create();
//面部模型点信息
FaceModel _faceModel = FaceModel.Create();
//每一帧的高清面部信息
HighDefinitionFaceFrame faceFrame = _faceHDReader.AcquireLatestFrame();
//得到顶点和状态
faceFrame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
//得到模型点
_faceModel.CalculateVerticesForAlignment(_faceAlignment);