该样例是一个简单的坐姿识别(Seated 手势检测)示例,项目托管在 GitHub 上。
GestureDetector.cs
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
//------------------------------------------------------------------------------
namespace Microsoft.Samples.Kinect.DiscreteGestureBasics
{
using System;
using System.Collections.Generic;
using Microsoft.Kinect;
using Microsoft.Kinect.VisualGestureBuilder;
///
/// Gesture Detector类,用于侦听来自服务的VisualGestureBuilderFrame事件
/// 并使用“坐着”手势的最新结果更新关联的GestureResultView对象
/// Gesture Detector class which listens for VisualGestureBuilderFrame events from the service
/// and updates the associated GestureResultView object with the latest results for the 'Seated' gesture
///
public class GestureDetector : IDisposable
{
    /// <summary>
    /// Path to the gesture database that was trained with VGB (Visual Gesture Builder)
    /// </summary>
    private readonly string gestureDatabase = @"Database\Seated.gbd";

    /// <summary>
    /// Name of the discrete gesture in the database that we want to track
    /// </summary>
    private readonly string seatedGestureName = "Seated";

    /// <summary>
    /// Gesture frame source which should be tied to a body tracking ID
    /// </summary>
    private VisualGestureBuilderFrameSource vgbFrameSource = null;

    /// <summary>
    /// Gesture frame reader which will handle gesture events coming from the sensor
    /// </summary>
    private VisualGestureBuilderFrameReader vgbFrameReader = null;

    /// <summary>
    /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
    /// </summary>
    /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
    /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
    public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
    {
        if (kinectSensor == null)
        {
            throw new ArgumentNullException("kinectSensor");
        }

        if (gestureResultView == null)
        {
            throw new ArgumentNullException("gestureResultView");
        }

        this.GestureResultView = gestureResultView;

        // create the vgb source; the associated body tracking ID will be set
        // when a valid body frame arrives from the sensor
        this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
        this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

        // open the reader for the vgb frames; it starts paused and is
        // un-paused once a valid body is being tracked
        this.vgbFrameReader = this.vgbFrameSource.OpenReader();
        if (this.vgbFrameReader != null)
        {
            this.vgbFrameReader.IsPaused = true;
            this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
        }

        // load the 'Seated' gesture from the gesture database.
        // we could load all available gestures with vgbFrameSource.AddGestures(database.AvailableGestures),
        // but this program only tracks one discrete gesture, so we load it by name
        using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
        {
            foreach (Gesture gesture in database.AvailableGestures)
            {
                if (gesture.Name.Equals(this.seatedGestureName))
                {
                    this.vgbFrameSource.AddGesture(gesture);
                }
            }
        }
    }

    /// <summary>
    /// Gets the GestureResultView object which stores the detector results for display in the UI
    /// </summary>
    public GestureResultView GestureResultView { get; private set; }

    /// <summary>
    /// Gets or sets the body tracking ID associated with the current detector.
    /// The tracking ID can change whenever a body comes in/out of scope.
    /// </summary>
    public ulong TrackingId
    {
        get
        {
            return this.vgbFrameSource.TrackingId;
        }

        set
        {
            if (this.vgbFrameSource.TrackingId != value)
            {
                this.vgbFrameSource.TrackingId = value;
            }
        }
    }

    /// <summary>
    /// Gets or sets a value indicating whether or not the detector is currently paused.
    /// If the body tracking ID associated with the detector is not valid, the detector should be paused.
    /// </summary>
    public bool IsPaused
    {
        get
        {
            return this.vgbFrameReader.IsPaused;
        }

        set
        {
            if (this.vgbFrameReader.IsPaused != value)
            {
                this.vgbFrameReader.IsPaused = value;
            }
        }
    }

    /// <summary>
    /// Disposes all unmanaged resources for the class
    /// </summary>
    public void Dispose()
    {
        this.Dispose(true);
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// Disposes the VisualGestureBuilderFrameSource and VisualGestureBuilderFrameReader objects
    /// </summary>
    /// <param name="disposing">True if Dispose was called directly, false if the GC handles the disposing</param>
    protected virtual void Dispose(bool disposing)
    {
        if (disposing)
        {
            // unhook the event handlers before disposing so no callbacks
            // arrive on a disposed object
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.FrameArrived -= this.Reader_GestureFrameArrived;
                this.vgbFrameReader.Dispose();
                this.vgbFrameReader = null;
            }

            if (this.vgbFrameSource != null)
            {
                this.vgbFrameSource.TrackingIdLost -= this.Source_TrackingIdLost;
                this.vgbFrameSource.Dispose();
                this.vgbFrameSource = null;
            }
        }
    }

    /// <summary>
    /// Handles gesture detection results arriving from the sensor for the associated body tracking Id
    /// </summary>
    /// <param name="sender">object sending the event</param>
    /// <param name="e">event arguments</param>
    private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
    {
        VisualGestureBuilderFrameReference frameReference = e.FrameReference;
        using (VisualGestureBuilderFrame frame = frameReference.AcquireFrame())
        {
            if (frame != null)
            {
                // get the discrete gesture results which arrived with the latest frame
                // (generic type arguments restored; they were stripped by the web paste)
                IReadOnlyDictionary<Gesture, DiscreteGestureResult> discreteResults = frame.DiscreteGestureResults;

                if (discreteResults != null)
                {
                    // we only have one gesture in this source object, but you can get multiple gestures
                    foreach (Gesture gesture in this.vgbFrameSource.Gestures)
                    {
                        if (gesture.Name.Equals(this.seatedGestureName) && gesture.GestureType == GestureType.Discrete)
                        {
                            DiscreteGestureResult result = null;
                            discreteResults.TryGetValue(gesture, out result);

                            if (result != null)
                            {
                                // update the GestureResultView object with new gesture result values
                                this.GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence);
                            }
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Handles the TrackingIdLost event for the VisualGestureBuilderSource object
    /// </summary>
    /// <param name="sender">object sending the event</param>
    /// <param name="e">event arguments</param>
    private void Source_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
    {
        // update the GestureResultView object to show the 'Not Tracked' image in the UI
        this.GestureResultView.UpdateGestureResult(false, false, 0.0f);
    }
}
}
GestureResultView.cs
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
//------------------------------------------------------------------------------
namespace Microsoft.Samples.Kinect.DiscreteGestureBasics
{
using System;
using System.ComponentModel;
using System.Runtime.CompilerServices;
using System.Windows.Media;
using System.Windows.Media.Imaging;
///
/// Stores discrete gesture results for the GestureDetector.存储gesturedetector的离散手势
/// Properties are stored/updated for display in the UI.存储/更新属性以在UI中显示
///
public sealed class GestureResultView : INotifyPropertyChanged
{
    /// <summary>
    /// Image to show when the 'detected' property is true for a tracked body
    /// </summary>
    private readonly ImageSource seatedImage = new BitmapImage(new Uri(@"Images\Seated.png", UriKind.Relative));

    /// <summary>
    /// Image to show when the 'detected' property is false for a tracked body
    /// </summary>
    private readonly ImageSource notSeatedImage = new BitmapImage(new Uri(@"Images\NotSeated.png", UriKind.Relative));

    /// <summary>
    /// Image to show when the body associated with the GestureResultView object is not being tracked
    /// </summary>
    private readonly ImageSource notTrackedImage = new BitmapImage(new Uri(@"Images\NotTracked.png", UriKind.Relative));

    /// <summary>
    /// Array of brush colors to use for a tracked body; array position corresponds
    /// to the body colors used in the KinectBodyView class (one color per body, six bodies)
    /// </summary>
    private readonly Brush[] trackedColors = new Brush[] { Brushes.Red, Brushes.Orange, Brushes.Green, Brushes.Blue, Brushes.Indigo, Brushes.Violet };

    /// <summary>
    /// Brush color to use as background in the UI
    /// </summary>
    private Brush bodyColor = Brushes.Gray;

    /// <summary>
    /// The body index (0-5) associated with the current gesture detector
    /// </summary>
    private int bodyIndex = 0;

    /// <summary>
    /// Current confidence value reported by the discrete gesture
    /// </summary>
    private float confidence = 0.0f;

    /// <summary>
    /// True, if the discrete gesture is currently being detected
    /// </summary>
    private bool detected = false;

    /// <summary>
    /// Image to display in UI which corresponds to tracking/detection state
    /// </summary>
    private ImageSource imageSource = null;

    /// <summary>
    /// True, if the body is currently being tracked
    /// </summary>
    private bool isTracked = false;

    /// <summary>
    /// Initializes a new instance of the GestureResultView class and sets initial property values
    /// </summary>
    /// <param name="bodyIndex">Body Index associated with the current gesture detector</param>
    /// <param name="isTracked">True, if the body is currently tracked</param>
    /// <param name="detected">True, if the gesture is currently detected for the associated body</param>
    /// <param name="confidence">Confidence value for detection of the 'Seated' gesture</param>
    public GestureResultView(int bodyIndex, bool isTracked, bool detected, float confidence)
    {
        this.BodyIndex = bodyIndex;
        this.IsTracked = isTracked;
        this.Detected = detected;
        this.Confidence = confidence;
        this.ImageSource = this.notTrackedImage;
    }

    /// <summary>
    /// INotifyPropertyChanged PropertyChanged event to allow window controls to bind to changeable data
    /// </summary>
    public event PropertyChangedEventHandler PropertyChanged;

    /// <summary>
    /// Gets the body index associated with the current gesture detector result
    /// </summary>
    public int BodyIndex
    {
        get
        {
            return this.bodyIndex;
        }

        private set
        {
            if (this.bodyIndex != value)
            {
                this.bodyIndex = value;
                this.NotifyPropertyChanged();
            }
        }
    }

    /// <summary>
    /// Gets the body color corresponding to the body index for the result
    /// </summary>
    public Brush BodyColor
    {
        get
        {
            return this.bodyColor;
        }

        private set
        {
            if (this.bodyColor != value)
            {
                this.bodyColor = value;
                this.NotifyPropertyChanged();
            }
        }
    }

    /// <summary>
    /// Gets a value indicating whether or not the body associated with the gesture detector is currently being tracked
    /// </summary>
    public bool IsTracked
    {
        get
        {
            return this.isTracked;
        }

        private set
        {
            // compare against the backing field for consistency with the
            // other property setters in this class
            if (this.isTracked != value)
            {
                this.isTracked = value;
                this.NotifyPropertyChanged();
            }
        }
    }

    /// <summary>
    /// Gets a value indicating whether or not the discrete gesture has been detected
    /// </summary>
    public bool Detected
    {
        get
        {
            return this.detected;
        }

        private set
        {
            if (this.detected != value)
            {
                this.detected = value;
                this.NotifyPropertyChanged();
            }
        }
    }

    /// <summary>
    /// Gets a float value which indicates the detector's confidence that the gesture is occurring for the associated body
    /// </summary>
    public float Confidence
    {
        get
        {
            return this.confidence;
        }

        private set
        {
            if (this.confidence != value)
            {
                this.confidence = value;
                this.NotifyPropertyChanged();
            }
        }
    }

    /// <summary>
    /// Gets an image for display in the UI which represents the current gesture result for the associated body
    /// </summary>
    public ImageSource ImageSource
    {
        get
        {
            return this.imageSource;
        }

        private set
        {
            // compare against the backing field for consistency with the
            // other property setters in this class
            if (this.imageSource != value)
            {
                this.imageSource = value;
                this.NotifyPropertyChanged();
            }
        }
    }

    /// <summary>
    /// Updates the values associated with the discrete gesture detection result
    /// </summary>
    /// <param name="isBodyTrackingIdValid">True, if the body associated with the GestureResultView object is still being tracked</param>
    /// <param name="isGestureDetected">True, if the discrete gesture is currently detected for the associated body</param>
    /// <param name="detectionConfidence">Confidence value for detection of the discrete gesture</param>
    public void UpdateGestureResult(bool isBodyTrackingIdValid, bool isGestureDetected, float detectionConfidence)
    {
        this.IsTracked = isBodyTrackingIdValid;
        this.Confidence = 0.0f;

        if (!this.IsTracked)
        {
            this.ImageSource = this.notTrackedImage;
            this.Detected = false;
            this.BodyColor = Brushes.Gray;
        }
        else
        {
            this.Detected = isGestureDetected;
            this.BodyColor = this.trackedColors[this.BodyIndex];

            if (this.Detected)
            {
                this.Confidence = detectionConfidence;
                this.ImageSource = this.seatedImage;
            }
            else
            {
                this.ImageSource = this.notSeatedImage;
            }
        }
    }

    /// <summary>
    /// Notifies UI that a property has changed
    /// </summary>
    /// <param name="propertyName">Name of property that has changed</param>
    private void NotifyPropertyChanged([CallerMemberName] string propertyName = "")
    {
        if (this.PropertyChanged != null)
        {
            this.PropertyChanged(this, new PropertyChangedEventArgs(propertyName));
        }
    }
}
}
KinectBodyView.cs
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
//------------------------------------------------------------------------------
//此类主要是可视化身体数据、不再进行无用的翻译
namespace Microsoft.Samples.Kinect.DiscreteGestureBasics
{
using System;
using System.Collections.Generic;
using System.Windows;
using System.Windows.Media;
using Microsoft.Kinect;
///
/// Visualizes the Kinect Body stream for display in the UI
/// 可视化Kinect Body流以在UI中显示
///
public sealed class KinectBodyView
{
    /// <summary>
    /// Radius of drawn hand circles
    /// </summary>
    private const double HandSize = 30;

    /// <summary>
    /// Thickness of drawn joint lines
    /// </summary>
    private const double JointThickness = 3;

    /// <summary>
    /// Thickness of clip edge rectangles
    /// </summary>
    private const double ClipBoundsThickness = 10;

    /// <summary>
    /// Constant for clamping Z values of camera space points from being negative
    /// </summary>
    private const float InferredZPositionClamp = 0.1f;

    /// <summary>
    /// Brush used for drawing hands that are currently tracked as closed
    /// </summary>
    private readonly Brush handClosedBrush = new SolidColorBrush(Color.FromArgb(128, 255, 0, 0));

    /// <summary>
    /// Brush used for drawing hands that are currently tracked as opened
    /// </summary>
    private readonly Brush handOpenBrush = new SolidColorBrush(Color.FromArgb(128, 0, 255, 0));

    /// <summary>
    /// Brush used for drawing hands that are currently tracked as in lasso (pointer) position
    /// </summary>
    private readonly Brush handLassoBrush = new SolidColorBrush(Color.FromArgb(128, 0, 0, 255));

    /// <summary>
    /// Brush used for drawing joints that are currently tracked
    /// </summary>
    private readonly Brush trackedJointBrush = new SolidColorBrush(Color.FromArgb(255, 68, 192, 68));

    /// <summary>
    /// Brush used for drawing joints that are currently inferred
    /// </summary>
    private readonly Brush inferredJointBrush = Brushes.Yellow;

    /// <summary>
    /// Pen used for drawing bones that are currently inferred
    /// </summary>
    private readonly Pen inferredBonePen = new Pen(Brushes.Gray, 1);

    /// <summary>
    /// Drawing group for body rendering output
    /// </summary>
    private DrawingGroup drawingGroup;

    /// <summary>
    /// Drawing image that we will display
    /// </summary>
    private DrawingImage imageSource;

    /// <summary>
    /// Coordinate mapper to map one type of point to another
    /// </summary>
    private CoordinateMapper coordinateMapper = null;

    /// <summary>
    /// Definition of bones; each bone is a line between two joints
    /// (generic type arguments restored; they were stripped by the web paste)
    /// </summary>
    private List<Tuple<JointType, JointType>> bones;

    /// <summary>
    /// Width of display (depth space)
    /// </summary>
    private int displayWidth;

    /// <summary>
    /// Height of display (depth space)
    /// </summary>
    private int displayHeight;

    /// <summary>
    /// List of colors for each body tracked
    /// </summary>
    private List<Pen> bodyColors;

    /// <summary>
    /// Initializes a new instance of the KinectBodyView class
    /// </summary>
    /// <param name="kinectSensor">Active instance of the KinectSensor</param>
    public KinectBodyView(KinectSensor kinectSensor)
    {
        if (kinectSensor == null)
        {
            throw new ArgumentNullException("kinectSensor");
        }

        // get the coordinate mapper
        this.coordinateMapper = kinectSensor.CoordinateMapper;

        // get the depth (display) extents and size of joint space
        FrameDescription frameDescription = kinectSensor.DepthFrameSource.FrameDescription;
        this.displayWidth = frameDescription.Width;
        this.displayHeight = frameDescription.Height;

        // a bone defined as a line between two joints
        this.bones = new List<Tuple<JointType, JointType>>();

        // Torso
        this.bones.Add(new Tuple<JointType, JointType>(JointType.Head, JointType.Neck));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineBase, JointType.HipRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

        // Right Arm
        this.bones.Add(new Tuple<JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.WristRight, JointType.HandRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

        // Left Arm
        this.bones.Add(new Tuple<JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

        // Right Leg
        this.bones.Add(new Tuple<JointType, JointType>(JointType.HipRight, JointType.KneeRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

        // Left Leg
        this.bones.Add(new Tuple<JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
        this.bones.Add(new Tuple<JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

        // populate body colors, one for each BodyIndex; these match the
        // trackedColors array in GestureResultView
        this.bodyColors = new List<Pen>();
        this.bodyColors.Add(new Pen(Brushes.Red, 6));
        this.bodyColors.Add(new Pen(Brushes.Orange, 6));
        this.bodyColors.Add(new Pen(Brushes.Green, 6));
        this.bodyColors.Add(new Pen(Brushes.Blue, 6));
        this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
        this.bodyColors.Add(new Pen(Brushes.Violet, 6));

        // Create the drawing group we'll use for drawing and an image source
        // that we can use in our image control
        this.drawingGroup = new DrawingGroup();
        this.imageSource = new DrawingImage(this.drawingGroup);
    }

    /// <summary>
    /// Gets the bitmap to display
    /// </summary>
    public ImageSource ImageSource
    {
        get
        {
            return this.imageSource;
        }
    }

    /// <summary>
    /// Updates the body array with new information from the sensor.
    /// Should be called whenever a new BodyFrameArrivedEvent occurs.
    /// </summary>
    /// <param name="bodies">Array of bodies to update</param>
    public void UpdateBodyFrame(Body[] bodies)
    {
        if (bodies != null)
        {
            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Draw a transparent background to set the render size
                dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));

                int penIndex = 0;
                foreach (Body body in bodies)
                {
                    Pen drawPen = this.bodyColors[penIndex++];

                    if (body.IsTracked)
                    {
                        this.DrawClippedEdges(body, dc);

                        IReadOnlyDictionary<JointType, Joint> joints = body.Joints;

                        // convert the joint points to depth (display) space
                        Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();

                        foreach (JointType jointType in joints.Keys)
                        {
                            // sometimes the depth(Z) of an inferred joint may show as negative
                            // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                            CameraSpacePoint position = joints[jointType].Position;
                            if (position.Z < 0)
                            {
                                position.Z = InferredZPositionClamp;
                            }

                            DepthSpacePoint depthSpacePoint = this.coordinateMapper.MapCameraPointToDepthSpace(position);
                            jointPoints[jointType] = new Point(depthSpacePoint.X, depthSpacePoint.Y);
                        }

                        this.DrawBody(joints, jointPoints, dc, drawPen);

                        this.DrawHand(body.HandLeftState, jointPoints[JointType.HandLeft], dc);
                        this.DrawHand(body.HandRightState, jointPoints[JointType.HandRight], dc);
                    }
                }

                // prevent drawing outside of our render area
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
        }
    }

    /// <summary>
    /// Draws a body
    /// </summary>
    /// <param name="joints">joints to draw</param>
    /// <param name="jointPoints">translated positions of joints to draw</param>
    /// <param name="drawingContext">drawing context to draw to</param>
    /// <param name="drawingPen">specifies color to draw a specific body</param>
    private void DrawBody(IReadOnlyDictionary<JointType, Joint> joints, IDictionary<JointType, Point> jointPoints, DrawingContext drawingContext, Pen drawingPen)
    {
        // Draw the bones
        foreach (var bone in this.bones)
        {
            this.DrawBone(joints, jointPoints, bone.Item1, bone.Item2, drawingContext, drawingPen);
        }

        // Draw the joints
        foreach (JointType jointType in joints.Keys)
        {
            Brush drawBrush = null;

            TrackingState trackingState = joints[jointType].TrackingState;

            if (trackingState == TrackingState.Tracked)
            {
                drawBrush = this.trackedJointBrush;
            }
            else if (trackingState == TrackingState.Inferred)
            {
                drawBrush = this.inferredJointBrush;
            }

            if (drawBrush != null)
            {
                drawingContext.DrawEllipse(drawBrush, null, jointPoints[jointType], JointThickness, JointThickness);
            }
        }
    }

    /// <summary>
    /// Draws one bone of a body (joint to joint)
    /// </summary>
    /// <param name="joints">joints to draw</param>
    /// <param name="jointPoints">translated positions of joints to draw</param>
    /// <param name="jointType0">first joint of bone to draw</param>
    /// <param name="jointType1">second joint of bone to draw</param>
    /// <param name="drawingContext">drawing context to draw to</param>
    /// <param name="drawingPen">specifies color to draw a specific bone</param>
    private void DrawBone(IReadOnlyDictionary<JointType, Joint> joints, IDictionary<JointType, Point> jointPoints, JointType jointType0, JointType jointType1, DrawingContext drawingContext, Pen drawingPen)
    {
        Joint joint0 = joints[jointType0];
        Joint joint1 = joints[jointType1];

        // If we can't find either of these joints, exit
        if (joint0.TrackingState == TrackingState.NotTracked ||
            joint1.TrackingState == TrackingState.NotTracked)
        {
            return;
        }

        // We assume all drawn bones are inferred unless BOTH joints are tracked
        Pen drawPen = this.inferredBonePen;
        if ((joint0.TrackingState == TrackingState.Tracked) && (joint1.TrackingState == TrackingState.Tracked))
        {
            drawPen = drawingPen;
        }

        drawingContext.DrawLine(drawPen, jointPoints[jointType0], jointPoints[jointType1]);
    }

    /// <summary>
    /// Draws a hand symbol if the hand is tracked: red circle = closed, green circle = opened; blue circle = lasso
    /// </summary>
    /// <param name="handState">state of the hand</param>
    /// <param name="handPosition">position of the hand</param>
    /// <param name="drawingContext">drawing context to draw to</param>
    private void DrawHand(HandState handState, Point handPosition, DrawingContext drawingContext)
    {
        switch (handState)
        {
            case HandState.Closed:
                drawingContext.DrawEllipse(this.handClosedBrush, null, handPosition, HandSize, HandSize);
                break;

            case HandState.Open:
                drawingContext.DrawEllipse(this.handOpenBrush, null, handPosition, HandSize, HandSize);
                break;

            case HandState.Lasso:
                drawingContext.DrawEllipse(this.handLassoBrush, null, handPosition, HandSize, HandSize);
                break;
        }
    }

    /// <summary>
    /// Draws indicators to show which edges are clipping body data
    /// </summary>
    /// <param name="body">body to draw clipping information for</param>
    /// <param name="drawingContext">drawing context to draw to</param>
    private void DrawClippedEdges(Body body, DrawingContext drawingContext)
    {
        FrameEdges clippedEdges = body.ClippedEdges;

        if (clippedEdges.HasFlag(FrameEdges.Bottom))
        {
            drawingContext.DrawRectangle(
                Brushes.Red,
                null,
                new Rect(0, this.displayHeight - ClipBoundsThickness, this.displayWidth, ClipBoundsThickness));
        }

        if (clippedEdges.HasFlag(FrameEdges.Top))
        {
            drawingContext.DrawRectangle(
                Brushes.Red,
                null,
                new Rect(0, 0, this.displayWidth, ClipBoundsThickness));
        }

        if (clippedEdges.HasFlag(FrameEdges.Left))
        {
            drawingContext.DrawRectangle(
                Brushes.Red,
                null,
                new Rect(0, 0, ClipBoundsThickness, this.displayHeight));
        }

        if (clippedEdges.HasFlag(FrameEdges.Right))
        {
            drawingContext.DrawRectangle(
                Brushes.Red,
                null,
                new Rect(this.displayWidth - ClipBoundsThickness, 0, ClipBoundsThickness, this.displayHeight));
        }
    }
}
}
MainWindow.xaml.cs
//---------------------------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
//
//
// This program tracks up to 6 people simultaneously.
// If a person is tracked, the associated gesture detector will determine if that person is seated or not.
// If any of the 6 positions are not in use, the corresponding gesture detector(s) will be paused
// and the 'Not Tracked' image will be displayed in the UI.
//
//----------------------------------------------------------------------------------------------------
namespace Microsoft.Samples.Kinect.DiscreteGestureBasics
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Windows;
using System.Windows.Controls;
using Microsoft.Kinect;
using Microsoft.Kinect.VisualGestureBuilder;
///
/// MainWindow的交互逻辑
/// Interaction logic for the MainWindow
///
public partial class MainWindow : Window, INotifyPropertyChanged
{
    /// <summary>
    /// Active Kinect sensor
    /// </summary>
    private KinectSensor kinectSensor = null;

    /// <summary>
    /// Array for the bodies (Kinect will track up to 6 people simultaneously)
    /// </summary>
    private Body[] bodies = null;

    /// <summary>
    /// Reader for body frames
    /// </summary>
    private BodyFrameReader bodyFrameReader = null;

    /// <summary>
    /// Current status text to display
    /// </summary>
    private string statusText = null;

    /// <summary>
    /// KinectBodyView object which handles drawing the Kinect bodies to a View box in the UI
    /// </summary>
    private KinectBodyView kinectBodyView = null;

    /// <summary>
    /// List of gesture detectors; there will be one detector created for each potential body (max of 6).
    /// (generic type argument restored; it was stripped by the web paste)
    /// </summary>
    private List<GestureDetector> gestureDetectorList = null;

    /// <summary>
    /// Initializes a new instance of the MainWindow class
    /// </summary>
    public MainWindow()
    {
        // only one sensor is currently supported
        this.kinectSensor = KinectSensor.GetDefault();

        // set IsAvailableChanged event notifier
        this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

        // open the sensor
        this.kinectSensor.Open();

        // set the status text
        this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                        : Properties.Resources.NoSensorStatusText;

        // open the reader for the body frames
        this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

        // set the BodyFramedArrived event notifier
        this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

        // initialize the BodyViewer object for displaying tracked bodies in the UI
        this.kinectBodyView = new KinectBodyView(this.kinectSensor);

        // initialize the gesture detection objects for our gestures
        this.gestureDetectorList = new List<GestureDetector>();

        // initialize the MainWindow
        this.InitializeComponent();

        // set our data context objects for display in UI
        this.DataContext = this;
        this.kinectBodyViewbox.DataContext = this.kinectBodyView;

        // create a gesture detector for each body (6 bodies => 6 detectors)
        // and create content controls to display results in the UI
        int col0Row = 0;
        int col1Row = 0;
        int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
        for (int i = 0; i < maxBodies; ++i)
        {
            GestureResultView result = new GestureResultView(i, false, false, 0.0f);
            GestureDetector detector = new GestureDetector(this.kinectSensor, result);
            this.gestureDetectorList.Add(detector);

            // split gesture results across the first two columns of the content grid
            ContentControl contentControl = new ContentControl();
            contentControl.Content = this.gestureDetectorList[i].GestureResultView;

            if (i % 2 == 0)
            {
                // Gesture results for bodies: 0, 2, 4
                Grid.SetColumn(contentControl, 0);
                Grid.SetRow(contentControl, col0Row);
                ++col0Row;
            }
            else
            {
                // Gesture results for bodies: 1, 3, 5
                Grid.SetColumn(contentControl, 1);
                Grid.SetRow(contentControl, col1Row);
                ++col1Row;
            }

            this.contentGrid.Children.Add(contentControl);
        }
    }

    /// <summary>
    /// INotifyPropertyChanged PropertyChanged event to allow window controls to bind to changeable data
    /// </summary>
    public event PropertyChangedEventHandler PropertyChanged;

    /// <summary>
    /// Gets or sets the current status text to display
    /// </summary>
    public string StatusText
    {
        get
        {
            return this.statusText;
        }

        set
        {
            if (this.statusText != value)
            {
                this.statusText = value;

                // notify any bound elements that the text has changed
                if (this.PropertyChanged != null)
                {
                    this.PropertyChanged(this, new PropertyChangedEventArgs("StatusText"));
                }
            }
        }
    }

    /// <summary>
    /// Execute shutdown tasks
    /// </summary>
    /// <param name="sender">object sending the event</param>
    /// <param name="e">event arguments</param>
    private void MainWindow_Closing(object sender, CancelEventArgs e)
    {
        if (this.bodyFrameReader != null)
        {
            // BodyFrameReader is IDisposable
            this.bodyFrameReader.FrameArrived -= this.Reader_BodyFrameArrived;
            this.bodyFrameReader.Dispose();
            this.bodyFrameReader = null;
        }

        if (this.gestureDetectorList != null)
        {
            // the GestureDetector contains disposable members
            // (VisualGestureBuilderFrameSource and VisualGestureBuilderFrameReader)
            foreach (GestureDetector detector in this.gestureDetectorList)
            {
                detector.Dispose();
            }

            this.gestureDetectorList.Clear();
            this.gestureDetectorList = null;
        }

        if (this.kinectSensor != null)
        {
            this.kinectSensor.IsAvailableChanged -= this.Sensor_IsAvailableChanged;
            this.kinectSensor.Close();
            this.kinectSensor = null;
        }
    }

    /// <summary>
    /// Handles the event when the sensor becomes unavailable (e.g. paused, closed, unplugged)
    /// </summary>
    /// <param name="sender">object sending the event</param>
    /// <param name="e">event arguments</param>
    private void Sensor_IsAvailableChanged(object sender, IsAvailableChangedEventArgs e)
    {
        // on failure, set the status text
        this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                        : Properties.Resources.SensorNotAvailableStatusText;
    }

    /// <summary>
    /// Handles the body frame data arriving from the sensor and updates the associated gesture detector object for each body
    /// </summary>
    /// <param name="sender">object sending the event</param>
    /// <param name="e">event arguments</param>
    private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
    {
        bool dataReceived = false;

        using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
        {
            if (bodyFrame != null)
            {
                if (this.bodies == null)
                {
                    // creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                    this.bodies = new Body[bodyFrame.BodyCount];
                }

                // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                // As long as those body objects are not disposed and not set to null in the array,
                // those body objects will be re-used.
                bodyFrame.GetAndRefreshBodyData(this.bodies);
                dataReceived = true;
            }
        }

        if (dataReceived)
        {
            // visualize the new body data
            this.kinectBodyView.UpdateBodyFrame(this.bodies);

            // we may have lost/acquired bodies, so update the corresponding gesture detectors
            if (this.bodies != null)
            {
                // loop through all bodies to see if any of the gesture detectors need to be updated
                int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
                for (int i = 0; i < maxBodies; ++i)
                {
                    Body body = this.bodies[i];
                    ulong trackingId = body.TrackingId;

                    // if the current body TrackingId changed, update the corresponding gesture detector with the new value
                    if (trackingId != this.gestureDetectorList[i].TrackingId)
                    {
                        this.gestureDetectorList[i].TrackingId = trackingId;

                        // if the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                        // if the current body is not tracked, pause its detector so we don't waste resources
                        // trying to get invalid gesture results
                        this.gestureDetectorList[i].IsPaused = trackingId == 0;
                    }
                }
            }
        }
    }
}
}
我的github链接:https://github.com/MADMAX110/DiscreteGestureBasics-WPF