Informally, a Kalman filter estimates the current state by combining the prediction carried forward from the previous step with the measurement taken at the current step.
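Written out, one filter step is a prediction followed by a measurement update (x is the state, P its error covariance, F the state-transition matrix, H the observation matrix, Q and R the process and measurement noise covariances, K the Kalman gain). The per-frame filter step in the FaceTracking script further down follows these equations:

Predict:   x_k^- = F x_{k-1}
           P_k^- = F P_{k-1} F^T + Q
Update:    K_k = P_k^- H^T (H P_k^- H^T + R)^-1
           x_k = x_k^- + K_k (z_k - H x_k^-)
           P_k = (I - K_k H) P_k^-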
I recently used a Kalman filter in a Unity project to suppress jitter, mainly head jitter: Dlib delivers the 68 facial landmark points in real time, the head rotation (as x/y/z angles) computed from those points serves as the measurement, and the initial state can be chosen arbitrarily, because after a few iterations the estimate converges toward the true value. The Unity scripts are written in C#, which has no built-in matrix library, so matrix arithmetic is awkward and you have to bring in your own matrix code. The approach is simple: put the matrix library in its own namespace, then use that namespace wherever it is needed.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Slash.Lastwork
{
public class Matrix
{
double[,] A;
// m rows, n columns
int m, n;
string name;
public Matrix(int am, int an)
{
m = am;
n = an;
A = new double[m, n];
name = "Result";
}
public Matrix(int am, int an, string aName)
{
m = am;
n = an;
A = new double[m, n];
name = aName;
}
public int getM
{
get { return m; }
}
public int getN
{
get { return n; }
}
public double[,] Detail
{
get { return A; }
set { A = value; }
}
public string Name
{
get { return name; }
set { name = value; }
}
}
public class MatrixOperator
{
MatrixOperator()
{ }
/// <summary>
/// Matrix addition
/// </summary>
public static Matrix MatrixAdd(Matrix Ma, Matrix Mb)
{
int m = Ma.getM;
int n = Ma.getN;
int m2 = Mb.getM;
int n2 = Mb.getN;
if ((m != m2) || (n != n2))
{
Exception myException = new Exception("Matrix dimensions do not match");
throw myException;
}
Matrix Mc = new Matrix(m, n);
double[,] c = Mc.Detail;
double[,] a = Ma.Detail;
double[,] b = Mb.Detail;
int i, j;
for (i = 0; i < m; i++)
for (j = 0; j < n; j++)
c[i, j] = a[i, j] + b[i, j];
return Mc;
}
/// <summary>
/// Matrix subtraction
/// </summary>
public static Matrix MatrixSub(Matrix Ma, Matrix Mb)
{
int m = Ma.getM;
int n = Ma.getN;
int m2 = Mb.getM;
int n2 = Mb.getN;
if ((m != m2) || (n != n2))
{
Exception myException = new Exception("Matrix dimensions do not match");
throw myException;
}
Matrix Mc = new Matrix(m, n);
double[,] c = Mc.Detail;
double[,] a = Ma.Detail;
double[,] b = Mb.Detail;
int i, j;
for (i = 0; i < m; i++)
for (j = 0; j < n; j++)
c[i, j] = a[i, j] - b[i, j];
return Mc;
}
/// <summary>
/// Format a matrix as a printable string
/// </summary>
public static string MatrixPrint(Matrix Ma)
{
string s;
s = Ma.Name + ":\n";
int m = Ma.getM;
int n = Ma.getN;
double[,] a = Ma.Detail;
for (int i = 0; i < m; i++)
{
for (int j = 0; j < n; j++)
{
s += a[i, j].ToString("0.0000") + "\t";
}
s += "\n";
}
return s;
}
/// <summary>
/// Matrix multiplication
/// </summary>
public static Matrix MatrixMulti(Matrix Ma, Matrix Mb)
{
int m = Ma.getM;
int n = Ma.getN;
int m2 = Mb.getM;
int n2 = Mb.getN;
if (n != m2)
{
Exception myException = new Exception("Matrix dimensions do not match");
throw myException;
}
Matrix Mc = new Matrix(m, n2);
double[,] c = Mc.Detail;
double[,] a = Ma.Detail;
double[,] b = Mb.Detail;
int i, j, k;
for (i = 0; i < m; i++)
for (j = 0; j < n2; j++)
{
c[i, j] = 0;
for (k = 0; k < n; k++)
c[i, j] += a[i, k] * b[k, j];
}
return Mc;
}
/// <summary>
/// Scalar multiplication (k * Ma)
/// </summary>
public static Matrix MatrixSimpleMulti(double k, Matrix Ma)
{
int n = Ma.getN;
int m = Ma.getM;
Matrix Mc = new Matrix(m, n);
double[,] c = Mc.Detail;
double[,] a = Ma.Detail;
int i, j;
for (i = 0; i < m; i++)
for (j = 0; j < n; j++)
c[i, j] = a[i, j] * k;
return Mc;
}
/// <summary>
/// Matrix transpose
/// </summary>
public static Matrix MatrixTrans(Matrix Ma)
{
int m = Ma.getM;
int n = Ma.getN;
Matrix Mc = new Matrix(n, m);
double[,] c = Mc.Detail;
double[,] a = Ma.Detail;
for (int i = 0; i < n; i++)
for (int j = 0; j < m; j++)
c[i, j] = a[j, i];
return Mc;
}
/// <summary>
/// Matrix inversion (Gauss-Jordan elimination with partial pivoting)
/// </summary>
public static Matrix MatrixInv(Matrix Ma)
{
int m = Ma.getM;
int n = Ma.getN;
if (m != n)
{
Exception myException = new Exception("Matrix is not square");
throw myException;
}
Matrix Mc = new Matrix(m, n);
double[,] a0 = Ma.Detail;
double[,] a = (double[,])a0.Clone();
double[,] b = Mc.Detail;
int i, j, row, k;
double max, temp;
// Start b as the identity matrix
for (i = 0; i < n; i++)
{
b[i, i] = 1;
}
for (k = 0; k < n; k++)
{
max = 0; row = k;
// Find the pivot (largest absolute value) in column k; its row index is 'row'
for (i = k; i < n; i++)
{
temp = Math.Abs(a[i, k]);
if (max < temp)
{
max = temp;
row = i;
}
}
if (max == 0)
{
Exception myException = new Exception("Matrix is singular, no inverse exists");
throw myException;
}
// Swap rows k and row
if (row != k)
{
for (j = 0; j < n; j++)
{
temp = a[row, j];
a[row, j] = a[k, j];
a[k, j] = temp;
temp = b[row, j];
b[row, j] = b[k, j];
b[k, j] = temp;
}
}
// Normalize the pivot row so the pivot becomes 1
for (j = k + 1; j < n; j++) a[k, j] /= a[k, k];
for (j = 0; j < n; j++) b[k, j] /= a[k, k];
a[k, k] = 1;
// Eliminate column k
// in a
for (j = k + 1; j < n; j++)
{
for (i = 0; i < k; i++) a[i, j] -= a[i, k] * a[k, j];
for (i = k + 1; i < n; i++) a[i, j] -= a[i, k] * a[k, j];
}
// in b
for (j = 0; j < n; j++)
{
for (i = 0; i < k; i++) b[i, j] -= a[i, k] * b[k, j];
for (i = k + 1; i < n; i++) b[i, j] -= a[i, k] * b[k, j];
}
for (i = 0; i < n; i++) a[i, k] = 0;
a[k, k] = 1;
}
return Mc;
}
/// <summary>
/// Matrix inversion via the adjugate matrix
/// </summary>
public static Matrix MatrixInvByCom(Matrix Ma)
{
double d = MatrixOperator.MatrixDet(Ma);
if (d == 0)
{
Exception myException = new Exception("Matrix is singular, no inverse exists");
throw myException;
}
Matrix Ax = MatrixOperator.MatrixCom(Ma);
Matrix An = MatrixOperator.MatrixSimpleMulti((1.0 / d), Ax);
return An;
}
/// <summary>
/// Cofactor submatrix of element (ai, aj): the minor with the cofactor sign (-1)^(ai+aj) folded into its first column
/// </summary>
public static Matrix MatrixSpa(Matrix Ma, int ai, int aj)
{
int m = Ma.getM;
int n = Ma.getN;
if (m != n)
{
Exception myException = new Exception("Matrix is not square");
throw myException;
}
int n2 = n - 1;
Matrix Mc = new Matrix(n2, n2);
double[,] a = Ma.Detail;
double[,] b = Mc.Detail;
// Top-left block
for (int i = 0; i < ai; i++)
for (int j = 0; j < aj; j++)
{
b[i, j] = a[i, j];
}
// Bottom-right block
for (int i = ai; i < n2; i++)
for (int j = aj; j < n2; j++)
{
b[i, j] = a[i + 1, j + 1];
}
// Top-right block
for (int i = 0; i < ai; i++)
for (int j = aj; j < n2; j++)
{
b[i, j] = a[i, j + 1];
}
// Bottom-left block
for (int i = ai; i < n2; i++)
for (int j = 0; j < aj; j++)
{
b[i, j] = a[i + 1, j];
}
// Apply the cofactor sign (-1)^(ai+aj) by negating the first column
if ((ai + aj) % 2 != 0)
{
for (int i = 0; i < n2; i++)
b[i, 0] = -b[i, 0];
}
return Mc;
}
/// <summary>
/// Determinant, computed by recursive cofactor expansion (along the second row; the sign is handled inside MatrixSpa)
/// </summary>
public static double MatrixDet(Matrix Ma)
{
int m = Ma.getM;
int n = Ma.getN;
if (m != n)
{
Exception myException = new Exception("Matrix is not square");
throw myException;
}
double[,] a = Ma.Detail;
if (n == 1) return a[0, 0];
double D = 0;
for (int i = 0; i < n; i++)
{
D += a[1, i] * MatrixDet(MatrixSpa(Ma, 1, i));
}
return D;
}
/// <summary>
/// Adjugate (classical adjoint) matrix: entry (i, j) is the cofactor of element (j, i)
/// </summary>
public static Matrix MatrixCom(Matrix Ma)
{
int m = Ma.getM;
int n = Ma.getN;
Matrix Mc = new Matrix(m, n);
double[,] c = Mc.Detail;
double[,] a = Ma.Detail;
for (int i = 0; i < m; i++)
for (int j = 0; j < n; j++)
c[i, j] = MatrixDet(MatrixSpa(Ma, j, i));
return Mc;
}
}
}
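Before wiring the library into the tracker, it is worth sanity-checking it in isolation. The snippet below is a minimal usage sketch, not part of the original project (the 2x2 values are arbitrary test data): it fills a matrix through the Detail property, inverts it with MatrixInv, and multiplies the two back together, which should print something numerically very close to the identity matrix.

using System;
using Slash.Lastwork;
public static class MatrixDemo
{
public static void Main()
{
// Fill a 2x2 test matrix through the Detail property (the array is returned by reference)
Matrix A = new Matrix(2, 2, "A");
double[,] a = A.Detail;
a[0, 0] = 4; a[0, 1] = 7;
a[1, 0] = 2; a[1, 1] = 6;
// Invert and multiply back: A * A^-1 should be (numerically) the identity
Matrix Ainv = MatrixOperator.MatrixInv(A);
Ainv.Name = "A^-1";
Matrix I = MatrixOperator.MatrixMulti(A, Ainv);
I.Name = "A * A^-1";
Console.WriteLine(MatrixOperator.MatrixPrint(A));
Console.WriteLine(MatrixOperator.MatrixPrint(Ainv));
Console.WriteLine(MatrixOperator.MatrixPrint(I));
}
}

Inside Unity the same calls work from any MonoBehaviour, with Debug.Log in place of Console.WriteLine. The FaceTracking script below pulls in the same namespace (using Slash.Lastwork;) and runs the head rotation angles through the filter every frame.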
using UnityEngine;
using System.Collections.Generic;
using System;
using System.IO;
using Slash.Lastwork;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using DlibFaceLandmarkDetector;
using DlibFaceLandmarkDetectorSample;
using Moments;
namespace Slash.Facemoji
{
/// <summary>
/// FaceTracking: uses Dlib to detect face landmarks and drives a Live2D model to track the face.
/// </summary>
[AddComponentMenu("Facemoji/FaceTracking")]
[RequireComponent (typeof(WebCamTextureToMatHelper))]
public class FaceTracking : MonoBehaviour
{
// Kalman filter matrices (this first set is not referenced by the filter loop below)
// F_K: state-transition matrix
Matrix stateConvert = new Matrix(6, 6, "stateConvert");
// P_K: posterior error covariance matrix, a measure of how accurate the estimate is
Matrix errorCovPost = new Matrix(6, 6, "errorCovPost");
// H_K: observation matrix
Matrix measureMatrix = new Matrix(6, 3, "measureMatrix");
// Q_K: process noise covariance
Matrix processNoiseConvert = new Matrix(6, 6, "processNoiseConvert");
// R_K: measurement noise covariance
Matrix measureNoiseConvert = new Matrix(3, 3, "measureNoiseConvert");
// X_0: initial state
Matrix originState = new Matrix(3, 1, "originState");
/// <summary>
/// All Live2D textures
/// </summary>
private enum WhichTexture
{
noir_santa = 1,
uni = 2,
};
/// <summary>
/// The currently selected Live2D texture (default: uni)
/// </summary>
private WhichTexture selectedTexture = WhichTexture.uni;
/// <summary>
/// Start button
/// </summary>
public static GameObject startBtn;
/// <summary>
/// Finish button
/// </summary>
public static GameObject finishBtn;
/// <summary>
/// live2DModel.transform.localScale
/// </summary>
public float modelScale = 0.9f;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The face landmark detector.
/// </summary>
FaceLandmarkDetector faceLandmarkDetector;
/// <summary>
/// The live2DModel.
/// </summary>
public Live2DModel live2DModel;
/// <summary>
/// The frontal face parameter.
/// </summary>
FrontalFaceParam frontalFaceParam;
/// <summary>
/// The shape_predictor_68_face_landmarks_dat_filepath.
/// </summary>
private string shape_predictor_68_face_landmarks_dat_filepath;
/// <summary>
/// Model file paths.
/// </summary>
private string moc_filepath;
private string physics_filepath;
private string pose_filepath;
private string[] texture_filepath = new string[6];
// Matrices used in the Kalman filter computation below.
// F_K: state-transition matrix
Matrix F_K = new Matrix(6, 6, "F_K");
// P_K: posterior error covariance matrix, a measure of how accurate the estimate is
Matrix P_K = new Matrix(6, 6, "errorCovPost");
// P_k+1: predicted (a priori) error covariance
Matrix P_front = new Matrix(6, 6, "errorCovPost");
// H_K: observation matrix
Matrix H_K = new Matrix(3, 6, "H_K");
// Q_K: process noise covariance
Matrix Q_K = new Matrix(6, 6, "Q_K");
// R_K: measurement noise covariance
Matrix R_K = new Matrix(3, 3, "R_K");
// Z_K: measurement
Matrix Z_K = new Matrix(3, 1, "Z_K");
// X_K: current state estimate
Matrix X_K = new Matrix(6, 1, "state");
// X_K+1: predicted (a priori) state
Matrix X_front = new Matrix(6, 1, "state");
// 6x6 identity matrix
Matrix eye = new Matrix(6, 6, "eye");
// Set once the filter state and the constant matrices have been initialized
bool kalmanInitialized = false;
// Use this for initialization
void Start()
{
startBtn = GameObject.Find("StartButton");
finishBtn = GameObject.Find("FinishButton");
startBtn.SetActive(true);
finishBtn.SetActive(false);
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
#if UNITY_WEBGL && !UNITY_EDITOR
webCamTextureToMatHelper.flipHorizontal = true;
StartCoroutine(getFilePathCoroutine());
#else
// FaceLandmark model filepath
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath("shape_predictor_68_face_landmarks.dat");
// Load Texture filepath
LoadTexture();
Run();
#endif
}
private void LoadTexture()
{
// Load model filepath
switch (selectedTexture)
{
case WhichTexture.noir_santa:
{
moc_filepath = OpenCVForUnity.Utils.getFilePath("noir_santa/noir_santa.moc.bytes");
physics_filepath = OpenCVForUnity.Utils.getFilePath("noir_santa/noir_santa.physics.json");
pose_filepath = OpenCVForUnity.Utils.getFilePath("noir_santa/noir_santa.pose.json");
for (int i = 0; i < texture_filepath.Length; i++)
{
texture_filepath[i] = OpenCVForUnity.Utils.getFilePath("noir_santa/noir_santa.1024/texture_0" + i + ".png");
}
break;
}
case WhichTexture.uni:
{
moc_filepath = OpenCVForUnity.Utils.getFilePath("uni/uni.moc.bytes");
physics_filepath = OpenCVForUnity.Utils.getFilePath("uni/uni.physics.json");
pose_filepath = OpenCVForUnity.Utils.getFilePath("uni/uni.pose.json");
for (int i = 0; i < texture_filepath.Length; i++)
{
texture_filepath[i] = OpenCVForUnity.Utils.getFilePath("uni/uni.1024/texture_0" + i + ".png");
}
break;
}
default:
{
moc_filepath = OpenCVForUnity.Utils.getFilePath("uni/uni.moc.bytes");
physics_filepath = OpenCVForUnity.Utils.getFilePath("uni/uni.physics.json");
pose_filepath = OpenCVForUnity.Utils.getFilePath("uni/uni.pose.json");
for (int i = 0; i < texture_filepath.Length; i++)
{
texture_filepath[i] = OpenCVForUnity.Utils.getFilePath("uni/uni.1024/texture_0" + i + ".png");
}
break;
}
}
live2DModel.textureFiles = new Texture2D[texture_filepath.Length];
for (int i = 0; i < texture_filepath.Length; i++)
{
if (string.IsNullOrEmpty(texture_filepath[i]))
continue;
Texture2D tex = new Texture2D(2, 2);
tex.LoadImage(File.ReadAllBytes(texture_filepath[i]));
live2DModel.textureFiles[i] = tex;
}
if (!string.IsNullOrEmpty(moc_filepath))
live2DModel.setMocFileFromBytes(File.ReadAllBytes(moc_filepath));
if (!string.IsNullOrEmpty(physics_filepath))
live2DModel.setPhysicsFileFromBytes(File.ReadAllBytes(physics_filepath));
if (!string.IsNullOrEmpty(pose_filepath))
live2DModel.setPoseFileFromBytes(File.ReadAllBytes(pose_filepath));
}
private void Run ()
{
Debug.Log ("Run");
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
frontalFaceParam = new FrontalFaceParam ();
// Use the front camera to Init
webCamTextureToMatHelper.Init(null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
//// Default initialization
//webCamTextureToMatHelper.Init();
//webCamTextureToMatHelper.Init(null, 320, 240, false);
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
Debug.Log ("OnWebCamTextureToMatHelperInited");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = gameObject.transform.localScale.x;
float height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
} else {
Camera.main.orthographicSize = height / 2;
}
if (live2DModel != null) {
// Set live2DModel localScale
live2DModel.transform.localScale = new Vector3 (Camera.main.orthographicSize * modelScale, Camera.main.orthographicSize * modelScale, 1);
}
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
public void OnWebCamTextureToMatHelperErrorOccurred ()
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred");
}
// Update is called once per frame
void Update ()
{
if (Input.GetKeyDown(KeyCode.Escape))
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene("FacemojiStart");
#else
Application.LoadLevel("FacemojiStart");
#endif
}
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
// Detected face rectangles
List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect ();
foreach (var rect in detectResult) {
// The 68 landmark points
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
if (points.Count > 0) {
// angle
Vector3 angles = frontalFaceParam.getFrontalFaceAngle(points);
// Initialize the filter only once: the state starts from the first measurement.
// (Re-initializing it every frame would make the output follow the raw measurement.)
if (!kalmanInitialized) {
// X_K: initial state (angles from the first measurement, zero angular velocity)
double[,] originState = X_K.Detail;
originState[0, 0] = angles.x;
originState[1, 0] = angles.y;
originState[2, 0] = angles.z;
originState[3, 0] = 0;
originState[4, 0] = 0;
originState[5, 0] = 0;
// eye: 6x6 identity matrix
double[,] unit = eye.Detail;
for (int row = 0; row < 6; row++)
for (int col = 0; col < 6; col++)
unit[row, col] = (row == col) ? 1 : 0;
// F_K: state-transition matrix of a constant-velocity model (dt = 1):
// angle' = angle + angularVelocity, angularVelocity' = angularVelocity
double[,] stateC = F_K.Detail;
for (int row = 0; row < 6; row++)
for (int col = 0; col < 6; col++)
stateC[row, col] = (row == col) ? 1 : 0;
stateC[0, 3] = 1;
stateC[1, 4] = 1;
stateC[2, 5] = 1;
// P_K: initial posterior error covariance, 0.1 * identity
double[,] errCovPost = P_K.Detail;
for (int row = 0; row < 6; row++)
for (int col = 0; col < 6; col++)
errCovPost[row, col] = (row == col) ? 0.1 : 0;
//Debug.Log(MatrixOperator.MatrixPrint(P_K));
// H_K: observation matrix, maps the 6-element state to the 3 measured angles: [I3 | 0]
double[,] measureMat = H_K.Detail;
for (int row = 0; row < 3; row++)
for (int col = 0; col < 6; col++)
measureMat[row, col] = (row == col) ? 1 : 0;
// Q_K: process noise covariance, identity
double[,] processNoiseCov = Q_K.Detail;
for (int row = 0; row < 6; row++)
for (int col = 0; col < 6; col++)
processNoiseCov[row, col] = (row == col) ? 1 : 0;
// R_K: measurement noise covariance, 0.03 * identity
double[,] measureNoiseCov = R_K.Detail;
for (int row = 0; row < 3; row++)
for (int col = 0; col < 3; col++)
measureNoiseCov[row, col] = (row == col) ? 0.03 : 0;
// The constant matrices and the initial state only need to be set once
kalmanInitialized = true;
}
double[] Arr = new double[] { angles.x, angles.y, angles.z};
// Z_K: the measurement for the current frame (the head rotation angles)
double[,] measure = Z_K.Detail;
measure[0, 0] = Arr[0];
measure[1, 0] = Arr[1];
measure[2, 0] = Arr[2];
// Prediction step: project the state forward, X- = F * X
Matrix X_front = MatrixOperator.MatrixMulti(F_K, X_K);
// Predicted (a priori) error covariance: P- = F * P * F^T + Q
Matrix P_front = MatrixOperator.MatrixAdd(MatrixOperator.MatrixMulti(MatrixOperator.MatrixMulti(F_K, P_K), MatrixOperator.MatrixTrans(F_K)), Q_K);
// Kalman gain: K = P- * H^T * (H * P- * H^T + R)^-1
Matrix Kg = MatrixOperator.MatrixMulti(MatrixOperator.MatrixMulti(P_front, MatrixOperator.MatrixTrans(H_K)), MatrixOperator.MatrixInv(MatrixOperator.MatrixAdd(MatrixOperator.MatrixMulti(MatrixOperator.MatrixMulti(H_K, P_front), MatrixOperator.MatrixTrans(H_K)), R_K)));
// Corrected (a posteriori) state estimate: X = X- + K * (Z - H * X-)
Matrix X_front_ = MatrixOperator.MatrixAdd(X_front, MatrixOperator.MatrixMulti(Kg, MatrixOperator.MatrixSub(Z_K, MatrixOperator.MatrixMulti(H_K, X_front))));
// Updated error covariance, carried over to the next frame: P = (I - K * H) * P-
Matrix P_front_ = MatrixOperator.MatrixMulti(MatrixOperator.MatrixSub(eye, MatrixOperator.MatrixMulti(Kg, H_K)), P_front);
X_K = X_front_;
P_K = P_front_;
double[,] c = X_K.Detail;
double aa = c[0, 0];
double bb = c[1,0];
double cc = c[2, 0];
angles.x = (int)aa;
angles.y = (int)bb;
angles.z = (int)cc;
float rotateX = (angles.x > 180) ? angles.x - 360 : angles.x;
float rotateY = (angles.y > 180) ? angles.y - 360 : angles.y;
float rotateZ = (angles.z > 180) ? angles.z - 360 : angles.z;
// Coordinate transformation
//live2DModel.PARAM_ANGLE.Set(-rotateY, rotateX, -rotateZ);
live2DModel.PARAM_ANGLE.Set(rotateX, rotateY, rotateZ);
// Keep the eye gaze pointed toward the camera
// eye_ball_X
live2DModel.PARAM_EYE_BALL_X = rotateX / 60f;
// eye_ball_Y
live2DModel.PARAM_EYE_BALL_Y = rotateY / 60f - 0.25f;
live2DModelUpdate (points);
break;
}
}
}
}
private void live2DModelUpdate (List<Vector2> points)
{
if (live2DModel != null) {
//live2DModel.PARAM_ANGLE.Set(-Roundf(rotateY, 0.5f), Roundf(rotateX, 0.5f), -Roundf(rotateZ, 0.5f));
// brow_L_Y
float brow_L_Y = getRaitoOfBROW_L_Y(points);
// Keep three decimal places to reduce the vibration
live2DModel.PARAM_BROW_L_Y = Roundf(brow_L_Y, 1000f);
//live2DModel.PARAM_BROW_L_Y = (float)Math.Round(brow_L_Y, 2);
// brow_R_Y
float brow_R_Y = getRaitoOfBROW_R_Y(points);
// Keep three decimal places to reduce the vibration
live2DModel.PARAM_BROW_R_Y = Roundf(brow_R_Y, 1000f);
//live2DModel.PARAM_BROW_R_Y = (float)Math.Round(brow_R_Y, 2);
// eye_open_L
float eyeOpen_L = getRaitoOfEyeOpen_L (points);
if (eyeOpen_L > 0.6f && eyeOpen_L < 1.1f)
eyeOpen_L = 1;
else if (eyeOpen_L >= 1.1f && brow_L_Y >= 0 )
eyeOpen_L = 2;
else if (eyeOpen_L <= 0.6f)
eyeOpen_L = 0;
live2DModel.PARAM_EYE_L_OPEN = eyeOpen_L;
// eye_open_R
float eyeOpen_R = getRaitoOfEyeOpen_R (points);
if (eyeOpen_R > 0.6f && eyeOpen_R < 1.1f)
eyeOpen_R = 1;
else if (eyeOpen_R >= 1.1f && brow_R_Y >= 0)
eyeOpen_R = 2;
else if (eyeOpen_R < 0.6f)
eyeOpen_R = 0;
live2DModel.PARAM_EYE_R_OPEN = eyeOpen_R;
// mouth_open
float mouthOpen = getRaitoOfMouthOpen_Y (points);
if (mouthOpen < 0.6f)
mouthOpen = 0;
live2DModel.PARAM_MOUTH_OPEN_Y = mouthOpen;
// mouth_size
float mouthSize = getRaitoOfMouthSize (points);
live2DModel.PARAM_MOUTH_SIZE = mouthSize;
}
}
// Keep decimal places to reduce the vibration
private float Roundf(float f, float multiple)
{
if (multiple == 0)
return f;
int i = (int)(f * multiple);
return i / multiple;
}
// Calculate the degree of eye opening
private float getRaitoOfEyeOpen_L (List<Vector2> points)
{
if (points.Count != 68)
throw new ArgumentNullException ("Invalid landmark_points.");
return Mathf.Clamp (Mathf.Abs (points [43].y - points [47].y) / (Mathf.Abs (points [43].x - points [44].x) * 0.75f), -0.1f, 2.0f);
}
private float getRaitoOfEyeOpen_R (List<Vector2> points)
{
if (points.Count != 68)
throw new ArgumentNullException ("Invalid landmark_points.");
return Mathf.Clamp (Mathf.Abs (points [38].y - points [40].y) / (Mathf.Abs (points [37].x - points [38].x) * 0.75f), -0.1f, 2.0f);
}
// Eyebrows move up and down
private float getRaitoOfBROW_L_Y (List<Vector2> points)
{
if (points.Count != 68)
throw new ArgumentNullException ("Invalid landmark_points.");
//float y = Mathf.Ceil(Mathf.Abs(points[24].y - points[27].y)) / Mathf.Abs (points [27].y - points [29].y);
float y = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
y -= 1;
y *= 4f;
return Mathf.Clamp (y, -1.0f, 1.0f);
}
private float getRaitoOfBROW_R_Y (List<Vector2> points)
{
if (points.Count != 68)
throw new ArgumentNullException ("Invalid landmark_points.");
//float y = Mathf.Ceil(Mathf.Abs(points[19].y - points[27].y)) / Mathf.Abs(points[27].y - points[29].y);
float y = Mathf.Abs(Mathf.Sqrt(Mathf.Pow(points[19].y - points[27].y,2)+Mathf.Pow(points[19].x - points[27].x,2))) / Mathf.Abs(Mathf.Sqrt(Mathf.Pow(points[27].y - points[29].y,2)+Mathf.Pow(points[27].x - points[29].x,2))) ;
Debug.Log("y是:"+y);
y -= 1;
y *= 4f;
return Mathf.Clamp (y, -1.0f, 1.0f);
}
// Calculate the degree of mouth opening
private float getRaitoOfMouthOpen_Y (List<Vector2> points)
{
if (points.Count != 68)
throw new ArgumentNullException ("Invalid landmark_points.");
return (float)((points[57].y - points[51].y) / (points[64].x - points[60].x) - 0.2);
//return Mathf.Clamp01 (Mathf.Abs (points [62].y - points [66].y) / (Mathf.Abs (points [51].y - points [62].y) + Mathf.Abs (points [66].y - points [57].y)));
}
// Calculate the width of the mouth
private float getRaitoOfMouthSize (List<Vector2> points)
{
if (points.Count != 68)
throw new ArgumentNullException ("Invalid landmark_points.");
float size = Mathf.Abs (points [48].x - points [54].x) / (Mathf.Abs (points [31].x - points [35].x) * 1.8f);
size -= 1;
size *= 4f;
return Mathf.Clamp (size, -1.0f, 1.0f);
}
/// <summary>
/// Raises the disable event.
/// </summary>
void OnDisable ()
{
if(webCamTextureToMatHelper != null) webCamTextureToMatHelper.Dispose ();
if(faceLandmarkDetector != null) faceLandmarkDetector.Dispose ();
if(frontalFaceParam != null) frontalFaceParam.Dispose ();
}
public void OnBackButton()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene("FacemojiStart");
#else
Application.LoadLevel("FacemojiStart");
#endif
}
/// <summary>
/// Raises the change camera button event.
/// </summary>
public void OnChangeCameraButton ()
{
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
}
public void OnStartButton()
{
startBtn.SetActive(false);
if (!Record.isRecording)
{
//Start recording
Record.isRecording = true;
finishBtn.SetActive(true);
}
}
public void OnFinishButton()
{
finishBtn.SetActive(false);
if (Record.isRecording)
{
//Finish recording
Record.isRecording = false;
startBtn.SetActive(true);
}
}
public void OnShizukuButton()
{
if(selectedTexture != WhichTexture.noir_santa)
{
selectedTexture = WhichTexture.noir_santa;
LoadTexture();
}
}
public void OnHaruButton()
{
if (selectedTexture != WhichTexture.uni)
{
selectedTexture = WhichTexture.uni;
LoadTexture();
}
}
}
}