HMM code: forward algorithm, Viterbi algorithm, backward algorithm, and forward-backward (Baum-Welch) algorithm

typedef struct
{
  int N;      /* number of hidden states; Q = {1, 2, ..., N} */
  int M;      /* number of observation symbols; V = {1, 2, ..., M} */
  double **A; /* state transition matrix A[1..N][1..N]; A[i][j] is the probability of moving from state i at time t to state j at time t+1 */
  double **B; /* confusion (emission) matrix B[1..N][1..M]; B[j][k] is the probability of observing symbol k while in state j */
  double *pi; /* initial vector pi[1..N]; pi[i] is the initial state probability distribution */
} HMM;
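All of the routines below use 1-based indexing (A[1..N][1..N], B[1..N][1..M], alpha[1..T][1..N]), in the style of Numerical Recipes; the accompanying codebase has its own allocators (e.g. dvector). As a minimal, self-contained sketch of how such a struct could be filled in, the helpers alloc_vector1/alloc_matrix1 and the 2-state, 2-symbol toy model below are hypothetical and purely illustrative, not part of the original code:

#include <stdlib.h>

/* Hypothetical helper: allocate a vector usable with indices 1..n (index 0 unused). */
static double *alloc_vector1(int n)
{
  return (double *)calloc(n + 1, sizeof(double));
}

/* Hypothetical helper: allocate a matrix usable with indices [1..nr][1..nc]. */
static double **alloc_matrix1(int nr, int nc)
{
  int i;
  double **m = (double **)calloc(nr + 1, sizeof(double *));
  for (i = 1; i <= nr; i++)
    m[i] = alloc_vector1(nc);
  return m;
}

/* Build a toy 2-state, 2-symbol HMM with hand-picked parameters. */
static HMM make_toy_hmm(void)
{
  HMM h;
  h.N = 2;
  h.M = 2;
  h.A = alloc_matrix1(h.N, h.N);
  h.B = alloc_matrix1(h.N, h.M);
  h.pi = alloc_vector1(h.N);
  h.A[1][1] = 0.7; h.A[1][2] = 0.3;
  h.A[2][1] = 0.4; h.A[2][2] = 0.6;
  h.B[1][1] = 0.9; h.B[1][2] = 0.1;
  h.B[2][1] = 0.2; h.B[2][2] = 0.8;
  h.pi[1] = 0.6;   h.pi[2] = 0.4;
  return h;
}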

A sample implementation of the forward algorithm follows:
/*
 Parameters:
 *phmm: the given HMM model;  T: length of the observation sequence;
 *O: the observation sequence;  **alpha: partial probabilities;  *pprob: the resulting probability of the observation sequence
*/
void Forward(HMM *phmm, int T, int *O, double **alpha, double *pprob)
{
  int i, j;   /* state indices */
  int t;      /* time index */
  double sum; /* intermediate value when computing the partial probabilities */
  /* 1. Initialization: compute the partial probabilities of all states at t = 1: */
  for (i = 1; i <= phmm->N; i++)
    alpha[1][i] = phmm->pi[i]* phmm->B[i][O[1]];
  
  /* 2. Induction: recursively compute the partial probabilities at each time step t = 2, ..., T */
  for (t = 1; t < T; t++)
  {
    for (j = 1; j <= phmm->N; j++)
    {
      sum = 0.0;
      for (i = 1; i <= phmm->N; i++)
        sum += alpha[t][i]* (phmm->A[i][j]);
      alpha[t+1][j] = sum*(phmm->B[j][O[t+1]]);
    }
  }
  /* 3. Termination: the probability of the observation sequence equals the sum of all partial probabilities at time T */
  *pprob = 0.0;
  for (i = 1; i <= phmm->N; i++)
    *pprob += alpha[T][i];
}
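A minimal usage sketch for Forward, continuing with the toy model and hypothetical helpers from the sketch above (observation symbols are also 1-based, so O[0] is a dummy entry):

#include <stdio.h>

int main(void)
{
  HMM hmm = make_toy_hmm();       /* toy model from the sketch above */
  int T = 4;
  int O[] = {0, 1, 2, 2, 1};      /* observations O[1..T]; O[0] unused */
  double **alpha = alloc_matrix1(T, hmm.N);
  double prob;

  Forward(&hmm, T, O, alpha, &prob);
  printf("P(O | model) = %g\n", prob);
  return 0;
}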



Viterbi algorithm:
void Viterbi(HMM *phmm, int T, int *O, double **delta, int **psi, int *q, double *pprob)
{
  int i, j; /* state indices */
  int t; /* time index */
  int maxvalind;
  double maxval, val;
  /* 1. Initialization */
  for (i = 1; i <= phmm->N; i++)
  {
    delta[1][i] = phmm->pi[i] * (phmm->B[i][O[1]]);
    psi[1][i] = 0;
  }
  /* 2. Recursion */
  for (t = 2; t <= T; t++)
  {
    for (j = 1; j <= phmm->N; j++)
    {
      maxval = 0.0;
      maxvalind = 1;
      for (i = 1; i <= phmm->N; i++)
      {
        val = delta[t-1][i]*(phmm->A[i][j]);
        if (val > maxval)
        {
          maxval = val;
          maxvalind = i;
        }
      }
      delta[t][j] = maxval*(phmm->B[j][O[t]]);
      psi[t][j] = maxvalind;
    }
  }
  /* 3. Termination */
  *pprob = 0.0;
  q[T] = 1;
  for (i = 1; i <= phmm->N; i++)
  {
    if (delta[T][i] > *pprob)
    {
      *pprob = delta[T][i];
      q[T] = i;
    }
  }
  /* 4. Path (state sequence) backtracking */
  for (t = T - 1; t >= 1; t--)
    q[t] = psi[t+1][q[t+1]];
}
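Viterbi is called the same way; it fills q[1..T] with the most likely state sequence and *pprob with that path's probability. A minimal sketch, again relying on the hypothetical helpers above plus a 1-indexed int-matrix allocator:

/* Hypothetical helper: 1-indexed int matrix, mirroring alloc_matrix1 above. */
static int **alloc_imatrix1(int nr, int nc)
{
  int i;
  int **m = (int **)calloc(nr + 1, sizeof(int *));
  for (i = 1; i <= nr; i++)
    m[i] = (int *)calloc(nc + 1, sizeof(int));
  return m;
}

static void viterbi_demo(HMM *hmm, int T, int *O)
{
  double **delta = alloc_matrix1(T, hmm->N);
  int **psi = alloc_imatrix1(T, hmm->N);
  int *q = (int *)calloc(T + 1, sizeof(int)); /* q[1..T]; q[0] unused */
  double prob;
  int t;

  Viterbi(hmm, T, O, delta, psi, q, &prob);
  printf("best path probability = %g\nstates:", prob);
  for (t = 1; t <= T; t++)
    printf(" %d", q[t]);
  printf("\n");
}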


Backward algorithm:
void Backward(HMM *phmm, int T, int *O, double **beta, double *pprob)
{
  int i, j; /* state indices */
  int t; /* time index */
  double sum;
  /* 1. Initialization */
  for (i = 1; i <= phmm->N; i++)
    beta[T][i] = 1.0;
  /* 2. Induction */
  for (t = T - 1; t >= 1; t--)
  {
    for (i = 1; i <= phmm->N; i++)
    {
      sum = 0.0;
      for (j = 1; j <= phmm->N; j++)
        sum += phmm->A[i][j] *
              (phmm->B[j][O[t+1]])*beta[t+1][j];
      beta[t][i] = sum;
    }
  }
  /* 3. Termination: P(O|model) = sum over i of pi[i] * B[i][O[1]] * beta[1][i] */
  *pprob = 0.0;
  for (i = 1; i <= phmm->N; i++)
    *pprob += phmm->pi[i] * phmm->B[i][O[1]] * beta[1][i];
}
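With the pi-weighted termination above, Backward yields the same observation probability as Forward, which makes a handy sanity check; a minimal sketch using the helpers introduced earlier:

static void check_forward_backward(HMM *hmm, int T, int *O)
{
  double **alpha = alloc_matrix1(T, hmm->N);
  double **beta = alloc_matrix1(T, hmm->N);
  double pf, pb;

  Forward(hmm, T, O, alpha, &pf);
  Backward(hmm, T, O, beta, &pb);
  /* Up to floating-point rounding, pf and pb are both P(O | model). */
  printf("forward: %g  backward: %g\n", pf, pb);
}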


Forward-backward (Baum-Welch) algorithm:
void BaumWelch(HMM *phmm, int T, int *O, double **alpha, double **beta, double **gamma, int *pniter, double *plogprobinit, double *plogprobfinal)
{
  int i, j, k;
  int t, l = 0;
  double logprobf, logprobb, threshold;
  double numeratorA, denominatorA;
  double numeratorB, denominatorB;
  double ***xi, *scale;
  double delta, deltaprev, logprobprev;
  deltaprev = 10e-70;
  xi = AllocXi(T, phmm->N);
  scale = dvector(1, T);
  ForwardWithScale(phmm, T, O, alpha, scale, &logprobf);
  *plogprobinit = logprobf; /* log P(O | initial model) */
  BackwardWithScale(phmm, T, O, beta, scale, &logprobb);
  ComputeGamma(phmm, T, alpha, beta, gamma);
  ComputeXi(phmm, T, O, alpha, beta, xi);
  logprobprev = logprobf;
  do
  {
    /* reestimate frequency of state i in time t=1 */
    for (i = 1; i <= phmm->N; i++)
      phmm->pi[i] = .001 + .999*gamma[1][i];
    /* reestimate transition matrix and symbol prob in
        each state */
    for (i = 1; i <= phmm->N; i++)
    {
      denominatorA = 0.0;
      for (t = 1; t <= T - 1; t++)
        denominatorA += gamma[t][i];
      for (j = 1; j <= phmm->N; j++)
      {
        numeratorA = 0.0;
        for (t = 1; t <= T - 1; t++)
          numeratorA += xi[t][i][j];
        phmm->A[i][j] = .001 +
                 .999*numeratorA/denominatorA;
      }
      denominatorB = denominatorA + gamma[T][i];
      for (k = 1; k <= phmm->M; k++)
      {
        numeratorB = 0.0;
        for (t = 1; t <= T; t++)
        {
          if (O[t] == k)
            numeratorB += gamma[t][i];
        }
        phmm->B[i][k] = .001 +
                 .999*numeratorB/denominatorB;
      }
    }
    ForwardWithScale(phmm, T, O, alpha, scale, &logprobf);
    BackwardWithScale(phmm, T, O, beta, scale, &logprobb);
    ComputeGamma(phmm, T, alpha, beta, gamma);
    ComputeXi(phmm, T, O, alpha, beta, xi);
    /* compute difference between log probability of
      two iterations */
    delta = logprobf - logprobprev;
    logprobprev = logprobf;
    l++;
  }
  while (delta > DELTA); /* if log probability does not
              change much, exit */
  *pniter = l;
  *plogprobfinal = logprobf; /* log P(O|estimated model) */
  FreeXi(xi, T, phmm->N);
  free_dvector(scale, 1, T);
}
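BaumWelch depends on several helpers that are not shown here: ForwardWithScale, BackwardWithScale, ComputeGamma, ComputeXi, AllocXi, FreeXi, dvector, free_dvector, and the convergence constant DELTA; they are assumed to come from the same codebase. For reference, here is a minimal sketch of what ComputeGamma is expected to do (normalize alpha[t][i] * beta[t][i] over the states at each time step); the actual implementation may differ:

/* gamma[t][i] = P(state i at time t | O, model)
 *             = alpha[t][i] * beta[t][i] / sum_j alpha[t][j] * beta[t][j] */
void ComputeGamma(HMM *phmm, int T, double **alpha, double **beta, double **gamma)
{
  int i, t;
  double denominator;

  for (t = 1; t <= T; t++)
  {
    denominator = 0.0;
    for (i = 1; i <= phmm->N; i++)
    {
      gamma[t][i] = alpha[t][i] * beta[t][i];
      denominator += gamma[t][i];
    }
    for (i = 1; i <= phmm->N; i++)
      gamma[t][i] /= denominator;
  }
}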
