隐马尔可夫模型 —— hmmlearn 包(Python)

# coding=utf-8
import numpy as np
from hmmlearn import hmm

# Hidden states and observable symbols of the classic box-and-ball HMM.
# Fixed: "box3" -> "box 3" so the label is consistent with "box 1"/"box 2".
states = ["box 1", "box 2", "box 3"]
n_states = len(states)

observations = ["red", "white"]
n_observations = len(observations)

# Initial state distribution pi: P(first hidden state).
start_probability = np.array([0.2, 0.4, 0.4])

# State transition matrix A: row i is P(next state | current state i);
# each row sums to 1.
transition_probability = np.array([
    [0.5, 0.2, 0.3],
    [0.3, 0.5, 0.2],
    [0.2, 0.3, 0.5],
])

# Emission matrix B: row i is P(observation | state i); each row sums to 1.
emission_probability = np.array([
    [0.5, 0.5],
    [0.4, 0.6],
    [0.7, 0.3],
])
# Build the HMM directly from the known parameters (no training here).
# NOTE(review): hmmlearn >= 0.24 renamed the discrete-observation model to
# CategoricalHMM and repurposed MultinomialHMM — this code assumes an older
# release; confirm the installed version.
model = hmm.MultinomialHMM(n_components=n_states)
model.startprob_ = start_probability
model.transmat_ = transition_probability
model.emissionprob_ = emission_probability

# Observation sequence as symbol indices; hmmlearn expects a column
# vector of shape (n_samples, 1).
seen_list = [0, 1, 0, 0, 0, 0]
seen = np.array(seen_list).reshape(-1, 1)

# Decode the most likely hidden-state sequence for the observations
# (model.predict decodes states; the original comment mislabeled this as
# the forward algorithm). model.score is the forward algorithm: it returns
# the log-probability of the observation sequence under the model.
box2 = model.predict(seen)
print("The ball picked:", ", ".join(observations[i] for i in seen_list))
print("The hidden box", ", ".join(states[i] for i in box2))
print(model.score(seen))

# Use the Viterbi algorithm explicitly to find the most likely
# hidden-state path for the observation sequence, together with its
# log-probability.
print('-' * 100)
logprob, box = model.decode(seen, algorithm="viterbi")
print("The ball picked:", ", ".join(observations[i] for i in seen_list))
print("The hidden box", ", ".join(states[i] for i in box))

# Parameter estimation with Baum-Welch. Since Baum-Welch is EM-based it
# only finds a local optimum, so run the fit several times and compare
# scores to pick a better set of parameters. (The original repeated the
# fit/print stanza three times verbatim; a loop replaces the copy-paste.)
print('-' * 100)
model2 = hmm.MultinomialHMM(n_components=n_states, n_iter=20, tol=0.01)
X2 = np.array([[0, 1, 0, 1], [0, 0, 0, 1], [1, 0, 1, 1]])
for _ in range(3):
    # fit() re-initializes the parameters on each call (init_params
    # defaults to 'ste'), so every pass is a fresh EM run — presumably
    # the randomized emission init is what makes runs differ; verify
    # against the installed hmmlearn version.
    model2.fit(X2)
    print(model2.startprob_)
    print(model2.transmat_)
    print(model2.emissionprob_)
    print(model2.score(X2))

你可能感兴趣的:(机器学习算法,HMM,python库)