The ch6_slides are taken from the textbook
Hidden Markov Models for Time Series: An Introduction Using R.
If you are interested, the R code, data, and slides for each chapter can be downloaded from the authors' website.
The R package "HMM" works with the observations, transition probabilities, and emission probabilities; the approach is the same as in Matlab.
However, Matlab has the function hmmtrain(),
[guessTR,guessE,logliks] = hmmtrain(seqs,guessTR,guessE,varargin)
which also returns the log likelihood, so if the original poster uses Matlab it is worth a try.
In R you have to program this yourself from the observations, bw$hmm$transProbs, and bw$hmm$emissionProbs (see the sketch after the R output below).
######in R
library(HMM)
# Initial HMM: two hidden states A/B, two symbols L/R
hmm = initHMM(c("A","B"), c("L","R"),
              transProbs = matrix(c(.9,.1,.1,.9), 2),
              emissionProbs = matrix(c(.5,.51,.5,.49), 2))
print(hmm)
# Sequence of observations: two regimes with different L/R frequencies
a = sample(c(rep("L",100), rep("R",300)))
b = sample(c(rep("L",300), rep("R",100)))
observation = c(a,b)
# Baum-Welch, at most 10 iterations
bw = baumWelch(hmm, observation, 10)
print(bw$hmm)
#$transProbs
#    to
#from            A           B
#   A 9.974870e-01 0.002513041
#   B 5.296986e-06 0.999994703
#
#$emissionProbs
#       symbols
#states         L         R
#     A 0.2485981 0.7514019
#     B 0.7497916 0.2502084
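To get something like Matlab's logliks in R, here is a minimal sketch (my own, not from the book): forward() in the "HMM" package returns the forward probabilities on the natural-log scale, so the log likelihood of the fitted model is the log-sum-exp over states of the last column.

###### log likelihood in R (sketch)
logf = forward(bw$hmm, observation)                    # log forward probabilities, states x time
last = logf[, length(observation)]                     # log P(obs_1..T, state_T = i) for each state i
loglik = max(last) + log(sum(exp(last - max(last))))   # log-sum-exp over the states
print(loglik)

Re-computing this after every pass (e.g. calling baumWelch() with maxIterations = 1 in a loop) would reproduce the per-iteration logliks vector that hmmtrain() reports.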
%%%%%%% in Matlab
tr = [0.9 0.1; 0.1 0.9];        % initial guess for the transition probabilities
e  = [0.50 0.50; 0.51 0.49];    % initial guess for the emission probabilities
load('123_HMM.txt')             % the observation sequence (loaded as X123_HMM)
x = X123_HMM';                  % hmmtrain() expects the symbols as positive integers in a row vector
[guessTR,guessE,logliks] = hmmtrain(x,tr,e)
guessTR =
    0.9975    0.0025
    0.0000    1.0000
guessE =
    0.2486    0.7514
    0.7498    0.2502
logliks =
  -554.4527 -553.5753 -546.9715 -518.1480 -484.5900
  -471.5483 -463.1453 -457.1819 -454.6034 -454.2221
  -454.1987 -454.1975 -454.1974
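For completeness, a sketch of how the R observations above could be handed to Matlab. I am assuming here that 123_HMM.txt simply holds the same sequence recoded as symbol indices (L = 1, R = 2), which is the default coding hmmtrain() expects; the original post does not show how the file was created.

###### exporting the observations from R (sketch, assumed coding)
idx = ifelse(observation == "L", 1, 2)                     # assumed coding: L -> 1, R -> 2
write(idx, file = "123_HMM.txt", ncolumns = length(idx))   # one row of indices for Matlab's load()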