Skip to content

Commit 2178b9c

Browse files
add log version too
1 parent f3b0eac commit 2178b9c

File tree

1 file changed

+42
-2
lines changed

1 file changed

+42
-2
lines changed

hmm_class/hmmc_scaled_concat_diag.py

+42-2
Original file line numberDiff line numberDiff line change
@@ -195,6 +195,42 @@ def log_likelihood(self, x):
195195
alpha[t] = alpha_t_prime / scale[t]
196196
return np.log(scale).sum()
197197

198+
def get_state_sequence(self, x):
    """Return the most likely hidden-state sequence for the observed sequence x.

    Runs the Viterbi algorithm entirely in log space. Emissions are
    M-state, K-component Gaussian mixtures with weights self.R, means
    self.mu and (diagonal) covariances self.sigma.

    Parameters:
        x: observed sequence of length T (each x[t] is one observation).

    Returns:
        np.ndarray of shape (T,), dtype int32 — the Viterbi state path.
    """
    from scipy.special import logsumexp

    T = len(x)

    # Build the log emission matrix logB (M x T).
    # A mixture density is a SUM of weighted component densities:
    #   B[j,t] = sum_k R[j,k] * N(x[t]; mu[j,k], sigma[j,k])
    # so in log space the per-component log-densities must be combined
    # with logsumexp. Adding them (`logB[j,t] += p`) would compute the
    # log of the PRODUCT of the components, which is wrong.
    logB = np.zeros((self.M, T))
    for j in range(self.M):
        for t in range(T):
            component_logps = [
                np.log(self.R[j, k]) + mvn.logpdf(x[t], self.mu[j, k], self.sigma[j, k])
                for k in range(self.K)
            ]
            logB[j, t] = logsumexp(component_logps)

    # Viterbi recursion in log space
    delta = np.zeros((T, self.M))
    # psi holds backpointers (state indices), so keep it integer-typed
    psi = np.zeros((T, self.M), dtype=np.int32)

    # smooth pi in case any entry is exactly 0 (log(0) = -inf)
    pi = self.pi + 1e-10
    pi /= pi.sum()

    delta[0] = np.log(pi) + logB[:, 0]
    for t in range(1, T):
        for j in range(self.M):
            next_delta = delta[t - 1] + np.log(self.A[:, j])
            delta[t, j] = np.max(next_delta) + logB[j, t]
            psi[t, j] = np.argmax(next_delta)

    # backtrack from the most likely final state
    states = np.zeros(T, dtype=np.int32)
    states[T - 1] = np.argmax(delta[T - 1])
    for t in range(T - 2, -1, -1):
        states[t] = psi[t + 1, states[t + 1]]
    return states
233+
198234
def log_likelihood_multi(self, X):
    """Return a numpy array with the log-likelihood of every sequence in X."""
    return np.array(list(map(self.log_likelihood, X)))
200236

@@ -247,7 +283,11 @@ def fake_signal(init=big_init):
247283
L = hmm.log_likelihood_multi(signals).sum()
248284
print("LL for actual params:", L)
249285

286+
# print most likely state sequence
287+
print("Most likely state sequence for initial observation:")
288+
print(hmm.get_state_sequence(signals[0]))
289+
250290
# Entry point. Per this commit's diff, the demo was switched from
# real_signal() (old lines, "-") to fake_signal() on synthetic data
# (new lines, "+"); real_signal() is left commented out.
if __name__ == '__main__':
251-
real_signal()
252-
# fake_signal()
291+
# real_signal()
292+
fake_signal()
253293

0 commit comments

Comments
 (0)