├── DHMM_Testing.py
├── Exam_4_25_2020.csv
├── HMM_Testing.py
├── Hidden_Markov_Model
│   ├── .ipynb_checkpoints
│   │   └── Hidden Markov Model Testing-checkpoint.ipynb
│   ├── DHMM.py
│   ├── HMM.py
│   ├── __init__.py
│   └── __pycache__
│       ├── DHMM.cpython-38.pyc
│       ├── HMM.cpython-38.pyc
│       ├── Preprocessing.cpython-38.pyc
│       └── __init__.cpython-38.pyc
├── License.txt
└── README.md

/DHMM_Testing.py:
--------------------------------------------------------------------------------
from Hidden_Markov_Model import *
from Hidden_Markov_Model.DHMM import *
import time

Start = time.time()
Train_ratio = 0.8   # fraction of cases used for training
Max_state = 3       # number of hidden states
Iter = 1000         # maximum EM iterations
Feat = 1            # dimension of each observation (univariate)
N = 2000            # number of cases (rows)
T = 50              # length of each time series (columns)
N_symb = 3          # number of observable symbols
Path = 'Path to CSV file'
Data = pd.read_csv(Path)
Data = Data.astype(int)
First_DHMM = Supervised_DHMM(Train_ratio, Max_state, Iter, Feat, N, T, Data, N_symb)
First_DHMM.Best_States()
END = time.time()
print('Total time taken in seconds:', END - Start)
--------------------------------------------------------------------------------
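DHMM_Testing.py expects the CSV at `Path` to hold N rows (cases) by T columns (time steps) of integer symbols in {0, ..., N_symb-1}. A minimal sketch for generating such a file follows — the filename and the uniform sampling are illustrative assumptions, not part of the repository:

```python
import numpy as np
import pandas as pd

# Hypothetical synthetic input for DHMM_Testing.py: 2000 cases, 50 steps,
# symbols drawn uniformly from {0, 1, 2}. The filename is an assumption.
N, T, N_symb = 2000, 50, 3
rng = np.random.default_rng(0)
pd.DataFrame(rng.integers(0, N_symb, size=(N, T))).to_csv('synthetic_discrete.csv', index=False)
```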
/HMM_Testing.py:
--------------------------------------------------------------------------------
# First prototyped as a Jupyter notebook.
from Hidden_Markov_Model import *
from Hidden_Markov_Model.HMM import *
import time

Start = time.time()
Train_ratio = 0.1   # fraction of cases used for training
Cov_Type = 'diag'   # covariance type for the Gaussian mixtures
Max_state = 3       # number of hidden states
Max_mixture = 4     # maximum number of mixture components to sweep
Iter = 1000         # maximum EM iterations
Feat = 1            # dimension of each observation (univariate)
N = 2000            # number of cases (rows)
T = 50              # length of each time series (columns)
flag = 1            # 1: rank state means in descending order; 0: ascending
Path = 'Path to CSV file'
Data = pd.read_csv(Path)
Exam_HMM = Supervised_HMM(Train_ratio, Cov_Type, Max_state, Max_mixture, Iter, Feat, N, T, Data, flag)
Exam_HMM.Best_States()
END = time.time()
print('Total time taken in seconds:', END - Start)
--------------------------------------------------------------------------------
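HMM_Testing.py expects real-valued observations in the same N x T layout. A hedged sketch for producing a toy continuous dataset — the two-regime Gaussian construction and the filename are assumptions for illustration only:

```python
import numpy as np
import pandas as pd

# Hypothetical synthetic input for HMM_Testing.py: two latent regimes with
# different means, so the fitted GMMHMM has structure to recover.
N, T = 2000, 50
rng = np.random.default_rng(0)
regimes = rng.integers(0, 2, size=(N, T))
obs = rng.normal(loc=5.0 * regimes, scale=1.0)  # higher mean in regime 1
pd.DataFrame(obs).to_csv('synthetic_continuous.csv', index=False)
```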
/Hidden_Markov_Model/.ipynb_checkpoints/Hidden Markov Model Testing-checkpoint.ipynb:
--------------------------------------------------------------------------------
{
 "cells": [],
 "metadata": {},
 "nbformat": 4,
 "nbformat_minor": 4
}
--------------------------------------------------------------------------------
/Hidden_Markov_Model/DHMM.py:
--------------------------------------------------------------------------------
from Hidden_Markov_Model import *

class DHMM_Model:
    def __init__(self, Train_ratio, Max_state, Iter, Feat, N, T, Data, N_symb):
        self.Train_ratio = Train_ratio
        self.Test_ratio = 1 - Train_ratio
        self.Max_state = Max_state
        self.Iter = Iter
        self.Model = []
        self.Feat = Feat
        self.N = N
        self.BEST = []
        self.T = T
        self.Data = Data
        self.N_symb = N_symb

    def __repr__(self):
        return f'''The Model has the following configuration:
        Model = {self.Model}
        Number of Features = {self.Feat}
        Training Ratio = {self.Train_ratio}
        Number of Cases = {self.N}
        Number of Iterations = {self.Iter}
        Maximum Number of Hidden States = {self.Max_state}
        Number of Free Parameters = {self.num_params}
        Best Parameters = {self.BEST}
        Length of Each Time Series = {self.T}
        '''

    def Dstate_sorting(self):
        # Rank hidden states by the emission probability of the last symbol
        # (the last column corresponds to the best observation), in descending
        # order: the higher the probability, the better the state.
        last_col = self.Model.emissionprob_[:, -1]  # choose the last column
        last_col = last_col.ravel().tolist()
        last_col_sorted = last_col[:]  # copy, so the original order is kept for lookup
        last_col_sorted.sort(reverse=True)
        result = [last_col.index(ii) for ii in last_col_sorted]
        return result

    def AIC_BIC(self):
        # Fit a model for each candidate number of states and record the
        # state count with the lowest AIC and BIC.
        AIC = []
        BIC = []
        Record2 = []
        self.Component2 = []
        Record_aic = np.zeros((1, 2))  # [optimal state, score]
        Record_bic = np.zeros((1, 2))
        for ii in range(2, self.Max_state + 1):
            print(f'Fitting model with {ii} states')
            # Free parameters of a discrete HMM with ii states and N_symb symbols;
            # this count could also be derived from transmat_ and emissionprob_.
            self.num_params = ii * (ii - 1) + ii * (self.N_symb - 1) + (ii - 1)
            Model = MultinomialHMM(n_components=ii, n_iter=self.Iter, tol=pow(10, -5)).fit(self.Train_Data, self.Len)
            AIC.append(-2 * Model.score(self.Train_Data) + 2 * self.num_params)
            BIC.append(-2 * Model.score(self.Train_Data) + self.num_params * np.log(self.Train_Data.shape[0]))

        Temp1 = np.argmin(AIC)
        opt_state = Temp1 + 2  # one state is not allowed, so index 0 means 2 states
        Record_aic[0, :] = np.array([opt_state, min(AIC)])
        Temp2 = np.argmin(BIC)
        opt_state = Temp2 + 2
        Record_bic[0, :] = np.array([opt_state, min(BIC)])
        Record2.append(Record_aic)
        Record2.append(Record_bic)
        self.Component2.append(Record2)

    def Best_BIC(self):
        Hold_bic = []
        Hold_state = []
        Hold_bic.append(self.Component2[0][1][0][1])
        Hold_state.append(self.Component2[0][1][0][0])
        self.BEST.append(Hold_state[Hold_bic.index(min(Hold_bic))])  # the best number of states

    def Viterbi_list(self):
        # Decode each row of Test_data with Viterbi, then relabel the states
        # so that the highest mastery level maps to the highest number.
        self.traj = []
        mapping = self.Dstate_sorting()  # state indices in descending order of quality
        Temp = mapping[:]
        for ii in range(self.Test_data.shape[0]):
            count10 = 1000  # temporary labels 1000, 999, ... avoid collisions with raw state indices
            seq1 = self.Test_data[ii, :].reshape((-1, self.Feat))
            States_Viterbi = self.Model.predict(seq1)
            L = len(mapping)  # number of states
            for kk in range(L):
                for jj in range(len(States_Viterbi)):
                    if States_Viterbi[jj] == mapping[0]:
                        States_Viterbi[jj] = count10
                del mapping[0]
                count10 = count10 - 1
            mapping = Temp[:]
            self.traj.append(States_Viterbi)
        count10 = 1000
        for mm in range(len(self.traj)):
            # Shift the temporary labels 1000, ..., 1000-L+1 down to L, ..., 1.
            ML_old = list(range(count10, count10 - L, -1))
            for zz in range(L):
                for vv in range(len(self.traj[mm])):
                    if self.traj[mm][vv] == ML_old[0]:
                        self.traj[mm][vv] = ML_old[0] - (count10 - L)
                del ML_old[0]

    def Best_States(self):
        self.Data_train = self.Data.iloc[0:int(self.N * self.Train_ratio), :]
        self.Test_data = np.array(self.Data.iloc[int(self.N * self.Train_ratio):self.N, :])
        self.Len = [self.T for ii in range(0, self.Data_train.shape[0])]  # lengths must be a list
        self.Train_Data = np.array(self.Data_train).reshape((-1, 1))  # numpy array with one column
        self.AIC_BIC()   # record candidate states with their AIC and BIC values
        self.Best_BIC()  # pick the state count with the lowest BIC
        self.Model = MultinomialHMM(n_components=int(self.BEST[0]), n_iter=self.Iter, tol=pow(10, -5)).fit(self.Train_Data, self.Len)
        self.score = self.Model.score(self.Train_Data, self.Len)
        self.Viterbi_list()
        return self.traj

class Supervised_DHMM(DHMM_Model):
    def AIC_BIC(self):
        # Supervised variant: the number of states is known, so a single model is fit.
        AIC = []
        BIC = []
        Record2 = []
        self.Component2 = []
        Record_aic = np.zeros((1, 2))
        Record_bic = np.zeros((1, 2))
        print(f'The number of states is {self.Max_state}')
        self.num_params = self.Max_state * (self.Max_state - 1) + self.Max_state * (self.N_symb - 1) + (self.Max_state - 1)
        Model = MultinomialHMM(n_components=self.Max_state, n_iter=self.Iter, tol=pow(10, -5)).fit(self.Train_Data, self.Len)
        AIC.append(-2 * Model.score(self.Train_Data) + 2 * self.num_params)
        BIC.append(-2 * Model.score(self.Train_Data) + self.num_params * np.log(self.Train_Data.shape[0]))
        opt_state = self.Max_state
        Record_aic[0, :] = np.array([opt_state, min(AIC)])
        Record_bic[0, :] = np.array([opt_state, min(BIC)])
        Record2.append(Record_aic)
        Record2.append(Record_bic)
        self.Component2.append(Record2)

    def __repr__(self):
        return f'''The Model has the following configuration:
        Model = {self.Model}
        Number of Features = {self.Feat}
        Training Ratio = {self.Train_ratio}
        Number of Cases = {self.N}
        Number of Iterations = {self.Iter}
        Number of Hidden States = {self.Max_state}
        Length of Each Time Series = {self.T}
        '''
--------------------------------------------------------------------------------
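DHMM.py scores each candidate with AIC = -2 log L + 2k and BIC = -2 log L + k ln(n), where k is the number of free parameters and n the number of training observations. As the comment inside `AIC_BIC` notes, k can be derived from a fitted model's attributes rather than hard-coded; a hedged sketch of that idea (`count_free_params` is a hypothetical helper, not part of the package):

```python
# Hypothetical helper: count the free parameters of a fitted hmmlearn
# MultinomialHMM from its attributes. Each row of a stochastic matrix
# must sum to 1, so every row loses one degree of freedom.
def count_free_params(model):
    n = model.transmat_.shape[0]      # number of hidden states
    m = model.emissionprob_.shape[1]  # number of observable symbols
    return n * (n - 1) + n * (m - 1) + (n - 1)  # transitions + emissions + starts
```

This reproduces the closed-form count used in `AIC_BIC`: ii*(ii-1) + ii*(N_symb-1) + (ii-1).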
/Hidden_Markov_Model/HMM.py:
--------------------------------------------------------------------------------
from Hidden_Markov_Model import *

class HMM_Model:
    def __init__(self, Train_ratio, Cov_Type, Max_state, Max_mixture, Iter, Feat, N, T, Data, flag):
        self.Train_ratio = Train_ratio
        self.Test_ratio = 1 - Train_ratio
        self.Cov_Type = Cov_Type
        self.Max_state = Max_state
        self.Max_mixture = Max_mixture
        self.Iter = Iter
        self.Model = []
        self.Feat = Feat
        self.N = N
        self.BEST = []
        self.T = T
        self.Data = Data
        self.flag = flag  # 1: rank state means in descending order; 0: ascending

    def State_sorting(self):
        # Rank hidden states by their mean, averaged over mixture components.
        list_mean = np.mean(self.Model.means_, axis=1)
        list_mean = list_mean.ravel().tolist()
        list_mean_sorted = list_mean[:]
        if self.flag == 1:
            list_mean_sorted.sort(reverse=True)
        elif self.flag == 0:
            list_mean_sorted.sort()
        result = [list_mean.index(i) for i in list_mean_sorted]
        return result

    def __repr__(self):
        return f'''The Model has the following configuration:
        Model = {self.Model}
        Number of Features = {self.Feat}
        Training Ratio = {self.Train_ratio}
        Number of Cases = {self.N}
        Covariance Type = {self.Cov_Type}
        Number of Iterations = {self.Iter}
        Maximum Number of Hidden States = {self.Max_state}
        Maximum Number of Mixture Components = {self.Max_mixture}
        Number of Free Parameters = {self.num_params}
        Best Parameters = {self.BEST}
        Length of Each Time Series = {self.T}
        '''

    def Viterbi_list(self):
        # Decode each row of Test_data with Viterbi, then relabel the states
        # so that the best state (per State_sorting) maps to the highest number.
        self.traj = []
        mapping = self.State_sorting()  # state indices in ranked order
        Temp = mapping[:]
        for ii in range(self.Test_data.shape[0]):
            count10 = 1000  # temporary labels avoid collisions with raw state indices
            seq1 = self.Test_data[ii, :].reshape((-1, self.Feat))
            States_Viterbi = self.Model.predict(seq1)
            L = len(mapping)  # number of states
            for kk in range(L):
                for jj in range(len(States_Viterbi)):
                    if States_Viterbi[jj] == mapping[0]:
                        States_Viterbi[jj] = count10
                del mapping[0]
                count10 = count10 - 1
            mapping = Temp[:]
            self.traj.append(States_Viterbi)
        count10 = 1000
        for mm in range(len(self.traj)):
            ML_old = list(range(count10, count10 - L, -1))  # e.g. 1000..996 with 5 states
            for zz in range(L):
                for vv in range(len(self.traj[mm])):
                    if self.traj[mm][vv] == ML_old[0]:
                        self.traj[mm][vv] = ML_old[0] - (count10 - L)
                del ML_old[0]

    def AIC_BIC(self):
        # Sweep states and mixture components, compute the log likelihood of
        # each fit, convert it to AIC and BIC, and keep the configuration with
        # the lowest score.
        #   Len is the length of each individual time series.
        #   Train_Data is the training data.
        #   Max_state is the maximum number of states to sweep.
        #   Max_mixture is the maximum number of mixture components to sweep.
        #   Feat is the dimension of the time series (1 for univariate).
        AIC = []
        BIC = []
        Record2 = []
        self.Component2 = []
        for ii in range(1, self.Max_mixture + 1):
            Record_aic = np.zeros((1, 2))
            Record_bic = np.zeros((1, 2))
            print(f'Sweeping states for mixture component {ii}')
            AIC.clear()
            BIC.clear()
            for jj in range(2, self.Max_state + 1):
                # Transitions + mixture weights + means + diagonal covariances.
                self.num_params = jj * (jj - 1) + jj * (ii - 1) + (ii * jj) * self.Feat + (jj * ii * self.Feat)
                Model = GMMHMM(n_components=jj, n_mix=ii, covariance_type=self.Cov_Type, params='stmcw', init_params='stmcw', tol=pow(10, -5), n_iter=self.Iter).fit(self.Train_Data, self.Len)
                AIC.append(-2 * Model.score(self.Train_Data) + 2 * self.num_params)
                BIC.append(-2 * Model.score(self.Train_Data) + self.num_params * np.log(self.Train_Data.shape[0]))
            Temp1 = np.argmin(AIC)
            opt_state = Temp1 + 2  # index 0 corresponds to 2 states
            Record_aic[0, :] = np.array([opt_state, min(AIC)])
            Temp2 = np.argmin(BIC)
            opt_state = Temp2 + 2
            Record_bic[0, :] = np.array([opt_state, min(BIC)])
            Record2.append(Record_aic)
            Record2.append(Record_bic)
            self.Component2.append(Record2)
            Record2 = []

    def Best_BIC(self):
        Hold_bic = []
        Hold_state = []
        for ii in range(len(self.Component2)):
            Hold_bic.append(self.Component2[ii][1][0][1])   # BIC value per mixture count
            Hold_state.append(self.Component2[ii][1][0][0])
        self.BEST.append(Hold_bic.index(min(Hold_bic)) + 1)            # best number of mixtures
        self.BEST.append(Hold_state[Hold_bic.index(min(Hold_bic))])    # best number of states

    def Best_States(self):
        self.Data_train = self.Data.iloc[0:int(self.N * self.Train_ratio), :]
        self.Test_data = np.array(self.Data.iloc[int(self.N * self.Train_ratio):self.N, :])
        self.Len = [self.T for ii in range(0, self.Data_train.shape[0])]  # lengths must be a list
        self.Train_Data = np.array(self.Data_train).reshape((-1, 1))  # numpy array with one column
        self.AIC_BIC()
        self.Best_BIC()
        self.Model = GMMHMM(n_components=int(self.BEST[1]), n_mix=int(self.BEST[0]), covariance_type=self.Cov_Type, params='stmcw', init_params='stmcw', tol=pow(10, -5), n_iter=self.Iter).fit(self.Train_Data, self.Len)
        self.score = self.Model.score(self.Train_Data, self.Len)
        self.Viterbi_list()
        return self.traj

class Supervised_HMM(HMM_Model):
    def AIC_BIC(self):
        # Supervised variant: the number of states is fixed at Max_state and
        # only the number of mixture components is swept.
        AIC = []
        BIC = []
        Record2 = []
        self.Component2 = []
        for ii in range(1, self.Max_mixture + 1):
            Record_aic = np.zeros((1, 2))
            Record_bic = np.zeros((1, 2))
            print(f'Sweeping mixture component {ii}')
            AIC.clear()
            BIC.clear()
            # Diagonal covariance; with a full covariance the last term would
            # be ((Feat**2 + Feat) / 2) * ii * Max_state instead.
            self.num_params = self.Max_state * (self.Max_state - 1) + self.Max_state * (ii - 1) + (ii * self.Max_state) * self.Feat + (self.Max_state * ii * self.Feat)
            Model = GMMHMM(n_components=self.Max_state, n_mix=ii, covariance_type=self.Cov_Type, params='stmcw', init_params='stmcw', tol=pow(10, -5), n_iter=self.Iter).fit(self.Train_Data, self.Len)
            AIC.append(-2 * Model.score(self.Train_Data) + 2 * self.num_params)
            BIC.append(-2 * Model.score(self.Train_Data) + self.num_params * np.log(self.Train_Data.shape[0]))
            opt_state = self.Max_state
            Record_aic[0, :] = np.array([opt_state, min(AIC)])
            Record_bic[0, :] = np.array([opt_state, min(BIC)])
            Record2.append(Record_aic)
            Record2.append(Record_bic)
            self.Component2.append(Record2)
            Record2 = []

    def __repr__(self):
        return f'''The Model has the following configuration:
        Model = {self.Model}
        Number of Features = {self.Feat}
        Training Ratio = {self.Train_ratio}
        Number of Cases = {self.N}
        Covariance Type = {self.Cov_Type}
        Number of Iterations = {self.Iter}
        Number of Hidden States = {self.Max_state}
        Maximum Number of Mixture Components = {self.Max_mixture}
        Length of Each Time Series = {self.T}
        Number of Free Parameters = {self.num_params}
        Best Parameters = {self.BEST}
        '''
--------------------------------------------------------------------------------
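The two-pass relabeling inside `Viterbi_list` (temporary labels 1000, 999, ... followed by a shift down to L, ..., 1) is equivalent to a single dictionary lookup. A hedged sketch of that same mapping — `relabel` is an illustration, not a drop-in replacement in the package:

```python
import numpy as np

def relabel(states, mapping):
    # mapping lists state indices from best to worst, so the state at rank 0
    # receives the highest label L and the state at rank L-1 receives 1.
    L = len(mapping)
    lookup = {state: L - rank for rank, state in enumerate(mapping)}
    return np.array([lookup[s] for s in states])

# Example: with mapping=[2, 0, 1], state 2 -> 3, state 0 -> 2, state 1 -> 1.
print(relabel(np.array([0, 2, 1, 2]), [2, 0, 1]))  # [2 3 1 3]
```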
/Hidden_Markov_Model/__init__.py:
--------------------------------------------------------------------------------
from hmmlearn.hmm import GMMHMM, GaussianHMM, MultinomialHMM
import pandas as pd
import numpy as np
from sklearn.cluster import KMeans
from sklearn import metrics
from sklearn.metrics import silhouette_score
from scipy.spatial.distance import cdist
import matplotlib.pyplot as plt
import math
--------------------------------------------------------------------------------
/Hidden_Markov_Model/__pycache__/DHMM.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manitadayon/Auto_HMM/ed463bc61011973da7748f3c6a8396251ebbe06e/Hidden_Markov_Model/__pycache__/DHMM.cpython-38.pyc
--------------------------------------------------------------------------------
/Hidden_Markov_Model/__pycache__/HMM.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manitadayon/Auto_HMM/ed463bc61011973da7748f3c6a8396251ebbe06e/Hidden_Markov_Model/__pycache__/HMM.cpython-38.pyc
--------------------------------------------------------------------------------
/Hidden_Markov_Model/__pycache__/Preprocessing.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manitadayon/Auto_HMM/ed463bc61011973da7748f3c6a8396251ebbe06e/Hidden_Markov_Model/__pycache__/Preprocessing.cpython-38.pyc
--------------------------------------------------------------------------------
/Hidden_Markov_Model/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manitadayon/Auto_HMM/ed463bc61011973da7748f3c6a8396251ebbe06e/Hidden_Markov_Model/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/License.txt:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2021 Manie Tadayon

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Auto_HMM
Hidden Markov Model

## If you would like to buy me a coffee

Buy Me A Coffee

**Auto HMM: Automatic Discrete and Continuous HMM, Including Model Selection**

[Description](#Description)

[Citation](#Citation)

[Features](#Features)

[Instruction](#Instruction)

[License](#License)

----

### **Description**

#### Python package that automatically performs model selection for discrete and continuous unsupervised HMMs.
---
### **Citation**

#### If you find this package useful, or if you use it in your research or work, please consider citing it as follows:
```
@article{tadayon2020comparative,
  title={Comparative analysis of the hidden markov model and lstm: A simulative approach},
  author={Tadayon, Manie and Pottie, Greg},
  journal={arXiv preprint arXiv:2008.03825},
  year={2020}
}
```
---
### **Instruction**

For more information, please go over the two examples (the HMM_Testing and DHMM_Testing files).
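A minimal supervised quick-start, mirroring DHMM_Testing.py (the CSV path is a placeholder; supply your own N x T file of integer symbols):

```python
import pandas as pd
from Hidden_Markov_Model.DHMM import Supervised_DHMM

Data = pd.read_csv('Path to CSV file').astype(int)  # placeholder path
# Arguments: Train_ratio, Max_state, Iter, Feat, N, T, Data, N_symb
model = Supervised_DHMM(0.8, 3, 1000, 1, 2000, 50, Data, 3)
trajectories = model.Best_States()  # decoded, ranked state trajectories
```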
For more information, please refer to my YouTube videos:

https://www.youtube.com/watch?v=xyMuhXxv6cc&ab_channel=AIandMLFundamentals

---

### **License**

This software is released under the MIT license.
--------------------------------------------------------------------------------