public Decode ( int[] observations, bool logarithm, out double probability ) : int[]
observations | int[] | A sequence of observations. |
logarithm | bool | True to return the log-likelihood, false to return the likelihood. Default is false (the likelihood is returned). |
probability | double | The state-optimized probability of the decoded path. |
Returns | int[] | The most likely (Viterbi) sequence of hidden states for the given observations. |
private void btnModel_Click(object sender, EventArgs e)
{
    // Hand-crafted transition, emission and initial-state parameters.
    double[,] transition =
    {
        { 2.0 / 8, 1.0 / 8, 2.0 / 8, 3.0 / 8 },
        { 0,       0,       0,       1.0 / 8 },
        { 0,       0,       0,       0       },
        { 1,       0,       0,       0       },
    };

    double[,] emission =
    {
        { 2.0 / 8, 0, 0, 0, 0 },
        { 0,       0, 0, 0, 0 },
        { 0,       0, 1, 0, 0 },
        { 0,       1, 0, 1, 0 },
    };

    double[] start = { 1, 0, 0, 0 };

    var hmm = new HiddenMarkovModel(transition, emission, start, false);

    // Decode a single-observation sequence and capture its likelihood.
    double likelihood = 0d;
    int[] path = hmm.Decode(new[] { 1 }, out likelihood);
}
public void LearnTest4()
{
    // Nine short discrete sequences over a 6-symbol alphabet.
    int[][] sequences =
    {
        new[] { 0, 3, 1 },
        new[] { 0, 2 },
        new[] { 1, 0, 3 },
        new[] { 3, 4 },
        new[] { 0, 1, 3, 5 },
        new[] { 0, 3, 4 },
        new[] { 0, 1, 3, 5 },
        new[] { 0, 1, 3, 5 },
        new[] { 0, 1, 3, 4, 5 },
    };

    // 3-state model over 6 output symbols, trained with Viterbi learning.
    var hmm = new HiddenMarkovModel(3, 6);
    var teacher = new ViterbiLearning(hmm) { Iterations = 100, Tolerance = 0 };
    double ll = teacher.Run(sequences);

    // Viterbi log-likelihoods of the first three training sequences.
    double l0, l1, l2;
    hmm.Decode(sequences[0], out l0);
    hmm.Decode(sequences[1], out l1);
    hmm.Decode(sequences[2], out l2);

    // Convert the log-likelihoods back to probabilities before comparing.
    double pl = System.Math.Exp(ll);
    double p0 = System.Math.Exp(l0);
    double p1 = System.Math.Exp(l1);
    double p2 = System.Math.Exp(l2);

    Assert.AreEqual(0.078050218613091762, pl, 1e-10);
    Assert.AreEqual(0.008509757587448558, p0, 1e-10);
    Assert.AreEqual(0.010609567901234561, p1, 1e-10);
    Assert.AreEqual(0.008509757587448558, p2, 1e-10);
}
static void runArbitraryDensityHiddenMarkovModelExample()
{
    // Transition matrix A.
    double[,] transitions =
    {
        { 0.7, 0.3 },
        { 0.4, 0.6 }
    };

    // Vector of emission densities B.
    GeneralDiscreteDistribution[] emissions =
    {
        new GeneralDiscreteDistribution(0.1, 0.4, 0.5),
        new GeneralDiscreteDistribution(0.6, 0.3, 0.1)
    };

    // Initial probabilities pi.
    double[] initial = { 0.6, 0.4 };

    // Hidden Markov model with discrete emission probabilities.
    var hmm = new HiddenMarkovModel<GeneralDiscreteDistribution>(transitions, emissions, initial);

    // The sequence whose probability we will query.
    double[] sequence = { 0, 1, 2 };

    // Its log-likelihood under the model is -3.3928721329161653.
    double logLikelihood = hmm.Evaluate(sequence);
    Console.WriteLine("log-likelihood = {0}", logLikelihood);

    // The Viterbi path is 1-0-0 with log-likelihood -4.3095199438871337.
    int[] path = hmm.Decode(sequence, out logLikelihood);
    Console.Write("log-likelihood = {0}, Viterbi path = [", logLikelihood);
    foreach (int state in path)
        Console.Write("{0},", state);
    Console.WriteLine("]");
}
public void LearnTest3()
{
    // We will try to create a Hidden Markov Model which can detect
    // if a given sequence starts with a zero and has any number of
    // ones after that.
    int[][] sequences =
    {
        new[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
    };

    // 3 states over a two-character output alphabet (zero and one).
    var hmm = new HiddenMarkovModel(3, 2);

    // Fit until the average log-likelihood changes by less than 0.0001.
    var teacher = new BaumWelchLearning(hmm) { Tolerance = 0.0001, Iterations = 0 };
    double ll = teacher.Run(sequences);

    // Probability that the given sequences originated from the model.
    double l1; hmm.Decode(new[] { 0, 1 }, out l1);       // 0.4999
    double l2; hmm.Decode(new[] { 0, 1, 1, 1 }, out l2); // 0.1145

    // Sequences which do not start with zero have much lesser probability.
    double l3; hmm.Decode(new[] { 1, 1 }, out l3);       // 0.0000
    double l4; hmm.Decode(new[] { 1, 0, 0, 0 }, out l4); // 0.0000

    // Sequences containing a few errors still score higher than the ones
    // that do not start with zero, showing the temporal elasticity and
    // error tolerance of HMMs.
    double l5; hmm.Decode(new[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out l5); // 0.0002
    double l6; hmm.Decode(new[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out l6); // 0.0002

    // Convert all log-likelihoods to probabilities.
    ll = System.Math.Exp(ll);
    l1 = System.Math.Exp(l1);
    l2 = System.Math.Exp(l2);
    l3 = System.Math.Exp(l3);
    l4 = System.Math.Exp(l4);
    l5 = System.Math.Exp(l5);
    l6 = System.Math.Exp(l6);

    Assert.AreEqual(0.95151126952069587, ll, 1e-4);
    Assert.AreEqual(0.4999419764097881, l1, 1e-4);
    Assert.AreEqual(0.1145702973735144, l2, 1e-4);
    Assert.AreEqual(0.0000529972606821, l3, 1e-4);
    Assert.AreEqual(0.0000000000000001, l4, 1e-4);
    Assert.AreEqual(0.0002674509390361, l5, 1e-4);
    Assert.AreEqual(0.0002674509390361, l6, 1e-4);

    Assert.IsTrue(l1 > l3 && l1 > l4);
    Assert.IsTrue(l2 > l3 && l2 > l4);
}
public void LearnTest4()
{
    // Nine short discrete sequences over a 6-symbol alphabet.
    int[][] sequences =
    {
        new[] { 0, 3, 1 },
        new[] { 0, 2 },
        new[] { 1, 0, 3 },
        new[] { 3, 4 },
        new[] { 0, 1, 3, 5 },
        new[] { 0, 3, 4 },
        new[] { 0, 1, 3, 5 },
        new[] { 0, 1, 3, 5 },
        new[] { 0, 1, 3, 4, 5 },
    };

    // 3-state model over 6 output symbols, trained with Baum-Welch.
    var hmm = new HiddenMarkovModel(3, 6);
    var teacher = new BaumWelchLearning(hmm) { Iterations = 100, Tolerance = 0 };
    double ll = teacher.Run(sequences);

    // Viterbi log-likelihoods of the first three training sequences.
    double l0, l1, l2;
    hmm.Decode(sequences[0], out l0);
    hmm.Decode(sequences[1], out l1);
    hmm.Decode(sequences[2], out l2);

    // Convert the log-likelihoods back to probabilities before comparing.
    double pl = System.Math.Exp(ll);
    double p0 = System.Math.Exp(l0);
    double p1 = System.Math.Exp(l1);
    double p2 = System.Math.Exp(l2);

    Assert.AreEqual(0.49788370872923726, pl, 1e-10);
    Assert.AreEqual(0.014012065043262294, p0, 1e-10);
    Assert.AreEqual(0.016930905415294094, p1, 1e-10);
    Assert.AreEqual(0.001936595918966074, p2, 1e-10);
}
public void DecodeTest()
{
    // Example taken from http://en.wikipedia.org/wiki/Viterbi_algorithm

    // Create the transition matrix A
    double[,] transition =
    {
        { 0.7, 0.3 },
        { 0.4, 0.6 }
    };

    // Create the emission matrix B
    double[,] emission =
    {
        { 0.1, 0.4, 0.5 },
        { 0.6, 0.3, 0.1 }
    };

    // Create the initial probabilities pi
    double[] initial = { 0.6, 0.4 };

    // Create a new hidden Markov model
    HiddenMarkovModel hmm = new HiddenMarkovModel(transition, emission, initial);

    // After that, one could, for example, query the probability
    // of a sequence occurring. We will consider the sequence
    int[] sequence = new int[] { 0, 1, 2 };

    // And now we will evaluate its likelihood. At this point, the
    // log-likelihood of the sequence occurring within the model
    // is -3.3928721329161653.
    double logLikelihood = hmm.Evaluate(sequence);

    // We can also get the Viterbi path of the sequence. At this point,
    // the state path will be 1-0-0 and the log-likelihood will be
    // -4.3095199438871337.
    int[] path = hmm.Decode(sequence, out logLikelihood);

    // Assert.AreEqual takes (expected, actual) — pass the expected
    // value first so failure messages read correctly.
    Assert.AreEqual(Math.Log(0.01344), logLikelihood, 1e-10);
    Assert.AreEqual(1, path[0]);
    Assert.AreEqual(0, path[1]);
    Assert.AreEqual(0, path[2]);
}
public void DecodeTest()
{
    // Example taken from http://en.wikipedia.org/wiki/Viterbi_algorithm
    double[,] transition = { { 0.7, 0.3 }, { 0.4, 0.6 } };
    double[,] emission = { { 0.1, 0.4, 0.5 }, { 0.6, 0.3, 0.1 } };
    double[] initial = { 0.6, 0.4 };

    HiddenMarkovModel hmm = new HiddenMarkovModel(transition, emission, initial);

    // Decode the Viterbi path of the sequence { 0, 1, 2 }.
    double logLikelihood;
    int[] sequence = new int[] { 0, 1, 2 };
    int[] path = hmm.Decode(sequence, out logLikelihood);

    double expected = Math.Log(0.01344);

    // Assert.AreEqual takes (expected, actual) — pass the expected
    // value first so failure messages read correctly.
    Assert.AreEqual(expected, logLikelihood, 1e-10);
    Assert.AreEqual(1, path[0]);
    Assert.AreEqual(0, path[1]);
    Assert.AreEqual(0, path[2]);
}
public void LearnTest7()
{
    // Continuous sequences with two apparent states — one for values
    // between 0 and 1 and another for values between 5 and 7 — that
    // switch on every observation.
    double[][] sequences =
    {
        new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 },
        new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 },
        new[] { 0.1, 7.0, 0.1, 7.0, 0.2, 5.6 },
    };

    // Initial univariate Normal density for the samples.
    var density = new NormalDistribution();

    // Two-state ergodic continuous HMM with Normal emissions.
    var model = new HiddenMarkovModel<NormalDistribution>(new Ergodic(2), density);

    // Train until the average log-likelihood changes by less than 0.0001.
    var teacher = new BaumWelchLearning<NormalDistribution>(model)
    {
        Tolerance = 0.0001,
        Iterations = 0,
    };

    double logLikelihood = teacher.Run(sequences);

    // Log-probabilities of the learned sequences.
    double a1 = model.Evaluate(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 }); // -0.12799388666109757
    double a2 = model.Evaluate(new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 }); // 0.01171157434400194

    // Log-probability of an unrelated sequence.
    double a3 = model.Evaluate(new[] { 1.1, 2.2, 1.3, 3.2, 4.2, 1.0 }); // -298.7465244473417

    double likelihood = Math.Exp(logLikelihood);
    a1 = Math.Exp(a1); // 0.879
    a2 = Math.Exp(a2); // 1.011
    a3 = Math.Exp(a3); // 0.000

    // Decoding one of the sequences should yield { 0, 1, 0, 1, 0, 1 }.
    int[] states = model.Decode(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 });
    Assert.IsTrue(states.IsEqual(0, 1, 0, 1, 0, 1));

    Assert.AreEqual(1.1341500279562791, likelihood, 1e-10);
    Assert.AreEqual(0.8798587580029778, a1, 1e-10);
    Assert.AreEqual(1.0117804233450216, a2, 1e-10);
    Assert.AreEqual(1.8031545195073828E-130, a3, 1e-10);

    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(a1));
    Assert.IsFalse(double.IsNaN(a2));
    Assert.IsFalse(double.IsNaN(a3));

    // The learned emission densities should separate the two regimes.
    Assert.AreEqual(2, model.Emissions.Length);
    var state1 = model.Emissions[0] as NormalDistribution;
    var state2 = model.Emissions[1] as NormalDistribution;
    Assert.AreEqual(0.16666666666666, state1.Mean, 1e-10);
    Assert.AreEqual(6.11111111111111, state2.Mean, 1e-10);
    Assert.IsFalse(Double.IsNaN(state1.Mean));
    Assert.IsFalse(Double.IsNaN(state2.Mean));

    Assert.AreEqual(0.007499999999999, state1.Variance, 1e-10);
    Assert.AreEqual(0.538611111111111, state2.Variance, 1e-10);
    Assert.IsFalse(Double.IsNaN(state1.Variance));
    Assert.IsFalse(Double.IsNaN(state2.Variance));

    // The (log-domain) transition matrix should alternate states.
    Assert.AreEqual(2, model.Transitions.GetLength(0));
    Assert.AreEqual(2, model.Transitions.GetLength(1));

    var A = Matrix.Exp(model.Transitions);
    Assert.AreEqual(0, A[0, 0], 1e-16);
    Assert.AreEqual(1, A[0, 1], 1e-16);
    Assert.AreEqual(1, A[1, 0], 1e-16);
    Assert.AreEqual(0, A[1, 1], 1e-16);
    Assert.IsFalse(A.HasNaN());
}
public void DecodeTest5()
{
    // Two-state model over 3-dimensional multivariate Normal emissions.
    var density = new MultivariateNormalDistribution(3);
    var hmm = new HiddenMarkovModel<MultivariateNormalDistribution>(2, density);

    double logLikelihood;
    int[] path = hmm.Decode(new double[][]
    {
        new double[] { 0, 1, 2 },
        new double[] { 0, 1, 2 },
    }, out logLikelihood);

    // Compare with a tolerance (as the sibling tests do) rather than
    // exact double equality, which is fragile under floating-point
    // rounding differences.
    Assert.AreEqual(-11.206778379787982, logLikelihood, 1e-10);
}
public void DecodeIntegersTest()
{
    // Discrete-density HMM version of the Wikipedia Viterbi example,
    // decoded from an integer observation sequence.
    double[,] transitions = { { 0.7, 0.3 }, { 0.4, 0.6 } };
    GeneralDiscreteDistribution[] emissions =
    {
        new GeneralDiscreteDistribution(0.1, 0.4, 0.5),
        new GeneralDiscreteDistribution(0.6, 0.3, 0.1)
    };
    double[] initial = { 0.6, 0.4 };

    var hmm = new HiddenMarkovModel<GeneralDiscreteDistribution>(transitions, emissions, initial);

    int[] sequence = new int[] { 0, 1, 2 };
    double logLikelihood = hmm.Evaluate(sequence);

    // Decode overwrites logLikelihood with the Viterbi path likelihood.
    int[] path = hmm.Decode(sequence, out logLikelihood);

    // Assert.AreEqual takes (expected, actual) — pass the expected
    // value first so failure messages read correctly.
    Assert.AreEqual(Math.Log(0.01344), logLikelihood, 1e-10);
    Assert.AreEqual(1, path[0]);
    Assert.AreEqual(0, path[1]);
    Assert.AreEqual(0, path[2]);
}
public void DecodeTest()
{
    // Create the transition matrix A
    double[,] transitions =
    {
        { 0.7, 0.3 },
        { 0.4, 0.6 }
    };

    // Create the vector of emission densities B
    GeneralDiscreteDistribution[] emissions =
    {
        new GeneralDiscreteDistribution(0.1, 0.4, 0.5),
        new GeneralDiscreteDistribution(0.6, 0.3, 0.1)
    };

    // Create the initial probabilities pi
    double[] initial = { 0.6, 0.4 };

    // Create a new hidden Markov model with discrete probabilities
    var hmm = new HiddenMarkovModel<GeneralDiscreteDistribution>(transitions, emissions, initial);

    // After that, one could, for example, query the probability
    // of a sequence occurring. We will consider the sequence
    double[] sequence = new double[] { 0, 1, 2 };

    // And now we will evaluate its likelihood. At this point, the
    // log-likelihood of the sequence occurring within the model
    // is -3.3928721329161653.
    double logLikelihood = hmm.Evaluate(sequence);

    // We can also get the Viterbi path of the sequence. At this point,
    // the state path will be 1-0-0 and the log-likelihood will be
    // -4.3095199438871337.
    int[] path = hmm.Decode(sequence, out logLikelihood);

    // Assert.AreEqual takes (expected, actual) — pass the expected
    // value first so failure messages read correctly.
    Assert.AreEqual(Math.Log(0.01344), logLikelihood, 1e-10);
    Assert.AreEqual(1, path[0]);
    Assert.AreEqual(0, path[1]);
    Assert.AreEqual(0, path[2]);
}
public void LearnTest12()
{
    // Fit a hidden Markov model whose emission densities are mixtures
    // of Normal distributions to a set of six univariate sequences.
    double[][] sequences =
    {
        new[] { -0.223, -1.05, -0.574, 0.965, -0.448, 0.265, 0.087, 0.362, 0.717, -0.032 },
        new[] { -1.05, -0.574, 0.965, -0.448, 0.265, 0.087, 0.362, 0.717, -0.032, -0.346 },
        new[] { -0.574, 0.965, -0.448, 0.265, 0.087, 0.362, 0.717, -0.032, -0.346, -0.989 },
        new[] { 0.965, -0.448, 0.265, 0.087, 0.362, 0.717, -0.032, -0.346, -0.989, -0.619 },
        new[] { -0.448, 0.265, 0.087, 0.362, 0.717, -0.032, -0.346, -0.989, -0.619, 0.02 },
        new[] { 0.265, 0.087, 0.362, 0.717, -0.032, -0.346, -0.989, -0.619, 0.02, -0.297 },
    };

    // Initial Gaussian mixture; give the components distinct starting
    // parameters so the mixture can differentiate them.
    var density = new Mixture<NormalDistribution>(
        new NormalDistribution(mean: 2, stdDev: 1.0), // 1st component in the mixture
        new NormalDistribution(mean: 0, stdDev: 0.6), // 2nd component in the mixture
        new NormalDistribution(mean: 4, stdDev: 0.4), // 3rd component in the mixture
        new NormalDistribution(mean: 6, stdDev: 1.1)  // 4th component in the mixture
    );

    // Two forward-topology states with the mixture as emission density.
    var model = new HiddenMarkovModel<Mixture<NormalDistribution>>(new Forward(2), density);

    // Train until the average log-likelihood changes by less than 0.0001.
    // With so few data points a full mixture could degenerate quickly,
    // so regularize the inner Normal fitting and cap it at one iteration.
    var teacher = new BaumWelchLearning<Mixture<NormalDistribution>>(model)
    {
        Tolerance = 0.0001,
        Iterations = 0,
        FittingOptions = new MixtureOptions()
        {
            Iterations = 1, // limit the inner e-m to a single iteration
            InnerOptions = new NormalOptions()
            {
                Regularization = 1e-5 // specify a regularization constant
            }
        }
    };

    double logLikelihood = teacher.Run(sequences);

    // The likelihood of a sequence close to the training data…
    double[] newSequence = { -0.223, -1.05, -0.574, 0.965, -0.448, 0.265, 0.087, 0.362, 0.717, -0.032 };
    double a1 = Math.Exp(model.Evaluate(newSequence)); // 11729312967893.566

    int[] path = model.Decode(newSequence);

    // …is much larger than the likelihood of an unrelated sequence.
    double a3 = Math.Exp(model.Evaluate(new double[] { 8, 2, 6, 4, 1 })); // 0.0

    Assert.AreEqual(11729312967893.566, a1);
    Assert.AreEqual(0.0, a3);
    Assert.IsFalse(Double.IsNaN(a1));
    Assert.IsFalse(Double.IsNaN(a3));
}
static void runArbitraryDensityHiddenMarkovModelLearningExample()
{
    // Continuous sequences with two apparent states — one for values
    // between 0 and 1 and another for values between 5 and 7 — that
    // switch on every observation.
    double[][] observationSequences =
    {
        new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 },
        new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 },
        new[] { 0.1, 7.0, 0.1, 7.0, 0.2, 5.6 },
    };

    // Two-state ergodic continuous HMM with a univariate Normal
    // distribution as the underlying probability density.
    var hmm = new HiddenMarkovModel<NormalDistribution>(topology: new Ergodic(states: 2), emissions: new NormalDistribution());

    // Train until the average log-likelihood changes by less than 0.0001.
    var trainer = new BaumWelchLearning<NormalDistribution>(hmm)
    {
        Tolerance = 0.0001,
        Iterations = 0,
    };
    double averageLogLikelihood = trainer.Run(observationSequences);
    Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

    // Log-probabilities of the learned sequences.
    double logLik1 = hmm.Evaluate(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 }); // -0.12799388666109757.
    double logLik2 = hmm.Evaluate(new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 }); // 0.01171157434400194.

    // Log-probability of an unrelated sequence.
    double logLik3 = hmm.Evaluate(new[] { 1.1, 2.2, 1.3, 3.2, 4.2, 1.0 }); // -298.7465244473417.

    // Transform the log-probabilities to actual probabilities.
    Console.WriteLine("probability = {0}", Math.Exp(logLik1)); // 0.879.
    Console.WriteLine("probability = {0}", Math.Exp(logLik2)); // 1.011.
    Console.WriteLine("probability = {0}", Math.Exp(logLik3)); // 0.000.

    // Decode one of the sequences; the path will be { 0, 1, 0, 1, 0, 1 }.
    double logLikelihood = 0.0;
    int[] path = hmm.Decode(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 }, out logLikelihood);
    Console.Write("log-likelihood = {0}, Viterbi path = [", logLikelihood);
    foreach (int state in path)
        Console.Write("{0},", state);
    Console.WriteLine("]");
}
static void runDiscreteDensityHiddenMarkovModelExample()
{
    // Transition matrix A.
    double[,] transition =
    {
        { 0.7, 0.3 },
        { 0.4, 0.6 }
    };

    // Emission matrix B.
    double[,] emission =
    {
        { 0.1, 0.4, 0.5 },
        { 0.6, 0.3, 0.1 }
    };

    // Initial probabilities pi.
    double[] initial = { 0.6, 0.4 };

    // Discrete-density hidden Markov model.
    HiddenMarkovModel hmm = new HiddenMarkovModel(transition, emission, initial);

    // The sequence whose probability we will query.
    int[] sequence = { 0, 1, 2 };

    // Its log-likelihood under the model is -3.3928721329161653.
    double logLikelihood = hmm.Evaluate(sequence);
    Console.WriteLine("log-likelihood = {0}", logLikelihood);

    // The Viterbi path is 1-0-0 with log-likelihood -4.3095199438871337.
    int[] path = hmm.Decode(sequence, out logLikelihood);
    Console.Write("log-likelihood = {0}, Viterbi path = [", logLikelihood);
    foreach (int state in path)
        Console.Write("{0},", state);
    Console.WriteLine("]");
}
public void LearnTest10_Independent()
{
    // Two meteorological sensors gathering data over different periods
    // of the day. The measurements were repeated once, so there is a
    // single observation sequence.
    double[][][] data =
    {
        new double[][]
        {
            new double[] { 1, 2 }, // Day 1, 15:00 pm
            new double[] { 6, 7 }, // Day 1, 16:00 pm
            new double[] { 2, 3 }, // Day 1, 17:00 pm
            new double[] { 2, 2 }, // Day 1, 18:00 pm
            new double[] { 9, 8 }, // Day 1, 19:00 pm
            new double[] { 1, 0 }, // Day 1, 20:00 pm
            new double[] { 1, 3 }, // Day 1, 21:00 pm
            new double[] { 8, 9 }, // Day 1, 22:00 pm
            new double[] { 3, 3 }, // Day 1, 23:00 pm
            new double[] { 1, 3 }, // Day 2, 00:00 am
            new double[] { 1, 1 }, // Day 2, 01:00 am
        }
    };

    // Assume the sensors are unrelated (for simplicity), so each
    // variable gets its own Normal distribution and the joint density
    // is their independent product.
    NormalDistribution[] initialComponents =
    {
        new NormalDistribution(), // initial value for the first variable's distribution
        new NormalDistribution()  // initial value for the second variable's distribution
    };
    var density = new Independent<NormalDistribution>(initialComponents);

    // Two-state ergodic HMM with the independent Normal density.
    var model = new HiddenMarkovModel<Independent<NormalDistribution>>(new Ergodic(2), density);

    // Train until the average log-likelihood changes by less than 0.0001.
    var teacher = new BaumWelchLearning<Independent<NormalDistribution>>(model)
    {
        Tolerance = 0.0001,
        Iterations = 0,
    };

    double error = teacher.Run(data);

    // Hidden state associated with each observation.
    double logLikelihood; // log-likelihood of the Viterbi path
    int[] hiddenStates = model.Decode(data[0], out logLikelihood);

    Assert.AreEqual(-33.978800850637882, error);
    Assert.AreEqual(-33.9788008509802, logLikelihood);
    Assert.AreEqual(11, hiddenStates.Length);
}
public void LearnTest3()
{
    // We will try to create a Hidden Markov Model which can detect
    // if a given sequence starts with a zero and has any number of
    // ones after that.
    int[][] sequences =
    {
        new[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
        new[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
    };

    // 3 forward-topology states over a two-character alphabet.
    var hmm = new HiddenMarkovModel(new Forward(3), 2);

    // Fit with Viterbi learning until the average log-likelihood
    // changes by less than 0.0001.
    var teacher = new ViterbiLearning(hmm) { Tolerance = 0.0001, Iterations = 0 };
    double ll = teacher.Run(sequences);

    // Probability that the given sequences originated from the model.
    double l1; hmm.Decode(new[] { 0, 1 }, out l1);       // 0.5394
    double l2; hmm.Decode(new[] { 0, 1, 1, 1 }, out l2); // 0.4485

    // Sequences which do not start with zero have much lesser probability.
    double l3; hmm.Decode(new[] { 1, 1 }, out l3);       // 0.0864
    double l4; hmm.Decode(new[] { 1, 0, 0, 0 }, out l4); // 0.0004

    // Sequences containing a few errors still score higher than the ones
    // that do not start with zero, showing the temporal elasticity and
    // error tolerance of HMMs.
    double l5; hmm.Decode(new[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out l5); // 0.0154
    double l6; hmm.Decode(new[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out l6); // 0.0154

    // Convert all log-likelihoods to probabilities.
    ll = System.Math.Exp(ll);
    l1 = System.Math.Exp(l1);
    l2 = System.Math.Exp(l2);
    l3 = System.Math.Exp(l3);
    l4 = System.Math.Exp(l4);
    l5 = System.Math.Exp(l5);
    l6 = System.Math.Exp(l6);

    Assert.AreEqual(1.754393540912413, ll, 1e-6);
    Assert.AreEqual(0.53946360153256712, l1, 1e-6);
    Assert.AreEqual(0.44850249229903377, l2, 1e-6);
    Assert.AreEqual(0.08646414524833077, l3, 1e-6);
    Assert.AreEqual(0.00041152263374485, l4, 1e-6);
    Assert.AreEqual(0.01541807695931400, l5, 1e-6);
    Assert.AreEqual(0.01541807695931400, l6, 1e-6);

    Assert.IsTrue(l1 > l3 && l1 > l4);
    Assert.IsTrue(l2 > l3 && l2 > l4);
}
public void DecodeTest4()
{
    var density = new MultivariateNormalDistribution(3);
    var hmm = new HiddenMarkovModel<MultivariateNormalDistribution>(2, density);

    // Decoding a univariate sequence against a multivariate model is
    // invalid; verify that the call raises an exception.
    bool thrown = false;
    try
    {
        double logLikelihood;
        int[] path = hmm.Decode(new double[] { 0, 1, 2 }, out logLikelihood);
    }
    catch
    {
        thrown = true;
    }

    Assert.IsTrue(thrown);
}
public void DecodeTest2()
{
    // Generic discrete-density HMM over double observations, built from
    // the Wikipedia Viterbi example matrices.
    double[,] transitions = { { 0.7, 0.3 }, { 0.4, 0.6 } };
    double[,] emissions = { { 0.1, 0.4, 0.5 }, { 0.6, 0.3, 0.1 } };
    double[] initial = { 0.6, 0.4 };

    var hmm = new HiddenMarkovModel<GeneralDiscreteDistribution, double>(transitions, GeneralDiscreteDistribution.FromMatrix(emissions), initial);

    double logLikelihood;
    double[] sequence = new double[] { 0, 1, 2 };
    int[] path = hmm.Decode(sequence, out logLikelihood);

    double expected = Math.Log(0.01344);

    // Assert.AreEqual takes (expected, actual) — pass the expected
    // value first so failure messages read correctly.
    Assert.AreEqual(expected, logLikelihood, 1e-10);
    Assert.AreEqual(1, path[0]);
    Assert.AreEqual(0, path[1]);
    Assert.AreEqual(0, path[2]);
}