Example #1
        public void DecodeTest2()
        {
            double[,] transitions =
            {
                { 0.7, 0.3 },
                { 0.4, 0.6 }
            };

            double[,] emissions =
            {
                { 0.1, 0.4, 0.5 },
                { 0.6, 0.3, 0.1 }
            };

            double[] initial =
            {
                0.6, 0.4
            };

            var hmm = HiddenMarkovModel.CreateGeneric(transitions, emissions, initial);

            double logLikelihood;

            double[] sequence = new double[] { 0, 1, 2 };
            int[]    path     = hmm.Decode(sequence, out logLikelihood);

            double expected = Math.Log(0.01344);

            Assert.AreEqual(expected, logLikelihood, 1e-10);
            Assert.AreEqual(1, path[0]);
            Assert.AreEqual(0, path[1]);
            Assert.AreEqual(0, path[2]);
        }
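The expected value above can be verified by hand: the joint probability of the decoded path { 1, 0, 0 } together with the observations { 0, 1, 2 } is the product of the corresponding initial, emission and transition terms. A minimal sketch, reusing the matrices defined in the test:

            // pi[1]*B[1,0] * A[1,0]*B[0,1] * A[0,0]*B[0,2]
            //  = 0.4*0.6   * 0.4*0.4       * 0.7*0.5       = 0.01344
            double pathProbability =
                initial[1] * emissions[1, 0] *          // start in state 1, emit symbol 0
                transitions[1, 0] * emissions[0, 1] *   // move to state 0, emit symbol 1
                transitions[0, 0] * emissions[0, 2];    // stay in state 0, emit symbol 2

            // Math.Log(pathProbability) matches the expected log-likelihood above.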
Example #2
        public void LearnTest3()
        {
            double[][] sequences = new double[][]
            {
                new double[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Creates a new Hidden Markov Model with 3 states
            var hmm = HiddenMarkovModel.CreateGeneric(3, 2);

            // Try to fit the model to the data until the average
            //  log-likelihood changes by less than 0.0001
            var teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
            {
                Tolerance = 0.0001
            };
            double ll = teacher.Run(sequences);

            // Calculate the log-likelihood of the most likely
            //  (Viterbi) state path for each given sequence
            double l1; hmm.Decode(new double[] { 0, 1 }, out l1);                      // 0.4999
            double l2; hmm.Decode(new double[] { 0, 1, 1, 1 }, out l2);                // 0.1145

            double l3; hmm.Decode(new double[] { 1, 1 }, out l3);                      // 0.0000
            double l4; hmm.Decode(new double[] { 1, 0, 0, 0 }, out l4);                // 0.0000

            double l5; hmm.Decode(new double[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out l5); // 0.0002
            double l6; hmm.Decode(new double[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out l6); // 0.0002


            ll = System.Math.Exp(ll);
            l1 = System.Math.Exp(l1);
            l2 = System.Math.Exp(l2);
            l3 = System.Math.Exp(l3);
            l4 = System.Math.Exp(l4);
            l5 = System.Math.Exp(l5);
            l6 = System.Math.Exp(l6);

            Assert.AreEqual(0.95151018769760853, ll, 1e-4);
            Assert.AreEqual(0.4999419764097881, l1, 1e-4);
            Assert.AreEqual(0.1145702973735144, l2, 1e-4);
            Assert.AreEqual(0.0000529972606821, l3, 1e-4);
            Assert.AreEqual(0.0000000000000001, l4, 1e-4);
            Assert.AreEqual(0.0002674509390361, l5, 1e-4);
            Assert.AreEqual(0.0002674509390361, l6, 1e-4);

            Assert.IsTrue(l1 > l3 && l1 > l4);
            Assert.IsTrue(l2 > l3 && l2 > l4);

            Assert.AreEqual(1, hmm.Dimension);
        }
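Note that Decode returns the log-likelihood of the single most likely (Viterbi) state path, while Evaluate (used in later examples) sums over all possible state paths, so the two numbers differ for the same sequence. A minimal sketch of the distinction, assuming the hmm trained above:

            double viterbiLogLikelihood;  // log P(observations, best state path)
            int[] bestPath = hmm.Decode(new double[] { 0, 1 }, out viterbiLogLikelihood);

            double fullLogLikelihood = hmm.Evaluate(new double[] { 0, 1 }); // log P(observations)

            // The full likelihood is never smaller than the best-path likelihood,
            // because it also accounts for every other possible state path.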
Example #3
        public void PredictTest3()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            double[][] sequences =
            {
                new double[] { 1, 2, 3, 4, 5 },
                new double[] { 1, 2, 4, 3, 5 },
                new double[] { 1, 2, 5 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six characters.
            var hmm = HiddenMarkovModel.CreateGeneric(new Forward(4), 6);

            // Try to fit the model to the data until the average
            //  log-likelihood changes by less than 0.0001
            var teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            double[] input = { 1, 2 }; // base sequence for prediction


            // Predict the next observation in sequence
            Mixture<GeneralDiscreteDistribution> mixture = null;

            double prediction = hmm.Predict(input, out mixture);


            // At this point, the prediction probabilities
            // should be spread roughly evenly over 3, 4 and 5
            Assert.AreEqual(4, mixture.Mean, 0.1);
            Assert.IsFalse(double.IsNaN(mixture.Mean));


            double[] input2 = { 1 };

            // The only possible value after 1 must be 2.
            prediction = hmm.Predict(input2, out mixture);

            Assert.AreEqual(2, prediction);
        }
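The mixture returned by Predict describes the distribution of the next observation: each component is one state's emission distribution and each coefficient is the corresponding next-state weight (PredictTest further below inspects them directly). A minimal sketch, assuming the hmm and mixture from the test above:

            // The expected next observation is the mean of the mixture
            double expectedNext = mixture.Mean; // close to 4 for the input { 1, 2 }

            // Each coefficient weights one hidden state's emission distribution
            for (int i = 0; i < mixture.Coefficients.Length; i++)
                Console.WriteLine("state {0}: weight {1:F4}", i, mixture.Coefficients[i]);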
        public static HiddenMarkovModel<GeneralDiscreteDistribution> CreateModel4()
        {
            double[] initial = { 0.5, 0.5 };

            double[,] transitions =
            {
                { 0.5, 0.5 },
                { 0.4, 0.6 },
            };

            double[,] emissions =
            {
                //         A    C    G    T
                /* H */ { 0.2, 0.3, 0.3, 0.2 },
                /* L */ { 0.3, 0.2, 0.2, 0.3 },
            };

            return HiddenMarkovModel.CreateGeneric(transitions, emissions, initial);
        }
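CreateModel4 builds the classic two-state nucleotide example: hidden states H and L emitting the four symbols A, C, G and T, encoded as 0 to 3 in the comment above. A minimal usage sketch with a made-up input sequence (G, G, C, A):

            var dnaModel = CreateModel4();

            // Decode the most likely H/L state path for G=2, G=2, C=1, A=0
            double logLikelihood;
            int[] statePath = dnaModel.Decode(new double[] { 2, 2, 1, 0 }, out logLikelihood);
            // statePath[i] == 0 corresponds to state H, 1 to state L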
        public void LearnTest5()
        {
            double[][][] sequences = new double[][][]
            {
                new double[][] { new double[] { 0 }, new double[] { 3 }, new double[] { 1 } },
                new double[][] { new double[] { 0 }, new double[] { 2 } },
                new double[][] { new double[] { 1 }, new double[] { 0 }, new double[] { 3 } },
                new double[][] { new double[] { 3 }, new double[] { 4 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 5 } },
                new double[][] { new double[] { 0 }, new double[] { 3 }, new double[] { 4 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 5 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 5 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 4 }, new double[] { 5 } },
            };

            var hmm = HiddenMarkovModel.CreateGeneric(3, 6);

            var teacher = new ViterbiLearning<GeneralDiscreteDistribution>(hmm)
            {
                Iterations = 100, Tolerance = 0
            };
            double ll = teacher.Run(sequences);

            double l0; hmm.Decode(sequences[0], out l0);
            double l1; hmm.Decode(sequences[1], out l1);
            double l2; hmm.Decode(sequences[2], out l2);

            double pl = System.Math.Exp(ll);
            double p0 = System.Math.Exp(l0);
            double p1 = System.Math.Exp(l1);
            double p2 = System.Math.Exp(l2);

            Assert.AreEqual(0.077427215162407442, pl, 1e-6);
            Assert.AreEqual(0.009958847736625515, p0, 1e-6);
            Assert.AreEqual(0.006790123456790126, p1, 1e-6);
            Assert.AreEqual(0.009958847736625515, p2, 1e-6);

            Assert.AreEqual(1, hmm.Dimension);



            double[][] sequences2 = new double[][]
            {
                new double[] { 0, 3, 1 },
                new double[] { 0, 2 },
                new double[] { 1, 0, 3 },
                new double[] { 3, 4 },
                new double[] { 0, 1, 3, 5 },
                new double[] { 0, 3, 4 },
                new double[] { 0, 1, 3, 5 },
                new double[] { 0, 1, 3, 5 },
                new double[] { 0, 1, 3, 4, 5 },
            };

            hmm = HiddenMarkovModel.CreateGeneric(3, 6);

            teacher = new ViterbiLearning<GeneralDiscreteDistribution>(hmm)
            {
                Iterations = 100
            };
            double ll2 = teacher.Run(sequences2);

            double l02; hmm.Decode(sequences2[0], out l02);
            double l12; hmm.Decode(sequences2[1], out l12);
            double l22; hmm.Decode(sequences2[2], out l22);

            Assert.AreEqual(ll, ll2);
            Assert.AreEqual(l0, l02);
            Assert.AreEqual(l1, l12);
            Assert.AreEqual(l2, l22);

            Assert.AreEqual(1, hmm.Dimension);
        }
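Because the observations here are one-dimensional, the jagged double[][][] form (each observation wrapped in a one-element vector) and the flat double[][] form describe exactly the same data, which is why the asserts above expect identical likelihoods from both runs. A minimal sketch of the conversion, assuming the sequences2 array from the test:

            // Wrap each scalar observation in a one-element vector to obtain
            // the multivariate representation used in the first half of the test.
            double[][][] wrapped = new double[sequences2.Length][][];
            for (int i = 0; i < sequences2.Length; i++)
            {
                wrapped[i] = new double[sequences2[i].Length][];
                for (int j = 0; j < sequences2[i].Length; j++)
                    wrapped[i][j] = new double[] { sequences2[i][j] };
            }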
        public void LearnTest6()
        {
            // Continuous Markov Models can operate using any
            // probability distribution, including discrete ones.

            // In the following example, we will try to create a
            // Continuous Hidden Markov Model using a discrete
            // distribution to detect if a given sequence starts
            // with a zero and has any number of ones after that.

            double[][] sequences = new double[][]
            {
                new double[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Create a new Hidden Markov Model with 3 states and
            //  a generic discrete distribution with two symbols
            var hmm = HiddenMarkovModel.CreateGeneric(new Forward(3), 2);

            // Try to fit the model to the data until the average
            //  log-likelihood changes by less than 0.0001
            var teacher = new ViterbiLearning<GeneralDiscreteDistribution>(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0,

                FittingOptions = new GeneralDiscreteOptions()
                {
                    UseLaplaceRule = true
                }
            };

            double ll = teacher.Run(sequences);

            // Calculate the probability that the given
            //  sequences originated from the model
            double l1 = hmm.Evaluate(new double[] { 0, 1 });       // 0.613
            double l2 = hmm.Evaluate(new double[] { 0, 1, 1, 1 }); // 0.500

            // Sequences which do not start with zero have a much lower probability.
            double l3 = hmm.Evaluate(new double[] { 1, 1 });       // 0.186
            double l4 = hmm.Evaluate(new double[] { 1, 0, 0, 0 }); // 0.003

            // Sequences which contain a few errors still have a higher probability
            //  than the ones which do not start with zero. This shows some
            //  of the temporal elasticity and error tolerance of the HMMs.
            double l5 = hmm.Evaluate(new double[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }); // 0.033
            double l6 = hmm.Evaluate(new double[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }); // 0.026


            double pl = System.Math.Exp(ll);
            double p1 = System.Math.Exp(l1);
            double p2 = System.Math.Exp(l2);
            double p3 = System.Math.Exp(l3);
            double p4 = System.Math.Exp(l4);
            double p5 = System.Math.Exp(l5);
            double p6 = System.Math.Exp(l6);

            Assert.AreEqual(1.754393540912413, pl, 1e-6);
            Assert.AreEqual(0.61368718756104801, p1, 1e-6);
            Assert.AreEqual(0.50049466955818356, p2, 1e-6);
            Assert.AreEqual(0.18643340385264684, p3, 1e-6);
            Assert.AreEqual(0.00300262431355424, p4, 1e-6);
            Assert.AreEqual(0.03338686211012481, p5, 1e-6);
            Assert.AreEqual(0.02659161933179825, p6, 1e-6);

            Assert.IsFalse(Double.IsNaN(ll));
            Assert.IsFalse(Double.IsNaN(l1));
            Assert.IsFalse(Double.IsNaN(l2));
            Assert.IsFalse(Double.IsNaN(l3));
            Assert.IsFalse(Double.IsNaN(l4));
            Assert.IsFalse(Double.IsNaN(l5));
            Assert.IsFalse(Double.IsNaN(l6));

            Assert.IsTrue(l1 > l3 && l1 > l4);
            Assert.IsTrue(l2 > l3 && l2 > l4);
        }
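The UseLaplaceRule option applies additive (add-one) smoothing when the discrete emission distributions are re-estimated, so no symbol ever ends up with exactly zero probability. A conceptual sketch of add-one smoothing itself, not Accord's internal implementation:

        // Laplace (add-one) smoothing over K symbols:
        //   p(symbol) = (count[symbol] + 1) / (total + K)
        public static double[] LaplaceSmooth(int[] counts)
        {
            int total = 0;
            foreach (int c in counts) total += c;

            double[] p = new double[counts.Length];
            for (int i = 0; i < counts.Length; i++)
                p[i] = (counts[i] + 1.0) / (total + counts.Length);
            return p;
        }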
        public void LearnTest3()
        {
            double[][] sequences = new double[][]
            {
                new double[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Creates a new Hidden Markov Model with 3 states
            var hmm = HiddenMarkovModel.CreateGeneric(new Forward(3), 2);

            // Try to fit the model to the data until the average
            //  log-likelihood changes by less than 0.0001
            var teacher = new ViterbiLearning<GeneralDiscreteDistribution>(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0,

                FittingOptions = new GeneralDiscreteOptions()
                {
                    UseLaplaceRule = true
                }
            };


            double ll = teacher.Run(sequences);

            // Calculate the log-likelihood of the most likely
            //  (Viterbi) state path for each given sequence
            double l1; hmm.Decode(new double[] { 0, 1 }, out l1);                      // 0.4999
            double l2; hmm.Decode(new double[] { 0, 1, 1, 1 }, out l2);                // 0.1145

            double l3; hmm.Decode(new double[] { 1, 1 }, out l3);                      // 0.0000
            double l4; hmm.Decode(new double[] { 1, 0, 0, 0 }, out l4);                // 0.0000

            double l5; hmm.Decode(new double[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out l5); // 0.0002
            double l6; hmm.Decode(new double[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out l6); // 0.0002


            ll = System.Math.Exp(ll);
            l1 = System.Math.Exp(l1);
            l2 = System.Math.Exp(l2);
            l3 = System.Math.Exp(l3);
            l4 = System.Math.Exp(l4);
            l5 = System.Math.Exp(l5);
            l6 = System.Math.Exp(l6);

            Assert.AreEqual(1.754393540912413, ll, 1e-6);
            Assert.AreEqual(0.53946360153256712, l1, 1e-6);
            Assert.AreEqual(0.44850249229903377, l2, 1e-6);
            Assert.AreEqual(0.08646414524833077, l3, 1e-6);
            Assert.AreEqual(0.00041152263374485, l4, 1e-6);
            Assert.AreEqual(0.01541807695931400, l5, 1e-6);
            Assert.AreEqual(0.01541807695931400, l6, 1e-6);

            Assert.IsTrue(l1 > l3 && l1 > l4);
            Assert.IsTrue(l2 > l3 && l2 > l4);

            Assert.AreEqual(1, hmm.Dimension);
        }
        public void LearnTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);
            double[][][] sequences = new double[500][][];
            for (int i = 0; i < sequences.Length; i++)
            {
                sequences[i] = new double[Accord.Math.Tools.Random.Next(20, 80)][];

                int start = Accord.Math.Tools.Random.Next();

                for (int j = 0; j < sequences[i].Length; j++)
                {
                    double s = Math.Sin(j + start);
                    double u = ((s + 1) / 2.0);
                    sequences[i][j] = new double[] { (int)(u * 10) };
                }
            }

            HiddenMarkovModel<GeneralDiscreteDistribution> hmm1;
            double ll1;

            {
                Accord.Math.Tools.SetupGenerator(0);
                hmm1 = HiddenMarkovModel.CreateGeneric(10, 10, true);
                var teacher = new ViterbiLearning<GeneralDiscreteDistribution>(hmm1)
                {
                    Iterations     = 1,
                    Tolerance      = 1e-15,
                    Batches        = 1,
                    UseLaplaceRule = true,
                    FittingOptions = new GeneralDiscreteOptions
                    {
                        UseLaplaceRule = true
                    }
                };
                ll1 = teacher.Run(sequences);
            }

            HiddenMarkovModel<GeneralDiscreteDistribution> hmm10;
            double ll10;

            {
                Accord.Math.Tools.SetupGenerator(0);
                hmm10 = HiddenMarkovModel.CreateGeneric(10, 10, true);

                var teacher = new ViterbiLearning<GeneralDiscreteDistribution>(hmm10)
                {
                    Iterations     = 100,
                    Tolerance      = 1e-15,
                    Batches        = 1,
                    UseLaplaceRule = true,
                    FittingOptions = new GeneralDiscreteOptions
                    {
                        UseLaplaceRule = true
                    }
                };

                ll10 = teacher.Run(sequences);
            }

            Assert.IsTrue(ll10 > ll1);
            Assert.AreNotEqual(ll1, ll10, 10);

            // Those results must match the ones in ViterbiLearningTest.
            Assert.AreEqual(-33.834836461044411, ll1);
            Assert.AreEqual(-23.362967205628703, ll10);

            Assert.IsFalse(AreEqual(hmm1, hmm10));
        }
Example #9
        public void PredictTest()
        {
            double[][] sequences = new double[][]
            {
                new double[] { 0, 3, 1, 2 },
            };


            var hmm = HiddenMarkovModel.CreateGeneric(new Forward(4), 4);

            var teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
            {
                Tolerance  = 1e-10,
                Iterations = 0
            };
            double ll = teacher.Run(sequences);

            double l11, l12, l13, l14;

            double p1 = hmm.Predict(new double[] { 0 }, out l11);
            double p2 = hmm.Predict(new double[] { 0, 3 }, out l12);
            double p3 = hmm.Predict(new double[] { 0, 3, 1 }, out l13);
            double p4 = hmm.Predict(new double[] { 0, 3, 1, 2 }, out l14);

            Assert.AreEqual(3, p1);
            Assert.AreEqual(1, p2);
            Assert.AreEqual(2, p3);
            Assert.AreEqual(2, p4);

            double l21 = hmm.Evaluate(new double[] { 0, 3 });
            double l22 = hmm.Evaluate(new double[] { 0, 3, 1 });
            double l23 = hmm.Evaluate(new double[] { 0, 3, 1, 2 });
            double l24 = hmm.Evaluate(new double[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(l11, l21, 1e-10);
            Assert.AreEqual(l12, l22, 1e-10);
            Assert.AreEqual(l13, l23, 1e-10);
            Assert.AreEqual(l14, l24, 1e-2);

            Assert.IsFalse(double.IsNaN(l11));
            Assert.IsFalse(double.IsNaN(l12));
            Assert.IsFalse(double.IsNaN(l13));
            Assert.IsFalse(double.IsNaN(l14));

            Assert.IsFalse(double.IsNaN(l21));
            Assert.IsFalse(double.IsNaN(l22));
            Assert.IsFalse(double.IsNaN(l23));
            Assert.IsFalse(double.IsNaN(l24));

            double ln1;

            double[] pn = hmm.Predict(new double[] { 0 }, 4, out ln1);

            Assert.AreEqual(4, pn.Length);
            Assert.AreEqual(3, pn[0]);
            Assert.AreEqual(1, pn[1]);
            Assert.AreEqual(2, pn[2]);
            Assert.AreEqual(2, pn[3]);

            double ln2 = hmm.Evaluate(new double[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(ln1, ln2, 1e-2);
            Assert.IsFalse(double.IsNaN(ln1));
            Assert.IsFalse(double.IsNaN(ln2));


            // Get the mixture distribution defining next state likelihoods
            Mixture<GeneralDiscreteDistribution> mixture = null;
            double ml11;
            double mp1 = hmm.Predict(new double[] { 0 }, out ml11, out mixture);

            Assert.AreEqual(l11, ml11);
            Assert.AreEqual(p1, mp1);
            Assert.IsNotNull(mixture);

            Assert.AreEqual(4, mixture.Coefficients.Length);
            Assert.AreEqual(4, mixture.Components.Length);
            Assert.AreEqual(0, mixture.Coefficients[0], 1e-10);
            Assert.AreEqual(1, mixture.Coefficients[1], 1e-10);
            Assert.AreEqual(0, mixture.Coefficients[2], 1e-10);
            Assert.AreEqual(0, mixture.Coefficients[3], 1e-10);

            for (int i = 0; i < mixture.Coefficients.Length; i++)
            {
                Assert.IsFalse(double.IsNaN(mixture.Coefficients[i]));
            }
        }
Example #10
        public void ConstructorTest()
        {
            double[,] A;
            double[] pi;

            var hmm = HiddenMarkovModel.CreateGeneric(2, 4);

            A = new double[,]
            {
                { 0.5, 0.5 },
                { 0.5, 0.5 }
            };

            pi = new double[] { 1, 0 };

            var logA  = Matrix.Log(A);
            var logPi = Matrix.Log(pi);

            Assert.AreEqual(2, hmm.States);
            Assert.AreEqual(1, hmm.Dimension);
            Assert.IsTrue(logA.IsEqual(hmm.Transitions));
            Assert.IsTrue(logPi.IsEqual(hmm.Probabilities));



            hmm = HiddenMarkovModel.CreateGeneric(new Forward(2), 4);

            A = new double[,]
            {
                { 0.5, 0.5 },
                { 0.0, 1.0 }
            };

            pi = new double[] { 1, 0 };

            logA  = Matrix.Log(A);
            logPi = Matrix.Log(pi);

            Assert.AreEqual(2, hmm.States);
            Assert.AreEqual(1, hmm.Dimension);
            Assert.IsTrue(logA.IsEqual(hmm.Transitions));
            Assert.IsTrue(logPi.IsEqual(hmm.Probabilities));



            A = new double[,]
            {
                { 0.7, 0.3 },
                { 0.4, 0.6 }
            };

            GeneralDiscreteDistribution[] B =
            {
                new GeneralDiscreteDistribution(0.1, 0.4, 0.5),
                new GeneralDiscreteDistribution(0.6, 0.3, 0.1)
            };

            pi = new double[]
            {
                0.6, 0.4
            };

            hmm = new HiddenMarkovModel<GeneralDiscreteDistribution>(A, B, pi);

            logA  = Matrix.Log(A);
            logPi = Matrix.Log(pi);

            Assert.AreEqual(2, hmm.States);
            Assert.AreEqual(1, hmm.Dimension);
            Assert.IsTrue(logA.IsEqual(hmm.Transitions));
            Assert.IsTrue(logPi.IsEqual(hmm.Probabilities));
            Assert.AreEqual(B, hmm.Emissions);
        }
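As the assertions show, the model stores its parameters in log space: hmm.Transitions and hmm.Probabilities hold Matrix.Log of the transition matrix and the initial vector. A minimal sketch for recovering the probability-space values, assuming the hmm from the last block above:

            int states = hmm.States;

            double[,] transitionProbabilities = new double[states, states];
            for (int i = 0; i < states; i++)
                for (int j = 0; j < states; j++)
                    transitionProbabilities[i, j] = Math.Exp(hmm.Transitions[i, j]);

            double[] initialProbabilities = new double[states];
            for (int i = 0; i < states; i++)
                initialProbabilities[i] = Math.Exp(hmm.Probabilities[i]);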
Example #11
        public void LearnTest6()
        {
            // Continuous Markov Models can operate using any
            // probability distribution, including discrete ones.

            // In the following example, we will try to create a
            // Continuous Hidden Markov Model using a discrete
            // distribution to detect if a given sequence starts
            // with a zero and has any number of ones after that.

            double[][] sequences = new double[][]
            {
                new double[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Create a new Hidden Markov Model with 3 states and
            //  a generic discrete distribution with two symbols
            var hmm = HiddenMarkovModel.CreateGeneric(3, 2);

            // Try to fit the model to the data until the average
            //  log-likelihood changes by less than 0.0001
            var teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            double ll = Math.Exp(teacher.Run(sequences));

            // Calculate the probability that the given
            //  sequences originated from the model
            double l1 = Math.Exp(hmm.Evaluate(new double[] { 0, 1 }));       // 0.999
            double l2 = Math.Exp(hmm.Evaluate(new double[] { 0, 1, 1, 1 })); // 0.916

            // Sequences which do not start with zero have a much lower probability.
            double l3 = Math.Exp(hmm.Evaluate(new double[] { 1, 1 }));       // 0.000
            double l4 = Math.Exp(hmm.Evaluate(new double[] { 1, 0, 0, 0 })); // 0.000

            // Sequences which contain a few errors still have a higher probability
            //  than the ones which do not start with zero. This shows some
            //  of the temporal elasticity and error tolerance of the HMMs.
            double l5 = Math.Exp(hmm.Evaluate(new double[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 })); // 0.034
            double l6 = Math.Exp(hmm.Evaluate(new double[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 })); // 0.034


            Assert.AreEqual(0.95151018769760853, ll, 1e-4);
            Assert.AreEqual(0.99996863060890995, l1, 1e-4);
            Assert.AreEqual(0.91667240076011669, l2, 1e-4);
            Assert.AreEqual(0.00002335133758386, l3, 1e-4);
            Assert.AreEqual(0.00000000000000012, l4, 1e-4);
            Assert.AreEqual(0.03423723144322685, l5, 1e-4);
            Assert.AreEqual(0.03423719592053246, l6, 1e-4);

            Assert.IsFalse(Double.IsNaN(ll));
            Assert.IsFalse(Double.IsNaN(l1));
            Assert.IsFalse(Double.IsNaN(l2));
            Assert.IsFalse(Double.IsNaN(l3));
            Assert.IsFalse(Double.IsNaN(l4));
            Assert.IsFalse(Double.IsNaN(l5));
            Assert.IsFalse(Double.IsNaN(l6));

            Assert.IsTrue(l1 > l3 && l1 > l4);
            Assert.IsTrue(l2 > l3 && l2 > l4);
        }
Example #12
        public void LearnTest5()
        {
            double[][][] sequences = new double[][][]
            {
                new double[][] { new double[] { 0 }, new double[] { 3 }, new double[] { 1 } },
                new double[][] { new double[] { 0 }, new double[] { 2 } },
                new double[][] { new double[] { 1 }, new double[] { 0 }, new double[] { 3 } },
                new double[][] { new double[] { 3 }, new double[] { 4 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 5 } },
                new double[][] { new double[] { 0 }, new double[] { 3 }, new double[] { 4 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 5 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 5 } },
                new double[][] { new double[] { 0 }, new double[] { 1 }, new double[] { 3 }, new double[] { 4 }, new double[] { 5 } },
            };

            var hmm = HiddenMarkovModel.CreateGeneric(3, 6);

            var teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
            {
                Iterations = 100, Tolerance = 0
            };
            double ll = teacher.Run(sequences);

            double l0; hmm.Decode(sequences[0], out l0);
            double l1; hmm.Decode(sequences[1], out l1);
            double l2; hmm.Decode(sequences[2], out l2);

            double pl = System.Math.Exp(ll);
            double p0 = System.Math.Exp(l0);
            double p1 = System.Math.Exp(l1);
            double p2 = System.Math.Exp(l2);

            Assert.AreEqual(0.49788370872923726, pl, 1e-6);
            Assert.AreEqual(0.014012065043262257, p0, 1e-6);
            Assert.AreEqual(0.016930905415294066, p1, 1e-6);
            Assert.AreEqual(0.0019365959189660638, p2, 1e-6);

            Assert.AreEqual(1, hmm.Dimension);



            double[][] sequences2 = new double[][]
            {
                new double[] { 0, 3, 1 },
                new double[] { 0, 2 },
                new double[] { 1, 0, 3 },
                new double[] { 3, 4 },
                new double[] { 0, 1, 3, 5 },
                new double[] { 0, 3, 4 },
                new double[] { 0, 1, 3, 5 },
                new double[] { 0, 1, 3, 5 },
                new double[] { 0, 1, 3, 4, 5 },
            };

            hmm = HiddenMarkovModel.CreateGeneric(3, 6);

            teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
            {
                Iterations = 100
            };
            double ll2 = teacher.Run(sequences2);

            double l02; hmm.Decode(sequences2[0], out l02);
            double l12; hmm.Decode(sequences2[1], out l12);
            double l22; hmm.Decode(sequences2[2], out l22);

            Assert.AreEqual(ll, ll2);
            Assert.AreEqual(l0, l02);
            Assert.AreEqual(l1, l12);
            Assert.AreEqual(l2, l22);

            Assert.AreEqual(1, hmm.Dimension);
        }