public void ComputeTest()
        {

            HiddenMarkovModel hmm = DiscreteHiddenMarkovModelFunctionTest.CreateModel2();

            int states = hmm.States;


            var function = new MarkovDiscreteFunction(hmm);
            var target = new ConditionalRandomField<int>(states, function);
            double p1, p2;

            int[] observations, expected, actual;

            observations = new int[] { 0, 0, 1, 1, 1, 2 };
            expected = hmm.Decode(observations, out p1);
            actual = target.Compute(observations, out p2);

            Assert.IsTrue(expected.IsEqual(actual));
            Assert.AreEqual(p1, p2, 1e-6);


            observations = new int[] { 0, 1, 2, 2, 2 };
            expected = hmm.Decode(observations, out p1);
            actual = target.Compute(observations, out p2);

            Assert.IsTrue(expected.IsEqual(actual));
            Assert.AreEqual(p1, p2, 1e-6);
        }
        public void BackwardTest()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel2();

            var function = new MarkovDiscreteFunction(hmm);


            //                     A  B  B  A
            int[] observations = { 0, 1, 1, 0 };

            double logLikelihood;
            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Backward(function.Factors[0], observations, 0, out logLikelihood);

            var A = Matrix.Exp(hmm.Transitions);
            var B = Matrix.Exp(hmm.Emissions);
            var P = Matrix.Exp(hmm.Probabilities);

            double a30 = 1;
            double a31 = 1;

            double a20 = A[0, 0] * B[0, 0] * a30 + A[0, 1] * B[1, 0] * a31;
            double a21 = A[1, 0] * B[0, 0] * a30 + A[1, 1] * B[1, 0] * a31;

            double a10 = A[0, 0] * B[0, 1] * a20 + A[0, 1] * B[1, 1] * a21;
            double a11 = A[1, 0] * B[0, 1] * a20 + A[1, 1] * B[1, 1] * a21;

            double a00 = A[0, 0] * B[0, 1] * a10 + A[0, 1] * B[1, 1] * a11;
            double a01 = A[1, 0] * B[0, 1] * a10 + A[1, 1] * B[1, 1] * a11;


            Assert.AreEqual(actual[0, 0], a00, 1e-10);
            Assert.AreEqual(actual[0, 1], a01, 1e-10);

            Assert.AreEqual(actual[1, 0], a10, 1e-10);
            Assert.AreEqual(actual[1, 1], a11, 1e-10);

            Assert.AreEqual(actual[2, 0], a20, 1e-10);
            Assert.AreEqual(actual[2, 1], a21, 1e-10);

            Assert.AreEqual(actual[3, 0], a30, 1e-10);
            Assert.AreEqual(actual[3, 1], a31, 1e-10);

            foreach (double e in actual)
                Assert.IsFalse(double.IsNaN(e));

            double p = 0;
            for (int i = 0; i < hmm.States; i++)
                p += actual[0, i] * P[i] * B[i, observations[0]];

            Assert.AreEqual(0.054814695, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));

            p = System.Math.Exp(logLikelihood);
            Assert.AreEqual(0.054814695, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
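        // A minimal sketch (not part of the original test) generalizing the hand-rolled
        // backward recursion verified above to any number of states and any sequence
        // length. A and B follow the same convention as in the test: A[i, j] is the
        // transition probability from state i to state j, and B[j, o] the probability
        // of emitting symbol o from state j.
        private static double[,] NaiveBackward(double[,] A, double[,] B, int[] observations)
        {
            int states = A.GetLength(0);
            int T = observations.Length;
            double[,] beta = new double[T, states];

            // Initialization: the backward variable is 1 at the last time step.
            for (int i = 0; i < states; i++)
                beta[T - 1, i] = 1;

            // Induction: beta[t, i] = sum_j A[i, j] * B[j, o(t+1)] * beta[t+1, j]
            for (int t = T - 2; t >= 0; t--)
                for (int i = 0; i < states; i++)
                    for (int j = 0; j < states; j++)
                        beta[t, i] += A[i, j] * B[j, observations[t + 1]] * beta[t + 1, j];

            return beta;
        }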
        public void HiddenConditionalRandomFieldConstructorTest()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

            var function = new MarkovDiscreteFunction(hmm);
            var target = new HiddenConditionalRandomField<int>(function);

            Assert.AreEqual(function, target.Function);
            Assert.AreEqual(2, target.Function.Factors[0].States);
        }
        public void ConditionalRandomFieldConstructorTest()
        {
            HiddenMarkovModel hmm = DiscreteHiddenMarkovModelFunctionTest.CreateModel1();

            int states = 2;
            var function = new MarkovDiscreteFunction(hmm);
            var target = new ConditionalRandomField<int>(states, function);


            Assert.AreEqual(function, target.Function);
            Assert.AreEqual(2, target.States);
        }
        public void ComputeTest()
        {
            HiddenMarkovClassifier hmm = DiscreteHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0,1,1,0 },   // Class 0
                new int[] { 0,0,1,0 },   // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,0 },     // Class 0

                new int[] { 1,0,0,1 },   // Class 1
                new int[] { 1,1,0,1 },   // Class 1
                new int[] { 1,0,0,0,1 }, // Class 1
                new int[] { 1,0,1 },     // Class 1
            };

            int[] outputs = new int[]
            {
                0,0,0,0, // First four sequences are of class 0
                1,1,1,1, // Last four sequences are of class 1
            };


            var function = new MarkovDiscreteFunction(hmm);
            var target = new HiddenConditionalRandomField<int>(function);


            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = hmm.Compute(inputs[i]);

                int actual = target.Compute(inputs[i]);

                double h0 = hmm.LogLikelihood(inputs[i], 0);
                double h1 = hmm.LogLikelihood(inputs[i], 1);

                double c0 = target.LogLikelihood(inputs[i], 0);
                double c1 = target.LogLikelihood(inputs[i], 1);

                Assert.AreEqual(expected, actual);
                Assert.AreEqual(h0, c0, 1e-10);
                Assert.AreEqual(h1, c1, 1e-10);

                Assert.IsFalse(double.IsNaN(c0));
                Assert.IsFalse(double.IsNaN(c1));
            }
        }
        public void RunTest()
        {
            var inputs = QuasiNewtonHiddenLearningTest.inputs;
            var outputs = QuasiNewtonHiddenLearningTest.outputs;

            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new HiddenGradientDescentLearning<int>(model);
            target.LearningRate = 1000;

            double[] actual = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            double ll0 = model.LogLikelihood(inputs, outputs);

            double error = Double.NegativeInfinity;
            for (int i = 0; i < 50; i++)
                error = target.RunEpoch(inputs, outputs);

            double ll1 = model.LogLikelihood(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
            Assert.AreEqual(0.00027018722449589916, error, 1e-10);
            Assert.IsFalse(Double.IsNaN(ll0));
            Assert.IsFalse(Double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            Assert.IsTrue(ll1 > ll0);
        }
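        // A hedged sketch (assuming the same RunEpoch(inputs, outputs) signature used
        // above, which returns the error of the epoch) of running epochs until the
        // reported error stabilizes, instead of looping a fixed 50 times.
        private static double RunUntilConverged(HiddenGradientDescentLearning<int> teacher,
            int[][] inputs, int[] outputs, double tolerance = 1e-8, int maxEpochs = 1000)
        {
            double previous = double.PositiveInfinity;
            double current = previous;

            for (int epoch = 0; epoch < maxEpochs; epoch++)
            {
                current = teacher.RunEpoch(inputs, outputs);

                if (System.Math.Abs(previous - current) < tolerance)
                    break; // the epoch error is no longer changing appreciably

                previous = current;
            }

            return current;
        }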
        public void RunTest()
        {
            var inputs = QuasiNewtonHiddenLearningTest.inputs;
            var outputs = QuasiNewtonHiddenLearningTest.outputs;

            HiddenMarkovClassifier hmm = DiscreteHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new HiddenConjugateGradientLearning<int>(model);

            double[] actual = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            double ll0 = model.LogLikelihood(inputs, outputs);

            double error = target.Run(inputs, outputs);

            double ll1 = model.LogLikelihood(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.0019419916698781847, ll0, 1e-10);
            Assert.AreEqual(0.00050271005636426391, error, 1e-10);
            Assert.AreEqual(error, -ll1);
            Assert.IsFalse(Double.IsNaN(ll0));
            Assert.IsFalse(Double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            Assert.IsTrue(ll1 > ll0);
        }
        public void ComputeTest()
        {
            var hmm = DiscreteHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

            IPotentialFunction<int> owner = new MarkovDiscreteFunction(hmm);

            int[] x = new int[] { 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0 };

            foreach (var factor in owner.Factors)
            {
                for (int y = 0; y < owner.Outputs; y++)
                {
                    double[,] fwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.Forward(factor, x, y);

                    double[,] bwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.Backward(factor, x, y);

                    double[,] lnfwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.LogForward(factor, x, y);

                    double[,] lnbwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.LogBackward(factor, x, y);

                    for (int i = 0; i < fwd.GetLength(0); i++)
                        for (int j = 0; j < fwd.GetLength(1); j++)
                            Assert.AreEqual(System.Math.Log(fwd[i, j]), lnfwd[i, j], 1e-10);

                    for (int i = 0; i < bwd.GetLength(0); i++)
                        for (int j = 0; j < bwd.GetLength(1); j++)
                            Assert.AreEqual(System.Math.Log(bwd[i, j]), lnbwd[i, j], 1e-10);


                    foreach (var feature in factor)
                    {
                        double expected = System.Math.Log(feature.Marginal(fwd, bwd, x, y));
                        double actual = feature.LogMarginal(lnfwd, lnbwd, x, y);

                        Assert.AreEqual(expected, actual, 1e-10);
                        Assert.IsFalse(Double.IsNaN(actual));
                    }

                }
            }
        }
        public void RunTest()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new HiddenQuasiNewtonLearning<int>(model);

            double[] actual = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            double ll0 = model.LogLikelihood(inputs, outputs);

            double error = target.Run(inputs, outputs);

            double ll1 = model.LogLikelihood(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
            Assert.AreEqual(0.0, error, 1e-10);
            Assert.AreEqual(error, -ll1);
            Assert.IsFalse(Double.IsNaN(ll0));
            Assert.IsFalse(Double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            Assert.IsTrue(ll1 > ll0);
        }
        public void RunTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);

            var function = new MarkovDiscreteFunction(2, 2, 2);

            var model  = new HiddenConditionalRandomField <int>(function);
            var target = new HiddenQuasiNewtonLearning <int>(model);

            double[] actual   = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }


            double ll0   = model.LogLikelihood(inputs, outputs);
            double error = target.Run(inputs, outputs);
            double ll1   = model.LogLikelihood(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }


            Assert.AreEqual(-5.5451774444795623, ll0, 1e-10);
            Assert.AreEqual(0, error, 1e-10);
            Assert.IsFalse(double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            Assert.IsTrue(ll1 > ll0);
        }
        public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
        {
            HiddenMarkovClassifier model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            var features = target.Features;

            double[] weights = target.Weights;

            Assert.AreEqual(22, features.Length);
            Assert.AreEqual(22, weights.Length);

            int k = 0;

            for (int c = 0; c < model.Classes; c++)
            {
                Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                {
                    Assert.AreEqual(model[c].Probabilities[i], weights[k++]);
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].States; j++)
                    {
                        Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);
                    }
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model.Symbols; j++)
                    {
                        Assert.AreEqual(model[c].Emissions[i, j], weights[k++]);
                    }
                }
            }
        }
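        // The 22 weights asserted above follow directly from the layout the loop walks
        // through: for each class, one (log) prior weight, then that class's initial,
        // transition and emission parameters. A hedged sketch of the expected count,
        // assuming exactly this layout:
        private static int ExpectedWeightCount(HiddenMarkovClassifier model)
        {
            int count = 0;

            for (int c = 0; c < model.Classes; c++)
            {
                count += 1;                                 // log prior of class c
                count += model[c].States;                   // initial state probabilities
                count += model[c].States * model[c].States; // state transitions
                count += model[c].States * model.Symbols;   // symbol emissions
            }

            return count;
        }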
        public void ForwardScalingTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel2();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     A  B  B  A
            int[] observations = { 0, 1, 1, 0 };

            double[] scaling;
            double   logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0, out scaling, out logLikelihood);

            double p = System.Math.Exp(logLikelihood);

            Assert.AreEqual(0.054814695, p, 1e-7);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void GradientTest()
        {
            var function = new MarkovDiscreteFunction(2, 2, 2);
            var model    = new HiddenConditionalRandomField <int>(function);
            var target   = new ForwardBackwardGradient <int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
        public void ComputeTest()
        {
            HiddenMarkovClassifier model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            double actual;
            double expected;

            int[] x = { 0, 1 };
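            // Note: a previous state of -1 in Factors[c].Compute(...) selects the
            // initial-state factor; an actual previous state selects a transition factor.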

            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    expected = model.Priors[c] *
                               Math.Exp(model[c].Probabilities[i]) * Math.Exp(model[c].Emissions[i, x[0]]);
                    actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            expected = model.Priors[c] *
                                       Math.Exp(model[c].Transitions[i, j]) * Math.Exp(model[c].Emissions[j, x[t]]);
                            actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }
            }
        }
        public void ComputeTest()
        {
            HiddenMarkovModel model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            double actual;
            double expected;

            int[] x = { 0, 1 };

            for (int i = 0; i < model.States; i++)
            {
                // Check initial state transitions
                expected = Math.Exp(model.Probabilities[i]) * Math.Exp(model.Emissions[i, x[0]]);
                actual   = Math.Exp(target.Factors[0].Compute(-1, i, x, 0));
                Assert.AreEqual(expected, actual, 1e-6);
            }

            for (int t = 0; t < x.Length; t++)
            {
                for (int i = 0; i < model.States; i++)
                {
                    // Check initial state transitions
                    expected = Math.Exp(model.Probabilities[i]) * Math.Exp(model.Emissions[i, x[0]]);
                    actual   = Math.Exp(target.Factors[0].Compute(-1, i, x, 0));
                    Assert.AreEqual(expected, actual, 1e-6);

                    // Check normal state transitions
                    for (int j = 0; j < model.States; j++)
                    {
                        double xb = Math.Exp(model.Transitions[i, j]);
                        double xc = Math.Exp(model.Emissions[j, x[t]]);
                        expected = xb * xc;
                        actual   = Math.Exp(target.Factors[0].Compute(i, j, x, t));
                        Assert.AreEqual(expected, actual, 1e-6);
                    }
                }
            }
        }
        public void LogBackwardTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel3();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            int[] observations = { 0, 0, 1, 1 };

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
                                            ForwardBackwardAlgorithm.Backward(function.Factors[0], observations, 0));

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.LogBackward(function.Factors[0], observations, 0);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            foreach (double p in actual)
            {
                Assert.IsFalse(double.IsNaN(p));
            }
        }
        public void GradientTest_DiscreteMarkov2()
        {
            HiddenMarkovClassifier hmm = DiscreteHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model  = new HiddenConditionalRandomField <int>(function);
            var target = new ForwardBackwardGradient <int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            var inputs  = QuasiNewtonHiddenLearningTest.inputs;
            var outputs = QuasiNewtonHiddenLearningTest.outputs;

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
            }
        }
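        // The func helper referenced by the finite-difference gradient tests is not
        // shown in these snippets. A hedged sketch of what it could look like,
        // assuming it loads the candidate weight vector into the model and returns
        // the objective being differentiated (taken here to be the negative
        // log-likelihood of the given training data):
        private static double func(HiddenConditionalRandomField<int> model,
            double[] parameters, int[][] inputs, int[] outputs)
        {
            // Copy the candidate parameters into the model's potential function.
            for (int i = 0; i < parameters.Length; i++)
                model.Function.Weights[i] = parameters[i];

            // Return the quantity whose gradient ForwardBackwardGradient is compared
            // against (assumed: the negative log-likelihood of inputs given outputs).
            return -model.LogLikelihood(inputs, outputs);
        }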
        public void BackwardTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel3();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     L  L  R  R
            int[] observations = { 0, 0, 1, 1 };

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.Backward(function.Factors[0], observations);

            // Backward matrices from R's HMM package are
            // transposed in relation to the framework's:

            Assert.AreEqual(4, actual.GetLength(0));
            Assert.AreEqual(2, actual.GetLength(1));

            Assert.AreEqual(0.128074432, actual[0, 0], 1e-10);
            Assert.AreEqual(0.07923051, actual[0, 1], 1e-8);

            Assert.AreEqual(0.196816, actual[1, 0]);
            Assert.AreEqual(0.453856, actual[1, 1]);

            Assert.AreEqual(0.376, actual[2, 0]);
            Assert.AreEqual(0.691, actual[2, 1]);

            Assert.AreEqual(1, actual[3, 0]);
            Assert.AreEqual(1, actual[3, 1]);

            foreach (double p in actual)
            {
                Assert.IsFalse(double.IsNaN(p));
            }
        }
        public void HiddenMarkovModelFunctionConstructorTest()
        {
            HiddenMarkovModel model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            var features = target.Features;

            double[] weights = target.Weights;

            Assert.AreEqual(features.Length, 12);
            Assert.AreEqual(weights.Length, 12);

            int k = 0;

            for (int i = 0; i < model.States; i++)
            {
                Assert.AreEqual(model.Probabilities[i], weights[k++]);
            }

            for (int i = 0; i < model.States; i++)
            {
                for (int j = 0; j < model.States; j++)
                {
                    Assert.AreEqual(model.Transitions[i, j], weights[k++]);
                }
            }

            for (int i = 0; i < model.States; i++)
            {
                for (int j = 0; j < model.Symbols; j++)
                {
                    Assert.AreEqual(model.Emissions[i, j], weights[k++]);
                }
            }
        }
        public void ForwardTest3()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel3();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     L  L  R  R
            int[] observations = { 0, 0, 1, 1 };

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.Forward(function.Factors[0], observations);

            // Forward matrices from R's HMM package are
            // transposed in relation to the framework's:

            Assert.AreEqual(4, actual.GetLength(0));
            Assert.AreEqual(2, actual.GetLength(1));

            Assert.AreEqual(0.675, actual[0, 0], 1e-10);
            Assert.AreEqual(0.011, actual[0, 1], 1e-10);

            Assert.AreEqual(0.407475, actual[1, 0], 1e-10);
            Assert.AreEqual(0.015697, actual[1, 1], 1e-10);

            Assert.AreEqual(0.08267228, actual[2, 0], 1e-8);
            Assert.AreEqual(0.08138495, actual[2, 1], 1e-8);

            Assert.AreEqual(0.02263833, actual[3, 0], 1e-8);
            Assert.AreEqual(0.06468345, actual[3, 1], 1e-8);

            foreach (double p in actual)
            {
                Assert.IsFalse(double.IsNaN(p));
            }
        }
        public void LogForwardBackwardTest()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel1();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     G  G  C  A
            int[] observations = { 2, 2, 1, 0 };

            double fwdLogLikelihood;

            double[,] fwd = Accord.Statistics.Models.Fields.
                            ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out fwdLogLikelihood);

            double bwdLogLikelihood;

            double[,] bwd = Accord.Statistics.Models.Fields.
                            ForwardBackwardAlgorithm.LogBackward(function.Factors[0], observations, 0, out bwdLogLikelihood);

            Assert.AreEqual(fwdLogLikelihood, bwdLogLikelihood, 1e-10); // -5.5614629361549142
            Assert.AreEqual(-5.5614629361549142, fwdLogLikelihood, 1e-10);
            Assert.IsFalse(double.IsNaN(fwdLogLikelihood));
        }
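        // A minimal sketch (not part of the original test) of the standard consistency
        // property the log-forward and log-backward matrices also satisfy at every
        // intermediate step t: the sequence log-likelihood equals
        // log sum_i exp(lnFwd[t, i] + lnBwd[t, i]), computed here with a stabilized
        // log-sum-exp.
        private static double LogLikelihoodAt(double[,] lnFwd, double[,] lnBwd, int t)
        {
            int states = lnFwd.GetLength(1);

            double max = double.NegativeInfinity;
            for (int i = 0; i < states; i++)
                max = System.Math.Max(max, lnFwd[t, i] + lnBwd[t, i]);

            double sum = 0;
            for (int i = 0; i < states; i++)
                sum += System.Math.Exp(lnFwd[t, i] + lnBwd[t, i] - max);

            return max + System.Math.Log(sum);
        }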
        public void ForwardScalingTest()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel1();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);


            //                     G  G  C  A
            int[] observations = { 2, 2, 1, 0 };

            double[] scaling;
            double   logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0, out scaling, out logLikelihood);

            double[] P = Matrix.Exp(hmm.Probabilities);
            double[,] B = Matrix.Exp(hmm.Emissions);
            double[,] A = Matrix.Exp(hmm.Transitions);

            double a00 = P[0] * B[0, 2];
            double a01 = P[1] * B[1, 2];
            double t0  = a00 + a01;

            a00 /= t0;
            a01 /= t0;

            double a10 = (a00 * A[0, 0] + a01 * A[1, 0]) * B[0, 2];
            double a11 = (a01 * A[1, 1] + a00 * A[0, 1]) * B[1, 2];
            double t1  = a10 + a11;

            a10 /= t1;
            a11 /= t1;

            double a20 = (a10 * A[0, 0] + a11 * A[1, 0]) * B[0, 1];
            double a21 = (a11 * A[1, 1] + a10 * A[0, 1]) * B[1, 1];
            double t2  = a20 + a21;

            a20 /= t2;
            a21 /= t2;

            double a30 = (a20 * A[0, 0] + a21 * A[1, 0]) * B[0, 0];
            double a31 = (a21 * A[1, 1] + a20 * A[0, 1]) * B[1, 0];
            double t3  = a30 + a31;

            a30 /= t3;
            a31 /= t3;

            Assert.AreEqual(a00, actual[0, 0], 1e-10);
            Assert.AreEqual(a01, actual[0, 1], 1e-10);

            Assert.AreEqual(a10, actual[1, 0], 1e-10);
            Assert.AreEqual(a11, actual[1, 1], 1e-10);

            Assert.AreEqual(a20, actual[2, 0], 1e-10);
            Assert.AreEqual(a21, actual[2, 1], 1e-10);

            Assert.AreEqual(a30, actual[3, 0], 1e-10);
            Assert.AreEqual(a31, actual[3, 1], 1e-10);

            foreach (double e in actual)
            {
                Assert.IsFalse(double.IsNaN(e));
            }


            double p = System.Math.Exp(logLikelihood);

            Assert.AreEqual(0.00384315, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
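        // A minimal sketch (not part of the original test) generalizing the hand-rolled,
        // per-step normalized forward recursion above. P, A and B follow the same
        // convention as in the test: P[i] is the initial probability of state i,
        // A[i, j] the transition probability from i to j, and B[i, o] the probability
        // of emitting symbol o from state i. The log-likelihood is accumulated from
        // the per-step scaling sums, exactly like the products t0 * t1 * t2 * t3 above.
        private static double[,] NaiveScaledForward(double[] P, double[,] A, double[,] B,
            int[] observations, out double logLikelihood)
        {
            int states = P.Length;
            int T = observations.Length;
            double[,] alpha = new double[T, states];
            logLikelihood = 0;

            // Initialization: alpha[0, i] = P[i] * B[i, o(0)], then normalize.
            double scale = 0;
            for (int i = 0; i < states; i++)
                scale += alpha[0, i] = P[i] * B[i, observations[0]];
            for (int i = 0; i < states; i++)
                alpha[0, i] /= scale;
            logLikelihood += System.Math.Log(scale);

            // Induction: alpha[t, j] = (sum_i alpha[t-1, i] * A[i, j]) * B[j, o(t)], normalized.
            for (int t = 1; t < T; t++)
            {
                scale = 0;
                for (int j = 0; j < states; j++)
                {
                    double sum = 0;
                    for (int i = 0; i < states; i++)
                        sum += alpha[t - 1, i] * A[i, j];
                    scale += alpha[t, j] = sum * B[j, observations[t]];
                }

                for (int j = 0; j < states; j++)
                    alpha[t, j] /= scale;
                logLikelihood += System.Math.Log(scale);
            }

            return alpha;
        }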
        public void LikelihoodTest()
        {
            HiddenMarkovModel hmm = HiddenMarkovModelFunctionTest.CreateModel2();

            int states = hmm.States;
            int symbols = hmm.Symbols;


            var function1 = new MarkovDiscreteFunction(hmm);
            var target1 = new ConditionalRandomField<int>(states, function1);

            var function2 = new MarkovDiscreteFunction(states, symbols);
            var target2 = new ConditionalRandomField<int>(states, function2);


            int[] observations = new int[] { 0, 0, 1, 1, 1, 2 };

            // Compare the two models on the (linear) likelihood scale...
            double a = System.Math.Exp(target1.LogLikelihood(observations, observations));
            double b = System.Math.Exp(target2.LogLikelihood(observations, observations));
            Assert.IsTrue(a > b);

            // ...and on the log-likelihood scale.
            double la = target1.LogLikelihood(observations, observations);
            double lb = target2.LogLikelihood(observations, observations);
            Assert.IsTrue(la > lb);

            // Both scales must agree with each other.
            double lla = System.Math.Log(a);
            double llb = System.Math.Log(b);

            Assert.AreEqual(lla, la, 1e-6);
            Assert.AreEqual(llb, lb, 1e-6);
        }
        private static void ResilientGradientHiddenLearning()
        {
            // Suppose we would like to learn how to classify the
            // following set of sequences among three class labels: 
            int[][] inputSequences =
            {
                // First class of sequences: starts and
                // ends with zeros, ones in the middle:
                new[] { 0, 1, 1, 1, 0 },        
                new[] { 0, 0, 1, 1, 0, 0 },     
                new[] { 0, 1, 1, 1, 1, 0 },     
 
                // Second class of sequences: starts with
                // twos and switches to ones until the end.
                new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },
 
                // Third class of sequences: can start
                // with any symbols, but ends with three.
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 0, 0, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 2, 2, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
            };

            // Now consider their respective class labels
            int[] outputLabels =
            {
                /* Sequences  1-3 are from class 0: */ 0, 0, 0,
                /* Sequences  4-6 are from class 1: */ 1, 1, 1,
                /* Sequences 7-14 are from class 2: */ 2, 2, 2, 2, 2, 2, 2, 2
            };


            // Create the Hidden Conditional Random Field using a set of discrete features
            var function = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
            var classifier = new HiddenConditionalRandomField<int>(function);

            // Create a learning algorithm
            var teacher = new HiddenResilientGradientLearning<int>(classifier)
            {
                Iterations = 50
            };

            // Run the algorithm and learn the models
            teacher.Run(inputSequences, outputLabels);

            int[] answers = inputSequences.Apply(classifier.Compute);

        }
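        // The example above computes the classifier's answers but does not inspect
        // them. A hedged follow-up sketch (assuming the same Compute(int[]) method
        // used throughout these tests) counting how many sequences were classified
        // with their expected labels:
        private static int CountCorrect(HiddenConditionalRandomField<int> classifier,
            int[][] sequences, int[] labels)
        {
            int correct = 0;

            for (int i = 0; i < sequences.Length; i++)
                if (classifier.Compute(sequences[i]) == labels[i])
                    correct++;

            return correct;
        }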
        public void GradientTest()
        {
            var function = new MarkovDiscreteFunction(2, 2, 2);
            var model = new HiddenConditionalRandomField<int>(function);
            var target = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
        public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
        {
            HiddenMarkovClassifier model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            var features = target.Features;
            double[] weights = target.Weights;

            Assert.AreEqual(22, features.Length);
            Assert.AreEqual(22, weights.Length);

            int k = 0;
            for (int c = 0; c < model.Classes; c++)
            {
                Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                    Assert.AreEqual(model[c].Probabilities[i], weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                    for (int j = 0; j < model[c].States; j++)
                        Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                    for (int j = 0; j < model.Symbols; j++)
                        Assert.AreEqual(model[c].Emissions[i, j], weights[k++]);
            }

        }
        public void BackwardTest()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                                    ForwardBackwardAlgorithmTest.CreateModel2();

            var function = new MarkovDiscreteFunction(hmm);


            //                     A  B  B  A
            int[] observations = { 0, 1, 1, 0 };

            double logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.Backward(function.Factors[0], observations, 0, out logLikelihood);

            var A = Matrix.Exp(hmm.Transitions);
            var B = Matrix.Exp(hmm.Emissions);
            var P = Matrix.Exp(hmm.Probabilities);

            double a30 = 1;
            double a31 = 1;

            double a20 = A[0, 0] * B[0, 0] * a30 + A[0, 1] * B[1, 0] * a31;
            double a21 = A[1, 0] * B[0, 0] * a30 + A[1, 1] * B[1, 0] * a31;

            double a10 = A[0, 0] * B[0, 1] * a20 + A[0, 1] * B[1, 1] * a21;
            double a11 = A[1, 0] * B[0, 1] * a20 + A[1, 1] * B[1, 1] * a21;

            double a00 = A[0, 0] * B[0, 1] * a10 + A[0, 1] * B[1, 1] * a11;
            double a01 = A[1, 0] * B[0, 1] * a10 + A[1, 1] * B[1, 1] * a11;


            Assert.AreEqual(actual[0, 0], a00, 1e-10);
            Assert.AreEqual(actual[0, 1], a01, 1e-10);

            Assert.AreEqual(actual[1, 0], a10, 1e-10);
            Assert.AreEqual(actual[1, 1], a11, 1e-10);

            Assert.AreEqual(actual[2, 0], a20, 1e-10);
            Assert.AreEqual(actual[2, 1], a21, 1e-10);

            Assert.AreEqual(actual[3, 0], a30, 1e-10);
            Assert.AreEqual(actual[3, 1], a31, 1e-10);

            foreach (double e in actual)
            {
                Assert.IsFalse(double.IsNaN(e));
            }

            double p = 0;

            for (int i = 0; i < hmm.States; i++)
            {
                p += actual[0, i] * P[i] * B[i, observations[0]];
            }

            Assert.AreEqual(0.054814695, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));

            p = System.Math.Exp(logLikelihood);
            Assert.AreEqual(0.054814695, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void ForwardScalingTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel2();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     A  B  B  A
            int[] observations = { 0, 1, 1, 0 };

            double[] scaling;
            double logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0, out scaling, out logLikelihood);

            double p = System.Math.Exp(logLikelihood);
            Assert.AreEqual(0.054814695, p, 1e-7);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void ForwardScalingTest()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel1();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);


            //                     G  G  C  A
            int[] observations = { 2, 2, 1, 0 };

            double[] scaling;
            double logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0, out scaling, out logLikelihood);

            double[] P = Matrix.Exp(hmm.Probabilities);
            double[,] B = Matrix.Exp(hmm.Emissions);
            double[,] A = Matrix.Exp(hmm.Transitions);

            double a00 = P[0] * B[0, 2];
            double a01 = P[1] * B[1, 2];
            double t0 = a00 + a01;

            a00 /= t0;
            a01 /= t0;

            double a10 = (a00 * A[0, 0] + a01 * A[1, 0]) * B[0, 2];
            double a11 = (a01 * A[1, 1] + a00 * A[0, 1]) * B[1, 2];
            double t1 = a10 + a11;

            a10 /= t1;
            a11 /= t1;

            double a20 = (a10 * A[0, 0] + a11 * A[1, 0]) * B[0, 1];
            double a21 = (a11 * A[1, 1] + a10 * A[0, 1]) * B[1, 1];
            double t2 = a20 + a21;

            a20 /= t2;
            a21 /= t2;

            double a30 = (a20 * A[0, 0] + a21 * A[1, 0]) * B[0, 0];
            double a31 = (a21 * A[1, 1] + a20 * A[0, 1]) * B[1, 0];
            double t3 = a30 + a31;

            a30 /= t3;
            a31 /= t3;

            Assert.AreEqual(a00, actual[0, 0], 1e-10);
            Assert.AreEqual(a01, actual[0, 1], 1e-10);

            Assert.AreEqual(a10, actual[1, 0], 1e-10);
            Assert.AreEqual(a11, actual[1, 1], 1e-10);

            Assert.AreEqual(a20, actual[2, 0], 1e-10);
            Assert.AreEqual(a21, actual[2, 1], 1e-10);

            Assert.AreEqual(a30, actual[3, 0], 1e-10);
            Assert.AreEqual(a31, actual[3, 1], 1e-10);

            foreach (double e in actual)
                Assert.IsFalse(double.IsNaN(e));


            double p = System.Math.Exp(logLikelihood);
            Assert.AreEqual(0.00384315, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void RunTest2()
        {
            var inputs = QuasiNewtonHiddenLearningTest.inputs;
            var outputs = QuasiNewtonHiddenLearningTest.outputs;


            Accord.Math.Tools.SetupGenerator(0);

            var function = new MarkovDiscreteFunction(2, 2, 2);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new HiddenConjugateGradientLearning<int>(model);

            double[] actual = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }


            double ll0 = model.LogLikelihood(inputs, outputs);
            double error = target.Run(inputs, outputs);
            double ll1 = model.LogLikelihood(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }


            Assert.AreEqual(-5.5451774444795623, ll0, 1e-10);
            Assert.AreEqual(0, error, 1e-10);
            Assert.IsFalse(double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            Assert.IsTrue(ll1 > ll0);
        }
        public void BackwardTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel3();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     L  L  R  R
            int[] observations = { 0, 0, 1, 1 };

            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Backward(function.Factors[0], observations);

            // Backward matrices from R's HMM package are
            // transposed in relation to the framework's:

            Assert.AreEqual(4, actual.GetLength(0));
            Assert.AreEqual(2, actual.GetLength(1));

            Assert.AreEqual(0.128982144, actual[0, 0], 1e-10);
            Assert.AreEqual(0.082407504, actual[0, 1], 1e-10);

            Assert.AreEqual(0.196816, actual[1, 0], 1e-10);
            Assert.AreEqual(0.453856, actual[1, 1], 1e-10);

            Assert.AreEqual(0.376, actual[2, 0], 1e-10);
            Assert.AreEqual(0.691, actual[2, 1], 1e-10);

            foreach (double p in actual)
                Assert.IsFalse(double.IsNaN(p));
        }
        public void HiddenMarkovModelFunctionConstructorTest()
        {
            HiddenMarkovModel model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            var features = target.Features;
            double[] weights = target.Weights;

            Assert.AreEqual(features.Length, 12);
            Assert.AreEqual(weights.Length, 12);

            int k = 0;

            for (int i = 0; i < model.States; i++)
                Assert.AreEqual(model.Probabilities[i], weights[k++]);

            for (int i = 0; i < model.States; i++)
                for (int j = 0; j < model.States; j++)
                    Assert.AreEqual(model.Transitions[i, j], weights[k++]);

            for (int i = 0; i < model.States; i++)
                for (int j = 0; j < model.Symbols; j++)
                    Assert.AreEqual(model.Emissions[i, j], weights[k++]);
        }
        public void ComputeTest2()
        {
            // Suppose we would like to learn how to classify the
            // following set of sequences among three class labels:

            int[][] inputSequences =
            {
                // First class of sequences: starts and
                // ends with zeros, ones in the middle:
                new[] { 0, 1, 1, 1, 0 },
                new[] { 0, 0, 1, 1, 0, 0 },
                new[] { 0, 1, 1, 1, 1, 0 },

                // Second class of sequences: starts with
                // twos and switches to ones until the end.
                new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

                // Third class of sequences: can start
                // with any symbols, but ends with three.
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 0, 0, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 2, 2, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
            };

            // Now consider their respective class labels
            int[] outputLabels =
            {
                /* Sequences  1-3 are from class 0: */ 0, 0, 0,
                /* Sequences  4-6 are from class 1: */ 1, 1, 1,
                /* Sequences 7-14 are from class 2: */ 2, 2, 2, 2, 2, 2, 2, 2
            };


            // Create the Hidden Conditional Random Field using a set of discrete features
            var function   = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
            var classifier = new HiddenConditionalRandomField <int>(function);

            // Create a learning algorithm
            var teacher = new HiddenResilientGradientLearning <int>(classifier)
            {
                Iterations = 50
            };

            // Run the algorithm and learn the models
            teacher.Run(inputSequences, outputLabels);


            // After training has finished, we can check the
            // output classification label for some sequences.

            int y1 = classifier.Compute(new[] { 0, 1, 1, 1, 0 });    // output is y1 = 0
            int y2 = classifier.Compute(new[] { 0, 0, 1, 1, 0, 0 }); // output is y2 = 0

            int y3 = classifier.Compute(new[] { 2, 2, 2, 2, 1, 1 }); // output is y3 = 1
            int y4 = classifier.Compute(new[] { 2, 2, 1, 1 });       // output is y4 = 1

            int y5 = classifier.Compute(new[] { 0, 0, 1, 3, 3, 3 }); // output is y5 = 2
            int y6 = classifier.Compute(new[] { 2, 0, 2, 2, 3, 3 }); // output is y6 = 2

            Assert.AreEqual(0, y1);
            Assert.AreEqual(0, y2);
            Assert.AreEqual(1, y3);
            Assert.AreEqual(1, y4);
            Assert.AreEqual(2, y5);
            Assert.AreEqual(2, y6);
        }
        public void ComputeTest()
        {
            HiddenMarkovModel model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            double actual;
            double expected;

            int[] x = { 0, 1 };

            for (int i = 0; i < model.States; i++)
            {
                // Check initial state transitions
                expected = Math.Exp(model.Probabilities[i]) * Math.Exp(model.Emissions[i, x[0]]);
                actual = Math.Exp(target.Factors[0].Compute(-1, i, x, 0));
                Assert.AreEqual(expected, actual, 1e-6);
            }

            for (int t = 0; t < x.Length; t++)
            {
                for (int i = 0; i < model.States; i++)
                {
                    // Check initial state transitions
                    expected = Math.Exp(model.Probabilities[i]) * Math.Exp(model.Emissions[i, x[0]]);
                    actual = Math.Exp(target.Factors[0].Compute(-1, i, x, 0));
                    Assert.AreEqual(expected, actual, 1e-6);

                    // Check normal state transitions
                    for (int j = 0; j < model.States; j++)
                    {
                        double xb = Math.Exp(model.Transitions[i, j]);
                        double xc = Math.Exp(model.Emissions[j, x[t]]);
                        expected = xb * xc;
                        actual = Math.Exp(target.Factors[0].Compute(i, j, x, t));
                        Assert.AreEqual(expected, actual, 1e-6);
                    }
                }
            }

        }
        public void ComputeTest2()
        {
            // Suppose we would like to learn how to classify the
            // following set of sequences among three class labels: 

            int[][] inputSequences =
            {
                // First class of sequences: starts and
                // ends with zeros, ones in the middle:
                new[] { 0, 1, 1, 1, 0 },        
                new[] { 0, 0, 1, 1, 0, 0 },     
                new[] { 0, 1, 1, 1, 1, 0 },     
 
                // Second class of sequences: starts with
                // twos and switches to ones until the end.
                new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },
 
                // Third class of sequences: can start
                // with any symbols, but ends with three.
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 0, 0, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 2, 2, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
            };

            // Now consider their respective class labels
            int[] outputLabels =
            {
                /* Sequences  1-3 are from class 0: */ 0, 0, 0,
                /* Sequences  4-6 are from class 1: */ 1, 1, 1,
                /* Sequences 7-14 are from class 2: */ 2, 2, 2, 2, 2, 2, 2, 2
            };


            // Create the Hidden Conditional Random Field using a set of discrete features
            var function = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
            var classifier = new HiddenConditionalRandomField<int>(function);

            // Create a learning algorithm
            var teacher = new HiddenResilientGradientLearning<int>(classifier)
            {
                Iterations = 50
            };

            // Run the algorithm and learn the models
            teacher.Run(inputSequences, outputLabels);


            // After training has finished, we can check the 
            // output classification label for some sequences. 

            int y1 = classifier.Compute(new[] { 0, 1, 1, 1, 0 });    // output is y1 = 0
            int y2 = classifier.Compute(new[] { 0, 0, 1, 1, 0, 0 }); // output is y2 = 0

            int y3 = classifier.Compute(new[] { 2, 2, 2, 2, 1, 1 }); // output is y3 = 1
            int y4 = classifier.Compute(new[] { 2, 2, 1, 1 });       // output is y4 = 1

            int y5 = classifier.Compute(new[] { 0, 0, 1, 3, 3, 3 }); // output is y5 = 2
            int y6 = classifier.Compute(new[] { 2, 0, 2, 2, 3, 3 }); // output is y6 = 2

            Assert.AreEqual(0, y1);
            Assert.AreEqual(0, y2);
            Assert.AreEqual(1, y3);
            Assert.AreEqual(1, y4);
            Assert.AreEqual(2, y5);
            Assert.AreEqual(2, y6);
        }
        public void ComputeTest()
        {
            HiddenMarkovClassifier model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            double actual;
            double expected;

            int[] x = { 0, 1 };

            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    double xa = model.Priors[c];
                    double xb = Math.Exp(model[c].Probabilities[i]);
                    double xc = Math.Exp(model[c].Emissions[i, x[0]]);
                    expected = xa * xb * xc;
                    actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            double xb = Math.Exp(model[c].Transitions[i, j]);
                            double xc = Math.Exp(model[c].Emissions[j, x[t]]);
                            expected = xb * xc;
                            actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }
            }

        }
        static void runHiddenConditionalRandomFieldLearningExample()
        {
            // Observation sequences should only contain symbols that are greater than or equal to 0 and less than the number of symbols.
            int[][] observationSequences =
            {
                // First class of sequences: starts and ends with zeros, ones in the middle.
                new[] { 0, 1, 1, 1, 0 },
                new[] { 0, 0, 1, 1, 0, 0 },
                new[] { 0, 1, 1, 1, 1, 0 },

                // Second class of sequences: starts with twos and switches to ones until the end.
                new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

                // Third class of sequences: can start with any symbols, but ends with three.
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 0, 0, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 2, 2, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
            };

            // Consider their respective class labels.
            // Class labels must be zero-based, consecutive integers.
            int[] classLabels =
            {
                0, 0, 0,               // Sequences 1-3 are from class 0.
                1, 1, 1,               // Sequences 4-6 are from class 1.
                2, 2, 2, 2, 2, 2, 2, 2 // Sequences 7-14 are from class 2.
            };

            // Create the Hidden Conditional Random Field using a set of discrete features.
            var function = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
            var hcrf     = new HiddenConditionalRandomField <int>(function);

            // Create a learning algorithm.
            var trainer = new HiddenResilientGradientLearning <int>(hcrf)
            {
                Iterations = 50
            };

            // Run the algorithm and learn the models.
            double error = trainer.Run(observationSequences, classLabels);

            Console.WriteLine("the error in the last iteration = {0}", error);

            // Check the output classification label for some sequences.
            int y1 = hcrf.Compute(new[] { 0, 1, 1, 1, 0 });  // output is y1 = 0.

            Console.WriteLine("output class = {0}", y1);
            int y2 = hcrf.Compute(new[] { 0, 0, 1, 1, 0, 0 });  // output is y2 = 0.

            Console.WriteLine("output class = {0}", y2);

            int y3 = hcrf.Compute(new[] { 2, 2, 2, 2, 1, 1 });  // output is y3 = 1.

            Console.WriteLine("output class = {0}", y3);
            int y4 = hcrf.Compute(new[] { 2, 2, 1, 1 });  // output is y4 = 1.

            Console.WriteLine("output class = {0}", y4);

            int y5 = hcrf.Compute(new[] { 0, 0, 1, 3, 3, 3 });  // output is y5 = 2.

            Console.WriteLine("output class = {0}", y5);
            int y6 = hcrf.Compute(new[] { 2, 0, 2, 2, 3, 3 });  // output is y6 = 2.

            Console.WriteLine("output class = {0}", y6);
        }
        public void LogBackwardTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel3();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            int[] observations = { 0, 0, 1, 1 };

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
              ForwardBackwardAlgorithm.Backward(function.Factors[0], observations, 0));

            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.LogBackward(function.Factors[0], observations, 0);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            foreach (double p in actual)
                Assert.IsFalse(double.IsNaN(p));
        }
        public void GradientTest3()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new ForwardBackwardGradient<int>(model);
            target.Regularization = 2;

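            // Compare the analytic gradient against a finite-differences approximation
            // of the regularized objective. The helper 'func' and the fields 'inputs'
            // and 'outputs' are assumed to be defined earlier in this test fixture.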
            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-5);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
        public void LogForwardTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel2();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            int[] observations = { 0, 1, 1, 0 };

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0));

            double logLikelihood;
            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out logLikelihood);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

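            // The sequence likelihood is the sum, over states, of the exponential
            // of the last row of the log-forward matrix.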
            double p = 0;
            for (int i = 0; i < hmm.States; i++)
                p += Math.Exp(actual[observations.Length - 1, i]);

            Assert.AreEqual(0.054814695, p, 1e-8);
            Assert.AreEqual(0.054814695, Math.Exp(logLikelihood), 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void LogForwardBackwardTest()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel1();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     G  G  C  A
            int[] observations = { 2, 2, 1, 0 };

            double fwdLogLikelihood;
            double[,] fwd = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out fwdLogLikelihood);

            double bwdLogLikelihood;
            double[,] bwd = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.LogBackward(function.Factors[0], observations, 0, out bwdLogLikelihood);

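            // The forward and backward passes must report the same log-likelihood
            // for the same observation sequence.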
            Assert.AreEqual(fwdLogLikelihood, bwdLogLikelihood, 1e-10); // -5.5614629361549142
            Assert.AreEqual(-5.5614629361549142, fwdLogLikelihood, 1e-10);
            Assert.IsFalse(double.IsNaN(fwdLogLikelihood));
        }
        public void ForwardTest2()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel2();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);


            //                     A  B  B  A
            int[] observations = { 0, 1, 1, 0 };

            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations);

            var A = Matrix.Exp(hmm.Transitions);
            var B = Matrix.Exp(hmm.Emissions);
            var P = Matrix.Exp(hmm.Probabilities);

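            // Forward recursion, written out by hand:
            //   a_0(j) = P[j] * B[j, o_0]
            //   a_t(j) = (sum_i a_{t-1}(i) * A[i, j]) * B[j, o_t]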
            double a00 = P[0] * B[0, 0];
            double a01 = P[1] * B[1, 0];

            double a10 = (a00 * A[0, 0] + a01 * A[1, 0]) * B[0, 1];
            double a11 = (a01 * A[1, 1] + a00 * A[0, 1]) * B[1, 1];

            double a20 = (a10 * A[0, 0] + a11 * A[1, 0]) * B[0, 1];
            double a21 = (a11 * A[1, 1] + a10 * A[0, 1]) * B[1, 1];

            double a30 = (a20 * A[0, 0] + a21 * A[1, 0]) * B[0, 0];
            double a31 = (a21 * A[1, 1] + a20 * A[0, 1]) * B[1, 0];


            Assert.AreEqual(a00, actual[0, 0], 1e-10);
            Assert.AreEqual(a01, actual[0, 1], 1e-10);

            Assert.AreEqual(a10, actual[1, 0], 1e-10);
            Assert.AreEqual(a11, actual[1, 1], 1e-10);

            Assert.AreEqual(a20, actual[2, 0], 1e-10);
            Assert.AreEqual(a21, actual[2, 1], 1e-10);

            Assert.AreEqual(a30, actual[3, 0], 1e-10);
            Assert.AreEqual(a31, actual[3, 1], 1e-10);

            foreach (double e in actual)
                Assert.IsFalse(double.IsNaN(e));

            double p = 0;
            for (int i = 0; i < hmm.States; i++)
                p += actual[observations.Length - 1, i];

            Assert.AreEqual(0.054814695, p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void RunTest()
        {
            int nstates = 3;
            int symbols = 3;

            int[][] sequences = new int[][] 
            {
                new int[] { 0, 1, 1, 1, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 0, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 2, 2 },
                new int[] { 0, 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 0, 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 2, 2, 2 },
            };


            var function = new MarkovDiscreteFunction(nstates, symbols);
            var model = new ConditionalRandomField<int>(nstates, function);


            for (int i = 0; i < sequences.Length; i++)
            {
                double p;
                int[] s = sequences[i];
                int[] r = model.Compute(s, out p);
                Assert.IsFalse(s.IsEqual(r));
            }

            var target = new QuasiNewtonLearning<int>(model); 

            int[][] labels = sequences;
            int[][] observations = sequences;
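
            // The label sequence of every sample equals its observation sequence, so
            // after training the decoded sequence should reproduce the input exactly
            // (checked by the asserts below).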

            double ll0 = model.LogLikelihood(observations, labels);

            double actual = target.Run(observations, labels);

            double ll1 = model.LogLikelihood(observations, labels);

            Assert.IsTrue(ll1 > ll0);


            Assert.AreEqual(0, actual, 1e-8);

            for (int i = 0; i < sequences.Length; i++)
            {
                double p;
                int[] s = sequences[i];
                int[] r = model.Compute(s, out p);
                Assert.IsTrue(s.IsEqual(r));
            }
            
        }
        public void ForwardTest3()
        {
            HiddenMarkovModel hmm = Accord.Tests.Statistics.Models.Markov.
                ForwardBackwardAlgorithmTest.CreateModel3();

            MarkovDiscreteFunction function = new MarkovDiscreteFunction(hmm);

            //                     L  L  R  R
            int[] observations = { 0, 0, 1, 1 };

            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations);

            // Forward matrices from R's HMM package are
            // transposed in relation to the framework's:
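            // (here rows index time steps and columns index states)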

            Assert.AreEqual(4, actual.GetLength(0));
            Assert.AreEqual(2, actual.GetLength(1));

            Assert.AreEqual(0.675, actual[0, 0], 1e-10);
            Assert.AreEqual(0.012, actual[0, 1], 1e-10);

            Assert.AreEqual(0.4077, actual[1, 0], 1e-10);
            Assert.AreEqual(0.017208, actual[1, 1], 1e-10);

            Assert.AreEqual(0.0828306, actual[2, 0], 1e-10);
            Assert.AreEqual(0.082355328, actual[2, 1], 1e-10);

            Assert.AreEqual(0.0227427696, actual[3, 0], 1e-10);
            Assert.AreEqual(0.065309067648, actual[3, 1], 1e-10);

            foreach (double p in actual)
                Assert.IsFalse(double.IsNaN(p));
        }
        static void runHiddenConditionalRandomFieldLearningExample()
        {
            // Observation sequences should only contain symbols that are greater than or equal to 0 and less than the number of symbols.
            int[][] observationSequences =
            {
                // First class of sequences: starts and ends with zeros, ones in the middle.
                new[] { 0, 1, 1, 1, 0 },
                new[] { 0, 0, 1, 1, 0, 0 },
                new[] { 0, 1, 1, 1, 1, 0 },

                // Second class of sequences: starts with twos and switches to ones until the end.
                new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

                // Third class of sequences: can start with any symbols, but ends with three.
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 0, 0, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 2, 2, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
            };

            // Consider their respective class labels.
            // Class labels have to be zero-based and successive integers.
            int[] classLabels =
            {
                0, 0, 0,  // Sequences 1-3 are from class 0.
                1, 1, 1,  // Sequences 4-6 are from class 1.
                2, 2, 2, 2, 2, 2, 2, 2  // Sequences 7-14 are from class 2.
            };

            // Create the Hidden Conditional Random Field using a set of discrete features.
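            // states: 3 hidden states per class, symbols: 4 because the sequences use
            // the alphabet { 0, 1, 2, 3 }, outputClasses: 3 because there are three
            // classes of sequences to distinguish.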
            var function = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
            var hcrf = new HiddenConditionalRandomField<int>(function);

            // Create a learning algorithm.
            var trainer = new HiddenResilientGradientLearning<int>(hcrf)
            {
                Iterations = 50
            };

            // Run the algorithm and learn the models.
            double error = trainer.Run(observationSequences, classLabels);
            Console.WriteLine("the error in the last iteration = {0}", error);

            // Check the output classification label for some sequences.
            int y1 = hcrf.Compute(new[] { 0, 1, 1, 1, 0 });  // output is y1 = 0.
            Console.WriteLine("output class = {0}", y1);
            int y2 = hcrf.Compute(new[] { 0, 0, 1, 1, 0, 0 });  // output is y2 = 0.
            Console.WriteLine("output class = {0}", y2);

            int y3 = hcrf.Compute(new[] { 2, 2, 2, 2, 1, 1 });  // output is y3 = 1.
            Console.WriteLine("output class = {0}", y3);
            int y4 = hcrf.Compute(new[] { 2, 2, 1, 1 });  // output is y4 = 1.
            Console.WriteLine("output class = {0}", y4);

            int y5 = hcrf.Compute(new[] { 0, 0, 1, 3, 3, 3 });  // output is y5 = 2.
            Console.WriteLine("output class = {0}", y5);
            int y6 = hcrf.Compute(new[] { 2, 0, 2, 2, 3, 3 });  // output is y6 = 2.
            Console.WriteLine("output class = {0}", y6);
        }
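        // A minimal sketch, not part of the original tests: given a trained field and
        // the training data above, report the fraction of sequences classified correctly.
        // Only the Compute method already used in this file is assumed; the method name
        // and parameters are illustrative.
        static double computeClassificationAccuracy(HiddenConditionalRandomField<int> hcrf,
            int[][] sequences, int[] labels)
        {
            int hits = 0;
            for (int i = 0; i < sequences.Length; i++)
            {
                // Compute returns the most likely class label for the sequence.
                if (hcrf.Compute(sequences[i]) == labels[i])
                    hits++;
            }

            return hits / (double)sequences.Length;
        }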