Example #1
        public void GradientTest3()
        {
            var hmm      = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);

            target.Regularization = 2;

            var inputs  = inputs1;
            var outputs = outputs1;



            // Numerically differentiate the regularized objective and compare the
            // result against the analytic gradient produced by the learning algorithm
            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
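The gradient checks above (and in Example #4 below) differentiate a helper `func` that is not included in this listing. The sketch below shows one plausible form for it: the negative log-likelihood of the labeled sequences plus an optional penalty term. The method name is taken from the call sites, but the exact penalty scaling (0.5 * ||w||^2 / sigma) and the assumption that `model.Function.Weights` can be assigned are guesses, not the library's confirmed implementation.

        // Hypothetical sketch of the objective evaluated by FiniteDifferences.
        // Assumes the potential function's Weights property is settable and that
        // the learner minimizes the (optionally regularized) negative log-likelihood.
        private static double func(HiddenConditionalRandomField<double[]> model,
            double[] parameters, double[][][] inputs, int[] outputs, double sigma = 0)
        {
            // Evaluate the model at the candidate parameter vector
            model.Function.Weights = parameters;

            // Optional squared-norm penalty; the 0.5 / sigma scaling is an assumption
            double penalty = 0;
            if (sigma != 0)
            {
                for (int i = 0; i < parameters.Length; i++)
                    if (!double.IsInfinity(parameters[i]) && !double.IsNaN(parameters[i]))
                        penalty += parameters[i] * parameters[i];
                penalty = 0.5 * penalty / sigma;
            }

            // Negative log-likelihood of the labeled sequences, plus the penalty
            return -model.LogLikelihood(inputs, outputs) + penalty;
        }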
Example #2
        public void RunTest()
        {
            var hmm      = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);

            var inputs  = inputs1;
            var outputs = outputs1;

            double[] actual   = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            // Before any training, the potential function was built directly from the
            // HMM, so both models should assign the same log-likelihood to the data
            double llm = hmm.LogLikelihood(inputs, outputs);
            double ll0 = model.LogLikelihood(inputs, outputs);

            Assert.AreEqual(llm, ll0, 1e-10);
            Assert.IsFalse(double.IsNaN(llm));
            Assert.IsFalse(double.IsNaN(ll0));

            // Run a single epoch of quasi-Newton learning; the returned error
            // is the negative log-likelihood after the parameter update
            double error = target.RunEpoch(inputs, outputs);
            double ll1   = model.LogLikelihood(inputs, outputs);

            Assert.AreEqual(-ll1, error, 1e-10);
            Assert.IsFalse(double.IsNaN(ll1));
            Assert.IsFalse(double.IsNaN(error));


            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.0000041736023117522336, ll0, 1e-10);

            Assert.AreEqual(error, -ll1);
            Assert.IsFalse(Double.IsNaN(ll0));
            Assert.IsFalse(Double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            Assert.IsTrue(ll1 > ll0);
        }
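Example #3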
        public void LogForwardTest3()
        {
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs =
            {
                0, 0, 1
            };

            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var observations = inputs[0];

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
                                            ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0));

            double logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out logLikelihood);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            double p = 0;

            for (int i = 0; i < hmm[0].States; i++)
            {
                p += Math.Exp(actual[observations.Length - 1, i]);
            }

            Assert.AreEqual(Math.Exp(logLikelihood), p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
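The final loop verifies the standard forward-recursion identity that LogForward is expected to satisfy: exponentiating and summing the last row of the log-forward matrix recovers the likelihood value reported through `logLikelihood`,

    \sum_{i=1}^{N} \alpha_T(i) \;=\; \sum_{i=1}^{N} \exp\bigl(\log \alpha_T(i)\bigr) \;=\; \exp(\text{logLikelihood}),

where \alpha_T(i) is the forward variable for state i at the final observation and N is the number of states of the class-0 model.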
Example #4
        public void GradientTest()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs =
            {
                0, 0, 1
            };

            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
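Example #5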
        public void GradientTest3()
        {
            var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);
            target.Regularization = 2;

            var inputs = inputs1;
            var outputs = outputs1;



            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
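Example #6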
        public void GradientTest()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs = 
            {
                0, 0, 1
            };

            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
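Example #7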
        public void RunTest()
        {
            var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);

            var inputs = inputs1;
            var outputs = outputs1;

            double[] actual = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            double llm = hmm.LogLikelihood(inputs, outputs);
            double ll0 = model.LogLikelihood(inputs, outputs);
            Assert.AreEqual(llm, ll0, 1e-10);
            Assert.IsFalse(double.IsNaN(llm));
            Assert.IsFalse(double.IsNaN(ll0));

            double error = target.RunEpoch(inputs, outputs);
            double ll1 = model.LogLikelihood(inputs, outputs);
            Assert.AreEqual(-ll1, error, 1e-10);
            Assert.IsFalse(double.IsNaN(ll1));
            Assert.IsFalse(double.IsNaN(error));


            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i] = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.0000041736023117522336, ll0, 1e-10);
            
            Assert.AreEqual(error, -ll1);
            Assert.IsFalse(Double.IsNaN(ll0));
            Assert.IsFalse(Double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(expected[i], actual[i]);

            Assert.IsTrue(ll1 > ll0);
        }
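Example #8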
        public void ComputeTest5()
        {
            var model = CreateModel3(states: 7);

            var target = new MultivariateNormalMarkovClassifierFunction(model);

            double actual;
            double expected;

            double[][] x = { new double[] { 0, 1 }, new double[] { 3, 2 } };

            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    expected = model.Priors[c] * Math.Exp(model[c].Probabilities[i]) * model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
                    actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            expected = Math.Exp(model[c].Transitions[i, j]) * model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                            actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }
            }

            var hcrf = new HiddenConditionalRandomField<double[]>(target);

            for (int i = 0; i < inputTest.Length; i++)
            {
                int h = model.Compute(inputTest[i]);
                int c = hcrf.Compute(inputTest[i]);
                Assert.AreEqual(h, c);
            }
        }
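Example #9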
        public void HiddenMarkovHiddenPotentialFunctionConstructorTest2()
        {
            var model = CreateModel2();

            var target = new MultivariateNormalMarkovClassifierFunction(model);

            var features = target.Features;
            double[] weights = target.Weights;

            Assert.AreEqual(38, features.Length);
            Assert.AreEqual(38, weights.Length);

            // Weights are laid out per class: log-prior, initial state log-probabilities,
            // state transition log-probabilities, and three Gaussian coefficients
            // per state and dimension (see the expansion after this method)
            int k = 0;
            for (int c = 0; c < model.Classes; c++)
            {
                Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                    Assert.AreEqual(model[c].Probabilities[i], weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                    for (int j = 0; j < model[c].States; j++)
                        Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                    for (int j = 0; j < model[c].Dimension; j++)
                    {
                        double mean = model[c].Emissions[i].Mean[j];
                        double var = model[c].Emissions[i].Variance[j];

                        double l2ps = System.Math.Log(2 * System.Math.PI * var);

                        Assert.AreEqual(-0.5 * (l2ps + (mean * mean) / var), weights[k++]);
                        Assert.AreEqual(mean / var, weights[k++]);
                        Assert.AreEqual(-1.0 / (2 * var), weights[k++]);
                    }
            }

        }
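The last block of assertions follows from expanding the Gaussian log-density of each emission dimension as a linear combination of the sufficient statistics 1, x and x^2:

    \log \mathcal{N}(x;\, \mu, \sigma^2)
        = -\tfrac{1}{2}\log(2\pi\sigma^2) - \frac{(x-\mu)^2}{2\sigma^2}
        = \underbrace{-\tfrac{1}{2}\Bigl(\log(2\pi\sigma^2) + \tfrac{\mu^2}{\sigma^2}\Bigr)}_{\text{constant term}}
          \;+\; \underbrace{\frac{\mu}{\sigma^2}}_{\text{coefficient of } x}\, x
          \;\underbrace{-\,\frac{1}{2\sigma^2}}_{\text{coefficient of } x^2}\, x^2,

which is exactly why the potential function stores three weights per state and dimension, in the order checked by the loop above.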
Example #10
        public void ComputeTest3()
        {
            var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

            var owner = new MultivariateNormalMarkovClassifierFunction(hmm);


            double[][] x =
            {
                new double[] {              0 },
                new double[] {              1 },
                new double[] {              3 },
                new double[] {              1 },
                new double[] {              2 },
                new double[] {              8 },
                new double[] {              0 },
                new double[] {             10 },
                new double[] { System.Math.PI },
            };

            // The log-domain recursions must agree element-wise with the exponential-domain
            // ones, and the feature marginals must match in both domains as well
            foreach (var factor in owner.Factors)
            {
                for (int y = 0; y < owner.Outputs; y++)
                {
                    double[,] fwd = Accord.Statistics.Models.Fields
                                    .ForwardBackwardAlgorithm.Forward(factor, x, y);

                    double[,] bwd = Accord.Statistics.Models.Fields
                                    .ForwardBackwardAlgorithm.Backward(factor, x, y);

                    double[,] lnfwd = Accord.Statistics.Models.Fields
                                      .ForwardBackwardAlgorithm.LogForward(factor, x, y);

                    double[,] lnbwd = Accord.Statistics.Models.Fields
                                      .ForwardBackwardAlgorithm.LogBackward(factor, x, y);


                    for (int i = 0; i < fwd.GetLength(0); i++)
                    {
                        for (int j = 0; j < fwd.GetLength(1); j++)
                        {
                            Assert.AreEqual(System.Math.Log(fwd[i, j]), lnfwd[i, j], 1e-10);
                        }
                    }

                    for (int i = 0; i < bwd.GetLength(0); i++)
                    {
                        for (int j = 0; j < bwd.GetLength(1); j++)
                        {
                            Assert.AreEqual(System.Math.Log(bwd[i, j]), lnbwd[i, j], 1e-10);
                        }
                    }


                    foreach (var feature in factor)
                    {
                        double expected = System.Math.Log(feature.Marginal(fwd, bwd, x, y));
                        double actual   = feature.LogMarginal(lnfwd, lnbwd, x, y);

                        Assert.AreEqual(expected, actual, 1e-10);
                        Assert.IsFalse(Double.IsNaN(actual));
                    }
                }
            }
        }
Example #11
        public void ComputeTest3()
        {
            var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

            var owner = new MultivariateNormalMarkovClassifierFunction(hmm);


            double[][] x = 
            { 
                new double[] { 0 },
                new double[] { 1 },
                new double[] { 3 },
                new double[] { 1 },
                new double[] { 2 },
                new double[] { 8 },
                new double[] { 0 },
                new double[] { 10 },
                new double[] { System.Math.PI },
            };

            foreach (var factor in owner.Factors)
            {
                for (int y = 0; y < owner.Outputs; y++)
                {
                    double[,] fwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.Forward(factor, x, y);

                    double[,] bwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.Backward(factor, x, y);

                    double[,] lnfwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.LogForward(factor, x, y);

                    double[,] lnbwd = Accord.Statistics.Models.Fields
                        .ForwardBackwardAlgorithm.LogBackward(factor, x, y);


                    for (int i = 0; i < fwd.GetLength(0); i++)
                        for (int j = 0; j < fwd.GetLength(1); j++)
                            Assert.AreEqual(System.Math.Log(fwd[i, j]), lnfwd[i, j], 1e-10);

                    for (int i = 0; i < bwd.GetLength(0); i++)
                        for (int j = 0; j < bwd.GetLength(1); j++)
                            Assert.AreEqual(System.Math.Log(bwd[i, j]), lnbwd[i, j], 1e-10);


                    foreach (var feature in factor)
                    {
                        double expected = System.Math.Log(feature.Marginal(fwd, bwd, x, y));
                        double actual = feature.LogMarginal(lnfwd, lnbwd, x, y);

                        Assert.AreEqual(expected, actual, 1e-10);
                        Assert.IsFalse(Double.IsNaN(actual));
                    }

                }
            }
        }
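Example #12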
        public void LogForwardTest3()
        {
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs = 
            {
                0, 0, 1
            };

            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var observations = inputs[0];

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0));

            double logLikelihood;
            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out logLikelihood);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            double p = 0;
            for (int i = 0; i < hmm[0].States; i++)
                p += Math.Exp(actual[observations.Length - 1, i]);

            Assert.AreEqual(Math.Exp(logLikelihood), p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }