        public void GradientTest3()
        {
            var hmm      = NormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new NormalMarkovClassifierFunction(hmm);

            var model  = new HiddenConditionalRandomField<double>(function);
            var target = new QuasiNewtonHiddenLearning<double>(model);

            target.Regularization = 2;

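            // Compare the analytic gradient computed by the learner against a numerical
            // finite-differences approximation of the same regularized objective. The
            // helper `func` and the `inputs`/`outputs` fixtures are defined elsewhere in
            // the test class (a sketch of `func` follows this method).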
            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-2);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
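
        // The following is not part of the original test class: it is a minimal sketch of
        // the `func` helper called by GradientTest3 above, assuming it evaluates the
        // regularized negative log-likelihood that the learner minimizes, and assuming
        // that IPotentialFunction<double>.Weights is writable. The exact scaling of the
        // L2 penalty by `sigma` is likewise an assumption, not verified Accord.NET source.
        private double func(HiddenConditionalRandomField<double> model,
            double[] parameters, double sigma)
        {
            // Evaluate the objective at the candidate parameter vector.
            model.Function.Weights = parameters;

            // Assumed L2 penalty: half the sum of squared (finite) weights, scaled by sigma.
            double penalty = 0;
            if (sigma != 0)
            {
                for (int i = 0; i < parameters.Length; i++)
                    if (!double.IsInfinity(parameters[i]) && !double.IsNaN(parameters[i]))
                        penalty += parameters[i] * parameters[i];
                penalty = 0.5 * penalty / sigma;
            }

            // The learner minimizes the negative log-likelihood plus the penalty.
            return -model.LogLikelihood(inputs, outputs) + penalty;
        }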
Example #2
        public void ComputeTest2()
        {
            var hmm = NormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

            IPotentialFunction<double> owner = new NormalMarkovClassifierFunction(hmm);


            double[] x = new double[] { 0, 1, 2, 1, 7, 2, 1, -2, 5, 3, 4 };

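            // For every class factor and every class label y, the log of each linear-space
            // forward/backward entry should equal the corresponding log-space entry, and
            // each feature's marginal should agree with its log-marginal.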
            foreach (var factor in owner.Factors)
            {
                for (int y = 0; y < owner.Outputs; y++)
                {
                    double[,] fwd = Accord.Statistics.Models.Fields
                                    .ForwardBackwardAlgorithm.Forward(factor, x, y);

                    double[,] bwd = Accord.Statistics.Models.Fields
                                    .ForwardBackwardAlgorithm.Backward(factor, x, y);

                    double[,] lnfwd = Accord.Statistics.Models.Fields
                                      .ForwardBackwardAlgorithm.LogForward(factor, x, y);

                    double[,] lnbwd = Accord.Statistics.Models.Fields
                                      .ForwardBackwardAlgorithm.LogBackward(factor, x, y);


                    for (int i = 0; i < fwd.GetLength(0); i++)
                    {
                        for (int j = 0; j < fwd.GetLength(1); j++)
                        {
                            Assert.AreEqual(System.Math.Log(fwd[i, j]), lnfwd[i, j], 1e-10);
                        }
                    }

                    for (int i = 0; i < bwd.GetLength(0); i++)
                    {
                        for (int j = 0; j < bwd.GetLength(1); j++)
                        {
                            Assert.AreEqual(System.Math.Log(bwd[i, j]), lnbwd[i, j], 1e-10);
                        }
                    }

                    foreach (var feature in factor)
                    {
                        double expected = System.Math.Log(feature.Marginal(fwd, bwd, x, y));
                        double actual   = feature.LogMarginal(lnfwd, lnbwd, x, y);

                        Assert.AreEqual(expected, actual, 1e-10);
                        Assert.IsFalse(Double.IsNaN(actual));
                    }
                }
            }
        }
        public void RunTest()
        {
            var hmm      = NormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new NormalMarkovClassifierFunction(hmm);

            var model  = new HiddenConditionalRandomField<double>(function);
            var target = new QuasiNewtonHiddenLearning<double>(model);

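            // Before any training, the conditional random field initialized from the HMM
            // should reproduce the HMM's classifications and, up to numerical precision,
            // its log-likelihood.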
            double[] actual   = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            double llm = hmm.LogLikelihood(inputs, outputs);
            double ll0 = model.LogLikelihood(inputs, outputs);

            Assert.AreEqual(llm, ll0, 1e-10);
            Assert.IsFalse(Double.IsNaN(llm));
            Assert.IsFalse(Double.IsNaN(ll0));

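            // Run a single quasi-Newton epoch: the returned error equals the negative
            // log-likelihood of the updated model.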
            double error = target.RunEpoch(inputs, outputs);
            double ll1   = model.LogLikelihood(inputs, outputs);

            Assert.AreEqual(-ll1, error, 1e-10);
            Assert.IsFalse(Double.IsNaN(ll1));
            Assert.IsFalse(Double.IsNaN(error));

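            // After the epoch the model should still classify every training sample
            // correctly, and its log-likelihood should have improved.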
            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.0000041736023099758768, ll0, 1e-10);

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            Assert.IsTrue(ll1 > ll0);
        }
Example #4
        public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
        {
            HiddenMarkovClassifier <NormalDistribution> model = CreateModel1();

            var target = new NormalMarkovClassifierFunction(model);

            var features = target.Features;

            double[] weights = target.Weights;

            Assert.AreEqual(26, features.Length);
            Assert.AreEqual(26, weights.Length);

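            // Per class, the weight vector concatenates: the log class prior, the initial
            // state parameters, the state transition parameters, and three coefficients per
            // state and dimension obtained by expanding the log of the Normal density
            // (constant, mean/variance and -1/(2*variance) terms), giving 26 weights here.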
            int k = 0;

            for (int c = 0; c < model.Classes; c++)
            {
                Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                {
                    Assert.AreEqual(model[c].Probabilities[i], weights[k++]);
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].States; j++)
                    {
                        Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);
                    }
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].Dimension; j++)
                    {
                        double mean = model[c].Emissions[i].Mean;
                        double var  = model[c].Emissions[i].Variance;

                        double l2ps = System.Math.Log(2 * System.Math.PI * var);

                        Assert.AreEqual(-0.5 * (l2ps + (mean * mean) / var), weights[k++]);
                        Assert.AreEqual(mean / var, weights[k++]);
                        Assert.AreEqual(-1.0 / (2 * var), weights[k++]);
                    }
                }
            }
        }
Example #5
        public void ComputeTest()
        {
            var model = CreateModel1();

            var target = new NormalMarkovClassifierFunction(model);

            double actual;
            double expected;

            double[] x = { 0, 1 };

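            // The exponential of each factor potential should equal the class prior times
            // the (exponentiated) initial or transition parameter times the Normal emission
            // density evaluated at the observation.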
            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    expected = model.Priors[c] * Math.Exp(model[c].Probabilities[i]) * model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
                    actual   = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            expected = model.Priors[c] * Math.Exp(model[c].Transitions[i, j]) * model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                            actual   = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }
            }
        }