Example #1
        public void GradientTest3()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model  = new HiddenConditionalRandomField<int>(function);
            var target = new ForwardBackwardGradient<int>(model);

            target.Regularization = 2;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-5);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
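The tests in this listing rely on a helper named func and, in several cases, on fixture-level inputs/outputs fields, none of which appear here. A minimal sketch of what such an objective helper could look like for the discrete case follows; the field definitions, the helper signature, and the exact form of the regularization penalty are assumptions rather than the original fixture code.

        // Hypothetical sketch of the objective minimized by the finite-difference check above.
        // Assumption: the regularization term is an L2 (Gaussian prior) penalty on the weights.
        private static int[][] inputs;   // discrete observation sequences (fixture data, not shown)
        private static int[] outputs;    // class label for each sequence (fixture data, not shown)

        private static double func(HiddenConditionalRandomField<int> model, double[] parameters, double sigma = 0)
        {
            // Evaluate the model at the trial weight vector supplied by FiniteDifferences.
            model.Function.Weights = parameters;

            // Negative conditional log-likelihood of the labels given the sequences.
            double objective = -model.LogLikelihood(inputs, outputs);

            // Optional L2 penalty; this particular form (sum of w^2 / (2*sigma^2)) is an assumption.
            if (sigma != 0)
            {
                double sumOfSquares = 0;
                foreach (double w in parameters)
                    sumOfSquares += w * w;
                objective += sumOfSquares / (2.0 * sigma * sigma);
            }

            return objective;
        }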
Example #2
        public void GradientTest4()
        {
            var hmm      = IndependentMarkovClassifierPotentialFunctionTest.CreateModel2();
            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            target.Regularization = 0;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters,
                                               IndependentMarkovClassifierPotentialFunctionTest.sequences,
                                               IndependentMarkovClassifierPotentialFunctionTest.labels);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights,
                                                IndependentMarkovClassifierPotentialFunctionTest.sequences,
                                                IndependentMarkovClassifierPotentialFunctionTest.labels);


            for (int i = 0; i < actual.Length; i++)
            {
                if (double.IsNaN(expected[i]))
                {
                    continue;
                }

                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
            }
        }
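GradientTest4 passes the sequences and labels to func explicitly and skips any index where the finite-difference estimate comes back NaN, presumably because the numerical estimate breaks down for that weight. A sketch of the corresponding unregularized overload for the multivariate case is shown below; as above, this is an assumption about the missing helper, not the original code.

        // Hypothetical sketch of the func overload taking explicit data, for double[] observations.
        private static double func(HiddenConditionalRandomField<double[]> model, double[] parameters,
            double[][][] sequences, int[] labels)
        {
            model.Function.Weights = parameters;              // evaluate at the trial point
            return -model.LogLikelihood(sequences, labels);   // negative conditional log-likelihood
        }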
Example #3
        public void GradientDeoptimizeTest2()
        {
            double[][][] sequences2;
            int[]        labels2;

            var hmm      = CreateModel3(out sequences2, out labels2);
            var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
            function.Deoptimize();
#pragma warning restore 0618

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            var inputs  = sequences2;
            var outputs = labels2;

            double[] actual = target.Gradient(function.Weights, inputs, outputs);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
            diff.Function = parameters => func(model, parameters, inputs, outputs);
            double[] expected = diff.Compute(function.Weights);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #4
        public void GradientTest_MarkovNormal_Regularization()
        {
            var hmm      = MarkovContinuousFunctionTest.CreateModel1();
            var function = new MarkovContinuousFunction(hmm);

            var model  = new HiddenConditionalRandomField<double>(function);
            var target = new ForwardBackwardGradient<double>(model);

            target.Regularization = 2;

            var inputs  = NormalQuasiNewtonHiddenLearningTest.inputs;
            var outputs = NormalQuasiNewtonHiddenLearningTest.outputs;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-2);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #5
        public void GradientTest3()
        {
            double[][][] sequences2;
            int[]        labels2;
            var          hmm      = CreateModel3(out sequences2, out labels2);
            var          function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            target.Regularization = 2;

            var inputs  = sequences2;
            var outputs = labels2;


            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                double e = expected[i];
                double a = actual[i];
                Assert.AreEqual(e, a, 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #6
        public void GradientTest_MarkovIndependentNormal_NoPriors()
        {
            double[][][] observations;
            int[]        labels;
            HiddenMarkovClassifier<Independent<NormalDistribution>> hmm =
                IndependentMarkovFunctionTest.CreateModel4(out observations, out labels, usePriors: false);

            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            target.Regularization = 0;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters,
                                               observations,
                                               labels);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, observations, labels);


            for (int i = 0; i < actual.Length; i++)
            {
                if (double.IsNaN(expected[i]))
                {
                    continue;
                }

                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
            }
        }
Example #7
        public void GradientTest_DiscreteMarkov()
        {
            var function = new MarkovDiscreteFunction(2, 2, 2);
            var model    = new HiddenConditionalRandomField<int>(function);
            var target   = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length)
            {
                StepSize = 1e-5
            };

            var inputs  = QuasiNewtonHiddenLearningTest.inputs;
            var outputs = QuasiNewtonHiddenLearningTest.outputs;

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
            }
        }
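GradientTest_DiscreteMarkov tightens the numerical check by giving FiniteDifferences an explicit StepSize. The same pattern can be exercised on its own, independent of any random field; the sketch below uses a toy objective with a known gradient (namespaces and the test attribute are omitted, as in the surrounding listings).

        public void FiniteDifferencesSketch()
        {
            // Two free variables, small step size, as in the test above.
            var diff = new FiniteDifferences(2) { StepSize = 1e-5 };

            // Toy objective f(x, y) = x^2 + 3y, whose analytic gradient is (2x, 3).
            diff.Function = w => w[0] * w[0] + 3 * w[1];

            double[] point     = { 1.5, -2.0 };
            double[] numerical = diff.Compute(point);      // approximately { 3.0, 3.0 }
            double[] analytic  = { 2 * point[0], 3 };

            for (int i = 0; i < analytic.Length; i++)
                Assert.AreEqual(analytic[i], numerical[i], 1e-4);
        }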
Example #8
        public void GradientTest_MarkovMultivariate()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs =
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length)
            {
                StepSize = 1e-5
            };

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #9
        public void GradientTest()
        {
            var function = new MarkovDiscreteFunction(2, 2, 2);
            var model    = new HiddenConditionalRandomField<int>(function);
            var target   = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #10
        public void GradientTest3()
        {
            var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 2;

            var inputs = inputs1;
            var outputs = outputs1;



            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #11
        public void GradientTest()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs = 
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #12
        public void GradientTest4()
        {
            var hmm = IndependentMarkovClassifierPotentialFunctionTest.CreateModel2();
            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 0;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters,
                IndependentMarkovClassifierPotentialFunctionTest.sequences,
                IndependentMarkovClassifierPotentialFunctionTest.labels);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights,
                IndependentMarkovClassifierPotentialFunctionTest.sequences,
                IndependentMarkovClassifierPotentialFunctionTest.labels);


            for (int i = 0; i < actual.Length; i++)
            {
                if (double.IsNaN(expected[i]))
                    continue;

                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
            }
        }
Example #13
        public void GradientTest2()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #14
        public void GradientTest2()
        {
            var hmm = CreateModel3();
            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            var inputs = sequences2;
            var outputs = labels2;

            double[] actual = target.Gradient(function.Weights, inputs, outputs);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
            diff.Function = parameters => func(model, parameters, inputs, outputs);
            double[] expected = diff.Compute(function.Weights);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #15
        public void GradientDeoptimizeTest3()
        {
            double[][][] sequences2;
            int[] labels2;
            var hmm = CreateModel3(out sequences2, out labels2);
            var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
            function.Deoptimize();
#pragma warning restore 0618

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 2;

            var inputs = sequences2;
            var outputs = labels2;


            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                double e = expected[i];
                double a = actual[i];
                Assert.AreEqual(e, a, 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }