Code example #1
        public void LogForwardGesturesDeoptimizedTest()
        {
            int[]        labels;
            double[][][] words;
            var          classifier = IndependentMarkovFunctionTest.CreateModel4(out words, out labels, false);

            // Convert the hidden Markov classifier into an equivalent potential
            // function, then undo its internal optimizations before building the field.
            var function = new MarkovMultivariateFunction(classifier);

            function.Deoptimize();

            var target = new HiddenConditionalRandomField<double[]>(function);

            // The forward matrices computed from the deoptimized factors should match
            // those computed directly from the original hidden Markov models.
            foreach (var word in words)
            {
                for (int c = 0; c < 3; c++)
                {
                    var actual = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm.LogForward(
                        target.Function.Factors[c], word, c);

                    var expected = Accord.Statistics.Models.Markov.ForwardBackwardAlgorithm.LogForward(
                        classifier[c], word);

                    for (int i = 0; i < actual.GetLength(0); i++)
                    {
                        for (int j = 0; j < actual.GetLength(1); j++)
                        {
                            double a = actual[i, j];
                            double e = expected[i, j];
                            Assert.IsTrue(e.IsRelativelyEqual(a, 0.1));
                        }
                    }
                }
            }
        }
Code example #2
        public void GradientDeoptimizeTest2()
        {
            double[][][] sequences2;
            int[]        labels2;

            var hmm      = CreateModel3(out sequences2, out labels2);
            var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
            function.Deoptimize();
#pragma warning restore 0618

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient <double[]>(model);

            var inputs  = sequences2;
            var outputs = labels2;

            // Analytic gradient from the forward-backward algorithm.
            double[] actual = target.Gradient(function.Weights, inputs, outputs);

            // Numerical gradient of the same objective, used as the reference.
            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
            diff.Function = parameters => func(model, parameters, inputs, outputs);
            double[] expected = diff.Compute(function.Weights);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
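Note: the `func` helper passed to `FiniteDifferences` above is not part of this listing. A minimal sketch of what such an objective could look like, assuming it simply loads the candidate weight vector into the field and returns the negative log-likelihood that `ForwardBackwardGradient` differentiates:

        // Hypothetical helper (not from the original listing): evaluates the
        // objective at a candidate parameter vector so FiniteDifferences can
        // approximate its gradient numerically.
        private static double func(HiddenConditionalRandomField<double[]> model,
            double[] parameters, double[][][] inputs, int[] outputs)
        {
            model.Function.Weights = parameters;          // load trial weights
            return -model.LogLikelihood(inputs, outputs); // objective to minimize
        }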
Code example #3
        public void ComputeDeoptimizeTest3()
        {
            double[][][] sequences;
            int[]        labels;
            var          model = CreateModel3(out sequences, out labels);

            var target = new MarkovMultivariateFunction(model);

#pragma warning disable 0618
            target.Deoptimize();
#pragma warning restore 0618

            var hcrf = new HiddenConditionalRandomField<double[]>(target);


            Assert.AreEqual(2, model.Priors.Length);
            Assert.AreEqual(1 / 2.0, model.Priors[0]);
            Assert.AreEqual(1 / 2.0, model.Priors[1]);

            check4(sequences, model, target, hcrf);
        }
Code example #4
        public void ComputeDeoptimizeTest4()
        {
            int[]        labels;
            double[][][] words;
            var          model = CreateModel4(out words, out labels, false);

            var target = new MarkovMultivariateFunction(model);

#pragma warning disable 0618
            target.Deoptimize();
#pragma warning restore 0618

            var hcrf = new HiddenConditionalRandomField<double[]>(target);


            Assert.AreEqual(3, model.Priors.Length);
            Assert.AreEqual(1 / 3.0, model.Priors[0]);
            Assert.AreEqual(1 / 3.0, model.Priors[1]);
            Assert.AreEqual(1 / 3.0, model.Priors[2]);

            check4(words, model, target, hcrf);
        }
Code example #5
        public void LogForwardGesturesPriorsDeoptimizedTest()
        {
            int[] labels;
            double[][][] words;
            var classifier = IndependentMarkovFunctionTest.CreateModel4(out words, out labels, true);

            // Field built from a deoptimized copy of the potential function...
            var deopFun = new MarkovMultivariateFunction(classifier);
            deopFun.Deoptimize();
            var target1 = new HiddenConditionalRandomField<double[]>(deopFun);

            // ...and a second field built from the optimized function, for comparison.
            var function = new MarkovMultivariateFunction(classifier);
            var target2 = new HiddenConditionalRandomField<double[]>(function);

            foreach (var word in words)
            {
                for (int c = 0; c < 3; c++)
                {
                    for (int y = 0; y < 3; y++)
                    {
                        var actual = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm
                            .LogForward(target1.Function.Factors[c], word, y);

                        var expected = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm
                            .LogForward(target2.Function.Factors[c], word, y);

                        for (int i = 0; i < actual.GetLength(0); i++)
                        {
                            for (int j = 0; j < actual.GetLength(1); j++)
                            {
                                double a = actual[i, j];
                                double e = expected[i, j];
                                Assert.IsTrue(e.IsRelativelyEqual(a, 0.1));
                            }

                        }
                    }
                }
            }
        }
Code example #6
        public void GradientDeoptimizeTest3()
        {
            double[][][] sequences2;
            int[] labels2;
            var hmm = CreateModel3(out sequences2, out labels2);
            var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
            function.Deoptimize();
#pragma warning restore 0618

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 2; // include a regularization penalty in the objective

            var inputs = sequences2;
            var outputs = labels2;


            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                double e = expected[i];
                double a = actual[i];
                Assert.AreEqual(e, a, 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
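Since `target.Regularization` is set above, the numerical reference must include the same penalty as the analytic gradient. A sketch of a matching five-argument `func` overload, assuming an L2 penalty of the form sum(w^2) / (2 * sigma^2) added to the negative log-likelihood, with sigma^2 taken from the `Regularization` value:

        // Hypothetical regularized objective (not from the original listing):
        // negative log-likelihood plus an L2 penalty, skipping infinite weights
        // so the penalty stays finite.
        private static double func(HiddenConditionalRandomField<double[]> model,
            double[] parameters, double[][][] inputs, int[] outputs, double sigmaSquared)
        {
            model.Function.Weights = parameters;

            double penalty = 0;
            if (sigmaSquared != 0)
            {
                for (int i = 0; i < parameters.Length; i++)
                    if (!double.IsInfinity(parameters[i]))
                        penalty += parameters[i] * parameters[i];
                penalty /= 2.0 * sigmaSquared;
            }

            return -model.LogLikelihood(inputs, outputs) + penalty;
        }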