Example No. 1
        public void GradientTest3()
        {
            var hmm      = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MultivariateNormalMarkovClassifierFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new QuasiNewtonHiddenLearning<double[]>(model);

            target.Regularization = 2;

            var inputs  = inputs1;
            var outputs = outputs1;



            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
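
The pattern above repeats throughout these examples: wrap the model's objective in a FiniteDifferences calculator, compute a numerical gradient, and compare it against the learner's analytic gradient. A minimal self-contained sketch of the same check on a toy function (assuming the Accord.Math.Differentiation namespace; the names below are illustrative, not part of the test fixture above):

        using System;
        using Accord.Math.Differentiation;

        class GradientCheckSketch
        {
            static void Main()
            {
                // f(x, y) = x*y + y^2, whose analytic gradient is (y, x + 2y)
                Func<double[], double> f = x => x[0] * x[1] + x[1] * x[1];

                var diff = new FiniteDifferences(2, f);

                double[] point    = { 3, 2 };
                double[] numeric  = diff.Gradient(point);                   // ~{ 2, 7 }
                double[] analytic = { point[1], point[0] + 2 * point[1] };  //  { 2, 7 }

                for (int i = 0; i < numeric.Length; i++)
                    Console.WriteLine(Math.Abs(numeric[i] - analytic[i]) < 1e-5); // True
            }
        }
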
Example No. 2
        public void QuadraticConstraintConstructorTest()
        {
            IObjectiveFunction objective = null;

            double[,] quadraticTerms =
            {
                { 1, 2, 3 },
                { 4, 5, 6 },
                { 7, 8, 9 },
            };

            double[] linearTerms = { 1, 2, 3 };

            objective = new NonlinearObjectiveFunction(3, f => f[0] + f[1] + f[2]);

            QuadraticConstraint target = new QuadraticConstraint(objective,
                                                                 quadraticTerms, linearTerms,
                                                                 ConstraintType.LesserThanOrEqualTo, 0);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] {      1,  2,   3 },
                new double[] {      3,  1,   4 },
                new double[] {     -6,  5,   9 },
                new double[] {     31, 25, 246 },
                new double[] { -0.102,  0,  10 },
            };


            { // Function test
                for (int i = 0; i < x.Length; i++)
                {
                    double expected =
                        (x[i].Multiply(quadraticTerms)).InnerProduct(x[i])
                        + linearTerms.InnerProduct(x[i]);

                    double actual = function(x[i]);

                    Assert.AreEqual(expected, actual, 1e-8);
                }
            }

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Compute(x[i]);
                    double[] actual   = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                    {
                        Assert.AreEqual(expected[j], actual[j], 1e-8);
                    }
                }
            }
        }
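
As the expected value in the function test above shows, QuadraticConstraint evaluates the raw quadratic form, with no 1/2 factor on the quadratic term (contrast with QuadraticObjectiveFunction in the LinearTest example further below):

        g(x) = x^\top Q\, x + d^\top x \;\le\; 0
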
Example No. 3
        private static double[] finiteDifferences(double[][] input, double[] output, bool stochastic)
        {
            LogisticRegression      regression;
            LogisticGradientDescent teacher;

            regression = new LogisticRegression(inputs: 2);

            // Note: the teacher is configured here, but the numerical gradient
            // below only evaluates the regression's coefficients directly.
            teacher = new LogisticGradientDescent(regression)
            {
                Stochastic   = stochastic,
                LearningRate = 1e-4,
            };

            FiniteDifferences diff = new FiniteDifferences(3);

            diff.Function = (x) =>
            {
                for (int i = 0; i < x.Length; i++)
                {
                    regression.Coefficients[i] = x[i];
                }

                return regression.GetLogLikelihood(input, output);
            };

            return diff.Compute(regression.Coefficients);
        }
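
For reference, the log-likelihood differentiated numerically above has a standard closed-form gradient (with x_{i0} = 1 for the intercept term), which is what gradient-based teachers such as LogisticGradientDescent ascend:

        \frac{\partial \ell}{\partial \theta_j} = \sum_i \bigl( y_i - \sigma(\theta^\top x_i) \bigr)\, x_{ij},
        \qquad
        \sigma(t) = \frac{1}{1 + e^{-t}}
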
Example No. 4
        public void GradientTest4()
        {
            var hmm      = IndependentMarkovClassifierPotentialFunctionTest.CreateModel2();
            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            target.Regularization = 0;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters,
                                               IndependentMarkovClassifierPotentialFunctionTest.sequences,
                                               IndependentMarkovClassifierPotentialFunctionTest.labels);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights,
                                                IndependentMarkovClassifierPotentialFunctionTest.sequences,
                                                IndependentMarkovClassifierPotentialFunctionTest.labels);


            for (int i = 0; i < actual.Length; i++)
            {
                if (double.IsNaN(expected[i]))
                {
                    continue;
                }

                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
            }
        }
Example No. 5
        public void GradientTest3()
        {
            double[][][] sequences2;
            int[]        labels2;
            var          hmm      = CreateModel3(out sequences2, out labels2);
            var          function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            target.Regularization = 2;

            var inputs  = sequences2;
            var outputs = labels2;


            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                double e = expected[i];
                double a = actual[i];
                Assert.AreEqual(e, a, 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example No. 6
        public void GradientTest_MarkovIndependentNormal_NoPriors()
        {
            double[][][] observations;
            int[]        labels;
            HiddenMarkovClassifier<Independent<NormalDistribution>> hmm =
                IndependentMarkovFunctionTest.CreateModel4(out observations, out labels, usePriors: false);

            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            target.Regularization = 0;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters,
                                               observations,
                                               labels);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, observations, labels);


            for (int i = 0; i < actual.Length; i++)
            {
                if (double.IsNaN(expected[i]))
                {
                    continue;
                }

                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
            }
        }
Example No. 7
        public void GradientDeoptimizeTest2()
        {
            double[][][] sequences2;
            int[]        labels2;

            var hmm      = CreateModel3(out sequences2, out labels2);
            var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
            function.Deoptimize();
#pragma warning restore 0618

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            var inputs  = sequences2;
            var outputs = labels2;

            double[] actual = target.Gradient(function.Weights, inputs, outputs);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
            diff.Function = parameters => func(model, parameters, inputs, outputs);
            double[] expected = diff.Compute(function.Weights);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example No. 8
        public void GradientTest_DiscreteMarkov()
        {
            var function = new MarkovDiscreteFunction(2, 2, 2);
            var model    = new HiddenConditionalRandomField<int>(function);
            var target   = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length)
            {
                StepSize = 1e-5
            };

            var inputs  = QuasiNewtonHiddenLearningTest.inputs;
            var outputs = QuasiNewtonHiddenLearningTest.outputs;

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
            }
        }
Example No. 9
        public void GradientTest3()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model  = new HiddenConditionalRandomField<int>(function);
            var target = new ForwardBackwardGradient<int>(model);

            target.Regularization = 2;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-5);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example No. 10
        public void GradientTest_MarkovNormal_Regularization()
        {
            var hmm      = MarkovContinuousFunctionTest.CreateModel1();
            var function = new MarkovContinuousFunction(hmm);

            var model  = new HiddenConditionalRandomField<double>(function);
            var target = new ForwardBackwardGradient<double>(model);

            target.Regularization = 2;

            var inputs  = NormalQuasiNewtonHiddenLearningTest.inputs;
            var outputs = NormalQuasiNewtonHiddenLearningTest.outputs;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-2);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example No. 11
        public void Hessian_test_3()
        {
            // x² + log(y) + xy + exp(x+y) + 47
            Func<double[], double> function = x => Math.Pow(x[0], 2) + Math.Log(x[1]) + x[0] * x[1] + Math.Exp(x[0] + x[1]) + 47;

            var calculator = new FiniteDifferences(variables: 2)
            {
                Function       = function,
                NumberOfPoints = 7
            };

            Func<double[], double[][]> expectedFormula = (double[] x) => new double[][]
            {
                new double[] { Math.Exp(x[0] + x[1]) + 2, Math.Exp(x[0] + x[1]) + 1 },
                new double[] { Math.Exp(x[0] + x[1]) + 1, Math.Exp(x[0] + x[1]) - 1.0 / Math.Pow(x[1], 2) },
            };


            for (double i = 1; i < 10; i++)
            {
                for (double j = 1; j < 10; j++)
                {
                    double[]   value    = new double[] { i, j };
                    double[][] actual   = calculator.Hessian(value);
                    double[][] expected = expectedFormula(value);

                    Assert.IsTrue(actual.IsEqual(expected, rtol: 1e-5));
                }
            }
        }
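
The expectedFormula above is simply the analytic Hessian of the test function, obtained by differentiating twice:

        f(x, y) = x^2 + \log y + xy + e^{x+y} + 47
        \qquad
        H(x, y) =
        \begin{pmatrix}
            2 + e^{x+y} & 1 + e^{x+y} \\
            1 + e^{x+y} & e^{x+y} - 1/y^2
        \end{pmatrix}
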
Example No. 12
        double[] diffedRange(double z, double[] a)
        {
            // The objective must depend on its argument x, so that the
            // perturbations applied by FiniteDifferences take effect.
            Func<double[], double> fbeta3 = x => fD(z, x);
            var fDiffs = new FiniteDifferences(N, fbeta3);

            double[] result = fDiffs.Gradient(a);
            return result;
        }
Example No. 13
        /// <summary>
        ///   Gets the probability density function (pdf) for
        ///   this distribution evaluated at point <c>x</c>.
        /// </summary>
        ///
        /// <param name="x">A single point in the distribution range.</param>
        ///
        /// <returns>
        ///   The probability of <c>x</c> occurring
        ///   in the current distribution.
        /// </returns>
        ///
        protected internal override double InnerProbabilityDensityFunction(double x)
        {
            if (pdf != null)
            {
                return pdf(x);
            }

            return FiniteDifferences.Derivative(cdf, x, 1, 1e-6);
        }
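
A minimal sketch of the fallback used above: when no pdf delegate is available, the density is recovered as the first derivative of the CDF via the same static FiniteDifferences.Derivative call (assuming the Accord.Math.Differentiation namespace; the logistic CDF below is only an illustrative stand-in):

        using System;
        using Accord.Math.Differentiation;

        class CdfToPdfSketch
        {
            static void Main()
            {
                // Logistic CDF; its derivative (the pdf) at t = 0 is 0.25
                Func<double, double> cdf = t => 1.0 / (1.0 + Math.Exp(-t));

                // First derivative (order 1) with step size 1e-6
                double pdfAtZero = FiniteDifferences.Derivative(cdf, 0, 1, 1e-6);

                Console.WriteLine(pdfAtZero); // ~0.25
            }
        }
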
Example No. 14
        /// <summary>
        ///   Gets the probability density function (pdf) for
        ///   this distribution evaluated at point <c>x</c>.
        /// </summary>
        ///
        /// <param name="x">A single point in the distribution range.</param>
        ///
        /// <returns>
        ///   The probability of <c>x</c> occurring
        ///   in the current distribution.
        /// </returns>
        ///
        public override double ProbabilityDensityFunction(double x)
        {
            if (pdf != null)
            {
                return pdf(x);
            }

            return FiniteDifferences.Derivative(cdf, x, 1, 1e-6);
        }
Example No. 15
        public void LinearTest()
        {
            double[,] quadraticTerms =
            {
                { 0, 0, 0 },
                { 0, 0, 0 },
                { 0, 0, 0 },
            };

            double[] linearTerms = { 1, 2, 3 };

            var target = new QuadraticObjectiveFunction(quadraticTerms, linearTerms);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] {      1,  2,   3 },
                new double[] {      3,  1,   4 },
                new double[] {     -6,  5,   9 },
                new double[] {     31, 25, 246 },
                new double[] { -0.102,  0,  10 },
            };


            { // Function test
                for (int i = 0; i < x.Length; i++)
                {
                    double expected = 0.5 * x[i].Dot(quadraticTerms).Dot(x[i])
                                      + linearTerms.Dot(x[i]);

                    double actual = function(x[i]);

                    Assert.AreEqual(expected, actual, 1e-8);
                }
            }

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Gradient(x[i]);
                    double[] actual   = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                    {
                        Assert.AreEqual(expected[j], actual[j], 1e-8);
                    }
                }
            }
        }
Example No. 16
        /// <summary>
        ///   Finds the minimum value of a function. The solution vector
        ///   will be made available at the <see cref="IOptimizationMethod{TInput, TOutput}.Solution"/> property.
        /// </summary>
        ///
        /// <returns>Returns <c>true</c> if the method converged to a <see cref="IOptimizationMethod{TInput, TOutput}.Solution"/>.
        ///   In this case, the found value will also be available at the <see cref="IOptimizationMethod{TInput, TOutput}.Value"/>
        ///   property.</returns>
        ///
        public override bool Minimize()
        {
            if (Gradient == null)
            {
                this.Gradient = FiniteDifferences.Gradient(Function, NumberOfVariables);
            }

            NonlinearObjectiveFunction.CheckGradient(Gradient, Solution);

            return base.Minimize();
        }
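
A minimal sketch of the fallback in Minimize above: the static FiniteDifferences.Gradient helper wraps a scalar function into a numerical-gradient delegate, so the optimizer can run even when no analytic gradient was supplied (names below are illustrative):

        using System;
        using Accord.Math.Differentiation;

        class GradientFallbackSketch
        {
            static void Main()
            {
                Func<double[], double> f = x => x[0] * x[0] + x[1] * x[1];

                // Same static helper used by Minimize() above
                Func<double[], double[]> g = FiniteDifferences.Gradient(f, 2);

                double[] grad = g(new double[] { 1, 2 }); // ~{ 2, 4 }
                Console.WriteLine(grad[0] + ", " + grad[1]);
            }
        }
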
Example No. 17
        public static double[] InverseKinematics(List<MDHParameters> dht, double[] target, ref bool success)
        {
            Func<double[], double> f = x => Distance(dht, target, x);
            var calculator = new FiniteDifferences(dht.Count, f);
            Func<double[], double[]> g = calculator.Gradient;

            var optimizer = new BroydenFletcherGoldfarbShanno(numberOfVariables: dht.Count, function: f, gradient: g);

            optimizer.Minimize();
            success = !(optimizer.Value >= 0.01);

            return optimizer.Solution;
        }
Example No. 18
        public void ComputeTest()
        {
            int numberOfParameters   = 2;
            FiniteDifferences target = new FiniteDifferences(numberOfParameters);

            double[] inputs = { -1, 0.4 };

            target.Function = BroydenFletcherGoldfarbShannoTest.rosenbrockFunction;

            double[] expected = BroydenFletcherGoldfarbShannoTest.rosenbrockGradient(inputs);
            double[] actual   = target.Compute(inputs);

            Assert.IsTrue(expected.IsEqual(actual, 0.05));
        }
Example No. 19
        public void Hessian_test_2()
        {
            Func<double[], double> function = x => Math.Pow(x[0], 2) + x[1] + x[0] * x[1] + 47;

            var calculator = new FiniteDifferences(2, function);

            double[][] result   = calculator.Hessian(new[] { 2.0, -1.0 });
            double[][] expected =
            {
                new double[] { 2, 1 },
                new double[] { 1, 0 },
            };

            Assert.IsTrue(result.IsEqual(expected, 1e-8));
        }
Example No. 20
        private static double[] finiteDifferences(double[][] input, double[] output, bool stochastic)
        {
            var regression = new LogisticRegression(inputs: 2);
            var diff       = new FiniteDifferences(3);

            diff.Function = (x) =>
            {
                for (int i = 0; i < x.Length; i++)
                {
                    regression.Coefficients[i] = x[i];
                }

                return regression.GetLogLikelihood(input, output);
            };

            return diff.Compute(regression.Coefficients);
        }
Example No. 21
        /// <summary>
        ///   Finds the maximum value of a function. The solution vector
        ///   will be made available at the <see cref="IOptimizationMethod{TInput, TOutput}.Solution"/> property.
        /// </summary>
        ///
        /// <returns>Returns <c>true</c> if the method converged to a <see cref="IOptimizationMethod{TInput, TOutput}.Solution"/>.
        ///   In this case, the found value will also be available at the <see cref="IOptimizationMethod{TInput, TOutput}.Value"/>
        ///   property.</returns>
        ///
        public override bool Maximize()
        {
            if (Gradient == null)
            {
                this.Gradient = FiniteDifferences.Gradient(Function, NumberOfVariables);
            }

            NonlinearObjectiveFunction.CheckGradient(Gradient, Solution);

            var g = Gradient;

            Gradient = (x) => g(x).Multiply(-1);

            bool success = base.Maximize();

            Gradient = g;

            return success;
        }
Example No. 22
        public void GradientTest_MarkovMultivariate()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs =
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length)
            {
                StepSize = 1e-5
            };

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example No. 23
        public void Hessian_test_4()
        {
            const int    Size      = 30;
            const double Tolerance = 1e-8;

            for (int i = 0; i < 10; i++)
            {
                double[,] mat = Matrix.Random(Size);
                double[,] Q   = mat.DotWithTransposed(mat);
                double[] d = Vector.Random(Size);

                var qof = new QuadraticObjectiveFunction(Q, d);

                var calculator = new FiniteDifferences(Size, qof.Function);

                double[][] result = calculator.Hessian(Vector.Random(Size));

                Assert.IsTrue(result.IsEqual(Q, Tolerance));
            }
        }
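
This test relies on two facts: mat.DotWithTransposed(mat) yields a symmetric matrix Q = M M^T, and for a symmetric Q the Hessian of the half-scaled quadratic objective is exactly Q:

        \nabla^2 \Bigl( \tfrac{1}{2}\, x^\top Q\, x + d^\top x \Bigr)
        = \tfrac{1}{2}\,(Q + Q^\top) = Q
        \qquad \text{when } Q = Q^\top
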
Example No. 24
        public void ComputeTest2()
        {
            // Create a simple function with two parameters: f(x,y) = x² + y
            Func<double[], double> function = x => Math.Pow(x[0], 2) + x[1];

            // The gradient w.r.t. x should be 2x,
            // the gradient w.r.t. y should be  1


            // Create a new finite differences calculator
            var calculator = new FiniteDifferences(2, function);

            // Evaluate the gradient function at the point (2, -1)
            double[] result = calculator.Compute(2, -1); // answer is (4, 1)

            Assert.AreEqual(4, result[0], 1e-10);
            Assert.AreEqual(1, result[1], 1e-10);
            Assert.IsFalse(Double.IsNaN(result[0]));
            Assert.IsFalse(Double.IsNaN(result[1]));
        }
Example No. 25
        public void test_order()
        {
            // https://www.wolframalpha.com/input/?i=third+derivative+of+(1+-+x)%5E2+%2B+100(y+-+x%5E2)%5E2+at+(-1,0.4)

            int numberOfParameters   = 2;
            FiniteDifferences target = new FiniteDifferences(numberOfParameters)
            {
                NumberOfPoints = 7,
                Order          = 3,
            };

            double[] inputs = { -1, 0.4 };

            target.Function = BroydenFletcherGoldfarbShannoTest.rosenbrockFunction;

            double[] expected = { -2400, 0 };
            double[] actual   = target.Compute(inputs);

            Assert.IsTrue(expected.IsEqual(actual, 1e-5));
        }
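
The expected values follow from differentiating the Rosenbrock function three times in each variable separately, which is what the per-component result of Compute reflects here:

        f(x, y) = (1 - x)^2 + 100\,(y - x^2)^2,
        \qquad
        \frac{\partial^3 f}{\partial x^3} = 2400\,x,
        \qquad
        \frac{\partial^3 f}{\partial y^3} = 0

At (x, y) = (-1, 0.4) this gives (-2400, 0), matching expected.
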
Example No. 26
        public void GradientTest()
        {
            var function = new MarkovDiscreteFunction(2, 2, 2);
            var model    = new HiddenConditionalRandomField<int>(function);
            var target   = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example No. 27
        public void HomogeneousTest()
        {
            double[,] quadraticTerms =
            {
                { 8, 3, 1 },
                { 3, 4, 2 },
                { 1, 2, 6 },
            };

            double[] linearTerms = { 0, 0, 0 };

            var target = new QuadraticObjectiveFunction(quadraticTerms, linearTerms);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] {      1,  2,   3 },
                new double[] {      3,  1,   4 },
                new double[] {     -6,  5,   9 },
                new double[] {     31, 25, 246 },
                new double[] { -0.102,  0,  10 },
            };

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Gradient(x[i]);
                    double[] actual   = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                    {
                        Assert.AreEqual(expected[j], actual[j], 1e-6);
                    }
                }
            }
        }
Example No. 28
        public void GradientTest()
        {
            for (double a = 0.1; a < 3; a += 0.1)
            {
                for (double b = 0.1; b < 3; b += 0.1)
                {
                    var target = new BetaDistribution(a, b);

                    Assert.AreEqual(a, target.Alpha);
                    Assert.AreEqual(b, target.Beta);

                    FiniteDifferences fd = new FiniteDifferences(2);
                    fd.Function = (double[] parameters) => BetaDistribution.LogLikelihood(samples, parameters[0], parameters[1]);

                    double[] expected = fd.Compute(a, b);
                    double[] actual   = BetaDistribution.Gradient(samples, a, b);

                    Assert.IsTrue(expected[0].IsRelativelyEqual(actual[0], 0.05));
                    Assert.IsTrue(expected[1].IsRelativelyEqual(actual[1], 0.05));
                }
            }
        }
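
The analytic gradient verified here is the standard score of the Beta log-likelihood, written with the digamma function ψ and N samples:

        \frac{\partial \ell}{\partial \alpha} = \sum_i \ln x_i - N\bigl[ \psi(\alpha) - \psi(\alpha + \beta) \bigr],
        \qquad
        \frac{\partial \ell}{\partial \beta} = \sum_i \ln(1 - x_i) - N\bigl[ \psi(\beta) - \psi(\alpha + \beta) \bigr]
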
Example No. 29
        public void GradientTest()
        {
            double[][] inputs;
            int[]      outputs;

            MultinomialLogisticRegressionTest.CreateInputOutputsExample1(out inputs, out outputs);

            // Create an algorithm to estimate the regression
            var msgd = new MultinomialLogisticLearning<ConjugateGradient>();

            msgd.Method.MaxIterations = 1;

            msgd.Learn(inputs, outputs);

            int variables = inputs.Columns() * outputs.DistinctCount();
            var fd        = new FiniteDifferences(variables, msgd.crossEntropy);

            double[] probe    = { 0.1, 0.2, 0.5, 0.6, 0.2, 0.1 };
            double[] expected = fd.Compute(probe);
            double[] actual   = msgd.crossEntropyGradient(probe);

            Assert.IsTrue(expected.IsEqual(actual, 1e-5));
        }
Example No. 30
        public void Hessian_test()
        {
            #region doc_hessian
            // Create a simple function with two parameters: f(x,y) = x² + y
            Func<double[], double> function = x => Math.Pow(x[0], 2) + x[1];

            // The gradient w.r.t. x should be 2x,
            // the gradient w.r.t. y should be  1

            // Create a new finite differences calculator
            var calculator = new FiniteDifferences(2, function);

            // Evaluate the Hessian at the point (2, -1)
            double[][] result = calculator.Hessian(new[] { 2.0, -1.0 }); // answer is [(2, 0), (0, 0)]
            #endregion

            double[][] expected =
            {
                new double[] { 2, 0 },
                new double[] { 0, 0 },
            };

            Assert.IsTrue(result.IsEqual(expected, 1e-8));
        }