Derivative approximation by finite differences.

Numerical differentiation comprises techniques from numerical analysis for estimating the derivative of a mathematical function or function subroutine using values of the function and, possibly, other knowledge about it.

Reference: Trent F. Guidry, "Calculating derivatives of a function numerically". Available at: http://www.trentfguidry.net/post/2009/07/12/Calculate-derivatives-function-numerically.aspx
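
As a minimal illustration of the idea (a sketch only, not part of the library shown in the examples below; the helper name is hypothetical), a symmetric central difference estimates the first derivative from two nearby function values, with an error that shrinks as O(h²):

        // Central-difference sketch: f'(x) ≈ (f(x + h) - f(x - h)) / (2h)
        static double CentralDerivative(Func<double, double> f, double x, double h = 1e-5)
        {
            return (f(x + h) - f(x - h)) / (2 * h);
        }

        // Example: d/dx sin(x) at x = 0 is cos(0) = 1
        // double d = CentralDerivative(Math.Sin, 0.0);  // ≈ 1.0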

Example #1
        public void QuadraticConstraintConstructorTest()
        {
            double[,] quadraticTerms = 
            {
                {  1, 2, 3 },
                {  4, 5, 6 },
                {  7, 8, 9 },
            };

            double[] linearTerms = { 1, 2, 3 };

            var objective = new NonlinearObjectiveFunction(3, f => f[0] + f[1] + f[2]);

            QuadraticConstraint target = new QuadraticConstraint(objective,
                quadraticTerms, linearTerms,
                ConstraintType.LesserThanOrEqualTo, 0);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] { 1, 2, 3 },
                new double[] { 3, 1, 4 },
                new double[] { -6 , 5, 9 },
                new double[] { 31, 25, 246 },
                new double[] { -0.102, 0, 10 },
            };


            { // Function test
                for (int i = 0; i < x.Length; i++)
                {
                    // A QuadraticConstraint evaluates xᵀQx + bᵀx (with no ½ factor,
                    // unlike the QuadraticObjectiveFunction examples below)
                    double expected =
                        (x[i].Multiply(quadraticTerms)).InnerProduct(x[i])
                        + linearTerms.InnerProduct(x[i]);

                    double actual = function(x[i]);

                    Assert.AreEqual(expected, actual, 1e-8);
                }
            }

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Compute(x[i]);
                    double[] actual = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                        Assert.AreEqual(expected[j], actual[j], 1e-8);
                }
            }


        }
Example #2
        /// <summary>
        ///   Computes the derivative of a simple unidimensional function.
        /// </summary>
        ///
        /// <param name="function">The function to be differentiated.</param>
        /// <param name="value">The value <c>x</c> at which the derivative should be evaluated.</param>
        /// <param name="order">The derivative order that should be obtained. Default is 1.</param>
        /// <param name="stepSize">The relative step size used to approximate the derivatives. Default is 0.01.</param>
        ///
        /// <returns>The derivative of the function at the point <paramref name="value">x</paramref>.</returns>
        ///
        public static double Derivative(Func<double, double> function, double value, int order, double stepSize)
        {
            double output   = function(value);
            double original = value;

            // Scale the step size by the magnitude of the evaluation point,
            // making the step relative (at zero, the absolute step is kept)
            if (original != 0.0)
            {
                stepSize *= System.Math.Abs(original);
            }


            // Create the interpolation points; coefficientCache holds the
            // finite-difference coefficients and is defined elsewhere in the class
            double[] outputs = new double[coefficientCache.Length];

            int center = (outputs.Length - 1) / 2;

            for (int i = 0; i < outputs.Length; i++)
            {
                if (i != center)
                {
                    // Evaluate the function at the shifted sample point
                    outputs[i] = function(original + (i - center) * stepSize);
                }
                else
                {
                    // The center point reuses the value computed above
                    outputs[i] = output;
                }
            }

            return FiniteDifferences.Interpolate(coefficientCache,
                                                 outputs, order, center, stepSize);
        }
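
Assuming the Derivative method above is exposed as a static member of FiniteDifferences (as its return statement suggests), a call could look like the following sketch:

        // First derivative of f(x) = x² at x = 2; the exact answer is 4
        double d1 = FiniteDifferences.Derivative(x => x * x, value: 2, order: 1, stepSize: 0.01);

        // Second derivative of the same function; the exact answer is 2
        double d2 = FiniteDifferences.Derivative(x => x * x, value: 2, order: 2, stepSize: 0.01);
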
Example #3
        public void QuadraticConstructorTest()
        {
            double[,] quadraticTerms = 
            {
                {  1, 2, 3 },
                {  2, 5, 6 },
                {  3, 6, 9 },
            };

            double[] linearTerms = { 1, 2, 3 };

            var target = new QuadraticObjectiveFunction(quadraticTerms, linearTerms);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] { 1, 2, 3 },
                new double[] { 3, 1, 4 },
                new double[] { -6 , 5, 9 },
                new double[] { 31, 25, 246 },
                new double[] { -0.102, 0, 10 },
            };


            { // Function test
                for (int i = 0; i < x.Length; i++)
                {
                    // A QuadraticObjectiveFunction represents f(x) = ½ xᵀQx + bᵀx,
                    // hence the 0.5 factor in the expected value
                    double expected = 0.5 *
                        (x[i].Multiply(quadraticTerms)).InnerProduct(x[i])
                        + linearTerms.InnerProduct(x[i]);

                    double actual = function(x[i]);

                    Assert.AreEqual(expected, actual, 1e-8);
                }
            }

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Compute(x[i]);
                    double[] actual = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                        Assert.AreEqual(expected[j], actual[j], 1e-8);
                }
            }
        }
Example #4
        public void ComputeTest()
        {
            int numberOfParameters = 2;
            FiniteDifferences target = new FiniteDifferences(numberOfParameters);

            double[] inputs = { -1, 0.4 };

            target.Function = BroydenFletcherGoldfarbShannoTest.rosenbrockFunction;

            double[] expected = BroydenFletcherGoldfarbShannoTest.rosenbrockGradient(inputs);
            double[] actual = target.Compute(inputs);

            Assert.IsTrue(expected.IsEqual(actual, 0.05));
        }
Example #5
        public void ComputeTest2()
        {
            // Create a simple function with two parameters: f(x,y) = x² + y
            Func<double[], double> function = x => Math.Pow(x[0], 2) + x[1];

            // The gradient w.r.t. x should be 2x,
            // the gradient w.r.t. y should be  1


            // Create a new finite differences calculator
            var calculator = new FiniteDifferences(2, function);

            // Evaluate the gradient function at the point (2, -1)
            double[] result = calculator.Compute(2, -1); // answer is (4, 1)

            Assert.AreEqual(4, result[0], 1e-10);
            Assert.AreEqual(1, result[1], 1e-10);
            Assert.IsFalse(Double.IsNaN(result[0]));
            Assert.IsFalse(Double.IsNaN(result[1]));
        }
Example #6
        public void GradientTest()
        {
            for (double a = 0.1; a < 3; a += 0.1)
            {
                for (double b = 0.1; b < 3; b += 0.1)
                {
                    var target = new BetaDistribution(a, b);

                    Assert.AreEqual(a, target.Alpha);
                    Assert.AreEqual(b, target.Beta);

                    FiniteDifferences fd = new FiniteDifferences(2);
                    fd.Function = (double[] parameters) => BetaDistribution.LogLikelihood(samples, parameters[0], parameters[1]);

                    double[] expected = fd.Compute(a, b);
                    double[] actual = BetaDistribution.Gradient(samples, a, b);

                    Assert.IsTrue(expected[0].IsRelativelyEqual(actual[0], 0.05));
                    Assert.IsTrue(expected[1].IsRelativelyEqual(actual[1], 0.05));
                }
            }
        }
Example #7
        public void HomogeneousTest2()
        {
            double[,] quadraticTerms = 
            {
                {  1, 0, 1 },
                {  0, 2, 0 },
                {  1, 0, 1 },
            };

            double[] linearTerms = { 0, 0, 0 };

            var target = new QuadraticObjectiveFunction(quadraticTerms, linearTerms);

            var function = target.Function;
            var gradient = target.Gradient;

            FiniteDifferences fd = new FiniteDifferences(3, function);

            double[][] x =
            {
                new double[] { 1, 2, 3 },
                new double[] { 3, 1, 4 },
                new double[] { -6 , 5, 9 },
                new double[] { 31, 25, 246 },
                new double[] { -0.102, 0, 10 },
            };

            { // Gradient test
                for (int i = 0; i < x.Length; i++)
                {
                    double[] expected = fd.Compute(x[i]);
                    double[] actual = gradient(x[i]);

                    for (int j = 0; j < actual.Length; j++)
                        Assert.AreEqual(expected[j], actual[j], 1e-8);
                }
            }
        }
Example #8
        private static double[] finiteDifferences(double[][] input, double[] output, bool stochastic)
        {
            LogisticRegression regression;
            LogisticGradientDescent teacher;

            regression = new LogisticRegression(inputs: 2);

            teacher = new LogisticGradientDescent(regression)
            {
                Stochastic = stochastic,
                LearningRate = 1e-4,
            };

            FiniteDifferences diff = new FiniteDifferences(3);

            diff.Function = (x) =>
            {
                for (int i = 0; i < x.Length; i++)
                    regression.Coefficients[i] = x[i];

                return regression.GetLogLikelihood(input, output);
            };

            return diff.Compute(regression.Coefficients);
        }
Example #9
        public void GradientTest()
        {
            var function = new DiscreteMarkovClassifierFunction(2, 2, 2);
            var model = new HiddenConditionalRandomField<int>(function);
            var target = new QuasiNewtonHiddenLearning<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-4);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #10
        public void GradientTest()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying multivariate Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs = 
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #11
        public void GradientTest4()
        {
            var hmm = IndependentMarkovClassifierPotentialFunctionTest.CreateModel2();
            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 0;

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters,
                IndependentMarkovClassifierPotentialFunctionTest.sequences,
                IndependentMarkovClassifierPotentialFunctionTest.labels);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights,
                IndependentMarkovClassifierPotentialFunctionTest.sequences,
                IndependentMarkovClassifierPotentialFunctionTest.labels);


            for (int i = 0; i < actual.Length; i++)
            {
                if (double.IsNaN(expected[i]))
                    continue;

                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
            }
        }
Example #12
        public void GradientTest2()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model = new HiddenConditionalRandomField<int>(function);
            var target = new ForwardBackwardGradient<int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #13
        public void GradientTest2()
        {
            var hmm = CreateModel3();
            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            var inputs = sequences2;
            var outputs = labels2;

            double[] actual = target.Gradient(function.Weights, inputs, outputs);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
            diff.Function = parameters => func(model, parameters, inputs, outputs);
            double[] expected = diff.Compute(function.Weights);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #14
        public void GradientDeoptimizeTest3()
        {
            double[][][] sequences2;
            int[] labels2;
            var hmm = CreateModel3(out sequences2, out labels2);
            var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
            function.Deoptimize();
#pragma warning restore 0618

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 2;

            var inputs = sequences2;
            var outputs = labels2;


            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                double e = expected[i];
                double a = actual[i];
                Assert.AreEqual(e, a, 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
Example #15
        public void RunTest1()
        {
            // Example from https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm

            double[,] data =
            {
                { 0.03, 0.1947, 0.425, 0.626, 1.253, 2.500, 3.740 },
                { 0.05, 0.127, 0.094, 0.2122, 0.2729, 0.2665, 0.3317}
            };

            double[][] inputs = data.GetRow(0).ToArray();
            double[] outputs = data.GetRow(1);


            RegressionFunction rate = (double[] weights, double[] xi) =>
            {
                double x = xi[0];
                return (weights[0] * x) / (weights[1] + x);
            };

            RegressionGradientFunction grad = (double[] weights, double[] xi, double[] result) =>
            {
                double x = xi[0];

                FiniteDifferences diff = new FiniteDifferences(2);
                diff.Function = (w) => rate(w, xi);

                // Numerical gradient, computed only for manual comparison
                // against the analytic derivatives assigned below
                double[] compare = diff.Compute(weights);

                result[0] = -((-x) / (weights[1] + x));
                result[1] = -((weights[0] * x) / Math.Pow(weights[1] + x, 2));
            };


            NonlinearRegression regression = new NonlinearRegression(2, rate, grad);

            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression, new GaussNewton(2));

            Assert.IsTrue(nls.Algorithm is GaussNewton);

            regression.Coefficients[0] = 0.9; // β1
            regression.Coefficients[1] = 0.2; // β2

            int iterations = 10;
            double[] errors = new double[iterations];
            for (int i = 0; i < errors.Length; i++)
                errors[i] = nls.Run(inputs, outputs);

            double b1 = regression.Coefficients[0];
            double b2 = regression.Coefficients[1];

            Assert.AreEqual(0.362, b1, 1e-3);
            Assert.AreEqual(0.556, b2, 3e-3);

            Assert.IsFalse(Double.IsNaN(b1));
            Assert.IsFalse(Double.IsNaN(b2));

            for (int i = 1; i < errors.Length; i++)
            {
                Assert.IsFalse(Double.IsNaN(errors[i - 1]));
                Assert.IsTrue(errors[i - 1] >= errors[i]);
            }

            Assert.AreEqual(1.23859, regression.StandardErrors[0], 1e-3);
            Assert.AreEqual(6.06352, regression.StandardErrors[1], 3e-3);
        }
Example #16
        private static double[] finiteDifferences(double[][] input, double[] output, bool stochastic)
        {
            LogisticRegression regression;

            regression = new LogisticRegression(inputs: 2);

            FiniteDifferences diff = new FiniteDifferences(3);

            diff.Function = (x) =>
            {
                for (int i = 0; i < x.Length; i++)
                    regression.Coefficients[i] = x[i];

                return regression.GetLogLikelihood(input, output);
            };

            return diff.Compute(regression.Coefficients);
        }
Example #17
        static Func<double[], double[]> Grad(int n, Func<double[], double> fn)
        {
            var gradient = new FiniteDifferences(n, fn);
            return a => gradient.Compute(a);
        }
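
A possible use of this helper (the quadratic sample function here is purely illustrative):

        // Numerical gradient of f(x, y) = x² + y, evaluated at (2, -1)
        Func<double[], double[]> g = Grad(2, x => x[0] * x[0] + x[1]);
        double[] at = g(new double[] { 2, -1 });  // ≈ { 4, 1 }
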
Example #18
        public void GradientTest3()
        {
            var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);
            target.Regularization = 2;

            var inputs = inputs1;
            var outputs = outputs1;



            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);

                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }