Example #1
        /// <summary>
        ///   Creates a new object that is a copy of the current instance.
        /// </summary>
        ///
        /// <returns>
        ///   A new object that is a copy of this instance.
        /// </returns>
        ///
        public object Clone()
        {
            var clone = new NonlinearRegression(Coefficients.Length, function, gradient);

            clone.coefficients   = (double[])this.coefficients.Clone();
            clone.standardErrors = (double[])this.standardErrors.Clone();
            return clone;
        }
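A minimal usage sketch of the deep-copy behavior (an illustration, not part of the original source; it assumes an existing NonlinearRegression instance named regression):

        // Illustration only: Clone copies the coefficient and standard-error arrays,
        // so modifying the copy does not affect the original instance.
        var copy = (NonlinearRegression)regression.Clone();
        copy.Coefficients[0] += 1;
        Assert.AreNotEqual(regression.Coefficients[0], copy.Coefficients[0]);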
        public void RunTest()
        {
            double[,] data =
            {
                { -40,    -21142.1111111111 },
                { -30,    -21330.1111111111 },
                { -20,    -12036.1111111111 },
                { -10,      7255.3888888889 },
                {   0,     32474.8888888889 },
                {  10,     32474.8888888889 },
                {  20,      9060.8888888889 },
                {  30,    -11628.1111111111 },
                {  40,    -15129.6111111111 },
            };

            double[][] inputs = data.GetColumn(0).ToJagged();
            double[] outputs = data.GetColumn(1);

            NonlinearRegression regression = new NonlinearRegression(4, function, gradient);


            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            regression.Coefficients[0] = 0;         // m
            regression.Coefficients[1] = 80;        // s
            regression.Coefficients[2] = 53805;     // a
            regression.Coefficients[3] = -21330.11; // b

            double error = 0;
            for (int i = 0; i < 100; i++)
                error = nls.Run(inputs, outputs);

            double m = regression.Coefficients[0];
            double s = regression.Coefficients[1];
            double a = regression.Coefficients[2];
            double b = regression.Coefficients[3];

            Assert.AreEqual(5.316196154830604, m, 1e-3);
            Assert.AreEqual(12.792301798208918, s, 1e-3);
            Assert.AreEqual(56794.832645792514, a, 1e-3);
            Assert.AreEqual(-20219.675997523173, b, 1e-2);

            Assert.IsFalse(Double.IsNaN(m));
            Assert.IsFalse(Double.IsNaN(s));
            Assert.IsFalse(Double.IsNaN(a));
            Assert.IsFalse(Double.IsNaN(b));
        }
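The function and gradient delegates passed to NonlinearRegression above are defined elsewhere in the test fixture and are not part of this excerpt. Judging only from the parameter names (m, s, a, b) and the initial guesses, they appear to describe a Gaussian bump on a constant baseline; the sketch below is an assumption for illustration, not the fixture's actual code.

        // Assumed model (illustration only): y = a * exp(-(x - m)^2 / (2 s^2)) + b
        static double function(double[] w, double[] x)
        {
            double m = w[0], s = w[1], a = w[2], b = w[3];
            double u = (x[0] - m) / s;
            return a * Math.Exp(-0.5 * u * u) + b;
        }

        // Analytic gradient of the assumed model with respect to (m, s, a, b)
        static void gradient(double[] w, double[] x, double[] r)
        {
            double m = w[0], s = w[1], a = w[2];
            double u = (x[0] - m) / s;
            double e = Math.Exp(-0.5 * u * u);

            r[0] = a * e * u / s;     // ∂y/∂m
            r[1] = a * e * u * u / s; // ∂y/∂s
            r[2] = e;                 // ∂y/∂a
            r[3] = 1;                 // ∂y/∂b
        }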
        public void RunTest1()
        {
            // Example from https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm

            double[,] data =
            {
                { 0.03, 0.1947, 0.425, 0.626, 1.253, 2.500, 3.740 },
                { 0.05, 0.127, 0.094, 0.2122, 0.2729, 0.2665, 0.3317}
            };

            double[][] inputs = data.GetRow(0).ToJagged();
            double[] outputs = data.GetRow(1);


            RegressionFunction rate = (double[] weights, double[] xi) =>
            {
                double x = xi[0];
                return (weights[0] * x) / (weights[1] + x);
            };

            RegressionGradientFunction grad = (double[] weights, double[] xi, double[] result) =>
            {
                double x = xi[0];

                // Numerical gradient, computed here only as a sanity check for
                // the analytic derivatives below (its value is not asserted):
                FiniteDifferences diff = new FiniteDifferences(2);
                diff.Function = (w) => rate(w, xi);
                double[] compare = diff.Compute(weights);

                result[0] = x / (weights[1] + x);                            // ∂f/∂β1
                result[1] = -(weights[0] * x) / Math.Pow(weights[1] + x, 2); // ∂f/∂β2
            };


            NonlinearRegression regression = new NonlinearRegression(2, rate, grad);

            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression, new GaussNewton(2));

            Assert.IsTrue(nls.Algorithm is GaussNewton);

            regression.Coefficients[0] = 0.9; // β1
            regression.Coefficients[1] = 0.2; // β2

            int iterations = 10;
            double[] errors = new double[iterations];
            for (int i = 0; i < errors.Length; i++)
                errors[i] = nls.Run(inputs, outputs);

            double b1 = regression.Coefficients[0];
            double b2 = regression.Coefficients[1];

            Assert.AreEqual(0.362, b1, 1e-3);
            Assert.AreEqual(0.556, b2, 3e-3);

            Assert.IsFalse(Double.IsNaN(b1));
            Assert.IsFalse(Double.IsNaN(b2));

            for (int i = 1; i < errors.Length; i++)
            {
                Assert.IsFalse(Double.IsNaN(errors[i - 1]));
                Assert.IsTrue(errors[i - 1] >= errors[i]);
            }

            Assert.AreEqual(1.23859, regression.StandardErrors[0], 1e-3);
            Assert.AreEqual(6.06352, regression.StandardErrors[1], 3e-3);
        }
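The compare vector inside grad above is computed with FiniteDifferences but never asserted. A standalone check of the analytic Michaelis-Menten derivatives against the numerical ones could look like the sketch below (an illustrative addition, not part of the original fixture; the tolerance is a rough assumption).

        public void GradientCheckSketch()
        {
            // Illustrative sketch (not in the original fixture): compare the analytic
            // derivatives of f(x) = β1·x / (β2 + x) with numerical ones.
            RegressionFunction rate = (w, xi) => (w[0] * xi[0]) / (w[1] + xi[0]);

            double[] weights = { 0.9, 0.2 };
            double[] point = { 1.253 };

            FiniteDifferences diff = new FiniteDifferences(2);
            diff.Function = (w) => rate(w, point);
            double[] numerical = diff.Compute(weights);

            double x = point[0];
            double[] analytic =
            {
                x / (weights[1] + x),                            // ∂f/∂β1
                -(weights[0] * x) / Math.Pow(weights[1] + x, 2)  // ∂f/∂β2
            };

            // Tolerance is an assumption for the default finite-difference step size.
            for (int j = 0; j < analytic.Length; j++)
                Assert.AreEqual(analytic[j], numerical[j], 1e-3);
        }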
        public void ExampleTest()
        {
            // Suppose we would like to map the integer values in the
            // first column to the continuous values in the second column.
            double[,] data =
            {
                { -40,    -21142.1111111111 },
                { -30,    -21330.1111111111 },
                { -20,    -12036.1111111111 },
                { -10,      7255.3888888889 },
                {   0,     32474.8888888889 },
                {  10,     32474.8888888889 },
                {  20,      9060.8888888889 },
                {  30,    -11628.1111111111 },
                {  40,    -15129.6111111111 },
            };

            // Extract inputs and outputs
            double[][] inputs = data.GetColumn(0).ToJagged();
            double[] outputs = data.GetColumn(1);

            // Create a nonlinear regression with 3 coefficients:
            var regression = new NonlinearRegression(3,

                // Let's assume a quadratic model function: ax² + bx + c
                function: (w, x) => w[0] * x[0] * x[0] + w[1] * x[0] + w[2], 

                // Derivative with respect to the weights:
                gradient: (w, x, r) =>
                {
                    r[0] = 2 * w[0]; // 2a
                    r[1] = w[1];     // b
                    r[2] = w[2];     // c
                }
            );

            // Create a non-linear least squares teacher
            var nls = new NonlinearLeastSquares(regression);

            // Initialize to some random values
            regression.Coefficients[0] = 4.2;
            regression.Coefficients[1] = 0.3;
            regression.Coefficients[2] = 1;

            // Run the function estimation algorithm
            double error = Double.PositiveInfinity;
            for (int i = 0; i < 100; i++)
                error = nls.Run(inputs, outputs);

            // Use the fitted function to compute the output values for each input
            double[] predict = inputs.Apply(regression.Compute);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            Assert.AreEqual(1318374605.8436923d, error);

            Assert.AreEqual(-12.025250289329851, regression.Coefficients[0], 1e-3);
            Assert.AreEqual(-0.082208180694676766, regression.Coefficients[1], 1e-3);
            Assert.AreEqual(-0.27402726898225627, regression.Coefficients[2], 1e-3);

            Assert.AreEqual(-19237.386162968953, predict[0]);
            Assert.AreEqual(-10820.533042245008, predict[1]);
            Assert.AreEqual(-4808.7299793870288, predict[2]);
            Assert.AreEqual(-1203.6211380089139, predict[5]);
        }
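For the quadratic model f(x) = a·x² + b·x + c, the partial derivatives with respect to the weights are ∂f/∂a = x², ∂f/∂b = x and ∂f/∂c = 1. A gradient written that way is sketched below for reference; note that the expressions used inside ExampleTest differ, and the coefficients asserted there were obtained with those expressions.

        // Analytic gradient of f(x) = a·x² + b·x + c with respect to the weights
        // (reference sketch only; not the gradient used in ExampleTest above):
        RegressionGradientFunction quadraticGradient = (w, x, r) =>
        {
            r[0] = x[0] * x[0]; // ∂f/∂a = x²
            r[1] = x[0];        // ∂f/∂b = x
            r[2] = 1;           // ∂f/∂c = 1
        };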
        public void RunTest()
        {
            // Suppose we would like to map the integer values in the
            // first column to the continuous values in the second column.
            double[,] data =
            {
                { -40,    -21142.1111111111 },
                { -30,    -21330.1111111111 },
                { -20,    -12036.1111111111 },
                { -10,      7255.3888888889 },
                {   0,     32474.8888888889 },
                {  10,     32474.8888888889 },
                {  20,      9060.8888888889 },
                {  30,    -11628.1111111111 },
                {  40,    -15129.6111111111 },
            };

            // Extract inputs and outputs
            double[][] inputs = data.GetColumn(0).ToJagged();
            double[] outputs = data.GetColumn(1);

            // Create a nonlinear regression with 4 coefficients, using the function
            // and gradient delegates defined elsewhere in the test fixture:
            NonlinearRegression regression = new NonlinearRegression(4, function, gradient);


            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            regression.Coefficients[0] = 0;         // m
            regression.Coefficients[1] = 80;        // s
            regression.Coefficients[2] = 53805;     // a
            regression.Coefficients[3] = -21330.11; // b

            double error = Double.PositiveInfinity;
            for (int i = 0; i < 100; i++)
                error = nls.Run(inputs, outputs);

            double m = regression.Coefficients[0];
            double s = regression.Coefficients[1];
            double a = regression.Coefficients[2];
            double b = regression.Coefficients[3];

            Assert.AreEqual(10345587.465428974, error);

            Assert.AreEqual(5.316196154830604, m, 1e-3);
            Assert.AreEqual(12.792301798208918, s, 1e-3);
            Assert.AreEqual(56794.832645792514, a, 1e-3);
            Assert.AreEqual(-20219.675997523173, b, 1e-2);

            Assert.IsFalse(Double.IsNaN(m));
            Assert.IsFalse(Double.IsNaN(s));
            Assert.IsFalse(Double.IsNaN(a));
            Assert.IsFalse(Double.IsNaN(b));
        }
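Once Run has converged, the fitted model can be queried directly. A minimal usage sketch, continuing from the test above (illustration only; these lines would sit after the nls.Run loop):

            // Illustration only: query the fitted model and its standard errors.
            double predicted = regression.Compute(new double[] { 25 }); // model output at x = 25
            double[] standardErrors = regression.StandardErrors;        // one entry per coefficient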