Example #1
        public void RunTest()
        {
            // Suppose we would like to map the integer values in the
            // first column to the continuous values in the second column.
            double[,] data =
            {
                { -40, -21142.1111111111 },
                { -30, -21330.1111111111 },
                { -20, -12036.1111111111 },
                { -10,   7255.3888888889 },
                {   0,  32474.8888888889 },
                {  10,  32474.8888888889 },
                {  20,   9060.8888888889 },
                {  30, -11628.1111111111 },
                {  40, -15129.6111111111 },
            };

            // Extract inputs and outputs
            double[][] inputs  = data.GetColumn(0).ToJagged();
            double[]   outputs = data.GetColumn(1);

            // Create a nonlinear regression with four coefficients, using the
            // model function and gradient defined elsewhere in this test class
            NonlinearRegression regression = new NonlinearRegression(4, function, gradient);

            // Create a non-linear least squares teacher (defaults to Levenberg-Marquardt)
            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            regression.Coefficients[0] = 0;         // m
            regression.Coefficients[1] = 80;        // s
            regression.Coefficients[2] = 53805;     // a
            regression.Coefficients[3] = -21330.11; // b

            double error = Double.PositiveInfinity;

            for (int i = 0; i < 100; i++)
            {
                error = nls.Run(inputs, outputs);
            }

            double m = regression.Coefficients[0];
            double s = regression.Coefficients[1];
            double a = regression.Coefficients[2];
            double b = regression.Coefficients[3];

            Assert.AreEqual(10345587.465428974, error);

            Assert.AreEqual(5.316196154830604, m, 1e-3);
            Assert.AreEqual(12.792301798208918, s, 1e-3);
            Assert.AreEqual(56794.832645792514, a, 1e-3);
            Assert.AreEqual(-20219.675997523173, b, 1e-2);

            Assert.IsFalse(Double.IsNaN(m));
            Assert.IsFalse(Double.IsNaN(s));
            Assert.IsFalse(Double.IsNaN(a));
            Assert.IsFalse(Double.IsNaN(b));
        }
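The `function` and `gradient` members referenced by this test are not shown in the snippet. Judging only from the coefficient names (m, s, a, b) and the starting values, a Gaussian-with-offset model would fit the pattern; the sketch below is an assumed reconstruction, not the test's actual definition.

        // Assumed reconstruction of the missing helpers (not the original test code):
        // a four-parameter Gaussian with offset, f(x) = a * exp(-(x - m)^2 / (2 * s^2)) + b
        static double function(double[] coefficients, double[] input)
        {
            double m = coefficients[0], s = coefficients[1];
            double a = coefficients[2], b = coefficients[3];
            double z = (input[0] - m) / s;
            return a * Math.Exp(-0.5 * z * z) + b;
        }

        // Analytic gradient of the assumed model with respect to (m, s, a, b)
        static void gradient(double[] coefficients, double[] input, double[] result)
        {
            double m = coefficients[0], s = coefficients[1], a = coefficients[2];
            double d = input[0] - m;
            double e = Math.Exp(-0.5 * (d * d) / (s * s));

            result[0] = a * e * d / (s * s);           // ∂f/∂m
            result[1] = a * e * (d * d) / (s * s * s); // ∂f/∂s
            result[2] = e;                             // ∂f/∂a
            result[3] = 1;                             // ∂f/∂b
        }

Any four-parameter model exposing the same two delegate signatures would slot into the test the same way.
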
        public void RunTest()
        {
            double[,] data =
            {
                { -40, -21142.1111111111 },
                { -30, -21330.1111111111 },
                { -20, -12036.1111111111 },
                { -10,   7255.3888888889 },
                {   0,  32474.8888888889 },
                {  10,  32474.8888888889 },
                {  20,   9060.8888888889 },
                {  30, -11628.1111111111 },
                {  40, -15129.6111111111 },
            };

            double[][] inputs  = data.GetColumn(0).ToArray();
            double[]   outputs = data.GetColumn(1);

            // Create a nonlinear regression using the model function and gradient
            // defined elsewhere in this test class
            NonlinearRegression regression = new NonlinearRegression(4, function, gradient);

            // Create a non-linear least squares teacher (defaults to Levenberg-Marquardt)
            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            regression.Coefficients[0] = 0;         // m
            regression.Coefficients[1] = 80;        // s
            regression.Coefficients[2] = 53805;     // a
            regression.Coefficients[3] = -21330.11; // b

            double error = 0;

            for (int i = 0; i < 100; i++)
            {
                error = nls.Run(inputs, outputs);
            }

            double m = regression.Coefficients[0];
            double s = regression.Coefficients[1];
            double a = regression.Coefficients[2];
            double b = regression.Coefficients[3];

            Assert.AreEqual(5.316196154830604, m, 1e-3);
            Assert.AreEqual(12.792301798208918, s, 1e-3);
            Assert.AreEqual(56794.832645792514, a, 1e-3);
            Assert.AreEqual(-20219.675997523173, b, 1e-2);

            Assert.IsFalse(Double.IsNaN(m));
            Assert.IsFalse(Double.IsNaN(s));
            Assert.IsFalse(Double.IsNaN(a));
            Assert.IsFalse(Double.IsNaN(b));
        }
        /// <summary>
        ///   Initializes a new instance of the <see cref="NonlinearLeastSquares"/> class.
        /// </summary>
        /// 
        /// <param name="regression">The regression model.</param>
        /// <param name="algorithm">The <see cref="ILeastSquaresMethod">least squares</see>
        /// algorithm to be used to estimate the regression parameters. Default is to
        /// use a <see cref="LevenbergMarquardt">Levenberg-Marquardt</see> algorithm.</param>
        /// 
        public NonlinearLeastSquares(NonlinearRegression regression, ILeastSquaresMethod algorithm)
        {
            if (regression == null)
                throw new ArgumentNullException("regression");

            if (algorithm == null)
                throw new ArgumentNullException("algorithm");

            if (regression.Gradient == null)
                throw new ArgumentException("The regression must have a gradient function defined.", "regression");

            this.solver = algorithm;
            this.solver.Solution = regression.Coefficients;
            this.solver.Function = new LeastSquaresFunction(regression.Function);
            this.solver.Gradient = new LeastSquaresGradientFunction(regression.Gradient);
            this.regression = regression;
        }
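As a quick illustration of this two-argument constructor, the solver can be swapped for Gauss-Newton instead of the Levenberg-Marquardt default. A minimal sketch, reusing the `rate` and `grad` delegates and the `inputs`/`outputs` data from the RunTest1 example later in this section:

        // Sketch: choose the least-squares algorithm explicitly (here Gauss-Newton for a
        // two-parameter model; rate, grad, inputs and outputs are assumed from RunTest1)
        var regression = new NonlinearRegression(2, rate, grad);
        var nls = new NonlinearLeastSquares(regression, new GaussNewton(2));

        double error = nls.Run(inputs, outputs);   // one pass of the chosen solver
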
Example #4
        /// <summary>
        ///   Initializes a new instance of the <see cref="NonlinearLeastSquares"/> class.
        /// </summary>
        ///
        /// <param name="regression">The regression model.</param>
        /// <param name="algorithm">The <see cref="ILeastSquaresMethod">least squares</see>
        /// algorithm to be used to estimate the regression parameters. Default is to
        /// use a <see cref="LevenbergMarquardt">Levenberg-Marquardt</see> algorithm.</param>
        ///
        public NonlinearLeastSquares(NonlinearRegression regression, ILeastSquaresMethod algorithm)
        {
            if (regression == null)
            {
                throw new ArgumentNullException("regression");
            }

            if (algorithm == null)
            {
                throw new ArgumentNullException("algorithm");
            }

            if (regression.Gradient == null)
            {
                throw new ArgumentException("The regression must have a gradient function defined.", "regression");
            }

            this.solver          = algorithm;
            this.solver.Solution = regression.Coefficients;
            this.solver.Function = new LeastSquaresFunction(regression.Function);
            this.solver.Gradient = new LeastSquaresGradientFunction(regression.Gradient);
            this.regression      = regression;
        }
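The same overload also accepts an explicitly constructed Levenberg-Marquardt instance, which is exactly what the single-argument constructor does internally. A brief sketch, assuming a `regression` model built as in the surrounding tests:

        // Sketch: passing Levenberg-Marquardt explicitly, sized to the number of
        // model coefficients (equivalent to the single-argument overload's default)
        var lm = new LevenbergMarquardt(regression.Coefficients.Length);
        var nls = new NonlinearLeastSquares(regression, lm);

        Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);
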
        public void RunTest1()
        {
            // Example from https://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm

            double[,] data =
            {
                { 0.03, 0.1947, 0.425,  0.626,  1.253,  2.500,  3.740 },
                { 0.05,  0.127, 0.094, 0.2122, 0.2729, 0.2665, 0.3317 }
            };

            double[][] inputs  = data.GetRow(0).ToArray();
            double[]   outputs = data.GetRow(1);


            // Rate model from the Wikipedia example: rate = β1 * x / (β2 + x)
            RegressionFunction rate = (double[] weights, double[] xi) =>
            {
                double x = xi[0];
                return (weights[0] * x) / (weights[1] + x);
            };

            // Analytic gradient of the rate model with respect to β1 and β2
            RegressionGradientFunction grad = (double[] weights, double[] xi, double[] result) =>
            {
                double x = xi[0];

                // Numerical gradient, computed only as a sanity check against
                // the analytic expressions below (it is not used otherwise)
                FiniteDifferences diff = new FiniteDifferences(2);
                diff.Function = (w) => rate(w, xi);
                double[] compare = diff.Compute(weights);

                result[0] = x / (weights[1] + x);                             // ∂rate/∂β1
                result[1] = -(weights[0] * x) / Math.Pow(weights[1] + x, 2);  // ∂rate/∂β2
            };


            NonlinearRegression regression = new NonlinearRegression(2, rate, grad);

            // Use Gauss-Newton instead of the default Levenberg-Marquardt solver
            NonlinearLeastSquares nls = new NonlinearLeastSquares(regression, new GaussNewton(2));

            Assert.IsTrue(nls.Algorithm is GaussNewton);

            regression.Coefficients[0] = 0.9; // β1
            regression.Coefficients[1] = 0.2; // β2

            int iterations = 10;

            double[] errors = new double[iterations];
            for (int i = 0; i < errors.Length; i++)
            {
                errors[i] = nls.Run(inputs, outputs);
            }

            double b1 = regression.Coefficients[0];
            double b2 = regression.Coefficients[1];

            Assert.AreEqual(0.362, b1, 1e-3);
            Assert.AreEqual(0.556, b2, 3e-3);

            Assert.IsFalse(Double.IsNaN(b1));
            Assert.IsFalse(Double.IsNaN(b2));

            for (int i = 1; i < errors.Length; i++)
            {
                Assert.IsFalse(Double.IsNaN(errors[i - 1]));
                Assert.IsTrue(errors[i - 1] >= errors[i]);
            }

            Assert.AreEqual(1.23859, regression.StandardErrors[0], 1e-3);
            Assert.AreEqual(6.06352, regression.StandardErrors[1], 3e-3);
        }
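After the Gauss-Newton run converges, the fitted model can be evaluated at new inputs through regression.Compute, just as the ExampleTest snippets below do. A short sketch with an illustrative concentration value:

        // Sketch: evaluate the fitted rate model at a new (illustrative) input
        double[] concentration = { 0.5 };
        double predictedRate = regression.Compute(concentration);
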
Example #6
 /// <summary>
 ///   Initializes a new instance of the <see cref="NonlinearLeastSquares"/> class.
 /// </summary>
 ///
 /// <param name="regression">The regression model.</param>
 ///
 public NonlinearLeastSquares(NonlinearRegression regression)
     : this(regression, new LevenbergMarquardt(regression.Coefficients.Length))
 {
 }
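A minimal usage sketch for this convenience overload (the `function`, `gradient`, `inputs` and `outputs` names are assumed from the other snippets in this section); the full workflow appears in Example #7 below:

 // Sketch: this overload defaults to Levenberg-Marquardt, sized to the
 // regression's coefficient count (function/gradient names are assumed)
 var regression = new NonlinearRegression(3, function, gradient);
 var nls = new NonlinearLeastSquares(regression);
 double error = nls.Run(inputs, outputs);   // inputs/outputs as in the tests above
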
Example #7
        public void ExampleTest()
        {
            // Suppose we would like to map the integer values in the
            // first column to the continuous values in the second column.
            double[,] data =
            {
                { -40, -21142.1111111111 },
                { -30, -21330.1111111111 },
                { -20, -12036.1111111111 },
                { -10,   7255.3888888889 },
                {   0,  32474.8888888889 },
                {  10,  32474.8888888889 },
                {  20,   9060.8888888889 },
                {  30, -11628.1111111111 },
                {  40, -15129.6111111111 },
            };

            // Extract inputs and outputs
            double[][] inputs  = data.GetColumn(0).ToJagged();
            double[]   outputs = data.GetColumn(1);

            // Create a nonlinear regression with a user-supplied model and gradient
            var regression = new NonlinearRegression(3,

                // Let's assume a quadratic model function: ax² + bx + c
                function: (w, x) => w[0] * x[0] * x[0] + w[1] * x[0] + w[2],

                // Derivatives with respect to the weights:
                gradient: (w, x, r) =>
                {
                    r[0] = 2 * w[0];     // w.r.t a: 2a
                    r[1] = w[1];         // w.r.t b: b
                    r[2] = w[2];         // w.r.t c: 0
                });

            // Create a non-linear least squares teacher
            var nls = new NonlinearLeastSquares(regression);

            // Initialize to some random values
            regression.Coefficients[0] = 4.2;
            regression.Coefficients[1] = 0.3;
            regression.Coefficients[2] = 1;

            // Run the function estimation algorithm
            double error = Double.PositiveInfinity;

            for (int i = 0; i < 100; i++)
            {
                error = nls.Run(inputs, outputs);
            }

            // Use the fitted function to compute the outputs for the input values
            double[] predict = inputs.Apply(regression.Compute);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            Assert.AreEqual(1318374605.8436923d, error);

            Assert.AreEqual(-12.025250289329851, regression.Coefficients[0], 1e-3);
            Assert.AreEqual(-0.082208180694676766, regression.Coefficients[1], 1e-3);
            Assert.AreEqual(-0.27402726898225627, regression.Coefficients[2], 1e-3);

            Assert.AreEqual(-19237.386162968953, predict[0]);
            Assert.AreEqual(-10820.533042245008, predict[1]);
            Assert.AreEqual(-4808.7299793870288, predict[2]);
            Assert.AreEqual(-1203.6211380089139, predict[5]);
        }
Example #8
        public void ExampleTest()
        {
            // Suppose we would like to map the integer values in the
            // first column to the continuous values in the second column.
            double[,] data =
            {
                { -40, -21142.1111111111 },
                { -30, -21330.1111111111 },
                { -20, -12036.1111111111 },
                { -10,   7255.3888888889 },
                {   0,  32474.8888888889 },
                {  10,  32474.8888888889 },
                {  20,   9060.8888888889 },
                {  30, -11628.1111111111 },
                {  40, -15129.6111111111 },
            };

            // Extract inputs and outputs
            double[][] inputs  = data.GetColumn(0).ToJagged();
            double[]   outputs = data.GetColumn(1);

            // Create a nonlinear regression with a user-supplied model and gradient
            var regression = new NonlinearRegression(3,

                // Let's assume a quadratic model function: ax² + bx + c
                function: (w, x) => w[0] * x[0] * x[0] + w[1] * x[0] + w[2],

                // Derivatives with respect to the weights:
                gradient: (w, x, r) =>
                {
                    r[0] = 2 * w[0];     // w.r.t a: 2a
                    r[1] = w[1];         // w.r.t b: b
                    r[2] = w[2];         // w.r.t c: 0
                });

            // Create a non-linear least squares teacher
            var nls = new NonlinearLeastSquares(regression);

            // Initialize to some random values
            regression.Coefficients[0] = 4.2;
            regression.Coefficients[1] = 0.3;
            regression.Coefficients[2] = 1;

            // Run the function estimation algorithm
            double error = Double.PositiveInfinity;

            for (int i = 0; i < 100; i++)
            {
                error = nls.Run(inputs, outputs);
            }

            // Use the fitted function to compute the outputs for the input values
            double[] predict = inputs.Apply(regression.Compute);

            Assert.IsTrue(nls.Algorithm is LevenbergMarquardt);

            Assert.AreEqual(2145404235.739383, error, 1e-7);

            Assert.AreEqual(-11.916652026711853, regression.Coefficients[0], 1e-3);
            Assert.AreEqual(-358.9758898959638, regression.Coefficients[1], 1e-3);
            Assert.AreEqual(-107.31273008811895, regression.Coefficients[2], 1e-3);

            Assert.AreEqual(-4814.9203769986034, predict[0], 1e-10);
            Assert.AreEqual(-63.02285725721211, predict[1], 1e-10);
            Assert.AreEqual(2305.5442571416661, predict[2], 1e-10);
            Assert.AreEqual(-4888.736831716782, predict[5], 1e-10);
        }
 public void ComputeRAResult()
 {
     // Fit the nonlinear regression to the stored inputs and outputs,
     // then build the result object and compute its valuations
     Regression = LeastSquares.Learn(Inputs, Outputs);
     RAResult   = new RAResultNonLinear(this);
     RAResult.ComputeValuations();
 }