Example 1
        protected double GenerateRegression(double[] x, double[] y)
        {
            OrdinaryLeastSquares   ols        = new OrdinaryLeastSquares();
            SimpleLinearRegression regression = ols.Learn(x, y);

            return(regression.Slope);
        }
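A minimal caller sketch for the helper above (called from within the same class, since it is protected; the sample arrays are invented). The learned SimpleLinearRegression also exposes Intercept and Transform, which GenerateRegression discards.

        // Hypothetical usage of the helper above:
        double[] xs = { 1, 2, 3, 4 };
        double[] ys = { 2.1, 3.9, 6.2, 7.8 };
        double slope = GenerateRegression(xs, ys); // ≈ 1.94 for this invented data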
        public override void SolveTransforamtion()
        {
            //https://numerics.mathdotnet.com/Regression.html#Regularization

            if (records.Count > minNumberOfPointToSolve)
            {
                if (records.Count % caclulationSteps == 0 || records.Count == minNumberOfPointToSolve)
                {
                    double[][] out_x  = new double[records.Count][];
                    double[][] out_z  = new double[records.Count][];
                    double[][] out_y  = new double[records.Count][];
                    double[][] in_XY  = new double[records.Count][];
                    double[][] in_XYZ = new double[records.Count][];
                    int        idx    = 0;
                    foreach (TransformationRecord r in records)
                    {
                        out_x[idx] = new double[] { r.ArPosition.x };
                        out_z[idx] = new double[] { r.ArPosition.z };
                        out_y[idx] = new double[] { r.ArPosition.y };

                        in_XY[idx]  = new double[] { r.GpsPosition.Longitude, r.GpsPosition.Latitude };
                        in_XYZ[idx] = new double[] { r.GpsPosition.Longitude, r.GpsPosition.Latitude, r.GpsPosition.Altitude };
                        idx++;
                    }
                    regression_x = ols_x.Learn(in_XY, out_x);
                    regression_z = ols_z.Learn(in_XY, out_z);
                    regression_y = ols_y.Learn(in_XYZ, out_y);
                    if (CalcualteError && testRecords.Count > 0)
                    {
                        List <Vector3> testArLocationsPredicted = TransformGpsToWorld(testRecords.GetGPSLocations());
                        Error_Horizontal = CalculateRMSE(testArLocationsPredicted, testRecords.GetARPositions());
                    }
                }
            }
        }
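TransformGpsToWorld and CalculateRMSE are used above but not shown. Below is a hypothetical sketch of the former, assuming UnityEngine.Vector3 and the regression_x / regression_z / regression_y fields learned in SolveTransforamtion (each MultivariateLinearRegression maps a coordinate array to a one-element output):

        // Hypothetical helper (not in the original snippet): map one GPS fix to an AR world position
        // using the three regressions learned above.
        private Vector3 GpsToWorld(double longitude, double latitude, double altitude)
        {
            double x = regression_x.Transform(new[] { longitude, latitude })[0];
            double z = regression_z.Transform(new[] { longitude, latitude })[0];
            double y = regression_y.Transform(new[] { longitude, latitude, altitude })[0];
            return new Vector3((float)x, (float)y, (float)z);
        }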
Example 3
        private static void multivariateLinear()
        {
            double[][] inputs =
            {
                // variables:  x1  x2  x3
                new double[] { 1, 1, 1 },    // input sample 1
                new double[] { 2, 1, 1 },    // input sample 2
                new double[] { 3, 1, 1 },    // input sample 3
            };

            double[][] outputs =
            {
                // variables:  y1  y2
                new double[] { 2, 3 },   // corresponding output to sample 1
                new double[] { 4, 6 },   // corresponding output to sample 2
                new double[] { 6, 9 },   // corresponding output to sample 3
            };

            // Use Ordinary Least Squares to create the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Now, compute the multivariate linear regression:
            MultivariateLinearRegression regression = ols.Learn(inputs, outputs);

            // We can obtain predictions using
            double[][] predictions = regression.Transform(inputs);

            // The prediction error is
            double error = new SquareLoss(outputs).Loss(predictions); // 0
        }
Example 4
        private MultipleLinearRegression PerformRegression(List <LeadingIndicator> indicators)
        {
            double[][] inputs =
            {
                new double[] { indicators[0].StockIndex, indicators[0].M2Level },
                new double[] { indicators[1].StockIndex, indicators[1].M2Level },
                new double[] { indicators[2].StockIndex, indicators[2].M2Level },
                new double[] { indicators[3].StockIndex, indicators[3].M2Level },
            };

            double[] outputs =
            {
                indicators[0].GdpOutput,
                indicators[1].GdpOutput,
                indicators[2].GdpOutput,
                indicators[3].GdpOutput,
            };

            // We will use Ordinary Least Squares to create a
            // linear regression model with an intercept term
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };

            // Use Ordinary Least Squares to estimate a regression model
            MultipleLinearRegression regression = ols.Learn(inputs, outputs);

            return(regression);
        }
Example 5
        public static void test2()
        {
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };



            double[][] inputs =
            {
                new double[] { 1, 1 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 0, 0 },
            };

            double[] outputs = { 1, 1, 1, 1 };
            MultipleLinearRegression regression = ols.Learn(inputs, outputs);

            double a = regression.Weights[0];            // a = 0
            double b = regression.Weights[1];            // b = 0
            double c = regression.Intercept;             // c = 1

            double[] predicted = regression.Transform(inputs);

            double error = new SquareLoss(outputs).Loss(predicted);
        }
Example 6
        public override void SolveTransforamtion()
        {
            //https://numerics.mathdotnet.com/Regression.html#Regularization

            // Least-Squares fitting of the points (X,y) = ((x0,x1,..,xk),y) to a linear surface
            // y : X -> p0*x0 + p1*x1 + ... + pk*xk, returning a function y' for the best fitting
            // combination. If an intercept is added, its coefficient will be prepended to the
            // resulting parameters.
            if (records.Count > minNumberOfPointToSolve)
            {
                if (records.Count % caclulationSteps == 0 || records.Count == minNumberOfPointToSolve)
                {
                    //double[] out_x = new double[records.Count];
                    //double[] out_y = new double[records.Count];
                    double[][] out_xy = new double[records.Count][];
                    double[][] in_XY  = new double[records.Count][];
                    int        idx    = 0;
                    foreach (TransformationRecord r in records)
                    {
                        out_xy[idx] = new double[] { r.ArPosition.x, r.ArPosition.z };
                        in_XY[idx]  = new double[] { r.GpsPosition.Longitude, r.GpsPosition.Latitude };
                        idx++;
                    }
                    regression = ols.Learn(in_XY, out_xy);
                    //// We can obtain predictions using
                    //double[][] predictions = regression.Transform(in_XY);

                    //// The prediction error is
                    //double error = new SquareLoss(out_xy).Loss(predictions); // 0
                    //double[] r2 = regression.CoefficientOfDetermination(in_XY, out_xy);
                }
            }
        }
Example 7
        private static void LinearRegressionLearning(IEnumerable <MatchingPair> trainingData, IEnumerable <MatchingPair> testData, IDictionary <string, IndexableAttributeMetadata> actualMetadata)
        {
            var stopWatch = new Stopwatch();

            stopWatch.Start();

            var trainingInputs  = trainingData.Select(data => data.ToVectorArray(actualMetadata)).ToArray();
            var trainingOutputs = trainingData.Select(data => new[] { data.PercentMatch }).ToArray();
            var testInputs      = testData.Select(data => data.ToVectorArray(actualMetadata)).ToArray();
            var testOutputs     = testData.Select(data => new[] { data.PercentMatch }).ToArray();

            var leastSquares = new OrdinaryLeastSquares();

            var regression = leastSquares.Learn(trainingInputs, trainingOutputs);

            var predictions = regression.Transform(trainingInputs);
            var error       = new SquareLoss(trainingOutputs).Loss(predictions);

            Logger.InfoFormat("Linear Regression: In-sample error: {0}", error);

            predictions = regression.Transform(testInputs);
            error       = new SquareLoss(testOutputs).Loss(predictions);
            Logger.InfoFormat("Linear Regression: Out-of-sample error: {0}", error);

            stopWatch.Stop();
            Logger.InfoFormat("Linear Regression learning took {0}", stopWatch.Elapsed);
        }
Example 8
        public CircleModel FittingCircleWithLeastSquares(double[][] points)
        {
            var ols = new OrdinaryLeastSquares
            {
                UseIntercept = true,
                IsRobust     = true
            };
            var outputs    = points.Select(t => Math.Pow(t[0], 2) + Math.Pow(t[1], 2)).ToArray();
            var regression = ols.Learn(points, outputs);

            // The regression fits x² + y² = A·x + B·y + C, so:
            var a = regression.Weights[0] / 2; // centre x-coordinate (A / 2)
            var b = regression.Weights[1] / 2; // centre y-coordinate (B / 2)
            var c = regression.Intercept;      // C = r² - a² - b²

            c = Math.Sqrt(c + a * a + b * b); // recover the radius r

            var midPoint = points[points.Length / 2];
            var result   = new CircleModel
            {
                X       = a,
                Y       = b,
                XSource = midPoint[0],
                YSource = midPoint[1],
                R       = c,
                R_geo   = GeoTools.CalculateDistance(midPoint[1], midPoint[0], b, a)
            };

            return(result);
        }
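For reference, the linearization behind this circle fit: a circle (x − a)² + (y − b)² = r² can be rearranged to

    x² + y² = 2a·x + 2b·y + (r² − a² − b²)

so regressing x² + y² on (x, y) with an intercept yields weights 2a and 2b and an intercept of r² − a² − b². That is why the code above halves the weights to obtain the centre and takes √(c + a² + b²) to recover the radius.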
Example 9
        //EXAMPLE: http://accord-framework.net/docs/html/T_Accord_Statistics_Models_Regression_Linear_MultivariateLinearRegression.htm
        static void Main(string[] args)
        {
            CSV_Parser     parser = new CSV_Parser();
            RegressionData data   = parser.ParseDataFile();

            // Use Ordinary Least Squares to create the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Now, compute the multivariate linear regression:
            MultivariateLinearRegression regression = ols.Learn(data.InterestRatings, data.MajorRatings);

            // We can obtain predictions using
            double[][] predictions = regression.Transform(data.InterestRatings);

            // The prediction error is
            double error = new SquareLoss(data.MajorRatings).Loss(predictions); // 0

            // Extract the regression weights (the coefficients of determination
            // are computed further below):
            double[][] weights = regression.Weights;
            Console.WriteLine("WEIGHTS:");
            //writeCSVfile(data, weights);
            GenerateCSFile(data, weights);

            Console.WriteLine("Coefficient Of Determination");
            double[] r3 = regression.CoefficientOfDetermination(data.InterestRatings, data.MajorRatings);
            for (int i = 0; i < r3.Length; i++)
            {
                Console.WriteLine(r3[i]);
            }

            Console.Read();
        }
Example 10
        /// <summary>
        /// The main application entry point.
        /// </summary>
        /// <param name="args">Command line arguments.</param>
        public static void Main(string[] args)
        {
            // get data
            Console.WriteLine("Loading data....");
            var path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "..");

            path = Path.Combine(path, "..");
            path = Path.Combine(path, "california_housing.csv");
            var housing = Frame.ReadCsv(path, separators: ",");
            // housing = housing.Where(kv => ((decimal)kv.Value["median_house_value"]) < 500000);

            // set up a few series
            var total_rooms        = housing["total_rooms"];
            var median_house_value = housing["median_house_value"];
            var median_income      = housing["median_income"];

            // convert the house value range to thousands
            median_house_value /= 1000;

            // set up feature and label
            var feature = total_rooms.Values.ToArray();
            // var feature = median_income.Values.ToArray();
            var labels = median_house_value.Values.ToArray();

            // train the model
            Console.WriteLine("Training model....");
            var learner = new OrdinaryLeastSquares();
            var model   = learner.Learn(feature, labels);

            // show results
            Console.WriteLine($"Slope:       {model.Slope}");
            Console.WriteLine($"Intercept:   {model.Intercept}");

            // validate the model
            var predictions = model.Transform(feature);
            var rmse        = Math.Sqrt(new SquareLoss(labels).Loss(predictions));

            var range = Math.Abs(labels.Max() - labels.Min());

            Console.WriteLine($"Label range: {range}");
            Console.WriteLine($"RMSE:        {rmse} {rmse / range * 100:0.00}%");

            // generate plot arrays
            var x = feature.Concat(feature).ToArray();
            var y = predictions.Concat(labels).ToArray();

            // set up color array
            var colors1 = Enumerable.Repeat(1, labels.Length).ToArray();
            var colors2 = Enumerable.Repeat(2, labels.Length).ToArray();
            var c       = colors1.Concat(colors2).ToArray();

            // plot the data
            var plot = new Scatterplot("Training", "feature", "label");

            plot.Compute(x, y, c);
            ScatterplotBox.Show(plot);

            Console.ReadLine();
        }
Example 11
        public void prediction_test()
        {
            // Example from http://www.real-statistics.com/multiple-regression/confidence-and-prediction-intervals/
            var dt = Accord.IO.CsvReader.FromText(Resources.linreg, true).ToTable();

            double[]   y = dt.Columns["Poverty"].ToArray();
            double[][] x = dt.ToArray("Infant Mort", "White", "Crime");

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Use OLS to learn the multiple linear regression
            MultipleLinearRegression regression = ols.Learn(x, y);

            Assert.AreEqual(3, regression.NumberOfInputs);
            Assert.AreEqual(1, regression.NumberOfOutputs);

            Assert.AreEqual(0.443650703716698, regression.Intercept, 1e-5);
            Assert.AreEqual(1.2791842411083394, regression.Weights[0], 1e-5);
            Assert.AreEqual(0.036259242392669415, regression.Weights[1], 1e-5);
            Assert.AreEqual(0.0014225014835705938, regression.Weights[2], 1e-5);

            double rse = regression.GetStandardError(x, y);

            Assert.AreEqual(rse, 2.4703520840798507, 1e-5);


            double[][] im  = ols.GetInformationMatrix();
            double     mse = regression.GetStandardError(x, y);

            double[] se = regression.GetStandardErrors(mse, im);

            Assert.AreEqual(0.30063086032754965, se[0], 1e-10);
            Assert.AreEqual(0.033603448179240082, se[1], 1e-10);
            Assert.AreEqual(0.0022414548866296342, se[2], 1e-10);
            Assert.AreEqual(3.9879881671805824, se[3], 1e-10);

            double[] x0 = new double[] { 7, 80, 400 };
            double   y0 = regression.Transform(x0);

            Assert.AreEqual(y0, 12.867680376316864, 1e-5);

            double actual = regression.GetStandardError(x0, mse, im);

            Assert.AreEqual(0.35902764658470271, actual, 1e-10);

            DoubleRange ci = regression.GetConfidenceInterval(x0, mse, x.Length, im);

            Assert.AreEqual(ci.Min, 12.144995206616116, 1e-5);
            Assert.AreEqual(ci.Max, 13.590365546017612, 1e-5);

            actual = regression.GetPredictionStandardError(x0, mse, im);
            Assert.AreEqual(2.4963053239397244, actual, 1e-10);

            DoubleRange pi = regression.GetPredictionInterval(x0, mse, x.Length, im);

            Assert.AreEqual(pi.Min, 7.8428783761994554, 1e-5);
            Assert.AreEqual(pi.Max, 17.892482376434273, 1e-5);
        }
Example 12
        public static void ModelTrain()
        {
            // var m = RoomsAvito.GroupBy(x => x.id).Select(x => x.First());
            // var RoomsAvito = HomeController.RoomsAvito.Take(30000);
            var m = repo.List().Count();



            double[][] inp  = new double[repo.List().Count()][];
            double[]   outp = new double[repo.List().Count()];
            int        i    = 0;


            foreach (Room roomAvito in repo.List())
            {
                int n = 1;
                if (roomAvito.room_type == "Вторичка")
                {
                    n = 0;
                }
                double k = 0;
                foreach (MetroInfo info in MetroInfos)
                {
                    if (info.metro == roomAvito.metro)
                    {
                        k = info.k;
                        break;
                    }
                }
                inp[i]  = new double[] { k, roomAvito.centre_distance, roomAvito.metro_distance, roomAvito.S, roomAvito.num, n };
                outp[i] = (int)roomAvito.price;
                i++;
            }
            Accord.Math.Random.Generator.Seed = 0;
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true,
                IsRobust     = true
            };

            regression = ols.Learn(inp, outp);


            Polynomial p = new Polynomial(2, 1);

            double[][] z = p.Transform(inp);

            // Now, create a standard OLS learning algorithm
            var ols1 = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };

            // Use the algorithm to learn a multiple regression
            regression1 = ols1.Learn(z, outp);

            // Check the quality of the regression:
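            // (Illustrative sketch, not part of the original source: one way the quality
            //  check could be done, using Accord's SquareLoss and CoefficientOfDetermination.)
            double[] linearPredictions = regression.Transform(inp);
            double[] polyPredictions   = regression1.Transform(z);

            double linearError = new SquareLoss(outp).Loss(linearPredictions);
            double polyError   = new SquareLoss(outp).Loss(polyPredictions);

            double linearR2 = regression.CoefficientOfDetermination(inp, outp);
            double polyR2   = regression1.CoefficientOfDetermination(z, outp);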
        }
Example 13
        private static double[] LinearRegression(List <Wine> testingSet, List <Wine> trainingSet)
        {
            var teacher = new OrdinaryLeastSquares();
            var model   = teacher.Learn(trainingSet.Select(x => x.GetParams()).ToArray(), trainingSet.Select(x => (double)x.Quality).ToArray());
            var result  = model.Transform(testingSet.Select(x => x.GetParams()).ToArray());

            return(result);
        }
        /// <summary>
        /// Run the lesson.
        /// </summary>
        public static void Run()
        {
            // get data
            Console.WriteLine("Loading data....");
            var path    = Path.GetFullPath(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"..\..\..\..\california_housing.csv"));
            var housing = Frame.ReadCsv(path, separators: ",");

            housing = housing.Where(kv => ((decimal)kv.Value["median_house_value"]) < 500000);

            // create the median_high_house_value feature
            housing.AddColumn("median_high_house_value",
                              housing["median_house_value"].Select(v => v.Value >= 265000 ? 1.0 : 0.0));

            // shuffle the frame
            var rnd     = new Random();
            var indices = Enumerable.Range(0, housing.Rows.KeyCount).OrderBy(v => rnd.NextDouble());

            housing = housing.IndexRowsWith(indices).SortRowsByKey();

            // create training, validation, and test frames
            var training   = housing.Rows[Enumerable.Range(0, 12000)];
            var validation = housing.Rows[Enumerable.Range(12000, 2500)];
            var test       = housing.Rows[Enumerable.Range(14500, 2500)];

            // build the list of features we're going to use
            var columns = new string[] {
                "latitude",
                "longitude",
                "housing_median_age",
                "total_rooms",
                "total_bedrooms",
                "population",
                "households",
                "median_income"
            };

            // train the model using a linear regressor
            var learner = new OrdinaryLeastSquares()
            {
                IsRobust = true
            };
            var regression = learner.Learn(
                training.Columns[columns].ToArray2D <double>().ToJagged(),
                training["median_high_house_value"].Values.ToArray());

            // get probabilities
            var features_validation = validation.Columns[columns].ToArray2D <double>().ToJagged();
            var label_validation    = validation["median_high_house_value"].Values.ToArray();
            var probabilities       = regression.Transform(features_validation);

            // calculate the histogram of probabilities
            var histogram = new Histogram();

            histogram.Compute(probabilities, 0.05);

            // draw the histogram
            Plot(histogram, "Probability Histogram", "prediction", "count");
        }
        static void Main(string[] args)
        {
            // sample input and output
            double[] inputs  = { 10, 20, 30, 40, 50 };
            double[] outputs = { 1, 2, 3, 4, 5 };

            // 1. Linear Regression
            var learner = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };
            var model = learner.Learn(inputs, outputs);
            var preds = model.Transform(inputs);

            Console.WriteLine("\n\n* Linear Regression Preds: {0}", String.Join(", ", preds));

            // 2. Linear SVM
            var learner2 = new LinearRegressionNewtonMethod()
            {
                Epsilon   = 2.1,
                Tolerance = 1e-5,
                UseComplexityHeuristic = true
            };

            var svmInputs = inputs.Select(x => new double[] { x, x }).ToArray();

            var model2 = learner2.Learn(svmInputs, outputs);
            var preds2 = model2.Score(svmInputs);

            Console.WriteLine("\n\n* Linear SVM Preds: {0}", String.Join(", ", preds2));

            // 3. Polynomial SVM
            var learner3 = new FanChenLinSupportVectorRegression <Polynomial>()
            {
                Kernel = new Polynomial(3)
            };
            var model3 = learner3.Learn(svmInputs, outputs);

            var preds3 = model3.Score(svmInputs);

            Console.WriteLine("\n\n* Polynomial SVM Preds: {0}", String.Join(", ", preds3));

            // 4. Gaussian SVM
            var learner4 = new FanChenLinSupportVectorRegression <Gaussian>()
            {
                Kernel = new Gaussian()
            };
            var model4 = learner4.Learn(svmInputs, outputs);

            var preds4 = model4.Score(svmInputs);

            Console.WriteLine("\n\n* Gaussian SVM Preds: {0}", String.Join(", ", preds4));


            Console.WriteLine("\n\n\n\nDONE!!");
            Console.ReadKey();
        }
        public void weight_test()
        {
            MultivariateLinearRegression reference;

            double[] referenceR2;

            {
                double[][] data =
                {
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, // 5 times weight 1
                    new[] { 1.0, 12.5, 3.6 },
                    new[] { 1.0, 43.2, 7.6 },
                    new[] { 1.0, 10.2, 1.1 },
                };

                double[][] x = Jagged.ColumnVector(data.GetColumn(1));
                double[][] y = Jagged.ColumnVector(data.GetColumn(2));

                var ols = new OrdinaryLeastSquares();
                reference   = ols.Learn(x, y);
                referenceR2 = reference.CoefficientOfDetermination(x, y);
            }

            MultivariateLinearRegression target;

            double[] targetR2;

            {
                double[][] data =
                {
                    new[] { 5.0, 10.7, 2.4 }, // 1 times weight 5
                    new[] { 1.0, 12.5, 3.6 },
                    new[] { 1.0, 43.2, 7.6 },
                    new[] { 1.0, 10.2, 1.1 },
                };

                double[]   weights = data.GetColumn(0);
                double[][] x       = Jagged.ColumnVector(data.GetColumn(1));
                double[][] y       = Jagged.ColumnVector(data.GetColumn(2));

                OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
                target   = ols.Learn(x, y, weights);
                targetR2 = target.CoefficientOfDetermination(x, y, weights: weights);
            }

            Assert.IsTrue(reference.Weights.IsEqual(target.Weights));
            Assert.IsTrue(reference.Intercepts.IsEqual(target.Intercepts, 1e-8));
            Assert.AreEqual(0.16387475666214069, target.Weights[0][0], 1e-6);
            Assert.AreEqual(0.59166925681755056, target.Intercepts[0], 1e-6);

            Assert.AreEqual(referenceR2[0], targetR2[0], 1e-8);
            Assert.AreEqual(0.91476129548901486, targetR2[0], 1e-10);
        }
Example 17
        public void weight_test_linear()
        {
            SimpleLinearRegression reference;
            double referenceR2;

            {
                double[][] data =
                {
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, //
                    new[] { 1.0, 10.7, 2.4 }, // 5 times weight 1
                    new[] { 1.0, 12.5, 3.6 },
                    new[] { 1.0, 43.2, 7.6 },
                    new[] { 1.0, 10.2, 1.1 },
                };

                double[] x = data.GetColumn(1);
                double[] y = data.GetColumn(2);

                var ols = new OrdinaryLeastSquares();
                reference   = ols.Learn(x, y);
                referenceR2 = reference.CoefficientOfDetermination(x, y);
            }

            SimpleLinearRegression target;
            double targetR2;

            {
                double[][] data =
                {
                    new[] { 5.0, 10.7, 2.4 }, // 1 times weight 5
                    new[] { 1.0, 12.5, 3.6 },
                    new[] { 1.0, 43.2, 7.6 },
                    new[] { 1.0, 10.2, 1.1 },
                };

                double[] weights = data.GetColumn(0);
                double[] x       = data.GetColumn(1);
                double[] y       = data.GetColumn(2);

                OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
                target   = ols.Learn(x, y, weights);
                targetR2 = target.CoefficientOfDetermination(x, y, weights);
            }

            Assert.AreEqual(reference.Slope, target.Slope);
            Assert.AreEqual(reference.Intercept, target.Intercept, 1e-8);
            Assert.AreEqual(0.16387475666214069, target.Slope, 1e-6);
            Assert.AreEqual(0.59166925681755056, target.Intercept, 1e-6);

            Assert.AreEqual(referenceR2, targetR2, 1e-8);
            Assert.AreEqual(0.91476129548901486, targetR2);
        }
        public MultipleLinearRegression Learn(double[][] inputs, double[] outputs)
        {
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };

            // Use Ordinary Least Squares to estimate a regression model
            MultipleLinearRegression regression = ols.Learn(inputs, outputs);

            // As result, we will be given the following:
            //double a = regression.Weights[0]; // a = 0
            //double b = regression.Weights[1]; // b = 0
            //double c = regression.Intercept;  // c = 1

            // This is the plane described by the equation
            // ax + by + c = z => 0x + 0y + 1 = z => 1 = z.

            // We can compute the predicted points using
            double[] predicted = regression.Transform(inputs);

            // And the squared error loss using
            double error = new SquareLoss(outputs).Loss(predicted);

            // We can also compute other measures, such as the coefficient of determination r²
            double r2 = new RSquaredLoss(numberOfInputs: 2, expected: outputs).Loss(predicted); // should be 1

            // We can also compute the adjusted or weighted versions of r² using
            var r2loss = new RSquaredLoss(numberOfInputs: 2, expected: outputs)
            {
                Adjust = true,
                // Weights = weights; // (if you have a weighted problem)
            };

            double ar2 = r2loss.Loss(predicted); // should be 1

            // Alternatively, we can also use the less generic, but maybe more user-friendly method directly:
            double ur2 = regression.CoefficientOfDetermination(inputs, outputs, adjust: true); // should be 1

            Console.WriteLine("Weights:");
            foreach (var w in regression.Weights)
            {
                Console.WriteLine($",{w}");
            }
            Console.WriteLine("Intercept:");
            Console.WriteLine($",{regression.Intercept}");
            Console.WriteLine($"error:{error}");
            Console.WriteLine($"r2:{r2}");
            Console.WriteLine($"r2loss:{r2loss}");
            Console.WriteLine($"ar2:{ar2}");
            Console.WriteLine($"ur2:{ur2}");

            return(regression);
        }
        public void Learn(IList <XYtoZ> dsLearn)
        {
            double [][] inputs  = dsLearn.Select(i => new double[] { i.X, i.Y }).ToArray();
            double []   outputs = dsLearn.Select(i => i.Z).ToArray();
            var         ols     = new OrdinaryLeastSquares()
            {
                IsRobust = _isRobust
            };

            _multipleLinearRegression = ols.Learn(inputs, outputs);
        }
Example 20
        public static SimpleLinearRegression AnalyzeMonthToGradeDependency()
        {
            CalculateCorrelationMonthToGrade();
            OrdinaryLeastSquares   ols        = new OrdinaryLeastSquares();
            SimpleLinearRegression regression = ols.Learn(DataHandler.Reviews.Select(r => (double)r.reviewTime.Month).ToArray(), DataHandler.Reviews.Select(r => r.overall).ToArray());

            double s = regression.Slope;
            double c = regression.Intercept;

            return(regression);
        }
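A sketch of how the returned model might be queried (hypothetical call site; the month value is arbitrary):

        // Hypothetical usage of the regression returned above:
        SimpleLinearRegression monthToGrade = AnalyzeMonthToGradeDependency();
        double predictedGradeForJune = monthToGrade.Transform(6); // Slope * 6 + Intercept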
Example 21
        public void Learn(IList <XtoY> dsLearn)
        {
            double [] inputs  = dsLearn.Select(i => i.X).ToArray();
            double [] outputs = dsLearn.Select(i => i.Y).ToArray();
            var       ols     = new OrdinaryLeastSquares()
            {
                IsRobust = _isRobust
            };

            _simpleLinearRegression = ols.Learn(inputs, outputs);
        }
Example 22
        public static void TrainRegression()
        {
            // use ordinary least squares as train type
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };

            // Use Ordinary Least Squares to estimate a regression model
            Predictor.MultipleGeneralRegression = ols.Learn(PredictorPointsTrain, FrequencyLabelsDouble);
        }
Example 23
        public double Slope(KalibreringDTO kDTO)
        {
            double[] kalibreringer = new double[] { (kDTO.KalibrerDoubles[0] + kDTO.KalibrerDoubles[3]) / 2, (kDTO.KalibrerDoubles[1] + kDTO.KalibrerDoubles[4]) / 2, (kDTO.KalibrerDoubles[2] + kDTO.KalibrerDoubles[5]) / 2 };

            double[] output = new double[] { 10, 50, 100 };

            OrdinaryLeastSquares   ols        = new OrdinaryLeastSquares();
            SimpleLinearRegression regression = ols.Learn(kalibreringer, output);

            slope = regression.Slope;
            return(slope);
        }
Example 24
        /// <summary>
        /// Calculates the Variance Inflation Factors (VIFs) for the different coefficients.
        /// </summary>
        /// <returns>An array containing corresponding VIFs.</returns>
        /// <param name="inputs">The inputs that a model was trained on.</param>
        public static float[] CalculateVIFs(double[][] inputs)
        {
            //Rotate array and create resultant array.
            inputs = MathUtils.RotateArray(inputs);
            float[] VIFs = new float[inputs.Length];

            //Loop through each variable
            for (int a = 0; a < inputs.Length; a++)
            {
                //The inputs/outputs for the regression models.
                double[][] regressionInputs = new double[inputs[0].Length][];
                double[]   regressionOutput = new double[inputs[0].Length];

                //Loop through and assign all of the independent variables as IVs,
                //except inputs[a], which becomes the dependent variable.
                for (int b = 0; b < inputs[0].Length; b++)
                {
                    regressionInputs[b] = new double[inputs.Length - 1];

                    for (int c = 0, d = 0; c < inputs.Length; c++)
                    {
                        if (a == c)
                        {
                            regressionOutput[b] = inputs[a][b];
                        }
                        else
                        {
                            regressionInputs[b][d] = inputs[c][b];
                            d++;
                        }
                    }
                }

                //Perform regression
                OrdinaryLeastSquares ols = new OrdinaryLeastSquares()
                {
                    UseIntercept = true
                };

                MultipleLinearRegression regression = ols.Learn(regressionInputs, regressionOutput);

                //Make predictions
                double[] predictions = regression.Transform(regressionInputs);

                //Calculate the loss
                double r2 = (new RSquaredLoss(inputs.Length - 1, regressionOutput)).Loss(predictions);

                //Calculate the VIF
                VIFs[a] = (float)(1.0f / (1.0f - r2));
            }

            return(VIFs);
        }
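Each entry is VIFⱼ = 1 / (1 − R²ⱼ), where R²ⱼ comes from regressing variable j on the remaining variables; values well above 1 (rules of thumb often use 5 or 10) are commonly read as a sign of multicollinearity. A minimal usage sketch, assuming some jagged design matrix named inputs:

        // Hypothetical usage: inspect the VIF of each column of a design matrix.
        float[] vifs = CalculateVIFs(inputs);
        for (int j = 0; j < vifs.Length; j++)
        {
            Console.WriteLine($"Variable {j}: VIF = {vifs[j]:0.00}");
        }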
Example 25
        private void Train(IEnumerable <SprintDataRow> trainingDataset)
        {
            // Set the independent variables
            double[][] inputs = trainingDataset.Select(x => new double[] { x.SprintNumber, x.HoursProgrammer1, x.HoursProgrammer2, x.HoursProgrammer3 }).ToArray();

            // Set the dependent variables
            double[] outputs = trainingDataset.Select(x => x.NumberOfProcessedStoryPoints).ToArray();

            // Train the model
            var ols = new OrdinaryLeastSquares();

            this._multipleLinearRegressionModel = ols.Learn(inputs, outputs);
        }
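A sketch of how the trained model might then be used for a prediction (the field name is taken from the snippet; the feature values are invented):

        // Hypothetical prediction for sprint 12 with the three programmers' planned hours;
        // MultipleLinearRegression.Transform also accepts a single input row.
        double predictedStoryPoints =
            this._multipleLinearRegressionModel.Transform(new double[] { 12, 40, 32, 36 });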
Example 26
        public RegressionResult PerformRegression(double[] trainX, double[] trainY, double[] testX, double[] testY)
        {
            OrdinaryLeastSquares   ols        = new OrdinaryLeastSquares();
            SimpleLinearRegression regression = ols.Learn(trainX, trainY);

            return(new RegressionResult
            {
                FormulaUsed = regression.ToString(),
                PredictionOnTestSet = testX.Select(regression.Transform).ToArray(),
                PredictionOnTrainingSet = trainX.Select(regression.Transform).ToArray(),
                Regression = regression
            });
        }
        /// <summary>
        /// Gets a fitted approximation to the forward values.
        /// </summary>
        /// <param name="date">The date at which the regressors should be observed.</param>
        /// <param name="cfs">The sum of the PV of all the cashflows on the path that take place after <paramref name="date"/>.</param>
        /// <returns></returns>
        public double[] FitCFs(Date date, double[] cfs)
        {
            //double[][] inputs = GetPolynomialValsRegular(date, 3);
            var inputs = GetIntrinsic(date, 10);

            var ols = new OrdinaryLeastSquares {
                UseIntercept = true, IsRobust = true
            };
            var regression = ols.Learn(inputs, cfs);
            var result     = regression.Transform(inputs);

            return(result);
        }
        private void Train(IEnumerable <SprintDataRow> trainingDataset)
        {
            // Our independent variable is the number of hours
            double[] inputs = trainingDataset.Select(x => Convert.ToDouble(x.NumberOfHours)).ToArray();

            // Our dependent variable is the number of processed story points
            double[] outputs = trainingDataset.Select(x => x.NumberOfProcessedStoryPoints).ToArray();

            // Train the model
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            this._linearRegressionModel = ols.Learn(inputs, outputs);
        }
Example 29
        public double EstimateSurface(uint price)
        {
            var apartments = _repository.GetAll();

            double[] inputs  = apartments.Select(x => (double)x.Price).ToArray();
            double[] outputs = apartments.Select(x => x.Surface).ToArray();

            var    ols        = new OrdinaryLeastSquares();
            var    regression = ols.Learn(inputs, outputs);
            double result     = regression.Transform(price);

            return(result);
        }
Example 30
        /// <summary>
        /// Register data that corresponds to the gesture
        /// </summary>
        protected override void RegisterGesture()
        {
            //We only detect frames with a minimum of palm velocity
            if (Math.Abs(this.SelectedHand.PalmVelocity.x) >= MIN_GESTURE_VELOCITY_X_FRAME_DETECTION)
            {
                //We keep the departure point for the gesture
                if (_frameGestureCount == 0)
                {
                    _gestureFirstPoint = this.SelectedHand.StabilizedPalmPosition;
                }

                _frameGestureCount++;

                //Determine the direction of the initiated gesture
                _currentGestureDirection = this.SelectedHand.PalmVelocity.x > 0 ? Side.Right : Side.Left;

                _inputs.Add(this.SelectedHand.StabilizedPalmPosition.x);
                _outputs.Add(this.SelectedHand.StabilizedPalmPosition.y);

                //Use Ordinary Least Squares to learn the regression
                try
                {
                    _regression = _ols.Learn(_inputs.ToArray(), _outputs.ToArray());


                    //Gets the coefficient of determination, also known as R-squared
                    _coefficientDetermination = _regression.CoefficientOfDetermination(_inputs.ToArray(), _outputs.ToArray());

                    //Checking max velocity on the gesture
                    //Abs is used for compatibility with both gestures (right and left)
                    _xVelocityMax = Math.Max(Math.Abs(this.SelectedHand.PalmVelocity.x), _xVelocityMax);
                    _xVelocityMin = Math.Min(Math.Abs(this.SelectedHand.PalmVelocity.x), _xVelocityMin);

                    //Calculate the distance from the first point of the gesture:
                    //the hypotenuse of the triangle given by the delta between the first point and the current point
                    _distance = Math.Sqrt(Math.Pow(this.SelectedHand.StabilizedPalmPosition.x - _gestureFirstPoint.x, 2) +
                                          Math.Pow(this.SelectedHand.StabilizedPalmPosition.y - _gestureFirstPoint.y, 2));
                }
                catch (InvalidOperationException ex)
                {
                    Console.WriteLine("Exception: {0}", ex.Message);
                    _regression        = null;
                    _frameGestureCount = 0;
                    _xVelocityMax      = 0;
                    _xVelocityMin      = Double.MaxValue;
                    _distance          = 0;
                    _inputs.Clear();
                    _outputs.Clear();
                }
            }
        }
        public void logarithm_learn()
        {
            #region doc_learn
            // This is the same data from the example available at
            // http://mathbits.com/MathBits/TISection/Statistics2/logarithmic.htm

            // Declare your inputs and output data
            double[] inputs = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
            double[] outputs = { 6, 9.5, 13, 15, 16.5, 17.5, 18.5, 19, 19.5, 19.7, 19.8 };

            // Transform inputs to logarithms
            double[] logx = Matrix.Log(inputs);

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Use OLS to learn the simple linear regression
            SimpleLinearRegression lr = ols.Learn(logx, outputs);

            // Compute predicted values for inputs
            double[] predicted = lr.Transform(logx);

            // Get an expression representing the learned regression model
            // We just have to remember that 'x' will actually mean 'log(x)'
            string result = lr.ToString("N4", CultureInfo.InvariantCulture);

            // Result will be "y(x) = 6.1082x + 6.0993"

            // The mean squared error between the expected and the predicted is
            double error = new SquareLoss(outputs).Loss(predicted); // 0.261454
            #endregion

            Assert.AreEqual(0.26145460024250794, error, 1e-8);
            Assert.AreEqual(6.1081800414945704, lr.Slope, 1e-8);
            Assert.AreEqual(6.0993411396126653, lr.Intercept, 1e-8);
            Assert.AreEqual("y(x) = 6.1082x + 6.0993", result);
        }
        public void learn_test()
        {
            #region doc_learn
            // We will try to model a plane as an equation in the form
            // "ax + by + c = z". We have two input variables (x and y)
            // and we will be trying to find two parameters a and b and 
            // an intercept term c.

            // We will use Ordinary Least Squares to create a
            // linear regression model with an intercept term
            var ols = new OrdinaryLeastSquares()
            {
                UseIntercept = true
            };

            // Now suppose you have some points
            double[][] inputs = 
            {
                new double[] { 1, 1 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 0, 0 },
            };

            // located in the same Z (z = 1)
            double[] outputs = { 1, 1, 1, 1 };

            // Use Ordinary Least Squares to estimate a regression model
            MultipleLinearRegression regression = ols.Learn(inputs, outputs);

            // As result, we will be given the following:
            double a = regression.Coefficients[0]; // a = 0
            double b = regression.Coefficients[1]; // b = 0
            double c = regression.Intercept; // c = 1

            // This is the plane described by the equation
            // ax + by + c = z => 0x + 0y + 1 = z => 1 = z.

            // We can compute the predicted points using
            double[] predicted = regression.Transform(inputs);

            // And the squared error loss using 
            double error = new SquareLoss(outputs).Loss(predicted);
            #endregion

            Assert.AreEqual(2, regression.NumberOfInputs);
            Assert.AreEqual(1, regression.NumberOfOutputs);


            Assert.AreEqual(0.0, a, 1e-6);
            Assert.AreEqual(0.0, b, 1e-6);
            Assert.AreEqual(1.0, c, 1e-6);
            Assert.AreEqual(0.0, error, 1e-6);

            double[] expected = regression.Compute(inputs);
            double[] actual = regression.Transform(inputs);
            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            double r = regression.CoefficientOfDetermination(inputs, outputs);
            Assert.AreEqual(1.0, r);
        }
        public void learn_test()
        {
            #region doc_learn
            // Let's say we have some univariate, continuous sets of input data,
            // and a corresponding univariate, continuous set of output data, such
            // as a set of points in R². A simple linear regression is able to fit
            // a line relating the input variables to the output variables in which
            // the minimum-squared-error of the line and the actual output points
            // is minimum.

            // Declare some sample test data.
            double[] inputs = { 80, 60, 10, 20, 30 };
            double[] outputs = { 20, 40, 30, 50, 60 };

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Use OLS to learn the simple linear regression
            SimpleLinearRegression regression = ols.Learn(inputs, outputs);

            // Compute the output for a given input:
            double y = regression.Transform(85); // The answer will be 28.088

            // We can also extract the slope and the intercept term
            // for the line. Those will be -0.26 and 50.5, respectively.
            double s = regression.Slope;     // -0.264706
            double c = regression.Intercept; // 50.588235
            #endregion

            // Expected slope and intercept
            double eSlope = -0.264706;
            double eIntercept = 50.588235;

            Assert.AreEqual(28.088235294117649, y, 1e-10);
            Assert.AreEqual(eSlope, s, 1e-5);
            Assert.AreEqual(eIntercept, c, 1e-5);

            Assert.IsFalse(double.IsNaN(y));
        }
        public void prediction_test()
        {
            // example data from http://www.real-statistics.com/regression/confidence-and-prediction-intervals/
            double[][] input = 
            {
                new double[] { 5, 80 },
                new double[] { 23, 78 },
                new double[] { 25, 60 },
                new double[] { 48, 53 },
                new double[] { 17, 85 },
                new double[] { 8, 84 },
                new double[] { 4, 73 },
                new double[] { 26, 79 },
                new double[] { 11, 81 },
                new double[] { 19, 75 },
                new double[] { 14, 68 },
                new double[] { 35, 72 },
                new double[] { 29, 58 },
                new double[] { 4, 92 },
                new double[] { 23, 65 },
            };

            double[] cig = input.GetColumn(0);
            double[] exp = input.GetColumn(1);

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Use OLS to learn the simple linear regression
            SimpleLinearRegression regression = ols.Learn(cig, exp);

            Assert.AreEqual(1, regression.NumberOfInputs);
            Assert.AreEqual(1, regression.NumberOfOutputs);

            double x0 = 20;
            double y0 = regression.Transform(x0);
            Assert.AreEqual(y0, 73.1564, 1e-4);

            double syx = regression.GetStandardError(cig, exp);
            Assert.AreEqual(7.974682, syx, 1e-5);

            double ssx = cig.Subtract(cig.Mean()).Pow(2).Sum();
            Assert.AreEqual(2171.6, ssx, 1e-5);

            double n = exp.Length;
            double x0c = x0 - cig.Mean();
            double var = 1 / n + (x0c * x0c) / ssx;
            Assert.AreEqual(0.066832443052741455, var, 1e-10);
            double expected = syx * Math.Sqrt(var);
            double actual = regression.GetStandardError(x0, cig, exp);

            Assert.AreEqual(2.061612, expected, 1e-5);
            Assert.AreEqual(expected, actual, 1e-10);

            DoubleRange ci = regression.GetConfidenceInterval(x0, cig, exp);
            Assert.AreEqual(ci.Min, 68.702569616457751, 1e-5);
            Assert.AreEqual(ci.Max, 77.610256563931543, 1e-5);

            actual = regression.GetPredictionStandardError(x0, cig, exp);
            Assert.AreEqual(8.2368569010499666, actual, 1e-10);

            DoubleRange pi = regression.GetPredictionInterval(x0, cig, exp);
            Assert.AreEqual(pi.Min, 55.361765613397054, 1e-5);
            Assert.AreEqual(pi.Max, 90.95106056699224, 1e-5);
        }
Example 35
        public void new_api_test()
        {
            #region doc_learn
            // Fix the random number generator
            Accord.Math.Random.Generator.Seed = 0;

            double[,] data = // This is the same data used in the RANSAC sample app
            {
                {  1.0,  0.79 }, {  3,    2.18 }, {  5,    5.99 }, {  7.0,  7.65 },
                {  9.0,  9.55 }, { 11,   11.89 }, { 13,   13.73 }, { 15.0, 14.77 }, 
                { 17.0, 18.00 }, {  1.2,  1.45 }, {  1.5,  1.18 }, {  1.8,  1.92 },
                {  2.1,  1.47 }, {  2.4,  2.41 }, {  2.7,  2.35 }, {  3.0,  3.41 },
                {  3.3,  3.78 }, {  3.6,  3.21 }, {  3.9,  4.76 }, {  4.2,  5.03 },
                {  4.5,  4.19 }, {  4.8,  3.81 }, {  5.1,  6.07 }, {  5.4,  5.74 },
                {  5.7,  6.39 }, {  6,    6.11 }, {  6.3,  6.86 }, {  6.6,  6.35 },
                {  6.9,  7.9  }, {  7.2,  8.04 }, {  7.5,  8.48 }, {  7.8,  8.07 },
                {  8.1,  8.22 }, {  8.4,  8.41 }, {  8.7,  9.4  }, {  9,    8.8 },
                {  9.3,  8.44 }, {  9.6,  9.32 }, {  9.9,  9.18 }, { 10.2,  9.86 },
                { 10.5, 10.16 }, { 10.8, 10.28 }, { 11.1, 11.07 }, { 11.4,  11.66 },
                { 11.7, 11.13 }, { 12,   11.55 }, { 12.3, 12.62 }, { 12.6,  12.27 },
                { 12.9, 12.33 }, { 13.2, 12.37 }, { 13.5, 12.75 }, { 13.8,  14.44 },
                { 14.1, 14.71 }, { 14.4, 13.72 }, { 14.7, 14.54 }, { 15,    14.67 },
                { 15.3, 16.04 }, { 15.6, 15.21 }, {  1,    3.9  }, {  2,    11.5 },
                {  3.0, 13.0  }, {  4,    0.9  }, {  5,    5.5  }, {  6,    16.2 },
                {  7.0,  0.8  }, {  8,    9.4  }, {  9,    9.5  }, { 10,    17.5 },
                { 11.0,  6.3  }, { 12,   12.6  }, { 13,    1.5  }, { 14,     1.5 },
                {  2.0,  10   }, {  3,    9    }, { 15,    2    }, { 15.5,   1.2 },
            };


            // First, fit simple linear regression directly for comparison reasons.
            double[] x = data.GetColumn(0); // Extract the independent variable
            double[] y = data.GetColumn(1); // Extract the dependent variable

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Estimate a line passing through the (x, y) points
            SimpleLinearRegression regression = ols.Learn(x, y);

            // Now, compute the values predicted by the 
            // regression for the original input points
            double[] commonOutput = regression.Transform(x);


            // Now, fit simple linear regression using RANSAC
            int maxTrials = 1000;
            int minSamples = 20;
            double probability = 0.950;
            double errorThreshold = 1000;

            // Create a RANSAC algorithm to fit a simple linear regression
            var ransac = new RANSAC<SimpleLinearRegression>(minSamples)
            {
                Probability = probability,
                Threshold = errorThreshold,
                MaxEvaluations = maxTrials,

                // Define a fitting function
                Fitting = (int[] sample) =>
                {
                    // Build a Simple Linear Regression model
                    return new OrdinaryLeastSquares()
                        .Learn(x.Get(sample), y.Get(sample));
                },

                // Define an inlier detector function
                Distances = (SimpleLinearRegression r, double threshold) =>
                {
                    var inliers = new List<int>();
                    for (int i = 0; i < x.Length; i++)
                    {
                        // Compute error for each point
                        double error = r.Transform(x[i]) - y[i];

                        // If the square error is low enough,
                        if (error * error < threshold)
                            inliers.Add(i); //  the point is considered an inlier.
                    }

                    return inliers.ToArray();
                }
            };


            // Now that the RANSAC hyperparameters have been specified, we can 
            // compute another regression model using the RANSAC algorithm:

            int[] inlierIndices;
            SimpleLinearRegression robustRegression = ransac.Compute(data.Rows(), out inlierIndices);

            // Compute the output of the model fitted by RANSAC
            double[] ransacOutput = robustRegression.Transform(x);

            #endregion

            Assert.AreEqual(ransac.TrialsNeeded, 0);
            Assert.AreEqual(ransac.TrialsPerformed, 1);

            string a = inlierIndices.ToCSharp();
            string b = ransacOutput.ToCSharp();
            int[] expectedInliers = new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75 };
            double[] expectedOutput = new double[] { 1.96331236445045, 3.42042856976283, 4.87754477507521, 6.33466098038758, 7.79177718569996, 9.24889339101234, 10.7060095963247, 12.1631258016371, 13.6202420069495, 2.10902398498169, 2.32759141577855, 2.5461588465754, 2.76472627737226, 2.98329370816912, 3.20186113896597, 3.42042856976283, 3.63899600055969, 3.85756343135654, 4.0761308621534, 4.29469829295026, 4.51326572374711, 4.73183315454397, 4.95040058534082, 5.16896801613768, 5.38753544693454, 5.6061028777314, 5.82467030852825, 6.04323773932511, 6.26180517012196, 6.48037260091882, 6.69894003171568, 6.91750746251253, 7.13607489330939, 7.35464232410625, 7.5732097549031, 7.79177718569996, 8.01034461649682, 8.22891204729367, 8.44747947809053, 8.66604690888738, 8.88461433968424, 9.1031817704811, 9.32174920127795, 9.54031663207481, 9.75888406287167, 9.97745149366852, 10.1960189244654, 10.4145863552622, 10.6331537860591, 10.8517212168559, 11.0702886476528, 11.2888560784497, 11.5074235092465, 11.7259909400434, 11.9445583708402, 12.1631258016371, 12.3816932324339, 12.6002606632308, 1.96331236445045, 2.69187046710664, 3.42042856976283, 4.14898667241902, 4.87754477507521, 5.6061028777314, 6.33466098038758, 7.06321908304377, 7.79177718569996, 8.52033528835615, 9.24889339101234, 9.97745149366852, 10.7060095963247, 11.4345676989809, 2.69187046710664, 3.42042856976283, 12.1631258016371, 12.5274048529652 };

            Assert.IsTrue(inlierIndices.IsEqual(expectedInliers));
            Assert.IsTrue(ransacOutput.IsEqual(expectedOutput, 1e-10));
        }
        public void prediction_test()
        {
            // Example from http://www.real-statistics.com/multiple-regression/confidence-and-prediction-intervals/
            var dt = Accord.IO.CsvReader.FromText(Resources.linreg, true).ToTable();

            double[][] y = dt.ToArray("Poverty");
            double[][] x = dt.ToArray("Infant Mort", "White", "Crime");

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Use OLS to learn the multiple linear regression
            MultivariateLinearRegression regression = ols.Learn(x, y);

            Assert.AreEqual(3, regression.NumberOfInputs);
            Assert.AreEqual(1, regression.NumberOfOutputs);

            Assert.AreEqual(0.443650703716698, regression.Intercepts[0], 1e-5);
            Assert.AreEqual(1.2791842411083394, regression.Weights[0][0], 1e-5);
            Assert.AreEqual(0.036259242392669415, regression.Weights[1][0], 1e-5);
            Assert.AreEqual(0.0014225014835705938, regression.Weights[2][0], 1e-5);

            double rse = regression.GetStandardError(x, y)[0];
            Assert.AreEqual(rse, 2.4703520840798507, 1e-5);


            double[][] im = ols.GetInformationMatrix();
            double[] mse = regression.GetStandardError(x, y);
            double[][] se = regression.GetStandardErrors(mse, im);

            Assert.AreEqual(0.30063086032754965, se[0][0], 1e-10);
            Assert.AreEqual(0.033603448179240082, se[0][1], 1e-10);
            Assert.AreEqual(0.0022414548866296342, se[0][2], 1e-10);
            Assert.AreEqual(3.9879881671805824, se[0][3], 1e-10);

            double[] x0 = new double[] { 7, 80, 400 };
            double y0 = regression.Transform(x0)[0];
            Assert.AreEqual(y0, 12.867680376316864, 1e-5);

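            // Standard error of the fitted (mean) response at the new point x0: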
            double actual = regression.GetStandardError(x0, mse, im)[0];

            Assert.AreEqual(0.35902764658470271, actual, 1e-10);

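            // Confidence interval for the mean response at x0: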
            DoubleRange ci = regression.GetConfidenceInterval(x0, mse, x.Length, im)[0];
            Assert.AreEqual(ci.Min, 12.144995206616116, 1e-5);
            Assert.AreEqual(ci.Max, 13.590365546017612, 1e-5);

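            // The prediction standard error is larger, since it also includes the residual variance: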
            actual = regression.GetPredictionStandardError(x0, mse, im)[0];
            Assert.AreEqual(2.4963053239397244, actual, 1e-10);

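            // Prediction interval for a single new observation at x0 (wider than the confidence interval):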
            DoubleRange pi = regression.GetPredictionInterval(x0, mse, x.Length, im)[0];
            Assert.AreEqual(pi.Min, 7.8428783761994554, 1e-5);
            Assert.AreEqual(pi.Max, 17.892482376434273, 1e-5);
        }
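
        // A minimal hedged sketch (not part of the original test): the numbers asserted
        // above suggest the usual relationship between the two standard errors, namely
        // sePrediction^2 = rse^2 + seFit^2  (2.4963^2 ~= 2.4704^2 + 0.3590^2).
        // The helper name CombinedPredictionError is an illustrative assumption.
        private static double CombinedPredictionError(double rse, double seFit)
        {
            // Residual variance plus the variance of the fitted mean
            return Math.Sqrt(rse * rse + seFit * seFit);
        }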
        public void learn_test1()
        {
            #region doc_learn
            // The multivariate linear regression is a generalization of
            // the multiple linear regression: not only are the input
            // variables multivariate, but so are the dependent output
            // variables.

            // In the following example, we will perform a regression of
            // a 2-dimensional output variable over a 3-dimensional input
            // variable.

            double[][] inputs = 
            {
                // variables:  x1  x2  x3
                new double[] {  1,  1,  1 }, // input sample 1
                new double[] {  2,  1,  1 }, // input sample 2
                new double[] {  3,  1,  1 }, // input sample 3
            };

            double[][] outputs = 
            {
                // variables:  y1  y2
                new double[] {  2,  3 }, // corresponding output to sample 1
                new double[] {  4,  6 }, // corresponding output to sample 2
                new double[] {  6,  9 }, // corresponding output to sample 3
            };

            // A quick visual inspection shows that the first output
            // variable y1 is always twice the first input variable, and
            // the second output variable y2 is always three times the
            // first input variable. The other input variables are unused.
            // Nevertheless, we will fit a multivariate regression model
            // and confirm the validity of our impressions:

            // Use Ordinary Least Squares to create the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
            
            // Now, compute the multivariate linear regression:
            MultivariateLinearRegression regression = ols.Learn(inputs, outputs);

            // We can obtain predictions using
            double[][] predictions = regression.Transform(inputs);

            // The prediction error is
            double error = new SquareLoss(outputs).Loss(predictions); // 0

            // At this point, the regression error will be 0 (the fit was
            // perfect). The regression coefficients for the first input
            // and first output variables will be 2. The coefficient for
            // the first input and second output variables will be 3. All
            // others will be 0.
            //
            // regression.Coefficients should be the matrix given by
            //
            // double[,] coefficients = {
            //                              { 2, 3 },
            //                              { 0, 0 },
            //                              { 0, 0 },
            //                          };
            //

            // We can also check the r-squared coefficients of determination:
            double[] r2 = regression.CoefficientOfDetermination(inputs, outputs);
            #endregion

            // The first input variable coefficients will be 2 and 3:
            Assert.AreEqual(2, regression.Coefficients[0, 0], 1e-10);
            Assert.AreEqual(3, regression.Coefficients[0, 1], 1e-10);

            // And all other coefficients will be 0:
            Assert.AreEqual(0, regression.Coefficients[1, 0], 1e-10);
            Assert.AreEqual(0, regression.Coefficients[1, 1], 1e-10);
            Assert.AreEqual(0, regression.Coefficients[2, 0], 1e-10);
            Assert.AreEqual(0, regression.Coefficients[2, 1], 1e-10);

            Assert.AreEqual(3, regression.NumberOfInputs);
            Assert.AreEqual(2, regression.NumberOfOutputs);

            // The r² coefficients should be one for both output variables:
            Assert.AreEqual(1, r2[0]);
            Assert.AreEqual(1, r2[1]);

            foreach (var e in regression.Coefficients)
                Assert.IsFalse(double.IsNaN(e));

            Assert.AreEqual(0, error, 1e-10);
            Assert.IsFalse(double.IsNaN(error));
        }
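
        // A minimal hedged sketch (not part of the original example): since the learned
        // coefficient matrix is { {2, 3}, {0, 0}, {0, 0} }, the model extrapolates to
        // unseen inputs as y1 = 2 * x1 and y2 = 3 * x1. The helper below is illustrative.
        private static double[] TransformNewSample(MultivariateLinearRegression regression)
        {
            // For a new sample with x1 = 4, the prediction is approximately { 8, 12 }
            return regression.Transform(new double[] { 4, 1, 1 });
        }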
Example n. 38
0
        private void btnCompute_Click(object sender, EventArgs e)
        {
            DataTable dataTable = dgvAnalysisSource.DataSource as DataTable;

            if (dataTable == null)
                return;

            // Gather the available data
            double[][] data = dataTable.ToArray();

            // First, fit a simple linear regression directly, for comparison purposes.
            double[] x = data.GetColumn(0); // Extract the independent variable
            double[] y = data.GetColumn(1); // Extract the dependent variable

            // Use Ordinary Least Squares to learn the regression
            OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

            // Estimate a line passing through the (x, y) points
            SimpleLinearRegression regression = ols.Learn(x, y);

            // Now, compute the values predicted by the 
            // regression for the original input points
            double[] commonOutput = regression.Transform(x);


            // Now, fit simple linear regression using RANSAC
            int maxTrials = (int)numMaxTrials.Value;
            int minSamples = (int)numSamples.Value;
            double probability = (double)numProbability.Value;
            double errorThreshold = (double)numThreshold.Value;

            // Create a RANSAC algorithm to fit a simple linear regression
            var ransac = new RANSAC<SimpleLinearRegression>(minSamples)
            {
                Probability = probability,
                Threshold = errorThreshold,
                MaxEvaluations = maxTrials,

                // Define a fitting function
                Fitting = delegate(int[] sample)
                {
                    // Retrieve the training data
                    double[] inputs = x.Get(sample);
                    double[] outputs = y.Get(sample);

                    // Build a Simple Linear Regression model
                    return new OrdinaryLeastSquares().Learn(inputs, outputs);
                },

                // Define a check for degenerate samples
                Degenerate = delegate(int[] sample)
                {
                    // In this case, we will not be performing such checks.
                    return false;
                },

                // Define an inlier detector function
                Distances = delegate(SimpleLinearRegression r, double threshold)
                {
                    List<int> inliers = new List<int>();
                    for (int i = 0; i < x.Length; i++)
                    {
                        // Compute error for each point
                        double error = r.Transform(x[i]) - y[i];

                        // If the squared error is below the given threshold,
                        //  the point is considered to be an inlier.
                        if (error * error < threshold)
                            inliers.Add(i);
                    }

                    return inliers.ToArray();
                }
            };


            // Now that the RANSAC hyperparameters have been specified, we can 
            // compute another regression model using the RANSAC algorithm:

            int[] inlierIndices;
            SimpleLinearRegression robustRegression = ransac.Compute(data.Length, out inlierIndices);


            if (robustRegression == null)
            {
                lbStatus.Text = "RANSAC failed. Please try again after adjusting its parameters.";
                return; // the RANSAC algorithm did not find any inliers and no model was created
            }



            // Compute the output of the model fitted by RANSAC
            double[] ransacOutput = robustRegression.Transform(x);

            // Create a scatter plot comparing the outputs from the standard
            //  linear regression and the RANSAC-fitted linear regression.
            CreateScatterplot(graphInput, x, y, commonOutput, ransacOutput,
                x.Get(inlierIndices), y.Get(inlierIndices));

            lbStatus.Text = "Regression created! Please compare the RANSAC "
                + "regression (blue) with the simple regression (in red).";
        }
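
        // A minimal hedged sketch (not part of the original form): once RANSAC succeeds,
        // one convenient way to summarize the result is to report the share of points kept
        // as inliers and how far the robust slope moved from the plain least-squares slope.
        // The helper name SummarizeRobustFit is an illustrative assumption.
        private static string SummarizeRobustFit(SimpleLinearRegression plain,
            SimpleLinearRegression robust, int[] inlierIndices, int totalPoints)
        {
            double inlierRatio = inlierIndices.Length / (double)totalPoints;
            double slopeShift = Math.Abs(robust.Slope - plain.Slope);
            return String.Format("Inliers: {0:P0}, slope change: {1:F4}", inlierRatio, slopeShift);
        }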