public void zero_inliers_test() { // Fix the random number generator Accord.Math.Random.Generator.Seed = 0; double[,] data = // This is the same data used in the RANSAC sample app { { 1.0, 0.79 }, { 3, 2.18 }, { 5, 5.99 }, { 7.0, 7.65 }, { 9.0, 9.55 }, { 11, 11.89 }, { 13, 13.73 }, { 15.0, 14.77 }, { 17.0, 18.00 }, { 1.2, 1.45 }, { 1.5, 1.18 }, { 1.8, 1.92 }, { 2.1, 1.47 }, { 2.4, 2.41 }, { 2.7, 2.35 }, { 3.0, 3.41 }, { 3.3, 3.78 }, { 3.6, 3.21 }, { 3.9, 4.76 }, { 4.2, 5.03 }, { 4.5, 4.19 }, { 4.8, 3.81 }, { 5.1, 6.07 }, { 5.4, 5.74 }, { 5.7, 6.39 }, { 6, 6.11 }, { 6.3, 6.86 }, { 6.6, 6.35 }, { 6.9, 7.9 }, { 7.2, 8.04 }, { 7.5, 8.48 }, { 7.8, 8.07 }, { 8.1, 8.22 }, { 8.4, 8.41 }, { 8.7, 9.4 }, { 9, 8.8 }, { 9.3, 8.44 }, { 9.6, 9.32 }, { 9.9, 9.18 }, { 10.2, 9.86 }, { 10.5, 10.16 }, { 10.8, 10.28 }, { 11.1, 11.07 }, { 11.4, 11.66 }, { 11.7, 11.13 }, { 12, 11.55 }, { 12.3, 12.62 }, { 12.6, 12.27 }, { 12.9, 12.33 }, { 13.2, 12.37 }, { 13.5, 12.75 }, { 13.8, 14.44 }, { 14.1, 14.71 }, { 14.4, 13.72 }, { 14.7, 14.54 }, { 15, 14.67 }, { 15.3, 16.04 }, { 15.6, 15.21 }, { 1, 3.9 }, { 2, 11.5 }, { 3.0, 13.0 }, { 4, 0.9 }, { 5, 5.5 }, { 6, 16.2 }, { 7.0, 0.8 }, { 8, 9.4 }, { 9, 9.5 }, { 10, 17.5 }, { 11.0, 6.3 }, { 12, 12.6 }, { 13, 1.5 }, { 14, 1.5 }, { 2.0, 10 }, { 3, 9 }, { 15, 2 }, { 15.5, 1.2 }, }; // First, fit simple linear regression directly for comparison reasons. double[] x = data.GetColumn(0); // Extract the independent variable double[] y = data.GetColumn(1); // Extract the dependent variable // Create a simple linear regression var regression = new SimpleLinearRegression(); Assert.AreEqual(1, regression.NumberOfInputs); Assert.AreEqual(1, regression.NumberOfOutputs); // Estimate a line passing through the (x, y) points double sumOfSquaredErrors = regression.Regress(x, y); // Now, compute the values predicted by the // regression for the original input points double[] commonOutput = regression.Compute(x); // Now, fit simple linear regression using RANSAC int maxTrials = 1000; int minSamples = 20; double probability = 0.950; double errorThreshold = 1000; int count = 0; // Create a RANSAC algorithm to fit a simple linear regression var ransac = new RANSAC <SimpleLinearRegression>(minSamples) { Probability = probability, Threshold = errorThreshold, MaxEvaluations = maxTrials, // Define a fitting function Fitting = delegate(int[] sample) { // Retrieve the training data double[] inputs = x.Submatrix(sample); double[] outputs = y.Submatrix(sample); // Build a Simple Linear Regression model var r = new SimpleLinearRegression(); r.Regress(inputs, outputs); return(r); }, // Define a check for degenerate samples Degenerate = delegate(int[] sample) { // In this case, we will not be performing such checks. return(false); }, // Define a inlier detector function Distances = delegate(SimpleLinearRegression r, double threshold) { count++; List <int> inliers = new List <int>(); // Generate 0 inliers twice, then proceed as normal if (count > 2) { for (int i = 0; i < x.Length; i++) { // Compute error for each point double error = r.Compute(x[i]) - y[i]; // If the squared error is below the given threshold, // the point is considered to be an inlier. 
if (error * error < threshold) { inliers.Add(i); } } } return(inliers.ToArray()); } }; // Now that the RANSAC hyperparameters have been specified, we can // compute another regression model using the RANSAC algorithm: int[] inlierIndices; SimpleLinearRegression robustRegression = ransac.Compute(data.Rows(), out inlierIndices); // Compute the output of the model fitted by RANSAC double[] ransacOutput = robustRegression.Compute(x); Assert.AreEqual(ransac.TrialsNeeded, 0); Assert.AreEqual(ransac.TrialsPerformed, 3); string a = inlierIndices.ToCSharp(); string b = ransacOutput.ToCSharp(); int[] expectedInliers = new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75 }; double[] expectedOutput = new double[] { 4.62124895918799, 5.37525473445784, 6.12926050972769, 6.88326628499754, 7.63727206026739, 8.39127783553724, 9.14528361080709, 9.89928938607694, 10.6532951613468, 4.69664953671498, 4.80975040300545, 4.92285126929593, 5.03595213558641, 5.14905300187689, 5.26215386816736, 5.37525473445784, 5.48835560074832, 5.6014564670388, 5.71455733332927, 5.82765819961975, 5.94075906591023, 6.05385993220071, 6.16696079849118, 6.28006166478166, 6.39316253107214, 6.50626339736262, 6.61936426365309, 6.73246512994357, 6.84556599623405, 6.95866686252453, 7.071767728815, 7.18486859510548, 7.29796946139596, 7.41107032768644, 7.52417119397691, 7.63727206026739, 7.75037292655787, 7.86347379284835, 7.97657465913882, 8.0896755254293, 8.20277639171978, 8.31587725801026, 8.42897812430073, 8.54207899059121, 8.65517985688169, 8.76828072317216, 8.88138158946264, 8.99448245575312, 9.1075833220436, 9.22068418833408, 9.33378505462455, 9.44688592091503, 9.55998678720551, 9.67308765349599, 9.78618851978646, 9.89928938607694, 10.0123902523674, 10.1254911186579, 4.62124895918799, 4.99825184682292, 5.37525473445784, 5.75225762209277, 6.12926050972769, 6.50626339736262, 6.88326628499754, 7.26026917263247, 7.63727206026739, 8.01427494790232, 8.39127783553724, 8.76828072317216, 9.14528361080709, 9.52228649844202, 4.99825184682292, 5.37525473445784, 9.89928938607694, 10.0877908298944 }; Assert.IsTrue(inlierIndices.IsEqual(expectedInliers)); Assert.IsTrue(ransacOutput.IsEqual(expectedOutput, 1e-10)); }
public double Slope(KalibreringDTO kDTO)
{
    double[] kalibreringer = new double[]
    {
        (kDTO.KalibrerDoubles[0] + kDTO.KalibrerDoubles[3]) / 2,
        (kDTO.KalibrerDoubles[1] + kDTO.KalibrerDoubles[4]) / 2,
        (kDTO.KalibrerDoubles[2] + kDTO.KalibrerDoubles[5]) / 2
    };
    double[] output = new double[] { 10, 50, 100 };

    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
    SimpleLinearRegression regression = ols.Learn(kalibreringer, output);
    slope = regression.Slope;
    return slope;
}
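// A minimal usage sketch (not from the original project): once a calibration line
// like the one above has been learned, regression.Transform can map a raw averaged
// reading onto the 10/50/100 reference scale. The readings below are illustrative
// assumptions, not real calibration data.
double[] readings = { 0.12, 0.55, 1.08 };   // hypothetical averaged raw readings
double[] scale = { 10, 50, 100 };           // known reference points, as in Slope()
var olsCalibration = new OrdinaryLeastSquares();
SimpleLinearRegression calibration = olsCalibration.Learn(readings, scale);
double calibratedValue = calibration.Transform(0.70);   // interpolate an unknown reading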
private void Train(IEnumerable<SprintDataRow> trainingDataset)
{
    // Our independent variable is the number of hours
    double[] inputs = trainingDataset.Select(x => Convert.ToDouble(x.NumberOfHours)).ToArray();

    // Our dependent variable is the number of processed story points
    double[] outputs = trainingDataset.Select(x => x.NumberOfProcessedStoryPoints).ToArray();

    // Train the model
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
    this._linearRegressionModel = ols.Learn(inputs, outputs);
}
public RegressionResult PerformRegression(double[] trainX, double[] trainY, double[] testX, double[] testY)
{
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
    SimpleLinearRegression regression = ols.Learn(trainX, trainY);
    return new RegressionResult
    {
        FormulaUsed = regression.ToString(),
        PredictionOnTestSet = testX.Select(regression.Transform).ToArray(),
        PredictionOnTrainingSet = trainX.Select(regression.Transform).ToArray(),
        Regression = regression
    };
}
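// A hedged follow-up sketch: given the RegressionResult returned above, the test-set
// and training-set errors could be summarized with Accord's SquareLoss (mean squared
// error by default). The sample arrays are illustrative assumptions.
double[] trainX = { 1, 2, 3, 4, 5 };
double[] trainY = { 2.1, 3.9, 6.2, 7.8, 10.1 };
double[] testX = { 6, 7 };
double[] testY = { 12.0, 14.2 };
var result = PerformRegression(trainX, trainY, testX, testY);
double testMse = new SquareLoss(testY).Loss(result.PredictionOnTestSet);
double trainMse = new SquareLoss(trainY).Loss(result.PredictionOnTrainingSet);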
/// <summary>
/// This class detects swipes and executes events based on the result of the swipe.
/// The constructor initializes the local variables.
/// </summary>
/// <param name="rightOrLeftHanded">Determines whether we are in right-handed or left-handed mode</param>
public SwipeGestureDetector(Side rightOrLeftHanded) : base(rightOrLeftHanded)
{
    _regression = null;
    _ols = new OrdinaryLeastSquares();
    _inputs = new List<double>();
    _outputs = new List<double>();
    _frameGestureCount = 0;
    _xVelocityMax = 0;
    _lastGestureDetectedInMillis = 0;
    _xVelocityMin = Double.MaxValue;
    _distance = 0;
}
/// <summary>
/// Register data that corresponds to the gesture
/// </summary>
protected override void RegisterGesture()
{
    // We only track frames with a minimum palm velocity
    if (Math.Abs(this.SelectedHand.PalmVelocity.x) >= MIN_GESTURE_VELOCITY_X_FRAME_DETECTION)
    {
        // Keep the starting point of the gesture
        if (_frameGestureCount == 0)
        {
            _gestureFirstPoint = this.SelectedHand.StabilizedPalmPosition;
        }
        _frameGestureCount++;

        // Determine the direction of the initiated gesture
        _currentGestureDirection = this.SelectedHand.PalmVelocity.x > 0 ? Side.Right : Side.Left;

        _inputs.Add(this.SelectedHand.StabilizedPalmPosition.x);
        _outputs.Add(this.SelectedHand.StabilizedPalmPosition.y);

        // Use Ordinary Least Squares to learn the regression
        try
        {
            _regression = _ols.Learn(_inputs.ToArray(), _outputs.ToArray());

            // Get the coefficient of determination, also known as R-squared
            _coefficientDetermination = _regression.CoefficientOfDetermination(_inputs.ToArray(), _outputs.ToArray());

            // Track the maximum and minimum velocity over the gesture.
            // Abs is used so that both directions (right and left) are handled the same way.
            _xVelocityMax = Math.Max(Math.Abs(this.SelectedHand.PalmVelocity.x), _xVelocityMax);
            _xVelocityMin = Math.Min(Math.Abs(this.SelectedHand.PalmVelocity.x), _xVelocityMin);

            // Compute the distance from the first point of the gesture:
            // the hypotenuse of the triangle given by the deltas between the first point and the current point
            _distance = Math.Sqrt(Math.Pow(this.SelectedHand.StabilizedPalmPosition.x - _gestureFirstPoint.x, 2) + Math.Pow(this.SelectedHand.StabilizedPalmPosition.y - _gestureFirstPoint.y, 2));
        }
        catch (InvalidOperationException ex)
        {
            Console.WriteLine("Exception: {0}", ex.Message);
            _regression = null;
            _frameGestureCount = 0;
            _xVelocityMax = 0;
            _xVelocityMin = Double.MaxValue;
            _distance = 0;
            _inputs.Clear();
            _outputs.Clear();
        }
    }
}
static void Main(string[] args)
{
    test3();
    test3();
    byte[,] temp = new byte[2, 2];
    test2();
    test1();

    double[] inputs = { 80, 60, 10, 20, 30 };
    double[] outputs = { 20, 40, 30, 50, 60 };

    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
    SimpleLinearRegression regression = ols.Learn(inputs, outputs);
}
/// <summary>
/// Clear tracked data if necessary
/// </summary>
public override void ClearFrames()
{
    // The gesture is over when it reverses the direction it started with,
    // runs past the maximum frame count, or exceeds the maximum length;
    // in any of those cases we clear the tracked data.
    if ((_currentGestureDirection == Side.Right && this.SelectedHand.PalmVelocity.x < 0) ||
        (_currentGestureDirection == Side.Left && this.SelectedHand.PalmVelocity.x > 0) ||
        _frameGestureCount >= FRAME_MAX_GESTURE_LENGTH ||
        _distance >= GESTURE_LENGTH)
    {
        _regression = null;
        _frameGestureCount = 0;
        _xVelocityMax = 0;
        _xVelocityMin = Double.MaxValue;
        _inputs.Clear();
        _outputs.Clear();
    }
}
private void btnOK_Click(object sender, EventArgs e)
{
    // Put the input field on the X axis and the output field on the Y axis
    var GraphPane = zedGraph.GraphPane;
    GraphPane.CurveList.Clear();
    GraphPane.XAxis.Title.Text = cmbInputField.Text;
    GraphPane.YAxis.Title.Text = cmbOutputField.Text;

    // Build the input and output lists
    double[] inliersX = new double[mongoCol.Count()];
    double[] inliersY = new double[mongoCol.Count()];
    int Cnt = 0;
    foreach (var item in mongoCol.FindAllAs<BsonDocument>())
    {
        inliersX[Cnt] = item[cmbInputField.Text].AsInt32;
        inliersY[Cnt] = item[cmbOutputField.Text].AsInt32;
        Cnt++;
    }
    var myCurve = GraphPane.AddCurve("Point", new PointPairList(inliersX, inliersY), Color.Blue, SymbolType.Default);
    myCurve.Line.IsVisible = false;
    myCurve.Symbol.Fill = new Fill(Color.Blue);

    // Linear regression
    // Create a new simple linear regression
    SimpleLinearRegression regression = new SimpleLinearRegression();

    // Compute the linear regression
    regression.Regress(inliersX, inliersY);

    double[] InputX = new double[2];
    double[] OutputY = new double[2];
    InputX[0] = 0;
    InputX[1] = inliersX.Max();
    OutputY[0] = regression.Compute(0);
    OutputY[1] = regression.Compute(inliersX.Max());
    myCurve = GraphPane.AddCurve("Regression:" + regression.ToString(), new PointPairList(InputX, OutputY), Color.Blue, SymbolType.Default);
    myCurve.Line.IsVisible = true;
    myCurve.Line.Color = Color.Red;

    // Refresh the axes and the chart
    zedGraph.AxisChange();
    zedGraph.Invalidate();
}
public TrendLine(IEnumerable<ScatterErrorPoint> l)
{
    DataPoints = l;
    if (DataPoints.Count() > 1)
    {
        double[] inputs = DataPoints.Select((dp) => dp.X).ToArray();
        double[] outputs = DataPoints.Select((dp) => dp.Y).ToArray();
        double[] weights = DataPoints.Select((dp) => 1 / (dp.ErrorY * dp.ErrorY)).ToArray();

        OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
        SimpleLinearRegression regression = ols.Learn(inputs, outputs, weights);

        Slope = regression.Slope;
        Offset = regression.Intercept;
    }
}
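// A minimal sketch of how the trend line above could be evaluated, assuming Slope and
// Offset are exposed as public properties (they are only assigned in the constructor
// shown here, so that is an assumption about the rest of the class). Note the weights
// passed to ols.Learn are 1/ErrorY², i.e. inverse-variance weighting: points with
// smaller error bars pull the fit more strongly.
static double PredictY(TrendLine trend, double x)
{
    return trend.Offset + trend.Slope * x;   // y = intercept + slope * x
}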
public static double RegressCut(double[] inputs, double[] outputs)
{
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

    // Use OLS to learn the simple linear regression
    SimpleLinearRegression regression = ols.Learn(inputs, outputs);

    // Compute the output for a given input:
    //double y = regression.Transform(85); // The answer will be 28.088

    // We can also extract the slope and the intercept term
    // for the line. Those will be -0.26 and 50.5, respectively.
    double s = regression.Slope;     // -0.264706
    double c = regression.Intercept; // 50.588235
    return c;
}
/// <summary>
/// Receives a list of Records and trains a Simple Linear Regression algorithm upon them.
/// </summary>
/// <param name="records"></param>
public SimpleLinearRegression TrainLinearRegression(List<Record> records)
{
    // Orders records by ascending date.
    records = records.OrderBy(x => x.Date).ToList();

    var trainingInputs = new double[records.Count];
    var trainingOutputs = new double[records.Count];

    // Fills training arrays.
    for (var i = 0; i < records.Count; i++)
    {
        trainingInputs[i] = i;
        trainingOutputs[i] = records.ElementAt(i).AllCrimes;
    }

    // Trains the global object slr.
    slr = _ols.Learn(trainingInputs, trainingOutputs);
    return slr;
}
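// A hedged usage sketch: because the inputs above are simply the record index
// (0, 1, 2, ...) in date order, the next time step can be forecast by transforming
// records.Count. This is an illustration, not code from the original project.
double ForecastNextPeriod(List<Record> records)
{
    SimpleLinearRegression model = TrainLinearRegression(records);
    return model.Transform(records.Count);   // one index past the most recent record
}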
public static SimpleLinearRegression AnalyzeYearToGradeDependency()
{
    CalculateCorrelationYearToGrade();

    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
    SimpleLinearRegression regression = ols.Learn(
        DataHandler.Reviews.Select(r => Math.Abs(2000 - (double)r.reviewTime.Year)).ToArray(),
        DataHandler.Reviews.Select(r => r.overall).ToArray());

    double s = regression.Slope;
    double c = regression.Intercept;

    //var z = regression.Transform(9);
    //var z1 = regression.Transform(14);
    //var z2 = regression.Transform(11);
    //var z3 = regression.Transform(1);
    //var z4 = regression.Transform(2);

    return regression;
}
public void Learn(IList<XtoY> dsLearn)
{
    // Note: the dsLearn parameter is not used here; the model is fitted
    // to the data already stored in the mylist field.
    List<double> data = new List<double>(mylist.Count);
    double[] date = new double[mylist.Count];
    for (int i = 0; i < mylist.Count; i++)
    {
        data.Add(mylist[i].Cases);
        date[i] = i + 1;
    }

    var ols = new OrdinaryLeastSquares() { IsRobust = _isRobust };
    _simpleLinearRegression = ols.Learn(data.ToArray(), date);
}
public void When_Calculate_Linear_Regression() { var inputs = new double[] { 230.1, 44.5, 17.2, 151.5, 180.8, 8.7, 57.5, 120.2, 8.6, 199.8, 66.1, 214.7, 23.8, 97.5, 204.1, 195.4, 67.8, 281.4, 69.2, 147.3, 218.4, 237.4, 13.2, 228.3, 62.3, 262.9, 142.9, 240.1, 248.8, 70.6, 292.9, 112.9, 97.2, 265.6, 95.7, 290.7, 266.9, 74.7, 43.1, 228, 202.5, 177, 293.6, 206.9, 25.1, 175.1, 89.7, 239.9, 227.2, 66.9, 199.8, 100.4, 216.4, 182.6, 262.7, 198.9, 7.3, 136.2, 210.8, 210.7, 53.5, 261.3, 239.3, 102.7, 131.1, 69, 31.5, 139.3, 237.4, 216.8, 199.1, 109.8, 26.8, 129.4, 213.4, 16.9, 27.5, 120.5, 5.4, 116, 76.4, 239.8, 75.3, 68.4, 213.5, 193.2, 76.3, 110.7, 88.3, 109.8, 134.3, 28.6, 217.7, 250.9, 107.4, 163.3, 197.6, 184.9, 289.7, 135.2, 222.4, 296.4, 280.2, 187.9, 238.2, 137.9, 25, 90.4, 13.1, 255.4, 225.8, 241.7, 175.7, 209.6, 78.2, 75.1, 139.2, 76.4, 125.7, 19.4, 141.3, 18.8, 224, 123.1, 229.5, 87.2, 7.8, 80.2, 220.3, 59.6, 0.7, 265.2, 8.4, 219.8, 36.9, 48.3, 25.6, 273.7, 43, 184.9, 73.4, 193.7, 220.5, 104.6, 96.2, 140.3, 240.1, 243.2, 38, 44.7, 280.7, 121, 197.6, 171.3, 187.8, 4.1, 93.9, 149.8, 11.7, 131.7, 172.5, 85.7, 188.4, 163.5, 117.2, 234.5, 17.9, 206.8, 215.4, 284.3, 50, 164.5, 19.6, 168.4, 222.4, 276.9, 248.4, 170.2, 276.7, 165.6, 156.6, 218.5, 56.2, 287.6, 253.8, 205, 139.5, 191.1, 286, 18.7, 39.5, 75.5, 17.2, 166.8, 149.7, 38.2, 94.2, 177, 283.6, 232.1 }; var outputs = new double[] { 22.1, 10.4, 9.3, 18.5, 12.9, 7.2, 11.8, 13.2, 4.8, 10.6, 8.6, 17.4, 9.2, 9.7, 19, 22.4, 12.5, 24.4, 11.3, 14.6, 18, 12.5, 5.6, 15.5, 9.7, 12, 15, 15.9, 18.9, 10.5, 21.4, 11.9, 9.6, 17.4, 9.5, 12.8, 25.4, 14.7, 10.1, 21.5, 16.6, 17.1, 20.7, 12.9, 8.5, 14.9, 10.6, 23.2, 14.8, 9.7, 11.4, 10.7, 22.6, 21.2, 20.2, 23.7, 5.5, 13.2, 23.8, 18.4, 8.1, 24.2, 15.7, 14, 18, 9.3, 9.5, 13.4, 18.9, 22.3, 18.3, 12.4, 8.8, 11, 17, 8.7, 6.9, 14.2, 5.3, 11, 11.8, 12.3, 11.3, 13.6, 21.7, 15.2, 12, 16, 12.9, 16.7, 11.2, 7.3, 19.4, 22.2, 11.5, 16.9, 11.7, 15.5, 25.4, 17.2, 11.7, 23.8, 14.8, 14.7, 20.7, 19.2, 7.2, 8.7, 5.3, 19.8, 13.4, 21.8, 14.1, 15.9, 14.6, 12.6, 12.2, 9.4, 15.9, 6.6, 15.5, 7, 11.6, 15.2, 19.7, 10.6, 6.6, 8.8, 24.7, 9.7, 1.6, 12.7, 5.7, 19.6, 10.8, 11.6, 9.5, 20.8, 9.6, 20.7, 10.9, 19.2, 20.1, 10.4, 11.4, 10.3, 13.2, 25.4, 10.9, 10.1, 16.1, 11.6, 16.6, 19, 15.6, 3.2, 15.3, 10.1, 7.3, 12.9, 14.4, 13.3, 14.9, 18, 11.9, 11.9, 8, 12.2, 17.1, 15, 8.4, 14.5, 7.6, 11.7, 11.5, 27, 20.2, 11.7, 11.8, 12.6, 10.5, 12.2, 8.7, 26.2, 17.6, 22.6, 10.3, 17.3, 15.9, 6.7, 10.8, 9.9, 5.9, 19.6, 17.3, 7.6, 9.7, 12.8, 25.5, 13.4 }; var linearRegression = new SimpleLinearRegression(MatrixDecompositionAlgs.GOLUB_REINSCH); var result = linearRegression.Regress(inputs, outputs); Assert.Equal(0.0475, System.Math.Round(result.LinearRegression.SlopeLst.First().Value, 4)); Assert.Equal(7.0326, System.Math.Round(result.LinearRegression.Intercept.Value, 4)); Assert.Equal(0, System.Math.Round(result.LinearRegression.SlopeLst.First().PValue)); Assert.Equal(17.668, System.Math.Round(result.LinearRegression.SlopeLst.First().TStatistic, 3)); Assert.Equal(0, System.Math.Round(result.LinearRegression.Intercept.PValue)); Assert.Equal(15.36, System.Math.Round(result.LinearRegression.Intercept.TStatistic, 2)); }
public TestLinearRegression()
{
    double[] inputs = { 80, 60, 10, 20, 30 };
    double[] outputs = { 20, 40, 30, 50, 60 };

    var regression = new SimpleLinearRegression();
    regression.Regress(inputs, outputs);
    var slope = regression.Slope;

    var mySlope = CalculateSlope(inputs, outputs);

    // Compare with a small tolerance instead of ==, since the two slopes
    // are floating-point results of different computations.
    if (Math.Abs(slope - mySlope) < 1e-10)
    {
        // The hand-rolled slope matches the library's result.
    }
}
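// CalculateSlope is not shown in the snippet above; the following is a hypothetical
// sketch of what such a hand-rolled OLS slope could look like
// (slope = covariance(x, y) / variance(x)). It is an assumption, not the original
// implementation.
static double CalculateSlope(double[] x, double[] y)
{
    double meanX = x.Average();
    double meanY = y.Average();
    double covariance = 0, variance = 0;
    for (int i = 0; i < x.Length; i++)
    {
        covariance += (x[i] - meanX) * (y[i] - meanY);
        variance += (x[i] - meanX) * (x[i] - meanX);
    }
    return covariance / variance;
}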
private static void linearRegression()
{
    // Declare some sample test data.
    double[] inputs = { 80, 60, 10, 20, 30 };
    double[] outputs = { 20, 40, 30, 50, 60 };

    // Use Ordinary Least Squares to learn the regression
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

    // Use OLS to learn the simple linear regression
    SimpleLinearRegression regression = ols.Learn(inputs, outputs);

    // Compute the output for a given input:
    double y = regression.Transform(85); // The answer will be 28.088

    // We can also extract the slope and the intercept term
    // for the line. Those will be -0.26 and 50.5, respectively.
    double s = regression.Slope;     // -0.264706
    double c = regression.Intercept; // 50.588235
}
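// The slope and intercept quoted above can be checked against the closed-form OLS
// formulas, slope = sum((x - x̄)(y - ȳ)) / sum((x - x̄)²) and intercept = ȳ - slope * x̄.
// This is an illustrative check, not part of the original sample.
double[] xs = { 80, 60, 10, 20, 30 };
double[] ys = { 20, 40, 30, 50, 60 };
double xMean = xs.Average();   // 40
double yMean = ys.Average();   // 40
double num = 0, den = 0;
for (int i = 0; i < xs.Length; i++)
{
    num += (xs[i] - xMean) * (ys[i] - yMean);
    den += (xs[i] - xMean) * (xs[i] - xMean);
}
double slope = num / den;                  // -900 / 3400 = -0.264706
double intercept = yMean - slope * xMean;  // 50.588235
double yAt85 = intercept + slope * 85;     // 28.088235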
public static void test3()
{
    var poly2 = CreateFunc(1, 1);
    Random rnd = new Random();
    var pos = Enumerable.Range(0, 20)
        .Select(x => new double[] { x, poly2(x) + rnd.NextDouble() })
        .ToArray();

    double[] inputs = pos.Select(x => x[0]).ToArray();
    double[] outputs = pos.Select(x => x[1]).ToArray();

    // Fit a second-degree polynomial regression
    var ls = new PolynomialLeastSquares() { Degree = 2 };
    PolynomialRegression poly = ls.Learn(inputs, outputs);

    double a = poly.Weights[0]; // second-degree coefficient
    double b = poly.Weights[1]; // first-degree coefficient
    double c = poly.Intercept;  // constant term

    double[] predicted = poly.Transform(inputs);
    double error = new SquareLoss(outputs).Loss(predicted);

    // Fit a simple linear regression on the same data for comparison
    var ols = new OrdinaryLeastSquares();
    SimpleLinearRegression mul = ols.Learn(inputs, outputs);
    double a1 = mul.Slope;
    double b1 = mul.Intercept;

    double[] simplepredict = mul.Transform(inputs);
    // Note: SquaredHingeLoss is a classification loss; SquareLoss would be the usual
    // choice for measuring regression error, as done for `poly` above.
    double error2 = new SquaredHingeLoss(outputs).Loss(simplepredict);

    Console.WriteLine("Done");
}
public void learn_test()
{
    #region doc_learn
    // Let's say we have some univariate, continuous sets of input data,
    // and a corresponding univariate, continuous set of output data, such
    // as a set of points in R². A simple linear regression is able to fit
    // a line relating the input variables to the output variables in which
    // the minimum-squared-error of the line and the actual output points
    // is minimum.

    // Declare some sample test data.
    double[] inputs = { 80, 60, 10, 20, 30 };
    double[] outputs = { 20, 40, 30, 50, 60 };

    // Use Ordinary Least Squares to learn the regression
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

    // Use OLS to learn the simple linear regression
    SimpleLinearRegression regression = ols.Learn(inputs, outputs);

    // Compute the output for a given input:
    double y = regression.Transform(85); // The answer will be 28.088

    // We can also extract the slope and the intercept term
    // for the line. Those will be -0.26 and 50.5, respectively.
    double s = regression.Slope;     // -0.264706
    double c = regression.Intercept; // 50.588235
    #endregion

    // Expected slope and intercept
    double eSlope = -0.264706;
    double eIntercept = 50.588235;

    Assert.AreEqual(28.088235294117649, y, 1e-10);
    Assert.AreEqual(eSlope, s, 1e-5);
    Assert.AreEqual(eIntercept, c, 1e-5);
    Assert.IsFalse(double.IsNaN(y));
}
public static object TestRegression(double[] x, double[] y)
{
    SimpleLinearRegression slr = new SimpleLinearRegression();
    double err = slr.Regress(x, y);
    double[] values = slr.Compute(x);

    object[,] ret = new object[values.Length + 3, 2];
    ret[0, 0] = "R^2";
    ret[0, 1] = slr.CoefficientOfDetermination(x, y);
    ret[1, 0] = "Slope";
    ret[1, 1] = slr.Slope;
    ret[2, 0] = "Error";
    ret[2, 1] = err;
    for (int i = 0; i < values.Length; ++i)
    {
        ret[i + 3, 0] = x[i];
        ret[i + 3, 1] = values[i];
    }
    return ret;
}
public void logarithm_learn()
{
    #region doc_learn
    // This is the same data from the example available at
    // http://mathbits.com/MathBits/TISection/Statistics2/logarithmic.htm

    // Declare your inputs and output data
    double[] inputs = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
    double[] outputs = { 6, 9.5, 13, 15, 16.5, 17.5, 18.5, 19, 19.5, 19.7, 19.8 };

    // Transform inputs to logarithms
    double[] logx = Matrix.Log(inputs);

    // Use Ordinary Least Squares to learn the regression
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

    // Use OLS to learn the simple linear regression
    SimpleLinearRegression lr = ols.Learn(logx, outputs);

    // Compute predicted values for inputs
    double[] predicted = lr.Transform(logx);

    // Get an expression representing the learned regression model
    // We just have to remember that 'x' will actually mean 'log(x)'
    string result = lr.ToString("N4", CultureInfo.InvariantCulture);

    // Result will be "y(x) = 6.1082x + 6.0993"

    // The mean squared error between the expected and the predicted is
    double error = new SquareLoss(outputs).Loss(predicted); // 0.261454
    #endregion

    Assert.AreEqual(0.26145460024250794, error, 1e-8);
    Assert.AreEqual(6.1081800414945704, lr.Slope, 1e-8);
    Assert.AreEqual(6.0993411396126653, lr.Intercept, 1e-8);
    Assert.AreEqual("y(x) = 6.1082x + 6.0993", result);
}
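// A short hedged sketch (not in the original test): because the model above was
// trained on log-transformed inputs, a prediction for a new point must apply the
// same transform before calling Transform.
double yAt12 = lr.Transform(Math.Log(12.0));   // predicted output for x = 12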
private void btnCompute_Click(object sender, EventArgs e) { DataTable dataTable = dgvAnalysisSource.DataSource as DataTable; if (dataTable == null) { return; } // Gather the available data double[][] data = dataTable.ToArray(); // First, fit simple linear regression directly for comparison reasons. double[] x = data.GetColumn(0); // Extract the independent variable double[] y = data.GetColumn(1); // Extract the dependent variable // Create a simple linear regression var regression = new SimpleLinearRegression(); // Estimate a line passing through the (x, y) points double sumOfSquaredErrors = regression.Regress(x, y); // Now, compute the values predicted by the // regression for the original input points double[] commonOutput = regression.Compute(x); // Now, fit simple linear regression using RANSAC int maxTrials = (int)numMaxTrials.Value; int minSamples = (int)numSamples.Value; double probability = (double)numProbability.Value; double errorThreshold = (double)numThreshold.Value; // Create a RANSAC algorithm to fit a simple linear regression var ransac = new RANSAC <SimpleLinearRegression>(minSamples) { Probability = probability, Threshold = errorThreshold, MaxEvaluations = maxTrials, // Define a fitting function Fitting = delegate(int[] sample) { // Retrieve the training data double[] inputs = x.Submatrix(sample); double[] outputs = y.Submatrix(sample); // Build a Simple Linear Regression model var r = new SimpleLinearRegression(); r.Regress(inputs, outputs); return(r); }, // Define a check for degenerate samples Degenerate = delegate(int[] sample) { // In this case, we will not be performing such checks. return(false); }, // Define a inlier detector function Distances = delegate(SimpleLinearRegression r, double threshold) { List <int> inliers = new List <int>(); for (int i = 0; i < x.Length; i++) { // Compute error for each point double error = r.Compute(x[i]) - y[i]; // If the squared error is below the given threshold, // the point is considered to be an inlier. if (error * error < threshold) { inliers.Add(i); } } return(inliers.ToArray()); } }; // Now that the RANSAC hyperparameters have been specified, we can // compute another regression model using the RANSAC algorithm: int[] inlierIndices; SimpleLinearRegression robustRegression = ransac.Compute(data.Length, out inlierIndices); if (robustRegression == null) { lbStatus.Text = "RANSAC failed. Please try again after adjusting its parameters."; return; // the RANSAC algorithm did not find any inliers and no model was created } // Compute the output of the model fitted by RANSAC double[] ransacOutput = robustRegression.Compute(x); // Create scatter plot comparing the outputs from the standard // linear regression and the RANSAC-fitted linear regression. CreateScatterplot(graphInput, x, y, commonOutput, ransacOutput, x.Submatrix(inlierIndices), y.Submatrix(inlierIndices)); lbStatus.Text = "Regression created! Please compare the RANSAC " + "regression (blue) with the simple regression (in red)."; }
public void prediction_test() { // example data from http://www.real-statistics.com/regression/confidence-and-prediction-intervals/ double[][] input = { new double[] { 5, 80 }, new double[] { 23, 78 }, new double[] { 25, 60 }, new double[] { 48, 53 }, new double[] { 17, 85 }, new double[] { 8, 84 }, new double[] { 4, 73 }, new double[] { 26, 79 }, new double[] { 11, 81 }, new double[] { 19, 75 }, new double[] { 14, 68 }, new double[] { 35, 72 }, new double[] { 29, 58 }, new double[] { 4, 92 }, new double[] { 23, 65 }, }; double[] cig = input.GetColumn(0); double[] exp = input.GetColumn(1); // Use Ordinary Least Squares to learn the regression OrdinaryLeastSquares ols = new OrdinaryLeastSquares(); // Use OLS to learn the simple linear regression SimpleLinearRegression regression = ols.Learn(cig, exp); Assert.AreEqual(1, regression.NumberOfInputs); Assert.AreEqual(1, regression.NumberOfOutputs); double x0 = 20; double y0 = regression.Transform(x0); Assert.AreEqual(y0, 73.1564, 1e-4); double syx = regression.GetStandardError(cig, exp); Assert.AreEqual(7.974682, syx, 1e-5); double ssx = cig.Subtract(cig.Mean()).Pow(2).Sum(); Assert.AreEqual(2171.6, ssx, 1e-5); double n = exp.Length; double x0c = x0 - cig.Mean(); double var = 1 / n + (x0c * x0c) / ssx; Assert.AreEqual(0.066832443052741455, var, 1e-10); double expected = syx * Math.Sqrt(var); double actual = regression.GetStandardError(x0, cig, exp); Assert.AreEqual(2.061612, expected, 1e-5); Assert.AreEqual(expected, actual, 1e-10); DoubleRange ci = regression.GetConfidenceInterval(x0, cig, exp); Assert.AreEqual(ci.Min, 68.702569616457751, 1e-5); Assert.AreEqual(ci.Max, 77.610256563931543, 1e-5); actual = regression.GetPredictionStandardError(x0, cig, exp); Assert.AreEqual(8.2368569010499666, actual, 1e-10); DoubleRange pi = regression.GetPredictionInterval(x0, cig, exp); Assert.AreEqual(pi.Min, 55.361765613397054, 1e-5); Assert.AreEqual(pi.Max, 90.95106056699224, 1e-5); }
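// A hedged illustration of how the quantities asserted above relate: the prediction
// standard error adds the irreducible noise term to the standard error of the fitted
// mean, so predSE² = syx² + confSE². Plugging in the values from the test
// (syx = 7.974682, confSE = 2.061612) recovers GetPredictionStandardError.
double syx = 7.974682;     // standard error of the regression
double confSE = 2.061612;  // standard error of the fitted mean at x0 = 20
double predSE = Math.Sqrt(syx * syx + confSE * confSE);   // ≈ 8.23686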
public void TrainWithOrdinaryLeastSquares(double[] input, double[] output)
{
    var ols = new OrdinaryLeastSquares();
    regression = ols.Learn(input, output);
}
public RecognitionState Recognition(double[] inputs, double[] outputs)
{
    RecognitionState ret = new RecognitionState();
    if (inputs.Length > 5)
    {
        /* First-order (simple) linear regression */
        SimpleLinearRegression regression = this.ols.Learn(inputs, outputs, null);
        double k = regression.Slope;

        const double TAN45 = 1.0;
        const double TAN15 = 0.2679492;

        // Classify the overall slope by comparing |k| against tan(45°) and tan(15°)
        double k1 = Math.Abs(k);
        if (k1 > TAN45) { ret.Slope = SlopeState.Steep; }
        else if (k1 > TAN15) { ret.Slope = SlopeState.moderate; }
        else { ret.Slope = SlopeState.gentle; }

        /* Second-order (quadratic) regression */
        PolynomialRegression poly = this.pls.Learn(inputs, outputs, null);
        double a = poly.Weights[0]; // quadratic coefficient
        double b = poly.Weights[1]; // linear coefficient

        // Combine the linear slope and the curvature to decide the shape
        if (k > 0 && a > 0) { ret.Shape = ShapeState.Rise; }
        else if (k > 0 && a < 0) { ret.Shape = ShapeState.FallAfterRise; }
        else if (k < 0 && a < 0) { ret.Shape = ShapeState.Fall; }
        else if (k < 0 && a > 0) { ret.Shape = ShapeState.RiseAfterFall; }

        // The derivative of ax² + bx + c at the last input, 2*a*x + b,
        // gives the current rate of change
        double last = inputs[inputs.Length - 1];
        double s = 2 * a * last + b;
        double s1 = Math.Abs(s);
        if (s1 > TAN45) { ret.Speed = SpeedState.Rapid; }
        else if (s1 > TAN15) { ret.Speed = SpeedState.Steady; }
        else { ret.Speed = SpeedState.Slow; }

        /* Display the graphs */
        if (this.showForm != null)
        {
            double[] outputs2 = regression.Transform(inputs);
            double[] outputs3 = poly.Transform(inputs);
            this.showForm.ShowGraph(inputs, outputs, inputs, outputs2, inputs, outputs3);
        }
        Console.WriteLine("k={0},a={1},b={2}", k, a, b);
    }
    return ret;
}
public void new_api_test() { #region doc_learn // Fix the random number generator Accord.Math.Random.Generator.Seed = 0; double[,] data = // This is the same data used in the RANSAC sample app { { 1.0, 0.79 }, { 3, 2.18 }, { 5, 5.99 }, { 7.0, 7.65 }, { 9.0, 9.55 }, { 11, 11.89 }, { 13, 13.73 }, { 15.0, 14.77 }, { 17.0, 18.00 }, { 1.2, 1.45 }, { 1.5, 1.18 }, { 1.8, 1.92 }, { 2.1, 1.47 }, { 2.4, 2.41 }, { 2.7, 2.35 }, { 3.0, 3.41 }, { 3.3, 3.78 }, { 3.6, 3.21 }, { 3.9, 4.76 }, { 4.2, 5.03 }, { 4.5, 4.19 }, { 4.8, 3.81 }, { 5.1, 6.07 }, { 5.4, 5.74 }, { 5.7, 6.39 }, { 6, 6.11 }, { 6.3, 6.86 }, { 6.6, 6.35 }, { 6.9, 7.9 }, { 7.2, 8.04 }, { 7.5, 8.48 }, { 7.8, 8.07 }, { 8.1, 8.22 }, { 8.4, 8.41 }, { 8.7, 9.4 }, { 9, 8.8 }, { 9.3, 8.44 }, { 9.6, 9.32 }, { 9.9, 9.18 }, { 10.2, 9.86 }, { 10.5, 10.16 }, { 10.8, 10.28 }, { 11.1, 11.07 }, { 11.4, 11.66 }, { 11.7, 11.13 }, { 12, 11.55 }, { 12.3, 12.62 }, { 12.6, 12.27 }, { 12.9, 12.33 }, { 13.2, 12.37 }, { 13.5, 12.75 }, { 13.8, 14.44 }, { 14.1, 14.71 }, { 14.4, 13.72 }, { 14.7, 14.54 }, { 15, 14.67 }, { 15.3, 16.04 }, { 15.6, 15.21 }, { 1, 3.9 }, { 2, 11.5 }, { 3.0, 13.0 }, { 4, 0.9 }, { 5, 5.5 }, { 6, 16.2 }, { 7.0, 0.8 }, { 8, 9.4 }, { 9, 9.5 }, { 10, 17.5 }, { 11.0, 6.3 }, { 12, 12.6 }, { 13, 1.5 }, { 14, 1.5 }, { 2.0, 10 }, { 3, 9 }, { 15, 2 }, { 15.5, 1.2 }, }; // First, fit simple linear regression directly for comparison reasons. double[] x = data.GetColumn(0); // Extract the independent variable double[] y = data.GetColumn(1); // Extract the dependent variable // Use Ordinary Least Squares to learn the regression OrdinaryLeastSquares ols = new OrdinaryLeastSquares(); // Estimate a line passing through the (x, y) points SimpleLinearRegression regression = ols.Learn(x, y); // Now, compute the values predicted by the // regression for the original input points double[] commonOutput = regression.Transform(x); // Now, fit simple linear regression using RANSAC int maxTrials = 1000; int minSamples = 20; double probability = 0.950; double errorThreshold = 1000; // Create a RANSAC algorithm to fit a simple linear regression var ransac = new RANSAC <SimpleLinearRegression>(minSamples) { Probability = probability, Threshold = errorThreshold, MaxEvaluations = maxTrials, // Define a fitting function Fitting = (int[] sample) => { // Build a Simple Linear Regression model return(new OrdinaryLeastSquares() .Learn(x.Get(sample), y.Get(sample))); }, // Define a inlier detector function Distances = (SimpleLinearRegression r, double threshold) => { var inliers = new List <int>(); for (int i = 0; i < x.Length; i++) { // Compute error for each point double error = r.Transform(x[i]) - y[i]; // If the square error is low enough, if (error * error < threshold) { inliers.Add(i); // the point is considered an inlier. 
} } return(inliers.ToArray()); } }; // Now that the RANSAC hyperparameters have been specified, we can // compute another regression model using the RANSAC algorithm: int[] inlierIndices; SimpleLinearRegression robustRegression = ransac.Compute(data.Rows(), out inlierIndices); // Compute the output of the model fitted by RANSAC double[] ransacOutput = robustRegression.Transform(x); #endregion Assert.AreEqual(ransac.TrialsNeeded, 0); Assert.AreEqual(ransac.TrialsPerformed, 1); string a = inlierIndices.ToCSharp(); string b = ransacOutput.ToCSharp(); int[] expectedInliers = new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75 }; double[] expectedOutput = new double[] { 1.96331236445045, 3.42042856976283, 4.87754477507521, 6.33466098038758, 7.79177718569996, 9.24889339101234, 10.7060095963247, 12.1631258016371, 13.6202420069495, 2.10902398498169, 2.32759141577855, 2.5461588465754, 2.76472627737226, 2.98329370816912, 3.20186113896597, 3.42042856976283, 3.63899600055969, 3.85756343135654, 4.0761308621534, 4.29469829295026, 4.51326572374711, 4.73183315454397, 4.95040058534082, 5.16896801613768, 5.38753544693454, 5.6061028777314, 5.82467030852825, 6.04323773932511, 6.26180517012196, 6.48037260091882, 6.69894003171568, 6.91750746251253, 7.13607489330939, 7.35464232410625, 7.5732097549031, 7.79177718569996, 8.01034461649682, 8.22891204729367, 8.44747947809053, 8.66604690888738, 8.88461433968424, 9.1031817704811, 9.32174920127795, 9.54031663207481, 9.75888406287167, 9.97745149366852, 10.1960189244654, 10.4145863552622, 10.6331537860591, 10.8517212168559, 11.0702886476528, 11.2888560784497, 11.5074235092465, 11.7259909400434, 11.9445583708402, 12.1631258016371, 12.3816932324339, 12.6002606632308, 1.96331236445045, 2.69187046710664, 3.42042856976283, 4.14898667241902, 4.87754477507521, 5.6061028777314, 6.33466098038758, 7.06321908304377, 7.79177718569996, 8.52033528835615, 9.24889339101234, 9.97745149366852, 10.7060095963247, 11.4345676989809, 2.69187046710664, 3.42042856976283, 12.1631258016371, 12.5274048529652 }; Assert.IsTrue(inlierIndices.IsEqual(expectedInliers)); Assert.IsTrue(ransacOutput.IsEqual(expectedOutput, 1e-10)); }
public override void Train(double[] predictors, double[] results)
{
    regression = new SimpleLinearRegression();
    regression.Regress(predictors, results);
}
public RegressionService()
{
    regression = new SimpleLinearRegression();
}
static void Main(string[] args)
{
    // Build the matrix for parabola regression (order n)
    Matrix mParabolaRegression = Matrix.GetMatrixFromTXT("data\\parabola_regression.txt", '\t');
    NOrderSimpleParabolaRegression nospr = new NOrderSimpleParabolaRegression();
    Matrix z = nospr.GetRegressionCoefficients(mParabolaRegression, 2);
    double yVal = nospr.GetYForVectorX(z, 84.0);

    // Load a matrix from file
    Matrix mFromFile = Matrix.GetMatrixFromTXT("data\\regress_data.txt", '\t');

    // Multiple linear regression
    var mlr = new MultipleLinearRegression();
    int[] rows = Enumerable.Range(0, mFromFile.matrixBase.GetLength(0))
        .Select(i => i)
        .ToArray();
    Matrix bVector = mlr.GetBCoefficientsForMatrix(
        mFromFile.GetMatrixPart(rows, new int[] { 1, 2, 3 }),
        mFromFile.GetMatrixPart(rows, new int[] { 0 }));

    // Compute y for x0 = 1, x1 = 81, x2 = 259
    double y = mlr.GetYForVectorXs(bVector, new int[] { 1, 81, 259 });
    Console.WriteLine(mFromFile + "\n");
    Console.WriteLine("y for x0 = 1, x1 = 81, x2 = 259 = " + y);

    double[,] a = new double[3, 2];
    a[0, 0] = 2; a[0, 1] = 6;
    a[1, 0] = 7; a[1, 1] = 3;
    a[2, 0] = 5; a[2, 1] = 2;

    double[,] b = new double[2, 3];
    b[0, 0] = 1; b[0, 1] = 7; b[0, 2] = 3;
    b[1, 0] = 2; b[1, 1] = 5; b[1, 2] = 6;

    //double[,] sqvArr = new double[2, 2];
    //sqvArr[0, 0] = 1;
    //sqvArr[0, 1] = 7;
    //sqvArr[1, 0] = 3;
    //sqvArr[1, 1] = 2;

    double[,] sqvArr = new double[3, 3];
    sqvArr[0, 0] = -1; sqvArr[0, 1] = -2; sqvArr[0, 2] = 2;
    sqvArr[1, 0] = 2;  sqvArr[1, 1] = 1;  sqvArr[1, 2] = 1;
    sqvArr[2, 0] = 3;  sqvArr[2, 1] = 4;  sqvArr[2, 2] = 5;

    Matrix matrixA = new Matrix(a);
    Matrix matrixB = new Matrix(b);
    Matrix matrixSkv = new Matrix(sqvArr);

    // Invert matrix
    Matrix invertedSkv = matrixSkv.Invert();

    // Determinant of the matrix
    double determinant = matrixSkv.GetDeterminant();

    // Transpose matrix
    Console.WriteLine(matrixA);
    Console.WriteLine("\n");
    matrixA = matrixA.Transpose();
    Console.WriteLine(matrixA);

    // Multiply by a scalar
    Console.WriteLine(matrixA);
    matrixA = matrixA.MultiplyToScalar(10);
    Console.WriteLine(matrixA);

    Matrix matrixC = Matrix.MultiplyMatrices(matrixA, matrixB);
    Console.WriteLine("Multiply matrix\n\n{0}\n\nto matrix\n\n{1}\n\nIt gives:\n\n{2}\n\n",
        matrixA, matrixB, matrixC);

    // Calculate simple linear regression
    SimpleLinearRegression slr = new SimpleLinearRegression(xVals, yVals);
    double yPredicted = slr.PredictY(4);
    Console.WriteLine(yPredicted);
    Console.ReadLine();
}
public HttpResponseMessage ScrapeSheet(SheetDocumentModel model)
{
    try
    {
        var watch = System.Diagnostics.Stopwatch.StartNew();
        var filePath = AppDomain.CurrentDomain.BaseDirectory + "//RawData//" + model.FileName;
        File.WriteAllBytes(filePath, Convert.FromBase64String(model.FileData));

        var connectionString = $"Provider=Microsoft.ACE.OLEDB.12.0;Data Source={filePath};Extended Properties=Excel 12.0;";
        var adapter = new OleDbDataAdapter("SELECT * FROM [Sheet1$]", connectionString);
        var ds = new DataSet();
        adapter.Fill(ds, "DataTable");
        var data = ds.Tables["DataTable"].AsEnumerable();

        var scrapedResult = data.Where(w => w.Field<DateTime?>("DateTime") != null).Select(x => new SheetResult()
        {
            DateTime = x.Field<DateTime?>("DateTime"),
            Value = x.Field<double?>("Value"),
            Unit = x.Field<string>("Unit"),
        }).ToList();

        var regressionInput = scrapedResult.Where(w => w.Value != null).Select(s => Convert.ToDouble(s.DateTime?.Hour)).ToArray();
        var regressionOutput = scrapedResult.Where(w => w.Value != null).Select(s => Convert.ToDouble(s.Value.Value)).ToArray();

        // Use Ordinary Least Squares to learn the regression
        OrdinaryLeastSquares ols = new OrdinaryLeastSquares();

        // Use OLS to learn the simple linear regression
        SimpleLinearRegression regression = ols.Learn(regressionInput, regressionOutput);

        // Fill the missing values with predictions; the regression is re-learned
        // after each imputed value so later predictions also see the filled data.
        foreach (var v in scrapedResult)
        {
            if (v.Value != null) { continue; }
            v.Value = regression.Transform(Convert.ToDouble(v.DateTime?.Hour));
            v.PredictedValue = true;
            regressionInput = scrapedResult.Where(w => w.Value != null).Select(s => Convert.ToDouble(s.DateTime?.Hour)).ToArray();
            regressionOutput = scrapedResult.Where(w => w.Value != null).Select(s => Convert.ToDouble(s.Value.Value)).ToArray();
            regression = ols.Learn(regressionInput, regressionOutput);
        }

        var list = new JavaScriptSerializer().Serialize(scrapedResult);
        var dataFormatted = JToken.Parse(list).ToString(Formatting.Indented);
        _db.Data.Add(new DataEntity()
        {
            CreatedOn = DateTime.Now,
            IdCollectionType = (int)CollectionTypeEnum.Sheet,
            JsonObject = dataFormatted
        });
        _db.SaveChanges();

        watch.Stop();
        var elapsedMs = watch.ElapsedMilliseconds;
        System.Diagnostics.Debug.WriteLine("Sheet scraper time: " + elapsedMs);
        return Request.CreateResponse(HttpStatusCode.OK, scrapedResult);
    }
    catch (Exception ex)
    {
        return Request.CreateResponse(HttpStatusCode.InternalServerError);
    }
}
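// A hedged alternative sketch for the imputation step above: learn the line once from
// the rows that already have values and fill every gap in a single pass. This avoids
// re-fitting the model after each imputed row and keeps the fit based only on observed
// data. The SheetResult/scrapedResult names follow the snippet above; this is an
// illustration, not the original controller code.
void FillMissingValues(List<SheetResult> scrapedResult)
{
    var observed = scrapedResult.Where(w => w.Value != null).ToList();
    SimpleLinearRegression fit = new OrdinaryLeastSquares().Learn(
        observed.Select(s => Convert.ToDouble(s.DateTime?.Hour)).ToArray(),
        observed.Select(s => s.Value.Value).ToArray());

    foreach (var row in scrapedResult.Where(w => w.Value == null).ToList())
    {
        row.Value = fit.Transform(Convert.ToDouble(row.DateTime?.Hour));
        row.PredictedValue = true;
    }
}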