public void RegressTest2()
{
    // Regression with no automatic intercept; the second input column is a
    // constant 1, so its coefficient plays the role of the intercept.
    MultipleLinearRegression target = new MultipleLinearRegression(1, false);

    double[][] inputs =
    {
        new double[] { 80, 1 },
        new double[] { 60, 1 },
        new double[] { 10, 1 },
        new double[] { 20, 1 },
        new double[] { 30, 1 },
    };
    double[] outputs = { 20, 40, 30, 50, 60 };

    double error = target.Regress(inputs, outputs);

    double slope = target.Coefficients[0];
    double intercept = target.Coefficients[1];

    Assert.AreEqual(-0.264706, slope, 1e-5);
    Assert.AreEqual(50.588235, intercept, 1e-5);
    Assert.AreEqual(761.764705, error, 1e-5);

    double r = target.CoefficientOfDetermination(inputs, outputs);
    Assert.AreEqual(0.23823529, r, 1e-6);

    // BUGFIX: the expected string uses a decimal comma, so the culture must
    // be pinned explicitly (as in RegressTest); the bare ToString() call made
    // this test fail on any dot-decimal culture such as en-US.
    string str = target.ToString(null, System.Globalization.CultureInfo.GetCultureInfo("pt-BR"));
    Assert.AreEqual("y(x0, x1) = -0,264705882352942*x0 + 50,5882352941177*x1", str);
}
public void RegressTest()
{
    // One explanatory variable with an automatically-estimated intercept.
    var model = new MultipleLinearRegression(1, true);

    double[][] x =
    {
        new double[] { 80 },
        new double[] { 60 },
        new double[] { 10 },
        new double[] { 20 },
        new double[] { 30 },
    };
    double[] y = { 20, 40, 30, 50, 60 };

    double sumSquaredError = model.Regress(x, y);

    double fittedSlope = model.Coefficients[0];
    double fittedIntercept = model.Coefficients[1];

    Assert.AreEqual(-0.264706, fittedSlope, 1e-5);
    Assert.AreEqual(50.588235, fittedIntercept, 1e-5);
    Assert.AreEqual(761.764705, sumSquaredError, 1e-5);

    double r2 = model.CoefficientOfDetermination(x, y);
    Assert.AreEqual(0.23823529, r2, 1e-6);

    // pt-BR formats with a decimal comma, matching the expected string.
    string text = model.ToString(null, System.Globalization.CultureInfo.GetCultureInfo("pt-BR"));
    Assert.AreEqual("y(x0) = -0,264705882352941*x0 + 50,5882352941176", text);
}
public static void test2()
{
    // Fit the plane z = ax + by + c through four points that all lie at z = 1.
    var ols = new OrdinaryLeastSquares { UseIntercept = true };

    double[][] points =
    {
        new double[] { 1, 1 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 0, 0 },
    };
    double[] heights = { 1, 1, 1, 1 };

    MultipleLinearRegression model = ols.Learn(points, heights);

    double a = model.Weights[0];   // a = 0
    double b = model.Weights[1];   // b = 0
    double c = model.Intercept;    // c = 1

    // Predictions and their squared-error loss against the targets.
    double[] predictions = model.Transform(points);
    double loss = new SquareLoss(heights).Loss(predictions);
}
public void Process()
{
    // Read the abalone data set from the embedded resources.
    Assembly assembly = Assembly.GetExecutingAssembly();
    var res = assembly.GetManifestResourceStream("AIFH_Vol1.Resources.abalone.csv");

    // Did we fail to read the resource?
    if (res == null)
    {
        Console.WriteLine("Can't read iris data from embedded resources.");
        return;
    }

    // BUGFIX: the original closed the reader twice by hand; a 'using' block
    // guarantees it is closed exactly once, even if DataSet.Load throws.
    DataSet ds;
    using (var istream = new StreamReader(res))
    {
        ds = DataSet.Load(istream);
    }

    // The following ranges are set up for the Abalone data set. If you wish
    // to normalize other files you will need to modify these calls.
    ds.EncodeOneOfN(0, 0, 1);

    var trainingData = ds.ExtractSupervised(0, 10, 10, 1);

    // One least-squares iteration fits the closed-form solution.
    var reg = new MultipleLinearRegression(10);
    var train = new TrainLeastSquares(reg, trainingData);
    train.Iteration();

    Query(reg, trainingData);
    Console.WriteLine("Error: " + train.Error);
}
public IActionResult GetLinarRegressionResult([FromBody] GetLinearRegressionRequest request)
{
    // Fit a multiple linear regression using the Golub-Reinsch SVD
    // decomposition and return the fitted model as a DTO.
    var regression = new MultipleLinearRegression(MatrixDecompositionAlgs.GOLUB_REINSCH);
    var fit = regression.Regress(request.Inputs, request.Outputs);
    return new OkObjectResult(fit.LinearRegression.ToDto());
}
public void TestTrain()
{
    // Two clusters: small x values labeled 1.0, large x values labeled 0.0.
    double[][] features =
    {
        new[] { 1.0 }, new[] { 3.0 }, new[] { 2.0 }, new[] { 200.0 }, new[] { 230.0 }
    };
    double[][] labels =
    {
        new[] { 1.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 }, new[] { 0.0 }
    };
    var dataset = BasicData.ConvertArrays(features, labels);

    // A linear model pushed through a logit link, trained by one round of
    // reweighted least squares.
    var model = new MultipleLinearRegression(1) { LinkFunction = new LogitLinkFunction() };
    var trainer = new TrainReweightLeastSquares(model, dataset);
    trainer.Iteration();

    double[] probe = { 0 };
    double[] prediction = model.ComputeRegression(probe);
    Assert.AreEqual(0.883301730269988, prediction[0], AIFH.DefaultPrecision);
}
public void ExampleTest3()
{
    // Suppose we would like to map the continuous values in the
    // second column to the integer values in the first column.
    var powers = new[]
    {
        new[] { 1.0, 1.0, 1.0, 1.0 },
        new[] { 2.0, 4.0, 8.0, 16.0 },
        new[] { 3.0, 9.0, 27.0, 81.0 },
        new[] { 4.0, 16.0, 64.0, 256.0 },
    };
    var observed = new[] { 0.73, 3.24, 8.31, 16.72 };

    // Solve subject to every coefficient being non-negative.
    var model = new MultipleLinearRegression(4);
    var solver = new NonNegativeLeastSquares(model) { MaxIterations = 100 };
    solver.Run(powers, observed);

    Assert.AreEqual(0.1, solver.Coefficients[0], 1e-3);
    Assert.AreEqual(0.5, solver.Coefficients[1], 1e-3);
    Assert.AreEqual(0.13, solver.Coefficients[2], 1e-3);
    Assert.AreEqual(0, solver.Coefficients[3], 1e-3);
}
private void button1_Click(object sender, EventArgs e)
{
    // Fit z = ax + by + c through the four unit-square corners, all at z = 1.
    var model = new MultipleLinearRegression(2, true);

    double[][] corners =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    double[] targets = { 1, 1, 1, 1 };

    double fitError = model.Regress(corners, targets);

    // Dump the error and the three fitted coefficients.
    Console.WriteLine("Greska " + fitError);
    Console.WriteLine("a " + model.Coefficients[0]);
    Console.WriteLine("b " + model.Coefficients[1]);
    Console.WriteLine("c " + model.Coefficients[2]);
}
public void load(string path)
{
    // Deserialize a BuySellRegressionPair straight from the XML file and
    // unpack it into the two model fields.
    //
    // The original implementation parsed the document twice (XmlDocument.Load,
    // then re-serialized OuterXml through a StringReader and the obsolete
    // XmlTextReader); reading the file once is equivalent and cheaper, and
    // 'using' disposes the reader without explicit Close() calls.
    XmlSerializer serializer = new XmlSerializer(typeof(BuySellRegressionPair));
    using (XmlReader reader = XmlReader.Create(path))
    {
        BuySellRegressionPair pair = (BuySellRegressionPair)serializer.Deserialize(reader);
        logisticBuy = pair.buy;
        logisticSell = pair.sell;
    }
}
/// <summary>
///   Creates a new Generalized Linear Regression Model.
/// </summary>
///
/// <param name="function">The link function to use.</param>
///
public GeneralizedLinearRegression(ILinkFunction function)
{
    // Wrap a plain linear model with the supplied link function; the model
    // starts as a single-input, single-output regression.
    this.linkFunction = function;
    this.linear = new MultipleLinearRegression();
    this.NumberOfInputs = 1;
    this.NumberOfOutputs = 1;
}
private MultipleLinearRegression PerformRegression(List <LeadingIndicator> indicators)
{
    // Build the design matrix (stock index, M2 level) and the GDP target
    // vector from the first four indicator observations.
    double[][] predictors = new double[4][];
    double[] gdp = new double[4];
    for (int i = 0; i < 4; i++)
    {
        predictors[i] = new double[] { indicators[i].StockIndex, indicators[i].M2Level };
        gdp[i] = indicators[i].GdpOutput;
    }

    // Ordinary Least Squares with an intercept term estimates the model.
    var ols = new OrdinaryLeastSquares { UseIntercept = true };
    return ols.Learn(predictors, gdp);
}
public void RegressTest5()
{
    // output = 4x - y with y = 2x - 1 contains no constant term, so the fit
    // should be exact whether or not an intercept is estimated.
    const int count = 1000;
    double[][] inputs = new double[count][];
    double[] output = new double[count];
    for (int i = 0; i < count; i++)
    {
        double x = i + 1;
        double y = 2 * (i + 1) - 1;
        inputs[i] = new[] { x, y };
        output[i] = 4 * x - y; // no constant term
    }

    // With an intercept column.
    var withIntercept = new MultipleLinearRegression(2, true);
    double errorWith = withIntercept.Regress(inputs, output);
    Assert.IsTrue(withIntercept.HasIntercept);
    Assert.AreEqual(0, errorWith, 1e-10);

    // And without one.
    var withoutIntercept = new MultipleLinearRegression(2, false);
    double errorWithout = withoutIntercept.Regress(inputs, output);
    Assert.IsFalse(withoutIntercept.HasIntercept);
    Assert.AreEqual(0, errorWithout, 1e-10);
}
public void ExampleTest3()
{
    // Non-negative least squares on the powers 1..4 of each input value.
    var designMatrix = new[]
    {
        new[] { 1.0, 1.0, 1.0, 1.0 },
        new[] { 2.0, 4.0, 8.0, 16.0 },
        new[] { 3.0, 9.0, 27.0, 81.0 },
        new[] { 4.0, 16.0, 64.0, 256.0 },
    };
    var targets = new[] { 0.73, 3.24, 8.31, 16.72 };

    var model = new MultipleLinearRegression(4);
    var solver = new NonNegativeLeastSquares(model) { MaxIterations = 100 };
    solver.Run(designMatrix, targets);

    // The solver and the wrapped regression must expose the same solution.
    Assert.AreEqual(0.1, solver.Coefficients[0], 1e-3);
    Assert.AreEqual(0.5, solver.Coefficients[1], 1e-3);
    Assert.AreEqual(0.13, solver.Coefficients[2], 1e-3);
    Assert.AreEqual(0, solver.Coefficients[3], 1e-3);
    Assert.AreEqual(0.1, model.Coefficients[0], 1e-3);
    Assert.AreEqual(0.5, model.Coefficients[1], 1e-3);
    Assert.AreEqual(0.13, model.Coefficients[2], 1e-3);
}
public void RegressTest6()
{
    // Degenerate design matrix (all zeros): the coefficients collapse to 0
    // and the full output variance remains as error.
    var model = new MultipleLinearRegression(2, false);

    double[][] zeros =
    {
        new double[] { 0, 0 },
        new double[] { 0, 0 },
        new double[] { 0, 0 },
        new double[] { 0, 0 },
        new double[] { 0, 0 },
    };
    double[] targets = { 20, 40, 30, 50, 60 };

    double fitError = model.Regress(zeros, targets);

    Assert.AreEqual(0, model.Coefficients[0], 1e-5);
    Assert.AreEqual(0, model.Coefficients[1], 1e-5);
    Assert.AreEqual(9000, fitError);

    // r² goes strongly negative: the model is worse than predicting the mean.
    double r2 = model.CoefficientOfDetermination(zeros, targets);
    Assert.AreEqual(-8, r2, 1e-6);

    string text = model.ToString(null, System.Globalization.CultureInfo.GetCultureInfo("pt-BR"));
    Assert.AreEqual("y(x0, x1) = 0*x0 + 0*x1", text);
}
public static void ModelTrain()
{
    // BUGFIX/perf: the original called repo.List() four separate times
    // (count, two array-sizing calls, and the loop). Materialize it once so
    // the repository is only queried a single time.
    var rooms = repo.List().ToList();

    double[][] inp = new double[rooms.Count][];
    double[] outp = new double[rooms.Count];

    int i = 0;
    foreach (Room roomAvito in rooms)
    {
        // Encode the market segment: 0 for "Вторичка" (secondary), 1 otherwise.
        int n = 1;
        if (roomAvito.room_type == "Вторичка")
        {
            n = 0;
        }

        // Look up the metro weighting factor for this room's station.
        double k = 0;
        foreach (MetroInfo info in MetroInfos)
        {
            if (info.metro == roomAvito.metro)
            {
                k = info.k;
                break;
            }
        }

        inp[i] = new double[] { k, roomAvito.centre_distance, roomAvito.metro_distance, roomAvito.S, roomAvito.num, n };
        outp[i] = (int)roomAvito.price;
        i++;
    }

    // Fixed seed keeps the robust fit reproducible.
    Accord.Math.Random.Generator.Seed = 0;

    // Linear model: robust OLS with an intercept.
    var ols = new OrdinaryLeastSquares { UseIntercept = true, IsRobust = true };
    regression = ols.Learn(inp, outp);

    // Quadratic model: expand the features to degree-2 polynomial terms,
    // then fit a second OLS regression on the expanded inputs.
    Polynomial p = new Polynomial(2, 1);
    double[][] z = p.Transform(inp);
    var ols1 = new OrdinaryLeastSquares { UseIntercept = true };
    regression1 = ols1.Learn(z, outp);
}
public void prediction_test()
{
    // Example from http://www.real-statistics.com/multiple-regression/confidence-and-prediction-intervals/
    var table = Accord.IO.CsvReader.FromText(Resources.linreg, true).ToTable();

    // Target and the three predictor columns.
    double[] poverty = table.Columns["Poverty"].ToArray();
    double[][] features = table.ToArray("Infant Mort", "White", "Crime");

    // Fit the multiple linear regression with Ordinary Least Squares.
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares();
    MultipleLinearRegression model = ols.Learn(features, poverty);

    Assert.AreEqual(3, model.NumberOfInputs);
    Assert.AreEqual(1, model.NumberOfOutputs);
    Assert.AreEqual(0.443650703716698, model.Intercept, 1e-5);
    Assert.AreEqual(1.2791842411083394, model.Weights[0], 1e-5);
    Assert.AreEqual(0.036259242392669415, model.Weights[1], 1e-5);
    Assert.AreEqual(0.0014225014835705938, model.Weights[2], 1e-5);

    // Residual standard error of the fit.
    double residualError = model.GetStandardError(features, poverty);
    Assert.AreEqual(residualError, 2.4703520840798507, 1e-5);

    // Standard errors of the individual coefficients (intercept last).
    double[][] infoMatrix = ols.GetInformationMatrix();
    double standardError = model.GetStandardError(features, poverty);
    double[] coefficientErrors = model.GetStandardErrors(standardError, infoMatrix);
    Assert.AreEqual(0.30063086032754965, coefficientErrors[0], 1e-10);
    Assert.AreEqual(0.033603448179240082, coefficientErrors[1], 1e-10);
    Assert.AreEqual(0.0022414548866296342, coefficientErrors[2], 1e-10);
    Assert.AreEqual(3.9879881671805824, coefficientErrors[3], 1e-10);

    // Point prediction for a new observation.
    double[] query = new double[] { 7, 80, 400 };
    double prediction = model.Transform(query);
    Assert.AreEqual(prediction, 12.867680376316864, 1e-5);

    double queryError = model.GetStandardError(query, standardError, infoMatrix);
    Assert.AreEqual(0.35902764658470271, queryError, 1e-10);

    // Confidence interval around the fitted mean response.
    DoubleRange confidence = model.GetConfidenceInterval(query, standardError, features.Length, infoMatrix);
    Assert.AreEqual(confidence.Min, 12.144995206616116, 1e-5);
    Assert.AreEqual(confidence.Max, 13.590365546017612, 1e-5);

    // Prediction interval for a single future observation (wider).
    queryError = model.GetPredictionStandardError(query, standardError, infoMatrix);
    Assert.AreEqual(2.4963053239397244, queryError, 1e-10);

    DoubleRange interval = model.GetPredictionInterval(query, standardError, features.Length, infoMatrix);
    Assert.AreEqual(interval.Min, 7.8428783761994554, 1e-5);
    Assert.AreEqual(interval.Max, 17.892482376434273, 1e-5);
}
static void Main(string[] args)
{
    // --- Multiple regression: card cost from attack/hit-point columns. ---
    DataTable attHpSheet = new ExcelReader("HsAttHp.xlsx").GetWorksheet("Sheet1");
    double[][] attHp = attHpSheet.ToArray <double>();
    DataTable costSheet = new ExcelReader("HsCost.xlsx").GetWorksheet("Sheet1");
    double[] cost = costSheet.Columns[0].ToArray <double>();

    // Two inputs plus an intercept term.
    var multiple = new MultipleLinearRegression(2, true);
    double fitError = multiple.Regress(attHp, cost);

    double a = multiple.Coefficients[0];
    double b = multiple.Coefficients[1];
    double c = multiple.Coefficients[2];
    Console.WriteLine(a + " " + b + " " + c);
    Console.ReadKey();

    // --- Simple regression over a small year/value series. ---
    double[] years = { 2005, 2006, 2007, 2008, 2009, 2010, 2011 };
    double[] values = { 12, 19, 29, 37, 45, 23, 33 };

    SimpleLinearRegression simple = new SimpleLinearRegression();
    simple.Regress(years, values);

    // Evaluate the line at an arbitrary input.
    double y = simple.Compute(85);

    // Extract and print the fitted slope and intercept.
    double s = simple.Slope;
    double cut = simple.Intercept;
    Console.WriteLine(s + "x+" + cut);
    Console.ReadKey();
}
/// <summary>
///   Performs an explicit conversion from <see cref="MultipleLinearRegression"/> to <see cref="SupportVectorMachine"/>.
/// </summary>
///
/// <param name="regression">The linear regression to be converted.</param>
///
/// <returns>The result of the conversion.</returns>
///
public static SupportVectorMachine FromRegression(MultipleLinearRegression regression)
{
    // A linear regression is equivalent to an SVM with a single support
    // vector (the weight vector itself) at weight 1.0, whose threshold is
    // the regression intercept.
    var machine = new SupportVectorMachine(regression.NumberOfInputs);
    machine.Weights = new[] { 1.0 };
    machine.SupportVectors = new[] { regression.Weights };
    machine.Threshold = regression.Intercept;
    return machine;
}
public MultipleLinearRegression Learn(double[][] inputs, double[] outputs)
{
    // Fit a multiple linear regression with an intercept using OLS, then
    // report several goodness-of-fit measures before returning the model.
    var ols = new OrdinaryLeastSquares() { UseIntercept = true };
    MultipleLinearRegression regression = ols.Learn(inputs, outputs);

    // Predicted values and their squared-error loss.
    double[] predicted = regression.Transform(inputs);
    double error = new SquareLoss(outputs).Loss(predicted);

    // BUGFIX: derive the dimensionality from the data instead of the
    // hard-coded 2, so the r² computations stay correct for any input width.
    int numberOfInputs = inputs[0].Length;

    // Plain coefficient of determination r².
    double r2 = new RSquaredLoss(numberOfInputs: numberOfInputs, expected: outputs).Loss(predicted);

    // Adjusted r² (set Weights on r2loss if you have a weighted problem).
    var r2loss = new RSquaredLoss(numberOfInputs: numberOfInputs, expected: outputs)
    {
        Adjust = true,
    };
    double ar2 = r2loss.Loss(predicted);

    // The model's own (adjusted) coefficient of determination, for comparison.
    double ur2 = regression.CoefficientOfDetermination(inputs, outputs, adjust: true);

    Console.WriteLine("Weights:");
    foreach (var w in regression.Weights)
    {
        Console.WriteLine($",{w}");
    }
    Console.WriteLine("Intercept:");
    Console.WriteLine($",{regression.Intercept}");
    Console.WriteLine($"error:{error}");
    Console.WriteLine($"r2:{r2}");
    Console.WriteLine($"r2loss:{r2loss}");
    Console.WriteLine($"ar2:{ar2}");
    Console.WriteLine($"ur2:{ur2}");

    return regression;
}
public void Learn(IList <XYtoZ> dsLearn)
{
    // Project each sample onto its (X, Y) predictors and Z target.
    var predictors = new double[dsLearn.Count][];
    var targets = new double[dsLearn.Count];
    for (int i = 0; i < dsLearn.Count; i++)
    {
        predictors[i] = new double[] { dsLearn[i].X, dsLearn[i].Y };
        targets[i] = dsLearn[i].Z;
    }

    // Fit with OLS, honoring the configured robustness flag.
    var ols = new OrdinaryLeastSquares { IsRobust = _isRobust };
    _multipleLinearRegression = ols.Learn(predictors, targets);
}
/// <summary>
/// Saves a trained MultipleLinearRegression model to a timestamped CSV file.
/// The file contains the weights (comma separated) followed by the intercept.
/// </summary>
/// <param name="learner">The trained MultipleLinearRegression to persist.</param>
/// <param name="version">AI player version, embedded in the file name.</param>
public void SaveMultipleLinearRegression(MultipleLinearRegression learner, AiPlayer.Version version)
{
    // BUGFIX: "HH" is the 24-hour clock. The original "hh" (12-hour with no
    // AM/PM designator) produced ambiguous timestamps, so a morning and an
    // evening save could silently overwrite each other.
    var filePath = string.Format(LearningConfig.LearnerSavePath + @"/{0}_{1}_{2}.csv",
                                 MultipleLinearRegressionFileName,
                                 DateTime.Now.ToString("yyyyMMddHHmmss"),
                                 version.ToString().ToLower());

    // Weights first, intercept last — same CSV layout as before.
    var sb = new StringBuilder();
    foreach (var w in learner.Weights)
    {
        sb.Append($"{w},");
    }
    sb.Append(learner.Intercept.ToString());

    FileHelper.Write(sb.ToString(), filePath);
}
/// <summary>
/// Calculates the Variance Inflation Factors (VIFs) for the different coefficients.
/// Each variable is regressed against all the others; its VIF is 1 / (1 - r²)
/// of that auxiliary regression, so high values indicate multicollinearity.
/// </summary>
/// <returns>An array containing corresponding VIFs.</returns>
/// <param name="inputs">The inputs that a model was trained on.</param>
public static float[] CalculateVIFs(double[][] inputs)
{
    //Rotate array and create resultant array.
    // After rotation, inputs[variable][observation] — presumably a transpose;
    // verify against MathUtils.RotateArray.
    inputs = MathUtils.RotateArray(inputs);
    float[] VIFs = new float[inputs.Length];
    //Loop through each variable
    for (int a = 0; a < inputs.Length; a++)
    {
        //The inputs/outputs for the regression models.
        double[][] regressionInputs = new double[inputs[0].Length][];
        double[] regressionOutput = new double[inputs[0].Length];
        //Loop through and assign all of the independent variables as IVs,
        //except inputs[a], which becomes the dependent variable.
        // b walks observations; c walks variables; d tracks the write index
        // into the row, skipping the slot of the excluded variable a.
        for (int b = 0; b < inputs[0].Length; b++)
        {
            regressionInputs[b] = new double[inputs.Length - 1];
            for (int c = 0, d = 0; c < inputs.Length; c++)
            {
                if (a == c)
                {
                    regressionOutput[b] = inputs[a][b];
                }
                else
                {
                    regressionInputs[b][d] = inputs[c][b];
                    d++;
                }
            }
        }
        //Perform regression
        OrdinaryLeastSquares ols = new OrdinaryLeastSquares() { UseIntercept = true };
        MultipleLinearRegression regression = ols.Learn(regressionInputs, regressionOutput);
        //Make predictions
        double[] predictions = regression.Transform(regressionInputs);
        //Calculate the loss
        double r2 = (new RSquaredLoss(inputs.Length - 1, regressionOutput)).Loss(predictions);
        //Calculate the VIF
        // NOTE(review): when r2 == 1 (perfect collinearity) this divides by
        // zero and yields infinity — confirm callers tolerate that.
        VIFs[a] = (float)(1.0f / (1.0f - r2));
    }
    return(VIFs);
}
public void Train(IEnumerable <double[]> inputs, IEnumerable <double> labels)
{
    // BUGFIX: materialize both sequences once. The original enumerated
    // 'inputs' three times (Count(), First(), ToArray()), which is wasteful
    // and breaks for sequences that can only be enumerated once.
    double[][] inputArray = inputs.ToArray();
    double[] labelArray = labels.ToArray();

    if (inputArray.Length < 1)
    {
        throw new Exception("Must use at least one labelled instance to train a regression model");
    }

    // Feature count comes from the first instance's width.
    int inputFeatureCount = inputArray[0].Length;

    model = new MultipleLinearRegression(inputFeatureCount, USE_INTERCEPT);
    Error = model.Regress(inputArray, labelArray);
}
private void Train(IEnumerable <SprintDataRow> trainingDataset)
{
    // Independent variables: sprint number plus each programmer's hours.
    double[][] inputs = trainingDataset
        .Select(row => new double[] { row.SprintNumber, row.HoursProgrammer1, row.HoursProgrammer2, row.HoursProgrammer3 })
        .ToArray();

    // Dependent variable: story points processed during the sprint.
    double[] outputs = trainingDataset
        .Select(row => row.NumberOfProcessedStoryPoints)
        .ToArray();

    // Fit the model with Ordinary Least Squares.
    var ols = new OrdinaryLeastSquares();
    this._multipleLinearRegressionModel = ols.Learn(inputs, outputs);
}
// Rebuilds the inner linear model after deserialization when only the raw
// coefficient vector was stored (older serialized formats).
private void SetValuesOnDeserialized(StreamingContext context)
{
    // Only reconstruct when the serialized payload did not include 'linear'.
    if (linear == null)
    {
        linear = new MultipleLinearRegression()
        {
            // The obsolete setters are the only way to inject the stored
            // values, hence the warning suppression. coefficients[0] is used
            // as the intercept; Get(1, 0) presumably slices the remaining
            // elements as the weight vector — verify against Accord's
            // Vector.Get semantics.
#pragma warning disable 612, 618
            Weights = coefficients.Get(1, 0),
            Intercept = coefficients[0]
#pragma warning restore 612, 618
        };
    }
}
public void SimpleMonthlyMLRAprMayJun()
{
    CreateSeries();

    // Interpolate using April, May and June as the regression months.
    var result = MultipleLinearRegression.MlrInterpolation(list, new int[] { 4, 5, 6 }, .7, true);
    result.EstimatedSeries.WriteToConsole();

    // May is inside the regression window, so it receives an estimate.
    var may = result.EstimatedSeries["2000-5-1"];
    Assert.AreEqual(5, may.Value, 0.01, "may value should be 5");

    // February is outside the window and must remain missing.
    var feb = result.EstimatedSeries["2000-2-1"];
    Assert.IsTrue(feb.IsMissing, "Value in Feb should not be estimated");
}
public void RegressTest3()
{
    // Model the plane "ax + by + c = z" through four points that all lie at
    // z = 1: the fit should find a = b = 0 and c = 1 exactly.
    var model = new MultipleLinearRegression(2, true);

    double[][] points =
    {
        new double[] { 1, 1 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 0, 0 },
    };
    double[] heights = { 1, 1, 1, 1 };

    double fitError = model.Regress(points, heights);

    Assert.AreEqual(2, model.NumberOfInputs);
    Assert.AreEqual(1, model.NumberOfOutputs);

    // ax + by + c = z  =>  0x + 0y + 1 = z  =>  z = 1 everywhere.
    Assert.AreEqual(0.0, model.Coefficients[0], 1e-6);  // a
    Assert.AreEqual(0.0, model.Coefficients[1], 1e-6);  // b
    Assert.AreEqual(1.0, model.Coefficients[2], 1e-6);  // c
    Assert.AreEqual(0.0, fitError, 1e-6);

    // Legacy Compute and the newer Transform both evaluate the model.
    double[] expected = model.Compute(points);
    double[] actual = model.Transform(points);

    double r2 = model.CoefficientOfDetermination(points, heights);
    Assert.AreEqual(1.0, r2);
}
public void Learn()
{
    // Nothing to fit on an empty sample set.
    if (Count == 0)
    {
        return;
    }

    // OLS with an intercept over the accumulated inputs/outputs.
    var ols = new OrdinaryLeastSquares { UseIntercept = true };
    m_Regression = ols.Learn(m_Inputs.ToArray(), m_Outputs.ToArray());
}
/// <summary>
/// Gets a fitted approximation to the forward values.
/// </summary>
/// <param name="date">The date at which the regressors should be observed.</param>
/// <param name="cfs">The sum of the pv of all the cashlows on the path that take place after <paramref name="date"/>.</param>
/// <returns>The fitted value for each path.</returns>
public double[] FitCFs(Date date, double[] cfs)
{
    // Regress the path cashflows onto the intrinsic regressors observed at
    // 'date', then evaluate the fit on those same paths.
    double[][] regressors = GetIntrinsic(date, 10);
    var ols = new OrdinaryLeastSquares { UseIntercept = true, IsRobust = true };
    MultipleLinearRegression fit = ols.Learn(regressors, cfs);
    return fit.Transform(regressors);
}
// Fits a multiple linear regression of the 4th LearningData column on the
// first three normalized input columns, records predictions/error, and
// samples the fitted function along the diagonal for plotting.
public void MultipleLinearRegressionLearning()
{
    int n = LearningData.Length;
    double[] dependentVariables = LearningData.GetColumn(3);
    double[][] independentVariables = new double[n][];
    // NOTE(review): this loop re-assigns the same column values that
    // GetColumn(3) already produced above — it appears redundant; confirm
    // GetColumn's semantics before removing.
    for (int j = 0; j < n; j++)
    {
        dependentVariables[j] = LearningData[j][3];
    }
    // Predictors: the first three normalized input columns.
    for (int j = 0; j < n; j++)
    {
        independentVariables[j] = new double[3];
        independentVariables[j][0] = NormalizedInputData[j][0];
        independentVariables[j][1] = NormalizedInputData[j][1];
        independentVariables[j][2] = NormalizedInputData[j][2];
    }
    multipleLinearRegressionObj = MultipleLinearRegression.FromData(independentVariables, dependentVariables);
    double[] prediction = multipleLinearRegressionObj.Transform(independentVariables);
    PredictedData[0] = prediction;
    ErrorMLR = new SquareLoss(dependentVariables).Loss(prediction);
    // Pack [intercept, w_last, ..., w_first]: the weights are copied in
    // reverse order here...
    double[] coefOfFunction = new double[multipleLinearRegressionObj.Weights.Length + 1];
    coefOfFunction[0] = multipleLinearRegressionObj.Intercept;
    int index = multipleLinearRegressionObj.Weights.Length - 1;
    for (int i = 1; i <= multipleLinearRegressionObj.Weights.Length; i++)
    {
        coefOfFunction[i] = multipleLinearRegressionObj.Weights[index];
        index--;
    }
    // ...and read back reversed again ([3], [2], [1]), so for three weights
    // func effectively computes Intercept + W[0]*x1 + W[1]*x2 + W[2]*x3.
    // NOTE(review): the double reversal cancels out but is easy to break —
    // confirm intent before simplifying.
    double func(double _x1, double _x2, double _x3) => (coefOfFunction[0] + coefOfFunction[3] * _x1 + coefOfFunction[2] * _x2 + coefOfFunction[1] * _x3);
    // Sample the fitted surface along the diagonal x1 = x2 = x3 = i for a
    // 100-point plot.
    XYpairMLR[0] = new double[100];
    XYpairMLR[1] = new double[100];
    for (int i = 0; i < 100; i++)
    {
        XYpairMLR[0][i] = i;
        XYpairMLR[1][i] = func(i, i, i);
    }
}