/*
 * calculateSubjectMLR returns a multiple linear regression line based off of
 * previous results and performance in a specific subject.
 *
 * In the database, there are previous results and their according homework,
 * test and minimum target grade grades. This algorithm feeds them all in and
 * does a multiple linear regression calculation.
 *
 * This is then used later in calculateGrade to make a grade prediction.
 */
public static double[] calculateSubjectMLR(
    List<double> HomeworkResults,
    List<double> TestResults,
    List<double> MTGResults,
    List<double> FinalResults)
{
    // Design matrix: one row per past result, columns { homework, test, MTG }.
    // Allocate and populate each row in a single pass (the original made two
    // passes: one to initialise the jagged array, one to fill it).
    double[][] performanceGrades = new double[HomeworkResults.Count][];
    double[] finalGrades = new double[FinalResults.Count];

    for (int i = 0; i < HomeworkResults.Count; i++)
    {
        performanceGrades[i] = new double[] { HomeworkResults[i], TestResults[i], MTGResults[i] };
        finalGrades[i] = FinalResults[i];
    }

    // With intercept:true the result is { b0, b1, b2, b3 } for
    // final = b0 + b1*homework + b2*test + b3*mtg.
    return Fit.MultiDim(performanceGrades, finalGrades, intercept: true);
}
/// <summary>
/// Fits the autoregressive component in the <see cref="TwoStepFit"/> method.
/// </summary>
/// <param name="lags">An array of lagged data (<see cref="TimeSeriesIndicator.LaggedSeries"/>).</param>
/// <param name="data">The input series, differenced <see cref="_diffOrder"/> times.</param>
/// <param name="errorAr">The summed residuals (by default 0) associated with the AR component.</param>
private void AutoRegressiveStep(double[][] lags, double[] data, double errorAr)
{
    // Least-squares fit of the AR coefficients: (lags[time][lagged X]) |---> ΣᵢφᵢXₜ₋ᵢ
    var arFits = Fit.MultiDim(
        lags,
        data.Skip(_arOrder).ToArray(),
        method: DirectRegressionMethod.NormalEquations);
    var phi = Vector.Build.Dense(arFits);

    // Accumulate the in-sample residuals; the first _arOrder points have no
    // lagged history available, so they are 0-padded.
    for (var t = 0; t < data.Length; t++)
    {
        if (t < _arOrder)
        {
            _residuals.Add(0); // 0-padding
        }
        else
        {
            var predicted = Vector.Build.Dense(lags[t - _arOrder]).DotProduct(phi);
            var residual = data[t] - predicted;
            errorAr += Math.Pow(residual, 2);
            _residuals.Add(residual);
        }
    }

    ArResidualError = errorAr / (data.Length - _arOrder - 1);
    if (_maOrder == 0)
    {
        ArParameters = arFits; // Will not be thrown out
    }
}
/// <summary>
/// Fits a linear regression (with intercept) over game profile features for
/// the given player: games the player has liked are labelled 1, the supplied
/// random games are labelled 0. Stores the intercept as RegressionAlpha and
/// the remaining coefficients as the player's regression profile.
/// </summary>
public void CalculateRegression(Player player, IEnumerable<Game> randomGames)
{
    var features = new List<double[]>();
    var labels = new List<double>();

    // Positive examples: every game the player has liked.
    foreach (var like in player.Likes)
    {
        features.Add(like.Game.Profile.MakeRegressionArray());
        labels.Add(1);
    }

    // Negative examples: the random games.
    foreach (var game in randomGames)
    {
        features.Add(game.Profile.MakeRegressionArray());
        labels.Add(0);
    }

    var coefficients = Fit.MultiDim(features.ToArray(), labels.ToArray(), true);
    player.RegressionAlpha = (float)coefficients[0];
    player.Profile = RegressionProfile.MakeRegressionProfile(coefficients);
}
/// <summary>
/// Fits a least-squares regression line/hyperplane to the vectors in
/// <paramref name="vset"/>. The last dimension is treated as the response;
/// all earlier dimensions are predictors.
/// </summary>
/// <param name="vset">The vector set to fit; must have at least 2 dimensions.</param>
/// <returns>
/// Coefficients { intercept, slope(s)... }: for 2-D, { a, b } of y = a + b*x;
/// otherwise the intercept followed by one slope per predictor.
/// </returns>
/// <exception cref="NotSupportedException">Thrown when <paramref name="vset"/> has fewer than 2 dimensions.</exception>
public double[] Calculate(VectorSet vset)
{
    if (vset.Dimensions < 2)
    {
        throw new NotSupportedException("linreg only makes sense on vector spaces (2+ dimensions)");
    }

    if (vset.Dimensions == 2)
    {
        // As dimensions go up, closed form solutions become sparse, but for
        // 2-D it isn't so bad, and it's more precise than the general solver.
        double sumXYResidual = 0;
        double sumXSquareResidual = 0;
        for (int i = 0; i < vset.Length; i++)
        {
            sumXYResidual += (vset.Vectors[i][0] - vset.DataSets[0].Mean) * (vset.Vectors[i][1] - vset.DataSets[1].Mean);
            sumXSquareResidual += Math.Pow(vset.Vectors[i][0] - vset.DataSets[0].Mean, 2);
        }
        double b = sumXYResidual / sumXSquareResidual;
        // LSRL always passes through the point (x̅, y̅).
        // (Removed the dead "double a = 1;" initialization — a was always overwritten.)
        double a = vset.DataSets[1].Mean - (b * vset.DataSets[0].Mean);
        return new double[] { a, b };
    }

    // General case: split each vector into predictors (all but the last
    // component) and response (last component), then delegate to the
    // multi-dimensional solver. (The original built inputX with a needlessly
    // tangled nested loop that allocated rows from inside the column loop.)
    double[][] inputX = new double[vset.Length][];
    double[] inputY = new double[vset.Length];
    for (int i = 0; i < vset.Length; i++)
    {
        inputX[i] = new double[vset.Dimensions - 1];
        for (int j = 0; j < vset.Dimensions - 1; j++)
        {
            inputX[i][j] = vset.Vectors[i][j];
        }
        inputY[i] = vset.Vectors[i][vset.Dimensions - 1];
    }
    return Fit.MultiDim(inputX, inputY, intercept: true);
}
/// <summary>
/// Fits a linear model height = b0 + b1 * weight over the given users and
/// stores the coefficients in <c>_coeffs</c>. If the fit fails (e.g. no
/// weights were entered), the previous coefficients are left unchanged.
/// </summary>
/// <param name="users">The users whose weight/height pairs form the training data.</param>
public void BuildLinearModel(IEnumerable<User> users)
{
    var inputs = users.Select(u => new[] { u.Weight }).ToArray();
    var outputs = users.Select(u => u.Height).ToArray();
    try
    {
        _coeffs = Fit.MultiDim(inputs, outputs, intercept: true);
    }
    catch (ArgumentException)
    {
        // Deliberate best-effort: if no weights were entered the model won't
        // build correctly, so we keep any previously built coefficients.
        // (Unused catch variable removed — it triggered a CS0168 warning.)
    }
}
/// <summary>
/// Uses MathNets Fit.MultiDim to calculate least squared coefficients for model.
/// </summary>
/// <param name="y">Response variable</param>
/// <param name="x">Explanatory variables for the model. List contains data for one
/// variable.</param>
/// <returns>OLS-coefficients for the model.</returns>
/// <exception cref="MathError">Thrown when fitting the least square points fails.</exception>
private static double[] GetCoefficients(List<double> y, List<List<double>> x)
{
    // Convert the per-variable lists into per-observation rows
    // (see Matrix.InvertVariableList) for the regression solver.
    double[][] observationRows = Matrix.InvertVariableList(x);
    try
    {
        // intercept: true — the first returned coefficient is the constant term.
        return Fit.MultiDim(observationRows, y.ToArray(), true);
    }
    catch (ArgumentException)
    {
        throw new MathError("Matrix constructed wasn't a positive definite");
    }
}
/// <summary>
/// Fits a hyperplane through the sample points in <paramref name="x"/> using
/// least squares (normal equations) with an intercept term.
/// NOTE(review): the response vector y is built as Dense(x.Length), i.e. all
/// zeros, so the fit is against a zero target — confirm this is intentional.
/// </summary>
/// <param name="x">Sample points, one row per observation.</param>
/// <returns>Fitted coefficients: intercept followed by one slope per column of x.</returns>
public static double[] CalculatePlane(double[][] x)
{
    Vector<double> y = Vector<double>.Build.Dense(x.Length);
    // (Removed a block of commented-out alternative solvers that was dead code.)
    return Fit.MultiDim(x, y.ToArray(), true, DirectRegressionMethod.NormalEquations);
}
/// <summary>
/// Loads every row of the Results table, converts the stored letter grades to
/// numeric values via the Grades map, and fits a multiple linear regression
/// final = b0 + b1*homework + b2*mock + b3*mtg.
/// </summary>
/// <returns>Coefficients { intercept, homework, mock, mtg } from Fit.MultiDim.</returns>
double[] calculateCoefficient()
{
    List<double> HomeworkResults = new List<double>();
    List<double> MockResults = new List<double>();
    List<double> MTGResults = new List<double>();
    List<double> FinalResults = new List<double>();

    // Read each result row by its (assumed contiguous, 1-based) ResultID.
    // The concatenated value is a loop counter, not user input, so this
    // particular query is not injectable — but a parameterised query would
    // still be safer if the filter is ever generalised.
    for (int i = 1; i < SqlTools.getRows("Results") + 1; i++)
    {
        using (SqlTools tools = new SqlTools())
        {
            tools.reader = SqlTools.executeReader("SELECT HWResult, MockResult, MTGResult, FinalResult FROM Results where ResultID = " + i);
            while (tools.reader.Read())
            {
                HomeworkResults.Add(Grades[tools.reader[0].ToString().TrimEnd()]);
                MockResults.Add(Grades[tools.reader[1].ToString().TrimEnd()]);
                MTGResults.Add(Grades[tools.reader[2].ToString().TrimEnd()]);
                FinalResults.Add(Grades[tools.reader[3].ToString().TrimEnd()]);
            }
        }
    }

    // Build the design matrix and response vector in a single pass.
    // (Removed the leftover Debug.WriteLine(HomeworkResults[1]) which threw
    // ArgumentOutOfRangeException whenever fewer than two rows existed, the
    // redundant pre-initialisation loop, and the dead "//return null;".)
    double[][] x = new double[HomeworkResults.Count][];
    double[] y = new double[FinalResults.Count];
    for (int i = 0; i < HomeworkResults.Count; i++)
    {
        x[i] = new double[] { HomeworkResults[i], MockResults[i], MTGResults[i] };
        y[i] = FinalResults[i];
    }
    return Fit.MultiDim(x, y, intercept: true);
}
/// <summary>
/// Fits the moving average component in the <see cref="TwoStepFit"/> method.
/// </summary>
/// <param name="lags">An array of lagged data (<see cref="TimeSeriesIndicator.LaggedSeries"/>).</param>
/// <param name="data">The input series, differenced <see cref="_diffOrder"/> times.</param>
/// <param name="errorMa">The summed residuals (by default 0) associated with the MA component.</param>
private void MovingAverageStep(double[][] lags, double[] data, double errorMa)
{
    // Augment each row of AR lags with the lagged residuals from the AR step,
    // so AR and MA coefficients are fitted jointly in one regression.
    var appendedData = new List<double[]>();
    var laggedErrors = LaggedSeries(_maOrder, _residuals.ToArray());
    for (var i = 0; i < laggedErrors.Length; i++)
    {
        var doubles = lags[i].ToList();
        doubles.AddRange(laggedErrors[i]);
        appendedData.Add(doubles.ToArray());
    }

    var maFits = Fit.MultiDim(appendedData.ToArray(), data.Skip(_maOrder).ToArray(),
        method: DirectRegressionMethod.NormalEquations, intercept: _intercept);

    // When an intercept is fitted, maFits[0] is the constant term and must be
    // excluded from the dot product. Hoisted out of the loop below — it was
    // rebuilt on every iteration although it is loop-invariant.
    var paramVector = _intercept
        ? Vector.Build.Dense(maFits.Skip(1).ToArray())
        : Vector.Build.Dense(maFits);

    // Calculate the error associated with the model.
    // NOTE(review): when _intercept is true, maFits[0] is NOT added to the
    // prediction here, so residuals exclude the constant — confirm intended.
    for (var i = _maOrder; i < data.Length; i++)
    {
        var residual = data[i] - Vector.Build.Dense(appendedData[i - _maOrder]).DotProduct(paramVector);
        errorMa += Math.Pow(residual, 2);
    }

    // Same denominator in both branches; the switch-on-bool with a duplicated
    // MaResidualError assignment was collapsed into an if/else.
    MaResidualError = errorMa / (data.Length - Math.Max(_arOrder, _maOrder) - 1);
    if (_intercept)
    {
        Intercept = maFits[0];
        ArParameters = maFits.Skip(1).Take(_arOrder).ToArray();
        MaParameters = maFits.Skip(1 + _arOrder).ToArray();
    }
    else
    {
        ArParameters = maFits.Take(_arOrder).ToArray();
        MaParameters = maFits.Skip(_arOrder).ToArray();
    }
}
/// <summary>
/// Estimates the centre of a circle from rows of three touch points via least
/// squares. Each consecutive pair of points (p, q) in a row contributes one
/// linear equation 2(q.X-p.X)·cx + 2(q.Y-p.Y)·cy = (q.X²+q.Y²) - (p.X²+p.Y²).
/// </summary>
/// <param name="touchPoints">Rows of three touch points each.</param>
/// <returns>{ cx, cy }: the fitted centre coordinates.</returns>
public static double[] CalculateCenterOfCircle(TouchPoint[][] touchPoints)
{
    var coefficientRows = new List<double[]>();
    var rhs = new List<double>();

    foreach (var pointsInSameRow in touchPoints)
    {
        // One equation per consecutive pair: (0 -> 1) and (1 -> 2).
        for (var i = 0; i < 2; i++)
        {
            var p = pointsInSameRow[i];
            var q = pointsInSameRow[i + 1];
            rhs.Add(q.X * q.X + q.Y * q.Y - p.X * p.X - p.Y * p.Y);
            coefficientRows.Add(new[] { (q.X - p.X) * 2, (q.Y - p.Y) * 2 });
        }
    }

    // No intercept: the system is solved directly for (cx, cy).
    return Fit.MultiDim(coefficientRows.ToArray(), rhs.ToArray(), false, DirectRegressionMethod.NormalEquations);
}
/// <summary>
/// Predicts the next PREDICT_SIZE values of <paramref name="data"/> by fitting
/// an autoregressive model of up to MAX_VARIABLE lags with Fit.MultiDim and
/// rolling the fitted model forward, feeding each prediction back in.
/// Negative predictions are clamped to 0; predictions after the first are rounded.
/// </summary>
/// <param name="data">The historical series (needs more than 2 points).</param>
/// <returns>The predicted values, or null when the series is too short.</returns>
public double[] Predict(double[] data)
{
    int length = data.Length; // was repeated LINQ Count() calls on an array

    // Choose the lag order: cap at MAX_VARIABLE, otherwise just under half the series.
    int totalVariable = 0;
    if (length > 2)
    {
        if (length - MAX_VARIABLE > MAX_VARIABLE)
        {
            totalVariable = MAX_VARIABLE;
        }
        else if (length % 2 == 0)
        {
            totalVariable = length / 2 - 1;
        }
        else
        {
            totalVariable = length / 2; // integer division == floor for positive length
        }
    }
    if (totalVariable == 0)
    {
        return null;
    }

    // Build the lagged design matrix: each row is a window of totalVariable
    // values; the response is the value immediately following the window.
    double[][] xValues = new double[length - totalVariable][];
    double[] yValues = new double[length - totalVariable];
    for (int i = 0; i < length - totalVariable; i++)
    {
        xValues[i] = new double[totalVariable];
        int numZero = 0;
        for (int j = 0; j < totalVariable; j++)
        {
            xValues[i][j] = data[i + j];
            if (data[i + j] == 0)
            {
                numZero++;
            }
        }
        yValues[i] = data[i + totalVariable];
        if (yValues[i] == 0)
        {
            numZero++;
        }
        // Rows containing more than one zero are shifted by +1 (as in the original).
        if (numZero > 1)
        {
            xValues[i] = xValues[i].Select(r => r + 1).ToArray();
            yValues[i] = yValues[i] + 1;
        }
    }

    // Fit coefficients { intercept, lag 1..totalVariable }. On failure the
    // zero-initialised coefficients are used (best-effort, as before).
    double[] c = new double[totalVariable + 1];
    try
    {
        c = Fit.MultiDim(xValues, yValues, intercept: true);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }

    double[][] predictedInput = new double[PREDICT_SIZE][];
    double[] predictedOutput = new double[PREDICT_SIZE];

    // First prediction: seed the input window from the tail of the data.
    // NOTE(review): the window is filled with the last values in REVERSE order
    // (data[length-1-i]), while training windows are chronological — confirm intended.
    predictedInput[0] = new double[totalVariable];
    predictedOutput[0] = c[0];
    for (int i = 0; i < totalVariable; i++)
    {
        predictedInput[0][i] = data[length - 1 - i];
        predictedOutput[0] += c[i + 1] * predictedInput[0][i];
    }
    if (predictedOutput[0] < 0)
    {
        predictedOutput[0] = 0;
    }

    // Subsequent predictions: slide the window left and append the previous prediction.
    for (int i = 1; i < PREDICT_SIZE; i++)
    {
        predictedInput[i] = new double[totalVariable];
        predictedOutput[i] = c[0];
        for (int j = 1; j < totalVariable; j++)
        {
            predictedInput[i][j - 1] = predictedInput[i - 1][j];
            predictedOutput[i] += c[j] * predictedInput[i][j - 1];
        }
        predictedInput[i][totalVariable - 1] = predictedOutput[i - 1];
        predictedOutput[i] += c[totalVariable] * predictedInput[i][totalVariable - 1];

        if (predictedOutput[i] < 0)
        {
            predictedOutput[i] = 0;
        }
        else
        {
            predictedOutput[i] = Math.Round(predictedOutput[i]);
        }
    }
    return predictedOutput;
}
/* Computes polynomial-fit calibration parameters (returned via the list parameters). */
private bool CalcPolynomialParams(List<double> paramList, List<double> paramListP, List<double> paramListN, List<uint> sampleIndexList)
{
    // Make sure the result lists start out empty.
    paramList.Clear();
    paramListP.Clear();
    paramListN.Clear();

    // Number of samples.
    uint sampleNum = (uint)sampleIndexList.Count;

    // Build the equation-system matrices: one row of accumulated P^n sums per sample.
    // (Removed a large "#if false" dead branch that held an older, off-by-one
    // variant of this accumulation.)
    List<double[]> matrixAList = new List<double[]>();   // global matrixA
    List<double> vectorBList = new List<double>();       // global vectorB
    List<double[]> matrixAListP = new List<double[]>();  // positive-direction matrixA
    List<double> vectorBListP = new List<double>();      // positive-direction vectorB
    List<double[]> matrixAListN = new List<double[]>();  // negative-direction matrixA
    List<double> vectorBListN = new List<double>();      // negative-direction vectorB

    for (int i = 0; i < sampleNum; ++i)
    {
        var sampleIndex = sampleIndexList[i];
        /*
         * Polynomial being fitted:
         *   F = a*P^0 + b*P^1 + c*P^2 + ...
         *   V/t = V*sampleRate = a*(P1^0+P2^0+...+Pn^0) + b*(P1^1+P2^1+...+Pn^1) + c*(P1^2+P2^2+...+Pn^2) + ...
         * Solve for the parameters: a b c ...
         */
        var samplePresureAvg = SamplePresureAvg(sampleIndex); // mean pressure difference
        if (samplePresureAvg > 0)
        {
            // Positive direction.
            double[] matrixAi = new double[POLYNOMIAL_ORDER + 1];
            double[] matrixAiP = new double[POLYNOMIAL_P_ORDER + 1];
            var sampleDataIterator = m_waveAnalyzer.SampleDataIterator(sampleIndex);
            foreach (double presure in sampleDataIterator)
            {
                double pn = 1.0; // P^0
                for (int n = 0; n < Math.Max(POLYNOMIAL_P_ORDER, POLYNOMIAL_ORDER) + 1; n++)
                {
                    if (n <= POLYNOMIAL_ORDER)
                    {
                        matrixAi[n] += pn;
                    }
                    if (n <= POLYNOMIAL_P_ORDER)
                    {
                        matrixAiP[n] += pn;
                    }
                    pn *= presure; // P^n
                }
            }
            double y = CalVolume * SAMPLE_RATE;
            matrixAListP.Add(matrixAiP);
            vectorBListP.Add(y);
            matrixAList.Add(matrixAi);
            vectorBList.Add(y);
        }
        else // samplePresureAvg <= 0
        {
            // Negative direction.
            double[] matrixAi = new double[POLYNOMIAL_ORDER + 1];
            double[] matrixAiN = new double[POLYNOMIAL_N_ORDER + 1];
            var sampleDataIterator = m_waveAnalyzer.SampleDataIterator(sampleIndex);
            foreach (double presure in sampleDataIterator)
            {
                double pn = 1.0; // P^0
                for (int n = 0; n < Math.Max(POLYNOMIAL_N_ORDER, POLYNOMIAL_ORDER) + 1; n++)
                {
                    if (n <= POLYNOMIAL_ORDER)
                    {
                        matrixAi[n] += pn;
                    }
                    if (n <= POLYNOMIAL_N_ORDER)
                    {
                        matrixAiN[n] += pn;
                    }
                    pn *= presure; // P^n
                }
            }
            double y = -CalVolume * SAMPLE_RATE;
            matrixAListN.Add(matrixAiN);
            vectorBListN.Add(y);
            matrixAList.Add(matrixAi);
            vectorBList.Add(y);
        }
    }

    bool bRet = false;
    try
    {
        /* Fit the optimal parameter sets with multivariate linear least squares. */
        /* Global. */
        double[][] matrixA = matrixAList.ToArray();
        double[] vectorB = vectorBList.ToArray();
        double[] result = Fit.MultiDim(matrixA, vectorB, false, MathNet.Numerics.LinearRegression.DirectRegressionMethod.Svd);
        paramList.AddRange(result);

        /* Positive direction. */
        double[][] matrixAP = matrixAListP.ToArray();
        double[] vectorBP = vectorBListP.ToArray();
        double[] resultP = Fit.MultiDim(matrixAP, vectorBP, false, MathNet.Numerics.LinearRegression.DirectRegressionMethod.Svd);
        paramListP.AddRange(resultP);

        /* Negative direction. */
        double[][] matrixAN = matrixAListN.ToArray();
        double[] vectorBN = vectorBListN.ToArray();
        double[] resultN = Fit.MultiDim(matrixAN, vectorBN, false, MathNet.Numerics.LinearRegression.DirectRegressionMethod.Svd);
        paramListN.AddRange(resultN);

        bRet = true;
    }
    catch (Exception e)
    {
        Console.WriteLine($"Exception: {e.Message}");
        bRet = false;
    }
    return bRet;
}
/// <summary>
/// Multivariate regression algorithm: filters the observations, applies
/// per-variable preprocessing, optionally normalises, fits an OLS model with
/// intercept, and computes fit statistics (R², standard error, F, df, SSR, SSE).
/// </summary>
/// <param name="u">Normalisation flag: 1 = normalise, 0 = don't.</param>
/// <param name="vib">Filter: remove-largest-percentage filtering array.</param>
/// <param name="vis">Filter: remove-smallest-percentage filtering array.</param>
/// <param name="xij">2-D array of independent variables (rows = observations).</param>
/// <param name="Y">Dependent-variable array.</param>
/// <param name="XF">Preprocessing method per variable.</param>
/// <param name="k">Returns the number of valid rows after filtering.</param>
/// <param name="p">Returns the proportion of valid rows after filtering.</param>
/// <param name="mList">Returns the coefficient array (intercept first).</param>
/// <param name="resultList">Returns { R², sey, F, df, SSR, SSE }.</param>
/// <param name="errmsg">Returns the error message on failure.</param>
/// <returns>Error code: 0 = ok, 1/2 = too few rows, 3 = exception, 4/5 = dimension mismatch.</returns>
public int Regression(int u, double[] vib, double[] vis, double[][] xij, double[] Y, List<XFClass> XF,
    ref int k, ref double p, ref double[] mList, ref double[] resultList, ref string errmsg)
{
    int S = 0;
    try
    {
        #region Initial dimension checks
        // (Removed blocks of commented-out validation code and an unused
        // commented-out direct Fit.MultiDim call.)
        if (vib.Count() != xij[0].Count() + 1)
        {
            errmsg = "自变量二维数组列数与最大百分比滤波数量不符。";
            return 5;
        }
        if (vis.Count() != xij[0].Count() + 1)
        {
            errmsg = "自变量二维数组列数与最小百分比滤波数量不符。";
            return 5;
        }
        if (Y.Count() != xij.GetLength(0))
        {
            errmsg = "自变量二维数组行数与因变量数组个数不符。";
            return 4;
        }
        if (xij.GetLength(0) < XF.Count)
        {
            errmsg = "自变量二维数组行数小于自变量个数。";
            return 1;
        }
        int getXCount = xij[0].Count();
        #endregion

        #region Convert the 2-D array to a list
        List<IndependentVariableClass> newXijList = getNewList(xij, Y);
        #endregion

        #region Percentage filtering
        List<int> removeList = getRemoveIdList(newXijList, vib, vis);
        foreach (int item in removeList)
        {
            newXijList.RemoveAll(x => x.id == item);
        }
        if (newXijList.Count < XF.Count)
        {
            errmsg = "滤波后自变量二维数组行数小于自变量个数。";
            return 2;
        }
        #endregion

        #region Preprocessing
        // Pad every row's value list to XF.Count with zeros before preprocessing.
        foreach (IndependentVariableClass item in newXijList)
        {
            while (item.valueList.Count < XF.Count)
            {
                item.valueList.Add(0.0);
            }
        }
        foreach (IndependentVariableClass item in newXijList)
        {
            for (int i = 0; i < item.valueList.Count; i++)
            {
                switch (XF[i].A)
                {
                    case 0:
                        break; // no preprocessing
                    case 1:
                        item.valueList[i] = Preprocessing1(item.valueList[i], XF[i].B);
                        break;
                    case 2:
                        item.valueList[i] = Preprocessing2(item.valueList[i], XF[i].B);
                        break;
                    case 3:
                        item.valueList[i] = Preprocessing3(item.valueList[i], XF[i].B);
                        break;
                    case 4:
                        item.valueList[i] = Preprocessing4(item.valueList[i]);
                        break;
                    case 5:
                        item.valueList[i] = Preprocessing5(item.valueList, item.y, XF[i].B, getXCount);
                        break;
                    default:
                        break;
                }
            }
        }
        #endregion

        #region Normalisation
        if (u == 1)
        {
            newXijList = Normalization(newXijList, XF.Count);
        }
        #endregion

        k = newXijList.Count; // number of valid rows
        p = Math.Round((Convert.ToDouble(newXijList.Count) / Convert.ToDouble(xij.GetLength(0))), 4); // valid-row proportion

        // Rebuild the design matrix and response vector from the surviving rows.
        double[][] newX = new double[newXijList.Count][];
        double[] newY = new double[newXijList.Count];
        for (int i = 0; i < newXijList.Count; i++)
        {
            double[] xList = new double[newXijList[i].valueList.Count];
            for (int l = 0; l < newXijList[i].valueList.Count; l++)
            {
                xList[l] = newXijList[i].valueList[l];
            }
            newX[i] = xList;
            newY[i] = newXijList[i].y;
        }

        // OLS fit with intercept: mList[0] is the constant term.
        mList = Fit.MultiDim(newX, newY, true, MathNet.Numerics.LinearRegression.DirectRegressionMethod.NormalEquations);

        // Fit statistics.
        double df = k - XF.Count - 1;
        List<double> yyList = new List<double>();
        foreach (IndependentVariableClass item in newXijList)
        {
            double yy = mList[0];
            for (int i = 0; i < item.valueList.Count; i++)
            {
                yy += item.valueList[i] * mList[i + 1];
            }
            yyList.Add(yy);
        }
        double avgY = newXijList.Average(t => t.y);
        double SSR = 0;
        for (int i = 0; i < yyList.Count; i++)
        {
            SSR += Math.Pow(avgY - yyList[i], 2.0);
        }
        double SSE = 0;
        for (int i = 0; i < newXijList.Count; i++)
        {
            SSE += Math.Pow(newXijList[i].y - yyList[i], 2.0);
        }
        double SST = SSR + SSE;
        double F = Math.Round(((SSR / XF.Count + 0.0) / (SSE / df)), 3);
        double R2 = Math.Round((SSR / SST), 3);
        double sey = Math.Round(Math.Sqrt(SSE / df), 3);

        resultList = new double[6];
        resultList[0] = R2;
        resultList[1] = sey;
        resultList[2] = F;
        resultList[3] = df;
        resultList[4] = SSR;
        resultList[5] = SSE;
        return S;
    }
    catch (Exception ex)
    {
        errmsg = ex.Message;
        S = 3;
    }
    return S;
}