public void learn_test_2()
{
#if NETCORE
    // Pin the culture so decimal formatting in ToString("N1") is deterministic.
    CultureInfo.CurrentCulture = CultureInfo.CreateSpecificCulture("en-US");
#endif
    #region doc_learn
    // Training data: five (x, y) samples of the function y = x^2,
    // stored as the two columns of a single matrix.
    double[,] data =
    {
        //  X     Y
        { 12,   144 },  // example #1
        { 15,   225 },  // example #2
        { 20,   400 },  // example #3
        { 25,   625 },  // example #4
        { 35,  1225 },  // example #5
    };

    // Split the matrix into input and output vectors.
    double[] inputs = data.GetColumn(0);   // X
    double[] outputs = data.GetColumn(1);  // Y

    // Learn a 2nd-degree polynomial mapping X -> Y.
    var ls = new PolynomialLeastSquares() { Degree = 2 };
    PolynomialRegression poly = ls.Learn(inputs, outputs);

    // Human-readable form of the learned polynomial.
    string str = poly.ToString("N1"); // "y(x) = 1.0x^2 + 0.0x^1 + 0.0"

    // Coefficients: Weights holds x^2 and x^1 terms, Intercept the constant.
    double[] weights = poly.Weights;    // ~{ 1.0, 0.0 }
    double intercept = poly.Intercept;  // ~0.0

    // Predict on the training inputs; the fit is exact, so MSE is ~0.
    double[] pred = poly.Transform(inputs);
    double error = new SquareLoss(outputs).Loss(pred); // 0.0
    #endregion

    Assert.AreEqual(0, error, 1e-10);

    string ex = weights.ToCSharp();
    double[] expected = { 1, 0 };

    Assert.AreEqual("y(x) = 1.0x^2 + 0.0x^1 + 0.0", str);
    Assert.IsTrue(weights.IsEqual(expected, 1e-6));
    Assert.AreEqual(0, intercept, 1e-6);
}
public void AddPoints(bool writeToFile)
{
    // Process the configured number of seasons, newest season first.
    int year = configManager_.GetYear();
    int reverseYears = configManager_.GetReverseYears();
    int width = 12;

    var statsMemoryStream = new MemoryStream(1024);
    var ratingsMemoryStream = new MemoryStream(1024);

    for (int offset = 0; offset < reverseYears; ++offset)
    {
        int currentYear = year - offset;
        bool isFirstYear = offset == 0;
        AddPointsForYear(currentYear);
        GenerateStatsForYear(currentYear, isFirstYear, ref statsMemoryStream);
        GenerateRatingsForYear(currentYear, width, isFirstYear, ref ratingsMemoryStream);
    }

    AggregateTotalRatings(ref ratingsMemoryStream);

    // Fit one regression per result column — presumably home win ('1'),
    // draw ('X') and away win ('2'); confirm against GenerateEquation.
    PolynomialRegression r1 = GenerateEquation(ratingsMemoryStream, width, '1');
    PolynomialRegression rx = GenerateEquation(ratingsMemoryStream, width, 'X');
    PolynomialRegression r2 = GenerateEquation(ratingsMemoryStream, width, '2');

    if (!writeToFile)
        return;

    // Write fair odds back out for every processed season.
    for (int offset = 0; offset < reverseYears; ++offset)
        AddFairOddsForYear(year - offset, r1, rx, r2);
}
public override void HandleNextTick(Tick t)
{
    // Smooth the incoming tick through the EMA and keep a rolling window
    // of EMA values for the polynomial fit.
    double emaVal = EMA.HandleNextTick(t);
    LRValues.Enqueue(emaVal);

    // FIX: snapshot the window ONCE. The original called LRValues.ToArray()
    // again on every loop iteration (a fresh allocation per point) and used
    // Y.Count() (LINQ) where Length is the idiomatic array accessor.
    double[] Y = LRValues.ToArray();
    double stdErr = 0;
    if (Y.Length == X.Length)
    {
        Vector x_data = new Vector(X);
        Vector y_data = new Vector(Y);
        var poly = new PolynomialRegression(x_data, y_data, 2);

        // Accumulate squared residuals of the quadratic fit over the window.
        for (int i = 0; i < Period; i++)
        {
            double y = poly.Fit(i);
            stdErr += Math.Pow(Y[i] - y, 2);
        }
        stdErr = Math.Sqrt(stdErr);
    }

    // Emit a band around the EMA, scaled by the fit error.
    HighData.Enqueue(EMA[0] + STDEV * stdErr);
    MiddleData.Enqueue(EMA[0]);
    LowData.Enqueue(EMA[0] - STDEV * stdErr);
}
public void ToStringTest()
{
    // Regression test for issue 51: culture-aware formatting of the
    // learned polynomial y = 3x^2 + 2x + 1.
    var poly = new PolynomialRegression(2);
    var x = new double[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    var y = new double[] { 1, 6, 17, 34, 57, 86, 121, 162, 209, 262, 321 };
    poly.Regress(x, y);

    var enUs = System.Globalization.CultureInfo.GetCultureInfo("en-US");
    var ptBr = System.Globalization.CultureInfo.GetCultureInfo("pt-BR");

    // Parameterless ToString follows the current culture's decimal separator.
    string sep = System.Globalization.CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator;
    string defaultExpected = "y(x) = 3x^2 + 1.99999999999999x^1 + 1.00000000000006x^0".Replace(".", sep);
    Assert.AreEqual(defaultExpected, poly.ToString());

    // Explicit cultures: en-US uses '.', pt-BR uses ','.
    Assert.AreEqual("y(x) = 3x^2 + 1.99999999999999x^1 + 1.00000000000006x^0", poly.ToString(null, enUs));
    Assert.AreEqual("y(x) = 3.0x^2 + 2.0x^1 + 1.0x^0", poly.ToString("N1", enUs));
    Assert.AreEqual("y(x) = 3,00x^2 + 2,00x^1 + 1,00x^0", poly.ToString("N2", ptBr));
}
private string Execute(double[] data)
{
    // Fit a polynomial to the samples and return its textual representation.
    var fitted = new PolynomialRegression().Fit(data);
    return fitted.ToString();
}
private PolynomialRegression PRLearning(double[] independentVariables, double[] dependentVariables)
{
    // Fit a 6th-degree polynomial mapping the independent variable to the
    // dependent one, record its prediction error, and sample the fitted
    // curve for plotting.
    var polyTeacher = new PolynomialLeastSquares() { Degree = 6 };
    PolynomialRegression objRegressionLocal = polyTeacher.Learn(independentVariables, dependentVariables);

    double[] prediction = objRegressionLocal.Transform(independentVariables);
    predictionPL[indexPredictionPL] = prediction;

    // BUG FIX: the loss must compare predictions against the observed outputs
    // (dependentVariables), not against the inputs — the original passed
    // independentVariables, producing a meaningless error value.
    errorPR += new SquareLoss(dependentVariables).Loss(prediction);

    // Coefficients in ascending-power order: [intercept, w_1, ..., w_degree].
    // Weights are stored highest power first, so copy them reversed.
    double[] coefOfFunction = new double[objRegressionLocal.Weights.Length + 1];
    coefOfFunction[0] = objRegressionLocal.Intercept;
    int index = objRegressionLocal.Weights.Length - 1;
    for (int i = 1; i <= objRegressionLocal.Weights.Length; i++)
    {
        coefOfFunction[i] = objRegressionLocal.Weights[index];
        index--;
    }

    // Evaluate the fitted polynomial at x.
    double func(double x)
    {
        double y = 0;
        for (int i = 0; i <= polyTeacher.Degree; i++)
        {
            y += coefOfFunction[i] * Math.Pow(x, i);
        }
        return y;
    }

    double min = NormalizedInputData.GetColumn(indexPredictionPL).Min(),
           max = NormalizedInputData.GetColumn(indexPredictionPL).Max();

    // Sample up to 100 (x, y) points of the fitted curve over [min, max].
    XYpairPL[indexPredictionPL] = new double[2][];
    XYpairPL[indexPredictionPL][0] = new double[100];
    XYpairPL[indexPredictionPL][1] = new double[100];
    index = 0;
    // BUG FIX: cap at the buffer length — with a fixed 0.01 step a range
    // wider than 1.0 would overrun the 100-element arrays.
    for (double i = min; i <= max && index < 100; i += 0.01)
    {
        XYpairPL[indexPredictionPL][0][index] = i;
        XYpairPL[indexPredictionPL][1][index] = func(i);
        index++;
    }

    indexPredictionPL++;
    return objRegressionLocal;
}
public void Learn(IList<XtoY> dsLearn)
{
    // Project the samples into parallel input/output arrays.
    var xs = new double[dsLearn.Count];
    var ys = new double[dsLearn.Count];
    for (int i = 0; i < dsLearn.Count; i++)
    {
        xs[i] = dsLearn[i].X;
        ys[i] = dsLearn[i].Y;
    }

    // Train with the configured degree and robustness setting.
    var pls = new PolynomialLeastSquares() { Degree = _degree, IsRobust = _isRobust };
    _polynomialRegression = pls.Learn(xs, ys);
}
public void PolyRegression()
{
    // NOTE(review): the guard checks dgvTestingSource but the data is read
    // from dgvLearningSource — confirm this asymmetry is intentional.
    if (dgvTestingSource.DataSource == null)
    {
        MessageBox.Show("Please Select a data set");
        return;
    }

    // Convert the learning grid's backing table into a numeric matrix.
    double[,] data = (dgvLearningSource.DataSource as DataTable).ToMatrix();

    double[] inputs = data.GetColumn(0);   // X
    double[] outputs = data.GetColumn(1);  // Y

    // Learn a 2nd-degree polynomial mapping X -> Y.
    var ls = new PolynomialLeastSquares() { Degree = 2 };
    PolynomialRegression poly = ls.Learn(inputs, outputs);

    // Diagnostic values (formatted polynomial, coefficients, MSE of the fit).
    string str = poly.ToString("N1");
    double[] weights = poly.Weights;
    double intercept = poly.Intercept;
    double[] pred = poly.Transform(inputs);
    double error = new SquareLoss(outputs).Loss(pred);

    // The scatter-plot helper expects jagged inputs: one row per sample.
    double[][] tmpInputs = new double[inputs.Length][];
    for (int i = 0; i < inputs.Length; i++)
        tmpInputs[i] = new double[1] { inputs[i] };

    CreateResultScatterplot(zedGraphControl1, tmpInputs, outputs, pred);
}
public static PolynomialRegression Fit<T>(this IEnumerable<T> items, Func<T, double> xFunc, Func<T, double> yFunc, int order = 1)
{
    var itemList = items.AsList();

    // Regress() is marked obsolete in favor of the new Learn() API, but as of
    // 2017-05-14 the replacement was broken:
    // https://github.com/accord-net/framework/issues/434
#pragma warning disable 0618
    var regression = new PolynomialRegression(order);
    double[] xs = itemList.Select(xFunc).ToArray();
    double[] ys = itemList.Select(yFunc).ToArray();
    regression.Regress(xs, ys);
#pragma warning restore 0618

    return regression;
}
static void Main(string[] args)
{
    // Fit a quadratic through five sample points.
    var xs = new Vector(new double[] { 0, 1, 2, 3, 4 });
    var ys = new Vector(new double[] { 1.0, 1.4, 1.6, 1.3, 0.9 });
    var poly = new PolynomialRegression(xs, ys, 2);

    // Tabulate the fitted curve at half-unit steps over [0, 4.5].
    Console.WriteLine("{0,6}{1,9}", "x", "y");
    for (int i = 0; i < 10; i++)
    {
        double x = i * 0.5;
        Console.WriteLine("{0,6:F2}{1,9:F4}", x, poly.Fit(x));
    }
}
public void Learn(IList<XtoY> dsLearn)
{
    // Outputs come from the observed case counts; inputs from the sample
    // X values. NOTE(review): mylist and dsLearn are assumed to have the
    // same length — confirm with the caller.
    var outputs = new List<double>(mylist.Count);
    for (int i = 0; i < mylist.Count; i++)
        outputs.Add(mylist[i].Cases);

    double[] inputs = dsLearn.Select(i => i.X).ToArray();

    var pls = new PolynomialLeastSquares() { Degree = _degree, IsRobust = _isRobust };
    _polynomialRegression = pls.Learn(inputs, outputs.ToArray());
}
public void TestPolynomialRegression()
{
    // Six (x, y) samples with a gently curving trend.
    var sample = new Vector2[]
    {
        new Vector2(80, 6.47),
        new Vector2(40, 6.24),
        new Vector2(-40, 5.72),
        new Vector2(-120, 5.09),
        new Vector2(-200, 4.30),
        new Vector2(-280, 3.33)
    };

    var cubicFit = new PolynomialRegression(sample, 3);

    // The cubic fit should predict x = 70 as ~6.408 (3 decimal places).
    Assert.Equal(Math.Round(6.408, 3), Math.Round(cubicFit.PredictPoint(70), 3));
}
private async Task SmoothData()
{
    // Fit a polynomial to the axial data off the UI thread and replace the
    // smoothed-curve series with the fitted values.
    await Task.Run(() =>
    {
        _axialCurvePlot.Values.Clear();

        // FIX: sort once instead of running OrderBy twice over the set.
        // OrderBy is stable, so the X and Y arrays stay aligned exactly as
        // they were in the original two-pass version.
        var ordered = _axialData.OrderBy(d => d.X).ToArray();
        var x = ordered.Select(d => d.X).ToArray();
        var y = ordered.Select(d => d.Y).ToArray();

        var result = PolynomialRegression.Y(x, y, PolynomDegree);
        for (int i = 0; i < result.Item2.Length; i++)
        {
            _axialCurvePlot.Values.Add(new ObservablePoint { X = x[i], Y = result.Item2[i] });
        }
    });
}
public void PolynomialRegressionRegressTest()
{
    double[] inputs = { 15.2, 229.7, 3500 };
    double[] outputs = { 0.51, 105.66, 1800 };

    // Fit a quadratic through the three samples.
    var target = new PolynomialRegression(2);
    target.Regress(inputs, outputs);

    // Coefficients are ordered highest power first: x^2, x^1, constant.
    double[] expected = { 8.003175717e-6, 4.882498125e-1, -6.913246203 };
    double[] actual = target.Coefficients;
    for (int i = 0; i < expected.Length; i++)
        Assert.AreEqual(expected[i], actual[i], 0.1);
}
public static PointPairList PolynomialRegresion(Dictionary<double, double> variablePair, int degree) // y = DOUBLE_Array[1]*x + DOUBLE_Array[0];
{
    // Fit a polynomial of the requested degree to the (key, value) pairs and
    // return the curve evaluated at every input key.
    //
    // BUG FIX: the learner previously hard-coded Degree = 3, ignoring the
    // 'degree' parameter; for any other degree the coefficient array and the
    // evaluation loop below would disagree in length.
    var polyTeacher = new PolynomialLeastSquares()
    {
        Degree = degree
    };

    PolynomialRegression objRegression = polyTeacher.Learn(variablePair.Keys.ToArray(), variablePair.Values.ToArray());

    // Coefficients in ascending-power order: [intercept, w_1, ..., w_degree].
    // Weights are stored highest power first, so copy them reversed.
    double[] coefOfFunction = new double[objRegression.Weights.Length + 1];
    coefOfFunction[0] = objRegression.Intercept;
    int index = objRegression.Weights.Length - 1;
    for (int i = 1; i <= objRegression.Weights.Length; i++)
    {
        coefOfFunction[i] = objRegression.Weights[index];
        index--;
    }

    // Evaluate the fitted polynomial at x.
    double func(double x)
    {
        double y = 0;
        for (int i = 0; i <= degree; i++)
        {
            y += coefOfFunction[i] * Math.Pow(x, i);
        }
        return y;
    }

    // Project every input key through the fitted polynomial.
    double[] independentValueArray = new double[variablePair.Count],
             dependentValueArray = new double[variablePair.Count];
    index = 0;
    foreach (var pair in variablePair)
    {
        independentValueArray[index] = pair.Key;
        dependentValueArray[index] = func(pair.Key);
        index++;
    }

    return new PointPairList(independentValueArray, dependentValueArray);
}
private static void RegressionPoly()
{
    // Build a degree-4 polynomial regression from six hand-picked samples.
    var regression = new PolynomialRegression();
    var samples = new (double X, double Y)[]
    {
        (1.1, 1), (2, 5), (5, 7), (5.5, 8), (7, 4.3), (9, 5)
    };
    foreach (var (x, y) in samples)
        regression.Add(x, y);
    regression.Degree = 4;

    // Print the fitted value at 0.1 intervals over [1.0, 9.0].
    for (int n = 10; n < 91; n++)
    {
        double regressionValue = regression.Value(n / 10.0);
        Console.WriteLine($"{n:N1};{regressionValue:N3}");
    }
}
public static void test3()
{
    // Sample a known quadratic with additive uniform noise in [0, 1).
    var poly2 = CreateFunc(1, 1);
    Random rnd = new Random();
    var pos = Enumerable.Range(0, 20)
        .Select(x => new double[] { x, poly2(x) + rnd.NextDouble() })
        .ToArray();
    double[] inputs = pos.Select(p => p[0]).ToArray();
    double[] outputs = pos.Select(p => p[1]).ToArray();

    // Quadratic least-squares fit.
    var ls = new PolynomialLeastSquares() { Degree = 2 };
    PolynomialRegression poly = ls.Learn(inputs, outputs);
    double a = poly.Weights[0];   // x^2 coefficient
    double b = poly.Weights[1];   // x^1 coefficient
    double c = poly.Intercept;    // constant term
    double[] predicted = poly.Transform(inputs);
    double error = new SquareLoss(outputs).Loss(predicted);

    // Simple linear fit for comparison.
    // NOTE(review): SquaredHingeLoss is an odd metric for a regression —
    // SquareLoss was probably intended; confirm before changing.
    var ols = new OrdinaryLeastSquares();
    SimpleLinearRegression mul = ols.Learn(inputs, outputs);
    double a1 = mul.Slope;
    double b1 = mul.Intercept;
    double[] simplepredict = mul.Transform(inputs);
    double error2 = new SquaredHingeLoss(outputs).Loss(simplepredict);

    Console.WriteLine("Done");
}
public double HandleNextTick(double currentTick)
{
    // NaN ticks are ignored entirely: nothing is enqueued anywhere.
    // (The original used currentTick.Equals(double.NaN), which — unlike
    // `==` — is true for NaN, so IsNaN is the equivalent check.)
    if (double.IsNaN(currentTick))
        return double.NaN;

    tickdata.Enqueue(currentTick);

    // Only fit once the rolling buffer is full; until then emit NaN.
    double value = double.NaN;
    if (tickdata.Count.Equals(tickdata.Capacity))
    {
        var poly = new PolynomialRegression(new Vector(X), new Vector(tickdata.ToArray()), 2);
        value = poly.Fit(Period);
    }

    indicatorData.Enqueue(value);
    return value;
}
private PolynomialRegression GenerateRegressionFitting(SortedDictionary<double, double> values, char result)
{
    // Split the sorted (x, y) pairs into parallel arrays.
    double[] inputs = values.Keys.ToArray();
    double[] outputs = values.Values.ToArray();

    // Fit a 2nd-degree polynomial to the data.
    var ls = new PolynomialLeastSquares { Degree = 2 };
    PolynomialRegression poly = ls.Learn(inputs, outputs);

#pragma warning disable IDE0059 // Unnecessary assignment of a value
    // Kept for inspection while debugging; not otherwise used.
    string str = poly.ToString("N1");
    double[] weights = poly.Weights;
    double intercept = poly.Intercept;
#pragma warning restore IDE0059 // Unnecessary assignment of a value

    // Goodness of fit: R^2 should be > 0.85 (close to 1 is good).
    // Observed: LastGamesMetric 0.77 0.81 0.08; GoalsScoredMetric 0.75 0.85 0.02.
    double[] prediction = poly.Transform(inputs);
    double r2 = new RSquaredLoss(outputs.Length, outputs).Loss(prediction);

    // An exact 1.0 is treated as a degenerate fit and zeroed out.
    // NOTE(review): exact floating-point equality is intentional here? confirm.
    if (r2 == 1.0)
    {
        r2 = 0.0;
    }

    r2Values_.Add(result, r2);
    return poly;
}
public void learn_test()
{
    double[] inputs = { 15.2, 229.7, 3500 };
    double[] outputs = { 0.51, 105.66, 1800 };

    // Learn a quadratic through the three samples.
    var ls = new PolynomialLeastSquares() { Degree = 2 };
    PolynomialRegression target = ls.Learn(inputs, outputs);

    // Weights hold the x^2 and x^1 coefficients; the constant term is Intercept.
    double[] expected = { 8.003175717e-6, 4.882498125e-1, -6.913246203 };
    double[] actual = target.Weights;

    Assert.AreEqual(2, actual.Length);
    Assert.AreEqual(expected[0], actual[0], 1e-3);
    Assert.AreEqual(expected[1], actual[1], 1e-3);
    Assert.AreEqual(expected[2], target.Intercept, 1e-3);
}
// Appends fair-odds columns (FOH, FOD, FOA) to the season CSV for the given
// year. Each regression maps a per-fixture points differential to a value
// that 100/x converts into decimal odds. Rows are written to a temporary
// "_ex" file which then replaces the original via Delete + Move.
public void AddFairOddsForYear(int year, PolynomialRegression r1, PolynomialRegression rx, PolynomialRegression r2)
{
    string leagueName = configManager_.GetLeagueName();
    string inputFilePath = "..\\..\\DBEX\\" + leagueName + year + ".csv";
    string outputFilePath = "..\\..\\DBEX\\" + leagueName + year + "_ex.csv";

    // Metrics used to score the two teams of each fixture.
    List <MetricInterface> metrics = MetricFactory.GetMetrics(metricConfigs_, year, configManager_, fixtureRetriever_);

    using (TextFieldParser parser = new TextFieldParser(inputFilePath))
    {
        using FileStream fileStream = new FileStream(outputFilePath, FileMode.Create, FileAccess.Write);
        using StreamWriter outputFile = new StreamWriter(fileStream);

        // Copy the header row, extended with the three new column names.
        string outputLine = parser.ReadLine() + ',' + "FOH" + ',' + "FOD" + ',' + "FOA";
        outputFile.WriteLine(outputLine);

        // NOTE(review): assumes the CSV data rows and GetAllFixtures(year) are
        // in the same order and of equal count — confirm upstream guarantees this.
        List <Fixture> fixtures = fixtureRetriever_.GetAllFixtures(year);
        int index = 0;
        while (!parser.EndOfData)
        {
            Fixture fixture = fixtures[index];

            // Net metric advantage of the home team over the away team,
            // truncated per metric to whole points.
            int diff = 0;
            foreach (MetricInterface m in metrics)
            {
                m.GetPoints(out double pctTeam1, out double pctTeam2, fixture.homeTeamId, fixture.awayTeamId, fixture);
                diff += (int)pctTeam1;
                diff -= (int)pctTeam2;
            }

            // Odds = 100 / regression prediction, one column per outcome.
            outputLine = parser.ReadLine() + ',' + (100 / r1.Transform(diff)).ToString("0.00") + ',' + (100 / rx.Transform(diff)).ToString("0.00") + ',' + (100 / r2.Transform(diff)).ToString("0.00");
            outputFile.WriteLine(outputLine);
            index++;
        }
    }

    // Swap the augmented file into place of the original.
    File.Delete(inputFilePath);
    File.Move(outputFilePath, inputFilePath);
}
public void learn_ToStringTest()
{
    // Learn y = 3x^2 + 2x + 1 from exact samples and check the string
    // formatting under different cultures and format specifiers.
    var x = new double[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    var y = new double[] { 1, 6, 17, 34, 57, 86, 121, 162, 209, 262, 321 };
    PolynomialRegression poly = new PolynomialLeastSquares() { Degree = 2 }.Learn(x, y);

    var enUs = CultureInfo.GetCultureInfo("en-US");
    var ptBr = CultureInfo.GetCultureInfo("pt-BR");

    // Parameterless ToString follows the current culture's decimal separator.
    string sep = CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator;
    string defaultExpected = "y(x) = 3x^2 + 1.99999999999998x^1 + 1.00000000000005".Replace(".", sep);
    Assert.AreEqual(defaultExpected, poly.ToString());

    // Explicit cultures: en-US uses '.', pt-BR uses ','.
    Assert.AreEqual("y(x) = 3x^2 + 1.99999999999998x^1 + 1.00000000000005", poly.ToString(null, CultureInfo.GetCultureInfo("en-US")));
    Assert.AreEqual("y(x) = 3.0x^2 + 2.0x^1 + 1.0", poly.ToString("N1", enUs));
    Assert.AreEqual("y(x) = 3,00x^2 + 2,00x^1 + 1,00", poly.ToString("N2", ptBr));
}
// Regression-curve plot over the supplied data: hides the per-point glyphs
// and stores the polynomial (of the requested order) fitted to the projected
// (x, y) values in Equation.
public RegreesionPlot(IEnumerable <T> data, Func <T, double> xValue, Func <T, double> yValue, int order) : base(data, xValue, yValue)
{
    // Only the fitted curve is shown; individual points are hidden.
    WithGlyph(Glyphs.Hide);
    // Fit is the extension method that regresses yValue against xValue.
    Equation = Data.Fit(xValue, yValue, order);
}
static void Main(string [] args)
{
    // Fit a quadratic to five noisy samples of a humped curve.
    var sampleXs = new double[] { 0, 1, 2, 3, 4 };
    var sampleYs = new double[] { 1.0, 1.4, 1.6, 1.3, 0.9 };
    var poly = new PolynomialRegression(new Vector(sampleXs), new Vector(sampleYs), 2);

    // Print the fitted value at x = 0.0, 0.5, ..., 4.5.
    Console.WriteLine("{0,6}{1,9}", "x", "y");
    for (int step = 0; step < 10; step++)
    {
        double x = step * 0.5;
        double y = poly.Fit(x);
        Console.WriteLine("{0,6:F2}{1,9:F4}", x, y);
    }
}
private void button1_Click(object sender, EventArgs e)
{
    // Fit a quadratic to sample data and plot both the raw points and the
    // fitted curve. (Cleanup: removed the large block of commented-out
    // OrdinaryLeastSquares code that was dead weight in the handler.)
    double[] inputs = { 80, 60, 10, 20, 30 };
    double[] outputs = { 20, 40, 30, 50, 60 };

    // Learn a 2nd-degree polynomial mapping inputs -> outputs.
    var ls = new PolynomialLeastSquares() { Degree = 2 };
    PolynomialRegression poly = ls.Learn(inputs, outputs);

    // Log the learned polynomial and its coefficients.
    string str = poly.ToString("N1");
    Console.WriteLine(str);
    double[] weights = poly.Weights;    // x^2 and x^1 coefficients
    double intercept = poly.Intercept;  // constant term
    Console.WriteLine("{0},{1}", weights[0], intercept);

    // MSE of the fit on the training points.
    double[] pred = poly.Transform(inputs);
    double error = new SquareLoss(outputs).Loss(pred);
    Console.WriteLine(error);

    // Sample the fitted curve at x = 5, 10, ..., 100 for plotting.
    double[] x = new double[20];
    double[] y = new double[20];
    for (int i = 0; i < 20; i++)
    {
        x[i] = 5 + (i * 5);
        y[i] = weights[0] * x[i] * x[i] + weights[1] * x[i] + intercept;
    }

    // Configure the chart: raw points in blue, fitted curve in red.
    GraphPane myPane = this.zedGraphControl1.GraphPane;
    myPane.CurveList.Clear();
    myPane.Title.IsVisible = false;
    myPane.Chart.Border.IsVisible = false;
    myPane.XAxis.Title.Text = "X";
    myPane.YAxis.Title.Text = "Y";
    myPane.XAxis.IsAxisSegmentVisible = true;
    myPane.YAxis.IsAxisSegmentVisible = true;
    myPane.XAxis.MinorGrid.IsVisible = false;
    myPane.YAxis.MinorGrid.IsVisible = false;
    myPane.XAxis.MinorTic.IsOpposite = false;
    myPane.XAxis.MajorTic.IsOpposite = false;
    myPane.YAxis.MinorTic.IsOpposite = false;
    myPane.YAxis.MajorTic.IsOpposite = false;
    myPane.XAxis.Scale.MinGrace = 0;
    myPane.XAxis.Scale.MaxGrace = 0;
    myPane.XAxis.Scale.Max = 90;
    myPane.YAxis.Scale.MinGrace = 0;
    myPane.YAxis.Scale.MaxGrace = 0;
    myPane.YAxis.Scale.Max = 70;

    PointPairList list1 = new PointPairList(inputs, outputs);
    PointPairList list2 = new PointPairList(x, y);

    // Raw samples: blue circles, no connecting line.
    LineItem myCurve = myPane.AddCurve("points", list1, Color.Blue, SymbolType.Circle);
    myCurve.Line.IsVisible = false;
    myCurve.Symbol.Fill = new Fill(Color.Blue);

    // Fitted curve: red anti-aliased line, no symbols.
    myCurve = myPane.AddCurve("Simple", list2, Color.Red, SymbolType.Circle);
    myCurve.Line.IsAntiAlias = true;
    myCurve.Line.IsVisible = true;
    myCurve.Symbol.IsVisible = false;

    this.zedGraphControl1.AxisChange();
    this.zedGraphControl1.Invalidate();
}
// Wires up the home page: builds the chart series for axial/radial
// stress-strain data, the smoothed curve, the tangent line and generated
// data, then registers the UI commands for navigation, data entry,
// smoothing, tangent drawing and synthetic data generation.
public HomePageViewModel(IRegionManager regionManager)
{
    _regionManager = regionManager;
    Data = new ObservableCollection <ObservablePoint>();

    // Raw axial measurements: dashed black polyline with point labels.
    _axialData = new HashSet <ObservablePoint>();
    _axialDataPlot = new LineSeries
    {
        Title = "Axial",
        Fill = Brushes.Transparent,
        Stroke = Brushes.Black,
        LineSmoothness = 0,
        StrokeDashArray = new DoubleCollection { 2 },
        Values = new ChartValues <ObservablePoint>(),
        DataLabels = true
    };

    // Raw radial measurements: dashed orange polyline.
    _radialData = new HashSet <ObservablePoint>();
    _radialDataPlot = new LineSeries
    {
        Title = "Radial Data",
        Fill = Brushes.Transparent,
        Stroke = Brushes.Orange,
        LineSmoothness = 0,
        StrokeDashArray = new DoubleCollection { 2 },
        Values = new ChartValues <ObservablePoint>()
    };

    _radialCurvePlot = new LineSeries { Title = "Radial Curve" };

    // Smoothed axial curve (filled by SmoothData): slightly smoothed red line.
    _axialCurvePlot = new LineSeries
    {
        Title = "Axial",
        Fill = Brushes.Transparent,
        Stroke = Brushes.Red,
        LineSmoothness = 0.2,
        Values = new ChartValues <ObservablePoint>()
    };

    // Tangent line (filled by DrawTangentCommand): green.
    _tgPlot = new LineSeries
    {
        Title = "Tg",
        Fill = Brushes.Transparent,
        Stroke = Brushes.Green,
        Values = new ChartValues <ObservablePoint>()
    };

    // Synthetic points (filled by GenerateDataCommand): magenta, tiny glyphs.
    _generatedCurveDataPlot = new LineSeries
    {
        Title = "Generated Data",
        Fill = Brushes.Transparent,
        Stroke = Brushes.Magenta,
        PointGeometrySize = 0.1,
        Values = new ChartValues <ObservablePoint>()
    };

    // All series shown on the main stress-strain chart.
    StressStrainCurveSeries = new SeriesCollection();
    StressStrainCurveSeries.Add(_axialDataPlot);
    StressStrainCurveSeries.Add(_radialDataPlot);
    StressStrainCurveSeries.Add(_axialCurvePlot);
    StressStrainCurveSeries.Add(_tgPlot);
    StressStrainCurveSeries.Add(_generatedCurveDataPlot);

    //Commands
    // Navigation between the three chart pages.
    StressStrainCurveModeCommand = new DelegateCommand(() => { _regionManager.RequestNavigate(Configurations.Regions.ChartRegion, nameof(StressStrainCurveChartPage)); });
    PoissonsRatioStressCurveModeCommand = new DelegateCommand(() => { _regionManager.RequestNavigate(Configurations.Regions.ChartRegion, nameof(PoissonsRatioStressCurveChartPage)); });
    ModulusStressCurveModeCommand = new DelegateCommand(() => { _regionManager.RequestNavigate(Configurations.Regions.ChartRegion, nameof(ModulusStressCurveChartPage)); });

    // Data entry: typed values, edits from a clicked chart point, or the
    // cursor position on double click.
    AddDataCommand = new DelegateCommand(() => AddData(_inputMicrostrain, _inputStress));
    EditDataCommand = new DelegateCommand <ChartPoint>(p => { EditMicrostrain = p.X; EditStress = p.Y; });
    MouseDoubleClickCommand = new DelegateCommand(() => { AddData(_currentXData, _currentYData); });
    SmoothDataCommand = new DelegateCommand(async() => await SmoothData());

    // Reset clears every data set and every plotted series.
    ResetCommand = new DelegateCommand(() => { Data.Clear(); _axialData.Clear(); _axialDataPlot.Values.Clear(); _axialCurvePlot.Values.Clear(); _tgPlot.Values.Clear(); _generatedCurveDataPlot.Values.Clear(); _radialData.Clear(); _radialDataPlot.Values.Clear(); });

    // Toggle the tangent: fit a polynomial to the axial data and draw the
    // tangent from the origin out to the right-most data point.
    // NOTE(review): assumes the bool? argument is never null — the (bool)v
    // cast would throw otherwise; confirm the view always passes a value.
    DrawTangentCommand = new DelegateCommand <bool?>(v =>
    {
        _tgPlot.Values.Clear();
        _tgPlot.Visibility = (bool)v ? Visibility.Visible : Visibility.Hidden;
        if ((bool)v)
        {
            var x = _axialData.OrderBy(d => d.X).Select(d => d.X).ToArray();
            var y = _axialData.OrderBy(d => d.X).Select(d => d.Y).ToArray();
            var t = PolynomialRegression.LeastSquartPolynonial(x, y, PolynomDegree);
            var tgY = Tangent.Y(t, _axialData.OrderBy(p => p.X).Last().X);
            _tgPlot.Values.Add(new ObservablePoint { X = 0, Y = 0 });
            _tgPlot.Values.Add(new ObservablePoint { X = _axialData.OrderBy(p => p.X).Last().X, Y = tgY });
        }
    });

    // Fit a polynomial to the axial data, then (off the UI thread) generate
    // 50 noisy synthetic points along the fitted curve and plot them.
    GenerateDataCommand = new DelegateCommand(() =>
    {
        _generatedCurveDataPlot.Values.Clear();
        var x = _axialData.OrderBy(d => d.X).Select(d => d.X).ToArray();
        var y = _axialData.OrderBy(d => d.X).Select(d => d.Y).ToArray();
        var t = PolynomialRegression.LeastSquartPolynonial(x, y, PolynomDegree);
        Task.Run(() =>
        {
            var generatedData = PolynomialRegression.Generate(t, 50, _axialData.OrderBy(d => d.X).Last().X, GeneratedDataNoise);
            var strain = generatedData.Item1;
            var stress = generatedData.Item2;
            for (int i = 0; i < strain.Length; i++)
            {
                _generatedCurveDataPlot.Values.Add(new ObservablePoint { X = strain[i], Y = stress[i] });
            }
        });
    });
}
public RecognitionState Recognition(double[] inputs, double[] outputs)
{
    // Classifies the trend of a series via a linear fit (slope) and a
    // quadratic fit (shape and instantaneous speed at the last point).
    RecognitionState ret = new RecognitionState();
    if (inputs.Length <= 5)
        return ret; // too few points to classify; all fields keep defaults

    const double TAN45 = 1.0;       // tan(45°): steep/rapid threshold
    const double TAN15 = 0.2679492; // tan(15°): gentle/slow threshold

    /* First-order fit: overall slope of the series. */
    SimpleLinearRegression regression = this.ols.Learn(inputs, outputs, null);
    double k = regression.Slope;
    double absK = Math.Abs(k);
    ret.Slope = absK > TAN45 ? SlopeState.Steep
              : absK > TAN15 ? SlopeState.moderate
              : SlopeState.gentle;

    /* Second-order fit: the sign of the curvature together with the overall
       slope determines the shape. Shape is left at its default if k or a is 0. */
    PolynomialRegression poly = this.pls.Learn(inputs, outputs, null);
    double a = poly.Weights[0];
    double b = poly.Weights[1];
    if (k > 0 && a > 0) ret.Shape = ShapeState.Rise;
    else if (k > 0 && a < 0) ret.Shape = ShapeState.FallAfterRise;
    else if (k < 0 && a < 0) ret.Shape = ShapeState.Fall;
    else if (k < 0 && a > 0) ret.Shape = ShapeState.RiseAfterFall;

    /* Derivative of the quadratic at the most recent input gives the speed. */
    double last = inputs[inputs.Length - 1];
    double s = 2 * a * last + b;
    double absS = Math.Abs(s);
    ret.Speed = absS > TAN45 ? SpeedState.Rapid
              : absS > TAN15 ? SpeedState.Steady
              : SpeedState.Slow;

    /* Optionally plot the raw series and both fitted curves. */
    if (this.showForm != null)
    {
        double[] linearFit = regression.Transform(inputs);
        double[] polyFit = poly.Transform(inputs);
        this.showForm.ShowGraph(inputs, outputs, inputs, linearFit, inputs, polyFit);
    }

    Console.WriteLine("k={0},a={1},b={2}", k, a, b);
    return ret;
}
public double[] Normalize(double[] input)
{
    // Fit a polynomial over the first input.Length integer abscissae,
    // then evaluate it over the normalization grid.
    var abscissae = integers.Submatrix(0, input.Length - 1);
    var pr = PolynomialRegression.FromData(degree, abscissae, input);
    return pr.Compute(GetInputArray(input.Length, normalizationLength));
}