public void Bug6162() {
    // Regression test for bug 6162.
    // When UncertainMeasurementSample.FitToPolynomial used Cholesky inversion of (A^T A), the
    // inversion would fail when roundoff errors made the matrix non-positive-definite. We have
    // now changed to QR decomposition, which is more robust.

    // real data
    double[] X_axis = new double[] { 40270.65625, 40270.6569444444, 40270.6576388888, 40270.6583333332, 40270.6590277776,
        40270.659722222, 40270.6604166669, 40270.6611111113, 40270.6618055557, 40270.6625000001 };
    double[] Y_axis = new double[] { 246.824996948242, 246.850006103516, 245.875, 246.225006103516, 246.975006103516,
        247.024993896484, 246.949996948242, 246.875, 247.5, 247.100006103516 };

    UncertainMeasurementSample DataSet = new UncertainMeasurementSample();
    // use the array length instead of a hard-coded count so the data can be extended safely
    for (int i = 0; i < X_axis.Length; i++) {
        DataSet.Add(X_axis[i], Y_axis[i], 1);
    }

    // This is the call under test: with Cholesky inversion it threw on this data set.
    UncertainMeasurementFitResult DataFit = DataSet.FitToPolynomial(3);

    // Cross-check: the same fit via BivariateSample.PolynomialRegression should also succeed.
    BivariateSample bs = new BivariateSample();
    for (int i = 0; i < X_axis.Length; i++) {
        bs.Add(X_axis[i], Y_axis[i]);
    }
    PolynomialRegressionResult bsFit = bs.PolynomialRegression(3);
    foreach (Parameter p in bsFit.Parameters) {
        Console.WriteLine(p);
    }
}
public void BivariateLinearPolynomialRegressionAgreement() {
    // A degree-1 polynomial regression is mathematically the same model as a linear
    // regression, so both fits must report identical parameters and covariances.
    double[] xs = { 0.0, 3.0, 1.0, 4.0, 2.0 };
    double[] ys = { 5.0, 6.0, 7.0, 8.0, 9.0 };

    BivariateSample B = new BivariateSample();
    for (int i = 0; i < xs.Length; i++) {
        B.Add(xs[i], ys[i]);
    }

    GeneralLinearRegressionResult PR = B.PolynomialRegression(1);
    GeneralLinearRegressionResult LR = B.LinearRegression();

    // parameters and their covariance matrix must agree between the two fits
    Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.Parameters.ValuesVector, LR.Parameters.ValuesVector));
    Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.Parameters.CovarianceMatrix, LR.Parameters.CovarianceMatrix));
}
public void BivariateLinearPolynomialRegressionAgreement() {
    // A degree-1 polynomial regression is mathematically the same model as a linear
    // regression, so both fits must report identical parameters, covariances, and
    // goodness-of-fit statistics.
    double[] xs = { 0.0, 3.0, 1.0, 4.0, 2.0 };
    double[] ys = { 5.0, 6.0, 7.0, 8.0, 9.0 };

    BivariateSample B = new BivariateSample();
    for (int i = 0; i < xs.Length; i++) {
        B.Add(xs[i], ys[i]);
    }

    FitResult PR = B.PolynomialRegression(1);
    FitResult LR = B.LinearRegression();

    // every reported quantity must agree between the two fits
    Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.Parameters, LR.Parameters));
    Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.CovarianceMatrix, LR.CovarianceMatrix));
    Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.GoodnessOfFit.Statistic, LR.GoodnessOfFit.Statistic));
}
public void BivariatePolynomialRegressionCovariance() {
    // Do a set of polynomial regression fits and verify not only that the mean fit
    // parameters match the underlying population parameters, but also that the sample
    // variances/covariances of the fit parameters match the claimed covariance estimates.

    Random rng = new Random(271828);

    // underlying cubic polynomial coefficients
    double[] a = new double[] { 0.0, -1.0, 2.0, -3.0 };

    // We should be able to draw x's from any distribution; noise should be drawn from a
    // normal distribution. These objects take no per-iteration state (rng is supplied at
    // draw time), so construct them once instead of on every fit iteration.
    ContinuousDistribution xd = new CauchyDistribution();
    ContinuousDistribution nd = new NormalDistribution(0.0, 4.0);

    // keep track of a sample of the returned fit parameters
    MultivariateSample A = new MultivariateSample(a.Length);
    // also keep track of returned covariance estimates; since these vary slightly
    // from fit to fit, we will average them
    SymmetricMatrix C = new SymmetricMatrix(a.Length);
    // also keep track of test statistics
    Sample F = new Sample();

    // do 100 fits
    for (int k = 0; k < 100; k++) {

        // generate a synthetic data set
        BivariateSample s = new BivariateSample();
        for (int j = 0; j < 20; j++) {
            double x = xd.GetRandomValue(rng);
            double y = nd.GetRandomValue(rng);
            for (int i = 0; i < a.Length; i++) {
                y += a[i] * MoreMath.Pow(x, i);
            }
            s.Add(x, y);
        }

        // do the regression
        PolynomialRegressionResult r = s.PolynomialRegression(a.Length - 1);

        // record best-fit parameters
        A.Add(r.Parameters.ValuesVector);
        // record estimated covariances
        C += r.Parameters.CovarianceMatrix;
        // record the fit statistic
        F.Add(r.F.Statistic.Value);
    }
    C = (1.0 / A.Count) * C; // allow matrix division by real numbers

    // check that mean parameter estimates are what they should be:
    // the underlying population parameters
    for (int i = 0; i < A.Dimension; i++) {
        Assert.IsTrue(A.Column(i).PopulationMean.ConfidenceInterval(0.95).ClosedContains(a[i]));
    }

    // check that parameter covariances are what they should be:
    // the reported covariance estimates
    for (int i = 0; i < A.Dimension; i++) {
        for (int j = i; j < A.Dimension; j++) {
            Assert.IsTrue(A.TwoColumns(i, j).PopulationCovariance.ConfidenceInterval(0.95).ClosedContains(C[i, j]));
        }
    }

    // check that F is distributed as it should be
    //Console.WriteLine(fs.KolmogorovSmirnovTest(new FisherDistribution(2, 48)).LeftProbability);
}