public void BivariatePolynomialRegression() {

    // Do a set of polynomial regression fits.
    // Make sure not only that the fit parameters are what they should be, but that their variances/covariances are as claimed.

    Random rng = new Random(271828);

    // define the polynomial coefficients
    double[] a = new double[] { 0.0, -1.0, 2.0, -3.0 };

    // keep track of the sample of returned fit parameters
    MultivariateSample A = new MultivariateSample(a.Length);

    // also keep track of returned covariance estimates;
    // since these vary slightly from fit to fit, we will average them
    SymmetricMatrix C = new SymmetricMatrix(a.Length);

    // also keep track of test statistics
    Sample F = new Sample();

    // do 100 fits
    for (int k = 0; k < 100; k++) {

        // we should be able to draw x's from any distribution; noise should be drawn from a normal distribution
        Distribution xd = new CauchyDistribution();
        Distribution nd = new NormalDistribution(0.0, 4.0);

        // generate a synthetic data set
        BivariateSample s = new BivariateSample();
        for (int j = 0; j < 20; j++) {
            double x = xd.GetRandomValue(rng);
            double y = nd.GetRandomValue(rng);
            for (int i = 0; i < a.Length; i++) {
                y += a[i] * MoreMath.Pow(x, i);
            }
            s.Add(x, y);
        }

        // do the regression
        FitResult r = s.PolynomialRegression(a.Length - 1);
        ColumnVector ps = r.Parameters;
        //Console.WriteLine("{0} {1} {2}", ps[0], ps[1], ps[2]);

        // record the best fit parameters
        A.Add(ps);

        // record the estimated covariances
        C += r.CovarianceMatrix;

        // record the fit statistic
        F.Add(r.GoodnessOfFit.Statistic);
        //Console.WriteLine("F={0}", r.GoodnessOfFit.Statistic);

    }

    C = (1.0 / A.Count) * C; // allow matrix division by real numbers

    // check that the mean parameter estimates are what they should be: the underlying population parameters
    for (int i = 0; i < A.Dimension; i++) {
        Console.WriteLine("{0} {1}", A.Column(i).PopulationMean, a[i]);
        Assert.IsTrue(A.Column(i).PopulationMean.ConfidenceInterval(0.95).ClosedContains(a[i]));
    }

    // check that the parameter covariances are what they should be: the reported covariance estimates
    for (int i = 0; i < A.Dimension; i++) {
        for (int j = i; j < A.Dimension; j++) {
            Console.WriteLine("{0} {1} {2} {3}", i, j, C[i, j], A.TwoColumns(i, j).PopulationCovariance);
            Assert.IsTrue(A.TwoColumns(i, j).PopulationCovariance.ConfidenceInterval(0.95).ClosedContains(C[i, j]));
        }
    }

    // check that F is distributed as it should be
    //Console.WriteLine(fs.KolmogorovSmirnovTest(new FisherDistribution(2, 48)).LeftProbability);

}
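// A minimal usage sketch, not part of the original test suite: fit a quadratic
// y = 1 + 2 x + 3 x^2 sampled with a little noise and read back the fitted coefficients,
// which (as in the test above) are ordered by increasing power. The point count, noise
// level, tolerance, and method name here are illustrative assumptions.
public void PolynomialRegressionUsageSketch() {

    Random rng = new Random(1);
    NormalDistribution noise = new NormalDistribution(0.0, 0.1);

    // sample the quadratic at 50 points with small additive noise
    BivariateSample s = new BivariateSample();
    for (int i = 0; i < 50; i++) {
        double x = -2.0 + 4.0 * rng.NextDouble();
        double y = 1.0 + 2.0 * x + 3.0 * x * x + noise.GetRandomValue(rng);
        s.Add(x, y);
    }

    // fit a degree-2 polynomial and check the recovered coefficients (loose tolerance)
    FitResult r = s.PolynomialRegression(2);
    Assert.IsTrue(Math.Abs(r.Parameters[0] - 1.0) < 0.2);
    Assert.IsTrue(Math.Abs(r.Parameters[1] - 2.0) < 0.2);
    Assert.IsTrue(Math.Abs(r.Parameters[2] - 3.0) < 0.2);

}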
public void CauchyStudentAgreement() {

    StudentDistribution S = new StudentDistribution(1);
    CauchyDistribution C = new CauchyDistribution();

    // don't compare moments directly, because NaN != NaN

    foreach (double P in probabilities) {
        double xS = S.InverseLeftProbability(P);
        double xC = C.InverseLeftProbability(P);
        Console.WriteLine("{0} {1} {2}", P, xS, xC);
        Assert.IsTrue(TestUtilities.IsNearlyEqual(xS, xC));
        Assert.IsTrue(TestUtilities.IsNearlyEqual(S.ProbabilityDensity(xS), C.ProbabilityDensity(xC)));
    }

}
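// A minimal sketch, not part of the original test suite, of why the two distributions above
// should agree: a Student t distribution with one degree of freedom has the same closed-form
// density as the standard Cauchy distribution, p(x) = 1 / (pi (1 + x^2)). The sample points
// and method name are illustrative assumptions.
public void CauchyStudentClosedFormSketch() {
    StudentDistribution S = new StudentDistribution(1);
    CauchyDistribution C = new CauchyDistribution();
    foreach (double x in new double[] { -3.0, -1.0, 0.0, 0.5, 2.0 }) {
        // closed-form Cauchy / t(1) density
        double p = 1.0 / (Math.PI * (1.0 + x * x));
        Assert.IsTrue(TestUtilities.IsNearlyEqual(S.ProbabilityDensity(x), p));
        Assert.IsTrue(TestUtilities.IsNearlyEqual(C.ProbabilityDensity(x), p));
    }
}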
public void SpearmanNullDistributionTest() {

    // pick independent distributions for x and y, which needn't be normal and needn't be related
    Distribution xDistribution = new UniformDistribution();
    Distribution yDistribution = new CauchyDistribution();
    Random rng = new Random(1);

    // generate bivariate samples of various sizes
    foreach (int n in TestUtilities.GenerateIntegerValues(4, 64, 8)) {

        Sample testStatistics = new Sample();
        Distribution testDistribution = null;

        for (int i = 0; i < 128; i++) {
            BivariateSample sample = new BivariateSample();
            for (int j = 0; j < n; j++) {
                sample.Add(xDistribution.GetRandomValue(rng), yDistribution.GetRandomValue(rng));
            }
            TestResult result = sample.SpearmanRhoTest();
            testStatistics.Add(result.Statistic);
            testDistribution = result.Distribution;
        }

        TestResult r2 = testStatistics.KuiperTest(testDistribution);
        Console.WriteLine("n={0} P={1}", n, r2.LeftProbability);
        Assert.IsTrue(r2.RightProbability > 0.05);

        Assert.IsTrue(testStatistics.PopulationMean.ConfidenceInterval(0.99).ClosedContains(testDistribution.Mean));
        Assert.IsTrue(testStatistics.PopulationVariance.ConfidenceInterval(0.99).ClosedContains(testDistribution.Variance));

    }

}
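// A minimal sketch, not part of the original test suite, of the rank statistic that
// SpearmanRhoTest is presumably built around: for paired data with no ties, Spearman's rho
// is the Pearson correlation of the ranks, which reduces to
//   rho = 1 - 6 * sum(d_i^2) / (n (n^2 - 1)),
// where d_i is the difference between the rank of x_i and the rank of y_i.
// The helper names here are illustrative assumptions.
public static double SpearmanRho(double[] x, double[] y) {
    int n = x.Length;
    int[] rx = Ranks(x);
    int[] ry = Ranks(y);
    double sum = 0.0;
    for (int i = 0; i < n; i++) {
        double d = rx[i] - ry[i];
        sum += d * d;
    }
    return 1.0 - 6.0 * sum / (n * (n * n - 1.0));
}

// Helper: 1-based ranks, assuming no ties, which holds with probability one for continuous draws.
private static int[] Ranks(double[] values) {
    int[] ranks = new int[values.Length];
    for (int i = 0; i < values.Length; i++) {
        int rank = 1;
        for (int j = 0; j < values.Length; j++) {
            if (values[j] < values[i]) rank++;
        }
        ranks[i] = rank;
    }
    return ranks;
}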
public void CauchyFWHM() {
    // Check that FWHM really is the full width at half maximum.
    CauchyDistribution D = new CauchyDistribution(1.0, 2.0);
    double p = D.ProbabilityDensity(D.Median);
    Assert.IsTrue(TestUtilities.IsNearlyEqual(D.ProbabilityDensity(D.Median - D.FullWithAtHalfMaximum / 2.0), p / 2.0));
    Assert.IsTrue(TestUtilities.IsNearlyEqual(D.ProbabilityDensity(D.Median + D.FullWithAtHalfMaximum / 2.0), p / 2.0));
}
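// A minimal sketch, not part of the original test suite, assuming the second constructor
// argument of CauchyDistribution is the scale parameter gamma: the Cauchy density
//   p(x) = (1 / (pi gamma)) * gamma^2 / ((x - x0)^2 + gamma^2)
// falls to half its central value at x = x0 +/- gamma, so the FWHM should equal 2 gamma.
// The method name is an illustrative assumption.
public void CauchyFWHMScaleSketch() {
    double gamma = 2.0;
    CauchyDistribution D = new CauchyDistribution(1.0, gamma);
    Assert.IsTrue(TestUtilities.IsNearlyEqual(D.FullWithAtHalfMaximum, 2.0 * gamma));
}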
public void TimeCauchyGenerators() {

    Random rng = new Random(1);
    IDeviateGenerator nRng = new CauchyGenerator();
    Distribution d = new CauchyDistribution();
    Sample sample = new Sample();

    Stopwatch timer = Stopwatch.StartNew();
    double sum = 0.0;
    for (int i = 0; i < 10000000; i++) {
        sum += nRng.GetNext(rng);
        //sum += d.InverseLeftProbability(rng.NextDouble());
        //sample.Add(nRng.GetNext(rng));
    }
    timer.Stop();

    //Console.WriteLine(sample.KolmogorovSmirnovTest(d).RightProbability);
    Console.WriteLine(sum);
    Console.WriteLine(timer.ElapsedMilliseconds);

}
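// A minimal sketch, not part of the original test suite, of the inverse-transform alternative
// that the commented-out line above times via InverseLeftProbability: the standard Cauchy
// quantile function is x = tan(pi (P - 1/2)), so a deviate can be generated directly from a
// uniform deviate. The helper name is an illustrative assumption.
public static double CauchyDeviateByInversion(Random rng) {
    double P = rng.NextDouble();
    return Math.Tan(Math.PI * (P - 0.5));
}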