Example #1
        public void MultivariateLinearRegressionNullDistribution()
        {
            int d = 4;

            Random             rng = new Random(1);
            NormalDistribution n   = new NormalDistribution();

            Sample fs = new Sample();

            for (int i = 0; i < 64; i++)
            {
                MultivariateSample ms = new MultivariateSample(d);
                for (int j = 0; j < 8; j++)
                {
                    double[] x = new double[d];
                    for (int k = 0; k < d; k++)
                    {
                        x[k] = n.GetRandomValue(rng);
                    }
                    ms.Add(x);
                }
                FitResult r = ms.LinearRegression(0);
                fs.Add(r.GoodnessOfFit.Statistic);
            }

            // conduct a KS test to check that F follows the expected distribution
            TestResult ks = fs.KolmogorovSmirnovTest(new FisherDistribution(3, 4));

            Assert.IsTrue(ks.LeftProbability < 0.95);
        }
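A note on the FisherDistribution(3, 4) reference used above (standard F-statistic bookkeeping, assumed here rather than quoted from the library docs): regressing column 0 on the other three columns plus an intercept estimates 4 parameters from 8 points, which gives 3 numerator and 4 denominator degrees of freedom.

            // Degrees-of-freedom bookkeeping for the test above; assumes the usual
            // F = (explained SS / (p - 1)) / (residual SS / (n - p)) convention.
            int d = 4;                  // columns in each MultivariateSample
            int n = 8;                  // rows added before each regression
            int p = d;                  // 3 slopes plus 1 intercept when regressing column 0 on the rest
            Console.WriteLine("F({0}, {1})", p - 1, n - p);   // F(3, 4), matching FisherDistribution(3, 4)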
Example #2
        public void MultivariateLinearRegressionAgreement()
        {
            Random rng = new Random(1);

            MultivariateSample SA = new MultivariateSample(2);

            for (int i = 0; i < 10; i++)
            {
                SA.Add(rng.NextDouble(), rng.NextDouble());
            }
            FitResult       RA = SA.LinearRegression(0);
            ColumnVector    PA = RA.Parameters;
            SymmetricMatrix CA = RA.CovarianceMatrix;

            MultivariateSample SB = SA.Columns(1, 0);
            FitResult          RB = SB.LinearRegression(1);
            ColumnVector       PB = RB.Parameters;
            SymmetricMatrix    CB = RB.CovarianceMatrix;

            Assert.IsTrue(TestUtilities.IsNearlyEqual(PA[0], PB[1]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(PA[1], PB[0]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA[0, 0], CB[1, 1]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA[0, 1], CB[1, 0]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA[1, 1], CB[0, 0]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(RA.GoodnessOfFit.Statistic, RB.GoodnessOfFit.Statistic));

            BivariateSample SC = SA.TwoColumns(1, 0);
            FitResult       RC = SC.LinearRegression();
            ColumnVector    PC = RC.Parameters;
            SymmetricMatrix CC = RC.CovarianceMatrix;

            Assert.IsTrue(TestUtilities.IsNearlyEqual(PA, PC));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA, CC));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(RA.GoodnessOfFit.Statistic, RC.GoodnessOfFit.Statistic));
        }
Example #3
        public void NormalFitCovariances()
        {
            NormalDistribution N = new NormalDistribution(-1.0, 2.0);

            // Create a bivariate sample to hold our fitted best mu and sigma values
            // so we can determine their covariance as well as their means and variances
            BivariateSample    parameters  = new BivariateSample();
            MultivariateSample covariances = new MultivariateSample(3);

            // A bunch of times, create a normal sample
            for (int i = 0; i < 128; i++)
            {
                // We use small samples so the variation in mu and sigma will be more substantial.
                Sample s = TestUtilities.CreateSample(N, 8, i);

                // Fit each sample to a normal distribution
                FitResult fit = NormalDistribution.FitToSample(s);

                // and record the mu and sigma values from the fit into our bivariate sample
                parameters.Add(fit.Parameter(0).Value, fit.Parameter(1).Value);

                // also record the claimed covariances among these parameters
                covariances.Add(fit.Covariance(0, 0), fit.Covariance(1, 1), fit.Covariance(0, 1));
            }

            // the mean fit values should agree with the population distribution
            Assert.IsTrue(parameters.X.PopulationMean.ConfidenceInterval(0.95).ClosedContains(N.Mean));
            Assert.IsTrue(parameters.Y.PopulationMean.ConfidenceInterval(0.95).ClosedContains(N.StandardDeviation));

            // but also the covariances of those fit values should agree with the claimed covariances
            Assert.IsTrue(parameters.X.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(covariances.Column(0).Mean));
            Assert.IsTrue(parameters.Y.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(covariances.Column(1).Mean));
            Assert.IsTrue(parameters.PopulationCovariance.ConfidenceInterval(0.95).ClosedContains(covariances.Column(2).Mean));
        }
Example #4
        public void Bug7213()
        {
            Sample s = new Sample();

            s.Add(0.00590056, 0.00654598, 0.0066506, 0.00679065, 0.008826);
            FitResult r = WeibullDistribution.FitToSample(s);
        }
Example #5
        public void WaldFit()
        {
            WaldDistribution wald = new WaldDistribution(3.5, 2.5);

            BivariateSample    parameters = new BivariateSample();
            MultivariateSample variances  = new MultivariateSample(3);

            for (int i = 0; i < 128; i++)
            {
                Sample s = SampleTest.CreateSample(wald, 16, i);

                FitResult r = WaldDistribution.FitToSample(s);
                parameters.Add(r.Parameters[0], r.Parameters[1]);
                variances.Add(r.Covariance(0, 0), r.Covariance(1, 1), r.Covariance(0, 1));

                Assert.IsTrue(r.GoodnessOfFit.Probability > 0.01);
            }

            Assert.IsTrue(parameters.X.PopulationMean.ConfidenceInterval(0.99).ClosedContains(wald.Mean));
            Assert.IsTrue(parameters.Y.PopulationMean.ConfidenceInterval(0.99).ClosedContains(wald.Shape));

            Assert.IsTrue(parameters.X.PopulationVariance.ConfidenceInterval(0.99).ClosedContains(variances.Column(0).Median));
            Assert.IsTrue(parameters.Y.PopulationVariance.ConfidenceInterval(0.99).ClosedContains(variances.Column(1).Median));
            Assert.IsTrue(parameters.PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(variances.Column(2).Median));
        }
Example #6
        public void GumbelFit()
        {
            GumbelDistribution d = new GumbelDistribution(-1.0, 2.0);

            MultivariateSample parameters = new MultivariateSample(2);
            MultivariateSample variances  = new MultivariateSample(3);

            // Do a bunch of fits, record reported parameters and variances
            for (int i = 0; i < 32; i++)
            {
                Sample s = SampleTest.CreateSample(d, 64, i);

                FitResult r = GumbelDistribution.FitToSample(s);
                parameters.Add(r.Parameters);
                variances.Add(r.Covariance(0, 0), r.Covariance(1, 1), r.Covariance(0, 1));

                Assert.IsTrue(r.GoodnessOfFit.Probability > 0.01);
            }

            // The reported parameters should agree with the underlying parameters
            Assert.IsTrue(parameters.Column(0).PopulationMean.ConfidenceInterval(0.99).ClosedContains(d.Location));
            Assert.IsTrue(parameters.Column(1).PopulationMean.ConfidenceInterval(0.99).ClosedContains(d.Scale));

            // The reported covariances should agree with the observed covariances
            Assert.IsTrue(parameters.Column(0).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(variances.Column(0).Mean));
            Assert.IsTrue(parameters.Column(1).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(variances.Column(1).Mean));
            Assert.IsTrue(parameters.TwoColumns(0, 1).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(variances.Column(2).Mean));
        }
Example #7
        public void FitDataToProportionalityTest()
        {
            Interval r = Interval.FromEndpoints(0.0, 0.1);
            Func <double, double> fv = delegate(double x) {
                return(0.5 * x);
            };
            Func <double, double> fu = delegate(double x) {
                return(0.02);
            };
            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 20);

            // fit to proportionality
            FitResult prop = set.FitToProportionality();

            Assert.IsTrue(prop.Dimension == 1);
            Assert.IsTrue(prop.Parameter(0).ConfidenceInterval(0.95).ClosedContains(0.5));
            Assert.IsTrue(prop.GoodnessOfFit.LeftProbability < 0.95);

            // fit to line
            FitResult line = set.FitToLine();

            Assert.IsTrue(line.Dimension == 2);

            // line's intercept should be compatible with zero and slope with proportionality constant
            Assert.IsTrue(line.Parameter(0).ConfidenceInterval(0.95).ClosedContains(0.0));
            Assert.IsTrue(line.Parameter(1).ConfidenceInterval(0.95).ClosedContains(prop.Parameter(0).Value));

            // the fit should be better, but not too much better
            Assert.IsTrue(line.GoodnessOfFit.Statistic < prop.GoodnessOfFit.Statistic);
        }
Example #8
        public void FitDataToPolynomialChiSquaredTest()
        {
            // we want to make sure that the chi^2 values we are producing from polynomial fits are distributed as expected

            // create a sample to hold chi^2 values
            Sample chis = new Sample();

            // define a model
            Interval r = Interval.FromEndpoints(-5.0, 15.0);
            Func <double, double> fv = delegate(double x) {
                return(1.0 * x - 2.0 * x * x);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + 0.5 * Math.Sin(x));
            };

            // draw 50 data sets from the model and fit each one,
            // store the resulting chi^2 value in the chi^2 set
            for (int i = 0; i < 50; i++)
            {
                UncertainMeasurementSample xs = CreateDataSet(r, fv, fu, 10, i);
                FitResult fit = xs.FitToPolynomial(2);
                double    chi = fit.GoodnessOfFit.Statistic;
                chis.Add(chi);
            }

            // sanity check the sample
            Assert.IsTrue(chis.Count == 50);

            // test whether the chi^2 values are distributed as expected
            ContinuousDistribution chiDistribution = new ChiSquaredDistribution(7);
            TestResult             ks = chis.KolmogorovSmirnovTest(chiDistribution);

            Assert.IsTrue(ks.LeftProbability < 0.95);
        }
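The ChiSquaredDistribution(7) above comes from the same kind of counting (the standard chi-squared result, not anything specific to this library): each data set has 10 points and FitToPolynomial(2) estimates 3 coefficients, leaving 10 - 3 = 7 degrees of freedom.

            // Degrees of freedom for the chi^2 values collected above.
            int points = 10;                      // measurements per data set
            int coefficients = 2 + 1;             // a degree-2 polynomial has 3 coefficients
            Console.WriteLine(points - coefficients);   // 7, matching ChiSquaredDistribution(7)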
Example #9
        public void FitDataToLinearFunctionTest()
        {
            // create a data set from a linear combination of sine and cosine
            Interval r = Interval.FromEndpoints(-4.0, 6.0);

            double[] c = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 };
            Func <double, double> fv = delegate(double x) {
                return(2.0 * Math.Cos(x) + 1.0 * Math.Sin(x));
            };
            Func <double, double> fu = delegate(double x) {
                return(0.1 + 0.1 * Math.Abs(x));
            };
            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 20, 2);

            // fit the data set to a linear combination of sine and cosine
            Func <double, double>[] fs = new Func <double, double>[]
            { delegate(double x) { return(Math.Cos(x)); }, delegate(double x) { return(Math.Sin(x)); } };
            FitResult result = set.FitToLinearFunction(fs);

            // the fit should have the right dimension
            Assert.IsTrue(result.Dimension == 2);

            // the coefficients should match
            Console.WriteLine(result.Parameter(0));
            Console.WriteLine(result.Parameter(1));
            Assert.IsTrue(result.Parameter(0).ConfidenceInterval(0.95).ClosedContains(2.0));
            Assert.IsTrue(result.Parameter(1).ConfidenceInterval(0.95).ClosedContains(1.0));

            // diagonal covariances should match errors
            Assert.IsTrue(TestUtilities.IsNearlyEqual(Math.Sqrt(result.Covariance(0, 0)), result.Parameter(0).Uncertainty));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(Math.Sqrt(result.Covariance(1, 1)), result.Parameter(1).Uncertainty));
        }
Example #10
        public void ExponentialFitUncertainty()
        {
            // check that the uncertainty in reported fit parameters is actually meaningful
            // it should be the standard deviation of fit parameter values in a sample of many fits

            // define a population distribution
            ExponentialDistribution distribution = new ExponentialDistribution(4.0);

            // draw a lot of samples from it; fit each sample and
            // record the reported parameter value and error of each
            Sample values        = new Sample();
            Sample uncertainties = new Sample();

            for (int i = 0; i < 128; i++)
            {
                Sample         sample = SampleTest.CreateSample(distribution, 8, i);
                FitResult      fit    = ExponentialDistribution.FitToSample(sample);
                UncertainValue lambda = fit.Parameter(0);
                values.Add(lambda.Value);
                uncertainties.Add(lambda.Uncertainty);
            }

            // the reported values should agree with the source distribution
            Assert.IsTrue(values.PopulationMean.ConfidenceInterval(0.95).ClosedContains(distribution.Mean));

            // the reported errors should agree with the standard deviation of the reported parameters
            Assert.IsTrue(values.PopulationStandardDeviation.ConfidenceInterval(0.95).ClosedContains(uncertainties.Mean));
        }
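For a rough sense of scale (a textbook asymptotic result, assumed rather than taken from the library): the maximum-likelihood estimate of an exponential mean is the sample mean, whose standard error is mu / sqrt(n), so with mu = 4 and n = 8 both values.PopulationStandardDeviation and uncertainties.Mean should land near 1.4.

            // Expected size of the reported uncertainty in the test above,
            // assuming the standard error mu / sqrt(n) for the mean of an exponential sample.
            double mu = 4.0;    // mean of the source ExponentialDistribution
            int n = 8;          // points per fitted sample
            Console.WriteLine(mu / Math.Sqrt(n));   // ~1.41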
Example #11
        public void FitDataToLineUncertaintyTest()
        {
            double[] xs = TestUtilities.GenerateUniformRealValues(0.0, 10.0, 10);
            Func <double, double> fv = delegate(double x) {
                return(2.0 * x - 1.0);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + x);
            };

            MultivariateSample sample     = new MultivariateSample(2);
            SymmetricMatrix    covariance = new SymmetricMatrix(2);

            // create a bunch of small data sets
            for (int i = 0; i < 100; i++)
            {
                UncertainMeasurementSample data = CreateDataSet(xs, fv, fu, i);
                FitResult fit = data.FitToLine();

                sample.Add(fit.Parameters);
                covariance = fit.CovarianceMatrix;
                // because it depends only on the x's and sigmas, the covariance is always the same

                Console.WriteLine("cov_00 = {0}", covariance[0, 0]);
            }

            // the measured covariances should agree with the claimed covariances
            //Assert.IsTrue(sample.PopulationCovariance(0,0).ConfidenceInterval(0.95).ClosedContains(covariance[0,0]));
            //Assert.IsTrue(sample.PopulationCovariance(0,1).ConfidenceInterval(0.95).ClosedContains(covariance[0,1]));
            //Assert.IsTrue(sample.PopulationCovariance(1,0).ConfidenceInterval(0.95).ClosedContains(covariance[1,0]));
            //Assert.IsTrue(sample.PopulationCovariance(1,1).ConfidenceInterval(0.95).ClosedContains(covariance[1,1]));
        }
Example #12
        public void TimeSeriesBadFit()
        {
            // Fit AR1 to MA1; the fit should be bad

            TimeSeries series = GenerateMA1TimeSeries(0.4, 0.3, 0.2, 1000);

            FitResult result = series.FitToAR1();

            Assert.IsTrue(result.GoodnessOfFit.Probability < 0.01);
        }
Example #13
        public void TimeSeriesFitAR1()
        {
            double alpha = 0.3;
            double mu    = 0.2;
            double sigma = 0.4;
            int    n     = 20;

            // For our fit to AR(1), we have incorporated bias correction (at least
            // for the most important parameter alpha), so we can do a small-n test.

            MultivariateSample parameters  = new MultivariateSample(3);
            MultivariateSample covariances = new MultivariateSample(6);
            Sample             tests       = new Sample();

            for (int i = 0; i < 100; i++)
            {
                TimeSeries series = GenerateAR1TimeSeries(alpha, mu, sigma, n, i + 314159);

                FitResult result = series.FitToAR1();

                parameters.Add(result.Parameters);
                covariances.Add(
                    result.CovarianceMatrix[0, 0],
                    result.CovarianceMatrix[1, 1],
                    result.CovarianceMatrix[2, 2],
                    result.CovarianceMatrix[0, 1],
                    result.CovarianceMatrix[0, 2],
                    result.CovarianceMatrix[1, 2]
                    );

                tests.Add(result.GoodnessOfFit.Probability);
            }

            // Check that fit parameters agree with inputs
            Assert.IsTrue(parameters.Column(0).PopulationMean.ConfidenceInterval(0.99).ClosedContains(alpha));
            Assert.IsTrue(parameters.Column(1).PopulationMean.ConfidenceInterval(0.99).ClosedContains(mu));
            Assert.IsTrue(parameters.Column(2).PopulationMean.ConfidenceInterval(0.99).ClosedContains(sigma));

            // Check that reported variances agree with actual variances
            Assert.IsTrue(parameters.Column(0).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(0).Median));
            Assert.IsTrue(parameters.Column(1).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(1).Median));
            Assert.IsTrue(parameters.Column(2).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(2).Median));
            Assert.IsTrue(parameters.TwoColumns(0, 1).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(3).Mean));
            Assert.IsTrue(parameters.TwoColumns(0, 2).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(4).Mean));
            Assert.IsTrue(parameters.TwoColumns(1, 2).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(5).Mean));

            // For small n, the fitted alpha can vary considerably, and the formula for var(m) varies
            // quite strongly with alpha, so the computed var(m) have a very long tail. This pushes the
            // mean computed var(m) quite a bit higher than a typical value, so we use medians instead
            // of means for our best guess for the predicted variance.

            TestResult ks = tests.KolmogorovSmirnovTest(new UniformDistribution());

            Assert.IsTrue(ks.Probability > 0.05);
        }
Example #14
        public void TestMultivariateRegression()
        {
            double cz = 1.0;
            double cx = 0.0;
            double cy = 0.0;

            Random       rng           = new Random(1001110000);
            Distribution xDistribution = new UniformDistribution(Interval.FromEndpoints(-4.0, 8.0));
            Distribution yDistribution = new UniformDistribution(Interval.FromEndpoints(-8.0, 4.0));
            Distribution eDistribution = new NormalDistribution();

            Sample r2Sample = new Sample();

            for (int i = 0; i < 500; i++)
            {
                MultivariateSample xyzSample = new MultivariateSample(3);
                for (int k = 0; k < 12; k++)
                {
                    double x = xDistribution.GetRandomValue(rng);
                    double y = yDistribution.GetRandomValue(rng);
                    double z = cx * x + cy * y + cz + eDistribution.GetRandomValue(rng);
                    xyzSample.Add(x, y, z);
                }
                FitResult fit = xyzSample.LinearRegression(2);
                double    fcx = fit.Parameters[0];
                double    fcy = fit.Parameters[1];
                double    fcz = fit.Parameters[2];

                double ss2 = 0.0;
                double ss1 = 0.0;
                foreach (double[] xyz in xyzSample)
                {
                    ss2 += MoreMath.Sqr(xyz[2] - (fcx * xyz[0] + fcy * xyz[1] + fcz));
                    ss1 += MoreMath.Sqr(xyz[2] - xyzSample.Column(2).Mean);
                }
                double r2 = 1.0 - ss2 / ss1;
                r2Sample.Add(r2);
            }

            Console.WriteLine("{0} {1} {2} {3} {4}", r2Sample.Count, r2Sample.PopulationMean, r2Sample.StandardDeviation, r2Sample.Minimum, r2Sample.Maximum);

            Distribution r2Distribution = new BetaDistribution((3 - 1) / 2.0, (12 - 3) / 2.0);

            //Distribution r2Distribution = new BetaDistribution((10 - 2) / 2.0, (2 - 1) / 2.0);
            Console.WriteLine("{0} {1}", r2Distribution.Mean, r2Distribution.StandardDeviation);

            TestResult ks = r2Sample.KolmogorovSmirnovTest(r2Distribution);

            Console.WriteLine(ks.RightProbability);
            Console.WriteLine(ks.Probability);
        }
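The BetaDistribution((3 - 1) / 2.0, (12 - 3) / 2.0) above reflects the standard null-hypothesis result (assumed here, not quoted from the library docs): when the regressors have no real effect, the R^2 of a fit of k parameters to n points follows Beta((k - 1) / 2, (n - k) / 2), whose mean is (k - 1) / (n - 1). A minimal sketch of that arithmetic for this test:

            // Null distribution of R^2 for k fitted parameters and n data points,
            // assuming R^2 ~ Beta((k - 1) / 2, (n - k) / 2) when the true slopes are zero.
            int n = 12;                        // points per xyzSample
            int k = 3;                         // fitted parameters: cx, cy, cz
            double alpha = (k - 1) / 2.0;      // 1.0
            double beta = (n - k) / 2.0;       // 4.5
            Console.WriteLine("Beta({0}, {1}), mean {2}", alpha, beta, alpha / (alpha + beta));   // mean 2/11 ~ 0.18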
Example #15
        public void FitDataToPolynomialTest()
        {
            Interval r = Interval.FromEndpoints(-10.0, 10.0);

            double[] c = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 };
            Func <double, double> fv = delegate(double x) {
                double f = 0.0;
                for (int i = c.Length - 1; i >= 0; i--)
                {
                    f = f * x + c[i];
                }
                return(f);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + 0.5 * Math.Cos(x));
            };

            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 50);

            Assert.IsTrue(set.Count == 50);

            // fit to an appropriate polynomial
            FitResult poly = set.FitToPolynomial(5);

            // the coefficients should match
            for (int i = 0; i < poly.Dimension; i++)
            {
                Assert.IsTrue(poly.Parameter(i).ConfidenceInterval(0.95).ClosedContains(c[i]));
            }

            // the fit should be good
            Console.WriteLine(poly.GoodnessOfFit.LeftProbability);
            Assert.IsTrue(poly.GoodnessOfFit.LeftProbability < 0.95);

            // fit to a lower order polynomial
            FitResult low = set.FitToPolynomial(4);

            // the fit should be bad
            Assert.IsTrue(low.GoodnessOfFit.Statistic > poly.GoodnessOfFit.Statistic);
            Assert.IsTrue(low.GoodnessOfFit.LeftProbability > 0.95);

            // fit to a higher order polynomial
            FitResult high = set.FitToPolynomial(6);

            // the higher order coefficients should be compatible with zero
            Assert.IsTrue(high.Parameter(6).ConfidenceInterval(0.95).ClosedContains(0.0));

            // the fit should be better, but not too much better
            Assert.IsTrue(high.GoodnessOfFit.Statistic < poly.GoodnessOfFit.Statistic);
        }
Example #16
        public void ExponentialFit()
        {
            ExponentialDistribution distribution = new ExponentialDistribution(5.0);
            Sample sample = SampleTest.CreateSample(distribution, 100);

            // fit to normal should be bad
            FitResult nfit = NormalDistribution.FitToSample(sample);

            Assert.IsTrue(nfit.GoodnessOfFit.Probability < 0.05);

            // fit to exponential should be good
            FitResult efit = ExponentialDistribution.FitToSample(sample);

            Assert.IsTrue(efit.GoodnessOfFit.Probability > 0.05);
            Assert.IsTrue(efit.Parameter(0).ConfidenceInterval(0.95).ClosedContains(distribution.Mean));
        }
Example #17
        public void TestBivariateRegression()
        {
            double a0 = 1.0;
            double b0 = 0.0;

            Random       rng           = new Random(1001110000);
            Distribution xDistribution = new UniformDistribution(Interval.FromEndpoints(-2.0, 4.0));
            Distribution eDistribution = new NormalDistribution();

            Sample r2Sample = new Sample();

            for (int i = 0; i < 500; i++)
            {
                BivariateSample xySample = new BivariateSample();
                for (int k = 0; k < 10; k++)
                {
                    double x = xDistribution.GetRandomValue(rng);
                    double y = a0 + b0 * x + eDistribution.GetRandomValue(rng);
                    xySample.Add(x, y);
                }
                FitResult fit = xySample.LinearRegression();
                double    a   = fit.Parameters[0];
                double    b   = fit.Parameters[1];

                double ss2 = 0.0;
                double ss1 = 0.0;
                foreach (XY xy in xySample)
                {
                    ss2 += MoreMath.Sqr(xy.Y - (a + b * xy.X));
                    ss1 += MoreMath.Sqr(xy.Y - xySample.Y.Mean);
                }
                double r2 = 1.0 - ss2 / ss1;
                r2Sample.Add(r2);
            }

            Console.WriteLine("{0} {1} {2} {3} {4}", r2Sample.Count, r2Sample.PopulationMean, r2Sample.StandardDeviation, r2Sample.Minimum, r2Sample.Maximum);

            Distribution r2Distribution = new BetaDistribution((2 - 1) / 2.0, (10 - 2) / 2.0);

            //Distribution r2Distribution = new BetaDistribution((10 - 2) / 2.0, (2 - 1) / 2.0);
            Console.WriteLine("{0} {1}", r2Distribution.Mean, r2Distribution.StandardDeviation);

            TestResult ks = r2Sample.KolmogorovSmirnovTest(r2Distribution);

            Console.WriteLine(ks.RightProbability);
            Console.WriteLine(ks.Probability);
        }
Example #18
        public void FitDataToFunctionTest()
        {
            // create a data set from a nonlinear function

            /*
             * Interval r = Interval.FromEndpoints(-3.0, 5.0);
             * double[] c = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 };
             * Function<double, double> fv = delegate(double x) {
             *  return (3.0 * Math.Cos(2.0 * Math.PI * x / 2.0 - 1.0));
             * };
             * Function<double, double> fu = delegate(double x) {
             *  return (0.1 + 0.1 * Math.Abs(x));
             * };
             * DataSet set = CreateDataSet(r, fv, fu, 20, 2);
             */

            UncertainMeasurementSample set = new UncertainMeasurementSample();

            set.Add(new UncertainMeasurement <double>(1.0, 1.0, 0.1));
            set.Add(new UncertainMeasurement <double>(2.0, 0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(3.0, 0.0, 0.1));
            set.Add(new UncertainMeasurement <double>(4.0, -0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(5.0, -1.0, 0.1));
            set.Add(new UncertainMeasurement <double>(6.0, -0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(7.0, 0.0, 0.1));
            set.Add(new UncertainMeasurement <double>(8.0, 0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(9.0, 1.0, 0.1));

            // fit it to a parameterized fit function

            /*
             * Function<double[], double, double> ff = delegate(double[] p, double x) {
             *  return (p[0] * Math.Cos(2.0 * Math.PI / p[1] + p[2]));
             * };
             */
            Func <double[], double, double> ff = delegate(double[] p, double x) {
                //Console.WriteLine("    p[0]={0}, x={1}", p[0], x);
                return(p[1] * Math.Cos(x / p[0] + p[2]));
                //return (x / p[0]);
            };
            FitResult fit = set.FitToFunction(ff, new double[] { 1.3, 1.1, 0.1 });

            Console.WriteLine(fit.Parameter(0));
            Console.WriteLine(fit.Parameter(1));
            Console.WriteLine(fit.Parameter(2));
        }
Example #19
 private void RefreshDiagnostics(DetectionType detectionType, FitResult fit, FitResult fitX, FitResult fitY, MoveResult flags)
 {
     if (detectionType == DetectionType.Collision)
     {
         Diagnostic.Write("fit ", fit);
         Diagnostic.Write("fitX", fitX);
         Diagnostic.Write("fitY", fitY);
         Diagnostic.Write("rslt", flags);
     }
     else if (detectionType == DetectionType.Retrace)
     {
         Diagnostic.Write("rFt ", fit);
         Diagnostic.Write("rFtX", fitX);
         Diagnostic.Write("rFtY", fitY);
         Diagnostic.Write("RSLT", flags);
     }
 }
Example #20
        public void TimeSeriesFitToMA1()
        {
            double beta  = -0.2;
            double mu    = 0.4;
            double sigma = 0.6;
            int    n     = 100;

            // If we are going to strictly test parameter values and variances,
            // we can't pick n too small, because the formulas we use are only
            // asymptotically unbiased.

            MultivariateSample parameters  = new MultivariateSample(3);
            MultivariateSample covariances = new MultivariateSample(6);

            for (int i = 0; i < 100; i++)
            {
                TimeSeries series = GenerateMA1TimeSeries(beta, mu, sigma, n, i + 314159);

                Debug.Assert(series.Count == n);

                FitResult result = series.FitToMA1();

                Assert.IsTrue(result.Dimension == 3);

                parameters.Add(result.Parameters);
                covariances.Add(
                    result.CovarianceMatrix[0, 0],
                    result.CovarianceMatrix[1, 1],
                    result.CovarianceMatrix[2, 2],
                    result.CovarianceMatrix[0, 1],
                    result.CovarianceMatrix[0, 2],
                    result.CovarianceMatrix[1, 2]
                    );
            }

            Assert.IsTrue(parameters.Column(0).PopulationMean.ConfidenceInterval(0.99).ClosedContains(beta));
            Assert.IsTrue(parameters.Column(1).PopulationMean.ConfidenceInterval(0.99).ClosedContains(mu));
            Assert.IsTrue(parameters.Column(2).PopulationMean.ConfidenceInterval(0.99).ClosedContains(sigma));

            Assert.IsTrue(parameters.Column(0).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(0).Mean));
            Assert.IsTrue(parameters.Column(1).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(1).Mean));
            Assert.IsTrue(parameters.Column(2).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(2).Mean));
            Assert.IsTrue(parameters.TwoColumns(0, 1).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(3).Mean));
            Assert.IsTrue(parameters.TwoColumns(0, 2).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(4).Mean));
            Assert.IsTrue(parameters.TwoColumns(1, 2).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(5).Mean));
        }
Example #21
        public void BivariateLinearPolynomialRegressionAgreement()
        {
            // A degree-1 polynomial fit should give the same answer as a linear fit

            BivariateSample B = new BivariateSample();

            B.Add(0.0, 5.0);
            B.Add(3.0, 6.0);
            B.Add(1.0, 7.0);
            B.Add(4.0, 8.0);
            B.Add(2.0, 9.0);
            FitResult PR = B.PolynomialRegression(1);
            FitResult LR = B.LinearRegression();

            Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.Parameters, LR.Parameters));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.CovarianceMatrix, LR.CovarianceMatrix));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.GoodnessOfFit.Statistic, LR.GoodnessOfFit.Statistic));
        }
Example #22
        public void NormalFit()
        {
            // pick mu >> sigma so that we get no negative values;
            // otherwise the attempt to fit to an exponential will fail
            ContinuousDistribution distribution = new NormalDistribution(6.0, 2.0);
            Sample sample = SampleTest.CreateSample(distribution, 100);

            // fit to normal should be good
            FitResult nfit = NormalDistribution.FitToSample(sample);

            Assert.IsTrue(nfit.GoodnessOfFit.Probability > 0.05);
            Assert.IsTrue(nfit.Parameter(0).ConfidenceInterval(0.95).ClosedContains(distribution.Mean));
            Assert.IsTrue(nfit.Parameter(1).ConfidenceInterval(0.95).ClosedContains(distribution.StandardDeviation));

            // fit to exponential should be bad
            FitResult efit = ExponentialDistribution.FitToSample(sample);

            Assert.IsTrue(efit.GoodnessOfFit.Probability < 0.05);
        }
Example #23
        public void BivariateNonlinearFit()
        {
            // Verify that we can fit a non-linear function,
            // that the estimated parameters do cluster around the true values,
            // and that the estimated parameter covariances do reflect the actually observed covariances

            double a = 2.7;
            double b = 3.1;

            ContinuousDistribution xDistribution = new ExponentialDistribution(2.0);
            ContinuousDistribution eDistribution = new NormalDistribution(0.0, 4.0);

            MultivariateSample parameters  = new MultivariateSample("a", "b");
            MultivariateSample covariances = new MultivariateSample(3);

            for (int i = 0; i < 64; i++)
            {
                BivariateSample sample = new BivariateSample();
                Random          rng    = new Random(i);
                for (int j = 0; j < 8; j++)
                {
                    double x = xDistribution.GetRandomValue(rng);
                    double y = a * Math.Pow(x, b) + eDistribution.GetRandomValue(rng);
                    sample.Add(x, y);
                }

                FitResult fit = sample.NonlinearRegression(
                    (IList <double> p, double x) => p[0] * Math.Pow(x, p[1]),
                    new double[] { 1.0, 1.0 }
                    );

                parameters.Add(fit.Parameters);
                covariances.Add(fit.Covariance(0, 0), fit.Covariance(1, 1), fit.Covariance(0, 1));
            }

            Assert.IsTrue(parameters.Column(0).PopulationMean.ConfidenceInterval(0.99).ClosedContains(a));
            Assert.IsTrue(parameters.Column(1).PopulationMean.ConfidenceInterval(0.99).ClosedContains(b));

            Assert.IsTrue(parameters.Column(0).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(0).Mean));
            Assert.IsTrue(parameters.Column(1).PopulationVariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(1).Mean));
            Assert.IsTrue(parameters.TwoColumns(0, 1).PopulationCovariance.ConfidenceInterval(0.99).ClosedContains(covariances.Column(2).Mean));
        }
Example #24
        public void Bug7953()
        {
            // Fitting this sample to a Weibull caused a NonconvergenceException in the root finder that was used inside the fit method.
            // The underlying problem was that the equation to solve involved x^k; with k ~ 2000, (~12)^(~2000) overflows double,
            // so all the quantities became Infinity and the root finder never converged. We changed the algorithm to operate on
            // w = log x - <log x>, which keeps quantities much smaller.

            Sample sample = new Sample(
                12.824, 12.855, 12.861, 12.862, 12.863,
                12.864, 12.865, 12.866, 12.866, 12.866,
                12.867, 12.867, 12.868, 12.868, 12.870,
                12.871, 12.871, 12.871, 12.871, 12.872,
                12.876, 12.878, 12.879, 12.879, 12.881
                );

            FitResult result = WeibullDistribution.FitToSample(sample);

            Console.WriteLine("{0} {1}", result.Parameter(0), result.Parameter(1));
            Console.WriteLine(result.GoodnessOfFit.RightProbability);
        }
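The overflow described in the comment is easy to reproduce in isolation (the numbers below are hypothetical, chosen only to match the scale of this sample; this is not the fitter's internal code): for data near 12.8 and a Weibull shape near 2000, x^k is far beyond double.MaxValue, while the centered logarithm w = log x - <log x> stays of order one.

            // Illustration of the overflow, not the library's fitting code.
            double x = 12.8;
            double k = 2000.0;
            Console.WriteLine(Math.Pow(x, k));     // Infinity: 12.8^2000 ~ 10^2214, but double.MaxValue ~ 1.8e308

            double meanLogX = Math.Log(12.867);    // stand-in for <log x> over the sample above
            double w = Math.Log(x) - meanLogX;     // ~ -0.005, comfortably representable
            Console.WriteLine(Math.Exp(k * w));    // ~ 3e-5, no overflow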
Example #25
        public void FitDataToLineTest()
        {
            Interval r = Interval.FromEndpoints(0.0, 10.0);
            Func <double, double> fv = delegate(double x) {
                return(2.0 * x - 1.0);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + x);
            };
            UncertainMeasurementSample data = CreateDataSet(r, fv, fu, 20);


            // sanity check the data set
            Assert.IsTrue(data.Count == 20);

            // fit to a line
            FitResult line = data.FitToLine();

            Assert.IsTrue(line.Dimension == 2);
            Assert.IsTrue(line.Parameter(0).ConfidenceInterval(0.95).ClosedContains(-1.0));
            Assert.IsTrue(line.Parameter(1).ConfidenceInterval(0.95).ClosedContains(2.0));
            Assert.IsTrue(line.GoodnessOfFit.LeftProbability < 0.95);

            // correlation coefficient should be related to covariance as expected
            Assert.IsTrue(TestUtilities.IsNearlyEqual(line.CorrelationCoefficient(0, 1), line.Covariance(0, 1) / line.Parameter(0).Uncertainty / line.Parameter(1).Uncertainty));

            // fit to a 1st order polynomial and make sure it agrees
            FitResult poly = data.FitToPolynomial(1);

            Assert.IsTrue(poly.Dimension == 2);
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.Parameters, line.Parameters));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.CovarianceMatrix, line.CovarianceMatrix));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.GoodnessOfFit.Statistic, line.GoodnessOfFit.Statistic));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.GoodnessOfFit.LeftProbability, line.GoodnessOfFit.LeftProbability));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.GoodnessOfFit.RightProbability, line.GoodnessOfFit.RightProbability));

            // fit to a constant; the result should be poor
            FitResult constant = data.FitToConstant();

            Assert.IsTrue(constant.GoodnessOfFit.LeftProbability > 0.95);
        }
Example #26
        public void Bug6162()
        {
            // When UncertainMeasurementSample.FitToPolynomial used Cholesky inversion of (A^T A), this inversion
            // would fail when roundoff errors made the matrix non-positive-definite. We have now changed
            // to QR decomposition, which is more robust.

            //real data
            double[] X_axis = new double[] { 40270.65625, 40270.6569444444, 40270.6576388888, 40270.6583333332, 40270.6590277776,
                                             40270.659722222, 40270.6604166669, 40270.6611111113, 40270.6618055557, 40270.6625000001 };

            double[] Y_axis = new double[] { 246.824996948242, 246.850006103516, 245.875, 246.225006103516, 246.975006103516,
                                             247.024993896484, 246.949996948242, 246.875, 247.5, 247.100006103516 };

            UncertainMeasurementSample DataSet = new UncertainMeasurementSample();

            for (int i = 0; i < 10; i++)
            {
                DataSet.Add(X_axis[i], Y_axis[i], 1);
            }
            //for (int i = 0; i < 10; i++) DataSet.Add(X_axis[i] - 40270.0, Y_axis[i] - 247.0, 1);

            FitResult DataFit = DataSet.FitToPolynomial(3);

            for (int i = 0; i < DataFit.Dimension; i++)
            {
                Console.WriteLine("a" + i.ToString() + " = " + DataFit.Parameter(i).Value);
            }

            BivariateSample bs = new BivariateSample();

            for (int i = 0; i < 10; i++)
            {
                bs.Add(X_axis[i], Y_axis[i]);
            }
            FitResult bsFit = bs.PolynomialRegression(3);

            for (int i = 0; i < bsFit.Dimension; i++)
            {
                Console.WriteLine(bsFit.Parameter(i));
            }
        }
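One way to see why the normal-equations route failed on this data (an illustration only, not the library's implementation): over the narrow range 40270.656 to 40270.663 the Vandermonde columns x, x^2, x^3 are almost perfectly collinear, so A^T A is numerically singular and roundoff can tip its Cholesky factorization into reporting it as non-positive-definite. Centering the data, as in the commented-out Add call, or working with a QR decomposition of A itself avoids squaring the condition number.

            // Correlation between the x and x^2 columns of the cubic design matrix; not library code.
            double[] xs = new double[] { 40270.65625, 40270.6569444444, 40270.6576388888, 40270.6583333332, 40270.6590277776,
                                         40270.659722222, 40270.6604166669, 40270.6611111113, 40270.6618055557, 40270.6625000001 };

            double mx = 0.0, mx2 = 0.0;
            foreach (double x in xs) { mx += x; mx2 += x * x; }
            mx /= xs.Length; mx2 /= xs.Length;

            double sxx = 0.0, syy = 0.0, sxy = 0.0;
            foreach (double x in xs)
            {
                double dx = x - mx;
                double dy = x * x - mx2;
                sxx += dx * dx; syy += dy * dy; sxy += dx * dy;
            }
            Console.WriteLine(sxy / Math.Sqrt(sxx * syy));   // within about 1e-6 of 1: the columns are effectively collinear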
Example #27
        public void FitToFunctionPolynomialCompatibilityTest()
        {
            // specify a cubic function
            Interval r = Interval.FromEndpoints(-10.0, 10.0);
            Func <double, double> fv = delegate(double x) {
                return(0.0 - 1.0 * x + 2.0 * x * x - 3.0 * x * x * x);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + 0.5 * Math.Cos(x));
            };

            // create a data set from it
            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 60);

            // fit it to a cubic polynomial
            FitResult pFit = set.FitToPolynomial(3);

            // fit it to the same cubic, expressed as a general function
            Func <double[], double, double> ff = delegate(double[] p, double x) {
                return(p[0] + p[1] * x + p[2] * x * x + p[3] * x * x * x);
            };
            FitResult fFit = set.FitToFunction(ff, new double[] { 0, 0, 0, 0 });

            // the fits should agree
            Console.WriteLine("{0} ?= {1}", pFit.GoodnessOfFit.Statistic, fFit.GoodnessOfFit.Statistic);
            for (int i = 0; i < pFit.Dimension; i++)
            {
                Console.WriteLine("{0} ?= {1}", pFit.Parameter(i), fFit.Parameter(i));
                Assert.IsTrue(pFit.Parameter(i).ConfidenceInterval(0.01).ClosedContains(fFit.Parameter(i).Value));
            }
            // dimension
            Assert.IsTrue(pFit.Dimension == fFit.Dimension);
            // chi squared
            Assert.IsTrue(TestUtilities.IsNearlyEqual(pFit.GoodnessOfFit.Statistic, fFit.GoodnessOfFit.Statistic, Math.Sqrt(TestUtilities.TargetPrecision)));
            // don't demand super-high precision agreement of parameters and covariance matrix
            // parameters
            Assert.IsTrue(TestUtilities.IsNearlyEqual(pFit.Parameters, fFit.Parameters, Math.Pow(TestUtilities.TargetPrecision, 0.3)));
            // covariance
            Assert.IsTrue(TestUtilities.IsNearlyEqual(pFit.CovarianceMatrix, fFit.CovarianceMatrix, Math.Pow(TestUtilities.TargetPrecision, 0.3)));
        }
Example #28
        public void MultivariateLinearRegressionTest()
        {
            // define model y = a + b0 * x0 + b1 * x1 + noise
            double       a     = 1.0;
            double       b0    = -2.0;
            double       b1    = 3.0;
            Distribution noise = new NormalDistribution(0.0, 10.0);

            // draw a sample from the model
            Random             rng    = new Random(1);
            MultivariateSample sample = new MultivariateSample(3);

            for (int i = 0; i < 100; i++)
            {
                double x0  = -10.0 + 20.0 * rng.NextDouble();
                double x1  = -10.0 + 20.0 * rng.NextDouble();
                double eps = noise.InverseLeftProbability(rng.NextDouble());
                double y   = a + b0 * x0 + b1 * x1 + eps;
                sample.Add(x0, x1, y);
            }

            // do a linear regression fit on the model
            FitResult result = sample.LinearRegression(2);

            // the result should have the appropriate dimension
            Assert.IsTrue(result.Dimension == 3);

            // the result should be significant
            Console.WriteLine("{0} {1}", result.GoodnessOfFit.Statistic, result.GoodnessOfFit.LeftProbability);
            Assert.IsTrue(result.GoodnessOfFit.LeftProbability > 0.95);

            // the parameters should match the model
            Console.WriteLine(result.Parameter(0));
            Assert.IsTrue(result.Parameter(0).ConfidenceInterval(0.90).ClosedContains(b0));
            Console.WriteLine(result.Parameter(1));
            Assert.IsTrue(result.Parameter(1).ConfidenceInterval(0.90).ClosedContains(b1));
            Console.WriteLine(result.Parameter(2));
            Assert.IsTrue(result.Parameter(2).ConfidenceInterval(0.90).ClosedContains(a));
        }
Example #29
        public void RayleighFit()
        {
            RayleighDistribution rayleigh = new RayleighDistribution(3.2);

            Sample parameter = new Sample();
            Sample variance  = new Sample();

            for (int i = 0; i < 128; i++)
            {
                // We pick a quite-small sample, because we have a finite-n unbiased estimator.
                Sample s = SampleTest.CreateSample(rayleigh, 8, i);

                FitResult r = RayleighDistribution.FitToSample(s);
                parameter.Add(r.Parameters[0]);
                variance.Add(r.Covariance(0, 0));

                Assert.IsTrue(r.GoodnessOfFit.Probability > 0.01);
            }

            Assert.IsTrue(parameter.PopulationMean.ConfidenceInterval(0.99).ClosedContains(rayleigh.Scale));
            Assert.IsTrue(parameter.PopulationVariance.ConfidenceInterval(0.99).ClosedContains(variance.Median));
        }
Example #30
        private MoveResult AdjustFlags(Vector2 interpolation, FitResult fitX, FitResult fitY)
        {
            MoveResult adjustments = MoveResult.None;

            if (fitX == FitResult.Ok)
            {
                adjustments |= MoveResult.X;
            }
            else
            {
                if (interpolation.X > 0)
                {
                    adjustments |= MoveResult.BlockedOnPositiveX;
                }
                else if (interpolation.X < 0)
                {
                    adjustments |= MoveResult.BlockedOnNegativeX;
                }
            }

            if (fitY == FitResult.Ok)
            {
                adjustments |= MoveResult.Y;
            }
            else
            {
                if (interpolation.Y > 0)
                {
                    adjustments |= MoveResult.BlockedOnPositiveY;
                }
                else if (interpolation.Y < 0)
                {
                    adjustments |= MoveResult.BlockedOnNegativeY;
                }
            }

            return(adjustments);
        }
Example #31
 public ExperimentLog()
 {
     Setting = new MCASetting();
     MotorPosition = 0;
     MCAData = new MCAData();
     Temp1Start = 0;
     Temp2Start = 0;
     Temp1Stop = 0;
     Temp2Stop = 0;
     RunTime = 0;
     Channel0InitialPeakGuess = new InitialPeakGuess();
     Channel0FitResult = new FitResult();
     Channel1InitialPeakGuess = new InitialPeakGuess();
     Channel1FitResult = new FitResult();
 }
Example #32
        public override FitResult GetFitResult()
        {
            FitResult result = new FitResult();
            result.BendMin = BendMin;
            result.BendMax = BendMax;
            result.FitMode = Common.Enums.FitModeEnum.ProtrusiveOrBendStrengthEvaluation;

            return result;
        }
Example #33
        public override FitResult GetFitResult()
        {
            FitResult result = new FitResult();
            result.RotateMinAngle = RotateMinAngle;
            result.RotateMaxAngle = RotateMaxAngle;
            result.FitMode = Common.Enums.FitModeEnum.RotationFit;

            return result;
        }
Example #34
        public override FitResult GetFitResult()
        {
            FitResult result = new FitResult();
            result.BendMinAngle = BendMinAngle;
            result.BendMaxAngle = BendMaxAngle;
            result.FitMode = Common.Enums.FitModeEnum.ProtrusiveOrBendFit;

            return result;
        }
Example #35
        /// <summary>
        /// Computes the Beta distribution that best fits the given sample.
        /// </summary>
        /// <param name="sample">The sample to fit.</param>
        /// <returns>The best fit parameters.</returns>
        /// <remarks>
        /// <para>The returned fit parameters are the &#x3B1; (<see cref="Alpha"/>) and  &#x3B2; (<see cref="Beta"/>) parameters, in that order.
        /// These are the same parameters, in the same order, that are required by the <see cref="BetaDistribution(double,double)"/> constructor to
        /// specify a new Beta distribution.</para>
        /// </remarks>
        /// <exception cref="ArgumentNullException"><paramref name="sample"/> is null.</exception>
        /// <exception cref="InsufficientDataException"><paramref name="sample"/> contains fewer than three values.</exception>
        /// <exception cref="InvalidOperationException">Not all the entries in <paramref name="sample" /> lie between zero and one.</exception>
        public static FitResult FitToSample(Sample sample)
        {
            if (sample == null) throw new ArgumentNullException("sample");
            if (sample.Count < 3) throw new InsufficientDataException();

            // maximum likelihood calculation
            //   \log L = \sum_i \left[ (\alpha-1) \log x_i + (\beta-1) \log (1-x_i) - \log B(\alpha,\beta) \right]
            // using \frac{\partial B(a,b)}{\partial a} = \psi(a) - \psi(a+b), we have
            //   \frac{\partial \log L}{\partial \alpha} = \sum_i \log x_i -     N \left[ \psi(\alpha) - \psi(\alpha+\beta) \right]
            //   \frac{\partial \log L}{\partial \beta}  = \sum_i \log (1-x_i) - N \left[ \psi(\beta)  - \psi(\alpha+\beta) \right]
            // set equal to zero to get equations for \alpha, \beta
            //   \psi(\alpha) - \psi(\alpha+\beta) = <\log x>
            //   \psi(\beta) - \psi(\alpha+\beta) = <\log (1-x)>

            // compute the mean log of x and (1-x)
            // these are the (logs of) the geometric means
            double ga = 0.0; double gb = 0.0;
            foreach (double value in sample) {
                if ((value <= 0.0) || (value >= 1.0)) throw new InvalidOperationException();
                ga += Math.Log(value); gb += Math.Log(1.0 - value);
            }
            ga /= sample.Count; gb /= sample.Count;

            // define the function to zero
            Func<IList<double>, IList<double>> f = delegate(IList<double> x) {
                double pab = AdvancedMath.Psi(x[0] + x[1]);
                return (new double[] {
                    AdvancedMath.Psi(x[0]) - pab - ga,
                    AdvancedMath.Psi(x[1]) - pab - gb
                });
            };

            // guess initial values using the method of moments
            //   M1 = \frac{\alpha}{\alpha+\beta} C2 = \frac{\alpha\beta}{(\alpha+\beta)^2 (\alpha+\beta+1)}
            // implies
            //   \alpha = M1 \left( \frac{M1 (1-M1)}{C2} - 1 \right)
            //   \beta = (1 - M1) \left( \frac{M1 (1-M1)}{C2} -1 \right)
            double m = sample.Mean; double mm = 1.0 - m;
            double q = m * mm / sample.Variance - 1.0;
            double[] x0 = new double[] { m * q, mm * q };

            // find the parameter values that zero the two equations
            IList<double> x1 = MultiFunctionMath.FindZero(f, x0);
            double a = x1[0]; double b = x1[1];

            // take more derivatives of \log L to get curvature matrix
            //   \frac{\partial^2 \log L}{\partial\alpha^2} = - N \left[ \psi'(\alpha) - \psi'(\alpha+\beta) \right]
            //   \frac{\partial^2 \log L}{\partial\beta^2}  = - N \left[ \psi'(\beta)  - \psi'(\alpha+\beta) \right]
            //   \frac{\partial^2 \log L}{\partial \alpha \partial \beta} = - N \psi'(\alpha+\beta)
            // covariance matrix is inverse of curvature matrix
            SymmetricMatrix CI = new SymmetricMatrix(2);
            CI[0, 0] = sample.Count * (AdvancedMath.Psi(1, a) - AdvancedMath.Psi(1, a + b));
            CI[1, 1] = sample.Count * (AdvancedMath.Psi(1, b) - AdvancedMath.Psi(1, a + b));
            CI[0, 1] = sample.Count * AdvancedMath.Psi(1, a + b);
            CholeskyDecomposition CD = CI.CholeskyDecomposition();
            SymmetricMatrix C = CD.Inverse();

            // do a KS test on the result
            TestResult test = sample.KolmogorovSmirnovTest(new BetaDistribution(a, b));

            // return the results
            FitResult result = new FitResult(x1, C, test);
            return (result);
        }
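A minimal usage sketch of the method above (assuming BetaDistribution exposes the same GetRandomValue(Random) used on the other distributions in these examples; this snippet is not part of the library's own tests):

            Random rng = new Random(1);
            BetaDistribution population = new BetaDistribution(2.0, 5.0);

            // draw a sample of values strictly between zero and one
            Sample sample = new Sample();
            for (int i = 0; i < 100; i++)
            {
                sample.Add(population.GetRandomValue(rng));
            }

            // fit it and inspect the estimated parameters and the goodness of fit
            FitResult fit = BetaDistribution.FitToSample(sample);
            Console.WriteLine(fit.Parameter(0));                // estimate of alpha, with uncertainty
            Console.WriteLine(fit.Parameter(1));                // estimate of beta, with uncertainty
            Console.WriteLine(fit.GoodnessOfFit.Probability);   // KS comparison of the fitted Beta to the sample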
Example #36
        private MoveResult AdjustFlags(Vector2 interpolation, FitResult fitX, FitResult fitY)
        {
            MoveResult adjustments = MoveResult.None;

            if (fitX == FitResult.Ok)
            {
                adjustments |= MoveResult.X;
            }
            else
            {
                if (interpolation.X > 0)
                {
                    adjustments |= MoveResult.BlockedOnPositiveX;
                }
                else if (interpolation.X < 0)
                {
                    adjustments |= MoveResult.BlockedOnNegativeX;
                }
            }

            if (fitY == FitResult.Ok)
            {
                adjustments |= MoveResult.Y;
            }
            else
            {
                if (interpolation.Y > 0)
                {
                    adjustments |= MoveResult.BlockedOnPositiveY;
                }
                else if (interpolation.Y < 0)
                {
                    adjustments |= MoveResult.BlockedOnNegativeY;
                }
            }

            return adjustments;
        }
Example #37
 private void RefreshDiagnostics(DetectionType detectionType, FitResult fit, FitResult fitX, FitResult fitY, MoveResult flags)
 {
     if (detectionType == DetectionType.Collision)
     {
         Diagnostic.Write("fit ", fit);
         Diagnostic.Write("fitX", fitX);
         Diagnostic.Write("fitY", fitY);
         Diagnostic.Write("rslt", flags);
     }
     else if (detectionType == DetectionType.Retrace)
     {
         Diagnostic.Write("rFt ", fit);
         Diagnostic.Write("rFtX", fitX);
         Diagnostic.Write("rFtY", fitY);
         Diagnostic.Write("RSLT", flags);
     }
 }
Example #38
        public override FitResult GetFitResult()
        {
            FitResult result = new FitResult();
            result.RotateMin = RotateMin;
            result.RotateMax = RotateMax;
            result.FitMode = Common.Enums.FitModeEnum.RotationStrengthEvaluation;

            return result;
        }
Example #39
            public DataPoint[] FittedDataPoint()
            {
                if (edges.Length == 0) { return new DataPoint[0];}
                FitResult result = new FitResult(minuit);

                DataPoint[] ret = new DataPoint[edges.Length - 1];

                for (int i = 0; i < this.edges.Length - 1; i++)
                {
                    double y = Integral.Integrate1D(pdf.Compute, edges[i], edges[i + 1], 5, result.PartialArg());
                    double x = (this.edges[i]+this.edges[i+1])/2.0;
                    ret[i] = new DataPoint(x, y);
                }
                return ret;
            }
Example #40
 /// <summary>
 /// Integrates the fitted probability density over each pair of adjacent bin edges to obtain the fitted value for each bin.
 /// </summary>
 /// <returns>An array containing one fitted value per bin.</returns>
 public double[] FittedValue()
 {
     FitResult result = new FitResult(minuit);
     double[] ret = new double[edges.Length - 1];
     for (int i = 0; i < this.edges.Length - 1; i++)
     {
         ret[i] = Integral.Integrate1D(pdf.Compute, edges[i], edges[i + 1], 5, result.PartialArg());
     }
     return ret;
 }