public void FitDataToLinearFunctionTest()
        {
            // create a data set from a linear combination of sine and cosine
            Interval r = Interval.FromEndpoints(-4.0, 6.0);

            double[] c = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 };
            Func <double, double> fv = delegate(double x) {
                return(2.0 * Math.Cos(x) + 1.0 * Math.Sin(x));
            };
            Func <double, double> fu = delegate(double x) {
                return(0.1 + 0.1 * Math.Abs(x));
            };
            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 20, 2);

            // fit the data set to a linear combination of sine and cosine
            Func <double, double>[] fs = new Func <double, double>[]
            { delegate(double x) { return(Math.Cos(x)); }, delegate(double x) { return(Math.Sin(x)); } };
            FitResult result = set.FitToLinearFunction(fs);

            // the fit should have the right dimension
            Assert.IsTrue(result.Dimension == 2);

            // the coefficients should match
            Console.WriteLine(result.Parameter(0));
            Console.WriteLine(result.Parameter(1));
            Assert.IsTrue(result.Parameter(0).ConfidenceInterval(0.95).ClosedContains(2.0));
            Assert.IsTrue(result.Parameter(1).ConfidenceInterval(0.95).ClosedContains(1.0));

            // the square roots of the diagonal covariances should match the parameter uncertainties
            Assert.IsTrue(TestUtilities.IsNearlyEqual(Math.Sqrt(result.Covariance(0, 0)), result.Parameter(0).Uncertainty));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(Math.Sqrt(result.Covariance(1, 1)), result.Parameter(1).Uncertainty));
        }
        public void NormalFitCovariances()
        {
            NormalDistribution N = new NormalDistribution(-1.0, 2.0);

            // Create a bivariate sample to hold our fitted best mu and sigma values
            // so we can determine their covariance as well as their means and variances
            BivariateSample    parameters  = new BivariateSample();
            MultivariateSample covariances = new MultivariateSample(3);

            // A bunch of times, create a normal sample
            for (int i = 0; i < 128; i++)
            {
                // We use small samples so the variation in mu and sigma will be more substantial.
                Sample s = TestUtilities.CreateSample(N, 8, i);

                // Fit each sample to a normal distribution
                FitResult fit = NormalDistribution.FitToSample(s);

                // and record the mu and sigma values from the fit into our bivariate sample
                parameters.Add(fit.Parameter(0).Value, fit.Parameter(1).Value);

                // also record the claimed covariances among these parameters
                covariances.Add(fit.Covariance(0, 0), fit.Covariance(1, 1), fit.Covariance(0, 1));
            }

            // the mean fit values should agree with the population distribution
            Assert.IsTrue(parameters.X.PopulationMean.ConfidenceInterval(0.95).ClosedContains(N.Mean));
            Assert.IsTrue(parameters.Y.PopulationMean.ConfidenceInterval(0.95).ClosedContains(N.StandardDeviation));

            // but also the covariances of those fit values should agree with the claimed covariances
            Assert.IsTrue(parameters.X.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(covariances.Column(0).Mean));
            Assert.IsTrue(parameters.Y.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(covariances.Column(1).Mean));
            Assert.IsTrue(parameters.PopulationCovariance.ConfidenceInterval(0.95).ClosedContains(covariances.Column(2).Mean));
        }
        public void FitDataToProportionalityTest()
        {
            Interval r = Interval.FromEndpoints(0.0, 0.1);
            Func <double, double> fv = delegate(double x) {
                return(0.5 * x);
            };
            Func <double, double> fu = delegate(double x) {
                return(0.02);
            };
            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 20);

            // fit to proportionality
            FitResult prop = set.FitToProportionality();

            Assert.IsTrue(prop.Dimension == 1);
            Assert.IsTrue(prop.Parameter(0).ConfidenceInterval(0.95).ClosedContains(0.5));
            Assert.IsTrue(prop.GoodnessOfFit.LeftProbability < 0.95);

            // fit to line
            FitResult line = set.FitToLine();

            Assert.IsTrue(line.Dimension == 2);

            // line's intercept should be compatible with zero and slope with proportionality constant
            Assert.IsTrue(line.Parameter(0).ConfidenceInterval(0.95).ClosedContains(0.0));
            Assert.IsTrue(line.Parameter(1).ConfidenceInterval(0.95).ClosedContains(prop.Parameter(0).Value));

            // the fit should be better, but not too much better
            Assert.IsTrue(line.GoodnessOfFit.Statistic < prop.GoodnessOfFit.Statistic);
        }
        public void ExponentialFitUncertainty()
        {
            // check that the uncertainty in reported fit parameters is actually meaningful
            // it should be the standard deviation of fit parameter values in a sample of many fits
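            // (for an exponential with mean mu, the maximum-likelihood estimate of the mean is just the
            // sample mean, whose standard deviation is mu / sqrt(n); with mu = 4 and n = 8 that suggests
            // reported uncertainties near 1.4)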

            // define a population distribution
            ExponentialDistribution distribution = new ExponentialDistribution(4.0);

            // draw a lot of samples from it; fit each sample and
            // record the reported parameter value and error of each
            Sample values        = new Sample();
            Sample uncertainties = new Sample();

            for (int i = 0; i < 128; i++)
            {
                Sample         sample = SampleTest.CreateSample(distribution, 8, i);
                FitResult      fit    = ExponentialDistribution.FitToSample(sample);
                UncertainValue lambda = fit.Parameter(0);
                values.Add(lambda.Value);
                uncertainties.Add(lambda.Uncertainty);
            }

            // the reported values should agree with the source distribution
            Assert.IsTrue(values.PopulationMean.ConfidenceInterval(0.95).ClosedContains(distribution.Mean));

            // the reported errors should agree with the standard deviation of the reported parameters
            Assert.IsTrue(values.PopulationStandardDeviation.ConfidenceInterval(0.95).ClosedContains(uncertainties.Mean));
        }
        public void FitDataToFunctionTest()
        {
            // create a data set from a nonlinear function

            /*
             * Interval r = Interval.FromEndpoints(-3.0, 5.0);
             * double[] c = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 };
             * Function<double, double> fv = delegate(double x) {
             *  return (3.0 * Math.Cos(2.0 * Math.PI * x / 2.0 - 1.0));
             * };
             * Function<double, double> fu = delegate(double x) {
             *  return (0.1 + 0.1 * Math.Abs(x));
             * };
             * DataSet set = CreateDataSet(r, fv, fu, 20, 2);
             */

            UncertainMeasurementSample set = new UncertainMeasurementSample();
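            // the points below trace one full period of a unit-amplitude cosine (period 8, peak at x = 1),
            // so a successful fit should find p[1] near 1 and p[0] near 8 / (2 pi), about 1.27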

            set.Add(new UncertainMeasurement <double>(1.0, 1.0, 0.1));
            set.Add(new UncertainMeasurement <double>(2.0, 0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(3.0, 0.0, 0.1));
            set.Add(new UncertainMeasurement <double>(4.0, -0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(5.0, -1.0, 0.1));
            set.Add(new UncertainMeasurement <double>(6.0, -0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(7.0, 0.0, 0.1));
            set.Add(new UncertainMeasurement <double>(8.0, 0.7, 0.1));
            set.Add(new UncertainMeasurement <double>(9.0, 1.0, 0.1));

            // fit it to a parameterized fit function

            /*
             * Function<double[], double, double> ff = delegate(double[] p, double x) {
             *  return (p[0] * Math.Cos(2.0 * Math.PI / p[1] + p[2]));
             * };
             */
            Func <double[], double, double> ff = delegate(double[] p, double x) {
                //Console.WriteLine("    p[0]={0}, x={1}", p[0], x);
                return(p[1] * Math.Cos(x / p[0] + p[2]));
                //return (x / p[0]);
            };
            FitResult fit = set.FitToFunction(ff, new double[] { 1.3, 1.1, 0.1 });

            Console.WriteLine(fit.Parameter(0));
            Console.WriteLine(fit.Parameter(1));
            Console.WriteLine(fit.Parameter(2));
        }
        public void NormalFit()
        {
            // pick mu >> sigma so that we get no negative values;
            // otherwise the attempt to fit to an exponential will fail
            ContinuousDistribution distribution = new NormalDistribution(6.0, 2.0);
            Sample sample = SampleTest.CreateSample(distribution, 100);

            // fit to normal should be good
            FitResult nfit = NormalDistribution.FitToSample(sample);

            Assert.IsTrue(nfit.GoodnessOfFit.Probability > 0.05);
            Assert.IsTrue(nfit.Parameter(0).ConfidenceInterval(0.95).ClosedContains(distribution.Mean));
            Assert.IsTrue(nfit.Parameter(1).ConfidenceInterval(0.95).ClosedContains(distribution.StandardDeviation));

            // fit to exponential should be bad
            FitResult efit = ExponentialDistribution.FitToSample(sample);

            Assert.IsTrue(efit.GoodnessOfFit.Probability < 0.05);
        }
        public void Bug7953()
        {
            // Fitting this sample to a Weibull caused a NonconvergenceException in the root finder that was used inside the fit method.
            // The underlying problem was that the equation to solve involved x^k with k ~ 2000, and (~12)^(~2000) overflows a double,
            // so all the quantities became Infinity and the root-finder never converged. We changed the algorithm to operate on
            // w = log x - <log x>, which keeps the quantities much smaller.
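            // (for example, Math.Pow(12.87, 2000.0) evaluates to double.PositiveInfinity,
            // since 12.87^2000 ~ 10^2219 is far beyond double.MaxValue ~ 1.8e308)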

            Sample sample = new Sample(
                12.824, 12.855, 12.861, 12.862, 12.863,
                12.864, 12.865, 12.866, 12.866, 12.866,
                12.867, 12.867, 12.868, 12.868, 12.870,
                12.871, 12.871, 12.871, 12.871, 12.872,
                12.876, 12.878, 12.879, 12.879, 12.881
                );

            FitResult result = WeibullDistribution.FitToSample(sample);

            Console.WriteLine("{0} {1}", result.Parameter(0), result.Parameter(1));
            Console.WriteLine(result.GoodnessOfFit.RightProbability);
        }
        public void FitDataToLineTest()
        {
            Interval r = Interval.FromEndpoints(0.0, 10.0);
            Func <double, double> fv = delegate(double x) {
                return(2.0 * x - 1.0);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + x);
            };
            UncertainMeasurementSample data = CreateDataSet(r, fv, fu, 20);


            // sanity check the data set
            Assert.IsTrue(data.Count == 20);

            // fit to a line
            FitResult line = data.FitToLine();

            Assert.IsTrue(line.Dimension == 2);
            Assert.IsTrue(line.Parameter(0).ConfidenceInterval(0.95).ClosedContains(-1.0));
            Assert.IsTrue(line.Parameter(1).ConfidenceInterval(0.95).ClosedContains(2.0));
            Assert.IsTrue(line.GoodnessOfFit.LeftProbability < 0.95);

            // correlation coefficient should be related to covariance as expected
            Assert.IsTrue(TestUtilities.IsNearlyEqual(line.CorrelationCoefficient(0, 1), line.Covariance(0, 1) / line.Parameter(0).Uncertainty / line.Parameter(1).Uncertainty));

            // fit to a 1st order polynomial and make sure it agrees
            FitResult poly = data.FitToPolynomial(1);

            Assert.IsTrue(poly.Dimension == 2);
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.Parameters, line.Parameters));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.CovarianceMatrix, line.CovarianceMatrix));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.GoodnessOfFit.Statistic, line.GoodnessOfFit.Statistic));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.GoodnessOfFit.LeftProbability, line.GoodnessOfFit.LeftProbability));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(poly.GoodnessOfFit.RightProbability, line.GoodnessOfFit.RightProbability));

            // fit to a constant; the result should be poor
            FitResult constant = data.FitToConstant();

            Assert.IsTrue(constant.GoodnessOfFit.LeftProbability > 0.95);
        }
        public void FitToFunctionPolynomialCompatibilityTest()
        {
            // specify a cubic function
            Interval r = Interval.FromEndpoints(-10.0, 10.0);
            Func <double, double> fv = delegate(double x) {
                return(0.0 - 1.0 * x + 2.0 * x * x - 3.0 * x * x * x);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + 0.5 * Math.Cos(x));
            };

            // create a data set from it
            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 60);

            // fit it to a cubic polynomial
            FitResult pFit = set.FitToPolynomial(3);

            // also fit it to the same cubic via an explicit fit function
            Func <double[], double, double> ff = delegate(double[] p, double x) {
                return(p[0] + p[1] * x + p[2] * x * x + p[3] * x * x * x);
            };
            FitResult fFit = set.FitToFunction(ff, new double[] { 0, 0, 0, 0 });

            // the fits should agree
            Console.WriteLine("{0} ?= {1}", pFit.GoodnessOfFit.Statistic, fFit.GoodnessOfFit.Statistic);
            for (int i = 0; i < pFit.Dimension; i++)
            {
                Console.WriteLine("{0} ?= {1}", pFit.Parameter(i), fFit.Parameter(i));
                Assert.IsTrue(pFit.Parameter(i).ConfidenceInterval(0.01).ClosedContains(fFit.Parameter(i).Value));
            }
            // dimension
            Assert.IsTrue(pFit.Dimension == fFit.Dimension);
            // chi squared
            Assert.IsTrue(TestUtilities.IsNearlyEqual(pFit.GoodnessOfFit.Statistic, fFit.GoodnessOfFit.Statistic, Math.Sqrt(TestUtilities.TargetPrecision)));
            // don't demand super-high precision agreement of parameters and covariance matrix
            // parameters
            Assert.IsTrue(TestUtilities.IsNearlyEqual(pFit.Parameters, fFit.Parameters, Math.Pow(TestUtilities.TargetPrecision, 0.3)));
            // covariance
            Assert.IsTrue(TestUtilities.IsNearlyEqual(pFit.CovarianceMatrix, fFit.CovarianceMatrix, Math.Pow(TestUtilities.TargetPrecision, 0.3)));
        }
        public void FitDataToPolynomialTest()
        {
            Interval r = Interval.FromEndpoints(-10.0, 10.0);

            double[] c = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 };
            Func <double, double> fv = delegate(double x) {
                double f = 0.0;
                for (int i = c.Length - 1; i >= 0; i--)
                {
                    f = f * x + c[i];
                }
                return(f);
            };
            Func <double, double> fu = delegate(double x) {
                return(1.0 + 0.5 * Math.Cos(x));
            };

            UncertainMeasurementSample set = CreateDataSet(r, fv, fu, 50);

            Assert.IsTrue(set.Count == 50);

            // fit to an appropriate polynomial
            FitResult poly = set.FitToPolynomial(5);

            // the coefficients should match
            for (int i = 0; i < poly.Dimension; i++)
            {
                Assert.IsTrue(poly.Parameter(i).ConfidenceInterval(0.95).ClosedContains(c[i]));
            }

            // the fit should be good
            Console.WriteLine(poly.GoodnessOfFit.LeftProbability);
            Assert.IsTrue(poly.GoodnessOfFit.LeftProbability < 0.95);

            // fit to a lower order polynomial
            FitResult low = set.FitToPolynomial(4);

            // the fit should be bad
            Assert.IsTrue(low.GoodnessOfFit.Statistic > poly.GoodnessOfFit.Statistic);
            Assert.IsTrue(low.GoodnessOfFit.LeftProbability > 0.95);

            // fit to a higher order polynomial
            FitResult high = set.FitToPolynomial(6);

            // the higher order coefficients should be compatible with zero
            Assert.IsTrue(high.Parameter(6).ConfidenceInterval(0.95).ClosedContains(0.0));

            // the fit should be better, but not too much better
            Assert.IsTrue(high.GoodnessOfFit.Statistic < poly.GoodnessOfFit.Statistic);
        }
        public void ExponentialFit()
        {
            ExponentialDistribution distribution = new ExponentialDistribution(5.0);
            Sample sample = SampleTest.CreateSample(distribution, 100);

            // fit to normal should be bad
            FitResult nfit = NormalDistribution.FitToSample(sample);

            Assert.IsTrue(nfit.GoodnessOfFit.Probability < 0.05);

            // fit to exponential should be good
            FitResult efit = ExponentialDistribution.FitToSample(sample);

            Assert.IsTrue(efit.GoodnessOfFit.Probability > 0.05);
            Assert.IsTrue(efit.Parameter(0).ConfidenceInterval(0.95).ClosedContains(distribution.Mean));
        }
        public void Bug6162()
        {
            // When UncertainMeasurementSample.FitToPolynomial used Cholesky inversion of (A^T A), this inversion
            // would fail when roundoff errors made the matrix non-positive-definite. We have now changed
            // to QR decomposition, which is more robust.
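            // The x values below span less than 0.01 while sitting near 40270.66, so the columns 1, x, x^2, x^3
            // of the cubic design matrix are almost perfectly collinear; the commented-out line further down,
            // which subtracts reference values near the means before fitting, illustrates the classic hand-fix
            // for this kind of conditioning problem.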

            //real data
            double[] X_axis = new double[] { 40270.65625, 40270.6569444444, 40270.6576388888, 40270.6583333332, 40270.6590277776,
                                             40270.659722222, 40270.6604166669, 40270.6611111113, 40270.6618055557, 40270.6625000001 };

            double[] Y_axis = new double[] { 246.824996948242, 246.850006103516, 245.875, 246.225006103516, 246.975006103516,
                                             247.024993896484, 246.949996948242, 246.875, 247.5, 247.100006103516 };

            UncertainMeasurementSample DataSet = new UncertainMeasurementSample();

            for (int i = 0; i < 10; i++)
            {
                DataSet.Add(X_axis[i], Y_axis[i], 1);
            }
            //for (int i = 0; i < 10; i++) DataSet.Add(X_axis[i] - 40270.0, Y_axis[i] - 247.0, 1);

            FitResult DataFit = DataSet.FitToPolynomial(3);

            for (int i = 0; i < DataFit.Dimension; i++)
            {
                Console.WriteLine("a" + i.ToString() + " = " + DataFit.Parameter(i).Value);
            }

            BivariateSample bs = new BivariateSample();

            for (int i = 0; i < 10; i++)
            {
                bs.Add(X_axis[i], Y_axis[i]);
            }
            FitResult bsFit = bs.PolynomialRegression(3);

            for (int i = 0; i < bsFit.Dimension; i++)
            {
                Console.WriteLine(bsFit.Parameter(i));
            }
        }
        public void MultivariateLinearRegressionTest()
        {
            // define model y = a + b0 * x0 + b1 * x1 + noise
            double       a     = 1.0;
            double       b0    = -2.0;
            double       b1    = 3.0;
            Distribution noise = new NormalDistribution(0.0, 10.0);

            // draw a sample from the model
            Random             rng    = new Random(1);
            MultivariateSample sample = new MultivariateSample(3);

            for (int i = 0; i < 100; i++)
            {
                double x0  = -10.0 + 20.0 * rng.NextDouble();
                double x1  = -10.0 + 20.0 * rng.NextDouble();
                double eps = noise.InverseLeftProbability(rng.NextDouble());
                double y   = a + b0 * x0 + b1 * x1 + eps;
                sample.Add(x0, x1, y);
            }

            // do a linear regression fit on the model
            FitResult result = sample.LinearRegression(2);

            // the result should have the appropriate dimension
            Assert.IsTrue(result.Dimension == 3);

            // the result should be significant
            Console.WriteLine("{0} {1}", result.GoodnessOfFit.Statistic, result.GoodnessOfFit.LeftProbability);
            Assert.IsTrue(result.GoodnessOfFit.LeftProbability > 0.95);

            // the parameters should match the model
            Console.WriteLine(result.Parameter(0));
            Assert.IsTrue(result.Parameter(0).ConfidenceInterval(0.90).ClosedContains(b0));
            Console.WriteLine(result.Parameter(1));
            Assert.IsTrue(result.Parameter(1).ConfidenceInterval(0.90).ClosedContains(b1));
            Console.WriteLine(result.Parameter(2));
            Assert.IsTrue(result.Parameter(2).ConfidenceInterval(0.90).ClosedContains(a));
        }
    //only compute after drives are achieved
    public void ComputeMotionEffortCoefs(DriveParams[] driveParams)
    {
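        // For each motion coefficient, regress the 32 recorded drive-parameter values against the four
        // effort values stored in _xRange; the five fitted parameters (four effort coefficients plus an
        // intercept) are written into _motionEffortCoefs[0..4][coefInd].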
        //using (StreamWriter sw = new StreamWriter("regressionCoefs.txt")) {
        double param = 0;

        for (int coefInd = 0; coefInd < _motionEffortCoefs[0].Length; coefInd++)
        {
            MultivariateSample efforts = new MultivariateSample(5);     //4 efforts + 1 coefficient

            for (int i = 0; i < 32; i++)
            {
                if (coefInd == (int)MotionCoef.Speed)
                {
                    param = driveParams[i].Speed;
                }
                else if (coefInd == (int)MotionCoef.V0)
                {
                    param = driveParams[i].V0;
                }
                else if (coefInd == (int)MotionCoef.V1)
                {
                    param = driveParams[i].V1;
                }
                else if (coefInd == (int)MotionCoef.Ti)
                {
                    param = driveParams[i].Ti;
                }
                else if (coefInd == (int)MotionCoef.Texp)
                {
                    param = driveParams[i].Texp;
                }
                else if (coefInd == (int)MotionCoef.TVal)
                {
                    param = driveParams[i].Tval;
                }
                else if (coefInd == (int)MotionCoef.T0)
                {
                    param = driveParams[i].T0;
                }
                else if (coefInd == (int)MotionCoef.T1)
                {
                    param = driveParams[i].T1;
                }
                else if (coefInd == (int)MotionCoef.HrMag)
                {
                    param = Mathf.Abs(driveParams[i].HrMag);
                }
                else if (coefInd == (int)MotionCoef.HSign)
                {
                    param = driveParams[i].HSign;
                }
                else if (coefInd == (int)MotionCoef.HfMag)
                {
                    param = driveParams[i].HfMag;
                }
                else if (coefInd == (int)MotionCoef.SquashMag)
                {
                    param = driveParams[i].SquashMag;
                }
                else if (coefInd == (int)MotionCoef.WbMag)
                {
                    param = driveParams[i].WbMag;
                }
                else if (coefInd == (int)MotionCoef.WxMag)
                {
                    param = driveParams[i].WxMag;
                }
                else if (coefInd == (int)MotionCoef.WtMag)
                {
                    param = driveParams[i].WtMag;
                }
                else if (coefInd == (int)MotionCoef.WfMag)
                {
                    param = driveParams[i].WfMag;
                }
                else if (coefInd == (int)MotionCoef.EtMag)
                {
                    param = driveParams[i].EtMag;
                }
                else if (coefInd == (int)MotionCoef.EfMag)
                {
                    param = driveParams[i].EfMag;
                }
                else if (coefInd == (int)MotionCoef.DMag)
                {
                    param = driveParams[i].DMag;
                }
                else if (coefInd == (int)MotionCoef.TrMag)
                {
                    param = driveParams[i].TrMag;
                }
                else if (coefInd == (int)MotionCoef.TfMag)
                {
                    param = driveParams[i].TfMag;
                }
                else if (coefInd == (int)MotionCoef.EncSpr0)
                {
                    param = driveParams[i].EncSpr0;
                }
                else if (coefInd == (int)MotionCoef.SinRis0)
                {
                    param = driveParams[i].SinRis0;
                }
                else if (coefInd == (int)MotionCoef.RetAdv0)
                {
                    param = driveParams[i].RetAdv0;
                }
                else if (coefInd == (int)MotionCoef.EncSpr1)
                {
                    param = driveParams[i].EncSpr1;
                }
                else if (coefInd == (int)MotionCoef.SinRis1)
                {
                    param = driveParams[i].SinRis1;
                }
                else if (coefInd == (int)MotionCoef.RetAdv1)
                {
                    param = driveParams[i].RetAdv1;
                }
                else if (coefInd == (int)MotionCoef.EncSpr2)
                {
                    param = driveParams[i].EncSpr2;
                }
                else if (coefInd == (int)MotionCoef.SinRis2)
                {
                    param = driveParams[i].SinRis2;
                }
                else if (coefInd == (int)MotionCoef.RetAdv2)
                {
                    param = driveParams[i].RetAdv2;
                }
                else if (coefInd == (int)MotionCoef.Continuity)
                {
                    param = driveParams[i].Continuity;
                }
                else if (coefInd == (int)MotionCoef.Arm0X)
                {
                    param = driveParams[i].Arm[0].x;
                }
                else if (coefInd == (int)MotionCoef.Arm0Y)
                {
                    param = driveParams[i].Arm[0].y;
                }
                else if (coefInd == (int)MotionCoef.Arm0Z)
                {
                    param = driveParams[i].Arm[0].z;
                }
                else if (coefInd == (int)MotionCoef.Arm1X)
                {
                    param = driveParams[i].Arm[1].x;
                }
                else if (coefInd == (int)MotionCoef.Arm1Y)
                {
                    param = driveParams[i].Arm[1].y;
                }
                else if (coefInd == (int)MotionCoef.Arm1Z)
                {
                    param = driveParams[i].Arm[1].z;
                }
                else if (coefInd == (int)MotionCoef.ShapeTi)
                {
                    param = driveParams[i].ShapeTi;
                }
                else if (coefInd == (int)MotionCoef.ExtraGoal)
                {
                    param = driveParams[i].ExtraGoal;
                }
                else if (coefInd == (int)MotionCoef.UseCurveKeys)
                {
                    param = driveParams[i].UseCurveKeys;
                }
                else if (coefInd == (int)MotionCoef.FixedTarget)
                {
                    param = driveParams[i].FixedTarget;
                }
                else if (coefInd == (int)MotionCoef.SquashF)
                {
                    param = driveParams[i].SquashF;
                }
                else if (coefInd == (int)MotionCoef.GoalThreshold)
                {
                    param = driveParams[i].GoalThreshold;
                }


                efforts.Add(param, _xRange[i][0], _xRange[i][1], _xRange[i][2], _xRange[i][3]);
            }



            FitResult regression = efforts.LinearRegression(0);     // regress the motion parameter (column 0) on the four effort values
            // the fitted parameters are the space, time, weight, and flow coefficients plus an intercept

            for (int i = 0; i < 5; i++)
            {
                _motionEffortCoefs[i][coefInd] = (float)regression.Parameter(i).Value;
            }
            //         sw.WriteLine(_motionEffortCoefs[0][coefInd] + "\t" + _motionEffortCoefs[1][coefInd] + "\t" +
            //                    _motionEffortCoefs[2][coefInd] + "\t" + _motionEffortCoefs[3][coefInd] + "\t" +
            //                  _motionEffortCoefs[4][coefInd]);


            // }
        }
    }
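
    // A minimal sketch (not from the original source) of how the long if/else chain above could be
    // collapsed into a table of selector delegates. The MotionCoef and DriveParams types here are
    // hypothetical stand-ins reduced to a few of the members used above, just to show the shape of
    // the refactoring; the real types carry many more fields.
    public static class MotionEffortSelectorSketch
    {
        public enum MotionCoef { Speed, V0, V1, Ti }

        public class DriveParams
        {
            public double Speed, V0, V1, Ti;
        }

        // one delegate per coefficient index replaces one branch of the if/else ladder
        private static readonly System.Collections.Generic.Dictionary<MotionCoef, System.Func<DriveParams, double>> selectors =
            new System.Collections.Generic.Dictionary<MotionCoef, System.Func<DriveParams, double>>
            {
                { MotionCoef.Speed, d => d.Speed },
                { MotionCoef.V0,    d => d.V0    },
                { MotionCoef.V1,    d => d.V1    },
                { MotionCoef.Ti,    d => d.Ti    },
            };

        // look the value up by coefficient index instead of branching
        public static double GetParam(MotionCoef coef, DriveParams drive)
        {
            return selectors[coef](drive);
        }
    }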
        public void BivariateLinearRegression()
        {
            // do a set of linear regression fits
            // make sure not only that the fit parameters are what they should be, but that their variances/covariances are as returned
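            // for ordinary least squares on y = a + b x with noise variance sigma^2, the expected parameter
            // covariances are Var(b) = sigma^2 / Sum((x - xbar)^2), Var(a) = sigma^2 * (1/n + xbar^2 / Sum((x - xbar)^2)),
            // and Cov(a, b) = -sigma^2 * xbar / Sum((x - xbar)^2); the averaged reported covariances below
            // should agree with the observed scatter of the fitted a and b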

            Random rng = new Random(314159);

            // define logistic parameters
            double a0 = 2.0; double b0 = -1.0;

            // keep track of sample of returned a and b fit parameters
            BivariateSample ps = new BivariateSample();

            // also keep track of returned covariance estimates
            // since these vary slightly from fit to fit, we will average them
            double caa = 0.0;
            double cbb = 0.0;
            double cab = 0.0;

            // also keep track of test statistics
            Sample fs = new Sample();

            // do 100 fits
            for (int k = 0; k < 100; k++)
            {
                // we should be able to draw x's from any distribution; noise should be drawn from a normal distribution
                Distribution xd = new LogisticDistribution();
                Distribution nd = new NormalDistribution(0.0, 2.0);

                // generate a synthetic data set
                BivariateSample s = new BivariateSample();
                for (int i = 0; i < 25; i++)
                {
                    double x = xd.GetRandomValue(rng);
                    double y = a0 + b0 * x + nd.GetRandomValue(rng);
                    s.Add(x, y);
                }

                // do the regression
                FitResult r = s.LinearRegression();

                // record best fit parameters
                double a = r.Parameter(0).Value;
                double b = r.Parameter(1).Value;
                ps.Add(a, b);

                // record estimated covariances
                caa += r.Covariance(0, 0);
                cbb += r.Covariance(1, 1);
                cab += r.Covariance(0, 1);

                // record the fit statistic
                fs.Add(r.GoodnessOfFit.Statistic);
                Console.WriteLine("F={0}", r.GoodnessOfFit.Statistic);
            }

            caa /= ps.Count;
            cbb /= ps.Count;
            cab /= ps.Count;

            // check that mean parameter estimates are what they should be: the underlying population parameters
            Assert.IsTrue(ps.X.PopulationMean.ConfidenceInterval(0.95).ClosedContains(a0));
            Assert.IsTrue(ps.Y.PopulationMean.ConfidenceInterval(0.95).ClosedContains(b0));

            Console.WriteLine("{0} {1}", caa, ps.X.PopulationVariance);
            Console.WriteLine("{0} {1}", cbb, ps.Y.PopulationVariance);

            // check that parameter covariances are what they should be: the reported covariance estimates
            Assert.IsTrue(ps.X.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(caa));
            Assert.IsTrue(ps.Y.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(cbb));
            Assert.IsTrue(ps.PopulationCovariance.ConfidenceInterval(0.95).ClosedContains(cab));

            // check that F is distributed as it should be
            Console.WriteLine(fs.KolmogorovSmirnovTest(new FisherDistribution(2, 48)).LeftProbability);
        }
        public void LinearLogisticRegression()
        {
            // do a set of logistic regression fits
            // make sure not only that the fit parameters are what they should be, but that their variances/covariances are as returned
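            // for a maximum-likelihood logistic fit, the asymptotic covariance matrix of (a, b) is the
            // inverse Fisher information, so the averaged reported covariances should again agree with
            // the observed scatter of the fitted a and b over many synthetic data sets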

            Random rng = new Random(314159);

            // define logistic parameters
            double a0 = 1.0; double b0 = -1.0 / 2.0;
            //double a0 = -0.5; double b0 = 2.0;

            // keep track of sample of returned a and b fit parameters
            BivariateSample ps = new BivariateSample();

            // also keep track of returned covariance estimates
            // since these vary slightly from fit to fit, we will average them
            double caa = 0.0;
            double cbb = 0.0;
            double cab = 0.0;

            // do 50 fits
            for (int k = 0; k < 50; k++)
            {
                Console.WriteLine("k={0}", k);

                // generate a synthetic data set
                BivariateSample s = new BivariateSample();
                for (int i = 0; i < 50; i++)
                {
                    double x  = 2.0 * rng.NextDouble() - 1.0;
                    double ez = Math.Exp(a0 + b0 * x);
                    double P  = ez / (1.0 + ez);
                    if (rng.NextDouble() < P)
                    {
                        s.Add(x, 1.0);
                    }
                    else
                    {
                        s.Add(x, 0.0);
                    }
                }

                //if (k != 27) continue;

                // do the regression
                FitResult r = s.LinearLogisticRegression();

                // record best fit parameters
                double a = r.Parameter(0).Value;
                double b = r.Parameter(1).Value;
                ps.Add(a, b);

                Console.WriteLine("{0}, {1}", a, b);

                // record estimated covariances
                caa += r.Covariance(0, 0);
                cbb += r.Covariance(1, 1);
                cab += r.Covariance(0, 1);
            }

            caa /= ps.Count;
            cbb /= ps.Count;
            cab /= ps.Count;

            // check that mean parameter estimates are what they should be: the underlying population parameters
            Assert.IsTrue(ps.X.PopulationMean.ConfidenceInterval(0.95).ClosedContains(a0));
            Assert.IsTrue(ps.Y.PopulationMean.ConfidenceInterval(0.95).ClosedContains(b0));

            // check that parameter covariances are what they should be: the reported covariance estimates
            Assert.IsTrue(ps.X.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(caa));
            Assert.IsTrue(ps.Y.PopulationVariance.ConfidenceInterval(0.95).ClosedContains(cbb));
            Assert.IsTrue(ps.PopulationCovariance.ConfidenceInterval(0.95).ClosedContains(cab));
        }