public void MultivariateLinearRegressionAgreement()
        {
            Random rng = new Random(1);

            MultivariateSample SA = new MultivariateSample(2);

            for (int i = 0; i < 10; i++)
            {
                SA.Add(rng.NextDouble(), rng.NextDouble());
            }
            RegressionResult RA = SA.LinearRegression(0);
            ColumnVector     PA = RA.Parameters.Best;
            SymmetricMatrix  CA = RA.Parameters.Covariance;

            MultivariateSample SB = SA.Columns(1, 0);
            RegressionResult   RB = SB.LinearRegression(1);
            ColumnVector       PB = RB.Parameters.Best;
            SymmetricMatrix    CB = RB.Parameters.Covariance;

            Assert.IsTrue(TestUtilities.IsNearlyEqual(PA[0], PB[1]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(PA[1], PB[0]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA[0, 0], CB[1, 1]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA[0, 1], CB[1, 0]));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA[1, 1], CB[0, 0]));

            BivariateSample  SC = SA.TwoColumns(1, 0);
            RegressionResult RC = SC.LinearRegression();
            ColumnVector     PC = RC.Parameters.Best;
            SymmetricMatrix  CC = RC.Parameters.Covariance;

            Assert.IsTrue(TestUtilities.IsNearlyEqual(PA, PC));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(CA, CC));
        }
        public void MultivariateLinearRegressionNullDistribution()
        {
            int d = 4;

            Random             rng = new Random(1);
            NormalDistribution n   = new NormalDistribution();

            Sample fs = new Sample();

            for (int i = 0; i < 64; i++)
            {
                MultivariateSample ms = new MultivariateSample(d);
                for (int j = 0; j < 8; j++)
                {
                    double[] x = new double[d];
                    for (int k = 0; k < d; k++)
                    {
                        x[k] = n.GetRandomValue(rng);
                    }
                    ms.Add(x);
                }
                RegressionResult r = ms.LinearRegression(0);
                fs.Add(r.F.Statistic);
            }

            // conduct a KS test to check that F follows the expected distribution
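            // Each fit regresses column 0 on the other 3 columns plus an intercept using n = 8 rows,
            // so under the null hypothesis the F statistic follows F(3, n - 4) = F(3, 4).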
            TestResult ks = fs.KolmogorovSmirnovTest(new FisherDistribution(3, 4));

            Assert.IsTrue(ks.LeftProbability < 0.95);
        }
Example #3
        public RegressionResult RegressLine(List <Point> points)
        {
            if (points.Count == 0)
            {
                return(new RegressionResult());
            }

            var xtyAve = points.Select(p => p.X * p.Y).Average();
            var xAve   = points.Average(p => p.X);
            var yAve   = points.Average(p => p.Y);
            var xsqAve = points.Select(p => p.X * p.X).Average();
            var ysqAve = points.Select(p => p.Y * p.Y).Average();
            var xAveSq = xAve * xAve;
            var yAveSq = yAve * yAve;
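            // Least-squares line: slope = Cov(X,Y) / Var(X) = (E[xy] - E[x]E[y]) / (E[x^2] - E[x]^2),
            // intercept = E[y] - slope * E[x].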

            var result = new RegressionResult();

            result.Slope = (xtyAve - xAve * yAve) / (xsqAve - xAveSq);

            if (double.IsNaN(result.Slope))
            {
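                // Slope is NaN when all X values are identical (a vertical line); fall back to a large finite slope.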
                result.Slope = 100000;
            }

            result.Intercept = yAve - result.Slope * xAve;
            // Pearson correlation coefficient; square it so the property actually holds R².
            var r = (xtyAve - xAve * yAve) / Math.Sqrt((xsqAve - xAveSq) * (ysqAve - yAveSq));
            result.RSquared = r * r;

            return(result);
        }
Example #4
        double runCalculation(SimplexConstant[] consts)
        {
            ObjectiveFunctionDelegate objFunc = new ObjectiveFunctionDelegate(minimizeAngle);
            RegressionResult          result  = NelderMeadSimplex.Regress(consts, tolerance, maxEvals, objFunc);

            return(result.Constants[0]);
        }
Example #5
        public List <Point> PointsOnLineDistanceAway(Point p, RegressionResult line, double dist)
        {
            // Solve for the x-coordinates of the two points on y = m*x + a at distance d from (xo, yo):
            //   x = (-a*m + xo ± Sqrt(d^2*(1 + m^2) - (a + m*xo - yo)^2) + m*yo) / (1 + m^2)

            var a   = line.Intercept;
            var m   = line.Slope;
            var d   = dist;
            var dsq = d * d;
            var msq = m * m;
            var xo  = p.X;
            var yo  = p.Y;

            var sqrtPart = Math.Sqrt(dsq * (1 + msq) - (a + m * xo - yo) * (a + m * xo - yo));
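            // If the discriminant is negative, (xo, yo) lies farther than dist from the line: sqrtPart is NaN,
            // and the code relies on the NaN-to-int cast producing int.MinValue so the guard below catches it.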
            var xplus    = (int)Math.Round((-a * m + xo - sqrtPart + m * yo) / (1 + msq));
            var xminus   = (int)Math.Round((-a * m + xo + sqrtPart + m * yo) / (1 + msq));

            if (xminus == int.MinValue)
            {
                return(new List <Point>());
            }

            return(new List <Point>
            {
                new Point(xplus, (int)Math.Round(xplus * m + a)),
                new Point(xminus, (int)Math.Round(xminus * m + a))
            });
        }
Example #6
        public Point Intersects(RegressionResult line1, RegressionResult line2)
        {
            //x=(a2-a1)/(m1-m2)
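            // Note: parallel lines (equal slopes) make the denominator zero, so x becomes infinite or NaN.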
            var x = (line2.Intercept - line1.Intercept) / (line1.Slope - line2.Slope);
            var y = line1.Slope * x + line1.Intercept;

            return(new Point((int)Math.Round(x), (int)Math.Round(y)));
        }
Example #7
 private static void _printResult(RegressionResult result)
 {
     // a bit of reflection fun, why not...
     PropertyInfo[] properties = result.GetType().GetProperties();
     foreach (PropertyInfo p in properties)
     {
         Console.WriteLine(p.Name + ": " + p.GetValue(result, null).ToString());
     }
 }
Example #8
 /// <summary>
 /// Test to see if we can fit a parabola
 /// </summary>
 public static void SimplexTest1()
 {
     Console.WriteLine("Starting SimplexTest1");
     SimplexConstant[] constants = new SimplexConstant[] { new SimplexConstant(3, 1), new SimplexConstant(5, 1) };
     double tolerance = 1e-6;
     int maxEvals = 1000;
     ObjectiveFunctionDelegate objFunction = new ObjectiveFunctionDelegate(_objFunction1);
     RegressionResult result = NelderMeadSimplex.Regress(constants, tolerance, maxEvals, objFunction);
     _printResult(result);
 }
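
 // For reference, a minimal sketch of an objective compatible with this test. The actual
 // _objFunction1 is not shown in this example; the delegate signature and the target
 // point (10, 3) below are assumptions, purely for illustration.
 private static double _objFunction1Sketch(double[] constants)
 {
     // a simple paraboloid with its minimum at (10, 3)
     double da = constants[0] - 10;
     double db = constants[1] - 3;
     return da * da + db * db;
 }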
Example #9
        /// <summary>
        /// Test on the Rosenbrock function
        /// </summary>
        public static void SimplexTest2()
        {
            Console.WriteLine("\n\nStarting SimplexTest2");

            // start from the classic Rosenbrock starting point (-1.2, 1)
            SimplexConstant[] constants = new SimplexConstant[] { new SimplexConstant(-1.2, .1), new SimplexConstant(1, .1)};
            double tolerance = 1e-10;
            int maxEvals = 1000;
            ObjectiveFunctionDelegate objFunction = new ObjectiveFunctionDelegate(_objFunction2);
            RegressionResult result = NelderMeadSimplex.Regress(constants, tolerance, maxEvals, objFunction);
            _printResult(result);
        }
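
        // For reference, a sketch of a Rosenbrock objective compatible with this test. The actual
        // _objFunction2 is not shown here; the delegate signature is an assumption for illustration.
        // f(x, y) = (1 - x)^2 + 100 * (y - x^2)^2, minimized at (1, 1).
        private static double _objFunction2Sketch(double[] constants)
        {
            double x = constants[0];
            double y = constants[1];
            return (1 - x) * (1 - x) + 100 * (y - x * x) * (y - x * x);
        }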
Example #10
        /// <summary>
        /// Test regressing a six-parameter model onto the sampled (tt, Y) data
        /// </summary>
        private static void _simplexTest1()
        {
            Console.WriteLine("Starting SimplexTest1");
            SimplexConstant[] nss       = new SimplexConstant[] { new SimplexConstant(0, 1), new SimplexConstant(0, 1), new SimplexConstant(0, 1), new SimplexConstant(0, 1), new SimplexConstant(1, 1), new SimplexConstant(1, 1) };
            double[]          tt        = { 5.0, 10.0, 15.0, 20.0, 25.0 };
            double[]          Y         = { 0.001770949, 0.008396027, 0.013860686, 0.019379306, 0.023731833 };
            double            tolerance = 1e-6;
            int maxEvals = 1000;
            ObjectiveFunctionDelegate objFunction = new ObjectiveFunctionDelegate(_objFunction1);
            RegressionResult          result      = NelderMeadSimplex.Regress(nss, tt, Y, tolerance, maxEvals, objFunction);

            _printResult(result);
        }
Example #11
    // Run cost minimization
    Vector3 IDecisionManager.getNextDesiredPoint()
    {
        // What are the variables to minimize (position)
        SimplexConstant[] constants = new SimplexConstant[] { new SimplexConstant(this.sensorModule.gps.position.x, 1),
                                                              new SimplexConstant(this.sensorModule.gps.position.y, 1),
                                                              new SimplexConstant(this.sensorModule.gps.position.z, 1) };
        double tolerance = 1e-30;
        int    maxEvals  = 10000;
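
        // Note: a convergence tolerance of 1e-30 is tighter than double precision can meaningfully resolve,
        // so in practice this search will typically stop on MaxFunctionEvaluations rather than on convergence.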

        // What's the objective function
        ObjectiveFunctionDelegate objFunction = new ObjectiveFunctionDelegate(_objFunction1);
        RegressionResult          result      = NelderMeadSimplex.Regress(constants, tolerance, maxEvals, objFunction);
        Vector3 r = new Vector3((float)result.Constants[0], (float)result.Constants[1], (float)result.Constants[2]);

//		Debug.Log (result.TerminationReason.ToString ());
        return(r);
    }
Example #12
        public void BivariateLinearPolynomialRegressionAgreement()
        {
            // A degree-1 polynomial fit should give the same answer as a linear fit

            BivariateSample B = new BivariateSample();

            B.Add(0.0, 5.0);
            B.Add(3.0, 6.0);
            B.Add(1.0, 7.0);
            B.Add(4.0, 8.0);
            B.Add(2.0, 9.0);
            RegressionResult PR = B.PolynomialRegression(1);
            RegressionResult LR = B.LinearRegression();

            Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.Parameters.Best, LR.Parameters.Best));
            Assert.IsTrue(TestUtilities.IsNearlyEqual(PR.Parameters.Covariance, LR.Parameters.Covariance));
        }
Example #13
        public void Minimize()
        {
            SetCoefficients();

            Console.WriteLine("Starting minimization...");
            SimplexConstant[] constants = new SimplexConstant[Coefficients.Length];
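            // SimplexConstant(initialValue, initialPerturbation): start each dimension at the current
            // coefficient, with an initial simplex step of half its magnitude.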
            for (int i = 0; i < Coefficients.Length; i++)
            {
                constants[i] = new SimplexConstant(Coefficients[i], Math.Abs(Coefficients[i]) / 2);
            }
            double tolerance = 1e-6;
            int    maxEvals  = 1000;
            ObjectiveFunctionDelegate objFunction = new ObjectiveFunctionDelegate(SpiderObjectiveFunction);
            RegressionResult          result      = NelderMeadSimplex.Regress(constants, tolerance, maxEvals, objFunction);

            Coefficients = result.Constants;
            PrintCoefficients(Coefficients);
        }
Example #14
        /// <summary>
        /// Calculates the regression function between the observed values and the basis (predicted) values.
        /// </summary>
        /// <param name="observed">Observed values.</param>
        /// <param name="basis">Basis (predicted) values to regress against.</param>
        /// <returns>The regression result holding the NET slope, intercept, and R-squared.</returns>
        public RegressionResult CalculateRegression(List <float> observed, List <float> basis)
        {
            float[] observedArray = new float[observed.Count];
            float[] basisArray    = new float[basis.Count];

            observed.CopyTo(observedArray);
            basis.CopyTo(basisArray);

            m_regressor.SetPoints(ref observedArray, ref basisArray);
            m_regressor.PerformRegression((Regressor.RegressionType)RegressionType);

            RegressionResult result = new RegressionResult();

            result.NETSlope     = m_regressor.Slope;
            result.NETIntercept = m_regressor.Intercept;
            result.NETRSquared  = m_regressor.RSquared;

            return(result);
        }
Example #15
        public Point PointAlongLine(RegressionResult line, Point point, double distance)
        {
            //d2 = dx2 + dy2
            //m = dy / dx => m2 = dy2 / dx2
            //m2* dx2 = dy2
            //d2 = dx2 + m2 * dx2 => (1 + m2) * dx2

            //sqrt(d2 / (1 + m2)) = dx
            //sqrt(d2 - dx2) = dy

            var dx = Math.Sqrt(distance * distance / (1 + (line.Slope * line.Slope)));
            var dy = Math.Sqrt(distance * distance - dx * dx);
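            // Note: dy is always non-negative here, so the returned point stays on the line only for
            // non-negative slopes; a negative slope would also need dy scaled by Math.Sign(line.Slope).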

            return(new Point
                   (
                       point.X + (int)dx * Math.Sign(distance),
                       point.Y + (int)dy * Math.Sign(distance)
                   ));
        }
Example #16
        static void PrintResult(string title, Bacterium bacterium, RegressionResult result)
        {
            double matchSum = 0;

            Console.WriteLine("\n");
            Console.WriteLine($"{title} on {bacterium.ASV}. Used {result.FormulaUsed}");
            for (var i = 0; i < result.TestY.Length; i++)
            {
                var match = GetMatch(result.TestX[i], result.PredictionOnTestSet[i]);
                Console.WriteLine(String.Format("Timestamp {0} is {1:0.00} and predicted {2:0.00}. Prediction is {3:0.0000}% of actual value",
                                                result.TestY[i],
                                                result.TestX[i],
                                                result.PredictionOnTestSet[i],
                                                match
                                                ));
                matchSum += match;
            }
            var matchAverage = matchSum / result.TestY.Length;

            Console.WriteLine($"Error on training set: {result.Error}");
            Console.WriteLine($"Average match on test set (closer to 100 is better): {Math.Round(matchAverage, 4)}");
        }
Example #17
        private const double JITTER = 1e-10d; // a small value used to protect against floating point noise

        public static RegressionResult Regress(SimplexConstant[] simplexConstants, double convergenceTolerance, int maxEvaluations,
                                               ObjectiveFunctionDelegate objectiveFunction)
        {
            // confirm that we are in a position to commence
            if (objectiveFunction == null)
            {
                throw new InvalidOperationException("ObjectiveFunction must be set to a valid ObjectiveFunctionDelegate"); // Not L10N
            }
            if (simplexConstants == null)
            {
                throw new InvalidOperationException("SimplexConstants must be initialized"); // Not L10N
            }
            // create the initial simplex
            int numDimensions = simplexConstants.Length;
            int numVertices   = numDimensions + 1;

            Vector <double>[] vertices = InitializeVertices(simplexConstants);

            int evaluationCount = 0;
            TerminationReason terminationReason;
            ErrorProfile      errorProfile;

            double[] errorValues = InitializeErrorValues(vertices, objectiveFunction);

            // iterate until we converge, or complete our permitted number of iterations
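            // Each pass tries the standard Nelder-Mead moves: reflection (scale -1.0), expansion (2.0),
            // and contraction (0.5), with a full shrink toward the best vertex when contraction fails.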
            while (true)
            {
                errorProfile = EvaluateSimplex(errorValues);

                // see if the range in point heights is small enough to exit
                if (HasConverged(convergenceTolerance, errorProfile, errorValues))
                {
                    terminationReason = TerminationReason.Converged;
                    break;
                }

                // attempt a reflection of the simplex
                double reflectionPointValue = TryToScaleSimplex(-1.0, ref errorProfile, vertices, errorValues, objectiveFunction);
                ++evaluationCount;
                if (reflectionPointValue <= errorValues[errorProfile.LowestIndex])
                {
                    // it's better than the best point, so attempt an expansion of the simplex
                    TryToScaleSimplex(2.0, ref errorProfile, vertices, errorValues, objectiveFunction);
                    ++evaluationCount;
                }
                else if (reflectionPointValue >= errorValues[errorProfile.NextHighestIndex])
                {
                    // it would be worse than the second best point, so attempt a contraction to look
                    // for an intermediate point
                    double currentWorst          = errorValues[errorProfile.HighestIndex];
                    double contractionPointValue = TryToScaleSimplex(0.5, ref errorProfile, vertices, errorValues, objectiveFunction);
                    ++evaluationCount;
                    if (contractionPointValue >= currentWorst)
                    {
                        // that would be even worse, so let's try to contract uniformly towards the low point;
                        // don't bother to update the error profile, we'll do it at the start of the
                        // next iteration
                        ShrinkSimplex(errorProfile, vertices, errorValues, objectiveFunction);
                        evaluationCount += numVertices; // that required one function evaluation for each vertex; keep track
                    }
                }
                // check to see if we have exceeded our alloted number of evaluations
                if (evaluationCount >= maxEvaluations)
                {
                    terminationReason = TerminationReason.MaxFunctionEvaluations;
                    break;
                }
            }
            RegressionResult regressionResult = new RegressionResult(terminationReason,
                                                                     vertices[errorProfile.LowestIndex].ToArray(), errorValues[errorProfile.LowestIndex], evaluationCount);

            return(regressionResult);
        }