        //The M Step. Numeric properties are much better if A and R are put on the same
        //scale at the start of the process.
        private void MaximizeGivenZExpectationsScaled()
        {
            //Recreate the solver in case it retains the Hessian approximation from a previous run.
            QN = new QuasiNewton();
            QN.MaxIterations = MaxIterations;
            QN.Tolerance     = TerminationTolerance;
            currentWinner    = new CurrentBestSolution()
            {
                curBestFunction = double.MaxValue
            };

            // results = QN.MinimizeDetail(new DiffFunc(GetDerivatives), new double[] {InitialPopSize,GrowthRate});
            results = QN.MinimizeDetail(new DiffFunc(GetDerivativesScaled), startParams);

            pParameters = results.solution;
            curVal      = results.funcValue;
            if (results.quality != Microsoft.SolverFoundation.Solvers.CompactQuasiNewtonSolutionQuality.LocalOptima)
            {
                //Sometimes the solver reaches a local optimum but reports a calculation error
                //instead of LocalOptima; accept the best point found if its gradient norm is small.
                if (results.quality == Microsoft.SolverFoundation.Solvers.CompactQuasiNewtonSolutionQuality.UserCalculationError &&
                    currentWinner.GradientL2NormAtBest < .002)
                {
                    pParameters = currentWinner.curBestParameters;
                    curVal      = currentWinner.curBestFunction;
                }
                else
                {
                    throw new Exception("M Step Failed to Converge during Solving");
                }
            }
        }
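        //Sketch (not from the original source): GetDerivativesScaled is called above but its body
        //is not shown. One plausible shape, assuming DiffFunc means "return the objective value and
        //fill the gradient array", is to rescale the solver variables so A and R are both of order 1
        //and apply the chain rule to the gradient. The scale fields below are hypothetical.
        private double GetDerivativesScaledSketch(double[] scaled, double[] gradient)
        {
            double scaleA = popSizeScale;     //hypothetical field, e.g. the starting estimate of A
            double scaleR = growthRateScale;  //hypothetical field, e.g. the starting estimate of R

            //Map the solver's scaled variables back to the natural parameters A and R.
            var natural = new double[] { scaled[0] * scaleA, scaled[1] * scaleR };

            //Evaluate the unscaled objective and gradient, then rescale the gradient so it is
            //taken with respect to the scaled variables (chain rule).
            double value = GetDerivatives(natural, gradient);
            gradient[0] *= scaleA;
            gradient[1] *= scaleR;
            return value;
        }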
Example #2
 protected void FitModel()
 {
     QN.MaxIterations = 50000;
     QN.Tolerance     = 1e-10;
     results          = QN.MinimizeDetail(new DiffFunc(GetDerivatives), Parameters);
     Parameters       = results.solution;
     SetParameterDictionary();
     if (results.quality != Microsoft.SolverFoundation.Solvers.CompactQuasiNewtonSolutionQuality.LocalOptima)
     {
         throw new Exception("Optimization failed\n" + results.quality.ToString());
     }
     else
     {
         SuccessfulFit = true;
     }
 }
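 //Sketch (not from the original source): the DiffFunc passed to QN.MinimizeDetail above is
 //assumed to return the objective value and write the gradient into its second argument.
 //This hypothetical objective shows that shape for a simple least-squares line fit
 //(p[0] = intercept, p[1] = slope); xData and yData are assumed data fields.
 private double LeastSquaresDerivativesSketch(double[] p, double[] grad)
 {
     double sse = 0.0;
     grad[0] = 0.0;
     grad[1] = 0.0;
     for (int i = 0; i < xData.Length; i++)
     {
         double resid = (p[0] + p[1] * xData[i]) - yData[i];
         sse     += resid * resid;
         grad[0] += 2.0 * resid;            //d(sse)/d(intercept)
         grad[1] += 2.0 * resid * xData[i]; //d(sse)/d(slope)
     }
     return sse;
 }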
Example #3
        protected override void FitModel()
        {
            //TestGradient();
            QN.MaxIterations = 500;
            QN.Tolerance     = 1e-8;
            results          = QN.MinimizeDetail(new DiffFunc(GetDerivatives), CreateInitialParameterGuess());
            if (results.quality != Microsoft.SolverFoundation.Solvers.CompactQuasiNewtonSolutionQuality.LocalOptima)
            {
                Console.WriteLine(results.quality.ToString());
                this.SuccessfulFit = false;
                pParameters        = new double[] { Double.NaN, Double.NaN, Double.NaN };

                //throw new Exception("Problem in Data Fitting!");
            }
            else
            {
                pParameters   = results.solution;
                SuccessfulFit = true;
            }
        }
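        //Sketch (not from the original source): TestGradient() is referenced in the commented-out
        //line above but not shown. A finite-difference check like the following is one plausible
        //shape, comparing the analytic gradient from GetDerivatives against central differences.
        //The DiffFunc shape (value returned, gradient filled in) is an assumption.
        private void TestGradientSketch(double[] p, double step = 1e-6)
        {
            var analytic = new double[p.Length];
            GetDerivatives(p, analytic);

            for (int i = 0; i < p.Length; i++)
            {
                var hi = (double[])p.Clone();
                var lo = (double[])p.Clone();
                hi[i] += step;
                lo[i] -= step;
                var scratch = new double[p.Length];
                double numeric = (GetDerivatives(hi, scratch) - GetDerivatives(lo, scratch)) / (2.0 * step);
                Console.WriteLine("param " + i + ": analytic=" + analytic[i] + " numeric=" + numeric);
            }
        }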
        //The M Step
        private void MaximizeGivenZExpectations()
        {
            //An LBFGS solver was also tried; it doesn't work well, but the code is kept here for reference.
            //LBFGS solver = new LBFGS(2);
            //FunctionEval ff = new FunctionEval(GetDerivativesLBFGS);
            //Vector x0 = Vector.FromArray(new double[] { InitialPopSize, GrowthRate });
            //solver.debug = true;
            //solver.linesearchDebug = true;
            //solver.Run(x0,1, ff);
            //pParameters = x0.ToArray();
            //var b = solver.convergenceCriteria;

            //Recreate the solver in case it retains the Hessian approximation from a previous run.
            QN = new QuasiNewton();
            QN.MaxIterations = MaxIterations;
            QN.Tolerance     = TerminationTolerance;
            currentWinner    = new CurrentBestSolution()
            {
                curBestFunction = double.MaxValue
            };

            // results = QN.MinimizeDetail(new DiffFunc(GetDerivatives), new double[] {InitialPopSize,GrowthRate});
            results = QN.MinimizeDetail(new DiffFunc(GetDerivatives), startParams);

            pParameters = results.solution;
            curVal      = results.funcValue;
            if (results.quality != Microsoft.SolverFoundation.Solvers.CompactQuasiNewtonSolutionQuality.LocalOptima)
            {
                //Sometimes the solver reaches a local optimum but reports a calculation error
                //instead of LocalOptima; accept the best point found if its gradient norm is small.
                if (results.quality == Microsoft.SolverFoundation.Solvers.CompactQuasiNewtonSolutionQuality.UserCalculationError &&
                    currentWinner.GradientL2NormAtBest < .002)
                {
                    pParameters = currentWinner.curBestParameters;
                    curVal      = currentWinner.curBestFunction;
                }
                else
                {
                    throw new Exception("M Step Failed to Converge during Solving");
                }
            }
        }
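        //Sketch (not from the original source): the UserCalculationError fallback above relies on
        //the objective callback recording the best point seen so far in currentWinner. How that
        //bookkeeping is done is not shown; one plausible wrapper is below. The field names on
        //CurrentBestSolution are taken from their use in the M step; everything else is assumed.
        private double GetDerivativesTrackedSketch(double[] p, double[] grad)
        {
            double value = GetDerivatives(p, grad);

            double gradNormSq = 0.0;
            for (int i = 0; i < grad.Length; i++)
            {
                gradNormSq += grad[i] * grad[i];
            }

            //Remember the best function value, its parameters, and the gradient L2 norm there,
            //so the M step can fall back to this point if the solver errors out near an optimum.
            if (value < currentWinner.curBestFunction)
            {
                currentWinner.curBestFunction      = value;
                currentWinner.curBestParameters    = (double[])p.Clone();
                currentWinner.GradientL2NormAtBest = Math.Sqrt(gradNormSq);
            }
            return value;
        }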