Example #1
0
        public bool AuxiliaryFunction(int index, out object output)
        {
            // Build a pure-MA approximation of this model: sample candidate MA
            // coefficients from a low-discrepancy Halton sequence and keep the
            // candidate whose autocorrelation function best matches Rho(.).
            var candidate = new ARMAModel(0, maxLag); // we should be able to get close with an MA
            var halton    = new HaltonSequence(maxLag);

            double lowestError = double.MaxValue;
            var    bestMAPoly  = Vector <double> .Build.Dense(maxLag);

            for (int trial = 0; trial < 200000; ++trial)
            {
                // Halton point supplies the MA portion of the cube; the first
                // three cube elements (el. 0=mu, el. 1=d, el. 2=sigma) are kept
                // from the candidate's current parameters.
                var maCube   = halton.GetNext();
                var fullCube = candidate.ParameterToCube(candidate.Parameters);
                for (int lag = 0; lag < maxLag; ++lag)
                {
                    fullCube[lag + 3] = maCube[lag];
                }
                candidate.SetParameters(candidate.CubeToParameter(fullCube));

                // Score by L1 distance between ACFs; mean and sigma are irrelevant here.
                var    acf        = candidate.ComputeACF(maxLag, true);
                double totalError = 0;
                for (int lag = 0; lag < maxLag; ++lag)
                {
                    totalError += Math.Abs(acf[lag + 1] - Rho(lag));
                }

                if (totalError < lowestError)
                {
                    lowestError = totalError;
                    bestMAPoly  = candidate.GetMAPolynomial();
                }
            }

            // Install the winning MA polynomial and the target mean, then hand
            // the approximating model back through the out parameter.
            candidate.SetMAPolynomial(bestMAPoly);
            candidate.Mu = Mu;
            output       = candidate;
            return true;
        }
Example #2
0
        /// <summary>
        /// Fits the model by maximum likelihood estimation. This function samples from parameter
        /// space using a Halton sequence, picks the model with best log-likelihood, and then
        /// refines the best candidates with a Nelder-Mead simplex search in cube space.
        /// Individual parameters are tagged as ParameterState.Locked, ParameterState.Free, or ParameterState.Consequential.
        /// Locked parameters are held at current values in optimization.
        /// Free parameters are optimized.
        /// Consequential parameters are computed as a function of other parameters and the data.
        /// </summary>
        /// <param name="numIterationsLDS">Number of low-discrepancy (Halton) trial points to evaluate.</param>
        /// <param name="numIterationsOpt">Number of Nelder-Mead refinement iterations.</param>
        /// <param name="consistencyPenalty">Penalty weight passed through to the log-likelihood evaluations.</param>
        /// <param name="optCallback">Optional progress callback; may be null.</param>
        /// <exception cref="ApplicationException">Thrown if this model does not implement IMLEEstimable.</exception>
        public virtual void FitByMLE(int numIterationsLDS, int numIterationsOpt,
                                     double consistencyPenalty,
                                     Optimizer.OptimizationCallback optCallback)
        {
            // MLE requires the IMLEEstimable protocol (cube<->parameter mapping, pre-computations).
            thisAsMLEEstimable = this as IMLEEstimable;
            if (thisAsMLEEstimable == null)
            {
                throw new ApplicationException("MLE not supported for this model.");
            }

            int optDimension     = NumParametersOfType(ParameterState.Free);
            int numConsequential = NumParametersOfType(ParameterState.Consequential);
            int numIterations    = numIterationsLDS + numIterationsOpt; // total, used only for progress %

            var trialParameterList = new Vector <double> [numIterationsLDS];
            var trialCubeList      = new Vector <double> [numIterationsLDS];

            var hsequence = new HaltonSequence(optDimension);

            if (optDimension == 0) // then all parameters are either locked or consequential
            {
                // Nothing to optimize: just derive the consequential parameters once.
                Vector <double> tparms = Parameters;
                Parameters = ComputeConsequentialParameters(tparms);
            }
            else
            {
                thisAsMLEEstimable.CarryOutPreMLEComputations();

                // Stage 1 setup: generate numIterationsLDS trial points in the unit cube
                // (free dims from the Halton sequence, the rest filled in by CubeInsert)
                // and map each to natural parameter space.
                for (int i = 0; i < numIterationsLDS; ++i)
                {
                    Vector <double> smallCube = hsequence.GetNext();
                    Vector <double> cube      = CubeInsert(smallCube);
                    trialParameterList[i] = thisAsMLEEstimable.CubeToParameter(cube);
                    trialCubeList[i]      = cube;
                }

                var logLikes = new double[numIterationsLDS];

                // NOTE(review): dormant multi-threaded variant of the scoring loop below;
                // retained commented-out, presumably pending a thread-safety review.
                //const bool multiThreaded = false;
                //if (multiThreaded)
                //{
                //    Parallel.For(0, numIterations,
                //                 i =>
                //                 {
                //                     Vector tparms = trialParameterList[i];
                //                     if (numConsequential > 0)
                //                     {
                //                         tparms = ComputeConsequentialParameters(tparms);
                //                         lock (trialParameterList)
                //                             trialParameterList[i] = tparms;
                //                     }

                //                     double ll = LogLikelihood(tparms);
                //                     if (optCallback != null)
                //                         lock (logLikes)
                //                             optCallback(tparms, ll,
                //                                         (int)(i * 100 / numIterations), false);

                //                     lock (logLikes)
                //                         logLikes[i] = ll;
                //                 });
                //}

                // Stage 1: score every trial point by penalized log-likelihood.
                for (int i = 0; i < numIterationsLDS; ++i)
                {
                    Vector <double> tparms = trialParameterList[i];
                    if (numConsequential > 0)
                    {
                        tparms = ComputeConsequentialParameters(tparms);
                        trialParameterList[i] = tparms;
                    }

                    double ll = LogLikelihood(tparms, consistencyPenalty, false);
                    logLikes[i] = ll;

                    if (optCallback != null)
                    {
                        // NOTE(review): lock looks like a leftover from the parallel
                        // variant above; harmless in this single-threaded loop.
                        lock (logLikes)
                            optCallback(tparms, ll, i * 100 / numIterations, false);
                    }
                }

                // Step 1: Just take the best value.
                // Array.Sort orders logLikes ascending and permutes trialParameterList
                // in tandem, so the highest-likelihood parameters end up last.
                Array.Sort(logLikes, trialParameterList);
                Parameters = trialParameterList[numIterationsLDS - 1];

                // Step 2: Take some of the top values and use them to create a simplex, then optimize
                // further in natural parameter space with the Nelder Mead algorithm.
                // Here we optimize in cube space, reflecting the cube when necessary to make parameters valid.
                // The simplex needs optDimension+1 vertices: the top candidates, mapped
                // back to the cube and restricted to the free coordinates.
                var simplex = new List <Vector <double> >();
                for (int i = 0; i <= optDimension; ++i)
                {
                    simplex.Add(
                        FreeParameters(thisAsMLEEstimable.ParameterToCube(trialParameterList[numIterationsLDS - 1 - i])));
                }
                var nmOptimizer = new NelderMead {
                    Callback = optCallback, StartIteration = numIterationsLDS
                };
                currentPenalty = consistencyPenalty; // read by NegativeLogLikelihood during minimization
                nmOptimizer.Minimize(NegativeLogLikelihood, simplex, numIterationsOpt);
                // Map the optimizer's arg-min from free-cube space back to a full,
                // validity-fixed parameter vector with consequential values recomputed.
                Parameters =
                    ComputeConsequentialParameters(
                        thisAsMLEEstimable.CubeToParameter(CubeFix(CubeInsert(nmOptimizer.ArgMin))));
            }

            // Final call with null parameters / fill=true — presumably refreshes cached
            // state (residuals etc.) at the fitted parameters; confirm against LogLikelihood.
            LogLikelihood(null, 0.0, true);
        }