Example #1
0
 // Cloning constructor; the body is empty because all copying is delegated
 // to the base-class constructor via base(original, cloner).
 private MeanConst(MeanConst original, Cloner cloner) : base(original, cloner) { }
Example #2
0
 // Cloning constructor; delegates all state copying to the base class.
 private MeanConst(MeanConst original, Cloner cloner)
     : base(original, cloner)
 {
 }
        // Does not produce the same result for the same seed when using parallel engine (see below)!
        /// <summary>
        /// Evaluates a symbolic expression tree that encodes a covariance function:
        /// the hyperparameters of a Gaussian process (constant mean + encoded covariance
        /// + noise term) are fitted with alglib conjugate-gradient optimization,
        /// restarted <c>Restarts</c> times with random initializations.
        /// </summary>
        /// <param name="tree">Tree encoding the covariance function to evaluate.</param>
        /// <param name="random">Source of random initial hyperparameter values; access is synchronized because Evaluate may run concurrently.</param>
        /// <returns>The best objective value found over all restarts (larger is better; starts at double.MinValue).</returns>
        public override double Evaluate(ISymbolicExpressionTree tree, IRandom random)
        {
            var meanFunction          = new MeanConst();
            var problemData           = ProblemData;
            var ds                    = problemData.Dataset;
            var targetVariable        = problemData.TargetVariable;
            var allowedInputVariables = problemData.AllowedInputVariables.ToArray();
            var nVars                 = allowedInputVariables.Length;
            var trainingRows          = problemData.TrainingIndices.ToArray();

            // use the same covariance function for each restart
            var covarianceFunction = TreeToCovarianceFunction(tree);

            // Hoisted out of the restart loop: both values are loop-invariant.
            // (Previously GetNumberOfParameters was re-evaluated on every iteration of the
            // inner RNG loop — inside the lock — and the target average was recomputed
            // once per restart.)
            int    nCovParams = covarianceFunction.GetNumberOfParameters(nVars);
            double targetMean = ds.GetDoubleValues(targetVariable).Average();

            // allocate hyperparameters
            var hyperParameters = new double[meanFunction.GetNumberOfParameters(nVars) + nCovParams + 1]; // mean + cov + noise

            double[] bestHyperParameters = new double[hyperParameters.Length];
            var      bestObjValue        = new double[1] {
                double.MinValue
            };

            // data that is necessary for the objective function
            var data = Tuple.Create(ds, targetVariable, allowedInputVariables, trainingRows, (IMeanFunction)meanFunction, covarianceFunction, bestObjValue);

            for (int t = 0; t < Restarts; t++)
            {
                // remember the incumbent so it can be restored if the optimizer fails
                var prevBest = bestObjValue[0];
                var prevBestHyperParameters = new double[hyperParameters.Length];
                Array.Copy(bestHyperParameters, prevBestHyperParameters, bestHyperParameters.Length);

                // initialize hyperparameters
                hyperParameters[0] = targetMean; // mean const

                // Evaluate might be called concurrently therefore access to random has to be synchronized.
                // However, results of multiple runs with the same seed will be different when using the parallel engine.
                lock (syncRoot) {
                    for (int i = 0; i < nCovParams; i++)
                    {
                        hyperParameters[1 + i] = random.NextDouble() * 2.0 - 1.0;
                    }
                }
                hyperParameters[hyperParameters.Length - 1] = 1.0; // s² = exp(2), TODO: other inits better?

                // use alglib.bfgs for hyper-parameter optimization ...
                double             epsg   = 0;
                double             epsf   = 0.00001;
                double             epsx   = 0;
                double             stpmax = 1;
                int                maxits = ConstantOptIterations;
                alglib.mincgstate  state;
                alglib.mincgreport rep;

                alglib.mincgcreate(hyperParameters, out state);
                alglib.mincgsetcond(state, epsg, epsf, epsx, maxits);
                alglib.mincgsetstpmax(state, stpmax);
                alglib.mincgoptimize(state, ObjectiveFunction, null, data);

                alglib.mincgresults(state, out bestHyperParameters, out rep);

                if (rep.terminationtype < 0)
                {
                    // error -> restore previous best quality
                    bestObjValue[0] = prevBest;
                    Array.Copy(prevBestHyperParameters, bestHyperParameters, prevBestHyperParameters.Length);
                }
            }

            UpdateBestSoFar(bestObjValue[0], bestHyperParameters, meanFunction, covarianceFunction);

            return bestObjValue[0];
        }