Example #1
        /// <summary>
        /// Bayesian optimization (BO) for global black-box optimization problems. BO learns a model from the initial parameter sets and their scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
        /// where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine learning method.
        /// In that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="numberOfStartingPoints">Number of randomly created starting points to use for the initial model in the first iteration (default is 5)</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, int numberOfStartingPoints = 5, int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentException("maxIterations must be at least 1");
            }
            if (numberOfStartingPoints < 1)
            {
                throw new ArgumentException("numberOfStartingPoints must be at least 1");
            }

            m_parameters             = parameters;
            m_maxIterations          = maxIterations;
            m_numberOfStartingPoints = numberOfStartingPoints;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_sampler = new RandomUniform(seed);

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(30, 10, 2000, parameters.Length, 1e-6, 1.0, 42, false);

            // Optimizer for finding the maximum expectation (most promising hyperparameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, 1000, 42, false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;
        }
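For context, here is a minimal usage sketch. The bounds and the toy objective are hypothetical, and the exact ParameterBounds constructor may vary between library versions; OptimizeBest is the same entry point used in Example #26 below.

        // Minimal sketch (hypothetical bounds and objective): minimize a
        // one-dimensional quadratic whose optimum lies at x = 0.5.
        var bounds = new ParameterBounds[]
        {
            new ParameterBounds(min: 0.0, max: 1.0),
        };
        var optimizer = new BayesianOptimizer(bounds, maxIterations: 30);

        // The objective maps a candidate parameter set to an OptimizerResult
        // carrying the parameters and the score (error) to minimize.
        Func<double[], OptimizerResult> minimize =
            p => new OptimizerResult(p, Math.Pow(p[0] - 0.5, 2));

        var best = optimizer.OptimizeBest(minimize);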
Example #2
        /// <summary>
        /// Bayesian optimization (BO) for global black-box optimization problems. BO learns a model from the initial parameter sets and their scores.
        /// This model is used to sample new promising parameter candidates, which are evaluated and added to the existing parameter sets.
        /// This process iterates several times. The method is computationally expensive, so it is most relevant for expensive problems,
        /// where each evaluation of the function to minimize takes a long time, such as hyperparameter tuning of a machine learning method.
        /// In that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods.
        /// Implementation loosely based on:
        /// http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf
        /// https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf
        /// https://papers.nips.cc/paper/4443-algorithms-for-hyper-parameter-optimization.pdf
        /// </summary>
        /// <param name="parameters">A list of parameter bounds for each optimization parameter</param>
        /// <param name="maxIterations">Maximum number of iterations. MaxIteration * numberOfCandidatesEvaluatedPrIteration = totalFunctionEvaluations</param>
        /// <param name="previousParameterSets">Parameter sets from previous run</param>
        /// <param name="previousParameterSetScores">Scores from from previous run corresponding to each parameter set</param>
        /// <param name="numberOfCandidatesEvaluatedPrIteration">How many candiate parameter set should by sampled from the model in each iteration.
        /// The parameter sets are inlcuded in order of most promissing outcome (default is 1)</param>
        /// <param name="seed">Seed for the random initialization</param>
        public BayesianOptimizer(ParameterBounds[] parameters, int maxIterations, List <double[]> previousParameterSets, List <double> previousParameterSetScores,
                                 int numberOfCandidatesEvaluatedPrIteration = 1, int seed = 42)
        {
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            if (maxIterations <= 0)
            {
                throw new ArgumentException("maxIterations must be at least 1");
            }
            if (previousParameterSets == null)
            {
                throw new ArgumentNullException("previousParameterSets");
            }
            if (previousParameterSetScores == null)
            {
                throw new ArgumentNullException("previousParameterSetScores");
            }
            if (previousParameterSets.Count != previousParameterSetScores.Count)
            {
                throw new ArgumentException("previousParameterSets length: "
                                            + previousParameterSets.Count + " does not correspond with previousResults length: " + previousParameterSetScores.Count);
            }
            if (previousParameterSetScores.Count < 2 || previousParameterSets.Count < 2)
            {
                throw new ArgumentException("previousParameterSets length and previousResults length must be at least 2 and was: " + previousParameterSetScores.Count);
            }

            m_parameters    = parameters;
            m_maxIterations = maxIterations;
            m_numberOfCandidatesEvaluatedPrIteration = numberOfCandidatesEvaluatedPrIteration;

            m_random = new Random(seed);

            // Use member to seed the random uniform sampler.
            m_sampler = new RandomUniform(m_random.Next());

            // Hyper parameters for regression extra trees learner. These are based on the values suggested in http://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf.
            // However, according to the author Frank Hutter, the hyper parameters for the forest model should not matter that much.
            m_learner = new RegressionExtremelyRandomizedTreesLearner(trees: 30,
                                                                      minimumSplitSize: 10,
                                                                      maximumTreeDepth: 2000,
                                                                      featuresPrSplit: parameters.Length,
                                                                      minimumInformationGain: 1e-6,
                                                                      subSampleRatio: 1.0,
                                                                      seed: m_random.Next(), // Use member to seed the learner.
                                                                      runParallel: false);

            // Optimizer for finding the maximum expectation (most promising hyperparameters) from the extra trees model.
            m_maximizer = new RandomSearchOptimizer(m_parameters, iterations: 1000,
                                                    seed: m_random.Next(), // Use member to seed the maximizer.
                                                    runParallel: false);

            // Acquisition function to maximize.
            m_acquisitionFunc = AcquisitionFunctions.ExpectedImprovement;

            m_previousParameterSets      = previousParameterSets;
            m_previousParameterSetScores = previousParameterSetScores;
        }
Example #3
        public IEnumerable <IOptimizer> UnrollOptimizer(IOptimizer optimizer)
        {
            List <IOptimizer> optimizers = new List <IOptimizer>();

            var batchRun   = optimizer as BatchRun;
            var experiment = optimizer as Experiment;

            if (batchRun != null && batchRun.Optimizer != null)
            {
                for (int i = 0; i < batchRun.Repetitions; i++)
                {
                    optimizers.AddRange(UnrollOptimizer(batchRun.Optimizer));
                }
            }
            else if (experiment != null)
            {
                foreach (var opt in experiment.Optimizers)
                {
                    optimizers.AddRange(UnrollOptimizer(opt));
                }
            }
            else
            {
                optimizers.Add(optimizer);
            }

            return(optimizers);
        }
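UnrollOptimizer recursively flattens nested optimizers: a BatchRun with Repetitions = 3 wrapping an Experiment of two algorithms unrolls to six entries, while a plain algorithm comes back as a single-element list.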
Example #4
    public bool Load() {
      if (loaded) {
        printToConsole("A file has already been loaded.");
        return false;
      }
      content = ContentManager.Load(filePath);

      printToConsole("Loading completed!");
      printToConsole("Content loaded: " + content.ToString());

      optimizer = content as IOptimizer;
      if (optimizer != null) {
        numberOfRuns = NumberOfRuns(optimizer);
        initialNumberOfRuns = optimizer.Runs.Count;
        printToConsole(String.Format("Initial number of runs: {0}", initialNumberOfRuns));
        PrintRuns();

        optimizer.ExceptionOccurred += new EventHandler<EventArgs<Exception>>(Optimizer_Exception);
        optimizer.ExecutionStateChanged += new EventHandler(Optimizer_ExecutionStateChanged);
        optimizer.Stopped += new EventHandler(Optimizer_Stopped);
        optimizer.ExecutionTimeChanged += new EventHandler(Optimizer_ExecutionTimeChanged);
        optimizer.Runs.RowsChanged += new EventHandler(Optimizer_Runs_RowsChanged);
      }
      loaded = optimizer != null;
      return loaded;
    }
Example #5
File: YOLO.Train.cs Project: molekm/YOLOv4
        static Loss TrainStep(ObjectDetectionDataset.EntryBatch batch, Model model, IOptimizer optimizer, int classCount, ReadOnlySpan <int> strides, bool bench = false)
        {
            if (bench)
            {
                return(ComputeLosses(model, batch, classCount, strides));
            }

            var    tape = new GradientTape();
            Loss   losses;
            Tensor totalLoss;

            using (tape.StartUsing()) {
                losses    = ComputeLosses(model, batch, classCount, strides);
                totalLoss = losses.GIUO + losses.Conf + losses.Prob;

                if (!tf.executing_eagerly() || !tf.logical_or(tf.is_inf(totalLoss), tf.is_nan(totalLoss)).numpy().any())
                {
                    PythonList <Tensor> gradients = tape.gradient(totalLoss, model.trainable_variables);
                    optimizer.apply_gradients(gradients.Zip(
                                                  (PythonList <Variable>)model.trainable_variables, (g, v) => (g, v)));
                }
                else
                {
                    Trace.TraceWarning("NaN/inf loss ignored");
                }
            }

            return(losses);
        }
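The guard around apply_gradients is worth spelling out: in eager mode the NaN/inf check runs immediately, so a diverging batch is logged and skipped; in graph mode (!tf.executing_eagerly()) the short-circuit applies the gradients unconditionally, because the loss tensor's value cannot be inspected while tracing.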
Example #6
 public TypeResolverConfigurationKernel(IOptimizer optimizer, IConfigurationTypeResolver configResolver, IAssemblyScanningTypeResolver assemblyScanner)
 {
     Optimizer             = optimizer;
     AssemblyScanner       = assemblyScanner;
     ConfigurationResolver = configResolver;
     assemblyScanner.ConfigurationKernel = this;
 }
Example #7
 public int CompareTo(IOptimizer y)
 {
     if (Level == y.Level)
     {
         if (Priority > y.Priority)
         {
             return(1);
         }
         else if (Priority == y.Priority)
         {
             return(0);
         }
         else
         {
             return(-1);
         }
     }
     else if (Level > y.Level)
     {
         return(1);
     }
     else
     {
         return(-1);
     }
 }
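The comparison above orders by Level first and breaks ties with Priority. As a sketch, assuming Level and Priority are plain comparable values, the same ordering can be written with chained CompareTo calls:

 public int CompareTo(IOptimizer y)
 {
     // Order by Level first; fall back to Priority only on a tie.
     int byLevel = Level.CompareTo(y.Level);
     return byLevel != 0 ? byLevel : Priority.CompareTo(y.Priority);
 }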
Example #8
        public bool Load()
        {
            if (loaded)
            {
                printToConsole("A file has already been loaded.");
                return(false);
            }
            content = ContentManager.Load(filePath);

            printToConsole("Loading completed!");
            printToConsole("Content loaded: " + content.ToString());

            optimizer = content as IOptimizer;
            if (optimizer != null)
            {
                numberOfRuns        = NumberOfRuns(optimizer);
                initialNumberOfRuns = optimizer.Runs.Count;
                printToConsole(String.Format("Initial number of runs: {0}", initialNumberOfRuns));
                PrintRuns();

                optimizer.ExceptionOccurred     += new EventHandler <EventArgs <Exception> >(Optimizer_Exception);
                optimizer.ExecutionStateChanged += new EventHandler(Optimizer_ExecutionStateChanged);
                optimizer.Stopped += new EventHandler(Optimizer_Stopped);
                optimizer.ExecutionTimeChanged += new EventHandler(Optimizer_ExecutionTimeChanged);
                optimizer.Runs.RowsChanged     += new EventHandler(Optimizer_Runs_RowsChanged);
            }
            loaded = optimizer != null;
            return(loaded);
        }
Example #9
 public TypeResolverConfigurationKernel(IOptimizer optimizer, IConfigurationTypeResolver configResolver, IAssemblyScanningTypeResolver assemblyScanner)
 {
     Optimizer = optimizer;
     AssemblyScanner = assemblyScanner;
     ConfigurationResolver = configResolver;
     assemblyScanner.ConfigurationKernel = this;
 }
Example #10
 public void Initialize(IOptimizer optimizer)
 {
     gamma = new NDarray <double>(1.0, InputShape);
     beta  = new NDarray <double>(0.0, InputShape);
     gOpt  = optimizer.Clone();
     bOpt  = optimizer.Clone();
 }
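This looks like the standard batch-normalization initialization: the scale (gamma) starts at one and the shift (beta) at zero, and the optimizer is cloned once per parameter so that each parameter keeps its own internal optimizer state (per-parameter moment estimates in Adam, for instance).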
Example #11
 public HLRunInfo(int id, IOptimizer optimizer, string filePath, int coresRequired, string savePath) {
   Id = id;
   Optimizer = optimizer;
   FilePath = filePath;
   CoresRequired = coresRequired;
   SavePath = savePath;
 }
Example #12
File: YOLO.Train.cs Project: molekm/YOLOv4
        static void UpdateLearningRate(IOptimizer optimizer, Variable step, LearningRateSchedule learningRateSchedule)
        {
            Tensor learningRate          = learningRateSchedule.Get(step: step);
            var    optimizerLearningRate = optimizer.DynamicGet <Variable>("lr");

            optimizerLearningRate.assign(learningRate);
        }
Example #13
        public static double[] SolveInverse(
            IForwardSolver forwardSolver,
            IOptimizer optimizer,
            SolutionDomainType solutionDomainType,
            double[] dependentValues,
            double[] standardDeviationValues,
            InverseFitType inverseFitType,
            object[] independentValues,
            double[] lowerBounds,
            double[] upperBounds)
        {
            //var opticalPropertyGuess = ((OpticalProperties[]) (independentValues[0])).First();
            //var fitParameters = new double[4] { opticalPropertyGuess.Mua, opticalPropertyGuess.Musp, opticalPropertyGuess.G, opticalPropertyGuess.N };
            var parametersToFit = GetParametersToFit(inverseFitType);

            var opticalPropertyGuess = (OpticalProperties[])(independentValues[0]);
            var fitParametersArray   = opticalPropertyGuess.SelectMany(opgi => new[] { opgi.Mua, opgi.Musp, opgi.G, opgi.N }).ToArray();
            var parametersToFitArray = Enumerable.Range(0, opticalPropertyGuess.Count()).SelectMany(_ => parametersToFit).ToArray();

            Func <double[], object[], double[]> func = GetForwardReflectanceFuncForOptimization(forwardSolver, solutionDomainType);

            var fit = optimizer.SolveWithConstraints(fitParametersArray, parametersToFitArray, lowerBounds, upperBounds, dependentValues.ToArray(),
                                                     standardDeviationValues.ToArray(), func, independentValues.ToArray());

            return(fit);
        }
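Each OpticalProperties guess is flattened into four fit parameters (Mua, Musp, G, N), and the mask from GetParametersToFit is tiled once per guess, so the optimizer receives one flat parameter vector together with a matching mask of which entries to vary.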
Example #14
 public override void UpdateLayers(IOptimizer optimizer)
 {
     CurrentLayer.Learn(optimizer);
     for (int i = 0; i < InputLayers.Count; i++)
     {
         InputLayers[i].UpdateLayers(optimizer);
     }
 }
Example #15
 public HLRunInfo(int id, IOptimizer optimizer, string filePath, int coresRequired, string savePath)
 {
     Id            = id;
     Optimizer     = optimizer;
     FilePath      = filePath;
     CoresRequired = coresRequired;
     SavePath      = savePath;
 }
Example #16
 public MultilayerPerceptron(ICostFunction costFunction, IOptimizer optimizer, Func <double, int, List <Dense>, double> regularization, Func <int[], int[], double> metrics)
 {
     this.costFunction   = costFunction;
     this.optimizer      = optimizer;
     this.regularization = regularization;
     this.metrics        = metrics;
     layers = new List <Dense>();
 }
Example #17
        public FastNN(int[] numNeurons, int miniBatchSize, bool l2loss, float dropoutProb = 0)
        {
            this.l2loss = l2loss;

            layers = new DenseLayer[numNeurons.Length - 1];

            dropouts = new DropoutLayer[numNeurons.Length - 1];

            activations = new Matrix <float> [numNeurons.Length];

            singleActivations = new Matrix <float> [numNeurons.Length];

            preActivations = new Matrix <float> [numNeurons.Length - 1];

            deltas = new Matrix <float> [numNeurons.Length - 1];

            this.miniBatchSize = miniBatchSize;

            optimizer = new Adam(0.001F);

            IActivationFunc activationFunc = new Relu();

            IInitialization initialization = new HeNormal();

            for (int i = 0; i < numNeurons.Length; i++)
            {
                activations[i]       = DenseMatrix.Create(miniBatchSize, numNeurons[i], 0);
                singleActivations[i] = DenseMatrix.Create(1, numNeurons[i], 0);

                if (i == 0)
                {
                    continue;
                }


                if (i == numNeurons.Length - 1)
                {
                    activationFunc = new Linear();
                }

                preActivations[i - 1] = DenseMatrix.Create(miniBatchSize, numNeurons[i], 0);
                layers[i - 1]         = new DenseLayer(numNeurons[i - 1], numNeurons[i], activationFunc, initialization);
                deltas[i - 1]         = DenseMatrix.Create(miniBatchSize, numNeurons[i], 0);

                if (dropoutProb > 0 && i < numNeurons.Length - 1)
                {
                    dropouts[i - 1] = new DropoutLayer(miniBatchSize, numNeurons[i], dropoutProb);
                }
            }


            computeSDOutput = false;

            if (numNeurons.Last() == 2)
            {
                computeSDOutput = true;
            }
        }
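Two details worth noting: the hidden layers share a ReLU activation with He-normal initialization while the last layer is switched to Linear, a common choice for regression outputs, and computeSDOutput is enabled only for networks with exactly two outputs, presumably a predicted mean and standard deviation given the flag's name.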
Example #18
 private void RemoveOptimizer(IOptimizer optimizer)
 {
     DeregisterOptimizerEvents(optimizer);
     Runs.RemoveRange(optimizer.Runs);
     if (ExecutionState == ExecutionState.Prepared && !optimizers.Any(opt => opt.ExecutionState == ExecutionState.Prepared))
     {
         OnStopped();
     }
 }
Example #19
        /// <summary>
        /// Creates an optimizer that applies the given optimizer until a fixed point is found.
        /// </summary>
        /// <param name="optimizer">The optimizer whose fixed point to find.</param>
        /// <returns>An optimizer that applies the given optimizer until a fixed point is found.</returns>
        public static IOptimizer FixedPoint(this IOptimizer optimizer)
        {
            if (optimizer == null)
            {
                throw new ArgumentNullException(nameof(optimizer));
            }

            return(new FixedPointOptimizer(optimizer, int.MaxValue, throwOnCycle: true));
        }
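A hypothetical usage, with inliningPass standing in for any IOptimizer:

        // Repeatedly applies inliningPass until its output stops changing;
        // throwOnCycle: true makes a repeating-but-never-converging sequence throw.
        IOptimizer stablePass = inliningPass.FixedPoint();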
Example #20
        public override void Initialize(IOptimizer <Type> optimizer = null)
        {
            // Guard: the null default would make the Clone() calls below throw.
            if (optimizer == null)
            {
                throw new ArgumentNullException(nameof(optimizer));
            }

            gamma = NDArray <Type> .Ones(Inputs);

            beta = NDArray <Type> .Zeros(Inputs);

            gOpt = optimizer.Clone();
            bOpt = optimizer.Clone();
        }
Example #21
    public OptimizerTask(IOptimizer optimizer)
      : base(optimizer) {

      if (optimizer is Experiment || optimizer is BatchRun) {
        this.ComputeInParallel = true;
      } else {
        this.ComputeInParallel = false;
      }
    }
Example #22
        public override void Initialize(IOptimizer <Type> optimizer)
        {
            double lim = 1.0 / Math.Sqrt(Inputs);

            W  = NumDN.Uniform <Type>(-lim, lim, Inputs, Outputs);
            w0 = NDArray <Type> .Zeros(1, Outputs);

            WOpt  = optimizer.Clone();
            w0Opt = optimizer.Clone();
        }
Example #23
 public SingleCloudOptimizedCommandHandler(IOptimizer optimizer,
                                           IMapper mapper,
                                           IRepository <AWSCloudFormationTemplate> awsTemplate,
                                           IRepository <AzureVMTemplate> azureTemplate)
 {
     _optimizer     = optimizer;
     _mapper        = mapper;
     _awsTemplate   = awsTemplate;
     _azureTemplate = azureTemplate;
 }
Example #24
        public Executer(Mine mine, IOptimizer optimizer)
        {
            if (optimizer == null)
            {
                throw new ArgumentNullException(nameof(optimizer));
            }
            if (mine == null)
            {
                throw new ArgumentNullException(nameof(mine));
            }

            _optimizer = optimizer;
            _mine      = mine;
        }
Example #25
 private void AddOptimizer(IOptimizer optimizer)
 {
     RegisterOptimizerEvents(optimizer);
     Runs.AddRange(optimizer.Runs);
     optimizer.Prepare();
     if (ExecutionState == ExecutionState.Stopped && optimizer.ExecutionState == ExecutionState.Prepared)
     {
         OnPrepared();
     }
 }
Example #26
        public override double Evaluate(IChromosome chromosome)
        {
            try
            {
                var parameters = Config.Genes.Select(s =>
                                                     new MinMaxParameterSpec(min: (double)(s.MinDecimal ?? s.MinInt.Value), max: (double)(s.MaxDecimal ?? s.MaxInt.Value),
                                                                             transform: Transform.Linear, parameterType: s.Precision > 0 ? ParameterType.Continuous : ParameterType.Discrete)
                                                     ).ToArray();


                IOptimizer optimizer = null;
                if (Config.Fitness != null)
                {
                    if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.RandomSearch.ToString())
                    {
                        optimizer = new RandomSearchOptimizer(parameters, iterations: Config.Generations, seed: 42, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.ParticleSwarm.ToString())
                    {
                        optimizer = new ParticleSwarmOptimizer(parameters, maxIterations: Config.Generations, numberOfParticles: Config.PopulationSize,
                                                               seed: 42, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Bayesian.ToString())
                    {
                        optimizer = new BayesianOptimizer(parameters, maxIterations: Config.Generations, numberOfStartingPoints: Config.PopulationSize, seed: 42);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.GlobalizedBoundedNelderMead.ToString())
                    {
                        optimizer = new GlobalizedBoundedNelderMeadOptimizer(parameters, maxRestarts: Config.Generations,
                                                                             maxIterationsPrRestart: Config.PopulationSize, seed: 42, maxDegreeOfParallelism: Config.MaxThreads);
                    }
                    else if (Config.Fitness.OptimizerTypeName == Enums.OptimizerTypeOptions.Genetic.ToString())
                    {
                        throw new Exception("Genetic optimizer cannot be used with Sharpe Maximizer");
                    }
                }

                //todo:
                // GridSearchOptimizer?

                Func <double[], OptimizerResult> minimize = p => Minimize(p, (Chromosome)chromosome);

                // run optimizer
                var result = optimizer.OptimizeBest(minimize);

                Best = ToChromosome(result, chromosome);

                return(result.Error);
            }
            catch (Exception ex)
            {
                Program.Logger.Error(ex);
                return(ErrorFitness);
            }
        }
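Note that optimizer stays null when Config.Fitness is null or when OptimizerTypeName matches none of the listed options, so the later optimizer.OptimizeBest(minimize) call throws a NullReferenceException, which the catch block swallows and reports as ErrorFitness.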
Example #27
        public void Initialize(IOptimizer <U> optimizer)
        {
            wOpt = optimizer.Clone();
            bOpt = optimizer.Clone();

            double lim = 3.0 / Math.Sqrt(InputShape[0]);

            weight = ND.Uniform(-lim, lim, InputShape[0], OutputShape[0]).Cast <U>();
            biases = new NDarray <double>(1, OutputShape[0]).Cast <U>();
            wTmp   = new NDarray <double>(weight.Shape).Cast <U>();
        }
Example #28
        public TrainerCPU(TrainType trainType, IOptimizer optimizer)
        {
            TrainOptimizer   = optimizer;
            RandomSeed       = 12;
            TrainTypeSetting = trainType;
            BatchSize        = batchSize;
            random           = new Random(RandomSeed);

            L1Regularization = 0;
            L2Regularization = 0;
        }
Example #29
        public GroupFormationAlgorithm(List <Participant> _participants, IMatcher matcher, IEvaluator evaluator, IOptimizer optimizer, int groupSize)
        {
            _participants.ForEach(p => participants.Add(p.Clone()));
            this.Evaluator = evaluator;
            this.Matcher   = matcher;
            this.Optimizer = optimizer;

            this.GroupSize = groupSize;

            Init();
        }
Example #30
        public Hypothesis(Predictor predictor, IOptimizer optimizer, double learningRate)
        {
            _costHistory   = new History();
            _weightHistory = new History();

            _predictor = predictor;

            _optimizer = new Optimizer();
            _optimizer.SetLearningRate(learningRate);
            _optimizer.SetOptimizer(optimizer);
        }
Example #31
        public static IOptimizer BuildOptimizer(string type, System.Type returnClass, int incrementSize, long explicitInitialValue)
        {
            // FIXME: Disable this warning, or refactor without the deprecated version.
            IOptimizer optimizer = BuildOptimizer(type, returnClass, incrementSize);

            if (optimizer is IInitialValueAwareOptimizer)
            {
                ((IInitialValueAwareOptimizer)optimizer).InjectInitialValue(explicitInitialValue);
            }

            return(optimizer);
        }
Example #32
 private void optimizerTabPage_DragDrop(object sender, DragEventArgs e)
 {
     if (e.Effect != DragDropEffects.None)
     {
         IOptimizer optimizer = e.Data.GetData(HeuristicLab.Common.Constants.DragDropDataFormat) as IOptimizer;
         if (e.Effect.HasFlag(DragDropEffects.Copy))
         {
             optimizer = (IOptimizer)optimizer.Clone();
         }
         Content.Optimizer = optimizer;
     }
 }
Example #33
 private void DeregisterOptimizerEvents(IOptimizer optimizer)
 {
     optimizer.ExceptionOccurred    -= new EventHandler <EventArgs <Exception> >(optimizer_ExceptionOccurred);
     optimizer.ExecutionTimeChanged -= new EventHandler(optimizer_ExecutionTimeChanged);
     optimizer.Paused               -= new EventHandler(optimizer_Paused);
     optimizer.Prepared             -= new EventHandler(optimizer_Prepared);
     optimizer.Started              -= new EventHandler(optimizer_Started);
     optimizer.Stopped              -= new EventHandler(optimizer_Stopped);
     optimizer.Runs.CollectionReset -= new CollectionItemsChangedEventHandler <IRun>(optimizer_Runs_CollectionReset);
     optimizer.Runs.ItemsAdded      -= new CollectionItemsChangedEventHandler <IRun>(optimizer_Runs_ItemsAdded);
     optimizer.Runs.ItemsRemoved    -= new CollectionItemsChangedEventHandler <IRun>(optimizer_Runs_ItemsRemoved);
 }
Example #34
 public OptimizerTask(IOptimizer optimizer)
     : base(optimizer)
 {
     if (optimizer is Experiment || optimizer is BatchRun)
     {
         this.ComputeInParallel = true;
     }
     else
     {
         this.ComputeInParallel = false;
     }
 }
Example #35
		public virtual void Configure(IType type, IDictionary<string, string> parms, Dialect.Dialect dialect)
		{
			identifierType = type;
			bool forceTableUse = PropertiesHelper.GetBoolean(ForceTableParam, parms, false);

			string sequenceName = PropertiesHelper.GetString(SequenceParam, parms, DefaultSequenceName);
			if (sequenceName.IndexOf('.') < 0)
			{
				string schemaName;
				string catalogName;
				parms.TryGetValue(PersistentIdGeneratorParmsNames.Schema, out schemaName);
				parms.TryGetValue(PersistentIdGeneratorParmsNames.Catalog, out catalogName);
				sequenceName = Table.Qualify(catalogName, schemaName, sequenceName);
			}
			int initialValue = PropertiesHelper.GetInt32(InitialParam, parms, DefaultInitialValue);
			int incrementSize = PropertiesHelper.GetInt32(IncrementParam, parms, DefaultIncrementSize);

			string valueColumnName = PropertiesHelper.GetString(ValueColumnParam, parms, DefaultValueColumnName);

			string defOptStrategy = incrementSize <= 1 ? OptimizerFactory.None : OptimizerFactory.Pool;
			string optimizationStrategy = PropertiesHelper.GetString(OptimizerParam, parms, defOptStrategy);
			if (OptimizerFactory.None.Equals(optimizationStrategy) && incrementSize > 1)
			{
				log.Warn("config specified explicit optimizer of [" + OptimizerFactory.None + "], but [" + IncrementParam + "=" + incrementSize + "; honoring optimizer setting");
				incrementSize = 1;
			}
			if (dialect.SupportsSequences && !forceTableUse)
			{
				if (OptimizerFactory.Pool.Equals(optimizationStrategy) && !dialect.SupportsPooledSequences)
				{
					// TODO : may even be better to fall back to a pooled table strategy here so that the db stored values remain consistent...
					optimizationStrategy = OptimizerFactory.HiLo;
				}
				databaseStructure = new SequenceStructure(dialect, sequenceName, initialValue, incrementSize);
			}
			else
			{
				databaseStructure = new TableStructure(dialect, sequenceName, valueColumnName, initialValue, incrementSize);
			}

			optimizer = OptimizerFactory.BuildOptimizer(optimizationStrategy, identifierType.ReturnedClass, incrementSize);
			databaseStructure.Prepare(optimizer);
		}
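In summary: with incrementSize <= 1 the default optimizer strategy is none, otherwise pool; an explicit none optimizer combined with incrementSize > 1 is honored by forcing incrementSize back to 1; and when the dialect supports sequences but not pooled ones, the pool strategy falls back to hilo before the sequence structure is built (a table structure is used instead when sequences are unsupported or table use is forced).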
Example #36
    private int NumberOfRuns(IOptimizer optimizer) {
      var batchRun = optimizer as BatchRun;
      var experiment = optimizer as Experiment;

      if (batchRun != null && batchRun.Optimizer != null) {
        return batchRun.Repetitions * NumberOfRuns(batchRun.Optimizer);
      } else if (experiment != null) {
        int runs = 0;
        foreach (var opt in experiment.Optimizers) {
          runs += NumberOfRuns(opt);
        }
        return runs;
      } else { return 1; }
    }
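Like UnrollOptimizer in Example #3, this walks the optimizer tree: a BatchRun multiplies its repetitions by the run count of its inner optimizer, an Experiment sums over its children, and anything else counts as one run, so a BatchRun with 5 repetitions over an Experiment of 3 plain algorithms yields 15.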
Example #37
    private void printPythonIndividuals(IOptimizer optimizer) {
      var algo = optimizer as Algorithm;
      if (algo == null) return;
      var prob = algo.Problem as CFGPythonProblem;
      if (prob == null) return;

      Helper.printToConsole(String.Join(Environment.NewLine, prob.PythonProcess.GetIndidividuals()), String.Format("{0}({1})", runInfo.FileName, runInfo.Id));
    }
Example #38
		public virtual void Prepare(IOptimizer optimizer)
		{
			applyIncrementSizeToSourceValues = optimizer.ApplyIncrementSizeToSourceValues;
		}
Example #39
 private BatchRun(BatchRun original, Cloner cloner)
   : base(original, cloner) {
   executionState = original.executionState;
   executionTime = original.executionTime;
   runsExecutionTime = original.runsExecutionTime;
   optimizer = cloner.Clone(original.optimizer);
   repetitions = original.repetitions;
   repetitionsCounter = original.repetitionsCounter;
   runs = cloner.Clone(original.runs);
   batchRunAction = original.batchRunAction;
   Initialize();
 }
Example #40
 private void AddOptimizer(IOptimizer optimizer) {
   RegisterOptimizerEvents(optimizer);
   Runs.AddRange(optimizer.Runs);
   optimizer.Prepare();
   if (ExecutionState == ExecutionState.Stopped && optimizer.ExecutionState == ExecutionState.Prepared)
     OnPrepared();
 }
Example #41
    private string GetGeneration(IOptimizer opt) {
      var engineAlgorithm = opt as EngineAlgorithm;
      if (engineAlgorithm == null) {
        engineAlgorithm = opt.NestedOptimizers.Where(o => o is EngineAlgorithm
         && o.ExecutionState.Equals(HeuristicLab.Core.ExecutionState.Started)).FirstOrDefault() as EngineAlgorithm;
      }

      if (engineAlgorithm != null && engineAlgorithm.Results.ContainsKey("Generations")) {
        return engineAlgorithm.Results["Generations"].ToString();
      }

      return "No generation info found.";
    }
Example #42
 public ActionReplayOptimizer(CodeBuilder builder)
 {
     generalOptimizer = new GeneralOptimizer();
     this.builder = builder;
 }
Example #43
 private void DeregisterOptimizerEvents(IOptimizer optimizer) {
   optimizer.ExceptionOccurred -= new EventHandler<EventArgs<Exception>>(optimizer_ExceptionOccurred);
   optimizer.ExecutionTimeChanged -= new EventHandler(optimizer_ExecutionTimeChanged);
   optimizer.Paused -= new EventHandler(optimizer_Paused);
   optimizer.Prepared -= new EventHandler(optimizer_Prepared);
   optimizer.Started -= new EventHandler(optimizer_Started);
   optimizer.Stopped -= new EventHandler(optimizer_Stopped);
   optimizer.Runs.CollectionReset -= new CollectionItemsChangedEventHandler<IRun>(optimizer_Runs_CollectionReset);
   optimizer.Runs.ItemsAdded -= new CollectionItemsChangedEventHandler<IRun>(optimizer_Runs_ItemsAdded);
   optimizer.Runs.ItemsRemoved -= new CollectionItemsChangedEventHandler<IRun>(optimizer_Runs_ItemsRemoved);
 }
Example #44
		public void Prepare(IOptimizer optimizer)
		{
			_applyIncrementSizeToSourceValues = optimizer.ApplyIncrementSizeToSourceValues;
			_requiresPooledSequenceGenerator = optimizer.RequiresPooledSequenceGenerator;
		}
Example #45
    private void UpdateChildTreeNodes(TreeNodeCollection collection, IOptimizer optimizer) {
      var batchRun = optimizer as BatchRun;
      var experiment = optimizer as Experiment;

      if (batchRun != null && batchRun.Optimizer != null) UpdateChildTreeNodes(collection, new List<IOptimizer>() { batchRun.Optimizer });
      else if (experiment != null) UpdateChildTreeNodes(collection, experiment.Optimizers);
    }
Example #46
		/// <summary>
		/// Do we require a sequence with the ability to set initialValue and incrementSize
		/// larger than 1?
		/// </summary>
		protected bool RequiresPooledSequence(int initialValue, int incrementSize, IOptimizer optimizer)
		{
			int sourceIncrementSize = optimizer.ApplyIncrementSizeToSourceValues ? incrementSize : 1;
			return (initialValue > 1 || sourceIncrementSize > 1);
		}
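For example, with initialValue = 1 and incrementSize = 10, an optimizer that applies the increment size to source values gives sourceIncrementSize = 10, so a pooled sequence is required; with an optimizer that does not, sourceIncrementSize collapses to 1 and a plain sequence suffices.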
Example #47
    private TreeNode CreateTreeNode(IOptimizer optimizer) {
      TreeNode node = new TreeNode(optimizer.ToString());
      node.Tag = optimizer;

      var algorithm = optimizer as IAlgorithm;
      if (algorithm != null) {
        foreach (TreeNode childNode in CreateAlgorithmChildNodes(algorithm))
          node.Nodes.Add(childNode);
      }
      var batchRun = optimizer as BatchRun;
      if (batchRun != null) {
        node.Text += string.Format(" {0}/{1}", batchRun.RepetitionsCounter, batchRun.Repetitions);
      }

      List<TreeNode> nodes;
      if (!treeNodeTagMapping.TryGetValue(optimizer, out nodes)) {
        nodes = new List<TreeNode>();
        treeNodeTagMapping.Add(optimizer, nodes);
        RegisterNamedItemEvents(optimizer);
      }
      nodes.Add(node);

      foreach (TreeNode childNode in node.Nodes) {
        INamedItem namedItem = childNode.Tag as INamedItem;
        if (namedItem != null) {
          if (!treeNodeTagMapping.TryGetValue(namedItem, out nodes)) {
            nodes = new List<TreeNode>();
            treeNodeTagMapping.Add(namedItem, nodes);
            RegisterNamedItemEvents(namedItem);
          }
          nodes.Add(childNode);
        }
      }
      return node;
    }
Example #48
 private void RemoveOptimizer(IOptimizer optimizer) {
   DeregisterOptimizerEvents(optimizer);
   Runs.RemoveRange(optimizer.Runs);
   if (ExecutionState == ExecutionState.Prepared && !optimizers.Any(opt => opt.ExecutionState == ExecutionState.Prepared))
     OnStopped();
 }
Example #49
    private int GetSeed(IOptimizer opt) {
      var pni = opt as IParameterizedItem;

      if (pni == null) {
        pni = opt.NestedOptimizers.Where(o => o is IParameterizedItem
          && o.ExecutionState.Equals(HeuristicLab.Core.ExecutionState.Started)).FirstOrDefault() as IParameterizedItem;
      }

      if (pni != null && pni.Parameters.ContainsKey("Seed")) {
        return ((IntValue)pni.Parameters["Seed"].ActualValue).Value;
      }

      return -1;
    }