/// <summary>
/// Exports the standard deviations of the numerical features of the current population's competitive part via
/// <see cref="RunStatisticTracker.ComputeAndExportNumericalFeatureCoefficientOfVariation"/>.
/// </summary>
public void ExportFeatureStandardDeviations()
{
    // Materialize the competitive genomes from the most recent sorting before handing them off.
    var competitiveGenomes = this.MostRecentSorting.Select(point => point.Genome.CreateMutableGenome());

    RunStatisticTracker.ComputeAndExportNumericalFeatureCoefficientOfVariation(
        this.ParameterTree,
        competitiveGenomes,
        this._currentGeneration);
}
/// <summary>
/// Exports the standard deviations of the numerical features of the current population's competitive part via
/// <see cref="RunStatisticTracker.ComputeAndExportNumericalFeatureCoefficientOfVariation"/>.
/// </summary>
public void ExportFeatureStandardDeviations()
{
    // Fetch the competitive part of the population first, then delegate the export.
    var competitiveIndividuals = this._population.GetCompetitiveIndividuals();

    RunStatisticTracker.ComputeAndExportNumericalFeatureCoefficientOfVariation(
        this._parameterTree,
        competitiveIndividuals,
        this._currentGeneration);
}
/// <summary>
/// Verifies that <see cref="RunStatisticTracker.TrackConvergenceBehavior"/> throws an
/// <see cref="ArgumentNullException"/> if called without a run evaluator.
/// </summary>
public void TrackConvergenceBehaviorThrowsForMissingRunEvaluator()
{
    var wrapper = new IncumbentGenomeWrapper<TestResult>
                      {
                          IncumbentGeneration = 0,
                          IncumbentGenome = new Genome(),
                          IncumbentInstanceResults = ImmutableList<TestResult>.Empty,
                      };

    Assert.Throws<ArgumentNullException>(
        () => RunStatisticTracker.TrackConvergenceBehavior(wrapper, runEvaluator: null));
}
/// <summary>
/// Tracks the convergence behavior of the algorithm and logs it to csv.
/// </summary>
private void TrackConvergenceBehavior()
{
    // Pattern matching instead of an 'as' cast plus null check, consistent with
    // CompleteAndExportGenerationHistory. Tracking only works for metric results and
    // must be explicitly enabled via the configuration; otherwise this is a no-op.
    if (!(this._runEvaluator is IMetricRunEvaluator<TResult> metricRunEvaluator)
        || !this._configuration.TrackConvergenceBehavior)
    {
        return;
    }

    // Record the incumbent's current average score so it can be exported at the end of the run.
    var currentAverage = RunStatisticTracker.TrackConvergenceBehavior(this._incumbentGenomeWrapper, metricRunEvaluator);
    this.IncumbentQuality.Add(currentAverage);
}
/// <summary>
/// Starts a new OPTANO Algorithm Tuner run.
/// </summary>
/// <returns>Best parameters found for the target algorithm.</returns>
public Dictionary<string, IAllele> Run()
{
    // A population may already exist when a run is continued from a dumped status;
    // only initialize a fresh one otherwise.
    if (!this._populationUpdateStrategyManager.HasPopulation)
    {
        var initialPopulation = this.InitializePopulation();
        this._populationUpdateStrategyManager.Initialize(initialPopulation);
    }

    // NOTE(review): _currGeneration is a field, not a loop-local, presumably so that
    // continued runs resume at the correct generation — confirm against status dump logic.
    for (; this._currGeneration < this._configuration.Generations; this._currGeneration++)
    {
        // Check at the start of the generation to catch the limit also for continued runs.
        if (this.IsEvaluationLimitMet())
        {
            break;
        }

        LoggingHelper.WriteLine(VerbosityLevel.Info, $"Generation {this._currGeneration}/{this._configuration.Generations}.");
        this._populationUpdateStrategyManager.CurrentStrategy.LogPopulationToConsole();

        // Select this generation's instances, possibly switch the population update
        // strategy, then run one iteration and refresh the incumbent.
        var instancesForEvaluation = this._instanceSelector.Select(this._currGeneration).ToList();
        var currentStrategy = this._populationUpdateStrategyManager.ChangePopulationUpdateStrategy(
            instancesForEvaluation,
            this._incumbentGenomeWrapper);
        currentStrategy.PerformIteration(this._currGeneration, instancesForEvaluation);
        this.UpdateIncumbentGenomeWrapper(currentStrategy.FindIncumbentGenome());

        this.UpdateGenerationHistory(currentStrategy);
        this.TrackConvergenceBehavior();
        if (this._currGeneration != this._configuration.Generations - 1)
        {
            // Functions depending on the complete population may behave unexpectedly in the final generation
            // if strategies ignore certain steps for that generation to speed up the tuning.
            currentStrategy.ExportFeatureStandardDeviations();
            this.DumpStatus();
        }

        this.LogFinishedGeneration();
    }

    // Wrap up: close the current strategy phase, log summary statistics, and export
    // the convergence data collected by TrackConvergenceBehavior.
    this._populationUpdateStrategyManager.FinishPhase();
    this.LogStatistics();
    RunStatisticTracker.ExportConvergenceBehavior(this.IncumbentQuality);

    // Return best parameters.
    return (this._incumbentGenomeWrapper.IncumbentGenome.GetFilteredGenes(this._parameterTree));
}
/// <summary>
/// Completes the generation information history and exports it to file.
/// </summary>
public void CompleteAndExportGenerationHistory()
{
    // Scoring is optional: it requires both the configuration flag and a metric run evaluator.
    if (this._configuration.ScoreGenerationHistory)
    {
        if (this._runEvaluator is IMetricRunEvaluator<TResult> metricRunEvaluator)
        {
            var scorer = new GenerationInformationScorer<TInstance, TResult>(
                this._genomeSorter,
                this._targetRunResultStorage,
                metricRunEvaluator);
            scorer.ScoreInformationHistory(this._informationHistory, this._trainingInstances, this._testInstances);
            RunStatisticTracker.ExportAverageIncumbentScores(this._informationHistory, this._configuration.EvaluationLimit);
        }
    }

    // The plain generation history is always exported, scored or not.
    RunStatisticTracker.ExportGenerationHistory(this._informationHistory);
}
/// <summary>
/// Verifies that <see cref="RunStatisticTracker.ExportGenerationHistory"/> writes every piece of
/// collected generation information to generationHistory.csv.
/// </summary>
public void ExportGenerationHistoryWritesOutAllInformation()
{
    var firstGeneration = new GenerationInformation(
                              0,
                              TimeSpan.FromSeconds(30),
                              34,
                              typeof(GgaStrategy<TestInstance, TestResult>),
                              new ImmutableGenome(new Genome()),
                              "id")
                              {
                                  IncumbentTrainingScore = -34.5,
                                  IncumbentTestScore = -20,
                              };
    var secondGeneration = new GenerationInformation(
                               1,
                               TimeSpan.FromSeconds(60),
                               2587,
                               typeof(DifferentialEvolutionStrategy<TestInstance, TestResult>),
                               new ImmutableGenome(new Genome()),
                               "id")
                               {
                                   IncumbentTrainingScore = -104,
                                   IncumbentTestScore = -100,
                               };

    var history = new List<GenerationInformation> { firstGeneration, secondGeneration };
    RunStatisticTracker.ExportGenerationHistory(history);

    var exported = File.ReadAllLines("generationHistory.csv");
    Assert.True(3 == exported.Length, "Expected three lines: One legend and two generations.");
    Assert.True(
        "Generation;Elapsed(d:hh:mm:ss);Total # Evaluations;Average Train Incumbent;Average Test Incumbent;Strategy;Incumbent;IncumbentID" == exported[0],
        "Legend is not as expected.");
    exported[1].ShouldBe(
        "0;0:00:00:30.0000000;34;-34.5;-20;GgaStrategy`2;[](Age: 0)[Engineered: no];id");
    exported[2].ShouldBe(
        "1;0:00:01:00.0000000;2587;-104;-100;DifferentialEvolutionStrategy`2;[](Age: 0)[Engineered: no];id");
}
/// <summary>
/// Verifies that <see cref="RunStatisticTracker.ExportGenerationHistory"/> writes every piece of
/// collected generation information to generationHistory.csv.
/// </summary>
public void ExportGenerationHistoryWritesOutAllInformation()
{
    var firstGeneration = new GenerationInformation(
                              0,
                              34,
                              typeof(GgaStrategy<TestInstance, TestResult>),
                              new ImmutableGenome(new Genome()))
                              {
                                  IncumbentTrainingScore = -34.5,
                                  IncumbentTestScore = -20,
                              };
    var secondGeneration = new GenerationInformation(
                               1,
                               2587,
                               typeof(DifferentialEvolutionStrategy<TestInstance, TestResult>),
                               new ImmutableGenome(new Genome()))
                               {
                                   IncumbentTrainingScore = -104,
                                   IncumbentTestScore = -100,
                               };

    var history = new List<GenerationInformation> { firstGeneration, secondGeneration };
    RunStatisticTracker.ExportGenerationHistory(history);

    var exported = File.ReadAllLines("generationHistory.csv");
    Assert.True(3 == exported.Length, "Expected three lines: One legend and two generations.");
    Assert.True(
        "Generation;Total # Evaluations;Average Train Incumbent;Average Test Incumbent;Strategy;Incumbent" == exported[0],
        "Legend is not as expected.");
    Assert.True(
        "0;34;-34.5;-20;GgaStrategy`2;[](Age: 0)[Engineered: no]" == exported[1],
        "First generation information is not as expected.");
    Assert.True(
        "1;2587;-104;-100;DifferentialEvolutionStrategy`2;[](Age: 0)[Engineered: no]" == exported[2],
        "Second generation information is not as expected.");
}
/// <summary>
/// Verifies that <see cref="RunStatisticTracker.ExportAverageIncumbentScores"/> throws an
/// <see cref="ArgumentNullException"/> if no information history is provided.
/// </summary>
public void ExportAverageIncumbentScoresThrowsForMissingHistory()
{
    Action export = () => RunStatisticTracker.ExportAverageIncumbentScores(informationHistory: null, evaluationLimit: 200);

    Assert.Throws<ArgumentNullException>(export);
}
/// <summary>
/// Verifies that <see cref="RunStatisticTracker.ExportAverageIncumbentScores"/> determines the
/// score for each 100-evaluation level correctly and writes it to scores.csv.
/// </summary>
public void ExportAverageIncumbentScoresDeterminesScoresCorrectly()
{
    var genome = new ImmutableGenome(new Genome());
    var strategyType = typeof(GgaStrategy<TestInstance, TestResult>);

    // Check what happens if the first generation takes more than 100 evaluations.
    var generation0 = new GenerationInformation(0, TimeSpan.Zero, 150, strategyType, genome, "id")
                          {
                              IncumbentTrainingScore = -34.5,
                              IncumbentTestScore = -20,
                          };

    // Check what happens for multiple information objects within one evaluation level.
    var generation1 = new GenerationInformation(1, TimeSpan.Zero, 199, strategyType, genome, "id")
                          {
                              IncumbentTrainingScore = 12.34,
                              IncumbentTestScore = 28.6,
                          };

    // Check what happens for an evaluation number equal to a bound.
    var generation2 = new GenerationInformation(2, TimeSpan.Zero, 300, strategyType, genome, "id")
                          {
                              IncumbentTrainingScore = 12.01,
                              IncumbentTestScore = 29,
                          };

    // Check what happens if there is no information object in a certain level (301-400).
    var generation3 = new GenerationInformation(2, TimeSpan.Zero, 401, strategyType, genome, "id")
                          {
                              IncumbentTrainingScore = 14,
                              IncumbentTestScore = 286,
                          };

    // Make sure to try an evaluation limit higher than the last total number of evaluations.
    var history = new List<GenerationInformation> { generation0, generation1, generation2, generation3 };
    RunStatisticTracker.ExportAverageIncumbentScores(history, 600);

    var exported = File.ReadAllLines("scores.csv");
    Assert.True(7 == exported.Length, "Expected seven lines: One legend and six evaluation levels.");
    Assert.True(
        "# Evaluations;Average Train Incumbent;Average Test Incumbent" == exported[0],
        "Legend is not as expected.");
    Assert.True(
        "100;;" == exported[1],
        "There should be an empty line as first information is only gathered at 150 evaluations.");
    Assert.True(
        "200;12.34;28.6" == exported[2],
        "First score line should use latest information.");
    Assert.True(
        "300;12.01;29" == exported[3],
        "Second score line should use information with evaluation number equal to the bound.");
    Assert.True(
        "400;12.01;29" == exported[4],
        "Third score line should not change scores.");
    Assert.True(
        "500;14;286" == exported[5],
        "Fourth score line should use the newest data again.");
    Assert.True(
        "600;14;286" == exported[6],
        "Fifth score line should be written to have scores until the limit.");
}
/// <summary>
/// Verifies that <see cref="RunStatisticTracker.ExportGenerationHistory"/> throws an
/// <see cref="ArgumentNullException"/> if no information history is provided.
/// </summary>
public void ExportGenerationHistoryThrowsForMissingHistory()
{
    Action export = () => RunStatisticTracker.ExportGenerationHistory(informationHistory: null);

    Assert.Throws<ArgumentNullException>(export);
}