public static void Run()
{
    // Fixed seed keeps this stochastic DE run reproducible for the assertions below.
    var rng = new Random(2);
    OptimizationProblem optimizationProblem = new S_CRES();

    // Configure a constrained Differential Evolution run via its builder.
    var builder = new DifferentialEvolutionAlgorithmConstrained.Builder(optimizationProblem)
    {
        PopulationSize = 20,
        MutationFactor = 0.6,
        CrossoverProbability = 0.9,
        ConvergenceCriterion = new MaxFunctionEvaluations(100000),
        Penalty = new DeathPenalty(),
        RandomNumberGenerator = rng
    };
    IOptimizationAlgorithm de = builder.Build();

    IOptimizationAnalyzer analyzer = new OptimizationAnalyzer(de);
    analyzer.Optimize();

    //TODO: Not sure this is the exact solution. Needs research.
    double expectedFitness = 13.590841691859703;
    var expectedDesign = Vector.CreateFromArray(new double[] { 2.246825836986833, 2.3818634605759064 });
    Assert.Equal(expectedFitness, de.BestFitness, 6);
    Assert.True(Vector.CreateFromArray(de.BestPosition).Equals(expectedDesign, 1E-6));
}
public static void Run()
{
    // Fixed seed keeps this stochastic PSO run reproducible for the assertions below.
    var rng = new Random(1);
    OptimizationProblem optimizationProblem = new Ackley(2);

    // Configure Particle Swarm Optimization via its builder.
    var builder = new ParticleSwarmOptimizationAlgorithm.Builder(optimizationProblem)
    {
        SwarmSize = 10,
        PhiP = 2.0,
        PhiG = 2.0,
        Omega = 0.2,
        ConvergenceCriterion = new MaxFunctionEvaluations(10000),
        Logger = new NoLogger(),
        RandomNumberGenerator = rng
    };
    IOptimizationAlgorithm pso = builder.Build();

    IOptimizationAnalyzer analyzer = new OptimizationAnalyzer(pso);
    analyzer.Optimize();

    // Ackley's global minimum is f(0, ..., 0) = 0.
    double expectedFitness = 0.0;
    var expectedDesign = Vector.CreateZero(optimizationProblem.Dimension);
    Assert.Equal(expectedFitness, pso.BestFitness, 3);
    Assert.True(Vector.CreateFromArray(pso.BestPosition).Equals(expectedDesign, 1E-4));
}
protected override void Context()
{
    // Spec setup: the mapper under test resolves algorithms from an IoC container.
    _container = A.Fake <IContainer>();
    sut = new ParameterIdentificationAlgorithmToOptmizationAlgorithmMapper(_container);

    // Source properties: "Toto" and "Tata" exist on the target algorithm and should be
    // copied over; "Does not exist" has no counterpart and should be ignored.
    _optimizationAlgorithmProperties = new OptimizationAlgorithmProperties("Algo")
    {
        new ExtendedProperty <string> { Name = "Toto", Value = "Test" },
        new ExtendedProperty <double> { Name = "Tata", Value = 10d },
        new ExtendedProperty <bool> { Name = "Does not exist", Value = false }
    };

    _optimizationAlgorithm = A.Fake <IOptimizationAlgorithm>();
    // Stub Properties BEFORE adding to it, so all Add calls hit the same real collection.
    A.CallTo(() => _optimizationAlgorithm.Properties).Returns(new OptimizationAlgorithmProperties("Algo"));
    // Target algorithm's current values, to be overwritten ("OLD", 5d) or left alone ("TUTU").
    _optimizationAlgorithm.Properties.Add(new ExtendedProperty <string> { Name = "Toto", Value = "OLD" });
    _optimizationAlgorithm.Properties.Add(new ExtendedProperty <double> { Name = "Tata", Value = 5d });
    _optimizationAlgorithm.Properties.Add(new ExtendedProperty <bool> { Name = "TUTU", Value = true });

    // Resolving by the properties' name yields the fake algorithm.
    A.CallTo(() => _container.Resolve <IOptimizationAlgorithm>(_optimizationAlgorithmProperties.Name)).Returns(_optimizationAlgorithm);
}
public static void Run()
{
    // Fixed seed keeps this stochastic DE run reproducible for the assertions below.
    var rng = new Random(1);
    OptimizationProblem optimizationProblem = new Rosenbrock();

    // Configure (unconstrained) Differential Evolution via its builder.
    var builder = new DifferentialEvolutionAlgorithm.Builder(optimizationProblem)
    {
        PopulationSize = 100,
        MutationFactor = 0.6,
        CrossoverProbability = 0.9,
        ConvergenceCriterion = new MaxFunctionEvaluations(100000),
        RandomNumberGenerator = rng
    };
    IOptimizationAlgorithm de = builder.Build();

    IOptimizationAnalyzer analyzer = new OptimizationAnalyzer(de);
    analyzer.Optimize();

    // Rosenbrock's global minimum is f(1, ..., 1) = 0.
    double expectedFitness = 0.0;
    var expectedDesign = Vector.CreateWithValue(optimizationProblem.Dimension, 1.0);
    Assert.Equal(expectedFitness, de.BestFitness, 10);
    Assert.True(Vector.CreateFromArray(de.BestPosition).Equals(expectedDesign, 1E-6));
}
protected override void Context()
{
    // Two fake algorithms: the second one carries the default algorithm name so the
    // presenter can pick it as the initial selection.
    _optimizationAlgorithm1 = A.Fake <IOptimizationAlgorithm>();
    _optimizationAlgorithm2 = A.Fake <IOptimizationAlgorithm>();
    A.CallTo(() => _optimizationAlgorithm2.Name).Returns(Constants.OptimizationAlgorithm.DEFAULT);
    _identificationAlgorithm1 = A.Fake <OptimizationAlgorithmProperties>();
    _identificationAlgorithm2 = A.Fake <OptimizationAlgorithmProperties>();

    // Collaborators of the presenter under test.
    _algorithmPropertiesMapper = A.Fake <IOptimizationAlgorithmToOptimizationAlgorithmPropertiesMapper>();
    _parameterIdentificationConfigurationDTOMapper = A.Fake <IParameterIdentificationConfigurationToParameterIdentificationConfigurationDTOMapper>();
    _optimizationAlgorithmRepository = A.Fake <IOptimizationAlgorithmRepository>();
    _parameterIdentificationAlgorithmOptionsPresenter = A.Fake <IExtendedPropertiesPresenter>();
    _view = A.Fake <IParameterIdentificationConfigurationView>();
    _parameterIdentification = new ParameterIdentification();

    // One sub-presenter per run mode.
    _noOptionsPresenter = A.Fake <IStandardParameterIdentificationRunModePresenter>();
    _multipleParameterIdentificationRunModePresenter = A.Fake <IMultipleParameterIdentificationRunModePresenter>();
    _categorialParameterIdentificationRunModePresenter = A.Fake <ICategorialParameterIdentificationRunModePresenter>();
    ConfigureCategorialParameterIdentificationRunModePresenter();

    sut = new ParameterIdentificationConfigurationPresenter(_view, _parameterIdentificationAlgorithmOptionsPresenter, _optimizationAlgorithmRepository, _parameterIdentificationConfigurationDTOMapper, _algorithmPropertiesMapper, _noOptionsPresenter, _multipleParameterIdentificationRunModePresenter, _categorialParameterIdentificationRunModePresenter);

    // Map each fake algorithm to its corresponding properties object.
    A.CallTo(() => _algorithmPropertiesMapper.MapFrom(_optimizationAlgorithm1)).Returns(_identificationAlgorithm1);
    A.CallTo(() => _algorithmPropertiesMapper.MapFrom(_optimizationAlgorithm2)).Returns(_identificationAlgorithm2);

    // Each run-mode presenter claims it can edit only its matching RunMode type.
    A.CallTo(() => _noOptionsPresenter.CanEdit(A <ParameterIdentification> .That.Matches(x =>
        x.Configuration.RunMode.IsAnImplementationOf <StandardParameterIdentificationRunMode>()))).Returns(true);
    A.CallTo(() => _categorialParameterIdentificationRunModePresenter.CanEdit(A <ParameterIdentification> .That.Matches(x => x.Configuration.RunMode.IsAnImplementationOf <CategorialParameterIdentificationRunMode>()))).Returns(true);
    A.CallTo(() => _multipleParameterIdentificationRunModePresenter.CanEdit(A <ParameterIdentification> .That.Matches(x => x.Configuration.RunMode.IsAnImplementationOf <MultipleParameterIdentificationRunMode>()))).Returns(true);
}
protected override void Context()
{
    base.Context();
    // Fake algorithm whose Properties.Clone() is stubbed to return a known instance,
    // so the spec can verify the mapper returns exactly that clone.
    _optimizationAlgorithm = A.Fake <IOptimizationAlgorithm>();
    _cloneOfAlgoProperties = new OptimizationAlgorithmProperties("CLONE");
    A.CallTo(() => _optimizationAlgorithm.Properties.Clone()).Returns(_cloneOfAlgoProperties);
}
public static void Run()
{
    // Sizing optimization of the classic 10-bar truss benchmark, solved with
    // constrained Differential Evolution and a death-penalty constraint handler.
    OptimizationProblem sizingOptimizationProblem = new Truss10Benchmark();

    var builder = new DifferentialEvolutionAlgorithmConstrained.Builder(sizingOptimizationProblem)
    {
        PopulationSize = 100,
        MutationFactor = 0.6,
        CrossoverProbability = 0.9,
        ConvergenceCriterion = new MaxFunctionEvaluations(200000),
        Penalty = new DeathPenalty()
    };
    IOptimizationAlgorithm de = builder.Build();

    IOptimizationAnalyzer analyzer = new OptimizationAnalyzer(de);
    analyzer.Optimize();

    // Print results
    Console.WriteLine("\n Best Position:");
    for (int i = 0; i < sizingOptimizationProblem.Dimension; i++)
    {
        Console.WriteLine(String.Format(@" x[{0}] = {1} ", i, de.BestPosition[i]));
    }
    Console.WriteLine(String.Format(@"Best Fitness: {0}", de.BestFitness));
}
/// <summary>
/// Convergence test: true once the algorithm has reached the iteration budget.
/// </summary>
public bool HasConverged(IOptimizationAlgorithm algorithm)
{
    return algorithm.CurrentIteration >= this.maxIterations;
}
/// <summary>
/// Convergence test: true once the algorithm has spent the function-evaluation budget.
/// </summary>
public bool HasConverged(IOptimizationAlgorithm algorithm)
{
    return algorithm.CurrentFunctionEvaluations >= this.maxFES;
}
// Decides which algorithm handles a data set: greedy for large sets (> 200 items),
// dynamic programming otherwise.
// NOTE(review): x.GetType().IsAssignableFrom(typeof(T)) is true only when the runtime
// type is T itself or a BASE of T. If derived algorithm classes should also match, the
// usual form is typeof(T).IsAssignableFrom(x.GetType()) or `optimizationAlgorithm is T`
// — confirm the intended direction.
public static bool ShouldRunOnThisDataSet(IOptimizationAlgorithm optimizationAlgorithm, int count)
{
    if (count > 200)
    {
        return(optimizationAlgorithm.GetType().IsAssignableFrom(typeof(GreedyAlgorithm)));
    }
    return(optimizationAlgorithm.GetType().IsAssignableFrom(typeof(DynamicProgrammingAlgorithm)));
}
/// <summary>
/// Yields one rendering per supported solution type (null when the solution is not of
/// that type), followed by the optimizer's cost diagram.
/// </summary>
static private IEnumerable <Bitmap> DrawSolution(ISolution solution, IOptimizationAlgorithm optimizer)
{
    yield return solution is VehicleRoutingSolution vehicleRouting ? vehicleRouting.Draw() : null;
    yield return solution is TspSolution travellingSalesman ? travellingSalesman.Draw() : null;
    yield return solution is FloorplanSolution floorplan ? floorplan.Draw() : null;
    yield return DrawCostDiagram.Draw(optimizer, new BitmapStyle());
}
private void initialize()
{
    // NOTE(review): blocking on .Result can deadlock if InitializeRun resumes on a
    // captured synchronization context — confirm this always runs on a worker thread.
    _parameterIdentification = _runInitializer.InitializeRun().Result;
    RunResult.Description = _parameterIdentification.Description;
    // Turn the persisted algorithm properties into a runnable optimizer instance.
    _optimizationAlgorithm = _optimizationAlgorithmMapper.MapFrom(_parameterIdentification.AlgorithmProperties);
    _residualCalculator = _residualCalculatorFactory.CreateFor(_parameterIdentification.Configuration);
    // One SimModel batch per simulation taking part in the identification.
    _parameterIdentification.AllSimulations.Each(s => _allSimModelBatches.Add(s, createSimModelBatch(s)));
    initializeParameterHistoryCache();
    // Cache the variable/fixed identification parameters for the optimization loop.
    _variableParameters = _parameterIdentification.AllVariableIdentificationParameters.ToList();
    _fixedParameters = _parameterIdentification.AllFixedIdentificationParameters.ToList();
}
protected override void Context()
{
    sut = new ParameterIdentificationConfigurationToParameterIdentificationConfigurationDTOMapper();
    _parameterIdentificationConfiguration = new ParameterIdentificationConfiguration();
    // Three fake algorithms with distinct names; the array is deliberately NOT in
    // name order so ordering behavior of the mapper can be observed.
    _option1 = A.Fake <IOptimizationAlgorithm>();
    A.CallTo(() => _option1.Name).Returns("option1");
    _option2 = A.Fake <IOptimizationAlgorithm>();
    A.CallTo(() => _option2.Name).Returns("option2");
    _option3 = A.Fake <IOptimizationAlgorithm>();
    A.CallTo(() => _option3.Name).Returns("option3");
    _allAlgorithms = new[] { _option2, _option1, _option3 };
}
private void initialize(CancellationToken cancellationToken)
{
    // NOTE(review): blocking on .Result can deadlock if InitializeRun resumes on a
    // captured synchronization context — confirm this always runs on a worker thread.
    _parameterIdentification = _runInitializer.InitializeRun(cancellationToken).Result;
    RunResult.Description = _parameterIdentification.Description;
    // Turn the persisted algorithm properties into a runnable optimizer instance.
    _optimizationAlgorithm = _optimizationAlgorithmMapper.MapFrom(_parameterIdentification.AlgorithmProperties);
    _residualCalculator = _residualCalculatorFactory.CreateFor(_parameterIdentification.Configuration);
    // NOTE(review): the parallel loop uses the _cancellationToken FIELD while the method
    // receives a cancellationToken PARAMETER — verify these are the same token or that
    // the field is the intended one here.
    var parallelOptions = createParallelOptions(_cancellationToken);
    // Create SimModel batches concurrently, one per simulation.
    Parallel.ForEach(_parameterIdentification.AllSimulations, parallelOptions, simulation => { parallelOptions.CancellationToken.ThrowIfCancellationRequested(); _allSimModelBatches.TryAdd(simulation, createSimModelBatch(simulation)); });
    initializeParameterHistoryCache();
    // Cache the variable/fixed identification parameters for the optimization loop.
    _variableParameters = _parameterIdentification.AllVariableIdentificationParameters.ToList();
    _fixedParameters = _parameterIdentification.AllFixedIdentificationParameters.ToList();
}
/// <summary>
/// Logical NOT of the wrapped convergence criterion.
/// </summary>
public bool HasConverged(IOptimizationAlgorithm algorithm) => !originalCriterion.HasConverged(algorithm);
// BackgroundWorker entry point: builds a vehicle-routing problem from the UI state,
// configures the optimizer chosen by optimizerType, and streams progress back to the UI.
private void bwVehicleRouting_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker worker = (BackgroundWorker)sender;
    algorithmStatus.Text = "Press ESC to cancel";
    VehicleRoutingProblem problem = new VehicleRoutingProblem(routingCustomers, routingVehicles);
    VehicleRoutingSolution startSolution = new VehicleRoutingSolution(problem);
    // Neighborhood operators (second argument is the operator's weight/tag).
    Swap swap = new Swap(problem.Dimension, 1);
    Shift shift = new Shift(problem.Dimension, 2);
    TwoOpt twoOpt = new TwoOpt(problem.Dimension, 3);
    List <Operator> operations = new List <Operator> { swap, shift, twoOpt };
    // Clone the shared multistart options so per-run tweaks don't leak back to the UI defaults.
    MultistartParameters multistartParameters = (MultistartParameters)multistartOptions.Clone();
    LocalDescentParameters ldParameters = new LocalDescentParameters() { Name = "VEHICLE LD", Seed = seed, DetailedOutput = true, Operators = operations };
    SimulatedAnnealingParameters saParameters = new SimulatedAnnealingParameters() { Name = "VEHICLE SA", InitProbability = 0.4, TemperatureCooling = 0.95, MinCostDeviation = 10E-3, Seed = seed, DetailedOutput = true, Operators = operations };
    // Stacked pipeline: local descent -> simulated annealing -> local descent.
    StackedParameters ssParameters = new StackedParameters() { Name = "B", DetailedOutput = true, OptimizationAlgorithms = new Type[] { typeof(LocalDescent), typeof(SimulatedAnnealing), typeof(LocalDescent) }, Parameters = new OptimizationParameters[] { ldParameters, saParameters, ldParameters } };
    // Choose the optimizer according to the UI selection; cases 2 and 3 retune SA first.
    switch (optimizerType)
    {
        case 0:
        {
            multistartParameters.Parameters = ldParameters;
            multistartParameters.OptimizationAlgorithm = typeof(LocalDescent);
        }
        break;

        case 1:
        {
            multistartParameters.Parameters = saParameters;
            multistartParameters.OptimizationAlgorithm = typeof(SimulatedAnnealing);
        }
        break;

        case 2:
        {
            saParameters.InitProbability = 0.002;
            saParameters.MinCostDeviation = 10E-2;
            multistartParameters.Parameters = ssParameters;
            multistartParameters.OptimizationAlgorithm = typeof(StackedSearch);
        }
        break;

        case 3:
        {
            saParameters.InitProbability = 0.01;
            saParameters.MinCostDeviation = 10E-2;
            multistartParameters.InstancesNumber = 3;
            multistartParameters.Parameters = ssParameters;
            multistartParameters.OptimizationAlgorithm = typeof(StackedSearch);
        }
        break;
    }
    vehicleRoutingOptimizer = new ParallelMultistart(multistartParameters);
    toRenderBackground = false;
    // Minimize yields intermediate solutions; honor cancellation and report each step.
    foreach (ISolution solution in vehicleRoutingOptimizer.Minimize(startSolution))
    {
        if (worker.CancellationPending)
        {
            vehicleRoutingOptimizer.Stop();
            e.Cancel = true;
        }
        // A cancelled run must not present its last solution as final.
        if (e.Cancel)
        {
            solution.IsFinal = false;
        }
        worker.ReportProgress(0);
    }
}
// Console driver: sets up a vehicle-routing instance, a family of optimizer
// configurations (local descent, simulated annealing, multistart and stacked
// variants), runs one of them and dumps the search progress plus result images.
// The commented-out sections are alternative problem setups kept for experimentation.
static void Main(string[] args)
{
    //VehicleRoutingProblem vrProblem = new VehicleRoutingProblem(4, 2);
    //VehicleRoutingSolution vrSolution = new VehicleRoutingSolution(vrProblem);
    //Console.WriteLine(vrSolution.ToString());
    //Console.WriteLine(vrSolution.CostValue);
    //Console.ReadLine();
    //return;
    //FloorplanProblem problem = new FloorplanProblem(100);
    //FloorplanProblem problem = FloorplanProblem.Load("500.vlsi");
    //FloorplanSolution solution = new FloorplanSolution(problem);
    //Swap swap = new Swap(problem.Dimension, 10);
    //Shift shift = new Shift(problem.Dimension, 1);
    //EmptyLeafMove eLeaf = new EmptyLeafMove(problem.Dimension, 5);
    //FullLeafMove fLeaf = new FullLeafMove(problem.Dimension, 5);
    //FullNodeMove fnode = new FullNodeMove(problem.Dimension, 5);
    //List<Operator> operations = new List<Operator> { swap, fLeaf };
    //TspProblem problem = new TspProblem(200);
    //TspSolution solution = new TspSolution(problem);

    // Active problem: 225 customers, 10 vehicles.
    VehicleRoutingProblem problem = new VehicleRoutingProblem(225, 10);
    VehicleRoutingSolution solution = new VehicleRoutingSolution(problem);
    // Neighborhood operators (second argument is the operator's weight/tag).
    Swap swap = new Swap(problem.Dimension, 1);
    Shift shift = new Shift(problem.Dimension, 2);
    TwoOpt twoOpt = new TwoOpt(problem.Dimension, 3);
    List <Operator> operations = new List <Operator> { swap, shift, twoOpt };

    // Shared multistart options; cloned below for each specialized configuration.
    MultistartParameters multistartOptions = new MultistartParameters() { Name = "P", InstancesNumber = 5, RandomizeStart = false, DetailedOutput = true, OutputFrequency = 100, };
    LocalDescentParameters ldParameters = new LocalDescentParameters() { Name = "LD", DetailedOutput = true, Seed = 0, Operators = operations, IsSteepestDescent = false };
    SimulatedAnnealingParameters saParameters = new SimulatedAnnealingParameters() { Name = "SA", InitProbability = 0.5, TemperatureCooling = 0.98, MinCostDeviation = 0, UseWeightedNeighborhood = false, DetailedOutput = false, Seed = 0, Operators = operations, };
    // Parallel multistart over local descent.
    MultistartParameters pldParameters = (MultistartParameters)multistartOptions.Clone();
    pldParameters.OptimizationAlgorithm = typeof(LocalDescent);
    pldParameters.Parameters = ldParameters;
    // Parallel multistart over simulated annealing.
    MultistartParameters psaParameters = (MultistartParameters)multistartOptions.Clone();
    psaParameters.OptimizationAlgorithm = typeof(SimulatedAnnealing);
    psaParameters.Parameters = saParameters;
    // Stacked pipelines: LD -> SA -> LD, and LD -> parallel SA -> LD.
    StackedParameters ssParameters = new StackedParameters() { Name = "B", DetailedOutput = true, OptimizationAlgorithms = new Type[] { typeof(LocalDescent), typeof(SimulatedAnnealing), typeof(LocalDescent) }, Parameters = new OptimizationParameters[] { ldParameters, saParameters, ldParameters } };
    StackedParameters sspParameters = new StackedParameters() { Name = "B", DetailedOutput = false, OptimizationAlgorithms = new Type[] { typeof(LocalDescent), typeof(ParallelMultistart), typeof(LocalDescent) }, Parameters = new OptimizationParameters[] { ldParameters, psaParameters, ldParameters } };

    // Instantiate every optimizer variant; only `sa` is actually used below.
    LocalDescent ld = new LocalDescent(ldParameters);
    SimulatedAnnealing sa = new SimulatedAnnealing(saParameters);
    ParallelMultistart pld = new ParallelMultistart(pldParameters);
    ParallelMultistart psa = new ParallelMultistart(psaParameters);
    StackedSearch ss = new StackedSearch(ssParameters);
    StackedSearch ssp = new StackedSearch(sspParameters);
    MultistartParameters pssParameters = (MultistartParameters)multistartOptions.Clone();
    pssParameters.DetailedOutput = false;
    pssParameters.RandomizeStart = true;
    pssParameters.OptimizationAlgorithm = typeof(StackedSearch);
    pssParameters.Parameters = ssParameters;
    ParallelMultistart pss = new ParallelMultistart(pssParameters);

    // Selected optimizer for this run.
    IOptimizationAlgorithm optimizer = sa;
    ISolution bestSolution = solution;
    // Minimize streams intermediate solutions; print each and keep the latest.
    foreach (ISolution s in optimizer.Minimize(solution.Shuffle(0)))
    {
        //if (s.IsCurrentBest)
        Console.WriteLine("\t{0}, {1:f}s, {2}, {3}, {4}, {5}, {6}", s.CostValue, s.TimeInSeconds, s.IterationNumber, s.IsCurrentBest, s.IsFinal, bestSolution.CostValue - s.CostValue, s.InstanceTag);
        bestSolution = s;
    }
    Console.WriteLine(bestSolution.TimeInSeconds + "s");
    Console.WriteLine(bestSolution.IterationNumber + " iterations");
    //var groups = optimizer.SearchHistory.GroupBy(s => s.OperatorTag).Select(s => new { Operator = s.Key, Count = s.Count() });
    //var dictionary = groups.ToDictionary(g => g.Operator, g => g.Count);
    //foreach (var o in groups)
    //{
    //   Console.WriteLine("{0} = {1}", o.Operator, o.Count);
    //}
    //Console.WriteLine("Done");

    // Save each non-null rendering of the best solution (plus the cost diagram).
    foreach (var b in DrawSolution(bestSolution, optimizer))
    {
        b?.Save("solution" + DateTime.Now.Millisecond + ".jpg");
    }
    Console.ReadLine();
}
/// <summary>
/// Creates an analyzer that drives the given optimization algorithm.
/// </summary>
/// <param name="optimizationAlgorithm">The algorithm to analyze. Must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="optimizationAlgorithm"/> is null.</exception>
public OptimizationAnalyzer(IOptimizationAlgorithm optimizationAlgorithm)
{
    // Fail fast here instead of with a NullReferenceException far from the call site.
    if (optimizationAlgorithm == null)
    {
        throw new ArgumentNullException(nameof(optimizationAlgorithm));
    }
    this.optimizationAlgorithm = optimizationAlgorithm;
}
protected override void Because()
{
    // Action under test: map the persisted properties to an algorithm instance.
    _result = sut.MapFrom(_optimizationAlgorithmProperties);
}
// Intentionally empty: this logger variant records nothing per iteration
// (presumably the NoLogger used when logging is disabled — confirm enclosing type).
public void Log(IOptimizationAlgorithm algorithm) { }
// BackgroundWorker entry point: builds a floorplanning problem from the UI state,
// configures the optimizer chosen by optimizerType, and streams progress back to the UI.
private void bwFloorplan_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker worker = (BackgroundWorker)sender;
    algorithmStatus.Text = "Press ESC to cancel";
    FloorplanProblem problem = new FloorplanProblem(floorplanRectangles);
    FloorplanSolution startSolution = new FloorplanSolution(problem);
    // Neighborhood operators (second argument is the operator's weight/tag).
    // LD uses { swap, fLeaf }; SA uses { swap, fNode }; eLeaf and shift are created but unused.
    Swap swap = new Swap(problem.Dimension, 10);
    Shift shift = new Shift(problem.Dimension, 1);
    EmptyLeafMove eLeaf = new EmptyLeafMove(problem.Dimension, 5);
    FullLeafMove fLeaf = new FullLeafMove(problem.Dimension, 5);
    FullNodeMove fNode = new FullNodeMove(problem.Dimension, 5);
    // Clone the shared multistart options so per-run tweaks don't leak back to the UI defaults.
    MultistartParameters multistartParameters = (MultistartParameters)multistartOptions.Clone();
    LocalDescentParameters ldParameters = new LocalDescentParameters() { Name = "VLSI LD", Seed = seed, DetailedOutput = true, Operators = new List <Operator> { swap, fLeaf } };
    SimulatedAnnealingParameters saParameters = new SimulatedAnnealingParameters() { Name = "VLSI SA", InitProbability = 0.5, TemperatureCooling = 0.96, MinCostDeviation = 0, Seed = seed, DetailedOutput = true, Operators = new List <Operator> { swap, fNode } };
    // Stacked pipeline: local descent -> simulated annealing -> local descent.
    StackedParameters ssParameters = new StackedParameters() { Name = "B", DetailedOutput = true, OptimizationAlgorithms = new Type[] { typeof(LocalDescent), typeof(SimulatedAnnealing), typeof(LocalDescent) }, Parameters = new OptimizationParameters[] { ldParameters, saParameters, ldParameters } };
    // Choose the optimizer according to the UI selection; cases 2 and 3 retune SA first.
    switch (optimizerType)
    {
        case 0:
        {
            multistartParameters.Parameters = ldParameters;
            multistartParameters.OptimizationAlgorithm = typeof(LocalDescent);
        }
        break;

        case 1:
        {
            multistartParameters.Parameters = saParameters;
            multistartParameters.OptimizationAlgorithm = typeof(SimulatedAnnealing);
        }
        break;

        case 2:
        {
            saParameters.InitProbability = 0.005;
            saParameters.TemperatureCooling = 0.95;
            multistartParameters.Parameters = ssParameters;
            multistartParameters.OptimizationAlgorithm = typeof(StackedSearch);
        }
        break;

        case 3:
        {
            saParameters.InitProbability = 0.005;
            saParameters.TemperatureCooling = 0.95;
            multistartParameters.InstancesNumber = 3;
            multistartParameters.Parameters = ssParameters;
            multistartParameters.OptimizationAlgorithm = typeof(StackedSearch);
        }
        break;
    }
    floorplanOptimizer = new ParallelMultistart(multistartParameters);
    toRenderBackground = false;
    // Minimize yields intermediate solutions; honor cancellation and report each step.
    // NOTE(review): unlike the vehicle-routing handler, a cancelled run does not clear
    // solution.IsFinal here — confirm whether that asymmetry is intentional.
    foreach (ISolution solution in floorplanOptimizer.Minimize(startSolution))
    {
        if (worker.CancellationPending)
        {
            floorplanOptimizer.Stop();
            e.Cancel = true;
        }
        worker.ReportProgress(0);
    }
}
/// <summary>
/// Returns a clone of the algorithm's properties, so callers can edit the result
/// without mutating the algorithm's own configuration.
/// </summary>
public OptimizationAlgorithmProperties MapFrom(IOptimizationAlgorithm optimizationAlgorithm) =>
    optimizationAlgorithm.Properties.Clone();
public void ChangeOptimizationAlgorithm(IOptimizationAlgorithm optimizationAlgorithm)
{
    // Persist the new selection into the configuration first, then refresh the
    // options editor so it shows the new algorithm's properties.
    updateOptimizationAlgorithm(optimizationAlgorithm);
    editAlgorithmOptions();
}
// Renders the optimizer's search history as a scatter plot of cost vs. iteration.
// maxPoints == 0 draws every history entry; otherwise the history is subsampled to at
// most ~maxPoints entries. Returns null when there is no history to draw.
// NOTE(review): the SolidBrush/Font/Brush objects created here are IDisposable and are
// never disposed — consider `using` blocks; confirm whether GDI handle pressure matters
// for this app.
static public Bitmap Draw(IOptimizationAlgorithm optimizer, BitmapStyle bitmapStyle, int maxPoints = 0)
{
    BitmapStyle style = bitmapStyle ?? new BitmapStyle();
    // Nothing to draw without a history (optimizer or history may be null).
    if ((optimizer?.SearchHistory?.Count ?? 0) == 0)
    {
        return(null);
    }
    // Subsampling step: keep every n-th entry when maxPoints > 0.
    int n = maxPoints == 0 ? 1 : optimizer.SearchHistory.Count / maxPoints + 1;
    List <SolutionSummary> historyToDraw = maxPoints == 0 ? optimizer.SearchHistory : new List <SolutionSummary>();
    double minCost = int.MaxValue;
    double maxCost = 0;
    HashSet <string> instances = new HashSet <string>();
    HashSet <string> operators = new HashSet <string>();
    // Single pass: track cost range over the FULL history, and collect the subsampled
    // points plus the distinct instance/operator tags used for coloring.
    for (int i = 0; i < optimizer.SearchHistory.Count; i++)
    {
        if (minCost > optimizer.SearchHistory[i].CostValue)
        {
            minCost = optimizer.SearchHistory[i].CostValue;
        }
        if (maxCost < optimizer.SearchHistory[i].CostValue)
        {
            maxCost = optimizer.SearchHistory[i].CostValue;
        }
        if (maxPoints == 0 || i % n == 0)
        {
            if (maxPoints > 0)
            {
                historyToDraw.Add(optimizer.SearchHistory[i]);
            }
            instances.Add(optimizer.SearchHistory[i].InstanceTag);
            operators.Add(optimizer.SearchHistory[i].OperatorTag);
        }
    }
    // One brush color per instance tag, cycling through the palette.
    Dictionary <string, Brush> instanceBrush = new Dictionary <string, Brush>();
    int counter = 0;
    foreach (string instance in instances)
    {
        instanceBrush.Add(instance, new SolidBrush(colors[counter % colors.Count]));
        counter++;
    }
    // One brush color per operator tag; "not neighbor" operators share the previous color.
    Dictionary <string, Brush> operatorBrush = new Dictionary <string, Brush>();
    counter = 0;
    foreach (string operation in operators)
    {
        operatorBrush.Add(operation, new SolidBrush(colors[counter % colors.Count]));
        if (!notNeighborOperators.Contains(operation))
        {
            counter++;
        }
    }
    // Map point index -> x pixels and cost -> y pixels within the styled margins.
    double scaleX = (double)(style.ImageWidth - style.MarginX) / (historyToDraw.Count);
    double scaleY = (style.ImageHeight - style.MarginY - 4 * style.PenWidth) / (maxCost - minCost);
    Bitmap bitmap = new Bitmap(style.ImageWidth, style.ImageHeight, PixelFormat.Format32bppRgb);
    using (Graphics g = Graphics.FromImage(bitmap))
    {
        Brush brushBlack = new SolidBrush(Color.Black);
        Font font = new Font(style.FontName, style.FontSize);
        g.Clear(Color.FromName(style.BackgroundColor));
        // Color by instance when several instances ran; otherwise color by operator.
        for (int i = 0; i < historyToDraw.Count - 1; i++)
        {
            float x = style.MarginX + (float)(i * scaleX);
            float y = bitmap.Height - 4 * style.PenWidth - (float)((historyToDraw[i].CostValue - minCost) * scaleY);
            g.FillEllipse(instanceBrush.Count > 1 ? instanceBrush[historyToDraw[i].InstanceTag] : operatorBrush[historyToDraw[i].OperatorTag], x, y, 2 * style.PenWidth, 2 * style.PenWidth);
        }
        // Text overlay with run statistics in the top-left corner.
        string summary = String.Format("Max cost: {0:F4}\nMin cost: {1:F4}\nAccepted iterations: {2}\nTime: {3:F3}s", maxCost, minCost, optimizer.SearchHistory.Count, optimizer.CurrentSolution.TimeInSeconds);
        g.DrawString(summary, font, brushBlack, 0, 0);
    }
    return(bitmap);
}
// Stores the selected algorithm's properties (via the mapper) on the configuration.
private void updateOptimizationAlgorithm(IOptimizationAlgorithm optimizationAlgorithm) =>
    _configuration.AlgorithmProperties = _optimizationAlgorithmToAlgorithmMapper.MapFrom(optimizationAlgorithm);
protected override void Context()
{
    // Fake every collaborator of ParameterIdentificationRun.
    _modelCoreSimulationMapper = A.Fake <ISimulationToModelCoreSimulationMapper>();
    _residualCalculatorFactory = A.Fake <IResidualCalculatorFactory>();
    _timeGridUpdater = A.Fake <ITimeGridUpdater>();
    _simModelBatchFactory = A.Fake <ISimModelBatchFactory>();
    _optimizationAlgorithmMapper = A.Fake <IParameterIdentificationAlgorithmToOptmizationAlgorithmMapper>();
    _outputSelectionUpdater = A.Fake <IOutputSelectionUpdater>();
    _coreUserSettings = A.Fake <ICoreUserSettings>();
    _jacobianMatrixCalculator = A.Fake <IJacobianMatrixCalculator>();
    _coreUserSettings.MaximumNumberOfCoresToUse = 2;
    sut = new ParameterIdentificationRun(_residualCalculatorFactory, _timeGridUpdater, _simModelBatchFactory, _modelCoreSimulationMapper, _optimizationAlgorithmMapper, _outputSelectionUpdater, _coreUserSettings, _jacobianMatrixCalculator);

    // A single simulation with two linked parameters behind one identification parameter.
    _simulation = A.Fake <ISimulation>();
    _parameter1 = A.Fake <IParameter>();
    _parameter1.Dimension = DomainHelperForSpecs.ConcentrationDimensionForSpecs();
    _parameter1.Value = 15;
    _parameter2 = A.Fake <IParameter>();
    _parameter2.Value = 35;
    _parameter2.Dimension = DomainHelperForSpecs.ConcentrationDimensionForSpecs();
    _parameterIdentification = new ParameterIdentification();
    _parameterIdentification.Configuration.LLOQMode = LLOQModes.OnlyObservedData;
    _parameterIdentification.Configuration.RemoveLLOQMode = RemoveLLOQModes.NoTrailing;
    _parameterIdentification.AddSimulation(_simulation);
    _parameterSelection1 = ParameterSelectionFor(_parameter1, "ParameterPath1");
    _parameterSelection2 = ParameterSelectionFor(_parameter2, "ParameterPath2");
    _identificationParameter = DomainHelperForSpecs.IdentificationParameter("IdParam", min: 10, max: 20, startValue: 15);
    _identificationParameter.AddLinkedParameter(_parameterSelection1);
    _identificationParameter.AddLinkedParameter(_parameterSelection2);

    // Mapping the simulation (with persistence flag true) yields the fake core simulation.
    _modelCoreSimulation = A.Fake <IModelCoreSimulation>();
    A.CallTo(() => _modelCoreSimulationMapper.MapFrom(_simulation,
        true)).Returns(_modelCoreSimulation);

    // One output mapping that uses the simulation and carries observed data.
    _outputMapping = A.Fake <OutputMapping>();
    A.CallTo(() => _outputMapping.UsesSimulation(_simulation)).Returns(true);
    A.CallTo(() => _outputMapping.WeightedObservedData.ObservedData).Returns(DomainHelperForSpecs.ObservedData());
    _parameterIdentification.AddOutputMapping(_outputMapping);
    _simModelBatch = A.Fake <ISimModelBatch>();
    A.CallTo(() => _simModelBatchFactory.Create()).Returns(_simModelBatch);
    _parameterIdentification.AddIdentificationParameter(_identificationParameter);

    // Any residual-calculator request returns the same fake calculator.
    _residualCalculator = A.Fake <IResidualCalculator>();
    A.CallTo(_residualCalculatorFactory).WithReturnType <IResidualCalculator>().Returns(_residualCalculator);
    _algorithm = A.Fake <IOptimizationAlgorithm>();
    A.CallTo(() => _optimizationAlgorithmMapper.MapFrom(_parameterIdentification.AlgorithmProperties)).Returns(_algorithm);
    _cancellationTokenSource = new CancellationTokenSource();
    _cancellationToken = _cancellationTokenSource.Token;

    // The run initializer hands back the configured parameter identification.
    _runInitializer = A.Fake <IParameterIdentifcationRunInitializer>();
    A.CallTo(() => _runInitializer.InitializeRun()).ReturnsAsync(_parameterIdentification);
    // Hook for derived specs to customize the context before initialization.
    PerformExtraInitializationSteps();
    sut.InitializeWith(_runInitializer);
}
/// <summary>
/// Logical OR of two convergence criteria: converged when either one is satisfied.
/// </summary>
public bool HasConverged(IOptimizationAlgorithm algorithm) =>
    criterion1.HasConverged(algorithm) || criterion2.HasConverged(algorithm);
public void Log(IOptimizationAlgorithm algorithm)
{
    // Record the best design vector and objective value of the current iteration.
    bestContinuousVariables.Add(algorithm.BestPosition);
    //bestIntegerVariables.Add();  // TODO: integer design variables are not logged yet
    bestObjectives.Add(algorithm.BestFitness);
}
// Forwards an algorithm selection change from the view to the presenter.
private void updateAlgorithm(IOptimizationAlgorithm newValue) => _presenter.ChangeOptimizationAlgorithm(newValue);