public override bool Execute()
{
    string aim = "Hello friends";
    int dim = aim.Length;

    // Energy of a candidate string: sum over characters of 2 * (candidate char - target char)^2.
    Func<string, double> fn1 = s => s.Select((b, i) => ((int)b - aim[i]) * 2 * ((int)b - aim[i])).Sum();
    Func<string, double> fn = s => fn1(s);

    SimulatedAnnealing<string> sa = new SimulatedAnnealing<string>()
    {
        NeighborhoodProvider = Neighbors,
        EnergyProvider = fn,
        CurrentElement = "^ùsprôqù;rqô;ùvere!wlpc\"!".Substring(0, dim)
    };

    // Run in batches of 100 iterations, restarting each batch from the best element found so far.
    for (int i = 0; i < 100; ++i)
    {
        sa.Run(100);
        sa.CurrentElement = sa.BestFound;
    }

    if (IsVerbose)
    {
        Console.WriteLine(" ::!:: " + sa.BestFound);
    }

    if (sa.BestFound != aim)
    {
        return false;
    }
    return true;
}
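The example above assigns a Neighbors method as the NeighborhoodProvider but does not show it. The following is a minimal sketch of what such a provider could look like for strings, assuming the provider is expected to return candidate strings differing from the input by one character; the name, signature, and neighborhood choice are assumptions, not part of the original example.

// Hypothetical neighborhood provider (requires System.Collections.Generic):
// yields strings that differ from the input by a single character shifted up or down by one code point.
static IEnumerable<string> Neighbors(string current)
{
    for (int i = 0; i < current.Length; i++)
    {
        foreach (int delta in new[] { -1, +1 })
        {
            char[] chars = current.ToCharArray();
            chars[i] = (char)(chars[i] + delta);
            yield return new string(chars);
        }
    }
}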
static List<Simulation> RunSimulatedAnnealing(Simulation s, int duration)
{
    SimulatedAnnealing SA = HeuristicFactory.CreateSA(s, duration, SimulatedAnnealingBase.DebugLevel.All);
    SA.Run();
    return SA.BestSolutions;
}
private static void Main(string[] args)
{
    var voc = Vocabulary.GetVocabularyFromFile(@"Vocabulary.json");
    var grammar = new Grammar(voc);

    ProgramParams programParams;
    using (var file = File.OpenText(@"ProgramParameters.json"))
    {
        var serializer = new JsonSerializer();
        programParams = (ProgramParams)serializer.Deserialize(file, typeof(ProgramParams));
    }

    if (programParams.DataWithMovement)
    {
        CreateMovementGrammar(grammar);
    }
    else
    {
        CreateSimpleGrammar(grammar);
    }
    grammar.GenerateDerivedRulesFromSchema();

    var p = new Parser(grammar);
    var data = p.GenerateSentences(programParams.NumberOfDataSentences);

    using (var sw = File.AppendText("SessionReport.txt"))
    {
        sw.WriteLine("-------------------");
        sw.WriteLine("Session {0} ", DateTime.Now.ToString("MM/dd/yyyy h:mm tt"));
        sw.WriteLine("sentences: {0}, runs: {1}, movement: {2}",
            programParams.NumberOfDataSentences, programParams.NumberOfRuns, programParams.DataWithMovement);
    }

    var stopWatch = StartWatch();

    var learner = new Learner(voc, grammar.NonTerminalsTypeDictionary, grammar.POSTypes, data, grammar);
    learner.originalGrammar.GenerateDerivedRulesFromSchema();
    var targetGrammarEnergy = learner.Energy(learner.originalGrammar);
    learner.originalGrammar.GenerateInitialRulesFromDerivedRules();

    var s = string.Format("Target Hypothesis:\r\n{0} with energy: {1}\r\n", learner.originalGrammar, targetGrammarEnergy);
    Console.WriteLine(s);
    using (var sw = File.AppendText("SessionReport.txt"))
    {
        sw.WriteLine(s);
    }

    for (var i = 0; i < programParams.NumberOfRuns; i++)
    {
        var sa = new SimulatedAnnealing(learner);
        sa.Run();
    }
    StopWatch(stopWatch);
}
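This example calls StartWatch and StopWatch helpers that are not shown. Below is a minimal sketch of what they might look like, assuming they simply wrap System.Diagnostics.Stopwatch and print the elapsed time; both names and behavior are assumptions and the originals may report differently.

// Hypothetical timing helpers assumed by the Main method above (requires System.Diagnostics).
private static Stopwatch StartWatch()
{
    var watch = new Stopwatch();
    watch.Start();
    return watch;
}

private static void StopWatch(Stopwatch watch)
{
    watch.Stop();
    Console.WriteLine("Elapsed: {0}", watch.Elapsed);
}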
/// <summary>
/// Tests a simulated annealing.
/// </summary>
/// <typeparam name="T">The type of a solution component.</typeparam>
/// <param name="testNumber">The number of the test.</param>
/// <param name="testDescription">The description of the test.</param>
/// <param name="simulatedAnnealing">The simulated annealing to test.</param>
/// <param name="objectiveFunction">The objective function to test.</param>
/// <param name="maxIterationCount">The maximum number of iterations (the computational budget).</param>
/// <param name="acceptableEnergy">The acceptable energy.</param>
/// <param name="initialTemperature">The initial temperature.</param>
/// <param name="finalTemperature">The final temperature.</param>
private static void Test<T>(int testNumber, string testDescription,
    SimulatedAnnealing<T> simulatedAnnealing, ObjectiveFunction<T> objectiveFunction,
    int maxIterationCount, double acceptableEnergy,
    double initialTemperature, double finalTemperature)
{
    // Print the number of the test and its description.
    Console.WriteLine("Test " + testNumber + " : " + testDescription);

    // Run the simulated annealing.
    DateTime startTime = DateTime.Now;
    int usedIterationCount;
    double achievedEnergy;
    T[] solution = simulatedAnnealing.Run(objectiveFunction,
        maxIterationCount, out usedIterationCount,
        acceptableEnergy, out achievedEnergy,
        initialTemperature, finalTemperature);
    DateTime endTime = DateTime.Now;

    // Build the solution string.
    StringBuilder solutionSB = new StringBuilder();
    solutionSB.Append("[");
    foreach (T component in solution)
    {
        solutionSB.Append(component + ", ");
    }
    if (solution.Length != 0)
    {
        solutionSB.Remove(solutionSB.Length - 2, 2);
    }
    solutionSB.Append("]");

    // Print the results.
    Console.WriteLine("Test " + testNumber + " : Duration: " + (endTime - startTime));
    Console.WriteLine("Test " + testNumber + " : Number of iterations taken : " + usedIterationCount);
    Console.WriteLine("Test " + testNumber + " : Best solution : " + solutionSB.ToString());
    Console.WriteLine("Test " + testNumber + " : Best solution's evaluation : " + achievedEnergy);
}
private static void Main()
{
    Parameters.Verbosity = VerbosityLevels.Normal;
    // this next line is to set the Debug statements from OOOT to the Console.
    Trace.Listeners.Add(new TextWriterTraceListener(Console.Out));

    /* In this example, we first present how the details of an optimization
     * problem can be saved to an XML-file so that it can be read in
     * and solved as opposed to defining all the details in an imperative
     * (code line by code line) way. In the first function, the xml file
     * name "test1.xml" is created. */
    makeAndSaveProblemDefinition();

    /* now we create a series of different optimization methods and test
     * them on the problem. The problem is now opened from the file and
     * the details are stored in an object of class "Problem Definition". */
    var stream = new FileStream(filename, FileMode.Open);
    double[] xStar;
    ProblemDefinition probTest1 = ProblemDefinition.OpenprobFromXml(stream);
    abstractOptMethod opty;

    /******************Exhaustive Search ***********************/
    //SearchIO.output("******************Exhaustive Search ***********************");
    //Console.ReadKey();
    //opty = new ExhaustiveSearch(probTest1.SpaceDescriptor, optimize.minimize);
    //opty.Add(probTest1);
    ///* No convergence criteria are needed as the process concludes when all
    // * states have been visited, but for this problem that is 4 trillion states. */
    //opty.ConvergenceMethods.Clear();
    ///* if you DID KNOW the best, you can include a criterion like... */
    //opty.ConvergenceMethods.Add(new ToKnownBestXConvergence(new[] { 3.0, 3.0 }, 0.0000001));
    //var timer = Stopwatch.StartNew();
    //var fStar = opty.Run(out xStar);
    ///* you probably will never see this process complete. Even with the added
    // * convergence criteria (which is not factored into the estimated time of
    // * completion), you are probably looking at 1 to 2 years. */
    //printResults(opty, xStar, fStar, timer);

    /***********Gradient Based Optimization with Steepest Descent****************/
    //SearchIO.output("***********Gradient Based Optimization with Steepest Descent****************");
    //Console.ReadKey();
    //opty = new GradientBasedOptimization();
    //opty.Add(probTest1);
    //abstractSearchDirection searchDirMethod = new SteepestDescent();
    //opty.Add(searchDirMethod);
    ////abstractLineSearch lineSearchMethod = new ArithmeticMean(0.0001, 1, 100);
    ////abstractLineSearch lineSearchMethod = new DSCPowell(0.0001, 1, 100);
    //abstractLineSearch lineSearchMethod = new GoldenSection(0.0001, 1);
    //opty.Add(lineSearchMethod);
    //opty.Add(new squaredExteriorPenalty(opty, 10));
    ///* since this is not a population-based optimization method, we need to remove the MaxSpan criteria. */
    //opty.ConvergenceMethods.RemoveAll(a => a is MaxSpanInPopulationConvergence);
    //timer = Stopwatch.StartNew();
    //fStar = opty.Run(out xStar);
    //printResults(opty, xStar, fStar, timer);

    ///***********Gradient Based Optimization with Fletcher-Reeves****************/
    //SearchIO.output("***********Gradient Based Optimization with Fletcher-Reeves****************");
    //Console.ReadKey();
    ///* we don't need to reset (invoke the constructor) for GradientBasedOptimization since we are only
    // * changing the search direction method. */
    //searchDirMethod = new FletcherReevesDirection();
    ///* you could also try the remaining 3 search direction methods. */
    ////searchDirMethod = new CyclicCoordinates();
    ////searchDirMethod = new BFGSDirection();
    ////searchDirMethod = new PowellMethod(0.001, 6);
    //opty.Add(searchDirMethod);
    //timer = Stopwatch.StartNew();
    //opty.ResetFunctionEvaluationDatabase();
    //fStar = opty.Run(out xStar);
    //printResults(opty, xStar, fStar, timer);

    ///******************Generalized Reduced Gradient***********************/
    //SearchIO.output("******************Generalized Reduced Gradient***********************");
    //Console.ReadKey();
    //opty = new GeneralizedReducedGradientActiveSet();
    //opty.Add(probTest1);
    //opty.Add(new squaredExteriorPenalty(opty, 10));
    //opty.ConvergenceMethods.RemoveAll(a => a is MaxSpanInPopulationConvergence);
    //timer = Stopwatch.StartNew();
    //fStar = opty.Run(out xStar);
    //printResults(opty, xStar, fStar, timer);

    /* GRG is the ONLY one here that handles constraints explicitly. It finds the
     * optimum very quickly and accurately. However, many of the others show a
     * better value of f*; this is because they are using an imperfect penalty
     * function (new squaredExteriorPenalty(opty, 10)). While it seems that GRG
     * includes it as well, it is only used in the line search method. */

    /******************Random Hill Climbing ***********************/
    probTest1.SpaceDescriptor = new DesignSpaceDescription(new[]
    {
        new VariableDescriptor(-5000, 5000, 0.1),
        new VariableDescriptor(-5000, 5000, 0.1)
    });
    SearchIO.output("******************Random Hill Climbing ***********************");
    Console.ReadKey();
    opty = new HillClimbing();
    opty.Add(probTest1);
    opty.Add(new squaredExteriorPenalty(opty, 8));
    opty.Add(new RandomNeighborGenerator(probTest1.SpaceDescriptor));
    opty.Add(new KeepSingleBest(optimize.minimize));
    opty.ConvergenceMethods.RemoveAll(a => a is MaxSpanInPopulationConvergence);
    /* the deltaX convergence needs to be removed as well, since RHC will end many iterations
     * at the same point it started. */
    opty.ConvergenceMethods.RemoveAll(a => a is DeltaXConvergence);
    var timer = Stopwatch.StartNew();
    var fStar = opty.Run(out xStar);
    printResults(opty, xStar, fStar, timer);

    /******************Exhaustive Hill Climbing ***********************/
    SearchIO.output("******************Exhaustive Hill Climbing ***********************");
    Console.ReadKey();
    /* Everything else about the Random Hill Climbing stays the same. */
    opty.Add(new ExhaustiveNeighborGenerator(probTest1.SpaceDescriptor));
    timer = Stopwatch.StartNew();
    fStar = opty.Run(out xStar);
    printResults(opty, xStar, fStar, timer);

    /******************Simulated Annealing***********************/
    SearchIO.output("******************Simulated Annealing***********************");
    Console.ReadKey();
    opty = new SimulatedAnnealing(optimize.minimize);
    opty.Add(probTest1);
    opty.Add(new squaredExteriorPenalty(opty, 10));
    opty.Add(new RandomNeighborGenerator(probTest1.SpaceDescriptor, 100));
    opty.Add(new SACoolingSangiovanniVincentelli(100));
    opty.ConvergenceMethods.RemoveAll(a => a is MaxSpanInPopulationConvergence);
    /* the deltaX convergence needs to be removed as well, since SA will end many iterations
     * at the same point it started. */
    opty.ConvergenceMethods.RemoveAll(a => a is DeltaXConvergence);
    timer = Stopwatch.StartNew();
    fStar = opty.Run(out xStar);
    printResults(opty, xStar, fStar, timer);
}
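The OOOT example above repeatedly calls a printResults helper that is not shown. Here is a minimal sketch of what it might do, assuming it only reports the solution vector, the objective value, and the elapsed time; the original project's helper may print additional statistics.

// Hypothetical reporting helper assumed by the example above.
private static void printResults(abstractOptMethod opty, double[] xStar, double fStar, Stopwatch timer)
{
    timer.Stop();
    SearchIO.output("Completed running " + opty.GetType().Name);
    SearchIO.output("x* = [ " + string.Join(", ", xStar) + " ]");
    SearchIO.output("f* = " + fStar);
    SearchIO.output("time = " + timer.Elapsed);
}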
public void NelderOtherHeuristics()
{
    int thinningPeriod = 4;
    int treeCount = 75;
#if DEBUG
    treeCount = 25;
#endif

    PlotsWithHeight nelder = PublicApi.GetNelder();
    OrganonConfiguration configuration = OrganonTest.CreateOrganonConfiguration(new OrganonVariantNwo());
    configuration.Treatments.Harvests.Add(new ThinByIndividualTreeSelection(thinningPeriod));
    OrganonStand stand = nelder.ToOrganonStand(configuration, 20, 130.0F, treeCount);
    stand.PlantingDensityInTreesPerHectare = TestConstant.NelderReplantingDensityInTreesPerHectare;

    Objective landExpectationValue = new Objective()
    {
        IsLandExpectationValue = true,
        PlanningPeriods = 9
    };
    Objective volume = new Objective()
    {
        PlanningPeriods = landExpectationValue.PlanningPeriods
    };
    HeuristicParameters defaultParameters = new HeuristicParameters()
    {
        UseScaledVolume = false
    };

    GeneticParameters geneticParameters = new GeneticParameters(treeCount)
    {
        PopulationSize = 7,
        MaximumGenerations = 5,
        UseScaledVolume = defaultParameters.UseScaledVolume
    };
    GeneticAlgorithm genetic = new GeneticAlgorithm(stand, configuration, landExpectationValue, geneticParameters);
    TimeSpan geneticRuntime = genetic.Run();

    GreatDeluge deluge = new GreatDeluge(stand, configuration, volume, defaultParameters)
    {
        RainRate = 5,
        LowerWaterAfter = 9,
        StopAfter = 10
    };
    deluge.RandomizeTreeSelection(TestConstant.Default.SelectionPercentage);
    TimeSpan delugeRuntime = deluge.Run();

    RecordTravel recordTravel = new RecordTravel(stand, configuration, landExpectationValue, defaultParameters)
    {
        StopAfter = 10
    };
    recordTravel.RandomizeTreeSelection(TestConstant.Default.SelectionPercentage);
    TimeSpan recordRuntime = recordTravel.Run();

    SimulatedAnnealing annealer = new SimulatedAnnealing(stand, configuration, volume, defaultParameters)
    {
        Iterations = 100
    };
    annealer.RandomizeTreeSelection(TestConstant.Default.SelectionPercentage);
    TimeSpan annealerRuntime = annealer.Run();

    TabuParameters tabuParameters = new TabuParameters()
    {
        UseScaledVolume = defaultParameters.UseScaledVolume
    };
    TabuSearch tabu = new TabuSearch(stand, configuration, landExpectationValue, tabuParameters)
    {
        Iterations = 7,
        //Jump = 2,
        MaximumTenure = 5
    };
    tabu.RandomizeTreeSelection(TestConstant.Default.SelectionPercentage);
    TimeSpan tabuRuntime = tabu.Run();

    ThresholdAccepting thresholdAcceptor = new ThresholdAccepting(stand, configuration, volume, defaultParameters);
    thresholdAcceptor.IterationsPerThreshold.Clear();
    thresholdAcceptor.Thresholds.Clear();
    thresholdAcceptor.IterationsPerThreshold.Add(10);
    thresholdAcceptor.Thresholds.Add(1.0F);
    thresholdAcceptor.RandomizeTreeSelection(TestConstant.Default.SelectionPercentage);
    TimeSpan acceptorRuntime = thresholdAcceptor.Run();

    RandomGuessing random = new RandomGuessing(stand, configuration, volume, defaultParameters)
    {
        Iterations = 4
    };
    TimeSpan randomRuntime = random.Run();

    configuration.Treatments.Harvests.Clear();
    configuration.Treatments.Harvests.Add(new ThinByPrescription(thinningPeriod));
    PrescriptionParameters prescriptionParameters = new PrescriptionParameters()
    {
        Maximum = 60.0F,
        Minimum = 50.0F,
        Step = 10.0F,
        UseScaledVolume = defaultParameters.UseScaledVolume
    };
    PrescriptionEnumeration enumerator = new PrescriptionEnumeration(stand, configuration, landExpectationValue, prescriptionParameters);
    TimeSpan enumerationRuntime = enumerator.Run();

    // heuristics assigned to volume optimization
    this.Verify(deluge);
    this.Verify(annealer);
    this.Verify(thresholdAcceptor);
    this.Verify(random);

    // heuristics assigned to net present value optimization
    this.Verify(genetic);
    this.Verify(enumerator);
    this.Verify(recordTravel);
    this.Verify(tabu);

    HeuristicSolutionDistribution distribution = new HeuristicSolutionDistribution(1, thinningPeriod, treeCount);
    distribution.AddRun(annealer, annealerRuntime, defaultParameters);
    distribution.AddRun(deluge, delugeRuntime, defaultParameters);
    distribution.AddRun(thresholdAcceptor, acceptorRuntime, defaultParameters);
    distribution.AddRun(genetic, geneticRuntime, defaultParameters);
    distribution.AddRun(enumerator, enumerationRuntime, defaultParameters);
    distribution.AddRun(recordTravel, recordRuntime, defaultParameters);
    distribution.AddRun(tabu, tabuRuntime, defaultParameters);
    distribution.AddRun(random, randomRuntime, defaultParameters);
    distribution.OnRunsComplete();
}