/// <summary>
/// Solves the specified sudoku.
/// </summary>
/// <param name="sudoku">The sudoku.</param>
public void Solve(Sudoku sudoku)
{
    var ga = new GeneticAlgorithm();
    var population = new Population();

    // create an initial population
    for (var i = 0; i < POPULATION_SIZE; ++i)
    {
        var chromosome = new Chromosome();
        for (var row = 0; row < sudoku.Rows; ++row)
        {
            chromosome.Genes.Add(new SudokuGene());
        }
        population.Add(chromosome);
    }

    var fitnessCalculator = new SudokuFitnessCalculator(sudoku);

    ga.Add(new Elite(0.01));
    ga.Add(new Mutate(0.8));
    ga.Add(new SudokuDiversify(0.2));
    ga.Add(new SudokuCrossOver(0.8));

    ga.Run(population, new TournamentSelection(), fitnessCalculator, this);
}
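The SudokuFitnessCalculator used above is not included in this excerpt. As a rough illustration of what such a fitness function typically measures, the standalone sketch below scores a filled 9x9 grid by counting duplicated digits in every row, column, and 3x3 box (zero conflicts means a valid solution). The class and method names are hypothetical and it is not wired to the framework's Gene/Chromosome types; it only shows the kind of score a Sudoku GA usually minimizes.

using System.Collections.Generic;
using System.Linq;

// Hypothetical conflict-counting score; not the article's SudokuFitnessCalculator,
// just an illustration of the quantity a Sudoku GA typically minimizes.
public static class SudokuConflictScore
{
    // grid is a filled 9x9 board; the return value is the total number of duplicated
    // digits across all rows, columns, and 3x3 boxes (0 means a valid solution).
    public static int CountConflicts(int[,] grid)
    {
        var conflicts = 0;
        for (var i = 0; i < 9; i++)
        {
            conflicts += Duplicates(Enumerable.Range(0, 9).Select(j => grid[i, j])); // row i
            conflicts += Duplicates(Enumerable.Range(0, 9).Select(j => grid[j, i])); // column i
        }
        for (var boxRow = 0; boxRow < 9; boxRow += 3)
            for (var boxCol = 0; boxCol < 9; boxCol += 3)
                conflicts += Duplicates(from r in Enumerable.Range(boxRow, 3)
                                        from c in Enumerable.Range(boxCol, 3)
                                        select grid[r, c]); // 3x3 box
        return conflicts;
    }

    // number of cells in the group that repeat an already-seen digit
    private static int Duplicates(IEnumerable<int> cells)
    {
        var values = cells.ToList();
        return values.Count - values.Distinct().Count();
    }
}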
// This is the set of valid AGMA gear pitches (in units of inch^-1), stored in the ValidPitches array used below.
private static void Main()
{
    //var opty = new GradientBasedOptimization();
    //var opty = new HillClimbing();
    var opty = new GeneticAlgorithm(100);
    var numGears = 2 * NumGearPairs;

    /* Here is the dependent analysis. Take a look at the file/class ForceVelocityPositionAnalysis.cs
     * and notice that it inherits from IDependentAnalysis. By adding this to the optimization method
     * (the opty.Add call just below), we ensure that it is called for any new decision variables
     * found in the process. */
    var FVPAnalysis = new ForceVelocityPositionAnalysis(numGears, outputTorque, inputSpeed, inputPosition);
    opty.Add(FVPAnalysis);

    /* Here is the objective function: minimize mass. Note that it holds a reference to the
     * ForceVelocityPositionAnalysis so that it can query it for exact values of diameter. */
    opty.Add(new massObjective(FVPAnalysis, gearDensity));

    /* Here is an inequality constraint for fitting within the box described above. Again, it
     * needs the position and diameter information stored in ForceVelocityPositionAnalysis. */
    opty.Add(new boundingboxConstraint(FVPAnalysis, boxMinX, boxMaxX, boxMinY, boxMaxY, boxMinZ, boxMaxZ));

    /* And so on: the stress inequality, plus output location and output speed equalities.
     * Details can be found in http://dx.doi.org/10.1115/DETC2009-86780 */
    opty.Add(new stressConstraint(FVPAnalysis, Nf, SFB, SFC));
    opty.Add(new outputLocationConstraint(FVPAnalysis, locationTol, outputX, outputY, outputZ));
    opty.Add(new outputSpeedConstraint(FVPAnalysis, speedTol, outputSpeed));

    for (var i = 0; i < NumGearPairs - 1; i++)
    {
        // each mating gear pair must have the same pitch.
        opty.Add(new samePitch(i * 4 + 1, (i + 1) * 4 + 1));
    }

    /******** Set up Design Space *************/
    /* For the GA and the hill climbing, a completely discrete space is needed. Face width and
     * location parameters should really be continuous, though. Consider removing the 800s below
     * when using a mixed optimization method. */
    var dsd = new DesignSpaceDescription(numGears * 4);
    for (var i = 0; i < numGears; i++)
    {
        dsd[4 * i] = new VariableDescriptor(5, 1000, 1.0);     // number of teeth: integers between 5 and 1000
        dsd[4 * i + 1] = new VariableDescriptor(ValidPitches); // pitches from the AGMA standard
        dsd[4 * i + 2] = new VariableDescriptor(0, 50, 800);   // face width is between 0 and 50 inches
        dsd[4 * i + 3] = new VariableDescriptor(0, 500, 800);  // location is either an angle or a length;
                                                               // a max of 500 inches is generous
    }
    opty.Add(dsd);

    /******** Set up Optimization *************/
    /* The following mish-mash is similar to the previous project - just trying to find a
     * combination of methods that'll lead to the optimal optimization algorithm. */
    //abstractSearchDirection searchDirMethod = new SteepestDescent();
    //opty.Add(searchDirMethod);
    //abstractLineSearch lineSearchMethod = new ArithmeticMean(0.0001, 1, 100);
    //opty.Add(lineSearchMethod);
    opty.Add(new LatinHyperCube(dsd, VariablesInScope.BothDiscreteAndReal));
    opty.Add(new GACrossoverBitString(dsd));
    opty.Add(new GAMutationBitString(dsd));
    opty.Add(new PNormProportionalSelection(optimize.minimize, true, 0.7));
    //opty.Add(new RandomNeighborGenerator(dsd, 3000));
    //opty.Add(new KeepSingleBest(optimize.minimize));
    opty.Add(new squaredExteriorPenalty(opty, 10));
    opty.Add(new MaxAgeConvergence(40, 0.001));
    opty.Add(new MaxFnEvalsConvergence(10000));
    opty.Add(new MaxSpanInPopulationConvergence(15));

    double[] xStar;
    Parameters.Verbosity = VerbosityLevels.AboveNormal;
    // this next line sends the Debug statements from OOOT to the Console.
    Debug.Listeners.Add(new TextWriterTraceListener(Console.Out));

    var timer = Stopwatch.StartNew();
    var fStar = opty.Run(out xStar, numGears * 4);
    printResults(opty, xStar, fStar, timer);
    Console.ReadKey();
}
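The printResults helper called at the end of this listing is not shown in the excerpt. A minimal stand-in, assuming it only needs to report f*, x*, and the elapsed time, might look like the sketch below; the body is an assumption, and the real helper may also report iteration counts or convergence information. The first parameter is typed as GeneticAlgorithm simply because that is the optimizer used in these examples.

using System;
using System.Diagnostics;

// Hypothetical stand-in for the printResults helper used in these examples; the real
// implementation is not shown here and may print additional solver statistics.
private static void printResults(GeneticAlgorithm opty, double[] xStar, double fStar, Stopwatch timer)
{
    timer.Stop();
    Console.WriteLine("Completed in " + timer.Elapsed);
    Console.WriteLine("f* = " + fStar);
    Console.WriteLine("x* = [" + string.Join(", ", xStar) + "]");
}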
static void Main()
{
    Parameters.Verbosity = VerbosityLevels.AboveNormal;
    // this next line sends the Debug statements from OOOT to the Console.
    Trace.Listeners.Add(new TextWriterTraceListener(Console.Out));

    /* First, a new optimization method in the form of a genetic algorithm is created. */
    var optMethod = new GeneticAlgorithm(100);

    /* Then an objective function and constraints are added. Since these inherit from
     * OptimizationToolbox.objectiveFunction and OptimizationToolbox.inequality,
     * the Add function knows where to store them so that they will be invoked by the
     * fitness evaluation section of the GA. */
    optMethod.Add(new efficiencyMeasurement());
    //optMethod.Add(new lessThanManifoldVolume());

    /* GAs cannot explicitly handle inequalities, so a merit function must be added
     * so that the fitness evaluation knows how to combine the constraint with the
     * objective function. */
    //optMethod.Add(new squaredExteriorPenalty(optMethod, 50));

    /* Now a number of convergence criteria are added. Again, since these all
     * inherit from the abstractConvergence class, the Add method knows where
     * to store them. */
    optMethod.Add(new ToKnownBestFConvergence(0.0, 0.5));
    optMethod.Add(new MaxIterationsConvergence(50000));     /* stop after 50,000 iterations (i.e., generations) */
    optMethod.Add(new MaxAgeConvergence(20, 0.000000001));  /* stop after 20 generations of the best not changing */
    optMethod.Add(new MaxSpanInPopulationConvergence(100)); /* stop when the largest distance within the population becomes small */
    optMethod.NumConvergeCriteriaNeeded = 2;                /* two of these criteria must be met to stop the process */

    /* The genetic algorithm is for discrete problems. Therefore we need to provide the optimization algorithm
     * and the subsequent generators with the details of the space. The first variable represents the number of
     * passes in our fictitious problem. We set the lower bound to 1 and the upper bound to 20. The third argument
     * is the delta, and since only integers are possible we set this to 1. The second and third variables are
     * really continuous, but for the purpose of the GA we set a discretization of one ten-thousandth for the
     * second and one one-hundredth for the third. Note that you can provide either the delta or the number of
     * steps; here the 36,000 steps (36,001 discrete values) make increments of one one-hundredth. */
    var SpaceDescriptor = new DesignSpaceDescription
    {
        new VariableDescriptor(1, 20, 1.0),
        new VariableDescriptor(0, 100, 0.0001),
        new VariableDescriptor(-180, 180, 36000)
    };
    optMethod.Add(SpaceDescriptor);

    /* The genetic algorithm requires some more values to be fully specified. These include initial,
     * crossover, and mutation generators, as well as a selector. A Latin hypercube initial sample is
     * first created to assure the population covers the space well. */
    optMethod.Add(new LatinHyperCube(SpaceDescriptor, VariablesInScope.BothDiscreteAndReal));

    /* The typical bit-string approach to mutation and crossover is adopted here. Note that the
     * mutation rate (per candidate) is increased to 0.4 from the default of 0.1, which means that
     * 4 in 10 candidates should experience at least one mutation. No new crossover rate is provided,
     * so the default of 1.7 will be used. This means that between two parents there will likely be
     * 1.7 locations of crossover (a back-of-the-envelope bit-string sizing sketch follows this listing). */
    optMethod.Add(new GAMutationBitString(SpaceDescriptor, 0.4));
    optMethod.Add(new GACrossoverBitString(SpaceDescriptor));

    /* Finally, the selector is added. This RandomPairwiseCompare is often referred to as tournament
     * selection, wherein a random selection of two candidates results in the inferior one being
     * removed from the population. It requires the optimization direction: are lower values better
     * (minimize) or larger (maximize)? */
    optMethod.Add(new RandomPairwiseCompare(optimize.minimize));

    /* For output statements (points in the code where the SearchIO.output(...) function is called),
     * the verbosity is set to AboveNormal, which is high. Even higher verbosity levels may be used,
     * but this will likely cut into the speed of the search process. */
    Parameters.Verbosity = VerbosityLevels.AboveNormal;
    // this next line sends the Debug statements from OOOT to the Console.
    Trace.Listeners.Add(new TextWriterTraceListener(Console.Out));

    var timer = Stopwatch.StartNew();
    /* everything is set; we can now run the algorithm and retrieve the f* and x* values. */
    double[] xOptimal;
    var fOptimal = optMethod.Run(out xOptimal);
    printResults(optMethod, xOptimal, fOptimal, timer);
}
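The bit-string crossover and mutation operators above work on a binary encoding of this design space, which is why they are handed the DesignSpaceDescription. As a rough illustration only (OOOT's actual encoding may pad or pack variables differently), the standalone scratch program below counts the discrete values implied by each VariableDescriptor and the bits a straightforward encoding would need, using the standard ceil(log2(values)) rule.

using System;

// Back-of-the-envelope bit-string sizing for the design space above.
// This is generic GA arithmetic, not OOOT's actual encoding, and it does not call the OOOT API.
public static class BitStringSizing
{
    public static void Main()
    {
        // value counts follow from the (lower, upper, delta-or-steps) choices above
        PrintBits("passes (1..20, delta 1)", valueCount: 20);            // 20 integer values
        PrintBits("variable 2 (0..100, delta 0.0001)", valueCount: 1000001);
        PrintBits("angle (-180..180, 36000 steps)", valueCount: 36001);  // increments of 0.01
    }

    private static void PrintBits(string name, long valueCount)
    {
        // bits needed to index valueCount distinct grid points
        var bits = (int)Math.Ceiling(Math.Log(valueCount, 2));
        Console.WriteLine(name + ": " + valueCount + " values -> " + bits + " bits");
    }
}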