/// <summary>
/// Compares crossover-heavy versus mutation-heavy GA configurations on the
/// A-n46-k7 instance. For each of the 7 (crossover, mutation) probability pairs
/// it runs the GA 20 times and prints the mean of the final best / average /
/// worst population values over those runs.
/// </summary>
private static void TestCrossoverVsMutation()
{
    var evaluation = new TspEvaluation(@"C:\Users\jbelter\source\repos\machine-learning-cvrp\data\A-n46-k7.vrp");
    // Paired settings: the first four configurations vary crossover only,
    // the last three vary mutation only.
    double[] px = { 0, .1, .8, 1, 0, 0, 0 };
    double[] pw = { 0, 0, 0, 0, .2, .5, 1 };
    for (int j = 0; j < 7; j++)
    {
        // BUG FIX: these accumulators were previously declared outside the
        // configuration loop and never cleared, so every configuration after
        // the first reported averages contaminated by all earlier runs.
        // They must start empty for each (px[j], pw[j]) setting.
        List<double> b = new List<double>();
        List<double> w = new List<double>();
        List<double> a = new List<double>();
        for (int i = 0; i < 20; i++)
        {
            var stopCondition = new IterationsStopCondition(200);
            var generator = new TspGenerator(new Random());
            ASelection selection = new TournamentSelection(5);
            var crossover = new OrderedCrossover(px[j]);
            var mutation = new SwapMutation(pw[j]);
            var optimizer = new GeneticAlgorithm<int>(evaluation, stopCondition, generator, selection, crossover, mutation, 100);
            optimizer.Run();
            // Record the final-iteration statistics of this run.
            b.Add(optimizer.bestSolutions.Last());
            w.Add(optimizer.worstValues.Last());
            a.Add(optimizer.averageValues.Last());
        }
        // LINQ Average() replaces the hand-rolled sum/divide loop; same result.
        Console.WriteLine("avg(best value): " + b.Average().ToString()
            + " avg(average value): " + a.Average().ToString()
            + " avg(worst value): " + w.Average().ToString());
    }
}
/// <summary>
/// Runs the genetic algorithm ten times on the given problem with the supplied
/// hyper-parameters, appends one summary CSV row per run to resultsGA.txt, and
/// saves the per-iteration learning progress of one randomly chosen run to a
/// parameter-stamped file under the progress directory.
/// </summary>
/// <param name="problem">Evaluation function for the instance being solved.</param>
/// <param name="probCrossover">Crossover probability passed to <c>OrderedCrossover</c>.</param>
/// <param name="probMutation">Mutation probability passed to <c>SwapMutation</c>.</param>
/// <param name="population">Population size for the GA.</param>
/// <param name="iterations">Iteration budget for the stop condition.</param>
/// <param name="tournament">Tournament size: a value in (1, population] is used as an
/// integer size, a value in (0, 1] is passed through as a fraction, anything else
/// falls back to a tournament of 5.</param>
private static void TestGA(AEvaluation<int> problem, double probCrossover, double probMutation, int population, int iterations, double tournament)
{
    var results = new List<String>();
    var learningProgress = new List<String>();
    // Pick which of the ten runs will have its learning curve persisted.
    int savedRun = new Random().Next(0, 10);
    for (int run = 0; run < 10; run++)
    {
        var evaluation = problem;
        var stopCondition = new IterationsStopCondition(iterations);
        var generator = new TspGenerator(new Random());
        // Choose the TournamentSelection overload: integer size, fractional
        // size, or the default of 5 when the argument is out of range.
        ASelection selection;
        if (tournament > 1 && tournament <= population)
        {
            selection = new TournamentSelection(Convert.ToInt32(tournament));
        }
        else if (tournament > 0 && tournament <= 1)
        {
            selection = new TournamentSelection(tournament);
        }
        else
        {
            selection = new TournamentSelection(5);
        }
        var crossover = new OrderedCrossover(probCrossover);
        var mutation = new SwapMutation(probMutation);
        var optimizer = new GeneticAlgorithm<int>(evaluation, stopCondition, generator, selection, crossover, mutation, population);
        optimizer.Run();
        if (run == savedRun)
        {
            learningProgress = formatLearning(optimizer.worstValues, optimizer.averageValues, optimizer.bestSolutions);
        }
        // One comma-separated summary row per run.
        results.Add(String.Join(", ",
            problem.iSize.ToString(),
            optimizer.timeTaken.ToString(),
            optimizer.bestSolutions.Last().ToString(),
            optimizer.averageValues.Last().ToString(),
            optimizer.worstValues.Last().ToString(),
            probCrossover.ToString(),
            probMutation.ToString(),
            population.ToString(),
            iterations.ToString(),
            tournament.ToString()));
    }
    SaveToFile(@"C:\Users\jbelter\source\repos\machine-learning-cvrp\resultsGA.txt", results);
    // Progress filename encodes every hyper-parameter; dots are stripped from
    // the whole concatenated string (as in the original) before ".txt" is added.
    string progressPath = (@"C:\Users\jbelter\source\repos\machine-learning-cvrp\progress\GA"
        + "-" + problem.iSize.ToString()
        + "-" + probCrossover.ToString()
        + "-" + probMutation.ToString()
        + "-" + population.ToString()
        + "-" + iterations.ToString()
        + "-" + tournament.ToString()).Replace(".", "") + ".txt";
    SaveToFile(progressPath, learningProgress);
}