public virtual void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    // Evaluate in single-file each genome within the population.
    // Only evaluate new genomes (those with EvaluationCount==0).
    int count = pop.GenomeList.Count;
    for(int i=0; i<count; i++)
    {
        IGenome g = pop.GenomeList[i];
        if(g.EvaluationCount!=0)
            continue;

        INetwork network = g.Decode(activationFn);
        if(network==null)
        {
            // Future genomes may not decode - handle the possibility.
            g.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
        }
        else
        {
            g.Fitness = Math.Max(networkEvaluator.EvaluateNetwork(network), EvolutionAlgorithm.MIN_GENOME_FITNESS);
        }

        // Reset these genome level statistics.
        g.TotalFitness = g.Fitness;
        g.EvaluationCount = 1;

        // Update master evaluation counter.
        evaluationCount++;
    }
}
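// The loop above only relies on networkEvaluator exposing EvaluateNetwork(INetwork) returning a
// double fitness. A minimal sketch of such an evaluator is shown below; the class name and the
// scoring rule are illustrative assumptions (the real INetworkEvaluator interface in this codebase
// may declare additional members, e.g. the behavior-emitting overload used elsewhere).
public class MinimalNetworkEvaluator
{
    public double EvaluateNetwork(INetwork network)
    {
        // Feed a constant input, let activation propagate for a few steps, and score the first output.
        network.ClearSignals();
        network.SetInputSignal(0, 1.0);
        network.MultipleSteps(3);
        double output = network.GetOutputSignal(0);

        // Never return a negative fitness; the calling loop clamps to MIN_GENOME_FITNESS anyway.
        return Math.Max(output, 0.0);
    }
}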
/// <summary>
/// Constructor.
/// </summary>
public EvolutionAlgorithm(Population pop, IPopulationEvaluator populationEvaluator, NeatParameters neatParameters)
{
    this.pop = pop;
    this.populationEvaluator = populationEvaluator;
    this.neatParameters = neatParameters;
    neatParameters_Normal = neatParameters;

    neatParameters_PrunePhase = new NeatParameters(neatParameters);
    neatParameters_PrunePhase.pMutateAddConnection = 0.0;
    neatParameters_PrunePhase.pMutateAddNode = 0.0;
    neatParameters_PrunePhase.pMutateAddModule = 0.0;
    neatParameters_PrunePhase.pMutateConnectionWeights = 0.33;
    neatParameters_PrunePhase.pMutateDeleteConnection = 0.33;
    neatParameters_PrunePhase.pMutateDeleteSimpleNeuron = 0.33;

    // Disable all crossover as this has a tendency to increase complexity, which is precisely what
    // we don't want during a pruning phase.
    neatParameters_PrunePhase.pOffspringAsexual = 1.0;
    neatParameters_PrunePhase.pOffspringSexual = 0.0;

    if(neatParameters.multiobjective)
    {
        this.multiobjective = new Multiobjective.Multiobjective(neatParameters);
        neatParameters.compatibilityThreshold = 100000000.0; // Disable speciation when running multiobjective.
    }

    if(neatParameters.noveltySearch)
    {
        if(neatParameters.noveltyHistogram)
        {
            this.noveltyFixed = new noveltyfixed(neatParameters.archiveThreshold);
            this.histogram = new noveltyhistogram(neatParameters.histogramBins);
            noveltyInitialized = true;
            InitialisePopulation();
        }

        if(neatParameters.noveltyFixed || neatParameters.noveltyFloat)
        {
            this.noveltyFixed = new noveltyfixed(neatParameters.archiveThreshold);
            InitialisePopulation();
            noveltyFixed.initialize(this.pop);
            noveltyInitialized = true;
            populationEvaluator.EvaluatePopulation(pop, this);
            UpdateFitnessStats();
            DetermineSpeciesTargetSize();
        }
    }
    else
    {
        InitialisePopulation();
    }
}
// Initialize the variables pertaining to the grid bins (min, max, range, bin size, num bins..)
// MAPELITES TODO: Define this in a better way. For now it is hardcoded.
// - a better way: specify in the behavior characterization what the bounds are for each dimension
public void meGridInit(Population pop)
{
    // See how many dimensions we are working with.
    meNumDimensions = pop.GenomeList[0].Behavior.behaviorList.Count;

    // HARDCODED - IMPORTANT: CHANGE THIS TO FIT THE DOMAIN. DO NOT USE TOO MANY BINS FOR THE DIMENSIONALITY!
    int HC_NUMBINS = 36;
    double HC_DEFAULTMIN = 0;
    double HC_DEFAULTMAX = 1;
    int HC_HOWMANYISTOOMANYBINS = 2000;
    int HC_HOWMANYISTOOMANYDIMENSIONS = 14;

    if (meNumDimensions >= HC_HOWMANYISTOOMANYDIMENSIONS)
    {
        Console.WriteLine("[!] Behavior characterization has TOO MANY VALUES for use with MapElites grid! (" + meNumDimensions + " vals, max " + HC_HOWMANYISTOOMANYDIMENSIONS + ")");
        throw new Exception("[!] Behavior characterization has TOO MANY VALUES for use with MapElites grid! (" + meNumDimensions + " vals, max " + HC_HOWMANYISTOOMANYDIMENSIONS + ")");
    }

    double totalNumberOfBins = Math.Pow(HC_NUMBINS, meNumDimensions);
    while (totalNumberOfBins > HC_HOWMANYISTOOMANYBINS)
    {
        if (HC_NUMBINS <= 1)
        {
            Console.WriteLine("[!] Cannot resolve MapElites bins (this should never happen). Try using a BC with fewer values.");
            throw new Exception("[!] Cannot resolve MapElites bins (this should never happen). Try using a BC with fewer values.");
        }

        // Decrement the number of bins per dimension and then recalculate the total.
        HC_NUMBINS--;
        totalNumberOfBins = Math.Pow(HC_NUMBINS, meNumDimensions);
        Console.WriteLine("[!] Too many bins per dimension for this BC, trying fewer bins: " + HC_NUMBINS + " per dimension");
    }

    meNumBins = HC_NUMBINS;
    meMin = new List<double>();
    meMax = new List<double>();
    meRange = new List<double>();
    meBinSize = new List<double>(); // meBinSize = meRange / meNumBins

    for (int i = 0; i < meNumDimensions; i++)
    {
        meMin.Add(HC_DEFAULTMIN);
        meMax.Add(HC_DEFAULTMAX);
        meRange.Add(HC_DEFAULTMAX - HC_DEFAULTMIN);
        meBinSize.Add(meRange[i] / meNumBins);
    }
}
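// For reference, the bin lookup implied by meMin/meBinSize/meNumBins would work roughly as below.
// This helper (meBinCoordinates) is an illustrative sketch, not a method from this codebase:
// clamp each behavior value into [meMin, meMax], then divide by the per-dimension bin size.
public int[] meBinCoordinates(List<double> behaviorList)
{
    int[] coords = new int[meNumDimensions];
    for (int d = 0; d < meNumDimensions; d++)
    {
        double v = Math.Min(Math.Max(behaviorList[d], meMin[d]), meMax[d]);
        int bin = (int)((v - meMin[d]) / meBinSize[d]);

        // A value exactly at the max edge would index one past the last bin; clamp it back.
        coords[d] = Math.Min(bin, meNumBins - 1);
    }
    return coords;
}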
public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    int count = pop.GenomeList.Count;
    evalPack e;
    IGenome g;
    int i;

    for (i = 0; i < count; i++)
    {
        sem.WaitOne();
        g = pop.GenomeList[i];
        e = new evalPack(networkEvaluator, activationFn, g, i % HyperNEATParameters.numThreads, (int)ea.Generation);
        ThreadPool.QueueUserWorkItem(new WaitCallback(evalNet), e);

        // Update master evaluation counter.
        evaluationCount++;

        /*if(printFinalPositions)
            file.WriteLine(g.Behavior.behaviorList[0].ToString() + ", " + g.Behavior.behaviorList[1].ToString());//*/
    }

    // Wait for the last worker threads to finish by draining the semaphore, then restore its count.
    for (int j = 0; j < HyperNEATParameters.numThreads; j++)
    {
        sem.WaitOne();
    }
    for (int j = 0; j < HyperNEATParameters.numThreads; j++)
    {
        sem.Release();
    }

    // Calculate novelty scores.
    if (ea.NeatParameters.noveltySearch)
    {
        ea.CalculateNovelty();
    }
}
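// The evalNet worker queued above is defined elsewhere and not shown here. The semaphore protocol
// it is assumed to follow (and which the WaitOne/Release loops above rely on) is sketched below;
// the evalPack member names (Genome, ActivationFn, Evaluator) are illustrative guesses, not the
// actual definitions from this codebase, and the real evaluator call is likely thread-indexed.
private void evalNet(object state)
{
    evalPack pack = (evalPack)state;
    try
    {
        INetwork network = pack.Genome.Decode(pack.ActivationFn);
        if (network == null)
            pack.Genome.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
        else
            pack.Genome.Fitness = Math.Max(pack.Evaluator.EvaluateNetwork(network),
                                           EvolutionAlgorithm.MIN_GENOME_FITNESS);

        pack.Genome.TotalFitness = pack.Genome.Fitness;
        pack.Genome.EvaluationCount = 1;
    }
    finally
    {
        // Each queued work item holds one semaphore slot; release it so the producer loop
        // (and the final drain loop) can make progress.
        sem.Release();
    }
}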
// Add an existing population from HyperSharpNEAT to the multiobjective population maintained in
// this class; this step is taken before evaluating the multiobjective population through the rank function.
public void addPopulation(Population p)
{
    EvolutionManager em = EvolutionManager.SharedEvolutionManager;

    for(int i=0; i<p.GenomeList.Count; i++)
    {
        bool blacklist = false;
        for(int j=0; j<population.Count; j++)
        {
            // Reject a genome if it is very similar to existing genomes in the population.
            if(distance(p.GenomeList[i].Behavior.objectives, population[j].objectives) < 0.01)
                blacklist = true;
        }

        if(!blacklist)
        {
            // Add the genome if it is unique.
            // We might not need to make copies.
            NeatGenome.NeatGenome copy = (NeatGenome.NeatGenome)em.getGenomeFromID(p.GenomeList[i].GenomeId);
            //new NeatGenome.NeatGenome((NeatGenome.NeatGenome)p.GenomeList[i], p.GenomeList[i].GenomeId);
            copy.objectives = (double[])p.GenomeList[i].Behavior.objectives.Clone();
            population.Add(copy);
        }
    }
}
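// The distance() call above compares objective vectors but is defined elsewhere in this class.
// A plausible sketch, assuming a plain Euclidean distance over equal-length arrays (the actual
// metric used by the codebase may differ, e.g. squared distance without the square root):
private double distance(double[] a, double[] b)
{
    double sum = 0.0;
    int n = Math.Min(a.Length, b.Length);
    for (int k = 0; k < n; k++)
    {
        double diff = a[k] - b[k];
        sum += diff * diff;
    }
    return Math.Sqrt(sum);
}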
public virtual void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    // Evaluate in single-file each genome within the population.
    // Only evaluate new genomes (those with EvaluationCount==0).
    int count = pop.GenomeList.Count;
    for(int i=0; i<count; i++)
    {
        IGenome g = pop.GenomeList[i];
        if(g.EvaluationCount!=0)
            continue;

        INetwork network = g.Decode(activationFn);
        if(network==null)
        {
            // Future genomes may not decode - handle the possibility.
            g.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
        }
        else
        {
            BehaviorType behavior;
            g.Fitness = Math.Max(networkEvaluator.EvaluateNetwork(network, out behavior), EvolutionAlgorithm.MIN_GENOME_FITNESS);
            g.RealFitness = g.Fitness;
            g.Behavior = behavior;
        }

        // Reset these genome level statistics.
        g.TotalFitness = g.Fitness;
        g.EvaluationCount = 1;

        // Update master evaluation counter.
        evaluationCount++;
    }

    if(ea.NeatParameters.noveltySearch && ea.noveltyInitialized)
    {
        ea.CalculateNovelty();
    }
}
//        private IGenome EvenDistributionSelect(Species species)
//        {
//            return species.Members[Utilities.Next(species.SelectionCount)];
//        }

private IGenome TournamentSelect(Population p)
{
    double bestFound = 0.0;
    IGenome bestGenome = null;
    int bound = p.GenomeList.Count;

    // Sample tournamentSize genomes (with replacement) and keep the fittest.
    // Relies on all fitnesses being > 0 (at least MIN_GENOME_FITNESS), so bestGenome is always assigned.
    for(int i=0; i<neatParameters.tournamentSize; i++)
    {
        IGenome next = p.GenomeList[Utilities.Next(bound)];
        if (next.Fitness > bestFound)
        {
            bestFound = next.Fitness;
            bestGenome = next;
        }
    }

    return bestGenome;
}
/// <summary>
/// Default Constructor.
/// </summary>
public EvolutionAlgorithm(Population pop, IPopulationEvaluator populationEvaluator)
    : this(pop, populationEvaluator, new NeatParameters())
{}
public static GenomeList CreateGenomeList(Population seedPopulation, int length, NeatParameters neatParameters, IdGenerator idGenerator)
{
    // Build the list.
    GenomeList genomeList = new GenomeList();
    int seedIdx = 0;

    for(int i=0; i<length; i++)
    {
        NeatGenome newGenome = new NeatGenome((NeatGenome)seedPopulation.GenomeList[seedIdx], idGenerator.NextGenomeId);

        // Reset the connection weights to random values centred on zero.
        foreach(ConnectionGene connectionGene in newGenome.ConnectionGeneList)
            connectionGene.Weight = (Utilities.NextDouble() * neatParameters.connectionWeightRange) - neatParameters.connectionWeightRange/2.0;

        genomeList.Add(newGenome);

        if(++seedIdx >= seedPopulation.GenomeList.Count)
        {
            // Wrap back to the first seed genome.
            seedIdx = 0;
        }
    }

    return genomeList;
}
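// Typical use of the factory method above: clone a saved seed population into a fixed number of
// weight-randomised genomes and wrap them in a new Population. This is an illustrative sketch;
// the population size and local names are assumptions, the method would be qualified by its
// declaring class at a real call site, and the Population(idGenerator, genomeList) constructor
// matches its use elsewhere in this codebase.
GenomeList seededList = CreateGenomeList(seedPopulation, 150, neatParameters, idGenerator);
Population seededPop = new Population(idGenerator, seededList);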
public void initalizeEvolution(Population pop)
{
    if (logOutput != null)
        logOutput.Close();
    logOutput = new StreamWriter(outputFolder + "logfile.txt");

    //IdGenerator idgen = new IdGeneratorFactory().CreateIdGenerator(pop.GenomeList);
    ea = new EvolutionAlgorithm(pop, populationEval, neatParams);
}
// Not entirely concerned with this yet.
public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    List<long> genomeIDs = pop.GenomeList.Select(x => (long)x.GenomeId).ToList();

    // Mostly for memory cleanup -- we don't really need to do this.
    if (genomeBehaviors != null)
        genomeBehaviors.Clear();
    if (fitnessDictionary != null)
        fitnessDictionary.Clear();

    Dictionary<long, KeyValuePair<double[], List<double>>> genomeBs = new Dictionary<long, KeyValuePair<double[], List<double>>>();
    fitnessDictionary = new Dictionary<long, double>();
    genomeSecondaryBehaviors = new Dictionary<long, List<double>>();

    // Break our communication up into 7 roughly equal chunks (maybe there is a better number to select here).
    var genomesChunks = genomeIDs.GroupBy(x => genomeIDs.IndexOf(x) % 7);

    foreach (var chunk in genomesChunks)
    {
        var genomes = serialCallCommunicatorWithIDs(chunk.ToList());
        foreach (var gReturn in genomes)
        {
            genomeBs.Add(gReturn.Key, gReturn.Value);
        }
    }

    while (genomeBs.Count == 0)
    {
        // Send them back; we want the right ones no matter what!
        genomeBs = serialCallCommunicatorWithIDs(genomeIDs);
    }

    try
    {
        int objCount = 3;

        // Assign genome behaviors to population objects.
        foreach (IGenome genome in pop.GenomeList)
        {
            // Calculate our progress in objectives.
            double[] accumObjectives = new double[objCount];
            for (int i = 0; i < objCount; i++)
                accumObjectives[i] = 0.0;

            // Our real fitness is measured by distance traveled.
            genome.RealFitness = fitnessDictionary[genome.GenomeId];
            genome.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;

            // Set the behavior.
            // Objectives should be [ fitness, 0, 0 ] -- to be updated with novelty stuff.
            genome.Behavior = new SharpNeatLib.BehaviorType()
            {
                objectives = genomeBs[genome.GenomeId].Key,
                behaviorList = genomeBs[genome.GenomeId].Value
            };

            if (genomeSecondaryBehaviors.Count > 0)
                genome.SecondBehavior = new SharpNeatLib.BehaviorType()
                {
                    objectives = genomeBs[genome.GenomeId].Key,
                    behaviorList = genomeSecondaryBehaviors[genome.GenomeId]
                };
        }

        //if (ea.NeatParameters.noveltySearch)
        //{
        //    if (ea.NeatParameters.noveltySearch && ea.noveltyInitialized)
        //    {
        //        ea.CalculateNovelty();
        //    }
        //}
    }
    catch (Exception e)
    {
        // Check our last returned object.
        var parsedJson = JObject.Parse((string)lastReturnedObject.Args[0]);
        //Console.WriteLine(parsedJson);
        Console.WriteLine("Error: " + e.Message);
        Console.WriteLine(e.StackTrace);
        throw;
    }
}
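// The GroupBy/IndexOf chunking above is O(n^2) and would merge duplicate IDs into a single chunk.
// An index-based split with the same round-robin effect could look like the sketch below; this is
// an illustrative alternative, not code from the project:
var chunks = genomeIDs
    .Select((id, index) => new { id, index })   // pair each ID with its position
    .GroupBy(x => x.index % 7, x => x.id)        // 7 round-robin chunks keyed by position
    .Select(grp => grp.ToList())
    .ToList();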
// Add an existing population from HyperSharpNEAT to the multiobjective population maintained in
// this class; this step is taken before evaluating the multiobjective population through the rank function.
public void addPopulation(Population p)
{
    for(int i=0; i<p.GenomeList.Count; i++)
    {
        bool blacklist = false;
        for(int j=0; j<population.Count; j++)
        {
            if (distance(p.GenomeList[i].objectives, population[j].objectives) < 0.0001)
            { //JUSTIN: Changed from 0.001 (doesn't seem to help)
                // Reject a genome if it is very similar to existing genomes in the population.
                blacklist = true;
                //Console.Write("Blacklisting: ");
                //foreach (double bla in p.GenomeList[i].objectives) Console.Write(bla + " ");
                //Console.Write("vs ");
                //foreach (double bla in population[j].objectives) Console.Write(bla + " ");
                //Console.WriteLine();
                break;
            }
        }

        if(!blacklist)
        {
            // Add the genome if it is unique.
            // We might not need to make copies.
            NeatGenome.NeatGenome copy = new NeatGenome.NeatGenome((NeatGenome.NeatGenome)p.GenomeList[i], 0);
            //copy.objectives = (double[])p.GenomeList[i].objectives.Clone(); //JUSTIN: Moved this to the NeatGenome copy constructor...
            population.Add(copy);
        }
    }
}
/// <summary>
/// Constructor.
/// </summary>
public EvolutionAlgorithm(Population pop, IPopulationEvaluator populationEvaluator, NeatParameters neatParameters)
{
    this.pop = pop;
    this.populationEvaluator = populationEvaluator;
    this.neatParameters = neatParameters;
    neatParameters_Normal = neatParameters;

    neatParameters_PrunePhase = new NeatParameters(neatParameters);
    neatParameters_PrunePhase.pMutateAddConnection = 0.0;
    neatParameters_PrunePhase.pMutateAddNode = 0.0;
    neatParameters_PrunePhase.pMutateConnectionWeights = 0.33;
    neatParameters_PrunePhase.pMutateDeleteConnection = 0.33;
    neatParameters_PrunePhase.pMutateDeleteSimpleNeuron = 0.33;

    // Disable all crossover as this has a tendency to increase complexity, which is precisely what
    // we don't want during a pruning phase.
    neatParameters_PrunePhase.pOffspringAsexual = 1.0;
    neatParameters_PrunePhase.pOffspringSexual = 0.0;

    InitialisePopulation();
}
/// <summary>
/// Constructor.
/// </summary>
public EvolutionAlgorithm(Population pop, IPopulationEvaluator populationEvaluator, NeatParameters neatParameters)
{
    this.pop = pop;
    this.populationEvaluator = populationEvaluator;
    this.neatParameters = neatParameters;
    neatParameters_Normal = neatParameters;

    neatParameters_PrunePhase = new NeatParameters(neatParameters);
    neatParameters_PrunePhase.pMutateAddConnection = 0.0;
    neatParameters_PrunePhase.pMutateAddNode = 0.0;
    neatParameters_PrunePhase.pMutateAddModule = 0.0;
    neatParameters_PrunePhase.pMutateConnectionWeights = 0.33;
    neatParameters_PrunePhase.pMutateDeleteConnection = 0.33;
    neatParameters_PrunePhase.pMutateDeleteSimpleNeuron = 0.33;

    // Disable all crossover as this has a tendency to increase complexity, which is precisely what
    // we don't want during a pruning phase.
    neatParameters_PrunePhase.pOffspringAsexual = 1.0;
    neatParameters_PrunePhase.pOffspringSexual = 0.0;

    if (neatParameters.mapelites)
    {
        meInitialisePopulation();
        meGridInit(pop);
        Console.WriteLine("Mapelites stuff has been initialized. Oh btw, we're doing mapelites.");
        if (neatParameters.me_simpleGeneticDiversity)
        {
            Console.WriteLine("Mapelites reinforced by the power of 51MPLE gENET1C d1VER51TY!!!!1 *fireworks* *applause* *receive phd*");
        }
        if (neatParameters.me_noveltyPressure)
        {
            Console.WriteLine("Mapelites now with NOVELTY PRESSURE! (>'')>");
        }
    }
    // Skip all of the other initialization below if we are doing MapElites.
    else if (neatParameters.NS2)
    {
        if (neatParameters.NS1)
            ns1 = true;
        ns2InitializePopulation();
        if (neatParameters.track_me_grid)
        {
            Console.WriteLine("Initializing mapelites-style-grid genome tracking..");
            meGridInit(pop);
        }
        Console.WriteLine("Novelty Search 2.0 has been initialized.");
    }
    // Skip the code jungle below if we are doing Novelty Search 2.0.
    else if (neatParameters.NSLC) // (Steady-State NSLC -- NEW!!)
    {
        // TODO: JUSTIN: SS-NSLC GOES HERE!
        ns1 = true;
        ns2InitializePopulation();
        if (neatParameters.track_me_grid)
        {
            Console.WriteLine("Initializing mapelites-style-grid genome tracking..");
            meGridInit(pop);
        }
        Console.WriteLine("Initializing STEADY STATE -- NSLC! NEW! This is a thing that is happening now. You cannot stop it. Relax.");
        // TODO: INITIALIZATION for SS-NSLC (it is like NS1... but make it separate so we can stop being so intertwined. Cleaner is better.)
    }
    // Skip the remaining, unverified initialization paths below if doing Steady-State NSLC.
    else
    {
        if (neatParameters.multiobjective)
        {
            this.multiobjective = new Multiobjective.Multiobjective(neatParameters);
            neatParameters.compatibilityThreshold = 100000000.0; // Disable speciation when running multiobjective.
        }

        if (neatParameters.noveltySearch)
        {
            if (neatParameters.noveltyHistogram)
            {
                this.noveltyFixed = new noveltyfixed(neatParameters.archiveThreshold);
                this.histogram = new noveltyhistogram(neatParameters.histogramBins);
                noveltyInitialized = true;
                InitialisePopulation();
            }

            if (neatParameters.noveltyFixed || neatParameters.noveltyFloat)
            {
                this.noveltyFixed = new noveltyfixed(neatParameters.archiveThreshold);
                InitialisePopulation();
                noveltyFixed.initialize(this.pop);
                noveltyInitialized = true;
                populationEvaluator.EvaluatePopulation(pop, this);
                UpdateFitnessStats();
                DetermineSpeciesTargetSize();
            }

            if (neatParameters.track_me_grid)
            {
                Console.WriteLine("Initializing mapelites-style-grid genome tracking..");
                meGridInit(pop); // JUSTIN: Trying to add grid-tracking to NS1
            }
        }
        else
        {
            InitialisePopulation();
            if (neatParameters.track_me_grid)
            {
                Console.WriteLine("Initializing mapelites-style-grid genome tracking..");
                meGridInit(pop); // JUSTIN: Trying to add grid-tracking to fitness-based search
            }
        }
    }
}
public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    var count = pop.GenomeList.Count;

    #region Reset the genomes
    for (var i = 0; i < count; i++)
    {
        pop.GenomeList[i].TotalFitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
        pop.GenomeList[i].EvaluationCount = 0;
        pop.GenomeList[i].Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
    }
    #endregion

    //TODO: Parallelize/Distribute this loop
    //Ideally we should have a distributed method which returns an array of
    //doubles to add to the genome fitnesses of each individual.
    for (var i = 0; i < count; i++)
    {
        Console.WriteLine("Individual #{0}", i + 1);
        var g = pop.GenomeList[i];
        var network = g.Decode(ActivationFunction);
        if (network == null)
        {
            // Future genomes may not decode - handle the possibility.
            g.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
            g.TotalFitness = g.Fitness;
            g.EvaluationCount = 1;
            continue;
        }

        HandEngine engine = new HandEngine();

        // Run multiple hands per individual.
        for (var curGame = 0; curGame < GamesPerEvaluation; curGame++)
        {
            #region Setup the players for this game
            var field = new List<Seat>();
            var stacks = GetStacks(PlayersPerGame);
            var networks = new int[PlayersPerGame];
            networks[0] = i;

            IPlayer hero = null;//new NeuralNetworkPlayer(InputGenerator, OutputInterpreter,
                                //                        network, Rand);
            field.Add(new Seat(1, "Net_" + i, stacks[0], hero));

            for (var curPlayer = 1; curPlayer < PlayersPerGame; curPlayer++)
            {
                INetwork nextNetwork = null;
                var next = 0;
                while (nextNetwork == null)
                {
                    next = Rand.Next(0, count);
                    nextNetwork = pop.GenomeList[next].Decode(ActivationFunction);
                }
                networks[curPlayer] = next;

                //"NeuralNet" + next, stacks[curPlayer],
                IPlayer villain = null;// new NeuralNetworkPlayer(InputGenerator,
                                       //     OutputInterpreter, nextNetwork, Rand);
                field.Add(new Seat(curPlayer + 1, "Net" + next + "_Seat+ " + (curPlayer + 1), stacks[curPlayer], villain));
            }
            #endregion

            // Have the players play a single hand.
            HandHistory history = new HandHistory(field.ToArray(), (ulong)curGame + 1,
                                                  (uint)(curGame % PlayersPerGame + 1),
                                                  new double[] { 1, 2 }, 0, BettingType);
            CachedHand hand = CachedHands[Rand.Next(CachedHands.Count)];
            engine.PlayHand(history);

            #region Add the results to the players' fitness scores
            // We'll use the profit as the fitness function.
            // Alternatively, we could in the future experiment with using profit
            // as a percentage of the original stacks. Or we could use the square
            // of the profit (multiplying by -1 if the player lost money).
            for (var curResult = 0; curResult < PlayersPerGame; curResult++)
            {
                var curGenome = pop.GenomeList[networks[curResult]];
                curGenome.TotalFitness += field[curResult].Chips - stacks[curResult];
                curGenome.EvaluationCount++;
            }
            #endregion

            if (GamesPlayed % 10000 == 0)
                using (TextWriter writer = new StreamWriter("game_" + GamesPlayed + ".txt"))
                    writer.WriteLine(history.ToString());

            // Increment the game counter.
            GamesPlayed++;
        }
    }

    // Clamp the fitness scores (intended as a win-rate normalization) to the minimum genome fitness.
    for (var i = 0; i < count; i++)
    {
        pop.GenomeList[i].Fitness = Math.Max(pop.GenomeList[i].Fitness, EvolutionAlgorithm.MIN_GENOME_FITNESS);
        pop.GenomeList[i].TotalFitness = Math.Max(pop.GenomeList[i].Fitness, EvolutionAlgorithm.MIN_GENOME_FITNESS);
    }
}
/// <summary>
/// Biased (fitness-proportionate) select.
/// </summary>
/// <param name="p">Population to select from.</param>
/// <returns></returns>
private IGenome RouletteWheelSelect(Population p)
{
    double selectValue = (Utilities.NextDouble() * p.SelectionTotalFitness);
    double accumulator = 0.0;

    int genomeBound = p.GenomeList.Count;
    for(int genomeIdx=0; genomeIdx<genomeBound; genomeIdx++)
    {
        IGenome genome = p.GenomeList[genomeIdx];

        accumulator += genome.Fitness;
        if(selectValue <= accumulator)
            return genome;
    }

    // Should never reach here.
    return null;
}
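// The wheel above only covers the whole population if p.SelectionTotalFitness equals the sum of
// the fitnesses it accumulates. A sketch of that invariant check (illustrative only, not code
// from the project):
double expectedTotal = 0.0;
foreach (IGenome genome in p.GenomeList)
    expectedTotal += genome.Fitness;
// expectedTotal should match p.SelectionTotalFitness; if the cached total is larger, selectValue
// can exceed the accumulated fitness and the "should never reach here" branch becomes reachable.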
public void initalizeEvolution(Population pop)
{
    logOutput = new StreamWriter(outputFolder + "logfile.txt");

    //IdGenerator idgen = new IdGeneratorFactory().CreateIdGenerator(pop.GenomeList);
    ea = new EvolutionAlgorithm(pop, experiment.PopulationEvaluator, experiment.DefaultNeatParameters);
}
public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    int count = pop.GenomeList.Count;
    evalPack e;
    IGenome g;
    int i;

    for (i = 0; i < count; i++)
    {
        sem.WaitOne();
        g = pop.GenomeList[i];
        e = new evalPack(networkEvaluator, activationFn, g);
        ThreadPool.QueueUserWorkItem(new WaitCallback(evalNet), e);

        // Update master evaluation counter.
        evaluationCount++;
    }

    // Drain the semaphore to wait for the last workers to finish, then restore its count.
    for (int j = 0; j < HyperNEATParameters.numThreads; j++)
    {
        sem.WaitOne();
    }
    for (int j = 0; j < HyperNEATParameters.numThreads; j++)
    {
        sem.Release();
    }
}
/// <summary>
/// Initializes the EA using an initial population that has already been read into object format.
/// </summary>
/// <param name="pop"></param>
public void initalizeEvolution(Population pop)
{
    LogOutput = Logging ? new StreamWriter(Path.Combine(OutputFolder, "log.txt")) : null;
    FinalPositionOutput = FinalPositionLogging ? new StreamWriter(Path.Combine(OutputFolder, "final-position.txt")) : null;
    ArchiveModificationOutput = FinalPositionLogging ? new StreamWriter(Path.Combine(OutputFolder, "archive-mods.txt")) : null;

    ComplexityOutput = new StreamWriter(Path.Combine(OutputFolder, "complexity.txt"));
    ComplexityOutput.WriteLine("avg,stdev,min,max");

    if (FinalPositionLogging)
    {
        FinalPositionOutput.WriteLine("ID,x,y");
        ArchiveModificationOutput.WriteLine("ID,action,time,x,y");
    }

    EA = new EvolutionAlgorithm(pop, experiment.PopulationEvaluator, experiment.DefaultNeatParameters);
    EA.outputFolder = OutputFolder;
    EA.neatBrain = NEATBrain;
}
public EvolutionAlgorithm initializeEvolutionAlgorithm(IPopulationEvaluator popEval, int popSize, AssessGenotypeFunction assess, List<long> parentGenomeIDs = null)
{
    // We have to add our seed to the parents!
    if (officialSeedGenome != null)
    {
        // If we aren't given any parents, make a new list, and add the seed.
        if (parentGenomeIDs == null)
            parentGenomeIDs = new List<long>();

        parentGenomeIDs.Add(officialSeedGenome.GenomeId);
    }

    // Create our initial population, using seeds or not, making sure it is at least "popSize" long.
    GenomeList gl = createGenomeList(popSize, assess, parentGenomeIDs);

    // Now we have a genome list full of our parents (if they didn't die in a car crash when we were young, yay!).
    // Also, these parents, their connections, and neurons are safely catalogued by WIN (eventually...).
    Population pop = new Population(idgen, gl);

    // Create our algorithm.
    evoAlgorithm = new EvolutionAlgorithm(pop, popEval, neatParams, assess);

    createExperimentDirectory();

    // Send it away!
    return evoAlgorithm;
}