/// <summary>
/// Persists the current generation's top-performing session's network (with its
/// evaluation score and the epoch number) via a <c>NeuralNetworkSaver</c>
/// targeting the "\networks" directory.
/// </summary>
/// <param name="epoch">Epoch number recorded alongside the saved network.</param>
internal void SaveBestPerformer(int epoch)
{
    ITrainingSession best = _generation.GetBestPerformer();
    var networkSaver = new NeuralNetworkSaver("\\networks");
    networkSaver.SaveNeuralNetwork(best.NeuralNet, best.GetSessionEvaluation(), epoch);
}
/// <summary>
/// Top-level training loop: seeds the starter generation, then for each epoch runs
/// <c>GenerationsPerEpoch</c> create/run cycles, logging the top evaluations of each
/// generation, and finishes the epoch by refreshing and saving the best performer.
/// </summary>
public void RunSimulation()
{
    RunStarterGeneration();
    // Epochs start at 1 because RunStarterGeneration covers the initial population.
    for (int epoch = 1; epoch < _evolutionConfig.NumEpochs; epoch++)
    {
        for (int generation = 0; generation < _evolutionConfig.GenerationsPerEpoch; generation++)
        {
            if (generation == 0)
            {
                // First generation of an epoch is seeded with the previous epoch's best performer.
                LoggerFactory.GetLogger().Log(LogLevel.Info, $"Creating next generation with top performer with eval: {_bestPerformerOfEpoch.GetSessionEvaluation()}");
                createNextGeneration(_bestPerformerOfEpoch);
            }
            else
            {
                createNextGeneration(null);
            }
            _generation.Run();

            // Fix: materialize the sorted evals once. The original kept this as a deferred
            // OrderByDescending query and enumerated it three times (Count(), Take(), and the
            // report loop), re-sorting the full result set on every enumeration.
            List<double> evals = _generation.GetEvalsForGeneration().OrderByDescending(d => d).ToList();
            int evalsTotalCount = evals.Count;
            if (_evolutionConfig.NumTopEvalsToReport > 0)
            {
                evals = evals.Take(_evolutionConfig.NumTopEvalsToReport).ToList();
            }

            // Report the selected top evals in ascending order, so the best appears last.
            var sb = new StringBuilder();
            foreach (var t in evals.OrderBy(d => d))
            {
                sb.AppendLine($"eval: {t}");
            }
            LoggerFactory.GetLogger().Log(LogLevel.Info, sb.ToString());
            LoggerFactory.GetLogger().Log(LogLevel.Info, $"count: {evalsTotalCount}");
            LoggerFactory.GetLogger().Log(LogLevel.Info, $"Epoch: {epoch}, Generation: {generation}");
        }

        // End of epoch: let the optional epoch action pick the best performer,
        // otherwise take the best session of the final generation, then persist it.
        if (_epochAction != null)
        {
            LoggerFactory.GetLogger().Log(LogLevel.Info, "Updating best performer");
            _bestPerformerOfEpoch = _epochAction.UpdateBestPerformer(_generation, epoch);
        }
        else
        {
            _bestPerformerOfEpoch = GetBestPerformerOfGeneration();
        }
        SaveBestPerformer(epoch);
    }
}
/// <summary>
/// Builds the next generation from the current one: keeps the top 50% of sessions,
/// breeds 35% as children, and generates 15% fresh networks (remainder from integer
/// truncation is folded into the fresh-network count). The very top 10% of kept
/// sessions carry over untouched; everything else may be mutated before being wrapped
/// in new training sessions. Replaces <c>_generation</c> as a side effect.
/// </summary>
/// <param name="bestPerformer">Optional best performer from a previous epoch to force
/// into the new generation; pass null to skip seeding.</param>
private void createNextGeneration(ITrainingSession bestPerformer)
{
    Stopwatch watch = new Stopwatch();
    watch.Start();
    // Population split: 50% survivors, 35% bred children, 15% brand-new networks.
    int numberOfTopPerformersToChoose = (int)(_generationConfig.GenerationPopulation * 0.50);
    int numToBreed = (int)(_generationConfig.GenerationPopulation * 0.35);
    int numToGen = (int)(_generationConfig.GenerationPopulation * 0.15);
    // Integer truncation can leave the three buckets short of the full population;
    // pad the new-network bucket so counts add up exactly.
    if (numberOfTopPerformersToChoose + numToBreed + numToGen < _generationConfig.GenerationPopulation)
    {
        numToGen += _generationConfig.GenerationPopulation - (numberOfTopPerformersToChoose + numToBreed + numToGen);
    }
    var sessions = _generation.GetBestPerformers(numberOfTopPerformersToChoose);
    if (bestPerformer != null)
    {
        LoggerFactory.GetLogger().Log(LogLevel.Info, "Best performer found for creating generation");
        // NOTE(review): this assumes GetGenes() inequality reflects value (gene) equality;
        // if it returns a fresh collection each call, `!=` is a reference comparison and
        // this condition is always true — confirm against the INeuralNetwork implementation.
        if (sessions.All(s => s.NeuralNet.GetGenes() != bestPerformer.NeuralNet.GetGenes()))
        {
            LoggerFactory.GetLogger()
                .Log(LogLevel.Info,
                    $"Best performer adding to sessions with eval {bestPerformer.GetSessionEvaluation()}");
            // Evict the weakest of the chosen top performers in favor of the seeded best,
            // then restore descending-by-evaluation order.
            sessions[sessions.Count - 1] = bestPerformer;
            sessions = sessions.OrderByDescending(s => s.GetSessionEvaluation()).ToList();
            LoggerFactory.GetLogger()
                .Log(LogLevel.Info, $"session 0 eval: {sessions[0].GetSessionEvaluation()}");
        }
        else
        {
            LoggerFactory.GetLogger()
                .Log(LogLevel.Info, $"Best performer already in generation: not adding.");
        }
    }
    // Record the best evaluation of this cohort; also used by DetermineMutateChance
    // (presumably to adapt mutation pressure — confirm in _history's consumer).
    _history.AddEval(sessions[0].GetSessionEvaluation());
    var mutateChance = DetermineMutateChance();
    IList<INeuralNetwork> children = _breeder.Breed(sessions, numToBreed);
    var newSessions = new List<ITrainingSession>();
    //Allow the very top numToLiveOn sessions to be added to next generation untouched
    int numToLiveOn = sessions.Count / 10;
    var sessionsToLiveOn = sessions.Take(numToLiveOn).ToList();
    newSessions.AddRange(sessionsToLiveOn);
    //try to mutate session that will live on 1 by 1
    var mutatedTop = GetMutatedNetworks(sessionsToLiveOn.Select(s => s.NeuralNet), mutateChance);
    //For each session that lived on that was mutated, remove last top performer, then mutate remaining top performers in batch and add
    // (mutatedTop.Count slots are given to the mutated live-on copies, so the same number
    // of trailing top performers is dropped to keep the population size constant.)
    sessions = sessions.Skip(numToLiveOn).ToList();
    var sessionSubset = sessions.Take(sessions.Count - mutatedTop.Count);
    IList<INeuralNetwork> toKeepButPossiblyMutate = sessionSubset.Select(session => session.NeuralNet).ToList();
    IList<INeuralNetwork> newNetworks = getNewNetworks(numToGen, mutateChance);
    List<INeuralNetwork> toTryMutate = new List<INeuralNetwork>();
    //try to mutate both new networks as well as all the top performers we wanted to keep
    toTryMutate.AddRange(toKeepButPossiblyMutate);
    toTryMutate.AddRange(newNetworks);
    bool didMutate;
    IList<INeuralNetwork> maybeMutated = _mutator.Mutate(toTryMutate, mutateChance, out didMutate);
    // Assemble the full next population: mutated live-on copies, bred children, and the
    // possibly-mutated keepers/new networks, each wrapped in a fresh training session.
    List<INeuralNetwork> allToAdd = new List<INeuralNetwork>();
    allToAdd.AddRange(mutatedTop);
    allToAdd.AddRange(children);
    allToAdd.AddRange(maybeMutated);
    // NOTE(review): sessionNumber restarts at 0 here while the untouched live-on sessions
    // keep their old numbers — confirm duplicate session numbers are acceptable downstream.
    newSessions.AddRange(allToAdd.Select((net, sessionNumber) => new TrainingSession(net, _evaluatableFactory.Create(net), sessionNumber)));
    _generation = new Generation(newSessions, _generationConfig);
    watch.Stop();
    LoggerFactory.GetLogger().Log(LogLevel.Debug, $"create generation runtime (sec): {watch.Elapsed.TotalSeconds}");
    watch.Reset();
}