/// <summary>
/// Test fixture setup: creates the hand server and a cached-hand helper
/// seeded with a fresh random source.
/// </summary>
public void SetUp()
{
    server = new HandServer();
    var rng = new Random();
    cache = new CachedHand(6, rng);
}
/// <summary>
/// Evaluates every genome in the population by playing
/// <c>GamesPerEvaluation</c> poker hands per individual against randomly
/// drawn opponents from the same population. Each genome's profit
/// (ending chips minus starting stack) is accumulated into
/// <c>TotalFitness</c> and its <c>EvaluationCount</c> is incremented once
/// per hand played.
/// </summary>
/// <param name="pop">Population whose genomes are evaluated in place.</param>
/// <param name="ea">Evolution algorithm driving the run (not read here;
/// kept for the evaluator interface).</param>
public void EvaluatePopulation(Population pop, EvolutionAlgorithm ea)
{
    var count = pop.GenomeList.Count;

    #region Reset the genomes
    for (var i = 0; i < count; i++)
    {
        pop.GenomeList[i].TotalFitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
        pop.GenomeList[i].EvaluationCount = 0;
        pop.GenomeList[i].Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
    }
    #endregion

    //TODO: Parallelize/Distribute this loop
    //Ideally we should have a distributed method which returns an array of
    //doubles to add to the genome fitnesses of each individual.
    for (var i = 0; i < count; i++)
    {
        Console.WriteLine("Individual #{0}", i + 1);
        var g = pop.GenomeList[i];
        var network = g.Decode(ActivationFunction);
        if (network == null)
        {
            // Future genomes may not decode - handle the possibility.
            g.Fitness = EvolutionAlgorithm.MIN_GENOME_FITNESS;
            g.TotalFitness = g.Fitness;
            g.EvaluationCount = 1;
            continue;
        }

        HandEngine engine = new HandEngine();

        //Run multiple hands per individual
        for (var curGame = 0; curGame < GamesPerEvaluation; curGame++)
        {
            #region Setup the players for this game
            var field = new List<Seat>();
            var stacks = GetStacks(PlayersPerGame);
            var networks = new int[PlayersPerGame];
            networks[0] = i;
            // NOTE(review): the actual NeuralNetworkPlayer wiring is commented
            // out, so every seat currently holds a null player; the seats are
            // still created so the engine can run the hand and chips can move.
            IPlayer hero = null;//new NeuralNetworkPlayer(InputGenerator, OutputInterpreter,
                                // network, Rand);
            field.Add(new Seat(1, "Net_" + i, stacks[0], hero));
            for (var curPlayer = 1; curPlayer < PlayersPerGame; curPlayer++)
            {
                // Draw random opponents, retrying until one decodes to a
                // usable network (some genomes may fail to decode).
                INetwork nextNetwork = null;
                var next = 0;
                while (nextNetwork == null)
                {
                    next = Rand.Next(0, count);
                    nextNetwork = pop.GenomeList[next].Decode(ActivationFunction);
                }
                networks[curPlayer] = next;
                //"NeuralNet" + next, stacks[curPlayer],
                IPlayer villain = null; // new NeuralNetworkPlayer(InputGenerator,
                                        // OutputInterpreter, nextNetwork, Rand);
                field.Add(new Seat(curPlayer + 1, "Net" + next + "_Seat+ " + (curPlayer + 1),
                                   stacks[curPlayer], villain));
            }
            #endregion

            //Have the players play a single hand.
            HandHistory history = new HandHistory(field.ToArray(), (ulong)curGame + 1,
                (uint)(curGame % PlayersPerGame + 1), new double[] { 1, 2 }, 0, BettingType);
            // NOTE(review): 'hand' is drawn but never passed to the engine.
            // The draw is intentionally kept so the shared Rand sequence (and
            // therefore every downstream random decision) is unchanged.
            // TODO: either feed the cached hand to PlayHand or remove the draw.
            CachedHand hand = CachedHands[Rand.Next(CachedHands.Count)];
            engine.PlayHand(history);

            #region Add the results to the players' fitness scores
            //We'll use the profit as the fitness function.
            //Alternatively, we could in the future experiment with using profit
            //as a percentage of the original stacks. Or we could use the square
            //of the profit (multiplying by -1 if the player lost money).
            for (var curResult = 0; curResult < PlayersPerGame; curResult++)
            {
                var curGenome = pop.GenomeList[networks[curResult]];
                curGenome.TotalFitness += field[curResult].Chips - stacks[curResult];
                curGenome.EvaluationCount++;
            }
            #endregion

            // Periodically dump a full hand history to disk for inspection.
            if (GamesPlayed % 10000 == 0)
            {
                using (TextWriter writer = new StreamWriter("game_" + GamesPlayed + ".txt"))
                    writer.WriteLine(history.ToString());
            }

            //increment the game counter
            GamesPlayed++;
        }
    }

    // Clamp the fitness scores to the minimum allowed value.
    // BUG FIX: the original clamped TotalFitness against *Fitness*
    // (Math.Max(Fitness, MIN)), a copy/paste error that silently discarded
    // the profit accumulated above. Each property is now clamped against
    // its own value.
    // NOTE(review): the original comment promised "normalize to win-rate",
    // but no division by EvaluationCount ever happens — confirm the intended
    // formula before adding it.
    for (var i = 0; i < count; i++)
    {
        pop.GenomeList[i].Fitness = Math.Max(pop.GenomeList[i].Fitness,
            EvolutionAlgorithm.MIN_GENOME_FITNESS);
        pop.GenomeList[i].TotalFitness = Math.Max(pop.GenomeList[i].TotalFitness,
            EvolutionAlgorithm.MIN_GENOME_FITNESS);
    }
}