/// <summary>
/// Create a genetic-algorithm trainer for the supplied method.
/// (The previous summary said "annealing" — this factory builds an
/// MLMethodGeneticAlgorithm.)
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data used to score genomes.</param>
/// <param name="argsStr">The architecture arguments to parse.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    // Genomes are scored by their error over the supplied training set.
    ICalculateScore fitness = new TrainingSetScore(training);

    // Pull the population size out of the architecture argument string,
    // defaulting to 5000 when absent.
    IDictionary<String, String> parsedArgs = ArchitectureParse.ParseParams(argsStr);
    var paramsHolder = new ParamsHolder(parsedArgs);
    int population = paramsHolder.GetInt(
        MLTrainFactory.PropertyPopulationSize, false, 5000);

    // Each genome starts as a deep copy of the prototype, re-randomized so
    // the initial population is diverse.
    return new MLMethodGeneticAlgorithm(() =>
    {
        var clone = (IMLMethod)ObjectCloner.DeepCopy(method);
        ((IMLResettable)clone).Reset();
        return clone;
    }, fitness, population);
}
/// <summary>
/// Train the current network against the maze using either simulated
/// annealing (mode 0) or a genetic algorithm (any other mode).
/// </summary>
/// <param name="mode">0 = simulated annealing, otherwise genetic algorithm.</param>
/// <param name="epochs">Number of training iterations to run.</param>
/// <param name="maxTemp">Annealing start temperature.</param>
/// <param name="minTemp">Annealing stop temperature.</param>
/// <param name="cycles">Annealing cycles per iteration; also used as the
/// genetic population size. NOTE(review): one knob driving both looks
/// unintentional — confirm.</param>
/// <param name="timerTimeout">Timeout (ms) passed to the scoring function.</param>
public void Train(int mode = 0, int epochs = 1, int maxTemp = 10, int minTemp = 2, int cycles = 10, int timerTimeout = 5000)
{
    var encogScore = new EncogScore(maze, timerTimeout);
    encogScore.EncogCycleComplete += EncogScore_EncogCycleComplete;
    encogScore.MazeCycleComplete += EncogScore_MazeCycleComplete;

    IMLTrain train;
    if (mode == 0)
    {
        //Simulated Annealing
        train = new NeuralSimulatedAnnealing(network, encogScore, maxTemp, minTemp, cycles);
    }
    else
    {
        //Genetic Algorithm
        // FIX: the factory previously returned the same network instance for
        // every genome, so the whole population started from identical
        // weights. Deep-copy and reset each candidate (as the other genetic
        // trainers in this file do) so the initial population is diverse.
        train = new MLMethodGeneticAlgorithm(() =>
        {
            IMLMethod candidate = (IMLMethod)ObjectCloner.DeepCopy(network);
            ((IMLResettable)candidate).Reset();
            return candidate;
        }, encogScore, cycles);
    }

    for (int epoch = 1; epoch <= epochs; epoch++)
    {
        train.Iteration();
        TrainingIterationComplete?.Invoke(epoch, train.Error);
    }
    //SaveTrainingData();
}
/// <summary>
/// Run a genetic-algorithm training session of 1000 epochs on 8 threads,
/// checkpointing the trainer every 10 epochs and writing the final best
/// genome to Result.txt.
/// </summary>
public void StartTraining()
{
    var scorer = new GameScore();
    var trainer = new MLMethodGeneticAlgorithm(GetNetwork, scorer, 200)
    {
        ThreadCount = 8
    };

    var strategy = new BestStrategy();
    strategy.Init(trainer);
    trainer.Strategies.Add(strategy);

    for (int epoch = 0; epoch < 1000; epoch++)
    {
        trainer.Iteration();

        var best = trainer.Genetic.BestGenome;
        Console.WriteLine(string.Format("Epoch {0} finished : {1} Hash : {2}", epoch, best.Score, best.GetHashCode()));

        // Checkpoint every tenth epoch (including epoch 0).
        if ((epoch % 10) == 0)
        {
            SaveTrainer(trainer);
        }
    }

    SaveTrainer(trainer);
    System.IO.File.WriteAllText(@".\Result.txt", trainer.Genetic.BestGenome.ToString());
}
/// <summary>
/// Train a 6-2-8 network with a genetic algorithm (population 2000), scored
/// by <see cref="Tester"/>. Runs at most 80 epochs, stopping early once the
/// score reaches 37.
/// </summary>
/// <param name="render">Unused here; kept for caller compatibility.</param>
/// <returns>The best network found by the trainer.</returns>
public static BasicNetwork train(Render render)
{
    // FIX: removed an unused local network that was created here but never
    // read — the genetic algorithm builds its own population via the factory.
    IMLTrain train = new MLMethodGeneticAlgorithm(() =>
    {
        BasicNetwork result = CreateNetwork(6, 2, 8);
        ((IMLResettable)result).Reset();
        return result;
    }, new Tester(), 2000);

    // Single loop counter replaces the original duplicated i/epoch pair;
    // output is unchanged (epochs 1..80).
    for (int epoch = 1; epoch <= 80; epoch++)
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Score:" + train.Error);
        if (train.Error >= 37)
        {
            break;
        }
    }

    return (BasicNetwork)train.Method;
}
/// <summary>
/// Interactive lunar-lander training session: prompts for the network shape
/// and training method (annealing or genetic), trains for the requested
/// number of epochs, then optionally replays the best pilot's landing.
/// </summary>
public static void LanderTrain()
{
    Console.WriteLine("\n\nEncog network structure: \nNumber of Input neurons 3 \nNumber of output neurons 1 ");

    var hiddenLayers = Util.GetInput("Number of hidden layers [default 1]:", 1);
    var hiddenLayerNeurons = Util.GetInput("Number of hidden layer neurons [default 100]: ", 100);
    var numOfEpoch = Util.GetInput("Number of Epochs [default 10]:", 10);
    var type = Util.GetInput("\nSelect a training method [Annealing - 0][Genetic - 1 default]:", 1);

    var network = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
    var pilotScore = new PilotScore();

    IMLTrain train;
    if (type == 0)
    {
        // Simulated annealing: temperature falls from start to stop.
        var startTemp = Util.GetInput("Start Temperature [default 10]:", 10);
        var endTemp = Util.GetInput("Stop Temperature [default 2]:", 2);
        var cycles = Util.GetInput("Number of Cycles [default 10]:", 10);
        train = new NeuralSimulatedAnnealing(
            network, pilotScore, startTemp, endTemp, cycles);
    }
    else
    {
        // Genetic algorithm: each genome is a freshly created, randomized network.
        var populationSize = Util.GetInput("Population Size [default 10]:", 10);
        train = new MLMethodGeneticAlgorithm(() =>
        {
            var candidate = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
            ((IMLResettable)candidate).Reset();
            return candidate;
        }, pilotScore, populationSize);
    }

    Console.WriteLine("\n\nTraining: \n");
    var stopwatch = Stopwatch.StartNew();
    for (int i = 1; i <= numOfEpoch; i++)
    {
        train.Iteration();
        Console.WriteLine($"Epoch#: {i} \t Score: {train.Error}");
    }
    stopwatch.Stop();

    Console.WriteLine("\nThe total number of times it tried the Lunar Lander for training: " + NeuralPilot.CycleCount);
    Console.WriteLine($"Elapsed: {stopwatch.Elapsed}\n");

    var showPredictedOutput = Util.GetInput("Show landing simulation for AI prediction? [No - 0, Yes - 1 default]: ", 1);
    if (showPredictedOutput == 1)
    {
        network = (BasicNetwork)train.Method;
        var pilot = new NeuralPilot(network, true);
        pilot.ScorePilot();
        Console.WriteLine("hit enter to continue...");
        Console.ReadLine();
    }
}
/// <summary>
/// This is where encog network settings are configured and training is being
/// processed: prompts for the network shape and training method (annealing or
/// genetic), trains against the logistic-simulator score, then prints the
/// predicted outputs.
/// </summary>
public static void LogisticTrain()
{
    Console.WriteLine("\n\nEncog network structure: \nNumber of Input neurons 1 \nNumber of output neurons 9 ");
    int hiddenLayers = Util.GetInput("Number of hidden layers [default 1]: ", 1);
    int hiddenLayerNeurons = Util.GetInput("Hidden layer neurons [default 100]: ", 100);
    int type = Util.GetInput("\nSelect a training method [Annealing - 0][Genetic - 1 default]:", 1);
    int numOfEpoch = Util.GetInput("Number of Epochs [default 10]:", 10);

    BasicNetwork network = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
    var pilotScore = new EncogLogisticScore();

    IMLTrain train;
    if (type == 0)
    {
        int startTemp = Util.GetInput("Start Temperature [default 10]:", 10);
        int endTemp = Util.GetInput("End Temperature [default 2]:", 2);
        int cycles = Util.GetInput("Cycles [default 10]:", 10);
        // FIX: the temperatures were passed in swapped order (endTemp first).
        // NeuralSimulatedAnnealing expects (startTemp, stopTemp), as the
        // other annealing call sites in this file do.
        train = new NeuralSimulatedAnnealing(network, pilotScore, startTemp, endTemp, cycles);
    }
    else
    {
        int populationSize = Util.GetInput("Population Size [default 10]:", 10);
        // Genetic algorithm: each genome is a freshly created, randomized network.
        train = new MLMethodGeneticAlgorithm(() =>
        {
            BasicNetwork result = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
            ((IMLResettable)result).Reset();
            return result;
        }, pilotScore, populationSize);
    }

    Stopwatch watch = new Stopwatch();
    watch.Start();
    Console.WriteLine("\n\nTraining: \n");
    for (int i = 0; i < numOfEpoch; i++)
    {
        train.Iteration();
        double totalCosts = train.Error;
        // Scores are costs in dollars; display as currency.
        string currencyScore = totalCosts.ToString("$#,##0");
        Console.WriteLine($"Epoch # {i} \t Score: {currencyScore,10}");
    }
    watch.Stop();

    Console.WriteLine("\nPredicted outputs:");
    network = (BasicNetwork)train.Method;
    var pilot = new EncogLogisticSimulator(network, true);
    pilot.CalculateScore(LogisticSimulator.GenerateCustomerOrders(), true);

    Console.WriteLine($"\nElapsed: {watch.Elapsed}");
    // FIX: this string literal was split across a raw newline (invalid C#);
    // rejoined into a single literal.
    Console.WriteLine("\nThe total number of times it tried the Logistic Simulation for training: " + pilotScore.SessionCnt);
    Console.ReadLine();
}
/// <summary>
/// Verify that MLMethodGeneticAlgorithm can drive an untrained XOR network
/// down to the target error using a training-set score.
/// </summary>
public void TestGenetic()
{
    IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore score = new TrainingSetScore(trainingData);

    // Population of 500 untrained XOR networks.
    var genetic = new MLMethodGeneticAlgorithm(
        () => NetworkUtil.CreateXORNetworkUntrained(),
        score,
        500);

    NetworkUtil.TestTraining(genetic, 0.00001);
}
/// <summary>
/// Pre-train the shark network with a genetic algorithm (population 1000,
/// 6 iterations), scored by <see cref="SharkTrainer"/>.
/// </summary>
/// <returns>The best network found by the trainer.</returns>
public BasicNetwork SharkTrain()
{
    // Each genome starts as a freshly created, randomized 2x2 network.
    IMLTrain train = new MLMethodGeneticAlgorithm(() =>
    {
        BasicNetwork result = CreateNetwork(2, 2);
        ((IMLResettable)result).Reset();
        return result;
    }, new SharkTrainer(), 1000);

    for (int i = 0; i < 6; i++)
    {
        train.Iteration();
        // FIX: corrected the typo "Inital" -> "Initial" in the progress message.
        Console.WriteLine("Shark Initial Train Score: " + Math.Round(train.Error, 2));
    }

    return (BasicNetwork)train.Method;
}
/// <summary>
/// Capture the genetic trainer and build the fixed roster of adversaries:
/// six vertical and six horizontal players, each with a randomly chosen
/// starting side (+1.0 or -1.0).
/// </summary>
public void Init(IMLTrain train)
{
    Method = (MLMethodGeneticAlgorithm)train;
    Adversaries = new List<Adversaries>();

    // NOTE(review): the randomizer draws from [-1, 1], so ">= 0.5" is true
    // roughly 25% of the time, not 50% — confirm the side bias is intentional.
    var rng = new RangeRandomizer(-1, 1);
    for (int index = 0; index < 6; index++)
    {
        double verticalStart = rng.NextDouble() >= 0.5 ? 1.0 : -1.0;
        Adversaries.Add(new Adversaries()
        {
            PlayerStart = verticalStart,
            Player = new PlayerVertical(index)
        });

        double horizontalStart = rng.NextDouble() >= 0.5 ? 1.0 : -1.0;
        Adversaries.Add(new Adversaries()
        {
            PlayerStart = horizontalStart,
            Player = new PlayerHorizontal(index)
        });
    }

    // Keep an independent snapshot of the initial roster.
    BaseAdversaries = Adversaries.ToList();
}
/// <summary>
/// Program entry point: trains the lander network with simulated annealing
/// (when the first argument is "anneal") or a genetic algorithm, then replays
/// the winning network's landing.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    BasicNetwork network = CreateNetwork();

    IMLTrain train;
    if (app.Args.Length > 0 && String.Compare(app.Args[0], "anneal", true) == 0)
    {
        // Annealing: temperature falls from 10 to 2 over 100 cycles.
        train = new NeuralSimulatedAnnealing(
            network, new PilotScore(), 10, 2, 100);
    }
    else
    {
        // Genetic algorithm: each genome is a freshly created, randomized network.
        train = new MLMethodGeneticAlgorithm(() =>
        {
            BasicNetwork result = CreateNetwork();
            ((IMLResettable)result).Reset();
            return result;
        }, new PilotScore(), 500);
    }

    for (int epoch = 1; epoch <= 50; epoch++)
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Score:" + train.Error);
    }

    // FIX: this was a verbatim string (@"\n..."), which printed a literal
    // "\n" instead of a newline.
    Console.WriteLine("\nHow the winning network landed:");
    network = (BasicNetwork)train.Method;
    var pilot = new NeuralPilot(network, true);
    Console.WriteLine(pilot.ScorePilot());
    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Persist the phenotype (a BasicNetwork) of the trainer's current best
/// genome to ./Network.net.
/// </summary>
void SaveTrainer(MLMethodGeneticAlgorithm Trainer)
{
    var bestGenome = Trainer.Genetic.BestGenome as MLMethodGenome;
    var bestNetwork = bestGenome.Phenotype as BasicNetwork;
    var target = new System.IO.FileInfo("./Network.net");
    Encog.Persist.EncogDirectoryPersistence.SaveObject(target, bestNetwork);
}