/// <summary>
/// Creates a NEAT population by constructing a trainer scored against the
/// XOR data set and returning the population the trainer built.
/// </summary>
/// <returns>The population held by a freshly constructed NEAT trainer
/// (2 inputs, 1 output, population size 10).</returns>
private NEATPopulation Generate()
{
    IMLDataSet trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore score = new TrainingSetScore(trainingSet);

    // Let NEATTraining build its own population: 2 inputs, 1 output,
    // 10 genomes. (The original code also constructed an ActivationStep
    // that was never used anywhere -- removed; to take effect it would
    // have to be assigned to a NEATPopulation's OutputActivationFunction,
    // as the other methods in this file do.)
    NEATTraining train = new NEATTraining(score, 2, 1, 10);

    return (NEATPopulation)train.Population;
}
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    // Build the XOR training data and the score function NEAT evolves against.
    IMLDataSet xorData = new BasicMLDataSet(XORInput, XORIdeal);
    ICalculateScore fitness = new TrainingSetScore(xorData);

    // Population of 1000 genomes with 2 inputs and 1 output; output neurons
    // use a step activation centered at 0.5.
    var population = new NEATPopulation(2, 1, 1000);
    var stepActivation = new ActivationStep { Center = 0.5 };
    population.OutputActivationFunction = stepActivation;

    // Evolve until the error drops to 1%.
    var trainer = new NEATTraining(fitness, population);
    EncogUtility.TrainToError(trainer, 0.01);

    var network = (NEATNetwork) trainer.Method;
    network.ClearContext();

    // test the neural network
    Console.WriteLine(@"Neural Network Results:");
    EncogUtility.Evaluate(network, xorData);
}
/// <summary>
/// Trains the network with backpropagation as the main trainer, augmented by
/// greedy and hybrid (simulated-annealing) strategies, while independently
/// iterating a NEAT trainer each epoch, until the stop strategy fires.
/// </summary>
private void trainNetworkBackprop()
{
    // IMLTrain train = new Backpropagation(this.network, this.input,this.ideal, 0.000001, 0.1);

    IMLDataSet aset = new BasicMLDataSet(input, ideal);
    int epoch = 1;

    // train the neural network
    // Score used both by the annealing trainer and the NEAT trainer below.
    ICalculateScore score = new TrainingSetScore(aset);
    // Alternate trainer for the hybrid strategy: simulated annealing
    // (start temp 10, stop temp 2, 100 cycles).
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    // Main trainer: backprop with learning rate 0.001 and zero momentum.
    IMLTrain trainMain = new Backpropagation(network, aset, 0.001, 0.0);
    StopTrainingStrategy stop = new StopTrainingStrategy();

    // NEAT population sized 1000 with a step output activation at 0.5.
    var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
    // train the neural network
    var step = new ActivationStep();
    step.Center = 0.5;
    pop.OutputActivationFunction = step;
    var train = new NEATTraining(score, pop);

    // Strategy order matters: greedy first, then annealing hybrid, then the
    // stop condition, then the NEAT hybrid.
    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);
    trainMain.AddStrategy(new HybridStrategy(train));

    network.ClearContext();

    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        // The NEAT trainer is also iterated directly, in addition to its
        // role as a hybrid strategy of trainMain.
        train.Iteration();
        // NOTE(review): there is no space before "neat iteration:", so the
        // genetic iteration number and this label run together in the output.
        Console.WriteLine(@"Training " + @"Epoch #" + epoch + @" Error:" +
                          trainMain.Error+ @" Genetic iteration:"+trainAlt.IterationNumber+
                          @"neat iteration:"+train.IterationNumber );
        epoch++;
    }
}
/// <summary>
/// Builds a NEAT population and evolves it against the supplied data set
/// until the requested error rate is reached.
/// </summary>
/// <param name="aset">The training data set to score genomes against.</param>
/// <param name="inputcounts">Number of network inputs.</param>
/// <param name="outputcounts">Number of network outputs.</param>
/// <param name="populationsize">Number of genomes in the population.</param>
/// <param name="ToErrorTraining">The error rate to train down to.</param>
/// <returns>The trained NEAT network.</returns>
public static NEATNetwork BuildTrainNeatNetwork(IMLDataSet aset, int inputcounts, int outputcounts, int populationsize, double ToErrorTraining)
{
    // Genomes are scored by their error on the supplied training set.
    ICalculateScore fitness = new TrainingSetScore(aset);

    // Population with a step output activation centered at 0.5.
    var population = new NEATPopulation(inputcounts, outputcounts, populationsize);
    population.OutputActivationFunction = new ActivationStep { Center = 0.5 };

    // Evolve to the target error, then return the best network found.
    var trainer = new NEATTraining(fitness, population);
    EncogUtility.TrainToError(trainer, ToErrorTraining);
    return (NEATNetwork)trainer.Method;
}
/// <summary>
/// Verifies the expected configuration values of a NEAT population, then
/// confirms the population can still drive a training iteration.
/// </summary>
/// <param name="pop">The population to validate.</param>
private void Validate(NEATPopulation pop)
{
    // Check the persisted configuration values one by one.
    Assert.AreEqual(0.3, pop.OldAgePenalty);
    Assert.AreEqual(50, pop.OldAgeThreshold);
    Assert.AreEqual(10, pop.PopulationSize);
    Assert.AreEqual(0.2, pop.SurvivalRate);
    Assert.AreEqual(10, pop.YoungBonusAgeThreshold);
    Assert.AreEqual(0.3, pop.YoungScoreBonus);

    // see if the population can actually be used to train
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore fitness = new TrainingSetScore(xorData);
    var trainer = new NEATTraining(fitness, pop);
    trainer.Iteration();
}