Example #1
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder    = new ParamsHolder(args);
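            // Read the annealing schedule from the arguments, with defaults of
            // start temperature 10, stop temperature 2, and 100 cycles.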
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork)method, score, startTemp, stopTemp, cycles);

            return(train);
        }
Example #2
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet, string Method)
        {
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(trainingSet);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain        trainMain;

            if (Method.Equals("Leven"))
            {
                Console.WriteLine("Using LevenbergMarquardtTraining");
                trainMain = new LevenbergMarquardtTraining(network, trainingSet);
            }
            else
            {
                trainMain = new Backpropagation(network, trainingSet);
            }

            var stop = new StopTrainingStrategy();

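            // Greedy discards any iteration that worsens the error; HybridStrategy
            // switches to the simulated annealing trainer when the main trainer stalls.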
            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            int epoch = 0;

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
            return(trainMain.Error);
        }
Example #3
        public override double Train(Data info, float lr, float mom)
        {
            IMLDataSet data = new BasicMLDataSet(info.InputData, info.OutputData);

            // Train the network on the data set. Note: the lr and mom parameters are
            // not used by Levenberg-Marquardt training.
            ICalculateScore score    = new TrainingSetScore(data);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(EncogNetwork, score, 10, 2, 1000);
            IMLTrain        learner;

            learner = new LevenbergMarquardtTraining(EncogNetwork, data);

            var stop = new StopTrainingStrategy();

            learner.AddStrategy(new Greedy());
            learner.AddStrategy(new HybridStrategy(trainAlt));

            //Train network on data set.
            double lastError = double.PositiveInfinity;

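            // Iterate until the per-iteration improvement drops below 1e-7.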
            do
            {
                if (learner.Error != 0)
                {
                    lastError = learner.Error;
                }

                learner.Iteration();
            } while (lastError - learner.Error > 0.0000001);

            return(learner.Error);
        }
Example #4
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            // train the neural network
            ICalculateScore      score     = new TrainingSetScore(minis);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, minis, 0.0001, 0.01);
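            // Stop once the error fails to improve by at least 0.0001 over 200 iterations.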
            StopTrainingStrategy stop      = new StopTrainingStrategy(0.0001, 200);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);


            var sw = new Stopwatch();

            sw.Start();
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Anneal Iteration:" + trainAlt.IterationNumber);
            }
            sw.Stop();

            return(trainMain.Error);
        }
Example #5
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder         = new ParamsHolder(args);
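            // Genetic algorithm parameters, with defaults: population size 5000,
            // mutation rate 0.1, and mating percentage 0.25.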
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutation = holder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double mate = holder.GetDouble(MLTrainFactory.PropertyMate,
                                           false, 0.25d);

            IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork)method,
                                                        new RangeRandomizer(-1, 1), score, populationSize, mutation,
                                                        mate);

            return(train);
        }
Example #6
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder         = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

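            // The factory lambda gives each genome its own deep copy of the method,
            // reset to random weights.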
            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                IMLMethod result = (IMLMethod)ObjectCloner.DeepCopy(method);
                ((IMLResettable)result).Reset();
                return(result);
            }, score, populationSize);

            return(train);
        }
Example #7
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
        {
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(trainingSet);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(
                network, score, 10, 2, 100);


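            // Backpropagation with a very small learning rate (1e-5) and no momentum;
            // the annealing hybrid below helps escape local minima.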
            IMLTrain trainMain = new Backpropagation(network, trainingSet, 0.00001, 0.0);

            var stop = new StopTrainingStrategy();

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            int epoch = 0;

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
            return(trainMain.Error);
        }
Example #8
        static void Main(string[] args)
        {
            // This form of ANN uses a genetic algorithm to produce
            // the hidden layer of neurons.
            // A NEAT network starts with only an input layer and an output layer;
            // the rest is evolved as the training progresses.
            // Connections inside a NEAT neural network can be feedforward, recurrent,
            // or self-connected. All of these connection types will be tried by NEAT as it
            // attempts to evolve a neural network capable of the given task.
            IMLDataSet     trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            NEATPopulation pop         = new NEATPopulation(2, 1, 1000);

            pop.Reset();
            pop.InitialConnectionDensity = 1.0; // not required, but speeds processing.
            ICalculateScore score = new TrainingSetScore(trainingSet);
            // train the neural network
            TrainEA train = NEATUtil.ConstructNEATTrainer(pop, score);

            EncogUtility.TrainToError(train, 0.01);

            NEATNetwork network = (NEATNetwork)train.CODEC.Decode(train.BestGenome);

            // TODO no persistence? no means to peek at the structure?

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            EncogUtility.Evaluate(network, trainingSet);
        }
Example #9
        private void trainNetworkBackprop()
        {
            // IMLTrain train = new Backpropagation(this.network, this.input,this.ideal, 0.000001, 0.1);

            IMLDataSet aset  = new BasicMLDataSet(input, ideal);
            int        epoch = 1;
            // train the neural network
            ICalculateScore      score     = new TrainingSetScore(aset);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, aset, 0.001, 0.0);
            StopTrainingStrategy stop      = new StopTrainingStrategy();
            var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
            // train the neural network
            var step = new ActivationStep();

            step.Center = 0.5;
            pop.OutputActivationFunction = step;
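            // NEAT evolves its own topology; here it runs alongside backpropagation
            // as a second hybrid strategy.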
            var train = new NEATTraining(score, pop);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);
            trainMain.AddStrategy(new HybridStrategy(train));


            network.ClearContext();

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                train.Iteration();
                Console.WriteLine(@"Training Epoch #" + epoch + @" Error:" + trainMain.Error + @" Anneal iteration:" + trainAlt.IterationNumber + @" NEAT iteration:" + train.IterationNumber);
                epoch++;
            }
        }
Example #10
        /// <summary>
        /// Create a NEAT GA trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            ICalculateScore score = new TrainingSetScore(training);
            TrainEA         train = NEATUtil.ConstructNEATTrainer((NEATPopulation)method, score);

            return(train);
        }
Example #11
        private void Validate(NEATPopulation pop)
        {
            Assert.AreEqual(10, pop.PopulationSize);
            Assert.AreEqual(0.2, pop.SurvivalRate);

            // see if the population can actually be used to train
            IMLDataSet             trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
            ICalculateScore        score       = new TrainingSetScore(trainingSet);
            IEvolutionaryAlgorithm train       = NEATUtil.ConstructNEATTrainer(pop, score);

            train.Iteration();
        }
Example #12
        private NEATPopulation Generate()
        {
            IMLDataSet trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

            ICalculateScore score = new TrainingSetScore(trainingSet);
            // train the neural network
            ActivationStep step = new ActivationStep();

            step.Center = 0.5;

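            // Construct a NEAT trainer for 2 inputs, 1 output, and a population of 10.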
            IEvolutionaryAlgorithm train = NEATUtil.ConstructNEATTrainer(
                score, 2, 1, 10);

            return((NEATPopulation)train.Population);
        }
Example #13
        private NEATPopulation Generate()
        {
            IMLDataSet trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

            ICalculateScore score = new TrainingSetScore(trainingSet);
            // train the neural network
            ActivationStep step = new ActivationStep();

            step.Center = 0.5;

            NEATTraining train = new NEATTraining(
                score, 2, 1, 10);

            return((NEATPopulation)train.Population);
        }
Example #14
        /// <summary>
        /// Builds and trains a NEAT network.
        /// </summary>
        /// <param name="aset">The IMLDataSet to train on.</param>
        /// <param name="inputcounts">The number of inputs.</param>
        /// <param name="outputcounts">The number of outputs.</param>
        /// <param name="populationsize">The population size.</param>
        /// <param name="ToErrorTraining">The error rate to train to.</param>
        /// <returns>A trained NEAT network.</returns>
        public static NEATNetwork BuildTrainNeatNetwork(IMLDataSet aset, int inputcounts, int outputcounts, int populationsize, double ToErrorTraining)
        {
            NEATPopulation  pop   = new NEATPopulation(inputcounts, outputcounts, populationsize);
            ICalculateScore score = new TrainingSetScore(aset);
            // train the neural network
            ActivationStep step = new ActivationStep();

            step.Center = 0.5;
            pop.OutputActivationFunction = step;
            NEATTraining train = new NEATTraining(score, pop);

            EncogUtility.TrainToError(train, ToErrorTraining);
            NEATNetwork network = (NEATNetwork)train.Method;

            return(network);
        }
Example #15
        private void Validate(NEATPopulation pop)
        {
            Assert.AreEqual(0.3, pop.OldAgePenalty);
            Assert.AreEqual(50, pop.OldAgeThreshold);
            Assert.AreEqual(10, pop.PopulationSize);
            Assert.AreEqual(0.2, pop.SurvivalRate);
            Assert.AreEqual(10, pop.YoungBonusAgeThreshold);
            Assert.AreEqual(0.3, pop.YoungScoreBonus);

            // see if the population can actually be used to train
            IMLDataSet      trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
            ICalculateScore score       = new TrainingSetScore(trainingSet);
            NEATTraining    train       = new NEATTraining(score, pop);

            train.Iteration();
        }
Example #16
        /// <summary>
        /// Create a PSO trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            ParamsHolder holder = new ParamsHolder(args);

            int particles = holder.GetInt(
                MLTrainFactory.PropertyParticles, false, 20);

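            // Each PSO particle is a candidate set of network weights; the randomizer
            // seeds the initial particle positions.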
            ICalculateScore score      = new TrainingSetScore(training);
            IRandomizer     randomizer = new NguyenWidrowRandomizer();

            IMLTrain train = new NeuralPSO((BasicNetwork)method, randomizer, score, particles);

            return(train);
        }
Example #17
        /// <summary>
        /// Create an EPL GA trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            PrgPopulation pop = (PrgPopulation)method;

            ICalculateScore score = new TrainingSetScore(training);
            TrainEA         train = new TrainEA(pop, score);

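            // Rewrite rules simplify evolved programs; the genetic operators run with
            // probabilities 0.8 (crossover), 0.1 (subtree mutation), and 0.1 (constant mutation).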
            train.Rules.AddRewriteRule(new RewriteConstants());
            train.Rules.AddRewriteRule(new RewriteAlgebraic());
            train.CODEC = new PrgCODEC();
            train.AddOperation(0.8, new SubtreeCrossover());
            train.AddOperation(0.1, new SubtreeMutation(pop.Context, 4));
            train.AddOperation(0.1, new ConstMutation(pop.Context, 0.5, 1.0));
            train.AddScoreAdjuster(new ComplexityAdjustedScore());
            train.Speciation = new PrgSpeciation();
            return(train);
        }
Example #18
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            IMLDataSet      trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            BasicNetwork    network     = EncogUtility.SimpleFeedForward(2, 2, 0, 1, false);
            ICalculateScore score       = new TrainingSetScore(trainingSet);
            IRandomizer     randomizer  = new NguyenWidrowRandomizer(-1, 1);

            IMLTrain train = new NeuralPSO(network, randomizer, score, 20);

            EncogUtility.TrainToError(train, 0.01);

            network = (BasicNetwork)train.Method;

            // test the neural network
            Console.WriteLine("Neural Network Results:");
            EncogUtility.Evaluate(network, trainingSet);

            EncogFramework.Instance.Shutdown();
        }
Example #19
        /// <summary>
        ///     The entry point for this example. If you would like to make this example
        ///     stand alone, add it to its own project and rename this method to Main.
        /// </summary>
        /// <param name="args">Not used.</param>
        public static void ExampleMain(string[] args)
        {
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            var        pop         = new NEATPopulation(2, 1, 1000);

            pop.Reset();
            pop.InitialConnectionDensity = 1.0; // not required, but speeds processing.
            ICalculateScore score = new TrainingSetScore(trainingSet);
            // train the neural network
            var train = NEATUtil.ConstructNEATTrainer(pop, score);

            EncogUtility.TrainToError(train, 0.01);

            var network = (NEATNetwork)train.CODEC.Decode(train.BestGenome);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            EncogUtility.Evaluate(network, trainingSet);
        }
Example #20
        public static double TrainNetwork(String what,
                                          FreeformNetwork network, IMLDataSet trainingSet)
        {
            ICalculateScore score = new TrainingSetScore(trainingSet);

            IMLTrain trainAlt = new NeuralSimulatedAnnealing(
                network, score, 10, 2, 100);

            IMLTrain trainMain = new FreeformBackPropagation(network, trainingSet, 0.00001, 0.0);

            StopTrainingStrategy stop = new StopTrainingStrategy();

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            EncogUtility.TrainToError(trainMain, 0.01);

            return(trainMain.Error);
        }
Example #21
        public void Train(BasicNetwork network, IMLDataSet training)
        {
            IMLTrain trainMain = new LevenbergMarquardtTraining(network, training);
            // train the neural network
            var stop     = new StopTrainingStrategy();
            var score    = new TrainingSetScore(trainMain.Training);
            var trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            var epoch = 0;

            while (!stop.ShouldStop() && trainMain.IterationNumber < 1500)
            {
                trainMain.Iteration();
                Console.WriteLine("Training, Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
        }
Example #22
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            IMLDataSet      trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            var             pop         = new NEATPopulation(2, 1, 1000);
            ICalculateScore score       = new TrainingSetScore(trainingSet);
            // train the neural network
            var step = new ActivationStep();

            step.Center = 0.5;
            pop.OutputActivationFunction = step;

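            // NEAT training with a step output activation centered at 0.5, giving the
            // network an effectively binary output.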
            var train = new NEATTraining(score, pop);

            EncogUtility.TrainToError(train, 0.01);

            var network = (NEATNetwork)train.Method;

            network.ClearContext();
            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            EncogUtility.Evaluate(network, trainingSet);
        }
Example #23
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            Backpropagation trainMain = new Backpropagation(network, minis, 0.0001, 0.6);

            //Set the number of threads below (0 lets Encog choose the thread count automatically).
            trainMain.ThreadCount = 0;
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(minis);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            // IMLTrain trainMain = new Backpropagation(network, minis, 0.0001, 0.01);

            StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            //prune strategy not in GIT!...Removing it.
            //PruneStrategy strategypruning = new PruneStrategy(0.91d, 0.001d, 10, network,minis, 0, 20);
            //trainMain.AddStrategy(strategypruning);

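            // TrainConsole trains for up to 15.2 minutes, printing progress, before the
            // manual loop below takes over.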
            EncogUtility.TrainConsole(trainMain, network, minis, 15.2);


            var sw = new Stopwatch();

            sw.Start();
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();

                Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Anneal Iteration:" + trainAlt.IterationNumber);
            }
            sw.Stop();
            Console.WriteLine(@"Total elapsed time in seconds:" + TimeSpan.FromMilliseconds(sw.ElapsedMilliseconds).Seconds);

            return(trainMain.Error);
        }
Example #24
    /// <summary>
    /// Trains the network
    /// </summary>
    public virtual void trainNetwork()
    {
      INeuralDataSet trainingSet = new BasicNeuralDataSet(networkInput, networkIdealOutput);
      //ITrain trainBackProp = new Backpropagation(network, trainingSet, BACKPROP_LEARN_RATE, BACKPROP_MOMENTUM);

      ITrain trainBackProp = new ScaledConjugateGradient(network, trainingSet);

      double error = Double.MaxValue;
      double lastError = Double.MaxValue;
      int epoch = 1;

      int lastAnneal = 0;
      int errorExit = 0;

      double errorOnLastAnnealStart = double.MaxValue;
      int sameErrorOnLastAnnealStartCount = 0;

      double currentAnnealInterval = MIN_BACKPROP_ITERATIONS_ANNEAL_START;
      double annealStartError = 0;

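      // Main loop: run scaled conjugate gradient and periodically restart with
      // simulated annealing when progress stalls.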
      do
      {
        trainBackProp.Iteration();
        error = trainBackProp.Error;

        if (lastError - error < MAX_RMS_ITERATION_NETWORK_ERROR)
          errorExit++;
        else
          errorExit = 0;

        Console.WriteLine("Iteration(SC) #{0} Error: {1}", epoch, error.ToString("0.00000000"));

        if (error > ANNEAL_MIN_ERROR)
        {
          if ((lastAnneal > currentAnnealInterval) && (lastError - error < MAX_ANNEAL_START_ERROR))
          {
            if (error == errorOnLastAnnealStart)
              sameErrorOnLastAnnealStartCount++;
            else if (error < errorOnLastAnnealStart)
            {
              sameErrorOnLastAnnealStartCount = 0;
              errorOnLastAnnealStart = error;
            }

            ICalculateScore score = new TrainingSetScore(trainingSet);
            NeuralSimulatedAnnealing trainAnneal = new NeuralSimulatedAnnealing(network, score, ANNEAL_STARTTEMP, ANNEAL_ENDTEMP, ANNEAL_ITERATIONS);

            for (int i = 1; i <= ANNEAL_ATTEMPTS; i++)
            {
              trainAnneal.Iteration();

              if (i == 1)
                annealStartError = trainAnneal.Error;

              Console.WriteLine("Iteration(Anneal) #{0}-{1} Error: {2}", epoch, i, trainAnneal.Error.ToString("0.00000000"));
              //WebLogging.AddLog("WinRatioNeural", WebLogging.LogCategory.WinRatioNeural, "Iteration(Anneal) #" + i + " Error: " + trainAnneal.Error.ToString("0.00000000"));
            }

            if (annealStartError == trainAnneal.Error)
            {
              if (currentAnnealInterval < 200)
              {
                currentAnnealInterval *= 1.5;
                Console.WriteLine("Iteration(Anneal) # No improvement. Increasing anneal interval to " + currentAnnealInterval);
              }
              else
                Console.WriteLine("Iteration(Anneal) # No improvement. Anneal interval at max.");
            }

            lastAnneal = 0;

            trainBackProp = new ScaledConjugateGradient(network, trainingSet);
            trainBackProp.Iteration();
            error = trainBackProp.Error;
            //saveNetwork(correctPredictions.ToString("##0.0")+ "_" + epoch.ToString() + "_nerualPokerAI_LA.nnDAT");
          }
        }

        //Every 50 epochs we can test the network accuracy
        //#if DEBUG
        //if (epoch % 50 == 0)
        //{
        //    //We want to switch to the testing set if we are not using all data for training
        //    if (TRAIN_DATA_PERCENT < 1.0) createTestingSets();

        //    Console.WriteLine("    Network accuracy is currently {0}%",getNetworkAccuracy());

        //    //Wait for 1 second so that we can read the output
        //    Thread.Sleep(1000);

        //    //Likewise we want to switch back before continuing
        //    if (TRAIN_DATA_PERCENT < 1.0) createTrainingSets();
        //}
        //#endif

        lastError = trainBackProp.Error;
        epoch++;
        lastAnneal++;

        //} while (error > MAX_RMS_TOTAL_NETWORK_ERROR && errorExit < 10 && epoch < MAX_ITERATIONS);
      } while (trainBackProp.Error > MAX_RMS_TOTAL_NETWORK_ERROR && epoch < MAX_ITERATIONS && sameErrorOnLastAnnealStartCount < 2);
    }