Example #1
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
        {
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(trainingSet);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(
                network, score, 10, 2, 100);


            IMLTrain trainMain = new Backpropagation(network, trainingSet, 0.00001, 0.0);

            var stop = new StopTrainingStrategy();

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            int epoch = 0;

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
            return(trainMain.Error);
        }
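A minimal sketch of how this helper might be called, assuming an XOR-style data set and Encog's EncogUtility.SimpleFeedForward to build the network; the data values, the network shape, and the "XOR" label are illustrative and not part of the example above.

// Hypothetical caller for the TrainNetwork helper above
// (requires Encog.ML.Data.Basic and Encog.Util.Simple).
double[][] input = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
double[][] ideal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };

IMLDataSet trainingSet = new BasicMLDataSet(input, ideal);
// 2 inputs, one hidden layer of 4 neurons, 1 output, sigmoid activation.
BasicNetwork network = EncogUtility.SimpleFeedForward(2, 4, 0, 1, false);

double finalError = TrainNetwork("XOR", network, trainingSet);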
Example #2
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet, string method)
        {
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(trainingSet);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain        trainMain;

            if (method.Equals("Leven"))
            {
                Console.WriteLine("Using LevenbergMarquardtTraining");
                trainMain = new LevenbergMarquardtTraining(network, trainingSet);
            }
            else
            {
                trainMain = new Backpropagation(network, trainingSet);
            }

            var stop = new StopTrainingStrategy();

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            int epoch = 0;

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
            return(trainMain.Error);
        }
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            // train the neural network
            ICalculateScore      score     = new TrainingSetScore(minis);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, minis, 0.0001, 0.01);
            StopTrainingStrategy stop      = new StopTrainingStrategy(0.0001, 200);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);


            var sw = new Stopwatch();

            sw.Start();
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Genetic Iteration:" + trainAlt.IterationNumber);
            }
            sw.Stop();

            return(trainMain.Error);
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            BasicNetwork network = CreateNetwork();

            IMLTrain train;

            if (app.Args.Length > 0 && String.Compare(app.Args[0], "anneal", true) == 0)
            {
                train = new NeuralSimulatedAnnealing(
                    network, new PilotScore(), 10, 2, 100);
            }
            else
            {
                train = new NeuralGeneticAlgorithm(
                    network, new NguyenWidrowRandomizer(),
                    new PilotScore(), 500, 0.1, 0.25);
            }

            int epoch = 1;

            for (int i = 0; i < 50; i++)
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Score:" + train.Error);
                epoch++;
            }

            Console.WriteLine(@"\nHow the winning network landed:");
            network = (BasicNetwork)train.Method;
            var pilot = new NeuralPilot(network, true);

            Console.WriteLine(pilot.ScorePilot());
            EncogFramework.Instance.Shutdown();
        }
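The lander example above only prints the winning pilot's score. If the evolved network should be kept, Encog's EncogDirectoryPersistence can write it to disk; a short sketch, assuming a writable path (the file name is made up, and using System.IO plus Encog.Persist are needed).

// Hypothetical follow-up: persist the winning network (file name is illustrative).
var winner = (BasicNetwork)train.Method;
EncogDirectoryPersistence.SaveObject(new FileInfo("lander-pilot.eg"), winner);

// It can later be reloaded the same way.
var restored = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo("lander-pilot.eg"));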
Example #5
        public override double Train(Data info, float lr, float mom)
        {
            IMLDataSet data = new BasicMLDataSet(info.InputData, info.OutputData);

            //Train network on data set, parameters (Network, dataset, learning rate, momentum).
            ICalculateScore score    = new TrainingSetScore(data);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(EncogNetwork, score, 10, 2, 1000);
            IMLTrain        learner;

            learner = new LevenbergMarquardtTraining(EncogNetwork, data);

            var stop = new StopTrainingStrategy();

            learner.AddStrategy(new Greedy());
            learner.AddStrategy(new HybridStrategy(trainAlt));

            //Train network on data set.
            double lastError = double.PositiveInfinity;

            do
            {
                if (learner.Error != 0)
                {
                    lastError = learner.Error;
                }

                learner.Iteration();
            } while (lastError - learner.Error > 0.0000001);

            return(learner.Error);
        }
        public void Train(int mode = 0, int epochs = 1, int maxTemp = 10, int minTemp = 2, int cycles = 10, int timerTimeout = 5000)
        {
            var encogScore = new EncogScore(maze, timerTimeout);

            encogScore.EncogCycleComplete += EncogScore_EncogCycleComplete;
            encogScore.MazeCycleComplete  += EncogScore_MazeCycleComplete;

            IMLTrain train;

            if (mode == 0)
            {
                //Simulated Annealing
                train = new NeuralSimulatedAnnealing(network, encogScore, maxTemp, minTemp, cycles);
            }
            else
            {
                //Genetic Algorithm
                train = new MLMethodGeneticAlgorithm(() => { return(network); }, encogScore, cycles);
            }

            for (int epoch = 1; epoch <= epochs; epoch++)
            {
                train.Iteration();
                TrainingIterationComplete?.Invoke(epoch, train.Error);
            }

            //SaveTrainingData();
        }
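The TrainingIterationComplete event raised above is not declared in this snippet. A plausible declaration and subscription are sketched below; the delegate shape simply mirrors the Invoke(epoch, train.Error) call, and the trainer variable name is an assumption.

// Assumed event declaration matching the Invoke(epoch, error) call above.
public event Action<int, double> TrainingIterationComplete;

// Example subscription before calling Train():
trainer.TrainingIterationComplete += (epoch, error) =>
    Console.WriteLine($"Epoch {epoch}: error {error}");
trainer.Train(mode: 0, epochs: 25);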
Example #7
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder    = new ParamsHolder(args);
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork)method, score, startTemp, stopTemp, cycles);

            return(train);
        }
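This factory is normally reached through Encog's MLTrainFactory rather than called directly. A sketch of the equivalent high-level call follows, assuming a finalized BasicNetwork and training set; the exact key strings in the argument list ("startTemp", "stopTemp", "cycles") are an assumption chosen to mirror the ParamsHolder lookups above.

// Sketch: requesting the annealing trainer through the high-level factory.
// The argument keys are assumed to match the property lookups in Create() above.
var factory = new MLTrainFactory();
IMLTrain train = factory.Create(network, trainingSet,
    MLTrainFactory.TypeAnneal, "startTemp=10,stopTemp=2,cycles=100");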
        static void Main(string[] args)
        {
            FeedforwardNetwork network = new FeedforwardNetwork();

            network.AddLayer(new FeedforwardLayer(2));
            network.AddLayer(new FeedforwardLayer(3));
            network.AddLayer(new FeedforwardLayer(1));
            network.Reset();

            // train the neural network
            NeuralSimulatedAnnealing train = new NeuralSimulatedAnnealing(
                network, XOR_INPUT, XOR_IDEAL, 10, 2, 100);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                epoch++;
            } while ((epoch < 5000) && (train.Error > 0.001));

            network = train.Network;

            // test the neural network
            Console.WriteLine("Neural Network Results:");
            for (int i = 0; i < XOR_IDEAL.Length; i++)
            {
                double[] actual = network.ComputeOutputs(XOR_INPUT[i]);
                Console.WriteLine(XOR_INPUT[i][0] + "," + XOR_INPUT[i][1]
                                  + ", actual=" + actual[0] + ",ideal=" + XOR_IDEAL[i][0]);
            }
        }
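XOR_INPUT and XOR_IDEAL are not shown in this snippet; they are the usual XOR truth table and would be declared along these lines.

// Standard XOR truth table used by the example above.
public static double[][] XOR_INPUT = {
    new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 },
    new[] { 0.0, 1.0 }, new[] { 1.0, 1.0 }
};

public static double[][] XOR_IDEAL = {
    new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 }
};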
        private void trainNetworkBackprop()
        {
            // IMLTrain train = new Backpropagation(this.network, this.input,this.ideal, 0.000001, 0.1);

            IMLDataSet aset  = new BasicMLDataSet(input, ideal);
            int        epoch = 1;
            // train the neural network
            ICalculateScore      score     = new TrainingSetScore(aset);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, aset, 0.001, 0.0);
            StopTrainingStrategy stop      = new StopTrainingStrategy();
            var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
            // train the neural network
            var step = new ActivationStep();

            step.Center = 0.5;
            pop.OutputActivationFunction = step;
            var train = new NEATTraining(score, pop);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);
            trainMain.AddStrategy(new HybridStrategy(train));


            network.ClearContext();

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                train.Iteration();
                Console.WriteLine(@"Training " + @"Epoch #" + epoch + @" Error:" + trainMain.Error + @" Genetic iteration:" + trainAlt.IterationNumber + @"neat iteration:" + train.IterationNumber);
                epoch++;
            }
        }
Example #10
        public static void LanderTrain()
        {
            Console.WriteLine("\n\nEncog network structure: \nNumber of Input neurons 3 \nNumber of output neurons 1 ");
            int hiddenLayers       = Util.GetInput("Number of hidden layers [default 1]:", 1);
            int hiddenLayerNeurons = Util.GetInput("Number of hidden layer neurons [default 100]: ", 100);
            int numOfEpoch         = Util.GetInput("Number of Epochs [default 10]:", 10);
            int type = Util.GetInput("\nSelect a training method [Annealing - 0][Genetic - 1 default]:", 1);

            BasicNetwork network    = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
            var          pilotScore = new PilotScore();


            IMLTrain train;

            if (type == 0)
            {
                int startTemp = Util.GetInput("Start Temperature [default 10]:", 10);
                int endTemp   = Util.GetInput("Stop Temperature [default 2]:", 2);
                int cycles    = Util.GetInput("Number of Cycles [default 10]:", 10);
                train = new NeuralSimulatedAnnealing(
                    network, pilotScore, startTemp, endTemp, cycles);
            }
            else
            {
                int populationSize = Util.GetInput("Population Size [default 10]:", 10);
                train = new MLMethodGeneticAlgorithm(() => {
                    BasicNetwork result = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
                    ((IMLResettable)result).Reset();
                    return(result);
                }, pilotScore, populationSize); // population size
            }

            Console.WriteLine("\n\nTraining: \n");

            System.Diagnostics.Stopwatch stopwatch = new Stopwatch();
            stopwatch.Start();
            for (int i = 1; i <= numOfEpoch; i++) // num of epochs
            {
                train.Iteration();
                Console.WriteLine($"Epoch#: {i} \t Score: {train.Error}");
            }
            stopwatch.Stop();


            Console.WriteLine("\nThe total number of times it tried the Lunar Lander for training: " + NeuralPilot.CycleCount);
            Console.WriteLine($"Elapsed: {stopwatch.Elapsed}\n");

            int showPredictedOutput = Util.GetInput("Show landing simulation for AI prediction? [No - 0, Yes - 1 default]: ", 1);

            if (showPredictedOutput == 1)
            {
                network = (BasicNetwork)train.Method;
                var pilot = new NeuralPilot(network, true);
                pilot.ScorePilot();

                Console.WriteLine("hit enter to continue...");
                Console.ReadLine();
            }
        }
Example #12
        public void TestAnneal()
        {
            IMLDataSet               trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
            BasicNetwork             network      = NetworkUtil.CreateXORNetworkUntrained();
            ICalculateScore          score        = new TrainingSetScore(trainingData);
            NeuralSimulatedAnnealing anneal       = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            NetworkUtil.TestTraining(anneal, 0.01);
        }
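NetworkUtil.CreateXORNetworkUntrained is a test helper that is not shown here; an equivalent untrained XOR-sized network can be built directly, as in the sketch below (the 2-2-1 layout and sigmoid activation are assumptions chosen to fit the XOR data).

// Sketch of an untrained XOR-sized network, comparable to the test helper above.
var network = new BasicNetwork();
network.AddLayer(new BasicLayer(null, true, 2));                     // input layer
network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));  // hidden layer
network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
network.Structure.FinalizeStructure();
network.Reset(); // randomize the weights; the network stays untrained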
Example #13
        /// <summary>
        /// This is where encog network settings are configured and training is being processed.
        /// </summary>
        public static void LogisticTrain()
        {
            Console.WriteLine("\n\nEncog network structure: \nNumber of Input neurons 1 \nNumber of output neurons 9 ");

            int hiddenLayers       = Util.GetInput("Number of hidden layers [default 1]: ", 1);
            int hiddenLayerNeurons = Util.GetInput("Hidden layer neurons [default 100]: ", 100);
            int type       = Util.GetInput("\nSelect a training method [Annealing - 0][Genetic - 1 default]:", 1);
            int numOfEpoch = Util.GetInput("Number of Epochs [default 10]:", 10);

            BasicNetwork network    = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
            var          pilotScore = new EncogLogisticScore();
            IMLTrain     train;

            if (type == 0)
            {
                int startTemp = Util.GetInput("Start Temperature [default 10]:", 10);
                int endTemp   = Util.GetInput("End Temperature [default 2]:", 2);
                int cycles    = Util.GetInput("Cycles [default 10]:", 10);
                train = new NeuralSimulatedAnnealing(network, pilotScore, startTemp, endTemp, cycles);
            }
            else
            {
                int populationSize = Util.GetInput("Population Size [default 10]:", 10);
                train = new MLMethodGeneticAlgorithm(() => {
                    BasicNetwork result = CreateNetwork(hiddenLayers, hiddenLayerNeurons);
                    ((IMLResettable)result).Reset();
                    return(result);
                }, pilotScore, populationSize); // population size
            }

            Stopwatch watch = new Stopwatch();

            watch.Start();
            Console.WriteLine("\n\nTraining: \n");

            for (int i = 0; i < numOfEpoch; i++) // num of epochs
            {
                train.Iteration();

                double totalCosts    = train.Error;
                string currencyScore = totalCosts.ToString("$#,##0");
                Console.WriteLine($"Epoch # {i} \t Score: {currencyScore,10}");
            }

            watch.Stop();

            Console.WriteLine("\nPredicted outputs:");
            network = (BasicNetwork)train.Method;
            var pilot = new EncogLogisticSimulator(network, true);

            pilot.CalculateScore(LogisticSimulator.GenerateCustomerOrders(), true);

            Console.WriteLine($"\nElapsed: {watch.Elapsed}");
            Console.WriteLine("\nThe total number of times it tried the Logistic Simulation for training: " + pilotScore.SessionCnt);
            Console.ReadLine();
        }
Example #14
        private void trainNetworkAnneal()
        {
            // train the neural network
            NeuralSimulatedAnnealing train = new NeuralSimulatedAnnealing(
                this.network, this.input, this.ideal, 10, 2, 100);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Iteration #" + epoch + " Error:"
                                  + train.Error);
                epoch++;
            } while ((train.Error > 0.01));
        }
        private void trainNetworkAnneal()
        {
            Console.WriteLine("Training with simulated annealing for 5 iterations");
            // train the neural network
            NeuralSimulatedAnnealing train = new NeuralSimulatedAnnealing(
                this.network, this.input, this.ideal, 10, 2, 100);

            int epoch = 1;

            for (int i = 1; i <= 5; i++)
            {
                train.Iteration();
                Console.WriteLine("Iteration(Anneal) #" + epoch + " Error:"
                                  + train.Error);
                epoch++;
            }
        }
Example #16
        public void Train(BasicNetwork network, IMLDataSet training)
        {
            IMLTrain trainMain = new LevenbergMarquardtTraining(network, training);
            // train the neural network
            var stop     = new StopTrainingStrategy();
            var score    = new TrainingSetScore(trainMain.Training);
            var trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            var epoch = 0;

            while (!stop.ShouldStop() && trainMain.IterationNumber < 1500)
            {
                trainMain.Iteration();
                Console.WriteLine("Training " + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
        }
        public static double TrainNetwork(String what,
                                          FreeformNetwork network, IMLDataSet trainingSet)
        {
            ICalculateScore score = new TrainingSetScore(trainingSet);

            IMLTrain trainAlt = new NeuralSimulatedAnnealing(
                network, score, 10, 2, 100);

            IMLTrain trainMain = new FreeformBackPropagation(network, trainingSet, 0.00001, 0.0);

            StopTrainingStrategy stop = new StopTrainingStrategy();

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            EncogUtility.TrainToError(trainMain, 0.01);

            return(trainMain.Error);
        }
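A brief sketch of how the FreeformNetwork passed to this helper might be obtained, assuming an existing BasicNetwork is converted (FreeformNetwork can be constructed from a BasicNetwork); the network shape is illustrative, and the XOR arrays are the ones used by the other examples.

// Sketch: convert a BasicNetwork into a FreeformNetwork and train it with the helper above.
BasicNetwork basic = EncogUtility.SimpleFeedForward(2, 4, 0, 1, false);
var freeform = new FreeformNetwork(basic); // copies the layer structure and weights

IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
double error = TrainNetwork("XOR", freeform, trainingSet);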
Example #18
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            Backpropagation trainMain = new Backpropagation(network, minis, 0.0001, 0.6);

            // Thread count of 0 lets Encog choose the number of threads automatically.
            trainMain.ThreadCount = 0;
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(minis);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            // IMLTrain trainMain = new Backpropagation(network, minis, 0.0001, 0.01);

            StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            // The prune strategy is not in this Git version, so it has been removed:
            //PruneStrategy strategypruning = new PruneStrategy(0.91d, 0.001d, 10, network,minis, 0, 20);
            //trainMain.AddStrategy(strategypruning);

            EncogUtility.TrainConsole(trainMain, network, minis, 15.2);


            var sw = new Stopwatch();

            sw.Start();
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();

                Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Genetic Iteration:" + trainAlt.IterationNumber);
            }
            sw.Stop();
            Console.WriteLine(@"Total elapsed time in seconds:" + TimeSpan.FromMilliseconds(sw.ElapsedMilliseconds).Seconds);

            return(trainMain.Error);
        }
Example #19
    /// <summary>
    /// Trains the network
    /// </summary>
    public virtual void trainNetwork()
    {
      INeuralDataSet trainingSet = new BasicNeuralDataSet(networkInput, networkIdealOutput);
      //ITrain trainBackProp = new Backpropagation(network, trainingSet, BACKPROP_LEARN_RATE, BACKPROP_MOMENTUM);

      ITrain trainBackProp = new ScaledConjugateGradient(network, trainingSet);

      double error = Double.MaxValue;
      double lastError = Double.MaxValue;
      int epoch = 1;

      int lastAnneal = 0;
      int errorExit = 0;

      double errorOnLastAnnealStart = double.MaxValue;
      int sameErrorOnLastAnnealStartCount = 0;

      double currentAnnealInterval = MIN_BACKPROP_ITERATIONS_ANNEAL_START;
      double annealStartError = 0;

      do
      {
        trainBackProp.Iteration();
        error = trainBackProp.Error;

        if (lastError - error < MAX_RMS_ITERATION_NETWORK_ERROR)
          errorExit++;
        else
          errorExit = 0;

        Console.WriteLine("Iteration(SC) #{0} Error: {1}", epoch, error.ToString("0.00000000"));

        if (error > ANNEAL_MIN_ERROR)
        {
          if ((lastAnneal > currentAnnealInterval) && (lastError - error < MAX_ANNEAL_START_ERROR))
          {
            if (error == errorOnLastAnnealStart)
              sameErrorOnLastAnnealStartCount++;
            else if (error < errorOnLastAnnealStart)
            {
              sameErrorOnLastAnnealStartCount = 0;
              errorOnLastAnnealStart = error;
            }

            ICalculateScore score = new TrainingSetScore(trainingSet);
            NeuralSimulatedAnnealing trainAnneal = new NeuralSimulatedAnnealing(network, score, ANNEAL_STARTTEMP, ANNEAL_ENDTEMP, ANNEAL_ITERATIONS);

            for (int i = 1; i <= ANNEAL_ATTEMPTS; i++)
            {
              trainAnneal.Iteration();

              if (i == 1)
                annealStartError = trainAnneal.Error;

              Console.WriteLine("Iteration(Anneal) #{0}-{1} Error: {2}", epoch, i, trainAnneal.Error.ToString("0.00000000"));
              //WebLogging.AddLog("WinRatioNeural", WebLogging.LogCategory.WinRatioNeural, "Iteration(Anneal) #" + i + " Error: " + trainAnneal.Error.ToString("0.00000000"));
            }

            if (annealStartError == trainAnneal.Error)
            {
              if (currentAnnealInterval < 200)
              {
                currentAnnealInterval *= 1.5;
                Console.WriteLine("Iteration(Anneal) # No improvment. Increasing anneal interval to " + currentAnnealInterval);
              }
              else
                Console.WriteLine("Iteration(Anneal) # No improvment. Anneal interval at max.");
            }

            lastAnneal = 0;

            trainBackProp = new ScaledConjugateGradient(network, trainingSet);
            trainBackProp.Iteration();
            error = trainBackProp.Error;
            //saveNetwork(correctPredictions.ToString("##0.0")+ "_" + epoch.ToString() + "_nerualPokerAI_LA.nnDAT");
          }
        }

        //Every 50 epochs we can test the network accuracy
        //#if DEBUG
        //if (epoch % 50 == 0)
        //{
        //    //We want to switch to the testing set if we are not using all data for training
        //    if (TRAIN_DATA_PERCENT < 1.0) createTestingSets();

        //    Console.WriteLine("    Network accuracy is currently {0}%",getNetworkAccuracy());

        //    //Wait for 1 second so that we can read the output
        //    Thread.Sleep(1000);

        //    //Likewise we want to switch back before continuing
        //    if (TRAIN_DATA_PERCENT < 1.0) createTrainingSets();
        //}
        //#endif

        lastError = trainBackProp.Error;
        epoch++;
        lastAnneal++;

        //} while (error > MAX_RMS_TOTAL_NETWORK_ERROR && errorExit < 10 && epoch < MAX_ITERATIONS);
      } while (trainBackProp.Error > MAX_RMS_TOTAL_NETWORK_ERROR && epoch < MAX_ITERATIONS && sameErrorOnLastAnnealStartCount < 2);
    }
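The tuning constants referenced throughout this method are not part of the snippet. The values below are purely illustrative placeholders, not the original author's settings; they are only meant to show plausible magnitudes for each threshold.

// Illustrative placeholder values only; the original constants are not shown in the snippet.
private const double MAX_RMS_TOTAL_NETWORK_ERROR = 0.01;      // stop once total error falls below this
private const double MAX_RMS_ITERATION_NETWORK_ERROR = 1e-7;  // per-iteration "no progress" threshold
private const double MAX_ANNEAL_START_ERROR = 1e-5;           // improvement below this triggers annealing
private const double ANNEAL_MIN_ERROR = 0.02;                 // skip annealing once the error is this low
private const int MIN_BACKPROP_ITERATIONS_ANNEAL_START = 50;  // SCG iterations before the first anneal
private const double ANNEAL_STARTTEMP = 10;
private const double ANNEAL_ENDTEMP = 2;
private const int ANNEAL_ITERATIONS = 100;
private const int ANNEAL_ATTEMPTS = 5;
private const int MAX_ITERATIONS = 100000;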