Exemple #1
0
        /// <summary>
        ///     Run the configured training method against the data set.
        /// </summary>
        /// <param name="train">The training method.</param>
        /// <param name="method">The ML method.</param>
        /// <param name="trainingSet">The training set.</param>
        private void PerformTraining(IMLTrain train, IMLMethod method,
                                     IMLDataSet trainingSet)
        {
            ValidateNetwork.ValidateMethodToData(method, trainingSet);

            double targetError = Prop.GetPropertyDouble(
                ScriptProperties.MlTrainTargetError);

            Analyst.ReportTrainingBegin();
            int iterationLimit = Analyst.MaxIteration;

            if (train.ImplementationType != TrainingImplementationType.OnePass)
            {
                // Iterative training: continue until the target error is hit,
                // the analyst requests a stop, training reports done, or the
                // iteration limit (-1 = unlimited) is reached.
                bool keepGoing;
                do
                {
                    train.Iteration();
                    Analyst.ReportTraining(train);
                    keepGoing = train.Error > targetError
                                && !Analyst.ShouldStopCommand()
                                && !train.TrainingDone
                                && (iterationLimit == -1 || train.IterationNumber < iterationLimit);
                } while (keepGoing);
            }
            else
            {
                // One-pass methods need exactly one iteration.
                train.Iteration();
                Analyst.ReportTraining(train);
            }

            train.FinishTraining();

            Analyst.ReportTrainingEnd();
        }
        /// <summary>
        /// Verify that a training method improves its error by at least the
        /// required relative rate over ten iterations after one warm-up iteration.
        /// </summary>
        /// <param name="train">The training method under test.</param>
        /// <param name="requiredImprove">Minimum acceptable relative improvement.</param>
        public static void TestTraining(IMLTrain train, double requiredImprove)
        {
            train.Iteration();
            double initialError = train.Error;

            int remaining = 10;
            while (remaining-- > 0)
            {
                train.Iteration();
            }

            double finalError = train.Error;
            double improve = (initialError - finalError) / initialError;

            Assert.IsTrue(improve >= requiredImprove,"Improve rate too low for " + train.GetType().Name +
                    ",Improve=" + improve + ",Needed=" + requiredImprove);
        }
        /// <summary>
        ///     Fit the model using cross validation.
        /// </summary>
        /// <param name="k">The number of folds total.</param>
        /// <param name="foldNum">The current fold number.</param>
        /// <param name="fold">The current fold.</param>
        private void FitFold(int k, int foldNum, DataFold fold)
        {
            IMLMethod method = CreateMethod();
            IMLTrain  train  = CreateTrainer(method, fold.Training);

            if (train.ImplementationType == TrainingImplementationType.Iterative)
            {
                // Iterative training: run until done, with early stopping driven
                // by the fold's validation set.
                var earlyStop = new SimpleEarlyStoppingStrategy(
                    fold.Validation);
                train.AddStrategy(earlyStop);

                var line = new StringBuilder();
                while (!train.TrainingDone)
                {
                    train.Iteration();
                    line.Length = 0;
                    line.Append("Fold #");
                    line.Append(foldNum);
                    line.Append("/");
                    line.Append(k);
                    line.Append(": Iteration #");
                    line.Append(train.IterationNumber);
                    line.Append(", Training Error: ");
                    line.Append(Format.FormatDouble(train.Error, 8));
                    line.Append(", Validation Error: ");
                    line.Append(Format.FormatDouble(earlyStop.ValidationError,
                                                    8));
                    Report.Report(k, foldNum, line.ToString());
                }
                // Score the fold with the final validation error.
                fold.Score  = earlyStop.ValidationError;
                fold.Method = method;
            }
            else if (train.ImplementationType == TrainingImplementationType.OnePass)
            {
                train.Iteration();
                double validationError = CalculateError(method,
                                                        fold.Validation);
                // NOTE(review): this branch reports with (k, k) while the iterative
                // branch uses (k, foldNum) - confirm whether foldNum was intended here.
                Report.Report(k, k,
                              "Trained, Training Error: " + train.Error
                              + ", Validation Error: " + validationError); // fixed typo "Validatoin"
                fold.Score  = validationError;
                fold.Method = method;
            }
            else
            {
                throw new EncogError("Unsupported training type for EncogModel: "
                                     + train.ImplementationType);
            }
        }
        /// <summary>
        /// Run the optimization loop until the configured stop condition
        /// (session count, wall-clock time, or score hits) is met, or until
        /// cancellation is requested.
        /// </summary>
        /// <param name="cancelToken">Optional token to abort training early.</param>
        public void StartOptimization(CancellationToken? cancelToken = null)
        {
            updateStatus?.Invoke("Training...");

            double hours = simSettings.SimType == SimulationType.Time
                ? simSettings.Hours.Value
                : 0;

            simSettings.StartedOn = DateTime.Now;
            simSettings.EndsOn    = DateTime.Now.AddHours(hours);

            while (true)
            {
                train.Iteration();

                // Cancellation is checked after each iteration, as in the original loop.
                if (cancelToken?.IsCancellationRequested == true)
                {
                    break;
                }

                bool continueTraining =
                    (simSettings.SimType == SimulationType.Sessions && simSettings.Sessions > planogramScore.SessionNumber) ||
                    (simSettings.SimType == SimulationType.Time && simSettings.EndsOn > DateTime.Now) ||
                    (simSettings.SimType == SimulationType.Score && SimulationSettings.NUM_SCORE_HITS > planogramScore.NumScoreHits);

                if (!continueTraining)
                {
                    break;
                }
            }

            // display for final results only
            if (!simSettings.EnableSimDisplay)
            {
                updateUI?.Invoke(planogramScore.LastResult, true);
            }

            updateStatus?.Invoke("Done", true);
        }
Exemple #5
0
        /// <summary>
        /// Called just after a training iteration.
        /// </summary>
        ///
        public virtual void PostIteration()
        {
            if (!_ready)
            {
                // First call: no previous error to compare against yet.
                _ready = true;
                return;
            }

            double currentError = _mainTrain.Error;
            _lastImprovement = (currentError - _lastError)
                               / _lastError;
            EncogLogging.Log(EncogLogging.LevelDebug, "Last improvement: "
                             + _lastImprovement);

            // An increase in error, or an improvement smaller than the minimum,
            // counts as a stalled iteration.
            bool stalled = (_lastImprovement > 0)
                           || (Math.Abs(_lastImprovement) < _minImprovement);
            if (!stalled)
            {
                return;
            }

            _lastHybrid++;
            if (_lastHybrid <= _tolerateMinImprovement)
            {
                return;
            }

            // Tolerance exhausted: reset the counter and run the alternate trainer.
            _lastHybrid = 0;

            EncogLogging.Log(EncogLogging.LevelDebug,
                             "Performing hybrid cycle");

            int cycle = _alternateCycles;
            while (cycle-- > 0)
            {
                _altTrain.Iteration();
            }
        }
Exemple #6
0
        /// <summary>
        /// Perform one iteration: for every fold, train on all other folds and
        /// accumulate the error measured on the held-out fold.
        /// </summary>
        ///
        public override void Iteration()
        {
            double totalError = 0;

            for (int validationFold = 0; validationFold < Folded.NumFolds; validationFold++)
            {
                // restore the network state belonging to this fold
                _networks[validationFold].CopyToNetwork(_flatNetwork);

                // train with every fold except the validation fold
                for (int trainFold = 0; trainFold < Folded.NumFolds; trainFold++)
                {
                    if (trainFold == validationFold)
                    {
                        continue;
                    }
                    Folded.CurrentFold = trainFold;
                    _train.Iteration();
                }

                // measure the error on the held-out validation fold
                Folded.CurrentFold = validationFold;
                totalError += _flatNetwork.CalculateError(Folded);

                // save the trained state back for this fold
                _networks[validationFold].CopyFromNetwork(_flatNetwork);
            }

            Error = totalError / Folded.NumFolds;
        }
Exemple #7
0
        /// <summary>
        /// Train the network, using the specified training algorithm, and send the
        /// output to the console.
        /// </summary>
        /// <param name="train">The training method to use.</param>
        /// <param name="network">The network to train.</param>
        /// <param name="trainingSet">The training set.</param>
        /// <param name="seconds">The number of seconds to train for.</param>
        public static void TrainConsole(IMLTrain train, BasicNetwork network, IMLDataSet trainingSet, double seconds)
        {
            Console.WriteLine(@"Beginning training...");
            long start = Environment.TickCount;

            int epoch = 1;
            double remaining;
            do
            {
                train.Iteration();

                // Floating-point division keeps fractional seconds, as in the original.
                double elapsedSeconds = ((double) Environment.TickCount - start) / 1000;
                remaining = seconds - elapsedSeconds;

                Console.WriteLine(@"Iteration #" + Format.FormatInteger(epoch)
                                  + @" Error:" + Format.FormatPercent(train.Error)
                                  + @" elapsed time = " + Format.FormatTimeSpan((int)elapsedSeconds)
                                  + @" time left = "
                                  + Format.FormatTimeSpan((int)remaining));
                epoch++;
            } while (remaining > 0 && !train.TrainingDone);
            train.FinishTraining();
        }
Exemple #8
0
        /// <summary>
        /// Train for up to the given number of minutes, checkpointing the network
        /// and a resumable training continuation to disk after every iteration.
        /// Stops when the time budget runs out, training reports done, or a
        /// console key is pressed.
        /// </summary>
        /// <param name="train">The training method to use.</param>
        /// <param name="network">The network being trained; saved to <paramref name="networkFile"/> each iteration.</param>
        /// <param name="trainingSet">The training set (not read in this body; presumably kept for signature parity with TrainConsole - confirm).</param>
        /// <param name="minutes">The maximum number of minutes to train for.</param>
        /// <param name="networkFile">Destination file for the network checkpoint.</param>
        /// <param name="trainFile">Destination file for the training continuation checkpoint.</param>
        public static void MyTrainConsole(IMLTrain train, BasicNetwork network, IMLDataSet trainingSet, int minutes, FileInfo networkFile, FileInfo trainFile)
        {
            int  epoch = 1;
            long remaining;

            Console.WriteLine(@"Beginning training...");
            long start = Environment.TickCount;

            do
            {
                train.Iteration();
                long current = Environment.TickCount;
                // Integer division: whole seconds elapsed, whole minutes remaining.
                long elapsed = (current - start) / 1000;
                remaining = minutes - elapsed / 60;
                Console.WriteLine($@"Iteration #{Format.FormatInteger(epoch)} Error:{Format.FormatPercent(train.Error)} elapsed time = {Format.FormatTimeSpan((int)elapsed)} time left = {Format.FormatTimeSpan((int)remaining * 60)}");
                epoch++;
                // Checkpoint the network, then pause training to capture a
                // continuation, persist it, and resume from it immediately.
                EncogDirectoryPersistence.SaveObject(networkFile, network);
                TrainingContinuation cont = train.Pause();
                EncogDirectoryPersistence.SaveObject(trainFile, cont);
                train.Resume(cont);
                // Dump the average of each continuation array for inspection.
                foreach (var x in cont.Contents)
                {
                    Console.WriteLine($"{x.Key}: {((double[])x.Value).Average()}");
                }
            }while (remaining > 0 && !train.TrainingDone && !Console.KeyAvailable);
            Console.WriteLine("Finishing.");
            train.FinishTraining();
        }
Exemple #9
0
        /// <summary>
        /// Assert that ten further iterations improve the error by at least the
        /// required relative rate, after a single warm-up iteration.
        /// </summary>
        /// <param name="train">The training method under test.</param>
        /// <param name="requiredImprove">Minimum acceptable relative improvement.</param>
        public static void TestTraining(IMLTrain train, double requiredImprove)
        {
            train.Iteration();
            double errorBefore = train.Error;

            const int extraIterations = 10;
            for (int iter = 0; iter < extraIterations; iter++)
            {
                train.Iteration();
            }

            double errorAfter = train.Error;

            double improve = (errorBefore - errorAfter) / errorBefore;

            Assert.IsTrue(improve >= requiredImprove, "Improve rate too low for " + train.GetType().Name +
                          ",Improve=" + improve + ",Needed=" + requiredImprove);
        }
Exemple #10
0
        /// <summary>
        /// Build a sliding-window training set from the source series and train
        /// the network with the configured method until <c>_stop</c> is set,
        /// invoking <paramref name="trainCallback"/> every
        /// <paramref name="reportEach"/> iterations.
        /// </summary>
        /// <param name="trainCallback">Optional callback receiving (error, iteration number).</param>
        /// <param name="reportEach">How often, in iterations, to invoke the callback.</param>
        public void StartTrain(Action <double, int> trainCallback = null, int reportEach = 5)
        {
            // initialize input and output values
            var inputs      = new double[_learnLength][];
            var outputs     = new double[_learnLength][];
            var i0          = TrainStartIndex();
            var sourceArray = GetSource().ToArray();
            var window      = new Queue <double>(sourceArray.Skip(i0).Take(_windowSize).Select(d => d.Val[(byte)_wantedInput]));

            for (var i = 0; i < _learnLength; i++)
            {
                // take the _windowSize preceding values for each of the
                // _learnLength samples, starting from the selected date
                var innerArray = window.ToArray();
                inputs[i] = NormalizeInput(innerArray);

                // slide the window forward by one sample
                window.Dequeue();
                window.Enqueue(sourceArray[i0 + i + _windowSize].Val[(byte)_wantedInput]);

                // Target vector: the next _layers.Last() values of the wanted output.
                // NOTE(review): the Skip here uses (i + _windowSize) without the i0
                // offset used for inputs - confirm whether that is intentional.
                var ouputArray = sourceArray.Skip(i + _windowSize).Take(_layers.Last()).Select(d => d.Val[(byte)_wantedOutput]).ToArray();
                outputs[i] = NormalizeOutput(ouputArray);
            }

            if (_network != null)
            {
                var      trainingSet = new BasicMLDataSet(inputs, outputs);
                // Select the trainer that matches the configured training method;
                // an unrecognized method yields null and aborts below.
                IMLTrain teacher     =
                    _trainMethod == TrainMethod.Specific ? (IMLTrain) new LevenbergMarquardtTraining(_network, trainingSet) :
                    _trainMethod == TrainMethod.BackProp ? (IMLTrain) new Backpropagation(_network, trainingSet) :
                    _trainMethod == TrainMethod.Resilent ? (IMLTrain) new ResilientPropagation(_network, trainingSet) :
                    _trainMethod == TrainMethod.Genetic ? (IMLTrain) new NeuralGeneticAlgorithm(
                        _network,
                        new Encog.MathUtil.Randomize.NguyenWidrowRandomizer(),
                        new TrainingSetScore(trainingSet),
                        _population,
                        _mutationPercent / 100.0,
                        _matePercent / 100.0) : null;
                if (teacher == null)
                {
                    return;
                }

                _stop = false;
                for (int i = 1; !_stop; i++)
                {
                    teacher.Iteration();
                    // Genetic training evolves a population; adopt the current best organism.
                    if (teacher is NeuralGeneticAlgorithm)
                    {
                        _network = (teacher as NeuralGeneticAlgorithm).Genetic.Population.Best.Organism as BasicNetwork;
                    }
                    if (i % reportEach == 0 && trainCallback != null)
                    {
                        trainCallback(teacher.Error, i);
                    }
                }
            }
        }
Exemple #11
0
    /// <summary>
    /// Run one backpropagation epoch, log progress, and finish once the error
    /// target or epoch limit is reached.
    /// </summary>
    void Train()
    {
        backpropagation.Iteration();
        Debug.Log("Epoch: " + epoch + "\nErro: " + backpropagation.Error);
        epoch++;

        bool errorReached    = backpropagation.Error < minimumError;
        bool epochsExhausted = epoch > maximumEpoch;
        if (errorReached || epochsExhausted)
        {
            backpropagation.FinishTraining();
            train = false;
            buttons.SetActive(true);
        }
    }
    /// <summary>
    /// Coroutine: run one backpropagation epoch per frame until the limits
    /// configured in the UI text fields are reached.
    /// </summary>
    IEnumerator Train()
    {
        for (; ; )
        {
            backpropagation.Iteration();
            debugText.text = ("Epoch: " + epoch + "\nErro: " + backpropagation.Error);
            epoch++;

            // Short-circuit kept: the error threshold is only parsed when the
            // epoch limit has not been exceeded.
            if (epoch > int.Parse(maximumEpoch.text) ||
                backpropagation.Error < float.Parse(minimumError.text, CultureInfo.InvariantCulture))
            {
                Stop();
            }

            yield return(null);
        }
    }
Exemple #13
0
        /// <summary>
        /// Train to a specific error, using the specified training method, send the
        /// output to the console.
        /// </summary>
        ///
        /// <param name="train">The training method.</param>
        /// <param name="error">The desired error level.</param>
        public static void TrainToError(IMLTrain train, double error)
        {
            Console.Out.WriteLine(@"Beginning training...");

            int epoch = 1;
            while (true)
            {
                train.Iteration();

                Console.Out.WriteLine(@"Iteration #" + Format.FormatInteger(epoch)
                                      + @" Error:" + Format.FormatPercent(train.Error)
                                      + @" Target Error: " + Format.FormatPercent(error));
                epoch++;

                // De Morgan of the original do-while condition.
                if (train.Error <= error || train.TrainingDone)
                {
                    break;
                }
            }
            train.FinishTraining();
        }
Exemple #14
0
        /// <summary>
        /// Train the network on the stored input/output pairs, then compute the
        /// network's actual vs. ideal outputs for every pair.
        /// </summary>
        /// <returns>Progress and result messages (Spanish), one per entry.</returns>
        public List <string> entrenar()
        {
            IMLDataSet    parsito  = new BasicMLDataSet(this.neuralInput, this.neuralOutput);
            List <string> mensajes = new List <string>();
            int           epoch    = 1;

            // Train for at most 5000 epochs or until the error drops to 0.001.
            do
            {
                train.Iteration();
                mensajes.Add("Ronda # " + epoch + "     -     Porcentaje de Error: " + train.Error);
                epoch++;
            } while ((epoch < 5000 && train.Error > 0.001));
            mensajes.Add("Resultados del Entrenamiento");
            // Log the 12 actual outputs against the 12 ideal values for each pair.
            foreach (IMLDataPair pair in parsito)
            {
                IMLData output = network.Compute(pair.Input);
                mensajes.Add("Actuales =" + output[0] + ", " + output[1] + ", " + output[2] + ", " + output[3] + ", " + output[4] + ", " + output[5] + ", " + output[6] + ", " + output[7] + ", " + output[8] + ", " + output[9] + ", " + output[10] + ", " + output[11]
                             + "\n Ideales =" + pair.Ideal[0] + ", " + pair.Ideal[1] + ", " + pair.Ideal[2] + ", " + pair.Ideal[3] + ", " + pair.Ideal[4] + ", " + pair.Ideal[5] + ", " + pair.Ideal[6] + ", " + pair.Ideal[7] + ", " + pair.Ideal[8] + ", " + pair.Ideal[9] + ", " + pair.Ideal[10] + ", " + pair.Ideal[11]);
            }
            return(mensajes);
        }
        /// <summary>
        /// Background-worker entry point: train until the training error drops
        /// to 0.01, reporting training and cross-validation error each epoch,
        /// and honoring cancellation between epochs.
        /// </summary>
        /// <param name="sender">The worker raising the event (unused).</param>
        /// <param name="e">Event args; <c>Cancel</c> is set when cancellation is honored.</param>
        void trainWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            int epoch = 1;

            do
            {
                // Honor a pending cancellation request before the next epoch.
                if (trainWorker.CancellationPending)
                {
                    e.Cancel = true;
                    return;
                }

                // Throttle the loop so the UI stays responsive.
                Wait(100);
                train.Iteration();

                // [0] = training error, [1] = cross-validation error for this epoch.
                // (Fixed a stray empty statement ";;" from the original.)
                var iterationResults = new KeyValuePair <int, double> [2];
                iterationResults[0] = new KeyValuePair <int, double>(epoch, train.Error);
                iterationResults[1] = new KeyValuePair <int, double>(epoch, network.CalculateError(crossValidationSet));
                trainWorker.ReportProgress(epoch, iterationResults);
                epoch++;
            } while (train.Error > 0.01);
        }
Exemple #16
0
        /// <summary>
        /// Train the network, tracking the best-performing network snapshot for
        /// both the training set and the validation set.
        /// </summary>
        public void EntrainementReseau()
        {
            // Per-epoch errors for the training and validation sets.
            double erreurEntrainement = 0;
            double erreurValidation   = 0;
            int    epoch = 1;

            do
            {
                train.Iteration();

                // Training-set error: record it, and keep a clone of the network
                // whenever it beats the best seen so far.
                erreurEntrainement = Reseau.CalculateError(TrainingSet);
                ListeErreurEntrainement.Add(erreurEntrainement);

                if (erreurEntrainement < ErreurOptimaleEntrainement)
                {
                    ErreurOptimaleEntrainement = erreurEntrainement;
                    ReseauOptimalEntrainement  = (BasicNetwork)Reseau.Clone();
                }


                // Validation-set error: same best-snapshot tracking.
                erreurValidation = Reseau.CalculateError(ValidationSet);
                ListeErreurValidation.Add(erreurValidation);

                if (erreurValidation < ErreurOptimaleValidation)
                {
                    ErreurOptimaleValidation = erreurValidation;
                    ReseauOptimalValidation  = (BasicNetwork)Reseau.Clone();
                }

                epoch++;
            } while (epoch < NbEpochMax);// && erreurEntrainement != 0 && erreurValidation != 0);

            train.FinishTraining();
        }
Exemple #17
0
 /// <summary>
 /// Perform the training. Deobfuscated rewrite of the decompiled goto-based
 /// original, which also read the unassigned local <c>maxIteration</c> and
 /// therefore did not even compile as written.
 /// </summary>
 /// <param name="xd87f6a9c53c2ed9f">The training method.</param>
 /// <param name="x1306445c04667cc7">The ML method.</param>
 /// <param name="x1c9e132f434262d8">The training set.</param>
 private void x0d87de1eb44df41c(IMLTrain xd87f6a9c53c2ed9f, IMLMethod x1306445c04667cc7, IMLDataSet x1c9e132f434262d8)
 {
     ValidateNetwork.ValidateMethodToData(x1306445c04667cc7, x1c9e132f434262d8);
     double targetError = base.Prop.GetPropertyDouble("ML:TRAIN_targetError");
     base.Analyst.ReportTrainingBegin();
     int maxIteration = base.Analyst.MaxIteration;

     // Always run at least one iteration (the original unconditionally jumped
     // to the iteration label first), then continue until the target error is
     // reached, a stop is requested, training reports done, or the iteration
     // limit (-1 = unlimited) is hit.
     do
     {
         xd87f6a9c53c2ed9f.Iteration();
         base.Analyst.ReportTraining(xd87f6a9c53c2ed9f);
     } while ((xd87f6a9c53c2ed9f.Error > targetError)
              && !base.Analyst.ShouldStopCommand()
              && !xd87f6a9c53c2ed9f.TrainingDone
              && ((maxIteration == -1) || (xd87f6a9c53c2ed9f.IterationNumber < maxIteration)));

     xd87f6a9c53c2ed9f.FinishTraining();
     base.Analyst.ReportTrainingEnd();
 }
Exemple #18
0
 /// <summary>
 /// Train until the error drops to the requested level, logging each iteration
 /// to the console. Deobfuscated rewrite of the decompiled goto-based original,
 /// which read the unassigned local array <c>strArray</c> before creating it.
 /// </summary>
 /// <param name="train">The training method.</param>
 /// <param name="trainingSet">The training set (unused; kept for interface compatibility).</param>
 /// <param name="error">The desired error level.</param>
 public static void TrainToError(IMLTrain train, IMLDataSet trainingSet, double error)
 {
     int epoch = 1;
     Console.WriteLine("Beginning training...");

     // The original always executed one iteration before testing the condition.
     do
     {
         train.Iteration();
         Console.WriteLine("Iteration #" + Format.FormatInteger(epoch)
                           + " Error:" + Format.FormatPercent(train.Error)
                           + " Target Error: " + Format.FormatPercent(error));
         epoch++;
     } while ((train.Error > error) && !train.TrainingDone);

     train.FinishTraining();
 }
Exemple #19
0
 /// <summary>
 /// Train for up to the given number of minutes, logging progress to the
 /// console. Deobfuscated rewrite of the decompiled goto-based original: the
 /// opaque unsigned-arithmetic guards are all constant-true or constant-false,
 /// leaving the simple timed do-while below.
 /// </summary>
 /// <param name="train">The training method to use.</param>
 /// <param name="network">The network to train (unused in this body).</param>
 /// <param name="trainingSet">The training set (unused in this body).</param>
 /// <param name="minutes">The number of minutes to train for.</param>
 public static void TrainConsole(IMLTrain train, BasicNetwork network, IMLDataSet trainingSet, int minutes)
 {
     int epoch = 1;
     Console.WriteLine("Beginning training...");
     long startTicks = Environment.TickCount;

     long remainingMinutes;
     do
     {
         train.Iteration();

         // Integer division: whole seconds elapsed, whole minutes remaining.
         long elapsedSeconds = (Environment.TickCount - startTicks) / 1000L;
         remainingMinutes = minutes - (elapsedSeconds / 60L);

         Console.WriteLine("Iteration #" + Format.FormatInteger(epoch)
                           + " Error:" + Format.FormatPercent(train.Error)
                           + " elapsed time = " + Format.FormatTimeSpan((int) elapsedSeconds)
                           + " time left = " + Format.FormatTimeSpan(((int) remainingMinutes) * 60));
         epoch++;
     } while (remainingMinutes > 0L && !train.TrainingDone);

     train.FinishTraining();
 }
Exemple #20
0
 /// <summary>
 /// Train to a specific error level, sending progress to the console.
 /// Deobfuscated rewrite of the decompiled goto-based original; the
 /// constant guards collapse to the plain do-while below.
 /// </summary>
 /// <param name="train">The training method.</param>
 /// <param name="error">The desired error level.</param>
 public static void TrainToError(IMLTrain train, double error)
 {
     int epoch = 1;
     Console.Out.WriteLine("Beginning training...");

     do
     {
         train.Iteration();
         Console.Out.WriteLine("Iteration #" + Format.FormatInteger(epoch)
                               + " Error:" + Format.FormatPercent(train.Error)
                               + " Target Error: " + Format.FormatPercent(error));
         epoch++;
     } while ((train.Error > error) && !train.TrainingDone);

     train.FinishTraining();
 }
        /// <summary>
        /// Train to a specific error, using the specified training method, send the
        /// output to the console.
        /// </summary>
        ///
        /// <param name="train">The training method.</param>
        /// <param name="error">The desired error level.</param>
        public static void TrainToError(IMLTrain train, double error)
        {
            Console.Out.WriteLine(@"Beginning training...");

            int epoch = 1;
            bool keepTraining;
            do
            {
                train.Iteration();

                Console.Out.WriteLine(@"Iteration #" + Format.FormatInteger(epoch)
                        + @" Error:" + Format.FormatPercent(train.Error)
                        + @" Target Error: " + Format.FormatPercent(error));
                epoch++;

                keepTraining = train.Error > error && !train.TrainingDone;
            } while (keepTraining);
            train.FinishTraining();
        }
        /// <summary>
        /// Train the network, using the specified training algorithm, and send the
        /// output to the console.
        /// </summary>
        /// <param name="train">The training method to use.</param>
        /// <param name="network">The network to train.</param>
        /// <param name="trainingSet">The training set.</param>
        /// <param name="minutes">The number of minutes to train for.</param>
        public static void TrainConsole(IMLTrain train,
                                        BasicNetwork network, IMLDataSet trainingSet,
                                        int minutes)
        {
            Console.WriteLine(@"Beginning training...");
            long start = Environment.TickCount;

            int epoch = 1;
            long remaining;
            do
            {
                train.Iteration();

                // Integer division: whole seconds elapsed, whole minutes remaining.
                long elapsedSeconds = (Environment.TickCount - start)/1000;
                remaining = minutes - elapsedSeconds/60;

                Console.WriteLine(@"Iteration #" + Format.FormatInteger(epoch)
                                  + @" Error:" + Format.FormatPercent(train.Error)
                                  + @" elapsed time = " + Format.FormatTimeSpan((int) elapsedSeconds)
                                  + @" time left = "
                                  + Format.FormatTimeSpan((int) remaining*60));
                epoch++;
            } while (remaining > 0 && !train.TrainingDone);
            train.FinishTraining();
        }
Exemple #23
0
        /// <summary>
        ///     Perform the training.
        /// </summary>
        /// <param name="train">The training method.</param>
        /// <param name="method">The ML method.</param>
        /// <param name="trainingSet">The training set.</param>
        private void PerformTraining(IMLTrain train, IMLMethod method,
            IMLDataSet trainingSet)
        {
            ValidateNetwork.ValidateMethodToData(method, trainingSet);
            double targetError = Prop.GetPropertyDouble(
                ScriptProperties.MlTrainTargetError);
            Analyst.ReportTrainingBegin();
            int iterationLimit = Analyst.MaxIteration;

            // A one-pass method runs exactly one loop trip; an iterative method
            // keeps looping until its stop conditions fire.
            bool onePass = train.ImplementationType == TrainingImplementationType.OnePass;
            bool more = true;
            while (more)
            {
                train.Iteration();
                Analyst.ReportTraining(train);
                more = !onePass
                       && train.Error > targetError
                       && !Analyst.ShouldStopCommand()
                       && !train.TrainingDone
                       && (iterationLimit == -1 || train.IterationNumber < iterationLimit);
            }
            train.FinishTraining();

            Analyst.ReportTrainingEnd();
        }
Exemple #24
0
 /// <summary>
 /// Run a single training iteration on the underlying trainer.
 /// </summary>
 public void Iterate() => train.Iteration();