        /// <summary>
        /// Perform the training option.
        /// </summary>
        public void Train()
        {
            // first, create the machine learning method
            var       methodFactory = new MLMethodFactory();
            IMLMethod method        = methodFactory.Create(Config.MethodType, Config.MethodArchitecture, Config.InputWindow, 1);

            // second, create the data set
            string     filename = FileUtil.CombinePath(new FileInfo(_path), Config.FilenameTrain).ToString();
            IMLDataSet dataSet  = EncogUtility.LoadEGB2Memory(new FileInfo(filename));

            // third, create the trainer
            var      trainFactory = new MLTrainFactory();
            IMLTrain train        = trainFactory.Create(method, dataSet, Config.TrainType, Config.TrainParams);

            // reset if improvement is less than 1% over 500 iterations
            if (method is IMLResettable && !(train is ManhattanPropagation))
            {
                train.AddStrategy(new RequiredImprovementStrategy(500));
            }

            // fourth, train and evaluate.
            EncogUtility.TrainToError(train, Config.TargetError);
            method = train.Method;
            EncogDirectoryPersistence.SaveObject(FileUtil.CombinePath(new FileInfo(_path), Config.MethodName), method);

            // finally, write out what we did
            Console.WriteLine(@"Machine Learning Type: " + Config.MethodType);
            Console.WriteLine(@"Machine Learning Architecture: " + Config.MethodArchitecture);

            Console.WriteLine(@"Training Method: " + Config.TrainType);
            Console.WriteLine(@"Training Args: " + Config.TrainParams);
        }
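
Train() persists the fitted model with EncogDirectoryPersistence.SaveObject. As a minimal sketch (not part of the original example), the saved network could be reloaded and queried roughly as follows, assuming the persisted method is a BasicNetwork:

        // Sketch only: reload the network written by Train() and run one prediction.
        // Assumes the object saved under Config.MethodName is a BasicNetwork;
        // adjust the cast for other method types.
        public IMLData Predict(IMLData inputWindow)
        {
            var network = (BasicNetwork) EncogDirectoryPersistence.LoadObject(
                FileUtil.CombinePath(new FileInfo(_path), Config.MethodName));
            return network.Compute(inputWindow);
        }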
Example #2
        public void Process(String methodName, String methodArchitecture, String trainerName, String trainerArgs,
                            int outputNeurons)
        {
            // first, create the machine learning method
            var       methodFactory = new MLMethodFactory();
            IMLMethod method        = methodFactory.Create(methodName, methodArchitecture, 2, outputNeurons);

            // second, create the data set
            IMLDataSet dataSet = new BasicMLDataSet(XORInput, XORIdeal);

            // third, create the trainer
            var      trainFactory = new MLTrainFactory();
            IMLTrain train        = trainFactory.Create(method, dataSet, trainerName, trainerArgs);

            // reset if improvement is less than 1% over 500 iterations
            if (method is IMLResettable && !(train is ManhattanPropagation))
            {
                train.AddStrategy(new RequiredImprovementStrategy(500));
            }

            // fourth, train and evaluate.
            EncogUtility.TrainToError(train, 0.01);
            method = train.Method;
            EncogUtility.Evaluate((IMLRegression)method, dataSet);

            // finally, write out what we did
            Console.WriteLine(@"Machine Learning Type: " + methodName);
            Console.WriteLine(@"Machine Learning Architecture: " + methodArchitecture);

            Console.WriteLine(@"Training Method: " + trainerName);
            Console.WriteLine(@"Training Args: " + trainerArgs);
        }
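
Process() reads its training data from the XORInput and XORIdeal fields and takes the Encog factory strings as parameters. A minimal caller might look like the sketch below; the Run() wrapper and the architecture string are assumptions, using the standard feedforward and RPROP factory constants:

        // Sketch only: XOR truth table plus one call to Process() above.
        public static double[][] XORInput =
        {
            new[] {0.0, 0.0}, new[] {1.0, 0.0},
            new[] {0.0, 1.0}, new[] {1.0, 1.0}
        };

        public static double[][] XORIdeal =
        {
            new[] {0.0}, new[] {1.0}, new[] {1.0}, new[] {0.0}
        };

        public void Run()
        {
            Process(MLMethodFactory.TypeFeedforward,  // "feedforward"
                    "?:B->SIGMOID->4:B->SIGMOID->?",  // "?" wildcards filled in by Create(): 2 inputs, 1 output
                    MLTrainFactory.TypeRPROP,         // "rprop"
                    "",                               // default trainer arguments
                    1);                               // one output neuron
        }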
        /// <summary>
        ///     Fit the model using cross validation.
        /// </summary>
        /// <param name="k">The number of folds total.</param>
        /// <param name="foldNum">The current fold.</param>
        /// <param name="fold">The current fold.</param>
        private void FitFold(int k, int foldNum, DataFold fold)
        {
            IMLMethod method = CreateMethod();
            IMLTrain  train  = CreateTrainer(method, fold.Training);

            if (train.ImplementationType == TrainingImplementationType.Iterative)
            {
                var earlyStop = new SimpleEarlyStoppingStrategy(
                    fold.Validation);
                train.AddStrategy(earlyStop);

                var line = new StringBuilder();
                while (!train.TrainingDone)
                {
                    train.Iteration();
                    line.Length = 0;
                    line.Append("Fold #");
                    line.Append(foldNum);
                    line.Append("/");
                    line.Append(k);
                    line.Append(": Iteration #");
                    line.Append(train.IterationNumber);
                    line.Append(", Training Error: ");
                    line.Append(Format.FormatDouble(train.Error, 8));
                    line.Append(", Validation Error: ");
                    line.Append(Format.FormatDouble(earlyStop.ValidationError,
                                                    8));
                    Report.Report(k, foldNum, line.ToString());
                }
                fold.Score  = earlyStop.ValidationError;
                fold.Method = method;
            }
            else if (train.ImplementationType == TrainingImplementationType.OnePass)
            {
                train.Iteration();
                double validationError = CalculateError(method,
                                                        fold.Validation);
                Report.Report(k, k,
                              "Trained, Training Error: " + train.Error
                              + ", Validatoin Error: " + validationError);
                fold.Score  = validationError;
                fold.Method = method;
            }
            else
            {
                throw new EncogError("Unsupported training type for EncogModel: "
                                     + train.ImplementationType);
            }
        }
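
FitFold is a private helper of EncogModel and is invoked once per fold. For context, a minimal sketch of the public API that ends up driving it (dataset construction omitted; the hold-back ratio, seed, and fold count are assumptions) could be:

        // Sketch only: typical EncogModel cross-validation driver.
        // Requires the Encog.ML.Model and Encog.ML.Data.Versatile namespaces.
        public IMLRegression FitWithCrossValidation(VersatileMLDataSet data)
        {
            var model = new EncogModel(data);
            model.SelectMethod(data, MLMethodFactory.TypeFeedforward);
            model.Report = new ConsoleStatusReportable();
            data.Normalize();                          // normalization strategy was chosen by SelectMethod
            model.HoldBackValidation(0.3, true, 1001); // keep 30% for validation, shuffle with seed 1001
            model.SelectTrainingType(data);
            return (IMLRegression) model.Crossvalidate(5, true); // 5 folds, shuffled
        }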