Example #1
        public override double Train(Data info, float lr, float mom)
        {
            IMLDataSet data = new BasicMLDataSet(info.InputData, info.OutputData);

            //Set up simulated annealing as the alternate trainer for the hybrid strategy.
            //NeuralSimulatedAnnealing parameters: network, score function, start temperature, stop temperature, cycles.
            ICalculateScore score    = new TrainingSetScore(data);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(EncogNetwork, score, 10, 2, 1000);
            IMLTrain        learner;

            //Levenberg-Marquardt is the primary trainer; it does not use the lr/mom parameters.
            learner = new LevenbergMarquardtTraining(EncogNetwork, data);

            learner.AddStrategy(new Greedy());
            learner.AddStrategy(new HybridStrategy(trainAlt));

            //Iterate until the error improvement between iterations drops below 1e-7.
            double lastError = double.PositiveInfinity;

            do
            {
                if (learner.Error != 0)
                {
                    lastError = learner.Error;
                }

                learner.Iteration();
            } while (lastError - learner.Error > 0.0000001);

            return(learner.Error);
        }
Example #2
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet, string method)
        {
            // train the neural network
            ICalculateScore score    = new TrainingSetScore(trainingSet);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain        trainMain;

            if (method.Equals("Leven"))
            {
                Console.WriteLine("Using LevenbergMarquardtTraining");
                trainMain = new LevenbergMarquardtTraining(network, trainingSet);
            }
            else
            {
                trainMain = new Backpropagation(network, trainingSet);
            }

            var stop = new StopTrainingStrategy();

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            int epoch = 0;

            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
            return(trainMain.Error);
        }
Example #3
        public void TestLMA()
        {
            IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

            BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();
            IMLTrain     lma     = new LevenbergMarquardtTraining(network, trainingData);

            NetworkUtil.TestTraining(lma, 0.03);
        }
Example #4
        public void Train(IMLDataSet training)
        {
            var train = new LevenbergMarquardtTraining(network, training);

            do
            {
                train.Iteration();
            } while (train.Error > MaxError);
        }
Example #5
        public void Execute(IExampleInterface app)
        {
            IMLDataSet trainingData = GenerateTraining(InputOutputCount, Compl);
            IMLMethod  method       = EncogUtility.SimpleFeedForward(InputOutputCount,
                                                                     HiddenCount, 0, InputOutputCount, false);
            var train = new LevenbergMarquardtTraining((BasicNetwork)method, trainingData);

            EncogUtility.TrainToError(train, 0.01);

            EncogFramework.Instance.Shutdown();
        }
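The GenerateTraining helper used in Example #5 is not shown on this page. A minimal sketch of what it might look like, assuming it simply fills a BasicMLDataSet with random input/ideal pairs and that Compl is treated as the number of training rows (both of these are assumptions, not something the original snippet states):
        private static IMLDataSet GenerateTraining(int inputOutputCount, int compl)
        {
            //Assumed helper, not part of the original example: builds random training pairs.
            //compl is interpreted here as the number of rows to generate.
            var rnd   = new Random(1);
            var input = new double[compl][];
            var ideal = new double[compl][];

            for (int row = 0; row < compl; row++)
            {
                input[row] = new double[inputOutputCount];
                ideal[row] = new double[inputOutputCount];

                for (int col = 0; col < inputOutputCount; col++)
                {
                    input[row][col] = rnd.NextDouble();
                    ideal[row][col] = input[row][col]; //train the network to reproduce its input
                }
            }

            return new BasicMLDataSet(input, ideal);
        }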
Example #6
        /// <inheritdoc />
        public override BasicNetwork Calculate()
        {
            //Network
            var network           = new BasicNetwork();
            var inputNeuronCount  = inputs.GetLength(1);
            var outputNeuronCount = targets.GetLength(1);

            //Input Layer
            var inputLayer = new BasicLayer(inputActivationFunction, inputHasBias, inputNeuronCount);

            network.AddLayer(inputLayer);

            //Hidden Layer
            if (hiddenLayerConfig.GetLength(1) != 2)
            {
                throw new ArgumentException("Net Configuration is a 2 column table of values of neuron count and activation function type, with each row representing a separate layer");
            }

            for (var row = 0; row < hiddenLayerConfig.GetLength(0); row++)
            {
                var activationFunc = Util.GetActivationFunction(hiddenLayerConfig[row, 1].ToString());
                var layer          = new BasicLayer(activationFunc, true, Convert.ToInt32(hiddenLayerConfig[row, 0]));

                network.AddLayer(layer);
            }

            //Output layer
            var outputLayer = new BasicLayer(outputActivationFunction, outputHasBias, outputNeuronCount);

            network.AddLayer(outputLayer);

            //Training
            network.Structure.FinalizeStructure();
            network.Reset();
            var dataSet = new BasicMLDataSet(inputs.AsJagged(), targets.AsJagged());
            var trainlm = new LevenbergMarquardtTraining(network, dataSet);

            var epoch = 1;

            do
            {
                trainlm.Iteration();
                epoch++;
            } while (epoch < epochLimit && trainlm.Error > errorTolerance);

            return(network);
        }
Example #7
        private void Train(BasicNetwork network, IMLDataSet trainingSet)
        {
            if (mTrainingMethod == TrainingMethod.ResilientPropagation)
            {
                ITrain train = new ResilientPropagation(network, trainingSet);

                int epoch = 1;
                do
                {
                    train.Iteration();
                    epoch++;
                } while (train.Error > mMaxError && epoch < mMaxEpoch);
            }
            else if (mTrainingMethod == TrainingMethod.LevenbergMarquardt)
            {
                LevenbergMarquardtTraining train = new LevenbergMarquardtTraining(network, trainingSet);

                int epoch = 1;
                do
                {
                    train.Iteration();
                    epoch++;
                } while (train.Error > mMaxError && epoch < mMaxEpoch);
            }
            else if (mTrainingMethod == TrainingMethod.Backpropagation)
            {
                Backpropagation train = new Backpropagation(network, trainingSet);

                int epoch = 1;
                do
                {
                    train.Iteration();
                    epoch++;
                } while (train.Error > mMaxError && epoch < mMaxEpoch);
            }
            else if (mTrainingMethod == TrainingMethod.ManhattanPropagation)
            {
                ManhattanPropagation train = new ManhattanPropagation(network, trainingSet, 0.9);
                int epoch = 1;
                do
                {
                    train.Iteration();
                    epoch++;
                } while (train.Error > mMaxError && epoch < mMaxEpoch);
            }
        }
Example #8
        /// <summary>
        /// Create a LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            //The arguments are parsed, but this version of the factory uses no LMA-specific parameters.
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork) method, training);
            return result;
        }
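One way the factory method above might be called; the surrounding network, training set, and the empty argument string are assumed for illustration and are not part of the original snippet:
            //Hypothetical caller sketch (assumes an existing BasicNetwork 'network' and IMLDataSet 'trainingSet').
            IMLTrain lma = Create(network, trainingSet, "");   //empty argument string: no extra parameters for plain LMA

            do
            {
                lma.Iteration();
            } while (lma.Error > 0.01);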
Example #9
        public void Train(BasicNetwork network, IMLDataSet training)
        {
            IMLTrain trainMain = new LevenbergMarquardtTraining(network, training);
            // train the neural network
            var stop     = new StopTrainingStrategy();
            var score    = new TrainingSetScore(trainMain.Training);
            var trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            var epoch = 0;

            while (!stop.ShouldStop() && trainMain.IterationNumber < 1500)
            {
                trainMain.Iteration();
                Console.WriteLine("Training " + ", Epoch #" + epoch + " Error:" + trainMain.Error);
                epoch++;
            }
        }
Example #10
        /// <summary>
        /// Create a LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                          "LMA training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var  holder = new ParamsHolder(args);
            bool useReg = holder.GetBoolean(
                MLTrainFactory.PropertyBayesianRegularization, false, false);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork)method, training)
            {
                UseBayesianRegularization = useReg
            };

            return(result);
        }
Example #11
        static void Main(string[] args)
        {
            double error = 0.00001;

            double[][] XOR_Input =
            {
                new[] { 0.0, 0.0 },
                new[] { 1.0, 0.0 },
                new[] { 0.0, 1.0 },
                new[] { 1.0, 1.0 }
            };

            double[][] XOR_Ideal =
            {
                new[] { 0.0 },
                new[] { 1.0 },
                new[] { 1.0 },
                new[] { 0.0 }
            };

            var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);

            BasicNetwork network = CreateNetwork();

            //var train = new Backpropagation(network, trainingSet, 0.7, 0.2);
            //var train = new ManhattanPropagation(network, trainingSet, 0.001);
            // var train = new QuickPropagation(network, trainingSet, 2.0);
            //var train = new ResilientPropagation(network, trainingSet);
            //var train = new ScaledConjugateGradient(network, trainingSet);
            var train = new LevenbergMarquardtTraining(network, trainingSet);

            int epoch = 0;

            do
            {
                train.Iteration();
                Console.WriteLine("Iteration No: {0}, Error: {1}", ++epoch, train.Error);
            } while (train.Error > error);

            foreach (var item in trainingSet)
            {
                var output = network.Compute(item.Input);
                Console.WriteLine("Input: {0}, {1} \tIdeal: {2} \t Actual: {3}", item.Input[0], item.Input[1], item.Ideal[0], output[0]);
            }

            Console.WriteLine("Training done.");
            Console.WriteLine("press any key to continue");
            Console.ReadLine();

            // normalized value
            var weightNorm = new NormalizedField(NormalizationAction.Normalize, "Weights", 50.0, 40.0, 1.0, -1.0);

            double normalizedValue   = weightNorm.Normalize(42.5);
            double denormalizedValue = weightNorm.DeNormalize(normalizedValue);

            Console.WriteLine("Normalized value: {0}", normalizedValue.ToString());
            Console.WriteLine("press any key to continue");
            Console.ReadLine();

            // normalized array
            double[] weights         = new double[] { 40.0, 42.5, 43.0, 49.0, 50.0 };
            var      weightNormArray = new NormalizeArray();

            weightNormArray.NormalizedHigh = 1.0;
            weightNormArray.NormalizedLow  = -1.0;
            double[] normalizedWeights = weightNormArray.Process(weights);

            foreach (var item in normalizedWeights)
            {
                Console.WriteLine("Normalized value: {0}", item.ToString());
            }
            Console.WriteLine("press any key to continue");
            Console.ReadLine();
        }
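The CreateNetwork() helper called at the start of Main in Example #11 is not shown on this page. A minimal sketch of what it could look like, assuming a 2-3-1 feedforward network with sigmoid activations (the layer sizes and activation function are assumptions, chosen only to fit the XOR data):
        private static BasicNetwork CreateNetwork()
        {
            //Assumed helper: 2 inputs, one small hidden layer, 1 output.
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));                     //input layer, bias, no activation
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  //hidden layer
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); //output layer
            network.Structure.FinalizeStructure();
            network.Reset();                                                     //randomize the weights
            return network;
        }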