Beispiel #1
0
        /// <summary>
        /// Creates a network from the configuration and trains it asynchronously in
        /// three phases: output reordering (Idyn rounds), dynamic-epoch training
        /// (Edyn iterations per round), and a final fixed-training phase (Efixed
        /// iterations). Progress is reported through <paramref name="callback"/>.
        /// </summary>
        /// <param name="networkConfiguration">Activation function, hidden layer count and output count for the new network.</param>
        /// <param name="spectralImagesIndexesToConsider">Indexes of the spectral images to pull into the training set.</param>
        /// <param name="callback">Invoked with status, measured accuracy, learner error and iteration index as training progresses.</param>
        /// <returns>A task that completes with the trained network.</returns>
        public Task <Network> Train(NetworkConfiguration networkConfiguration, int[] spectralImagesIndexesToConsider, TrainingCallback callback)
        {
            var network = networkFactory.Create(
                networkConfiguration.ActivationFunction,
                DefaultFingerprintSize,
                networkConfiguration.HiddenLayerCount,
                networkConfiguration.OutputCount);

            // 2^OutputCount distinct binary output patterns are available for assignment.
            var spectralImagesToTrain = trainingDataProvider.GetSpectralImagesToTrain(
                spectralImagesIndexesToConsider, (int)System.Math.Pow(2, networkConfiguration.OutputCount));
            var trainingSet = trainingDataProvider.MapSpectralImagesToBinaryOutputs(
                spectralImagesToTrain, networkConfiguration.OutputCount);

            // Normalization happens before the task starts; the closure below
            // captures the already-normalized trainingSet.
            normalizeStrategy.NormalizeInputInPlace(networkConfiguration.ActivationFunction, trainingSet.Inputs);
            normalizeStrategy.NormalizeOutputInPlace(networkConfiguration.ActivationFunction, trainingSet.Outputs);
            return(Task.Factory.StartNew(
                       () =>
            {
                var dataset = new BasicNeuralDataSet(trainingSet.Inputs, trainingSet.Outputs);
                var learner = new Backpropagation(network, dataset);
                double correctOutputs = 0.0;
                // Phases 1+2: Idyn rounds; each round re-pairs images with output
                // patterns, then runs Edyn backpropagation iterations.
                for (int idynIndex = 0; idynIndex < Idyn; idynIndex++)
                {
                    correctOutputs = networkPerformanceMeter.MeasurePerformance(
                        network, dataset, networkConfiguration.ActivationFunction);
                    callback(TrainingStatus.OutputReordering, correctOutputs, learner.Error, idynIndex * Edyn);
                    // Reassign binary output patterns to the best-matching images
                    // and rewrite both trainingSet and dataset accordingly.
                    var bestPairs = GetBestPairsForReordering(
                        (int)System.Math.Pow(2, networkConfiguration.OutputCount), network, spectralImagesToTrain, trainingSet);
                    ReorderOutputsAccordingToBestPairs(bestPairs, trainingSet, dataset);

                    for (int edynIndex = 0; edynIndex < Edyn; edynIndex++)
                    {
                        // Performance is measured BEFORE the iteration, so the
                        // callback reports the pre-update state.
                        correctOutputs = networkPerformanceMeter.MeasurePerformance(
                            network, dataset, networkConfiguration.ActivationFunction);
                        callback(
                            TrainingStatus.RunningDynamicEpoch,
                            correctOutputs,
                            learner.Error,
                            (idynIndex * Edyn) + edynIndex);
                        learner.Iteration();
                    }
                }

                // Phase 3: fixed training with the final output assignment.
                for (int efixedIndex = 0; efixedIndex < Efixed; efixedIndex++)
                {
                    correctOutputs = networkPerformanceMeter.MeasurePerformance(
                        network, dataset, networkConfiguration.ActivationFunction);
                    callback(
                        TrainingStatus.FixedTraining, correctOutputs, learner.Error, (Idyn * Edyn) + efixedIndex);
                    learner.Iteration();
                }

                network.ComputeMedianResponses(trainingSet.Inputs, TrainingSongSnippets);
                callback(TrainingStatus.Finished, correctOutputs, learner.Error, (Idyn * Edyn) + Efixed);
                return network;
            }));
        }
        /// <summary>
        /// Runs backpropagation in batches of 1000 iterations until the error
        /// improvement between consecutive batches drops below 1e-7.
        /// </summary>
        /// <param name="info">Training samples (input and expected output arrays).</param>
        /// <param name="lr">Backpropagation learning rate.</param>
        /// <param name="mom">Backpropagation momentum.</param>
        /// <returns>The final training error.</returns>
        public override double Train(Data info, float lr, float mom)
        {
            IMLDataSet dataSet = new BasicMLDataSet(info.InputData, info.OutputData);

            // Train network on the data set: (network, dataset, learning rate, momentum).
            IMLTrain trainer = new Backpropagation(EncogNetwork, dataSet, lr, mom);
            double previousError = double.PositiveInfinity;

            do
            {
                // Remember the error of the previous batch; skip the initial 0
                // reading so the very first batch compares against +infinity.
                if (trainer.Error != 0)
                {
                    previousError = trainer.Error;
                }

                // One batch of 1000 training iterations.
                for (int iteration = 0; iteration < 1000; iteration++)
                {
                    trainer.Iteration();
                }
            } while (previousError - trainer.Error > 0.0000001);

            return trainer.Error;
        }
Beispiel #3
0
        /// <summary>
        /// Trains the network with backpropagation hybridized with simulated
        /// annealing and a greedy strategy, logging each epoch, until the stop
        /// strategy fires.
        /// </summary>
        /// <param name="what">Label used in the per-epoch log lines.</param>
        /// <param name="network">Network whose weights are trained in place.</param>
        /// <param name="trainingSet">Supervised training data.</param>
        /// <returns>The final training error.</returns>
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
        {
            ICalculateScore score = new TrainingSetScore(trainingSet);
            IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain backprop = new Backpropagation(network, trainingSet, 0.00001, 0.0);

            var stopStrategy = new StopTrainingStrategy();
            backprop.AddStrategy(new Greedy());
            backprop.AddStrategy(new HybridStrategy(annealing));
            backprop.AddStrategy(stopStrategy);

            for (int epoch = 0; !stopStrategy.ShouldStop(); epoch++)
            {
                backprop.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + backprop.Error);
            }

            return backprop.Error;
        }
Beispiel #4
0
        /// <summary>
        /// Measures the wall-clock time, in milliseconds, needed to run
        /// ITERATIONS backpropagation epochs on a freshly built sigmoid
        /// feed-forward network sized to the given data.
        /// </summary>
        /// <param name="input">Training inputs; the first row's length sets the input layer size.</param>
        /// <param name="output">Training ideals; the first row's length sets the output layer size.</param>
        /// <returns>Elapsed milliseconds for the training loop only.</returns>
        public static long BenchmarkEncog(double[][] input, double[][] output)
        {
            // Biased linear input layer, sigmoid hidden layer, unbiased sigmoid output layer.
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, input[0].Length));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, output[0].Length));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(input, output);
            IMLTrain trainer = new Backpropagation(network, trainingSet, 0.7, 0.7);

            // Time only the training iterations, not the network construction.
            var stopwatch = Stopwatch.StartNew();
            for (int epoch = 0; epoch < ITERATIONS; epoch++)
            {
                trainer.Iteration();
            }
            stopwatch.Stop();

            return stopwatch.ElapsedMilliseconds;
        }
Beispiel #5
0
        /// <summary>
        /// Trains the network from the normalized CSV file until the error drops
        /// to 0.05, prints actual-vs-ideal for every pair, counts correct
        /// 0.5-thresholded classifications and saves the trained network.
        /// </summary>
        public void Train()
        {
            var network = createNetwork();
            IMLDataSet trainingSet = EncogUtility.LoadCSV2Memory(normFile, network.InputCount, 1, false, CSVFormat.English, false);

            IMLTrain trainer = new Backpropagation(network, trainingSet);
            int epoch = 1;
            int correctCount = 0;

            do
            {
                trainer.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + trainer.Error);
                epoch++;
            } while (trainer.Error > 0.05);
            trainer.FinishTraining();

            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(@" actual=" + output[0] + @",ideal=" + pair.Ideal[0]);

                // A pair counts as correct when the 0.5-thresholded output
                // matches the ideal binary label.
                bool positiveHit = pair.Ideal[0] == 1 && output[0] > 0.5;
                bool negativeHit = pair.Ideal[0] == 0 && output[0] < 0.5;
                if (positiveHit || negativeHit)
                {
                    correctCount++;
                }
            }
            Console.WriteLine(correctCount);
            SerializeObject.Save(networkFile, network);
        }
        /// <summary>
        /// Backpropagation loop that falls back to simulated annealing whenever
        /// progress stalls (error rising, or moving by less than 0.0001) for
        /// more than 100 epochs, stopping once the error reaches MAX_ERROR.
        /// </summary>
        private void trainNetworkBackprop()
        {
            Train train = new Backpropagation(this.network, this.input,
                                              this.ideal, 0.000001, 0.1);
            double previousError = Double.MaxValue;
            int epoch = 1;
            int epochsSinceAnneal = 0;

            do
            {
                train.Iteration();
                double currentError = train.Error;

                Console.WriteLine("Iteration(Backprop) #" + epoch + " Error:"
                                  + currentError);

                if (currentError > 0.05)
                {
                    // Anneal only when no anneal happened recently and
                    // backpropagation appears stuck.
                    bool stalled = currentError > previousError ||
                                   Math.Abs(currentError - previousError) < 0.0001;
                    if (epochsSinceAnneal > 100 && stalled)
                    {
                        trainNetworkAnneal();
                        epochsSinceAnneal = 0;
                    }
                }

                previousError = train.Error;
                epoch++;
                epochsSinceAnneal++;
            } while (train.Error > MAX_ERROR);
        }
        /// <summary>
        /// Trains the network on the given data set using backpropagation
        /// hybridized with simulated annealing and a greedy strategy; stops via
        /// StopTrainingStrategy (min improvement 0.0001 over 200 iterations).
        /// </summary>
        /// <param name="network">Network whose weights are trained in place.</param>
        /// <param name="minis">Supervised training data.</param>
        /// <returns>The final training error.</returns>
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            ICalculateScore score = new TrainingSetScore(minis);
            IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain backprop = new Backpropagation(network, minis, 0.0001, 0.01);
            StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);

            backprop.AddStrategy(new Greedy());
            backprop.AddStrategy(new HybridStrategy(annealing));
            backprop.AddStrategy(stop);

            // The stopwatch is started/stopped as in the original, although its
            // elapsed time is never read.
            var stopwatch = Stopwatch.StartNew();
            while (!stop.ShouldStop())
            {
                backprop.Iteration();
                Console.WriteLine(@"Iteration #:" + backprop.IterationNumber + @" Error:" + backprop.Error + @" Genetic Iteration:" + annealing.IterationNumber);
            }
            stopwatch.Stop();

            return backprop.Error;
        }
        /// <summary>
        /// Trains the network using backpropagation hybridized with simulated
        /// annealing and a NEAT trainer, all scored against the same training
        /// set, looping until the stop strategy fires.
        /// </summary>
        private void trainNetworkBackprop()
        {
            // IMLTrain train = new Backpropagation(this.network, this.input,this.ideal, 0.000001, 0.1);

            IMLDataSet aset  = new BasicMLDataSet(input, ideal);
            int        epoch = 1;
            // One score object shared by the annealing and NEAT trainers.
            ICalculateScore      score     = new TrainingSetScore(aset);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, aset, 0.001, 0.0);
            StopTrainingStrategy stop      = new StopTrainingStrategy();
            var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
            // NEAT outputs pass through a step activation centered at 0.5.
            var step = new ActivationStep();

            step.Center = 0.5;
            pop.OutputActivationFunction = step;
            var train = new NEATTraining(score, pop);

            // Attach strategies to the backpropagation trainer: greedy, the two
            // hybrid fallbacks (annealing and NEAT), and the stop condition.
            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);
            trainMain.AddStrategy(new HybridStrategy(train));


            network.ClearContext();

            // Each epoch advances BOTH the backpropagation trainer and the NEAT
            // trainer; only the stop strategy (observing trainMain) terminates.
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                train.Iteration();
                Console.WriteLine(@"Training " + @"Epoch #" + epoch + @" Error:" + trainMain.Error + @" Genetic iteration:" + trainAlt.IterationNumber + @"neat iteration:" + train.IterationNumber);
                epoch++;
            }
        }
Beispiel #9
0
        /// <summary>
        /// Builds a sigmoid feed-forward network and trains it with plain
        /// backpropagation on the merged point sets, stopping after 20000 epochs
        /// or once the error falls to 0.001 — whichever comes first.
        /// </summary>
        /// <param name="pointsConvertedA">Primary converted point set; also receives the merge.</param>
        /// <param name="pointsConvertedB">Optional second point set merged into the first.</param>
        /// <param name="debug">When true, logs the error every epoch.</param>
        public static void Train(PointsConverted pointsConvertedA, PointsConverted pointsConvertedB = null, bool debug = true)
        {
            network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, N_input));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, N_hidden));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, N_output));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Merge the optional second set into the first before training.
            pointsConvertedA.Juntar(pointsConvertedB);

            var trainingSet = new BasicNeuralDataSet(pointsConvertedA.entrada, pointsConvertedA.saida);
            var train       = new Backpropagation(network, trainingSet);

            var epoch = 0;

            do
            {
                train.Iteration();

                if (debug)
                {
                    Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                }

                epoch++;
                // BUGFIX: the original condition used "||", which keeps looping
                // until BOTH the epoch budget is exhausted AND the error target
                // is met — an unbounded loop when the error never reaches 0.001.
                // "&&" stops at whichever limit is hit first, matching the other
                // training loops in this code base.
            } while ((epoch <= 20000) && (train.Error > 0.001));
        }
Beispiel #10
0
        /// <summary>
        /// Time-series demo: trains an Elman recurrent network to predict the
        /// next sample of sin(x) from a sliding window of 16 previous samples,
        /// then prints actual vs. ideal for every training pair.
        /// </summary>
        static void Main(string[] args)
        {
            // Used for prediction of a time series — sin(x) in theory.
            const int DEGREES     = 360;
            const int WINDOW_SIZE = 16;

            var inputs = new double[DEGREES][];
            var ideals = new double[DEGREES][];

            // Each sample: a 16-wide sine window as input, the next value as ideal.
            for (int degree = 0; degree < DEGREES; degree++)
            {
                var window = new double[WINDOW_SIZE];
                for (int offset = 0; offset < WINDOW_SIZE; offset++)
                {
                    window[offset] = Math.Sin(DegreeToRad(degree + offset));
                }
                inputs[degree] = window;
                ideals[degree] = new[] { Math.Sin(DegreeToRad(degree + WINDOW_SIZE)) };
            }

            IMLDataSet trainingSet = new BasicMLDataSet(inputs, ideals);

            // Elman pattern = simple recurrent network with one hidden layer.
            var pattern = new ElmanPattern
            {
                InputNeurons       = WINDOW_SIZE,
                ActivationFunction = new ActivationSigmoid(),
                OutputNeurons      = 1
            };
            pattern.AddHiddenLayer(WINDOW_SIZE);
            var network = (BasicNetwork)pattern.Generate();

            // Greedy backpropagation until the stop strategy reports convergence.
            IMLTrain train = new Backpropagation(network, trainingSet);
            var stop = new StopTrainingStrategy();
            train.AddStrategy(new Greedy());
            train.AddStrategy(stop);

            for (int epoch = 0; !stop.ShouldStop(); epoch++)
            {
                train.Iteration();
                Console.WriteLine($"Training Epoch #{epoch} Error:{train.Error}");
            }

            // Test: evaluate every training pair on the trained network.
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine($"actual={output[0]}, ideal={pair.Ideal[0]}");
            }
        }
        /// <summary>
        /// Minimal regression demo: a 2-5-1 sigmoid network trained for 3000
        /// backpropagation iterations on four hand-written samples, after which
        /// each training pair is evaluated and printed.
        /// </summary>
        static void Main(string[] args)
        {
            // Training data: x holds the inputs, y the expected outputs.
            double[][] x =
            {
                new double[] { 0.1, 0.4 },
                new double[] { 0.3, 0.5 },
                new double[] { 0.5, 0.2 },
                new double[] { 0.7, 0.3 },
            };
            double[][] y =
            {
                new double[] { 0.5 },
                new double[] { 0.8 },
                new double[] { 0.7 },
                new double[] { 1.0 }
            };

            // Layers: 2 input, 5 hidden, 1 output — all sigmoid, all biased.
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Fixed-length training run: 3000 backpropagation iterations.
            IMLDataSet dataSet = new BasicMLDataSet(x, y);
            ITrain learner = new Backpropagation(network, dataSet);
            for (int i = 0; i < 3000; i++)
            {
                learner.Iteration();
            }

            // Testing: print every training pair next to the network's output.
            foreach (BasicMLDataPair pair in dataSet)
            {
                IMLData result = network.Compute(pair.Input);
                Console.WriteLine(" {0} + {1} = {2} -> {3} ", pair.Input[0], pair.Input[1], pair.Ideal[0], result[0]);
            }
            Console.ReadKey();
        }
Beispiel #12
0
        /// <summary>
        /// Trains the network with backpropagation, recording per-epoch training
        /// and validation errors, until the error target or the configured
        /// iteration budget is reached.
        /// </summary>
        public void TrainNetwork()
        {
            var trainer = new Backpropagation(network, TrainSet, parameters.LearingCoefficient, parameters.InertiaCoefficient);

            // Each entry: { epoch, training error, validation error }.
            errorSet = new List <double[]>();

            int epoch = 1;
            do
            {
                trainer.Iteration();
                errorSet.Add(new double[] { epoch, trainer.Error, network.CalculateError(ValidationSet) });
                epoch++;
            } while (trainer.Error > 0.01 && epoch < parameters.IterationsCount);
        }
        /// <summary>
        /// Plain backpropagation (rate 0.7, momentum 0.5) until the error drops
        /// to the configured acceptable threshold, logging every iteration.
        /// </summary>
        private void TrainNetworkBackpropBackprop()
        {
            HeatonResearchNeural.Feedforward.Train.Train train = new Backpropagation(this.network, this.input,
                                                                                     this.ideal, 0.7, 0.5);

            int epochNumber = 1;
            while (true)
            {
                train.Iteration();
                Console.WriteLine("Backprop:Iteration #" + epochNumber + " Error:"
                                  + train.Error);
                epochNumber++;

                if (train.Error <= Config.ACCEPTABLE_ERROR)
                {
                    break;
                }
            }
        }
Beispiel #14
0
        /// <summary>
        /// Backpropagation (rate 0.001, momentum 0.1) for at most 5000 epochs or
        /// until the error falls to 0.01, logging each iteration.
        /// </summary>
        private void trainNetworkBackprop()
        {
            Train train = new Backpropagation(this.network, this.input,
                                              this.ideal, 0.001, 0.1);

            int epochNumber = 1;
            while (true)
            {
                train.Iteration();
                Console.WriteLine("Iteration #" + epochNumber + " Error:"
                                  + train.Error);
                epochNumber++;

                // Same exit condition as the original do/while (inverted).
                if (epochNumber >= 5000 || train.Error <= 0.01)
                {
                    break;
                }
            }
        }
Beispiel #15
0
        /// <summary>
        /// Trains for a fixed number of iterations and returns the per-iteration
        /// error trace.
        /// </summary>
        /// <param name="trainSet">Pairs of (input vector, expected output vector).</param>
        /// <param name="iterations">Number of training iterations to run.</param>
        /// <returns>The training error recorded after each iteration.</returns>
        public override List <double> Train(List <Tuple <List <double>, List <double> > > trainSet, int iterations)
        {
            var errorTrace = new List <double>(iterations);

            double[][] inputRows = trainSet.Select(e => e.Item1.ToArray()).ToArray();
            double[][] idealRows = trainSet.Select(e => e.Item2.ToArray()).ToArray();
            INeuralDataSet trainingSet = new BasicNeuralDataSet(inputRows, idealRows);
            ITrain trainer = new Backpropagation(network, trainingSet, this.learningRate, this.momentum);

            // do/while preserved: at least one iteration runs even if
            // iterations <= 0, matching the original behavior.
            var epoch = 0;
            do
            {
                trainer.Iteration();
                epoch++;
                errorTrace.Add(trainer.Error);
            } while (epoch < iterations);

            return errorTrace;
        }
        /// <summary>
        /// Trains the shared network on the given set for a fixed number of
        /// epochs and returns the final error.
        /// </summary>
        /// <param name="i">Run index (unused here; kept for the caller's logging).</param>
        /// <param name="trainingSet">Supervised training data.</param>
        /// <param name="iteracoes">Epoch budget; note "&lt;=" yields iteracoes + 1 iterations.</param>
        /// <returns>The final training error.</returns>
        static double ResultTreinamento(int i, IMLDataSet trainingSet, int iteracoes = 20000)
        {
            var trainer = new Backpropagation(network, trainingSet);

            var epoch = 0;
            do
            {
                trainer.Iteration();
                epoch++;
            } while (epoch <= iteracoes);

            return trainer.Error;
        }
Beispiel #17
0
        /// <summary>
        /// Trains the network with the configured training method, iterating
        /// until the error drops below mMaxError or mMaxEpoch epochs have run.
        /// </summary>
        /// <param name="network">Network whose weights are adjusted in place.</param>
        /// <param name="trainingSet">Supervised training data.</param>
        private void Train(BasicNetwork network, IMLDataSet trainingSet)
        {
            // All four branches previously duplicated the same convergence loop;
            // only the trainer construction differs per method.
            if (mTrainingMethod == TrainingMethod.ResilientPropagation)
            {
                RunToConvergence(new ResilientPropagation(network, trainingSet));
            }
            else if (mTrainingMethod == TrainingMethod.LevenbergMarquardt)
            {
                RunToConvergence(new LevenbergMarquardtTraining(network, trainingSet));
            }
            else if (mTrainingMethod == TrainingMethod.Backpropagation)
            {
                RunToConvergence(new Backpropagation(network, trainingSet));
            }
            else if (mTrainingMethod == TrainingMethod.ManhattanPropagation)
            {
                RunToConvergence(new ManhattanPropagation(network, trainingSet, 0.9));
            }
        }

        // Shared convergence loop: iterate until the error target or the epoch
        // budget is reached (at least one iteration always runs).
        private void RunToConvergence(IMLTrain train)
        {
            int epoch = 1;
            do
            {
                train.Iteration();
                epoch++;
            } while (train.Error > mMaxError && epoch < mMaxEpoch);
        }
Beispiel #18
0
        /// <summary>
        /// Loads the CSV training data, trains a 2-4-5-3 sigmoid network with
        /// backpropagation (rate 0.3, momentum 0.6) for at most 15000 epochs or
        /// until the error reaches 0.001, then counts correctly classified
        /// training pairs.
        /// </summary>
        public void MainTestEncog()
        {
            int classesCount    = 0;
            int attributesCount = 0;
            var trainData = CsvParser.Parse("./../../../DataSets/data.train.csv", ref classesCount, ref attributesCount).NormalizedData;
            // NOTE(review): testData is parsed from the same file as trainData
            // and never used afterwards — verify intent.
            var testData = CsvParser.Parse("./../../../DataSets/data.train.csv", ref classesCount, ref attributesCount).NormalizedData;

            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.Structure.FinalizeStructure();
            network.Reset();

            INeuralDataSet trainingSet = new BasicNeuralDataSet(trainData.Select(e => e.Item1.ToArray()).ToArray(), trainData.Select(e => e.Item2.ToArray()).ToArray());
            ITrain train = new Backpropagation(network, trainingSet, 0.3, 0.6);

            //ITrain train = new ResilientPropagation(network, trainingSet);

            var epoch = 0;
            do
            {
                train.Iteration();
                epoch++;
            } while ((epoch < 15000) && (train.Error > 0.001));

            // Count pairs whose predicted class matches the ideal class.
            var correct = 0;
            foreach (IMLDataPair pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                var predictedClass = Network.GetClass(new List <double>()
                {
                    output[0], output[1], output[2]
                });
                var idealClass = Network.GetClass(new List <double>()
                {
                    pair.Ideal[0], pair.Ideal[1], pair.Ideal[2]
                });
                if (predictedClass == idealClass)
                {
                    correct++;
                }
            }
        }
        /// <summary>
        /// Builds a 3-layer network, trains it with backpropagation until the
        /// target error is reached, captures the learned weights, and prints
        /// inputs and outputs for every training pair.
        /// </summary>
        /// <returns>NetworkState.TRAINED once training completes.</returns>
        public NetworkState TrainNetwork()
        {
            _trainingSet = new BasicNeuralDataSet(_annInputs, _annOutputs);

            _basicNetwork = new BasicNetwork();
            _basicNetwork.AddLayer(new BasicLayer(null, true, _nInputNeurons));
            _basicNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _nHiddenNeurons));
            _basicNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, _nOutputNeurons));
            _basicNetwork.Structure.FinalizeStructure();
            _basicNetwork.Reset();

            // Seeded randomizer in [-1, 1] so every run starts from the same
            // reproducible initial weights.
            new ConsistentRandomizer(-1, 1, 100).Randomize(_basicNetwork);

            Backpropagation train = new Backpropagation(_basicNetwork, _trainingSet, LearnRate, Momentum);
            train.FixFlatSpot = false;

            int epoch = 0;
            do
            {
                train.Iteration();
                epoch++;
                _trainError = train.Error;
                BufferTrainError.Add(_trainError);
            } while (train.Error > _error);

            train.FinishTraining();

            // Capture the flat weight vector of the trained network.
            _neuronsWeight = _basicNetwork.Structure.Network.Flat.Weights.Select(x => System.Convert.ToDouble(x)).ToList();
            Make2DNeuronsWeightsMap();

            foreach (IMLDataPair pair in _trainingSet)
            {
                IMLData output = _basicNetwork.Compute(pair.Input);
                Console.WriteLine("Input: " + pair.Input[0] + @" - " + pair.Input[1] + @" - " + pair.Input[2]);
                Console.WriteLine("Output 0: - actual=" + output[0] + @"-ideal=" + pair.Ideal[0]);
                Console.WriteLine("Output 1: - actual=" + output[1] + @"-ideal=" + pair.Ideal[1]);
                Console.WriteLine("Output 2: - actual=" + output[2] + @"-ideal=" + pair.Ideal[2]);
            }
            return NetworkState.TRAINED;
        }
Beispiel #20
0
        /// <summary>
        /// Letter-recognition demo: trains a 25-75-50-20 ramp-activation network,
        /// classifies one hard-coded 5x5 glyph, then dumps all 20 outputs for
        /// every training pair.
        /// </summary>
        static void Main(string[] args)
        {
            INeuralDataSet trainingSet = new BasicNeuralDataSet(AndInput, AndIdeal);
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationRamp(), true, 25));
            network.AddLayer(new BasicLayer(new ActivationRamp(), true, 75));
            network.AddLayer(new BasicLayer(new ActivationRamp(), true, 50));
            network.AddLayer(new BasicLayer(new ActivationRamp(), true, 20));
            network.Structure.FinalizeStructure();
            network.Reset();

            ITrain train = new Backpropagation(network, trainingSet, 0.02, 0.3);

            int epoch = 1;
            do
            {
                train.Iteration();

                Console.WriteLine($"{train.Error}");
                epoch++;
            } while ((epoch < MaxEpoch) && (train.Error > AcceptableError));

            // 5x5 bitmap of the glyph to recognize.
            var input = new BasicMLData(new double[25] {
                1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1
            });

            int best = network.Winner(input);

            Console.WriteLine($"Rozpoznano: {_literki[best]}");

            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);

                // Replaces the original 20 hand-duplicated interpolation slots
                // with a loop + join; output text is identical.
                var parts = new string[20];
                for (int i = 0; i < 20; i++)
                {
                    parts[i] = output[i].ToString("0.000");
                }
                Console.WriteLine($"wynik : {string.Join(" ", parts)}");
            }

            Console.ReadKey();
        }
Beispiel #21
0
        /// <summary>
        /// Trains with batch-size-1 backpropagation for the configured number of
        /// iterations, recording training and validation error each epoch.
        /// </summary>
        /// <param name="trainingData">Raw training rows (normalized internally).</param>
        /// <param name="validationData">Raw validation rows; the prepared set may be null.</param>
        /// <returns>Rows of { epoch, training error, validation error }; -1 marks "no validation data".</returns>
        public double[][] Train(double[][] trainingData, double[][] validationData)
        {
            var errorRows = new List <double[]>();

            PrepareNormalizerFor(trainingData, validationData);

            var trainingSet   = PrepareSet(trainingData);
            var validationSet = PrepareSet(validationData);

            var training = new Backpropagation(_network, trainingSet, _settings.LearningRate, _settings.Momentum)
            {
                BatchSize = 1
            };

            for (int epoch = 0; epoch < _settings.Iterations; epoch++)
            {
                training.Iteration();

                // Regression problems report raw network error; classification
                // problems report the misclassification rate.
                double trainingError;
                double testingError = -1;
                if (_settings.Type == ProblemType.Regression)
                {
                    trainingError = _network.CalculateError(trainingSet);
                    if (validationSet != null)
                    {
                        testingError = _network.CalculateError(validationSet);
                    }
                }
                else
                {
                    trainingError = CalculateClassificationError(trainingSet);
                    if (validationSet != null)
                    {
                        testingError = CalculateClassificationError(validationSet);
                    }
                }

                var row = new[] { (double)epoch, trainingError, testingError };
                errorRows.Add(row);
                Console.WriteLine($"Epoch #{epoch} [{training.Error}] TrainingError: {row[1]} ValidationError: {row[2]}");
            }
            training.FinishTraining();

            return errorRows.ToArray();
        }
        /// <summary>
        /// Builds and briefly trains a tiny 2-5-1 sigmoid network on a single
        /// (0, CarPrice) sample, then returns CarPrice divided by the network's
        /// output for that sample; returns 0 if the data set is empty.
        /// </summary>
        /// <param name="CarPrice">Price fed in as the second input feature and used as the dividend.</param>
        public static double Encog_Neural(double CarPrice)
        {
            double[][] x =
            {
                new double[] { 0.0, CarPrice },
            };

            double[][] y =
            {
                new double[] { 1.0 }
            };

            // 2-5-1 sigmoid network, all layers biased.
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet dataset = new BasicMLDataSet(x, y);
            ITrain learner = new Backpropagation(network, dataset);

            // Fixed 100-iteration training run.
            for (int iteration = 0; iteration < 100; iteration++)
            {
                learner.Iteration();
            }

            // Only the first (and only) pair matters; the loop merely pulls it
            // out of the data-set enumerable.
            foreach (BasicMLDataPair pair in dataset)
            {
                IMLData neuralResult = network.Compute(pair.Input);
                return CarPrice / neuralResult[0];
            }
            return 0;
        }
Beispiel #23
0
        /// <summary>
        /// Trains the classification network with backpropagation, exporting the
        /// per-iteration error trace to disk, and returns the data set used.
        /// (Default coefficients chosen so that training at least works somehow.)
        /// </summary>
        /// <param name="siec">Network to train in place.</param>
        /// <param name="doNauki">Classification data: points and expected class vectors.</param>
        /// <param name="wspolczynnikNauki">Learning rate.</param>
        /// <param name="bezwladnosc">Momentum.</param>
        /// <returns>The data set built from the training data.</returns>
        static IMLDataSet UczSiec(BasicNetwork siec, DaneKlasyfikacja doNauki, double wspolczynnikNauki = 0.003, double bezwladnosc = 0.01)
        {
            IMLDataSet dataSet = new BasicMLDataSet(doNauki.punkty.ToArray(), doNauki.klasyWej.ToArray());
            var trainingErrors = new List <double>();
            IMLTrain train = new Backpropagation(siec, dataSet, wspolczynnikNauki, bezwladnosc);

            const int maxIter = 5000;
            int iter = 1;
            do
            {
                train.Iteration();
                Console.WriteLine("Iteracja #{0} Blad {1:0.0000}", iter, train.Error);
                trainingErrors.Add(train.Error);
                iter++;
            } while (train.Error >= 0.03 && iter < maxIter);

            EksportujBledyTreningu(sciezkaKlasyfikacjaBledyTreningu, trainingErrors);
            train.FinishTraining();
            return dataSet;
        }
Beispiel #24
0
        /// <summary>
        /// Trains the regression network with backpropagation, exporting the
        /// per-iteration error trace to disk, and returns the data set used.
        /// </summary>
        /// <param name="siec">Network to train in place.</param>
        /// <param name="doNauki">Regression data: input X values and expected Y values.</param>
        /// <param name="wspolczynnikNauki">Learning rate.</param>
        /// <param name="bezwladnosc">Momentum.</param>
        /// <returns>The data set built from the training data.</returns>
        static IMLDataSet UczSiec(BasicNetwork siec, DaneRegresja doNauki, double wspolczynnikNauki = 0.003, double bezwladnosc = 0.01)
        {
            IMLDataSet dataSet = new BasicMLDataSet(doNauki.wejscioweX.ToArray(), doNauki.oczekiwaneY.ToArray());
            var trainingErrors = new List <double>();
            IMLTrain train = new Backpropagation(siec, dataSet, wspolczynnikNauki, bezwladnosc);

            const int maxIter = 5000;
            int iter = 1;
            do
            {
                train.Iteration();
                Console.WriteLine("Iteracja #{0} Blad {1:0.0000}", iter, train.Error);
                trainingErrors.Add(train.Error);
                iter++;
            } while (train.Error >= 0.001 && iter < maxIter);

            EksportujBledyTreningu(sciezkaRegresjaBledyTreningu, trainingErrors);
            train.FinishTraining();
            return dataSet;
        }
Beispiel #25
0
        /// <summary>
        /// Trains <paramref name="network"/> with plain backpropagation (learning
        /// rate 0.7, momentum 0.2) until the error falls below 0.001, logging the
        /// error after every iteration.
        /// </summary>
        /// <param name="network">Network to train; mutated in place.</param>
        /// <param name="trainingSet">Supervised training data.</param>
        /// <returns>The same network instance, after training.</returns>
        private static BasicNetwork TrainBasicNetwork(BasicNetwork network, BasicMLDataSet trainingSet)
        {
            var trainerAlgorithm = new Backpropagation(network, trainingSet, 0.7, 0.2);
            // Alternative trainers that can be swapped in:
            //var trainerAlgorithm = new ResilientPropagation(network, trainingSet);
            //var trainerAlgorithm = new ManhattanPropagation(network, trainingSet, 0.001)
            //var trainerAlgorithm = new ScaledConjugateGradient(network, trainingSet);
            //var trainerAlgorithm = new LevenbergMarquardtTraining(network, trainingSet);
            //var trainerAlgorithm = new QuickPropagation(network, trainingSet, 2.0);

            // BUG FIX: the original loop checked only the error and would spin
            // forever if training never converged below the threshold; cap it.
            const int maxIterations = 100000;
            var iteration = 0;

            do
            {
                trainerAlgorithm.Iteration();
                iteration++;
                // Incrementing before logging keeps the printed count equal to the
                // number of iterations actually performed (the original started at 2).
                Console.WriteLine($"Iteration Num : {iteration}, Error : {trainerAlgorithm.Error}");
            } while (trainerAlgorithm.Error > 0.001 && iteration < maxIterations);
            trainerAlgorithm.FinishTraining();

            return network;
        }
Beispiel #26
0
        /// <summary>
        /// Builds a 2-3-1 sigmoid network, trains it for 3000 epochs on two fixed
        /// samples, then probes it with pairs (t, t) for t in [0, 5] and appends
        /// all results to richTextBox2.
        /// </summary>
        private void button2_Click(object sender, EventArgs e)
        {
            // Two hand-picked training samples: (0.1, 0.2) -> 0.3 and (0.4, 0.3) -> 0.7.
            var inputs = new[] { new double[] { 0.1, 0.2 }, new double[] { 0.4, 0.3 } };
            var ideals = new[] { new double[] { 0.3 }, new double[] { 0.7 } };
            var dataset = new BasicMLDataSet(inputs, ideals);

            // Fully connected sigmoid network with bias neurons on every layer.
            var net = new BasicNetwork();
            net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            net.Structure.FinalizeStructure();
            net.Reset();

            var train = new Backpropagation(net, dataset, 0.5, 0.1);

            // Fixed 3000 training epochs; log the error every 100th epoch.
            for (int epoch = 0; epoch < 3000; epoch++)
            {
                train.Iteration();
                if (epoch % 100 == 0)
                {
                    richTextBox2.AppendText("Época " + epoch.ToString() + " Erro " + train.Error + Environment.NewLine);
                }
            }

            // Probe the trained network on pairs (t, t); t accumulates in 0.05 steps.
            for (double t = 0.0; t <= 5; t += 0.05)
            {
                IMLData probe = new BasicMLData(new double[] { t, t });
                IMLData response = net.Compute(probe);
                var values = new double[response.Count];
                response.CopyTo(values, 0, response.Count);
                richTextBox2.AppendText(" " + t + "+" + t + "=" + values[0] + Environment.NewLine);
            }
        }
        /// <summary>
        /// Background training loop: runs 10000 backpropagation iterations on the
        /// form's network and refreshes the status text every 100 iterations.
        /// </summary>
        public void ThreadProc()
        {
            const int max = 10000;

            Train train = new Backpropagation(this.network, PruneSelectiveForm.XOR_INPUT,
                                              this.obtainIdeal(), 0.7, 0.9);

            int completed = 0;
            while (completed < max)
            {
                train.Iteration();
                completed++;

                // Update the UI only once per 100 iterations to avoid flooding it.
                if (completed % 100 == 0)
                {
                    SetText("Cycles Left:" + (max - (completed - 1)) + ",Error:"
                            + train.Error);
                }
            }
        }
Beispiel #28
0
        /// <summary>
        /// Trains <paramref name="network"/> with backpropagation hybridised with
        /// simulated annealing (greedy + hybrid strategies), stopping once a
        /// <see cref="StopTrainingStrategy"/> detects stalled improvement.
        /// </summary>
        /// <param name="network">Network to train; mutated in place.</param>
        /// <param name="minis">Training data set.</param>
        /// <returns>The final training error of the main trainer.</returns>
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            Backpropagation trainMain = new Backpropagation(network, minis, 0.0001, 0.6);

            // 0 lets Encog determine the thread count automatically.
            trainMain.ThreadCount = 0;

            // Secondary trainer used by the hybrid strategy to escape local minima.
            ICalculateScore score    = new TrainingSetScore(minis);
            IMLTrain        trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            // IMLTrain trainMain = new Backpropagation(network, minis, 0.0001, 0.01);

            // Stop when improvement drops below 0.0001 over 200 iterations.
            StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            //prune strategy not in GIT!...Removing it.
            //PruneStrategy strategypruning = new PruneStrategy(0.91d, 0.001d, 10, network,minis, 0, 20);
            //trainMain.AddStrategy(strategypruning);

            // Initial console-driven training pass (duration in minutes).
            EncogUtility.TrainConsole(trainMain, network, minis, 15.2);

            var sw = Stopwatch.StartNew();
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();

                Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Genetic Iteration:" + trainAlt.IterationNumber);
            }
            sw.Stop();
            // BUG FIX: the original printed TimeSpan.FromMilliseconds(ms).Seconds,
            // which is only the seconds *component* (0-59) and wrong past a minute;
            // TotalSeconds is the full elapsed duration.
            Console.WriteLine(@"Total elapsed time in seconds:" + sw.Elapsed.TotalSeconds);

            return trainMain.Error;
        }
        /// <summary>
        /// Runs a backpropagation training session on <c>this.Network</c>, writing
        /// each epoch's error to the resolved <see cref="IOutput"/> and recording
        /// it in <c>learningProcess</c>.
        /// </summary>
        /// <param name="iterationCount">Upper bound on epochs. The do-while always
        /// runs at least one epoch and at most iterationCount - 1 (pre-existing
        /// behavior, preserved for callers).</param>
        public void StartLearning(int iterationCount)
        {
            this.resultList = new ResultsList();

            this.learningProcess = new List <IterationError>();

            this.Network.Structure.FinalizeStructure();
            Network.Reset();
            IOutput writer = MyCore.Resolve <IOutput>();

            Propagation train = new Backpropagation(this.Network, this.TrainingSet, this.LearningRate, this.TheMomentum);
            // train.BatchSize = 1;

            int epoch = 1;

            do
            {
                train.Iteration();
                writer.Write(String.Format(@"Epoch # {0} Error: {1}", epoch, train.Error));
                // BUG FIX: record the error under the epoch that produced it; the
                // original incremented epoch before Add(), shifting every recorded
                // entry one epoch later than the logged message.
                this.learningProcess.Add(new IterationError(epoch, train.Error));
                epoch++;
            } while (epoch < iterationCount);
        }
Beispiel #30
0
        /// <summary>
        /// Builds a 2-2-1 network, trains it on two fixed samples until the error
        /// drops below 0.01 or 3500 epochs elapse, then prints each sample's
        /// computed output against its ideal value in richTextBox1.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            // Two training samples: (0.1, 0.2) -> 0.3 and (0.4, 0.3) -> 0.4.
            double[][] x = { new double[] { 0.1, 0.2 },
                             new double[] { 0.4, 0.3 } };
            double[][] y = { new double[] { 0.3 },
                             new double[] { 0.4 } };

            BasicNetwork rede = new BasicNetwork();

            rede.AddLayer(new BasicLayer(2));
            rede.AddLayer(new BasicLayer(2));
            rede.AddLayer(new BasicLayer(1));
            rede.Structure.FinalizeStructure();
            rede.Reset();
            BasicMLDataSet dataset = new BasicMLDataSet(x, y);

            Backpropagation propagation = new Backpropagation(rede, dataset, 0.3, 0.7);
            int             epoch       = 0;

            while (true)
            {
                propagation.Iteration();
                richTextBox1.AppendText("Época " + epoch.ToString() + " Erro " + propagation.Error + Environment.NewLine);
                epoch++;

                if (epoch > 3500 || propagation.Error < 0.01)
                {
                    break;
                }
            }

            foreach (IMLDataPair d in dataset)
            {
                IMLData o = rede.Compute(d.Input);
                // BUG FIX: the original printed d.Input[0] under the "Ideal" label;
                // the expected (ideal) output is d.Ideal[0].
                richTextBox1.AppendText(" Saída " + o + " Ideal " + d.Ideal[0] + Environment.NewLine);
            }
        }