Example #1
        public void TestRPROPCont()
        {
            IMLDataSet   trainingSet = XOR.CreateXORDataSet();
            BasicNetwork net1        = XOR.CreateUnTrainedXOR();
            BasicNetwork net2        = XOR.CreateUnTrainedXOR();

            ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
            ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);

            rprop1.Iteration();
            rprop1.Iteration();

            rprop2.Iteration();
            rprop2.Iteration();

            TrainingContinuation cont = rprop2.Pause();

            ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);

            rprop3.Resume(cont);

            rprop1.Iteration();
            rprop3.Iteration();


            for (int i = 0; i < net1.Flat.Weights.Length; i++)
            {
                Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
            }
        }
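Examples #1 and #2 rely on a small XOR helper class from the Encog test utilities that is not reproduced on this page. Below is a minimal sketch of what such a helper could look like; the layer sizes, field names, and fixed randomizer seed are assumptions rather than the actual Encog test code. The only property the tests depend on is that CreateUnTrainedXOR returns identically initialized networks on every call.

using Encog.MathUtil.Randomize;
using Encog.ML.Data;
using Encog.ML.Data.Basic;
using Encog.Neural.Networks;
using Encog.Util.Simple;

public static class XOR
{
    // The XOR truth table: two inputs, one output.
    public static readonly double[][] XorInput =
    {
        new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 }, new[] { 1.0, 1.0 }
    };

    public static readonly double[][] XorIdeal =
    {
        new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 }
    };

    public static IMLDataSet CreateXORDataSet()
    {
        return new BasicMLDataSet(XorInput, XorIdeal);
    }

    public static BasicNetwork CreateUnTrainedXOR()
    {
        // A 2-4-1 feedforward network; the fixed seed makes the initial weights
        // identical on every call, which is what the pause/resume tests assert against.
        BasicNetwork network = EncogUtility.SimpleFeedForward(2, 4, 0, 1, false);
        new ConsistentRandomizer(-1, 1, 50).Randomize(network);
        return network;
    }
}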
Example #2
        public void TestRPROPContPersistEG()
        {
            IMLDataSet   trainingSet = XOR.CreateXORDataSet();
            BasicNetwork net1        = XOR.CreateUnTrainedXOR();
            BasicNetwork net2        = XOR.CreateUnTrainedXOR();

            ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
            ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);

            rprop1.Iteration();
            rprop1.Iteration();

            rprop2.Iteration();
            rprop2.Iteration();

            TrainingContinuation cont = rprop2.Pause();

            EncogDirectoryPersistence.SaveObject(EG_FILENAME, cont);
            TrainingContinuation cont2 = (TrainingContinuation)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

            ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);

            rprop3.Resume(cont2);

            rprop1.Iteration();
            rprop3.Iteration();


            for (int i = 0; i < net1.Flat.Weights.Length; i++)
            {
                Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
            }
        }
Example #3
        public void Perform(int thread)
        {
            var stopwatch = new Stopwatch();

            stopwatch.Start();
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(INPUT_COUNT));
            network.AddLayer(new BasicLayer(HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(OUTPUT_COUNT));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet training = RandomTrainingFactory.Generate(1000, 50000,
                                                                 INPUT_COUNT, OUTPUT_COUNT, -1, 1);

            var rprop = new ResilientPropagation(network, training);

            rprop.ThreadCount = thread;
            for (int i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            stopwatch.Stop();
            Console.WriteLine("Result with " + thread + " was " + stopwatch.ElapsedMilliseconds + "ms");
        }
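Perform above times five RPROP iterations for a given thread count. A possible driver is sketched below; it is not part of the original project, and MultiBench is a placeholder for whatever class declares Perform. It simply sweeps a few thread counts so the printed timings can be compared.

using System;

public static class Program
{
    public static void Main(string[] args)
    {
        var benchmark = new MultiBench();   // placeholder for the class that defines Perform
        foreach (int threads in new[] { 1, 2, 4, Environment.ProcessorCount })
        {
            benchmark.Perform(threads);     // prints "Result with N was Xms" for each count
        }
    }
}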
Example #4
        /// <summary>
        /// Evaluate one network.
        /// </summary>
        /// <param name="context">The job context.</param>
        public override void PerformJobUnit(JobUnitContext context)
        {
            BasicNetwork network = (BasicNetwork)context.JobUnit;

            // train the neural network
            ITrain train = new ResilientPropagation(network, this.training);

            for (int i = 0; i < this.iterations; i++)
            {
                train.Iteration();
            }

            double error = train.Error;

            if ((error < this.bestResult) || (this.bestNetwork == null))
            {
#if logging
                if (this.logger.IsDebugEnabled)
                {
                    this.logger.Debug("Prune found new best network: error="
                                      + error + ", network=" + network);
                }
#endif
                this.bestNetwork = network;
                this.bestResult  = error;
            }
            this.currentTry++;

            this.ReportStatus(context,
                              "Current: " + PruneIncremental.NetworkToString(network)
                              + ", Best: "
                              + PruneIncremental.NetworkToString(this.bestNetwork));
        }
Example #5
        /// <summary>
        ///     The entry point for this example. If you would like to make this example
        ///     stand alone, add it to its own project and rename this method to Main.
        /// </summary>
        /// <param name="args">Not used.</param>
        public static void ExampleMain(string[] args)
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.FinalizeStructure();
            network.Reset();

            var trainingData = BasicData.ConvertArrays(XOR_INPUT, XOR_IDEAL);

            // train the neural network
            var train = new ResilientPropagation(network, trainingData);

            var epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoch #" + epoch + " Error:" + train.LastError);
                epoch++;
            } while (train.LastError > 0.01);

            // test the neural network
            Console.WriteLine("Neural Network Results:");
            for (var i = 0; i < XOR_INPUT.Length; i++)
            {
                var output = network.ComputeRegression(XOR_INPUT[i]);
                Console.WriteLine(string.Join(",", XOR_INPUT[i])
                                  + ", actual=" + string.Join(",", output)
                                  + ",ideal=" + string.Join(",", XOR_IDEAL[i]));
            }
        }
Example #6
        static void Main(string[] args)
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 4));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.Structure.FinalizeStructure();
            network.Reset();
            IMLDataSet trainingSet = new BasicMLDataSet(SensoriInput, AttuatoriOutput);
            IMLTrain   train       = new ResilientPropagation(network, trainingSet);
            int        epoch       = 1;

            do
            {
                /* Run one weight-update iteration */
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.001); /* Iterate until a tolerable error is reached */

            /* Test the MLP */
            Console.WriteLine("\r\n+------------------------------------+");
            Console.WriteLine("|Neural Network Results:             |");
            Console.WriteLine("+------------------------------------+");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine("Input:" + pair.Input[0] + " - " + pair.Input[1] + " - " + pair.Input[2] + " - " + pair.Input[3]
                                  + "\tactual=" + Math.Round(output[0], 2) + " - " + Math.Round(output[1], 2) + " - " + Math.Round(output[2], 2) + " - " + Math.Round(output[3], 2) + " - " + Math.Round(output[4], 2)
                                  + "\tideal=" + pair.Ideal[0] + " - " + pair.Ideal[1] + " - " + pair.Ideal[2] + " - " + pair.Ideal[3] + " - " + pair.Ideal[4]);
            }
            Console.Read();
        }
Example #7
        public void Run()
        {
            // Create the neural network with its layers
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Create the training set
            IMLDataSet conjuntoEntrenamiento = new BasicMLDataSet(entradas, salidas);

            // Train with resilient propagation
            IMLTrain train = new ResilientPropagation(network, conjuntoEntrenamiento);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoca #" + epoch + " Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine("Resultados:");
            foreach (IMLDataPair pair in conjuntoEntrenamiento)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #8
        private void BtnEntrenar_Click(object sender, EventArgs e)
        {
            this.con.Open();

            var redN = new BasicNetwork();

            redN.AddLayer(new BasicLayer(null, false, imagen0.Height * imagen0.Width));
            redN.AddLayer(new BasicLayer(new ActivationLinear(), false, 2));
            redN.AddLayer(new BasicLayer(new ActivationLinear(), true, 1));
            redN.Structure.FinalizeStructure();
            redN.Reset();

            IMLDataSet Set = new BasicMLDataSet(E, IDEAL);


            IMLTrain train = new ResilientPropagation(redN, Set);


            do
            {
                CicloE++;
                train.Iteration();
                cadena.Add(train.Error);
                cadena2.Add(CicloE);
            }while (train.Error > 0.01);

            timer1.Enabled = true;
            timer1.Start();
            char i1;
            char i2;

            i1 = txtNombre.Text.ToUpper()[0];
            i2 = txtApellido.Text.ToUpper()[0];
            EncogDirectoryPersistence.SaveObject(new FileInfo(i1.ToString() + i2.ToString() + ".txt"), redN);
        }
Example #9
        public void TrainNetwork(FileInfo ann, FileInfo trainingFile)
        {
            try
            {
                var network     = (BasicNetwork)EncogDirectoryPersistence.LoadObject(ann);
                var trainingSet = EncogUtility.LoadCSV2Memory(trainingFile.ToString(),
                                                              network.InputCount, network.OutputCount, true, CSVFormat.English, false);
                var trainerAlgorithm = new ResilientPropagation(network, trainingSet);                  // 16617 ==> e 0.1
                //var trainerAlgorithm = new QuickPropagation(network, trainingSet, 2.0);
                //var trainerAlgorithm = new ManhattanPropagation(network, trainingSet, 0.001);           //
                //var trainerAlgorithm = new ScaledConjugateGradient(network, trainingSet);                   // 73799  ==> e 0.1
                //var trainerAlgorithm = new LevenbergMarquardtTraining(network, trainingSet);          // 32750 ==> e 0.1
                var iteration = 1;
                do
                {
                    trainerAlgorithm.Iteration();
                    Console.WriteLine("Epoch : {0} Error : {1}", iteration, trainerAlgorithm.Error);
                    iteration++;
                } while (trainerAlgorithm.Error > 0.1);

                EncogDirectoryPersistence.SaveObject(ann, (BasicNetwork)network);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
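A hypothetical call site for TrainNetwork above; NetworkTrainer and both file names are placeholders. The .eg file must already contain a saved BasicNetwork whose InputCount and OutputCount match the CSV layout.

// Load an existing network, train it against a CSV file, and save it back.
var trainer = new NetworkTrainer();   // placeholder for the class that defines TrainNetwork
trainer.TrainNetwork(new FileInfo("network.eg"), new FileInfo("training.csv"));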
Example #10
        public void train(List <Verse> verse)
        {
            // prepare input and ideal vectors
            // input <- ClassifiableText text vector
            // ideal <- positonValue vector
            //

            double[][] input = getInput(verse);
            double[][] ideal = getIdeal(verse);

            // train
            //

            Propagation train = new ResilientPropagation(network, new BasicMLDataSet(input, ideal));

            train.ThreadCount = 16;

            do
            {
                train.Iteration();
                notifyObservers("Training Classifier for '" + positon.getName() + "' positon. Errors: " + String.Format("%.2f", train.Error * 100) + "%. Wait...");
            } while (train.Error > 0.01);

            train.FinishTraining();
            notifyObservers("Classifier for '" + positon.getName() + "' positon trained. Wait...");
        }
Example #11
        static void Main(string[] args)
        {
            INeuralDataSet trainingSet = new BasicNeuralDataSet(AndInput, AndIdeal);
            var            network     = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            ITrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine($"Epoch no {epoch}. Error: {train.Error}");
                epoch++;
            } while ((epoch < MaxEpoch) && (train.Error > AcceptableError));

            Console.WriteLine("\nAnd function Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine($"{pair.Input[0]} AND {pair.Input[1]} should be: {pair.Ideal[0]} actual value is: {output[0]}");
            }


            Console.ReadKey();
        }
Example #12
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #13
        private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
        {
            worker.ReportProgress(0, "Creating Network...");
            BasicNetwork Network = new BasicNetwork();

            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
            Network.Structure.FinalizeStructure();
            Network.Reset();
            DataContainer.NeuralNetwork.Network = Network;

            ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);

            worker.ReportProgress(0, "Running Training: Epoch 0");
            for (int i = 0; i < 200; i++)
            {
                training.Iteration();
                worker.ReportProgress(0, "Running Training: Epoch " + (i + 1).ToString() + "     Current Training Error : " + training.Error.ToString());
                if (worker.CancellationPending == true)
                {
                    completed = true;
                    return;
                }
            }
            completed = true;
        }
Example #14
        public void Run()
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            IMLTrain   train       = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch # " + epoch + @" Error: " + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            train.FinishTraining();

            Console.WriteLine(@"Neural Network Results: ");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @" , " + pair.Input[1] + @" , actual = " + output[0] + @" , ideal = " + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
        }
Example #15
        public void Train()
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, Dataset.InputSize));
            for (int i = 0; i <= Dataset.IdealSize; i++)
            {
                network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, Dataset.InputSize));
            }
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, Dataset.IdealSize));
            network.Structure.FinalizeStructure();
            network.Reset();

            var train = new ResilientPropagation(network, Dataset);
            var epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine($"Epoch: # {epoch}; Error: {train.Error};");
                epoch++;
            } while (train.Error > 0.1);

            Save(network);
        }
Example #16
        static void Train(ILogger logger, ResilientPropagation train, double errorDeltaThreshold,
                          double errorThreshold)
        {
            var  watch        = Stopwatch.StartNew();
            var  learningTime = default(TimeSpan);
            double? errorDelta;
            var  errorsList = new List <double>();
            bool errorDeltaThresholdReached;
            bool errorThresholdAchieved;

            do
            {
                logger.LogInfo(@"Training... ");
                train.Iteration();
                errorsList.Add(train.Error);
                errorDelta = errorsList.Count > 1
                    ? (double?)(errorsList[train.IterationNumber - 1] - errorsList[train.IterationNumber - 2])
                    : (double?)null;
                errorDeltaThresholdReached = (!errorDelta.HasValue || errorDelta.Value < errorDeltaThreshold);
                errorThresholdAchieved     = train.Error < errorThreshold;
                logger.LogInfo(
                    $@"iteration {train.IterationNumber} completed in {watch.ElapsedMilliseconds.AsTime()}; error = {
                            train.Error
                        }, error delta = {errorDelta?.ToString() ?? "?"}");
                learningTime += watch.Elapsed;
                watch.Restart();
            } while (!errorThresholdAchieved || !errorDeltaThresholdReached);

            logger.LogInfo(
                $@"Training finished after {learningTime.TotalMilliseconds.AsTime()}, at iteration {
                        train.IterationNumber
                    }; error = {train.Error}");
        }
Example #17
        private void TrainNetwork(DateTime trainFrom, DateTime trainTo, TrainingStatus status)
        {
            if (_input == null || _ideal == null)
            {
                CreateTrainingSets(trainFrom, trainTo);
            }
            _trainThread = Thread.CurrentThread;
            int    epoch = 1;
            ITrain train = null;

            try
            {
                var trainSet = new BasicNeuralDataSet(_input, _ideal);
                train = new ResilientPropagation(_network, trainSet);
                double error;
                do
                {
                    train.Iteration();
                    error = train.Error;
                    status?.Invoke(epoch, error, TrainingAlgorithm.Resilient);
                    epoch++;
                } while (error > MaxError);
            }
            catch (ThreadAbortException) { _trainThread = null; }
            finally
            {
                train?.FinishTraining();
            }
            _trainThread = null;
        }
Example #18
 public static BasicNetwork TrainNetwork(BasicNetwork network, double[][] trainingData, double[][] ideals,
                                         int maxIterationsBeforeCompletion            = 5000, double tolerance = 0.001, string connectionId = null,
                                         Func <string, string, bool, bool> OutputData = null)
 {
     //Check we have data and a model to train
     if (trainingData.Any() && ideals.Any() && network != null)
     {
         //Create Dataset - data and correct classifications (matched by position)
         INeuralDataSet trainingSet = new BasicNeuralDataSet(trainingData, ideals);
          //Propagate the data through the network
         ITrain train = new ResilientPropagation(network, trainingSet);
         //Set the iteration count to 0
         var epoch = 0;
         //Train
         do
         {
             train.Iteration();
             //If the delegate is defined, output the progress to it
             if (OutputData != null)
             {
                 OutputData(connectionId, "Epoch #" + epoch + " Error:" + train.Error, true);
             }
             epoch++;
         } while ((epoch < maxIterationsBeforeCompletion) && (train.Error > tolerance));
     }
     //Return the trained network
     return(network);
 }
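A hypothetical call of the helper above, passing a console delegate for progress reporting; the argument values are illustrative only.

// Train for at most 2000 epochs or until the error drops below 0.01,
// echoing each epoch's error to the console; connectionId is not needed here.
BasicNetwork trained = TrainNetwork(
    network, trainingData, ideals,
    maxIterationsBeforeCompletion: 2000,
    tolerance: 0.01,
    connectionId: null,
    OutputData: (id, message, flush) => { Console.WriteLine(message); return true; });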
Example #19
        // Train the network using the given settings.
        public static void Train(ref NetworkContainer container, ref EncogTrainSettings settings)
        {
            if (settings.maxTrainingError <= 0)
            {
                throw new EncogException("maxTrainingError must be greater than 0");
            }

            // Create training data.
            IMLDataSet trainingSet = new BasicMLDataSet(settings.dataset, settings.answers);

            // Train the neural network.
            IMLTrain train = new ResilientPropagation(container.network, trainingSet);

            uint epoch = 0;

            do
            {
                train.Iteration();
                if (container.verbose)
                {
                    Console.WriteLine("Epoch # " + epoch + " Error: " + train.Error);
                }
                epoch++;
            } while (train.Error > settings.maxTrainingError && (epoch < settings.maxEpochCount || settings.maxEpochCount == 0));
        }
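NetworkContainer and EncogTrainSettings are project-specific types that are not shown on this page. Their minimal shapes, inferred only from how Train uses them (the real classes certainly carry more members), would be roughly:

public class NetworkContainer
{
    public BasicNetwork network;     // the network trained in place
    public bool verbose;             // when true, Train logs every epoch
}

public class EncogTrainSettings
{
    public double[][] dataset;       // training inputs
    public double[][] answers;       // ideal outputs
    public double maxTrainingError;  // stop once train.Error falls below this
    public uint maxEpochCount;       // 0 means no epoch limit
}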
Example #20
        public static void Main()
        {
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            INeuralDataSet trainingSet = new BasicNeuralDataSet(XorInput, XorIdeal);

            ITrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            var timer = Stopwatch.StartNew();
            do
            {
                train.Iteration();
                epoch++;
            } while ((epoch < 50000) && (train.Error > 0.0001));

            timer.Stop();

            Console.WriteLine("Neural Network Results:");
            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + "," + pair.Input[1]
                        + ", actual=" + output[0] + ", ideal=" + pair.Ideal[0]);
            }
            Console.WriteLine($"Completed {epoch} epochs in {timer.Elapsed} ({(float)timer.ElapsedMilliseconds / epoch} ms per epoch)");
            Console.ReadLine();
        }
Example #21
        private static EncogTrainingResponse TrainNetwork2(BasicNetwork network, TrainingData training, double maxError, CancellationToken cancelToken, double? maxSeconds = null)
        {
            //TODO: When the final layer is softmax, the error seems to be higher.  Probably because the training outputs need to be run through softmax

            const int MAXITERATIONS = 5000;

            INeuralDataSet trainingSet = new BasicNeuralDataSet(training.Input, training.Output);
            ITrain         train       = new ResilientPropagation(network, trainingSet);

            DateTime startTime = DateTime.UtcNow;
            TimeSpan? maxTime = maxSeconds != null ? TimeSpan.FromSeconds(maxSeconds.Value) : (TimeSpan?)null;

            bool success = false;

            //List<double> log = new List<double>();
            int    iteration = 1;
            double error     = double.MaxValue;

            while (true)
            {
                if (cancelToken.IsCancellationRequested)
                {
                    break;
                }

                train.Iteration();

                error = train.Error;
                //log.Add(error);

                iteration++;

                if (double.IsNaN(error))
                {
                    break;
                }
                else if (error < maxError)
                {
                    success = true;
                    break;
                }
                else if (iteration >= MAXITERATIONS)
                {
                    break;
                }
                else if (maxTime != null && DateTime.UtcNow - startTime > maxTime)
                {
                    break;
                }
            }

            //string logExcel = string.Join("\r\n", log);       // paste this into excel and chart it to see the error trend

            train.FinishTraining();

            return(new EncogTrainingResponse(network, success, error, iteration, (DateTime.UtcNow - startTime).TotalSeconds));
        }
Example #22
        private void Train(IMLDataSet trainingSet)
        {
            ITrain training = new ResilientPropagation(Network, trainingSet);
            var    epoch    = 0;

            do
            {
                training.Iteration();
                epoch++;
            }while (training.Error > MaxError && epoch != 1000);
        }
Example #23
        /// <summary>
        /// Trains the network so that the given board state maps to the supplied value,
        /// updating the learned value function.
        /// </summary>
        /// <param name="board">The board position to train on.</param>
        /// <param name="v">The target value for that position.</param>
        public void Train(Board board, double v)
        {
            BasicMLDataSet trainingSet = new BasicMLDataSet();
            BasicMLData    ideal       = new BasicMLData(1);

            ideal[0] = v;
            //trainingSet.Add(ANNAdapter.Adapt(board), ideal);
            trainingSet.Add(ANNAdapter.Adapt192(board), ideal);
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            train.Iteration();
        }
Example #24
        public void Train(BasicNetwork network, IMLDataSet training)
        {
            ITrain train = new ResilientPropagation(network, training);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > MaxError);
        }
Example #25
        public double Evaluate(BasicNetwork network, IMLDataSet training)
        {
            var    rprop         = new ResilientPropagation(network, training);
            double startingError = network.CalculateError(training);

            for (int i = 0; i < ITERATIONS; i++)
            {
                rprop.Iteration();
            }
            double finalError = network.CalculateError(training);

            return(startingError - finalError);
        }
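A hypothetical use of Evaluate above, called from inside the same class: compare how much error two candidate architectures shed over the same number of RPROP iterations on one data set. EncogUtility.SimpleFeedForward, the layer sizes, and the trainingSet variable are illustrative.

// A larger return value means a bigger error reduction over ITERATIONS epochs.
double gainSmall = Evaluate(EncogUtility.SimpleFeedForward(2, 3, 0, 1, false), trainingSet);
double gainLarge = Evaluate(EncogUtility.SimpleFeedForward(2, 8, 0, 1, false), trainingSet);
Console.WriteLine(gainSmall >= gainLarge
    ? "The smaller network improved at least as much."
    : "The larger network improved more.");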
Example #26
            void Train()
            {
                if (Memory.Count > 0)
                {
                    network.Reset();
                    double[][] InputData = new double[Memory.Count][]; // prepare the data for training the network
                    double[][] SenseData = new double[Memory.Count][];
                    for (int i = 0; i < Memory.Count; i++)
                    {
                        InputData[i] = Memory[i];
                        SenseData[i] = MemorySense[i];
                    }
                    IMLDataSet trainingSet = new BasicMLDataSet(InputData, SenseData);
                    IMLTrain   train       = new ResilientPropagation(network, trainingSet);

                    int epoch = 1;

                    double old = 9999;
                    double d   = 999;
                    do
                    {
                        try
                        {
                            train.Iteration();
                        }
                        catch (Exception)
                        {
                            World.Remove(this);
                            Life newlife = new Life(World, x, y);
                            World.Add(newlife);
                            break;
                        }

                        //Console.SetCursorPosition(0, 0); // print the current training status
                        //Console.Write(@"Epoch #" + epoch + @" Error:" + train.Error);
                        epoch++;
                        d   = Math.Abs(old - train.Error);
                        old = train.Error;
                    } while (train.Error > 0.0001 && epoch < 3000 && d > 0.00001);

                    train.FinishTraining();

                    //double sumd=0.0; // compute the total error after training
                    //foreach (IMLDataPair pair in trainingSet)
                    //{
                    //    IMLData output = network.Compute(pair.Input);
                    //    sumd = sumd + Math.Abs(pair.Ideal[0] - output[0]);
                    //    sumd = sumd / trainingSet.InputSize;
                    //}
                }
            }
Example #27
        public void LearnFromString(string request, string responce)
        {
            var        inputs      = createDoubles(request);
            var        outputs     = createDoubles(responce);
            IMLDataSet trainingSet = new BasicMLDataSet(new double[][] { inputs }, new double[][] { outputs });
            IMLTrain   train       = new ResilientPropagation(NeuralNetwork, trainingSet);
            int        epoch       = 1;

            do
            {
                train.Iteration();
                epoch++;
            } while (train.Error > 0.00000000000001);
        }
Example #28
        static void Main(string[] args)
        {
            var inputs  = new List <double[]>();
            var outputs = new List <double[]>();

            var lines = File.ReadLines(@"../../states.txt");

            foreach (var line in lines)
            {
                var lineInput  = new List <double>();
                var lineOutput = new List <double>();

                var weights = line.Split(new char[] { ',' });
                lineInput.AddRange(weights.Take(8).Select(value => double.Parse(value)));
                lineOutput.Add(double.Parse(weights[8]));
                lineInput.AddRange(weights.Skip(9).Select(value => double.Parse(value)));

                inputs.Add(lineInput.ToArray());
                outputs.Add(lineOutput.ToArray());
            }//foreach

            var nnInput = inputs.ToArray();
            var nnIdeal = outputs.ToArray();

            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 16));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 300));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(nnInput, nnIdeal);

            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoch #" + epoch + "Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            EncogDirectoryPersistence.SaveObject(new FileInfo("network.eg"), network);

            Console.ReadKey();
        } //Main
Example #29
        public void TestRPROPConsistency()
        {
            IMLDataSet training = EncoderTrainingFactory.generateTraining(4, false);
            var        network  = EncogUtility.SimpleFeedForward(4, 2, 0, 4, true);

            (new ConsistentRandomizer(-1, 1, 50)).Randomize(network);
            var rprop = new ResilientPropagation(network, training);

            for (var i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            Assert.IsTrue(CompareArray.Compare(ExpectedWeights1, network.Flat.Weights, 0.00001));

            for (var i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            Assert.IsTrue(CompareArray.Compare(ExpectedWeights2, network.Flat.Weights, 0.00001));

            var e = network.CalculateError(training);

            Assert.AreEqual(0.0767386807494191, e, 0.00001);
        }
Example #30
        private void Train(BasicNetwork network, double[][] input, double[][] output)
        {
            IMLDataSet trainingSet = new BasicMLDataSet(input, output);
            IMLTrain   train       = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                epoch++;
            } while (train.Error > 0.04);

            train.FinishTraining();
        }