One problem with the backpropagation algorithm is that the magnitude of the partial derivative is often too large or too small. Further, the learning rate is a single value for the entire neural network. The resilient propagation algorithm instead uses a separate update value (similar to the learning rate) for every neuron connection, and these update values are determined automatically, unlike the learning rate of the backpropagation algorithm. For most training situations, we suggest that the resilient propagation algorithm (this class) be used for training. Three optional parameters can be provided to the resilient training algorithm. Defaults are provided for each, and in nearly all cases these defaults are acceptable, which makes resilient propagation one of the easiest and most efficient training algorithms available. The optional parameters are:

zeroTolerance - How close to zero a number can be and still be considered zero. The default is 0.00000000000000001.
initialUpdate - The initial update value for each matrix value. The default is 0.1.
maxStep - The largest amount by which an update value can step. The default is 50.

Usually you will not need to set these, and you should use the constructor that does not require them.
Inheritance: Propagation
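
The optional parameters are supplied through a constructor overload. A minimal sketch, assuming Encog's four-argument overload (network, training set, initialUpdate, maxStep) and a network/trainingSet built as in Example #1 below; the values shown are simply the documented defaults made explicit:

        // Override initialUpdate and maxStep explicitly; zeroTolerance keeps its default.
        IMLTrain train = new ResilientPropagation(network, trainingSet, 0.1, 50);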
Example #1
        static void Main(string[] args)
        {
            // create a neural network without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));

            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
                epoch++;
            } while (train.Error > 0.01);
            train.FinishTraining();

            Console.WriteLine("Neural Network Results:");
            foreach (IMLDataPair iPair in trainingSet)
            {
                IMLData output = network.Compute(iPair.Input);
                Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[0]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
            }

            EncogFramework.Instance.Shutdown();

            Console.ReadKey();
        }
Example #2
        static void Main(string[] args)
        {
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            var train = new ResilientPropagation(network, trainingSet);
            do
            {
                train.Iteration();
            } while (train.Error > 0.01);

            train.FinishTraining();

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @" , actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
            Console.ReadLine();
        }
        private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
        {
            worker.ReportProgress(0, "Creating Network...");
            BasicNetwork Network = new BasicNetwork();
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
            Network.Structure.FinalizeStructure();
            Network.Reset();
            DataContainer.NeuralNetwork.Network = Network;

            ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
            worker.ReportProgress(0, "Running Training: Epoch 0");
            for(int i = 0; i < 200; i++)
            {
                training.Iteration();
                worker.ReportProgress(0, "Running Training: Epoch " + (i+1).ToString() + "     Current Training Error : " + training.Error.ToString());
                if (worker.CancellationPending)
                {
                    completed = true;
                    return;
                }

            }
            completed = true;
        }
        public void TestRPROPContPersistEG()
        {
            IMLDataSet trainingSet = XOR.CreateXORDataSet();
            BasicNetwork net1 = XOR.CreateUnTrainedXOR();
            BasicNetwork net2 = XOR.CreateUnTrainedXOR();

            ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
            ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);

            rprop1.Iteration();
            rprop1.Iteration();

            rprop2.Iteration();
            rprop2.Iteration();

            TrainingContinuation cont = rprop2.Pause();

            EncogDirectoryPersistence.SaveObject(EG_FILENAME, cont);
            TrainingContinuation cont2 = (TrainingContinuation)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

            ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);
            rprop3.Resume(cont2);

            rprop1.Iteration();
            rprop3.Iteration();


            for (int i = 0; i < net1.Flat.Weights.Length; i++)
            {
                Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
            }
        }
        public void TestRPROPCont()
        {
            IMLDataSet trainingSet = XOR.CreateXORDataSet();
            BasicNetwork net1 = XOR.CreateUnTrainedXOR();
            BasicNetwork net2 = XOR.CreateUnTrainedXOR();

            ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
            ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);

            rprop1.Iteration();
            rprop1.Iteration();

            rprop2.Iteration();
            rprop2.Iteration();

            TrainingContinuation cont = rprop2.Pause();

            ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);
            rprop3.Resume(cont);

            rprop1.Iteration();
            rprop3.Iteration();

            for (int i = 0; i < net1.Flat.Weights.Length; i++)
            {
                Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
            }
        }
        public void TestRPROP()
        {
            IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

            BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();
            IMLTrain rprop = new ResilientPropagation(network, trainingData);
            NetworkUtil.TestTraining(rprop, 0.03);
        }
        public ResilientPropagation TrainNetwork(BasicNetwork network, BasicMLDataSet trainingData)
        {
            var trainedNetwork = new ResilientPropagation(network, trainingData);
            var epoch = 0;
            do
            {
                trainedNetwork.Iteration();
                epoch++;
                Console.WriteLine("Epoch:{0}, Error{1}", epoch, trainedNetwork.Error);
            } while (trainedNetwork.Error > 0.01);

            return trainedNetwork;
        }
        public void TestRPROPFolded()
        {
            IMLDataSet trainingData = XOR.CreateNoisyXORDataSet(10);

            BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();

            var folded = new FoldedDataSet(trainingData);
            IMLTrain train = new ResilientPropagation(network, folded);
            var trainFolded = new CrossValidationKFold(train, 4);

            EncogUtility.TrainToError(trainFolded, 0.2);

            XOR.VerifyXOR((IMLRegression) trainFolded.Method, 0.2);
        }
Example #9
        /// <summary>
        /// Evaluate how many training iterations the network can complete in a
        /// fixed amount of time. Each iteration runs every training pair
        /// through the network.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data to use.</param>
        /// <returns>The number of iterations completed.</returns>
        public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
        {
            // train the neural network
            IMLTrain train = new ResilientPropagation(network, training);

            int iterations = 0;
            var watch = new Stopwatch();
            watch.Start();
            while (watch.ElapsedMilliseconds < (10*Milis))
            {
                iterations++;
                train.Iteration();
            }

            return iterations;
        }
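
A usage sketch for the benchmark above (a hypothetical driver; Milis is assumed to be a millisecond constant defined elsewhere in the class, and network/trainingSet are built as in Example #1):

        static void RunBenchmark(BasicNetwork network, IMLDataSet trainingSet)
        {
            // Count how many RPROP iterations fit in the fixed time window.
            int iterations = EvaluateTrain(network, trainingSet);
            Console.WriteLine("RPROP completed " + iterations + " iterations.");
        }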
Example #10
 public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
 {
     // train the neural network
     IMLTrain train = new ResilientPropagation(network, training);

     int num = 0;
     Stopwatch stopwatch = new Stopwatch();
     stopwatch.Start();
     // 0x2710 = 10,000 ms: iterate for ten seconds
     while (stopwatch.ElapsedMilliseconds < 0x2710L)
     {
         num++;
         train.Iteration();
     }
     return num;
 }
 public double EvaluateMPROP(BasicNetwork network, IMLDataSet data)
 {
     var train = new ResilientPropagation(network, data);
     long start = DateTime.Now.Ticks;
     Console.WriteLine(@"Training 20 Iterations with MPROP");
     for (int i = 1; i <= 20; i++)
     {
         train.Iteration();
         Console.WriteLine("Iteration #" + i + " Error:" + train.Error);
     }
     //train.finishTraining();
     long stop = DateTime.Now.Ticks;
     double diff = new TimeSpan(stop - start).TotalSeconds;
     Console.WriteLine("MPROP Result:" + diff + " seconds.");
     Console.WriteLine("Final MPROP error: " + network.CalculateError(data));
     return diff;
 }
        public int Train(DataSet dataSet)
        {
            Network = new BasicNetwork();
            Network.AddLayer(new BasicLayer(null, true, 8 * 21));
            var first = ((8 * 21 + 4) * FirstLayerParameter);
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)first));
            var second = ((8 * 21 + 4) * SecondLayerParameter);
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)second));
            Network.AddLayer(new BasicLayer(null, false, 1));
            Network.Structure.FinalizeStructure();
            Network.Reset();
            //IMLData x = new BasicNeuralData();
            var set = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
            var ideal = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
            for (int i = 0; i < dataSet.Signatures.Count; i++)
            {
                set[i] = dataSet.Signatures[i].Data.Cast<double>().ToArray();
                ideal[i] = new double[] {1};
            }
            for (int i = dataSet.Signatures.Count; i < dataSet.Signatures.Count  + dataSet.Forgeries.Count; i++)
            {
                set[i] = dataSet.Forgeries[i- dataSet.Signatures.Count].Data.Cast<double>().ToArray();
                ideal[i] = new double[] { 0 };
            }

            IMLDataSet trainingSet = new BasicMLDataSet(set, ideal);

            IMLTrain train = new ResilientPropagation(Network, trainingSet);

            int epoch = 1;
            var errors = new List<double>();
            do
            {
                train.Iteration();
                // Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
                errors.Add(train.Error);

            } while ( epoch < 10000);

            train.FinishTraining();

            return 1;
        }
Example #13
        static void Main(string[] args)
        {
            double[][] XOR_Input =
            {
                new[] {0.0,0.0},
                new[] {1.0,0.0},
                new[] {0.0,1.0},
                new[] {1.0,1.0}
             };

            double[][] XOR_Ideal =
            {
                new[] {0.0},
                new[] {1.0},
                new[] {1.0},
                new[] {0.0}
            };

            var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);

            BasicNetwork network = CreateNetwork();

            var train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            do
            {

                train.Iteration();
                epoch++;
                Console.WriteLine("Iteration No :{0}, Error: {1}", epoch, train.Error);

            } while (train.Error > 0.001);

            foreach (var item in trainingSet)
            {

                var output = network.Compute(item.Input);
                Console.WriteLine("Input : {0}, {1} Ideal : {2} Actual : {3}", item.Input[0], item.Input[1], item.Ideal[0], output[0]);
            }

            Console.WriteLine("press any key to exit...");
            Console.ReadLine();
        }
Example #14
        /// <summary>
        /// Evaluate how many training iterations the network can complete in a
        /// fixed amount of time. Each iteration runs every training pair
        /// through the network.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data to use.</param>
        /// <returns>The number of iterations completed.</returns>
        public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
        {
            // train the neural network
            IMLTrain train = new ResilientPropagation(network, training);

            int iterations = 0;
            const int milis10 = Milis * 10;
            var watch = new Stopwatch();
            watch.Start();
            while (true)
            {
                iterations++;
                train.Iteration();

                // Check the clock only every 256 iterations to reduce overhead;
                // stop once the time window has elapsed.
                if ((iterations & 0xff) == 0 && watch.ElapsedMilliseconds >= milis10) break;
            }

            return iterations;
        }
        public static int Evaluate(BasicNetwork network, IMLDataSet training)
        {
            ResilientPropagation rprop = new ResilientPropagation(network, training);
            int iterations = 0;

            for (;;)
            {
                rprop.Iteration();                
                iterations++;
                if (rprop.Error < TARGET_ERROR)
                {
                    return iterations;
                }

                if (iterations > 1000)
                {
                    return -1;
                }
            }
        }
        public void TestRPROPConsistency()
        {
            IMLDataSet training = EncoderTrainingFactory.generateTraining(4, false);
            var network = EncogUtility.SimpleFeedForward(4, 2, 0, 4, true);
            (new ConsistentRandomizer(-1, 1, 50)).Randomize(network);
            var rprop = new ResilientPropagation(network, training);
            for (var i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            Assert.IsTrue(CompareArray.Compare(ExpectedWeights1, network.Flat.Weights,0.00001));

            for (var i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            Assert.IsTrue(CompareArray.Compare(ExpectedWeights2, network.Flat.Weights, 0.00001));

            var e = network.CalculateError(training);
            Assert.AreEqual(0.0767386807494191, e, 0.00001);
        }
Example #17
        public double Run(List<int> topology, int iterations)
        {
            _Network = new BasicNetwork();
            _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _Features));
            foreach (int layer in topology)
            {
                _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, layer));
            }
            _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            _Network.Structure.FinalizeStructure();
            _Network.Reset();

            ITrain train = new ResilientPropagation(_Network, _TrainingSet);

            for (int i = 0; i < iterations; i++)
            {
                train.Iteration();
            }
            return train.Error;
        }
        private static void Main(string[] args)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            train.FinishTraining();

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
        }
Example #19
        private static void XORTest()
        {
            double[][] XOR_Input =
            {
                new[] {0.0, 0.0},
                new[] {1.0, 0.0},
                new[] {0.0, 1.0},
                new[] {1.0, 1.0}
            };

            double[][] XOR_Ideal =
            {
                new[] {0.0},
                new[] {1.0},
                new[] {1.0},
                new[] {0.0}
            };

            var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);

            var network = CreateNetwork();

            var train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            do
            {
                train.Iteration();
                epoch++;
                Console.WriteLine($"Iteration No: {epoch}, Error: {train.Error}");
            } while (train.Error > 0.001);

            foreach (var item in trainingSet)
            {
                var output = network.Compute(item.Input);
                Console.WriteLine($"Input : {item.Input[0]}, {item.Input[1]}, Ideal: {item.Ideal[0]}, Actual : {output[0]}");
            }
        }
        public void Perform(int thread)
        {
            var stopwatch = new Stopwatch();
            stopwatch.Start();
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(INPUT_COUNT));
            network.AddLayer(new BasicLayer(HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(OUTPUT_COUNT));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet training = RandomTrainingFactory.Generate(1000, 50000,
                                                                 INPUT_COUNT, OUTPUT_COUNT, -1, 1);

            var rprop = new ResilientPropagation(network, training);
            rprop.ThreadCount = thread;
            for (int i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            stopwatch.Stop();
            Console.WriteLine("Result with " + thread + " was " + stopwatch.ElapsedMilliseconds + "ms");
        }
Example #21
            void Train()
            {
                if (Memory.Count>0)
                {
                    network.Reset();
                    double[][] InputData = new double[Memory.Count][]; // prepare the training data for the network
                    double[][] SenseData = new double[Memory.Count][];
                    for (int i = 0; i < Memory.Count; i++)
                    {
                        InputData[i] = Memory[i];
                        SenseData[i] = MemorySense[i];
                    }
                    IMLDataSet trainingSet = new BasicMLDataSet(InputData, SenseData);
                    IMLTrain train = new ResilientPropagation(network, trainingSet);

                    int epoch = 1;

                    double old = 9999; // error from the previous iteration
                    double d = 999;    // change in error between iterations
                    do
                    {
                        train.Iteration();
                        //Console.SetCursorPosition(0, 0); // print the current training status
                        //Console.Write(@"Epoch #" + epoch + @" Error:" + train.Error);
                        epoch++;
                        d = Math.Abs(old - train.Error);
                        old = train.Error;
                    } while (train.Error > 0.0001 && epoch < 3000 && d > 0.00001);

                    train.FinishTraining();

                    //double sumd=0.0; // compute the total error after training
                    //foreach (IMLDataPair pair in trainingSet)
                    //{
                    //    IMLData output = network.Compute(pair.Input);
                    //    sumd = sumd + Math.Abs(pair.Ideal[0] - output[0]);
                    //    sumd = sumd / trainingSet.InputSize;
                    //}
                }
            }
        /// <summary>
        /// This is based off of this article:
        /// http://www.codeproject.com/Articles/54575/An-Introduction-to-Encog-Neural-Networks-for-C
        /// </summary>
        /// <remarks>
        /// Go here for documentation of encog:
        /// http://www.heatonresearch.com/wiki
        /// 
        /// Download link:
        /// https://github.com/encog/encog-dotnet-core/releases
        /// </remarks>
        private void btnXOR_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                _trainingData = null;
                _results = null;

                BasicNetwork network = new BasicNetwork();

                #region Create nodes

                // Create the network's nodes

                //NOTE: Using ActivationSigmoid, because there are no negative values.  If there were negative values, use ActivationTANH
                //http://www.heatonresearch.com/wiki/Activation_Function

                //NOTE: ActivationSigmoid (0 to 1) and ActivationTANH (-1 to 1) are pure but slower.  A cruder but faster function is ActivationElliott (0 to 1) and ActivationElliottSymmetric (-1 to 1)
                //http://www.heatonresearch.com/wiki/Elliott_Activation_Function
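
                //For comparison only (not used by this sample): with inputs normalized to -1..1,
                //the equivalent TANH layers would be added like this:
                //network.AddLayer(new BasicLayer(new ActivationTANH(), true, 2));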

                network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));     // input layer
                network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));     // hidden layer
                network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));     // output layer
                network.Structure.FinalizeStructure();

                // Randomize the links
                network.Reset();

                #endregion

                #region Training data

                // Neural networks must be trained before they are of any use. To train this neural network, we must provide training
                // data. The training data is the truth table for the XOR operator. The XOR has the following inputs:
                double[][] xor_input = new[]
                {
                    new[] { 0d, 0d },
                    new[] { 1d, 0d },
                    new[] { 0d, 1d },
                    new[] { 1d, 1d },
                };

                // And the expected outputs
                double[][] xor_ideal_output = new[]
                {
                    new[] { 0d },
                    new[] { 1d },
                    new[] { 1d },
                    new[] { 0d },
                };

                _trainingData = GetDrawDataFromTrainingData(xor_input, xor_ideal_output);

                #endregion
                #region Train network

                INeuralDataSet trainingSet = new BasicNeuralDataSet(xor_input, xor_ideal_output);

                // This is a good general purpose training algorithm
                //http://www.heatonresearch.com/wiki/Training
                ITrain train = new ResilientPropagation(network, trainingSet);

                List<double> log = new List<double>();

                int trainingIteration = 1;
                do
                {
                    train.Iteration();

                    log.Add(train.Error);

                    trainingIteration++;
                } while ((trainingIteration < 2000) && (train.Error > 0.001));

                // Paste this into excel and chart it to see the error trend
                string logExcel = string.Join("\r\n", log);

                #endregion

                #region Test

                //NOTE: I initially ran a bunch of tests, but the network always returns exactly the same result when given the same inputs
                //var test = Enumerable.Range(0, 1000).
                //    Select(o => new { In1 = _rand.Next(2), In2 = _rand.Next(2) }).

                var test = xor_input.
                    Select(o => new { In1 = Convert.ToInt32(o[0]), In2 = Convert.ToInt32(o[1]) }).
                    Select(o => new
                    {
                        o.In1,
                        o.In2,
                        Expected = XOR(o.In1, o.In2),
                        NN = CallNN(network, o.In1, o.In2),
                    }).
                    Select(o => new { o.In1, o.In2, o.Expected, o.NN, Error = Math.Abs(o.Expected - o.NN) }).
                    OrderByDescending(o => o.Error).
                    ToArray();

                #endregion
                #region Test intermediate values

                // It was only trained with inputs of 0 and 1.  Let's see what it does with values in between

                var intermediates = Enumerable.Range(0, 1000).
                    Select(o => new { In1 = _rand.NextDouble(), In2 = _rand.NextDouble() }).
                    Select(o => new
                    {
                        o.In1,
                        o.In2,
                        NN = CallNN(network, o.In1, o.In2),
                    }).
                    OrderBy(o => o.In1).
                    ThenBy(o => o.In2).
                    //OrderBy(o => o.NN).
                    ToArray();

                #endregion

                #region Serialize/Deserialize

                // Serialize it
                string weightDump = network.DumpWeights();
                double[] dumpArray = weightDump.Split(',').
                    Select(o => double.Parse(o)).
                    ToArray();

                //TODO: Shoot through the layers, and store in some custom structure that can be serialized, then walked through to rebuild on deserialize
                //string[] layerDump = network.Structure.Layers.
                //    Select(o => o.ToString()).
                //    ToArray();

                // Create a clone
                BasicNetwork clone = new BasicNetwork();

                clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
                clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
                clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
                clone.Structure.FinalizeStructure();

                clone.DecodeFromArray(dumpArray);

                // Test the clone
                string cloneDump = clone.DumpWeights();

                bool isSame = weightDump == cloneDump;

                var cloneTests = xor_input.
                    Select(o => new
                    {
                        Input = o,
                        NN = CallNN(clone, o[0], o[1]),
                    }).ToArray();

                #endregion

                #region Store results

                double[] matchValues = new[] { 0d, 1d };
                double matchRange = .03;        //+- 3% of target value would be considered a match

                _results = intermediates.
                    Select(o => Tuple.Create(new Point(o.In1, o.In2), o.NN, IsMatch(o.NN, matchValues, matchRange))).
                    ToArray();

                #endregion
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.ToString(), this.Title, MessageBoxButton.OK, MessageBoxImage.Error);
            }
            finally
            {
                RedrawResults();
            }
        }
 /// <summary>
 /// Train the network, to a specific error, send the output to the console.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="trainingSet">The training set to use.</param>
 /// <param name="error">The error level to train to.</param>
 public static void TrainToError(BasicNetwork network,
                                 IMLDataSet trainingSet, double error)
 {
     Propagation train = new ResilientPropagation(network,
                                                  trainingSet) {ThreadCount = 0};
     TrainToError(train, trainingSet, error);
 }
 /// <summary>
 /// Train using RPROP and display progress to a dialog box.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="trainingSet">The training set to use.</param>
 public static void TrainDialog(BasicNetwork network,
                                IMLDataSet trainingSet)
 {
     Propagation train = new ResilientPropagation(network,
                                                  trainingSet) {ThreadCount = 0};
     TrainDialog(train, network, trainingSet);
 }
 /// <summary>
 /// Train the neural network, using SCG training, and output status to the
 /// console.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="trainingSet">The training set.</param>
 /// <param name="minutes">The number of minutes to train for.</param>
 public static void TrainConsole(BasicNetwork network,
                                 IMLDataSet trainingSet, int minutes)
 {
     Propagation train = new ResilientPropagation(network,
                                                  trainingSet) {ThreadCount = 0};
     TrainConsole(train, network, trainingSet, minutes);
 }
        /// <summary>
        /// Perform an individual job unit, which is a single network to train and
        /// evaluate.
        /// </summary>
        ///
        /// <param name="context">Contains information about the job unit.</param>
        public override sealed void PerformJobUnit(JobUnitContext context)
        {
            var network = (BasicNetwork) context.JobUnit;
            BufferedMLDataSet buffer = null;
            IMLDataSet useTraining = _training;

            if (_training is BufferedMLDataSet)
            {
                buffer = (BufferedMLDataSet) _training;
                useTraining = (buffer.OpenAdditional());
            }

            // train the neural network

            double error = Double.PositiveInfinity;
            for (int z = 0; z < _weightTries; z++)
            {
                network.Reset();
                Propagation train = new ResilientPropagation(network,
                                                             useTraining);
                var strat = new StopTrainingStrategy(0.001d,
                                                     5);

                train.AddStrategy(strat);
                train.ThreadCount = 1; // force single thread mode

                for (int i = 0;
                     (i < _iterations) && !ShouldStop
                     && !strat.ShouldStop();
                     i++)
                {
                    train.Iteration();
                }

                error = Math.Min(error, train.Error);
            }

            if (buffer != null)
            {
                buffer.Close();
            }

            if (!ShouldStop)
            {
                // update min and max

                _high = Math.Max(_high, error);
                _low = Math.Min(_low, error);

                if (_hidden1Size > 0)
                {
                    int networkHidden1Count;
                    int networkHidden2Count;

                    if (network.LayerCount > 3)
                    {
                        networkHidden2Count = network.GetLayerNeuronCount(2);
                        networkHidden1Count = network.GetLayerNeuronCount(1);
                    }
                    else
                    {
                        networkHidden2Count = 0;
                        networkHidden1Count = network.GetLayerNeuronCount(1);
                    }

                    int row, col;

                    if (_hidden2Size == 0)
                    {
                        row = networkHidden1Count - _hidden[0].Min;
                        col = 0;
                    }
                    else
                    {
                        row = networkHidden1Count - _hidden[0].Min;
                        col = networkHidden2Count - _hidden[1].Min;
                    }

                    if ((row < 0) || (col < 0))
                    {
                        Console.Out.WriteLine("STOP");
                    }
                    _results[row][col] = error;
                }

                // report status
                _currentTry++;

                UpdateBest(network, error);
                ReportStatus(
                    context,
                    "Current: "
                    + NetworkToString(network)
                    + "; Best: "
                    + NetworkToString(_bestNetwork));
            }
        }
        private void ProcessTrain()
        {
            if (network == null)
                return;

            String strMode = GetArg("mode");
            String strMinutes = GetArg("minutes");
            String strStrategyError = GetArg("strategyerror");
            String strStrategyCycles = GetArg("strategycycles");

            app.WriteLine("Training Beginning... Output patterns="
                          + outputCount);

            double strategyError = double.Parse(strStrategyError);
            int strategyCycles = int.Parse(strStrategyCycles);

            var train = new ResilientPropagation(network, training);
            train.AddStrategy(new ResetStrategy(strategyError, strategyCycles));

            if (String.Compare(strMode, "gui", true) == 0)
            {
                EncogUtility.TrainDialog(train, network, training);
            }
            else
            {
                int minutes = int.Parse(strMinutes);
                EncogUtility.TrainConsole(train, network, training,
                                          minutes);
            }
            app.WriteLine("Training Stopped...");
        }
        public void Train(BasicNetwork network, IMLDataSet training)
        {
            ITrain train = new ResilientPropagation(network, training);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > MaxError);
        }
Example #29
        /// <inheritdoc/>
        public override void CreateTrainer(OpenCLTrainingProfile profile, bool singleThreaded)
        {
            Propagation.Propagation train = new ResilientPropagation(Network,
                    Training, profile, InitialUpdate, MaxStep);

            if (singleThreaded)
                train.NumThreads = 1;
            else
                train.NumThreads = 0;


            foreach (IStrategy strategy in Strategies)
            {
                train.AddStrategy(strategy);
            }

            Train = train;
        }
        public void Train(Network network, TrainingCallback callback)
        {
            IActivationFunction activationFunctionInput = network.GetActivation(0);
            int outputNeurons = network.GetLayerNeuronCount(network.LayerCount - 1);
            double error = 0;
            callback.Invoke(TrainingStatus.FillingStandardInputs, 0, 0, 0); /*First operation is filling standard input/outputs*/
            Dictionary<int, List<BasicMLData>> trackIdFingerprints = GetNormalizedTrackFingerprints(activationFunctionInput, trainingSongSnippets, outputNeurons);
            workingThread = Thread.CurrentThread;
            IActivationFunction activationFunctionOutput = network.GetActivation(network.LayerCount - 1);
            double[][] normalizedBinaryCodes = GetNormalizedBinaryCodes(activationFunctionOutput, outputNeurons);
            Tuple<double[][], double[][]> tuple = FillStandardInputsOutputs(trackIdFingerprints, normalizedBinaryCodes); /*Fill standard input output*/
            double[][] inputs = tuple.Item1;
            double[][] outputs = tuple.Item2;

            if (inputs == null || outputs == null)
            {
                callback.Invoke(TrainingStatus.Exception, 0, 0, 0);
                return;
            }

            int currentIteration = 0;
            double correctOutputs = 0.0;
            BasicNeuralDataSet dataset = new BasicNeuralDataSet(inputs, outputs);
            ITrain learner = new ResilientPropagation(network, dataset);
            try
            {
                // Dynamic output reordering cycle
                /*Idyn = 50*/
                for (int i = 0; i < Idyn; i++)
                {
                    if (paused)
                    {
                        pauseSem.WaitOne();
                    }

                    correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
                    callback.Invoke(TrainingStatus.OutputReordering, correctOutputs, error, currentIteration);
                    ReorderOutput(network, dataset, trackIdFingerprints, normalizedBinaryCodes);
                    /*Edyn = 10*/
                    for (int j = 0; j < Edyn; j++)
                    {
                        if (paused)
                        {
                            pauseSem.WaitOne();
                        }

                        correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
                        callback.Invoke(TrainingStatus.RunningDynamicEpoch, correctOutputs, error, currentIteration);
                        learner.Iteration();
                        error = learner.Error;
                        currentIteration++;
                    }
                }

                for (int i = 0; i < Efixed; i++)
                {
                    if (paused)
                    {
                        pauseSem.WaitOne();
                    }

                    correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
                    callback.Invoke(TrainingStatus.FixedTraining, correctOutputs, error, currentIteration);
                    learner.Iteration();
                    error = learner.Error;
                    currentIteration++;
                }

                network.ComputeMedianResponses(inputs, trainingSongSnippets);
                callback.Invoke(TrainingStatus.Finished, correctOutputs, error, currentIteration);
            }
            catch (ThreadAbortException)
            {
                callback.Invoke(TrainingStatus.Aborted, correctOutputs, error, currentIteration);
                paused = false;
            }
        }