Reset() Public Method

Reset the weight matrix and the bias values. This uses a Nguyen-Widrow randomizer with a range between -1 and 1. If the network is missing an input, output, or hidden layer, Nguyen-Widrow cannot be used and a simple range randomization between -1 and 1 is used instead.
public Reset ( ) : void
Returns void
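A quick sketch of typical Reset() usage, and of the explicit randomizer call it roughly corresponds to, is shown below. This is a minimal illustration rather than one of the collected examples; the RangeRandomizer type and its Randomize(IMLMethod) overload from Encog's Encog.MathUtil.Randomize namespace are assumptions here, and the seeded Reset(42) overload appears in several of the examples further down.

        // Minimal sketch (assumed API), not taken from the examples below.
        // Assumed namespaces: Encog.Neural.Networks, Encog.Neural.Networks.Layers,
        // Encog.Engine.Network.Activation, Encog.MathUtil.Randomize
        static BasicNetwork BuildAndReset()
        {
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();

            // With input, hidden and output layers present, Reset() re-randomizes
            // the weights and bias values (Nguyen-Widrow style, range -1 to 1).
            network.Reset();
            // network.Reset(42);                               // seeded variant for repeatable weights
            // new RangeRandomizer(-1, 1).Randomize(network);   // plain range randomization fallback (assumed API)

            return network;
        }
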
Example #1
        static void Main(string[] args)
        {
            // create a neural network without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));

            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
                epoch++;
            } while (train.Error > 0.01);
            train.FinishTraining();

            Console.WriteLine("Neural Network Results:");
            foreach (IMLDataPair iPair in trainingSet)
            {
                IMLData output = network.Compute(iPair.Input);
                Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[0]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
            }

            EncogFramework.Instance.Shutdown();

            Console.ReadKey();
        }
Example #2
        static void Main(string[] args)
        {
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            var train = new ResilientPropagation(network, trainingSet);
            var epoch = 1;
            do
            {
                train.Iteration();

            } while (train.Error > 0.01);

            train.FinishTraining();

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @" , actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
            Console.ReadLine();
        }
Example #3
        public static long BenchmarkEncog(double[][] input, double[][] output)
        {
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true,
                    input[0].Length));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true,
                    HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false,
                    output[0].Length));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(input, output);

            // train the neural network
            IMLTrain train = new Backpropagation(network, trainingSet, 0.7, 0.7);

            Stopwatch sw = new Stopwatch();
            sw.Start();
            // run epoch of learning procedure
            for (int i = 0; i < ITERATIONS; i++)
            {
                train.Iteration();
            }
            sw.Stop();

            return sw.ElapsedMilliseconds;
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
        private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
        {
            worker.ReportProgress(0, "Creating Network...");
            BasicNetwork Network = new BasicNetwork();
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
            Network.Structure.FinalizeStructure();
            Network.Reset();
            DataContainer.NeuralNetwork.Network = Network;

            ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
            worker.ReportProgress(0, "Running Training: Epoch 0");
            for(int i = 0; i < 200; i++)
            {
                training.Iteration();
                worker.ReportProgress(0, "Running Training: Epoch " + (i+1).ToString() + "     Current Training Error : " + training.Error.ToString());
                if(worker.CancellationPending == true)
                {
                    completed = true;
                    return;
                }

            }
            completed = true;
        }
 /// <summary>
 /// Method responsible for creating the neural network
 /// </summary>
 /// <param name="source">FileInfo with the path of the network file</param>
 private static void CreateNetwork(FileInfo source)
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(new ActivationLinear(), true, 4));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
     network.AddLayer(new BasicLayer(new ActivationTANH(), false, 2));
     network.Structure.FinalizeStructure();
     network.Reset();
     EncogDirectoryPersistence.SaveObject(source, (BasicNetwork)network);
 }
 public BasicNetwork generateNetwork()
 {
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(new BasicLayer(MultiThreadBenchmark.INPUT_COUNT));
     network.AddLayer(new BasicLayer(MultiThreadBenchmark.HIDDEN_COUNT));
     network.AddLayer(new BasicLayer(MultiThreadBenchmark.OUTPUT_COUNT));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #8
 public void Create(int inputnodes, int hiddennodes)
 {
     network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, inputnodes));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, hiddennodes));
     network.AddLayer(new BasicLayer(new ActivationLinear(), false, 1));
     network.Structure.FinalizeStructure();
     network.Reset();
     this.hiddennodes = hiddennodes;
 }
 public BasicNetwork generateNetwork()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(INPUT_COUNT));
     network.AddLayer(new BasicLayer(HIDDEN_COUNT));
     network.AddLayer(new BasicLayer(OUTPUT_COUNT));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
 public static void CreateNetwork(FileOps fileOps)
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(new ActivationLinear(),true,4));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 2));
     network.Structure.FinalizeStructure();
     network.Reset();
     EncogDirectoryPersistence.SaveObject(fileOps.TrainedNeuralNetworkFile, network);
 }
Example #11
 private static BasicNetwork CreateNetwork()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, 2));
     network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
     network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #12
 public static BasicNetwork createElliott()
 {
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, INPUT_OUTPUT));
     network.AddLayer(new BasicLayer(new ActivationElliottSymmetric(), true, HIDDEN));
     network.AddLayer(new BasicLayer(new ActivationElliottSymmetric(), false, INPUT_OUTPUT));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #13
        public PSO()
        {
            network = new BasicNetwork();
            network.AddLayer(new BasicLayer(5));
            network.AddLayer(new BasicLayer(1));
            network.AddLayer(new BasicLayer(1));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet dataSet = new BasicMLDataSet();
            dataSet.Add(new BasicMLData(new double[] { 1.0, 4.0, 3.0, 4.0, 5.0}) , new BasicMLData(new double[] { 2.0, 4.0, 6.0 , 8.0, 10} ));
            
            train = new NeuralPSO(network, new RangeRandomizer(0, 10), new TrainingSetScore(dataSet),5);
            
        }
Example #14
 public IMLMethod Generate()
 {
     BasicLayer layer;
     BasicLayer layer2;
     BasicNetwork network = new BasicNetwork();
     // Decompiled source: the original guard `if ((0 != 0) || (0 == 0))` is always true,
     // so the context-holding layer is always added.
     network.AddLayer(layer2 = new BasicLayer(this._x2a5a4034520336f3, true, this._xcfe830a7176c14e5));
     network.AddLayer(layer = new BasicLayer(this._x2a5a4034520336f3, true, this._xdf89f9cf9fc3d06f));
     network.AddLayer(new BasicLayer(null, false, this._x8f581d694fca0474));
     layer2.ContextFedBy = layer;
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #15
        public static void Experiment()
        {
            BasicNetwork net = new BasicNetwork();

            net.AddLayer(
                new BasicLayer(new ActivationLinear(), false, 3));

            net.AddLayer(
                new BasicLayer(new ActivationTANH(), true, 3));

            net.AddLayer(
                new BasicLayer(new ActivationLinear(), false, 2));

            net.Structure.FinalizeStructure();
            // Assign random weights?
            net.Reset();
        }
Example #16
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="args">Not used.</param>
        public static void Main(String[] args)
        {
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
            
            EncogUtility.TrainToError(network,trainingSet,0.01);
            
            // test the neural network
            Console.WriteLine("Neural Network Results:");
            EncogUtility.Evaluate(network,trainingSet);
        }
Example #17
        public void BuildModel()
        {
            Model = new BasicNetwork();
            Model.AddLayer(new BasicLayer(null, UseBias, FirstLayerSize));

            foreach (int layer in Layers)
            {
                if (Activation == ActivationType.Bipolar)
                    Model.AddLayer(new BasicLayer(new ActivationTANH(), UseBias, layer));
                else
                    Model.AddLayer(new BasicLayer(new ActivationSigmoid(), UseBias, layer));
            }

            Model.AddLayer(new BasicLayer(Activation == ActivationType.Bipolar ? new ActivationTANH() as IActivationFunction : new ActivationSigmoid() as IActivationFunction, false, LastLayerSize));
            Model.Structure.FinalizeStructure();
            Model.Reset();
        }
        public int Train(DataSet dataSet)
        {
            Network = new BasicNetwork();
            Network.AddLayer(new BasicLayer(null, true, 8 * 21));
            var first = ((8 * 21 + 4) * FirstLayerParameter);
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)first));
            var second = ((8 * 21 + 4) * SecondLayerParameter);
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)second));
            Network.AddLayer(new BasicLayer(null, false, 1));
               // Network.AddLayer(new );
            Network.Structure.FinalizeStructure();
            Network.Reset();
            //IMLData x = new BasicNeuralData();
            var set = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
            var ideal = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
            for (int i = 0; i < dataSet.Signatures.Count; i++)
            {
                set[i] = dataSet.Signatures[i].Data.Cast<double>().ToArray();
                ideal[i] = new double[] {1};
            }
            for (int i = dataSet.Signatures.Count; i < dataSet.Signatures.Count  + dataSet.Forgeries.Count; i++)
            {
                set[i] = dataSet.Forgeries[i- dataSet.Signatures.Count].Data.Cast<double>().ToArray();
                ideal[i] = new double[] { 0 };
            }

            IMLDataSet trainingSet = new BasicMLDataSet(set, ideal);

            IMLTrain train = new ResilientPropagation(Network, trainingSet);

            int epoch = 1;
            var errors = new List<double>();
            do
            {
                train.Iteration();
                // Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
                errors.Add(train.Error);

            } while ( epoch < 10000);

            train.FinishTraining();

            return 1;
        }
        public Predictor(TextBox txtOutput, CSVData data, int hiddenNodes, double percentValidation)
        {
            m_txtOutputWindow = txtOutput;
            m_data = data;

            // Populate the input and output arrays
            LoadData(percentValidation);

            // Create Neural Network
            m_network = new BasicNetwork();
            m_network.AddLayer(new BasicLayer(null, true, m_data.InputNodes));
            m_network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, hiddenNodes));
            m_network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, m_data.OutputNodes));
            m_network.Structure.FinalizeStructure();
            m_network.Reset();

            m_train = new Backpropagation(m_network, new BasicMLDataSet(m_inputTraining, m_outputTraining));
        }
Example #20
        public MLPNetwork(int layersCount, int neuronsCount, bool bias, ActivationFunctionType aft, ProblemType problemType, string inputFileName)
        {
            this.layersCount = layersCount;
            this.neuronsCount = neuronsCount;
            this.bias = bias;
            this.activationFunType = aft;
            this.problemType = problemType;

            LoadTrainingData(inputFileName);

            network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, bias, trainingData.InputSize));
            for (int i = 0; i < layersCount; i++)
                network.AddLayer(new BasicLayer(CreateActivationFunction(), bias, neuronsCount));
            network.AddLayer(new BasicLayer(CreateActivationFunction(), false, outputSize));
            network.Structure.FinalizeStructure();
            network.Reset();
        }
        public void testPersistLargeEG()
        {
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 200));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 200));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 200));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 200));
            network.Structure.FinalizeStructure();
            network.Reset();

            EncogDirectoryPersistence.SaveObject(EG_FILENAME, network);
            BasicNetwork network2 = (BasicNetwork)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

            double d = EngineArray.EuclideanDistance(network.Structure.Flat.Weights,
                    network2.Structure.Flat.Weights);

            Assert.IsTrue(d < 0.01);
        }
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            EncogUtility.TrainToError(network, trainingSet, 0.01);
            EncogUtility.Evaluate(network, trainingSet);

            var ff = new FreeformNetwork(network);
            EncogUtility.Evaluate(ff, trainingSet);

            EncogFramework.Instance.Shutdown();
        }
Example #23
        /// <summary>
        /// Trains a random trainer.
        /// </summary>
        /// <param name="inputs">The inputs.</param>
        /// <param name="predictWindow">The predict window.</param>
        public static double RandomTrainerMethod(int inputs, int predictWindow)
        {
            double[] firstinput = MakeInputs(inputs);
            double[] SecondInput = MakeInputs(inputs);
            double[] ThirdInputs = MakeInputs(inputs);
            double[] FourthInputs = MakeInputs(inputs);
            double[] inp5 = MakeInputs(inputs);
            double[] inp6 = MakeInputs(inputs);

            var pair = TrainerHelper.ProcessPairs(firstinput, firstinput, inputs, predictWindow);
            var pair2 = TrainerHelper.ProcessPairs(SecondInput, firstinput, inputs, predictWindow);
            var pair3 = TrainerHelper.ProcessPairs(ThirdInputs, firstinput, inputs, predictWindow);
            var pair4 = TrainerHelper.ProcessPairs(FourthInputs, firstinput, inputs, predictWindow);
            var pair5 = TrainerHelper.ProcessPairs(inp5, firstinput, inputs, predictWindow);
            var pair6 = TrainerHelper.ProcessPairs(inp6, firstinput, inputs, predictWindow);
            BasicMLDataSet SuperSet = new BasicMLDataSet();
            SuperSet.Add(pair);
            SuperSet.Add(pair2);

            SuperSet.Add(pair3);
            SuperSet.Add(pair4);
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, SuperSet.InputSize));
            network.AddLayer(new BasicLayer(new ActivationTANH(), false, 20));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, 0));
            network.AddLayer(new BasicLayer(new ActivationLinear(), true, predictWindow));

            //var layer = new BasicLayer(new ActivationTANH(), true, SuperSet.InputSize);
            //layer.Network = network;


            network.Structure.FinalizeStructure();
            network.Reset();


            // var network = (BasicNetwork)CreateEval.CreateElmanNetwork(SuperSet.InputSize, SuperSet.IdealSize);
            return CreateEval.TrainNetworks(network, SuperSet);
            //Lets create an evaluation.
            //Console.WriteLine(@"Last error rate on random trainer:" + error);

        }
Example #24
        public double Run(List<int> topology, int iterations)
        {
            _Network = new BasicNetwork();
            _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _Features));
            foreach (int layer in topology)
            {
                _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, layer));
            }
            _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            _Network.Structure.FinalizeStructure();
            _Network.Reset();

            //Encog.Neural.Networks.Training.Propagation.Gradient.
            ITrain train = new ResilientPropagation(_Network, _TrainingSet);

            for (int i = 0; i < iterations; i++)
            {
                train.Iteration();
            }
            return train.Error;
        }
        public void BasicSlidingSineSignal()
        {
            var listSize = 30 * 200;
            var inputList = new List<double>(listSize);
            var idealList = new List<double>(listSize);
            var rand = new Random(23);
            for(int i = 0; i < listSize; i++)
            {
                idealList.Add(Math.Sin(Math.PI * 2.0 * i / 30));
                inputList.Add(idealList[idealList.Count - 1] + (rand.NextDouble() - 0.5) * 0.1);
            }

            var input = new SlidingWindowMLDataProvider(inputList, 10, 0, 1);
            var ideal = new SlidingWindowMLDataProvider(idealList, 2, 11, 1); // predict the eleventh and twelfth items from the ten values before them
            var ds = new DynamicMLDataSet(input, ideal);

            Assert.AreEqual(10, input.WindowSize);
            Assert.AreEqual(10, ds.InputSize);
            Assert.AreEqual(2, ds.IdealSize);
            Assert.AreEqual(listSize, ds.Count);

            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(ds.InputSize));
            network.AddLayer(new BasicLayer(ds.InputSize + 3));
            network.AddLayer(new BasicLayer(ds.IdealSize));
            network.Structure.FinalizeStructure();
            network.Reset(42);

            var trainer = new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, ds);

            int maxIteration = 300;
            int iteration = 0;
            do
            {
                trainer.Iteration();
                Debug.WriteLine(++iteration + ": Error = " + trainer.Error);
            } while(trainer.Error > 0.001 && maxIteration > iteration);

            Assert.IsTrue(iteration < maxIteration);
        }
Example #26
        public void Perform(int thread)
        {
            var stopwatch = new Stopwatch();
            stopwatch.Start();
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(INPUT_COUNT));
            network.AddLayer(new BasicLayer(HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(OUTPUT_COUNT));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet training = RandomTrainingFactory.Generate(1000, 50000,
                                                                 INPUT_COUNT, OUTPUT_COUNT, -1, 1);

            var rprop = new ResilientPropagation(network, training);
            rprop.ThreadCount = thread;
            for (int i = 0; i < 5; i++)
            {
                rprop.Iteration();
            }
            stopwatch.Stop();
            Console.WriteLine("Result with " + thread + " was " + stopwatch.ElapsedMilliseconds + "ms");
        }
Example #27
        public override void Run()
        {
            testNetwork = new BasicNetwork();

            testNetwork.AddLayer(new BasicLayer(null, true, 2));
            testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
            testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            testNetwork.Structure.FinalizeStructure();
            testNetwork.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new Backpropagation(testNetwork, trainingSet);
            //IMLTrain train = new ResilientPropagation(testNetwork, trainingSet); //Encog manual says it is the best general one

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.0001);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = testNetwork.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #28
        /// <summary>
        /// Generate the Elman neural network.
        /// </summary>
        ///
        /// <returns>The Elman neural network.</returns>
        public IMLMethod Generate()
        {
            BasicLayer hidden, input;

            var network = new BasicNetwork();
            network.AddLayer(input = new BasicLayer(_activation, true,
                                                    _inputNeurons));
            network.AddLayer(hidden = new BasicLayer(_activation, true,
                                                     _hiddenNeurons));
            network.AddLayer(new BasicLayer(null, false, _outputNeurons));
            input.ContextFedBy = hidden;
            network.Structure.FinalizeStructure();
            network.Reset();
            return network;
        }
        public void TestDynamicXOR()
        {
            Func<int, int, double> inputFunc = OnInputFunc;
            Func<int, int, double> idealFunc = delegate(int chunk, int index) { return XOR.XORIdeal[chunk][index]; };
            var input = new FuncMLDataProvider(inputFunc, XOR.XORInput.Length, XOR.XORInput[0].Length);
            var ideal = new FuncMLDataProvider(idealFunc, XOR.XORIdeal.Length, XOR.XORIdeal[0].Length);

            var ds = new DynamicMLDataSet(input, ideal);

            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, ds.InputSize));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, ds.InputSize + 5));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, ds.IdealSize));
            network.Structure.FinalizeStructure();
            network.Reset(42);

            var trainer = new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, ds);

            int maxIteration = 300;
            int iteration = 0;
            do
            {
                trainer.Iteration();
                Debug.WriteLine(++iteration + ": Error = " + trainer.Error);
            } while(trainer.Error > 0.0001 && maxIteration > iteration);

            Assert.IsTrue(iteration < maxIteration);
        }
Example #30
 public static BasicNetwork CreateThreeLayerNet()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(2));
     network.AddLayer(new BasicLayer(3));
     network.AddLayer(new BasicLayer(1));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }