Example #1
        public void testPersistLargeEG()
        {
            BasicNetwork network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 200));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 200));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 200));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 200));
            network.Structure.FinalizeStructure();
            network.Reset();

            EncogDirectoryPersistence.SaveObject(EG_FILENAME, network);
            BasicNetwork network2 = (BasicNetwork)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

            double d = EngineArray.EuclideanDistance(network.Structure.Flat.Weights,
                                                     network2.Structure.Flat.Weights);

            Assert.IsTrue(d < 0.01);
        }
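The test above relies on an EG_FILENAME constant defined elsewhere in the test fixture. A minimal sketch of what that field might look like (the path shown here is hypothetical, not taken from the original test):

        // Hypothetical field assumed by testPersistLargeEG (requires System.IO);
        // the real test fixture supplies its own file path.
        private readonly FileInfo EG_FILENAME =
            new FileInfo(Path.Combine(Path.GetTempPath(), "persist-large.eg"));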
Example #2
        public void Process()
        {
            IList <BasicData> trainingData = LoadSunspots();

            BasicNetwork network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, this.INPUT_WINDOW));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 50));
            network.AddLayer(new BasicLayer(new ActivationLinear(), false, 1));
            network.FinalizeStructure();
            network.Reset();

            BackPropagation train = new BackPropagation(network, trainingData, 1e-9, 0.5);

            train.BatchSize = 0;

            PerformIterations(train, 100000, 650, true);
            Query(network, trainingData);
        }
Example #3
        private BasicNetwork LoadNetwork(Network network_data)
        {
            byte[]   byteData = Convert.FromBase64String(network_data.data);
            double[] data     = new double[byteData.Length / 8];
            Buffer.BlockCopy(byteData, 0, data, 0, byteData.Length);
            BasicNetwork network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 30 * 2 * 2));
            for (int i = 0; i < network_data.hidden_count; i++)
            {
                network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, network_data.hidden_length));
            }
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();
            network.DecodeFromArray(data);
            return(network);
        }
Example #4
        static void Main(string[] args)
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(2));
            network.AddLayer(new BasicLayer(3));
            network.AddLayer(new BasicLayer(1));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingDataSource = new CSVDataSource(@"Data\training.csv", true, ',');
            //var validationDataSource = new CSVDataSource(@"Data\validation.csv", true, ',');

            var trainingSet = new VersatileMLDataSet(trainingDataSource);

            //var validationSet = new VersatileMLDataSet(validationDataSource);

            trainingSet.Analyze();
            trainingSet.Normalize();

            var training = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                training.Iteration();
                Console.WriteLine($"Epoch #{epoch}. Error: {training.Error}");
                epoch++;
            } while (training.Error > 0.01);

            training.FinishTraining();

            Console.WriteLine("Neural Network Results:");

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine($"{pair.Input[0]},{pair.Input[1]}, actual={output[0]}, ideal={pair.Ideal}");
            }

            EncogFramework.Instance.Shutdown();
        }
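The Main method above reads Data\training.csv (and a commented-out validation file), but the CSV itself is not part of the snippet. A hypothetical file that would match the 2-input / 1-output topology, shown purely for illustration with invented column names:

            x1,x2,y
            0,0,0
            0,1,1
            1,0,1
            1,1,0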
Example #5
        public void BasicSlidingSineSignal()
        {
            var listSize  = 30 * 200;
            var inputList = new List <double>(listSize);
            var idealList = new List <double>(listSize);
            var rand      = new Random(23);

            for (int i = 0; i < listSize; i++)
            {
                idealList.Add(Math.Sin(Math.PI * 2.0 * i / 30));
                inputList.Add(idealList[idealList.Count - 1] + (rand.NextDouble() - 0.5) * 0.1);
            }

            var input = new SlidingWindowMLDataProvider(inputList, 10, 0, 1);
            var ideal = new SlidingWindowMLDataProvider(idealList, 2, 11, 1);             // predict the eleventh and twelfth items from the ten items before them
            var ds    = new DynamicMLDataSet(input, ideal);

            Assert.AreEqual(10, input.WindowSize);
            Assert.AreEqual(10, ds.InputSize);
            Assert.AreEqual(2, ds.IdealSize);
            Assert.AreEqual(listSize, ds.Count);

            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(ds.InputSize));
            network.AddLayer(new BasicLayer(ds.InputSize + 3));
            network.AddLayer(new BasicLayer(ds.IdealSize));
            network.Structure.FinalizeStructure();
            network.Reset(42);

            var trainer = new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, ds);

            int maxIteration = 300;
            int iteration    = 0;

            do
            {
                trainer.Iteration();
                Debug.WriteLine(++iteration + ": Error = " + trainer.Error);
            } while(trainer.Error > 0.001 && maxIteration > iteration);

            Assert.IsTrue(iteration < maxIteration);
        }
Example #6
        public static double Encog_Neural(double CarPrice)
        {
            double[][] x =
            {
                new double[] { 0.0, CarPrice },
            };

            double[][] y =
            {
                new double[] { 1.0 }
            };

            //////////CREATE NETWORK/////////
            BasicNetwork network = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet dataset = new BasicMLDataSet(x, y);

            ITrain learner = new Backpropagation(network, dataset);

            for (int i = 0; i < 100; i++)
            {
                learner.Iteration();
            }

            //// Testing /////
            foreach (BasicMLDataPair pair in dataset)
            {
                IMLData neuralResult = network.Compute(pair.Input);

                // Divide the car price by the network's output
                var priceResult = CarPrice / neuralResult[0];

                return(priceResult);
            }
            return(0);
        }
Example #7
        /// <summary>
        ///     Run the example.
        /// </summary>
        public void Process()
        {
            // read the iris data from the resources
            var assembly = Assembly.GetExecutingAssembly();
            var res      = assembly.GetManifestResourceStream("AIFH_Vol3.Resources.iris.csv");

            // did we fail to read the resource?
            if (res == null)
            {
                Console.WriteLine("Can't read iris data from embedded resources.");
                return;
            }

            // load the data
            var istream = new StreamReader(res);
            var ds      = DataSet.Load(istream);

            istream.Close();

            // The following ranges are set up for the Iris data set. If you wish to normalize
            // other files, you will need to modify the function calls below.
            ds.NormalizeRange(0, 0, 1);
            ds.NormalizeRange(1, 0, 1);
            ds.NormalizeRange(2, 0, 1);
            ds.NormalizeRange(3, 0, 1);
            var species = ds.EncodeOneOfN(4);

            var trainingData = ds.ExtractSupervised(0, 4, 4, 3);

            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 4));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 20));
            network.AddLayer(new BasicLayer(new ActivationSoftMax(), false, 3));
            network.FinalizeStructure();
            network.Reset();

            var train = new BackPropagation(network, trainingData, 0.001, 0.9);

            PerformIterations(train, 100000, 0.02, true);
            QueryOneOfN(network, trainingData, species);
        }
Example #8
        /// <summary>
        /// Trains a network on randomly generated inputs.
        /// </summary>
        /// <param name="inputs">The inputs.</param>
        /// <param name="predictWindow">The predict window.</param>
        public static double RandomTrainerMethod(int inputs, int predictWindow)
        {
            double[] firstinput   = MakeInputs(inputs);
            double[] SecondInput  = MakeInputs(inputs);
            double[] ThirdInputs  = MakeInputs(inputs);
            double[] FourthInputs = MakeInputs(inputs);
            double[] inp5         = MakeInputs(inputs);
            double[] inp6         = MakeInputs(inputs);

            var            pair     = TrainerHelper.ProcessPairs(firstinput, firstinput, inputs, predictWindow);
            var            pair2    = TrainerHelper.ProcessPairs(SecondInput, firstinput, inputs, predictWindow);
            var            pair3    = TrainerHelper.ProcessPairs(ThirdInputs, firstinput, inputs, predictWindow);
            var            pair4    = TrainerHelper.ProcessPairs(FourthInputs, firstinput, inputs, predictWindow);
            var            pair5    = TrainerHelper.ProcessPairs(inp5, firstinput, inputs, predictWindow);
            var            pair6    = TrainerHelper.ProcessPairs(inp6, firstinput, inputs, predictWindow);
            BasicMLDataSet SuperSet = new BasicMLDataSet();

            SuperSet.Add(pair);
            SuperSet.Add(pair2);

            SuperSet.Add(pair3);
            SuperSet.Add(pair4);
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationTANH(), true, SuperSet.InputSize));
            network.AddLayer(new BasicLayer(new ActivationTANH(), false, 20));
            network.AddLayer(new BasicLayer(new ActivationLinear(), true, predictWindow));

            //var layer = new BasicLayer(new ActivationTANH(), true, SuperSet.InputSize);
            //layer.Network = network;


            network.Structure.FinalizeStructure();
            network.Reset();


            // var network = (BasicNetwork)CreateEval.CreateElmanNetwork(SuperSet.InputSize, SuperSet.IdealSize);
            return(CreateEval.TrainNetworks(network, SuperSet));
            //Lets create an evaluation.
            //Console.WriteLine(@"Last error rate on random trainer:" + error);
        }
Example #9
        public void CreateNetwork(int[] layersOrNeuronsLayersOutput)
        {
            _layers = layersOrNeuronsLayersOutput;
            if (_wantedNnType == NetworkType.FNN)
            {
                // create neural network
                _network = new BasicNetwork();

                //_network.AddLayer(new BasicLayer(null, true, _windowSize));
                //foreach (var neuronCount in _layers)
                //{
                //    _network.AddLayer(new BasicLayer(new ActivationTANH(), false, neuronCount));
                //}

                var input  = new BasicLayer(null, true, _windowSize);
                var input1 = new BasicLayer(new ActivationTANH(), false, _layers[0]);
                var hidden = new BasicLayer(new ActivationTANH(), false, _layers[1]);
                var output = new BasicLayer(new ActivationTANH(), false, _layers[2]);

                input1.ContextFedBy = hidden;

                _network.AddLayer(input);
                _network.AddLayer(input1);
                _network.AddLayer(hidden);
                _network.AddLayer(output);

                _network.Structure.FinalizeStructure();
                _network.Reset();
            }

            //if (_wantedNnType == NetworkType.RNN)
            //{
            //    _rnetwork = NetworkBuilder.MakeLstm(_windowSize,
            //        _layers[0],
            //        _layers[1],
            //        _layers[2],
            //        new TanhUnit(),
            //        0.08, new Random());
            //    _rnetwork.ResetState();
            //    _network = null;
            //}
        }
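CreateNetwork above depends on several private fields and a NetworkType enum that are not shown in the snippet. A rough sketch of how they might be declared (only the names come from the snippet; the window size value is a placeholder):

        // Assumed declarations (hypothetical; names taken from CreateNetwork above).
        private enum NetworkType { FNN, RNN }

        private NetworkType _wantedNnType = NetworkType.FNN;
        private int _windowSize = 30;          // placeholder value
        private int[] _layers;
        private BasicNetwork _network;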
Example #10
        /// <see cref="INetwork.CreateNetwork"/>
        public INetwork CreateNetwork()
        {
            if (TrainedNetworkFile.Exists)
            {
                return(this);
            }

            BasicNetwork network = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, InputCount));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, OutputCount));

            network.Structure.FinalizeStructure();
            network.Reset();

            EncogDirectoryPersistence.SaveObject(TrainedNetworkFile, network);
            TrainedNetworkFile = new FileInfo(TrainedNetworkFile.FullName);

            return(this);
        }
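CreateNetwork relies on TrainedNetworkFile, InputCount and OutputCount members that are not part of the snippet. One plausible way they might be declared (the default values are placeholders, not from the original class):

        // Assumed members (hypothetical defaults shown for illustration only).
        public FileInfo TrainedNetworkFile { get; private set; } = new FileInfo("trained-network.eg");
        public int InputCount { get; } = 64;
        public int OutputCount { get; } = 2;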
Example #11
        private BasicNetwork createNeuralNetwork()
        {
            BasicNetwork network = new BasicNetwork();

            // input layer
            network.AddLayer(new BasicLayer(null, true, inputLayerSize));

            // hidden layer
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, inputLayerSize / 6));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, inputLayerSize / 6 / 4));

            // output layer
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, outputLayerSize));

            network.Structure.FinalizeStructure();
            //  network.GetStructure().readonlyizeStructure();
            network.Reset();

            return(network);
        }
Example #12
        /// <summary>
        /// Create a feed forward network.
        /// </summary>
        ///
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The feedforward network.</returns>
        public IMLMethod Create(String architecture, int input,
                                int output)
        {
            var                 result = new BasicNetwork();
            IList <String>      layers = ArchitectureParse.ParseLayers(architecture);
            IActivationFunction af     = new ActivationLinear();

            int questionPhase = 0;

            foreach (String layerStr in layers)
            {
                // determine default
                int defaultCount = questionPhase == 0 ? input : output;

                ArchitectureLayer layer = ArchitectureParse.ParseLayer(
                    layerStr, defaultCount);
                bool bias = layer.Bias;

                String part = layer.Name;
                part = part != null ? part.Trim() : "";

                IActivationFunction lookup = _factory.Create(part);

                if (lookup != null)
                {
                    af = lookup;
                }
                else
                {
                    if (layer.UsedDefault)
                    {
                        questionPhase++;
                        if (questionPhase > 2)
                        {
                            throw new EncogError("Only two ?'s may be used.");
                        }
                    }

                    if (layer.Count == 0)
                    {
                        throw new EncogError("Unknown architecture element: "
                                             + architecture + ", can't parse: " + part);
                    }

                    result.AddLayer(new BasicLayer(af, bias, layer.Count));
                }
            }

            result.Structure.FinalizeStructure();
            result.Reset();

            return(result);
        }
Example #13
        /// <summary>
        /// Generate the RSOM network.
        /// </summary>
        /// <returns>The neural network.</returns>
        public BasicNetwork Generate()
        {
            ILayer input = new BasicLayer(new ActivationLinear(), false,
                                          this.inputNeurons);
            ILayer output = new BasicLayer(new ActivationLinear(), false,
                                           this.outputNeurons);
            int          y       = PatternConst.START_Y;
            BasicNetwork network = new BasicNetwork();

            network.AddLayer(input);
            network.AddLayer(output);
            input.X       = PatternConst.START_X;
            output.X      = PatternConst.START_X;
            input.Y       = y;
            y            += PatternConst.INC_Y;
            output.Y      = y;
            network.Logic = new SOMLogic();
            network.Structure.FinalizeStructure();
            network.Reset();
            return(network);
        }
Example #14
        /// <summary>
        /// Initializes this neural network.
        /// </summary>
        private void InitializeNetwork()
        {
            NeuralNetwork = new BasicNetwork();

            foreach (var l in Parameters.Layers)
            {
                NeuralNetwork.AddLayer(new BasicLayer(l.ActivationFunction, l.Bias, l.NueronCount));
            }

            NeuralNetwork.Structure.FinalizeStructure();
            NeuralNetwork.Reset();
        }
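InitializeNetwork consumes a Parameters object whose layer descriptors are not shown. One plausible shape for those types (hypothetical class names; the property names are taken from the snippet):

        // Hypothetical configuration types implied by InitializeNetwork.
        public class LayerParameters
        {
            public IActivationFunction ActivationFunction { get; set; }
            public bool Bias { get; set; }
            public int NueronCount { get; set; }   // spelling kept from the original property name
        }

        public class NetworkParameters
        {
            public List<LayerParameters> Layers { get; } = new List<LayerParameters>();
        }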
Example #15
 public override NetworkBase BuildNetwork(int inputNeurons, List <int> hiddenNeurons, int outputNeurons, double learningRate, double momentum,
                                          SimpleMLP.IActivation activationFunction, INetwork networkType)
 {
     this.momentum     = momentum;
     this.learningRate = learningRate;
     this.network.AddLayer(new BasicLayer(activationFunction.GetEncogActivationFunction(), false, inputNeurons));
     hiddenNeurons.ForEach(e => network.AddLayer(new BasicLayer(activationFunction.GetEncogActivationFunction(), true, e)));
     this.network.AddLayer(new BasicLayer(activationFunction.GetEncogActivationFunction(), true, outputNeurons));
     this.network.Structure.FinalizeStructure();
     this.network.Reset();
     return(this);
 }
Example #16
        void ItWorks()
        {
            var network2 = new BasicNetwork();

            var data    = LoadData();
            var label   = LoadLabel();
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 4));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 8));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingSet = new BasicMLDataSet(data, label);

            var propagation = new StochasticGradientDescent(network, trainingSet);

            var epoch = 1;

            for (var i = 0; i < 5000; i++)
            {
                propagation.Iteration();
                Console.WriteLine($"Epoch: {epoch} Error: {propagation.Error}");
                epoch++;
            }

            propagation.FinishTraining();


            var weights = network.Flat.Weights;
            var biases  = network.Flat.BiasActivation;

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Debug.Log(pair.Input[0] + "," + pair.Input[1]
                          + ", actual=" + output[0] + ",ideal=" + pair.Ideal[0]);
            }
        }
Example #17
        private void button2_Click(object sender, EventArgs e)
        {
            double[][]     x = { new double[] { 0.1, 0.2 },
                                 new double[]     { 0.4, 0.3 } };
            double[][]     y = { new double[] { 0.3 },
                                 new double[]     { 0.7 } };
            BasicMLDataSet dataset = new BasicMLDataSet(x, y);

            BasicNetwork rede = new BasicNetwork();

            rede.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            rede.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            rede.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            rede.Structure.FinalizeStructure();
            rede.Reset();

            Backpropagation train = new Backpropagation(rede, dataset, 0.5, 0.1);

            int epoch = 0;

            do
            {
                train.Iteration();
                if (epoch % 100 == 0)
                {
                    richTextBox2.AppendText("Época " + epoch.ToString() + " Erro " + train.Error + Environment.NewLine);
                }
                epoch++;
            } while (epoch < 3000);

            for (double t = 0.0; t <= 5; t += 0.05)
            {
                double[] d      = new double[] { t, t };
                IMLData  input  = new BasicMLData(d);
                IMLData  output = rede.Compute(input);
                double[] result = new double[output.Count];
                output.CopyTo(result, 0, output.Count);
                richTextBox2.AppendText(" " + t + "+" + t + "=" + result[0] + Environment.NewLine);
            }
        }
Example #18
        public void CerateNetwork()
        {
            network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, parameters.Bias, parameters.CountInput));
            foreach (var NueronNumber in parameters.Layers)
            {
                switch (parameters.FunctionType)
                {
                case FunctionTypeEnum.Bipolar:
                    network.AddLayer(new BasicLayer(new ActivationTANH(), parameters.Bias, NueronNumber));
                    break;

                case FunctionTypeEnum.Unipolar:
                    network.AddLayer(new BasicLayer(new ActivationSigmoid(), parameters.Bias, NueronNumber));    // TODO: verify
                    break;
                }
            }
            if (parameters.ProblemType == ProblemTypeEnum.Classification)
            {
                if (parameters.FunctionType == FunctionTypeEnum.Unipolar)
                {
                    network.AddLayer(new BasicLayer(new ActivationSoftMax(), parameters.Bias, classCount));
                }
                else
                {
                    network.AddLayer(new BasicLayer(parameters.CountOutput));
                }
            }
            else
            {
                network.AddLayer(new BasicLayer(parameters.CountOutput));
            }
            network.Structure.FinalizeStructure();
            network.Reset();
        }
Example #19
        public void Process()
        {
            Console.WriteLine("Please wait, reading MNIST training data.");
            var dir              = AppDomain.CurrentDomain.BaseDirectory;
            var trainingReader   = LearnDigitsBackprop.LoadMNIST(dir, true, MNIST_DEPTH);
            var validationReader = LearnDigitsBackprop.LoadMNIST(dir, false, MNIST_DEPTH);

            Console.WriteLine("Training set size: " + trainingReader.NumImages);
            Console.WriteLine("Validation set size: " + validationReader.NumImages);

            var outputCount = trainingReader.Data[0].Ideal.Length;

            int[] inputShape = { trainingReader.NumCols, trainingReader.NumCols, 3 };

            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, inputShape));
            network.AddLayer(new Conv2DLayer(new ActivationReLU(), 3, 5, 5));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 100));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 50));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 25));
            network.AddLayer(new BasicLayer(new ActivationSoftMax(), false, outputCount));
            network.FinalizeStructure();
            network.Reset();

            // train the neural network
            Console.WriteLine("Training neural network.");
            var train = new BackPropagation(network, trainingReader.Data, 1e-4, 0.9);

            train.L1 = 0;
            train.L2 = 1e-11;

            PerformIterationsClassifyEarlyStop(train, network, validationReader.Data, 5);
        }
Example #20
        private static void Main(string[] args)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            train.FinishTraining();

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
        }
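This program, like the other XOR examples on this page, references XORInput and XORIdeal without defining them. In the standard Encog XOR sample they hold the XOR truth table, roughly:

        public static double[][] XORInput =
        {
            new[] { 0.0, 0.0 },
            new[] { 1.0, 0.0 },
            new[] { 0.0, 1.0 },
            new[] { 1.0, 1.0 }
        };

        public static double[][] XORIdeal =
        {
            new[] { 0.0 },
            new[] { 1.0 },
            new[] { 1.0 },
            new[] { 0.0 }
        };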
Example #21
        public BasicNetwork GetNetwork()
        {
            var network = new BasicNetwork();

            foreach (NeuralLayerInfo layer in Layers)
            {
                var objLayer = layer.GetLayer();
                network.AddLayer(objLayer);
            }
            network.FinalizeStructure();
            network.Reset();
            return(network);
        }
Example #22
        private static void Main(string[] args)
        {
            CSVReader reader = new CSVReader();
            DataSet   ds     = reader.ReadCSVFile(FILENAME, true);

            dt = ds.Tables["Table1"];

            // create a neural network, without using a factory
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 17));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 2));
            network.Structure.FinalizeStructure();
            network.Reset();
            Dictionarys dict = new Dictionarys();
            // create training data
            IMLDataSet dataSet = dict.GetDataSet(dt);
            // train the neural network
            IMLTrain train = new ResilientPropagation(network, dataSet);
            int      epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);
            train.FinishTraining();
            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in dataSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
            EncogFramework.Instance.Shutdown();
        }
Example #23
        public void Run()
        {
            // Create the neural network with its layers
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Create the training set
            IMLDataSet conjuntoEntrenamiento = new BasicMLDataSet(entradas, salidas);

            // Train the network
            IMLTrain train = new ResilientPropagation(network, conjuntoEntrenamiento);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoca #" + epoch + " Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.001);

            // Test the neural network
            Console.WriteLine("Results:");
            foreach (IMLDataPair pair in conjuntoEntrenamiento)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + "," + output[1] + @",ideal=" + pair.Ideal[0] + "," + pair.Ideal[1]);
            }

            IMLData dataprueba = new BasicMLData(new double[] { 2.4, 2.5 });
            var     prueba     = network.Compute(dataprueba);
        }
Example #24
        /// <summary>
        /// Generate the Elman neural network.
        /// </summary>
        /// <returns>The Elman neural network.</returns>
        public BasicNetwork Generate()
        {
            int    y     = PatternConst.START_Y;
            ILayer input = new BasicLayer(this.activation, false,
                                          this.inputNeurons);

            BasicNetwork result = new BasicNetwork();

            result.AddLayer(input);

            input.X = PatternConst.START_X;
            input.Y = y;
            y      += PatternConst.INC_Y;

            foreach (int count in this.hidden)
            {
                ILayer hidden = new BasicLayer(
                    this.activation, true, count);

                result.AddLayer(hidden);
                hidden.X = PatternConst.START_X;
                hidden.Y = y;
                y       += PatternConst.INC_Y;
            }

            ILayer output = new BasicLayer(this.activation, true,
                                           this.outputNeurons);

            result.AddLayer(output);
            output.X = PatternConst.START_X;
            output.Y = y;
            y       += PatternConst.INC_Y;

            result.Structure.FinalizeStructure();
            result.Reset();

            return(result);
        }
Example #25
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            EncogUtility.TrainToError(network, trainingSet, 0.01);
            EncogUtility.Evaluate(network, trainingSet);

            var ff = new FreeformNetwork(network);

            EncogUtility.Evaluate(ff, trainingSet);

            EncogFramework.Instance.Shutdown();
        }
Example #26
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network using online (batch=1)
            Propagation train = new Backpropagation(network, trainingSet, 0.7, 0.3);

            train.BatchSize = 1;

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #27
        private void button1_Click(object sender, EventArgs e)
        {
            double[][] x = { new double[] { 0.1, 0.2 },
                             new double[] { 0.4, 0.3 } };
            double[][] y = { new double[] { 0.3 },
                             new double[] { 0.4 } };

            BasicNetwork rede = new BasicNetwork();

            rede.AddLayer(new BasicLayer(2));
            rede.AddLayer(new BasicLayer(2));
            rede.AddLayer(new BasicLayer(1));
            rede.Structure.FinalizeStructure();
            rede.Reset();
            BasicMLDataSet dataset = new BasicMLDataSet(x, y);

            Backpropagation propagation = new Backpropagation(rede, dataset, 0.3, 0.7);
            int             epoch       = 0;

            while (true)
            {
                propagation.Iteration();
                richTextBox1.AppendText("Época " + epoch.ToString() + " Erro " + propagation.Error + Environment.NewLine);
                epoch++;

                if (epoch > 3500 || propagation.Error < 0.01)
                {
                    break;
                }
            }
            foreach (IMLDataPair d in dataset)
            {
                IMLData o = rede.Compute(d.Input);
                richTextBox1.AppendText(" Saída " + o + " Ideal " + d.Input[0] + Environment.NewLine);
            }
        }
Example #28
        /// <summary>
        /// Generate a Jordan neural network.
        /// </summary>
        /// <returns>A Jordan neural network.</returns>
        public BasicNetwork Generate()
        {
            // construct a Jordan-type network
            ILayer input = new BasicLayer(this.activation, false,
                                          this.inputNeurons);
            ILayer hidden = new BasicLayer(this.activation, true,
                                           this.hiddenNeurons);
            ILayer output = new BasicLayer(this.activation, true,
                                           this.outputNeurons);
            ILayer       context = new ContextLayer(this.outputNeurons);
            BasicNetwork network = new BasicNetwork();

            network.AddLayer(input);
            network.AddLayer(hidden);
            network.AddLayer(output);

            output.AddNext(context, SynapseType.OneToOne);
            context.AddNext(hidden);

            int y = PatternConst.START_Y;

            input.X   = PatternConst.START_X;
            input.Y   = y;
            y        += PatternConst.INC_Y;
            hidden.X  = PatternConst.START_X;
            hidden.Y  = y;
            context.X = PatternConst.INDENT_X;
            context.Y = y;
            y        += PatternConst.INC_Y;
            output.X  = PatternConst.START_X;
            output.Y  = y;

            network.Structure.FinalizeStructure();
            network.Reset();
            return(network);
        }
Example #29
    /// <summary>
    /// Creates a feedforward NN
    /// </summary>
    public virtual void createNetwork()
    {
      IActivationFunction threshold;

      if (ACTIVIATION_FUNCTION == 1)
        threshold = new ActivationSigmoid();
      else if (ACTIVIATION_FUNCTION == 2)
        threshold = new ActivationTANH();
      else
        throw new System.Exception("Only two activation functions have been implemented.");

      network = new BasicNetwork();
      network.AddLayer(new BasicLayer(threshold, true, INPUT_NEURONS));
      network.AddLayer(new BasicLayer(threshold, true, HIDDENLAYER1_NEURONS));

      if (HIDDENLAYER2_NEURONS > 0)
      {
        network.AddLayer(new BasicLayer(threshold, true, HIDDENLAYER2_NEURONS));
      }

      network.AddLayer(new BasicLayer(threshold, true, OUTPUT_NEURONS));
      network.Structure.FinalizeStructure();
      network.Reset();
    }
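createNetwork above reads several fields (ACTIVIATION_FUNCTION, INPUT_NEURONS, HIDDENLAYER1_NEURONS, HIDDENLAYER2_NEURONS, OUTPUT_NEURONS, network) that the snippet does not declare. A rough sketch of those members, with placeholder values chosen only for illustration:

    // Assumed fields (hypothetical values; names come from createNetwork above).
    protected int ACTIVIATION_FUNCTION = 1;     // 1 = sigmoid, 2 = tanh, per the branches above
    protected int INPUT_NEURONS = 10;
    protected int HIDDENLAYER1_NEURONS = 20;
    protected int HIDDENLAYER2_NEURONS = 0;     // 0 skips the second hidden layer
    protected int OUTPUT_NEURONS = 1;
    protected BasicNetwork network;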
Example #30
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            BasicNetwork network = new BasicNetwork();

            int y = PatternConst.START_Y;

            ILayer inputLayer  = new BasicLayer(new ActivationLinear(), false, InputNeurons);
            ILayer outputLayer = new BasicLayer(new ActivationLinear(), true, OutputNeurons);

            network.AddLayer(inputLayer);
            network.AddLayer(outputLayer);
            network.Structure.FinalizeStructure();

            (new RangeRandomizer(-0.5, 0.5)).Randomize(network);

            inputLayer.X = PatternConst.START_X;
            inputLayer.Y = y;
            y           += PatternConst.INC_Y;

            outputLayer.X = PatternConst.START_X;
            outputLayer.Y = y;

            return(network);
        }
Example #31
 /// <summary>
 /// Generate the RSOM network.
 /// </summary>
 /// <returns>The neural network.</returns>
 public BasicNetwork Generate()
 {
     ILayer input = new BasicLayer(new ActivationLinear(), false,
             this.inputNeurons);
     ILayer output = new BasicLayer(new ActivationLinear(), false,
             this.outputNeurons);
     int y = PatternConst.START_Y;
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(input);
     network.AddLayer(output);
     input.X = PatternConst.START_X;
     output.X = PatternConst.START_X;
     input.Y = y;
     y += PatternConst.INC_Y;
     output.Y = y;
     network.Logic = new SOMLogic();
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #32
        /// <summary>
        /// Generate the RSOM network.
        /// </summary>
        /// <returns>The neural network.</returns>
        public BasicNetwork Generate()
        {
            ILayer output = new BasicLayer(new ActivationLinear(), false,
                    this.outputNeurons);
            ILayer input = new BasicLayer(new ActivationLinear(), false,
                    this.inputNeurons);

            BasicNetwork network = new BasicNetwork();
            ILayer context = new ContextLayer(this.outputNeurons);
            network.AddLayer(input);
            network.AddLayer(output);

            output.AddNext(context, SynapseType.OneToOne);
            context.AddNext(input);

            int y = PatternConst.START_Y;
            input.X = PatternConst.START_X;
            input.Y = y;

            context.X = PatternConst.INDENT_X;
            context.Y = y;

            y += PatternConst.INC_Y;

            output.X = PatternConst.START_X;
            output.Y = y;

            network.Structure.FinalizeStructure();
            network.Reset();
            return network;
        }
Example #33
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            ILayer layer = new BasicLayer(new ActivationBiPolar(), true,
                    this.neuronCount);

            BasicNetwork result = new BasicNetwork(new BoltzmannLogic());
            result.SetProperty(BoltzmannLogic.PROPERTY_ANNEAL_CYCLES, this.annealCycles);
            result.SetProperty(BoltzmannLogic.PROPERTY_RUN_CYCLES, this.runCycles);
            result.SetProperty(BoltzmannLogic.PROPERTY_TEMPERATURE, this.temperature);
            result.AddLayer(layer);
            layer.AddNext(layer);
            layer.X = PatternConst.START_X;
            layer.Y = PatternConst.START_Y;
            result.Structure.FinalizeStructure();
            result.Reset();
            return result;
        }
Example #34
 /// <summary>
 /// Generate the RBF network.
 /// </summary>
 /// <returns>The neural network.</returns>
 public BasicNetwork Generate()
 {
     ILayer input = new BasicLayer(new ActivationLinear(), false,
             this.inputNeurons);
     ILayer output = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
     BasicNetwork network = new BasicNetwork();
     RadialBasisFunctionLayer rbfLayer = new RadialBasisFunctionLayer(
            this.hiddenNeurons);
     network.AddLayer(input);
     network.AddLayer(rbfLayer, SynapseType.Direct);
     network.AddLayer(output);
     network.Structure.FinalizeStructure();
     network.Reset();
     network.TagLayer(RBF_LAYER, rbfLayer);
     rbfLayer.RandomizeRBFCentersAndWidths(this.inputNeurons, -1, 1, RBFEnum.Gaussian);
     int y = PatternConst.START_Y;
     input.X = PatternConst.START_X;
     input.Y = y;
     y += PatternConst.INC_Y;
     rbfLayer.X = PatternConst.START_X;
     rbfLayer.Y = y;
     y += PatternConst.INC_Y;
     output.X = PatternConst.START_X;
     output.Y = y;
     return network;
 }
Example #35
        /// <summary>
        /// Generate the Hopfield neural network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            ILayer layer = new BasicLayer(new ActivationBiPolar(), false,
                    this.neuronCount);

            BasicNetwork result = new BasicNetwork(new HopfieldLogic());
            result.AddLayer(layer);
            layer.AddNext(layer);
            layer.X = PatternConst.START_X;
            layer.Y = PatternConst.START_Y;
            result.Structure.FinalizeStructure();
            result.Reset();
            return result;
        }
Example #36
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            BasicNetwork network = new BasicNetwork();

            int y = PatternConst.START_Y;

            ILayer inputLayer = new BasicLayer(new ActivationLinear(), false, InputNeurons);
            ILayer outputLayer = new BasicLayer(new ActivationLinear(), true, OutputNeurons);

            network.AddLayer(inputLayer);
            network.AddLayer(outputLayer);
            network.Structure.FinalizeStructure();

            (new RangeRandomizer(-0.5, 0.5)).Randomize(network);

            inputLayer.X = PatternConst.START_X;
            inputLayer.Y = y;
            y += PatternConst.INC_Y;

            outputLayer.X = PatternConst.START_X;
            outputLayer.Y = y;

            return network;
        }
Example #37
        /// <summary>
        /// Generate the Elman neural network.
        /// </summary>
        /// <returns>The Elman neural network.</returns>
        public BasicNetwork Generate()
        {
            int y = PatternConst.START_Y;
            ILayer input = new BasicLayer(this.activation, false,
                   this.inputNeurons);

            BasicNetwork result = new BasicNetwork();
            result.AddLayer(input);

            input.X = PatternConst.START_X;
            input.Y = y;
            y += PatternConst.INC_Y;

            foreach (int count in this.hidden)
            {

                ILayer hidden = new BasicLayer(
                       this.activation, true, count);

                result.AddLayer(hidden);
                hidden.X = PatternConst.START_X;
                hidden.Y = y;
                y += PatternConst.INC_Y;
            }

            ILayer output = new BasicLayer(this.activation, true,
                   this.outputNeurons);
            result.AddLayer(output);
            output.X = PatternConst.START_X;
            output.Y = y;
            y += PatternConst.INC_Y;

            result.Structure.FinalizeStructure();
            result.Reset();

            return result;
        }
Example #38
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {

            ILayer input, instar, outstar;
            int y = PatternConst.START_Y;

            BasicNetwork network = new BasicNetwork();
            network.AddLayer(input = new BasicLayer(new ActivationLinear(), false, this.inputCount));
            network.AddLayer(instar = new BasicLayer(new ActivationCompetitive(), false, this.instarCount));
            network.AddLayer(outstar = new BasicLayer(new ActivationLinear(), false, this.outstarCount));
            network.Structure.FinalizeStructure();
            network.Reset();

            input.X = PatternConst.START_X;
            input.Y = y;
            y += PatternConst.INC_Y;

            instar.X = PatternConst.START_X;
            instar.Y = y;
            y += PatternConst.INC_Y;

            outstar.X = PatternConst.START_X;
            outstar.Y = y;

            // tag as needed
            network.TagLayer(BasicNetwork.TAG_INPUT, input);
            network.TagLayer(BasicNetwork.TAG_OUTPUT, outstar);
            network.TagLayer(CPNPattern.TAG_INSTAR, instar);
            network.TagLayer(CPNPattern.TAG_OUTSTAR, outstar);

            return network;
        }
Example #39
        /// <summary>
        /// Generate a Jordan neural network.
        /// </summary>
        /// <returns>A Jordan neural network.</returns>
        public BasicNetwork Generate()
        {
            // construct a Jordan-type network
            ILayer input = new BasicLayer(this.activation, false,
                   this.inputNeurons);
            ILayer hidden = new BasicLayer(this.activation, true,
                   this.hiddenNeurons);
            ILayer output = new BasicLayer(this.activation, true,
                   this.outputNeurons);
            ILayer context = new ContextLayer(this.outputNeurons);
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(input);
            network.AddLayer(hidden);
            network.AddLayer(output);

            output.AddNext(context, SynapseType.OneToOne);
            context.AddNext(hidden);

            int y = PatternConst.START_Y;
            input.X = PatternConst.START_X;
            input.Y = y;
            y += PatternConst.INC_Y;
            hidden.X = PatternConst.START_X;
            hidden.Y = y;
            context.X = PatternConst.INDENT_X;
            context.Y = y;
            y += PatternConst.INC_Y;
            output.X = PatternConst.START_X;
            output.Y = y;

            network.Structure.FinalizeStructure();
            network.Reset();
            return network;
        }