Example #1
        public void testFeedForwardAndBackLoopWorksWithMomentum()
        {
            // example 11.14 of Neural Network Design by Hagan, Demuth and Beale
            Matrix hiddenLayerWeightMatrix = new Matrix(2, 1);

            hiddenLayerWeightMatrix.Set(0, 0, -0.27);
            hiddenLayerWeightMatrix.Set(1, 0, -0.41);

            Vector hiddenLayerBiasVector = new Vector(2);

            hiddenLayerBiasVector.SetValue(0, -0.48);
            hiddenLayerBiasVector.SetValue(1, -0.13);

            Vector input = new Vector(1);

            input.SetValue(0, 1);

            Matrix outputLayerWeightMatrix = new Matrix(1, 2);

            outputLayerWeightMatrix.Set(0, 0, 0.09);
            outputLayerWeightMatrix.Set(0, 1, -0.17);

            Vector outputLayerBiasVector = new Vector(1);

            outputLayerBiasVector.SetValue(0, 0.48);

            Vector error = new Vector(1);

            error.SetValue(0, 1.261);

            double learningRate           = 0.1;
            double momentumFactor         = 0.5;
            FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(
                hiddenLayerWeightMatrix, hiddenLayerBiasVector,
                outputLayerWeightMatrix, outputLayerBiasVector);

            ffnn.SetTrainingScheme(new BackPropagationLearning(learningRate,
                                                               momentumFactor));
            ffnn.ProcessInput(input);
            ffnn.ProcessError(error);

            Matrix finalHiddenLayerWeights = ffnn.GetHiddenLayerWeights();

            Assert.AreEqual(-0.2675, finalHiddenLayerWeights.Get(0, 0), 0.001);
            Assert.AreEqual(-0.4149, finalHiddenLayerWeights.Get(1, 0), 0.001);

            Vector hiddenLayerBias = ffnn.GetHiddenLayerBias();

            Assert.AreEqual(-0.4775, hiddenLayerBias.GetValue(0), 0.001);
            Assert.AreEqual(-0.1349, hiddenLayerBias.GetValue(1), 0.001);

            Matrix finalOutputLayerWeights = ffnn.GetOutputLayerWeights();

            Assert.AreEqual(0.1304, finalOutputLayerWeights.Get(0, 0), 0.001);
            Assert.AreEqual(-0.1235, finalOutputLayerWeights.Get(0, 1), 0.001);

            Vector outputLayerBias = ffnn.GetOutputLayerBias();

            Assert.AreEqual(0.6061, outputLayerBias.GetValue(0), 0.001);
        }
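
For reference, the weights and biases asserted above can be reproduced by hand. The sketch below is a minimal, self-contained version of that single training step using plain doubles instead of the library's Matrix and Vector types. It assumes the log-sigmoid hidden layer and linear output layer of example 11.14 in Hagan, Demuth and Beale, and the momentum rule deltaW = momentum * previousDelta - (1 - momentum) * learningRate * sensitivity * activation with a zero previous delta; it is an illustration of the arithmetic, not the library's implementation.

using System;

internal static class MomentumStepSketch
{
    private static double LogSigmoid(double n) => 1.0 / (1.0 + Math.Exp(-n));

    internal static void Run()
    {
        double learningRate = 0.1, momentum = 0.5;
        double p = 1.0, target = 1.0 + Math.Sin(Math.PI / 4.0);

        // Initial parameters, identical to the values fed into the network above.
        double[] w1 = { -0.27, -0.41 };
        double[] b1 = { -0.48, -0.13 };
        double[] w2 = { 0.09, -0.17 };
        double b2 = 0.48;

        // Forward pass: log-sigmoid hidden layer, linear output layer.
        double[] a1 = { LogSigmoid(w1[0] * p + b1[0]), LogSigmoid(w1[1] * p + b1[1]) };
        double a2 = w2[0] * a1[0] + w2[1] * a1[1] + b2;
        double error = target - a2;                       // ~1.261, the value the test feeds back

        // Backward pass: layer sensitivities (Hagan's s2 and s1).
        double s2 = -2.0 * error;                         // linear output, derivative = 1
        double[] s1 =
        {
            (1 - a1[0]) * a1[0] * w2[0] * s2,             // log-sigmoid derivative = (1 - a) * a
            (1 - a1[1]) * a1[1] * w2[1] * s2
        };

        // Momentum update with zero previous delta, so each step is scaled by (1 - momentum).
        double k = (1 - momentum) * learningRate;
        for (int i = 0; i < 2; i++)
        {
            w2[i] -= k * s2 * a1[i];
            w1[i] -= k * s1[i] * p;
            b1[i] -= k * s1[i];
        }
        b2 -= k * s2;

        Console.WriteLine($"W1 = [{w1[0]:F4}; {w1[1]:F4}]  (test asserts -0.2675 and -0.4149)");
        Console.WriteLine($"b1 = [{b1[0]:F4}; {b1[1]:F4}]  (test asserts -0.4775 and -0.1349)");
        Console.WriteLine($"W2 = [{w2[0]:F4}, {w2[1]:F4}]  (test asserts 0.1304 and -0.1235)");
        Console.WriteLine($"b2 = {b2:F4}                   (test asserts 0.6061)");
    }
}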
Example #2
        internal static void backPropagationDemo()
        {
            try
            {
                DataSet              irisDataSet = DataSetFactory.getIrisDataSet();
                INumerizer           numerizer   = new IrisDataSetNumerizer();
                NeuralNetworkDataSet innds       = new IrisNeuralNetworkDataSet();

                innds.CreateExamplesFromDataSet(irisDataSet, numerizer);

                NeuralNetworkConfig config = new NeuralNetworkConfig();
                config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_INPUTS, 4);
                config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_OUTPUTS, 3);
                config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_HIDDEN_NEURONS,
                                 6);
                config.SetConfig(FeedForwardNeuralNetwork.LOWER_LIMIT_WEIGHTS, -2.0);
                config.SetConfig(FeedForwardNeuralNetwork.UPPER_LIMIT_WEIGHTS, 2.0);

                FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(config);
                ffnn.SetTrainingScheme(new BackPropagationLearning(0.1, 0.9));

                ffnn.TrainOn(innds, 1000);

                innds.RefreshDataset();
                int[] result = ffnn.TestOnDataSet(innds);
                System.Console.WriteLine(result[0] + " right, " + result[1] + " wrong");
            }
            catch (Exception)
            {
                // Rethrow without resetting the original stack trace.
                throw;
            }
        }
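
The demo ends by printing result[0] correct and result[1] incorrect predictions. The sketch below shows how such counts could be produced, under the assumption that a prediction is judged correct when the index of its largest output matches the index of the 1 in the one-hot target; it is an illustration only, not the library's TestOnDataSet.

using System;

internal static class EvaluationSketch
{
    // Returns { right, wrong }, the same shape as the result array printed above.
    internal static int[] CountRightAndWrong(double[][] predictions, double[][] targets)
    {
        int right = 0, wrong = 0;
        for (int i = 0; i < predictions.Length; i++)
        {
            if (ArgMax(predictions[i]) == ArgMax(targets[i]))
                right++;
            else
                wrong++;
        }
        return new[] { right, wrong };
    }

    // Index of the largest value in the vector.
    private static int ArgMax(double[] values)
    {
        int best = 0;
        for (int i = 1; i < values.Length; i++)
            if (values[i] > values[best])
                best = i;
        return best;
    }
}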
Example #3
        public void testDataSetPopulation()
        {
            DataSet              irisDataSet = DataSetFactory.getIrisDataSet();
            INumerizer           numerizer   = new IrisDataSetNumerizer();
            NeuralNetworkDataSet innds       = new IrisNeuralNetworkDataSet();

            innds.CreateExamplesFromDataSet(irisDataSet, numerizer);

            NeuralNetworkConfig config = new NeuralNetworkConfig();

            config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_INPUTS, 4);
            config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_OUTPUTS, 3);
            config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_HIDDEN_NEURONS, 6);
            config.SetConfig(FeedForwardNeuralNetwork.LOWER_LIMIT_WEIGHTS, -2.0);
            config.SetConfig(FeedForwardNeuralNetwork.UPPER_LIMIT_WEIGHTS, 2.0);

            FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(config);

            ffnn.SetTrainingScheme(new BackPropagationLearning(0.1, 0.9));

            ffnn.TrainOn(innds, 10);

            innds.RefreshDataset();
            ffnn.TestOnDataSet(innds);
        }
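
The 4-input / 3-output configuration above mirrors the Iris data: four measurements per flower and three species. The sketch below is a hypothetical stand-in for what a numerizer such as IrisDataSetNumerizer produces (the real class is not shown here), mapping one example to a 4-element input vector and a 3-element one-hot target.

using System;

internal static class IrisNumerizationSketch
{
    private static readonly string[] Classes = { "setosa", "versicolor", "virginica" };

    internal static (double[] inputs, double[] targets) Numerize(
        double sepalLength, double sepalWidth,
        double petalLength, double petalWidth, string label)
    {
        // The four measurements become the network inputs.
        double[] inputs = { sepalLength, sepalWidth, petalLength, petalWidth };

        // The class label becomes a one-hot target of length three.
        double[] targets = new double[Classes.Length];
        int index = Array.IndexOf(Classes, label);
        if (index < 0)
            throw new ArgumentException($"Unknown class label: {label}");
        targets[index] = 1.0;

        return (inputs, targets);
    }
}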
Example #4
        internal static void backPropagationDemo()
        {
            try
            {
                System.Console.WriteLine(Util.ntimes("*", 100));
                System.Console.WriteLine(
                    "\n BackpropagationDemo  - Running BackProp on Iris data Set with {0} epochs of learning ",
                    epochs);
                System.Console.WriteLine(Util.ntimes("*", 100));

                DataSet              animalDataSet = DataSetFactory.getAnimalDataSet();
                INumerizer           numerizer     = new AnimalDataSetNumerizer();
                NeuralNetworkDataSet innds         = new IrisNeuralNetworkDataSet();

                innds.CreateExamplesFromDataSet(animalDataSet, numerizer);

                NeuralNetworkConfig config = new NeuralNetworkConfig();
                config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_INPUTS, 20);
                config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_OUTPUTS, 3);
                config.SetConfig(FeedForwardNeuralNetwork.NUMBER_OF_HIDDEN_NEURONS, numNeuronsPerLayer);
                config.SetConfig(FeedForwardNeuralNetwork.LOWER_LIMIT_WEIGHTS, -2.0);
                config.SetConfig(FeedForwardNeuralNetwork.UPPER_LIMIT_WEIGHTS, 2.0);

                FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(config);
                ffnn.SetTrainingScheme(new BackPropagationLearning(0.1, 0.9));

                ffnn.TrainOn(innds, epochs);

                innds.RefreshDataset();
                int[] result = ffnn.TestOnDataSet(innds);
                System.Console.WriteLine(result[0] + " right, " + result[1] + " wrong");
            }
            catch (Exception)
            {
                // Rethrow without resetting the original stack trace.
                throw;
            }
        }