Example 1: ClassifyData - forward pass and RMSE summary for the training set
        private static TrainingState ClassifyData(NeuralNetwork neuralNetwork, UserInput userInput, long epoch)
        {
            // calculate the output for each training example
            foreach (DataInstance trainingExample in userInput.DataInstances)
            {
                neuralNetwork.InputNodes.ForEach(input => input.CalculateValue(trainingExample));
                neuralNetwork.HiddenNodes.ForEach(hidden => hidden.CalculateValue(neuralNetwork.Weights));
                neuralNetwork.OutputNodes.ForEach(output => output.CalculateValue(neuralNetwork.Weights));

                // associate all the calculated outputs (one in our current case) with the training example
                trainingExample.CalculatedClasses.Clear();
                neuralNetwork.OutputNodes.ForEach(output => trainingExample.CalculatedClasses.Add(output.Value));
            }

            // calculate RMSE for all training examples
            List<double> RMSEs = userInput.DataInstances.ConvertAll(trainingExample => trainingExample.RMSE);

            // return the results
            return new TrainingState()
            {
                Epoch = epoch,
                MaxRMSE = RMSEs.Max(),
                AvgRMSE = RMSEs.Average(),
                PercentCorrect = (float)RMSEs.Where(rmse => rmse < userInput.ErrorMargin).Count() / (float)RMSEs.Count
            };
        }
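
The RMSE property read from each DataInstance above is not shown in this example. The following is a minimal sketch of how it could be implemented, assuming the instance also carries its expected output values in a hypothetical TargetClasses list parallel to CalculatedClasses.

        // Hypothetical sketch: root-mean-square error between the expected and
        // calculated outputs of this training example. TargetClasses is an
        // assumption; only CalculatedClasses appears in the example above.
        public double RMSE
        {
            get
            {
                double sumSquaredError = 0.0;
                for (int i = 0; i < TargetClasses.Count; i++)
                {
                    double error = TargetClasses[i] - CalculatedClasses[i];
                    sumSquaredError += error * error;
                }
                return Math.Sqrt(sumSquaredError / TargetClasses.Count);
            }
        }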
Example 2: Fixed_200Epochs - training on iris.csv for 200 epochs
        public void Fixed_200Epochs()
        {
            UserInput userInput = new UserInput()
            {
                DataFilePath = ".\\TestData\\iris.csv",
                ErrorMargin = 0.05,
                HiddenNodeCount = 10,
                LearningRate = 0.1,
                MaxEpochs = 200,
                MaxInitialWeight = 0.02,
                MinInitialWeight = 0.02
            };

            NeuralNetwork net = new NeuralNetwork(userInput);
            Backpropagation.Calculate(net, userInput);

            Assert.AreEqual(200, net.TrainingState.Epoch);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.MaxRMSE - 0.3539866109402714), 0.0000001);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.AvgRMSE - 0.08665010792931953), 0.0000001);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.PercentCorrect - .43), 0.05);
        }
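
An equivalent way to express the tolerance checks above is NUnit's delta overload of Assert.AreEqual, which reports both values when the assertion fails:

            Assert.AreEqual(0.3539866109402714, net.TrainingState.MaxRMSE, 0.0000001);
            Assert.AreEqual(0.08665010792931953, net.TrainingState.AvgRMSE, 0.0000001);
            Assert.AreEqual(0.43, net.TrainingState.PercentCorrect, 0.05);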
Example 3: Fixed_10Epochs - training on iris.csv for 10 epochs
        public void Fixed_10Epochs()
        {
            UserInput userInput = new UserInput()
            {
                DataFilePath = ".\\TestData\\iris.csv",
                ErrorMargin = 0.05,
                HiddenNodeCount = 10,
                LearningRate = 0.1,
                MaxEpochs = 10,
                MaxInitialWeight = 0.02,
                MinInitialWeight = 0.02
            };

            NeuralNetwork net = new NeuralNetwork(userInput);
            Backpropagation.Calculate(net, userInput);

            Assert.AreEqual(10, net.TrainingState.Epoch);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.MaxRMSE - 0.7647903291625834), 0.0000001);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.AvgRMSE - 0.4198381630410543), 0.0000001);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.PercentCorrect - 0), 0.05);
        }
Example 4: Backpropagation.Calculate - the main training loop
        public static void Calculate(NeuralNetwork neuralNetwork, UserInput userInput)
        {
            LogHelper.WriteDebug("Backpropagation.Calculate - Enter");
            long epoch = 0;

            // do until the termination condition is met
            while (++epoch <= userInput.MaxEpochs && neuralNetwork.TrainingState.MaxRMSE >= userInput.ErrorMargin)
            {
                LogHelper.WriteDebug("Starting epoch {0}", epoch);

                // iterate through the training examples
                foreach (DataInstance trainingExample in userInput.DataInstances)
                {
                    LogHelper.WriteDebug("Processing training example: {0}", trainingExample);

                    // 1. propagate the input forward through the network
                    LogHelper.WriteDebug("Propagating input forward through network.");
                    neuralNetwork.InputNodes.ForEach(input => input.CalculateValue(trainingExample));
                    neuralNetwork.HiddenNodes.ForEach(hidden => hidden.CalculateValue(neuralNetwork.Weights));
                    neuralNetwork.OutputNodes.ForEach(output => output.CalculateValue(neuralNetwork.Weights));

                    // 2. propagate error backward through the network
                    LogHelper.WriteDebug("Calculating output unit error values.");
                    neuralNetwork.OutputNodes.ForEach(output => output.CalculateError(trainingExample));

                    LogHelper.WriteDebug("Calculating hidden unit error values.");
                    neuralNetwork.HiddenNodes.ForEach(hidden => hidden.CalculateError(neuralNetwork.Weights));

                    LogHelper.WriteDebug("Updating network weights.");
                    neuralNetwork.Weights.ForEach(weight => weight.CalculateValue(userInput.LearningRate));
                }

                // classify the data based on the updated weights
                LogHelper.WriteDebug("Running classification with updated weights.");
                neuralNetwork.TrainingState = ClassifyData(neuralNetwork, userInput, epoch);
            }

            LogHelper.WriteDebug("Backpropagation.Calculate - Exit");
        }
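
The CalculateError and CalculateValue methods invoked in steps 1 and 2 are not part of this example. The sketch below shows the standard sigmoid backpropagation rules they presumably implement; every member not visible above (Error, FromNode, ToNode, and the Target helper) is an assumption, not the project's actual API.

        // Hypothetical OutputNode.CalculateError: delta_k = o_k * (1 - o_k) * (t_k - o_k).
        // The Target helper that looks up this node's expected value is an assumed name.
        public void CalculateError(DataInstance trainingExample)
        {
            double target = trainingExample.Target(this);
            Error = Value * (1 - Value) * (target - Value);
        }

        // Hypothetical HiddenNode.CalculateError: delta_h = o_h * (1 - o_h) * sum_k(w_kh * delta_k),
        // summing over the weights that leave this hidden node.
        public void CalculateError(List<Weight> weights)
        {
            double downstream = weights.Where(w => w.FromNode == this)
                                       .Sum(w => w.Value * w.ToNode.Error);
            Error = Value * (1 - Value) * downstream;
        }

        // Hypothetical Weight.CalculateValue: w_ji <- w_ji + eta * delta_j * x_ji,
        // i.e. the learning rate times the destination node's error times the source node's value.
        public void CalculateValue(double learningRate)
        {
            Value += learningRate * ToNode.Error * FromNode.Value;
        }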
Example 5: Fixed - training on xor.csv for 10 epochs
        public void Fixed()
        {
            UserInput userInput = new UserInput()
            {
                DataFilePath = ".\\TestData\\xor.csv",
                ErrorMargin = 0.1,
                HiddenNodeCount = 2,
                LearningRate = 0.1,
                MaxEpochs = 10,
                MaxInitialWeight = 0.6,
                MinInitialWeight = 0.6
            };

            NeuralNetwork net = new NeuralNetwork(userInput);
            Backpropagation.Calculate(net, userInput);

            Assert.AreEqual(10, net.TrainingState.Epoch);

            Assert.LessOrEqual(Math.Abs(net.TrainingState.MaxRMSE - 0.5432215742673802), 0.0000001);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.AvgRMSE - 0.4999911891911738), 0.0000001);
            Assert.LessOrEqual(Math.Abs(net.TrainingState.PercentCorrect - 0), 0.05);
        }
Example 6: trainButton_Click - wiring the form to training
        private void trainButton_Click(object sender, EventArgs e)
        {
            UpdateUserInputFromControls();

            InitializeResultsFile();

            output.Clear();

            NeuralNetwork net = new NeuralNetwork(_userInput);

            net.TrainingStateChanged += new NeuralNetwork.TrainingStateChangedEventHandler(net_TrainingStateChanged);
            Backpropagation.Calculate(net, _userInput);
        }
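
The net_TrainingStateChanged handler wired up here is not shown, and neither is the delegate's signature. The following is a minimal sketch of one possible implementation, assuming the event passes the TrainingState produced in Example 1 and that output is the form's results text box.

        // Hypothetical handler: the delegate signature and the output control
        // are assumptions based on the members visible in these examples.
        private void net_TrainingStateChanged(TrainingState trainingState)
        {
            output.AppendText(string.Format(
                "Epoch {0}: MaxRMSE={1:F6}, AvgRMSE={2:F6}, Correct={3:P0}{4}",
                trainingState.Epoch,
                trainingState.MaxRMSE,
                trainingState.AvgRMSE,
                trainingState.PercentCorrect,
                Environment.NewLine));
        }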