Example #1
            // Backpropagate the error and update this neuron's weights and bias
            public void BackPropagate(double learningRate, double error,
                                      SecondLayerNeuron output)
            {
                // Obtain the sensitivity
                double sensitivity = Sensitivity(error, output);

                // Calculate the adjustments
                double[] adjustments = new double[inputs.Count + 1];

                // For the weights Dwjk = lr * sensitivity * input
                for (int i = 0; i < inputs.Count; i++)
                {
                    adjustments[i] = learningRate * sensitivity * inputs[i].Output;
                }

                // For the bias the update is the same, but the input is always 1
                adjustments[inputs.Count] = learningRate * sensitivity;

                // Apply adjustments to the weights for the inputs
                for (int i = 0; i < inputs.Count; i++)
                {
                    weights[i] += adjustments[i];
                }

                // Apply the adjustment to the bias
                bias += adjustments[inputs.Count];
            }
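
For reference, the adjustments above apply the delta rule named in the comment, with the bias treated as a weight on a constant input of 1 (here \eta is the learning rate, \delta_j the sensitivity computed above, and x_k the k-th input value):

    \Delta w_{jk} = \eta \, \delta_j \, x_k, \qquad \Delta b_j = \eta \, \delta_j
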
Example #2
        // Constructor
        public SimpleNeuralNet1(
            int numberOfInputs,
            int numberOfHiddenNodes,
            double learningRate,
            int maxRepetitions)
        {
            this.learningRate   = learningRate;
            this.maxRepetitions = maxRepetitions;

            // Initialize the inputs, setting each value to 0
            inputs = new List<Neuron>(numberOfInputs);
            for (int i = 0; i < numberOfInputs; i++)
            {
                inputs.Add(new InputNeuron(0));
            }

            // Initialize the hidden nodes, setting each bias to 0
            hiddenLayer = new List<Neuron>(numberOfHiddenNodes);
            for (int i = 0; i < numberOfHiddenNodes; i++)
            {
                hiddenLayer.Add(new FirstLayerNeuron(inputs, 0));
            }

            // Initialize the single second (output) layer neuron with a bias of 0
            secondLayer = new SecondLayerNeuron(hiddenLayer, 0);
        }
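
A minimal usage sketch; the argument values below are hypothetical and chosen only to illustrate the constructor:

        // Hypothetical example: a net with 2 inputs and 3 hidden nodes,
        // a learning rate of 0.1 and at most 1000 training repetitions.
        var net = new SimpleNeuralNet1(
            numberOfInputs:      2,
            numberOfHiddenNodes: 3,
            learningRate:        0.1,
            maxRepetitions:      1000);
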
Example #3
            // Computes the sensitivity of the unit
            private double Sensitivity(double error, SecondLayerNeuron output)
            {
                // fprime of the net input times the weighted sum of the output sensitivities:
                // fprime(net_j) * sum_k(w_kj * sensitivity_k); here there is a single
                // output neuron, so the sum reduces to one term
                double outputSensitivity  = output.Sensitivity(error);
                int    indexOfThisNeuron  = output.Inputs.IndexOf(this);
                double weightOfThisNeuron = output.Weights[indexOfThisNeuron];

                return fprime(sum) * outputSensitivity * weightOfThisNeuron;
            }
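
The fprime function and the sum field are presumably members of the neuron class not shown in this snippet. As an illustration only, assuming a sigmoid activation f(x) = 1 / (1 + e^(-x)), a matching derivative could be written as follows (requires using System; for Math.Exp):

            // Illustrative sketch only: assumes the neuron uses a sigmoid activation.
            // The derivative of the sigmoid is sigmoid(x) * (1 - sigmoid(x)).
            private static double Sigmoid(double x)
            {
                return 1.0 / (1.0 + Math.Exp(-x));
            }

            private double fprime(double x)
            {
                double f = Sigmoid(x);
                return f * (1.0 - f);
            }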