Code Example #1
File: NeuralNetwork.cs Project: SJHOne/NeuralNet
        public void Compute()
        {
            // Sum the weighted input connections and set the output value via the activation function.
            // Each layer is assumed to be fully connected to the next.

            // If this is an input neuron (no incoming connections), do nothing and preserve outputValue as-is
            if (inputconnections.Count == 0)
            {
                return;
            }

            netInputValue = 0.0;

            // Sum each input connection
            foreach (Connection nc in inputconnections)
            {
                netInputValue += (nc.source.outputValue * nc.weight);
            }

            // Add the internal bias to the weighted sum
            netInputValue += internalBias;

            // Pass the net input through the configured activation function and store the result as the output value
            outputValue = ActivationFunctionImpl.Run(activationFunction, netInputValue);
        }
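
Compute() hands the summed net input to ActivationFunctionImpl.Run, which is not shown in these extracts. Below is a minimal sketch of what such a dispatcher could look like for the logistic sigmoid case; the ActivationFunction enum, its Sigmoid member, and the class name are assumptions for illustration, not the project's actual API.

using System;

// Sketch only -- the enum and class names below are assumptions for illustration,
// not SJHOne/NeuralNet's actual types.
public enum ActivationFunction
{
    Sigmoid
}

public static class ActivationFunctionSketch
{
    // Map the requested activation function onto its formula and apply it to the net input.
    public static double Run(ActivationFunction activationFunction, double netInput)
    {
        switch (activationFunction)
        {
            case ActivationFunction.Sigmoid:
                // Standard logistic sigmoid: 1 / (1 + e^(-x))
                return 1.0 / (1.0 + Math.Exp(-netInput));
            default:
                throw new ArgumentOutOfRangeException(nameof(activationFunction));
        }
    }
}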
Code Example #2
File: NeuralNetwork.cs Project: SJHOne/NeuralNet
        public double learningRate = 0.5;   // Constant Learning Rate, for now.

        public void CalculateUpdateWeight()
        {
            // 'We perform the actual updates in the neural network after we have the new weights
            // leading into the hidden layer neurons (ie, we use the original weights, not the
            // updated weights, when we continue the backpropagation algorithm below).' - from reference [1] above.

            // Figure out if we are a connection to the output layer.
            if (this.destination.outputconnections.Count == 0)
            {
                // This connection leads to an output neuron:

                // How much does the total (output) error change with respect to the output?
                double howMuchTotalErrorChange = destination.DeltaTotalError();

                // How much does the output of destination change with respect to its net input (the sigmoid derivative)
                double howMuchOutputChangeWithRespectToInput = destination.DeltaOutputWithRespectToInput();

                // How much does the total net *input* of destination change due to this connection
                double howMuchTotalNetInputChangeDueToThisWeight = this.source.OutputValue;

                double totalWeightChangeForThisConnection = howMuchTotalErrorChange * howMuchOutputChangeWithRespectToInput * howMuchTotalNetInputChangeDueToThisWeight;

                // This will be 'locked in' with a commit, later
                newWeight = weight - (totalWeightChangeForThisConnection * learningRate);
            }
            else
            {
                // Otherwise this connection leads to a hidden neuron (this is a bit trickier)

                // 'We’re going to use a similar process as we did for the output layer,
                // but slightly different to account for the fact that the output of each
                // hidden layer neuron contributes to the output (and therefore error)
                // of multiple output neurons. We know that out_{h1} affects both out_{o1} and out_{o2}
                // therefore the \frac{\partial E_{total}}{\partial out_{h1}} needs to
                // take into consideration its effect on the both output neurons:' - from reference [1] above.

                // How much does the total error change with respect to the destination's output?
                // The destination (hidden) neuron feeds several output neurons, so sum the
                // 'effect' propagated back through each of its outgoing connections.
                double runningTotal = 0.0;

                foreach (Connection nnc in this.destination.outputconnections)
                {
                    Neuron nOutputNeuron = nnc.destination;
                    runningTotal += (nOutputNeuron.DeltaTotalError() * nOutputNeuron.DeltaOutputWithRespectToInput() * nnc.weight);
                }

                // d(out_dest)/d(net_dest): the sigmoid derivative, evaluated from the destination's output
                double partialDerivativeDestination = ActivationFunctionImpl.SigmoidDerivative(this.destination.OutputValue);
                // d(net_dest)/d(weight): the output of the source neuron
                double partialDerivativeNetInput = source.OutputValue;

                // This will be 'locked in' with a commit, later
                newWeight = weight - (runningTotal * partialDerivativeDestination * partialDerivativeNetInput * learningRate);
            }
        }
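
The two branches above implement the chain-rule weight update described in reference [1]. As a sketch of the underlying maths (notation mine, not the project's; \eta is the learning rate and out_{source} is the source neuron's output), the quantities the code multiplies together are:

For a connection of weight w into an output neuron o:

\frac{\partial E_{total}}{\partial w} = \frac{\partial E_{total}}{\partial out_o} \cdot \frac{\partial out_o}{\partial net_o} \cdot \frac{\partial net_o}{\partial w}, \qquad \frac{\partial net_o}{\partial w} = out_{source}

For a connection of weight w into a hidden neuron h that feeds output neurons o:

\frac{\partial E_{total}}{\partial out_h} = \sum_{o} \frac{\partial E_o}{\partial out_o} \cdot \frac{\partial out_o}{\partial net_o} \cdot w_{h \to o}

\frac{\partial E_{total}}{\partial w} = \frac{\partial E_{total}}{\partial out_h} \cdot \frac{\partial out_h}{\partial net_h} \cdot out_{source}, \qquad \frac{\partial out_h}{\partial net_h} = out_h (1 - out_h)

In both cases the provisional update is

w_{new} = w - \eta \cdot \frac{\partial E_{total}}{\partial w}

which is exactly the newWeight = weight - (... * learningRate) expression computed above and 'locked in' later by a commit step.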
Code Example #3
File: NeuralNetwork.cs Project: SJHOne/NeuralNet
        public double DeltaOutputWithRespectToInput()
        {
            // Memoised: 0.0 doubles as the 'not yet computed' sentinel, so the derivative is
            // only evaluated once per neuron per pass.
            if (howMuchOutputChangeWithRespectToInput != 0.0)
            {
                return howMuchOutputChangeWithRespectToInput;
            }

            // Derivative of the neuron's output with respect to its net input,
            // evaluated from the already-activated output value.
            howMuchOutputChangeWithRespectToInput = ActivationFunctionImpl.SigmoidDerivative(OutputValue);
            return howMuchOutputChangeWithRespectToInput;
        }
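
DeltaOutputWithRespectToInput passes the neuron's already-activated output (not its net input) to SigmoidDerivative, which implies the usual identity \sigma'(net) = out \cdot (1 - out). The project's SigmoidDerivative is not shown in these extracts; a minimal sketch under that assumption:

public static class SigmoidDerivativeSketch
{
    // Sketch only -- assumes SigmoidDerivative receives the already-activated output value,
    // so the logistic derivative reduces to output * (1 - output). Not the project's verified source.
    public static double SigmoidDerivative(double output)
    {
        return output * (1.0 - output);
    }
}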