        // Wires a new dendrite (connection) between an upstream and a downstream
        // neuron, using the supplied IWeightBuilder to initialize its weight.
        public static Dendrite BuildDendrite(Neuron upstreamNeuron, Neuron downStreamNeuron, IWeightBuilder weightBuilder)
        {
            Dendrite toReturn = new Dendrite();

            toReturn.DownStreamNeuron = downStreamNeuron;
            toReturn.UpStreamNeuron   = upstreamNeuron;
            toReturn.Weight           = weightBuilder.BuildWeight();

            return(toReturn);
        }
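BuildDendrite delegates weight initialization to whatever IWeightBuilder it is handed. The interface itself is not shown here; the sketch below assumes it declares only the BuildWeight() method used above, and shows one plausible implementation that draws small Gaussian-distributed initial weights. The class name and the standard-deviation parameter are illustrative, not part of the original code.

        // A minimal sketch of a weight builder, assuming IWeightBuilder declares
        // only double BuildWeight(). Gaussian initialization is an illustrative
        // choice, not necessarily what the original project uses.
        public class GaussianWeightBuilder : IWeightBuilder
        {
            private readonly System.Random _random = new System.Random();
            private readonly double _standardDeviation;

            public GaussianWeightBuilder(double standardDeviation = 0.01)
            {
                _standardDeviation = standardDeviation;
            }

            public double BuildWeight()
            {
                // Box-Muller transform: two uniform samples -> one standard-normal
                // sample, scaled down so initial weights start small.
                double u1 = 1.0 - _random.NextDouble();
                double u2 = 1.0 - _random.NextDouble();
                double standardNormal = System.Math.Sqrt(-2.0 * System.Math.Log(u1)) *
                                        System.Math.Sin(2.0 * System.Math.PI * u2);

                return standardNormal * _standardDeviation;
            }
        }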
        public void UpdateNetwork(double stepSize, int sizeOfTrainingData, int batchSize)
        {
            // Generate the list of layers to update, starting from the first
            // hidden layer and ending with the output layer.
            List <Layer> layersToUpdate = new List <Layer>();

            foreach (HiddenLayer hiddenLayer in HiddenLayers)
            {
                layersToUpdate.Add(hiddenLayer);
            }
            layersToUpdate.Add(OutputLayer);

            // Start from the last layer and work backward.
            for (int c = layersToUpdate.Count - 1; c >= 0; c--)
            {
                // take each of the neurons in the layer and update the bias
                // take each of the dendrites attached to the neuron and update the weight.
                for (int n = 0; n < layersToUpdate[c].Neurons.Count; n++)
                {
                    // the current neuron being examined.
                    Neuron thisNeuron      = layersToUpdate[c].Neurons[n];
                    bool   isNeuronDropped = layersToUpdate[c].DropOutMask[n] == 0;

                    // get the sum of all the errors of this neuron across the batch.
                    // divide by the batch size. This is the average error for the neuron.
                    double averageNeuronError = thisNeuron.SumOfErrorsOfNeuron / (batchSize * 1.0);
                    thisNeuron.ClearError();

                    // Updating the bias is gradient descent on the bias: dC/db = delta,
                    // so subtract the average delta, scaled by the step size.
                    thisNeuron.Bias = thisNeuron.Bias - (stepSize * averageNeuronError);

                    for (int d = 0; d < thisNeuron.UpstreamDendrites.Count; d++)
                    {
                        Dendrite dendrite = thisNeuron.UpstreamDendrites[d];

                        double averageErrorWrtWeight = dendrite.SumOfErrorsWrtWeights / (batchSize * 1.0);
                        dendrite.ClearError();

                        double regularization = 0.0;
                        if (_regularizationFunction != null && !isNeuronDropped)
                        {
                            regularization = _regularizationFunction.Compute(dendrite.Weight, sizeOfTrainingData);
                        }

                        dendrite.Weight = dendrite.Weight -
                                          (stepSize * (averageErrorWrtWeight + regularization));
                    }
                }
            }
        }
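UpdateNetwork assumes the per-neuron and per-dendrite error sums were already accumulated over one mini-batch (by Backpropagation, further down) and then applies a single averaged gradient-descent step. A rough sketch of how the two calls might be driven from a training loop follows; the FeedForward call and the TrainingSample type are assumptions made for illustration and are not part of the code shown here.

        // A minimal sketch of a mini-batch training loop, assuming a forward-pass
        // method (FeedForward) and a simple TrainingSample type exist. Only
        // Backpropagation and UpdateNetwork are taken from the code in this file.
        public void TrainOneEpochSketch(List<TrainingSample> trainingData, double stepSize, int batchSize)
        {
            for (int start = 0; start < trainingData.Count; start += batchSize)
            {
                int actualBatchSize = System.Math.Min(batchSize, trainingData.Count - start);

                for (int i = 0; i < actualBatchSize; i++)
                {
                    TrainingSample sample = trainingData[start + i];

                    // Hypothetical forward pass: fills in Activation/TotalInput on every neuron.
                    FeedForward(sample.Inputs);

                    // Accumulate this sample's errors onto every neuron and dendrite.
                    Backpropagation(sample.ExpectedOutputs);
                }

                // Average the accumulated errors over the batch and take one
                // gradient-descent step on every bias and weight.
                UpdateNetwork(stepSize, trainingData.Count, actualBatchSize);
            }
        }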
        // Builds a new neuron with zero bias and, when a previous layer is given,
        // wires a dendrite from every neuron in that layer to the new neuron.
        public static Neuron BuildNeuron(IWeightBuilder weightBuilder, Layer previousLayer)
        {
            Neuron toReturn = new Neuron();

            toReturn.Bias = 0;

            if (previousLayer != null)
            {
                for (int c = 0; c < previousLayer.Neurons.Count; c++)
                {
                    Neuron previousNeuron = previousLayer.Neurons[c];

                    Dendrite dendrite = Dendrite.BuildDendrite(previousNeuron, toReturn, weightBuilder);

                    toReturn.UpstreamDendrites.Add(dendrite);
                    previousNeuron.DownstreamDendrites.Add(dendrite);
                }
            }

            return(toReturn);
        }
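Because BuildNeuron wires each new neuron back to every neuron in the previous layer, building a fully connected layer is just a loop over the desired neuron count. A sketch, assuming Layer has a parameterless constructor and an initialized Neurons list (the BuildLayerSketch helper itself is hypothetical):

        // A hypothetical helper that stacks a fully connected layer of the requested
        // size on top of previousLayer (pass null for the very first layer). Only
        // BuildNeuron and the Neurons list are taken from the code in this file.
        public static Layer BuildLayerSketch(int neuronCount, Layer previousLayer, IWeightBuilder weightBuilder)
        {
            Layer layer = new Layer();

            for (int n = 0; n < neuronCount; n++)
            {
                // Each new neuron gets one weighted dendrite from every neuron
                // in the previous layer.
                Neuron neuron = Neuron.BuildNeuron(weightBuilder, previousLayer);
                layer.Neurons.Add(neuron);
            }

            return layer;
        }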
        public double Backpropagation(double[] expectedValues)
        {
            double totalNetworkCost = 0.0;

            // Compute error for the output neurons to get the ball rolling.
            // See https://github.com/kwende/CSharpNeuralNetworkExplorations/blob/master/Explorations/SimpleMLP/Documentation/OutputNeuronErrors.png
            for (int d = 0; d < expectedValues.Length; d++)
            {
                Neuron outputNeuronBeingExamined = OutputLayer.Neurons[d];
                double expectedOutput            = expectedValues[d];
                double actualOutput = outputNeuronBeingExamined.Activation;
                double actualInput  = outputNeuronBeingExamined.TotalInput;

                double cost = _costFunction.Compute(expectedOutput, actualOutput);
                totalNetworkCost += cost;

                double errorRelativeToActivation =
                    (_costFunction.ComputeDerivativeWRTActivation(actualOutput, expectedOutput));

                double errorWrtToNeuron = errorRelativeToActivation * Math.Sigmoid.ComputeDerivative(actualInput);

                outputNeuronBeingExamined.AddError(errorWrtToNeuron);

                for (int e = 0; e < outputNeuronBeingExamined.UpstreamDendrites.Count; e++)
                {
                    Dendrite dendrite              = outputNeuronBeingExamined.UpstreamDendrites[e];
                    Neuron   upstreamNeuron        = (Neuron)dendrite.UpStreamNeuron;
                    double   errorRelativeToWeight = (errorWrtToNeuron * upstreamNeuron.Activation);

                    dendrite.AddError(errorRelativeToWeight);
                }
            }

            // Compute error for each neuron in each layer moving backwards (backprop).
            for (int d = HiddenLayers.Count - 1; d >= 0; d--)
            {
                HiddenLayer hiddenLayer = HiddenLayers[d];
                for (int e = 0; e < hiddenLayer.Neurons.Count; e++)
                {
                    Neuron thisNeuron = (Neuron)hiddenLayer.Neurons[e];
                    double dropoutBit = hiddenLayer.DropOutMask[e];

                    double input = thisNeuron.TotalInput;

                    double          errorSum            = 0.0;
                    List <Dendrite> downStreamDendrites = thisNeuron.DownstreamDendrites;

                    for (int f = 0; f < downStreamDendrites.Count; f++)
                    {
                        Dendrite currentDendrite  = downStreamDendrites[f];
                        Neuron   downStreamNeuron = currentDendrite.DownStreamNeuron;

                        double delta  = downStreamNeuron.CurrentNeuronError;
                        double weight = currentDendrite.Weight;
                        errorSum += delta * weight;
                    }

                    double errorWrtToThisNeuron = errorSum * Math.Sigmoid.ComputeDerivative(input) * dropoutBit;
                    thisNeuron.AddError(errorWrtToThisNeuron);

                    for (int f = 0; f < thisNeuron.UpstreamDendrites.Count; f++)
                    {
                        Dendrite dendrite              = thisNeuron.UpstreamDendrites[f];
                        Neuron   upstreamNeuron        = (Neuron)dendrite.UpStreamNeuron;
                        double   errorRelativeToWeight = (errorWrtToThisNeuron * upstreamNeuron.Activation);
                        dendrite.AddError(errorRelativeToWeight);
                    }
                }
            }

            return(totalNetworkCost);
        }
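In equation form, the quantities accumulated above are the standard backpropagation deltas: the output-layer delta is the cost derivative times the sigmoid derivative, a hidden delta sums the downstream deltas through the connecting weights (zeroed by the dropout mask), and each dendrite accumulates the downstream delta times the upstream activation. A sketch of the corresponding formulas, with sigma the sigmoid used by Math.Sigmoid and the notation chosen here purely for illustration:

% Output neuron j (the errorWrtToNeuron computed above):
\delta^{L}_{j} = \frac{\partial C}{\partial a^{L}_{j}} \, \sigma'(z^{L}_{j})

% Hidden neuron j in layer l, with dropout mask bit m^{l}_{j} \in \{0, 1\}:
\delta^{l}_{j} = \Big( \sum_{k} w^{l+1}_{kj} \, \delta^{l+1}_{k} \Big) \, \sigma'(z^{l}_{j}) \, m^{l}_{j}

% Gradients accumulated per sample (averaged over the batch in UpdateNetwork):
\frac{\partial C}{\partial b^{l}_{j}} = \delta^{l}_{j},
\qquad
\frac{\partial C}{\partial w^{l}_{jk}} = a^{l-1}_{k} \, \delta^{l}_{j}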