/// <summary>
/// Builds a fully wired network: an input layer, one hidden layer per entry in
/// <paramref name="hiddenLayerCounts"/>, and an output layer.
/// </summary>
/// <param name="random">Shared random source used for weight initialization and dropout.</param>
/// <param name="costFunction">Cost function stored on the network for training.</param>
/// <param name="regularizationFunction">Regularization term stored on the network.</param>
/// <param name="weightIntializerType">Selects the weight-initialization strategy for every non-input layer.</param>
/// <param name="dropoutLayerOptions">Holds the 1-based indices of layers that use dropout and the dropout probability.</param>
/// <param name="inputNeuronCount">Neuron count of the input layer.</param>
/// <param name="outputNeuronCount">Neuron count of the output layer.</param>
/// <param name="hiddenLayerCounts">Neuron count for each hidden layer, in order from input to output.</param>
/// <returns>The fully constructed <see cref="Network"/>.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unrecognized <paramref name="weightIntializerType"/>.</exception>
public static Network BuildNetwork(Random random,
                                           ICostFunction costFunction, IRegularizationFunction regularizationFunction,
                                           WeightIntializerType weightIntializerType,
                                           DropoutLayerOptions dropoutLayerOptions,
                                           int inputNeuronCount, int outputNeuronCount, params int[] hiddenLayerCounts)
        {
            Network network = new Network(costFunction, regularizationFunction,
                                          dropoutLayerOptions, random);

            // The input layer has no upstream layer, hence no weight builder.
            network.InputLayer = InputLayer.BuildInputLayer(null, inputNeuronCount, random);

            Layer previousLayer     = network.InputLayer;
            int   dropoutLayerIndex = 1;    // dropout indices start at 1 for the first hidden layer

            for (int c = 0; c < hiddenLayerCounts.Length; c++)
            {
                bool isDropoutLayer = dropoutLayerOptions.DropoutLayerIndices.Contains(dropoutLayerIndex);

                HiddenLayer hiddenLayer = HiddenLayer.BuildHiddenLayer(
                    CreateWeightBuilder(weightIntializerType, previousLayer, random),
                    previousLayer,
                    hiddenLayerCounts[c],
                    isDropoutLayer ? dropoutLayerOptions.ProbabilityOfDropout : 0,
                    random);

                network.HiddenLayers.Add(hiddenLayer);
                previousLayer = hiddenLayer;
                dropoutLayerIndex++;
            }

            // The output layer can also be flagged as a dropout layer via the next index.
            bool outputIsDropoutLayer = dropoutLayerOptions.DropoutLayerIndices.Contains(dropoutLayerIndex);

            // NOTE(review): this cast assumes at least one hidden layer was built; with an empty
            // hiddenLayerCounts, previousLayer is still the InputLayer and the cast throws — confirm
            // whether zero-hidden-layer networks are a supported configuration.
            network.OutputLayer = OutputLayer.BuildOutputLayer(
                CreateWeightBuilder(weightIntializerType, previousLayer, random),
                (HiddenLayer)previousLayer,
                outputNeuronCount,
                outputIsDropoutLayer ? dropoutLayerOptions.ProbabilityOfDropout : 0,
                random);

            return network;
        }

        /// <summary>
        /// Maps a <see cref="WeightIntializerType"/> to a concrete <see cref="IWeightBuilder"/>.
        /// Extracted here because the same switch was previously duplicated for hidden and output
        /// layers; an unknown enum value also used to leave the builder null and fail later with
        /// a confusing NullReferenceException — now it fails fast.
        /// </summary>
        /// <param name="weightIntializerType">The requested initialization strategy.</param>
        /// <param name="previousLayer">Upstream layer; its neuron count is the fan-in for scaled initialization.</param>
        /// <param name="random">Random source passed to the builder.</param>
        /// <returns>A weight builder for the requested strategy.</returns>
        /// <exception cref="ArgumentOutOfRangeException">Thrown for an unrecognized strategy.</exception>
        private static IWeightBuilder CreateWeightBuilder(WeightIntializerType weightIntializerType,
                                                          Layer previousLayer, Random random)
        {
            switch (weightIntializerType)
            {
            case WeightIntializerType.RandomGaussianWithNeuronCount:
                // Initialization scaled by the fan-in (previous layer's neuron count).
                return new RandomGaussianWithNeuronCount(previousLayer.Neurons.Count, 0, random);

            case WeightIntializerType.RandomNormal:
                // Standard normal: mean 0, standard deviation 1.
                return new RandomGaussian(0, 1, random);

            default:
                throw new ArgumentOutOfRangeException("weightIntializerType",
                                                      "Unsupported weight initializer type: " + weightIntializerType);
            }
        }
        // Example #2
        /// <summary>
        /// Constructs an output layer of <paramref name="numberOfNeurons"/> neurons, each wired to
        /// <paramref name="previousLayer"/> through the supplied weight builder.
        /// </summary>
        /// <param name="weightBuilder">Strategy used to initialize each neuron's incoming weights.</param>
        /// <param name="previousLayer">The hidden layer the new neurons connect upstream to.</param>
        /// <param name="numberOfNeurons">How many output neurons to create.</param>
        /// <param name="probabilityOfDropout">Dropout probability recorded on the layer (0 disables dropout).</param>
        /// <param name="random">Random source forwarded to the layer.</param>
        /// <returns>The populated output layer.</returns>
        public static OutputLayer BuildOutputLayer(IWeightBuilder weightBuilder,
                                                   HiddenLayer previousLayer, int numberOfNeurons, double probabilityOfDropout, Random random)
        {
            OutputLayer outputLayer = new OutputLayer(numberOfNeurons, probabilityOfDropout, random);

            // Build and attach each neuron, connecting it back to the previous layer.
            int built = 0;
            while (built < numberOfNeurons)
            {
                Neuron neuron = Neuron.BuildNeuron(weightBuilder, previousLayer);
                outputLayer.Neurons.Add(neuron);
                built++;
            }

            return outputLayer;
        }
        /// <summary>
        /// Runs one backpropagation pass: computes output-layer errors from
        /// <paramref name="expectedValues"/>, then propagates errors backwards through every
        /// hidden layer, accumulating per-neuron and per-dendrite (weight) errors via AddError.
        /// </summary>
        /// <param name="expectedValues">Target activation for each output neuron, indexed to match OutputLayer.Neurons.</param>
        /// <returns>The total cost summed over all output neurons for this pass.</returns>
        public double Backpropagation(double[] expectedValues)
        {
            double totalNetworkCost = 0.0;

            // Compute error for the output neurons to get the ball rolling.
            // See https://github.com/kwende/CSharpNeuralNetworkExplorations/blob/master/Explorations/SimpleMLP/Documentation/OutputNeuronErrors.png
            for (int d = 0; d < expectedValues.Length; d++)
            {
                Neuron outputNeuronBeingExamined = OutputLayer.Neurons[d];
                double expectedOutput            = expectedValues[d];
                double actualOutput = outputNeuronBeingExamined.Activation;
                double actualInput  = outputNeuronBeingExamined.TotalInput;

                // Per-neuron cost contributes to the scalar returned to the caller.
                double cost = _costFunction.Compute(expectedOutput, actualOutput);
                totalNetworkCost += cost;

                // NOTE(review): argument order here is (actual, expected), the reverse of the
                // Compute(expected, actual) call above — verify against ICostFunction's contract
                // that this is intentional and not a swapped-argument bug.
                double errorRelativeToActivation =
                    (_costFunction.ComputeDerivativeWRTActivation(actualOutput, expectedOutput));

                // Chain rule: dC/dz = dC/da * da/dz, with sigmoid'(z) as the activation derivative.
                double errorWrtToNeuron = errorRelativeToActivation * Math.Sigmoid.ComputeDerivative(actualInput);

                outputNeuronBeingExamined.AddError(errorWrtToNeuron);

                // Weight gradient for each incoming connection: neuron error * upstream activation.
                for (int e = 0; e < outputNeuronBeingExamined.UpstreamDendrites.Count; e++)
                {
                    Dendrite dendrite              = outputNeuronBeingExamined.UpstreamDendrites[e];
                    Neuron   upstreamNeuron        = (Neuron)dendrite.UpStreamNeuron;
                    double   errorRelativeToWeight = (errorWrtToNeuron * upstreamNeuron.Activation);

                    dendrite.AddError(errorRelativeToWeight);
                }
            }

            // Compute error for each neuron in each layer moving backwards (backprop).
            // Iterating from the last hidden layer to the first so each layer can read the
            // already-computed errors of its downstream neighbors.
            for (int d = HiddenLayers.Count - 1; d >= 0; d--)
            {
                HiddenLayer hiddenLayer = HiddenLayers[d];
                for (int e = 0; e < hiddenLayer.Neurons.Count; e++)
                {
                    Neuron thisNeuron = (Neuron)hiddenLayer.Neurons[e];
                    // DropOutMask is read per-neuron; presumably 0 for dropped neurons and 1
                    // otherwise, which zeroes their error below — TODO confirm mask convention.
                    double dropoutBit = hiddenLayer.DropOutMask[e];

                    double input = thisNeuron.TotalInput;

                    // Sum downstream errors weighted by the connecting dendrite weights
                    // (the standard backprop recurrence for a hidden neuron's delta).
                    double          errorSum            = 0.0;
                    List <Dendrite> downStreamDendrites = thisNeuron.DownstreamDendrites;

                    for (int f = 0; f < downStreamDendrites.Count; f++)
                    {
                        Dendrite currentDendrite  = downStreamDendrites[f];
                        Neuron   downStreamNeuron = currentDendrite.DownStreamNeuron;

                        double delta  = downStreamNeuron.CurrentNeuronError;
                        double weight = currentDendrite.Weight;
                        errorSum += delta * weight;
                    }

                    // Multiply by the activation derivative and the dropout mask so dropped
                    // neurons contribute no gradient.
                    double errorWrtToThisNeuron = errorSum * Math.Sigmoid.ComputeDerivative(input) * dropoutBit;
                    thisNeuron.AddError(errorWrtToThisNeuron);

                    // Weight gradients for this neuron's incoming connections, as above.
                    for (int f = 0; f < thisNeuron.UpstreamDendrites.Count; f++)
                    {
                        Dendrite dendrite              = thisNeuron.UpstreamDendrites[f];
                        Neuron   upstreamNeuron        = (Neuron)dendrite.UpStreamNeuron;
                        double   errorRelativeToWeight = (errorWrtToThisNeuron * upstreamNeuron.Activation);
                        dendrite.AddError(errorRelativeToWeight);
                    }
                }
            }

            return(totalNetworkCost);
        }