Code example #1
        /// <summary>
        /// Computes the deltas (dC/dz) for the nodes of hidden <paramref name="layer"/>
        /// and stores them in the bias slots of <paramref name="outputGradient"/>
        /// (the bias gradient equals the delta). Standard back-propagation:
        /// delta_l[node] = sigma'(z_l[node]) * sum_k( W_{l+1}[k, node] * delta_{l+1}[k] ).
        /// Must be called after the deltas of layer + 1 have been written.
        /// </summary>
        void ComputeDeltas(int layer, INeuralNetAccessor outputGradient)
        {
            int numberOfNodes          = neuralNetAccessor.NodesInLayer(layer);
            int numberOfNextLayerNodes = neuralNetAccessor.NodesInLayer(layer + 1);

            for (int node = 0; node < numberOfNodes; node++)
            {
                // sigma'(z) expressed through the activation a = sigma(z): a * (1 - a).
                // NOTE(review): assumes this layer uses a sigmoid activation — confirm
                // against the NeuralLayer implementation.
                float sigmoidDash = activations[layer][node];
                sigmoidDash = sigmoidDash * (1 - sigmoidDash);

                // Back-propagated error: next layer's deltas (already stored in the
                // gradient's bias slots) weighted by the NETWORK weights that connect
                // this node to each next-layer node.
                // BUGFIX: the original read outputGradient.WeightAccessor[layer, nextNode, node],
                // i.e. this layer's weight GRADIENT — which ComputeBackward has not yet
                // computed for this layer — instead of the network's next-layer weight.
                float error = 0;
                for (int nextNode = 0; nextNode < numberOfNextLayerNodes; nextNode++)
                {
                    error += outputGradient.BiasAccessor[layer + 1, nextNode]
                           * neuralNetAccessor.WeightAccessor[layer + 1, nextNode, node];
                }

                // Compute delta and store it as the bias gradient (dC/db == delta).
                outputGradient.BiasAccessor[layer, node] = sigmoidDash * error;
            }
        }
Code example #2
        /// <summary>
        /// Backward pass: walks the layers from the output layer down to the input
        /// layer, filling <paramref name="outputGradient"/> with deltas (bias slots)
        /// and weight gradients for the given example.
        /// </summary>
        void ComputeBackward(LearningExample learningExample, INeuralNetAccessor outputGradient)
        {
            int layer = neuralNetAccessor.NumberOfLayers - 1;

            // Output layer: its deltas come straight from the cost function.
            ComputeLastDeltas(learningExample.Output, outputGradient);
            ComputeWeightGradients(layer, learningExample.Input, outputGradient);

            // Hidden layers: each layer's deltas depend on the layer above,
            // so deltas are computed before that layer's weight gradients.
            while (--layer >= 0)
            {
                ComputeDeltas(layer, outputGradient);
                ComputeWeightGradients(layer, learningExample.Input, outputGradient);
            }
        }
Code example #3
        /// <summary>
        /// Computes the output-layer deltas as (activation - target) and stores
        /// them in the bias slots of <paramref name="outputGradient"/>.
        /// </summary>
        void ComputeLastDeltas(float[] learningExampleOutput, INeuralNetAccessor outputGradient)
        {
            int outputLayer = neuralNetAccessor.NumberOfLayers - 1;
            int nodeCount   = neuralNetAccessor.NodesInLayer(outputLayer);

            for (int node = 0; node < nodeCount; node++)
            {
                // delta = a - y; stored as the bias gradient (dC/db == delta).
                float delta = activations[outputLayer][node] - learningExampleOutput[node];
                outputGradient.BiasAccessor[outputLayer, node] = delta;
            }
        }
Code example #4
 /// <summary>
 /// In-place element-wise accumulation: adds every bias and every weight of
 /// <paramref name="otherNeuralNetAccessor"/> to the corresponding entry of
 /// this accessor (e.g. to sum per-example gradients into a batch gradient).
 /// </summary>
 public void AddAllBiasesAndWeightsWith(INeuralNetAccessor otherNeuralNetAccessor)
 {
     for (int layer = 0; layer < NumberOfLayers; layer++)
     {
         int nodeCount   = NodesInLayer(layer);
         int weightCount = WeightsOfNodeInLayer(layer);

         for (int node = 0; node < nodeCount; node++)
         {
             BiasAccessor[layer, node] += otherNeuralNetAccessor.BiasAccessor[layer, node];

             for (int weight = 0; weight < weightCount; weight++)
             {
                 WeightAccessor[layer, node, weight] += otherNeuralNetAccessor.WeightAccessor[layer, node, weight];
             }
         }
     }
 }
Code example #5
        /// <summary>
        /// Computes the output-layer deltas for a quadratic cost,
        /// delta = sigma'(z) * (activation - target), and stores them in the
        /// bias slots of <paramref name="outputGradient"/>.
        /// </summary>
        void ComputeLastDeltas(float[] learningExampleOutput, INeuralNetAccessor outputGradient)
        {
            // Removed the unused NeuralLayer local (only referenced by dead
            // commented-out numeric-derivative code).
            int lastLayer          = neuralNetAccessor.NumberOfLayers - 1;
            int numberOfFinalNodes = neuralNetAccessor.NodesInLayer(lastLayer);

            for (int i = 0; i < numberOfFinalNodes; i++)
            {
                // sigma'(z) via the activation a = sigma(z): a * (1 - a).
                // NOTE(review): assumes a sigmoid output layer — confirm.
                float sigmoidDash = activations[lastLayer][i];
                sigmoidDash = sigmoidDash * (1 - sigmoidDash);

                // dC/da for the quadratic cost: (a - y).
                float deltaCa = activations[lastLayer][i] - learningExampleOutput[i];

                // delta stored as the bias gradient (dC/db == delta).
                outputGradient.BiasAccessor[lastLayer, i] = sigmoidDash * deltaCa;
            }
        }
Code example #6
        /// <summary>
        /// Computes the weight gradients of <paramref name="layer"/>:
        /// dC/dW[layer, node, weight] = input_activation[weight] * delta[node],
        /// where the deltas were stored beforehand in the bias slots of
        /// <paramref name="outputGradient"/>.
        /// </summary>
        void ComputeWeightGradients(int layer, float[] learningExampleInput, INeuralNetAccessor outputGradient)
        {
            int numberOfNodes   = neuralNetAccessor.NodesInLayer(layer);
            int numberOfWeights = neuralNetAccessor.WeightsOfNodeInLayer(layer);

            // Inputs feeding this layer: the raw example for the first layer,
            // the previous layer's activations otherwise. Hoisted out of the
            // loops — the original re-evaluated this branch for every weight.
            float[] inputs = layer == 0 ? learningExampleInput : activations[layer - 1];

            for (int node = 0; node < numberOfNodes; node++)
            {
                // Loop-invariant per node: the delta stored in the bias slot.
                float delta = outputGradient.BiasAccessor[layer, node];

                for (int weight = 0; weight < numberOfWeights; weight++)
                {
                    outputGradient.WeightAccessor[layer, node, weight] = inputs[weight] * delta;
                }
            }
        }
Code example #7
 /// <summary>
 /// Runs one full backpropagation step for a single training example:
 /// the forward pass first (populating the cached activations), then the
 /// backward pass, which writes the resulting bias/weight gradients into
 /// <paramref name="outputGradient"/>.
 /// </summary>
 public void BackPropagate(LearningExample learningExample, INeuralNetAccessor outputGradient)
 {
     ComputeForward(learningExample.Input);
     ComputeBackward(learningExample, outputGradient);
 }