Example #1
    private static void DoBackpropagation(Layer outputLayer, double[] targetOutputs, ErrorFunctionType errorFunctionType, double learningRate, double momentumMagnitude)
    {
        // Update the output layer first and collect its deltas for the backwards pass.
        var backwardsPassDeltas = UpdateOutputLayer(outputLayer, targetOutputs, errorFunctionType, learningRate, momentumMagnitude);

        // Recursively propagate those deltas through every preceding layer.
        foreach (var previousLayer in outputLayer.PreviousLayers)
        {
            RecurseBackpropagation(previousLayer, backwardsPassDeltas, momentumMagnitude);
        }
    }
Example #2
    private static Dictionary<Node, double> UpdateOutputLayer(Layer outputLayer, double[] targetOutputs, ErrorFunctionType errorFunctionType, double learningRate, double momentumMagnitude)
    {
        var errorFunctionDifferential = ErrorFunctionResolver.ResolveErrorFunctionDifferential(errorFunctionType);

        var deltas = new Dictionary<Node, double>();

        for (var i = 0; i < outputLayer.Nodes.Count; i++)
        {
            var node = outputLayer.Nodes[i];

            // Chain rule: (capped) error differential * activation differential * learning rate.
            // Math.MinMagnitude returns the argument with the smaller absolute value,
            // capping runaway error differentials at 100.
            var delta = Math.MinMagnitude(errorFunctionDifferential.Invoke(targetOutputs[i], node.Output), 100)
                        * outputLayer.ActivationFunctionDifferential(node.Output)
                        * learningRate;
            deltas.Add(node, delta);

            foreach (var (prevNode, weightForPrevNode) in node.Weights)
            {
                UpdateNodeWeight(prevNode, weightForPrevNode, delta, momentumMagnitude);
            }

            foreach (var (_, weightForPrevLayer) in node.BiasWeights)
            {
                UpdateBiasNodeWeight(weightForPrevLayer, delta, momentumMagnitude);
            }
        }

        return deltas;
    }
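A note on the Math.MinMagnitude call above: it returns whichever argument has the smaller absolute value, so it caps runaway error differentials at 100. Be aware that the cap does not preserve sign, so a large negative differential comes back as +100. A minimal standalone sketch of both behaviours, with made-up values:

    Console.WriteLine(Math.MinMagnitude(42.0, 100));   // 42   (already within the cap)
    Console.WriteLine(Math.MinMagnitude(250.0, 100));  // 100  (capped)
    Console.WriteLine(Math.MinMagnitude(-250.0, 100)); // 100  (capped, sign not preserved)
    // A sign-preserving alternative would be Math.Clamp(differential, -100, 100).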
Example #3
    public static void Backpropagate(this Layer outputLayer, Dictionary<Layer, double[]> inputs, double[] targetOutputs, ErrorFunctionType errorFunctionType, double learningRate, double momentumMagnitude = 0d)
    {
        // Forward pass first, so every node holds a current output value.
        outputLayer.CalculateOutputs(inputs);

        // Then run the backwards pass through the whole network.
        DoBackpropagation(outputLayer, targetOutputs, errorFunctionType, learningRate, momentumMagnitude);
    }
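A hypothetical single training step using this extension method might look like the following; the layer variables, the input values, and the MSE enum member are assumptions for illustration, not taken from the snippets above:

    // Hypothetical training step: inputLayer/outputLayer are assumed to come
    // from network construction code not shown here.
    var inputs = new Dictionary<Layer, double[]>
    {
        { inputLayer, new[] { 0.2, 0.8 } },
    };
    var targetOutputs = new[] { 1.0, 0.0 };

    outputLayer.Backpropagate(inputs, targetOutputs, ErrorFunctionType.MSE, learningRate: 0.1, momentumMagnitude: 0.9);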
Example #4
 /// <summary>
 /// Returns the differential of the supplied error function.
 /// The function returned has the signature: (target, actual) => differential of the error.
 /// </summary>
 /// <param name="errorFunctionType">The error function to differentiate.</param>
 /// <returns>A delegate computing the error differential.</returns>
 public static Func<double, double, double> ResolveErrorFunctionDifferential(ErrorFunctionType errorFunctionType) => errorFunctionType switch
 {
     // The original snippet is truncated here; the arms below are illustrative
     // assumptions, not the library's actual cases.
     // For mean squared error E = (target - actual)^2 / 2, dE/dActual = actual - target.
     ErrorFunctionType.MSE => (target, actual) => actual - target,
     _ => throw new ArgumentOutOfRangeException(nameof(errorFunctionType)),
 };
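Resolving and invoking the differential then takes two steps; the MSE member is the same assumption as in the completed switch above:

    var dError = ErrorFunctionResolver.ResolveErrorFunctionDifferential(ErrorFunctionType.MSE);
    var differential = dError(1.0, 0.75); // actual - target = 0.75 - 1.0 = -0.25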
Example #5
    public static void NegativeSample(this Layer outputLayer, int inputIndex, int outputIndex, bool isPositiveTarget, ErrorFunctionType errorFunctionType, double learningRate, double momentumMagnitude = 0d)
    {
        // Forward pass for the single (input, output) pair being sampled.
        outputLayer.CalculateIndexedOutput(inputIndex, outputIndex, 1);
        var targetOutput = isPositiveTarget ? 1d : 0d;

        var deltas = NegativeSampleOutput(outputLayer, targetOutput, outputIndex, errorFunctionType, learningRate, momentumMagnitude);

        // Propagate the single output delta back through the earlier layers.
        foreach (var previousLayer in outputLayer.PreviousLayers)
        {
            foreach (var previousPreviousLayer in previousLayer.PreviousLayers)
            {
                RecurseNegativeSample(previousLayer, previousPreviousLayer, inputIndex, deltas, momentumMagnitude);
            }
        }
    }
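Negative sampling is commonly used in word-embedding training: each observed pair is pushed toward a target of 1 while a few randomly drawn outputs are pushed toward 0. A hypothetical loop; positivePairs, SampleRandomOutputIndex, and the CrossEntropy enum member are assumptions for illustration:

    foreach (var (inputIndex, outputIndex) in positivePairs) // assumed list of observed (input, output) pairs
    {
        // Reinforce the observed pair toward a target of 1.
        outputLayer.NegativeSample(inputIndex, outputIndex, isPositiveTarget: true, ErrorFunctionType.CrossEntropy, learningRate: 0.05);

        // Push a handful of randomly drawn outputs toward 0.
        for (var k = 0; k < 5; k++)
        {
            var negativeIndex = SampleRandomOutputIndex(); // hypothetical sampler over the output vocabulary
            outputLayer.NegativeSample(inputIndex, negativeIndex, isPositiveTarget: false, ErrorFunctionType.CrossEntropy, learningRate: 0.05);
        }
    }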
Example #6
    private static Dictionary<Node, double> NegativeSampleOutput(Layer outputLayer, double targetOutput, int outputIndex, ErrorFunctionType errorFunctionType, double learningRate, double momentumMagnitude)
    {
        var errorFunctionDifferential = ErrorFunctionResolver.ResolveErrorFunctionDifferential(errorFunctionType);
        var outputNode = outputLayer.Nodes[outputIndex];

        // Chain rule: error differential * activation differential * learning rate.
        var delta = errorFunctionDifferential.Invoke(targetOutput, outputNode.Output)
                    * outputLayer.ActivationFunctionDifferential(outputNode.Output)
                    * learningRate;

        foreach (var (prevNode, weightForPrevNode) in outputNode.Weights)
        {
            Backpropagation.UpdateNodeWeight(prevNode, weightForPrevNode, delta, momentumMagnitude);
        }

        foreach (var (_, weightForPrevLayer) in outputNode.BiasWeights)
        {
            Backpropagation.UpdateBiasNodeWeight(weightForPrevLayer, delta, momentumMagnitude);
        }

        // Only the sampled output node carries a delta back through the network.
        return new Dictionary<Node, double> { { outputNode, delta } };
    }
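The delta above is a straight chain-rule product: error differential times activation differential times learning rate. A worked numeric sketch, assuming a sigmoid activation (whose differential at an output o is o(1 - o)) and the MSE differential assumed earlier:

    // target = 1.0, actual output = 0.7, learning rate = 0.1
    var errorDifferential = 0.7 - 1.0;                            // actual - target = -0.3
    var activationDifferential = 0.7 * (1 - 0.7);                 // o * (1 - o) = 0.21
    var delta = errorDifferential * activationDifferential * 0.1; // -0.0063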