/// <summary>
/// Calculate the gradient for a neuron.
/// </summary>
/// <param name="toNeuron">The neuron to calculate for.</param>
private void CalculateNeuronGradient(IFreeformNeuron toNeuron)
{
    // Output neurons were already handled by the caller, so a non-null
    // input summation identifies a hidden-layer neuron that still needs
    // its inbound gradients and the previous layer's deltas computed.
    if (toNeuron.InputSummation == null)
    {
        return;
    }

    // First pass: accumulate the gradient on every inbound connection and
    // write a layer delta onto each source neuron.
    foreach (IFreeformConnection inbound in toNeuron.InputSummation.List)
    {
        // Gradient contribution of this connection: source output times
        // the delta already stored on toNeuron.
        inbound.AddTempTraining(0, inbound.Source.Activation * toNeuron.GetTempTraining(0));

        IFreeformNeuron source = inbound.Source;

        // Weighted sum of the deltas stored on the neurons this source feeds.
        double weightedDeltaSum = source.Outputs.Sum(
            outbound => outbound.Target.GetTempTraining(0) * outbound.Weight);

        // Derivative of the activation evaluated at the source neuron's
        // pre-activation sum and post-activation output.
        // NOTE(review): the derivative is taken from toNeuron's input
        // summation, not from the source neuron's own summation — this is
        // only correct when both layers share the same activation function;
        // confirm against the network construction code.
        var activation = toNeuron.InputSummation.ActivationFunction;
        double derivative = activation.DerivativeFunction(source.Sum, source.Activation);

        // Flat-spot fix: keep the sigmoid derivative away from zero so
        // training does not stall.
        if (FixFlatSopt && (activation is ActivationSigmoid))
        {
            derivative += FlatSpotConst;
        }

        source.SetTempTraining(0, weightedDeltaSum * derivative);
    }

    // Second pass: only after every delta for this layer has been written,
    // recurse toward the input layer.
    foreach (IFreeformConnection inbound in toNeuron.InputSummation.List)
    {
        CalculateNeuronGradient(inbound.Source);
    }
}
/// <summary>
/// Calculate the output delta for a neuron, given its difference.
/// Only used for output neurons.
/// </summary>
/// <param name="neuron">The neuron.</param>
/// <param name="diff">The difference.</param>
private void CalculateOutputDelta(IFreeformNeuron neuron, double diff)
{
    var activation = neuron.InputSummation.ActivationFunction;

    // Derivative of the activation function evaluated at this neuron's
    // pre-activation sum and post-activation output.
    double derivative = activation.DerivativeFunction(
        neuron.InputSummation.Sum, neuron.Activation);

    // Flat-spot fix: keep the sigmoid derivative away from zero so
    // training does not stall.
    if (FixFlatSopt && (activation is ActivationSigmoid))
    {
        derivative += FlatSpotConst;
    }

    // Store the delta for use by the gradient recursion.
    neuron.SetTempTraining(0, derivative * diff);
}