private double feedForward(double[][,] input, Class cls)
{
  // forward pass: fill m_Values with the activations of every layer
  Net.Calculate(input, m_Values);

  var lidx   = Net.LayerCount - 1;
  var result = m_Values[lidx];
  var errors = m_Errors[lidx];
  var len    = result.GetLength(0);

  // collapse the last layer's 1x1 feature maps into a flat output vector
  var output = new double[len];
  for (int j = 0; j < len; j++)
  {
    output[j] = result[j][0, 0];
  }

  var expect = m_ExpectedOutputs[cls];
  var llayer = Net[lidx];

  // seed backpropagation: output-layer error = loss derivative * activation derivative,
  // averaged over the mini-batch
  for (int p = 0; p < llayer.OutputDepth; p++)
  {
    var ej    = m_LossFunction.Derivative(p, output, expect);
    var value = result[p][0, 0];
    var deriv = (llayer.ActivationFunction != null)
                  ? llayer.ActivationFunction.DerivativeFromValue(value)
                  : 1;
    errors[p][0, 0] = ej * deriv / m_BatchSize;
  }

  return m_LossFunction.Value(output, expect) / m_BatchSize;
}
private double feedForward(NeuralNetwork net, double[] input, double[] expected)
{
  // forward pass through the fully connected network
  var output = net.Calculate(input);

  var llayer = net[net.LayerCount - 1];
  var errors = m_Errors[net.LayerCount - 1];

  // seed backpropagation: output-layer error = loss derivative * neuron's activation derivative
  for (int j = 0; j < m_OutputDim; j++)
  {
    var neuron = llayer[j];
    var ej     = m_LossFunction.Derivative(j, output, expected);
    errors[j]  = ej * neuron.Derivative;
  }

  return m_LossFunction.Value(output, expected);
}
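For reference, both overloads seed backpropagation with the same output-layer error term; written in standard backprop notation (the symbols $y_j$, $L$, $f$ and $m$ are mine, not identifiers from the code), the value stored for output neuron $j$ is

$$
\delta_j = \frac{1}{m}\,\frac{\partial L}{\partial y_j}\,f'(y_j),
$$

where $y_j$ is the $j$-th output activation, $L$ the loss function, $f$ the output layer's activation function, and $m$ the mini-batch size; the second overload omits the $1/m$ averaging factor.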