/// <summary>
/// Calculates the results of the network at the output layer.
/// </summary>
/// <param name="input">Activations of the last hidden layer, fed into the output layer.</param>
/// <param name="numOfFeatures">Number of input neurons. NOTE(review): this parameter is not
/// used by this method — the inner loop bound comes from the last hidden layer's neuron
/// count; kept for signature compatibility with callers.</param>
/// <param name="softmax">True to apply SoftMax across the whole output layer;
/// false to apply Sigmoid to each output neuron independently.</param>
/// <param name="output">Output parameter to store outputs at the output layer.</param>
/// <param name="outputSum">Weighted sums (pre-activation values) of the output layer.</param>
private void CalculateResultatOutputlayer(double[] input, int numOfFeatures, bool softmax, out double[] output, out double[] outputSum)
{
    output = new double[m_OutputLayerNeurons];
    outputSum = new double[m_OutputLayerNeurons];

    // Weights/biases are stored one entry per hidden layer, with the output layer's
    // entry last — i.e. at index m_HiddenLayerNeurons.Length.
    int outputLayerIndex = m_HiddenLayerNeurons.Length;
    int numOfHiddenNeuronsInLastHiddenLayer = m_HiddenLayerNeurons[outputLayerIndex - 1];

    for (int j = 0; j < m_OutputLayerNeurons; j++)
    {
        // Weighted sum of the last hidden layer's activations, plus bias.
        double sum = 0.0;
        for (int i = 0; i < numOfHiddenNeuronsInLastHiddenLayer; i++)
        {
            sum += m_Weights[outputLayerIndex][j, i] * input[i];
        }

        outputSum[j] = sum + m_Biases[outputLayerIndex][j];

        if (!softmax)
        {
            // Per-neuron sigmoid activation.
            output[j] = ActivationFunctions.Sigmoid(outputSum[j]);
        }
    }

    if (softmax)
    {
        // SoftMax normalizes across all output sums at once, so it must run
        // after the whole outputSum vector has been computed.
        output = ActivationFunctions.SoftMaxClassifier(outputSum);
    }
}