/// <summary>
/// Performs one stochastic-gradient-descent training step on a single
/// (input, label) pair: forward pass through the hidden and output layers,
/// then backpropagation of the error to adjust both weight matrices and biases.
/// </summary>
/// <param name="input">Input vector; one value per input neuron.</param>
/// <param name="labelArray">Target output vector; one value per output neuron.</param>
/// <remarks>
/// NOTE(review): not thread-safe — the weight/bias fields are mutated without
/// synchronization (earlier lock scaffolding was commented out). Confirm callers
/// train from a single thread, or reintroduce locking around the updates.
/// </remarks>
public void Train(double[] input, double[] labelArray)
{
    // ---- Forward pass ----
    var inputs = Matrix<double>.Build.Dense(input.Length, 1, (i, j) => input[i]);

    // hidden = activation(W_ih * inputs + b_h), computed in place.
    var hidden = _weightsInHidden.Multiply(inputs);
    hidden.Add(_biasHidden, hidden);
    hidden.MapInplace(val => _activation.Activasion(val));

    // output = activation(W_ho * hidden + b_o), computed in place.
    var output = _weightsHiddenOut.Multiply(hidden);
    output.Add(_biasOut, output);
    output.MapInplace(val => _activation.Activasion(val));

    // ---- Output-layer error: errors = target - output ----
    var target = Matrix<double>.Build.Dense(labelArray.Length, 1, (i, j) => labelArray[i]);
    var errors = target.Subtract(output);

    // Output gradient = dActivation(output) ∘ errors * learningRate.
    // (DActivasion is applied to the already-activated output — presumably the
    // derivative is expressed in terms of the activation value, as with
    // sigmoid y*(1-y); confirm against the IActivation implementation.)
    var gradients = output.Map(val => _activation.DActivasion(val));
    InPlaceMultiply(ref gradients, ref errors);
    gradients.Multiply(_learningRate, gradients);

    // BUG FIX: hidden errors must be backpropagated through the hidden→output
    // weights as they were BEFORE this step's update. The original code took
    // the transpose after _weightsHiddenOut had already been adjusted, which
    // propagates the error through the wrong (post-update) weights. Capture
    // the transpose now, before mutating the weights.
    var weightsHiddenOutTransposed = _weightsHiddenOut.Transpose();

    // ---- Apply hidden→output deltas: ΔW_ho = gradients * hiddenᵀ ----
    var hiddenTransposed = hidden.Transpose();
    var weightHiddenOutDeltas = gradients.Multiply(hiddenTransposed);
    _weightsHiddenOut.Add(weightHiddenOutDeltas, _weightsHiddenOut);
    // Bias delta is the gradient itself.
    _biasOut.Add(gradients, _biasOut);

    // ---- Hidden-layer error and gradient ----
    var hiddenErrors = weightsHiddenOutTransposed.Multiply(errors);
    var hiddenGradient = hidden.Map(val => _activation.DActivasion(val));
    InPlaceMultiply(ref hiddenGradient, ref hiddenErrors);
    hiddenGradient.Multiply(_learningRate, hiddenGradient);

    // ---- Apply input→hidden deltas: ΔW_ih = hiddenGradient * inputsᵀ ----
    var inputsTransposed = inputs.Transpose();
    var weightsInputHiddenDeltas = hiddenGradient.Multiply(inputsTransposed);
    _weightsInHidden.Add(weightsInputHiddenDeltas, _weightsInHidden);
    _biasHidden.Add(hiddenGradient, _biasHidden);
}