/// <summary>
/// Trains the network with online (per-sample) backpropagation until the root of the
/// accumulated squared error drops to <paramref name="eps"/> or <paramref name="maxEpoch"/>
/// epochs have run.
/// </summary>
/// <param name="learnInputData">Training inputs; one array per sample.</param>
/// <param name="learnOutputData">Expected outputs; parallel to <paramref name="learnInputData"/>.</param>
/// <param name="alpha">Learning rate applied to every weight update.</param>
/// <param name="eps">Stop threshold compared against sqrt of the summed squared error of an epoch.</param>
/// <param name="maxEpoch">Upper bound on the number of epochs.</param>
/// <param name="log">When true, calls <c>Log</c> every <paramref name="logInterval"/> epochs.</param>
/// <param name="logInterval">Epoch spacing between log calls. NOTE(review): a value of 0 would
/// throw <c>DivideByZeroException</c> in the modulo below — confirm callers never pass 0.</param>
public void Train(double[][] learnInputData, double[][] learnOutputData, double alpha, double eps, int maxEpoch, bool log = false, int logInterval = 1000)
{
    int layerCount = hiddenNeuronsSize.Length;
    int lastLayer = layerCount - 1;
    double sumSquaredError;
    long epoch = 0;

    do
    {
        sumSquaredError = 0;

        for (int sample = 0; sample < learnInputData.Length; sample++)
        {
            // Forward pass, then output error (target - actual) and error accumulation.
            double[] predicted = GetOutputs(learnInputData[sample]);
            double[] outputDeltas = new double[outputs];
            for (int n = 0; n < outputs; n++)
            {
                double diff = learnOutputData[sample][n] - predicted[n];
                outputDeltas[n] = diff;
                sumSquaredError += diff * diff;
            }

            // Backward pass: the last hidden layer takes its error from the output layer,
            // every earlier layer from the hidden layer above it.
            double[][] hiddenDeltas = new double[layerCount][];
            hiddenDeltas[lastLayer] = hiddenLayers[lastLayer].GetErrors(outputLayer, outputDeltas);
            for (int l = lastLayer - 1; l >= 0; l--)
            {
                hiddenDeltas[l] = hiddenLayers[l].GetErrors(hiddenLayers[l + 1], hiddenDeltas[l + 1]);
            }

            // Hidden-layer weight updates: delta * upstream output * activation derivative.
            // Layer 0 reads its inputs from the input layer, deeper layers from the layer below.
            for (int l = 0; l < layerCount; l++)
            {
                for (int n = 0; n < hiddenNeuronsSize[l]; n++)
                {
                    for (int w = 0; w < hiddenLayers[l].inputsSize; w++)
                    {
                        double current = hiddenLayers[l].GetWeight(w, n);
                        double signal = l == 0
                            ? inputLayer.GetOutput(w)
                            : hiddenLayers[l - 1].GetOutput(w);
                        double slope = hiddenLayers[l].GetDerivativeOutput(n);
                        hiddenLayers[l].SetWeight(w, n, current + alpha * hiddenDeltas[l][n] * signal * slope);
                    }
                }
            }

            // Output-layer weight updates. NOTE(review): unlike the hidden layers, no
            // derivative factor is applied here — presumably a linear/identity output
            // activation, or the derivative is folded in elsewhere; confirm.
            for (int n = 0; n < outputs; n++)
            {
                for (int w = 0; w < hiddenNeuronsSize[lastLayer]; w++)
                {
                    double current = outputLayer.GetWeight(w, n);
                    double signal = hiddenLayers[lastLayer].GetOutput(w);
                    outputLayer.SetWeight(w, n, current + alpha * outputDeltas[n] * signal);
                }
            }
        }

        if (log && epoch % logInterval == 0)
        {
            Log(Math.Sqrt(sumSquaredError), epoch);
        }

        epoch++;
    } while (Math.Sqrt(sumSquaredError) > eps && epoch < maxEpoch);
}