/// <summary>
/// Runs one online-learning (per-record backpropagation) epoch over the whole
/// training set and returns the mean of the per-record summed squared errors.
/// Weights of <paramref name="network"/> are modified in place after each record.
/// </summary>
/// <param name="network">Network to train; its weights are updated in place.</param>
/// <param name="data">Training set; data.Input[i] pairs with data.Output[i].</param>
/// <returns>Mean squared error over all training records.</returns>
public static float Learn(Network network, DataSet data)
{
    float meanError = 0.0f;
    List<List<float>> placeForErrors = CreateNodesForErrors(network);
    List<List<float>> placeForOutputs = CreateNodesForErrors(network);

    for (int i = 0; i < data.Input.Count; i++)
    {
        // CalculateSingleRecord returns the output vector directly — the original
        // pre-allocated a float[network.Classes] that was immediately overwritten.
        float[] solution = NetworkCalculation.CalculateSingleRecord(network, data.Input[i].ToArray(), placeForOutputs);

        float[] squaredErrors = SquaredError(solution, data.Output[i].ToArray());
        float[] lastLayerError = LastLayerError(solution, data.Output[i].ToArray());

        // Sum the per-output squared errors into this record's total error.
        float recordError = 0.0f;
        for (int j = 0; j < squaredErrors.Length; j++)
        {
            recordError += squaredErrors[j];
        }

        // Backpropagate the output-layer error and apply the weight update
        // immediately (online learning, one record at a time).
        CalculateNodeErrors(placeForErrors, lastLayerError, network);
        ModifyWages(network, placeForErrors, placeForOutputs, network.LearningFactor);

        meanError += recordError;
    }

    return meanError / data.Input.Count;
}
/// <summary>
/// Evaluates the network on a testing set and returns the percentage (0–100) of
/// records whose output matched the expected answer per <c>CompareAnswers</c>.
/// NOTE(review): despite the name, this is an accuracy percentage, not an error;
/// the name is kept for caller compatibility.
/// </summary>
/// <param name="network">Trained network to evaluate (not modified).</param>
/// <param name="testingSet">Testing set; Input[i] pairs with Output[i].</param>
/// <returns>Percentage of correctly classified records.</returns>
public static float GetTestingSetError(Network network, DataSet testingSet)
{
    int correctAnswerCounter = 0;
    List<List<float>> placeForOutputs = LearningHelper.CreateNodesForErrors(network);

    for (int i = 0; i < testingSet.Input.Count; i++)
    {
        // CalculateSingleRecord returns the output vector directly — the original
        // pre-allocated a float[network.Classes] that was immediately overwritten.
        float[] solution = NetworkCalculation.CalculateSingleRecord(network, testingSet.Input[i].ToArray(), placeForOutputs);

        if (CompareAnswers(solution, testingSet.Output[i].ToArray()))
        {
            correctAnswerCounter++;
        }
    }

    // Divide by Input.Count to match the number of records actually evaluated
    // above (the original divided by Output.Count, which silently skews the
    // result whenever the two lists differ in length; Learn also uses Input.Count).
    return (float)((double)correctAnswerCounter * 100.0 / testingSet.Input.Count);
}