/// <summary>
/// Trains the MLP (on PCA-reduced inputs) over the whole training set, repeating
/// full epochs until the mean squared error drops to <c>threshold</c> or below.
/// </summary>
/// <param name="num_TrainingPerClass">Number of training samples to use per class;
/// samples are read from "img/&lt;class&gt;/Training/1.txt" .. "&lt;n&gt;.txt".</param>
/// <returns>The mean squared error measured after the final epoch.</returns>
public double LearnOnReducedData(int num_TrainingPerClass)
{
    double meanSquareError = threshold + 1; // sentinel: forces at least one epoch

    while (meanSquareError > threshold)
    {
        // Training pass: back-propagate every sample once.
        for (int i = 0; i < num_Classes; i++)
        {
            for (int j = 1; j <= num_TrainingPerClass; j++)
            {
                double[] input = LoadNormalizedTrainingInput(i, j);
                MLP.BackPropagate(PCA.ForwardPropagate(input), MakeTargetVector(i));
            }
        }

        // Evaluation pass: accumulate squared error over the full training set.
        meanSquareError = 0;
        for (int i = 0; i < num_Classes; i++)
        {
            for (int j = 1; j <= num_TrainingPerClass; j++)
            {
                double[] input = LoadNormalizedTrainingInput(i, j);
                double[] output = MLP.ForwardPropagate(PCA.ForwardPropagate(input));
                double[] desiredOutput = MakeTargetVector(i);

                for (int k = 0; k < num_Classes; k++)
                {
                    double error = desiredOutput[k] - output[k];
                    meanSquareError += error * error;
                }
            }
        }

        // NOTE(review): the extra factor of 2 in the normalization is kept from the
        // original so reported error values stay backward-compatible.
        meanSquareError /= num_TrainingPerClass * num_Classes * 2;
        Console.WriteLine(meanSquareError);
    }

    return meanSquareError;
}

/// <summary>
/// Reads one training sample from disk, converts it to doubles, and normalizes it
/// in place. Path layout: img/&lt;class&gt;/Training/&lt;sampleNumber&gt;.txt.
/// </summary>
private double[] LoadNormalizedTrainingInput(int classIndex, int sampleNumber)
{
    string path = "img/" + classes[classIndex] + "/Training/" + sampleNumber + ".txt";
    string[] lines = System.IO.File.ReadAllLines(path);
    double[] input = HelperFunctions.ToDoubleArray(lines);
    HelperFunctions.Normalize(ref input);
    return input;
}

/// <summary>
/// Builds a one-hot target vector of length num_Classes with 1 at
/// <paramref name="classIndex"/>. (A fresh double[] is zero-initialized by the CLR.)
/// </summary>
private double[] MakeTargetVector(int classIndex)
{
    double[] target = new double[num_Classes];
    target[classIndex] = 1;
    return target;
}