//Runs one training session: reads the training split of the dataset, backpropagates
//batches in parallel, applies the averaged gradients, then persists the parameters.
public static void Training()
{
    reader = new Reader();
    dataset = reader.ReadData();

    //Use 70% of the dataset for training (the rest is reserved for validation/testing)
    lengthOfTrainigData = datadivision.GetPartOfData(70, dataset);
    double[][] trainingData = datadivision.Take(dataset, lengthOfTrainigData);

    for (int i = 0; i < SaveState; i++)
    {
        List<Network> ntw = new List<Network>();
        var tasks = new Task[batchsize];

        for (int j = 0; j < batchsize; j++)
        {
            double[] row = reader.ReadNextRow(trainingData);
            int correct = reader.ReadNextClassification(trainingData);

            //BUGFIX: capture the Network instance in a local instead of evaluating
            //ntw[copyOfJ] inside the lambda. List<T> is not thread-safe: the main
            //thread keeps calling Add (which may resize the backing array) while
            //already-started tasks would be indexing into the same list. Capturing
            //the instance itself removes the shared-list access from the task.
            Network net = new Network();
            ntw.Add(net);
            //tasks[j] is assigned synchronously here, so no loop-variable capture
            //problem exists for the array index itself.
            tasks[j] = Task.Run(() => net.Backprop(row, correct));
        }
        Task.WaitAll(tasks);

        //Synchronously descend: fold each network's per-sample gradients into the
        //shared accumulators.
        foreach (Network nn in ntw)
        {
            nn.Descent();
            //nn.Dispose();
        }

        //Updating the weights with the avg gradients
        Network.Descent(batchsize);
        UserValidation();
    }

    //Save weights and biases
    NetworkParametersHandler.WriteWeightBias();
}
//Re-initializes the network's weights and biases and writes the fresh
//parameters to file, discarding any previously trained state.
public static void reset()
{
    Network network = new Network();
    network.initialize();
    NetworkParametersHandler.WriteWeightBias();
}
//Entry point: loads persisted network parameters, then either trains until the
//manager reports completion or runs a single testing pass.
static void Main(string[] args)
{
    //Reading network parameters form file
    NetworkParametersHandler.ReadWeightBias();
    //ProgramManager.reset();

    if (trainMode)
    {
        //Keep running training sessions until the finished flag is raised.
        while (!ProgramManager.finished)
        {
            ProgramManager.Training();
        }
    }
    else
    {
        ProgramManager.Testing();
    }

    Console.WriteLine("Finished");
    Console.ReadKey();
}