/// <summary>
/// Trains the model by adding <paramref name="numLayers"/> layers, one at a time,
/// off-loading each (presumably CPU-bound) AddLayer call to the thread pool.
/// Does nothing when <paramref name="numLayers"/> is zero or negative.
/// </summary>
/// <param name="numLayers">Number of layers to add.</param>
/// <param name="cancellation">
/// Token checked between layers and forwarded to the trainer; cancellation
/// surfaces as an <see cref="OperationCanceledException"/>.
/// </param>
/// <param name="taskAndPercentComplete">
/// Progress sink receiving (task description, percent complete) updates
/// from the trainer.
/// </param>
internal async Task Train(int numLayers, CancellationToken cancellation, IProgress<Tuple<string, int>> taskAndPercentComplete)
{
    for (var i = numLayers; i > 0; i--)
    {
        // Stop promptly between layers: without this check a cancelled token
        // would still start (and fully run) the next layer.
        cancellation.ThrowIfCancellationRequested();

        // Task.Run keeps the caller's thread responsive while AddLayer works.
        // ConfigureAwait(false): no need to resume on the captured context here.
        await Task.Run(() => _trainer.AddLayer(cancellation, taskAndPercentComplete)).ConfigureAwait(false);
    }
}
// Entry point: runs the boosting demo at two noise levels (0% and 5% label
// noise), adding layers until the loss diverges (NaN) or the training-error
// count stops changing for 20 consecutive layers.
static void Main(string[] args)
{
    try
    {
        for (var noise = 0; noise < 2; noise++)
        {
            var error = 0.05 * noise;

            List<Sample> pointsPos, pointsNeg;
            var flippedLabels = GenerateSamples(error, out pointsPos, out pointsNeg);

            var trainer = new Trainer<Sample>(new ILearner<Sample>[] { new Sample.Learner(21) }, pointsPos, pointsNeg);

            var convergence = 0; // consecutive layers with an unchanged training-error count
            var prev = 0;        // training-error count from the previous layer

            Console.WriteLine($"Noise: {error} ({flippedLabels} label errors)");

            for (var layer = 0; layer < 1000; layer++)
            {
                var cost = trainer.AddLayer();

                // A NaN loss means training has diverged; give up on this run.
                if (float.IsNaN(cost))
                {
                    Console.WriteLine($"Terminated after {layer} layers.");
                    break;
                }

                var testErrors = TestTrainer(trainer.Classifier, pointsPos, pointsNeg);
                var veriErrors = TestTrainer(trainer.Classifier);

                convergence = testErrors == prev ? convergence + 1 : 0;

                if (convergence > 19)
                {
                    // NOTE(review): threshold is 20 unchanged results but the
                    // message subtracts 9 — these look inconsistent; confirm
                    // which count was intended before changing either.
                    Console.WriteLine($"Fully trained after {layer - 9} layers.");
                    break;
                }

                // Periodic progress report every 10 layers.
                if ((layer + 1) % 10 == 0)
                {
                    Console.WriteLine($"[{testErrors} / {veriErrors} ] errors after {layer + 1} layers.");
                }

                prev = testErrors;
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }

    // Keep the console window open until a key is pressed.
    Console.ReadKey();
}