/// <summary>
/// Trains the perceptron on the MNIST training set, logging test-set accuracy and
/// elapsed time after each iteration, and cancels training once
/// <paramref name="iterations"/> iterations have completed.
/// </summary>
/// <param name="firstHiddenLayerSize">Neuron count of the first hidden layer.</param>
/// <param name="secondHiddenLayerSize">Neuron count of the second hidden layer.</param>
/// <param name="iterations">Number of training iterations to run before cancelling.</param>
/// <param name="learningRate">Learning rate passed to the classifier.</param>
/// <param name="batchSize">Mini-batch size passed to the classifier.</param>
public void Run(int firstHiddenLayerSize, int secondHiddenLayerSize, int iterations, float learningRate, int batchSize)
{
    var digitReader = new HandwrittenDigitReader();
    var trainDigits = digitReader.Iterate(
        MnistFiles.FindByType(MNISTFileType.TrainImages).FileName,
        MnistFiles.FindByType(MNISTFileType.TrainLabels).FileName);
    var testDigits = digitReader.Iterate(
        MnistFiles.FindByType(MNISTFileType.TestImages).FileName,
        MnistFiles.FindByType(MNISTFileType.TestLabels).FileName);

    // Materialize both projections once. They were lazy Selects over Iterate(...),
    // so every enumeration re-read the MNIST files from disk — and the
    // IterationComplete handler below enumerates testData twice per iteration
    // (EvaluateClassifier + Count).
    var trainingData = trainDigits.Select(im => im.ToDataPoint()).ToList();
    var testData = testDigits.Select(im => im.ToDataPoint()).ToList();

    // CancellationTokenSource is IDisposable; dispose it once training returns.
    using (var trainingCancellation = new CancellationTokenSource())
    {
        _model = new PerceptronModel(
            HandwrittenDigit.ImageSize * HandwrittenDigit.ImageSize,
            firstHiddenLayerSize,
            secondHiddenLayerSize,
            10); // 10 output neurons, one per digit class

        _classifier = new PerceptronClassifier(_model, learningRate, batchSize);

        var watch = Stopwatch.StartNew();
        _classifier.IterationComplete += j =>
        {
            OnLog(
                $"Iteration #{j} has completed: {EvaluateClassifier(_classifier, testData)} / {testData.Count} [{watch.ElapsedMilliseconds} ms]");
            // Restart the stopwatch so each log line reports per-iteration time.
            watch = Stopwatch.StartNew();
            if (j == iterations)
            {
                trainingCancellation.Cancel();
            }

            EpochComplete?.Invoke(j);
        };

        _classifier.Train(trainingData, trainingCancellation.Token);
    }
}
/// <summary>
/// Creates a demo runner. If a previously serialized network exists at
/// <paramref name="modelFileName"/> it is loaded; otherwise a fresh model with a
/// single 30-neuron hidden layer is created.
/// </summary>
/// <param name="modelFileName">
/// Path of the serialized model XML. Defaults to "network.xml", preserving the
/// original hard-coded behavior.
/// </param>
public DemoRunner(string modelFileName = "network.xml")
{
    MnistFiles = new MNISTDatabase();

    var modelFile = new FileInfo(modelFileName);
    if (modelFile.Exists)
    {
        // StreamReader takes ownership of the stream and disposes it along
        // with the reader when the using block exits.
        using (var textReader = new StreamReader(modelFile.OpenRead()))
        {
            var serializer = new PerceptronModelSerializer();
            _model = serializer.Deserialize(textReader);
        }
    }
    else
    {
        _model = new PerceptronModel(30); // single hidden layer of 30 neurons
    }

    // Default hyper-parameters: learning rate 2, batch size 50.
    _classifier = new PerceptronClassifier(_model, 2, 50);
}