/// <summary>
/// Runs the trained network over the MNIST test set, accumulating the
/// confusion matrix and printing overall classification accuracy.
/// </summary>
public void Test()
{
    double testDataAmount = 0;
    double correctEstimate = 0;

    foreach (Image image in MnistReader.ReadTestData())
    {
        testDataAmount++;
        _inputLayer.SetNeurons(image.Data);
        SetNewDesiredValues(image.Label);
        FeedForward();
        CalculateError();

        // Hoisted: the original re-evaluated GetIndexOfHighestNeuron() on
        // every iteration of the inner loop below.
        int predicted = _outputLayer.GetIndexOfHighestNeuron();
        _confusionMatrix[(int)image.Label, predicted]++;

        for (int i = 0; i < _outputLayer.DesiredValues.Length; i++)
        {
            // i == predicted is true at most once, so at most one increment
            // per image — same as the original.
            if (_outputLayer.DesiredValues[i] == 1.0 && i == predicted)
            {
                correctEstimate++;
                break;
            }
        }
    }

    double accuracy = (correctEstimate / testDataAmount) * 100;
    Console.WriteLine("Accuracy: " + accuracy + " %.\n");
    PrintConfusionMatrix();
}
/// <summary>
/// Shows the next random test image, runs the network on it, and — while a
/// timed test is running — tallies correct answers over 100 samples.
/// </summary>
public void Next()
{
    DigitImage img = Program.RandomTestingData;
    TrainingItem ti = MnistReader.ConvertSingleImage(img);

    // Render the digit into the picture box, scaled to the control's size.
    Image realImg = img.ToBitMap();
    input.Image = new Bitmap(realImg, input.Size);

    // Decode the expected label from the 4 training outputs: each output
    // above 0.5 contributes 2^i, i.e. the label is read as a 4-bit number.
    byte b = 0;
    for (int i = 0; i < 4; i++)
    {
        b += (byte)(ti.outputs[i] > 0.5 ? Math.Pow(2, i) : 0);
    }

    // Network's answer for the same inputs.
    byte o = Program.GetOutput(ti.inputs);
    output.Text = (timerRunning ? "Test in progress : \n" : "") + "Found : " + o + "\nExpected : " + b;

    if (timerRunning)
    {
        loop++;
        if (o == b)
        {
            worked++;
        }
        // After exactly 100 samples the raw success count doubles as the
        // percentage shown to the user.
        if (loop >= 100)
        {
            timerRunning = false;
            timer.Stop();
            output.Text = "Test finished\nSuccess : " + worked + " %";
        }
    }
}
/// <summary>
/// Wires up the dialog service and commands, then loads the MNIST training
/// and test image collections into their view-model lists.
/// </summary>
public MainWindowVM()
{
    _dialogService = new SimpleDialogService();

    var trainImageFile = new FileInfo(TrainImagePath);
    var trainLabelFile = new FileInfo(TrainLabelPath);
    var testImageFile = new FileInfo(TestImagePath);
    var testLabelFile = new FileInfo(TestLabelPath);

    // Command wiring: each button delegates straight to a method.
    TrainModelCommand = new DelegateCommand(TrainModel);
    LoadModelCommand = new DelegateCommand(LoadModel);
    SaveModelCommand = new DelegateCommand(SaveModel);
    PredictAllCommand = new DelegateCommand(PredictAll);

    var reader = new MnistReader();
    _trainer = new Trainer();

    // Materialize both datasets up front as view models.
    TrainImages = reader
        .ReadDataset(trainImageFile, trainLabelFile, TrainImageCount)
        .Select(CreateVM)
        .ToList();
    TestImages = reader
        .ReadDataset(testImageFile, testLabelFile, TestImageCount)
        .Select(CreateVM)
        .ToList();
}
/// <summary>
/// Loads the MNIST training images, splits them into 50k training / rest
/// validation, and trains the network (30 epochs, batch 10, rate 5.0).
/// </summary>
/// <param name="net">The network to train in place.</param>
private static void TrainNetwork(FeedForwardNeuralNet net)
{
    const int trainingSetSize = 50000;

    var trainingInput = new List<double[]>();
    var trainingOutput = new List<double[]>();
    var validationInput = new List<double[]>();
    var validationOutput = new List<double[]>();

    int index = 0;
    foreach (var image in MnistReader.ReadTrainingData())
    {
        // First 50000 samples train; everything after validates.
        bool isTraining = index < trainingSetSize;
        (isTraining ? trainingInput : validationInput).Add(Encode(image.Data));
        (isTraining ? trainingOutput : validationOutput).Add(Encode(image.Label));
        index++;
    }

    Console.WriteLine("Started Training:");
    net.Train(trainingInput, trainingOutput, 30, 10, 5.0, validationInput, validationOutput);
    Console.WriteLine("Finished training.");
}
/// <summary>
/// Application entry point: loads the MNIST training data, builds the
/// forms, wires the network-form actions to the network, and runs the app.
/// </summary>
static void Main()
{
    Application.EnableVisualStyles();
    Application.SetCompatibleTextRenderingDefault(false);

    // Load all 60000 training images/labels up front.
    List <TrainingItem> trainData = MnistReader.ConvertTrainingData(
        MnistReader.ReadImages("train-images.idx3-ubyte", "train-labels.idx1-ubyte", 60000));

    // Secondary windows: manual testing and the cost graph.
    testForm = new TestingForm();
    graphForm = new Graph();

    // Main network window. Fall back to a fresh 196-15-4 network
    // (learning rate 0.01, momentum-like 0.9 — see NeuralNetwork ctor)
    // when no saved network can be loaded.
    form = new NetworkForm();
    if (!TryLoadNetwork())
    {
        network = new NeuralNetwork(0.01, 0.9, new int[] { 196, 15, 4 });
    }
    form.DrawNetwork(network);

    // Training progress callback: update the status line and, when the
    // graph window is visible, refresh the cost curve.
    form.startTrainingAction += () => network.Train(trainData, (i, cost) =>
    {
        form.SetProgress("Trained for " + TimeSpan.FromMilliseconds(network.TrainedMillis).ToString("c") + " - Gen " + i + " - Cost : " + cost);
        if (graphForm.Visible)
        {
            graphForm.SetGraphData("Cost", network.Costs);
        }
    }, true, 10000);
    form.stopTrainingAction += network.StopTraining;

    // Persist the network as JSON on demand.
    form.toJsonAction += () =>
    {
        File.WriteAllText(networkDataPath, new NetworkInfos(network).GetJSON());
    };
    form.testNetworkAction += TestNetwork;
    form.costGraphAction += () =>
    {
        graphForm.SetGraphData("Cost", network.Costs);
        graphForm.Show();
    };

    // Make sure a background training loop cannot outlive the window.
    form.FormClosed += (object o, FormClosedEventArgs a) => network.StopTraining();

    Application.Run(form);
}
/// <summary>
/// Reads a given number of images (of two classes) from files in the MNIST
/// dataset format, for training.
/// </summary>
/// <param name="filePathImgs">Path to the file containing the images.</param>
/// <param name="filePathLbls">Path to the file containing the labels.</param>
/// <param name="cat">Main category the model will be trained on; it is marked with the expected value, and the other categories with the opposite.</param>
/// <param name="maxImagesPerCat">Maximum number of images to read per category.</param>
/// <returns>The loaded samples.</returns>
public static Data[] ImportData_MNIST_TrainOneCat(string filePathImgs, string filePathLbls, int cat, int maxImagesPerCat)
{
    Data[] D = MnistReader.ReadData_TrainOneCat(filePathImgs, filePathLbls, cat, maxImagesPerCat);

    // Preview up to the first 10 samples on the console. The original
    // indexed D[0..9] unconditionally, which throws IndexOutOfRangeException
    // when fewer than 10 images were read.
    int previewCount = Math.Min(10, D.Length);
    for (int ixO = 0; ixO < previewCount; ixO++)
    {
        WriteImageConsole(D[ixO].Values_Jagged);
    }
    return (D);
}
/// <summary>
/// Stops any running training, lazily loads the 2000-image MNIST test set,
/// then shows the test form and advances it to the first sample.
/// </summary>
private static void TestNetwork()
{
    network.StopTraining();

    // Load the test images only once; reuse on subsequent invocations.
    if (testingData is null)
    {
        testingData = MnistReader.ReadImages("t10k-images.idx3-ubyte", "t10k-labels.idx1-ubyte", 2000);
    }

    testForm.Show();
    testForm.Next();
}
/// <summary>
/// Initializes the form, loads the MNIST training and test sets, and
/// builds a fresh 784-30-10 network plus a benchmark helper.
/// </summary>
public Main()
{
    InitializeComponent();

    var mnist = new MnistReader();
    trainingSet = mnist.GetTrainingSet(@"MNIST\train-images.idx3-ubyte", @"MNIST\train-labels.idx1-ubyte");
    testSet = mnist.GetTrainingSet(@"MNIST\t10k-images.idx3-ubyte", @"MNIST\t10k-labels.idx1-ubyte");

    // 784 inputs (28x28 pixels), one hidden layer of 30, 10 output classes.
    network = new NeuralNetwork(new[] { 784, 30, 10 });
    benchmark = new Benchmark();
}
/// <summary>
/// Reads a given number of images from files in the MNIST dataset format.
/// </summary>
/// <param name="filePathImgs">Path to the file containing the images.</param>
/// <param name="filePathLbls">Path to the file containing the labels.</param>
/// <param name="categories">Array of categories to be read.</param>
/// <param name="maxImagesPerCat">Maximum number of images to read per category.</param>
/// <param name="totalClasses">Number of classes obtained.</param>
/// <returns>The loaded samples.</returns>
public static Data[] ImportData_MNIST(string filePathImgs, string filePathLbls, string[] categories, int maxImagesPerCat, out int totalClasses)
{
    Data[] D = MnistReader.ReadData(filePathImgs, filePathLbls, categories, maxImagesPerCat);

    // Single RNG hoisted out of the loop: the original created a new
    // time-seeded Random each iteration, which can repeat sequences.
    Random r = new Random();
    for (int ixO = 0; ixO < 10; ixO++)
    {
        Console.WriteLine($"TOTAL {ixO}: {D.Where(d => d.Expected[ixO] == 1).Count()}");

        // Print one random sample of this class. Guard against empty
        // classes: FirstOrDefault() returns null and the original
        // dereferenced it unconditionally.
        var sample = D.Where(d => d.Expected[ixO] == 1).OrderBy(d => r.Next()).FirstOrDefault();
        if (sample != null)
        {
            WriteImageConsole(sample.Values_Jagged);
        }
    }
    totalClasses = categories.Length;
    return (D);
}
/// <summary>
/// Trains a minimal normalized-input softmax network on a single MNIST test
/// image until its error drops below 0.01, asserts the image is then
/// classified correctly, and prints predictions for same-label images.
/// </summary>
public void MNISTPropagateTest()
{
    MnistReader.RootPath = Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, @"..\..\..\MNIST"));
    var images = MnistReader.ReadTestData().ToList();
    Assert.IsTrue(images.Count > 0); // List<T>.Count property instead of LINQ Count()

    var image = images[0];
    NNArray input = image.Values;
    NNArray output = Utils.OneHot(10, image.Label);
    var nbInput = input.Length;

    IActivation activation = new Softmax();
    var network = new Network(
        new NormalizedLayer(nbInput, 1),
        new DenseLayerNoBias(nbInput, 10, activation, new CrossEntropyOneHot()));
    network.Initialize();

    int epoc = 0, maxEpoc = 10000;
    double error = double.MaxValue;

    // Stopwatch is monotonic; the original subtracted DateTime.Now values,
    // which can be skewed by clock adjustments.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    while (++epoc < maxEpoc && error > 0.01)
    {
        error = network.Train(input, output, 0.01);
    }
    stopwatch.Stop();

    var duration = stopwatch.Elapsed.TotalSeconds;
    Console.WriteLine($"Duration for activation {activation.Name}: {duration} \t epoc: {epoc}\terror: {error}");

    Assert.AreEqual(image.Label, network.OutputLayer.Output.ArgMax());
    Assert.IsTrue(epoc < maxEpoc);

    // Sanity: show predictions for every test image with the same label.
    foreach (var img in images.Where(i => i.Label == image.Label))
    {
        network.Evaluate(img.Values);
        Console.WriteLine($"{network.OutputLayer.Output.ArgMax()}");
    }
}
/// <summary>
/// Loads the first 202 MNIST training items into Images and selects the
/// first one as the current Image.
/// </summary>
public void Initialize()
{
    int loaded = 0;
    foreach (var item in MnistReader.Read(MnistReader.TrainImages, MnistReader.TrainLabels))
    {
        Images.Add(item);
        loaded++;
        // Original used `if (i++ > 200) break;`, which stops after 202 items.
        if (loaded > 201)
        {
            break;
        }
    }
    Image = Images.First();
}
/// <summary>
/// Populates the TrainData and TestData collections from the MNIST files,
/// printing a status line before each phase.
/// </summary>
public static void ReadDataFromFile()
{
    Console.WriteLine("Reading training data ... ");
    foreach (var sample in MnistReader.ReadTrainingData())
    {
        TrainData.Add(sample);
    }

    Console.WriteLine("Reading test data ... ");
    foreach (var sample in MnistReader.ReadTestData())
    {
        TestData.Add(sample);
    }
}
/// <summary>
/// Evaluates the network on the full MNIST test set and reports how many
/// images were classified correctly.
/// </summary>
/// <param name="net">The trained network to evaluate.</param>
private static void TestNetwork(FeedForwardNeuralNet net)
{
    Console.WriteLine("Started Testing");
    int testedImages = 0;
    int rightGuesses = 0;

    foreach (var image in MnistReader.ReadTestData())
    {
        double[] input = Encode(image.Data);
        double[] output = net.Run(input);

        // Single-pass argmax (first index of the maximum). The original
        // allocated a List and scanned the output twice via
        // ToList().IndexOf(output.Max()).
        int result = 0;
        for (int i = 1; i < output.Length; i++)
        {
            if (output[i] > output[result])
            {
                result = i;
            }
        }

        if (result == image.Label)
        {
            rightGuesses++;
        }
        testedImages++;
    }

    Console.WriteLine($"{testedImages} has been tested. {rightGuesses} were correctly classified.");
}
/// <summary>
/// Trains the network on the MNIST training set, epoch after epoch, until
/// the total error drops to the target rate.
/// </summary>
/// <param name="targetErrorRate">Error threshold at which training stops.</param>
public void Train(double targetErrorRate = 0.05)
{
    int sinceReport = 0;   // images seen since the last progress message
    int reported = 0;      // total images reported so far (carries across epochs)
    int epoch = 0;
    double totalError = 1.0;

    while (totalError > targetErrorRate)
    {
        epoch++;
        _errors.Clear();

        foreach (Image image in MnistReader.ReadTrainingData())
        {
            sinceReport++;
            _inputLayer.SetNeurons(image.Data);
            SetNewDesiredValues(image.Label);
            FeedForward();
            _errors.Add(CalculateError());
            BackPropagate();

            // Progress message every 1000 images.
            if (sinceReport == 1000)
            {
                reported += sinceReport;
                sinceReport = 0;
                Console.WriteLine("Images read: " + reported);
            }
        }

        totalError = CalculateTotalError();
        Console.WriteLine("Current total error: " + totalError + "\tEpoch " + epoch);
    }

    Console.WriteLine("Finished training with total error of: " + totalError);
}
/// <summary>
/// Loads the MNIST training and testing datasets and builds a network
/// sized to the first training image (input → 16 → 16 → 10).
/// </summary>
void Init()
{
    TrainingSet = MnistReader.LoadDataset("mnist/train-images.idx3-ubyte", "mnist/train-labels.idx1-ubyte");
    TestingSet = MnistReader.LoadDataset("mnist/t10k-images.idx3-ubyte", "mnist/t10k-labels.idx1-ubyte");

    // The input layer size follows the pixel count of the first image.
    Image specimen = TrainingSet.Data[0];
    Network = new NeuralNetwork(NetworkProperties.Default, specimen.Size, 16, 16, 10);
    Network.Initialize();
}
/// <summary>
/// Trains a 784→800→10 dense network on MNIST with random mini-batches and
/// reports an argmax-match accuracy metric through the training callback.
/// </summary>
private static void MnistTest()
{
    var model = new SequentialModel(
        new DenseLayer(784, 800),
        new ActivationLayer(new LeakyReLU(0.5d), 800),
        new DenseLayer(800, 10),
        new ActivationLayer(new Sigmoid(), 10)
    );

    // Flatten each 28x28 image row-major into a normalized double[784] and
    // one-hot encode its label.
    var mnistTrain = MnistReader.ReadTrainingData()
        .Select(mnistImage =>
        {
            var image = new double[mnistImage.Width * mnistImage.Height];
            for (int i = 0; i < mnistImage.Height; i++)
            {
                for (int j = 0; j < mnistImage.Width; j++)
                {
                    image[(i * mnistImage.Width) + j] = mnistImage.Image[i, j] / 255d;
                }
            }

            var label = new double[10];
            label[mnistImage.Label] = 1;
            return (image, label);
        }).ToArray();

    var rng = new Random();

    // Random.Next's upper bound is EXCLUSIVE; the original passed
    // Length - 1, so the last training example was never sampled.
    (double[] Input, double[] ExpectedOutput) dataSource() => mnistTrain[rng.Next(mnistTrain.Length)];

    model.Train(
        dataSource: dataSource,
        epochs: 60000,
        batchSize: 1000,
        learningRate: 0.001d,
        errorFunction: new MeanSquareError(),
        callback: (i, error, metric) => Console.WriteLine("[" + i.ToString().PadLeft(5) + "] (" + (metric * 100d)?.ToString("0.00").PadLeft(6) + "%) Error: " + error),
        metric: (expected, actual) =>
        {
            if (expected.Length == 0)
            {
                throw new ArgumentException("Expected array to be non-empty.", nameof(expected));
            }
            if (expected.Length != actual.Length)
            {
                throw new ArgumentException("Expected array of same size as " + nameof(expected) + ".", nameof(actual));
            }

            // Accuracy: 1 when argmax(expected) == argmax(actual), else 0.
            var maxExpected = 0;
            var maxActual = 0;
            for (int i = 1; i < expected.Length; i++)
            {
                if (expected[i] > expected[maxExpected])
                {
                    maxExpected = i;
                }
                if (actual[i] > actual[maxActual])
                {
                    maxActual = i;
                }
            }
            return maxExpected == maxActual ? 1 : 0;
        });
}
/// <summary>
/// Reads a given number of images (of two classes) from files in the MNIST
/// dataset format, for testing.
/// </summary>
/// <param name="filePathImgs">Path to the file containing the images.</param>
/// <param name="filePathLbls">Path to the file containing the labels.</param>
/// <param name="cats">Array of image categories to be read.</param>
/// <param name="maxImagesPerCat">Maximum number of images to read per category.</param>
/// <returns>The loaded samples.</returns>
public static Data[] ImportData_MNIST_TestOneCat(string filePathImgs, string filePathLbls, int[] cats, int maxImagesPerCat)
{
    Data[] D = MnistReader.ReadData_TestOneCat(filePathImgs, filePathLbls, cats, maxImagesPerCat);
    return (D);
}