/// <summary>
/// Menu handler: creates a fresh, untrained letter network (784-370-37 topology)
/// and updates the UI to show the AI icon.
/// </summary>
private void newLetterToolStripMenuItem_Click(object sender, EventArgs e)
{
    // 784 inputs (28x28 pixels), one hidden layer of 370, 37 output classes.
    int[] topology = { 784, 370, 37 };
    LetterNN.init(out letterNN, topology);

    pictureBoxAI.Image = Properties.Resources.ai;
    // NOTE(review): setting digit = true in the *letter* handler looks odd —
    // confirm the flag's meaning against the digit handlers before changing.
    digit = true;
}
/// <summary>
/// Menu handler: loads a serialized letter network from a user-selected file
/// and installs its weights/biases into <c>letterNN</c>.
/// File layout (little-endian): [layerCount:int][layerSizes:int*layerCount]
/// then all weight matrices, then all bias vectors, as raw doubles.
/// </summary>
private void loadLetterToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (openFileDialog.ShowDialog() != DialogResult.OK)
    {
        return; // user cancelled — nothing to load
    }

    byte[] fileByte = File.ReadAllBytes(openFileDialog.FileName);

    // First byte is the layer count (format never exceeds 255 layers),
    // but the header ints themselves start at offset INT_SIZE.
    int layerLength = fileByte[0];
    int[] header = new int[layerLength];
    int wbOffset = INT_SIZE;
    for (int i = 0; i < layerLength; i++)
    {
        header[i] = BitConverter.ToInt32(fileByte, INT_SIZE * i + INT_SIZE);
        wbOffset += INT_SIZE;
    }
    // e.g. header length 4: 784 100 100 10

    // Weights: for each layer transition, one row of doubles per output neuron.
    double[][][] loadedWeights = new double[layerLength - 1][][];
    for (int i = 0; i < layerLength - 1; i++)
    {
        loadedWeights[i] = new double[header[i + 1]][];
        for (int j = 0; j < header[i + 1]; j++)
        {
            loadedWeights[i][j] = new double[header[i]];
            int byteCount = loadedWeights[i][j].Length * DOUBLE_SIZE;
            Buffer.BlockCopy(fileByte, wbOffset, loadedWeights[i][j], 0, byteCount);
            wbOffset += byteCount;
        }
    }

    // Biases: one vector per non-input layer, stored after all weights.
    double[][] loadedBiases = new double[layerLength - 1][];
    for (int i = 0; i < layerLength - 1; i++)
    {
        loadedBiases[i] = new double[header[i + 1]];
        int byteCount = loadedBiases[i].Length * DOUBLE_SIZE;
        Buffer.BlockCopy(fileByte, wbOffset, loadedBiases[i], 0, byteCount);
        wbOffset += byteCount;
    }

    // init() definitely assigns its 'out' parameter, so the previous
    // 'letterNN = new NeuralNetwork<char>(header);' was a dead allocation — removed.
    LetterNN.init(out letterNN, header);
    letterNN.testWB(ref loadedWeights, ref loadedBiases);

    pictureBoxAI.Image = Properties.Resources.ai;
    // NOTE(review): digit = true in the *letter* loader mirrors the other
    // handlers here, but confirm the flag's intended meaning.
    digit = true;
}