static void Train(String[] words) {
    // Convolutional stack for the MNIST digits, ending in a fully connected
    // layer and a 10-class softmax output.
    Layer[] layer = new Layer[] {
        new ConvolutionalLayer(3, 3, 3, 0, 1),
        new ConvolutionalLayer(3, 3, 3, 0, 1), //26
        new Lib.Layers.Convolutional.LeakyReluLayer(),
        new ConvolutionalLayer(2, 2, 3, 0, 1), //24
        new Lib.Layers.Convolutional.LeakyReluLayer(),
        new ConvolutionalLayer(2, 2, 3, 0, 1), //22
        new Lib.Layers.Convolutional.LeakyReluLayer(),
        new FullyConnectedLayer(1728),
        new SoftmaxLayer(10)
    };
    nn = new Lib.NeuralNetwork(Lib.NeuralNetwork.random, layer);
    if (nn == null) {
        Console.WriteLine("You must first initialize a Neural Network.");
        return;
    }

    // Load the MNIST test set and train on it, printing one sample's
    // prediction and label before and after training as a quick sanity check.
    TrainingData[] data = Loader.loadTestMNSIT(@"C:\Users\drumm\Desktop\MNIST");
    Trainer trainer = new Trainer(data);

    Matrix.table(nn.feedforward(data[0].inputs));
    Matrix.table(data[0].labels);

    trainer.Train(nn);

    Matrix.table(data[0].labels);
    Matrix.table(nn.feedforward(data[0].inputs));

    Console.WriteLine("Training completed successfully!");
}
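// A minimal sketch of an accuracy check over the whole loaded set, since the
// Matrix.table calls above only eyeball a single sample. This is an
// assumption-laden illustration: it presumes Matrix exposes a public `rows`
// field next to the `data[row, col]` indexer used in loadNetwork, and that both
// the network output and the labels are one-hot 10x1 column vectors.
static float Accuracy(Lib.NeuralNetwork network, TrainingData[] data) {
    int correct = 0;
    foreach (TrainingData sample in data) {
        Matrix output = network.feedforward(sample.inputs);
        if (ArgMax(output) == ArgMax(sample.labels)) {
            correct++;
        }
    }
    return (float)correct / data.Length;
}

// Index of the largest entry in a column vector (assumed Matrix members, see above).
static int ArgMax(Matrix m) {
    int best = 0;
    for (int i = 1; i < m.rows; i++) {
        if (m.data[i, 0] > m.data[best, 0]) {
            best = i;
        }
    }
    return best;
}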
static void createNeuralNetwork(String[] words) {
    Random r = Lib.NeuralNetwork.random;
    try {
        if (Char.IsDigit(words[1].ToCharArray()[0])) {
            // Plain form: every token after the command word is a node count.
            int[] nodes = new int[words.Length - 1];
            for (int i = 0; i < nodes.Length; i++) {
                nodes[i] = int.Parse(words[i + 1]);
            }
            nn = new Lib.NeuralNetwork(r, nodes);
        } else {
            // Layer form: tokens come in (type, nodes) pairs; the recognized
            // types are relu, lrelu and do (dropout).
            Layer[] layers = new Layer[(words.Length - 1) / 2];
            for (int i = 1; i < words.Length; i += 2) {
                if (words[i].Equals("relu")) {
                    int nodes = int.Parse(words[i + 1]);
                    layers[(i - 1) / 2] = new ReluLayer(nodes);
                } else if (words[i].Equals("do")) {
                    int nodes = int.Parse(words[i + 1]);
                    layers[(i - 1) / 2] = new DropoutLayer(nodes);
                } else if (words[i].Equals("lrelu")) {
                    int nodes = int.Parse(words[i + 1]);
                    layers[(i - 1) / 2] = new Lib.Layers.LeakyReluLayer(nodes);
                } else {
                    Console.WriteLine("There is no layer type like \"" + words[i] + "\"");
                    return;
                }
            }
            // Randomly initialize the weights between consecutive layers and a
            // bias per layer; the last layer gets no weight matrix because
            // nothing follows it.
            for (int i = 0; i < layers.Length; i++) {
                if (i != layers.Length - 1) {
                    layers[i].weights = new Matrix(layers[i + 1].nodes, layers[i].nodes);
                    layers[i].weights.randomize(r);
                }
                layers[i].bias = new Matrix(layers[i].nodes, 1);
                layers[i].bias.randomize(r);
            }
            nn = new Lib.NeuralNetwork(Lib.NeuralNetwork.random, layers);
        }
    } catch (Exception e) {
        printError(e);
        return;
    }
    Console.WriteLine("Neural Network was created successfully!");
}
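// Hypothetical console invocations for the parser above. The command word
// itself ("create" here) is illustrative; only the tokens after it are read:
//
//   create 784 64 10                 -> plain form: every argument is a node count
//   create relu 128 lrelu 64 do 10   -> layer form: (type, nodes) pairs
//
// A minimal read-eval loop that feeds such lines into the method could look
// like this sketch (command names are assumptions, not part of the original):
static void Repl() {
    while (true) {
        String line = Console.ReadLine();
        if (line == null) return;
        String[] words = line.Split(' ');
        if (words[0] == "create") createNeuralNetwork(words);
        else if (words[0] == "exit") return;
    }
}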
public static NeuralNetwork loadNetwork(String file_name) {
    String text;
    String[] sections;
    Layer[] layers;

    // The file is split into sections by '#': section 0 describes the layers,
    // and each following section holds one layer's weight matrix.
    // Note: only weights are read; biases are not restored by this loader.
    using (StreamReader streamReader = new StreamReader("models\\" + file_name + ".network", Encoding.UTF8)) {
        text = streamReader.ReadToEnd();
        sections = text.Split("#".ToCharArray());
    }

    String[] values = sections[0].Split(";".ToCharArray());
    layers = new Layer[values.Length];

    for (int i = 0; i < sections.Length; i++) {
        if (i == 0) {
            // Section 0: one "<type> <nodes>" entry per layer, separated by ';'.
            for (int j = 0; j < layers.Length; j++) {
                String[] curText = values[j].Split(" ".ToCharArray());
                if (curText[0] == "relu") {
                    int layer_nodes = int.Parse(curText[1]);
                    layers[j] = new ReluLayer(layer_nodes);
                } else if (curText[0] == "do") {
                    int layer_nodes = int.Parse(curText[1]);
                    layers[j] = new DropoutLayer(layer_nodes);
                } else if (curText[0] == "lrelu") {
                    int layer_nodes = int.Parse(curText[1]);
                    layers[j] = new LeakyReluLayer(layer_nodes);
                }
                if (j == layers.Length - 1) {
                    // The output layer has no outgoing weights; give it a placeholder.
                    layers[j].weights = new Matrix(1, 1);
                }
            }
        } else if (layers[i - 1].weights == null) {
            // Section i: weight matrix for layer i - 1.
            // Rows are separated by ';', values within a row by spaces.
            int index = i - 1;
            values = sections[i].Split(";".ToCharArray());
            int rows = values.Length;
            int cols = values[0].Split(" ".ToCharArray()).Length;
            Matrix m = new Matrix(rows, cols);
            for (int j = 0; j < values.Length; j++) {
                String[] comp = values[j].Split(" ".ToCharArray());
                for (int k = 0; k < comp.Length; k++) {
                    m.data[j, k] = float.Parse(comp[k]);
                }
            }
            Matrix.table(m); // debug print of the loaded matrix
            layers[index].weights = m;
        }
    }

    NeuralNetwork nn = new NeuralNetwork(layers);
    return nn;
}
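// Sketch of the "models\<name>.network" text layout implied by the parser
// above, with made-up numbers. Sections are separated by '#'; the first section
// lists the layers, and each later section is one weight matrix (rows separated
// by ';', values within a row by spaces). The final layer needs no weight
// section because loadNetwork assigns it a 1x1 placeholder. Line breaks below
// are only for readability; the parser treats the file as one continuous string.
//
//   relu 4;relu 3;lrelu 2#
//   0.10 0.20 0.30 0.40;0.50 0.60 0.70 0.80;0.90 1.00 1.10 1.20#
//   0.25 0.35 0.45;0.15 0.05 0.55
//
// Here the first weight section is 3x4 (layer 0 -> layer 1) and the second is
// 2x3 (layer 1 -> layer 2), matching the (next.nodes, current.nodes) dimensions
// used in createNeuralNetwork.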