// Loading this network from another network object (duplicating the given network into this one).
public void Load(neuralnetwork data)
{
    for (int i = 0; i < entryLevel.Length; i++)
    {
        for (int z = 0; z < entryLevel[i].Weights.Length; z++)
        {
            entryLevel[i].Weights[z] = data.entryLevel[i].Weights[z];
        }
        entryLevel[i].bias = data.entryLevel[i].bias;
    }
    for (int i = 0; i < numOfneuronsRows; i++)
    {
        for (int j = 0; j < length; j++)
        {
            for (int z = 0; z < neurons[i, j].Weights.Length; z++)
            {
                neurons[i, j].Weights[z] = data.neurons[i, j].Weights[z];
            }
            neurons[i, j].bias = data.neurons[i, j].bias;
        }
    }
    for (int i = 0; i < outputLevel.Length; i++)
    {
        for (int z = 0; z < outputLevel[i].Weights.Length; z++)
        {
            outputLevel[i].Weights[z] = data.outputLevel[i].Weights[z];
        }
        outputLevel[i].bias = data.outputLevel[i].bias;
    }
}
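// Usage sketch (not from the original project): duplicating a trained network into a fresh
// instance of the same dimensions with the Load(neuralnetwork) overload above. The
// (hiddenRows, rowLength, inputs, outputs) constructor arguments mirror the ones used in
// Save/Main below; the method name DuplicateExample is hypothetical.
static void DuplicateExample()
{
    neuralnetwork trained = new neuralnetwork(2, 4, 2, 4);
    // ... train `trained` here ...
    neuralnetwork copy = new neuralnetwork(2, 4, 2, 4); // must have identical dimensions,
                                                        // since Load copies element by element
    copy.Load(trained); // copy now holds the same weights and biases as trained
}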
// Saving the network to a file on disk. The file name is built from the path prefix x, the row
// length, and the number of hidden rows (e.g. prefix + "4A2A0"), with a running counter so
// existing files are never overwritten.
public void Save(string x)
{
    BinaryFormatter bf = new BinaryFormatter();
    // Find the first unused file number for this network size.
    int y = 0;
    while (File.Exists(x + length + "A" + numOfneuronsRows + "A" + y))
    {
        y++;
    }
    FileStream file = File.Create(x + length + "A" + numOfneuronsRows + "A" + y);
    // example prefix: "c:\\4inRow\\_neuralnetwork.dat"
    neuralnetwork data = new neuralnetwork(numOfneuronsRows, length, entryLevel.Length, outputLength);
    for (int i = 0; i < entryLevel.Length; i++)
    {
        for (int z = 0; z < entryLevel[i].Weights.Length; z++)
        {
            data.entryLevel[i].Weights[z] = entryLevel[i].Weights[z];
        }
        data.entryLevel[i].bias = entryLevel[i].bias;
    }
    for (int i = 0; i < numOfneuronsRows; i++)
    {
        for (int j = 0; j < length; j++)
        {
            for (int z = 0; z < neurons[i, j].Weights.Length; z++)
            {
                data.neurons[i, j].Weights[z] = neurons[i, j].Weights[z];
            }
            data.neurons[i, j].bias = neurons[i, j].bias;
        }
    }
    for (int i = 0; i < outputLevel.Length; i++)
    {
        for (int z = 0; z < outputLevel[i].Weights.Length; z++)
        {
            data.outputLevel[i].Weights[z] = outputLevel[i].Weights[z];
        }
        data.outputLevel[i].bias = outputLevel[i].bias;
    }
    data.score = score;
    bf.Serialize(file, data);
    file.Close();
}
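// Note: BinaryFormatter is marked obsolete in newer versions of .NET and is disabled by
// default there. A minimal alternative sketch (not the project's actual format) that writes
// the same weights and biases with a BinaryWriter is shown below; it assumes the same
// entryLevel / neurons / outputLevel / score members used by Save above, and the method
// name SaveRaw is hypothetical.
public void SaveRaw(string path)
{
    using (BinaryWriter w = new BinaryWriter(File.Create(path)))
    {
        foreach (var n in entryLevel)
        {
            foreach (double weight in n.Weights) w.Write(weight);
            w.Write(n.bias);
        }
        for (int i = 0; i < numOfneuronsRows; i++)
        {
            for (int j = 0; j < length; j++)
            {
                foreach (double weight in neurons[i, j].Weights) w.Write(weight);
                w.Write(neurons[i, j].bias);
            }
        }
        foreach (var n in outputLevel)
        {
            foreach (double weight in n.Weights) w.Write(weight);
            w.Write(n.bias);
        }
        w.Write(score);
    }
}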
// Loading the network from a saved file on disk (file number fileNum, path prefix x).
public void Load(int fileNum, string x)
{
    if (File.Exists(x + length + "A" + numOfneuronsRows + "A" + fileNum))
    {
        BinaryFormatter bf = new BinaryFormatter();
        FileStream file = File.Open(x + length + "A" + numOfneuronsRows + "A" + fileNum, FileMode.Open);
        neuralnetwork data = (neuralnetwork)bf.Deserialize(file);
        file.Close();
        for (int i = 0; i < entryLevel.Length; i++)
        {
            for (int z = 0; z < entryLevel[i].Weights.Length; z++)
            {
                entryLevel[i].Weights[z] = data.entryLevel[i].Weights[z];
            }
            entryLevel[i].bias = data.entryLevel[i].bias;
        }
        for (int i = 0; i < numOfneuronsRows; i++)
        {
            for (int j = 0; j < length; j++)
            {
                for (int z = 0; z < neurons[i, j].Weights.Length; z++)
                {
                    neurons[i, j].Weights[z] = data.neurons[i, j].Weights[z];
                }
                neurons[i, j].bias = data.neurons[i, j].bias;
            }
        }
        for (int i = 0; i < outputLevel.Length; i++)
        {
            for (int z = 0; z < outputLevel[i].Weights.Length; z++)
            {
                outputLevel[i].Weights[z] = data.outputLevel[i].Weights[z];
            }
            outputLevel[i].bias = data.outputLevel[i].bias;
        }
    }
}
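// Usage sketch (hypothetical prefix and method name): a full save/load round trip.
// With length == 4 and numOfneuronsRows == 2, Save writes a file such as
// "c:\4inRow\net_4A2A0", and Load(0, ...) reads that same file number back.
static void RoundTripExample()
{
    neuralnetwork original = new neuralnetwork(2, 4, 2, 4);
    original.Save("c:\\4inRow\\net_");            // writes the first free file number for this size
    neuralnetwork restored = new neuralnetwork(2, 4, 2, 4);
    restored.Load(0, "c:\\4inRow\\net_");         // restores the weights and biases from file 0
}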
// Examination of the back-propagation function by teaching the neural network XOR:
// the first output is XOR of the two inputs, the second NOT XOR, the third AND, and the fourth OR.
static void Main(string[] args)
{
    neuralnetwork asd = new neuralnetwork(2, 4, 2, 4);
    double[] output;
    // The four input patterns.
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    // Training. Very important: do NOT train on only one example; every epoch runs all four patterns.
    for (int epoch = 0; epoch < 50000; epoch++)
    {
        foreach (double[] x in inputs)
        {
            // 1) forward propagation (calculates the output)
            output = asd.run(x);
            // 2) desired results for this pattern
            int a = (int)x[0], b = (int)x[1];
            double[] results = { a ^ b, 1 - (a ^ b), a & b, a | b };
            // 3) back propagation, then apply the weight adjustments
            asd.bp(results);
            asd.activateAdjustments();
        }
    }
    // Evaluation: print the trained network's outputs for every pattern.
    foreach (double[] x in inputs)
    {
        output = asd.run(x);
        Console.WriteLine("{0} xor {1} = {2} ------- {0} not xor {1} = {3} ------- {0} and {1} = {4} ------- {0} or {1} = {5}",
            x[0], x[1], output[0], output[1], output[2], output[3]);
    }
    Console.ReadLine();
}
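// Training-loop sketch (not part of the original program): instead of a fixed 50,000 epochs,
// stop once the mean squared error over the four patterns falls below a tolerance. It reuses
// the run/bp/activateAdjustments methods called in Main; the method name and the 0.001
// tolerance are assumptions.
static void TrainXorWithEarlyStopping()
{
    neuralnetwork net = new neuralnetwork(2, 4, 2, 4);
    double[][] inputs =
    {
        new double[] { 0, 0 }, new double[] { 0, 1 },
        new double[] { 1, 0 }, new double[] { 1, 1 }
    };
    double mse = double.MaxValue;
    for (int epoch = 0; epoch < 50000 && mse > 0.001; epoch++)
    {
        mse = 0;
        foreach (double[] x in inputs)
        {
            int a = (int)x[0], b = (int)x[1];
            double[] target = { a ^ b, 1 - (a ^ b), a & b, a | b };
            double[] predicted = net.run(x);       // forward pass (output before this update)
            net.bp(target);                        // back propagation
            net.activateAdjustments();             // apply the weight adjustments
            for (int k = 0; k < target.Length; k++)
                mse += (target[k] - predicted[k]) * (target[k] - predicted[k]) / (4.0 * target.Length);
        }
    }
}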