/// <summary>
/// Smoke-tests a 2-hidden / 1-output perceptron network on the XOR training
/// set for a fixed number of epochs, logging every signal propagation, error
/// propagation and weight adjustment into the rich text box.
/// </summary>
/// <param name="sender">The menu item that raised the click event.</param>
/// <param name="e">Standard event arguments (unused).</param>
partial void test_ann_neuron_hiddenPercpetronToolStripMenuItem_Click(object sender, EventArgs e)
{
    // FIX: accumulate the log in a StringBuilder instead of repeated string
    // concatenation ("log +=" inside nested loops copies the whole buffer on
    // every append, i.e. quadratic total work). Fully qualified so no new
    // using-directive is required.
    var log = new System.Text.StringBuilder();
    int epochs = 1;
    int nofS = 2; // number of input (source) nodes
    Synapse s;
    double momentum = 0.5, learningRate = 0.08;

    // Shared perceptron configuration: scaled tanh activation, two inputs.
    string c = "";
    c += "activation=tanh(a=1.7159, b=0.6667); fieldsize=2";

    HiddenPerceptron[] h = new HiddenPerceptron[2];
    OutputPerceptron o = new OutputPerceptron();
    o.Configure(c); // instantiate with configuration string
    h[0] = new HiddenPerceptron();
    h[0].Configure(c);
    h[1] = new HiddenPerceptron();
    h[1].Configure(c);

    // Input nodes; element layout appears to be [signal, error], error unset
    // (null) until training begins — TODO confirm against the Node/Global types.
    Node[] src0 = new Node[nofS];
    for (int i = 0; i < nofS; i++)
    {
        src0[i] = new Node(new double?[] { -1.0 + (i * 2.0), null });
    }

    // initialize inputs: connect both hidden perceptrons to every input node
    // (the Source setter presumably accumulates connections — verify).
    for (int i = 0; i < nofS; i++)
    {
        h[0].Source = src0[i];
        h[1].Source = src0[i];
    }

    // initialize inputs for output node: connect it to both hidden outputs
    for (int i = 0; i < nofS; i++)
    {
        o.Source = h[i].Output;
    }

    // initialize weights; "<= nofS" iterates nofS + 1 synapses, presumably
    // the inputs plus a bias synapse
    for (int i = 0; i <= nofS; i++)
    {
        h[0].Synapse[i].W = (Math.Daemon.Random.Next(6) + 1) * 0.35;
        h[0].Synapse[i].dW = 0.0;
        h[1].Synapse[i].W = (Math.Daemon.Random.Next(6) + 1) * 0.50;
        h[1].Synapse[i].dW = 0.0;
        o.Synapse[i].W = (Math.Daemon.Random.Next(6) + 1) * 0.20;
        o.Synapse[i].dW = 0.0;
    }

    // XOR truth table: { input0, input1, target }
    double[][] trainingSet = new double[4][];
    trainingSet[0] = new double[] { 0.0, 0.0, 0.0 };
    trainingSet[1] = new double[] { 0.0, 1.0, 1.0 };
    trainingSet[2] = new double[] { 1.0, 0.0, 1.0 };
    trainingSet[3] = new double[] { 1.0, 1.0, 0.0 };

    for (int i = 0; i < epochs; i++)
    {
        log.Append("\n");
        for (int j = 0; j < trainingSet.Length; j++)
        {
            // load the current training pattern into the input nodes
            ((double?[])src0[0].Element)[Global.Sig] = trainingSet[j][0];
            ((double?[])src0[1].Element)[Global.Sig] = trainingSet[j][1];

            // propagate signal: hidden layer first, then the output perceptron
            log.Append("\n\nPROPAGATE SIGNAL...///////////////////////////////////////");
            h[0].Next(Propagate.Signal);
            h[1].Next(Propagate.Signal);
            o.Next(Propagate.Signal);
            log.Append("\n\nepoch[" + i.ToString("00") + "]");
            log.Append("\n\n" + h[0].ToString());
            log.Append("\n" + h[1].ToString());
            log.Append("\n" + o.ToString());

            // set target output value (hidden-layer targets are zeroed; only
            // the output node carries the pattern's target).
            // FIX: this literal was split across two physical lines in the
            // original, which a regular C# string literal cannot be — rejoined.
            log.Append("\n\nSET TARGET...////////////////////////////////////////////");
            ((double?[])h[0].Output.Element)[Global.Err] = 0.0;
            ((double?[])h[1].Output.Element)[Global.Err] = 0.0;
            ((double?[])o.Output.Element)[Global.Err] = trainingSet[j][2];
            log.Append("\n\n" + h[0].ToString());
            log.Append("\n" + h[1].ToString());
            log.Append("\n" + o.ToString());

            // propagate error: output perceptron first, then the hidden layer
            log.Append("\n\nPROPAGATE ERROR...///////////////////////////////////////");
            o.Next(Propagate.Error);
            h[0].Next(Propagate.Error);
            h[1].Next(Propagate.Error);
            log.Append("\n\n" + h[0].ToString());
            log.Append("\n" + h[1].ToString());
            log.Append("\n" + o.ToString());

            // adjust weights: delta rule with momentum,
            // dW = momentum * dW + rate * gradient * source signal
            log.Append("\n\nADJUST WEIGHTS...///////////////////////////////////////");
            for (int k = 0; k < o.Synapse.Count; k++)
            {
                s = o.Synapse[k];
                s.dW = (momentum * s.dW) + (learningRate * o.Gradient * ((double?[])s.Source.Element)[Global.Sig].Value);
                s.W += s.dW;
            }
            for (int k = 0; k < h[0].Synapse.Count; k++)
            {
                s = h[0].Synapse[k];
                s.dW = (momentum * s.dW) + (learningRate * h[0].Gradient * ((double?[])s.Source.Element)[Global.Sig].Value);
                s.W += s.dW;
            }
            for (int k = 0; k < h[1].Synapse.Count; k++)
            {
                s = h[1].Synapse[k];
                s.dW = (momentum * s.dW) + (learningRate * h[1].Gradient * ((double?[])s.Source.Element)[Global.Sig].Value);
                s.W += s.dW;
            }
            log.Append("\n\n" + h[0].ToString());
            log.Append("\n" + h[1].ToString());
            log.Append("\n" + o.ToString());
        }
    }

    richTextBox.Text = log.ToString();
}
/// <summary>
/// Smoke-tests a single output perceptron on a logical-OR training set for a
/// fixed number of epochs, logging every propagation and weight adjustment
/// into the rich text box.
/// </summary>
/// <param name="sender">The menu item that raised the click event.</param>
/// <param name="e">Standard event arguments (unused).</param>
partial void test_ann_neuron_outputPerceptronToolStripMenuItem_Click(object sender, EventArgs e)
{
    // FIX: accumulate the log in a StringBuilder instead of repeated string
    // concatenation ("log +=" inside nested loops copies the whole buffer on
    // every append, i.e. quadratic total work). Fully qualified so no new
    // using-directive is required.
    var log = new System.Text.StringBuilder();
    int epochs = 1, nofS = 2; // nofS = number of input (source) nodes
    // NOTE(review): the sibling hidden-perceptron test uses the keys
    // "activation=..." / "fieldsize=..." here — confirm Configure accepts both.
    string c = "act=tanh(a=1.7159, b=0.6667); outputfieldsize=2";
    Synapse s;
    double momentum = 0.5, learningRate = 0.08;

    OutputPerceptron o = new OutputPerceptron();
    o.Configure(c);
    // Seed the error field; overwritten with the pattern target each iteration.
    ((double?[])o.Output.Element)[Global.Err] = 1.0;

    // Input nodes; element layout appears to be [signal, error] — TODO confirm.
    Node[] source = new Node[nofS];
    for (int i = 0; i < nofS; i++)
    {
        source[i] = new Node(new double?[] { 1.0, null });
    }

    // Connect the perceptron to every input node (the Source setter presumably
    // accumulates connections — verify).
    for (int i = 0; i < nofS; i++)
    {
        o.Source = source[i];
    }

    // Initialize weights; "<= nofS" iterates nofS + 1 synapses, presumably
    // the inputs plus a bias synapse.
    for (int i = 0; i <= nofS; i++)
    {
        o.Synapse[i].W = (Math.Daemon.Random.Next(6) + 1) * 0.1;
        o.Synapse[i].dW = 0.0;
    }

    // Logical-OR truth table: { input0, input1, target }
    double[][] trainingSet = new double[4][];
    trainingSet[0] = new double[] { 0.0, 0.0, 0.0 };
    trainingSet[1] = new double[] { 0.0, 1.0, 1.0 };
    trainingSet[2] = new double[] { 1.0, 0.0, 1.0 };
    trainingSet[3] = new double[] { 1.0, 1.0, 1.0 };

    for (int i = 0; i < epochs; i++)
    {
        log.Append("\n");
        for (int j = 0; j < trainingSet.Length; j++)
        {
            // load the current training pattern into the input nodes
            ((double?[])source[0].Element)[Global.Sig] = trainingSet[j][0];
            ((double?[])source[1].Element)[Global.Sig] = trainingSet[j][1];

            // propagate signal
            o.Next(Propagate.Signal);
            log.Append("\n\nepoch[" + i.ToString("00") + "]");
            log.Append("\n\npropagate signal...");
            log.Append("\n" + o.ToString());

            // set target output value
            ((double?[])o.Output.Element)[Global.Err] = trainingSet[j][2];
            log.Append("\n\nSet target...");
            log.Append("\n" + o.ToString());

            // propagate error
            o.Next(Propagate.Error);
            log.Append("\n\npropagate error...");
            log.Append("\n" + o.ToString());

            // adjust weights: delta rule with momentum,
            // dW = momentum * dW + rate * gradient * source signal
            for (int k = 0; k < o.Synapse.Count; k++)
            {
                s = o.Synapse[k];
                s.dW = (momentum * s.dW) + (learningRate * o.Gradient * ((double?[])s.Source.Element)[Global.Sig].Value);
                s.W += s.dW;
            }
            log.Append("\n\nadjust weights...");
            log.Append("\n" + o.ToString());
        }
    }

    richTextBox.Text = log.ToString();
}