/// <summary>
/// Copy constructor: takes over the source network's data by reference.
/// This is a shallow copy — the member collections are shared with
/// <paramref name="model"/>, not cloned.
/// </summary>
/// <param name="model">Network whose members are adopted.</param>
public Brain(Brain model)
{
    // Field-by-field reference copy of the source network.
    Inputs = model.Inputs;
    InputCount = model.InputCount;
    Expected = model.Expected;
    Neurons = model.Neurons;
    Synapses = model.Synapses;
}
/// <summary>
/// Copy constructor: clones the slope/augment pairs and the synapse weights
/// of <paramref name="template"/> into fresh lists; the brain model itself
/// is shared by reference.
/// </summary>
/// <param name="template">Configuration to duplicate.</param>
public Configuration(Configuration template)
{
    model = template.model;

    // Fresh list instances so this configuration can be mutated
    // independently of the template.
    SIAs = new List<Tuple<double, double>>(template.SIAs);
    Weights = new List<double>(template.Weights);
}
/// <summary>
/// Builds a flat configuration from a network: first all neuron
/// (slope, augment) pairs, layer by layer, then all synapse weights in the
/// same traversal order. With <paramref name="randomize"/> set, fresh random
/// values are generated instead of the model's current ones.
/// </summary>
/// <param name="model">Network whose topology defines the entry order.</param>
/// <param name="randomize">True to seed random values, false to snapshot the model.</param>
public Configuration(Brain model, bool randomize = false)
{
    this.model = model;
    SIAs = new List<Tuple<double, double>>();
    Weights = new List<double>();

    // Assemble list of all neurons ...
    foreach (List<Neuron> layer in this.model.Neurons.Values)
    {
        foreach (Neuron neuron in layer)
        {
            SIAs.Add(randomize
                ? new Tuple<double, double>(Neuron.GetRandomSlope(), Neuron.GetRandomAugment())
                : new Tuple<double, double>(neuron.Slope, neuron.Augment));
        }
    }

    // ... and then all synapses.
    foreach (List<Synapse> group in this.model.Synapses.Values)
    {
        foreach (Synapse synapse in group)
        {
            Weights.Add(randomize ? Synapse.GetRandomWeight() : synapse.Weight);
        }
    }
}
/// <summary>
/// Renders a 1024x768 schematic of the network: one row of circles per layer
/// (inputs at the bottom, outputs at the top) connected by synapse lines.
/// Line thickness encodes the weight normalized by brain.maxweight
/// (white = positive, red = negative); circle outline thickness encodes the
/// augment normalized by brain.maxaugment with the same color coding.
/// The caller owns the returned <see cref="Bitmap"/> and must dispose it.
/// </summary>
/// <param name="brain">Network to draw; only read, never modified.</param>
/// <returns>The rendered schema bitmap.</returns>
public static Bitmap GetSchema(Brain brain)
{
    int width = 1024;
    int height = 768;
    int layernum = brain.Synapses.Count + 1; // synapse groups + input layer

    // Neuron diameter: fit the layer count vertically, capped at height/6.
    int neuronsize = (int)((height * 0.8) / layernum / 2);
    neuronsize = (neuronsize > height / 6) ? height / 6 : neuronsize;

    // count neurons in each layer (index 0 = input layer)
    List<int> neuroncount = new List<int>();
    if (brain.Neurons != null)
    {
        neuroncount.Add(brain.InputCount);
        foreach (List<Neuron> list in brain.Neurons.Values) // first already complete
            neuroncount.Add(list.Count);
    }
    int max = neuroncount.Max(); // widest layer drives horizontal scaling

    // compute y-coords (one per layer, evenly spaced)
    List<int> ycoords = new List<int>();
    for (int i = 0; i < layernum; i++)
        ycoords.Add((int)(height / (layernum) * i + height / (layernum + 3)));

    if ((neuronsize + 2) * 1.5 * max > width) // neurons too big for width
        neuronsize = (int)(width / max / 1.8);

    Bitmap b = new Bitmap(width, height);

    // prepare points: "layer_index" -> center of that neuron's circle
    Dictionary<String, Point> coords = new Dictionary<String, Point>();
    for (int i = 0; i < layernum; i++)
        for (int j = 0; j < neuroncount[i]; j++)
            coords.Add(String.Format("{0}_{1}", i, j),
                // left margin, neuron spacing, centered around width/2;
                // layers are laid out bottom-up (layer 0 = lowest row)
                new Point((int)(neuronsize / 2 + j * neuronsize * 1.5 + (width / 2) - ((neuronsize - 1) * 1.5 / 2 * neuroncount[i])),
                    ycoords[layernum - 1 - i]));

    // draw — Font/Graphics/Pens are GDI handles; the original leaked them,
    // so everything created here is now wrapped in using blocks.
    using (Font font = new Font(new FontFamily("Arial"), (neuronsize / 4 > 0) ? neuronsize / 4 : 1, FontStyle.Bold, GraphicsUnit.Pixel))
    using (Graphics g = Graphics.FromImage(b))
    {
        g.Clear(Color.Black);
        for (int i = 0; i < layernum; i++) // for each layer
        {
            // draw synapses leaving this layer (brain.Synapses appears to be
            // keyed so that key i-1 holds the connections out of display
            // layer i, with -1 for the input fan-in — TODO confirm)
            if (i != layernum - 1)
            {
                foreach (Synapse s in brain.Synapses[i - 1])
                {
                    // normalize the weight to a pen width of at most neuronsize/6
                    float weight = brain.maxweight == 0 ? 0 : (float)(s.Weight / brain.maxweight * neuronsize / 6);
                    if (weight == 0)
                        continue; // a zero-width line would be invisible anyway
                    // white = positive weight, red = negative
                    using (Pen p = new Pen(weight > 0 ? Brushes.White : Brushes.Red, Math.Abs(weight)))
                    {
                        // input synapses have no Source neuron, only an input index
                        String key = String.Format("{0}_{1}", i, s.Source == null ? s.InputIndex : s.Source.Index);
                        g.DrawLine(p, coords[key], coords[String.Format("{0}_{1}", i + 1, s.Target.Index)]);
                    }
                }
            }

            // draw the rest
            for (int j = 0; j < neuroncount[i]; j++) // for each neuron in layer
            {
                string key = String.Format("{0}_{1}", i, j);
                // caption: i<n> for inputs, n<layer>_<index> for neurons
                string caption = (i == 0)
                    ? string.Format("i{0}", j)
                    : String.Format("n{0}_{1}", i - 1, j);

                // outline thickness encodes |augment| relative to the maximum
                float neuronthickness = brain.maxaugment == 0 || i == 0
                    ? 1
                    : Math.Abs((float)(brain.Neurons[i - 1][j].Augment / brain.maxaugment * neuronsize / 12));
                neuronthickness *= 1.5f;

                // draw circles; white outline for inputs and non-negative augments
                using (Pen neuronpen = new Pen((i == 0 || brain.Neurons[i - 1][j].Augment >= 0) ? Brushes.White : Brushes.Red, neuronthickness))
                {
                    // black fill hides the synapse lines behind the circle
                    g.FillEllipse(Brushes.Black, coords[key].X - neuronsize / 2, coords[key].Y - neuronsize / 2, neuronsize, neuronsize);
                    g.DrawEllipse(neuronpen, coords[key].X - neuronsize / 2, coords[key].Y - neuronsize / 2, neuronsize, neuronsize);
                }

                // draw strings, centered on the circle
                SizeF stringsize = g.MeasureString(caption, font);
                g.DrawString(caption, font, Brushes.White, coords[key].X - stringsize.Width / 2, coords[key].Y - stringsize.Height / 2);
            }
        }
    }
    return b;
}
/// <summary>
/// Runs the algorithm selected in cmbNeuralAlgorithm for numNeuralEpoch
/// epochs on the current brain, refreshing the output box, progress/match
/// bars, grids and charts after every epoch. Per-epoch global errors are
/// collected in <c>ges</c> keyed by 1-based epoch number.
/// Aborts up front if the input and expected text boxes have a different
/// number of non-blank lines.
/// </summary>
private void btnNeuralRun_Click(object sender, EventArgs e)
{
    ges = new Dictionary<int, double>(); // Global Errors
    barNeuralMatch.Value = 0;
    // input and expected data must have the same number of non-blank rows
    if (txtNeuralInput.Lines.Count((s) => s.Trim() != "") != txtNeuralExpected.Lines.Count((s) => s.Trim() != ""))
        return;
    // update NN
    Update();
    bool shouldloop = true;
    //do the magic
    for (int i = 0; i < (int)numNeuralEpoch.Value; i++)
    {
        switch (cmbNeuralAlgorithm.Text)
        {
            case "Activate": // basic computation
                {
                    brain.Think();
                    shouldloop = false; // no reason to repeat
                    break;
                }
            case "SOMA": // basic computation
                {
                    // 50 randomized candidates per epoch ...
                    List<Neural.Configuration> population = new List<Neural.Configuration>();
                    for (int j = 0; j < 50; j++)
                        population.Add(new Neural.Configuration(brain, true));
                    // also use actual configuration
                    population.Add(new Neural.Configuration(brain));
                    population = new SOMA().Run(population);
                    //use the best one
                    population.Sort((x, y) => x.GE.CompareTo(y.GE));
                    brain.UpdateConfiguration(population[0]);
                    brain.Think();
                    break;
                }
            case "Fixed Increments":
                {
                    brain.Think(NeuralNetworkAlgorithm.FixedIncrement);
                    brain.Think(); // check again (no adaptation) to update values
                    break;
                }
            case "Back Propagation":
                {
                    // Think(BackPropagation) returns the adapted brain
                    brain = brain.Think(NeuralNetworkAlgorithm.BackPropagation);
                    brain.Think(); // to compute final outputs
                    break;
                }
            default:
                {
                    txtLog.AppendText(String.Format("'{0}' algorithm is not implemented.\r\n", cmbNeuralAlgorithm.Text));
                    shouldloop = false;
                    break;
                }
        }
        // refresh all progress/result UI for this epoch
        barNeuralProgress.Value = (i + 1) * 10000 / (int)numNeuralEpoch.Value;
        txtNeuralOutput.Text = brain.GetDataStr(brain.Outputs);
        barNeuralMatch.Value = (int)(brain.ComputeMatch() * 100);
        UpdateNeuronDataGrid();
        txtNeuralSynapses.Text = brain.GetSynapsesStr();
        UpdateChartLSP();
        ges.Add(i + 1, brain.GetGlobalError());
        UpdateChartStatus();
        InvalidateAll();
        // stop early on single-shot algorithms or a perfect match
        // NOTE(review): exact float comparison against 1 — assumes
        // ComputeMatch can return exactly 1.0; confirm.
        if (!shouldloop || brain.ComputeMatch() == 1)
        {
            // pad the remaining epochs with the final global error so the
            // status chart still covers the full x-range
            double ge = brain.GetGlobalError();
            for (int j = i + 1; j < numNeuralEpoch.Value; j++)
                ges.Add(j + 1, ge);
            barNeuralProgress.Value = barNeuralProgress.Maximum;
            UpdateChartStatus();
            break;
        }
        // slow down short runs so the UI updates are visible
        // NOTE(review): Thread.Sleep here blocks the UI thread between
        // epochs — consider async/await or a timer.
        if (i != numNeuralEpoch.Value - 1)
        {
            int sleeptime = (numNeuralEpoch.Value > 10) ? (int)(1000 / numNeuralEpoch.Value) : 200;
            if (sleeptime > 16)
                Thread.Sleep(sleeptime);
        }
    }
    txtLog.AppendText(String.Format("Finished with Global Error of {0}.\n", brain.GetGlobalError()));
}
/// <summary>
/// Loads a previously saved network (<c>*.ann</c>) chosen via the open-file
/// dialog, deserializes it into <c>brain</c> and refreshes the UI with the
/// loaded inputs, expected outputs, layer grid and synapse text.
/// </summary>
/// SECURITY NOTE(review): BinaryFormatter deserialization is unsafe on
/// untrusted files and is removed in .NET 9 — consider migrating the .ann
/// format to a safe serializer (e.g. System.Text.Json). Left unchanged here
/// to stay compatible with existing save files.
private void btnNeuralLoadConfiguration_Click(object sender, EventArgs e)
{
    openFileDialog1.FileName = "network.ann";
    if (openFileDialog1.ShowDialog() != DialogResult.OK)
        return;

    IFormatter formatter = new BinaryFormatter();
    // using guarantees the file handle is released even if Deserialize
    // throws (the original leaked the stream on deserialization errors)
    using (Stream stream = new FileStream(openFileDialog1.FileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        brain = (Brain)formatter.Deserialize(stream);
    }

    //show actual data
    txtNeuralInput.Text = brain.GetDataStr(brain.Inputs);
    txtNeuralExpected.Text = brain.GetDataStr(brain.Expected);
    gridNeuralLayers.Rows.Clear();
    for (int i = 0; i < brain.Neurons.Count; i++)
    {
        // rebuild the layer grid, matching each layer's activation
        // function entry by its string representation
        AddNewLayer(brain.Neurons[i].Count,
            functionlist.FirstOrDefault(x => x.Value.ToString() == brain.Neurons[i][0].f.ToString()).Key);
    }
    barNeuralMatch.Value = 0;
    UpdateNeuronDataGrid();
    txtNeuralSynapses.Text = brain.GetSynapsesStr();
}
/// <summary>
/// Evaluates the network over all input sets. For every algorithm except
/// BackPropagation it just forward-passes each input via ThinkOnce and
/// returns this brain. For BackPropagation it performs one gradient pass
/// per input set (learning rate 0.2) and returns the adapted brain.
/// </summary>
/// <param name="algo">Adaptation algorithm; None = plain activation.</param>
/// <returns>This brain, or the adapted brain for back propagation.</returns>
public Brain Think(NeuralNetworkAlgorithm algo = NeuralNetworkAlgorithm.None)
{
    if (algo != NeuralNetworkAlgorithm.BackPropagation)
    {
        // forward pass only; nothing to do without input data
        if (Inputs == null) return this;
        Outputs = new List<List<double>>();
        for (int inputcounter = 0; inputcounter < Inputs.Count; inputcounter++) // for every input set
        {
            ThinkOnce(inputcounter, algo);
        }
        return this;
    }
    else // back propagation
    {
        // http://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/
        double eta = 0.2; // learning rate
        //Outputs = new List<List<double>>();
        // NOTE(review): Brain's copy constructor copies members by
        // reference, so newbrain shares Neurons/Synapses with this brain —
        // the updates below also mutate the current instance; confirm that
        // is intended.
        // NOTE(review): Inputs is not null-checked in this branch (it is in
        // the other one) — Inputs.Count below would throw; confirm callers
        // guarantee data is present.
        Brain newbrain = new Brain(this);
        for (int inputcounter = 0; inputcounter < Inputs.Count; inputcounter++) // for every input set
        {
            // forward pass to populate Outputs and neuron inputs
            ThinkOnce(inputcounter);
            // compute neuron errors - from the top, not for input neurons...
            for (int i = newbrain.Neurons.Count - 1; i >= 0; i--)
            {
                for (int j = 0; j < newbrain.Neurons[i].Count; j++)
                {
                    Neuron n = newbrain.Neurons[i][j];
                    if (i == newbrain.Neurons.Count - 1)
                        // output layer: raw difference to the expected value
                        // NOTE(review): no activation derivative is applied
                        // here, unlike in the hidden-layer sum below — the
                        // referenced tutorial includes it; confirm.
                        n.Error = Outputs[inputcounter][j] - Expected[inputcounter][j];
                    else
                    {
                        // hidden layer: sum of downstream errors through the
                        // outgoing synapses, scaled by this neuron's
                        // activation derivative
                        n.Error = 0;
                        foreach (Synapse s in newbrain.Synapses[i])
                        {
                            if (s.Source != n) continue;
                            n.Error += s.Weight * s.Target.Error * n.f.ComputeDerivation(n.Input, n.Slope);
                        }
                    }
                    // and compute augments
                    double augdiff = eta * n.Error;
                    n.Augment -= augdiff;
                }
            }
            // now update the weights
            foreach (int i in newbrain.Synapses.Keys)
            {
                for (int j = 0; j < newbrain.Synapses[i].Count; j++)
                {
                    Synapse s = newbrain.Synapses[i][j];
                    double weidiff = eta * s.Target.Error * s.LastInput;
                    // NOTE(review): adding the whole previous step (momentum
                    // coefficient 1.0) and storing the sum back into LastDiff
                    // lets step sizes accumulate across iterations — typical
                    // momentum uses a factor < 1; confirm this converges.
                    weidiff += s.LastDiff; // momentum-aware
                    s.LastDiff = weidiff;
                    s.Weight -= weidiff;
                }
            }
        }
        return newbrain;
    }
}