/// <summary>
/// Runs the selected training/activation algorithm for the configured number of
/// epochs, refreshing the result views (progress, outputs, charts, grids) after
/// each epoch, and logs the final global error.
/// </summary>
/// <param name="sender">Event source (the Run button).</param>
/// <param name="e">Event data (unused).</param>
private void btnNeuralRun_Click(object sender, EventArgs e)
{
    ges = new Dictionary<int, double>(); // Global Errors, keyed by 1-based epoch number
    barNeuralMatch.Value = 0;

    // Input and expected-output sample counts must match (blank lines ignored);
    // silently bail out otherwise.
    if (txtNeuralInput.Lines.Count((s) => s.Trim() != "") != txtNeuralExpected.Lines.Count((s) => s.Trim() != ""))
        return;

    // update NN
    Update();

    bool shouldloop = true;

    //do the magic
    for (int i = 0; i < (int)numNeuralEpoch.Value; i++)
    {
        switch (cmbNeuralAlgorithm.Text)
        {
            case "Activate": // basic computation
                {
                    brain.Think();
                    shouldloop = false; // no reason to repeat
                    break;
                }
            case "SOMA": // basic computation
                {
                    // Build a population of 50 randomized configurations plus the
                    // current one, evolve it, then keep the configuration with the
                    // lowest global error (GE).
                    List<Neural.Configuration> population = new List<Neural.Configuration>();
                    for (int j = 0; j < 50; j++)
                        population.Add(new Neural.Configuration(brain, true));
                    // also use actual configuration
                    population.Add(new Neural.Configuration(brain));
                    population = new SOMA().Run(population);
                    //use the best one
                    population.Sort((x, y) => x.GE.CompareTo(y.GE));
                    brain.UpdateConfiguration(population[0]);
                    brain.Think();
                    break;
                }
            case "Fixed Increments":
                {
                    brain.Think(NeuralNetworkAlgorithm.FixedIncrement);
                    brain.Think(); // check again (no adaptation) to update values
                    break;
                }
            case "Back Propagation":
                {
                    brain = brain.Think(NeuralNetworkAlgorithm.BackPropagation);
                    brain.Think(); // to compute final outputs
                    break;
                }
            default:
                {
                    txtLog.AppendText(String.Format("'{0}' algorithm is not implemented.\r\n", cmbNeuralAlgorithm.Text));
                    shouldloop = false;
                    break;
                }
        }

        // Refresh every result view for this epoch.
        // NOTE(review): scaling by 10000 assumes barNeuralProgress.Maximum == 10000 — confirm in the designer.
        barNeuralProgress.Value = (i + 1) * 10000 / (int)numNeuralEpoch.Value;
        txtNeuralOutput.Text = brain.GetDataStr(brain.Outputs);
        barNeuralMatch.Value = (int)(brain.ComputeMatch() * 100);
        UpdateNeuronDataGrid();
        txtNeuralSynapses.Text = brain.GetSynapsesStr();
        UpdateChartLSP();
        ges.Add(i + 1, brain.GetGlobalError());
        UpdateChartStatus();
        InvalidateAll();

        // Stop early on one-shot algorithms or a perfect match; pad the error
        // series with the final value so the status chart still spans all epochs.
        if (!shouldloop || brain.ComputeMatch() == 1)
        {
            double ge = brain.GetGlobalError();
            for (int j = i + 1; j < numNeuralEpoch.Value; j++)
                ges.Add(j + 1, ge);
            barNeuralProgress.Value = barNeuralProgress.Maximum;
            UpdateChartStatus();
            break;
        }

        // Throttle between epochs so the UI updates stay visible (skipped after
        // the last epoch, and when the computed delay is negligible).
        if (i != numNeuralEpoch.Value - 1)
        {
            int sleeptime = (numNeuralEpoch.Value > 10) ? (int)(1000 / numNeuralEpoch.Value) : 200;
            if (sleeptime > 16)
                Thread.Sleep(sleeptime); // NOTE(review): blocks the UI thread; consider async/await or a timer
        }
    }

    // BUG FIX: this line used "\n" while every other AppendText call uses "\r\n";
    // a bare "\n" does not render as a line break in a Windows Forms TextBox.
    txtLog.AppendText(String.Format("Finished with Global Error of {0}.\r\n", brain.GetGlobalError()));
}
/// <summary>
/// Loads a previously serialized <see cref="Brain"/> from a user-selected .ann
/// file and refreshes the UI (inputs, expected outputs, layer grid, synapses).
/// </summary>
/// <param name="sender">Event source (the Load button).</param>
/// <param name="e">Event data (unused).</param>
private void btnNeuralLoadConfiguration_Click(object sender, EventArgs e)
{
    openFileDialog1.FileName = "network.ann";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        // SECURITY NOTE(review): BinaryFormatter is insecure when fed untrusted
        // input and is removed in .NET 9. Kept here because existing .ann files
        // use this format; migrating to a safe serializer would break them.
        IFormatter formatter = new BinaryFormatter();

        // BUG FIX: the stream was previously closed only on the success path,
        // so a failed Deserialize leaked the file handle. 'using' guarantees
        // disposal on every path.
        using (Stream stream = new FileStream(openFileDialog1.FileName, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            brain = (Brain)formatter.Deserialize(stream);
        }

        //show actual data
        txtNeuralInput.Text = brain.GetDataStr(brain.Inputs);
        txtNeuralExpected.Text = brain.GetDataStr(brain.Expected);

        gridNeuralLayers.Rows.Clear();
        for (int i = 0; i < brain.Neurons.Count; i++)
        {
            // Re-create one grid row per layer, matching the layer's activation
            // function by comparing its string form against the known function list.
            AddNewLayer(
                brain.Neurons[i].Count,
                functionlist.FirstOrDefault(x => x.Value.ToString() == brain.Neurons[i][0].f.ToString()).Key);
        }

        barNeuralMatch.Value = 0;
        UpdateNeuronDataGrid();
        txtNeuralSynapses.Text = brain.GetSynapsesStr();
    }
}