// Wires an input neuron to an output neuron by registering a new synapse
// with the given connection weight.
// NOTE(review): "Dentrite" is the Synapse property's actual (misspelled)
// name — presumably "Dendrite" was intended; renaming it would be a
// breaking change outside this method's scope.
public void Connect(Neuron from, Neuron to, double weight)
{
    synapses.Add(new Synapse
    {
        Axon = from,
        Dentrite = to,
        Weight = weight
    });
}
// Forward pass: sets each output neuron's value to the weighted sum of all
// input neuron values, using the weight of the synapse connecting each pair.
// Assumes every (input, output) pair has been wired via Connect — GetSynapse
// is expected to return a valid synapse for each combination.
public void Evaluate()
{
    foreach (Neuron target in outputs)
    {
        double weightedSum = 0.0;
        foreach (Neuron source in inputs)
        {
            Synapse link = GetSynapse(source, target);
            weightedSum += link.Weight * source.Value;
        }
        target.Value = weightedSum;
    }
}
/// <summary>
/// Trains the single-layer linear network with batch gradient descent on a
/// squared-error loss, iterating until the mean error stops improving by
/// more than a fixed precision threshold.
/// </summary>
/// <param name="data">Training points; each is assumed to supply one X value
/// per input neuron and one T (target) value per output neuron — TODO confirm
/// against the TrainingData declaration.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="data"/> is empty.</exception>
public void Train(TrainingData[] data)
{
    if (data == null)
    {
        throw new ArgumentNullException(nameof(data));
    }
    if (data.Length == 0)
    {
        // data.Length is used as a divisor below; an empty set would yield
        // NaN and silently end training after a single pass.
        throw new ArgumentException("At least one training point is required.", nameof(data));
    }

    // Initialize weights to random values in [-1.0, 1.0).
    // NOTE(review): the fixed seed (0) makes every training run start from
    // identical weights — presumably intentional for reproducibility; use a
    // varying seed if different initializations are desired.
    Random r = new Random(0);
    foreach (Synapse s in synapses)
    {
        s.Weight = r.NextDouble() * 2 - 1.0;
    }

    // Minimize the mean squared error until it plateaus.
    double learningRate = 0.01;
    double precision = 0.01;
    double lastError;
    double currentError = double.MaxValue;
    do
    {
        lastError = currentError;
        currentError = 0.0;

        // Reset the accumulated gradient for this batch.
        foreach (Synapse s in synapses)
        {
            s.dW = 0.0;
        }

        // For each training point...
        foreach (TrainingData d in data)
        {
            // ...and each output neuron:
            for (int j = 0; j < outputs.Count; j++)
            {
                // Forward pass: Yj = sum_i(Wij * Xi).
                outputs[j].Value = 0.0;
                for (int i = 0; i < inputs.Count; i++)
                {
                    Synapse s = GetSynapse(inputs[i], outputs[j]);
                    outputs[j].Value += s.Weight * d.X[i];
                }

                // Accumulate this point's squared-error contribution.
                currentError += Math.Pow(d.T[j] - outputs[j].Value, 2.0);

                // Accumulate the (un-normalized) weight gradient for each
                // synapse feeding output j: dW += (Tj - Yj) * Xi. The
                // conventional factor of 2 is absorbed into learningRate.
                for (int i = 0; i < inputs.Count; i++)
                {
                    Synapse s = GetSynapse(inputs[i], outputs[j]);
                    s.dW += (d.T[j] - outputs[j].Value) * d.X[i];
                }
            }
        }

        // Average the squared error over the training set.
        currentError /= data.Length;

        // Step each weight along its accumulated gradient.
        foreach (Synapse s in synapses)
        {
            s.Weight += learningRate * s.dW;
        }
    } while (Math.Abs(currentError - lastError) > precision);
}