// Build a neural network with all inputs at zero and random weights
public Neural_network(List<int> n_neuronIlayer)
{
    Neuron tmp;
    List<Neuron> tmp_layer;
    network = new List<List<Neuron>>();
    int n_Layer = n_neuronIlayer.Count;
    rand = new System.Random();
    for (int i = 0; i < n_Layer; i++)
    {
        tmp_layer = new List<Neuron>();
        for (int j = 0; j < n_neuronIlayer[i]; j++)
        {
            if (i == 0) // special treatment for the first layer, which receives the network's input
            {
                // in this case the weights are a list holding a single element valued at 1
                List<double> n = new List<double>();
                n.Add(1);
                tmp = new Neuron(n, rand, false);
                //tmp = new Neuron(n, 0); // and the bias is set to 0
            }
            else
            {
                tmp = new Neuron(n_neuronIlayer[i - 1], rand); // fill the inputs with n_neuronIlayer[i - 1] random weights
            }
            tmp_layer.Add(tmp);
        }
        this.network.Add(tmp_layer);
    }
    output = new List<bool>();
}
// If specific weights are needed, use this constructor
public Neural_network(List<List<List<double>>> network_weight)
{
    Neuron tmp;
    List<Neuron> tmp_layer;
    List<double> tmp_weight;
    network = new List<List<Neuron>>();
    rand = new System.Random();
    for (int i = 0; i < network_weight.Count; i++)
    {
        tmp_layer = new List<Neuron>();
        for (int j = 0; j < network_weight[i].Count; j++)
        {
            tmp_weight = new List<double>(network_weight[i][j]);
            tmp = new Neuron(tmp_weight, rand);
            tmp_layer.Add(tmp);
        }
        this.network.Add(tmp_layer);
    }
    output = new List<bool>();
}
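// Usage sketch (an assumption for illustration, not part of the original source): with the
// two constructors above, a network can either be initialised with random weights from a
// layer-size list, or rebuilt from an explicit weight structure (layer -> neuron -> input weights).
public static Neural_network BuildExampleNetworks()
{
    // random initialisation: 2 inputs, a hidden layer of 3 neurons, 1 output neuron
    Neural_network randomNet = new Neural_network(new List<int> { 2, 3, 1 });

    // explicit weights for a toy 1-1 network: the input neuron keeps its fixed weight of 1,
    // the single output neuron gets one input weight of 0.5
    List<List<List<double>>> weights = new List<List<List<double>>>
    {
        new List<List<double>> { new List<double> { 1.0 } },
        new List<List<double>> { new List<double> { 0.5 } }
    };
    Neural_network explicitNet = new Neural_network(weights);

    return explicitNet;
}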
// Append an extra neuron to the layer at index layer_num
public void addNeuron(int layer_num, Neuron neur)
{
    network[layer_num].Add(neur);
}
// A synapse links the owning neuron to a destination neuron with a given weight
public Synapse(Neuron destination, double weight)
{
    Destination = destination;
    Weight = weight;
}
// Record the value received from an upstream neuron, then forward the activation
// (ActivatedValue is assumed to be recomputed from InputValues elsewhere in the class)
// along every outgoing synapse, scaled by that synapse's weight
public void Activate(double value, Neuron from)
{
    InputValues[from] = value;
    Outputs.ForEach(o => o.Destination.Activate(ActivatedValue * o.Weight, this));
}
// Create a weighted synapse to the next neuron; returning this allows chained calls
public Neuron Connect(Neuron next, double weight)
{
    Outputs.Add(new Synapse(next, weight));
    return this;
}
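// Usage sketch (an assumption for illustration, not part of the original source): Connect
// returns the source neuron, so several outgoing synapses can be chained from the same neuron.
// The parameterless Neuron constructor and a dictionary-backed InputValues are assumptions
// made only for this example.
public static void ConnectExample()
{
    Neuron source = new Neuron();
    Neuron input = new Neuron();
    Neuron hiddenA = new Neuron();
    Neuron hiddenB = new Neuron();

    input.Connect(hiddenA, 0.8)    // input -> hiddenA with weight 0.8
         .Connect(hiddenB, -0.3);  // still called on input, so input -> hiddenB with weight -0.3

    input.Activate(1.0, source);   // push a value in; Activate forwards it through both synapses
}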
// Copy constructor: duplicate another neuron's input list and its fire value
public Neuron(Neuron N)
{
    inputs = new List<Input>(N.inputs);
    fire_val = N.fire_val;
}