// ====== METHODS ================== //

// Given two indivs A and B, this method chooses a random layer and a random neuron position
// in that layer, then does a crossover by cutting A and B at that position and mixing them
// together; it returns a list containing the 2 mixed indivs.
public List<indiv> crossover(indiv A, indiv B)
{
    // In the first layer all weights are 1 and can't change, so there is no need to cross over there.
    int layer_num_Crossingover = rand.Next(1, A.getNbLayer() - 1);
    int Neuron_num_Crossingover = rand.Next(0, A.getNbNeuronAtLayer(layer_num_Crossingover) - 1);

    // Build the two mixed layers at the cut: A's neurons up to the cut followed by B's from the cut onward, and vice versa.
    List<Nn.Neuron> layer_CrossingoverA = new List<Nn.Neuron>(A.getLayer(layer_num_Crossingover).GetRange(0, Neuron_num_Crossingover));
    layer_CrossingoverA.AddRange(B.getLayer(layer_num_Crossingover).GetRange(Neuron_num_Crossingover, B.getNbNeuronAtLayer(layer_num_Crossingover) - Neuron_num_Crossingover));

    List<Nn.Neuron> layer_CrossingoverB = new List<Nn.Neuron>(B.getLayer(layer_num_Crossingover).GetRange(0, Neuron_num_Crossingover));
    layer_CrossingoverB.AddRange(A.getLayer(layer_num_Crossingover).GetRange(Neuron_num_Crossingover, A.getNbNeuronAtLayer(layer_num_Crossingover) - Neuron_num_Crossingover));

    // Child A2: A's layers before the cut layer, the mixed layer, then B's layers after it.
    List<List<Nn.Neuron>> NnA2 = new List<List<Nn.Neuron>>(A.getLayersRange(0, layer_num_Crossingover));
    NnA2.Add(layer_CrossingoverA);
    NnA2.AddRange(B.getLayersRange(layer_num_Crossingover + 1, B.getNbLayer()));

    // Child B2: B's layers before the cut layer, the mixed layer, then A's layers after it.
    List<List<Nn.Neuron>> NnB2 = new List<List<Nn.Neuron>>(B.getLayersRange(0, layer_num_Crossingover));
    NnB2.Add(layer_CrossingoverB);
    NnB2.AddRange(A.getLayersRange(layer_num_Crossingover + 1, A.getNbLayer()));

    indiv A2 = new indiv(NnA2);
    indiv B2 = new indiv(NnB2);

    List<indiv> newIndivs = new List<indiv>();
    newIndivs.Add(A2);
    newIndivs.Add(B2);
    return newIndivs;
}
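// Worked example (illustrative values only, not taken from the project): with a crossover
// layer of 5 neurons and a cut at Neuron_num_Crossingover = 2, child A2's mixed layer is
// [A0, A1, B2, B3, B4] and child B2's mixed layer is [B0, B1, A2, A3, A4]; the layers
// before the cut layer come from one parent and the layers after it from the other.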
// Chooses a random weight of a random neuron of a random layer and shifts it by a random
// amount drawn from [-1, 1), clamping the resulting weight to [-1, 1].
public indiv mutate(indiv A)
{
    // Weights of the first layer must stay 1, so layer 0 is never mutated.
    int layer_num_mutation = rand.Next(1, A.getNbLayer());
    int Neuron_num_mutation = rand.Next(0, A.getNbNeuronAtLayer(layer_num_mutation));
    int weigth_num_mutation = rand.Next(0, A.getNeuron(layer_num_mutation, Neuron_num_mutation).getNbWeigths());

    double mutation = rand.NextDouble() * 2 - 1; // random shift in [-1, 1)

    indiv mutator = new indiv(A, -1);
    double newWeigth = A.getNeuron(layer_num_mutation, Neuron_num_mutation).getWeigth(weigth_num_mutation) + mutation;

    // Keep the mutated weight inside [-1, 1].
    if (newWeigth > 1)
        newWeigth = 1;
    else if (newWeigth < -1)
        newWeigth = -1;

    mutator.setDataAtPos(layer_num_mutation, Neuron_num_mutation, weigth_num_mutation, newWeigth);
    return mutator;
}
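// Illustrative sketch only, not part of the original class: one way crossover and mutate
// could be combined into a single breeding step. The parent-selection strategy below
// (uniform random picks from a caller-provided parent pool) is an assumption made for
// the example, not this project's actual selection logic.
public List<indiv> breedSketch(List<indiv> parentPool, int targetSize)
{
    List<indiv> nextGeneration = new List<indiv>();
    while (nextGeneration.Count < targetSize)
    {
        // Pick two parents at random from the pool (assumed to already be fitness-filtered).
        indiv parentA = parentPool[rand.Next(0, parentPool.Count)];
        indiv parentB = parentPool[rand.Next(0, parentPool.Count)];

        // Cross them, then mutate both children before adding them to the next generation.
        List<indiv> children = crossover(parentA, parentB);
        nextGeneration.Add(mutate(children[0]));
        if (nextGeneration.Count < targetSize)
            nextGeneration.Add(mutate(children[1]));
    }
    return nextGeneration;
}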
public void Init(indiv saved_neuroNet, GameObject mySelf, bool color = false)
{
    neuroNet = saved_neuroNet;
    this.mySelf = mySelf;
    this.color = color;

    // Error handling: the saved network must have exactly 18 neurons in its input layer.
    if (saved_neuroNet.getNbNeuronAtLayer(0) != 18)
    {
        Debug.Log("Initialized with a neural network with wrong first layer size: " + saved_neuroNet.getNbNeuronAtLayer(0));
        throw new System.Exception("Wrong first layer size: " + saved_neuroNet.getNbNeuronAtLayer(0));
    }

    isInit = true;
}
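// Illustrative usage sketch (the component name "NnAgent" and the variable names are
// hypothetical, not from the project): a spawner script might initialize an agent with a
// saved network like this:
//     NnAgent agent = spawnedAgent.GetComponent<NnAgent>();
//     agent.Init(bestIndiv, spawnedAgent, color: true);
// Init throws if the saved network's input layer does not have exactly 18 neurons.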