/// <summary>
/// Produces an offspring network by mixing axion weights from this network and a partner.
/// Each axion weight is drawn from the stronger parent with probability
/// (Random() * CrossoverBias + 0.50), otherwise from the weaker parent. When
/// <paramref name="ignoreStrength"/> is true, each weight comes from either parent
/// with equal (50/50) probability.
/// </summary>
/// <param name="partner">The second parent. Assumed to share this network's axion topology.</param>
/// <param name="ignoreStrength">When true, parent strength is ignored and genes are picked 50/50.</param>
/// <returns>A new network of origin CROSSOVER, one generation beyond the older parent.</returns>
public Network Crossover(Network partner, bool ignoreStrength = false)
{
    var maxCreationGeneration = (this.CreationGeneration > partner.CreationGeneration)
        ? this.CreationGeneration
        : partner.CreationGeneration;

    var offspring = new Network(this.config.Clone(), NetworkOrigins.CROSSOVER, maxCreationGeneration + 1)
    {
        AncestralGeneration = this.AncestralGeneration
    };

    // Hoisted out of the loop: the parent ordering is invariant per crossover.
    // BUGFIX: previously an exact strength tie made BOTH the "strongest" and
    // "weakest" picks resolve to the partner's weight, silently discarding this
    // parent's genes; with ">=" a tie now produces a normal two-parent mix.
    var selfIsStronger = this.Strength >= partner.Strength;

    for (var i = 0; i < this.Axions.Length; i++)
    {
        for (var j = 0; j < this.Axions[i].Length; j++)
        {
            var selfAxion = this.Axions[i][j];
            var partnerAxion = partner.Axions[i][j];
            var offspringAxion = offspring.Axions[i][j];

            var strongestWeight = (ignoreStrength || selfIsStronger) ? selfAxion.Weight : partnerAxion.Weight;
            var weakestWeight = (ignoreStrength || selfIsStronger) ? partnerAxion.Weight : selfAxion.Weight;

            // Per-axion pick probability; fixed at 0.5 when strength is ignored.
            // (The original drew a Random() for the biased chance and then discarded
            // it in the ignoreStrength case — that wasted draw is removed here.)
            var chance = ignoreStrength
                ? 0.5
                : (NEMath.Random() * this.config.CrossoverBias + 0.50);

            offspringAxion.Weight = (NEMath.Random() < chance) ? strongestWeight : weakestWeight;
        }
    }

    return (offspring);
}
/// <summary>
/// Runs a randomised number of mutation cycles over the network. Each cycle rolls
/// once against the axion mutation rate; on a hit, a single randomly chosen axion
/// is either fully replaced with a fresh random weight (roll below the replacement
/// rate) or nudged by a small random amount. Weights stay clamped to [-1, 1].
/// </summary>
public void Mutate()
{
    var totalCycles = (int)Math.Round(
        NEMath.RandomBetween(1, this.config.MaxMutationCycles, this.config.MutationCycleBias), 0);

    for (var cycle = 0; cycle < totalCycles; cycle++)
    {
        var roll = NEMath.Random();
        if (roll >= this.config.AxionMutationRate)
        {
            // No mutation this cycle.
            continue;
        }

        // Pick one axion uniformly at random (layer first, then axion within it).
        var targetLayer = this.Axions[NEMath.RNG.Next(0, this.Axions.Length)];
        var targetAxion = targetLayer[NEMath.RNG.Next(0, targetLayer.Length)];

        if (roll < this.config.AxionReplacementRate)
        {
            // Full replacement: brand new random weight.
            targetAxion.Weight = NEMath.Clamp(NEMath.RandomBetween(-1, 1), -1.0, 1.0);
        }
        else
        {
            // Perturbation: shift the existing weight by up to ±0.1.
            targetAxion.Weight = NEMath.Clamp(targetAxion.Weight + NEMath.RandomBetween(-0.1, 0.1), -1.0, 1.0);
        }
    }
}
/// <summary>
/// <para>Brings together all networks, and forces the creation of a new diversified pool of networks.</para>
/// <para>Existing clusters will be cleared, and all existing networks (except the strongest) purged.</para>
///
/// <para>The new pool of networks will initially be archived, and will need to be assessed and then clustered.</para>
///
/// <seealso cref="Cluster"/>
/// </summary>
public void Merge()
{
    // Strongest first: index 0 is the network the new pool is primed from.
    var existingNetworks = this.GetAllNetworks().OrderByDescending(o => o.Strength).ToList();

    this.Clusters.Clear();
    this.NetworkArchive.Clear();

    if (existingNetworks.Count < 1)
    {
        return;
    }

    var maxNetworks = this.ClusterConfig.MaxNetworks * this.SuperclusterConfig.MaxClusters;
    var maxClones = Math.Round(maxNetworks * this.ClusterConfig.CloneRatio, 0);

    // Prime population with clones of the strongest mutation.
    // One perfect clone, two imperfect clones, and two heavily mutated clones.
    this.NetworkArchive.Add(existingNetworks[0].Clone(true));
    this.NetworkArchive.Add(existingNetworks[0].Clone());
    this.NetworkArchive.Add(existingNetworks[0].Clone());
    this.NetworkArchive.Add(existingNetworks[0].Clone());
    this.NetworkArchive.Add(existingNetworks[0].Clone());
    this.NetworkArchive[3].HeavilyMutuate(0.25);
    this.NetworkArchive[4].HeavilyMutuate(0.50);

    // Fill the clone quota, strongly biased towards stronger networks.
    while (this.NetworkArchive.Count < maxClones)
    {
        var biasedIndex = (int)Math.Round(NEMath.RandomBetween(0, existingNetworks.Count - 1, 5.0), 0);
        this.NetworkArchive.Add(existingNetworks[biasedIndex].Clone());
    }

    // Fill the remaining quota via crossover, moderately biased towards strong networks.
    // BUGFIX: with only one surviving network, the original loop span forever because
    // the two biased indices were always equal and always rejected. Fall back to
    // cloning the sole survivor when two distinct parents cannot be drawn.
    while (this.NetworkArchive.Count < maxNetworks)
    {
        if (existingNetworks.Count < 2)
        {
            this.NetworkArchive.Add(existingNetworks[0].Clone());
            continue;
        }

        var biasedIndexA = (int)Math.Round(NEMath.RandomBetween(0, existingNetworks.Count - 1, 2.5), 0);
        var biasedIndexB = (int)Math.Round(NEMath.RandomBetween(0, existingNetworks.Count - 1, 2.5), 0);

        // Self-crossover is pointless; redraw.
        if (biasedIndexA == biasedIndexB)
        {
            continue;
        }

        var parentA = existingNetworks[biasedIndexA];
        var parentB = existingNetworks[biasedIndexB];

        this.NetworkArchive.Add(parentA.Crossover(parentB));
    }
}
/// <summary>
/// Randomly perturbs individual axion weights across the whole network: each axion
/// independently has <paramref name="perAxionChance"/> probability of being shifted
/// by up to ±0.5, clamped to [-1, 1].
/// (The method name retains the original "Mutuate" spelling because external callers
/// depend on it.)
/// </summary>
/// <param name="perAxionChance">Per-axion probability, in [0, 1], of a perturbation.</param>
public void HeavilyMutuate(double perAxionChance)
{
    for (var i = 0; i < this.Axions.Length; i++)
    {
        for (var j = 0; j < this.Axions[i].Length; j++)
        {
            var axion = this.Axions[i][j];
            if (NEMath.RNG.NextDouble() < perAxionChance)
            {
                axion.Weight = NEMath.Clamp(axion.Weight + NEMath.RandomBetween(-0.5, 0.5), -1.0, 1.0);
            }
            // FIX: removed a stray empty statement (";") that followed the if block.
        }
    }
}
/// <summary>
/// Evaluates a single neuron's output. Softmax needs the whole layer's input vector,
/// so it is handled here rather than inside the neuron; every other activation type
/// is delegated to the neuron's own GetOutput().
/// </summary>
/// <param name="layerIndex">Index of the layer containing the neuron.</param>
/// <param name="neuronIndex">Index of the neuron within its layer.</param>
/// <param name="forceSoftmax">When true, applies softmax regardless of the neuron's activation type.</param>
/// <returns>The neuron's activation output.</returns>
private double GetNeuronOutput(int layerIndex, int neuronIndex, bool forceSoftmax = false)
{
    // Note: the layer input vector is fetched unconditionally, matching the
    // original evaluation order even for non-softmax neurons.
    var inputVector = this.GetLayerInputVector(layerIndex);
    var neuron = this.Neurons[layerIndex][neuronIndex];

    var useSoftmax = forceSoftmax || neuron.ActivationType == ActivationTypes.Softmax;

    return useSoftmax
        ? NEMath.Softmax(inputVector, neuronIndex)
        : neuron.GetOutput();
}
/// <summary>
/// Triggers an evolution cycle for every cluster. Does not affect archived networks,
/// except that a quota of archive members is offered to each cluster as "traveller"
/// candidates; any candidates the cluster does not accept are returned to the archive.
/// </summary>
public void Evolve()
{
    var travellerQuota = Math.Round(this.ClusterConfig.MaxNetworks * this.ClusterConfig.TravellerRatio, 0);

    foreach (var cluster in this.Clusters)
    {
        // Draw traveller candidates out of the archive (biasing power 0.1 on the
        // index pick), removing each pick so it cannot be offered twice.
        var candidates = new List<Network>();
        while (candidates.Count < travellerQuota && this.NetworkArchive.Count > 0)
        {
            var pick = (int)Math.Round(NEMath.RandomBetween(0, this.NetworkArchive.Count - 1, 0.1), 0);
            candidates.Add(this.NetworkArchive[pick]);
            this.NetworkArchive.RemoveAt(pick);
        }

        cluster.Evolve(ref candidates);

        // Whatever the cluster left in the list goes back into the archive.
        this.NetworkArchive.AddRange(candidates);
    }
}
/// <summary>
/// Assigns every axion a fresh random weight in [-1, 1]. The magnitude is drawn
/// with a bias towards zero; the sign is a fair coin flip.
/// </summary>
public void RandomiseAxions()
{
    foreach (var layer in this.Axions)
    {
        foreach (var axion in layer)
        {
            // Magnitude biased towards zero (biasing power 0.25), then signed.
            var magnitude = NEMath.RandomBetween(0, 1, 0.25);
            var sign = (NEMath.Random() < 0.5) ? -1.0 : 1.0;

            axion.Weight = magnitude * sign;
        }
    }
}
/// <summary>
/// Applies this neuron's activation function to its current input. Softmax is not
/// handled here — it needs the whole layer's input vector and is applied by the
/// network instead. Passthrough and any unrecognised activation type forward the
/// input unchanged.
/// </summary>
/// <returns>The activation output, rounded to six decimal places.</returns>
public double GetOutput()
{
    double result;

    if (this.ActivationType == ActivationTypes.Bias)
    {
        // Bias neurons always emit 1, regardless of input.
        result = 1;
    }
    else if (this.ActivationType == ActivationTypes.TanH)
    {
        result = Math.Tanh(this.Input);
    }
    else if (this.ActivationType == ActivationTypes.Softstep)
    {
        result = NEMath.Softstep(this.Input);
    }
    else if (this.ActivationType == ActivationTypes.Softplus)
    {
        result = NEMath.Softplus(this.Input);
    }
    else if (this.ActivationType == ActivationTypes.ReLU)
    {
        result = NEMath.ReLU(this.Input);
    }
    else if (this.ActivationType == ActivationTypes.LeakyReLU)
    {
        result = NEMath.LeakyReLU(this.Input);
    }
    else
    {
        // Passthrough, and the default for any unknown type.
        result = this.Input;
    }

    return Math.Round(result, 6);
}
/// <summary>
/// <para>Will recluster all networks around the strongest candidates. Any networks that do not make a cluster (due to capping) will be archived.</para>
/// <para>If the network archive overflows, the weakest candidates in the archive will be purged.</para>
///
/// <para>Clustering assumes all networks, including archived, have been evaluated using the same data set.</para>
/// <para>If you have been using different data sets to train different clusters, reevaluate all using a desired superset before clustering.</para>
/// </summary>
/// <param name="clusteringInputsSet">
/// When false, every network's non-bias input neurons are set to 1.0 so the clustering
/// queries below compare all networks against a common stimulus.
/// </param>
public void Cluster(bool clusteringInputsSet = false)
{
    // Strongest first: the head of this list seeds each new cluster.
    var unclusteredNetworks = this.GetAllNetworks().OrderByDescending(o => o.Strength).ToList();

    this.Clusters.Clear();
    this.NetworkArchive.Clear();

    // If the trainer has not set inputs to use for clustering assessments, we will set all the inputs
    // to 1.0 (except for the biasing input at index 0).
    if (!clusteringInputsSet)
    {
        foreach (var network in unclusteredNetworks)
        {
            for (var i = 1; i < network.Neurons[0].Length; i++)
            {
                network.Neurons[0][i].Input = 1.0;
            }
        }
    }

    // Create clusters around the strongest available candidates, until the cluster cap is reached.
    //
    // The process is to grab the strongest network (top of the list), then go through all unclustered
    // networks to see if they are compatible. Once all the unclustered networks have been assessed,
    // the process repeats until the maximum number of clusters is reached.
    while (this.Clusters.Count < this.SuperclusterConfig.MaxClusters && unclusteredNetworks.Count > 0)
    {
        var reference = unclusteredNetworks[0];
        unclusteredNetworks.RemoveAt(0);

        var cluster = new Cluster(Guid.NewGuid().ToString(), this.ClusterConfig, this.NetworkConfig);
        cluster.Networks.Add(reference);

        var referenceVector = reference.Query();

        // Iterate backwards so compatible members can be removed from the list in place.
        for (var i = unclusteredNetworks.Count - 1; i >= 0; i--)
        {
            var candidate = unclusteredNetworks[i];
            var candidateVector = candidate.Query();
            var angle = NEMath.AngleBetweenVectors(referenceVector, candidateVector);

            // A small angle between output vectors means the networks behave similarly
            // on the same stimulus, so they belong in the same cluster.
            if (angle <= this.SuperclusterConfig.ClusteringAngle)
            {
                cluster.Networks.Add(candidate);
                unclusteredNetworks.RemoveAt(i);
            }
        }

        this.Clusters.Add(cluster);
    }

    // If there are any unclustered networks left, add them to the archive.
    this.NetworkArchive.AddRange(unclusteredNetworks);

    // Should already be sorted by strength, but let's make sure:
    this.NetworkArchive = this.NetworkArchive.OrderByDescending(o => o.Strength).ToList();

    // If there are more networks in the archive than allowed, cull the weakest.
    while (this.NetworkArchive.Count > this.SuperclusterConfig.MaxArchivedNetworks)
    {
        this.NetworkArchive.RemoveAt(this.NetworkArchive.Count - 1);
    }

    // If for whatever reason there were not enough clusters created to fill the quota,
    // create clusters with random networks to fill out the population.
    while (this.Clusters.Count < this.SuperclusterConfig.MaxClusters)
    {
        var cluster = new Cluster(Guid.NewGuid().ToString(), this.ClusterConfig, this.NetworkConfig);

        while (cluster.Networks.Count < this.ClusterConfig.MaxNetworks)
        {
            var network = new Network(this.NetworkConfig.Clone(), NetworkOrigins.BOOTSTRAP, 0);
            network.RandomiseAxions();
            cluster.Networks.Add(network);
        }

        this.Clusters.Add(cluster);
    }
}
/// <summary>
/// Runs one evolution cycle for this cluster: sorts by strength, culls the weakest
/// half, refills the opened slots with clones / travellers / crossover offspring
/// according to the configured ratios, then mutates every network except the strongest.
/// </summary>
/// <param name="travellerCandidates">
/// Networks offered by the supercluster. Accepted travellers are removed from this
/// list; the remainder is handed back to the caller via the ref parameter.
/// </param>
public void Evolve(ref List<Network> travellerCandidates)
{
    // Sort networks by strength, strongest first.
    this.Networks = this.Networks.OrderByDescending(o => o.Strength).ToList();

    // Cull the weakest half of the networks.
    if (this.Networks.Count > 1)
    {
        var half = (int)Math.Round((double)this.Networks.Count / 2, 0);
        this.Networks = this.Networks.Take(half).ToList();
    }

    // Fill the opened space with new networks.
    var newNetworks = new List<Network>();

    if (this.Networks.Count < this.clusterConfig.MaxNetworks)
    {
        var clones = this.Networks.Where(o => o.Origin == NetworkOrigins.CLONE).Count();
        var travellers = this.Networks.Where(o => o.Origin == NetworkOrigins.TRAVELLER).Count();
        var others = this.Networks.Where(o => o.Origin != NetworkOrigins.CLONE && o.Origin != NetworkOrigins.TRAVELLER).Count();

        var maxClones = Math.Round(this.clusterConfig.MaxNetworks * this.clusterConfig.CloneRatio, 0);
        var maxTravellers = Math.Round(this.clusterConfig.MaxNetworks * this.clusterConfig.TravellerRatio, 0);
        var maxOthers = this.clusterConfig.MaxNetworks - maxClones - maxTravellers;

        var deltaClones = NEMath.Clamp(maxClones - clones, 0, maxClones);
        // BUGFIX: the traveller delta was clamped against maxClones (copy-paste error);
        // it must be clamped against the traveller quota.
        var deltaTravellers = NEMath.Clamp(maxTravellers - travellers, 0, maxTravellers);
        var deltaOthers = NEMath.Clamp(maxOthers - others, 0, maxOthers);

        // Clone quota: strongly biased towards the strongest networks.
        while (deltaClones > 0)
        {
            var biasedIndex = (int)Math.Round(NEMath.RandomBetween(0, this.Networks.Count - 1, 5.0), 0);
            newNetworks.Add(this.Networks[biasedIndex].Clone());
            deltaClones -= 1;
        }

        // Traveller quota: consume candidates in the order offered.
        // BUGFIX: the delta was never decremented, so this loop previously drained
        // the entire candidate list instead of stopping at the quota.
        while (deltaTravellers > 0 && travellerCandidates.Count > 0)
        {
            newNetworks.Add(travellerCandidates[0]);
            travellerCandidates.RemoveAt(0);
            deltaTravellers -= 1;
        }

        // Crossover quota: moderately biased towards strong networks. Needs at
        // least two networks so two distinct parents can be drawn.
        while (deltaOthers > 0 && this.Networks.Count > 1)
        {
            var biasedIndexA = (int)Math.Round(NEMath.RandomBetween(0, this.Networks.Count - 1, 2.5), 0);
            var biasedIndexB = (int)Math.Round(NEMath.RandomBetween(0, this.Networks.Count - 1, 2.5), 0);

            // Self-crossover is pointless; redraw.
            if (biasedIndexA == biasedIndexB)
            {
                continue;
            }

            var parentA = this.Networks[biasedIndexA];
            var parentB = this.Networks[biasedIndexB];

            newNetworks.Add(parentA.Crossover(parentB));
            deltaOthers -= 1;
        }
    }

    // Mutate existing networks (except the strongest, which is preserved as-is).
    for (var i = 1; i < this.Networks.Count; i++)
    {
        if (NEMath.Random() < this.clusterConfig.HeavyMutationRate)
        {
            this.Networks[i].HeavilyMutuate(0.25);
        }
        else
        {
            this.Networks[i].Mutate();
        }
    }

    // Add the new networks to the cluster.
    this.Networks.AddRange(newNetworks);
}
/// <summary>
/// Returns a random double in the range [min, max].
/// The distribution is shaped by <paramref name="biasingPower"/>, which is forwarded
/// to NEMath.Random — presumably an exponent applied to the uniform sample
/// (1 = uniform); NOTE(review): confirm the direction of the bias against
/// NEMath.Random's definition before relying on it.
/// </summary>
/// <param name="min">Lower bound of the range.</param>
/// <param name="max">Upper bound of the range.</param>
/// <param name="biasingPower">Distribution-shaping parameter passed to NEMath.Random.</param>
public static double RandomBetween(double min, double max, double biasingPower = 1)
{
    var span = max - min;
    return min + (NEMath.Random(biasingPower) * span);
}