public void BuildHistogramData()
{
    IRandomSource rng = RandomDefaults.CreateRandomSource(0);

    int iters = 10_000;
    double[] vals = new double[iters];

    for (int i = 0; i < iters; i++) {
        vals[i] = 1000.0 + (rng.NextDouble() * 2.0) - 1.0;
    }

    // Construct a histogram on the array of values.
    HistogramData hist = NumericsUtils.BuildHistogramData(vals, 8);

    // We expect samples to be approximately evenly distributed over the histogram buckets.
    for (int i = 0; i < hist.FrequencyArray.Length; i++) {
        Assert.True(hist.FrequencyArray[i] > (iters / 8) * 0.8);
    }

    // We expect min and max to be close to 999 and 1001 respectively.
    Assert.True(hist.Max <= 1001.0 && hist.Max > 1001.0 - 0.1);
    Assert.True(hist.Min >= 999.0 && hist.Min < 999.0 + 0.1);
}
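// For context: BuildHistogramData evidently computes the min and max of the input values and
// partitions [Min, Max] into a fixed number of buckets, counting the samples that fall into each.
// A minimal sketch of that bucketing, assuming equal-width buckets (the real implementation may
// differ in detail; BuildFrequenciesSketch is a hypothetical helper, not the library API):
public static int[] BuildFrequenciesSketch(double[] vals, int bucketCount, out double min, out double max)
{
    // Find the value range; assumes at least two distinct values, so that max > min.
    min = double.PositiveInfinity;
    max = double.NegativeInfinity;
    foreach (double v in vals)
    {
        if (v < min) { min = v; }
        if (v > max) { max = v; }
    }

    int[] freqArr = new int[bucketCount];
    double bucketWidth = (max - min) / bucketCount;

    foreach (double v in vals)
    {
        // Clamp so that v == max lands in the last bucket rather than overflowing the array.
        int idx = Math.Min(bucketCount - 1, (int)((v - min) / bucketWidth));
        freqArr[idx]++;
    }
    return freqArr;
}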
private static void UpdateSpeciesTargetSizes(
    NeatPopulation<T> pop, IRandomSource rng)
{
    double totalMeanFitness = pop.NeatPopulationStats.SumSpeciesMeanFitness;
    int totalTargetSizeInt = 0;

    // Handle specific case where all genomes/species have a zero fitness.
    // Assign all species an equal targetSize.
    if (0.0 == totalMeanFitness)
    {
        totalTargetSizeInt = CalcSpeciesTargetSizesInner_ZeroTotalMeanFitness(pop, rng);
    }
    else
    {
        // Calculate the new target size of each species using fitness sharing.
        double popSizeReal = pop.GenomeList.Count;
        Species<T>[] speciesArr = pop.SpeciesArray;

        // The size of each specie is based on its fitness relative to the other species.
        for (int i = 0; i < speciesArr.Length; i++)
        {
            SpeciesStats stats = speciesArr[i].Stats;
            stats.TargetSizeReal = (stats.MeanFitness / totalMeanFitness) * popSizeReal;

            // Discretize targetSize (stochastic rounding).
            stats.TargetSizeInt = (int)NumericsUtils.ProbabilisticRound(stats.TargetSizeReal, rng);

            // Total up discretized target sizes.
            totalTargetSizeInt += stats.TargetSizeInt;
        }
    }

    // Adjust each species' target allocation such that the sum total matches the required population size.
    AdjustSpeciesTargetSizes(pop, totalTargetSizeInt, rng);
}
public void TestGaussianDelta()
{
    var strategy = DeltaWeightMutationStrategy.CreateGaussianDeltaStrategy(
        new SelectAllStrategy(), 1.0);

    IRandomSource rng = RandomDefaults.CreateRandomSource(0);

    int iters = 100_000;
    double[] weightArr = new double[iters];
    for (int i = 0; i < iters; i++) {
        weightArr[i] = 1000.0;
    }

    strategy.Invoke(weightArr, rng);

    // Construct a histogram on the array of weights.
    HistogramData hist = NumericsUtils.BuildHistogramData(weightArr, 8);

    // We expect min and max to be about 995.5 and 1004.5 respectively
    // (but they could be further from the mean of 1000, with no bound).
    Assert.IsTrue(hist.Max >= 1002.0);
    Assert.IsTrue(hist.Min <= 998.0);

    TestMean(weightArr, 1000.0);
}
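// The Gaussian delta strategy perturbs each selected weight by a sample drawn from N(0, stdDev).
// With 100,000 samples the extremes typically land around 4.5 standard deviations from the mean,
// which is why the assertions above use the looser bounds 998.0 and 1002.0. A minimal sketch of
// the perturbation, assuming a Box-Muller transform for the Gaussian sample (the library's actual
// sampler may use a different method, e.g. the ziggurat algorithm); System.Random stands in for
// IRandomSource here:
public static void ApplyGaussianDeltasSketch(double[] weightArr, double stdDev, Random rng)
{
    for (int i = 0; i < weightArr.Length; i++)
    {
        // Box-Muller: convert two uniform samples into one standard normal sample.
        double u1 = 1.0 - rng.NextDouble(); // in (0, 1]; avoids Math.Log(0).
        double u2 = rng.NextDouble();
        double gaussian = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);

        // Additive perturbation: the mean of the weights is preserved; the spread grows with stdDev.
        weightArr[i] += gaussian * stdDev;
    }
}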
/// <summary>
/// Sets up the initial connections for a brand new Net.
/// </summary>
protected void InitializeConnections()
{
    ConnectionTemplate[] conTmpl = new ConnectionTemplate[Inputs * Outputs];

    // Create templates for all possible connections.
    for (int i = 0, c = 0; i < Inputs; i++)
    {
        for (int o = 0; o < Outputs; o++)
        {
            conTmpl[c++] = new ConnectionTemplate(generator.Next, i, o);
        }
    }

    // Mix them up.
    SortUtils.Shuffle(conTmpl, _rng);

    // Choose a connection count of roughly 7.5% of all possible connections (at least 2),
    // using probabilistic rounding to handle the fractional part.
    int connectionCount = (int)NumericsUtils.ProbabilisticRound(conTmpl.Length * 0.075, _rng);
    connectionCount = System.Math.Max(2, connectionCount);

    // Create that many actual connections.
    for (int i = 0; i < connectionCount; i++)
    {
        Node input = Neurons[i];
        Node output = Neurons[i + Inputs];
        Connection con = new Connection(
            _rng.NextUInt(), input, output,
            _rng.NextDouble() * 2.0 - 1.0,
            _rng.NextDouble() * -5.0)
        {
            Enabled = true
        };
        Connections.Add(con);
    }
}
public void TestUniformDelta()
{
    double weightScale = 5.0;
    var strategy = DeltaWeightMutationStrategy.CreateUniformDeltaStrategy(
        new SelectAllStrategy(), weightScale);

    IRandomSource rng = RandomDefaults.CreateRandomSource(0);

    int iters = 10_000;
    double[] weightArr = new double[iters];
    for (int i = 0; i < iters; i++) {
        weightArr[i] = 1000.0;
    }

    strategy.Invoke(weightArr, rng);

    // Construct a histogram on the array of weights.
    HistogramData hist = NumericsUtils.BuildHistogramData(weightArr, 8);

    // We expect samples to be approximately evenly distributed over the histogram buckets.
    for (int i = 0; i < hist.FrequencyArray.Length; i++) {
        Assert.IsTrue(hist.FrequencyArray[i] > (iters / 8) * 0.8);
    }

    // We expect min and max to be close to 1000-weightScale and 1000+weightScale respectively.
    Assert.IsTrue(hist.Max <= (1000 + weightScale) && hist.Max > (1000 + weightScale) - 0.1);
    Assert.IsTrue(hist.Min >= (1000 - weightScale) && hist.Min < (1000 - weightScale) + 0.1);
}
private static int CalcSpeciesTargetSizes(
    NeatPopulation<T> pop, double totalMeanFitness, IRandomSource rng)
{
    // Handle specific case where all genomes/species have a zero fitness.
    // Assign all species an equal targetSize.
    if (0.0 == totalMeanFitness)
    {
        return CalcSpeciesTargetSizes_ZeroTotalMeanFitness(pop, rng);
    }

    // Calculate the new target size of each species using fitness sharing.
    double popSizeReal = pop.GenomeList.Count;
    Species<T>[] speciesArr = pop.SpeciesArray;
    int totalTargetSizeInt = 0;

    // The size of each specie is based on its fitness relative to the other species.
    for (int i = 0; i < speciesArr.Length; i++)
    {
        SpeciesStats stats = speciesArr[i].Stats;
        stats.TargetSizeReal = (stats.MeanFitness / totalMeanFitness) * popSizeReal;

        // Discretize targetSize (stochastic rounding).
        stats.TargetSizeInt = (int)NumericsUtils.ProbabilisticRound(stats.TargetSizeReal, rng);

        // Total up discretized target sizes.
        totalTargetSizeInt += stats.TargetSizeInt;
    }

    return totalTargetSizeInt;
}
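// A hypothetical worked example of the fitness-sharing allocation above (the values are
// illustrative, not taken from the library): with a population of 100 and species mean fitnesses
// of 6.0, 3.0 and 1.0 (totalMeanFitness = 10.0), the real-valued targets are 60.0, 30.0 and 10.0.
// When the targets come out fractional, ProbabilisticRound keeps the expected total allocation
// equal to the population size.
public static void FitnessSharingExample()
{
    double popSizeReal = 100.0;
    double[] meanFitnessArr = { 6.0, 3.0, 1.0 };
    double totalMeanFitness = 6.0 + 3.0 + 1.0;

    foreach (double meanFitness in meanFitnessArr)
    {
        // Each species' share of the population is proportional to its mean fitness.
        double targetSizeReal = (meanFitness / totalMeanFitness) * popSizeReal;
        Console.WriteLine(targetSizeReal); // 60, 30, 10.
    }
}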
public void GaussianReset()
{
    var strategy = ResetWeightMutationStrategy<double>.CreateGaussianResetStrategy(
        new SelectAllStrategy(), 1.0);

    IRandomSource rng = RandomDefaults.CreateRandomSource(0);

    int iters = 100_000;
    double[] weightArr = new double[iters];
    for (int i = 0; i < iters; i++) {
        weightArr[i] = 123.0;
    }

    strategy.Invoke(weightArr, rng);

    // Construct a histogram on the array of weights.
    HistogramData hist = NumericsUtils.BuildHistogramData(weightArr, 8);

    // We expect min and max to be about -4.5 and +4.5 respectively
    // (but they could be higher in magnitude, with no bound).
    Assert.True(hist.Max >= 3.8);
    Assert.True(hist.Min <= -3.8);

    TestMean(weightArr);
    TestStandardDeviation(weightArr);
}
/// <summary>
/// Handle specific case where all genomes/species have a zero fitness.
/// </summary>
private static int CalcSpeciesTargetSizesInner_ZeroTotalMeanFitness(NeatPopulation<T> pop, IRandomSource rng)
{
    // Assign all species an equal targetSize.
    Species<T>[] speciesArr = pop.SpeciesArray;
    double popSizeReal = pop.GenomeList.Count;
    double targetSizeReal = popSizeReal / speciesArr.Length;

    // Keep a total of all allocated target sizes; typically this will vary slightly from the
    // required target population size due to rounding of each real valued target size.
    int totalTargetSizeInt = 0;

    for (int i = 0; i < speciesArr.Length; i++)
    {
        SpeciesStats stats = speciesArr[i].Stats;
        stats.TargetSizeReal = targetSizeReal;

        // Stochastic rounding will result in equal allocation if targetSizeReal is a whole
        // number, otherwise it will help to distribute allocations fairly.
        stats.TargetSizeInt = (int)NumericsUtils.ProbabilisticRound(targetSizeReal, rng);

        // Total up discretized target sizes.
        totalTargetSizeInt += stats.TargetSizeInt;
    }

    return totalTargetSizeInt;
}
private static void AllocateEliteSelectionOffspringCounts(
    Species<T> species,
    NeatEvolutionAlgorithmSettings eaSettings,
    bool isBestGenomeSpecies,
    IRandomSource rng)
{
    SpeciesStats stats = species.Stats;

    // Special case - zero target size.
    if (stats.TargetSizeInt == 0)
    {
        Debug.Assert(!isBestGenomeSpecies, "Zero target size assigned to specie that contains the best genome.");
        stats.EliteSizeInt = 0;
        stats.OffspringCount = 0;
        stats.OffspringAsexualCount = 0;
        stats.OffspringSexualCount = 0;
        stats.SelectionSizeInt = 0;
        return;
    }

    // Calculate the elite size as a proportion of the current species size.
    // Note. We discretize the real size with a probabilistic handling of the fractional part.
    double eliteSizeReal = species.GenomeList.Count * eaSettings.ElitismProportion;
    int eliteSizeInt = (int)NumericsUtils.ProbabilisticRound(eliteSizeReal, rng);

    // Ensure eliteSizeInt is no larger than the current target size. (I.e. the value was
    // calculated as a proportion of the current size, not the new target size.)
    stats.EliteSizeInt = Math.Min(eliteSizeInt, stats.TargetSizeInt);

    // Special case: ensure the species with the best genome preserves that genome.
    // Note. This is done even for a target size of one, which would mean that no offspring are
    // produced from the best genome, apart from the (usually small) chance of a cross-species mating.
    if (isBestGenomeSpecies && stats.EliteSizeInt == 0)
    {
        stats.EliteSizeInt = 1;
    }

    // Determine how many offspring to produce for the species.
    stats.OffspringCount = stats.TargetSizeInt - stats.EliteSizeInt;

    // Determine the split between asexual and sexual reproduction. Again using probabilistic
    // rounding to compensate for any rounding bias.
    double offspringAsexualCountReal = stats.OffspringCount * eaSettings.OffspringAsexualProportion;
    stats.OffspringAsexualCount = (int)NumericsUtils.ProbabilisticRound(offspringAsexualCountReal, rng);
    stats.OffspringSexualCount = stats.OffspringCount - stats.OffspringAsexualCount;

    // Calculate the selectionSize: the number of the species' fittest genomes that are selected
    // from to create offspring.
    // We ensure this is at least one; if TargetSizeInt is zero then it doesn't matter, because no
    // genomes will be selected from this species to produce offspring (except for cross-species
    // mating), hence the minimum of one is a useful general approach.
    double selectionSizeReal = species.GenomeList.Count * eaSettings.SelectionProportion;
    stats.SelectionSizeInt = Math.Max(1, (int)NumericsUtils.ProbabilisticRound(selectionSizeReal, rng));
}
public void TestProbabilisticRound()
{
    IRandomSource rng = RandomDefaults.CreateRandomSource(0);

    for (int i = 0; i < 1_000_000; i++)
    {
        double valReal = 100 * rng.NextDouble();
        double valRound = NumericsUtils.ProbabilisticRound(valReal, rng);
        Assert.IsTrue(valRound == Math.Floor(valReal) || valRound == Math.Ceiling(valReal));
    }
}
public void TestProbabilisticRound()
{
    var rng = new XorShiftRandom(0);

    for (int i = 0; i < 1000000; i++)
    {
        double valReal = 100 * rng.NextDouble();
        double valRound = NumericsUtils.ProbabilisticRound(valReal, rng);
        Assert.IsTrue(valRound == Math.Floor(valReal) || valRound == Math.Ceiling(valReal));
    }
}
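// Both tests above assert the defining property of probabilistic (stochastic) rounding: the result
// is always floor(val) or ceil(val), and the fractional part is used as the probability of rounding
// up, so the expected value of the result equals the input. A minimal sketch of that behaviour
// (an assumption; the real NumericsUtils method may handle edge cases such as negative values
// differently):
public static double ProbabilisticRoundSketch(double val, Random rng)
{
    double floor = Math.Floor(val);
    double frac = val - floor;

    // Round up with probability equal to the fractional part, so that
    // E[ProbabilisticRoundSketch(val)] == val.
    return (rng.NextDouble() < frac) ? floor + 1.0 : floor;
}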
/// <summary>
/// Select a subset of items from a superset of a given size.
/// </summary>
/// <param name="supersetCount">The size of the superset to select from.</param>
/// <param name="rng">Random source.</param>
/// <returns>An array of indexes that are the selected items.</returns>
public int[] SelectSubset(int supersetCount, IRandomSource rng)
{
    // Note. Ideally we'd return a sorted list of indexes to improve performance of the code that
    // consumes them; however, the sampling process inherently produces samples in randomized order,
    // thus the decision of whether to sort or not depends on the cost to the code using the samples.
    // I.e. don't sort here!
    int selectionCount = (int)NumericsUtils.StochasticRound(supersetCount * _selectionProportion, rng);
    int[] idxArr = new int[selectionCount];
    DiscreteDistribution.SampleUniformWithoutReplacement(rng, supersetCount, idxArr);
    return idxArr;
}
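// SampleUniformWithoutReplacement fills idxArr with distinct indexes drawn uniformly from
// [0, supersetCount). A minimal sketch using a partial Fisher-Yates shuffle (an assumption about
// the technique, not the library's actual implementation), which also shows why the samples come
// out in randomized order, as the note above mentions:
public static void SampleWithoutReplacementSketch(Random rng, int supersetCount, int[] idxArr)
{
    // Materialize the superset of candidate indexes.
    int[] pool = new int[supersetCount];
    for (int i = 0; i < supersetCount; i++) { pool[i] = i; }

    // Partial Fisher-Yates: after k swaps, the first k slots hold a uniformly random
    // k-subset of the pool, in randomized order.
    for (int i = 0; i < idxArr.Length; i++)
    {
        int j = i + rng.Next(supersetCount - i);
        (pool[i], pool[j]) = (pool[j], pool[i]);
        idxArr[i] = pool[i];
    }
}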
public static byte[] EncodeMultTransfer(byte[] origin, byte[] dest, List<byte[]> from,
    List<byte[]> signatures, byte[] to, byte[] value, List<byte[]> pubkeyHashList)
{
    int length = RLP.EncodeElement(origin).Length
        + RLP.EncodeElement(dest).Length
        + RLPUtils.EncodeList(from.ToArray()).Length
        + RLPUtils.EncodeList(signatures.ToArray()).Length
        + RLP.EncodeElement(to).Length
        + RLP.EncodeElement(value).Length
        + RLPUtils.EncodeList(pubkeyHashList.ToArray()).Length;

    if (length < 56)
    {
        return Utils.Combine(new byte[] { (byte)(0xc0 + length) },
            RLP.EncodeElement(origin),
            RLP.EncodeElement(dest),
            RLP.EncodeList(EncodeElementsBytes(from.ToArray())),
            RLP.EncodeList(EncodeElementsBytes(signatures.ToArray())),
            RLP.EncodeElement(to),
            RLP.EncodeElement(value),
            RLP.EncodeList(EncodeElementsBytes(pubkeyHashList.ToArray())));
    }
    else
    {
        Tuple<byte, byte[]> tuple = NumericsUtils.getLengthByte(length);
        return Utils.Combine(new byte[] { (byte)(0xf7 + tuple.Item1) }, tuple.Item2,
            RLP.EncodeElement(origin),
            RLP.EncodeElement(dest),
            RLP.EncodeList(EncodeElementsBytes(from.ToArray())),
            RLP.EncodeList(EncodeElementsBytes(signatures.ToArray())),
            RLP.EncodeElement(to),
            RLP.EncodeElement(value),
            RLP.EncodeList(EncodeElementsBytes(pubkeyHashList.ToArray())));
    }
}
public static byte[] EncodeMultiple(byte[] a, byte[] b, byte[] c,
    List<byte[]> d, List<byte[]> e, List<byte[]> f)
{
    int length = RLP.EncodeElement(a).Length
        + RLP.EncodeElement(b).Length
        + RLP.EncodeElement(c).Length
        + RLPUtils.EncodeList(d.ToArray()).Length
        + RLPUtils.EncodeList(e.ToArray()).Length
        + RLPUtils.EncodeList(f.ToArray()).Length;

    if (length < 56)
    {
        return Utils.Combine(new byte[] { (byte)(0xc0 + length) },
            RLP.EncodeElement(a),
            RLP.EncodeElement(b),
            RLP.EncodeElement(c),
            RLP.EncodeList(EncodeElementsBytes(d.ToArray())),
            RLP.EncodeList(EncodeElementsBytes(e.ToArray())),
            RLP.EncodeList(EncodeElementsBytes(f.ToArray())));
    }
    else
    {
        Tuple<byte, byte[]> tuple = NumericsUtils.getLengthByte(length);
        return Utils.Combine(new byte[] { (byte)(0xf7 + tuple.Item1) }, tuple.Item2,
            RLP.EncodeElement(a),
            RLP.EncodeElement(b),
            RLP.EncodeElement(c),
            RLP.EncodeList(EncodeElementsBytes(d.ToArray())),
            RLP.EncodeList(EncodeElementsBytes(e.ToArray())),
            RLP.EncodeList(EncodeElementsBytes(f.ToArray())));
    }
}
public static byte[] EncodeRateHeightLockWithdraw(byte[] depositHash, byte[] to)
{
    int length = RLP.EncodeElement(depositHash).Length + RLP.EncodeElement(to).Length;

    if (length < 56)
    {
        return Utils.Combine(new byte[] { (byte)(0xc0 + length) },
            RLP.EncodeElement(depositHash),
            RLP.EncodeElement(to));
    }
    else
    {
        Tuple<byte, byte[]> tuple = NumericsUtils.getLengthByte(length);
        return Utils.Combine(new byte[] { (byte)(0xf7 + tuple.Item1) }, tuple.Item2,
            RLP.EncodeElement(depositHash),
            RLP.EncodeElement(to));
    }
}
public static byte[] EncodeRateHeightLock(byte[] assetHash, long oneTimeDepositMultiple,
    int withDrawPeriodHeight, string withDrawRate, byte[] dest,
    Dictionary<byte[], Extract> stateMap)
{
    List<byte[]> list = EncodeStateMap(stateMap);

    int length = RLP.EncodeElement(assetHash).Length
        + RLP.EncodeElement(oneTimeDepositMultiple.ToBytesForRLPEncoding()).Length
        + RLP.EncodeElement(withDrawPeriodHeight.ToBytesForRLPEncoding()).Length
        + RLP.EncodeElement(withDrawRate.ToBytesForRLPEncoding()).Length
        + RLP.EncodeElement(dest).Length
        + RLP.EncodeList(list.ToArray()).Length;

    if (length < 56)
    {
        return Utils.Combine(new byte[] { (byte)(0xc0 + length) },
            RLP.EncodeElement(assetHash),
            RLP.EncodeElement(oneTimeDepositMultiple.ToBytesForRLPEncoding()),
            RLP.EncodeElement(withDrawPeriodHeight.ToBytesForRLPEncoding()),
            RLP.EncodeElement(withDrawRate.ToBytesForRLPEncoding()),
            RLP.EncodeElement(dest),
            RLP.EncodeList(list.ToArray()));
    }
    else
    {
        Tuple<byte, byte[]> tuple = NumericsUtils.getLengthByte(length);
        return Utils.Combine(new byte[] { (byte)(0xf7 + tuple.Item1) }, tuple.Item2,
            RLP.EncodeElement(assetHash),
            RLP.EncodeElement(oneTimeDepositMultiple.ToBytesForRLPEncoding()),
            RLP.EncodeElement(withDrawPeriodHeight.ToBytesForRLPEncoding()),
            RLP.EncodeElement(withDrawRate.ToBytesForRLPEncoding()),
            RLP.EncodeElement(dest),
            RLP.EncodeList(list.ToArray()));
    }
}
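// All of the encoders above follow the standard RLP list-header rule: a payload shorter than 56
// bytes gets the single prefix byte 0xc0 + length, while a longer payload gets 0xf7 + n followed
// by the payload length as n big-endian bytes. A minimal sketch of what getLengthByte presumably
// computes, inferred from how its result is used above (an assumption, not the library source):
public static Tuple<byte, byte[]> GetLengthByteSketch(int length)
{
    // Encode the length as big-endian bytes with no leading zeros.
    List<byte> bytes = new List<byte>();
    for (int v = length; v > 0; v >>= 8)
    {
        bytes.Insert(0, (byte)(v & 0xff));
    }

    // Item1: the number of length bytes (added to 0xf7 to form the header byte).
    // Item2: the big-endian length bytes that follow the header byte.
    return Tuple.Create((byte)bytes.Count, bytes.ToArray());
}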
public void UniformReset()
{
    double weightScale = 5.0;
    var strategy = ResetWeightMutationStrategy<double>.CreateUniformResetStrategy(
        new SelectAllStrategy(), weightScale);

    IRandomSource rng = RandomDefaults.CreateRandomSource(0);

    int iters = 10_000;
    double[] weightArr = new double[iters];
    for (int i = 0; i < iters; i++) {
        weightArr[i] = 123.0;
    }

    strategy.Invoke(weightArr, rng);

    // Construct a histogram on the array of weights.
    HistogramData hist = NumericsUtils.BuildHistogramData(weightArr, 8);

    // We expect samples to be approximately evenly distributed over the histogram buckets.
    for (int i = 0; i < hist.FrequencyArray.Length; i++) {
        Assert.True(hist.FrequencyArray[i] > (iters / 8) * 0.8);
    }

    // We expect min and max to be close to -weightScale and +weightScale respectively.
    double delta = weightScale - hist.Max;
    Assert.True(delta >= 0.0 && delta < 0.1);

    delta = weightScale + hist.Min;
    Assert.True(delta >= 0.0 && delta < 0.1);

    // Mean should be near to zero.
    TestMean(weightArr);
}
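// The uniform reset strategy discards each selected weight and replaces it with a fresh sample
// from [-weightScale, +weightScale], which is exactly what the test asserts: an even spread across
// the buckets, extremes near the scale bounds, and a mean near zero. A minimal sketch of the reset
// (an assumption about the mechanism; System.Random stands in for IRandomSource):
public static void ApplyUniformResetSketch(double[] weightArr, double weightScale, Random rng)
{
    for (int i = 0; i < weightArr.Length; i++)
    {
        // The old weight is discarded entirely; the new value is uniform over
        // [-weightScale, +weightScale], hence the expected mean of zero.
        weightArr[i] = (rng.NextDouble() * 2.0 - 1.0) * weightScale;
    }
}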
/// <summary>
/// Creates a single randomly initialised genome.
/// </summary>
private NeatGenome<T> CreateGenome()
{
    // Determine how many connections to create in the new genome, as a proportion of all possible
    // connections between the input and output nodes.
    int connectionCount = (int)NumericsUtils.ProbabilisticRound(_connectionDefArr.Length * _connectionsProportion, _rng);

    // Ensure there is at least one connection.
    connectionCount = Math.Max(1, connectionCount);

    // Select a random subset of all possible connections between the input and output nodes.
    int[] sampleArr = new int[connectionCount];
    DiscreteDistributionUtils.SampleUniformWithoutReplacement(
        _connectionDefArr.Length, sampleArr, _rng);

    // Sort the samples.
    // Note. This results in the neural net connections being sorted by sourceID then targetID.
    Array.Sort(sampleArr);

    // Create the connection gene arrays and populate them.
    var connGenes = new ConnectionGenes<T>(connectionCount);
    var connArr = connGenes._connArr;
    var weightArr = connGenes._weightArr;

    for (int i = 0; i < sampleArr.Length; i++)
    {
        DirectedConnection cdef = _connectionDefArr[sampleArr[i]];
        connArr[i] = new DirectedConnection(cdef.SourceId, cdef.TargetId);
        weightArr[i] = _connWeightDist.Sample(_metaNeatGenome.ConnectionWeightRange, true);
    }

    // Create a new genome with a new ID and a birth generation of zero.
    int id = _genomeIdSeq.Next();
    return _genomeBuilder.Create(id, 0, connGenes);
}
/// <summary>
/// Creates a single randomly initialised genome.
/// A random set of connections is made from the input to the output neurons; the number of
/// connections made is based on the NeatGenomeParameters.InitialInterconnectionsProportion,
/// which specifies the proportion of all possible input-output connections to be made in
/// initial genomes.
///
/// The connections that are made are allocated innovation IDs in a consistent manner across
/// the initial population of genomes. To do this we allocate IDs sequentially to all possible
/// interconnections and then randomly select some proportion of connections for inclusion in the
/// genome. In addition, for this scheme to work the innovation ID generator must be reset to zero
/// prior to each call to CreateGenome(), and a test is made to ensure this is the case.
///
/// The consistent allocation of innovation IDs ensures that equivalent connections in different
/// genomes have the same innovation ID, and although this isn't strictly necessary it is
/// required for sexual reproduction to work effectively - like structures are detected by comparing
/// innovation IDs only.
/// </summary>
/// <param name="birthGeneration">The current evolution algorithm generation.
/// Assigned to the new genome as its birth generation.</param>
public NeatGenome CreateGenome(uint birthGeneration)
{
    NeuronGeneList neuronGeneList = new NeuronGeneList(_inputNeuronCount + _outputNeuronCount);
    NeuronGeneList inputNeuronGeneList = new NeuronGeneList(_inputNeuronCount); // Includes single bias neuron.
    NeuronGeneList outputNeuronGeneList = new NeuronGeneList(_outputNeuronCount);

    // Create a single bias neuron.
    uint biasNeuronId = _innovationIdGenerator.NextId;
    if (0 != biasNeuronId)
    {
        // The ID generator must be reset before calling this method so that all generated genomes
        // use the same innovation ID for matching neurons and structures.
        throw new SharpNeatException("IdGenerator must be reset before calling CreateGenome(uint)");
    }

    // Note. Genes within nGeneList must always be arranged according to the following layout plan.
    //   Bias - single neuron. Innovation ID = 0.
    //   Input neurons.
    //   Output neurons.
    //   Hidden neurons.
    NeuronGene neuronGene = CreateNeuronGene(biasNeuronId, NodeType.Bias);
    inputNeuronGeneList.Add(neuronGene);
    neuronGeneList.Add(neuronGene);

    // Create input neuron genes.
    for (int i = 0; i < _inputNeuronCount; i++)
    {
        neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Input);
        inputNeuronGeneList.Add(neuronGene);
        neuronGeneList.Add(neuronGene);
    }

    // Create output neuron genes.
    for (int i = 0; i < _outputNeuronCount; i++)
    {
        neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Output);
        outputNeuronGeneList.Add(neuronGene);
        neuronGeneList.Add(neuronGene);
    }

    // Define all possible connections between the input and output neurons (fully interconnected).
    int srcCount = inputNeuronGeneList.Count;
    int tgtCount = outputNeuronGeneList.Count;
    ConnectionDefinition[] connectionDefArr = new ConnectionDefinition[srcCount * tgtCount];

    for (int srcIdx = 0, i = 0; srcIdx < srcCount; srcIdx++)
    {
        for (int tgtIdx = 0; tgtIdx < tgtCount; tgtIdx++)
        {
            connectionDefArr[i++] = new ConnectionDefinition(_innovationIdGenerator.NextId, srcIdx, tgtIdx);
        }
    }

    // Shuffle the array of possible connections.
    SortUtils.Shuffle(connectionDefArr, _rng);

    // Select connection definitions from the head of the list and convert them to real connections.
    // We want some proportion of all possible connections, but at least one (connectionless genomes
    // are not allowed).
    int connectionCount = (int)NumericsUtils.ProbabilisticRound(
        (double)connectionDefArr.Length * _neatGenomeParamsComplexifying.InitialInterconnectionsProportion,
        _rng);
    connectionCount = Math.Max(1, connectionCount);

    // Create the connection gene list and populate it.
    ConnectionGeneList connectionGeneList = new ConnectionGeneList(connectionCount);
    for (int i = 0; i < connectionCount; i++)
    {
        ConnectionDefinition def = connectionDefArr[i];
        NeuronGene srcNeuronGene = inputNeuronGeneList[def._sourceNeuronIdx];
        NeuronGene tgtNeuronGene = outputNeuronGeneList[def._targetNeuronIdx];

        ConnectionGene cGene = new ConnectionGene(def._innovationId,
            srcNeuronGene.InnovationId,
            tgtNeuronGene.InnovationId,
            GenerateRandomConnectionWeight());
        connectionGeneList.Add(cGene);

        // Register connection with endpoint neurons.
        srcNeuronGene.TargetNeurons.Add(cGene.TargetNodeId);
        tgtNeuronGene.SourceNeurons.Add(cGene.SourceNodeId);
    }

    // Ensure connections are sorted.
    connectionGeneList.SortByInnovationId();

    // Create and return the completed genome object.
    return CreateGenome(_genomeIdGenerator.NextId, birthGeneration,
        neuronGeneList, connectionGeneList,
        _inputNeuronCount, _outputNeuronCount, false);
}
/// <summary>
/// Calculate statistics for each specie. This method is at the heart of the evolutionary algorithm;
/// the key things that are achieved in this method are - for each specie we calculate:
///  1) The target size, based on the fitness of the specie's member genomes.
///  2) The elite size, based on the current size. Potentially this could be higher than the target
///     size, so the target size is taken to be a hard limit.
///  3) Following (1) and (2) we can calculate the total number of offspring that need to be
///     generated for the current generation.
/// </summary>
private SpecieStats[] CalcSpecieStats(out int offspringCount)
{
    double totalMeanFitness = 0.0;

    // Build stats array and get the mean fitness of each specie.
    int specieCount = _specieList.Count;
    SpecieStats[] specieStatsArr = new SpecieStats[specieCount];
    for (int i = 0; i < specieCount; i++)
    {
        SpecieStats inst = new SpecieStats();
        specieStatsArr[i] = inst;
        inst._meanFitness = _specieList[i].CalcMeanFitness();
        totalMeanFitness += inst._meanFitness;
    }

    // Calculate the new target size of each specie using fitness sharing.
    // Keep a total of all allocated target sizes; typically this will vary slightly from the
    // overall target population size due to rounding of each real/fractional target size.
    int totalTargetSizeInt = 0;

    if (0.0 == totalMeanFitness)
    {
        // Handle specific case where all genomes/species have a zero fitness.
        // Assign all species an equal targetSize.
        double targetSizeReal = (double)_populationSize / (double)specieCount;

        for (int i = 0; i < specieCount; i++)
        {
            SpecieStats inst = specieStatsArr[i];
            inst._targetSizeReal = targetSizeReal;

            // Stochastic rounding will result in equal allocation if targetSizeReal is a whole
            // number, otherwise it will help to distribute allocations evenly.
            inst._targetSizeInt = (int)NumericsUtils.ProbabilisticRound(targetSizeReal, _rng);

            // Total up discretized target sizes.
            totalTargetSizeInt += inst._targetSizeInt;
        }
    }
    else
    {
        // The size of each specie is based on its fitness relative to the other species.
        for (int i = 0; i < specieCount; i++)
        {
            SpecieStats inst = specieStatsArr[i];
            inst._targetSizeReal = (inst._meanFitness / totalMeanFitness) * (double)_populationSize;

            // Discretize targetSize (stochastic rounding).
            inst._targetSizeInt = (int)NumericsUtils.ProbabilisticRound(inst._targetSizeReal, _rng);

            // Total up discretized target sizes.
            totalTargetSizeInt += inst._targetSizeInt;
        }
    }

    // Discretized target sizes may total up to a value that is not equal to the required overall
    // population size. Here we check this, and if there is a difference then we adjust the species'
    // targetSizeInt values to compensate for the difference.
    //
    // E.g. if we are short of the required populationSize then we add the required additional
    // allocation to selected species, based on the difference between each specie's targetSizeReal
    // and targetSizeInt values. What we're effectively doing here is assigning the additional
    // required target allocation to species based on their real target size in relation to their
    // actual (integer) target size.
    // Those species that have an actual allocation below their real allocation (the difference will
    // often be a fractional amount) will be assigned extra allocation probabilistically, where the
    // probability is based on the differences between real and actual target values.
    //
    // Where the actual target allocation is higher than the required target (due to rounding up),
    // we use the same method but we adjust specie target sizes down rather than up.
    int targetSizeDeltaInt = totalTargetSizeInt - _populationSize;

    if (targetSizeDeltaInt < 0)
    {
        // Check for special case. If we are short by just 1 then increment targetSizeInt for the
        // specie containing the best genome. We always ensure that this specie has a minimum target
        // size of 1 with a final test (below); by incrementing here we avoid the probabilistic
        // allocation below, followed by a further correction if the champ specie ended up with a
        // zero target size.
        if (-1 == targetSizeDeltaInt)
        {
            specieStatsArr[_bestSpecieIdx]._targetSizeInt++;
        }
        else
        {
            // We are short of the required populationSize. Add the required additional allocations.
            // Determine each specie's relative probability of receiving additional allocation.
            double[] probabilities = new double[specieCount];
            for (int i = 0; i < specieCount; i++)
            {
                SpecieStats inst = specieStatsArr[i];
                probabilities[i] = Math.Max(0.0, inst._targetSizeReal - (double)inst._targetSizeInt);
            }

            // Use a built-in class for choosing an item based on a list of relative probabilities.
            DiscreteDistribution dist = new DiscreteDistribution(probabilities);

            // Probabilistically assign the required number of additional allocations.
            // FIXME/ENHANCEMENT: We can improve the allocation fairness by updating the
            // DiscreteDistribution after each allocation (to reflect that allocation).
            // targetSizeDeltaInt is negative, so flip the sign for code clarity.
            targetSizeDeltaInt *= -1;
            for (int i = 0; i < targetSizeDeltaInt; i++)
            {
                int specieIdx = DiscreteDistribution.Sample(_rng, dist);
                specieStatsArr[specieIdx]._targetSizeInt++;
            }
        }
    }
    else if (targetSizeDeltaInt > 0)
    {
        // We have overshot the required populationSize. Adjust target sizes down to compensate.
        // Determine each specie's relative probability of target size downward adjustment.
        double[] probabilities = new double[specieCount];
        for (int i = 0; i < specieCount; i++)
        {
            SpecieStats inst = specieStatsArr[i];
            probabilities[i] = Math.Max(0.0, (double)inst._targetSizeInt - inst._targetSizeReal);
        }

        // Use a built-in class for choosing an item based on a list of relative probabilities.
        DiscreteDistribution dist = new DiscreteDistribution(probabilities);

        // Probabilistically decrement specie target sizes.
        // ENHANCEMENT: We can improve the selection fairness by updating the DiscreteDistribution
        // after each decrement (to reflect that decrement).
        for (int i = 0; i < targetSizeDeltaInt;)
        {
            int specieIdx = DiscreteDistribution.Sample(_rng, dist);

            // Skip empty species. This can happen because the same species can be selected more than once.
            if (0 != specieStatsArr[specieIdx]._targetSizeInt)
            {
                specieStatsArr[specieIdx]._targetSizeInt--;
                i++;
            }
        }
    }

    // We now have Sum(_targetSizeInt) == _populationSize.
    Debug.Assert(SumTargetSizeInt(specieStatsArr) == _populationSize);

    // TODO: Better way of ensuring the champ specie has a non-zero target size?
    // However, we need to check that the specie with the best genome has a non-zero targetSizeInt in
    // order to ensure that the best genome is preserved. A zero size may have been allocated in some
    // pathological cases.
    if (0 == specieStatsArr[_bestSpecieIdx]._targetSizeInt)
    {
        specieStatsArr[_bestSpecieIdx]._targetSizeInt++;

        // Adjust down the target size of one of the other species to compensate.
        // Pick a specie at random (but not the champ specie). Note that this may result in a specie
        // with a zero target size; this is OK at this stage. We handle allocations of zero in
        // PerformOneGeneration().
        int idx = _rng.Next(specieCount - 1);
        idx = (idx == _bestSpecieIdx) ? idx + 1 : idx;

        if (specieStatsArr[idx]._targetSizeInt > 0)
        {
            specieStatsArr[idx]._targetSizeInt--;
        }
        else
        {
            // Scan forward from this specie to find a suitable one.
            bool done = false;
            idx++;
            for (; idx < specieCount; idx++)
            {
                if (idx != _bestSpecieIdx && specieStatsArr[idx]._targetSizeInt > 0)
                {
                    specieStatsArr[idx]._targetSizeInt--;
                    done = true;
                    break;
                }
            }

            // Scan forward from the start of the species list.
            if (!done)
            {
                for (int i = 0; i < specieCount; i++)
                {
                    if (i != _bestSpecieIdx && specieStatsArr[i]._targetSizeInt > 0)
                    {
                        specieStatsArr[i]._targetSizeInt--;
                        done = true;
                        break;
                    }
                }
                if (!done)
                {
                    throw new SharpNeatException("CalcSpecieStats(). Error adjusting target population size down. Is the population size less than or equal to the number of species?");
                }
            }
        }
    }

    // Now determine the eliteSize for each specie. This is the number of genomes that will remain in
    // a specie from the current generation, and is a proportion of the specie's current size.
    // Also here we calculate the total number of offspring that will need to be generated.
    offspringCount = 0;
    for (int i = 0; i < specieCount; i++)
    {
        // Special case - zero target size.
        if (0 == specieStatsArr[i]._targetSizeInt)
        {
            specieStatsArr[i]._eliteSizeInt = 0;
            continue;
        }

        // Discretize the real size with a probabilistic handling of the fractional part.
        double eliteSizeReal = _specieList[i].GenomeList.Count * _eaParams.ElitismProportion;
        int eliteSizeInt = (int)NumericsUtils.ProbabilisticRound(eliteSizeReal, _rng);

        // Ensure eliteSizeInt is no larger than the current target size (remember it was calculated
        // against the current size of the specie, not its new target size).
        SpecieStats inst = specieStatsArr[i];
        inst._eliteSizeInt = Math.Min(eliteSizeInt, inst._targetSizeInt);

        // Ensure the champ specie preserves the champ genome. We do this even if the target size is
        // just 1 - which means the champ genome will remain and no offspring will be produced from
        // it, apart from the (usually small) chance of a cross-species mating.
        if (i == _bestSpecieIdx && inst._eliteSizeInt == 0)
        {
            Debug.Assert(inst._targetSizeInt != 0, "Zero target size assigned to champ specie.");
            inst._eliteSizeInt = 1;
        }

        // Now we can determine how many offspring to produce for the specie.
        inst._offspringCount = inst._targetSizeInt - inst._eliteSizeInt;
        offspringCount += inst._offspringCount;

        // While we're here we determine the split between asexual and sexual reproduction. Again
        // using some probabilistic logic to compensate for any rounding bias.
        double offspringAsexualCountReal = (double)inst._offspringCount * _eaParams.OffspringAsexualProportion;
        inst._offspringAsexualCount = (int)NumericsUtils.ProbabilisticRound(offspringAsexualCountReal, _rng);
        inst._offspringSexualCount = inst._offspringCount - inst._offspringAsexualCount;

        // Also while we're here we calculate the selectionSize: the number of the specie's fittest
        // genomes that are selected from to create offspring. This should always be at least 1.
        double selectionSizeReal = _specieList[i].GenomeList.Count * _eaParams.SelectionProportion;
        inst._selectionSizeInt = Math.Max(1, (int)NumericsUtils.ProbabilisticRound(selectionSizeReal, _rng));
    }

    return specieStatsArr;
}
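// The correction step in CalcSpecieStats can be illustrated in isolation: when the discretized
// totals undershoot the population size, species receive the shortfall probabilistically, in
// proportion to the fractional allocation each lost to rounding. A minimal sketch with plain arrays
// and roulette-wheel selection (a hypothetical helper, not the library's DiscreteDistribution API);
// note that it recomputes the weights after each allocation, which is the fairness enhancement the
// FIXME comment in the source alludes to:
public static void DistributeShortfallSketch(
    double[] targetSizeReal, int[] targetSizeInt, int shortfall, Random rng)
{
    for (int n = 0; n < shortfall; n++)
    {
        // Weight each species by the fractional allocation it lost to rounding.
        // Assumes at least one weight is positive while a shortfall remains.
        double[] w = new double[targetSizeInt.Length];
        double total = 0.0;
        for (int i = 0; i < w.Length; i++)
        {
            w[i] = Math.Max(0.0, targetSizeReal[i] - targetSizeInt[i]);
            total += w[i];
        }

        // Roulette-wheel selection over the weights.
        double r = rng.NextDouble() * total;
        int idx = 0;
        double acc = w[0];
        while (acc < r && idx < w.Length - 1)
        {
            idx++;
            acc += w[idx];
        }
        targetSizeInt[idx]++;
    }
}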
private void CreateSpeciesOffspringSexual(
    Species<T>[] speciesArr,
    Species<T> species,
    DiscreteDistribution speciesDistUpdated,
    DiscreteDistribution?[] genomeDistArr,
    DiscreteDistribution genomeDist,
    int offspringCount,
    List<NeatGenome<T>> offspringList,
    double interspeciesMatingProportion,
    IRandomSource rng,
    out int offspringInterspeciesCount)
{
    // Calc the number of offspring to create via inter-species sexual reproduction.
    int offspringCountSexualInter;
    if (interspeciesMatingProportion == 0.0)
    {
        offspringInterspeciesCount = offspringCountSexualInter = 0;
    }
    else
    {
        offspringInterspeciesCount = offspringCountSexualInter =
            (int)NumericsUtils.ProbabilisticRound(interspeciesMatingProportion * offspringCount, rng);
    }

    // Calc the number of offspring to create via intra-species sexual reproduction.
    int offspringCountSexualIntra = offspringCount - offspringCountSexualInter;

    // Get genome list for the current species.
    var genomeList = species.GenomeList;

    // Produce the required number of offspring from intra-species sexual reproduction.
    for (int i = 0; i < offspringCountSexualIntra; i++)
    {
        // Select/sample parent A from the species.
        int genomeIdx = DiscreteDistribution.Sample(rng, genomeDist);
        var parentGenomeA = genomeList[genomeIdx];

        // Create a new distribution with parent A removed from the set of possibilities.
        DiscreteDistribution genomeDistUpdated = genomeDist.RemoveOutcome(genomeIdx);

        // Select/sample parent B from the species.
        genomeIdx = DiscreteDistribution.Sample(rng, genomeDistUpdated);
        var parentGenomeB = genomeList[genomeIdx];

        // Create a child genome and add it to offspringList.
        var childGenome = _reproductionSexual.CreateGenome(parentGenomeA, parentGenomeB, rng);
        offspringList.Add(childGenome);
    }

    // Produce the required number of offspring from inter-species sexual reproduction.
    for (int i = 0; i < offspringCountSexualInter; i++)
    {
        // Select/sample parent A from the current species.
        int genomeIdx = DiscreteDistribution.Sample(rng, genomeDist);
        var parentGenomeA = genomeList[genomeIdx];

        // Select another species to select parent B from.
        int speciesIdx = DiscreteDistribution.Sample(rng, speciesDistUpdated);
        Species<T> speciesB = speciesArr[speciesIdx];

        // Select parent B from species B.
        DiscreteDistribution genomeDistB = genomeDistArr[speciesIdx]!;
        genomeIdx = DiscreteDistribution.Sample(rng, genomeDistB);
        var parentGenomeB = speciesB.GenomeList[genomeIdx];

        // Ensure parentA is the fittest of the two parents.
        if (_fitnessComparer.Compare(parentGenomeA.FitnessInfo, parentGenomeB.FitnessInfo) < 0)
        {
            VariableUtils.Swap(ref parentGenomeA!, ref parentGenomeB!);
        }

        // Create a child genome and add it to offspringList.
        var childGenome = _reproductionSexual.CreateGenome(parentGenomeA, parentGenomeB, rng);
        offspringList.Add(childGenome);
    }
}
/// <summary>
/// Create the required number of offspring genomes, using specieStatsArr as the basis for selecting
/// how many offspring are produced from each species.
/// </summary>
private List<TGenome> CreateOffspring(SpecieStats[] specieStatsArr, int offspringCount)
{
    // Build a DiscreteDistribution for selecting species for cross-species reproduction.
    // While we're in the loop we also pre-build a DiscreteDistribution for each specie;
    // doing this before the main loop means we have DiscreteDistributions available for
    // all species when performing cross-specie matings.
    int specieCount = specieStatsArr.Length;
    double[] specieFitnessArr = new double[specieCount];
    DiscreteDistribution[] distArr = new DiscreteDistribution[specieCount];

    // Count of species with non-zero selection size.
    // If this is exactly 1 then we skip inter-species mating. One is a special case because for 0
    // the species all get an even chance of selection, and for >1 we can just select normally.
    int nonZeroSpecieCount = 0;

    for (int i = 0; i < specieCount; i++)
    {
        // Array of probabilities for specie selection. Note that some of these probabilities can
        // be zero, but at least one of them won't be.
        SpecieStats inst = specieStatsArr[i];
        specieFitnessArr[i] = inst._selectionSizeInt;

        if (0 == inst._selectionSizeInt)
        {
            // Skip building a DiscreteDistribution for species that won't be selected from.
            distArr[i] = null;
            continue;
        }

        nonZeroSpecieCount++;

        // For each specie we build a DiscreteDistribution for genome selection within
        // that specie. Fitter genomes have a higher probability of selection.
        List<TGenome> genomeList = _specieList[i].GenomeList;
        double[] probabilities = new double[inst._selectionSizeInt];
        for (int j = 0; j < inst._selectionSizeInt; j++)
        {
            probabilities[j] = genomeList[j].EvaluationInfo.Fitness;
        }
        distArr[i] = new DiscreteDistribution(probabilities);
    }

    // Complete construction of DiscreteDistribution for specie selection.
    DiscreteDistribution rwlSpecies = new DiscreteDistribution(specieFitnessArr);

    // Produce offspring from each specie in turn and store them in offspringList.
    List<TGenome> offspringList = new List<TGenome>(offspringCount);
    for (int specieIdx = 0; specieIdx < specieCount; specieIdx++)
    {
        SpecieStats inst = specieStatsArr[specieIdx];
        List<TGenome> genomeList = _specieList[specieIdx].GenomeList;

        // Get the DiscreteDistribution for genome selection.
        DiscreteDistribution dist = distArr[specieIdx];

        // --- Produce the required number of offspring from asexual reproduction.
        for (int i = 0; i < inst._offspringAsexualCount; i++)
        {
            int genomeIdx = DiscreteDistribution.Sample(_rng, dist);
            TGenome offspring = genomeList[genomeIdx].CreateOffspring(_currentGeneration);
            offspringList.Add(offspring);
        }
        _stats._asexualOffspringCount += (ulong)inst._offspringAsexualCount;

        // --- Produce the required number of offspring from sexual reproduction.
        // Cross-specie mating.
        // If nonZeroSpecieCount is exactly 1 then we skip inter-species mating. One is a special
        // case because for 0 the species all get an even chance of selection, and for >1 we can
        // just select species normally.
        int crossSpecieMatings = nonZeroSpecieCount == 1 ? 0 :
            (int)NumericsUtils.ProbabilisticRound(_eaParams.InterspeciesMatingProportion * inst._offspringSexualCount, _rng);
        _stats._sexualOffspringCount += (ulong)(inst._offspringSexualCount - crossSpecieMatings);
        _stats._interspeciesOffspringCount += (ulong)crossSpecieMatings;

        // An index that keeps track of how many sexual offspring have been produced for the current specie.
        int matingsCount = 0;
        for (; matingsCount < crossSpecieMatings; matingsCount++)
        {
            TGenome offspring = CreateOffspring_CrossSpecieMating(dist, distArr, rwlSpecies, specieIdx, genomeList);
            offspringList.Add(offspring);
        }

        // For the remainder we use normal intra-specie mating.
        // Test for special case - we only have one genome to select from in the current specie.
        if (1 == inst._selectionSizeInt)
        {
            // Fall back to asexual reproduction.
            for (; matingsCount < inst._offspringSexualCount; matingsCount++)
            {
                int genomeIdx = DiscreteDistribution.Sample(_rng, dist);
                TGenome offspring = genomeList[genomeIdx].CreateOffspring(_currentGeneration);
                offspringList.Add(offspring);
            }
        }
        else
        {
            // The remainder of matings are normal within-specie.
            for (; matingsCount < inst._offspringSexualCount; matingsCount++)
            {
                // Select parent 1.
                int parent1Idx = DiscreteDistribution.Sample(_rng, dist);
                TGenome parent1 = genomeList[parent1Idx];

                // Remove the selected parent from the set of possible outcomes.
                DiscreteDistribution distTmp = dist.RemoveOutcome(parent1Idx);

                // Test for existence of at least one more parent to select.
                if (0 != distTmp.Probabilities.Length)
                {
                    // Get the two parents to mate.
                    int parent2Idx = DiscreteDistribution.Sample(_rng, distTmp);
                    TGenome parent2 = genomeList[parent2Idx];
                    TGenome offspring = parent1.CreateOffspring(parent2, _currentGeneration);
                    offspringList.Add(offspring);
                }
                else
                {
                    // No other parent has a non-zero selection probability (they all have zero fitness).
                    // Fall back to asexual reproduction of the single genome with a non-zero fitness.
                    TGenome offspring = parent1.CreateOffspring(_currentGeneration);
                    offspringList.Add(offspring);
                }
            }
        }
    }

    _stats._totalOffspringCount += (ulong)offspringCount;
    return offspringList;
}