/// <summary>
/// Picks one size-permutation from <paramref name="permutations"/> at random,
/// weighting each permutation by a factorial-based count of the arrangements
/// it represents, then returns the chosen permutation array.
/// </summary>
private static int[] Select(IList<int[]> permutations, int size)
{
    var total = 0;
    // NOTE(review): denominator is declared outside the loop and never reset,
    // so it accumulates factorial factors across ALL permutations rather than
    // per permutation -- looks suspicious; TODO confirm against the original
    // ECJ implementation before changing.
    long denominator = 1;
    int[] current;
    var quantity = new int[permutations.Count];
    for (var counter1 = 0; counter1 < permutations.Count; counter1++)
    {
        current = permutations[counter1];
        // residue = size left over after subtracting this permutation's entries
        long residue = size;
        // Quick internal calculations
        for (var counter2 = 0; counter2 < current.Length; counter2++)
        {
            residue -= current[counter2];
            denominator *= Fact(current[counter2]);
        }
        // multinomial-style count of arrangements for this permutation
        quantity[counter1] = (int)(Fact(size - 1) / (denominator * Fact(residue)));
        total += quantity[counter1];
    }

    var prob = new double[quantity.Length];
    // quantities found... now build array for probabilities
    for (var counter1 = 0; counter1 < quantity.Length; counter1++)
    {
        prob[counter1] = quantity[counter1] / (double)total;
        // I don't think we need to check for negative values here -- Sean
    }
    RandomChoice.OrganizeDistribution(prob);
    // NOTE(review): a constant 0.0 is passed where a random draw would normally
    // go, alongside a magic checkboundary of 7 -- presumably a random value was
    // intended; TODO confirm against RandomChoice.PickFromDistribution's contract.
    var selection = RandomChoice.PickFromDistribution(prob, 0.0, 7);
    return (permutations[selection]);
}
/// <summary>
/// Completely override FitProportionateSelection.prepareToProduce.
/// Loads every individual's fitness (all must be non-negative), computes the
/// population mean and sample standard deviation, replaces each fitness with
/// its sigma-scaled value (clamped below at the fitness floor, since sigma
/// scaling can go negative), and organizes the result into a distribution.
/// </summary>
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    base.PrepareToProduce(s, subpop, thread);

    // load fitnesses, rejecting negatives immediately
    var inds = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[inds.Count];
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = inds[i].Fitness.Value;
        if (Fitnesses[i] < 0) // uh oh
        {
            s.Output.Fatal("Discovered a negative fitness value. SigmaScalingSelection requires that all fitness values be non-negative(offending subpopulation #" + subpop + ")");
        }
    }

    // Calculate meanFitness
    double meanSum = 0;
    foreach (var fit in Fitnesses)
    {
        meanSum += fit;
    }
    var meanFitness = meanSum / Fitnesses.Length;

    // Calculate sum of squared deviations (sample variance uses n - 1)
    double squaredDeviationsSum = 0;
    foreach (var fit in Fitnesses)
    {
        squaredDeviationsSum += Math.Pow(fit - meanFitness, 2);
    }
    var sigma = Math.Sqrt(squaredDeviationsSum / (Fitnesses.Length - 1));

    // Fill fitnesses[] with sigma scaled fitness values.
    // Sigma scaling can return negatives, which fitness-proportionate style
    // selection cannot use, so any value below _fitnessFloor is replaced by
    // _fitnessFloor (some value >= 0).
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = (double)SigmaScaledValue(Fitnesses[i], meanFitness, sigma, s);
        if (Fitnesses[i] < _fitnessFloor)
        {
            Fitnesses[i] = _fitnessFloor;
        }
    }

    // organize the distribution. All zeros in fitness is fine
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
/// <summary>
/// Completely override FitProportionateSelection.prepareToProduce.
/// Note this deliberately does NOT call base.PrepareToProduce: the
/// distribution is built directly from Boltzmann-adjusted fitness values.
/// </summary>
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    // load fitnesses, adjusting each proportion according to the current temperature
    Fitnesses = new double[s.Population.Subpops[subpop].Individuals.Count];
    for (var x = 0; x < Fitnesses.Length; x++)
    {
        Fitnesses[x] = (double)BoltzmannExpectedValue(
            s.Population.Subpops[subpop].Individuals[x].Fitness.Value, s);
        if (Fitnesses[x] < 0) // uh oh
        {
            // BUG FIX: message previously misspelled the class as "BoltzmannnSelection"
            // (triple 'n') and lacked a space before "(offending".
            s.Output.Fatal("Discovered a negative fitness value. BoltzmannSelection requires that all fitness values be non-negative (offending subpopulation #" + subpop + ")");
        }
    }
    // organize the distribution. All zeros in fitness is fine
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
// don't need clone etc.

#region Operations

/// <summary>
/// Copies every individual's raw fitness into Fitnesses (all values must be
/// non-negative -- a negative is fatal) and organizes the array into a
/// cumulative distribution for fitness-proportionate selection.
/// </summary>
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    base.PrepareToProduce(s, subpop, thread);

    // load sortedFit
    var inds = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[inds.Count];
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = inds[i].Fitness.Value;
        if (Fitnesses[i] < 0)
        {
            // uh oh
            s.Output.Fatal("Discovered a negative fitness value." + " FitProportionateSelection requires that all fitness values be non-negative(offending subpop #" + subpop + ")");
        }
    }

    // organize the distribution. All zeros in fitness is fine
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
// don't need clone etc.

/// <summary>
/// Prepares stochastic universal sampling: resets the walk state, records a
/// random offset into the distribution, loads each individual's fitness
/// (all must be non-negative), optionally shuffles the fitness/index pairing,
/// and finally organizes the cumulative distribution.
/// </summary>
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    base.PrepareToProduce(s, subpop, thread);

    // reset the sampling walk
    LastIndex = 0;
    Steps = 0;

    var inds = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[inds.Count];

    // compute offset
    Offset = (double)(s.Random[thread].NextDouble() / Fitnesses.Length);

    // load fitnesses but don't build distribution yet
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = inds[i].Fitness.Value;
        if (Fitnesses[i] < 0) // uh oh
        {
            s.Output.Fatal("Discovered a negative fitness value. SUSSelection requires that all fitness values be non-negative(offending subpopulation #" + subpop + ")");
        }
    }

    // construct and optionally shuffle fitness distribution and indices
    Indices = new int[inds.Count];
    for (var i = 0; i < Indices.Length; i++)
    {
        Indices[i] = i;
    }
    if (Shuffle)
    {
        ShuffleFitnessesAndIndices(s.Random[thread], Fitnesses, Indices);
    }

    // organize the distribution. All zeros in fitness is fine
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
/// <summary>
/// Loads this GPNodeBuilder's size constraints from the parameter database:
/// either a min/max size pair, or an explicit tree-size distribution
/// (num-sizes plus one probability per size 1..N). Supplying only one of
/// min/max is a fatal error; the two schemes are mutually exclusive here.
/// </summary>
public virtual void Setup(IEvolutionState state, IParameter paramBase)
{
    var def = DefaultBase;

    // min and max size
    if (state.Parameters.ParameterExists(paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE)))
    {
        // min without max is an error
        if (!(state.Parameters.ParameterExists(paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE))))
        {
            state.Output.Fatal("This GPNodeBuilder has a " + P_MINSIZE + " but not a " + P_MAXSIZE + ".");
        }

        // GetInt is given a minimum of 1; a result of 0 signals missing or
        // below-minimum -- presumably GetInt returns min-1 on failure
        // (TODO confirm against ParameterDatabase).
        MinSize = state.Parameters.GetInt(paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE), 1);
        if (MinSize == 0)
        {
            state.Output.Fatal("The GPNodeBuilder must have a min size >= 1.",
                paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE));
        }

        MaxSize = state.Parameters.GetInt(paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE), 1);
        if (MaxSize == 0)
        {
            state.Output.Fatal("The GPNodeBuilder must have a max size >= 1.",
                paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE));
        }

        if (MinSize > MaxSize)
        {
            state.Output.Fatal("The GPNodeBuilder must have min size <= max size.",
                paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE));
        }
    }
    else if (state.Parameters.ParameterExists(paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE)))
    {
        // max without min is likewise an error
        state.Output.Fatal("This GPNodeBuilder has a " + P_MAXSIZE + " but not a " + P_MINSIZE + ".",
            paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE));
    }
    // load sizeDistribution
    else if (state.Parameters.ParameterExists(paramBase.Push(P_NUMSIZES), def.Push(P_NUMSIZES)))
    {
        var siz = state.Parameters.GetInt(paramBase.Push(P_NUMSIZES), def.Push(P_NUMSIZES), 1);
        if (siz == 0)
        {
            // NOTE(review): this string literal spans a raw line break in the
            // source; presumably "\n" was intended -- confirm it compiles as-is.
            state.Output.Fatal("The number of sizes in the GPNodeBuilder's distribution must be >= 1. 
");
        }
        SizeDistribution = new double[siz];

        // size #0 (index "0") is never drawn; warn if the user supplied it
        if (state.Parameters.ParameterExists(paramBase.Push(P_SIZE).Push("0"), def.Push(P_SIZE).Push("0")))
        {
            state.Output.Warning("GPNodeBuilder does not use size #0 in the distribution",
                paramBase.Push(P_SIZE).Push("0"), def.Push(P_SIZE).Push("0"));
        }

        // read probabilities for sizes 1..siz; negative or missing entries
        // are coerced to 0.0 with a warning
        var sum = 0.0;
        for (var x = 0; x < siz; x++)
        {
            SizeDistribution[x] = state.Parameters.GetDouble(paramBase.Push(P_SIZE).Push("" + (x + 1)),
                def.Push(P_SIZE).Push("" + (x + 1)), 0.0f);
            if (SizeDistribution[x] < 0.0)
            {
                state.Output.Warning("Distribution value #" + x + " negative or not defined, assumed to be 0.0",
                    paramBase.Push(P_SIZE).Push("" + (x + 1)), def.Push(P_SIZE).Push("" + (x + 1)));
                SizeDistribution[x] = 0.0;
            }
            sum += SizeDistribution[x];
        }

        // a sum over 1.0 is only a warning (it gets normalized); all zeros is fatal
        if (sum > 1.0)
        {
            state.Output.Warning("Distribution sums to greater than 1.0", paramBase.Push(P_SIZE), def.Push(P_SIZE));
        }
        if (sum.Equals(0.0))
        {
            state.Output.Fatal("Distribution is all 0's", paramBase.Push(P_SIZE), def.Push(P_SIZE));
        }

        // normalize and prepare
        RandomChoice.OrganizeDistribution(SizeDistribution);
    }
}
/// <summary>
/// Normalizes and arranges the probabilities in sources so that they
/// are usable by pickRandom(...). If the sources have all zero probabilities,
/// then a uniform selection is used. Negative probabilities will
/// generate an ArithmeticException, as will an empty source array.
/// (NOTE(review): an empty array would actually fail on sources[0] with an
/// IndexOutOfRangeException before OrganizeDistribution runs -- confirm.)
/// </summary>
public static void SetupProbabilities(IBreedingSource[] sources)
{
    // sources[0] is passed as the chooser/setup argument required by this
    // OrganizeDistribution overload; 'true' permits an all-zero distribution
    // (treated as uniform, per the summary above).
    RandomChoice.OrganizeDistribution(sources, sources[0], true);
}
/// <summary>
/// Loads the selection probability of every nonterminal and terminal in this
/// function set (taken from each node's constraints), warns about suspicious
/// configurations (all-zero probabilities, or a mix of 1.0 and non-1.0
/// values), organizes each per-type probability array into a distribution,
/// and allocates the probability cache.
/// </summary>
public override void Setup(IEvolutionState state, IParameter paramBase)
{
    base.Setup(state, paramBase);

    // load our probabilities here.
    q_ny = new double[Nonterminals.Length][];
    q_ty = new double[Terminals.Length][];

    // Flags tracked across probabilities: allZeros is reset between the
    // nonterminal and terminal passes, but allOnes/noOnes span BOTH passes.
    var allOnes = true;
    var noOnes = true;
    var allZeros = true;
    var initializer = ((GPInitializer)state.Initializer);

    // first pass: nonterminal probabilities, one array per type
    for (var type = 0; type < Nonterminals.Length; type++)
    {
        q_ny[type] = new double[Nonterminals[type].Length];
        for (var x = 0; x < Nonterminals[type].Length; x++)
        {
            q_ny[type][x] = Nonterminals[type][x].Constraints(initializer).ProbabilityOfSelection;
            if (q_ny[type][x] != 0.0)
            {
                allZeros = false;
            }
            if (q_ny[type][x] == 1.0)
            {
                noOnes = false;
            }
            else
            {
                allOnes = false;
            }
        }
    }
    if (allZeros)
    {
        state.Output.Warning("In this function set, the probabilities of all nonterminal functions have a 0.0 selection probability" + " -- this will cause them all to be selected uniformly. That could be an error.", paramBase);
    }

    // BRS : TODO : Investigate the "allZeroes" logic as described below...
    // In ECJ v20 the following is reinitialized to false,
    // but I think that is a BUG because it is about to check again
    // and set it to false if any of the probabilities do NOT equal zero.
    // allZeros = false;
    // I'm setting this to true for the reason described above!
    allZeros = true;

    // second pass: terminal probabilities
    for (var type = 0; type < Terminals.Length; type++)
    {
        q_ty[type] = new double[Terminals[type].Length];
        for (var x = 0; x < Terminals[type].Length; x++)
        {
            q_ty[type][x] = Terminals[type][x].Constraints(initializer).ProbabilityOfSelection;
            if (q_ty[type][x] != 0.0)
            {
                allZeros = false;
            }
            if (q_ty[type][x] == 1.0)
            {
                noOnes = false;
            }
            else
            {
                allOnes = false;
            }
        }
    }
    if (allZeros)
    {
        // NOTE(review): this string literal spans a raw line break in the
        // source; presumably a space or "\n" was intended -- confirm it compiles.
        state.Output.Warning("In this function set, the probabilities of all terminal functions have a 0.0 selection probability" + " -- this will cause them all to be selected uniformly. 
That could be an error.", paramBase);
    }
    if (!allOnes && !noOnes)
    {
        state.Output.Warning("In this function set, there are some functions with a selection probability of 1.0," + " but not all of them. That could be an error.", paramBase);
    }

    // set up our node probabilities. Allow all zeros.
    // NOTE(review): this loop indexes q_ny with q_ty's bound, i.e. it assumes
    // Terminals.Length == Nonterminals.Length (one entry per type) -- confirm.
    for (var x = 0; x < q_ty.Length; x++)
    {
        if (q_ty[x].Length == 0)
        {
            state.Output.Warning("Function Set " + Name + " has no terminals for type number " + x + ". This may cause problems for you.");
        }
        else
        {
            RandomChoice.OrganizeDistribution(q_ty[x], true);
        }
        if (q_ny[x].Length == 0)
        {
            state.Output.Warning("Function Set " + Name + " has no nonterminals for type number " + x + ". This may cause problems for you.");
        }
        else
        {
            RandomChoice.OrganizeDistribution(q_ny[x], true);
        }
    }

    // set up cache
    p_y = new double[CACHE_SIZE][];
}
/// <summary>
/// Converts the precomputed tree counts into selection distributions.
/// ROOT_D[f][t][s] gets one UniformGPNodeStorage per candidate root node
/// (function set f, type t, tree size s) whose Prob comes from GetProb over
/// NUMTREESROOTEDBYNODE; CHILD_D[f][p][o][c] gets a per-size probability
/// array from NUMCHILDPERMUTATIONS. All-zero rows are left unorganized
/// (ROOT_D records that fact in ROOT_D_ZERO).
/// </summary>
public virtual void ComputePercentages()
{
    // load ROOT_D
    for (var f = 0; f < NUMTREESOFTYPE.Length; f++)
    {
        for (var t = 0; t < NUMTREESOFTYPE[f].Length; t++)
        {
            for (var s = 0; s < NUMTREESOFTYPE[f][t].Length; s++)
            {
                // one storage slot per node of type t in function set f
                ROOT_D[f][t][s] = new UniformGPNodeStorage[FunctionSets[f].Nodes[t].Length];
                for (var x = 0; x < ROOT_D[f][t][s].Length; x++)
                {
                    ROOT_D[f][t][s][x] = new UniformGPNodeStorage();
                    ROOT_D[f][t][s][x].Node = FunctionSets[f].Nodes[t][x];
                    // probability proportional to the number of size-s trees this node can root
                    ROOT_D[f][t][s][x].Prob = GetProb(NUMTREESROOTEDBYNODE[f][IntForNode(ROOT_D[f][t][s][x].Node)][s]);
                }
                // organize the distribution
                //System.out.PrintLn("Organizing " + f + " " + t + " " + s);
                // check to see if it's all zeros; only organize when at least
                // one probability is nonzero, otherwise flag the row as zero
                for (var x = 0; x < ROOT_D[f][t][s].Length; x++)
                {
                    if (ROOT_D[f][t][s][x].Prob != 0.0)
                    {
                        // don't need to check for negatives here I believe
                        RandomChoice.OrganizeDistribution(ROOT_D[f][t][s], ROOT_D[f][t][s][0]);
                        ROOT_D_ZERO[f][t][s] = false;
                        break;
                    }
                    else
                    {
                        ROOT_D_ZERO[f][t][s] = true;
                    }
                }
            }
        }
    }

    // load CHILD_D
    for (var f = 0; f < NUMCHILDPERMUTATIONS.Length; f++)
    {
        for (var p = 0; p < NUMCHILDPERMUTATIONS[f].Length; p++)
        {
            for (var o = 0; o < MaxTreeSize + 1; o++)
            {
                for (var c = 0; c < MaxArity; c++)
                {
                    // sizes 0..o are available to child c under outer size o
                    CHILD_D[f][p][o][c] = new double[o + 1];
                    for (var s = 0; s < CHILD_D[f][p][o][c].Length; s++)
                    {
                        CHILD_D[f][p][o][c][s] = GetProb(NUMCHILDPERMUTATIONS[f][p][s][o][c]);
                    }
                    // organize the distribution
                    //System.out.PrintLn("Organizing " + f + " " + p + " " + o + " " + c);
                    // check to see if it's all zeros; all-zero rows stay unorganized
                    for (var x = 0; x < CHILD_D[f][p][o][c].Length; x++)
                    {
                        if (CHILD_D[f][p][o][c][x] != 0.0)
                        {
                            // don't need to check for negatives here I believe
                            RandomChoice.OrganizeDistribution(CHILD_D[f][p][o][c]);
                            break;
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Offline precomputation pass. Indexes every function set and every unique
/// GPNode, determines MaxArity, allocates the counting tensors, counts (with
/// BigInteger arithmetic) how many distinct trees of each size can be built
/// per function set and type, converts those counts to double distributions
/// (TrueSizes), and finally calls ComputePercentages() to fill ROOT_D/CHILD_D.
/// This can take a long time, as the messages note.
/// </summary>
public virtual void Preprocess(IEvolutionState state, int maxTreeSize)
{
    state.Output.Message("Determining Tree Sizes");

    MaxTreeSize = maxTreeSize;

    var functionSetRepository = ((GPInitializer)state.Initializer).FunctionSetRepository;

    // Put each function set into the arrays
    FunctionSets = new GPFunctionSet[functionSetRepository.Count];
    FunctionSetsHash = Hashtable.Synchronized(new Hashtable());
    var e = functionSetRepository.Values.GetEnumerator();
    var count = 0;
    while (e.MoveNext())
    {
        var funcs = (GPFunctionSet)e.Current;
        // maps each set to its index in FunctionSets
        FunctionSetsHash[funcs] = count;
        FunctionSets[count++] = funcs;
    }

    // For each function set, assign each GPNode to a unique integer
    // so we can keep track of it (ick, this will be inefficient!)
    FuncNodesHash = Hashtable.Synchronized(new Hashtable());
    var t_nodes = Hashtable.Synchronized(new Hashtable());
    count = 0;
    MaxArity = 0;
    for (var x = 0; x < FunctionSets.Length; x++)
    {
        GPNode n;
        // hash all the nodes so we can remove duplicates
        for (var typ = 0; typ < FunctionSets[x].Nodes.Length; typ++)
        {
            for (var nod = 0; nod < FunctionSets[x].Nodes[typ].Length; nod++)
            {
                t_nodes[n = FunctionSets[x].Nodes[typ][nod]] = n;
            }
        }
        // rehash with Integers, yuck
        e = t_nodes.Values.GetEnumerator();
        GPNode tmpn;
        while (e.MoveNext())
        {
            tmpn = (GPNode)e.Current;
            // track the widest node encountered so far
            if (MaxArity < tmpn.Children.Length)
            {
                MaxArity = tmpn.Children.Length;
            }
            if (!FuncNodesHash.ContainsKey(tmpn)) // don't remap the node; it'd make holes
            {
                FuncNodesHash[tmpn] = count++;
            }
        }
    }

    NumFuncNodes = FuncNodesHash.Count;

    var initializer = (GPInitializer)state.Initializer;
    var numAtomicTypes = initializer.NumAtomicTypes;
    var numSetTypes = initializer.NumSetTypes;

    // common tensor dimension sizes
    var functionSetsLength = FunctionSets.Length;
    var atomicPlusSetTypes = numAtomicTypes + numSetTypes;
    var maxTreeSizePlusOne = MaxTreeSize + 1;

    // set up the arrays

    // NUMTREESOFTYPE
    NUMTREESOFTYPE = TensorFactory.Create<BigInteger>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);
    // NUMTREESROOTEDBYNODE
    NUMTREESROOTEDBYNODE = TensorFactory.Create<BigInteger>(functionSetsLength, NumFuncNodes, maxTreeSizePlusOne);
    // NUMCHILDPERMUTATIONS
    NUMCHILDPERMUTATIONS = TensorFactory.Create<BigInteger>(functionSetsLength, NumFuncNodes, maxTreeSizePlusOne, maxTreeSizePlusOne, MaxArity);
    // ROOT_D
    ROOT_D = TensorFactory.CreateOpenEnded<UniformGPNodeStorage>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne); // 4D OpenEnded
    // ROOT_D_ZERO
    ROOT_D_ZERO = TensorFactory.Create<bool>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);
    // CHILD_D
    CHILD_D = TensorFactory.CreateOpenEnded<double>(functionSetsLength, NumFuncNodes, maxTreeSizePlusOne, maxTreeSizePlusOne); // 5D OpenEnded

    var types = ((GPInitializer)(state.Initializer)).Types;

    // _TrueSizesBigInt
    TrueSizesBigInt = TensorFactory.Create<BigInteger>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);

    // Go through each function set and determine numbers
    // (this will take quite a while!  Thankfully it's offline)
    for (var x = 0; x < FunctionSets.Length; x++)
    {
        for (var y = 0; y < numAtomicTypes + numSetTypes; y++)
        {
            for (var z = 1; z <= MaxTreeSize; z++)
            {
                // the assignment happens inside the message expression
                state.Output.Message("FunctionSet: " + FunctionSets[x].Name + ", Type: " + types[y].Name + ", Size: " + z + " num: " + (TrueSizesBigInt[x][y][z] = NumTreesOfType(initializer, x, y, z)));
            }
        }
    }

    state.Output.Message("Compiling Distributions");

    TrueSizes = TensorFactory.Create<double>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);

    // convert to doubles and organize distribution
    for (var x = 0; x < FunctionSets.Length; x++)
    {
        for (var y = 0; y < numAtomicTypes + numSetTypes; y++)
        {
            for (var z = 1; z <= MaxTreeSize; z++)
            {
                TrueSizes[x][y][z] = (double)TrueSizesBigInt[x][y][z]; // BRS : DOES THIS TRUNCATE ANYTHING ???
            }
            // and if this is all zero (a possibility) we should be forgiving (hence the 'true') -- I *think*
            RandomChoice.OrganizeDistribution(TrueSizes[x][y], true);
        }
    }

    // compute our percentages
    ComputePercentages();
}
/// <summary>
/// Sets up all the RuleSetConstraints, loading them from the parameter
/// file. This must be called before anything is called which refers
/// to a type by Name.
/// Loads: the constraints name (registered once -- duplicates are fatal),
/// the prototypical Rule, the add/delete/reorder probabilities (each must
/// be in [0..1]), and then EITHER reset min/max sizes OR a size
/// distribution (the distribution takes precedence), plus overall
/// min/max bounds with consistency checks.
/// </summary>
public virtual void Setup(IEvolutionState state, IParameter paramBase)
{
    // What's my name?
    Name = state.Parameters.GetString(paramBase.Push(P_NAME), null);
    if (Name == null)
    {
        state.Output.Fatal("No name was given for this RuleSetConstraints.", paramBase.Push(P_NAME));
    }

    // Register me
    var tempObject = ((RuleInitializer)state.Initializer).RuleSetConstraintRepository[Name];
    ((RuleInitializer)state.Initializer).RuleSetConstraintRepository[Name] = this;
    var oldConstraints = (RuleSetConstraints)(tempObject);
    if (oldConstraints != null)
    {
        state.Output.Fatal("The rule constraints \"" + Name + "\" has been defined multiple times.", paramBase.Push(P_NAME));
    }

    // load my prototypical Rule
    RulePrototype = (Rule)(state.Parameters.GetInstanceForParameter(paramBase.Push(P_RULE), null, typeof(Rule)));
    RulePrototype.Setup(state, paramBase.Push(P_RULE));

    // mutation probabilities; each must be present and within [0, 1]
    p_add = state.Parameters.GetDouble(paramBase.Push(P_ADD_PROB), null, 0);
    if (p_add < 0 || p_add > 1)
    {
        state.Output.Fatal("Parameter not found, or its value is outside of allowed range [0..1].", paramBase.Push(P_ADD_PROB));
    }
    p_del = state.Parameters.GetDouble(paramBase.Push(P_DEL_PROB), null, 0);
    if (p_del < 0 || p_del > 1)
    {
        state.Output.Fatal("Parameter not found, or its value is outside of allowed range [0..1].", paramBase.Push(P_DEL_PROB));
    }
    p_randorder = state.Parameters.GetDouble(paramBase.Push(P_RAND_ORDER_PROB), null, 0);
    if (p_randorder < 0 || p_randorder > 1)
    {
        state.Output.Fatal("Parameter not found, or its value is outside of allowed range [0..1].", paramBase.Push(P_RAND_ORDER_PROB));
    }

    // now, we are going to load EITHER min/max size OR a size distribution, or both
    // (the size distribution takes precedence)

    // reset min and max size
    if (state.Parameters.ParameterExists(paramBase.Push(P_RESETMINSIZE), null) ||
        state.Parameters.ParameterExists(paramBase.Push(P_RESETMAXSIZE), null))
    {
        // NOTE(review): only "min without max" is reported here; when
        // P_RESETMAXSIZE exists but P_RESETMINSIZE does not, no complementary
        // error is raised -- confirm whether that asymmetry is intended.
        if (!(state.Parameters.ParameterExists(paramBase.Push(P_RESETMAXSIZE), null)))
        {
            state.Output.Error("This RuleSetConstraints has a " + P_RESETMINSIZE + " but not a " + P_RESETMAXSIZE + ".");
        }

        // GetInt is given a minimum of 0, so -1 signals a missing or
        // out-of-range value -- presumably GetInt returns min-1 on failure
        // (TODO confirm against ParameterDatabase).
        ResetMinSize = state.Parameters.GetInt(paramBase.Push(P_RESETMINSIZE), null, 0);
        if (ResetMinSize == -1)
        {
            state.Output.Error("If min&max are defined, RuleSetConstraints must have a min size >= 0.", paramBase.Push(P_RESETMINSIZE), null);
        }

        ResetMaxSize = state.Parameters.GetInt(paramBase.Push(P_RESETMAXSIZE), null, 0);
        if (ResetMaxSize == -1)
        {
            state.Output.Error("If min&max are defined, RuleSetConstraints must have a max size >= 0.", paramBase.Push(P_RESETMAXSIZE), null);
        }

        if (ResetMinSize > ResetMaxSize)
        {
            state.Output.Error("If min&max are defined, RuleSetConstraints must have min size <= max size.", paramBase.Push(P_RESETMINSIZE), null);
        }
        state.Output.ExitIfErrors();
    }

    // load SizeDistribution
    if (state.Parameters.ParameterExists(paramBase.Push(P_NUMSIZES), null))
    {
        var siz = state.Parameters.GetInt(paramBase.Push(P_NUMSIZES), null, 1);
        if (siz == 0)
        {
            // NOTE(review): this string literal spans a raw line break in the
            // source; presumably "\n" was intended -- confirm it compiles as-is.
            state.Output.Fatal("The number of sizes in the RuleSetConstraints's distribution must be >= 1. 
");
        }
        SizeDistribution = new double[siz];

        // read each size's probability; negative or missing entries become 0.0
        var sum = 0.0;
        for (var x = 0; x < siz; x++)
        {
            SizeDistribution[x] = state.Parameters.GetDouble(paramBase.Push(P_RESETSIZE).Push("" + x), null, 0.0);
            if (SizeDistribution[x] < 0.0)
            {
                state.Output.Warning("Distribution value #" + x + " negative or not defined, assumed to be 0.0",
                    paramBase.Push(P_RESETSIZE).Push("" + x), null);
                SizeDistribution[x] = 0.0f;
            }
            sum += SizeDistribution[x];
        }
        // sums over 1.0 are only a warning (normalized below); all zeros is fatal
        if (sum > 1.0)
        {
            state.Output.Warning("Distribution sums to greater than 1.0", paramBase.Push(P_RESETSIZE), null);
        }
        if (sum == 0.0)
        {
            state.Output.Fatal("Distribution is all 0's", paramBase.Push(P_RESETSIZE), null);
        }

        // normalize and prepare
        RandomChoice.OrganizeDistribution(SizeDistribution);
    }

    // overall bounds: default 0 .. int.MaxValue when absent
    MinSize = state.Parameters.ParameterExists(paramBase.Push(P_MINSIZE), null)
        ? state.Parameters.GetInt(paramBase.Push(P_MINSIZE), null, 0)
        : 0;
    MaxSize = state.Parameters.ParameterExists(paramBase.Push(P_MAXSIZE), null)
        ? state.Parameters.GetInt(paramBase.Push(P_MAXSIZE), null, 0)
        : int.MaxValue;

    // sanity checks
    if (MinSize > MaxSize)
    {
        state.Output.Fatal("Cannot have min size greater than max size : (" + MinSize + " > " + MaxSize + ")", paramBase.Push(P_MINSIZE), null);
    }

    if (SizeDistribution != null)
    {
        // with a size distribution, min must be 0 and the distribution's
        // largest size may not exceed MaxSize
        if (MinSize != 0)
        {
            state.Output.Fatal("Using size distribution, but min size is not 0", paramBase.Push(P_MINSIZE), null);
        }
        if (SizeDistribution.Length - 1 > MaxSize)
        {
            state.Output.Fatal("Using size distribution whose maximum size is higher than max size", paramBase.Push(P_MAXSIZE), null);
        }
    }
    else
    {
        // without a distribution, the reset bounds must fall within [MinSize, MaxSize]
        if (ResetMinSize < MinSize)
        {
            state.Output.Fatal("Cannot have min size greater than reset min size : (" + MinSize + " > " + ResetMinSize + ")", paramBase.Push(P_MINSIZE), null);
        }
        if (ResetMaxSize > MaxSize)
        {
            state.Output.Fatal("Cannot have max size less than reset max size : (" + MaxSize + " > " + ResetMaxSize + ")", paramBase.Push(P_MAXSIZE), null);
        }
    }
}
// don't need clone etc. -- I'll never clone with my arrays intact

#region Operations

/// <summary>
/// Splits the population, sorted by ascending fitness, into a "top" group of
/// roughly Top_N_Percent individuals (SortedFitOver) and a "bottom" group
/// holding the remainder (SortedFitUnder), then organizes each group's
/// fitnesses into a distribution. Requires a population of at least 2 and
/// non-negative fitnesses throughout.
/// </summary>
public override void PrepareToProduce(IEvolutionState state, int subpop, int thread)
{
    base.PrepareToProduce(state, subpop, thread);

    var inds = state.Population.Subpops[subpop].Individuals;

    // load SortedPop integers
    SortedPop = new int[inds.Count];
    for (var idx = 0; idx < SortedPop.Length; idx++)
    {
        SortedPop[idx] = idx;
    }

    // sort SortedPop in increasing fitness order
    QuickSort.QSort(SortedPop, new AnonymousClassSortComparatorL(inds));

    // determine my boundary -- must be at least 1 and must leave 1 over
    var boundary = (int)(SortedPop.Length * Top_N_Percent);
    if (boundary == 0)
    {
        boundary = 1;
    }
    if (boundary == SortedPop.Length)
    {
        boundary = SortedPop.Length - 1;
    }
    if (boundary == 0)
    {
        // uh oh
        state.Output.Fatal("Greedy Overselection can only be done with a population of size 2 or more (offending subpop #" + subpop + ")");
    }

    // load SortedFitOver: the top 'boundary' individuals by fitness
    SortedFitOver = new double[boundary];
    var outPos = 0;
    for (var idx = SortedPop.Length - boundary; idx < SortedPop.Length; idx++)
    {
        SortedFitOver[outPos] = inds[SortedPop[idx]].Fitness.Value;
        if (SortedFitOver[outPos] < 0)
        {
            // uh oh
            state.Output.Fatal("Discovered a negative fitness value." + " Greedy Overselection requires that all fitness values be non-negative (offending subpop #" + subpop + ")");
        }
        outPos++;
    }

    // load SortedFitUnder: everyone below the boundary
    SortedFitUnder = new double[SortedPop.Length - boundary];
    outPos = 0;
    for (var idx = 0; idx < SortedPop.Length - boundary; idx++)
    {
        SortedFitUnder[outPos] = inds[SortedPop[idx]].Fitness.Value;
        if (SortedFitUnder[outPos] < 0)
        {
            // uh oh
            state.Output.Fatal("Discovered a negative fitness value." + " Greedy Overselection requires that all fitness values be non-negative (offending subpop #" + subpop + ")");
        }
        outPos++;
    }

    // organize the distributions. All zeros in fitness is fine
    RandomChoice.OrganizeDistribution(SortedFitUnder, true);
    RandomChoice.OrganizeDistribution(SortedFitOver, true);
}