/// <summary>
/// I hard-code both Produce(...) methods for efficiency's sake.
/// Fills inds[start .. start+n) with individuals drawn uniformly at random
/// from the given subpopulation.  Individuals are transferred by reference,
/// not copied.
/// </summary>
/// <param name="min">minimum number of individuals to produce</param>
/// <param name="max">maximum number of individuals to produce</param>
/// <param name="start">first slot in <paramref name="inds"/> to overwrite</param>
/// <param name="subpop">subpopulation to select from</param>
/// <param name="inds">destination list; slots start..start+n-1 are overwritten</param>
/// <param name="state">evolution state (supplies the population and the RNGs)</param>
/// <param name="thread">index of the per-thread RNG stream to use</param>
/// <param name="misc">optional bookkeeping; when it contains KEY_PARENTS, the
/// selected source index is recorded per produced slot</param>
/// <returns>the number of individuals produced: 1 clamped into [min, max]</returns>
public int Produce(
    int min,
    int max,
    int start,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    // Produce one individual by default, clamped into [min, max].
    int n = 1;
    if (n > max)
    {
        n = max;
    }
    if (n < min)
    {
        n = min;
    }

    // Hoisted out of the loop: the source list and the misc key set do not
    // change while we select, so there is no need to re-fetch them per slot.
    IList<Individual> oldinds = state.Population.Subpops[subpop].Individuals;
    bool recordParents = misc != null && misc.ContainsKey(KEY_PARENTS);

    for (int q = 0; q < n; q++)
    {
        int index = state.Random[thread].NextInt(oldinds.Count);
        inds[start + q] = oldinds[index]; // pointer transfer, not a copy
        if (recordParents)
        {
            // record which population slot this child came from
            IntBag parent = new IntBag(1);
            parent.Add(index);
            ((IntBag[])misc[KEY_PARENTS])[start + q] = parent;
        }
    }
    return (n);
}
/// <summary>
/// I hard-code both Produce(...) methods for efficiency's sake.
/// Writes the individual at index 0 of the subpopulation into each requested
/// slot of <paramref name="inds"/>.  The transfer is by reference, not a copy.
/// </summary>
/// <returns>the number of individuals produced: 1 clamped into [min, max]</returns>
public int Produce(
    int min,
    int max,
    int start,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    // One individual by default, clamped into [min, max].
    var total = 1;
    if (total > max)
    {
        total = max;
    }
    if (total < min)
    {
        total = min;
    }

    for (var slot = 0; slot < total; slot++)
    {
        var current = state.Population.Subpops[subpop].Individuals;

        // Always take individual 0 -- note it's a pointer transfer, not a copy!
        inds[start + slot] = current[0];

        if (misc != null && misc.ContainsKey(KEY_PARENTS))
        {
            // record the source slot (always 0) for this produced child
            var chosen = new IntBag(1);
            chosen.Add(0);
            ((IntBag[])misc[KEY_PARENTS])[start + slot] = chosen;
        }
    }
    return (total);
}
/// <summary>
/// Produces min clones of a single cached individual, selecting (and caching)
/// that individual -- together with its parent record -- from source 0 on the
/// first call.
/// </summary>
/// <remarks>
/// BUGFIX: the cached parent record is now read back out of the private misc1
/// dictionary that was handed to the source; it was previously read from the
/// caller's misc, which the source never wrote to.  Dictionary lookups on the
/// caller's misc also use TryGetValue now: IDictionary's indexer (unlike
/// Java's Map.get) throws KeyNotFoundException when the key is absent.
/// </remarks>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    // First things first: build our individual and his parents array
    if (Individual == null)
    {
        IDictionary<string, object> misc1 = null;
        object callerParents = null;
        if (misc != null
            && misc.TryGetValue(SelectionMethod.KEY_PARENTS, out callerParents)
            && callerParents != null)
        {
            // the user is providing a parents array. We'll need to make our own.
            var parentsArray = new IntBag[1];
            misc1 = new Dictionary<string, object>
            {
                [SelectionMethod.KEY_PARENTS] = parentsArray
            };
        }

        IList<Individual> temp = new List<Individual>();
        Sources[0].Produce(1, 1, subpop, temp, state, thread, misc1);
        Individual = temp[0];

        // Now we extract from misc1 if we have to -- misc1 is the dictionary
        // the source actually filled in, NOT the caller's misc.
        // (we already know the key is present unless it was somehow removed)
        if (misc1 != null && misc1[SelectionMethod.KEY_PARENTS] != null)
        {
            Parents = ((IntBag[])misc1[SelectionMethod.KEY_PARENTS])[0];
        }
        else
        {
            Parents = null;
        }
    }

    int start = inds.Count;

    // Now we can copy the individual in
    for (int i = 0; i < min; i++)
    {
        inds.Add((Individual)Individual.Clone());
    }

    // add in the parents if we need to
    object parentsObj = null;
    if (Parents != null
        && misc != null
        && misc.TryGetValue(SelectionMethod.KEY_PARENTS, out parentsObj)
        && parentsObj != null)
    {
        var parentsArray = (IntBag[])parentsObj;
        for (int i = 0; i < min; i++)
        {
            // each clone gets a fresh copy of the cached parent record
            parentsArray[start + i] = new IntBag(Parents);
        }
    }
    return (min);
}
/// <summary>
/// Produces between min and max individuals by repeatedly asking
/// Produce(subpop, state, thread) for a population index and appending the
/// selected individuals to <paramref name="inds"/> WITHOUT cloning them
/// (reference transfer).  Selected indices are recorded in misc[KEY_PARENTS]
/// when the caller supplies that array.
/// </summary>
/// <returns>the number of individuals appended</returns>
public virtual int ProduceWithoutCloning(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<String, Object> misc)
{
    var start = inds.Count;

    // clamp the batch size into [min, max]
    var count = INDS_PRODUCED;
    if (count < min)
    {
        count = min;
    }
    if (count > max)
    {
        count = max;
    }

    // guard against a null misc before touching it (hoisted: neither the
    // dictionary's keys nor the reference change inside the loop)
    var recordParents = misc != null && misc.ContainsKey(KEY_PARENTS);

    for (var slot = 0; slot < count; slot++)
    {
        var index = Produce(subpop, state, thread);
        inds.Add(state.Population.Subpops[subpop].Individuals[index]);
        if (recordParents)
        {
            // remember which population slot this child was drawn from
            var bag = new IntBag(1);
            bag.Add(index);
            ((IntBag[])misc[KEY_PARENTS])[start + slot] = bag;
        }
    }
    return (count);
}
/// <summary>
/// Rule-set crossover: repeatedly draws two parents from the Sources, splits
/// each corresponding ruleset into two pieces, swaps the pieces between the
/// parents, and appends the modified parents to <paramref name="inds"/> until
/// n children have been produced.
/// </summary>
/// <returns>the number of individuals produced (clamped into [min, max])</returns>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    var n = TossSecondParent ? 1 : INDS_PRODUCED;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    // should we bother?  If the Likelihood coin flip fails, pass children
    // straight through from source 0 instead of crossing over.
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return (n);
    }

    // If the caller wants parent tracking, swap a private 2-slot array into
    // misc so the Sources record which individuals they selected; the
    // caller's array is kept in preserveParents and filled in below.
    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    var initializer = (RuleInitializer)state.Initializer;

    for (var q = start; q < n + start;) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our Sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        // grab from different Sources
        else
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);

            // Selection methods hand back references into the population, so
            // clone before mutating; breeding pipelines already give us copies.
            if (!(Sources[0] is BreedingPipeline)) // it's a selection method probably
            {
                Parents[0] = (RuleIndividual)Parents[0].Clone();
            }
            if (!(Sources[1] is BreedingPipeline)) // it's a selection method probably
            {
                Parents[1] = (RuleIndividual)Parents[1].Clone();
            }
            // NOTE(review): the same-source branch above performs no such clone;
            // if Sources[0] is a selection method there, both parents may still
            // alias population members when they are modified below -- confirm
            // against the upstream ECJ RuleCrossoverPipeline.
        }

        // at this point, Parents[] contains our two selected individuals,
        // AND they're copied so we own them and can make whatever modifications
        // we like on them.

        // so we'll cross them over now.
        ((RuleIndividual)Parents[0]).PreprocessIndividual(state, thread);
        ((RuleIndividual)Parents[1]).PreprocessIndividual(state, thread);

        if (((RuleIndividual)Parents[0]).Rulesets.Length != ((RuleIndividual)Parents[1]).Rulesets.Length)
        {
            state.Output.Fatal("The number of rule sets should be identical in both parents ( "
                + ((RuleIndividual)Parents[0]).Rulesets.Length + " : "
                + ((RuleIndividual)Parents[1]).Rulesets.Length + " ).");
        }

        // for each set of rules (assume both individuals have the same number of rule sets)
        for (var x = 0; x < ((RuleIndividual)Parents[0]).Rulesets.Length; x++)
        {
            var temp = new RuleSet[2];
            // keep re-splitting until both offspring rulesets fall within the
            // min/max size constraints
            while (true)
            {
                // create two new Rulesets (initially empty)
                for (var i = 0; i < 2; i++)
                {
                    temp[i] = new RuleSet();
                }

                // split the ruleset indexed x in parent 1
                temp = ((RuleIndividual)Parents[0]).Rulesets[x].SplitIntoTwo(state, thread, temp, RuleCrossProbability);
                // now temp[0] contains rules to that must go to parent[1]

                // split the ruleset indexed x in parent 2 (append after the splitted result from previous operation)
                temp = ((RuleIndividual)Parents[1]).Rulesets[x].SplitIntoTwo(state, thread, temp, RuleCrossProbability);
                // now temp[1] contains rules that must go to parent[0]

                // ensure that there are enough rules
                if (temp[0].RuleCount >= ((RuleIndividual)Parents[0]).Rulesets[x].Constraints(initializer).MinSize
                    && temp[0].RuleCount <= ((RuleIndividual)Parents[0]).Rulesets[x].Constraints(initializer).MaxSize
                    && temp[1].RuleCount >= ((RuleIndividual)Parents[1]).Rulesets[x].Constraints(initializer).MinSize
                    && temp[1].RuleCount <= ((RuleIndividual)Parents[1]).Rulesets[x].Constraints(initializer).MaxSize)
                {
                    break;
                }
                temp = new RuleSet[2];
            }

            // copy the results in the Rulesets of the Parents
            ((RuleIndividual)Parents[0]).Rulesets[x].CopyNoClone(temp[1]);
            ((RuleIndividual)Parents[1]).Rulesets[x].CopyNoClone(temp[0]);
        }

        ((RuleIndividual)Parents[0]).PostprocessIndividual(state, thread);
        ((RuleIndividual)Parents[1]).PostprocessIndividual(state, thread);

        ((RuleIndividual)Parents[0]).Evaluated = false;
        ((RuleIndividual)Parents[1]).Evaluated = false;

        // add 'em to the population
        inds.Add(Parents[0]);
        if (preserveParents != null)
        {
            // merge both parents' source records into the first child's record
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && !TossSecondParent)
        {
            inds.Add(Parents[1]);
            if (preserveParents != null)
            {
                // NOTE(review): this second AddAll appends parentparents[1] into
                // parentparents[0] a second time -- and because the first child's
                // record aliases the very same bag, it sees the duplicates too.
                // Other pipelines in this file do not repeat the AddAll for the
                // second child; confirm against the upstream ECJ source.
                parentparents[0].AddAll(parentparents[1]);
                preserveParents[q] = parentparents[0];
            }
            q++;
        }
    }
    return (n);
}
/// <summary>
/// Internal crossover: for each individual drawn from source 0, picks two
/// subtrees WITHIN that same individual (possibly in two different trees) and
/// swaps them in place.
/// </summary>
/// <remarks>
/// BUGFIXES relative to the original:
/// (1) the modified individual is written back with inds[q] = i instead of
///     inds.Add(i) -- the individual was read out of inds[q] (source 0 had
///     already appended it), so Add() appended a duplicate reference, growing
///     inds by 2n while still returning n;
/// (2) the Tree2 bounds-check error message no longer claims "tree.0".
/// </remarks>
/// <returns>the number of individuals produced by source 0</returns>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // grab n individuals from our source and stick 'em right into inds.
    // we'll modify them from there
    var n = Sources[0].Produce(min, max, subpop, inds, state, thread, misc);

    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    // should we bother?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        return (n);
    }

    var initializer = (GPInitializer)state.Initializer;

    for (var q = start; q < n + start; q++)
    {
        var i = (GPIndividual)inds[q];

        // are our fixed tree choices in range for this individual?
        if (Tree1 != TREE_UNFIXED && (Tree1 < 0 || Tree1 >= i.Trees.Length))
        {
            // uh oh
            state.Output.Fatal("Internal Crossover Pipeline attempted to fix tree.0 to a value"
                + " which was out of bounds of the array of the individual's trees. "
                + " Check the pipeline's fixed tree values -- they may be negative"
                + " or greater than the number of trees in an individual");
        }
        if (Tree2 != TREE_UNFIXED && (Tree2 < 0 || Tree2 >= i.Trees.Length))
        {
            // uh oh (message fixed: this check concerns tree.1, not tree.0)
            state.Output.Fatal("Internal Crossover Pipeline attempted to fix tree.1 to a value"
                + " which was out of bounds of the array of the individual's trees. "
                + " Check the pipeline's fixed tree values -- they may be negative"
                + " or greater than the number of trees in an individual");
        }

        var t1 = 0;
        var t2 = 0;
        if (Tree1 == TREE_UNFIXED || Tree2 == TREE_UNFIXED)
        {
            do // pick random trees -- their GPTreeConstraints must be the same
            {
                if (Tree1 == TREE_UNFIXED)
                {
                    t1 = i.Trees.Length > 1 ? state.Random[thread].NextInt(i.Trees.Length) : 0;
                }
                else
                {
                    t1 = Tree1;
                }
                if (Tree2 == TREE_UNFIXED)
                {
                    t2 = i.Trees.Length > 1 ? state.Random[thread].NextInt(i.Trees.Length) : 0;
                }
                else
                {
                    t2 = Tree2;
                }
            } while (i.Trees[t1].Constraints(initializer) != i.Trees[t2].Constraints(initializer));
        }
        else
        {
            t1 = Tree1;
            t2 = Tree2;
            // make sure the constraints are okay
            if (i.Trees[t1].Constraints(initializer) != i.Trees[t2].Constraints(initializer))
            {
                // uh oh
                state.Output.Fatal("GP Crossover Pipeline's two tree choices are both specified by the user -- but their GPTreeConstraints are not the same");
            }
        }

        // prepare the nodeselectors
        NodeSelect0.Reset();
        NodeSelect1.Reset();

        // pick some nodes
        GPNode p1 = null;
        GPNode p2 = null;
        var res = false;
        for (var x = 0; x < NumTries; x++)
        {
            // pick a node in each chosen tree
            p1 = NodeSelect0.PickNode(state, subpop, thread, i, i.Trees[t1]);
            p2 = NodeSelect1.PickNode(state, subpop, thread, i, i.Trees[t2]);

            // make sure they're not the same node, not nested within one
            // another (when in the same tree), and each can legally fill the
            // other's spot
            res = (p1 != p2
                && (t1 != t2 || NoContainment(p1, p2))
                && VerifyPoints(initializer, p1, p2)    // 2 goes into 1
                && VerifyPoints(initializer, p2, p1));  // 1 goes into 2
            if (res)
            {
                break; // got one
            }
        }

        // if res, then it's time to cross over!
        if (res)
        {
            // swap the two subtrees' parent links and argument positions...
            var oldparent = p1.Parent;
            var oldArgPosition = p1.ArgPosition;
            p1.Parent = p2.Parent;
            p1.ArgPosition = p2.ArgPosition;
            p2.Parent = oldparent;
            p2.ArgPosition = oldArgPosition;

            // ...then hook each subtree into its new parent, which is either a
            // GPNode child slot or a tree root
            if (p1.Parent is GPNode)
            {
                ((GPNode)(p1.Parent)).Children[p1.ArgPosition] = p1;
            }
            else
            {
                ((GPTree)(p1.Parent)).Child = p1;
            }
            if (p2.Parent is GPNode)
            {
                ((GPNode)(p2.Parent)).Children[p2.ArgPosition] = p2;
            }
            else
            {
                ((GPTree)(p2.Parent)).Child = p2;
            }
            i.Evaluated = false; // we've modified it
        }

        // write the (possibly modified) individual back into its slot -- it
        // came from inds[q], so appending it again would duplicate it
        inds[q] = i;
        if (preserveParents != null)
        {
            // NOTE(review): parentparents is swapped into misc AFTER source 0
            // has already produced, so nothing visible here ever fills
            // parentparents[0]/[1]; this AddAll looks like it would throw
            // NullReferenceException whenever KEY_PARENTS is supplied --
            // confirm the intended parent-tracking protocol against the
            // pipelines that swap the array in before producing.
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = new IntBag(parentparents[0]);
        }
    }
    return (n);
}
/// <summary>
/// Size-fair GP crossover: draws two parents, picks a crossover point in
/// parent 0, finds a "similar"-sized node in parent 1 (via FindFairSizeNode
/// over a precomputed subtree-size index), and produces up to two offspring
/// by swapping the chosen subtrees into light clones of the parents.
/// </summary>
/// <returns>the number of individuals produced (clamped into [min, max])</returns>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    int n = TypicalIndsProduced;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    // should we bother?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return (n);
    }

    // If the caller wants parent tracking, swap a private 2-slot array into
    // misc so the Sources record which individuals they selected; the
    // caller's array is kept in preserveParents and filled in below.
    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    GPInitializer initializer = (GPInitializer)state.Initializer;

    for (int q = start; q < n + start; /* no increment */) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        else // grab from different sources
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);
        }

        // at this point, Parents[] contains our two selected individuals

        // are our tree values valid?
        if (Tree1 != TREE_UNFIXED && (Tree1 < 0 || Tree1 >= ((GPIndividual)Parents[0]).Trees.Length))
        {
            // uh oh
            state.Output.Fatal(
                "GP Crossover Pipeline attempted to fix tree.0 to a value which was out of bounds of the array of the individual's trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
        }
        if (Tree2 != TREE_UNFIXED && (Tree2 < 0 || Tree2 >= ((GPIndividual)Parents[1]).Trees.Length))
        {
            // uh oh
            state.Output.Fatal(
                "GP Crossover Pipeline attempted to fix tree.1 to a value which was out of bounds of the array of the individual's trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
        }

        int t1;
        int t2;
        if (Tree1 == TREE_UNFIXED || Tree2 == TREE_UNFIXED)
        {
            do // pick random trees -- their GPTreeConstraints must be the same
            {
                if (Tree1 == TREE_UNFIXED)
                {
                    if (((GPIndividual)Parents[0]).Trees.Length > 1)
                    {
                        t1 = state.Random[thread].NextInt(((GPIndividual)Parents[0]).Trees.Length);
                    }
                    else
                    {
                        t1 = 0;
                    }
                }
                else
                {
                    t1 = Tree1;
                }
                if (Tree2 == TREE_UNFIXED)
                {
                    if (((GPIndividual)Parents[1]).Trees.Length > 1)
                    {
                        t2 = state.Random[thread].NextInt(((GPIndividual)Parents[1]).Trees.Length);
                    }
                    else
                    {
                        t2 = 0;
                    }
                }
                else
                {
                    t2 = Tree2;
                }
            } while (((GPIndividual)Parents[0]).Trees[t1].Constraints(initializer) != ((GPIndividual)Parents[1]).Trees[t2].Constraints(initializer));
        }
        else
        {
            t1 = Tree1;
            t2 = Tree2;
            // make sure the constraints are okay
            if (((GPIndividual)Parents[0]).Trees[t1].Constraints(initializer) != ((GPIndividual)Parents[1]).Trees[t2].Constraints(initializer)) // uh oh
            {
                state.Output.Fatal(
                    "GP Crossover Pipeline's two tree choices are both specified by the user -- but their GPTreeConstraints are not the same");
            }
        }

        bool res1 = false;
        bool res2 = false;

        // BRS: This is kind of stupid to name it this way!
        GPTree currTree = ((GPIndividual)Parents[1]).Trees[t2];

        // pick some nodes
        GPNode p1 = null;
        GPNode p2 = null;

        // lets walk on parent2 all nodes to get subtrees for each node, doing it once for O(N) and not O(N^2)
        // because depth etc are computed and not stored
        ArrayList nodeToSubtrees = new ArrayList();
        // also Hashtable for size to List() of nodes in that size for O(1) lookup
        Hashtable sizeToNodes = new Hashtable();
        TraverseTreeForDepth(currTree.Child, nodeToSubtrees, sizeToNodes);
        // sort the ArrayList with comparator that sorts by subtrees
        nodeToSubtrees.Sort(new NodeComparator());

        for (int x = 0; x < NumTries; x++)
        {
            // pick a node in individual 1
            p1 = NodeSelect1.PickNode(state, subpop, thread, (GPIndividual)Parents[0], ((GPIndividual)Parents[0]).Trees[t1]);
            // now lets find "similar" in parent 2
            p2 = FindFairSizeNode(nodeToSubtrees, sizeToNodes, p1, currTree, state, thread);

            // check for depth and swap-compatibility limits
            res1 = VerifyPoints(initializer, p2, p1); // p2 can fill p1's spot -- order is important!
            if (n - (q - start) < 2 || TossSecondParent)
            {
                // only one child will be produced, so the second point needs no check
                res2 = true;
            }
            else
            {
                res2 = VerifyPoints(initializer, p1, p2); // p1 can fill p2's spot -- order is important!
            }

            // did we get something that had both nodes verified?
            // we reject if EITHER of them is invalid. This is what lil-gp does.
            // Koza only has numTries set to 1, so it's compatible as well.
            if (res1 && res2)
            {
                break;
            }
        }

        // at this point, res1 AND res2 are valid, OR
        // either res1 OR res2 is valid and we ran out of tries, OR
        // neither res1 nor res2 is valid and we rand out of tries.
        // So now we will transfer to a tree which has res1 or res2
        // valid, otherwise it'll just get replicated. This is
        // compatible with both Koza and lil-gp.

        // at this point I could check to see if my sources were breeding
        // pipelines -- but I'm too lazy to write that code (it's a little
        // complicated) to just swap one individual over or both over,
        // -- it might still entail some copying. Perhaps in the future.
        // It would make things faster perhaps, not requiring all that
        // cloning.

        // Create some new individuals based on the old ones -- since
        // GPTree doesn't deep-clone, this should be just fine. Perhaps we
        // should change this to proto off of the main species prototype, but
        // we have to then copy so much stuff over; it's not worth it.
        GPIndividual j1 = ((GPIndividual)Parents[0]).LightClone();
        GPIndividual j2 = null;
        if (n - (q - start) >= 2 && !TossSecondParent)
        {
            j2 = ((GPIndividual)Parents[1]).LightClone();
        }

        // Fill in various tree information that didn't get filled in there
        j1.Trees = new GPTree[((GPIndividual)Parents[0]).Trees.Length];
        if (n - (q - start) >= 2 && !TossSecondParent)
        {
            j2.Trees = new GPTree[((GPIndividual)Parents[1]).Trees.Length];
        }

        // at this point, p1 or p2, or both, may be null.
        // If not, swap one in. Else just copy the parent.
        for (int x = 0; x < j1.Trees.Length; x++)
        {
            if (x == t1 && res1) // we've got a tree with a kicking cross position!
            {
                j1.Trees[x] = ((GPIndividual)Parents[0]).Trees[x].LightClone();
                j1.Trees[x].Owner = j1;
                j1.Trees[x].Child = ((GPIndividual)Parents[0]).Trees[x].Child.CloneReplacing(p2, p1);
                j1.Trees[x].Child.Parent = j1.Trees[x];
                j1.Trees[x].Child.ArgPosition = 0;
                j1.Evaluated = false;
            } // it's changed
            else
            {
                // untouched tree: plain deep copy
                j1.Trees[x] = ((GPIndividual)Parents[0]).Trees[x].LightClone();
                j1.Trees[x].Owner = j1;
                j1.Trees[x].Child = (GPNode)((GPIndividual)Parents[0]).Trees[x].Child.Clone();
                j1.Trees[x].Child.Parent = j1.Trees[x];
                j1.Trees[x].Child.ArgPosition = 0;
            }
        }
        if (n - (q - start) >= 2 && !TossSecondParent)
        {
            for (int x = 0; x < j2.Trees.Length; x++)
            {
                if (x == t2 && res2) // we've got a tree with a kicking cross position!
                {
                    j2.Trees[x] = ((GPIndividual)Parents[1]).Trees[x].LightClone();
                    j2.Trees[x].Owner = j2;
                    j2.Trees[x].Child = ((GPIndividual)Parents[1]).Trees[x].Child.CloneReplacing(p1, p2);
                    j2.Trees[x].Child.Parent = j2.Trees[x];
                    j2.Trees[x].Child.ArgPosition = 0;
                    j2.Evaluated = false;
                } // it's changed
                else
                {
                    // untouched tree: plain deep copy
                    j2.Trees[x] = ((GPIndividual)Parents[1]).Trees[x].LightClone();
                    j2.Trees[x].Owner = j2;
                    j2.Trees[x].Child = (GPNode)((GPIndividual)Parents[1]).Trees[x].Child.Clone();
                    j2.Trees[x].Child.Parent = j2.Trees[x];
                    j2.Trees[x].Child.ArgPosition = 0;
                }
            }
        }

        // add the individuals to the population
        // by Ermo. I think this should be add
        // inds.set(q,j1);
        // Yes -- Sean
        inds.Add(j1);
        if (preserveParents != null)
        {
            // merge both parents' source records; note the second child below
            // deliberately shares the same merged bag
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && !TossSecondParent)
        {
            // by Ermo. Same reason, should changed to add
            //inds[q] = j2;
            inds.Add(j2);
            if (preserveParents != null)
            {
                preserveParents[q] = parentparents[0];
            }
            q++;
        }
    }
    return (n);
}
/// <summary>
/// Mutate-all-nodes style mutation: for each individual produced by source 0,
/// picks a node in one of its trees, generates a compatible replacement root
/// with the same children, and splices it in place.
/// </summary>
/// <returns>the number of individuals produced by source 0</returns>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // grab n individuals from our source and stick 'em right into inds.
    // we'll modify them from there
    var n = Sources[0].Produce(min, max, subpop, inds, state, thread, misc);

    // should we bother?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        return (n);
    }

    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    var initializer = ((GPInitializer)state.Initializer);

    // now let's mutate 'em
    for (var q = start; q < n + start; q++)
    {
        var i = (GPIndividual)inds[q];

        // is the fixed tree choice in range for this individual?
        if (Tree != TREE_UNFIXED && (Tree < 0 || Tree >= i.Trees.Length))
        {
            // uh oh
            state.Output.Fatal("MutateAllNodesPipeline attempted to fix Tree.0 to a value which was out of bounds of the array of the individual's Trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
        }

        int t;
        // pick random tree
        if (Tree == TREE_UNFIXED)
        {
            t = i.Trees.Length > 1 ? state.Random[thread].NextInt(i.Trees.Length) : 0;
        }
        else
        {
            t = Tree;
        }

        // prepare the NodeSelector
        NodeSelect.Reset();

        // pick a node
        // pick a node in individual 1
        var p1 = NodeSelect.PickNode(state, subpop, thread, i, i.Trees[t]);

        // generate a tree with a new root but the same children,
        // which we will replace p1 with
        var type = p1.ParentType(initializer);
        var p2 = GenerateCompatibleTree(p1, i.Trees[t].Constraints(initializer).FunctionSet, state, type, thread);
        // we'll need to set p2.ArgPosition and p2.Parent further down

        // splice the replacement into p1's old slot (a GPNode child slot or
        // the tree root)
        p2.Parent = p1.Parent;
        p2.ArgPosition = p1.ArgPosition;
        if (p2.Parent is GPNode)
        {
            ((GPNode)(p2.Parent)).Children[p2.ArgPosition] = p2;
        }
        else
        {
            ((GPTree)(p2.Parent)).Child = p2;
        }
        i.Evaluated = false; // we've modified it

        // add the new individual, replacing its previous source
        inds[q] = i;
        if (preserveParents != null)
        {
            // NOTE(review): parentparents is swapped into misc AFTER source 0
            // has already produced, so nothing visible here ever fills
            // parentparents[0]/[1]; this AddAll looks like it would throw
            // NullReferenceException whenever KEY_PARENTS is supplied --
            // confirm the intended parent-tracking protocol against the
            // pipelines that swap the array in before producing.
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = new IntBag(parentparents[0]);
        }
    }
    return (n);
}
/// <summary>
/// Chunked list/vector crossover: draws two parents, chooses one-point or
/// two-point split indices on chunk boundaries, swaps the middle segments,
/// and (if the resulting children satisfy MinChildSize within NumTries
/// attempts) commits the swap back into the parents before appending them to
/// <paramref name="inds"/>.
/// </summary>
/// <returns>the number of individuals produced (clamped into [min, max])</returns>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    var n = TypicalIndsProduced;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    // should we bother?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return (n);
    }

    // If the caller wants parent tracking, swap a private 2-slot array into
    // misc so the Sources record which individuals they selected.
    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    for (var q = start; q < n + start;) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        else // grab from different sources
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);
        }

        // determines size of parents, in terms of chunks
        var chunkSize = ((VectorSpecies)Parents[0].Species).ChunkSize;
        var size = new int[2];
        size[0] = ((VectorIndividual)Parents[0]).GenomeLength;
        size[1] = ((VectorIndividual)Parents[1]).GenomeLength;
        var sizeInChunks = new int[2];
        sizeInChunks[0] = size[0] / chunkSize;
        sizeInChunks[1] = size[1] / chunkSize;

        // variables used to split & join the children
        var minChunks = new int[2];
        var maxChunks = new int[2];

        // BRS : TODO : Change to rectangular arrays?
        var split = new int[2][];
        for (var x = 0; x < 2; x++)
        {
            split[x] = new int[2];
        }
        var pieces = new Object[2][];
        for (var x = 0; x < 2; x++)
        {
            pieces[x] = new object[2];
        }

        // determine min and max crossover segment lengths, in terms of chunks
        for (var i = 0; i < 2; i++)
        {
            minChunks[i] = (int)(sizeInChunks[i] * MinCrossoverPercentage);
            // round minCrossoverPercentage up to nearest chunk boundary
            if (size[i] % chunkSize != 0 && minChunks[i] < sizeInChunks[i])
            {
                minChunks[i]++;
            }
            maxChunks[i] = (int)(sizeInChunks[i] * MaxCrossoverPercentage);
        }

        // attempt 'num-tries' times to produce valid children (which are bigger than min-child-size)
        var validChildren = false;
        var attempts = 0;
        while (validChildren == false && attempts < NumTries)
        {
            // generate split indices for one-point (tail end used as end of segment)
            if (CrossoverType == VectorSpecies.C_ONE_POINT)
            {
                for (int i = 0; i < 2; i++)
                {
                    // select first index at most 'max_chunks' away from tail end of vector
                    split[i][0] = sizeInChunks[i] - maxChunks[i];
                    // shift back towards tail end with random value based on min/max parameters
                    // NOTE(review): when maxChunks[i] == minChunks[i] this is
                    // NextInt(0) -- confirm NextInt's contract for a zero bound.
                    split[i][0] += state.Random[thread].NextInt(maxChunks[i] - minChunks[i]);
                    // convert split from chunk numbers to array indices
                    split[i][0] *= chunkSize;
                    // select tail end chunk boundary as second split index
                    split[i][1] = sizeInChunks[i] * chunkSize;
                }
            }
            // generate split indices for two-point (both indicies have randomized positions)
            else if (CrossoverType == VectorSpecies.C_TWO_POINT)
            {
                for (var i = 0; i < 2; i++)
                {
                    // select first split index randomly
                    split[i][0] = state.Random[thread].NextInt(sizeInChunks[i] - minChunks[i]);
                    // second index must be at least 'min_chunks' after the first index
                    split[i][1] = split[i][0] + minChunks[i];
                    // add a random value up to max crossover size, without exceeding size of the parent
                    split[i][1] += state.Random[thread].NextInt(Math.Min(maxChunks[i] - minChunks[i], sizeInChunks[i] - split[i][0]));
                    // convert split from chunk numbers to array indices
                    split[i][0] *= chunkSize;
                    split[i][1] *= chunkSize;
                }
            }

            // use the split indices generated above to split the parents into pieces
            ((VectorIndividual)Parents[0]).Split(split[0], pieces[0]);
            ((VectorIndividual)Parents[1]).Split(split[1], pieces[1]);

            // create copies of the parents, swap the middle segment, and then rejoin the pieces
            // - this is done to test whether or not the resulting children are of a valid size,
            // - because we are using Object references to an undetermined array type, there is no way
            //   to cast it to the appropriate array type (i.e. short[] or double[]) to figure out the
            //   length of the pieces
            // - instead, we use the join method on copies, and let each vector type figure out its own
            //   length with the genomeLength() method
            var children = new VectorIndividual[2];
            children[0] = (VectorIndividual)Parents[0].Clone();
            children[1] = (VectorIndividual)Parents[1].Clone();

            var swap = pieces[0][1];
            pieces[0][1] = pieces[1][1];
            pieces[1][1] = swap;

            children[0].Join(pieces[0]);
            children[1].Join(pieces[1]);
            if (children[0].GenomeLength > MinChildSize && children[1].GenomeLength > MinChildSize)
            {
                validChildren = true;
            }
            attempts++;
        }

        // if the children produced were valid, updates the parents
        if (validChildren)
        {
            ((VectorIndividual)Parents[0]).Join(pieces[0]);
            ((VectorIndividual)Parents[1]).Join(pieces[1]);
            Parents[0].Evaluated = false;
            Parents[1].Evaluated = false;
        }

        // add parents to the population
        // by Ermo. is this wrong?
        // -- Okay Sean
        inds.Add(Parents[0]);
        if (preserveParents != null)
        {
            // merge both parents' source records into the first child's record
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && TossSecondParent == false)
        {
            // by Ermo. also this is wrong?
            inds.Add(Parents[1]);
            if (preserveParents != null)
            {
                // NOTE(review): this second AddAll appends parentparents[1]
                // into parentparents[0] again, and the first child's record
                // aliases that same bag, so both children end up with
                // duplicated entries.  Other pipelines in this file skip the
                // repeat -- confirm against the upstream ECJ source.
                parentparents[0].AddAll(parentparents[1]);
                preserveParents[q] = parentparents[0];
            }
            q++;
        }
    }
    return (n);
}
/// <summary>
/// Default vector crossover pipeline: draws two parents, crosses them over
/// in place via DefaultCrossover on the first parent, and appends one or two
/// of the modified parents to <paramref name="inds"/> until n children have
/// been produced.
/// </summary>
/// <returns>the number of individuals produced (clamped into [min, max])</returns>
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    var n = TypicalIndsProduced;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    // If the caller wants parent tracking, swap a private 2-slot array into
    // misc so the Sources record which individuals they selected; the
    // caller's array is restored at the bottom of this method.
    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    // should we bother?
    // should we use them straight?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        // NOTE(review): this early return skips the misc[KEY_PARENTS] restore
        // at the bottom, so the caller's dictionary is left holding the
        // private 2-slot parentparents array -- confirm this is intended.
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return (n);
    }

    for (var q = start; q < n + start;) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our Sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        // grab from different Sources
        else
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);
        }

        // at this point, Parents[] contains our two selected individuals,
        // AND they're copied so we own them and can make whatever modifications
        // we like on them.

        // so we'll cross them over now. Since this is the default pipeline,
        // we'll just do it by calling defaultCrossover on the first child
        ((VectorIndividual)Parents[0]).DefaultCrossover(state, thread, (VectorIndividual)Parents[1]);
        Parents[0].Evaluated = false;
        Parents[1].Evaluated = false;

        // add 'em to the population
        // by Ermo. this should use add instead of set, because the inds is empty, so will throw index out of bounds
        // okay -- Sean
        inds.Add(Parents[0]);
        if (preserveParents != null)
        {
            // merge both parents' source records into the first child's record
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && !TossSecondParent)
        {
            // by Ermo. same as above -- use Add here too, see the comments above
            inds.Add(Parents[1]);
            if (preserveParents != null)
            {
                // the second child gets its own copy of the merged record
                preserveParents[q] = new IntBag(parentparents[0]);
            }
            q++;
        }
    }
    if (preserveParents != null)
    {
        // put the caller's original parents array back into misc
        misc[KEY_PARENTS] = preserveParents;
    }
    return (n);
}