public void GraphWithoutSelfEdges()
{
    AdjacencyGraph g = new AdjacencyGraph(
        new QuickGraph.Providers.VertexAndEdgeProvider(),
        true);
    RandomGraph.Graph(g, 20, 100, new Random(), false);

    DepthFirstSearchAlgorithm dfs = new DepthFirstSearchAlgorithm(g);
    dfs.StartVertex += new VertexHandler(this.StartVertex);
    dfs.DiscoverVertex += new VertexHandler(this.DiscoverVertex);
    dfs.ExamineEdge += new EdgeHandler(this.ExamineEdge);
    dfs.TreeEdge += new EdgeHandler(this.TreeEdge);
    dfs.BackEdge += new EdgeHandler(this.BackEdge);
    dfs.ForwardOrCrossEdge += new EdgeHandler(this.FowardOrCrossEdge);
    dfs.FinishVertex += new VertexHandler(this.FinishVertex);

    Parents.Clear();
    DiscoverTimes.Clear();
    FinishTimes.Clear();
    m_Time = 0;

    foreach (IVertex v in g.Vertices)
    {
        Parents[v] = v;
    }

    // compute
    dfs.Compute();
    CheckDfs(g, dfs);
}
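// The "new VertexHandler(...)" wrapping above is a C# 1.x idiom; since C# 2.0 a
// method group converts to the delegate type implicitly, so "dfs.StartVertex +=
// this.StartVertex;" is equivalent. A minimal self-contained sketch of the
// event-wiring pattern (hypothetical Walker type, not the QuickGraph API):
using System;

delegate void VertexHandler(object sender, string vertex);

class Walker
{
    public event VertexHandler DiscoverVertex;

    public void Visit(string v) => DiscoverVertex?.Invoke(this, v);
}

class WiringDemo
{
    static void OnDiscover(object sender, string v) => Console.WriteLine("discovered " + v);

    static void Main()
    {
        var walker = new Walker();
        walker.DiscoverVertex += new VertexHandler(OnDiscover); // explicit, 1.x style (as above)
        walker.DiscoverVertex += OnDiscover;                    // method group, equivalent shorthand
        walker.Visit("a"); // prints "discovered a" twice
    }
}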
public void RemoveFromAllParents()
{
    foreach (Quadtree parent in Parents.ToList())
    {
        parent.Remove(this);
    }
    Parents.Clear();
}
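// Why "Parents.ToList()" above matters: List<T> enumerators throw
// InvalidOperationException if the list changes mid-iteration, so designs where
// parent.Remove(child) can call back and mutate child.Parents must iterate over a
// copy. A minimal sketch of the snapshot-before-mutate idiom (hypothetical Node
// type, not the original Quadtree):
using System.Collections.Generic;
using System.Linq;

class Node
{
    public List<Node> Parents { get; } = new List<Node>();
    public List<Node> Children { get; } = new List<Node>();

    public void Remove(Node child)
    {
        Children.Remove(child);
        child.Parents.Remove(this); // back-reference mutation: the reason for the snapshot
    }

    public void RemoveFromAllParents()
    {
        foreach (var parent in Parents.ToList()) // snapshot: safe while Parents shrinks
        {
            parent.Remove(this);
        }
        Parents.Clear(); // clears whatever back-references remain
    }
}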
/// <summary>
/// This clears the top-level dictionaries AND the parent references.
/// If a deep clear is required, call "ClearDeeply()" instead.
/// If only local entries need to be cleared
/// (to bring shadowed values into view, for example)
/// then call LocalEntries.Clear() or LocalDefaults.Clear().
/// </summary>
public void Clear()
{
    lock (_syncRoot)
    {
        _localEntries.Clear();
        _localDefaults.Clear();
        Parents.Clear();
    }
}
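// The single lock(_syncRoot) above is doing real work: it keeps other threads from
// observing a half-cleared state (entries gone, parents still attached). A minimal
// sketch of the same discipline, assuming readers take the same lock (hypothetical
// Scope type, not the original class):
using System.Collections.Generic;

class Scope
{
    private readonly object _syncRoot = new object();
    private readonly Dictionary<string, object> _localEntries = new Dictionary<string, object>();
    private readonly Dictionary<string, object> _localDefaults = new Dictionary<string, object>();

    public List<Scope> Parents { get; } = new List<Scope>();

    public void Clear()
    {
        lock (_syncRoot) // one lock spans all three mutations: no torn intermediate state
        {
            _localEntries.Clear();
            _localDefaults.Clear();
            Parents.Clear();
        }
    }

    public bool TryGetLocal(string key, out object value)
    {
        lock (_syncRoot) // readers must use the same lock for the guarantee to hold
        {
            return _localEntries.TryGetValue(key, out value);
        }
    }
}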
private void Cleanup()
{
    Reset();
    foreach (var kc in ActiveComponents.Reverse())
    {
        if (kc.Value.LastPing != InputHelper.CurrentFrame - 1)
        {
            Remove(kc.Key, kc.Value);
        }
    }
    CurrentParent = this;
    MaxChildren = 0;
    ChildrenCount = 0;
    Parents.Clear();
    _idsUsedThisFrame.Clear();
}
public void Bind()
{
    Parents.Clear();
    _words = Data.Access.GetSemanticTypes();
    foreach (SemanticType type in _words)
    {
        SemanticTypeViewModel parent = new SemanticTypeViewModel(type);
        AddNode(parent);
        if (parent.ParentId == 0)
        {
            parent.IsExpanded = true;
            Parents.Add(parent);
        }
    }
}
public void GraphWithSelfEdgesPUT(AdjacencyGraph g, int loopBound, bool self)
{
    Random rnd; //new Random();
    var choose1 = PexChoose.FromCall(this);
    rnd = choose1.ChooseValue<Random>("Random object");
    Init();
    for (int i = 0; i < loopBound; ++i)
    {
        for (int j = 0; j < i * i; ++j)
        {
            RandomGraph.Graph(g, i, j, rnd, true);
            Init();

            DepthFirstSearchAlgorithm dfs = new DepthFirstSearchAlgorithm(g);
            dfs.StartVertex += new VertexHandler(this.StartVertex);
            dfs.DiscoverVertex += new VertexHandler(this.DiscoverVertex);
            dfs.ExamineEdge += new EdgeHandler(this.ExamineEdge);
            dfs.TreeEdge += new EdgeHandler(this.TreeEdge);
            dfs.BackEdge += new EdgeHandler(this.BackEdge);
            dfs.ForwardOrCrossEdge += new EdgeHandler(this.FowardOrCrossEdge);
            dfs.FinishVertex += new VertexHandler(this.FinishVertex);

            Parents.Clear();
            DiscoverTimes.Clear();
            FinishTimes.Clear();
            m_Time = 0;

            foreach (IVertex v in g.Vertices)
            {
                Parents[v] = v;
            }

            var choose = PexChoose.FromCall(this);
            if (choose.ChooseValue<bool>("to add a self edge"))
            {
                IVertex selfEdge = RandomGraph.Vertex(g, rnd);
                g.AddEdge(selfEdge, selfEdge);
            }

            // compute
            dfs.Compute();
            CheckDfs(g, dfs);
        }
    }
}
/// <summary>
/// Time: 8 min 17 sec
/// Pattern: AAAA, Parameterized stub
/// Pex Limitations - Not able to generate any test due to the following issue:
/// <boundary> maxbranches - 40000 (maximum number of branches exceeded)
/// [execution] Please notice: A branch in the method
/// System.Collections.Hashtable+HashtableEnumerator.MoveNext was executed 5777 times;
/// please check that the code is not stuck in an infinite loop.
/// [test] (run 1) GraphWithoutSelfEdgesPUT01, pathboundsexceeded (duplicate)
/// [execution] Please notice: A branch in the method
/// System.Collections.Hashtable+HashtableEnumerator.MoveNext was executed 4344 times;
/// please check that the code is not stuck in an infinite loop.
/// [test] (run 2) GraphWithoutSelfEdgesPUT01, pathboundsexceeded (duplicate)
/// @Author: Madhuri
/// </summary>
public void GraphWithSelfEdgesPUT(AdjacencyGraph g, int loopBound)
{
    Random rnd = new Random();
    Init();
    for (int i = 0; i < loopBound; ++i)
    {
        for (int j = 0; j < i * i; ++j)
        {
            RandomGraph.Graph(g, i, j, rnd, true);

            BreadthFirstSearchAlgorithm bfs = new BreadthFirstSearchAlgorithm(g);
            bfs.InitializeVertex += new VertexHandler(this.InitializeVertex);
            bfs.DiscoverVertex += new VertexHandler(this.DiscoverVertex);
            bfs.ExamineEdge += new EdgeHandler(this.ExamineEdge);
            bfs.ExamineVertex += new VertexHandler(this.ExamineVertex);
            bfs.TreeEdge += new EdgeHandler(this.TreeEdge);
            bfs.NonTreeEdge += new EdgeHandler(this.NonTreeEdge);
            bfs.GrayTarget += new EdgeHandler(this.GrayTarget);
            bfs.BlackTarget += new EdgeHandler(this.BlackTarget);
            bfs.FinishVertex += new VertexHandler(this.FinishVertex);

            Parents.Clear();
            Distances.Clear();
            m_CurrentDistance = 0;
            m_SourceVertex = RandomGraph.Vertex(g, rnd);

            var choose = PexChoose.FromCall(this);
            if (choose.ChooseValue<bool>("to add a self edge"))
            {
                IVertex selfEdge = RandomGraph.Vertex(g, rnd);
                g.AddEdge(selfEdge, selfEdge);
            }
            // g.RemoveEdge(RandomGraph.Edge(g, rnd));

            foreach (IVertex v in g.Vertices)
            {
                Distances[v] = int.MaxValue;
                Parents[v] = v;
            }
            Distances[SourceVertex] = 0;

            bfs.Compute(SourceVertex);
            CheckBfs(g, bfs);
        }
    }
}
public void GraphWithSelfEdges()
{
    Random rnd = new Random();
    for (int i = 0; i < 10; ++i)
    {
        for (int j = 0; j < i * i; ++j)
        {
            AdjacencyGraph g = new AdjacencyGraph(
                new QuickGraph.Providers.VertexProvider(),
                new QuickGraph.Providers.EdgeProvider(),
                true);
            RandomGraph.Graph(g, i, j, rnd, true);

            BreadthFirstSearchAlgorithm bfs = new BreadthFirstSearchAlgorithm(g);
            bfs.InitializeVertex += new VertexEventHandler(this.InitializeVertex);
            bfs.DiscoverVertex += new VertexEventHandler(this.DiscoverVertex);
            bfs.ExamineEdge += new EdgeEventHandler(this.ExamineEdge);
            bfs.ExamineVertex += new VertexEventHandler(this.ExamineVertex);
            bfs.TreeEdge += new EdgeEventHandler(this.TreeEdge);
            bfs.NonTreeEdge += new EdgeEventHandler(this.NonTreeEdge);
            bfs.GrayTarget += new EdgeEventHandler(this.GrayTarget);
            bfs.BlackTarget += new EdgeEventHandler(this.BlackTarget);
            bfs.FinishVertex += new VertexEventHandler(this.FinishVertex);

            Parents.Clear();
            Distances.Clear();
            m_CurrentDistance = 0;
            m_SourceVertex = RandomGraph.Vertex(g, rnd);

            foreach (IVertex v in g.Vertices)
            {
                Distances[v] = int.MaxValue;
                Parents[v] = v;
            }
            Distances[SourceVertex] = 0;

            bfs.Compute(SourceVertex);
            CheckBfs(g, bfs);
        }
    }
}
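// The setup above is the textbook BFS initialization: every vertex starts at
// "infinity" (int.MaxValue) with itself as parent, only the source is at distance 0,
// and CheckBfs can then recognize unreached vertices and tree roots. A self-contained
// sketch of that invariant over a plain adjacency list (not the QuickGraph types):
using System.Collections.Generic;

static class BfsSketch
{
    public static Dictionary<int, int> Distances(Dictionary<int, List<int>> adjacency, int source)
    {
        var distance = new Dictionary<int, int>();
        var parent = new Dictionary<int, int>();
        foreach (var v in adjacency.Keys)
        {
            distance[v] = int.MaxValue; // "infinity": not yet reached
            parent[v] = v;              // self-parent marks "no tree edge yet"
        }
        distance[source] = 0;

        var queue = new Queue<int>();
        queue.Enqueue(source);
        while (queue.Count > 0)
        {
            int u = queue.Dequeue();
            foreach (int w in adjacency[u])
            {
                if (distance[w] == int.MaxValue) // first discovery: a tree edge
                {
                    distance[w] = distance[u] + 1;
                    parent[w] = u;
                    queue.Enqueue(w);
                }
            }
        }
        return distance; // vertices still at int.MaxValue are unreachable from source
    }
}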
protected virtual void Select()
{
    Parents.Clear();
    var competition = new List<IChromosome<TVertex, TEdge>>();
    while (Population.Count > 0)
    {
        competition.Clear();
        var competitionSize = Random.Next(2, 4);
        for (var i = 0; i < competitionSize && Population.Count > 0; i++)
        {
            competition.Add(Population.First.Value);
            Population.RemoveFirst();
        }
        competition.Sort();
        Parents.AddLast(competition.First());
    }
}
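// Select() above is tournament selection with tournaments of size 2 or 3
// (Random.Next(2, 4) has an exclusive upper bound). A self-contained sketch of the
// same scheme over any IComparable chromosome type (hypothetical, not the original
// IChromosome<TVertex, TEdge>):
using System;
using System.Collections.Generic;
using System.Linq;

static class TournamentSelection
{
    // Drains 'population' front-to-back, emitting one winner per tournament.
    public static LinkedList<T> Select<T>(LinkedList<T> population, Random random)
        where T : IComparable<T>
    {
        var parents = new LinkedList<T>();
        var competition = new List<T>();
        while (population.Count > 0)
        {
            competition.Clear();
            int size = random.Next(2, 4); // 2 or 3 entrants
            for (int i = 0; i < size && population.Count > 0; i++)
            {
                competition.Add(population.First.Value);
                population.RemoveFirst();
            }
            competition.Sort();                   // ascending by CompareTo
            parents.AddLast(competition.First()); // winner = smallest, i.e. "best" under the ordering
        }
        return parents;
    }
}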
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    int n = TypicalIndsProduced;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    // should we bother?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return n;
    }

    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    GPInitializer initializer = (GPInitializer)state.Initializer;

    for (int q = start; q < n + start; /* no increment */) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        else // grab from different sources
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);
        }

        // at this point, Parents[] contains our two selected individuals

        // are our tree values valid?
        if (Tree1 != TREE_UNFIXED && (Tree1 < 0 || Tree1 >= ((GPIndividual)Parents[0]).Trees.Length))
        {
            // uh oh
            state.Output.Fatal(
                "GP Crossover Pipeline attempted to fix tree.0 to a value which was out of bounds of the array of the individual's trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
        }
        if (Tree2 != TREE_UNFIXED && (Tree2 < 0 || Tree2 >= ((GPIndividual)Parents[1]).Trees.Length))
        {
            // uh oh
            state.Output.Fatal(
                "GP Crossover Pipeline attempted to fix tree.1 to a value which was out of bounds of the array of the individual's trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
        }

        int t1;
        int t2;
        if (Tree1 == TREE_UNFIXED || Tree2 == TREE_UNFIXED)
        {
            do // pick random trees -- their GPTreeConstraints must be the same
            {
                if (Tree1 == TREE_UNFIXED)
                {
                    if (((GPIndividual)Parents[0]).Trees.Length > 1)
                    {
                        t1 = state.Random[thread].NextInt(((GPIndividual)Parents[0]).Trees.Length);
                    }
                    else
                    {
                        t1 = 0;
                    }
                }
                else
                {
                    t1 = Tree1;
                }

                if (Tree2 == TREE_UNFIXED)
                {
                    if (((GPIndividual)Parents[1]).Trees.Length > 1)
                    {
                        t2 = state.Random[thread].NextInt(((GPIndividual)Parents[1]).Trees.Length);
                    }
                    else
                    {
                        t2 = 0;
                    }
                }
                else
                {
                    t2 = Tree2;
                }
            } while (((GPIndividual)Parents[0]).Trees[t1].Constraints(initializer)
                     != ((GPIndividual)Parents[1]).Trees[t2].Constraints(initializer));
        }
        else
        {
            t1 = Tree1;
            t2 = Tree2;
            // make sure the constraints are okay
            if (((GPIndividual)Parents[0]).Trees[t1].Constraints(initializer)
                != ((GPIndividual)Parents[1]).Trees[t2].Constraints(initializer)) // uh oh
            {
                state.Output.Fatal(
                    "GP Crossover Pipeline's two tree choices are both specified by the user -- but their GPTreeConstraints are not the same");
            }
        }

        bool res1 = false;
        bool res2 = false;

        // BRS: This is kind of stupid to name it this way!
        GPTree currTree = ((GPIndividual)Parents[1]).Trees[t2];

        // pick some nodes
        GPNode p1 = null;
        GPNode p2 = null;

        // let's walk all nodes of parent2 to get subtrees for each node, doing it once for O(N) and not O(N^2),
        // because depth etc. are computed and not stored
        ArrayList nodeToSubtrees = new ArrayList();
        // also a Hashtable from size to a list of nodes of that size, for O(1) lookup
        Hashtable sizeToNodes = new Hashtable();
        TraverseTreeForDepth(currTree.Child, nodeToSubtrees, sizeToNodes);
        // sort the ArrayList with a comparator that sorts by subtree size
        nodeToSubtrees.Sort(new NodeComparator());

        for (int x = 0; x < NumTries; x++)
        {
            // pick a node in individual 1
            p1 = NodeSelect1.PickNode(state, subpop, thread, (GPIndividual)Parents[0], ((GPIndividual)Parents[0]).Trees[t1]);
            // now let's find a "similar" node in parent 2
            p2 = FindFairSizeNode(nodeToSubtrees, sizeToNodes, p1, currTree, state, thread);

            // check for depth and swap-compatibility limits
            res1 = VerifyPoints(initializer, p2, p1); // p2 can fill p1's spot -- order is important!
            if (n - (q - start) < 2 || TossSecondParent)
            {
                res2 = true;
            }
            else
            {
                res2 = VerifyPoints(initializer, p1, p2); // p1 can fill p2's spot -- order is important!
            }

            // did we get something that had both nodes verified?
            // we reject if EITHER of them is invalid. This is what lil-gp does.
            // Koza only has numTries set to 1, so it's compatible as well.
            if (res1 && res2)
            {
                break;
            }
        }

        // at this point, res1 AND res2 are valid, OR
        // either res1 OR res2 is valid and we ran out of tries, OR
        // neither res1 nor res2 is valid and we ran out of tries.
        // So now we will transfer to a tree which has res1 or res2
        // valid, otherwise it'll just get replicated. This is
        // compatible with both Koza and lil-gp.

        // at this point I could check to see if my sources were breeding
        // pipelines -- but I'm too lazy to write that code (it's a little
        // complicated) to just swap one individual over or both over,
        // -- it might still entail some copying. Perhaps in the future.
        // It would make things faster perhaps, not requiring all that cloning.

        // Create some new individuals based on the old ones -- since
        // GPTree doesn't deep-clone, this should be just fine. Perhaps we
        // should change this to proto off of the main species prototype,
        // but we have to then copy so much stuff over; it's not worth it.
        GPIndividual j1 = ((GPIndividual)Parents[0]).LightClone();
        GPIndividual j2 = null;
        if (n - (q - start) >= 2 && !TossSecondParent)
        {
            j2 = ((GPIndividual)Parents[1]).LightClone();
        }

        // Fill in various tree information that didn't get filled in there
        j1.Trees = new GPTree[((GPIndividual)Parents[0]).Trees.Length];
        if (n - (q - start) >= 2 && !TossSecondParent)
        {
            j2.Trees = new GPTree[((GPIndividual)Parents[1]).Trees.Length];
        }

        // at this point, p1 or p2, or both, may be null.
        // If not, swap one in. Else just copy the parent.
        for (int x = 0; x < j1.Trees.Length; x++)
        {
            if (x == t1 && res1) // we've got a tree with a kicking cross position!
            {
                j1.Trees[x] = ((GPIndividual)Parents[0]).Trees[x].LightClone();
                j1.Trees[x].Owner = j1;
                j1.Trees[x].Child = ((GPIndividual)Parents[0]).Trees[x].Child.CloneReplacing(p2, p1);
                j1.Trees[x].Child.Parent = j1.Trees[x];
                j1.Trees[x].Child.ArgPosition = 0;
                j1.Evaluated = false;
            } // it's changed
            else
            {
                j1.Trees[x] = ((GPIndividual)Parents[0]).Trees[x].LightClone();
                j1.Trees[x].Owner = j1;
                j1.Trees[x].Child = (GPNode)((GPIndividual)Parents[0]).Trees[x].Child.Clone();
                j1.Trees[x].Child.Parent = j1.Trees[x];
                j1.Trees[x].Child.ArgPosition = 0;
            }
        }

        if (n - (q - start) >= 2 && !TossSecondParent)
        {
            for (int x = 0; x < j2.Trees.Length; x++)
            {
                if (x == t2 && res2) // we've got a tree with a kicking cross position!
                {
                    j2.Trees[x] = ((GPIndividual)Parents[1]).Trees[x].LightClone();
                    j2.Trees[x].Owner = j2;
                    j2.Trees[x].Child = ((GPIndividual)Parents[1]).Trees[x].Child.CloneReplacing(p1, p2);
                    j2.Trees[x].Child.Parent = j2.Trees[x];
                    j2.Trees[x].Child.ArgPosition = 0;
                    j2.Evaluated = false;
                } // it's changed
                else
                {
                    j2.Trees[x] = ((GPIndividual)Parents[1]).Trees[x].LightClone();
                    j2.Trees[x].Owner = j2;
                    j2.Trees[x].Child = (GPNode)((GPIndividual)Parents[1]).Trees[x].Child.Clone();
                    j2.Trees[x].Child.Parent = j2.Trees[x];
                    j2.Trees[x].Child.ArgPosition = 0;
                }
            }
        }

        // add the individuals to the population
        // by Ermo. I think this should be add
        // inds.set(q, j1);
        // Yes -- Sean
        inds.Add(j1);
        if (preserveParents != null)
        {
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && !TossSecondParent)
        {
            // by Ermo. Same reason, should be changed to add
            // inds[q] = j2;
            inds.Add(j2);
            if (preserveParents != null)
            {
                preserveParents[q] = parentparents[0];
            }
            q++;
        }
    }
    return n;
}
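// The "should be add" comments above are about IList<T> semantics: inds arrives with
// Count == start, so the indexer (inds[q] = j1) throws ArgumentOutOfRangeException for
// any q >= Count, while Add appends and grows the list. A minimal illustration:
using System;
using System.Collections.Generic;

class AddVersusIndexer
{
    static void Main()
    {
        IList<int> inds = new List<int>();
        // inds[0] = 42;  // would throw ArgumentOutOfRangeException: index 0 does not exist yet
        inds.Add(42);     // appends, growing Count from 0 to 1
        Console.WriteLine(inds.Count); // 1
    }
}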
private void AddStudent()
{
    if (!AddNewMode)
    {
        Person = SelectedPerson;
    }
    LastModule = SelectedGroup?.LastModule;
    if (Person != null && LastModule != null)
    {
        using (var _context = new ApplicationContext())
        {
            var person = _context.Persons.FirstOrDefault(x => x.ID == Person.ID);
            if (person == null)
            {
                Parents = ParentPicker.Parents;
                person = new Person()
                {
                    FirstName = Person.FirstName,
                    SecondName = Person.SecondName,
                    Patronymic = Person.Patronymic,
                    Phone = Person.Phone
                };
                _context.Entry(person).State = EntityState.Added;
                _context.Persons.Add(person);
                foreach (var parent in Parents)
                {
                    if (!_context.Parents.Any(x => x.ID == parent.ID))
                    {
                        _context.Entry(parent).State = EntityState.Added;
                        _context.Parents.Add(parent);
                    }
                    else
                    {
                        _context.Entry(parent).State = EntityState.Unchanged;
                    }
                    var newPair = _context.PersonParents.Add(new PersonParent()
                    {
                        Parent = parent,
                        Person = person
                    });
                    _context.Entry(newPair).State = EntityState.Added;
                }
            }
            var student = new Student()
            {
                DateStart = LastModule.DateStart,
                Balance = 0,
                Module_ID = LastModule.ID,
                Person = person
            };
            _context.Students.Add(student);
            _context.SaveChanges();
            EventsManager.RaiseObjectChangedEvent(student, ChangeType.Added);
        }
    }
    Person = new Person();
    Parents.Clear();
}
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    var n = TypicalIndsProduced;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    // should we bother?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return n;
    }

    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    for (var q = start; q < n + start; /* no increment */) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        else // grab from different sources
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);
        }

        // determine size of parents, in terms of chunks
        var chunkSize = ((VectorSpecies)Parents[0].Species).ChunkSize;
        var size = new int[2];
        size[0] = ((VectorIndividual)Parents[0]).GenomeLength;
        size[1] = ((VectorIndividual)Parents[1]).GenomeLength;
        var sizeInChunks = new int[2];
        sizeInChunks[0] = size[0] / chunkSize;
        sizeInChunks[1] = size[1] / chunkSize;

        // variables used to split & join the children
        var minChunks = new int[2];
        var maxChunks = new int[2];
        // BRS : TODO : Change to rectangular arrays?
        var split = new int[2][];
        for (var x = 0; x < 2; x++)
        {
            split[x] = new int[2];
        }
        var pieces = new Object[2][];
        for (var x = 0; x < 2; x++)
        {
            pieces[x] = new object[2];
        }

        // determine min and max crossover segment lengths, in terms of chunks
        for (var i = 0; i < 2; i++)
        {
            minChunks[i] = (int)(sizeInChunks[i] * MinCrossoverPercentage);
            // round minCrossoverPercentage up to nearest chunk boundary
            if (size[i] % chunkSize != 0 && minChunks[i] < sizeInChunks[i])
            {
                minChunks[i]++;
            }
            maxChunks[i] = (int)(sizeInChunks[i] * MaxCrossoverPercentage);
        }

        // attempt 'num-tries' times to produce valid children (which are bigger than min-child-size)
        var validChildren = false;
        var attempts = 0;
        while (validChildren == false && attempts < NumTries)
        {
            // generate split indices for one-point (tail end used as end of segment)
            if (CrossoverType == VectorSpecies.C_ONE_POINT)
            {
                for (int i = 0; i < 2; i++)
                {
                    // select first index at most 'max_chunks' away from tail end of vector
                    split[i][0] = sizeInChunks[i] - maxChunks[i];
                    // shift back towards tail end with random value based on min/max parameters
                    split[i][0] += state.Random[thread].NextInt(maxChunks[i] - minChunks[i]);
                    // convert split from chunk numbers to array indices
                    split[i][0] *= chunkSize;
                    // select tail end chunk boundary as second split index
                    split[i][1] = sizeInChunks[i] * chunkSize;
                }
            }
            // generate split indices for two-point (both indices have randomized positions)
            else if (CrossoverType == VectorSpecies.C_TWO_POINT)
            {
                for (var i = 0; i < 2; i++)
                {
                    // select first split index randomly
                    split[i][0] = state.Random[thread].NextInt(sizeInChunks[i] - minChunks[i]);
                    // second index must be at least 'min_chunks' after the first index
                    split[i][1] = split[i][0] + minChunks[i];
                    // add a random value up to max crossover size, without exceeding size of the parent
                    split[i][1] += state.Random[thread].NextInt(Math.Min(maxChunks[i] - minChunks[i], sizeInChunks[i] - split[i][0]));
                    // convert split from chunk numbers to array indices
                    split[i][0] *= chunkSize;
                    split[i][1] *= chunkSize;
                }
            }

            // use the split indices generated above to split the parents into pieces
            ((VectorIndividual)Parents[0]).Split(split[0], pieces[0]);
            ((VectorIndividual)Parents[1]).Split(split[1], pieces[1]);

            // create copies of the parents, swap the middle segment, and then rejoin the pieces
            // - this is done to test whether or not the resulting children are of a valid size,
            // - because we are using Object references to an undetermined array type, there is no way
            //   to cast it to the appropriate array type (i.e. short[] or double[]) to figure out the
            //   length of the pieces
            // - instead, we use the join method on copies, and let each vector type figure out its own
            //   length with the genomeLength() method
            var children = new VectorIndividual[2];
            children[0] = (VectorIndividual)Parents[0].Clone();
            children[1] = (VectorIndividual)Parents[1].Clone();

            var swap = pieces[0][1];
            pieces[0][1] = pieces[1][1];
            pieces[1][1] = swap;

            children[0].Join(pieces[0]);
            children[1].Join(pieces[1]);
            if (children[0].GenomeLength > MinChildSize && children[1].GenomeLength > MinChildSize)
            {
                validChildren = true;
            }
            attempts++;
        }

        // if the children produced were valid, update the parents
        if (validChildren)
        {
            ((VectorIndividual)Parents[0]).Join(pieces[0]);
            ((VectorIndividual)Parents[1]).Join(pieces[1]);
            Parents[0].Evaluated = false;
            Parents[1].Evaluated = false;
        }

        // add parents to the population
        // by Ermo. is this wrong?
        // -- Okay Sean
        inds.Add(Parents[0]);
        if (preserveParents != null)
        {
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && TossSecondParent == false)
        {
            // by Ermo. also, is this wrong?
            inds.Add(Parents[1]);
            if (preserveParents != null)
            {
                parentparents[0].AddAll(parentparents[1]);
                preserveParents[q] = parentparents[0];
            }
            q++;
        }
    }
    return n;
}
public override int Produce(
    int min,
    int max,
    int subpop,
    IList<Individual> inds,
    IEvolutionState state,
    int thread,
    IDictionary<string, object> misc)
{
    int start = inds.Count;

    // how many individuals should we make?
    var n = TypicalIndsProduced;
    if (n < min)
    {
        n = min;
    }
    if (n > max)
    {
        n = max;
    }

    IntBag[] parentparents = null;
    IntBag[] preserveParents = null;
    if (misc != null && misc.ContainsKey(KEY_PARENTS))
    {
        preserveParents = (IntBag[])misc[KEY_PARENTS];
        parentparents = new IntBag[2];
        misc[KEY_PARENTS] = parentparents;
    }

    // should we bother?
    // should we use them straight?
    if (!state.Random[thread].NextBoolean(Likelihood))
    {
        // just load from source 0 and clone 'em
        Sources[0].Produce(n, n, subpop, inds, state, thread, misc);
        return n;
    }

    for (var q = start; q < n + start;) // keep on going until we're filled up
    {
        Parents.Clear();

        // grab two individuals from our Sources
        if (Sources[0] == Sources[1]) // grab from the same source
        {
            Sources[0].Produce(2, 2, subpop, Parents, state, thread, misc);
        }
        else // grab from different Sources
        {
            Sources[0].Produce(1, 1, subpop, Parents, state, thread, misc);
            Sources[1].Produce(1, 1, subpop, Parents, state, thread, misc);
        }

        // at this point, Parents[] contains our two selected individuals,
        // AND they're copied so we own them and can make whatever modifications
        // we like on them.

        // so we'll cross them over now. Since this is the default pipeline,
        // we'll just do it by calling defaultCrossover on the first child
        ((VectorIndividual)Parents[0]).DefaultCrossover(state, thread, (VectorIndividual)Parents[1]);
        Parents[0].Evaluated = false;
        Parents[1].Evaluated = false;

        // add 'em to the population
        // by Ermo. this should use Add instead of the indexer, because inds is empty,
        // so the indexer would throw an index-out-of-bounds exception
        // okay -- Sean
        inds.Add(Parents[0]);
        if (preserveParents != null)
        {
            parentparents[0].AddAll(parentparents[1]);
            preserveParents[q] = parentparents[0];
        }
        q++;
        if (q < n + start && !TossSecondParent)
        {
            // by Ermo. same as above, see the comments there
            inds.Add(Parents[1]);
            if (preserveParents != null)
            {
                preserveParents[q] = new IntBag(parentparents[0]);
            }
            q++;
        }
    }

    if (preserveParents != null)
    {
        misc[KEY_PARENTS] = preserveParents;
    }
    return n;
}
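// The clamping that opens every Produce override above (if (n < min) n = min;
// if (n > max) n = max;) can be written as one call on .NET Core 2.0+ /
// .NET Standard 2.1+ via Math.Clamp, assuming min <= max (Math.Clamp throws
// ArgumentException otherwise). A hedged sketch:
using System;

class ClampSketch
{
    static int IndsToProduce(int typicalIndsProduced, int min, int max)
    {
        return Math.Clamp(typicalIndsProduced, min, max);
    }

    static void Main()
    {
        Console.WriteLine(IndsToProduce(5, 1, 3)); // 3
        Console.WriteLine(IndsToProduce(0, 1, 3)); // 1
        Console.WriteLine(IndsToProduce(2, 1, 3)); // 2
    }
}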