/// <summary>
/// Creates jobs for the parallel group by.
/// Note that the last job in the array has the end set to the end of the result table.
/// Each job will receive a range from the result table, a hasher, a comparer and aggregates.
/// Note that they are all copies, because they contain a private state (the hasher contains a reference
/// to the equality comparers to enable caching when computing the hash, the aggregates contain
/// references to storage arrays to avoid casting in a tight loop).
/// The comparers and hashers built in the constructor of this class are given to the last job,
/// just like the aggregates passed to the constructor.
/// The global Dictionary receives a comparer that has no internal comparers set as a cache to some hasher.
/// </summary>
private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, ExpressionComparer[] comparers, ExpressionHasher[] hashers)
{
    GroupByJob[] jobs = new GroupByJob[this.ThreadCount];
    int current = 0;
    // Integer division: any remainder rows are picked up by the last job below.
    int addition = resTable.NumberOfMatchedElements / this.ThreadCount;
    if (addition == 0)
    {
        throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
    }

    // Build the comparer/hasher pair handed to the last job; the hasher caches through
    // the comparer's internal comparers so hash computation can reuse comparison results.
    var lastComp = RowEqualityComparerGroupKey.Factory(resTable, comparers, true);
    var lastHasher = new RowHasher(hashers);
    lastHasher.SetCache(lastComp.comparers);

    // The global dictionary needs only a comparer whose internal comparers are NOT set
    // as a cache to any hasher, hence the non-caching clone.
    var globalGroups = new ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]>(lastComp.Clone(cacheResults: false));

    for (int i = 0; i < jobs.Length - 1; i++)
    {
        // Each job gets its own clones; the clone's hasher must cache through the
        // clone's comparers (not the originals), so SetCache is re-wired here.
        var tmpComp = lastComp.Clone(cacheResults: true);
        var tmpHash = lastHasher.Clone();
        tmpHash.SetCache(tmpComp.comparers);
        jobs[i] = CreateJob(tmpHash, tmpComp, aggs, resTable, current, current + addition, globalGroups);
        current += addition;
    }

    // The last job receives the original (un-cloned) pair and extends to the table end.
    jobs[jobs.Length - 1] = CreateJob(lastHasher, lastComp, aggs, resTable, current, resTable.NumberOfMatchedElements, globalGroups);
    return(jobs);
}
/// <summary>
/// Creates jobs for the parallel group by.
/// Note that the last job in the array has the end set to the end of the result table.
/// Each job will receive a range from the result table, a hasher, a comparer (cache on) and aggregates.
/// Note that they are all copies, because they contain a private state (the hasher contains a reference
/// to the equality comparers to enable caching when computing the hash).
/// The comparers and hashers built in the constructor of this class are given to the last job,
/// just like the aggregates passed to the constructor.
/// </summary>
private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, ExpressionComparer[] comparers, ExpressionHasher[] hashers)
{
    GroupByJob[] jobs = new GroupByJob[this.ThreadCount];
    int current = 0;
    // Integer division: any remainder rows are picked up by the last job below.
    int addition = resTable.NumberOfMatchedElements / this.ThreadCount;
    if (addition == 0)
    {
        throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
    }

    // Build the comparer/hasher pair handed to the last job and wire the hasher's cache
    // to the comparer's internal comparers.
    var lastComp = RowEqualityComparerGroupKey.Factory(resTable, comparers, true);
    var lastHasher = new RowHasher(hashers);
    lastHasher.SetCache(lastComp.comparers);

    for (int i = 0; i < jobs.Length - 1; i++)
    {
        // Each job gets its own clones; the clone's hasher must cache through the
        // clone's comparers (not the originals), so SetCache is re-wired here.
        var tmpComp = lastComp.Clone(cacheResults: true);
        var tmpHash = lastHasher.Clone();
        tmpHash.SetCache(tmpComp.comparers);
        jobs[i] = CreateJob(tmpHash, tmpComp, aggs, resTable, current, current + addition);
        current += addition;
    }

    // The last job receives the original (un-cloned) pair and extends to the table end.
    jobs[jobs.Length - 1] = CreateJob(lastHasher, lastComp, aggs, resTable, current, resTable.NumberOfMatchedElements);
    return(jobs);
}
/// <summary>
/// Groups the given result table on a single thread.
/// Builds fresh comparers and hashers and delegates to the single-threaded grouping routine,
/// handing it a hasher, an equality comparer (with result caching enabled) and the table.
/// </summary>
public override GroupByResults Group(ITableResults resTable)
{
    this.CreateHashersAndComparers(out ExpressionComparer[] expressionComparers, out ExpressionHasher[] expressionHashers);

    var equalityComparer = RowEqualityComparerGroupKey.Factory(resTable, expressionComparers, true);
    var rowHasher = new RowHasher(expressionHashers);

    return this.SingleThreadGroupBy(rowHasher, equalityComparer, resTable);
}
/// <summary>
/// Constructs the half-streamed bucket group-by: one group job per thread.
/// The comparers and hashers built here are handed to the first job; every other job
/// receives clones of them (clones are needed because each hasher caches through its
/// own comparer's internal comparers). The global dictionary gets a non-caching
/// full-key comparer built from the same expression comparers.
/// </summary>
public TwoStepHalfStreamedBucket(QueryExpressionInfo expressionInfo, IGroupByExecutionHelper executionHelper, int columnCount, int[] usedVars) : base(expressionInfo, executionHelper, columnCount, usedVars)
{
    this.groupJobs = new GroupJob[this.executionHelper.ThreadCount];

    // Build the initial comparer/hasher pair and wire the hasher's cache
    // to the comparer's internal comparers.
    this.CreateHashersAndComparers(out ExpressionComparer[] comparers, out ExpressionHasher[] hashers);
    var baseComparer = RowEqualityComparerGroupKey.Factory(null, comparers, true);
    var baseHasher = new RowHasher(hashers);
    baseHasher.SetCache(baseComparer.comparers);

    // The first job keeps the originals; the remaining jobs work with clones.
    this.groupJobs[0] = new GroupJob(baseComparer, baseHasher, new TableResults(this.ColumnCount, this.executionHelper.FixedArraySize, this.usedVars));
    for (int jobIndex = 1; jobIndex < this.executionHelper.ThreadCount; jobIndex++)
    {
        CloneHasherAndComparer(baseComparer, baseHasher, out RowEqualityComparerGroupKey clonedComparer, out RowHasher clonedHasher);
        groupJobs[jobIndex] = new GroupJob(clonedComparer, clonedHasher, new TableResults(this.ColumnCount, this.executionHelper.FixedArraySize, this.usedVars));
    }

    this.globalGroups = new ConcurrentDictionary<GroupDictKeyFull, AggregateBucketResult[]>(RowEqualityComparerGroupDickKeyFull.Factory(comparers, false));
}