private GroupByResults SingleThreadGroupBy(RowHasher hasher, RowEqualityComparerGroupKey comparer, ITableResults resTable)
        {
            #region DECL
            hasher.SetCache(comparer.comparers);
            AggregateBucketResult[] buckets = null;
            var groups = new Dictionary <GroupDictKey, AggregateBucketResult[]>(comparer);
            TableResults.RowProxy row;
            GroupDictKey          key;
            #endregion DECL

            for (int i = 0; i < resTable.NumberOfMatchedElements; i++)
            {
                row = resTable[i];
                key = new GroupDictKey(hasher.Hash(in row), i); // It's a struct.
                if (!groups.TryGetValue(key, out buckets))
                {
                    buckets = AggregateBucketResult.CreateBucketResults(this.aggregates);
                    groups.Add(key, buckets);
                }

                for (int j = 0; j < this.aggregates.Length; j++)
                {
                    this.aggregates[j].Apply(in row, buckets[j]);
                }
            }

            return(new DictGroupDictKeyBucket(groups, resTable));
        }
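The dictionary above is keyed by GroupDictKey, which pairs the precomputed hash with the index of the row that represents the group; equality then falls back to comparing the referenced rows through the supplied comparer. A minimal, self-contained sketch of that hash-plus-row-index pattern (GroupKey and GroupKeyComparer are illustrative stand-ins, not the project's types):

// Illustrative stand-ins for GroupDictKey / RowEqualityComparerGroupKey.
using System.Collections.Generic;

readonly struct GroupKey
{
    public readonly int Hash;     // hash precomputed by the hasher
    public readonly int RowIndex; // index of the representative row
    public GroupKey(int hash, int rowIndex) { Hash = hash; RowIndex = rowIndex; }
}

sealed class GroupKeyComparer : IEqualityComparer<GroupKey>
{
    private readonly int[][] rows; // stand-in for the result table

    public GroupKeyComparer(int[][] rows) => this.rows = rows;

    // The stored hash is reused; the dictionary never recomputes it.
    public int GetHashCode(GroupKey key) => key.Hash;

    // Keys are equal only if the rows they reference agree on every grouping column.
    public bool Equals(GroupKey x, GroupKey y)
    {
        int[] a = rows[x.RowIndex], b = rows[y.RowIndex];
        for (int i = 0; i < a.Length; i++)
            if (a[i] != b[i]) return false;
        return true;
    }
}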
 public GroupJob(RowEqualityComparerGroupKey comparer, RowHasher hasher, ITableResults resTable)
 {
     this.resTable     = resTable;
     comparer.resTable = this.resTable;
     this.groups       = new Dictionary <GroupDictKey, AggregateBucketResult[]>(comparer);
     this.hasher       = hasher;
 }
Example #3
        /// <summary>
        /// Creates jobs for the parallel group by.
        /// Note that the last job in the array has its end set to the end of the result table.
        /// Each job receives a range of the result table, a hasher, a comparer (with caching enabled) and the aggregates.
        /// Note that they are all copies, because they contain private state (the hasher holds a reference to the equality comparers to enable caching when computing the hash).
        /// The comparers and hashers built in the constructor of this class are given to the last job, just like the aggregates passed to the constructor.
        /// </summary>
        private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, ExpressionComparer[] comparers, ExpressionHasher[] hashers)
        {
            GroupByJob[] jobs     = new GroupByJob[this.ThreadCount];
            int          current  = 0;
            int          addition = resTable.NumberOfMatchedElements / this.ThreadCount;

            if (addition == 0)
            {
                throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
            }

            // Set their internal cache.
            var lastComp   = RowEqualityComparerGroupKey.Factory(resTable, comparers, true);
            var lastHasher = new RowHasher(hashers);

            lastHasher.SetCache(lastComp.comparers);

            for (int i = 0; i < jobs.Length - 1; i++)
            {
                var tmpComp = lastComp.Clone(cacheResults: true);
                var tmpHash = lastHasher.Clone();
                tmpHash.SetCache(tmpComp.comparers);
                jobs[i]  = CreateJob(tmpHash, tmpComp, aggs, resTable, current, current + addition);
                current += addition;
            }

            jobs[jobs.Length - 1] = CreateJob(lastHasher, lastComp, aggs, resTable, current, resTable.NumberOfMatchedElements);
            return(jobs);
        }
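A job produced here would then be consumed by a per-thread routine that mirrors the single-threaded grouping above, restricted to the job's [start, end) range. The following is an illustrative sketch, not the project's actual worker; it assumes the job fields shown elsewhere in this section (groups, hasher, resTable, aggregates, start, end) are accessible:

// Illustrative worker sketch; field accessibility is assumed for the sake of the example.
private static void SingleThreadGroupByWork(GroupByJobBuckets job)
{
    for (int i = job.start; i < job.end; i++)
    {
        TableResults.RowProxy row = job.resTable[i];
        var key = new GroupDictKey(job.hasher.Hash(in row), i);

        if (!job.groups.TryGetValue(key, out AggregateBucketResult[] buckets))
        {
            buckets = AggregateBucketResult.CreateBucketResults(job.aggregates);
            job.groups.Add(key, buckets);
        }

        for (int j = 0; j < job.aggregates.Length; j++)
            job.aggregates[j].Apply(in row, buckets[j]);
    }
}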
Example #4
        /// <summary>
        /// Creates jobs for the parallel group by.
        /// Note that the last job in the array has its end set to the end of the result table.
        /// Each job receives a range of the result table, a hasher, a comparer and the aggregates.
        /// Note that they are all copies, because they contain private state (the hasher holds a reference to the equality comparers to enable caching when computing the hash,
        /// and the aggregates hold references to storage arrays to avoid casting in a tight loop).
        /// The comparers and hashers built in the constructor of this class are given to the last job, just like the aggregates passed to the constructor.
        /// The global dictionary receives a comparer whose internal comparers are not set as a cache for any hasher.
        /// </summary>
        private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, ExpressionComparer[] comparers, ExpressionHasher[] hashers)
        {
            GroupByJob[] jobs     = new GroupByJob[this.ThreadCount];
            int          current  = 0;
            int          addition = resTable.NumberOfMatchedElements / this.ThreadCount;

            if (addition == 0)
            {
                throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
            }

            var lastComp   = RowEqualityComparerGroupKey.Factory(resTable, comparers, true);
            var lastHasher = new RowHasher(hashers);

            lastHasher.SetCache(lastComp.comparers);

            // The global dictionary only needs a comparer whose internal comparers are not set as a cache for any hasher.
            var globalGroups = new ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]>(lastComp.Clone(cacheResults: false));

            for (int i = 0; i < jobs.Length - 1; i++)
            {
                var tmpComp = lastComp.Clone(cacheResults: true);
                var tmpHash = lastHasher.Clone();
                tmpHash.SetCache(tmpComp.comparers);
                jobs[i]  = CreateJob(tmpHash, tmpComp, aggs, resTable, current, current + addition, globalGroups);
                current += addition;
            }
            jobs[jobs.Length - 1] = CreateJob(lastHasher, lastComp, aggs, resTable, current, resTable.NumberOfMatchedElements, globalGroups);
            return(jobs);
        }
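After the local pass, each job's groups presumably have to be folded into the shared globalGroups dictionary. The sketch below is illustrative, not the project's code: GetOrAdd either publishes the local bucket array or returns the array another thread published first, in which case the local results would still need to be merged into it (the aggregates' merge API is not shown in this section, so it is only indicated by a comment):

// Illustrative merge phase for one job's local results into the shared dictionary.
private static void MergeIntoGlobalGroups(GroupByJobBuckets job,
    ConcurrentDictionary<GroupDictKey, AggregateBucketResult[]> globalGroups)
{
    foreach (KeyValuePair<GroupDictKey, AggregateBucketResult[]> group in job.groups)
    {
        AggregateBucketResult[] winner = globalGroups.GetOrAdd(group.Key, group.Value);
        if (!ReferenceEquals(winner, group.Value))
        {
            // Another thread published this group first: fold the local buckets into 'winner'
            // using whatever thread-safe merge the aggregates provide.
        }
    }
}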
        /// <summary>
        /// Creates groups and computes aggregate values for each group.
        /// </summary>
        /// <param name="equalityComparer"> Equality comparer where T is the group key; it internally computes the hash for each row from the result table.</param>
        /// <param name="resTable"> A result table from the matching clause.</param>
        /// <param name="hasher"> Hasher of rows. </param>
        /// <returns> Aggregate results. </returns>
        private GroupByResults SingleThreadGroupBy(RowHasher hasher, RowEqualityComparerGroupKey equalityComparer, ITableResults resTable)
        {
            #region DECL
            hasher.SetCache(equalityComparer.comparers);
            var aggResults = AggregateListResults.CreateListResults(this.aggregates);
            var groups     = new Dictionary <GroupDictKey, int>(equalityComparer);
            int position;
            TableResults.RowProxy row;
            GroupDictKey          key;
            #endregion DECL

            for (int i = 0; i < resTable.NumberOfMatchedElements; i++)
            {
                row = resTable[i];
                key = new GroupDictKey(hasher.Hash(in row), i); // It's a struct.
                if (!groups.TryGetValue(key, out position))
                {
                    position = groups.Count;
                    groups.Add(key, position);
                }

                for (int j = 0; j < aggregates.Length; j++)
                {
                    aggregates[j].Apply(in row, aggResults[j], position);
                }
            }

            return(new GroupByResultsList(groups, aggResults, resTable));
        }
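Unlike the bucket variant, this version assigns every group a dense integer position and stores aggregate values in growable lists indexed by that position. A minimal, self-contained sketch of that storage idea (CountListAggregate is illustrative, not the project's AggregateListResults API):

// Illustrative list-based aggregate storage: one slot per group, indexed by the dense position.
using System.Collections.Generic;

sealed class CountListAggregate
{
    private readonly List<int> counts = new List<int>();

    // 'position' is the dense group index handed out by the groups dictionary.
    public void Apply(int position)
    {
        if (position == counts.Count) counts.Add(0); // first row seen for a new group
        counts[position]++;
    }

    public int this[int position] => counts[position];
}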
Example #6
 private RowEqualityComparerInt(ITableResults resTable, ExpressionComparer[] comparers, RowHasher hasher, bool cacheResults)
 {
     this.resTable     = resTable;
     this.Comparers    = comparers;
     this.hasher       = hasher;
     this.cacheResults = cacheResults;
 }
Example #7
 protected GroupByJob(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end)
 {
     // The comparer is not stored here; derived jobs use it to build their group dictionaries.
     this.hasher     = hasher;
     this.aggregates = aggregates;
     this.resTable   = resTable;
     this.start      = start;
     this.end        = end;
 }
 public GroupJob(Aggregate[] aggregates, RowEqualityComparerGroupKey comparer, RowHasher hasher, AggregateBucketResult[] spareBuckets, ITableResults resTable)
 {
     this.resTable     = resTable;
     comparer.resTable = this.resTable;
     this.groups       = new Dictionary <GroupDictKey, int>(comparer);
     this.hasher       = hasher;
     this.spareBuckets = spareBuckets;
     this.aggResults   = AggregateListResults.CreateListResults(aggregates);
 }
Example #9
 public GroupByJob(RowHasher hasher, Aggregate[] aggregates, ITableResults resTable, int start, int end, ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]> globalGroups)
 {
     this.hasher       = hasher;
     this.aggregates   = aggregates;
     this.resTable     = resTable;
     this.start        = start;
     this.end          = end;
     this.globalGroups = globalGroups;
 }
        public TwoStepHalfStreamedBucket(QueryExpressionInfo expressionInfo, IGroupByExecutionHelper executionHelper, int columnCount, int[] usedVars) : base(expressionInfo, executionHelper, columnCount, usedVars)
        {
            this.groupJobs = new GroupJob[this.executionHelper.ThreadCount];

            // Create an initial job, comparers and hashers
            this.CreateHashersAndComparers(out ExpressionComparer[] comparers, out ExpressionHasher[] hashers);
            var firstComp   = RowEqualityComparerGroupKey.Factory(null, comparers, true);
            var firstHasher = new RowHasher(hashers);

            firstHasher.SetCache(firstComp.comparers);

            this.groupJobs[0] = new GroupJob(firstComp, firstHasher, new TableResults(this.ColumnCount, this.executionHelper.FixedArraySize, this.usedVars));
            for (int i = 1; i < this.executionHelper.ThreadCount; i++)
            {
                CloneHasherAndComparer(firstComp, firstHasher, out RowEqualityComparerGroupKey newComp, out RowHasher newHasher);
                groupJobs[i] = new GroupJob(newComp, newHasher, new TableResults(this.ColumnCount, this.executionHelper.FixedArraySize, this.usedVars));
            }

            this.globalGroups = new ConcurrentDictionary <GroupDictKeyFull, AggregateBucketResult[]>(RowEqualityComparerGroupDickKeyFull.Factory(comparers, false));
        }
 public GroupByJobBuckets(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end) : base(hasher, comparer, aggregates, resTable, start, end)
 {
     this.groups = new Dictionary <GroupDictKey, AggregateBucketResult[]>(comparer);
 }
 protected override GroupByJob CreateJob(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end)
 {
     return(new GroupByJobBuckets(hasher, comparer, aggregates, resTable, start, end));
 }
 /// <summary>
 /// Clones the comparer and hasher. Caching is always enabled, and the cloned hasher's cache is set to the cloned comparer's internal comparers.
 /// </summary>
 protected static void CloneHasherAndComparer(RowEqualityComparerGroupKey comparer, RowHasher hasher, out RowEqualityComparerGroupKey retComparer, out RowHasher retHasher)
 {
     retComparer = comparer.Clone(cacheResults: true);
     retHasher   = hasher.Clone();
     retHasher.SetCache(retComparer.comparers);
 }
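Each thread gets its own clone because the hasher/comparer pair shares a small piece of mutable state: the value of the grouping expression evaluated during hashing is cached so that the subsequent equality check does not have to evaluate it again. One possible shape of that coupling, as a self-contained illustration (CachedComparer and CachedHasher are stand-ins, and the "expression" is simply the first column of the row):

// Illustrative cache coupling between a hasher and its paired comparer (stand-in types).
sealed class CachedComparer
{
    internal int CachedRow = -1;   // row index the cached value belongs to
    internal int CachedValue;      // last evaluated grouping expression for that row

    public int Evaluate(int[] row, int rowIndex)
        => rowIndex == CachedRow ? CachedValue : row[0];
}

sealed class CachedHasher
{
    private CachedComparer cache;

    public void SetCache(CachedComparer comparer) => this.cache = comparer;

    public int Hash(int[] row, int rowIndex)
    {
        int value = row[0];        // evaluate the grouping expression once
        if (cache != null)         // let the paired comparer reuse the value afterwards
        {
            cache.CachedRow   = rowIndex;
            cache.CachedValue = value;
        }
        return value.GetHashCode();
    }
}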
Example #14
 protected abstract GroupByJob CreateJob(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end, ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]> globalGroups);
 protected override GroupByJob CreateJob(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end, ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]> globalGroups)
 {
     return(new GroupByJobBuckets(hasher, comparer, aggregates, resTable, start, end, globalGroups));
 }
Example #16
 protected abstract GroupByJob CreateJob(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end);
 public GroupByJobLists(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end) : base(hasher, comparer, aggregates, resTable, start, end)
 {
     this.groups     = new Dictionary <GroupDictKey, int>(comparer);
     this.aggResults = AggregateListResults.CreateListResults(aggregates);
 }
Example #18
        public static RowEqualityComparerInt Factory(ITableResults resTable, ExpressionComparer[] comparers, RowHasher hasher, bool cacheResults)
        {
            var newComparers = comparers.CloneHard(cacheResults);

            var newHasher = hasher.Clone();

            if (cacheResults)
            {
                newHasher.SetCache(newComparers);
            }
            else
            {
                newHasher.UnsetCache();
            }

            return(new RowEqualityComparerInt(resTable, newComparers, newHasher, cacheResults));
        }
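The comparer produced by this factory keys the dictionary directly on row indices: hashing presumably delegates to the embedded hasher and equality to the expression comparers applied to the two referenced rows. A minimal, self-contained illustration of that index-keyed pattern (IntRowComparer is a stand-in, not the project's RowEqualityComparerInt):

// Illustrative stand-in for an index-keyed row comparer.
using System;
using System.Collections.Generic;

sealed class IntRowComparer : IEqualityComparer<int>
{
    private readonly int[][] rows;          // stand-in for the result table
    private readonly Func<int[], int> hash; // stand-in for the RowHasher

    public IntRowComparer(int[][] rows, Func<int[], int> hash)
    {
        this.rows = rows;
        this.hash = hash;
    }

    // Equality compares the rows the two indices refer to, column by column.
    public bool Equals(int x, int y)
    {
        int[] a = rows[x], b = rows[y];
        for (int i = 0; i < a.Length; i++)
            if (a[i] != b[i]) return false;
        return true;
    }

    // The hash is computed from the referenced row, not from the index itself.
    public int GetHashCode(int rowIndex) => hash(rows[rowIndex]);
}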
 public GroupByJobMixListsBuckets(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end, ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]> globalGroups) : base(hasher, aggregates, resTable, start, end, globalGroups)
 {
     this.groups     = new Dictionary <GroupDictKey, int>(comparer);
     this.aggResults = AggregateListResults.CreateListResults(aggregates);
 }