/// <summary>
/// Creates jobs for the parallel group by.
/// Note that the last job in the array has its end set to the end of the result table.
/// Each job receives a range of the result table, a hasher, a comparer (cache on) and the aggregates.
/// Note that they are all copies, because they contain private state (the hasher holds references to the equality comparers to enable caching when computing the hash).
/// The comparers and hashers built in the constructor of this class are given to the last job, just like the aggregates passed to the constructor.
/// </summary>
private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, ExpressionComparer[] comparers, ExpressionHasher[] hashers)
{
    GroupByJob[] jobs = new GroupByJob[this.ThreadCount];
    int current = 0;
    int addition = resTable.NumberOfMatchedElements / this.ThreadCount;
    if (addition == 0)
    {
        throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
    }

    // Set their internal cache.
    var lastComp = RowEqualityComparerGroupKey.Factory(resTable, comparers, true);
    var lastHasher = new RowHasher(hashers);
    lastHasher.SetCache(lastComp.comparers);

    for (int i = 0; i < jobs.Length - 1; i++)
    {
        var tmpComp = lastComp.Clone(cacheResults: true);
        var tmpHash = lastHasher.Clone();
        tmpHash.SetCache(tmpComp.comparers);
        jobs[i] = CreateJob(tmpHash, tmpComp, aggs, resTable, current, current + addition);
        current += addition;
    }
    jobs[jobs.Length - 1] = CreateJob(lastHasher, lastComp, aggs, resTable, current, resTable.NumberOfMatchedElements);
    return jobs;
}
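// Illustrative sketch (not from the original codebase): how the ranges handed to the jobs
// above cover the result table. The names SplitIntoRanges, elementCount and threadCount are
// assumptions used only for this example. Integer division leaves a remainder, which is why
// the last job is given the true end of the table instead of current + addition.
public static (int start, int end)[] SplitIntoRanges(int elementCount, int threadCount)
{
    var ranges = new (int start, int end)[threadCount];
    int addition = elementCount / threadCount;   // size of every range except the last one
    int current = 0;
    for (int i = 0; i < threadCount - 1; i++)
    {
        ranges[i] = (current, current + addition);
        current += addition;
    }
    // The last range absorbs the remainder of the integer division.
    ranges[threadCount - 1] = (current, elementCount);
    return ranges;
}
// For example, SplitIntoRanges(10, 3) yields (0,3), (3,6), (6,10).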
public GroupByJob(Aggregate[] aggregates, ITableResults resTable, int start, int end) { this.aggregates = aggregates; this.resTable = resTable; this.start = start; this.end = end; }
public override GroupByResults Group(ITableResults resTable)
{
    //if (this.InParallel) throw new ArgumentException($"{this.GetType()}, cannot perform a parallel group by.");
    CreateHashersAndComparers(out ExpressionComparer[] comparers, out ExpressionHasher[] hashers);
    return this.SingleThreadGroupBy(new RowHasher(hashers), RowEqualityComparerGroupKey.Factory(resTable, comparers, true), resTable);
}
public override GroupByResults Group(ITableResults resTable)
{
    // Create hashers and equality comparers.
    // The hashers also receive the equality comparers as a cache.
    CreateHashersAndComparers(out ExpressionComparer[] comparers, out ExpressionHasher[] hashers);
    return this.ParallelGroupBy(RowEqualityComparerInt.Factory(resTable, comparers, new RowHasher(hashers), false), resTable);
}
private GroupByResults CreateGroupByResults(AggregateBucketResult[] bucket, ITableResults resTable) { var tmpDict = new Dictionary <GroupDictKey, AggregateBucketResult[]>(); tmpDict.Add(new GroupDictKey(0, 0), bucket); return(new DictGroupDictKeyBucket(tmpDict, resTable)); }
public Season(ITableResults TableResults, string FileLocation) { this.TableResults = TableResults; this.FileLocation = FileLocation; Teams = new List <SoccerTeam>(); Games = new List <Game>(); }
/// <summary>
/// Sorts the given data.
/// </summary>
/// <param name="resTable"> Query results to be sorted. </param>
/// <returns> The sorted data. </returns>
private ITableResults Sort(ITableResults resTable)
{
    ISorter sorter = new MultiColumnTableSorter(resTable, this.comparers, this.helper.InParallel);
    var sortedResults = sorter.Sort();
    return sortedResults;
}
public override GroupByResults Group(ITableResults resTable)
{
    // Create hashers and equality comparers.
    // The hashers also receive the equality comparers as a cache.
    CreateHashersAndComparers(out ExpressionComparer[] comparers, out ExpressionHasher[] hashers);
    return ParallelGroupBy(resTable, comparers, hashers);
}
/// <summary>
/// Creates jobs for the parallel group by.
/// Note that the last job in the array has its end set to the end of the result table.
/// Each job receives a range of the result table, a hasher, a comparer and the aggregates.
/// Note that they are all copies, because they contain private state (the hasher holds references to the equality comparers to enable caching when computing the hash,
/// the aggregates hold references to storage arrays to avoid casting in a tight loop).
/// The comparers and hashers built in the constructor of this class are given to the last job, just like the aggregates passed to the constructor.
/// The global dictionary receives a comparer whose internal comparers are not set as a cache to any hasher.
/// </summary>
private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, ExpressionComparer[] comparers, ExpressionHasher[] hashers)
{
    GroupByJob[] jobs = new GroupByJob[this.ThreadCount];
    int current = 0;
    int addition = resTable.NumberOfMatchedElements / this.ThreadCount;
    if (addition == 0)
    {
        throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
    }

    var lastComp = RowEqualityComparerGroupKey.Factory(resTable, comparers, true);
    var lastHasher = new RowHasher(hashers);
    lastHasher.SetCache(lastComp.comparers);

    // The global dictionary needs only a comparer whose comparers are not set as a cache to any hasher.
    var globalGroups = new ConcurrentDictionary<GroupDictKey, AggregateBucketResult[]>(lastComp.Clone(cacheResults: false));

    for (int i = 0; i < jobs.Length - 1; i++)
    {
        var tmpComp = lastComp.Clone(cacheResults: true);
        var tmpHash = lastHasher.Clone();
        tmpHash.SetCache(tmpComp.comparers);
        jobs[i] = CreateJob(tmpHash, tmpComp, aggs, resTable, current, current + addition, globalGroups);
        current += addition;
    }
    jobs[jobs.Length - 1] = CreateJob(lastHasher, lastComp, aggs, resTable, current, resTable.NumberOfMatchedElements, globalGroups);
    return jobs;
}
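// Illustrative sketch (not the original implementation): the general two-phase pattern the
// jobs above suggest. Each thread groups its own range locally and then publishes the local
// groups into the shared ConcurrentDictionary. The key and value types are simplified to
// int/long so the example stays self-contained; the real bucket merge is only indicated.
using System.Collections.Concurrent;
using System.Collections.Generic;

public static class TwoPhaseGroupBySketch
{
    public static void MergeLocalGroups(
        Dictionary<int, long> localGroups,
        ConcurrentDictionary<int, long> globalGroups)
    {
        foreach (var pair in localGroups)
        {
            // Either insert the locally aggregated value or combine it with the value
            // another thread has already published for the same key.
            globalGroups.AddOrUpdate(pair.Key, pair.Value, (_, existing) => existing + pair.Value);
        }
    }
}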
public GroupJob(RowEqualityComparerGroupKey comparer, RowHasher hasher, ITableResults resTable) { this.resTable = resTable; comparer.resTable = this.resTable; this.groups = new Dictionary <GroupDictKey, AggregateBucketResult[]>(comparer); this.hasher = hasher; }
private RowEqualityComparerInt(ITableResults resTable, ExpressionComparer[] comparers, RowHasher hasher, bool cacheResults) { this.resTable = resTable; this.Comparers = comparers; this.hasher = hasher; this.cacheResults = cacheResults; }
protected GroupByJob(RowHasher hasher, RowEqualityComparerGroupKey comparer, Aggregate[] aggregates, ITableResults resTable, int start, int end) { this.hasher = hasher; this.aggregates = aggregates; this.resTable = resTable; this.start = start; this.end = end; }
public GroupByJob(Aggregate[] aggs, AggregateBucketResult[] aggRes, int start, int end, ITableResults resTable) { this.aggregates = aggs; this.start = start; this.end = end; this.resTable = resTable; this.aggResults = aggRes; }
public override void RetrieveResults(out ITableResults resTable, out GroupByResults groupByResults) { var tmpDict = new Dictionary <GroupDictKey, AggregateBucketResult[]>(); tmpDict.Add(new GroupDictKey(0, 0), this.finalResults); resTable = new TableResults(); groupByResults = new DictGroupDictKeyBucket(tmpDict, resTable); }
public GroupByJob(RowHasher hasher, Aggregate[] aggregates, ITableResults resTable, int start, int end, ConcurrentDictionary <GroupDictKey, AggregateBucketResult[]> globalGroups) { this.hasher = hasher; this.aggregates = aggregates; this.resTable = resTable; this.start = start; this.end = end; this.globalGroups = globalGroups; }
public GroupJob(Aggregate[] aggregates, RowEqualityComparerGroupKey comparer, RowHasher hasher, AggregateBucketResult[] spareBuckets, ITableResults resTable) { this.resTable = resTable; comparer.resTable = this.resTable; this.groups = new Dictionary <GroupDictKey, int>(comparer); this.hasher = hasher; this.spareBuckets = spareBuckets; this.aggResults = AggregateListResults.CreateListResults(aggregates); }
public override void Compute(out ITableResults resTable, out GroupByResults groupByResults) { if (next != null) { throw new Exception($"{this.GetType()}, there was an execution block after match block."); } this.matcher.Search(); this.resultProcessor.RetrieveResults(out resTable, out groupByResults); }
public TableResultsABTree(ABTree <int> indexTree, ITableResults resultTable) { if (indexTree == null || resultTable == null) { throw new ArgumentNullException($"{this.GetType()}, trying to assign null to a constructor."); } this.indexTree = indexTree; this.resTable = resultTable; }
protected GroupByResults(int count, ITableResults resTable) { if (resTable == null) { throw new ArgumentNullException($"{this.GetType()}, trying to assign null to a constructor."); } this.Count = count; this.resTable = resTable; }
protected TableSorter(ITableResults resTable, bool inParallel) { if (resTable == null) { throw new ArgumentNullException($"{this.GetType()}, trying to assign null to a constructor."); } this.inParallel = inParallel; this.resTable = resTable; }
private RowEqualityComparerGroupKey(ITableResults resTable, ExpressionComparer[] expressionComparers, bool cacheResults) { if (expressionComparers == null || expressionComparers.Length == 0) { throw new ArgumentException($"{this.GetType()}, trying to assign null or empty comparers to a constructor."); } this.resTable = resTable; this.comparers = expressionComparers; this.cacheResults = cacheResults; }
public IndexToRowProxyComparer(RowComparer rowComparer, ITableResults resTable, bool allowDuplicities) { if (rowComparer == null || resTable == null) { throw new ArgumentException($"{this.GetType()}, trying to assign null to a constructor."); } this.rowComparer = rowComparer; this.resTable = resTable; this.allowDuplicities = allowDuplicities; }
/// <summary>
/// Constructs the multi column sorter.
/// It consists of a row comparer wrapped inside an integer comparer.
/// </summary>
/// <param name="resTable"> A result table to sort. </param>
/// <param name="expressionComparers"> Comparers for comparing rows in the table. </param>
/// <param name="inParallel"> A flag denoting whether the table should be sorted in parallel. </param>
public MultiColumnTableSorter(ITableResults resTable, ExpressionComparer[] expressionComparers, bool inParallel) : base(resTable, inParallel)
{
    if (resTable == null || expressionComparers == null || expressionComparers.Length == 0)
    {
        throw new ArgumentNullException($"{this.GetType()}, trying to assign null to a constructor.");
    }

    var rowComparer = RowComparer.Factory(expressionComparers, !inParallel);
    this.indexComparer = new IndexToRowProxyComparer(rowComparer, resTable, true);
}
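// Illustrative sketch (hypothetical types): what an "index to row proxy" comparer generally
// does. It orders integer row indices by looking up the corresponding rows and delegating to
// a row comparer, so the sorter can permute a lightweight int array instead of the rows.
// IRowLookup and IndexProxyComparerSketch are assumptions made for this self-contained example.
using System.Collections.Generic;

public interface IRowLookup<TRow>
{
    TRow this[int index] { get; }
}

public sealed class IndexProxyComparerSketch<TRow> : IComparer<int>
{
    private readonly IRowLookup<TRow> table;
    private readonly IComparer<TRow> rowComparer;

    public IndexProxyComparerSketch(IRowLookup<TRow> table, IComparer<TRow> rowComparer)
    {
        this.table = table;
        this.rowComparer = rowComparer;
    }

    // Compare two indices through the rows they point to.
    public int Compare(int x, int y) => rowComparer.Compare(table[x], table[y]);
}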
/// <summary>
/// Constructor.
/// </summary>
public NamedQueryLookup(ITableResults results, string key, string field1 = null, string field2 = null, string field3 = null)
{
    _dataTbleResults = results;
    _key = results.GetFieldIndex(key);
    _field1 = (field1 != null) ? results.GetFieldIndex(field1) : -1;
    _field2 = (field2 != null) ? results.GetFieldIndex(field2) : -1;
    _field3 = (field3 != null) ? results.GetFieldIndex(field3) : -1;

    InitializeComponent();
    InitLables();
    PopulateList();
    ButtonOK.IsEnabled = ListBoxItems.Items.Count != 0;
}
public override void RetrieveResults(out ITableResults resTable, out GroupByResults groupByResults) { groupByResults = null; if (this.sortJobs != null) { resTable = new TableResultsABTree((ABTree <int>) this.sortJobs[0].tree, this.sortJobs[0].resTable); } else { resTable = new MultiTableResultsRowProxyArray(this.mergeJob.GetTablesOfSortedJobs(), this.mergedResults); } }
public override void Compute(out ITableResults resTable, out GroupByResults groupByResults) { if (next != null) { throw new Exception($"{this.GetType()}, there was an execution block after match block."); } else { resTable = this.Search(); groupByResults = null; } }
public override void RetrieveResults(out ITableResults resTable, out GroupByResults groupByResults) { resTable = new TableResults(); if (this.groupJobs.Length > 1) { groupByResults = new ConDictGroupDictKeyFullBucket(this.globalGroups, resTable); } else { groupByResults = new DictGroupDictKeyBucket(this.groupJobs[0].groups, this.groupJobs[0].resTable); } }
/// <summary>
/// Creates jobs for the parallel group by.
/// Note that the last job in the array has the end set to the end of the result table.
/// Each job will receive a range from the result table, the aggregates and a global place to store the groups and the aggregated values.
/// Note that there is a single comparer for the ConcurrentDictionary, thus no caching of the expressions is done.
/// </summary>
private GroupByJob[] CreateJobs(RowEqualityComparerInt equalityComparer, ITableResults resTable)
{
    GroupByJob[] jobs = new GroupByJob[this.ThreadCount];
    int current = 0;
    int addition = resTable.NumberOfMatchedElements / this.ThreadCount;
    if (addition == 0)
    {
        throw new ArgumentException($"{this.GetType()}, a range for a thread cannot be 0.");
    }

    return CreateSpecJobs(jobs, equalityComparer, resTable, current, addition);
}
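// Illustrative sketch (hypothetical types, not the original RowEqualityComparerInt): the idea
// behind keying a shared dictionary by a row index while comparing the underlying rows. An
// IEqualityComparer<int> treats each int as a representative row, so equal groups collapse
// onto whichever representative was inserted first. The rows list is a stand-in for the table.
using System.Collections.Generic;

public sealed class RowIndexEqualityComparerSketch : IEqualityComparer<int>
{
    private readonly IReadOnlyList<int[]> rows;   // stand-in for the result table

    public RowIndexEqualityComparerSketch(IReadOnlyList<int[]> rows) => this.rows = rows;

    public bool Equals(int x, int y)
    {
        var a = rows[x];
        var b = rows[y];
        if (a.Length != b.Length) return false;
        for (int i = 0; i < a.Length; i++)
            if (a[i] != b[i]) return false;
        return true;
    }

    public int GetHashCode(int index)
    {
        // Hash the row the index points to, not the index itself.
        int hash = 17;
        foreach (var value in rows[index]) hash = hash * 31 + value;
        return hash;
    }
}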
private void Print(ITableResults resTable, Printer printer) { if (resTable == null) { throw new ArgumentNullException($"{this.GetType()}, received table results as null."); } else { foreach (var item in resTable) { printer.PrintRow(item); } } }
/// <summary>
/// Creates jobs for the parallel group by.
/// Note that the last job in the array has its end set to the end of the result table.
///
/// Note that the passed aggregate results are the ones that the rest will be merged into.
/// They are expected to be at the last index of the jobs => they must have at least one result assigned.
/// </summary>
/// <param name="resTable"> The result table to aggregate over. </param>
/// <param name="aggs"> Aggregation functions. </param>
/// <param name="aggResults"> The results of the merge are stored in these instances. They are placed into the last job. </param>
private GroupByJob[] CreateJobs(ITableResults resTable, Aggregate[] aggs, AggregateBucketResult[] aggResults)
{
    GroupByJob[] jobs = new GroupByJob[this.ThreadCount];
    int current = 0;
    // Note that this is never <= 0 because it was checked when picking the implementation.
    int addition = resTable.NumberOfMatchedElements / this.ThreadCount;

    for (int i = 0; i < jobs.Length - 1; i++)
    {
        jobs[i] = new GroupByJob(aggs, AggregateBucketResult.CreateBucketResults(aggs), current, current + addition, resTable);
        current += addition;
    }
    jobs[jobs.Length - 1] = new GroupByJob(aggs, aggResults, current, resTable.NumberOfMatchedElements, resTable);
    return jobs;
}
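// Illustrative sketch (not the original implementation): the single-group case the jobs above
// serve. Every thread folds its own range into a private partial result, and the partials are
// then merged into one final value, mirroring how the per-job buckets above are later merged
// into aggResults. The aggregate is simplified to a sum over longs to keep the example small.
using System.Threading.Tasks;

public static class SingleGroupAggregationSketch
{
    public static long ParallelSum(long[] values, int threadCount)
    {
        var partials = new long[threadCount];
        int addition = values.Length / threadCount;

        Parallel.For(0, threadCount, t =>
        {
            int start = t * addition;
            // The last thread runs to the true end of the data, absorbing the remainder.
            int end = (t == threadCount - 1) ? values.Length : start + addition;
            long local = 0;
            for (int i = start; i < end; i++) local += values[i];
            partials[t] = local;
        });

        // Merge phase: combine the per-thread partial results into the final value.
        long total = 0;
        foreach (var p in partials) total += p;
        return total;
    }
}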