/// <summary>
/// Total number of valid entries in hash table
/// </summary>
/// <returns>Count of slots (across main and chained overflow buckets) whose stored
/// word is at or above the log's begin address.</returns>
private unsafe long GetEntryCount()
{
    var version = resizeInfo.version;
    var bucketCount = state[version].size;
    var tableBase = state[version].tableAligned;

    long liveEntries = 0;
    long minAddress = hlog.BeginAddress;

    for (long idx = 0; idx < bucketCount; ++idx)
    {
        // Snapshot the bucket by value; this traversal only reads, so a copy is fine.
        var current = *(tableBase + idx);
        for (; ; )
        {
            for (int slot = 0; slot < Constants.kOverflowBucketIndex; ++slot)
            {
                // NOTE(review): raw word comparison — read-cache entries (high bits set)
                // also satisfy >= minAddress and get counted; confirm that is intended.
                if (current.bucket_entries[slot] >= minAddress)
                    ++liveEntries;
            }

            var overflow = current.bucket_entries[Constants.kOverflowBucketIndex];
            if (overflow == 0)
                break;
            // Follow the overflow chain to the next bucket.
            current = *((HashBucket*)overflowBucketsAllocator.GetPhysicalAddress(overflow));
        }
    }
    return liveEntries;
}
/// <summary>
/// Clears (sets to 0) every tentative entry in the current version of the hash
/// table, including entries in chained overflow buckets.
/// </summary>
internal unsafe void DeleteTentativeEntries()
{
    HashBucketEntry entry = default;

    int version = resizeInfo.version;
    var table_size_ = state[version].size;
    var ptable_ = state[version].tableAligned;

    for (long bucket = 0; bucket < table_size_; ++bucket)
    {
        // BUG FIX: the original copied the bucket by value (HashBucket b = *(ptable_ + bucket)),
        // so zeroing b.bucket_entries[...] only modified a stack copy and the table was
        // never updated. Traverse via pointer so writes land in the actual table / overflow pages.
        HashBucket* b = ptable_ + bucket;
        while (true)
        {
            for (int bucket_entry = 0; bucket_entry < Constants.kOverflowBucketIndex; ++bucket_entry)
            {
                entry.word = b->bucket_entries[bucket_entry];
                if (entry.Tentative)
                    b->bucket_entries[bucket_entry] = 0;
            }

            var overflow = b->bucket_entries[Constants.kOverflowBucketIndex];
            if (overflow == 0)
                break;
            // Follow the overflow chain in place.
            b = (HashBucket*)overflowBucketsAllocator.GetPhysicalAddress(overflow);
        }
    }
}
/// <summary>
/// Builds a human-readable report of how hash-table entries are distributed across
/// buckets for the given table version, and validates that no bucket chain contains
/// a duplicate (non-tentative) tag.
/// </summary>
/// <param name="version">Index-table version to inspect.</param>
/// <returns>Multi-line summary string including a per-bucket entry-count histogram.</returns>
private unsafe string DumpDistributionInternal(int version)
{
    var bucketCount = state[version].size;
    var tableBase = state[version].tableAligned;
    long recordCount = 0;
    long minAddress = hlog.BeginAddress;
    Dictionary<int, long> histogram = new();

    for (long idx = 0; idx < bucketCount; ++idx)
    {
        List<int> seenTags = new();
        int slotsUsed = 0;
        var current = *(tableBase + idx);
        for (; ; )
        {
            for (int slot = 0; slot < Constants.kOverflowBucketIndex; ++slot)
            {
                HashBucketEntry e = default;
                e.word = current.bucket_entries[slot];

                // An entry is live if it points into the main log at/above BeginAddress,
                // or into the read cache at/above the cache's HeadAddress.
                bool live = e.ReadCache
                    ? (e.Address & ~Constants.kReadCacheBitMask) >= readcache.HeadAddress
                    : e.Address >= minAddress;
                if (!live)
                    continue;

                if (seenTags.Contains(e.Tag) && !e.Tentative)
                    throw new FasterException("Duplicate tag found in index");
                seenTags.Add(e.Tag);
                ++slotsUsed;
                ++recordCount;
            }

            var overflow = current.bucket_entries[Constants.kOverflowBucketIndex];
            if (overflow == 0)
                break;
            current = *((HashBucket*)overflowBucketsAllocator.GetPhysicalAddress(overflow));
        }

        // Accumulate histogram[slotsUsed]++ without the double lookup.
        histogram.TryGetValue(slotsUsed, out var soFar);
        histogram[slotsUsed] = soFar + 1;
    }

    var distribution =
        $"Number of hash buckets: {bucketCount}\n" +
        $"Number of overflow buckets: {OverflowBucketCount}\n" +
        $"Size of each bucket: {Constants.kEntriesPerBucket * sizeof(HashBucketEntry)} bytes\n" +
        $"Total distinct hash-table entry count: {{{recordCount}}}\n" +
        $"Average #entries per hash bucket: {{{recordCount / (double)bucketCount:0.00}}}\n" +
        $"Histogram of #entries per bucket:\n";
    foreach (var kvp in histogram.OrderBy(kv => kv.Key))
        distribution += $"  {kvp.Key} : {kvp.Value}\n";
    return distribution;
}
/// <summary>
/// Builds a report of entry distribution across hash buckets for the given table
/// version, and checks each bucket chain for duplicate (non-tentative) tags.
/// </summary>
/// <param name="version">Index-table version to inspect.</param>
protected virtual string _DumpDistribution(int version)
{
    var bucketCount = state[version].size;
    var tableBase = state[version].tableAligned;
    long recordCount = 0;
    Dictionary<int, long> histogram = new Dictionary<int, long>();

    for (long idx = 0; idx < bucketCount; ++idx)
    {
        List<int> seenTags = new List<int>();
        int slotsUsed = 0;
        var current = *(tableBase + idx);
        for (; ; )
        {
            for (int slot = 0; slot < Constants.kOverflowBucketIndex; ++slot)
            {
                var word = current.bucket_entries[slot];
                if (word == 0)
                    continue;  // empty slot

                var e = default(HashBucketEntry);
                e.word = word;
                if (seenTags.Contains(e.Tag) && !e.Tentative)
                    throw new FasterException("Duplicate tag found in index");
                seenTags.Add(e.Tag);
                ++slotsUsed;
                ++recordCount;
            }

            var overflow = current.bucket_entries[Constants.kOverflowBucketIndex];
            if (overflow == 0)
                break;
            current = *((HashBucket*)overflowBucketsAllocator.GetPhysicalAddress(overflow));
        }

        // Accumulate histogram[slotsUsed]++ without the double lookup.
        histogram.TryGetValue(slotsUsed, out var soFar);
        histogram[slotsUsed] = soFar + 1;
    }

    var distribution =
        $"Number of hash buckets: {{{bucketCount}}}\n" +
        $"Total distinct hash-table entry count: {{{recordCount}}}\n" +
        $"Average #entries per hash bucket: {{{recordCount / (double)bucketCount:0.00}}}\n" +
        $"Histogram of #entries per bucket:\n";
    foreach (var kvp in histogram.OrderBy(kv => kv.Key))
        distribution += $"  {kvp.Key} : {kvp.Value}\n";
    return distribution;
}
/// <summary>
/// Dumps to the console the distribution of entries across hash buckets for the
/// given table version, and validates that no bucket chain contains a duplicate
/// (non-tentative) tag.
/// </summary>
/// <param name="version">Index-table version to inspect.</param>
/// <exception cref="FasterException">Thrown when a duplicate non-tentative tag is found in a bucket chain.</exception>
protected virtual void _DumpDistribution(int version)
{
    var table_size_ = state[version].size;
    var ptable_ = state[version].tableAligned;
    long total_record_count = 0;
    Dictionary<int, long> histogram = new Dictionary<int, long>();

    for (long bucket = 0; bucket < table_size_; ++bucket)
    {
        List<int> tags = new List<int>();
        int cnt = 0;
        HashBucket b = *(ptable_ + bucket);
        while (true)
        {
            for (int bucket_entry = 0; bucket_entry < Constants.kOverflowBucketIndex; ++bucket_entry)
            {
                if (0 != b.bucket_entries[bucket_entry])
                {
                    var x = default(HashBucketEntry);
                    x.word = b.bucket_entries[bucket_entry];
                    // FIX: throw the project-specific FasterException (was a bare
                    // System.Exception), consistent with the other index-validation paths.
                    if (tags.Contains(x.Tag) && !x.Tentative)
                    {
                        throw new FasterException("Duplicate tag found in index");
                    }
                    tags.Add(x.Tag);
                    ++cnt;
                    ++total_record_count;
                }
            }
            if (b.bucket_entries[Constants.kOverflowBucketIndex] == 0)
            {
                break;
            }
            // Follow the overflow chain.
            b = *((HashBucket*)overflowBucketsAllocator.GetPhysicalAddress((b.bucket_entries[Constants.kOverflowBucketIndex])));
        }
        if (!histogram.ContainsKey(cnt))
        {
            histogram[cnt] = 0;
        }
        histogram[cnt]++;
    }

    Console.WriteLine("Number of hash buckets: {0}", table_size_);
    Console.WriteLine("Total distinct hash-table entry count: {0}", total_record_count);
    Console.WriteLine("Average #entries per hash bucket: {0:0.00}", total_record_count / (double)table_size_);
    Console.WriteLine("Histogram of #entries per bucket: ");
    foreach (var kvp in histogram.OrderBy(e => e.Key))
    {
        Console.WriteLine(kvp.Key.ToString() + ": " + kvp.Value.ToString(CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Dumps to the console the distribution of non-empty entries across hash buckets
/// for the given table version, as a fixed-size histogram. The last bin aggregates
/// all buckets with (HistogramBins - 1) or more entries, so no bucket is dropped.
/// </summary>
/// <param name="version">Index-table version to inspect.</param>
protected virtual void _DumpDistribution(int version)
{
    // Named constant replaces the magic number 14 repeated through the original.
    // Bins 0..12 are exact counts; bin 13 means "13 or more".
    const int HistogramBins = 14;

    var table_size_ = state[version].size;
    var ptable_ = state[version].tableAligned;
    long total_record_count = 0;
    long[] histogram = new long[HistogramBins];

    for (long bucket = 0; bucket < table_size_; ++bucket)
    {
        int cnt = 0;
        HashBucket b = *(ptable_ + bucket);
        while (true)
        {
            for (int bucket_entry = 0; bucket_entry < Constants.kOverflowBucketIndex; ++bucket_entry)
            {
                if (0 != b.bucket_entries[bucket_entry])
                {
                    ++cnt;
                    ++total_record_count;
                }
            }
            if (b.bucket_entries[Constants.kOverflowBucketIndex] == 0)
            {
                break;
            }
            // Follow the overflow chain.
            b = *((HashBucket*)overflowBucketsAllocator.GetPhysicalAddress((b.bucket_entries[Constants.kOverflowBucketIndex])));
        }
        // FIX: clamp into the last bin instead of discarding overly-full buckets
        // (previously buckets with >= 14 entries were not counted at all, so the
        // histogram totals did not match total_record_count).
        histogram[Math.Min(cnt, HistogramBins - 1)]++;
    }

    Console.WriteLine("Number of hash buckets: {0}", table_size_);
    Console.WriteLine("Total distinct hash-table entry count: {0}", total_record_count);
    Console.WriteLine("Histogram of #entries per bucket: ");
    for (int i = 0; i < histogram.Length; i++)
    {
        Console.WriteLine(i.ToString() + ": " + histogram[i].ToString(CultureInfo.InvariantCulture));
    }
}