/// <summary>
/// Initializes a new instance of the <see cref="ImmutableHashSet{T}.HashBucket.Enumerator"/> struct.
/// </summary>
/// <param name="bucket">The bucket whose contents are to be enumerated.</param>
internal Enumerator(HashBucket bucket)
{
    _bucket = bucket;
    _disposed = false;
    _currentPosition = Position.BeforeFirst;
    _additionalEnumerator = default(ImmutableList<T>.Enumerator);
}
// Groups the case-label constants into hash buckets keyed by the string
// hash the emitted switch will compute at runtime.
private static Dictionary<uint, HashBucket> ComputeStringHashMap(
    KeyValuePair<ConstantValue, object>[] caseLabels,
    GetStringHashCode computeStringHashcodeDelegate)
{
    Debug.Assert(caseLabels != null);
    var map = new Dictionary<uint, HashBucket>(caseLabels.Length);

    foreach (var caseLabel in caseLabels)
    {
        ConstantValue constant = caseLabel.Key;
        Debug.Assert(constant.IsNull || constant.IsString);

        uint hashValue = computeStringHashcodeDelegate((string)constant.Value);

        // Lazily create the bucket for a hash value seen for the first time.
        if (!map.TryGetValue(hashValue, out HashBucket bucketForHash))
        {
            bucketForHash = new HashBucket();
            map.Add(hashValue, bucketForHash);
        }

        Debug.Assert(!bucketForHash.Contains(caseLabel));
        bucketForHash.Add(caseLabel);
    }

    return map;
}
private void EmitHashTableSwitch()
{
    // The key's hash value must already have been computed and loaded into _keyHash.
    Debug.Assert(_keyHash != null);

    // Bucket the case labels by their computed string hash.
    var hashToBucket = ComputeStringHashMap(_caseLabels, _computeStringHashcodeDelegate);

    // Emit conditional jumps on the hash value; returns hashValue -> bucket label.
    var hashToLabel = EmitHashBucketJumpTable(hashToBucket);

    // Emit each bucket body: at its label, compare the key against the
    // bucket's case labels with direct string comparisons.
    foreach (var entry in hashToBucket)
    {
        _builder.MarkLabel(hashToLabel[entry.Key]);
        this.EmitNonHashTableSwitch(entry.Value.ToArray());
    }
}
/// <summary>
/// Performs the set "except" (subtraction) operation on a given data structure.
/// </summary>
private static MutationResult Except(IEnumerable<T> other, IEqualityComparer<T> equalityComparer, IEqualityComparer<HashBucket> hashBucketEqualityComparer, SortedInt32KeyNode<HashBucket> root)
{
    Requires.NotNull(other, nameof(other));
    Requires.NotNull(equalityComparer, nameof(equalityComparer));
    Requires.NotNull(root, nameof(root));

    int removed = 0;
    var updatedRoot = root;
    foreach (var element in other.GetEnumerableDisposable<T, Enumerator>())
    {
        // Null elements hash to 0 by convention.
        int hash = element != null ? equalityComparer.GetHashCode(element) : 0;
        HashBucket existingBucket;
        if (updatedRoot.TryGetValue(hash, out existingBucket))
        {
            OperationResult opResult;
            HashBucket prunedBucket = existingBucket.Remove(element, equalityComparer, out opResult);
            if (opResult == OperationResult.SizeChanged)
            {
                removed--;
                updatedRoot = UpdateRoot(updatedRoot, hash, hashBucketEqualityComparer, prunedBucket);
            }
        }
    }

    return new MutationResult(updatedRoot, removed);
}
/// <summary>
/// Performs the set "except" (subtraction) operation on a given data structure.
/// </summary>
private static MutationResult Except(IEnumerable <T> other, IEqualityComparer <T> equalityComparer, ImmutableSortedDictionary <int, HashBucket> .Node root)
{
    // Use nameof for refactor-safe argument names (consistent with the other overload).
    Requires.NotNull(other, nameof(other));
    Requires.NotNull(equalityComparer, nameof(equalityComparer));
    Requires.NotNull(root, nameof(root));

    int count = 0;
    var newRoot = root;
    foreach (var item in other)
    {
        // Guard against null items (hash to 0) instead of letting
        // GetHashCode throw — matches the sibling Except overload.
        int hashCode = item != null ? equalityComparer.GetHashCode(item) : 0;
        HashBucket bucket;
        if (newRoot.TryGetValue(hashCode, Comparer <int> .Default, out bucket))
        {
            OperationResult result;
            HashBucket newBucket = bucket.Remove(item, equalityComparer, out result);
            if (result == OperationResult.SizeChanged)
            {
                count--;
                newRoot = UpdateRoot(newRoot, hashCode, newBucket);
            }
        }
    }

    return(new MutationResult(newRoot, count));
}
public void HashBucketWraparound()
{
    var bucket = new HashBucket(4, 2);

    bucket.Store(3, 100).ShouldBe(true);
    bucket.Store(3, 200).ShouldBe(true);

    // Both values for key 3 come back in insertion order.
    bucket.Retrieve(3).ToArray().ShouldBe(new[] { 100, 200 });
}
// Scans the bucket for the first slot that is both unused and can be
// locked; returns its index, or -1 when no free slot could be claimed.
protected int FindEmptySlot(HashBucket <TKey, TValue> bucket)
{
    for (int slot = 0; slot < bucket.Length; slot++)
    {
        if (!bucket.Used(slot) && bucket.Lock(slot))
        {
            return slot;
        }
    }

    return -1;
}
public void HashBucketStoreZero()
{
    var bucket = new HashBucket(2, 2);

    // A zero value must round-trip, not be mistaken for "empty".
    bucket.Store(0, 0).ShouldBe(true);

    bucket.Retrieve(0).ToArray().ShouldBe(new[] { 0 });
}
public void HashBucketStoreFlowpast()
{
    var bucket = new HashBucket(2, 2);

    bucket.Store(1, 123456).ShouldBe(true);
    bucket.Store(1, 765432).ShouldBe(true);

    // Both stored values are retrievable for the same key.
    bucket.Retrieve(1).ToArray().ShouldBe(new[] { 123456, 765432 });
}
public void HashBucketChainLimit()
{
    var bucket = new HashBucket(8, 2);

    bucket.Store(0, 100).ShouldBe(true);
    bucket.Store(0, 200).ShouldBe(true);
    // The third store exceeds the chain limit of 2 and is rejected.
    bucket.Store(0, 300).ShouldBe(false);

    bucket.Retrieve(0).ToArray().ShouldBe(new[] { 100, 200 });
}
// Allocates the jagged bucket arrays: one inner table per hash table,
// each of the requested size (-1, the default, means the configured size).
protected HashBucket <TKey, TValue>[][] CreateInitializedBuckets(int tableSize = -1)
{
    if (tableSize == -1)
    {
        tableSize = _sizeOfTables;
    }

    var tables = new HashBucket <TKey, TValue> [_numberOfTables][];
    for (int tableIndex = 0; tableIndex < tables.Length; tableIndex++)
    {
        tables[tableIndex] = new HashBucket <TKey, TValue> [tableSize];
    }

    return tables;
}
public void HashBucketOverlapLimited()
{
    // With the max chain set to a lower value, the overlap seen in the
    // unrestricted test does not occur.
    var bucket = new HashBucket(8, 2);

    bucket.Store(0, 100).ShouldBe(true);
    bucket.Store(1, 200).ShouldBe(true);
    bucket.Store(0, 300).ShouldBe(false);

    bucket.Retrieve(0).ToArray().ShouldBe(new[] { 100, 200 });
    bucket.Retrieve(1).ToArray().ShouldBe(new[] { 200 });
}
public void HashBucketOverlap()
{
    var bucket = new HashBucket(8, 8);

    // These stores are expected to overlap into neighboring chains.
    bucket.Store(0, 100).ShouldBe(true);
    bucket.Store(1, 200).ShouldBe(true);
    bucket.Store(0, 300).ShouldBe(true);

    bucket.Retrieve(0).ToArray().ShouldBe(new[] { 100, 200, 300 });
}
/// <summary>
/// Performs the set "add" operation on a given data structure.
/// </summary>
private static MutationResult Add(T item, MutationInput origin)
{
    int hashCode = origin.EqualityComparer.GetHashCode(item);
    HashBucket existingBucket = origin.Root.GetValueOrDefault(hashCode);

    OperationResult result;
    HashBucket updatedBucket = existingBucket.Add(item, origin.EqualityComparer, out result);
    if (result == OperationResult.NoChangeRequired)
    {
        // The item was already present; the root is untouched.
        return new MutationResult(origin.Root, 0);
    }

    // The only other possible outcome is a size change of exactly one.
    Debug.Assert(result == OperationResult.SizeChanged);
    var updatedRoot = UpdateRoot(origin.Root, hashCode, origin.HashBucketEqualityComparer, updatedBucket);
    return new MutationResult(updatedRoot, 1);
}
/// <summary>
/// Performs the "add" operation on a given data structure.
/// </summary>
private static MutationResult Add(TKey key, TValue value, KeyCollisionBehavior behavior, MutationInput origin)
{
    Requires.NotNullAllowStructs(key, nameof(key));

    int hashCode = origin.KeyComparer.GetHashCode(key);
    HashBucket existingBucket = origin.Root.GetValueOrDefault(hashCode);

    OperationResult result;
    HashBucket updatedBucket = existingBucket.Add(key, value, origin.KeyOnlyComparer, origin.ValueComparer, behavior, out result);
    if (result == OperationResult.NoChangeRequired)
    {
        // Key/value pair already present (per the collision behavior); nothing to do.
        return new MutationResult(origin);
    }

    var updatedRoot = UpdateRoot(origin.Root, hashCode, updatedBucket, origin.HashBucketComparer);
    // A value replacement does not change the count; only a size change does.
    int countAdjustment = result == OperationResult.SizeChanged ? +1 : 0;
    return new MutationResult(updatedRoot, countAdjustment);
}
// Looks up <paramref name="key"/> in the bucket at <paramref name="index"/>;
// sets <paramref name="success"/> and returns the matching value, or the
// default value when the bucket is absent or holds no matching slot.
protected TValue CheckAndReturn(HashBucket <TKey, TValue>[] table, int index, TKey key, out bool success)
{
    HashBucket <TKey, TValue> candidate = table[index];
    if (candidate == null)
    {
        success = false;
        return default(TValue);
    }

    foreach (HashSlot <TKey, TValue> slot in candidate)
    {
        if (key.Equals(slot.Key))
        {
            success = true;
            return slot.Value;
        }
    }

    success = false;
    return default(TValue);
}
// Picks a random slot in the bucket and tries to lock it; on failure,
// scans the remaining slots. Returns -1 if all slots are locked.
private int PickSlotNumber(HashBucket <TKey, TValue> bucket)
{
    int slotNumber = _random.Next(_bucketWidth);
    if (bucket.Lock(slotNumber))
    {
        return(slotNumber);
    }

    // BUGFIX: scan all _bucketWidth slots, not DEFAULT_SLOT_WIDTH — the
    // random pick above draws from _bucketWidth, so the fallback must cover
    // the same range when the bucket was created with a non-default width.
    for (int i = 0; i < _bucketWidth; i++)
    {
        if (i == slotNumber)
        {
            // Already tried the randomly chosen slot.
            continue;
        }

        if (bucket.Lock(i))
        {
            return(i);
        }
    }

    return(-1);
}
/// <summary>
/// Performs the bulk "add" operation on a given data structure.
/// </summary>
private static MutationResult AddRange(IEnumerable <KeyValuePair <TKey, TValue> > items, MutationInput origin, KeyCollisionBehavior collisionBehavior = KeyCollisionBehavior.ThrowIfValueDifferent)
{
    Requires.NotNull(items, nameof(items));

    int added = 0;
    var workingRoot = origin.Root;
    foreach (var pair in items)
    {
        int hashCode = origin.KeyComparer.GetHashCode(pair.Key);
        HashBucket existingBucket = workingRoot.GetValueOrDefault(hashCode);

        OperationResult result;
        var updatedBucket = existingBucket.Add(pair.Key, pair.Value, origin.KeyOnlyComparer, origin.ValueComparer, collisionBehavior, out result);
        workingRoot = UpdateRoot(workingRoot, hashCode, updatedBucket, origin.HashBucketComparer);

        // Only genuine insertions change the count; replacements do not.
        if (result == OperationResult.SizeChanged)
        {
            added++;
        }
    }

    return new MutationResult(workingRoot, added);
}
/// <summary>
/// Performs the set "union" operation on a given data structure.
/// </summary>
private static MutationResult Union(IEnumerable <T> other, MutationInput origin)
{
    // Use nameof for a refactor-safe argument name (consistent with sibling methods).
    Requires.NotNull(other, nameof(other));

    int count = 0;
    var newRoot = origin.Root;
    foreach (var item in other.GetEnumerableDisposable <T, Enumerator>())
    {
        int hashCode = origin.EqualityComparer.GetHashCode(item);
        HashBucket bucket = newRoot.GetValueOrDefault(hashCode);
        OperationResult result;
        var newBucket = bucket.Add(item, origin.EqualityComparer, out result);
        if (result == OperationResult.SizeChanged)
        {
            newRoot = UpdateRoot(newRoot, hashCode, newBucket);
            count++;
        }
    }

    return(new MutationResult(newRoot, count));
}
// Builds a map from runtime string hash value to the bucket of case labels
// sharing that hash, so the emitted switch can branch on hash first.
private static Dictionary<uint, HashBucket> ComputeStringHashMap(
    KeyValuePair<ConstantValue, object>[] caseLabels,
    GetStringHashCode computeStringHashcodeDelegate)
{
    Debug.Assert(caseLabels != null);
    var hashToBucket = new Dictionary<uint, HashBucket>(caseLabels.Length);

    foreach (var caseLabel in caseLabels)
    {
        ConstantValue labelConstant = caseLabel.Key;
        Debug.Assert(labelConstant.IsNull || labelConstant.IsString);

        uint hashValue = computeStringHashcodeDelegate((string)labelConstant.Value);

        HashBucket labelsForHash;
        if (!hashToBucket.TryGetValue(hashValue, out labelsForHash))
        {
            // First label with this hash: start a fresh bucket.
            labelsForHash = new HashBucket();
            hashToBucket.Add(hashValue, labelsForHash);
        }

        Debug.Assert(!labelsForHash.Contains(caseLabel));
        labelsForHash.Add(caseLabel);
    }

    return hashToBucket;
}
// Rehashes every node from the overflow list into the freshly cleared
// bucket array <paramref name="ht"/>, then installs it as the hash table.
public void Split(HashBucket* ht, int shift, int mask)
{
    // Start every destination bucket out empty.
    for (var i = 0; i <= mask; i++)
    {
        ht[i] = new HashBucket();
    }

    // Walk the list, capturing each node's successor before it is relinked.
    var node = List;
    while (node != null)
    {
        var next = node->next;
        var index = (int)(node->hash >> shift) & mask;
        ht[index].Push(node);
        node = next;
    }

    Htab = ht;
}
/// <summary>
/// Initializes a new instance of the <see cref="ImmutableDictionary{TKey, TValue}.HashBucket.Enumerator"/> struct.
/// </summary>
/// <param name="bucket">The bucket.</param>
internal Enumerator(HashBucket bucket)
{
    // (cref fixed above: generic arguments in XML-doc cref must use braces,
    // not raw angle brackets, which break the XML.)
    this.bucket = bucket;
    this.currentPosition = Position.BeforeFirst;
    this.additionalEnumerator = default(ImmutableList <KeyValuePair <TKey, TValue> > .Enumerator);
}
/// <summary>
/// Throws an exception to catch any errors in comparing <see cref="HashBucket"/> instances.
/// </summary>
bool IEquatable <HashBucket> .Equals(HashBucket other)
{
    // This should never be called, as hash buckets don't know how to equate themselves.
    // Throw NotSupportedException rather than the bare Exception base type so the
    // failure is specific and callers cannot accidentally catch it as a generic error.
    throw new NotSupportedException();
}
/// <summary>
/// Releases the resources held by this source: the callback table, any
/// linked-token registrations, the timer, and the wait handle.
/// </summary>
/// <param name="disposing">true when called from Dispose(); false from a finalizer path.</param>
protected virtual void Dispose(bool disposing)
{
    // Only dispose once, and only on the explicit-dispose path.
    if (disposing && !_disposed)
    {
        // NOTE(review): barrier appears intended to publish _disposed to other
        // threads before teardown proceeds — confirm against the class's
        // cancellation path before reordering anything here.
        Thread.MemoryBarrier();
        _disposed = true;
        if (!_canceled)
        {
            Thread.MemoryBarrier();
            // Never canceled: the callbacks will never run, so detach from any
            // linked token sources and drop the callback table.
            UnregisterLinkedTokens();
            _callbacks = null;
        }
        if (!object.ReferenceEquals(_timer, null))
        {
            _timer.Dispose();
            _timer = null;
        }
        _handle.Close();
    }
}
public CancellationTokenSource()
{
    // The wait handle starts unsignaled; it is set when cancellation occurs.
    _handle = new ManualResetEvent(false);
    _callbacks = new HashBucket<CancellationTokenRegistration, Action>();
}
/// <summary>
/// Checks whether this <see cref="HashBucket"/> is exactly like another one,
/// comparing the first value with the supplied comparer and the overflow
/// list by reference. For use when type parameter T is a struct.
/// </summary>
/// <param name="other">The other bucket.</param>
/// <param name="valueComparer">The comparer to use for the first value in the bucket.</param>
/// <returns><c>true</c> if the two <see cref="HashBucket"/> structs have precisely the same values.</returns>
internal bool EqualsByValue(HashBucket other, IEqualityComparer <T> valueComparer)
{
    return valueComparer.Equals(_firstValue, other._firstValue)
        && object.ReferenceEquals(_additionalElements, other._additionalElements);
}
/// <summary>
/// Checks whether this <see cref="HashBucket"/> is exactly like another one,
/// comparing both the first value and the overflow list by reference.
/// For use when type parameter T is an object.
/// </summary>
/// <param name="other">The other bucket.</param>
/// <returns><c>true</c> if the two <see cref="HashBucket"/> structs have precisely the same values.</returns>
internal bool EqualsByRef(HashBucket other)
{
    return object.ReferenceEquals(_firstValue, other._firstValue)
        && object.ReferenceEquals(_additionalElements, other._additionalElements);
}
/// <summary>
/// Replaces (or removes, when emptied) the bucket stored under
/// <paramref name="hashCode"/> in the given tree, returning the new root.
/// </summary>
private static ImmutableSortedDictionary <int, HashBucket> .Node UpdateRoot(ImmutableSortedDictionary <int, HashBucket> .Node root, int hashCode, HashBucket newBucket, IEqualityComparer <HashBucket> hashBucketComparer)
{
    // A bucket that became empty is dropped from the tree entirely.
    if (newBucket.IsEmpty)
    {
        bool mutated;
        return root.Remove(hashCode, Comparer <int> .Default, out mutated);
    }

    bool replacedExistingValue;
    bool treeMutated;
    return root.SetItem(hashCode, newBucket, Comparer <int> .Default, hashBucketComparer, out replacedExistingValue, out treeMutated);
}
/// <summary>
/// Replaces (or removes, when emptied) the bucket stored under
/// <paramref name="hashCode"/> in the given tree, returning the new root.
/// </summary>
private static SortedInt32KeyNode <HashBucket> UpdateRoot(SortedInt32KeyNode <HashBucket> root, int hashCode, IEqualityComparer <HashBucket> hashBucketEqualityComparer, HashBucket newBucket)
{
    // A bucket that became empty is dropped from the tree entirely.
    if (newBucket.IsEmpty)
    {
        return root.Remove(hashCode, out _);
    }

    return root.SetItem(hashCode, newBucket, hashBucketEqualityComparer, out _, out _);
}
// Resets the allocator to its initial state: releases extra pages, rebuilds
// the hash table, the slab size-index tables, and the pre-allocated free
// lists for slabs, hash buckets, big-mem chunks, and large pages.
// NOTE(review): pointer arithmetic below lays structures out back-to-back in
// _lpBase's page — statement order is load-bearing; do not reorder.
private void ResetSlabs()
{
    _cbuckets = 0;
    // hash bucket extra mem pages are returned to system on reset.
    if (_lpHash != null)
    {
        for (var ls = _lpHash; ls != null; ls = ls.next) ls.Release();
        _lpHash = null;
    }
    // Lazily create the base large page that hosts all the tables below.
    if (_lpBase == null) _lpBase = new LargePage(_virtual_memory, 4);
    _htable = (HashBucket*)_lpBase.data;
    // init shared tables with clean values.
    for (var i = 0; i < _config.HashTableSize; i++) _htable[i] = new HashBucket();
    // bp walks forward through the base page as each region is carved out.
    var bp = (byte*)(_htable + _config.HashTableSize);
    _stabs = (int*)bp;
    // encoding slab max size and pos like -> 32 : 0
    _stabs[0] = 0x200000;
    _stabs[1] = 0x400001;
    _slabs = (SlabList*)_align((byte*)(_stabs + 1 + (iLastSlabSize + SizeofRcp) / iQuantum));
    // calculate slabs indexes and max sizes.
    int sbts = 64, scnt = 2, pos = scnt;
    for (var half = sbts / 2; sbts < iLastSlabSize; half = sbts / 2)
    {
        // Two half-steps per doubling: sizes grow by 50% between slab classes.
        for (var es = sbts + half; sbts < es; sbts += iQuantum) _stabs[pos++] = scnt | (es << 16);
        scnt++;
        for (var es = sbts + half; sbts < es; sbts += iQuantum) _stabs[pos++] = scnt | (es << 16);
        scnt++;
    }
    _stabs[pos] = scnt;
    bp = _align((byte*)(_slabs + scnt + 1));
    // pre-allocate ~32k into every slab slot.
    var quot = sbts / 4;
    while (scnt-- > 0)
    {
        // Build a singly-linked free list of fixed-size blocks for this slab class.
        RefCountPtr* ls = null;
        var count = 0;
        for (var todo = 32768; todo > 0; count++, todo -= sbts, bp += sbts)
        {
            var rc = (RefCountPtr*)bp;
            rc->_list = ls;
            ls = rc;
        }
        _slabs[scnt].Set(ls, count);
        // Shrink the block size for the next (smaller) slab class.
        sbts -= quot;
        if ((scnt & 1) != 0) quot = sbts / 4;
    }
    // pre-allocate 256k into hash table buckets;
    _hb_cp = (HashBucket*)bp;
    _hb_ep = (HashBucket*)(bp += 256 * 1024);
    // pre-allocate ~1mb into big-mem chunks.
    _sbigs = null;
    _cntBigs = 0;
    sbts = iBigBlockSize + SizeofRcp;
    for (int left = 1024 * (1024 + 16); left > 0; _cntBigs++, left -= sbts, bp += sbts)
    {
        var rc = (RefCountPtr*)bp;
        rc->_list = _sbigs;
        _sbigs = rc;
    }
    // pre-allocate large memory pages for cache purposes.
    if (!_config.ReserveMemory) _cntPages = 1 + (int)((Stats.MemoryLimit - 1) / _config.AllocPageSize);
    if (_lpList != null)
        // When reserving memory, keep the already-allocated page list as-is;
        // otherwise release every page back to the system.
        if (_config.ReserveMemory) { }
        else
        {
            for (var ls = _lpList; ls != null; ls = ls.next) ls.Release();
            _lpList = null;
        }
    else if (_config.ReserveMemory)
        // Reserve up-front: chain new pages until the memory limit is covered.
        for (var bytes = Stats.MemoryLimit; bytes > 0; bytes -= _lpList.size)
            _lpList = new LargePage(_virtual_memory, _config.AllocPageSize) { next = _lpList };
    _lpNext = _lpList;
    // Remaining span of the base page becomes the bump-allocation window.
    _cp = bp;
    _ep = _lpBase.data + _lpBase.size;
}
/// <summary>
/// Replaces (or removes, when emptied) the bucket stored under
/// <paramref name="hashCode"/> in the given tree, returning the new root.
/// </summary>
private static SortedInt32KeyNode <HashBucket> UpdateRoot(SortedInt32KeyNode <HashBucket> root, int hashCode, HashBucket newBucket)
{
    // A bucket that became empty is dropped from the tree entirely.
    if (newBucket.IsEmpty)
    {
        bool treeMutated;
        return root.Remove(hashCode, out treeMutated);
    }

    bool replacedExistingValue;
    bool treeMutated2;
    return root.SetItem(hashCode, newBucket, EqualityComparer <HashBucket> .Default, out replacedExistingValue, out treeMutated2);
}
/// <summary>
/// Conditionally removes a bucket as long as it is empty of key/value pairs,
/// re-linking the neighboring buckets and patching the manifest as needed.
/// </summary>
internal async Task <bool> PruneBucketAsync(HashManifest manifest, long bucketIndex, string bucketName, HashBucket bucket, CancellationToken cancellationToken)
{
    // A bucket that still holds entries must not be pruned.
    if (bucket.Head.HasValue || bucket.Tail.HasValue)
    {
        return false;
    }

    await StateManager.RemoveStateAsync(bucketName, cancellationToken);

    // Splice the doubly-linked bucket chain around the removed bucket.
    if (bucket.Previous.HasValue)
    {
        var previousName = IndexToBucket(bucket.Previous.Value);
        var previous = await StateManager.GetStateAsync <HashBucket>(previousName, cancellationToken);
        previous.Next = bucket.Next;
        await StateManager.SetStateAsync(previousName, previous, cancellationToken);
    }

    if (bucket.Next.HasValue)
    {
        var nextName = IndexToBucket(bucket.Next.Value);
        var next = await StateManager.GetStateAsync <HashBucket>(nextName, cancellationToken);
        next.Previous = bucket.Previous;
        await StateManager.SetStateAsync(nextName, next, cancellationToken);
    }

    // If the manifest's head or tail pointed at this bucket, advance it.
    if (manifest.Head == bucketIndex)
    {
        manifest.Head = bucket.Next;
    }

    if (manifest.Tail == bucketIndex)
    {
        manifest.Tail = bucket.Previous;
    }

    return true;
}