/// <summary>
/// Constructs the set from an existing collection using the given comparer.
///
/// Implementation Notes:
/// Since resizes are relatively expensive (require rehashing), this attempts to minimize
/// the need to resize by setting the initial capacity based on size of collection.
/// </summary>
/// <param name="collection">Elements to copy into the new set; must not be null.</param>
/// <param name="comparer">Equality comparer used for element lookups.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="collection"/> is null.</exception>
public OrderedHashSet(IEnumerable<T> collection, IEqualityComparer<T> comparer)
    : this(comparer)
{
    if (collection == null)
    {
        // nameof keeps the reported parameter name refactor-safe
        // (original used the string literal "collection").
        throw new ArgumentNullException(nameof(collection));
    }

    // to avoid excess resizes, first set size based on collection's count. Collection
    // may contain duplicates, so call TrimExcess if resulting hashset is larger than
    // threshold
    int suggestedCapacity = 0;
    ICollection<T> coll = collection as ICollection<T>;
    if (coll != null)
    {
        suggestedCapacity = coll.Count;
    }
    Initialize(suggestedCapacity);

    UnionWith(collection);

    // Shrink if the set ended up empty with a non-minimal table, or if the
    // table is more than ShrinkThreshold times larger than the element count.
    if ((m_count == 0 && m_slots.Length > HashHelpers.GetPrime(0)) ||
        (m_count > 0 && m_slots.Length / m_count > ShrinkThreshold))
    {
        TrimExcess();
    }
}
// Grows the backing arrays to the next prime at least twice the current
// count, then rebuilds every bucket chain for the new table size.
private void IncreaseCapacity()
{
    int desired = this.m_count * 2;
    if (desired < 0)
    {
        // Doubling overflowed int; fall back to the current count.
        desired = this.m_count;
    }

    int newSize = HashHelpers.GetPrime(desired);
    if (newSize <= this.m_count)
    {
        // No prime large enough exists: capacity overflow.
        throw new ArgumentException("缓冲区溢出");
    }

    // Move the existing slots into the larger array.
    Slot[] newSlots = new Slot[newSize];
    if (this.m_slots != null)
    {
        Array.Copy(this.m_slots, 0, newSlots, 0, this.m_lastIndex);
    }

    // Re-link every used slot into its bucket under the new modulus.
    int[] newBuckets = new int[newSize];
    for (int slot = 0; slot < this.m_lastIndex; slot++)
    {
        int bucket = newSlots[slot].hashCode % newSize;
        newSlots[slot].next = newBuckets[bucket] - 1;
        newBuckets[bucket] = slot + 1;
    }

    this.m_slots = newSlots;
    this.m_buckets = newBuckets;
}
/*LOCKING: _lock must be held*/
private void Rehash()
{
    // Size doesn't track elements that die without being removed. Before attempting
    // to rehash we traverse the array to see how many entries are left alive. We
    // rehash the array into a new one which has a capacity relative to the number of
    // live entries.
    RecomputeSize();

    // New length: prime above (live-count / LOAD_FACTOR) * 2, forced odd via "| 1".
    uint newLength = (uint)HashHelpers.GetPrime(((int)(size / LOAD_FACTOR) << 1) | 1);

    if (newLength > data.Length * COMPACT_FACTOR && newLength < data.Length * EXPAND_FACTOR)
    {
        /* Avoid unnecessary LOS allocations */
        RehashWithoutResize();
        return;
    }
    //Console.WriteLine ("--- resizing from {0} to {1}", data.Length, newLength);

    // The GC must know about the new array before entries are copied so the
    // pairs in it get ephemeron (weak key/value) semantics.
    Ephemeron[] tmp = new Ephemeron [newLength];
    GC.register_ephemeron_array(tmp);

    size = 0;
    for (int i = 0; i < data.Length; ++i)
    {
        object key = data[i].key;
        object value = data[i].value;

        // Skip empty slots and tombstones; only live pairs are re-inserted.
        if (key == null || key == GC.EPHEMERON_TOMBSTONE)
        {
            continue;
        }

        int len = tmp.Length;
        int idx, initial_idx;
        int free_slot = -1;

        // Linear probe from the key's home slot.
        idx = initial_idx = (RuntimeHelpers.GetHashCode(key) & int.MaxValue) % len;
        do
        {
            object k = tmp [idx].key;

            //keys might be GC'd during Rehash
            if (k == null || k == GC.EPHEMERON_TOMBSTONE)
            {
                free_slot = idx;
                break;
            }

            if (++idx == len) //Wrap around
            {
                idx = 0;
            }
        } while (idx != initial_idx);

        // NOTE(review): if the probe wraps without finding a free slot, free_slot
        // stays -1 and the writes below would throw; the sizing above presumably
        // guarantees free space — confirm against the LOAD_FACTOR invariant.
        tmp [free_slot].key = key;
        tmp [free_slot].value = value;
        ++size;
    }

    data = tmp;
}
// Creates the registration table with a prime-sized bucket/entry pair of
// arrays holding at least `capacity` entries; every bucket starts empty (-1).
public Registrations(int capacity)
{
    var size = HashHelpers.GetPrime(capacity);
    Buckets = new int[size];
    Entries = new Entry[size];
#if !NET40
    // Fill the bucket array with -1 via a raw pointer sweep, avoiding the
    // per-element bounds checks of the indexer loop used on NET40 below.
    unsafe
    {
        fixed(int *bucketsPtr = Buckets)
        {
            int *ptr = bucketsPtr;
            var end = bucketsPtr + Buckets.Length;
            while (ptr < end)
            {
                *ptr++ = -1;
            }
        }
    }
#else
    // NET40 fallback: plain loop, same result (-1 == "bucket empty").
    for (int i = 0; i < Buckets.Length; i++)
    {
        Buckets[i] = -1;
    }
#endif
}
// Grows the table to the next prime at least double the current size and
// re-inserts every stored element so it lands in its new bucket.
private void resize()
{
    // Keep a reference to the old table; no intermediate copy is needed,
    // because insert() only consumes the element data. (The original code
    // first cloned every node into a temporary array via `new Node(...)`,
    // doubling the allocations for no behavioral difference.)
    Node[] oldTable = this.table;
    int oldSize = this.size;

    this.size = HashHelpers.GetPrime(oldSize * 2);
    this.table = new Node[this.size];

    // Re-insert each surviving element; insert() recomputes its bucket
    // against the new table size.
    for (int i = 0; i < oldSize; i++)
    {
        if (oldTable[i] != null)
        {
            this.insert(oldTable[i].getdata());
        }
    }
}
// Creates a fixed-size hash file on disk: a 16-byte header followed by
// a prime number of empty, fixed-width entry records.
public static void CreateHeaderFile(string filename, int capacity)
{
    int count = HashHelpers.GetPrime(capacity);

    using (System.IO.Stream stream = System.IO.File.Create(filename))
    using (System.IO.BinaryWriter writer = new System.IO.BinaryWriter(stream))
    {
        // Header: bucket count, two zeroed counters, and -1.
        writer.Write(count);
        writer.Write(0);
        writer.Write(0);
        writer.Write(-1);

        // Each record: -1, three zeroed ints, and a blank fixed-width key.
        byte[] keydata = new byte[KEY_MAXLENGTH];
        for (int slot = 0; slot < count; slot++)
        {
            writer.Write(-1);
            writer.Write(0);
            writer.Write(0);
            writer.Write(0);
            writer.Write(keydata, 0, KEY_MAXLENGTH);
        }

        writer.Flush();
    }
}
// Shrinks the backing storage to the smallest prime that holds the current
// elements, compacting live slots and rebuilding the bucket chains.
// An empty set releases its arrays entirely.
public void TrimExcess()
{
    if (this.m_count == 0)
    {
        this.m_buckets = null;
        this.m_slots = null;
        this.m_version++;
        return;
    }

    int newSize = HashHelpers.GetPrime(this.m_count);
    Slot[] newSlots = new Slot[newSize];
    int[] newBuckets = new int[newSize];

    // Copy only live slots (hashCode >= 0), packing them densely from index 0.
    int dst = 0;
    for (int src = 0; src < this.m_lastIndex; src++)
    {
        if (this.m_slots[src].hashCode >= 0)
        {
            newSlots[dst] = this.m_slots[src];
            int bucket = newSlots[dst].hashCode % newSize;
            newSlots[dst].next = newBuckets[bucket] - 1;
            newBuckets[bucket] = dst + 1;
            dst++;
        }
    }

    this.m_lastIndex = dst;
    this.m_slots = newSlots;
    this.m_buckets = newBuckets;
    this.m_freeList = -1;
}
// Allocates the bucket and slot arrays, sized to the next prime >= capacity.
private void Initialize(int capacity)
{
    int tableSize = HashHelpers.GetPrime(capacity);
    _slots = new Slot[tableSize];
    _buckets = new int[tableSize];
}
// Reduces the backing arrays to the smallest prime capacity that fits the
// current element count, compacting live entries and rechaining buckets.
// When the set is empty the arrays are dropped and the version is bumped.
public void TrimExcess()
{
    if (_count == 0)
    {
        _buckets = null;
        _slots = null;
        _version++;
        return;
    }

    int capacity = HashHelpers.GetPrime(_count);
    var slots = new Slot[capacity];
    var buckets = new int[capacity];

    // Walk the old slot span, keeping only live entries (hashCode >= 0).
    int write = 0;
    for (int read = 0; read < _lastIndex; read++)
    {
        if (_slots[read].hashCode < 0)
        {
            continue;
        }
        slots[write] = _slots[read];
        int bucket = slots[write].hashCode % capacity;
        slots[write].next = buckets[bucket] - 1;
        buckets[bucket] = write + 1;
        write++;
    }

    _lastIndex = write;
    _slots = slots;
    _buckets = buckets;
    _freeList = -1;
}
// Compacts storage down to the smallest prime table that holds Count
// elements; releases all storage when the set is empty.
public void TrimExcess()
{
    if (Count == 0)
    {
        m_buckets = null;
        m_slots = null;
        return;
    }

    int capacity = HashHelpers.GetPrime(Count);
    Slot[] compacted = new Slot[capacity];
    int[] chains = new int[capacity];

    int next = 0;
    for (int i = 0; i < m_lastIndex; i++)
    {
        // Negative hash codes mark freed slots; skip them.
        if (m_slots[i].hashCode < 0)
        {
            continue;
        }
        compacted[next] = m_slots[i];
        int bucket = compacted[next].hashCode % capacity;
        compacted[next].next = chains[bucket] - 1;
        chains[bucket] = next + 1;
        next++;
    }

    m_lastIndex = next;
    m_slots = compacted;
    m_buckets = chains;
    m_freeList = -1;
}
// Builds an empty dictionary segment with `size` preallocated entries,
// tracked as a child of the given dirty tracker.
internal DictionarySegment(DirtyTracker tracker, int size)
{
    tracker = tracker.CreateChild();
    this.tracker = tracker;
    this.count = 0;

    // A zero-sized request still needs a usable table; take the smallest prime.
    if (size == 0)
    {
        size = HashHelpers.GetPrime(size);
    }

    var bucketTable = new int[size];
    for (int b = 0; b < bucketTable.Length; b++)
    {
        bucketTable[b] = -1; // -1 marks an empty bucket
    }
    this.buckets = bucketTable;

    this.entries = new DictionaryEntry<TKey, TValue>[size];
    this.comparer = ZeroFormatterEqualityComparer<TKey>.Default;
    this.freeList = -1;
    this.freeCount = 0;
}
// Reconstructs a dictionary segment from its serialized byte range.
// Layout after a 4-byte segment prefix: count (int32), then the serialized
// bucket list, then the serialized entry list — each formatter returns the
// number of bytes it consumed via `size`, which advances the offset.
DictionarySegment(DirtyTracker tracker, ArraySegment <byte> originalBytes)
{
    tracker = tracker.CreateChild();
    this.tracker = tracker;

    this.originalBytes = originalBytes;

    var bytes = originalBytes.Array;
    var offset = originalBytes.Offset + 4; // skip the 4-byte segment prefix

    this.count = BinaryUtil.ReadInt32(ref bytes, offset);
    offset += 4;

    var intListFormatter = Formatter <IList <int> > .Default;
    var entryListFormatter = Formatter <IList <DictionaryEntry <TKey, TValue> > > .Default;

    int size;
    this.buckets = intListFormatter.Deserialize(ref bytes, offset, tracker, out size);
    offset += size;

    this.entries = entryListFormatter.Deserialize(ref bytes, offset, tracker, out size);
    offset += size;

    // new size
    // An empty bucket list means the segment was serialized without a table;
    // allocate the minimal prime-sized table so later inserts work.
    if (buckets.Count == 0)
    {
        var capacity = HashHelpers.GetPrime(0);
        Resize(capacity);
    }

    this.comparer = ZeroFormatterEqualityComparer <TKey> .Default;
    this.freeList = -1;
    this.freeCount = 0;
}
// Constructs a hashtable with the given initial capacity and load factor.
// The caller's load factor is scaled by 0.72, the empirically optimal value
// for this table, and the bucket count is derived from capacity / loadFactor.
public HashtableEx(int capacity, float loadFactor)
{
    if (capacity < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(capacity), "SR.ArgumentOutOfRange_NeedNonNegNum");
    }
    // Written as a negated range test so NaN is rejected as well.
    if (!(loadFactor >= 0.1f && loadFactor <= 1.0f))
    {
        throw new ArgumentOutOfRangeException(nameof(loadFactor) /*, SR.Format("SR.ArgumentOutOfRange_HashtableLoadFactor", .1, 1.0)*/);
    }

    // Based on perf work, .72 is the optimal load factor for this table.
    _loadFactor = 0.72f * loadFactor;

    double requestedSize = capacity / _loadFactor;
    if (requestedSize > int.MaxValue)
    {
        throw new ArgumentException("SR.Arg_HTCapacityOverflow", nameof(capacity));
    }

    // Avoid awfully small sizes.
    int tableSize;
    if (requestedSize > InitialSize)
    {
        tableSize = HashHelpers.GetPrime((int)requestedSize);
    }
    else
    {
        tableSize = InitialSize;
    }
    _buckets = new bucket[tableSize];

    _loadsize = (int)(_loadFactor * tableSize);
    // Based on the current algorithm, loadsize must be less than hashsize.
    Debug.Assert(_loadsize < tableSize, "Invalid hashtable loadsize!");
}
// Sets up the bucket and slot arrays at the next prime size >= capacity.
private void Initialize(int capacity)
{
    int tableSize = HashHelpers.GetPrime(capacity);
    this.m_slots = new Slot[tableSize];
    this.m_buckets = new int[tableSize];
}
// Rebuilds the entry table. If any entry holds a collected (dead) primary,
// the table is rebuilt at the same size so those slots can be reclaimed;
// otherwise it grows to a prime near double the current bucket count.
private void Resize()
{
    int num2;
    int length = this._buckets.Length;

    // Scan for at least one dead entry: handle allocated but primary collected.
    bool flag = false;
    for (num2 = 0; num2 < this._entries.Length; num2++)
    {
        if (this._entries[num2].depHnd.IsAllocated && (this._entries[num2].depHnd.GetPrimary() == null))
        {
            flag = true;
            break;
        }
    }

    if (!flag)
    {
        // Nothing to reclaim: grow instead (minimum 6, else double).
        length = HashHelpers.GetPrime((this._buckets.Length == 0) ? 6 : (this._buckets.Length * 2));
    }

    // Fresh buckets, all empty (-1); num3 threads the rebuilt free list.
    int num3 = -1;
    int[] numArray = new int[length];
    for (int i = 0; i < length; i++)
    {
        numArray[i] = -1;
    }
    Entry <TKey, TValue>[] entryArray = new Entry <TKey, TValue> [length];

    num2 = 0;
    while (num2 < this._entries.Length)
    {
        DependentHandle depHnd = this._entries[num2].depHnd;
        if (depHnd.IsAllocated && (depHnd.GetPrimary() != null))
        {
            // Live entry: keep its slot index, re-link into its bucket chain.
            int index = this._entries[num2].hashCode % length;
            entryArray[num2].depHnd = depHnd;
            entryArray[num2].hashCode = this._entries[num2].hashCode;
            entryArray[num2].next = numArray[index];
            numArray[index] = num2;
        }
        else
        {
            // Dead or unallocated: free the handle and push the slot on the free list.
            this._entries[num2].depHnd.Free();
            entryArray[num2].depHnd = new DependentHandle();
            entryArray[num2].next = num3;
            num3 = num2;
        }
        num2++;
    }

    // Any capacity added by growth also goes on the free list.
    while (num2 != entryArray.Length)
    {
        entryArray[num2].depHnd = new DependentHandle();
        entryArray[num2].next = num3;
        num3 = num2;
        num2++;
    }

    this._buckets = numArray;
    this._entries = entryArray;
    this._freeList = num3;
}
/// <summary>
/// Initializes buckets and slots arrays. Uses suggested capacity by finding next prime
/// greater than or equal to capacity.
/// </summary>
/// <param name="capacity">Requested minimum capacity; the table is sized to a prime at least this large.</param>
private void Initialize(int capacity)
{
    Debug.Assert(m_buckets == null, "Initialize was called but m_buckets was non-null");

    int tableSize = HashHelpers.GetPrime(capacity);
    m_slots = new Slot[tableSize];
    m_buckets = new int[tableSize];
}
// Rebuilds the table. When dead entries exist (handle allocated but primary
// collected) the rebuild happens at the current size to reclaim them;
// otherwise the table grows to a prime near double the bucket count.
private void Resize()
{
    int length = this._buckets.Length;

    // Look for at least one dead entry worth reclaiming.
    bool flag = false;
    for (int index = 0; index < this._entries.Length; ++index)
    {
        if (this._entries[index].depHnd.IsAllocated && this._entries[index].depHnd.GetPrimary() == null)
        {
            flag = true;
            break;
        }
    }

    if (!flag)
    {
        // Nothing to reclaim: grow instead (minimum 6, else double).
        length = HashHelpers.GetPrime(this._buckets.Length == 0 ? 6 : this._buckets.Length * 2);
    }

    // Fresh buckets, all empty (-1); num threads the rebuilt free list.
    int num = -1;
    int[] numArray = new int[length];
    for (int index = 0; index < length; ++index)
    {
        numArray[index] = -1;
    }
    ConditionalWeakTable <TKey, TValue> .Entry[] entryArray = new ConditionalWeakTable <TKey, TValue> .Entry[length];

    int index1;
    for (index1 = 0; index1 < this._entries.Length; ++index1)
    {
        DependentHandle dependentHandle = this._entries[index1].depHnd;
        if (dependentHandle.IsAllocated && dependentHandle.GetPrimary() != null)
        {
            // Live entry: keep its slot index, re-link into the (possibly resized) bucket.
            int index2 = this._entries[index1].hashCode % length;
            entryArray[index1].depHnd = dependentHandle;
            entryArray[index1].hashCode = this._entries[index1].hashCode;
            entryArray[index1].next = numArray[index2];
            numArray[index2] = index1;
        }
        else
        {
            // Dead: release the handle and add the slot to the free list.
            this._entries[index1].depHnd.Free();
            entryArray[index1].depHnd = new DependentHandle();
            entryArray[index1].next = num;
            num = index1;
        }
    }

    // Capacity added by growth also joins the free list.
    for (; index1 != entryArray.Length; ++index1)
    {
        entryArray[index1].depHnd = new DependentHandle();
        entryArray[index1].next = num;
        num = index1;
    }

    this._buckets = numArray;
    this._entries = entryArray;
    this._freeList = num;
}
// Rebuilds the table: same size when dead entries can be reclaimed,
// otherwise grown to a prime near double the current bucket count.
private void Resize()
{
    int num = this._buckets.Length;

    // Detect whether any entry's primary has been collected.
    bool flag = false;
    int i;
    for (i = 0; i < this._entries.Length; i++)
    {
        if (this._entries[i].depHnd.IsAllocated && this._entries[i].depHnd.GetPrimary() == null)
        {
            flag = true;
            break;
        }
    }

    if (!flag)
    {
        // No dead entries: grow (minimum 6, else double) instead.
        num = HashHelpers.GetPrime((this._buckets.Length == 0) ? 6 : (this._buckets.Length * 2));
    }

    // num2 threads the rebuilt free list; buckets start empty (-1).
    int num2 = -1;
    int[] array = new int[num];
    for (int j = 0; j < num; j++)
    {
        array[j] = -1;
    }
    ConditionalWeakTable <TKey, TValue> .Entry[] array2 = new ConditionalWeakTable <TKey, TValue> .Entry[num];

    for (i = 0; i < this._entries.Length; i++)
    {
        DependentHandle depHnd = this._entries[i].depHnd;
        if (depHnd.IsAllocated && depHnd.GetPrimary() != null)
        {
            // Live: keep the slot index, re-link into its bucket chain.
            int num3 = this._entries[i].hashCode % num;
            array2[i].depHnd = depHnd;
            array2[i].hashCode = this._entries[i].hashCode;
            array2[i].next = array[num3];
            array[num3] = i;
        }
        else
        {
            // Dead: free the handle and push the slot on the free list.
            this._entries[i].depHnd.Free();
            array2[i].depHnd = default(DependentHandle);
            array2[i].next = num2;
            num2 = i;
        }
    }

    // Extra slots from growth also go on the free list.
    while (i != array2.Length)
    {
        array2[i].depHnd = default(DependentHandle);
        array2[i].next = num2;
        num2 = i;
        i++;
    }

    this._buckets = array;
    this._entries = array2;
    this._freeList = num2;
}
// Allocates a fixed-capacity hash table sized for `size` elements, with
// roughly 10% slack to keep collisions down.
internal FixedHashTable(int size)
{
    // Prime number is essential to reduce hash collision
    // Add 10%, minimum 11 to make sure hash table has around 10% free entries to reduce collision
    // NOTE(review): size * 11 can overflow int for very large sizes (> ~195M);
    // confirm callers stay well below that.
    m_size = HashHelpers.GetPrime(Math.Max(11, size * 11 / 10));

    // Using int array instead of creating an Entry[] array with three ints to avoid
    // adding a new array type, which costs around 3kb in binary size
    m_entries = new int[m_size * 3];
}
// Allocates the hash-table storage at the next prime >= capacity, resets
// the free list, and returns the actual table size chosen.
private int Initialize(int capacity)
{
    int tableSize = HashHelpers.GetPrime(capacity);

    _freeList = -1;
    _entries = new Entry[tableSize];
    _buckets = new int[tableSize];

    return tableSize;
}
//===============================================================//
// Sizes the table to the next prime >= capacity, marks every bucket
// empty (-1), and resets the free list.
private void Initialize( int capacity )
{
    int tableSize = HashHelpers.GetPrime( capacity );

    var buckets = new int[ tableSize ];
    for ( int b = 0; b < buckets.Length; b++ )
    {
        buckets[ b ] = -1;
    }
    _Buckets = buckets;

    _Entries = new Entry[ tableSize ];
    _FreeList = -1;
}
// Allocates prime-sized bucket/entry arrays; buckets start empty (-1)
// and the free list starts unset.
private void Initialize(int capacity)
{
    int tableSize = HashHelpers.GetPrime(capacity);

    buckets = new int[tableSize];
    for (int b = 0; b < tableSize; b++)
    {
        buckets[b] = -1;
    }

    entries = new Entry[tableSize];
    freeList = -1;
}
// Creates the pooled bucket/entry lists; buckets are pre-filled with -1
// (empty marker) up to the prime-rounded capacity.
private void Initialize(int capacity)
{
    var bucketCount = HashHelpers.GetPrime(capacity);

    _buckets = new PoolingListVal<int>();
    for (var b = 0; b < bucketCount; b++)
    {
        _buckets.Add(-1);
    }

    _entries = new PoolingListVal<Entry>();
    _freeList = -1;
}
//public Dictionary(IDictionary<TKey, TValue> dictionary) : this(dictionary, null) { }

#region private

// Allocates the table storage; buckets start empty (-1) and the free list
// starts unset.
private void Initialize(int capacity)
{
    // Smallest prime greater than or equal to the requested dictionary capacity.
    int size = HashHelpers.GetPrime(capacity);
    buckets = new int[size];
    for (int i = 0; i < buckets.Length; i++)
    {
        buckets[i] = -1;
    }
    entries = new Entry[size];
    freeList = -1;
}
// Returns size of hashtable to grow to.
public static int ExpandPrime(int oldSize)
{
    int newSize = 2 * oldSize;

    // Allow the hashtables to grow to maximum possible size (~2G elements) before encountering capacity overflow.
    // Note that this check works even when newSize overflowed, thanks to the (uint) cast:
    // a negative int becomes a large uint, which still exceeds MaxPrimeArrayLength.
    if ((uint)newSize > HashHelpers.MaxPrimeArrayLength && HashHelpers.MaxPrimeArrayLength > oldSize)
    {
        System.Diagnostics.Contracts.Contract.Assert(HashHelpers.MaxPrimeArrayLength == HashHelpers.GetPrime(HashHelpers.MaxPrimeArrayLength), "Invalid MaxPrimeArrayLength");
        return(HashHelpers.MaxPrimeArrayLength);
    }

    return(HashHelpers.GetPrime(newSize));
}
// Sets up the parallel hash-table arrays. When forceSize is true the
// capacity is used verbatim; otherwise it is rounded up to the next prime.
void Initialize(int capacity, bool forceSize)
{
    int tableSize = forceSize ? capacity : HashHelpers.GetPrime(capacity);

    buckets = new int[tableSize];
    entriesHashCode = new int[tableSize];
    entriesKey = new TKey[tableSize];
    entriesNext = new int[tableSize];
    entriesValue = new TElement[tableSize];

    // -1 marks an empty bucket / end of a collision chain.
    for (int i = 0; i < tableSize; i++)
    {
        buckets[i] = -1;
        entriesNext[i] = -1;
    }
}
// Allocates a native-memory hash set with prime-sized bucket/slot storage,
// using the given allocator for all backing memory.
public HashSet(int capacity, Allocator allocator)
{
    _control = (HashSetControl *)Util.Malloc <HashSetControl>(allocator);

    var prime = HashHelpers.GetPrime(capacity);
    _buckets = new List <int>(prime, allocator);
    _slots = new List <Slot>(prime, allocator);

    // Resize (not just reserve) so every slot/bucket element exists and is zeroed.
    _slots.Resize(prime, NativeArrayOptions.ClearMemory);
    _buckets.Resize(prime, NativeArrayOptions.ClearMemory);

    _control->LastIndex = 0;
    _control->Count = 0;
    _control->FreeList = -1;
    _control->Allocator = allocator;

    SetCapacity(prime);
}
/*LOCKING: _lock must be held*/
// Grows the ephemeron array to the next prime above double the current
// length (forced odd via "| 1") and re-inserts all live key/value pairs
// using linear probing.
void Rehash()
{
    uint newSize = (uint)HashHelpers.GetPrime((data.Length << 1) | 1);
    //Console.WriteLine ("--- resizing from {0} to {1}", data.Length, newSize);

    // The GC must be told about the new array before it is populated so its
    // pairs get ephemeron (weak key/value) semantics.
    Ephemeron[] tmp = new Ephemeron [newSize];
    GC.register_ephemeron_array(tmp);

    size = 0;
    for (int i = 0; i < data.Length; ++i)
    {
        object key = data[i].key;
        object value = data[i].value;

        // Skip empty slots and tombstones; only live pairs survive the rehash.
        if (key == null || key == GC.EPHEMERON_TOMBSTONE)
        {
            continue;
        }

        int len = tmp.Length;
        int idx, initial_idx;
        int free_slot = -1;

        // Linear probe from the key's home slot.
        idx = initial_idx = (RuntimeHelpers.GetHashCode(key) & int.MaxValue) % len;
        do
        {
            object k = tmp [idx].key;

            //keys might be GC'd during Rehash
            if (k == null || k == GC.EPHEMERON_TOMBSTONE)
            {
                free_slot = idx;
                break;
            }

            if (++idx == len) //Wrap around
            {
                idx = 0;
            }
        } while (idx != initial_idx);

        // NOTE(review): free_slot stays -1 if the probe wraps with no free slot;
        // the doubled size presumably guarantees room — confirm.
        tmp [free_slot].key = key;
        tmp [free_slot].value = value;
        ++size;
    }

    data = tmp;
}
// Builds the parallel entry arrays; capacity is taken as-is when forceSize
// is set, otherwise rounded up to the next prime.
void Initialize(int capacity, bool forceSize)
{
    int tableSize = forceSize ? capacity : HashHelpers.GetPrime(capacity);

    var newBuckets = new int[tableSize];
    for (int b = 0; b < newBuckets.Length; b++)
    {
        newBuckets[b] = -1; // empty-bucket marker
    }
    buckets = newBuckets;

    entriesHashCode = new int[tableSize];
    entriesKey = new string[tableSize];
    entriesNext = new int[tableSize];
    entriesValue = new ReplaceResData[tableSize];
    freeList = -1;
}
// Ensures the entry array can hold at least `capacity` elements, growing
// to the next prime when needed; returns the resulting capacity.
public int EnsureCapacity(int capacity)
{
    if (capacity < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(capacity));
    }

    int current = _entries.Length;
    if (current >= capacity)
    {
        // Already large enough; no resize, no version bump.
        return current;
    }

    int grown = HashHelpers.GetPrime(capacity);
    Resize(grown);
    ++_version;
    return grown;
}