/// <summary>
/// Stores <paramref name="text"/> in the character buffer (deduplicated) and returns its index position.
/// If an identical sequence was stored previously, the existing position is returned instead of storing again.
/// </summary>
/// <param name="text">Character sequence to store.</param>
/// <returns>Index position of the stored (or previously stored) text.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when the buffer or lookup cannot accommodate the text even after growing.
/// </exception>
public int Store(ReadOnlySpan<char> text)
{
    int hash = hashSequence(text);

    // Already stored? Reuse the existing position.
    int foundPos = findByHash(hash, text);
    if (foundPos != -1)
    {
        return foundPos;
    }

    int pos = charBuffer.Store(text);
    if (pos == -1)
    {
        // Buffer is full: grow it. Doubling is the default; make sure the new
        // capacity also covers the current contents plus this text.
        int newSize = charBuffer.Capacity * 2;
        if (newSize < text.Length + charBuffer.Capacity + 2) // Allow 2 for null terminators
        {
            newSize = charBuffer.Capacity + text.Length + 2;
        }
        charBuffer.Resize(newSize);

        pos = charBuffer.Store(text);
        if (pos == -1)
        {
            throw new InvalidOperationException("Resizing charBuffer didn't give us enough space");
        }
    }

    // Record the hash -> position mapping; grow the lookup on collision overflow.
    if (!chainedLookup.Store(hash, pos))
    {
        rebuildLookup();

        // Surface a failure here instead of silently dropping the mapping;
        // rebuildLookup() is expected to have made room for one more entry.
        if (!chainedLookup.Store(hash, pos))
        {
            throw new InvalidOperationException("Rebuilding the lookup did not make room for the new entry");
        }
    }

    return pos;
}
/// <summary>
/// Rebuilds the hash lookup at double its current capacity and re-indexes
/// every item already stored in the character buffer.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when an item still cannot be stored in the enlarged lookup
/// (too many hash collisions for the configured linear search limit).
/// </exception>
private void rebuildLookup()
{
    // Doubling capacity will halve the number of moduloed hash collisions
    var newLookup = new HashBucket(chainedLookup.Capacity * 2, linearSearchLimit);

    // Populate a new lookup from our existing data.
    foreach (var itm in charBuffer)
    {
        if (!newLookup.Store(hashSequence(itm.Span), itm.Pos))
        {
            throw new InvalidOperationException($"Too many hash collisions. Increase {nameof(LinearSearchLimit)} to overcome.");
        }
    }

    // Use the new lookup
    chainedLookup = newLookup;
}
/// <summary>
/// Rebuilds the hash buckets at double the current capacity, recomputing each
/// entry's hash from its full text content.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when an entry cannot be stored in the enlarged bucket set.
/// </exception>
private void rebuildBuckets()
{
    var newBuckets = new HashBucket(buckets.Capacity * 2, buckets.MaxChain);

    for (int idx = 0; idx < entries.Count; idx++)
    {
        // Recompute the entry's hash over all of its text segments.
        // NOTE(review): the chain is walked in reverse — presumably segments are
        // chained newest-first, so reversing restores original character order; confirm.
        var hashCode = newHashCode();
        foreach (var textRef in chain(idx).Reverse())
        {
            var text = buf.Retrieve(textRef);
            foreach (var elem in text)
            {
                hashCode.Add(elem);
            }
        }

        int hash = hashCode.ToHashCode();

        // A failed store would silently leave this entry unreachable through the
        // lookup; fail loudly instead (matches rebuildLookup's behavior).
        if (!newBuckets.Store(hash, idx))
        {
            throw new InvalidOperationException("Too many hash collisions while rebuilding buckets.");
        }
    }

    this.buckets = newBuckets;
}