/// <summary>
/// Builds the buffer from the supplied options: a character store sized to
/// <c>InitialCharCapacity</c> and a hash lookup sized to <c>InitialHashCapacity</c>.
/// The hash factory and search limit are kept for later use (e.g. lookup rebuilds).
/// </summary>
/// <param name="options">Initial capacities, collision search limit and hash factory.</param>
public HashedCharBuffer(HashedCharBufferOptions options)
{
    this.newHashCode = options.NewHashCode;
    this.linearSearchLimit = options.LinearSearchLimit;
    charBuffer = new CharBuffer(options.InitialCharCapacity);
    chainedLookup = new HashBucket(options.InitialHashCapacity, options.LinearSearchLimit);
}
/// <summary>
/// Rebuilds the hash lookup at double the current capacity and re-indexes every
/// sequence already present in the character buffer.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when a bucket still overflows after doubling; increasing
/// <c>LinearSearchLimit</c> is the remedy.
/// </exception>
private void rebuildLookup()
{
    // Doubling capacity will halve the number of moduloed hash collisions
    var newLookup = new HashBucket(chainedLookup.Capacity * 2, linearSearchLimit);

    // Populate a new lookup from our existing data.
    foreach (var itm in charBuffer)
    {
        if (!newLookup.Store(hashSequence(itm.Span), itm.Pos))
        {
            // CA2201: throw a specific exception type, not bare Exception.
            // InvalidOperationException is still caught by existing catch (Exception) handlers.
            throw new InvalidOperationException($"Too many hash collisions. Increase {nameof(LinearSearchLimit)} to overcome.");
        }
    }

    // Use the new lookup
    chainedLookup = newLookup;
}
/// <summary>
/// Wires up the path store: a hashed character buffer for the path text, a
/// hash-bucket index over stored entries, and an empty entry list.
/// </summary>
/// <param name="options">Capacities, chain/search limits and the hash factory.</param>
public PathStorage(PathStorageOptions options)
{
    this.newHashCode = options.NewHashCode;

    // Forward the text-related settings on to the underlying character buffer.
    var bufferOptions = new HashedCharBufferOptions()
    {
        NewHashCode = options.NewHashCode,
        InitialCharCapacity = options.InitialCharCapacity,
        InitialHashCapacity = options.InitialHashCapacity,
        LinearSearchLimit = options.LinearSearchLimit
    };
    buf = new HashedCharBuffer(bufferOptions);

    buckets = new HashBucket(options.HashBucketInitialCapacity, options.HashBucketMaxChain);
    entries = new List<Entry>();
}
/// <summary>
/// Rebuilds the entry index at double the current capacity, recomputing each
/// entry's hash from its full chained text.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when a bucket still overflows after doubling, so an entry would
/// otherwise silently vanish from the lookup.
/// </exception>
private void rebuildBuckets()
{
    // Doubling capacity will halve the number of moduloed hash collisions.
    var newBuckets = new HashBucket(buckets.Capacity * 2, buckets.MaxChain);

    for (int idx = 0; idx < entries.Count; idx++)
    {
        // Re-hash the entry's complete text by walking its chain.
        // NOTE(review): chain(idx) is reversed before hashing — presumably it
        // yields leaf-first and the hash must be root-first; confirm.
        var hashCode = newHashCode();
        foreach (var textRef in chain(idx).Reverse())
        {
            var text = buf.Retrieve(textRef);
            foreach (var elem in text)
            {
                hashCode.Add(elem);
            }
        }
        int hash = hashCode.ToHashCode();

        // Fix: the original ignored Store's result, silently dropping the entry
        // on overflow. Fail loudly instead, consistent with the buffer's own
        // lookup-rebuild behavior.
        if (!newBuckets.Store(hash, idx))
        {
            throw new InvalidOperationException("Too many hash collisions while rebuilding buckets. Increase the maximum chain length to overcome.");
        }
    }

    this.buckets = newBuckets;
}