internal override Bucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<TKey> comparer, IEqualityComparer<TValue> valueComparer, bool overwriteExistingValue)
{
    // A list bucket only holds entries that share one hash; a different hash
    // forces a split into a hash bucket.
    if (this.Hash != bucket.Hash)
    {
        return new HashBucket(suggestedHashRoll, this, bucket);
    }

    int pos = this.Find(bucket.Key, comparer);
    if (pos < 0)
    {
        // Key not present under this hash: append the new entry.
        return new ListBucket(this.buckets.InsertAt(this.buckets.Length, bucket));
    }

    // Key already present. If the stored value matches, keep this bucket as-is.
    if (valueComparer.Equals(bucket.Value, this.buckets[pos].Value))
    {
        return this;
    }

    if (!overwriteExistingValue)
    {
        // Add semantics: a duplicate key with a different value is an error.
        throw new ArgumentException(Strings.DuplicateKey);
    }

    // SetItem semantics: replace the entry for this key.
    return new ListBucket(this.buckets.ReplaceAt(pos, bucket));
}
internal override Bucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<TKey> keyComparer, IEqualityComparer<TValue> valueComparer, bool overwriteExistingValue)
{
    int logicalSlot = ComputeLogicalSlot(bucket.Hash);
    if (IsInUse(logicalSlot))
    {
        // The slot already holds a bucket; fold the new entry into it.
        int physicalSlot = ComputePhysicalSlot(logicalSlot);
        var occupant = this.buckets[physicalSlot];

        // The caller's hash-roll suggestion is deliberately ignored; basing
        // the hint on this bucket's own roll (+5) makes a nested hash bucket
        // pick its logical slots from entirely new hash bits.
        var merged = occupant.Add(this.hashRoll + 5, bucket, keyComparer, valueComparer, overwriteExistingValue);
        if (merged == occupant)
        {
            // Nothing changed below us, so this bucket is unchanged too.
            return this;
        }

        var replaced = this.buckets.ReplaceAt(physicalSlot, merged);
        return new HashBucket(this.hashRoll, this.used, replaced, this.count - occupant.Count + merged.Count);
    }
    else
    {
        // Free slot: insert the value bucket and record the slot as used.
        int physicalSlot = ComputePhysicalSlot(logicalSlot);
        var expanded = this.buckets.InsertAt(physicalSlot, bucket);
        var marked = InsertBit(logicalSlot, this.used);
        return new HashBucket(this.hashRoll, marked, expanded, this.count + bucket.Count);
    }
}
internal override AbstractBucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<K> keyComparer, bool mutate)
{
    // Entries with a different hash cannot live in this list; split into a
    // hash bucket instead.
    if (this.Hash != bucket.Hash)
    {
        return new HashBucket(suggestedHashRoll, this, bucket);
    }

    int pos = this.Find(bucket.Key, keyComparer);
    if (pos < 0)
    {
        // New key for this hash: append to the list.
        return new ListBucket(this.buckets.InsertAt(this.buckets.Length, bucket));
    }

    if (mutate)
    {
        // Mutable build mode: overwrite in place and keep this instance.
        this.buckets[pos] = bucket;
        return this;
    }

    // Immutable mode: produce a new list with the entry replaced.
    return new ListBucket(this.buckets.ReplaceAt(pos, bucket));
}
internal override Bucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<TKey> comparer, IEqualityComparer<TValue> valueComparer, bool overwriteExistingValue)
{
    // Different hashes: a hash bucket can distribute the two entries.
    if (this.Hash != bucket.Hash)
    {
        return new HashBucket(suggestedHashRoll, this, bucket);
    }

    // Same hash, different keys — a hash bucket can never separate these two,
    // so they must share a list bucket.
    if (!comparer.Equals(this.Key, bucket.Key))
    {
        return new ListBucket(new ValueBucket[] { this, bucket });
    }

    // Overwrite of the same key: if the value also matches, keep this bucket.
    if (valueComparer.Equals(this.Value, bucket.Value))
    {
        return this;
    }

    if (!overwriteExistingValue)
    {
        throw new ArgumentException(Strings.DuplicateKey);
    }

    // Replacement value: the incoming bucket takes this one's place.
    return bucket;
}
/// <summary>
/// Adds the given value bucket into this bucket, returning the resulting bucket.
/// </summary>
/// <param name="suggestedHashRoll">Hash-roll hint passed to a <c>HashBucket</c> when a split is needed (see the overrides).</param>
/// <param name="bucket">The key/value/hash triple to add.</param>
/// <param name="comparer">Comparer used to match keys.</param>
/// <param name="valueComparer">Comparer used to detect unchanged values so the original bucket can be returned.</param>
/// <param name="overwriteExistingValue">When true, an existing key's value is replaced; when false, a duplicate key with a different value throws (see overrides).</param>
internal abstract Bucket Add( int suggestedHashRoll, ValueBucket bucket, IEqualityComparer <TKey> comparer, IEqualityComparer <TValue> valueComparer, bool overwriteExistingValue );
internal override Bucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<TKey> keyComparer, IEqualityComparer<TValue> valueComparer, bool overwriteExistingValue)
{
    int logicalSlot = ComputeLogicalSlot(bucket.Hash);

    if (!IsInUse(logicalSlot))
    {
        // Vacant slot: splice the new bucket in and flag the slot as used.
        int insertSlot = ComputePhysicalSlot(logicalSlot);
        var grown = _buckets.InsertAt(insertSlot, bucket);
        var usedBits = InsertBit(logicalSlot, _used);
        return new HashBucket(_hashRoll, usedBits, grown, _count + bucket.Count);
    }

    // Occupied slot: delegate the add to the resident bucket. The hash-roll
    // hint (_hashRoll + 5) pushes any nested hash bucket onto fresh hash bits
    // rather than reusing the caller's suggestion.
    int slot = ComputePhysicalSlot(logicalSlot);
    var resident = _buckets[slot];
    var result = resident.Add(_hashRoll + 5, bucket, keyComparer, valueComparer, overwriteExistingValue);
    if (result == resident)
    {
        // No change below means no change here either.
        return this;
    }

    var swapped = _buckets.ReplaceAt(slot, result);
    return new HashBucket(_hashRoll, _used, swapped, _count - resident.Count + result.Count);
}
internal override Bucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<TKey> comparer, IEqualityComparer<TValue> valueComparer, bool overwriteExistingValue)
{
    // Hashes differ: promote to a hash bucket that can separate the entries.
    if (bucket.Hash != this.Hash)
    {
        return new HashBucket(suggestedHashRoll, this, bucket);
    }

    // Hash collision with a distinct key: the pair must share a list bucket.
    if (!comparer.Equals(bucket.Key, this.Key))
    {
        return new ListBucket(new ValueBucket[] { this, bucket });
    }

    // Same key, same value: nothing to change.
    if (valueComparer.Equals(bucket.Value, this.Value))
    {
        return this;
    }

    // Same key, new value: only legal when overwriting is permitted.
    if (overwriteExistingValue)
    {
        return bucket;
    }

    throw new ArgumentException(Strings.DuplicateKey);
}
private AbstractBucket AddBucket(AbstractBucket root, K key, V value, bool mutate)
{
    // Wrap the pair in a value bucket up front.
    var entry = new ValueBucket(key, value, this.KeyComparer);

    // An empty tree's new root is simply that bucket; otherwise let the
    // existing root fold it in.
    if (root == null)
    {
        return entry;
    }

    return root.Add(0, entry, this.KeyComparer, mutate);
}
public ImmutableHashMap<TKey, TValue> Add(TKey key, TValue value)
{
    Requires.NotNullAllowStructs(key, "key");
    Contract.Ensures(Contract.Result<ImmutableHashMap<TKey, TValue>>() != null);

    // Hash the key exactly once; the bucket carries key, value and hash together.
    var entry = new ValueBucket(key, value, this.keyComparer.GetHashCode(key));
    if (this.root == null)
    {
        // Empty map: the new entry becomes the root.
        return this.Wrap(entry);
    }

    // overwriteExistingValue is false: Add throws on a duplicate key whose
    // value differs (see the bucket Add overrides).
    return this.Wrap(this.root.Add(0, entry, this.keyComparer, this.valueComparer, false));
}
public ImmutableHashMap<TKey, TValue> SetItem(TKey key, TValue value)
{
    Requires.NotNullAllowStructs(key, "key");
    Contract.Ensures(Contract.Result<ImmutableHashMap<TKey, TValue>>() != null);
    Contract.Ensures(!Contract.Result<ImmutableHashMap<TKey, TValue>>().IsEmpty);

    // Hash once up front and package the pair as a value bucket.
    var entry = new ValueBucket(key, value, _keyComparer.GetHashCode(key));

    // overwriteExistingValue is true: SetItem replaces an existing key's
    // value instead of throwing, which is what distinguishes it from Add.
    return _root == null
        ? this.Wrap(entry)
        : this.Wrap(_root.Add(0, entry, _keyComparer, _valueComparer, true));
}
public bool IsParameter(ARmDataSet ds, string name, ValueBucket value, Func<ARmDataSet, string, ValueBucket, bool> del)
{
    // Give the caller-supplied delegate first crack at resolving the parameter.
    if (del(ds, name, value))
    {
        return true;
    }

    // Delegate declined: fall back to the well-known key table and, on a hit,
    // populate the bucket from the data set.
    Keys key;
    if (_keysDictionary.TryGetValue(name, out key))
    {
        value.Value = ds[key];
        return true;
    }

    return false;
}
internal override AbstractBucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer<K> keyComparer, bool mutate)
{
    // Different hashes can be separated by a hash bucket.
    if (this.Hash != bucket.Hash)
    {
        return new HashBucket(suggestedHashRoll, this, bucket);
    }

    // Same hash: either the same key (overwrite with the new bucket) or a
    // true collision, which must be stored in a list bucket.
    return keyComparer.Equals(this.Key, bucket.Key)
        ? (AbstractBucket)bucket
        : new ListBucket(new ValueBucket[] { this, bucket });
}
/// <summary>
/// Builds a datapool from the given metadata, validating that the configured
/// agent/process/thread topology can support the requested distribution mode,
/// optionally shuffling the values deterministically, and slicing the value
/// range into per-thread or shared buckets.
/// </summary>
/// <param name="grinderContext">Context supplying topology properties (agent/process/thread counts and numbers).</param>
/// <param name="datapoolMetadata">Name, values, distribution mode, randomization and circularity settings.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="ArgumentException">The topology cannot satisfy thread uniqueness, or the pool is too small.</exception>
public Datapool(IGrinderContext grinderContext, IDatapoolMetatdata<T> datapoolMetadata)
{
    if (grinderContext == null)
    {
        throw new ArgumentNullException("grinderContext");
    }

    if (datapoolMetadata == null)
    {
        throw new ArgumentNullException("datapoolMetadata");
    }

    GrinderContext = grinderContext;
    distributionMode = datapoolMetadata.DistributionMode;
    PhysicalSize = datapoolMetadata.Values.Count;

    // Topology: how many agents/processes/threads share this pool, and where
    // this particular process sits inside that topology.
    int minCapacity;
    int agentCount = int.Parse(GrinderContext.GetProperty(Constants.AgentCountKey, "1"));
    int processCount = int.Parse(GrinderContext.GetProperty(Constants.ProcessCountKey, "1"));
    int agentOffset = GrinderContext.AgentNumber;
    int processOffset = GrinderContext.ProcessNumber - GrinderContext.FirstProcessNumber;
    threadCount = int.Parse(GrinderContext.GetProperty(Constants.ThreadCountKey, "1"));

    if (distributionMode == DatapoolThreadDistributionMode.ThreadUnique)
    {
        // Thread-unique distribution requires a consistent topology, otherwise
        // two threads could end up drawing from the same slice.
        if (!(agentCount > GrinderContext.AgentNumber))
        {
            throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Cannot create thread unique datapool '{0}', because property '{1}' = '{2}'. Current AgentNumber = '{3}' and indicates that '{1}' must be at least '{4}' for thread uniqueness to work correctly", datapoolMetadata.Name, Constants.AgentCountKey, agentCount, GrinderContext.AgentNumber, GrinderContext.AgentNumber + 1));
        }

        if (processOffset < 0)
        {
            throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Cannot create thread unique datapool '{0}', because thread offset negative ({1}). FirstProcessNumber = {2}, ProcessNumber = {3}", datapoolMetadata.Name, processOffset, GrinderContext.FirstProcessNumber, GrinderContext.ProcessNumber));
        }

        if (!(processCount > processOffset))
        {
            throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Cannot create thread unique datapool '{0}', because thread offset = '{1}' is not less than property 'grinder.threads' = '{2}'. FirstProcessNumber = {3}, ProcessNumber = {4}", datapoolMetadata.Name, processOffset, processCount, GrinderContext.FirstProcessNumber, GrinderContext.ProcessNumber));
        }

        // Every thread of every process of every agent needs its own record.
        minCapacity = agentCount * processCount * threadCount;
    }
    else
    {
        minCapacity = 1;
    }

    if (PhysicalSize < minCapacity)
    {
        throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Too low capacity for datapool '{0}', expected at least '{1}', but was '{2}'", datapoolMetadata.Name, minCapacity, PhysicalSize));
    }

    T[] values = datapoolMetadata.Values.ToArray();
    if (datapoolMetadata.IsRandom && PhysicalSize > 1)
    {
        // Seeded shuffle: the fixed seed guarantees every agent/process
        // produces the same permutation, keeping slice boundaries consistent.
        var random = new Random(datapoolMetadata.Seed);
        for (int i = 0; i < PhysicalSize; i++)
        {
            int swapWith = random.Next(PhysicalSize);
            T orgValue = values[i];
            values[i] = values[swapWith];
            values[swapWith] = orgValue;
        }
    }

    // Narrow the full [0, PhysicalSize-1] range first to this agent's share,
    // then to this process's share of that.
    Tuple<int, int> agentSlice = GetSubtupleInTupleSlicedBy(agentOffset, new Tuple<int, int>(0, PhysicalSize - 1), agentCount);
    Tuple<int, int> processSlice = GetSubtupleInTupleSlicedBy(processOffset, agentSlice, processCount);

    if (distributionMode == DatapoolThreadDistributionMode.ThreadShared)
    {
        // All threads share a single bucket spanning the whole pool.
        nonThreadUniqueValueBucket = new ValueBucket
        {
            Values = values,
            Name = datapoolMetadata.Name,
            StartOffset = 0,
            EndOffset = PhysicalSize - 1,
            NextOffset = -1,
            IsThreadUnique = false,
            IsCircular = datapoolMetadata.IsCircular,
            LogicalSize = processSlice.Item2 - processSlice.Item1 + 1
        };
    }
    else
    {
        // One bucket per thread. Thread-unique mode slices the process range
        // further per thread; other modes give each thread the full pool.
        threadUniqueValueBuckets = new ValueBucket[threadCount];
        for (int i = 0; i < threadCount; i++)
        {
            Tuple<int, int> threadSlice = distributionMode == DatapoolThreadDistributionMode.ThreadUnique
                ? GetSubtupleInTupleSlicedBy(i, processSlice, threadCount)
                : new Tuple<int, int>(0, PhysicalSize - 1);
            threadUniqueValueBuckets[i] = new ValueBucket
            {
                Values = values,
                Name = datapoolMetadata.Name,
                StartOffset = threadSlice.Item1,
                EndOffset = threadSlice.Item2,
                NextOffset = threadSlice.Item1 - 1,
                IsThreadUnique = true,
                IsCircular = datapoolMetadata.IsCircular,
                LogicalSize = threadSlice.Item2 - threadSlice.Item1 + 1
            };
        }
    }
}
/// <summary>
/// Adds the given value bucket into this bucket, returning the resulting bucket.
/// </summary>
/// <param name="suggestedHashRoll">Hash-roll hint passed to a <c>HashBucket</c> when a split is needed (see the overrides).</param>
/// <param name="bucket">The key/value/hash triple to add.</param>
/// <param name="keyComparer">Comparer used to match keys.</param>
/// <param name="mutate">When true, overrides may overwrite storage in place and return this instance; when false, a new bucket is produced (see the list-bucket override).</param>
internal abstract AbstractBucket Add(int suggestedHashRoll, ValueBucket bucket, IEqualityComparer <K> keyComparer, bool mutate);