public void DuplicatedKeysWithDefaultCapacity()
{
    // Make rehash get called because too many items with duplicated keys have been added to the hashtable.
    var hash = new HashtableEx();
    const int Iterations = 1600;
    for (int i = 0; i < Iterations; i += 2)
    {
        hash.Add(new BadHashCode(i), i.ToString());
        hash.Add(new BadHashCode(i + 1), (i + 1).ToString());
        hash.Remove(new BadHashCode(i));
        hash.Remove(new BadHashCode(i + 1));
    }

    for (int i = 0; i < Iterations; i++)
    {
        hash.Add(i.ToString(), i);
    }

    for (int i = 0; i < Iterations; i++)
    {
        Assert.AreEqual(i, hash[i.ToString()]);
    }
}
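// The BadHashCode type used above is not defined in this section. A minimal sketch of what the
// tests appear to assume: a key wrapper that compares equal by value (so Remove(new BadHashCode(i))
// finds the original entry) while forcing every instance into the same hash bucket, which is what
// exercises the collision and rehash paths. The actual implementation may differ.
internal class BadHashCode
{
    public BadHashCode(int value)
    {
        Value = value;
    }

    public int Value { get; }

    public override bool Equals(object obj) =>
        obj is BadHashCode other && other.Value == Value;

    // Returning a constant forces every key into the same bucket.
    public override int GetHashCode() => 0;

    public override string ToString() => Value.ToString();
}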
public void Clone_IsShallowCopy()
{
    var hash = new HashtableEx();
    for (int i = 0; i < 10; i++)
    {
        hash.Add(i, new Foo());
    }

    HashtableEx clone = (HashtableEx)hash.Clone();
    for (int i = 0; i < clone.Count; i++)
    {
        Assert.AreEqual("Hello World", ((Foo)clone[i]).StringValue);
        Assert.AreEqual(hash[i], clone[i]);
    }

    // Change an object in the original hashtable; the clone sees the change because it shares the same value references.
    ((Foo)hash[1]).StringValue = "Goodbye";
    Assert.AreEqual("Goodbye", ((Foo)clone[1]).StringValue);

    // Removing an object from the original hashtable doesn't change the clone.
    hash.Remove(0);
    Assert.IsTrue(clone.Contains(0));
}
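// Foo is also not defined in this section. The clone test above implies a mutable reference type
// whose StringValue starts out as "Hello World"; something along these lines is assumed, though
// the original helper may carry more members:
internal class Foo
{
    public string StringValue { get; set; } = "Hello World";
}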
public void Remove_SameHashcode()
{
    // We want to add and delete items (with the same hash code) to the hashtable in such a way that the
    // hashtable does not expand but has to walk through collision-bit-set positions to insert the new
    // elements. We do this by creating a default hashtable of size 11 (with the default load factor of
    // 0.72), which should keep the hashtable from expanding as long as it holds at most 7 elements at
    // any given time.
    var hash = new HashtableEx();
    var arrList = new ArrayList();
    for (int i = 0; i < 7; i++)
    {
        var hashConfuse = new BadHashCode(i);
        arrList.Add(hashConfuse);
        hash.Add(hashConfuse, i);
    }

    var rand = new Random(-55);
    int iCount = 7;
    for (int i = 0; i < 100; i++)
    {
        for (int j = 0; j < 7; j++)
        {
            Assert.AreEqual(hash[arrList[j]], ((BadHashCode)arrList[j]).Value);
        }

        // Delete 3 elements from the hashtable and replace them with new, unique keys.
        for (int j = 0; j < 3; j++)
        {
            int iElement = rand.Next(6);
            hash.Remove(arrList[iElement]);
            Assert.IsFalse(hash.ContainsValue(null));
            arrList.RemoveAt(iElement);

            int testInt = iCount++;
            var hashConfuse = new BadHashCode(testInt);
            arrList.Add(hashConfuse);
            hash.Add(hashConfuse, testInt);
        }
    }
}
public static HashtableEx CreateIntHashtable(int count, int start = 0)
{
    // Builds a hashtable whose keys and values are the integers start .. start + count - 1.
    var hashtable = new HashtableEx();
    for (int i = start; i < start + count; i++)
    {
        hashtable.Add(i, i);
    }
    return hashtable;
}
public static HashtableEx CreateStringHashtable(int count, int start = 0)
{
    // Builds a hashtable with keys "Key_i" and values "Value_i" for i in start .. start + count - 1.
    var hashtable = new HashtableEx();
    for (int i = start; i < start + count; i++)
    {
        string key = "Key_" + i;
        string value = "Value_" + i;
        hashtable.Add(key, value);
    }
    return hashtable;
}
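// A hypothetical example of how these factory helpers might be used in a test. The test name and
// assertions below are illustrative only and are not part of the original suite.
public void Factories_ProduceExpectedEntries_Sketch()
{
    HashtableEx stringTable = CreateStringHashtable(count: 10);
    Assert.AreEqual(10, stringTable.Count);
    Assert.AreEqual("Value_3", stringTable["Key_3"]);

    HashtableEx intTable = CreateIntHashtable(count: 5, start: 100);
    Assert.IsTrue(intTable.ContainsKey(104));
    Assert.AreEqual(104, intTable[104]);
}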
public void Add_ClearRepeatedly()
{
    const int Iterations = 2;
    const int Count = 2;

    var hash = new HashtableEx();
    for (int i = 0; i < Iterations; i++)
    {
        for (int j = 0; j < Count; j++)
        {
            string key = "Key: i=" + i + ", j=" + j;
            string value = "Value: i=" + i + ", j=" + j;
            hash.Add(key, value);
        }

        Assert.AreEqual(Count, hash.Count);
        hash.Clear();
    }
}
public void AddRemove_LargeAmountNumbers()
{
    // Generate an array of 100,000 random ints as test data.
    var inputData = new int[100000];
    var random = new Random(341553);
    for (int i = 0; i < inputData.Length; i++)
    {
        inputData[i] = random.Next(7500000, int.MaxValue);
    }

    var hash = new HashtableEx();

    // The loop variable is declared as long, so each key is widened and boxed as a long rather than an int.
    int count = 0;
    foreach (long number in inputData)
    {
        hash.Add(number, count++);
    }

    count = 0;
    foreach (long number in inputData)
    {
        Assert.AreEqual(hash[number], count);
        Assert.IsTrue(hash.ContainsKey(number));
        count++;
    }

    foreach (long number in inputData)
    {
        hash.Remove(number);
    }

    Assert.AreEqual(0, hash.Count);
}
internal void Add(Type key, DependencyProperty[] dependencyProperty) => _entries.Add(key, dependencyProperty);
internal void Add(PropertyCacheEntry key, DependencyProperty dependencyProperty) => _entries.Add(key, dependencyProperty);
internal void Add(Type ownerType, PropertyMetadata ownerTypeMetadata) => _table.Add(ownerType, ownerTypeMetadata);
internal void Add(_Key key, DependencyProperty[] value) => _entries.Add(key, value);
internal void Add(Entry key, Typeface? typeFace) => _entries.Add(key, typeFace);
internal void Add(Type key, bool isNullable) => _entries.Add(key, isNullable);
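// The Add forwarders above each live in a small wrapper type that hides a table field behind a
// typed API. A minimal sketch of that pattern, assuming a HashtableEx-backed cache; the class
// name, field name, and TryGetValue helper here are illustrative, not taken from the original source.
internal sealed class NullabilityCache
{
    private readonly HashtableEx _entries = new HashtableEx();

    internal void Add(Type key, bool isNullable) => _entries.Add(key, isNullable);

    internal bool TryGetValue(Type key, out bool isNullable)
    {
        object value = _entries[key];
        if (value is bool flag)
        {
            isNullable = flag;
            return true;
        }

        isNullable = false;
        return false;
    }
}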