// RemoveWhere with an always-true predicate must drain the set and report
// every element as removed.
public void HashSet_Generic_RemoveWhere_AllElements(int setLength)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);

    // The predicate accepts everything, so the whole set is cleared.
    int removedCount = set.RemoveWhere(_ => true);

    Assert.Equal(setLength, removedCount);
}
// The set must be usable through its ISet<T> interface view.
public void CanBeCastedToISet()
{
    var set = new SegmentedHashSet<T>();

    // 'as' yields null only when the interface is not implemented.
    var iset = set as ISet<T>;

    Assert.NotNull(iset);
}
/// <inheritdoc cref="ImmutableHashSet{T}.Builder.ExceptWith(IEnumerable{T})"/>
public void ExceptWith(IEnumerable<T> other)
{
    if (other is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.other);
    }

    // Fast path: a mutable copy already exists, so delegate directly to it.
    if (_mutableSet is not null)
    {
        _mutableSet.ExceptWith(other);
        return;
    }

    // Removing a set from itself always yields the empty set.
    if (other == this)
    {
        Clear();
        return;
    }
    else if (other is ImmutableSegmentedHashSet<T> otherSet)
    {
        if (otherSet == _set)
        {
            // Same underlying immutable set: the result is empty.
            Clear();
            return;
        }
        else if (otherSet.IsEmpty)
        {
            // No action required
            return;
        }
        else
        {
            GetOrCreateMutableSet().ExceptWith(otherSet._set);
            return;
        }
    }
    else
    {
        // Manually enumerate to avoid changes to the builder if 'other' is empty or does not contain any
        // items present in the current set.
        SegmentedHashSet<T>? mutableSet = null;
        foreach (var item in other)
        {
            if (mutableSet is null)
            {
                // Defer creating the mutable copy until the first item that
                // actually needs removing is encountered.
                if (!ReadOnlySet.Contains(item))
                {
                    continue;
                }

                mutableSet = GetOrCreateMutableSet();
            }

            mutableSet.Remove(item);
        }

        return;
    }
}
// CopyTo must reject any negative count, including the extreme value.
public void HashSet_Generic_CopyTo_NegativeCount_ThrowsArgumentOutOfRangeException(int count)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(count);
    var destination = new T[count];

    Assert.Throws<ArgumentOutOfRangeException>(() => set.CopyTo(destination, 0, -1));
    Assert.Throws<ArgumentOutOfRangeException>(() => set.CopyTo(destination, 0, int.MinValue));
}
// Constructing from a source with heavy duplication must produce the same
// set as constructing from the distinct items alone.
public void HashSet_Generic_Constructor_IEnumerable_WithManyDuplicates(int count)
{
    IEnumerable<T> items = CreateEnumerable(EnumerableType.List, null, count, 0, 0);

    // Repeat the same items 40 times; duplicates must collapse away.
    T[] repeated = Enumerable.Range(0, 40).SelectMany(_ => items).ToArray();
    var hashSetFromDuplicates = new SegmentedHashSet<T>(repeated);
    var hashSetFromNoDuplicates = new SegmentedHashSet<T>(items);

    Assert.True(hashSetFromNoDuplicates.SetEquals(hashSetFromDuplicates));
}
// Requesting a capacity the set already has must not grow it.
public void EnsureCapacity_Generic_ExistingCapacityRequested_SameValueReturned(int capacity)
{
    // Case 1: a freshly constructed, empty set with the requested capacity.
    var hashSet = new SegmentedHashSet<T>(capacity);
    Assert.Equal(capacity, hashSet.EnsureCapacity(capacity));

    // Case 2: a set populated up to that capacity.
    hashSet = (SegmentedHashSet<T>)GenericISetFactory(capacity);
    Assert.Equal(capacity, hashSet.EnsureCapacity(capacity));
}
// A set constructed from any enumerable must contain exactly its elements.
public void HashSet_Generic_Constructor_IEnumerable(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements)
{
    // These theory inputs are unused in this particular scenario.
    _ = setLength;
    _ = numberOfMatchingElements;

    IEnumerable<T> source = CreateEnumerable(enumerableType, null, enumerableLength, 0, numberOfDuplicateElements);
    var constructed = new SegmentedHashSet<T>(source);

    Assert.True(constructed.SetEquals(source));
}
// A set sized with an initial capacity must accept that many elements and
// then grow transparently for one more.
public void HashSet_Generic_Constructor_int_AddUpToAndBeyondCapacity(int capacity)
{
    var hashSet = new SegmentedHashSet<T>(capacity);

    // Fill exactly to the requested capacity.
    AddToCollection(hashSet, capacity);
    Assert.Equal(capacity, hashSet.Count);

    // One element beyond capacity must still be accepted.
    AddToCollection(hashSet, capacity + 1);
    Assert.Equal(capacity + 1, hashSet.Count);
}
// The single-argument CopyTo overload must behave like CopyTo(array, 0).
public void HashSet_Generic_CopyTo_NoIndexDefaultsToZero(int count)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(count);
    var withoutIndex = new T[count];
    var withIndex = new T[count];

    set.CopyTo(withoutIndex);
    set.CopyTo(withIndex, 0);

    Assert.True(withoutIndex.SequenceEqual(withIndex));
}
// Trimming repeatedly must be idempotent and lose no elements.
public void HashSet_Generic_TrimExcess_Repeatedly(int setLength)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);
    List<T> expected = set.ToList();

    // Three trims in a row, matching the original triple-call pattern.
    for (int i = 0; i < 3; i++)
    {
        set.TrimExcess();
    }

    Assert.True(set.SetEquals(expected));
}
// Same as the capacity-only constructor test, but with an explicit comparer:
// filling to capacity and one beyond must both succeed.
public void HashSet_Generic_Constructor_int_IEqualityComparer_AddUpToAndBeyondCapacity(int capacity)
{
    IEqualityComparer<T> comparer = GetIEqualityComparer();
    var hashSet = new SegmentedHashSet<T>(capacity, comparer);

    AddToCollection(hashSet, capacity);
    Assert.Equal(capacity, hashSet.Count);

    AddToCollection(hashSet, capacity + 1);
    Assert.Equal(capacity + 1, hashSet.Count);
}
// Requesting any capacity at or below the current one must leave it unchanged.
public void EnsureCapacity_Generic_RequestedCapacitySmallerThanCurrent_CapacityUnchanged(int currentCapacity)
{
    // assert capacity remains the same when ensuring a capacity smaller or equal than existing
    for (int requested = 0; requested <= currentCapacity; requested++)
    {
        var set = new SegmentedHashSet<T>(currentCapacity);
        Assert.Equal(currentCapacity, set.EnsureCapacity(requested));
    }
}
// Produces a SegmentedHashSet<string> holding exactly 'count' distinct items,
// exposed through the non-generic IEnumerable interface.
protected override IEnumerable NonGenericIEnumerableFactory(int count)
{
    int seed = 12354;
    var result = new SegmentedHashSet<string>();

    // Keep generating until the required number of unique items is reached;
    // duplicates produced by CreateT are simply absorbed by the set.
    while (result.Count < count)
    {
        result.Add(CreateT(result, seed++));
    }

    return result;
}
// TryGetValue for an absent value must fail and leave the out parameter at
// its default value.
public void HashSet_Generic_TryGetValue_NotContains()
{
    T value = CreateT(1);
    var set = new SegmentedHashSet<T> { value };
    T missing = CreateT(2);

    Assert.False(set.TryGetValue(missing, out T? actualValue));
    Assert.Equal(default(T), actualValue);
}
// A requested capacity past the precomputed primes table must round up to
// the next prime.
public void HashSet_Generic_Constructor_Capacity_ToNextPrimeNumber()
{
    // Highest pre-computed number + 1.
    const int Capacity = 7199370;
    // The next prime after Capacity; the internal table must land on it.
    const int NextPrime = 7199371;

    var hashSet = new SegmentedHashSet<T>(Capacity);

    // EnsureCapacity(0) reports the actual capacity without growing.
    Assert.Equal(NextPrime, hashSet.EnsureCapacity(0));
}
// EnsureCapacity must round up to the smallest prime >= the request:
// 17, 15, and 13 all land on the prime 17.
public void EnsureCapacity_Generic_CapacityIsSetToPrimeNumberLargerOrEqualToRequested()
{
    foreach (int requested in new[] { 17, 15, 13 })
    {
        var set = new SegmentedHashSet<T>();
        Assert.Equal(17, set.EnsureCapacity(requested));
    }
}
// A populated set must be able to grow to at least any larger requested capacity.
public void EnsureCapacity_Generic_HashsetNotEmpty_SetsToAtLeastTheRequested(int setLength)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);

    // EnsureCapacity(0) reports the current capacity without growing.
    int currentCapacity = set.EnsureCapacity(0);

    // Request double; the result must be at least that large.
    int newCapacity = set.EnsureCapacity(currentCapacity * 2);
    Assert.InRange(newCapacity, currentCapacity * 2, int.MaxValue);
}
// Copy-constructing from a sparsely filled set (holes left by removals)
// must reproduce exactly the surviving elements.
public void HashSet_Generic_Constructor_HashSet_SparselyFilled(int count)
{
    var source = (SegmentedHashSet<T>)CreateEnumerable(EnumerableType.SegmentedHashSet, null, count, 0, 0);
    List<T> sourceElements = source.ToList();

    // Unevenly spaced survivors increases chance of catching any spacing-related bugs.
    foreach (int i in NonSquares(count))
    {
        source.Remove(sourceElements[i]);
    }

    var copy = new SegmentedHashSet<T>(source, GetIEqualityComparer());
    Assert.True(copy.SetEquals(source));
}
// A set-of-sets only compares inner sets by content when constructed with
// CreateSetComparer(); otherwise inner sets use reference equality.
public void SetComparer_SetEqualsTests()
{
    var objects = new List<T>() { CreateT(1), CreateT(2), CreateT(3), CreateT(4), CreateT(5), CreateT(6) };

    // Local builders for the two distinct inner sets used throughout.
    SegmentedHashSet<T> FirstHalf() => new SegmentedHashSet<T> { objects[0], objects[1], objects[2] };
    SegmentedHashSet<T> SecondHalf() => new SegmentedHashSet<T> { objects[3], objects[4], objects[5] };

    var set = new SegmentedHashSet<SegmentedHashSet<T>>() { FirstHalf(), SecondHalf() };

    // No comparer: distinct inner-set instances never compare equal.
    var noComparerSet = new SegmentedHashSet<SegmentedHashSet<T>>() { FirstHalf(), SecondHalf() };

    // With the set comparer: equality is element-wise, regardless of order.
    var comparerSet1 = new SegmentedHashSet<SegmentedHashSet<T>>(SegmentedHashSet<T>.CreateSetComparer()) { FirstHalf(), SecondHalf() };
    var comparerSet2 = new SegmentedHashSet<SegmentedHashSet<T>>(SegmentedHashSet<T>.CreateSetComparer()) { SecondHalf(), FirstHalf() };

    Assert.False(noComparerSet.SetEquals(set));
    Assert.True(comparerSet1.SetEquals(set));
    Assert.True(comparerSet2.SetEquals(set));
}
// Constructing with a null comparer must fall back to EqualityComparer<T>.Default.
public void HashSet_Generic_Constructor_NullIEqualityComparer()
{
    IEqualityComparer<T>? comparer = null;
    var set = new SegmentedHashSet<T>(comparer);

    // 'comparer' is statically null here, so the original else-branch
    // (Assert.Equal(comparer, set.Comparer)) was unreachable dead code;
    // only the null fallback path is testable in this method.
    Assert.Equal(EqualityComparer<T>.Default, set.Comparer);
}
public void HashSet_Generic_RemoveWhere_NewObject() // Regression Dev10_624201
{
    var array = new object[2];
    object obj = new();
    var set = new SegmentedHashSet<object>();

    // Add then remove so the set is empty but its internal free list is exercised.
    set.Add(obj);
    set.Remove(obj);

    // Each operation below must tolerate the freed slot without throwing.
    foreach (object o in set)
    {
    }

    set.CopyTo(array, 0, 2);
    set.RemoveWhere(element => false);
}
// A failed TryGetValue must overwrite a pre-assigned out argument with default.
public void HashSet_Generic_TryGetValue_NotContains_OverwriteOutputParam()
{
    T value = CreateT(1);
    var set = new SegmentedHashSet<T> { value };
    T equalValue = CreateT(2);

    // Pre-load the out variable to prove the lookup resets it.
#pragma warning disable IDE0059 // Unnecessary assignment of a value (intentional for the test)
    T? actualValue = equalValue;
#pragma warning restore IDE0059 // Unnecessary assignment of a value

    Assert.False(set.TryGetValue(equalValue, out actualValue));
    Assert.Equal(default(T), actualValue);
}
// Growing the capacity must work even when the set has entries on its free list.
public void EnsureCapacity_Generic_GrowCapacityWithFreeList(int setLength)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);

    // Remove the first element to ensure we have a free list.
    Assert.True(set.Remove(set.ElementAt(0)));

    int currentCapacity = set.EnsureCapacity(0);
    Assert.True(currentCapacity > 0);

    int newCapacity = set.EnsureCapacity(currentCapacity + 1);
    Assert.True(newCapacity > currentCapacity);
}
// For each request size, a second identical EnsureCapacity call must be a
// no-op returning the same capacity.
public void EnsureCapacity_Generic_EnsureCapacityCalledTwice_ReturnsSameValue(int setLength)
{
    foreach (int request in new[] { 0, setLength, setLength + 1 })
    {
        var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);
        int capacity = set.EnsureCapacity(request);
        Assert.Equal(capacity, set.EnsureCapacity(request));
    }
}
// Growing the capacity while an enumerator is live must not invalidate it.
public void EnsureCapacity_Generic_RequestingLargerCapacity_DoesNotInvalidateEnumeration(int setLength)
{
    var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);
    int capacity = set.EnsureCapacity(0);

    // Take both a set enumerator and a snapshot enumerator before growing.
    IEnumerator valuesEnum = set.GetEnumerator();
    IEnumerator valuesListEnum = new List<T>(set).GetEnumerator();

    // Verify EnsureCapacity does not invalidate enumeration
    set.EnsureCapacity(capacity + 1);

    while (valuesEnum.MoveNext())
    {
        valuesListEnum.MoveNext();
        Assert.Equal(valuesListEnum.Current, valuesEnum.Current);
    }
}
// Trim, clear, trim, refill, trim: the set must end up with exactly the
// refilled elements.
public void HashSet_Generic_TrimExcess_AfterClearingAndAddingAllElementsBack(int setLength)
{
    // Scenario only applies to non-empty sets.
    if (setLength <= 0)
    {
        return;
    }

    var set = (SegmentedHashSet<T>)GenericISetFactory(setLength);
    set.TrimExcess();

    set.Clear();
    set.TrimExcess();
    Assert.Equal(0, set.Count);

    AddToCollection(set, setLength);
    set.TrimExcess();
    Assert.Equal(setLength, set.Count);
}
// Returns the builder's mutable set, creating it on first use by atomically
// taking ownership of the immutable set stored in _set.
private SegmentedHashSet<T> GetOrCreateMutableSet()
{
    if (_mutableSet is null)
    {
        // Atomically swap the immutable set out for 'default' so only one
        // caller can claim it; observing a default value means another thread
        // got there first, i.e. unsupported concurrent use of the builder.
        var originalSet = RoslynImmutableInterlocked.InterlockedExchange(ref _set, default);
        if (originalSet.IsDefault)
        {
            throw new InvalidOperationException($"Unexpected concurrent access to {GetType()}");
        }

        // Copy the claimed contents (and comparer) into a mutable set.
        _mutableSet = new SegmentedHashSet<T>(originalSet._set, originalSet.KeyComparer);
    }

    return (_mutableSet);
}
// The (capacity, comparer) constructor must create an empty set that reports
// the supplied comparer, or the default comparer when null was supplied.
public void HashSet_Generic_Constructor_int_IEqualityComparer(int capacity)
{
    IEqualityComparer<T> comparer = GetIEqualityComparer();
    var set = new SegmentedHashSet<T>(capacity, comparer);

    Assert.Equal(0, set.Count);

    // A null comparer falls back to EqualityComparer<T>.Default.
    IEqualityComparer<T> expectedComparer = comparer is null ? EqualityComparer<T>.Default : comparer;
    Assert.Equal(expectedComparer, set.Comparer);
}
/// <inheritdoc cref="ImmutableHashSet{T}.Builder.Clear()"/>
public void Clear()
{
    // Already empty: nothing to do.
    if (ReadOnlySet.Count == 0)
    {
        return;
    }

    // A mutable copy exists: clear it in place.
    if (_mutableSet is not null)
    {
        _mutableSet.Clear();
        return;
    }

    // No mutable copy yet: start a fresh empty mutable set (same comparer)
    // and drop the reference to the immutable contents.
    _mutableSet = new SegmentedHashSet<T>(KeyComparer);
    _set = default;
}
// TryGetValue with an equal key must succeed and hand back the instance
// actually stored in the set, not the probe value.
public void HashSet_Generic_TryGetValue_Contains()
{
    T value = CreateT(1);
    var set = new SegmentedHashSet<T> { value };
    T equalValue = CreateT(1);

    Assert.True(set.TryGetValue(equalValue, out T? actualValue));
    Assert.Equal(value, actualValue);

    // Reference identity only makes sense for reference types.
    if (!typeof(T).IsValueType)
    {
        Assert.Same((object)value, (object?)actualValue);
    }
}