public void HashSet_Generic_Constructor_IEnumerable_WithManyDuplicates(int count)
{
    IEnumerable<T> items = CreateEnumerable(EnumerableType.List, null, count, 0, 0);

    SegmentedHashSet<T> hashSetFromDuplicates = new SegmentedHashSet<T>(Enumerable.Range(0, 40).SelectMany(i => items).ToArray());
    SegmentedHashSet<T> hashSetFromNoDuplicates = new SegmentedHashSet<T>(items);
    Assert.True(hashSetFromNoDuplicates.SetEquals(hashSetFromDuplicates));
}
public void HashSet_Generic_Constructor_IEnumerable(EnumerableType enumerableType, int setLength, int enumerableLength, int numberOfMatchingElements, int numberOfDuplicateElements)
{
    _ = setLength;
    _ = numberOfMatchingElements;

    IEnumerable<T> enumerable = CreateEnumerable(enumerableType, null, enumerableLength, 0, numberOfDuplicateElements);
    SegmentedHashSet<T> set = new SegmentedHashSet<T>(enumerable);
    Assert.True(set.SetEquals(enumerable));
}
public void HashSet_Generic_TrimExcess_Repeatedly(int setLength)
{
    SegmentedHashSet<T> set = (SegmentedHashSet<T>)GenericISetFactory(setLength);
    List<T> expected = set.ToList();
    set.TrimExcess();
    set.TrimExcess();
    set.TrimExcess();
    Assert.True(set.SetEquals(expected));
}
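// Illustrative sketch (not part of the original test suite): TrimExcess() is expected to release
// excess capacity without altering set membership, which is why the TrimExcess tests in this file
// only assert that the element set is unchanged afterwards. A minimal usage example, assuming
// SegmentedHashSet<T> mirrors the HashSet<T> surface already exercised here (constructor from
// IEnumerable<T>, Remove, TrimExcess, SetEquals):
//
//     var numbers = new SegmentedHashSet<int>(Enumerable.Range(0, 100));
//     for (int i = 0; i < 90; i++)
//         numbers.Remove(i);               // leave only 90..99
//     numbers.TrimExcess();                // capacity may shrink; contents must not change
//     Debug.Assert(numbers.SetEquals(Enumerable.Range(90, 10)));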
public void HashSet_Generic_Constructor_HashSet_SparselyFilled(int count)
{
    SegmentedHashSet<T> source = (SegmentedHashSet<T>)CreateEnumerable(EnumerableType.SegmentedHashSet, null, count, 0, 0);
    List<T> sourceElements = source.ToList();
    foreach (int i in NonSquares(count))
    {
        // Unevenly spaced survivors increase the chance of catching any spacing-related bugs.
        source.Remove(sourceElements[i]);
    }

    SegmentedHashSet<T> set = new SegmentedHashSet<T>(source, GetIEqualityComparer());
    Assert.True(set.SetEquals(source));
}
public void SetComparer_SetEqualsTests()
{
    List<T> objects = new List<T>() { CreateT(1), CreateT(2), CreateT(3), CreateT(4), CreateT(5), CreateT(6) };

    var set = new SegmentedHashSet<SegmentedHashSet<T>>()
    {
        new SegmentedHashSet<T> { objects[0], objects[1], objects[2] },
        new SegmentedHashSet<T> { objects[3], objects[4], objects[5] },
    };
    var noComparerSet = new SegmentedHashSet<SegmentedHashSet<T>>()
    {
        new SegmentedHashSet<T> { objects[0], objects[1], objects[2] },
        new SegmentedHashSet<T> { objects[3], objects[4], objects[5] },
    };

    var comparerSet1 = new SegmentedHashSet<SegmentedHashSet<T>>(SegmentedHashSet<T>.CreateSetComparer())
    {
        new SegmentedHashSet<T> { objects[0], objects[1], objects[2] },
        new SegmentedHashSet<T> { objects[3], objects[4], objects[5] },
    };
    var comparerSet2 = new SegmentedHashSet<SegmentedHashSet<T>>(SegmentedHashSet<T>.CreateSetComparer())
    {
        new SegmentedHashSet<T> { objects[3], objects[4], objects[5] },
        new SegmentedHashSet<T> { objects[0], objects[1], objects[2] },
    };

    Assert.False(noComparerSet.SetEquals(set));
    Assert.True(comparerSet1.SetEquals(set));
    Assert.True(comparerSet2.SetEquals(set));
}
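// Illustrative sketch (not part of the original test suite): SegmentedHashSet<T>.CreateSetComparer(),
// used in the test above, supplies an IEqualityComparer<SegmentedHashSet<T>> that compares nested
// sets by their elements rather than by reference, which is why comparerSet1 and comparerSet2 report
// SetEquals(set) == true while noComparerSet does not. Assuming the comparer mirrors
// HashSet<T>.CreateSetComparer():
//
//     var comparer = SegmentedHashSet<int>.CreateSetComparer();
//     var a = new SegmentedHashSet<int> { 1, 2, 3 };
//     var b = new SegmentedHashSet<int> { 3, 2, 1 };
//     Debug.Assert(comparer.Equals(a, b));                           // same elements, order-independent
//     Debug.Assert(comparer.GetHashCode(a) == comparer.GetHashCode(b));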
public void HashSet_Generic_TrimExcess_AfterRemovingOneElement(int setLength)
{
    if (setLength > 0)
    {
        SegmentedHashSet<T> set = (SegmentedHashSet<T>)GenericISetFactory(setLength);
        List<T> expected = set.ToList();
        T elementToRemove = set.ElementAt(0);

        set.TrimExcess();
        Assert.True(set.Remove(elementToRemove));
        expected.Remove(elementToRemove);
        set.TrimExcess();

        Assert.True(set.SetEquals(expected));
    }
}