public void RemovingItemsShouldChangeTheContentHash(string[] initialContents, int[] itemsToRemove)
{
    PexAssume.IsNotNullOrEmpty(initialContents);
    PexAssume.IsNotNullOrEmpty(itemsToRemove);
    PexAssume.AreDistinctValues(initialContents);
    PexAssume.AreDistinctValues(itemsToRemove);
    PexAssume.TrueForAll(itemsToRemove, x => x < initialContents.Length && x > 0);

    (new TestScheduler()).With(sched =>
    {
        var fixture = new SerializedCollection<ModelTestFixture>(
            initialContents.Select(x => new ModelTestFixture() { TestString = x }));
        var hashes = new List<Guid>();
        int changeCount = 0;

        fixture.Changed.Subscribe(_ =>
        {
            changeCount++;
            hashes.Add(fixture.ContentHash);
        });

        // Materialize the items up front; a deferred Select would index into the
        // collection while it is shrinking and resolve to the wrong elements.
        var toRemove = itemsToRemove.Select(x => fixture[x]).ToArray();
        foreach (var v in toRemove)
        {
            fixture.Remove(v);
        }
        sched.Start();

        // Every removal should raise Changed once and yield a distinct content hash.
        PexAssert.AreDistinctValues(hashes.ToArray());
        PexAssert.AreEqual(itemsToRemove.Length, changeCount);
    });
}
public void Add([PexAssumeNotNull] int[] keys)
{
    PexAssume.TrueForAll(keys, k => k < int.MaxValue);
    PexAssume.IsTrue(keys.Length > 0);

    var target = new SoftHeap<int, int>(1 / 4.0, int.MaxValue);
    Console.WriteLine("expected error rate: {0}", target.ErrorRate);

    // Every Add must grow the heap by exactly one element.
    foreach (var key in keys)
    {
        var count = target.Count;
        target.Add(key, key + 1);
        Assert.AreEqual(count + 1, target.Count);
    }

    // Drain the heap. A soft heap is allowed to corrupt keys, but the observed error
    // rate must stay within the configured bound, and each value must still match the
    // key it was stored with.
    int lastMin = int.MaxValue;
    int error = 0;
    while (target.Count > 0)
    {
        var kv = target.DeleteMin();
        if (lastMin < kv.Key)
        {
            error++;
        }
        lastMin = kv.Key;
        Assert.AreEqual(kv.Key + 1, kv.Value);
    }

    Console.WriteLine("error rate: {0}", error / (double)keys.Length);
    Assert.IsTrue(error / (double)keys.Length <= target.ErrorRate);
}
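// A minimal direct-invocation sketch for the exploration above, assuming a hypothetical
// containing class named SoftHeapTTest and an NUnit-style [Test] attribute; the real
// harness and class name may differ.
[Test]
public void Add_SmokeTest_WithFixedKeys()
{
    // Duplicate and out-of-order keys exercise both the Add and the DeleteMin paths.
    new SoftHeapTTest().Add(new[] { 5, 1, 3, 1, 4, 2 });
}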
public void ChangingASerializableItemShouldChangeTheContentHash(string[] items, int toChange, string newValue)
{
    PexAssume.IsNotNullOrEmpty(items);
    PexAssume.TrueForAll(items, x => x != null);
    PexAssume.AreDistinctReferences(items);
    PexAssume.IsTrue(toChange >= 0 && toChange < items.Length);

    (new TestScheduler()).With(sched =>
    {
        var fixture = new SerializedCollection<ModelTestFixture>(
            items.Select(x => new ModelTestFixture() { TestString = x }));

        // shouldDie stays true unless the collection's Changed notification fires.
        bool shouldDie = true;
        var hashBefore = fixture.ContentHash;
        PexAssume.AreNotEqual(newValue, fixture[toChange].TestString);

        fixture.Changed.Subscribe(_ => shouldDie = false);
        Observable.Return(newValue, sched).Subscribe(x => fixture[toChange].TestString = x);
        sched.Start();

        PexAssert.AreNotEqual(hashBefore, fixture.ContentHash);
        PexAssert.IsFalse(shouldDie);
    });
}
public static UndirectedGraph<int, Edge<int>> CreateGraphArrayOfNodesAndEdges([PexAssumeNotNull] int[] nodes, [PexAssumeNotNull] bool[] edges)
{
    PexAssume.IsTrue(edges.Length == nodes.Length);
    PexAssume.AreDistinctValues(nodes);
    // Always-true disjunction; it appears intended only to steer Pex's input
    // generation toward the boundary at 10.
    PexAssume.TrueForAll(nodes, e => e <= 10 || e > 10);
    PexAssume.TrueForAll(nodes, e => e != 0);

    UndirectedGraph<int, Edge<int>> g = new UndirectedGraph<int, Edge<int>>(false);
    foreach (int ele in nodes)
    {
        g.AddVertex(ele);
    }

    for (int i = 0; i < edges.Length; i++)
    {
        // Let Pex pick a source vertex for each slot and record the choice.
        int source = PexChoose.IndexValue("indexed value", nodes);
        PexObserve.ValueForViewing("CANED_SRC", source);
        if (edges[source] == false)
        {
            g.AddEdge(new Edge<int>(nodes[source], nodes[i]));
            g.AddEdge(new Edge<int>(nodes[i], nodes[i]));
        }
    }
    return g;
}
public static UndirectedGraph<int, Edge<int>> CreateGraphArrayOfNodesAndEdgesAssume([PexAssumeNotNull] int[] nodes, [PexAssumeNotNull] bool[] edges)
{
    //PexAssume.IsTrue(nodes.Length <= 7 || nodes.Length > 7);
    PexAssume.IsTrue(nodes.Length <= 6 || nodes.Length > 6);
    PexAssume.IsTrue(edges.Length <= nodes.Length);
    PexAssume.AreDistinctValues(nodes);
    PexAssume.TrueForAll(nodes, e => e <= 12 || e > 12);
    //PexAssume.TrueForAll(nodes, e => e != 0);

    UndirectedGraph<int, Edge<int>> g = new UndirectedGraph<int, Edge<int>>(false);
    foreach (int ele in nodes)
    {
        g.AddVertex(ele);
    }

    int source = PexChoose.IndexValue("indexed value", nodes);
    for (int i = 0; i < edges.Length; i++)
    {
        if (edges[i] == false)
        {
            g.AddEdge(new Edge<int>(nodes[source], nodes[i]));
        }
    }
    return g;
}
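// A deterministic sketch of the graph shape the second factory above produces (one source
// vertex connected to every unflagged slot), with a fixed source index standing in for the
// PexChoose call. The node values and edge flags are arbitrary, and only APIs already used
// above (AddVertex, AddEdge, Edge<int>) appear.
public static UndirectedGraph<int, Edge<int>> CreateSampleGraph()
{
    int[] nodes = { 1, 2, 3, 4 };
    bool[] edges = { true, false, false, false };
    var g = new UndirectedGraph<int, Edge<int>>(false);
    foreach (int ele in nodes)
    {
        g.AddVertex(ele);
    }
    int source = 0; // fixed stand-in for PexChoose.IndexValue("indexed value", nodes)
    for (int i = 0; i < edges.Length; i++)
    {
        if (!edges[i])
        {
            g.AddEdge(new Edge<int>(nodes[source], nodes[i]));
        }
    }
    return g; // vertices {1, 2, 3, 4}; edges 1-2, 1-3, 1-4
}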
public void Unions(int elementCount, [PexAssumeNotNull] KeyValuePair<int, int>[] unions)
{
    PexAssume.IsTrue(0 < elementCount);
    PexSymbolicValue.Minimize(elementCount);
    PexAssume.TrueForAll(
        unions,
        u => 0 <= u.Key && u.Key < elementCount && 0 <= u.Value && u.Value < elementCount
        );

    var target = new ForestDisjointSet<int>();
    // fill up with the singleton sets 0..elementCount - 1
    for (int i = 0; i < elementCount; i++)
    {
        target.MakeSet(i);
        Assert.IsTrue(target.Contains(i));
        Assert.AreEqual(i + 1, target.ElementCount);
        Assert.AreEqual(i + 1, target.SetCount);
    }

    // apply Union to the Key/Value pair of each entry in unions
    for (int i = 0; i < unions.Length; i++)
    {
        var left = unions[i].Key;
        var right = unions[i].Value;

        var setCount = target.SetCount;
        bool unioned = target.Union(left, right);

        // they should be in the same set now
        Assert.IsTrue(target.AreInSameSet(left, right));

        // if a merge actually happened, the set count decreased by 1
        PexAssert.ImpliesIsTrue(unioned, () => setCount - 1 == target.SetCount);
    }
}
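// A concrete, non-parameterized companion to the Unions exploration above: three singleton
// sets, two real merges, and one redundant Union. It uses only ForestDisjointSet members
// already exercised above; the [Test] attribute is assumed from the surrounding framework.
[Test]
public void Unions_ThreeElements_TwoMergesLeaveOneSet()
{
    var target = new ForestDisjointSet<int>();
    for (int i = 0; i < 3; i++)
    {
        target.MakeSet(i);
    }
    Assert.IsTrue(target.Union(0, 1));   // merges two singletons
    Assert.IsTrue(target.Union(1, 2));   // merges in the last singleton
    Assert.IsFalse(target.Union(0, 2));  // already in the same set, no merge
    Assert.IsTrue(target.AreInSameSet(0, 2));
    Assert.AreEqual(1, target.SetCount);
}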
public static Stack.Stack<int> Create([PexAssumeNotNull] int[] elems)
{
    //PexAssume.AreDistinctValues(elems);
    PexAssume.TrueForAll(elems, e => e <= -6 || e > -6);
    //PexAssume.TrueForAll(0, elems.Length, _i => elems[_i] < -2 || elems[_i] >= 2);

    Stack.Stack<int> ret = new Stack.Stack<int>();
    for (int i = 0; i < elems.Length; i++)
    {
        ret.Push(elems[i]);
    }
    return ret;
}
public static Stack.Stack<int> Create([PexAssumeNotNull] int[] elems)
{
    //PexAssume.IsTrue(elems != null && elems.Length < 11);
    PexAssume.TrueForAll(0, elems.Length, _i => elems[_i] > -11 && elems[_i] < 11);

    // DataStructure has big enough capacity for Commutativity Test
    Stack.Stack<int> ret = new Stack.Stack<int>(elems.Length + 2);
    for (int i = 0; i < elems.Length; i++)
    {
        // For stack, add any element.
        ret.Push(elems[i]);
    }
    return ret;
}
public static Queue.Queue<int> Create([PexAssumeNotNull] int[] elems)
{
    //PexAssume.AreDistinctValues(elems);
    PexAssume.TrueForAll(elems, e => e <= -6 || e > -6);
    //PexAssume.TrueForAll(0, elems.Length, _i => elems[_i] > -11 && elems[_i] < 11);

    // DataStructure has big enough capacity for Commutativity Test
    Queue.Queue<int> ret = new Queue.Queue<int>();
    for (int i = 0; i < elems.Length; i++)
    {
        ret.Enqueue(elems[i]);
    }
    return ret;
}
public void Ctor_WhenCalledWithValues_MinValueBecomesFirst(KeyValuePair<int, int>[] values)
{
    PexAssume.IsNotNullOrEmpty(values);
    PexAssume.TrueForAll(values, value => value.Key == value.Value);

    var minValue = values.Min(it => it.Value);

    var heap = new BinaryHeap<int, int>(values);
    var firstValue = heap.ExtractFirst();

    Assert.That(firstValue, Is.EqualTo(minValue));
}
public void Add_WhenNewMinValueIsAdded_ItBecomesFirstValue<TValue>(
    KeyValuePair<int, TValue>[] existingValues, KeyValuePair<int, TValue> newMinValue)
{
    PexAssume.IsNotNull(existingValues);

    var heap = new BinaryHeap<int, TValue>(existingValues);
    PexAssume.TrueForAll(existingValues, value => newMinValue.Key < value.Key);

    heap.Add(newMinValue.Key, newMinValue.Value);
    var minValue = heap.GetFirst();

    Assert.That(minValue, Is.EqualTo(newMinValue.Value));
}
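// A concrete illustration of the two properties checked above, with fixed numbers instead of
// Pex-generated pairs; the heap API used (the KeyValuePair constructor, Add, GetFirst,
// ExtractFirst) is taken from the tests above, and the [Test] attribute is assumed.
[Test]
public void Heap_ReturnsSmallestKeyFirst_Concrete()
{
    var values = new[]
    {
        new KeyValuePair<int, int>(5, 5),
        new KeyValuePair<int, int>(2, 2),
        new KeyValuePair<int, int>(8, 8),
    };
    var heap = new BinaryHeap<int, int>(values);
    Assert.That(heap.ExtractFirst(), Is.EqualTo(2)); // the smallest key comes out first

    heap.Add(1, 100);                                // a new minimum key
    Assert.That(heap.GetFirst(), Is.EqualTo(100));   // its value is now the first value
}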
public static BinaryHeap<int, int> CreateBinaryHeapGeneral([PexAssumeNotNull] int[] priorities, [PexAssumeNotNull] int[] values, int capacity)
{
    PexAssume.IsTrue(capacity > 0);
    PexAssume.IsTrue(priorities.Length == values.Length);
    PexAssume.TrueForAll(priorities, e => e <= 12 || e >= 12);
    PexAssume.TrueForAll(values, e => e <= 12 || e >= 12);

    var bh = new BinaryHeap<int, int>(capacity, Comparer<int>.Default.Compare);
    for (int i = 0; i < priorities.Length; i++)
    {
        bh.Add(priorities[i], values[i]);
    }
    return bh;
}
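// A direct-call sketch for the factory above with fixed data, assuming it lives in the same
// class and that the PexAssume checks simply pass when their conditions hold outside of an
// exploration; it only demonstrates how the factory is invoked, not the heap's full API.
public static BinaryHeap<int, int> CreateSampleHeap()
{
    // Three priority/value pairs and a capacity large enough to hold them.
    return CreateBinaryHeapGeneral(new[] { 3, 1, 2 }, new[] { 30, 10, 20 }, 4);
}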
public static ArrayList Create(int[] elems)
{
    //PexAssume.IsTrue(elems.Length < 11);
    PexAssume.IsNotNull(elems);
    PexAssume.TrueForAll(0, elems.Length, _i => elems[_i] < 5 || elems[_i] >= 5);

    ArrayList arrList = new ArrayList();
    for (int i = 0; i < elems.Length; i++)
    {
        //if (!arrList.Contains(elems[i]))
        arrList.Add(elems[i]);
    }
    return arrList;
}
public static Dictionary.Dictionary<int, int> Create([PexAssumeNotNull] int[] keys, [PexAssumeNotNull] int[] values)
{
    PexAssume.AreDistinctValues(keys);
    PexAssume.IsTrue(keys.Length <= 3 || keys.Length > 3);
    PexAssume.IsTrue(keys.Length == values.Length);
    PexAssume.TrueForAll(0, keys.Length, _i => keys[_i] <= 1 || keys[_i] > 1);
    //PexAssume.TrueForAll(0, values.Length, _j => values[_j] <= -3 || values[_j] > -3);
    //DataStructures.Utility.Int32EqualityComparer comparer = new DataStructures.Utility.Int32EqualityComparer();

    // DataStructure has big enough capacity for Commutativity Test
    Dictionary.Dictionary<int, int> ret = new Dictionary.Dictionary<int, int>();
    for (int i = 0; i < keys.Length; i++)
    {
        // Keys are distinct, so Add never sees a duplicate.
        //if (!ret.ContainsKey(keys[i]))
        ret.Add(keys[i], values[i]);
    }
    return ret;
}
public static HashSet.HashSet<int> Create([PexAssumeNotNull] int[] elems)
{
    //PexAssume.IsTrue(elems != null && elems.Length < 11);
    //PexAssume.TrueForAll(0, elems.Length, _i => elems[_i] > -11 && elems[_i] < 11);
    PexAssume.AreDistinctValues(elems);
    PexAssume.TrueForAll(0, elems.Length, _i => elems[_i] <= 0 || elems[_i] > 0);
    PexAssume.IsTrue(elems.Length <= 2 || elems.Length > 2);

    // DataStructure has big enough capacity for Commutativity Test
    HashSet.HashSet<int> ret = new HashSet.HashSet<int>();
    for (int i = 0; i < elems.Length; i++)
    {
        //PexAssume.IsTrue(elems[i] > -101 && elems[i] < 101);
        // Elements are distinct, so every Add inserts a new item.
        //if (!ret.Contains(elems[i]))
        ret.Add(elems[i]);
    }
    return ret;
}
public void ReplaceTextSpans_ArbitraryTextSpans_NoCrash()
{
    var tree1 = BBCodeTestUtil.GetAnyTree();
    var chosenTexts = new List<string>();
    var tree2 = BBCode.ReplaceTextSpans(tree1, txt =>
    {
        // Let Pex choose up to three strictly increasing replacement indexes within the text.
        var count = PexChoose.ValueFromRange("count", 0, 3);
        var indexes = PexChoose.Array<int>("indexes", count);
        PexAssume.TrueForAll(0, count, i => indexes[i] >= 0 && indexes[i] <= txt.Length && (i == 0 || indexes[i - 1] < indexes[i]));
        return Enumerable.Range(0, count)
            .Select(i =>
            {
                // Each span may extend at most to the next chosen index (or to the end of the text).
                var maxIndex = i == count - 1 ? txt.Length : indexes[i + 1];
                var text = PexChoose.ValueNotNull<string>("text");
                chosenTexts.Add(text);
                return new TextSpanReplaceInfo(
                    indexes[i],
                    PexChoose.ValueFromRange("length", 0, maxIndex - indexes[i] + 1),
                    new TextNode(text));
            })
            .ToArray();
    }, null);

    var bbCode = tree2.ToBBCode();
    // Every chosen replacement text must survive into the re-serialized BBCode.
    PexAssert.TrueForAll(chosenTexts, s => bbCode.Contains(s));
}
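// A deterministic companion sketch for the exploration above: the same ReplaceTextSpans call,
// but with a fixed callback that inserts a marker node at index 0 of every text span, so the
// callback contract (index, length, replacement node) is visible without tracing Pex choices.
// GetAnyTree may itself depend on Pex choices, so this is meant to run under the same harness.
public void ReplaceTextSpans_FixedMarkerSpan_NoCrash()
{
    var tree = BBCodeTestUtil.GetAnyTree();
    var tree2 = BBCode.ReplaceTextSpans(tree, txt => new[]
    {
        // A zero-length span at index 0: a pure insertion of a marker text node.
        new TextSpanReplaceInfo(0, 0, new TextNode("MARKER")),
    }, null);

    // The round trip back to BBCode text must still succeed.
    var bbCode = tree2.ToBBCode();
    PexAssert.IsTrue(bbCode != null);
}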