/// <summary>
/// The queue's element-equality helper must behave like value equality for value types.
/// </summary>
public void EqualsForValueTypes()
{
    var queue = new ConcurrentFixedSizePriorityQueue<int, int>(4);

    Assert.IsTrue(queue.Equals(1, 1));
    Assert.IsFalse(queue.Equals(1, 2));
    Assert.IsFalse(queue.Equals(2, 1));
}
/// <summary>
/// The queue's element-equality helper must handle reference types, including
/// null on either side (null equals only null).
/// </summary>
public void EqualsForObjects()
{
    var queue = new ConcurrentFixedSizePriorityQueue<string, int>(4);

    Assert.IsTrue(queue.Equals("a", "a"));
    Assert.IsFalse(queue.Equals("a", "b"));
    Assert.IsFalse(queue.Equals("a", null));
    Assert.IsFalse(queue.Equals(null, "a"));
    Assert.IsTrue(queue.Equals(null, null));
}
/// <summary>
/// Stress-times concurrent enqueue/dequeue: 100 threads each enqueue a share of
/// the 1M capacity, then dequeue everything, then enqueue twice the capacity so
/// the second half of each enqueue forces an internal dequeue on the full queue.
/// </summary>
public void MultiThreadEnqueue()
{
    const int capacity = 1000000;
    const int threadsCount = 100;
    const int count = capacity / threadsCount;
    var target = new ConcurrentFixedSizePriorityQueue<string, DateTime>(capacity);
    var execStats = new ExecWithStats[threadsCount];
    var watcher = new Stopwatch();

    // several threads enqueue elements
    watcher.Start();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Enqueue {0}", count), count, () => target.Enqueue("a", new DateTime()));
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(capacity, target.Count);
    Console.WriteLine("{0} threads each enqueue {1} elements. total time: {2}\n", threadsCount, count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);

    // several threads dequeue elements
    // BUG FIX: was watcher.Start(), which kept accumulating on top of the
    // enqueue phase's elapsed time; Restart() zeroes the stopwatch first.
    watcher.Restart();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Dequeue {0}", count), count, () => target.Dequeue());
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(0, target.Count);
    Console.WriteLine("\n{0} threads each dequeue {1} elements. total time: {2}\n", threadsCount, count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);

    // several threads enqueue double amount of elements
    // so on the second half each enqueue will have to do a dequeue because the queue will be full
    // BUG FIX: same accumulation problem — use Restart() for an isolated measurement.
    watcher.Restart();
    Parallel.For(0, threadsCount, index =>
    {
        execStats[index] = new ExecWithStats(string.Format("Enqueue {0}", 2 * count), 2 * count, () => target.Enqueue("a", new DateTime()));
        execStats[index].Exec();
    });
    watcher.Stop();
    Assert.AreEqual(capacity, target.Count);
    Console.WriteLine("\n{0} threads each enqueue {1} elements. total time: {2}\n", threadsCount, 2 * count, watcher.Elapsed);
    ExecWithStats.OutputStatsSummary(execStats);
}
/// <summary>
/// Races producer and consumer threads against each other and reports timing stats.
/// Odd-numbered threads enqueue while even-numbered threads dequeue; early on the
/// queue is often empty, so dequeues will throw, and the total number of dequeue
/// exceptions corresponds to the number of items left in the queue at the end.
/// </summary>
public void RaceWithStats()
{
    const int capacity = 1000000;
    const int threadsCount = 100;
    const int count = capacity / threadsCount;
    var queue = new ConcurrentFixedSizePriorityQueue<string, DateTime>(capacity);
    var stats = new List<ExecWithStats>();
    var countdown = new CountdownEvent(threadsCount);

    for (var i = 0; i < threadsCount; i++)
    {
        // Alternate roles: odd index => producer, even index => consumer.
        var job = i % 2 != 0
            ? new ExecWithStats(string.Format("Enqueue {0} elements", count), count, () => queue.Enqueue("a", new DateTime()), countdown)
            : new ExecWithStats(string.Format("Dequeue {0} elements", count), count, () => queue.Dequeue(), countdown);

        stats.Add(job);
        new Thread(() => job.Exec()).Start();
    }

    // Block until every worker thread has signaled completion.
    countdown.Wait();

    // Output stats summary
    ExecWithStats.OutputStatsSummary(stats);

    // Output queue state
    Console.WriteLine("Queue count:{0}, capacity:{1}", queue.Count, queue.Capacity);

    // Un-comment for a detailed list of stats
    //Console.WriteLine("---------------------");
    //foreach (var job in stats)
    //{
    //    var s = job.GetStats();
    //    Console.WriteLine("Name:{0}, Min: {1}, Median: {2}, Max {3}, Exceptions: {4}", s.Name, s.Min, s.Med, s.Max, s.ExceptionsCount);
    //}
}
/// <summary>
/// Interleaved enqueue/dequeue: elements must come out in priority order
/// (highest numeric priority first, as the asserts below pin down),
/// and the queue must end up empty.
/// </summary>
public void EnqueueDequeue()
{
    var queue = new ConcurrentFixedSizePriorityQueue<string, int>(7);

    queue.Enqueue("a", 7);
    queue.Enqueue("b", 6);
    queue.Enqueue("c", 5);
    Assert.AreEqual("a", queue.Dequeue());

    queue.Enqueue("d", 4);
    Assert.AreEqual("b", queue.Dequeue());

    queue.Enqueue("a", 7);
    Assert.AreEqual("a", queue.Dequeue());
    Assert.AreEqual("c", queue.Dequeue());
    Assert.AreEqual("d", queue.Dequeue());

    Assert.AreEqual(0, queue.Count);
}
/// <summary>
/// Single-threaded timing of enqueue-to-capacity, enumerator creation,
/// dequeue-to-empty, and enqueueing twice the capacity (which exercises the
/// full-queue path). Each phase is measured independently.
/// </summary>
public void SingleThreadTiming()
{
    const int capacity = 1000000;
    var target = new ConcurrentFixedSizePriorityQueue<string, int>(capacity);
    var watcher = new Stopwatch();

    watcher.Start();
    for (int i = 0; i < capacity; i++)
    {
        target.Enqueue("a", 1);
    }
    watcher.Stop();
    Assert.AreEqual(capacity, target.Count);
    Assert.AreEqual(capacity, target.Capacity);
    Console.WriteLine("Enqueue {0} elements: {1}", capacity, watcher.Elapsed);

    watcher.Restart();
    // ReSharper disable once UnusedVariable
    var enumerator = target.GetEnumerator();
    watcher.Stop();
    Console.WriteLine("Get enumerator for {0} elements: {1}", capacity, watcher.Elapsed);

    watcher.Restart();
    for (int i = 0; i < capacity; i++)
    {
        target.Dequeue();
    }
    watcher.Stop();
    Assert.AreEqual(0, target.Count);
    Console.WriteLine("Dequeue {0} elements: {1}", capacity, watcher.Elapsed);

    // BUG FIX: was watcher.Start(), which resumed the stopwatch and reported the
    // dequeue phase's time on top of this phase's; Restart() isolates the measurement.
    watcher.Restart();
    for (int i = 0; i < 2 * capacity; i++)
    {
        target.Enqueue("a", 1);
    }
    watcher.Stop();
    Assert.AreEqual(capacity, target.Count);
    Assert.AreEqual(capacity, target.Capacity);
    Console.WriteLine("Enqueue twice the capacity of {0} elements: {1}", capacity, watcher.Elapsed);
}
/// <summary>
/// When the queue is full, a further enqueue evicts the current highest-priority
/// element to make room for the new one (pinned by the expected orderings below).
/// </summary>
public void EnqueueAfterExceedingCapacity()
{
    var queue = new ConcurrentFixedSizePriorityQueue<string, int>(3);
    Assert.AreEqual(0, queue.Count);

    // Fill to capacity, then push one more.
    queue.Enqueue("a", 1);
    queue.Enqueue("b", 2);
    queue.Enqueue("c", 3);
    queue.Enqueue("d", 4);
    Assert.AreEqual(3, queue.Count);
    Assert.AreEqual("d,b,a", string.Join(",", queue));

    // A lower-priority newcomer is still admitted to the full queue.
    queue.Enqueue("e", 0);
    Assert.AreEqual(3, queue.Count);
    Assert.AreEqual("b,a,e", string.Join(",", queue));
}