// Fills one read buffer up to READ_BUFFER_THRESHOLD recorded reads and verifies
// the occupied slots and counters, then performs one more read and checks that
// the buffer was drained (all slots cleared, read count caught up to writes).
public void drain_onRead(ConcurrentLinkedDictionary <int, int> map) {
    // Buffer and write counter for the current thread's read-buffer stripe.
    PaddedAtomicReference <ConcurrentLinkedDictionary <int, int> .Node>[] buffer = map.readBuffers[ConcurrentLinkedDictionary <int, int> .readBufferIndex()];
    PaddedAtomicLong writeCounter = map.readBufferWriteCount[ConcurrentLinkedDictionary <int, int> .readBufferIndex()];

    // Record exactly THRESHOLD reads of an existing key.
    for (int i = 0; i < ConcurrentLinkedDictionary <int, int> .READ_BUFFER_THRESHOLD; i++) {
        var x = map[1];
    }

    // Each recorded read should occupy one buffer slot until a drain occurs.
    int pending = 0;
    foreach (PaddedAtomicReference <ConcurrentLinkedDictionary <int, int> .Node> slot in buffer) {
        if (slot.GetValue() != null) {
            pending++;
        }
    }
    Assert.That(pending, Is.EqualTo(ConcurrentLinkedDictionary <int, int> .READ_BUFFER_THRESHOLD));
    Assert.That((int)writeCounter.GetValue(), Is.EqualTo(pending));

    // One more read crosses the threshold; afterwards the drain should have
    // consumed everything that was written.
    var k = map [1];
    Assert.That(map.readBufferReadCount[ConcurrentLinkedDictionary <int, int> .readBufferIndex()], Is.EqualTo(writeCounter.GetValue()));

    // NOTE(review): the loop bound is the number of read buffers, but 'i' is used
    // to index slots WITHIN one buffer — confirm this should not be the per-buffer
    // slot count (READ_BUFFER_SIZE) instead.
    for (int i = 0; i < map.readBuffers.Length; i++) {
        Assert.That(map.readBuffers[ConcurrentLinkedDictionary <int, int> .readBufferIndex()][i].GetValue(), Is.Null);
    }
}
// A node retired by a concurrent remove() must transition to DEAD when the
// pending eviction finally runs, without notifying the eviction listener.
public void evict_alreadyRemoved() {
    ConcurrentLinkedDictionary <int, int> map = new Builder <int, int>()
        .MaximumWeightedCapacity(1)
        .Listener(listener)
        .Build();
    map.put(0, 0);
    // Hold the eviction lock so the other thread's eviction cannot complete yet.
    map.evictionLock.EnterWriteLock();
    try {
        ConcurrentLinkedDictionary <int, int> .Node node = map.data[0];
        checkStatus(map, node, Status.ALIVE);
        new System.Threading.Thread(() => {
            // put(1,1) pushes the map over capacity 1 (eviction is pending behind
            // the lock held above); remove(0) retires the node first.
            map.put(1, 1);
            Assert.That(map.remove(0), Is.EqualTo(0));
        }).Start();
        // Wait until the removal is visible, then the node must be RETIRED.
        waitUntil(() => !((IDictionary <int, int>)map).ContainsKey(0));
        checkStatus(map, node, Status.RETIRED);
        // Draining completes the pending work: the retired node becomes DEAD.
        map.DrainBuffers();
        checkStatus(map, node, Status.DEAD);
        Assert.That(map.ContainsKey(1), Is.True);
        // The entry left via remove(), not eviction, so no listener callback.
        Assert.That(listener.Evictions.Count, Is.EqualTo(0));
    } finally {
        map.evictionLock.ExitWriteLock();
    }
}
/// <summary>
/// Asserts that <paramref name="task"/> blocks behind the eviction lock while a
/// buffer drain is required: a worker thread marks a drain as REQUIRED and runs
/// the task; it must queue on the lock held by this thread and only finish once
/// the lock is released.
/// </summary>
void checkDrainBlocks(ConcurrentLinkedDictionary <int, int> map, Action task) {
    // Single-element array so the lambda can publish completion to this thread.
    var done = new bool[1];
    Thread thread = new Thread(() => {
        // Force the next maintenance cycle to attempt a drain.
        map.drainStatus.SetValue(ConcurrentLinkedDictionary <int, int> .DrainStatus.REQUIRED);
        task();
        done[0] = true;
    });
    map.evictionLock.EnterWriteLock();
    try {
        thread.Start();
        // The task must be observed waiting on the eviction lock we hold.
        waitUntil(() => map.evictionLock.WaitingWriteCount == 1);
    } finally {
        map.evictionLock.ExitWriteLock();
    }
    // Once the lock is released the blocked task should run to completion.
    waitUntil(() => done[0]);
}
// Hammers a weighted map from many threads, each repeatedly writing its own
// distinctly-weighted list value, and relies on executeWithTimeOut to detect
// hangs or invariant violations.
public void weightedConcurrency(Builder <int, IList <int> > builder) {
    ConcurrentLinkedDictionary <int, IList <int> > map = builder
        .Weigher(Weighers.List <int>())
        .MaximumWeightedCapacity(threads)
        .ConcurrencyLevel(threads)
        .Build();

    // One sentinel-filled list per worker, with distinct weights 1..threads.
    var workQueue = new ConcurrentQueue <List <int> >();
    for (int weight = 1; weight <= threads; weight++) {
        var sentinel = new int[weight];
        sentinel.SetAll(int.MinValue);
        workQueue.Enqueue(sentinel.ToList());
    }

    executeWithTimeOut(map, () => ConcurrentTestHarness.timeTasks(threads, () => {
        // todo: was concurrentlinkedqueue.poll blocking?
        List <int> item;
        if (workQueue.TryDequeue(out item)) {
            for (int n = 0; n < iterations; n++) {
                map.put(n % 10, item);
            }
        }
    }));
}
/// <summary>
/// Renders the eviction deque one node per line, walking in the requested
/// direction under the eviction lock. If the same node is encountered twice the
/// output is truncated with a "Loop detected" failure marker, since that means
/// the linked structure is corrupted.
/// </summary>
private static string dequeToString <K, V>(ConcurrentLinkedDictionary <K, V> map, bool ascending) {
    map.evictionLock.EnterWriteLock();
    try {
        StringBuilder buffer = new StringBuilder("\n");
        // Tracks visited nodes so a cycle in the deque is detected.
        ISet <object> seen = new HashSet <object>();
        IEnumerator <ConcurrentLinkedDictionary <K, V> .Node> iterator = ascending
            ? (IEnumerator <ConcurrentLinkedDictionary <K, V> .Node>)map.evictionDeque.GetEnumerator()
            : map.evictionDeque.GetDescendingEnumerator();
        while (iterator.MoveNext()) {
            ConcurrentLinkedDictionary <K, V> .Node node = iterator.Current;
            buffer.Append(nodeToString(node)).Append("\n");
            if (!seen.Add(node)) {
                buffer.Append("Failure: Loop detected\n");
                break;
            }
        }
        return buffer.ToString();
    } finally {
        map.evictionLock.ExitWriteLock();
    }
}
// Captures the shared map and the per-thread key sets the thrasher workers
// will operate on; the atomic index hands out the next unclaimed set.
public Thrasher(MultiThreadedTest test, ConcurrentLinkedDictionary <int, int> map, List <List <int> > sets) {
    _test = test;
    this.map = map;
    this.sets = sets;
    this.index = new AtomicInteger();
}
// NOTE(review): presumably the framework asserts that setCapacity(-1) throws
// (e.g. via an expected-exception attribute not visible in this chunk); the
// finally block then verifies the rejected update left the capacity untouched
// — confirm against the test declaration.
public void capacity_decreaseBelowMinimum(ConcurrentLinkedDictionary <int, int> map) {
    try {
        map.setCapacity(-1);
    } finally {
        // Capacity must remain at the fixture default after the invalid request.
        Assert.That(map.Capacity(), Is.EqualTo(Capacity()));
    }
}
// A limit equal to the population size returns exactly those entries in
// ascending order.
public void AscendingDictionaryWithLimit_greaterThan(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    WarmUp(baseline, 1, Capacity() / 2);

    Assert.That(map.AscendingDictionaryWithLimit((int)Capacity() / 2), Is.EqualTo(baseline));
}
// A limit larger than the population size yields every key in ascending order.
public void AscendingKeySetWithLimit_lessThan(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    WarmUp(baseline, 1, Capacity());

    Assert.That(map.AscendingKeySetWithLimit((int)Capacity() * 2), Is.EqualTo(baseline.Keys));
}
// Reads each key (promoting its entry's recency) and then verifies the
// resulting eviction order.
private void checkReorder(ConcurrentLinkedDictionary <int, int> map, IList <int> keys, params int[] expect) {
    foreach (int key in keys) {
        var unused = map[key]; // read access records a recency hit
    }
    checkContainsInOrder(map, expect);
}
// Raising the maximum capacity must preserve the contents and report the new
// ceiling.
public void capacity_increase(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> snapshot = immutableCopyOf(newWarmedMap());
    long raisedCapacity = 2 * Capacity();

    map.setCapacity(raisedCapacity);

    Assert.That(map, Is.EqualTo(snapshot));
    Assert.That(map.Capacity(), Is.EqualTo(raisedCapacity));
}
// Inserts each key (potentially triggering evictions) and verifies which
// entries survive, and in what order.
private void checkEvict(ConcurrentLinkedDictionary <int, int> map, IList <int> keys, params int[] expect) {
    foreach (int key in keys) {
        map.put(key, key);
    }
    checkContainsInOrder(map, expect);
}
// Validates the eviction deque: link integrity (via checkLinks) plus a node
// count that matches the map's entry count.
private void checkEvictionDeque(ConcurrentLinkedDictionary <K, V> map) {
    var deque = map.evictionDeque;
    checkLinks(map);
    builder.ExpectThat("dequeue count incorrect", deque.Count, Is.EqualTo(map.Count));
    // todo: need to implement validLinkedDequeue!!
    //validLinkedDeque().matchesSafely(map.evictionDeque, builder.getDescription());
}
// Descending-dictionary views must block while a required drain is pending.
public void drain_blocksDescendingMap(ConcurrentLinkedDictionary <int, int> map) {
    checkDrainBlocks(map, () => map.DescendingDictionary());
    checkDrainBlocks(map, () => map.DescendingDictionaryWithLimit((int)Capacity()));
}
// Ascending key-set views must block while a required drain is pending.
public void drain_blocksAscendingKeySet(ConcurrentLinkedDictionary <int, int> map) {
    checkDrainBlocks(map, () => map.AscendingKeySet());
    checkDrainBlocks(map, () => map.AscendingKeySetWithLimit((int)Capacity()));
}
// A limit larger than the population size yields all entries in descending
// key order.
public void DescendingDictionaryWithLimit_lessThan(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    for (int key = (int)Capacity(); key > 0; key--) {
        baseline[key] = -key;
    }

    Assert.That(map.DescendingDictionaryWithLimit((int)Capacity() * 2), Is.EqualTo(baseline));
}
// Asserts that every internal structure agrees the map holds no entries:
// backing table, weighted size (both views), and the eviction deque.
private void CheckIsEmpty(ConcurrentLinkedDictionary <K, V> map) {
    map.DrainBuffers();

    builder.ExpectThat(map.data.IsEmpty, Is.True);
    builder.ExpectThat(map.data.Count, Is.EqualTo(0));
    builder.ExpectThat(map.WeightedSize(), Is.EqualTo(0));
    builder.ExpectThat(map.weightedSize.GetValue(), Is.EqualTo(0));
    builder.ExpectThat(map.evictionDeque.Peek(), Is.Null);
}
// A limit larger than the population size yields the complete key set.
public void DescendingKeySetWithLimit_lessThan(ConcurrentLinkedDictionary <int, int> map) {
    ISet <int> expectedKeys = new SortedSet <int>();
    for (int key = (int)Capacity(); key > 0; key--) {
        expectedKeys.Add(key);
    }

    Assert.That(map.DescendingKeySetWithLimit((int)Capacity() * 2), Is.EqualTo(expectedKeys));
}
// A limit larger than the population size yields every entry, as a sorted
// dictionary, in ascending order.
public void AscendingDictionaryWithLimit_lessThan(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    WarmUp(baseline, 1, Capacity());

    var view = map.AscendingDictionaryWithLimit((int)Capacity() * 2);

    Assert.That(view, Is.InstanceOf <SortedDictionary <int, int> >());
    Assert.That(view.ToArray(), Is.EqualTo(baseline.ToArray()));
}
// A task handed to afterWrite must be applied immediately and leave the write
// buffer empty.
public void exceedsMaximumBufferSize_onWrite(ConcurrentLinkedDictionary <int, int> map) {
    var executed = new bool[1]; // array cell so the lambda can flag execution

    map.afterWrite(() => { executed[0] = true; });

    Assert.That(executed[0], Is.True);
    Assert.That(map.writeBuffer, HasCount(0));
}
// Applies an operation that should count as an access and verifies it moved the
// head of the eviction deque, leaving the map in a valid state.
private void updateRecency(ConcurrentLinkedDictionary <int, int> map, Action operation) {
    var headBefore = map.evictionDeque.Peek();

    operation();
    map.DrainBuffers();

    Assert.That(map.evictionDeque.Peek(), Is.Not.SameAs(headBefore));
    Assert.That(map.evictionDeque.Count, Is.Not.EqualTo(1));
    Assert.That(map, validConcurrentLinkedDictionary <int, int>());
}
// The limited key-set view is a snapshot: writes made afterwards must not
// leak into it.
public void AscendingKeySetWithLimit_snapshot(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    WarmUp(baseline, 1, Capacity() / 2);
    ISet <int> snapshot = map.AscendingKeySetWithLimit((int)Capacity() / 2);

    map.put((int)Capacity(), (int)-Capacity());

    Assert.That(snapshot, Is.EqualTo(baseline.Keys));
}
// Seeds the current stripe's read-buffer write count just below the threshold
// and verifies that the drain fires only once afterRead pushes it past it.
public void exceedsMaximumBufferSize_onRead(ConcurrentLinkedDictionary <int, int> map) {
    // Drain watermark for the current thread's read-buffer stripe.
    PaddedAtomicLong drainCounter = map.readBufferDrainAtWriteCount[ConcurrentLinkedDictionary <int, int> .readBufferIndex()];
    // Pretend THRESHOLD-1 reads have already been recorded.
    map.readBufferWriteCount[ConcurrentLinkedDictionary <int, int> .readBufferIndex()].SetValue(ConcurrentLinkedDictionary <int, int> .READ_BUFFER_THRESHOLD - 1);
    map.afterRead(null);
    // Reaching the threshold alone must not have triggered a drain yet.
    Assert.That(drainCounter.GetValue(), Is.EqualTo(0L));
    map.afterRead(null);
    // Exceeding the threshold records the drain watermark (THRESHOLD + 1 writes).
    Assert.That(drainCounter.GetValue(), Is.EqualTo(ConcurrentLinkedDictionary <int, int> .READ_BUFFER_THRESHOLD + 1L));
}
/// <summary>
/// Fixture setup for the memory-leak test: resets the elapsed-time counter,
/// opens a fresh status log, starts the periodic status timer, and builds a new
/// bounded map sized to the thread count.
/// </summary>
public void beforeMemoryLeakTest() {
    runningTime = 0;
    // FileMode.Create truncates output left over from a previous run;
    // OpenOrCreate would leave stale trailing bytes in the status file.
    stream = new StreamWriter(new FileStream(statusFile, FileMode.Create));
    // Fire the Status callback every statusInterval seconds.
    timer = new Timer(Status, null, statusInterval * 1000, statusInterval * 1000);
    map = new Builder <long, long>()
        .MaximumWeightedCapacity(threads)
        .Build();
}
// The limited dictionary view is a sorted snapshot: later writes must not
// appear in it.
public void AscendingDictionaryWithLimit_snapshot(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    WarmUp(baseline, 1, Capacity() / 2);
    IDictionary <int, int> snapshot = map.AscendingDictionaryWithLimit((int)Capacity() / 2);

    map.put((int)Capacity(), (int)-Capacity());

    Assert.That(snapshot, Is.InstanceOf <SortedDictionary <int, int> >());
    Assert.That(snapshot.ToArray(), Is.EqualTo(baseline.ToArray()));
}
// GetQuietly must not count as an access: the deque head and the drain counter
// are unchanged after draining.
public void updateRecency_onGetQuietly(ConcurrentLinkedDictionary <int, int> map) {
    PaddedAtomicLong drainCounter = map.readBufferDrainAtWriteCount[ConcurrentLinkedDictionary <int, int> .readBufferIndex()];
    var head = map.evictionDeque.Peek();
    long drainsBefore = drainCounter.GetValue();

    map.GetQuietly(head.Key);
    map.DrainBuffers();

    Assert.That(map.evictionDeque.Peek(), Is.SameAs(head));
    Assert.That(drainCounter.GetValue(), Is.EqualTo(drainsBefore));
}
// A listener that throws during eviction must not corrupt the map's internal
// invariants.
public void evict_listenerFails(Builder <int, int> builder) {
    var failingListener = new FailingEvictionListener <int, int> ();
    ConcurrentLinkedDictionary <int, int> map = builder
        .MaximumWeightedCapacity(0)
        .Listener(failingListener)
        .Build();

    try {
        // Capacity 0 evicts (and notifies) on every insertion.
        WarmUp(map, 1, Capacity());
    } finally {
        Assert.That(map, validConcurrentLinkedDictionary <int, int>());
    }
}
// Simulates a map already at the maximum weighted capacity and verifies the
// next insertion evicts the old entry instead of overflowing the weighted size.
public void evict_maximumCapacity(Builder <int, int> builder) {
    ConcurrentLinkedDictionary <int, int> map = builder
        .MaximumWeightedCapacity(MAXIMUM_CAPACITY)
        .Build();
    map.put(1, 2);
    // Force the internal counters to the ceiling without inserting entries.
    map.capacity.SetValue(MAXIMUM_CAPACITY);
    map.weightedSize.SetValue(MAXIMUM_CAPACITY);

    map.put(2, 3);

    Assert.That(map.WeightedSize(), Is.EqualTo(MAXIMUM_CAPACITY));
    Assert.That(map, Is.EqualTo(singletonMap(2, 3)));
}
// Drains pending operations, then checks the size, the key membership, and the
// exact LRU ordering of the eviction deque.
private void checkContainsInOrder(ConcurrentLinkedDictionary <int, int> map, params int[] expect) {
    map.DrainBuffers();

    var orderedKeys = new List <int> ();
    foreach (ConcurrentLinkedDictionary <int, int> .Node node in map.evictionDeque) {
        orderedKeys.Add(node.Key);
    }

    Assert.That(map.Count, Is.EqualTo(expect.Length));
    Assert.That(map.Keys, CollectionConstraints.ContainsInAnyOrder <int>(expect));
    Assert.That(orderedKeys, Is.EqualTo(asList(expect)));
}
// The limited descending view is a snapshot of the top half of the keys; a
// subsequent put must not alter it.
public void DescendingDictionaryWithLimit_snapshot(ConcurrentLinkedDictionary <int, int> map) {
    IDictionary <int, int> baseline = newLinkedHashMap <int, int>();
    for (int key = (int)Capacity(); key > Capacity() / 2; key--) {
        baseline[key] = -key;
    }
    IDictionary <int, int> snapshot = map.DescendingDictionaryWithLimit((int)Capacity() / 2);

    map.put((int)Capacity(), (int)-Capacity());

    Assert.That(snapshot, Is.EqualTo(baseline));
}