/// <summary>
/// Add messages to the cache
/// </summary>
/// <param name="messages"></param>
public void AddToCache(IList<IBatchContainer> messages)
{
    DateTime dequeueTimeUtc = DateTime.UtcNow;
    foreach (IBatchContainer container in messages)
    {
        cache.Add(container as GeneratedBatchContainer, dequeueTimeUtc);
    }
}
public void AvoidCacheMissMultipleStreamsActive()
{
    var bufferPool = new ObjectPool<FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
    var dataAdapter = new TestCacheDataAdapter();
    var cache = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null, TimeSpan.FromSeconds(30));
    var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)), null, null);
    evictionStrategy.PurgeObservable = cache;
    var converter = new CachedMessageConverter(bufferPool, evictionStrategy);

    var seqNumber = 123;
    var streamKey = Guid.NewGuid();
    var stream = StreamId.Create(TestStreamNamespace, streamKey);

    // Enqueue a message for our stream
    var firstSequenceNumber = EnqueueMessage(streamKey);

    // Enqueue a few other messages for other streams
    EnqueueMessage(Guid.NewGuid());
    EnqueueMessage(Guid.NewGuid());

    // Consume the first event and see that the cursor has moved to the last seen event (not matching our stream identity)
    var cursor = cache.GetCursor(stream, new EventSequenceTokenV2(firstSequenceNumber));
    Assert.True(cache.TryGetNextMessage(cursor, out var firstContainer));
    Assert.False(cache.TryGetNextMessage(cursor, out _));

    // Remove multiple events, including the one that the cursor is currently pointing to
    cache.RemoveOldestMessage();
    cache.RemoveOldestMessage();
    cache.RemoveOldestMessage();

    // Enqueue another message for our stream
    var lastSequenceNumber = EnqueueMessage(streamKey);

    // We should be able to consume the event that was just pushed
    Assert.True(cache.TryGetNextMessage(cursor, out var lastContainer));
    Assert.Equal(stream, lastContainer.StreamId);
    Assert.Equal(lastSequenceNumber, lastContainer.SequenceToken.SequenceNumber);

    long EnqueueMessage(Guid streamId)
    {
        var now = DateTime.UtcNow;
        var msg = new TestQueueMessage
        {
            StreamId = StreamId.Create(TestStreamNamespace, streamId),
            SequenceNumber = seqNumber,
        };
        cache.Add(new List<CachedMessage>() { converter.ToCachedMessage(msg, now) }, now);
        seqNumber++;
        return msg.SequenceNumber;
    }
}
/// <summary>
/// Add messages to the cache
/// </summary>
/// <param name="messages"></param>
public void AddToCache(IList<IBatchContainer> messages)
{
    List<MemoryMessageData> memoryMessages = messages
        .Cast<MemoryBatchContainer<TSerializer>>()
        .Select(container => container.MessageData)
        .ToList();
    cache.Add(memoryMessages, DateTime.UtcNow);
}
/// <summary>
/// Add messages to the cache
/// </summary>
/// <param name="messages"></param>
public void AddToCache(IList<IBatchContainer> messages)
{
    DateTime dequeueTimeUtc = DateTime.UtcNow;
    foreach (IBatchContainer container in messages)
    {
        MemoryBatchContainer<TSerializer> memoryBatchContainer = (MemoryBatchContainer<TSerializer>)container;
        cache.Add(memoryBatchContainer.MessageData, dequeueTimeUtc);
    }
}
/// <summary>
/// Add messages to the cache
/// </summary>
/// <param name="messages"></param>
public void AddToCache(IList<IBatchContainer> messages)
{
    DateTime utcNow = DateTime.UtcNow;
    List<CachedMessage> generatedMessages = messages
        .Cast<GeneratedBatchContainer>()
        .Select(batch => QueueMessageToCachedMessage(batch, utcNow))
        .ToList();
    cache.Add(generatedMessages, utcNow);
}
/// <summary>
/// Add messages to the cache
/// </summary>
/// <param name="messages"></param>
public void AddToCache(IList<IBatchContainer> messages)
{
    DateTime utcNow = DateTime.UtcNow;
    List<CachedMessage> memoryMessages = messages
        .Cast<MemoryBatchContainer<TSerializer>>()
        .Select(container => container.MessageData)
        .Select(batch => QueueMessageToCachedMessage(batch, utcNow))
        .ToList();
    // Reuse the same timestamp for conversion and insertion so the whole batch
    // shares one dequeue time.
    cache.Add(memoryMessages, utcNow);
}
/// <summary>
/// Add a list of EventHub EventData to the cache.
/// </summary>
/// <param name="messages"></param>
/// <param name="dequeueTimeUtc"></param>
/// <returns></returns>
public List<StreamPosition> Add(List<EventData> messages, DateTime dequeueTimeUtc)
{
    List<StreamPosition> positions = new List<StreamPosition>();
    List<CachedMessage> cachedMessages = new List<CachedMessage>();
    foreach (EventData message in messages)
    {
        StreamPosition position = this.dataAdapter.GetStreamPosition(this.Partition, message);
        cachedMessages.Add(this.dataAdapter.FromQueueMessage(position, message, dequeueTimeUtc, this.GetSegment));
        positions.Add(position);
    }
    cache.Add(cachedMessages, dequeueTimeUtc);
    return positions;
}
/// <summary>
/// Add an EventHub EventData to the cache.
/// </summary>
/// <param name="message"></param>
/// <param name="dequeueTimeUtc"></param>
/// <returns></returns>
public StreamPosition Add(EventData message, DateTime dequeueTimeUtc)
{
    return cache.Add(message, dequeueTimeUtc);
}
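A minimal usage sketch for the single-message overload above. The helper name and the addToCache delegate are illustrative, not part of the real adapter; only the Add(EventData, DateTime) shape shown in the snippet is assumed.

// Hypothetical helper: forwards each dequeued EventData through the
// Add(EventData, DateTime) overload shown above and collects the
// StreamPosition returned for each event.
private static List<StreamPosition> CacheDequeuedBatch(
    IEnumerable<EventData> dequeuedMessages,
    Func<EventData, DateTime, StreamPosition> addToCache)
{
    var positions = new List<StreamPosition>();
    DateTime dequeueTimeUtc = DateTime.UtcNow; // one timestamp for the whole dequeue batch
    foreach (EventData message in dequeuedMessages)
    {
        positions.Add(addToCache(message, dequeueTimeUtc));
    }
    return positions;
}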
public void QueueCacheMissTest()
{
    var bufferPool = new TestBlockPool();
    var dataAdapter = new TestCacheDataAdapter(bufferPool);
    var cache = new PooledQueueCache<TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance);
    dataAdapter.PurgeAction = cache.Purge;
    int sequenceNumber = 10;
    IBatchContainer batch;
    IStreamIdentity streamId = new TestStreamIdentity { Guid = Guid.NewGuid(), Namespace = StreamNamespace };

    // No data in cache; getting a cursor should not throw.
    object cursor = cache.GetCursor(streamId, new EventSequenceToken(sequenceNumber++));
    Assert.IsNotNull(cursor);

    // Try to iterate; should return no message rather than throw.
    bool gotNext = cache.TryGetNextMessage(cursor, out batch);
    Assert.IsNotNull(cursor);
    Assert.IsFalse(gotNext);

    // Now add messages into the cache newer than the cursor,
    // adding enough to fill the pool.
    for (int i = 0; i < MessagesPerBuffer * PooledBufferCount; i++)
    {
        cache.Add(new TestQueueMessage
        {
            StreamGuid = streamId.Guid,
            StreamNamespace = StreamNamespace,
            SequenceNumber = sequenceNumber++,
        });
    }

    // Now that there is data and the cursor points to data older than what is in the cache,
    // using the cursor should throw.
    Exception ex = null;
    try
    {
        cache.TryGetNextMessage(cursor, out batch);
    }
    catch (QueueCacheMissException cacheMissException)
    {
        ex = cacheMissException;
    }
    Assert.IsNotNull(ex);

    // Try getting a new cursor into the cache from data before the cache. Should throw.
    ex = null;
    try
    {
        cursor = cache.GetCursor(streamId, new EventSequenceToken(10));
    }
    catch (QueueCacheMissException cacheMissException)
    {
        ex = cacheMissException;
    }
    Assert.IsNotNull(ex);

    // Get a valid cursor into the cache.
    cursor = cache.GetCursor(streamId, new EventSequenceToken(13));

    // Query once to make sure the cursor is good.
    gotNext = cache.TryGetNextMessage(cursor, out batch);
    Assert.IsNotNull(cursor);
    Assert.IsTrue(gotNext);

    // Since the pool should be full, adding one more message should trigger the cache to purge.
    cache.Add(new TestQueueMessage
    {
        StreamGuid = streamId.Guid,
        StreamNamespace = StreamNamespace,
        SequenceNumber = sequenceNumber++,
    });

    // After the purge, use of the cursor should throw.
    ex = null;
    try
    {
        cache.TryGetNextMessage(cursor, out batch);
    }
    catch (QueueCacheMissException cacheMissException)
    {
        ex = cacheMissException;
    }
    Assert.IsNotNull(ex);
}
private int RunGoldenPath(PooledQueueCache<TestQueueMessage, TestCachedMessage> cache, int startOfCache)
{
    int sequenceNumber = startOfCache;
    IBatchContainer batch;
    IStreamIdentity stream1 = new TestStreamIdentity { Guid = Guid.NewGuid(), Namespace = StreamNamespace };
    IStreamIdentity stream2 = new TestStreamIdentity { Guid = Guid.NewGuid(), Namespace = StreamNamespace };

    // Add messages into the cache newer than the cursor,
    // adding enough to fill the pool.
    for (int i = 0; i < MessagesPerBuffer * PooledBufferCount; i++)
    {
        cache.Add(new TestQueueMessage
        {
            StreamGuid = i % 2 == 0 ? stream1.Guid : stream2.Guid,
            StreamNamespace = StreamNamespace,
            SequenceNumber = sequenceNumber++,
        });
    }

    // Get a cursor for stream1 and walk all the events in the stream using the cursor.
    object stream1Cursor = cache.GetCursor(stream1, new EventSequenceToken(startOfCache));
    int stream1EventCount = 0;
    while (cache.TryGetNextMessage(stream1Cursor, out batch))
    {
        Assert.IsNotNull(stream1Cursor);
        Assert.IsNotNull(batch);
        Assert.AreEqual(stream1.Guid, batch.StreamGuid);
        Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
        Assert.IsNotNull(batch.SequenceToken);
        stream1EventCount++;
    }
    Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream1EventCount);

    // Get a cursor for stream2 and walk all the events in the stream using the cursor.
    object stream2Cursor = cache.GetCursor(stream2, new EventSequenceToken(startOfCache));
    int stream2EventCount = 0;
    while (cache.TryGetNextMessage(stream2Cursor, out batch))
    {
        Assert.IsNotNull(stream2Cursor);
        Assert.IsNotNull(batch);
        Assert.AreEqual(stream2.Guid, batch.StreamGuid);
        Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
        Assert.IsNotNull(batch.SequenceToken);
        stream2EventCount++;
    }
    Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream2EventCount);

    // Add a block's worth of events to the cache, then walk each cursor.
    // Do this enough times to fill the cache twice.
    for (int j = 0; j < PooledBufferCount * 2; j++)
    {
        for (int i = 0; i < MessagesPerBuffer; i++)
        {
            cache.Add(new TestQueueMessage
            {
                StreamGuid = i % 2 == 0 ? stream1.Guid : stream2.Guid,
                StreamNamespace = StreamNamespace,
                SequenceNumber = sequenceNumber++,
            });
        }

        // Walk all the new events in stream1 using its cursor.
        while (cache.TryGetNextMessage(stream1Cursor, out batch))
        {
            Assert.IsNotNull(stream1Cursor);
            Assert.IsNotNull(batch);
            Assert.AreEqual(stream1.Guid, batch.StreamGuid);
            Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
            Assert.IsNotNull(batch.SequenceToken);
            stream1EventCount++;
        }
        Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream1EventCount);

        // Walk all the new events in stream2 using its cursor.
        while (cache.TryGetNextMessage(stream2Cursor, out batch))
        {
            Assert.IsNotNull(stream2Cursor);
            Assert.IsNotNull(batch);
            Assert.AreEqual(stream2.Guid, batch.StreamGuid);
            Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
            Assert.IsNotNull(batch.SequenceToken);
            stream2EventCount++;
        }
        Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream2EventCount);
    }
    return sequenceNumber;
}
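For orientation, a minimal sketch of how a test might wire up a cache and drive the golden-path helper above. The construction mirrors the QueueCacheMissTest snippet earlier; the starting sequence number is arbitrary and the test-framework attribute is omitted, so treat the method as illustrative rather than the actual test.

// Hypothetical driver for RunGoldenPath: build the cache the same way the
// cache-miss test above does, then walk the golden path from an arbitrary
// starting sequence number.
public void GoldenPathTest()
{
    var bufferPool = new TestBlockPool();
    var dataAdapter = new TestCacheDataAdapter(bufferPool);
    var cache = new PooledQueueCache<TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance);
    dataAdapter.PurgeAction = cache.Purge;

    RunGoldenPath(cache, startOfCache: 111);
}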
/// <summary>
/// Add a list of EventHub EventData to the cache.
/// </summary>
/// <param name="messages"></param>
/// <param name="dequeueTimeUtc"></param>
/// <returns></returns>
public List<StreamPosition> Add(List<EventData> messages, DateTime dequeueTimeUtc)
{
    return cache.Add(messages, dequeueTimeUtc);
}
private int RunGoldenPath(PooledQueueCache<TestQueueMessage, TestCachedMessage> cache, int startOfCache)
{
    int sequenceNumber = startOfCache;
    IBatchContainer batch;
    IStreamIdentity stream1 = new StreamIdentity(Guid.NewGuid(), TestStreamNamespace);
    IStreamIdentity stream2 = new StreamIdentity(Guid.NewGuid(), TestStreamNamespace);

    // Add messages into the cache newer than the cursor,
    // adding enough to fill the pool.
    List<TestQueueMessage> messages = Enumerable.Range(0, MessagesPerBuffer * PooledBufferCount)
        .Select(i => new TestQueueMessage
        {
            StreamGuid = i % 2 == 0 ? stream1.Guid : stream2.Guid,
            StreamNamespace = TestStreamNamespace,
            SequenceNumber = sequenceNumber + i
        })
        .ToList();
    cache.Add(messages, DateTime.UtcNow);
    sequenceNumber += MessagesPerBuffer * PooledBufferCount;

    // Get a cursor for stream1 and walk all the events in the stream using the cursor.
    object stream1Cursor = cache.GetCursor(stream1, new EventSequenceTokenV2(startOfCache));
    int stream1EventCount = 0;
    while (cache.TryGetNextMessage(stream1Cursor, out batch))
    {
        Assert.NotNull(stream1Cursor);
        Assert.NotNull(batch);
        Assert.Equal(stream1.Guid, batch.StreamGuid);
        Assert.Equal(TestStreamNamespace, batch.StreamNamespace);
        Assert.NotNull(batch.SequenceToken);
        stream1EventCount++;
    }
    Assert.Equal((sequenceNumber - startOfCache) / 2, stream1EventCount);

    // Get a cursor for stream2 and walk all the events in the stream using the cursor.
    object stream2Cursor = cache.GetCursor(stream2, new EventSequenceTokenV2(startOfCache));
    int stream2EventCount = 0;
    while (cache.TryGetNextMessage(stream2Cursor, out batch))
    {
        Assert.NotNull(stream2Cursor);
        Assert.NotNull(batch);
        Assert.Equal(stream2.Guid, batch.StreamGuid);
        Assert.Equal(TestStreamNamespace, batch.StreamNamespace);
        Assert.NotNull(batch.SequenceToken);
        stream2EventCount++;
    }
    Assert.Equal((sequenceNumber - startOfCache) / 2, stream2EventCount);

    // Add a block's worth of events to the cache, then walk each cursor.
    // Do this enough times to fill the cache twice.
    for (int j = 0; j < PooledBufferCount * 2; j++)
    {
        List<TestQueueMessage> moreMessages = Enumerable.Range(0, MessagesPerBuffer)
            .Select(i => new TestQueueMessage
            {
                StreamGuid = i % 2 == 0 ? stream1.Guid : stream2.Guid,
                StreamNamespace = TestStreamNamespace,
                SequenceNumber = sequenceNumber + i
            })
            .ToList();
        cache.Add(moreMessages, DateTime.UtcNow);
        sequenceNumber += MessagesPerBuffer;

        // Walk all the new events in stream1 using its cursor.
        while (cache.TryGetNextMessage(stream1Cursor, out batch))
        {
            Assert.NotNull(stream1Cursor);
            Assert.NotNull(batch);
            Assert.Equal(stream1.Guid, batch.StreamGuid);
            Assert.Equal(TestStreamNamespace, batch.StreamNamespace);
            Assert.NotNull(batch.SequenceToken);
            stream1EventCount++;
        }
        Assert.Equal((sequenceNumber - startOfCache) / 2, stream1EventCount);

        // Walk all the new events in stream2 using its cursor.
        while (cache.TryGetNextMessage(stream2Cursor, out batch))
        {
            Assert.NotNull(stream2Cursor);
            Assert.NotNull(batch);
            Assert.Equal(stream2.Guid, batch.StreamGuid);
            Assert.Equal(TestStreamNamespace, batch.StreamNamespace);
            Assert.NotNull(batch.SequenceToken);
            stream2EventCount++;
        }
        Assert.Equal((sequenceNumber - startOfCache) / 2, stream2EventCount);
    }
    return sequenceNumber;
}
private void AvoidCacheMiss(bool emptyCache)
{
    var bufferPool = new ObjectPool<FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
    var dataAdapter = new TestCacheDataAdapter();
    var cache = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null, TimeSpan.FromSeconds(30));
    var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)), null, null);
    evictionStrategy.PurgeObservable = cache;
    var converter = new CachedMessageConverter(bufferPool, evictionStrategy);

    var seqNumber = 123;
    var stream = StreamId.Create(TestStreamNamespace, Guid.NewGuid());

    // Enqueue a message for the stream
    var firstSequenceNumber = EnqueueMessage(stream);

    // Consume the first event
    var cursor = cache.GetCursor(stream, new EventSequenceTokenV2(firstSequenceNumber));
    Assert.True(cache.TryGetNextMessage(cursor, out var firstContainer));
    Assert.Equal(stream, firstContainer.StreamId);
    Assert.Equal(firstSequenceNumber, firstContainer.SequenceToken.SequenceNumber);

    // Remove the first message, which was consumed
    cache.RemoveOldestMessage();

    if (!emptyCache)
    {
        // Enqueue messages unrelated to the stream so the cache isn't empty
        EnqueueMessage(StreamId.Create(TestStreamNamespace, Guid.NewGuid()));
        EnqueueMessage(StreamId.Create(TestStreamNamespace, Guid.NewGuid()));
        EnqueueMessage(StreamId.Create(TestStreamNamespace, Guid.NewGuid()));
        EnqueueMessage(StreamId.Create(TestStreamNamespace, Guid.NewGuid()));
        EnqueueMessage(StreamId.Create(TestStreamNamespace, Guid.NewGuid()));
        EnqueueMessage(StreamId.Create(TestStreamNamespace, Guid.NewGuid()));
    }

    // Enqueue another message for the stream
    var lastSequenceNumber = EnqueueMessage(stream);

    // We should be able to consume the event that was just pushed
    Assert.True(cache.TryGetNextMessage(cursor, out var lastContainer));
    Assert.Equal(stream, lastContainer.StreamId);
    Assert.Equal(lastSequenceNumber, lastContainer.SequenceToken.SequenceNumber);

    long EnqueueMessage(StreamId streamId)
    {
        var now = DateTime.UtcNow;
        var msg = new TestQueueMessage
        {
            StreamId = streamId,
            SequenceNumber = seqNumber,
        };
        cache.Add(new List<CachedMessage>() { converter.ToCachedMessage(msg, now) }, now);
        seqNumber++;
        return msg.SequenceNumber;
    }
}
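A brief usage sketch for the helper above: since it takes the emptyCache flag as a parameter, a test class would presumably exercise both branches through thin entry points like the ones below. Method names are illustrative and no test-framework attribute is assumed.

// Hypothetical entry points covering both branches of AvoidCacheMiss.
public void AvoidCacheMiss_EmptyCache() => AvoidCacheMiss(emptyCache: true);
public void AvoidCacheMiss_CacheNotEmpty() => AvoidCacheMiss(emptyCache: false);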