Example #1
        /// <summary>
        /// Pooled cache for memory stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="logger"></param>
        /// <param name="serializer"></param>
        public MemoryPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, Logger logger, TSerializer serializer)
        {
            var dataAdapter = new CacheDataAdapter(bufferPool, serializer);

            cache = new PooledQueueCache <MemoryMessageData, MemoryMessageData>(dataAdapter, CacheDataComparer.Instance, logger);
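            // Route the adapter's purge notifications to the cache so messages held in reclaimed blocks are purged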
            dataAdapter.PurgeAction = cache.Purge;
        }
        public void SimpleCacheMiss()
        {
            var bufferPool       = new ObjectPool <FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
            var dataAdapter      = new TestCacheDataAdapter();
            var cache            = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null, TimeSpan.FromSeconds(10));
            var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)), null, null);

            evictionStrategy.PurgeObservable = cache;
            var converter = new CachedMessageConverter(bufferPool, evictionStrategy);

            var seqNumber = 123;
            var streamKey = Guid.NewGuid();
            var stream    = StreamId.Create(TestStreamNamespace, streamKey);

            var cursor = cache.GetCursor(stream, new EventSequenceTokenV2(seqNumber));

            // Start by enqueuing a message for the stream, followed by another destined for a different stream
            EnqueueMessage(streamKey);
            EnqueueMessage(Guid.NewGuid());
            // Consume the stream, should be fine
            Assert.True(cache.TryGetNextMessage(cursor, out _));
            Assert.False(cache.TryGetNextMessage(cursor, out _));

            // Enqueue a new batch
            // First and last messages destined for stream, following messages
            // destined for other streams
            EnqueueMessage(streamKey);
            for (var idx = 0; idx < 20; idx++)
            {
                EnqueueMessage(Guid.NewGuid());
            }

            // Remove first three messages from the cache
            cache.RemoveOldestMessage(); // Destined for stream, consumed
            cache.RemoveOldestMessage(); // Not destined for stream
            cache.RemoveOldestMessage(); // Destined for stream, not consumed

            // Enqueue a new message for stream
            EnqueueMessage(streamKey);

            // Should throw since we missed the second message destined for stream
            Assert.Throws <QueueCacheMissException>(() => cache.TryGetNextMessage(cursor, out _));

            long EnqueueMessage(Guid streamId)
            {
                var now = DateTime.UtcNow;
                var msg = new TestQueueMessage
                {
                    StreamId       = StreamId.Create(TestStreamNamespace, streamId),
                    SequenceNumber = seqNumber,
                };

                cache.Add(new List <CachedMessage>()
                {
                    converter.ToCachedMessage(msg, now)
                }, now);
                seqNumber++;
                return msg.SequenceNumber;
            }
        }
Example #3
 /// <summary>
 /// EventHub queue cache.
 /// </summary>
 /// <param name="partition">Partition this instance is caching.</param>
 /// <param name="defaultMaxAddCount">Default max number of items that can be added to the cache between purge calls.</param>
 /// <param name="bufferPool">raw data block pool.</param>
 /// <param name="dataAdapter">Adapts EventData to cached.</param>
 /// <param name="evictionStrategy">Eviction strategy manage purge related events</param>
 /// <param name="checkpointer">Logic used to store queue position.</param>
 /// <param name="logger"></param>
 /// <param name="cacheMonitor"></param>
 /// <param name="cacheMonitorWriteInterval"></param>
 /// <param name="metadataMinTimeInCache"></param>
 public EventHubQueueCache(
     string partition,
     int defaultMaxAddCount,
     IObjectPool <FixedSizeBuffer> bufferPool,
     IEventHubDataAdapter dataAdapter,
     IEvictionStrategy evictionStrategy,
     IStreamQueueCheckpointer <string> checkpointer,
     ILogger logger,
     ICacheMonitor cacheMonitor,
     TimeSpan? cacheMonitorWriteInterval,
     TimeSpan? metadataMinTimeInCache)
 {
     this.Partition          = partition;
     this.defaultMaxAddCount = defaultMaxAddCount;
     this.bufferPool         = bufferPool;
     this.dataAdapter        = dataAdapter;
     this.checkpointer       = checkpointer;
     this.cache                            = new PooledQueueCache(dataAdapter, logger, cacheMonitor, cacheMonitorWriteInterval, metadataMinTimeInCache);
     this.cacheMonitor                     = cacheMonitor;
     this.evictionStrategy                 = evictionStrategy;
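     // Let the eviction strategy observe purges on this cache and drive eviction of cached messages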
     this.evictionStrategy.OnPurged        = this.OnPurge;
     this.evictionStrategy.PurgeObservable = this.cache;
     this.cachePressureMonitor             = new AggregatedCachePressureMonitor(logger, cacheMonitor);
     this.logger                           = logger;
 }
Example #4
        /// <summary>
        /// Pooled cache for generator stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="logger"></param>
        public GeneratorPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, Logger logger)
        {
            var dataAdapter = new CacheDataAdapter(bufferPool);

            cache = new PooledQueueCache <GeneratedBatchContainer, CachedMessage>(dataAdapter, CacheDataComparer.Instance, logger);
            dataAdapter.PurgeAction = cache.Purge;
        }
Example #5
 /// <summary>
 /// Construct EventHub queue cache.
 /// </summary>
 /// <param name="defaultMaxAddCount">Default max number of items that can be added to the cache between purge calls.</param>
 /// <param name="checkpointer">Logic used to store queue position.</param>
 /// <param name="cacheDataAdapter">Performs data transforms appropriate for the various types of queue data.</param>
 /// <param name="comparer">Compares cached data</param>
 /// <param name="logger"></param>
 protected EventHubQueueCache(int defaultMaxAddCount, IStreamQueueCheckpointer <string> checkpointer, ICacheDataAdapter <EventData, TCachedMessage> cacheDataAdapter, ICacheDataComparer <TCachedMessage> comparer, Logger logger)
 {
     this.defaultMaxAddCount = defaultMaxAddCount;
     Checkpointer            = checkpointer;
     cache = new PooledQueueCache <EventData, TCachedMessage>(cacheDataAdapter, comparer, logger);
     cacheDataAdapter.PurgeAction = cache.Purge;
     cache.OnPurged            = OnPurge;
     this.cachePressureMonitor = new AggregatedCachePressureMonitor(logger);
 }
Example #6
        public void GoldenPathTest()
        {
            var bufferPool  = new TestBlockPool();
            var dataAdapter = new TestCacheDataAdapter(bufferPool);
            var cache       = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance);

            dataAdapter.PurgeAction = cache.Purge;
            RunGoldenPath(cache, 111);
        }
        public void AvoidCacheMissMultipleStreamsActive()
        {
            var bufferPool       = new ObjectPool <FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
            var dataAdapter      = new TestCacheDataAdapter();
            var cache            = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null, TimeSpan.FromSeconds(30));
            var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)), null, null);

            evictionStrategy.PurgeObservable = cache;
            var converter = new CachedMessageConverter(bufferPool, evictionStrategy);

            var seqNumber = 123;
            var streamKey = Guid.NewGuid();
            var stream    = StreamId.Create(TestStreamNamespace, streamKey);

            // Enqueue a message for our stream
            var firstSequenceNumber = EnqueueMessage(streamKey);

            // Enqueue a few other messages for other streams
            EnqueueMessage(Guid.NewGuid());
            EnqueueMessage(Guid.NewGuid());

            // Consume the first event and verify that the cursor has advanced to the last seen event (even though it does not match our stream identity)
            var cursor = cache.GetCursor(stream, new EventSequenceTokenV2(firstSequenceNumber));

            Assert.True(cache.TryGetNextMessage(cursor, out var firstContainer));
            Assert.False(cache.TryGetNextMessage(cursor, out _));

            // Remove multiple events, including the one that the cursor is currently pointing to
            cache.RemoveOldestMessage();
            cache.RemoveOldestMessage();
            cache.RemoveOldestMessage();

            // Enqueue another message for stream
            var lastSequenceNumber = EnqueueMessage(streamKey);

            // Should be able to consume the event just pushed
            Assert.True(cache.TryGetNextMessage(cursor, out var lastContainer));
            Assert.Equal(stream, lastContainer.StreamId);
            Assert.Equal(lastSequenceNumber, lastContainer.SequenceToken.SequenceNumber);

            long EnqueueMessage(Guid streamId)
            {
                var now = DateTime.UtcNow;
                var msg = new TestQueueMessage
                {
                    StreamId       = StreamId.Create(TestStreamNamespace, streamId),
                    SequenceNumber = seqNumber,
                };

                cache.Add(new List <CachedMessage>()
                {
                    converter.ToCachedMessage(msg, now)
                }, now);
                seqNumber++;
                return msg.SequenceNumber;
            }
        }
Example #8
        /// <summary>
        /// Pooled cache for memory stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="logger"></param>
        /// <param name="serializer"></param>
        public MemoryPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, Logger logger, TSerializer serializer)
        {
            var dataAdapter = new CacheDataAdapter(bufferPool, serializer);

            cache = new PooledQueueCache <MemoryMessageData, MemoryMessageData>(dataAdapter, CacheDataComparer.Instance, logger);
            var evictionStrategy = new ExplicitEvictionStrategy();

            evictionStrategy.PurgeObservable = cache;
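            // Notify the eviction strategy whenever the adapter allocates a new pooled block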
            dataAdapter.OnBlockAllocated     = evictionStrategy.OnBlockAllocated;
        }
Example #9
        public void GoldenPathTest()
        {
            var bufferPool = new TestBlockPool();
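            // Declare the cache first so the adapter's purge callback can capture it; it is assigned after the adapter is constructed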
            PooledQueueCache <TestQueueMessage, TestCachedMessage>  cache       = null;
            ICacheDataAdapter <TestQueueMessage, TestCachedMessage> dataAdapter = new TestCacheDataAdapter(bufferPool,
                                                                                                           disposable => cache.Purge(disposable));

            cache = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter);
            RunGoldenPath(cache, 111);
        }
Example #10
 /// <summary>
 /// Pooled cache for memory stream provider
 /// </summary>
 /// <param name="bufferPool"></param>
 /// <param name="purgePredicate"></param>
 /// <param name="logger"></param>
 /// <param name="serializer"></param>
 /// <param name="cacheMonitor"></param>
 /// <param name="monitorWriteInterval">monitor write interval.  Only triggered for active caches.</param>
 public MemoryPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, TimePurgePredicate purgePredicate, ILogger logger, TSerializer serializer, ICacheMonitor cacheMonitor, TimeSpan? monitorWriteInterval)
 {
     this.bufferPool       = bufferPool;
     this.serializer       = serializer;
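     // This class acts as the data adapter itself, so it is passed as the first constructor argument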
     this.cache            = new PooledQueueCache(this, logger, cacheMonitor, monitorWriteInterval);
     this.evictionStrategy = new ChronologicalEvictionStrategy(logger, purgePredicate, cacheMonitor, monitorWriteInterval)
     {
         PurgeObservable = cache
     };
 }
Example #11
        /// <summary>
        /// Pooled cache for generator stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="logger"></param>
        /// <param name="serializationManager"></param>
        public GeneratorPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, Logger logger, SerializationManager serializationManager)
        {
            var dataAdapter = new CacheDataAdapter(bufferPool, serializationManager);

            cache = new PooledQueueCache <GeneratedBatchContainer, CachedMessage>(dataAdapter, CacheDataComparer.Instance, logger);
            var evictionStrategy = new ExplicitEvictionStrategy();

            evictionStrategy.PurgeObservable = cache;
            dataAdapter.OnBlockAllocated     = evictionStrategy.OnBlockAllocated;
        }
Example #12
        /// <summary>
        /// Construct EventHub queue cache.
        /// </summary>
        /// <param name="defaultMaxAddCount">Default max number of items that can be added to the cache between purge calls.</param>
        /// <param name="flowControlThreshold">percentage of unprocesses cache that triggers flow control</param>
        /// <param name="checkpointer">Logic used to store queue position.</param>
        /// <param name="cacheDataAdapter">Performs data transforms appropriate for the various types of queue data.</param>
        /// <param name="comparer">Compares cached data</param>
        /// <param name="logger"></param>
        protected EventHubQueueCache(int defaultMaxAddCount, double flowControlThreshold, IStreamQueueCheckpointer <string> checkpointer, ICacheDataAdapter <EventData, TCachedMessage> cacheDataAdapter, ICacheDataComparer <TCachedMessage> comparer, Logger logger)
        {
            this.defaultMaxAddCount = defaultMaxAddCount;
            Checkpointer            = checkpointer;
            cache = new PooledQueueCache <EventData, TCachedMessage>(cacheDataAdapter, comparer, logger);
            cacheDataAdapter.PurgeAction = cache.Purge;
            cache.OnPurged = OnPurge;

            cachePressureMonitor = new AveragingCachePressureMonitor(flowControlThreshold, logger);
        }
Example #13
        /// <summary>
        /// Pooled cache for memory stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="purgePredicate"></param>
        /// <param name="logger"></param>
        /// <param name="serializer"></param>
        /// <param name="cacheMonitor"></param>
        /// <param name="monitorWriteInterval">monitor write interval.  Only triggered for active caches.</param>
        public MemoryPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, TimePurgePredicate purgePredicate, Logger logger, TSerializer serializer, ICacheMonitor cacheMonitor, TimeSpan? monitorWriteInterval)
        {
            var dataAdapter = new CacheDataAdapter(bufferPool, serializer);

            cache = new PooledQueueCache <MemoryMessageData, MemoryMessageData>(dataAdapter, CacheDataComparer.Instance, logger, cacheMonitor, monitorWriteInterval);
            this.evictionStrategy = new MemoryPooledCacheEvictionStrategy(logger, purgePredicate, cacheMonitor, monitorWriteInterval)
            {
                PurgeObservable = cache
            };
            EvictionStrategyCommonUtils.WireUpEvictionStrategy <MemoryMessageData, MemoryMessageData>(cache, dataAdapter, evictionStrategy);
        }
        public int Size => 0; // FIXME

        public FastPipeQueueCache(IObjectPool <FixedSizeBuffer> bufferPool, QueueId id)
        {
            if (bufferPool == null)
            {
                throw new ArgumentNullException("bufferPool");
            }
            this.bufferPool = bufferPool;
            Id         = id;
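            // The purge lambda below captures 'cache', which is assigned afterwards; the reference is resolved when the callback runs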
            operations = new CacheDataAdapter(bufferPool, disposable => cache.Purge(disposable));
            cache      = new PooledQueueCache <PlainBatchContainer, CachedMessage>(operations);
        }
Example #15
        public void SimpleCacheMiss()
        {
            var bufferPool       = new ObjectPool <FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
            var dataAdapter      = new TestCacheDataAdapter();
            var cache            = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null, TimeSpan.FromSeconds(10));
            var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(1)), null, null);

            evictionStrategy.PurgeObservable = cache;
            var converter = new CachedMessageConverter(bufferPool, evictionStrategy);

            int idx;
            var seqNumber = 123;
            var stream    = StreamId.Create(TestStreamNamespace, Guid.NewGuid());

            // First and last messages destined for stream, following messages
            // destined for other streams
            for (idx = 0; idx < 20; idx++)
            {
                var now = DateTime.UtcNow;
                var msg = new TestQueueMessage
                {
                    StreamId       = (idx == 0) ? stream : StreamId.Create(TestStreamNamespace, Guid.NewGuid()),
                    SequenceNumber = seqNumber + idx,
                };
                cache.Add(new List <CachedMessage>()
                {
                    converter.ToCachedMessage(msg, now)
                }, now);
            }
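            // Cursor starts at the first message enqueued for the stream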

            var cursor = cache.GetCursor(stream, new EventSequenceTokenV2(seqNumber));

            // Remove first message
            cache.RemoveOldestMessage();

            // Enqueue a new message for stream
            {
                idx++;
                var now = DateTime.UtcNow;
                var msg = new TestQueueMessage
                {
                    StreamId       = stream,
                    SequenceNumber = seqNumber + idx,
                };
                cache.Add(new List <CachedMessage>()
                {
                    converter.ToCachedMessage(msg, now)
                }, now);
            }

            // Should throw since we missed the first message
            Assert.Throws <QueueCacheMissException>(() => cache.TryGetNextMessage(cursor, out _));
        }
Example #16
 /// <summary>
 /// Construct EventHub queue cache.
 /// </summary>
 /// <param name="defaultMaxAddCount">Default max number of items that can be added to the cache between purge calls.</param>
 /// <param name="checkpointer">Logic used to store queue position.</param>
 /// <param name="cacheDataAdapter">Performs data transforms appropriate for the various types of queue data.</param>
 /// <param name="comparer">Compares cached data</param>
 /// <param name="logger"></param>
 /// <param name="evictionStrategy">Eviction stretagy manage purge related events</param>
 protected EventHubQueueCache(int defaultMaxAddCount, IStreamQueueCheckpointer <string> checkpointer, ICacheDataAdapter <EventData, TCachedMessage> cacheDataAdapter,
                              ICacheDataComparer <TCachedMessage> comparer, Logger logger, IEvictionStrategy <TCachedMessage> evictionStrategy)
 {
     this.defaultMaxAddCount = defaultMaxAddCount;
     Checkpointer            = checkpointer;
     cache = new PooledQueueCache <EventData, TCachedMessage>(cacheDataAdapter, comparer, logger);
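     // Wire the eviction strategy to observe purges and new block allocations on this cache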
     this.evictionStrategy                 = evictionStrategy;
     this.evictionStrategy.OnPurged        = this.OnPurge;
     this.evictionStrategy.PurgeObservable = cache;
     cacheDataAdapter.OnBlockAllocated     = this.evictionStrategy.OnBlockAllocated;
     this.cachePressureMonitor             = new AggregatedCachePressureMonitor(logger);
 }
        public void GoldenPathTest()
        {
            var bufferPool       = new TestBlockPool();
            var dataAdapter      = new TestCacheDataAdapter(bufferPool);
            var cache            = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance, NoOpTestLogger.Instance);
            var evictionStrategy = new ExplicitEvictionStrategy();

            evictionStrategy.PurgeObservable = cache;
            dataAdapter.OnBlockAllocated     = evictionStrategy.OnBlockAllocated;

            RunGoldenPath(cache, 111);
        }
        public void GoldenPathTest()
        {
            var bufferPool       = new ObjectPool <FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
            var dataAdapter      = new TestCacheDataAdapter(bufferPool);
            var cache            = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance, NullLogger.Instance, null, null);
            var evictionStrategy = new EvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(10)), null, null);

            evictionStrategy.PurgeObservable = cache;
            dataAdapter.OnBlockAllocated     = evictionStrategy.OnBlockAllocated;

            RunGoldenPath(cache, 111);
        }
Example #19
        /// <summary>
        /// Pooled cache for generator stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="logger"></param>
        /// <param name="serializationManager"></param>
        /// <param name="cacheMonitor"></param>
        /// <param name="monitorWriteInterval">monitor write interval.  Only triggered for active caches.</param>
        public GeneratorPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, ILogger logger, SerializationManager serializationManager, ICacheMonitor cacheMonitor, TimeSpan? monitorWriteInterval)
        {
            this.bufferPool           = bufferPool;
            this.serializationManager = serializationManager;
            cache = new PooledQueueCache(this, logger, cacheMonitor, monitorWriteInterval);
            TimePurgePredicate purgePredicate = new TimePurgePredicate(TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(10));

            this.evictionStrategy = new ChronologicalEvictionStrategy(logger, purgePredicate, cacheMonitor, monitorWriteInterval)
            {
                PurgeObservable = cache
            };
        }
Example #20
        public void GoldenPathTest()
        {
            var bufferPool       = new ObjectPool <FixedSizeBuffer>(() => new FixedSizeBuffer(PooledBufferSize));
            var dataAdapter      = new TestCacheDataAdapter();
            var cache            = new PooledQueueCache(dataAdapter, NullLogger.Instance, null, null);
            var evictionStrategy = new ChronologicalEvictionStrategy(NullLogger.Instance, new TimePurgePredicate(TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(10)), null, null);

            evictionStrategy.PurgeObservable = cache;
            var converter = new CachedMessageConverter(bufferPool, evictionStrategy);

            RunGoldenPath(cache, converter, 111);
        }
Example #21
 /// <summary>
 /// Construct EventHub queue cache.
 /// </summary>
 /// <param name="defaultMaxAddCount">Default max number of items that can be added to the cache between purge calls.</param>
 /// <param name="checkpointer">Logic used to store queue position.</param>
 /// <param name="cacheDataAdapter">Performs data transforms appropriate for the various types of queue data.</param>
 /// <param name="comparer">Compares cached data</param>
 /// <param name="logger"></param>
 /// <param name="evictionStrategy">Eviction stretagy manage purge related events</param>
 /// <param name="cacheMonitor"></param>
 /// <param name="cacheMonitorWriteInterval"></param>
 protected EventHubQueueCache(int defaultMaxAddCount, IStreamQueueCheckpointer <string> checkpointer, ICacheDataAdapter <EventData, TCachedMessage> cacheDataAdapter,
                              ICacheDataComparer <TCachedMessage> comparer, Logger logger, IEvictionStrategy <TCachedMessage> evictionStrategy,
                              ICacheMonitor cacheMonitor, TimeSpan? cacheMonitorWriteInterval)
 {
     this.defaultMaxAddCount = defaultMaxAddCount;
     Checkpointer            = checkpointer;
     cache                          = new PooledQueueCache <EventData, TCachedMessage>(cacheDataAdapter, comparer, logger, cacheMonitor, cacheMonitorWriteInterval);
     this.cacheMonitor              = cacheMonitor;
     this.evictionStrategy          = evictionStrategy;
     this.evictionStrategy.OnPurged = this.OnPurge;
     this.cachePressureMonitor      = new AggregatedCachePressureMonitor(logger, cacheMonitor);
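     // Helper that wires the data adapter and eviction strategy to the cache (compare the manual PurgeObservable/OnBlockAllocated wiring in the other constructors)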
     EvictionStrategyCommonUtils.WireUpEvictionStrategy <EventData, TCachedMessage>(this.cache, cacheDataAdapter, this.evictionStrategy);
 }
Example #22
        public void CacheDrainTest()
        {
            var bufferPool  = new TestBlockPool();
            var dataAdapter = new TestCacheDataAdapter(bufferPool);
            var cache       = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance);

            dataAdapter.PurgeAction = cache.Purge;
            int startSequenceNumber = 222;

            startSequenceNumber = RunGoldenPath(cache, startSequenceNumber);
            bufferPool.PurgeAll();
            RunGoldenPath(cache, startSequenceNumber);
        }
Example #23
        /// <summary>
        /// Pooled cache for generator stream provider
        /// </summary>
        /// <param name="bufferPool"></param>
        /// <param name="logger"></param>
        /// <param name="serializationManager"></param>
        /// <param name="cacheMonitor"></param>
        /// <param name="monitorWriteInterval"></param>
        public GeneratorPooledCache(IObjectPool <FixedSizeBuffer> bufferPool, Logger logger, SerializationManager serializationManager, ICacheMonitor cacheMonitor, TimeSpan? monitorWriteInterval)
        {
            var dataAdapter = new CacheDataAdapter(bufferPool, serializationManager);

            cache = new PooledQueueCache <GeneratedBatchContainer, CachedMessage>(dataAdapter, CacheDataComparer.Instance, logger, cacheMonitor, monitorWriteInterval);
            TimePurgePredicate purgePredicate = new TimePurgePredicate(TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(10));

            this.evictionStrategy = new GeneratorPooledCacheEvictionStrategy(logger, purgePredicate, cacheMonitor, monitorWriteInterval)
            {
                PurgeObservable = cache
            };
            EvictionStrategyCommonUtils.WireUpEvictionStrategy(cache, dataAdapter, this.evictionStrategy);
        }
Example #24
        public void CacheDrainTest()
        {
            var bufferPool = new TestBlockPool();
            PooledQueueCache <TestQueueMessage, TestCachedMessage>  cache       = null;
            ICacheDataAdapter <TestQueueMessage, TestCachedMessage> dataAdapter = new TestCacheDataAdapter(bufferPool,
                                                                                                           disposable => cache.Purge(disposable));

            cache = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter);
            int startSequenceNumber = 222;

            startSequenceNumber = RunGoldenPath(cache, startSequenceNumber);
            bufferPool.PurgeAll();
            RunGoldenPath(cache, startSequenceNumber);
        }
Example #25
        public async Task Initialize(TimeSpan timeout)
        {
            var dataAdapter = new EventHubDataAdapter(bufferPool);

            cache = new PooledQueueCache <EventData, CachedEventHubMessage>(dataAdapter)
            {
                OnPurged = OnPurged
            };
            dataAdapter.PurgeAction = cache.Purge;
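            // Resume from the last checkpointed offset for this partition before creating the receiver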
            checkpoint = await EventHubPartitionCheckpoint.Create(config.CheckpointSettings, config.StreamProviderName, config.Partition);

            string offset = await checkpoint.Load();

            receiver = await CreateReceiver(config, offset);
        }
 public Cursor(PooledQueueCache <PlainBatchContainer, CachedMessage> cache, Guid streamGuid, string streamNamespace, StreamSequenceToken token)
 {
     this.cache = cache;
     cursor     = cache.GetCursor(streamGuid, streamNamespace, token);
 }
Example #27
 public Cursor(PooledQueueCache <GeneratedBatchContainer, CachedMessage> cache, IStreamIdentity streamIdentity, StreamSequenceToken token)
 {
     this.cache = cache;
     cursor     = cache.GetCursor(streamIdentity, token);
 }
Example #28
        public void QueueCacheMissTest()
        {
            var bufferPool  = new TestBlockPool();
            var dataAdapter = new TestCacheDataAdapter(bufferPool);
            var cache       = new PooledQueueCache <TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance);

            dataAdapter.PurgeAction = cache.Purge;
            int             sequenceNumber = 10;
            IBatchContainer batch;

            IStreamIdentity streamId = new TestStreamIdentity {
                Guid = Guid.NewGuid(), Namespace = StreamNamespace
            };

            // No data in cache, cursors should not throw.
            object cursor = cache.GetCursor(streamId, new EventSequenceToken(sequenceNumber++));

            Assert.IsNotNull(cursor);

            // try to iterate; no data yet, so this should return false without throwing
            bool gotNext = cache.TryGetNextMessage(cursor, out batch);

            Assert.IsNotNull(cursor);
            Assert.IsFalse(gotNext);

            // now add messages into cache newer than cursor
            // Adding enough to fill the pool
            for (int i = 0; i < MessagesPerBuffer * PooledBufferCount; i++)
            {
                cache.Add(new TestQueueMessage
                {
                    StreamGuid      = streamId.Guid,
                    StreamNamespace = StreamNamespace,
                    SequenceNumber  = sequenceNumber++,
                });
            }

            // now that there is data and the cursor points to a position older than anything in the cache, using the cursor should throw
            Exception ex = null;

            try
            {
                cache.TryGetNextMessage(cursor, out batch);
            }
            catch (QueueCacheMissException cacheMissException)
            {
                ex = cacheMissException;
            }
            Assert.IsNotNull(ex);

            // Try getting a new cursor for a position before the start of the cache. Should throw.
            ex = null;
            try
            {
                cursor = cache.GetCursor(streamId, new EventSequenceToken(10));
            }
            catch (QueueCacheMissException cacheMissException)
            {
                ex = cacheMissException;
            }
            Assert.IsNotNull(ex);

            // Get valid cursor into cache
            cursor = cache.GetCursor(streamId, new EventSequenceToken(13));
            // query once, to make sure cursor is good
            gotNext = cache.TryGetNextMessage(cursor, out batch);
            Assert.IsNotNull(cursor);
            Assert.IsTrue(gotNext);
            // Since pool should be full, adding one more message should trigger the cache to purge.
            cache.Add(new TestQueueMessage
            {
                StreamGuid      = streamId.Guid,
                StreamNamespace = StreamNamespace,
                SequenceNumber  = sequenceNumber++,
            });
            // After purge, use of cursor should throw.
            ex = null;
            try
            {
                cache.TryGetNextMessage(cursor, out batch);
            }
            catch (QueueCacheMissException cacheMissException)
            {
                ex = cacheMissException;
            }
            Assert.IsNotNull(ex);
        }
Example #29
        private int RunGoldenPath(PooledQueueCache <TestQueueMessage, TestCachedMessage> cache, int startOfCache)
        {
            int             sequenceNumber = startOfCache;
            IBatchContainer batch;

            IStreamIdentity stream1 = new TestStreamIdentity {
                Guid = Guid.NewGuid(), Namespace = StreamNamespace
            };
            IStreamIdentity stream2 = new TestStreamIdentity {
                Guid = Guid.NewGuid(), Namespace = StreamNamespace
            };

            // now add messages into cache newer than cursor
            // Adding enough to fill the pool
            for (int i = 0; i < MessagesPerBuffer * PooledBufferCount; i++)
            {
                cache.Add(new TestQueueMessage
                {
                    StreamGuid      = i % 2 == 0 ? stream1.Guid : stream2.Guid,
                    StreamNamespace = StreamNamespace,
                    SequenceNumber  = sequenceNumber++,
                });
            }

            // get cursor for stream1, walk all the events in the stream using the cursor
            object stream1Cursor     = cache.GetCursor(stream1, new EventSequenceToken(startOfCache));
            int    stream1EventCount = 0;

            while (cache.TryGetNextMessage(stream1Cursor, out batch))
            {
                Assert.IsNotNull(stream1Cursor);
                Assert.IsNotNull(batch);
                Assert.AreEqual(stream1.Guid, batch.StreamGuid);
                Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
                Assert.IsNotNull(batch.SequenceToken);
                stream1EventCount++;
            }
            Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream1EventCount);

            // get cursor for stream2, walk all the events in the stream using the cursor
            object stream2Cursor     = cache.GetCursor(stream2, new EventSequenceToken(startOfCache));
            int    stream2EventCount = 0;

            while (cache.TryGetNextMessage(stream2Cursor, out batch))
            {
                Assert.IsNotNull(stream2Cursor);
                Assert.IsNotNull(batch);
                Assert.AreEqual(stream2.Guid, batch.StreamGuid);
                Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
                Assert.IsNotNull(batch.SequenceToken);
                stream2EventCount++;
            }
            Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream2EventCount);

            // Add a block's worth of events to the cache, then walk each cursor. Do this enough times to fill the cache twice.
            for (int j = 0; j < PooledBufferCount * 2; j++)
            {
                for (int i = 0; i < MessagesPerBuffer; i++)
                {
                    cache.Add(new TestQueueMessage
                    {
                        StreamGuid      = i % 2 == 0 ? stream1.Guid : stream2.Guid,
                        StreamNamespace = StreamNamespace,
                        SequenceNumber  = sequenceNumber++,
                    });
                }

                // walk all the events in the stream using the cursor
                while (cache.TryGetNextMessage(stream1Cursor, out batch))
                {
                    Assert.IsNotNull(stream1Cursor);
                    Assert.IsNotNull(batch);
                    Assert.AreEqual(stream1.Guid, batch.StreamGuid);
                    Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
                    Assert.IsNotNull(batch.SequenceToken);
                    stream1EventCount++;
                }
                Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream1EventCount);

                // walk all the events in the stream using the cursor
                while (cache.TryGetNextMessage(stream2Cursor, out batch))
                {
                    Assert.IsNotNull(stream2Cursor);
                    Assert.IsNotNull(batch);
                    Assert.AreEqual(stream2.Guid, batch.StreamGuid);
                    Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
                    Assert.IsNotNull(batch.SequenceToken);
                    stream2EventCount++;
                }
                Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream2EventCount);
            }
            return sequenceNumber;
        }
Example #30
 public Cursor(PooledQueueCache <MemoryMessageData, MemoryMessageData> cache, IStreamIdentity streamIdentity,
               StreamSequenceToken token)
 {
     this.cache = cache;
     cursor     = cache.GetCursor(streamIdentity, token);
 }