Example #1
        private List<IBatchContainer> DrainBucket(CacheBucket bucket)
        {
            var itemsToRelease = new List<IBatchContainer>(bucket.NumCurrentItems);

            // Walk the cached items starting from the last (oldest) one and remove
            // the ones that reside in the given bucket, stopping once we reach the next bucket.
            while (bucket.NumCurrentItems > 0)
            {
                SimpleQueueCacheItem item = cachedMessages.Last.Value;
                if (item.CacheBucket.Equals(bucket))
                {
                    if (!item.DeliveryFailure)
                    {
                        itemsToRelease.Add(item.Batch);
                    }
                    bucket.UpdateNumItems(-1);
                    cachedMessages.RemoveLast();
                }
                else
                {
                    // this item already points into the next bucket, so stop.
                    break;
                }
            }
            return itemsToRelease;
        }
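
Example #1 above and the Add variants below reference two helper types that are not shown. The sketch below is only an inference from how the examples use them (NumCurrentItems, UpdateNumItems, Batch, SequenceToken, CacheBucket, DeliveryFailure); the real types inside Orleans' SimpleQueueCache have more to them, and NumCurrentCursors in particular is an assumption that none of the shown code touches.

        // Hedged sketch: minimal bucket shape assumed by the examples above.
        internal class CacheBucket
        {
            // Number of cached items currently assigned to this bucket.
            internal int NumCurrentItems { get; private set; }

            // Assumed: number of active cursors still pointing into this bucket
            // (not used by the examples shown; see the drain driver sketched after Example #4).
            internal int NumCurrentCursors { get; private set; }

            // Adjust the item counter by a positive or negative delta.
            internal void UpdateNumItems(int delta)
            {
                NumCurrentItems += delta;
            }

            // Adjust the assumed cursor counter by a positive or negative delta.
            internal void UpdateNumCursors(int delta)
            {
                NumCurrentCursors += delta;
            }
        }

        // Hedged sketch: the cache entry the examples build in Add and release in DrainBucket.
        internal class SimpleQueueCacheItem
        {
            internal IBatchContainer Batch;             // Orleans streaming batch, as referenced above
            internal StreamSequenceToken SequenceToken; // position of the batch in the stream
            internal CacheBucket CacheBucket;           // bucket this entry is counted in
            internal bool DeliveryFailure;              // checked by DrainBucket before releasing the batch
        }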
Example #2
        private void Add(IBatchContainer batch, StreamSequenceToken sequenceToken)
        {
            if (batch == null)
            {
                throw new ArgumentNullException(nameof(batch));
            }

            CacheBucket cacheBucket = null;

            if (cacheCursorHistogram.Count == 0)
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }
            else
            {
                cacheBucket = cacheCursorHistogram[cacheCursorHistogram.Count - 1]; // last one
            }

            if (cacheBucket.NumCurrentItems == CACHE_HISTOGRAM_MAX_BUCKET_SIZE) // last bucket is full, open a new one
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }

            // Add message to linked list
            var item = new SimpleQueueCacheItem
            {
                Batch         = batch,
                SequenceToken = sequenceToken,
                CacheBucket   = cacheBucket
            };

            cachedMessages.AddFirst(new LinkedListNode<SimpleQueueCacheItem>(item));
            cacheBucket.UpdateNumItems(1);

            if (Size > maxCacheSize)
            {
                // Evict the oldest cached message (the tail of the linked list);
                // it belongs to the oldest bucket, i.e. cacheCursorHistogram[0].
                cachedMessages.RemoveLast();
                var bucket = cacheCursorHistogram[0];
                bucket.UpdateNumItems(-1);
                if (bucket.NumCurrentItems == 0)
                {
                    cacheCursorHistogram.RemoveAt(0);
                }
            }
        }
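
Besides those helper types, Add relies on private state of the containing cache class. Below is a hedged sketch of what that state could look like, with the field names taken from the snippets themselves and the sizes being illustrative assumptions only.

        // Hedged sketch of the containing class's state assumed by the Add examples
        // (requires System.Collections.Generic and the sketched types above).
        private readonly LinkedList<SimpleQueueCacheItem> cachedMessages = new LinkedList<SimpleQueueCacheItem>(); // newest at the head, oldest at the tail
        private readonly List<CacheBucket> cacheCursorHistogram = new List<CacheBucket>();                         // oldest bucket first, newest last
        private const int CACHE_HISTOGRAM_MAX_BUCKET_SIZE = 10; // assumed bucket capacity
        private readonly int maxCacheSize = 1024;               // assumed overall cache capacity
        private int Size => cachedMessages.Count;               // assumed definition of Size

The eviction branch at the end works because new items always go to the head of cachedMessages and new buckets to the end of cacheCursorHistogram, so the evicted tail item is always counted in cacheCursorHistogram[0].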
Example #3
        private void Add(IBatchContainer batch, StreamSequenceToken sequenceToken)
        {
            if (batch == null)
            {
                throw new ArgumentNullException(nameof(batch));
            }
            // this should never happen, but just in case
            if (Size >= maxCacheSize)
            {
                throw new CacheFullException();
            }

            CacheBucket cacheBucket;

            if (cacheCursorHistogram.Count == 0)
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }
            else
            {
                cacheBucket = cacheCursorHistogram[cacheCursorHistogram.Count - 1]; // last one
            }

            if (cacheBucket.NumCurrentItems == CACHE_HISTOGRAM_MAX_BUCKET_SIZE) // last bucket is full, open a new one
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }

            // Add message to linked list
            var item = new SimpleQueueCacheItem
            {
                Batch         = batch,
                SequenceToken = sequenceToken,
                CacheBucket   = cacheBucket
            };

            cachedMessages.AddFirst(new LinkedListNode<SimpleQueueCacheItem>(item));
            cacheBucket.UpdateNumItems(1);
        }
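
This variant differs from Examples #2 and #4: when the cache is full it refuses the new item instead of evicting the oldest one. The CacheFullException it throws is not shown above; the stand-in below is a hedged sketch so the snippet can compile on its own (the real Orleans exception type carries more context than this, and assumes System is in scope).

        // Hedged stand-in for the exception thrown above; the real type differs.
        [Serializable]
        public class CacheFullException : Exception
        {
            public CacheFullException() : base("The queue message cache is full") { }
            public CacheFullException(string message) : base(message) { }
        }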
Example #4
        private void Add(IBatchContainer batch, StreamSequenceToken sequenceToken)
        {
            if (batch == null) throw new ArgumentNullException(nameof(batch));

            CacheBucket cacheBucket = null;
            if (cacheCursorHistogram.Count == 0)
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }
            else
            {
                cacheBucket = cacheCursorHistogram[cacheCursorHistogram.Count - 1]; // last one
            }

            if (cacheBucket.NumCurrentItems == CACHE_HISTOGRAM_MAX_BUCKET_SIZE) // last bucket is full, open a new one
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }

            // Add message to linked list
            var item = new SimpleQueueCacheItem
            {
                Batch = batch,
                SequenceToken = sequenceToken,
                CacheBucket = cacheBucket
            };

            cachedMessages.AddFirst(new LinkedListNode<SimpleQueueCacheItem>(item));
            cacheBucket.UpdateNumItems(1);

            if (Size > maxCacheSize)
            {
                // Evict the oldest cached message (the tail of the linked list);
                // it belongs to the oldest bucket, i.e. cacheCursorHistogram[0].
                cachedMessages.RemoveLast();
                var bucket = cacheCursorHistogram[0];
                bucket.UpdateNumItems(-1);
                if (bucket.NumCurrentItems == 0)
                {
                    cacheCursorHistogram.RemoveAt(0);
                }
            }
        }
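
Example #1's DrainBucket only empties a single bucket, and nothing above shows what decides that a bucket may be drained. One plausible driver, assuming the bucket tracks how many active cursors still point into it (the NumCurrentCursors counter assumed in the sketch after Example #1), would walk the histogram from the oldest bucket forward:

        // Hedged illustration only, not the actual Orleans logic: release every
        // leading bucket that no cursor still points into.
        private List<IBatchContainer> DrainFullyReadBuckets()
        {
            var released = new List<IBatchContainer>();
            while (cacheCursorHistogram.Count > 0 && cacheCursorHistogram[0].NumCurrentCursors == 0)
            {
                // DrainBucket (Example #1) removes the bucket's items from the tail of cachedMessages.
                released.AddRange(DrainBucket(cacheCursorHistogram[0]));
                cacheCursorHistogram.RemoveAt(0);
            }
            return released;
        }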