Example No. 1
        public void Add(IBatchContainer batch, StreamSequenceToken sequenceToken)
        {
            if (batch == null)
            {
                throw new ArgumentNullException("batch");
            }

            // Add message to linked list
            var item = new CacheItem
            {
                Batch         = batch,
                SequenceToken = (EventSequenceToken)sequenceToken
            };

            messages.AddFirst(new LinkedListNode <CacheItem>(item));

            if (cachedMessageCount < cacheSize)
            {
                cachedMessageCount++;
            }
            else
            {
                messages.RemoveLast();
            }
        }
Example No. 2
        private bool ShouldDeliverBatch(StreamId streamId, IBatchContainer batchContainer, string filterData)
        {
            if (this.streamFilter is NoOpStreamFilter)
            {
                return(true);
            }

            try
            {
                foreach (var evt in batchContainer.GetEvents <object>())
                {
                    if (this.streamFilter.ShouldDeliver(streamId, evt.Item1, filterData))
                    {
                        return(true);
                    }
                }
                return(false);
            }
            catch (Exception exc)
            {
                var message = $"Ignoring exception while trying to evaluate subscription filter '{this.streamFilter.GetType().Name}' with data '{filterData}' on stream {streamId}";
                logger.Warn((int)ErrorCode.PersistentStreamPullingAgent_13, message, exc);
            }
            return(true);
        }
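As an aside, a minimal filter that could sit behind the streamFilter field above might look like the sketch below. The ShouldDeliver(streamId, item, filterData) shape is assumed from how it is invoked in this example, and PrefixStreamFilter is an illustrative name, not part of the original code.

        // Hypothetical filter: delivers only events whose string form starts with the
        // subscriber-supplied filterData. Interface shape assumed from the call above.
        public class PrefixStreamFilter : IStreamFilter
        {
            public bool ShouldDeliver(StreamId streamId, object item, string filterData)
            {
                // No filter data registered by the subscriber: deliver everything.
                if (string.IsNullOrEmpty(filterData))
                {
                    return true;
                }

                return item is string text && text.StartsWith(filterData, StringComparison.Ordinal);
            }
        }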
Example No. 3
        /// <summary>
        /// Adds a previously-constructed batch to a new frame/container.
        /// Use this if you already created a batch in a previous frame and wish to use it again.
        /// </summary>
        public void Reuse(IBatchContainer newContainer, int? newLayer = null)
        {
            lock (State) {
                if (Released)
                {
                    throw new ObjectDisposedException("batch");
                }
                else if (State.IsCombined)
                {
                    throw new InvalidOperationException("Batch was combined into another batch");
                }

                if (newLayer.HasValue)
                {
                    Layer = newLayer.Value;
                }

                if (!State.IsInitialized)
                {
                    throw new Exception("Not initialized");
                }

                if (State.IsPrepareQueued)
                {
                    throw new Exception("Batch currently queued for prepare");
                }

                State.IsPrepared = State.IsIssued = false;
            }

            newContainer.Add(this);
        }
Example No. 4
        /// <summary>
        /// Moves to the next message in the stream.
        /// If it returns false, there are no more messages; the enumerator remains
        /// valid, however, and can be called again once more data has arrived on
        /// this stream.
        /// </summary>
        /// <returns>True if the cursor advanced to another message; otherwise false.</returns>
        public virtual bool MoveNext()
        {
            if (current == null && IsSet && IsInStream(Element.Value.Batch))
            {
                current = Element.Value.Batch;
                return(true);
            }

            IBatchContainer next;

            while (cache.TryGetNextMessage(this, out next))
            {
                if (IsInStream(next))
                {
                    break;
                }
            }
            current = next;
            if (!IsInStream(next))
            {
                return(false);
            }

            return(true);
        }
Example No. 5
        protected virtual void OnReleaseResources()
        {
            if (Released)
            {
                return;
            }

            lock (State) {
                if (State.IsPrepareQueued)
                {
                    throw new Exception("Batch currently queued for prepare");
                }
                else if (!State.IsInitialized)
                {
                    throw new Exception("Batch uninitialized");
                }

                State.IsPrepared    = false;
                State.IsInitialized = false;

                Released = true;
                Pool.Release(this);

                Container = null;
                Material  = null;
            }
        }
Example No. 6
        private DateTime GetTimestampForItem(IBatchContainer batch)
        {
            // Check whether the batch came from a Kafka stream; if not, fall back to the current UTC time.
            var batchAsKafkaBatch = batch as KafkaBatchContainer;

            return(batchAsKafkaBatch == null ? DateTime.UtcNow : DateTime.ParseExact(batchAsKafkaBatch.Timestamp, "O", CultureInfo.InvariantCulture).ToUniversalTime());
        }
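For reference, the "O" (round-trip) format specifier used above is a standard .NET feature: it preserves sub-second precision and offset information, so the consumer can recover the exact instant the producer recorded. A minimal, self-contained illustration, unrelated to the Kafka types in this example:

        // Requires System, System.Diagnostics and System.Globalization.
        var utcNow = DateTime.UtcNow;
        var wire   = utcNow.ToString("O", CultureInfo.InvariantCulture);  // e.g. "2024-01-01T12:00:00.0000000Z"
        var parsed = DateTime.ParseExact(wire, "O", CultureInfo.InvariantCulture).ToUniversalTime();
        Debug.Assert(parsed == utcNow);  // the round trip preserves the exact UTC instant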
Example No. 7
        public static BitmapBatch New(
            IBatchContainer container, int layer, Material material,
            SamplerState samplerState = null, SamplerState samplerState2 = null,
            bool useZBuffer           = false, bool zBufferOnlySorting   = false,
            bool depthPrePass         = false, bool worldSpace = false,
            int? capacity = null
            )
        {
            if (container == null)
            {
                throw new ArgumentNullException("container");
            }
            if (material == null)
            {
                throw new ArgumentNullException("material");
            }
            if (material.Effect == null)
            {
                throw new ArgumentNullException("material.Effect");
            }

            var result = container.RenderManager.AllocateBatch <BitmapBatch>();

            result.Initialize(
                container, layer, material,
                samplerState, samplerState2 ?? samplerState,
                useZBuffer: useZBuffer, zBufferOnlySorting: zBufferOnlySorting,
                depthPrePass: depthPrePass, worldSpace: worldSpace,
                capacity: capacity
                );
            result.CaptureStack(0);
            return(result);
        }
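A typical call site, sketched on the assumption that this follows the Squared.Render convention also visible in the RenderOutlines example further down (batches are created per frame inside a using block and filled with draw calls). The names frame, Materials and texture are illustrative, and a BitmapDrawCall constructor taking a texture and a position is assumed.

        // Hypothetical usage; 'frame' is the IBatchContainer for the current frame.
        using (var bb = BitmapBatch.New(frame, layer: 1, material: Materials.ScreenSpaceBitmap)) {
            bb.Add(new BitmapDrawCall(texture, new Vector2(64, 64)));
        }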
Example No. 8
        /// <summary>
        /// Acquires the next message in the cache at the provided cursor
        /// </summary>
        /// <param name="cursor"></param>
        /// <param name="batch"></param>
        /// <returns></returns>
        internal bool TryGetNextMessage(SimpleQueueCacheCursor cursor, out IBatchContainer batch)
        {
            if (logger.IsEnabled(LogLevel.Debug))
            {
                logger.LogDebug("TryGetNextMessage: {Cursor}", cursor);
            }

            batch = null;
            if (!cursor.IsSet)
            {
                return(false);
            }

            // If we are at the end of the cache unset cursor and move offset one forward
            if (cursor.Element == cachedMessages.First)
            {
                UnsetCursor(cursor, null);
            }
            else // Advance to next:
            {
                AdvanceCursor(cursor, cursor.Element.Previous);
            }

            batch = cursor.Element?.Value.Batch;
            return(batch != null);
        }
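The consuming side of this method is the cursor's MoveNext, shown in other examples on this page. Assuming the surrounding types follow the Orleans IQueueCacheCursor surface (GetCacheCursor, MoveNext, GetCurrent), a pulling loop might look roughly like the sketch below; the exact GetCacheCursor signature varies between Orleans versions, and queueCache, streamId, sequenceToken and consumerData are illustrative locals.

        // Sketch only: drain whatever the cache currently holds for one stream.
        IQueueCacheCursor cursor = queueCache.GetCacheCursor(streamId, sequenceToken);
        while (cursor.MoveNext())
        {
            Exception ignored;
            IBatchContainer batch = cursor.GetCurrent(out ignored);
            await DeliverBatchToConsumer(consumerData, batch);  // see the delivery examples below
        }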
Example No. 9
        public async Task <StreamSequenceToken> DeliverBatch(IBatchContainer batch, StreamSequenceToken prevToken)
        {
            if (expectedToken != null)
            {
                if (!expectedToken.Equals(prevToken))
                {
                    return(expectedToken);
                }
            }

            foreach (var itemTuple in batch.GetEvents <T>())
            {
                await NextItem(itemTuple.Item1, itemTuple.Item2);
            }

            // check again, in case the expectedToken was changed indirectly via ResumeAsync()
            if (expectedToken != null)
            {
                if (!expectedToken.Equals(prevToken))
                {
                    return(expectedToken);
                }
            }

            expectedToken = batch.SequenceToken;

            return(null);
        }
Example No. 10
        public async Task <IList <IBatchContainer> > GetQueueMessagesAsync(int maxCount)
        {
            try
            {
                var pubSubRef = _pubSub; // store a direct ref, in case we are somehow asked to shut down while receiving.
                if (pubSubRef == null)
                {
                    return(new List <IBatchContainer>());
                }

                var task = pubSubRef.GetMessages(maxCount);
                _outstandingTask = task;
                IEnumerable <ReceivedMessage> messages = await task;

                List <IBatchContainer> pubSubMessages = new List <IBatchContainer>();
                foreach (var message in messages)
                {
                    IBatchContainer container = _dataAdapter.FromPullResponseMessage(message.Message, _lastReadMessage++);
                    pubSubMessages.Add(container);
                    _pending.Add(new PendingDelivery(container.SequenceToken, message));
                }

                return(pubSubMessages);
            }
            finally
            {
                _outstandingTask = null;
            }
        }
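The _pending list built here is typically drained when Orleans acknowledges delivery through the receiver's MessagesDeliveredAsync hook. The following is only a rough sketch of that counterpart, modelled on the pattern of the built-in adapters; it assumes _pending is a List of PendingDelivery with Token and Message members (inferred from the constructor call above), and AcknowledgeMessage is a hypothetical name for the pub/sub client's ack call.

        // Sketch only: acknowledge the underlying pub/sub message for each delivered batch.
        public async Task MessagesDeliveredAsync(IList<IBatchContainer> messages)
        {
            foreach (var batch in messages)
            {
                // Find the original pub/sub message that produced this batch.
                var entry = _pending.Find(p => p.Token.Equals(batch.SequenceToken));
                if (entry == null)
                {
                    continue;
                }

                _pending.Remove(entry);
                await _pubSub.AcknowledgeMessage(entry.Message);  // hypothetical ack API
            }
        }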
Example No. 11
        public void Initialize(IBatchContainer container, int layer, Material material, Action <DeviceManager, object> batchSetup, object userData)
        {
            base.Initialize(container, layer, material, true);

            _BatchSetup = batchSetup;
            _UserData   = userData;
        }
Example No. 12
        public ImperativeRenderer(
            IBatchContainer container,
            DefaultMaterialSet materials,
            int layer = 0, 
            RasterizerState rasterizerState = null,
            DepthStencilState depthStencilState = null,
            BlendState blendState = null,
            SamplerState samplerState = null,
            bool worldSpace = true,
            bool useZBuffer = false,
            bool autoIncrementSortKey = false,
            bool autoIncrementLayer = false
        )
        {
            if (container == null)
                throw new ArgumentNullException("container");
            if (materials == null)
                throw new ArgumentNullException("materials");

            Container = container;
            Materials = materials;
            Layer = layer;
            RasterizerState = rasterizerState;
            DepthStencilState = depthStencilState;
            BlendState = blendState;
            SamplerState = samplerState;
            UseZBuffer = useZBuffer;
            WorldSpace = worldSpace;
            AutoIncrementSortKey = autoIncrementSortKey;
            AutoIncrementLayer = autoIncrementLayer;
            NextSortKey = 0;
            PreviousBatch = null;
        }
Example No. 13
        public async Task <IList <IBatchContainer> > GetQueueMessagesAsync(int maxCount)
        {
            try
            {
                var queueRef = queue; // store a direct ref, in case we are somehow asked to shut down while receiving.
                if (queueRef == null)
                {
                    return(new List <IBatchContainer>());
                }

                int count = maxCount < 0 || maxCount == QueueAdapterConstants.UNLIMITED_GET_QUEUE_MSG ?
                            CloudQueueMessage.MaxNumberOfMessagesToPeek : Math.Min(maxCount, CloudQueueMessage.MaxNumberOfMessagesToPeek);

                var task = queueRef.GetQueueMessages(count);
                outstandingTask = task;
                IEnumerable <CloudQueueMessage> messages = await task;

                List <IBatchContainer> azureQueueMessages = new List <IBatchContainer>();
                foreach (var message in messages)
                {
                    IBatchContainer container = this.dataAdapter.FromCloudQueueMessage(message, lastReadMessage++);
                    azureQueueMessages.Add(container);
                    this.pending.Add(new PendingDelivery(container.SequenceToken, message));
                }

                return(azureQueueMessages);
            }
            finally
            {
                outstandingTask = null;
            }
        }
Example No. 14
        public void RenderOutlines(IBatchContainer container, int layer, bool showLights, Color? lineColor = null, Color? lightColor = null)
        {
            using (var group = BatchGroup.New(container, layer)) {
                using (var gb = GeometryBatch.New(group, 0, IlluminantMaterials.DebugOutlines)) {
                    VisualizerLineWriterInstance.Batch = gb;
                    VisualizerLineWriterInstance.Color = lineColor.GetValueOrDefault(Color.White);

                    foreach (var lo in Environment.Obstructions)
                    {
                        lo.GenerateLines(VisualizerLineWriterInstance);
                    }

                    VisualizerLineWriterInstance.Batch = null;
                }

                int i = 0;

                if (showLights)
                {
                    foreach (var lightSource in Environment.LightSources)
                    {
                        var cMax = lightColor.GetValueOrDefault(Color.White);
                        var cMin = cMax * 0.25f;

                        using (var gb = GeometryBatch.New(group, i + 1, IlluminantMaterials.DebugOutlines)) {
                            gb.AddFilledRing(lightSource.Position, 0f, 2f, cMax, cMax);
                            gb.AddFilledRing(lightSource.Position, lightSource.RampStart - 1f, lightSource.RampStart + 1f, cMax, cMax);
                            gb.AddFilledRing(lightSource.Position, lightSource.RampEnd - 1f, lightSource.RampEnd + 1f, cMin, cMin);
                        }

                        i += 1;
                    }
                }
            }
        }
Example No. 15
        public async Task <StreamHandshakeToken> DeliverBatch(IBatchContainer batch, StreamHandshakeToken handshakeToken)
        {
            // we validate expectedToken only for ordered (rewindable) streams
            if (expectedToken != null)
            {
                if (!expectedToken.Equals(handshakeToken))
                {
                    return(expectedToken);
                }
            }

            if (batch is IBatchContainerBatch)
            {
                var batchContainerBatch = batch as IBatchContainerBatch;
                await NextBatch(batchContainerBatch);
            }
            else
            {
                foreach (var itemTuple in batch.GetEvents <T>())
                {
                    await NextItem(itemTuple.Item1, itemTuple.Item2);
                }
            }

            if (IsRewindable)
            {
                expectedToken = StreamHandshakeToken.CreateDeliveyToken(batch.SequenceToken);
            }
            return(null);
        }
Example No. 16
        private async Task DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
        {
            if (batch.RequestContext != null)
            {
                RequestContext.Import(batch.RequestContext);
            }
            try
            {
                StreamSequenceToken prevToken = consumerData.LastToken;
                StreamSequenceToken newToken  = await consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable(), prevToken);

                if (newToken != null)
                {
                    consumerData.LastToken = newToken;
                    consumerData.Cursor    = queueCache.GetCacheCursor(consumerData.StreamId.Guid,
                                                                       consumerData.StreamId.Namespace, newToken);
                }
                else
                {
                    consumerData.LastToken = batch.SequenceToken; // this is the currently delivered token
                }
            }
            finally
            {
                if (batch.RequestContext != null)
                {
                    RequestContext.Clear();
                }
            }
        }
Example No. 17
        protected void Initialize(IBatchContainer container, int layer, Material material, bool addToContainer)
        {
            if (State.IsPrepareQueued)
            {
                throw new Exception("Batch currently queued for prepare");
            }

            if ((material != null) && (material.IsDisposed))
            {
                throw new ObjectDisposedException("material");
            }

            StackTrace = null;
            if (BatchesCombinedIntoThisOne != null)
            {
                BatchesCombinedIntoThisOne.Clear();
            }
            Released         = false;
            ReleaseAfterDraw = false;
            Layer            = layer;
            Material         = material;

            Index = Interlocked.Increment(ref _BatchCount);

            lock (State) {
                State.IsCombined    = false;
                State.IsInitialized = true;
                State.IsPrepared    = State.IsPrepareQueued = State.IsIssued = false;
            }

            if (addToContainer)
            {
                container.Add(this);
            }
        }
Example No. 18
 public void Draw(IBatchContainer container, int layer = 0)
 {
     foreach (var system in Systems)
     {
         system.Draw(this, container, layer);
     }
 }
Example No. 19
 public async Task DeliverBatch(IBatchContainer batch)
 {
     foreach (var itemTuple in batch.GetEvents <T>())
     {
         await DeliverItem(itemTuple.Item1, itemTuple.Item2);
     }
 }
Example No. 20
        private async Task DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
        {
            StreamSequenceToken        prevToken = consumerData.LastToken;
            Task <StreamSequenceToken> batchDeliveryTask;

            bool isRequestContextSet = batch.ImportRequestContext();

            try
            {
                batchDeliveryTask = consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable(), prevToken);
            }
            finally
            {
                if (isRequestContextSet)
                {
                    // clear RequestContext before await!
                    RequestContext.Clear();
                }
            }
            StreamSequenceToken newToken = await batchDeliveryTask;

            if (newToken != null)
            {
                consumerData.LastToken = newToken;
                consumerData.Cursor    = queueCache.GetCacheCursor(consumerData.StreamId.Guid,
                                                                   consumerData.StreamId.Namespace, newToken);
            }
            else
            {
                consumerData.LastToken = batch.SequenceToken; // this is the currently delivered token
            }
        }
Example No. 21
 /// <summary>
 /// Clean up cache data when done
 /// </summary>
 /// <param name="disposing"></param>
 protected virtual void Dispose(bool disposing)
 {
     if (disposing)
     {
         cache.UnsetCursor(this, null);
         current = null;
     }
 }
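This is the protected half of the standard .NET dispose pattern; the public entry point that usually accompanies it (not shown in the original snippet) is simply:

 public void Dispose()
 {
     // Delegate cleanup to the protected overload and suppress finalization.
     Dispose(true);
     GC.SuppressFinalize(this);
 }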
Example No. 22
        public static void Marker(IBatchContainer container, int layer, string format, params object[] values)
        {
            if (!EnableTracing)
            {
                return;
            }

            Marker(container, layer, String.Format(format, values));
        }
Example No. 23
        new protected void Initialize(
            IBatchContainer container, int layer, Material material,
            int? capacity = null
            )
        {
            base.Initialize(container, layer, material);

            _DrawCalls = _ListPool.Allocate(capacity);
        }
Example No. 24
        public void Initialize(IBatchContainer container, int layer, Action <DeviceManager, object> before, Action <DeviceManager, object> after, object userData)
        {
            base.Initialize(container, layer, null);

            RenderManager = container.RenderManager;
            _Before       = before;
            _After        = after;
            _UserData     = userData;
        }
Example No. 25
 new protected void Initialize(
     IBatchContainer container, int layer, Material material,
      bool addToContainer, int? capacity = null
     )
 {
     _DrawCalls.ListPool = _ListPool;
     _DrawCalls.Clear();
     base.Initialize(container, layer, material, addToContainer);
 }
Example No. 26
            public CachedBatch(
                CachedBatchType cbt,
                IBatchContainer container,
                int layer,
                bool worldSpace,
                RasterizerState rasterizerState,
                DepthStencilState depthStencilState,
                BlendState blendState,
                SamplerState samplerState,
                Material customMaterial,
                bool useZBuffer
                )
            {
                Batch     = null;
                BatchType = cbt;
                Container = container;
                Layer     = layer;
                // FIXME: Mask if multimaterial?
                WorldSpace = worldSpace;
                UseZBuffer = useZBuffer;

                if (cbt != CachedBatchType.MultimaterialBitmap)
                {
                    RasterizerState   = rasterizerState;
                    DepthStencilState = depthStencilState;
                    BlendState        = blendState;
                    SamplerState      = samplerState;
                    CustomMaterial    = customMaterial;
                }
                else
                {
                    RasterizerState   = null;
                    DepthStencilState = null;
                    BlendState        = null;
                    SamplerState      = null;
                    CustomMaterial    = null;
                }

                HashCode = Container.GetHashCode() ^
                           Layer.GetHashCode();

                if (BlendState != null)
                {
                    HashCode ^= BlendState.GetHashCode();
                }

                if (SamplerState != null)
                {
                    HashCode ^= SamplerState.GetHashCode();
                }

                if (CustomMaterial != null)
                {
                    HashCode ^= CustomMaterial.GetHashCode();
                }
            }
Example No. 27
        public void Initialize(IBatchContainer container, int layer, Action <DeviceManager, object> before, Action <DeviceManager, object> after, object userData, bool addToContainer = true)
        {
            base.Initialize(container, layer, null, addToContainer);

            RenderManager  = container.RenderManager;
            _Before        = before;
            _After         = after;
            _UserData      = userData;
            IsReleased     = false;
            OcclusionQuery = null;
        }
Example No. 28
        private Task TrackMessage(IBatchContainer container)
        {
            if (!_options.MessageTrackingEnabled)
            {
                return(Task.CompletedTask);
            }

            var trackingGrain = _grainFactory.GetMessageTrackerGrain(_queueProperties.QueueName);

            return(trackingGrain.Track(container));
        }
Example No. 29
        public static void Marker(IBatchContainer container, int layer, string name)
        {
            if (!EnableTracing)
            {
                return;
            }

            var batch = new MarkerBatch(layer, name);

            container.Add(batch);
        }
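A call site for this and the format-string Marker overload shown elsewhere on this page might look like the following; the enclosing static class is assumed to be RenderTrace from Squared.Render, tracing is assumed to have been enabled elsewhere, and frame, lightIndex and lightCount are illustrative locals.

        // Hypothetical usage: markers are only emitted while tracing is switched on.
        RenderTrace.Marker(frame, 0, "Begin lighting pass");
        RenderTrace.Marker(frame, 1, "Light {0} of {1}", lightIndex, lightCount);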
Example No. 30
            public bool MoveNext()
            {
                IBatchContainer next;
                if (!cache.TryGetNextMessage(cursor, out next))
                {
                    return false;
                }

                current = next;
                return true;
            }
Example No. 31
 /// <summary>
 /// Cursor into a simple queue cache
 /// </summary>
 /// <param name="cache"></param>
 /// <param name="streamIdentity"></param>
 /// <param name="logger"></param>
 public SimpleQueueCacheCursor(SimpleQueueCache cache, IStreamIdentity streamIdentity, Logger logger)
 {
     if (cache == null)
     {
         throw new ArgumentNullException("cache");
     }
     this.cache = cache;
     this.streamIdentity = streamIdentity;
     this.logger = logger;
     current = null;
     SimpleQueueCache.Log(logger, "SimpleQueueCacheCursor New Cursor for {0}, {1}", streamIdentity.Guid, streamIdentity.Namespace);
 }
Example No. 32
            public bool MoveNext()
            {
                IBatchContainer next;

                if (!this.cache.TryGetNextMessage(this.cursor, out next))
                {
                    return(false);
                }

                this.current = next;
                return(true);
            }
Example No. 33
 public SimpleQueueCacheCursor(SimpleQueueCache cache, Guid streamGuid, string streamNamespace, Logger logger)
 {
     if (cache == null)
     {
         throw new ArgumentNullException("cache");
     }
     this.cache = cache;
     this.streamGuid = streamGuid;
     this.streamNamespace = streamNamespace;
     this.logger = logger;
     current = null;
     SimpleQueueCache.Log(logger, "SimpleQueueCacheCursor New Cursor for {0}, {1}", streamGuid, streamNamespace);
 }
Example No. 34
 public TimedQueueCacheCursor(TimedQueueCache cache, Guid streamGuid, string streamNamespace, Logger logger)
 {
     if (cache == null)
     {
         throw new ArgumentNullException(nameof(cache));
     }
     _cache = cache;
     _streamGuid = streamGuid;
     _streamNamespace = streamNamespace;
     _logger = logger;
     _current = null;
     TimedQueueCache.Log(logger, "TimedQueueCacheCursor New Cursor for {0}, {1}", streamGuid, streamNamespace);
 }
Example No. 35
        public virtual bool MoveNext()
        {
            IBatchContainer next;
            while (cache.TryGetNextMessage(this, out next))
            {
                if(IsInStream(next))
                    break;
            }
            if (!IsInStream(next))
                return false;

            current = next;
            return true;
        }
Example No. 36
        /// <summary>
        /// Acquires the next message in the cache at the provided cursor
        /// </summary>
        /// <param name="cursor"></param>
        /// <param name="batch"></param>
        /// <returns></returns>
        internal bool TryGetNextMessage(TimedQueueCacheCursor cursor, out IBatchContainer batch)
        {
            Log(_logger, "TimedQueueCache for QueueId:{0}, TryGetNextMessage: {1}", Id.ToString(), cursor);

            batch = null;

            if (cursor == null) throw new ArgumentNullException(nameof(cursor));

            // If the cursor is not set, try to set it and then get the next message.
            if (!cursor.IsSet)
            {
                Log(_logger, "TimedQueueCache for QueueId:{0}, TryGetNextMessage: cursor was not set on a value, initializing with the cursor sequence token", Id.ToString());
                InitializeCursor(cursor, cursor.SequenceToken);
                return cursor.IsSet && TryGetNextMessage(cursor, out batch);
            }

            // has this message been purged
            if (cursor.SequenceToken.Older(LastItem.SequenceToken))
            {
                Log(_logger, "TimedQueueCache for QueueId:{0}, This is a faulted state, by this point the cursor should point to an item in the cache. The cursor is {1}", Id.ToString(), cursor.ToString());
                SetCursor(cursor, _cachedMessages.Last);
            }

            // Cursor now points to a valid message in the cache. Get it!
            // Capture the current element and advance to the next one.
            batch = cursor.NextElement.Value.Batch;
            Log(_logger, "TimedQueueCache for QueueId:{0}, TryGetNextMessage: retrieved an item from cache.", Id.ToString());

            // Advance to next:
            if (cursor.NextElement == _cachedMessages.First)
            {
                Log(_logger, "TimedQueueCache for QueueId:{0}, TryGetNextMessage: reached end of cache, resetting the cursor to a future token.", Id.ToString());

                // If we are at the end of the cache unset cursor and move offset one forward
                ResetCursor(cursor, ((EventSequenceToken)cursor.SequenceToken).NextSequenceNumber());
            }
            else // move to next
            {
                UpdateCursor(cursor, cursor.NextElement.Previous);
            }
            return true;
        }
Example No. 37
 private async Task DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
 {
     if (batch.RequestContext != null)
     {
         RequestContext.Import(batch.RequestContext);
     }
     try
     {
         StreamSequenceToken newToken = await consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable());
         if (newToken != null)
         {
             consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid,
                 consumerData.StreamId.Namespace, newToken);
         }
     }
     finally
     {
         if (batch.RequestContext != null)
         {
             RequestContext.Clear();
         }
     }
 }
Example No. 38
 private async Task DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
 {
     if (batch.RequestContext != null)
     {
         RequestContext.Import(batch.RequestContext);
     }
     try
     {
         StreamSequenceToken prevToken = consumerData.LastToken;
         StreamSequenceToken newToken = await consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable(), prevToken);
         if (newToken != null)
         {
             consumerData.LastToken = newToken;
             consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid,
                 consumerData.StreamId.Namespace, newToken);
         }
         else
         {
             consumerData.LastToken = batch.SequenceToken; // this is the currently delivered token
         }
     }
     finally
     {
         if (batch.RequestContext != null)
         {
             RequestContext.Clear();
         }
     }
 }
Example No. 39
 private async Task DeliverErrorToConsumer(StreamConsumerData consumerData, Exception exc, IBatchContainer batch)
 {
      if (batch != null && batch.RequestContext != null)
     {
         RequestContext.Import(batch.RequestContext);
     }
     try
     {
         await consumerData.StreamConsumer.ErrorInStream(consumerData.SubscriptionId, exc);
     }
     finally
     {
         if (batch != null && batch.RequestContext != null)
         {
             RequestContext.Clear();
         }
     }
 }
Example No. 40
        private void Add(IBatchContainer batch, StreamSequenceToken sequenceToken)
        {
            if (batch == null) throw new ArgumentNullException("batch");

            CacheBucket cacheBucket = null;
            if (cacheCursorHistogram.Count == 0)
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }
            else
            {
                cacheBucket = cacheCursorHistogram[cacheCursorHistogram.Count - 1]; // last one
            }

            if (cacheBucket.NumCurrentItems == CACHE_HISTOGRAM_MAX_BUCKET_SIZE) // last bucket is full, open a new one
            {
                cacheBucket = new CacheBucket();
                cacheCursorHistogram.Add(cacheBucket);
            }

            // Add message to linked list
            var item = new SimpleQueueCacheItem
            {
                Batch = batch,
                SequenceToken = sequenceToken,
                CacheBucket = cacheBucket
            };

            cachedMessages.AddFirst(new LinkedListNode<SimpleQueueCacheItem>(item));
            cacheBucket.UpdateNumItems(1);

            if (Size > maxCacheSize)
            {
                //var last = cachedMessages.Last;
                cachedMessages.RemoveLast();
                var bucket = cacheCursorHistogram[0]; // same as:  var bucket = last.Value.CacheBucket;
                bucket.UpdateNumItems(-1);
                if (bucket.NumCurrentItems == 0)
                {
                    cacheCursorHistogram.RemoveAt(0);
                }
            }
        }
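For orientation, the CacheBucket type this method manipulates is not shown on this page; a minimal reconstruction from the way it is used here (NumCurrentItems plus UpdateNumItems) would be the sketch below. The real Orleans type also tracks cursor counts, which this sketch omits.

        // Reconstructed from usage above; illustrative only, not the original definition.
        private class CacheBucket
        {
            internal int NumCurrentItems { get; private set; }

            internal void UpdateNumItems(int delta)
            {
                NumCurrentItems += delta;
            }
        }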
Example No. 41
        /// <summary>
        /// Acquires the next message in the cache at the provided cursor
        /// </summary>
        /// <param name="cursor"></param>
        /// <param name="batch"></param>
        /// <returns></returns>
        internal bool TryGetNextMessage(SimpleQueueCacheCursor cursor, out IBatchContainer batch)
        {
            Log(logger, "TryGetNextMessage: {0}", cursor);

            batch = null;

            if (cursor == null) throw new ArgumentNullException("cursor");
            
            // If the cursor is not set, try to set it and then get the next message.
            if (!cursor.IsSet)
            {
                InitializeCursor(cursor, cursor.SequenceToken, false);
                return cursor.IsSet && TryGetNextMessage(cursor, out batch);
            }

            // has this message been purged
            if (cursor.SequenceToken.Older(cachedMessages.Last.Value.SequenceToken))
            {
                throw new QueueCacheMissException(cursor.SequenceToken, cachedMessages.Last.Value.SequenceToken, cachedMessages.First.Value.SequenceToken);
            }

            // Cursor now points to a valid message in the cache. Get it!
            // Capture the current element and advance to the next one.
            batch = cursor.Element.Value.Batch;
            
            // Advance to next:
            if (cursor.Element == cachedMessages.First)
            {
                // If we are at the end of the cache unset cursor and move offset one forward
                ResetCursor(cursor, ((EventSequenceToken)cursor.SequenceToken).NextSequenceNumber());
            }
            else // move to next
            {
                UpdateCursor(cursor, cursor.Element.Previous);
            }
            return true;
        }
Example No. 42
        public static void Marker(IBatchContainer container, int layer, string name)
        {
            if (!EnableTracing)
                return;

            var batch = new MarkerBatch(layer, name);
            container.Add(batch);
        }
Example No. 43
        private async Task<bool> ErrorProtocol(StreamConsumerData consumerData, Exception exceptionOccured, bool isDeliveryError, IBatchContainer batch, StreamSequenceToken token)
        {
            // for loss of client, we just remove the subscription
            if (exceptionOccured is ClientNotAvailableException)
            {
                logger.Warn(ErrorCode.Stream_ConsumerIsDead,
                    "Consumer {0} on stream {1} is no longer active - permanently removing Consumer.", consumerData.StreamConsumer, consumerData.StreamId);
                pubSub.UnregisterConsumer(consumerData.SubscriptionId, consumerData.StreamId, consumerData.StreamId.ProviderName).Ignore();
                return true;
            }

            // notify consumer about the error or that the data is not available.
            await OrleansTaskExtentions.ExecuteAndIgnoreException(
                () => DeliverErrorToConsumer(
                    consumerData, exceptionOccured, batch));
            // record that there was a delivery failure
            if (isDeliveryError)
            {
                await OrleansTaskExtentions.ExecuteAndIgnoreException(
                    () => streamFailureHandler.OnDeliveryFailure(
                        consumerData.SubscriptionId, streamProviderName, consumerData.StreamId, token));
            }
            else
            {
                await OrleansTaskExtentions.ExecuteAndIgnoreException(
                       () => streamFailureHandler.OnSubscriptionFailure(
                           consumerData.SubscriptionId, streamProviderName, consumerData.StreamId, token));
            }
            // if configured to fault on delivery failure and this is not an implicit subscription, fault and remove the subscription
            if (streamFailureHandler.ShouldFaultSubsriptionOnError && !SubscriptionMarker.IsImplicitSubscription(consumerData.SubscriptionId.Guid))
            {
                try
                {
                    // notify consumer of faulted subscription, if we can.
                    await OrleansTaskExtentions.ExecuteAndIgnoreException(
                        () => DeliverErrorToConsumer(
                            consumerData, new FaultedSubscriptionException(consumerData.SubscriptionId, consumerData.StreamId), batch));
                    // mark subscription as faulted.
                    await pubSub.FaultSubscription(consumerData.StreamId, consumerData.SubscriptionId);
                }
                finally
                {
                    // remove subscription
                    RemoveSubscriber_Impl(consumerData.SubscriptionId, consumerData.StreamId);
                }
                return true;
            }
            return false;
        }
Example No. 44
        private async Task DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
        {
            StreamHandshakeToken prevToken = consumerData.LastToken;
            Task<StreamHandshakeToken> batchDeliveryTask;

            bool isRequestContextSet = batch.ImportRequestContext();
            try
            {
                batchDeliveryTask = consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable(), prevToken);
            }
            finally
            {
                if (isRequestContextSet)
                {
                    // clear RequestContext before await!
                    RequestContext.Clear();
                }
            }
            StreamHandshakeToken newToken = await batchDeliveryTask;
            if (newToken != null)
            {
                consumerData.LastToken = newToken;
                consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId.Guid,
                    consumerData.StreamId.Namespace, newToken.Token);
            }
            else
            {
                consumerData.LastToken = StreamHandshakeToken.CreateDeliveyToken(batch.SequenceToken); // this is the currently delivered token
            }

        }
Example No. 45
 private async Task<bool> ErrorProtocol(StreamConsumerData consumerData, Exception exceptionOccured, bool isDeliveryError, IBatchContainer batch, StreamSequenceToken token)
 {
     // notify consumer about the error or that the data is not available.
     await OrleansTaskExtentions.ExecuteAndIgnoreException(
         () => DeliverErrorToConsumer(
             consumerData, exceptionOccured, batch));
     // record that there was a delivery failure
     if (isDeliveryError)
     {
         await OrleansTaskExtentions.ExecuteAndIgnoreException(
             () => streamFailureHandler.OnDeliveryFailure(
                 consumerData.SubscriptionId, streamProviderName, consumerData.StreamId, token));
     }
     else
     {
         await OrleansTaskExtentions.ExecuteAndIgnoreException(
                () => streamFailureHandler.OnSubscriptionFailure(
                    consumerData.SubscriptionId, streamProviderName, consumerData.StreamId, token));
     }
     // if configured to fault on delivery failure and this is not an implicit subscription, fault and remove the subscription
     if (streamFailureHandler.ShouldFaultSubsriptionOnError && !SubscriptionMarker.IsImplicitSubscription(consumerData.SubscriptionId.Guid))
     {
         try
         {
             // notify consumer of faulted subscription, if we can.
             await OrleansTaskExtentions.ExecuteAndIgnoreException(
                 () => DeliverErrorToConsumer(
                     consumerData, new FaultedSubscriptionException(consumerData.SubscriptionId, consumerData.StreamId), batch));
             // mark subscription as faulted.
             await pubSub.FaultSubscription(consumerData.StreamId, consumerData.SubscriptionId);
         }
         finally
         {
             // remove subscription
             RemoveSubscriber_Impl(consumerData.SubscriptionId, consumerData.StreamId);
         }
         return true;
     }
     return false;
 }
Example No. 46
 private static async Task DeliverErrorToConsumer(StreamConsumerData consumerData, Exception exc, IBatchContainer batch)
 {
     Task errorDeliveryTask;
     bool isRequestContextSet = batch != null && batch.ImportRequestContext();
     try
     {
         errorDeliveryTask = consumerData.StreamConsumer.ErrorInStream(consumerData.SubscriptionId, exc);
     }
     finally
     {
         if (isRequestContextSet)
         {
             RequestContext.Clear(); // clear RequestContext before await!
         }
     }
     await errorDeliveryTask;
 }
Example No. 47
        private async Task<StreamHandshakeToken> DeliverBatchToConsumer(StreamConsumerData consumerData, IBatchContainer batch)
        {
            StreamHandshakeToken prevToken = consumerData.LastToken;
            Task<StreamHandshakeToken> batchDeliveryTask;

            bool isRequestContextSet = batch.ImportRequestContext();
            try
            {
                batchDeliveryTask = consumerData.StreamConsumer.DeliverBatch(consumerData.SubscriptionId, batch.AsImmutable(), prevToken);
            }
            finally
            {
                if (isRequestContextSet)
                {
                    // clear RequestContext before await!
                    RequestContext.Clear();
                }
            }
            StreamHandshakeToken newToken = await batchDeliveryTask;
            consumerData.LastToken = StreamHandshakeToken.CreateDeliveyToken(batch.SequenceToken); // this is the currently delivered token
            return newToken;
        }
Example No. 48
        public static void Marker(IBatchContainer container, int layer, string format, params object[] values)
        {
            if (!EnableTracing)
                return;

            Marker(container, layer, String.Format(format, values));
        }
Example No. 49
 private bool IsInStream(IBatchContainer batchContainer)
 {
     return batchContainer != null &&
             batchContainer.StreamGuid.Equals(streamIdentity.Guid) &&
             string.Equals(batchContainer.StreamNamespace, streamIdentity.Namespace);
 }
Example No. 50
        private void Add(IBatchContainer batch, StreamSequenceToken sequenceToken)
        {
            if (batch == null) throw new ArgumentNullException(nameof(batch));

            var cacheBucket = GetOrCreateBucket();

            cacheBucket.UpdateNumItems(1);
            // Add message to linked list
            var item = new TimedQueueCacheItem
            {
                Batch = batch,
                SequenceToken = sequenceToken,
                CacheBucket = cacheBucket,
            };

            item.Timestamp = GetTimestampForItem(batch);

            var newNode = new LinkedListNode<TimedQueueCacheItem>(item);

            // If it's the first item in the bucket, also record it as the bucket's oldest member
            if (cacheBucket.NumCurrentItems == 1)
            {
                Log(_logger, "TimedQueueCache for QueueId:{0}, Add: The oldest timespan in the cache is {1}", Id.ToString(), item.Timestamp);
                cacheBucket.OldestMemberTimestamp = item.Timestamp;
                cacheBucket.OldestMember = newNode;
            }

            // Setting the newest member
            cacheBucket.NewestMemberTimestamp = item.Timestamp;
            cacheBucket.NewestMember = newNode;

            _cachedMessages.AddFirst(newNode);

            _counterMessagesInCache.Increment(Id.ToString(), 1);
        }
Example No. 51
 private DateTime GetTimestampForItem(IBatchContainer batch)
 {
      // Check whether the batch came from a Kafka stream; if not, fall back to the current UTC time.
     var batchAsKafkaBatch = batch as KafkaBatchContainer;
     return batchAsKafkaBatch == null ? DateTime.UtcNow : DateTime.ParseExact(batchAsKafkaBatch.Timestamp, "O", CultureInfo.InvariantCulture).ToUniversalTime();
 }
Example No. 52
 public void Draw (IBatchContainer container, int layer = 0) {
     foreach (var system in Systems)
         system.Draw(this, container, layer);
 }