// Called by rendezvous when new remote subscriber subscribes to this stream.
private async Task AddSubscriber_Impl(
    GuidId subscriptionId,
    StreamId streamId,
    IStreamConsumerExtension streamConsumer,
    StreamSequenceToken token,
    IStreamFilterPredicateWrapper filter)
{
    IQueueCacheCursor cursor = null;
    // if not cache, then we can't get cursor and there is no reason to ask consumer for token.
    if (queueCache != null)
    {
        try
        {
            StreamSequenceToken consumerToken = await streamConsumer.GetSequenceToken(subscriptionId);
            // Prefer the consumer-provided token; fall back to the token supplied with the subscription.
            consumerToken = consumerToken ?? token;
            // BUG FIX: previously this checked 'token != null', so when only the consumer
            // supplied a token (and the subscription token was null) no cursor was created.
            // The effective token is consumerToken, so that is what must be checked.
            if (consumerToken != null)
            {
                cursor = queueCache.GetCacheCursor(streamId.Guid, streamId.Namespace, consumerToken);
            }
        }
        catch (DataNotAvailableException dataNotAvailableException)
        {
            // notify consumer that the data is not available, if we can (best-effort, fire-and-forget).
            streamConsumer.ErrorInStream(subscriptionId, dataNotAvailableException).Ignore();
        }
    }
    AddSubscriberToSubscriptionCache(subscriptionId, streamId, streamConsumer, cursor, filter);
}
// Reads the next deliverable unit from the cursor: a single event when batching is
// disabled (BatchContainerBatchSize <= 1), or up to BatchContainerBatchSize filtered
// events wrapped in a BatchContainerBatch otherwise. Returns null when nothing is available.
private IBatchContainer GetBatchForConsumer(IQueueCacheCursor cursor, IStreamFilterPredicateWrapper filterWrapper, StreamId streamId)
{
    if (this.options.BatchContainerBatchSize <= 1)
    {
        // Single-event mode: hand back the next cached event, if any.
        Exception ignore;
        return cursor.MoveNext() ? cursor.GetCurrent(out ignore) : null;
    }

    if (this.options.BatchContainerBatchSize > 1)
    {
        // Batched mode: collect up to BatchContainerBatchSize events that pass this consumer's filter.
        Exception ignore;
        var collected = new List<IBatchContainer>();
        int accepted = 0;
        while (accepted < this.options.BatchContainerBatchSize && cursor.MoveNext())
        {
            var candidate = cursor.GetCurrent(out ignore);
            // Skip events this consumer's filter rejects; keep advancing the cursor.
            if (!candidate.ShouldDeliver(streamId, filterWrapper.FilterData, filterWrapper.ShouldReceive))
            {
                continue;
            }
            collected.Add(candidate);
            accepted++;
        }
        return accepted == 0 ? null : new BatchContainerBatch(collected);
    }

    return null;
}
// Disposes the current cursor without letting a Dispose failure propagate, and
// guarantees the Cursor field is cleared so no further activity starts on this consumer data.
internal void SafeDisposeCursor(Logger logger)
{
    try
    {
        if (Cursor == null)
        {
            return;
        }
        // kill cursor activity and ensure it does not start again on this consumer data.
        Utils.SafeExecute(
            Cursor.Dispose,
            logger,
            () => String.Format("Cursor.Dispose on stream {0}, StreamConsumer {1} has thrown exception.", StreamId, StreamConsumer));
    }
    finally
    {
        // Always clear the field, even if Dispose threw.
        Cursor = null;
    }
}
// Best-effort disposal of the active cursor; the Cursor field is unconditionally
// nulled afterwards so delivery cannot resume on this consumer data.
internal void SafeDisposeCursor(ILogger logger)
{
    var cursorToDispose = Cursor;
    try
    {
        // kill cursor activity and ensure it does not start again on this consumer data.
        if (cursorToDispose != null)
        {
            Utils.SafeExecute(
                cursorToDispose.Dispose,
                logger,
                () => String.Format("Cursor.Dispose on stream {0}, StreamConsumer {1} has thrown exception.", StreamId, StreamConsumer));
        }
    }
    finally
    {
        // Clear even when Dispose failed.
        Cursor = null;
    }
}
// Pulls the next deliverable unit for a consumer: one event when batching is off
// (BatchContainerBatchSize <= 1), otherwise up to BatchContainerBatchSize events
// that pass ShouldDeliverBatch, wrapped in a BatchContainerBatch. Null means the
// cursor is exhausted (or nothing passed the filter).
private IBatchContainer GetBatchForConsumer(IQueueCacheCursor cursor, StreamId streamId, string filterData)
{
    if (this.options.BatchContainerBatchSize <= 1)
    {
        // Single-event mode.
        return cursor.MoveNext() ? cursor.GetCurrent(out _) : null;
    }

    if (this.options.BatchContainerBatchSize > 1)
    {
        // Batched mode: keep pulling until the batch is full or the cursor runs dry.
        var collected = new List<IBatchContainer>();
        while (collected.Count < this.options.BatchContainerBatchSize && cursor.MoveNext())
        {
            var candidate = cursor.GetCurrent(out _);
            // Only include events this consumer should receive.
            if (ShouldDeliverBatch(streamId, candidate, filterData))
            {
                collected.Add(candidate);
            }
        }
        return collected.Count == 0 ? null : new BatchContainerBatch(collected);
    }

    return null;
}
// Unfiltered variant: returns the next single event when batching is disabled,
// or up to BatchContainerBatchSize consecutive events wrapped in a
// BatchContainerBatch. Returns null when the cursor has nothing to give.
private IBatchContainer GetBatchForConsumer(IQueueCacheCursor cursor, StreamId streamId)
{
    Exception ignore;

    if (this.options.BatchContainerBatchSize <= 1)
    {
        // Single-event mode.
        return cursor.MoveNext() ? cursor.GetCurrent(out ignore) : null;
    }

    if (this.options.BatchContainerBatchSize > 1)
    {
        // Batched mode: take events until the batch is full or the cursor is exhausted.
        var buffered = new List<IBatchContainer>();
        while (buffered.Count < this.options.BatchContainerBatchSize && cursor.MoveNext())
        {
            buffered.Add(cursor.GetCurrent(out ignore));
        }
        return buffered.Count > 0 ? new BatchContainerBatch(buffered) : null;
    }

    return null;
}
// Called by rendezvous when new remote subscriber subscribes to this stream or when registering a new stream with the pubsub system.
private void AddSubscriberToSubscriptionCache(
    GuidId subscriptionId,
    StreamId streamId,
    IStreamConsumerExtension streamConsumer,
    IQueueCacheCursor newCursor,
    StreamSequenceToken requestedToken,
    IStreamFilterPredicateWrapper filter)
{
    // Lazily create the per-stream consumer collection.
    StreamConsumerCollection collection;
    if (!pubSubCache.TryGetValue(streamId, out collection))
    {
        collection = new StreamConsumerCollection(DateTime.UtcNow);
        pubSubCache.Add(streamId, collection);
    }

    // Lazily create the per-subscription consumer record.
    StreamConsumerData consumer;
    if (!collection.TryGetConsumer(subscriptionId, out consumer))
    {
        consumer = collection.AddConsumer(subscriptionId, streamId, streamConsumer, filter);
    }

    consumer.LastToken = requestedToken;

    if (newCursor != null)
    {
        // A fresh cursor was supplied for this subscription; it wins.
        consumer.Cursor = newCursor;
    }
    else if (consumer.Cursor == null && queueCache != null)
    {
        // No cursor yet: position at the end of the cache (null sequence token).
        consumer.Cursor = queueCache.GetCacheCursor(streamId.Guid, streamId.Namespace, null);
    }

    // Start delivering events if not actively doing so.
    if (consumer.State == StreamConsumerDataState.Inactive)
    {
        RunConsumerCursor(consumer, filter).Ignore();
    }
}
// Called by rendezvous when new remote subscriber subscribes to this stream.
private async Task AddSubscriber_Impl(
    GuidId subscriptionId,
    StreamId streamId,
    IStreamConsumerExtension streamConsumer,
    StreamSequenceToken token,
    IStreamFilterPredicateWrapper filter)
{
    IQueueCacheCursor cursor = null;
    StreamSequenceToken requestedToken = null;

    // Without a cache there is no cursor to create and no point asking the consumer for a token.
    if (queueCache != null)
    {
        DataNotAvailableException deliveryError = null;
        try
        {
            requestedToken = await streamConsumer.GetSequenceToken(subscriptionId);
            // The consumer's own token takes precedence; fall back to the subscription's token.
            requestedToken = requestedToken ?? token;
            if (requestedToken != null)
            {
                cursor = queueCache.GetCacheCursor(streamId.Guid, streamId.Namespace, requestedToken);
            }
        }
        catch (DataNotAvailableException dataNotAvailableException)
        {
            // Defer the notification so it runs outside the catch block.
            deliveryError = dataNotAvailableException;
        }

        if (deliveryError != null)
        {
            // notify consumer that the data is not available, if we can.
            await OrleansTaskExtentions.ExecuteAndIgnoreException(() => streamConsumer.ErrorInStream(subscriptionId, deliveryError));
        }
    }

    AddSubscriberToSubscriptionCache(subscriptionId, streamId, streamConsumer, cursor, requestedToken, filter);
}
// Integration test: pushes NumBatches batches through the adapter on two streams while
// per-queue reader tasks drain them into the queue cache, then verifies every batch
// arrived and that cache cursors enumerate the events in non-decreasing token order.
private async Task SendAndReceiveFromQueueAdapter(IQueueAdapterFactory adapterFactory, IProviderConfiguration config)
{
    // Build the adapter and cache under test from the supplied factory.
    IQueueAdapter adapter = await adapterFactory.CreateAdapter();
    IQueueAdapterCache cache = adapterFactory.GetQueueAdapterCache();

    // Create receiver per queue
    IStreamQueueMapper mapper = adapterFactory.GetStreamQueueMapper();
    Dictionary<QueueId, IQueueAdapterReceiver> receivers = mapper.GetAllQueues().ToDictionary(queueId => queueId, adapter.CreateReceiver);
    Dictionary<QueueId, IQueueCache> caches = mapper.GetAllQueues().ToDictionary(queueId => queueId, cache.CreateQueueCache);

    await Task.WhenAll(receivers.Values.Select(receiver => receiver.Initialize(TimeSpan.FromSeconds(5))));

    // test using 2 streams
    Guid streamId1 = Guid.NewGuid();
    Guid streamId2 = Guid.NewGuid();

    // Shared across reader tasks; updated via Interlocked below.
    int receivedBatches = 0;
    var streamsPerQueue = new ConcurrentDictionary<QueueId, HashSet<IStreamIdentity>>();

    // reader threads (at most 2 active queues because only two streams)
    var work = new List<Task>();
    foreach (KeyValuePair<QueueId, IQueueAdapterReceiver> receiverKvp in receivers)
    {
        QueueId queueId = receiverKvp.Key;
        var receiver = receiverKvp.Value;
        var qCache = caches[queueId];
        Task task = Task.Factory.StartNew(() =>
        {
            // Busy-poll until the global batch count reaches NumBatches.
            // NOTE(review): test code blocks on .Result inside a worker task by design here.
            while (receivedBatches < NumBatches)
            {
                var messages = receiver.GetQueueMessagesAsync(CloudQueueMessage.MaxNumberOfMessagesToPeek).Result.ToArray();
                if (!messages.Any())
                {
                    continue;
                }
                foreach (AzureQueueBatchContainer message in messages.Cast<AzureQueueBatchContainer>())
                {
                    // Record which streams were observed on this queue.
                    streamsPerQueue.AddOrUpdate(queueId,
                        id => new HashSet<IStreamIdentity> { new StreamIdentity(message.StreamGuid, message.StreamGuid.ToString()) },
                        (id, set) =>
                        {
                            set.Add(new StreamIdentity(message.StreamGuid, message.StreamGuid.ToString()));
                            return (set);
                        });
                    output.WriteLine("Queue {0} received message on stream {1}", queueId, message.StreamGuid);
                    Assert.Equal(NumMessagesPerBatch / 2, message.GetEvents<int>().Count()); // "Half the events were ints"
                    Assert.Equal(NumMessagesPerBatch / 2, message.GetEvents<string>().Count()); // "Half the events were strings"
                }
                // Count received batches and push them into the cache for the cursor checks below.
                Interlocked.Add(ref receivedBatches, messages.Length);
                qCache.AddToCache(messages);
            }
        });
        work.Add(task);
    }

    // send events
    List<object> events = CreateEvents(NumMessagesPerBatch);
    // Alternate batches between the two streams (even index -> stream1, odd -> stream2).
    work.Add(Task.Factory.StartNew(() => Enumerable.Range(0, NumBatches)
        .Select(i => i % 2 == 0 ? streamId1 : streamId2)
        .ToList()
        .ForEach(streamId => adapter.QueueMessageBatchAsync(streamId, streamId.ToString(), events.Take(NumMessagesPerBatch).ToArray(), null, RequestContext.Export()).Wait())));
    await Task.WhenAll(work);

    // Make sure we got back everything we sent
    Assert.Equal(NumBatches, receivedBatches);

    // check to see if all the events are in the cache and we can enumerate through them
    StreamSequenceToken firstInCache = new EventSequenceToken(0);
    foreach (KeyValuePair<QueueId, HashSet<IStreamIdentity>> kvp in streamsPerQueue)
    {
        var receiver = receivers[kvp.Key];
        var qCache = caches[kvp.Key];
        foreach (IStreamIdentity streamGuid in kvp.Value)
        {
            // read all messages in cache for stream
            IQueueCacheCursor cursor = qCache.GetCacheCursor(streamGuid, firstInCache);
            int messageCount = 0;
            StreamSequenceToken tenthInCache = null;
            StreamSequenceToken lastToken = firstInCache;
            while (cursor.MoveNext())
            {
                Exception ex;
                messageCount++;
                IBatchContainer batch = cursor.GetCurrent(out ex);
                output.WriteLine("Token: {0}", batch.SequenceToken);
                // Tokens must be non-decreasing as we walk the cache.
                Assert.True(batch.SequenceToken.CompareTo(lastToken) >= 0, $"order check for event {messageCount}");
                lastToken = batch.SequenceToken;
                // Remember the 10th token so we can re-cursor from the middle below.
                if (messageCount == 10)
                {
                    tenthInCache = batch.SequenceToken;
                }
            }
            output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
            // Each stream got every other batch, hence NumBatches / 2 per stream.
            Assert.Equal(NumBatches / 2, messageCount);
            Assert.NotNull(tenthInCache);

            // read all messages from the 10th
            cursor = qCache.GetCacheCursor(streamGuid, tenthInCache);
            messageCount = 0;
            while (cursor.MoveNext())
            {
                messageCount++;
            }
            output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
            const int expected = NumBatches / 2 - 10 + 1; // all except the first 10, including the 10th (10 + 1)
            Assert.Equal(expected, messageCount);
        }
    }
}
// Delivery pump for a single consumer: drains events from the consumer's cache cursor,
// applies the optional subscription filter, delivers each batch with retries, and runs the
// error protocol on failure. Marks the consumer Active while pumping and Inactive when done.
// Fired with .Ignore() from AddSubscriberToSubscriptionCache, so terminal errors are logged here.
private async Task RunConsumerCursor(StreamConsumerData consumerData, IStreamFilterPredicateWrapper filterWrapper)
{
    try
    {
        // double check in case of interleaving
        if (consumerData.State == StreamConsumerDataState.Active || consumerData.Cursor == null)
        {
            return;
        }
        consumerData.State = StreamConsumerDataState.Active;

        while (consumerData.Cursor != null)
        {
            IBatchContainer batch = null;
            Exception exceptionOccured = null;
            try
            {
                Exception ignore;
                if (!consumerData.Cursor.MoveNext())
                {
                    // Cursor exhausted: leave the loop and go Inactive.
                    break;
                }
                batch = consumerData.Cursor.GetCurrent(out ignore);
            }
            catch (Exception exc)
            {
                // Read failed: remember the error for the error protocol below, discard the
                // broken cursor, and reposition a fresh cursor at the tail (null token).
                exceptionOccured = exc;
                consumerData.SafeDisposeCursor(logger);
                consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId, null);
            }

            // Apply filtering to this batch, if applicable
            if (filterWrapper != null && batch != null)
            {
                try
                {
                    // Apply batch filter to this input batch, to see whether we should deliver it to this consumer.
                    if (!batch.ShouldDeliver(
                            consumerData.StreamId,
                            filterWrapper.FilterData,
                            filterWrapper.ShouldReceive))
                    {
                        continue; // Skip this batch -- nothing to do
                    }
                }
                catch (Exception exc)
                {
                    // A throwing filter does not block delivery; the batch is delivered anyway.
                    var message = $"Ignoring exception while trying to evaluate subscription filter function {filterWrapper} on stream {consumerData.StreamId} in PersistentStreamPullingAgentGrain.RunConsumerCursor";
                    logger.Warn((int)ErrorCode.PersistentStreamPullingAgent_13, message, exc);
                }
            }

            try
            {
                // NOTE(review): the counter increments on every loop pass, even when batch is
                // null (the read-failure path) — confirm this is the intended accounting.
                numSentMessagesCounter.Increment();
                if (batch != null)
                {
                    // Deliver with infinite retries, giving up only for ClientNotAvailableException,
                    // bounded overall by MaxEventDeliveryTime.
                    StreamHandshakeToken newToken = await AsyncExecutorWithRetries.ExecuteWithRetries(
                        i => DeliverBatchToConsumer(consumerData, batch),
                        AsyncExecutorWithRetries.INFINITE_RETRIES,
                        (exception, i) => !(exception is ClientNotAvailableException),
                        config.MaxEventDeliveryTime,
                        DeliveryBackoffProvider);
                    // A non-null handshake token means the consumer asked to resume from a
                    // different position: rebuild the cursor at that token.
                    if (newToken != null)
                    {
                        consumerData.LastToken = newToken;
                        IQueueCacheCursor newCursor = queueCache.GetCacheCursor(consumerData.StreamId, newToken.Token);
                        consumerData.SafeDisposeCursor(logger);
                        consumerData.Cursor = newCursor;
                    }
                }
            }
            catch (Exception exc)
            {
                consumerData.Cursor?.RecordDeliveryFailure();
                var message = $"Exception while trying to deliver msgs to stream {consumerData.StreamId} in PersistentStreamPullingAgentGrain.RunConsumerCursor";
                logger.Error(ErrorCode.PersistentStreamPullingAgent_14, message, exc);
                // Client-unavailable errors are passed through as-is; everything else is wrapped.
                exceptionOccured = exc is ClientNotAvailableException
                    ? exc
                    : new StreamEventDeliveryFailureException(consumerData.StreamId);
            }

            // if we failed to deliver a batch
            if (exceptionOccured != null)
            {
                bool faultedSubscription = await ErrorProtocol(consumerData, exceptionOccured, true, batch, batch?.SequenceToken);
                if (faultedSubscription)
                {
                    // Subscription faulted: stop pumping entirely (state stays Active here —
                    // NOTE(review): confirm that is intentional on the faulted path).
                    return;
                }
            }
        }
        consumerData.State = StreamConsumerDataState.Inactive;
    }
    catch (Exception exc)
    {
        // RunConsumerCursor is fired with .Ignore so we should log if anything goes wrong, because there is no one to catch the exception
        logger.Error(ErrorCode.PersistentStreamPullingAgent_15, "Ignored RunConsumerCursor Error", exc);
        consumerData.State = StreamConsumerDataState.Inactive;
        throw;
    }
}
// Delivery pump for a single consumer (string-filter-data variant): pulls (possibly batched)
// events via GetBatchForConsumer, filters with ShouldDeliverBatch, delivers with retries, and
// invokes the error protocol on failure. Marks the consumer Active while pumping, Inactive when
// done. Fired with .Ignore(), so terminal errors are logged here before rethrowing.
private async Task RunConsumerCursor(StreamConsumerData consumerData)
{
    try
    {
        // double check in case of interleaving
        if (consumerData.State == StreamConsumerDataState.Active || consumerData.Cursor == null)
        {
            return;
        }
        consumerData.State = StreamConsumerDataState.Active;

        while (consumerData.Cursor != null)
        {
            IBatchContainer batch = null;
            Exception exceptionOccured = null;
            try
            {
                batch = GetBatchForConsumer(consumerData.Cursor, consumerData.StreamId, consumerData.FilterData);
                if (batch == null)
                {
                    // Nothing left in the cache for this consumer: leave the loop, go Inactive.
                    break;
                }
            }
            catch (Exception exc)
            {
                // Read failed: remember the error for the error protocol below, discard the
                // broken cursor, and reposition a fresh cursor at the tail (null token).
                exceptionOccured = exc;
                consumerData.SafeDisposeCursor(logger);
                consumerData.Cursor = queueCache.GetCacheCursor(consumerData.StreamId, null);
            }

            if (batch != null)
            {
                // Skip batches this consumer's filter rejects.
                // NOTE(review): GetBatchForConsumer already filters in batched mode — this second
                // check matters mainly for the single-event path; confirm no double-filter issue.
                if (!ShouldDeliverBatch(consumerData.StreamId, batch, consumerData.FilterData))
                {
                    continue;
                }
            }

            try
            {
                // NOTE(review): the counter increments on every loop pass, even when batch is
                // null (the read-failure path) — confirm this is the intended accounting.
                numSentMessagesCounter.Increment();
                if (batch != null)
                {
                    // Deliver with infinite retries; stop retrying on ClientNotAvailableException
                    // or when the agent is shutting down, bounded by MaxEventDeliveryTime.
                    StreamHandshakeToken newToken = await AsyncExecutorWithRetries.ExecuteWithRetries(
                        i => DeliverBatchToConsumer(consumerData, batch),
                        AsyncExecutorWithRetries.INFINITE_RETRIES,
                        (exception, i) => !(exception is ClientNotAvailableException) || IsShutdown,
                        this.options.MaxEventDeliveryTime,
                        DeliveryBackoffProvider);
                    // A non-null handshake token means the consumer wants to resume from a
                    // different position: rebuild the cursor at that token.
                    if (newToken != null)
                    {
                        consumerData.LastToken = newToken;
                        IQueueCacheCursor newCursor = queueCache.GetCacheCursor(consumerData.StreamId, newToken.Token);
                        consumerData.SafeDisposeCursor(logger);
                        consumerData.Cursor = newCursor;
                    }
                }
            }
            catch (Exception exc)
            {
                consumerData.Cursor?.RecordDeliveryFailure();
                var message = $"Exception while trying to deliver msgs to stream {consumerData.StreamId} in PersistentStreamPullingAgentGrain.RunConsumerCursor";
                logger.Error(ErrorCode.PersistentStreamPullingAgent_14, message, exc);
                // Client-unavailable errors pass through unchanged; everything else is wrapped.
                exceptionOccured = exc is ClientNotAvailableException
                    ? exc
                    : new StreamEventDeliveryFailureException(consumerData.StreamId);
            }

            // if we failed to deliver a batch
            if (exceptionOccured != null)
            {
                bool faultedSubscription = await ErrorProtocol(consumerData, exceptionOccured, true, batch, batch?.SequenceToken);
                if (faultedSubscription)
                {
                    // Subscription faulted: stop pumping entirely.
                    return;
                }
            }
        }
        consumerData.State = StreamConsumerDataState.Inactive;
    }
    catch (Exception exc)
    {
        // RunConsumerCursor is fired with .Ignore so we should log if anything goes wrong, because there is no one to catch the exception
        logger.Error(ErrorCode.PersistentStreamPullingAgent_15, "Ignored RunConsumerCursor Error", exc);
        consumerData.State = StreamConsumerDataState.Inactive;
        throw;
    }
}
// Called by rendezvous when new remote subscriber subscribes to this stream or when registering a new stream with the pubsub system.
private void AddSubscriberToSubscriptionCache(
    GuidId subscriptionId,
    StreamId streamId,
    IStreamConsumerExtension streamConsumer,
    IQueueCacheCursor newCursor,
    StreamSequenceToken requestedToken,
    IStreamFilterPredicateWrapper filter)
{
    // Look up, or lazily create, the consumer collection for this stream.
    StreamConsumerCollection consumers;
    if (!pubSubCache.TryGetValue(streamId, out consumers))
    {
        consumers = new StreamConsumerCollection(DateTime.UtcNow);
        pubSubCache.Add(streamId, consumers);
    }

    // Look up, or lazily create, the per-subscription consumer record.
    StreamConsumerData consumerData;
    if (!consumers.TryGetConsumer(subscriptionId, out consumerData))
    {
        consumerData = consumers.AddConsumer(subscriptionId, streamId, streamConsumer, filter);
    }

    consumerData.LastToken = requestedToken;

    if (newCursor != null)
    {
        // A cursor supplied by the caller takes precedence.
        consumerData.Cursor = newCursor;
    }
    else if (consumerData.Cursor == null && queueCache != null)
    {
        // No cursor yet: start at the end of the cache (null sequence token).
        consumerData.Cursor = queueCache.GetCacheCursor(streamId.Guid, streamId.Namespace, null);
    }

    // Start delivering events if not actively doing so.
    if (consumerData.State == StreamConsumerDataState.Inactive)
    {
        RunConsumerCursor(consumerData, filter).Ignore();
    }
}