/// <summary>
/// Positions <paramref name="cursor"/> at the cache entry for <paramref name="sequenceToken"/>.
/// The cache list is ordered newest-first: messages.First is the newest cached item,
/// messages.Last is the oldest.
/// </summary>
/// <param name="cursor">Cursor to (re)position; mutated in place.</param>
/// <param name="sequenceToken">Token identifying the desired read position.</param>
/// <exception cref="QueueAdapterCacheMissException">
/// Thrown when the token is older than the oldest cached message (already purged).
/// </exception>
private void SetCursor(Cursor cursor, EventSequenceToken sequenceToken)
{
    if (messages.Count == 0 || // nothing in cache or
        messages.First.Value.SequenceToken.CompareTo(sequenceToken) < 0) // sequenceId is too new to be in cache
    {
        // Leave the cursor unset but remember the requested token, so a later
        // call can retry positioning once newer messages arrive.
        cursor.IsUnset = true;
        cursor.SequenceToken = sequenceToken;
        return;
    }

    LinkedListNode <CacheItem> lastMessage = messages.Last;
    // if offset of -1, iterate from last message in cache
    // (an invalid token means "start from the oldest cached message")
    if (sequenceToken.IsInvalid())
    {
        cursor.IsUnset = false;
        cursor.Current = lastMessage;
        cursor.SequenceToken = lastMessage.Value.SequenceToken;
        return;
    }

    // Check to see if offset is too old to be in cache
    if (lastMessage.Value.SequenceToken.CompareTo(sequenceToken) > 0)
    {
        // throw cache miss exception: report requested token plus the cached range
        // (lastMessage = oldest, messages.First = newest)
        throw new QueueAdapterCacheMissException(sequenceToken,
            lastMessage.Value.SequenceToken,
            messages.First.Value.SequenceToken);
    }

    // Find first message at or below offset
    // Events are ordered from newest to oldest, so iterate from start of list until we hit a node at a previous offset, or the end.
    LinkedListNode <CacheItem> node = messages.First;
    while (node != null && node.Value.SequenceToken.CompareTo(sequenceToken) > 0)
    {
        // did we get to the end?
        if (node == lastMessage)
        {
            break;
        }
        // if sequenceId is between the two, take the higher
        // NOTE(review): when the token falls strictly between two cached entries
        // this keeps the NEWER node (the one above the token), despite the
        // "at or below" comment above — confirm callers expect the higher one.
        if (node.Next.Value.SequenceToken.CompareTo(sequenceToken) < 0)
        {
            break;
        }
        node = node.Next;
    }

    // return cursor from start.
    cursor.IsUnset = false;
    cursor.Current = node;
    cursor.SequenceToken = node.Value.SequenceToken;
}
/// <summary>
/// Acquires the next message in the cache at the provided cursor.
/// </summary>
/// <param name="cursorObj">Cursor previously handed out by this cache; must be a Cursor instance.</param>
/// <param name="batch">Receives the batch at the cursor position, or null when none was produced.</param>
/// <param name="backPressure">Indicates how much backpressure this cursor should exert.
/// NOTE(review): the code below clamps this to [0.0, 1.0]; an earlier doc said 0-100 — the
/// code is taken as authoritative here.</param>
/// <returns>True when a batch was returned; false when the cursor could not be positioned.</returns>
public bool TryGetNextMessage(object cursorObj, out IBatchContainer batch, out double backPressure)
{
    batch = null;
    backPressure = 0;
    if (cursorObj == null)
    {
        throw new ArgumentNullException("cursorObj");
    }
    var cursor = cursorObj as Cursor;
    if (cursor == null)
    {
        throw new ArgumentOutOfRangeException("cursorObj", "Cursor is bad");
    }
    //if unset, try to set and then get next
    // (recurses at most once: the recursive call only happens when SetCursor
    //  succeeded, in which case IsUnset is false and this branch is skipped)
    if (cursor.IsUnset)
    {
        SetCursor(cursor, cursor.SequenceToken);
        return(!cursor.IsUnset && TryGetNextMessage(cursor, out batch, out backPressure));
    }
    // has this message been purged
    // (messages.Last is the oldest cached entry; a cursor older than it points at purged data)
    if (cursor.SequenceToken.CompareTo(messages.Last.Value.SequenceToken) < 0)
    {
        throw new QueueAdapterCacheMissException(cursor.SequenceToken,
            messages.Last.Value.SequenceToken,
            messages.First.Value.SequenceToken);
    }
    // get message
    batch = cursor.Message;
    // Backpressure = how far this cursor lags behind the newest cached message,
    // as a fraction of total cache capacity, clamped to [0, 1].
    backPressure = Math.Min(1.0, Math.Max(((double)EventSequenceToken.Distance(messages.First.Value.SequenceToken, cursor.SequenceToken) / cachedMessageCount), 0.0));
    // are we up to date? if so unset cursor, and move offset one forward
    if (cursor.Current == messages.First)
    {
        cursor.IsUnset = true;
        cursor.SequenceToken = cursor.SequenceToken.NextSequenceNumber();
    }
    else // move to next
    {
        // The list is ordered newest-first, so advancing the cursor means
        // moving to the PREVIOUS linked-list node (the newer one).
        cursor.Current = cursor.Current.Previous;
        cursor.SequenceToken = cursor.Current.Value.SequenceToken;
    }
    return(true);
}
/// <summary>
/// Floors a sequence token to the start of its event batch: for an
/// EventSequenceToken with a non-zero EventIndex, returns a new token carrying
/// only the sequence number; any other token is returned unchanged.
/// </summary>
/// <param name="token">Token to floor.</param>
/// <returns>The floored token, or the original when no flooring applies.</returns>
private StreamSequenceToken FloorSequenceToken(StreamSequenceToken token)
{
    var eventToken = token as EventSequenceToken;
    if (eventToken == null || eventToken.EventIndex == 0)
    {
        // Not an EventSequenceToken, or already at the start of the batch.
        return token;
    }
    return new EventSequenceToken(eventToken.SequenceNumber);
}
/// <summary>
/// End-to-end exercise of a queue adapter: sends NumBatches batches alternating
/// across two streams, drains them concurrently through per-queue receivers into
/// per-queue caches, then verifies each cache can be enumerated both from the
/// start and from the 10th message onward.
/// </summary>
/// <param name="adapterFactory">Factory under test; supplies the adapter, cache, and stream-queue mapper.</param>
/// <param name="config">Provider configuration (not read directly in this method).</param>
private async Task SendAndReceiveFromQueueAdapter(IQueueAdapterFactory adapterFactory, IProviderConfiguration config)
{
    IQueueAdapter adapter = await adapterFactory.CreateAdapter();
    IQueueAdapterCache cache = adapterFactory.GetQueueAdapterCache();

    // Create receiver per queue
    IStreamQueueMapper mapper = adapterFactory.GetStreamQueueMapper();
    Dictionary<QueueId, IQueueAdapterReceiver> receivers = mapper.GetAllQueues().ToDictionary(queueId => queueId, adapter.CreateReceiver);
    Dictionary<QueueId, IQueueCache> caches = mapper.GetAllQueues().ToDictionary(queueId => queueId, cache.CreateQueueCache);
    await Task.WhenAll(receivers.Values.Select(receiver => receiver.Initialize(TimeSpan.FromSeconds(5))));

    // test using 2 streams
    Guid streamId1 = Guid.NewGuid();
    Guid streamId2 = Guid.NewGuid();

    int receivedBatches = 0;
    var streamsPerQueue = new ConcurrentDictionary<QueueId, HashSet<IStreamIdentity>>();

    // reader threads (at most 2 active queues because only two streams)
    var work = new List<Task>();
    foreach( KeyValuePair<QueueId, IQueueAdapterReceiver> receiverKvp in receivers)
    {
        QueueId queueId = receiverKvp.Key;
        var receiver = receiverKvp.Value;
        var qCache = caches[queueId];
        Task task = Task.Factory.StartNew(() =>
        {
            // NOTE(review): receivedBatches is read here without a volatile or
            // interlocked read; the loop relies on eventual cross-thread
            // visibility of the Interlocked.Add below — confirm this is
            // acceptable for a test.
            while (receivedBatches < NumBatches)
            {
                // Blocking .Result is used here inside a dedicated
                // Task.Factory.StartNew worker, not on the test's main path.
                var messages = receiver.GetQueueMessagesAsync(CloudQueueMessage.MaxNumberOfMessagesToPeek).Result.ToArray();
                if (!messages.Any())
                {
                    continue;
                }
                foreach (AzureQueueBatchContainer message in messages.Cast<AzureQueueBatchContainer>())
                {
                    // Record which streams were observed on this queue.
                    streamsPerQueue.AddOrUpdate(queueId,
                        id => new HashSet<IStreamIdentity> { new StreamIdentity(message.StreamGuid, message.StreamGuid.ToString()) },
                        (id, set) =>
                        {
                            set.Add(new StreamIdentity(message.StreamGuid, message.StreamGuid.ToString()));
                            return set;
                        });
                    output.WriteLine("Queue {0} received message on stream {1}", queueId, message.StreamGuid);
                    // CreateEvents produces half ints, half strings per batch (asserted here).
                    Assert.AreEqual(NumMessagesPerBatch / 2, message.GetEvents<int>().Count(), "Half the events were ints");
                    Assert.AreEqual(NumMessagesPerBatch / 2, message.GetEvents<string>().Count(), "Half the events were strings");
                }
                Interlocked.Add(ref receivedBatches, messages.Length);
                qCache.AddToCache(messages);
            }
        });
        work.Add(task);
    }

    // send events: alternate batches between the two streams
    List<object> events = CreateEvents(NumMessagesPerBatch);
    work.Add(Task.Factory.StartNew(() => Enumerable.Range(0, NumBatches)
        .Select(i => i % 2 == 0 ? streamId1 : streamId2)
        .ToList()
        .ForEach(streamId => adapter.QueueMessageBatchAsync(streamId, streamId.ToString(), events.Take(NumMessagesPerBatch).ToArray(), null, RequestContext.Export()).Wait())));
    await Task.WhenAll(work);

    // Make sure we got back everything we sent
    Assert.AreEqual(NumBatches, receivedBatches);

    // check to see if all the events are in the cache and we can enumerate through them
    StreamSequenceToken firstInCache = new EventSequenceToken(0);
    foreach (KeyValuePair<QueueId, HashSet<IStreamIdentity>> kvp in streamsPerQueue)
    {
        var receiver = receivers[kvp.Key];
        var qCache = caches[kvp.Key];
        foreach (IStreamIdentity streamGuid in kvp.Value)
        {
            // read all messages in cache for stream
            IQueueCacheCursor cursor = qCache.GetCacheCursor(streamGuid, firstInCache);
            int messageCount = 0;
            StreamSequenceToken tenthInCache = null;
            StreamSequenceToken lastToken = firstInCache;
            while (cursor.MoveNext())
            {
                Exception ex;
                messageCount++;
                IBatchContainer batch = cursor.GetCurrent(out ex);
                output.WriteLine("Token: {0}", batch.SequenceToken);
                // Tokens must come back in non-decreasing order.
                Assert.IsTrue(batch.SequenceToken.CompareTo(lastToken) >= 0, "order check for event {0}", messageCount);
                lastToken = batch.SequenceToken;
                // Remember the 10th token so we can re-cursor from it below.
                if (messageCount == 10)
                {
                    tenthInCache = batch.SequenceToken;
                }
            }
            output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
            // Batches alternate between two streams, so each stream gets half.
            Assert.AreEqual(NumBatches / 2, messageCount);
            Assert.IsNotNull(tenthInCache);

            // read all messages from the 10th
            cursor = qCache.GetCacheCursor(streamGuid, tenthInCache);
            messageCount = 0;
            while (cursor.MoveNext())
            {
                messageCount++;
            }
            output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
            const int expected = NumBatches / 2 - 10 + 1; // all except the first 10, including the 10th (10 + 1)
            Assert.AreEqual(expected, messageCount);
        }
    }
}
/// <summary>
/// Computes the signed distance between two tokens' sequence numbers
/// (positive when <paramref name="first"/> is ahead of <paramref name="second"/>).
/// </summary>
/// <param name="first">Token whose sequence number is the minuend.</param>
/// <param name="second">Token whose sequence number is the subtrahend.</param>
/// <returns>first.sequenceNumber - second.sequenceNumber.</returns>
internal static long Distance(EventSequenceToken first, EventSequenceToken second)
{
    // var keeps the subtraction in the field's own arithmetic type,
    // exactly as the original single-expression return did.
    var difference = first.sequenceNumber - second.sequenceNumber;
    return difference;
}
/// <summary>
/// Regression check: an EventSequenceToken built from the extreme sequence
/// number / event index values must still round-trip through the fallback
/// serializer.
/// </summary>
public void EventSequenceToken_VerifyStillUsingFallbackSerializer()
{
    // Inline the token construction — no local needed.
    VerifyUsingFallbackSerializer(new EventSequenceToken(long.MaxValue, int.MaxValue));
}
/// <summary>
/// Computes the signed distance between two tokens' sequence numbers
/// (positive when <paramref name="first"/> is ahead of <paramref name="second"/>).
/// </summary>
/// <param name="first">Token whose sequence number is the minuend.</param>
/// <param name="second">Token whose sequence number is the subtrahend.</param>
/// <returns>first.SequenceNumber - second.SequenceNumber.</returns>
internal static long Distance(EventSequenceToken first, EventSequenceToken second)
{
    // var keeps the subtraction in the property's own arithmetic type,
    // exactly as the original single-expression return did.
    var gap = first.SequenceNumber - second.SequenceNumber;
    return gap;
}
/// <summary>
/// Builds a Moq mock of IBatchContainer whose StreamGuid, StreamNamespace and
/// SequenceToken getters return the supplied values (the token wraps
/// <paramref name="sequenceNumber"/> in an EventSequenceToken).
/// </summary>
/// <param name="streamGuid">Value returned by the mock's StreamGuid getter.</param>
/// <param name="streamNamespace">Value returned by the mock's StreamNamespace getter.</param>
/// <param name="sequenceNumber">Sequence number wrapped into the mock's SequenceToken.</param>
/// <returns>The configured mock.</returns>
private Mock<IBatchContainer> GenerateBatchContainerMock(Guid streamGuid, string streamNamespace, int sequenceNumber)
{
    var token = new EventSequenceToken(sequenceNumber);
    var containerMock = new Mock<IBatchContainer>();
    // The three getter setups are independent of one another.
    containerMock.SetupGet(m => m.StreamGuid).Returns(streamGuid);
    containerMock.SetupGet(m => m.StreamNamespace).Returns(streamNamespace);
    containerMock.SetupGet(m => m.SequenceToken).Returns(token);
    return containerMock;
}
/// <summary>
/// Records that a deletion notification was received by bumping the call
/// counter; always reports the notification as handled.
/// </summary>
/// <param name="token">Token of the deleted entry (not inspected here).</param>
/// <returns>Always true.</returns>
public bool NotifiedDeletion(EventSequenceToken token)
{
    TimesCalled += 1;
    return true;
}
/// <summary>
/// Creates a cursor over <paramref name="cache"/> for the given stream,
/// positioned at <paramref name="sequenceToken"/> via the cache's GetCursor.
/// </summary>
/// <param name="cache">Backing cache; must not be null.</param>
/// <param name="streamGuid">Identity of the stream this cursor reads.</param>
/// <param name="streamNamespace">Namespace of the stream this cursor reads.</param>
/// <param name="sequenceToken">Starting position for the underlying cursor.</param>
/// <exception cref="ArgumentNullException">When <paramref name="cache"/> is null.</exception>
public SimpleQueueAdapterCacheCursor(IQueueAdapterCache cache, Guid streamGuid, string streamNamespace, EventSequenceToken sequenceToken)
{
    if (cache == null)
    {
        throw new ArgumentNullException("cache");
    }
    // Capture the stream identity first; field assignments are independent.
    this.streamGuid = streamGuid;
    this.streamNamespace = streamNamespace;
    this.cache = cache;
    // Must come after this.cache is set — GetCursor is called on the stored reference.
    cursor = this.cache.GetCursor(sequenceToken);
}
/// <summary>
/// Regression check: an EventSequenceToken built from the extreme sequence
/// number / event index values must still round-trip through the fallback
/// serializer helper in Tester.SerializationTests.
/// </summary>
public void EventSequenceToken_VerifyStillUsingFallbackSerializer()
{
    // Inline the token construction — no local needed.
    Tester.SerializationTests.SerializationTestsUtils.VerifyUsingFallbackSerializer(
        new EventSequenceToken(long.MaxValue, int.MaxValue));
}
/// <summary>
/// Creates a cache-miss exception for a requested sequence token that falls
/// outside the currently cached range, delegating to the string-based
/// constructor with the tokens' ToString() representations.
/// </summary>
/// <param name="requested">The token the caller asked for.</param>
/// <param name="low">Token at the low end of the reported cached range.</param>
/// <param name="high">Token at the high end of the reported cached range.</param>
public QueueAdapterCacheMissException(EventSequenceToken requested, EventSequenceToken low, EventSequenceToken high)
    : this(requested.ToString(), low.ToString(), high.ToString())
{
}