/// <summary>
/// Creates the sender's backing message stream. A default guid means
/// "generate a fresh stream id"; any other guid is used as-is.
/// </summary>
public InternalStreamMessageSender(IStreamProvider provider, Guid guid = default(Guid))
{
    if (guid == default(Guid))
    {
        guid = Guid.NewGuid();
    }

    _streamIdentity = new StreamIdentity(guid, StreamNamespacePrefix);
    _messageStream = provider.GetStream<IStreamMessage>(_streamIdentity.Guid, _streamIdentity.Namespace);
    _tearDownExecuted = false;
}
/// <summary>
/// Subscribe to a stream and remember the handle for later cleanup.
/// </summary>
/// <param name="streamIdentity">Stream to subscribe to.</param>
/// <returns></returns>
public async Task Subscribe(StreamIdentity streamIdentity)
{
    _tearDownExecuted = false;

    var stream = _streamProvider.GetStream<IStreamMessage>(streamIdentity.Guid, streamIdentity.Namespace);

    // Messages are dispatched through Visit; stream completion tears down.
    var handle = await stream.SubscribeAsync(
        async (message, token) => await Visit(message),
        async () => await TearDown());

    _streamHandles.Add(handle);
}
/// <summary>
/// Resolves the consumers subscribed to the given stream; an unknown
/// provider simply matches nothing.
/// </summary>
public static StreamConsumer[] Match(IActorSystem system, StreamIdentity stream)
{
    var specs = DictionaryExtensions.Find(configuration, stream.Provider)
                ?? Enumerable.Empty<StreamSubscriptionSpecification>();

    return Match(system, stream.Id, specs);
}
// Builds the async-daemon fetcher: captures configuration, picks the event
// selector matching the store's stream identity style, and precomputes the
// batched SQL used to page events out of mt_events.
public Fetcher(IDocumentStore store, DaemonSettings settings, AsyncOptions options, IDaemonLogger logger, IDaemonErrorHandler errorHandler, IEnumerable<Type> eventTypes)
{
    _settings = settings;
    _options = options;
    _logger = logger;
    _errorHandler = errorHandler;

    State = FetcherState.Waiting;

    // TODO -- this will have to change
    _tenant = store.Tenancy.Default;

    _streamIdentity = store.Events.StreamIdentity;

    // Guid-identified streams use the plain EventSelector; string-identified
    // streams need the string-keyed variant.
    _selector = store.Events.StreamIdentity == StreamIdentity.AsGuid
        ? (IEventSelector) new EventSelector(store.Events, store.Advanced.Serializer)
        : new StringIdentifiedEventSelector(store.Events, store.Advanced.Serializer);

    _map = new NulloIdentityMap(store.Advanced.Serializer);

    EventTypeNames = eventTypes.Select(x => store.Events.EventMappingFor(x).Alias).ToArray();

    // Four statements executed in one round trip: candidate seq_ids in the
    // page, the event rows themselves, the first seq_id beyond the page, and
    // the current high-water mark.
    // NOTE(review): age(...) yields an interval and is compared directly to
    // :buffer -- confirm the parameter is bound as an interval; another
    // version of this query uses extract(epoch from age(...)) >= :buffer
    // against a numeric seconds value.
    // NOTE(review): Replace(" as d", "") strips a column alias presumably
    // emitted by ToSelectClause -- confirm against that helper.
    _sql = $@" select seq_id from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id > :last and seq_id <= :limit and age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp) >= :buffer order by seq_id; {_selector.ToSelectClause(null)} where seq_id > :last and seq_id <= :limit and type = ANY(:types) and age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp) >= :buffer order by seq_id; select min(seq_id) from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id > :limit and type = ANY(:types) and age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp) >= :buffer; select max(seq_id) from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id >= :limit and age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp) >= :buffer ".Replace(" as d", "");
}
/// <summary>
/// Resolves the subscription matches for the given stream; an unknown
/// provider simply matches nothing.
/// </summary>
public static StreamSubscriptionMatch[] Match(IActorSystem system, StreamIdentity stream)
{
    var specs = configuration.Find(stream.Provider)
                ?? Enumerable.Empty<StreamSubscriptionSpecification>();

    return Match(system, stream.Id, specs);
}
/// <summary>
/// Rebuilds the stream position carried by a test queue message: identity
/// from its guid/namespace, ordering from its sequence number.
/// </summary>
public StreamPosition GetStreamPosition(TestQueueMessage queueMessage)
{
    IStreamIdentity identity = new StreamIdentity(queueMessage.StreamGuid, queueMessage.StreamNamespace);
    StreamSequenceToken token = new EventSequenceToken(queueMessage.SequenceNumber);
    return new StreamPosition(identity, token);
}
/// <summary>
/// Produces a stream position for this test adapter: every message gets a
/// brand-new stream identity (Guid.NewGuid) in the "EmptySpace" namespace,
/// with a token built from the adapter's running offset/sequence counters.
/// </summary>
public override StreamPosition GetStreamPosition(string partition, EventData queueMessage)
{
    // Fixed misspelled local: "steamIdentity" -> "streamIdentity".
    var streamIdentity = new StreamIdentity(Guid.NewGuid(), "EmptySpace");
    var sequenceToken = new EventHubSequenceTokenV2(this.eventHubOffset, this.sequenceNumberCounter++, this.eventIndex);
    return new StreamPosition(streamIdentity, sequenceToken);
}
// Builds the async-daemon fetcher: captures configuration, resolves event
// storage from the default tenant, and precomputes the batched SQL used to
// page events out of mt_events.
public Fetcher(IDocumentStore store, DaemonSettings settings, AsyncOptions options, IDaemonLogger logger, IDaemonErrorHandler errorHandler, IEnumerable<Type> eventTypes)
{
    _settings = settings;
    _options = options;
    _logger = logger;
    _errorHandler = errorHandler;

    State = FetcherState.Waiting;

    // TODO -- this will have to change
    _tenant = store.Tenancy.Default;

    _streamIdentity = store.Events.StreamIdentity;

    _selector = _tenant.EventStorage();

    EventTypeNames = eventTypes.Select(x => store.Events.EventMappingFor(x).Alias).ToArray();

    // Columns come from the tenant's event storage selector.
    var fields = _selector.SelectFields().Join(", ");

    // Four statements executed in one round trip: candidate seq_ids in the
    // page, the event rows themselves, the first seq_id beyond the page, and
    // the current high-water mark. :buffer is compared via
    // extract(epoch from age(...)), i.e. a minimum event age in seconds.
    // NOTE(review): Replace(" as d", "") strips a column alias presumably
    // emitted by SelectFields() -- confirm against that helper.
    _sql = $@" select seq_id from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id > :last and seq_id <= :limit and extract(epoch from age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp)) >= :buffer order by seq_id; select {fields} from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id > :last and seq_id <= :limit and type = ANY(:types) and extract(epoch from age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp)) >= :buffer order by seq_id; select min(seq_id) from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id > :limit and type = ANY(:types) and extract(epoch from age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp)) >= :buffer; select max(seq_id) from {_selector.Events.DatabaseSchemaName}.mt_events where seq_id >= :limit and extract(epoch from age(transaction_timestamp(), {_selector.Events.DatabaseSchemaName}.mt_events.timestamp)) >= :buffer ".Replace(" as d", "");
}
/// <summary>
/// Rebuilds the stream position from a test queue message. The namespace is
/// deliberately null for this adapter; the token is carried on the message.
/// </summary>
public StreamPosition GetStreamPosition(TestQueueMessage queueMessage)
{
    IStreamIdentity identity = new StreamIdentity(queueMessage.StreamGuid, null);
    return new StreamPosition(identity, queueMessage.SequenceToken);
}
/// <summary>
/// Maps an EventHub message onto a stream position: all messages share the
/// partition's stream guid (null namespace), ordered by sequence number.
/// </summary>
public override StreamPosition GetStreamPosition(EventData queueMessage)
{
    // Fixed misspelled local: "stremIdentity" -> "streamIdentity".
    IStreamIdentity streamIdentity = new StreamIdentity(partitionStreamGuid, null);
    StreamSequenceToken token = new EventSequenceToken(queueMessage.SequenceNumber, 0);
    return new StreamPosition(streamIdentity, token);
}
/// <summary>
/// Maps an EventHub message onto a stream position: all messages share the
/// partition's stream guid (null namespace); the token combines the message's
/// offset and sequence number.
/// </summary>
public override StreamPosition GetStreamPosition(EventData queueMessage)
{
    // Fixed misspelled local: "stremIdentity" -> "streamIdentity".
    IStreamIdentity streamIdentity = new StreamIdentity(partitionStreamGuid, null);
    StreamSequenceToken token = new EventHubSequenceTokenV2(queueMessage.SystemProperties.Offset, queueMessage.SystemProperties.SequenceNumber, 0);
    return new StreamPosition(streamIdentity, token);
}
/// <summary>
/// Creates the provider's backing message stream. A default guid means
/// "generate a fresh stream id"; any other guid is used as-is.
/// </summary>
public SingleStreamProvider(IStreamProvider provider, Guid guid = default(Guid))
{
    if (guid == default(Guid))
    {
        guid = Guid.NewGuid();
    }

    _streamIdentity = new StreamIdentity<T>(StreamNamespacePrefix, guid);

    var identifier = _streamIdentity.StreamIdentifier;
    _messageStream = provider.GetStream<IStreamMessage>(identifier.Item1, identifier.Item2);
    _tearDownExecuted = false;
}
/// <summary>
/// Subscribes this node to the given input stream. Incoming messages are
/// dispatched via the visitor pattern; stream completion triggers TearDown.
/// </summary>
public async Task SetInput(StreamIdentity<TIn> inputStream)
{
    _tearDownExecuted = false;

    var identifier = inputStream.StreamIdentifier;
    var stream = _streamProvider.GetStream<IStreamMessage>(identifier.Item1, identifier.Item2);

    _messageStreamSubscriptionHandle = await stream.SubscribeAsync(
        (message, token) => message.Accept(this),
        async () => await TearDown());
}
// Configures the document store for the given tenancy style and stream
// identity, with all documents multi-tenanted.
private void InitStore(TenancyStyle tenancyStyle, StreamIdentity streamIdentity = StreamIdentity.AsGuid)
{
    StoreOptions(opts =>
    {
        opts.Events.TenancyStyle = tenancyStyle;
        opts.Events.StreamIdentity = streamIdentity;
        opts.Policies.AllDocumentsAreMultiTenanted();
    }, true);
}
/// <summary>
/// Creates the provider's backing message stream. A default guid means
/// "generate a fresh stream id"; any other guid is used as-is.
/// </summary>
public SingleStreamProvider(IStreamProvider provider, Guid guid = default(Guid))
{
    guid = guid == default(Guid) ? Guid.NewGuid() : guid;
    _streamIdentity = new StreamIdentity<T>(StreamNamespacePrefix, guid);
    // Removed leftover Console.WriteLine debug tracing of the stream identifier.
    _messageStream = provider.GetStream<IStreamMessage>(_streamIdentity.StreamIdentifier.Guid, _streamIdentity.StreamIdentifier.Namespace);
    _tearDownExecuted = false;
}
/// <summary>
/// Gets the stream position from a queue message: the partition key carries
/// the stream guid, the namespace rides in a message property, and ordering
/// comes from the EventHub sequence number.
/// </summary>
/// <param name="queueMessage"></param>
/// <returns></returns>
public virtual StreamPosition GetStreamPosition(EventData queueMessage)
{
    Guid streamGuid = Guid.Parse(queueMessage.PartitionKey);
    string streamNamespace = queueMessage.GetStreamNamespaceProperty();
    // Fixed misspelled local: "stremIdentity" -> "streamIdentity".
    IStreamIdentity streamIdentity = new StreamIdentity(streamGuid, streamNamespace);
    StreamSequenceToken token = new EventSequenceToken(queueMessage.SequenceNumber, 0);
    return new StreamPosition(streamIdentity, token);
}
/// <summary>
/// Maps an EventHub message onto a stream position: all messages share the
/// partition's stream guid (null namespace). The sequence-number source
/// differs by target framework.
/// </summary>
public override StreamPosition GetStreamPosition(EventData queueMessage)
{
    // Fixed misspelled local: "stremIdentity" -> "streamIdentity".
    IStreamIdentity streamIdentity = new StreamIdentity(partitionStreamGuid, null);
    StreamSequenceToken token =
#if NETSTANDARD
        new EventSequenceTokenV2(queueMessage.SystemProperties.SequenceNumber, 0);
#else
        new EventSequenceTokenV2(queueMessage.SequenceNumber, 0);
#endif
    return new StreamPosition(streamIdentity, token);
}
/// <summary>
/// Subscribes this node to the given input stream. Incoming messages are
/// dispatched via the visitor pattern; stream completion triggers TearDown.
/// </summary>
public async Task SetInput(StreamIdentity<TIn> inputStream)
{
    _tearDownExecuted = false;
    // Removed leftover Console.WriteLine debug tracing of the stream identifier.
    var messageStream = _streamProvider.GetStream<IStreamMessage>(inputStream.StreamIdentifier.Guid, inputStream.StreamIdentifier.Namespace);

    // Completion callback kept as a named delegate ("await split" per the
    // original note) rather than an inline lambda.
    Func<Task> onCompleted = new Func<Task>(async () => await TearDown());
    _messageStreamSubscriptionHandle = await messageStream.SubscribeAsync((message, token) => message.Accept(this), onCompleted);
}
// Configures the document store for the given tenancy style and stream
// identity. Each tenancy style gets its own schema so runs don't collide.
private void InitStore(TenancyStyle tenancyStyle, StreamIdentity streamIdentity = StreamIdentity.AsGuid)
{
    var databaseSchema = $"{GetType().Name}_{tenancyStyle.ToString().ToLower()}";

    StoreOptions(opts =>
    {
        opts.Events.DatabaseSchemaName = databaseSchema;
        opts.Events.TenancyStyle = tenancyStyle;
        opts.Events.StreamIdentity = streamIdentity;
        opts.Policies.AllDocumentsAreMultiTenanted();
    });
}
/// <summary>
/// Buckets a flat event sequence into per-stream append actions, keyed by
/// guid or string depending on the store's stream identity style.
/// </summary>
public static StreamAction[] ToStreams(StreamIdentity streamIdentity, IEnumerable<IEvent> events)
{
    if (streamIdentity == StreamIdentity.AsGuid)
    {
        return events
            .GroupBy(e => e.StreamId)
            .Select(g => StreamAction.Append(g.Key, g.ToArray()))
            .ToArray();
    }

    return events
        .GroupBy(e => e.StreamKey)
        .Select(g => StreamAction.Append(g.Key, g.ToArray()))
        .ToArray();
}
/// <summary>
/// Duplicate of EventHub's EventData class.
/// </summary>
/// <param name="cachedMessage"></param>
public EventHubMessage(CachedEventHubMessage cachedMessage)
{
    // Fields are read back from cachedMessage.Segment in sequence, with
    // readOffset advanced by each ReadNext* call -- the read order here must
    // presumably mirror the order used when the message was cached; confirm
    // against the cache data adapter's write path.
    int readOffset = 0;
    StreamIdentity = new StreamIdentity(cachedMessage.StreamGuid, SegmentBuilder.ReadNextString(cachedMessage.Segment, ref readOffset));
    Offset = SegmentBuilder.ReadNextString(cachedMessage.Segment, ref readOffset);
    PartitionKey = SegmentBuilder.ReadNextString(cachedMessage.Segment, ref readOffset);
    SequenceNumber = cachedMessage.SequenceNumber;
    EnqueueTimeUtc = cachedMessage.EnqueueTimeUtc;
    DequeueTimeUtc = cachedMessage.DequeueTimeUtc;
    Properties = SegmentBuilder.ReadNextBytes(cachedMessage.Segment, ref readOffset).DeserializeProperties();
    Payload = SegmentBuilder.ReadNextBytes(cachedMessage.Segment, ref readOffset).ToArray();
}
/// <summary>
/// Emits the container's elements onto the given stream as one transactional
/// batch, then tears the sender down. Returns the transaction id it was given.
/// </summary>
public async Task<Guid> EnumerateToStream(StreamIdentity streamIdentity, Guid transactionId)
{
    var sender = SetupSenderStream(streamIdentity);

    await sender.StartTransaction(transactionId);
    var snapshot = Elements.ToList();
    await OutputProducer.SendMessage(new ItemMessage<ContainerElement<T>>(snapshot));
    await sender.EndTransaction(transactionId);

    await sender.TearDown();
    return transactionId;
}
/// <summary>
/// Switch the DocumentStore between stream identity styles, but reuse
/// the underlying document store
/// </summary>
/// <param name="identity"></param>
internal void UseStreamIdentity(StreamIdentity identity)
{
    _session = null;

    if (identity == StreamIdentity.AsGuid)
    {
        _store = _fixture.Store;
        return;
    }

    // String-identified streams use the fixture's alternate store and start
    // from a clean slate of event data.
    _store = _fixture.StringStreamIdentifiers.Value;
    _store.Advanced.Clean.DeleteAllEventData();
}
// Configures the document store for the given tenancy style and stream
// identity. Each combination gets its own schema, dropped up front so every
// test run starts clean.
private void InitStore(TenancyStyle tenancyStyle, StreamIdentity streamIdentity = StreamIdentity.AsGuid)
{
    var databaseSchema = $"{GetType().Name}_{tenancyStyle.ToString().ToLower()}_{streamIdentity}";

    using var conn = new NpgsqlConnection(ConnectionSource.ConnectionString);
    conn.Open();
    conn.DropSchema(databaseSchema).GetAwaiter().GetResult();

    StoreOptions(opts =>
    {
        opts.Events.DatabaseSchemaName = databaseSchema;
        opts.Events.TenancyStyle = tenancyStyle;
        opts.Events.StreamIdentity = streamIdentity;
        opts.Policies.AllDocumentsAreMultiTenanted();
    });
}
// Rehydrates an EventHubMessage from its pooled-cache representation.
// Fields are read back from cachedMessage.Segment in sequence, with
// readOffset advanced by each ReadNext* call -- the read order presumably
// mirrors the order used when the message was cached; confirm against the
// cache data adapter's write path.
public EventHubMessage(CachedEventHubMessage cachedMessage)
{
    int readOffset = 0;
    StreamIdentity = new StreamIdentity(cachedMessage.StreamGuid, SegmentBuilder.ReadNextString(cachedMessage.Segment, ref readOffset));
    Offset = SegmentBuilder.ReadNextString(cachedMessage.Segment, ref readOffset);
    SequenceNumber = cachedMessage.SequenceNumber;
    EnqueueTimeUtc = cachedMessage.EnqueueTimeUtc;
    DequeueTimeUtc = cachedMessage.DequeueTimeUtc;
    Properties = SegmentBuilder.ReadNextBytes(cachedMessage.Segment, ref readOffset).DeserializeProperties();
    // Unlike the segment-encoded fields, the partition key rides in the
    // deserialized property bag; a missing or non-string value yields null.
    object offsetObj;
    PartitionKey = Properties.TryGetValue("PartitionKey", out offsetObj) ? offsetObj as string : default(string);
    Payload = SegmentBuilder.ReadNextBytes(cachedMessage.Segment, ref readOffset).ToArray();
}
/// <summary>
/// Gets the stream position from a queue message: the partition key carries
/// the stream guid, the namespace rides in a message property, and the token
/// combines the message's offset and sequence number. Property access differs
/// by target framework.
/// </summary>
/// <param name="queueMessage"></param>
/// <returns></returns>
public virtual StreamPosition GetStreamPosition(EventData queueMessage)
{
    Guid streamGuid =
#if NETSTANDARD
        Guid.Parse(queueMessage.SystemProperties.PartitionKey);
#else
        Guid.Parse(queueMessage.PartitionKey);
#endif
    string streamNamespace = queueMessage.GetStreamNamespaceProperty();
    // Fixed misspelled local: "stremIdentity" -> "streamIdentity".
    IStreamIdentity streamIdentity = new StreamIdentity(streamGuid, streamNamespace);
    StreamSequenceToken token =
#if NETSTANDARD
        new EventHubSequenceTokenV2(queueMessage.SystemProperties.Offset, queueMessage.SystemProperties.SequenceNumber, 0);
#else
        new EventHubSequenceTokenV2(queueMessage.Offset, queueMessage.SequenceNumber, 0);
#endif
    return new StreamPosition(streamIdentity, token);
}
/// <summary>
/// Buckets a flat event sequence into per-stream EventStream instances,
/// keyed by guid or string depending on the store's stream identity style.
/// Events within each stream are ordered by version.
/// </summary>
public static EventStream[] ToStreams(StreamIdentity streamIdentity, IEnumerable<IEvent> events)
{
    if (streamIdentity == StreamIdentity.AsGuid)
    {
        return events
            .GroupBy(e => e.StreamId)
            .Select(g => new EventStream(g.Key, g.OrderBy(e => e.Version).ToArray(), false))
            .ToArray();
    }

    return events
        .GroupBy(e => e.StreamKey)
        .Select(g => new EventStream(g.Key, g.OrderBy(e => e.Version).ToArray(), false))
        .ToArray();
}
// Cache-miss coverage for PooledQueueCache:
//  * a cursor taken on an empty cache is usable and yields nothing,
//  * once the pool fills past that cursor, using it throws QueueCacheMissException,
//  * requesting a cursor for data older than the cache also throws,
//  * a purge (triggered by overfilling the pool) invalidates previously-good cursors.
public void QueueCacheMissTest()
{
    var bufferPool = new TestBlockPool();
    var dataAdapter = new TestCacheDataAdapter(bufferPool);
    var cache = new PooledQueueCache<TestQueueMessage, TestCachedMessage>(dataAdapter, TestCacheDataComparer.Instance);
    // Purge is wired back into the adapter so block recycling evicts cache entries.
    dataAdapter.PurgeAction = cache.Purge;
    int sequenceNumber = 10;
    IBatchContainer batch;

    IStreamIdentity streamId = new StreamIdentity(Guid.NewGuid(), StreamNamespace);

    // No data in cache, cursors should not throw.
    object cursor = cache.GetCursor(streamId, new EventSequenceToken(sequenceNumber++));
    Assert.IsNotNull(cursor);

    // try to iterate, should throw
    // NOTE(review): despite the comment above, the assertions below expect
    // no throw and no message -- empty-cache iteration returns false.
    bool gotNext = cache.TryGetNextMessage(cursor, out batch);
    Assert.IsNotNull(cursor);
    Assert.IsFalse(gotNext);

    // now add messages into cache newer than cursor
    // Adding enough to fill the pool
    for (int i = 0; i < MessagesPerBuffer * PooledBufferCount; i++)
    {
        cache.Add(new TestQueueMessage
        {
            StreamGuid = streamId.Guid,
            StreamNamespace = StreamNamespace,
            SequenceNumber = sequenceNumber++,
        }, DateTime.UtcNow);
    }

    // now that there is data, and the cursor should point to data older than in the cache, using cursor should throw
    Exception ex = null;
    try
    {
        cache.TryGetNextMessage(cursor, out batch);
    }
    catch (QueueCacheMissException cacheMissException)
    {
        ex = cacheMissException;
    }
    Assert.IsNotNull(ex);

    // Try getting new cursor into cache from data before the cache. Should throw
    ex = null;
    try
    {
        cursor = cache.GetCursor(streamId, new EventSequenceToken(10));
    }
    catch (QueueCacheMissException cacheMissException)
    {
        ex = cacheMissException;
    }
    Assert.IsNotNull(ex);

    // Get valid cursor into cache
    cursor = cache.GetCursor(streamId, new EventSequenceToken(13));
    // query once, to make sure cursor is good
    gotNext = cache.TryGetNextMessage(cursor, out batch);
    Assert.IsNotNull(cursor);
    Assert.IsTrue(gotNext);

    // Since pool should be full, adding one more message should trigger the cache to purge.
    cache.Add(new TestQueueMessage
    {
        StreamGuid = streamId.Guid,
        StreamNamespace = StreamNamespace,
        SequenceNumber = sequenceNumber++,
    }, DateTime.UtcNow);

    // After purge, use of cursor should throw.
    ex = null;
    try
    {
        cache.TryGetNextMessage(cursor, out batch);
    }
    catch (QueueCacheMissException cacheMissException)
    {
        ex = cacheMissException;
    }
    Assert.IsNotNull(ex);
}
// Golden-path walk of the pooled cache with two interleaved streams: fills
// the pool, verifies each stream's cursor sees exactly its half of the
// messages, then repeatedly refills and re-walks to show the cursors keep
// working as buffers are reused. Returns the next sequence number so callers
// can continue adding after this run.
private int RunGoldenPath(PooledQueueCache<TestQueueMessage, TestCachedMessage> cache, int startOfCache)
{
    int sequenceNumber = startOfCache;
    IBatchContainer batch;
    IStreamIdentity stream1 = new StreamIdentity(Guid.NewGuid(), StreamNamespace);
    IStreamIdentity stream2 = new StreamIdentity(Guid.NewGuid(), StreamNamespace);

    // now add messages into cache newer than cursor
    // Adding enough to fill the pool
    for (int i = 0; i < MessagesPerBuffer * PooledBufferCount; i++)
    {
        // Messages alternate between the two streams.
        cache.Add(new TestQueueMessage
        {
            StreamGuid = i % 2 == 0 ? stream1.Guid : stream2.Guid,
            StreamNamespace = StreamNamespace,
            SequenceNumber = sequenceNumber++,
        }, DateTime.UtcNow);
    }

    // get cursor for stream1, walk all the events in the stream using the cursor
    object stream1Cursor = cache.GetCursor(stream1, new EventSequenceToken(startOfCache));
    int stream1EventCount = 0;
    while (cache.TryGetNextMessage(stream1Cursor, out batch))
    {
        Assert.IsNotNull(stream1Cursor);
        Assert.IsNotNull(batch);
        Assert.AreEqual(stream1.Guid, batch.StreamGuid);
        Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
        Assert.IsNotNull(batch.SequenceToken);
        stream1EventCount++;
    }
    // Alternating adds mean each stream owns half of everything added so far.
    Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream1EventCount);

    // get cursor for stream2, walk all the events in the stream using the cursor
    object stream2Cursor = cache.GetCursor(stream2, new EventSequenceToken(startOfCache));
    int stream2EventCount = 0;
    while (cache.TryGetNextMessage(stream2Cursor, out batch))
    {
        Assert.IsNotNull(stream2Cursor);
        Assert.IsNotNull(batch);
        Assert.AreEqual(stream2.Guid, batch.StreamGuid);
        Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
        Assert.IsNotNull(batch.SequenceToken);
        stream2EventCount++;
    }
    Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream2EventCount);

    // Add a blocks worth of events to the cache, then walk each cursor. Do this enough times to fill the cache twice.
    for (int j = 0; j < PooledBufferCount * 2; j++)
    {
        for (int i = 0; i < MessagesPerBuffer; i++)
        {
            cache.Add(new TestQueueMessage
            {
                StreamGuid = i % 2 == 0 ? stream1.Guid : stream2.Guid,
                StreamNamespace = StreamNamespace,
                SequenceNumber = sequenceNumber++,
            }, DateTime.UtcNow);
        }

        // walk all the events in the stream using the cursor
        while (cache.TryGetNextMessage(stream1Cursor, out batch))
        {
            Assert.IsNotNull(stream1Cursor);
            Assert.IsNotNull(batch);
            Assert.AreEqual(stream1.Guid, batch.StreamGuid);
            Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
            Assert.IsNotNull(batch.SequenceToken);
            stream1EventCount++;
        }
        Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream1EventCount);

        // walk all the events in the stream using the cursor
        while (cache.TryGetNextMessage(stream2Cursor, out batch))
        {
            Assert.IsNotNull(stream2Cursor);
            Assert.IsNotNull(batch);
            Assert.AreEqual(stream2.Guid, batch.StreamGuid);
            Assert.AreEqual(StreamNamespace, batch.StreamNamespace);
            Assert.IsNotNull(batch.SequenceToken);
            stream2EventCount++;
        }
        Assert.AreEqual((sequenceNumber - startOfCache) / 2, stream2EventCount);
    }
    return (sequenceNumber);
}
/// <summary>
/// Builds a message sender bound to this node's stream provider and the
/// target stream identity.
/// </summary>
protected StreamMessageSender<ContainerElement<T>> SetupSenderStream(StreamIdentity streamIdentity)
{
    var provider = GetStreamProvider(StreamProviderName);
    return new StreamMessageSender<ContainerElement<T>>(provider, streamIdentity);
}