/// <summary>
/// Rebuilds aggregate state <typeparamref name="T"/> by folding every event of
/// <paramref name="stream"/> from the start into a fresh state instance.
/// </summary>
/// <exception cref="Exceptions.StreamNotFound">Thrown when the stream does not exist.</exception>
public async Task<T> LoadState<T, TId>(StreamName stream, CancellationToken cancellationToken)
    where T : AggregateState<T, TId>, new()
    where TId : AggregateId {
    var state = new T();

    try {
        await _eventStore.ReadStream(stream, StreamReadPosition.Start, Fold, cancellationToken).Ignore();
    }
    catch (Exceptions.StreamNotFound) {
        // Fix: the caught instance was previously bound to an unused variable (CS0168).
        // We rethrow a new exception carrying the requested stream name, as before.
        throw new Exceptions.StreamNotFound(stream);
    }

    return state;

    void Fold(StreamEvent streamEvent) {
        var evt = Deserialize(streamEvent);
        if (evt == null) return; // unmapped/unknown event payloads are skipped
        state = state.When(evt);
    }
}
/// <summary>
/// Appends the given events to an Elasticsearch index as persisted-event documents
/// using a single bulk request, waiting for a refresh so reads see the data.
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="events"/> is empty.</exception>
/// <exception cref="ApplicationException">Thrown when the bulk request is rejected.</exception>
public async Task<AppendEventsResult> AppendEvents(
    StreamName stream,
    ExpectedStreamVersion expectedVersion,
    IReadOnlyCollection<StreamEvent> events,
    CancellationToken cancellationToken
) {
    var streamName = stream.ToString();
    var documents  = events.Select(AsDocument).ToArray();

    // Fix: the original called documents.Last() unconditionally, which throws an
    // unhelpful InvalidOperationException on an empty collection.
    if (documents.Length == 0)
        throw new ArgumentException("At least one event is required", nameof(events));

    var bulk   = new BulkDescriptor(_options.IndexName).CreateMany(documents).Refresh(Refresh.WaitFor);
    var result = await _client.BulkAsync(bulk, cancellationToken);

    return result.IsValid
        ? new AppendEventsResult(0, documents[^1].StreamPosition + 1)
        : throw new ApplicationException($"Unable to add events: {result.DebugInformation}");

    PersistedEvent AsDocument(StreamEvent evt)
        => new(
            evt.Id.ToString(),
            TypeMap.Instance.GetTypeName(evt.Payload!),
            evt.Position + 1,
            evt.ContentType,
            streamName,
            (ulong)evt.Position + 1,
            evt.Payload,
            evt.Metadata.ToHeaders(),
            DateTime.UtcNow // fix: persist timestamps in UTC, not server-local time
        );
}
/// <summary>
/// Resolves a channel from a stream name whose Name part is the channel GUID.
/// Waits up to 10 seconds for the channel's content type and returns null unless
/// the channel is ready and serves FLV.
/// </summary>
private async Task<Channel> RequestChannel(StreamName stream_name, CancellationToken cancel_token)
{
    if (!Guid.TryParse(stream_name.Name, out var channel_id)) {
        return null;
    }
    // Fix: single dictionary lookup instead of ContainsKey + indexer.
    var tracker_uri = stream_name.Parameters.TryGetValue("tip", out var tip)
        ? OutputStreamBase.CreateTrackerUri(channel_id, tip)
        : null;
    var channel = owner.RequestChannel(channel_id, tracker_uri);
    if (channel == null) {
        return null;
    }
    await Task.WhenAny(
        // Fix: pass the cancellation token so the 10s timer is torn down when the
        // caller cancels instead of lingering until it elapses.
        Task.Delay(10000, cancel_token),
        channel.WaitForReadyContentTypeAsync(cancel_token)).ConfigureAwait(false);
    if (channel.ChannelInfo == null ||
        String.IsNullOrEmpty(channel.ChannelInfo.ContentType) ||
        channel.ChannelInfo.ContentType != "FLV") {
        return null;
    }
    return channel;
}
/// <summary>
/// Reads up to <paramref name="count"/> events forward from <paramref name="start"/>
/// in <paramref name="stream"/>, wrapping failures in <see cref="ReadFromStreamException"/>.
/// </summary>
public Task<StreamEvent[]> ReadEvents(
    StreamName stream,
    StreamReadPosition start,
    int count,
    CancellationToken cancellationToken
) {
    var readCall = _client.ReadStreamAsync(
        Direction.Forwards,
        stream,
        start.AsStreamPosition(),
        count,
        cancellationToken: cancellationToken
    );

    // Materialize the resolved events and convert them to stream events.
    async Task<StreamEvent[]> Execute() {
        var resolved = await readCall.ToArrayAsync(cancellationToken).NoContext();
        return ToStreamEvents(resolved);
    }

    return TryExecute(
        Execute,
        stream,
        () => new ErrorInfo(
            "Unable to read {Count} starting at {Start} events from {Stream}",
            count,
            start,
            stream
        ),
        (s, ex) => new ReadFromStreamException(s, ex)
    );
}
/// <summary>
/// Reads up to <paramref name="count"/> events backwards from the end of
/// <paramref name="stream"/> (resolving links), wrapping failures in
/// <see cref="ReadFromStreamException"/>.
/// </summary>
public Task<StreamEvent[]> ReadEventsBackwards(
    StreamName stream,
    int count,
    CancellationToken cancellationToken
) {
    var readCall = _client.ReadStreamAsync(
        Direction.Backwards,
        stream,
        StreamPosition.End,
        count,
        resolveLinkTos: true,
        cancellationToken: cancellationToken
    );

    // Materialize the resolved events and convert them to stream events.
    async Task<StreamEvent[]> Execute() {
        var resolved = await readCall.ToArrayAsync(cancellationToken).NoContext();
        return ToStreamEvents(resolved);
    }

    return TryExecute(
        Execute,
        stream,
        () => new ErrorInfo(
            "Unable to read {Count} events backwards from {Stream}",
            count,
            stream
        ),
        (s, ex) => new ReadFromStreamException(s, ex)
    );
}
/// <summary>
/// After saving 5 events (the snapshot threshold) and truncating the stream,
/// reloading must restore the aggregate from the snapshot at version 5.
/// </summary>
public async Task read_snapshot_when_loading_aggregate() {
    var now              = DateTime.UtcNow;
    var esStore          = new EsEventStore(GetEventStoreClient(), "snapshot_test");
    var esAggregateStore = new EsAggregateStore(esStore, 5);
    var aggregate        = new Day();

    // Five consecutive 10-minute slots starting at "now".
    var slots = Enumerable.Range(0, 5)
        .Select(i => new ScheduledSlot(TimeSpan.FromMinutes(10), now.AddMinutes(i * 10)))
        .ToList();

    aggregate.Schedule(new DoctorId(Guid.NewGuid()), DateTime.UtcNow, slots, Guid.NewGuid);

    await esAggregateStore.Save(
        aggregate,
        new CommandMetadata(new CorrelationId(Guid.NewGuid()), new CausationId(Guid.NewGuid())));

    // Drop the raw events so only the snapshot can satisfy the reload.
    await esStore.TruncateStream(
        StreamName.For<Day>(aggregate.Id),
        Convert.ToUInt64(aggregate.GetChanges().Count()));

    var reloadedAggregate = await esAggregateStore.Load<Day>(aggregate.Id);

    Assert.Equal(5, reloadedAggregate.Version);
}
/// <summary>
/// Saving 5 events (the snapshot threshold) must cause a snapshot to be written
/// for the aggregate's stream.
/// </summary>
public async Task write_snapshot_if_threshold_reached() {
    var now              = DateTime.UtcNow;
    var esStore          = new EsEventStore(GetEventStoreClient(), "snapshot_test");
    var esAggregateStore = new EsAggregateStore(esStore, 5);
    var aggregate        = new Day();

    // Five consecutive 10-minute slots starting at "now".
    var slots = Enumerable.Range(0, 5)
        .Select(i => new ScheduledSlot(TimeSpan.FromMinutes(10), now.AddMinutes(i * 10)))
        .ToList();

    aggregate.Schedule(new DoctorId(Guid.NewGuid()), DateTime.UtcNow, slots, Guid.NewGuid);

    await esAggregateStore.Save(
        aggregate,
        new CommandMetadata(new CorrelationId(Guid.NewGuid()), new CausationId(Guid.NewGuid())));

    var snapshotEnvelope = await esStore.LoadSnapshot(StreamName.For<Day>(aggregate.Id));
    var snapshot         = snapshotEnvelope?.Snapshot as DaySnapshot;

    Assert.NotNull(snapshot);
}
// Stub implementation: `default!` on Task<long> returns a null Task, so any caller
// that awaits this will hit a NullReferenceException at runtime.
// NOTE(review): presumably a placeholder for an interface member — confirm callers
// never invoke it, or replace with Task.FromResult(0L) / throw new NotImplementedException().
public Task <long> ReadStream( StreamName stream, StreamReadPosition start, int count, Action <StreamEvent> callback, CancellationToken cancellationToken ) => default !;
// Truncates the stream by writing stream metadata with `truncateBefore` set to the
// requested position. `AnyOrNot` picks between the two callbacks: the first when the
// expected version is "any", the second when a specific revision must match.
// Errors are wrapped into TruncateStreamException by TryExecute, with a structured
// ErrorInfo message for logging.
// NOTE(review): the inline lambdas feed generic type inference of AnyOrNot/TryExecute —
// kept verbatim on purpose.
public Task TruncateStream( StreamName stream, StreamTruncatePosition truncatePosition, ExpectedStreamVersion expectedVersion, CancellationToken cancellationToken ) { var meta = new StreamMetadata(truncateBefore: truncatePosition.AsStreamPosition()); return(TryExecute( () => AnyOrNot( expectedVersion, () => _client.SetStreamMetadataAsync( stream, StreamState.Any, meta, cancellationToken: cancellationToken ), () => _client.SetStreamMetadataAsync( stream, expectedVersion.AsStreamRevision(), meta, cancellationToken: cancellationToken ) ), stream, () => new ErrorInfo( "Unable to truncate stream {Stream} at {Position}", stream, truncatePosition ), (s, ex) => new TruncateStreamException(s, ex) )); }
/// <summary>
/// Appending an ImportBooking event must result in a matching BookingDocument
/// being projected into Mongo.
/// </summary>
public async Task ShouldProjectImported() {
    var evt    = DomainFixture.CreateImportBooking();
    var stream = StreamName.For<Booking>(evt.BookingId);

    var streamEvent = new StreamEvent(Guid.NewGuid(), evt, new Metadata(), "application/json", 0);

    var append = await Instance.EventStore.AppendEvents(
        stream,
        ExpectedStreamVersion.Any,
        new[] { streamEvent },
        CancellationToken.None
    );

    // Give the projection some time to catch up before asserting.
    await Task.Delay(500);

    var expected = new BookingDocument(evt.BookingId) {
        RoomId       = evt.RoomId,
        CheckInDate  = evt.CheckIn,
        CheckOutDate = evt.CheckOut,
        Position     = append.GlobalPosition
    };

    var actual = await Instance.Mongo.LoadDocument<BookingDocument>(evt.BookingId);

    actual.Should().Be(expected);
}
/// <summary>
/// Forwards engine output to the Jupyter client as a "stream" IoPub message,
/// replying to <paramref name="parent"/>. Failures are logged and swallowed so
/// output problems never take down the kernel.
/// </summary>
private void WriteToStream(Message parent, StreamName stream, string text)
{
    try
    {
        var message = new Message
        {
            Header  = new MessageHeader { MessageType = "stream" },
            Content = new StreamContent { StreamName = stream, Text = text }
        };
        // Send the engine's output to stdout.
        this.ShellServer.SendIoPubMessage(message.AsReplyTo(parent));
    }
    catch (Exception e)
    {
        this.Logger?.LogError(e, "Unexpected error when trying to write to stream.");
    }
}
/// <summary>
/// Records <paramref name="event"/> against <paramref name="stream"/>, assigning a
/// freshly generated message id and no metadata.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="event"/> is null.</exception>
public RecordedEvent(StreamName stream, object @event) { Stream = stream; Event = @event ?? throw new ArgumentNullException(nameof(@event)); MessageId = Guid.NewGuid(); Metadata = null; }
/// <summary>
/// Internal constructor allowing an explicit message id and metadata, used when
/// rehydrating or transforming an existing recorded event.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="event"/> is null.</exception>
private RecordedEvent(StreamName stream, object @event, Guid messageId, object metadata) { Stream = stream; Event = @event ?? throw new ArgumentNullException(nameof(@event)); MessageId = messageId; Metadata = metadata; }
/// <summary>
/// Loads aggregate <typeparamref name="T"/> with the given id by replaying its
/// event stream from the start.
/// </summary>
/// <exception cref="Exceptions.AggregateNotFound{T}">Thrown when the stream does not exist.</exception>
public async Task<T> Load<T>(string id) where T : Aggregate, new() {
    Ensure.NotEmptyString(id, nameof(id));

    var stream    = StreamName.For<T>(id);
    var aggregate = new T();

    try {
        await _eventStore.ReadStream(stream, StreamReadPosition.Start, Fold);
    }
    catch (Exceptions.StreamNotFound e) {
        // Surface a domain-specific error, preserving the original as inner exception.
        throw new Exceptions.AggregateNotFound<T>(id, e);
    }

    return aggregate;

    void Fold(StreamEvent streamEvent) {
        var evt = Deserialize(streamEvent);

        if (evt != null) {
            aggregate!.Fold(evt);
        }
    }

    object? Deserialize(StreamEvent streamEvent)
        => _serializer.Deserialize(streamEvent.Data.AsSpan(), streamEvent.EventType);
}
/// <summary>
/// Loads aggregate <typeparamref name="T"/> with the given id by replaying its
/// event stream from the start, honoring <paramref name="cancellationToken"/>.
/// </summary>
/// <exception cref="Exceptions.AggregateNotFound{T}">Thrown when the stream does not exist.</exception>
public async Task<T> Load<T>(string id, CancellationToken cancellationToken) where T : Aggregate, new() {
    Ensure.NotEmptyString(id, nameof(id));

    var stream    = StreamName.For<T>(id);
    var aggregate = new T();

    try {
        await _eventStore.ReadStream(stream, StreamReadPosition.Start, Fold, cancellationToken).Ignore();
    }
    catch (Exceptions.StreamNotFound e) {
        // Surface a domain-specific error, preserving the original as inner exception.
        throw new Exceptions.AggregateNotFound<T>(id, e);
    }

    return aggregate;

    void Fold(StreamEvent streamEvent) {
        var evt = Deserialize(streamEvent);

        if (evt != null) {
            aggregate!.Fold(evt);
        }
    }
}
/// <summary>Verifies StreamName.EndsWith against the expected outcome.</summary>
public void EndsWithReturnsExpectedResult(string name, string suffix, bool expected)
{
    // Arrange
    var sut = new StreamName(name);

    // Act
    var actual = sut.EndsWith(suffix);

    // Assert
    Assert.Equal(expected, actual);
}
/// <summary>Verifies StreamName.WithoutSuffix strips the suffix as expected.</summary>
public void WithoutSuffixReturnsExpectedResult(string name, string suffix, string expected)
{
    // Arrange
    var sut = new StreamName(name);

    // Act
    var actual = sut.WithoutSuffix(suffix);

    // Assert
    Assert.Equal(new StreamName(expected), actual);
}
/// <summary>Verifies StreamName.StartsWith against the expected outcome.</summary>
public void StartsWithReturnsExpectedResult(string name, string prefix, bool expected)
{
    // Arrange
    var sut = new StreamName(name);

    // Act
    var actual = sut.StartsWith(prefix);

    // Assert
    Assert.Equal(expected, actual);
}
/// <summary>
/// Bulk-writes the given messages to the Elasticsearch index named after the stream.
/// Version conflicts are logged as warnings (expected on retries); any other bulk
/// failure is rethrown.
/// </summary>
protected override async Task ProduceMessages(
    StreamName stream,
    IEnumerable<ProducedMessage> messages,
    ElasticProduceOptions? options,
    CancellationToken cancellationToken = default
) {
    var documents = messages.Select(x => x.Message);
    var mode      = options?.ProduceMode ?? ProduceMode.Create;
    var bulk      = GetOp(new BulkDescriptor(stream.ToString()));
    var result    = await _elasticClient.BulkAsync(bulk, cancellationToken);

    if (!result.IsValid) {
        if (result.DebugInformation.Contains("version conflict")) {
            // Duplicate writes surface as version conflicts; don't fail the producer.
            SubscriptionsEventSource.Log.Warn("ElasticProducer: version conflict");
        }
        else {
            throw result.OriginalException
               ?? throw new InvalidOperationException(result.DebugInformation);
        }
    }

    BulkDescriptor GetOp(BulkDescriptor descriptor)
        => mode switch {
            ProduceMode.Create => descriptor.CreateMany(documents),
            ProduceMode.Index  => descriptor.IndexMany(documents),
            // Fix: include the parameter name and offending value so an unsupported
            // mode is diagnosable (the original threw a bare ArgumentOutOfRangeException).
            _ => throw new ArgumentOutOfRangeException(nameof(mode), mode, "Unsupported produce mode")
        };
    }
}
/// <summary>
/// Parses "name.format?key=value&amp;..." into a StreamName. When the overall pattern
/// does not match, the raw input becomes the name unchanged.
/// </summary>
public static StreamName Parse(string str)
{
    var result = new StreamName();
    var match  = namePattern.Match(str);

    if (!match.Success) {
        result.Name = str;
        return result;
    }

    result.Name = Uri.UnescapeDataString(match.Groups["name"].Value);

    if (match.Groups["format"].Success) {
        result.Format = Uri.UnescapeDataString(match.Groups["format"].Value);
    }

    if (match.Groups["params"].Success) {
        foreach (var pair in match.Groups["params"].Value.Split('&')) {
            var sep = pair.IndexOf('=');
            if (sep < 0) {
                continue; // entries without '=' are ignored
            }
            var key = Uri.UnescapeDataString(pair.Substring(0, sep));
            var val = Uri.UnescapeDataString(pair.Substring(sep + 1));
            result.Parameters[key] = val;
        }
    }

    return result;
}
/// <summary>
/// Combines the hash codes of StreamName, StartPartitionId and LimitPartitions using
/// the conventional 31-multiplier scheme (matching the Java-style hashCode this type
/// was ported with). Overflow wraps silently, which is fine for hashing.
/// </summary>
public override int GetHashCode() { var result = StreamName.GetHashCode(); result = 31 * result + StartPartitionId.GetHashCode(); result = 31 * result + LimitPartitions; return(result); }
/// <summary>
/// Appends the group's pending events to its "groups-{id}" stream and records the
/// next expected version back onto the event source.
/// </summary>
public async Task Save(Group @group, Guid causation, Guid correlation, Metadata metadata, CancellationToken ct = default(CancellationToken))
{
    var stream = new StreamName($"groups-{group.Id.ToGuid():N}");

    var result = await _writer.WriteStreamAsync(stream, group, causation, correlation, metadata, ct);

    // CQS violation - only required if an event source instance is to handle
    // multiple commands or is kept in memory.
    ((IEventSource)group).ExpectedVersion = result.NextExpectedVersion;
}
// Loads aggregate T by reading the "hot" event store first; when the hot stream is
// empty or has been truncated (its first event's Position > 0), the archive reader is
// consulted as well. The two sources may overlap, so events are de-duplicated via
// Comparer before being folded, in order, into a factory-created aggregate instance.
// StreamNotFound from either reader is treated as "no events"; other read failures are
// logged and rethrown. When no events exist at all and failIfNotFound is set, an
// AggregateNotFoundException<T> is raised.
// NOTE(review): the third argument of reader.ReadStream is `failIfNotFound` — confirm
// the IEventReader overload takes a bool there (it is not an event count).
async Task <T> LoadInternal <T>(StreamName streamName, bool failIfNotFound, CancellationToken cancellationToken) where T : Aggregate { var aggregate = _factoryRegistry.CreateInstance <T>(); var hotEvents = await LoadStreamEvents(_eventStore, StreamReadPosition.Start); var archivedEvents = hotEvents.Length == 0 || hotEvents[0].Position > 0 ? await LoadStreamEvents(_archiveReader, StreamReadPosition.Start) : Enumerable.Empty <StreamEvent>(); var streamEvents = hotEvents.Concat(archivedEvents).Distinct(Comparer).ToArray(); if (streamEvents.Length == 0 && failIfNotFound) { throw new AggregateNotFoundException <T>(streamName, new StreamNotFound(streamName)); } foreach (var streamEvent in streamEvents) { Fold(streamEvent); } return(aggregate); void Fold(StreamEvent streamEvent) { var evt = streamEvent.Payload; if (evt == null) { return; } aggregate.Fold(evt); } async Task <StreamEvent[]> LoadStreamEvents(IEventReader reader, StreamReadPosition start) { try { return(await reader.ReadStream( streamName, start, failIfNotFound, cancellationToken )); } catch (StreamNotFound) { return(Array.Empty <StreamEvent>()); } catch (Exception e) { Log.UnableToLoadAggregate <T>(streamName, e); throw; } } }
/// <summary>
/// Appends the given events (recorded against <paramref name="stream"/>) to the
/// scenario's givens and returns the resulting scenario state.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="events"/> is null.</exception>
IScenarioGivenEventsStateBuilder IScenarioGivenEventsStateBuilder.Given(StreamName stream, params object[] events)
{
    if (events == null)
        throw new ArgumentNullException(nameof(events));

    var givens = _givens.AddRange(Transform(stream, events));
    return new Scenario(givens, _when, _thens, _result);
}
/// <summary>Indicates whether this instance and a specified object are equal.</summary>
/// <param name="obj">The object to compare with the current instance.</param>
/// <returns>
/// true if <paramref name="obj" /> and this instance are the same type and represent the same value; otherwise, false.
/// </returns>
public override bool Equals(object obj)
{
    // Stream names are compared case-insensitively (ordinal), sizes exactly.
    return obj is AlternateDataStreamInfo other
        && StreamName.Equals(other.StreamName, StringComparison.OrdinalIgnoreCase)
        && Size.Equals(other.Size);
}
/// <inheritdoc /> public async Task Produce( StreamName stream, IEnumerable <ProducedMessage> messages, TProduceOptions?options, CancellationToken cancellationToken = default ) { var(activity, msgs) = ProducerActivity.Start(messages, DefaultTags); if (activity is { IsAllDataRequested : true })
/// <summary>Indicates whether this instance and a specified object are equal.</summary>
/// <param name="obj">The object to compare with the current instance.</param>
/// <returns>
/// true if <paramref name="obj" /> and this instance are the same type and represent the same value; otherwise, false.
/// </returns>
public override bool Equals(object obj)
{
    if (obj is AlternateDataStreamInfo other)
    {
        // Fix (CA1307): the original used the culture-sensitive default string
        // comparison. NTFS alternate data stream names are case-insensitive, so
        // compare ordinally ignoring case.
        return StreamName.Equals(other.StreamName, StringComparison.OrdinalIgnoreCase)
            && Size.Equals(other.Size);
    }
    return false;
}
/// <summary>
/// Records each event against <paramref name="stream"/> and feeds the resulting
/// RecordedEvents into the builder's Given step.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="events"/> is null.</exception>
public static IScenarioGivenStateBuilder Given(
    this IScenarioInitialStateBuilder builder,
    StreamName stream,
    params object[] events)
{
    if (events == null)
    {
        throw new ArgumentNullException(nameof(events));
    }

    var recorded = events.Select(@event => new RecordedEvent(stream, @event));
    return builder.Given(recorded);
}
/// <summary>
/// Reads the "groups-{id}" stream and returns the group when found, or null when it
/// never existed. Throws when the group has been deleted.
/// </summary>
/// <exception cref="GroupDeletedException">Thrown when the group's stream was deleted.</exception>
public async Task<Group> TryLoadById(GroupIdentifier identifier, CancellationToken ct = default(CancellationToken))
{
    var stream = new StreamName($"groups-{identifier.ToGuid():N}");
    var result = await _reader.ReadStreamAsync(stream, ct);

    switch (result.State)
    {
        case ReadResultState.Deleted:
            throw new GroupDeletedException(identifier);
        case ReadResultState.Found:
            return (Group)result.Value;
        default:
            return null;
    }
}
/// <summary>
/// Captures the dependencies needed to measure a single stream subscription:
/// the subscription id, the observed stream, the client used to read the stream,
/// and a callback returning the last processed position (null when none yet).
/// </summary>
public StreamSubscriptionMeasure( string subscriptionId, StreamName streamName, EventStoreClient eventStoreClient, Func <EventPosition?> getLast ) { _subscriptionId = subscriptionId; _streamName = streamName; _eventStoreClient = eventStoreClient; _getLast = getLast; }
/// <summary>
/// Splits "name?key=value&amp;..." into a StreamName. Without a '?' the raw input is
/// the name unchanged; with one, name and parameters are URL-unescaped.
/// </summary>
public static StreamName Parse(string str)
{
  var result = new StreamName();
  var param_begin = str.IndexOf('?');

  if (param_begin < 0) {
    // No query part: the whole input is the name.
    result.Name = str;
    return result;
  }

  result.Name = Uri.UnescapeDataString(str.Substring(0, param_begin));

  var pairs = str.Substring(param_begin + 1).Split('&');
  foreach (var pair in pairs) {
    var sep = pair.IndexOf('=');
    if (sep < 0) continue; // entries without '=' are skipped
    var key = Uri.UnescapeDataString(pair.Substring(0, sep));
    var val = Uri.UnescapeDataString(pair.Substring(sep + 1));
    result.Parameters[key] = val;
  }

  return result;
}
/// <summary>
/// Verifies StreamName equality (same column+kind) and ordering (by column, then
/// kind; null sorts after everything).
/// </summary>
public void test1()
{
    StreamName s1  = new StreamName(3, OrcProto.Stream.Types.Kind.DATA);
    StreamName s2  = new StreamName(3, OrcProto.Stream.Types.Kind.DICTIONARY_DATA);
    StreamName s3  = new StreamName(5, OrcProto.Stream.Types.Kind.DATA);
    StreamName s4  = new StreamName(5, OrcProto.Stream.Types.Kind.DICTIONARY_DATA);
    StreamName s1p = new StreamName(3, OrcProto.Stream.Types.Kind.DATA);

    // Fix (xUnit2004): use Assert.True/Assert.False instead of Assert.Equal(bool, ...).
    Assert.True(s1.Equals(s1));
    Assert.False(s1.Equals(s2));
    Assert.False(s1.Equals(s3));
    Assert.True(s1.Equals(s1p));
    Assert.True(s1.CompareTo(null) < 0);
    Assert.False(s1.Equals(null));
    Assert.True(s1.CompareTo(s2) < 0);
    Assert.True(s2.CompareTo(s3) < 0);
    Assert.True(s3.CompareTo(s4) < 0);
    Assert.True(s4.CompareTo(s1p) > 0);
    Assert.Equal(0, s1p.CompareTo(s1));
}
// Parses "name.format?key=value&..." into a StreamName using the class regex
// `namePattern`. When the pattern does not match, the raw input becomes the name
// unchanged (no unescaping). Matched name/format and each parameter key/value are
// URL-unescaped; query entries without '=' are skipped.
public static StreamName Parse(string str) { var result = new StreamName(); var match = namePattern.Match(str); if (!match.Success) { result.Name = str; return result; } result.Name = Uri.UnescapeDataString(match.Groups["name"].Value); if (match.Groups["format"].Success) { result.Format = Uri.UnescapeDataString(match.Groups["format"].Value); } if (match.Groups["params"].Success) { var params_str = match.Groups["params"].Value; foreach (var param_str in params_str.Split('&')) { var idx = param_str.IndexOf('='); if (idx<0) continue; var key = Uri.UnescapeDataString(param_str.Substring(0, idx)); var val = Uri.UnescapeDataString(param_str.Substring(idx+1)); result.Parameters[key] = val; } } return result; }
/// <summary>
/// Resolves a channel from a stream name whose Name part is the channel GUID,
/// polling up to ~10 seconds for the channel's content type. Returns null unless
/// the channel is ready and serves FLV.
/// </summary>
private async Task<Channel> RequestChannel(StreamName stream_name, CancellationToken cancel_token)
{
  Guid channel_id;
  if (!Guid.TryParse(stream_name.Name, out channel_id)) {
    return null;
  }

  // The optional "tip" parameter names the tracker host for this channel.
  var tracker_uri = stream_name.Parameters.ContainsKey("tip")
    ? OutputStreamBase.CreateTrackerUri(channel_id, stream_name.Parameters["tip"])
    : null;

  var channel = owner.RequestChannel(channel_id, tracker_uri);
  if (channel == null) {
    return null;
  }

  // Poll up to 10 times (1s apart) until the channel knows its content type.
  for (var attempt = 0;
       attempt < 10 &&
       (channel.ChannelInfo == null || String.IsNullOrEmpty(channel.ChannelInfo.ContentType));
       attempt++) {
    await Task.Delay(1000, cancel_token);
  }

  // Only FLV channels are acceptable to this output stream.
  if (channel.ChannelInfo == null ||
      String.IsNullOrEmpty(channel.ChannelInfo.ContentType) ||
      channel.ChannelInfo.ContentType != "FLV") {
    return null;
  }

  return channel;
}
/// <summary>Create a stream to store part of a column.</summary>
/// <param name="column">the column id for the stream</param>
/// <param name="kind">the kind of stream</param>
/// <returns>The output outStream that the section needs to be written to.</returns>
public OutStream createStream(int column, OrcProto.Stream.Types.Kind kind)
{
    StreamName name = new StreamName(column, kind);
    CompressionModifier[] modifiers;

    switch (kind)
    {
        case OrcProto.Stream.Types.Kind.BLOOM_FILTER:
        case OrcProto.Stream.Types.Kind.DATA:
        case OrcProto.Stream.Types.Kind.DICTIONARY_DATA:
            // Bulk data streams: prefer speed over ratio when the strategy says so.
            modifiers = getCompressionStrategy() == OrcFile.CompressionStrategy.SPEED
                ? new[] { CompressionModifier.FAST, CompressionModifier.TEXT }
                : new[] { CompressionModifier.DEFAULT, CompressionModifier.TEXT };
            break;
        case OrcProto.Stream.Types.Kind.LENGTH:
        case OrcProto.Stream.Types.Kind.DICTIONARY_COUNT:
        case OrcProto.Stream.Types.Kind.PRESENT:
        case OrcProto.Stream.Types.Kind.ROW_INDEX:
        case OrcProto.Stream.Types.Kind.SECONDARY:
            // easily compressed using the fastest modes
            modifiers = new[] { CompressionModifier.FASTEST, CompressionModifier.BINARY };
            break;
        default:
            LOG.warn("Missing ORC compression modifiers for " + kind);
            modifiers = null;
            break;
    }

    // Reuse an existing buffered stream for this name, creating one on first use.
    BufferedStream result = writer.streams.get(name);
    if (result == null)
    {
        // A null codec is passed through untouched; otherwise apply the modifiers.
        result = new BufferedStream(
            name.ToString(),
            writer.bufferSize,
            writer.codec == null ? writer.codec : writer.codec.modify(modifiers));
        writer.streams.Add(name, result);
    }
    return result.outStream;
}
// Enumerates the NTFS streams (including alternate data streams) of a file via the
// Win32 backup API: the file is opened with BackupSemantics, then BackupRead is called
// in a loop — each iteration reads a Win32StreamId header, optionally reads the
// stream's name (UTF-16, hence the byte count is halved), records named streams, and
// BackupSeek skips the stream's payload. The finally block calls BackupRead with
// abort=true to release the operation context.
// NOTE(review): an invalid file handle silently yields an empty list — confirm callers
// don't need an exception for "file not openable".
public static IList<Win32StreamInfo> ListStreams(string filePath) { if (string.IsNullOrEmpty(filePath)) throw new ArgumentNullException("filePath"); if (-1 != filePath.IndexOfAny(Path.GetInvalidPathChars())) throw new ArgumentException(Resources.Error_InvalidFileChars, "filePath"); var result = new List<Win32StreamInfo>(); using (SafeFileHandle hFile = SafeCreateFile(filePath, NativeFileAccess.GenericRead, FileShare.Read, IntPtr.Zero, FileMode.Open, NativeFileFlags.BackupSemantics, IntPtr.Zero)) using (var hName = new StreamName()) { if (!hFile.IsInvalid) { var streamId = new Win32StreamId(); int dwStreamHeaderSize = Marshal.SizeOf(streamId); bool finished = false; IntPtr context = IntPtr.Zero; int bytesRead; string name; try { while (!finished) { // Read the next stream header: if (!BackupRead(hFile, ref streamId, dwStreamHeaderSize, out bytesRead, false, false, ref context)) { finished = true; } else if (dwStreamHeaderSize != bytesRead) { finished = true; } else { // Read the stream name: if (0 >= streamId.StreamNameSize) { name = null; } else { hName.EnsureCapacity(streamId.StreamNameSize); if (!BackupRead(hFile, hName.MemoryBlock, streamId.StreamNameSize, out bytesRead, false, false, ref context)) { name = null; finished = true; } else { // Unicode chars are 2 bytes: name = hName.ReadStreamName(bytesRead >> 1); } } // Add the stream info to the result: if (!string.IsNullOrEmpty(name)) { result.Add(new Win32StreamInfo { StreamType = (FileStreamType)streamId.StreamId, StreamAttributes = (FileStreamAttributes)streamId.StreamAttributes, StreamSize = streamId.Size.ToInt64(), StreamName = name }); } // Skip the contents of the stream: int bytesSeekedLow, bytesSeekedHigh; if (!finished && !BackupSeek(hFile, streamId.Size.Low, streamId.Size.High, out bytesSeekedLow, out bytesSeekedHigh, ref context)) { finished = true; } } } } finally { // Abort the backup: BackupRead(hFile, hName.MemoryBlock, 0, out bytesRead, true, false, ref context); } } } return result; }