private async Task<ReadAllPage> ReloadAfterDelay(
    long fromPositionInclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    Logger.InfoFormat($"ReadAllForwards: gap detected in position, reloading after {DefaultReloadInterval}ms");

    await Task.Delay(DefaultReloadInterval, cancellationToken);

    var reloadedPage = await ReadAllForwardsInternal(fromPositionInclusive, maxCount, prefetch, readNext, cancellationToken)
        .NotOnCapturedContext();

    return await FilterExpired(reloadedPage, readNext, cancellationToken).NotOnCapturedContext();
}
public async Task<ReadAllPage> ReadAllForwards(
    long fromPositionInclusive,
    int maxCount,
    bool prefetchJsonData,
    CancellationToken cancellationToken = default(CancellationToken))
{
    Ensure.That(fromPositionInclusive, nameof(fromPositionInclusive)).IsGte(0);
    Ensure.That(maxCount, nameof(maxCount)).IsGte(1);
    GuardAgainstDisposed();
    cancellationToken.ThrowIfCancellationRequested();

    if (Logger.IsDebugEnabled())
    {
        Logger.DebugFormat("ReadAllForwards from position {fromPositionInclusive} with max count " +
                           "{maxCount}.", fromPositionInclusive, maxCount);
    }

    ReadNextAllPage readNext = (nextPosition, ct) => ReadAllForwards(nextPosition, maxCount, prefetchJsonData, ct);

    var page = await ReadAllForwardsInternal(fromPositionInclusive, maxCount, prefetchJsonData, readNext, cancellationToken)
        .NotOnCapturedContext();

    // https://github.com/damianh/SqlStreamStore/issues/31
    // Under heavy parallel load, gaps may appear in the position sequence due to sequence
    // number reservation of in-flight transactions.
    // Here we check if there are any gaps, and in the unlikely event there is one, we delay a little bit
    // and re-issue the read. This is expected to be rare.
    if (!page.IsEnd || page.Messages.Length <= 1)
    {
        return await FilterExpired(page, readNext, cancellationToken).NotOnCapturedContext();
    }

    // Check for a gap between the last page and this one.
    if (page.Messages[0].Position != fromPositionInclusive)
    {
        page = await ReloadAfterDelay(fromPositionInclusive, maxCount, prefetchJsonData, readNext, cancellationToken);
    }

    // Check for gaps within the messages collection.
    for (int i = 0; i < page.Messages.Length - 1; i++)
    {
        if (page.Messages[i].Position + 1 != page.Messages[i + 1].Position)
        {
            page = await ReloadAfterDelay(fromPositionInclusive, maxCount, prefetchJsonData, readNext, cancellationToken);
            break;
        }
    }

    return await FilterExpired(page, readNext, cancellationToken).NotOnCapturedContext();
}
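For context, this gap-checked forward read is what a consumer ultimately drives when paging through the $all stream. Below is a minimal sketch of such a loop, assuming the public SqlStreamStore surface (IStreamStore, Position, ReadAllPage.ReadNext, StreamMessage.GetJsonData); any identifiers not shown in the snippets here come from that public API rather than from these files.

using System;
using System.Threading;
using System.Threading.Tasks;
using SqlStreamStore;
using SqlStreamStore.Streams;

public static class ReadAllForwardsUsageSketch
{
    public static async Task DumpAll(IStreamStore store, CancellationToken ct = default)
    {
        // Page forwards through $all, following each page's readNext continuation.
        var page = await store.ReadAllForwards(Position.Start, 100, prefetchJsonData: true, cancellationToken: ct);
        while (true)
        {
            foreach (var message in page.Messages)
            {
                var json = await message.GetJsonData(ct); // already materialized because prefetchJsonData is true
                Console.WriteLine($"{message.Position}: {message.StreamId}/{message.Type} ({json.Length} chars)");
            }

            if (page.IsEnd)
            {
                break;
            }

            page = await page.ReadNext(ct); // re-issues ReadAllForwards from page.NextPosition
        }
    }
}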
/// <summary>
///     Initializes a new instance of <see cref="ReadAllPage"/>.
/// </summary>
/// <param name="fromPosition">A long representing the position where this page was read from.</param>
/// <param name="nextPosition">A long representing the position where the next page should be read from.</param>
/// <param name="isEnd">True if the page reached the end of the all stream at the time of reading; otherwise false.</param>
/// <param name="direction">The direction of the read request.</param>
/// <param name="readNext">An operation to read the next page of messages.</param>
/// <param name="messages">The collection of messages read.</param>
public ReadAllPage(
    long fromPosition,
    long nextPosition,
    bool isEnd,
    ReadDirection direction,
    ReadNextAllPage readNext,
    StreamMessage[] messages = null)
{
    FromPosition = fromPosition;
    NextPosition = nextPosition;
    IsEnd = isEnd;
    Direction = direction;
    _readNext = readNext;
    Messages = messages ?? new StreamMessage[0];
}
public ReadAllPage(
    long fromPosition,
    long nextPosition,
    bool isEnd,
    ReadDirection direction,
    StreamMessage[] messages,
    ReadNextAllPage readNext)
{
    FromPosition = fromPosition;
    NextPosition = nextPosition;
    IsEnd = isEnd;
    Direction = direction;
    Messages = messages;
    _readNext = readNext;
}
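Both constructor overloads capture the same readNext continuation; that delegate plus NextPosition is all that paging needs. The following hedged, self-contained sketch expresses that idea as a hypothetical helper (SqlStreamStore exposes an equivalent ReadNext member on the page itself; the helper name here is purely illustrative):

using System.Threading;
using System.Threading.Tasks;
using SqlStreamStore.Streams;

public static class ReadAllPagePagingSketch
{
    // Hypothetical helper: "reading the next page" is just invoking the stored
    // continuation with the current page's NextPosition.
    public static Task<ReadAllPage> Continue(
        this ReadAllPage page,
        ReadNextAllPage readNext,
        CancellationToken cancellationToken = default)
        => readNext(page.NextPosition, cancellationToken);
}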
protected override async Task<ReadAllPage> ReadAllForwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    maxCount = maxCount == int.MaxValue ? maxCount - 1 : maxCount;

    try
    {
        var commandText = prefetch ? _schema.ReadAllForwardsWithData : _schema.ReadAllForwards;

        using (var connection = await OpenConnection(cancellationToken))
        using (var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false))
        using (var command = BuildStoredProcedureCall(
            commandText,
            transaction,
            Parameters.Count(maxCount + 1),
            Parameters.Position(fromPositionExclusive)))
        using (var reader = await command
            .ExecuteReaderAsync(CommandBehavior.SequentialAccess, cancellationToken)
            .ConfigureAwait(false))
        {
            if (!reader.HasRows)
            {
                return new ReadAllPage(
                    fromPositionExclusive,
                    fromPositionExclusive,
                    true,
                    ReadDirection.Forward,
                    readNext,
                    Array.Empty<StreamMessage>());
            }

            var messages = new List<(StreamMessage message, int? maxAge)>();
            while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
            {
                if (messages.Count == maxCount)
                {
                    messages.Add(default);
private async Task<ReadAllPage> FilterExpired(
    ReadAllPage readAllPage,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    if (_disableMetadataCache)
    {
        return readAllPage;
    }

    var valid = new List<StreamMessage>();
    var currentUtc = GetUtcNow();
    foreach (var streamMessage in readAllPage.Messages)
    {
        if (streamMessage.StreamId.StartsWith("$"))
        {
            valid.Add(streamMessage);
            continue;
        }

        int? maxAge = _metadataMaxAgeCache == null
            ? null
            : await _metadataMaxAgeCache.GetMaxAge(streamMessage.StreamId, cancellationToken);

        if (!maxAge.HasValue)
        {
            valid.Add(streamMessage);
            continue;
        }

        if (streamMessage.CreatedUtc.AddSeconds(maxAge.Value) > currentUtc)
        {
            valid.Add(streamMessage);
        }
        else
        {
            PurgeExpiredMessage(streamMessage);
        }
    }

    return new ReadAllPage(
        readAllPage.FromPosition,
        readAllPage.NextPosition,
        readAllPage.IsEnd,
        readAllPage.Direction,
        readNext,
        valid.ToArray());
}
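The maxAge consulted above is the per-stream max age set through stream metadata. A minimal sketch of the caller side follows, assuming the public SetStreamMetadata, AppendToStream, and ReadAllForwards API plus the in-memory store; once a message's CreatedUtc plus maxAge has passed, FilterExpired drops it from read results and purges it.

using System;
using System.Threading.Tasks;
using SqlStreamStore;
using SqlStreamStore.Streams;

public static class MaxAgeSketch
{
    public static async Task Run()
    {
        using (var store = new InMemoryStreamStore())
        {
            // Messages in this stream expire 60 seconds after they are created.
            await store.SetStreamMetadata("orders-123", maxAge: 60);

            await store.AppendToStream(
                "orders-123",
                ExpectedVersion.Any,
                new[] { new NewStreamMessage(Guid.NewGuid(), "order-placed", "{}") });

            // Read within 60 seconds: the message is returned.
            // Read after 60 seconds: FilterExpired removes it from the page.
            var page = await store.ReadAllForwards(Position.Start, 10, prefetchJsonData: true);
            Console.WriteLine(page.Messages.Length);
        }
    }
}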
protected override async Task<ReadAllPage> ReadAllForwardsInternal(
    long fromPosition,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    maxCount = maxCount == int.MaxValue ? maxCount - 1 : maxCount;

    long position = fromPosition;

    using (var connection = _createConnection())
    {
        await connection.OpenAsync(cancellationToken).ConfigureAwait(false);
        var commandText = prefetch ? _scripts.ReadAllForwardWithData : _scripts.ReadAllForward;
        using (var command = new SqlCommand(commandText, connection))
        {
            command.CommandTimeout = _commandTimeout;
            command.Parameters.AddWithValue("position", position);
            command.Parameters.AddWithValue("count", maxCount + 1); // Read extra row to see if at end or not.
            var reader = await command
                .ExecuteReaderAsync(CommandBehavior.SequentialAccess, cancellationToken)
                .ConfigureAwait(false);

            List<StreamMessage> messages = new List<StreamMessage>();
            if (!reader.HasRows)
            {
                return new ReadAllPage(
                    fromPosition,
                    fromPosition,
                    true,
                    ReadDirection.Forward,
                    readNext,
                    messages.ToArray());
            }

            while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
            {
                if (messages.Count == maxCount)
                {
                    messages.Add(default);
protected override async Task<ReadAllPage> ReadAllForwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    maxCount = maxCount == int.MaxValue ? maxCount - 1 : maxCount;

    using (var connection = await OpenConnection(cancellationToken))
    using (var transaction = _createTransaction(connection))
    using (var command = BuildFunctionCommand(
        _schema.ReadAll,
        transaction,
        Parameters.Count(maxCount + 1),
        Parameters.Position(fromPositionExclusive),
        Parameters.ReadDirection(ReadDirection.Forward),
        Parameters.Prefetch(prefetch)))
    using (var reader = await command
        .ExecuteReaderAsync(CommandBehavior.SequentialAccess, cancellationToken)
        .NotOnCapturedContext())
    {
        if (!reader.HasRows)
        {
            return new ReadAllPage(
                fromPositionExclusive,
                fromPositionExclusive,
                true,
                ReadDirection.Forward,
                readNext,
                Array.Empty<StreamMessage>());
        }

        var messages = new List<(StreamMessage message, int? maxAge)>();
        while (await reader.ReadAsync(cancellationToken).NotOnCapturedContext())
        {
            if (messages.Count == maxCount)
            {
                messages.Add(default);
public async Task<ReadAllPage> ReadAllBackwards(
    long fromPositionInclusive,
    int maxCount,
    bool prefetchJsonData,
    CancellationToken cancellationToken = default)
{
    Ensure.That(fromPositionInclusive, nameof(fromPositionInclusive)).IsGte(-1);
    Ensure.That(maxCount, nameof(maxCount)).IsGte(1);
    GuardAgainstDisposed();
    cancellationToken.ThrowIfCancellationRequested();

    Logger.Debug(
        "ReadAllBackwards from position {fromPositionInclusive} with max count {maxCount}.",
        fromPositionInclusive,
        maxCount);

    ReadNextAllPage readNext = (nextPosition, ct) => ReadAllBackwards(nextPosition, maxCount, prefetchJsonData, ct);
    var page = await ReadAllBackwardsInternal(fromPositionInclusive, maxCount, prefetchJsonData, readNext, cancellationToken);

    return await FilterExpired(page, readNext, cancellationToken);
}
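From the caller's side, ReadAllBackwards pages newest-first, typically starting at Position.End. A short sketch, again assuming the public IStreamStore API described above:

using System;
using System.Threading;
using System.Threading.Tasks;
using SqlStreamStore;
using SqlStreamStore.Streams;

public static class ReadAllBackwardsUsageSketch
{
    public static async Task DumpNewestFirst(IStreamStore store, CancellationToken ct = default)
    {
        // Start at the head of $all and walk backwards until IsEnd.
        var page = await store.ReadAllBackwards(Position.End, 50, prefetchJsonData: false, cancellationToken: ct);
        while (true)
        {
            foreach (var message in page.Messages)
            {
                Console.WriteLine($"{message.Position}: {message.StreamId} ({message.CreatedUtc:O})");
            }

            if (page.IsEnd)
            {
                break;
            }

            page = await page.ReadNext(ct);
        }
    }
}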
protected override Task<ReadAllPage> ReadAllBackwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    throw new NotImplementedException();
}
protected override async Task<ReadAllPage> ReadAllForwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    maxCount = maxCount == int.MaxValue ? maxCount - 1 : maxCount;

    long ordinal = fromPositionExclusive;

    using (var connection = _createConnection())
    {
        await connection.OpenAsync(cancellationToken).NotOnCapturedContext();
        var commandText = prefetch ? _scripts.ReadAllForwardWithData : _scripts.ReadAllForward;
        using (var command = new SqlCommand(commandText, connection))
        {
            command.Parameters.AddWithValue("ordinal", ordinal);
            command.Parameters.AddWithValue("count", maxCount + 1); // Read extra row to see if at end or not.
            var reader = await command
                .ExecuteReaderAsync(cancellationToken)
                .NotOnCapturedContext();

            List<StreamMessage> messages = new List<StreamMessage>();
            if (!reader.HasRows)
            {
                return new ReadAllPage(
                    fromPositionExclusive,
                    fromPositionExclusive,
                    true,
                    ReadDirection.Forward,
                    readNext,
                    messages.ToArray());
            }

            while (await reader.ReadAsync(cancellationToken).NotOnCapturedContext())
            {
                if (messages.Count == maxCount)
                {
                    messages.Add(default(StreamMessage));
                }
                else
                {
                    var streamId = reader.GetString(0);
                    var streamVersion = reader.GetInt32(1);
                    ordinal = reader.GetInt64(2);
                    var eventId = reader.GetGuid(3);
                    var created = reader.GetDateTime(4);
                    var type = reader.GetString(5);
                    var jsonMetadata = reader.GetString(6);

                    Func<CancellationToken, Task<string>> getJsonData;
                    if (prefetch)
                    {
                        var jsonData = reader.GetString(7);
                        getJsonData = _ => Task.FromResult(jsonData);
                    }
                    else
                    {
                        var streamIdInfo = new StreamIdInfo(streamId);
                        getJsonData = ct => GetJsonData(streamIdInfo.SqlStreamId.Id, streamVersion, ct);
                    }

                    var message = new StreamMessage(streamId, eventId, streamVersion, ordinal, created, type, jsonMetadata, getJsonData);

                    messages.Add(message);
                }
            }

            bool isEnd = true;
            if (messages.Count == maxCount + 1) // An extra row was read, we're not at the end.
            {
                isEnd = false;
                messages.RemoveAt(maxCount);
            }

            var nextPosition = messages[messages.Count - 1].Position + 1;

            return new ReadAllPage(
                fromPositionExclusive,
                nextPosition,
                isEnd,
                ReadDirection.Forward,
                readNext,
                messages.ToArray());
        }
    }
}
protected abstract Task<ReadAllPage> ReadAllBackwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken);
protected override Task<ReadAllPage> ReadAllBackwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    GuardAgainstDisposed();

    using (_lock.UseReadLock())
    {
        if (fromPositionExclusive == Position.End)
        {
            fromPositionExclusive = _allStream.Last.Value.Position;
        }

        // Find the node to start from (it may not be equal to the exact position).
        var current = _allStream.First;
        if (current.Next == null) // Empty store.
        {
            var result = new ReadAllPage(
                Position.Start,
                Position.Start,
                true,
                ReadDirection.Backward,
                StreamMessage.EmptyArray,
                readNext);
            return Task.FromResult(result);
        }

        var previous = current.Previous;
        while (current.Value.Position < fromPositionExclusive)
        {
            if (current.Next == null) // fromPosition is past end of store.
            {
                var result = new ReadAllPage(
                    fromPositionExclusive,
                    fromPositionExclusive,
                    true,
                    ReadDirection.Backward,
                    StreamMessage.EmptyArray,
                    readNext);
                return Task.FromResult(result);
            }
            previous = current;
            current = current.Next;
        }

        var messages = new List<StreamMessage>();
        while (maxCount > 0 && current != _allStream.First)
        {
            StreamMessage message;
            if (prefetch)
            {
                message = new StreamMessage(
                    current.Value.StreamId,
                    current.Value.MessageId,
                    current.Value.StreamVersion,
                    current.Value.Position,
                    current.Value.Created,
                    current.Value.Type,
                    current.Value.JsonMetadata,
                    current.Value.JsonData);
            }
            else
            {
                var currentCopy = current;
                message = new StreamMessage(
                    current.Value.StreamId,
                    current.Value.MessageId,
                    current.Value.StreamVersion,
                    current.Value.Position,
                    current.Value.Created,
                    current.Value.Type,
                    current.Value.JsonMetadata,
                    ct => Task.Run(
                        () => ReadMessageData(currentCopy.Value.StreamId, currentCopy.Value.MessageId),
                        ct));
            }
            messages.Add(message);
            maxCount--;
            previous = current;
            current = current.Previous;
        }

        bool isEnd = previous == null || previous.Value.Position == 0;
        var nextCheckPoint = isEnd ? 0 : current.Value.Position;
        fromPositionExclusive = messages.Any() ? messages[0].Position : 0;

        var page = new ReadAllPage(
            fromPositionExclusive,
            nextCheckPoint,
            isEnd,
            ReadDirection.Backward,
            messages.ToArray(),
            readNext);

        return Task.FromResult(page);
    }
}
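Note the currentCopy local taken just before the deferred data-loading lambda above: current is reassigned on every loop iteration, so capturing it directly would make every lazy read observe whatever node the loop ended on. A minimal standalone illustration of that capture pitfall (plain C#, no store types involved):

using System;
using System.Collections.Generic;

public static class ClosureCaptureSketch
{
    public static void Main()
    {
        var capturesVariable = new List<Func<int>>();
        var capturesCopy = new List<Func<int>>();

        for (var i = 0; i < 3; i++)
        {
            capturesVariable.Add(() => i); // all three delegates share the same 'i'
            var copy = i;                  // per-iteration copy, like 'currentCopy' above
            capturesCopy.Add(() => copy);
        }

        Console.WriteLine(string.Join(",", capturesVariable.ConvertAll(f => f()))); // 3,3,3
        Console.WriteLine(string.Join(",", capturesCopy.ConvertAll(f => f())));     // 0,1,2
    }
}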
protected override async Task<ReadAllPage> ReadAllForwardsInternal(
    long fromPosition,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    maxCount = maxCount == int.MaxValue ? maxCount - 1 : maxCount;

    long position = fromPosition;

    using (var connection = _createConnection())
    {
        await connection.OpenAsync(cancellationToken).NotOnCapturedContext();
        var commandText = prefetch ? _scripts.ReadAllForwardWithData : _scripts.ReadAllForward;
        using (var command = new SqlCommand(commandText, connection))
        {
            command.CommandTimeout = _commandTimeout;
            command.Parameters.AddWithValue("position", position);
            command.Parameters.AddWithValue("count", maxCount + 1); // Read extra row to see if at end or not.
            var reader = await command
                .ExecuteReaderAsync(CommandBehavior.SequentialAccess, cancellationToken)
                .NotOnCapturedContext();

            if (!reader.HasRows)
            {
                return new ReadAllPage(
                    fromPosition,
                    fromPosition,
                    true,
                    ReadDirection.Forward,
                    readNext,
                    Array.Empty<StreamMessage>());
            }

            var messages = new List<(StreamMessage, int?)>();
            while (await reader.ReadAsync(cancellationToken).NotOnCapturedContext())
            {
                var ordinal = 0;
                var streamId = reader.GetString(ordinal++);
                var maxAge = reader.GetNullableInt32(ordinal++);
                var streamVersion = reader.GetInt32(ordinal++);
                position = reader.GetInt64(ordinal++);
                var eventId = reader.GetGuid(ordinal++);
                var created = reader.GetDateTime(ordinal++);
                var type = reader.GetString(ordinal++);
                var jsonMetadata = reader.GetString(ordinal++);

                Func<CancellationToken, Task<string>> getJsonData;
                if (prefetch)
                {
                    var jsonData = await reader.GetTextReader(ordinal).ReadToEndAsync();
                    getJsonData = _ => Task.FromResult(jsonData);
                }
                else
                {
                    var streamIdInfo = new StreamIdInfo(streamId);
                    getJsonData = ct => GetJsonData(streamIdInfo.SqlStreamId.Id, streamVersion, ct);
                }

                var message = new StreamMessage(streamId, eventId, streamVersion, position, created, type, jsonMetadata, getJsonData);

                messages.Add((message, maxAge));
            }

            bool isEnd = true;
            if (messages.Count == maxCount + 1) // An extra row was read, we're not at the end.
            {
                isEnd = false;
                messages.RemoveAt(maxCount);
            }

            var filteredMessages = FilterExpired(messages);

            var nextPosition = filteredMessages[filteredMessages.Count - 1].Position + 1;

            return new ReadAllPage(
                fromPosition,
                nextPosition,
                isEnd,
                ReadDirection.Forward,
                readNext,
                filteredMessages.ToArray());
        }
    }
}
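The count parameter of maxCount + 1 together with the messages.Count == maxCount + 1 check is the end-detection trick shared by the relational implementations above: ask for one more row than requested, and if it arrives, drop it and report that the read is not yet at the end. A minimal standalone illustration over an in-memory list:

using System;
using System.Collections.Generic;
using System.Linq;

public static class ExtraRowPagingSketch
{
    // Reads up to maxCount items starting at 'from', fetching one extra item
    // purely to decide whether the source is exhausted.
    public static (IReadOnlyList<int> Page, bool IsEnd) ReadPage(IReadOnlyList<int> source, int from, int maxCount)
    {
        var batch = source.Skip(from).Take(maxCount + 1).ToList();

        var isEnd = batch.Count <= maxCount; // the probe row did not arrive => at the end
        if (!isEnd)
        {
            batch.RemoveAt(maxCount);        // discard the probe row before returning the page
        }

        return (batch, isEnd);
    }

    public static void Main()
    {
        var all = Enumerable.Range(0, 5).ToList();

        var (page1, end1) = ReadPage(all, 0, 2);
        Console.WriteLine($"[{string.Join(",", page1)}] isEnd={end1}"); // [0,1] isEnd=False

        var (page2, end2) = ReadPage(all, 4, 2);
        Console.WriteLine($"[{string.Join(",", page2)}] isEnd={end2}"); // [4] isEnd=True
    }
}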
protected override async Task<ReadAllPage> ReadAllForwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    GuardAgainstDisposed();
    cancellationToken.ThrowIfCancellationRequested();

    using (var connection = OpenConnection())
    {
        // Find the starting node.
        var allStreamPosition = await connection.AllStream()
            .HeadPosition(cancellationToken);

        if (allStreamPosition == Position.None)
        {
            return new ReadAllPage(
                Position.Start,
                Position.Start,
                true,
                ReadDirection.Forward,
                readNext);
        }

        if (allStreamPosition < fromPositionExclusive)
        {
            return new ReadAllPage(
                fromPositionExclusive,
                fromPositionExclusive,
                true,
                ReadDirection.Forward,
                readNext);
        }

        var remaining = await connection.AllStream()
            .Remaining(ReadDirection.Forward, fromPositionExclusive);

        if (remaining == Position.End)
        {
            return new ReadAllPage(
                fromPositionExclusive,
                Position.End,
                true,
                ReadDirection.Forward,
                readNext);
        }

        var messages = await connection.AllStream()
            .Read(ReadDirection.Forward, fromPositionExclusive, maxCount, prefetch, cancellationToken);

        bool isEnd = remaining - messages.Count <= 0;
        var nextPosition = messages.Any()
            ? messages.Last().Position + 1
            : Position.End;

        return new ReadAllPage(
            fromPositionExclusive,
            nextPosition,
            isEnd,
            ReadDirection.Forward,
            readNext,
            messages.ToArray());
    }
}
protected override async Task<ReadAllPage> ReadAllBackwardsInternal(
    long fromPosition,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    GuardAgainstDisposed();
    cancellationToken.ThrowIfCancellationRequested();

    using (var connection = OpenConnection())
    {
        long? beginningPosition = fromPosition;

        var allStreamPosition = await connection.AllStream()
            .HeadPosition(cancellationToken);

        if (allStreamPosition == Position.None)
        {
            return new ReadAllPage(
                Position.Start,
                Position.Start,
                true,
                ReadDirection.Backward,
                readNext);
        }

        if (fromPosition == Position.End)
        {
            beginningPosition = allStreamPosition > fromPosition ? allStreamPosition : fromPosition;
        }

        if (fromPosition > allStreamPosition && fromPosition > Position.Start)
        {
            return new ReadAllPage(
                fromPosition,
                fromPosition,
                true,
                ReadDirection.Backward,
                readNext);
        }

        // For reading $all, in the case where no events have been entered into
        // the root stream yet, we need to have a min beginning position of Position.Start (0).
        beginningPosition = beginningPosition < Position.Start ? Position.Start : beginningPosition;

        var remaining = await connection.AllStream()
            .Remaining(ReadDirection.Backward, beginningPosition);

        if (remaining == Position.End)
        {
            return new ReadAllPage(
                allStreamPosition ?? Position.Start,
                Position.End,
                true,
                ReadDirection.Backward,
                readNext);
        }

        var messages = await connection.AllStream()
            .Read(ReadDirection.Backward, beginningPosition, maxCount, prefetch, cancellationToken);

        bool isEnd = remaining - messages.Count <= 0;
        var nextPosition = messages.Any()
            ? Math.Max(messages.Last().Position - 1, Position.Start)
            : Position.Start;

        return new ReadAllPage(
            beginningPosition.Value,
            nextPosition,
            isEnd,
            ReadDirection.Backward,
            readNext,
            messages.ToArray());
    }
}
protected override async Task<ReadAllPage> ReadAllBackwardsInternal(
    long fromPositionExclusive,
    int maxCount,
    bool prefetch,
    ReadNextAllPage readNext,
    CancellationToken cancellationToken)
{
    maxCount = maxCount == int.MaxValue ? maxCount - 1 : maxCount;

    long position = fromPositionExclusive == Position.End ? long.MaxValue : fromPositionExclusive;

    using (var connection = _createConnection())
    {
        await connection.OpenAsync(cancellationToken).NotOnCapturedContext();
        var commandText = prefetch ? _scripts.ReadAllBackwardWithData : _scripts.ReadAllBackward;
        using (var command = new SqlCommand(commandText, connection))
        {
            command.Parameters.AddWithValue("position", position);
            command.Parameters.AddWithValue("count", maxCount + 1); // Read extra row to see if at end or not.
            var reader = await command
                .ExecuteReaderAsync(cancellationToken)
                .NotOnCapturedContext();

            var messages = new List<(StreamMessage, int?)>();
            if (!reader.HasRows)
            {
                // When reading backwards and there are no more items, the next position is
                // LongPosition.Start, regardless of what the fromPosition is.
                return new ReadAllPage(
                    Position.Start,
                    Position.Start,
                    true,
                    ReadDirection.Backward,
                    readNext,
                    Array.Empty<StreamMessage>());
            }

            long lastPosition = 0;
            while (await reader.ReadAsync(cancellationToken).NotOnCapturedContext())
            {
                var ordinal = 0;
                var streamId = reader.GetString(ordinal++);
                var maxAge = reader.GetNullableInt32(ordinal++);
                var streamVersion = reader.GetInt32(ordinal++);
                position = reader.GetInt64(ordinal++);
                var messageId = reader.GetGuid(ordinal++);
                var created = reader.GetDateTime(ordinal++);
                var type = reader.GetString(ordinal++);
                var jsonMetadata = reader.GetString(ordinal++);

                Func<CancellationToken, Task<string>> getJsonData;
                if (prefetch)
                {
                    var jsonData = reader.GetString(ordinal);
                    getJsonData = _ => Task.FromResult(jsonData);
                }
                else
                {
                    var streamIdInfo = new StreamIdInfo(streamId);
                    getJsonData = ct => GetJsonData(streamIdInfo.SqlStreamId.Id, streamVersion, ct);
                }

                var message = new StreamMessage(
                    streamId,
                    messageId,
                    streamVersion,
                    position,
                    created,
                    type,
                    jsonMetadata,
                    getJsonData);

                messages.Add((message, maxAge));
                lastPosition = position;
            }

            bool isEnd = true;
            var nextPosition = lastPosition;

            if (messages.Count == maxCount + 1) // An extra row was read, we're not at the end.
            {
                isEnd = false;
                messages.RemoveAt(maxCount);
            }

            var filteredMessages = FilterExpired(messages);

            fromPositionExclusive = filteredMessages.Any() ? filteredMessages[0].Position : 0;

            return new ReadAllPage(
                fromPositionExclusive,
                nextPosition,
                isEnd,
                ReadDirection.Backward,
                readNext,
                filteredMessages.ToArray());
        }
    }
}