/// <summary>
/// Replays every raw stream update recorded after <c>opts.After</c>, re-extracting
/// updates of the requested type and persisting each batch in its own transaction.
/// </summary>
public async Task Run(ReplayOptions opts)
{
    _logger.Information("Starting replay (type: {Type}, after: {After})", opts.Type, opts.After);

    using var hasher = new SibrHasher();
    var updates = _updateStore.ExportAllUpdatesRaw(UpdateType.Stream, new UpdateStore.EntityVersionQuery { After = opts.After });
    var stopwatch = new Stopwatch();
    await using var conn = await _db.Obtain();

    // Consume the stream in fixed-size batches so each save/transaction stays bounded.
    await foreach (var batch in updates.Buffer(200))
    {
        var entityUpdates = batch
            .SelectMany(streamUpdate =>
            {
                var root = JObject.Parse(streamUpdate.Data.GetRawText());
                var extraction = TgbUtils.ExtractUpdatesFromStreamRoot(streamUpdate.SourceId, streamUpdate.Timestamp, root, hasher, opts.Type);
                return extraction.EntityUpdates;
            })
            .ToList();

        stopwatch.Restart();
        await using var tx = await conn.BeginTransactionAsync();
        var savedCount = await _updateStore.SaveUpdates(conn, entityUpdates, false);
        await tx.CommitAsync();
        stopwatch.Stop();

        // Earliest timestamp in the batch, used purely for progress logging.
        var batchTimestamp = batch.Min(u => u.Timestamp);
        _logger.Information("@ {Timestamp}: Saved {NewUpdateCount}/{UpdateCount} updates from {StreamObjects} stream objects (took {Duration})",
            batchTimestamp, savedCount, entityUpdates.Count, batch.Count, stopwatch.Elapsed);
    }
}
/// <summary>
/// Replays stream updates between <c>opts.Start</c> and <c>opts.End</c> by paging through
/// the update store in chunks of 500, re-extracting updates of the requested type(s) and
/// saving each chunk in its own transaction. Progress is logged per chunk.
/// </summary>
public async Task Run(ReplayOptions opts)
{
    // opts.Type is a collection here (joined for display); null means "all types".
    _logger.Information("Starting replay (type: {Type}, start: {Start}, end: {End})", opts.Type != null ? string.Join(",", opts.Type) : null, opts.Start, opts.End);
    using var hasher = new SibrHasher();
    var sw = new Stopwatch();
    await using var conn = await _db.Obtain();
    // Seed the pagination cursor from the requested start timestamp; null starts at the beginning.
    var page = opts.Start != null ? new PageToken(opts.Start.Value, default) : null;
    while (true)
    {
        var chunk = await _updateStore.ExportAllUpdatesChunked(conn, UpdateType.Stream, new UpdateStore.EntityVersionQuery { Page = page, Before = opts.End, Order = SortOrder.Asc, Count = 500 });
        if (chunk.Count == 0)
        {
            break; // no more data: replay finished
        }
        // Advance the cursor immediately so the next iteration continues past this chunk.
        page = chunk.Last().NextPage;
        // Dead branch: game-update replay is intentionally disabled until the type check
        // below can be expressed (see TODO). The intended condition was:
        // if (opts.Type == UpdateType.Game)
        if (false) // todo: uhhh how do we do this
        {
            var extractedGameUpdates = chunk.SelectMany(streamUpdate =>
            {
                var obj = JObject.Parse(streamUpdate.Data.GetRawText());
                return (TgbUtils.ExtractUpdatesFromStreamRoot(streamUpdate.SourceId, streamUpdate.Timestamp, obj, hasher, opts.Type).GameUpdates);
            }).ToList();
            sw.Restart();
            await using var tx = await conn.BeginTransactionAsync();
            var savedGameUpdates = await _gameUpdateStore.SaveGameUpdates(conn, extractedGameUpdates, false);
            await tx.CommitAsync();
            sw.Stop();
            var timestamp = chunk.Min(u => u.Timestamp);
            _logger.Information("@ {Timestamp}: Saved {GameUpdateCount}/{UpdateCount} game updates from {StreamObjects} stream objects (took {Duration})", timestamp, savedGameUpdates, extractedGameUpdates.Count, chunk.Count, sw.Elapsed);
        }
        else
        {
            var extractedUpdates = chunk.SelectMany(streamUpdate =>
            {
                var obj = JObject.Parse(streamUpdate.Data.GetRawText());
                return (TgbUtils.ExtractUpdatesFromStreamRoot(streamUpdate.SourceId, streamUpdate.Timestamp, obj, hasher, opts.Type).EntityUpdates);
            }).ToList();
            sw.Restart();
            await using var tx = await conn.BeginTransactionAsync();
            // append: false — presumably replaces/merges rather than blindly appending; confirm against SaveUpdates.
            var savedUpdates = await _updateStore.SaveUpdates(conn, extractedUpdates, false, append: false);
            await tx.CommitAsync();
            sw.Stop();
            var timestamp = chunk.Min(u => u.Timestamp);
            _logger.Information("@ {Timestamp}: Saved {NewUpdateCount}/{UpdateCount} updates from {StreamObjects} stream objects (took {Duration})", timestamp, savedUpdates, extractedUpdates.Count, chunk.Count, sw.Elapsed);
        }
    }
}
/// <summary>
/// Replays and republishes all public events of the configured source event type
/// (<c>Data.SourceEventTypeId</c>) within the <c>Data.After</c>/<c>Data.Before</c> window.
/// Pages through the event-to-aggregate index, persisting pagination progress and
/// heartbeating the cluster after every page so the job can resume after a restart.
/// </summary>
/// <param name="cluster">Cluster operations used to persist job data between pages.</param>
/// <param name="cancellationToken">Checked between commits; cancellation reports <c>Running</c> so the job resumes later.</param>
/// <returns><c>Completed</c> when all index pages are exhausted; <c>Running</c> if cancelled mid-page.</returns>
protected override async Task<JobExecutionStatus> RunJobAsync(IClusterOperations cluster, CancellationToken cancellationToken = default)
{
    string eventTypeId = Data.SourceEventTypeId;
    bool hasMoreRecords = true;

    while (hasMoreRecords && Data.IsCompleted == false)
    {
        // Resume from the last persisted pagination token (null on the first pass).
        string paginationToken = Data.EventTypePaging?.PaginationToken;
        LoadIndexRecordsResult indexRecordsResult = await eventToAggregateIndex.EnumerateRecordsAsync(eventTypeId, paginationToken).ConfigureAwait(false);

        Type publicEventType = typeof(IPublicEvent);
        ReplayOptions opt = new ReplayOptions()
        {
            AggregateIds = indexRecordsResult.Records.Select(indexRecord => AggregateUrn.Parse(Convert.ToBase64String(indexRecord.AggregateRootId), Urn.Base64)),
            // Only replay commits containing at least one public event of the requested contract.
            ShouldSelect = commit =>
            {
                bool result = (from publicEvent in commit.PublicEvents
                               let eventType = publicEvent.GetType()
                               where publicEventType.IsAssignableFrom(eventType)
                               where eventType.GetContractId().Equals(eventTypeId)
                               select publicEvent)
                              .Any();
                return result;
            },
            After = Data.After,
            Before = Data.Before
        };

        LoadAggregateCommitsResult foundAggregateCommits = await eventStorePlayer.LoadAggregateCommitsAsync(opt).ConfigureAwait(false);
        foreach (AggregateCommit arCommit in foundAggregateCommits.Commits)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                // Report Running (not failure) so the cluster reschedules the job and it
                // resumes from the last persisted pagination progress.
                logger.Info(() => "Job has been cancelled.");
                return JobExecutionStatus.Running;
            }

            foreach (IPublicEvent publicEvent in arCommit.PublicEvents)
            {
                if (publicEvent.GetType().GetContractId().Equals(eventTypeId))
                {
                    var headers = new Dictionary<string, string>()
                    {
                        { MessageHeader.RecipientBoundedContext, Data.RecipientBoundedContext },
                        { MessageHeader.RecipientHandlers, Data.RecipientHandlers }
                    };
                    publicEventPublisher.Publish(publicEvent, headers);
                }
            }
        }

        // Persist the next page token and heartbeat the cluster so progress survives restarts.
        var progress = new ReplayPublicEvents_JobData.EventTypePagingProgress(eventTypeId, indexRecordsResult.PaginationToken, 0, 0);
        Data.MarkEventTypeProgress(progress);
        Data.Timestamp = DateTimeOffset.UtcNow;
        Data = await cluster.PingAsync(Data, cancellationToken).ConfigureAwait(false);

        // An empty page signals the index is exhausted.
        hasMoreRecords = indexRecordsResult.Records.Any();
    }

    Data.IsCompleted = true;
    Data.Timestamp = DateTimeOffset.UtcNow;
    // NOTE(review): unlike the in-loop ping, this final ping does not pass cancellationToken —
    // presumably so completion is always recorded even during shutdown; confirm intent.
    Data = await cluster.PingAsync(Data).ConfigureAwait(false);
    logger.Info(() => "The job has been completed.");

    return JobExecutionStatus.Completed;
}
/// <summary>
/// Loading aggregate commits is not supported by this implementation.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown (synchronously).</exception>
public Task<LoadAggregateCommitsResult> LoadAggregateCommitsAsync(ReplayOptions replayOptions)
    => throw new NotImplementedException();