// Round-trips a backup archive in memory: lets the caller populate a writer,
// then rewinds the stream and hands a reader over the same bytes back to the caller.
private async Task TestReaderWriterAsync(BackupVersion version, Func<IBackupWriter, Task> write, Func<IBackupReader, Task> read)
{
    using (var archiveStream = new MemoryStream())
    {
        // The writer must be disposed before reading so any buffered
        // data is flushed into the underlying stream.
        using (var backupWriter = new BackupWriter(serializer, archiveStream, true, version))
        {
            await write(backupWriter);
        }

        // Rewind so the reader consumes the archive from the beginning.
        archiveStream.Position = 0;

        using (var backupReader = new BackupReader(serializer, archiveStream))
        {
            await read(backupReader);
        }
    }
}
// Runs a single backup job: streams all events for this app (Key) into a backup
// archive, lets each handler contribute, uploads the finished archive to the
// asset store, and records the final job status. Cleanup (archive deletion,
// state persistence, clearing the current-job fields) always runs in finally.
private async Task ProcessAsync(BackupStateJob job, CancellationToken ct)
{
    var jobId = job.Id.ToString();
    var handlers = CreateHandlers();
    var lastTimestamp = job.Started;
    try
    {
        using (var stream = await backupArchiveLocation.OpenStreamAsync(jobId))
        {
            using (var writer = new BackupWriter(serializer, stream, true))
            {
                // For every stored event of this app: write the raw event to the
                // archive, then give each handler a chance to add related data.
                await eventStore.QueryAsync(async storedEvent =>
                {
                    var @event = eventDataFormatter.Parse(storedEvent.Data);
                    writer.WriteEvent(storedEvent);
                    foreach (var handler in handlers)
                    {
                        await handler.BackupEventAsync(@event, Key, writer);
                    }
                    // Keep progress counters on the job up to date while streaming.
                    job.HandledEvents = writer.WrittenEvents;
                    job.HandledAssets = writer.WrittenAttachments;
                    // NOTE(review): presumably persists job progress at intervals,
                    // returning the timestamp of the last write — confirm.
                    lastTimestamp = await WritePeriodically(lastTimestamp);
                }, SquidexHeaders.AppId, Key.ToString(), null, ct);
                // After all events: bulk backup per handler, then completion hooks.
                foreach (var handler in handlers)
                {
                    await handler.BackupAsync(Key, writer);
                }
                foreach (var handler in handlers)
                {
                    await handler.CompleteBackupAsync(Key, writer);
                }
            }
            // Writer is disposed (flushed) above; rewind before uploading the archive.
            stream.Position = 0;
            ct.ThrowIfCancellationRequested();
            await assetStore.UploadAsync(jobId, 0, null, stream, false, ct);
        }
        job.Status = JobStatus.Completed;
    }
    catch (Exception ex)
    {
        // Any failure (including cancellation) marks the job as failed after logging.
        log.LogError(ex, jobId, (ctx, w) => w
            .WriteProperty("action", "makeBackup")
            .WriteProperty("status", "failed")
            .WriteProperty("backupId", ctx));
        job.Status = JobStatus.Failed;
    }
    finally
    {
        // Best-effort removal of the temporary archive, then persist final job state
        // and release the current-job slot so a new backup can start.
        await Safe.DeleteAsync(backupArchiveLocation, jobId, log);
        job.Stopped = clock.GetCurrentInstant();
        await state.WriteAsync();
        currentTask = null;
        currentJob = null;
    }
}
// Round-trips 200 events through a backup archive and verifies that:
// - blob attachments (every 17th index) and JSON attachments (every 37th index)
//   are restored with their original content,
// - every guid (in payload values, raw/named payload guids and headers) is
//   remapped to a NEW guid on restore while reader.OldGuid maps back to the
//   original, and
// - an unset guid stays Guid.Empty.
public async Task Should_write_and_read_events_to_backup(BackupVersion version)
{
    var stream = new MemoryStream();
    var random = new Random();
    // A fixed pool of 100 guids so the same guid can appear in several places.
    var randomGuids = new List<Guid>();
    for (var i = 0; i < 100; i++)
    {
        randomGuids.Add(Guid.NewGuid());
    }
    // Picks a random guid from the pool.
    Guid RandomGuid()
    {
        return (randomGuids[random.Next(randomGuids.Count)]);
    }
    var sourceEvents = new List<(string Stream, Envelope<IEvent> Event)>();
    for (var i = 0; i < 200; i++)
    {
        // Guids are planted in every location the backup reader must remap:
        // named id, raw guid, dictionary key, header name, header value, stream name.
        var @event = new MyEvent
        {
            GuidNamed = NamedId.Of(RandomGuid(), $"name{i}"),
            GuidRaw = RandomGuid(),
            Values = new Dictionary<Guid, string>
            {
                [RandomGuid()] = "Key"
            }
        };
        var envelope = Envelope.Create<IEvent>(@event);
        envelope.Headers.Add(RandomGuid().ToString(), i);
        envelope.Headers.Add("Id", RandomGuid().ToString());
        // "Index" links each restored event back to its source for comparison.
        envelope.Headers.Add("Index", i);
        sourceEvents.Add(($"My-{RandomGuid()}", envelope));
    }
    using (var writer = new BackupWriter(serializer, stream, true, version))
    {
        foreach (var (_, envelope) in sourceEvents)
        {
            var eventData = formatter.ToEventData(envelope, Guid.NewGuid(), true);
            var eventStored = new StoredEvent("S", "1", 2, eventData);
            var index = int.Parse(envelope.Headers["Index"].ToString());
            if (index % 17 == 0)
            {
                // Attach a one-byte blob keyed by the event index.
                await writer.WriteBlobAsync(index.ToString(), innerStream =>
                {
                    innerStream.WriteByte((byte)index);
                    return (TaskHelper.Done);
                });
            }
            else if (index % 37 == 0)
            {
                // Attach a JSON payload keyed by the event index.
                await writer.WriteJsonAsync(index.ToString(), $"JSON_{index}");
            }
            writer.WriteEvent(eventStored);
        }
    }
    // Writer disposed (flushed); rewind before reading the archive back.
    stream.Position = 0;
    var targetEvents = new List<(string Stream, Envelope<IEvent> Event)>();
    using (var reader = new BackupReader(serializer, stream))
    {
        await reader.ReadEventsAsync(streamNameResolver, formatter, async @event =>
        {
            var index = int.Parse(@event.Event.Headers["Index"].ToString());
            if (index % 17 == 0)
            {
                // The blob attachment must round-trip its single byte.
                await reader.ReadBlobAsync(index.ToString(), innerStream =>
                {
                    var byteRead = innerStream.ReadByte();
                    Assert.Equal((byte)index, byteRead);
                    return (TaskHelper.Done);
                });
            }
            else if (index % 37 == 0)
            {
                // The JSON attachment must round-trip its string content.
                var json = await reader.ReadJsonAttachmentAsync<string>(index.ToString());
                Assert.Equal($"JSON_{index}", json);
            }
            targetEvents.Add(@event);
        });
        // Asserts the guid was remapped: the restored guid differs from the
        // source, and reader.OldGuid resolves it back to the source guid.
        void CompareGuid(Guid source, Guid target)
        {
            Assert.Equal(source, reader.OldGuid(target));
            Assert.NotEqual(source, target);
        }
        for (var i = 0; i < targetEvents.Count; i++)
        {
            var target = targetEvents[i].Event.To<MyEvent>();
            var source = sourceEvents[i].Event.To<MyEvent>();
            CompareGuid(source.Payload.Values.First().Key, target.Payload.Values.First().Key);
            CompareGuid(source.Payload.GuidRaw, target.Payload.GuidRaw);
            CompareGuid(source.Payload.GuidNamed.Id, target.Payload.GuidNamed.Id);
            CompareGuid(source.Headers.GetGuid("Id"), target.Headers.GetGuid("Id"));
            // A guid never assigned in the source must stay empty after restore.
            Assert.Equal(Guid.Empty, target.Payload.GuidEmpty);
        }
    }
}
// Starts and runs a backup for this app: rejects concurrent runs and too many
// stored backups, registers a new job, streams all app events into an archive,
// lets handlers contribute, uploads the archive, and records the final status.
// Cleanup (archive deletion, state persistence, releasing the job slot) always
// runs in finally.
public async Task RunAsync()
{
    // Guard: only one backup may run at a time.
    if (currentTask != null)
    {
        throw new DomainException("Another backup process is already running.");
    }
    // Guard: cap the number of retained backups.
    if (state.Jobs.Count >= MaxBackups)
    {
        throw new DomainException($"You cannot have more than {MaxBackups} backups.");
    }
    var job = new BackupStateJob
    {
        Id = Guid.NewGuid(),
        Started = clock.GetCurrentInstant(),
        Status = JobStatus.Started
    };
    // The token source doubles as the "a backup is running" flag checked above.
    currentTask = new CancellationTokenSource();
    currentJob = job;
    var lastTimestamp = job.Started;
    // Newest job first; persist before doing any work so the job is visible.
    state.Jobs.Insert(0, job);
    await WriteAsync();
    try
    {
        using (var stream = await backupArchiveLocation.OpenStreamAsync(job.Id))
        {
            using (var writer = new BackupWriter(serializer, stream, true))
            {
                // For every stored event of this app: write the raw event to the
                // archive, then give each handler a chance to add related data.
                await eventStore.QueryAsync(async storedEvent =>
                {
                    var @event = eventDataFormatter.Parse(storedEvent.Data);
                    writer.WriteEvent(storedEvent);
                    foreach (var handler in handlers)
                    {
                        await handler.BackupEventAsync(@event, appId, writer);
                    }
                    // Keep progress counters on the job up to date while streaming.
                    job.HandledEvents = writer.WrittenEvents;
                    job.HandledAssets = writer.WrittenAttachments;
                    // NOTE(review): presumably persists job progress at intervals,
                    // returning the timestamp of the last write — confirm.
                    lastTimestamp = await WritePeriodically(lastTimestamp);
                }, SquidexHeaders.AppId, appId.ToString(), null, currentTask.Token);
                // After all events: bulk backup per handler, then completion hooks.
                foreach (var handler in handlers)
                {
                    await handler.BackupAsync(appId, writer);
                }
                foreach (var handler in handlers)
                {
                    await handler.CompleteBackupAsync(appId, writer);
                }
            }
            // Writer is disposed (flushed) above; rewind before uploading the archive.
            stream.Position = 0;
            currentTask.Token.ThrowIfCancellationRequested();
            await assetStore.UploadAsync(job.Id.ToString(), 0, null, stream, currentTask.Token);
        }
        job.Status = JobStatus.Completed;
    }
    catch (Exception ex)
    {
        // Any failure (including cancellation) marks the job as failed after logging.
        log.LogError(ex, w => w
            .WriteProperty("action", "makeBackup")
            .WriteProperty("status", "failed")
            .WriteProperty("backupId", job.Id.ToString()));
        job.Status = JobStatus.Failed;
    }
    finally
    {
        // Best-effort removal of the temporary archive, then persist final job state
        // and release the current-job slot so a new backup can start.
        await Safe.DeleteAsync(backupArchiveLocation, job.Id, log);
        job.Stopped = clock.GetCurrentInstant();
        await WriteAsync();
        currentTask = null;
        currentJob = null;
    }
}
// Default completion hook: does nothing. Derived handlers override this to
// finalize their contribution to the backup archive.
public virtual Task CompleteBackupAsync(Guid appId, BackupWriter writer) => TaskHelper.Done;
// Default per-event hook: does nothing. Derived handlers override this to
// write event-related data to the backup archive.
public virtual Task BackupEventAsync(Envelope<IEvent> @event, Guid appId, BackupWriter writer) => TaskHelper.Done;
// Wraps the given stream in a new BackupWriter and returns it as an
// already-completed task (construction itself is synchronous).
public Task<IBackupWriter> OpenWriterAsync(Stream stream) =>
    Task.FromResult<IBackupWriter>(new BackupWriter(jsonSerializer, stream, true));
// Round-trips 1000 stored events through a backup archive and verifies that:
// - blob attachments (every 17th event) and JSON attachments (every 37th event)
//   are restored with their original content, and
// - the restored events match the source events except for the stream name,
//   which the resolver rewrites (WithNewId) on restore.
public async Task Should_write_and_read_events()
{
    var stream = new MemoryStream();
    var sourceEvents = new List<StoredEvent>();
    using (var writer = new BackupWriter(stream, true))
    {
        for (var i = 0; i < 1000; i++)
        {
            var eventData = new EventData
            {
                Type = i.ToString(),
                Metadata = i,
                Payload = i
            };
            var eventStored = new StoredEvent("S", "1", 2, eventData);
            if (i % 17 == 0)
            {
                // Attach a one-byte blob keyed by the event type.
                await writer.WriteBlobAsync(eventData.Type, innerStream =>
                {
                    innerStream.WriteByte((byte)i);
                    return (TaskHelper.Done);
                });
            }
            else if (i % 37 == 0)
            {
                // Attach a JSON payload keyed by the event type.
                await writer.WriteJsonAsync(eventData.Type, $"JSON_{i}");
            }
            writer.WriteEvent(eventStored);
            sourceEvents.Add(eventStored);
        }
    }
    // Writer disposed (flushed); rewind before reading the archive back.
    stream.Position = 0;
    var readEvents = new List<StoredEvent>();
    using (var reader = new BackupReader(stream))
    {
        await reader.ReadEventsAsync(streamNameResolver, async @event =>
        {
            var i = int.Parse(@event.Data.Type);
            if (i % 17 == 0)
            {
                // The blob attachment must round-trip its single byte.
                await reader.ReadBlobAsync(@event.Data.Type, innerStream =>
                {
                    var b = innerStream.ReadByte();
                    Assert.Equal((byte)i, b);
                    return (TaskHelper.Done);
                });
            }
            else if (i % 37 == 0)
            {
                // The JSON attachment must round-trip its string content.
                var j = await reader.ReadJsonAttachmentAsync(@event.Data.Type);
                Assert.Equal($"JSON_{i}", j.ToString());
            }
            readEvents.Add(@event);
        });
    }
    // The reader rewrites stream names, so compare against the source events
    // with the same rewrite applied.
    var sourceEventsWithNewStreamName = sourceEvents.Select(x => new StoredEvent(streamNameResolver.WithNewId(x.StreamName, null), x.EventPosition, x.EventStreamNumber, x.Data)).ToList();
    readEvents.Should().BeEquivalentTo(sourceEventsWithNewStreamName);
}