public override async Task ExportDeletions(JsonTextWriter jsonWriter, OperationState result, LastEtagsInfo maxEtagsToFetch)
{
    jsonWriter.WritePropertyName("DocsDeletions");
    jsonWriter.WriteStartArray();
    result.LastDocDeleteEtag = await Operations.ExportDocumentsDeletion(jsonWriter, result.LastDocDeleteEtag, maxEtagsToFetch.LastDocDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();

    jsonWriter.WritePropertyName("AttachmentsDeletions");
    jsonWriter.WriteStartArray();
    result.LastAttachmentsDeleteEtag = await Operations.ExportAttachmentsDeletion(jsonWriter, result.LastAttachmentsDeleteEtag, maxEtagsToFetch.LastAttachmentsDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();
}
public static ExportOptions Create(OperationState state, ItemType types, bool exportDeletions, int maxNumberOfItemsToExport)
{
    return new ExportOptions
    {
        ExportAttachments = types.HasFlag(ItemType.Attachments),
        ExportDocuments = types.HasFlag(ItemType.Documents),
        ExportDeletions = exportDeletions,
        StartEtags = state,
        MaxNumberOfDocumentsToExport = maxNumberOfItemsToExport - state.NumberOfExportedDocuments,
        MaxNumberOfAttachmentsToExport = maxNumberOfItemsToExport - state.NumberOfExportedAttachments
    };
}
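// A minimal standalone sketch (hypothetical type, not part of the codebase) of the budget
// arithmetic used by ExportOptions.Create above: each incremental pass is allowed only the
// overall item limit minus whatever earlier passes already exported, so a resumed export
// never exceeds the configured limit.
public static class ExportBudgetSketch
{
    // Mirrors: maxNumberOfItemsToExport - state.NumberOfExportedDocuments
    public static int RemainingDocuments(int maxNumberOfItemsToExport, int numberOfExportedDocuments)
    {
        return maxNumberOfItemsToExport - numberOfExportedDocuments;
    }
}
// Example: with a limit of 1500 and 900 documents already exported, the next pass may export at most 600 more.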
public Task<IAsyncEnumerator<RavenJObject>> ExportItems(ItemType types, OperationState state)
{
    var options = ExportOptions.Create(state, types, Options.ExportDeletions, Options.Limit);

    var client = (AsyncServerClient)Store.AsyncDatabaseCommands;
    return client.StreamExportAsync(options);
}
public void PurgeTombstones(OperationState result)
{
    throw new NotImplementedException("Purge tombstones is not supported for Command Line Smuggler");
}
public override Task ExportDeletions(JsonTextWriter jsonWriter, OperationState result, LastEtagsInfo maxEtagsToFetch)
{
    throw new NotSupportedException("Exporting deletions is not supported for Command Line Smuggler");
}
public async Task DataDumperExportHandlesMaxEtagCorrectly()
{
    using (var store = NewDocumentStore())
    {
        // store 10 documents
        using (var session = store.OpenSession())
        {
            for (var i = 0; i < 10; i++)
            {
                session.Store(new User { Name = "oren #" + (i + 1) });
            }
            session.SaveChanges();
        }

        // export documents up to an explicit end etag
        using (var textStream = new StringWriter())
        using (var writer = new JsonTextWriter(textStream))
        {
            var dumper = new CustomDataDumper(store.SystemDatabase);

            var startEtag = store.SystemDatabase.Statistics.LastDocEtag.IncrementBy(-5);
            var endEtag = startEtag.IncrementBy(2);

            writer.WriteStartArray();
            var lastEtag = await dumper.ExportDocuments(writer, startEtag, endEtag);
            writer.WriteEndArray();
            writer.Flush();

            // read exported content
            var exportedDocs = RavenJArray.Parse(textStream.GetStringBuilder().ToString());
            Assert.Equal(2, exportedDocs.Count());

            Assert.Equal("01000000-0000-0001-0000-000000000007", exportedDocs.First().Value<RavenJObject>("@metadata").Value<string>("@etag"));
            Assert.Equal("01000000-0000-0001-0000-000000000008", exportedDocs.Last().Value<RavenJObject>("@metadata").Value<string>("@etag"));
            Assert.Equal("01000000-0000-0001-0000-000000000008", lastEtag.ToString());
        }

        // export documents with no end etag (up to the latest)
        using (var textStream = new StringWriter())
        using (var writer = new JsonTextWriter(textStream))
        {
            var dumper = new CustomDataDumper(store.SystemDatabase);

            var startEtag = store.SystemDatabase.Statistics.LastDocEtag.IncrementBy(-5);

            writer.WriteStartArray();
            var lastEtag = await dumper.ExportDocuments(writer, startEtag, null);
            writer.WriteEndArray();
            writer.Flush();

            // read exported content
            var exportedDocs = RavenJArray.Parse(textStream.GetStringBuilder().ToString());
            Assert.Equal(5, exportedDocs.Count());

            Assert.Equal("01000000-0000-0001-0000-000000000007", exportedDocs.First().Value<RavenJObject>("@metadata").Value<string>("@etag"));
            Assert.Equal("01000000-0000-0001-0000-00000000000B", exportedDocs.Last().Value<RavenJObject>("@metadata").Value<string>("@etag"));
            Assert.Equal("01000000-0000-0001-0000-00000000000B", lastEtag.ToString());
        }

        // add 10 attachments
        for (var i = 0; i < 10; i++)
        {
            store.DatabaseCommands.PutAttachment("attach/" + (i + 1), null, new MemoryStream(new[] { (byte)i }), new RavenJObject());
        }

        // export attachments up to an explicit end etag
        using (var textStream = new StringWriter())
        using (var writer = new JsonTextWriter(textStream))
        {
            var dumper = new CustomDataDumper(store.SystemDatabase);

            var startEtag = store.SystemDatabase.Statistics.LastAttachmentEtag.IncrementBy(-5);
            var endEtag = startEtag.IncrementBy(2);

            writer.WriteStartArray();
            var lastEtag = await dumper.ExportAttachments(writer, startEtag, endEtag);
            writer.WriteEndArray();
            writer.Flush();

            // read exported content
            var exportedAttachments = RavenJArray.Parse(textStream.GetStringBuilder().ToString());
            Assert.Equal(2, exportedAttachments.Count());

            Assert.Equal("02000000-0000-0001-0000-000000000006", exportedAttachments.First().Value<string>("Etag"));
            Assert.Equal("02000000-0000-0001-0000-000000000007", exportedAttachments.Last().Value<string>("Etag"));
            Assert.Equal("02000000-0000-0001-0000-000000000007", lastEtag.ToString());
        }

        // export attachments with no end etag (up to the latest)
        using (var textStream = new StringWriter())
        using (var writer = new JsonTextWriter(textStream))
        {
            var dumper = new CustomDataDumper(store.SystemDatabase);

            var startEtag = store.SystemDatabase.Statistics.LastAttachmentEtag.IncrementBy(-5);

            writer.WriteStartArray();
            var lastEtag = await dumper.ExportAttachments(writer, startEtag, null);
            writer.WriteEndArray();
            writer.Flush();

            // read exported content
            var exportedAttachments = RavenJArray.Parse(textStream.GetStringBuilder().ToString());
            Assert.Equal(5, exportedAttachments.Count());

            Assert.Equal("02000000-0000-0001-0000-000000000006", exportedAttachments.First().Value<string>("Etag"));
            Assert.Equal("02000000-0000-0001-0000-00000000000A", exportedAttachments.Last().Value<string>("Etag"));
            Assert.Equal("02000000-0000-0001-0000-00000000000A", lastEtag.ToString());
        }

        WaitForIndexing(store);

        // delete all users and attachments to create tombstones
        store.DatabaseCommands.DeleteByIndex("Raven/DocumentsByEntityName", new IndexQuery() { Query = "Tag:Users" }).WaitForCompletion();

        for (var i = 0; i < 10; i++)
        {
            store.DatabaseCommands.DeleteAttachment("attach/" + (i + 1), null);
        }

        Etag user6DeletionEtag = null, user9DeletionEtag = null, attach5DeletionEtag = null, attach7DeletionEtag = null;

        WaitForUserToContinueTheTest(store);

        // grab tombstone etags for specific keys to use as export boundaries
        store.SystemDatabase.TransactionalStorage.Batch(accessor =>
        {
            user6DeletionEtag = accessor.Lists.Read(Constants.RavenPeriodicExportsDocsTombstones, "users/6").Etag;
            user9DeletionEtag = accessor.Lists.Read(Constants.RavenPeriodicExportsDocsTombstones, "users/9").Etag;
            attach5DeletionEtag = accessor.Lists.Read(Constants.RavenPeriodicExportsAttachmentsTombstones, "attach/5").Etag;
            attach7DeletionEtag = accessor.Lists.Read(Constants.RavenPeriodicExportsAttachmentsTombstones, "attach/7").Etag;
        });

        // export deletions between the state's last delete etags and the max etags
        using (var textStream = new StringWriter())
        using (var writer = new JsonTextWriter(textStream))
        {
            var dumper = new CustomDataDumper(store.SystemDatabase);

            writer.WriteStartObject();
            var lastEtags = new LastEtagsInfo();
            var exportResult = new OperationState
            {
                LastDocDeleteEtag = user6DeletionEtag,
                LastAttachmentsDeleteEtag = attach5DeletionEtag
            };
            lastEtags.LastDocDeleteEtag = user9DeletionEtag;
            lastEtags.LastAttachmentsDeleteEtag = attach7DeletionEtag;

            dumper.ExportDeletions(writer, exportResult, lastEtags).Wait();
            writer.WriteEndObject();
            writer.Flush();

            // read exported content
            var exportJson = RavenJObject.Parse(textStream.GetStringBuilder().ToString());
            var docsKeys = exportJson.Value<RavenJArray>("DocsDeletions").Select(x => x.Value<string>("Key")).ToArray();
            var attachmentsKeys = exportJson.Value<RavenJArray>("AttachmentsDeletions")
                .Select(x => x.Value<string>("Key"))
                .ToArray();

            Assert.Equal(new[] { "users/7", "users/8", "users/9" }, docsKeys);
            Assert.Equal(new[] { "attach/6", "attach/7" }, attachmentsKeys);
        }
    }
}
public override Task ExportDeletions(JsonTextWriter jsonWriter, OperationState result, LastEtagsInfo maxEtags)
{
    return base.ExportDeletions(jsonWriter, result, maxEtags);
}
public void CanProperlyReadLastEtagUsingPreviousFormat()
{
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    var etagFileLocation = Path.Combine(Path.GetDirectoryName(backupPath), "IncrementalExport.state.json");

    using (var streamWriter = new StreamWriter(File.Create(etagFileLocation)))
    {
        new RavenJObject
        {
            {"LastDocEtag", Etag.Parse("00000000-0000-0000-0000-000000000001").ToString()},
            {"LastAttachmentEtag", Etag.Parse("00000000-0000-0000-0000-000000000002").ToString()}
        }.WriteTo(new JsonTextWriter(streamWriter));
        streamWriter.Flush();
    }

    var result = new OperationState
    {
        FilePath = backupPath
    };

    SmugglerDatabaseApiBase.ReadLastEtagsFromFile(result);

    Assert.Equal("00000000-0000-0000-0000-000000000001", result.LastDocsEtag.ToString());
    Assert.Equal("00000000-0000-0000-0000-000000000002", result.LastAttachmentsEtag.ToString());
    Assert.Equal(Etag.Empty, result.LastDocDeleteEtag);
    Assert.Equal(Etag.Empty, result.LastAttachmentsDeleteEtag);
}
public async Task CanHandleAttachmentExceptionsGracefully_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    var server = GetNewServer();

    int allowDownload = 0;

    var port = 8070;
    var forwarder = new ProxyServer(ref port, 8079)
    {
        // block the download of attachment users/678 until allowDownload is flipped,
        // so the first export fails part-way through
        VetoTransfer = (totalRead, buffer) =>
        {
            var payload = System.Text.Encoding.UTF8.GetString(buffer.Array, buffer.Offset, buffer.Count);
            return payload.Contains("GET /static/users/678 ") && Thread.VolatileRead(ref allowDownload) == 0;
        }
    };

    try
    {
        string databaseName;
        using (var store = new DocumentStore { Url = "http://localhost:8079" })
        {
            databaseName = store.DefaultDatabase;
            store.Initialize();
            InsertAttachments(store, 2000);
        }

        var dumper = new SmugglerDatabaseApi { Options = { Limit = 1500, Incremental = true } };

        var allAttachments = new List<RavenJObject>();

        OperationState exportResult = null;
        try
        {
            exportResult = dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                ToFile = backupPath,
                From = new RavenConnectionStringOptions
                {
                    Url = "http://localhost:" + port,
                    DefaultDatabase = databaseName,
                }
            }).Result;
            Assert.False(true, "Previous op should throw.");
        }
        catch (AggregateException e)
        {
            var inner = e.ExtractSingleInnerException() as SmugglerExportException;
            if (inner == null)
                throw;

            exportResult = new OperationState
            {
                FilePath = inner.File
            };
        }

        Interlocked.Increment(ref allowDownload);

        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk1 = RavenJToken.TryLoad(stream) as RavenJObject;
            var att1 = chunk1["Attachments"] as RavenJArray;
            allAttachments.AddRange(att1.Values<RavenJObject>());
        }

        exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            ToFile = backupPath,
            From = new RavenConnectionStringOptions
            {
                Url = "http://localhost:8070",
                DefaultDatabase = databaseName,
            }
        });

        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk2 = RavenJToken.TryLoad(stream) as RavenJObject;
            var attr2 = chunk2["Attachments"] as RavenJArray;
            allAttachments.AddRange(attr2.Values<RavenJObject>());
        }

        Assert.Equal(2000, allAttachments.Count());
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
        forwarder.Dispose();
        server.Dispose();
    }
}
public async Task CanHandleDocumentExceptionsGracefully_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    var server = GetNewServer(databaseName: Constants.SystemDatabase);

    var alreadyReset = false;

    var port = 8070;
    var forwarder = new ProxyServer(ref port, 8079)
    {
        // kill the connection once, after roughly 25 KB, so the first export fails part-way through
        VetoTransfer = (totalRead, buffer) =>
        {
            if (alreadyReset == false && totalRead > 25000)
            {
                alreadyReset = true;
                return true;
            }
            return false;
        }
    };

    try
    {
        string databaseName;
        using (var store = new DocumentStore { Url = "http://localhost:8079" })
        {
            databaseName = store.DefaultDatabase;
            store.Initialize();
            InsertUsers(store, 0, 2000);
        }

        var dumper = new SmugglerDatabaseApi { Options = { Limit = 1900, Incremental = true } };

        var allDocs = new List<RavenJObject>();

        OperationState exportResult = null;
        try
        {
            exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                ToFile = backupPath,
                From = new RavenConnectionStringOptions
                {
                    Url = "http://localhost:" + port,
                    DefaultDatabase = databaseName,
                }
            });
            Assert.False(true, "Previous op should throw.");
        }
        catch (SmugglerExportException e)
        {
            exportResult = new OperationState
            {
                FilePath = e.File
            };
        }

        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk1 = RavenJToken.TryLoad(stream) as RavenJObject;
            var doc1 = chunk1["Docs"] as RavenJArray;
            allDocs.AddRange(doc1.Values<RavenJObject>());
        }

        exportResult = await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            ToFile = backupPath,
            From = new RavenConnectionStringOptions
            {
                Url = "http://localhost:8070",
                DefaultDatabase = databaseName,
            }
        });

        using (var fileStream = new FileStream(exportResult.FilePath, FileMode.Open))
        using (var stream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            var chunk2 = RavenJToken.TryLoad(stream) as RavenJObject;
            var doc2 = chunk2["Docs"] as RavenJArray;
            allDocs.AddRange(doc2.Values<RavenJObject>());
        }

        Assert.Equal(2000, allDocs.Count(d => (d.Value<string>("Name") ?? String.Empty).StartsWith("User")));
    }
    finally
    {
        forwarder.Dispose();
        server.Dispose();
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public Task<IAsyncEnumerator<RavenJObject>> ExportItems(ItemType types, OperationState state)
{
    var exporter = new SmugglerExporter(database, ExportOptions.Create(state, types, Options.ExportDeletions, Options.Limit));

    var items = new List<RavenJObject>();
    exporter.Export(items.Add, database.WorkContext.CancellationToken);

    return new CompletedTask<IAsyncEnumerator<RavenJObject>>(new AsyncEnumeratorBridge<RavenJObject>(items.GetEnumerator()));
}
public void PurgeTombstones(OperationState result)
{
    database.TransactionalStorage.Batch(accessor =>
    {
        // RemoveAllBefore is inclusive, but FetchCurrentMaxEtags still needs to read the last
        // processed tombstone, so shift each cut-off down by one to keep it
        accessor.Lists.RemoveAllBefore(Constants.RavenPeriodicExportsDocsTombstones, result.LastDocDeleteEtag.IncrementBy(-1));
        accessor.Lists.RemoveAllBefore(Constants.RavenPeriodicExportsAttachmentsTombstones, result.LastAttachmentsDeleteEtag.IncrementBy(-1));
    });
}
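// A tiny standalone illustration (hypothetical type, plain integers standing in for Etag values)
// of the inclusive cut-off adjustment used in PurgeTombstones above: purging everything up to and
// including (lastDeleteEtag - 1) removes the processed tombstones while keeping the last one
// available for a subsequent max-etag lookup.
public static class TombstonePurgeSketch
{
    public static List<int> Purge(List<int> tombstoneEtags, int lastDeleteEtag)
    {
        var cutOff = lastDeleteEtag - 1; // the "remove all before" boundary is treated as inclusive here as well
        return tombstoneEtags.FindAll(etag => etag > cutOff);
    }
}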
public Task ExportDeletions(JsonTextWriter jsonWriter, OperationState result, Etag lastDocDeleteEtag, Etag lastAttachmentsDeleteEtag)
{
    var maxEtags = new LastEtagsInfo
    {
        LastDocDeleteEtag = lastDocDeleteEtag,
        LastAttachmentsDeleteEtag = lastAttachmentsDeleteEtag
    };

    return base.ExportDeletions(jsonWriter, result, maxEtags);
}