public async Task CanExportFrom41AndImportTo40()
{
    // Exports a 4.1 database (which has counters) and imports it into a 4.0
    // server, verifying counters are stripped while everything else survives.
    var file = Path.GetTempFileName();
    long countOfDocuments;
    long countOfAttachments;
    long countOfIndexes;
    long countOfRevisions;
    try
    {
        using (var store41 = GetDocumentStore())
        {
            store41.Maintenance.Send(new CreateSampleDataOperation());

            using (var session = store41.OpenSession())
            {
                var o = session.Load<Order>("orders/1-A");
                Assert.NotNull(o);
                session.CountersFor(o).Increment("downloads", 100);
                session.SaveChanges();
            }

            // Fix: wait for the export operation to finish before reading the
            // statistics, consistent with the other smuggler tests in this file;
            // previously only the request submission was awaited.
            var exportOperation = await store41.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
            await exportOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var stats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            countOfDocuments = stats.CountOfDocuments;
            countOfAttachments = stats.CountOfAttachments;
            countOfIndexes = stats.CountOfIndexes;
            countOfRevisions = stats.CountOfRevisionDocuments;
            Assert.Equal(1, stats.CountOfCounters);
        }

        using (var store40 = await GetDocumentStoreAsync("4.0.6-patch-40047"))
        {
            // 4.0 does not understand counters, so exclude them from the import.
            var options = new DatabaseSmugglerImportOptions();
            options.OperateOnTypes &= ~DatabaseItemType.Counters;
            options.SkipRevisionCreation = true;

            // Fix: same as above — await completion of the import operation.
            var importOperation = await store40.Smuggler.ImportAsync(options, file);
            await importOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var stats = await store40.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, stats.CountOfDocuments);
            Assert.Equal(countOfAttachments, stats.CountOfAttachments);
            Assert.Equal(countOfIndexes, stats.CountOfIndexes);
            Assert.Equal(countOfRevisions, stats.CountOfRevisionDocuments);
            Assert.Equal(0, stats.CountOfCounters);
        }
    }
    finally
    {
        File.Delete(file);
    }
}
public async Task ImportDump(IDocumentStore store, Stream dumpStream)
{
    // Imports the given dump stream into the store with default options and
    // blocks until the server-side operation completes (up to one minute).
    var options = new DatabaseSmugglerImportOptions();
    var importOperation = await store.Smuggler.ImportAsync(options, dumpStream);
    await importOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));
}
public async Task CanExportFrom40AndImportTo41()
{
    // Round-trips sample data from a 4.0.7 server into a 4.1 store, excluding
    // item types a 4.0 export cannot produce, and compares the statistics.
    var file = GetTempFileName();
    long countOfDocuments;
    long countOfAttachments;
    long countOfIndexes;
    long countOfRevisions;
    try
    {
        using (var store40 = await GetDocumentStoreAsync("4.0.7"))
        {
            store40.Maintenance.Send(new CreateSampleDataOperation());

            var exportOptions = new DatabaseSmugglerExportOptions();
#pragma warning disable CS0618 // Type or member is obsolete
            exportOptions.OperateOnTypes &= ~DatabaseItemType.Counters;
#pragma warning restore CS0618 // Type or member is obsolete
            exportOptions.OperateOnTypes &= ~(DatabaseItemType.CounterGroups |
                                              DatabaseItemType.Attachments |
                                              DatabaseItemType.Subscriptions |
                                              DatabaseItemType.CompareExchangeTombstones);

            var exportOperation = await store40.Smuggler.ExportAsync(exportOptions, file);
            await exportOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var sourceStats = await store40.Maintenance.SendAsync(new GetStatisticsOperation());
            countOfDocuments = sourceStats.CountOfDocuments;
            countOfAttachments = sourceStats.CountOfAttachments;
            countOfIndexes = sourceStats.CountOfIndexes;
            countOfRevisions = sourceStats.CountOfRevisionDocuments;
        }

        using (var store41 = GetDocumentStore())
        {
            var importOptions = new DatabaseSmugglerImportOptions { SkipRevisionCreation = true };

            var importOperation = await store41.Smuggler.ImportAsync(importOptions, file);
            await importOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var targetStats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, targetStats.CountOfDocuments);
            Assert.Equal(countOfAttachments, targetStats.CountOfAttachments);
            Assert.Equal(countOfIndexes, targetStats.CountOfIndexes);
            Assert.Equal(countOfRevisions, targetStats.CountOfRevisionDocuments);
        }
    }
    finally
    {
        File.Delete(file);
    }
}
public async Task Can_restore_legacy_counters_from_incremental_backup()
{
    // Restores a pre-4.2 (legacy counter) backup chain — one full plus two
    // incrementals, shipped as embedded resources — then verifies the
    // counters of both imported users.
    const string assemblyPrefix = "SlowTests.Data.RavenDB_13512.Incremental.";
    var backupPath = NewDataPath(forceCreateDir: true);
    var fullBackupPath = Path.Combine(backupPath, "17.ravendb-full-backup");
    var incrementalBackupPath1 = Path.Combine(backupPath, "18.ravendb-incremental-backup");
    var incrementalBackupPath2 = Path.Combine(backupPath, "18-01.ravendb-incremental-backup");

    RavenDB_13468.ExtractFile(fullBackupPath, assemblyPrefix + "17.ravendb-full-backup");
    RavenDB_13468.ExtractFile(incrementalBackupPath1, assemblyPrefix + "18.ravendb-incremental-backup");
    RavenDB_13468.ExtractFile(incrementalBackupPath2, assemblyPrefix + "18-01.ravendb-incremental-backup");

    using (var store = GetDocumentStore())
    {
        // Legacy dumps contain the obsolete Counters item type, not CounterGroups.
        var importOptions = new DatabaseSmugglerImportOptions();
#pragma warning disable 618
        importOptions.OperateOnTypes |= DatabaseItemType.Counters;
#pragma warning restore 618

        await store.Smuggler.ImportIncrementalAsync(importOptions, backupPath);

        using (var session = store.OpenAsyncSession())
        {
            var user1 = await session.LoadAsync<User>("users/1");
            Assert.NotNull(user1);

            var metadata = session.Advanced.GetMetadataFor(user1);
            Assert.True(metadata.ContainsKey("@counters"));

            var counters = await session.CountersFor(user1).GetAllAsync();
            Assert.Equal(100, counters.Count);
            for (var index = 0; index < counters.Count; index++)
            {
                Assert.True(counters.TryGetValue("counter/" + index, out var value));
                Assert.Equal(index * 3, value);
            }

            var user2 = await session.LoadAsync<User>("users/2");
            Assert.NotNull(user2);

            counters = await session.CountersFor(user2).GetAllAsync();
            Assert.Equal(100, counters.Count);
            for (var index = 0; index < counters.Count; index++)
            {
                Assert.True(counters.TryGetValue("counter/" + index, out var value));
                Assert.Equal(index * 2, value);
            }
        }
    }
}
public async Task CanExportFrom40AndImportTo41()
{
    // Exports sample data from a 4.0.6 server (without counters, which 4.0
    // cannot export reliably) and imports it into a 4.1 store, then compares
    // the document/attachment/index/revision statistics.
    var file = Path.GetTempFileName();
    long countOfDocuments;
    long countOfAttachments;
    long countOfIndexes;
    long countOfRevisions;
    try
    {
        using (var store40 = await GetDocumentStoreAsync("4.0.6-patch-40047"))
        {
            store40.Maintenance.Send(new CreateSampleDataOperation());

            var exportOptions = new DatabaseSmugglerExportOptions();
            exportOptions.OperateOnTypes &= ~DatabaseItemType.Counters;

            var exportOperation = await store40.Smuggler.ExportAsync(exportOptions, file);
            await exportOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var sourceStats = await store40.Maintenance.SendAsync(new GetStatisticsOperation());
            countOfDocuments = sourceStats.CountOfDocuments;
            countOfAttachments = sourceStats.CountOfAttachments;
            countOfIndexes = sourceStats.CountOfIndexes;
            countOfRevisions = sourceStats.CountOfRevisionDocuments;
        }

        using (var store41 = GetDocumentStore())
        {
            var importOptions = new DatabaseSmugglerImportOptions { SkipRevisionCreation = true };

            var importOperation = await store41.Smuggler.ImportAsync(importOptions, file);
            await importOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var targetStats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, targetStats.CountOfDocuments);
            Assert.Equal(countOfAttachments, targetStats.CountOfAttachments);
            Assert.Equal(countOfIndexes, targetStats.CountOfIndexes);
            Assert.Equal(countOfRevisions, targetStats.CountOfRevisionDocuments);
        }
    }
    finally
    {
        File.Delete(file);
    }
}
public async Task CanExportAndImportClient42Server41()
{
    // Uses the 4.2 client against a 4.1.4 server: exports only the database
    // record (periodic backups) and re-imports it into the same store, then
    // checks the document count was unaffected.
    var file = GetTempFileName();
    try
    {
        using (var store41 = await GetDocumentStoreAsync("4.1.4"))
        {
            using (var session = store41.OpenSession())
            {
                for (var i = 0; i < 5; i++)
                {
                    session.Store(new User { Name = "raven" + i });
                }
                session.SaveChanges();
            }

            var exportOptions = new DatabaseSmugglerExportOptions
            {
                OperateOnDatabaseRecordTypes = DatabaseRecordItemType.PeriodicBackups,
                OperateOnTypes = DatabaseItemType.DatabaseRecord
            };
            var operation = await store41.Smuggler.ExportAsync(exportOptions, file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var stats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            var countOfDocuments = stats.CountOfDocuments;

            var importOptions = new DatabaseSmugglerImportOptions
            {
                OperateOnDatabaseRecordTypes = DatabaseRecordItemType.PeriodicBackups,
                OperateOnTypes = DatabaseItemType.DatabaseRecord,
                SkipRevisionCreation = true
            };
            operation = await store41.Smuggler.ImportAsync(importOptions, file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            stats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, stats.CountOfDocuments);
        }
    }
    finally
    {
        File.Delete(file);
    }
}
public async Task CanExportFrom42AndImportTo5()
{
    // Exports a fully-populated 4.2 store (sample data plus users with
    // counters) and imports it into a 5.x server, asserting the statistics match.
    var file = GetTempFileName();
    using var store5 = await GetDocumentStoreAsync(Server5Version);
    using var store42 = GetDocumentStore();

    store42.Maintenance.Send(new CreateSampleDataOperation());
    using (var session = store42.OpenAsyncSession())
    {
        for (var i = 0; i < 5; i++)
        {
            var user = new User { Name = "raven" + i };
            await session.StoreAsync(user);
            session.CountersFor(user).Increment("Like");
        }
        await session.SaveChangesAsync();
    }

    //Export
    // Fix: locals were cross-named — the export operation was stored in
    // 'importOperation' and the import options in 'exportOperation'.
    var exportOptions = new DatabaseSmugglerExportOptions();
    var exportOperation = await store42.Smuggler.ExportAsync(exportOptions, file);
    await exportOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));
    var expected = await store42.Maintenance.SendAsync(new GetStatisticsOperation());

    //Import
    var importOptions = new DatabaseSmugglerImportOptions { SkipRevisionCreation = true };
    var importOperation = await store5.Smuggler.ImportAsync(importOptions, file);
    await importOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));
    var actual = await store5.Maintenance.SendAsync(new GetStatisticsOperation());

    //Assert
    Assert.Equal(expected.CountOfDocuments, actual.CountOfDocuments);
    Assert.Equal(expected.CountOfAttachments, actual.CountOfAttachments);
    Assert.Equal(expected.CountOfIndexes, actual.CountOfIndexes);
    Assert.Equal(expected.CountOfRevisionDocuments, actual.CountOfRevisionDocuments);
}
private void ImportDatabase(DocumentStore docStore, string database)
{
    // Synchronously imports a dump into the given database, preferring the
    // configured file path over the stream; does nothing when neither is set.
    var options = new DatabaseSmugglerImportOptions();
    var smuggler = docStore.Smuggler.ForDatabase(database);

    if (DatabaseDumpFilePath != null)
    {
        AsyncHelpers.RunSync(() => smuggler.ImportAsync(options, DatabaseDumpFilePath));
    }
    else if (DatabaseDumpFileStream != null)
    {
        AsyncHelpers.RunSync(() => smuggler.ImportAsync(options, DatabaseDumpFileStream));
    }
}
private async Task ImportDatabaseAsync(DocumentStore docStore, string database, TimeSpan? timeout = null)
{
    // Asynchronously imports a dump into the given database, preferring the
    // configured file path over the stream, and waits for server-side
    // completion (bounded by the optional timeout). No-op when neither source is set.
    var options = new DatabaseSmugglerImportOptions();
    var smuggler = docStore.Smuggler.ForDatabase(database);

    if (DatabaseDumpFilePath != null)
    {
        var importOperation = await smuggler.ImportAsync(options, DatabaseDumpFilePath);
        await importOperation.WaitForCompletionAsync(timeout);
    }
    else if (DatabaseDumpFileStream != null)
    {
        var importOperation = await smuggler.ImportAsync(options, DatabaseDumpFileStream);
        await importOperation.WaitForCompletionAsync(timeout);
    }
}
public async Task CanImportLegacyCounters()
{
    // Imports a 4.1.5 dump containing legacy counters (embedded resource) and
    // verifies the resulting statistics and per-supplier counter values.
    var assembly = typeof(SmugglerApiTests).GetTypeInfo().Assembly;

    using (var dumpStream = assembly.GetManifestResourceStream("SlowTests.Data.legacy-counters.4.1.5.ravendbdump"))
    using (var store = GetDocumentStore())
    {
        // Legacy dumps carry the obsolete Counters type, not CounterGroups.
        var options = new DatabaseSmugglerImportOptions();
        options.OperateOnTypes &= ~DatabaseItemType.CounterGroups;
#pragma warning disable 618
        options.OperateOnTypes |= DatabaseItemType.Counters;
#pragma warning restore 618

        var operation = await store.Smuggler.ImportAsync(options, dumpStream);
        await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

        var stats = await store.Maintenance.SendAsync(new GetStatisticsOperation());
        Assert.Equal(1059, stats.CountOfDocuments);
        Assert.Equal(3, stats.CountOfIndexes);
        Assert.Equal(4645, stats.CountOfRevisionDocuments);
        Assert.Equal(17, stats.CountOfAttachments);
        Assert.Equal(29, stats.CountOfCounterEntries);

        using (var session = store.OpenSession())
        {
            var suppliers = session.Query<Supplier>().ToList();
            Assert.Equal(29, suppliers.Count);

            foreach (var supplier in suppliers)
            {
                var counters = session.CountersFor(supplier).GetAll();
                Assert.Equal(1, counters.Count);
                Assert.Equal(10, counters["likes"]);
            }
        }
    }
}
public async Task<ActionResult> Import(IFormFile file)
{
    // Accepts an uploaded dump file, imports its documents into the store,
    // and kicks off a background migration of the imported data.

    // Make sure that we have an input file
    if (file == null)
    {
        return BadRequest("No file to import specified.");
    }

    // Store the uploaded file into a temporary location
    string filePath = Path.GetTempFileName();
    try
    {
        using (var stream = new FileStream(filePath, FileMode.Create))
        {
            // Copy file to temporary location
            await file.CopyToAsync(stream);
        }

        // Perform import (documents only, skipping already-expired ones)
        var importOptions = new DatabaseSmugglerImportOptions()
        {
            OperateOnTypes = DatabaseItemType.Documents,
            IncludeExpired = false,
        };
        var operation = await _documentStore.Smuggler.ImportAsync(importOptions, filePath);
        await operation.WaitForCompletionAsync();
    }
    finally
    {
        // Fix: delete the temporary file even when the copy or import throws,
        // so failed requests do not leak temp files.
        System.IO.File.Delete(filePath);
    }

    // Set the expirations as configured
    // TODO Restore functionality
    // await _expirationManager.ApplyExpirationPolicyAsync(await _configurationManager.GetOrCreateConfigurationAsync());

    // Migrate the imported data asynchronously
    var taskWeShallNotWaitFor = _migrationManager.StartMigrating();

    return NoContent();
}
public async Task CanMigrateFromRavenDb()
{
    // Exports a store with revisions and counters, then imports it three ways:
    // everything, everything minus record/revisions/counters, and into a store
    // that already has its own counters/revisions (which must be preserved).
    var file = Path.Combine(NewDataPath(forceCreateDir: true), "export.ravendbdump");
    var id = "users/1";

    using (var store1 = GetDocumentStore())
    {
        await RevisionsHelper.SetupRevisions(Server.ServerStore, store1.Database);

        using (var session = store1.OpenAsyncSession())
        {
            await session.StoreAsync(new User { Name = "Egor" }, id);
            session.CountersFor(id).Increment("Downloads", int.MaxValue);
            session.CountersFor(id).Increment("ShouldBePositiveValueAfterSmuggler", long.MaxValue);
            session.CountersFor(id).Increment("LittleCounter", 500);
            await session.SaveChangesAsync();
        }

        for (int i = 0; i < 10; i++)
        {
            using (var session = store1.OpenAsyncSession())
            {
                // NOTE(review): the document was stored as User but is loaded here
                // as Company — presumably works because the property names match;
                // confirm this is intentional.
                var user = await session.LoadAsync<Company>(id);
                user.Name = "Egor " + i;
                await session.SaveChangesAsync();
            }
        }

        using (var session = store1.OpenAsyncSession())
        {
            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(12, revisionsMetadata.Count);
        }

        var operation = await store1.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
        await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));
    }

    // all data import
    using (var store2 = GetDocumentStore())
    {
        var operation = await store2.Smuggler.ImportAsync(new DatabaseSmugglerImportOptions(), file);
        await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

        using (var session = store2.OpenAsyncSession())
        {
            var metadata = session.Advanced.GetMetadataFor(await session.LoadAsync<User>(id));
            Assert.Equal("HasRevisions, HasCounters", metadata.GetString("@flags"));

            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(13, revisionsMetadata.Count); // +1 revision added when importing

            var dic = await session.CountersFor(id).GetAllAsync();
            Assert.Equal(3, dic.Count);
            Assert.Equal(int.MaxValue, dic["Downloads"]);
            Assert.Equal(long.MaxValue, dic["ShouldBePositiveValueAfterSmuggler"]);
            Assert.Equal(500, dic["LittleCounter"]);
        }
    }

    // no DatabaseRecord, no RevisionDocuments, no Counters
    using (var store3 = GetDocumentStore())
    {
        var importOptions = new DatabaseSmugglerImportOptions();
        // Fix: clear flags with '&= ~' instead of '-='. Arithmetic subtraction on
        // a [Flags] enum borrows into unrelated bits whenever the flag happens
        // not to be set; '&= ~' is the safe, idiomatic way to remove a flag.
        importOptions.OperateOnTypes &= ~DatabaseItemType.DatabaseRecord;
        importOptions.OperateOnTypes &= ~DatabaseItemType.RevisionDocuments;
        importOptions.OperateOnTypes &= ~DatabaseItemType.CounterGroups;

        var operation = await store3.Smuggler.ImportAsync(importOptions, file);
        await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

        using (var session = store3.OpenAsyncSession())
        {
            var metadata = session.Advanced.GetMetadataFor(await session.LoadAsync<User>(id));
            Assert.False(metadata.ContainsKey("@flags"));
            Assert.False(metadata.ContainsKey("@counters"));

            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(0, revisionsMetadata.Count);

            var dic = await session.CountersFor(id).GetAllAsync();
            Assert.Equal(0, dic.Count);
        }
    }

    // if doc has counters AND revisions => they must be kept.
    using (var store4 = GetDocumentStore())
    {
        await RevisionsHelper.SetupRevisions(Server.ServerStore, store4.Database);

        using (var session = store4.OpenAsyncSession())
        {
            await session.StoreAsync(new User { Name = "Egor" }, id);
            session.CountersFor(id).Increment("ShouldBeKeptAfterSmugglerImport", 322);
            await session.SaveChangesAsync();
        }

        var importOptions = new DatabaseSmugglerImportOptions();
        importOptions.OperateOnTypes &= ~DatabaseItemType.DatabaseRecord;
        importOptions.OperateOnTypes &= ~DatabaseItemType.RevisionDocuments;
        importOptions.OperateOnTypes &= ~DatabaseItemType.CounterGroups;

        var operation = await store4.Smuggler.ImportAsync(importOptions, file);
        await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

        using (var session = store4.OpenAsyncSession())
        {
            var user = await session.LoadAsync<User>(id);
            Assert.Equal("Egor 9", user.Name); // check if document changed.

            var metadata = session.Advanced.GetMetadataFor(user);
            Assert.Equal("HasRevisions, HasCounters", metadata.GetString("@flags"));

            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(3, revisionsMetadata.Count);

            var dic = await session.CountersFor(id).GetAllAsync();
            Assert.Equal(1, dic.Count);
            Assert.Equal(322, dic["ShouldBeKeptAfterSmugglerImport"]);
        }
    }
}
public async Task CanExportAndImportCounterTombstones()
{
    // Creates one document tombstone and two counter tombstones, exports with
    // the Tombstones item type enabled, and verifies the target store receives
    // the same tombstones (and the two surviving counters).
    var file = GetTempFileName();
    try
    {
        using (var store1 = GetDocumentStore())
        using (var store2 = GetDocumentStore())
        {
            using (var session = store1.OpenAsyncSession())
            {
                await session.StoreAsync(new User { Name = "Name1" }, "users/1");
                await session.StoreAsync(new User { Name = "Name2" }, "users/2");
                await session.StoreAsync(new User { Name = "Name3" }, "users/3");
                await session.SaveChangesAsync();
            }

            using (var session = store1.OpenAsyncSession())
            {
                session.CountersFor("users/1").Increment("likes", 100);
                session.CountersFor("users/1").Increment("dislikes", 200);
                session.CountersFor("users/2").Increment("downloads", 500);
                session.CountersFor("users/2").Increment("votes", 1000);
                await session.SaveChangesAsync();
            }

            // Produce one document tombstone and two counter tombstones.
            using (var session = store1.OpenAsyncSession())
            {
                session.Delete("users/3");
                session.CountersFor("users/1").Delete("dislikes");
                session.CountersFor("users/2").Delete("votes");
                await session.SaveChangesAsync();
            }

            var database = await GetDocumentDatabaseInstanceFor(store1);
            using (database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
            using (context.OpenReadTransaction())
            {
                var tombstones = database.DocumentsStorage.GetTombstonesFrom(context, 0, 0, int.MaxValue).ToList();
                Assert.Equal(3, tombstones.Count);
                Assert.Equal(Tombstone.TombstoneType.Document, tombstones[0].Type);
                Assert.Equal(Tombstone.TombstoneType.Counter, tombstones[1].Type);
                Assert.Equal(Tombstone.TombstoneType.Counter, tombstones[2].Type);
            }

            // Tombstones are not part of the default item types on either side.
            var exportOptions = new DatabaseSmugglerExportOptions();
            var importOptions = new DatabaseSmugglerImportOptions();
            exportOptions.OperateOnTypes |= DatabaseItemType.Tombstones;
            importOptions.OperateOnTypes |= DatabaseItemType.Tombstones;

            var operation = await store1.Smuggler.ExportAsync(exportOptions, file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            operation = await store2.Smuggler.ImportAsync(importOptions, file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var stats = await store2.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(2, stats.CountOfCounters);
            Assert.Equal(3, stats.CountOfTombstones);

            database = await GetDocumentDatabaseInstanceFor(store2);
            using (database.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
            using (context.OpenReadTransaction())
            {
                var tombstones = database.DocumentsStorage.GetTombstonesFrom(context, 0, 0, int.MaxValue).ToList();
                Assert.Equal(3, tombstones.Count);
                Assert.Equal(Tombstone.TombstoneType.Document, tombstones[0].Type);
                Assert.Equal(Tombstone.TombstoneType.Counter, tombstones[1].Type);
                Assert.Equal(Tombstone.TombstoneType.Counter, tombstones[2].Type);
            }
        }
    }
    finally
    {
        File.Delete(file);
    }
}
public async Task IncludeArtificialDocuments_Backup_ShouldWork()
{
    // Verifies that artificial (map-reduce output) documents contained in a
    // backup are only imported when IncludeArtificial is set.
    using (var store = GetDocumentStore())
    {
        new MapReduce_WithOutput().Execute(store);

        using (var session = store.OpenSession())
        {
            session.Store(new Company { Name = "HR" });
            session.SaveChanges();
        }

        Indexes.WaitForIndexing(store);

        // Wait until the map-reduce output document exists and capture its id.
        string artificialDocumentId = null;
        Assert.True(WaitForValue(() =>
        {
            using (var session = store.OpenSession())
            {
                var result = session.Query<MapReduce_WithOutput.Result>().SingleOrDefault();
                artificialDocumentId = result.Id;
                return result != null;
            }
        }, true));
        Assert.NotNull(artificialDocumentId);

        var toFolderWithArtificial = Path.Combine(NewDataPath(), "BackupFolder");
        var config = Backup.CreateBackupConfiguration(toFolderWithArtificial);
        await Backup.UpdateConfigAndRunBackupAsync(Server, config, store);
        toFolderWithArtificial = Directory.GetDirectories(toFolderWithArtificial).First();

        // A regular restore must not bring the artificial document back.
        var toDatabaseName = store.Database + "_restored";
        using (Backup.RestoreDatabase(store, new RestoreBackupConfiguration
        {
            BackupLocation = toFolderWithArtificial,
            DatabaseName = toDatabaseName,
            SkipIndexes = true
        }))
        {
            using (var session = store.OpenSession(toDatabaseName))
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        var importOptionsWithoutArtificial = new DatabaseSmugglerImportOptions();
        var importOptionsWithArtificial = new DatabaseSmugglerImportOptions { IncludeArtificial = true };
        // Fix: use '&= ~' to exclude indexes. The original '|= ~DatabaseItemType.Indexes'
        // ORs in the complement, i.e. it turns ON every other item type and leaves
        // Indexes set, instead of removing Indexes from the set.
        importOptionsWithoutArtificial.OperateOnTypes &= ~DatabaseItemType.Indexes;
        importOptionsWithArtificial.OperateOnTypes &= ~DatabaseItemType.Indexes;

        var toFileWithArtificial = Directory.GetFiles(toFolderWithArtificial).First(); // artificial in file

        // artificial in file
        // include artificial is false
        using (var innerStore = GetDocumentStore())
        {
            var operation = await innerStore.Smuggler.ImportAsync(importOptionsWithoutArtificial, toFileWithArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        // artificial in file
        // include artificial is true
        using (var innerStore = GetDocumentStore())
        {
            var operation = await innerStore.Smuggler.ImportAsync(importOptionsWithArtificial, toFileWithArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.NotNull(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }
    }
}
public async Task CanExportFrom42AndImportToCurrent(ExcludeOn excludeOn)
{
    // Round-trips a 4.2 database into the current server version, optionally
    // excluding attachments/revisions/counter-groups on either the export or
    // the import side, and asserts the resulting statistics accordingly.
    const DatabaseItemType excludedTypes =
        DatabaseItemType.Attachments | DatabaseItemType.RevisionDocuments | DatabaseItemType.CounterGroups;

    var file = GetTempFileName();
    using var store42 = await GetDocumentStoreAsync(Server42Version);
    using var storeCurrent = GetDocumentStore();

    store42.Maintenance.Send(new CreateSampleDataOperation());
    using (var session = store42.OpenAsyncSession())
    {
        for (var i = 0; i < 5; i++)
        {
            var user = new User { Name = "raven" + i };
            await session.StoreAsync(user);
            session.CountersFor(user).Increment("Like");
        }
        await session.SaveChangesAsync();
    }

    //Export
    var exportOptions = new DatabaseSmugglerExportOptions();
    exportOptions.OperateOnTypes &= ~DatabaseItemType.TimeSeries;
    if (excludeOn == ExcludeOn.Export)
    {
        exportOptions.OperateOnTypes &= ~excludedTypes;
    }
    var exportOperation = await store42.Smuggler.ExportAsync(exportOptions, file);
    await exportOperation.WaitForCompletionAsync(_operationTimeout);
    var expected = await store42.Maintenance.SendAsync(new GetStatisticsOperation());

    //Import
    var importOptions = new DatabaseSmugglerImportOptions { SkipRevisionCreation = true };
    if (excludeOn == ExcludeOn.Import)
    {
        importOptions.OperateOnTypes &= ~excludedTypes;
    }
    var importOperation = await storeCurrent.Smuggler.ImportAsync(importOptions, file);
    await importOperation.WaitForCompletionAsync(_operationTimeout);
    var actual = await storeCurrent.Maintenance.SendAsync(new GetStatisticsOperation());

    //Assert
    Assert.Equal(expected.CountOfIndexes, actual.CountOfIndexes);
    Assert.Equal(expected.CountOfDocuments, actual.CountOfDocuments);

    var export = await GetMetadataCounts(store42);
    var import = await GetMetadataCounts(storeCurrent);
    if (excludeOn == ExcludeOn.Non)
    {
        Assert.Equal(expected.CountOfAttachments, actual.CountOfAttachments);
        Assert.Equal(expected.CountOfRevisionDocuments, actual.CountOfRevisionDocuments);
        Assert.Equal(expected.CountOfCounterEntries, actual.CountOfCounterEntries);
        Assert.Equal(export, import);
    }
    else
    {
        Assert.Equal(0, actual.CountOfAttachments);
        Assert.Equal(0, actual.CountOfRevisionDocuments);
        Assert.Equal(0, actual.CountOfCounterEntries);
        Assert.Equal((0, 0, 0), import);
    }
}
public async Task CanImportRavenDbWithoutAttachments()
{
    // Exports a document with 10 attachments and revisions, then imports it
    // with and without the Attachments item type; pre-existing attachments on
    // the target must be preserved when attachments are excluded.
    var folder = NewDataPath(forceCreateDir: true);
    var file = Path.Combine(folder, "export.ravendbdump");
    var id = "users/1";

    using (var store1 = GetDocumentStore())
    {
        await RevisionsHelper.SetupRevisions(Server.ServerStore, store1.Database);

        using (var session = store1.OpenAsyncSession())
        {
            await session.StoreAsync(new User { Name = "Egor" }, id);
            await session.SaveChangesAsync();
        }

        for (int i = 0; i < 10; i++)
        {
            var attachmentFile = Path.Combine(folder, $"attachment.{i}");

            // create file
            using (FileStream fs = File.Create(attachmentFile))
            {
                var info = new UTF8Encoding(true).GetBytes($"Hi I am attachment.{i} file!");
                fs.Write(info, 0, info.Length);
            }

            // add attachment
            using (Stream sr = File.OpenRead(attachmentFile))
            {
                store1.Operations.Send(new PutAttachmentOperation(id, $"attachment.{i}", sr));
            }
        }

        using (var session = store1.OpenAsyncSession())
        {
            for (int i = 0; i < 10; i++)
            {
                Assert.True(await session.Advanced.Attachments.ExistsAsync(id, $"attachment.{i}"));
            }
            await session.SaveChangesAsync();
        }

        var operation = await store1.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
        // Fix: await the asynchronous completion instead of blocking the test
        // thread with the synchronous WaitForCompletion inside an async method.
        await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(15));
    }

    // all data import
    using (var store2 = GetDocumentStore())
    {
        var operation = await store2.Smuggler.ImportAsync(new DatabaseSmugglerImportOptions(), file);
        await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(15));

        using (var session = store2.OpenAsyncSession())
        {
            var metadata = session.Advanced.GetMetadataFor(await session.LoadAsync<User>(id));
            Assert.True(metadata.ContainsKey("@attachments"));
            Assert.Equal("HasRevisions, HasAttachments", metadata.GetString("@flags"));

            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(12, revisionsMetadata.Count); // +1 revision added when importing

            for (int i = 0; i < 10; i++)
            {
                Assert.True(await session.Advanced.Attachments.ExistsAsync(id, $"attachment.{i}"));
            }
        }
    }

    // no Attachments
    using (var store3 = GetDocumentStore())
    {
        var importOptions = new DatabaseSmugglerImportOptions();
        // Fix: clear the flag with '&= ~' instead of '-='; arithmetic subtraction
        // on a [Flags] enum corrupts unrelated bits when the flag is not set.
        importOptions.OperateOnTypes &= ~DatabaseItemType.Attachments;

        var operation = await store3.Smuggler.ImportAsync(importOptions, file);
        await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(15));

        using (var session = store3.OpenAsyncSession())
        {
            var metadata = session.Advanced.GetMetadataFor(await session.LoadAsync<User>(id));
            Assert.False(metadata.ContainsKey("@attachments"));
            Assert.Equal("HasRevisions", metadata.GetString("@flags"));

            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(12, revisionsMetadata.Count);
        }
    }

    // if doc had attachments => they must be kept.
    using (var store4 = GetDocumentStore())
    {
        using (var session = store4.OpenAsyncSession())
        {
            await session.StoreAsync(new User { Name = "Egor" }, id);
            await session.SaveChangesAsync();

            var attachmentFile = Path.Combine(folder, "attachment.kept");

            // create file
            using (FileStream fs = File.Create(attachmentFile))
            {
                var info = new UTF8Encoding(true).GetBytes("Hi I am attachment.kept file!");
                fs.Write(info, 0, info.Length);
            }

            // add attachment
            using (Stream sr = File.OpenRead(attachmentFile))
            {
                store4.Operations.Send(new PutAttachmentOperation(id, "attachment.kept", sr));
            }
        }

        var importOptions = new DatabaseSmugglerImportOptions();
        importOptions.OperateOnTypes &= ~DatabaseItemType.Attachments;

        var operation = await store4.Smuggler.ImportAsync(importOptions, file);
        await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(15));

        using (var session = store4.OpenAsyncSession())
        {
            var metadata = session.Advanced.GetMetadataFor(await session.LoadAsync<User>(id));
            Assert.True(metadata.ContainsKey("@attachments"));
            Assert.True(await session.Advanced.Attachments.ExistsAsync(id, "attachment.kept"));
            Assert.Equal("HasRevisions, HasAttachments", metadata.GetString("@flags"));

            var revisionsMetadata = await session.Advanced.Revisions.GetMetadataForAsync(id);
            Assert.Equal(11, revisionsMetadata.Count);
        }
    }
}
public async Task IncludeArtificialDocuments_Smuggler_ShouldWork()
{
    // Verifies all four export/import combinations of IncludeArtificial:
    // the artificial (map-reduce output) document must only appear in the
    // target when it was both exported AND imported with the flag enabled.
    using (var store = GetDocumentStore())
    {
        new MapReduce_WithOutput().Execute(store);

        using (var session = store.OpenSession())
        {
            session.Store(new Company { Name = "HR" });
            session.SaveChanges();
        }

        WaitForIndexing(store);

        // Wait until the map-reduce output document exists and capture its id.
        string artificialDocumentId = null;
        Assert.True(WaitForValue(() =>
        {
            using (var session = store.OpenSession())
            {
                var result = session.Query<MapReduce_WithOutput.Result>().SingleOrDefault();
                artificialDocumentId = result.Id;
                return result != null;
            }
        }, true));
        Assert.NotNull(artificialDocumentId);

        var toFileWithoutArtificial = Path.Combine(NewDataPath(), "export_without_artificial.ravendbdump");
        var operation = await store.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), toFileWithoutArtificial);
        await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

        var toFileWithArtificial = Path.Combine(NewDataPath(), "export_with_artificial.ravendbdump");
        operation = await store.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions { IncludeArtificial = true }, toFileWithArtificial);
        await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

        var importOptionsWithoutArtificial = new DatabaseSmugglerImportOptions();
        var importOptionsWithArtificial = new DatabaseSmugglerImportOptions { IncludeArtificial = true };
        // Fix: use '&= ~' to exclude indexes. The original '|= ~DatabaseItemType.Indexes'
        // ORs in the complement, i.e. it turns ON every other item type and leaves
        // Indexes set, instead of removing Indexes from the set.
        importOptionsWithoutArtificial.OperateOnTypes &= ~DatabaseItemType.Indexes;
        importOptionsWithArtificial.OperateOnTypes &= ~DatabaseItemType.Indexes;

        // no artificial in file
        // include artificial is false
        using (var innerStore = GetDocumentStore())
        {
            operation = await innerStore.Smuggler.ImportAsync(importOptionsWithoutArtificial, toFileWithoutArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        // no artificial in file
        // include artificial is true
        using (var innerStore = GetDocumentStore())
        {
            operation = await innerStore.Smuggler.ImportAsync(importOptionsWithArtificial, toFileWithoutArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        // artificial in file
        // include artificial is false
        using (var innerStore = GetDocumentStore())
        {
            operation = await innerStore.Smuggler.ImportAsync(importOptionsWithoutArtificial, toFileWithArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        // artificial in file
        // include artificial is true
        using (var innerStore = GetDocumentStore())
        {
            operation = await innerStore.Smuggler.ImportAsync(importOptionsWithArtificial, toFileWithArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.NotNull(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }
    }
}
// Verifies that artificial (map-reduce output) documents are not resurrected by a
// backup/restore (indexes skipped), and that importing the backup file honors the
// IncludeArtificial import flag.
public async Task IncludeArtificialDocuments_Backup_ShouldWork()
{
    using (var store = GetDocumentStore())
    {
        new MapReduce_WithOutput().Execute(store);

        using (var session = store.OpenSession())
        {
            session.Store(new Company { Name = "HR" });
            session.SaveChanges();
        }

        WaitForIndexing(store);

        string artificialDocumentId = null;

        // Poll until the map-reduce output (artificial) document materializes.
        // BUGFIX: null-check the query result before touching result.Id — the original
        // dereferenced it first, so the lambda could throw an NRE while the index
        // output did not exist yet.
        Assert.True(WaitForValue(() =>
        {
            using (var session = store.OpenSession())
            {
                var result = session.Query<MapReduce_WithOutput.Result>().SingleOrDefault();
                if (result == null)
                    return false;

                artificialDocumentId = result.Id;
                return true;
            }
        }, true));

        Assert.NotNull(artificialDocumentId);

        var toFolderWithArtificial = Path.Combine(NewDataPath(), "BackupFolder");
        var config = new PeriodicBackupConfiguration
        {
            BackupType = BackupType.Backup,
            LocalSettings = new LocalSettings { FolderPath = toFolderWithArtificial },
            IncrementalBackupFrequency = "* * * * *" //every minute
        };

        var backupTaskId = (store.Maintenance.Send(new UpdatePeriodicBackupOperation(config))).TaskId;
        store.Maintenance.Send(new StartBackupOperation(true, backupTaskId));

        // Wait until the backup task has actually processed something.
        var getPeriodicBackupStatusOperation = new GetPeriodicBackupStatusOperation(backupTaskId);
        Assert.True(WaitForValue(() =>
        {
            var getPeriodicBackupResult = store.Maintenance.Send(getPeriodicBackupStatusOperation);
            return getPeriodicBackupResult.Status?.LastEtag > 0;
        }, true));

        // The backup is written into a dated subfolder of the configured folder.
        toFolderWithArtificial = Directory.GetDirectories(toFolderWithArtificial).First();

        // Restore (with indexes skipped) must not bring the artificial document back.
        var toDatabaseName = store.Database + "_restored";

        using (RestoreDatabase(store, new RestoreBackupConfiguration
        {
            BackupLocation = toFolderWithArtificial,
            DatabaseName = toDatabaseName,
            SkipIndexes = true
        }))
        {
            using (var session = store.OpenSession(toDatabaseName))
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        var importOptionsWithoutArtificial = new DatabaseSmugglerImportOptions();
        var importOptionsWithArtificial = new DatabaseSmugglerImportOptions { IncludeArtificial = true };

        // BUGFIX: was '|= ~DatabaseItemType.Indexes', which ORs in every item type
        // EXCEPT Indexes instead of clearing the Indexes flag. '&= ~flag' removes
        // Indexes from the import so the map-reduce index cannot regenerate the
        // artificial document on the target store.
        importOptionsWithoutArtificial.OperateOnTypes &= ~DatabaseItemType.Indexes;
        importOptionsWithArtificial.OperateOnTypes &= ~DatabaseItemType.Indexes;

        var toFileWithArtificial = Directory.GetFiles(toFolderWithArtificial).First();

        // artificial in file
        // include artificial is false
        using (var innerStore = GetDocumentStore())
        {
            var operation = await innerStore.Smuggler.ImportAsync(importOptionsWithoutArtificial, toFileWithArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.Null(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }

        // artificial in file
        // include artificial is true
        using (var innerStore = GetDocumentStore())
        {
            var operation = await innerStore.Smuggler.ImportAsync(importOptionsWithArtificial, toFileWithArtificial);
            await operation.WaitForCompletionAsync(TimeSpan.FromSeconds(30));

            using (var session = innerStore.OpenSession())
            {
                Assert.NotNull(session.Load<MapReduce_WithOutput.Result>(artificialDocumentId));
            }
        }
    }
}
// Exports a database from a 4.2 store configured with 4.2-era features (sorters,
// pull replications, ETLs, conflict solver, custom settings) and imports the dump
// into a 4.1.4 server, verifying documents survive while the 4.2-only record
// configuration does not come along.
public async Task CanExportFrom42AndImportTo41()
{
    var file = GetTempFileName();
    try
    {
        long countOfDocuments;
        // Source store: pile every kind of database-record configuration onto it.
        using (var store42 = GetDocumentStore(new Options
        {
            ModifyDatabaseRecord = record =>
            {
                record.Settings[RavenConfiguration.GetKey(x => x.Patching.MaxNumberOfCachedScripts)] = "1024";
                record.ConflictSolverConfig = new ConflictSolver
                {
                    ResolveToLatest = false,
                    ResolveByCollection = new Dictionary <string, ScriptResolver>
                    {
                        { "ConflictSolver", new ScriptResolver() { Script = "Script" } }
                    }
                };
                record.Sorters = new Dictionary <string, SorterDefinition>
                {
                    { "MySorter", new SorterDefinition { Name = "MySorter", Code = GetSorter("RavenDB_8355.MySorter.cs") } }
                };
                record.ExternalReplications = new List <ExternalReplication>
                {
                    new ExternalReplication("tempDatabase", "ExternalReplication")
                    {
                        TaskId = 1, Name = "External", MentorNode = "B",
                        DelayReplicationFor = new TimeSpan(4), Url = "http://127.0.0.1/", Disabled = false
                    }
                };
                record.SinkPullReplications = new List <PullReplicationAsSink>
                {
                    new PullReplicationAsSink()
                    {
                        Database = "sinkDatabase", CertificatePassword = "******",
                        CertificateWithPrivateKey = "CertificateWithPrivateKey", TaskId = 2,
                        Name = "Sink", MentorNode = "A", HubDefinitionName = "hub"
                    }
                };
                record.HubPullReplications = new List <PullReplicationDefinition>
                {
                    new PullReplicationDefinition()
                    {
                        TaskId = 3, Name = "hub", MentorNode = "A", DelayReplicationFor = new TimeSpan(3),
                    }
                };
                record.RavenEtls = new List <RavenEtlConfiguration>
                {
                    new RavenEtlConfiguration()
                    {
                        AllowEtlOnNonEncryptedChannel = true, ConnectionStringName = "ConnectionName",
                        MentorNode = "A", Name = "Etl", TaskId = 4
                    }
                };
                record.SqlEtls = new List <SqlEtlConfiguration>
                {
                    new SqlEtlConfiguration()
                    {
                        AllowEtlOnNonEncryptedChannel = true, ConnectionStringName = "connection",
                        ForceQueryRecompile = false, Name = "sql", ParameterizeDeletes = false, TaskId = 5
                    }
                };
            }
        }))
        {
            using (var session = store42.OpenSession())
            {
                for (var i = 0; i < 5; i++)
                {
                    session.Store(new User { Name = "raven" + i });
                }
                session.SaveChanges();
            }

            var operation = await store42.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            // Capture the source document count to compare after import.
            var stats = await store42.Maintenance.SendAsync(new GetStatisticsOperation());
            countOfDocuments = stats.CountOfDocuments;
        }

        // Target: a real 4.1.4 server process spun up by the interversion test base.
        using (var store41 = await GetDocumentStoreAsync("4.1.4"))
        {
            var options = new DatabaseSmugglerImportOptions();
            // Strip item types the 4.1 target should not receive from a 4.2 dump.
            options.OperateOnTypes &= ~DatabaseItemType.CounterGroups;
            options.OperateOnTypes &= ~DatabaseItemType.Subscriptions;
            options.OperateOnTypes &= ~DatabaseItemType.CompareExchangeTombstones;
            options.SkipRevisionCreation = true;

            var operation = await store41.Smuggler.ImportAsync(options, file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var stats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, stats.CountOfDocuments);

            var record = await store41.Maintenance.Server.SendAsync(new GetDatabaseRecordOperation(store41.Database));
            record.Settings.TryGetValue("Patching.MaxNumberOfCachedScripts", out string value);
            // None of the source record's configuration should exist on the 4.1 target.
            Assert.Null(value);
            Assert.Null(record.ConflictSolverConfig);
            Assert.Equal(0, record.Sorters.Count);
            Assert.Equal(0, record.ExternalReplications.Count);
            Assert.Equal(0, record.SinkPullReplications.Count);
            Assert.Equal(0, record.HubPullReplications.Count);
            Assert.Equal(0, record.RavenEtls.Count);
            Assert.Equal(0, record.SqlEtls.Count);
            Assert.Equal(0, record.PeriodicBackups.Count);
        }
    }
    finally
    {
        File.Delete(file);
    }
}
// Runs this (4.1-era) test client against two 4.2 nightly servers: exports a
// database carrying record configuration from server _1 and imports the dump into
// server _2, verifying documents plus the conflict solver and ongoing tasks
// (external replication, Raven/SQL ETL) round-trip.
public async Task CanExportAndImportClient41Server42()
{
    var file = GetTempFileName();
    try
    {
        long countOfDocuments;
        using (var store42export = await GetDocumentStoreAsync("4.2.0-nightly-20190223-0601", new InterversionTestOptions
        {
            ModifyDatabaseName = s => $"{s}_1",
            ModifyDatabaseRecord = record =>
            {
                record.Settings[RavenConfiguration.GetKey(x => x.Patching.MaxNumberOfCachedScripts)] = "1024";
                record.ConflictSolverConfig = new ConflictSolver
                {
                    ResolveToLatest = false,
                    ResolveByCollection = new Dictionary <string, ScriptResolver>
                    {
                        { "ConflictSolver", new ScriptResolver() { Script = "Script" } }
                    }
                };
                record.ExternalReplications = new List <ExternalReplication>
                {
                    new ExternalReplication("tempDatabase", "ExternalReplication")
                    {
                        TaskId = 1, Name = "External", DelayReplicationFor = new TimeSpan(4),
                        Url = "http://127.0.0.1/", Disabled = false
                    }
                };
                record.RavenEtls = new List <RavenEtlConfiguration>
                {
                    new RavenEtlConfiguration()
                    {
                        AllowEtlOnNonEncryptedChannel = true, ConnectionStringName = "ConnectionName",
                        MentorNode = "A", Name = "Etl", TaskId = 4
                    }
                };
                record.SqlEtls = new List <SqlEtlConfiguration>
                {
                    new SqlEtlConfiguration()
                    {
                        AllowEtlOnNonEncryptedChannel = true, ForceQueryRecompile = false,
                        ConnectionStringName = "connection", Name = "sql", ParameterizeDeletes = false, MentorNode = "A"
                    }
                };
            }
        }))
        {
            using (var session = store42export.OpenSession())
            {
                for (var i = 0; i < 5; i++)
                {
                    session.Store(new User { Name = "raven" + i });
                }
                session.SaveChanges();
            }

            var operation = await store42export.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            // Capture the source document count to compare after import.
            var stats = await store42export.Maintenance.SendAsync(new GetStatisticsOperation());
            countOfDocuments = stats.CountOfDocuments;
        }

        // Second 4.2 server with an unconfigured database as the import target.
        using (var store42Import = await GetDocumentStoreAsync("4.2.0-nightly-20190223-0601", new InterversionTestOptions
        {
            ModifyDatabaseName = s => $"{s}_2",
        }))
        {
            var options = new DatabaseSmugglerImportOptions();
            options.SkipRevisionCreation = true;

            var operation = await store42Import.Smuggler.ImportAsync(options, file);
            await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var stats = await store42Import.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, stats.CountOfDocuments);

            var record = await store42Import.Maintenance.Server.SendAsync(new GetDatabaseRecordOperation(store42Import.Database));
            // Raw database settings are not carried by the dump, so the cached-scripts
            // override must be absent on the target...
            record.Settings.TryGetValue("Patching.MaxNumberOfCachedScripts", out string value);
            Assert.Null(value);
            // ...but the conflict solver configuration is restored verbatim.
            Assert.NotNull(record.ConflictSolverConfig);
            Assert.Equal(false, record.ConflictSolverConfig.ResolveToLatest);
            Assert.Equal(1, record.ConflictSolverConfig.ResolveByCollection.Count);
            Assert.Equal(true, record.ConflictSolverConfig.ResolveByCollection.TryGetValue("ConflictSolver", out ScriptResolver sr));
            Assert.Equal("Script", sr.Script);
            // NOTE(review): the replication/ETL tasks were configured Disabled = false
            // but are asserted Disabled = true here — presumably imported ongoing tasks
            // arrive disabled by design; confirm against smuggler behavior.
            Assert.Equal(1, record.ExternalReplications.Count);
            Assert.Equal("tempDatabase", record.ExternalReplications[0].Database);
            Assert.Equal(true, record.ExternalReplications[0].Disabled);
            Assert.Equal(1, record.RavenEtls.Count);
            Assert.Equal("Etl", record.RavenEtls.First().Name);
            Assert.Equal("ConnectionName", record.RavenEtls.First().ConnectionStringName);
            Assert.Equal(true, record.RavenEtls.First().AllowEtlOnNonEncryptedChannel);
            Assert.Equal(true, record.RavenEtls.First().Disabled);
            Assert.Equal(1, record.SqlEtls.Count);
            Assert.Equal("sql", record.SqlEtls.First().Name);
            Assert.Equal(false, record.SqlEtls.First().ParameterizeDeletes);
            Assert.Equal(false, record.SqlEtls.First().ForceQueryRecompile);
            Assert.Equal("connection", record.SqlEtls.First().ConnectionStringName);
            Assert.Equal(true, record.SqlEtls.First().AllowEtlOnNonEncryptedChannel);
            Assert.Equal(false, record.SqlEtls.First().Disabled);
        }
    }
    finally
    {
        File.Delete(file);
    }
}
// Shared driver for the pulsating-read-transaction export tests.
// Phase 1: seed users/orders (optionally with counters and repeated overwrites to
//          build up revisions), export, and check the smuggler progress counts
//          against the source statistics.
// Phase 2: import into a clean store and verify document/counter/revision counts.
// Phase 3: delete every deleteUserFactor-th user, export again including tombstones,
//          and verify the counts on a third store after importing that dump.
protected static async Task CanExportWithPulsatingReadTransaction_ActualTest(int numberOfUsers, int numberOfCountersPerUser, int numberOfRevisionsPerDocument, int numberOfOrders, int deleteUserFactor, DocumentStore storeToExport, string file, DocumentStore storeToImport, string fileAfterDeletions, DocumentStore storeToAfterDeletions)
{
    if (numberOfRevisionsPerDocument > 0)
    {
        // Enable revisions so the document writes below generate revision entries.
        var configuration = new RevisionsConfiguration
        {
            Default = new RevisionsCollectionConfiguration
            {
                Disabled = false,
                MinimumRevisionsToKeep = 10
            }
        };

        await storeToExport.Maintenance.SendAsync(new ConfigureRevisionsOperation(configuration));
    }

    // Seed the initial users and orders in one bulk insert.
    using (var bulk = storeToExport.BulkInsert())
    {
        for (int i = 0; i < Math.Max(numberOfUsers, numberOfOrders); i++)
        {
            if (i < numberOfUsers)
            {
                bulk.Store(new User(), "users/" + i);
            }

            if (i < numberOfOrders)
            {
                bulk.Store(new Order(), "orders/" + i);
            }
        }
    }

    if (numberOfRevisionsPerDocument > 2)
    {
        // Overwrite every document numberOfRevisionsPerDocument times so each one
        // accumulates revisions (each pass uses its own bulk insert).
        for (int j = 0; j < numberOfRevisionsPerDocument; j++)
        {
            using (var bulk = storeToExport.BulkInsert())
            {
                for (int i = 0; i < Math.Max(numberOfUsers, numberOfOrders); i++)
                {
                    if (i < numberOfUsers)
                    {
                        bulk.Store(new User() { Name = i + " " + j }, "users/" + i);
                    }

                    if (i < numberOfOrders)
                    {
                        bulk.Store(new Order() { Company = i + " " + j }, "orders/" + i);
                    }
                }
            }
        }
    }

    // Add numberOfCountersPerUser counters to every user in a single session.
    using (var session = storeToExport.OpenSession())
    {
        for (int i = 0; i < numberOfUsers; i++)
        {
            for (int j = 0; j < numberOfCountersPerUser; j++)
            {
                session.CountersFor("users/" + i).Increment("counter/" + j, 100);
            }
        }

        session.SaveChanges();
    }

    var originalStats = await storeToExport.Maintenance.SendAsync(new GetStatisticsOperation());

    var options = new DatabaseSmugglerExportOptions();
    var operation = await storeToExport.Smuggler.ExportAsync(options, file);
    var result = await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(2));

    // The export progress must have read exactly what the source store reports.
    SmugglerResult.SmugglerProgress progress = ((SmugglerResult)result).Progress as SmugglerResult.SmugglerProgress;
    Assert.Equal(originalStats.CountOfDocuments, progress.Documents.ReadCount);
    Assert.Equal(originalStats.CountOfCounterEntries, progress.Counters.ReadCount);
    Assert.Equal(originalStats.CountOfRevisionDocuments, progress.RevisionDocuments.ReadCount);

    operation = await storeToImport.Smuggler.ImportAsync(new DatabaseSmugglerImportOptions(), file);
    await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(2));

    var stats = await storeToImport.Maintenance.SendAsync(new GetStatisticsOperation());
    Assert.Equal(numberOfUsers + numberOfOrders, stats.CountOfDocuments);
    Assert.Equal(numberOfUsers, stats.CountOfCounterEntries);

    // NOTE(review): this assumes the overwrite loop above contributes exactly
    // numberOfRevisionsPerDocument revisions per document (i.e. it is not clipped
    // by MinimumRevisionsToKeep = 10) — confirm for larger parameter values.
    var expectedNumberOfRevisions = (numberOfUsers + numberOfOrders) * numberOfRevisionsPerDocument;

    if (numberOfCountersPerUser > 0)
    {
        // if we added counters then additional revisions were created
        expectedNumberOfRevisions += numberOfUsers;
    }

    Assert.Equal(expectedNumberOfRevisions, stats.CountOfRevisionDocuments);

    // deleting some docs
    var deletedUsers = 0;

    using (var session = storeToExport.OpenSession())
    {
        for (int i = 0; i < numberOfUsers; i++)
        {
            // Delete every deleteUserFactor-th user (i divisible by the factor).
            if (i % deleteUserFactor != 0)
            {
                continue;
            }

            session.Delete("users/" + i);
            deletedUsers++;
        }

        session.SaveChanges();
    }

    // import to new db
    var originalStatsAfterDeletions = await storeToExport.Maintenance.SendAsync(new GetStatisticsOperation());

    // The second export also carries the tombstones left by the deletions.
    options.OperateOnTypes |= DatabaseItemType.Tombstones;
    operation = await storeToExport.Smuggler.ExportAsync(options, fileAfterDeletions);
    result = await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(2));

    progress = ((SmugglerResult)result).Progress as SmugglerResult.SmugglerProgress;
    Assert.Equal(originalStatsAfterDeletions.CountOfDocuments, progress.Documents.ReadCount);
    Assert.Equal(originalStatsAfterDeletions.CountOfCounterEntries, progress.Counters.ReadCount);
    Assert.Equal(originalStatsAfterDeletions.CountOfRevisionDocuments, progress.RevisionDocuments.ReadCount);
    Assert.Equal(originalStatsAfterDeletions.CountOfTombstones, progress.Tombstones.ReadCount);

    var importOptions = new DatabaseSmugglerImportOptions();
    importOptions.OperateOnTypes |= DatabaseItemType.Tombstones;

    operation = await storeToAfterDeletions.Smuggler.ImportAsync(importOptions, fileAfterDeletions);
    await operation.WaitForCompletionAsync(TimeSpan.FromMinutes(2));

    var statsAfterDeletions = await storeToAfterDeletions.Maintenance.SendAsync(new GetStatisticsOperation());
    Assert.Equal(numberOfUsers - deletedUsers + numberOfOrders, statsAfterDeletions.CountOfDocuments);
    Assert.Equal(numberOfUsers - deletedUsers, statsAfterDeletions.CountOfCounterEntries);
    Assert.Equal(expectedNumberOfRevisions, statsAfterDeletions.CountOfRevisionDocuments);
    Assert.Equal(deletedUsers, statsAfterDeletions.CountOfTombstones);
}
// Round-trips a 4.1 export (sample data plus one counter) into a real 4.0.7 server.
// Counters and other item types the 4.0 target should not receive are stripped from
// the import, so the target ends up with the same documents/attachments/indexes/
// revisions as the source but zero counter entries.
public async Task CanExportFrom41AndImportTo40()
{
    var file = GetTempFileName();

    long countOfDocuments;
    long countOfAttachments;
    long countOfIndexes;
    long countOfRevisions;

    try
    {
        using (var store41 = GetDocumentStore())
        {
            store41.Maintenance.Send(new CreateSampleDataOperation());

            // Put one counter on a known sample-data document.
            using (var session = store41.OpenSession())
            {
                var order = session.Load<Order>("orders/1-A");
                Assert.NotNull(order);
                session.CountersFor(order).Increment("downloads", 100);
                session.SaveChanges();
            }

            var exportOperation = await store41.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
            await exportOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            // Snapshot the source counts for comparison after the import.
            var sourceStats = await store41.Maintenance.SendAsync(new GetStatisticsOperation());
            countOfDocuments = sourceStats.CountOfDocuments;
            countOfAttachments = sourceStats.CountOfAttachments;
            countOfIndexes = sourceStats.CountOfIndexes;
            countOfRevisions = sourceStats.CountOfRevisionDocuments;
            Assert.Equal(1, sourceStats.CountOfCounterEntries);
        }

        using (var store40 = await GetDocumentStoreAsync("4.0.7"))
        {
            var importOptions = new DatabaseSmugglerImportOptions { SkipRevisionCreation = true };

            // Clear every item type the 4.0 target should not be fed.
#pragma warning disable CS0618 // Type or member is obsolete
            importOptions.OperateOnTypes &= ~(DatabaseItemType.Counters
                                              | DatabaseItemType.CounterGroups
                                              | DatabaseItemType.Attachments
                                              | DatabaseItemType.Subscriptions
                                              | DatabaseItemType.CompareExchangeTombstones);
#pragma warning restore CS0618 // Type or member is obsolete

            var importOperation = await store40.Smuggler.ImportAsync(importOptions, file);
            await importOperation.WaitForCompletionAsync(TimeSpan.FromMinutes(1));

            var targetStats = await store40.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(countOfDocuments, targetStats.CountOfDocuments);
            Assert.Equal(countOfAttachments, targetStats.CountOfAttachments);
            Assert.Equal(countOfIndexes, targetStats.CountOfIndexes);
            Assert.Equal(countOfRevisions, targetStats.CountOfRevisionDocuments);
            Assert.Equal(0, targetStats.CountOfCounterEntries);
        }
    }
    finally
    {
        File.Delete(file);
    }
}