public async Task CanExportAndImportAttachmentsAndRevisionAttachments()
{
    var file = GetTempFileName();
    try
    {
        using (var store1 = GetDocumentStore(new Options
        {
            ModifyDatabaseName = s => $"{s}_store1"
        }))
        {
            await SetDatabaseId(store1, new Guid("00000000-48c4-421e-9466-000000000000"));

            await RevisionsHelper.SetupRevisions(Server.ServerStore, store1.Database, configuration =>
            {
                configuration.Collections["Users"].PurgeOnDelete = false;
                configuration.Collections["Users"].MinimumRevisionsToKeep = 4;
            });

            // Create a document with attachments, plus one large attachment on top of it
            AttachmentsRevisions.CreateDocumentWithAttachments(store1);
            using (var bigStream = new MemoryStream(Enumerable.Range(1, 999 * 1024).Select(x => (byte)x).ToArray()))
                store1.Operations.Send(new PutAttachmentOperation("users/1", "big-file", bigStream, "image/png"));

            // Export documents, revisions and their attachments to the temp file
            var exportOperation = await store1.Smuggler.ExportAsync(new DatabaseSmugglerExportOptions(), file);
            var exportResult = (SmugglerResult)exportOperation.WaitForCompletion();
            Assert.Equal(1, exportResult.Documents.ReadCount);
            Assert.Equal(4, exportResult.RevisionDocuments.ReadCount);
            Assert.Equal(4, exportResult.Documents.Attachments.ReadCount);
            Assert.Equal(10, exportResult.RevisionDocuments.Attachments.ReadCount);

            var stats = await store1.Maintenance.SendAsync(new GetStatisticsOperation());
            Assert.Equal(1, stats.CountOfDocuments);
            Assert.Equal(4, stats.CountOfRevisionDocuments);
            Assert.Equal(14, stats.CountOfAttachments);
            Assert.Equal(4, stats.CountOfUniqueAttachments);
        }

        using (var store2 = GetDocumentStore(new Options
        {
            ModifyDatabaseName = s => $"{s}_store2"
        }))
        {
            var dbId = new Guid("00000000-48c4-421e-9466-000000000000");
            await SetDatabaseId(store2, dbId);

            await RevisionsHelper.SetupRevisions(Server.ServerStore, store2.Database);

            for (var i = 0; i < 2; i++) // Make sure that we can import attachments twice and it will overwrite
            {
                var importOperation = await store2.Smuggler.ImportAsync(new DatabaseSmugglerImportOptions(), file);
                var importResult = (SmugglerResult)importOperation.WaitForCompletion();
                Assert.Equal(1, importResult.Documents.ReadCount);
                Assert.Equal(4, importResult.RevisionDocuments.ReadCount);
                Assert.Equal(4, importResult.Documents.Attachments.ReadCount);
                Assert.Equal(4, importResult.RevisionDocuments.Attachments.ReadCount);

                var stats = await store2.Maintenance.SendAsync(new GetStatisticsOperation());
                Assert.Equal(1, stats.CountOfDocuments);
                Assert.Equal(5, stats.CountOfRevisionDocuments);
                Assert.Equal(14 + 4, stats.CountOfAttachments); // the imported document will create 1 additional revision with 4 attachments
                Assert.Equal(4, stats.CountOfUniqueAttachments);

                // Read back the big attachment and verify its name, hash and content
                using (var session = store2.OpenSession())
                {
                    var readBuffer = new byte[1024 * 1024];
                    using (var attachmentStream = new MemoryStream(readBuffer))
                    using (var attachment = session.Advanced.Attachments.Get("users/1", "big-file"))
                    {
                        attachment.Stream.CopyTo(attachmentStream);
                        Assert.Equal("big-file", attachment.Details.Name);
                        Assert.Equal("zKHiLyLNRBZti9DYbzuqZ/EDWAFMgOXB+SwKvjPAINk=", attachment.Details.Hash);
                        Assert.Equal(999 * 1024, attachmentStream.Position);
                        Assert.Equal(Enumerable.Range(1, 999 * 1024).Select(x => (byte)x), readBuffer.Take((int)attachmentStream.Position));
                    }
                }
            }
        }
    }
    finally
    {
        File.Delete(file);
    }
}
public async Task CanExportAndImportAttachmentsAndRevisionAttachments()
{
    var file = Path.GetTempFileName();
    try
    {
        using (var store1 = GetDocumentStore(new Options
        {
            ModifyDatabaseName = s => $"{s}_store1"
        }))
        {
            await SetDatabaseId(store1, new Guid("00000000-48c4-421e-9466-000000000000"));

            await RevisionsHelper.SetupRevisions(Server.ServerStore, store1.Database, false, 4);

            AttachmentsRevisions.CreateDocumentWithAttachments(store1);
            using (var bigStream = new MemoryStream(Enumerable.Range(1, 999 * 1024).Select(x => (byte)x).ToArray()))
                store1.Operations.Send(new PutAttachmentOperation("users/1", "big-file", bigStream, "image/png"));

            /*var result = */ await store1.Smuggler.ExportAsync(new DatabaseSmugglerOptions(), file); // TODO: RavenDB-6936 store.Smuggler.Export and Import method should return the SmugglerResult

            var stats = await store1.Admin.SendAsync(new GetStatisticsOperation());
            Assert.Equal(1, stats.CountOfDocuments);
            Assert.Equal(4, stats.CountOfRevisionDocuments);
            Assert.Equal(14, stats.CountOfAttachments);
            Assert.Equal(4, stats.CountOfUniqueAttachments);
        }

        using (var store2 = GetDocumentStore(new Options
        {
            ModifyDatabaseName = s => $"{s}_store2"
        }))
        {
            var dbId = new Guid("00000000-48c4-421e-9466-000000000000");
            await SetDatabaseId(store2, dbId);

            await RevisionsHelper.SetupRevisions(Server.ServerStore, store2.Database);

            for (var i = 0; i < 2; i++) // Make sure that we can import attachments twice and it will overwrite
            {
                await store2.Smuggler.ImportAsync(new DatabaseSmugglerOptions(), file);

                var stats = await store2.Admin.SendAsync(new GetStatisticsOperation());
                Assert.Equal(1, stats.CountOfDocuments);
                Assert.Equal(5, stats.CountOfRevisionDocuments);
                Assert.Equal(14, stats.CountOfAttachments);
                Assert.Equal(4, stats.CountOfUniqueAttachments);

                using (var session = store2.OpenSession())
                {
                    var readBuffer = new byte[1024 * 1024];
                    using (var attachmentStream = new MemoryStream(readBuffer))
                    using (var attachment = session.Advanced.GetAttachment("users/1", "big-file"))
                    {
                        attachment.Stream.CopyTo(attachmentStream);
                        Assert.Contains("A:" + (2 + 20 * i), attachment.Details.ChangeVector);
                        Assert.Equal("big-file", attachment.Details.Name);
                        Assert.Equal("zKHiLyLNRBZti9DYbzuqZ/EDWAFMgOXB+SwKvjPAINk=", attachment.Details.Hash);
                        Assert.Equal(999 * 1024, attachmentStream.Position);
                        Assert.Equal(Enumerable.Range(1, 999 * 1024).Select(x => (byte)x), readBuffer.Take((int)attachmentStream.Position));
                    }
                }
            }
        }
    }
    finally
    {
        File.Delete(file);
    }
}