public void CanBackupToDirectory()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                var periodicBackupSetup = new PeriodicExportSetup
                {
                    LocalFolderName = backupPath,
                    IntervalMilliseconds = 25
                };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            // Wait (bounded) for at least one periodic export to record its status.
            // The original ignored the SpinUntil result, so a timeout silently fell
            // through to importing a missing/partial backup — fail fast instead.
            var exported = SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey) != null, 10000);
            Assert.True(exported, "Periodic export did not run within 10 seconds");
        }

        using (var store = NewDocumentStore())
        {
            var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
            dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

            using (var session = store.OpenSession())
            {
                Assert.Equal("oren", session.Load<User>(1).Name);
            }
        }
    }
    finally
    {
        // Clean up even when an assertion fails (matches the try/finally pattern
        // used by the other backup tests in this file).
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public async Task CanPerformDump_Dumper()
{
    // Export 2000 users with the incremental dumper, then verify that the dump
    // restores all of them into a fresh store.
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            InsertUsers(store, 0, 2000);

            var exporter = new DatabaseDataDumper(store.SystemDatabase)
            {
                Options = { Incremental = true }
            };
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                ToFile = backupPath,
            });
        }

        VerifyDump(backupPath, store =>
        {
            using (var session = store.OpenSession())
            {
                var count = session.Query<User>()
                    .Customize(x => x.WaitForNonStaleResultsAsOfNow())
                    .Count();
                Assert.Equal(2000, count);
            }
        });
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public void MaxNumberOfItemsToProcessInSingleBatchShouldBeRespectedByDataDumper()
{
    var dumpFile = Path.Combine(NewDataPath(forceCreateDir: true), "raven.dump");

    using (var server = GetNewServer(configureConfig: configuration => configuration.MaxNumberOfItemsToProcessInSingleBatch = 1234))
    {
        // A requested batch size (4321) above the server limit (1234) must be
        // clamped down by the export operation.
        var smuggler = new DatabaseDataDumper(server.SystemDatabase, options: new SmugglerDatabaseOptions { BatchSize = 4321 });
        Assert.Equal(4321, smuggler.Options.BatchSize);
        smuggler.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFile }).ResultUnwrap();
        Assert.Equal(1234, smuggler.Options.BatchSize);

        // The same clamping must happen on import.
        smuggler = new DatabaseDataDumper(server.SystemDatabase, options: new SmugglerDatabaseOptions { BatchSize = 4321 });
        Assert.Equal(4321, smuggler.Options.BatchSize);
        smuggler.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = dumpFile }).Wait();
        Assert.Equal(1234, smuggler.Options.BatchSize);

        // A batch size already below the server limit is left untouched.
        smuggler = new DatabaseDataDumper(server.SystemDatabase, options: new SmugglerDatabaseOptions { BatchSize = 1000 });
        Assert.Equal(1000, smuggler.Options.BatchSize);
        smuggler.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFile }).ResultUnwrap();
        Assert.Equal(1000, smuggler.Options.BatchSize);
    }
}
public async Task SmugglerTransformShouldRecognizeNumericPropertiesEvenThoughTheyHaveTheSameNames()
{
    using (var stream = new MemoryStream())
    {
        // Two properties share the name "Min": an integer at the root and a
        // double nested inside "Range". A transform round-trip must not unify
        // their numeric types.
        var original = new RavenJObject
        {
            { "Range", new RavenJArray { new RavenJObject { { "Min", 2.4 } } } },
            { "Min", 1 }
        };

        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.Put("docs/1", null, original, new RavenJObject());

            var exporter = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { TransformScript = EmptyTransform });
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                From = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase },
                ToStream = stream
            });
        }

        stream.Position = 0;

        using (var store = NewDocumentStore())
        {
            var importer = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { TransformScript = EmptyTransform });
            await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions>
            {
                FromStream = stream,
                To = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase }
            });

            var doc = store.DatabaseCommands.Get("docs/1").DataAsJson;
            Assert.NotNull(doc);
            Assert.Equal(original["Min"].Type, doc["Min"].Type);
            Assert.Equal(((RavenJObject)((RavenJArray)original["Range"])[0])["Min"].Type, ((RavenJObject)((RavenJArray)doc["Range"])[0])["Min"].Type);
            Assert.True(RavenJToken.DeepEquals(original, doc));
        }
    }
}
public void CanFullBackupToDirectory()
{
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    try
    {
        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());

            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                session.Store(new PeriodicExportSetup
                {
                    LocalFolderName = backupPath,
                    FullBackupIntervalMilliseconds = 500
                }, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            WaitForNextFullBackup(store);
        }

        using (var store = NewDocumentStore())
        {
            // Pick the oldest full-dump file the periodic export produced.
            var fullDumpFile = Directory.GetFiles(Path.GetFullPath(backupPath))
                .Where(file => ".ravendb-full-dump".Equals(Path.GetExtension(file), StringComparison.InvariantCultureIgnoreCase))
                .OrderBy(File.GetLastWriteTimeUtc)
                .First();

            var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = false } };
            importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = fullDumpFile }).Wait();

            using (var session = store.OpenSession())
            {
                // Both the document and the attachment must survive the round trip.
                Assert.Equal("oren", session.Load<User>(1).Name);
                Assert.NotNull(store.DatabaseCommands.GetAttachment("attach/1"));
            }
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public async Task CanGetCorrectResult()
{
    using (var store = NewDocumentStore())
    {
        // Import the embedded sample dump into the system database.
        var importer = new DatabaseDataDumper(store.SystemDatabase);
        using (var dump = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.MailingList.Sandbox.ravendump"))
        {
            await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = dump });
        }

        using (var session = store.OpenSession())
        {
            // The imported map/reduce index should produce exactly 4 results.
            var results = session.Query<object>("LogEntry/CountByDate")
                .Customize(x => x.WaitForNonStaleResults())
                .ToList();
            Assert.Equal(4, results.Count);
        }
    }
}
public async Task CanDumpWhenHiddenDocsWithLimit_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var server = GetNewServer())
        {
            using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize())
            {
                InsertHidenUsers(store, 2000);

                // Hidden docs must not be visible through the normal API.
                var user1 = store.DatabaseCommands.Get("users/1");
                Assert.Null(user1);

                InsertUsers(store, 1, 25);

                // now perform full backup
                var dumper = new DatabaseDataDumper(server.SystemDatabase) { Options = { Incremental = true } };
                await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = backupPath });
            }
        }

        VerifyDump(backupPath, store =>
        {
            using (var session = store.OpenSession())
            {
                // Only the 25 visible users should make it into the dump.
                Assert.Equal(25, session.Query<User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
            }
        });
    }
    finally
    {
        // Original deleted the folder only on success, leaking it when any
        // assertion failed; try/finally matches the sibling dumper tests.
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public async Task<HttpResponseMessage> CreateSampleData()
{
    // Refuse to seed a database that already contains documents.
    var results = Database.Queries.Query(Constants.DocumentsByEntityNameIndex, new IndexQuery(), CancellationToken.None);
    if (results.Results.Count > 0)
    {
        return GetMessageWithString("You cannot create sample data in a database that already contains documents", HttpStatusCode.BadRequest);
    }

    // Import the embedded Northwind dump (documents, indexes and transformers;
    // expired documents are kept).
    using (var northwindDump = typeof(StudioTasksController).Assembly.GetManifestResourceStream("Raven.Database.Server.Assets.EmbeddedData.Northwind.dump"))
    {
        var importer = new DatabaseDataDumper(Database)
        {
            Options =
            {
                OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Transformers,
                ShouldExcludeExpired = false
            }
        };
        await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = northwindDump });
    }

    return GetEmptyMessage();
}
/// <summary>
/// Accepts a multipart file upload, saves it to a temp folder, and starts a
/// background task that imports the dump into the current database. Returns
/// immediately with the id of the pending import task.
/// </summary>
/// <param name="batchSize">Smuggler batch size to use for the import.</param>
/// <param name="includeExpiredDocuments">When true, expired documents are imported too.</param>
/// <param name="stripReplicationInformation">When true, replication metadata is removed on import.</param>
/// <param name="operateOnTypes">Which item types (documents, indexes, ...) to import.</param>
/// <param name="filtersPipeDelimited">Encoded filter list; see the parsing comment below.</param>
/// <param name="transformScript">Optional JS transform applied to each document.</param>
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, bool stripReplicationInformation, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    // Stage the upload under %TEMP%\<TempUploadsDirectoryName>. If a *file* with
    // that name exists it would block directory creation, so remove it first.
    string tempPath = Path.GetTempPath();
    var fullTempPath = tempPath + Constants.TempUploadsDirectoryName;
    if (File.Exists(fullTempPath))
        File.Delete(fullTempPath);
    if (Directory.Exists(fullTempPath) == false)
        Directory.CreateDirectory(fullTempPath);

    var streamProvider = new MultipartFileStreamProvider(fullTempPath);
    await Request.Content.ReadAsMultipartAsync(streamProvider).ConfigureAwait(false);
    var uploadedFilePath = streamProvider.FileData[0].LocalFileName;

    // Keep the client-supplied file name (quotes stripped) to show in task status.
    string fileName = null;
    var fileContent = streamProvider.Contents.SingleOrDefault();
    if (fileContent != null)
    {
        fileName = fileContent.Headers.ContentDisposition.FileName.Replace("\"", string.Empty);
    }

    var status = new ImportOperationStatus();
    var cts = new CancellationTokenSource();

    // Run the actual import in the background; the HTTP response returns the
    // task id so the studio can poll for progress.
    var task = Task.Run(async () =>
    {
        try
        {
            using (var fileStream = File.Open(uploadedFilePath, FileMode.Open, FileAccess.Read))
            {
                var dataDumper = new DatabaseDataDumper(Database);
                dataDumper.Progress += s => status.LastProgress = s;
                var smugglerOptions = dataDumper.Options;
                smugglerOptions.BatchSize = batchSize;
                smugglerOptions.ShouldExcludeExpired = !includeExpiredDocuments;
                smugglerOptions.StripReplicationInformation = stripReplicationInformation;
                smugglerOptions.OperateOnTypes = operateOnTypes;
                smugglerOptions.TransformScript = transformScript;
                smugglerOptions.CancelToken = cts;

                // Filters arrive without the aid of the model binder, encoded as
                // "path;;;value;;;shouldMatch" triples joined by "|||", e.g.
                // "pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false",
                // because WebAPI offers no way to pass a list alongside a file
                // upload other than a simple string we parse ourselves.
                if (filtersPipeDelimited != null)
                {
                    smugglerOptions.Filters.AddRange(filtersPipeDelimited
                        .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
                        .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
                        .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
                }

                await dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = fileStream });
            }
        }
        catch (Exception e)
        {
            // Record the failure in the task status so the studio can display it.
            status.Faulted = true;
            status.State = RavenJObject.FromObject(new { Error = e.ToString() });
            if (cts.Token.IsCancellationRequested)
            {
                status.State = RavenJObject.FromObject(new { Error = "Task was cancelled" });
                cts.Token.ThrowIfCancellationRequested(); //needed for displaying the task status as canceled and not faulted
            }

            if (e is InvalidDataException)
            {
                status.ExceptionDetails = e.Message;
            }
            else if (e is Imports.Newtonsoft.Json.JsonReaderException)
            {
                status.ExceptionDetails = "Failed to load JSON Data. Please make sure you are importing .ravendump file, exported by smuggler (aka database export). If you are importing a .ravnedump file then the file may be corrupted";
            }
            else
            {
                status.ExceptionDetails = e.ToString();
            }
            throw;
        }
        finally
        {
            // Always mark completion and drop the staged upload, success or not.
            status.Completed = true;
            File.Delete(uploadedFilePath);
        }
    }, cts.Token);

    long id;
    Database.Tasks.AddTask(task, status, new TaskActions.PendingTaskDescription
    {
        StartTime = SystemTime.UtcNow,
        TaskType = TaskActions.PendingTaskType.ImportDatabase,
        Payload = fileName,
    }, out id, cts);

    return GetMessageWithObject(new
    {
        OperationId = id
    });
}
/// <summary>
/// Exports from a replication-enabled store with StripReplicationInformation,
/// then imports the same stream twice: into a plain store (replication metadata
/// must be gone) and into a replication-enabled store (the bundle re-adds it).
/// </summary>
public void SmugglerCanStripReplicationInformationDuringImport_Embedded()
{
    using (var stream = new MemoryStream())
    {
        // Phase 1: source store with the Replication bundle — puts get
        // replication metadata automatically.
        using (var store = NewDocumentStore(activeBundles: "Replication"))
        {
            var commands = store.DatabaseCommands;
            commands.Put("keys/1", null, new RavenJObject(), new RavenJObject());
            var doc = commands.Get("keys/1");
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationVersion));

            commands.PutAttachment("keys/1", null, new MemoryStream(), new RavenJObject());
            var attachment = commands.GetAttachment("keys/1");
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationVersion));

            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { StripReplicationInformation = true });
            smuggler.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToStream = stream, From = new RavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase } }).Wait(TimeSpan.FromSeconds(15));
        }

        // Phase 2: import into a store WITHOUT replication — stripped metadata
        // must stay absent. Rewind the stream before reuse.
        stream.Position = 0;

        using (var store = NewDocumentStore())
        {
            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { StripReplicationInformation = true });
            smuggler.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = stream, To = new RavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase, } }).Wait(TimeSpan.FromSeconds(15));

            var commands = store.DatabaseCommands;
            var doc = commands.Get("keys/1");
            Assert.False(doc.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.False(doc.Metadata.ContainsKey(Constants.RavenReplicationVersion));

            var attachment = commands.GetAttachment("keys/1");
            Assert.False(attachment.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.False(attachment.Metadata.ContainsKey(Constants.RavenReplicationVersion));
        }

        // Phase 3: import into a store WITH replication — the bundle re-stamps
        // replication metadata on the incoming items.
        stream.Position = 0;

        using (var store = NewDocumentStore(activeBundles: "Replication"))
        {
            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { StripReplicationInformation = true });
            smuggler.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = stream, To = new RavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase, } }).Wait(TimeSpan.FromSeconds(15));

            var commands = store.DatabaseCommands;
            var doc = commands.Get("keys/1");
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationVersion));

            var attachment = commands.GetAttachment("keys/1");
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationVersion));
        }
    }
}
public void CanBackupDocumentDeletion()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            string userId;
            using (var session = store.OpenSession())
            {
                var periodicBackupSetup = new PeriodicExportSetup { LocalFolderName = backupPath, IntervalMilliseconds = 100 };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            var backupStatus = GetPeriodicBackupStatus(store.SystemDatabase);

            using (var session = store.OpenSession())
            {
                var user = new User { Name = "oren" };
                session.Store(user);
                userId = user.Id;
                session.SaveChanges();
            }

            // First export captures the document, second one its deletion.
            WaitForPeriodicExport(store.SystemDatabase, backupStatus);

            store.DatabaseCommands.Delete(userId, null);
            WaitForPeriodicExport(store.SystemDatabase, backupStatus, x => x.LastDocsDeletionEtag);
        }

        using (var store = NewDocumentStore())
        {
            var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
            dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

            using (var session = store.OpenSession())
            {
                // The deletion must have been replayed by the incremental import.
                Assert.Null(session.Load<User>(1));
            }
        }
    }
    finally
    {
        // Original only cleaned up on success; try/finally stops the backup
        // folder from leaking when an assertion or wait fails.
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public void CanBackupToDirectory_MultipleBackups()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                var periodicBackupSetup = new PeriodicExportSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            // Wait (bounded) for the first export to record a docs etag. The
            // original SpinUntil had no timeout and could hang the test forever.
            var firstExportDone = SpinWait.SpinUntil(() =>
            {
                var jsonDocument = store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey);
                if (jsonDocument == null)
                    return false;
                var periodicBackupStatus = jsonDocument.DataAsJson.JsonDeserialization<PeriodicExportStatus>();
                return periodicBackupStatus.LastDocsEtag != Etag.Empty && periodicBackupStatus.LastDocsEtag != null;
            }, TimeSpan.FromSeconds(30));
            Assert.True(firstExportDone, "First periodic export did not complete within 30 seconds");

            var etagForBackups = store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey).Etag;
            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "ayende" });
                session.SaveChanges();
            }

            // Wait (bounded) for a second, incremental export to update the
            // status document's etag.
            var secondExportDone = SpinWait.SpinUntil(
                () => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey).Etag != etagForBackups,
                TimeSpan.FromSeconds(30));
            Assert.True(secondExportDone, "Second periodic export did not complete within 30 seconds");
        }

        using (var store = NewDocumentStore())
        {
            var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
            dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

            using (var session = store.OpenSession())
            {
                // Both incremental exports must be replayed in order.
                Assert.Equal("oren", session.Load<User>(1).Name);
                Assert.Equal("ayende", session.Load<User>(2).Name);
            }
        }
    }
    finally
    {
        // Clean up even on failure (the original leaked the folder).
        IOExtensions.DeleteDirectory(backupPath);
    }
}
/// <summary>
/// Verifies ShouldDisableVersioningBundle: importing into a versioning-enabled
/// store must not create revisions for the imported documents, must not leave
/// the temporary Raven-Ignore-Versioning marker behind, and versioning must be
/// active again for documents stored after the import.
/// </summary>
public async Task CanDisableVersioningDuringImport_Embedded()
{
    using (var stream = new MemoryStream())
    {
        long countOfDocuments;
        // Export 20 documents (10 users + 10 addresses) from a plain store.
        using (var store = NewDocumentStore())
        {
            using (var session = store.OpenSession())
            {
                for (int i = 0; i < 10; i++)
                {
                    session.Store(new User());
                    session.Store(new Address());
                }
                session.SaveChanges();
            }

            countOfDocuments = store.DatabaseCommands.GetStatistics().CountOfDocuments;

            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions());
            await smuggler.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToStream = stream, From = new RavenConnectionStringOptions() { DefaultDatabase = store.DefaultDatabase } });
        }

        stream.Position = 0;

        // Import into a store with the Versioning bundle active.
        using (var store = NewDocumentStore(activeBundles: "Versioning"))
        {
            using (var session = store.OpenSession())
            {
                session.Store(new Bundles.Versioning.Data.VersioningConfiguration { Exclude = false, Id = "Raven/Versioning/DefaultConfiguration", MaxRevisions = 5 });
                session.SaveChanges();
            }

            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { ShouldDisableVersioningBundle = true });
            await smuggler.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions>() { FromStream = stream, To = new RavenConnectionStringOptions() { DefaultDatabase = store.DefaultDatabase } });

            // No revision documents were created during import.
            var countOfDocsAfterImport = store.DatabaseCommands.GetStatistics().CountOfDocuments;
            Assert.Equal(countOfDocuments, countOfDocsAfterImport - 1); // one additional doc for versioning bundle configuration

            // The temporary marker used to suppress versioning must not leak
            // into the stored metadata.
            var metadata = store.DatabaseCommands.Get("users/1").Metadata;
            Assert.True(metadata.ContainsKey(Constants.RavenIgnoreVersioning) == false, "Metadata contains temporary " + Constants.RavenIgnoreVersioning + " marker");

            // after import versioning should be active
            using (var session = store.OpenSession())
            {
                session.Store(new User(), "users/arek");
                session.SaveChanges();

                var revisionsFor = session.Advanced.GetRevisionsFor<User>("users/arek", 0, 10);
                Assert.Equal(1, revisionsFor.Length);
            }
        }
    }
}
public async Task CanDumpAttachmentsEmpty_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            // Export from a store that holds no attachments at all.
            var exporter = new DatabaseDataDumper(store.SystemDatabase)
            {
                Options =
                {
                    Incremental = true,
                    BatchSize = 100,
                    Limit = 206
                }
            };
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = backupPath });
        }

        // The restored store must contain no attachments either.
        VerifyDump(backupPath, store => Assert.Equal(0, store.DatabaseCommands.GetAttachmentHeadersStartingWith("user", 0, 500).Count()));
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
/// <summary>
/// Writes 1000 attachments, then runs the incremental export concurrently with
/// a writer that keeps overwriting "attachments/1000". The dump must still
/// contain all 1000 attachments (any version of the contested one).
/// </summary>
public async Task can_export_all_attachments()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            for (var i = 0; i < 1000; i++)
            {
                store.DatabaseCommands.PutAttachment(
                    "attachments/" + (i + 1),
                    null,
                    new MemoryStream(new[] { (byte)i }),
                    new RavenJObject());
            }

            // Deliberate race: export and concurrent overwrites run together to
            // exercise export consistency under writes.
            var task1 = Task.Run(async () =>
            {
                // now perform full backup
                var dumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { BatchSize = 10, Incremental = true } };
                await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = backupPath });
            });

            var task2 = Task.Run(() =>
            {
                // change the one document, this document should be exported (any version of it)
                for (var i = 0; i < 100; i++)
                {
                    store.DatabaseCommands.PutAttachment(
                        "attachments/1000",
                        null,
                        new MemoryStream(new[] { (byte)i }),
                        new RavenJObject());
                }
            });

            await Task.WhenAll(task1, task2);
        }

        using (var embeddableStore = NewDocumentStore())
        {
            // import all the data
            var dumper = new DatabaseDataDumper(embeddableStore.SystemDatabase) { Options = { Incremental = true } };
            dumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

            // The contested attachment must exist in some version.
            var attachemnt = embeddableStore.DatabaseCommands.GetAttachment("attachments/1000");
            Assert.NotNull(attachemnt);

            var attachments = embeddableStore.DatabaseCommands.GetAttachments(0, Etag.Empty, 1024).ToList();
            Assert.Equal(1000, attachments.Count);

            var stats = embeddableStore.DatabaseCommands.GetStatistics();
            Assert.Equal(1000, stats.CountOfAttachments);
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
private void AssertUsersCountInBackup(int expectedNumberOfUsers, string file)
{
    // Import the given dump into a fresh store and check the user count matches.
    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase);
        importer.Options.Incremental = false;
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = file }).Wait();

        WaitForIndexing(store);

        using (var session = store.OpenSession())
        {
            Assert.Equal(expectedNumberOfUsers, session.Query<User>().Count());
        }
    }
}
/// <summary>
/// Writes 1000 user documents, then runs the incremental export concurrently
/// with a writer that keeps modifying "users/1000". The dump must still contain
/// all 1000 documents (any version of the contested one).
/// </summary>
public async Task can_export_all_documents()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            for (var i = 0; i < 1000; i++)
            {
                store.DatabaseCommands.Put("users/" + (i + 1), null, new RavenJObject() { { "Name", "test #" + i } }, new RavenJObject() { { Constants.RavenEntityName, "Users"} });
            }

            // Deliberate race: export and concurrent updates run together to
            // exercise export consistency under writes.
            var task1 = Task.Run(async () =>
            {
                // now perform full backup
                var dumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { BatchSize = 10, Incremental = true } };
                await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> {ToFile = backupPath});
            });

            var task2 = Task.Run(() =>
            {
                // change the one document, this document should be exported (any version of it)
                for (var i = 0; i < 100; i++)
                {
                    using (var session = store.OpenSession())
                    {
                        var user = session.Load<User>("users/1000");
                        user.Name = "test" + i;
                        session.SaveChanges();
                    }
                }
            });

            await Task.WhenAll(task1, task2);
        }

        using (var embeddableStore = NewDocumentStore())
        {
            // import all the data
            var dumper = new DatabaseDataDumper(embeddableStore.SystemDatabase) { Options = { Incremental = true } };
            dumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

            using (var session = embeddableStore.OpenSession())
            {
                var user = session.Load<User>("users/1000");
                //the document should exist in the export (any version of it)
                Assert.NotNull(user);

                var list = session.Query<User>()
                    .Customize(x => x.WaitForNonStaleResultsAsOfNow())
                    .Take(1024)
                    .ToList();
                Assert.Equal(1000, list.Count);
            }

            var stats = embeddableStore.DatabaseCommands.GetStatistics();
            Assert.Equal(1000, stats.CountOfDocuments);
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
public void CanBackupAttachmentDeletion()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            using (var session = store.OpenSession())
            {
                var periodicBackupSetup = new PeriodicExportSetup { LocalFolderName = backupPath, IntervalMilliseconds = 250 };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            var backupStatus = GetPeriodicBackupStatus(store.SystemDatabase);

            // First export captures the attachment, second one its deletion.
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4 }), new RavenJObject());
            WaitForPeriodicExport(store.SystemDatabase, backupStatus);

            store.DatabaseCommands.DeleteAttachment("attach/1", null);
            WaitForPeriodicExport(store.SystemDatabase, backupStatus);
        }

        using (var store = NewDocumentStore())
        {
            var dataDumper = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
            dataDumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

            // The deletion must have been replayed by the incremental import.
            Assert.Null(store.DatabaseCommands.GetAttachment("attach/1"));
        }
    }
    finally
    {
        // Original only cleaned up on success; try/finally prevents leaking
        // the backup folder when an assertion or wait fails.
        IOExtensions.DeleteDirectory(backupPath);
    }
}
private void VerifyDump(string backupPath, Action<EmbeddableDocumentStore> action)
{
    // Restore the incremental dump into a throwaway store and hand it to the
    // caller's assertions.
    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase)
        {
            Options = { Incremental = true }
        };
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

        action(store);
    }
}
public async Task SmugglerTransformShouldWorkForDatabaseDataDumper()
{
    var path = NewDataPath(forceCreateDir: true);
    var backupPath = Path.Combine(path, "backup.dump");

    // The transform drops foos/1 on both export and import, so only foos/2
    // should survive the round trip.
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new Foo { Name = "N1" });
            session.Store(new Foo { Name = "N2" });
            session.SaveChanges();
        }

        var exporter = new DatabaseDataDumper(store.DocumentDatabase);
        exporter.Options.TransformScript = @"function(doc) { var id = doc['@metadata']['@id']; if(id === 'foos/1') return null; return doc; }";
        await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            ToFile = backupPath,
            From = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase }
        });
    }

    using (var documentStore = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(documentStore.DocumentDatabase);
        importer.Options.TransformScript = @"function(doc) { var id = doc['@metadata']['@id']; if(id === 'foos/1') return null; return doc; }";
        await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions>
        {
            FromFile = backupPath,
            To = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = documentStore.DefaultDatabase }
        });

        using (var session = documentStore.OpenSession())
        {
            var foos = session.Query<Foo>()
                .Customize(customization => customization.WaitForNonStaleResultsAsOfNow())
                .ToList();

            Assert.Equal(1, foos.Count);
            Assert.Equal("foos/2", foos[0].Id);
            Assert.Equal("N2", foos[0].Name);
            Assert.Null(session.Load<Foo>(1));
        }
    }
}
public async Task CanPerformDumpWithLimitAndFilter_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            // Interleave users and developers; only the 4 developers match the
            // entity-name filter, and Limit = 5 leaves room for all of them.
            var counter = 0;
            counter = InsertUsers(store, counter, 1000);
            counter = InsertDevelopers(store, counter, 2);
            counter = InsertUsers(store, counter, 1000);
            InsertDevelopers(store, counter, 2);

            WaitForIndexing(store);

            var exporter = new DatabaseDataDumper(store.SystemDatabase)
            {
                Options = { Limit = 5, Incremental = true }
            };
            exporter.Options.Filters.Add(new FilterSetting
            {
                Path = "@metadata.Raven-Entity-Name",
                Values = { "Developers" },
                ShouldMatch = true,
            });
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = backupPath });
        }

        VerifyDump(backupPath, store =>
        {
            using (var session = store.OpenSession())
            {
                Assert.Equal(4, session.Query<Developer>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
            }
        });
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
/// <summary>
/// Streams a database export to the client as a ".ravendump" attachment.
/// Smuggler options arrive as a JSON string in the request body; the export
/// itself runs lazily inside PushStreamContent when the response stream opens.
/// </summary>
public Task<HttpResponseMessage> ExportDatabase([FromBody]ExportData smugglerOptionsJson)
{
    // Deserialize the client-provided smuggler options from the raw JSON string.
    var requestString = smugglerOptionsJson.SmugglerOptions;
    SmugglerDatabaseOptions smugglerOptions;

    using (var jsonReader = new RavenJsonTextReader(new StringReader(requestString)))
    {
        var serializer = JsonExtensions.CreateDefaultJsonSerializer();
        smugglerOptions = (SmugglerDatabaseOptions)serializer.Deserialize(jsonReader, typeof(SmugglerDatabaseOptions));
    }

    var result = GetEmptyMessage();

    // create PushStreamContent object that will be called when the output stream will be ready.
    result.Content = new PushStreamContent(async (outputStream, content, arg3) =>
    {
        try
        {
            var dataDumper = new DatabaseDataDumper(Database, smugglerOptions);
            await dataDumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToStream = outputStream }).ConfigureAwait(false);
        }
        finally
        {
            // Closing the stream signals end-of-response to the client.
            outputStream.Close();
        }
    });

    // Fall back to "Dump of <db>, <timestamp>" when no custom file name was
    // given or it contains invalid file-name characters.
    // ("NoneDefualtFileName" is the existing public option name and cannot be
    // renamed here.)
    var fileName = String.IsNullOrEmpty(smugglerOptions.NoneDefualtFileName) || (smugglerOptions.NoneDefualtFileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0) ? string.Format("Dump of {0}, {1}", DatabaseName, DateTime.Now.ToString("yyyy-MM-dd HH-mm", CultureInfo.InvariantCulture)) : smugglerOptions.NoneDefualtFileName;

    result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = fileName + ".ravendump" };

    return new CompletedTask<HttpResponseMessage>(result);
}
public async Task CanDumpEmptyDatabase_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    try
    {
        using (var server = GetNewServer(databaseName: Constants.SystemDatabase))
        {
            using (new DocumentStore { Url = "http://localhost:8079" }.Initialize())
            {
                // now perform full backup
                var exporter = new DatabaseDataDumper(server.SystemDatabase)
                {
                    Options = { Incremental = true }
                };
                await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = backupPath });
            }
        }

        // An empty database must round-trip to an empty database.
        VerifyDump(backupPath, store => Assert.Equal(0, store.SystemDatabase.Documents.GetDocumentsAsJson(0, int.MaxValue, null, CancellationToken.None).Count()));
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
/// <summary>
/// Timer entry point for periodic export. Runs a single export task at a time
/// (guarded by <c>currentTask</c>); incremental runs export only changes since
/// the last recorded etags, full runs write a timestamped ".ravendb-full-dump"
/// file. Errors are logged and surfaced as a database alert.
/// </summary>
/// <param name="fullBackup">True for a full dump, false for an incremental export.</param>
private void TimerCallback(bool fullBackup)
{
    // Cheap unsynchronized check first; re-checked under the lock below.
    if (currentTask != null)
        return;

    if (Database.Disposed)
    {
        Dispose();
        return;
    }

    // we have shared lock for both incremental and full backup.
    // NOTE(review): lock (this) is a known anti-pattern (external code could
    // lock on the same instance) — confirm nothing else locks this object
    // before changing it to a private gate object.
    lock (this)
    {
        if (currentTask != null)
            return;
        currentTask = Task.Factory.StartNew(async () =>
        {
            var documentDatabase = Database;
            if (documentDatabase == null)
                return;
            using (LogContext.WithDatabase(documentDatabase.Name))
            {
                try
                {
                    var dataDumper = new DatabaseDataDumper(documentDatabase);
                    var localBackupConfigs = exportConfigs;
                    var localBackupStatus = exportStatus;
                    if (localBackupConfigs == null)
                        return;

                    if (fullBackup == false)
                    {
                        var currentEtags = dataDumper.Operations.FetchCurrentMaxEtags();
                        // No-op if nothing has changed
                        if (currentEtags.LastDocsEtag == localBackupStatus.LastDocsEtag && currentEtags.LastAttachmentsEtag == localBackupStatus.LastAttachmentsEtag && currentEtags.LastDocDeleteEtag == localBackupStatus.LastDocsDeletionEtag && currentEtags.LastAttachmentsDeleteEtag == localBackupStatus.LastAttachmentDeletionEtag)
                        {
                            return;
                        }
                    }

                    // No configured local folder -> use a temp folder under the
                    // database's data directory (deleted after upload, below).
                    var backupPath = localBackupConfigs.LocalFolderName ?? Path.Combine(documentDatabase.Configuration.DataDirectory, "PeriodicExport-Temp");
                    if (fullBackup)
                    {
                        // create filename for full dump
                        backupPath = Path.Combine(backupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-full-dump");
                        if (File.Exists(backupPath))
                        {
                            // Same-minute collision: append " - N" until free.
                            var counter = 1;
                            while (true)
                            {
                                backupPath = Path.Combine(Path.GetDirectoryName(backupPath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-full-dump");

                                if (File.Exists(backupPath) == false)
                                    break;
                                counter++;
                            }
                        }
                    }

                    var smugglerOptions = dataDumper.Options;
                    if (fullBackup == false)
                    {
                        // Resume from the etags recorded by the previous run.
                        smugglerOptions.StartDocsEtag = localBackupStatus.LastDocsEtag;
                        smugglerOptions.StartAttachmentsEtag = localBackupStatus.LastAttachmentsEtag;
                        smugglerOptions.StartDocsDeletionEtag = localBackupStatus.LastDocsDeletionEtag;
                        smugglerOptions.StartAttachmentsDeletionEtag = localBackupStatus.LastAttachmentDeletionEtag;
                        smugglerOptions.Incremental = true;
                        smugglerOptions.ExportDeletions = true;
                    }

                    var exportResult = await dataDumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = backupPath });

                    if (fullBackup == false)
                    {
                        // No-op if nothing has changed
                        if (exportResult.LastDocsEtag == localBackupStatus.LastDocsEtag && exportResult.LastAttachmentsEtag == localBackupStatus.LastAttachmentsEtag && exportResult.LastDocDeleteEtag == localBackupStatus.LastDocsDeletionEtag && exportResult.LastAttachmentsDeleteEtag == localBackupStatus.LastAttachmentDeletionEtag)
                        {
                            logger.Info("Periodic export returned prematurely, nothing has changed since last export");
                            return;
                        }
                    }

                    try
                    {
                        if (!localBackupConfigs.Disabled)
                        {
                            UploadToServer(exportResult.FilePath, localBackupConfigs, fullBackup);
                        }
                    }
                    finally
                    {
                        // if user did not specify local folder we delete temporary file.
                        if (String.IsNullOrEmpty(localBackupConfigs.LocalFolderName))
                        {
                            IOExtensions.DeleteFile(exportResult.FilePath);
                        }
                    }

                    // Persist progress so the next run resumes from here.
                    if (fullBackup)
                    {
                        localBackupStatus.LastFullBackup = SystemTime.UtcNow;
                    }
                    else
                    {
                        localBackupStatus.LastAttachmentsEtag = exportResult.LastAttachmentsEtag;
                        localBackupStatus.LastDocsEtag = exportResult.LastDocsEtag;
                        localBackupStatus.LastDocsDeletionEtag = exportResult.LastDocDeleteEtag;
                        localBackupStatus.LastAttachmentDeletionEtag = exportResult.LastAttachmentsDeleteEtag;
                        localBackupStatus.LastBackup = SystemTime.UtcNow;
                    }

                    var ravenJObject = JsonExtensions.ToJObject(localBackupStatus);
                    ravenJObject.Remove("Id");
                    var putResult = documentDatabase.Documents.Put(PeriodicExportStatus.RavenDocumentKey, null, ravenJObject, new RavenJObject(), null);

                    // this result in exportStatus being refreshed
                    localBackupStatus = exportStatus;
                    if (localBackupStatus != null)
                    {
                        if (localBackupStatus.LastDocsEtag.IncrementBy(1) == putResult.ETag) // the last etag is with just us
                            localBackupStatus.LastDocsEtag = putResult.ETag; // so we can skip it for the next time
                    }
                }
                catch (ObjectDisposedException)
                {
                    // shutting down, probably
                }
                catch (Exception e)
                {
                    logger.ErrorException("Error when performing periodic export", e);
                    Database.AddAlert(new Alert
                    {
                        AlertLevel = AlertLevel.Error,
                        CreatedAt = SystemTime.UtcNow,
                        Message = e.Message,
                        Title = "Error in Periodic Export",
                        Exception = e.ToString(),
                        UniqueKey = "Periodic Export Error",
                    });
                }
            }
        })
        .Unwrap();

        // Clear the guard when the export finishes so the next tick can run.
        currentTask.ContinueWith(_ =>
        {
            currentTask = null;
        });
    }
}
// Changed from "async void" to "async Task": async void methods swallow
// exceptions (they surface on the synchronization context and can crash the
// process) and cannot be awaited. Callers that previously invoked Sample3()
// and ignored the result still compile unchanged.
public async Task Sample3()
{
    #region smuggler_api_4
    using (EmbeddableDocumentStore store = new EmbeddableDocumentStore
    {
        DefaultDatabase = "Northwind"
    })
    {
        store.Initialize();

        // Full (non-incremental) dump of documents, indexes, attachments and transformers.
        var dataDumper = new DatabaseDataDumper(
            store.DocumentDatabase,
            new SmugglerDatabaseOptions
            {
                OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Attachments | ItemType.Transformers,
                Incremental = false
            });

        SmugglerExportOptions<RavenConnectionStringOptions> exportOptions = new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            From = new EmbeddedRavenConnectionStringOptions(),
            ToFile = "dump.raven"
        };

        await dataDumper.ExportData(exportOptions);
    }
    #endregion
}