/// <summary>
/// Exports all documents of the embedded database to &lt;backuppath&gt;/dump.raven.
/// Guarded by the "jobkey" shared secret passed as <paramref name="key"/>.
/// Returns a JSON status object ("unauthorized" or "ok").
/// </summary>
public async Task<ActionResult> Backup(string key)
{
    // FIX: string.Equals is null-safe (key.Equals threw NullReferenceException when the
    // key was omitted) and a shared secret is an opaque token, so it must be compared
    // ordinally rather than with culture-sensitive rules.
    if (!string.Equals(key, ConfigurationManager.AppSettings["jobkey"], StringComparison.Ordinal))
    {
        return new JsonResult { Data = new { status = "unauthorized" }, JsonRequestBehavior = JsonRequestBehavior.AllowGet };
    }

    var backupPath = System.IO.Path.Combine(Server.MapPath(ConfigurationManager.AppSettings["backuppath"]), "dump.raven");

    // Delete the previous dump so the export writes a fresh file.
    if (System.IO.File.Exists(backupPath))
    {
        System.IO.File.Delete(backupPath);
    }

    var dumper = new DatabaseDataDumper(DB.Instance.Store.DocumentDatabase, new SmugglerDatabaseOptions { OperateOnTypes = ItemType.Documents, Incremental = false });

    // FIX: ExportData returns a Task that was previously discarded, so "ok" could be
    // reported before the export finished (or silently failed). Await it, matching
    // the pattern already used by Restore.
    await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { From = new EmbeddedRavenConnectionStringOptions(), ToFile = backupPath });

    return new JsonResult { Data = new { status = "ok" }, JsonRequestBehavior = JsonRequestBehavior.AllowGet };
}
// Exports attachments with a Limit below the inserted count and verifies the
// dump contains exactly Limit attachments.
public async Task CanDumpAttachmentsWithLimit_Dumper()
{
    var exportFolder = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            InsertAttachments(store, 328);

            var exporter = new DatabaseDataDumper(store.SystemDatabase)
            {
                Options =
                {
                    Incremental = true,
                    BatchSize = 100,
                    Limit = 206
                }
            };
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = exportFolder });
        }

        // Only the first 206 attachments should have made it into the dump.
        VerifyDump(exportFolder, store => Assert.Equal(206, store.DatabaseCommands.GetAttachmentHeadersStartingWith("user", 0, 500).Count()));
    }
    finally
    {
        IOExtensions.DeleteDirectory(exportFolder);
    }
}
// Exports a seeded database to a temp dump file and verifies that both the
// document and its attachment survive a round-trip import into a fresh store.
public async Task CanImportFromDumpFile()
{
    var file = Path.GetTempFileName();
    try
    {
        using (var store = NewDocumentStoreWithData())
        {
            var exporter = new DatabaseDataDumper(store.SystemDatabase);
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = file });
        }

        using (var store = NewDocumentStore())
        {
            var importer = new DatabaseDataDumper(store.SystemDatabase);
            await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = file });

            using (var session = store.OpenSession())
            {
                // Person imported.
                Assert.Equal(1, session.Query<Person>().Customize(x => x.WaitForNonStaleResults()).Take(5).Count());

                // Attachment imported.
                var attachment = store.DatabaseCommands.GetAttachment("Attachments/1");
                var data = ReadFully(attachment.Data());
                Assert.Equal(new byte[] { 1, 2, 3 }, data);
            }
        }
    }
    finally
    {
        // FIX: the temp file created by GetTempFileName was previously leaked.
        System.IO.File.Delete(file);
    }
}
/// <summary>
/// Imports the dump at the given server-relative <paramref name="path"/> into the
/// embedded database. Guarded by the "jobkey" shared secret and only permitted
/// while the "installation" app setting is "1" (restore overwrites data).
/// </summary>
public async Task<ActionResult> Restore(string key, string path)
{
    // FIX: string.Equals is null-safe (key.Equals threw on a missing key) and the
    // job key is an opaque secret, so compare ordinally, not by culture.
    if (!string.Equals(key, ConfigurationManager.AppSettings["jobkey"], StringComparison.Ordinal))
    {
        return new JsonResult { Data = new { status = "unauthorized" }, JsonRequestBehavior = JsonRequestBehavior.AllowGet };
    }

    // FIX: the explicit null check was redundant — null != "1" is already true.
    if (ConfigurationManager.AppSettings["installation"] != "1")
    {
        return new JsonResult { Data = new { status = "need to be in installation mode" }, JsonRequestBehavior = JsonRequestBehavior.AllowGet };
    }

    var backupPath = Server.MapPath(path);
    var dumper = new DatabaseDataDumper(DB.Instance.Store.DocumentDatabase, new SmugglerDatabaseOptions { OperateOnTypes = ItemType.Documents, Incremental = false });
    await dumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { To = new EmbeddedRavenConnectionStringOptions(), FromFile = backupPath });

    return new JsonResult { Data = new { status = "restore complete" }, JsonRequestBehavior = JsonRequestBehavior.AllowGet };
}
// Exports 2000 users incrementally and verifies the dump round-trips them all.
public async Task CanPerformDump_Dumper()
{
    var dumpFolder = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            InsertUsers(store, 0, 2000);

            var exporter = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFolder });
        }

        VerifyDump(dumpFolder, store =>
        {
            using (var session = store.OpenSession())
            {
                Assert.Equal(2000, session.Query<User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
            }
        });
    }
    finally
    {
        IOExtensions.DeleteDirectory(dumpFolder);
    }
}
// "Min" appears both as a double inside a nested array and as an integer at the
// top level; a smuggler transform must preserve each occurrence's numeric type.
public async Task SmugglerTransformShouldRecognizeNumericPropertiesEvenThoughTheyHaveTheSameNames()
{
    using (var stream = new MemoryStream())
    {
        var original = new RavenJObject
        {
            { "Range", new RavenJArray { new RavenJObject { { "Min", 2.4 } } } },
            { "Min", 1 }
        };

        using (var exportStore = NewDocumentStore())
        {
            exportStore.DatabaseCommands.Put("docs/1", null, original, new RavenJObject());

            var exporter = new DatabaseDataDumper(exportStore.DocumentDatabase, new SmugglerDatabaseOptions { TransformScript = EmptyTransform });
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
            {
                From = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = exportStore.DefaultDatabase },
                ToStream = stream
            });
        }

        stream.Position = 0;

        using (var importStore = NewDocumentStore())
        {
            var importer = new DatabaseDataDumper(importStore.DocumentDatabase, new SmugglerDatabaseOptions { TransformScript = EmptyTransform });
            await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions>
            {
                FromStream = stream,
                To = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = importStore.DefaultDatabase }
            });

            var roundTripped = importStore.DatabaseCommands.Get("docs/1").DataAsJson;
            Assert.NotNull(roundTripped);

            // Both "Min" occurrences keep their original token types and the
            // whole document deep-equals the input.
            Assert.Equal(original["Min"].Type, roundTripped["Min"].Type);
            Assert.Equal(((RavenJObject)((RavenJArray)original["Range"])[0])["Min"].Type, ((RavenJObject)((RavenJArray)roundTripped["Range"])[0])["Min"].Type);
            Assert.True(RavenJToken.DeepEquals(original, roundTripped));
        }
    }
}
// Demonstrates a full export (documents, indexes, attachments, transformers)
// from an embedded store via the DatabaseDataDumper (smuggler) API.
// FIX: returns Task instead of void — "async void" cannot be awaited and any
// exception it throws is unobservable and can crash the process.
public async System.Threading.Tasks.Task Sample3()
{
    #region smuggler_api_4
    using (EmbeddableDocumentStore store = new EmbeddableDocumentStore { DefaultDatabase = "Northwind" })
    {
        store.Initialize();

        var dataDumper = new DatabaseDataDumper(
            store.DocumentDatabase,
            new SmugglerDatabaseOptions
            {
                OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Attachments | ItemType.Transformers,
                Incremental = false
            });

        SmugglerExportOptions<RavenConnectionStringOptions> exportOptions = new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            From = new EmbeddedRavenConnectionStringOptions(),
            ToFile = "dump.raven"
        };

        await dataDumper.ExportData(exportOptions);
    }
    #endregion
}
// A full export of an empty database must produce a dump that imports zero documents.
public async Task CanDumpEmptyDatabase_Dumper()
{
    var dumpFolder = NewDataPath("BackupFolder");
    try
    {
        using (var server = GetNewServer(databaseName: Constants.SystemDatabase))
        using (new DocumentStore { Url = "http://localhost:8079" }.Initialize())
        {
            var exporter = new DatabaseDataDumper(server.SystemDatabase) { Options = { Incremental = true } };
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFolder });
        }

        VerifyDump(dumpFolder, store => Assert.Equal(0, store.SystemDatabase.Documents.GetDocumentsAsJson(0, int.MaxValue, null, CancellationToken.None).Count()));
    }
    finally
    {
        IOExtensions.DeleteDirectory(dumpFolder);
    }
}
// A document deletion performed after a periodic export must itself be exported,
// so replaying the backup yields a database without the document.
public void CanBackupDocumentDeletion()
{
    var exportFolder = NewDataPath("BackupFolder");

    using (var store = NewDocumentStore())
    {
        string userId;

        // Enable periodic export into the local folder.
        using (var session = store.OpenSession())
        {
            session.Store(new PeriodicExportSetup { LocalFolderName = exportFolder, IntervalMilliseconds = 100 }, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var exportStatus = GetPerodicBackupStatus(store.SystemDatabase);

        using (var session = store.OpenSession())
        {
            var user = new User { Name = "oren" };
            session.Store(user);
            userId = user.Id;
            session.SaveChanges();
        }

        WaitForPeriodicExport(store.SystemDatabase, exportStatus);

        // Delete the document and wait until the deletion itself has been exported.
        store.DatabaseCommands.Delete(userId, null);
        WaitForPeriodicExport(store.SystemDatabase, exportStatus, x => x.LastDocsDeletionEtag);
    }

    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = exportFolder }).Wait();

        using (var session = store.OpenSession())
        {
            // The deletion was replayed, so the user must not exist.
            Assert.Null(session.Load<User>(1));
        }
    }

    IOExtensions.DeleteDirectory(exportFolder);
}
// Streams documents, indexes, transformers and attachments directly ("between")
// from an embedded store to a remote database and verifies everything arrived.
public async Task BasicBetweenTestFromEmbeddedStore()
{
    using (var sourceStore = NewDocumentStore())
    {
        // Seed the source: an index, a transformer, two users and two attachments.
        await new SmugglerBetweenTests.UsersIndex().ExecuteAsync(sourceStore.AsyncDatabaseCommands, new DocumentConvention());
        await new SmugglerBetweenTests.UsersTransformer().ExecuteAsync(sourceStore);

        using (var session = sourceStore.OpenAsyncSession())
        {
            await session.StoreAsync(new SmugglerBetweenTests.User { Name = "Robert" });
            await session.StoreAsync(new SmugglerBetweenTests.User { Name = "James" });
            await session.SaveChangesAsync();
        }

        await sourceStore.AsyncDatabaseCommands.PutAttachmentAsync("1", null, new MemoryStream(new byte[] { 3 }), new RavenJObject());
        await sourceStore.AsyncDatabaseCommands.PutAttachmentAsync("2", null, new MemoryStream(new byte[] { 2 }), new RavenJObject());

        using (var server = GetNewServer(port: 8078))
        using (var targetStore = NewRemoteDocumentStore(ravenDbServer: server, databaseName: "TargetDB"))
        {
            var smuggler = new DatabaseDataDumper(sourceStore.DocumentDatabase, new SmugglerDatabaseOptions());
            await smuggler.Between(new SmugglerBetweenOptions<RavenConnectionStringOptions>
            {
                To = new RavenConnectionStringOptions { Url = "http://localhost:8078", DefaultDatabase = "TargetDB" }
            });

            await SmugglerBetweenTests.AssertDatabaseHasIndex<SmugglerBetweenTests.UsersIndex>(targetStore);
            await SmugglerBetweenTests.AssertDatabaseHasTransformer<SmugglerBetweenTests.UsersTransformer>(targetStore);

            using (var session = targetStore.OpenAsyncSession())
            {
                Assert.NotNull(await session.LoadAsync<SmugglerBetweenTests.User>("users/1"));
                Assert.NotNull(await session.LoadAsync<SmugglerBetweenTests.User>("users/2"));

                var users = await session.Query<SmugglerBetweenTests.User, SmugglerBetweenTests.UsersIndex>().Customize(x => x.WaitForNonStaleResults()).ToListAsync();
                Assert.Equal(2, users.Count);
            }

            Assert.NotNull(await targetStore.AsyncDatabaseCommands.GetAttachmentAsync("1"));
            Assert.NotNull(await targetStore.AsyncDatabaseCommands.GetAttachmentAsync("2"));
        }
    }
}
// Periodic export intervals longer than .NET's maximum timer timeout are composed
// of several shorter timer runs; shrink that maximum via reflection so the
// composition path is exercised quickly, then verify the export still works.
public void PeriodicExport_should_work_with_long_intervals()
{
    var exportFolder = NewDataPath("BackupFolder");

    using (var store = NewDocumentStore())
    {
        var periodicExportTask = store.DocumentDatabase.StartupTasks.OfType<PeriodicExportTask>().FirstOrDefault();

        // "maxTimerTimeoutInMilliseconds" is the largest interval a .NET threading
        // timer accepts; requested intervals above it are accumulated from several
        // shorter timer firings.
        var maxTimerTimeoutField = typeof(PeriodicExportTask)
            .GetField("maxTimerTimeoutInMilliseconds", BindingFlags.Instance | BindingFlags.NonPublic);

        // Sanity check — can only fail if the field was renamed or removed.
        Assert.NotNull(maxTimerTimeoutField);
        maxTimerTimeoutField.SetValue(periodicExportTask, 5);

        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            session.Store(new PeriodicExportSetup { LocalFolderName = exportFolder, IntervalMilliseconds = 25 }, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey) != null, 10000);
    }

    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = exportFolder }).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
        }
    }

    IOExtensions.DeleteDirectory(exportFolder);
}
// Configures a periodic FULL export and verifies the produced
// ".ravendb-full-dump" file restores both the document and the attachment.
public void CanFullBackupToDirectory()
{
    var exportFolder = NewDataPath("BackupFolder", forceCreateDir: true);
    try
    {
        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());

            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                session.Store(new PeriodicExportSetup { LocalFolderName = exportFolder, FullBackupIntervalMilliseconds = 500 }, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            WaitForNextFullBackup(store);
        }

        using (var store = NewDocumentStore())
        {
            // Pick the oldest full-dump file produced by the periodic export task.
            var fullDumpFile = Directory.GetFiles(Path.GetFullPath(exportFolder))
                .Where(file => ".ravendb-full-dump".Equals(Path.GetExtension(file), StringComparison.InvariantCultureIgnoreCase))
                .OrderBy(File.GetLastWriteTimeUtc)
                .First();

            var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = false } };
            importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = fullDumpFile }).Wait();

            using (var session = store.OpenSession())
            {
                Assert.Equal("oren", session.Load<User>(1).Name);
                Assert.NotNull(store.DatabaseCommands.GetAttachment("attach/1"));
            }
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(exportFolder);
    }
}
// Exports a seeded database, mutates it (edit person, add person, swap
// attachments), re-imports the dump, and verifies the import restores the
// exported state while leaving data added after the export untouched.
public async Task ImportReplacesAnExistingDatabase()
{
    var file = Path.GetTempFileName();
    try
    {
        using (var store = NewDocumentStoreWithData())
        {
            var dumper = new DatabaseDataDumper(store.SystemDatabase);
            await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = file });

            using (var session = store.OpenSession())
            {
                var person = session.Load<Person>(1);
                person.Name = "Sean Kearon";

                session.Store(new Person { Name = "Gillian" });

                store.DatabaseCommands.DeleteAttachment("Attachments/1", null);
                store.DatabaseCommands.PutAttachment(
                    "Attachments/2",
                    null,
                    new MemoryStream(new byte[] { 1, 2, 3, 4, 5, 6 }),
                    new RavenJObject { { "Description", "This is another attachment." } });

                session.SaveChanges();
            }

            // FIX: await instead of .Wait() — blocking inside an async method risks
            // deadlocks and wraps failures in AggregateException.
            await new DatabaseDataDumper(store.SystemDatabase).ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = file });

            using (var session = store.OpenSession())
            {
                // Original attachment has been restored.
                Assert.NotNull(store.DatabaseCommands.GetAttachment("Attachments/1"));

                // The newly added attachment is still there.
                Assert.NotNull(store.DatabaseCommands.GetAttachment("Attachments/2"));

                // Original person has been restored.
                Assert.NotNull(session.Query<Person, PeopleByName>().Customize(x => x.WaitForNonStaleResults()).Single(x => x.Name == "Sean"));

                // The newly added person has not been removed.
                Assert.True(session.Query<Person, PeopleByName>().Customize(x => x.WaitForNonStaleResults()).Any(x => x.Name == "Gillian"));
            }
        }
    }
    finally
    {
        // FIX: delete the temp dump file, which was previously leaked.
        System.IO.File.Delete(file);
    }
}
/// <summary>
/// Accepts a multipart file upload and imports it into the database, choosing the
/// embedded DatabaseDataDumper or the remote SmugglerDatabaseApi depending on the
/// store type. Returns 200 on success or a 500 error response on failure.
/// </summary>
public async Task<HttpResponseMessage> Import()
{
    // Make sure that we actually got multipart form data.
    if (!Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    try
    {
        // Store the uploaded file into a temporary location.
        var provider = new MultipartFormDataStreamProvider(Path.GetTempPath());
        await Request.Content.ReadAsMultipartAsync(provider);

        // The name itself is unused; the call validates the "filename" field is present
        // (its absence throws and is reported via the catch below).
        string filename = provider.FormData.GetValues("filename").First();
        var file = provider.FileData.First();

        // Choose the importer based on whether the embedded database is in use.
        SmugglerDatabaseApiBase importer;
        RavenConnectionStringOptions connectionStringOptions;
        if (Database.DocumentStore is EmbeddableDocumentStore embeddableDocumentStore)
        {
            importer = new DatabaseDataDumper(embeddableDocumentStore.DocumentDatabase);
            connectionStringOptions = new EmbeddedRavenConnectionStringOptions();
        }
        else
        {
            importer = new SmugglerDatabaseApi();
            connectionStringOptions = new RavenConnectionStringOptions() { Url = Database.DocumentStore.Url };
        }

        var importOptions = new SmugglerImportOptions<RavenConnectionStringOptions>() { FromFile = file.LocalFileName, To = connectionStringOptions };
        try
        {
            await importer.ImportData(importOptions);
        }
        finally
        {
            // FIX: the uploaded temp file was previously leaked; delete it best-effort
            // so a failed delete cannot mask a successful import.
            try { System.IO.File.Delete(file.LocalFileName); } catch (IOException) { }
        }

        return new HttpResponseMessage(HttpStatusCode.OK);
    }
    catch (Exception exp)
    {
        return this.Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exp);
    }
}
// Imports the dump at backupPath into a fresh store and runs the caller's assertions.
private void VerifyDump(string backupPath, Action<EmbeddableDocumentStore> action)
{
    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = backupPath }).Wait();

        action(store);
    }
}
// Periodic export writes a per-database subdirectory; verify the "full" file it
// produces can be imported on its own.
public void CanBackupToDirectory()
{
    var exportFolder = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                session.Store(new PeriodicExportSetup { LocalFolderName = exportFolder, IntervalMilliseconds = 25 }, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }

            // Wait until the periodic export task reports that it ran at least once.
            SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey) != null, 10000);
        }

        using (var store = NewDocumentStore())
        {
            var exportDirectory = Directory.GetDirectories(exportFolder)[0];
            var fullBackupFile = Directory.GetFiles(exportDirectory).FirstOrDefault(x => x.Contains("full"));
            Assert.NotNull(fullBackupFile);

            // Import only the full backup.
            var importer = new DatabaseDataDumper(store.SystemDatabase);
            importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = fullBackupFile }).Wait();

            using (var session = store.OpenSession())
            {
                Assert.Equal("oren", session.Load<User>(1).Name);
            }
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(exportFolder);
    }
}
// Combines Limit with an entity-name filter: the limit applies to matching
// documents only, so all 4 developers (interleaved among 2000 users) are dumped.
public async Task CanPerformDumpWithLimitAndFilter_Dumper()
{
    var dumpFolder = NewDataPath("BackupFolder");
    try
    {
        using (var store = NewDocumentStore())
        {
            // Interleave users and developers so the filter has to skip documents.
            var counter = 0;
            counter = InsertUsers(store, counter, 1000);
            counter = InsertDevelopers(store, counter, 2);
            counter = InsertUsers(store, counter, 1000);
            InsertDevelopers(store, counter, 2);

            WaitForIndexing(store);

            var exporter = new DatabaseDataDumper(store.SystemDatabase) { Options = { Limit = 5, Incremental = true } };
            exporter.Options.Filters.Add(new FilterSetting
            {
                Path = "@metadata.Raven-Entity-Name",
                Values = { "Developers" },
                ShouldMatch = true,
            });
            await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFolder });
        }

        // Only the 4 developers match, even though the limit would allow 5.
        VerifyDump(dumpFolder, store =>
        {
            using (var session = store.OpenSession())
            {
                Assert.Equal(4, session.Query<Developer>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
            }
        });
    }
    finally
    {
        IOExtensions.DeleteDirectory(dumpFolder);
    }
}
// Imports the given (non-incremental) dump file into a fresh store and asserts
// that it contains exactly the expected number of users.
private void AssertUsersCountInBackup(int expectedNumberOfUsers, string file)
{
    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase);
        importer.Options.Incremental = false;
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = file }).Wait();

        WaitForIndexing(store);

        using (var session = store.OpenSession())
        {
            Assert.Equal(expectedNumberOfUsers, session.Query<User>().Count());
        }
    }
}
// An attachment deletion performed after a periodic export must itself be
// exported, so replaying the backup yields a database without the attachment.
public void CanBackupAttachmentDeletion()
{
    var exportFolder = NewDataPath("BackupFolder");

    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new PeriodicExportSetup { LocalFolderName = exportFolder, IntervalMilliseconds = 250 }, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var exportStatus = GetPerodicBackupStatus(store.SystemDatabase);

        store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4 }), new RavenJObject());
        WaitForPeriodicExport(store.SystemDatabase, exportStatus);

        // Delete the attachment and wait for the deletion to be exported as well.
        store.DatabaseCommands.DeleteAttachment("attach/1", null);
        WaitForPeriodicExport(store.SystemDatabase, exportStatus);
    }

    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = exportFolder }).Wait();

        // The deletion was replayed, so the attachment must be gone.
        Assert.Null(store.DatabaseCommands.GetAttachment("attach/1"));
    }

    IOExtensions.DeleteDirectory(exportFolder);
}
// Hidden documents must not leak into an export: only the 25 visible users
// inserted afterwards should appear in the dump.
public async Task CanDumpWhenHiddenDocs_Dumper()
{
    var dumpFolder = NewDataPath("BackupFolder");
    try
    {
        using (var server = GetNewServer())
        {
            using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize())
            {
                InsertHidenUsers(store, 2000);

                // Hidden documents are not retrievable through the normal API.
                var user1 = store.DatabaseCommands.Get("users/1");
                Assert.Null(user1);

                InsertUsers(store, 1, 25);

                // Full export straight from the server-side database instance.
                var exporter = new DatabaseDataDumper(server.SystemDatabase) { Options = { Incremental = true } };
                await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFolder });
            }
        }

        VerifyDump(dumpFolder, store =>
        {
            using (var session = store.OpenSession())
            {
                Assert.Equal(25, session.Query<User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
            }
        });
    }
    finally
    {
        IOExtensions.DeleteDirectory(dumpFolder);
    }
}
/// <summary>
/// Streams a database export to the client as an "attachment" download.
/// Smuggler options arrive serialized as JSON inside the request body.
/// </summary>
public Task<HttpResponseMessage> ExportDatabase(ExportData smugglerOptionsJson)
{
    // Deserialize the smuggler options from the incoming JSON string.
    SmugglerDatabaseOptions smugglerOptions;
    using (var jsonReader = new RavenJsonTextReader(new StringReader(smugglerOptionsJson.SmugglerOptions)))
    {
        var serializer = JsonExtensions.CreateDefaultJsonSerializer();
        smugglerOptions = (SmugglerDatabaseOptions)serializer.Deserialize(jsonReader, typeof(SmugglerDatabaseOptions));
    }

    var response = GetEmptyMessage();

    // The export runs lazily: PushStreamContent invokes the lambda once the client
    // begins reading the response stream.
    response.Content = new PushStreamContent(async (outputStream, content, arg3) =>
    {
        try
        {
            var dataDumper = new DatabaseDataDumper(Database, smugglerOptions);
            await dataDumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToStream = outputStream }).ConfigureAwait(false);
        }
        finally
        {
            outputStream.Close();
        }
    });

    // Fall back to "Dump of <db>, <timestamp>" when no valid file name was requested.
    var fileName = String.IsNullOrEmpty(smugglerOptions.NoneDefualtFileName) || (smugglerOptions.NoneDefualtFileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
        ? string.Format("Dump of {0}, {1}", this.DatabaseName, DateTime.Now.ToString("yyyy-MM-dd HH-mm", CultureInfo.InvariantCulture))
        : smugglerOptions.NoneDefualtFileName;

    response.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = fileName + ".ravendump" };

    return new CompletedTask<HttpResponseMessage>(response);
}
// Imports the embedded sample dump and checks the map/reduce index it contains
// yields the expected number of results.
public async Task CanGetCorrectResult()
{
    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase);
        using (var dump = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.MailingList.Sandbox.ravendbdump"))
        {
            await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = dump });
        }

        using (var s = store.OpenSession())
        {
            var results = s.Query<object>("LogEntry/CountByDate")
                .Customize(x => x.WaitForNonStaleResults())
                .ToList();
            Assert.Equal(4, results.Count);
        }
    }
}
// The server's MaxNumberOfItemsToProcessInSingleBatch (1234) must clamp any
// larger smuggler batch size during export and import, while smaller batch
// sizes are left untouched.
public void MaxNumberOfItemsToProcessInSingleBatchShouldBeRespectedByDataDumper()
{
    var dumpFile = Path.Combine(NewDataPath(forceCreateDir: true), "raven.dump");

    using (var server = GetNewServer(configureConfig: configuration => configuration.MaxNumberOfItemsToProcessInSingleBatch = 1234))
    {
        // A batch size above the server limit is clamped down during export...
        var dumper = new DatabaseDataDumper(server.SystemDatabase, options: new SmugglerDatabaseOptions { BatchSize = 4321 });
        Assert.Equal(4321, dumper.Options.BatchSize);
        dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFile }).ResultUnwrap();
        Assert.Equal(1234, dumper.Options.BatchSize);

        // ...and during import.
        dumper = new DatabaseDataDumper(server.SystemDatabase, options: new SmugglerDatabaseOptions { BatchSize = 4321 });
        Assert.Equal(4321, dumper.Options.BatchSize);
        dumper.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = dumpFile }).Wait();
        Assert.Equal(1234, dumper.Options.BatchSize);

        // A batch size already below the limit stays as configured.
        dumper = new DatabaseDataDumper(server.SystemDatabase, options: new SmugglerDatabaseOptions { BatchSize = 1000 });
        Assert.Equal(1000, dumper.Options.BatchSize);
        dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = dumpFile }).ResultUnwrap();
        Assert.Equal(1000, dumper.Options.BatchSize);
    }
}
// Periodic export into a directory, then an incremental import of that
// directory into a fresh store, must round-trip the stored user.
public void CanBackupToDirectory()
{
    var exportFolder = NewDataPath("BackupFolder");

    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            session.Store(new PeriodicExportSetup { LocalFolderName = exportFolder, IntervalMilliseconds = 25 }, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        // Wait until the periodic export task has run at least once.
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey) != null, 10000);
    }

    using (var store = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(store.SystemDatabase) { Options = { Incremental = true } };
        importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = exportFolder }).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
        }
    }

    IOExtensions.DeleteDirectory(exportFolder);
}
/// <summary>
/// Seeds the database with the embedded Northwind sample dump (documents,
/// indexes and transformers). Refuses to run against a non-empty database.
/// </summary>
public async Task<HttpResponseMessage> CreateSampleData()
{
    // Refuse to seed a database that already has documents.
    var existing = Database.Queries.Query(Constants.DocumentsByEntityNameIndex, new IndexQuery(), CancellationToken.None);
    if (existing.Results.Count > 0)
    {
        return GetMessageWithString("You cannot create sample data in a database that already contains documents", HttpStatusCode.BadRequest);
    }

    using (var sampleData = typeof(StudioTasksController).Assembly.GetManifestResourceStream("Raven.Database.Server.Assets.EmbeddedData.Northwind.dump"))
    {
        var importer = new DatabaseDataDumper(Database)
        {
            Options =
            {
                OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Transformers,
                ShouldExcludeExpired = false
            }
        };
        await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromStream = sampleData });
    }

    return GetEmptyMessage();
}
// Verifies StripReplicationInformation behavior across three phases:
//  1. export from a Replication-enabled store (metadata is present on export),
//  2. import into a plain store -> Raven-Replication-* metadata is stripped,
//  3. import into a Replication-enabled store -> the bundle re-adds the metadata.
// NOTE(review): all smuggler calls block with Wait(15s); presumably a test-suite
// timeout guard — confirm the return value being ignored is intentional.
public void SmugglerCanStripReplicationInformationDuringImport_Embedded()
{
    using (var stream = new MemoryStream())
    {
        // Phase 1: seed a replication-enabled store; the bundle stamps documents
        // and attachments with replication source/version metadata.
        using (var store = NewDocumentStore(activeBundles: "Replication"))
        {
            var commands = store.DatabaseCommands;
            commands.Put("keys/1", null, new RavenJObject(), new RavenJObject());
            var doc = commands.Get("keys/1");
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationVersion));

            commands.PutAttachment("keys/1", null, new MemoryStream(), new RavenJObject());
            var attachment = commands.GetAttachment("keys/1");
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationVersion));

            // Export with stripping enabled into the in-memory stream.
            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { StripReplicationInformation = true });
            smuggler.ExportData(new SmugglerExportOptions <RavenConnectionStringOptions> { ToStream = stream, From = new RavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase } }).Wait(TimeSpan.FromSeconds(15));
        }

        stream.Position = 0;

        // Phase 2: import into a store WITHOUT the replication bundle — the
        // replication metadata must be absent after import.
        using (var store = NewDocumentStore())
        {
            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { StripReplicationInformation = true });
            smuggler.ImportData(new SmugglerImportOptions <RavenConnectionStringOptions> { FromStream = stream, To = new RavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase, } }).Wait(TimeSpan.FromSeconds(15));

            var commands = store.DatabaseCommands;
            var doc = commands.Get("keys/1");
            Assert.False(doc.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.False(doc.Metadata.ContainsKey(Constants.RavenReplicationVersion));
            var attachment = commands.GetAttachment("keys/1");
            Assert.False(attachment.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.False(attachment.Metadata.ContainsKey(Constants.RavenReplicationVersion));
        }

        stream.Position = 0;

        // Phase 3: import into a store WITH the replication bundle — the bundle
        // re-stamps the metadata even though the dump was stripped.
        using (var store = NewDocumentStore(activeBundles: "Replication"))
        {
            var smuggler = new DatabaseDataDumper(store.DocumentDatabase, new SmugglerDatabaseOptions { StripReplicationInformation = true });
            smuggler.ImportData(new SmugglerImportOptions <RavenConnectionStringOptions> { FromStream = stream, To = new RavenConnectionStringOptions { DefaultDatabase = store.DefaultDatabase, } }).Wait(TimeSpan.FromSeconds(15));

            var commands = store.DatabaseCommands;
            var doc = commands.Get("keys/1");
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(doc.Metadata.ContainsKey(Constants.RavenReplicationVersion));
            var attachment = commands.GetAttachment("keys/1");
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationSource));
            Assert.True(attachment.Metadata.ContainsKey(Constants.RavenReplicationVersion));
        }
    }
}
/// <summary>
/// Streams a database export to the client as an "attachment" download while
/// registering the export as a trackable pending task (progress, completion,
/// cancellation and failure are all reflected in the task status).
/// </summary>
public Task <HttpResponseMessage> ExportDatabase([FromBody] ExportData smugglerOptionsJson)
{
    var result = GetEmptyMessage();
    var taskId = smugglerOptionsJson.ProgressTaskId;

    // Deserialize the smuggler options that arrived as a JSON string.
    var requestString = smugglerOptionsJson.DownloadOptions;
    SmugglerDatabaseOptions smugglerOptions;
    using (var jsonReader = new RavenJsonTextReader(new StringReader(requestString)))
    {
        var serializer = JsonExtensions.CreateDefaultJsonSerializer();
        smugglerOptions = (SmugglerDatabaseOptions)serializer.Deserialize(jsonReader, typeof(SmugglerDatabaseOptions));
    }

    // Fall back to "Dump of <db>, <timestamp>" when no valid file name was requested.
    var fileName = string.IsNullOrEmpty(smugglerOptions.NoneDefaultFileName) || (smugglerOptions.NoneDefaultFileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
        ? $"Dump of {DatabaseName}, {DateTime.Now.ToString("yyyy-MM-dd HH-mm", CultureInfo.InvariantCulture)}"
        : smugglerOptions.NoneDefaultFileName;

    //create PushStreamContent object that will be called when the output stream will be ready.
    result.Content = new PushStreamContent(async (outputStream, content, arg3) =>
    {
        var status = new DataDumperOperationStatus();
        var tcs = new TaskCompletionSource <object>();
        var sp = Stopwatch.StartNew();
        try
        {
            // Register the export as a pending task so the studio can track it.
            Database.Tasks.AddTask(tcs.Task, status, new TaskActions.PendingTaskDescription
            {
                StartTime = SystemTime.UtcNow,
                TaskType = TaskActions.PendingTaskType.ExportDatabase,
                Description = "Exporting database, file name: " + fileName
            }, taskId, smugglerOptions.CancelToken, skipStatusCheck: true);

            var dataDumper = new DatabaseDataDumper(Database, smugglerOptions);
            // Forward smuggler progress messages into the task status.
            dataDumper.Progress += s => status.MarkProgress(s);
            var operationState = await dataDumper.ExportData(
                new SmugglerExportOptions <RavenConnectionStringOptions>
            {
                ToStream = outputStream
            }).ConfigureAwait(false);

            const string message = "Completed export";
            status.MarkCompleted(message, sp.Elapsed);
            status.OperationState = operationState;
        }
        catch (OperationCanceledException e)
        {
            status.MarkCanceled(e.Message);
        }
        catch (Exception e)
        {
            status.ExceptionDetails = e.ToString();
            status.MarkFaulted(e.ToString());
            // Rethrow so PushStreamContent aborts the response stream.
            throw;
        }
        finally
        {
            // Completing tcs.Task marks the pending task finished; closing the
            // stream ends the HTTP response.
            tcs.SetResult("Completed");
            outputStream.Close();
        }
    });

    result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = fileName + ".ravendbdump" };
    return (new CompletedTask <HttpResponseMessage>(result));
}
// Applies a transform script that drops foos/1 during BOTH export and import,
// then verifies only foos/2 survives the round-trip.
public async Task SmugglerTransformShouldWorkForDatabaseDataDumper()
{
    var path = NewDataPath(forceCreateDir: true);
    var backupPath = Path.Combine(path, "backup.dump");

    // Transform that drops foos/1 and passes every other document through.
    const string dropFoos1Script = @"function(doc) { var id = doc['@metadata']['@id']; if(id === 'foos/1') return null; return doc; }";

    using (var exportStore = NewDocumentStore())
    {
        using (var session = exportStore.OpenSession())
        {
            session.Store(new Foo { Name = "N1" });
            session.Store(new Foo { Name = "N2" });
            session.SaveChanges();
        }

        var exporter = new DatabaseDataDumper(exportStore.DocumentDatabase);
        exporter.Options.TransformScript = dropFoos1Script;
        await exporter.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
        {
            ToFile = backupPath,
            From = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = exportStore.DefaultDatabase }
        });
    }

    using (var importStore = NewDocumentStore())
    {
        var importer = new DatabaseDataDumper(importStore.DocumentDatabase);
        importer.Options.TransformScript = dropFoos1Script;
        await importer.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions>
        {
            FromFile = backupPath,
            To = new EmbeddedRavenConnectionStringOptions { DefaultDatabase = importStore.DefaultDatabase }
        });

        using (var session = importStore.OpenSession())
        {
            var foos = session.Query<Foo>()
                .Customize(customization => customization.WaitForNonStaleResultsAsOfNow())
                .ToList();

            Assert.Equal(1, foos.Count);
            Assert.Equal("foos/2", foos[0].Id);
            Assert.Equal("N2", foos[0].Name);
            Assert.Null(session.Load<Foo>(1));
        }
    }
}
/// <summary>
/// Imports a smuggler dump (.ravendump) uploaded as multipart form content into this database.
/// The import itself runs as a background task registered with <c>Database.Tasks</c>; the
/// response returns immediately with the operation id so the client can poll for progress.
/// </summary>
/// <param name="batchSize">Number of items the smuggler processes per batch.</param>
/// <param name="includeExpiredDocuments">When false, expired documents are excluded from the import.</param>
/// <param name="stripReplicationInformation">When true, replication metadata is stripped from imported documents.</param>
/// <param name="operateOnTypes">Which item types (documents, attachments, ...) to import.</param>
/// <param name="filtersPipeDelimited">Filter settings encoded as path;;;value;;;shouldMatch triples separated by "|||" (see comment below).</param>
/// <param name="transformScript">Optional JS transform applied to each document.</param>
/// <returns>A response containing the background operation id.</returns>
public async Task <HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, bool stripReplicationInformation, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    string tempPath = Path.GetTempPath();
    // Path.Combine instead of raw string concatenation: correct even if the temp path
    // ever comes back without a trailing directory separator.
    var fullTempPath = Path.Combine(tempPath, Constants.TempUploadsDirectoryName);
    // A stale *file* with the directory's name would make CreateDirectory below fail.
    if (File.Exists(fullTempPath))
    {
        File.Delete(fullTempPath);
    }
    if (Directory.Exists(fullTempPath) == false)
    {
        Directory.CreateDirectory(fullTempPath);
    }

    var streamProvider = new MultipartFileStreamProvider(fullTempPath);
    await Request.Content.ReadAsMultipartAsync(streamProvider).ConfigureAwait(false);
    var uploadedFilePath = streamProvider.FileData[0].LocalFileName;

    string fileName = null;
    var fileContent = streamProvider.Contents.SingleOrDefault();
    if (fileContent != null)
    {
        // The part may legally lack a Content-Disposition header or file name; guard against NRE.
        fileName = fileContent.Headers.ContentDisposition?.FileName?.Replace("\"", string.Empty);
    }

    var status = new ImportOperationStatus();
    var cts = new CancellationTokenSource();

    var task = Task.Run(async () =>
    {
        try
        {
            using (var fileStream = File.Open(uploadedFilePath, FileMode.Open, FileAccess.Read))
            {
                var dataDumper = new DatabaseDataDumper(Database);
                // Forward smuggler progress messages into the polled task status.
                dataDumper.Progress += s => status.LastProgress = s;
                var smugglerOptions = dataDumper.Options;
                smugglerOptions.BatchSize = batchSize;
                smugglerOptions.ShouldExcludeExpired = !includeExpiredDocuments;
                smugglerOptions.StripReplicationInformation = stripReplicationInformation;
                smugglerOptions.OperateOnTypes = operateOnTypes;
                smugglerOptions.TransformScript = transformScript;
                smugglerOptions.CancelToken = cts;

                // Filters are passed in without the aid of the model binder. Instead, we pass in a
                // list of FilterSettings using a string like this:
                //     pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
                // Why? Because I don't see a way to pass a list of values to a WebAPI method that
                // accepts a file upload, outside of passing in a simple string value and parsing
                // it ourselves.
                if (filtersPipeDelimited != null)
                {
                    smugglerOptions.Filters.AddRange(filtersPipeDelimited
                                                     .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
                                                     .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
                                                     .Select(o => new FilterSetting { Path = o[0], Values = new List <string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
                }

                await dataDumper.ImportData(new SmugglerImportOptions <RavenConnectionStringOptions> { FromStream = fileStream });
            }
        }
        catch (Exception e)
        {
            status.Faulted = true;
            status.State = RavenJObject.FromObject(new { Error = e.ToString() });
            if (cts.Token.IsCancellationRequested)
            {
                status.State = RavenJObject.FromObject(new { Error = "Task was cancelled" });
                cts.Token.ThrowIfCancellationRequested(); //needed for displaying the task status as canceled and not faulted
            }

            if (e is InvalidDataException)
            {
                status.ExceptionDetails = e.Message;
            }
            else if (e is Imports.Newtonsoft.Json.JsonReaderException)
            {
                // Fixed typo in this user-facing message: ".ravnedump" -> ".ravendump".
                status.ExceptionDetails = "Failed to load JSON Data. Please make sure you are importing .ravendump file, exported by smuggler (aka database export). If you are importing a .ravendump file then the file may be corrupted";
            }
            else
            {
                status.ExceptionDetails = e.ToString();
            }
            throw;
        }
        finally
        {
            status.Completed = true;
            // Always remove the uploaded temp file, whether the import succeeded or not.
            File.Delete(uploadedFilePath);
        }
    }, cts.Token);

    long id;
    // Register the background import so clients can poll/cancel it by operation id.
    Database.Tasks.AddTask(task, status, new TaskActions.PendingTaskDescription
    {
        StartTime = SystemTime.UtcNow,
        TaskType = TaskActions.PendingTaskType.ImportDatabase,
        Payload = fileName,
    }, out id, cts);

    return GetMessageWithObject(new
    {
        OperationId = id
    });
}
// Timer entry point for periodic export. Runs either an incremental (fullBackup == false)
// or a full (fullBackup == true) export, uploads the result, and persists the new etag
// watermarks in the PeriodicExportStatus document. Both modes share one in-flight task
// slot (currentTask): if an export is already running this invocation is a no-op.
private void TimerCallback(bool fullBackup)
{
    // Cheap pre-check outside the lock; re-checked under the lock below.
    if (currentTask != null)
    {
        return;
    }

    if (Database.Disposed)
    {
        Dispose();
        return;
    }

    // we have shared lock for both incremental and full backup.
    // NOTE(review): lock (this) is generally discouraged (external code could lock the same
    // instance); a private lock object would be safer — confirm before changing.
    lock (this)
    {
        if (currentTask != null)
        {
            return;
        }

        // Task.Factory.StartNew with an async lambda yields Task<Task>; the .Unwrap() below
        // flattens it so currentTask completes only when the export itself finishes.
        currentTask = Task.Factory.StartNew(async () =>
        {
            var documentDatabase = Database;
            if (documentDatabase == null)
            {
                return;
            }

            using (LogContext.WithResource(documentDatabase.Name))
            {
                try
                {
                    OperationState exportResult;
                    bool performAnotherRun = false;
                    do
                    {
                        // backupLimit < int.MaxValue only after a previous failure: export a
                        // limited chunk first, then (see bottom of loop) run again unbounded.
                        var dataDumper = new DatabaseDataDumper(documentDatabase, new SmugglerDatabaseOptions() { Limit = backupLimit });
                        // Snapshot config/status fields; they can be swapped concurrently.
                        var localBackupConfigs = exportConfigs;
                        var localBackupStatus = exportStatus;
                        if (localBackupConfigs == null)
                        {
                            return;
                        }
                        if (localBackupConfigs.Disabled)
                        {
                            return;
                        }

                        if (fullBackup == false)
                        {
                            var currentEtags = dataDumper.Operations.FetchCurrentMaxEtags();
                            // No-op if nothing has changed
                            if (currentEtags.LastDocsEtag == localBackupStatus.LastDocsEtag &&
                                currentEtags.LastAttachmentsEtag == localBackupStatus.LastAttachmentsEtag &&
                                currentEtags.LastDocDeleteEtag == localBackupStatus.LastDocsDeletionEtag &&
                                currentEtags.LastAttachmentsDeleteEtag == localBackupStatus.LastAttachmentDeletionEtag)
                            {
                                return;
                            }
                        }

                        // Export into the configured local folder, or a temp folder under the data directory.
                        var backupPath = localBackupConfigs.LocalFolderName ??
                                         Path.Combine(documentDatabase.Configuration.DataDirectory, "PeriodicExport-Temp");
                        if (Directory.Exists(backupPath) == false)
                        {
                            Directory.CreateDirectory(backupPath);
                        }

                        if (fullBackup)
                        {
                            // create filename for full dump
                            backupPath = Path.Combine(backupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-full-dump");
                            if (File.Exists(backupPath))
                            {
                                // Name collision within the same minute: append " - <counter>" until free.
                                var counter = 1;
                                while (true)
                                {
                                    backupPath = Path.Combine(Path.GetDirectoryName(backupPath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-full-dump");

                                    if (File.Exists(backupPath) == false)
                                    {
                                        break;
                                    }
                                    counter++;
                                }
                            }
                        }

                        var smugglerOptions = dataDumper.Options;
                        if (fullBackup == false)
                        {
                            // Incremental export: resume from the last persisted watermarks,
                            // and include deletions so the target can replay them.
                            smugglerOptions.StartDocsEtag = localBackupStatus.LastDocsEtag;
                            smugglerOptions.StartAttachmentsEtag = localBackupStatus.LastAttachmentsEtag;
                            smugglerOptions.StartDocsDeletionEtag = localBackupStatus.LastDocsDeletionEtag;
                            smugglerOptions.StartAttachmentsDeletionEtag = localBackupStatus.LastAttachmentDeletionEtag;
                            smugglerOptions.Incremental = true;
                            smugglerOptions.ExportDeletions = true;
                        }

                        exportResult = await dataDumper.ExportData(new SmugglerExportOptions <RavenConnectionStringOptions> { ToFile = backupPath }).ConfigureAwait(false);

                        if (fullBackup == false)
                        {
                            // No-op if nothing has changed
                            if (exportResult.LastDocsEtag == localBackupStatus.LastDocsEtag &&
                                exportResult.LastAttachmentsEtag == localBackupStatus.LastAttachmentsEtag &&
                                exportResult.LastDocDeleteEtag == localBackupStatus.LastDocsDeletionEtag &&
                                exportResult.LastAttachmentsDeleteEtag == localBackupStatus.LastAttachmentDeletionEtag)
                            {
                                logger.Info("Periodic export returned prematurely, nothing has changed since last export");
                                return;
                            }
                        }

                        try
                        {
                            await UploadToServer(exportResult.FilePath, localBackupConfigs, fullBackup).ConfigureAwait(false);
                        }
                        finally
                        {
                            // if user did not specify local folder we delete temporary file.
                            if (String.IsNullOrEmpty(localBackupConfigs.LocalFolderName))
                            {
                                IOExtensions.DeleteFile(exportResult.FilePath);
                            }
                        }

                        // Persist the new watermarks so the next incremental run resumes from here.
                        localBackupStatus.LastAttachmentsEtag = exportResult.LastAttachmentsEtag;
                        localBackupStatus.LastDocsEtag = exportResult.LastDocsEtag;
                        localBackupStatus.LastDocsDeletionEtag = exportResult.LastDocDeleteEtag;
                        localBackupStatus.LastAttachmentDeletionEtag = exportResult.LastAttachmentsDeleteEtag;

                        if (fullBackup)
                        {
                            localBackupStatus.LastFullBackup = SystemTime.UtcNow;
                        }
                        else
                        {
                            localBackupStatus.LastBackup = SystemTime.UtcNow;
                        }

                        var ravenJObject = JsonExtensions.ToJObject(localBackupStatus);
                        ravenJObject.Remove("Id");
                        var putResult = documentDatabase.Documents.Put(PeriodicExportStatus.RavenDocumentKey, null, ravenJObject, new RavenJObject(), null);

                        // this result in exportStatus being refreshed
                        localBackupStatus = exportStatus;
                        if (localBackupStatus != null)
                        {
                            if (localBackupStatus.LastDocsEtag.IncrementBy(1) == putResult.ETag) // the last etag is with just us
                            {
                                localBackupStatus.LastDocsEtag = putResult.ETag;                 // so we can skip it for the next time
                            }
                        }

                        if (backupLimit != int.MaxValue)
                        {
                            // The first pass was limited (failure-recovery mode); run once more without a limit.
                            backupLimit = int.MaxValue;
                            performAnotherRun = true;
                        }
                        else
                        {
                            performAnotherRun = false;
                        }
                    } while (performAnotherRun);
                }
                catch (ObjectDisposedException)
                {
                    // shutting down, probably
                }
                catch (OperationCanceledException)
                {
                    // shutting down, probably
                }
                catch (Exception e)
                {
                    // Arm limited mode so the next run exports only 100 items and can make
                    // progress past a problematic batch; raise an alert for the operator.
                    backupLimit = 100;
                    logger.ErrorException("Error when performing periodic export", e);
                    Database.AddAlert(new Alert
                    {
                        AlertLevel = AlertLevel.Error,
                        CreatedAt = SystemTime.UtcNow,
                        Message = e.Message,
                        Title = "Error in Periodic Export",
                        Exception = e.ToString(),
                        UniqueKey = "Periodic Export Error",
                    });
                }
            }
        })
                      .Unwrap();

        // Free the in-flight slot when the export completes (success or failure).
        currentTask.ContinueWith(_ =>
        {
            currentTask = null;
        });
    }
}