public void MaxNumberOfItemsToProcessInSingleBatchShouldBeRespectedByDataDumper()
{
    // The server-side MaxNumberOfItemsToProcessInSingleBatch limit (1234) must clamp a
    // larger user-requested BatchSize (4321) during export and import, while a smaller
    // requested size (1000) must be left untouched.
    var dumpFile = Path.Combine(NewDataPath(forceCreateDir: true), "raven.dump");
    using (var server = GetNewServer(configureConfig: configuration => configuration.MaxNumberOfItemsToProcessInSingleBatch = 1234))
    {
        // Export with an oversized batch: expect it clamped to the server limit afterwards.
        var exportDumper = new DataDumper(server.SystemDatabase, options: new SmugglerOptions { BatchSize = 4321 });
        Assert.Equal(4321, exportDumper.SmugglerOptions.BatchSize);
        exportDumper.ExportData(new SmugglerExportOptions { ToFile = dumpFile }).ResultUnwrap();
        Assert.Equal(1234, exportDumper.SmugglerOptions.BatchSize);

        // Import with an oversized batch: same clamping expected.
        var importDumper = new DataDumper(server.SystemDatabase, options: new SmugglerOptions { BatchSize = 4321 });
        Assert.Equal(4321, importDumper.SmugglerOptions.BatchSize);
        importDumper.ImportData(new SmugglerImportOptions { FromFile = dumpFile }).Wait();
        Assert.Equal(1234, importDumper.SmugglerOptions.BatchSize);

        // Export with an undersized batch: the requested size must survive unchanged.
        var smallBatchDumper = new DataDumper(server.SystemDatabase, options: new SmugglerOptions { BatchSize = 1000 });
        Assert.Equal(1000, smallBatchDumper.SmugglerOptions.BatchSize);
        smallBatchDumper.ExportData(new SmugglerExportOptions { ToFile = dumpFile }).ResultUnwrap();
        Assert.Equal(1000, smallBatchDumper.SmugglerOptions.BatchSize);
    }
}
public void CanBackupToDirectory()
{
    // Stores a document, waits for the periodic export to write a status document,
    // then restores the incremental backup into a fresh store and verifies the document.
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            var periodicBackupSetup = new PeriodicExportSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }
        // FIX: SpinUntil's bool result was previously ignored, so a timed-out wait let
        // the test continue against an empty/partial backup and fail later with a
        // confusing error. Fail fast with a clear message instead.
        var exportRan = SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicExportStatus.RavenDocumentKey) != null, 10000);
        Assert.True(exportRan, "Periodic export did not run within 10 seconds.");
    }
    using (var store = NewDocumentStore())
    {
        var dataDumper = new DataDumper(store.SystemDatabase) { SmugglerOptions = { Incremental = true } };
        dataDumper.ImportData(new SmugglerImportOptions { FromFile = backupPath }).Wait();
        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
        }
    }
    IOExtensions.DeleteDirectory(backupPath);
}
public void CanSaveImplicitChangesToDocumentsFromAQuery_UsingDunpFile()
{
    // Imports a recorded dump, then verifies that saving after a large query under
    // optimistic concurrency does not raise a concurrency exception.
    using (var store = NewDocumentStore())
    {
        store.Conventions.FindTypeTagName = FindTypeByTagName;
        var options = new SmugglerOptions { BackupPath = @"Dump of test-concurrency-exception2, 21 May 2013 14-36.ravendump" };
        var dumper = new DataDumper(store.DocumentDatabase, options);
        // FIX: ImportData returns a Task that was previously fire-and-forget, so the
        // query below raced the import. Block until the import finishes, matching how
        // the sibling VerifyDump helper waits on this same legacy overload family.
        dumper.ImportData(options).Wait();
        using (var session = store.OpenSession())
        {
            session.Advanced.UseOptimisticConcurrency = true;
            var foos = session.Query<SectionData>()
                .Customize(x => x.WaitForNonStaleResults())
                .Take(1024)
                .ToList();
            Assert.True(foos.Count > 200);
            session.SaveChanges();
        }
    }
}
public void CanGetCorrectResult()
{
    // Imports the failingdump11 fixture and verifies a scripted patch can run
    // against the Raven/DocumentsByEntityName index afterwards.
    using (var store = NewDocumentStore())
    {
        var smugglerOptions = new SmugglerOptions();
        var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions);
        using (var stream = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.Patching.failingdump11.ravendump"))
        {
            // FIX: Task.Wait(TimeSpan) returns false on timeout; the result was
            // previously discarded, letting the test continue against a partial import.
            var completed = dataDumper.ImportData(stream, smugglerOptions).Wait(TimeSpan.FromSeconds(15));
            Assert.True(completed, "Import did not complete within 15 seconds.");
        }
        using (var s = store.OpenSession())
        {
            // Force the index to be non-stale before patching by entity name.
            s.Advanced.LuceneQuery<object>("Raven/DocumentsByEntityName").WaitForNonStaleResults().ToList();
            store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName",
                new IndexQuery { Query = "Tag:Regions" },
                new ScriptedPatchRequest { Script = @"this.Test = 'test';" },
                true);
        }
    }
}
// NOTE(review): this endpoint starts an import from the request body and then
// unconditionally throws; the returned task is stored in `importData` but never
// awaited or observed. This looks like a stub or a deliberate fault-injection
// endpoint — confirm intent before relying on it. As written, the import races
// the thrown exception and any import failure is unobserved.
public async Task<HttpResponseMessage> ImportDatabase()
{
    var dataDumper = new DataDumper(Database);
    // Fire-and-forget: ImportData's task is captured but never awaited.
    var importData = dataDumper.ImportData(new SmugglerImportOptions { FromStream = await InnerRequest.Content.ReadAsStreamAsync() }, new SmugglerOptions());
    throw new InvalidOperationException();
}
public void CanFullBackupToDirectory()
{
    // End-to-end periodic full export: store a document plus an attachment, wait for a
    // full backup cycle, then restore the resulting .ravendb-full-dump into a fresh
    // store and verify both items survived the round trip.
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    try
    {
        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());
            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                session.Store(new PeriodicExportSetup { LocalFolderName = backupPath, FullBackupIntervalMilliseconds = 500 }, PeriodicExportSetup.RavenDocumentKey);
                session.SaveChanges();
            }
            WaitForNextFullBackup(store);
        }

        using (var store = NewDocumentStore())
        {
            // Select the earliest full-dump file produced by the export above.
            var fullDumpFile = Directory.GetFiles(Path.GetFullPath(backupPath))
                .Where(file => ".ravendb-full-dump".Equals(Path.GetExtension(file), StringComparison.InvariantCultureIgnoreCase))
                .OrderBy(File.GetLastWriteTimeUtc)
                .First();

            var importer = new DataDumper(store.DocumentDatabase);
            importer.ImportData(new SmugglerImportOptions { FromFile = fullDumpFile }, new SmugglerOptions { Incremental = false }).Wait();

            using (var session = store.OpenSession())
            {
                Assert.Equal("oren", session.Load<User>(1).Name);
                Assert.NotNull(store.DatabaseCommands.GetAttachment("attach/1"));
            }
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
/// <summary>
/// Imports a database dump uploaded as multipart form data (the "file" part),
/// honoring the caller-supplied batch size, item types, optional filters and
/// transform script.
/// </summary>
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }
    var streamProvider = new MultipartMemoryStreamProvider();
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var fileStream = await streamProvider.Contents
        .First(c => c.Headers.ContentDisposition.Name == "\"file\"")
        .ReadAsStreamAsync();
    var dataDumper = new DataDumper(Database);
    var importOptions = new SmugglerImportOptions { FromStream = fileStream };
    var options = new SmugglerOptions
    {
        BatchSize = batchSize,
        // FIX: the parameter means "include expired documents", so the smuggler's
        // *exclude* option must be its negation. Previously the flag was assigned
        // directly, inverting the caller's intent; the other import endpoint in this
        // codebase already sets ShouldExcludeExpired = !includeExpiredDocuments.
        ShouldExcludeExpired = !includeExpiredDocuments,
        OperateOnTypes = operateOnTypes,
        TransformScript = transformScript
    };

    // Filters are passed in without the aid of the model binder. Instead, we pass in a list of FilterSettings using a string like this: pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
    // Why? Because I don't see a way to pass a list of a values to a WebAPI method that accepts a file upload, outside of passing in a simple string value and parsing it ourselves.
    if (filtersPipeDelimited != null)
    {
        options.Filters.AddRange(filtersPipeDelimited
            .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
            .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
            .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
    }
    await dataDumper.ImportData(importOptions, options);
    return GetEmptyMessage();
}
public async Task CanGetCorrectResult()
{
    // Imports the Sandbox dump and verifies the LogEntry/CountByDate map/reduce
    // index yields exactly four reduced results.
    using (var store = NewDocumentStore())
    {
        using (var dump = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.MailingList.Sandbox.ravendump"))
        {
            await new DataDumper(store.SystemDatabase).ImportData(new SmugglerImportOptions { FromStream = dump });
        }

        using (var session = store.OpenSession())
        {
            var results = session.Query<object>("LogEntry/CountByDate")
                .Customize(q => q.WaitForNonStaleResults())
                .ToList();
            Assert.Equal(4, results.Count);
        }
    }
}
/// <summary>
/// Imports a database dump uploaded as multipart content (first part taken as the
/// dump stream), using default smuggler options.
/// </summary>
public async Task<HttpResponseMessage> ImportDatabase()
{
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    var streamProvider = new MultipartMemoryStreamProvider();
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var uploadStream = await streamProvider.Contents.First().ReadAsStreamAsync();

    var dumper = new DataDumper(Database);
    await dumper.ImportData(new SmugglerImportOptions { FromStream = uploadStream }, new SmugglerOptions());
    return GetEmptyMessage();
}
public void ShouldTakeUnder30Minutes()
{
    // Stress test: importing the big fixture dump must finish in under 30 minutes.
    var sw = Stopwatch.StartNew();
    var smugglerOptions = new SmugglerOptions();
    using (var store = NewDocumentStore())
    {
        using (var stream = typeof(LoadBigFile).Assembly.GetManifestResourceStream("Raven.StressTests.Load.LoadBigFile.dump"))
        {
            var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions) { Progress = Console.WriteLine };
            // FIX: ImportData returns a Task that was previously fire-and-forget, so
            // the stopwatch measured essentially nothing and the store could be
            // disposed mid-import. Block until completion, matching the sibling
            // tests that .Wait() this same overload.
            dataDumper.ImportData(stream, smugglerOptions).Wait();
        }
    }
    sw.Stop();
    Assert.True(sw.Elapsed < TimeSpan.FromMinutes(30), string.Format("Test should run under 30 minutes, but run {0} minutes.", sw.Elapsed.TotalMinutes));
}
public void CanBackupToDirectory()
{
    // Stores a document, waits for the periodic backup to touch its setup document
    // (etag changes), then restores the backup into a fresh store and verifies it.
    var backupPath = GetPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        Etag etagForBackups;
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            var periodicBackupSetup = new PeriodicBackupSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 };
            session.Store(periodicBackupSetup, PeriodicBackupSetup.RavenDocumentKey);
            session.SaveChanges();
            etagForBackups = session.Advanced.GetEtagFor(periodicBackupSetup);
        }
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag != etagForBackups);
    }
    using (var store = NewDocumentStore())
    {
        var smugglerOptions = new SmugglerOptions { BackupPath = backupPath };
        var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions);
        // FIX: ImportData returns a Task; without .Wait() the assertions below raced
        // the import. The sibling VerifyDump helper waits on this identical overload.
        dataDumper.ImportData(smugglerOptions, true).Wait();
        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
        }
    }
    IOExtensions.DeleteDirectory(backupPath);
}
public void CanGetCorrectResult_esent()
{
    // Same as CanGetCorrectResult but forcing the esent storage engine: the Sandbox
    // dump must produce four LogEntry/CountByDate results.
    using (var store = NewDocumentStore(requestedStorage: "esent"))
    {
        var smugglerOptions = new SmugglerOptions();
        var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions);
        using (var stream = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.MailingList.Sandbox.ravendump"))
        {
            dataDumper.ImportData(stream, smugglerOptions).Wait();
        }
        // FIX: removed leftover WaitForUserToContinueTheTest(store) — an interactive
        // debugging aid that does not belong in an automated test run.
        using (var s = store.OpenSession())
        {
            var objects = s.Query<object>("LogEntry/CountByDate")
                .Customize(x => x.WaitForNonStaleResults())
                .ToList();
            Assert.Equal(4, objects.Count);
        }
    }
}
/// <summary>
/// Seeds the embedded Northwind sample data (documents, indexes and transformers),
/// but only into a completely empty database; otherwise responds 400.
/// </summary>
public async Task<HttpResponseMessage> CreateSampleData()
{
    var results = Database.Queries.Query(Constants.DocumentsByEntityNameIndex, new IndexQuery(), CancellationToken.None);
    if (results.Results.Count > 0)
    {
        return GetMessageWithString("You cannot create sample data in a database that already contains documents", HttpStatusCode.BadRequest);
    }

    using (var sampleData = typeof(StudioTasksController).Assembly.GetManifestResourceStream("Raven.Database.Server.Assets.EmbeddedData.Northwind.dump"))
    {
        var importOptions = new SmugglerOptions
        {
            OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Transformers,
            ShouldExcludeExpired = false,
        };
        var importer = new DataDumper(Database);
        await importer.ImportData(new SmugglerImportOptions { FromStream = sampleData }, importOptions);
    }
    return GetEmptyMessage();
}
// Imports a database dump uploaded as multipart form data. The upload is buffered
// to a temp file, and the import itself runs as a background task registered with
// Database.Tasks; the response carries the operation id so the client can poll status.
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }
    // Buffer the upload under the temp directory rather than in memory (dumps can be large).
    string tempPath = Path.GetTempPath();
    var fullTempPath = tempPath + Constants.TempUploadsDirectoryName;
    // A stale *file* with the upload directory's name would make CreateDirectory fail.
    if (File.Exists(fullTempPath))
        File.Delete(fullTempPath);
    if (Directory.Exists(fullTempPath) == false)
        Directory.CreateDirectory(fullTempPath);
    var streamProvider = new MultipartFileStreamProvider(fullTempPath);
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var uploadedFilePath = streamProvider.FileData[0].LocalFileName;
    string fileName = null;
    var fileContent = streamProvider.Contents.SingleOrDefault();
    if (fileContent != null)
    {
        // Strip the quotes some clients put around the Content-Disposition filename.
        fileName = fileContent.Headers.ContentDisposition.FileName.Replace("\"", string.Empty);
    }
    var status = new ImportOperationStatus();
    var cts = new CancellationTokenSource();
    // Run the import in the background; progress and errors are surfaced via `status`.
    var task = Task.Run(async () =>
    {
        try
        {
            using (var fileStream = File.Open(uploadedFilePath, FileMode.Open, FileAccess.Read))
            {
                var dataDumper = new DataDumper(Database);
                dataDumper.Progress += s => status.LastProgress = s;
                var smugglerOptions = dataDumper.SmugglerOptions;
                smugglerOptions.BatchSize = batchSize;
                // Caller asks to *include* expired docs; the smuggler option *excludes* them.
                smugglerOptions.ShouldExcludeExpired = !includeExpiredDocuments;
                smugglerOptions.OperateOnTypes = operateOnTypes;
                smugglerOptions.TransformScript = transformScript;
                smugglerOptions.CancelToken = cts;
                // Filters are passed in without the aid of the model binder. Instead, we pass in a list of FilterSettings using a string like this: pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
                // Why? Because I don't see a way to pass a list of a values to a WebAPI method that accepts a file upload, outside of passing in a simple string value and parsing it ourselves.
                if (filtersPipeDelimited != null)
                {
                    smugglerOptions.Filters.AddRange(filtersPipeDelimited
                        .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
                        .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
                        .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
                }
                await dataDumper.ImportData(new SmugglerImportOptions { FromStream = fileStream });
            }
        }
        catch (Exception e)
        {
            status.Faulted = true;
            status.State = RavenJObject.FromObject(new { Error = e.ToString() });
            if (cts.Token.IsCancellationRequested)
            {
                status.State = RavenJObject.FromObject(new { Error = "Task was cancelled" });
                cts.Token.ThrowIfCancellationRequested(); //needed for displaying the task status as canceled and not faulted
            }
            throw;
        }
        finally
        {
            status.Completed = true;
            // Clean up the buffered upload regardless of outcome.
            File.Delete(uploadedFilePath);
        }
    }, cts.Token);
    long id;
    Database.Tasks.AddTask(task, status, new TaskActions.PendingTaskDescription
    {
        StartTime = SystemTime.UtcNow,
        TaskType = TaskActions.PendingTaskType.ImportDatabase,
        Payload = fileName,
    }, out id, cts);
    return GetMessageWithObject(new { OperationId = id });
}
private void VerifyDump(string backupPath, Action<EmbeddableDocumentStore> action)
{
    // Restores an incremental dump into a throwaway store and runs the caller's
    // assertions against it.
    using (var store = NewDocumentStore())
    {
        var importer = new DataDumper(store.SystemDatabase);
        importer.SmugglerOptions.Incremental = true;
        importer.ImportData(new SmugglerImportOptions { FromFile = backupPath }).Wait();
        action(store);
    }
}
public void CanBackupToDirectory_MultipleBackups()
{
    // Two periodic backup cycles (one document each) must both be restorable from
    // the same incremental backup folder.
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            session.Store(new PeriodicBackupSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 }, PeriodicBackupSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        // First cycle: wait until the status document reports a real LastDocsEtag.
        SpinWait.SpinUntil(() =>
        {
            var statusDoc = store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey);
            if (statusDoc == null)
                return false;
            var status = statusDoc.DataAsJson.JsonDeserialization<PeriodicBackupStatus>();
            return status.LastDocsEtag != Etag.Empty && status.LastDocsEtag != null;
        });

        var statusEtagAfterFirstBackup = store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey).Etag;

        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "ayende" });
            session.SaveChanges();
        }

        // Second cycle: wait for the status document to change again.
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey).Etag != statusEtagAfterFirstBackup);
    }

    using (var store = NewDocumentStore())
    {
        var importer = new DataDumper(store.DocumentDatabase);
        importer.ImportData(new SmugglerImportOptions { FromFile = backupPath }, new SmugglerOptions { Incremental = true }).Wait();
        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load<User>(1).Name);
            Assert.Equal("ayende", session.Load<User>(2).Name);
        }
    }
    IOExtensions.DeleteDirectory(backupPath);
}
public void CanBackupAttachmentDeletion()
{
    // An attachment that is put and then deleted across two periodic export cycles
    // must also be absent after restoring the incremental backup.
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        // FIX: removed unused local `string userId;` — a copy-paste leftover from the
        // document-deletion variant of this test.
        using (var session = store.OpenSession())
        {
            var periodicBackupSetup = new PeriodicExportSetup { LocalFolderName = backupPath, IntervalMilliseconds = 250 };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }
        var backupStatus = GetPerodicBackupStatus(store.DocumentDatabase);
        store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4 }), new RavenJObject());
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus);
        store.DatabaseCommands.DeleteAttachment("attach/1", null);
        // NOTE(review): the document-deletion test passes x => x.LastDocsDeletionEtag
        // when waiting for the deletion cycle; this one uses the default selector —
        // confirm that is intentional.
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus);
    }
    using (var store = NewDocumentStore())
    {
        var dataDumper = new DataDumper(store.DocumentDatabase);
        dataDumper.ImportData(new SmugglerImportOptions { FromFile = backupPath, }, new SmugglerOptions { Incremental = true, }).Wait();
        Assert.Null(store.DatabaseCommands.GetAttachment("attach/1"));
    }
    IOExtensions.DeleteDirectory(backupPath);
}
public void CanBackupDocumentDeletion()
{
    // A document stored and then deleted across two periodic export cycles must be
    // absent after restoring the incremental backup.
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new PeriodicExportSetup { LocalFolderName = backupPath, IntervalMilliseconds = 100 }, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }
        var backupStatus = GetPerodicBackupStatus(store.DocumentDatabase);

        string deletedUserId;
        using (var session = store.OpenSession())
        {
            var user = new User { Name = "oren" };
            session.Store(user);
            deletedUserId = user.Id;
            session.SaveChanges();
        }
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus);

        store.DatabaseCommands.Delete(deletedUserId, null);
        // Deletions are tracked by a separate etag on the backup status document.
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus, x => x.LastDocsDeletionEtag);
    }
    using (var store = NewDocumentStore())
    {
        var importer = new DataDumper(store.DocumentDatabase);
        importer.ImportData(new SmugglerImportOptions { FromFile = backupPath }, new SmugglerOptions { Incremental = true }).Wait();
        using (var session = store.OpenSession())
        {
            Assert.Null(session.Load<User>(1));
        }
    }
    IOExtensions.DeleteDirectory(backupPath);
}
private void AssertUsersCountInBackup(int expectedNumberOfUsers, string file)
{
    // Restores a full (non-incremental) dump file and checks the indexed User count.
    using (var store = NewDocumentStore())
    {
        var importer = new DataDumper(store.DocumentDatabase);
        importer.ImportData(new SmugglerImportOptions { FromFile = file }, new SmugglerOptions { Incremental = false }).Wait();
        WaitForIndexing(store);
        using (var session = store.OpenSession())
        {
            Assert.Equal(expectedNumberOfUsers, session.Query<User>().Count());
        }
    }
}
private void VerifyDump(string backupPath, Action<EmbeddableDocumentStore> action)
{
    // Restores the dump at backupPath (incremental flag set, legacy smuggler API)
    // into a throwaway store and runs the caller's assertions against it.
    using (var store = NewDocumentStore())
    {
        var importOptions = new SmugglerOptions { BackupPath = backupPath };
        new DataDumper(store.DocumentDatabase, importOptions).ImportData(importOptions, true).Wait();
        action(store);
    }
}