public void CanGetCorrectResult() { using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions(); var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); using (var stream = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.Patching.failingdump11.ravendump")) { dataDumper.ImportData(stream, smugglerOptions).Wait(TimeSpan.FromSeconds(15)); } using (var s = store.OpenSession()) { s.Advanced.LuceneQuery <object>("Raven/DocumentsByEntityName").WaitForNonStaleResults().ToList(); store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName", new IndexQuery { Query = "Tag:Regions" }, new ScriptedPatchRequest { Script = @"this.Test = 'test';" } , true); } } }
public void CanImportFromDumpFile()
{
    var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() };
    using (var store = NewDocumentStoreWithData())
    {
        var dumper = new DataDumper(store.DocumentDatabase, options);
        dumper.ExportData(options);
    }

    using (var store = NewDocumentStore())
    {
        var dumper = new DataDumper(store.DocumentDatabase, options);
        dumper.ImportData(options);

        using (var session = store.OpenSession())
        {
            // Person imported.
            Assert.Equal(1, session.Query<Person>().Customize(x => x.WaitForNonStaleResults()).Take(5).Count());

            // Attachment imported.
            var attachment = store.DatabaseCommands.GetAttachment("Attachments/1");
            var data = ReadFully(attachment.Data());
            Assert.Equal(new byte[] { 1, 2, 3 }, data);
        }
    }
}
public async Task SmugglerShouldNotThrowIfDatabaseExist1() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { using (var store = NewRemoteDocumentStore()) { store.DatabaseCommands.ForSystemDatabase().EnsureDatabaseExists("DoesNotExist"); var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = "DoesNotExist" }); await smuggler.ImportData(options); await smuggler.ExportData(null, options, false); } } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
public async Task SmugglerShouldThrowIfDatabaseDoesNotExist() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = "DoesNotExist" }); var e = await AssertAsync.Throws <SmugglerException>(() => smuggler.ImportData(options)); Assert.Equal("Smuggler does not support database creation (database 'DoesNotExist' on server 'http://localhost:8079' must exist before running Smuggler).", e.Message); e = await AssertAsync.Throws <SmugglerException>(() => smuggler.ExportData(null, options, false)); Assert.Equal("Smuggler does not support database creation (database 'DoesNotExist' on server 'http://localhost:8079' must exist before running Smuggler).", e.Message); } } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
public void CanBackupToDirectory_MultipleBackups() { var backupPath = NewDataPath("BackupFolder"); using (var store = NewDocumentStore()) { using (var session = store.OpenSession()) { session.Store(new User { Name = "oren" }); var periodicBackupSetup = new PeriodicBackupSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 }; session.Store(periodicBackupSetup, PeriodicBackupSetup.RavenDocumentKey); session.SaveChanges(); } SpinWait.SpinUntil(() => { var jsonDocument = store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey); if (jsonDocument == null) { return(false); } var periodicBackupStatus = jsonDocument.DataAsJson.JsonDeserialization <PeriodicBackupStatus>(); return(periodicBackupStatus.LastDocsEtag != Etag.Empty && periodicBackupStatus.LastDocsEtag != null); }); var etagForBackups = store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey).Etag; using (var session = store.OpenSession()) { session.Store(new User { Name = "ayende" }); session.SaveChanges(); } SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey).Etag != etagForBackups); } using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions { BackupPath = backupPath }; var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); dataDumper.ImportData(smugglerOptions, true).Wait(); using (var session = store.OpenSession()) { Assert.Equal("oren", session.Load <User>(1).Name); Assert.Equal("ayende", session.Load <User>(2).Name); } } IOExtensions.DeleteDirectory(backupPath); }
public override async Task ImportData(Stream stream, SmugglerOptions options) { SmugglerJintHelper.Initialize(options ?? SmugglerOptions); var batchSize = options != null ? options.BatchSize : SmugglerOptions.BatchSize; using (store = CreateStore()) { Task disposeTask = null; try { operation = store.BulkInsert(options: new BulkInsertOptions { BatchSize = batchSize, CheckForUpdates = true }); operation.Report += text => ShowProgress(text); await base.ImportData(stream, options); } finally { disposeTask = operation.DisposeAsync(); } if (disposeTask != null) { await disposeTask; } } }
public async Task CanDumpEmptyDatabase_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var server = GetNewServer())
    {
        using (new DocumentStore { Url = "http://localhost:8079" }.Initialize())
        {
            // now perform full backup
            var options = new SmugglerOptions
            {
                BackupPath = backupPath,
            };
            var dumper = new DataDumper(server.Database, options);
            var backupStatus = new PeriodicBackupStatus();
            await dumper.ExportData(null, null, true, backupStatus);
        }
    }

    VerifyDump(backupPath, store => Assert.Equal(0, store.DocumentDatabase.GetDocuments(0, int.MaxValue, null, CancellationToken.None).Count()));
    IOExtensions.DeleteDirectory(backupPath);
}
public async Task CanDumpAttachmentsEmpty_Smuggler() { var backupPath = NewDataPath("BackupFolder"); using (NewRemoteDocumentStore()) { var options = new SmugglerOptions { BackupPath = backupPath, BatchSize = 100, Limit = 206 }; var dumper = new SmugglerApi(options, new RavenConnectionStringOptions { Url = "http://localhost:8079", }); var backupStatus = new PeriodicBackupStatus(); await dumper.ExportData(null, null, true, backupStatus); } VerifyDump(backupPath, store => { Assert.Equal(0, store.DatabaseCommands.GetAttachmentHeadersStartingWith("user", 0, 500).Count()); }); IOExtensions.DeleteDirectory(backupPath); }
public async Task CanPerformDump_Smuggler() { var backupPath = NewDataPath("BackupFolder"); using (var store = NewRemoteDocumentStore()) { InsertUsers(store, 0, 2000); var options = new SmugglerOptions { BackupPath = backupPath, }; var dumper = new SmugglerApi(options, new RavenConnectionStringOptions { Url = "http://localhost:8079", }); var backupStatus = new PeriodicBackupStatus(); await dumper.ExportData(null, null, true, backupStatus); } VerifyDump(backupPath, store => { using (var session = store.OpenSession()) { Assert.Equal(2000, session.Query <User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count()); } }); IOExtensions.DeleteDirectory(backupPath); }
public async Task SmugglerShouldNotThrowIfDatabaseExist2() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); await smuggler.ImportData(options); await smuggler.ExportData(null, options, false); } } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
protected async override Task<DatabaseTaskOutcome> RunImplementation()
{
    var statistics = await DatabaseCommands.GetStatisticsAsync();
    if (statistics.CountOfDocuments > 0)
    {
        ReportError("Database already contains documents");
        return DatabaseTaskOutcome.Error;
    }

    Report("Creating Sample Data, Please wait...");

    // this code assumes a small enough dataset, and doesn't do any sort
    // of paging or batching whatsoever.
    using (var sampleData = typeof(CreateSampleDataTask).Assembly.GetManifestResourceStream("Raven.Studio.Assets.EmbeddedData.Northwind.dump"))
    {
        Report("Reading documents");

        var smugglerOptions = new SmugglerOptions
        {
            OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Transformers,
            ShouldExcludeExpired = false,
        };
        var smuggler = new SmugglerApi(smugglerOptions, DatabaseCommands, s => Report(s));
        await smuggler.ImportData(sampleData, smugglerOptions);
    }

    return DatabaseTaskOutcome.Succesful;
}
public void Configure(SmugglerOptions options) { var current = options.BatchSize; var maxNumberOfItemsToProcessInSingleBatch = database.Configuration.MaxNumberOfItemsToProcessInSingleBatch; options.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); }
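// A minimal, hypothetical illustration of the clamping above: the server-side
// MaxNumberOfItemsToProcessInSingleBatch caps whatever batch size the caller asked for,
// while smaller requests pass through unchanged. The constant below stands in for the
// value read from database.Configuration in the real method.
public void ConfigureClampsBatchSizeSketch()
{
    var options = new SmugglerOptions { BatchSize = 1024 };
    const int maxNumberOfItemsToProcessInSingleBatch = 512; // stand-in for the server configuration value
    options.BatchSize = Math.Min(options.BatchSize, maxNumberOfItemsToProcessInSingleBatch);
    Assert.Equal(512, options.BatchSize); // a requested size below the limit (e.g. 128) would be left as-is
}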
protected override void PutDocument(RavenJObject document, SmugglerOptions options, int size) { if (document != null) { var metadata = document.Value <RavenJObject>("@metadata"); var key = metadata.Value <string>("@id"); document.Remove("@metadata"); bulkInsertBatch.Add(new JsonDocument { Key = key, Metadata = metadata, DataAsJson = document, }); return; } var batchToSave = new List <IEnumerable <JsonDocument> > { bulkInsertBatch }; bulkInsertBatch = new List <JsonDocument>(); database.Documents.BulkInsert(new BulkInsertOptions { BatchSize = options.BatchSize, OverwriteExisting = true }, batchToSave, Guid.NewGuid(), CancellationToken.None); }
public async Task SmugglerBehaviorWhenServerIsDown() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = "http://localhost:8078/", DefaultDatabase = "DoesNotExist" }); var e = await AssertAsync.Throws <SmugglerException>(() => smuggler.ImportData(options)); Assert.Equal("Smuggler encountered a connection problem: 'Unable to connect to the remote server'.", e.Message); e = await AssertAsync.Throws <SmugglerException>(() => smuggler.ExportData(null, options, false)); Assert.Equal("Smuggler encountered a connection problem: 'Unable to connect to the remote server'.", e.Message); } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
public SmugglerApi(SmugglerOptions smugglerOptions, IAsyncDatabaseCommands commands, Action <string> output) : base(smugglerOptions) { this.commands = commands; this.output = output; batch = new List <RavenJObject>(); }
public async Task TransformScriptFiltering() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName(), TransformScript = @"function(doc) { var id = doc['@metadata']['@id']; if(id === 'foos/1') return null; return doc; }" }; try { using (var store = NewRemoteDocumentStore()) { using (var session = store.OpenSession()) { session.Store(new Foo {Name = "N1"}); session.Store(new Foo {Name = "N2"}); session.SaveChanges(); } var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); await smugglerApi.ExportData(null, options, false); } using (var documentStore = NewRemoteDocumentStore()) { var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions { Url = documentStore.Url }); await smugglerApi.ImportData(options); using (var session = documentStore.OpenSession()) { var foos = session.Query<Foo>() .Customize(customization => customization.WaitForNonStaleResultsAsOfNow()) .ToList(); Assert.Equal(1, foos.Count); Assert.Equal("foos/2", foos[0].Id); Assert.Equal("N2", foos[0].Name); } } } finally { if (File.Exists(options.BackupPath)) { File.Delete(options.BackupPath); } } }
private async Task <DatabaseTaskOutcome> ExecuteInternal() { if (includeDocuments == false && includeAttachements == false && includeIndexes == false && includeTransformers == false) { return(DatabaseTaskOutcome.Abandoned); } var openFile = new OpenFileDialog { Filter = "Raven Dumps|*.ravendump;*.raven.dump", }; if (openFile.ShowDialog() != true) { return(DatabaseTaskOutcome.Abandoned); } Report(String.Format("Importing from {0}", openFile.File.Name)); using (var stream = openFile.File.OpenRead()) { ItemType operateOnTypes = 0; if (includeDocuments) { operateOnTypes |= ItemType.Documents; } if (includeAttachements) { operateOnTypes |= ItemType.Attachments; } if (includeIndexes) { operateOnTypes |= ItemType.Indexes; } if (includeTransformers) { operateOnTypes |= ItemType.Transformers; } var smugglerOptions = new SmugglerOptions { BatchSize = batchSize, Filters = filterSettings, TransformScript = transformScript, ShouldExcludeExpired = shouldExcludeExpired, OperateOnTypes = operateOnTypes }; var smuggler = new SmugglerApi(smugglerOptions, DatabaseCommands, message => Report(message)); await smuggler.ImportData(stream, smugglerOptions); } return(DatabaseTaskOutcome.Succesful); }
public void NegativeMetadataFiltersShouldNotFilterOutWhenThereAreNoMatches() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName(), Filters = new EquatableList <FilterSetting> { new FilterSetting { Path = "@metadata.Raven-Entity-Name", ShouldMatch = false, Values = new EquatableList <string> { "Products" } } } }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); smuggler.ExportData(null, options, false).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); smuggler.ImportData(options).Wait(TimeSpan.FromSeconds(15)); Assert.NotNull(store.DatabaseCommands.Get("key/1")); using (var session = store.OpenSession()) { var product1 = session.Load <Product>(1); var product2 = session.Load <Product>(2); var product3 = session.Load <Product>(3); Assert.Null(product1); Assert.Null(product2); Assert.Null(product3); } } } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
public void CanBackupToDirectory_MultipleBackups() { var backupPath = GetPath("BackupFolder"); using (var store = NewDocumentStore()) { Guid?etagForBackups; using (var session = store.OpenSession()) { session.Store(new User { Name = "oren" }); var periodicBackupSetup = new PeriodicBackupSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 }; session.Store(periodicBackupSetup, PeriodicBackupSetup.RavenDocumentKey); session.SaveChanges(); etagForBackups = session.Advanced.GetEtagFor(periodicBackupSetup); } SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag != etagForBackups); etagForBackups = store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag; using (var session = store.OpenSession()) { session.Store(new User { Name = "ayende" }); session.SaveChanges(); } SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag != etagForBackups); } using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions { BackupPath = backupPath }; var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); dataDumper.ImportData(smugglerOptions, true); using (var session = store.OpenSession()) { Assert.Equal("oren", session.Load <User>(1).Name); Assert.Equal("ayende", session.Load <User>(2).Name); } } IOExtensions.DeleteDirectory(backupPath); }
public void NegativeFiltersShouldNotFilterOutWhenThereAreNoMatches() { var path = Path.GetTempFileName(); var options = new SmugglerOptions { Filters = new EquatableList<FilterSetting> { new FilterSetting { Path = "Value", ShouldMatch = false, Values = new EquatableList<string> { "Value1" } } } }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerApi(); smuggler.ExportData(new SmugglerExportOptions { ToFile = path, From = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }, options).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(); smuggler.ImportData(new SmugglerImportOptions { FromFile = path, To = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }, options).Wait(TimeSpan.FromSeconds(15)); Assert.NotNull(store.DatabaseCommands.Get("key/1")); using (var session = store.OpenSession()) { var product1 = session.Load<Product>(1); var product2 = session.Load<Product>(2); var product3 = session.Load<Product>(3); Assert.Null(product1); Assert.Null(product2); Assert.NotNull(product3); } } } finally { IOExtensions.DeleteDirectory(path); } }
public void SmugglerWithExcludeExpiredDocumentsShouldWork2() { var path = Path.GetTempFileName(); var options = new SmugglerOptions { ShouldExcludeExpired = true }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerApi(); smuggler.ExportData(new SmugglerExportOptions { ToFile = path, From = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }, options).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { SystemTime.UtcDateTime = () => DateTime.Now.AddMinutes(10); var smuggler = new SmugglerApi(); smuggler.ImportData(new SmugglerImportOptions { FromFile = path, To = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }, options).Wait(TimeSpan.FromSeconds(15)); using (var session = store.OpenSession()) { var product1 = session.Load <Product>(1); var product2 = session.Load <Product>(2); var product3 = session.Load <Product>(3); Assert.NotNull(product1); Assert.Null(product2); Assert.Null(product3); } } } finally { IOExtensions.DeleteDirectory(path); } }
public async Task CanPerformDumpWithLimitAndFilter_Smuggler() { var backupPath = NewDataPath("BackupFolder"); using (var store = NewRemoteDocumentStore()) { var counter = 0; counter = InsertUsers(store, counter, 1000); counter = InsertDevelopers(store, counter, 2); counter = InsertUsers(store, counter, 1000); InsertDevelopers(store, counter, 2); WaitForIndexing(store); var options = new SmugglerOptions { Limit = 5, Incremental = true, Filters = { new FilterSetting { Path = "@metadata.Raven-Entity-Name", Values = { "Developers" }, ShouldMatch = true, } } }; var dumper = new SmugglerApi(); await dumper.ExportData(new SmugglerExportOptions { ToFile = backupPath, From = new RavenConnectionStringOptions { Url = "http://localhost:8079", DefaultDatabase = store.DefaultDatabase, } }, options); } VerifyDump(backupPath, store => { using (var session = store.OpenSession()) { Assert.Equal(4, session.Query <Developer>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count()); } }); IOExtensions.DeleteDirectory(backupPath); }
public void DateTimePreserved() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { var docId = string.Empty; using (var documentStore = NewRemoteDocumentStore()) { using (var session = documentStore.OpenSession()) { var foo = new Foo { Created = DateTime.Today }; session.Store(foo); docId = foo.Id; session.SaveChanges(); } var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions() { Url = documentStore.Url }); smugglerApi.ExportData(options); } using (var documentStore = NewRemoteDocumentStore()) { var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions() { Url = documentStore.Url }); smugglerApi.ImportData(options); using (var session = documentStore.OpenSession()) { var created = session.Load <Foo>(docId).Created; Assert.False(session.Advanced.HasChanges); } } } finally { if (File.Exists(options.BackupPath)) { File.Delete(options.BackupPath); } } }
public void ImportReplacesAnExistingDatabase()
{
    var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() };
    using (var store = NewDocumentStoreWithData())
    {
        var dumper = new DataDumper(store.DocumentDatabase, options);
        dumper.ExportData(options);

        using (var session = store.OpenSession())
        {
            var person = session.Load<Person>(1);
            person.Name = "Sean Kearon";

            session.Store(new Person { Name = "Gillian" });

            store.DatabaseCommands.DeleteAttachment("Attachments/1", null);
            store.DatabaseCommands.PutAttachment(
                "Attachments/2",
                null,
                new MemoryStream(new byte[] { 1, 2, 3, 4, 5, 6 }),
                new RavenJObject { { "Description", "This is another attachment." } });

            session.SaveChanges();
        }

        new DataDumper(store.DocumentDatabase, options).ImportData(options);

        using (var session = store.OpenSession())
        {
            // Original attachment has been restored.
            Assert.NotNull(store.DatabaseCommands.GetAttachment("Attachments/1"));

            // The newly added attachment is still there.
            Assert.NotNull(store.DatabaseCommands.GetAttachment("Attachments/2"));

            // Original person has been restored.
            Assert.NotNull(session.Query<Person, PeopleByName>().Customize(x => x.WaitForNonStaleResults()).Single(x => x.Name == "Sean"));

            // The newly added person has not been removed.
            Assert.True(session.Query<Person, PeopleByName>().Customize(x => x.WaitForNonStaleResults()).Any(x => x.Name == "Gillian"));
        }
    }
}
private void VerifyDump(string backupPath, Action <EmbeddableDocumentStore> action) { using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions { BackupPath = backupPath }; var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); dataDumper.ImportData(smugglerOptions, true).Wait(); action(store); } }
/// <summary>
///
/// </summary>
/// <param name="jsonWriter"></param>
/// <param name="options"></param>
/// <param name="result"></param>
/// <param name="maxEtags">Max etags are inclusive</param>
protected async override void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtags)
{
    jsonWriter.WritePropertyName("DocsDeletions");
    jsonWriter.WriteStartArray();
    result.LastDocDeleteEtag = await ExportDocumentsDeletion(options, jsonWriter, result.LastDocDeleteEtag, maxEtags.LastDocDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();

    jsonWriter.WritePropertyName("AttachmentsDeletions");
    jsonWriter.WriteStartArray();
    result.LastAttachmentsDeleteEtag = await ExportAttachmentsDeletion(options, jsonWriter, result.LastAttachmentsDeleteEtag, maxEtags.LastAttachmentsDeleteEtag.IncrementBy(1));
    jsonWriter.WriteEndArray();
}
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    var streamProvider = new MultipartMemoryStreamProvider();
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var fileStream = await streamProvider.Contents
        .First(c => c.Headers.ContentDisposition.Name == "\"file\"")
        .ReadAsStreamAsync();

    var dataDumper = new DataDumper(Database);
    var importOptions = new SmugglerImportOptions
    {
        FromStream = fileStream
    };
    var options = new SmugglerOptions
    {
        BatchSize = batchSize,
        ShouldExcludeExpired = includeExpiredDocuments,
        OperateOnTypes = operateOnTypes,
        TransformScript = transformScript
    };

    // Filters are passed in without the aid of the model binder. Instead, we pass in a list of FilterSettings using a string like this:
    // pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
    // Why? Because I don't see a way to pass a list of values to a WebAPI method that accepts a file upload,
    // outside of passing in a simple string value and parsing it ourselves.
    if (filtersPipeDelimited != null)
    {
        options.Filters.AddRange(filtersPipeDelimited
            .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
            .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
            .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
    }

    await dataDumper.ImportData(importOptions, options);

    return GetEmptyMessage();
}
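// For reference, a hypothetical filtersPipeDelimited value in the format described above
// ("path;;;value;;;shouldMatch", entries separated by "|||") and the FilterSettings it parses into,
// using the same Split/Select logic as the controller action; the concrete paths and values here
// are illustrative only.
public void FilterStringParsingSketch()
{
    var filtersPipeDelimited = "@metadata.Raven-Entity-Name;;;Users;;;true|||Name;;;Ayende;;;false";

    var filters = filtersPipeDelimited
        .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
        .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
        .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) })
        .ToList();

    Assert.Equal(2, filters.Count);
    Assert.Equal("@metadata.Raven-Entity-Name", filters[0].Path);
    Assert.True(filters[0].ShouldMatch);   // keep only matching documents
    Assert.False(filters[1].ShouldMatch);  // exclude matching documents
}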
public static void Initialize(SmugglerOptions options) { if (options != null && !string.IsNullOrEmpty(options.TransformScript)) { jint = new JintEngine() .AllowClr(false) .SetDebugMode(false) .SetMaxRecursions(50) .SetMaxSteps(10 * 1000); jint.Run(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};" , options.TransformScript)); } }
public void SmugglerWithExcludeExpiredDocumentsShouldWork1() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName(), ShouldExcludeExpired = true }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); smuggler.ExportData(null, options, false).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); smuggler.ImportData(options).Wait(TimeSpan.FromSeconds(15)); using (var session = store.OpenSession()) { var product1 = session.Load <Product>(1); var product2 = session.Load <Product>(2); var product3 = session.Load <Product>(3); Assert.NotNull(product1); Assert.Null(product2); Assert.NotNull(product3); } } } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
public void CanExportImportTransformers() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { using (var documentStore = NewRemoteDocumentStore()) { new ProductWithQueryInput().Execute(documentStore); var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions { Url = documentStore.Url }); smugglerApi.ExportData(null, options, false).Wait(TimeSpan.FromSeconds(15)); } using (var documentStore = NewRemoteDocumentStore()) { var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions { Url = documentStore.Url }); smugglerApi.ImportData(options).Wait(TimeSpan.FromSeconds(15)); var transformers = documentStore.DatabaseCommands.GetTransformers(0, 128); Assert.NotNull(transformers); Assert.Equal(1, transformers.Length); Assert.Equal("ProductWithQueryInput", transformers[0].Name); } } finally { if (File.Exists(options.BackupPath)) { File.Delete(options.BackupPath); } } }
public static void Initialize(SmugglerOptions options) { if (options != null && !string.IsNullOrEmpty(options.TransformScript)) { jint = new JintEngine() .AllowClr(false) .SetDebugMode(false) .SetMaxRecursions(50) .SetMaxSteps(options.MaxStepsForTransformScript); jint.Run(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};" , options.TransformScript)); } propertiesByValue = new Dictionary <JsInstance, KeyValuePair <RavenJValue, object> >(); }
public static void Initialize(SmugglerOptions options) { if (options != null && !string.IsNullOrEmpty(options.TransformScript)) { jint = new JintEngine() .AllowClr(false) .SetDebugMode(false) .SetMaxRecursions(50) .SetMaxSteps(options.MaxStepsForTransformScript); jint.Run(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};" , options.TransformScript)); } propertiesTypeByName = new Dictionary <string, JTokenType>(); }
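// The TransformScript wrapped by the Initialize overloads above is a plain JavaScript function
// over each document. The two scripts below are the ones used by the TransformScriptFiltering
// and TransformScriptModifying tests elsewhere in this listing: returning null drops the
// document, returning a modified object replaces it.
public void TransformScriptExamplesSketch()
{
    var filtering = new SmugglerOptions
    {
        TransformScript = @"function(doc) {
            var id = doc['@metadata']['@id'];
            if(id === 'foos/1')
                return null;
            return doc;
        }"
    };

    var modifying = new SmugglerOptions
    {
        TransformScript = @"function(doc) {
            doc['Name'] = 'Changed';
            return doc;
        }"
    };
}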
protected Task <Etag> ExportAttachmentsDeletion(SmugglerOptions options, JsonTextWriter jsonWriter, Etag startAttachmentsDeletionEtag, Etag maxAttachmentEtag) { var lastEtag = startAttachmentsDeletionEtag; database.TransactionalStorage.Batch(accessor => { foreach (var listItem in accessor.Lists.Read(Constants.RavenPeriodicExportsAttachmentsTombstones, startAttachmentsDeletionEtag, maxAttachmentEtag, int.MaxValue)) { var o = new RavenJObject { { "Key", listItem.Key } }; o.WriteTo(jsonWriter); lastEtag = listItem.Etag; } }); return(new CompletedTask <Etag>(lastEtag)); }
public void SmugglerWithExcludeExpiredDocumentsShouldWork1() { var path = Path.GetTempFileName(); var options = new SmugglerOptions { ShouldExcludeExpired = true }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerApi(); smuggler.ExportData(new SmugglerExportOptions { ToFile = path, From = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }, options).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(); smuggler.ImportData(new SmugglerImportOptions { FromFile = path, To = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }, options).Wait(TimeSpan.FromSeconds(15)); using (var session = store.OpenSession()) { var product1 = session.Load<Product>(1); var product2 = session.Load<Product>(2); var product3 = session.Load<Product>(3); Assert.NotNull(product1); Assert.Null(product2); Assert.NotNull(product3); } } } finally { IOExtensions.DeleteDirectory(path); } }
public void Sample()
{
    #region smuggler-api
    var smugglerOptions = new SmugglerOptions { };

    var connectionStringOptions = new RavenConnectionStringOptions
    {
        ApiKey = "ApiKey",
        Credentials = new NetworkCredential("username", "password", "domain"),
        DefaultDatabase = "database",
        Url = "http://localhost:8080",
    };

    var smugglerApi = new SmugglerApi(smugglerOptions, connectionStringOptions);

    smugglerApi.ExportData(
        null,
        new SmugglerOptions
        {
            BackupPath = "dump.raven",
            OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Attachments | ItemType.Transformers
        },
        incremental: false);

    smugglerApi.ImportData(new SmugglerOptions
    {
        BackupPath = "dump.raven",
        OperateOnTypes = ItemType.Documents | ItemType.Indexes
    });
    #endregion
}
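// Note: the sample above uses the overloads that read BackupPath from SmugglerOptions.
// Several snippets in this listing (e.g. the ShouldExcludeExpired tests) use the later API shape
// with explicit SmugglerExportOptions/SmugglerImportOptions instead; a minimal sketch of that form,
// assuming the same server URL and database name as the sample:
public void SampleWithExplicitExportImportOptions()
{
    var smuggler = new SmugglerApi();

    smuggler.ExportData(new SmugglerExportOptions
    {
        ToFile = "dump.raven",
        From = new RavenConnectionStringOptions { Url = "http://localhost:8080", DefaultDatabase = "database" }
    }, new SmugglerOptions()).Wait();

    smuggler.ImportData(new SmugglerImportOptions
    {
        FromFile = "dump.raven",
        To = new RavenConnectionStringOptions { Url = "http://localhost:8080", DefaultDatabase = "database" }
    }, new SmugglerOptions()).Wait();
}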
private Program()
{
    connectionStringOptions = new RavenConnectionStringOptions();
    options = new SmugglerOptions();

    optionSet = new OptionSet
    {
        {
            "operate-on-types:", "Specify the types to operate on. You can specify more than one type by combining items with a comma." + Environment.NewLine +
                                 "Default is all items." + Environment.NewLine +
                                 "Usage example: Indexes,Documents,Attachments",
            value =>
            {
                try
                {
                    options.OperateOnTypes = (ItemType)options.ItemTypeParser(value);
                }
                catch (Exception e)
                {
                    PrintUsageAndExit(e);
                }
            }
        },
        {
            "metadata-filter:{=}", "Filter documents by a metadata property." + Environment.NewLine +
                                   "Usage example: Raven-Entity-Name=Posts",
            (key, val) => options.Filters["@metadata." + key] = val
        },
        {
            "filter:{=}", "Filter documents by a document property" + Environment.NewLine +
                          "Usage example: Property-Name=Value",
            (key, val) => options.Filters[key] = val
        },
        {"d|database:", "The database to operate on. If not specified, the operations will be on the default database.", value => connectionStringOptions.DefaultDatabase = value},
        {"u|user|username:", "The username to use when the database requires the client to authenticate.", value => Credentials.UserName = value},
        {"p|pass|password:", "The password to use when the database requires the client to authenticate.", value => Credentials.Password = value},
        {"domain:", "The domain to use when the database requires the client to authenticate.", value => Credentials.Domain = value},
        {"key|api-key|apikey:", "The API-key to use, when using OAuth.", value => connectionStringOptions.ApiKey = value},
        {"incremental", "States usage of incremental operations", _ => incremental = true},
        {"h|?|help", v => PrintUsageAndExit(0)},
    };
}
public void SmugglerWithoutExcludeExpiredDocumentsShouldWork() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); smuggler.ExportData(null, options, false).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); smuggler.ImportData(options).Wait(TimeSpan.FromSeconds(15)); using (var session = store.OpenSession()) { var product1 = session.Load<Product>(1); var product2 = session.Load<Product>(2); var product3 = session.Load<Product>(3); Assert.NotNull(product1); Assert.Null(product2); Assert.NotNull(product3); } } } finally { IOExtensions.DeleteDirectory(options.BackupPath); } }
public void DateTimePreserved() { var options = new SmugglerOptions {BackupPath = Path.GetTempFileName()}; try { var docId = string.Empty; using (var documentStore = NewRemoteDocumentStore()) { using (var session = documentStore.OpenSession()) { var foo = new Foo {Created = DateTime.Today}; session.Store(foo); docId = foo.Id; session.SaveChanges(); } var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions() {Url = documentStore.Url}); smugglerApi.ExportData(null, options, false).Wait(TimeSpan.FromSeconds(15)); } using (var documentStore = NewRemoteDocumentStore()) { var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions() {Url = documentStore.Url}); smugglerApi.ImportData(options).Wait(TimeSpan.FromSeconds(15)); using (var session = documentStore.OpenSession()) { var created = session.Load<Foo>(docId).Created; Assert.False(session.Advanced.HasChanges); } } } finally { if (File.Exists(options.BackupPath)) { File.Delete(options.BackupPath); } } }
public async Task CanDumpWhenHiddenDocsWithLimit_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    using (GetNewServer())
    {
        using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize())
        {
            InsertHidenUsers(store, 2000);

            var user1 = store.DatabaseCommands.Get("users/1");
            Assert.Null(user1);

            InsertUsers(store, 1, 25);

            // now perform full backup
            var options = new SmugglerOptions
            {
                BackupPath = backupPath,
            };
            var dumper = new SmugglerApi(options, new RavenConnectionStringOptions
            {
                Url = "http://localhost:8079",
            });
            var backupStatus = new PeriodicBackupStatus();
            await dumper.ExportData(null, null, true, backupStatus);
        }
    }

    VerifyDump(backupPath, store =>
    {
        using (var session = store.OpenSession())
        {
            Assert.Equal(25, session.Query<User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
        }
    });

    IOExtensions.DeleteDirectory(backupPath);
}
public async Task CanDumpEmptyDatabase_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    using (NewRemoteDocumentStore())
    {
        // now perform full backup
        var options = new SmugglerOptions
        {
            BackupPath = backupPath,
        };
        var dumper = new SmugglerApi(options, new RavenConnectionStringOptions
        {
            Url = "http://localhost:8079",
        });
        var backupStatus = new PeriodicBackupStatus();
        await dumper.ExportData(null, null, true, backupStatus);
    }

    VerifyDump(backupPath, store => Assert.Equal(0, store.DocumentDatabase.GetDocuments(0, int.MaxValue, null, CancellationToken.None).Count()));
    IOExtensions.DeleteDirectory(backupPath);
}
public override async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream) { SetSmugglerOptions(options); SmugglerJintHelper.Initialize(options); using (store = CreateStore(importOptions.To)) { Task disposeTask; try { operation = new ChunkedBulkInsertOperation(store.DefaultDatabase, store, store.Listeners, new BulkInsertOptions { BatchSize = options.BatchSize, OverwriteExisting = true }, store.Changes(), options.ChunkSize); operation.Report += text => ShowProgress(text); await base.ImportData(importOptions, options, stream); } finally { disposeTask = operation.DisposeAsync(); } if (disposeTask != null) { await disposeTask; } } }
public new Task<Etag> ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Etag lastEtag, Etag maxEtag) { return base.ExportDocuments(new RavenConnectionStringOptions(), options, jsonWriter, lastEtag, maxEtag); }
public async Task CanHandleDocumentExceptionsGracefully_Smuggler() { var backupPath = NewDataPath("BackupFolder"); var server = GetNewServer(); var alreadyReset = false; var forwarder = new PortForwarder(8070, 8079, (totalRead, bytes, offset, count) => { if (alreadyReset == false && totalRead > 10000) { alreadyReset = true; return true; } return false; }); forwarder.Forward(); try { using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize()) { InsertUsers(store, 0, 2000); } var options = new SmugglerOptions { Limit = 1500, BackupPath = backupPath, }; var dumper = new SmugglerApi(options, new RavenConnectionStringOptions { Url = "http://localhost:8070", }); var allDocs = new List<RavenJObject>(); var memoryStream = new MemoryStream(); Assert.Throws<AggregateException>(() => dumper.ExportData(memoryStream, null, true).Wait()); memoryStream.Position = 0; using (var stream = new GZipStream(memoryStream, CompressionMode.Decompress)) { var chunk1 = RavenJToken.TryLoad(stream) as RavenJObject; var doc1 = chunk1["Docs"] as RavenJArray; allDocs.AddRange(doc1.Values<RavenJObject>()); } var memoryStream2 = new MemoryStream(); await dumper.ExportData(memoryStream2, null, true); memoryStream2.Position = 0; using (var stream = new GZipStream(memoryStream2, CompressionMode.Decompress)) { var chunk2 = RavenJToken.TryLoad(stream) as RavenJObject; var doc2 = chunk2["Docs"] as RavenJArray; allDocs.AddRange(doc2.Values<RavenJObject>()); } Assert.Equal(2000, allDocs.Count(d => (d.Value<string>("Name") ?? String.Empty).StartsWith("User"))); IOExtensions.DeleteDirectory(backupPath); } finally { forwarder.Stop(); server.Dispose(); } }
public void Previously_deleted_docs_will_survive_export_import_cycle_if_purge_is_false() { using (var session = documentStore.OpenSession()) { session.Store(new VersioningConfiguration { Exclude = false, PurgeOnDelete = false, Id = "Raven/Versioning/Companies", MaxRevisions = 5 }); session.SaveChanges(); } var company = new Company { Id = "companies/1", Name = "Company Name" }; using (var session = documentStore.OpenSession()) { session.Store(company); session.SaveChanges(); company.Name = "Company Name 2"; session.SaveChanges(); } using (var session = documentStore.OpenSession()) { var doc = session.Load<Company>("companies/1"); Assert.Equal(2, session.Advanced.GetMetadataFor(doc).Value<int>("Raven-Document-Revision")); session.Delete(doc); session.SaveChanges(); } var options = new SmugglerOptions { BackupPath = Path.GetTempFileName() }; try { var exportSmuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = documentStore.Url }); exportSmuggler.ExportData(options); using (CreateRavenDbServer(port: 8078)) using (var documentStore2 = CreateDocumentStore(port: 8078)) { var importSmuggler = new SmugglerApi(options, new RavenConnectionStringOptions { Url = documentStore2.Url }); importSmuggler.ImportData(options); using (var session = documentStore2.OpenSession()) { session.Store(company); session.SaveChanges(); Assert.Equal(3, session.Advanced.GetMetadataFor(company).Value<int>("Raven-Document-Revision")); } using (var session = documentStore2.OpenSession()) { var doc = session.Load<Company>("companies/1"); doc.Name = "Company Name 3"; session.SaveChanges(); Assert.Equal(4, session.Advanced.GetMetadataFor(doc).Value<int>("Raven-Document-Revision")); } } } finally { if (File.Exists(options.BackupPath)) { File.Delete(options.BackupPath); } } }
public override async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus lastEtag) { using (store = CreateStore()) { return await base.ExportData(stream, options, incremental, lastEtagsFromFile, lastEtag); } }
public override async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, PeriodicBackupStatus backupStatus = null) { using (store = CreateStore()) { return await base.ExportData(stream, options, incremental, backupStatus); } }
public async Task CanDumpAttachmentsEmpty_Dumper() { var backupPath = NewDataPath("BackupFolder"); using (var store = NewDocumentStore()) { var options = new SmugglerOptions { BackupPath = backupPath, BatchSize = 100, Limit = 206 }; var dumper = new DataDumper(store.DocumentDatabase, options); var backupStatus = new PeriodicBackupStatus(); await dumper.ExportData(null, null, true, backupStatus); } VerifyDump(backupPath, store => { Assert.Equal(0, store.DatabaseCommands.GetAttachmentHeadersStartingWith("user", 0, 500).Count()); }); IOExtensions.DeleteDirectory(backupPath); }
public async Task CanHandleAttachmentExceptionsGracefully_Smuggler()
{
    var backupPath = NewDataPath("BackupFolder");
    var server = GetNewServer();

    var resetCount = 0;
    var forwarder = new PortForwarder(8070, 8079, (totalRead, bytes, offset, count) =>
    {
        var payload = System.Text.Encoding.UTF8.GetString(bytes, offset, count);
        // reset count is required as raven can retry attachment download
        if (payload.Contains("GET /static/users/678 ") && resetCount < 5)
        {
            resetCount++;
            return true;
        }
        return false;
    });
    forwarder.Forward();

    try
    {
        using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize())
        {
            InsertAttachments(store, 2000);
        }

        var options = new SmugglerOptions
        {
            Limit = 1500,
            BackupPath = backupPath,
        };
        var dumper = new SmugglerApi(options, new RavenConnectionStringOptions
        {
            Url = "http://localhost:8070",
        });

        var allAttachments = new List<RavenJObject>();

        var memoryStream = new MemoryStream();
        Assert.Throws<AggregateException>(() => dumper.ExportData(memoryStream, null, true).Wait());
        memoryStream.Position = 0;
        using (var stream = new GZipStream(memoryStream, CompressionMode.Decompress))
        {
            var chunk1 = RavenJToken.TryLoad(stream) as RavenJObject;
            var att1 = chunk1["Attachments"] as RavenJArray;
            allAttachments.AddRange(att1.Values<RavenJObject>());
        }

        var memoryStream2 = new MemoryStream();
        await dumper.ExportData(memoryStream2, null, true);
        memoryStream2.Position = 0;
        using (var stream = new GZipStream(memoryStream2, CompressionMode.Decompress))
        {
            var chunk2 = RavenJToken.TryLoad(stream) as RavenJObject;
            var attr2 = chunk2["Attachments"] as RavenJArray;
            allAttachments.AddRange(attr2.Values<RavenJObject>());
        }

        Assert.Equal(2000, allAttachments.Count());

        IOExtensions.DeleteDirectory(backupPath);
    }
    finally
    {
        forwarder.Stop();
        server.Dispose();
    }
}
public async Task CanPerformDumpWithLimit_Dumper() { var backupPath = NewDataPath("BackupFolder"); using (var store = NewDocumentStore()) { InsertUsers(store, 0, 2000); var options = new SmugglerOptions { Limit = 1500, BackupPath = backupPath, Filters = { new FilterSetting { Path = "@metadata.Raven-Entity-Name", Values = {"Users"}, ShouldMatch = true, } } }; var dumper = new DataDumper(store.DocumentDatabase, options); var backupStatus = new PeriodicBackupStatus(); await dumper.ExportData(null, null, true, backupStatus); } VerifyDump(backupPath, store => { using (var session = store.OpenSession()) { Assert.Equal(1500, session.Query<User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count()); } }); IOExtensions.DeleteDirectory(backupPath); }
public new void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtags) { base.ExportDeletions(jsonWriter, options, result, maxEtags); }
public async Task TransformScriptModifying() { var options = new SmugglerOptions { BackupPath = Path.GetTempFileName(), TransformScript = @"function(doc) { doc['Name'] = 'Changed'; return doc; }" }; try { using (var store = NewRemoteDocumentStore()) { using (var session = store.OpenSession()) { session.Store(new Foo { Name = "N1" }); session.Store(new Foo { Name = "N2" }); session.SaveChanges(); } var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); await smugglerApi.ExportData(null, options, false); } using (var store = NewRemoteDocumentStore()) { var smugglerApi = new SmugglerApi(options, new RavenConnectionStringOptions { Url = store.Url }); await smugglerApi.ImportData(options); using (var session = store.OpenSession()) { var foos = session.Query<Foo>() .Customize(customization => customization.WaitForNonStaleResultsAsOfNow()) .ToList(); Assert.Equal(2, foos.Count); foreach (var foo in foos) { Assert.Equal("Changed", foo.Name); } } } } finally { if (File.Exists(options.BackupPath)) { File.Delete(options.BackupPath); } } }
public async Task CanPerformDumpWithLimitAndFilter_Smuggler() { var backupPath = NewDataPath("BackupFolder"); using (var store = NewRemoteDocumentStore()) { var counter = 0; counter = InsertUsers(store, counter, 1000); counter = InsertDevelopers(store, counter, 2); counter = InsertUsers(store, counter, 1000); InsertDevelopers(store, counter, 2); WaitForIndexing(store); var options = new SmugglerOptions { Limit = 5, BackupPath = backupPath, Filters = { new FilterSetting { Path = "@metadata.Raven-Entity-Name", Values = {"Developers"}, ShouldMatch = true, } } }; var dumper = new SmugglerApi(options, new RavenConnectionStringOptions { Url = "http://localhost:8079", }); var backupStatus = new PeriodicBackupStatus(); await dumper.ExportData(null, null, true, backupStatus); } VerifyDump(backupPath, store => { using (var session = store.OpenSession()) { Assert.Equal(4, session.Query<Developer>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count()); } }); IOExtensions.DeleteDirectory(backupPath); }
public SmugglerApi(SmugglerOptions smugglerOptions, RavenConnectionStringOptions connectionStringOptions) : base(smugglerOptions) { ConnectionStringOptions = connectionStringOptions; }