public void Configure(SmugglerDatabaseOptions databaseOptions) { if (Store.HasJsonRequestFactory == false) { return; } var url = Store.Url.ForDatabase(Store.DefaultDatabase) + "/debug/config"; try { using (var request = Store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, HttpMethods.Get, Store.DatabaseCommands.PrimaryCredentials, Store.Conventions))) { var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value <int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) { return; } var current = databaseOptions.BatchSize; databaseOptions.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); } } catch (ErrorResponseException e) { if (e.StatusCode == HttpStatusCode.Forbidden) // let it continue with the user defined batch size { return; } throw; } }
private static int GetBatchSize(DocumentStore store, SmugglerDatabaseOptions databaseOptions) { if (store.HasJsonRequestFactory == false) { return(databaseOptions.BatchSize); } var url = store.Url.ForDatabase(store.DefaultDatabase) + "/debug/config"; try { using (var request = store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", store.DatabaseCommands.PrimaryCredentials, store.Conventions))) { var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value <int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) { return(databaseOptions.BatchSize); } return(Math.Min(databaseOptions.BatchSize, maxNumberOfItemsToProcessInSingleBatch)); } } catch (ErrorResponseException e) { if (e.StatusCode == HttpStatusCode.Forbidden) // let it continue with the user defined batch size { return(databaseOptions.BatchSize); } throw; } }
public void Configure(SmugglerDatabaseOptions databaseOptions) { var current = databaseOptions.BatchSize; var maxNumberOfItemsToProcessInSingleBatch = database.Configuration.MaxNumberOfItemsToProcessInSingleBatch; databaseOptions.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); }
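All three Configure variants above reduce to the same rule: take the smaller of the caller's BatchSize and the server's MaxNumberOfItemsToProcessInSingleBatch, treating a non-positive server value as "no limit advertised". A minimal standalone sketch of that rule (the helper name is hypothetical, not part of the API):

static int ClampBatchSize(int requestedBatchSize, int serverMaxBatchSize)
{
    // A non-positive server maximum means the server advertised no limit,
    // so the caller's requested batch size wins.
    if (serverMaxBatchSize <= 0) return requestedBatchSize;
    return Math.Min(requestedBatchSize, serverMaxBatchSize);
}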
public void NegativeFiltersShouldNotFilterOutWhenThereAreNoMatches() { var path = Path.GetTempFileName(); var options = new SmugglerDatabaseOptions { Filters = new EquatableList<FilterSetting> { new FilterSetting { Path = "Value", ShouldMatch = false, Values = new EquatableList<string> {"Value1"} } } }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerDatabaseApi(options); smuggler.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions> { ToFile = path, From = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerDatabaseApi(options); smuggler.ImportData(new SmugglerImportOptions<RavenConnectionStringOptions> { FromFile = path, To = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }).Wait(TimeSpan.FromSeconds(15)); Assert.NotNull(store.DatabaseCommands.Get("key/1")); using (var session = store.OpenSession()) { var product1 = session.Load<Product>(1); var product2 = session.Load<Product>(2); var product3 = session.Load<Product>(3); Assert.Null(product1); Assert.Null(product2); Assert.NotNull(product3); } } } finally { IOExtensions.DeleteDirectory(path); } }
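The assertions above rely on negative-filter semantics: a FilterSetting with ShouldMatch = false keeps only documents whose property matches none of the listed values. A hedged sketch of that predicate (an illustration of the semantics, not the library's actual implementation):

static bool PassesFilter(string documentValue, bool shouldMatch, IEnumerable<string> values)
{
    // ShouldMatch = true  -> keep the document only when some value matches.
    // ShouldMatch = false -> keep the document only when no value matches.
    var anyMatch = values.Any(v => string.Equals(v, documentValue, StringComparison.OrdinalIgnoreCase));
    return shouldMatch ? anyMatch : !anyMatch;
}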
public HttpResponseMessage ValidateExportOptions([FromBody] SmugglerDatabaseOptions smugglerOptions) { try { new SmugglerJintHelper().Initialize(smugglerOptions); } catch (Exception e) { throw new InvalidDataException("Incorrect transform script", e); } return(GetEmptyMessage(HttpStatusCode.NoContent)); }
public void Initialize(SmugglerDatabaseOptions databaseOptions) { if (databaseOptions == null || string.IsNullOrEmpty(databaseOptions.TransformScript)) return; jint = new Engine(cfg => { cfg.AllowDebuggerStatement(false); cfg.MaxStatements(databaseOptions.MaxStepsForTransformScript); }); jint.Execute(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};", databaseOptions.TransformScript)); }
public void Initialize(SmugglerDatabaseOptions databaseOptions) { if (databaseOptions == null || string.IsNullOrEmpty(databaseOptions.TransformScript)) { return; } jint = new Engine(cfg => { cfg.AllowDebuggerStatement(false); cfg.MaxStatements(databaseOptions.MaxStepsForTransformScript); }); jint.Execute(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};" , databaseOptions.TransformScript)); }
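Once Initialize has wrapped the user script in a Transform(docInner) function, applying it per document is presumably a single engine call. A hedged sketch against the Jint 2.x API used here (the script and document literal are illustrative):

var engine = new Engine(cfg => cfg.MaxStatements(10000));
engine.Execute("function Transform(docInner){ docInner.Reviewed = true; return docInner; };");
// Invoke the wrapper for one document and read back the completion value.
var transformed = engine.Execute("Transform({ Name: 'demo' })").GetCompletionValue();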
private static int GetBatchSize(DocumentStore store, SmugglerDatabaseOptions databaseOptions) { if (store.HasJsonRequestFactory == false) { return(databaseOptions.BatchSize); } var url = store.Url.ForDatabase(store.DefaultDatabase) + "/debug/config"; var request = store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", store.DatabaseCommands.PrimaryCredentials, store.Conventions)); var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value <int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) { return(databaseOptions.BatchSize); } return(Math.Min(databaseOptions.BatchSize, maxNumberOfItemsToProcessInSingleBatch)); }
public void Configure(SmugglerDatabaseOptions databaseOptions) { if (Store.HasJsonRequestFactory == false) { return; } var url = Store.Url.ForDatabase(Store.DefaultDatabase) + "/debug/config"; var request = Store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", Store.DatabaseCommands.PrimaryCredentials, Store.Conventions)); var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value <int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) { return; } var current = databaseOptions.BatchSize; databaseOptions.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); }
private static async Task <Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; var lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); var jintHelper = new SmugglerJintHelper(); jintHelper.Initialize(databaseOptions); var isLastLoop = false; try { while (true) { try { var beforeCount = totalCount; if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); var res = await TransferStreamedDocuments(exportStore, databaseOptions, now, jintHelper, bulkInsertOperation, reportInterval, totalCount, lastEtag, lastReport); totalCount = res.Item1; lastEtag = res.Item2; lastReport = res.Item3; } else { int retries = RetriesCount; var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var timeout = databaseOptions.Timeout.Seconds; if (timeout < 30) { timeout = 30; } try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var res = await TransferDocumentsWithoutStreaming(exportStore, databaseOptions, exportBatchSize, operationMetadata, now, bulkInsertOperation, reportInterval, totalCount, lastEtag, lastReport); totalCount = res.Item1; lastEtag = res.Item2; lastReport = res.Item3; break; } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { return(Etag.Empty); } if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout; } } // In a case that we filter all the results, the formEtag hasn't updaed to the latest, // but we still need to continue until we finish all the docs. 
var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) <= 0 && !isLastLoop) { if (totalCount == beforeCount) { isLastLoop = true; ShowProgress("Got no new results, trying one more loop from: {0}", lastEtag); } else { ShowProgress("Finished streaming batch, but haven't reached an end (last reached etag = {0})", lastEtag); } continue; } // Load HiLo documents for selected collections databaseOptions.Filters.ForEach(filter => { if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase)) { filter.Values.ForEach(collectionName => { var doc = exportStore.DatabaseCommands.Get("Raven/Hilo/" + collectionName); if (doc == null) { return; } doc.Metadata["@id"] = doc.Key; bulkInsertOperation.Store(doc.DataAsJson, doc.Metadata, doc.Key); totalCount++; }); } }); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); } catch (Exception e) { ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } } finally { bulkInsertOperation.Dispose(); } }
private async Task <Etag> ExportAttachments(ISmugglerDatabaseOperations exportOperations, ISmugglerDatabaseOperations importOperations, SmugglerDatabaseOptions databaseOptions) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; while (true) { try { if (databaseOptions.Limit - totalCount <= 0) { await importOperations.PutAttachment(null).ConfigureAwait(false); // force flush ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } var maxRecords = Math.Min(databaseOptions.Limit - totalCount, databaseOptions.BatchSize); var attachments = await exportOperations.GetAttachments(totalCount, lastEtag, maxRecords).ConfigureAwait(false); if (attachments.Count == 0) { var databaseStatistics = await exportOperations.GetStats().ConfigureAwait(false); if (lastEtag == null) { lastEtag = Etag.Empty; } if (lastEtag.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, maxRecords); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachments.Count; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Count, totalCount); foreach (var attachment in attachments) { var attachmentData = await exportOperations.GetAttachmentData(attachment).ConfigureAwait(false); if (attachmentData == null) { continue; } var attachmentToExport = new AttachmentExportInfo { Key = attachment.Key, Metadata = attachment.Metadata, Data = new MemoryStream(attachmentData) }; if (databaseOptions.StripReplicationInformation) { attachmentToExport.Metadata = StripReplicationInformationFromMetadata(attachmentToExport.Metadata); } await importOperations.PutAttachment(attachmentToExport).ConfigureAwait(false); lastEtag = attachment.Etag; } } catch (Exception e) { ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); ShowProgress("Done with reading attachments, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } }
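Every exporter in this listing pages on etags the same way; stripped of error handling, the loop shape is as follows (ReadBatchAfter and Process are hypothetical stand-ins for the fetch and sink calls):

while (true)
{
    var batch = ReadBatchAfter(lastEtag, batchSize);             // hypothetical fetch
    if (batch.Count == 0)
    {
        if (lastEtag.CompareTo(serverLastKnownEtag) >= 0) break; // caught up with the server
        lastEtag = EtagUtil.Increment(lastEtag, batchSize);      // everything was filtered out; skip ahead
        continue;
    }
    foreach (var item in batch) Process(item);                   // hypothetical sink
    lastEtag = batch[batch.Count - 1].Etag;                      // advance the cursor
}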
private static async Task ExportTransformers(DocumentStore exportStore, DocumentStore importStore, int exportBatchSize, SmugglerDatabaseOptions databaseOptions) { var totalCount = 0; var retries = RetriesCount; while (true) { TransformerDefinition[] transformers; try { transformers = await exportStore.AsyncDatabaseCommands.GetTransformersAsync(totalCount, exportBatchSize); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { ShowProgress("Failed getting transformers too many times, stopping the transformer export entirely. Message: {0}", e.Message); return; } if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("Failed fetching transformer information from the export store. {0} retries remaining. Message: {1}", retries, e.Message); continue; } if (transformers.Length == 0) { ShowProgress("Done with reading transformers, total: {0}", totalCount); break; } totalCount += transformers.Length; ShowProgress("Reading batch of {0,3} transformers, read so far: {1,10:#,#;;0}", transformers.Length, totalCount); foreach (var transformer in transformers) { try { var transformerName = await importStore.AsyncDatabaseCommands.PutTransformerAsync(transformer.Name, transformer); ShowProgress("Successfully PUT transformer '{0}'", transformerName); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("PUT of a transformer {0} failed. Message: {1}", transformer.Name, e.Message); } } } }
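The catch block above (and those in the identity and attachment exporters below) all encode the same policy: rethrow immediately unless IgnoreErrorsAndContinue is set, and give up with a fallback once the retry budget is spent. A compact sketch of that policy (the helper name is hypothetical):

static async Task<T> WithRetries<T>(Func<Task<T>> action, int retries, bool ignoreErrorsAndContinue, T fallback)
{
    while (true)
    {
        try { return await action(); }
        catch
        {
            if (retries-- == 0 && ignoreErrorsAndContinue) return fallback; // budget exhausted: give up quietly
            if (ignoreErrorsAndContinue == false) throw;                    // strict mode: fail fast
            // otherwise log and loop for another attempt
        }
    }
}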
private static async Task <Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; string lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); var jintHelper = new SmugglerJintHelper(); jintHelper.Initialize(databaseOptions); try { while (true) { try { if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; var metadata = document.Value <RavenJObject>("@metadata"); var id = metadata.Value <string>("@id"); var etag = Etag.Parse(metadata.Value <string>("@etag")); lastEtag = etag; if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } if (databaseOptions.ShouldDisableVersioningBundle) { document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); } document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); if (!string.IsNullOrEmpty(databaseOptions.TransformScript)) { document = jintHelper.Transform(databaseOptions.TransformScript, document); if (document == null) { continue; } metadata = document.Value <RavenJObject>("@metadata"); } document.Remove("@metadata"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("IMPORT of a document {0} failed. 
Message: {1}", document, e.Message); } totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } } } else { int retries = RetriesCount; var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var timeout = databaseOptions.Timeout.Seconds; if (timeout < 30) { timeout = 30; } try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var documents = await((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata); foreach (var jToken in documents) { var document = (RavenJObject)jToken; var metadata = document.Value <RavenJObject>("@metadata"); var id = metadata.Value <string>("@id"); var etag = Etag.Parse(metadata.Value <string>("@etag")); lastEtag = etag; if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } if (databaseOptions.ShouldDisableVersioningBundle) { document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); } document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message); } totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } break; } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { return(Etag.Empty); } if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout; } } // In a case that we filter all the results, the formEtag hasn't updaed to the latest, // but we still need to continue until we finish all the docs. 
var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } // Load HiLo documents for selected collections databaseOptions.Filters.ForEach(filter => { if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase)) { filter.Values.ForEach(collectionName => { var doc = exportStore.DatabaseCommands.Get("Raven/Hilo/" + collectionName); if (doc == null) { return; } doc.Metadata["@id"] = doc.Key; bulkInsertOperation.Store(doc.DataAsJson, doc.Metadata, doc.Key); totalCount++; }); } }); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); } catch (Exception e) { ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } } finally { bulkInsertOperation.Dispose(); } }
private static async Task <Tuple <int, string, DateTime> > TransferDocumentsWithoutStreaming(DocumentStore exportStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize, OperationMetadata operationMetadata, DateTime now, BulkInsertOperation bulkInsertOperation, TimeSpan reportInterval, int totalCount, string fromEtag, DateTime lastReport) { var documents = await((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, fromEtag, exportBatchSize, operationMetadata); foreach (var jToken in documents) { var document = (RavenJObject)jToken; var metadata = document.Value <RavenJObject>("@metadata"); var id = metadata.Value <string>("@id"); var etag = Etag.Parse(metadata.Value <string>("@etag")); fromEtag = etag; if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } if (databaseOptions.ShouldDisableVersioningBundle) { document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); } document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message); } totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } return(Tuple.Create(totalCount, fromEtag, lastReport)); }
public DatabaseDataDumper(DocumentDatabase database, SmugglerDatabaseOptions options = null) : base(options ?? new SmugglerDatabaseOptions()) { Operations = new SmugglerEmbeddedDatabaseOperations(database); }
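A hedged construction-and-use sketch for the embedded dumper above, built from the ExportData/SmugglerExportOptions usage shown elsewhere in this listing (the file path is illustrative):

var dumper = new DatabaseDataDumper(documentDatabase, new SmugglerDatabaseOptions { OperateOnTypes = ItemType.Documents });
await dumper.ExportData(new SmugglerExportOptions<RavenConnectionStringOptions>
{
    ToFile = @"C:\backups\northwind.ravendump",      // illustrative path
    From = new EmbeddedRavenConnectionStringOptions(),
});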
public void Initialize(SmugglerDatabaseOptions databaseOptions) { Options = databaseOptions; }
public void Initialize(SmugglerDatabaseOptions databaseOptions) { Options = databaseOptions; jintHelper.Initialize(databaseOptions); }
private static async Task <Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; string lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); try { while (true) { if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } var metadata = document.Value <RavenJObject>("@metadata"); var id = metadata.Value <string>("@id"); var etag = Etag.Parse(metadata.Value <string>("@etag")); document.Remove("@metadata"); bulkInsertOperation.Store(document, metadata, id); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } lastEtag = etag; } } } else { int retries = RetriesCount; var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var timeout = databaseOptions.Timeout.Seconds; if (timeout < 30) { timeout = 30; } try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var documents = await((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata); foreach (RavenJObject document in documents) { var metadata = document.Value <RavenJObject>("@metadata"); var id = metadata.Value <string>("@id"); var etag = Etag.Parse(metadata.Value <string>("@etag")); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } bulkInsertOperation.Store(document, metadata, id); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } lastEtag = etag; } break; } catch (Exception e) { if (retries-- == 0) { throw; } exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); ShowProgress("Error reading from database, remaining attempts {0}, will retry. 
Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout; } } // In a case that we filter all the results, the formEtag hasn't updaed to the latest, // but we still need to continue until we finish all the docs. var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading documents, total: {0}", totalCount); return(lastEtag); } } finally { bulkInsertOperation.Dispose(); } }
public void Configure(SmugglerDatabaseOptions databaseOptions) { if (Store.HasJsonRequestFactory == false) return; var url = Store.Url.ForDatabase(Store.DefaultDatabase) + "/debug/config"; var request = Store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", Store.DatabaseCommands.PrimaryCredentials, Store.Conventions)); var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value<int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) return; var current = databaseOptions.BatchSize; databaseOptions.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); }
private void DefineDatabaseOptionsSet(SmugglerDatabaseOptions databaseOptions) { databaseOptionSet = new OptionSet(); databaseOptionSet.OnWarning += s => ConsoleHelper.WriteLineWithColor(ConsoleColor.Yellow, s); databaseOptionSet.Add("operate-on-types:", OptionCategory.SmugglerDatabase, "Specify the types to operate on. You can specify more than one type by combining items with a comma." + Environment.NewLine + "Default is all items." + Environment.NewLine + "Usage example: Indexes,Documents,Attachments,RemoveAnalyzers", value => { try { if (string.IsNullOrWhiteSpace(value) == false) { databaseOptions.OperateOnTypes = (ItemType)Enum.Parse(typeof(ItemType), value, ignoreCase: true); } } catch (Exception e) { PrintUsageAndExit(e); } }); databaseOptionSet.Add("metadata-filter:{=}", OptionCategory.SmugglerDatabase, "Filter documents by a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts, or Raven-Entity-Name=Posts,Persons for multiple document types", (key, val) => databaseOptions.Filters.Add(new FilterSetting { Path = "@metadata." + key, ShouldMatch = true, Values = FilterSetting.ParseValues(val) })); databaseOptionSet.Add("negative-metadata-filter:{=}", OptionCategory.SmugglerDatabase, "Filter documents NOT matching a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts", (key, val) => databaseOptions.Filters.Add( new FilterSetting { Path = "@metadata." + key, ShouldMatch = false, Values = FilterSetting.ParseValues(val) })); databaseOptionSet.Add("filter:{=}", OptionCategory.SmugglerDatabase, "Filter documents by a document property" + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => databaseOptions.Filters.Add( new FilterSetting { Path = key, ShouldMatch = true, Values = FilterSetting.ParseValues(val) })); databaseOptionSet.Add("negative-filter:{=}", OptionCategory.SmugglerDatabase, "Filter documents NOT matching a document property" + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => databaseOptions.Filters.Add( new FilterSetting { Path = key, ShouldMatch = false, Values = FilterSetting.ParseValues(val) })); databaseOptionSet.Add("ignore-errors-and-continue", OptionCategory.SmugglerDatabase, "If this option is enabled, smuggler will not halt its operation on errors. Errors will still be displayed to the user.", value => { databaseOptions.IgnoreErrorsAndContinue = true; }); databaseOptionSet.Add("transform:", OptionCategory.SmugglerDatabase, "Transform documents using a given script (import only)", script => databaseOptions.TransformScript = script); databaseOptionSet.Add("transform-file:", OptionCategory.SmugglerDatabase, "Transform documents using a given script file (import only)", script => databaseOptions.TransformScript = File.ReadAllText(script)); databaseOptionSet.Add("max-steps-for-transform-script:", OptionCategory.SmugglerDatabase, "Maximum number of steps that transform script can have (import only)", s => databaseOptions.MaxStepsForTransformScript = int.Parse(s)); databaseOptionSet.Add("batch-size:", OptionCategory.SmugglerDatabase, "The batch size for requests", s => databaseOptions.BatchSize = int.Parse(s)); databaseOptionSet.Add("chunk-size:", OptionCategory.SmugglerDatabase, "The number of documents to import before a new connection is opened", s => databaseOptions.ChunkSize = int.Parse(s)); databaseOptionSet.Add("d|database:", OptionCategory.SmugglerDatabase, "The database to operate on. If not specified, the operations will be on the default database.", value => databaseOptions.Source.DefaultDatabase = value); databaseOptionSet.Add("d2|database2:", OptionCategory.SmugglerDatabase, "The database to export to. If not specified, the operations will be on the default database. This parameter is used only in the between operation.", value => databaseOptions.Destination.DefaultDatabase = value); databaseOptionSet.Add("wait-for-indexing", OptionCategory.SmugglerDatabase, "Wait until all indexing activity has been completed (import only)", _ => databaseOptions.WaitForIndexing = true); databaseOptionSet.Add("excludeexpired", OptionCategory.SmugglerDatabase, "Excludes expired documents created by the expiration bundle", _ => databaseOptions.ShouldExcludeExpired = true); databaseOptionSet.Add("disable-versioning-during-import", OptionCategory.SmugglerDatabase, "Disables versioning for the duration of the import", _ => databaseOptions.ShouldDisableVersioningBundle = true); databaseOptionSet.Add("limit:", OptionCategory.SmugglerDatabase, "Reads at most VALUE documents/attachments.", s => databaseOptions.Limit = int.Parse(s)); databaseOptionSet.Add("max-split-export-file-size:", OptionCategory.SmugglerDatabase, "Split exported file by size (after compression) in MB", s => databaseOptions.MaxSplitExportFileSize = int.Parse(s)); databaseOptionSet.Add("timeout:", OptionCategory.SmugglerDatabase, "The timeout to use for requests", s => databaseOptions.Timeout = TimeSpan.FromMilliseconds(int.Parse(s))); databaseOptionSet.Add("incremental", OptionCategory.SmugglerDatabase, "States usage of incremental operations", _ => databaseOptions.Incremental = true);
databaseOptionSet.Add("u|user|username:", OptionCategory.SmugglerDatabase, "The username to use when the database requires the client to authenticate.", value => GetCredentials(databaseOptions.Source).UserName = value); databaseOptionSet.Add("u2|user2|username2:", OptionCategory.SmugglerDatabase, "The username to use when the database requires the client to authenticate. This parameter is used only in the between operation.", value => GetCredentials(databaseOptions.Destination).UserName = value); databaseOptionSet.Add("p|pass|password:", OptionCategory.SmugglerDatabase, "The password to use when the database requires the client to authenticate.", value => GetCredentials(databaseOptions.Source).Password = value); databaseOptionSet.Add("p2|pass2|password2:", OptionCategory.SmugglerDatabase, "The password to use when the database requires the client to authenticate. This parameter is used only in the between operation.", value => GetCredentials(databaseOptions.Destination).Password = value); databaseOptionSet.Add("domain:", OptionCategory.SmugglerDatabase, "The domain to use when the database requires the client to authenticate.", value => GetCredentials(databaseOptions.Source).Domain = value); databaseOptionSet.Add("domain2:", OptionCategory.SmugglerDatabase, "The domain to use when the database requires the client to authenticate. This parameter is used only in the between operation.", value => GetCredentials(databaseOptions.Destination).Domain = value); databaseOptionSet.Add("key|api-key|apikey:", OptionCategory.SmugglerDatabase, "The API-key to use, when using OAuth.", value => databaseOptions.Source.ApiKey = value); databaseOptionSet.Add("key2|api-key2|apikey2:", OptionCategory.SmugglerDatabase, "The API-key to use, when using OAuth. This parameter is used only in the between operation.", value => databaseOptions.Destination.ApiKey = value);
databaseOptionSet.Add("strip-replication-information", OptionCategory.SmugglerDatabase, "Remove all replication information from metadata (import only)", _ => databaseOptions.StripReplicationInformation = true); databaseOptionSet.Add("continuation-token:", OptionCategory.SmugglerDatabase, "Activates the usage of a continuation token in case of unreliable connections or huge imports", s => databaseOptions.ContinuationToken = s); databaseOptionSet.Add("skip-conflicted", OptionCategory.SmugglerDatabase, "The database will issue an error when conflicted documents are put. The default is to alert the user; this option allows skipping them so the operation can continue.", _ => databaseOptions.SkipConflicted = true); databaseOptionSet.Add("system-database", OptionCategory.SmugglerDatabase, "Set to true in order to work on a system database", _ => allowOperationOnSystemDatabase = true); }
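These Add calls follow the Mono.Options OptionSet pattern, so driving the set is presumably a single Parse call. A hedged sketch of the calling side (reusing the PrintUsageAndExit helper seen above):

var databaseOptions = new SmugglerDatabaseOptions();
DefineDatabaseOptionsSet(databaseOptions);        // builds databaseOptionSet as defined above
var unmatched = databaseOptionSet.Parse(args);    // callbacks fire here, populating databaseOptions
if (unmatched.Count > 0)
    PrintUsageAndExit(new Exception("Unknown option: " + unmatched[0]));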
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; string lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); var jintHelper = new SmugglerJintHelper(); jintHelper.Initialize(databaseOptions); try { while (true) { try { if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); lastEtag = etag; if (!databaseOptions.MatchFilters(document)) continue; if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) continue; if (databaseOptions.StripReplicationInformation) document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); if (databaseOptions.ShouldDisableVersioningBundle) document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); if (!string.IsNullOrEmpty(databaseOptions.TransformScript)) { document = jintHelper.Transform(databaseOptions.TransformScript, document); if (document == null) continue; metadata = document.Value<RavenJObject>("@metadata"); } document.Remove("@metadata"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("IMPORT of a document {0} failed. 
Message: {1}", document, e.Message); } totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } } } else { int retries = RetriesCount; var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var timeout = databaseOptions.Timeout.Seconds; if (timeout < 30) timeout = 30; try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var documents = await ((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata); foreach (var jToken in documents) { var document = (RavenJObject)jToken; var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); lastEtag = etag; if (!databaseOptions.MatchFilters(document)) continue; if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) continue; if (databaseOptions.StripReplicationInformation) document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); if (databaseOptions.ShouldDisableVersioningBundle) document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message); } totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } break; } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) return Etag.Empty; if (databaseOptions.IgnoreErrorsAndContinue == false) throw; exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout; } } // In a case that we filter all the results, the formEtag hasn't updaed to the latest, // but we still need to continue until we finish all the docs. 
var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } // Load HiLo documents for selected collections databaseOptions.Filters.ForEach(filter => { if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase)) { filter.Values.ForEach(collectionName => { var doc = exportStore.DatabaseCommands.Get("Raven/Hilo/" + collectionName); if (doc == null) return; doc.Metadata["@id"] = doc.Key; bulkInsertOperation.Store(doc.DataAsJson, doc.Metadata, doc.Key); totalCount++; }); } }); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return lastEtag; } catch (Exception e) { ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } } finally { bulkInsertOperation.Dispose(); } }
private async static Task <Etag> ExportAttachments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; while (true) { var attachments = await exportStore.AsyncDatabaseCommands.GetAttachmentsAsync(0, lastEtag, exportBatchSize); if (attachments.Length == 0) { var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachments.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Length, totalCount); foreach (var attachmentInformation in attachments) { if (databaseOptions.StripReplicationInformation) { attachmentInformation.Metadata = StripReplicationInformationFromMetadata(attachmentInformation.Metadata); } ShowProgress("Downloading attachment: {0}", attachmentInformation.Key); var attachment = await exportStore.AsyncDatabaseCommands.GetAttachmentAsync(attachmentInformation.Key); await importStore.AsyncDatabaseCommands.PutAttachmentAsync(attachment.Key, null, attachment.Data(), attachment.Metadata); } lastEtag = Etag.Parse(attachments.Last().Etag); } }
public async Task <HttpResponseMessage> Export() { try { // Set up an export using RavenDb's Smuggler API var exportTimestamp = DateTime.Now; var fileName = $"augurk-{exportTimestamp.ToString("yyyy-dd-M-HHmmss")}.bak"; var options = new SmugglerDatabaseOptions { OperateOnTypes = ItemType.Documents, Filters = new List <FilterSetting> { new FilterSetting { Path = "@metadata.@id", ShouldMatch = false, Values = new List <string> { ConfigurationManager.DOCUMENT_KEY, CustomizationManager.DOCUMENT_KEY, } } } }; // Determine the appropriate export method to use SmugglerDatabaseApiBase exporter; RavenConnectionStringOptions connectionStringOptions; if (Database.DocumentStore is EmbeddableDocumentStore embeddableDocumentStore) { exporter = new DatabaseDataDumper(embeddableDocumentStore.DocumentDatabase, options); connectionStringOptions = new EmbeddedRavenConnectionStringOptions(); } else { exporter = new SmugglerDatabaseApi(options); connectionStringOptions = new RavenConnectionStringOptions() { Url = Database.DocumentStore.Url }; } var exportOptions = new SmugglerExportOptions <RavenConnectionStringOptions>() { ToFile = Path.Combine(Path.GetTempPath(), fileName), From = connectionStringOptions }; // Perform the export await exporter.ExportData(exportOptions); // Stream the backup back to the client var result = new HttpResponseMessage(HttpStatusCode.OK) { Content = new ByteArrayContent(File.ReadAllBytes(exportOptions.ToFile)) }; result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = fileName }; result.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); return(result); } catch { return(Request.CreateErrorResponse(HttpStatusCode.InternalServerError, "An exception occurred while generating the export.")); } }
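A hedged client-side sketch for downloading the backup produced by the controller above (the route is an assumption, not taken from the source):

using (var client = new HttpClient())
using (var response = await client.GetAsync("http://localhost/api/export"))   // hypothetical route
{
    response.EnsureSuccessStatusCode();
    var bytes = await response.Content.ReadAsByteArrayAsync();
    // Save under the file name advertised in the Content-Disposition header.
    File.WriteAllBytes(response.Content.Headers.ContentDisposition.FileName, bytes);
}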
public void Initialize(SmugglerDatabaseOptions databaseOptions) { Options = databaseOptions; scriptedJsonPatcher.Initialize(databaseOptions); }
public void NegativeFiltersShouldNotFilterOutWhenThereAreNoMatches() { var path = Path.GetTempFileName(); var options = new SmugglerDatabaseOptions { Filters = new EquatableList <FilterSetting> { new FilterSetting { Path = "Value", ShouldMatch = false, Values = new EquatableList <string> { "Value1" } } } }; try { using (var store = NewRemoteDocumentStore()) { Initialize(store); var smuggler = new SmugglerDatabaseApi(options); smuggler.ExportData(new SmugglerExportOptions <RavenConnectionStringOptions> { ToFile = path, From = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }).Wait(TimeSpan.FromSeconds(15)); } using (var store = NewRemoteDocumentStore()) { var smuggler = new SmugglerDatabaseApi(options); smuggler.ImportData(new SmugglerImportOptions <RavenConnectionStringOptions> { FromFile = path, To = new RavenConnectionStringOptions { Url = store.Url, DefaultDatabase = store.DefaultDatabase } }).Wait(TimeSpan.FromSeconds(15)); Assert.NotNull(store.DatabaseCommands.Get("key/1")); using (var session = store.OpenSession()) { var product1 = session.Load <Product>(1); var product2 = session.Load <Product>(2); var product3 = session.Load <Product>(3); Assert.Null(product1); Assert.Null(product2); Assert.NotNull(product3); } } } finally { IOExtensions.DeleteDirectory(path); } }
private static async Task ExportIdentities(DocumentStore exportStore, DocumentStore importStore, ItemType operateOnTypes, SmugglerDatabaseOptions databaseOptions) { int start = 0; const int pageSize = 1024; long totalIdentitiesCount = 0; var identities = new List<KeyValuePair<string, long>>(); var retries = RetriesCount; ShowProgress("Exporting Identities"); do { var url = exportStore.Url.ForDatabase(exportStore.DefaultDatabase) + "/debug/identities?start=" + start + "&pageSize=" + pageSize; using (var request = exportStore.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", exportStore.DatabaseCommands.PrimaryCredentials, exportStore.Conventions))) { RavenJObject identitiesInfo; try { identitiesInfo = (RavenJObject)await request.ReadResponseJsonAsync(); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { ShowProgress("Failed to fetch identities too many times. Cancelling identities export. Message: {0}", e.Message); return; } if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("Failed to fetch identities. {0} retries remaining. Message: {1}", retries, e.Message); continue; } totalIdentitiesCount = identitiesInfo.Value<long>("TotalCount"); // ReSharper disable once LoopCanBeConvertedToQuery --> the code is more readable when NOT converted to linq foreach (var identity in identitiesInfo.Value<RavenJArray>("Identities")) { identities.Add(new KeyValuePair<string, long>(identity.Value<string>("Key"), identity.Value<long>("Value"))); } start += pageSize; } } while (identities.Count < totalIdentitiesCount); ShowProgress("Exported the following {0} identities: {1}", identities.Count, string.Join(", ", identities.Select(x => x.Key))); var filteredIdentities = identities.Where(x => { if ("Raven/Etag".Equals(x.Key, StringComparison.OrdinalIgnoreCase)) return false; if ("IndexId".Equals(x.Key, StringComparison.OrdinalIgnoreCase)) return false; if (Constants.RavenSubscriptionsPrefix.Equals(x.Key, StringComparison.OrdinalIgnoreCase)) return false; if (operateOnTypes.HasFlag(ItemType.Documents)) return true; return false; }).ToList(); ShowProgress("After filtering {0} identities need to be imported: {1}", filteredIdentities.Count, string.Join(", ", filteredIdentities.Select(x => x.Key))); foreach (var identityInfo in filteredIdentities) { try { importStore.DatabaseCommands.SeedIdentityFor(identityInfo.Key, identityInfo.Value); } catch (Exception e) { ShowProgress("Failed seeding identity for {0}. Message: {1}", identityInfo.Key, e.Message); continue; } ShowProgress("Identity '{0}' imported with value {1}", identityInfo.Key, identityInfo.Value); } ShowProgress("Done with importing identities"); }
private static async Task ExportIdentities(DocumentStore exportStore, DocumentStore importStore, ItemType operateOnTypes, SmugglerDatabaseOptions databaseOptions) { int start = 0; const int pageSize = 1024; long totalIdentitiesCount = 0; var identities = new List <KeyValuePair <string, long> >(); var retries = RetriesCount; ShowProgress("Exporting Identities"); do { var url = exportStore.Url.ForDatabase(exportStore.DefaultDatabase) + "/debug/identities?start=" + start + "&pageSize=" + pageSize; using (var request = exportStore.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", exportStore.DatabaseCommands.PrimaryCredentials, exportStore.Conventions))) { RavenJObject identitiesInfo; try { identitiesInfo = (RavenJObject)await request.ReadResponseJsonAsync(); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { ShowProgress("Failed to fetch identities too many times. Cancelling identities export. Message: {0}", e.Message); return; } if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("Failed to fetch identities. {0} retries remaining. Message: {1}", retries, e.Message); continue; } totalIdentitiesCount = identitiesInfo.Value <long>("TotalCount"); // ReSharper disable once LoopCanBeConvertedToQuery --> the code is more readable when NOT converted to linq foreach (var identity in identitiesInfo.Value <RavenJArray>("Identities")) { identities.Add(new KeyValuePair <string, long>(identity.Value <string>("Key"), identity.Value <long>("Value"))); } start += pageSize; } } while (identities.Count < totalIdentitiesCount); ShowProgress("Exported the following {0} identities: {1}", identities.Count, string.Join(", ", identities.Select(x => x.Key))); var filteredIdentities = identities.Where(x => { if ("Raven/Etag".Equals(x.Key, StringComparison.InvariantCultureIgnoreCase)) { return(false); } if ("IndexId".Equals(x.Key, StringComparison.InvariantCultureIgnoreCase) && operateOnTypes.HasFlag(ItemType.Indexes)) { return(false); } if (operateOnTypes.HasFlag(ItemType.Documents)) { return(true); } return(false); }).ToList(); ShowProgress("After filtering {0} identities need to be imported: {1}", filteredIdentities.Count, string.Join(", ", filteredIdentities.Select(x => x.Key))); foreach (var identityInfo in filteredIdentities) { try { importStore.DatabaseCommands.SeedIdentityFor(identityInfo.Key, identityInfo.Value); } catch (Exception e) { ShowProgress("Failed seeding identity for {0}. Message: {1}", identityInfo.Key, e.Message); continue; } ShowProgress("Identity '{0}' imported with value {1}", identityInfo.Key, identityInfo.Value); } ShowProgress("Done with importing identities"); }
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; var lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); var jintHelper = new SmugglerJintHelper(); jintHelper.Initialize(databaseOptions); var isLastLoop = false; try { while (true) { try { var beforeCount = totalCount; if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); var res = await TransferStreamedDocuments(exportStore, databaseOptions, now, jintHelper, bulkInsertOperation, reportInterval, totalCount, lastEtag, lastReport); totalCount = res.Item1; lastEtag = res.Item2; lastReport = res.Item3; } else { int retries = RetriesCount; var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var timeout = databaseOptions.Timeout.Seconds; if (timeout < 30) timeout = 30; try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var res = await TransferDocumentsWithoutStreaming(exportStore, databaseOptions, exportBatchSize, operationMetadata, now, bulkInsertOperation, reportInterval, totalCount, lastEtag, lastReport); totalCount = res.Item1; lastEtag = res.Item2; lastReport = res.Item3; break; } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) return Etag.Empty; if (databaseOptions.IgnoreErrorsAndContinue == false) throw; exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout; } } // In a case that we filter all the results, the formEtag hasn't updaed to the latest, // but we still need to continue until we finish all the docs. 
var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) <= 0 && !isLastLoop) { if (totalCount == beforeCount) { isLastLoop = true; ShowProgress("Got no new results, trying one more loop from: {0}", lastEtag); } else ShowProgress("Finished streaming batch, but haven't reached an end (last reached etag = {0})", lastEtag); continue; } // Load HiLo documents for selected collections databaseOptions.Filters.ForEach(filter => { if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase)) { filter.Values.ForEach(collectionName => { var doc = exportStore.DatabaseCommands.Get("Raven/Hilo/" + collectionName); if (doc == null) return; doc.Metadata["@id"] = doc.Key; bulkInsertOperation.Store(doc.DataAsJson, doc.Metadata, doc.Key); totalCount++; }); } }); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return lastEtag; } catch (Exception e) { ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } } finally { bulkInsertOperation.Dispose(); } }
public static async Task Between(SmugglerBetweenOptions <RavenConnectionStringOptions> betweenOptions, SmugglerDatabaseOptions databaseOptions) { SetDatabaseNameIfEmpty(betweenOptions.From); SetDatabaseNameIfEmpty(betweenOptions.To); using (var exportStore = CreateStore(betweenOptions.From)) using (var importStore = CreateStore(betweenOptions.To)) { SmugglerDatabaseApi.ValidateThatServerIsUpAndDatabaseExists(betweenOptions.From, exportStore); SmugglerDatabaseApi.ValidateThatServerIsUpAndDatabaseExists(betweenOptions.To, importStore); var exportBatchSize = GetBatchSize(exportStore, databaseOptions); var importBatchSize = GetBatchSize(importStore, databaseOptions); var exportStoreSupportedFeatures = await DetectServerSupportedFeatures(exportStore); var importStoreSupportedFeatures = await DetectServerSupportedFeatures(importStore); if (string.IsNullOrEmpty(betweenOptions.IncrementalKey)) { betweenOptions.IncrementalKey = ((AsyncServerClient)exportStore.AsyncDatabaseCommands).Url; } var incremental = new ExportIncremental(); if (databaseOptions.Incremental) { var jsonDocument = await importStore.AsyncDatabaseCommands.GetAsync(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { var smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization <SmugglerExportIncremental>(); ExportIncremental value; if (smugglerExportIncremental.ExportIncremental.TryGetValue(betweenOptions.IncrementalKey, out value)) { incremental = value; } databaseOptions.StartDocsEtag = incremental.LastDocsEtag ?? Etag.Empty; databaseOptions.StartAttachmentsEtag = incremental.LastAttachmentsEtag ?? Etag.Empty; } } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Indexes)) { await ExportIndexes(exportStore, importStore, exportBatchSize, databaseOptions); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Transformers) && exportStoreSupportedFeatures.IsTransformersSupported && importStoreSupportedFeatures.IsTransformersSupported) { await ExportTransformers(exportStore, importStore, exportBatchSize, databaseOptions); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Documents)) { incremental.LastDocsEtag = await ExportDocuments(exportStore, importStore, databaseOptions, exportStoreSupportedFeatures, exportBatchSize, importBatchSize); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Attachments)) { incremental.LastAttachmentsEtag = await ExportAttachments(exportStore, importStore, databaseOptions, exportBatchSize); } if (exportStoreSupportedFeatures.IsIdentitiesSmugglingSupported && importStoreSupportedFeatures.IsIdentitiesSmugglingSupported) { await ExportIdentities(exportStore, importStore, databaseOptions.OperateOnTypes, databaseOptions); } if (databaseOptions.Incremental) { var smugglerExportIncremental = new SmugglerExportIncremental(); var jsonDocument = await importStore.AsyncDatabaseCommands.GetAsync(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization <SmugglerExportIncremental>(); } smugglerExportIncremental.ExportIncremental[betweenOptions.IncrementalKey] = incremental; await importStore.AsyncDatabaseCommands.PutAsync(SmugglerExportIncremental.RavenDocumentKey, null, RavenJObject.FromObject(smugglerExportIncremental), new RavenJObject()); } } }
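A hedged invocation sketch for the Between helper above (URLs and database names are placeholders):

var databaseOptions = new SmugglerDatabaseOptions { Incremental = true };
var betweenOptions = new SmugglerBetweenOptions<RavenConnectionStringOptions>
{
    From = new RavenConnectionStringOptions { Url = "http://source-server:8080", DefaultDatabase = "Northwind" },
    To = new RavenConnectionStringOptions { Url = "http://target-server:8080", DefaultDatabase = "Northwind" },
};
await Between(betweenOptions, databaseOptions);   // copies indexes, transformers, documents, attachments, identities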
private static async Task<Tuple<int, string, DateTime>> TransferDocumentsWithoutStreaming(DocumentStore exportStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize, OperationMetadata operationMetadata, DateTime now, BulkInsertOperation bulkInsertOperation, TimeSpan reportInterval, int totalCount, string fromEtag, DateTime lastReport) { var documents = await ((AsyncServerClient) exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, fromEtag, exportBatchSize, operationMetadata); foreach (var jToken in documents) { var document = (RavenJObject) jToken; var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); fromEtag = etag; if (!databaseOptions.MatchFilters(document)) continue; if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) continue; if (databaseOptions.StripReplicationInformation) document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); if (databaseOptions.ShouldDisableVersioningBundle) document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message); } totalCount++; if (totalCount%1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } return Tuple.Create(totalCount, fromEtag, lastReport); }
private async static Task <Etag> ExportAttachments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; var retries = RetriesCount; while (true) { try { AttachmentInformation[] attachments; try { attachments = await exportStore.AsyncDatabaseCommands.GetAttachmentsAsync(0, lastEtag, exportBatchSize); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { return(Etag.InvalidEtag); } if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("Failed to get attachments. {0} retries remaining. Message: {1}", retries, e.Message); continue; } if (attachments.Length == 0) { DatabaseStatistics databaseStatistics; try { databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { return(Etag.Empty); } if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("Failed to get database statistics. Message: {0}", e.Message); continue; } var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return(lastEtag); } totalCount += attachments.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Length, totalCount); foreach (var attachmentInformation in attachments) { ShowProgress("Downloading attachment: {0}", attachmentInformation.Key); try { var attachment = await exportStore.AsyncDatabaseCommands.GetAttachmentAsync(attachmentInformation.Key); if (attachment == null) { continue; } if (databaseOptions.StripReplicationInformation) { attachment.Metadata = StripReplicationInformationFromMetadata(attachment.Metadata); } await importStore.AsyncDatabaseCommands.PutAttachmentAsync(attachment.Key, null, attachment.Data(), attachment.Metadata); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("IMPORT of an attachment {0} failed. Message: {1}", attachmentInformation.Key, e.Message); } } lastEtag = Etag.Parse(attachments.Last().Etag); } catch (Exception e) { ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message); ShowProgress("Done with reading attachments, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } }
private static int GetBatchSize(DocumentStore store, SmugglerDatabaseOptions databaseOptions) { if (store.HasJsonRequestFactory == false) return databaseOptions.BatchSize; var url = store.Url.ForDatabase(store.DefaultDatabase) + "/debug/config"; using (var request = store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", store.DatabaseCommands.PrimaryCredentials, store.Conventions))) { var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value<int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) return databaseOptions.BatchSize; return Math.Min(databaseOptions.BatchSize, maxNumberOfItemsToProcessInSingleBatch); } }
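The rule this helper implements is simply to cap the user-configured batch size at the server's advertised maximum; a self-contained illustration (the numeric values are placeholders):

// Illustration of the capping rule in GetBatchSize; values are invented.
int configuredBatchSize = 4096;   // databaseOptions.BatchSize
int serverMax = 1024;             // MaxNumberOfItemsToProcessInSingleBatch from /debug/config
int effective = serverMax > 0 ? Math.Min(configuredBatchSize, serverMax) : configuredBatchSize;
// effective == 1024; a non-positive serverMax leaves the configured size untouched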
private async Task <Etag> ExportDocuments(ISmugglerDatabaseOperations exportOperations, ISmugglerDatabaseOperations importOperations, SmugglerDatabaseOptions databaseOptions) { var now = SystemTime.UtcNow; string lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); while (true) { bool hasDocs = false; try { var maxRecords = databaseOptions.Limit - totalCount; if (maxRecords > 0) { var amountToFetchFromServer = Math.Min(databaseOptions.BatchSize, maxRecords); using (var documents = await exportOperations.GetDocuments(lastEtag, amountToFetchFromServer).ConfigureAwait(false)) { while (await documents.MoveNextAsync().ConfigureAwait(false)) { hasDocs = true; var document = documents.Current; var tempLastEtag = Etag.Parse(document.Value <RavenJObject>("@metadata").Value <string>("@etag")); Debug.Assert(!String.IsNullOrWhiteSpace(document.Value <RavenJObject>("@metadata").Value <string>("@id"))); lastEtag = tempLastEtag; if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } if (databaseOptions.ShouldDisableVersioningBundle) { document["@metadata"] = DisableVersioning(document["@metadata"] as RavenJObject); } document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); await importOperations.PutDocument(document, (int)DocumentHelpers.GetRoughSize(document)).ConfigureAwait(false); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } } if (hasDocs) { continue; } // The server can filter out all the results. In this case, we need to try again with the next batch. // Note that if the etag's server-restart number is not the same, this won't guard against an infinite loop. // (This code provides support for legacy RavenDB version: 1.0) var databaseStatistics = await exportOperations.GetStats().ConfigureAwait(false); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, amountToFetchFromServer); if (lastEtag.CompareTo(databaseStatistics.LastDocEtag) >= 0) { lastEtag = databaseStatistics.LastDocEtag; } ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } } } catch (Exception e) { ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } // Load HiLo documents for selected collections databaseOptions.Filters.ForEach(filter => { if (string.Equals(filter.Path, "@metadata.Raven-Entity-Name", StringComparison.OrdinalIgnoreCase)) { filter.Values.ForEach(collectionName => { JsonDocument doc = exportOperations.GetDocument("Raven/Hilo/" + collectionName); if (doc != null) { doc.Metadata["@id"] = doc.Key; var jsonDoc = doc.ToJson(); AsyncHelpers.RunSync(() => importOperations.PutDocument(jsonDoc, (int)DocumentHelpers.GetRoughSize(jsonDoc))); totalCount++; } }); } }); await importOperations.PutDocument(null, -1).ConfigureAwait(false); // force flush ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); } }
public static async Task Between(SmugglerBetweenOptions<RavenConnectionStringOptions> betweenOptions, SmugglerDatabaseOptions databaseOptions) { SetDatabaseNameIfEmpty(betweenOptions.From); SetDatabaseNameIfEmpty(betweenOptions.To); using (var exportStore = CreateStore(betweenOptions.From)) using (var importStore = CreateStore(betweenOptions.To)) { SmugglerDatabaseApi.ValidateThatServerIsUpAndDatabaseExists(betweenOptions.From, exportStore); SmugglerDatabaseApi.ValidateThatServerIsUpAndDatabaseExists(betweenOptions.To, importStore); var exportBatchSize = GetBatchSize(exportStore, databaseOptions); var importBatchSize = GetBatchSize(importStore, databaseOptions); var exportStoreSupportedFeatures = await DetectServerSupportedFeatures(exportStore); var importStoreSupportedFeatures = await DetectServerSupportedFeatures(importStore); if (string.IsNullOrEmpty(betweenOptions.IncrementalKey)) { betweenOptions.IncrementalKey = ((AsyncServerClient)exportStore.AsyncDatabaseCommands).Url; } var incremental = new ExportIncremental(); if (databaseOptions.Incremental) { var jsonDocument = await importStore.AsyncDatabaseCommands.GetAsync(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { var smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization<SmugglerExportIncremental>(); ExportIncremental value; if (smugglerExportIncremental.ExportIncremental.TryGetValue(betweenOptions.IncrementalKey, out value)) { incremental = value; } databaseOptions.StartDocsEtag = incremental.LastDocsEtag ?? Etag.Empty; databaseOptions.StartAttachmentsEtag = incremental.LastAttachmentsEtag ?? Etag.Empty; } } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Indexes)) { await ExportIndexes(exportStore, importStore, exportBatchSize); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Transformers) && exportStoreSupportedFeatures.IsTransformersSupported && importStoreSupportedFeatures.IsTransformersSupported) { await ExportTransformers(exportStore, importStore, exportBatchSize); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Documents)) { incremental.LastDocsEtag = await ExportDocuments(exportStore, importStore, databaseOptions, exportStoreSupportedFeatures, exportBatchSize, importBatchSize); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Attachments)) { incremental.LastAttachmentsEtag = await ExportAttachments(exportStore, importStore, databaseOptions, exportBatchSize); } if (exportStoreSupportedFeatures.IsIdentitiesSmugglingSupported && importStoreSupportedFeatures.IsIdentitiesSmugglingSupported) { await ExportIdentities(exportStore, importStore, databaseOptions.OperateOnTypes); } if (databaseOptions.Incremental) { var smugglerExportIncremental = new SmugglerExportIncremental(); var jsonDocument = await importStore.AsyncDatabaseCommands.GetAsync(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization<SmugglerExportIncremental>(); } smugglerExportIncremental.ExportIncremental[betweenOptions.IncrementalKey] = incremental; await importStore.AsyncDatabaseCommands.PutAsync(SmugglerExportIncremental.RavenDocumentKey, null, RavenJObject.FromObject(smugglerExportIncremental), new RavenJObject()); } } }
public async Task Between(SmugglerBetweenOperations betweenOperations, SmugglerDatabaseOptions databaseOptions) { var exportOperations = betweenOperations.From; var importOperations = betweenOperations.To; exportOperations.Configure(databaseOptions); exportOperations.Initialize(databaseOptions); importOperations.Configure(databaseOptions); importOperations.Initialize(databaseOptions); if (string.IsNullOrEmpty(betweenOperations.IncrementalKey)) { betweenOperations.IncrementalKey = exportOperations.GetIdentifier(); } var incremental = new ExportIncremental(); if (databaseOptions.Incremental) { var jsonDocument = importOperations.GetDocument(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { var smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization <SmugglerExportIncremental>(); ExportIncremental value; if (smugglerExportIncremental.ExportIncremental.TryGetValue(betweenOperations.IncrementalKey, out value)) { incremental = value; } databaseOptions.StartDocsEtag = incremental.LastDocsEtag ?? Etag.Empty; databaseOptions.StartAttachmentsEtag = incremental.LastAttachmentsEtag ?? Etag.Empty; } } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Indexes)) { await ExportIndexes(exportOperations, importOperations).ConfigureAwait(false); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Transformers)) { await ExportTransformers(exportOperations, importOperations).ConfigureAwait(false); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Documents)) { incremental.LastDocsEtag = await ExportDocuments(exportOperations, importOperations, databaseOptions).ConfigureAwait(false); } if (databaseOptions.OperateOnTypes.HasFlag(ItemType.Attachments)) { incremental.LastAttachmentsEtag = await ExportAttachments(exportOperations, importOperations, databaseOptions).ConfigureAwait(false); } await ExportIdentities(exportOperations, importOperations, databaseOptions.OperateOnTypes).ConfigureAwait(false); if (databaseOptions.Incremental) { var smugglerExportIncremental = new SmugglerExportIncremental(); var jsonDocument = importOperations.GetDocument(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization <SmugglerExportIncremental>(); } smugglerExportIncremental.ExportIncremental[betweenOperations.IncrementalKey] = incremental; var smugglerDoc = RavenJObject.FromObject(smugglerExportIncremental); smugglerDoc.Add("@metadata", new RavenJObject { { "@id", SmugglerExportIncremental.RavenDocumentKey } }); await importOperations.PutDocument(smugglerDoc, (int)DocumentHelpers.GetRoughSize(smugglerDoc)).ConfigureAwait(false); await importOperations.PutDocument(null, -1).ConfigureAwait(false); // force flush } }
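From the reads and writes in these Between variants, the incremental-state document stored under SmugglerExportIncremental.RavenDocumentKey can be pictured as below; the incremental key and the etag values are invented for illustration, only the shape follows the snippets:

// Illustrative construction of the incremental-state document.
var state = new SmugglerExportIncremental();
state.ExportIncremental["http://source-server:8080/databases/SourceDb"] = new ExportIncremental
{
    LastDocsEtag = Etag.Parse("01000000-0000-0001-0000-000000000100"), // placeholder etag
    LastAttachmentsEtag = Etag.Empty
};
var stateDoc = RavenJObject.FromObject(state);
// Persisted back via PutDocument/PutAsync so the next incremental run can
// resume from StartDocsEtag / StartAttachmentsEtag instead of Etag.Empty.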
private static async Task <Tuple <int, Etag, DateTime> > TransferStreamedDocuments(DocumentStore exportStore, SmugglerDatabaseOptions databaseOptions, DateTime now, SmugglerJintHelper jintHelper, BulkInsertOperation bulkInsertOperation, TimeSpan reportInterval, int totalCount, string fromEtag, DateTime lastReport) { Etag lastReadEtag = fromEtag; using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(fromEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; var metadata = document.Value <RavenJObject>("@metadata"); var id = metadata.Value <string>("@id"); var etag = Etag.Parse(metadata.Value <string>("@etag")); lastReadEtag = etag; if (!databaseOptions.MatchFilters(document)) { continue; } if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) { continue; } if (databaseOptions.StripReplicationInformation) { document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); } if (databaseOptions.ShouldDisableVersioningBundle) { document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); } document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); if (!string.IsNullOrEmpty(databaseOptions.TransformScript)) { document = jintHelper.Transform(databaseOptions.TransformScript, document); if (document == null) { continue; } metadata = document.Value <RavenJObject>("@metadata"); } document.Remove("@metadata"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) { throw; } ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message); } totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } } return(Tuple.Create(totalCount, lastReadEtag, lastReport)); }
public SmugglerDatabaseApi(SmugglerDatabaseOptions options = null) : base(options ?? new SmugglerDatabaseOptions()) { Operations = new SmugglerRemoteDatabaseOperations(() => store, () => operation, () => IsDocsStreamingSupported, () => IsTransformersSupported); }
private static DatabaseDataDumper CreateDumper(EmbeddableDocumentStore store, SmugglerDatabaseOptions smugglerDatabaseOptions) { return new DatabaseDataDumper(store.DocumentDatabase, smugglerDatabaseOptions); }
public void Configure(SmugglerDatabaseOptions databaseOptions) { if (Store.HasJsonRequestFactory == false) return; var url = Store.Url.ForDatabase(Store.DefaultDatabase) + "/debug/config"; try { using (var request = Store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", Store.DatabaseCommands.PrimaryCredentials, Store.Conventions))) { var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value<int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) return; var current = databaseOptions.BatchSize; databaseOptions.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); } } catch (ErrorResponseException e) { if (e.StatusCode == HttpStatusCode.Forbidden) // let it continue with the user defined batch size return; throw; } }
private static async Task<Tuple<int, Etag, DateTime>> TransferStreamedDocuments(DocumentStore exportStore, SmugglerDatabaseOptions databaseOptions, DateTime now, SmugglerJintHelper jintHelper, BulkInsertOperation bulkInsertOperation, TimeSpan reportInterval, int totalCount, string fromEtag, DateTime lastReport) { Etag lastReadEtag = fromEtag; using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(fromEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); lastReadEtag = etag; if (!databaseOptions.MatchFilters(document)) continue; if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) continue; if (databaseOptions.StripReplicationInformation) document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject); if (databaseOptions.ShouldDisableVersioningBundle) document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject); document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject); if (!string.IsNullOrEmpty(databaseOptions.TransformScript)) { document = jintHelper.Transform(databaseOptions.TransformScript, document); if (document == null) continue; metadata = document.Value<RavenJObject>("@metadata"); } document.Remove("@metadata"); try { bulkInsertOperation.Store(document, metadata, id); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message); } totalCount++; if (totalCount%1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } } } return Tuple.Create(totalCount, lastReadEtag, lastReport); }
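Both TransferStreamedDocuments variants, and the attachment loops, share a single etag-cursor paging pattern. A generic, self-contained sketch of that pattern follows; fetchBatch and process are hypothetical caller-supplied delegates, not APIs from the snippets, and the types from Raven.Abstractions.Data and Raven.Json.Linq are assumed in scope as in the surrounding code:

// Generic etag-cursor paging sketch; fetchBatch returns (document, etag) pairs.
public static async Task<Etag> PageByEtag(
    Func<Etag, int, Task<List<Tuple<RavenJObject, Etag>>>> fetchBatch,
    Action<RavenJObject> process,
    int batchSize)
{
    var lastEtag = Etag.Empty;
    while (true)
    {
        var batch = await fetchBatch(lastEtag, batchSize);
        if (batch.Count == 0)
            return lastEtag; // the snippets also consult server statistics before stopping
        foreach (var item in batch)
            process(item.Item1);
        lastEtag = batch[batch.Count - 1].Item2; // advance the cursor past this batch
    }
}

The cursor is advanced from the last item actually read, not from a computed offset, which is what makes the loop resumable after an interrupted run.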
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; string lastEtag = databaseOptions.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); try { while (true) { if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; if (!databaseOptions.MatchFilters(document)) continue; if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) continue; var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); document.Remove("@metadata"); bulkInsertOperation.Store(document, metadata, id); totalCount++; if (totalCount%1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } lastEtag = etag; } } } else { int retries = RetriesCount; var originalExportRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var originalImportRequestTimeout = importStore.JsonRequestFactory.RequestTimeout; var timeout = (int)databaseOptions.Timeout.TotalSeconds; if (timeout < 30) timeout = 30; try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var documents = await ((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata); foreach (RavenJObject document in documents) { var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); if (!databaseOptions.MatchFilters(document)) continue; if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now)) continue; bulkInsertOperation.Store(document, metadata, id); totalCount++; if (totalCount%1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } lastEtag = etag; } break; } catch (Exception e) { if (retries-- == 0) throw; timeout *= 2; exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout); ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalExportRequestTimeout; importStore.JsonRequestFactory.RequestTimeout = originalImportRequestTimeout; } } // If the server filters out all the results, lastEtag hasn't advanced to the latest etag, but we still need to continue until we finish all the docs. var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading documents, total: {0}", totalCount); return lastEtag; } } finally { bulkInsertOperation.Dispose(); } }
private static async Task<Etag> ExportAttachments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; var retries = RetriesCount; while (true) { try { AttachmentInformation[] attachments; try { attachments = await exportStore.AsyncDatabaseCommands.GetAttachmentsAsync(0, lastEtag, exportBatchSize); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) return Etag.InvalidEtag; if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("Failed to get attachments. {0} retries remaining. Message: {1}", retries, e.Message); continue; } if (attachments.Length == 0) { DatabaseStatistics databaseStatistics; try { databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) return Etag.Empty; if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("Failed to get database statistics. Message: {0}", e.Message); continue; } var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return lastEtag; } totalCount += attachments.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Length, totalCount); foreach (var attachmentInformation in attachments) { ShowProgress("Downloading attachment: {0}", attachmentInformation.Key); try { var attachment = await exportStore.AsyncDatabaseCommands.GetAttachmentAsync(attachmentInformation.Key); if (attachment == null) continue; if (databaseOptions.StripReplicationInformation) attachment.Metadata = StripReplicationInformationFromMetadata(attachment.Metadata); await importStore.AsyncDatabaseCommands.PutAttachmentAsync(attachment.Key, null, attachment.Data(), attachment.Metadata); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("IMPORT of an attachment {0} failed. Message: {1}", attachmentInformation.Key, e.Message); } } lastEtag = Etag.Parse(attachments.Last().Etag); } catch (Exception e) { ShowProgress("Got Exception during smuggler between. Exception: {0}. ", e.Message); ShowProgress("Done with reading attachments, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } }
private async static Task<Etag> ExportAttachments(DocumentStore exportStore, DocumentStore importStore, SmugglerDatabaseOptions databaseOptions, int exportBatchSize) { Etag lastEtag = databaseOptions.StartAttachmentsEtag; int totalCount = 0; while (true) { var attachments = await exportStore.AsyncDatabaseCommands.GetAttachmentsAsync(0, lastEtag, exportBatchSize); if (attachments.Length == 0) { var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return lastEtag; } totalCount += attachments.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Length, totalCount); foreach (var attachmentInformation in attachments) { ShowProgress("Downloading attachment: {0}", attachmentInformation.Key); var attachment = await exportStore.AsyncDatabaseCommands.GetAttachmentAsync(attachmentInformation.Key); await importStore.AsyncDatabaseCommands.PutAttachmentAsync(attachment.Key, null, attachment.Data(), attachment.Metadata); } lastEtag = Etag.Parse(attachments.Last().Etag); } }
private static async Task ExportTransformers(DocumentStore exportStore, DocumentStore importStore, int exportBatchSize, SmugglerDatabaseOptions databaseOptions) { var totalCount = 0; var retries = RetriesCount; while (true) { TransformerDefinition[] transformers; try { transformers = await exportStore.AsyncDatabaseCommands.GetTransformersAsync(totalCount, exportBatchSize); } catch (Exception e) { if (retries-- == 0 && databaseOptions.IgnoreErrorsAndContinue) { ShowProgress("Failed to get transformers too many times, stopping the transformer export entirely. Message: {0}", e.Message); return; } if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("Failed to fetch transformer information from the export store. {0} retries remaining. Message: {1}", retries, e.Message); continue; } if (transformers.Length == 0) { ShowProgress("Done with reading transformers, total: {0}", totalCount); break; } totalCount += transformers.Length; ShowProgress("Reading batch of {0,3} transformers, read so far: {1,10:#,#;;0}", transformers.Length, totalCount); foreach (var transformer in transformers) { try { var transformerName = await importStore.AsyncDatabaseCommands.PutTransformerAsync(transformer.Name, transformer); ShowProgress("Successfully PUT transformer '{0}'", transformerName); } catch (Exception e) { if (databaseOptions.IgnoreErrorsAndContinue == false) throw; ShowProgress("PUT of a transformer {0} failed. Message: {1}", transformer.Name, e.Message); } } } }
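The retry bookkeeping above, which also appears in the attachment variants, boils down to a small policy; a compact sketch follows, with attemptRemoteCall as a hypothetical stand-in for the actual fetch and the parameters mirroring RetriesCount and IgnoreErrorsAndContinue from the snippets:

// Compact sketch of the retries/IgnoreErrorsAndContinue policy used above.
static async Task RunWithRetries(Func<Task> attemptRemoteCall, int retries, bool ignoreErrorsAndContinue)
{
    while (true)
    {
        try
        {
            await attemptRemoteCall();
            return;
        }
        catch (Exception)
        {
            if (retries-- == 0 && ignoreErrorsAndContinue)
                return;   // retries exhausted: give up on this stage, keep the export going
            if (ignoreErrorsAndContinue == false)
                throw;    // strict mode: surface the first failure immediately
            // otherwise: report progress and loop for another attempt
        }
    }
}

Note the asymmetry the snippets encode: with IgnoreErrorsAndContinue disabled, the first failure propagates; with it enabled, failures are retried until the budget runs out and then the stage is skipped rather than aborting the whole run.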