public override async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream) { SetSmugglerOptions(options); SmugglerJintHelper.Initialize(options); using (store = CreateStore(importOptions.To)) { Task disposeTask; try { operation = new ChunkedBulkInsertOperation(store.DefaultDatabase, store, store.Listeners, new BulkInsertOptions { BatchSize = options.BatchSize, OverwriteExisting = true }, store.Changes(), options.ChunkSize, SmugglerOptions.DefaultDocumentSizeInChunkLimitInBytes); operation.Report += text => ShowProgress(text); await base.ImportData(importOptions, options, stream); } finally { disposeTask = operation.DisposeAsync(); } if (disposeTask != null) { await disposeTask; } } }
public override async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, PeriodicBackupStatus backupStatus = null) { using (store = CreateStore()) { return await base.ExportData(stream, options, incremental, backupStatus); } }
public void ImportData(SmugglerOptions options) { using (FileStream fileStream = File.OpenRead(options.File)) { ImportData(fileStream, options); } }
public override async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus lastEtag) { using (store = CreateStore()) { return await base.ExportData(stream, options, incremental, lastEtagsFromFile, lastEtag); } }
public override async Task ImportData(Stream stream, SmugglerOptions options) { SmugglerJintHelper.Initialize(options ?? SmugglerOptions); var batchSize = options != null ? options.BatchSize : SmugglerOptions.BatchSize; using (store = CreateStore()) { Task disposeTask = null; try { operation = store.BulkInsert(options: new BulkInsertOptions { BatchSize = batchSize, CheckForUpdates = true }); operation.Report += text => ShowProgress(text); await base.ImportData(stream, options); } finally { disposeTask = operation.DisposeAsync(); } if (disposeTask != null) { await disposeTask; } } }
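// A minimal usage sketch (not from the source) for the bulk-insert import path
// above, assuming an initialized instance of the surrounding class; the file
// name, batch size, and type flags are illustrative assumptions.
public static async Task ImportDumpAsync(SmugglerApi smuggler)
{
    var options = new SmugglerOptions
    {
        BatchSize = 512, // assumed value; the server may cap this (see GetBatchSize below)
        OperateOnTypes = ItemType.Documents | ItemType.Indexes
    };
    using (var file = File.OpenRead("Northwind.ravendb-full-dump")) // hypothetical path
    {
        await smuggler.ImportData(file, options);
    }
}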
private void ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter) { var lastEtag = Guid.Empty; int totalCount = 0; while (true) { RavenJArray documents = null; var request = CreateRequest("/docs?pageSize=128&etag=" + lastEtag); request.ExecuteRequest(reader => documents = RavenJArray.Load(new JsonTextReader(reader))); if (documents.Length == 0) { Console.WriteLine("Done with reading documents, total: {0}", totalCount); break; } var final = documents.Where(options.MatchFilters).ToList(); final.ForEach(item => item.WriteTo(jsonWriter)); totalCount += final.Count; Console.WriteLine("Reading batch of {0,3} documents, read so far: {1,10:#,#;;0}", documents.Length, totalCount); lastEtag = new Guid(documents.Last().Value<RavenJObject>("@metadata").Value<string>("@etag")); } }
protected override void PutDocument(RavenJObject document, SmugglerOptions options) { if (document != null) { var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); document.Remove("@metadata"); operation.Store(document, metadata, id); } }
private Program() { connectionStringOptions = new RavenConnectionStringOptions(); options = new SmugglerOptions(); optionSet = new OptionSet { { "operate-on-types:", "Specify the types to operate on. You can specify more than one type by combining items with a comma." + Environment.NewLine + "Default is all items." + Environment.NewLine + "Usage example: Indexes,Documents,Attachments", value => { try { options.OperateOnTypes = options.ItemTypeParser(value); } catch (Exception e) { PrintUsageAndExit(e); } } }, { "metadata-filter:{=}", "Filter documents by a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts", (key, val) => options.Filters["@metadata." + key] = val }, { "filter:{=}", "Filter documents by a document property." + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => options.Filters[key] = val }, { "timeout:", "The timeout to use for requests", s => options.Timeout = int.Parse(s) }, { "batch-size:", "The batch size for requests", s => options.BatchSize = int.Parse(s) }, { "d|database:", "The database to operate on. If not specified, the operations will be on the default database.", value => connectionStringOptions.DefaultDatabase = value }, { "u|user|username:", "The username to use when the database requires the client to authenticate.", value => Credentials.UserName = value }, { "p|pass|password:", "The password to use when the database requires the client to authenticate.", value => Credentials.Password = value }, { "domain:", "The domain to use when the database requires the client to authenticate.", value => Credentials.Domain = value }, { "key|api-key|apikey:", "The API-key to use, when using OAuth.", value => connectionStringOptions.ApiKey = value }, { "incremental", "States usage of incremental operations", _ => incremental = true }, { "wait-for-indexing", "Wait until all indexing activity has been completed (import only)", _ => waitForIndexing = true }, { "h|?|help", v => PrintUsageAndExit(0) }, }; }
private static int GetBatchSize(DocumentStore store, SmugglerOptions options) { if (store.HasJsonRequestFactory == false) { return options.BatchSize; } var url = store.Url.ForDatabase(store.DefaultDatabase) + "/debug/config"; var request = store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", store.DatabaseCommands.PrimaryCredentials, store.Conventions)); var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value<int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) { return options.BatchSize; } return Math.Min(options.BatchSize, maxNumberOfItemsToProcessInSingleBatch); }
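// A standalone sketch of the clamping rule in GetBatchSize above: the client's
// requested batch size is honored unless the server reports a positive
// MaxNumberOfItemsToProcessInSingleBatch, in which case the smaller value wins.
private static int EffectiveBatchSize(int requested, int serverMax)
{
    if (serverMax <= 0)
        return requested; // server reported no usable limit
    return Math.Min(requested, serverMax);
}
// EffectiveBatchSize(1024, 512) == 512; EffectiveBatchSize(1024, 0) == 1024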
protected override void PutDocument(RavenJObject document, SmugglerOptions options, int size) { if (document == null) { return; } var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); if (String.IsNullOrWhiteSpace(id)) { throw new InvalidDataException("Error while importing document from the dump: \r\n Missing id in the document metadata. This shouldn't happen; most likely the dump you are importing from is corrupt."); } document.Remove("@metadata"); operation.Store(document, metadata, id, size); }
public void Configure(SmugglerOptions options) { if (Store.HasJsonRequestFactory == false) { return; } var url = Store.Url.ForDatabase(Store.DefaultDatabase) + "/debug/config"; var request = Store.JsonRequestFactory.CreateHttpJsonRequest(new CreateHttpJsonRequestParams(null, url, "GET", Store.DatabaseCommands.PrimaryCredentials, Store.Conventions)); var configuration = (RavenJObject)request.ReadResponseJson(); var maxNumberOfItemsToProcessInSingleBatch = configuration.Value<int>("MaxNumberOfItemsToProcessInSingleBatch"); if (maxNumberOfItemsToProcessInSingleBatch <= 0) { return; } var current = options.BatchSize; options.BatchSize = Math.Min(current, maxNumberOfItemsToProcessInSingleBatch); }
private Program() { connectionStringOptions = new RavenConnectionStringOptions(); options = new SmugglerOptions(); optionSet = new OptionSet { { "operate-on-types:", "Specify the types to operate on. You can specify more than one type by combining items with a comma." + Environment.NewLine + "Default is all items." + Environment.NewLine + "Usage example: Indexes,Documents,Attachments", value => { try { options.OperateOnTypes = (ItemType)Enum.Parse(typeof(ItemType), value); } catch (Exception e) { PrintUsageAndExit(e); } } }, { "metadata-filter:{=}", "Filter documents by a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts", (key, val) => options.Filters["@metadata." + key] = val }, { "filter:{=}", "Filter documents by a document property." + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => options.Filters[key] = val }, { "d|database:", "The database to operate on. If not specified, the operations will be on the default database.", value => connectionStringOptions.DefaultDatabase = value }, { "u|user|username:", "The username to use when the database requires the client to authenticate.", value => Credentials.UserName = value }, { "p|pass|password:", "The password to use when the database requires the client to authenticate.", value => Credentials.Password = value }, { "domain:", "The domain to use when the database requires the client to authenticate.", value => Credentials.Domain = value }, { "key|api-key:", "The API-key to use, when using OAuth.", value => connectionStringOptions.ApiKey = value }, { "incremental", "States usage of incremental operations", _ => incremental = true }, { "h|?|help", v => PrintUsageAndExit(0) }, }; }
public void ExportData(SmugglerOptions options) { using (var streamWriter = new StreamWriter(new GZipStream(File.Create(options.File), CompressionMode.Compress))) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Indexes)) { ExportIndexes(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { ExportDocuments(options, jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments)) { ExportAttachments(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } }
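// For reference (sketched, not from the source): the ExportData call above
// emits a GZip-compressed JSON document of this shape, with each section
// present but left empty when its ItemType flag is not set:
// {
//   "Indexes":     [ /* index definitions */ ],
//   "Docs":        [ /* documents, each still carrying its @metadata */ ],
//   "Attachments": [ /* attachment entries */ ]
// }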
private Program() { connectionStringOptions = new RavenConnectionStringOptions(); options = new SmugglerOptions(); optionSet = new OptionSet { { "operate-on-types:", "Specify the types to operate on. You can specify more than one type by combining items with a comma." + Environment.NewLine + "Default is all items." + Environment.NewLine + "Usage example: Indexes,Documents,Attachments", value => { try { options.OperateOnTypes = (ItemType)Enum.Parse(typeof(ItemType), value); } catch (Exception e) { PrintUsageAndExit(e); } } }, { "metadata-filter:{=}", "Filter documents by a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts", (key, val) => options.Filters["@metadata." + key] = val }, { "filter:{=}", "Filter documents by a document property." + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => options.Filters[key] = val }, { "d|database:", "The database to operate on. If not specified, the operations will be on the default database.", value => connectionStringOptions.DefaultDatabase = value }, { "u|user|username:", "The username to use when the database requires the client to authenticate.", value => Credentials.UserName = value }, { "p|pass|password:", "The password to use when the database requires the client to authenticate.", value => Credentials.Password = value }, { "domain:", "The domain to use when the database requires the client to authenticate.", value => Credentials.Domain = value }, { "key|api-key:", "The API-key to use, when using OAuth.", value => connectionStringOptions.ApiKey = value }, { "h|?|help", v => PrintUsageAndExit(0) }, }; }
public void Initialize(SmugglerOptions options) { Options = options; jintHelper.Initialize(options); }
public void ImportData(Stream stream, SmugglerOptions options) { var sw = Stopwatch.StartNew(); // Try to read the stream compressed, otherwise continue uncompressed. JsonTextReader jsonReader; try { var streamReader = new StreamReader(new GZipStream(stream, CompressionMode.Decompress)); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) return; } catch (InvalidDataException) { stream.Seek(0, SeekOrigin.Begin); var streamReader = new StreamReader(stream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) return; } if (jsonReader.TokenType != JsonToken.StartObject) throw new InvalidDataException("StartObject was expected"); // should read indexes now if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Indexes", jsonReader.Value) == false) throw new InvalidDataException("Indexes property was expected"); if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var index = RavenJToken.ReadFrom(jsonReader); if (options.OperateOnTypes.HasFlag(ItemType.Indexes) == false) continue; var indexName = index.Value<string>("name"); if (indexName.StartsWith("Raven/") || indexName.StartsWith("Temp/")) continue; var request = CreateRequest("indexes/" + indexName, "PUT"); request.Write(index.Value<RavenJObject>("definition")); request.ExecuteRequest(); } // should read documents now if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Docs", jsonReader.Value) == false) throw new InvalidDataException("Docs property was expected"); if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); var batch = new List<RavenJObject>(); int totalCount = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader); if (options.OperateOnTypes.HasFlag(ItemType.Documents) == false) continue; if (options.MatchFilters(document) == false) continue; totalCount += 1; batch.Add(document); if (batch.Count >= 128) FlushBatch(batch); } FlushBatch(batch); var attachmentCount = 0; if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject) return; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Attachments", jsonReader.Value) == false) throw new InvalidDataException("Attachments property was expected"); if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { attachmentCount += 1; var item = RavenJToken.ReadFrom(jsonReader); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) == false) continue; var attachmentExportInfo = new JsonSerializer { Converters = { new TrivialJsonToJsonJsonConverter() } }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item)); Console.WriteLine("Importing attachment {0}", attachmentExportInfo.Key); var request = CreateRequest("static/" + attachmentExportInfo.Key, "PUT"); if (attachmentExportInfo.Metadata != null) { foreach (var header in attachmentExportInfo.Metadata) { request.WebRequest.Headers.Add(header.Key, StripQuotesIfNeeded(header.Value)); } } request.Write(attachmentExportInfo.Data); request.ExecuteRequest(); } Console.WriteLine("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", totalCount, attachmentCount, sw.ElapsedMilliseconds); }
private void ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter) { var lastEtag = Guid.Empty; int totalCount = 0; while (true) { RavenJArray documents = null; var request = CreateRequest("docs?pageSize=128&etag=" + lastEtag); request.ExecuteRequest(reader => documents = RavenJArray.Load(new JsonTextReader(reader))); if (documents.Length == 0) { Console.WriteLine("Done with reading documents, total: {0}", totalCount); break; } var final = documents.Where(options.MatchFilters).ToList(); final.ForEach(item => item.WriteTo(jsonWriter)); totalCount += final.Count; Console.WriteLine("Reading batch of {0,3} documents, read so far: {1,10:#,#;;0}", documents.Length, totalCount); lastEtag = new Guid(documents.Last().Value<RavenJObject>("@metadata").Value<string>("@etag")); } }
public void ImportData(Stream stream, SmugglerOptions options) { EnsureDatabaseExists(); var sw = Stopwatch.StartNew(); // Try to read the stream compressed, otherwise continue uncompressed. JsonTextReader jsonReader; try { var streamReader = new StreamReader(new GZipStream(stream, CompressionMode.Decompress)); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) { return; } } catch (InvalidDataException) { stream.Seek(0, SeekOrigin.Begin); var streamReader = new StreamReader(stream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) { return; } } if (jsonReader.TokenType != JsonToken.StartObject) { throw new InvalidDataException("StartObject was expected"); } // should read indexes now if (jsonReader.Read() == false) { return; } if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } if (Equals("Indexes", jsonReader.Value) == false) { throw new InvalidDataException("Indexes property was expected"); } if (jsonReader.Read() == false) { return; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var index = RavenJToken.ReadFrom(jsonReader); if (options.OperateOnTypes.HasFlag(ItemType.Indexes) == false) { continue; } var indexName = index.Value<string>("name"); if (indexName.StartsWith("Raven/") || indexName.StartsWith("Temp/")) { continue; } var request = CreateRequest("/indexes/" + indexName, "PUT"); request.Write(index.Value<RavenJObject>("definition")); request.ExecuteRequest(); } // should read documents now if (jsonReader.Read() == false) { return; } if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } if (Equals("Docs", jsonReader.Value) == false) { throw new InvalidDataException("Docs property was expected"); } if (jsonReader.Read() == false) { return; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } var batch = new List<RavenJObject>(); int totalCount = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader); if (options.OperateOnTypes.HasFlag(ItemType.Documents) == false) { continue; } if (options.MatchFilters(document) == false) { continue; } totalCount += 1; batch.Add(document); if (batch.Count >= 128) { FlushBatch(batch); } } FlushBatch(batch); var attachmentCount = 0; if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject) { return; } if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } if (Equals("Attachments", jsonReader.Value) == false) { throw new InvalidDataException("Attachments property was expected"); } if (jsonReader.Read() == false) { return; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { attachmentCount += 1; var item = RavenJToken.ReadFrom(jsonReader); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) == false) { continue; } var attachmentExportInfo = new JsonSerializer { Converters = { new TrivialJsonToJsonJsonConverter() } }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item)); Console.WriteLine("Importing attachment {0}", attachmentExportInfo.Key); var request = CreateRequest("/static/" + attachmentExportInfo.Key, "PUT"); if (attachmentExportInfo.Metadata != null) { foreach (var header in attachmentExportInfo.Metadata) { switch (header.Key) { case "Content-Type": request.WebRequest.ContentType = header.Value.Value<string>(); break; default: request.WebRequest.Headers.Add(header.Key, StripQuotesIfNeeded(header.Value)); break; } } } request.Write(attachmentExportInfo.Data); request.ExecuteRequest(); } Console.WriteLine("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", totalCount, attachmentCount, sw.ElapsedMilliseconds); }
public override async Task<ExportDataResult> ExportData(SmugglerExportOptions exportOptions, SmugglerOptions options) { using (store = CreateStore(exportOptions.From)) { return await base.ExportData(exportOptions, options); } }
private static async Task<Etag> ExportAttachments(DocumentStore exportStore, DocumentStore importStore, SmugglerOptions options, int exportBatchSize) { Etag lastEtag = options.StartAttachmentsEtag; int totalCount = 0; while (true) { var attachments = await exportStore.AsyncDatabaseCommands.GetAttachmentsAsync(0, lastEtag, exportBatchSize); if (attachments.Length == 0) { var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastAttachmentEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last attachment etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading attachments, total: {0}", totalCount); return lastEtag; } totalCount += attachments.Length; ShowProgress("Reading batch of {0,3} attachments, read so far: {1,10:#,#;;0}", attachments.Length, totalCount); foreach (var attachmentInformation in attachments) { ShowProgress("Downloading attachment: {0}", attachmentInformation.Key); var attachment = await exportStore.AsyncDatabaseCommands.GetAttachmentAsync(attachmentInformation.Key); await importStore.AsyncDatabaseCommands.PutAttachmentAsync(attachment.Key, null, attachment.Data(), attachment.Metadata); } lastEtag = Etag.Parse(attachments.Last().Etag); } }
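// A distilled sketch (not from the source) of the etag-paging loop that
// ExportAttachments uses above: advance a cursor past each page, and on an
// empty page either stop (the cursor has reached the server's last etag) or
// skip ahead, since filtered-out items can leave gaps.
private static async Task PageByEtagAsync(Etag start, int pageSize,
    Func<Etag, Task<AttachmentInformation[]>> readPage, Func<Task<Etag>> lastServerEtag)
{
    var cursor = start;
    while (true)
    {
        var page = await readPage(cursor);
        if (page.Length == 0)
        {
            if (new ComparableByteArray(cursor).CompareTo(await lastServerEtag()) >= 0)
                return; // nothing left on the server
            cursor = EtagUtil.Increment(cursor, pageSize); // skip over a filtered gap
            continue;
        }
        cursor = Etag.Parse(page.Last().Etag); // resume after the last item read
    }
}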
private static async Task<Etag> ExportDocuments(DocumentStore exportStore, DocumentStore importStore, SmugglerOptions options, ServerSupportedFeatures exportStoreSupportedFeatures, int exportBatchSize, int importBatchSize) { var now = SystemTime.UtcNow; var lastEtag = options.StartDocsEtag; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); ShowProgress("Exporting Documents"); var bulkInsertOperation = importStore.BulkInsert(null, new BulkInsertOptions { BatchSize = importBatchSize, OverwriteExisting = true, }); bulkInsertOperation.Report += text => ShowProgress(text); try { while (true) { if (exportStoreSupportedFeatures.IsDocsStreamingSupported) { ShowProgress("Streaming documents from " + lastEtag); using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(lastEtag)) { while (await documentsEnumerator.MoveNextAsync()) { var document = documentsEnumerator.Current; if (!options.MatchFilters(document)) { continue; } if (options.ShouldExcludeExpired && options.ExcludeExpired(document, now)) { continue; } var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); document.Remove("@metadata"); bulkInsertOperation.Store(document, metadata, id); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } lastEtag = etag; } } } else { int retries = RetriesCount; var originalRequestTimeout = exportStore.JsonRequestFactory.RequestTimeout; var timeout = options.Timeout.Seconds; if (timeout < 30) { timeout = 30; } try { var operationMetadata = new OperationMetadata(exportStore.Url, exportStore.Credentials, exportStore.ApiKey); while (true) { try { ShowProgress("Get documents from " + lastEtag); var documents = await ((AsyncServerClient)exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, lastEtag, exportBatchSize, operationMetadata); foreach (RavenJObject document in documents) { var metadata = document.Value<RavenJObject>("@metadata"); var id = metadata.Value<string>("@id"); var etag = Etag.Parse(metadata.Value<string>("@etag")); document.Remove("@metadata"); metadata.Remove("@id"); metadata.Remove("@etag"); if (!options.MatchFilters(document)) { continue; } if (options.ShouldExcludeExpired && options.ExcludeExpired(document, now)) { continue; } bulkInsertOperation.Store(document, metadata, id); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } lastEtag = etag; } break; } catch (Exception e) { if (retries-- == 0) { throw; } exportStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); importStore.JsonRequestFactory.RequestTimeout = TimeSpan.FromSeconds(timeout *= 2); ShowProgress("Error reading from database, remaining attempts {0}, will retry. Error: {1}", retries, e); } } } finally { exportStore.JsonRequestFactory.RequestTimeout = originalRequestTimeout; } } // In case we filter out all the results, lastEtag hasn't been updated to the latest, but we still need to continue until we finish all the docs. var databaseStatistics = await exportStore.AsyncDatabaseCommands.GetStatisticsAsync(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, exportBatchSize); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading documents, total: {0}", totalCount); return lastEtag; } } finally { bulkInsertOperation.Dispose(); } }
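// A distilled sketch (not from the source) of the retry pattern in the
// non-streaming branch above: each failure doubles the request timeout and
// retries until the attempt budget runs out, then rethrows.
private static async Task<T> WithRetriesAsync<T>(Func<Task<T>> action, int retries,
    TimeSpan timeout, Action<TimeSpan> applyTimeout)
{
    while (true)
    {
        try { return await action(); }
        catch
        {
            if (retries-- == 0)
                throw; // out of attempts; surface the last error
            timeout = TimeSpan.FromTicks(timeout.Ticks * 2); // back off
            applyTimeout(timeout);
        }
    }
}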
public SmugglerApi(SmugglerOptions options = null) : base(options ?? new SmugglerOptions()) { Operations = new RemoteSmugglerOperations(() => store, () => operation, () => IsDocsStreamingSupported, () => IsTransformersSupported); }
protected override void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtagsToFetch) { throw new NotImplementedException("Exporting deletions is not supported for Command Line Smuggler"); }
public void ImportData(SmugglerOptions options, bool incremental = false) { if (incremental == false) { using (FileStream fileStream = File.OpenRead(options.File)) { ImportData(fileStream, options); } return; } var files = Directory.GetFiles(Path.GetFullPath(options.File)) .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase)) .OrderBy(File.GetLastWriteTimeUtc) .ToArray(); if (files.Length == 0) return; var optionsWithoutIndexes = new SmugglerOptions { File = options.File, Filters = options.Filters, OperateOnTypes = options.OperateOnTypes & ~ItemType.Indexes }; for (var i = 0; i < files.Length - 1; i++) { using (var fileStream = File.OpenRead(Path.Combine(options.File, files[i]))) { ImportData(fileStream, optionsWithoutIndexes); } } using (var fileStream = File.OpenRead(Path.Combine(options.File, files.Last()))) { ImportData(fileStream, options); } }
public static async Task Between(SmugglerBetweenOptions betweenOptions, SmugglerOptions options) { SetDatabaseNameIfEmpty(betweenOptions.From); SetDatabaseNameIfEmpty(betweenOptions.To); using (var exportStore = CreateStore(betweenOptions.From)) using (var importStore = CreateStore(betweenOptions.To)) { SmugglerApi.ValidateThatServerIsUpAndDatabaseExists(betweenOptions.From, exportStore); SmugglerApi.ValidateThatServerIsUpAndDatabaseExists(betweenOptions.To, importStore); var exportBatchSize = GetBatchSize(exportStore, options); var importBatchSize = GetBatchSize(importStore, options); var exportStoreSupportedFeatures = await DetectServerSupportedFeatures(exportStore); var importStoreSupportedFeatures = await DetectServerSupportedFeatures(importStore); if (string.IsNullOrEmpty(betweenOptions.IncrementalKey)) { betweenOptions.IncrementalKey = ((AsyncServerClient)exportStore.AsyncDatabaseCommands).Url; } var incremental = new ExportIncremental(); if (options.Incremental) { var jsonDocument = await importStore.AsyncDatabaseCommands.GetAsync(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { var smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization<SmugglerExportIncremental>(); ExportIncremental value; if (smugglerExportIncremental.ExportIncremental.TryGetValue(betweenOptions.IncrementalKey, out value)) { incremental = value; } options.StartDocsEtag = incremental.LastDocsEtag ?? Etag.Empty; options.StartAttachmentsEtag = incremental.LastAttachmentsEtag ?? Etag.Empty; } } if (options.OperateOnTypes.HasFlag(ItemType.Indexes)) { await ExportIndexes(exportStore, importStore, exportBatchSize); } if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && exportStoreSupportedFeatures.IsTransformersSupported && importStoreSupportedFeatures.IsTransformersSupported) { await ExportTransformers(exportStore, importStore, exportBatchSize); } if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { incremental.LastDocsEtag = await ExportDocuments(exportStore, importStore, options, exportStoreSupportedFeatures, exportBatchSize, importBatchSize); } if (options.OperateOnTypes.HasFlag(ItemType.Attachments)) { incremental.LastAttachmentsEtag = await ExportAttachments(exportStore, importStore, options, exportBatchSize); } if (options.Incremental) { var smugglerExportIncremental = new SmugglerExportIncremental(); var jsonDocument = await importStore.AsyncDatabaseCommands.GetAsync(SmugglerExportIncremental.RavenDocumentKey); if (jsonDocument != null) { smugglerExportIncremental = jsonDocument.DataAsJson.JsonDeserialization<SmugglerExportIncremental>(); } smugglerExportIncremental.ExportIncremental[betweenOptions.IncrementalKey] = incremental; await importStore.AsyncDatabaseCommands.PutAsync(SmugglerExportIncremental.RavenDocumentKey, null, RavenJObject.FromObject(smugglerExportIncremental), new RavenJObject()); } } }
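// A minimal usage sketch (not from the source) for the server-to-server flow
// above, called from within the same class; the URLs and database names are
// placeholders, and From/To are assumed to be connection-string options.
var betweenOptions = new SmugglerBetweenOptions
{
    From = new RavenConnectionStringOptions { Url = "http://source:8080", DefaultDatabase = "Northwind" },
    To = new RavenConnectionStringOptions { Url = "http://target:8080", DefaultDatabase = "Northwind" }
};
await Between(betweenOptions, new SmugglerOptions { Incremental = true });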
private Program() { connectionStringOptions = new RavenConnectionStringOptions(); options = new SmugglerOptions(); optionSet = new OptionSet { { "operate-on-types:", "Specify the types to operate on. You can specify more than one type by combining items with a comma." + Environment.NewLine + "Default is all items." + Environment.NewLine + "Usage example: Indexes,Documents,Attachments", value => { try { options.OperateOnTypes = options.ItemTypeParser(value); } catch (Exception e) { PrintUsageAndExit(e); } } }, { "metadata-filter:{=}", "Filter documents by a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts", (key, val) => options.Filters.Add(new FilterSetting { Path = "@metadata." + key, ShouldMatch = true, Values = new List<string> { val } }) }, { "negative-metadata-filter:{=}", "Filter documents NOT matching a metadata property." + Environment.NewLine + "Usage example: Raven-Entity-Name=Posts", (key, val) => options.Filters.Add(new FilterSetting { Path = "@metadata." + key, ShouldMatch = false, Values = new List<string> { val } }) }, { "filter:{=}", "Filter documents by a document property." + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => options.Filters.Add(new FilterSetting { Path = key, ShouldMatch = true, Values = new List<string> { val } }) }, { "negative-filter:{=}", "Filter documents NOT matching a document property." + Environment.NewLine + "Usage example: Property-Name=Value", (key, val) => options.Filters.Add(new FilterSetting { Path = key, ShouldMatch = false, Values = new List<string> { val } }) }, { "transform:", "Transform documents using a given script (import only)", script => options.TransformScript = script }, { "transform-file:", "Transform documents using a given script file (import only)", script => options.TransformScript = File.ReadAllText(script) }, { "max-steps-for-transform-script:", "Maximum number of steps that transform script can have (import only)", s => options.MaxStepsForTransformScript = int.Parse(s) }, { "timeout:", "The timeout to use for requests", s => options.Timeout = int.Parse(s) }, { "batch-size:", "The batch size for requests", s => options.BatchSize = int.Parse(s) }, { "d|database:", "The database to operate on. If not specified, the operations will be on the default database.", value => connectionStringOptions.DefaultDatabase = value }, { "u|user|username:", "The username to use when the database requires the client to authenticate.", value => Credentials.UserName = value }, { "p|pass|password:", "The password to use when the database requires the client to authenticate.", value => Credentials.Password = value }, { "domain:", "The domain to use when the database requires the client to authenticate.", value => Credentials.Domain = value }, { "key|api-key|apikey:", "The API-key to use, when using OAuth.", value => connectionStringOptions.ApiKey = value }, { "incremental", "States usage of incremental operations", _ => incremental = true }, { "wait-for-indexing", "Wait until all indexing activity has been completed (import only)", _ => waitForIndexing = true }, { "excludeexpired", "Excludes expired documents created by the expiration bundle", _ => options.ShouldExcludeExpired = true }, { "h|?|help", v => PrintUsageAndExit(0) }, }; }
public void ExportData(SmugglerOptions options, bool incremental = false) { var lastDocsEtag = Guid.Empty; var lastAttachmentEtag = Guid.Empty; var folder = options.File; var etagFileLocation = Path.Combine(folder, "IncrementalExport.state.json"); if (incremental == true) { if (Directory.Exists(folder) == false) { Directory.CreateDirectory(folder); } options.File = Path.Combine(folder, DateTime.Now.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(options.File)) { var counter = 1; var found = false; while (found == false) { options.File = Path.Combine(folder, DateTime.Now.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(options.File) == false) found = true; counter++; } } if (File.Exists(etagFileLocation)) { using (var streamReader = new StreamReader(new FileStream(etagFileLocation, FileMode.Open))) using (var jsonReader = new JsonTextReader(streamReader)) { var ravenJObject = RavenJObject.Load(jsonReader); lastDocsEtag = new Guid(ravenJObject.Value<string>("LastDocEtag")); lastAttachmentEtag = new Guid(ravenJObject.Value<string>("LastAttachmentEtag")); } } } using (var streamWriter = new StreamWriter(new GZipStream(File.Create(options.File), CompressionMode.Compress))) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Indexes)) { ExportIndexes(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { lastDocsEtag = ExportDocuments(options, jsonWriter, lastDocsEtag); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments)) { lastAttachmentEtag = ExportAttachments(jsonWriter, lastAttachmentEtag); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } if (incremental != true) return; using (var streamWriter = new StreamWriter(File.Create(etagFileLocation))) { new RavenJObject { {"LastDocEtag", lastDocsEtag.ToString()}, {"LastAttachmentEtag", lastAttachmentEtag.ToString()} }.WriteTo(new JsonTextWriter(streamWriter)); streamWriter.Flush(); } }
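// For reference (sketched, not from the source): the incremental branch above
// tracks its progress in IncrementalExport.state.json next to the dump files;
// the etag values below are illustrative.
// {
//   "LastDocEtag":        "a9f1c3e0-0000-0000-0000-000000000000",
//   "LastAttachmentEtag": "00000000-0000-0000-0000-000000000000"
// }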
public SmugglerApi(SmugglerOptions smugglerOptions, RavenConnectionStringOptions connectionStringOptions) : base(smugglerOptions) { ConnectionStringOptions = connectionStringOptions; }