public async void Sample1()
{
    #region smuggler_api_2
    // import only Documents
    // from the dump.raven file
    // to the NewNorthwind database (must exist)
    // on the http://localhost:8080 server
    SmugglerDatabaseApi smugglerApi = new SmugglerDatabaseApi(new SmugglerDatabaseOptions
    {
        OperateOnTypes = ItemType.Documents,
        Incremental = false
    });

    SmugglerImportOptions<RavenConnectionStringOptions> importOptions = new SmugglerImportOptions<RavenConnectionStringOptions>
    {
        FromFile = "dump.raven",
        To = new RavenConnectionStringOptions
        {
            DefaultDatabase = "NewNorthwind",
            Url = "http://localhost:8080"
        }
    };

    await smugglerApi.ImportData(importOptions, null);
    #endregion
}
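For comparison, the same API can run an incremental import. A minimal sketch, assuming the directory layout the incremental code paths below expect (a folder of *.ravendb-incremental-dump files applied oldest first, with indexes and transformers imported only from the newest file); the backup path is hypothetical:

public async void Sample2()
{
    SmugglerDatabaseApi smugglerApi = new SmugglerDatabaseApi(new SmugglerDatabaseOptions
    {
        OperateOnTypes = ItemType.Documents,
        Incremental = true // FromFile is treated as a directory of *.ravendb-incremental-dump files
    });

    SmugglerImportOptions<RavenConnectionStringOptions> importOptions = new SmugglerImportOptions<RavenConnectionStringOptions>
    {
        FromFile = @"C:\backups\NewNorthwind", // hypothetical backup directory
        To = new RavenConnectionStringOptions
        {
            DefaultDatabase = "NewNorthwind",
            Url = "http://localhost:8080"
        }
    };

    await smugglerApi.ImportData(importOptions, null);
}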
public virtual async Task ImportData(SmugglerImportOptions<RavenConnectionStringOptions> importOptions)
{
    if (Options.Incremental == false)
    {
        Stream stream = importOptions.FromStream;
        bool ownStream = false;
        try
        {
            if (stream == null)
            {
                stream = File.OpenRead(importOptions.FromFile);
                ownStream = true;
            }
            await ImportData(importOptions, stream);
        }
        finally
        {
            if (stream != null && ownStream)
            {
                stream.Dispose();
            }
        }
        return;
    }

    var files = Directory.GetFiles(Path.GetFullPath(importOptions.FromFile))
        .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase))
        .OrderBy(File.GetLastWriteTimeUtc)
        .ToArray();

    if (files.Length == 0)
    {
        return;
    }

    var oldItemType = Options.OperateOnTypes;
    Options.OperateOnTypes = Options.OperateOnTypes & ~(ItemType.Indexes | ItemType.Transformers);

    for (var i = 0; i < files.Length - 1; i++)
    {
        using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files[i])))
        {
            Operations.ShowProgress("Starting to import file: {0}", files[i]);
            await ImportData(importOptions, fileStream);
        }
    }

    Options.OperateOnTypes = oldItemType;

    using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files.Last())))
    {
        Operations.ShowProgress("Starting to import file: {0}", files.Last());
        await ImportData(importOptions, fileStream);
    }
}
public virtual async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options)
{
    if (options.Incremental == false)
    {
        Stream stream = importOptions.FromStream;
        bool ownStream = false;
        try
        {
            if (stream == null)
            {
                stream = File.OpenRead(importOptions.FromFile);
                ownStream = true;
            }
            await ImportData(importOptions, options, stream);
        }
        finally
        {
            if (stream != null && ownStream)
            {
                stream.Dispose();
            }
        }
        return;
    }

    var files = Directory.GetFiles(Path.GetFullPath(importOptions.FromFile))
        .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase))
        .OrderBy(File.GetLastWriteTimeUtc)
        .ToArray();

    if (files.Length == 0)
    {
        return;
    }

    var optionsWithoutIndexes = new SmugglerOptions
    {
        Filters = options.Filters,
        OperateOnTypes = options.OperateOnTypes & ~(ItemType.Indexes | ItemType.Transformers)
    };

    for (var i = 0; i < files.Length - 1; i++)
    {
        using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files[i])))
        {
            await ImportData(importOptions, optionsWithoutIndexes, fileStream);
        }
    }

    using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files.Last())))
    {
        await ImportData(importOptions, options, fileStream);
    }
}
public virtual async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions)
{
    Operations.ShowProgress("Importing filesystem");

    if (Options.Incremental == false)
    {
        Stream stream = importOptions.FromStream;
        bool ownStream = false;
        try
        {
            if (stream == null)
            {
                stream = File.OpenRead(importOptions.FromFile);
                ownStream = true;
            }
            await ImportData(importOptions, stream);
        }
        finally
        {
            if (stream != null && ownStream)
            {
                stream.Dispose();
            }
        }
        return;
    }

    var directory = new DirectoryInfo(importOptions.FromFile);
    if (!directory.Exists)
    {
        throw new InvalidOperationException("The directory does not exist.");
    }

    var files = Directory.GetFiles(directory.FullName)
        .Where(file => Path.GetExtension(file).Equals(".ravenfs-incremental-dump", StringComparison.CurrentCultureIgnoreCase))
        .OrderBy(x => File.GetLastWriteTimeUtc(x))
        .ToArray();

    if (files.Length == 0)
    {
        return;
    }

    foreach (string filename in files)
    {
        using (var fileStream = File.OpenRead(filename))
        {
            await ImportData(importOptions, fileStream);
        }
    }
}
public override async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions)
{
    if (importOptions.To == null)
        throw new ArgumentNullException("importOptions");

    using (primaryStore = await CreateStore(importOptions.To))
    using (documentStore = CreateDocumentStore(importOptions.To))
    {
        Operations = new SmugglerRemoteFilesOperations(() => primaryStore, () => documentStore);

        await base.ImportData(importOptions);
    }
}
private async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions, string filename)
{
    var sw = Stopwatch.StartNew();

    var directory = Path.GetDirectoryName(filename);
    var serializer = JsonExtensions.CreateDefaultJsonSerializer();

    // We open the zip file.
    using (var archive = new ZipArchive(File.OpenRead(filename), ZipArchiveMode.Read))
    {
        var filesLookup = archive.Entries.ToDictionary(x => x.FullName);

        var metadataEntry = filesLookup[".metadata"];
        using (var streamReader = new StreamReader(metadataEntry.Open()))
        {
            foreach (var json in streamReader.EnumerateJsonObjects())
            {
                // For each entry in the metadata file.
                var container = serializer.Deserialize<FileContainer>(new StringReader(json));

                var header = new FileHeader(container.Key, container.Metadata);
                if (header.IsTombstone)
                {
                    continue;
                }

                var entry = filesLookup[container.Key];
                using (var dataStream = entry.Open())
                {
                    if (Options.StripReplicationInformation)
                    {
                        container.Metadata = Operations.StripReplicationInformationFromMetadata(container.Metadata);
                    }

                    await Operations.PutFiles(header, dataStream, entry.Length);
                }

                Options.CancelToken.Token.ThrowIfCancellationRequested();
            }

            Options.CancelToken.Token.ThrowIfCancellationRequested();
        }
    }

    sw.Stop();
}
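The FileContainer type deserialized above is not part of this listing; a hypothetical shape inferred purely from how the code uses it (Key and Metadata are the only members touched, and FileHeader accepts them directly):

// Hypothetical sketch; the real class lives in the RavenFS sources.
public class FileContainer
{
    public string Key { get; set; }            // also the name of the data entry inside the zip archive
    public RavenJObject Metadata { get; set; } // passed to FileHeader and to the replication/versioning strippers
}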
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    var streamProvider = new MultipartMemoryStreamProvider();
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var fileStream = await streamProvider.Contents
        .First(c => c.Headers.ContentDisposition.Name == "\"file\"")
        .ReadAsStreamAsync();

    var dataDumper = new DataDumper(Database);
    var importOptions = new SmugglerImportOptions
    {
        FromStream = fileStream
    };
    var options = new SmugglerOptions
    {
        BatchSize = batchSize,
        ShouldExcludeExpired = includeExpiredDocuments,
        OperateOnTypes = operateOnTypes,
        TransformScript = transformScript
    };

    // Filters are passed in without the aid of the model binder. Instead, we pass in a list of
    // FilterSettings using a string like this: pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
    // Why? Because I don't see a way to pass a list of values to a WebAPI method that accepts
    // a file upload, outside of passing in a simple string value and parsing it ourselves.
    if (filtersPipeDelimited != null)
    {
        options.Filters.AddRange(filtersPipeDelimited
            .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
            .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
            .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) }));
    }

    await dataDumper.ImportData(importOptions, options);

    return GetEmptyMessage();
}
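Going the other way, a hypothetical client-side helper that produces the delimited string the action above parses; the method name and shape are illustrative, not part of any RavenDB API:

// Inverse of the parsing above: ";;;" separates fields, "|||" separates filter records.
static string BuildFiltersPipeDelimited(IEnumerable<FilterSetting> filters)
{
    return string.Join("|||",
        filters.Select(f => string.Join(";;;", f.Path, f.Values.First(), f.ShouldMatch)));
}

// e.g. yields "Title;;;Raven;;;True|||@metadata.Raven-Entity-Name;;;Orders;;;False",
// which bool.Parse on the receiving side accepts ("True"/"False").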
public async Task<HttpResponseMessage> ImportDatabase()
{
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    var streamProvider = new MultipartMemoryStreamProvider();
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var fileStream = await streamProvider.Contents.First().ReadAsStreamAsync();

    var dataDumper = new DataDumper(Database);
    var importOptions = new SmugglerImportOptions
    {
        FromStream = fileStream
    };
    var options = new SmugglerOptions();

    await dataDumper.ImportData(importOptions, options);

    return GetEmptyMessage();
}
private async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions, Stream stream, int streamingBatchSize = 10 * 1024 * 1024)
{
    Operations.Configure(Options);
    Operations.Initialize(Options);

    await DetectServerSupportedFeatures(importOptions.To).ConfigureAwait(false);

    var sw = Stopwatch.StartNew();
    var serializer = JsonExtensions.CreateDefaultJsonSerializer();

    // We open the zip file.
    using (var archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        var filesLookup = archive.Entries.ToDictionary(x => x.FullName);

        var configurationsCount = 0;

        ZipArchiveEntry configurationsEntry;
        if (filesLookup.TryGetValue(ConfigurationsEntry, out configurationsEntry)) // older exports may not contain it
        {
            using (var streamReader = new StreamReader(configurationsEntry.Open()))
            {
                await ImportConfigs(streamReader, serializer, configurationsCount).ConfigureAwait(false);
            }
        }

        var metadataEntry = filesLookup[MetadataEntry];
        using (var streamReader = new StreamReader(metadataEntry.Open()))
        {
            await ImportFiles(streamReader, serializer, filesLookup).ConfigureAwait(false);

            Options.CancelToken.Token.ThrowIfCancellationRequested();
        }
    }

    sw.Stop();
}
public override async Task ImportData(SmugglerImportOptions<RavenConnectionStringOptions> importOptions, Stream stream)
{
    using (store = CreateStore(importOptions.To))
    {
        Task disposeTask;

        try
        {
            await CreateBulkInsertOperation();

            await base.ImportData(importOptions, stream);
        }
        finally
        {
            disposeTask = operation.DisposeAsync();
        }

        if (disposeTask != null)
        {
            await disposeTask;
        }
    }
}
public virtual async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions)
{
    Operations.Configure(Options);
    Operations.Initialize(Options);

    await DetectServerSupportedFeatures(importOptions.To);

    if (Options.Incremental == false)
    {
        await ImportData(importOptions, importOptions.FromFile);
        return;
    }

    var directory = new DirectoryInfo(importOptions.FromFile);
    if (!directory.Exists)
    {
        throw new InvalidOperationException("The directory does not exist.");
    }

    var files = Directory.GetFiles(directory.FullName)
        .Where(file => Path.GetExtension(file).Equals(".ravenfs-incremental-dump", StringComparison.CurrentCultureIgnoreCase))
        .OrderBy(x => File.GetLastWriteTimeUtc(x))
        .ToArray();

    if (files.Length == 0)
    {
        return;
    }

    foreach (string filename in files)
    {
        await ImportData(importOptions, filename);
    }
}
public async virtual Task ImportData(SmugglerImportOptions importOptions, Stream stream)
{
    Operations.Configure(SmugglerOptions);
    Operations.Initialize(SmugglerOptions);

    await DetectServerSupportedFeatures(importOptions.To);

    Stream sizeStream;

    var sw = Stopwatch.StartNew();
    // Try to read the stream as compressed; otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        stream.Position = 0;
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }
    catch (Exception e)
    {
        if (e is InvalidDataException == false)
            throw;

        // Not GZip: rewind and read the raw stream.
        stream.Seek(0, SeekOrigin.Begin);

        sizeStream = new CountingStream(stream);
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }

    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");

    var exportCounts = new Dictionary<string, int>();
    var exportSectionRegistar = new Dictionary<string, Func<int>>();

    SmugglerOptions.CancelToken.Token.ThrowIfCancellationRequested();

    exportSectionRegistar.Add("Indexes", () =>
    {
        Operations.ShowProgress("Begin reading indexes");
        var indexCount = ImportIndexes(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount));
        return indexCount;
    });

    exportSectionRegistar.Add("Docs", () =>
    {
        Operations.ShowProgress("Begin reading documents");
        var documentCount = ImportDocuments(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount));
        return documentCount;
    });

    exportSectionRegistar.Add("Attachments", () =>
    {
        Operations.ShowProgress("Begin reading attachments");
        var attachmentCount = ImportAttachments(importOptions.To, jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount));
        return attachmentCount;
    });

    exportSectionRegistar.Add("Transformers", () =>
    {
        Operations.ShowProgress("Begin reading transformers");
        var transformersCount = ImportTransformers(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount));
        return transformersCount;
    });

    exportSectionRegistar.Add("DocsDeletions", () =>
    {
        Operations.ShowProgress("Begin reading deleted documents");
        var deletedDocumentsCount = ImportDeletedDocuments(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading deleted documents, total: {0}", deletedDocumentsCount));
        return deletedDocumentsCount;
    });

    exportSectionRegistar.Add("AttachmentsDeletions", () =>
    {
        Operations.ShowProgress("Begin reading deleted attachments");
        var deletedAttachmentsCount = ImportDeletedAttachments(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading deleted attachments, total: {0}", deletedAttachmentsCount));
        return deletedAttachmentsCount;
    });

    exportSectionRegistar.Add("Identities", () =>
    {
        Operations.ShowProgress("Begin reading identities");
        var identitiesCount = ImportIdentities(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading identities, total: {0}", identitiesCount));
        return identitiesCount;
    });

    exportSectionRegistar.Keys.ForEach(k => exportCounts[k] = 0);

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndObject)
    {
        SmugglerOptions.CancelToken.Token.ThrowIfCancellationRequested();

        if (jsonReader.TokenType != JsonToken.PropertyName)
            throw new InvalidDataException("PropertyName was expected");

        Func<int> currentAction;
        var currentSection = jsonReader.Value.ToString();
        if (exportSectionRegistar.TryGetValue(currentSection, out currentAction) == false)
        {
            throw new InvalidDataException("Unexpected property found: " + jsonReader.Value);
        }

        if (jsonReader.Read() == false)
        {
            exportCounts[currentSection] = 0;
            continue;
        }

        if (jsonReader.TokenType != JsonToken.StartArray)
            throw new InvalidDataException("StartArray was expected");

        exportCounts[currentSection] = currentAction();
    }

    sw.Stop();

    Operations.ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments, deleted {2:#,#;;0} documents and {3:#,#;;0} attachments in {4:#,#.###;;0} s",
        exportCounts["Docs"], exportCounts["Attachments"], exportCounts["DocsDeletions"], exportCounts["AttachmentsDeletions"], sw.ElapsedMilliseconds / 1000f);

    SmugglerOptions.CancelToken.Token.ThrowIfCancellationRequested();
}
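The reader above expects a single JSON object, optionally GZip-compressed, whose top-level properties are exactly the section names registered in exportSectionRegistar. A minimal, hypothetical writer that produces an empty but well-formed dump for testing; everything except the section names is an assumption:

// Hypothetical test helper, using Newtonsoft.Json and System.IO.Compression.
using (var file = File.Create("empty-dump.raven")) // hypothetical output path
using (var gzip = new GZipStream(file, CompressionMode.Compress))
using (var writer = new JsonTextWriter(new StreamWriter(gzip)))
{
    writer.WriteStartObject();
    foreach (var section in new[] { "Indexes", "Docs", "Attachments", "Transformers", "DocsDeletions", "AttachmentsDeletions", "Identities" })
    {
        writer.WritePropertyName(section);
        writer.WriteStartArray(); // an empty section; real exports stream items here
        writer.WriteEndArray();
    }
    writer.WriteEndObject();
    writer.Flush();
}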
private async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions, Stream stream)
{
    Operations.Configure(Options);
    Operations.Initialize(Options);

    await DetectServerSupportedFeatures(importOptions.To);

    var sw = Stopwatch.StartNew();
    var serializer = JsonExtensions.CreateDefaultJsonSerializer();

    // We open the zip file.
    using (var archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        var filesLookup = archive.Entries.ToDictionary(x => x.FullName);

        var configurationsCount = 0;

        ZipArchiveEntry configurationsEntry;
        if (filesLookup.TryGetValue(ConfigurationsEntry, out configurationsEntry)) // older exports may not contain it
        {
            using (var streamReader = new StreamReader(configurationsEntry.Open()))
            {
                foreach (var json in streamReader.EnumerateJsonObjects())
                {
                    var config = serializer.Deserialize<ConfigContainer>(new StringReader(json));

                    if (Options.StripReplicationInformation)
                    {
                        if (config.Name.Equals(SynchronizationConstants.RavenSynchronizationVersionHiLo, StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                    }

                    await Operations.PutConfig(config.Name, config.Value);

                    configurationsCount++;
                    if (configurationsCount % 100 == 0)
                    {
                        Operations.ShowProgress("Read {0:#,#;;0} configurations", configurationsCount);
                    }
                }
            }
        }

        var filesCount = 0;

        var metadataEntry = filesLookup[MetadataEntry];
        using (var streamReader = new StreamReader(metadataEntry.Open()))
        {
            foreach (var json in streamReader.EnumerateJsonObjects())
            {
                // For each entry in the metadata file.
                var container = serializer.Deserialize<FileContainer>(new StringReader(json));

                var header = new FileHeader(container.Key, container.Metadata);
                if (header.IsTombstone)
                {
                    continue;
                }

                var entry = filesLookup[container.Key];
                using (var dataStream = entry.Open())
                {
                    if (Options.StripReplicationInformation)
                    {
                        container.Metadata = Operations.StripReplicationInformationFromMetadata(container.Metadata);
                    }

                    if (Options.ShouldDisableVersioningBundle)
                    {
                        container.Metadata = Operations.DisableVersioning(container.Metadata);
                    }

                    await Operations.PutFile(header, dataStream, entry.Length);
                }

                Options.CancelToken.Token.ThrowIfCancellationRequested();

                filesCount++;
                if (filesCount % 100 == 0)
                {
                    Operations.ShowProgress("Read {0:#,#;;0} files", filesCount);
                }
            }

            Options.CancelToken.Token.ThrowIfCancellationRequested();
        }
    }

    sw.Stop();
}
public async Task ShouldExportAndImportConfigurations()
{
    using (var exportStream = new MemoryStream())
    {
        int countOfConfigurations;

        using (var store = NewStore())
        {
            for (int i = 0; i < 100; i++)
            {
                await store.AsyncFilesCommands.Configuration.SetKeyAsync("items/" + i, new RavenJObject
                {
                    { "test", "value" },
                    { "test-array", new RavenJArray { "item-1", "item-2", "item-3" } }
                });
            }

            countOfConfigurations = (await store.AsyncFilesCommands.Configuration.GetKeyNamesAsync(0, 200)).Length;

            var exportOptions = new SmugglerExportOptions<FilesConnectionStringOptions>
            {
                From = new FilesConnectionStringOptions
                {
                    Url = store.Url,
                    DefaultFileSystem = store.DefaultFileSystem
                },
                ToStream = exportStream
            };

            await new SmugglerFilesApi().ExportData(exportOptions);
        }

        using (var import = NewStore(1))
        {
            exportStream.Position = 0;

            var importOptions = new SmugglerImportOptions<FilesConnectionStringOptions>
            {
                FromStream = exportStream,
                To = new FilesConnectionStringOptions
                {
                    Url = import.Url,
                    DefaultFileSystem = import.DefaultFileSystem
                }
            };

            await new SmugglerFilesApi().ImportData(importOptions);

            Assert.Equal(countOfConfigurations, (await import.AsyncFilesCommands.Configuration.GetKeyNamesAsync(0, 200)).Length);

            for (int i = 0; i < 100; i++)
            {
                Assert.NotNull(await import.AsyncFilesCommands.Configuration.GetKeyAsync<RavenJObject>("items/" + i));
            }
        }
    }
}
private async Task ImportData(SmugglerImportOptions<FilesConnectionStringOptions> importOptions, Stream stream, int streamingBatchSize = 10 * 1024 * 1024)
{
    Operations.Configure(Options);
    Operations.Initialize(Options);

    await DetectServerSupportedFeatures(importOptions.To).ConfigureAwait(false);

    var sw = Stopwatch.StartNew();
    var serializer = JsonExtensions.CreateDefaultJsonSerializer();

    // We open the zip file.
    using (var archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        var filesLookup = archive.Entries.ToDictionary(x => x.FullName);

        var configurationsCount = 0;

        ZipArchiveEntry configurationsEntry;
        if (filesLookup.TryGetValue(ConfigurationsEntry, out configurationsEntry)) // older exports may not contain it
        {
            using (var streamReader = new StreamReader(configurationsEntry.Open()))
            {
                foreach (var json in streamReader.EnumerateJsonObjects())
                {
                    var config = serializer.Deserialize<ConfigContainer>(new StringReader(json));

                    if (Options.StripReplicationInformation)
                    {
                        if (config.Name.Equals(SynchronizationConstants.RavenSynchronizationVersionHiLo, StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                    }

                    await Operations.PutConfig(config.Name, config.Value).ConfigureAwait(false);

                    configurationsCount++;
                    if (configurationsCount % 100 == 0)
                    {
                        Operations.ShowProgress("Read {0:#,#;;0} configurations", configurationsCount);
                    }
                }
            }
        }

        var filesCount = 0;

        ServerSupportedFeatures features;
        try
        {
            features = await DetectServerSupportedFeatures(importOptions.To).ConfigureAwait(false);
        }
        catch (WebException e)
        {
            throw new SmugglerImportException("Failed to query server for supported features. Reason: " + e.Message)
            {
                LastEtag = Etag.Empty
            };
        }

        var metadataEntry = filesLookup[MetadataEntry];
        using (var streamReader = new StreamReader(metadataEntry.Open()))
        {
            if (features.IsFilesStreamingSupported)
            {
                filesCount = await ImportFilesWithStreaming(streamingBatchSize, streamReader, serializer, filesLookup).ConfigureAwait(false);
            }
            else
            {
                filesCount = await ImportFilesLegacy(streamReader, serializer, filesLookup).ConfigureAwait(false);
            }

            Options.CancelToken.Token.ThrowIfCancellationRequested();
        }
    }

    sw.Stop();
}
public async virtual Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream)
{
    SetSmugglerOptions(options);

    await DetectServerSupportedFeatures(importOptions.To);
    await EnsureDatabaseExists(importOptions.To);

    Stream sizeStream;

    var sw = Stopwatch.StartNew();
    // Try to read the stream as compressed; otherwise continue uncompressed.
    JsonTextReader jsonReader;

    try
    {
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
        {
            return;
        }
    }
    catch (Exception e)
    {
        if (e is InvalidDataException == false)
        {
            throw;
        }

        // Not GZip: rewind and read the raw stream.
        stream.Seek(0, SeekOrigin.Begin);

        sizeStream = new CountingStream(stream);
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
        {
            return;
        }
    }

    if (jsonReader.TokenType != JsonToken.StartObject)
    {
        throw new InvalidDataException("StartObject was expected");
    }

    var exportCounts = new Dictionary<string, int>();
    var exportSectionRegistar = new Dictionary<string, Func<int>>();

    exportSectionRegistar.Add("Indexes", () =>
    {
        ShowProgress("Begin reading indexes");
        var indexCount = ImportIndexes(jsonReader, options).Result;
        ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount));
        return indexCount;
    });

    exportSectionRegistar.Add("Docs", () =>
    {
        ShowProgress("Begin reading documents");
        var documentCount = ImportDocuments(jsonReader, options).Result;
        ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount));
        return documentCount;
    });

    exportSectionRegistar.Add("Attachments", () =>
    {
        ShowProgress("Begin reading attachments");
        var attachmentCount = ImportAttachments(importOptions.To, jsonReader, options).Result;
        ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount));
        return attachmentCount;
    });

    exportSectionRegistar.Add("Transformers", () =>
    {
        ShowProgress("Begin reading transformers");
        var transformersCount = ImportTransformers(jsonReader, options).Result;
        ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount));
        return transformersCount;
    });

    exportSectionRegistar.Add("DocsDeletions", () =>
    {
        ShowProgress("Begin reading deleted documents");
        var deletedDocumentsCount = ImportDeletedDocuments(jsonReader, options).Result;
        ShowProgress(string.Format("Done with reading deleted documents, total: {0}", deletedDocumentsCount));
        return deletedDocumentsCount;
    });

    exportSectionRegistar.Add("AttachmentsDeletions", () =>
    {
        ShowProgress("Begin reading deleted attachments");
        var deletedAttachmentsCount = ImportDeletedAttachments(jsonReader, options).Result;
        ShowProgress(string.Format("Done with reading deleted attachments, total: {0}", deletedAttachmentsCount));
        return deletedAttachmentsCount;
    });

    exportSectionRegistar.Keys.ForEach(k => exportCounts[k] = 0);

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndObject)
    {
        if (jsonReader.TokenType != JsonToken.PropertyName)
        {
            throw new InvalidDataException("PropertyName was expected");
        }

        Func<int> currentAction;
        var currentSection = jsonReader.Value.ToString();
        if (exportSectionRegistar.TryGetValue(currentSection, out currentAction) == false)
        {
            throw new InvalidDataException("Unexpected property found: " + jsonReader.Value);
        }

        if (jsonReader.Read() == false)
        {
            exportCounts[currentSection] = 0;
            continue;
        }

        if (jsonReader.TokenType != JsonToken.StartArray)
        {
            throw new InvalidDataException("StartArray was expected");
        }

        exportCounts[currentSection] = currentAction();
    }

    sw.Stop();

    ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments, deleted {2:#,#;;0} documents and {3:#,#;;0} attachments in {4:#,#;;0} ms",
        exportCounts["Docs"], exportCounts["Attachments"], exportCounts["DocsDeletions"], exportCounts["AttachmentsDeletions"], sw.ElapsedMilliseconds);
}
private async Task<int> ImportFilesWithStreaming_ForNowShouldNotBeUsed(SmugglerImportOptions<FilesConnectionStringOptions> importOptions, int streamingBatchSize, StreamReader streamReader, JsonSerializer serializer, Dictionary<string, ZipArchiveEntry> filesLookup)
{
    int filesCount = 0;

    var batchSizeFromConfig = 20; // should be taken from config
    int? filesLeftForCurrentBatch = batchSizeFromConfig;
    var readSP = Stopwatch.StartNew();

    Task previousTask = new CompletedTask();
    using (var filesMetadataEnumerator = streamReader.EnumerateJsonObjects().GetEnumerator())
    {
        while (filesMetadataEnumerator.MoveNext())
        {
            long totalSize = 0;
            List<FileUploadUnitOfWork> filesAndHeaders = new List<FileUploadUnitOfWork>();

            do
            {
                if (filesAndHeaders != null)
                {
                    filesLeftForCurrentBatch--;
                }

                var jsonString = filesMetadataEnumerator.Current;
                var container = serializer.Deserialize<FileContainer>(new StringReader(jsonString));

                var header = new FileHeader(container.Key, container.Metadata);
                if (header.IsTombstone)
                {
                    continue;
                }

                var entry = filesLookup[container.Key];
                filesAndHeaders.Add(new FileUploadUnitOfWork(entry, header));

                if (Options.StripReplicationInformation)
                {
                    container.Metadata = Operations.StripReplicationInformationFromMetadata(container.Metadata);
                }

                if (Options.ShouldDisableVersioningBundle)
                {
                    container.Metadata = Operations.DisableVersioning(container.Metadata);
                }

                Options.CancelToken.Token.ThrowIfCancellationRequested();

                filesCount++;
                if (filesCount % 100 == 0)
                {
                    Operations.ShowProgress("Read {0:#,#;;0} files, it took {1:#,#;;0} ms", filesCount, readSP.ElapsedMilliseconds);
                    readSP.Restart();
                }

                totalSize += entry.Length + jsonString.Length;
            } while (totalSize <= streamingBatchSize
                     && (filesLeftForCurrentBatch.HasValue == false || filesLeftForCurrentBatch > 0)
                     && filesMetadataEnumerator.MoveNext());

            filesLeftForCurrentBatch = batchSizeFromConfig;

            await previousTask.ConfigureAwait(false);
            previousTask = Operations.UploadFilesInStream(filesAndHeaders.ToArray());

            Options.CancelToken.Token.ThrowIfCancellationRequested();
        }
    }

    return filesCount;
}
public override async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream)
{
    SetSmugglerOptions(options);

    SmugglerJintHelper.Initialize(options);

    using (store = CreateStore(importOptions.To))
    {
        Task disposeTask;

        try
        {
            operation = new ChunkedBulkInsertOperation(store.DefaultDatabase, store, store.Listeners, new BulkInsertOptions
            {
                BatchSize = options.BatchSize,
                OverwriteExisting = true
            }, store.Changes(), options.ChunkSize);

            operation.Report += text => ShowProgress(text);

            await base.ImportData(importOptions, options, stream);
        }
        finally
        {
            disposeTask = operation.DisposeAsync();
        }

        if (disposeTask != null)
        {
            await disposeTask;
        }
    }
}
public virtual async Task ImportData(SmugglerImportOptions importOptions)
{
    if (SmugglerOptions.Incremental == false)
    {
        Stream stream = importOptions.FromStream;
        bool ownStream = false;
        try
        {
            if (stream == null)
            {
                stream = File.OpenRead(importOptions.FromFile);
                ownStream = true;
            }
            await ImportData(importOptions, stream);
        }
        finally
        {
            if (stream != null && ownStream)
                stream.Dispose();
        }
        return;
    }

    var files = Directory.GetFiles(Path.GetFullPath(importOptions.FromFile))
        .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase))
        .OrderBy(File.GetLastWriteTimeUtc)
        .ToArray();

    if (files.Length == 0)
        return;

    var oldItemType = SmugglerOptions.OperateOnTypes;

    SmugglerOptions.OperateOnTypes = SmugglerOptions.OperateOnTypes & ~(ItemType.Indexes | ItemType.Transformers);

    for (var i = 0; i < files.Length - 1; i++)
    {
        using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files[i])))
        {
            await ImportData(importOptions, fileStream);
        }
    }

    SmugglerOptions.OperateOnTypes = oldItemType;

    using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files.Last())))
    {
        await ImportData(importOptions, fileStream);
    }
}
public async Task ExportShouldDisableSynchronizationDestinations()
{
    using (var exportStream = new MemoryStream())
    using (var exportStore = NewStore())
    using (var importStore = NewStore(1))
    {
        await exportStore.AsyncFilesCommands.Synchronization.SetDestinationsAsync(new SynchronizationDestination
        {
            ServerUrl = "http://sample.com",
            FileSystem = "Sample",
            Enabled = true
        });

        var exportOptions = new SmugglerExportOptions<FilesConnectionStringOptions>
        {
            From = new FilesConnectionStringOptions
            {
                Url = exportStore.Url,
                DefaultFileSystem = exportStore.DefaultFileSystem
            },
            ToStream = exportStream
        };

        await new SmugglerFilesApi().ExportData(exportOptions);

        exportStream.Position = 0;

        var importOptions = new SmugglerImportOptions<FilesConnectionStringOptions>
        {
            FromStream = exportStream,
            To = new FilesConnectionStringOptions
            {
                Url = importStore.Url,
                DefaultFileSystem = importStore.DefaultFileSystem
            }
        };

        await new SmugglerFilesApi().ImportData(importOptions);

        var destinations = await importStore.AsyncFilesCommands.Synchronization.GetDestinationsAsync();

        Assert.Equal(1, destinations.Length);
        Assert.Equal("http://sample.com/fs/Sample", destinations[0].Url);
        Assert.Equal("Sample", destinations[0].FileSystem);
        Assert.False(destinations[0].Enabled);
    }
}