/// <summary>
/// Copies files between two RavenFS stores: pages file headers from the source
/// (starting after <paramref name="lastEtag"/>), downloads each file and puts it
/// into the destination, reporting progress every 1000 files or every 10 seconds.
/// </summary>
/// <param name="options">Connection options; only the source (<c>From</c>) side is read here.</param>
/// <param name="lastEtag">Etag to resume from; advanced after each successfully copied file.</param>
/// <param name="maxEtag">Stop once a file's etag exceeds this value; null means unbounded.</param>
/// <returns>The etag of the last file copied, usable to resume a later run.</returns>
/// <exception cref="SmugglerExportException">Thrown (after the loop completes its cleanup path) when any error occurred; carries LastEtag so the copy can resume.</exception>
/// NOTE(review): Math.Min(Options.BatchSize, int.MaxValue) is a no-op — BatchSize is already an int. TODO confirm and simplify.
/// NOTE(review): the final progress message says "documents" but this method copies files — looks like a copy/paste slip from the document smuggler.
private async Task <Etag> CopyBetweenStores(SmugglerBetweenOptions <FilesConnectionStringOptions> options, Etag lastEtag, Etag maxEtag) { var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(10); Operations.ShowProgress("Exporting Files"); Exception exceptionHappened = null; try { using (var files = await Operations.GetFiles(options.From, lastEtag, Math.Min(Options.BatchSize, int.MaxValue))) { while (await files.MoveNextAsync()) { var file = files.Current; var tempLastEtag = file.Etag; if (maxEtag != null && tempLastEtag.CompareTo(maxEtag) > 0) { break; } var downloadedFile = await Operations.DownloadFile(file); await Operations.PutFiles(file, downloadedFile, file.TotalSize.Value); lastEtag = tempLastEtag; totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { //TODO: Show also the MB/sec and total GB exported. Operations.ShowProgress("Exported {0} files. ", totalCount); lastReport = SystemTime.UtcNow; } } } } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); Operations.ShowProgress("Done with reading files, total: {0}, lastEtag: {1}", totalCount, lastEtag); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } if (exceptionHappened != null) { throw exceptionHappened; } Operations.ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); }
/// <summary>
/// Exports a RavenFS file system (files and configurations) into a
/// ".ravenfs(-incremental)-dump" ZIP archive, written either to the caller-supplied
/// <c>exportOptions.ToStream</c> or to a file this method creates at <c>ToFile</c>.
/// In incremental mode the target path is treated as a directory: previously saved
/// etags are loaded from it (unless an explicit StartFilesEtag was given) and a
/// unique timestamped dump file name is generated inside it.
/// </summary>
/// <param name="exportOptions">Source connection plus destination file/stream.</param>
/// <returns>Result carrying the resolved file path and the last file/deletion etags reached.</returns>
/// <exception cref="SmugglerExportException">
/// Thrown when the server feature probe fails, or rethrown from the export itself
/// after recording the last good etag (and, in incremental mode, after persisting
/// the etag file so a later run can resume).
/// </exception>
/// NOTE(review): PositionWrapperStream exists to work around ZipArchive reading
/// Position on non-seekable streams — see the Connect issue URL kept in the body.
/// NOTE(review): the stream is only disposed when this method created it (ownedStream);
/// a caller-supplied ToStream is left open — confirm callers rely on that.
public virtual async Task <ExportFilesResult> ExportData(SmugglerExportOptions <FilesConnectionStringOptions> exportOptions) { Operations.Configure(Options); Operations.Initialize(Options); var result = new ExportFilesResult { FilePath = exportOptions.ToFile, LastFileEtag = Options.StartFilesEtag, LastDeletedFileEtag = Options.StartFilesDeletionEtag, }; if (result.FilePath != null) { result.FilePath = Path.GetFullPath(result.FilePath); } if (Options.Incremental) { if (Directory.Exists(result.FilePath) == false) { if (File.Exists(result.FilePath)) { result.FilePath = Path.GetDirectoryName(result.FilePath) ?? result.FilePath; } else { Directory.CreateDirectory(result.FilePath); } } if (Options.StartFilesEtag == Etag.Empty) { ReadLastEtagsFromFile(result); } result.FilePath = Path.Combine(result.FilePath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm-0", CultureInfo.InvariantCulture) + ".ravenfs-incremental-dump"); if (File.Exists(result.FilePath)) { var counter = 1; while (true) { result.FilePath = Path.Combine(Path.GetDirectoryName(result.FilePath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + "-" + counter + ".ravenfs-incremental-dump"); if (File.Exists(result.FilePath) == false) { break; } counter++; } } } SmugglerExportException lastException = null; bool ownedStream = exportOptions.ToStream == null; var stream = exportOptions.ToStream ?? File.Create(result.FilePath); try { await DetectServerSupportedFeatures(exportOptions.From); } catch (WebException e) { throw new SmugglerExportException("Failed to query server for supported features. 
Reason : " + e.Message) { LastEtag = Etag.Empty, File = result.FilePath }; } try { // used to synchronize max returned values for put/delete operations var maxEtags = Operations.FetchCurrentMaxEtags(); try { // We use PositionWrapperStream due to: // http://connect.microsoft.com/VisualStudio/feedbackdetail/view/816411/ziparchive-shouldnt-read-the-position-of-non-seekable-streams using (var positionStream = new PositionWrapperStream(stream, leaveOpen: true)) using (var archive = new ZipArchive(positionStream, ZipArchiveMode.Create, leaveOpen: true)) { await ExportFiles(archive, result.LastFileEtag, maxEtags.LastFileEtag); await ExportConfigurations(archive); } } catch (SmugglerExportException ex) { result.LastFileEtag = ex.LastEtag; ex.File = result.FilePath; lastException = ex; } if (Options.Incremental) { WriteLastEtagsToFile(result, Path.GetDirectoryName(result.FilePath)); } if (lastException != null) { throw lastException; } return(result); } finally { if (ownedStream && stream != null) { stream.Dispose(); } } }
/// <summary>
/// Copies a whole file system between stores in two phases: first pages through all
/// configurations (skipping internal ones matched by <c>internalConfigs</c>) and puts
/// them into the destination, then copies files starting after
/// <paramref name="lastEtag"/>, skipping tombstones and — per Options — stripping
/// replication metadata and/or disabling the versioning bundle on each file.
/// </summary>
/// <param name="lastEtag">File etag to resume from; advanced after each copied file.</param>
/// <param name="maxEtag">Stop once a file's etag exceeds this value; null means unbounded.</param>
/// <returns>The etag of the last file copied.</returns>
/// <exception cref="SmugglerExportException">Wraps any failure, carrying the last good etag for resume.</exception>
/// NOTE(review): configuration paging uses totalConfigurations as the next start index,
/// but skipped internal configs are not counted — a page consisting only of internal
/// configs leaves hasConfigs false and ends the loop early, and partially-internal
/// pages shift the window so some configs may be re-read. TODO confirm intent.
/// NOTE(review): the final progress message says "documents" but this method handles files.
private async Task <Etag> CopyBetweenStores(Etag lastEtag, Etag maxEtag) { var totalFiles = 0; var totalConfigurations = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(10); Operations.ShowProgress("Exporting Files"); Exception exceptionHappened = null; try { while (true) { bool hasConfigs = false; foreach (var config in await Operations.GetConfigurations(totalConfigurations, Options.BatchSize)) { if (internalConfigs.IsMatch(config.Key)) { continue; } hasConfigs = true; await Operations.PutConfig(config.Key, EnsureValidExportConfig(config)); totalConfigurations++; if (totalConfigurations % 100 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { Operations.ShowProgress("Exported {0} configurations. ", totalConfigurations); lastReport = SystemTime.UtcNow; } } if (hasConfigs == false) { break; } } Operations.ShowProgress("Done with reading configurations, total: {0}", totalConfigurations); using (var files = await Operations.GetFiles(lastEtag, Options.BatchSize)) { while (await files.MoveNextAsync()) { var file = files.Current; if (file.IsTombstone) { continue; // Skip if the file has been deleted. } var tempLastEtag = file.Etag; if (maxEtag != null && tempLastEtag.CompareTo(maxEtag) > 0) { break; } if (Options.StripReplicationInformation) { file.Metadata = Operations.StripReplicationInformationFromMetadata(file.Metadata); } if (Options.ShouldDisableVersioningBundle) { file.Metadata = Operations.DisableVersioning(file.Metadata); } var downloadedFile = await Operations.DownloadFile(file); await Operations.PutFile(file, downloadedFile, file.TotalSize.Value); lastEtag = tempLastEtag; totalFiles++; if (totalFiles % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { //TODO: Show also the MB/sec and total GB exported. Operations.ShowProgress("Exported {0} files. ", totalFiles); lastReport = SystemTime.UtcNow; } } } } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. 
", e.Message); Operations.ShowProgress("Done with reading files, total: {0}, lastEtag: {1}", totalFiles, lastEtag); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } if (exceptionHappened != null) { throw exceptionHappened; } Operations.ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalFiles, lastEtag); return(lastEtag); }
/// <summary>
/// Exports files into an already-open ZipArchive using a producer/consumer pipeline:
/// GetFilesTask fills a BlockingCollection with file headers on a background task
/// while this loop drains it, downloading each file into its own archive entry; the
/// accumulated FileContainer records are then written as the MetadataEntry entry.
/// Taking from the collection after CompleteAdding throws InvalidOperationException,
/// which is used here as the normal end-of-stream signal.
/// </summary>
/// <param name="archive">Open ZIP archive (Create mode) to write entries into.</param>
/// <param name="lastEtag">Etag the producer starts reading after.</param>
/// <param name="maxEtag">Upper bound forwarded to the producer; null means unbounded.</param>
/// <returns>The last etag value — see review note below.</returns>
/// <exception cref="SmugglerExportException">Thrown after metadata is flushed when either the consumer loop or the producer task failed.</exception>
/// NOTE(review): lastEtag is never advanced inside the consumer loop, so the value
/// reported, thrown, and returned is the *starting* etag — confirm whether
/// GetFilesTask (not visible here) owns etag tracking, otherwise resume is broken.
/// NOTE(review): the final progress message says "documents" but this method handles files.
private async Task <Etag> ExportFiles(ZipArchive archive, Etag lastEtag, Etag maxEtag) { var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); Operations.ShowProgress("Exporting Files"); var metadataList = new List <FileContainer>(); Exception exceptionHappened = null; using (var cts = new CancellationTokenSource()) { var fileHeaders = new BlockingCollection <FileHeader>(); var getFilesTask = Task.Run(async() => await GetFilesTask(lastEtag, maxEtag, cts, fileHeaders), cts.Token); try { while (true) { FileHeader fileHeader = null; try { fileHeader = fileHeaders.Take(cts.Token); } catch (InvalidOperationException) // CompleteAdding Called { Operations.ShowProgress("Files List Retrieval Completed"); break; } cts.Token.ThrowIfCancellationRequested(); // Write the metadata (which includes the stream size and file container name) var fileContainer = new FileContainer { Key = Path.Combine(fileHeader.Directory.TrimStart('/'), fileHeader.Name), Metadata = fileHeader.Metadata, }; ZipArchiveEntry fileToStore = archive.CreateEntry(fileContainer.Key); using (var fileStream = await Operations.DownloadFile(fileHeader)) using (var zipStream = fileToStore.Open()) { await fileStream.CopyToAsync(zipStream).ConfigureAwait(false); } metadataList.Add(fileContainer); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { Operations.ShowProgress("Exported {0} files. ", totalCount); lastReport = SystemTime.UtcNow; } } } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. 
", e); Operations.ShowProgress("Done with reading files, total: {0}, lastEtag: {1}", totalCount, lastEtag); cts.Cancel(); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } try { getFilesTask.Wait(CancellationToken.None); } catch (OperationCanceledException) { // we are fine with this } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } var metadataEntry = archive.CreateEntry(MetadataEntry); using (var metadataStream = metadataEntry.Open()) using (var writer = new StreamWriter(metadataStream)) { foreach (var item in metadataList) { writer.WriteLine(RavenJObject.FromObject(item)); } } if (exceptionHappened != null) { throw exceptionHappened; } Operations.ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); }
/// <summary>
/// Exports a document database (indexes, documents, attachments, transformers) as a
/// gzip-compressed JSON dump, written either to the supplied stream or to a file
/// derived from options.BackupPath. In incremental mode (not supported under
/// Silverlight) BackupPath is treated as a directory: last etags are loaded from the
/// etag file or from <paramref name="backupStatus"/>, and a unique timestamped
/// ".ravendb-incremental-dump" file name is chosen. If the server feature probe
/// fails, the export falls back to legacy mode instead of aborting.
/// Attachments and transformers are skipped once an earlier section failed, so the
/// persisted etags stay consistent with what was actually written.
/// </summary>
/// <param name="stream">Destination stream, or null to create the dump file here (then it is disposed by this method).</param>
/// <param name="options">Export options; falls back to SmugglerOptions when null.</param>
/// <param name="incremental">Whether to produce an incremental dump.</param>
/// <param name="lastEtagsFromFile">Read/write resume etags from the etag file (only when no backupStatus is given).</param>
/// <param name="backupStatus">Optional periodic-backup state used as the etag source.</param>
/// <returns>The path of the dump file.</returns>
/// <exception cref="ArgumentNullException">Both <paramref name="options"/> and SmugglerOptions are null.</exception>
/// <exception cref="NotSupportedException">Incremental export requested under Silverlight.</exception>
/// <exception cref="SmugglerExportException">A section failed; LastEtag/File are set for resume, thrown after etags are persisted.</exception>
public virtual async Task <string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus backupStatus) { options = options ?? SmugglerOptions; if (options == null) { throw new ArgumentNullException("options"); } var file = options.BackupPath; #if !SILVERLIGHT if (incremental) { if (Directory.Exists(options.BackupPath) == false) { if (File.Exists(options.BackupPath)) { options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath; } else { Directory.CreateDirectory(options.BackupPath); } } if (lastEtagsFromFile && backupStatus == null) { ReadLastEtagsFromFile(options); } if (backupStatus != null) { ReadLastEtagsFromClass(options, backupStatus); } file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(file)) { var counter = 1; while (true) { file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(file) == false) { break; } counter++; } } } #else if (incremental) { throw new NotSupportedException("Incremental exports are not supported in SL."); } #endif try { await DetectServerSupportedFeatures(); } catch (WebException e) { ShowProgress("Failed to query server for supported features. Reason : " + e.Message); SetLegacyMode(); //could not detect supported features, then run in legacy mode } SmugglerExportException lastException = null; bool ownedStream = stream == null; try { stream = stream ?? 
File.Create(file); using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, #if SILVERLIGHT CompressionLevel.BestCompression, #endif leaveOpen: true)) using (var streamWriter = new StreamWriter(gZipStream)) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes) { await ExportIndexes(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { try { options.LastDocsEtag = await ExportDocuments(options, jsonWriter, options.LastDocsEtag); } catch (SmugglerExportException e) { options.LastDocsEtag = e.LastEtag; e.File = file; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null) { try { options.LastAttachmentEtag = await ExportAttachments(jsonWriter, options.LastAttachmentEtag); } catch (SmugglerExportException e) { options.LastAttachmentEtag = e.LastEtag; e.File = file; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Transformers"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null) { await ExportTransformers(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } #if !SILVERLIGHT if (incremental && lastEtagsFromFile) { WriteLastEtagsFromFile(options); } #endif if (lastException != null) { throw lastException; } return(file); } finally { if (ownedStream && stream != null) { stream.Dispose(); } } }
/// <summary>
/// Exports a RavenFS file system to a dump file (this variant always creates the
/// file itself — it ignores any ToStream). In incremental mode the target path is
/// treated as a directory: saved etags are loaded from it (unless an explicit
/// StartFilesEtag was given) and a unique timestamped
/// ".ravenfs-incremental-dump" file name is generated inside it.
/// </summary>
/// <param name="exportOptions">Source connection plus destination file path.</param>
/// <returns>Result carrying the resolved file path and the last file/deletion etags reached.</returns>
/// <exception cref="SmugglerExportException">Server feature probe failed, or the export itself failed (rethrown after etags are persisted in incremental mode).</exception>
/// <exception cref="SmugglerException">The resolved output path is null/empty/whitespace.</exception>
/// NOTE(review): unlike the sibling overload, Path.GetFullPath here is called without a
/// null guard — a null ToFile would throw ArgumentNullException before the explicit
/// whitespace check further down. TODO confirm callers always pass ToFile.
/// NOTE(review): the catch (Exception) { throw; } block is a no-op and could be removed.
public virtual async Task <ExportFilesResult> ExportData(SmugglerExportOptions <FilesConnectionStringOptions> exportOptions) { Operations.Configure(Options); Operations.Initialize(Options); var result = new ExportFilesResult { FilePath = exportOptions.ToFile, LastFileEtag = Options.StartFilesEtag, LastDeletedFileEtag = Options.StartFilesDeletionEtag, }; result.FilePath = Path.GetFullPath(result.FilePath); if (Options.Incremental) { if (Directory.Exists(result.FilePath) == false) { if (File.Exists(result.FilePath)) { result.FilePath = Path.GetDirectoryName(result.FilePath) ?? result.FilePath; } else { Directory.CreateDirectory(result.FilePath); } } if (Options.StartFilesEtag == Etag.Empty) { ReadLastEtagsFromFile(result); } result.FilePath = Path.Combine(result.FilePath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm-0", CultureInfo.InvariantCulture) + ".ravenfs-incremental-dump"); if (File.Exists(result.FilePath)) { var counter = 1; while (true) { result.FilePath = Path.Combine(Path.GetDirectoryName(result.FilePath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + "-" + counter + ".ravenfs-incremental-dump"); if (File.Exists(result.FilePath) == false) { break; } counter++; } } } SmugglerExportException lastException = null; try { await DetectServerSupportedFeatures(exportOptions.From); } catch (WebException e) { throw new SmugglerExportException("Failed to query server for supported features. 
Reason : " + e.Message) { LastEtag = Etag.Empty, File = result.FilePath }; } if (string.IsNullOrWhiteSpace(result.FilePath)) { throw new SmugglerException("Output directory cannot be null, empty or whitespace."); } var stream = File.Create(result.FilePath); try { // used to synchronize max returned values for put/delete operations var maxEtags = Operations.FetchCurrentMaxEtags(); try { await ExportFiles(exportOptions, stream, result.LastFileEtag, maxEtags.LastFileEtag); } catch (SmugglerExportException ex) { result.LastFileEtag = ex.LastEtag; ex.File = result.FilePath; lastException = ex; } catch (Exception) { throw; } if (Options.Incremental) { WriteLastEtagsToFile(result, Path.GetDirectoryName(result.FilePath)); } if (lastException != null) { throw lastException; } return(result); } finally { stream.Dispose(); } }
/// <summary>
/// Sequentially exports files into a new ZipArchive created over
/// <paramref name="stream"/>: pages file headers from the source starting after
/// <paramref name="lastEtag"/>, skips tombstones, copies each file's content into
/// its own archive entry, and finally writes the accumulated FileContainer records
/// into a ".metadata" entry. Progress is reported every 1000 files or 2 seconds.
/// </summary>
/// <param name="options">Connection options; only the source (<c>From</c>) side is read here.</param>
/// <param name="stream">Destination stream; the ZipArchive takes ownership of it on dispose.</param>
/// <param name="lastEtag">Etag to resume from; advanced after each archived file.</param>
/// <param name="maxEtag">Stop once a file's etag exceeds this value; null means unbounded.</param>
/// <returns>The etag of the last file archived.</returns>
/// <exception cref="SmugglerExportException">Thrown after the ".metadata" entry is flushed when any error occurred; carries LastEtag for resume.</exception>
/// NOTE(review): Math.Min(Options.BatchSize, int.MaxValue) is a no-op — BatchSize is already an int.
/// NOTE(review): the final progress message says "documents" but this method handles files.
private async Task <Etag> ExportFiles(SmugglerExportOptions <FilesConnectionStringOptions> options, Stream stream, Etag lastEtag, Etag maxEtag) { var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); Operations.ShowProgress("Exporting Files"); using (var archive = new ZipArchive(stream, ZipArchiveMode.Create)) { var metadataList = new List <FileContainer>(); Exception exceptionHappened = null; try { using (var files = await Operations.GetFiles(options.From, lastEtag, Math.Min(Options.BatchSize, int.MaxValue))) { while (await files.MoveNextAsync()) { var file = files.Current; if (file.IsTombstone) { continue; } var tempLastEtag = file.Etag; if (maxEtag != null && tempLastEtag.CompareTo(maxEtag) > 0) { break; } // Write the metadata (which includes the stream size and file container name) var fileContainer = new FileContainer { Key = Path.Combine(file.Directory.TrimStart('/'), file.Name), Metadata = file.Metadata, }; ZipArchiveEntry fileToStore = archive.CreateEntry(fileContainer.Key); using (var fileStream = await Operations.DownloadFile(file)) using (var zipStream = fileToStore.Open()) { await fileStream.CopyToAsync(zipStream).ConfigureAwait(false); } metadataList.Add(fileContainer); lastEtag = tempLastEtag; totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { //TODO: Show also the MB/sec and total GB exported. Operations.ShowProgress("Exported {0} files. ", totalCount); lastReport = SystemTime.UtcNow; } } } } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. 
", e.Message); Operations.ShowProgress("Done with reading files, total: {0}, lastEtag: {1}", totalCount, lastEtag); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } var metadataEntry = archive.CreateEntry(".metadata"); using (var metadataStream = metadataEntry.Open()) using (var writer = new StreamWriter(metadataStream)) { foreach (var item in metadataList) { writer.WriteLine(RavenJObject.FromObject(item)); } } if (exceptionHappened != null) { throw exceptionHappened; } Operations.ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); } }
/// <summary>
/// Exports a document database as a gzip-compressed JSON dump to the stream supplied
/// in <paramref name="exportOptions"/>.ToStream or to a file created at ToFile.
/// Sections are written in order — Indexes, Docs, Attachments, Transformers, and
/// (when options.ExportDeletions is set) deletions — with attachments/transformers
/// skipped once an earlier section failed, so persisted etags stay consistent.
/// Max etags are fetched up front to synchronize put/delete windows. In incremental
/// mode the target path is treated as a directory with a unique timestamped
/// ".ravendb-incremental-dump" name, and resume etags are read/written around the run.
/// After exporting deletions, their tombstones are purged from the source.
/// </summary>
/// <param name="exportOptions">Source connection plus destination file/stream.</param>
/// <param name="options">What to export, batch sizes and resume etags.</param>
/// <returns>Result carrying the file path and the last docs/attachments/deletion etags reached.</returns>
/// <exception cref="SmugglerExportException">A section failed; LastEtag and (when we own the file) File are set, thrown after etags are persisted.</exception>
/// NOTE(review): a failed feature probe only logs and drops to legacy mode (see the
/// commented-out exception construction kept in the body) rather than aborting.
/// NOTE(review): the stream is only disposed when this method created it (ownedStream);
/// a caller-supplied ToStream is left open.
public virtual async Task <ExportDataResult> ExportData(SmugglerExportOptions exportOptions, SmugglerOptions options) { SetSmugglerOptions(options); var result = new ExportDataResult { FilePath = exportOptions.ToFile, LastAttachmentsEtag = options.StartAttachmentsEtag, LastDocsEtag = options.StartDocsEtag, LastDocDeleteEtag = options.StartDocsDeletionEtag, LastAttachmentsDeleteEtag = options.StartAttachmentsDeletionEtag }; if (options.Incremental) { if (Directory.Exists(result.FilePath) == false) { if (File.Exists(result.FilePath)) { result.FilePath = Path.GetDirectoryName(result.FilePath) ?? result.FilePath; } else { Directory.CreateDirectory(result.FilePath); } } if (options.StartDocsEtag == Etag.Empty && options.StartAttachmentsEtag == Etag.Empty) { ReadLastEtagsFromFile(result); } result.FilePath = Path.Combine(result.FilePath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(result.FilePath)) { var counter = 1; while (true) { // ReSharper disable once AssignNullToNotNullAttribute result.FilePath = Path.Combine(Path.GetDirectoryName(result.FilePath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(result.FilePath) == false) { break; } counter++; } } } SmugglerExportException lastException = null; bool ownedStream = exportOptions.ToStream == null; var stream = exportOptions.ToStream ?? File.Create(result.FilePath); try { await DetectServerSupportedFeatures(exportOptions.From); } catch (WebException e) { ShowProgress("Failed to query server for supported features. Reason : " + e.Message); SetLegacyMode(); //could not detect supported features, then run in legacy mode // lastException = new SmugglerExportException // { // LastEtag = Etag.Empty, // File = ownedStream ? 
result.FilePath : null // }; } try { using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true)) using (var streamWriter = new StreamWriter(gZipStream)) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Indexes)) { await ExportIndexes(exportOptions.From, jsonWriter); } jsonWriter.WriteEndArray(); // used to synchronize max returned values for put/delete operations var maxEtags = FetchCurrentMaxEtags(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { try { result.LastDocsEtag = await ExportDocuments(exportOptions.From, options, jsonWriter, result.LastDocsEtag, maxEtags.LastDocsEtag); } catch (SmugglerExportException e) { result.LastDocsEtag = e.LastEtag; e.File = ownedStream ? result.FilePath : null; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null) { try { result.LastAttachmentsEtag = await ExportAttachments(exportOptions.From, jsonWriter, result.LastAttachmentsEtag, maxEtags.LastAttachmentsEtag); } catch (SmugglerExportException e) { result.LastAttachmentsEtag = e.LastEtag; e.File = ownedStream ? 
result.FilePath : null; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Transformers"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null) { await ExportTransformers(exportOptions.From, jsonWriter); } jsonWriter.WriteEndArray(); if (options.ExportDeletions) { ExportDeletions(jsonWriter, options, result, maxEtags); } jsonWriter.WriteEndObject(); streamWriter.Flush(); } if (options.Incremental) { WriteLastEtagsToFile(result, result.FilePath); } if (options.ExportDeletions) { PurgeTombstones(result); } if (lastException != null) { throw lastException; } return(result); } finally { if (ownedStream && stream != null) { stream.Dispose(); } } }
/// <summary>
/// Exports files into an open ZipArchive using bulk streaming: a background
/// GetFilesTask produces file headers into a BlockingCollection; this loop groups
/// them into batches of roughly <paramref name="maxSizeInBytes"/> (by UploadedSize),
/// asks the server to stream each batch's contents in one call, then splits the
/// combined stream back into individual archive entries using the Int64 size prefix
/// written before each document (-1 means the file vanished between calls and is
/// skipped). Accumulated FileContainer metadata is written last as MetadataEntry.
/// </summary>
/// <param name="archive">Open ZIP archive (Create mode) to write entries into.</param>
/// <param name="lastEtag">Etag the producer starts reading after.</param>
/// <param name="maxEtag">Upper bound forwarded to the producer; null means unbounded.</param>
/// <param name="maxSizeInBytes">Soft batch size threshold (default 10&#160;MB); a batch may exceed it by one file.</param>
/// <returns>The last etag value — see review note below.</returns>
/// <exception cref="SmugglerExportException">Thrown after metadata is flushed when either the batching loop or the producer task failed.</exception>
/// <exception cref="EndOfStreamException">The bulk stream ended before a file's announced size was read (propagates via the wrapper above).</exception>
/// NOTE(review): totalCount is incremented once per *batch*, yet the progress message
/// reports it as a file count ("Exported {0} files") — confirm intent.
/// NOTE(review): as in ExportFiles, lastEtag is never advanced in this loop, so the
/// returned/thrown etag is the starting one — verify resume semantics.
private async Task <Etag> ExportFilesStreamingBulk(ZipArchive archive, Etag lastEtag, Etag maxEtag, int maxSizeInBytes = 10 * 1024 * 1024) { var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); Operations.ShowProgress("Exporting Files"); var metadataList = new List <FileContainer>(); Exception exceptionHappened = null; using (var cts = new CancellationTokenSource()) { var fileHeaders = new BlockingCollection <FileHeader>(); var getFilesTask = Task.Run(async() => await GetFilesTask(lastEtag, maxEtag, cts, fileHeaders).ConfigureAwait(false), cts.Token); try { var buffer = new byte[64 * 1024]; // reads file headers into batch of size maxSizeInBytes, then ask database to receive that whole bulk and proccess it while (fileHeaders.IsCompleted == false) { var fileHeadersInBatch = new List <FileHeader>(); long curBulkSize = 0; while (curBulkSize <= maxSizeInBytes) { FileHeader fileHeader; try { fileHeader = fileHeaders.Take(cts.Token); } catch (InvalidOperationException) // CompleteAdding Called { Operations.ShowProgress("Files List Retrieval Completed"); break; } fileHeadersInBatch.Add(fileHeader); curBulkSize += fileHeader.UploadedSize; } if (fileHeadersInBatch.Count == 0) { continue; } using (var batchStream = await Operations.StreamFiles(fileHeadersInBatch.Select(x => x.FullPath).ToList()).ConfigureAwait(false)) { var binaryReader = new BinaryReader(batchStream); foreach (var fileHeader in fileHeadersInBatch) { cts.Token.ThrowIfCancellationRequested(); var fileContainer = new FileContainer { Key = Path.Combine(fileHeader.Directory.TrimStart('/'), fileHeader.Name), Metadata = fileHeader.Metadata, }; var curDocumentSize = binaryReader.ReadInt64(); if (curDocumentSize == -1) { continue;// file probably was deleted between calls, ignoring } var fileToStore = archive.CreateEntry(fileContainer.Key); using (var zipStream = fileToStore.Open()) { long bytesRead = 0; while (bytesRead < curDocumentSize) { var remaining = 
(int)Math.Min(curDocumentSize - bytesRead, buffer.Length); var read = await batchStream.ReadAsync(buffer, 0, remaining, cts.Token).ConfigureAwait(false); if (read == 0) { throw new EndOfStreamException(); } bytesRead += read; zipStream.Write(buffer, 0, read); } } // Write the metadata (which includes the stream size and file container name) metadataList.Add(fileContainer); } } totalCount++; if (totalCount % 30 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { Operations.ShowProgress("Exported {0} files. ", totalCount); lastReport = SystemTime.UtcNow; } } } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e); Operations.ShowProgress("Done with reading files, total: {0}, lastEtag: {1}", totalCount, lastEtag); cts.Cancel(); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } try { getFilesTask.Wait(CancellationToken.None); } catch (OperationCanceledException) { // we are fine with this } catch (Exception e) { Operations.ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); exceptionHappened = new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } } var metadataEntry = archive.CreateEntry(MetadataEntry); using (var metadataStream = metadataEntry.Open()) using (var writer = new StreamWriter(metadataStream)) { foreach (var item in metadataList) { writer.WriteLine(RavenJObject.FromObject(item)); } } if (exceptionHappened != null) { throw exceptionHappened; } Operations.ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return(lastEtag); }
// Reviewer notes (counter-store export):
// - Incremental mode writes counter *deltas* since the last export under the
//   "CountersDeltas" property; full mode writes snapshots under "CounterSnapshots".
// - On SmugglerExportException the last written etag is recovered from
//   e.Data["LastEtag"] (asserted present), the failure is logged, and the exception
//   is rethrown only after the JSON document is closed and flushed, so the dump
//   written so far remains well-formed.
// - The output stream is flushed/disposed only when this method created it
//   (ownedStream); a caller-supplied ToStream is left open.
/// <summary> /// Export counter data to specified destination (a file or a stream) /// </summary> /// <param name="exportOptions">options to specify the source and destination of the export</param> /// <exception cref="UnauthorizedAccessException">The caller does not have the required permission.-or- specified a file that is read-only. </exception> /// <exception cref="DirectoryNotFoundException">The specified path is invalid (for example, it is on an unmapped drive). </exception> /// <exception cref="IOException">An I/O error occurred while creating the file. </exception> /// <exception cref="SmugglerExportException">Encapsulates exception that happens when actually exporting data. See InnerException for details.</exception> public async Task <CounterOperationState> ExportData(SmugglerExportOptions <CounterConnectionStringOptions> exportOptions) { if (exportOptions.From == null) { throw new ArgumentNullException("exportOptions.From"); } if (string.IsNullOrWhiteSpace(exportOptions.ToFile) && exportOptions.ToStream == null) { throw new ArgumentException("ToFile or ToStream property in options must be non-null"); } var result = new CounterOperationState(); var exportFolder = string.Empty; if (Options.Incremental) { ShowProgress("Starting incremental export.."); exportFolder = CalculateExportFile(exportOptions, exportFolder); } else { ShowProgress("Starting full export..."); } SmugglerExportException lastException = null; var ownedStream = exportOptions.ToStream == null; var stream = exportOptions.ToStream ?? 
File.Create(exportOptions.ToFile); if (ownedStream) { ShowProgress("Export to dump file " + exportOptions.ToFile); } try { using (var counterStore = new CounterStore { Url = exportOptions.From.Url, Name = exportOptions.From.CounterStoreId, Credentials = new OperationCredentials(exportOptions.From.ApiKey, exportOptions.From.Credentials) }) using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true)) using (var streamWriter = new StreamWriter(gZipStream)) { counterStore.Initialize(); var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName(Options.Incremental ? "CountersDeltas" : "CounterSnapshots"); //also for human readability jsonWriter.WriteStartArray(); try { if (Options.Incremental) { await ExportIncrementalData(counterStore, exportFolder, jsonWriter).WithCancellation(CancellationToken).ConfigureAwait(false); } else { await ExportFullData(counterStore, jsonWriter).WithCancellation(CancellationToken).ConfigureAwait(false); } } catch (SmugglerExportException e) { Debug.Assert(e.Data.Keys.Cast <string>().Contains("LastEtag")); result.LastWrittenEtag = (long)e.Data["LastEtag"]; lastException = e; var operation = Options.Incremental ? "Incremental" : "Full"; ShowProgress($"{operation} Export failed. {e}"); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } if (lastException != null) { throw lastException; } return(result); } finally { if (ownedStream && stream != null) { stream.Flush(); stream.Dispose(); ShowProgress("Finished export and closed file..."); } else { ShowProgress("Finished export..."); } } }