public virtual async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus backupStatus)
{
    options = options ?? SmugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    var file = options.BackupPath;

#if !SILVERLIGHT
    if (incremental)
    {
        // Incremental exports need BackupPath to be a directory; if a file path was
        // supplied, fall back to its containing directory, otherwise create it.
        if (Directory.Exists(options.BackupPath) == false)
        {
            if (File.Exists(options.BackupPath))
                options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath;
            else
                Directory.CreateDirectory(options.BackupPath);
        }

        if (lastEtagsFromFile && backupStatus == null)
            ReadLastEtagsFromFile(options);
        if (backupStatus != null)
            ReadLastEtagsFromClass(options, backupStatus);

        file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump");
        if (File.Exists(file))
        {
            // A dump from the same minute already exists; append a counter until the name is free.
            var counter = 1;
            while (true)
            {
                file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump");

                if (File.Exists(file) == false)
                    break;
                counter++;
            }
        }
    }
#else
    if (incremental)
        throw new NotSupportedException("Incremental exports are not supported in SL.");
#endif

    Mode = await GetMode();

    bool ownedStream = stream == null;
    try
    {
        // If no stream was supplied, create the dump file and take ownership of it.
        stream = stream ?? File.Create(file);
        using (var gZipStream = new GZipStream(stream, CompressionMode.Compress,
#if SILVERLIGHT
            CompressionLevel.BestCompression,
#endif
            leaveOpen: true))
        using (var streamWriter = new StreamWriter(gZipStream))
        {
            var jsonWriter = new JsonTextWriter(streamWriter)
            {
                Formatting = Formatting.Indented
            };
            jsonWriter.WriteStartObject();

            jsonWriter.WritePropertyName("Indexes");
            jsonWriter.WriteStartArray();
            if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes)
            {
                await ExportIndexes(jsonWriter);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Docs");
            jsonWriter.WriteStartArray();
            if ((options.OperateOnTypes & ItemType.Documents) == ItemType.Documents)
            {
                options.LastDocsEtag = await ExportDocuments(options, jsonWriter, options.LastDocsEtag);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Attachments");
            jsonWriter.WriteStartArray();
            if ((options.OperateOnTypes & ItemType.Attachments) == ItemType.Attachments)
            {
                options.LastAttachmentEtag = await ExportAttachments(jsonWriter, options.LastAttachmentEtag);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WritePropertyName("Transformers");
            jsonWriter.WriteStartArray();
            if ((options.OperateOnTypes & ItemType.Transformers) == ItemType.Transformers)
            {
                await ExportTransformers(jsonWriter);
            }
            jsonWriter.WriteEndArray();

            jsonWriter.WriteEndObject();
            streamWriter.Flush();
        }

#if !SILVERLIGHT
        if (incremental && lastEtagsFromFile)
            WriteLastEtagsFromFile(options);
#endif

        return file;
    }
    finally
    {
        // Only dispose the stream if this method created it.
        if (ownedStream && stream != null)
            stream.Dispose();
    }
}
public virtual async Task ImportData(Stream stream, SmugglerOptions options)
{
    options = options ?? SmugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    Mode = await GetMode();
    await EnsureDatabaseExists();

    Stream sizeStream;
    var sw = Stopwatch.StartNew();

    // Try to read the stream compressed, otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        // Wrap the decompressed stream in a CountingStream so the number of bytes read can be tracked.
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }
    catch (InvalidDataException)
    {
        // Not a gzip stream; rewind and read the raw stream instead.
        sizeStream = stream;
        stream.Seek(0, SeekOrigin.Begin);

        var streamReader = new StreamReader(stream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }

    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");

    ShowProgress("Begin reading indexes");
    var indexCount = await ImportIndexes(jsonReader, options);
    ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount));

    ShowProgress("Begin reading documents");
    var documentCount = await ImportDocuments(jsonReader, sizeStream, options);
    ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount));

    ShowProgress("Begin reading attachments");
    var attachmentCount = await ImportAttachments(jsonReader, options);
    ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount));

    ShowProgress("Begin reading transformers");
    var transformersCount = await ImportTransformers(jsonReader, options);
    ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount));

    sw.Stop();

    ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", documentCount, attachmentCount, sw.ElapsedMilliseconds);
}
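// For context, a minimal sketch of how a caller might drive the two methods above end
// to end. "ConcreteSmuggler" is a hypothetical subclass of this base type that implements
// the abstract members (GetMode, ExportIndexes, ImportDocuments, and so on); only the
// ExportData/ImportData signatures and the SmugglerOptions/ItemType members used here
// come from the code above.
//
//     var options = new SmugglerOptions
//     {
//         BackupPath = @"C:\backups\northwind",
//         OperateOnTypes = ItemType.Documents | ItemType.Indexes
//     };
//     var smuggler = new ConcreteSmuggler();
//
//     // Passing a null stream makes ExportData create (and own) the dump file.
//     var dumpFile = await smuggler.ExportData(null, options, incremental: true,
//                                              lastEtagsFromFile: true, backupStatus: null);
//
//     // The caller owns the stream passed to ImportData; it is only read from.
//     using (var input = File.OpenRead(dumpFile))
//         await smuggler.ImportData(input, options);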