private void WriteToBuffer(Stream bufferedStream, out long bytesWritten)
{
    using (var gzip = new GZipStream(bufferedStream, CompressionMode.Compress, leaveOpen: true))
    using (var stream = new CountingStream(gzip))
    {
        var binaryWriter = new BinaryWriter(stream);
        binaryWriter.Write(1);

        var bsonWriter = new BsonWriter(binaryWriter)
        {
            DateTimeKindHandling = DateTimeKind.Unspecified
        };
        bsonWriter.WriteStartObject();
        bsonWriter.WritePropertyName(String.Empty);
        bsonWriter.WriteValue("ABCDEFG");
        bsonWriter.WriteEndObject();

        bsonWriter.Flush();
        binaryWriter.Flush();
        stream.Flush();

        bytesWritten = stream.NumberOfWrittenBytes;
    }
}
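// Every snippet in this listing relies on CountingStream, whose implementation is
// not shown here. The following is a minimal sketch inferred from how it is used
// (NumberOfWrittenBytes, a Position that tracks bytes that flowed through, and an
// optional callback reporting the count on Dispose); the real class may differ.
public class CountingStream : Stream
{
    private readonly Stream inner;
    private readonly Action<long> onDispose;
    private long read, written;

    public CountingStream(Stream inner, Action<long> onDispose = null)
    {
        this.inner = inner;
        this.onDispose = onDispose;
    }

    public long NumberOfWrittenBytes { get { return written; } }

    public override int Read(byte[] buffer, int offset, int count)
    {
        var n = inner.Read(buffer, offset, count);
        read += n;
        return n;
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        inner.Write(buffer, offset, count);
        written += count;
    }

    public override void Flush() { inner.Flush(); }

    protected override void Dispose(bool disposing)
    {
        // Assumption: the callback is invoked once the stream is done, reporting
        // whichever direction (read or write) the stream was used for.
        if (disposing && onDispose != null)
            onDispose(Math.Max(read, written));
        base.Dispose(disposing);
    }

    public override bool CanRead { get { return inner.CanRead; } }
    public override bool CanWrite { get { return inner.CanWrite; } }
    public override bool CanSeek { get { return false; } }
    public override long Length { get { throw new NotSupportedException(); } }
    public override long Position
    {
        get { return Math.Max(read, written); } // bytes that have flowed through
        set { throw new NotSupportedException(); }
    }
    public override long Seek(long offset, SeekOrigin origin) { throw new NotSupportedException(); }
    public override void SetLength(long value) { throw new NotSupportedException(); }
}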
private void WriteToken(WebRequest httpWebRequest)
{
    if (EnvironmentUtils.RunningOnPosix) // mono must set ContentLength before GetRequestStream (unlike .net)
    {
        httpWebRequest.ContentLength = postedToken.ToString().Length;
    }

    using (var stream = httpWebRequest.GetRequestStream())
    using (var countingStream = new CountingStream(stream, l => NumberOfBytesWrittenCompressed = l))
    using (var compressedData = new GZipStream(countingStream, CompressionMode.Compress))
    using (var countingStream2 = new CountingStream(compressedData, l => NumberOfBytesWrittenUncompressed = l))
    {
        if (writeBson)
        {
            postedToken.WriteTo(new BsonWriter(countingStream2));
        }
        else
        {
            var streamWriter = new StreamWriter(countingStream2);
            postedToken.WriteTo(new JsonTextWriter(streamWriter));
            streamWriter.Flush();
        }
        compressedData.Flush();
        stream.Flush();
    }
}
public void Write(byte[] data)
{
    postedData = data;

    using (var stream = WebRequest.GetRequestStream())
    using (var countingStream = new CountingStream(stream, l => NumberOfBytesWrittenCompressed = l))
    using (var cmp = new GZipStream(countingStream, CompressionMode.Compress))
    using (var countingStream2 = new CountingStream(cmp, l => NumberOfBytesWrittenUncompressed = l))
    {
        countingStream2.Write(data, 0, data.Length);
        cmp.Flush();
        stream.Flush();
    }
}
public void Write(Stream streamToWrite)
{
    postedStream = streamToWrite;

    using (var stream = WebRequest.GetRequestStream())
    using (var countingStream = new CountingStream(stream, l => NumberOfBytesWrittenCompressed = l))
    using (var compressedStream = new GZipStream(countingStream, CompressionMode.Compress))
    using (var countingStream2 = new CountingStream(compressedStream, l => NumberOfBytesWrittenUncompressed = l))
    {
        streamToWrite.CopyTo(countingStream2);
        compressedStream.Flush();
        stream.Flush();
    }
}
public void Write(Stream streamToWrite)
{
    postedStream = streamToWrite;

    if (EnvironmentUtils.RunningOnPosix) // mono must set ContentLength before GetRequestStream (unlike .net)
    {
        WebRequest.ContentLength = streamToWrite.Length;
    }

    using (var stream = WebRequest.GetRequestStream())
    using (var countingStream = new CountingStream(stream, l => NumberOfBytesWrittenCompressed = l))
    using (var compressedStream = new GZipStream(countingStream, CompressionMode.Compress))
    using (var countingStream2 = new CountingStream(compressedStream, l => NumberOfBytesWrittenUncompressed = l))
    {
        streamToWrite.CopyTo(countingStream2);
        compressedStream.Flush();
        stream.Flush();
    }
}
public void Write(byte[] data)
{
    postedData = data;

    if (EnvironmentUtils.RunningOnPosix) // mono must set ContentLength before GetRequestStream (unlike .net)
    {
        WebRequest.ContentLength = data.Length;
    }

    using (var stream = WebRequest.GetRequestStream())
    using (var countingStream = new CountingStream(stream, l => NumberOfBytesWrittenCompressed = l))
    using (var cmp = new GZipStream(countingStream, CompressionMode.Compress))
    using (var countingStream2 = new CountingStream(cmp, l => NumberOfBytesWrittenUncompressed = l))
    {
        countingStream2.Write(data, 0, data.Length);
        cmp.Flush();
        stream.Flush();
    }
}
private void WriteToken(WebRequest httpWebRequest)
{
    using (var stream = httpWebRequest.GetRequestStream())
    using (var countingStream = new CountingStream(stream, l => NumberOfBytesWrittenCompressed = l))
    using (var compressedData = new GZipStream(countingStream, CompressionMode.Compress))
    using (var countingStream2 = new CountingStream(compressedData, l => NumberOfBytesWrittenUncompressed = l))
    {
        if (writeBson)
        {
            postedToken.WriteTo(new BsonWriter(countingStream2));
        }
        else
        {
            var streamWriter = new StreamWriter(countingStream2);
            postedToken.WriteTo(new JsonTextWriter(streamWriter));
            streamWriter.Flush();
        }
        compressedData.Flush();
        stream.Flush();
    }
}
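// The write methods above all share one pattern: a CountingStream outside the
// GZipStream counts compressed bytes as they reach the transport, while a second
// CountingStream inside it counts the uncompressed payload. A standalone sketch
// of that pattern follows; the names here are illustrative, not from the original
// code, and disposing the chain also closes the destination stream.
public static class CompressionRatioSample
{
    public static void Measure(byte[] payload, Stream destination)
    {
        long compressed = 0, uncompressed = 0;

        using (var outerCounter = new CountingStream(destination, l => compressed = l))
        using (var gzip = new GZipStream(outerCounter, CompressionMode.Compress))
        using (var innerCounter = new CountingStream(gzip, l => uncompressed = l))
        {
            // Bytes written here are counted twice: once before gzip (innerCounter)
            // and once after gzip has compressed them (outerCounter).
            innerCounter.Write(payload, 0, payload.Length);
        }

        Console.WriteLine("wrote {0} bytes, {1} after gzip", uncompressed, compressed);
    }
}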
private RavenJObject ReadDocumentData(string normalizedKey, Slice sliceKey, Etag existingEtag, RavenJObject metadata, out int size)
{
    try
    {
        size = -1;

        var existingCachedDocument = documentCacher.GetCachedDocument(normalizedKey, existingEtag);
        if (existingCachedDocument != null)
        {
            size = existingCachedDocument.Size;
            return existingCachedDocument.Document;
        }

        var documentReadResult = tableStorage.Documents.Read(Snapshot, sliceKey, writeBatch.Value);
        if (documentReadResult == null) // non-existent document
            return null;

        using (var stream = documentReadResult.Reader.AsStream())
        {
            using (var decodedDocumentStream = documentCodecs.Aggregate(stream,
                (current, codec) => codec.Value.Decode(normalizedKey, metadata, current)))
            {
                // When codecs replaced the raw stream, wrap the decoded stream in a
                // CountingStream so the decoded size can still be measured.
                var streamToUse = decodedDocumentStream;
                if (stream != decodedDocumentStream)
                    streamToUse = new CountingStream(decodedDocumentStream);

                // Read through streamToUse, not decodedDocumentStream, so the
                // CountingStream actually sees the bytes and its Position advances.
                var documentData = streamToUse.ToJObject();

                size = (int)Math.Max(stream.Position, streamToUse.Position);
                documentCacher.SetCachedDocument(normalizedKey, existingEtag, documentData, metadata, size);

                return documentData;
            }
        }
    }
    catch (Exception e)
    {
        InvalidDataException invalidDataException = null;
        try
        {
            size = -1;
            var documentReadResult = tableStorage.Documents.Read(Snapshot, sliceKey, writeBatch.Value);
            if (documentReadResult == null) // non-existent document
                return null;

            using (var stream = documentReadResult.Reader.AsStream())
            using (var reader = new BinaryReader(stream))
            {
                if (reader.ReadUInt32() == DocumentCompression.CompressFileMagic)
                {
                    invalidDataException = new InvalidDataException(string.Format(
                        "Document '{0}' is compressed, but the compression bundle is not enabled.\r\n" +
                        "You have to enable the compression bundle when dealing with compressed documents.",
                        normalizedKey), e);
                }
            }
        }
        catch (Exception)
        {
            // we are already in error handling mode, just ignore this
        }

        if (invalidDataException != null)
            throw invalidDataException;

        throw new InvalidDataException("Failed to de-serialize a document: " + normalizedKey, e);
    }
}
public void WaitForIndexing()
{
    while (true)
    {
        var response = httpClient.GetAsync(databaseUrl + "/stats").ResultUnwrap();
        if (response.IsSuccessStatusCode == false)
            throw new InvalidOperationException(string.Format("STATS failed. Code: {0}.", response.StatusCode));

        using (var stream = response.GetResponseStreamWithHttpDecompression().ResultUnwrap())
        {
            var countingStream = new CountingStream(stream);
            var stats = (RavenJObject)RavenJToken.TryLoad(countingStream);
            var staleIndexes = (RavenJArray)stats["StaleIndexes"];
            if (staleIndexes.Length == 0)
                return;

            Thread.Sleep(1000);
        }
    }
}
public JsonDocument Get(string key)
{
    var response = httpClient.GetAsync(databaseUrl + "/docs/" + Uri.EscapeUriString(key)).ResultUnwrap();
    if (response.IsSuccessStatusCode == false)
        throw new InvalidOperationException(string.Format("GET failed. Code: {0}.", response.StatusCode));

    using (var stream = response.GetResponseStreamWithHttpDecompression().ResultUnwrap())
    {
        var countingStream = new CountingStream(stream);
        var data = RavenJToken.TryLoad(countingStream);

        var docKey = Uri.UnescapeDataString(response.Headers.GetFirstValue(Constants.DocumentIdFieldName));
        response.Headers.Remove(Constants.DocumentIdFieldName);

        return new JsonDocument
        {
            DataAsJson = (RavenJObject)data,
            Metadata = response.Headers.FilterHeadersToObject(),
            Key = docKey
        };
    }
}
private RavenJObject ReadDocumentData(string key, Etag existingEtag, RavenJObject metadata, out int size)
{
    var loweredKey = CreateKey(key);

    size = -1;

    var existingCachedDocument = documentCacher.GetCachedDocument(loweredKey, existingEtag);
    if (existingCachedDocument != null)
    {
        size = existingCachedDocument.Size;
        return existingCachedDocument.Document;
    }

    var documentReadResult = tableStorage.Documents.Read(Snapshot, loweredKey, writeBatch.Value);
    if (documentReadResult == null) // non-existent document
        return null;

    using (var stream = documentReadResult.Reader.AsStream())
    {
        using (var decodedDocumentStream = documentCodecs.Aggregate(stream,
            (current, codec) => codec.Value.Decode(loweredKey, metadata, current)))
        {
            // When codecs replaced the raw stream, wrap the decoded stream in a
            // CountingStream so the decoded size can still be measured.
            var streamToUse = decodedDocumentStream;
            if (stream != decodedDocumentStream)
                streamToUse = new CountingStream(decodedDocumentStream);

            // Read through streamToUse, not decodedDocumentStream, so the
            // CountingStream actually sees the bytes and its Position advances.
            var documentData = streamToUse.ToJObject();

            size = (int)Math.Max(stream.Position, streamToUse.Position);
            documentCacher.SetCachedDocument(loweredKey, existingEtag, documentData, metadata, size);

            return documentData;
        }
    }
}
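// A reduced sketch of the size-measurement pattern shared by both ReadDocumentData
// variants above: the codec chain may return the raw stream untouched or a wrapper
// whose Position is unreliable, so the read goes through a CountingStream.
// 'decode' stands in for the document codec chain, and ToJObject() is assumed to be
// the same stream-to-RavenJObject extension used above.
static RavenJObject ReadWithSize(Stream raw, Func<Stream, Stream> decode, out int size)
{
    using (var decoded = decode(raw))
    {
        var counted = decoded == raw ? decoded : (Stream)new CountingStream(decoded);

        // Read through the counter so its Position reflects the decoded bytes.
        var document = counted.ToJObject();

        // The stored (raw) size and the decoded size can differ; take the larger.
        size = (int)Math.Max(raw.Position, counted.Position);
        return document;
    }
}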
public virtual async Task ImportData(Stream stream, SmugglerOptions options)
{
    options = options ?? SmugglerOptions;
    if (options == null)
        throw new ArgumentNullException("options");

    await DetectServerSupportedFeatures();
    await EnsureDatabaseExists();

    Stream sizeStream;

    var sw = Stopwatch.StartNew();

    // Try to read the stream compressed, otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }
    catch (Exception e)
    {
        if (e is InvalidDataException == false
#if SILVERLIGHT
            && e is ZlibException == false
#endif
            )
            throw;

        // The stream is not compressed: rewind and read it as-is,
        // still counting bytes so sizes can be tracked.
        stream.Seek(0, SeekOrigin.Begin);

        sizeStream = new CountingStream(stream);
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }

    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");

    ShowProgress("Begin reading indexes");
    var indexCount = await ImportIndexes(jsonReader, options);
    ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount));

    ShowProgress("Begin reading documents");
    var documentCount = await ImportDocuments(jsonReader, options);
    ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount));

    ShowProgress("Begin reading attachments");
    var attachmentCount = await ImportAttachments(jsonReader, options);
    ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount));

    ShowProgress("Begin reading transformers");
    var transformersCount = await ImportTransformers(jsonReader, options);
    ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount));

    sw.Stop();

    ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", documentCount, attachmentCount, sw.ElapsedMilliseconds);
}
public void ImportData(Stream stream, SmugglerOptions options, bool importIndexes = true)
{
    EnsureDatabaseExists();
    Stream sizeStream;

    var sw = Stopwatch.StartNew();

    // Try to read the stream compressed, otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }
    catch (InvalidDataException)
    {
        // The stream is not compressed: rewind and read it as-is.
        sizeStream = stream;
        stream.Seek(0, SeekOrigin.Begin);

        var streamReader = new StreamReader(stream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }

    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");

    // should read indexes now
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Indexes", jsonReader.Value) == false)
        throw new InvalidDataException("Indexes property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var index = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes)
            continue;

        var indexName = index.Value<string>("name");
        if (indexName.StartsWith("Temp/"))
            continue;
        if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled"))
            continue; // can't import compiled indexes

        PutIndex(indexName, index);
    }

    // should read documents now
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Docs", jsonReader.Value) == false)
        throw new InvalidDataException("Docs property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");

    var batch = new List<RavenJObject>();
    int totalCount = 0;
    long lastFlushedAt = 0;
    int batchCount = 0;
    long sizeOnDisk = 0;

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        var before = sizeStream.Position;
        var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader);
        var size = sizeStream.Position - before;
        if (size > 1024 * 1024)
        {
            Console.WriteLine("{0:#,#.##;;0} kb - {1}",
                (double)size / 1024,
                document["@metadata"].Value<string>("@id"));
        }

        if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents)
            continue;
        if (options.MatchFilters(document) == false)
            continue;

        totalCount += 1;
        batch.Add(document);
        sizeOnDisk = (sizeStream.Position - lastFlushedAt);
        if (batch.Count >= smugglerOptions.BatchSize ||
            sizeOnDisk >= MaxSizeOfUncompressedSizeToSendToDatabase)
        {
            lastFlushedAt = sizeStream.Position;
            HandleBatch(options, batch, sizeOnDisk);
            sizeOnDisk = 0;
            if (++batchCount % 10 == 0)
            {
                OutputIndexingDistance();
            }
        }
    }
    HandleBatch(options, batch, sizeOnDisk);
    OutputIndexingDistance();

    var attachmentCount = 0;
    if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject)
        return;
    if (jsonReader.TokenType != JsonToken.PropertyName)
        throw new InvalidDataException("PropertyName was expected");
    if (Equals("Attachments", jsonReader.Value) == false)
        throw new InvalidDataException("Attachments property was expected");
    if (jsonReader.Read() == false)
        return;
    if (jsonReader.TokenType != JsonToken.StartArray)
        throw new InvalidDataException("StartArray was expected");

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray)
    {
        attachmentCount += 1;
        var item = RavenJToken.ReadFrom(jsonReader);
        if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments)
            continue;

        var attachmentExportInfo =
            new JsonSerializer { Converters = { new JsonToJsonConverter() } }
                .Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item));
        ShowProgress("Importing attachment {0}", attachmentExportInfo.Key);

        PutAttachment(attachmentExportInfo);
    }

    ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", totalCount, attachmentCount, sw.ElapsedMilliseconds);
}
public virtual async Task ImportData(SmugglerImportOptions importOptions, Stream stream)
{
    Operations.Configure(SmugglerOptions);
    Operations.Initialize(SmugglerOptions);

    await DetectServerSupportedFeatures(importOptions.To);

    Stream sizeStream;

    var sw = Stopwatch.StartNew();

    // Try to read the stream compressed, otherwise continue uncompressed.
    JsonTextReader jsonReader;
    try
    {
        stream.Position = 0;

        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }
    catch (Exception e)
    {
        if (e is InvalidDataException == false)
            throw;

        // The stream is not compressed: rewind and read it as-is,
        // still counting bytes so progress can be reported.
        stream.Seek(0, SeekOrigin.Begin);

        sizeStream = new CountingStream(stream);
        var streamReader = new StreamReader(sizeStream);

        jsonReader = new JsonTextReader(streamReader);

        if (jsonReader.Read() == false)
            return;
    }

    if (jsonReader.TokenType != JsonToken.StartObject)
        throw new InvalidDataException("StartObject was expected");

    var exportCounts = new Dictionary<string, int>();
    var exportSectionRegistrar = new Dictionary<string, Func<int>>();

    SmugglerOptions.CancelToken.Token.ThrowIfCancellationRequested();

    exportSectionRegistrar.Add("Indexes", () =>
    {
        Operations.ShowProgress("Begin reading indexes");
        var indexCount = ImportIndexes(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount));
        return indexCount;
    });

    exportSectionRegistrar.Add("Docs", () =>
    {
        Operations.ShowProgress("Begin reading documents");
        var documentCount = ImportDocuments(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount));
        return documentCount;
    });

    exportSectionRegistrar.Add("Attachments", () =>
    {
        Operations.ShowProgress("Begin reading attachments");
        var attachmentCount = ImportAttachments(importOptions.To, jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount));
        return attachmentCount;
    });

    exportSectionRegistrar.Add("Transformers", () =>
    {
        Operations.ShowProgress("Begin reading transformers");
        var transformersCount = ImportTransformers(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount));
        return transformersCount;
    });

    exportSectionRegistrar.Add("DocsDeletions", () =>
    {
        Operations.ShowProgress("Begin reading deleted documents");
        var deletedDocumentsCount = ImportDeletedDocuments(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading deleted documents, total: {0}", deletedDocumentsCount));
        return deletedDocumentsCount;
    });

    exportSectionRegistrar.Add("AttachmentsDeletions", () =>
    {
        Operations.ShowProgress("Begin reading deleted attachments");
        var deletedAttachmentsCount = ImportDeletedAttachments(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading deleted attachments, total: {0}", deletedAttachmentsCount));
        return deletedAttachmentsCount;
    });

    exportSectionRegistrar.Add("Identities", () =>
    {
        Operations.ShowProgress("Begin reading identities");
        var identitiesCount = ImportIdentities(jsonReader).Result;
        Operations.ShowProgress(string.Format("Done with reading identities, total: {0}", identitiesCount));
        return identitiesCount;
    });

    exportSectionRegistrar.Keys.ForEach(k => exportCounts[k] = 0);

    while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndObject)
    {
        SmugglerOptions.CancelToken.Token.ThrowIfCancellationRequested();

        if (jsonReader.TokenType != JsonToken.PropertyName)
            throw new InvalidDataException("PropertyName was expected");

        Func<int> currentAction;
        var currentSection = jsonReader.Value.ToString();
        if (exportSectionRegistrar.TryGetValue(currentSection, out currentAction) == false)
        {
            throw new InvalidDataException("Unexpected property found: " + jsonReader.Value);
        }

        if (jsonReader.Read() == false)
        {
            exportCounts[currentSection] = 0;
            continue;
        }

        if (jsonReader.TokenType != JsonToken.StartArray)
            throw new InvalidDataException("StartArray was expected");

        exportCounts[currentSection] = currentAction();
    }

    sw.Stop();

    Operations.ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments, deleted {2:#,#;;0} documents and {3:#,#;;0} attachments in {4:#,#.###;;0} s",
        exportCounts["Docs"], exportCounts["Attachments"], exportCounts["DocsDeletions"], exportCounts["AttachmentsDeletions"], sw.ElapsedMilliseconds / 1000f);

    SmugglerOptions.CancelToken.Token.ThrowIfCancellationRequested();
}
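// All three ImportData variants above share the same "try gzip, fall back to
// plain" probe. A reduced, standalone sketch of that detection pattern, assuming
// a seekable input stream (the name and helper shape here are illustrative):
public static Stream OpenPossiblyCompressed(Stream input)
{
    try
    {
        // Probe one byte; GZipStream throws InvalidDataException on a bad header.
        var gzip = new GZipStream(input, CompressionMode.Decompress, leaveOpen: true);
        var probe = new byte[1];
        gzip.Read(probe, 0, 1);

        // The header parsed as gzip: rewind and hand back a fresh decompressor.
        input.Seek(0, SeekOrigin.Begin);
        return new GZipStream(input, CompressionMode.Decompress);
    }
    catch (InvalidDataException)
    {
        // Not a gzip stream; rewind and read it uncompressed.
        input.Seek(0, SeekOrigin.Begin);
        return input;
    }
}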