public void WaitForIndexing()
{
    while (true)
    {
        var response = httpClient.GetAsync(databaseUrl + "/stats").ResultUnwrap();
        if (response.IsSuccessStatusCode == false)
        {
            throw new InvalidOperationException(string.Format("STATS failed. Code: {0}.", response.StatusCode));
        }

        using (var stream = response.GetResponseStreamWithHttpDecompression().ResultUnwrap())
        {
            var countingStream = new CountingStream(stream);
            var stats = (RavenJObject)RavenJToken.TryLoad(countingStream);
            var staleIndexes = (RavenJArray)stats["StaleIndexes"];
            if (staleIndexes.Length == 0)
            {
                return;
            }

            Thread.Sleep(1000);
        }
    }
}
private void WriteToBuffer(Stream bufferedStream, out long bytesWritten)
{
    using (var gzip = new GZipStream(bufferedStream, CompressionMode.Compress, leaveOpen: true))
    using (var stream = new CountingStream(gzip))
    {
        var binaryWriter = new BinaryWriter(stream);
        binaryWriter.Write(1);

        var bsonWriter = new BsonWriter(binaryWriter)
        {
            DateTimeKindHandling = DateTimeKind.Unspecified
        };
        bsonWriter.WriteStartObject();
        bsonWriter.WritePropertyName(String.Empty);
        bsonWriter.WriteValue("ABCDEFG");
        bsonWriter.WriteEndObject();

        bsonWriter.Flush();
        binaryWriter.Flush();
        stream.Flush();
        bytesWritten = stream.NumberOfWrittenBytes;
    }
}
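A point worth noting about the snippet above: the CountingStream wraps the GZipStream, not the destination buffer, so bytesWritten reports the uncompressed count while the destination receives the compressed payload. A minimal hypothetical call site (the MemoryStream buffer is an assumption for illustration, not part of the original):

// Hypothetical caller: the counter sits above the gzip layer, so
// bytesWritten is the uncompressed size; buffer.Length holds the
// compressed size once the gzip stream has been closed inside WriteToBuffer.
using (var buffer = new MemoryStream())
{
    WriteToBuffer(buffer, out long bytesWritten);
    Console.WriteLine("Wrote {0} uncompressed bytes, {1} compressed", bytesWritten, buffer.Length);
}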
public JsonDocument Get(string key)
{
    var response = httpClient.GetAsync(databaseUrl + "/docs/" + Uri.EscapeUriString(key)).ResultUnwrap();
    if (response.IsSuccessStatusCode == false)
    {
        throw new InvalidOperationException(string.Format("GET failed. Code: {0}.", response.StatusCode));
    }

    using (var stream = response.GetResponseStreamWithHttpDecompression().ResultUnwrap())
    {
        var countingStream = new CountingStream(stream);
        var data = RavenJToken.TryLoad(countingStream);
        var docKey = Uri.UnescapeDataString(response.Headers.GetFirstValue(Constants.DocumentIdFieldName));
        response.Headers.Remove(Constants.DocumentIdFieldName);

        return new JsonDocument
        {
            DataAsJson = (RavenJObject)data,
            Metadata = response.Headers.FilterHeadersToObject(),
            Key = docKey
        };
    }
}
private static long WriteBatchToBuffer(BulkInsertOptions options, Stream stream, ICollection<RavenJObject> batch)
{
    using (var countingStream = new CountingStream(stream))
    {
        switch (options.Format)
        {
            case BulkInsertFormat.Bson:
                WriteBsonBatchToBuffer(options, countingStream, batch);
                break;
            case BulkInsertFormat.Json:
                WriteJsonBatchToBuffer(options, countingStream, batch);
                break;
            default:
                throw new NotSupportedException(string.Format("The format '{0}' is not supported", options.Format));
        }

        countingStream.Flush();
        return countingStream.NumberOfWrittenBytes;
    }
}
private async Task<RavenJToken> ReadJsonInternalAsync()
{
    HandleReplicationStatusChanges(ResponseHeaders, primaryUrl, operationUrl);

    using (var responseStream = await Response.GetResponseStreamWithHttpDecompression().ConfigureAwait(false))
    {
        var countingStream = new CountingStream(responseStream);
        var data = RavenJToken.TryLoad(countingStream);
        Size = countingStream.NumberOfReadBytes;

        if (Method == "GET" && ShouldCacheRequest)
        {
            factory.CacheResponse(Url, data, ResponseHeaders);
        }

        if (factory.CanLogRequest)
        {
            factory.OnLogRequest(owner, new RequestResultArgs
            {
                DurationMilliseconds = CalculateDuration(),
                Method = Method,
                HttpResult = (int)ResponseStatusCode,
                Status = RequestStatus.SentToServer,
                Result = (data ?? "").ToString(),
                Url = Url,
                PostedData = postedData
            });
        }

        return data;
    }
}
private JsonDocument ReadCurrentDocument(string key)
{
    var existingEtag = Etag.Parse(Api.RetrieveColumn(session, Documents, tableColumnsCache.DocumentsColumns["etag"]));
    var lastModified = Api.RetrieveColumnAsInt64(session, Documents, tableColumnsCache.DocumentsColumns["last_modified"]).Value;

    var existingCachedDocument = cacher.GetCachedDocument(key, existingEtag);
    if (existingCachedDocument != null)
    {
        return new JsonDocument
        {
            SerializedSizeOnDisk = existingCachedDocument.Size,
            Key = key,
            DataAsJson = existingCachedDocument.Document,
            NonAuthoritativeInformation = false,
            LastModified = DateTime.FromBinary(lastModified),
            Etag = existingEtag,
            Metadata = existingCachedDocument.Metadata
        };
    }

    int docSize;
    var metadataBuffer = Api.RetrieveColumn(session, Documents, tableColumnsCache.DocumentsColumns["metadata"]);
    var metadata = metadataBuffer.ToJObject();

    RavenJObject dataAsJson;
    using (Stream stream = new BufferedStream(new ColumnStream(session, Documents, tableColumnsCache.DocumentsColumns["data"])))
    using (var aggregateStream = documentCodecs.Aggregate(stream, (bytes, codec) => codec.Decode(key, metadata, bytes)))
    {
        // When codecs wrapped the raw column stream, count the decoded bytes
        // through a CountingStream and take the larger of the decoded/raw
        // positions as the document size.
        var streamInUse = aggregateStream;
        if (streamInUse != stream)
        {
            streamInUse = new CountingStream(aggregateStream);
        }

        dataAsJson = streamInUse.ToJObject();
        docSize = (int)Math.Max(streamInUse.Position, stream.Position);
    }

    var serializedSizeOnDisk = metadataBuffer.Length + docSize;
    cacher.SetCachedDocument(key, existingEtag, dataAsJson, metadata, serializedSizeOnDisk);

    return new JsonDocument
    {
        SerializedSizeOnDisk = serializedSizeOnDisk,
        Key = key,
        DataAsJson = dataAsJson,
        NonAuthoritativeInformation = false,
        LastModified = DateTime.FromBinary(lastModified),
        Etag = existingEtag,
        Metadata = metadata
    };
}
public override IndexInput OpenInput(string fileName, IOContext context)
{
    IndexInput @in = base.OpenInput(fileName, context);
    if (fileName.EndsWith(".frq"))
    {
        @in = new CountingStream(OuterInstance, @in);
    }
    return @in;
}
private Task<Result<LogFile>> WriteLogsToFileAsync(OperationContext context, AbsolutePath logFilePath, string[] logs)
{
    return context.PerformOperationAsync(Tracer, async () =>
    {
        long compressedSizeBytes = 0;
        long uncompressedSizeBytes = 0;

        using (Stream fileStream = await _fileSystem.OpenSafeAsync(
            logFilePath,
            FileAccess.Write,
            FileMode.CreateNew,
            FileShare.None,
            FileOptions.SequentialScan | FileOptions.Asynchronous))
        {
            // We need to make sure we close the compression stream before we take the
            // fileStream's position, because the compression stream won't write everything
            // until it's been closed, which leads to bad recorded values in compressedSizeBytes.
            using (var gzipStream = new GZipStream(fileStream, CompressionLevel.Fastest, leaveOpen: true))
            {
                using var recordingStream = new CountingStream(gzipStream);
                using var streamWriter = new StreamWriter(recordingStream, Encoding.UTF8, bufferSize: 32 * 1024, leaveOpen: true);

                if (OnFileOpen != null)
                {
                    await OnFileOpen(streamWriter);
                }

                foreach (var log in logs)
                {
                    await streamWriter.WriteLineAsync(log);
                }

                if (OnFileClose != null)
                {
                    await OnFileClose(streamWriter);
                }

                // Needed to ensure the recording stream receives everything it needs to receive
                await streamWriter.FlushAsync();
                uncompressedSizeBytes = recordingStream.BytesWritten;
            }

            compressedSizeBytes = fileStream.Position;
        }

        Tracer.TrackMetric(context, $"LogLinesWritten", logs.Length);
        Tracer.TrackMetric(context, $"CompressedBytesWritten", compressedSizeBytes);
        Tracer.TrackMetric(context, $"UncompressedBytesWritten", uncompressedSizeBytes);

        return new Result<LogFile>(new LogFile()
        {
            Path = logFilePath,
            UncompressedSizeBytes = uncompressedSizeBytes,
            CompressedSizeBytes = compressedSizeBytes,
        });
    },
public override IndexInput OpenInput(string fileName, IOContext context)
{
    IndexInput @in = base.OpenInput(fileName, context);
    if (fileName.EndsWith(".frq", StringComparison.Ordinal))
    {
        @in = new CountingStream(outerInstance, @in);
    }
    return @in;
}
public void ByteArrayWriteIsCorrectlyCounted()
{
    var bytes = new byte[] { 74, 117, 97, 110, 32, 67, 97, 114, 108, 111, 115, 32, 59, 41 };
    var inner = new MemoryStream(capacity: bytes.Length);
    var wrapper = new CountingStream(inner);

    wrapper.Write(bytes, 0, bytes.Length);

    Assert.Equal(bytes.Length, wrapper.BytesWritten);
    Assert.Equal(bytes, inner.ToArray());
}
public void CountingStreamShouldCountBytesRead()
{
    var memStream = new MemoryStream(new byte[10]);
    var stream = new CountingStream(memStream);

    stream.ReadByte();
    stream.ReadByte();
    stream.ReadByte();
    stream.ReadByte();
    stream.ReadByte();

    Assert.Equal(5, stream.BytesRead);
}
public void CountingStreamShouldCountBytesWritten()
{
    var memStream = new MemoryStream(new byte[10]);
    var stream = new CountingStream(memStream);

    stream.WriteByte(1);
    stream.WriteByte(2);
    stream.WriteByte(3);
    stream.WriteByte(4);
    stream.WriteByte(5);

    Assert.Equal(5, stream.BytesWritten);
}
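The tests above, together with the ResetCounters test later in this listing, pin down the contract these examples assume: delegate all I/O to an inner stream and tally the bytes moving in each direction. A minimal sketch of such a wrapper, written fresh here rather than taken from any of the quoted projects (property names follow the BytesRead/BytesWritten variant used in the tests; other snippets use NumberOfReadBytes/NumberOfWrittenBytes instead):

// Minimal sketch of a byte-counting stream decorator. Counting in
// Read/Write also covers ReadByte/WriteByte, because Stream's default
// implementations of those route through Read and Write.
public class CountingStream : Stream
{
    private readonly Stream inner;

    public CountingStream(Stream inner)
    {
        this.inner = inner;
    }

    public long BytesRead { get; private set; }
    public long BytesWritten { get; private set; }

    public void ResetCounters()
    {
        BytesRead = 0;
        BytesWritten = 0;
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        // Count what was actually read, not what was requested.
        var n = inner.Read(buffer, offset, count);
        BytesRead += n;
        return n;
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        inner.Write(buffer, offset, count);
        BytesWritten += count;
    }

    public override bool CanRead => inner.CanRead;
    public override bool CanSeek => inner.CanSeek;
    public override bool CanWrite => inner.CanWrite;
    public override long Length => inner.Length;

    public override long Position
    {
        get => inner.Position;
        set => inner.Position = value;
    }

    public override void Flush() => inner.Flush();
    public override long Seek(long offset, SeekOrigin origin) => inner.Seek(offset, origin);
    public override void SetLength(long value) => inner.SetLength(value);
}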
private void WriteCentralDirectoryStructure(Stream s)
{
    // If the output stream counts bytes itself, prefer its tally over
    // Position, which may be unsupported on a non-seekable stream.
    CountingStream output = s as CountingStream;
    long start = (output != null) ? output.BytesWritten : s.Position;

    foreach (ZipEntry e in this._entries)
    {
        e.WriteCentralDirectoryEntry(s);
    }

    long finish = (output != null) ? output.BytesWritten : s.Position;
    this.WriteCentralDirectoryFooter(s, start, finish);
}
public SmugglerJsonTextWriter(StreamWriter streamWriter, int splitSizeMb, Formatting formatting, CountingStream countingStream, string filepath)
{
    this.currentStream = streamWriter.BaseStream;
    this.currentStreamWriter = streamWriter;
    this.currentCountingStream = countingStream;
    this.splitSizeMb = splitSizeMb;
    this.formatting = formatting;
    this.filepath = filepath;

    this.currentJsonTextWriter = new JsonTextWriter(streamWriter)
    {
        Formatting = formatting
    };

    jsonOperationsStructure = new LinkedList<JsonToken>();
}
/// <inheritdoc/>
public async Task<TResponse> ConnectAsync<TResponse>(string hostname, int port, bool useSsl, IResponseParser<TResponse> parser)
{
    log.Info("Connecting: {hostname} {port} (Use SSL = {useSsl})", hostname, port, useSsl);

    await client.ConnectAsync(hostname, port);
    Stream = await GetStreamAsync(hostname, useSsl);
    writer = new StreamWriter(Stream, UsenetEncoding.Default) { AutoFlush = true };
    reader = new NntpStreamReader(Stream, UsenetEncoding.Default);

    return GetResponse(parser);
}
public void Connect(int timeoutMilliseconds = 2000)
{
    TcpClientWithTimeout tcpClient = new TcpClientWithTimeout(remoteEndPoint);
    tcpClient.Connect(timeoutMilliseconds);
    InternalTcpClient = tcpClient.InternalClient;

    CountingStream = new CountingStream(InternalTcpClient.GetStream());
    InternalBinaryReader = new BinaryReader(CountingStream);
    InternalBinaryWriter = new BinaryWriter(CountingStream);

    readerThread = new Thread(ReaderRun);
    readerThread.IsBackground = true;
    readerThread.Start();
}
/// <summary>
/// Write a RavenJToken to a stream as UTF-8 JSON
/// </summary>
public static void WriteTo(this RavenJToken self, Stream stream)
{
    using (var counting = new CountingStream(stream))
    {
        using (var streamWriter = new StreamWriter(counting, Encoding.UTF8, 1024, true))
        using (var jsonWriter = new JsonTextWriter(streamWriter))
        {
            jsonWriter.Formatting = Formatting.None;
            jsonWriter.DateFormatHandling = DateFormatHandling.IsoDateFormat;
            jsonWriter.DateTimeZoneHandling = DateTimeZoneHandling.Utc;
            jsonWriter.DateFormatString = Default.DateTimeFormatsToWrite;
            self.WriteTo(jsonWriter, Default.Converters);
        }

        RegisterJsonStreamSerializationMetrics((int)counting.NumberOfWrittenBytes);
    }
}
private Task<Result<LogFile>> WriteLogsToFileAsync(OperationContext context, AbsolutePath logFilePath, string[] logs)
{
    return context.PerformOperationAsync(Tracer, async () =>
    {
        using var fileStream = await _fileSystem.OpenSafeAsync(
            logFilePath,
            FileAccess.Write,
            FileMode.CreateNew,
            FileShare.None,
            FileOptions.SequentialScan | FileOptions.Asynchronous);

        using var gzipStream = new GZipStream(fileStream, CompressionLevel.Fastest, leaveOpen: true);
        using var recordingStream = new CountingStream(gzipStream);
        using var streamWriter = new StreamWriter(recordingStream, Encoding.UTF8, bufferSize: 32 * 1024, leaveOpen: true);

        if (OnFileOpen != null)
        {
            await OnFileOpen(streamWriter);
        }

        foreach (var log in logs)
        {
            await streamWriter.WriteLineAsync(log);
        }

        if (OnFileClose != null)
        {
            await OnFileClose(streamWriter);
        }

        await streamWriter.FlushAsync();

        Tracer.TrackMetric(context, $"LogLinesWritten", logs.Length);

        // Note: the position is sampled before the gzip stream is closed, so
        // this can undercount; the variant of this method earlier in the
        // listing closes the compression stream first for exactly that reason.
        var compressedSizeBytes = fileStream.Position;
        Tracer.TrackMetric(context, $"CompressedBytesWritten", compressedSizeBytes);

        var uncompressedSizeBytes = recordingStream.BytesWritten;
        Tracer.TrackMetric(context, $"UncompressedBytesWritten", uncompressedSizeBytes);

        return new Result<LogFile>(new LogFile()
        {
            Path = logFilePath,
            UncompressedSizeBytes = uncompressedSizeBytes,
            CompressedSizeBytes = compressedSizeBytes,
        });
    },
private static void WriteJsonBatchToBuffer(BulkInsertOptions options, CountingStream stream, ICollection<RavenJObject> batch)
{
    var binaryWriter = new BinaryWriter(stream);
    binaryWriter.Write(batch.Count);
    binaryWriter.Flush();

    var jsonWriter = new JsonTextWriter(new StreamWriter(stream))
    {
        DateFormatHandling = DateFormatHandling.IsoDateFormat
    };
    foreach (var doc in batch)
    {
        doc.WriteTo(jsonWriter);
    }
    jsonWriter.Flush();
}
private void DisposeStreams()
{
    currentStreamWriter?.Flush();

    // The first stream is the original instance; the caller is responsible for disposing it.
    // The newer instances are created here and should be disposed.
    if (splitsCount > 0)
    {
        currentStreamWriter?.Dispose();
        currentStreamWriter = null;
        currentGzipStream?.Dispose();
        currentGzipStream = null;
        currentCountingStream?.Dispose();
        currentCountingStream = null;
        currentStream?.Dispose();
        currentStream = null;
    }
}
public void ResetCountersShouldResetBytesReadAndBytesWritten()
{
    var memStream = new MemoryStream(new byte[10]);
    var stream = new CountingStream(memStream);

    stream.ReadByte();
    stream.ReadByte();
    stream.WriteByte(1);
    stream.WriteByte(2);

    Assert.Equal(2, stream.BytesRead);
    Assert.Equal(2, stream.BytesWritten);

    stream.ResetCounters();

    Assert.Equal(0, stream.BytesRead);
    Assert.Equal(0, stream.BytesWritten);
}
internal void Write(Stream outstream)
{
    if (this._Source == EntrySource.Zipfile)
    {
        this.CopyThroughOneEntry(outstream);
        return;
    }

    bool readAgain = true;
    int nCycles = 0;
    do
    {
        nCycles++;
        this.WriteHeader(outstream, nCycles);
        if (this.IsDirectory)
        {
            break;
        }

        this._EmitOne(outstream);

        if (nCycles > 1)
        {
            readAgain = false;
        }
        else if (!outstream.CanSeek)
        {
            readAgain = false;
        }
        else
        {
            readAgain = this.WantReadAgain();
        }

        if (readAgain)
        {
            // Rewind and rewrite the entry; a CountingStream must also adjust
            // its written-bytes tally so the count stays consistent.
            outstream.Seek((long)this._RelativeOffsetOfHeader, SeekOrigin.Begin);
            outstream.SetLength(outstream.Position);
            CountingStream s1 = outstream as CountingStream;
            if (s1 != null)
            {
                s1.Adjust(this._TotalEntrySize);
            }
        }
    } while (readAgain);
}
private static void WriteBsonBatchToBuffer(BulkInsertOptions options, CountingStream stream, ICollection<RavenJObject> batch)
{
    var binaryWriter = new BinaryWriter(stream);
    binaryWriter.Write(batch.Count);
    binaryWriter.Flush();

    var bsonWriter = new BsonWriter(binaryWriter)
    {
        DateTimeKindHandling = DateTimeKind.Unspecified
    };
    foreach (var doc in batch)
    {
        doc.WriteTo(bsonWriter);
    }
    bsonWriter.Flush();
}
public static bool TryGetJsonReaderForStream(Stream stream, out JsonTextReader jsonTextReader, out CountingStream sizeStream)
{
    jsonTextReader = null;
    sizeStream = null;

    try
    {
        // First, assume the stream is gzip-compressed JSON.
        stream.Position = 0;
        sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress));
        var streamReader = new StreamReader(sizeStream);
        jsonTextReader = new RavenJsonTextReader(streamReader);
        if (jsonTextReader.Read() == false)
        {
            return false;
        }
    }
    catch (Exception e)
    {
        // InvalidDataException means the stream wasn't gzip after all;
        // fall back to reading it as plain JSON. Anything else is fatal.
        if (e is InvalidDataException == false)
        {
            if (sizeStream != null)
            {
                sizeStream.Dispose();
            }
            throw;
        }

        stream.Seek(0, SeekOrigin.Begin);
        sizeStream = new CountingStream(stream);
        var streamReader = new StreamReader(sizeStream);
        jsonTextReader = new JsonTextReader(streamReader);
        if (jsonTextReader.Read() == false)
        {
            return false;
        }
    }

    return true;
}
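A hypothetical call site for the helper above; inputStream is an invented name, and NumberOfReadBytes is the property visible in the ReadJsonInternalAsync example earlier in this listing:

// Hypothetical caller: detects gzip vs plain JSON, then tracks how many
// bytes (decompressed, when the gzip path was taken) the reader consumes.
if (TryGetJsonReaderForStream(inputStream, out var reader, out var counter))
{
    while (reader.Read())
    {
        // process tokens ...
    }
    Console.WriteLine("Consumed {0} bytes of JSON", counter.NumberOfReadBytes);
}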
private RavenJObject ReadDocumentData(string key, Etag existingEtag, RavenJObject metadata, out int size)
{
    var loweredKey = CreateKey(key);
    size = -1;

    var existingCachedDocument = documentCacher.GetCachedDocument(loweredKey, existingEtag);
    if (existingCachedDocument != null)
    {
        size = existingCachedDocument.Size;
        return existingCachedDocument.Document;
    }

    var documentReadResult = tableStorage.Documents.Read(Snapshot, loweredKey, writeBatch.Value);
    if (documentReadResult == null) // non-existent document
    {
        return null;
    }

    using (var stream = documentReadResult.Reader.AsStream())
    using (var decodedDocumentStream = documentCodecs.Aggregate(stream, (current, codec) => codec.Value.Decode(loweredKey, metadata, current)))
    {
        var streamToUse = decodedDocumentStream;
        if (stream != decodedDocumentStream)
        {
            streamToUse = new CountingStream(decodedDocumentStream);
        }

        // Read through streamToUse so the CountingStream (when present)
        // actually sees the bytes; reading decodedDocumentStream directly
        // would leave the counter's position at zero.
        var documentData = streamToUse.ToJObject();

        size = (int)Math.Max(stream.Position, streamToUse.Position);
        documentCacher.SetCachedDocument(loweredKey, existingEtag, documentData, metadata, size);

        return documentData;
    }
}
public Task InitializeAsync(DatabaseSmugglerOptions options, CancellationToken cancellationToken)
{
    _options = options;

    try
    {
        _stream.Position = 0;
        _sizeStream = new CountingStream(new GZipStream(_stream, CompressionMode.Decompress));
        _streamReader = new StreamReader(_sizeStream);
        _reader = new RavenJsonTextReader(_streamReader);
    }
    catch (Exception e)
    {
        if (e is InvalidDataException == false)
        {
            throw;
        }

        _stream.Seek(0, SeekOrigin.Begin);
        _sizeStream = new CountingStream(_stream);
        _streamReader = new StreamReader(_sizeStream);
        _reader = new JsonTextReader(_streamReader);
    }

    if (_reader.Read() == false)
    {
        return new CompletedTask();
    }

    if (_reader.TokenType != JsonToken.StartObject)
    {
        throw new InvalidDataException("StartObject was expected");
    }

    return new CompletedTask();
}
private void SpinWriterIfReachedMaxSize()
{
    if (splitSizeMb == 0 || currentCountingStream.NumberOfWrittenBytes < splitSizeMb * Mb)
    {
        return;
    }

    if (jsonOperationsStructure.Count != 2)
    {
        return; // don't break the file in the middle of a document
    }

    // Close every open array/object so the current part is valid JSON on its own.
    while (jsonOperationsStructure.Count > 0)
    {
        var op = jsonOperationsStructure.Last.Value;
        if (op == JsonToken.StartArray)
        {
            WriteEndArray();
        }
        else if (op == JsonToken.StartObject)
        {
            WriteEndObject();
        }
    }

    DisposeStreams();

    // Start the next part file and rebuild the writer chain:
    // file -> counting -> gzip -> text -> json.
    currentStream = File.Create($"{filepath}.part{++splitsCount:D3}");
    currentCountingStream = new CountingStream(currentStream);
    currentGzipStream = new GZipStream(currentCountingStream, CompressionMode.Compress, leaveOpen: true);
    currentStreamWriter = new StreamWriter(currentGzipStream);
    currentJsonTextWriter = new JsonTextWriter(currentStreamWriter)
    {
        Formatting = formatting
    };

    WriteStartObject();
    WritePropertyName(lastPropertyName);
    WriteStartArray();
}
private JsonDocument ReadCurrentDocument()
{
    int docSize;
    var metadataBuffer = Api.RetrieveColumn(session, Documents, tableColumnsCache.DocumentsColumns["metadata"]);
    var metadata = metadataBuffer.ToJObject();
    var key = Api.RetrieveColumnAsString(session, Documents, tableColumnsCache.DocumentsColumns["key"], Encoding.Unicode);

    RavenJObject dataAsJson;
    using (Stream stream = new BufferedStream(new ColumnStream(session, Documents, tableColumnsCache.DocumentsColumns["data"])))
    using (var aggregateStream = documentCodecs.Aggregate(stream, (bytes, codec) => codec.Decode(key, metadata, bytes)))
    {
        // Same sizing approach as the keyed overload above: when codecs are
        // in play, count the decoded bytes through a CountingStream.
        var streamInUse = aggregateStream;
        if (streamInUse != stream)
        {
            streamInUse = new CountingStream(aggregateStream);
        }

        dataAsJson = streamInUse.ToJObject();
        docSize = (int)Math.Max(streamInUse.Position, stream.Position);
    }

    bool isDocumentModifiedInsideTransaction = false;
    var lastModified = Api.RetrieveColumnAsInt64(session, Documents, tableColumnsCache.DocumentsColumns["last_modified"]).Value;

    return new JsonDocument
    {
        SerializedSizeOnDisk = metadataBuffer.Length + docSize,
        Key = key,
        DataAsJson = dataAsJson,
        NonAuthoritativeInformation = isDocumentModifiedInsideTransaction,
        LastModified = DateTime.FromBinary(lastModified),
        Etag = Etag.Parse(Api.RetrieveColumn(session, Documents, tableColumnsCache.DocumentsColumns["etag"])),
        Metadata = metadata
    };
}
private void SendErrorToClient(Exception e)
{
    if (_logger.IsInfoEnabled)
    {
        _logger.Info("Failure during bulk insert", e);
    }

    _messagesToClient.CompleteAdding();
    try
    {
        _replyToCustomer.Wait();
    }
    catch (Exception)
    {
        // we don't care about any errors here, we just need to make sure that the thread
        // isn't sending stuff to the client while we are sending the error
    }

    try
    {
        var error = TcpConnection.Context.ReadObject(new DynamicJsonValue
        {
            ["Type"] = "Error",
            ["Exception"] = e.ToString()
        }, "error/message");

        using (var countingStream = new CountingStream(TcpConnection.Stream))
        {
            TcpConnection.Context.Write(countingStream, error);
            TcpConnection.RegisterBytesSent(countingStream.NumberOfWrittenBytes);
        }
    }
    catch (Exception errorSending)
    {
        if (_logger.IsInfoEnabled)
        {
            _logger.Info("Could not write bulk insert error to client", errorSending);
        }
    }
}
private void CopyThroughOneEntry(Stream outstream)
{
    lock (_streamLock)
    {
        byte[] bytes = new byte[0x2200];
        Stream input = this.ArchiveStream;

        input.Seek((long)this._RelativeOffsetOfHeader, SeekOrigin.Begin);
        this._EntryHeader = new byte[this._LengthOfHeader];
        int n = input.Read(this._EntryHeader, 0, this._EntryHeader.Length);
        this._CheckRead(n);
        input.Seek((long)this._RelativeOffsetOfHeader, SeekOrigin.Begin);

        CountingStream counter = outstream as CountingStream;
        this._RelativeOffsetOfHeader = (counter != null) ? counter.BytesWritten : ((int)outstream.Position);

        for (int remaining = this._TotalEntrySize; remaining > 0; remaining -= n)
        {
            int len = (remaining > bytes.Length) ? bytes.Length : remaining;
            n = input.Read(bytes, 0, len);
            this._CheckRead(n);
            outstream.Write(bytes, 0, n);
        }
    }
}
/// <summary>
/// Convert a stream's contents (JSON or BSON) to a RavenJObject
/// </summary>
public static RavenJObject ToJObject(this Stream self)
{
    var streamWithCachedHeader = new StreamWithCachedHeader(self, 5);

    using (var counting = new CountingStream(streamWithCachedHeader))
    {
        if (IsJson(streamWithCachedHeader))
        {
            // note that we intentionally don't close it here
            var jsonReader = new JsonTextReader(new StreamReader(counting));
            var ravenJObject = RavenJObject.Load(jsonReader);
            RegisterJsonStreamDeserializationMetrics((int)counting.NumberOfReadBytes);
            return ravenJObject;
        }

        var deserializedObject = RavenJObject.Load(new BsonReader(counting)
        {
            DateTimeKindHandling = DateTimeKind.Utc,
        });
        RegisterJsonStreamDeserializationMetrics((int)counting.NumberOfReadBytes);
        return deserializedObject;
    }
}
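The WriteTo and ToJObject extensions above form a natural round trip. A small hypothetical exercise of the pair; the MemoryStream and the document contents are invented for illustration, and the collection-initializer form assumes RavenJObject's usual Add(string, RavenJToken):

// Hypothetical round trip through the two extensions above.
var doc = new RavenJObject { { "Name", new RavenJValue("example") } };
using (var ms = new MemoryStream())
{
    doc.WriteTo(ms);               // compact UTF-8 JSON, bytes counted on the way out
    ms.Position = 0;
    var restored = ms.ToJObject(); // header sniffing picks the JSON path, bytes counted on the way in
}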