public async Task DecodeStream()
{
    // Reset position to reuse the same buffer
    zlibMemoryStream.Position = 0;

    // Read the 4-byte length prefix of the compressed stream.
    // A network read may return fewer bytes than requested, so loop until all 4 arrive.
    var buff = new byte[4];
    var headerBytesRead = 0;
    while (headerBytesRead < 4)
    {
        var read = await BaseStream.ReadAsync(buff, headerBytesRead, 4 - headerBytesRead).ConfigureAwait(false);
        if (read == 0)
        {
            throw new Exception("ZRLE decoder: Invalid compressed stream size");
        }
        headerBytesRead += read;
    }

    // BigEndian to LittleEndian conversion of the length prefix
    // (an equivalent BinaryPrimitives-based conversion is sketched after this method)
    var compressedBufferSize = buff[3] | buff[2] << 8 | buff[1] << 16 | buff[0] << 24;
    if (compressedBufferSize < 0 || compressedBufferSize > 64 * 1024 * 1024)
    {
        throw new Exception("ZRLE decoder: Invalid compressed data size");
    }

    #region Decode stream
    // Decode stream byte by byte (kept for reference)
    // int pos = 0;
    // while (pos++ < compressedBufferSize)
    //     zlibDecompressedStream.WriteByte(this.BaseStream.ReadByte());
    #endregion

    #region Decode stream in blocks
    // Decode stream in blocks
    var bytesNeeded = compressedBufferSize;
    const int maxBufferSize = 64 * 1024; // 64k buffer
    var receiveBuffer = new byte[maxBufferSize];
    var netStream = (NetworkStream)BaseStream;
    netStream.ReadTimeout = 15000; // Set timeout to 15s

    do
    {
        // TODO remove the polling delay
        if (netStream.DataAvailable)
        {
            // bytesToRead must never exceed maxBufferSize
            var bytesToRead = bytesNeeded;
            if (bytesToRead > maxBufferSize)
            {
                bytesToRead = maxBufferSize;
            }

            // read in 1024-byte chunks as an improvement for slow connections
            var toRead = bytesToRead > 1024 ? 1024 : bytesToRead;
            var bytesRead = await netStream.ReadAsync(receiveBuffer, 0, toRead).ConfigureAwait(false);
            if (bytesRead == 0)
            {
                // the connection was closed before the full compressed block arrived;
                // bailing out here avoids looping forever on a dead connection
                throw new Exception("ZRLE decoder: Connection closed before compressed data was received");
            }

            // decrease bytesNeeded by the number of bytes actually read
            bytesNeeded -= bytesRead;

            // write the bytes we just read to the decompression stream
            await zlibDecompressedStream.WriteAsync(receiveBuffer, 0, bytesRead).ConfigureAwait(false);
        }
        else
        {
            // no data available yet; yield asynchronously instead of blocking the thread
            await Task.Delay(100).ConfigureAwait(false); // 100 ms to accommodate slow connections
        }
    } while (bytesNeeded > 0);
    #endregion

    zlibMemoryStream.Position = 0;
}
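
// A minimal sketch, not part of the original decoder: the manual shift-and-or
// conversion above can also be written with BinaryPrimitives. The helper name
// ReadBigEndianLength is hypothetical, and the sketch assumes a runtime where
// System.Buffers.Binary.BinaryPrimitives is available (e.g. via the System.Memory package).
private static int ReadBigEndianLength(byte[] header)
{
    // Interprets header[0..3] as a big-endian 32-bit integer, equivalent to
    // buff[3] | buff[2] << 8 | buff[1] << 16 | buff[0] << 24 above.
    return System.Buffers.Binary.BinaryPrimitives.ReadInt32BigEndian(header);
}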