/// <summary>
/// Decompresses a zlib-compressed byte array and yields each packet it contains.
/// </summary>
/// <param name="bytes">Zlib data; the first two bytes (zlib header) are skipped.</param>
/// <returns>An async sequence of decoded packets.</returns>
/// <exception cref="InvalidDataException">The stream ends in the middle of a packet.</exception>
protected static async IAsyncEnumerable<Packet> ZlibDeCompressAsync(byte[] bytes)
{
    // Skip the 2-byte zlib header; DeflateStream expects raw deflate data.
    using var ms = new MemoryStream(bytes, 2, bytes.Length - 2);
    using var zs = new DeflateStream(ms, CompressionMode.Decompress);

    while (true)
    {
        var headerBuffer = new byte[PacketHeader.PACKET_HEADER_LENGTH];
        // BUG FIX: DeflateStream.ReadAsync may return fewer bytes than requested;
        // the original code used a single read for both header and body, which
        // could construct packets from partially filled buffers. Loop-fill instead.
        int filled = await FillAsync(zs, headerBuffer);
        if (filled == 0)
        {
            break; // clean end of stream
        }
        if (filled < headerBuffer.Length)
        {
            throw new InvalidDataException("Truncated packet header in compressed stream.");
        }

        var header = new PacketHeader(headerBuffer);
        var body = new byte[header.BodyLength];
        if (await FillAsync(zs, body) < body.Length)
        {
            throw new InvalidDataException("Truncated packet body in compressed stream.");
        }

        yield return new Packet { Header = header, PacketBody = body };
    }

    // Reads until the buffer is full or the stream ends; returns bytes read.
    static async Task<int> FillAsync(Stream stream, byte[] buffer)
    {
        int total = 0;
        while (total < buffer.Length)
        {
            int read = await stream.ReadAsync(buffer.AsMemory(total, buffer.Length - total));
            if (read == 0)
            {
                break;
            }
            total += read;
        }
        return total;
    }
}
/// <summary>
/// Inflates a deflate-compressed byte array using a pooled scratch buffer,
/// returning the pooled buffer even when reading fails.
/// </summary>
/// <param name="compressedArray">The compressed input bytes.</param>
/// <param name="bufferSize">Size of the pooled read buffer to rent.</param>
/// <returns>The fully decompressed bytes.</returns>
public async Task<byte[]> Decompress(byte[] compressedArray, int bufferSize)
{
    using (var source = new MemoryStream(compressedArray))
    using (var inflater = new DeflateStream(source, CompressionMode.Decompress))
    {
        var scratch = BufferPool.GetBuffer(bufferSize);
        try
        {
            using (var collected = new MemoryStream())
            {
                int n;
                // Drain the decompressor chunk by chunk until end of stream.
                while ((n = await inflater.ReadAsync(scratch, 0, scratch.Length)) > 0)
                {
                    collected.Write(scratch, 0, n);
                }
                return collected.ToArray();
            }
        }
        finally
        {
            // Always hand the rented buffer back, even on exception.
            BufferPool.ReturnBuffer(scratch);
        }
    }
}
/// <summary>
/// Verifies the argument-validation contract of DeflateStream.Read/Write and their
/// async counterparts: null buffers, negative offsets/counts, out-of-range spans,
/// and calls in the wrong CompressionMode all throw the documented exception types.
/// </summary>
public void ReadWriteArgumentValidation()
{
    // Compress mode: Write validates its arguments; Read is rejected outright.
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
    {
        Assert.Throws<ArgumentNullException>(() => ds.Write(null, 0, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Write(new byte[1], -1, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Write(new byte[1], 0, -1));
        AssertExtensions.Throws<ArgumentException>(null, () => ds.Write(new byte[1], 0, 2));
        AssertExtensions.Throws<ArgumentException>(null, () => ds.Write(new byte[1], 1, 1));
        Assert.Throws<InvalidOperationException>(() => ds.Read(new byte[1], 0, 1));
        // A zero-length write on a compress stream is a legal no-op.
        ds.Write(new byte[1], 0, 0);
    }
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
    {
        // Async argument validation throws synchronously, so the returned
        // tasks are intentionally not awaited here.
        Assert.Throws<ArgumentNullException>(() => { ds.WriteAsync(null, 0, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.WriteAsync(new byte[1], -1, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.WriteAsync(new byte[1], 0, -1); });
        AssertExtensions.Throws<ArgumentException>(null, () => { ds.WriteAsync(new byte[1], 0, 2); });
        AssertExtensions.Throws<ArgumentException>(null, () => { ds.WriteAsync(new byte[1], 1, 1); });
        Assert.Throws<InvalidOperationException>(() => { ds.Read(new byte[1], 0, 1); });
    }
    // Decompress mode: Read validates its arguments; Write is rejected outright.
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
    {
        Assert.Throws<ArgumentNullException>(() => ds.Read(null, 0, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Read(new byte[1], -1, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Read(new byte[1], 0, -1));
        AssertExtensions.Throws<ArgumentException>(null, () => ds.Read(new byte[1], 0, 2));
        AssertExtensions.Throws<ArgumentException>(null, () => ds.Read(new byte[1], 1, 1));
        Assert.Throws<InvalidOperationException>(() => ds.Write(new byte[1], 0, 1));
        // A zero-length read must return 0 and leave the buffer untouched.
        var data = new byte[1] { 42 };
        Assert.Equal(0, ds.Read(data, 0, 0));
        Assert.Equal(42, data[0]);
    }
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
    {
        // Same synchronous-validation contract for the async read path.
        Assert.Throws<ArgumentNullException>(() => { ds.ReadAsync(null, 0, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.ReadAsync(new byte[1], -1, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.ReadAsync(new byte[1], 0, -1); });
        AssertExtensions.Throws<ArgumentException>(null, () => { ds.ReadAsync(new byte[1], 0, 2); });
        AssertExtensions.Throws<ArgumentException>(null, () => { ds.ReadAsync(new byte[1], 1, 1); });
        Assert.Throws<InvalidOperationException>(() => { ds.Write(new byte[1], 0, 1); });
    }
}
/// <summary>
/// An inner stream reporting impossible read counts must surface as
/// InvalidDataException (counts too large) or end-of-stream (counts too small),
/// on both the sync and async read paths.
/// </summary>
public async Task WrapStreamReturningBadReadValues()
{
    var sink = new byte[1024];
    // Over-reporting inner stream: the decoder must reject the data as invalid.
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooLargeCounts), CompressionMode.Decompress))
    {
        Assert.Throws<InvalidDataException>(() => ds.Read(sink, 0, 1024));
    }
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooLargeCounts), CompressionMode.Decompress))
    {
        await Assert.ThrowsAsync<InvalidDataException>(() => ds.ReadAsync(sink, 0, 1024));
    }
    // Under-reporting inner stream: treated as end of stream, so Read returns 0.
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooSmallCounts), CompressionMode.Decompress))
    {
        Assert.Equal(0, ds.Read(sink, 0, 1024));
    }
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooSmallCounts), CompressionMode.Decompress))
    {
        Assert.Equal(0, await ds.ReadAsync(sink, 0, 1024));
    }
}
/// <summary>
/// Async operations started with an already-cancelled token must complete
/// as canceled without doing any work.
/// </summary>
public void Precancellation()
{
    var backing = new MemoryStream();
    var alreadyCancelled = new CancellationToken(true);
    using (DeflateStream compressor = new DeflateStream(backing, CompressionMode.Compress, leaveOpen: true))
    {
        Assert.True(compressor.WriteAsync(new byte[1], 0, 1, alreadyCancelled).IsCanceled);
        Assert.True(compressor.FlushAsync(alreadyCancelled).IsCanceled);
    }
    using (DeflateStream decompressor = new DeflateStream(backing, CompressionMode.Decompress, leaveOpen: true))
    {
        Assert.True(decompressor.ReadAsync(new byte[1], 0, 1, alreadyCancelled).IsCanceled);
    }
}
/// <summary>
/// Decompresses raw deflate-compressed byte data.
/// </summary>
/// <param name="compressedBytes">The byte data to decompress.</param>
/// <param name="uncompressedSize">The final size of the compressed data after decompression.</param>
/// <returns>The decompressed byte data.</returns>
public static async Task<byte[]> Decompressor(byte[] compressedBytes, int uncompressedSize)
{
    var decompressedBytes = new byte[uncompressedSize];
    using (var ms = new MemoryStream(compressedBytes))
    using (var ds = new DeflateStream(ms, CompressionMode.Decompress))
    {
        // BUG FIX: DeflateStream.ReadAsync may return fewer bytes than requested.
        // The original single read could leave the tail of the buffer zero-filled;
        // loop until the buffer is full or the stream is exhausted.
        int offset = 0;
        int read;
        while (offset < uncompressedSize &&
               (read = await ds.ReadAsync(decompressedBytes, offset, uncompressedSize - offset)) > 0)
        {
            offset += read;
        }
    }
    return decompressedBytes;
}
/// <summary>
/// Verifies that a second ReadAsync issued while one is still pending throws
/// InvalidOperationException rather than corrupting the stream's state.
/// </summary>
public static async Task OverlappingReadAsync()
{
    byte[] buffer = new byte[32];
    string testFilePath = gzTestFile("GZTestDocument.pdf.gz");
    using (var readStream = await ManualSyncMemoryStream.GetStreamFromFileAsync(testFilePath, false, true))
    using (var unzip = new DeflateStream(readStream, CompressionMode.Decompress, true))
    {
        Task task = null;
        try
        {
            // Start a read that blocks until the manual event below is set.
            task = unzip.ReadAsync(buffer, 0, 32);
            Assert.True(readStream.ReadHit);
            // A concurrent read on the same stream must be rejected immediately.
            Assert.Throws<InvalidOperationException>(() => { unzip.ReadAsync(buffer, 0, 32); }); // "overlapping read"
        }
        finally
        {
            // Unblock Async operations
            readStream.manualResetEvent.Set();
            // The original ReadAsync should be able to complete
            Assert.True(task.Wait(100 * 500));
        }
    }
}
/// <summary>
/// Verifies that a second ReadAsync issued while one is still pending throws
/// InvalidOperationException rather than corrupting the stream's state.
/// </summary>
public static async Task OverlappingReadAsync()
{
    byte[] buffer = new byte[32];
    string testFilePath = gzTestFile("GZTestDocument.pdf.gz");
    using (var readStream = await ManualSyncMemoryStream.GetStreamFromFileAsync(testFilePath, false, true))
    using (var unzip = new DeflateStream(readStream, CompressionMode.Decompress, true))
    {
        Task task = null;
        try
        {
            // Start a read that blocks until the manual event below is set.
            task = unzip.ReadAsync(buffer, 0, 32);
            Assert.True(readStream.ReadHit);
            // A concurrent read on the same stream must be rejected immediately.
            Assert.Throws<InvalidOperationException>(() => { unzip.ReadAsync(buffer, 0, 32); }); // "overlapping read"
        }
        finally
        {
            // Unblock Async operations
            readStream.manualResetEvent.Set();
            // The original ReadAsync should be able to complete
            Assert.True(task.Wait(100 * 500));
        }
    }
}
/// <summary>
/// Copies up to <paramref name="fileLength"/> decompressed bytes from
/// <paramref name="compressedStream"/> into <paramref name="targetStream"/>,
/// reporting progress in whole-percent increments.
/// </summary>
/// <param name="compressedStream">Source deflate stream, positioned at the data.</param>
/// <param name="targetStream">Destination writer for the decompressed bytes.</param>
/// <param name="fileLength">Expected decompressed length; output is truncated to this size.</param>
/// <param name="ct">Cancellation token; only observed after each read completes.</param>
/// <param name="progress">Optional receiver of percentage values (0-100).</param>
private static async Task ReadCompressedStreamAsync(
    DeflateStream compressedStream,
    BinaryWriter targetStream,
    long fileLength,
    CancellationToken ct = default,
    IProgress<double> progress = null)
{
    var buffer = new byte[4096];
    int bytesRead;
    var totalBytesRead = 0L;
    var lastProgress = 0d;
    while ((bytesRead = await compressedStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
    {
        ct.ThrowIfCancellationRequested();
        if (bytesRead > fileLength)
        {
            // Entire expected output fits inside this one chunk; write only fileLength bytes.
            // NOTE(review): this branch implicitly assumes totalBytesRead == 0 (first
            // iteration); with a 4096-byte buffer it can only trigger for tiny files —
            // confirm fileLength semantics against the caller.
            totalBytesRead += fileLength;
            targetStream.Write(buffer, 0, (int)fileLength);
        }
        else if (totalBytesRead + bytesRead <= fileLength)
        {
            // Normal case: the whole chunk fits within the remaining budget.
            totalBytesRead += bytesRead;
            targetStream.Write(buffer, 0, bytesRead);
        }
        else if (totalBytesRead + bytesRead > fileLength)
        {
            // Final chunk: write only the bytes remaining up to fileLength.
            var leftToRead = fileLength - totalBytesRead;
            totalBytesRead += leftToRead;
            targetStream.Write(buffer, 0, (int)leftToRead);
        }
        // Throttle progress callbacks to at most one per whole percent.
        var newProgress = (double)totalBytesRead / fileLength * 100;
        if (newProgress - lastProgress > 1)
        {
            progress?.Report(newProgress);
            lastProgress = newProgress;
        }
        // Stop once the declared length has been produced, even if the
        // compressed stream contains more data.
        if (totalBytesRead >= fileLength)
        {
            break;
        }
    }
}
/// <summary>
/// Validates the buffer arguments and forwards the read to the underlying deflate stream.
/// </summary>
/// <param name="buffer">Destination buffer for the decompressed bytes.</param>
/// <param name="offset">Offset in <paramref name="buffer"/> at which to start writing.</param>
/// <param name="count">Maximum number of bytes to read.</param>
/// <param name="cancellationToken">Token to cancel the read.</param>
/// <returns>A task producing the number of bytes read.</returns>
/// <exception cref="ArgumentNullException"><paramref name="buffer"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">The offset/count pair is out of range.</exception>
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer));
    }
    if (offset < 0 || offset > buffer.Length)
    {
        throw new ArgumentOutOfRangeException(nameof(offset));
    }
    // BUG FIX: compare as "count > remaining" instead of "offset + count > Length".
    // The original sum could overflow int and wrap negative, letting an
    // out-of-range (offset, count) pair slip past the check.
    if (count < 0 || count > buffer.Length - offset)
    {
        throw new ArgumentOutOfRangeException(nameof(count));
    }
    return _deflate.ReadAsync(buffer, offset, count, cancellationToken);
}
/// <summary>
/// Reads the 8-byte header of the compressed database file and extracts the
/// Unicode database version from it.
/// </summary>
/// <param name="cancellationToken">Task cancellation token (not observed by the read itself).</param>
/// <returns>true when the header parsed successfully; otherwise false (and an error is logged).</returns>
protected override async Task<bool> ExecuteAsync(CancellationToken cancellationToken)
{
    var buffer = new byte[8];
    using (var file = new DeflateStream(File.OpenRead(DatabasePath), CompressionMode.Decompress))
    {
        // BUG FIX: DeflateStream.ReadAsync may return fewer than the requested
        // bytes; loop until all 8 header bytes are read or the stream ends early.
        int offset = 0;
        while (offset < buffer.Length)
        {
            int read = await file.ReadAsync(buffer, offset, buffer.Length - offset);
            if (read == 0)
            {
                break; // truncated file; TryReadHeader below rejects the partial header
            }
            offset += read;
        }
    }
    if (TryReadHeader(buffer, out var version))
    {
        UnicodeDatabaseVersion = version.ToString(3);
        return true;
    }
    Log.LogError("The database contained an invalid header.");
    return false;
}
/// <summary>
/// Decompresses raw byte data.
/// </summary>
/// <param name="compressedBytes">The byte data to decompress.</param>
/// <param name="uncompressedSize">The final size of the compressed data after decompression.</param>
/// <returns>The decompressed byte data.</returns>
public static async Task<byte[]> Decompressor(byte[] compressedBytes, int uncompressedSize)
{
    var result = new byte[uncompressedSize];
    using (var source = new MemoryStream(compressedBytes))
    using (var inflater = new DeflateStream(source, CompressionMode.Decompress, true))
    {
        // Fill the result buffer incrementally: a single Read may return fewer
        // bytes than requested, so keep reading until the stream is drained.
        var written = 0;
        while (true)
        {
            var chunk = await inflater.ReadAsync(result, written, uncompressedSize - written);
            if (chunk <= 0)
            {
                break;
            }
            written += chunk;
        }
    }
    return result;
}
/// <summary>
/// Reads one complete binary message: collects frames until a final frame
/// arrives, then inflates the payload if any frame was compressed and
/// compression is negotiated for this connection.
/// </summary>
/// <returns>The reassembled (and, when applicable, decompressed) message bytes.</returns>
public async Task<byte[]> ReadBinary()
{
    var ret = new List<byte[]>();
    var compressed = false;
    // Accumulate frame payloads until the final fragment of the message.
    while (true)
    {
        var (final, fcompressed, opcode, data) = await ReadFrame();
        compressed = compressed || fcompressed;
        ret.Add(data);
        if (final)
        {
            break;
        }
    }
    if (compressed && UsingCompression)
    {
        // Append the 4-byte empty deflate block tail before inflating —
        // presumably the WebSocket permessage-deflate convention (the sender
        // strips this trailer); TODO confirm against the framing code.
        ret.Add(new byte[] { 0x00, 0x00, 0xff, 0xff });
    }
    var td = ret.SelectMany(x => x).ToArray();
    if (compressed && UsingCompression)
    {
        // Inflate the concatenated payload in 64 KiB chunks.
        var oret = new List<byte[]>();
        using (var ds = new DeflateStream(new MemoryStream(td), CompressionMode.Decompress))
        {
            while (true)
            {
                var chunk = new byte[65536];
                var rlen = await ds.ReadAsync(chunk, 0, chunk.Length);
                if (rlen == 0)
                {
                    break;
                }
                // Full chunks are kept as-is; a short final chunk is trimmed.
                oret.Add(rlen == 65536 ? chunk : chunk.Take(rlen).ToArray());
            }
            return oret.SelectMany(x => x).ToArray();
        }
    }
    return td;
}
/// <summary>
/// Inflates a deflate-compressed byte array into a new array.
/// </summary>
/// <param name="compressedArray">The compressed input bytes.</param>
/// <param name="bufferSize">Size of the temporary read buffer.</param>
/// <returns>The fully decompressed bytes.</returns>
public async Task<byte[]> Decompress(byte[] compressedArray, int bufferSize)
{
    // The inner MemoryStream is disposed together with the DeflateStream.
    var source = new MemoryStream(compressedArray);
    using (var inflater = new DeflateStream(source, CompressionMode.Decompress))
    using (var collected = new MemoryStream())
    {
        var scratch = new byte[bufferSize];
        for (int n; (n = await inflater.ReadAsync(scratch, 0, scratch.Length)) > 0;)
        {
            await collected.WriteAsync(scratch, 0, n);
        }
        return collected.ToArray();
    }
}
/// <summary>
/// Inflates a deflate-compressed byte array into a new array, using the
/// project-wide buffer size for each read.
/// </summary>
/// <param name="compressedArray">The compressed input bytes.</param>
/// <returns>The fully decompressed bytes.</returns>
public async Task<byte[]> Decompress(byte[] compressedArray)
{
    // The inner MemoryStream is disposed together with the DeflateStream.
    var source = new MemoryStream(compressedArray);
    using (var inflater = new DeflateStream(source, CompressionMode.Decompress))
    using (var collected = new MemoryStream())
    {
        var scratch = new byte[Constants.BUFFER_SIZE];
        for (int n; (n = await inflater.ReadAsync(scratch, 0, scratch.Length).ConfigureAwait(false)) > 0;)
        {
            await collected.WriteAsync(scratch, 0, n).ConfigureAwait(false);
        }
        return collected.ToArray();
    }
}
// Making this async since regular read/write are tested below
/// <summary>
/// Strips the gzip header/footer from <paramref name="gzStream"/>, inflates the
/// raw deflate payload, and asserts the output matches <paramref name="compareStream"/>
/// byte-for-byte.
/// </summary>
private async Task DecompressAsync(MemoryStream compareStream, MemoryStream gzStream)
{
    // Remove the gzip wrapper so the payload can be fed to DeflateStream.
    var strippedMs = StripHeaderAndFooter.Strip(gzStream);
    var ms = new MemoryStream();
    var zip = new DeflateStream(strippedMs, CompressionMode.Decompress);
    var deflateStream = new MemoryStream();
    int _bufferSize = 1024;
    var bytes = new Byte[_bufferSize];
    bool finished = false;
    int retCount;
    // Drain the decompressor until it reports end of data (0 bytes).
    while (!finished)
    {
        retCount = await zip.ReadAsync(bytes, 0, _bufferSize);
        if (retCount != 0)
        {
            await deflateStream.WriteAsync(bytes, 0, retCount);
        }
        else
        {
            finished = true;
        }
    }
    deflateStream.Position = 0;
    compareStream.Position = 0;
    // Compare decompressed output with the expected content, element by element.
    byte[] compareArray = compareStream.ToArray();
    byte[] writtenArray = deflateStream.ToArray();
    Assert.Equal(compareArray.Length, writtenArray.Length);
    for (int i = 0; i < compareArray.Length; i++)
    {
        Assert.Equal(compareArray[i], writtenArray[i]);
    }
}
/// <summary>
/// Inflates a deflate-compressed message payload into a byte array.
/// </summary>
/// <param name="compressedMessagePayload">The compressed payload bytes.</param>
/// <returns>The decompressed message bytes.</returns>
private static async Task<byte[]> DecompressBinaryWithDeflateAsync(byte[] compressedMessagePayload)
{
    using (var source = new MemoryStream(compressedMessagePayload))
    using (var inflater = new DeflateStream(source, CompressionMode.Decompress))
    using (var assembled = new MemoryStream())
    {
        // Drain the deflate stream in 16 KiB chunks into the output stream.
        var chunk = new byte[16 * 1024];
        int n;
        while ((n = await inflater.ReadAsync(chunk, 0, chunk.Length)) > 0)
        {
            assembled.Write(chunk, 0, n);
        }
        return assembled.ToArray();
    }
}
/// <summary>
/// Verifies the argument-validation contract of DeflateStream.Read/Write and their
/// async counterparts: null buffers, negative offsets/counts, out-of-range spans,
/// and calls in the wrong CompressionMode all throw the documented exception types.
/// </summary>
public void ReadWriteArgumentValidation()
{
    // Compress mode: Write validates its arguments; Read is rejected outright.
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
    {
        Assert.Throws<ArgumentNullException>(() => ds.Write(null, 0, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Write(new byte[1], -1, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Write(new byte[1], 0, -1));
        Assert.Throws<ArgumentException>(() => ds.Write(new byte[1], 0, 2));
        Assert.Throws<ArgumentException>(() => ds.Write(new byte[1], 1, 1));
        Assert.Throws<InvalidOperationException>(() => ds.Read(new byte[1], 0, 1));
        // A zero-length write on a compress stream is a legal no-op.
        ds.Write(new byte[1], 0, 0);
    }
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Compress))
    {
        // Async argument validation throws synchronously, so the returned
        // tasks are intentionally not awaited here.
        Assert.Throws<ArgumentNullException>(() => { ds.WriteAsync(null, 0, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.WriteAsync(new byte[1], -1, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.WriteAsync(new byte[1], 0, -1); });
        Assert.Throws<ArgumentException>(() => { ds.WriteAsync(new byte[1], 0, 2); });
        Assert.Throws<ArgumentException>(() => { ds.WriteAsync(new byte[1], 1, 1); });
        Assert.Throws<InvalidOperationException>(() => { ds.Read(new byte[1], 0, 1); });
    }
    // Decompress mode: Read validates its arguments; Write is rejected outright.
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
    {
        Assert.Throws<ArgumentNullException>(() => ds.Read(null, 0, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Read(new byte[1], -1, 0));
        Assert.Throws<ArgumentOutOfRangeException>(() => ds.Read(new byte[1], 0, -1));
        Assert.Throws<ArgumentException>(() => ds.Read(new byte[1], 0, 2));
        Assert.Throws<ArgumentException>(() => ds.Read(new byte[1], 1, 1));
        Assert.Throws<InvalidOperationException>(() => ds.Write(new byte[1], 0, 1));
        // A zero-length read must return 0 and leave the buffer untouched.
        var data = new byte[1] { 42 };
        Assert.Equal(0, ds.Read(data, 0, 0));
        Assert.Equal(42, data[0]);
    }
    using (var ds = new DeflateStream(new MemoryStream(), CompressionMode.Decompress))
    {
        // Same synchronous-validation contract for the async read path.
        Assert.Throws<ArgumentNullException>(() => { ds.ReadAsync(null, 0, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.ReadAsync(new byte[1], -1, 0); });
        Assert.Throws<ArgumentOutOfRangeException>(() => { ds.ReadAsync(new byte[1], 0, -1); });
        Assert.Throws<ArgumentException>(() => { ds.ReadAsync(new byte[1], 0, 2); });
        Assert.Throws<ArgumentException>(() => { ds.ReadAsync(new byte[1], 1, 1); });
        Assert.Throws<InvalidOperationException>(() => { ds.Write(new byte[1], 0, 1); });
    }
}
/// <summary>
/// Forwards the read to the supplied stream's Task-based ReadAsync overload.
/// </summary>
public override Task<int> ReadAsync(DeflateStream unzip, byte[] buffer, int offset, int count)
{
    return unzip.ReadAsync(buffer, offset, count);
}
/// <summary>
/// Reads from the wrapped deflate stream into <paramref name="buffer"/>.
/// </summary>
public Task<int> ReadAsync(byte[] buffer, int offset, int count)
    => DeflateStreamInstance.ReadAsync(buffer, offset, count);
/// <summary>
/// Forwards the read to the supplied stream's Task-based ReadAsync overload.
/// </summary>
public override Task<int> ReadAsync(DeflateStream unzip, byte[] buffer, int offset, int count)
{
    return unzip.ReadAsync(buffer, offset, count);
}
/// <summary>
/// Continuously reads danmaku messages in a loop; must not be called more than once.
/// </summary>
/// <returns></returns>
public async Task ReadMessageLoop()
{
    while (_connected)
    {
        var headBuffer = new byte[ProtocolHeadLength];
        // Read the protocol header first.
        // NOTE(review): ReadAsync may return fewer bytes than requested; the
        // return value is not checked here — confirm _roomStream guarantees
        // full reads, otherwise headers can be parsed from partial data.
        await _roomStream.ReadAsync(headBuffer, 0, ProtocolHeadLength);
        // Parse the header.
        DanmuHead danmuHead = DanmuHead.BufferToDanmuHead(headBuffer);
        // Validate the protocol: skip frames whose declared header length differs.
        if (danmuHead.HeaderLength != ProtocolHeadLength)
        {
            continue;
        }
        // Buffer for the frame payload.
        byte[] dataBuffer;
        if (danmuHead.Action == 3)
        {
            // Heartbeat reply: the payload is the room's viewer count (popularity).
            dataBuffer = new byte[danmuHead.MessageLength()];
            await _roomStream.ReadAsync(dataBuffer, 0, danmuHead.MessageLength());
            var audiences = EndianBitConverter.EndianBitConverter.BigEndian.ToInt32(dataBuffer, 0);
            _messageHandler.AudiencesHandlerAsync(audiences);
            continue;
        }
        string tmpData;
        JObject json = null;
        if (danmuHead.Action == 5 && danmuHead.Version == ProtocolVersion)
        {
            // Payload carries gift/danmaku/announcement content data.
            // Read the payload into the buffer.
            dataBuffer = new byte[danmuHead.MessageLength()];
            await _roomStream.ReadAsync(dataBuffer, 0, danmuHead.MessageLength());
            // Then wrap the payload in a memory stream, skipping a 2-byte prefix
            // (presumably a zlib header — confirm against the protocol spec).
            string jsonStr;
            using (var ms = new MemoryStream(dataBuffer, 2, danmuHead.MessageLength() - 2))
            {
                // Build a decompression stream over the memory stream.
                // NOTE(review): 'deflate' is never disposed; the enclosing
                // MemoryStream using cleans up the backing data only.
                var deflate = new DeflateStream(ms, CompressionMode.Decompress);
                var headerbuffer = new byte[ProtocolHeadLength];
                try
                {
                    // Inner frames: header + JSON body, repeated until the
                    // decompressed stream is exhausted.
                    while (true)
                    {
                        await deflate.ReadAsync(headerbuffer, 0, ProtocolHeadLength);
                        danmuHead = DanmuHead.BufferToDanmuHead(headerbuffer);
                        var messageBuffer = new byte[danmuHead.MessageLength()];
                        var readLength = await deflate.ReadAsync(messageBuffer, 0, danmuHead.MessageLength());
                        jsonStr = Encoding.UTF8.GetString(messageBuffer, 0, danmuHead.MessageLength());
                        if (readLength == 0)
                        {
                            break;
                        }
                        json = JObject.Parse(jsonStr);
                        _messageDispatcher.DispatchAsync(json, _messageHandler);
                    }
                    continue;
                }
                catch (Exception e)
                {
                    // Read past the end of the data.
                    Debug.WriteLine(e);
                    throw;
                }
            }
        }
        // Fallback: uncompressed JSON payload.
        dataBuffer = new byte[danmuHead.MessageLength()];
        await _roomStream.ReadAsync(dataBuffer, 0, danmuHead.MessageLength());
        tmpData = Encoding.UTF8.GetString(dataBuffer);
        try
        {
            json = JObject.Parse(tmpData);
        }
        catch (Exception e)
        {
            Debug.WriteLine(e);
            // NOTE(review): 'throw e;' resets the stack trace; 'throw;' would preserve it.
            throw e;
        }
        // Dispatch everything except chat and gift messages on this path.
        if (!"DANMU_MSG".Equals(json["cmd"].ToString()) && !"SEND_GIFT".Equals(json["cmd"].ToString()))
        {
            _messageDispatcher.DispatchAsync(json, _messageHandler);
        }
    }
}
/// <summary>
/// Decompresses an "L33T"/"L66T"-format archive: a 4-character header, a 32- or
/// 64-bit payload length, a 2-byte deflate specification, then raw deflate data.
/// Writes at most the declared length to <paramref name="outputStream"/>.
/// </summary>
/// <param name="inputStream">Source stream positioned at the archive header.</param>
/// <param name="outputStream">Destination for the decompressed bytes.</param>
/// <param name="progress">Optional receiver of percentage values (0-100).</param>
/// <param name="ct">Cancellation token; observed by the reads and after each chunk.</param>
/// <exception cref="InvalidOperationException">The 4-character header is unknown.</exception>
public static async Task DecompressL33TZipAsync(
    Stream inputStream,
    Stream outputStream,
    IProgress<double> progress = null,
    CancellationToken ct = default)
{
    //Get extracted content length
    long fileLength;
    using (var reader = new AsyncBinaryReader(inputStream, new UTF8Encoding(), true))
    {
        var fileHeader = new string(await reader.ReadCharsAsync(4, ct));
        // The header variant decides whether the length field is 32- or 64-bit.
        switch (fileHeader.ToLower())
        {
            case L33THeader:
                fileLength = await reader.ReadInt32Async(ct);
                break;
            case L66THeader:
                fileLength = await reader.ReadInt64Async(ct);
                break;
            default:
                throw new InvalidOperationException($"Header '{fileHeader}' is not recognized as a valid type");
        }
        //Skip deflate specification (2 Byte)
        reader.BaseStream.Position += 2;
    }
    //Extract content
    var buffer = new byte[BufferSize];
    int bytesRead;
    var totalBytesRead = 0L;
    var lastProgress = 0d;
    using var compressedStream = new DeflateStream(inputStream, CompressionMode.Decompress);
    while ((bytesRead = await compressedStream.ReadAsync(buffer, 0, buffer.Length, ct)) > 0)
    {
        ct.ThrowIfCancellationRequested();
        if (bytesRead > fileLength)
        {
            // Entire expected output fits inside this one chunk; write only fileLength bytes.
            // NOTE(review): this branch implicitly assumes totalBytesRead == 0
            // (first iteration) — confirm fileLength semantics.
            totalBytesRead += fileLength;
            await outputStream.WriteAsync(buffer, 0, (int)fileLength, ct);
        }
        else if (totalBytesRead + bytesRead <= fileLength)
        {
            // Normal case: the whole chunk fits within the remaining budget.
            totalBytesRead += bytesRead;
            await outputStream.WriteAsync(buffer, 0, bytesRead, ct);
        }
        else if (totalBytesRead + bytesRead > fileLength)
        {
            // Final chunk: write only the bytes remaining up to fileLength.
            var leftToRead = fileLength - totalBytesRead;
            totalBytesRead += leftToRead;
            await outputStream.WriteAsync(buffer, 0, (int)leftToRead, ct);
        }
        // Throttle progress callbacks to at most one per whole percent.
        var newProgress = (double)totalBytesRead / fileLength * 100;
        if (newProgress - lastProgress > 1)
        {
            progress?.Report(newProgress);
            lastProgress = newProgress;
        }
        // Stop once the declared length has been produced.
        if (totalBytesRead >= fileLength)
        {
            break;
        }
    }
}
/// <summary>
/// Forwards the read, unmodified, to the wrapped deflate stream.
/// </summary>
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    => _deflate.ReadAsync(buffer, offset, count, cancellationToken);
/// <summary>
/// Demonstrates stream compression (in memory) and zip-archive manipulation.
/// </summary>
// BUG FIX: was 'async void', whose exceptions are unobservable; returning Task
// keeps existing call sites compiling while letting callers await and observe errors.
static async Task Function2()
{
    // TEXT ADAPTERS
    //using (FileStream fs = File.Create("test.txt"))
    //{
    //    using (TextWriter writer = new StreamWriter(fs))
    //    //using (TextWriter writer = new StreamWriter(fs, Encoding.Unicode))
    //    {
    //        writer.WriteLine("wiersz1");
    //        writer.WriteLine("wiersz2");
    //    }
    //}
    //using (FileStream fs = File.OpenRead("test.txt"))
    //{
    //    using (TextReader reader = new StreamReader(fs))
    //    {
    //        Console.WriteLine(reader.ReadLine());
    //        Console.WriteLine(reader.ReadLine());
    //    }
    //}
    // Same as below, shortened, because File offers static CreateText, AppendText and OpenText:
    //using (TextWriter writer = File.CreateText("text.txt"))
    //{
    //    writer.WriteLine("wiersz1");
    //    writer.WriteLine("wiersz2");
    //}
    //using (TextWriter writer = File.AppendText("text.txt"))
    //    writer.WriteLine("wiersz3");
    //using (TextReader reader = File.OpenText("text.txt"))
    //    while (reader.Peek() > -1)
    //    {
    //        Console.WriteLine(reader.ReadLine());
    //    }

    // STREAM COMPRESSION
    //string[] words = "The quixk brown fox jumps over the lazy dog".Split();
    //Random rand = new Random();
    //using (Stream s = File.Create("compressed.bin"))
    //using (Stream ds = new DeflateStream(s, CompressionMode.Compress))
    //using (TextWriter w = new StreamWriter(ds))
    //{
    //    for (int i = 0; i < 1000; i++)
    //        await w.WriteAsync(words[rand.Next(words.Length)] + " ");
    //}
    //Console.WriteLine(new FileInfo("compressed.bin").Length);
    //using (Stream s = File.Create(("compressed.bin")))
    //using (Stream ds = new DeflateStream(s, CompressionMode.Decompress))
    //using (TextReader r = new StreamReader(ds))
    //    Console.Write(await r.ReadToEndAsync());

    // In-memory compression
    byte[] data = new byte[1000];
    MemoryStream ms = new MemoryStream();
    using (Stream ds = new DeflateStream(ms, CompressionMode.Compress, true))
        await ds.WriteAsync(data, 0, data.Length);
    Console.WriteLine(ms.Length);
    ms.Position = 0;
    using (Stream ds = new DeflateStream(ms, CompressionMode.Decompress))
    {
        // BUG FIX: the original for-loop ('i += await ReadAsync(...)') would spin
        // forever if ReadAsync ever returned 0 before the buffer was full;
        // bail out explicitly on end-of-stream.
        for (int i = 0; i < 1000; )
        {
            int n = await ds.ReadAsync(data, i, 1000 - i);
            if (n == 0)
            {
                break;
            }
            i += n;
        }
    }

    // Working with zip-archive files (p. 637)
    // add a ZipFile reference to the project
    //ZipFile.CreateFromDirectory(@"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\Chapter15",
    //    @"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\skompresowany15.zip");
    // decompression
    //ZipFile.ExtractToDirectory(@"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\skompresowany15.zip", @"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\Chapter15");
    using (ZipArchive zip = ZipFile.Open(@"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\skompresowany15.zip", ZipArchiveMode.Read))
    {
        foreach (var entry in zip.Entries)
        {
            Console.WriteLine(entry.FullName + " " + entry.Length);
        }
    }
    byte[] data2 = File.ReadAllBytes(@"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\README.md");
    using (ZipArchive zip = ZipFile.Open(@"D:\Dokumenty\Visual Studio 2015\Projects\c_sharp_book\skompresowany15.zip", ZipArchiveMode.Update))
    {
        // BUG FIX: the entry stream returned by Open() must be disposed,
        // otherwise the written bytes may never be flushed into the archive.
        using (var entryStream = zip.CreateEntry(@"Readme.md").Open())
        {
            entryStream.Write(data2, 0, data2.Length);
        }
    }

    // LINQ Zip demo: pairs letters with numbers ("A1", "B2", "C3").
    var letters = new string[] { "A", "B", "C", "D" };
    var numbers = new int[] { 1, 2, 3 };
    var q = letters.Zip(numbers, (l, n) => l + n.ToString());
    foreach (var s in q)
    {
        Console.WriteLine(s);
    }
}
/// <summary>
/// Receive loop for the danmaku TCP connection: reads 16-byte headers and their
/// payloads, inflating version-2 payloads before dispatch. On any error the
/// connection is closed and a reconnect is attempted unless cancellation was requested.
/// </summary>
/// <param name="token">Cancellation token passed to every read.</param>
private async Task ReceiveMessageLoop(CancellationToken token)
{
    try
    {
        var headerBuff = new byte[16];
        while (DmTcpConnected)
        {
            // Read the fixed 16-byte protocol header.
            await _dmNetStream.ReadByteAsync(headerBuff, 0, 16, token);
            var protocol = new DanmuProtocol(headerBuff);
            if (protocol.PacketLength < 16)
            {
                throw new NotSupportedException($@"协议失败: (L:{protocol.PacketLength})");
            }
            var bodyLength = protocol.PacketLength - 16;
            if (bodyLength == 0)
            {
                continue; // no payload
            }
            var buffer = new byte[bodyLength];
            await _dmNetStream.ReadByteAsync(buffer, 0, bodyLength, token);
            switch (protocol.Version)
            {
                case 0:
                case 1:
                {
                    // Uncompressed payload: dispatch directly.
                    ProcessDanmu(protocol.Operation, buffer, bodyLength);
                    break;
                }
                case 2:
                {
                    // Deflate-compressed payload; the first 2 bytes are skipped
                    // (presumably a zlib header — confirm against the protocol).
                    await using var ms = new MemoryStream(buffer, 2, bodyLength - 2);
                    await using var deflate = new DeflateStream(ms, CompressionMode.Decompress);
                    // The decompressed data is itself a sequence of header+body frames.
                    while (await deflate.ReadAsync(headerBuff, 0, 16, token) > 0)
                    {
                        protocol = new DanmuProtocol(headerBuff);
                        bodyLength = protocol.PacketLength - 16;
                        if (bodyLength == 0)
                        {
                            continue; // no payload
                        }
                        if (buffer.Length < bodyLength) // reallocate only if too small
                        {
                            buffer = new byte[bodyLength];
                        }
                        // NOTE(review): this read's return value is unchecked;
                        // a short read would dispatch a partially filled buffer.
                        await deflate.ReadAsync(buffer, 0, bodyLength, token);
                        ProcessDanmu(protocol.Operation, buffer, bodyLength);
                    }
                    break;
                }
                default:
                {
                    LogEvent?.Invoke(this, new LogEventArgs { Log = $@"[{_roomId}] 弹幕协议不支持" });
                    break;
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Tear down the connection and retry unless we were asked to stop.
        Debug.WriteLine(ex);
        _client?.Close();
        _dmNetStream = null;
        if (!token.IsCancellationRequested)
        {
            LogEvent?.Invoke(this, new LogEventArgs { Log = $@"[{_roomId}] 弹幕连接被断开,将尝试重连" });
            await ConnectWithRetry(token);
        }
    }
}
/// <summary>
/// An inner stream whose async methods return null Tasks must surface as
/// ArgumentNullException from ReadAsync.
/// </summary>
public async Task WrapNullReturningTasksStream()
{
    var badInner = new BadWrappedStream(BadWrappedStream.Mode.ReturnNullTasks);
    using (var ds = new DeflateStream(badInner, CompressionMode.Decompress))
    {
        await Assert.ThrowsAsync<ArgumentNullException>(() => ds.ReadAsync(new byte[1024], 0, 1024));
    }
}
// Making this async since regular read/write are tested below
/// <summary>
/// Strips the gzip header/footer from <paramref name="gzStream"/>, inflates the
/// raw deflate payload, and asserts the output matches <paramref name="compareStream"/>
/// byte-for-byte.
/// </summary>
private async Task DecompressAsync(MemoryStream compareStream, MemoryStream gzStream)
{
    // Remove the gzip wrapper so the payload can be fed to DeflateStream.
    var strippedMs = StripHeaderAndFooter.Strip(gzStream);
    var ms = new MemoryStream();
    var zip = new DeflateStream(strippedMs, CompressionMode.Decompress);
    var deflateStream = new MemoryStream();
    int _bufferSize = 1024;
    var bytes = new Byte[_bufferSize];
    bool finished = false;
    int retCount;
    // Drain the decompressor until it reports end of data (0 bytes).
    while (!finished)
    {
        retCount = await zip.ReadAsync(bytes, 0, _bufferSize);
        if (retCount != 0)
            await deflateStream.WriteAsync(bytes, 0, retCount);
        else
            finished = true;
    }
    deflateStream.Position = 0;
    compareStream.Position = 0;
    // Compare decompressed output with the expected content, element by element.
    byte[] compareArray = compareStream.ToArray();
    byte[] writtenArray = deflateStream.ToArray();
    Assert.Equal(compareArray.Length, writtenArray.Length);
    for (int i = 0; i < compareArray.Length; i++)
    {
        Assert.Equal(compareArray[i], writtenArray[i]);
    }
}
/// <summary>
/// An inner stream whose async methods return null Tasks must surface as
/// InvalidOperationException from ReadAsync.
/// </summary>
public async Task WrapNullReturningTasksStream()
{
    var badInner = new BadWrappedStream(BadWrappedStream.Mode.ReturnNullTasks);
    using (var ds = new DeflateStream(badInner, CompressionMode.Decompress))
    {
        await Assert.ThrowsAsync<InvalidOperationException>(() => ds.ReadAsync(new byte[1024], 0, 1024));
    }
}
/// <summary>
/// An inner stream reporting impossible read counts must surface as
/// InvalidDataException (counts too large) or end-of-stream (counts too small),
/// on both the sync and async read paths.
/// </summary>
public async Task WrapStreamReturningBadReadValues()
{
    // Over-reporting inner stream: the decoder must reject the data as invalid.
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooLargeCounts), CompressionMode.Decompress))
        Assert.Throws<InvalidDataException>(() => ds.Read(new byte[1024], 0, 1024));
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooLargeCounts), CompressionMode.Decompress))
        await Assert.ThrowsAsync<InvalidDataException>(() => ds.ReadAsync(new byte[1024], 0, 1024));
    // Under-reporting inner stream: treated as end of stream, so Read returns 0.
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooSmallCounts), CompressionMode.Decompress))
        Assert.Equal(0, ds.Read(new byte[1024], 0, 1024));
    using (var ds = new DeflateStream(new BadWrappedStream(BadWrappedStream.Mode.ReturnTooSmallCounts), CompressionMode.Decompress))
        Assert.Equal(0, await ds.ReadAsync(new byte[1024], 0, 1024));
}
/// <summary>
/// Loads all file fingerprints from the deflate-compressed MessagePack cache files.
/// For each file, a producer task inflates the file into a Pipe while a consumer
/// task deserializes length-prefixed FileFingerprint blocks from it. Also decides
/// whether the cache needs rebuilding (corrupt/short files, too many stale blobs,
/// too many files) and triggers the rebuild.
/// </summary>
/// <param name="cancellationToken">Token observed by all reads and pipe operations.</param>
/// <returns>Map from full file path to its most recent fingerprint.</returns>
async Task<IReadOnlyDictionary<string, FileFingerprint>> LoadBlobsImplAsync(CancellationToken cancellationToken)
{
    var blobs = new Dictionary<string, FileFingerprint>();
    var needRebuild = false;
    var blobCount = 0;   // total blocks read, including duplicates superseded later
    var fileCount = 0;
    _fileSequence.Rescan();
    var totalSize = 0L;       // decompressed bytes read (for diagnostics)
    var compressedSize = 0L;  // on-disk bytes read (for diagnostics)
    foreach (var fileInfo in _fileSequence.Files)
    {
        ++fileCount;
        try
        {
            fileInfo.Refresh();
            // Files shorter than 5 bytes cannot hold a valid length prefix.
            if (fileInfo.Length < 5)
            {
                needRebuild = true;
                continue;
            }
            cancellationToken.ThrowIfCancellationRequested();
            var pl = new Pipe();
            var writer = pl.Writer;
            // Copies 'length' bytes from 'buffer' into the pipe writer, spanning
            // multiple writer segments if necessary.
            void CopyToWriter(int length, byte[] buffer)
            {
                var input = buffer.AsSpan(0, length);
                while (input.Length > 0)
                {
                    var output = writer.GetSpan(length);
                    if (output.Length <= 0)
                    {
                        throw new InvalidOperationException("Unexpected non-positive span length: " + output.Length);
                    }
                    var copySize = Math.Min(input.Length, output.Length);
                    input.Slice(0, copySize).CopyTo(output);
                    input = input.Slice(copySize);
                    writer.Advance(copySize);
                }
            }
            // Producer: inflate the file and feed the pipe.
            var decompressTask = Task.Run(async () =>
            {
                var buffer = ArrayPool<byte>.Shared.Rent(64 * 1024);
                using (var fileStream = OpenMsgPackFileForRead(fileInfo))
                using (var decodeStream = new DeflateStream(fileStream, CompressionMode.Decompress))
                {
                    try
                    {
                        for (; ;)
                        {
                            var read = await decodeStream.ReadAsync(buffer, 0, buffer.Length, cancellationToken).ConfigureAwait(false);
                            if (read < 1)
                            {
                                break;
                            }
                            totalSize += read;
                            CopyToWriter(read, buffer);
                            await writer.FlushAsync(cancellationToken).ConfigureAwait(false);
                        }
                        compressedSize += fileStream.Length;
                    }
                    catch (IOException ex)
                    {
                        needRebuild = true;
                        // The entry might or might not be valid.
                        Debug.WriteLine("MessagePackFileFingerprintStore.LoadBlobsImplAsync() read failed: " + ex.Message);
                    }
                    finally
                    {
                        ArrayPool<byte>.Shared.Return(buffer);
                        // Completing the writer signals end-of-data to the consumer.
                        writer.Complete();
                    }
                }
            }, cancellationToken);
            var reader = pl.Reader;
            // Consumer: decode length-prefixed MessagePack blocks from the pipe.
            var deserializeTask = Task.Run(async () =>
            {
                var workBuffer = ArrayPool<byte>.Shared.Rent(64 * 1024);
                try
                {
                    for (; ;)
                    {
                        if (!reader.TryRead(out var input))
                        {
                            input = await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
                        }
                        var buffer = input.Buffer;
                        while (buffer.Length > 32)
                        {
                            // We now have enough data to start reading.
                            // The length prefix occupies at most 5 bytes.
                            buffer.Slice(0, 5).CopyTo(workBuffer.AsSpan());
                            var length = IntFormatter.Deserialize(workBuffer, 0, MessagePackSerializer.DefaultResolver, out var actualSize);
                            if (length == 0)
                            {
                                return; // EOF
                            }
                            if (length < 0 || length > workBuffer.Length)
                            {
                                throw new FileFormatException($"Invalid block length {length}");
                            }
                            buffer = buffer.Slice(actualSize);
                            // Wait until the whole block is buffered in the pipe.
                            while (buffer.Length < length)
                            {
                                if (input.IsCompleted)
                                {
                                    return;
                                }
                                reader.AdvanceTo(buffer.Start, buffer.End);
                                if (!reader.TryRead(out input))
                                {
                                    input = await reader.ReadAsync(cancellationToken).ConfigureAwait(false);
                                }
                                buffer = input.Buffer;
                            }
                            // We now have enough data to decode the block.
                            buffer.Slice(0, length).CopyTo(workBuffer.AsSpan());
                            buffer = buffer.Slice(length);
                            var fileFingerprint = MessagePackSerializer.Deserialize<FileFingerprint>(workBuffer, 0, MessagePackSerializer.DefaultResolver, out actualSize);
                            if (length != actualSize)
                            {
                                throw new FileFormatException($"Block length mismatch {length} != {actualSize}");
                            }
                            // Later blocks for the same path overwrite earlier ones.
                            if (blobs.ContainsKey(fileFingerprint.FullFilePath))
                            {
                                Debug.WriteLine($"Collision for {fileFingerprint.FullFilePath}");
                            }
                            blobs[fileFingerprint.FullFilePath] = fileFingerprint;
                            ++blobCount;
                        }
                        reader.AdvanceTo(buffer.Start, buffer.End);
                        if (input.IsCompleted)
                        {
                            break;
                        }
                    }
                }
                finally
                {
                    ArrayPool<byte>.Shared.Return(workBuffer);
                    reader.Complete();
                }
            });
            await Task.WhenAll(decompressTask, deserializeTask).ConfigureAwait(false);
        }
        // Any of these means the file is damaged; schedule a rebuild and move on.
        catch (IOException)
        {
            needRebuild = true;
        }
        catch (InvalidDataException)
        {
            needRebuild = true;
        }
        catch (FileFormatException)
        {
            needRebuild = true;
        }
    }
    Debug.WriteLine($"Read {totalSize.BytesToMiB():F2}MiB bytes from {compressedSize.BytesToMiB():F2}MiB file");
    var count = (double)blobs.Count;
    Debug.WriteLine($"Average size {totalSize / count:F1} bytes or {compressedSize / count:F1} compressed");
    // Rebuild when the files carry substantially more blocks than live entries…
    if (blobCount > blobs.Count + 100 + blobs.Count / 8)
    {
        needRebuild = true;
    }
    // …or when the cache is fragmented across too many files.
    if (fileCount > 16)
    {
        needRebuild = true;
    }
    if (needRebuild)
    {
        Console.WriteLine("Rebuilding cache files");
        await RebuildCacheAsync(blobs, cancellationToken).ConfigureAwait(false);
    }
    return (blobs);
}