/// <summary>
/// Demo entry point: round-trips a short ASCII message through SnappyCodec
/// (one-shot compress/uncompress) and then through SnappyStream (streaming
/// compress followed by streaming decompress), printing both results.
/// </summary>
static void Main(string[] args)
{
    var data = Encoding.ASCII.GetBytes("Hello World!");
    Console.WriteLine("SnappyCodec roundtrip: {0}",
        Encoding.ASCII.GetString(SnappyCodec.Uncompress(SnappyCodec.Compress(data))));

    var buffer = new MemoryStream();
    var stream = new SnappyStream(buffer, CompressionMode.Compress);
    stream.Write(data, 0, data.Length);
    stream.Close(); // Close flushes the final chunk into the backing buffer.

    buffer = new MemoryStream(buffer.ToArray());
    stream = new SnappyStream(buffer, CompressionMode.Decompress);
    var roundtrip = new byte[data.Length];
    int read = stream.Read(roundtrip, 0, data.Length);
    // FIX (CA2201): ApplicationException is a reserved base type and should not
    // be thrown directly; InvalidOperationException with a message is the
    // conventional replacement.
    if (read != data.Length)
    {
        throw new InvalidOperationException("Short read from SnappyStream");
    }
    if (0 != stream.Read(roundtrip, 0, data.Length))
    {
        throw new InvalidOperationException("Expected end of SnappyStream");
    }
    Console.WriteLine("SnappyStream roundtrip: {0}", Encoding.ASCII.GetString(roundtrip));
}
/// <summary>
/// Compresses the given bytes through a SnappyStream and returns the
/// resulting framed, compressed payload.
/// </summary>
byte[] GetCompressedFile(byte[] uncompressed)
{
    var output = new MemoryStream();
    // leaveOpen: true so the MemoryStream survives the compressor's disposal.
    using (var snappy = new SnappyStream(output, CompressionMode.Compress, true))
    {
        snappy.Write(uncompressed, 0, uncompressed.Length);
    }
    output.Close(); // ToArray is still valid on a closed MemoryStream.
    return output.ToArray();
}
/// <summary>
/// Writing a single chunk longer than the Snappy frame limit must be
/// rejected with InvalidOperationException.
/// </summary>
public void stream_write_throws_exception_on_chunk_length_too_long()
{
    var ms = new MemoryStream();
    using (var target = new SnappyStream(ms, CompressionMode.Compress, true, false))
    {
        // FIX: the original wrote "1 << 20 + 1", which parses as 1 << 21
        // because '+' binds tighter than '<<'. The intent was one byte past
        // 1 MiB, i.e. (1 << 20) + 1; the buffer is sized to match so that
        // Stream argument validation (count > buffer.Length) cannot fire
        // before the chunk-length check being tested.
        byte[] buffer = new byte[(1 << 20) + 1];
        Assert.Throws<InvalidOperationException>(() => target.Write(buffer, 0, (1 << 20) + 1));
    }
}
/// <summary>
/// Compresses the input bytes with Snappy framing and returns the
/// compressed data as a new array.
/// </summary>
public override byte[] Compress(byte[] uncompressedData)
{
    var destination = new MemoryStream();
    using (var snappy = new SnappyStream(destination, CompressionMode.Compress))
    {
        snappy.Write(uncompressedData, 0, uncompressedData.Length);
    }
    // The using block disposes the MemoryStream too, but ToArray remains
    // callable on a disposed MemoryStream.
    return destination.ToArray();
}
/// <summary>
/// Verifies the exact framed bytes emitted when 100 zero bytes are written
/// through a compressing SnappyStream.
/// </summary>
public void data_written_to_stream()
{
    var ms = new MemoryStream();
    using (var target = new SnappyStream(ms, CompressionMode.Compress, true, false))
    {
        byte[] buffer = new byte[100];
        target.Write(buffer, 0, buffer.Length);
    }
    // FIX: ToArray() already copies exactly ms.Length bytes, so the original
    // trailing ".Take((int)ms.Length)" was redundant (that trim is only
    // needed for GetBuffer(), which can return a larger internal buffer).
    Assert.Equal(new byte[] { 255, 115, 78, 97, 80, 112, 89, 0, 0, 0, 8, 0, 100, 0, 254, 1, 0, 130, 1, 0, 0 }, ms.ToArray());
}
/// <summary>
/// Streams <paramref name="input"/> to <paramref name="output"/>, compressing
/// or decompressing with SnappyStream according to <paramref name="options"/>.
/// In test mode the data is fully decompressed and discarded, which validates
/// the compressed stream without producing output.
/// </summary>
static void ProcessFile(Stream input, Stream output, Options options)
{
    var buffer = new byte[8192];
    if (options.Mode == CompressionMode.Compress)
    {
        using (var compressor = new SnappyStream(output, CompressionMode.Compress, true))
        {
            while (true)
            {
                int read = input.Read(buffer, 0, buffer.Length);
                if (read == 0)
                {
                    break;
                }
                compressor.Write(buffer, 0, read);
            }
        }
    }
    else if (!options.Test)
    {
        using (var decompressor = new SnappyStream(input, CompressionMode.Decompress, true))
        {
            while (true)
            {
                int read = decompressor.Read(buffer, 0, buffer.Length);
                if (read == 0)
                {
                    break;
                }
                output.Write(buffer, 0, read);
            }
        }
    }
    else
    {
        using (var decompressor = new SnappyStream(input, CompressionMode.Decompress, true))
        {
            while (true)
            {
                // FIX: the original read from the raw "input" stream here, so
                // test mode never actually decompressed anything. Reading from
                // the decompressor forces full decompression (and therefore
                // integrity checking), which is the point of test mode.
                int read = decompressor.Read(buffer, 0, buffer.Length);
                if (read == 0)
                {
                    break;
                }
            }
        }
    }
}
/// <summary>
/// Benchmarks Snappy compression throughput by writing the benchmark input
/// through a compressing SnappyStream into a byte-counting NullStream; the
/// note records the achieved compression ratio.
/// </summary>
public void Compression(string name)
{
    Benchmark.Run("Compressing", name, benchmark =>
    {
        var stream = new NullStream();
        // FIX: the SnappyStream was never disposed; the using block releases
        // it (and flushes any trailing frame) once timing and the ratio note
        // have been captured, so measured behavior is unchanged.
        using (var compressor = new SnappyStream(stream, CompressionMode.Compress))
        {
            benchmark.Stopwatch.Start();
            for (int i = 0; i < benchmark.Iterations; ++i)
            {
                compressor.Write(benchmark.Input, 0, benchmark.Input.Length);
                compressor.Flush(); // force each iteration's frame out to the sink
            }
            benchmark.Stopwatch.Stop();
            benchmark.Note = String.Format(" ({0:0.00 %})", stream.Written / (double)benchmark.Input.Length / benchmark.Iterations);
        }
    });
}
// Stress test: for ~3 seconds, repeatedly pushes a random sequence of test
// files through a compress -> RandomChunkStream -> decompress pipeline, with
// the reader running on a thread-pool thread and the writer on the test
// thread. It exercises byte-at-a-time and bulk I/O paths, sync and (under
// SNAPPY_ASYNC) async paths, and random Flush points; CheckBuffers verifies
// each file round-trips intact and ReadByte() must return -1 at end of stream.
// NOTE(review): the checked((byte)...) cast will overflow-throw on a premature
// -1 from ReadByte, which appears deliberate (it fails the reader thread).
// Left byte-identical: the reader/writer interleaving and the conditional
// compilation branches make a behavior-preserving restyle too risky.
public void Twister() { var testdata = Directory.GetFiles(Benchmark.DataPath).Select(f => File.ReadAllBytes(f)).ToArray(); long totalData = 0; var stopwatch = new Stopwatch(); stopwatch.Start(); while (stopwatch.Elapsed < TimeSpan.FromSeconds(3)) { int count = Random.Next(0, 21); var sequence = Enumerable.Range(0, count).Select(n => testdata[Random.Next(testdata.Length)]).ToArray(); totalData += sequence.Sum(f => f.Length); var stream = new RandomChunkStream(); ManualResetEvent doneReading = new ManualResetEvent(false); ThreadPool.QueueUserWorkItem(ctx => { try { using (var decompressor = new SnappyStream(stream, CompressionMode.Decompress)) { foreach (var file in sequence) { var decompressed = new byte[file.Length]; if (decompressed.Length < 500) { for (int i = 0; i < decompressed.Length; ++i) { decompressed[i] = checked ((byte)decompressor.ReadByte()); } } else { #if SNAPPY_ASYNC if (ReadRandom.Next(2) == 0) { ReadAllAsync(decompressor, decompressed, 0, decompressed.Length).Wait(); } else #endif ReadAll(decompressor, decompressed, 0, decompressed.Length); } CheckBuffers(file, decompressed); } Assert.AreEqual(-1, decompressor.ReadByte()); } doneReading.Set(); } catch (Exception e) { Console.WriteLine("Read thread failed: {0}", e); throw; } }); using (var compressor = new SnappyStream(stream, CompressionMode.Compress)) { foreach (var file in sequence) { if (file.Length < 500) { for (int i = 0; i < file.Length; ++i) { compressor.WriteByte(file[i]); } } else { #if SNAPPY_ASYNC if (WriteRandom.Next(2) == 0) { compressor.WriteAsync(file, 0, file.Length).Wait(); } else #endif compressor.Write(file, 0, file.Length); } if (WriteRandom.Next(10) == 0) { #if SNAPPY_ASYNC if (WriteRandom.Next(2) == 0) { compressor.FlushAsync().Wait(); } else #endif compressor.Flush(); } } #if SNAPPY_ASYNC if (WriteRandom.Next(3) == 0) { compressor.FlushAsync().Wait(); } else #endif { if (WriteRandom.Next(2) == 0) { compressor.Flush(); } } } doneReading.WaitOne(); } stopwatch.Stop(); 
Console.WriteLine("Ran {0} MB through the stream, that's {1:0.0} MB/s", totalData / 1024 / 1024, totalData / stopwatch.Elapsed.TotalSeconds / 1024 / 1024); }
/// <summary>
/// Verifies the exact framed bytes emitted when 100 zero bytes are written
/// through a compressing SnappyStream.
/// </summary>
public void data_written_to_stream()
{
    var expected = new byte[] { 255, 115, 78, 97, 80, 112, 89, 0, 0, 0, 8, 0, 100, 0, 254, 1, 0, 130, 1, 0, 0 };
    var ms = new MemoryStream();
    using (var target = new SnappyStream(ms, CompressionMode.Compress, true, false))
    {
        var payload = new byte[100];
        target.Write(payload, 0, payload.Length);
    }
    // GetBuffer() can return a larger internal buffer, so compare only the
    // first ms.Length bytes actually written.
    var actual = ms.GetBuffer().Take((int)ms.Length);
    Assert.Equal(expected, actual);
}
/// <summary>
/// A single 1 MiB Write must be rejected with InvalidOperationException
/// because it exceeds the maximum Snappy chunk length.
/// </summary>
public void stream_write_throws_exception_on_chunk_length_too_long()
{
    const int oversizedCount = 1 << 20;
    var ms = new MemoryStream();
    using (var target = new SnappyStream(ms, CompressionMode.Compress, true, false))
    {
        var buffer = new byte[oversizedCount];
        Assert.Throws<InvalidOperationException>(() => target.Write(buffer, 0, oversizedCount));
    }
}