// Round-tripping an empty payload through Zip/Unzip must yield an empty array.
public void ZeroChunkCompressTest()
{
    var compressed = BinaryCompressor.Zip(new ChunkByteArray(new byte[0]));

    var roundTripped = BinaryCompressor.Unzip(compressed);

    Assert.AreEqual(0, roundTripped.Length);
}
// A single-byte payload must survive a Zip/Unzip round trip intact.
public void SimpleChunkCompressTest()
{
    ChunkByteArray zip = BinaryCompressor.Zip(new ChunkByteArray(new byte[] { 1 }));
    ChunkByteArray result = BinaryCompressor.Unzip(zip);

    // Also pin the length: the original test only checked result[0], which
    // would still pass if Unzip returned extra trailing bytes.
    Assert.AreEqual(1, result.Length);
    Assert.AreEqual(1, result[0]);
}
// Asserts that two ChunkByteArray instances have identical length and contents.
private static void AssertAreArrayEqual(ChunkByteArray source, ChunkByteArray dest)
{
    Assert.AreEqual(source.Length, dest.Length);

    int length = source.Length;
    for (int index = 0; index != length; ++index)
    {
        Assert.AreEqual(source[index], dest[index]);
    }
}
/// <summary>
/// Decompresses a buffer previously produced by <see cref="Zip"/> and returns
/// the uncompressed bytes as a new <see cref="ChunkByteArray"/>.
/// </summary>
/// <param name="sourceChunkBuffer">Compressed archive bytes to inflate.</param>
/// <returns>The fully uncompressed payload.</returns>
public static ChunkByteArray Unzip(ChunkByteArray sourceChunkBuffer)
{
    using (Stream outputStream = UnzipStream(sourceChunkBuffer))
    {
        int length = (int)outputStream.Length;
        var uncompressedChunkBuffer = new ChunkByteArray(length);

        // Stream.Read may legally return fewer bytes than requested. The
        // original code guarded this only with Debug.Assert, which vanishes in
        // release builds and would silently return truncated data. Loop until
        // the whole stream is copied, and fail loudly on a short read.
        int total = 0;
        while (total < length)
        {
            int readed = outputStream.Read(uncompressedChunkBuffer, total, length - total);
            if (readed <= 0)
            {
                throw new EndOfStreamException("not all bytes readed");
            }
            total += readed;
        }
        return uncompressedChunkBuffer;
    }
}
// The ChunkByteArray(byte[]) constructor must copy both length and every element.
public void ChunkByteArrayConstructorTest()
{
    byte[] expected = GetTestArray();

    var chankArray = new ChunkByteArray(expected);

    Assert.AreEqual(expected.Length, chankArray.Length);
    for (int index = 0; index < expected.Length; index++)
    {
        Assert.AreEqual(expected[index], chankArray[index]);
    }
}
// A 64 KiB ChunkByteArray must report the requested length and support
// indexed writes followed by matching indexed reads across its whole range.
public void ChunkByteArrayLongTest()
{
    const int Size = 0x10000;
    var array = new ChunkByteArray(Size);

    Assert.AreEqual(Size, array.Length);

    for (int position = 0; position < Size; position++)
    {
        array[position] = (byte)position;
    }

    for (int position = 0; position < Size; position++)
    {
        Assert.AreEqual(array[position], (byte)position);
    }
}
/// <summary>
/// Reads the zip directory of a compressed buffer and returns the size the
/// payload will have after decompression, without actually inflating it.
/// </summary>
/// <param name="sourceChunkBuffer">Compressed archive bytes; must not be null.</param>
/// <returns>The uncompressed size recorded for the archive's single entry.</returns>
public static long GetUncompressedSize(ChunkByteArray sourceChunkBuffer)
{
    Utils.CheckNotNull(sourceChunkBuffer, "sourceBuffer");

    using (Stream zipStream = new MemoryStream(sourceChunkBuffer.Length))
    {
        // Copy the compressed bytes into a seekable stream, then rewind it
        // so ZipFile.Read can parse the archive from the start.
        zipStream.Write(sourceChunkBuffer, 0, sourceChunkBuffer.Length);
        zipStream.Flush();
        zipStream.Seek(0, SeekOrigin.Begin);

        using (ZipFile archive = ZipFile.Read(zipStream))
        {
            ZipEntry entry = archive[EntryName];
            return entry.UncompressedSize;
        }
    }
}
// Round-trips 1 MB of pseudo-random data through Zip/Unzip and verifies the
// result byte-for-byte.
public void ComplexChunkCompressTest()
{
    const int length = 1000000;
    var array = new byte[length];
    // Fixed seed: the original unseeded Random produced different data on
    // every run, so a failure could never be reproduced.
    new Random(12345).NextBytes(array);

    var source = new ChunkByteArray(array);
    var zip = BinaryCompressor.Zip(source);
    var result = BinaryCompressor.Unzip(zip);

    Assert.AreEqual(source.Length, result.Length);
    for (int i = 0; i < length; i++)
    {
        Assert.AreEqual(source[i], result[i]);
    }
}
// Reading a MemoryStream into a ChunkByteArray via the Read extension must
// transfer every byte unchanged.
public void ChunkByteArrayToStreamTest()
{
    byte[] expected = GetTestArray();
    ChunkByteArray chankArray;

    using (var stream = new MemoryStream(expected))
    {
        int total = (int)stream.Length;
        chankArray = new ChunkByteArray(total);

        int readed = stream.Read(chankArray, 0, total);

        Assert.AreEqual(stream.Length, readed, "not all bytes readed");
        Assert.AreEqual(chankArray.Length, readed);
    }

    Assert.AreEqual(expected.Length, chankArray.Length);
    for (int index = 0; index < expected.Length; index++)
    {
        Assert.AreEqual(expected[index], chankArray[index]);
    }
}
/// <summary>
/// Compresses a buffer into an in-memory zip archive (single entry named
/// <c>EntryName</c>) and returns the archive bytes as a <see cref="ChunkByteArray"/>.
/// </summary>
/// <param name="sourceChunkBuffer">Uncompressed data; must not be null.</param>
/// <returns>The compressed archive bytes.</returns>
public static ChunkByteArray Zip(ChunkByteArray sourceChunkBuffer)
{
    Utils.CheckNotNull(sourceChunkBuffer, "sourceBuffer");

    using (Stream stream = new MemoryStream())
    {
        using (var zip = new ZipFile())
        {
            zip.CompressionLevel = CompressionLevel.Default;
            // NOTE(review): ToArray() materializes the whole payload as one
            // byte[], which defeats the chunked representation for very large
            // buffers — worth revisiting if huge inputs are expected.
            zip.AddEntry(EntryName, sourceChunkBuffer.ToArray());
            zip.Save(stream);
        }
        stream.Flush();
        stream.Seek(0, SeekOrigin.Begin);

        int length = (int)stream.Length;
        var compressedChunkBuffer = new ChunkByteArray(length);

        // Stream.Read may legally return fewer bytes than requested. The
        // original Debug.Assert guard disappears in release builds, silently
        // yielding a truncated buffer. Loop until everything is copied.
        int total = 0;
        while (total < length)
        {
            int readed = stream.Read(compressedChunkBuffer, total, length - total);
            if (readed <= 0)
            {
                throw new EndOfStreamException("not all bytes readed");
            }
            total += readed;
        }
        return compressedChunkBuffer;
    }
}
// Writing a ChunkByteArray to a stream via the Write extension must produce a
// byte-for-byte copy when read back into a plain byte[].
public void ChunkByteArrayFromStreamTest()
{
    var chankArray = new ChunkByteArray(GetTestArray());
    var array = new byte[chankArray.Length];

    using (Stream stream = new MemoryStream(chankArray.Length))
    {
        stream.Write(chankArray, 0, chankArray.Length);
        stream.Flush();
        stream.Seek(0, SeekOrigin.Begin);

        int readed = stream.Read(array, 0, (int)stream.Length);

        Assert.AreEqual(stream.Length, readed, "not all bytes readed");
        Assert.AreEqual(array.Length, readed);
    }

    for (int index = 0; index < array.Length; index++)
    {
        Assert.AreEqual(chankArray[index], array[index]);
    }
}
// Decompresses sourceChunkBuffer and returns a rewound stream positioned at
// the start of the uncompressed payload.
//
// inputBuffer / outputBuffer are optional reusable scratch buffers passed by
// ref: when a caller supplies one that is too small it is replaced with a
// larger array (sized via RoundCompressedSize / RoundUncompressedSize) so the
// caller can reuse the grown buffer on later calls; when a caller passes null,
// a fresh exactly-sized stream is allocated and the ref parameter stays null.
//
// NOTE(review): the MyMemoryStream holding the compressed bytes is never
// explicitly disposed — presumably harmless for an in-memory stream, but
// confirm MyMemoryStream owns no other resources.
public static Stream UnzipStream(ChunkByteArray sourceChunkBuffer, ref byte[] inputBuffer, ref byte[] outputBuffer)
{
    Utils.CheckNotNull(sourceChunkBuffer, "sourceBuffer");
    // Grow the caller-supplied input buffer if it cannot hold the compressed data.
    if (inputBuffer != null && inputBuffer.Length < sourceChunkBuffer.Length)
    {
        long bufferSize = RoundCompressedSize(sourceChunkBuffer.Length);
        inputBuffer = new byte[bufferSize];
    }
    // Wrap either the reusable buffer or a fresh one in a seekable stream.
    MyMemoryStream stream = inputBuffer == null ? new MyMemoryStream(sourceChunkBuffer.Length) : new MyMemoryStream(inputBuffer);
    stream.Write(sourceChunkBuffer, 0, sourceChunkBuffer.Length);
    stream.Flush();
    stream.Seek(0, SeekOrigin.Begin);
    using (ZipFile zip = ZipFile.Read(stream))
    {
        ZipEntry e = zip[EntryName];
        // Grow the caller-supplied output buffer if it cannot hold the payload.
        if (outputBuffer != null && outputBuffer.Length < e.UncompressedSize)
        {
            long bufferSize = RoundUncompressedSize(e.UncompressedSize);
            outputBuffer = new byte[bufferSize];
        }
        Stream outputStream = outputBuffer == null ? new MemoryStream((int)e.UncompressedSize) : new MemoryStream(outputBuffer);
        e.Extract(outputStream);
        outputStream.Flush();
        // Rewind so the caller can read the payload from the beginning.
        outputStream.Seek(0, SeekOrigin.Begin);
        return(outputStream);
    }
}
// Extension method: fills a ChunkByteArray from a stream, mirroring
// Stream.Read(byte[], int, int); returns the number of bytes read.
// NOTE(review): buffer is passed both as the receiver and as an explicit
// argument to ReadFromStream — confirm the duplicate parameter is intended.
public static int Read(this Stream stream, ChunkByteArray buffer, int offset, int count) { return(buffer.ReadFromStream(stream, buffer, offset, count)); }
// Default constructor: starts with an empty binary body so BinaryBody is
// never null.
public BaseTablePacketDTO() { BinaryBody = new ChunkByteArray(0); }
/// <summary>
/// Convenience overload of UnzipStream that allocates fresh buffers instead
/// of reusing caller-supplied scratch arrays.
/// </summary>
public static Stream UnzipStream(ChunkByteArray sourceChunkBuffer)
{
    byte[] scratchInput = null;
    byte[] scratchOutput = null;
    return UnzipStream(sourceChunkBuffer, ref scratchInput, ref scratchOutput);
}
// Creates a packet with the given row count and a binary body pre-sized to
// bodyLength bytes.
public BaseTablePacketDTO(int rowCount, int bodyLength) { RowCount = rowCount; BinaryBody = new ChunkByteArray(bodyLength); }
// Extension method: writes a ChunkByteArray to a stream, mirroring
// Stream.Write(byte[], int, int).
// NOTE(review): buffer is passed both as the receiver and as an explicit
// argument to WriteToStream — confirm the duplicate parameter is intended.
public static void Write(this Stream stream, ChunkByteArray buffer, int offset, int count) { buffer.WriteToStream(stream, buffer, offset, count); }