public void TestStreamCompression()
{
    const int compressionStrength = 31;

    // Seed the generator so any failure is reproducible from run to run;
    // an unseeded Random made this test nondeterministic.
    Random rnd = new Random(0);
    MemoryStream buffer = new MemoryStream();

    PatternCompressor compressor = new PatternCompressor
    {
        CompressedBuffer = new byte[4 * TotalTestSampleSize],
        CompressionStrength = compressionStrength
    };

    byte[] arrayOfInts;
    int bufferLength;
    int dataLength;
    int compressedLen;

    // Feed the same random values to both the streaming compressor and a raw
    // buffer so the two compression paths can be compared below.
    for (int i = 0; i < TotalTestSampleSize; i++)
    {
        uint value = (uint)(rnd.NextDouble() * 100000);
        buffer.Write(BitConverter.GetBytes(value), 0, 4);
        compressor.Compress(value);
    }

    // Add one byte of extra space to accommodate compression algorithm
    buffer.WriteByte(0xFF);

    arrayOfInts = buffer.ToArray();
    bufferLength = arrayOfInts.Length;
    dataLength = bufferLength - 1;
    compressedLen = PatternCompressor.CompressBuffer(arrayOfInts, 0, dataLength, bufferLength, compressionStrength);

    // Compressed arrays do not match byte-for-byte. This is because the streaming
    // compression searches the back buffer queue starting from index 0, regardless
    // of the index of the start of the queue. The static method searches from the
    // start of the queue and wraps around in a circular fashion. This discrepancy
    // does not affect decompression, so only the compressed lengths are compared.
    Assert.AreEqual(compressedLen, compressor.CompressedBufferLength);
}
public void TestStreamDecompression()
{
    // Seed the generator so any failure is reproducible from run to run;
    // an unseeded Random made this test nondeterministic.
    Random rnd = new Random(0);
    MemoryStream memStream = new MemoryStream();

    byte[] original;
    byte[] decompressed;
    bool match;

    PatternCompressor compressor = new PatternCompressor
    {
        CompressedBuffer = new byte[4 * TotalTestSampleSize],
        CompressionStrength = 31
    };

    PatternDecompressor decompressor = new PatternDecompressor();

    // Stream-compress random values while keeping the raw bytes for comparison.
    for (int i = 0; i < TotalTestSampleSize; i++)
    {
        uint value = (uint)(rnd.NextDouble() * 100000);
        memStream.Write(BitConverter.GetBytes(value), 0, 4);
        compressor.Compress(value);
    }

    original = memStream.ToArray();
    memStream = new MemoryStream();

    // Decompress the full compressed buffer and rebuild the byte stream.
    decompressor.AugmentBuffer(compressor.CompressedBuffer, compressor.CompressedBufferLength);

    for (int i = 0; i < TotalTestSampleSize; i++)
    {
        uint value;
        decompressor.Decompress(out value);
        memStream.Write(BitConverter.GetBytes(value), 0, 4);
    }

    decompressed = memStream.ToArray();

    // Round trip must be lossless: identical length and identical content.
    match = original.Length == decompressed.Length;

    for (int i = 0; match && i < original.Length; i++)
    {
        match = original[i] == decompressed[i];
    }

    Assert.AreEqual(original.Length, decompressed.Length);
    Assert.IsTrue(match);
}
public void TestDecompressionOfStreamCompression()
{
    // Seed the generator so any failure is reproducible from run to run;
    // an unseeded Random made this test nondeterministic.
    Random rnd = new Random(0);
    MemoryStream memStream = new MemoryStream();

    byte[] original;
    byte[] decompressed;
    int decompressedLen;
    bool match;

    PatternCompressor compressor = new PatternCompressor
    {
        CompressedBuffer = new byte[4 * TotalTestSampleSize],
        CompressionStrength = 31
    };

    // Stream-compress random values while keeping the raw bytes for comparison.
    for (int i = 0; i < TotalTestSampleSize; i++)
    {
        uint value = (uint)(rnd.NextDouble() * 100000);
        memStream.Write(BitConverter.GetBytes(value), 0, 4);
        compressor.Compress(value);
    }

    original = memStream.ToArray();

    // Decompress the stream-compressed buffer using the static (buffer-based)
    // decompressor to verify the two implementations are interoperable.
    decompressed = new byte[PatternDecompressor.MaximumSizeDecompressed(compressor.CompressedBufferLength)];
    Buffer.BlockCopy(compressor.CompressedBuffer, 0, decompressed, 0, compressor.CompressedBufferLength);
    decompressedLen = PatternDecompressor.DecompressBuffer(decompressed, 0, compressor.CompressedBufferLength, decompressed.Length);

    // Round trip must be lossless: identical length and identical content.
    match = decompressedLen == original.Length;

    for (int i = 0; match && i < decompressedLen; i++)
    {
        match = decompressed[i] == original[i];
    }

    Assert.AreEqual(original.Length, decompressedLen);
    Assert.IsTrue(match);
}
/// <summary>
/// Attempts to compress payload of <see cref="CompactMeasurement"/> values onto the <paramref name="destination"/> stream.
/// </summary>
/// <param name="compactMeasurements">Payload of <see cref="CompactMeasurement"/> values.</param>
/// <param name="destination">Memory based <paramref name="destination"/> stream to hold compressed payload.</param>
/// <param name="compressionStrength">Compression strength to use.</param>
/// <param name="includeTime">Flag that determines if time should be included in the compressed payload.</param>
/// <param name="flags">Current <see cref="DataPacketFlags"/>.</param>
/// <returns><c>true</c> if payload was compressed and encoded onto <paramref name="destination"/> stream; otherwise <c>false</c>.</returns>
/// <remarks>
/// <para>
/// Compressed payload will only be encoded onto <paramref name="destination"/> stream if compressed size would be smaller
/// than normal serialized size.
/// </para>
/// <para>
/// As an optimization this function uses a compression method that uses pointers to native structures, as such the
/// endian order encoding of the compressed data will always be in the native-endian order of the operating system.
/// This will be an important consideration when writing a endian order neutral payload decompressor. To help with
/// this the actual endian order used during compression is marked in the data flags. However, measurements values
/// are consistently encoded in big-endian order prior to buffer compression.
/// </para>
/// </remarks>
public static bool CompressPayload(this IEnumerable<CompactMeasurement> compactMeasurements, BlockAllocatedMemoryStream destination, byte compressionStrength, bool includeTime, ref DataPacketFlags flags)
{
    // Instantiate a buffer that is larger than we'll need
    byte[] buffer = new byte[ushort.MaxValue];

    // Go ahead and enumerate all the measurements - this will cast all values to compact measurements
    CompactMeasurement[] measurements = compactMeasurements.ToArray();
    int measurementCount = measurements.Length;

    // An empty payload cannot be compressed; without this guard, reading
    // measurements[0] below would throw an IndexOutOfRangeException.
    if (measurementCount == 0)
    {
        flags &= ~DataPacketFlags.Compressed;
        return false;
    }

    // Guard against overrunning the fixed-size working buffer: each measurement
    // contributes 4 bytes of flags/ID plus 4 bytes of value and, optionally,
    // 8 bytes of timestamp; one extra byte of headroom is reserved for the
    // compression algorithm. Fall back to uncompressed encoding when the
    // payload cannot fit.
    int requiredSize = measurementCount * (includeTime ? 16 : 8);

    if (requiredSize + 1 > ushort.MaxValue)
    {
        flags &= ~DataPacketFlags.Compressed;
        return false;
    }

    int sizeToBeat = measurementCount * measurements[0].BinaryLength;
    int index = 0;

    // Encode compact state flags and runtime IDs together --
    // Together these are three bytes, so we pad with a zero byte.
    // The zero byte and state flags are considered to be more compressible
    // than the runtime ID, so these are stored in the higher order bytes.
    for (int i = 0; i < measurementCount; i++)
    {
        uint value = ((uint)measurements[i].CompactStateFlags << 16) | measurements[i].RuntimeID;
        index += NativeEndianOrder.Default.CopyBytes(value, buffer, index);
    }

    // Encode values
    for (int i = 0; i < measurementCount; i++)
    {
        // Encode using adjusted value (accounts for adder and multiplier)
        index += NativeEndianOrder.Default.CopyBytes((float)measurements[i].AdjustedValue, buffer, index);
    }

    if (includeTime)
    {
        // Encode timestamps
        for (int i = 0; i < measurementCount; i++)
        {
            // Since large majority of 8-byte tick values will be repeated, they should compress well
            index += NativeEndianOrder.Default.CopyBytes((long)measurements[i].Timestamp, buffer, index);
        }
    }

    // Attempt to compress buffer
    int compressedSize = PatternCompressor.CompressBuffer(buffer, 0, index, ushort.MaxValue, compressionStrength);

    // Only encode compressed buffer if compression actually helped payload size
    if (compressedSize <= sizeToBeat)
    {
        // Set payload compression flag
        flags |= DataPacketFlags.Compressed;

        // Make sure decompressor knows original endian encoding order
        if (BitConverter.IsLittleEndian)
            flags |= DataPacketFlags.LittleEndianCompression;
        else
            flags &= ~DataPacketFlags.LittleEndianCompression;

        // Copy compressed payload onto destination stream
        destination.Write(buffer, 0, compressedSize);
        return true;
    }

    // Clear payload compression flag
    flags &= ~DataPacketFlags.Compressed;
    return false;
}
public void TestCompressionCases()
{
    // Each case ends with an extra 0 entry; its 4 bytes provide the scratch
    // space the compression algorithm requires beyond the data being compressed
    // (the helper excludes those 4 bytes from the data length).
    uint[] case1 = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    uint[] case2 = { 0xFF, 0xAD, 0xBC, 0x5D, 0x99, 0x84, 0xA8, 0x3D, 0x45, 0x02, 0 };
    uint[] case3 = { 0xFFFF, 0xABCD, 0x1234, 0x9876, 0x1A2B, 0x1928, 0x9182, 0x6666, 0x5294, 0xAFBD, 0 };
    uint[] case4 = { 0xFFFFFF, 0xABCDEF, 0xFEDCBA, 0xAFBECD, 0xFAEBDC, 0x123456, 0x654321, 0x162534, 0x615243, 0x987654, 0 };
    uint[] case5 = { 0xFFFFFFFF, 0xABCDEFAB, 0xFEDCBAFE, 0xAFBECDAF, 0xFAEBDCFA, 0x12345678, 0x87654321, 0x18273645, 0x81726354, 0x98765432, 0 };

    // Expected compressed sizes grow as the values use more significant bytes
    // (case1 through case5 use progressively wider bit patterns).
    Assert.AreEqual(14, CompressCase(case1));
    Assert.AreEqual(23, CompressCase(case2));
    Assert.AreEqual(32, CompressCase(case3));
    Assert.AreEqual(41, CompressCase(case4));
    Assert.AreEqual(41, CompressCase(case5));
}

// Serializes the given values into a little-endian byte buffer and returns the
// compressed length reported by PatternCompressor.CompressBuffer. The final
// value in the array serves only as extra working space for the algorithm and
// is excluded from the data length (hence data.Length - 4).
private static int CompressCase(uint[] values)
{
    MemoryStream buffer = new MemoryStream();

    foreach (uint value in values)
    {
        buffer.Write(BitConverter.GetBytes(value), 0, 4);
    }

    byte[] data = buffer.ToArray();
    return PatternCompressor.CompressBuffer(data, 0, data.Length - 4, data.Length);
}
// Measures compression/decompression speed and ratio of PatternCompressor over
// random 32-bit values, compares the ratio against gzip, and asserts the round
// trip is lossless. NOTE(review): timing statement order is significant here —
// the buffer-pool warm-up runs are deliberately performed (and timed) before
// the compression timing so pool initialization does not skew results.
public void TestArrayOfIntCompressionOnRandomData()
{
    StringBuilder results = new StringBuilder();
    MemoryStream buffer = new MemoryStream();
    Random rnd = new Random(); // unseeded — results (and data) vary per run

    uint value;

    // Build TotalTestSampleSize random 32-bit values as raw bytes.
    for (int i = 0; i < TotalTestSampleSize; i++)
    {
        value = (uint)(rnd.NextDouble() * 100000);
        buffer.Write(BitConverter.GetBytes(value), 0, 4);
    }

    // Add one byte of extra space to accommodate compression algorithm
    buffer.WriteByte(0xFF);

    byte[] arrayOfInts = buffer.ToArray();
    byte[] copy = arrayOfInts.BlockCopy(0, arrayOfInts.Length); // pristine copy for the lossless check
    int bufferLen = arrayOfInts.Length;
    int dataLen = bufferLen - 1;
    int gzipLen = arrayOfInts.Compress().Length; // gzip size of the same data, for ratio comparison

    int compressedLen, decompressedLen, maxDecompressedLen;
    Ticks compressTime, decompressTime;
    Ticks stopTime, startTime;
    bool lossless;

    // Make sure a buffer exists in the buffer pool so that operation time will not be skewed by buffer initialization:
    startTime = DateTime.UtcNow.Ticks;
    BufferPool.ReturnBuffer(BufferPool.TakeBuffer(dataLen + TotalTestSampleSize));
    stopTime = DateTime.UtcNow.Ticks;
    results.AppendFormat("Buffer Pool initial take time: {0}\r\n", (stopTime - startTime).ToElapsedTimeString(4));

    // Second take should hit the now-cached buffer; both times are reported.
    startTime = DateTime.UtcNow.Ticks;
    BufferPool.ReturnBuffer(BufferPool.TakeBuffer(dataLen + TotalTestSampleSize));
    stopTime = DateTime.UtcNow.Ticks;
    results.AppendFormat("Buffer Pool cached take time: {0}\r\n\r\n", (stopTime - startTime).ToElapsedTimeString(4));

    // Time in-place compression of the buffer at compression strength 31.
    startTime = DateTime.UtcNow.Ticks;
    compressedLen = PatternCompressor.CompressBuffer(arrayOfInts, 0, dataLen, bufferLen, 31);
    stopTime = DateTime.UtcNow.Ticks;
    compressTime = stopTime - startTime;

    // Grow the working buffer if in-place decompression could need more room
    // than the original array provides.
    maxDecompressedLen = PatternDecompressor.MaximumSizeDecompressed(compressedLen);

    if (arrayOfInts.Length < maxDecompressedLen)
    {
        byte[] temp = new byte[maxDecompressedLen];
        Buffer.BlockCopy(arrayOfInts, 0, temp, 0, compressedLen);
        arrayOfInts = temp;
    }

    // Time in-place decompression.
    startTime = DateTime.UtcNow.Ticks;
    decompressedLen = PatternDecompressor.DecompressBuffer(arrayOfInts, 0, compressedLen, maxDecompressedLen);
    stopTime = DateTime.UtcNow.Ticks;
    decompressTime = stopTime - startTime;

    // Verify the round trip is lossless against the untouched copy.
    lossless = decompressedLen == dataLen;

    for (int i = 0; lossless && i < Math.Min(decompressedLen, dataLen); i++)
    {
        lossless = arrayOfInts[i] == copy[i];
    }

    // Publish results to debug window
    results.AppendFormat("Results of floating point compression algorithm over sequential data:\r\n\r\n");
    results.AppendFormat("Total number of samples: \t{0:#,##0}\r\n", TotalTestSampleSize);
    results.AppendFormat("Total number of bytes: \t{0:#,##0}\r\n", dataLen);
    results.AppendFormat("Total compression time: \t{0}\r\n", compressTime.ToElapsedTimeString(4));
    results.AppendFormat("Compression speed: \t{0:#,##0.0000} MB/sec\r\n", (dataLen / (double)SI2.Mega) / compressTime.ToSeconds());
    results.AppendFormat("Total decompression time: \t{0}\r\n", decompressTime.ToElapsedTimeString(4));
    results.AppendFormat("Decompression speed: \t{0:#,##0.0000} MB/sec\r\n", (dataLen / (double)SI2.Mega) / decompressTime.ToSeconds());
    results.AppendFormat("Compression results: \t{0:0.00%}\r\n", (dataLen - compressedLen) / (double)dataLen);
    results.AppendFormat("Standard gzip results: \t{0:0.00%}\r\n", (dataLen - gzipLen) / (double)dataLen);
    Debug.WriteLine(results.ToString());

    Assert.AreEqual(dataLen, decompressedLen);
    Assert.IsTrue(lossless);
}