// Round-trips a fixed 100-byte low-entropy buffer through Encode64 (acceleration 1)
// and Decode64, then verifies the decoded bytes are identical to the source.
public void Compress2()
{
    byte[] source = new byte[] { 3, 3, 2, 2, 3, 0, 2, 0, 2, 1, 0, 1, 3, 1, 3, 0, 3, 0, 2, 0, 2, 1, 3, 1, 0, 3, 0, 0, 2, 0, 1, 2, 2, 2, 3, 2, 0, 0, 2, 1, 2, 2, 0, 3, 0, 0, 3, 2, 0, 2, 1, 2, 3, 2, 2, 1, 3, 0, 1, 0, 3, 1, 1, 2, 0, 2, 2, 1, 2, 1, 0, 3, 2, 0, 2, 0, 1, 3, 1, 3, 3, 2, 3, 0, 2, 2, 2, 0, 3, 2, 2, 0, 2, 2, 2, 0, 0, 1, 3, 1 };
    byte[] encoded = new byte[LZ4.MaximumOutputLength(source.Length)];

    int compressedSize = 0;
    fixed (byte* sourcePtr = source)
    fixed (byte* encodedPtr = encoded)
    {
        // Acceleration factor 1 = default compression effort.
        compressedSize = LZ4.Encode64(sourcePtr, encodedPtr, source.Length, encoded.Length, 1);
    }

    byte[] decoded = new byte[source.Length];
    int uncompressedSize = 0;
    fixed (byte* decodedPtr = decoded)
    fixed (byte* encodedPtr = encoded)
    {
        uncompressedSize = LZ4.Decode64(encodedPtr, compressedSize, decodedPtr, source.Length, true);
    }

    // The decoder must report the original length and reproduce every byte.
    Assert.Equal(source.Length, uncompressedSize);
    for (int i = 0; i < source.Length; i++)
    {
        Assert.Equal(source[i], decoded[i]);
    }
}
// Round-trips a fixed 100-byte buffer and additionally verifies that neither the
// encoder nor the decoder writes past the end of its destination buffer, using
// a 4-byte sentinel value planted immediately after each buffer's usable region.
public void Compress()
{
    const uint sentinel = 0xDEADBEEF;
    byte[] source = new byte[] { 3, 3, 2, 2, 3, 0, 2, 0, 2, 1, 0, 1, 3, 1, 3, 0, 3, 0, 2, 0, 2, 1, 3, 1, 0, 3, 0, 0, 2, 0, 1, 2, 2, 2, 3, 2, 0, 0, 2, 1, 2, 2, 0, 3, 0, 0, 3, 2, 0, 2, 1, 2, 3, 2, 2, 1, 3, 0, 1, 0, 3, 1, 1, 2, 0, 2, 2, 1, 2, 1, 0, 3, 2, 0, 2, 0, 1, 3, 1, 3, 3, 2, 3, 0, 2, 2, 2, 0, 3, 2, 2, 0, 2, 2, 2, 0, 0, 1, 3, 1 };
    var maxEncodedLength = LZ4.MaximumOutputLength(source.Length);

    // Reserve sizeof(uint) extra bytes after each buffer for the overrun sentinel.
    byte[] encoded = new byte[maxEncodedLength + sizeof(uint)];
    byte[] decoded = new byte[source.Length + sizeof(uint)];

    fixed (byte* sourcePtr = source)
    fixed (byte* encodedPtr = encoded)
    fixed (byte* decodedPtr = decoded)
    {
        // Plant the sentinels just past the usable portion of each buffer.
        *(uint*)(encodedPtr + maxEncodedLength) = sentinel;
        *(uint*)(decodedPtr + source.Length) = sentinel;

        int compressedSize = LZ4.Encode64(sourcePtr, encodedPtr, source.Length, (int)maxEncodedLength);
        int uncompressedSize = LZ4.Decode64(encodedPtr, compressedSize, decodedPtr, source.Length, true);

        Assert.True(compressedSize <= maxEncodedLength);
        Assert.Equal(source.Length, uncompressedSize);
        for (int i = 0; i < source.Length; i++)
        {
            Assert.Equal(source[i], decoded[i]);
        }

        // Sentinels must be untouched — proves no buffer overrun happened.
        Assert.Equal(sentinel, *(uint*)(encodedPtr + maxEncodedLength));
        Assert.Equal(sentinel, *(uint*)(decodedPtr + source.Length));
    }
}
// Benchmarks an encode/decode round trip over the shared "input" buffer filled
// with pseudo-random values restricted to the low 4 bits (highly compressible).
// Decoding targets the "input" buffer itself: a correct round trip writes back
// the same bytes, so no separate output allocation is needed per iteration.
public void LowBitsRandom()
{
    const int valueLimit = 1 << 4;
    var generator = new Random(1000); // fixed seed for reproducible benchmark data
    for (int index = 0; index < input.Length; index++)
    {
        input[index] = (byte)(generator.Next() % valueLimit);
    }

    var maxEncodedLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encoded = new byte[maxEncodedLength];

    ExecuteBenchmark(() =>
    {
        fixed (byte* sourcePtr = input)
        fixed (byte* encodedPtr = encoded)
        fixed (byte* decodedPtr = input)
        {
            int compressedSize = LZ4.Encode64(sourcePtr, encodedPtr, input.Length, (int)maxEncodedLength);
            int uncompressedSize = LZ4.Decode64(encodedPtr, compressedSize, decodedPtr, input.Length, true);
        }
    });
}
// Benchmarks an encode/decode round trip over the shared "input" buffer filled
// with repeated pseudo-random sequences: only 20 distinct seeds are used, so the
// same byte runs recur often — a high-repetition, LZ4-friendly workload.
// As in the other benchmarks, decoding writes back into "input" itself.
public void HighRepetition()
{
    var seedSource = new Random(1000); // fixed seed for reproducible benchmark data
    int position = 0;
    while (position < input.Length)
    {
        int sequenceNumber = seedSource.Next(20);
        int sequenceLength = Math.Min(seedSource.Next(128), input.Length - position);
        var sequenceRnd = new Random(sequenceNumber); // same seed => same sequence bytes
        for (int j = 0; j < sequenceLength; j++, position++)
        {
            input[position] = (byte)(sequenceRnd.Next() % 255);
        }
    }

    var maxEncodedLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encoded = new byte[maxEncodedLength];

    ExecuteBenchmark(() =>
    {
        fixed (byte* sourcePtr = input)
        fixed (byte* encodedPtr = encoded)
        fixed (byte* decodedPtr = input)
        {
            int compressedSize = LZ4.Encode64(sourcePtr, encodedPtr, input.Length, (int)maxEncodedLength);
            int uncompressedSize = LZ4.Decode64(encodedPtr, compressedSize, decodedPtr, input.Length, true);
        }
    });
}
// Round-trips a UTF-8 string ("sample" repeated <paramref name="size"/> times)
// through Encode64/Decode64 using a native output buffer and verifies the
// decoded string equals the original.
//
// Fixes over the previous version:
// - removed the "encodeInput" native allocation that was never read or written
//   (dead allocation of `size` bytes per call);
// - the native encode buffer is now released in a finally block, so it no longer
//   leaks when an assertion throws.
public unsafe void LZ4Test(int size)
{
    var originStr = string.Join("", Enumerable.Repeat(1, size).Select(x => "sample"));
    var bytes = Encoding.UTF8.GetBytes(originStr);
    var maximumOutputLength = LZ4.MaximumOutputLength(bytes.Length);

    byte* encodeOutput = NativeMemory.AllocateMemory((int)maximumOutputLength);
    try
    {
        int compressedSize;
        fixed (byte* pb = bytes)
        {
            compressedSize = LZ4.Encode64(pb, encodeOutput, bytes.Length, (int)maximumOutputLength);
        }

        // Wipe the managed buffer so the comparison below can only succeed if
        // Decode64 actually reconstructed the data.
        Array.Clear(bytes, 0, bytes.Length);
        fixed (byte* pb = bytes)
        {
            LZ4.Decode64(encodeOutput, compressedSize, pb, bytes.Length, true);
        }

        var actual = Encoding.UTF8.GetString(bytes);
        Assert.Equal(originStr, actual);
    }
    finally
    {
        NativeMemory.Free(encodeOutput, maximumOutputLength);
    }
}
// Thin pass-through to the LZ4 encoder instance; returns the compressed size.
private int DoCompression(byte* input, byte* output, int inputLength, int outputLength)
    => _lz4.Encode64(input, output, inputLength, outputLength);
// Parameterized round-trip test: fills a buffer of the given size with values
// restricted to the low `bits` bits, compresses and decompresses it, and checks
// both the content and the 4-byte overrun sentinels placed after each buffer.
public void CompressAndDecompress(int size, int bits)
{
    const uint sentinel = 0xDEADBEEF;
    int valueLimit = 1 << bits;
    var generator = new Random(size * bits); // seed derived from parameters => reproducible per case
    byte[] source = new byte[size];
    for (int i = 0; i < size; i++)
    {
        source[i] = (byte)(generator.Next() % valueLimit);
    }

    var maxEncodedLength = LZ4.MaximumOutputLength(source.Length);

    // Reserve sizeof(uint) extra bytes after each buffer for the overrun sentinel.
    byte[] decoded = new byte[size + sizeof(uint)];
    byte[] encoded = new byte[maxEncodedLength + sizeof(uint)];

    fixed (byte* sourcePtr = source)
    fixed (byte* encodedPtr = encoded)
    fixed (byte* decodedPtr = decoded)
    {
        // Plant the sentinels just past the usable portion of each buffer.
        *(uint*)(encodedPtr + maxEncodedLength) = sentinel;
        *(uint*)(decodedPtr + size) = sentinel;

        int compressedSize = LZ4.Encode64(sourcePtr, encodedPtr, size, (int)maxEncodedLength);
        int uncompressedSize = LZ4.Decode64(encodedPtr, compressedSize, decodedPtr, size, true);

        Assert.True(compressedSize <= maxEncodedLength);
        Assert.Equal(size, uncompressedSize);
        for (int i = 0; i < size; i++)
        {
            Assert.Equal(source[i], decoded[i]);
        }

        // Sentinels must be untouched — proves no buffer overrun happened.
        Assert.Equal(sentinel, *(uint*)(encodedPtr + maxEncodedLength));
        Assert.Equal(sentinel, *(uint*)(decodedPtr + size));
    }
}
// Compresses `size` bytes from `buffer` into a reusable compression buffer and
// returns a pointer to it, reporting the compressed length via `compressedSize`.
// Buffers larger than 128 bytes go through LZ4 with a size-dependent acceleration
// factor; small buffers use the dedicated small-string compressor instead.
private unsafe byte* CompressBuffer(byte* buffer, int size, int maxGoodCompressionSize, out int compressedSize)
{
    var destination = GetCompressionBuffer(size);
    compressedSize = size > 128
        ? LZ4.Encode64(buffer, destination, size, maxGoodCompressionSize, acceleration: CalculateCompressionAcceleration(size))
        : SmallStringCompression.Instance.Compress(buffer, destination, size, maxGoodCompressionSize);
    return destination;
}
// Tries to build a compressed copy of the given tree page in a temporary page.
// On success, `result` describes the compressed layout; on failure (output buffer
// too small, or compression didn't shrink the data), `result` is null.
// Either way the caller receives the temporary page's IDisposable and must
// dispose it to return the temp page to the environment.
// Temp page layout produced here:
//   [page header][compression header][entry offsets (ushort each)][compressed values]
public static IDisposable TryGetCompressedTempPage(LowLevelTransaction tx, TreePage page, out CompressionResult result, bool defrag = true)
{
    if (defrag)
    {
        if (page.CalcSizeUsed() != page.SizeUsed - Constants.Tree.PageHeaderSize) // check if the page really requires defrag
        {
            page.Defrag(tx);
        }
    }

    // The values section lives between Upper and the end of the page.
    var valuesSize = page.PageSize - page.Upper;

    TemporaryPage temp;
    var returnTempPage = tx.Environment.GetTemporaryPage(tx, out temp);

    var tempPage = temp.GetTempPage();

    var compressionInput = page.Base + page.Upper;
    var compressionResult = tempPage.Base + Constants.Tree.PageHeaderSize + Constants.Compression.HeaderSize; // temp compression result has compressed values at the beginning of the page

    // The per-entry offset table is copied (rebased) ahead of the compressed bytes.
    var offsetsSize = page.NumberOfEntries * Constants.Tree.NodeOffsetSize;
    var compressionOutput = compressionResult + offsetsSize;

    var compressedSize = LZ4.Encode64(
        compressionInput,
        compressionOutput,
        valuesSize,
        tempPage.PageSize - (Constants.Tree.PageHeaderSize + Constants.Compression.HeaderSize) - offsetsSize);

    if (compressedSize == 0 || compressedSize > valuesSize)
    {
        // output buffer size not enough or compressed output size is greater than uncompressed input
        result = null;
        return (returnTempPage);
    }

    // Rebase each key offset so it is relative to the start of the values section.
    var compressedOffsets = (ushort*)compressionResult;
    var offsets = page.KeysOffsets;
    for (var i = 0; i < page.NumberOfEntries; i++)
    {
        compressedOffsets[i] = (ushort)(offsets[i] - page.Upper);
    }

    var compressionSectionSize = compressedSize + offsetsSize;

    var sizeLeftInDecompressedPage = Constants.Compression.MaxPageSize - page.SizeUsed;
    var sizeLeftForUncompressedEntries = tx.PageSize - (Constants.Tree.PageHeaderSize + Constants.Compression.HeaderSize + compressionSectionSize);

    if (sizeLeftForUncompressedEntries > sizeLeftInDecompressedPage)
    {
        // expand compression section to prevent from adding next uncompressed entries what would result in
        // exceeding MaxPageSize after the decompression
        compressionSectionSize += sizeLeftForUncompressedEntries - sizeLeftInDecompressedPage;
    }

    compressionSectionSize += compressionSectionSize & 1; // ensure 2-byte alignment

    // check that after decompression we won't exceed MaxPageSize
    Debug.Assert(page.SizeUsed + // page header, node offsets, existing entries
                 (tx.PageSize - // space that can be still used to insert next uncompressed entries
                  (Constants.Tree.PageHeaderSize + Constants.Compression.HeaderSize + compressionSectionSize)) <= Constants.Compression.MaxPageSize);

    Memory.Copy(tempPage.Base, page.Base, Constants.Tree.PageHeaderSize);

    // Lower marks the end of the compressed section; Upper is pushed to the page end
    // so the remaining free space sits between them.
    tempPage.Lower = (ushort)(Constants.Tree.PageHeaderSize + Constants.Compression.HeaderSize + compressionSectionSize);
    tempPage.Upper = (ushort)tempPage.PageSize;

    Debug.Assert(tempPage.Lower <= tempPage.Upper);

    result = new CompressionResult
    {
        CompressedPage = tempPage,
        CompressionOutputPtr = compressionResult,
        Header = new CompressedNodesHeader
        {
            SectionSize = (ushort)compressionSectionSize,
            CompressedSize = (ushort)compressedSize,
            UncompressedSize = (ushort)valuesSize,
            NumberOfCompressedEntries = page.NumberOfEntries,
        }
    };

    return (returnTempPage);
}