public byte* DecompressToTempBuffer()
{
    var tempBuffer = _context.GetNativeTempBuffer(UncompressedSize);

    // Values larger than 128 bytes are LZ4-compressed; smaller ones use SmallStringCompression.
    int uncompressedSize;
    if (UncompressedSize > 128)
    {
        uncompressedSize = LZ4.Decode64(Buffer, CompressedSize, tempBuffer, UncompressedSize, true);
    }
    else
    {
        uncompressedSize = SmallStringCompression.Instance.Decompress(Buffer, CompressedSize, tempBuffer, UncompressedSize);
    }

    if (uncompressedSize != UncompressedSize)
        throw new FormatException("Wrong size detected on decompression");

    return tempBuffer;
}
public void Compress2()
{
    byte[] input = new byte[] { 3, 3, 2, 2, 3, 0, 2, 0, 2, 1, 0, 1, 3, 1, 3, 0, 3, 0, 2, 0, 2, 1, 3, 1, 0, 3, 0, 0, 2, 0, 1, 2, 2, 2, 3, 2, 0, 0, 2, 1, 2, 2, 0, 3, 0, 0, 3, 2, 0, 2, 1, 2, 3, 2, 2, 1, 3, 0, 1, 0, 3, 1, 1, 2, 0, 2, 2, 1, 2, 1, 0, 3, 2, 0, 2, 0, 1, 3, 1, 3, 3, 2, 3, 0, 2, 2, 2, 0, 3, 2, 2, 0, 2, 2, 2, 0, 0, 1, 3, 1 };

    byte[] encodedOutput = new byte[LZ4.MaximumOutputLength(input.Length)];

    int compressedSize = 0;
    fixed (byte* inputPtr = input)
    fixed (byte* encodedOutputPtr = encodedOutput)
    {
        compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, encodedOutput.Length, 1);
    }

    byte[] output = new byte[input.Length];
    int uncompressedSize = 0;
    fixed (byte* outputPtr = output)
    fixed (byte* encodedOutputPtr = encodedOutput)
    {
        uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);
    }

    Assert.Equal(input.Length, uncompressedSize);
    for (int i = 0; i < input.Length; i++)
        Assert.Equal(input[i], output[i]);
}
public void Compress()
{
    uint marker = 0xDEADBEEF;

    byte[] input = new byte[] { 3, 3, 2, 2, 3, 0, 2, 0, 2, 1, 0, 1, 3, 1, 3, 0, 3, 0, 2, 0, 2, 1, 3, 1, 0, 3, 0, 0, 2, 0, 1, 2, 2, 2, 3, 2, 0, 0, 2, 1, 2, 2, 0, 3, 0, 0, 3, 2, 0, 2, 1, 2, 3, 2, 2, 1, 3, 0, 1, 0, 3, 1, 1, 2, 0, 2, 2, 1, 2, 1, 0, 3, 2, 0, 2, 0, 1, 3, 1, 3, 3, 2, 3, 0, 2, 2, 2, 0, 3, 2, 2, 0, 2, 2, 2, 0, 0, 1, 3, 1 };

    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encodedOutput = new byte[maximumOutputLength + sizeof(uint)];
    byte[] output = new byte[input.Length + sizeof(uint)];

    fixed (byte* inputPtr = input)
    fixed (byte* encodedOutputPtr = encodedOutput)
    fixed (byte* outputPtr = output)
    {
        // Setup buffer overrun markers.
        *(uint*)(encodedOutputPtr + maximumOutputLength) = marker;
        *(uint*)(outputPtr + input.Length) = marker;

        int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, (int)maximumOutputLength);
        int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);

        Assert.True(compressedSize <= maximumOutputLength);
        Assert.Equal(input.Length, uncompressedSize);
        for (int i = 0; i < input.Length; i++)
            Assert.Equal(input[i], output[i]);

        // Check buffer overruns didn't happen.
        Assert.Equal(marker, *(uint*)(encodedOutputPtr + maximumOutputLength));
        Assert.Equal(marker, *(uint*)(outputPtr + input.Length));
    }
}
private byte* DecompressToBuffer(byte* tempBuffer, int sizeOfEscapePositions)
{
    // Values larger than 128 bytes are LZ4-compressed; smaller ones use SmallStringCompression.
    int uncompressedSize;
    if (UncompressedSize > 128)
    {
        uncompressedSize = LZ4.Decode64(Buffer, CompressedSize, tempBuffer, UncompressedSize, true);
    }
    else
    {
        uncompressedSize = SmallStringCompression.Instance.Decompress(Buffer, CompressedSize, tempBuffer, UncompressedSize);
    }

    if (uncompressedSize != UncompressedSize)
        throw new FormatException("Wrong size detected on decompression");

    // The escape positions are stored right after the compressed data; append them to the decompressed bytes.
    Memory.Copy(tempBuffer + uncompressedSize, Buffer + CompressedSize, sizeOfEscapePositions);

    return tempBuffer;
}
public void LowBitsRandom()
{
    int threshold = 1 << 4;
    var rnd = new Random(1000);
    for (int i = 0; i < input.Length; i++)
        input[i] = (byte)(rnd.Next() % threshold);

    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encodedOutput = new byte[maximumOutputLength];

    ExecuteBenchmark(() =>
    {
        fixed (byte* inputPtr = input)
        fixed (byte* encodedOutputPtr = encodedOutput)
        fixed (byte* outputPtr = input)
        {
            int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, (int)maximumOutputLength);
            int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);
        }
    });
}
public void HighRepetition()
{
    var main = new Random(1000);

    int i = 0;
    while (i < input.Length)
    {
        int sequenceNumber = main.Next(20);
        int sequenceLength = Math.Min(main.Next(128), input.Length - i);

        var rnd = new Random(sequenceNumber);
        for (int j = 0; j < sequenceLength; j++, i++)
            input[i] = (byte)(rnd.Next() % 255);
    }

    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encodedOutput = new byte[maximumOutputLength];

    ExecuteBenchmark(() =>
    {
        fixed (byte* inputPtr = input)
        fixed (byte* encodedOutputPtr = encodedOutput)
        fixed (byte* outputPtr = input)
        {
            int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, (int)maximumOutputLength);
            int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);
        }
    });
}
public unsafe void LZ4Test(int size)
{
    byte* encodeInput = NativeMemory.AllocateMemory(size);
    int compressedSize;
    byte* encodeOutput;

    var originStr = string.Join("", Enumerable.Repeat(1, size).Select(x => "sample"));
    var bytes = Encoding.UTF8.GetBytes(originStr);
    var maximumOutputLength = LZ4.MaximumOutputLength(bytes.Length);

    fixed (byte* pb = bytes)
    {
        encodeOutput = NativeMemory.AllocateMemory((int)maximumOutputLength);
        compressedSize = LZ4.Encode64(pb, encodeOutput, bytes.Length, (int)maximumOutputLength);
    }

    Array.Clear(bytes, 0, bytes.Length);

    fixed (byte* pb = bytes)
    {
        LZ4.Decode64(encodeOutput, compressedSize, pb, bytes.Length, true);
    }

    var actual = Encoding.UTF8.GetString(bytes);
    Assert.Equal(originStr, actual);

    NativeMemory.Free(encodeInput, size);
    NativeMemory.Free(encodeOutput, maximumOutputLength);
}
public bool ReadOneTransaction(StorageEnvironmentOptions options, bool checkCrc = true)
{
    if (_readingPage >= _pager.NumberOfAllocatedPages)
        return false;

    TransactionHeader* current;
    if (!TryReadAndValidateHeader(options, out current))
        return false;

    // A compressed transaction occupies a whole number of journal pages.
    var compressedPages = (current->CompressedSize / AbstractPager.PageSize) + (current->CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);

    if (current->TransactionId <= _lastSyncedTransactionId)
    {
        LastTransactionHeader = current;
        _readingPage += compressedPages;
        return true; // skipping
    }

    if (checkCrc && !ValidatePagesCrc(options, compressedPages, current))
        return false;

    var totalPageCount = current->PageCount + current->OverflowPageCount;

    _recoveryPager.EnsureContinuous(null, _recoveryPage, totalPageCount + 1);
    var dataPage = _recoveryPager.AcquirePagePointer(_recoveryPage);

    NativeMethods.memset(dataPage, 0, totalPageCount * AbstractPager.PageSize);
    try
    {
        // Decompress the transaction's pages into the recovery pager.
        LZ4.Decode64(_pager.AcquirePagePointer(_readingPage), current->CompressedSize, dataPage, current->UncompressedSize, true);
    }
    catch (Exception e)
    {
        options.InvokeRecoveryError(this, "Could not de-compress, invalid data", e);
        RequireHeaderUpdate = true;
        return false;
    }

    var tempTransactionPageTranslaction = (*current).GetTransactionToPageTranslation(_recoveryPager, ref _recoveryPage);

    _readingPage += compressedPages;
    LastTransactionHeader = current;

    foreach (var pagePosition in tempTransactionPageTranslaction)
    {
        _transactionPageTranslation[pagePosition.Key] = pagePosition.Value;
    }

    return true;
}
public void ApplyShippedLog(byte[] txPagesRaw)
{
    fixed (byte* pages = txPagesRaw)
    {
        using (var tx = _env.NewTransaction(TransactionFlags.ReadWrite))
        {
            var transactionHeader = (TransactionHeader*)pages;
            var dataPages = pages + AbstractPager.PageSize;

            var compressedPages = (transactionHeader->CompressedSize / AbstractPager.PageSize) + (transactionHeader->CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);
            var crc = Crc.Value(dataPages, 0, compressedPages * AbstractPager.PageSize);

            var transactionId = transactionHeader->TransactionId;

            // Validate the shipped transaction against the CRC chain before applying it.
            if (transactionHeader->Crc != crc)
                throw new InvalidDataException("Invalid CRC signature for shipped transaction " + transactionId);

            if (transactionId - 1 != PreviousTransactionId)
                throw new InvalidDataException("Invalid id for shipped transaction got " + transactionId + " but expected " + (PreviousTransactionId + 1) + ", is there a break in the chain?");

            if (transactionHeader->PreviousTransactionCrc != PreviousTransactionCrc)
                throw new InvalidDataException("Invalid CRC signature for previous shipped transaction " + transactionId + ", is there a break in the chain?");

            var totalPages = transactionHeader->PageCount + transactionHeader->OverflowPageCount;
            var decompressBuffer = _env.ScratchBufferPool.Allocate(tx, totalPages);

            try
            {
                try
                {
                    // Decompress the shipped pages into the scratch buffer, then write them through the transaction.
                    var dest = _env.ScratchBufferPool.AcquirePagePointer(decompressBuffer.ScratchFileNumber, decompressBuffer.PositionInScratchBuffer);
                    LZ4.Decode64(dataPages, transactionHeader->CompressedSize, dest, transactionHeader->UncompressedSize, true);
                }
                catch (Exception e)
                {
                    throw new InvalidDataException("Could not de-compress shipped transaction pages, invalid data", e);
                }

                tx.WriteDirect(transactionHeader, decompressBuffer);

                _previousTransactionCrc = crc;
                _previousTransactionId = transactionHeader->TransactionId;
            }
            finally
            {
                _env.ScratchBufferPool.Free(decompressBuffer.ScratchFileNumber, decompressBuffer.PositionInScratchBuffer, -1);
            }

            tx.Commit();

            OnTransactionApplied(transactionId, crc);
        }
    }
}
private unsafe bool TryDecompressTransactionPages(StorageEnvironmentOptions options, TransactionHeader* current, byte* dataPage)
{
    try
    {
        LZ4.Decode64(_pager.AcquirePagePointer(_readingPage), current->CompressedSize, dataPage, current->UncompressedSize, true);
    }
    catch (Exception e)
    {
        options.InvokeRecoveryError(this, "Could not de-compress, invalid data", e);
        RequireHeaderUpdate = true;
        return false;
    }
    return true;
}
protected void ReadFromShippedTransaction(TransactionToShip transaction)
{
    // A compressed transaction occupies a whole number of pages; read all of them from the shipped stream.
    var compressedPages = (transaction.Header.CompressedSize / AbstractPager.PageSize) + (transaction.Header.CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);
    var compressedDataBuffer = new byte[compressedPages * AbstractPager.PageSize];
    transaction.CompressedData.Read(compressedDataBuffer, 0, compressedPages * AbstractPager.PageSize);

    fixed (byte* compressedDataBufferPtr = compressedDataBuffer)
    {
        var crc = Crc.Value(compressedDataBufferPtr, 0, compressedPages * AbstractPager.PageSize);
        if (transaction.Header.Crc != crc || _previousTransactionCrc != transaction.PreviousTransactionCrc)
            throw new InvalidDataException("Invalid CRC signature for transaction " + transaction.Header.TransactionId);

        _previousTransactionCrc = crc;
        var totalPages = transaction.Header.PageCount + transaction.Header.OverflowPageCount;

        _pager.EnsureContinuous(null, currentPage, totalPages + 1);
        try
        {
            LZ4.Decode64(compressedDataBufferPtr, transaction.Header.CompressedSize, _pager.AcquirePagePointer(currentPage), transaction.Header.UncompressedSize, true);
        }
        catch (Exception e)
        {
            throw new InvalidDataException("Could not de-compress, invalid data", e);
        }
    }

    // Register the pages that were just written.
    var lastAddedPage = currentPage + transaction.Header.PageCount;
    for (int pageNumber = currentPage; pageNumber < lastAddedPage; pageNumber++)
    {
        _pageNumbers.Add(pageNumber);
    }

    if (LastTransactionHeader.HasValue && LastTransactionHeader.Value.TransactionId < transaction.Header.TransactionId)
        LastTransactionHeader = transaction.Header;

    currentPage = lastAddedPage;
}
public void CompressAndDecompress(int size, int bits)
{
    uint marker = 0xDEADBEEF;

    int threshold = 1 << bits;
    var rnd = new Random(size * bits);

    byte[] input = new byte[size];
    for (int i = 0; i < size; i++)
        input[i] = (byte)(rnd.Next() % threshold);

    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    byte[] output = new byte[size + sizeof(uint)];
    byte[] encodeOutput = new byte[maximumOutputLength + sizeof(uint)];

    fixed (byte* inputPtr = input)
    fixed (byte* encodedOutputPtr = encodeOutput)
    fixed (byte* outputPtr = output)
    {
        // Setup buffer overrun markers.
        *(uint*)(encodedOutputPtr + maximumOutputLength) = marker;
        *(uint*)(outputPtr + size) = marker;

        int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, size, (int)maximumOutputLength);
        int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, size, true);

        Assert.True(compressedSize <= maximumOutputLength);
        Assert.Equal(size, uncompressedSize);
        for (int i = 0; i < size; i++)
            Assert.Equal(input[i], output[i]);

        // Check buffer overruns didn't happen.
        Assert.Equal(marker, *(uint*)(encodedOutputPtr + maximumOutputLength));
        Assert.Equal(marker, *(uint*)(outputPtr + size));
    }
}
public bool ReadOneTransaction(StorageEnvironmentOptions options, bool checkCrc = true)
{
    if (_readingPage >= _pager.NumberOfAllocatedPages)
        return false;

    TransactionHeader* current;
    if (!TryReadAndValidateHeader(options, out current))
        return false;

    // A compressed transaction occupies a whole number of journal pages.
    var compressedPages = (current->CompressedSize / AbstractPager.PageSize) + (current->CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);

    if (current->TransactionId <= _lastSyncedTransactionId)
    {
        LastTransactionHeader = current;
        _readingPage += compressedPages;
        return true; // skipping
    }

    if (checkCrc)
    {
        uint crc = Crc.Value(_pager.AcquirePagePointer(_readingPage), 0, compressedPages * AbstractPager.PageSize);
        if (crc != current->Crc)
        {
            RequireHeaderUpdate = true;
            options.InvokeRecoveryError(this, "Invalid CRC signature for transaction " + current->TransactionId, null);
            return false;
        }
    }

    _recoveryPager.EnsureContinuous(null, _recoveryPage, (current->PageCount + current->OverflowPageCount) + 1);
    var dataPage = _recoveryPager.AcquirePagePointer(_recoveryPage);

    NativeMethods.memset(dataPage, 0, (current->PageCount + current->OverflowPageCount) * AbstractPager.PageSize);
    try
    {
        // Decompress the transaction's pages into the recovery pager.
        LZ4.Decode64(_pager.AcquirePagePointer(_readingPage), current->CompressedSize, dataPage, current->UncompressedSize, true);
    }
    catch (Exception e)
    {
        options.InvokeRecoveryError(this, "Could not de-compress, invalid data", e);
        RequireHeaderUpdate = true;
        return false;
    }

    // Build the page translation table for this transaction from the recovered pages.
    var tempTransactionPageTranslaction = new Dictionary<long, JournalFile.PagePosition>();
    for (var i = 0; i < current->PageCount; i++)
    {
        Debug.Assert(_pager.Disposed == false);
        Debug.Assert(_recoveryPager.Disposed == false);

        var page = _recoveryPager.Read(_recoveryPage);

        tempTransactionPageTranslaction[page.PageNumber] = new JournalFile.PagePosition
        {
            JournalPos = _recoveryPage,
            TransactionId = current->TransactionId
        };

        if (page.IsOverflow)
        {
            var numOfPages = _recoveryPager.GetNumberOfOverflowPages(page.OverflowSize);
            _recoveryPage += numOfPages;
        }
        else
        {
            _recoveryPage++;
        }
    }

    _readingPage += compressedPages;
    LastTransactionHeader = current;

    foreach (var pagePosition in tempTransactionPageTranslaction)
    {
        _transactionPageTranslation[pagePosition.Key] = pagePosition.Value;
    }

    return true;
}