// Benchmark: fills `input` with many short, frequently-repeating pseudo-random
// runs (high redundancy), then measures an LZ4 encode/decode round-trip.
public void HighRepetition()
{
    var seedSource = new Random(1000);
    int filled = 0;
    while (filled < input.Length)
    {
        // Only 20 distinct run seeds exist, so identical runs recur often,
        // giving the compressor plenty of repetition to exploit.
        int runSeed = seedSource.Next(20);
        int runLength = Math.Min(seedSource.Next(128), input.Length - filled);
        var runRandom = new Random(runSeed);
        for (int k = 0; k < runLength; k++, filled++)
        {
            // Note: % 255 yields values 0..254 (255 never appears) — kept as-is.
            input[filled] = (byte)(runRandom.Next() % 255);
        }
    }

    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encodedOutput = new byte[maximumOutputLength];

    ExecuteBenchmark(() =>
    {
        fixed (byte *inputPtr = input)
        fixed (byte *encodedOutputPtr = encodedOutput)
        fixed (byte *outputPtr = input) // decode writes back over the source buffer
        {
            int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, (int)maximumOutputLength);
            int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);
        }
    });
}
// Round-trips a fixed 100-byte payload through LZ4 Encode64/Decode64 and
// verifies (a) the decoded bytes equal the input and (b) neither call wrote
// past the end of its destination buffer.
public void Compress()
{
    // Sentinel written just past the logical end of each destination buffer;
    // if either call overruns, the sentinel is destroyed and the final
    // assertions fail.
    uint marker = 0xDEADBEEF;
    byte[] input = new byte[] { 3, 3, 2, 2, 3, 0, 2, 0, 2, 1, 0, 1, 3, 1, 3, 0, 3, 0, 2, 0, 2, 1, 3, 1, 0, 3, 0, 0, 2, 0, 1, 2, 2, 2, 3, 2, 0, 0, 2, 1, 2, 2, 0, 3, 0, 0, 3, 2, 0, 2, 1, 2, 3, 2, 2, 1, 3, 0, 1, 0, 3, 1, 1, 2, 0, 2, 2, 1, 2, 1, 0, 3, 2, 0, 2, 0, 1, 3, 1, 3, 3, 2, 3, 0, 2, 2, 2, 0, 3, 2, 2, 0, 2, 2, 2, 0, 0, 1, 3, 1 };
    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    // Both buffers carry sizeof(uint) extra bytes to hold the overrun marker.
    byte[] encodedOutput = new byte[maximumOutputLength + sizeof(uint)];
    byte[] output = new byte[input.Length + sizeof(uint)];
    fixed(byte *inputPtr = input)
    fixed(byte *encodedOutputPtr = encodedOutput)
    fixed(byte *outputPtr = output)
    {
        // Setup buffer overrun markers.
        *(uint *)(encodedOutputPtr + maximumOutputLength) = marker;
        *(uint *)(outputPtr + input.Length) = marker;
        int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, (int)maximumOutputLength);
        int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);
        // Compressed size must fit the advertised worst case, and the decode
        // must recover the full original length.
        Assert.True(compressedSize <= maximumOutputLength);
        Assert.Equal(input.Length, uncompressedSize);
        // Byte-for-byte equality of the round-tripped data.
        for (int i = 0; i < input.Length; i++)
        {
            Assert.Equal(input[i], output[i]);
        }
        // Check buffer overruns didn't happen.
        Assert.Equal(marker, *(uint *)(encodedOutputPtr + maximumOutputLength));
        Assert.Equal(marker, *(uint *)(outputPtr + input.Length));
    }
}
// Round-trips `size` repetitions of the word "sample" through Encode64/Decode64,
// using native memory for the compressed buffer, and asserts the decoded UTF-8
// string matches the original.
public unsafe void LZ4Test(int size)
{
    // NOTE(review): `encodeInput` is allocated and freed but never read or
    // written — it looks like dead code from an earlier version; confirm
    // before removing.
    byte *encodeInput = NativeMemory.AllocateMemory(size);
    int compressedSize;
    byte *encodeOutput;
    // originStr is "sample" repeated `size` times (length 6 * size).
    var originStr = string.Join("", Enumerable.Repeat(1, size).Select(x => "sample"));
    var bytes = Encoding.UTF8.GetBytes(originStr);
    var maximumOutputLength = LZ4.MaximumOutputLength(bytes.Length);
    fixed(byte *pb = bytes)
    {
        encodeOutput = NativeMemory.AllocateMemory((int)maximumOutputLength);
        compressedSize = LZ4.Encode64(pb, encodeOutput, bytes.Length, (int)maximumOutputLength);
    }
    // Wipe the managed buffer so a successful decode is provably not a no-op.
    Array.Clear(bytes, 0, bytes.Length);
    fixed(byte *pb = bytes)
    {
        LZ4.Decode64(encodeOutput, compressedSize, pb, bytes.Length, true);
    }
    var actual = Encoding.UTF8.GetString(bytes);
    Assert.Equal(originStr, actual);
    NativeMemory.Free(encodeInput, size);
    NativeMemory.Free(encodeOutput, maximumOutputLength);
}
// Unity immediate-mode GUI: once the package download has finished, offers a
// button that LZ4-decompresses the downloaded bytes into `outbuffer` and loads
// them into `tex` as an image; also renders the texture and the log text area.
void OnGUI()
{
    if (downloadDone2 == true)
    {
        GUI.Label(new Rect(10, 0, 250, 30), "got package, ready to extract");
        if (GUI.Button(new Rect(10, 90, 230, 50), "start StreamingAssets lz4 test"))
        {
            // In a player build the bytes come from the `ww2` download; in the
            // editor they come from the `wwb` buffer instead.
#if !UNITY_EDITOR
            plog("lz4 decompress: " + LZ4.decompressBuffer(ww2.bytes, ref outbuffer).ToString());
#else
            plog("lz4 decompress: " + LZ4.decompressBuffer(wwb, ref outbuffer).ToString());
#endif
            // decompressBuffer leaves `outbuffer` null on failure, so guard the load.
            if (outbuffer != null)
            {
                tex.LoadImage(outbuffer);
            }
        }
    }
    if (tex != null)
    {
        GUI.DrawTexture(new Rect(360, 10, 375, 300), tex);
    }
    GUI.TextArea(new Rect(10, 370, Screen.width - 20, Screen.height - 400), log);
}
// Benchmark: fills `input` with uniformly random values confined to the low
// four bits (0..15), then measures an LZ4 encode/decode round-trip over it.
public void LowBitsRandom()
{
    const int threshold = 1 << 4; // only the low nibble varies
    var generator = new Random(1000);
    for (int index = 0; index < input.Length; index++)
    {
        input[index] = (byte)(generator.Next() % threshold);
    }

    var maximumOutputLength = LZ4.MaximumOutputLength(input.Length);
    byte[] encodedOutput = new byte[maximumOutputLength];

    ExecuteBenchmark(() =>
    {
        fixed (byte *inputPtr = input)
        fixed (byte *encodedOutputPtr = encodedOutput)
        fixed (byte *outputPtr = input) // decode writes back over the source buffer
        {
            int compressedSize = LZ4.Encode64(inputPtr, encodedOutputPtr, input.Length, (int)maximumOutputLength);
            int uncompressedSize = LZ4.Decode64(encodedOutputPtr, compressedSize, outputPtr, input.Length, true);
        }
    });
}
// Round-trips a fixed 100-byte payload through the acceleration-parameter
// overload of Encode64 and verifies the decoded output matches the input.
public void Compress2()
{
    byte[] input = new byte[] { 3, 3, 2, 2, 3, 0, 2, 0, 2, 1, 0, 1, 3, 1, 3, 0, 3, 0, 2, 0, 2, 1, 3, 1, 0, 3, 0, 0, 2, 0, 1, 2, 2, 2, 3, 2, 0, 0, 2, 1, 2, 2, 0, 3, 0, 0, 3, 2, 0, 2, 1, 2, 3, 2, 2, 1, 3, 0, 1, 0, 3, 1, 1, 2, 0, 2, 2, 1, 2, 1, 0, 3, 2, 0, 2, 0, 1, 3, 1, 3, 3, 2, 3, 0, 2, 2, 2, 0, 3, 2, 2, 0, 2, 2, 2, 0, 0, 1, 3, 1 };
    byte[] encodedOutput = new byte[LZ4.MaximumOutputLength(input.Length)];

    // Compress with acceleration level 1.
    int compressedSize;
    fixed (byte *sourcePtr = input)
    fixed (byte *destinationPtr = encodedOutput)
    {
        compressedSize = LZ4.Encode64(sourcePtr, destinationPtr, input.Length, encodedOutput.Length, 1);
    }

    // Decompress into a fresh buffer.
    byte[] output = new byte[input.Length];
    int uncompressedSize;
    fixed (byte *destinationPtr = output)
    fixed (byte *sourcePtr = encodedOutput)
    {
        uncompressedSize = LZ4.Decode64(sourcePtr, compressedSize, destinationPtr, input.Length, true);
    }

    // Verify length and byte-for-byte content.
    Assert.Equal(input.Length, uncompressedSize);
    for (var position = 0; position < input.Length; position++)
    {
        Assert.Equal(input[position], output[position]);
    }
}
// Decompresses this value into `tempBuffer`, using LZ4 for payloads larger
// than 128 bytes and the small-string codec otherwise, then appends the
// escape-position table that trails the compressed data. Throws FormatException
// if the decoded length does not match the expected UncompressedSize.
private byte *DecompressToBuffer(byte *tempBuffer, int sizeOfEscapePositions)
{
    int actualSize = UncompressedSize > 128
        ? LZ4.Decode64(Buffer, CompressedSize, tempBuffer, UncompressedSize, true)
        : SmallStringCompression.Instance.Decompress(Buffer, CompressedSize, tempBuffer, UncompressedSize);

    if (actualSize != UncompressedSize)
    {
        throw new FormatException("Wrong size detected on decompression");
    }

    // Escape positions are stored uncompressed right after the compressed payload.
    Memory.Copy(tempBuffer + actualSize, Buffer + CompressedSize, sizeOfEscapePositions);
    return tempBuffer;
}
// WebGL coroutine variant of the LZ4 smoke tests: exercises file-to-file
// compress/decompress and the byte-buffer APIs, recording each result in the
// lz1..lz4 status fields.
IEnumerator DoTestsWebGL()
{
    yield return(true);
    //File tests
    //compress a file to lz4 with highest level of compression (9).
    lz1 = LZ4.compress(ppath + "/" + myFile, ppath + "/" + myFile + ".lz4", 9, progress);
    //decompress the previously compressed archive
    lz2 = LZ4.decompress(ppath + "/" + myFile + ".lz4", ppath + "/" + myFile + "B.tif", bytes);
    //Buffer tests
    if (File.Exists(ppath + "/" + myFile))
    {
        byte[] bt = File.ReadAllBytes(ppath + "/" + myFile);
        //compress a byte buffer (we write the output buffer to a file for debug purposes.)
        if (LZ4.compressBuffer(bt, ref buff, 9, true))
        {
            lz3 = 1;
            File.WriteAllBytes(ppath + "/buffer1.lz4buf", buff);
        }
        byte[] bt2 = File.ReadAllBytes(ppath + "/buffer1.lz4buf");
        //decompress a byte buffer (we write the output buffer to a file for debug purposes.)
        if (LZ4.decompressBuffer(bt2, ref buff, true))
        {
            lz4 = 1;
            File.WriteAllBytes(ppath + "/buffer1.tif", buff);
        }
        // Release the temporary buffers eagerly.
        bt2 = null;
        bt = null;
    }
}
// Decompresses this value into a context-managed native temp buffer, using LZ4
// for payloads larger than 128 bytes and the small-string codec otherwise.
// Throws FormatException if the decoded length differs from UncompressedSize.
public byte *DecompressToTempBuffer()
{
    var destination = _context.GetNativeTempBuffer(UncompressedSize);

    int actualSize = UncompressedSize > 128
        ? LZ4.Decode64(Buffer, CompressedSize, destination, UncompressedSize, true)
        : SmallStringCompression.Instance.Decompress(Buffer, CompressedSize, destination, UncompressedSize);

    if (actualSize != UncompressedSize)
    {
        throw new FormatException("Wrong size detected on decompression");
    }

    return destination;
}
// Reads the next transaction from the journal into the recovery pager and
// registers its page translations. Returns false when there are no more pages,
// the header fails validation, or the CRC check fails; returns true when the
// transaction was applied or deliberately skipped (already synced).
public bool ReadOneTransaction(StorageEnvironmentOptions options, bool checkCrc = true)
{
    if (_readingPage >= _pager.NumberOfAllocatedPages)
    {
        return(false);
    }
    TransactionHeader *current;
    if (!TryReadAndValidateHeader(options, out current))
    {
        return(false);
    }
    // Number of journal pages occupied by the compressed payload (ceiling division).
    var compressedPages = (current->CompressedSize / AbstractPager.PageSize) + (current->CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);
    if (current->TransactionId <= _lastSyncedTransactionId)
    {
        LastTransactionHeader = current;
        _readingPage += compressedPages;
        return(true); // skipping
    }
    if (checkCrc && !ValidatePagesCrc(options, compressedPages, current))
    {
        return(false);
    }
    var totalPageCount = current->PageCount + current->OverflowPageCount;
    // Reserve and zero enough recovery pages for the decompressed transaction.
    _recoveryPager.EnsureContinuous(null, _recoveryPage, totalPageCount + 1);
    var dataPage = _recoveryPager.AcquirePagePointer(_recoveryPage);
    NativeMethods.memset(dataPage, 0, totalPageCount * AbstractPager.PageSize);
    try
    {
        LZ4.Decode64(_pager.AcquirePagePointer(_readingPage), current->CompressedSize, dataPage, current->UncompressedSize, true);
    }
    catch (Exception e)
    {
        // Corrupt journal data: report via the recovery callback instead of
        // propagating, and flag the header for rewrite.
        options.InvokeRecoveryError(this, "Could not de-compress, invalid data", e);
        RequireHeaderUpdate = true;
        return(false);
    }
    var tempTransactionPageTranslaction = (*current).GetTransactionToPageTranslation(_recoveryPager, ref _recoveryPage);
    _readingPage += compressedPages;
    LastTransactionHeader = current;
    // Merge this transaction's page mappings into the cumulative translation table.
    foreach (var pagePosition in tempTransactionPageTranslaction)
    {
        _transactionPageTranslation[pagePosition.Key] = pagePosition.Value;
    }
    return(true);
}
// Compresses `size` bytes of `data` (starting at `offset`) into the send
// buffer after an 8-byte preamble [header:int][compressedLength:int], then
// transmits preamble + compressed payload in a single socket call.
public void SendCompressed(int header, byte[] data, int offset, int size)
{
    // Compressed payload starts at offset 8, leaving room for the preamble;
    // 65528 bounds the compressed output size.
    int compressedLength = LZ4.Encode(data, offset, m_sendBuffer, 8, size, 65528);

    BitConverter.TryWriteBytes(new Span <byte>(m_sendBuffer, 0, 4), header);
    BitConverter.TryWriteBytes(new Span <byte>(m_sendBuffer, 4, 4), compressedLength);

    m_socket.Send(m_sendBuffer, compressedLength + 8, SocketFlags.None);
}
// One-time server bootstrap: loads the native LZ4 library, reads the license
// text into the server context, and sets the listening port.
public void Initialize(string[] args)
{
    LZ4.LoadLib();
    // NOTE(review): "COPING.txt" looks like a typo for "COPYING.txt" — confirm
    // the actual file name shipped alongside the server before changing it.
    serverContext.License = System.IO.File.ReadAllText("COPING.txt");
    serverContext.port = 25565;
    Console.WriteLine("Core Initialize.");
}
// Applies a transaction log shipped from another node. Validates the CRC, the
// transaction-id sequence, and the previous-transaction CRC chain before
// decompressing the pages into a scratch buffer and writing them through a
// local read-write transaction.
public void ApplyShippedLog(byte[] txPagesRaw)
{
    fixed(byte *pages = txPagesRaw)
    {
        using (var tx = _env.NewTransaction(TransactionFlags.ReadWrite))
        {
            // Layout: [TransactionHeader][compressed data pages...].
            var transactionHeader = (TransactionHeader *)pages;
            var dataPages = pages + AbstractPager.PageSize;
            // Pages occupied by the compressed payload (ceiling division).
            var compressedPages = (transactionHeader->CompressedSize / AbstractPager.PageSize) + (transactionHeader->CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);
            var crc = Crc.Value(dataPages, 0, compressedPages * AbstractPager.PageSize);
            var transactionId = transactionHeader->TransactionId;
            if (transactionHeader->Crc != crc)
            {
                throw new InvalidDataException("Invalid CRC signature for shipped transaction " + transactionId);
            }
            // Transactions must arrive in strict id order with no gaps.
            if (transactionId - 1 != PreviousTransactionId)
            {
                throw new InvalidDataException("Invalid id for shipped transaction got " + transactionId + " but expected " + (PreviousTransactionId + 1) + ", is there a break in the chain?");
            }
            // Each header carries the CRC of its predecessor, forming a chain.
            if (transactionHeader->PreviousTransactionCrc != PreviousTransactionCrc)
            {
                throw new InvalidDataException("Invalid CRC signature for previous shipped transaction " + transactionId + ", is there a break in the chain?");
            }
            var totalPages = transactionHeader->PageCount + transactionHeader->OverflowPageCount;
            var decompressBuffer = _env.ScratchBufferPool.Allocate(tx, totalPages);
            try
            {
                try
                {
                    var dest = _env.ScratchBufferPool.AcquirePagePointer(decompressBuffer.ScratchFileNumber, decompressBuffer.PositionInScratchBuffer);
                    LZ4.Decode64(dataPages, transactionHeader->CompressedSize, dest, transactionHeader->UncompressedSize, true);
                }
                catch (Exception e)
                {
                    // Wrap decoder failures so callers see a data error, not an LZ4 internal.
                    throw new InvalidDataException("Could not de-compress shipped transaction pages, invalid data", e);
                }
                tx.WriteDirect(transactionHeader, decompressBuffer);
                // Advance the chain state only after a successful write.
                _previousTransactionCrc = crc;
                _previousTransactionId = transactionHeader->TransactionId;
            }
            finally
            {
                // Always return the scratch buffer, even on failure.
                _env.ScratchBufferPool.Free(decompressBuffer.ScratchFileNumber, decompressBuffer.PositionInScratchBuffer, -1);
            }
            tx.Commit();
            OnTransactionApplied(transactionId, crc);
        }
    }
}
// Gathers the transaction's data pages into a contiguous temp buffer,
// compresses them with LZ4, fills in the transaction header (sizes, CRC,
// previous-transaction CRC), and returns the page pointers to write to the
// journal: [0] = header page, [1..] = compressed pages.
private IntPtr[] CompressPages(Transaction tx, int numberOfPages, IVirtualPager compressionPager, uint previousTransactionCrc)
{
    // numberOfPages include the tx header page, which we don't compress
    var dataPagesCount = numberOfPages - 1;
    var sizeInBytes = dataPagesCount * AbstractPager.PageSize;
    // Worst-case compressed size, rounded up to whole pages.
    var outputBuffer = LZ4.MaximumOutputLength(sizeInBytes);
    var outputBufferInPages = outputBuffer / AbstractPager.PageSize + (outputBuffer % AbstractPager.PageSize == 0 ? 0 : 1);
    var pagesRequired = (dataPagesCount + outputBufferInPages);
    compressionPager.EnsureContinuous(tx, 0, pagesRequired);
    // The pager holds both regions: [temp gather buffer][compression output].
    var tempBuffer = compressionPager.AcquirePagePointer(tx, 0);
    var compressionBuffer = compressionPager.AcquirePagePointer(tx, dataPagesCount);
    var write = tempBuffer;
    var txPages = tx.GetTransactionPages();
    // Copy every modified page out of the scratch files into one contiguous run.
    foreach (var txPage in txPages)
    {
        var scratchPage = tx.Environment.ScratchBufferPool.AcquirePagePointer(tx, txPage.ScratchFileNumber, txPage.PositionInScratchBuffer);
        var count = txPage.NumberOfPages * AbstractPager.PageSize;
        Memory.BulkCopy(write, scratchPage, count);
        write += count;
    }
    var len = DoCompression(tempBuffer, compressionBuffer, sizeInBytes, outputBuffer);
    var remainder = len % AbstractPager.PageSize;
    var compressedPages = (len / AbstractPager.PageSize) + (remainder == 0 ? 0 : 1);
    if (remainder != 0)
    {
        // zero the remainder of the page
        UnmanagedMemory.Set(compressionBuffer + len, 0, remainder);
    }
    var pages = new IntPtr[compressedPages + 1];
    var txHeaderPage = tx.GetTransactionHeaderPage();
    var txHeaderBase = tx.Environment.ScratchBufferPool.AcquirePagePointer(tx, txHeaderPage.ScratchFileNumber, txHeaderPage.PositionInScratchBuffer);
    var txHeader = (TransactionHeader *)txHeaderBase;
    // Record compression results and the CRC chain link in the header.
    txHeader->Compressed = true;
    txHeader->CompressedSize = len;
    txHeader->UncompressedSize = sizeInBytes;
    txHeader->PreviousTransactionCrc = previousTransactionCrc;
    pages[0] = new IntPtr(txHeaderBase);
    for (int index = 0; index < compressedPages; index++)
    {
        pages[index + 1] = new IntPtr(compressionBuffer + (index * AbstractPager.PageSize));
    }
    // CRC covers the full page-aligned compressed region (including padding).
    txHeader->Crc = Crc.Value(compressionBuffer, 0, compressedPages * AbstractPager.PageSize);
    return(pages);
}
// Rebuilds the layer dictionary from a compressed, pickled JSON packet, then
// ensures every configured layer key exists — missing layers are backfilled
// with empty tile grids of the map's dimensions.
public void LoadTileData(byte[] packet)
{
    var json = LZ4.UnPickleString(packet);
    Layers = JsonConvert.DeserializeObject <Dictionary <string, Tile[, ]> >(json, mJsonSerializerSettings);

    foreach (var layerName in Options.Instance.MapOpts.Layers.All)
    {
        if (Layers.ContainsKey(layerName))
        {
            continue;
        }
        Layers.Add(layerName, new Tile[Options.MapWidth, Options.MapHeight]);
    }
}
// Creates a scratch pager sized (in whole pages, rounded down by the integer
// division) to the worst-case LZ4 output for `inputSize` bytes, and returns a
// pointer to its first page via `buffer`.
private static unsafe AbstractPager CreateScratchFile(string scratchName, StorageEnvironment env, long inputSize, out byte *buffer)
{
    var dataPath = RavenTestHelper.NewDataPath(nameof(HugeTransactions), 0, forceCreateDir: true);
    var filename = Path.Combine(dataPath, $"TestBigCompression-{scratchName}");

    long worstCaseSize = LZ4.MaximumOutputLength(inputSize);
    int pageCount = checked ((int)(worstCaseSize / Constants.Storage.PageSize));

    var scratchPager = env.Options.CreateScratchPager(filename, (long)pageCount * Constants.Storage.PageSize);
    scratchPager.EnsureContinuous(0, pageCount);
    buffer = scratchPager.AcquirePagePointer(null, 0);
    return scratchPager;
}
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @Test public void shouldCompareModifierProtocolsByListOrder()
// Verifies the modifier-protocol comparator ranks protocols by their position
// in the configured preference list (earlier entries compare greater).
public virtual void ShouldCompareModifierProtocolsByListOrder()
{
    // given: compression protocols listed in preference order LZO > SNAPPY > LZ4.
    // Bug fix (converter artifact): interfaces cannot be instantiated —
    // `new IList<string> { ... }` does not compile; use List<string>.
    IList <ModifierSupportedProtocols> supportedProtocols = asList(new ModifierSupportedProtocols(COMPRESSION, new List <string> { LZO.implementation(), SNAPPY.implementation(), LZ4.implementation() }));

    IComparer <Org.Neo4j.causalclustering.protocol.Protocol_ModifierProtocol> comparator = ModifierProtocolRepository.GetModifierProtocolComparator(supportedProtocols).apply(COMPRESSION.canonicalName());

    // then: an earlier-listed protocol sorts after (compares greater than) a later one.
    assertThat(comparator.Compare(LZO, TestProtocols_TestModifierProtocols.Snappy), Matchers.greaterThan(0));
    assertThat(comparator.Compare(TestProtocols_TestModifierProtocols.Snappy, TestProtocols_TestModifierProtocols.Lz4), Matchers.greaterThan(0));
}
// Creates a scratch pager in the system temp directory sized (in whole pages,
// rounded down by the integer division) to the worst-case LZ4 output for
// `inputSize` bytes, returning a pointer to its first page via `buffer`.
private static unsafe AbstractPager CreateScratchFile(string scratchName, StorageEnvironment env, long inputSize, out byte *buffer)
{
    var filename = $"{Path.GetTempPath()}{Path.DirectorySeparatorChar}TestBigCompression-{scratchName}";

    long worstCaseSize = LZ4.MaximumOutputLength(inputSize);
    int pageCount = checked ((int)(worstCaseSize / Constants.Storage.PageSize));

    var scratchPager = env.Options.CreateScratchPager(filename, (long)pageCount * Constants.Storage.PageSize);
    scratchPager.EnsureContinuous(0, pageCount);
    buffer = scratchPager.AcquirePagePointer(null, 0);
    return scratchPager;
}
// Round-trips `bytes` through the codec selected by `method` and asserts the
// decompressed result is byte-identical to the original input. Unknown
// methods fall through without asserting (matching the original behavior).
static void Test(byte[] bytes, Method method)
{
    byte[] roundTripped = new byte[bytes.Length];

    // Every codec path ends with the same equality check.
    void AssertRoundTrip() => Assert.IsTrue(bytes.SequenceEqual(roundTripped));

    switch (method)
    {
    case Method.LZ10:
        roundTripped = LZ10.Decompress(new MemoryStream(LZ10.Compress(new MemoryStream(bytes))), bytes.Length);
        AssertRoundTrip();
        break;
    case Method.LZ11:
        roundTripped = LZ11.Decompress(new MemoryStream(LZ11.Compress(new MemoryStream(bytes))), bytes.Length);
        AssertRoundTrip();
        break;
    case Method.LZ40:
        roundTripped = LZ40.Decompress(new MemoryStream(LZ40.Compress(new MemoryStream(bytes))), bytes.Length);
        AssertRoundTrip();
        break;
    case Method.LZ77:
        roundTripped = LZ77.Decompress(new MemoryStream(LZ77.Compress(new MemoryStream(bytes))));
        AssertRoundTrip();
        break;
    case Method.RevLZ77:
        roundTripped = RevLZ77.Decompress(new MemoryStream(RevLZ77.Compress(new MemoryStream(bytes))));
        AssertRoundTrip();
        break;
    case Method.LZ4:
        roundTripped = LZ4.Decompress(new MemoryStream(LZ4.Compress(new MemoryStream(bytes))));
        AssertRoundTrip();
        break;
    case Method.LZECD:
        roundTripped = LZECD.Decompress(new MemoryStream(LZECD.Compress(new MemoryStream(bytes))));
        AssertRoundTrip();
        break;
    case Method.LZOvl:
        roundTripped = LZOvl.Decompress(new MemoryStream(LZOvl.Compress(new MemoryStream(bytes))));
        AssertRoundTrip();
        break;
    case Method.MIO0:
        // MIO0 is byte-order sensitive; test the little-endian variant.
        roundTripped = MIO0.Decompress(new MemoryStream(MIO0.Compress(new MemoryStream(bytes), ByteOrder.LittleEndian)), ByteOrder.LittleEndian);
        AssertRoundTrip();
        break;
    }
}
// Loads a GFLX pack file: parses the container header, then reads and
// LZ4-decompresses every contained file, naming each entry by its offset plus
// an extension guessed from the decompressed payload's magic number.
public GFLXPack(string path)
{
    Files = new List <byte[]>();
    Names = new List <string>();
    using (BinaryReader br = new BinaryReader(File.Open(path, FileMode.Open)))
    {
        //Read container header
        // Bug fix: char[].ToString() returns the type name ("System.Char[]"),
        // not the characters — build the magic string from the chars instead.
        Magic = new string(br.ReadChars(8));
        br.ReadUInt64();
        FileCnt = br.ReadUInt32();
        br.ReadUInt32();
        InfoOff = br.ReadUInt64();
        br.ReadUInt64();
        br.ReadUInt64();
        br.ReadUInt64();
        for (int i = 0; i < FileCnt; i++)
        {
            // Each file-info record is 0x18 bytes long.
            br.BaseStream.Position = (long)InfoOff + (i * 0x18);
            //Read file header
            br.ReadUInt32();
            UInt32 size = br.ReadUInt32();   // decompressed size
            UInt32 zsize = br.ReadUInt32();  // compressed size
            br.ReadUInt32(); //dummy
            UInt64 offset = br.ReadUInt64();
            br.BaseStream.Position = (long)offset;
            byte[] compData = br.ReadBytes((int)zsize);
            byte[] decompData = LZ4.Decompress(compData, (int)size);
            // Pick a file extension from the decompressed payload's leading magic.
            string ext = string.Empty;
            switch (BitConverter.ToUInt32(decompData, 0))
            {
            case 0x58544E42:
                ext = ".btnx";
                break;
            case 0x48534E42:
                ext = ".bnsh";
                break;
            case 0x20:
                ext = ".gfbmdl";
                break;
            default:
                ext = ".bin";
                break;
            }
            Names.Add(offset.ToString("X8") + ext);
            Files.Add(decompData);
        }
    }
}
// Runs the full LZ4 plugin smoke-test suite: file-to-file compress/decompress,
// byte-buffer APIs, the fixed-buffer API, and (on supported platforms) the
// in-memory FileBuffer decompress path. Results land in the lz1..lz4 and
// fbuftest status fields.
void DoTests()
{
    //File tests
    //compress a file to lz4 with highest level of compression (9).
    lz1 = LZ4.compress(ppath + "/" + myFile, ppath + "/" + myFile + ".lz4", 9, progress);
    //decompress the previously compressed archive
    lz2 = LZ4.decompress(ppath + "/" + myFile + ".lz4", ppath + "/" + myFile + "B.tif", bytes);
    //Buffer tests
    if (File.Exists(ppath + "/" + myFile))
    {
        byte[] bt = File.ReadAllBytes(ppath + "/" + myFile);
        //compress a byte buffer (we write the output buffer to a file for debug purposes.)
        if (LZ4.compressBuffer(bt, ref buff, 9, true))
        {
            lz3 = 1;
            File.WriteAllBytes(ppath + "/buffer1.lz4buf", buff);
        }
        byte[] bt2 = File.ReadAllBytes(ppath + "/buffer1.lz4buf");
        //decompress a byte buffer (we write the output buffer to a file for debug purposes.)
        if (LZ4.decompressBuffer(bt2, ref buff, true))
        {
            lz4 = 1;
            File.WriteAllBytes(ppath + "/buffer1.tif", buff);
        }
        //FIXED BUFFER FUNCTION:
        int decommpressedSize = LZ4.decompressBufferFixed(bt2, ref fixedOutBuffer);
        if (decommpressedSize > 0)
        {
            Debug.Log(" # Decompress Fixed size Buffer: " + decommpressedSize);
        }
        // Release the temporary buffers eagerly.
        bt2 = null;
        bt = null;
    }
    //make FileBuffer test on supported platfoms.
#if (UNITY_IPHONE || UNITY_IOS || UNITY_STANDALONE_OSX || UNITY_ANDROID || UNITY_STANDALONE_LINUX || UNITY_EDITOR) && !UNITY_EDITOR_WIN
    //make a temp buffer to read an lz4 file in.
    if (File.Exists(ppath + "/" + myFile + ".lz4"))
    {
        byte[] FileBuffer = File.ReadAllBytes(ppath + "/" + myFile + ".lz4");
        // Passing null as the source path makes the plugin read from FileBuffer instead.
        fbuftest = LZ4.decompress(null, ppath + "/" + myFile + ".FBUFF.tif", bytes, FileBuffer);
    }
#endif
}
// Worker-thread entry point: decompresses the LZ4 archive at
// `unzip_source_path` to `unzip_dest_path` and logs the plugin's return code.
// The commented alternatives are earlier codec experiments kept for reference.
void UnZipThread()
{
    //FileStream stream = new FileStream(source_path, FileMode.Open);
    //int lzres = lzma.doDecompress7zip(source_path, dest_path, ref progress, true, true);
    //int lzres = fLZ.decompressFile(source_path, dest_path, true, progress);
    //int[] bytes = new int[1];
#if !UNITY_WEBPLAYER
    int lzres = LZ4.decompress(unzip_source_path, unzip_dest_path, uncompressed_bytes);
    Debug.LogFormat("lzres = {0}", lzres);
#endif
}
// Older variant of CompressPages: gathers the transaction's data pages
// (skipping txPages[0], the header page) into a temp buffer, LZ4-compresses
// them, fills in the header sizes and CRC, and returns the raw page pointers:
// [0] = header page, [1..] = compressed pages.
private byte *[] CompressPages(Transaction tx, int numberOfPages, IVirtualPager compressionPager)
{
    // numberOfPages include the tx header page, which we don't compress
    var dataPagesCount = numberOfPages - 1;
    var sizeInBytes = dataPagesCount * AbstractPager.PageSize;
    // Worst-case compressed size, rounded up to whole pages.
    var outputBuffer = LZ4.MaximumOutputLength(sizeInBytes);
    var outputBufferInPages = outputBuffer / AbstractPager.PageSize + (outputBuffer % AbstractPager.PageSize == 0 ? 0 : 1);
    var pagesRequired = (dataPagesCount + outputBufferInPages);
    compressionPager.EnsureContinuous(tx, 0, pagesRequired);
    // The pager holds both regions: [temp gather buffer][compression output].
    var tempBuffer = compressionPager.AcquirePagePointer(0);
    var compressionBuffer = compressionPager.AcquirePagePointer(dataPagesCount);
    var write = tempBuffer;
    var txPages = tx.GetTransactionPages();
    // Start at 1: txPages[0] is the header page, handled separately below.
    for (int index = 1; index < txPages.Count; index++)
    {
        var txPage = txPages[index];
        var scratchPage = tx.Environment.ScratchBufferPool.AcquirePagePointer(txPage.PositionInScratchBuffer);
        var count = txPage.NumberOfPages * AbstractPager.PageSize;
        NativeMethods.memcpy(write, scratchPage, count);
        write += count;
    }
    var sizeAfterCompression = DoCompression(tempBuffer, compressionBuffer, sizeInBytes, outputBuffer);
    // Compressed size rounded up to whole pages.
    var compressedPages = (sizeAfterCompression / AbstractPager.PageSize) + (sizeAfterCompression % AbstractPager.PageSize == 0 ? 0 : 1);
    var txHeaderBase = tx.Environment.ScratchBufferPool.AcquirePagePointer(txPages[0].PositionInScratchBuffer);
    var txHeader = (TransactionHeader *)txHeaderBase;
    // Record compression results in the header.
    txHeader->Compressed = true;
    txHeader->CompressedSize = sizeAfterCompression;
    txHeader->UncompressedSize = sizeInBytes;
    var pages = new byte *[compressedPages + 1];
    pages[0] = txHeaderBase;
    for (int index = 0; index < compressedPages; index++)
    {
        pages[index + 1] = compressionBuffer + (index * AbstractPager.PageSize);
    }
    // CRC covers the full page-aligned compressed region.
    txHeader->Crc = Crc.Value(compressionBuffer, 0, compressedPages * AbstractPager.PageSize);
    return(pages);
}
// Demonstrates LZ4 frame round-tripping of TEST_DATA and prints size and
// equivalence statistics to the console.
static void Main(string[] args)
{
    byte[] data = Encoding.ASCII.GetBytes(TEST_DATA);
    byte[] compressedData = LZ4.CompressFrame(data);
    byte[] decompressedData = LZ4.DecompressFrame(compressedData);

    // Compressed size as a percentage of the original size.
    float ratio = 100.0f * (float)compressedData.Length / (float)data.Length;
    // Bug fix: the original "{ratio:.02}" is a custom numeric format in which
    // '2' is a literal character (e.g. 45.678 printed as "45.72"); "F2" prints
    // the intended two decimal places.
    Console.WriteLine($"Compressed data info:\nCompression ratio: {ratio:F2}%");
    Console.WriteLine($"Original size: {data.Length}\nCompressed size: {compressedData.Length}");
    Console.WriteLine($"Original size: {data.Length}\nDecompressed size: {decompressedData.Length}");
    Console.WriteLine($"Decompression equivalence: {Enumerable.SequenceEqual(data, decompressedData)}");
}
// Attempts to LZ4-decode the current journal page range into `dataPage`.
// On corrupt data the recovery-error callback is invoked, the header is
// flagged for rewrite, and false is returned instead of propagating.
private unsafe bool TryDecompressTransactionPages(StorageEnvironmentOptions options, TransactionHeader *current, byte *dataPage)
{
    try
    {
        LZ4.Decode64(_pager.AcquirePagePointer(_readingPage), current->CompressedSize, dataPage, current->UncompressedSize, true);
    }
    catch (Exception e)
    {
        options.InvokeRecoveryError(this, "Could not de-compress, invalid data", e);
        RequireHeaderUpdate = true;
        return(false);
    }
    return(true);
}
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @Test public void shouldReturnModifierProtocolOfFirstConfiguredVersionRequestedAndSupported()
// Verifies that, given several mutually supported compression implementations,
// the repository selects the first one in the locally configured preference list.
public virtual void ShouldReturnModifierProtocolOfFirstConfiguredVersionRequestedAndSupported()
{
    // given: local preference order LZO > SNAPPY > LZ4 for compression.
    // Bug fix (converter artifact): interfaces cannot be instantiated —
    // `new IList<string> { ... }` does not compile; use List<string>.
    IList <ModifierSupportedProtocols> supportedProtocols = asList(new ModifierSupportedProtocols(COMPRESSION, new List <string> { LZO.implementation(), SNAPPY.implementation(), LZ4.implementation() }), new ModifierSupportedProtocols(GRATUITOUS_OBFUSCATION, new List <string> { NAME_CLASH.implementation() }));
    ModifierProtocolRepository modifierProtocolRepository = new ModifierProtocolRepository(TestProtocols_TestModifierProtocols.values(), supportedProtocols);
    // when: the remote requests a superset including all three supported implementations.
    Optional <Org.Neo4j.causalclustering.protocol.Protocol_ModifierProtocol> modifierProtocol = modifierProtocolRepository.Select(COMPRESSION.canonicalName(), asSet("bzip2", SNAPPY.implementation(), LZ4.implementation(), LZO.implementation(), "fast_lz"));
    // then: LZO wins because it is first in the configured list.
    //JAVA TO C# CONVERTER TODO TASK: Method reference arbitrary object instance method syntax is not converted by Java to C# Converter:
    assertThat(modifierProtocol.map(Protocol::implementation), OptionalMatchers.contains(LZO.implementation()));
}
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @Test public void shouldExceptionallyCompleteProtocolStackIfSwitchOverDiffersByVersionFromNegotiatedModifiedProtocol()
// Verifies that switching over with a modifier protocol (LZ4) different from
// the one negotiated (SNAPPY) exceptionally completes the protocol stack.
public void shouldExceptionallyCompleteProtocolStackIfSwitchOverDiffersByVersionFromNegotiatedModifiedProtocol()
{
    // given: a completed negotiation that agreed on SNAPPY compression.
    int version = 1;
    _server.handle(InitialMagicMessage.Instance());
    _server.handle(new ApplicationProtocolRequest(RAFT.canonicalName(), asSet(version)));
    _server.handle(new ModifierProtocolRequest(COMPRESSION.canonicalName(), asSet(SNAPPY.implementation())));
    // when: the switch-over names LZ4 instead of the negotiated SNAPPY.
    // Bug fix (converter artifact): interfaces cannot be instantiated —
    // `new IList<...> { ... }` does not compile; use List<...>.
    _server.handle(new SwitchOverRequest(RAFT_1.category(), version, new List <Pair <string, string> > { Pair.of(COMPRESSION.canonicalName(), LZ4.implementation()) }));
    // then
    AssertExceptionallyCompletedProtocolStackFuture();
}
// Initializes a new map with a default name and pre-compressed (LZ4-pickled
// JSON) tile and attribute payloads.
public MapBase(Guid id) : base(id)
{
    Name = "New Map";

    //Create empty tile array and then compress it down
    // If no layers exist yet, serialize an empty dictionary and then discard
    // it, keeping only the compressed representation.
    var createdEmptyLayers = Layers == null;
    if (createdEmptyLayers)
    {
        Layers = new Dictionary <string, Tile[, ]>();
    }
    TileData = LZ4.PickleString(JsonConvert.SerializeObject(Layers, Formatting.None, mJsonSerializerSettings));
    if (createdEmptyLayers)
    {
        Layers = null;
    }

    mCachedAttributeData = LZ4.PickleString(JsonConvert.SerializeObject(Attributes, Formatting.None, mJsonSerializerSettings));
}
// Opens a read stream over a single file stored inside the archive.
// Compressed entries are inflated fully into memory; uncompressed entries are
// served as a bounded sub-stream that takes ownership of the archive stream.
protected override Stream DoGetFileStream(string path)
{
    var file = FindFileInfo(path);
    if (file == null)
    {
        throw new InvalidOperationException("File not found in archive: " + path);
    }

    Stream stream = new FileStream(ArchivePath, FileMode.Open, FileAccess.Read);
    try
    {
        stream.Position = file.DataOffset;
        long length = file.Size;

        if (flags.HasFlag(ArchiveFlags.FileNameBeforeData))
        {
            // Consume (skip) filename before data: one length byte, then the name.
            var singleByteBuffer = new byte[1];
            stream.Read(singleByteBuffer, 0, 1);
            var stringLength = singleByteBuffer[0];
            stream.Seek(stringLength, SeekOrigin.Current);

            // Adjust length according to consumed data
            length -= stringLength + 1;
        }

        if (file.IsCompressed)
        {
            // Read original (decompressed) size stored before the payload.
            var originalSizeBuffer = new byte[4];
            stream.Read(originalSizeBuffer, 0, 4);
            uint originalSize = BitConverter.ToUInt32(originalSizeBuffer, 0);

            var compressedByteData = new byte[length];
            stream.Read(compressedByteData, 0, (int)length);

            var uncompressedByteData = new byte[originalSize];
            LZ4.Decode(compressedByteData, 0, (int)length, uncompressedByteData, 0, (int)originalSize, true);

            // Bug fix: the FileStream was previously leaked on this path —
            // once decoding is done the MemoryStream no longer needs the file.
            stream.Dispose();
            return(new MemoryStream(uncompressedByteData));

            // Deflate stream
            //return new CustomDeflateStream(stream, originalSize);
        }
        else
        {
            // Ownership of `stream` transfers to the sub-stream.
            return(new ArchiveSubstream(stream, length));
        }
    }
    catch
    {
        // Don't leak the file handle if anything above throws.
        stream.Dispose();
        throw;
    }
}
// Receives a shipped transaction: reads its compressed pages from the stream,
// verifies the CRC and the previous-transaction CRC chain, decompresses into
// the pager, records the new page numbers, and advances the current page and
// last-seen transaction header.
protected void ReadFromShippedTransaction(TransactionToShip transaction)
{
    // Pages occupied by the compressed payload (ceiling division).
    var compressedPages = (transaction.Header.CompressedSize / AbstractPager.PageSize) + (transaction.Header.CompressedSize % AbstractPager.PageSize == 0 ? 0 : 1);
    var compressedDataBuffer = new byte[compressedPages * AbstractPager.PageSize];
    transaction.CompressedData.Read(compressedDataBuffer, 0, compressedPages * AbstractPager.PageSize);
    fixed(byte *compressedDataBufferPtr = compressedDataBuffer)
    {
        // Validate both this transaction's CRC and the chain to its predecessor.
        var crc = Crc.Value(compressedDataBufferPtr, 0, compressedPages * AbstractPager.PageSize);
        if (transaction.Header.Crc != crc || _previousTransactionCrc != transaction.PreviousTransactionCrc)
        {
            throw new InvalidDataException("Invalid CRC signature for transaction " + transaction.Header.TransactionId);
        }
        _previousTransactionCrc = crc;
        var totalPages = transaction.Header.PageCount + transaction.Header.OverflowPageCount;
        _pager.EnsureContinuous(null, currentPage, totalPages + 1);
        try
        {
            LZ4.Decode64(compressedDataBufferPtr, transaction.Header.CompressedSize, _pager.AcquirePagePointer(currentPage), transaction.Header.UncompressedSize, true);
        }
        catch (Exception e)
        {
            // Wrap decoder failures so callers see a data error, not an LZ4 internal.
            throw new InvalidDataException("Could not de-compress, invalid data", e);
        }
    }
    // Register the freshly written pages.
    var lastAddedPage = currentPage + transaction.Header.PageCount;
    for (int pageNumber = currentPage; pageNumber < lastAddedPage; pageNumber++)
    {
        _pageNumbers.Add(pageNumber);
    }
    // Track the newest transaction header seen so far.
    if (LastTransactionHeader.HasValue && LastTransactionHeader.Value.TransactionId < transaction.Header.TransactionId)
    {
        LastTransactionHeader = transaction.Header;
    }
    currentPage = lastAddedPage;
}