private byte[] ReadStream(Stream stream, string path)
{
    var key = FileList.GetIndex(path);
    if (key == -1)
    {
        throw new FileNotFoundException("Not found in archive (" + ArchivePath + "): " + path);
    }

    var entry = FileIndex.Entries[key];
    if (entry.Length > int.MaxValue)
    {
        throw new NotImplementedException("Unable to read large file: " + path);
    }

    using (var reader = new BinaryReader(stream))
    {
        stream.Seek(entry.Location, SeekOrigin.Begin);

        if (entry.Compression == 1)
        {
            var length = reader.ReadUInt32();
            return Lzss.Decompress(reader.ReadBytes((int)length));
        }

        if (entry.Compression == 2)
        {
            var length = reader.ReadUInt32();
            return Lz4.Decompress(reader.ReadBytes((int)length));
        }

        return reader.ReadBytes((int)entry.Length);
    }
}
public void Serialize(BsonWriter bsonWriter, Type nominalType, object value, IBsonSerializationOptions options)
{
    var before = LittleEndian.DoublesToBytes(((Vec)value).ToArray());
    var after = Lz4.CompressBytes(before, Lz4Mode.HighCompression);
    bsonWriter.WriteBinaryData(new BsonBinaryData(after, BsonBinarySubType.Binary));
}
public unsafe void NominaleTest()
{
    const int size = 16 * Mem.KB;
    var count = size / sizeof(DoubleEntry);
    var chunk = Factory.CreateDoubleChunk(count);
    var buffer = new byte[size];
    var result = new DoubleEntry[count];

    fixed (byte* o = buffer)
    {
        int compressedSize;
        fixed (DoubleEntry* i = chunk)
        {
            var pi = (byte*)i;
            var po = o;
            compressedSize = Lz4.LZ4_compress_fast(pi, po, size, (int)Lz4.MaximumOutputLength(size), 1);
        }

        Console.WriteLine("Compression ratio: x {0}", size / (double)compressedSize);

        fixed (DoubleEntry* r = result)
        {
            var ip = o;
            var op = (byte*)r;
            Lz4.LZ4_decompress_fast(ref ip, ref op, compressedSize);
        }
    }

    result.Check(chunk);
}
private static HttpResponseMessage GetResponse(LineDTO[] lines)
{
    var json = JsonConvert.SerializeObject(lines);
    var compressed = Lz4.CompressString(json);

    return new HttpResponseMessage
    {
        Content = new StringContent(compressed, Encoding.UTF8, "text/html")
    };
}
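// A minimal sketch of the consuming side of GetResponse above, assuming the client has
// access to the same Lz4 wrapper. The method name is hypothetical; only
// Lz4.DecompressString and JsonConvert come from the surrounding snippets.
private static async Task<LineDTO[]> ReadResponseAsync(HttpResponseMessage response)
{
    // The body is an LZ4-compressed string, so read it as text first...
    var compressed = await response.Content.ReadAsStringAsync();

    // ...then reverse the CompressString/DecompressString round trip and parse the JSON.
    var json = Lz4.DecompressString(compressed);
    return JsonConvert.DeserializeObject<LineDTO[]>(json);
}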
public override void Flush()
{
    if (_writeBufferOffset <= 0)
    {
        return;
    }

    var count = Lz4.Compress(_writeBuffer, 0, _writeBufferOffset, ref _compressedBuffer, _compressionMode);
    _targetStream.Write(_compressedBuffer, 0, count);
    CompressedLength += count;
    _writeBufferOffset = 0;
}
private Result ReadSegmentImpl(ref NsoHeader.SegmentHeader segment, uint fileSize, Buffer32 fileHash, bool isCompressed, bool checkHash, Span<byte> buffer)
{
    // Select read size based on compression.
    if (!isCompressed)
    {
        fileSize = segment.Size;
    }

    // Validate size.
    if (fileSize > segment.Size)
    {
        return ResultLoader.InvalidNso.Log();
    }

    // Load data from file.
    uint loadAddress = isCompressed ? (uint)buffer.Length - fileSize : 0;

    Result rc = NsoFile.Read(out long bytesRead, segment.FileOffset, buffer.Slice((int)loadAddress), ReadOption.None);
    if (rc.IsFailure())
    {
        return rc;
    }

    if (bytesRead != fileSize)
    {
        return ResultLoader.InvalidNso.Log();
    }

    // Uncompress if necessary.
    if (isCompressed)
    {
        // todo: Fix in-place decompression
        // Lz4.Decompress(buffer.Slice((int)loadAddress), buffer);
        byte[] decomp = Lz4.Decompress(buffer.Slice((int)loadAddress).ToArray(), buffer.Length);
        decomp.CopyTo(buffer);
    }

    // Check hash if necessary.
    if (checkHash)
    {
        Buffer32 hash = default;
        Crypto.Sha256.GenerateSha256Hash(buffer.Slice(0, (int)segment.Size), hash.Bytes);

        if (hash.Bytes.SequenceCompareTo(fileHash.Bytes) != 0)
        {
            return ResultLoader.InvalidNso.Log();
        }
    }

    return Result.Success;
}
void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    //var testFixture = new QuqeTest.EvolutionTests();
    //testFixture.NoEvolveLeaks();

    var originalText = "I came here for an argument";
    var originalBytes = Encoding.UTF8.GetBytes(originalText);
    var compressed = Lz4.CompressBytes(originalBytes);
    var decompressed = Lz4.DecompressBytes(compressed);
    var decompressedText = Encoding.UTF8.GetString(decompressed);
    var decompressTry = Lz4.DecompressBytes(originalBytes);

    Trace.WriteLine("original    : " + originalText);
    Trace.WriteLine("decompressed: " + decompressedText);
}
public ExtraCompressStream([NotNull] Stream targetStream, int bufferSize = 0x100000, Lz4Mode compressionMode = 0, bool closeStream = false)
    : base(
        targetStream,
        new byte[bufferSize],
        new byte[Lz4.LZ4_compressBound(bufferSize)],
        compressionMode,
        closeStream)
{
    if (targetStream == null)
    {
        throw new ArgumentNullException(nameof(targetStream));
    }

    if (bufferSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(bufferSize));
    }
}
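// A minimal usage sketch for ExtraCompressStream, assuming it derives from Stream as the
// Flush override above suggests. The file name and payload are hypothetical; the
// constructor arguments match the signature shown.
using (var file = File.Create("data.lz4"))
using (var lz4 = new ExtraCompressStream(file, bufferSize: 0x100000, compressionMode: Lz4Mode.HighCompression))
{
    var payload = Encoding.UTF8.GetBytes("some payload worth compressing");
    lz4.Write(payload, 0, payload.Length);
    lz4.Flush(); // emits one compressed block to the target stream
}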
static Vec Deserialize(BsonReader bsonReader)
{
    var bs = bsonReader.ReadBinaryData().Bytes;

    byte[] decompressed;
    try
    {
        decompressed = Lz4.DecompressBytes(bs);
    }
    catch (Exception)
    {
        // Fall back to the raw bytes when the payload was stored uncompressed.
        decompressed = bs;
    }

    RecordCompression(bs.Length, decompressed.Length);
    return new DenseVector(LittleEndian.BytesToDoubles(decompressed));
}
public void StringCompression_SimpleTest()
{
    var txt = sampleTxt + sampleTxt + sampleTxt + sampleTxt + sampleTxt + sampleTxt;
    var comp = Lz4.CompressString(txt);
    var txt2 = Lz4.DecompressString(comp);
    Assert.IsTrue(txt == txt2);

    int sz1 = comp.Length;
    Assert.IsTrue(sz1 < txt.Length);

    comp = Lz4.CompressString(txt, Lz4Mode.HighCompression);
    txt2 = Lz4.DecompressString(comp);
    Assert.IsTrue(txt == txt2);
    Assert.IsTrue(sz1 >= comp.Length);
}
private void Fill()
{
    if (_eof)
    {
        return;
    }

    // Each block starts with an 8-byte header that encodes the compressed size.
    int num = _targetStream.Read(_header, 0, 8);
    if (num == 0)
    {
        _unpackedBuffer = null;
        _eof = true;
        return;
    }

    if (num != 8)
    {
        throw new InvalidDataException("input buffer corrupted (header)");
    }

    int compressedSize = Lz4.GetCompressedSize(_header);
    if (compressedSize == 0)
    {
        _unpackedBuffer = null;
        _eof = true;
        return;
    }

    // Grow the read buffer if needed, then copy the header and read the body after it.
    if ((_readBuffer == null) || (_readBuffer.Length < (compressedSize + 8)))
    {
        _readBuffer = new byte[compressedSize + 8];
    }

    Buffer.BlockCopy(_header, 0, _readBuffer, 0, 8);

    if (_targetStream.Read(_readBuffer, 8, compressedSize) != compressedSize)
    {
        throw new InvalidDataException("input buffer corrupted (body)");
    }

    _unpackedLength = Lz4.Decompress(_readBuffer, 0, ref _unpackedBuffer);
    _unpackedOffset = 0;
    CompressedLength += _unpackedLength;
}
private void WorkToDo()
{
    var compressor = new Lz4();

    while (!_isCanceled)
    {
        byte[] jsonBytes = new byte[0];
        byte[] compressedBytes = new byte[0];
        long alertListCount = 0;
        long eventListCount = 0;

        try
        {
            var alerts = ConsumeInSmallBatch();

            // Return to the while loop if there is nothing to save.
            if (alerts.Item1.Count == 0)
            {
                continue;
            }

            // Debug counters in case anything goes wrong below.
            alertListCount = alerts.Item1.Count;
            eventListCount = alerts.Item1.Sum(x => x.Events.Length);

            string json = JsonConvert.SerializeObject(alerts.Item1);
            jsonBytes = Encoding.Unicode.GetBytes(json);
            compressedBytes = compressor.Compress(jsonBytes);
            _proxy.Produce(compressedBytes);
        }
        catch (Exception ex)
        {
            _exLog.Error($"alertListCount: {alertListCount} ");
            _exLog.Error($"eventListCount: {eventListCount} ");
            _exLog.Error($"jsonBytes: {jsonBytes.Length} ");
            _exLog.Error($"compressedBytes: {compressedBytes.Length} ");
            _exLog.Error(ex);
        }
    }
}
/// <summary>
/// Decompresses an LZ4-compressed byte array.
/// </summary>
/// <param name="data">The compressed data.</param>
/// <returns>The decompressed bytes.</returns>
public static byte[] Lz4Decompress(this byte[] data)
{
    return Lz4.DecompressBytes(data);
}
/// <summary>
/// Compresses a byte array with LZ4 in high-compression mode.
/// </summary>
/// <param name="data">The data to compress.</param>
/// <returns>The compressed bytes.</returns>
public static byte[] Lz4Compress(this byte[] data)
{
    return Lz4.CompressBytes(data, Lz4Mode.HighCompression);
}
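// A minimal round-trip sketch for the two extension methods above. Only the sample
// string is new; Lz4Compress and Lz4Decompress are the methods just defined.
var original = Encoding.UTF8.GetBytes("the quick brown fox jumps over the lazy dog");
var packed = original.Lz4Compress();
var unpacked = packed.Lz4Decompress();
Debug.Assert(original.SequenceEqual(unpacked)); // lossless round trip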
public Nso(Stream Input)
{
    BinaryReader Reader = new BinaryReader(Input);

    Input.Seek(0, SeekOrigin.Begin);

    int NsoMagic = Reader.ReadInt32();
    int Version = Reader.ReadInt32();
    int Reserved = Reader.ReadInt32();
    int FlagsMsk = Reader.ReadInt32();
    int TextOffset = Reader.ReadInt32();
    int TextMemOffset = Reader.ReadInt32();
    int TextDecSize = Reader.ReadInt32();
    int ModNameOffset = Reader.ReadInt32();
    int ROOffset = Reader.ReadInt32();
    int ROMemOffset = Reader.ReadInt32();
    int RODecSize = Reader.ReadInt32();
    int ModNameSize = Reader.ReadInt32();
    int DataOffset = Reader.ReadInt32();
    int DataMemOffset = Reader.ReadInt32();
    int DataDecSize = Reader.ReadInt32();
    int BssSize = Reader.ReadInt32();

    byte[] BuildId = Reader.ReadBytes(0x20);

    int TextSize = Reader.ReadInt32();
    int ROSize = Reader.ReadInt32();
    int DataSize = Reader.ReadInt32();

    Input.Seek(0x24, SeekOrigin.Current);

    int DynStrOffset = Reader.ReadInt32();
    int DynStrSize = Reader.ReadInt32();
    int DynSymOffset = Reader.ReadInt32();
    int DynSymSize = Reader.ReadInt32();

    byte[] TextHash = Reader.ReadBytes(0x20);
    byte[] ROHash = Reader.ReadBytes(0x20);
    byte[] DataHash = Reader.ReadBytes(0x20);

    NsoFlags Flags = (NsoFlags)FlagsMsk;

    this.TextOffset = TextMemOffset;
    this.ROOffset = ROMemOffset;
    this.DataOffset = DataMemOffset;
    this.BssSize = BssSize;

    //Text segment (note: "|| true" bypasses the flag check, so every segment is decompressed)
    Input.Seek(TextOffset, SeekOrigin.Begin);
    m_Text = Reader.ReadBytes(TextSize);
    if (Flags.HasFlag(NsoFlags.IsTextCompressed) || true)
    {
        m_Text = Lz4.Decompress(m_Text, TextDecSize);
    }

    //Read-only data segment
    Input.Seek(ROOffset, SeekOrigin.Begin);
    m_RO = Reader.ReadBytes(ROSize);
    if (Flags.HasFlag(NsoFlags.IsROCompressed) || true)
    {
        m_RO = Lz4.Decompress(m_RO, RODecSize);
    }

    //Data segment
    Input.Seek(DataOffset, SeekOrigin.Begin);
    m_Data = Reader.ReadBytes(DataSize);
    if (Flags.HasFlag(NsoFlags.IsDataCompressed) || true)
    {
        m_Data = Lz4.Decompress(m_Data, DataDecSize);
    }

    using (MemoryStream Text = new MemoryStream(m_Text))
    {
        BinaryReader TextReader = new BinaryReader(Text);

        Text.Seek(4, SeekOrigin.Begin);

        Mod0Offset = TextReader.ReadInt32();
    }
}
//Fast LZ4-based compression, used for XimIndex, XimDocument and XimData compression.
public LZ4Compressor()
{
    // Touch the Lz4 API once up front (presumably to force its initialization).
    Lz4.GetCompressedSize(new byte[] { });
}
public NxStaticObject(Stream input)
{
    BinaryReader reader = new BinaryReader(input);

    input.Seek(0, SeekOrigin.Begin);

    int nsoMagic = reader.ReadInt32();
    int version = reader.ReadInt32();
    int reserved = reader.ReadInt32();
    int flagsMsk = reader.ReadInt32();
    int textOffset = reader.ReadInt32();
    int textMemOffset = reader.ReadInt32();
    int textDecSize = reader.ReadInt32();
    int modNameOffset = reader.ReadInt32();
    int roOffset = reader.ReadInt32();
    int roMemOffset = reader.ReadInt32();
    int roDecSize = reader.ReadInt32();
    int modNameSize = reader.ReadInt32();
    int dataOffset = reader.ReadInt32();
    int dataMemOffset = reader.ReadInt32();
    int dataDecSize = reader.ReadInt32();
    int bssSize = reader.ReadInt32();

    byte[] buildId = reader.ReadBytes(0x20);

    int textSize = reader.ReadInt32();
    int roSize = reader.ReadInt32();
    int dataSize = reader.ReadInt32();

    input.Seek(0x24, SeekOrigin.Current);

    int dynStrOffset = reader.ReadInt32();
    int dynStrSize = reader.ReadInt32();
    int dynSymOffset = reader.ReadInt32();
    int dynSymSize = reader.ReadInt32();

    byte[] textHash = reader.ReadBytes(0x20);
    byte[] roHash = reader.ReadBytes(0x20);
    byte[] dataHash = reader.ReadBytes(0x20);

    NsoFlags flags = (NsoFlags)flagsMsk;

    TextOffset = textMemOffset;
    RoOffset = roMemOffset;
    DataOffset = dataMemOffset;
    BssSize = bssSize;

    //Text segment
    input.Seek(textOffset, SeekOrigin.Begin);
    Text = reader.ReadBytes(textSize);
    if (flags.HasFlag(NsoFlags.IsTextCompressed) && textSize != 0)
    {
        Text = Lz4.Decompress(Text, textDecSize);
    }

    //Read-only data segment
    input.Seek(roOffset, SeekOrigin.Begin);
    Ro = reader.ReadBytes(roSize);
    if (flags.HasFlag(NsoFlags.IsRoCompressed) && roSize != 0)
    {
        Ro = Lz4.Decompress(Ro, roDecSize);
    }

    //Data segment
    input.Seek(dataOffset, SeekOrigin.Begin);
    Data = reader.ReadBytes(dataSize);
    if (flags.HasFlag(NsoFlags.IsDataCompressed) && dataSize != 0)
    {
        Data = Lz4.Decompress(Data, dataDecSize);
    }
}
static void Main(string[] args)
{
    // todo:
    // In production make one of these a runtime variable
    // so there is no overlap if multiple processes are running

    /*
     * using (var cache = new SubscriberTable())
     * {
     *     cache.Subscribe("sub1", new[] { "chan1", "chan2" });
     *     cache.Subscribe("sub1", new[] { "chanx", "chan2" });
     *     cache.Unsubscribe("sub1", "chan2");
     *     var sub1Chans = cache.GetSubscriberChannels("sub1").ToList();
     *
     *     cache.Subscribe("sub2", new[] { "chan1", "chan3" });
     *     cache.Subscribe("sub3", new[] { "chan1", "chan4" });
     *
     *     cache.UnsubscribeAll("sub2");
     *
     *     var chanXSubs = cache.GetSubscribers("chanx").ToList();
     *     var chan1Subs = cache.GetSubscribers("chan1").ToList();
     *     var chan2Subs = cache.GetSubscribers("chan2").ToList();
     *     var chan3Subs = cache.GetSubscribers("chan3").ToList();
     *     var chan4Subs = cache.GetSubscribers("chan4").ToList();
     *
     *     Console.ReadLine();
     * }
     */

    using (var cache = new CacheTable(256))
    {
        // Test the RAM usage:
        // add 12,000 88kb sample pages.
        var bytes = System.IO.File.ReadAllBytes("Cache\\cache-sample.html");

        var sw = System.Diagnostics.Stopwatch.StartNew();

        for (int i = 0; i < 12000; i++)
        {
            var test = Lz4.CompressBytes(bytes, 0, bytes.Length, Lz4Mode.Fast);

            // Todo:
            // Make sure they have at least 3 dependencies
            cache.Set(
                $"hash:test:{i}",
                test,
                new CacheEntryOptions()
                    // .SetSlidingExpiration(TimeSpan.FromSeconds(5))
                    .RegisterPostEvictionCallback(OnPostEvication)
            );
        }

        Console.WriteLine($"Writing 12k records took {sw.Elapsed.TotalSeconds}");
        sw.Restart();

        for (int i = 0; i < 12000; i++)
        {
            var data = cache.GetData($"hash:test:{i}");
            var test = Lz4.DecompressBytes(data);
        }

        Console.WriteLine($"Reading 12k records took {sw.Elapsed.TotalSeconds}");
        sw.Restart();

        // Access the first 1000 a few times to cache them
        for (int k = 0; k < 12; k++)
        {
            for (int i = 0; i < 1000; i++)
            {
                var data = cache.GetData($"hash:test:{i}");
                var test = Lz4.DecompressBytes(data);
            }
        }

        Console.WriteLine($"Reading 1k hot records 12 times took {sw.Elapsed.TotalMilliseconds}");
        sw.Restart();

        Console.ReadLine();

        cache.Set(
            $"hash:test:slide5",
            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 },
            new CacheEntryOptions()
                .SetSlidingExpiration(TimeSpan.FromSeconds(5))
                .RegisterPostEvictionCallback(OnPostEvication)
        );

        cache.Set(
            $"hash:test:slide10",
            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 },
            new CacheEntryOptions()
                .SetSlidingExpiration(TimeSpan.FromSeconds(10))
                .RegisterPostEvictionCallback(OnPostEvication)
        );

        cache.Set(
            $"hash:test:slide60",
            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 },
            new CacheEntryOptions()
                .SetSlidingExpiration(TimeSpan.FromSeconds(60))
                .RegisterPostEvictionCallback(OnPostEvication)
        );

        cache.Set(
            $"hash:test",
            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 },
            new CacheEntryOptions()
                .SetAbsoluteExpiration(TimeSpan.FromSeconds(5))
                .RegisterPostEvictionCallback(OnPostEvication)
        );

        cache.Set(
            $"hash:test1",
            new byte[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            new CacheEntryOptions()
                .SetAbsoluteExpiration(TimeSpan.FromSeconds(10))
                .RegisterPostEvictionCallback(OnPostEvication)
        );

        cache.Set(
            $"hash:test2",
            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 },
            new CacheEntryOptions()
                .SetAbsoluteExpiration(TimeSpan.FromSeconds(60))
                .RegisterPostEvictionCallback(OnAnotherPostEvication)
        );

        cache.Set(
            $"hash:test3",
            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 },
            new CacheEntryOptions()
                .RegisterPostEvictionCallback(OnAnotherPostEvication)
        );

        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(10));
        cache.StartScanForExpiredItems(true);

        Console.ReadLine();

        // Test the cleanup; should only remove one.
        // cache.GetData()
        cache.RemoveByDependency("dep:0001");

        // RemoveByKey
        // RemoveByDependency
        // GetByKey
        // Cursor
        //  -> GetPostEvictionDelegate
        //  ->

        // Remove all the test keys which remain
        for (int i = 0; i < 12000; i++)
        {
            cache.RemoveByKey($"{i}");
        }

        // Dump the whole cache without callbacks
        cache.Clear();
    }
}