/// <summary>
/// Reads an object from the packer, undoing LZ4 compression and/or decryption
/// applied by the matching ToStream overload.
/// </summary>
/// <param name="packer">Packer positioned at the serialized payload.</param>
/// <param name="decrypter">Optional decrypter; may be null.</param>
/// <returns>The deserialized object.</returns>
public object FromStream(ref BytePacker packer, IDecrypter decrypter)
{
    BytePacker source = packer;

    if (Compression == Compression.LZ4)
    {
        // Payload is an LZ4-pickled byte array; decrypt (if requested) after unpickling.
        byte[] encoded = (byte[])byteArrayConverter.Deserialize(source);
        byte[] payload = LZ4Pickler.Unpickle(encoded);
        if (decrypter != null)
        {
            payload = decrypter.Decrypt(payload);
        }
        source = new BytePacker(payload);
    }
    else if (decrypter != null)
    {
        try
        {
            // Uncompressed but encrypted: unwrap the encrypted byte array first.
            byte[] payload = (byte[])encArrayConverter.Deserialize(source);
            payload = decrypter.Decrypt(payload);
            source = new BytePacker(payload);
        }
        catch (Exception e)
        {
            // Best effort: log and fall back to reading the original packer.
            Util.Log("FromStream:" + e.Message);
        }
    }

    return converter.Deserialize(source);
}
/// <summary>
/// Used during serialization to compress properties
/// </summary>
/// <param name="content"></param>
/// <param name="model"></param>
/// <param name="published"></param>
/// <remarks>
/// This will essentially 'double compress' property data. The MsgPack data as a whole will already be compressed
/// but this will go a step further and double compress property data so that it is stored in the nucache file
/// as compressed bytes and therefore will exist in memory as compressed bytes. That is, until the bytes are
/// read/decompressed as a string to be displayed on the front-end. This allows for potentially a significant
/// memory savings but could also affect performance of first rendering pages while decompression occurs.
/// </remarks>
private void Compress(IReadOnlyContentBase content, ContentCacheDataModel model, bool published)
{
    if (model.PropertyData is null)
    {
        return;
    }

    foreach (var propertyAliasToData in model.PropertyData)
    {
        if (!_propertyOptions.IsCompressed(content, propertyAliasToData.Key, published))
        {
            continue;
        }

        // String values become LZ4-pickled UTF-8 bytes.
        foreach (var property in propertyAliasToData.Value)
        {
            if (property.Value is string propertyValue)
            {
                property.Value = LZ4Pickler.Pickle(Encoding.UTF8.GetBytes(propertyValue), LZ4Level.L00_FAST);
            }
        }

        // Integer values are stored as booleans (non-zero => true).
        foreach (var property in propertyAliasToData.Value)
        {
            if (property.Value is int)
            {
                property.Value = Convert.ToBoolean((int?)property.Value);
            }
        }
    }
}
/// <summary>
/// Decompresses an LZ4-pickled region of a buffer.
/// </summary>
/// <param name="compressedProto">Buffer containing the pickled bytes.</param>
/// <param name="offset">Start of the pickled region.</param>
/// <param name="length">Length of the pickled region.</param>
/// <param name="outLength">Receives the decompressed length.</param>
/// <returns>The decompressed bytes.</returns>
public byte[] UnCompress(byte[] compressedProto, int offset, int length, out int outLength)
{
    byte[] raw = LZ4Pickler.Unpickle(compressedProto, offset, length);
    outLength = raw.Length;
    return raw;
}
/// <summary>
/// Benchmark body: compresses the selected corpus with LZ4 at the fastest level.
/// </summary>
/// <returns>Number of items compressed (stays 0 for the long-text case).</returns>
public int CompressLz4Fast()
{
    int count = 0;

    if (TestType == Types.ShortSentence)
    {
        foreach (var sentence in _sentences)
        {
            LZ4Pickler.Pickle(sentence, LZ4Level.L00_FAST);
            count++;
        }
    }

    if (TestType == Types.Word)
    {
        foreach (var word in _words)
        {
            LZ4Pickler.Pickle(word, LZ4Level.L00_FAST);
            count++;
        }
    }

    if (TestType == Types.LongText)
    {
        // Single blob; the counter intentionally stays untouched here.
        LZ4Pickler.Pickle(_longText, LZ4Level.L00_FAST);
    }

    return count;
}
/// <summary>
/// Benchmark body: compresses the selected corpus with LZ4 at the high/optimal level.
/// </summary>
/// <returns>Number of items compressed (stays 0 for the long-text case).</returns>
public int CompressLz4Opt()
{
    // Fix: removed the unused locals `int t` and an empty `Span<byte>` that
    // were dead weight left over in the original body.
    int a = 0;

    if (TestType == Types.ShortSentence)
    {
        for (int i = 0; i < _sentences.GetLength(0); i++)
        {
            LZ4Pickler.Pickle(_sentences[i], LZ4Level.L10_OPT);
            a++;
        }
    }

    if (TestType == Types.Word)
    {
        for (int i = 0; i < _words.GetLength(0); i++)
        {
            LZ4Pickler.Pickle(_words[i], LZ4Level.L10_OPT);
            a++;
        }
    }

    if (TestType == Types.LongText)
    {
        // Single blob; counter intentionally not incremented (matches CompressLz4Fast).
        LZ4Pickler.Pickle(_longText, LZ4Level.L10_OPT);
    }

    return a;
}
/// <summary>
/// Lazily decodes the LZ4-compressed bytes into a string. Thread-safe; the
/// decoded value is cached and the compressed bytes are released after the
/// first successful call.
/// </summary>
/// <returns>The decompressed string.</returns>
/// <exception cref="InvalidOperationException">Throws if this is called more than once</exception>
public string DecompressString()
{
    var cached = _str;
    if (cached != null)
    {
        return cached;
    }

    lock (_locker)
    {
        // Double-checked: another thread may have decoded while we waited.
        if (_str == null)
        {
            if (_bytes == null)
            {
                throw new InvalidOperationException("Bytes have already been cleared");
            }

            _str = Encoding.UTF8.GetString(LZ4Pickler.Unpickle(_bytes));
            _bytes = null; // release the compressed copy; only the string is kept
        }

        return _str;
    }
}
/// <summary>
/// Round-trips Lorem-ipsum data through several Pickle/Unpickle overloads
/// (array, raw pointer, offset/length, and span) and verifies each result
/// matches the original bytes.
/// </summary>
/// <param name="length">Size of the test payload in bytes.</param>
/// <param name="level">LZ4 compression level to exercise.</param>
public unsafe void PickleLorem(int length, LZ4Level level = LZ4Level.L00_FAST)
{
    // Array-based overloads.
    var original = new byte[length];
    Lorem.Fill(original, 0, length);
    var pickled = LZ4Pickler.Pickle(original, level);
    var unpickled = LZ4Pickler.Unpickle(pickled);
    Tools.SameBytes(original, unpickled);

    // reiterating same test, but with different overloads (raw pointers)
    fixed(byte *p = original)
        pickled = LZ4Pickler.Pickle(p, original.Length, level);
    fixed(byte *p = pickled)
        unpickled = LZ4Pickler.Unpickle(p, pickled.Length);
    Tools.SameBytes(original, unpickled);

    // reiterating same test, but with offset (payload embedded mid-buffer)
    var copy = new byte[pickled.Length + 37];
    Array.Copy(pickled, 0, copy, 37, pickled.Length);
    unpickled = LZ4Pickler.Unpickle(copy, 37, pickled.Length);
    Tools.SameBytes(original, unpickled);

    // Span overload writing into a caller-supplied buffer; zero it first so
    // a no-op Unpickle would be detected by the comparison below.
    unpickled.AsSpan().Fill(0);
    LZ4Pickler.Unpickle(pickled.AsSpan(), unpickled.AsSpan());
    Tools.SameBytes(original, unpickled);
}
/// <summary>
/// LZ4-pickles <paramref name="bytes"/> into <paramref name="target"/> and
/// returns the written slice of <paramref name="target"/>.
/// </summary>
/// <param name="bytes">Bytes to compress.</param>
/// <param name="target">Destination buffer; must be larger than the pickled output.</param>
public Span<byte> Compress(Span<byte> bytes, Span<byte> target)
{
    byte[] pickled = LZ4Pickler.Pickle(bytes);
    Guard.Argument(pickled.Length).LessThan(target.Length);

    Unsafe.CopyBlock(ref target[0], ref pickled[0], (uint)pickled.Length);
    return target.Slice(0, pickled.Length);
}
/// <summary>
/// Round-trips the contents of a test file through Pickle/Unpickle and
/// verifies the bytes survive unchanged.
/// </summary>
/// <param name="filename">Test fixture file resolved via Tools.FindFile.</param>
public void PickleAndUnpickle(string filename)
{
    byte[] source = File.ReadAllBytes(Tools.FindFile(filename));

    byte[] roundTripped = LZ4Pickler.Unpickle(LZ4Pickler.Pickle(source));

    Tools.SameBytes(source, roundTripped);
}
/// <summary>
/// Decompresses a payload produced by the matching Zip helper. Only LZ4 is
/// supported; any other method yields null.
/// </summary>
public static byte[] Unzip(byte[] data, CompressionMethod method = CompressionMethod.Lz4)
{
    return method == CompressionMethod.Lz4
        ? LZ4Pickler.Unpickle(data)
        : null;
}
/// <summary>
/// Round-trips Lorem-ipsum bytes through Pickle/Unpickle and verifies the
/// payload survives unchanged.
/// </summary>
public void PickleLorem(int length, LZ4Level level = LZ4Level.L00_FAST)
{
    var payload = new byte[length];
    Lorem.Fill(payload, 0, length);

    var roundTripped = LZ4Pickler.Unpickle(LZ4Pickler.Pickle(payload, level));

    Tools.SameBytes(payload, roundTripped);
}
/// <summary>
/// Round-trips seeded random bytes through Pickle/Unpickle and verifies the
/// payload survives unchanged.
/// </summary>
public void PickleEntropy(int seed, int length, LZ4Level level = LZ4Level.L00_FAST)
{
    var payload = new byte[length];
    new Random(seed).NextBytes(payload);

    var roundTripped = LZ4Pickler.Unpickle(LZ4Pickler.Pickle(payload, level));

    Tools.SameBytes(payload, roundTripped);
}
/// <summary>
/// Serializes an object to binary bois format, then compresses it using the
/// LZ4 pickle self-contained format and writes the result to the stream.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="obj">Object to serialize.</param>
/// <param name="output">Destination stream for the compressed payload.</param>
/// <param name="lz4Level">Compression level</param>
public void Pickle<T>(T obj, Stream output, LZ4Level lz4Level)
{
    using (var buffer = new MemoryStream())
    {
        _serializer.Serialize(obj, buffer);

        // Compress only the written portion of the backing buffer — GetBuffer
        // may be larger than the actual content.
        byte[] compressed = LZ4Pickler.Pickle(buffer.GetBuffer(), 0, (int)buffer.Length, lz4Level);
        output.Write(compressed, 0, compressed.Length);
    }
}
/// <summary>
/// Used during serialization to compress properties
/// </summary>
/// <param name="content">Content whose property options decide what is compressed.</param>
/// <param name="model">Cache model whose property data is compressed in place.</param>
/// <remarks>
/// This will essentially 'double compress' property data. The MsgPack data as a whole will already be compressed
/// but this will go a step further and double compress property data so that it is stored in the nucache file
/// as compressed bytes and therefore will exist in memory as compressed bytes. That is, until the bytes are
/// read/decompressed as a string to be displayed on the front-end. This allows for potentially a significant
/// memory savings but could also affect performance of first rendering pages while decompression occurs.
/// </remarks>
private void Compress(IReadOnlyContentBase content, ContentCacheDataModel model)
{
    // Fix: guard against a null property collection to avoid a
    // NullReferenceException, consistent with the published overload of
    // Compress which already performs this check.
    if (model.PropertyData is null)
    {
        return;
    }

    foreach (var propertyAliasToData in model.PropertyData)
    {
        if (_propertyOptions.IsCompressed(content, propertyAliasToData.Key))
        {
            // Replace string values with LZ4-pickled UTF-8 bytes.
            foreach (var property in propertyAliasToData.Value.Where(x => x.Value != null && x.Value is string))
            {
                property.Value = LZ4Pickler.Pickle(Encoding.UTF8.GetBytes((string)property.Value), LZ4Level.L00_FAST);
            }
        }
    }
}
/// <summary>
/// Deserializes an object from the packer, transparently undoing LZ4
/// compression when it is enabled.
/// </summary>
/// <param name="packer">Packer positioned at the serialized payload.</param>
/// <returns>The deserialized object.</returns>
public object FromStream(ref BytePacker packer)
{
    if (Compression != Compression.LZ4)
    {
        return converter.Deserialize(packer);
    }

    // The stream holds an LZ4-pickled byte array wrapping the real payload.
    byte[] encoded = (byte[])lz4converter.Deserialize(packer);
    var unpacked = new BytePacker(LZ4Pickler.Unpickle(encoded));
    return converter.Deserialize(unpacked);
}
/// <summary>
/// Deserializing binary data to a new instance. Decompression is using LZ4 pickle self-contained format.
/// </summary>
/// <typeparam name="T">Type to deserialize into.</typeparam>
/// <param name="objectData">Compressed data expected</param>
/// <returns>The deserialized instance.</returns>
public T Unpickle <T>(Stream objectData)
{
    int length = 0;
    int offset = 0;
    byte[] compressedBuff = null;
    MemoryStream mem;
    using (mem = new MemoryStream())
    {
        // Fast path: if the source is already a MemoryStream, borrow its
        // backing buffer instead of copying the whole stream.
        if (objectData is MemoryStream outMem)
        {
#if NETCOREAPP || NETSTANDARD
            if (outMem.TryGetBuffer(out var arraySegment))
            {
                compressedBuff = arraySegment.Array;
                length = arraySegment.Count;
                offset = arraySegment.Offset;
            }
#else
            try
            {
                // GetBuffer throws when the stream was created over a
                // non-exposable buffer; fall through to the copy path then.
                compressedBuff = outMem.GetBuffer();
                length = (int)outMem.Length;
                offset = (int)outMem.Position;
            }
            catch (UnauthorizedAccessException)
            {
                // eat the error
            }
#endif
        }
        // Slow path: buffer not borrowable — copy the stream into our own MemoryStream.
        if (compressedBuff == null)
        {
            objectData.CopyTo(mem);
            compressedBuff = mem.GetBuffer();
            length = (int)mem.Length;
            offset = 0;
        }
        var serializedBuff = LZ4Pickler.Unpickle(compressedBuff, offset, length);
        // NOTE(review): `mem` is reassigned below; the using statement disposes
        // the original instance, not this replacement. MemoryStream disposal is
        // harmless either way, but worth confirming this is intentional.
        mem.Dispose();
        mem = new MemoryStream(serializedBuff);
        return(_serializer.Deserialize <T>(mem));
    }
}
/// <summary>
/// Verifies the array- and span-based Pickle overloads produce identical
/// output, and that both outputs unpickle back to the same bytes.
/// </summary>
public void PicklingSpansGivesIdenticalResults(int offset, int length)
{
    var source = new byte[offset + length + offset];
    Lorem.Fill(source, 0, source.Length);

    var fromArray = LZ4Pickler.Pickle(source, offset, length);
    var fromSpan = LZ4Pickler.Pickle(source.AsSpan(offset, length));

    Assert.Equal(fromArray, fromSpan);
    Assert.Equal(LZ4Pickler.Unpickle(fromArray), LZ4Pickler.Unpickle(fromSpan.AsSpan()));
}
/// <summary>
/// Serializes an object into the packer, LZ4-compressing the payload first
/// when compression is enabled.
/// </summary>
/// <param name="data">Object to serialize.</param>
/// <param name="packer">Destination packer.</param>
public void ToStream(object data, ref BytePacker packer)
{
    if (Compression != Compression.LZ4)
    {
        converter.Serialize(packer, data);
        return;
    }

    // Serialize into a scratch buffer, pickle it, then write the pickled
    // bytes as the payload.
    byte[] scratch = new byte[lz4converter.GetDataSize(data)];
    var scratchPacker = new BytePacker(scratch);
    lz4converter.Serialize(scratchPacker, data);
    converter.Serialize(packer, LZ4Pickler.Pickle(scratch));
}
/// <summary>
/// Compresses a payload with the requested method. Only LZ4 is supported;
/// any other method yields null.
/// </summary>
public static byte[] Zip(byte[] data, CompressionMethod method = CompressionMethod.Lz4)
{
    // Fix: removed a stale commented-out LZ4Codec.Encode experiment that was
    // left behind in the body.
    if (method == CompressionMethod.Lz4)
    {
        return LZ4Pickler.Pickle(data);
    }

    return null;
}
/// <summary>
/// Decompresses a payload according to the packet's compression mode.
/// Unknown modes return the input unchanged; Custom delegates to the
/// OnCustomDecompression hook.
/// </summary>
public static byte[] Decompress(this byte[] data, Internal.Packets.CompressionMode compression)
{
    switch (compression)
    {
        case Internal.Packets.CompressionMode.Deflate:
        {
            using (var input = new MemoryStream(data))
            using (var inflated = new MemoryStream())
            {
                using (var deflate = new DeflateStream(input, System.IO.Compression.CompressionMode.Decompress))
                {
                    deflate.CopyTo(inflated);
                }

                return inflated.ToArray();
            }
        }

        case Internal.Packets.CompressionMode.Gzip:
        {
            using (var compressed = new MemoryStream(data))
            using (var expanded = new MemoryStream())
            {
                // Buffered so CopyTo works in large chunks.
                using (var gzip = new BufferedStream(new GZipStream(compressed, System.IO.Compression.CompressionMode.Decompress), 64 * 1024))
                {
                    gzip.CopyTo(expanded);
                }

                return expanded.ToArray();
            }
        }

        case Internal.Packets.CompressionMode.LZ4:
            return LZ4Pickler.Unpickle(data);

        case Internal.Packets.CompressionMode.Custom:
            return OnCustomDecompression?.Invoke(data);

        default:
            return data;
    }
}
/// <summary>
/// Round-trips seeded random bytes through the IBufferWriter-based
/// Pickle/Unpickle overloads and verifies the payload survives unchanged.
/// </summary>
public void PickleEntropyWithBufferWriter(
    int seed, int length, LZ4Level level = LZ4Level.L00_FAST)
{
    var payload = new byte[length];
    new Random(seed).NextBytes(payload);

    var pickleSink = BufferWriter.New();
    var unpickleSink = BufferWriter.New();

    LZ4Pickler.Pickle(payload, pickleSink, level);
    LZ4Pickler.Unpickle(pickleSink.WrittenSpan, unpickleSink);

    Tools.SameBytes(payload, unpickleSink.WrittenSpan);
}
/// <summary>
/// Round-trips Lorem bytes through the IBufferWriter-based overloads and
/// verifies both overloads reject a null writer.
/// </summary>
public void PickleLoremWithBufferWriter(int length, LZ4Level level = LZ4Level.L00_FAST)
{
    var payload = new byte[length];
    Lorem.Fill(payload, 0, length);

    var pickleSink = BufferWriter.New();
    var unpickleSink = BufferWriter.New();

    // A null writer must be rejected on the pickle side...
    Assert.Throws<ArgumentNullException>(() => LZ4Pickler.Pickle(payload, null, level));

    LZ4Pickler.Pickle(payload, pickleSink, level);
    var pickled = pickleSink.WrittenSpan;

    // ...and on the unpickle side.
    Assert.Throws<ArgumentNullException>(() => LZ4Pickler.Unpickle(pickleSink.WrittenSpan, (IBufferWriter<byte>)null));

    LZ4Pickler.Unpickle(pickled, unpickleSink);
    Tools.SameBytes(payload, unpickleSink.WrittenSpan);
}
/// <summary>
/// Compresses a payload according to the packet's compression mode.
/// Unknown modes return the input unchanged; Custom delegates to the
/// OnCustomCompression hook.
/// </summary>
public static byte[] Compress(this byte[] data, Internal.Packets.CompressionMode compression)
{
    switch (compression)
    {
        case Internal.Packets.CompressionMode.Deflate:
        {
            using (var sink = new MemoryStream())
            {
                using (var deflate = new DeflateStream(sink, CompressionLevel.Fastest))
                {
                    deflate.Write(data, 0, data.Length);
                }

                return sink.ToArray(); //* Copy array.
            }
        }

        case Internal.Packets.CompressionMode.Gzip:
        {
            using (var sink = new MemoryStream())
            {
                // Buffered so writes hit the gzip stream in large chunks.
                using (var gzip = new BufferedStream(new GZipStream(sink, System.IO.Compression.CompressionMode.Compress), 64 * 1024))
                {
                    gzip.Write(data, 0, data.Length);
                }

                return sink.ToArray();
            }
        }

        case Internal.Packets.CompressionMode.LZ4:
            return LZ4Pickler.Pickle(data);

        case Internal.Packets.CompressionMode.Custom:
            return OnCustomCompression?.Invoke(data);

        default:
            return data;
    }
}
#pragma warning disable 1998
/// <summary>
/// Builds a Preset from parser metadata and stores its LZ4-compressed
/// payload keyed by source file.
/// </summary>
/// <param name="presetMetadata">Parsed preset metadata.</param>
/// <param name="data">Raw preset bytes.</param>
/// <param name="force">Unused here; kept for caller compatibility.</param>
public async Task PersistPreset(PresetParserMetadata presetMetadata, byte[] data, bool force = false)
#pragma warning restore 1998
{
    var preset = new Preset();
    preset.Plugin = presetMetadata.Plugin;
    preset.Plugin.Presets.Add(preset);
    preset.SetFromPresetParser(presetMetadata);
    preset.PresetHash = HashUtils.getIxxHash(data);
    preset.PresetSize = data.Length;
    // NOTE(review): compressed size is recorded as the raw length here —
    // presumably updated elsewhere after pickling; confirm.
    preset.PresetCompressedSize = data.Length;

    try
    {
        PresetData.Add(preset.OriginalMetadata.SourceFile, LZ4Pickler.Pickle(data));
    }
    catch (Exception)
    {
        Console.WriteLine($"Error while trying to add {preset.OriginalMetadata.SourceFile}");
        // Fix: rethrow with `throw;` instead of `throw e;` so the original
        // stack trace is preserved.
        throw;
    }
}
/// <summary>
/// Serializes an object into the packer, applying optional encryption and
/// LZ4 compression; the matching FromStream overload reverses the steps.
/// </summary>
/// <param name="data">Object to serialize.</param>
/// <param name="packer">Destination packer.</param>
/// <param name="encrypter">Optional encrypter; may be null.</param>
public void ToStream(object data, ref BytePacker packer, IEncrypter encrypter)
{
    if (Compression == Compression.LZ4)
    {
        // Serialize -> (encrypt) -> pickle -> write as a byte array.
        byte[] payload = new byte[converter.GetDataSize(data)];
        var payloadPacker = new BytePacker(payload);
        converter.Serialize(payloadPacker, data);

        if (encrypter != null)
        {
            payload = encrypter.Encrypt(payload);
        }

        byteArrayConverter.Serialize(packer, LZ4Pickler.Pickle(payload));
        return;
    }

    if (encrypter == null)
    {
        converter.Serialize(packer, data);
        return;
    }

    // Uncompressed but encrypted: serialize, encrypt, write as a byte array.
    byte[] plain = new byte[converter.GetDataSize(data)];
    var plainPacker = new BytePacker(plain);
    converter.Serialize(plainPacker, data);
    encArrayConverter.Serialize(packer, encrypter.Encrypt(plain));
}
/// <summary>
/// Verifies that truncated or corrupted pickled data is rejected with
/// InvalidDataException rather than silently producing garbage.
/// </summary>
public void Corruption()
{
    var source = new byte[1234];
    Lorem.Fill(source, 0, source.Length);

    var pickled = LZ4Pickler.Pickle(source);
    var pristine = pickled.AsSpan().ToArray();
    var output = source.AsSpan().ToArray();

    // pass a buffer that's too short
    Assert.Throws<InvalidDataException>(() => LZ4Pickler.Unpickle(pickled.AsSpan().Slice(0, 2), output));
    Assert.Throws<InvalidDataException>(() => LZ4Pickler.UnpickledSize(pickled.AsSpan().Slice(0, 2)));

    // corrupt the version
    pickled[0] = 0xff;
    Assert.Throws<InvalidDataException>(() => LZ4Pickler.Unpickle(pickled, output));
    Assert.Throws<InvalidDataException>(() => _ = LZ4Pickler.UnpickledSize(pickled));

    // corrupt the size (after restoring the version byte)
    pickled[0] = pristine[0];
    pickled[1] = 0xff;
    Assert.Throws<InvalidDataException>(() => LZ4Pickler.Unpickle(pickled, output));
}
/// <summary>Inflates an LZ4-pickled buffer back to its original bytes.</summary>
public static byte[] Decompress(this byte[] input) => LZ4Pickler.Unpickle(input);
/// <summary>Compresses a buffer into the LZ4 self-contained pickle format.</summary>
public static byte[] Compress(this byte[] input) => LZ4Pickler.Pickle(input);
/// <summary>
/// Decompresses LZ4-pickled data; the work is synchronous, so the returned
/// task is already completed.
/// </summary>
public Task<byte[]> DecompressAsync(ReadOnlyMemory<byte> data)
{
    byte[] result = LZ4Pickler.Unpickle(data.ToArray());
    return Task.FromResult(result);
}
/// <summary>Decompresses LZ4-pickled data held in a read-only memory buffer.</summary>
public byte[] Decompress(ReadOnlyMemory<byte> data) => LZ4Pickler.Unpickle(data.ToArray());