Example #1
        public int CompressLz4Opt()
        {
            int a = 0;

            if (TestType == Types.ShortSentence)
            {
                for (int i = 0; i < _sentences.GetLength(0); i++)
                {
                    LZ4Pickler.Pickle(_sentences[i], LZ4Level.L10_OPT);
                    a++;
                }
            }
            if (TestType == Types.Word)
            {
                for (int i = 0; i < _words.GetLength(0); i++)
                {
                    LZ4Pickler.Pickle(_words[i], LZ4Level.L10_OPT);
                    a++;
                }
            }

            if (TestType == Types.LongText)
            {
                LZ4Pickler.Pickle(_longText, LZ4Level.L10_OPT);
            }
            return a;
        }
Example #2
        public unsafe void PickleLorem(int length, LZ4Level level = LZ4Level.L00_FAST)
        {
            var original = new byte[length];

            Lorem.Fill(original, 0, length);

            var pickled   = LZ4Pickler.Pickle(original, level);
            var unpickled = LZ4Pickler.Unpickle(pickled);

            Tools.SameBytes(original, unpickled);

            // reiterating same test, but with different overloads
            fixed (byte* p = original)
                pickled = LZ4Pickler.Pickle(p, original.Length, level);

            fixed (byte* p = pickled)
                unpickled = LZ4Pickler.Unpickle(p, pickled.Length);

            Tools.SameBytes(original, unpickled);

            // reiterating same test, but with offset
            var copy = new byte[pickled.Length + 37];

            Array.Copy(pickled, 0, copy, 37, pickled.Length);
            unpickled = LZ4Pickler.Unpickle(copy, 37, pickled.Length);
            Tools.SameBytes(original, unpickled);

            unpickled.AsSpan().Fill(0);
            LZ4Pickler.Unpickle(pickled.AsSpan(), unpickled.AsSpan());
            Tools.SameBytes(original, unpickled);
        }
Example #3
        public int CompressLz4Fast()
        {
            int a = 0;

            if (TestType == Types.ShortSentence)
            {
                for (int i = 0; i < _sentences.GetLength(0); i++)
                {
                    LZ4Pickler.Pickle(_sentences[i], LZ4Level.L00_FAST);
                    a++;
                }
            }
            if (TestType == Types.Word)
            {
                for (int i = 0; i < _words.GetLength(0); i++)
                {
                    LZ4Pickler.Pickle(_words[i], LZ4Level.L00_FAST);
                    a++;
                }
            }

            if (TestType == Types.LongText)
            {
                LZ4Pickler.Pickle(_longText, LZ4Level.L00_FAST);
            }
            return a;
        }
Example #4
        /// <summary>
        /// Used during serialization to compress properties
        /// </summary>
        /// <param name="content"></param>
        /// <param name="model"></param>
        /// <param name="published"></param>
        /// <remarks>
        /// This will essentially 'double compress' property data. The MsgPack data as a whole will already be compressed
        /// but this will go a step further and double compress property data so that it is stored in the nucache file
        /// as compressed bytes and therefore will exist in memory as compressed bytes. That is, until the bytes are
        /// read/decompressed as a string to be displayed on the front-end. This allows for potentially a significant
        /// memory savings but could also affect performance of first rendering pages while decompression occurs.
        /// </remarks>
        private void Compress(IReadOnlyContentBase content, ContentCacheDataModel model, bool published)
        {
            if (model.PropertyData is null)
            {
                return;
            }

            foreach (var propertyAliasToData in model.PropertyData)
            {
                if (_propertyOptions.IsCompressed(content, propertyAliasToData.Key, published))
                {
                    foreach (var property in propertyAliasToData.Value.Where(x => x.Value != null && x.Value is string))
                    {
                        if (property.Value is string propertyValue)
                        {
                            property.Value = LZ4Pickler.Pickle(Encoding.UTF8.GetBytes(propertyValue), LZ4Level.L00_FAST);
                        }
                    }

                    foreach (var property in propertyAliasToData.Value.Where(x => x.Value != null && x.Value is int intVal))
                    {
                        property.Value = Convert.ToBoolean((int?)property.Value);
                    }
                }
            }
        }
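For context, the read path reverses this step: when a compressed property is first accessed, the pickled bytes are unpickled and decoded back to a string. Below is a minimal sketch of that inverse, assuming a hypothetical `Decompress` helper over the same `ContentCacheDataModel` shape (not part of the original source):

        // Hypothetical read-side counterpart (illustration only): restores each
        // LZ4-pickled property value back to the original UTF-8 string.
        private void Decompress(ContentCacheDataModel model)
        {
            if (model.PropertyData is null)
            {
                return;
            }

            foreach (var propertyAliasToData in model.PropertyData)
            {
                foreach (var property in propertyAliasToData.Value)
                {
                    if (property.Value is byte[] compressedBytes)
                    {
                        // Unpickle reads the self-contained pickle header and returns the original bytes.
                        property.Value = Encoding.UTF8.GetString(LZ4Pickler.Unpickle(compressedBytes));
                    }
                }
            }
        }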
Example #5
        public Span<byte> Compress(Span<byte> bytes, Span<byte> target)
        {
            var result = LZ4Pickler.Pickle(bytes);

            Guard.Argument(result.Length).LessThan(target.Length);
            Unsafe.CopyBlock(ref target[0], ref result[0], (uint)result.Length);
            return target.Slice(0, result.Length);
        }
Example #6
        public void PickleAndUnpickle(string filename)
        {
            var original = File.ReadAllBytes(Tools.FindFile(filename));

            var compressed   = LZ4Pickler.Pickle(original);
            var decompressed = LZ4Pickler.Unpickle(compressed);

            Tools.SameBytes(original, decompressed);
        }
Example #7
        public void PickleLorem(int length, LZ4Level level = LZ4Level.L00_FAST)
        {
            var original = new byte[length];

            Lorem.Fill(original, 0, length);

            var pickled   = LZ4Pickler.Pickle(original, level);
            var unpickled = LZ4Pickler.Unpickle(pickled);

            Tools.SameBytes(original, unpickled);
        }
Example #8
        public void PickleEntropy(int seed, int length, LZ4Level level = LZ4Level.L00_FAST)
        {
            var original = new byte[length];

            new Random(seed).NextBytes(original);

            var pickled   = LZ4Pickler.Pickle(original, level);
            var unpickled = LZ4Pickler.Unpickle(pickled);

            Tools.SameBytes(original, unpickled);
        }
Example #9
        /// <summary>
        /// Serializes an object to the binary bois format, then compresses it using the LZ4 pickle self-contained format.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="obj"></param>
        /// <param name="output"></param>
        /// <param name="lz4Level">Compression level</param>
        public void Pickle<T>(T obj, Stream output, LZ4Level lz4Level)
        {
            using (var mem = new MemoryStream())
            {
                _serializer.Serialize(obj, mem);

                var serializedBuff = mem.GetBuffer();
                var length         = (int)mem.Length;

                var compressedBuff = LZ4Pickler.Pickle(serializedBuff, 0, length, lz4Level);
                output.Write(compressedBuff, 0, compressedBuff.Length);
            }
        }
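The matching read path unpickles the payload before handing it to the serializer. A minimal sketch, assuming the serializer also exposes a `Deserialize<T>(Stream)` method (an assumption; it is not shown in the original):

        // Hypothetical counterpart to Pickle<T>: buffers the input stream, unpickles the
        // self-contained LZ4 payload, then deserializes the original object.
        public T Unpickle<T>(Stream input)
        {
            using (var mem = new MemoryStream())
            {
                input.CopyTo(mem);

                var decompressedBuff = LZ4Pickler.Unpickle(mem.ToArray());

                using (var buffer = new MemoryStream(decompressedBuff))
                {
                    return _serializer.Deserialize<T>(buffer);
                }
            }
        }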
Example #10
 /// <summary>
 /// Used during serialization to compress properties
 /// </summary>
 /// <param name="model"></param>
 /// <remarks>
 /// This will essentially 'double compress' property data. The MsgPack data as a whole will already be compressed
 /// but this will go a step further and double compress property data so that it is stored in the nucache file
 /// as compressed bytes and therefore will exist in memory as compressed bytes. That is, until the bytes are
 /// read/decompressed as a string to be displayed on the front-end. This allows for potentially a significant
 /// memory savings but could also affect performance of first rendering pages while decompression occurs.
 /// </remarks>
 private void Compress(IReadOnlyContentBase content, ContentCacheDataModel model)
 {
     foreach (var propertyAliasToData in model.PropertyData)
     {
         if (_propertyOptions.IsCompressed(content, propertyAliasToData.Key))
         {
             foreach (var property in propertyAliasToData.Value.Where(x => x.Value != null && x.Value is string))
             {
                 property.Value = LZ4Pickler.Pickle(Encoding.UTF8.GetBytes((string)property.Value), LZ4Level.L00_FAST);
             }
         }
     }
 }
Example #11
        public void PicklingSpansGivesIdenticalResults(int offset, int length)
        {
            var source = new byte[offset + length + offset];

            Lorem.Fill(source, 0, source.Length);

            var array = LZ4Pickler.Pickle(source, offset, length);
            var span  = LZ4Pickler.Pickle(source.AsSpan(offset, length));

            Assert.Equal(array, span);

            Assert.Equal(
                LZ4Pickler.Unpickle(array),
                LZ4Pickler.Unpickle(span.AsSpan()));
        }
Example #12
        public static byte[] Zip(byte[] data, CompressionMethod method = CompressionMethod.Lz4)
        {
            if (method == CompressionMethod.Lz4)
            {
                return LZ4Pickler.Pickle(data);
            }

            return null;

            /*
             * byte[] target = new byte[LZ4Codec.MaximumOutputSize(data.Length)];
             * int size = LZ4Codec.Encode(data, 0, data.Length, target, 0, target.Length);
             * output = target;
             * return size;
             */
        }
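An `Unzip` counterpart mirrors the same shape; because the pickle format stores the original length in its header, `LZ4Pickler.Unpickle` needs no separate size argument. A minimal sketch (hypothetical, not part of the original source):

        public static byte[] Unzip(byte[] data, CompressionMethod method = CompressionMethod.Lz4)
        {
            if (method == CompressionMethod.Lz4)
            {
                // The pickle is self-contained, so the decompressed size is recovered automatically.
                return LZ4Pickler.Unpickle(data);
            }

            return null;
        }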
Example #13
 public void ToStream(object data, ref BytePacker packer)
 {
     if (Compression == Compression.LZ4)
     {
         int        size      = lz4converter.GetDataSize(data);
         byte[]     buf       = new byte[size];
         BytePacker lz4packer = new BytePacker(buf);
         lz4converter.Serialize(lz4packer, data);
         byte[] encoded = LZ4Pickler.Pickle(buf);
         converter.Serialize(packer, encoded);
     }
     else
     {
         converter.Serialize(packer, data);
     }
 }
Example #14
        public void PickleEntropyWithBufferWriter(
            int seed, int length, LZ4Level level = LZ4Level.L00_FAST)
        {
            var original = new byte[length];

            new Random(seed).NextBytes(original);

            var pickledWriter   = BufferWriter.New();
            var unpickledWriter = BufferWriter.New();

            LZ4Pickler.Pickle(original, pickledWriter, level);
            var pickled = pickledWriter.WrittenSpan;

            LZ4Pickler.Unpickle(pickled, unpickledWriter);
            var unpickled = unpickledWriter.WrittenSpan;

            Tools.SameBytes(original, unpickled);
        }
Example #15
        public void PickleLoremWithBufferWriter(int length, LZ4Level level = LZ4Level.L00_FAST)
        {
            var original = new byte[length];

            Lorem.Fill(original, 0, length);

            var pickledWriter   = BufferWriter.New();
            var unpickledWriter = BufferWriter.New();

            Assert.Throws<ArgumentNullException>(
                () => LZ4Pickler.Pickle(original, null, level));
            LZ4Pickler.Pickle(original, pickledWriter, level);
            var pickled = pickledWriter.WrittenSpan;

            Assert.Throws<ArgumentNullException>(
                () => LZ4Pickler.Unpickle(pickledWriter.WrittenSpan, (IBufferWriter<byte>)null));
            LZ4Pickler.Unpickle(pickled, unpickledWriter);
            var unpickled = unpickledWriter.WrittenSpan;

            Tools.SameBytes(original, unpickled);
        }
Example #16
        public static byte[] Compress(this byte[] data, Internal.Packets.CompressionMode compression)
        {
            switch (compression)
            {
            case Internal.Packets.CompressionMode.Deflate:
            {
                using (MemoryStream output = new MemoryStream())
                {
                    using (DeflateStream dstream = new DeflateStream(output, CompressionLevel.Fastest))
                    {
                        dstream.Write(data, 0, data.Length);
                    }
                    return output.ToArray(); // Copy array.
                }
            }

            case Internal.Packets.CompressionMode.Gzip:
            {
                using (var compressIntoMs = new MemoryStream())
                {
                    using (var gzs = new BufferedStream(new GZipStream(compressIntoMs,
                                                                       System.IO.Compression.CompressionMode.Compress), 64 * 1024))
                    {
                        gzs.Write(data, 0, data.Length);
                    }
                    return compressIntoMs.ToArray();
                }
            }

            case Internal.Packets.CompressionMode.LZ4:
                return LZ4Pickler.Pickle(data);

            case Internal.Packets.CompressionMode.Custom:
                return OnCustomCompression?.Invoke(data);

            default:
                return data;
            }
        }
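A decompression counterpart would mirror the same switch, using `DeflateStream`/`GZipStream` in decompress mode for the first two cases and `LZ4Pickler.Unpickle` for LZ4. A minimal sketch (hypothetical; the custom case is omitted because no matching decompression hook is shown in the original):

        public static byte[] Decompress(this byte[] data, Internal.Packets.CompressionMode compression)
        {
            switch (compression)
            {
            case Internal.Packets.CompressionMode.Deflate:
            {
                using (MemoryStream input = new MemoryStream(data))
                using (DeflateStream dstream = new DeflateStream(input, System.IO.Compression.CompressionMode.Decompress))
                using (MemoryStream output = new MemoryStream())
                {
                    dstream.CopyTo(output);
                    return output.ToArray();
                }
            }

            case Internal.Packets.CompressionMode.Gzip:
            {
                using (var input = new MemoryStream(data))
                using (var gzs = new GZipStream(input, System.IO.Compression.CompressionMode.Decompress))
                using (var output = new MemoryStream())
                {
                    gzs.CopyTo(output);
                    return output.ToArray();
                }
            }

            case Internal.Packets.CompressionMode.LZ4:
                // The pickle header stores the original length, so no extra size needs to be passed.
                return LZ4Pickler.Unpickle(data);

            default:
                return data;
            }
        }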
Example #17
#pragma warning disable 1998
        public async Task PersistPreset(PresetParserMetadata presetMetadata, byte[] data, bool force = false)
#pragma warning restore 1998
        {
            var preset = new Preset();

            preset.Plugin = presetMetadata.Plugin;
            preset.Plugin.Presets.Add(preset);

            preset.SetFromPresetParser(presetMetadata);
            preset.PresetHash           = HashUtils.getIxxHash(data);
            preset.PresetSize           = data.Length;
            preset.PresetCompressedSize = data.Length;

            try
            {
                PresetData.Add(preset.OriginalMetadata.SourceFile, LZ4Pickler.Pickle(data));
            }
            catch (Exception e)
            {
                Console.WriteLine($"Error while trying to add {preset.OriginalMetadata.SourceFile}");
                throw;
            }
        }
Example #18
        public void ToStream(object data, ref BytePacker packer, IEncrypter encrypter)
        {
            if (Compression == Compression.LZ4)
            {
                int        size      = converter.GetDataSize(data);
                byte[]     buf       = new byte[size];
                BytePacker lz4packer = new BytePacker(buf);
                converter.Serialize(lz4packer, data);

                //Encrypt
                if (encrypter != null)
                {
                    buf = encrypter.Encrypt(buf);
                }

                byte[] encoded = LZ4Pickler.Pickle(buf);
                byteArrayConverter.Serialize(packer, encoded);
            }
            else
            {
                //Encrypt
                if (encrypter != null)
                {
                    int        size      = converter.GetDataSize(data);
                    byte[]     buf       = new byte[size];
                    BytePacker encpacker = new BytePacker(buf);
                    converter.Serialize(encpacker, data);

                    buf = encrypter.Encrypt(buf);
                    encArrayConverter.Serialize(packer, buf);
                }
                else
                {
                    converter.Serialize(packer, data);
                }
            }
        }
Example #19
        public void Corruption()
        {
            var source = new byte[1234];

            Lorem.Fill(source, 0, source.Length);

            var array  = LZ4Pickler.Pickle(source);
            var copy   = array.AsSpan().ToArray();
            var output = source.AsSpan().ToArray();

            // pass a buffer that's too short
            Assert.Throws<InvalidDataException>(() => LZ4Pickler.Unpickle(array.AsSpan().Slice(0, 2), output));
            Assert.Throws<InvalidDataException>(() => LZ4Pickler.UnpickledSize(array.AsSpan().Slice(0, 2)));

            // corrupt the version
            array[0] = 0xff;
            Assert.Throws<InvalidDataException>(() => LZ4Pickler.Unpickle(array, output));
            Assert.Throws<InvalidDataException>(() => _ = LZ4Pickler.UnpickledSize(array));

            // corrupt the size
            array[0] = copy[0];
            array[1] = 0xff;
            Assert.Throws<InvalidDataException>(() => LZ4Pickler.Unpickle(array, output));
        }
Example #20
 public byte[] Compress(byte[] proto)
 {
     return LZ4Pickler.Pickle(proto);
 }
Example #21
 public Span<byte> Compress(Span<byte> bytes)
 {
     return LZ4Pickler.Pickle(bytes, LZ4Level.L00_FAST);
 }
Example #22
 public static byte[] Compress(this byte[] input)
 {
     return LZ4Pickler.Pickle(input);
 }
Example #23
 public Task<byte[]> CompressAsync(ReadOnlyMemory<byte> data)
 {
     return Task.FromResult(LZ4Pickler.Pickle(data.ToArray(), _level));
 }
Example #24
 public byte[] Compress(ReadOnlyMemory<byte> data)
 {
     return LZ4Pickler.Pickle(data.ToArray(), _level);
 }
Example #25
        public ArraySegment<byte> Compress(ReadOnlyMemory<byte> inputData)
        {
            Guard.AgainstEmpty(inputData, nameof(inputData));

            return LZ4Pickler.Pickle(inputData.Span, _level);
        }
Example #26
        public byte[] Process<T>(T item)
        {
            var bytes = _processor.Process(item);

            return LZ4Pickler.Pickle(bytes);
        }
Example #27
 public byte[] Compress(byte[] bytes)
 {
     return LZ4Pickler.Pickle(bytes, LZ4Level.L00_FAST);
 }
Example #28
 public byte[] Compress(object obj)
 {
     return LZ4Pickler.Pickle(Serialize(obj), LZ4Level.L12_MAX);
 }
Example #29
 public static byte[] Pickle(byte[] data)
 {
     return LZ4Pickler.Pickle(data, LZ4Level.L12_MAX);
 }
Example #30
 public static byte[] PickleString(string data)
 {
     return LZ4Pickler.Pickle(Encoding.Default.GetBytes(data), LZ4Level.L00_FAST);
 }
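For completeness, the string round trip just reverses the two steps. A minimal sketch of a matching helper (hypothetical, using the same `Encoding.Default` as above):

 public static string UnpickleString(byte[] data)
 {
     // Unpickle restores the original bytes; decode them with the same encoding used when pickling.
     return Encoding.Default.GetString(LZ4Pickler.Unpickle(data));
 }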