public void CouldUseIDeltaMethods()
{
    // Round-trip check of the IDelta contract:
    // GetDelta(first, second) must equal (second - first), and
    // AddDelta(first, delta) must reconstruct second.
    var first = new IntDelta { Value = 123 };
    var second = new IntDelta { Value = 456 };
    var expectedDelta = new IntDelta { Value = 456 - 123 };

    var actualDelta = Unsafe.GetDeltaConstrained(ref first, ref second);
    Assert.AreEqual(expectedDelta, actualDelta);

    var reconstructed = Unsafe.AddDeltaConstrained(ref first, ref expectedDelta);
    Assert.AreEqual(second, reconstructed);
}
/// <summary>
/// Serializes a segment of <paramref name="value"/> into <paramref name="destination"/>,
/// Blosc-compressing the payload. Output layout: bytes [0..3] total length (little-endian
/// via <see cref="Marshal.WriteInt32(IntPtr, int)"/>), byte [4] version (high nibble) and
/// flags (bit 0 = compressed, bit 1 = delta-encoded), payload starting at offset 8.
/// </summary>
/// <param name="value">Source array; must not be null.</param>
/// <param name="valueOffset">Index of the first element of the segment.</param>
/// <param name="valueCount">Number of elements to write.</param>
/// <param name="destination">Destination memory; retained/pinned for the duration of the call.</param>
/// <param name="destinationOffset">Byte offset inside <paramref name="destination"/> to write at.</param>
/// <param name="temporaryStream">Optional pre-serialized payload. When supplied it is copied
/// verbatim and disposed on success; on a capacity failure it is left alive so the caller
/// can retry with a larger destination.</param>
/// <param name="compression">Compression method; DefaultOrNone maps to the configured default codec.</param>
/// <returns>Total bytes written, or a negative <see cref="BinaryConverterErrorCode"/> value.</returns>
public unsafe int Write(TElement[] value, int valueOffset, int valueCount, ref Memory<byte> destination,
    uint destinationOffset = 0u, MemoryStream temporaryStream = null,
    CompressionMethod compression = CompressionMethod.DefaultOrNone)
{
    // NB Blosc calls below are visually large - many LOCs with comments, but each is a single method call.
    if (value == null) { throw new ArgumentNullException(nameof(value)); }
    var handle = destination.Retain(true);
    try
    {
        var ptr = (IntPtr)handle.PinnedPointer + (int)destinationOffset;

        // Fast path: payload was already serialized into a temporary stream (e.g. by SizeOf).
        if (temporaryStream != null)
        {
            var len = temporaryStream.Length;
            if (destination.Length < destinationOffset + len)
            {
                // Stream intentionally NOT disposed here - caller may retry with more capacity.
                return (int)BinaryConverterErrorCode.NotEnoughCapacity;
            }
            temporaryStream.WriteToPtr(ptr);
            temporaryStream.Dispose();
            return checked((int)len);
        }

        var compressionMethod = compression == CompressionMethod.DefaultOrNone
            ? BloscSettings.defaultCompressionMethod
            : (compression == CompressionMethod.LZ4 ? "lz4" : "zstd");

        var isDelta = IsIDelta;
        var position = 8; // payload starts after the 8-byte header

        if (valueCount > 0)
        {
            int compressedSize;
            if (ItemSize > 0)
            {
                if (typeof(TElement) == typeof(DateTime))
                {
                    isDelta = true;
                    Trace.Assert(ItemSize == 8);
                    var buffer = BufferPool<byte>.Rent(valueCount * 8);
                    var dtArray = (DateTime[])(object)value;
                    var first = dtArray[valueOffset];
                    // NB For DateTime we calculate delta not from the first but
                    // from the previous value. This is a special case for the
                    // fact that DT[] is usually increasing by a similar (regular) step
                    // and the deltas are always positive, small and close to each other.
                    // In contrast, Price/Decimal could fluctuate in a small range
                    // and delta from previous could often change its sign, which
                    // leads to very different bits and significantly reduces
                    // the Blosc shuffling benefits. For stationary time series
                    // deltas from the first value are also stationary and their sign
                    // changes less frequently than the sign of deltas from previous.
                    //
                    // BUGFIX: the deltas must be computed on the *values* (the raw
                    // 64-bit representation of DateTime), not on pointers. The previous
                    // code kept long* pointers and subtracted them, which is pointer
                    // arithmetic on reused stack slots - garbage output.
                    var previousLong = *(long*)&first;
                    fixed (byte* srcPtr = &buffer[0])
                    {
                        // First element is stored raw; subsequent elements as delta-from-previous.
                        Unsafe.WriteUnaligned(srcPtr, previousLong);
                        for (var i = 1; i < valueCount; i++)
                        {
                            var current = dtArray[i + valueOffset];
                            var currentLong = *(long*)&current;
                            var diff = currentLong - previousLong;
                            Unsafe.WriteUnaligned(srcPtr + i * ItemSize, diff);
                            previousLong = currentLong;
                        }
                        // NOTE(review): capacity below is destination.Length - position; it does not
                        // subtract destinationOffset - looks like a potential overrun when
                        // destinationOffset > 0. Verify against callers before changing.
                        compressedSize = BloscMethods.blosc_compress_ctx(
                            new IntPtr(9), // max compression 9
                            new IntPtr(1), // do byte shuffle 1
                            new UIntPtr((uint)ItemSize), // type size
                            new UIntPtr((uint)(valueCount * ItemSize)), // number of input bytes
                            (IntPtr)srcPtr,
                            ptr + position, // destination
                            new UIntPtr((uint)(destination.Length - position)), // destination length
                            compressionMethod,
                            new UIntPtr((uint)0), // default block size
                            BloscMethods.ProcessorCount //
                            );
                    }
                    BufferPool<byte>.Return(buffer);
                }
                else if (IsIDelta)
                {
                    // IDelta types: first element raw, then deltas from the FIRST element
                    // (see the DateTime note above for why DateTime differs).
                    var first = value[valueOffset];
                    var buffer = BufferPool<byte>.Rent(valueCount * ItemSize);
                    fixed (byte* srcPtr = &buffer[0])
                    {
                        Unsafe.WriteUnaligned(srcPtr, first);
                        for (var i = 1; i < valueCount; i++)
                        {
                            var diff = Unsafe.GetDeltaConstrained(ref first, ref value[valueOffset + i]);
                            Unsafe.WriteUnaligned(srcPtr + i * ItemSize, diff);
                        }
                        compressedSize = BloscMethods.blosc_compress_ctx(
                            new IntPtr(9), // max compression 9
                            new IntPtr(1), // do byte shuffle 1
                            new UIntPtr((uint)ItemSize), // type size
                            new UIntPtr((uint)(valueCount * ItemSize)), // number of input bytes
                            (IntPtr)srcPtr,
                            ptr + position, // destination
                            new UIntPtr((uint)(destination.Length - position)), // destination length
                            compressionMethod,
                            new UIntPtr((uint)0), // default block size
                            BloscMethods.ProcessorCount //
                            );
                    }
                    BufferPool<byte>.Return(buffer);
                }
                else
                {
                    // Fixed-size blittable types: compress directly from the pinned source array.
                    var pinnedArray = GCHandle.Alloc(value, GCHandleType.Pinned);
                    var srcPtr = Marshal.UnsafeAddrOfPinnedArrayElement(value, valueOffset);
                    try
                    {
                        compressedSize = BloscMethods.blosc_compress_ctx(
                            new IntPtr(9), // max compression 9
                            new IntPtr(1), // do byte shuffle 1
                            new UIntPtr((uint)ItemSize), // type size
                            new UIntPtr((uint)(valueCount * ItemSize)), // number of input bytes
                            srcPtr,
                            ptr + position, // destination
                            new UIntPtr((uint)(destination.Length - position)), // destination length
                            compressionMethod,
                            new UIntPtr((uint)0), // default block size
                            BloscMethods.ProcessorCount //
                            );
                    }
                    finally
                    {
                        // Always unpin, even if the native call throws.
                        pinnedArray.Free();
                    }
                }
            }
            else if (Buffers.BufferPool.IsPreservedBuffer<TElement>())
            {
                throw new NotImplementedException();
            }
            else
            {
                // Variable-size types: binary-serialize the segment first, then compress
                // the resulting bytes via the byte-specialized converter.
                MemoryStream tempStream;
                var bytesSize = BinarySerializer.SizeOf(new ArraySegment<TElement>(value, valueOffset, valueCount), out tempStream, compression);
                var buffer = BufferPool<byte>.Rent(bytesSize);
                var writtenBytes = BinarySerializer.Write(new ArraySegment<TElement>(value, valueOffset, valueCount), buffer, 0, tempStream);
                tempStream?.Dispose();
                Debug.Assert(bytesSize == writtenBytes);
                compressedSize = CompressedArrayBinaryConverter<byte>.Instance.Write(buffer, 0, writtenBytes, ref destination, destinationOffset, null, compression);
                BufferPool<byte>.Return(buffer);
            }

            if (compressedSize > 0)
            {
                position += compressedSize;
            }
            else
            {
                // Blosc returns <= 0 when the destination is too small (or on error).
                return (int)BinaryConverterErrorCode.NotEnoughCapacity;
            }
        }

        // Header: total length at [0..3]; version & flags at [4]
        // (bit 0 = compressed, bit 1 = delta-encoded).
        Marshal.WriteInt32(ptr, position);
        Marshal.WriteByte(ptr + 4, (byte)((Version << 4) | (isDelta ? 0b0000_0011 : 0b0000_0001)));
        return position;
    }
    finally
    {
        handle.Dispose();
    }
}