/// <summary>
/// Writes a UTF-8 encoded string to <paramref name="destination"/> at <paramref name="offset"/>.
/// Record layout: [int32 totalLength][byte version][3 bytes header padding][utf8 payload].
/// </summary>
/// <param name="value">String to serialize. Must not be null.</param>
/// <param name="destination">Target buffer.</param>
/// <param name="offset">Byte offset in the destination at which the record starts.</param>
/// <param name="temporaryStream">Not supported by this converter; must be null.</param>
/// <param name="compression">Only <see cref="CompressionMethod.DefaultOrNone"/> is implemented.</param>
/// <returns>Total bytes written (header + payload), or a negative <see cref="BinaryConverterErrorCode"/>.</returns>
public unsafe int Write(string value, ref DirectBuffer destination, uint offset = 0u, MemoryStream temporaryStream = null, CompressionMethod compression = CompressionMethod.DefaultOrNone)
{
    if (value == null)
    {
        throw new ArgumentNullException(nameof(value));
    }
    if (compression != CompressionMethod.DefaultOrNone)
    {
        throw new NotImplementedException("TODO string compression");
    }
    if (temporaryStream != null)
    {
        throw new NotSupportedException("StringBinaryConverter does not work with temp streams.");
    }

    fixed (char* charPtr = value)
    {
        var totalLength = 8 + Encoding.UTF8.GetByteCount(charPtr, value.Length);
        if (!destination.HasCapacity(offset, totalLength))
        {
            return (int)BinaryConverterErrorCode.NotEnoughCapacity;
        }
        var ptr = destination.Data + (int)offset;
        // size (includes the 8-byte header)
        Marshal.WriteInt32(ptr, totalLength);
        // version
        Marshal.WriteByte(ptr + 4, Version);
        // payload. FIX: the writable payload region is totalLength - 8 bytes;
        // the original passed totalLength, overstating capacity by the header
        // size (harmless only because GetByteCount was exact).
        var len = Encoding.UTF8.GetBytes(charPtr, value.Length, (byte*)ptr + 8, totalLength - 8);
        Debug.Assert(totalLength == len + 8);
        return len + 8;
    }
}
/// <summary>
/// Deserializes a <typeparamref name="T"/> directly from the buffer using Utf8Json-style reader.
/// </summary>
/// <param name="source">Buffer containing the JSON payload.</param>
/// <param name="value">Receives the deserialized value.</param>
/// <returns>The number of bytes the reader consumed.</returns>
public static int Read(ref DirectBuffer source, out T value)
{
    var jsonReader = new JsonReader(source);
    value = JsonSerializer.Deserialize<T>(ref jsonReader);
    // The reader's current offset equals the number of bytes consumed.
    return jsonReader.GetCurrentOffsetUnsafe();
}
/// <summary>
/// Writes a DateTime array as [int32 length][byte version][3 bytes padding][8 bytes per element].
/// Elements are written by direct struct copy (DateTime is assumed 8 bytes/blittable,
/// consistent with the rest of this file).
/// </summary>
/// <param name="value">Array to serialize. Must not be null.</param>
/// <param name="destination">Target buffer.</param>
/// <param name="offset">Byte offset where the record starts.</param>
/// <param name="temporaryStream">Must be null for the uncompressed path.</param>
/// <param name="compression">None writes raw; otherwise delegates to the compressed converter.</param>
/// <returns>Total bytes written, or a negative <see cref="BinaryConverterErrorCode"/>.</returns>
public unsafe int Write(DateTime[] value, ref DirectBuffer destination, uint offset = 0, MemoryStream temporaryStream = null, CompressionMethod compression = CompressionMethod.DefaultOrNone)
{
    // FIX: explicit argument validation — previously a null array produced a
    // NullReferenceException; now throws ArgumentNullException, consistent with
    // CompressedArrayBinaryConverter.Write.
    if (value == null)
    {
        throw new ArgumentNullException(nameof(value));
    }
    if (compression == CompressionMethod.DefaultOrNone)
    {
        Debug.Assert(temporaryStream == null);
        var length = 8 + value.Length * 8;
        if (destination.Length < offset + length)
        {
            return (int)BinaryConverterErrorCode.NotEnoughCapacity;
        }
        for (var i = 0; i < value.Length; i++)
        {
            *(DateTime*)(destination.Data + (int)offset + 8 + i * 8) = value[i];
        }
        destination.WriteInt32(offset, length);
        destination.WriteByte(offset + 4, Version);
        return length;
    }
    return CompressedArrayBinaryConverter<DateTime>.Instance.Write(value, 0, value.Length, ref destination, offset, temporaryStream, compression);
}
/// <summary>
/// Serializes <paramref name="value"/> into <paramref name="destination"/> via an
/// intermediate pooled buffer produced by <c>SizeOf</c>.
/// </summary>
/// <param name="value">Value to serialize.</param>
/// <param name="destination">Target buffer.</param>
/// <returns>Bytes written, or a negative <see cref="BinaryConverterErrorCode"/>.</returns>
public static int Write(T value, ref DirectBuffer destination)
{
    // SizeOf serializes into retained memory prefixed with BC_PADDING bytes.
    var size = SizeOf(value, out var retained, out var hasPadding);
    Debug.Assert(hasPadding);
    try
    {
        // in general buffer could be empty/default if size is known, but not with Json
        ThrowHelper.AssertFailFast(size == retained.Length - BinarySerializer.BC_PADDING, "size == buffer.Count");
        if (size > destination.Length)
        {
            return (int)BinaryConverterErrorCode.NotEnoughCapacity;
        }
        // Copy the payload (everything after the padding prefix) into the destination.
        var payload = retained.Span.Slice(BinarySerializer.BC_PADDING);
        payload.CopyTo(destination.Span);
        return size;
    }
    finally
    {
        // Always return the rented memory, even on assertion failure.
        retained.Dispose();
    }
}
// Wraps a DirectBuffer as a SafeBuffer so its memory can be accessed through
// the SafeBuffer API. base(false): this handle does not own the memory and
// will not attempt to release it.
public SafeDirectBuffer(ref DirectBuffer directBuffer) : base(false)
{
    // Struct copy — later changes to the caller's DirectBuffer are not
    // reflected in this instance despite the `ref` parameter.
    _directBuffer = directBuffer;
    base.SetHandle(_directBuffer._data);
    // NOTE(review): cast silently truncates if _length > uint.MaxValue —
    // confirm buffers are always < 4GB here.
    base.Initialize((uint)_directBuffer._length);
}
/// <summary>
/// Recycles an existing <see cref="FixedBuffer"/> from an unmanaged byte buffer owned by external code.
/// The caller retains ownership of the memory; no GC handle is taken.
/// </summary>
/// <param name="pBuffer">Unmanaged byte buffer</param>
/// <param name="bufferLength">Length of the buffer</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="pBuffer"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="bufferLength"/> is not positive.</exception>
public void Wrap(long bufferLength, byte* pBuffer)
{
    // FIX: use nameof instead of string literals so renames stay in sync.
    if (pBuffer == null) throw new ArgumentNullException(nameof(pBuffer));
    if (bufferLength <= 0) throw new ArgumentException("Buffer size must be > 0", nameof(bufferLength));

    FreeGCHandle();

    _directBuffer = new DirectBuffer(bufferLength, (IntPtr)pBuffer);
    _needToFreeGCHandle = false;

    base.SetHandle(_directBuffer.Data);
    // FIX: checked cast — silently truncating a >4GB capacity would make
    // SafeBuffer report a wrong accessible region; fail loudly instead.
    base.Initialize(checked((uint)_directBuffer.Capacity));
}
/// <summary>
/// Recycles an existing <see cref="FixedBuffer"/>, pinning the given managed array
/// as the new backing buffer and returning the previous one via the recycle callback.
/// </summary>
/// <param name="byteArray">The byte array that will act as the backing buffer.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="byteArray"/> is null.</exception>
public void Wrap(byte[] byteArray)
{
    // FIX: use nameof instead of a string literal so renames stay in sync.
    if (byteArray == null) throw new ArgumentNullException(nameof(byteArray));

    FreeGCHandle();

    // pin the buffer so it does not get moved around by GC, this is required since we use pointers
    _pinnedGCHandle = GCHandle.Alloc(byteArray, GCHandleType.Pinned);
    _needToFreeGCHandle = true;

    // FIX: AddrOfPinnedObject already returns IntPtr — the original round-tripped
    // it through ToPointer() and cast back for no effect.
    _directBuffer = new DirectBuffer(byteArray.Length, _pinnedGCHandle.AddrOfPinnedObject());

    if (BufferRecylce != null && _array != null)
    {
        // return the previous buffer for recycling
        BufferRecylce(_array.Length, 0, _array);
    }
    _array = byteArray;

    base.SetHandle(_directBuffer.Data);
    // FIX: checked cast guards against silent truncation for very large buffers.
    base.Initialize(checked((uint)_directBuffer.Capacity));
}
// Compresses a slice of TElement[] into `destination` using Blosc.
// Record layout produced here: [int32 totalLength][byte version+flags][3 bytes padding][compressed payload].
// Flag bit 0 = "compressed array" marker, bit 1 = diffable encoding was used.
public unsafe int Write(TElement[] value, int valueOffset, int valueCount, ref DirectBuffer destination, uint destinationOffset = 0u, MemoryStream temporaryStream = null, CompressionMethod compression = CompressionMethod.DefaultOrNone)
{
    if (value == null) { throw new ArgumentNullException(nameof(value)); }
    // Fast path: a previously prepared temporary stream is copied verbatim
    // at destinationOffset, then disposed.
    if (temporaryStream != null)
    {
        var len = temporaryStream.Length;
        if (destination.Length < destinationOffset + len)
        {
            return ((int)BinaryConverterErrorCode.NotEnoughCapacity);
        }
        temporaryStream.WriteToPtr(destination.Data + (int)destinationOffset);
        temporaryStream.Dispose();
        return (checked ((int)len));
    }
    bool isDiffable = false;
    // Map the compression enum to a Blosc codec name.
    var compressionMethod = compression == CompressionMethod.DefaultOrNone
        ? BloscSettings.defaultCompressionMethod
        : (compression == CompressionMethod.LZ4 ? "lz4" : "zstd");
    // position starts after the 8-byte header.
    // NOTE(review): from here on, destinationOffset is never used — the payload
    // goes to destination.Data + position and the header to offsets 0/4.
    // Confirm that callers pass destinationOffset == 0 on this path.
    var position = 8;
    if (valueCount > 0)
    {
        int compressedSize;
        // ItemSize > 0 means fixed-size (blittable) elements that Blosc can
        // shuffle/compress directly; otherwise fall back to the serializer.
        if (ItemSize > 0)
        {
            if (typeof(TElement) == typeof(DateTime))
            {
                // DateTime is special-cased: copied into a pooled scratch
                // buffer as raw 8-byte structs before compression.
                // NOTE(review): Return is not in a finally — the rented buffer
                // leaks if blosc_compress_ctx throws.
                var buffer = BufferPool<byte>.Rent(valueCount * 8);
                var dtArray = (DateTime[])(object)value;
                fixed (byte* srcPtr = &buffer[0])
                {
                    for (var i = 0; i < valueCount; i++)
                    {
                        *(DateTime*)(srcPtr + i * 8) = dtArray[i + valueOffset];
                    }
                    compressedSize = BloscMethods.blosc_compress_ctx(
                        new IntPtr(9), // max compression 9
                        new IntPtr(1), // do byte shuffle 1
                        new UIntPtr((uint)ItemSize), // type size
                        new UIntPtr((uint)(valueCount * ItemSize)), // number of input bytes
                        (IntPtr)srcPtr,
                        destination.Data + position, // destination
                        new UIntPtr((uint)(destination.Length - position)), // destination length
                        compressionMethod,
                        new UIntPtr((uint)0), // default block size
                        BloscMethods.ProcessorCount //
                    );
                }
                BufferPool<byte>.Return(buffer);
            }
            else if (value[0] is IDiffable<TElement> diffableFirst)
            {
                isDiffable = true;
                // TODO (!) this is probably inefficient... some generic method caching or dynamic dispatch?
                // however there is only a single pattern match with IDiffable boxing
                // Delta encoding: store the first element as-is, then each
                // subsequent element as its delta from the first.
                // NOTE(review): this branch reads value[0] and value[i], ignoring
                // valueOffset — confirm the diffable path is only used with
                // valueOffset == 0.
                var first = value[0];
                var buffer = BufferPool<byte>.Rent(valueCount * ItemSize);
                fixed (byte* srcPtr = &buffer[0])
                {
                    Unsafe.Write(srcPtr, first);
                    for (var i = 1; i < valueCount; i++)
                    {
                        var current = value[i];
                        var diff = diffableFirst.GetDelta(current);
                        Unsafe.Write(srcPtr + i * ItemSize, diff);
                    }
                    compressedSize = BloscMethods.blosc_compress_ctx(
                        new IntPtr(9), // max compression 9
                        new IntPtr(1), // do byte shuffle 1
                        new UIntPtr((uint)ItemSize), // type size
                        new UIntPtr((uint)(valueCount * ItemSize)), // number of input bytes
                        (IntPtr)srcPtr,
                        destination.Data + position, // destination
                        new UIntPtr((uint)(destination.Length - position)), // destination length
                        compressionMethod,
                        new UIntPtr((uint)0), // default block size
                        BloscMethods.ProcessorCount //
                    );
                }
                BufferPool<byte>.Return(buffer);
            }
            else
            {
                // Generic blittable elements: pin the source array and compress
                // straight from it, no intermediate copy.
                // NOTE(review): Free is not in a finally — the array stays pinned
                // if blosc_compress_ctx throws.
                var pinnedArray = GCHandle.Alloc(value, GCHandleType.Pinned);
                var srcPtr = Marshal.UnsafeAddrOfPinnedArrayElement(value, valueOffset);
                compressedSize = BloscMethods.blosc_compress_ctx(
                    new IntPtr(9), // max compression 9
                    new IntPtr(1), // do byte shuffle 1
                    new UIntPtr((uint)ItemSize), // type size
                    new UIntPtr((uint)(valueCount * ItemSize)), // number of input bytes
                    srcPtr,
                    destination.Data + position, // destination
                    new UIntPtr((uint)(destination.Length - position)), // destination length
                    compressionMethod,
                    new UIntPtr((uint)0), // default block size
                    BloscMethods.ProcessorCount //
                );
                pinnedArray.Free();
            }
        }
        else
        {
            // Variable-size elements: serialize the segment to a pooled byte
            // buffer first, then recurse through the byte-array converter.
            MemoryStream tempStream;
            var bytesSize = BinarySerializer.SizeOf(new ArraySegment<TElement>(value, valueOffset, valueCount), out tempStream, compression);
            var buffer = BufferPool<byte>.Rent(bytesSize);
            var writtenBytes = BinarySerializer.Write(new ArraySegment<TElement>(value, valueOffset, valueCount), buffer, 0, tempStream);
            tempStream?.Dispose();
            Debug.Assert(bytesSize == writtenBytes);
            // NOTE(review): this recursive call DOES honor destinationOffset,
            // unlike the blosc branches above — confirm which is intended.
            compressedSize = CompressedArrayBinaryConverter<byte>.Instance.Write(buffer, 0, writtenBytes, ref destination, destinationOffset, null, compression);
            BufferPool<byte>.Return(buffer);
        }
        if (compressedSize > 0)
        {
            position += compressedSize;
        }
        else
        {
            // Blosc returns <= 0 when the destination is too small (or on error).
            return ((int)BinaryConverterErrorCode.NotEnoughCapacity);
        }
    }
    // length
    destination.WriteInt32(0, position); // include all headers
    // version & flags
    destination.WriteByte(4, (byte)((Version << 4) | (isDiffable ? 0b0000_0011 : 0b0000_0001)));
    return (position);
}
// Convenience overload: reads a T from the very beginning of the buffer.
public static int Read<T>(DirectBuffer buffer, ref T value)
{
    const int startOffset = 0;
    return Read<T>(buffer, startOffset, ref value);
}
// Not implemented for this converter.
public int Read(ref DirectBuffer source, out T value) => throw new NotImplementedException();
// Not implemented for this converter.
public int Write(T value, ref DirectBuffer destination) => throw new NotImplementedException();
// Explicit interface implementation: forwards to the public Read.
int IBinaryConverter<T>.Read(ref DirectBuffer source, out T value) => Read(ref source, out value);
// Explicit interface implementation: forwards to the public Write.
int IBinaryConverter<T>.Write(T value, ref DirectBuffer destination) => Write(value, ref destination);
// Parses an ASCII "HHMMSSssssss" (hours, minutes, seconds, microseconds)
// timestamp at `index` and combines it with `date`'s date part into ticks.
// Despite the name, no timezone conversion happens here — see note below.
private static long ReadHHMMSSXXXXXXAsUtcTicks(DateTime date, DirectBuffer db, int index)
{
    // Reads a two-ASCII-digit field starting at position i.
    int ReadTwoDigits(int i) => db.ReadAsciiDigit(i) * 10 + db.ReadAsciiDigit(i + 1);

    var hours = ReadTwoDigits(index);
    var minutes = ReadTwoDigits(index + 2);
    var seconds = ReadTwoDigits(index + 4);

    // Six microsecond digits, accumulated left to right (Horner's scheme).
    var microseconds = 0;
    for (var i = 0; i < 6; i++)
    {
        microseconds = microseconds * 10 + db.ReadAsciiDigit(index + 6 + i);
    }

    var totalTicks = date.Date.Ticks
        + hours * TimeSpan.TicksPerHour
        + minutes * TimeSpan.TicksPerMinute
        + seconds * TimeSpan.TicksPerSecond
        + microseconds * 10L; // 10 ticks per microsecond

    // Constructing a DateTime validates the tick range before returning.
    // This is a performance killer — for the same date the UTC delta is
    // constant, so UTC conversion (currently disabled) should be done by
    // adding a pre-calculated tick delta instead.
    var dt = new DateTime(totalTicks, DateTimeKind.Unspecified);
    return dt.Ticks;
}
// Unpacks the segment and delegates to the (array, offset, count) overload.
public int Write(ArraySegment<TElement> segment, ref DirectBuffer destination, uint destinationOffset = 0u, MemoryStream temporaryStream = null, CompressionMethod compression = CompressionMethod.DefaultOrNone)
{
    var array = segment.Array;
    var start = segment.Offset;
    var count = segment.Count;
    return Write(array, start, count, ref destination, destinationOffset, temporaryStream, compression);
}
// Parses `length` ASCII digits starting at `index` into an unsigned integer.
// Uses Horner's scheme (result = result * 10 + digit), which is algebraically
// identical to summing digit * 10^(length - pos - 1) as the original did,
// including under unchecked ulong wraparound.
private static ulong ReadUInt64(DirectBuffer db, int index, int length)
{
    ulong result = 0;
    for (var pos = 0; pos < length; pos++)
    {
        // (byte) truncation mirrors the original's cast of ReadAsciiDigit.
        result = result * 10 + (byte)db.ReadAsciiDigit(index + pos);
    }
    return result;
}
// Unboxes `value` to T (default(T) when null) and forwards to the typed writer.
// A non-null value of the wrong type still throws InvalidCastException,
// exactly as before.
private static int WriteObject<T>(object value, ref DirectBuffer destination, uint offset = 0u, MemoryStream ms = null, CompressionMethod compression = CompressionMethod.DefaultOrNone)
{
    T typed;
    if (value == null)
    {
        typed = default(T);
    }
    else
    {
        typed = (T)value;
    }
    return TypeHelper<T>.Write(typed, ref destination, offset, ms, compression);
}