/// <inheritdoc />
public void Update(IBinaryStream stream, Marshaller marshaller)
{
    Debug.Assert(stream != null);
    Debug.Assert(marshaller != null);

    if (_stopped)
    {
        return;
    }

    var binaryMode = _keepBinary ? BinaryMode.ForceBinary : BinaryMode.Deserialize;
    var binaryReader = marshaller.StartUnmarshal(stream, binaryMode);

    var cacheKey = binaryReader.ReadObject<TK>();

    if (!stream.ReadBool())
    {
        // No value in the payload: remove the entry from the platform cache map.
        PlatformCacheEntry<TV> ignored;
        _map.TryRemove(cacheKey, out ignored);

        return;
    }

    // Value is present: read value, partition and topology version, then upsert.
    var cacheVal = binaryReader.ReadObject<TV>();
    var partition = stream.ReadInt();
    var topVer = new AffinityTopologyVersion(stream.ReadLong(), stream.ReadInt());

    _map[cacheKey] = new PlatformCacheEntry<TV>(
        cacheVal, GetBoxedAffinityTopologyVersion(topVer), partition);
}
/// <summary>
/// Reads the cache partition distribution map response and stores it in <c>_distributionMap</c>.
/// </summary>
/// <param name="s">Stream positioned at the response payload.</param>
/// <returns>Always null: the response is consumed for its side effect only.</returns>
private object ReadDistributionMapResponse(IBinaryStream s)
{
    var affinityTopologyVersion = new AffinityTopologyVersion(s.ReadLong(), s.ReadInt());
    var size = s.ReadInt();
    var mapping = new Dictionary<int, ClientCachePartitionMap>();

    for (int i = 0; i < size; i++)
    {
        var grp = new ClientCachePartitionAwarenessGroup(s);

        if (grp.PartitionMap == null)
        {
            // Partition awareness is not applicable for these caches.
            foreach (var cache in grp.Caches)
            {
                mapping[cache.Key] = null;
            }

            continue;
        }

        // NOTE: a second "grp.PartitionMap == null" check used to follow here;
        // it was unreachable dead code (the case is fully handled above) and has been removed.

        // Count partitions to avoid reallocating array.
        int maxPartNum = 0;

        foreach (var partMap in grp.PartitionMap)
        {
            foreach (var part in partMap.Value)
            {
                if (part > maxPartNum)
                {
                    maxPartNum = part;
                }
            }
        }

        // Populate partition array.
        var partNodeIds = new Guid[maxPartNum + 1];

        foreach (var partMap in grp.PartitionMap)
        {
            foreach (var part in partMap.Value)
            {
                partNodeIds[part] = partMap.Key;
            }
        }

        foreach (var cache in grp.Caches)
        {
            mapping[cache.Key] = new ClientCachePartitionMap(partNodeIds, cache.Value);
        }
    }

    _distributionMap = new ClientCacheTopologyPartitionMap(mapping, affinityTopologyVersion);

    return null;
}
/// <summary>
/// Initializes a new instance of the <see cref="BinaryObjectHeader"/> type by reading
/// header fields sequentially from the stream, in wire order.
/// </summary>
/// <param name="stream">Stream positioned at the start of a binary object header.</param>
[ExcludeFromCodeCoverage] // big-endian only
private BinaryObjectHeader(IBinaryStream stream)
{
    Header = stream.ReadByte();
    Version = stream.ReadByte();
    Flags = (Flag)stream.ReadShort();
    Length = stream.ReadInt();
    TypeId = stream.ReadInt();
    HashCode = stream.ReadInt();
    SchemaId = stream.ReadInt();
    SchemaOffset = stream.ReadInt();
}
/// <summary>
/// Initializes a new instance of the <see cref="ClientCacheAffinityAwarenessGroup"/> type
/// by reading cache ids, key configurations, and the partition map from the stream.
/// </summary>
/// <param name="stream">Stream positioned at the group payload.</param>
public ClientCacheAffinityAwarenessGroup(IBinaryStream stream)
{
    // Whether this group is eligible for client-side partition awareness.
    var applicable = stream.ReadBool();

    var cachesCount = stream.ReadInt();
    _caches = new List<KeyValuePair<int, Dictionary<int, int>>>(cachesCount);

    for (var i = 0; i < cachesCount; i++)
    {
        var cacheId = stream.ReadInt();

        if (!applicable)
        {
            // Key configurations are only sent for applicable groups.
            _caches.Add(new KeyValuePair<int, Dictionary<int, int>>(cacheId, null));
            continue;
        }

        var keyCfgCount = stream.ReadInt();
        Dictionary<int, int> keyCfgs = null;

        if (keyCfgCount > 0)
        {
            keyCfgs = new Dictionary<int, int>(keyCfgCount);

            for (var j = 0; j < keyCfgCount; j++)
            {
                // Pairs of ints; presumably key type id -> affinity key field id — TODO confirm against server protocol.
                keyCfgs[stream.ReadInt()] = stream.ReadInt();
            }
        }

        _caches.Add(new KeyValuePair<int, Dictionary<int, int>>(cacheId, keyCfgs));
    }

    if (!applicable)
    {
        // Partition map is only present for applicable groups; _partitionMap stays null.
        return;
    }

    var partMapSize = stream.ReadInt();
    _partitionMap = new List<KeyValuePair<Guid, List<int>>>(partMapSize);

    // Node ids are written in binary format, so a reader is needed; partition ints are raw.
    var reader = BinaryUtils.Marshaller.StartUnmarshal(stream);

    for (var i = 0; i < partMapSize; i++)
    {
        var nodeId = reader.ReadGuid();
        Debug.Assert(nodeId != null);

        var partCount = stream.ReadInt();
        var parts = new List<int>(partCount);

        for (int j = 0; j < partCount; j++)
        {
            parts.Add(stream.ReadInt());
        }

        _partitionMap.Add(new KeyValuePair<Guid, List<int>>(nodeId.Value, parts));
    }
}
/// <summary>
/// Reads the entry collection returned by the GET_ALL operation.
/// </summary>
/// <param name="reader">Reader, may be null.</param>
/// <returns>Entry list, or null when the reader is null or the result flag is false.</returns>
private static ICollection<ICacheEntry<TK, TV>> ReadGetAllDictionary(BinaryReader reader)
{
    if (reader == null)
    {
        return null;
    }

    var stream = reader.Stream;

    if (!stream.ReadBool())
    {
        return null;
    }

    var count = stream.ReadInt();
    var entries = new List<ICacheEntry<TK, TV>>(count);

    for (var idx = 0; idx < count; idx++)
    {
        var entryKey = reader.ReadObject<TK>();
        var entryVal = reader.ReadObject<TV>();

        entries.Add(new CacheEntry<TK, TV>(entryKey, entryVal));
    }

    return entries;
}
/// <summary>
/// Handles continuous query events: dispatches errors to the query handle
/// and event batches to the listener.
/// </summary>
private void HandleContinuousQueryEvents(IBinaryStream stream, Exception err,
    ICacheEntryEventListener<TK, TV> listener, ClientContinuousQueryHandle qryHandle)
{
    if (err != null)
    {
        qryHandle.OnError(err);
        return;
    }

    var notificationFlags = (ClientFlags)stream.ReadShort();
    var notificationOp = (ClientOp)stream.ReadShort();

    if ((notificationFlags & ClientFlags.Error) == ClientFlags.Error)
    {
        // Server-side error: log it and propagate to the query handle.
        var errCode = (ClientStatusCode)stream.ReadInt();
        var errMsg = _marsh.Unmarshal<string>(stream);

        GetLogger().Error("Error while handling Continuous Query notification ({0}): {1}", errCode, errMsg);
        qryHandle.OnError(new IgniteClientException(errMsg, null, errCode));

        return;
    }

    if (notificationOp != ClientOp.QueryContinuousEventNotification)
    {
        GetLogger().Error("Error while handling Continuous Query notification: unexpected op '{0}'", notificationOp);
        return;
    }

    var events = ContinuousQueryUtils.ReadEvents<TK, TV>(stream, _marsh, _keepBinary);
    listener.OnEvent(events);
}
/// <summary>
/// Reads the dictionary returned by the GET_ALL operation.
/// </summary>
/// <param name="reader">Reader, may be null.</param>
/// <returns>Key-value dictionary, or null when the reader is null or the result flag is false.</returns>
private static IDictionary<TK, TV> ReadGetAllDictionary(BinaryReader reader)
{
    if (reader == null)
    {
        return null;
    }

    var stream = reader.Stream;

    if (!stream.ReadBool())
    {
        return null;
    }

    var count = stream.ReadInt();
    IDictionary<TK, TV> map = new Dictionary<TK, TV>(count);

    for (var idx = 0; idx < count; idx++)
    {
        var entryKey = reader.ReadObject<TK>();
        var entryVal = reader.ReadObject<TV>();

        map[entryKey] = entryVal;
    }

    return map;
}
/// <summary>
/// Reads the field ids from the schema footer, maintaining the stream position.
/// </summary>
/// <param name="hdr">Binary object header.</param>
/// <param name="ignite">Ignite instance (used to resolve compact-footer schemas).</param>
/// <param name="stream">Stream containing the object.</param>
/// <param name="objectPos">Position of the object in the stream.</param>
/// <returns>Array of field ids.</returns>
public static int[] GetFieldIds(BinaryObjectHeader hdr, IIgniteInternal ignite, IBinaryStream stream, int objectPos)
{
    Debug.Assert(stream != null);

    if (hdr.IsCompactFooter)
    {
        // Get schema from Java
        return GetFieldIds(hdr, ignite);
    }

    // Remember the position so the stream can be restored afterwards.
    var savedPos = stream.Position;

    stream.Seek(objectPos + hdr.SchemaOffset, SeekOrigin.Begin);

    var fieldCount = hdr.SchemaFieldCount;
    var offsetSize = hdr.SchemaFieldOffsetSize;
    var fieldIds = new int[fieldCount];

    for (var i = 0; i < fieldCount; i++)
    {
        fieldIds[i] = stream.ReadInt();
        stream.Seek(offsetSize, SeekOrigin.Current); // Skip offsets.
    }

    stream.Seek(savedPos, SeekOrigin.Begin);

    return fieldIds;
}
/// <summary>
/// Gets the fields cursor, skipping column names in the response.
/// </summary>
private ClientQueryCursorBase<T> GetFieldsCursorNoColumnNames<T>(IBinaryStream s,
    Func<IBinaryRawReader, int, T> readerFunc)
{
    var id = s.ReadLong();
    var colCount = s.ReadInt();

    // Column count is captured so each page row is read with the same width.
    return new ClientQueryCursorBase<T>(_ignite, id, _keepBinary, s,
        ClientOp.QuerySqlFieldsCursorGetPage, reader => readerFunc(reader, colCount));
}
/// <summary>
/// Reads the schema according to this header data.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="position">The position.</param>
/// <returns>Schema, or null when there are no schema fields.</returns>
public BinaryObjectSchemaField[] ReadSchema(IBinaryStream stream, int position)
{
    Debug.Assert(stream != null);

    ThrowIfUnsupported();

    var schemaSize = SchemaFieldCount;

    if (schemaSize == 0)
    {
        return null;
    }

    stream.Seek(position + SchemaOffset, SeekOrigin.Begin);

    var schema = new BinaryObjectSchemaField[schemaSize];
    var offsetSize = SchemaFieldOffsetSize;

    if (offsetSize == 1)
    {
        for (var i = 0; i < schemaSize; i++)
        {
            schema[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadByte());
        }
    }
    else if (offsetSize == 2)
    {
        for (var i = 0; i < schemaSize; i++)
        {
            // FIX: offsets are unsigned; cast to ushort to avoid sign extension for
            // offsets >= 0x8000 (consistent with the static ReadSchema implementation).
            schema[i] = new BinaryObjectSchemaField(stream.ReadInt(), (ushort)stream.ReadShort());
        }
    }
    else
    {
        for (var i = 0; i < schemaSize; i++)
        {
            schema[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadInt());
        }
    }

    return schema;
}
/// <summary>
/// Gets the raw offset of this object in specified stream.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="position">The position.</param>
/// <returns>Raw offset.</returns>
public int GetRawOffset(IBinaryStream stream, int position)
{
    Debug.Assert(stream != null);

    if (HasRaw && HasSchema)
    {
        // Raw offset is stored in the last 4 bytes of the object.
        stream.Seek(position + Length - 4, SeekOrigin.Begin);

        return stream.ReadInt();
    }

    return SchemaOffset;
}
/// <summary>
/// Gets the raw offset of this object in specified stream.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="position">The binary object position in the stream.</param>
/// <returns>Raw offset.</returns>
public int GetRawOffset(IBinaryStream stream, int position)
{
    Debug.Assert(stream != null);

    if (HasRaw && HasSchema)
    {
        // Both schema and raw data are present: raw offset is in the last 4 bytes.
        stream.Seek(position + Length - 4, SeekOrigin.Begin);

        return stream.ReadInt();
    }

    // Either schema or raw is not present - offset is in the header.
    return SchemaOffset;
}
/// <summary>
/// Reads results of InvokeAll operation.
/// </summary>
/// <typeparam name="T">The type of the result.</typeparam>
/// <param name="inStream">Stream.</param>
/// <returns>Results of InvokeAll operation, or null when the server returned -1.</returns>
private IDictionary<TK, ICacheEntryProcessorResult<T>> ReadInvokeAllResults<T>(IBinaryStream inStream)
{
    var resultCount = inStream.ReadInt();

    if (resultCount == -1)
    {
        return null;
    }

    var resultMap = new Dictionary<TK, ICacheEntryProcessorResult<T>>(resultCount);

    for (var idx = 0; idx < resultCount; idx++)
    {
        var entryKey = Unmarshal<TK>(inStream);

        // A boolean flag precedes each result: true means an exception payload follows.
        if (inStream.ReadBool())
        {
            resultMap[entryKey] = new CacheEntryProcessorResult<T>(ReadException(inStream));
        }
        else
        {
            resultMap[entryKey] = new CacheEntryProcessorResult<T>(Unmarshal<T>(inStream));
        }
    }

    return resultMap;
}
/// <summary>
/// Reads the schema according to this header data.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="position">The position.</param>
/// <param name="hdr">The header.</param>
/// <param name="fieldIdsFunc">The field ids function (used for compact footers, where
/// field ids are not stored in the stream).</param>
/// <returns>
/// Schema, or null when there are no schema fields.
/// </returns>
public static unsafe BinaryObjectSchemaField[] ReadSchema(IBinaryStream stream, int position,
    BinaryObjectHeader hdr, Func<int[]> fieldIdsFunc)
{
    Debug.Assert(stream != null);
    Debug.Assert(fieldIdsFunc != null);

    var schemaSize = hdr.SchemaFieldCount;

    if (schemaSize == 0)
    {
        return null;
    }

    stream.Seek(position + hdr.SchemaOffset, SeekOrigin.Begin);

    var res = new BinaryObjectSchemaField[schemaSize];
    var offsetSize = hdr.SchemaFieldOffsetSize;

    if (hdr.IsCompactFooter)
    {
        // Compact footer: only offsets are in the stream; ids come from the callback.
        var fieldIds = fieldIdsFunc();

        Debug.Assert(fieldIds.Length == schemaSize);

        if (offsetSize == 1)
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadByte());
            }
        }
        else if (offsetSize == 2)
        {
            // (ushort) cast: offsets are unsigned; avoid sign extension for values >= 0x8000.
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(fieldIds[i], (ushort)stream.ReadShort());
            }
        }
        else
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadInt());
            }
        }
    }
    else
    {
        // Full footer: each schema entry is a (field id, offset) pair.
        if (offsetSize == 1)
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadByte());
            }
        }
        else if (offsetSize == 2)
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(stream.ReadInt(), (ushort)stream.ReadShort());
            }
        }
        else
        {
            if (BitConverter.IsLittleEndian)
            {
                // Bulk-copy the (id, offset) pairs straight into the struct array; guarded by
                // the endianness check, so the wire layout presumably matches the struct
                // layout only on little-endian — big-endian falls through to per-field reads.
                fixed(BinaryObjectSchemaField *ptr = &res[0])
                {
                    stream.Read((byte *)ptr, schemaSize * BinaryObjectSchemaField.Size);
                }
            }
            else
            {
                for (var i = 0; i < schemaSize; i++)
                {
                    res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadInt());
                }
            }
        }
    }

    return res;
}
/// <summary>
/// Tests the stream: array identity, raw byte/char pointer I/O, stream processor,
/// and typed read/write round-trips for every primitive and primitive array.
/// </summary>
private static unsafe void TestStream(IBinaryStream stream, bool sameArr, Action flush)
{
    // Rewind to the start and verify the reported position.
    Action seek = () => Assert.AreEqual(0, stream.Seek(0, SeekOrigin.Begin));

    // Round-trip helper: write, flush, rewind, read back, compare.
    Action<Action, Func<object>, object> check = (write, read, expectedResult) =>
    {
        seek();
        write();
        flush();
        seek();
        Assert.AreEqual(expectedResult, read());
    };

    // Arrays.
    if (stream.CanGetArray)
    {
        Assert.AreEqual(sameArr, stream.IsSameArray(stream.GetArray()));
    }

    Assert.IsFalse(stream.IsSameArray(new byte[1]));
    Assert.IsFalse(stream.IsSameArray(stream.GetArrayCopy()));

    // byte*
    byte *bytes = stackalloc byte[10];
    *bytes = 1;
    *(bytes + 1) = 2;
    stream.Write(bytes, 2);
    Assert.AreEqual(2, stream.Position);

    var proc = new SumStreamProcessor();
    Assert.AreEqual(0, stream.Apply(proc, 0));
    Assert.AreEqual(1, stream.Apply(proc, 1));
    Assert.AreEqual(3, stream.Apply(proc, 2));

    flush();
    seek();

    // Remaining depends on the implementation: presumably array-backed streams report
    // full capacity (256), others only what was written — TODO confirm per implementation.
    Assert.AreEqual(sameArr ? 256 : 2, stream.Remaining);

    byte *bytes2 = stackalloc byte[2];
    stream.Read(bytes2, 2);
    Assert.AreEqual(1, *bytes2);
    Assert.AreEqual(2, *(bytes2 + 1));

    // char*
    seek();
    char *chars = stackalloc char[10];
    *chars = 'a';
    *(chars + 1) = 'b';
    Assert.AreEqual(2, stream.WriteString(chars, 2, 2, Encoding.ASCII));
    flush();
    seek();
    stream.Read(bytes2, 2);
    Assert.AreEqual('a', *bytes2);
    Assert.AreEqual('b', *(bytes2 + 1));

    // Others.
    check(() => stream.Write(new byte[] { 3, 4, 5 }, 1, 2), () => stream.ReadByteArray(2), new byte[] { 4, 5 });
    check(() => stream.WriteBool(true), () => stream.ReadBool(), true);
    check(() => stream.WriteBoolArray(new[] { true, false }), () => stream.ReadBoolArray(2), new[] { true, false });
    check(() => stream.WriteByte(4), () => stream.ReadByte(), 4);
    check(() => stream.WriteByteArray(new byte[] { 4, 5, 6 }), () => stream.ReadByteArray(3), new byte[] { 4, 5, 6 });
    check(() => stream.WriteChar('x'), () => stream.ReadChar(), 'x');
    check(() => stream.WriteCharArray(new[] { 'a', 'b' }), () => stream.ReadCharArray(2), new[] { 'a', 'b' });
    check(() => stream.WriteDouble(4), () => stream.ReadDouble(), 4d);
    check(() => stream.WriteDoubleArray(new[] { 4d }), () => stream.ReadDoubleArray(1), new[] { 4d });
    check(() => stream.WriteFloat(4), () => stream.ReadFloat(), 4f);
    check(() => stream.WriteFloatArray(new[] { 4f }), () => stream.ReadFloatArray(1), new[] { 4f });
    check(() => stream.WriteInt(4), () => stream.ReadInt(), 4);
    check(() => stream.WriteInt(0, 4), () => stream.ReadInt(), 4);
    check(() => stream.WriteIntArray(new[] { 4 }), () => stream.ReadIntArray(1), new[] { 4 });
    check(() => stream.WriteLong(4), () => stream.ReadLong(), 4L);
    check(() => stream.WriteLongArray(new[] { 4L }), () => stream.ReadLongArray(1), new[] { 4L });
    check(() => stream.WriteShort(4), () => stream.ReadShort(), (short)4);
    check(() => stream.WriteShortArray(new short[] { 4 }), () => stream.ReadShortArray(1), new short[] { 4 });
}
/// <summary>
/// Reads the schema according to this header data.
/// </summary>
/// <param name="stream">The stream.</param>
/// <param name="position">The position.</param>
/// <param name="hdr">The header.</param>
/// <param name="fieldIdsFunc">The field ids function (used for compact footers, where
/// field ids are not stored in the stream).</param>
/// <returns>
/// Schema, or null when there are no schema fields.
/// </returns>
public static BinaryObjectSchemaField[] ReadSchema(IBinaryStream stream, int position, BinaryObjectHeader hdr,
    Func<int[]> fieldIdsFunc)
{
    Debug.Assert(stream != null);
    Debug.Assert(fieldIdsFunc != null);

    var schemaSize = hdr.SchemaFieldCount;

    if (schemaSize == 0)
    {
        return null;
    }

    stream.Seek(position + hdr.SchemaOffset, SeekOrigin.Begin);

    var res = new BinaryObjectSchemaField[schemaSize];
    var offsetSize = hdr.SchemaFieldOffsetSize;

    if (hdr.IsCompactFooter)
    {
        // Compact footer: only offsets are in the stream; ids come from the callback.
        var fieldIds = fieldIdsFunc();

        Debug.Assert(fieldIds.Length == schemaSize);

        if (offsetSize == 1)
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadByte());
            }
        }
        else if (offsetSize == 2)
        {
            // FIX: offsets are unsigned; cast to ushort to avoid sign extension for
            // offsets >= 0x8000 (consistent with the unsafe ReadSchema implementation).
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(fieldIds[i], (ushort)stream.ReadShort());
            }
        }
        else
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadInt());
            }
        }
    }
    else
    {
        // Full footer: each schema entry is a (field id, offset) pair.
        if (offsetSize == 1)
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadByte());
            }
        }
        else if (offsetSize == 2)
        {
            // FIX: same unsigned-offset cast as above.
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(stream.ReadInt(), (ushort)stream.ReadShort());
            }
        }
        else
        {
            for (var i = 0; i < schemaSize; i++)
            {
                res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadInt());
            }
        }
    }

    return res;
}
/// <summary>
/// Initializes a new instance of the <see cref="OptimizedMarshallerObject"/> class.
/// </summary>
/// <param name="stream">Stream positioned at a length-prefixed data block.</param>
public OptimizedMarshallerObject(IBinaryStream stream)
{
    Debug.Assert(stream != null);

    // Data is length-prefixed: read the length, then that many raw bytes.
    var dataLength = stream.ReadInt();

    _data = stream.ReadByteArray(dataLength);
}
/// <summary>
/// Tests the stream: array identity, raw byte/char pointer I/O,
/// and typed read/write round-trips for every primitive and primitive array.
/// </summary>
private static unsafe void TestStream(IBinaryStream stream, bool sameArr, Action flush)
{
    // Rewind to the start and verify the reported position.
    Action seek = () => Assert.AreEqual(0, stream.Seek(0, SeekOrigin.Begin));

    // Round-trip helper: write, flush, rewind, read back, compare.
    Action<Action, Func<object>, object> check = (write, read, expectedResult) =>
    {
        seek();
        write();
        flush();
        seek();
        Assert.AreEqual(expectedResult, read());
    };

    // Arrays.
    Assert.AreEqual(sameArr, stream.IsSameArray(stream.GetArray()));
    Assert.IsFalse(stream.IsSameArray(new byte[1]));
    Assert.IsFalse(stream.IsSameArray(stream.GetArrayCopy()));

    // byte*
    byte* bytes = stackalloc byte[10];
    *bytes = 1;
    *(bytes + 1) = 2;
    stream.Write(bytes, 2);
    Assert.AreEqual(2, stream.Position);

    flush();
    seek();

    // Remaining depends on the implementation: presumably array-backed streams report
    // full capacity (256), others only what was written — TODO confirm per implementation.
    Assert.AreEqual(sameArr ? 256 : 2, stream.Remaining);

    byte* bytes2 = stackalloc byte[2];
    stream.Read(bytes2, 2);
    Assert.AreEqual(1, *bytes2);
    Assert.AreEqual(2, *(bytes2 + 1));

    // char*
    seek();
    char* chars = stackalloc char[10];
    *chars = 'a';
    *(chars + 1) = 'b';
    Assert.AreEqual(2, stream.WriteString(chars, 2, 2, Encoding.ASCII));
    flush();
    seek();
    stream.Read(bytes2, 2);
    Assert.AreEqual('a', *bytes2);
    Assert.AreEqual('b', *(bytes2 + 1));

    // Others.
    check(() => stream.Write(new byte[] {3, 4, 5}, 1, 2), () => stream.ReadByteArray(2), new byte[] {4, 5});
    check(() => stream.WriteBool(true), () => stream.ReadBool(), true);
    check(() => stream.WriteBoolArray(new[] {true, false}), () => stream.ReadBoolArray(2), new[] {true, false});
    check(() => stream.WriteByte(4), () => stream.ReadByte(), 4);
    check(() => stream.WriteByteArray(new byte[] {4, 5, 6}), () => stream.ReadByteArray(3), new byte[] {4, 5, 6});
    check(() => stream.WriteChar('x'), () => stream.ReadChar(), 'x');
    check(() => stream.WriteCharArray(new[] {'a', 'b'}), () => stream.ReadCharArray(2), new[] {'a', 'b'});
    check(() => stream.WriteDouble(4), () => stream.ReadDouble(), 4d);
    check(() => stream.WriteDoubleArray(new[] {4d}), () => stream.ReadDoubleArray(1), new[] {4d});
    check(() => stream.WriteFloat(4), () => stream.ReadFloat(), 4f);
    check(() => stream.WriteFloatArray(new[] {4f}), () => stream.ReadFloatArray(1), new[] {4f});
    check(() => stream.WriteInt(4), () => stream.ReadInt(), 4);
    check(() => stream.WriteInt(0, 4), () => stream.ReadInt(), 4);
    check(() => stream.WriteIntArray(new[] {4}), () => stream.ReadIntArray(1), new[] {4});
    check(() => stream.WriteLong(4), () => stream.ReadLong(), 4L);
    check(() => stream.WriteLongArray(new[] {4L}), () => stream.ReadLongArray(1), new[] {4L});
    check(() => stream.WriteShort(4), () => stream.ReadShort(), (short)4);
    check(() => stream.WriteShortArray(new short[] {4}), () => stream.ReadShortArray(1), new short[] {4});
}