/// <summary> /// Reads the schema according to this header data. /// </summary> /// <param name="stream">The stream.</param> /// <param name="position">The position.</param> /// <param name="hdr">The header.</param> /// <param name="schema">The schema.</param> /// <param name="marsh">The marshaller.</param> /// <returns> /// Schema. /// </returns> public static BinaryObjectSchemaField[] ReadSchema(IBinaryStream stream, int position, BinaryObjectHeader hdr, BinaryObjectSchema schema, Marshaller marsh) { Debug.Assert(stream != null); Debug.Assert(schema != null); Debug.Assert(marsh != null); return ReadSchema(stream, position, hdr, () => GetFieldIds(hdr, schema, marsh)); }
/** <inheritdoc /> */ public int Invoke(IBinaryStream stream) { var reader = _ignite.Marshaller.StartUnmarshal(stream); var evt = EventReader.Read<IEvent>(reader); reader.ReadGuid(); // unused node id return _filter(evt) ? 1 : 0; }
/// <summary> /// Writes this instance to the stream. /// </summary> /// <param name="stream">Stream.</param> /// <param name="marsh">Marshaller.</param> public void Write(IBinaryStream stream, Marshaller marsh) { var writer = marsh.StartMarshal(stream); try { Marshal(writer); } finally { marsh.FinishMarshal(writer); } }
/// <summary> /// Reads proxy method invocation data from the specified reader. /// </summary> /// <param name="stream">Stream.</param> /// <param name="marsh">Marshaller.</param> /// <param name="mthdName">Method name.</param> /// <param name="mthdArgs">Method arguments.</param> public static void ReadProxyMethod(IBinaryStream stream, Marshaller marsh, out string mthdName, out object[] mthdArgs) { var reader = marsh.StartUnmarshal(stream); var srvKeepBinary = reader.ReadBoolean(); mthdName = reader.ReadString(); if (reader.ReadBoolean()) { mthdArgs = new object[reader.ReadInt()]; if (srvKeepBinary) reader = marsh.StartUnmarshal(stream, true); for (var i = 0; i < mthdArgs.Length; i++) mthdArgs[i] = reader.ReadObject<object>(); } else mthdArgs = null; }
/// <summary> /// Reads method invocation result. /// </summary> /// <param name="stream">Stream.</param> /// <param name="marsh">Marshaller.</param> /// <param name="keepBinary">Binary flag.</param> /// <returns> /// Method invocation result, or exception in case of error. /// </returns> public static object ReadInvocationResult(IBinaryStream stream, Marshaller marsh, bool keepBinary) { Debug.Assert(stream != null); Debug.Assert(marsh != null); var mode = keepBinary ? BinaryMode.ForceBinary : BinaryMode.Deserialize; var reader = marsh.StartUnmarshal(stream, mode); object err; var res = BinaryUtils.ReadInvocationResult(reader, out err); if (err == null) return res; var binErr = err as IBinaryObject; throw binErr != null ? new ServiceInvocationException("Proxy method invocation failed with a binary error. " + "Examine BinaryCause for details.", binErr) : new ServiceInvocationException("Proxy method invocation failed with an exception. " + "Examine InnerException for details.", (Exception) err); }
public override ImageData Read(IBinaryStream file, ImageMetaData info) { using (var png = DeobfuscateStream(file)) return(Png.Read(png, info)); }
/// <summary> /// Reads the partitions assignment from a stream. /// </summary> /// <param name="stream">The stream.</param> /// <param name="marsh">The marshaller.</param> /// <returns>Partitions assignment.</returns> internal static IEnumerable<IEnumerable<IClusterNode>> ReadPartitions(IBinaryStream stream, Marshaller marsh) { Debug.Assert(stream != null); Debug.Assert(marsh != null); var reader = marsh.StartUnmarshal(stream); var partCnt = reader.ReadInt(); var res = new List<IEnumerable<IClusterNode>>(partCnt); for (var i = 0; i < partCnt; i++) res.Add(IgniteUtils.ReadNodes(reader)); return res; }
/// <summary> /// Constructor. /// </summary> /// <param name="marsh">Marshaller.</param> /// <param name="stream">Stream.</param> internal BinaryWriter(Marshaller marsh, IBinaryStream stream) { _marsh = marsh; _stream = stream; }
void UnpackLnd16(IBinaryStream input, byte[] output) { throw new NotImplementedException("KID Lnd16 compression not implemented."); }
/// <summary> /// Tests the stream. /// </summary> private static unsafe void TestStream(IBinaryStream stream, bool sameArr, Action flush) { Action seek = () => Assert.AreEqual(0, stream.Seek(0, SeekOrigin.Begin)); Action<Action, Func<object>, object> check = (write, read, expectedResult) => { seek(); write(); flush(); seek(); Assert.AreEqual(expectedResult, read()); }; // Arrays. Assert.AreEqual(sameArr, stream.IsSameArray(stream.GetArray())); Assert.IsFalse(stream.IsSameArray(new byte[1])); Assert.IsFalse(stream.IsSameArray(stream.GetArrayCopy())); // byte* byte* bytes = stackalloc byte[10]; *bytes = 1; *(bytes + 1) = 2; stream.Write(bytes, 2); Assert.AreEqual(2, stream.Position); flush(); seek(); Assert.AreEqual(sameArr ? 256 : 2, stream.Remaining); byte* bytes2 = stackalloc byte[2]; stream.Read(bytes2, 2); Assert.AreEqual(1, *bytes2); Assert.AreEqual(2, *(bytes2 + 1)); // char* seek(); char* chars = stackalloc char[10]; *chars = 'a'; *(chars + 1) = 'b'; Assert.AreEqual(2, stream.WriteString(chars, 2, 2, Encoding.ASCII)); flush(); seek(); stream.Read(bytes2, 2); Assert.AreEqual('a', *bytes2); Assert.AreEqual('b', *(bytes2 + 1)); // Others. check(() => stream.Write(new byte[] {3, 4, 5}, 1, 2), () => stream.ReadByteArray(2), new byte[] {4, 5}); check(() => stream.WriteBool(true), () => stream.ReadBool(), true); check(() => stream.WriteBoolArray(new[] {true, false}), () => stream.ReadBoolArray(2), new[] {true, false}); check(() => stream.WriteByte(4), () => stream.ReadByte(), 4); check(() => stream.WriteByteArray(new byte[] {4, 5, 6}), () => stream.ReadByteArray(3), new byte[] {4, 5, 6}); check(() => stream.WriteChar('x'), () => stream.ReadChar(), 'x'); check(() => stream.WriteCharArray(new[] {'a', 'b'}), () => stream.ReadCharArray(2), new[] {'a', 'b'}); check(() => stream.WriteDouble(4), () => stream.ReadDouble(), 4d); check(() => stream.WriteDoubleArray(new[] {4d}), () => stream.ReadDoubleArray(1), new[] {4d}); check(() => stream.WriteFloat(4), () => stream.ReadFloat(), 4f); check(() => stream.WriteFloatArray(new[] {4f}), () => stream.ReadFloatArray(1), new[] {4f}); check(() => stream.WriteInt(4), () => stream.ReadInt(), 4); check(() => stream.WriteInt(0, 4), () => stream.ReadInt(), 4); check(() => stream.WriteIntArray(new[] {4}), () => stream.ReadIntArray(1), new[] {4}); check(() => stream.WriteLong(4), () => stream.ReadLong(), 4L); check(() => stream.WriteLongArray(new[] {4L}), () => stream.ReadLongArray(1), new[] {4L}); check(() => stream.WriteShort(4), () => stream.ReadShort(), (short)4); check(() => stream.WriteShortArray(new short[] {4}), () => stream.ReadShortArray(1), new short[] {4}); }
/// <summary> /// Constructor. /// </summary> /// <param name="stream">Stream.</param> public BinaryStreamAdapter(IBinaryStream stream) { _stream = stream; }
public override ImageMetaData ReadMetaData(IBinaryStream stream) { var header = new byte[0x14]; if (12 != stream.Read(header, 0, 12)) { return(null); } int packed_size0 = LittleEndian.ToInt32(header, 0); int packed_size1 = LittleEndian.ToInt32(header, 1); int data_offset, packed_size, unpacked_pos; int tail_size = 0; if (5 + packed_size1 + 0x14 == stream.Length) { packed_size = packed_size1; data_offset = 5; tail_size = 0x14; unpacked_pos = 0x10; } else if (5 + packed_size1 + 0xC == stream.Length) { packed_size = packed_size1; data_offset = 5; tail_size = 12; unpacked_pos = 8; } else if (4 + packed_size0 + 0xC == stream.Length) { packed_size = packed_size0; data_offset = 4; tail_size = 12; unpacked_pos = 8; } else { packed_size = (int)stream.Length - 12; data_offset = 12; unpacked_pos = 8; } if (tail_size > 0) { stream.Seek(-tail_size, SeekOrigin.End); if (tail_size != stream.Read(header, 0, tail_size)) { return(null); } } int x = LittleEndian.ToInt32(header, 0); int y = LittleEndian.ToInt32(header, 4); if (Math.Abs(x) > 4096 || Math.Abs(y) > 4096) { return(null); } int unpacked_size = LittleEndian.ToInt32(header, unpacked_pos); int pack_type = (unpacked_size >> 30) & 3; if (3 == pack_type) { return(null); } unpacked_size &= (int)~0xC0000000; stream.Position = data_offset; byte[] bmp = UnpackBitmap(stream.AsStream, pack_type, packed_size, 0x26); if (bmp[0] != 'B' && bmp[0] != 'C' || bmp[1] != 'M') { return(null); } return(new IafMetaData { Width = LittleEndian.ToUInt32(bmp, 0x12), Height = LittleEndian.ToUInt32(bmp, 0x16), OffsetX = x, OffsetY = y, BPP = LittleEndian.ToInt16(bmp, 0x1c), DataOffset = data_offset, PackedSize = packed_size, UnpackedSize = unpacked_size, PackType = pack_type, }); }
/// <summary> /// Unmarshal object using the given stream. /// </summary> /// <param name="stream">Stream.</param> /// <returns>Unmarshalled object.</returns> protected virtual T Unmarshal <T>(IBinaryStream stream) { return(_marsh.Unmarshal <T>(stream)); }
public GrpReader(IBinaryStream file, GrpMetaData info) { m_input = file; m_info = info; m_output = new byte[m_info.Stride * (int)m_info.Height]; }
public override ImageData Read(IBinaryStream stream, ImageMetaData info) { using (var bmp = OpenAsBitmap(stream)) return(base.Read(bmp, info)); }
/// <summary> /// Reads the deployment result. /// </summary> private object ReadDeploymentResult(IBinaryStream s) { ServiceProxySerializer.ReadDeploymentResult(s, Marshaller, _keepBinary); return(null); }
/** <inheritDoc /> */ protected override T Unmarshal <T>(IBinaryStream stream) { bool keep = _keepBinary.Value; return(Marshaller.Unmarshal <T>(stream, keep)); }
public AbmReader(IBinaryStream file, AbmImageData info) : base(file, info) { m_info = info; }
/// <summary> /// Writes an array of fields to a stream. /// </summary> /// <param name="fields">Fields.</param> /// <param name="stream">Stream.</param> /// <param name="offset">Offset in the array.</param> /// <param name="count">Field count to write.</param> /// <param name="compact">Compact mode without field ids.</param> /// <returns> /// Flags according to offset sizes: <see cref="BinaryObjectHeader.Flag.OffsetOneByte" />, /// <see cref="BinaryObjectHeader.Flag.OffsetTwoBytes" />, or 0. /// </returns> public static unsafe BinaryObjectHeader.Flag WriteSchema(BinaryObjectSchemaField[] fields, IBinaryStream stream, int offset, int count, bool compact) { Debug.Assert(fields != null); Debug.Assert(stream != null); Debug.Assert(count > 0); Debug.Assert(offset >= 0); Debug.Assert(offset < fields.Length); unchecked { // Last field is the farthest in the stream var maxFieldOffset = fields[offset + count - 1].Offset; if (compact) { if (maxFieldOffset <= byte.MaxValue) { for (int i = offset; i < count + offset; i++) stream.WriteByte((byte)fields[i].Offset); return BinaryObjectHeader.Flag.OffsetOneByte; } if (maxFieldOffset <= ushort.MaxValue) { for (int i = offset; i < count + offset; i++) stream.WriteShort((short)fields[i].Offset); return BinaryObjectHeader.Flag.OffsetTwoBytes; } for (int i = offset; i < count + offset; i++) stream.WriteInt(fields[i].Offset); } else { if (maxFieldOffset <= byte.MaxValue) { for (int i = offset; i < count + offset; i++) { var field = fields[i]; stream.WriteInt(field.Id); stream.WriteByte((byte)field.Offset); } return BinaryObjectHeader.Flag.OffsetOneByte; } if (maxFieldOffset <= ushort.MaxValue) { for (int i = offset; i < count + offset; i++) { var field = fields[i]; stream.WriteInt(field.Id); stream.WriteShort((short)field.Offset); } return BinaryObjectHeader.Flag.OffsetTwoBytes; } if (BitConverter.IsLittleEndian) { fixed (BinaryObjectSchemaField* ptr = &fields[offset]) { stream.Write((byte*)ptr, count * BinaryObjectSchemaField.Size); } } else { for (int i = offset; i < count + offset; i++) { var field = fields[i]; stream.WriteInt(field.Id); stream.WriteInt(field.Offset); } } } return BinaryObjectHeader.Flag.None; } }
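// A note on the branch above: in compact mode the footer stores only field offsets, and the
// width of every offset is chosen from the largest one (the last field, since fields are laid
// out in stream order). A standalone sketch of that width selection; the method name below is
// made up for illustration and is not part of the API above.
static int GetSchemaOffsetWidth(int maxFieldOffset)
{
    if (maxFieldOffset <= byte.MaxValue)
        return 1;   // written via WriteByte, flagged as BinaryObjectHeader.Flag.OffsetOneByte
    if (maxFieldOffset <= ushort.MaxValue)
        return 2;   // written via WriteShort, flagged as BinaryObjectHeader.Flag.OffsetTwoBytes
    return 4;       // full int offsets, no offset-size flag (Flag.None)
}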
public override ImageMetaData ReadMetaData(IBinaryStream stream) { var header = stream.ReadHeader(0x20); int type = header.ToUInt16(0x10); if (0x0C == type) { int count = header.ToInt32(0); if (!ArchiveFormat.IsSaneCount(count)) { return(null); } int block_size = header.ToInt32(4); if (block_size <= 0) { return(null); } int bpp = header.ToUInt16(0x12); uint width = header.ToUInt16(0x14); uint height = header.ToUInt16(0x16); if (bpp != 32 || 0 == width || 0 == height) { return(null); } return(new PxMetaData { Width = width, Height = height, BPP = bpp, Type = type, FrameCount = count, BlockSize = block_size, BlocksWidth = header.ToUInt16(0x1C), BlocksHeight = header.ToUInt16(0x1E), }); } else if (0x90 == type) { if (!header.AsciiEqual(0x14, "Leaf")) { return(null); } int count = header.ToInt32(4); if (!ArchiveFormat.IsSaneCount(count)) { return(null); } var header_ex = stream.ReadBytes(0x20); if (0x20 != header_ex.Length) { return(null); } if (0x0A != LittleEndian.ToUInt16(header_ex, 0x10)) { return(null); } return(new PxMetaData { Width = LittleEndian.ToUInt32(header_ex, 0), Height = LittleEndian.ToUInt32(header_ex, 4), BPP = LittleEndian.ToUInt16(header_ex, 0x12), Type = type, FrameCount = count, }); } else if (0x40 == type || 0x44 == type) { int count = header.ToInt32(0); if (!ArchiveFormat.IsSaneCount(count)) { return(null); } return(new PxMetaData { Width = header.ToUInt32(0x14), Height = header.ToUInt32(0x18), Type = 0x40, BPP = 32, FrameCount = count, }); } else if (1 == type || 4 == type || 7 == type) { int bpp = header.ToUInt16(0x12); if (bpp != 32 && bpp != 8) { return(null); } return(new PxMetaData { Width = header.ToUInt32(0x14), Height = header.ToUInt32(0x18), Type = type, BPP = bpp, FrameCount = 1, }); } return(null); }
/// <summary> /// Transfer bytes from one stream to another. /// </summary> /// <param name="inStream">Input stream.</param> /// <param name="outStream">Output stream.</param> /// <param name="cnt">Bytes count.</param> private static void TransferBytes(BinaryHeapStream inStream, IBinaryStream outStream, int cnt) { outStream.Write(inStream.InternalArray, inStream.Position, cnt); inStream.Seek(cnt, SeekOrigin.Current); }
/// <summary> /// Invokes a store operation. /// </summary> /// <param name="input">Input stream.</param> /// <param name="cb">Callback.</param> /// <param name="grid">Grid.</param> /// <returns>Invocation result.</returns> /// <exception cref="IgniteException">Invalid operation type: + opType</exception> public int Invoke(IBinaryStream input, IUnmanagedTarget cb, Ignite grid) { IBinaryReader reader = grid.Marshaller.StartUnmarshal(input, _convertBinary ? BinaryMode.Deserialize : BinaryMode.ForceBinary); IBinaryRawReader rawReader = reader.GetRawReader(); int opType = rawReader.ReadByte(); // Setup cache session for this invocation. long sesId = rawReader.ReadLong(); CacheStoreSession ses = grid.HandleRegistry.Get<CacheStoreSession>(sesId, true); ses.CacheName = rawReader.ReadString(); _sesProxy.SetSession(ses); try { // Perform operation. switch (opType) { case OpLoadCache: _store.LoadCache((k, v) => WriteObjects(cb, grid, k, v), rawReader.ReadArray<object>()); break; case OpLoad: object val = _store.Load(rawReader.ReadObject<object>()); if (val != null) WriteObjects(cb, grid, val); break; case OpLoadAll: var keys = rawReader.ReadCollection(); var result = _store.LoadAll(keys); foreach (DictionaryEntry entry in result) WriteObjects(cb, grid, entry.Key, entry.Value); break; case OpPut: _store.Write(rawReader.ReadObject<object>(), rawReader.ReadObject<object>()); break; case OpPutAll: _store.WriteAll(rawReader.ReadDictionary()); break; case OpRmv: _store.Delete(rawReader.ReadObject<object>()); break; case OpRmvAll: _store.DeleteAll(rawReader.ReadCollection()); break; case OpSesEnd: grid.HandleRegistry.Release(sesId); _store.SessionEnd(rawReader.ReadBoolean()); break; default: throw new IgniteException("Invalid operation type: " + opType); } return 0; } finally { _sesProxy.ClearSession(); } }
/// <summary> /// Internal mutation routine. /// </summary> /// <param name="inStream">Input stream.</param> /// <param name="outStream">Output stream.</param> /// <param name="ctx">Context.</param> /// <param name="changeHash">Whether hash should be changed.</param> /// <param name="vals">Values to be replaced.</param> /// <returns>Mutated object.</returns> private void Mutate0(Context ctx, BinaryHeapStream inStream, IBinaryStream outStream, bool changeHash, IDictionary <int, BinaryBuilderField> vals) { int inStartPos = inStream.Position; int outStartPos = outStream.Position; byte inHdr = inStream.ReadByte(); if (inHdr == BinaryUtils.HdrNull) { outStream.WriteByte(BinaryUtils.HdrNull); } else if (inHdr == BinaryUtils.HdrHnd) { int inHnd = inStream.ReadInt(); int oldPos = inStartPos - inHnd; int newPos; if (ctx.OldToNew(oldPos, out newPos)) { // Handle is still valid. outStream.WriteByte(BinaryUtils.HdrHnd); outStream.WriteInt(outStartPos - newPos); } else { // Handle is invalid, write full object. int inRetPos = inStream.Position; inStream.Seek(oldPos, SeekOrigin.Begin); Mutate0(ctx, inStream, outStream, false, EmptyVals); inStream.Seek(inRetPos, SeekOrigin.Begin); } } else if (inHdr == BinaryUtils.HdrFull) { var inHeader = BinaryObjectHeader.Read(inStream, inStartPos); BinaryUtils.ValidateProtocolVersion(inHeader.Version); int hndPos; if (ctx.AddOldToNew(inStartPos, outStartPos, out hndPos)) { // Object could be cached in parent builder. BinaryBuilderField cachedVal; if (_parent._cache != null && _parent._cache.TryGetValue(inStartPos, out cachedVal)) { WriteField(ctx, cachedVal); } else { // New object, write in full form. var inSchema = BinaryObjectSchemaSerializer.ReadSchema(inStream, inStartPos, inHeader, _desc.Schema, _binary.Marshaller.Ignite); var outSchema = BinaryObjectSchemaHolder.Current; var schemaIdx = outSchema.PushSchema(); try { // Skip header as it is not known at this point. outStream.Seek(BinaryObjectHeader.Size, SeekOrigin.Current); if (inSchema != null) { foreach (var inField in inSchema) { BinaryBuilderField fieldVal; var fieldFound = vals.TryGetValue(inField.Id, out fieldVal); if (fieldFound && fieldVal == BinaryBuilderField.RmvMarker) { continue; } outSchema.PushField(inField.Id, outStream.Position - outStartPos); if (!fieldFound) { fieldFound = _parent._cache != null && _parent._cache.TryGetValue(inField.Offset + inStartPos, out fieldVal); } if (fieldFound) { WriteField(ctx, fieldVal); vals.Remove(inField.Id); } else { // Field is not tracked, re-write as is. inStream.Seek(inField.Offset + inStartPos, SeekOrigin.Begin); Mutate0(ctx, inStream, outStream, false, EmptyVals); } } } // Write remaining new fields. foreach (var valEntry in vals) { if (valEntry.Value == BinaryBuilderField.RmvMarker) { continue; } outSchema.PushField(valEntry.Key, outStream.Position - outStartPos); WriteField(ctx, valEntry.Value); } var flags = inHeader.IsUserType ? BinaryObjectHeader.Flag.UserType : BinaryObjectHeader.Flag.None; if (inHeader.IsCustomDotNetType) { flags |= BinaryObjectHeader.Flag.CustomDotNetType; } // Write raw data.
int outRawOff = outStream.Position - outStartPos; if (inHeader.HasRaw) { var inRawOff = inHeader.GetRawOffset(inStream, inStartPos); var inRawLen = inHeader.SchemaOffset - inRawOff; flags |= BinaryObjectHeader.Flag.HasRaw; outStream.Write(inStream.InternalArray, inStartPos + inRawOff, inRawLen); } // Write schema int outSchemaOff = outRawOff; var schemaPos = outStream.Position; int outSchemaId; if (inHeader.IsCompactFooter) { flags |= BinaryObjectHeader.Flag.CompactFooter; } var hasSchema = outSchema.WriteSchema(outStream, schemaIdx, out outSchemaId, ref flags); if (hasSchema) { outSchemaOff = schemaPos - outStartPos; flags |= BinaryObjectHeader.Flag.HasSchema; if (inHeader.HasRaw) { outStream.WriteInt(outRawOff); } if (_desc.Schema.Get(outSchemaId) == null) { _desc.Schema.Add(outSchemaId, outSchema.GetSchema(schemaIdx)); } } var outLen = outStream.Position - outStartPos; var outHash = inHeader.HashCode; if (changeHash) { // Get from identity resolver. outHash = BinaryArrayEqualityComparer.GetHashCode(outStream, outStartPos + BinaryObjectHeader.Size, schemaPos - outStartPos - BinaryObjectHeader.Size); } var outHeader = new BinaryObjectHeader(inHeader.TypeId, outHash, outLen, outSchemaId, outSchemaOff, flags); BinaryObjectHeader.Write(outHeader, outStream, outStartPos); outStream.Seek(outStartPos + outLen, SeekOrigin.Begin); // seek to the end of the object } finally { outSchema.PopSchema(schemaIdx); } } } else { // Object has already been written, write as handle. outStream.WriteByte(BinaryUtils.HdrHnd); outStream.WriteInt(outStartPos - hndPos); } // Synchronize input stream position. inStream.Seek(inStartPos + inHeader.Length, SeekOrigin.Begin); } else { // Try writing as well-known type with fixed size. outStream.WriteByte(inHdr); if (!WriteAsPredefined(inHdr, inStream, outStream, ctx)) { throw new IgniteException("Unexpected header [position=" + (inStream.Position - 1) + ", header=" + inHdr + ']'); } } }
protected override void Dispose( bool disposing ) { if( disposing ) { if( this.binaryStream.NotNullReference() ) { this.binaryStream.Close(); this.binaryStream = null; } } base.Dispose(disposing); }
/// <summary> /// Write object as a predefined type if possible. /// </summary> /// <param name="hdr">Header.</param> /// <param name="inStream">Input stream.</param> /// <param name="outStream">Output stream.</param> /// <param name="ctx">Context.</param> /// <returns><c>True</c> if was written.</returns> private bool WriteAsPredefined(byte hdr, BinaryHeapStream inStream, IBinaryStream outStream, Context ctx) { switch (hdr) { case BinaryUtils.TypeByte: TransferBytes(inStream, outStream, 1); break; case BinaryUtils.TypeShort: TransferBytes(inStream, outStream, 2); break; case BinaryUtils.TypeInt: TransferBytes(inStream, outStream, 4); break; case BinaryUtils.TypeLong: TransferBytes(inStream, outStream, 8); break; case BinaryUtils.TypeFloat: TransferBytes(inStream, outStream, 4); break; case BinaryUtils.TypeDouble: TransferBytes(inStream, outStream, 8); break; case BinaryUtils.TypeChar: TransferBytes(inStream, outStream, 2); break; case BinaryUtils.TypeBool: TransferBytes(inStream, outStream, 1); break; case BinaryUtils.TypeDecimal: TransferBytes(inStream, outStream, 4); // Transfer scale int magLen = inStream.ReadInt(); // Transfer magnitude length. outStream.WriteInt(magLen); TransferBytes(inStream, outStream, magLen); // Transfer magnitude. break; case BinaryUtils.TypeString: BinaryUtils.WriteString(BinaryUtils.ReadString(inStream), outStream); break; case BinaryUtils.TypeGuid: TransferBytes(inStream, outStream, 16); break; case BinaryUtils.TypeTimestamp: TransferBytes(inStream, outStream, 12); break; case BinaryUtils.TypeArrayByte: TransferArray(inStream, outStream, 1); break; case BinaryUtils.TypeArrayShort: TransferArray(inStream, outStream, 2); break; case BinaryUtils.TypeArrayInt: TransferArray(inStream, outStream, 4); break; case BinaryUtils.TypeArrayLong: TransferArray(inStream, outStream, 8); break; case BinaryUtils.TypeArrayFloat: TransferArray(inStream, outStream, 4); break; case BinaryUtils.TypeArrayDouble: TransferArray(inStream, outStream, 8); break; case BinaryUtils.TypeArrayChar: TransferArray(inStream, outStream, 2); break; case BinaryUtils.TypeArrayBool: TransferArray(inStream, outStream, 1); break; case BinaryUtils.TypeArrayDecimal: case BinaryUtils.TypeArrayString: case BinaryUtils.TypeArrayGuid: case BinaryUtils.TypeArrayTimestamp: int arrLen = inStream.ReadInt(); outStream.WriteInt(arrLen); for (int i = 0; i < arrLen; i++) { Mutate0(ctx, inStream, outStream, false, null); } break; case BinaryUtils.TypeArrayEnum: case BinaryUtils.TypeArray: int type = inStream.ReadInt(); outStream.WriteInt(type); if (type == BinaryUtils.TypeUnregistered) { outStream.WriteByte(inStream.ReadByte()); // String header. BinaryUtils.WriteString(BinaryUtils.ReadString(inStream), outStream); // String data. } arrLen = inStream.ReadInt(); outStream.WriteInt(arrLen); for (int i = 0; i < arrLen; i++) { Mutate0(ctx, inStream, outStream, false, EmptyVals); } break; case BinaryUtils.TypeCollection: int colLen = inStream.ReadInt(); outStream.WriteInt(colLen); outStream.WriteByte(inStream.ReadByte()); for (int i = 0; i < colLen; i++) { Mutate0(ctx, inStream, outStream, false, EmptyVals); } break; case BinaryUtils.TypeDictionary: int dictLen = inStream.ReadInt(); outStream.WriteInt(dictLen); outStream.WriteByte(inStream.ReadByte()); for (int i = 0; i < dictLen; i++) { Mutate0(ctx, inStream, outStream, false, EmptyVals); Mutate0(ctx, inStream, outStream, false, EmptyVals); } break; case BinaryUtils.TypeBinary: TransferArray(inStream, outStream, 1); // Data array. 
TransferBytes(inStream, outStream, 4); // Offset in array. break; case BinaryUtils.TypeEnum: TransferBytes(inStream, outStream, 8); // int typeId, int value. break; default: return(false); } return(true); }
public override ImageData Read(IBinaryStream stream, ImageMetaData info) { stream.Position = 0x14; return(ReadBitmapData(stream, info)); }
/** <inheritDoc /> */ protected override T Unmarshal <T>(IBinaryStream stream) { return(Marshaller.Unmarshal <T>(stream, _flagKeepBinary)); }
internal static void UnpackLnd(IBinaryStream input, byte[] output) { int unpacked_size = output.Length; int dst = 0; while (dst < unpacked_size) { int ctl = input.ReadByte(); if (-1 == ctl) { break; } if ((ctl & 0x80) != 0) { if ((ctl & 0x40) != 0) { int count = (ctl & 0x1F) + 2; if ((ctl & 0x20) != 0) { count += input.ReadUInt8() << 5; } count = Math.Min(count, unpacked_size - dst); byte v = input.ReadUInt8(); for (int i = 0; i < count; ++i) { output[dst++] = v; } } else { int count = ((ctl >> 2) & 0xF) + 2; int offset = ((ctl & 3) << 8) + input.ReadUInt8() + 1; count = Math.Min(count, unpacked_size - dst); Binary.CopyOverlapped(output, dst - offset, dst, count); dst += count; } } else if ((ctl & 0x40) != 0) { int length = Math.Min((ctl & 0x3F) + 2, unpacked_size - dst); int count = input.ReadUInt8(); input.Read(output, dst, length); dst += length; count = Math.Min(count * length, unpacked_size - dst); if (count > 0) { Binary.CopyOverlapped(output, dst - length, dst, count); dst += count; } } else { int count = (ctl & 0x1F) + 1; if ((ctl & 0x20) != 0) { count += input.ReadUInt8() << 5; } count = Math.Min(count, unpacked_size - dst); input.Read(output, dst, count); dst += count; } } }
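// UnpackLnd above relies on Binary.CopyOverlapped for its back-reference branch: the copy source
// may overlap the destination (offset smaller than count), which turns a short match into a
// repeating pattern, LZ77/RLE style. A plain-loop sketch of that behaviour, for illustration only;
// the real helper is likely optimized differently.
static void CopyOverlappedSketch(byte[] data, int src, int dst, int count)
{
    for (int i = 0; i < count; ++i)
        data[dst + i] = data[src + i]; // a forward copy re-reads bytes written earlier in this call
}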
IBinaryStream OpenBmpStream(IBinaryStream file) { var part = new StreamRegion(file.AsStream, 8, true); return(new BinaryStream(part, file.Name)); }
/// <summary> /// Read and convert a value. /// </summary> public T Convert(IBinaryStream stream) { var reader = stream == null ? null : _marsh.StartUnmarshal(stream, _keepBinary); return(_func(reader)); }
/// <summary> /// Reads the schema according to this header data. /// </summary> /// <param name="stream">The stream.</param> /// <param name="position">The position.</param> /// <param name="hdr">The header.</param> /// <param name="fieldIdsFunc">The field ids function.</param> /// <returns> /// Schema. /// </returns> public static BinaryObjectSchemaField[] ReadSchema(IBinaryStream stream, int position, BinaryObjectHeader hdr, Func<int[]> fieldIdsFunc) { Debug.Assert(stream != null); Debug.Assert(fieldIdsFunc != null); var schemaSize = hdr.SchemaFieldCount; if (schemaSize == 0) return null; stream.Seek(position + hdr.SchemaOffset, SeekOrigin.Begin); var res = new BinaryObjectSchemaField[schemaSize]; var offsetSize = hdr.SchemaFieldOffsetSize; if (hdr.IsCompactFooter) { var fieldIds = fieldIdsFunc(); Debug.Assert(fieldIds.Length == schemaSize); if (offsetSize == 1) { for (var i = 0; i < schemaSize; i++) res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadByte()); } else if (offsetSize == 2) { for (var i = 0; i < schemaSize; i++) res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadShort()); } else { for (var i = 0; i < schemaSize; i++) res[i] = new BinaryObjectSchemaField(fieldIds[i], stream.ReadInt()); } } else { if (offsetSize == 1) { for (var i = 0; i < schemaSize; i++) res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadByte()); } else if (offsetSize == 2) { for (var i = 0; i < schemaSize; i++) res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadShort()); } else { for (var i = 0; i < schemaSize; i++) res[i] = new BinaryObjectSchemaField(stream.ReadInt(), stream.ReadInt()); } } return res; }
public override ImageData Read(IBinaryStream file, ImageMetaData info) { using (var bmp = DeobfuscateStream(file)) return(Bmp.Read(bmp, info)); }
/// <summary> /// Create job instance. /// </summary> /// <param name="grid">Grid.</param> /// <param name="stream">Stream.</param> /// <returns></returns> internal static ComputeJobHolder CreateJob(Ignite grid, IBinaryStream stream) { try { return grid.Marshaller.StartUnmarshal(stream).ReadObject<ComputeJobHolder>(); } catch (Exception e) { throw new IgniteException("Failed to deserialize the job [errType=" + e.GetType().Name + ", errMsg=" + e.Message + ']'); } }
/// <summary> /// Initializes a new instance of the <see cref="ZipFileSystem"/> class /// </summary> /// <param name="stream">The <see cref="IBinaryStream"/> that contains the archive.</param> /// <param name="escapeFileNames">Indicates whether the original file names are escaped.</param> public ZipFileSystem( IBinaryStream stream, bool escapeFileNames ) : this(IOWrapper.Wrap(stream), escapeFileNames) { }
/// <summary> /// Writes method invocation result. /// </summary> /// <param name="stream">Stream.</param> /// <param name="marsh">Marshaller.</param> /// <param name="methodResult">Method result.</param> /// <param name="invocationError">Method invocation error.</param> public static void WriteInvocationResult(IBinaryStream stream, Marshaller marsh, object methodResult, Exception invocationError) { Debug.Assert(stream != null); Debug.Assert(marsh != null); var writer = marsh.StartMarshal(stream); BinaryUtils.WriteInvocationResult(writer, invocationError == null, invocationError ?? methodResult); }
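// Rough round-trip sketch pairing WriteInvocationResult with the ReadInvocationResult helper shown
// earlier. Assumptions: a configured Marshaller instance 'marsh' is in scope, both helpers are
// accessible unqualified, and BinaryHeapStream (used elsewhere in this code) can be constructed
// with an initial capacity.
var stream = new BinaryHeapStream(128);
WriteInvocationResult(stream, marsh, methodResult: 42, invocationError: null);
stream.Seek(0, SeekOrigin.Begin);                        // rewind before reading back
var res = ReadInvocationResult(stream, marsh, false);    // yields 42; throws ServiceInvocationException for error payloads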
/// <summary> /// Initializes a new instance of the <see cref="CacheClientConfiguration"/> class. /// </summary> internal CacheClientConfiguration(IBinaryStream stream, ClientProtocolVersion srvVer) { Debug.Assert(stream != null); ClientCacheConfigurationSerializer.Read(stream, this, srvVer); }
/// <summary> /// Invokes a store operation. /// </summary> /// <param name="stream">Input stream.</param> /// <param name="grid">Grid.</param> /// <returns>Invocation result.</returns> /// <exception cref="IgniteException">Invalid operation type: + opType</exception> public int Invoke(IBinaryStream stream, Ignite grid) { IBinaryReader reader = grid.Marshaller.StartUnmarshal(stream, _convertBinary ? BinaryMode.Deserialize : BinaryMode.ForceBinary); IBinaryRawReader rawReader = reader.GetRawReader(); int opType = rawReader.ReadByte(); // Setup cache session for this invocation. long sesId = rawReader.ReadLong(); CacheStoreSession ses = grid.HandleRegistry.Get <CacheStoreSession>(sesId, true); // Session cache name may change in cross-cache transaction. // Single session is used for all stores in cross-cache transactions. ses.CacheName = rawReader.ReadString(); _sesProxy.SetSession(ses); try { // Perform operation. switch (opType) { case OpLoadCache: { var args = rawReader.ReadArray <object>(); stream.Seek(0, SeekOrigin.Begin); int cnt = 0; stream.WriteInt(cnt); // Reserve space for count. var writer = grid.Marshaller.StartMarshal(stream); _store.LoadCache((k, v) => { lock (writer) // User-defined store can be multithreaded. { writer.WriteObjectDetached(k); writer.WriteObjectDetached(v); cnt++; } }, args); stream.WriteInt(0, cnt); grid.Marshaller.FinishMarshal(writer); break; } case OpLoad: { var val = _store.Load(rawReader.ReadObject <TK>()); stream.Seek(0, SeekOrigin.Begin); var writer = grid.Marshaller.StartMarshal(stream); writer.WriteObject(val); grid.Marshaller.FinishMarshal(writer); break; } case OpLoadAll: { // We can't do both read and write lazily because stream is reused. // Read keys non-lazily, write result lazily. var keys = ReadAllKeys(rawReader); var result = _store.LoadAll(keys); stream.Seek(0, SeekOrigin.Begin); int cnt = 0; stream.WriteInt(cnt); // Reserve space for count. var writer = grid.Marshaller.StartMarshal(stream); foreach (var entry in result) { var entry0 = entry; // Copy modified closure. writer.WriteObjectDetached(entry0.Key); writer.WriteObjectDetached(entry0.Value); cnt++; } stream.WriteInt(0, cnt); grid.Marshaller.FinishMarshal(writer); break; } case OpPut: _store.Write(rawReader.ReadObject <TK>(), rawReader.ReadObject <TV>()); break; case OpPutAll: _store.WriteAll(ReadPairs(rawReader)); break; case OpRmv: _store.Delete(rawReader.ReadObject <TK>()); break; case OpRmvAll: _store.DeleteAll(ReadKeys(rawReader)); break; case OpSesEnd: { var commit = rawReader.ReadBoolean(); var last = rawReader.ReadBoolean(); if (last) { grid.HandleRegistry.Release(sesId); } _store.SessionEnd(commit); break; } default: throw new IgniteException("Invalid operation type: " + opType); } return(0); } finally { _sesProxy.ClearSession(); } }
internal bool Serialize(IBinaryStream stream) { ClusterGroupImpl prj = _ignite.ClusterGroup; BinaryWriter writer = prj.Marshaller.StartMarshal(stream); try { writer.Write(this); return true; } catch (Exception e) { writer.WriteString("Failed to marshal job [job=" + _job + ", errType=" + e.GetType().Name + ", errMsg=" + e.Message + ']'); return false; } finally { // 4. Process metadata. prj.FinishMarshal(writer); } }
/// <summary> /// Invokes the cache filter. /// </summary> /// <param name="input">The input stream.</param> /// <returns>Invocation result.</returns> public int Invoke(IBinaryStream input) { var rawReader = _marsh.StartUnmarshal(input, _keepBinary).GetRawReader(); return(_invoker(rawReader.ReadObject <object>(), rawReader.ReadObject <object>()) ? 1 : 0); }
/// <summary> /// Start marshal session. /// </summary> /// <param name="stream">Stream.</param> /// <returns>Writer.</returns> public BinaryWriter StartMarshal(IBinaryStream stream) { return(new BinaryWriter(this, stream)); }
/// <summary> /// Unmarshal object. /// </summary> /// <param name="stream">Stream over underlying byte array with correct position.</param> /// <param name="keepBinary">Whether to keep binary objects in binary form.</param> /// <returns> /// Object. /// </returns> public T Unmarshal <T>(IBinaryStream stream, bool keepBinary) { return(Unmarshal <T>(stream, keepBinary ? BinaryMode.KeepBinary : BinaryMode.Deserialize, null)); }
/// <summary> /// Unmarshal object. /// </summary> /// <param name="stream">Stream over underlying byte array with correct position.</param> /// <param name="mode">The mode.</param> /// <returns> /// Object. /// </returns> public T Unmarshal <T>(IBinaryStream stream, BinaryMode mode = BinaryMode.Deserialize) { return(Unmarshal <T>(stream, mode, null)); }
/// <summary> /// Kernel start callback. /// </summary> /// <param name="interopProc">Interop processor.</param> /// <param name="stream">Stream.</param> internal static void OnStart(IUnmanagedTarget interopProc, IBinaryStream stream) { try { // 1. Read data and leave critical state ASAP. BinaryReader reader = BinaryUtils.Marshaller.StartUnmarshal(stream); // ReSharper disable once PossibleInvalidOperationException var name = reader.ReadString(); // 2. Set ID and name so that Start() method can use them later. _startup.Name = name; if (Nodes.ContainsKey(new NodeKey(name))) throw new IgniteException("Ignite with the same name already started: " + name); _startup.Ignite = new Ignite(_startup.Configuration, _startup.Name, interopProc, _startup.Marshaller, _startup.LifecycleBeans, _startup.Callbacks); } catch (Exception e) { // 5. Preserve exception to throw it later in the "Start" method and throw it further // to abort startup in Java. _startup.Error = e; throw; } }
/// <summary> /// Unmarshal object. /// </summary> /// <param name="stream">Stream over underlying byte array with correct position.</param> /// <param name="mode">The mode.</param> /// <param name="builder">Builder.</param> /// <returns> /// Object. /// </returns> public T Unmarshal <T>(IBinaryStream stream, BinaryMode mode, BinaryObjectBuilder builder) { return(new BinaryReader(this, stream, mode, builder).Deserialize <T>()); }
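// Minimal round-trip sketch over the marshal/unmarshal entry points above (StartMarshal,
// FinishMarshal, Unmarshal<T>). Assumptions: 'marsh' is a configured Marshaller and
// BinaryHeapStream accepts an initial capacity.
var stream = new BinaryHeapStream(128);
var writer = marsh.StartMarshal(stream);
writer.WriteObject(new[] { 1, 2, 3 });        // any binarizable object
marsh.FinishMarshal(writer);
stream.Seek(0, SeekOrigin.Begin);
var copy = marsh.Unmarshal<int[]>(stream);    // deserialized copy of the array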
/// <summary> /// Writes collected schema to the stream and pops it. /// </summary> /// <param name="stream">The stream.</param> /// <param name="schemaOffset">The schema offset.</param> /// <param name="schemaId">The schema identifier.</param> /// <param name="flags">Flags according to offset sizes.</param> /// <returns> /// True if current schema was non-empty; false otherwise. /// </returns> public bool WriteSchema(IBinaryStream stream, int schemaOffset, out int schemaId, ref BinaryObjectHeader.Flag flags) { schemaId = Fnv1Hash.Basis; var count = _idx - schemaOffset; if (count == 0) return false; flags |= BinaryObjectSchemaSerializer.WriteSchema(_fields, stream, schemaOffset, count, (flags & BinaryObjectHeader.Flag.CompactFooter) == BinaryObjectHeader.Flag.CompactFooter); for (var i = schemaOffset; i < _idx; i++) schemaId = Fnv1Hash.Update(schemaId, _fields[i].Id); return true; }
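// The schema id above is accumulated with Fnv1Hash over each field id. That helper is not shown
// here; as an illustration, a standard 32-bit FNV-style per-byte accumulation over the four bytes
// of a field id would look like the sketch below. The constants are the usual FNV basis/prime,
// and the exact byte order and xor/multiply ordering of the real helper are assumptions.
static class Fnv1HashSketch
{
    public const int Basis = unchecked((int) 0x811C9DC5); // standard 32-bit FNV offset basis
    private const int Prime = 0x01000193;                  // standard 32-bit FNV prime

    public static int Update(int hash, int fieldId)
    {
        unchecked
        {
            for (int shift = 0; shift < 32; shift += 8)
            {
                hash ^= (fieldId >> shift) & 0xFF; // fold in one byte of the field id
                hash *= Prime;
            }
            return hash;
        }
    }
}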
/// <summary> /// Start unmarshal session. /// </summary> /// <param name="stream">Stream.</param> /// <param name="keepBinary">Whether to keep binarizable as binary.</param> /// <returns> /// Reader. /// </returns> public BinaryReader StartUnmarshal(IBinaryStream stream, bool keepBinary) { return(new BinaryReader(this, stream, keepBinary ? BinaryMode.KeepBinary : BinaryMode.Deserialize, null)); }
internal IBinaryStreamAsStream( IBinaryStream stream ) { if( stream.NullReference() ) throw new ArgumentNullException().StoreFileLine(); this.binaryStream = stream; }
/// <summary> /// Start unmarshal session. /// </summary> /// <param name="stream">Stream.</param> /// <param name="mode">The mode.</param> /// <returns>Reader.</returns> public BinaryReader StartUnmarshal(IBinaryStream stream, BinaryMode mode = BinaryMode.Deserialize) { return(new BinaryReader(this, stream, mode, null)); }
/// <summary> /// Wraps the specified <see cref="IBinaryStream"/>, in <see cref="Stream"/>. /// </summary> /// <param name="binaryStream">The <see cref="IBinaryStream"/> to wrap.</param> /// <returns>The disposable wrapper created.</returns> public static Stream Wrap( IBinaryStream binaryStream ) { return new IBinaryStreamAsStream(binaryStream); }
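// Usage sketch for the wrapper above (ZipFileSystem earlier in this code calls it as
// IOWrapper.Wrap): adapt an IBinaryStream to a System.IO.Stream for APIs that expect a Stream.
// Per the Dispose override shown earlier, disposing the wrapper also closes the wrapped stream,
// so a 'using' block releases both. 'binaryStream' is assumed to be an existing IBinaryStream.
using( Stream adapted = IOWrapper.Wrap(binaryStream) )
{
    // hand 'adapted' to any Stream-consuming API here
}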
/// <summary> /// Invokes a store operation. /// </summary> /// <param name="stream">Input stream.</param> /// <param name="grid">Grid.</param> /// <returns>Invocation result.</returns> /// <exception cref="IgniteException">Invalid operation type: + opType</exception> public int Invoke(IBinaryStream stream, Ignite grid) { IBinaryReader reader = grid.Marshaller.StartUnmarshal(stream, _convertBinary ? BinaryMode.Deserialize : BinaryMode.ForceBinary); IBinaryRawReader rawReader = reader.GetRawReader(); int opType = rawReader.ReadByte(); // Setup cache session for this invocation. long sesId = rawReader.ReadLong(); CacheStoreSession ses = grid.HandleRegistry.Get<CacheStoreSession>(sesId, true); ses.CacheName = rawReader.ReadString(); _sesProxy.SetSession(ses); try { // Perform operation. switch (opType) { case OpLoadCache: { var args = rawReader.ReadArray<object>(); stream.Seek(0, SeekOrigin.Begin); int cnt = 0; stream.WriteInt(cnt); // Reserve space for count. var writer = grid.Marshaller.StartMarshal(stream); _store.LoadCache((k, v) => { lock (writer) // User-defined store can be multithreaded. { writer.WithDetach(w => { w.WriteObject(k); w.WriteObject(v); }); cnt++; } }, args); stream.WriteInt(0, cnt); grid.Marshaller.FinishMarshal(writer); break; } case OpLoad: { var val = _store.Load(rawReader.ReadObject<object>()); stream.Seek(0, SeekOrigin.Begin); var writer = grid.Marshaller.StartMarshal(stream); writer.WriteObject(val); grid.Marshaller.FinishMarshal(writer); break; } case OpLoadAll: { var keys = rawReader.ReadCollection(); var result = _store.LoadAll(keys); stream.Seek(0, SeekOrigin.Begin); stream.WriteInt(result.Count); var writer = grid.Marshaller.StartMarshal(stream); foreach (DictionaryEntry entry in result) { var entry0 = entry; // Copy modified closure. writer.WithDetach(w => { w.WriteObject(entry0.Key); w.WriteObject(entry0.Value); }); } grid.Marshaller.FinishMarshal(writer); break; } case OpPut: _store.Write(rawReader.ReadObject<object>(), rawReader.ReadObject<object>()); break; case OpPutAll: var size = rawReader.ReadInt(); var dict = new Hashtable(size); for (int i = 0; i < size; i++) dict[rawReader.ReadObject<object>()] = rawReader.ReadObject<object>(); _store.WriteAll(dict); break; case OpRmv: _store.Delete(rawReader.ReadObject<object>()); break; case OpRmvAll: _store.DeleteAll(rawReader.ReadCollection()); break; case OpSesEnd: grid.HandleRegistry.Release(sesId); _store.SessionEnd(rawReader.ReadBoolean()); break; default: throw new IgniteException("Invalid operation type: " + opType); } return 0; } finally { _sesProxy.ClearSession(); } }
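// For reference, a toy in-memory store with the member shape the invokers above expect
// (LoadCache, Load, LoadAll, Write, WriteAll, Delete, DeleteAll, SessionEnd). The members are
// inferred from the calls made above; this sketch is illustrative only and does not claim to
// match the exact cache store interface definition.
using System;
using System.Collections;

class InMemoryStoreSketch
{
    private readonly Hashtable _data = new Hashtable();

    public void LoadCache(Action<object, object> act, params object[] args)
    {
        foreach (DictionaryEntry e in _data)
            act(e.Key, e.Value);                   // push every stored entry into the cache
    }

    public object Load(object key)
    {
        return _data[key];                         // null when the key is absent
    }

    public IDictionary LoadAll(ICollection keys)
    {
        var res = new Hashtable();
        foreach (var key in keys)
            if (_data.ContainsKey(key))
                res[key] = _data[key];
        return res;
    }

    public void Write(object key, object val)      { _data[key] = val; }

    public void WriteAll(IDictionary entries)
    {
        foreach (DictionaryEntry e in entries)
            _data[e.Key] = e.Value;
    }

    public void Delete(object key)                 { _data.Remove(key); }

    public void DeleteAll(ICollection keys)
    {
        foreach (var key in keys)
            _data.Remove(key);
    }

    public void SessionEnd(bool commit)
    {
        // no transactional state in this toy store
    }
}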