/// <summary>
/// Serialize a single object (and, via the Write*Type helpers, its children recursively) to the stream.
/// Chunk layout produced: [1-byte type id + flags] [length header] [object reference id] [optional type descriptor id] [data].
/// The length header is back-patched after the data is written.
/// </summary>
/// <param name="writer">Destination writer; its stream must be seekable (length header is written after the data).</param>
/// <param name="obj">The object to serialize; may be null (recorded via the NullValue flag bit).</param>
/// <param name="typeSupport">Extended type info for <paramref name="obj"/> as declared by the caller.</param>
/// <param name="currentDepth">Current recursion depth (incremented on entry).</param>
/// <param name="path">Property path used for diagnostics/debug output.</param>
/// <param name="index">Element index within the parent, passed through to the debug builder.</param>
/// <returns>The number of data bytes written for this object (the value stored in the length header).</returns>
internal long WriteObject(BinaryWriter writer, object obj, ExtendedType typeSupport, int currentDepth, string path, int index)
{
    // increment the current recursion depth
    currentDepth++;

    var isTypeMapped = false;
    TypeId objectTypeId = TypeId.None;
    var newTypeSupport = typeSupport;
    try
    {
        objectTypeId = TypeUtil.GetTypeId(newTypeSupport);
    }
    catch (InvalidOperationException ex)
    {
        // rethrow with the property path prepended so the failure is locatable in the object graph
        throw new InvalidOperationException($"[{path}] {ex.Message}", ex);
    }

    // if the object type is not a concrete type, indicate so in the type mask
    isTypeMapped = _typeDescriptors != null && !newTypeSupport.IsConcreteType;
    // also resolve the concrete type as it may require being typemapped
    if (_typeDescriptors != null && newTypeSupport.ConcreteType != null && newTypeSupport.Type != newTypeSupport.ConcreteType && !newTypeSupport.IsConcreteType)
    {
        // a special condition for writing anonymous types and types without implementation or concrete type;
        // re-resolve the type id against the concrete implementation
        newTypeSupport = new ExtendedType(newTypeSupport.ConcreteType);
        isTypeMapped = true;
        objectTypeId = TypeUtil.GetTypeId(newTypeSupport);
    }

    // if we couldn't resolve a concrete type, don't map it
    if (isTypeMapped && newTypeSupport.Type == typeof(object))
    {
        isTypeMapped = false;
    }

    // fold the NullValue/TypeMapped flags into the high bits of the type id byte
    byte objectTypeIdByte = (byte)objectTypeId;
    // if the object is null, indicate so in the type mask
    if (obj == null)
    {
        objectTypeIdByte |= (byte)TypeId.NullValue;
    }
    if (isTypeMapped)
    {
        objectTypeIdByte |= (byte)TypeId.TypeMapped;
    }

    // write the object type being serialized in position 0x00
    writer.Write(objectTypeIdByte);

    // make a note of where this object starts, so we can populate the length header later
    var lengthStartPosition = writer.BaseStream.Position;

    // make room for the length prefix and object reference id by seeking past them;
    // Compact mode uses a smaller (ushort-sized) length header
    if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
    {
        writer.Seek((int)(Constants.CompactLengthHeaderSize + Constants.ObjectReferenceIdSize + (int)writer.BaseStream.Position), SeekOrigin.Begin);
    }
    else
    {
        writer.Seek((int)(Constants.LengthHeaderSize + Constants.ObjectReferenceIdSize + (int)writer.BaseStream.Position), SeekOrigin.Begin);
    }

    // write the optional type descriptor id - only interfaces can store type descriptors
    var containsTypeDescriptorId = false;
    ushort typeDescriptorId = 0;
    if (isTypeMapped)
    {
        typeDescriptorId = _typeDescriptors.AddKnownType(newTypeSupport);
        writer.Write(typeDescriptorId);
        containsTypeDescriptorId = true;
    }

    // construct a hashtable of objects we have already inspected (simple recursion loop preventer)
    // we use this hashcode method as it does not use any custom hashcode handlers the object might implement
    // (RuntimeHelpers.GetHashCode returns 0 for null, which skips the write branch below)
    ushort objectReferenceId = 0;
    bool alreadyMapped = false;
    var hashCode = System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(obj);
    if (obj != null && !_options.BitwiseHasFlag(SerializerOptions.DisableReferenceTracking))
    {
        alreadyMapped = _referenceTracker.ContainsHashcode(hashCode, obj.GetType());
        // if we already wrote this object, we want to write a reference to it in the data
        if (alreadyMapped)
        {
            objectReferenceId = _referenceTracker.GetObjectReferenceId(hashCode, obj.GetType());
        }
    }
    if (hashCode != 0 && !alreadyMapped)
    {
        if (!_options.BitwiseHasFlag(SerializerOptions.DisableReferenceTracking))
        {
            // ensure we can refer back to the reference for this object
            objectReferenceId = _referenceTracker.AddObject(hashCode, obj);
        }

        // custom types support: types that need special handling regardless of their TypeId
        var @switch = new Dictionary<Type, Action>
        {
            { typeof(XDocument), () => WriteValueType(writer, lengthStartPosition, obj, newTypeSupport) },
        };

        if (@switch.ContainsKey(newTypeSupport.Type))
        {
            @switch[newTypeSupport.Type]();
        }
        else
        {
            // dispatch to the writer for this category of type
            switch (objectTypeId)
            {
                case TypeId.Object:
                    WriteObjectType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.Struct:
                    WriteStructType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.Array:
                    WriteArrayType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.IDictionary:
                    WriteDictionaryType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.IEnumerable:
                    WriteEnumerableType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.KeyValuePair:
                    WriteKeyValueType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.Tuple:
                    WriteTupleType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path);
                    break;
                case TypeId.Enum:
                    // enums are written as their value with a generic Enum type wrapper
                    WriteValueType(writer, lengthStartPosition, obj, new ExtendedType(typeof(Enum)));
                    break;
                default:
                    WriteValueType(writer, lengthStartPosition, obj, newTypeSupport);
                    break;
            }
        }
    }

    var currentPosition = writer.BaseStream.Position;
    // write the length header at the start of this object, excluding the objectReferenceId at the end
    // NOTE(review): sizeof(ushort) here is assumed to equal Constants.ObjectReferenceIdSize — confirm they stay in sync
    var dataLength = (int)(writer.BaseStream.Position - lengthStartPosition - sizeof(ushort));
    // if we wrote a typeDescriptorId, that doesn't apply to the dataLength
    if (containsTypeDescriptorId)
    {
        dataLength -= (int)Constants.ObjectTypeDescriptorId;
    }
    WriteDebugBuilder(writer.BaseStream.Position, typeSupport, objectTypeId, currentDepth, path, index, dataLength, objectReferenceId, typeDescriptorId, hashCode);
    // seek back to the reserved slot and back-patch the length header
    writer.Seek((int)lengthStartPosition, SeekOrigin.Begin);
    if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
    {
        // Compact mode stores the length in a ushort, so oversized objects must be rejected
        if (dataLength > ushort.MaxValue)
        {
            throw new ExceedsMaxSizeException($"The object type '{newTypeSupport.Type}' serializes to a data size '{dataLength}' which is greater than supported for Compact mode (max: '{ushort.MaxValue}')");
        }
        writer.Write((ushort)dataLength);
    }
    else
    {
        writer.Write((uint)dataLength);
    }
    // write the object reference Id from the object tree.
    // this is used so we don't have to serialize objects already in the data, we can just reference its id
    writer.Write(objectReferenceId);
    // reset the position to current (end of this object's data)
    writer.Seek((int)currentPosition, SeekOrigin.Begin);
    return (dataLength);
}
/// <summary>
/// Read and validate a single chunk of data, recursively, without materializing objects.
/// Used as a structural validation pass over the serialized stream.
/// </summary>
/// <param name="reader">Reader positioned at the start of a chunk (the type id byte).</param>
/// <param name="typeDescriptors">Known type descriptors, or null when none have been read yet.</param>
/// <param name="dataSettings">Data settings that determine the length-header size (Compact vs. full).</param>
/// <returns>True when the chunk (and any nested chunks) parsed successfully; false otherwise.</returns>
private bool ReadChunk(BinaryReader reader, TypeDescriptors typeDescriptors, SerializerDataSettings dataSettings)
{
    var isChunkValid = true;
    var objectTypeIdByte = reader.ReadByte();
    var objectTypeId = (TypeId)objectTypeIdByte;
    // NOTE(review): objectTypeId here retains the NullValue/TypeMapped flag bits (unlike ReadObject,
    // which masks them via TypeUtil.GetTypeId), so flagged chunk types fall into the switch's default
    // branch below — confirm this is intended
    var isTypeMapped = TypeUtil.IsTypeMapped(objectTypeId);
    var isTypeDescriptorMap = TypeUtil.IsTypeDescriptorMap(objectTypeId);
    // length header size depends on the Compact setting
    uint length = 0;
    if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
    {
        length = reader.ReadUInt16();
    }
    else
    {
        length = reader.ReadUInt32();
    }
    var lengthStartPosition = reader.BaseStream.Position;
    ushort objectReferenceId = 0;
    ushort typeDescriptorId = 0;
    if (isTypeDescriptorMap)
    {
        // subtract the length-header field itself to get the map's data size
        uint dataLength = 0;
        if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
        {
            dataLength = length - sizeof(ushort);
        }
        else
        {
            dataLength = length - sizeof(uint);
        }
        // read in the type descriptor map
        var typeDescriptorMap = TypeReader.GetTypeDescriptorMap(reader, dataLength);
        // continue reading the data with the descriptors now available
        return (ReadChunk(reader, typeDescriptorMap, dataSettings));
    }
    else
    {
        // read the object reference id
        objectReferenceId = reader.ReadUInt16();
    }
    // only interfaces can store type descriptors
    if (typeDescriptors != null && isTypeMapped)
    {
        typeDescriptorId = reader.ReadUInt16();
        // a reference to an unknown descriptor means the data is invalid
        if (!typeDescriptors.Contains(typeDescriptorId))
        {
            return (false);
        }
    }
    // value type
    if (length > 0)
    {
        switch (objectTypeId)
        {
            // these types may contain additional chunks, only value types may not.
            case TypeId.Array:
            case TypeId.Tuple:
            case TypeId.IDictionary:
            case TypeId.IEnumerable:
            case TypeId.Struct:
            case TypeId.Enum:
            case TypeId.KeyValuePair:
            case TypeId.Object:
                isChunkValid = ReadChunk(reader, typeDescriptors, dataSettings);
                if (!isChunkValid)
                {
                    return (false);
                }
                break;
            default:
                // it's not a chunk type, it's a value type
                isChunkValid = true;
                break;
        }
        // if only the 2-byte object reference id was consumed since the length header,
        // no nested chunk was read — this chunk holds raw value data
        if (reader.BaseStream.Position - lengthStartPosition - sizeof(ushort) == 0)
        {
            // it's a value type, read the full data
            // (bytes are consumed only to advance the stream; the contents are not inspected here)
            byte[] data = null;
            if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
            {
                data = reader.ReadBytes((int)(length - Constants.CompactLengthHeaderSize));
            }
            else
            {
                data = reader.ReadBytes((int)(length - Constants.LengthHeaderSize));
            }
        }
        else if (reader.BaseStream.Position < reader.BaseStream.Length)
        {
            // read another chunk
            isChunkValid = ReadChunk(reader, typeDescriptors, dataSettings);
            if (!isChunkValid)
            {
                return (false);
            }
        }
    }
    return (isChunkValid);
}
/// <summary>
/// Read an object recursively from the stream, materializing it via the ObjectFactory.
/// Mirrors the chunk layout produced by WriteObject:
/// [1-byte type id + flags] [length header] [object reference id] [optional type descriptor id] [data].
/// </summary>
/// <param name="reader">Reader positioned at the start of the object's chunk.</param>
/// <param name="typeSupport">The type of object being read (as declared by the caller/parent).</param>
/// <param name="currentDepth">Current recursion depth (incremented on entry).</param>
/// <param name="path">Property path used in diagnostic messages.</param>
/// <param name="dataLength">Receives the chunk's data length, excluding header fields.</param>
/// <param name="headerLength">Receives the number of header bytes consumed for this chunk.</param>
/// <returns>The deserialized object; null for null values, ignored/delegate types, or when max depth is exceeded.</returns>
internal object ReadObject(BinaryReader reader, ExtendedType typeSupport, int currentDepth, string path, ref uint dataLength, ref uint headerLength)
{
    var objectFactory = new ObjectFactory();
    var arrayDimensions = new List<int>();
    var arrayRank = 0;
    dataLength = 0;
    headerLength = 0;
    // increment the current recursion depth
    currentDepth++;
    // ensure we don't go too deep if specified
    if (_maxDepth > 0 && currentDepth >= _maxDepth)
    {
        return (default(object));
    }
    // drop any objects we are ignoring by attribute
    if (typeSupport.Attributes.Any(x => _ignoreAttributes.Contains(x)))
    {
        return (default(object));
    }
    // for delegate types, return null
    if (typeSupport.IsDelegate)
    {
        return (default(object));
    }
    // read the object type byte and unpack its flag bits
    var objectTypeByte = reader.ReadByte();
    headerLength += Constants.TypeHeaderSize;
    var isTypeDescriptorMap = TypeUtil.IsTypeDescriptorMap((TypeId)objectTypeByte);
    var isTypeMapped = TypeUtil.IsTypeMapped((TypeId)objectTypeByte);
    var isNullValue = TypeUtil.IsNullValue((TypeId)objectTypeByte);
    var objectTypeId = TypeUtil.GetTypeId(objectTypeByte);
    // read the length prefix (minus the length field itself)
    if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
    {
        dataLength = reader.ReadUInt16();
    }
    else
    {
        dataLength = reader.ReadUInt32();
    }
    // sanity-check the declared length against what is actually left in the stream
    var actualDataRemaining = reader.BaseStream.Length - reader.BaseStream.Position;
    uint expectedDataRemaining = 0;
    if (dataLength > 0)
    {
        if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
        {
            dataLength -= Constants.CompactLengthHeaderSize;
            headerLength += Constants.CompactLengthHeaderSize;
        }
        else
        {
            dataLength -= Constants.LengthHeaderSize;
            headerLength += Constants.LengthHeaderSize;
        }
        expectedDataRemaining = dataLength + Constants.ObjectReferenceIdSize;
    }
    if (expectedDataRemaining > actualDataRemaining)
    {
        throw new DataFormatException($"The object length read ({dataLength}) for type {objectTypeId} at path {path} cannot exceed the remaining size ({actualDataRemaining}) of the stream!");
    }
    if (isTypeDescriptorMap)
    {
        // process a type descriptor map, then continue reading the real object that follows it
        _typeDescriptors = GetTypeDescriptorMap(reader, dataLength);
        return (ReadObject(reader, typeSupport, currentDepth, path, ref dataLength, ref headerLength));
    }
    // read in the object reference id
    var objectReferenceId = reader.ReadUInt16();
    headerLength += Constants.ObjectReferenceIdSize;
    // only interfaces can store type descriptors
    TypeDescriptor typeDescriptor = null;
    if (_typeDescriptors?.Types.Any() == true && isTypeMapped)
    {
        // type descriptors are embedded, read in the type
        var typeId = reader.ReadUInt16();
        headerLength += Constants.ObjectTypeDescriptorId;
        typeDescriptor = _typeDescriptors.GetTypeDescriptor(typeId);
    }
    // a null value was written
    if (dataLength == 0 && isNullValue)
    {
        return (null);
    }
    // do we already have this object as a reference?
    if (!_options.BitwiseHasFlag(SerializerOptions.DisableReferenceTracking))
    {
        if (_objectReferences.ContainsKey(objectReferenceId))
        {
            var reference = _objectReferences[objectReferenceId];
            if (reference != null)
            {
                // if the types are a match, allow using it as a reference
                var referenceType = reference.GetType();
                if (typeDescriptor != null)
                {
                    var typeDescriptorType = Type.GetType(typeDescriptor.FullName);
                    if (referenceType == typeDescriptorType)
                    {
                        return (reference);
                    }
                }
                else
                {
                    if (referenceType == typeSupport.Type)
                    {
                        return (reference);
                    }
                }
            }
        }
    }
    // if it's an array, read its dimensions before we create a new object for it
    uint arrayStartPosition = 0;
    if (objectTypeId == TypeId.Array)
    {
        // number of dimensions
        arrayRank = (int)reader.ReadUInt32();
        arrayStartPosition += sizeof(uint);
        // length of each dimension
        for (var i = 0; i < arrayRank; i++)
        {
            arrayDimensions.Add((int)reader.ReadUInt32());
            arrayStartPosition += sizeof(uint);
        }
    }
    try
    {
        if (dataLength == 0)
        {
            // an empty initialized object was written; prefer the embedded descriptor's type when present
            if (!string.IsNullOrEmpty(typeDescriptor?.FullName))
            {
                return (objectFactory.CreateEmptyObject(typeDescriptor.FullName, _typeRegistry));
            }
            return (objectFactory.CreateEmptyObject(typeSupport.Type, _typeRegistry));
        }
    }
    catch (InvalidOperationException ex)
    {
        // rethrow with the property path prepended so the failure is locatable in the object graph
        throw new DataFormatException($"[{path}] {ex.Message}", ex);
    }
    // get the type support object for this object type
    ExtendedType objectExtendedType = null;
    if (objectTypeId != TypeId.Struct)
    {
        objectExtendedType = TypeUtil.GetType(objectTypeId);
        // does this object map to something expected?
        if (!TypeUtil.GetTypeId(objectExtendedType).Equals(objectTypeId))
        {
            throw new DataFormatException($"Serialized data wants to map {objectTypeId} to {typeSupport.Type.Name}, invalid data.");
        }
    }
    object newObj = null;
    // for arrays, we need to pass the dimensions of the desired arrays
    var destinationTypeSupport = typeSupport;
    try
    {
        if (!string.IsNullOrEmpty(typeDescriptor?.FullName))
        {
            // the embedded type descriptor overrides the caller-declared type
            newObj = objectFactory.CreateEmptyObject(typeDescriptor.FullName, _typeRegistry, arrayDimensions);
            destinationTypeSupport = Type.GetType(typeDescriptor.FullName).GetExtendedType();
        }
        else
        {
            newObj = objectFactory.CreateEmptyObject(destinationTypeSupport.Type, _typeRegistry, arrayDimensions);
        }
    }
    catch (InvalidOperationException ex)
    {
        throw new DataFormatException($"[{path}] {ex.Message}", ex);
    }
    // custom types support: types that need special handling regardless of their TypeId
    // (objectDataLength captured in a local so the lambda doesn't close over the ref parameter)
    var objectDataLength = dataLength;
    var @switch = new Dictionary<Type, Func<object>>
    {
        { typeof(XDocument), () => { return (ReadValueType(reader, objectDataLength, destinationTypeSupport, currentDepth, path)); } },
    };
    if (@switch.ContainsKey(destinationTypeSupport.Type))
    {
        newObj = @switch[destinationTypeSupport.Type]();
    }
    else
    {
        // dispatch to the reader for this category of type
        switch (objectTypeId)
        {
            case TypeId.Object:
                newObj = ReadObjectType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor);
                break;
            case TypeId.Struct:
                newObj = ReadStructType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor);
                break;
            case TypeId.Array:
                newObj = ReadArrayType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor, arrayStartPosition);
                break;
            case TypeId.IDictionary:
                newObj = ReadDictionaryType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor);
                break;
            case TypeId.IEnumerable:
                newObj = ReadEnumerableType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor);
                break;
            case TypeId.KeyValuePair:
                newObj = ReadKeyValueType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor);
                break;
            case TypeId.Enum:
                // enums were written with a generic Enum type wrapper
                newObj = ReadValueType(reader, dataLength, new ExtendedType(typeof(Enum)), currentDepth, path);
                break;
            case TypeId.Tuple:
                newObj = ReadTupleType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor);
                break;
            default:
                newObj = ReadValueType(reader, dataLength, destinationTypeSupport, currentDepth, path);
                break;
        }
    }
    // store the object reference id in the object reference map
    if (!_objectReferences.ContainsKey(objectReferenceId))
    {
        _objectReferences.Add(objectReferenceId, newObj);
    }
    return (newObj);
}