/// <summary>
/// Read in a map of all the types used in the serialized data
/// </summary>
/// <param name="reader">Reader positioned at the start of the descriptor map data</param>
/// <param name="dataLength">Number of bytes the descriptor map occupies</param>
/// <returns>The deserialized type descriptor map (empty when <paramref name="dataLength"/> is 0)</returns>
internal static TypeDescriptors GetTypeDescriptorMap(BinaryReader reader, uint dataLength)
{
    var descriptors = new TypeDescriptors();
    // nothing to deserialize for an empty map
    if (dataLength == 0)
        return descriptors;
    descriptors.Deserialize(reader.ReadBytes((int)dataLength));
    return descriptors;
}
/// <summary>
/// Inspect an object and serialize its contents
/// </summary>
/// <param name="sourceObject">The object to serialize</param>
/// <param name="maxDepth">Maximum recursion depth</param>
/// <param name="options">The serialization options</param>
/// <param name="ignoreAttributes">Attributes that cause a member to be skipped</param>
/// <param name="ignorePropertiesOrPaths">Optional property names/paths to skip</param>
/// <returns>The serialized bytes, or null when the object is null, ignored by attribute, or a delegate</returns>
internal byte[] InspectAndSerialize(object sourceObject, uint maxDepth, SerializerOptions options, ICollection<object> ignoreAttributes, ICollection<string> ignorePropertiesOrPaths = null)
{
    if (sourceObject == null)
        return null;

    var typeSupport = sourceObject.GetType().GetExtendedType();

    // drop any objects we are ignoring by attribute; delegate types are also not serialized
    var isIgnoredByAttribute = typeSupport.Attributes.Any(x => ignoreAttributes.Contains(x));
    if (isIgnoredByAttribute || typeSupport.IsDelegate)
        return null;

    byte[] dataBytes;
    TypeDescriptors typeDescriptors;
    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream))
        {
            typeDescriptors = TypeWriter.Write(writer, sourceObject, typeSupport, maxDepth, options, ignoreAttributes, out var diagnosticLog, ignorePropertiesOrPaths);
            DiagnosticLog = diagnosticLog;
        }
        dataBytes = stream.ToArray();
    }

    // when type maps are embedded, splice the descriptor map into the output
    if (typeDescriptors != null)
        dataBytes = BuildTypeDescriptorMap(dataBytes, typeDescriptors);

    if (options.BitwiseHasFlag(SerializerOptions.Compress))
    {
#if FEATURE_COMPRESSION
        // enable data compression for strings
        dataBytes = CompressData(dataBytes);
#else
        throw new InvalidOperationException($"Compression is only available in .Net Framework 4.6+ and .Net Standard 1.6+");
#endif
    }

    return dataBytes;
}
/// <summary>
/// Validate a byte array for valid serialization data
/// </summary>
/// <param name="bytes">The serialized bytes to validate</param>
/// <returns>True when the bytes parse as a complete, well-formed serialization stream</returns>
public bool Validate(byte[] bytes)
{
    try
    {
        using (var stream = new MemoryStream(bytes))
        using (var reader = new BinaryReader(stream))
        {
            /**
             * Stream layout:
             * [SettingsByte]            1 byte (SerializerDataSettings)
             *
             * Chunk Format
             * [ChunkType]               1 byte (byte)
             * [ChunkLength]             4 bytes (UInt32) or 2 bytes (UInt16)
             * [ObjectReferenceId]       2 bytes (UInt16)
             * [OptionalTypeDescriptor]  2 bytes (UInt16)
             * [Data]                    [ChunkLength-Int32] bytes
             *
             * Chunks may contain value types or Chunks, it's a recursive structure.
             * By reading if you've read all of the data bytes, you know you've read
             * the whole structure.
             */
            // byte 0 holds the data settings
            var dataSettings = (SerializerDataSettings)reader.ReadByte();
            var dataReader = reader;
            // if it's a compressed data stream, decompress it first
            if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compress))
                dataReader = TypeReader.Decompress(dataReader);
            // read in all chunks; no type descriptors are known at this point
            return ReadChunk(dataReader, null, dataSettings);
        }
    }
    catch (Exception)
    {
        // any parse failure means the data is invalid
        return false;
    }
}
/// <summary>
/// Create a type writer for serializing an object graph
/// </summary>
/// <param name="maxDepth">Maximum recursion depth</param>
/// <param name="dataSettings">Settings written to byte 0 of the stream</param>
/// <param name="options">The serialization options</param>
/// <param name="ignoreAttributes">Attributes that cause a member to be skipped</param>
/// <param name="typeDescriptors">The type descriptor map to populate, or null when types are not embedded</param>
/// <param name="ignorePropertiesOrPaths">Optional property names/paths to skip</param>
public TypeWriter(uint maxDepth, SerializerDataSettings dataSettings, SerializerOptions options, ICollection<object> ignoreAttributes, TypeDescriptors typeDescriptors, ICollection<string> ignorePropertiesOrPaths = null)
{
    _maxDepth = maxDepth;
    _dataSettings = dataSettings;
    _options = options;
    _ignoreAttributes = ignoreAttributes;
    _ignorePropertiesOrPaths = ignorePropertiesOrPaths;
    _typeDescriptors = typeDescriptors;
    _debugWriter = new DebugReportWriter();
    _referenceTracker = new ObjectReferenceTracker();
    // built-in custom serializers, constructed lazily on first use
    _customSerializers = new Dictionary<Type, Lazy<ICustomSerializer>>
    {
        { typeof(Point), new Lazy<ICustomSerializer>(() => new PointSerializer()) },
        { typeof(Enum), new Lazy<ICustomSerializer>(() => new EnumSerializer()) },
        { typeof(XDocument), new Lazy<ICustomSerializer>(() => new XDocumentSerializer()) },
    };
}
/// <summary>
/// Write the parent object, and recursively process its children
/// </summary>
/// <param name="writer">Destination writer</param>
/// <param name="obj">The object to serialize</param>
/// <param name="typeSupport">Extended type information for <paramref name="obj"/></param>
/// <param name="maxDepth">Maximum recursion depth</param>
/// <param name="options">The serialization options</param>
/// <param name="ignoreAttributes">Attributes that cause a member to be skipped</param>
/// <param name="diagnosticLog">Receives the diagnostic log when WriteDiagnosticLog is set, otherwise empty</param>
/// <param name="ignorePropertiesOrPaths">Optional property names/paths to skip</param>
/// <returns>The type descriptor map, or null when EmbedTypes is not set</returns>
internal static TypeDescriptors Write(BinaryWriter writer, object obj, ExtendedType typeSupport, uint maxDepth, SerializerOptions options, ICollection<object> ignoreAttributes, out string diagnosticLog, ICollection<string> ignorePropertiesOrPaths = null)
{
    diagnosticLog = string.Empty;

    // a descriptor map is only tracked when type embedding was requested
    var typeDescriptors = options.BitwiseHasFlag(SerializerOptions.EmbedTypes)
        ? new TypeDescriptors()
        : null;

    // translate the requested options into the stream's settings flags
    var dataSettings = SerializerDataSettings.None;
    if (typeDescriptors != null)
        dataSettings |= SerializerDataSettings.TypeMap;
    if (options.BitwiseHasFlag(SerializerOptions.Compact))
        dataSettings |= SerializerDataSettings.Compact;
    if (options.BitwiseHasFlag(SerializerOptions.Compress))
        dataSettings |= SerializerDataSettings.Compress;

    // write the serializer byte 0, data settings
    writer.Write((byte)dataSettings);

    var typeWriter = new TypeWriter(maxDepth, dataSettings, options, ignoreAttributes, typeDescriptors, ignorePropertiesOrPaths);
    // start recursion at depth 0 with an empty path
    typeWriter.WriteObject(writer, obj, typeSupport, 0, string.Empty, 0);

    if (options.BitwiseHasFlag(SerializerOptions.WriteDiagnosticLog))
        diagnosticLog = typeWriter.GetDiagnosticLog();

    return typeDescriptors;
}
/// <summary>
/// Build a map of all of the types used in serialization
/// </summary>
/// <param name="dataBytes">The serialized data; byte 0 is the data settings byte</param>
/// <param name="typeDescriptors">The type descriptor map to embed</param>
/// <returns>A new array laid out as [dataSettings][descriptor chunk][remaining data]</returns>
private byte[] BuildTypeDescriptorMap(byte[] dataBytes, TypeDescriptors typeDescriptors)
{
    byte[] typeDescriptorBytes;
    using (var stream = new MemoryStream())
    {
        using (var writer = new BinaryWriter(stream))
        {
            // chunk header: the type descriptor map type id
            writer.Write((byte)TypeId.TypeDescriptorMap);
            // remember where the length prefix belongs so it can be back-filled
            var lengthStartPosition = writer.BaseStream.Position;
            // skip over the length prefix for now and write the payload
            writer.Seek((int)(lengthStartPosition + Constants.LengthHeaderSize), SeekOrigin.Begin);
            var descriptorBytes = typeDescriptors.Serialize();
            writer.Write(descriptorBytes, 0, descriptorBytes.Length);
            // back-fill the length header (covers the prefix itself plus the payload)
            var length = writer.BaseStream.Length - lengthStartPosition;
            writer.Seek((int)lengthStartPosition, SeekOrigin.Begin);
            writer.Write((int)length);
        }
        typeDescriptorBytes = stream.ToArray();
    }

    // splice the descriptor chunk in between the settings byte and the data
    var newDataBytes = new byte[dataBytes.Length + typeDescriptorBytes.Length];
    // dataSettings stays at byte-0
    newDataBytes[0] = dataBytes[0];
    // descriptor chunk immediately follows the settings byte
    Array.Copy(typeDescriptorBytes, 0, newDataBytes, 1, typeDescriptorBytes.Length);
    // then the original data, minus its settings byte
    Array.Copy(dataBytes, 1, newDataBytes, typeDescriptorBytes.Length + 1, dataBytes.Length - 1);
    return newDataBytes;
}
/// <summary>
/// Read a single chunk of data, recursively.
/// </summary>
/// <param name="reader">Reader positioned at the start of a chunk</param>
/// <param name="typeDescriptors">Known type descriptors, or null when none have been read yet</param>
/// <param name="dataSettings">Settings read from byte 0 of the stream</param>
/// <returns>True when the chunk and all nested chunks parse as valid</returns>
private bool ReadChunk(BinaryReader reader, TypeDescriptors typeDescriptors, SerializerDataSettings dataSettings)
{
    var isChunkValid = true;
    // chunk header byte: the type id of this chunk
    var objectTypeIdByte = reader.ReadByte();
    var objectTypeId = (TypeId)objectTypeIdByte;
    var isTypeMapped = TypeUtil.IsTypeMapped(objectTypeId);
    var isTypeDescriptorMap = TypeUtil.IsTypeDescriptorMap(objectTypeId);
    // chunk length: 2 bytes in Compact mode, otherwise 4 bytes
    uint length = 0;
    if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
    {
        length = reader.ReadUInt16();
    }
    else
    {
        length = reader.ReadUInt32();
    }
    // position just after the length header; used later to detect whether nested data was consumed
    var lengthStartPosition = reader.BaseStream.Position;
    ushort objectReferenceId = 0;
    ushort typeDescriptorId = 0;
    if (isTypeDescriptorMap)
    {
        // the chunk length includes its own length-prefix size; subtract it to get the payload size
        uint dataLength = 0;
        if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
        {
            dataLength = length - sizeof(ushort);
        }
        else
        {
            dataLength = length - sizeof(uint);
        }
        // read in the type descriptor map
        var typeDescriptorMap = TypeReader.GetTypeDescriptorMap(reader, dataLength);
        // continue reading the data; note the freshly-read map replaces the typeDescriptors argument
        return(ReadChunk(reader, typeDescriptorMap, dataSettings));
    }
    else
    {
        // read the object reference id
        objectReferenceId = reader.ReadUInt16();
    }
    // only interfaces can store type descriptors
    if (typeDescriptors != null && isTypeMapped)
    {
        typeDescriptorId = reader.ReadUInt16();
        // an unknown descriptor id means the stream is corrupt/invalid
        if (!typeDescriptors.Contains(typeDescriptorId))
        {
            return(false);
        }
    }
    // value type
    if (length > 0)
    {
        switch (objectTypeId)
        {
            // these types may contain additional chunks, only value types may not.
            case TypeId.Array:
            case TypeId.Tuple:
            case TypeId.IDictionary:
            case TypeId.IEnumerable:
            case TypeId.Struct:
            case TypeId.Enum:
            case TypeId.KeyValuePair:
            case TypeId.Object:
                // container types recurse into their child chunk
                isChunkValid = ReadChunk(reader, typeDescriptors, dataSettings);
                if (!isChunkValid)
                {
                    return(false);
                }
                break;
            default:
                // it's not a chunk type, it's a value type
                isChunkValid = true;
                break;
        }
        // NOTE(review): this compares against sizeof(ushort) — i.e. only the objectReferenceId was
        // consumed since the length header; presumably this identifies a leaf value chunk. It does
        // not account for an optional typeDescriptorId read above — confirm intended for validation.
        if (reader.BaseStream.Position - lengthStartPosition - sizeof(ushort) == 0)
        {
            // it's a value type, read the full data
            byte[] data = null;
            if (dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact))
            {
                data = reader.ReadBytes((int)(length - Constants.CompactLengthHeaderSize));
            }
            else
            {
                data = reader.ReadBytes((int)(length - Constants.LengthHeaderSize));
            }
        }
        else if (reader.BaseStream.Position < reader.BaseStream.Length)
        {
            // read another chunk
            isChunkValid = ReadChunk(reader, typeDescriptors, dataSettings);
            if (!isChunkValid)
            {
                return(false);
            }
        }
    }
    return(isChunkValid);
}