public void TestOneOrManyString()
{
    const Plurality plurality = Plurality.OneOrMany;
    var clrType = typeof(string);
    var exType = new ExtendedType(clrType, plurality);
    var toString = exType.ToString();
    Assert.AreEqual(clrType + "[+]", toString);
}

public void TestSingularString()
{
    const Plurality plurality = Plurality.Singular;
    var clrType = typeof(string);
    var exType = new ExtendedType(clrType, plurality);
    var toString = exType.ToString();
    Assert.AreEqual(clrType.ToString(), toString);
}

public void TestSingularViewType()
{
    const Plurality plurality = Plurality.Singular;
    var clrType = typeof(string);
    var exType = new ExtendedType(clrType, plurality);
    var viewType = exType.ViewType;
    Assert.AreEqual(clrType, viewType);
}

public void TestPluralViewType()
{
    const Plurality plurality = Plurality.Plural;
    var clrType = typeof(string);
    var exType = new ExtendedType(clrType, plurality);
    var viewType = exType.ViewType;
    Assert.IsTrue(viewType.IsArray);
    Assert.IsTrue(clrType == viewType.GetElementType());
}
public void Method_InstanceFuncValid()
{
    ExtendedType et = typeof(ComplexOverloads<int>);
    Method method = et.GetMethod("Add", typeof(int), typeof(int), typeof(int));
    Assert.IsNotNull(method);
    Func<ComplexOverloads<int>, int, int, int> add = method.GetFunc(
        typeof(ComplexOverloads<int>), typeof(int), typeof(int), typeof(int))
        as Func<ComplexOverloads<int>, int, int, int>;
    Assert.IsNotNull(add);
    int a = Random.Next();
    int b = Random.Next();
    ComplexOverloads<int> obj = new ComplexOverloads<int>();
    Assert.AreEqual(a + b, add(obj, a, b));
}
public override object ConvertFrom(ITypeDescriptorContext ctx, CultureInfo ci, object data)
{
    if (data == null)
        return null;

    string typeName = (string)data;
    // ReSharper disable ExceptionNotDocumented
    Type result = ExtendedType.FindType(typeName, false, true);
    // ReSharper restore ExceptionNotDocumented
    if (result == null)
        throw new ArgumentException(
            // ReSharper disable once AssignNullToNotNullAttribute
            string.Format(Resources.SimplifiedTypeNameConverter_ConvertFrom_Unknown_Type, typeName),
            nameof(data));
    return result;
}
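// Hedged usage sketch of the converter above. The class name SimplifiedTypeNameConverter
// is only inferred from the resource key and may differ in the actual library; the rest
// uses standard TypeConverter members.
using System;
using System.Globalization;

var converter = new SimplifiedTypeNameConverter();
// Resolves a simplified type name back to a System.Type via ExtendedType.FindType.
var guidType = (Type)converter.ConvertFrom(null, CultureInfo.InvariantCulture, "System.Guid");
// An unrecognized name throws ArgumentException rather than returning null.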
/// <summary>
/// Write the parent object, and recursively process its children
/// </summary>
/// <param name="writer"></param>
/// <param name="obj"></param>
/// <param name="typeSupport"></param>
/// <param name="maxDepth"></param>
/// <param name="options">The serialization options</param>
/// <param name="ignoreAttributes"></param>
internal static TypeDescriptors Write(BinaryWriter writer, object obj, ExtendedType typeSupport, uint maxDepth, SerializerOptions options,
    ICollection<object> ignoreAttributes, out string diagnosticLog, ICollection<string> ignorePropertiesOrPaths = null)
{
    var currentDepth = 0;
    diagnosticLog = string.Empty;
    TypeDescriptors typeDescriptors = null;
    if (options.BitwiseHasFlag(SerializerOptions.EmbedTypes))
        typeDescriptors = new TypeDescriptors();

    var dataSettings = SerializerDataSettings.None;
    if (typeDescriptors != null)
        dataSettings |= SerializerDataSettings.TypeMap;
    if (options.BitwiseHasFlag(SerializerOptions.Compact))
        dataSettings |= SerializerDataSettings.Compact;
    if (options.BitwiseHasFlag(SerializerOptions.Compress))
        dataSettings |= SerializerDataSettings.Compress;

    // write the serializer byte 0, data settings
    writer.Write((byte)dataSettings);

    var typeWriter = new TypeWriter(maxDepth, dataSettings, options, ignoreAttributes, typeDescriptors, ignorePropertiesOrPaths);
    typeWriter.WriteObject(writer, obj, typeSupport, currentDepth, string.Empty, 0);
    if (options.BitwiseHasFlag(SerializerOptions.WriteDiagnosticLog))
        diagnosticLog = typeWriter.GetDiagnosticLog();
    return typeDescriptors;
}
internal void WriteDictionaryType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path)
{
    // write each element
    var dictionary = (IDictionary)obj;
    var keyExtendedType = typeSupport.GenericArgumentTypes.First().GetExtendedType();
    var valueExtendedType = typeSupport.GenericArgumentTypes.Skip(1).First().GetExtendedType();
    ExtendedType valueConcreteExtendedType = null;
    var index = 0;
    foreach (DictionaryEntry item in dictionary)
    {
        // write the key
        WriteObject(writer, item.Key, keyExtendedType, currentDepth, path, index);
        // write the value, preferring the concrete type of the first non-null value
        if (item.Value != null && valueConcreteExtendedType == null)
            valueConcreteExtendedType = item.Value.GetType().GetExtendedType();
        WriteObject(writer, item.Value, valueConcreteExtendedType ?? valueExtendedType, currentDepth, path, index);
        index++;
    }
}
internal object ReadObjectType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor) { // read each property into the object var fields = newObj.GetFields(FieldOptions.AllWritable).OrderBy(x => x.Name); var rootPath = path; var localPath = string.Empty; foreach (var field in fields) { localPath = $"{rootPath}.{field.ReflectedType.Name}.{field.Name}"; uint dataLength = 0; uint headerLength = 0; var fieldExtendedType = new ExtendedType(field.Type); if (fieldExtendedType.IsDelegate) { continue; } // check for ignore attributes if (IgnoreObjectName(field.Name, localPath, field.CustomAttributes)) { continue; } // also check the property for ignore, if this is a auto-backing property if (field.BackedProperty != null && IgnoreObjectName(field.BackedProperty.Name, $"{rootPath}.{field.ReflectedType.Name}.{field.BackedPropertyName}", field.BackedProperty.CustomAttributes)) { continue; } var fieldValue = ReadObject(reader, fieldExtendedType, currentDepth, localPath, ref dataLength, ref headerLength); newObj.SetFieldValue(field, fieldValue); } return(newObj); }
/// <summary>
/// Compares two objects using the type's own equality implementation, if it defines one
/// </summary>
/// <param name="typeSupport">The extended type for the left object</param>
/// <param name="left"></param>
/// <param name="right"></param>
/// <param name="isObjectEqual">The result of the custom equality comparison</param>
/// <returns>True if the object has a custom equality comparator</returns>
private bool CompareForObjectEquality(ExtendedType typeSupport, object left, object right, out bool isObjectEqual)
{
    // order of precedence: IEquatable => Equals => (==)
    // if the object implements IEquatable, use it
    var hasIEquatable = typeSupport.Implements(typeof(IEquatable<>));
    if (hasIEquatable)
    {
        var equatableMethod = typeSupport.Methods?.FirstOrDefault(x => x.Name == "Equals" && x.Parameters.Any(y => y.ParameterType == typeSupport.Type));
        isObjectEqual = (bool)equatableMethod?.MethodInfo.Invoke(left, new object[] { right }) == true;
        return true;
    }
    else
    {
        // if the object overrides Equals(), use it
        var hasEqualsOverride = typeSupport.Methods?.Any(x => x.Name == "Equals" && x.IsOverride) == true;
        if (hasEqualsOverride)
        {
            isObjectEqual = left.Equals(right);
            return true;
        }
        else
        {
            // if the object overloads the equality operator (==), use it
            var hasEqualityOperator = typeSupport.Methods?.Any(x => x.Name == "op_Equality" && x.IsOperatorOverload) == true;
            if (hasEqualityOperator)
            {
                var operatorMethod = typeSupport.Methods?.FirstOrDefault(x => x.Name == "op_Equality" && x.Parameters.All(y => y.ParameterType == typeSupport.Type));
                isObjectEqual = (bool)operatorMethod?.MethodInfo.Invoke(left, new object[] { left, right }) == true;
                return true;
            }
        }
    }
    isObjectEqual = false;
    return false;
}
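// A minimal, hypothetical type illustrating the precedence above: because it implements
// IEquatable<T>, the first branch is taken and the strongly typed Equals(Money) is
// invoked instead of the Equals(object) override or op_Equality.
public sealed class Money : IEquatable<Money>
{
    public decimal Amount { get; }
    public Money(decimal amount) => Amount = amount;

    public bool Equals(Money other) => other != null && Amount == other.Amount;
    public override bool Equals(object obj) => Equals(obj as Money);
    public override int GetHashCode() => Amount.GetHashCode();
}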
internal void WriteObjectType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path) { // write each element var fields = obj.GetFields(FieldOptions.AllWritable).OrderBy(x => x.Name); var rootPath = path; var localPath = string.Empty; var index = 0; foreach (var field in fields) { localPath = $"{rootPath}.{field.ReflectedType.Name}.{field.Name}"; var fieldExtendedType = new ExtendedType(field.Type); var fieldValue = obj.GetFieldValue(field); fieldExtendedType.SetConcreteTypeFromInstance(fieldValue); if (fieldExtendedType.IsDelegate) { continue; } // check for ignore attributes if (IgnoreObjectName(field.Name, localPath, field.CustomAttributes)) { continue; } // also check the property for ignore, if this is a auto-backing property if (field.BackedProperty != null && IgnoreObjectName(field.BackedProperty.Name, $"{rootPath}.{field.ReflectedType.Name}.{field.BackedPropertyName}", field.BackedProperty.CustomAttributes)) { continue; } WriteObject(writer, fieldValue, fieldExtendedType, currentDepth, localPath, index); index++; } }
public virtual T Visit(ExtendedType node) { return(Visit((RealType)node)); }
/// <summary>
/// Initializes a new instance of the <see cref="Field"/> class.
/// </summary>
/// <param name="extendedType">The extended type.</param>
/// <param name="info">The field info.</param>
/// <remarks></remarks>
public Field([NotNull] ExtendedType extendedType, [NotNull] FieldInfo info)
{
    ExtendedType = extendedType;
    Info = info;
}
/// <summary> /// Initializes a new instance of the <see cref="Property"/> class. /// </summary> /// <param name="extendedType">The extended type.</param> /// <param name="info"> /// The <see cref="System.Reflection.PropertyInfo">property info</see>. /// </param> internal Property([NotNull] ExtendedType extendedType, [NotNull] PropertyInfo info) { ExtendedType = extendedType; Info = info; _getMethod = new Lazy <MethodInfo>(() => info.GetGetMethod(true), LazyThreadSafetyMode.PublicationOnly); _setMethod = new Lazy <MethodInfo>(() => info.GetSetMethod(true), LazyThreadSafetyMode.PublicationOnly); // Tries to find the underlying field for an automatic property. _automaticField = new Lazy <Field>( () => { MethodInfo getMethod; MethodBody methodBody; // If the get/set accessor is missing or we can't retrieve the method body for the get accessor, // then we're not an automatic property. if (!info.CanRead || !info.CanWrite || ((getMethod = info.GetGetMethod()) == null) || ((methodBody = getMethod.GetMethodBody()) == null)) { return(null); } // Evaluate MSIL to resolve underlying field that is accessed. byte[] getter = methodBody.GetILAsByteArray(); // ReSharper disable once PossibleNullReferenceException byte ldfld = (byte)(info.GetGetMethod().IsStatic ? OpCodes.Ldsfld : OpCodes.Ldfld).Value; // ReSharper disable once AssignNullToNotNullAttribute byte[] fieldToken = getter.SkipWhile(b => b != ldfld).Skip(1).Take(4).ToArray(); if (fieldToken.Length != 4) { return(null); } // Grab the field FieldInfo field; try { Type[] typeArguments = ExtendedType.GenericArguments.Select(g => g.Type).ToArray(); if (typeArguments.Length < 1) { typeArguments = null; } // ReSharper disable once PossibleNullReferenceException field = info.DeclaringType.Module.ResolveField( BitConverter.ToInt32(fieldToken, 0), typeArguments, null); } catch { return(null); } // Compilers don't strictly have to add this attribute, so could relax this check, but this ensures // that we are indeed looking at an automatic property. return(field != null && field.IsDefined(typeof(CompilerGeneratedAttribute), false) ? field : null); }, LazyThreadSafetyMode.PublicationOnly); }
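// Alternative sketch, not the library's IL-parsing approach above: current C# compilers
// name an auto-property's backing field "<PropertyName>k__BackingField", so for simple
// cases it can also be located by that naming convention. Note the name is a compiler
// convention, not a guarantee, which is why the code above verifies CompilerGeneratedAttribute.
using System;
using System.Reflection;

static FieldInfo FindBackingField(Type declaringType, string propertyName) =>
    declaringType.GetField($"<{propertyName}>k__BackingField",
        BindingFlags.Instance | BindingFlags.NonPublic);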
internal object ReadDictionaryType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor) { // length = entire collection // read each element uint i = 0; uint dataLength = 0; uint headerLength = 0; if (typeSupport.IsGeneric && typeSupport.GenericArgumentTypes.Any()) { // generic IDictionary<,> var genericTypes = typeSupport.Type.GetGenericArguments().ToList(); var typeSupports = genericTypes.Select(x => x.GetExtendedType()).ToList(); var keyExtendedType = typeSupports.First(); var valueExtendedType = typeSupports.Skip(1).First(); Type[] typeArgs = { genericTypes[0], genericTypes[1] }; var dictionaryType = typeof(Dictionary <,>).MakeGenericType(typeArgs); var newDictionary = Activator.CreateInstance(dictionaryType) as IDictionary; while (i < length) { var key = ReadObject(reader, keyExtendedType, currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; var value = ReadObject(reader, valueExtendedType, currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; newDictionary.Add(key, value); } // special case for concurrent dictionaries if (typeSupport.Type.GetGenericTypeDefinition() == typeof(ConcurrentDictionary <,>)) { dictionaryType = typeof(ConcurrentDictionary <,>).MakeGenericType(typeArgs); var newConcurrentDictionary = Activator.CreateInstance(dictionaryType, new object[] { newDictionary }) as IDictionary; newDictionary = newConcurrentDictionary; } // return the value return(newDictionary); } else { // non-generic IDictionary ExtendedType extendedType; if (typeSupport.GenericArgumentTypes.Any()) { extendedType = typeSupport.GenericArgumentTypes.First().GetExtendedType(); } else { extendedType = typeof(object).GetExtendedType(); } var factory = new ObjectFactory(); var newDictionary = (IDictionary)factory.CreateEmptyObject(typeSupport); while (i < length) { var key = ReadObject(reader, extendedType, currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; var value = ReadObject(reader, extendedType, currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; newDictionary.Add(key, value); } // return the value return(newDictionary); } }
internal void WriteKeyValueType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path)
{
    var keyExtendedType = typeSupport.GenericArgumentTypes.First().GetExtendedType();
    var valueExtendedType = typeSupport.GenericArgumentTypes.Skip(1).First().GetExtendedType();
    ExtendedType valueConcreteExtendedType = null;
    var key = obj.GetPropertyValue("Key");
    var value = obj.GetPropertyValue("Value");
    // write the key
    WriteObject(writer, key, keyExtendedType, currentDepth, path, 0);
    // write the value, preferring the value's concrete runtime type when it is not null
    valueConcreteExtendedType = value?.GetType().GetExtendedType();
    WriteObject(writer, value, valueConcreteExtendedType ?? valueExtendedType, currentDepth, path, 0);
}
internal void WriteTupleType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path)
{
    // write each element, treat a tuple as a list of objects
    var enumerable = new List<object>();
    if (typeSupport.IsValueTuple)
        enumerable = obj.GetValueTupleItemObjects();
    else if (typeSupport.IsTuple)
        enumerable = obj.GetTupleItemObjects();

    var valueExtendedTypes = typeSupport.GenericArgumentTypes.Select(x => x.GetExtendedType()).ToArray();
    var index = 0;
    foreach (var item in enumerable)
    {
        WriteObject(writer, item, valueExtendedTypes[index], currentDepth, path, index);
        index++;
    }
}
internal void WriteArrayType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path) { // write each element var arrayEnumerable = (IEnumerable)obj; var array = (Array)obj; var elementExtendedType = typeSupport.ElementType.GetExtendedType(); ExtendedType elementConcreteExtendedType = null; var index = 0; // calculate the dimensions of the array var rank = (uint)array.Rank; // write out the total number of dimensions writer.Write(rank); // write the length of each dimension for (var i = 0; i < rank; i++) { var dimensionSize = (uint)array.GetLength(i); writer.Write(dimensionSize); } // this will flatten a multidimensional array into a single list of values // we will need to know the dimensions (above) in order to restore it foreach (var item in arrayEnumerable) { if (item != null && elementConcreteExtendedType is null) { elementConcreteExtendedType = item.GetType().GetExtendedType(); } WriteObject(writer, item, elementConcreteExtendedType ?? elementExtendedType, currentDepth, path, index); index++; } }
public void ExtendedTypeConstructorTest()
{
    ExtendedTypes type = new ExtendedTypes(); // TODO: Initialize to an appropriate value
    ExtendedType target = new ExtendedType(type);
    Assert.Inconclusive("TODO: Implement code to verify target");
}
/// <summary> /// Read an object recursively /// </summary> /// <param name="reader"></param> /// <param name="typeSupport">Type type of object being read</param> /// <param name="customSerializers"></param> /// <param name="currentDepth"></param> /// <param name="maxDepth"></param> /// <param name="objectReferences"></param> /// <param name="path"></param> /// <param name="ignoreAttributes"></param> /// <param name="typeRegistry"></param> /// <param name="typeDescriptors"></param> /// <param name="typeDescriptor"></param> /// <param name="dataLength"></param> /// <param name="headerLength"></param> /// <returns></returns> internal object ReadObject(BinaryReader reader, ExtendedType typeSupport, int currentDepth, string path, ref uint dataLength, ref uint headerLength) { var arrayDimensions = new List <int>(); var arrayRank = 0; dataLength = 0; headerLength = 0; // increment the current recursion depth currentDepth++; // ensure we don't go too deep if specified if (_maxDepth > 0 && currentDepth >= _maxDepth) { return(default(object)); } // drop any objects we are ignoring by attribute if (typeSupport.Attributes.Any(x => _ignoreAttributes.Contains(x))) { return(default(object)); } // for delegate types, return null if (typeSupport.IsDelegate) { return(default(object)); } // read the object type var objectTypeByte = reader.ReadByte(); headerLength += Constants.TypeHeaderSize; var isTypeDescriptorMap = TypeUtil.IsTypeDescriptorMap((TypeId)objectTypeByte); var isTypeMapped = TypeUtil.IsTypeMapped((TypeId)objectTypeByte); var isNullValue = TypeUtil.IsNullValue((TypeId)objectTypeByte); var objectTypeId = TypeUtil.GetTypeId(objectTypeByte); // read the length prefix (minus the length field itself) if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact)) { dataLength = reader.ReadUInt16(); } else { dataLength = reader.ReadUInt32(); } var actualDataRemaining = reader.BaseStream.Length - reader.BaseStream.Position; uint expectedDataRemaining = 0; if (dataLength > 0) { if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact)) { dataLength -= Constants.CompactLengthHeaderSize; headerLength += Constants.CompactLengthHeaderSize; } else { dataLength -= Constants.LengthHeaderSize; headerLength += Constants.LengthHeaderSize; } expectedDataRemaining = dataLength + Constants.ObjectReferenceIdSize; } if (expectedDataRemaining > actualDataRemaining) { throw new DataFormatException($"The object length read ({dataLength}) for type {objectTypeId} at path {path} cannot exceed the remaining size ({actualDataRemaining}) of the stream!"); } if (isTypeDescriptorMap) { // process a type descriptor map, then continue _typeDescriptors = GetTypeDescriptorMap(reader, dataLength); return(ReadObject(reader, typeSupport, currentDepth, path, ref dataLength, ref headerLength)); } // read in the object reference id var objectReferenceId = reader.ReadUInt16(); headerLength += Constants.ObjectReferenceIdSize; // only interfaces can store type descriptors TypeDescriptor typeDescriptor = null; if (_typeDescriptors?.Types.Any() == true && isTypeMapped) { // type descriptors are embedded, read in the type var typeId = reader.ReadUInt16(); headerLength += Constants.ObjectTypeDescriptorId; typeDescriptor = _typeDescriptors.GetTypeDescriptor(typeId); } // an null value was written if (dataLength == 0 && isNullValue) { return(null); } // do we already have this object as a reference? 
if (!_options.BitwiseHasFlag(SerializerOptions.DisableReferenceTracking)) { if (_objectReferences.ContainsKey(objectReferenceId)) { var reference = _objectReferences[objectReferenceId]; if (reference != null) { // if the types are a match, allow using it as a reference var referenceType = reference.GetType(); if (typeDescriptor != null) { var typeDescriptorType = Type.GetType(typeDescriptor.FullName); if (referenceType == typeDescriptorType) { return(reference); } } else { if (referenceType == typeSupport.Type) { return(reference); } } } } } // if it's an array, read it's dimensions before we create a new object for it uint arrayStartPosition = 0; if (objectTypeId == TypeId.Array) { // number of dimensions arrayRank = (int)reader.ReadUInt32(); arrayStartPosition += sizeof(uint); // length of each dimension for (var i = 0; i < arrayRank; i++) { arrayDimensions.Add((int)reader.ReadUInt32()); arrayStartPosition += sizeof(uint); } } try { if (dataLength == 0) { // an empty initialized object was written if (!string.IsNullOrEmpty(typeDescriptor?.FullName)) { return(_objectFactory.CreateEmptyObject(typeDescriptor.FullName, _typeRegistry)); } return(_objectFactory.CreateEmptyObject(typeSupport.Type, _typeRegistry)); } } catch (InvalidOperationException ex) { throw new DataFormatException($"[{path}] {ex.Message}", ex); } // get the type support object for this object type ExtendedType objectExtendedType = null; if (objectTypeId != TypeId.Struct) { objectExtendedType = TypeUtil.GetType(objectTypeId).GetExtendedType(); // does this object map to something expected? if (!TypeUtil.GetTypeId(objectExtendedType).Equals(objectTypeId)) { throw new DataFormatException($"Serialized data wants to map {objectTypeId} to {typeSupport.Type.Name}, invalid data."); } } object newObj = null; // for arrays, we need to pass the dimensions of the desired arrays var destinationTypeSupport = typeSupport; try { if (!string.IsNullOrEmpty(typeDescriptor?.FullName)) { newObj = _objectFactory.CreateEmptyObject(typeDescriptor.FullName, _typeRegistry, arrayDimensions); destinationTypeSupport = Type.GetType(typeDescriptor.FullName).GetExtendedType(); } else { // if the destination type is a generic object, but we know its a more specific type then swap types if (destinationTypeSupport.Type == typeof(object) && objectExtendedType != typeof(object)) { newObj = _objectFactory.CreateEmptyObject(objectExtendedType.Type, _typeRegistry, arrayDimensions); destinationTypeSupport = objectExtendedType; } else { // standard case of create object as intended newObj = _objectFactory.CreateEmptyObject(destinationTypeSupport, _typeRegistry, arrayDimensions); } } } catch (InvalidOperationException ex) { throw new DataFormatException($"[{path}] {ex.Message}", ex); } // custom types support var objectDataLength = dataLength; var @switch = new Dictionary <Type, Func <object> > { { typeof(XDocument), () => { return(ReadValueType(reader, objectDataLength, destinationTypeSupport, currentDepth, path)); } }, }; if (@switch.ContainsKey(destinationTypeSupport.Type)) { newObj = @switch[destinationTypeSupport.Type](); } else { switch (objectTypeId) { case TypeId.Object: newObj = ReadObjectType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor); break; case TypeId.Struct: newObj = ReadStructType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor); break; case TypeId.Array: newObj = ReadArrayType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor, 
arrayStartPosition); break; case TypeId.IDictionary: newObj = ReadDictionaryType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor); break; case TypeId.IEnumerable: newObj = ReadEnumerableType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor); break; case TypeId.KeyValuePair: newObj = ReadKeyValueType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor); break; case TypeId.Enum: newObj = ReadValueType(reader, dataLength, typeof(Enum).GetExtendedType(), currentDepth, path); break; case TypeId.Tuple: newObj = ReadTupleType(newObj, reader, dataLength, destinationTypeSupport, currentDepth, path, typeDescriptor); break; default: newObj = ReadValueType(reader, dataLength, destinationTypeSupport, currentDepth, path); break; } } // store the object reference id in the object reference map if (!_objectReferences.ContainsKey(objectReferenceId)) { _objectReferences.Add(objectReferenceId, newObj); } return(newObj); }
internal Array ReadArrayType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor, uint arrayStartPosition) { // length = entire collection // read each element, starting from the position after the rank/dimension information is read uint i = arrayStartPosition; uint dataLength = 0; uint headerLength = 0; var elementExtendedType = typeSupport.ElementType.GetExtendedType(); var array = (Array)newObj; var arrayRank = array.Rank; var arrayDimensions = new List <int>(); for (var dimension = 0; dimension < arrayRank; dimension++) { arrayDimensions.Add(array.GetLength(dimension)); } var flatRowIndex = 0; while (i < length) { var element = ReadObject(reader, elementExtendedType, currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; // performance optimization, skip dimensional processing if it's a 1d array if (arrayRank > 1) { // this is an optimized multi-dimensional array reconstruction // based on the formula: indicies.Add((i / (arrayDimensions[arrayRank - 1] * arrayDimensions[arrayRank - 2] * arrayDimensions[arrayRank - 3] * arrayDimensions[arrayRank - 4] * arrayDimensions[arrayRank - 5])) % arrayDimensions[arrayRank - 6]); var indicies = new List <int>(); for (var r = 1; r <= arrayRank; r++) { var multi = 1; for (var p = 1; p < r; p++) { multi *= arrayDimensions[arrayRank - p]; } var b = (flatRowIndex / multi) % arrayDimensions[arrayRank - r]; indicies.Add(b); } indicies.Reverse(); // set element of multi-dimensional array array.SetValue(element, indicies.ToArray()); } else { // set element of 1d array array.SetValue(element, flatRowIndex); } flatRowIndex++; } return(array); }
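// Standalone sketch of the index reconstruction used in ReadArrayType above: given a flat
// element index and the dimension lengths, recover the multi-dimensional indices. For
// dimensions { 2, 3 } a flat index of 4 yields [1, 1] and 5 yields [1, 2] (row-major order).
using System.Collections.Generic;

static int[] GetIndices(int flatRowIndex, IReadOnlyList<int> dimensions)
{
    var rank = dimensions.Count;
    var indices = new List<int>();
    for (var r = 1; r <= rank; r++)
    {
        // product of the lengths of the dimensions to the right of dimension (rank - r)
        var multi = 1;
        for (var p = 1; p < r; p++)
            multi *= dimensions[rank - p];
        indices.Add((flatRowIndex / multi) % dimensions[rank - r]);
    }
    indices.Reverse();
    return indices.ToArray();
}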
internal object ReadEnumerableType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor) { // length = entire collection // read each element uint i = 0; uint dataLength = 0; uint headerLength = 0; // determine what this enumerable enumerates (it's not necessarily the generic argument of the class) Type genericType; ExtendedType genericExtendedType; // if it's a custom class that implements IEnumerable generically, get it's type argument var enumerableInterface = typeSupport.Interfaces.Where(x => x.IsGenericType && x.Name == "IEnumerable`1").FirstOrDefault(); if (enumerableInterface != null) { genericType = enumerableInterface.GetGenericArguments().FirstOrDefault(); genericExtendedType = genericType.GetExtendedType(); } else { // use the generic type from the class directly genericType = typeSupport.Type.GetGenericArguments().First(); genericExtendedType = genericType.GetExtendedType(); } var addMethod = typeSupport.Type.GetMethod("Add"); if (addMethod == null) { addMethod = typeSupport.Type.GetMethod("Push"); } if (addMethod == null) { addMethod = typeSupport.Type.GetMethod("Enqueue"); } if (addMethod == null) { throw new DataFormatException($"TypeReader does not know how to add items to this enumerable: {typeSupport.Type}"); } while (i < length) { var element = ReadObject(reader, genericExtendedType, currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; addMethod.Invoke(newObj, new[] { element }); } return(newObj); }
internal object ReadObjectType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor) { // read each property into the object var fields = newObj.GetFields(FieldOptions.AllWritable).OrderBy(x => x.Name); var rootPath = path; var localPath = string.Empty; foreach (var field in fields) { localPath = $"{rootPath}.{field.ReflectedType.Name}.{field.Name}"; uint dataLength = 0; uint headerLength = 0; var fieldExtendedType = field.Type; if (fieldExtendedType.IsDelegate) { continue; } // check for ignore attributes if (IgnoreObjectName(field.Name, localPath, field.CustomAttributes)) { continue; } // also check the property for ignore, if this is a auto-backing property if (field.BackedProperty != null && IgnoreObjectName(field.BackedProperty.Name, $"{rootPath}.{field.ReflectedType.Name}.{field.BackedPropertyName}", field.BackedProperty.CustomAttributes)) { continue; } var fieldValue = ReadObject(reader, fieldExtendedType, currentDepth, localPath, ref dataLength, ref headerLength); try { newObj.SetFieldValue(field, fieldValue); } catch (FieldAccessException) { // .net core 3.0+ no longer allows you to set values on static initializers // see https://github.com/dotnet/runtime/issues/11571 & https://github.com/dotnet/coreclr/pull/20886 } } return(newObj); }
internal object ReadKeyValueType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor)
{
    uint dataLength = 0;
    uint headerLength = 0;
    var genericTypes = typeSupport.Type.GetGenericArguments().ToList();
    var typeSupports = genericTypes.Select(x => x.GetExtendedType()).ToList();
    var keyExtendedType = typeSupports.First();
    var valueExtendedType = typeSupports.Skip(1).First();
    var key = ReadObject(reader, keyExtendedType, currentDepth, path, ref dataLength, ref headerLength);
    var value = ReadObject(reader, valueExtendedType, currentDepth, path, ref dataLength, ref headerLength);
    newObj.SetFieldValue("key", key);
    newObj.SetFieldValue("value", value);
    // return the kvp
    return newObj;
}
internal void WriteValueType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport) { var @switch = new Dictionary <Type, Action> { { typeof(bool), () => writer.Write((bool)obj) }, { typeof(byte), () => writer.Write((byte)obj) }, { typeof(sbyte), () => writer.Write((sbyte)obj) }, { typeof(short), () => writer.Write((short)obj) }, { typeof(ushort), () => writer.Write((ushort)obj) }, { typeof(int), () => writer.Write((int)obj) }, { typeof(uint), () => writer.Write((uint)obj) }, { typeof(long), () => writer.Write((long)obj) }, { typeof(ulong), () => writer.Write((ulong)obj) }, { typeof(float), () => writer.Write((float)obj) }, { typeof(double), () => writer.Write((double)obj) }, { typeof(decimal), () => writer.Write((decimal)obj) }, { typeof(Enum), () => { var bytes = _customSerializers[typeof(Enum)].Value.Serialize((Enum)obj); writer.Write(bytes); } }, { typeof(XDocument), () => { var bytes = _customSerializers[typeof(XDocument)].Value.Serialize((XDocument)obj); writer.Write(bytes); } }, { typeof(string), () => { writer.Write((string)obj); } }, { typeof(char), () => writer.Write((char)obj) }, { typeof(IntPtr), () => { writer.Write(((IntPtr)obj).ToInt64()); } }, { typeof(Guid), () => writer.Write(((Guid)obj).ToByteArray()) }, { typeof(DateTime), () => writer.Write(((DateTime)obj).ToBinary()) }, { typeof(TimeSpan), () => writer.Write(((TimeSpan)obj).Ticks) }, { typeof(Point), () => { var bytes = _customSerializers[typeof(Point)].Value.Serialize((Point)obj); writer.Write(bytes); } }, }; // write the type bytes. If the value is null, don't write it and it will receive a zero length instruction if (obj != null) { @switch[typeSupport.NullableBaseType](); } }
private void RegisterHierarchy(Type entityType, string name) { if (_entityMaps.ContainsKey(name)) { return; } var properties = entityType.GetProperties(BindingFlags.Instance | BindingFlags.Public); var propertyMappings = new Dictionary <string, IPropertyMap>(); var propertyMapType = typeof(PropertyMap <>); var relationTypes = new List <Type>(); foreach (var property in properties.Where(p => p.CanRead && p.CanWrite).OrderBy(p => p.Name)) { if (!propertyMappings.ContainsKey(property.Name)) { var genericPropertyMapType = propertyMapType.MakeGenericType(property.PropertyType); var propertyIndex = propertyMappings.Count + 1; var propertyMap = (IPropertyMap)Activator.CreateInstance(genericPropertyMapType, property, propertyIndex); propertyMappings.Add(property.Name, propertyMap); var extended = new ExtendedType(property.PropertyType); if (extended.Classification == TypeClassification.Collection) { var collectionInfo = extended.Container.AsCollection(); var extendedElementType = new ExtendedType(collectionInfo.ElementType); if (extendedElementType.Classification == TypeClassification.Complex) { relationTypes.Add(collectionInfo.ElementType); } } else if (extended.Classification == TypeClassification.Dictionary) { var dictionaryInfo = extended.Container.AsDictionary(); var extendedKeyType = new ExtendedType(dictionaryInfo.KeyType); if (extendedKeyType.Classification == TypeClassification.Complex) { relationTypes.Add(dictionaryInfo.KeyType); } var extendedValueType = new ExtendedType(dictionaryInfo.ValueType); if (extendedValueType.Classification == TypeClassification.Complex) { relationTypes.Add(dictionaryInfo.ValueType); } } else if (extended.Classification == TypeClassification.Complex) { relationTypes.Add(property.PropertyType); } } } var entityMap = EntityMap.Create(entityType, propertyMappings.Values, new IIndex[] {}); if (propertyMappings.ContainsKey("Id")) { entityMap.KeyName = "Id"; } else if (propertyMappings.ContainsKey("ID")) { entityMap.KeyName = "ID"; } else if (propertyMappings.ContainsKey(entityType.Name + "Id")) { entityMap.KeyName = entityType.Name + "Id"; } else if (propertyMappings.ContainsKey(entityType.Name + "ID")) { entityMap.KeyName = entityType.Name + "ID"; } else if (propertyMappings.ContainsKey("Guid")) { entityMap.KeyName = "Guid"; } else if (propertyMappings.ContainsKey("GUID")) { entityMap.KeyName = "GUID"; } _entityMaps.Add(name, entityMap); foreach (var relationType in relationTypes) { RegisterHierarchy(relationType); } }
private TypeReflectionContext(Type type, ITypeProvider provider)
{
    _type = type;
    _extended = provider.Extend(type);
    _properties = new Lazy<IList<PropertyReflectionContext>>(() => ParseProperties(type, provider));
}
internal void WriteEnumerableType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path) { // write each element var enumerable = (IEnumerable)obj; // special case for stack types, write the data in reverse order if (typeSupport.IsGeneric) { if (typeSupport.Type.GetGenericTypeDefinition() == typeof(ConcurrentBag <>) || typeSupport.Type.GetGenericTypeDefinition() == typeof(ConcurrentStack <>) || typeSupport.Type.GetGenericTypeDefinition() == typeof(Stack <>) ) { enumerable = Enumerable.Reverse((IEnumerable <object>)obj); } } var elementExtendedType = typeSupport.ElementType.GetExtendedType(); ExtendedType elementConcreteExtendedType = null; var index = 0; foreach (var item in enumerable) { if (item != null && elementConcreteExtendedType is null) { elementConcreteExtendedType = item.GetType().GetExtendedType(); } WriteObject(writer, item, elementConcreteExtendedType ?? elementExtendedType, currentDepth, path, index); index++; } }
/// <summary> /// (Recursive) Recursive function that inspects an object and its properties/fields and clones it /// </summary> /// <param name="sourceObject">The object to clone</param> /// <param name="currentDepth">The current tree depth</param> /// <param name="maxDepth">The max tree depth</param> /// <param name="options">The cloning options</param> /// <param name="objectTree">The object tree to prevent cyclical references</param> /// <param name="path">The current path being traversed</param> /// <param name="ignorePropertiesOrPaths">A list of properties or paths to ignore</param> /// <returns></returns> private object InspectAndCopy(object sourceObject, int currentDepth, int maxDepth, CloneOptions options, IDictionary <int, object> objectTree, string path, ICollection <string> ignorePropertiesOrPaths) { if (IgnoreObjectName(null, path, options, ignorePropertiesOrPaths)) { return(null); } if (sourceObject == null) { return(null); } // ensure we don't go too deep if specified if (maxDepth > 0 && currentDepth >= maxDepth) { return(null); } var typeSupport = new ExtendedType(sourceObject.GetType(), _defaultTypeSupportOptions); // always return the original value on value types if (typeSupport.IsValueType) { return(sourceObject); } // drop any objects we are ignoring by attribute if (typeSupport.Attributes.Any(x => _ignoreAttributes.Contains(x)) && options.BitwiseHasFlag(CloneOptions.DisableIgnoreAttributes)) { return(null); } // for delegate types, copy them by reference rather than returning null if (typeSupport.IsDelegate) { return(sourceObject); } object newObject = null; // create a new empty object of the desired type if (typeSupport.IsArray) { var sourceArray = sourceObject as Array; // calculate the dimensions of the array var arrayRank = sourceArray.Rank; // get the length of each dimension var arrayDimensions = new List <int>(); for (var dimension = 0; dimension < arrayRank; dimension++) { arrayDimensions.Add(sourceArray.GetLength(dimension)); } newObject = _objectFactory.CreateEmptyObject(typeSupport.Type, default(TypeRegistry), arrayDimensions.ToArray()); } else if (typeSupport.Type == typeof(string)) { // copy the item directly newObject = String.Copy((string)sourceObject); return(newObject); } else { newObject = _objectFactory.CreateEmptyObject(typeSupport.Type); } if (newObject == null) { return(newObject); } // increment the current recursion depth currentDepth++; // construct a hashtable of objects we have already inspected (simple recursion loop preventer) // we use this hashcode method as it does not use any custom hashcode handlers the object might implement if (sourceObject != null && !typeSupport.IsValueType) { var hashCode = System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(sourceObject); if (objectTree.ContainsKey(hashCode)) { return(objectTree[hashCode]); } // ensure we can refer back to the reference for this object objectTree.Add(hashCode, newObject); } // clone a dictionary's key/values if (typeSupport.IsDictionary && typeSupport.IsGeneric) { var genericType = typeSupport.Type.GetGenericArguments().ToList(); Type[] typeArgs = { genericType[0], genericType[1] }; var listType = typeof(Dictionary <,>).MakeGenericType(typeArgs); var newDictionary = Activator.CreateInstance(listType) as IDictionary; newObject = newDictionary; var iDictionary = (IDictionary)sourceObject; var success = false; var retryCount = 0; while (!success && retryCount < 10) { try { foreach (DictionaryEntry item in iDictionary) { var key = InspectAndCopy(item.Key, currentDepth, 
maxDepth, options, objectTree, path, ignorePropertiesOrPaths); var value = InspectAndCopy(item.Value, currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); newDictionary.Add(key, value); } success = true; } catch (InvalidOperationException) { // if the collection was modified during enumeration, stop re-initialize and retry success = false; retryCount++; newDictionary.Clear(); } } return(newObject); } else if (typeSupport.IsEnumerable && !typeSupport.IsArray) { // clone an enumerables' elements var addMethod = typeSupport.Type.GetMethod("Add"); if (addMethod == null) { addMethod = typeSupport.Type.GetMethod("Enqueue"); } if (addMethod == null) { addMethod = typeSupport.Type.GetMethod("Push"); } if (addMethod == null) { throw new TypeException($"Unsupported IEnumerable type: {typeSupport.Type.Name}"); } var enumerator = (IEnumerable)sourceObject; var success = false; var retryCount = 0; while (!success && retryCount < 10) { try { foreach (var item in enumerator) { var element = InspectAndCopy(item, currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); addMethod.Invoke(newObject, new[] { element }); } success = true; } catch (InvalidOperationException) { // if the collection was modified during enumeration, stop re-initialize and retry success = false; retryCount++; var clearMethod = typeSupport.Type.GetMethod("Clear"); clearMethod?.Invoke(newObject, null); } } return(newObject); } // clone an arrays' elements if (typeSupport.IsArray) { var sourceArray = sourceObject as Array; var newArray = newObject as Array; var arrayRank = newArray.Rank; var arrayDimensions = new List <int>(); for (var dimension = 0; dimension < arrayRank; dimension++) { arrayDimensions.Add(newArray.GetLength(dimension)); } var flatRowIndex = 0; foreach (var row in sourceArray) { var newElement = InspectAndCopy(row, currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); // performance optimization, skip dimensional processing if it's a 1d array if (arrayRank > 1) { // this is an optimized multi-dimensional array reconstruction // based on the formula: indicies.Add((i / (arrayDimensions[arrayRank - 1] * arrayDimensions[arrayRank - 2] * arrayDimensions[arrayRank - 3] * arrayDimensions[arrayRank - 4] * arrayDimensions[arrayRank - 5])) % arrayDimensions[arrayRank - 6]); var indicies = new List <int>(); for (var r = 1; r <= arrayRank; r++) { var multi = 1; for (var p = 1; p < r; p++) { multi *= arrayDimensions[arrayRank - p]; } var b = (flatRowIndex / multi) % arrayDimensions[arrayRank - r]; indicies.Add(b); } indicies.Reverse(); // set element of multi-dimensional array newArray.SetValue(newElement, indicies.ToArray()); } else { // set element of 1d array newArray.SetValue(newElement, flatRowIndex); } flatRowIndex++; } return(newArray); } var fields = sourceObject.GetFields(FieldOptions.AllWritable); var rootPath = path; var localPath = string.Empty; // clone and recurse fields if (newObject != null) { foreach (var field in fields) { localPath = $"{rootPath}.{field.Name}"; if (IgnoreObjectName(field.Name, localPath, options, ignorePropertiesOrPaths, field.CustomAttributes)) { continue; } // also check the property for ignore, if this is a auto-backing property if (field.BackedProperty != null && IgnoreObjectName(field.BackedProperty.Name, $"{rootPath}.{field.BackedPropertyName}", options, ignorePropertiesOrPaths, field.BackedProperty.CustomAttributes)) { continue; } var fieldTypeSupport = new ExtendedType(field.Type, _defaultTypeSupportOptions); var 
fieldValue = sourceObject.GetFieldValue(field); if (fieldTypeSupport.IsValueType || fieldTypeSupport.IsImmutable) { newObject.SetFieldValue(field, fieldValue); } else if (fieldValue != null) { var clonedFieldValue = InspectAndCopy(fieldValue, currentDepth, maxDepth, options, objectTree, localPath, ignorePropertiesOrPaths); newObject.SetFieldValue(field, clonedFieldValue); } } } return(newObject); }
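// Minimal sketch of why RuntimeHelpers.GetHashCode is used for the object tree above: it
// hashes by reference identity and ignores any GetHashCode override, so two distinct but
// value-equal instances are still tracked as separate references. Coordinate is a
// hypothetical type used only for this illustration.
using System;
using System.Runtime.CompilerServices;

internal sealed class Coordinate
{
    public int X { get; set; }
    public int Y { get; set; }
    public override int GetHashCode() => X ^ Y;   // value-based override
}

internal static class IdentityHashDemo
{
    private static void Main()
    {
        var a = new Coordinate { X = 1, Y = 2 };
        var b = new Coordinate { X = 1, Y = 2 };
        Console.WriteLine(a.GetHashCode() == b.GetHashCode());                              // True: value-based
        Console.WriteLine(RuntimeHelpers.GetHashCode(a) == RuntimeHelpers.GetHashCode(b));  // typically False: identity-based
    }
}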
internal void WriteDictionaryType(BinaryWriter writer, long lengthStartPosition, object obj, ExtendedType typeSupport, int currentDepth, string path) { // write each element var dictionary = (IDictionary)obj; // determine the generic iterator type var isDictionaryEntry = dictionary.OfType <DictionaryEntry>().Any(); var isKvp = dictionary.OfType <KeyValuePair <object, object> >().Any(); // if neither was determined (possibly because of no data) assume DictionaryEntry if (!isDictionaryEntry && !isKvp) { isDictionaryEntry = true; } if (typeSupport.IsGeneric && typeSupport.GenericArgumentTypes.Any()) { // generic IDictionary<,> var keyExtendedType = typeSupport.GenericArgumentTypes.First().GetExtendedType(); var valueExtendedType = typeSupport.GenericArgumentTypes.Skip(1).First().GetExtendedType(); ExtendedType valueConcreteExtendedType = null; var index = 0; if (isDictionaryEntry) { // iterate DictionaryEntry of objects foreach (DictionaryEntry item in dictionary) { // write the key WriteObject(writer, item.Key, keyExtendedType, currentDepth, path, index); // write the value if (item.Value != null && valueConcreteExtendedType is null) { valueConcreteExtendedType = item.Value.GetType().GetExtendedType(); } WriteObject(writer, item.Value, valueConcreteExtendedType ?? valueExtendedType, currentDepth, path, index); index++; } } else if (isKvp) { // this scenario isn't supported. Shouldn't be possible when using generics } } else { // non-generic IDictionary ExtendedType extendedType; if (typeSupport.GenericArgumentTypes.Any()) { extendedType = typeSupport.GenericArgumentTypes.First().GetExtendedType(); } else { extendedType = typeof(object).GetExtendedType(); } var index = 0; if (isDictionaryEntry) { // iterate DictionaryEntry of objects foreach (DictionaryEntry item in dictionary) { // write the key WriteObject(writer, item.Key, item.Key.GetExtendedType(), currentDepth, path, index); // write the value WriteObject(writer, item.Value, item.Value.GetExtendedType(), currentDepth, path, index); index++; } } else if (isKvp) { // iterate KeyValuePair of objects foreach (KeyValuePair <object, object> item in dictionary) { // write the key WriteObject(writer, item.Key, item.Key.GetExtendedType(), currentDepth, path, index); // write the value WriteObject(writer, item.Value, item.Value.GetExtendedType(), currentDepth, path, index); index++; } } } }
public override bool Visit(ExtendedType node) { Visit((RealType)node); return(true); }
internal long WriteObject(BinaryWriter writer, object obj, ExtendedType typeSupport, int currentDepth, string path, int index) { // increment the current recursion depth currentDepth++; var isTypeMapped = false; TypeId objectTypeId = TypeId.None; var newTypeSupport = typeSupport; try { objectTypeId = TypeUtil.GetTypeId(newTypeSupport); } catch (InvalidOperationException ex) { throw new InvalidOperationException($"[{path}] {ex.Message}", ex); } // if the object type is not a concrete type, indicate so in the type mask isTypeMapped = _typeDescriptors != null && !newTypeSupport.IsConcreteType; // also resolve the concrete type as it may require being typemapped if (_typeDescriptors != null && newTypeSupport.ConcreteType != null && newTypeSupport.Type != newTypeSupport.ConcreteType && !newTypeSupport.IsConcreteType) { // a special condition for writing anonymous types and types without implementation or concrete type newTypeSupport = newTypeSupport.ConcreteType.GetExtendedType(); isTypeMapped = true; objectTypeId = TypeUtil.GetTypeId(newTypeSupport); } // if we couldn't resolve a concrete type, don't map it if (isTypeMapped && newTypeSupport.Type == typeof(object)) { isTypeMapped = false; } byte objectTypeIdByte = (byte)objectTypeId; // if the object is null, indicate so in the type mask if (obj == null) { objectTypeIdByte |= (byte)TypeId.NullValue; } if (isTypeMapped) { objectTypeIdByte |= (byte)TypeId.TypeMapped; } // write the object type being serialized in position 0x00 writer.Write(objectTypeIdByte); // make a note of where this object starts, so we can populate the length header later var lengthStartPosition = writer.BaseStream.Position; // make room for the length prefix and object reference id if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact)) { writer.Seek((int)(Constants.CompactLengthHeaderSize + Constants.ObjectReferenceIdSize + (int)writer.BaseStream.Position), SeekOrigin.Begin); } else { writer.Seek((int)(Constants.LengthHeaderSize + Constants.ObjectReferenceIdSize + (int)writer.BaseStream.Position), SeekOrigin.Begin); } // write the optional type descriptor id - only interfaces can store type descriptors var containsTypeDescriptorId = false; ushort typeDescriptorId = 0; if (isTypeMapped) { typeDescriptorId = _typeDescriptors.AddKnownType(newTypeSupport); writer.Write(typeDescriptorId); containsTypeDescriptorId = true; } // construct a hashtable of objects we have already inspected (simple recursion loop preventer) // we use this hashcode method as it does not use any custom hashcode handlers the object might implement ushort objectReferenceId = 0; bool alreadyMapped = false; var hashCode = System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(obj); if (obj != null && !_options.BitwiseHasFlag(SerializerOptions.DisableReferenceTracking)) { alreadyMapped = _referenceTracker.ContainsHashcode(hashCode, obj.GetType()); // if we already wrote this object, we want to write a reference to it in the data if (alreadyMapped) { objectReferenceId = _referenceTracker.GetObjectReferenceId(hashCode, obj.GetType()); } } if (hashCode != 0 && !alreadyMapped) { if (!_options.BitwiseHasFlag(SerializerOptions.DisableReferenceTracking)) { // ensure we can refer back to the reference for this object objectReferenceId = _referenceTracker.AddObject(hashCode, obj); } // custom types support var @switch = new Dictionary <Type, Action> { { typeof(XDocument), () => WriteValueType(writer, lengthStartPosition, obj, newTypeSupport) }, }; if (@switch.ContainsKey(newTypeSupport.Type)) { 
@switch[newTypeSupport.Type](); } else { switch (objectTypeId) { case TypeId.Object: WriteObjectType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.Struct: WriteStructType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.Array: WriteArrayType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.IDictionary: WriteDictionaryType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.IEnumerable: WriteEnumerableType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.KeyValuePair: WriteKeyValueType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.Tuple: WriteTupleType(writer, lengthStartPosition, obj, newTypeSupport, currentDepth, path); break; case TypeId.Enum: WriteValueType(writer, lengthStartPosition, obj, typeof(Enum).GetExtendedType()); break; default: WriteValueType(writer, lengthStartPosition, obj, newTypeSupport); break; } } } var currentPosition = writer.BaseStream.Position; // write the length header at the start of this object, excluding the objectReferenceId at the end var dataLength = (int)(writer.BaseStream.Position - lengthStartPosition - sizeof(ushort)); // if we wrote a typeDescriptorId, that doesn't apply to the dataLength if (containsTypeDescriptorId) { dataLength -= (int)Constants.ObjectTypeDescriptorId; } WriteDebugBuilder(writer.BaseStream.Position, typeSupport, objectTypeId, currentDepth, path, index, dataLength, objectReferenceId, typeDescriptorId, hashCode); writer.Seek((int)lengthStartPosition, SeekOrigin.Begin); if (_dataSettings.BitwiseHasFlag(SerializerDataSettings.Compact)) { if (dataLength > ushort.MaxValue) { throw new ExceedsMaxSizeException($"The object type '{newTypeSupport.Type}' serializes to a data size '{dataLength}' which is greater than supported for Compact mode (max: '{ushort.MaxValue}')"); } writer.Write((ushort)dataLength); } else { writer.Write((uint)dataLength); } // write the object reference Id from the object tree. // this is used so we don't have to serialize objects already in the data, we can just reference it's id writer.Write(objectReferenceId); // reset the position to current writer.Seek((int)currentPosition, SeekOrigin.Begin); return(dataLength); }
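// Standalone sketch (not the serializer's exact byte layout) of the back-patching pattern
// WriteObject uses above: reserve room for a length prefix, write the payload, then seek
// back and fill in the real length before restoring the write position.
using System.IO;

using var stream = new MemoryStream();
using var writer = new BinaryWriter(stream);
var lengthStartPosition = stream.Position;
writer.Seek(sizeof(uint), SeekOrigin.Current);              // reserve the length header
writer.Write("payload");                                    // write the object data
var endPosition = stream.Position;
writer.Seek((int)lengthStartPosition, SeekOrigin.Begin);
writer.Write((uint)(endPosition - lengthStartPosition));    // back-patch the real length
writer.Seek((int)endPosition, SeekOrigin.Begin);            // restore the write position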
internal object ReadTupleType(object newObj, BinaryReader reader, uint length, ExtendedType typeSupport, int currentDepth, string path, TypeDescriptor typeDescriptor) { // length = entire collection // read each element, treat a tuple as a list of objects uint i = 0; uint dataLength = 0; uint headerLength = 0; var genericTypes = typeSupport.Type.GetGenericArguments().ToList(); var typeSupports = genericTypes.Select(x => x.GetExtendedType()).ToList(); Type tupleType = null; if (typeSupport.IsValueTuple) { tupleType = TypeSupport.Extensions.TupleExtensions.CreateValueTuple(typeSupports.Select(x => x.Type).ToList()); } else { tupleType = TypeSupport.Extensions.TupleExtensions.CreateTuple(typeSupports.Select(x => x.Type).ToList()); } object newTuple = null; if (typeDescriptor != null && !string.IsNullOrEmpty(typeDescriptor.FullName)) { newTuple = new ObjectFactory().CreateEmptyObject(typeDescriptor.FullName, _typeRegistry); } else { newTuple = new ObjectFactory().CreateEmptyObject(tupleType, _typeRegistry); } var index = 0; while (i < length) { var element = ReadObject(reader, typeSupports[index], currentDepth, path, ref dataLength, ref headerLength); // increment the size of the data read i += dataLength + headerLength; var fieldName = $"m_Item{index + 1}"; if (typeSupport.IsValueTuple) { fieldName = $"Item{index + 1}"; } TypeUtil.SetFieldValue(fieldName, newTuple, element); index++; } // return the value return(newTuple); }
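// Quick reflection check of the field names assumed by ReadTupleType above: Tuple<> stores
// its items in private "m_ItemN" fields, while ValueTuple<> exposes public "ItemN" fields.
using System;
using System.Linq;
using System.Reflection;

var tupleFields = typeof(Tuple<int, string>)
    .GetFields(BindingFlags.Instance | BindingFlags.NonPublic)
    .Select(f => f.Name)
    .ToArray();                                              // "m_Item1", "m_Item2"
var valueTupleFields = typeof(ValueTuple<int, string>)
    .GetFields()
    .Select(f => f.Name)
    .ToArray();                                              // "Item1", "Item2"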
/// <summary>
/// Initializes a new instance of the <see cref="Indexer"/> class.
/// </summary>
/// <param name="extendedType">The extended type.</param>
/// <param name="info">The <see cref="System.Reflection.PropertyInfo">property info</see>.</param>
/// <remarks></remarks>
internal Indexer([NotNull] ExtendedType extendedType, [NotNull] PropertyInfo info)
    : base(extendedType, info)
{
    Debug.Assert(extendedType.DefaultMember == info.Name || info.GetIndexParameters().Length > 0);
    _indexParameters = new Lazy<ParameterInfo[]>(info.GetIndexParameters, LazyThreadSafetyMode.PublicationOnly);
}
/// <summary> /// (Recursive) Recursive function that inspects an object and its properties/fields and clones it /// </summary> /// <param name="sourceObject">The object to clone</param> /// <param name="destObject">The destination object</param> /// <param name="mapToType">The type to map to</param> /// <param name="currentDepth">The current tree depth</param> /// <param name="maxDepth">The max tree depth</param> /// <param name="options">The cloning options</param> /// <param name="objectTree">The object tree to prevent cyclical references</param> /// <param name="path">The current path being traversed</param> /// <param name="ignorePropertiesOrPaths">A list of properties or paths to ignore</param> /// <returns></returns> private object InspectAndMap <TSource, TDest>(object sourceObject, object destObject, ExtendedType mapToType, int currentDepth, int maxDepth, MappingOptions options, IDictionary <ObjectHashcode, object> objectTree, string path, ICollection <string> ignorePropertiesOrPaths = null) { if (IgnoreObjectName(null, path, options, ignorePropertiesOrPaths)) { return(null); } if (sourceObject == null) { return(null); } // ensure we don't go too deep if specified if (maxDepth > 0 && currentDepth >= maxDepth) { return(null); } var sourceType = typeof(TSource).GetExtendedType(); var destType = typeof(TDest).GetExtendedType(); if (ignorePropertiesOrPaths == null) { ignorePropertiesOrPaths = new List <string>(); } // drop any objects we are ignoring by attribute if (mapToType.Attributes.Any(x => _ignoreAttributes.Contains(x)) && options.BitwiseHasFlag(MappingOptions.DisableIgnoreAttributes)) { return(null); } // for delegate types, copy them by reference rather than returning null if (mapToType.IsDelegate) { return(sourceObject); } object newObject = destObject; // create a new empty object of the desired type if (newObject == null) { if (mapToType.IsArray) { var length = 0; if (mapToType.IsArray) { length = (sourceObject as Array).Length; } newObject = _objectFactory.CreateEmptyObject(mapToType.Type, length); } else if (mapToType.Type == typeof(string)) { // copy the item directly newObject = Convert.ToString(sourceObject); return(newObject); } else { newObject = _objectFactory.CreateEmptyObject(mapToType.Type); } } if (newObject == null) { return(newObject); } // increment the current recursion depth currentDepth++; // construct a hashtable of objects we have already inspected (simple recursion loop preventer) // we use this hashcode method as it does not use any custom hashcode handlers the object might implement if (sourceObject != null && !mapToType.IsValueType) { var hashCode = System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(sourceObject); var key = new ObjectHashcode(hashCode, newObject.GetType()); if (objectTree.ContainsKey(key)) { return(objectTree[key]); } // ensure we can refer back to the reference for this object objectTree.Add(key, newObject); } var objectMapper = TypeRegistry.ObjectMappings .FirstOrDefault(x => x.SourceObjectType == sourceType.Type && x.DestinationObjectType == destType.Type); try { // clone a dictionary's key/values if (mapToType.IsDictionary && mapToType.IsGeneric) { var genericType = mapToType.Type.GetGenericArguments().ToList(); Type[] typeArgs = { genericType[0], genericType[1] }; var listType = typeof(Dictionary <,>).MakeGenericType(typeArgs); var newDictionary = Activator.CreateInstance(listType) as IDictionary; newObject = newDictionary; var enumerator = (IDictionary)sourceObject; foreach (DictionaryEntry item in enumerator) { var key 
= InspectAndMap <TSource, TDest>(item.Key, null, item.Key.GetExtendedType(), currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); var value = InspectAndMap <TSource, TDest>(item.Value, null, item.Value.GetExtendedType(), currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); newDictionary.Add(key, value); } return(newObject); } // clone an enumerables' elements if (mapToType.IsEnumerable && mapToType.IsGeneric) { var genericType = mapToType.Type.GetGenericArguments().First(); var genericExtendedType = genericType.GetExtendedType(); var addMethod = mapToType.Type.GetMethod("Add"); var enumerator = (IEnumerable)sourceObject; foreach (var item in enumerator) { var element = InspectAndMap <TSource, TDest>(item, null, genericExtendedType, currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); addMethod.Invoke(newObject, new object[] { element }); } return(newObject); } // clone an arrays' elements if (mapToType.IsArray) { var sourceArray = sourceObject as Array; var newArray = newObject as Array; newObject = newArray; for (var i = 0; i < sourceArray.Length; i++) { var element = sourceArray.GetValue(i); var newElement = InspectAndMap <TSource, TDest>(element, null, mapToType.ElementType.GetExtendedType(), currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); newArray.SetValue(newElement, i); } return(newArray); } var fields = sourceObject.GetFields(FieldOptions.AllWritable); var properties = sourceObject.GetProperties(PropertyOptions.HasGetter); var rootPath = path; // clone and recurse fields if (newObject != null) { foreach (var field in fields) { path = $"{rootPath}.{field.Name}"; if (IgnoreObjectName(field.Name, path, options, ignorePropertiesOrPaths, field.CustomAttributes)) { continue; } // also check the property for ignore, if this is a auto-backing property if (field.BackedProperty != null && IgnoreObjectName(field.BackedProperty.Name, $"{rootPath}.{field.BackedPropertyName}", options, ignorePropertiesOrPaths, field.BackedProperty.CustomAttributes)) { continue; } newObject = MapField <TSource, TDest>(newObject, sourceObject, objectMapper, field, currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); } foreach (var property in properties) { path = $"{rootPath}.{property.Name}"; if (IgnoreObjectName(property.Name, path, options, ignorePropertiesOrPaths, property.CustomAttributes)) { continue; } // also check the backing field for ignore, if this is a auto-backing property if (property.BackingFieldName != null && IgnoreObjectName(property.BackingFieldName, $"{rootPath}.{property.BackingFieldName}", options, ignorePropertiesOrPaths, fields.FirstOrDefault(x => x.Name == property.BackingFieldName).CustomAttributes)) { continue; } if (string.IsNullOrEmpty(property.BackingFieldName)) { // map the property, it has no backing field so it's likely a method call newObject = MapProperty <TSource, TDest>(newObject, sourceObject, objectMapper, property, currentDepth, maxDepth, options, objectTree, path, ignorePropertiesOrPaths); } } } return(newObject); } finally { } }