// Deserializes an XML element into an object graph node. If the element carries
// an "id" attribute the resulting value is registered with the reference
// resolver (so later <reference> elements can point back to it); IBObject
// results additionally get their child elements deserialized into them.
public static object Deserialize(XElement element, Dictionary<string, Func<IBObject>> constructors, IReferenceResolver resolver)
{
    object value = DeserializeInner(element, constructors, resolver);
    var ibObject = value as IBObject;

    var idAttribute = element.Attribute("id");
    if (idAttribute != null) {
        int id = Int32.Parse(idAttribute.Value, CultureInfo.InvariantCulture);
        if (ibObject == null) {
            resolver.Add(id, value);
        } else {
            ibObject.Id = id;
            resolver.Add(ibObject);
        }
    }

    if (ibObject != null) {
        ibObject.DeserializeContents(element.Elements(), constructors, resolver);
    }

    return value;
}
// Deserializes an XML element into an object, registering any "id" attribute
// with the reference resolver, then recursing into child elements for
// IBObject values via DeserializeContents.
public static object Deserialize (XElement element, Dictionary<string, Func<IBObject>> constructors, IReferenceResolver resolver)
{
    var idAtt = element.Attribute ("id");
    object val = DeserializeInner (element, constructors, resolver);
    var ib = val as IBObject;
    if (idAtt != null) {
        // FIX: parse with the invariant culture — the id is machine-generated
        // serialized data, so it must not depend on the current UI culture
        // (CA1305; also matches the other Deserialize overload in this file).
        int id = Int32.Parse (idAtt.Value, CultureInfo.InvariantCulture);
        if (ib != null) {
            ib.Id = id;
            resolver.Add (ib);
        } else {
            resolver.Add (id, val);
        }
    }
    if (ib != null)
        ib.DeserializeContents (element.Elements (), constructors, resolver);
    return val;
}
// Deserializes a single XML element into the primitive value or (empty)
// IBObject it represents. Composite objects are only constructed here; their
// child elements are filled in afterwards by Deserialize via
// IBObject.DeserializeContents. Throws InvalidOperationException for element
// names, string encodings, or collection classes it does not recognize.
static object DeserializeInner(XElement element, Dictionary<string, Func<IBObject>> constructors, IReferenceResolver resolver)
{
    switch (element.Name.ToString()) {
    case "int":
        return Int32.Parse(element.Value, CultureInfo.InvariantCulture);
    case "integer":
        return Int32.Parse(element.Attribute("value").Value, CultureInfo.InvariantCulture);
    case "nil":
        return null;
    case "string":
        XAttribute typeAtt = element.Attribute("type");
        if (typeAtt != null) {
            switch (typeAtt.Value) {
            case "base64-UTF8":
                //FIXME: figure out the encoding they're using. why do we have to remove the last char to make it decode?
                string s = element.Value.Replace("\n", "").Replace("\r", "");
                // Truncate to a multiple of 4 so Convert.FromBase64String accepts it.
                int last = (s.Length / 4) * 4;
                return Encoding.UTF8.GetString(Convert.FromBase64String(s.Substring(0, last)));
            default:
                // FIX: was `throw new Exception` — use a specific exception type,
                // consistent with the array/dictionary cases below.
                throw new InvalidOperationException(String.Format("Unknown string encoding type {0}", typeAtt.Value));
            }
        }
        return element.Value;
    case "characters":
        return element.Value;
    case "bool":
        return element.Value == "YES";
    case "boolean":
        return element.Attribute("value").Value == "YES";
    case "double":
        return Double.Parse(element.Value, CultureInfo.InvariantCulture);
    case "float":
        return float.Parse(element.Value, CultureInfo.InvariantCulture);
    case "real":
        return float.Parse(element.Attribute("value").Value, CultureInfo.InvariantCulture);
    case "bytes":
        // FIX: the FIXME comment must sit on its own line so it cannot swallow
        // the return statement.
        //FIXME: figure out the encoding they're using. it's not straight base 64
        return new AppleEvilByteArrayEncoding(element.Value);
    case "reference":
        var refAtt = element.Attribute("ref");
        IBReference xibRef;
        if (refAtt != null) {
            xibRef = new IBReference(Int32.Parse(refAtt.Value, CultureInfo.InvariantCulture));
            resolver.Add(xibRef);
        } else {
            //FIXME: handle null references more robustly
            xibRef = new IBReference(Int32.MinValue);
        }
        return xibRef;
    case "object": {
        // The (string) cast yields null when the attribute is absent.
        var className = (string)element.Attribute("class");
        if (className == null) {
            // FIX: TryGetValue(null) would throw an opaque ArgumentNullException;
            // fail with a diagnosable message instead.
            throw new InvalidOperationException("<object> element is missing its 'class' attribute");
        }
        Func<IBObject> constructor;
        IBObject obj;
        if (constructors.TryGetValue(className, out constructor)) {
            obj = constructor();
        } else {
            // Unknown classes are preserved rather than rejected.
            obj = new UnknownIBObject(className);
        }
        return obj;
    }
    case "array": {
        var className = (string)element.Attribute("class");
        if (className == null) {
            return new NSArray();
        } else if (className == "NSMutableArray") {
            return new NSMutableArray();
        }
        throw new InvalidOperationException("Unknown array class '" + className + "'");
    }
    case "dictionary": {
        var className = (string)element.Attribute("class");
        if (className == "NSMutableDictionary") {
            return new NSMutableDictionaryDirect();
        }
        throw new InvalidOperationException("Unknown dictionary class '" + className + "'");
    }
    default:
        // FIX: was `throw new Exception` — use a specific exception type.
        throw new InvalidOperationException(String.Format("Cannot handle primitive type {0}", element.Name));
    }
}
// Deserializes a single XML element into the primitive value or (empty)
// IBObject it represents; child elements of objects are filled in later by
// Deserialize via IBObject.DeserializeContents. Throws
// InvalidOperationException for element names or encodings it cannot handle.
static object DeserializeInner (XElement element, Dictionary<string, Func<IBObject>> constructors, IReferenceResolver resolver)
{
    switch (element.Name.ToString ()) {
    case "int":
        // FIX: all numeric parsing below uses the invariant culture — this is
        // machine-generated serialized data, not user input (CA1305).
        return Int32.Parse (element.Value, CultureInfo.InvariantCulture);
    case "integer":
        return Int32.Parse (element.Attribute ("value").Value, CultureInfo.InvariantCulture);
    case "nil":
        return null;
    case "string":
        XAttribute typeAtt = element.Attribute ("type");
        if (typeAtt != null) {
            switch (typeAtt.Value) {
            case "base64-UTF8":
                //FIXME: figure out the encoding they're using. why do we have to remove the last char to make it decode?
                string s = element.Value.Replace ("\n", "").Replace ("\r", "");
                // Truncate to a multiple of 4 so Convert.FromBase64String accepts it.
                int last = (s.Length / 4) * 4;
                return Encoding.UTF8.GetString (Convert.FromBase64String (s.Substring (0, last)));
            default:
                // FIX: was `throw new Exception` — use a specific exception type.
                throw new InvalidOperationException (String.Format ("Unknown string encoding type {0}", typeAtt.Value));
            }
        }
        return element.Value;
    case "characters":
        return element.Value;
    case "bool":
        return element.Value == "YES";
    case "boolean":
        return element.Attribute ("value").Value == "YES";
    case "double":
        return Double.Parse (element.Value, CultureInfo.InvariantCulture);
    case "float":
        return float.Parse (element.Value, CultureInfo.InvariantCulture);
    case "real":
        return float.Parse (element.Attribute ("value").Value, CultureInfo.InvariantCulture);
    case "bytes":
        // FIX: the FIXME must be on its own line so it cannot swallow the
        // return statement.
        //FIXME: figure out the encoding they're using. it's not straight base 64
        return new AppleEvilByteArrayEncoding (element.Value);
    case "reference":
        var refAtt = element.Attribute ("ref");
        IBReference xibRef;
        if (refAtt != null) {
            xibRef = new IBReference (Int32.Parse (refAtt.Value, CultureInfo.InvariantCulture));
            resolver.Add (xibRef);
        } else {
            //FIXME: handle null references more robustly
            xibRef = new IBReference (Int32.MinValue);
        }
        return xibRef;
    case "object":
        // FIX: guard against a missing 'class' attribute — the original
        // dereferenced .Value unconditionally and threw a bare
        // NullReferenceException.
        var classAtt = element.Attribute ("class");
        if (classAtt == null)
            throw new InvalidOperationException ("<object> element is missing its 'class' attribute");
        string className = classAtt.Value;
        Func<IBObject> constructor;
        IBObject obj;
        if (constructors.TryGetValue (className, out constructor))
            obj = constructor ();
        else
            // Unknown classes are preserved rather than rejected.
            obj = new UnknownIBObject (className);
        return obj;
    default:
        // FIX: was `throw new Exception` — use a specific exception type.
        throw new InvalidOperationException (String.Format ("Cannot handle primitive type {0}", element.Name));
    }
}