/// <summary>
/// Drop an index from a collection. Returns false when the collection or the
/// index does not exist. The "_id" (primary key) index can never be dropped.
/// </summary>
public bool DropIndex(string collection, string field)
{
    // validate arguments before entering the transaction
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (field.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(field));

    // the primary key index must always exist
    if (field == "_id") throw LiteException.IndexDropId();

    return this.Transaction<bool>(collection, false, (colPage) =>
    {
        // unknown collection: nothing to drop
        if (colPage == null) return false;

        // locate the index metadata for this field
        var idx = colPage.GetIndex(field);

        // field has no index: nothing to drop
        if (idx == null) return false;

        _log.Write(Logger.COMMAND, "drop index on '{0}' :: '{1}'", collection, field);

        // remove all index pages that belong to this index
        _indexer.DropIndex(idx);

        // reset the index slot on the collection page
        idx.Clear();

        // persist the modified collection page
        _pager.SetDirty(colPage);

        return true;
    });
}
/// <summary>
/// Create a new file storage metadata entry for the given id and source filename.
/// Length and Chunks start at zero - file content is stored separately, after this
/// metadata object exists.
/// </summary>
internal LiteFileInfo(LiteEngine engine, string id, string filename)
{
    // id must match the storage id pattern, otherwise the entry is rejected
    if (!IdPattern.IsMatch(id)) { throw LiteException.InvalidFormat(id); }

    _engine = engine;

    this.Id = id;
    // keep only the file name portion (strip any directory path)
    this.Filename = Path.GetFileName(filename);
    // mime type is derived from the stored file name
    this.MimeType = MimeTypeConverter.GetMimeType(this.Filename);
    // no content yet
    this.Length = 0;
    this.Chunks = 0;
    // NOTE(review): local time, not UTC - confirm this is intended for UploadDate
    this.UploadDate = DateTime.Now;
    this.Metadata = new BsonDocument();
}
/// <summary>
/// Copy all file content to a stream. Throws FileNotFound when the id does not exist.
/// </summary>
public LiteFileInfo Download(string id, Stream stream)
{
    if (stream == null) throw new ArgumentNullException(nameof(stream));

    // locate the file metadata by its id
    var info = this.FindById(id);

    // a missing file is an error for download (unlike a plain find)
    if (info == null) throw LiteException.FileNotFound(id);

    // write the stored content into the destination stream
    info.CopyTo(stream);

    return info;
}
/// <summary>
/// Rename a collection. Returns false if the collection does not exist.
/// Throws InvalidFormat when the new name is not a valid collection name and
/// AlreadyExistsCollectionName when the target name is taken (case-insensitive).
/// </summary>
public bool RenameCollection(string collection, string newName)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (newName.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(newName));

    // FIX: validate the new name format exactly like collection creation does -
    // without this check a rename could store a name that Add() would have rejected
    if (!CollectionPage.NamePattern.IsMatch(newName)) throw LiteException.InvalidFormat(newName);

    return this.Transaction<bool>(collection, false, (col) =>
    {
        // source collection not found - nothing to rename
        if (col == null) return false;

        _log.Write(Logger.COMMAND, "rename collection '{0}' -> '{1}'", collection, newName);

        // refuse to overwrite an existing collection (names compare case-insensitive)
        if (this.GetCollectionNames().Contains(newName, StringComparer.OrdinalIgnoreCase))
        {
            throw LiteException.AlreadyExistsCollectionName(newName);
        }

        // change collection name and mark its page to be saved
        col.CollectionName = newName;
        _pager.SetDirty(col);

        // update the name -> pageID map kept on the header page
        var header = _pager.GetPage<HeaderPage>(0);

        header.CollectionPages.Remove(collection);
        header.CollectionPages.Add(newName, col.PageID);

        _pager.SetDirty(header);

        return true;
    });
}
/// <summary>
/// Try execute some action, retrying while it throws a file-lock IOException.
/// The action always runs at least once; gives up with LockTimeout after the
/// timeout window has elapsed.
/// </summary>
public static void TryExec(Action action, TimeSpan timeout)
{
    // absolute deadline for retries
    var deadline = DateTime.UtcNow.Add(timeout);

    while (true)
    {
        try
        {
            action();
            return;
        }
        catch (IOException ex)
        {
            // only lock-related IO errors are retried; wait briefly before the next attempt
            ex.WaitIfLocked(25);
        }

        // stop retrying once the deadline has passed
        if (DateTime.UtcNow >= deadline) break;
    }

    throw LiteException.LockTimeout(timeout);
}
/// <summary>
/// Test if cache still valid (if datafile was changed by another process reset cache)
/// Returns true if file was changed
/// [Thread Safe]
/// </summary>
private bool DetectDatabaseChanges()
{
    // if disk are exclusive don't need check dirty read
    if (_disk.IsExclusive) { return(false); }

    // empty cache? just exit
    if (_cache.CleanUsed == 0) { return(false); }

    _log.Write(Logger.CACHE, "checking disk to detect database changes from another process");

    // get ChangeID from cache (missing/typed-wrong cached page counts as ChangeID 0)
    var header = _cache.GetPage(0) as HeaderPage;
    var changeID = header == null ? 0 : header.ChangeID;

    // and get header from disk
    // NOTE(review): `as HeaderPage` yields null if page 0 is not a header page, which would
    // throw NullReferenceException below - confirm ReadPage guarantees a header at page 0
    var disk = BasePage.ReadPage(_disk.ReadPage(0)) as HeaderPage;

    // if disk header are in recovery mode, throw exception to datafile re-open and recovery pages
    if (disk.Recovery)
    {
        _log.Write(Logger.ERROR, "datafile in recovery mode, need re-open database");

        throw LiteException.NeedRecover();
    }

    // if header changed, another process wrote the file: drop every cached page and
    // seed the cache with the fresh header read from disk
    if (disk.ChangeID != changeID)
    {
        _log.Write(Logger.CACHE, "file changed from another process, cleaning all cache pages");

        _cache.ClearPages();
        _cache.AddPage(disk);

        return(true);
    }

    return(false);
}
/// <summary>
/// Add a new collection with the given name. Validates the name format and the
/// total collection-name size limit stored on the header page, then creates the
/// collection page plus its primary-key (_id) index.
/// </summary>
public CollectionPage Add(string name)
{
    if (string.IsNullOrEmpty(name)) { throw new ArgumentNullException(nameof(name)); }

    // collection names must match the allowed name pattern
    if (!CollectionPage.NamePattern.IsMatch(name)) { throw LiteException.InvalidFormat(name); }

    _log.Write(Logger.COMMAND, "creating new collection '{0}'", name);

    // get header marked as dirty because I will use header after (and NewPage can get another header instance)
    var header = _pager.GetPage<HeaderPage>(0);

    // check limit count (8 bytes per collection = 4 to string length, 4 for uint pageID)
    if (header.CollectionPages.Sum(x => x.Key.Length + 8) + name.Length + 8 >= CollectionPage.MAX_COLLECTIONS_SIZE)
    {
        throw LiteException.CollectionLimitExceeded(CollectionPage.MAX_COLLECTIONS_SIZE);
    }

    // get new collection page (marked as dirty)
    var col = _pager.NewPage<CollectionPage>();

    // add this page to header page collection
    // NOTE(review): duplicate names are not checked here - confirm callers pre-check existence
    header.CollectionPages.Add(name, col.PageID);

    col.CollectionName = name;

    // set header page as dirty
    _pager.SetDirty(header);

    // create PK index over the _id field (always unique)
    var pk = _indexer.CreateIndex(col);

    pk.Field = "_id";
    pk.Expression = "$._id";
    pk.Unique = true;

    return(col);
}
/// <summary>
/// Materialize a typed page object from a raw page buffer. Checks the page type
/// on page 0 to detect an invalid datafile.
/// </summary>
public static BasePage ReadPage(byte[] buffer)
{
    var input = new ByteReader(buffer);

    // every page starts with pageID (uint32) followed by the page type (byte)
    var id = input.ReadUInt32();
    var type = (PageType)input.ReadByte();

    // page 0 must carry a known page type - anything else means this is not a valid datafile
    if (id == 0 && (byte)type > 5)
    {
        throw LiteException.InvalidDatabase();
    }

    // build the concrete page instance and let it parse its header + content
    var result = CreateInstance(id, type);

    result.ReadHeader(input);
    result.ReadContent(input);

    // keep the raw buffer attached to the page
    result.DiskData = buffer;

    return result;
}
/// <summary>
/// Enter in Exclusive (write) lock mode: acquires the thread write-lock first, then the
/// disk lock. Returns a LockControl whose dispose action releases the disk lock and then
/// the thread lock.
/// </summary>
public LockControl Write()
{
    // if already in exclusive, do nothing (dispose action is a no-op)
    if (_thread.IsWriteLockHeld)
    {
        return(new LockControl(false, () => { }));
    }

    // upgrading from a read lock is not supported
    if (_thread.IsReadLockHeld)
    {
        throw new NotSupportedException("Not support Write lock inside a Read lock");
    }

    // try enter in write mode (thread) - fail fast after _timeout
    if (!_thread.TryEnterWriteLock(_timeout))
    {
        throw LiteException.LockTimeout(_timeout);
    }

    _log.Write(Logger.LOCK, "entered in write lock mode in thread #{0}", this.ThreadId);

    // try enter in exclusive mode in disk; position is needed later to unlock
    var position = _disk.Lock(LockState.Write, _timeout);

    // detect if another process changed the datafile while we were unlocked
    var changed = this.DetectDatabaseChanges();

    return(new LockControl(changed, () =>
    {
        // release disk write first...
        _disk.Unlock(LockState.Write, position);

        // ...then release the thread write lock
        _thread.ExitWriteLock();

        _log.Write(Logger.LOCK, "exited write lock mode in thread #{0}", this.ThreadId);
    }));
}
/// <summary>
/// Run a shell command from a string. Execute command in current database and
/// returns the results as a list. Throws InvalidCommand when no registered
/// command matches the input.
/// </summary>
public IList<BsonValue> Run(string command)
{
    // lazy one-time registration of the known shell commands
    if (_commands.Count == 0)
    {
        RegisterCommands();
    }

    var scanner = new StringScanner(command);

    // first registered command whose syntax matches the input wins
    foreach (var shellCommand in _commands)
    {
        if (shellCommand.IsCommand(scanner))
        {
            // materialize the result enumeration into a list before returning
            return shellCommand.Execute(scanner, this).ToList();
        }
    }

    throw LiteException.InvalidCommand(command);
}
/// <summary>
/// Internal implementation of insert a document: resolves/validates _id (applying the
/// requested auto-id strategy when missing), stores the serialized document in data
/// pages, and inserts one node per index (PK plus all secondary indexes).
/// </summary>
private void InsertDocument(CollectionPage col, BsonDocument doc, BsonType autoId)
{
    // collection Sequence was created after release current datafile version.
    // In this case, Sequence will be 0 but already has documents. Let's fix this
    // ** this code can be removed when datafile change from 7 (HeaderPage.FILE_VERSION) **
    if (col.Sequence == 0 && col.DocumentCount > 0)
    {
        var max = this.Max(col.CollectionName, "_id");

        // if max value is a number, convert to Sequence last value
        // if not, just set sequence as document count
        col.Sequence = (max.IsInt32 || max.IsInt64 || max.IsDouble || max.IsDecimal) ?
            Convert.ToInt64(max.RawValue) :
            Convert.ToInt64(col.DocumentCount);
    }

    // increase collection sequence _id
    col.Sequence++;

    _pager.SetDirty(col);

    // if no _id, add one using the requested auto-id strategy
    if (!doc.RawValue.TryGetValue("_id", out var id))
    {
        doc["_id"] = id =
            autoId == BsonType.ObjectId ? new BsonValue(ObjectId.NewObjectId()) :
            autoId == BsonType.Guid ? new BsonValue(Guid.NewGuid()) :
            autoId == BsonType.DateTime ? new BsonValue(DateTime.Now) :
            autoId == BsonType.Int32 ? new BsonValue((Int32)col.Sequence) :
            autoId == BsonType.Int64 ? new BsonValue(col.Sequence) :
            BsonValue.Null;
    }
    // create bubble in sequence number if _id is bigger than current sequence
    else if (autoId == BsonType.Int32 || autoId == BsonType.Int64)
    {
        var current = id.AsInt64;

        // if the supplied id is >= sequence, jump sequence up to it;
        // otherwise undo the increment done above (sequence unchanged overall)
        col.Sequence = current >= col.Sequence ? current : col.Sequence - 1;
    }

    // test if _id is a valid type
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    _log.Write(Logger.COMMAND, "insert document on '{0}' :: _id = {1}", col.CollectionName, id.RawValue);

    // serialize object
    var bytes = _bsonWriter.Serialize(doc);

    // storage in data pages - returns dataBlock address
    var dataBlock = _data.Insert(col, bytes);

    // store id in a PK index [0 array]
    var pk = _indexer.AddNode(col.PK, id, null);

    // do link between index <-> data block
    pk.DataBlock = dataBlock.Position;

    // for each secondary index, insert new IndexNode(s)
    foreach (var index in col.GetIndexes(false))
    {
        // for each index, get all keys (support now multi-key) - gets distinct values only
        // if index are unique, get single key only
        var expr = new BsonExpression(index.Expression);

        var keys = expr.Execute(doc, true);

        // do a loop with all keys (multi-key supported)
        foreach (var key in keys)
        {
            // insert node, chained after the PK node for this document
            var node = _indexer.AddNode(index, key, pk);

            // link my index node to data block address
            node.DataBlock = dataBlock.Position;
        }
    }
}
/// <summary>
/// Use this method to override how your class can be, by default, mapped from entity to Bson document.
/// Returns an EntityMapper from each requested Type: scans the type's members, applies the
/// Bson* attributes ([BsonIgnore], [BsonField], [BsonId], [BsonRef]) and builds one
/// MemberMapper per serializable member.
/// </summary>
protected virtual EntityMapper BuildEntityMapper(Type type)
{
    var mapper = new EntityMapper
    {
        Members = new List<MemberMapper>(),
        ForType = type
    };

    // attribute types consulted while scanning members
    var idAttr = typeof(BsonIdAttribute);
    var ignoreAttr = typeof(BsonIgnoreAttribute);
    var fieldAttr = typeof(BsonFieldAttribute);
    // NOTE(review): indexAttr is declared but never used in this method
    var indexAttr = typeof(BsonIndexAttribute);
    var dbrefAttr = typeof(BsonRefAttribute);

    var members = this.GetTypeMembers(type);

    // member elected to act as the document _id (if any)
    var id = this.GetIdMember(members);

    foreach (var memberInfo in members)
    {
        // checks [BsonIgnore]
        if (memberInfo.IsDefined(ignoreAttr, true)) { continue; }

        // checks field name conversion
        var name = this.ResolveFieldName(memberInfo.Name);

        // check if property has [BsonField]
        var field = (BsonFieldAttribute)memberInfo.GetCustomAttributes(fieldAttr, false).FirstOrDefault();

        // check if property has [BsonField] with a custom field name
        if (field != null && field.Name != null)
        {
            name = field.Name;
        }

        // the id member always maps to the "_id" document field
        if (memberInfo == id)
        {
            name = "_id";
        }

        // test if field name is OK (avoid to check in all instances) - do not test internal classes, like DbRef
        if (BsonDocument.IsValidFieldName(name) == false)
        {
            throw LiteException.InvalidFormat(memberInfo.Name);
        }

        // create getter/setter function
        var getter = Reflection.CreateGenericGetter(type, memberInfo);
        var setter = Reflection.CreateGenericSetter(type, memberInfo);

        // check if property has [BsonId] to get with was setted AutoId = true
        var autoId = (BsonIdAttribute)memberInfo.GetCustomAttributes(idAttr, false).FirstOrDefault();

        // get data type (property type for properties, field type for fields)
        var dataType = memberInfo is PropertyInfo ?
            (memberInfo as PropertyInfo).PropertyType :
            (memberInfo as FieldInfo).FieldType;

        // check if datatype is list/array
        var isList = Reflection.IsList(dataType);

        // create a property mapper
        var member = new MemberMapper
        {
            AutoId = autoId == null ? true : autoId.AutoId,
            FieldName = name,
            MemberName = memberInfo.Name,
            DataType = dataType,
            IsList = isList,
            UnderlyingType = isList ? Reflection.GetListItemType(dataType) : dataType,
            Getter = getter,
            Setter = setter
        };

        // check if property has [BsonRef] - only properties can be DbRefs here
        var dbRef = (BsonRefAttribute)memberInfo.GetCustomAttributes(dbrefAttr, false).FirstOrDefault();

        if (dbRef != null && memberInfo is PropertyInfo)
        {
            BsonMapper.RegisterDbRef(this, member, dbRef.Collection ?? this.ResolveCollectionName((memberInfo as PropertyInfo).PropertyType));
        }

        // support callback to user modify member mapper
        if (this.ResolveMember != null)
        {
            this.ResolveMember(type, memberInfo, member);
        }

        // test if has name and there is no duplicate field
        if (member.FieldName != null && mapper.Members.Any(x => x.FieldName == name) == false)
        {
            mapper.Members.Add(member);
        }
    }

    return(mapper);
}
/// <summary>
/// Start parse string into linq expression. Read path, function or base type bson values
/// (int, double, bool, string), plus document/array literals, parenthesized sub-expressions
/// and registered functions. Branch order matters: double is tried before int so "1.5"
/// is not consumed as "1".
/// </summary>
internal static Expression ParseSingleExpression(StringScanner s, ParameterExpression root, ParameterExpression current, bool isRoot)
{
    if (s.Match(@"[\$@]") || isRoot) // read root path
    {
        var r = s.Scan(@"[\$@]"); // read root/current symbol ($ = root, @ = current)
        var method = typeof(BsonExpression).GetMethod("Root");
        var name = Expression.Constant(s.Scan(@"\.?([\$\-\w]+)", 1));
        var expr = Expression.Call(method, r == "@" ? current : root, name) as Expression;

        // parse the rest of path until no more path tokens are recognized
        while (!s.HasTerminated)
        {
            var result = ParsePath(s, expr, root);

            if (result == null) { break; }

            expr = result;
        }

        return(expr);
    }
    else if (s.Match(@"-?\d*\.\d+")) // read double
    {
        var number = Convert.ToDouble(s.Scan(@"-?\d*\.\d+"), CultureInfo.InvariantCulture.NumberFormat);
        var value = Expression.Constant(new BsonValue(number));

        // constants are wrapped in a single-element BsonValue array
        return(Expression.NewArrayInit(typeof(BsonValue), value));
    }
    else if (s.Match(@"-?\d+")) // read int
    {
        var number = Convert.ToInt32(s.Scan(@"-?\d+"), CultureInfo.InvariantCulture.NumberFormat);
        var value = Expression.Constant(new BsonValue(number));

        return(Expression.NewArrayInit(typeof(BsonValue), value));
    }
    else if (s.Match(@"(true|false)")) // read bool
    {
        var boolean = Convert.ToBoolean(s.Scan(@"(true|false)"));
        var value = Expression.Constant(new BsonValue(boolean));

        return(Expression.NewArrayInit(typeof(BsonValue), value));
    }
    else if (s.Match(@"null")) // read null
    {
        var value = Expression.Constant(BsonValue.Null);

        return(Expression.NewArrayInit(typeof(BsonValue), value));
    }
    else if (s.Match(@"'")) // read string
    {
        var str = s.Scan(@"'([\s\S]*?)'", 1);
        var value = Expression.Constant(new BsonValue(str));

        return(Expression.NewArrayInit(typeof(BsonValue), value));
    }
    else if (s.Scan(@"\{\s*").Length > 0) // read document {
    {
        // build a call to the DOCUMENT(keys, values) operator with parallel arrays
        var method = typeof(ExpressionOperators).GetMethod("DOCUMENT");
        var keys = new List<Expression>();
        var values = new List<Expression>();

        while (!s.HasTerminated)
        {
            // read key + value
            var key = s.Scan(@"(.+?)\s*:\s*", 1).ThrowIfEmpty("Invalid token", s);
            var value = ParseExpression(s, root, current, false);

            // add key and value to parameter list (as an expression)
            keys.Add(Expression.Constant(new BsonValue(key)));
            values.Add(value);

            // keep reading pairs after ',' - stop at '}' - anything else is a syntax error
            if (s.Scan(@"\s*,\s*").Length > 0) { continue; }
            else if (s.Scan(@"\s*\}\s*").Length > 0) { break; }

            throw LiteException.SyntaxError(s);
        }

        var arrKeys = Expression.NewArrayInit(typeof(BsonValue), keys.ToArray());
        var arrValues = Expression.NewArrayInit(typeof(IEnumerable<BsonValue>), values.ToArray());

        return(Expression.Call(method, new Expression[] { arrKeys, arrValues }));
    }
    else if (s.Scan(@"\[\s*").Length > 0) // read array [
    {
        var method = typeof(ExpressionOperators).GetMethod("ARRAY");
        var values = new List<Expression>();

        while (!s.HasTerminated)
        {
            // read value expression
            var value = ParseExpression(s, root, current, false);

            values.Add(value);

            // keep reading items after ',' - stop at ']' - anything else is a syntax error
            if (s.Scan(@"\s*,\s*").Length > 0) { continue; }
            else if (s.Scan(@"\s*\]\s*").Length > 0) { break; }

            throw LiteException.SyntaxError(s);
        }

        var arrValues = Expression.NewArrayInit(typeof(IEnumerable<BsonValue>), values.ToArray());

        return(Expression.Call(method, new Expression[] { arrValues }));
    }
    else if (s.Scan(@"\(\s*").Length > 0) // read inner (
    {
        // read a inner expression inside ( and )
        var inner = ParseExpression(s, root, current, false);

        if (s.Scan(@"\s*\)").Length == 0) { throw LiteException.SyntaxError(s); }

        return(inner);
    }
    else if (s.Match(@"\w+\s*\(")) // read function
    {
        // get static method from this class (names compared after upper-casing)
        var name = s.Scan(@"(\w+)\s*\(", 1).ToUpper();
        var parameters = new List<Expression>();

        // empty parameter list? skip parameter parsing entirely
        if (s.Scan(@"\s*\)\s*").Length == 0)
        {
            while (!s.HasTerminated)
            {
                var parameter = ParseExpression(s, root, current, false);

                parameters.Add(parameter);

                // keep reading parameters after ',' - stop at ')' - anything else is a syntax error
                if (s.Scan(@"\s*,\s*").Length > 0) { continue; }
                else if (s.Scan(@"\s*\)\s*").Length > 0) { break; }

                throw LiteException.SyntaxError(s);
            }
        }

        // resolve the method by name + parameter count
        var method = _methods.FirstOrDefault(x => x.Name == name && x.GetParameters().Count() == parameters.Count);

        if (method == null) { throw LiteException.SyntaxError(s, "Method " + name + " not exist or invalid parameter count"); }

        return(Expression.Call(method, parameters.ToArray()));
    }

    // no token matched - invalid expression
    throw LiteException.SyntaxError(s);
}
/// <summary>
/// Implement internal update document: rewrites the data block and then diffs each
/// secondary index, deleting stale nodes and inserting nodes for new keys. The PK
/// index node is never touched. Returns false when the _id is not found.
/// </summary>
private bool UpdateDocument(CollectionPage col, BsonDocument doc)
{
    // normalize id before find
    var id = doc["_id"];

    // validate id for null, min/max values
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    _log.Write(Logger.COMMAND, "update document on '{0}' :: _id = {1}", col.CollectionName, id.RawValue);

    // find indexNode from pk index
    var pkNode = _indexer.Find(col.PK, id, false, Query.Ascending);

    // if not found document, no updates
    if (pkNode == null) { return(false); }

    // serialize document in bytes
    var bytes = _bsonWriter.Serialize(doc);

    // update data storage
    var dataBlock = _data.Update(col, pkNode.DataBlock, bytes);

    // get all non-pk index nodes from this data block (materialized once, reused per index)
    var allNodes = _indexer.GetNodeList(pkNode, false).ToArray();

    // delete/insert indexes - do not touch on PK
    foreach (var index in col.GetIndexes(false))
    {
        var expr = new BsonExpression(index.Expression);

        // keys the updated document produces for this index
        var keys = expr.Execute(doc).ToArray();

        // existing nodes (this index slot) whose key no longer appears -> delete
        var toDelete = allNodes
            .Where(x => x.Slot == index.Slot && !keys.Any(k => k == x.Key))
            .ToArray();

        // keys with no existing node in this index slot -> insert
        var toInsert = keys
            .Where(x => !allNodes.Any(k => k.Slot == index.Slot && k.Key == x))
            .ToArray();

        // delete changed index nodes
        foreach (var node in toDelete)
        {
            _indexer.Delete(index, node.Position);
        }

        // insert new nodes
        foreach (var key in toInsert)
        {
            // and add a new one, chained after the PK node
            var node = _indexer.AddNode(index, key, pkNode);

            // link my node to data block
            node.DataBlock = dataBlock.Position;
        }
    }

    return(true);
}
/// <summary>
/// Insert a new node index inside an collection index (skip-list style insert over
/// Next[]/Prev[] level links). `level` is the new node's height; `last`, when not null,
/// is the previously inserted node for the same document and the new node is appended
/// to that doubly-linked node chain.
/// </summary>
private IndexNode AddNode(CollectionIndex index, BsonValue key, byte level, IndexNode last)
{
    // calc key size
    var keyLength = key.GetBytesCount(false);

    // test for index key maxlength
    if (keyLength > MAX_INDEX_LENGTH) { throw LiteException.IndexKeyTooLong(); }

    // creating a new index node
    var node = new IndexNode(level)
    {
        Key = key,
        KeyLength = (ushort)keyLength,
        Slot = (byte)index.Slot
    };

    // get a free page to insert my index node
    var page = _pager.GetFreePage<IndexPage>(index.FreeIndexPageID, node.Length);

    node.Page = page;

    // add index node to page
    page.AddNode(node);

    // now, let's link my index node on right place
    var cur = this.GetNode(index.HeadNode);

    // cache avoids re-reading the same node when descending a level
    IndexNode cache = null;

    // scan from top left
    for (var i = index.MaxLevel - 1; i >= 0; i--)
    {
        // get cache for last node
        cache = cache != null && cache.Position.Equals(cur.Next[i]) ? cache : this.GetNode(cur.Next[i]);

        // for(; <while_not_this>; <do_this>) { ... }
        // advance right along this level while next key is still < new key
        for (; cur.Next[i].IsEmpty == false; cur = cache)
        {
            // get cache for last node
            cache = cache != null && cache.Position.Equals(cur.Next[i]) ? cache : this.GetNode(cur.Next[i]);

            // read next node to compare
            var diff = cache.Key.CompareTo(key);

            // if unique and diff = 0, throw index exception (must rollback transaction - others nodes can be dirty)
            if (diff == 0 && index.Unique) { throw LiteException.IndexDuplicateKey(index.Field, key); }

            // NOTE(review): relies on CompareTo returning exactly 1 for "greater" - confirm BsonValue.CompareTo contract
            if (diff == 1) { break; }
        }

        // only splice the new node into levels it participates in (i < level)
        if (i <= (level - 1)) // level == length
        {
            // cur = current (immediately before - prev)
            // node = new inserted node
            // next = next node (where cur is pointing)

            _pager.SetDirty(cur.Page);

            node.Next[i] = cur.Next[i];
            node.Prev[i] = cur.Position;
            cur.Next[i] = node.Position;

            var next = this.GetNode(node.Next[i]);

            if (next != null)
            {
                next.Prev[i] = node.Position;
                _pager.SetDirty(next.Page);
            }
        }
    }

    // add/remove indexPage on freelist if has space
    _pager.AddOrRemoveToFreeList(page.FreeBytes > IndexPage.INDEX_RESERVED_BYTES, page, index.Page, ref index.FreeIndexPageID);

    // if last node exists, create a double link list
    if (last != null)
    {
        // link new node with last node
        if (last.NextNode.IsEmpty == false)
        {
            // fix link pointer when the chain already has more nodes after `last`
            var next = this.GetNode(last.NextNode);
            next.PrevNode = node.Position;
            last.NextNode = node.Position;
            node.PrevNode = last.Position;
            node.NextNode = next.Position;

            _pager.SetDirty(next.Page);
        }
        else
        {
            // append at the end of the chain
            last.NextNode = node.Position;
            node.PrevNode = last.Position;
        }

        // set last node page as dirty
        _pager.SetDirty(last.Page);
    }

    return(node);
}
/// <summary>
/// Serialize a .NET object into a BsonValue. `depth` limits recursion to MAX_DEPTH
/// to guard against overly deep (or cyclic) object graphs. Branch order matters:
/// concrete bson types first, then conversions, custom serializers, dictionaries,
/// enumerables and finally plain objects.
/// </summary>
internal BsonValue Serialize(Type type, object obj, int depth)
{
    if (++depth > MAX_DEPTH) { throw LiteException.DocumentMaxDepth(MAX_DEPTH, type); }

    if (obj == null) { return(BsonValue.Null); }

    Func<object, BsonValue> custom;

    // if is already a bson value
    if (obj is BsonValue) { return(new BsonValue((BsonValue)obj)); }
    // test string - mapper has some special options
    else if (obj is String)
    {
        var str = this.TrimWhitespace ? (obj as String).Trim() : (String)obj;

        if (this.EmptyStringToNull && str.Length == 0)
        {
            return(BsonValue.Null);
        }
        else
        {
            return(new BsonValue(str));
        }
    }
    // basic Bson data types (cast datatype for better performance optimization)
    else if (obj is Int32) { return(new BsonValue((Int32)obj)); }
    else if (obj is Int64) { return(new BsonValue((Int64)obj)); }
    else if (obj is Double) { return(new BsonValue((Double)obj)); }
    else if (obj is Decimal) { return(new BsonValue((Decimal)obj)); }
    else if (obj is Byte[]) { return(new BsonValue((Byte[])obj)); }
    else if (obj is ObjectId) { return(new BsonValue((ObjectId)obj)); }
    else if (obj is Guid) { return(new BsonValue((Guid)obj)); }
    else if (obj is Boolean) { return(new BsonValue((Boolean)obj)); }
    else if (obj is DateTime) { return(new BsonValue((DateTime)obj)); }
    // basic .net type to convert to bson
    else if (obj is Int16 || obj is UInt16 || obj is Byte || obj is SByte)
    {
        return(new BsonValue(Convert.ToInt32(obj)));
    }
    else if (obj is UInt32) { return(new BsonValue(Convert.ToInt64(obj))); }
    else if (obj is UInt64)
    {
        // UInt64 is stored as an Int64 via unchecked reinterpretation
        // (Deserialize performs the matching unchecked cast back)
        var ulng = ((UInt64)obj);
        var lng = unchecked ((Int64)ulng);

        return(new BsonValue(lng));
    }
    else if (obj is Single) { return(new BsonValue(Convert.ToDouble(obj))); }
    else if (obj is Char || obj is Enum) { return(new BsonValue(obj.ToString())); }
    // check if is a custom type (declared type first, runtime type second)
    else if (_customSerializer.TryGetValue(type, out custom) || _customSerializer.TryGetValue(obj.GetType(), out custom))
    {
        return(custom(obj));
    }
    // for dictionary
    else if (obj is IDictionary)
    {
        // when you are converting Dictionary<string, object> use the runtime type
        if (type == typeof(object))
        {
            type = obj.GetType();
        }

        // second generic argument is the dictionary's value type
        var itemType = type.GetTypeInfo().GetGenericArguments()[1];

        return(this.SerializeDictionary(itemType, obj as IDictionary, depth));
    }
    // check if is a list or array
    else if (obj is IEnumerable)
    {
        return(this.SerializeArray(Reflection.GetListItemType(obj.GetType()), obj as IEnumerable, depth));
    }
    // otherwise serialize as a plain object
    else
    {
        return(this.SerializeObject(type, obj, depth));
    }
}
/// <summary>
/// Deserialize a BsonValue back into a .NET object of the requested type. Handles
/// nullables, raw bson types, basic conversions, enums, custom deserializers,
/// arrays/lists, dictionaries and plain objects (honoring an embedded "_type" field).
/// </summary>
internal object Deserialize(Type type, BsonValue value)
{
    Func<BsonValue, object> custom;

    // null value - null returns
    if (value.IsNull) { return(null); }
    // if is nullable, get underlying type
    else if (Reflection.IsNullable(type))
    {
        type = Reflection.UnderlyingTypeOf(type);
    }

    // check if your type is already a BsonValue/BsonDocument/BsonArray
    if (type == typeof(BsonValue))
    {
        return(new BsonValue(value));
    }
    else if (type == typeof(BsonDocument))
    {
        return(value.AsDocument);
    }
    else if (type == typeof(BsonArray))
    {
        return(value.AsArray);
    }
    // raw values to native bson values
    else if (_bsonTypes.Contains(type))
    {
        return(value.RawValue);
    }
    // simple ConvertTo to basic .NET types
    else if (_basicTypes.Contains(type))
    {
        return(Convert.ChangeType(value.RawValue, type));
    }
    // special cast to UInt64 to Int64 (mirrors the unchecked cast done in Serialize)
    else if (type == typeof(UInt64))
    {
        return(unchecked ((UInt64)((Int64)value.RawValue)));
    }
    // enum value is stored as its name string
    else if (type.GetTypeInfo().IsEnum)
    {
        return(Enum.Parse(type, value.AsString));
    }
    // test if has a custom type implementation
    else if (_customDeserializer.TryGetValue(type, out custom))
    {
        return(custom(value));
    }
    // if value is array, deserialize as array
    else if (value.IsArray)
    {
        // when array are from an object (like in Dictionary<string, object> { ["array"] = new string[] { "a", "b" } })
        if (type == typeof(object))
        {
            return(this.DeserializeArray(typeof(object), value.AsArray));
        }

        if (type.IsArray)
        {
            return(this.DeserializeArray(type.GetElementType(), value.AsArray));
        }
        else
        {
            return(this.DeserializeList(type, value.AsArray));
        }
    }
    // if value is document, deserialize as document
    else if (value.IsDocument)
    {
        BsonValue typeField;
        var doc = value.AsDocument;

        // test if value is object and has _type
        if (doc.RawValue.TryGetValue("_type", out typeField))
        {
            // NOTE(review): Type.GetType on a string read from stored data - a crafted
            // "_type" could resolve arbitrary loadable types; confirm this is acceptable
            type = Type.GetType(typeField.AsString);

            if (type == null) { throw LiteException.InvalidTypedName(typeField.AsString); }
        }
        // when complex type has no definition (== typeof(object)) use Dictionary<string, object> to better set values
        else if (type == typeof(object))
        {
            type = typeof(Dictionary<string, object>);
        }

        var o = _typeInstantiator(type);

        if (o is IDictionary && type.GetTypeInfo().IsGenericType)
        {
            // generic arguments: [0] = key type, [1] = value type
            var k = type.GetTypeInfo().GetGenericArguments()[0];
            var t = type.GetTypeInfo().GetGenericArguments()[1];

            this.DeserializeDictionary(k, t, (IDictionary)o, value.AsDocument);
        }
        else
        {
            this.DeserializeObject(type, o, doc);
        }

        return(o);
    }

    // in last case, return value as-is - can cause "cast error"
    // it's used for "public object MyInt { get; set; }"
    return(value.RawValue);
}