/// <summary>
/// Enter database-wide exclusive lock. Waits for all transactions to finish. In exclusive mode no one can start a new transaction (for read/write).
/// If the current thread is already in exclusive mode, returns false.
/// </summary>
public bool EnterExclusive()
{
    // if current thread is already in exclusive mode
    if (_transaction.IsWriteLockHeld) return false;

    // wait for all transactions to finish before entering reserved mode
    if (_transaction.TryEnterWriteLock(_pragmas.Timeout) == false) throw LiteException.LockTimeout("exclusive", _pragmas.Timeout);

    return true;
}
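A minimal usage sketch, assuming a paired ExitExclusive method on the same locker (the method name and _locker variable are assumptions, not shown in this snippet):

// the bool result tells the caller whether it now owns the exclusive
// lock and is therefore responsible for releasing it
var mustExit = _locker.EnterExclusive();

try
{
    // work that requires no concurrent transactions
}
finally
{
    // ExitExclusive is assumed to release the write lock taken above
    if (mustExit) _locker.ExitExclusive();
}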
/// <summary>
/// Drop an index from a collection
/// </summary>
public bool DropIndex(string collection, string name)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name));
    if (name == "_id") throw LiteException.IndexDropId();

    return this.AutoTransaction(transaction =>
    {
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false);
        var col = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot);

        // no collection, no index
        if (col == null) return false;

        // search for index reference
        var index = col.GetCollectionIndex(name);

        // no index, no drop
        if (index == null) return false;

        // delete all data pages + index pages
        indexer.DropIndex(index);

        // remove index entry from collection page
        snapshot.CollectionPage.DeleteCollectionIndex(name);

        return true;
    });
}
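A short usage sketch (the engine variable and names are illustrative):

// true when the index existed and was dropped; false when the
// collection or the index was not found
var dropped = engine.DropIndex("customers", "idx_name");

// dropping the PK index always throws:
// engine.DropIndex("customers", "_id"); // LiteException.IndexDropId()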
/// <summary>
/// Get current transaction or create a new one
/// </summary>
internal async Task<TransactionService> GetTransaction()
{
    if (_disposed) throw LiteException.DatabaseClosed();

    if (_transaction == null)
    {
        // lock transaction
        await _locker.WaitAsync(_header.Pragmas.Timeout);

        _transaction = new TransactionService(_header, _disk, _walIndex);
    }

    return _transaction;
}
/// <summary>
/// Enter database-wide exclusive lock. Waits for all readers/writers to finish. In exclusive mode no one else can read or write.
/// If the current thread is already in exclusive mode, returns false.
/// </summary>
public bool EnterExclusive()
{
    // if current thread is already in exclusive mode
    if (_transaction.IsWriteLockHeld) return false;

    // wait for all transactions to finish before entering reserved mode
    if (_transaction.TryEnterWriteLock(_pragmas.Timeout) == false) throw LiteException.LockTimeout("exclusive", _pragmas.Timeout);

    ENSURE(_transaction.RecursiveReadCount == 0, "must have no other transaction here");

    return true;
}
/// <summary>
/// Internal implementation of document insert
/// </summary>
private void InsertDocument(Snapshot snapshot, BsonDocument doc, BsonAutoId autoId, IndexService indexer, DataService data)
{
    // if no _id, use AutoId
    if (!doc.TryGetValue("_id", out var id))
    {
        doc["_id"] = id =
            autoId == BsonAutoId.ObjectId ? new BsonValue(ObjectId.NewObjectId()) :
            autoId == BsonAutoId.Guid ? new BsonValue(Guid.NewGuid()) :
            this.GetSequence(snapshot, autoId);
    }
    else if (id.IsNumber)
    {
        // update in-memory sequence for numeric _id
        this.SetSequence(snapshot, id);
    }

    // test if _id is a valid type
    if (id.IsNull || id.IsMinValue || id.IsMaxValue) throw LiteException.InvalidDataType("_id", id);

    // store in data pages - returns dataBlock address
    var dataBlock = data.Insert(doc);

    IndexNode last = null;

    // for each index, insert a new IndexNode
    foreach (var index in snapshot.CollectionPage.GetCollectionIndexes())
    {
        // for each index, get all keys (supports multi-key) - gets distinct values only
        // if index is unique, gets a single key only
        var keys = index.BsonExpr.Execute(doc, _header.Pragmas.Collation);

        // loop over all keys (multi-key supported)
        foreach (var key in keys)
        {
            // insert node
            var node = indexer.AddNode(index, key, dataBlock, last);

            last = node;
        }
    }
}
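For illustration, what the _id resolution above produces for each BsonAutoId (a sketch; GetSequence is assumed to yield an incrementing per-collection counter):

var doc = new BsonDocument { ["name"] = "John" };

// no "_id" present, so InsertDocument fills one in:
//   BsonAutoId.ObjectId    -> doc["_id"] = a new ObjectId (default)
//   BsonAutoId.Guid        -> doc["_id"] = a new Guid
//   BsonAutoId.Int32/Int64 -> doc["_id"] = next sequence value (1, 2, 3, ...)
// a document inserted with a numeric _id of 42 instead advances the
// in-memory sequence, so the next auto-generated value would be 43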
public BsonValue Run(DbEngine engine, string command)
{
    if (string.IsNullOrEmpty(command)) return BsonValue.Null;

    var s = new StringScanner(command);

    foreach (var cmd in _commands)
    {
        if (cmd.IsCommand(s))
        {
            return cmd.Execute(engine, s);
        }
    }

    throw LiteException.InvalidCommand(command);
}
/// <summary>
/// Override read page, decrypting data from disk
/// </summary>
public override byte[] ReadPage(uint pageID)
{
    var buffer = base.ReadPage(pageID);

    // when reading the header, check the password
    if (pageID == 0)
    {
        // I know, header page will be double read (it's the price for isolated concerns)
        var header = (HeaderPage)BasePage.ReadPage(buffer);

        if (header.DbParams.Password.BinaryCompareTo(_password) != 0)
        {
            throw LiteException.DatabaseWrongPassword();
        }

        return buffer;
    }

    return _crypto.Decrypt(buffer);
}
/// <summary>
/// Enter transaction read lock
/// </summary>
public void EnterTransaction()
{
    // if current thread is in reserved mode, do not enter transaction
    if (_transaction.IsWriteLockHeld) return;

    try
    {
        if (_transaction.TryEnterReadLock(_timeout) == false) throw LiteException.LockTimeout("transaction", _timeout);
    }
    catch (LockRecursionException)
    {
        throw LiteException.AlreadyExistsTransaction();
    }
}
/// <summary>
/// Read a page into the correct page instance type. Checks pageType
/// </summary>
public static BasePage ReadPage(byte[] buffer)
{
    var reader = new ByteReader(buffer);

    var pageID = reader.ReadUInt32();
    var pageType = (PageType)reader.ReadByte();

    if (pageID == 0 && (byte)pageType > 5)
    {
        throw LiteException.InvalidDatabase();
    }

    var page = CreateInstance(pageID, pageType);

    page.ReadHeader(reader);
    page.ReadContent(reader);

    page.DiskData = buffer;

    return page;
}
/// <summary>
/// Exit collection reserved lock
/// </summary>
public void ExitReserved(string collectionName)
{
    // if thread is in full reserved lock, just exit
    if (_reserved.IsWriteLockHeld) return;

    if (_collections.TryGetValue(collectionName, out var collection) == false) throw LiteException.CollectionLockerNotFound(collectionName);

    collection.ExitUpgradeableReadLock();

    // in the global reserved case, the same thread can be trying to read-lock twice on different snapshots - exit only once
    if (_reserved.IsReadLockHeld)
    {
        _reserved.ExitReadLock();
    }
}
/// <summary>
/// Insert a new index node inside a collection index. Flip coin to decide level
/// </summary>
public IndexNode AddNode(CollectionIndex index, BsonValue key, PageAddress dataBlock, IndexNode last)
{
    // do not accept Min/Max value as index key (only head/tail can have these values)
    if (key.IsMaxValue || key.IsMinValue)
    {
        throw LiteException.InvalidIndexKey($"BsonValue MaxValue/MinValue are not supported as index key");
    }

    // random level (flip coin mode) - returns a number between 1-32
    var level = this.Flip();

    // update collection index max-level if needed
    if (level > index.MaxLevel)
    {
        // update max level
        _snapshot.CollectionPage.UpdateCollectionIndex(index.Name).MaxLevel = level;
    }

    // call AddNode with key value
    return this.AddNode(index, key, dataBlock, level, last);
}
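Flip is not shown in this snippet; below is a minimal sketch of a typical flip-coin level generator for a skip list, consistent with the 1-32 range mentioned above (the random source is an assumption):

private readonly Random _rnd = new Random();

// returns a level between 1 and 32; each extra level has half the
// probability of the previous one (a sequence of coin flips)
private byte Flip()
{
    byte level = 1;

    for (var bits = _rnd.Next(); (bits & 1) == 1 && level < 32; bits >>= 1)
    {
        level++;
    }

    return level;
}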
/// <summary>
/// Based on an expression, returns the document field mapped from a class property.
/// Supports multi-level dotted notation: x => x.Customer.Name
/// Prefix is used on array expressions like: x => x.Customers.Any(z => z.Name == "John") (prefix = "Customers.")
/// </summary>
public string GetField(Expression expr, string prefix = "")
{
    var property = prefix + expr.GetPath();
    var parts = property.Split('.');
    var fields = new string[parts.Length];
    var type = _type;
    var isdbref = false;

    // loop "first.second.last"
    for (var i = 0; i < parts.Length; i++)
    {
        var entity = _mapper.GetEntityMapper(type);
        var part = parts[i];
        var prop = entity.Members.Find(x => x.MemberName == part);

        if (prop == null) throw LiteException.PropertyNotMapped(property);

        // if property is an IEnumerable, gets the underlying type (otherwise, gets PropertyType)
        type = prop.UnderlyingType;

        fields[i] = prop.FieldName;

        if (prop.FieldName == "_id" && isdbref)
        {
            isdbref = false;
            fields[i] = "$id";
        }

        // if this property is a DbRef and the next property is _id, change it to $id
        if (prop.IsDbRef) isdbref = true;
    }

    return string.Join(".", fields);
}
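A hedged usage sketch (the Order/Customer classes and the resolver context are illustrative):

// given: class Order { public Customer Customer { get; set; } }
//        class Customer { public string Name { get; set; } }
Expression<Func<Order, string>> expr = x => x.Customer.Name;

// each member is resolved through the mapper, so [BsonField] renames apply;
// with default mapping this yields "Customer.Name"
var field = resolver.GetField(expr.Body);

// for a DbRef member, a trailing _id is rewritten to the "$id" reference field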
private Query ReadOneQuery(StringScanner s)
{
    var field = BsonExpression.ReadExpression(s, false, false)?.Source ?? s.Scan(FieldPattern).Trim().ThrowIfEmpty("Invalid field", s);
    var oper = s.Scan(@"\s*(=|!=|>=|<=|>|<|like|starts[Ww]ith|in|between|contains)\s*").Trim().ToLower().ThrowIfEmpty("Invalid query operator", s);

    if (s.HasTerminated) throw LiteException.SyntaxError(s, "Missing value");

    var value = JsonSerializer.Deserialize(s);

    switch (oper)
    {
        case "=": return Query.EQ(field, value);
        case "!=": return Query.Not(field, value);
        case ">": return Query.GT(field, value);
        case ">=": return Query.GTE(field, value);
        case "<": return Query.LT(field, value);
        case "<=": return Query.LTE(field, value);
        case "like":
        case "startswith": return Query.StartsWith(field, value);
        case "in": return Query.In(field, value.AsArray);
        case "between": return Query.Between(field, value.AsArray[0], value.AsArray[1]);
        case "contains": return Query.Contains(field, value);
        default: throw new LiteException("Invalid query operator");
    }
}
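For illustration, shell query fragments this parser accepts and the Query each one maps to (field names and values are made up):

// "age >= 18"              -> Query.GTE("age", 18)
// "name like \"Jo\""       -> Query.StartsWith("name", "Jo")   (like is an alias of startsWith)
// "size between [1, 10]"   -> Query.Between("size", 1, 10)
// "tag in [\"a\", \"b\"]"  -> Query.In("tag", BsonArray)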
/// <summary>
/// Fill terms from WHERE predicate list
/// </summary>
private void SplitWherePredicateInTerms()
{
    void add(BsonExpression predicate)
    {
        // do not accept source `*` in WHERE
        if (predicate.UseSource)
        {
            throw new LiteException(0, $"WHERE filter can not use `*` expression in `{predicate.Source}`");
        }

        // add expression to where list, breaking apart AND statements
        if (predicate.IsPredicate || predicate.Type == BsonExpressionType.Or)
        {
            _terms.Add(predicate);
        }
        else if (predicate.Type == BsonExpressionType.And)
        {
            var left = predicate.Left;
            var right = predicate.Right;

            predicate.Parameters.CopyTo(left.Parameters);
            predicate.Parameters.CopyTo(right.Parameters);

            add(left);
            add(right);
        }
        else
        {
            throw LiteException.InvalidExpressionTypePredicate(predicate);
        }
    }

    // check all where predicates for AND operators
    foreach (var predicate in _query.Where)
    {
        add(predicate);
    }
}
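To make the splitting concrete, a sketch of what the recursion produces for a composite WHERE (expressions are illustrative):

// WHERE $.age > 18 AND $.active = true AND ($.a = 1 OR $.b = 2)
// is broken at each AND into independent terms:
//   _terms[0] = $.age > 18
//   _terms[1] = $.active = true
//   _terms[2] = ($.a = 1 OR $.b = 2)   // OR stays as a single term
// each term can then be matched against an index independently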
/// <summary>
/// Set a new value for an engine pragma (some pragmas only take effect after reload)
/// </summary>
public bool Pragma(string name, BsonValue value)
{
    if (this.Pragma(name) == value) return false;

    if (_locker.IsInTransaction) throw LiteException.AlreadyExistsTransaction();

    // run an inner transaction to edit the pragma on the commit event
    return this.AutoTransaction(transaction =>
    {
        transaction.Pages.Commit += (h) =>
        {
            h.Pragmas.Set(name, value, true);
        };

        return true;
    });
}
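A short usage sketch (USER_VERSION is one of LiteDB's standard pragmas; the engine variable is illustrative):

// returns false when the pragma already holds the requested value
var changed = engine.Pragma("USER_VERSION", 2);

// the single-argument overload reads the current value back
var version = engine.Pragma("USER_VERSION"); // 2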
/// <summary>
/// Find which index will be used and run the Execute method
/// </summary>
public virtual async IAsyncEnumerable<IndexNode> Run(CollectionPage col, IndexService indexer)
{
    // get index for this query
    var index = col.GetCollectionIndex(this.Name);

    if (index == null) throw LiteException.IndexNotFound(this.Name);

    var distinct = new HashSet<PageAddress>();

    await foreach (var node in this.Execute(indexer, index))
    {
        // distinct by dataBlock
        if (distinct.Contains(node.DataBlock) == false)
        {
            distinct.Add(node.DataBlock);

            yield return node;
        }
    }
}
/// <summary>
/// Set a new value for an engine pragma (some pragmas only take effect after reload)
/// </summary>
public async Task<bool> PragmaAsync(string name, BsonValue value)
{
    if (this.Pragma(name) == value) return false;

    if (_transaction != null) throw LiteException.AlreadyExistsTransaction();

    // run an inner transaction to edit the pragma on the commit event
    return await this.AutoTransaction(transaction =>
    {
        transaction.Pages.Commit += (h) =>
        {
            h.Pragmas.Set(name, value, true);
        };

        return Task.FromResult(true);
    });
}
public async Task<IBsonDataReader> Execute()
{
    var ahead = _tokenizer.LookAhead().Expect(TokenType.Word);

    LOG($"executing `{ahead.Value.ToUpper()}`", "SQL");

    switch (ahead.Value.ToUpper())
    {
        case "SELECT":
        case "EXPLAIN":
            return await this.ParseSelect();
        case "INSERT": return await this.ParseInsert();
        case "DELETE": return await this.ParseDelete();
        case "UPDATE": return await this.ParseUpdate();
        case "DROP": return await this.ParseDrop();
        case "RENAME": return await this.ParseRename();
        case "CREATE": return await this.ParseCreate();
        case "CHECKPOINT": return await this.ParseCheckpoint();
        case "BEGIN": return await this.ParseBegin();
        case "ROLLBACK": return await this.ParseRollback();
        case "COMMIT": return await this.ParseCommit();
        case "PRAGMA": return await this.ParsePragma();
        default: throw LiteException.UnexpectedToken(ahead);
    }
}
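A hedged usage sketch of reaching this parser through the public SQL entry point (file name and collection are illustrative):

using (var db = new LiteDatabase("demo.db"))
{
    db.Execute("INSERT INTO customers VALUES { name: \"John\", age: 30 }");

    using (var reader = db.Execute("SELECT $ FROM customers WHERE age > 18"))
    {
        while (reader.Read())
        {
            var doc = reader.Current.AsDocument;
        }
    }
}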
public void ThrowsExceptionOnLockTimeout()
{
    using (var tmp = new TempFile())
    {
        using (var db = new LiteDatabase($"filename={tmp.Filename};timeout=00:00:01"))
        {
            var transactionStarted = new AutoResetEvent(false);
            var transactionBlock = new AutoResetEvent(false);

            var blockTask = Task.Run(() =>
            {
                using (db.BeginTrans())
                {
                    transactionStarted.Set();
                    transactionBlock.WaitOne(TimeSpan.FromSeconds(10));
                }
            });

            transactionStarted.WaitOne(TimeSpan.FromSeconds(10));

            LiteException lockException = null;

            try
            {
                using (db.BeginTrans()) { }
            }
            catch (LiteException e)
            {
                lockException = e;
                transactionBlock.Set();
            }

            blockTask.Wait();

            Assert.IsNotNull(lockException);
            Assert.AreEqual(LiteException.LOCK_TIMEOUT, lockException.ErrorCode);
        }
    }
}
public BsonValue Run(string command)
{
    if (string.IsNullOrEmpty(command)) return BsonValue.Null;

    var s = new StringScanner(command);

    foreach (var cmd in this.Commands)
    {
        if (cmd.IsCommand(s))
        {
            if (this.Database == null) throw LiteException.NoDatabase();

            return cmd.Execute(this.Database, s);
        }
    }

    throw LiteException.InvalidCommand(command);
}
/// <summary>
/// Enter database-wide reserved lock. Waits for all readers/writers to finish.
/// If exclusive = false, new readers can still read but no writers can write. If exclusive = true, no new readers/writers at all.
/// </summary>
public void EnterReserved(bool exclusive)
{
    // check if engine was opened in read-only mode
    if (_readonly) throw new LiteException(0, "This operation is not supported because the engine was opened in read-only mode");

    // wait for all transactions to finish before entering reserved mode
    if (_transaction.TryEnterWriteLock(_timeout) == false) throw LiteException.LockTimeout("reserved", _timeout);

    ENSURE(_transaction.RecursiveReadCount == 0, "must have no other transaction here");

    // take reserved locker in write mode
    if (_reserved.TryEnterWriteLock(_timeout) == false)
    {
        // release transaction write lock before failing (avoid a double
        // release from the non-exclusive path below)
        _transaction.ExitWriteLock();

        throw LiteException.LockTimeout("reserved", _timeout);
    }

    if (exclusive == false)
    {
        // exit exclusive lock and allow new readers
        _transaction.ExitWriteLock();
    }
}
/// <summary>
/// Get all documents using an indexInfo as start point (_id index).
/// </summary>
public IEnumerable<BsonDocument> GetDocuments(string collection)
{
    var colPageID = (uint)_header["collections"].AsDocument[collection].AsInt32;
    var col = this.ReadPage(colPageID);
    var headPageID = (uint)col["indexes"][0]["headPageID"].AsInt32;

    var indexPages = this.VisitIndexPages(headPageID);

    foreach (var indexPageID in indexPages)
    {
        var indexPage = this.ReadPage(indexPageID);

        foreach (var node in indexPage["nodes"].AsArray)
        {
            var dataBlock = node["dataBlock"];

            // if datablock links to a data page
            if (dataBlock["pageID"].AsInt32 != -1)
            {
                // read data page and data block
                var dataPage = this.ReadPage((uint)dataBlock["pageID"].AsInt32);

                if (dataPage["pageType"].AsInt32 != 4) continue;

                var block = dataPage["blocks"].AsArray.FirstOrDefault(x => x["index"] == dataBlock["index"]).AsDocument;

                if (block == null) continue;

                // read byte[] from block or from extend pages
                var data = block["extendPageID"] == -1 ?
                    block["data"].AsBinary :
                    this.ReadExtendData((uint)block["extendPageID"].AsInt32);

                if (data.Length == 0) continue;

                // BSON format is the same across all versions
                var doc = BsonSerializer.Deserialize(data);

                // change _id PK in _chunks collection
                if (collection == "_chunks")
                {
                    var parts = doc["_id"].AsString.Split('\\');

                    if (!int.TryParse(parts[1], out var n)) throw LiteException.InvalidFormat("_id");

                    doc["_id"] = new BsonDocument
                    {
                        ["f"] = parts[0],
                        ["n"] = n
                    };
                }

                yield return doc;
            }
        }
    }
}
/// <summary>
/// Read all database pages from the v7 structure into a flexible BsonDocument - read only what is really needed
/// </summary>
private BsonDocument ReadPage(uint pageID)
{
    if (pageID * V7_PAGE_SIZE > _stream.Length) return null;

    _stream.Position = pageID * V7_PAGE_SIZE; // v7 uses 4k page size

    _stream.Read(_buffer, 0, V7_PAGE_SIZE);

    // decrypt encrypted page (except header page - header is plain data)
    if (_aes != null && pageID > 0)
    {
        _buffer = _aes.Decrypt(_buffer);
    }

    var reader = new ByteReader(_buffer);

    // reading page header
    var page = new BsonDocument
    {
        ["pageID"] = (int)reader.ReadUInt32(),
        ["pageType"] = (int)reader.ReadByte(),
        ["prevPageID"] = (int)reader.ReadUInt32(),
        ["nextPageID"] = (int)reader.ReadUInt32(),
        ["itemCount"] = (int)reader.ReadUInt16()
    };

    // skip freeByte + reserved
    reader.ReadBytes(2 + 8);

    #region Header (1)

    // read header
    if (page["pageType"] == 1)
    {
        var info = reader.ReadString(27);
        var ver = reader.ReadByte();

        if (string.CompareOrdinal(info, HeaderPage.HEADER_INFO) != 0 || ver != 7) throw LiteException.InvalidDatabase();

        // skip ChangeID + FreeEmptyPageID + LastPageID
        reader.ReadBytes(2 + 4 + 4);

        page["userVersion"] = (int)reader.ReadUInt16();
        page["password"] = reader.ReadBytes(20);
        page["salt"] = reader.ReadBytes(16);
        page["collections"] = new BsonDocument();

        var cols = reader.ReadByte();

        for (var i = 0; i < cols; i++)
        {
            var name = reader.ReadString();
            var colPageID = reader.ReadUInt32();

            page["collections"][name] = (int)colPageID;
        }
    }

    #endregion

    #region Collection (2)

    // collection page
    else if (page["pageType"] == 2)
    {
        page["collectionName"] = reader.ReadString();
        page["indexes"] = new BsonArray();
        reader.ReadBytes(12);

        for (var i = 0; i < 16; i++)
        {
            var index = new BsonDocument();

            var field = reader.ReadString();
            var eq = field.IndexOf('=');

            if (eq > 0)
            {
                index["name"] = field.Substring(0, eq);
                index["expression"] = field.Substring(eq + 1);
            }
            else
            {
                index["name"] = field;
                index["expression"] = "$." + field;
            }

            index["unique"] = reader.ReadBoolean();
            index["headPageID"] = (int)reader.ReadUInt32();

            // skip HeadNode (index) + TailNode + FreeIndexPageID
            reader.ReadBytes(2 + 6 + 4);

            if (field.Length > 0) page["indexes"].AsArray.Add(index);
        }
    }

    #endregion

    #region Index (3)

    else if (page["pageType"] == 3)
    {
        page["nodes"] = new BsonArray();

        for (var i = 0; i < page["itemCount"].AsInt32; i++)
        {
            var node = new BsonDocument
            {
                ["index"] = (int)reader.ReadUInt16()
            };

            var levels = reader.ReadByte();

            // skip Slot + PrevNode + NextNode
            reader.ReadBytes(1 + 6 + 6);

            var length = reader.ReadUInt16();

            // skip DataType + KeyValue
            reader.ReadBytes(1 + length);

            node["dataBlock"] = new BsonDocument
            {
                ["pageID"] = (int)reader.ReadUInt32(),
                ["index"] = (int)reader.ReadUInt16()
            };

            // reading Prev[0]
            node["prev"] = new BsonDocument
            {
                ["pageID"] = (int)reader.ReadUInt32(),
                ["index"] = (int)reader.ReadUInt16()
            };

            // reading Next[0]
            node["next"] = new BsonDocument
            {
                ["pageID"] = (int)reader.ReadUInt32(),
                ["index"] = (int)reader.ReadUInt16()
            };

            // skip Prev/Next[1..N]
            reader.ReadBytes((levels - 1) * (6 + 6));

            page["nodes"].AsArray.Add(node);
        }
    }

    #endregion

    #region Data (4)

    else if (page["pageType"] == 4)
    {
        page["blocks"] = new BsonArray();

        for (var i = 0; i < page["itemCount"].AsInt32; i++)
        {
            var block = new BsonDocument
            {
                ["index"] = (int)reader.ReadUInt16(),
                ["extendPageID"] = (int)reader.ReadUInt32()
            };

            var length = reader.ReadUInt16();

            block["data"] = reader.ReadBytes(length);

            page["blocks"].AsArray.Add(block);
        }
    }

    #endregion

    #region Extend (5)

    else if (page["pageType"] == 5)
    {
        page["data"] = reader.ReadBytes(page["itemCount"].AsInt32);
    }

    #endregion

    return page;
}
/// <summary>
/// Get a new page segment for this content length using a fixed index
/// </summary>
private BufferSlice InternalInsert(ushort bytesLength, ref byte index)
{
    var isNewInsert = index == byte.MaxValue;

    ENSURE(_buffer.ShareCounter == BUFFER_WRITABLE, "page must be writable to support changes");
    ENSURE(bytesLength > 0, "must insert more than 0 bytes");
    ENSURE(this.FreeBytes >= bytesLength + (isNewInsert ? SLOT_SIZE : 0), "length must be always lower than current free space");
    ENSURE(this.ItemsCount < byte.MaxValue, "page full");
    ENSURE(this.FreeBytes >= this.FragmentedBytes, "fragmented bytes must be at most free bytes");

    if (!(this.FreeBytes >= bytesLength + (isNewInsert ? SLOT_SIZE : 0)))
    {
        throw LiteException.InvalidFreeSpacePage(this.PageID, this.FreeBytes, bytesLength + (isNewInsert ? SLOT_SIZE : 0));
    }

    // calculate how many continuous bytes are available in this page
    var continuousBlocks = this.FreeBytes - this.FragmentedBytes - (isNewInsert ? SLOT_SIZE : 0);

    ENSURE(continuousBlocks == PAGE_SIZE - this.NextFreePosition - this.FooterSize - (isNewInsert ? SLOT_SIZE : 0), "continuousBlock must be same as from NextFreePosition");

    // if continuous blocks are not enough for this data, run page defrag
    if (bytesLength > continuousBlocks)
    {
        this.Defrag();
    }

    // if this is a new insert segment, request a new index
    if (index == byte.MaxValue)
    {
        // getting a new free index must run after defrag
        index = this.GetFreeIndex();
    }

    if (index > this.HighestIndex || this.HighestIndex == byte.MaxValue)
    {
        ENSURE(index == (byte)(this.HighestIndex + 1), "new index must be next highest index");

        this.HighestIndex = index;
    }

    // get segment addresses
    var positionAddr = CalcPositionAddr(index);
    var lengthAddr = CalcLengthAddr(index);

    ENSURE(_buffer.ReadUInt16(positionAddr) == 0, "slot position must be empty before use");
    ENSURE(_buffer.ReadUInt16(lengthAddr) == 0, "slot length must be empty before use");

    // get next free position in page
    var position = this.NextFreePosition;

    // write this segment position at its position address
    _buffer.Write(position, positionAddr);

    // write page segment length at its length address
    _buffer.Write(bytesLength, lengthAddr);

    // update next free position and counters
    this.ItemsCount++;
    this.UsedBytes += bytesLength;
    this.NextFreePosition += bytesLength;

    this.IsDirty = true;

    ENSURE(position + bytesLength <= (PAGE_SIZE - (this.HighestIndex + 1) * SLOT_SIZE), "new buffer slice must not override footer area");

    // create page segment based on the newly inserted segment
    return _buffer.Slice(position, bytesLength);
}
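A sketch of the page layout this method relies on; the footer address arithmetic below is an assumption consistent with how CalcPositionAddr/CalcLengthAddr are used above, not a verified copy of the real helpers:

// [ page header | segment 0 | segment 1 | ...free space... | footer ]
// the footer grows backward from the page end: one 4-byte slot per
// segment index, holding a 2-byte length followed by a 2-byte position
const int PAGE_SIZE = 8192;
const int SLOT_SIZE = 4;

static int CalcLengthAddr(byte index) => PAGE_SIZE - ((index + 1) * SLOT_SIZE);
static int CalcPositionAddr(byte index) => PAGE_SIZE - ((index + 1) * SLOT_SIZE) + 2;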
/// <summary>
/// Update documents using a transform expression (must return a scalar/document value) with a predicate as filter
/// </summary>
public int UpdateMany(string collection, BsonExpression transform, BsonExpression predicate)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (transform == null) throw new ArgumentNullException(nameof(transform));

    return this.AutoTransaction(transaction =>
    {
        return this.Update(collection, transformDocs());

        IEnumerable<BsonDocument> transformDocs()
        {
            var q = new Query { Select = "$", ForUpdate = true };

            if (predicate != null)
            {
                q.Where.Add(predicate);
            }

            using (var reader = this.Query(collection, q))
            {
                while (reader.Read())
                {
                    var doc = reader.Current.AsDocument;

                    var id = doc["_id"];
                    var value = transform.ExecuteScalar(doc, _header.Pragmas.Collation);

                    if (!value.IsDocument) throw new ArgumentException("Extend expression must return a document", nameof(transform));

                    var result = BsonExpressionMethods.EXTEND(doc, value.AsDocument).AsDocument;

                    // be sure the result document contains the same _id as the current doc
                    if (result.TryGetValue("_id", out var newId))
                    {
                        if (newId != id) throw LiteException.InvalidUpdateField("_id");
                    }
                    else
                    {
                        result["_id"] = id;
                    }

                    yield return result;
                }
            }
        }
    });
}
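A hedged usage sketch (collection, fields, and expressions are illustrative; the transform result is merged over each matched document via EXTEND):

// raise price by 10% on every document matching the predicate
var count = engine.UpdateMany(
    "products",
    BsonExpression.Create("{ price: $.price * 1.1 }"),
    BsonExpression.Create("$.price > 100"));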
/// <summary>
/// Insert a new index node inside a collection index.
/// </summary>
private IndexNode AddNode(CollectionIndex index, BsonValue key, PageAddress dataBlock, byte level, IndexNode last)
{
    // get a free index page for head node
    var bytesLength = IndexNode.GetNodeLength(level, key, out var keyLength);

    // test for index key max length
    if (keyLength > MAX_INDEX_KEY_LENGTH) throw LiteException.InvalidIndexKey($"Index key must be less than {MAX_INDEX_KEY_LENGTH} bytes.");

    var indexPage = _snapshot.GetFreeIndexPage(bytesLength, ref index.FreeIndexPageList);

    // create node in buffer
    var node = indexPage.InsertIndexNode(index.Slot, level, key, dataBlock, bytesLength);

    // now, link the new index node in the right place
    var cur = this.GetNode(index.Head);

    // used as cache for the last visited node
    IndexNode cache = null;

    // scan from top left
    for (int i = index.MaxLevel - 1; i >= 0; i--)
    {
        // get cache for last node
        cache = cache != null && cache.Position == cur.Next[i] ? cache : this.GetNode(cur.Next[i]);

        // for(; <while_not_this>; <do_this>) { ... }
        for (; cur.Next[i].IsEmpty == false; cur = cache)
        {
            // get cache for last node
            cache = cache != null && cache.Position == cur.Next[i] ? cache : this.GetNode(cur.Next[i]);

            // read next node to compare
            var diff = cache.Key.CompareTo(key, _collation);

            // if unique and diff == 0, throw index exception (must rollback transaction - other nodes can be dirty)
            if (diff == 0 && index.Unique) throw LiteException.IndexDuplicateKey(index.Name, key);

            if (diff == 1) break;
        }

        if (i <= (level - 1)) // level == length
        {
            // cur  = current node (immediately before the new node - prev)
            // node = new inserted node
            // next = next node (where cur was pointing)
            node.SetNext((byte)i, cur.Next[i]);
            node.SetPrev((byte)i, cur.Position);
            cur.SetNext((byte)i, node.Position);

            var next = this.GetNode(node.Next[i]);

            if (next != null)
            {
                next.SetPrev((byte)i, node.Position);
            }
        }
    }

    // if last node exists, create a double linked list
    if (last != null)
    {
        ENSURE(last.NextNode == PageAddress.Empty, "last index node must point to null");

        last.SetNextNode(node.Position);
    }

    // fix page position in free list slot
    _snapshot.AddOrRemoveFreeIndexList(node.Page, ref index.FreeIndexPageList);

    return node;
}
public IEnumerable<BsonValue> Execute(StringScanner s, LiteEngine engine)
{
    var col = this.ReadCollection(engine, s);

    // single document update
    if (s.Match(@"\s*\{"))
    {
        var doc = JsonSerializer.Deserialize(s.ToString()).AsDocument;

        s.ThrowIfNotFinish();

        yield return engine.Update(col, doc);
    }
    // query update
    else
    {
        // db.colName.update
        //     field = value,
        //     array += valueToAdd,
        // where _id = 1
        //   and ...

        var query = Query.All();
        var updates = new Update();

        while (!s.HasTerminated)
        {
            var path = BsonExpression.ReadExpression(s, true, true).Source;
            var action = s.Scan(@"\s*\+?=\s*").Trim().ThrowIfEmpty("Invalid operator (support = or +=)", s);
            var value = this.ReadBsonValue(s);
            var expr = value == null ? BsonExpression.ReadExpression(s, true, false)?.Source : null;

            if (action == "+=" && value != null)
            {
                updates.Add(path, value);
            }
            else if (action == "+=" && expr != null)
            {
                updates.AddExpr(path, expr);
            }
            else if (action == "=" && value != null)
            {
                updates.Set(path, value);
            }
            else if (action == "=" && expr != null)
            {
                updates.SetExpr(path, expr);
            }
            else
            {
                throw LiteException.SyntaxError(s);
            }

            s.Scan(@"\s*");

            if (s.Scan(@",\s*").Length > 0) continue;
            else if (s.Scan(@"where\s*").Length > 0 || s.HasTerminated) break;
            else throw LiteException.SyntaxError(s);
        }

        if (!s.HasTerminated)
        {
            query = this.ReadQuery(s, false);
        }

        s.ThrowIfNotFinish();

        yield return engine.Update(col, query, updates);
    }
}
public IEnumerable<BsonValue> Execute(StringScanner s, LiteEngine engine)
{
    var col = this.ReadCollection(engine, s);
    var fields = new Dictionary<string, BsonExpression>();
    var index = 0;

    // read all field definitions (supports AS keyword to name a field)
    while (!s.HasTerminated)
    {
        // try to read any kind of expression
        var expression = BsonExpression.ReadExpression(s, false, false);

        // if no valid expression found, try to read as path only (will prepend $.)
        if (expression == null)
        {
            expression = BsonExpression.ReadExpression(s, true, true);
        }

        var key = s.Scan(@"\s*as\s+([\w-]+)", 1).TrimToNull()
            ?? this.NamedField(expression)
            ?? ("expr" + (++index));

        // if key already exists, add with another name
        while (fields.ContainsKey(key))
        {
            key = "expr" + (++index);
        }

        fields.Add(key, expression);

        if (s.Scan(@"\s*,\s*").Length > 0) continue;

        break;
    }

    // select command requires an output value, path or expression
    if (fields.Count == 0) throw LiteException.SyntaxError(s, "Missing select path");

    var query = Query.All();

    if (s.Scan(@"\s*where\s*").Length > 0)
    {
        query = this.ReadQuery(s, true);
    }

    var skipLimit = this.ReadSkipLimit(s);
    var includes = this.ReadIncludes(s);

    s.ThrowIfNotFinish();

    var docs = engine.Find(col, query, includes, skipLimit.Key, skipLimit.Value);

    foreach (var doc in docs)
    {
        // if a single value, return just the field
        if (fields.Count == 1)
        {
            foreach (var value in fields.Values.First().Execute(doc, false))
            {
                yield return value;
            }
        }
        else
        {
            var output = new BsonDocument();

            foreach (var field in fields)
            {
                output[field.Key] = field.Value.Execute(doc, true).First();
            }

            yield return output;
        }
    }
}
/// <summary>
/// Implement internal document update
/// </summary>
private bool UpdateDocument(Snapshot snapshot, CollectionPage col, BsonDocument doc, IndexService indexer, DataService data)
{
    // normalize id before find
    var id = doc["_id"];

    // validate id for null, min/max values
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    // find indexNode in pk index
    var pkNode = indexer.Find(col.PK, id, false, LiteDB.Query.Ascending);

    // if document not found, no updates
    if (pkNode == null) return false;

    // update data storage
    data.Update(col, pkNode.DataBlock, doc);

    // get all current non-pk index nodes from this data block (slot, key, nodePosition)
    var oldKeys = indexer.GetNodeList(pkNode.NextNode)
        .Select(x => new Tuple<byte, BsonValue, PageAddress>(x.Slot, x.Key, x.Position))
        .ToArray();

    // build a list of all new index keys
    var newKeys = new List<Tuple<byte, BsonValue, string>>();

    foreach (var index in col.GetCollectionIndexes().Where(x => x.Name != "_id"))
    {
        // get all keys from expression over document
        var keys = index.BsonExpr.Execute(doc, _header.Pragmas.Collation);

        foreach (var key in keys)
        {
            newKeys.Add(new Tuple<byte, BsonValue, string>(index.Slot, key, index.Name));
        }
    }

    if (oldKeys.Length == 0 && newKeys.Count == 0) return true;

    // get a list of all nodes that are in oldKeys but not in newKeys (must delete)
    var toDelete = new HashSet<PageAddress>(oldKeys
        .Where(x => newKeys.Any(n => n.Item1 == x.Item1 && n.Item2 == x.Item2) == false)
        .Select(x => x.Item3));

    // get a list of all keys that are not in oldKeys (must insert)
    var toInsert = newKeys
        .Where(x => oldKeys.Any(o => o.Item1 == x.Item1 && o.Item2 == x.Item2) == false)
        .ToArray();

    // if nothing to change, just exit
    if (toDelete.Count == 0 && toInsert.Length == 0) return true;

    // delete nodes and return the last kept node in the list
    var last = indexer.DeleteList(pkNode.Position, toDelete);

    // now, insert all new nodes
    foreach (var elem in toInsert)
    {
        var index = col.GetCollectionIndex(elem.Item3);

        last = indexer.AddNode(index, elem.Item2, pkNode.DataBlock, last);
    }

    return true;
}
/// <summary>
/// Create a new index (or do nothing if it already exists) for a collection/field
/// </summary>
public bool EnsureIndex(string collection, string name, BsonExpression expression, bool unique)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name));
    if (expression == null) throw new ArgumentNullException(nameof(expression));
    if (expression.IsIndexable == false) throw new ArgumentException("Index expressions must contain at least one document field. Used methods must be immutable. Parameters are not supported.", nameof(expression));

    if (name.Length > INDEX_NAME_MAX_LENGTH) throw LiteException.InvalidIndexName(name, collection, "MaxLength = " + INDEX_NAME_MAX_LENGTH);
    if (!name.IsWord()) throw LiteException.InvalidIndexName(name, collection, "Use only [a-Z$_]");
    if (name.StartsWith("$")) throw LiteException.InvalidIndexName(name, collection, "Index name can't start with `$`");

    if (name == "_id") return false; // always exists

    return this.AutoTransaction(transaction =>
    {
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true);
        var col = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot);
        var data = new DataService(snapshot);

        // check if index already exists
        var current = col.GetCollectionIndex(name);

        // if it already exists, just exit
        if (current != null)
        {
            // but if the expression is different, throw error
            if (current.Expression != expression.Source) throw LiteException.IndexAlreadyExist(name);

            return false;
        }

        LOG($"create index `{collection}.{name}`", "COMMAND");

        // create index head
        var index = indexer.CreateIndex(name, expression.Source, unique);
        var count = 0u;

        // read all objects (read from PK index)
        foreach (var pkNode in new IndexAll("_id", LiteDB.Query.Ascending).Run(col, indexer))
        {
            using (var reader = new BufferReader(data.Read(pkNode.DataBlock)))
            {
                var doc = reader.ReadDocument(expression.Fields);

                // first/last node in this document that will be added
                IndexNode last = null;
                IndexNode first = null;

                // get values from expression in document
                var keys = expression.Execute(doc);

                // add an index node for each value
                foreach (var key in keys)
                {
                    // when index key is an array, get the items inside the array.
                    // valid only for the first level (nested arrays are indexed as arrays)
                    if (key.IsArray)
                    {
                        var arr = key.AsArray;

                        foreach (var itemKey in arr)
                        {
                            // insert new index node
                            var node = indexer.AddNode(index, itemKey, pkNode.DataBlock, last, _flipCoin);

                            if (first == null) first = node;

                            last = node;

                            count++;
                        }
                    }
                    else
                    {
                        // insert new index node
                        var node = indexer.AddNode(index, key, pkNode.DataBlock, last, _flipCoin);

                        if (first == null) first = node;

                        last = node;

                        count++;
                    }
                }

                // fix single linked-list in pkNode
                if (first != null)
                {
                    last.SetNextNode(pkNode.NextNode);
                    pkNode.SetNextNode(first.Position);
                }
            }

            transaction.Safepoint();
        }

        index.KeyCount = count;

        return true;
    });
}
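A short usage sketch (collection, names, and expressions are illustrative):

// unique index over a single field
engine.EnsureIndex("customers", "idx_email", BsonExpression.Create("$.email"), true);

// multi-key index: an expression yielding an array indexes each item
engine.EnsureIndex("customers", "idx_tags", BsonExpression.Create("$.tags[*]"), false);

// same name + same expression again: returns false (no-op)
// same name + different expression: throws LiteException.IndexAlreadyExist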