internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // normalize both boundaries with the index options before comparing
    var lower = _start.Normalize(index.Options);
    var upper = _end.Normalize(index.Options);

    // walk ascending when start <= end, otherwise descending
    var direction = lower.CompareTo(upper) <= 0 ? Query.Ascending : Query.Descending;

    // seek the first candidate (sibling = true returns the nearest node when no exact match)
    var current = indexer.Find(index, lower, true, direction);

    while (current != null)
    {
        var cmp = current.Key.CompareTo(upper);

        // stop once the key has passed the end boundary in the walk direction
        if (cmp != 0 && cmp == direction) break;

        yield return current;

        // advance along level 0 in the chosen direction
        current = indexer.GetNode(current.NextPrev(0, direction));
    }
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // normalize the search value, then seek the nearest node (sibling = true)
    var value = _value.Normalize(index.Options);
    var prefix = value.AsString;

    var current = indexer.Find(index, value, true, Query.Ascending);

    // scan forward while keys still carry the prefix
    while (current != null)
    {
        // key will not be null here: null sorts before string in bsontype order
        var keyString = current.Key.AsString;

        if (keyString.StartsWith(prefix))
        {
            // skip nodes without a data block reference
            if (!current.DataBlock.IsEmpty)
            {
                yield return current;
            }
        }
        else
        {
            // keys are ordered, so the first non-prefix key ends the range
            break;
        }

        current = indexer.GetNode(current.Next[0]);
    }
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // substring match cannot use an index seek: scan everything ascending
    var value = _value.Normalize(index.Options);

    return indexer
        .FindAll(index, Query.Ascending)
        .Where(node => node.Key.IsString && node.Key.AsString.Contains(value));
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // delegate each distinct candidate value to an equality query over the same field;
    // SelectMany keeps the same lazy, ordered-per-value enumeration the loop had
    return _values
        .Distinct()
        .SelectMany(value => Query.EQ(this.Field, value).ExecuteIndex(indexer, index));
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // exact seek (sibling = false): null means no key equals the value
    var current = indexer.Find(index, _value, false, Query.Ascending);

    if (current == null) yield break;

    yield return current;

    // a unique index holds at most one occurrence — done
    if (index.Unique) yield break;

    // duplicates are adjacent on level 0: walk forward while keys keep matching
    while (true)
    {
        var nextAddress = current.Next[0];

        if (nextAddress.IsEmpty) break;

        current = indexer.GetNode(nextAddress);

        if (current.Key.CompareTo(_value) != 0) break;

        if (current.IsHeadTail(index)) yield break;

        yield return current;
    }
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // normalize value, then seek the nearest node (sibling = true)
    var value = _value.Normalize(index.Options);

    var current = indexer.Find(index, value, true, Query.Ascending);

    if (current == null) yield break;

    // walk forward over level 0 until the chain ends
    while (current != null)
    {
        var cmp = current.Key.CompareTo(value);

        // emit keys strictly greater, or equal when the query is inclusive
        if (cmp == 1 || (_equals && cmp == 0))
        {
            if (current.IsHeadTail(index)) yield break;

            yield return current;
        }

        current = indexer.GetNode(current.Next[0]);
    }
}
/// <summary>
/// Drop all pages of an index. Each index uses its own page sequence,
/// so collecting the page IDs of every node covers the whole index.
/// </summary>
public void DropIndex(CollectionIndex index)
{
    var pageIDs = new HashSet<uint>();

    // walk every node: remember its page and unlink it from the
    // cross-index node list (PrevNode/NextNode) before the pages go away
    foreach (var node in this.FindAll(index, Query.Ascending))
    {
        pageIDs.Add(node.Position.PageID);

        var before = this.GetNode(node.PrevNode);
        var after = this.GetNode(node.NextNode);

        if (before != null)
        {
            before.NextNode = node.NextNode;
            _pager.SetDirty(before.Page);
        }

        if (after != null)
        {
            after.PrevNode = node.PrevNode;
            _pager.SetDirty(after.Page);
        }
    }

    // release every page that belonged to this index
    foreach (var pageID in pageIDs)
    {
        _pager.DeletePage(pageID);
    }
}
/// <summary>
/// Insert a new node inside a collection index (skip-list insert).
/// Allocates the node on a free index page, then links it into every
/// level from <paramref name="level"/> - 1 down to 0.
/// Throws if the key is too long or (on a unique index) already present.
/// </summary>
private IndexNode AddNode(CollectionIndex index, BsonValue key, byte level)
{
    // creating a new index node
    var node = new IndexNode(level) { Key = key, KeyLength = key.GetBytesCount() };

    if (node.KeyLength > MAX_INDEX_LENGTH)
    {
        throw LiteException.IndexKeyTooLong();
    }

    // get a free page with enough room for this node
    var page = _pager.GetFreePage<IndexPage>(index.FreeIndexPageID, node.Length);

    node.Position = new PageAddress(page.PageID, page.Nodes.NextIndex());
    node.Page = page;

    // add index node to page
    page.Nodes.Add(node.Position.Index, node);

    // update freebytes + items count
    page.UpdateItemCount();

    // now, let's link my index node in the right place, starting from the head sentinel
    var cur = this.GetNode(index.HeadNode);

    // scan from the top level down to level 0
    for (var i = IndexNode.MAX_LEVEL_LENGTH - 1; i >= 0; i--)
    {
        // for(; <while_not_this>; <do_this>) { ... }
        for (; cur.Next[i].IsEmpty == false; cur = this.GetNode(cur.Next[i]))
        {
            // read next node to compare
            var diff = this.GetNode(cur.Next[i]).Key.CompareTo(key);

            // if unique and diff = 0, throw index exception (must rollback transaction - other nodes can be dirty)
            if (diff == 0 && index.Options.Unique) throw LiteException.IndexDuplicateKey(index.Field, key);

            // next key is greater: insertion point for this level found
            if (diff == 1) break;
        }

        // only splice the new node on levels it actually participates in
        if (i <= (level - 1)) // level == length
        {
            // cur  = node immediately before the insertion point (prev)
            // node = new inserted node
            // next = node cur was pointing to
            node.Next[i] = cur.Next[i];
            node.Prev[i] = cur.Position;
            cur.Next[i] = node.Position;

            var next = this.GetNode(node.Next[i]);

            if (next != null)
            {
                next.Prev[i] = node.Position;
                next.Page.IsDirty = true;
            }

            cur.Page.IsDirty = true;
        }
    }

    // add/remove indexPage on freelist if it has space
    _pager.AddOrRemoveToFreeList(page.FreeBytes > IndexPage.INDEX_RESERVED_BYTES, page, index.Page, ref index.FreeIndexPageID);

    page.IsDirty = true;

    return node;
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // no predicate to apply: record that this query resolves as a full scan
    this.ExecuteMode = QueryExecuteMode.FullScan;

    // enumerate every node of the index in ascending order
    return indexer.FindAll(index, Query.Ascending);
}
/// <summary>
/// Drop all index pages. Collects the page ID of every node in this
/// index, then deletes each page once.
/// </summary>
public void DropIndex(CollectionIndex index)
{
    var pages = new HashSet<uint>();

    // fix: was `Query.Query.Ascending` — every other call site in this file
    // (including the sibling DropIndex overload) uses `Query.Ascending`
    var nodes = this.FindAll(index, Query.Ascending);

    // get reference for pageID from all index nodes
    foreach (var node in nodes)
    {
        pages.Add(node.Position.PageID);
    }

    // now delete all pages
    foreach (var page in pages)
    {
        _pager.DeletePage(page);
    }
}
/// <summary>
/// Insert a new node inside a collection index (skip-list insert).
/// Allocates the node on a free index page, links it into every skip-list
/// level it participates in, and — when <paramref name="last"/> is given —
/// appends it to the doubly linked node list after <paramref name="last"/>.
/// Throws if the key is too long or (on a unique index) already present.
/// </summary>
private IndexNode AddNode(CollectionIndex index, BsonValue key, byte level, IndexNode last)
{
    // calc key size
    var keyLength = key.GetBytesCount(false);

    if (keyLength > MAX_INDEX_LENGTH)
    {
        throw LiteException.IndexKeyTooLong();
    }

    // creating a new index node
    var node = new IndexNode(level)
    {
        Key = key,
        KeyLength = (ushort)keyLength,
        Slot = (byte)index.Slot
    };

    // get a free page with enough room for this node
    var page = _pager.GetFreePage<IndexPage>(index.FreeIndexPageID, node.Length);

    node.Position = new PageAddress(page.PageID, page.Nodes.NextIndex());
    node.Page = page;

    // add index node to page
    page.Nodes.Add(node.Position.Index, node);

    // update freebytes + items count
    page.UpdateItemCount();

    // now, let's link my index node in the right place, starting from the head sentinel
    var cur = this.GetNode(index.HeadNode);

    // cache the last node read to avoid re-fetching the same position twice
    IndexNode cache = null;

    // scan from the top level down to level 0
    for (var i = IndexNode.MAX_LEVEL_LENGTH - 1; i >= 0; i--)
    {
        // reuse the cached node when it still matches cur.Next[i]
        cache = cache != null && cache.Position.Equals(cur.Next[i]) ? cache : this.GetNode(cur.Next[i]);

        // for(; <while_not_this>; <do_this>) { ... }
        for (; cur.Next[i].IsEmpty == false; cur = cache)
        {
            // reuse the cached node when it still matches cur.Next[i]
            cache = cache != null && cache.Position.Equals(cur.Next[i]) ? cache : this.GetNode(cur.Next[i]);

            // read next node to compare
            var diff = cache.Key.CompareTo(key);

            // if unique and diff = 0, throw index exception (must rollback transaction - other nodes can be dirty)
            if (diff == 0 && index.Unique)
            {
                throw LiteException.IndexDuplicateKey(index.Field, key);
            }

            // next key is greater: insertion point for this level found
            if (diff == 1)
            {
                break;
            }
        }

        // only splice the new node on levels it actually participates in
        if (i <= (level - 1)) // level == length
        {
            // cur  = node immediately before the insertion point (prev)
            // node = new inserted node
            // next = node cur was pointing to

            _pager.SetDirty(cur.Page);

            node.Next[i] = cur.Next[i];
            node.Prev[i] = cur.Position;
            cur.Next[i] = node.Position;

            var next = this.GetNode(node.Next[i]);

            if (next != null)
            {
                next.Prev[i] = node.Position;
                _pager.SetDirty(next.Page);
            }
        }
    }

    // add/remove indexPage on freelist if it has space
    _pager.AddOrRemoveToFreeList(page.FreeBytes > IndexPage.INDEX_RESERVED_BYTES, page, index.Page, ref index.FreeIndexPageID);

    // if last node exists, create a double linked list
    if (last != null)
    {
        // link new node with last node
        if (last.NextNode.IsEmpty == false)
        {
            // fix link pointers when more nodes follow in the list
            var next = this.GetNode(last.NextNode);
            next.PrevNode = node.Position;
            last.NextNode = node.Position;
            node.PrevNode = last.Position;
            node.NextNode = next.Position;

            _pager.SetDirty(next.Page);
        }
        else
        {
            last.NextNode = node.Position;
            node.PrevNode = last.Position;
        }

        // set last node page as dirty
        _pager.SetDirty(last.Page);
    }

    return (node);
}
/// <summary>
/// Insert a new node inside a collection index, flipping a coin to pick its skip-list level.
/// </summary>
public IndexNode AddNode(CollectionIndex index, BsonValue key)
{
    // normalize key according to the index options before inserting
    var normalized = key.Normalize(index.Options);

    // random level keeps the skip-list balanced on average
    return this.AddNode(index, normalized, this.FlipCoin());
}
/// <summary>
/// Insert a new node inside a collection index (skip-list insert).
/// Allocates the node on a free index page, links it into every skip-list
/// level it participates in, and — when <paramref name="last"/> is given —
/// appends it to the doubly linked node list after <paramref name="last"/>.
/// Throws if the key is too long or (on a unique index) already present.
/// </summary>
private IndexNode AddNode(CollectionIndex index, BsonValue key, byte level, IndexNode last)
{
    // calc key size
    var keyLength = key.GetBytesCount(false);

    if (keyLength > MAX_INDEX_LENGTH)
    {
        throw LiteException.IndexKeyTooLong();
    }

    // creating a new index node
    var node = new IndexNode(level)
    {
        Key = key,
        KeyLength = (ushort)keyLength,
        Slot = (byte)index.Slot
    };

    // get a free page with enough room for this node
    var page = _pager.GetFreePage<IndexPage>(index.FreeIndexPageID, node.Length);

    node.Position = new PageAddress(page.PageID, page.Nodes.NextIndex());
    node.Page = page;

    // add index node to page
    page.Nodes.Add(node.Position.Index, node);

    // update freebytes + items count
    page.UpdateItemCount();

    // now, let's link my index node in the right place, starting from the head sentinel
    var cur = this.GetNode(index.HeadNode);

    // cache the last node read to avoid re-fetching the same position twice
    IndexNode cache = null;

    // scan from the top level down to level 0
    for (var i = IndexNode.MAX_LEVEL_LENGTH - 1; i >= 0; i--)
    {
        // reuse the cached node when it still matches cur.Next[i]
        cache = cache != null && cache.Position.Equals(cur.Next[i]) ? cache : this.GetNode(cur.Next[i]);

        // for(; <while_not_this>; <do_this>) { ... }
        for (; cur.Next[i].IsEmpty == false; cur = cache)
        {
            // reuse the cached node when it still matches cur.Next[i]
            cache = cache != null && cache.Position.Equals(cur.Next[i]) ? cache : this.GetNode(cur.Next[i]);

            // read next node to compare
            var diff = cache.Key.CompareTo(key);

            // if unique and diff = 0, throw index exception (must rollback transaction - other nodes can be dirty)
            if (diff == 0 && index.Unique) throw LiteException.IndexDuplicateKey(index.Field, key);

            // next key is greater: insertion point for this level found
            if (diff == 1) break;
        }

        // only splice the new node on levels it actually participates in
        if (i <= (level - 1)) // level == length
        {
            // cur  = node immediately before the insertion point (prev)
            // node = new inserted node
            // next = node cur was pointing to

            _pager.SetDirty(cur.Page);

            node.Next[i] = cur.Next[i];
            node.Prev[i] = cur.Position;
            cur.Next[i] = node.Position;

            var next = this.GetNode(node.Next[i]);

            if (next != null)
            {
                next.Prev[i] = node.Position;
                _pager.SetDirty(next.Page);
            }
        }
    }

    // add/remove indexPage on freelist if it has space
    _pager.AddOrRemoveToFreeList(page.FreeBytes > IndexPage.INDEX_RESERVED_BYTES, page, index.Page, ref index.FreeIndexPageID);

    // if last node exists, create a double linked list
    if (last != null)
    {
        // link new node with last node
        if (last.NextNode.IsEmpty == false)
        {
            // fix link pointers when more nodes follow in the list
            var next = this.GetNode(last.NextNode);
            next.PrevNode = node.Position;
            last.NextNode = node.Position;
            node.PrevNode = last.Position;
            node.NextNode = next.Position;

            _pager.SetDirty(next.Page);
        }
        else
        {
            last.NextNode = node.Position;
            node.PrevNode = last.Position;
        }

        // set last node page as dirty
        _pager.SetDirty(last.Page);
    }

    return node;
}
/// <summary>
/// Returns whether this node is the head or tail sentinel of the given collection index.
/// </summary>
/// <param name="index">Index whose HeadNode/TailNode positions are compared against this node.</param>
/// <returns>True when this node's position matches the index head or tail.</returns>
public bool IsHeadTail(CollectionIndex index)
{
    if (this.Position.Equals(index.HeadNode)) return true;

    return this.Position.Equals(index.TailNode);
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // no predicate to apply: record that this query resolves as a full scan
    this.ExecuteMode = QueryExecuteMode.FullScan;

    // enumerate every node of the index in ascending order
    return indexer.FindAll(index, Query.Ascending);
}
internal override IEnumerable<IndexNode> Execute(LiteEngine engine, CollectionIndex index)
{
    // equality is answered directly by an index seek
    return engine.Indexer.FindEquals(index, this.Value);
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // this query type has no index-based execution path
    throw new NotSupportedException();
}
/// <summary>
/// NOT(equals): full index scan returning every node whose key differs from the value.
/// </summary>
internal override IEnumerable<IndexNode> Execute(LiteEngine engine, CollectionIndex index)
{
    // hoist the comparison key out of the predicate: the original built a new
    // IndexKey for every node visited, which is loop-invariant work
    // NOTE(review): assumes IndexKey construction is deterministic for the same value — confirm
    var key = new IndexKey(this.Value);

    return engine.Indexer.FindAll(index).Where(x => x.Key.CompareTo(key) != 0);
}
/// <summary>
/// Abstract method that must be implemented for index seek/scan - returns the IndexNodes that match this query
/// </summary>
internal abstract IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index);
/// <summary>
/// Find first node that index match with value. If not found but sibling = true, returns near node (only non-unique index)
/// Before find, value must be normalized
/// </summary>
public IndexNode Find(CollectionIndex index, BsonValue value, bool sibling, int order)
{
    // start from the head sentinel (ascending) or tail sentinel (descending)
    var cur = this.GetNode(order == Query.Ascending ? index.HeadNode : index.TailNode);

    // descend level by level, advancing while the next key has not passed the value
    for (var i = IndexNode.MAX_LEVEL_LENGTH - 1; i >= 0; i--)
    {
        for (; cur.NextPrev(i, order).IsEmpty == false; cur = this.GetNode(cur.NextPrev(i, order)))
        {
            var next = this.GetNode(cur.NextPrev(i, order));
            var diff = next.Key.CompareTo(value);

            // next key passed the value: drop to a lower level (or stop when exact match required)
            if (diff == order && (i > 0 || !sibling)) break;

            // at level 0 with sibling requested: the nearest node is the answer
            // (unless it is a sentinel, which means no usable node exists)
            if (diff == order && i == 0 && sibling)
            {
                return next.IsHeadTail(index) ? null : next;
            }

            // if equals, test for duplicates - go back to first occurrence on duplicate values
            if (diff == 0)
            {
                // a unique index has no duplicates - just return node
                if (index.Unique) return next;

                // order * -1 walks backwards to the boundary of the duplicate run
                return this.FindBoundary(index, next, value, order * -1, i);
            }
        }
    }

    return null;
}
public IEnumerable<IndexNode> FindAll(CollectionIndex index, int order)
{
    // begin at the head sentinel (ascending) or tail sentinel (descending)
    var current = this.GetNode(order == Query.Ascending ? index.HeadNode : index.TailNode);

    // follow level 0 in the requested direction
    for (; current.NextPrev(0, order).IsEmpty == false;)
    {
        current = this.GetNode(current.NextPrev(0, order));

        // reaching the opposite sentinel means the walk is complete
        if (current.IsHeadTail(index)) yield break;

        yield return current;
    }
}
/// <summary>
/// Walk to the first/last occurrence of a duplicated index value,
/// moving along level 0 in the given direction.
/// (The <paramref name="level"/> parameter is not used by this implementation.)
/// </summary>
private IndexNode FindBoundary(CollectionIndex index, IndexNode cur, BsonValue value, int order, int level)
{
    var boundary = cur;

    // keep stepping while the key still equals the value; stop at a sentinel
    while (cur.Key.CompareTo(value) == 0)
    {
        boundary = cur;

        cur = this.GetNode(cur.NextPrev(0, order));

        if (cur.IsHeadTail(index)) break;
    }

    return boundary;
}
/// <summary>
/// Insert a new node inside a collection index, flipping a coin to pick its skip-list level.
/// </summary>
public IndexNode AddNode(CollectionIndex index, BsonValue key, IndexNode last)
{
    // random level keeps the skip-list balanced on average
    var level = this.FlipCoin();

    return this.AddNode(index, key, level, last);
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // substring match cannot use an index seek: scan everything ascending
    // and keep only string keys containing the value
    return indexer
        .FindAll(index, Query.Ascending)
        .Where(node => node.Key.IsString && node.Key.AsString.Contains(_value));
}
/// <summary>
/// Delete an indexNode from an index, adjusting Next/Prev pointers at every
/// skip-list level and in the doubly linked node list, and releasing the
/// node's page when it becomes empty.
/// </summary>
public void Delete(CollectionIndex index, PageAddress nodeAddress)
{
    var node = this.GetNode(nodeAddress);
    var page = node.Page;

    // mark page as dirty here because, if deleted, page type will change
    _pager.SetDirty(page);

    // unlink the node from every skip-list level it participates in
    for (int i = node.Prev.Length - 1; i >= 0; i--)
    {
        // get previous and next nodes (on both sides of the deleted node)
        var prev = this.GetNode(node.Prev[i]);
        var next = this.GetNode(node.Next[i]);

        if (prev != null)
        {
            prev.Next[i] = node.Next[i];
            _pager.SetDirty(prev.Page);
        }
        if (next != null)
        {
            next.Prev[i] = node.Prev[i];
            _pager.SetDirty(next.Page);
        }
    }

    page.Nodes.Remove(node.Position.Index);

    // update freebytes + items count
    page.UpdateItemCount();

    // if there are no more nodes in this page, delete the page
    if (page.Nodes.Count == 0)
    {
        // first, remove from free list
        _pager.AddOrRemoveToFreeList(false, page, index.Page, ref index.FreeIndexPageID);

        _pager.DeletePage(page.PageID);
    }
    else
    {
        // add or remove page from free list depending on remaining space
        _pager.AddOrRemoveToFreeList(page.FreeBytes > IndexPage.INDEX_RESERVED_BYTES, node.Page, index.Page, ref index.FreeIndexPageID);
    }

    // now remove node from the doubly linked node list
    var prevNode = this.GetNode(node.PrevNode);
    var nextNode = this.GetNode(node.NextNode);

    if (prevNode != null)
    {
        prevNode.NextNode = node.NextNode;
        _pager.SetDirty(prevNode.Page);
    }
    if (nextNode != null)
    {
        nextNode.PrevNode = node.PrevNode;
        _pager.SetDirty(nextNode.Page);
    }
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // this query type has no index-based execution path
    throw new NotSupportedException();
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // matches nothing: produce an empty sequence
    yield break;
}
/// <summary>
/// Insert a new node inside a collection index, flipping a coin to pick its skip-list level.
/// </summary>
public IndexNode AddNode(CollectionIndex index, BsonValue key, IndexNode last)
{
    // random level keeps the skip-list balanced on average
    var newLevel = this.FlipCoin();

    return this.AddNode(index, key, newLevel, last);
}
/// <summary>
/// Insert a new node inside a collection index, flipping a coin to pick its skip-list level.
/// </summary>
public IndexNode AddNode(CollectionIndex index, BsonValue key)
{
    // normalize key according to the index options before inserting
    var normalized = key.Normalize(index.Options);

    // random level keeps the skip-list balanced on average
    var level = this.FlipCoin();

    return this.AddNode(index, normalized, level);
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // scan in the requested order and keep keys accepted by the user predicate
    return indexer
        .FindAll(index, _order)
        .Where(node => _func(node.Key));
}
/// <summary>
/// Abstract method that must be implemented for index seek/scan - returns the IndexNodes that match this query
/// </summary>
internal abstract IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index);
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // enumerate every node of the index in the configured order
    return indexer.FindAll(index, _order);
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // ascending scan: everything before the value qualifies
    foreach (var node in indexer.FindAll(index, Query.Ascending))
    {
        var cmp = node.Key.CompareTo(_value);

        // past the boundary (or at it, when the query is exclusive): stop
        var beyond = cmp == 1 || (cmp == 0 && !_equals);

        if (beyond) break;

        if (node.IsHeadTail(index)) yield break;

        yield return node;
    }
}
internal override IEnumerable<IndexNode> ExecuteIndex(IndexService indexer, CollectionIndex index)
{
    // inequality requires a full scan: keep every node whose key differs
    var normalized = _value.Normalize(index.Options);

    return indexer
        .FindAll(index, Query.Ascending)
        .Where(node => node.Key.CompareTo(normalized) != 0);
}