/// <summary>
/// Upsert documents into a collection: try to update each document first; any document
/// that was not updated (no _id, or _id not found) is inserted instead.
/// Inserting a duplicate key will throw, as a side effect.
/// </summary>
/// <returns>Number of documents that were inserted (updates are not counted).</returns>
public int Upsert(string collection, IEnumerable<BsonDocument> docs, BsonAutoId autoId)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (docs == null) throw new ArgumentNullException(nameof(docs));

    return this.AutoTransaction(transaction =>
    {
        // write snapshot, creating the collection when it does not exist yet
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true);
        var col = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot, _header.Pragmas.Collation);
        var data = new DataService(snapshot);
        var inserted = 0;

        foreach (var doc in docs)
        {
            transaction.Safepoint();

            // update only when an _id is present and matches an existing document;
            // otherwise fall through to insert
            var updated = doc["_id"] != BsonValue.Null
                && this.UpdateDocument(snapshot, col, doc, indexer, data);

            if (updated == false)
            {
                this.InsertDocument(snapshot, doc, autoId, indexer, data);

                inserted++;
            }
        }

        // only inserted documents are counted
        return inserted;
    });
}
/// <summary>
/// Implement update command to documents inside a collection. Returns the number of documents updated.
/// </summary>
public int Update(string collection, IEnumerable<BsonDocument> docs)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (docs == null) throw new ArgumentNullException(nameof(docs));

    return this.AutoTransaction(transaction =>
    {
        // open a write snapshot WITHOUT creating the collection (addIfNotExists: false)
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false);
        var col = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot);
        var data = new DataService(snapshot);
        var count = 0;

        // BUGFIX: the collection may not exist - return 0 instead of passing a null
        // CollectionPage into UpdateDocument (same guard as Delete/UpdateAsync siblings)
        if (col == null) return 0;

        LOG($"update `{collection}`", "COMMAND");

        foreach (var doc in docs)
        {
            transaction.Safepoint();

            if (this.UpdateDocument(snapshot, col, doc, indexer, data))
            {
                count++;
            }
        }

        return count;
    });
}
// Enumerate the in-memory document source, assigning each document a synthetic RawId
// and yielding a fake IndexNode per document.
public override async IAsyncEnumerable<IndexNode> Run(CollectionPage col, IndexService indexer)
{
    var fakeRawId = 0u;

    foreach (var doc in _source)
    {
        // synthetic rawId so virtual documents can be addressed like stored ones
        fakeRawId++;

        doc.RawId = new PageAddress(fakeRawId, 0);

        // cache documents until the limit is reached; past that point the cache is
        // dropped entirely and cached lookup support is removed
        if (_cache != null)
        {
            _cache[fakeRawId] = doc;

            if (_cache.Count > VIRTUAL_INDEX_MAX_CACHE)
            {
                _cache = null;
            }
        }

        // emit a fake index node wrapping the document
        yield return new IndexNode(doc);
    }
}
/// <summary>
/// Add a new collection. Created only in the transaction pages - the header entry is
/// written only when the transaction commits.
/// </summary>
private void Add(string name, ref CollectionPage collectionPage)
{
    // validation order is significant: space first, then word characters, then `$` prefix
    if (Encoding.UTF8.GetByteCount(name) > _header.GetAvaiableCollectionSpace())
    {
        throw LiteException.InvalidCollectionName(name, "There is no space in header for more collections");
    }

    if (!name.IsWord())
    {
        throw LiteException.InvalidCollectionName(name, "Use only [a-Z$_]");
    }

    if (name.StartsWith("$"))
    {
        throw LiteException.InvalidCollectionName(name, "Collection can't starts with `$` (reserved for system collections)");
    }

    // allocate the new collection page
    collectionPage = _snapshot.NewPage<CollectionPage>();

    var pageID = collectionPage.PageID;

    // defer the header entry (name -> pageID) to commit time
    _transPages.Commit += (h) => h.InsertCollection(name, pageID);

    // every collection starts with its primary-key index over _id
    var indexer = new IndexService(_snapshot);

    indexer.CreateIndex("_id", "$._id", true);
}
// Dispatch: with a literal prefix we can seek the index; otherwise run a full
// index scan and test each key against the LIKE pattern.
public override IEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    if (_startsWith.Length > 0)
    {
        return this.ExecuteStartsWith(indexer, index);
    }

    return this.ExecuteLike(indexer, index);
}
/// <summary>
/// Implements delete based on an enumerable of _id values. Returns how many documents were deleted.
/// </summary>
public int Delete(string collection, IEnumerable<BsonValue> ids)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (ids == null) throw new ArgumentNullException(nameof(ids));

    return this.AutoTransaction(transaction =>
    {
        // open a write snapshot without creating the collection
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false);
        var collectionPage = snapshot.CollectionPage;
        var data = new DataService(snapshot);
        var indexer = new IndexService(snapshot, _header.Pragmas.Collation);

        // collection does not exist: nothing to delete
        if (collectionPage == null) return 0;

        var count = 0;
        var pk = collectionPage.PK;

        foreach (var id in ids)
        {
            // locate the document through its primary-key node
            var pkNode = indexer.Find(pk, id, false, LiteDB.Query.Ascending);

            // if pk not found, skip this id
            if (pkNode == null) continue;

            // remove document data block
            data.Delete(pkNode.DataBlock);

            // BUGFIX: removed dead no-op statement `if (pkNode.NextNode.IsEmpty) { ; }` -
            // it tested a condition and did nothing

            // delete all index nodes for this document (starting at the pk node)
            indexer.DeleteAll(pkNode.Position);

            transaction.Safepoint();

            count++;
        }

        return count;
    });
}
// Full async index scan, yielding only the nodes whose key satisfies the predicate.
public override async IAsyncEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    await foreach (var node in indexer.FindAll(index, this.Order))
    {
        if (_func(node.Key) == false) continue;

        yield return node;
    }
}
// IN query: run one equality query per distinct value and concatenate the results.
public override IEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    return _values
        .Distinct()
        .SelectMany(value => Index.EQ(this.Name, value).Execute(indexer, index));
}
// Full async index scan: keep only string keys that match the LIKE pattern
// under the indexer's collation.
private async IAsyncEnumerable<IndexNode> ExecuteLike(IndexService indexer, CollectionIndex index)
{
    await foreach (var node in indexer.FindAll(index, this.Order))
    {
        var key = node.Key;

        if (key.IsString && key.AsString.SqlLike(_pattern, indexer.Collation))
        {
            yield return node;
        }
    }
}
// IN query: run one equality lookup per distinct value and yield every matching node.
public override IEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    foreach (var value in _values.Distinct())
    {
        // NOTE(review): a sibling implementation builds its equality query from
        // this.Name + value; passing the whole _values collection as the first
        // argument here looks suspicious - confirm IndexEquals' expected ctor arguments
        var idx = new IndexEquals(this._values, value);

        foreach (var node in idx.Execute(indexer, index))
        {
            yield return(node);
        }
    }
}
/// <summary>
/// Resolve which index will be used for this query and run the Execute method over it.
/// </summary>
/// <exception cref="LiteException">Thrown when the named index does not exist on the collection.</exception>
public virtual IEnumerable<IndexNode> Run(CollectionPage col, IndexService indexer)
{
    // locate the named index on this collection (throw when missing)
    var index = col.GetCollectionIndex(this.Name) ?? throw LiteException.IndexNotFound(this.Name);

    // delegate to the concrete seek/scan implementation
    return this.Execute(indexer, index);
}
// Dispatch: with a literal prefix we can seek the index; otherwise run a full
// scan and test each key against the LIKE pattern.
public override async IAsyncEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    if (_startsWith.Length > 0)
    {
        await foreach (var node in this.ExecuteStartsWith(indexer, index))
        {
            yield return node;
        }
    }
    else
    {
        await foreach (var node in this.ExecuteLike(indexer, index))
        {
            yield return node;
        }
    }
}
/// <summary>
/// Fill current database with data inside file reader - runs inside a single transaction.
/// Creates user indexes first (unless data-only upgrade), then inserts every document.
/// On failure, clears the WAL index and rethrows so partial writes are not visible.
/// </summary>
internal void RebuildContent(IFileReader reader, UpgradeOption upgrade = UpgradeOption.True)
{
    // begin transaction and get TransactionID
    var transaction = _monitor.GetTransaction(true, false, out _);

    try
    {
        foreach (var collection in reader.GetCollections())
        {
            if (upgrade != UpgradeOption.DataOnly)
            {
                // first create all user indexes (exclude _id index)
                foreach (var index in reader.GetIndexes(collection))
                {
                    this.EnsureIndex(collection,
                        index.Name,
                        BsonExpression.Create(index.Expression),
                        index.Unique);
                }
            }

            // get all documents from current collection
            var docs = reader.GetDocuments(collection);

            // get snapshot, indexer and data services (collection created when missing)
            var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true);
            var indexer = new IndexService(snapshot, _header.Pragmas.Collation);
            var data = new DataService(snapshot);

            // insert one-by-one, checkpointing at each safepoint
            foreach (var doc in docs)
            {
                transaction.Safepoint();

                this.InsertDocument(snapshot, doc, BsonAutoId.ObjectId, indexer, data);
            }
        }

        transaction.Commit();
    }
    catch (Exception)
    {
        // discard the WAL index so the failed rebuild leaves no partial state visible
        _walIndex.Clear();
        throw;
    }
    finally
    {
        _monitor.ReleaseTransaction(transaction);
    }
}
/// <summary>
/// Drop an index from a collection. Returns false when the collection or the index does not exist.
/// </summary>
/// <exception cref="LiteException">Thrown when trying to drop the `_id` primary-key index.</exception>
public bool DropIndex(string collection, string name)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name));

    // the primary-key index can never be removed
    if (name == "_id") throw LiteException.IndexDropId();

    return this.AutoTransaction(transaction =>
    {
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false);
        var col = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot);

        // missing collection: nothing to drop
        if (col == null) return false;

        // missing index: nothing to drop
        var index = col.GetCollectionIndex(name);

        if (index == null) return false;

        // free all index pages and nodes
        indexer.DropIndex(index);

        // remove the index entry from the collection page
        snapshot.CollectionPage.DeleteCollectionIndex(name);

        return true;
    });
}
/// <summary>
/// Add a new collection. Created only in the transaction pages - the header entry is
/// written only when the transaction commits.
/// </summary>
private void Add(string name, ref CollectionPage collectionPage)
{
    // validate collection name and available header space
    CheckName(name, _header);

    // allocate the new collection page
    collectionPage = _snapshot.NewPage<CollectionPage>();

    var pageID = collectionPage.PageID;

    // defer the header entry (name -> pageID) to commit time
    _transPages.Commit += (h) => h.InsertCollection(name, pageID);

    // every collection starts with its primary-key index over _id
    var indexer = new IndexService(_snapshot);

    indexer.CreateIndex("_id", "$._id", true);
}
// Equality query: seek the first node with the requested value, then (for non-unique
// indexes) walk both directions of the level-0 linked list collecting all equal keys.
public override async IAsyncEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    // seek the first node holding _value
    var node = await indexer.Find(index, _value, false, Query.Ascending);

    if (node == null)
    {
        yield break;
    }

    yield return(node);

    if (index.Unique == false)
    {
        // navigate in both sides to return all nodes found
        var first = node;

        // first go forward - the loop condition both advances `node` and tests key
        // equality, so `node` changes as a side effect of evaluating the condition
        while (!node.Next[0].IsEmpty && ((node = await indexer.GetNode(node.Next[0])).Key.CompareTo(_value, indexer.Collation) == 0))
        {
            // stop at head/tail edge nodes
            if (node.Key.IsMinValue || node.Key.IsMaxValue)
            {
                break;
            }

            yield return(node);
        }

        node = first;

        // and then, go backward from the seek point (same side-effecting condition)
        while (!node.Prev[0].IsEmpty && ((node = await indexer.GetNode(node.Prev[0])).Key.CompareTo(_value, indexer.Collation) == 0))
        {
            if (node.Key.IsMinValue || node.Key.IsMaxValue)
            {
                break;
            }

            yield return(node);
        }
    }
}
/// <summary>
/// Get the correct IDocumentLookup for this query plan.
/// </summary>
public IDocumentLookup GetLookup(Snapshot snapshot, bool utcDate)
{
    var data = new DataService(snapshot);
    var indexer = new IndexService(snapshot);

    // a virtual index can act as its own document lookup - reuse it directly
    if (this.Index is IDocumentLookup lookup)
    {
        return lookup;
    }

    // index-key-only plans read documents straight from the index;
    // everything else loads full documents from the data file
    return this.IsIndexKeyOnly
        ? (IDocumentLookup)new IndexLookup(indexer, this.Fields.Single())
        : new DatafileLookup(data, utcDate, this.Fields);
}
/// <summary>
/// Implement update command to documents inside a collection. Returns the number of documents updated.
/// </summary>
public async Task<int> UpdateAsync(string collection, IEnumerable<BsonDocument> docs)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (docs == null) throw new ArgumentNullException(nameof(docs));

    return await this.AutoTransaction(async transaction =>
    {
        // open a write snapshot without creating the collection
        var snapshot = await transaction.CreateSnapshot(LockMode.Write, collection, false);
        var collectionPage = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot, _header.Pragmas.Collation);
        var data = new DataService(snapshot);
        var updated = 0;

        // collection does not exist: nothing to update
        if (collectionPage == null) return 0;

        LOG($"update `{collection}`", "COMMAND");

        foreach (var doc in docs)
        {
            await transaction.Safepoint();

            if (await this.UpdateDocument(snapshot, collectionPage, doc, indexer, data))
            {
                updated++;
            }
        }

        return updated;
    });
}
/// <summary>
/// Add a new collection. Created only in the transaction pages - the header entry is
/// written only when the transaction commits. Returns the new collection page.
/// </summary>
private async Task<CollectionPage> Add(string name)
{
    // validate collection name and available header space
    CheckName(name, _header);

    // allocate the collection page and publish it on the snapshot
    var collectionPage = await _snapshot.NewPage<CollectionPage>();

    _snapshot.CollectionPage = collectionPage;

    var pageID = collectionPage.PageID;

    // defer the header entry (name -> pageID) to commit time
    _transPages.Commit += (h) => h.InsertCollection(name, pageID);

    // every collection starts with its primary-key index over _id
    var indexer = new IndexService(_snapshot, _header.Pragmas.Collation);

    await indexer.CreateIndex("_id", "$._id", true);

    return collectionPage;
}
/// <summary>
/// Resolve which index will be used, run the Execute method, and de-duplicate the
/// resulting nodes by data block (multi-key indexes can yield the same document twice).
/// </summary>
/// <exception cref="LiteException">Thrown when the named index does not exist on the collection.</exception>
public virtual async IAsyncEnumerable<IndexNode> Run(CollectionPage col, IndexService indexer)
{
    // get index for this query
    var index = col.GetCollectionIndex(this.Name);

    if (index == null) throw LiteException.IndexNotFound(this.Name);

    var distinct = new HashSet<PageAddress>();

    await foreach (var node in this.Execute(indexer, index))
    {
        // HashSet.Add returns false on duplicates - single hash lookup instead of
        // the original Contains-then-Add double lookup
        if (distinct.Add(node.DataBlock))
        {
            yield return node;
        }
    }
}
/// <summary>
/// INCLUDE: Do include in result document according path expression - Works only with DocumentLookup.
/// For each document, resolves DbRef values ({ $id, $ref }) found at the path into full documents.
/// </summary>
protected async IAsyncEnumerable<BsonDocument> Include(IAsyncEnumerable<BsonDocument> source, BsonExpression path)
{
    // cached services - reused while consecutive $ref values point to the same collection
    string last = null;
    Snapshot snapshot = null;
    IndexService indexer = null;
    DataService data = null;
    CollectionIndex index = null;
    IDocumentLookup lookup = null;

    await foreach (var doc in source)
    {
        // evaluate the path expression; only documents/arrays can hold DbRef values
        // (ToList materializes results before DoInclude mutates the document)
        foreach (var value in path.Execute(doc, _pragmas.Collation)
            .Where(x => x.IsDocument || x.IsArray)
            .ToList())
        {
            // if value is document, convert this ref document into full document (do another query)
            if (value.IsDocument)
            {
                await DoInclude(value.AsDocument);
            }
            else
            {
                // if value is array, do same per item
                foreach (var item in value.AsArray
                    .Where(x => x.IsDocument)
                    .Select(x => x.AsDocument))
                {
                    await DoInclude(item);
                }
            }
        }

        yield return(doc);
    }

    // resolve a single DbRef document in place
    async Task DoInclude(BsonDocument value)
    {
        // works only if is a document with $id/$ref fields
        var refId = value["$id"];
        var refCol = value["$ref"];

        // if has no reference, just go out
        if (refId.IsNull || !refCol.IsString)
        {
            return;
        }

        // do some cache re-using when is same $ref (almost always is the same $ref collection)
        if (last != refCol.AsString)
        {
            last = refCol.AsString;

            // initialize services bound to the referenced collection
            snapshot = await _transaction.CreateSnapshot(LockMode.Read, last, false);
            indexer = new IndexService(snapshot, _pragmas.Collation);
            data = new DataService(snapshot);
            lookup = new DatafileLookup(data, _pragmas.UtcDate, null);

            index = snapshot.CollectionPage?.PK;
        }

        // fill only if index and ref node exists
        if (index != null)
        {
            var node = await indexer.Find(index, refId, false, Query.Ascending);

            if (node != null)
            {
                // load document based on dataBlock position
                var refDoc = await lookup.Load(node);

                // do not remove $id - only $ref is dropped from the resolved document
                value.Remove("$ref");

                // copy values from refDocument into current document (except _id - will keep $id)
                foreach (var element in refDoc.Where(x => x.Key != "_id"))
                {
                    value[element.Key] = element.Value;
                }
            }
            else
            {
                // mark in the ref document that the target was not found
                value["$missing"] = true;
            }
        }
    }
}
/// <summary>
/// Abstract method that must be implemented for index seek/scan - returns the IndexNodes
/// that match this query over the given index.
/// </summary>
public abstract IEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index);
/// <summary>
/// Internal implementation of a single document insert: generates/validates _id,
/// writes the document data block, and adds one index node per index key.
/// </summary>
/// <exception cref="LiteException">Thrown when _id is null, MinValue or MaxValue.</exception>
private void InsertDocument(Snapshot snapshot, BsonDocument doc, BsonAutoId autoId, IndexService indexer, DataService data)
{
    // when the document has no _id, generate one according to the requested auto-id strategy
    if (!doc.TryGetValue("_id", out var id))
    {
        if (autoId == BsonAutoId.ObjectId)
        {
            id = new BsonValue(ObjectId.NewObjectId());
        }
        else if (autoId == BsonAutoId.Guid)
        {
            id = new BsonValue(Guid.NewGuid());
        }
        else
        {
            id = this.GetSequence(snapshot, autoId);
        }

        doc["_id"] = id;
    }
    else if (id.IsNumber)
    {
        // keep the in-memory sequence in sync with any explicit numeric _id
        this.SetSequence(snapshot, id);
    }

    // reject _id values that cannot be used as a key
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    // write the document into data pages - returns its dataBlock address
    var dataBlock = data.Insert(doc);

    IndexNode last = null;

    // for each index, insert new IndexNode(s), chained one after another
    foreach (var index in snapshot.CollectionPage.GetCollectionIndexes())
    {
        // evaluate the index expression over the document - distinct values only
        // (a unique index produces a single key)
        var keys = index.BsonExpr.Execute(doc, _header.Pragmas.Collation);

        // multi-key support: one node per produced key
        foreach (var key in keys)
        {
            var node = indexer.AddNode(index, key, dataBlock, last);

            last = node;
        }
    }
}
/// <summary>
/// Seek the index at the literal prefix and walk both directions of the level-0 list,
/// yielding nodes that match the prefix (or the exact value when _equals is set).
/// </summary>
private IEnumerable<IndexNode> ExecuteStartsWith(IndexService indexer, CollectionIndex index)
{
    // find first indexNode matching the prefix
    var first = indexer.Find(index, _startsWith, true, this.Order);
    var node = first;

    // if collection exists but is empty (or no match)
    if (first == null) yield break;

    // first, go backward to collect equal/prefixed values before the seek point
    while (node != null)
    {
        // if current node is a head/tail edge, exit
        if (node.Key.IsMinValue || node.Key.IsMaxValue) break;

        var valueString = node.Key.IsString ? node.Key.AsString : node.Key.ToString();

        if (_equals ?
            valueString.Equals(_startsWith, StringComparison.OrdinalIgnoreCase) :
            valueString.StartsWith(_startsWith, StringComparison.OrdinalIgnoreCase))
        {
            // still test SqlLike for the rest of the pattern - only when there is more
            // than the literal prefix to test (avoid slow SqlLike call)
            if (_testSqlLike == false || valueString.SqlLike(_pattern) == true)
            {
                yield return node;
            }
        }
        else
        {
            break;
        }

        node = indexer.GetNode(node.GetNextPrev(0, -this.Order));
    }

    // then move forward from the seek point
    node = indexer.GetNode(first.GetNextPrev(0, this.Order));

    while (node != null)
    {
        // if current node is a head/tail edge, exit
        if (node.Key.IsMinValue || node.Key.IsMaxValue) break;

        var valueString = node.Key.IsString ? node.Key.AsString : node.Key.ToString();

        // BUGFIX: compare against _startsWith (not _pattern) so the forward walk matches
        // the backward walk and the StartsWith branch - _pattern still carries LIKE
        // wildcards, so an Equals test against it could never match an indexed value
        if (_equals ?
            valueString.Equals(_startsWith, StringComparison.OrdinalIgnoreCase) :
            valueString.StartsWith(_startsWith, StringComparison.OrdinalIgnoreCase))
        {
            // skip nodes with no data block; still test SqlLike only when needed
            if (node.DataBlock.IsEmpty == false &&
                (_testSqlLike == false || valueString.SqlLike(_pattern) == true))
            {
                yield return node;
            }
        }
        else
        {
            break;
        }

        node = indexer.GetNode(node.GetNextPrev(0, this.Order));
    }
}
// Full index scan: test the LIKE pattern against every string key.
private IEnumerable<IndexNode> ExecuteLike(IndexService indexer, CollectionIndex index)
{
    foreach (var node in indexer.FindAll(index, this.Order))
    {
        if (node.Key.IsString && node.Key.AsString.SqlLike(_pattern))
        {
            yield return node;
        }
    }
}
/// <summary>
/// Implement internal update document: rewrites the data block, then diffs old vs new
/// secondary-index keys, deleting stale nodes and inserting new ones.
/// Returns false when no document with this _id exists.
/// </summary>
/// <exception cref="LiteException">Thrown when _id is null, MinValue or MaxValue.</exception>
private bool UpdateDocument(Snapshot snapshot, CollectionPage col, BsonDocument doc, IndexService indexer, DataService data)
{
    // normalize id before find
    var id = doc["_id"];

    // validate id for null, min/max values
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    // find indexNode from pk index
    var pkNode = indexer.Find(col.PK, id, false, LiteDB.Query.Ascending);

    // if not found document, no updates
    if (pkNode == null)
    {
        return(false);
    }

    // update data storage
    data.Update(col, pkNode.DataBlock, doc);

    // get all current non-pk index nodes from this data block (slot, key, nodePosition)
    var oldKeys = indexer.GetNodeList(pkNode.NextNode)
        .Select(x => new Tuple<byte, BsonValue, PageAddress>(x.Slot, x.Key, x.Position))
        .ToArray();

    // build a list of all new index keys (slot, key, index name)
    var newKeys = new List<Tuple<byte, BsonValue, string>>();

    foreach (var index in col.GetCollectionIndexes().Where(x => x.Name != "_id"))
    {
        // getting all keys from expression over document
        var keys = index.BsonExpr.Execute(doc, _header.Pragmas.Collation);

        foreach (var key in keys)
        {
            newKeys.Add(new Tuple<byte, BsonValue, string>(index.Slot, key, index.Name));
        }
    }

    // no secondary-index keys at all: data already updated, nothing else to do
    if (oldKeys.Length == 0 && newKeys.Count == 0)
    {
        return(true);
    }

    // get a list of all nodes that are in oldKeys but not in newKeys (must delete)
    var toDelete = new HashSet<PageAddress>(oldKeys
        .Where(x => newKeys.Any(n => n.Item1 == x.Item1 && n.Item2 == x.Item2) == false)
        .Select(x => x.Item3));

    // get a list of all keys that are not in oldKeys (must insert)
    var toInsert = newKeys
        .Where(x => oldKeys.Any(o => o.Item1 == x.Item1 && o.Item2 == x.Item2) == false)
        .ToArray();

    // if nothing to change, just exit
    if (toDelete.Count == 0 && toInsert.Length == 0)
    {
        return(true);
    }

    // delete stale nodes and get the last kept node in the document's node list
    var last = indexer.DeleteList(pkNode.Position, toDelete);

    // now, insert all new nodes, chaining each after the previous
    foreach (var elem in toInsert)
    {
        var index = col.GetCollectionIndex(elem.Item3);

        last = indexer.AddNode(index, elem.Item2, pkNode.DataBlock, last);
    }

    return(true);
}
// Range (between) query: seeks the start value (or head/tail for Min/Max sentinels)
// and walks forward until the end value is passed. Start/end are swapped when the
// requested order is descending so the walk is always along this.Order.
public override IEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    // if order is desc, swap start/end values (and their inclusive flags)
    var start = this.Order == Query.Ascending ? _start : _end;
    var end = this.Order == Query.Ascending ? _end : _start;
    var startEquals = this.Order == Query.Ascending ? _startEquals : _endEquals;
    var endEquals = this.Order == Query.Ascending ? _endEquals : _startEquals;

    // find first indexNode (or get from head/tail if Min/Max sentinel value)
    var first =
        start.Type == BsonType.MinValue ? indexer.GetNode(index.Head) :
        start.Type == BsonType.MaxValue ? indexer.GetNode(index.Tail) :
        indexer.Find(index, start, true, this.Order);

    var node = first;

    // if startEquals, also return all equal values found BEFORE the seek point
    if (startEquals && node != null)
    {
        // going backward in the same-value list; the loop condition both advances
        // `node` and tests key equality (assignment side effect)
        while (!node.GetNextPrev(0, -this.Order).IsEmpty && ((node = indexer.GetNode(node.GetNextPrev(0, -this.Order))).Key.CompareTo(start) == 0))
        {
            // stop at head/tail edge nodes
            if (node.Key.IsMinValue || node.Key.IsMaxValue)
            {
                break;
            }

            yield return(node);
        }

        node = first;
    }

    // return (or skip, when startEquals is false) every node equal to start
    while (node != null)
    {
        var diff = node.Key.CompareTo(start);

        // if current value is not equal to start, leave this loop
        if (diff != 0)
        {
            break;
        }

        if (startEquals && !(node.Key.IsMinValue || node.Key.IsMaxValue))
        {
            yield return(node);
        }

        node = indexer.GetNode(node.GetNextPrev(0, this.Order));
    }

    // navigate level 0 to the next node - yield while key is before (or equal to) end
    while (node != null)
    {
        var diff = node.Key.CompareTo(end);

        if (endEquals && diff == 0 && !(node.Key.IsMinValue || node.Key.IsMaxValue))
        {
            yield return(node);
        }
        else if (diff == -this.Order && !(node.Key.IsMinValue || node.Key.IsMaxValue))
        {
            // key is strictly before end (sign depends on walk direction)
            yield return(node);
        }
        else
        {
            break;
        }

        node = indexer.GetNode(node.GetNextPrev(0, this.Order));
    }
}
/// <summary>
/// Create a new index (or do nothing if it already exists) over a collection/expression.
/// Returns true when a new index was created, false when it already existed (or is `_id`).
/// </summary>
/// <exception cref="LiteException">Thrown on invalid index name, or when an index with the
/// same name but a different expression already exists.</exception>
public bool EnsureIndex(string collection, string name, BsonExpression expression, bool unique)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name));
    if (expression == null) throw new ArgumentNullException(nameof(expression));
    if (expression.IsIndexable == false) throw new ArgumentException("Index expressions must contains at least one document field. Used methods must be immutable. Parameters are not supported.", nameof(expression));

    // validate index name: length, word characters, no reserved `$` prefix
    if (name.Length > INDEX_NAME_MAX_LENGTH) throw LiteException.InvalidIndexName(name, collection, "MaxLength = " + INDEX_NAME_MAX_LENGTH);
    if (!name.IsWord()) throw LiteException.InvalidIndexName(name, collection, "Use only [a-Z$_]");
    if (name.StartsWith("$")) throw LiteException.InvalidIndexName(name, collection, "Index name can't starts with `$`");

    // the primary-key index always exists
    if (name == "_id") return(false);

    return(this.AutoTransaction(transaction =>
    {
        var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true);
        var col = snapshot.CollectionPage;
        var indexer = new IndexService(snapshot);
        var data = new DataService(snapshot);

        // check if index already exists
        var current = col.GetCollectionIndex(name);

        // if already exists, just exit
        if (current != null)
        {
            // but if expression is different, throw error
            if (current.Expression != expression.Source) throw LiteException.IndexAlreadyExist(name);

            return false;
        }

        LOG($"create index `{collection}.{name}`", "COMMAND");

        // create index head
        var index = indexer.CreateIndex(name, expression.Source, unique);
        var count = 0u;

        // read all existing documents (via the PK index) and index each one
        foreach (var pkNode in new IndexAll("_id", LiteDB.Query.Ascending).Run(col, indexer))
        {
            using (var reader = new BufferReader(data.Read(pkNode.DataBlock)))
            {
                // read only the fields the index expression needs
                var doc = reader.ReadDocument(expression.Fields);

                // first/last node added for THIS document (used to splice the node list below)
                IndexNode last = null;
                IndexNode first = null;

                // get values from expression in document
                var keys = expression.Execute(doc);

                // adding index node for each value
                foreach (var key in keys)
                {
                    // when index key is an array, get items inside array.
                    // valid only for first level (if these items are another array,
                    // those arrays will be indexed as array)
                    if (key.IsArray)
                    {
                        var arr = key.AsArray;

                        foreach (var itemKey in arr)
                        {
                            // insert new index node
                            var node = indexer.AddNode(index, itemKey, pkNode.DataBlock, last, _flipCoin);

                            if (first == null) { first = node; }

                            last = node;
                            count++;
                        }
                    }
                    else
                    {
                        // insert new index node
                        var node = indexer.AddNode(index, key, pkNode.DataBlock, last, _flipCoin);

                        if (first == null) { first = node; }

                        last = node;
                        count++;
                    }
                }

                // splice the new nodes into the document's single linked node list
                // (pkNode -> first ... last -> old pkNode.NextNode)
                if (first != null)
                {
                    last.SetNextNode(pkNode.NextNode);
                    pkNode.SetNextNode(first.Position);
                }
            }

            transaction.Safepoint();
        }

        index.KeyCount = count;

        return true;
    }));
}
// Create a lookup bound to an index service and the single field name whose values
// will be read from index keys.
public IndexLookup(IndexService indexer, string name)
{
    _indexer = indexer;
    _name = name;
}
// Full index scan, yielding only nodes whose key satisfies the predicate.
public override IEnumerable<IndexNode> Execute(IndexService indexer, CollectionIndex index)
{
    foreach (var node in indexer.FindAll(index, this.Order))
    {
        if (_func(node.Key))
        {
            yield return node;
        }
    }
}