/// <summary>
/// Create a new index on a collection/field (or do nothing if it already exists)
/// </summary>
public bool EnsureIndex(string collection, string field, bool unique = false)
{
    if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection));
    if (!CollectionIndex.IndexPattern.IsMatch(field)) throw new ArgumentException("Invalid field format pattern: " + CollectionIndex.IndexPattern.ToString(), nameof(field));
    if (field == "_id") return false; // always exists

    return this.Transaction<bool>(collection, true, (col) =>
    {
        // check if index already exists
        var current = col.GetIndex(field);

        // if it already exists, just exit
        if (current != null)
        {
            // do not test for any difference between the current index and the new definition
            return false;
        }

        // create index head
        var index = _indexer.CreateIndex(col);

        index.Field = field;
        index.Unique = unique;

        _log.Write(Logger.COMMAND, "create index on '{0}' :: {1} unique: {2}", collection, index.Field, unique);

        // read all documents (from the PK index)
        foreach (var pkNode in new QueryAll("_id", Query.Ascending).Run(col, _indexer))
        {
            // read binary and deserialize document
            var buffer = _data.Read(pkNode.DataBlock);
            var doc = BsonReader.Deserialize(buffer).AsDocument;
            var expr = new BsonFields(index.Field);

            // get values from document
            var keys = expr.Execute(doc, true);

            // add an index node for each value
            foreach (var key in keys)
            {
                // insert new index node
                var node = _indexer.AddNode(index, key, pkNode);

                // link index node to datablock
                node.DataBlock = pkNode.DataBlock;
            }

            // check memory usage
            _trans.CheckPoint();
        }

        return true;
    });
}
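// Usage sketch (hypothetical caller; `engine` stands for an instance of the class exposing
// EnsureIndex, and the "customers" collection and "email" field are illustrative only):
//
//     // create a unique index on the "email" field; returns false if the index already exists
//     var created = engine.EnsureIndex("customers", "email", unique: true);
//
//     // calling it again is a no-op and returns false
//     var again = engine.EnsureIndex("customers", "email", unique: true);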
/// <summary>
/// Internal implementation of document update
/// </summary>
private bool UpdateDocument(CollectionPage col, BsonDocument doc)
{
    // normalize id before find
    var id = doc["_id"];

    // validate id for null, min/max values
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw UltraLiteException.InvalidDataType("_id", id);
    }

    _log.Write(Logger.COMMAND, "update document on '{0}' :: _id = {1}", col.CollectionName, id.RawValue);

    // find indexNode from the PK index
    var pkNode = _indexer.Find(col.PK, id, false, Query.Ascending);

    // if the document was not found, there is nothing to update
    if (pkNode == null) return false;

    // serialize document into bytes
    var bytes = BsonWriter.Serialize(doc);

    // update data storage
    var dataBlock = _data.Update(col, pkNode.DataBlock, bytes);

    // get all non-PK index nodes for this data block
    var allNodes = _indexer.GetNodeList(pkNode, false).ToArray();

    // delete/insert index nodes - do not touch the PK
    foreach (var index in col.GetIndexes(false))
    {
        var expr = new BsonFields(index.Field);

        // get all keys to check
        var keys = expr.Execute(doc).ToArray();

        // get the list of nodes to delete (using ToArray to resolve now)
        var toDelete = allNodes
            .Where(x => x.Slot == index.Slot && !keys.Any(k => k == x.Key))
            .ToArray();

        // get the list of keys to insert (using ToArray to resolve now)
        var toInsert = keys
            .Where(x => !allNodes.Any(k => k.Slot == index.Slot && k.Key == x))
            .ToArray();

        // delete changed index nodes
        foreach (var node in toDelete)
        {
            _indexer.Delete(index, node.Position);
        }

        // insert new nodes
        foreach (var key in toInsert)
        {
            // add a new node
            var node = _indexer.AddNode(index, key, pkNode);

            // link the new node to the data block
            node.DataBlock = dataBlock.Position;
        }
    }

    return true;
}
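// Key-diff sketch (illustrative values only): if the stored nodes for a multi-key index hold
// keys { "a", "b", "c" } and the updated document now yields keys { "b", "c", "d" }, the loop
// above computes toDelete = { "a" } and toInsert = { "d" }, leaving "b" and "c" untouched.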
/// <summary>
/// Internal implementation of document insert
/// </summary>
private void InsertDocument(CollectionPage col, BsonDocument doc, BsonAutoId autoId)
{
    // the collection Sequence was introduced after the current datafile version was released.
    // In this case, Sequence will be 0 even though the collection already has documents. Let's fix this
    // ** this code can be removed when the datafile version changes from 7 (HeaderPage.FILE_VERSION) **
    if (col.Sequence == 0 && col.DocumentCount > 0)
    {
        var max = this.Max(col.CollectionName, "_id");

        // if the max _id is a number, use it as the last sequence value;
        // if not, just set the sequence to the document count
        col.Sequence = (max.IsInt32 || max.IsInt64 || max.IsDouble || max.IsDecimal) ?
            Convert.ToInt64(max.RawValue) :
            Convert.ToInt64(col.DocumentCount);
    }

    // increase collection sequence for _id
    col.Sequence++;

    _pager.SetDirty(col);

    // if there is no _id, add one
    if (!doc.RawValue.TryGetValue("_id", out var id))
    {
        doc["_id"] = id =
            autoId == BsonAutoId.ObjectId ? new BsonValue(ObjectId.NewObjectId()) :
            autoId == BsonAutoId.Guid ? new BsonValue(Guid.NewGuid()) :
            autoId == BsonAutoId.Int32 ? new BsonValue((Int32)col.Sequence) :
            autoId == BsonAutoId.Int64 ? new BsonValue(col.Sequence) :
            BsonValue.Null;
    }
    // create a bubble in the sequence if the provided _id is bigger than the current sequence
    else if (autoId == BsonAutoId.Int32 || autoId == BsonAutoId.Int64)
    {
        var current = id.AsInt64;

        // if the current id is bigger than the sequence, jump the sequence to this number;
        // otherwise, revert the increment so the sequence does not advance
        col.Sequence = current >= col.Sequence ? current : col.Sequence - 1;
    }

    // test if _id is a valid type
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw UltraLiteException.InvalidDataType("_id", id);
    }

    _log.Write(Logger.COMMAND, "insert document on '{0}' :: _id = {1}", col.CollectionName, id.RawValue);

    // serialize document
    var bytes = BsonWriter.Serialize(doc);

    // store in data pages - returns the dataBlock address
    var dataBlock = _data.Insert(col, bytes);

    // store id in the PK index (slot 0)
    var pk = _indexer.AddNode(col.PK, id, null);

    // link index node <-> data block
    pk.DataBlock = dataBlock.Position;

    // for each index, insert a new IndexNode
    foreach (var index in col.GetIndexes(false))
    {
        // get all keys for this index (multi-key supported) - distinct values only
        // if the index is unique, a single key is returned
        var expr = new BsonFields(index.Field);
        var keys = expr.Execute(doc, true);

        // loop over all keys (multi-key supported)
        foreach (var key in keys)
        {
            // insert node
            var node = _indexer.AddNode(index, key, pk);

            // link the index node to the data block address
            node.DataBlock = dataBlock.Position;
        }
    }
}
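// AutoId sketch (hypothetical caller; assumes a public Insert method on the engine that forwards
// to InsertDocument - the name, signature, and collection are assumptions, not taken from this file):
//
//     var doc = new BsonDocument { ["name"] = "Ana" };      // no _id supplied
//     engine.Insert("customers", doc, BsonAutoId.Int32);    // _id is filled from col.Sequence as an Int32
//     var generatedId = doc["_id"].AsInt32;                 // e.g. 1 for the first insert into an empty collection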