/// <summary>
/// Serializes this entity for persistence: base physics state plus the item stack's server bytes.
/// </summary>
public override BsonDocument GetSaveData()
{
    var data = new BsonDocument();
    AddPhysicsData(data);
    data["it_stack"] = Stack.ServerBytes();
    return data;
}
/// <summary>
/// Builds a reference document through the raw BsonDocument API (mirrors the typed object).
/// </summary>
private BsonDocument CreateDoc()
{
    var doc = new BsonDocument();

    doc["_id"] = 123;
    doc["FirstString"] = "BEGIN this string \" has \" \t and this \f \n\r END";
    doc["CustomerId"] = Guid.NewGuid();
    doc["Date"] = new DateTime(2015, 1, 1);
    doc["MyNull"] = null;
    doc["Items"] = new BsonArray();
    doc["MyObj"] = new BsonDocument();
    doc["EmptyString"] = "";

    // nested sub-document placed as the first array element
    var beer = new BsonDocument();
    beer["Qtd"] = 3;
    beer["Description"] = "Big beer package";
    beer["Unit"] = 1299.995;

    doc["Items"].AsArray.Add(beer);
    doc["Items"].AsArray.Add("string-one");
    doc["Items"].AsArray.Add(null);
    doc["Items"].AsArray.Add(true);
    doc["Items"].AsArray.Add(DateTime.Now);

    // Set supports dotted paths into nested documents
    doc.Set("MyObj.IsFirstId", true);

    return doc;
}
public void Index_Insert()
{
    using (var db = new LiteEngine(dbpath))
    {
        var col = db.GetCollection("col1");
        var doc = new BsonDocument();

        // sequential ids from the collection counter
        var firstId = col.NextVal();
        var secondId = col.NextVal();
        var thirdId = col.NextVal();

        doc["Name"] = "John 1";
        col.Insert(firstId, doc);
        doc["Name"] = "John 2";
        col.Insert(secondId, doc);
        doc["Name"] = "John 3";
        col.Insert(thirdId, doc);

        // one document keyed by a string id
        doc["Name"] = "John A";
        col.Insert("A", doc);

        var results = col.Find(Query.GTE("_id", 1));

        foreach (var found in results)
        {
            Debug.Print(found["Name"].AsString);
        }
    }
}
/// <summary>
/// Recreates an item entity in the given region from its persisted BSON data.
/// </summary>
public override Entity Create(Region tregion, BsonDocument doc)
{
    // rebuild the stack from its serialized bytes, then restore physics state
    var stack = new ItemStack(doc["it_stack"].AsBinary, tregion.TheServer);
    var entity = new ItemEntity(stack, tregion);
    entity.ApplyPhysicsData(doc);
    return entity;
}
/// <summary>
/// Serializes this entity for persistence: base physics state plus its ARGB color.
/// </summary>
public override BsonDocument GetSaveData()
{
    var data = new BsonDocument();
    AddPhysicsData(data);
    data["gs_color"] = Color.ToArgb();
    return data;
}
/// <summary>
/// Consume all _buffer bytes and write to database
/// </summary>
private void WriteChunks()
{
    var buffer = new byte[MAX_CHUNK_SIZE];
    var read = 0;

    // rewind so the whole buffered content is consumed from the start
    _buffer.Seek(0, SeekOrigin.Begin);

    while ((read = _buffer.Read(buffer, 0, MAX_CHUNK_SIZE)) > 0)
    {
        var chunk = new BsonDocument
        {
            { "_id", this.GetChunckId(_file.Chunks++) } // index zero based
        };

        // get chunk byte array part
        if (read != MAX_CHUNK_SIZE)
        {
            // last (partial) chunk: copy only the bytes actually read
            var bytes = new byte[read];
            Buffer.BlockCopy(buffer, 0, bytes, 0, read);
            chunk["data"] = bytes;
        }
        else
        {
            // full chunk: the shared buffer instance itself is stored in the document.
            // NOTE(review): safe only if Insert serializes the bytes before the next loop
            // iteration overwrites the buffer - confirm Insert does not defer.
            chunk["data"] = buffer;
        }

        // insert chunk part
        _engine.Insert(LiteStorage.CHUNKS, chunk);
    }

    // reset the buffer for the next batch (MemoryStream holds no unmanaged resources)
    _buffer = new MemoryStream();
}
/// <summary>
/// Builds a reference document through the raw BsonDocument API (mirrors the typed object).
/// </summary>
private BsonDocument CreateDoc()
{
    var doc = new BsonDocument();

    doc["_id"] = 123;
    doc["FirstString"] = "BEGIN this string \" has \" \t and this \f \n\r END";
    doc["CustomerId"] = Guid.NewGuid();
    doc["Date"] = DateTime.Now;
    doc["MyNull"] = null;
    doc["EmptyObj"] = new BsonDocument();
    doc["EmptyString"] = "";
    doc["maxDate"] = DateTime.MaxValue;
    doc["minDate"] = DateTime.MinValue;

    // Set supports dotted paths, creating intermediate documents as needed
    doc.Set("Customer.Address.Street", "Av. Cacapava");

    // first array element is a sub-document, populated before insertion
    var firstItem = new BsonDocument();
    firstItem["Qtd"] = 3;
    firstItem["Description"] = "Big beer package";
    firstItem["Unit"] = (double)10 / (double)3;

    doc["Items"] = new BsonArray();
    doc["Items"].AsArray.Add(firstItem);
    doc["Items"].AsArray.Add("string-one");
    doc["Items"].AsArray.Add(null);
    doc["Items"].AsArray.Add(true);
    doc["Items"].AsArray.Add(DateTime.Now);

    return doc;
}
/// <summary>
/// Update metadata on a file. The file must already exist.
/// </summary>
/// <param name="id">Id of the stored file to update.</param>
/// <param name="metadata">New metadata document to attach to the file.</param>
/// <returns>True when the file was found and updated; false when no file has that id.</returns>
public bool SetMetadata(string id, BsonDocument metadata)
{
    var file = this.FindById(id);

    if (file == null) return false;

    file.Metadata = metadata;

    // persist the modified file document back to the FILES collection
    _engine.Update(FILES, new BsonDocument[] { file.AsDocument });

    return true;
}
/// <summary>
/// Full-scan match for a StartsWith query: normalizes the field value and tests its prefix.
/// </summary>
internal override bool ExecuteFullScan(BsonDocument doc, IndexOptions options)
{
    var normalized = doc.Get(this.Field).Normalize(options);

    // non-string values can never match a prefix query
    return normalized.IsString && normalized.AsString.StartsWith(_value.AsString);
}
/// <summary>
/// Recreates a music block entity in the given region from its persisted BSON data.
/// </summary>
public override Entity Create(Region tregion, BsonDocument doc)
{
    // rebuild the contained item from its serialized bytes
    ItemStack it = new ItemStack(doc["mb_item"].AsBinary, tregion.TheServer);
    MusicBlockEntity mbe = new MusicBlockEntity(tregion, it, Location.Zero);

    // AsDouble already yields a double - the previous explicit (double) casts were redundant
    mbe.SetMaxHealth(doc["mb_maxhealth"].AsDouble);
    mbe.SetHealth(doc["mb_health"].AsDouble);

    return mbe;
}
/// <summary>
/// Serializes a document into its BSON byte representation.
/// </summary>
/// <param name="doc">Document to serialize; must not be null.</param>
/// <returns>The serialized BSON bytes.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="doc"/> is null.</exception>
public static byte[] Serialize(BsonDocument doc)
{
    // nameof is refactor-safe and produces the same runtime message as the old "doc" literal
    if (doc == null) throw new ArgumentNullException(nameof(doc));

    var writer = new BsonWriter();

    return writer.Serialize(doc);
}
/// <summary>
/// Serializes this smoke entity for persistence: physics state, color, remaining smoke, and type name.
/// </summary>
public override BsonDocument GetSaveData()
{
    var data = new BsonDocument();
    AddPhysicsData(data);
    data["sg_color"] = col.ToArgb();
    data["sg_smokeleft"] = SmokeLeft;
    data["sg_type"] = SmokeType.ToString();
    return data;
}
/// <summary>
/// Main method - serialize document. Uses ByteWriter
/// </summary>
public byte[] Serialize(BsonDocument doc)
{
    // size the writer exactly from the pre-computed byte count
    var size = doc.GetBytesCount(true);
    var output = new ByteWriter(size);

    this.WriteDocument(output, doc);

    return output.Buffer;
}
/// <summary>
/// Hydrates file info from a stored metadata document.
/// </summary>
internal LiteFileInfo(LiteDatabase db, BsonDocument doc)
{
    _db = db;

    Id = doc["_id"].AsString;
    Filename = doc["filename"].AsString;
    MimeType = doc["mimeType"].AsString;
    Length = doc["length"].AsInt64;
    UploadDate = doc["uploadDate"].AsDateTime;
    Metadata = doc["metadata"].AsDocument;
}
/// <summary>
/// Produces 220 documents with random GUID ids and lorem-ipsum content.
/// </summary>
private IEnumerable<BsonDocument> GetDocs()
{
    for (var n = 0; n < 220; n++)
    {
        var doc = new BsonDocument();
        doc["_id"] = Guid.NewGuid();
        doc["content"] = DB.LoremIpsum(20, 50, 1, 2, 1);

        yield return doc;
    }
}
/// <summary>
/// Write a bson document
/// </summary>
public void WriteDocument(ByteWriter writer, BsonDocument doc)
{
    // document length prefix
    writer.Write(doc.GetBytesCount(false));

    foreach (var fieldName in doc.Keys)
    {
        // missing values are written as explicit BSON null
        this.WriteElement(writer, fieldName, doc[fieldName] ?? BsonValue.Null);
    }

    // document terminator byte
    writer.Write((byte)0x00);
}
/// <summary>
/// Hydrates file info (including chunk count) from a stored metadata document.
/// </summary>
internal LiteFileInfo(LiteEngine engine, BsonDocument doc)
{
    _engine = engine;

    Id = doc["_id"].AsString;
    Filename = doc["filename"].AsString;
    MimeType = doc["mimeType"].AsString;
    Length = doc["length"].AsInt64;
    Chunks = doc["chunks"].AsInt32;
    UploadDate = doc["uploadDate"].AsDateTime;
    Metadata = doc["metadata"].AsDocument;
}
/// <summary>
/// Serializes a document to BSON bytes via an in-memory stream.
/// </summary>
/// <param name="value">Document to serialize; must not be null.</param>
/// <returns>The serialized BSON bytes.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="value"/> is null.</exception>
public static byte[] Serialize(BsonDocument value)
{
    // nameof is refactor-safe and produces the same runtime message as the old "value" literal
    if (value == null) throw new ArgumentNullException(nameof(value));

    using (var mem = new MemoryStream())
    {
        var writer = new BsonWriter();
        writer.Serialize(mem, value);
        return mem.ToArray();
    }
}
public void Files_Store()
{
    using (var db = new LiteEngine(dbpath))
    {
        var col = db.GetCollection("customer");

        // insert then delete 500 documents inside one transaction to exercise page reuse
        db.BeginTrans();

        for (var n = 1; n <= 500; n++)
        {
            var doc = new BsonDocument();
            doc["Name"] = "San Jose";
            col.Insert(n, doc);
        }

        for (var n = 1; n <= 500; n++)
        {
            col.Delete(n);
        }

        db.Commit();

        Dump.Pages(db, "before");

        var meta = new Dictionary<string, string>();
        meta["my-data"] = "Google LiteDB";

        db.Storage.Upload("my/foto1.jpg", new MemoryStream(new byte[5000]), meta);

        Dump.Pages(db, "after file");

        var file = db.Storage.FindByKey("my/foto1.jpg");

        Assert.AreEqual(5000, file.Length);
        Assert.AreEqual("Google LiteDB", file.Metadata["my-data"]);

        var mem = new MemoryStream();

        file.OpenRead(db).CopyTo(mem);

        // file real size after read all bytes
        Assert.AreEqual(5000, mem.Length);

        // all bytes are 0
        Assert.AreEqual(5000, mem.ToArray().Count(x => x == 0));

        db.Storage.Delete("my/foto1.jpg");

        Dump.Pages(db, "deleted file");
    }
}
/// <summary>
/// Creates a new, empty file info with the given id and file name (MIME type derived from the name).
/// </summary>
public LiteFileInfo(string id, string filename)
{
    // reject ids that do not match the allowed file-id pattern
    if (!IdPattern.IsMatch(id)) throw LiteException.InvalidFormat("FileId", id);

    Id = id;
    Filename = Path.GetFileName(filename);
    MimeType = MimeTypeConverter.GetMimeType(Filename);
    Length = 0;
    Chunks = 0;
    UploadDate = DateTime.Now;
    Metadata = new BsonDocument();
}
/// <summary>
/// Full-scan match for an In query: true when the normalized field value equals any candidate.
/// </summary>
internal override bool ExecuteFullScan(BsonDocument doc, IndexOptions options)
{
    var normalized = doc.Get(this.Field).Normalize(options);

    // distinct candidates only, matched by BsonValue comparison
    return _values.Distinct().Any(candidate => normalized.CompareTo(candidate) == 0);
}
/// <summary>
/// Internal implementation of insert a document
/// </summary>
private void InsertDocument(CollectionPage col, BsonDocument doc)
{
    BsonValue id;

    // if no _id, add one as ObjectId
    if (!doc.RawValue.TryGetValue("_id", out id))
    {
        doc["_id"] = id = ObjectId.NewObjectId();
    }

    // test if _id is a valid type
    if (id.IsNull || id.IsMinValue || id.IsMaxValue)
    {
        throw LiteException.InvalidDataType("_id", id);
    }

    _log.Write(Logger.COMMAND, "insert document on '{0}' :: _id = {1}", col.CollectionName, id);

    // serialize object
    var bytes = BsonSerializer.Serialize(doc);

    // storage in data pages - returns dataBlock address
    var dataBlock = _data.Insert(col, bytes);

    // store id in a PK index [0 array]
    var pk = _indexer.AddNode(col.PK, id, null);

    // do link between index <-> data block
    pk.DataBlock = dataBlock.Position;

    // for each index, insert new IndexNode
    foreach (var index in col.GetIndexes(false))
    {
        // for each index, get all keys (multi-key supported) - distinct values only;
        // if the index is unique, get a single key only
        var keys = doc.GetValues(index.Field, index.Unique);

        // do a loop with all keys (multi-key supported)
        foreach (var key in keys)
        {
            // insert node, linked back to the PK node
            var node = _indexer.AddNode(index, key, pk);

            // link my index node to data block address
            node.DataBlock = dataBlock.Position;
        }
    }
}
/// <summary>
/// Write a bson document
/// </summary>
internal void WriteDocument(BinaryWriter writer, BsonDocument doc)
{
    using (var mem = new MemoryStream())
    {
        // serialize all elements into a temporary buffer first so the byte length is known up-front
        var w = new BinaryWriter(mem);

        foreach (var key in doc.Keys)
        {
            // missing values are written as explicit BSON null
            this.WriteElement(w, key, doc[key] ?? BsonValue.Null);
        }

        // NOTE(review): the length written here covers only the element bytes - it excludes both
        // the 4-byte length prefix and the trailing 0x00, unlike the BSON spec's self-inclusive
        // document length. Verify the matching reader compensates for this.
        writer.Write((Int32)mem.Position);
        writer.Write(mem.GetBuffer(), 0, (int)mem.Position);
        writer.Write((byte)0x00);
    }
}
/// <summary>
/// Writes a document as a JSON object block, with separators between all but the last pair.
/// </summary>
private void WriteObject(BsonDocument obj)
{
    var total = obj.Keys.Count();
    var notEmpty = total > 0;

    this.WriteStartBlock("{", notEmpty);

    var position = 0;

    foreach (var key in obj.Keys)
    {
        position++;

        // third argument: true while more key/value pairs follow
        this.WriteKeyValue(key, obj[key], position < total);
    }

    this.WriteEndBlock("}", notEmpty);
}
/// <summary>
/// Rebuilds a BufferFileMeta from an attachment document by converting each matching
/// document value into the corresponding writable property.
/// </summary>
public BufferFileMeta GetMetadataFromAttachment(LiteDB.BsonDocument doc)
{
    var meta = new BufferFileMeta();

    foreach (var prop in typeof(BufferFileMeta).GetProperties())
    {
        // skip read-only properties and keys absent from the document
        if (!prop.CanWrite || !doc.ContainsKey(prop.Name)) continue;

        var converter = TypeDescriptor.GetConverter(prop.PropertyType);
        prop.SetValue(meta, converter.ConvertFromString(doc[prop.Name]));
    }

    // the id is assigned through its dedicated setter method
    meta.SetId(int.Parse(doc[nameof(BufferFileMeta.Id)].AsString));

    return meta;
}
/// <summary>
/// Read a BsonDocument from reader
/// </summary>
public BsonDocument ReadDocument(ByteReader reader)
{
    var length = reader.ReadInt32();

    // end position of the element region: the stored length includes the 4-byte length
    // prefix and the trailing zero, so subtract 5 (4 already consumed + 1 terminator)
    var end = reader.Position + length - 5;

    var obj = new BsonDocument();

    while (reader.Position < end)
    {
        string name;
        var value = this.ReadElement(reader, out name);

        // write through RawValue to bypass key validation on deserialization
        obj.RawValue[name] = value;
    }

    reader.ReadByte(); // zero terminator

    return obj;
}
/// <summary>
/// Get all database information
/// </summary>
public BsonDocument GetDatabaseInfo()
{
    // ensure we read a consistent view before collecting stats
    this.Transaction.AvoidDirtyRead();

    var header = this.Cache.Header;
    var result = new BsonDocument();

    result["filename"] = this.ConnectionString.Filename;
    result["journal"] = this.ConnectionString.JournalEnabled;
    result["timeout"] = this.ConnectionString.Timeout.TotalSeconds;
    result["version"] = header.UserVersion;
    result["changeID"] = header.ChangeID;
    result["fileLength"] = (header.LastPageID + 1) * BasePage.PAGE_SIZE;
    result["lastPageID"] = header.LastPageID;
    result["pagesInCache"] = this.Cache.PagesInCache;
    result["dirtyPages"] = this.Cache.GetDirtyPages().Count();

    //TODO: Add collections info
    //      Add indexes info
    //      Add storage used/free info

    return result;
}
/// <summary>
/// List all indexes inside a collection
/// </summary>
/// <param name="colName">Collection name; yields nothing when the collection does not exist.</param>
/// <param name="stats">When true, adds a "stats" sub-document with page counts and average key size.</param>
public IEnumerable<BsonDocument> GetIndexes(string colName, bool stats = false)
{
    var col = this.GetCollectionPage(colName, false);

    if (col == null) yield break;

    foreach (var index in col.GetIndexes(true))
    {
        var doc = new BsonDocument()
            .Add("slot", index.Slot)
            .Add("field", index.Field)
            .Add("options", new BsonDocument()
                .Add("unique", index.Options.Unique)
                .Add("ignoreCase", index.Options.IgnoreCase)
                .Add("removeAccents", index.Options.RemoveAccents)
                .Add("trimWhitespace", index.Options.TrimWhitespace)
                .Add("emptyStringToNull", index.Options.EmptyStringToNull)
            );

        if (stats)
        {
            _cache.CheckPoint();

            // count of distinct pages used by this index
            var pages = _indexer.FindAll(index, Query.Ascending).GroupBy(x => x.Page.PageID).Count();

            // WARNING: this scan can consume a lot of memory - it has no CheckPoint inside the loop
            var keySize = pages == 0 ? 0 : _indexer.FindAll(index, Query.Ascending).Average(x => x.KeyLength);

            doc.Add("stats", new BsonDocument()
                .Add("pages", pages)
                .Add("allocated", BasePage.GetSizeOfPages(pages))
                .Add("keyAverageSize", (int)keySize)
            );
        }

        yield return doc;
    }
}
/// <summary>
/// SORT pipeline function over a value sequence.
/// NOTE(review): currently a pass-through - items are yielded in their original order and
/// sortExpr is never evaluated, even though its parameters are updated below.
/// </summary>
public static IEnumerable<BsonValue> SORT(IEnumerable<BsonValue> input, BsonExpression sortExpr, BsonDocument root, BsonDocument parameters)
{
    //TODO: implement a sort function here

    // update parameters in expression
    parameters.CopyTo(sortExpr.Parameters);

    foreach (var item in input)
    {
        yield return (item);
    }
}
/// <summary>
/// Register a property as a DbRefList - implement a custom Serialize/Deserialize actions to convert entity to $id, $ref only
/// </summary>
private static void RegisterDbRefList(BsonMapper mapper, MemberMapper member, ITypeNameBinder typeNameBinder, string collection)
{
    // get entity from list item type
    var entity = mapper.GetEntityMapper(member.UnderlyingType);

    // serialize: reduce each list item to a { $id, $ref [, $type] } reference document
    member.Serialize = (list, m) =>
    {
        // supports null values when "SerializeNullValues = true"
        if (list == null)
        {
            return (BsonValue.Null);
        }

        var result = new BsonArray();
        var idField = entity.Id;

        foreach (var item in (IEnumerable)list)
        {
            // null items are silently dropped from the serialized array
            if (item == null)
            {
                continue;
            }

            var id = idField.Getter(item);

            var bsonDocument = new BsonDocument
            {
                ["$id"] = m.Serialize(id.GetType(), id, 0),
                ["$ref"] = collection
            };

            // record the concrete type only when it differs from the declared item type
            if (member.UnderlyingType != item.GetType())
            {
                bsonDocument["$type"] = typeNameBinder.GetName(item.GetType());
            }

            result.Add(bsonDocument);
        }

        return (result);
    };

    // deserialize: rebuild the list, mapping $id back to _id (and $type to _type)
    member.Deserialize = (bson, m) =>
    {
        if (bson.IsArray == false)
        {
            return (null);
        }

        var array = bson.AsArray;

        if (array.Count == 0)
        {
            return (m.Deserialize(member.DataType, array));
        }

        // copy array changing $id to _id
        var result = new BsonArray();

        foreach (var item in array)
        {
            var refId = item["$id"];
            var missing = item["$missing"] == true;

            // if the referenced document is missing, do not include it in the output list
            if (missing)
            {
                continue;
            }

            // if refId is null it was included by an "include" query, so "item" is the fully-filled document
            if (refId.IsNull)
            {
                result.Add(item);
            }
            else
            {
                var bsonDocument = new BsonDocument { ["_id"] = refId };

                if (item.AsDocument.ContainsKey("$type"))
                {
                    bsonDocument["_type"] = item["$type"];
                }

                result.Add(bsonDocument);
            }
        }

        return (m.Deserialize(member.DataType, result));
    };
}
/// <summary>
/// Returns a single value from array according index or expression parameter
/// </summary>
public static BsonValue ARRAY_INDEX(BsonValue value, int index, BsonExpression expr, BsonDocument root, Collation collation, BsonDocument parameters)
{
    if (!value.IsArray) return BsonValue.Null;

    var items = value.AsArray;

    // a parameter expression resolves to a fixed numeric position at runtime
    if (expr.Type == BsonExpressionType.Parameter)
    {
        // get fixed position based on parameter value (must return int value)
        var position = expr.ExecuteScalar(root, collation);

        if (!position.IsNumber)
        {
            throw new LiteException(0, "Parameter expression must return number when called inside an array");
        }

        index = position.AsInt32;
    }

    // negative indexes count back from the end of the array
    var effective = index < 0 ? items.Count + index : index;

    return effective < items.Count ? items[effective] : BsonValue.Null;
}
/// <summary>
/// Serializes a document directly into the given stream.
/// </summary>
public void Serialize(Stream stream, BsonDocument value)
{
    // wrap the target stream and delegate to the document writer
    this.WriteDocument(new BinaryWriter(stream), value);
}
/// <summary>
/// Returns all values from array according filter expression or all values (index = MaxValue)
/// </summary>
public static IEnumerable<BsonValue> ARRAY_FILTER(BsonValue value, int index, BsonExpression filterExpr, BsonDocument root, Collation collation, BsonDocument parameters)
{
    if (!value.IsArray) yield break;

    var items = value.AsArray;

    // [*] - MaxValue sentinel means every element
    if (index == int.MaxValue)
    {
        foreach (var item in items)
        {
            yield return item;
        }

        yield break;
    }

    // [<expr>] - keep only elements whose filter evaluates to an explicit boolean true
    foreach (var item in items)
    {
        var test = filterExpr.ExecuteScalar(new BsonDocument[] { root }, root, item, collation);

        if (test.IsBoolean && test.AsBoolean)
        {
            yield return item;
        }
    }
}
/// <summary>
/// Recreates an entity instance within the given region from its persisted BSON data.
/// </summary>
/// <param name="tregion">Region the entity belongs to.</param>
/// <param name="input">Saved BSON data previously produced for this entity type.</param>
public abstract Entity Create(Region tregion, BsonDocument input);
/// <summary>
/// FILTER pipeline function: keeps only items whose filter expression evaluates to boolean true.
/// </summary>
public static IEnumerable<BsonValue> FILTER(IEnumerable<BsonValue> input, BsonExpression filterExpr, BsonDocument root, BsonDocument parameters)
{
    // propagate current parameters into the filter expression
    parameters.CopyTo(filterExpr.Parameters);

    foreach (var item in input)
    {
        // evaluate the filter against each element; keep it only on an explicit boolean true
        var test = filterExpr.ExecuteScalar(new BsonDocument[] { root }, root, item);

        if (test.IsBoolean && test.AsBoolean)
        {
            yield return item;
        }
    }
}
/// <summary>
/// Reads a JSON object from the tokenizer, handling extended data types ($-prefixed first keys).
/// </summary>
private BsonValue ReadObject()
{
    var obj = new BsonDocument();

    var token = _tokenizer.ReadToken(); // read "<key>"

    while (token.TokenType != JsonTokenType.EndDoc)
    {
        token.Expect(JsonTokenType.String, JsonTokenType.Word);

        var key = token.Token;

        token = _tokenizer.ReadToken(); // read ":"

        token.Expect(JsonTokenType.Colon);

        token = _tokenizer.ReadToken(); // read "<value>"

        // check if not a special data type - only when it is the first attribute
        if (key[0] == '$' && obj.Count == 0)
        {
            var val = this.ReadExtendedDataType(key, token.Token);

            // if val is null then it's not an extended data type - it's just an object with a $ attribute
            if (!val.IsNull) return val;
        }

        obj[key] = this.ReadValue(token);

        // read "," or "}"
        token = _tokenizer.ReadToken();

        if (token.TokenType == JsonTokenType.Comma)
        {
            token = _tokenizer.ReadToken(); // read "<key>"
        }
    }

    return obj;
}
/// <summary>
/// MAP pipeline function: projects each input item through the map expression,
/// flattening all values the expression yields.
/// </summary>
public static IEnumerable<BsonValue> MAP(IEnumerable<BsonValue> input, BsonExpression mapExpr, BsonDocument root, BsonDocument parameters)
{
    // propagate current parameters into the map expression
    parameters.CopyTo(mapExpr.Parameters);

    foreach (var item in input)
    {
        // an expression may yield zero, one, or many values per input item
        var projected = mapExpr.Execute(new BsonDocument[] { root }, root, item);

        foreach (var result in projected)
        {
            yield return result;
        }
    }
}