public async Task CountFilteredWithBsonDocument()
{
    // Count the students whose _id matches a known value, filtering with a raw BsonDocument.
    var filter = new BsonDocument("_id", "050305007");

    long matchCount = await SchoolContext.StudentsAsBson.CountAsync(filter);

    matchCount.Should().Be(1);
}
// Asserts that a successfully parsed connection string matches its spec-test definition.
private void AssertValid(ConnectionString connectionString, BsonDocument definition)
{
    // This method is only reached when parsing succeeded; if the spec definition marks
    // the URI as NOT valid, parsing should have failed — hence "should be invalid".
    if (!definition["valid"].ToBoolean())
    {
        Assert.Fail($"The connection string '{definition["uri"]}' should be invalid.");
    }
    // Compare the parsed read concern level against the spec's expectation, if present.
    BsonValue readConcernValue;
    if (definition.TryGetValue("readConcern", out readConcernValue))
    {
        var readConcern = ReadConcern.FromBsonDocument((BsonDocument)readConcernValue);
        connectionString.ReadConcernLevel.Should().Be(readConcern.Level);
    }
    // Compare each write concern component (w, wtimeout, journal, fsync) likewise.
    BsonValue writeConcernValue;
    if (definition.TryGetValue("writeConcern", out writeConcernValue))
    {
        var writeConcern = WriteConcern.FromBsonDocument(MassageWriteConcernDocument((BsonDocument)writeConcernValue));
        connectionString.W.Should().Be(writeConcern.W);
        connectionString.WTimeout.Should().Be(writeConcern.WTimeout);
        connectionString.Journal.Should().Be(writeConcern.Journal);
        connectionString.FSync.Should().Be(writeConcern.FSync);
    }
}
public void TestBogusElement()
{
    // Deserializing a document containing an element that maps to no member of C must throw.
    var document = new BsonDocument("bogus", 0);
    var expectedMessage = "Element 'bogus' does not match any field or property of class MongoDB.BsonUnitTests.Jira.CSharp270.C.";

    var ex = Assert.Throws<FileFormatException>(() => BsonSerializer.Deserialize<C>(document));

    Assert.AreEqual(expectedMessage, ex.Message);
}
public C(int id, int a, int b, BsonDocument x)
{
    // Straight member-wise initialization via tuple deconstruction.
    (Id, A, B, X) = (id, a, b, x);
}
/// <summary>
/// Maps the specified response to a custom exception (if possible).
/// </summary>
/// <param name="response">The response.</param>
/// <returns>The custom exception (or null if the response could not be mapped to a custom exception).</returns>
public static Exception Map(BsonDocument response)
{
    // These server error codes all indicate that the operation ran out of time.
    BsonValue code;
    if (response.TryGetValue("code", out code) && code.IsNumeric)
    {
        var codeValue = code.ToInt32();
        if (codeValue == 50 || codeValue == 13475 || codeValue == 16986 || codeValue == 16712)
        {
            return new ExecutionTimeoutException("Operation exceeded time limit.");
        }
    }

    // the server sometimes sends a response that is missing the "code" field but does have an "errmsg" field
    BsonValue errmsg;
    if (response.TryGetValue("errmsg", out errmsg) && errmsg.IsString)
    {
        var message = errmsg.AsString;
        if (message.Contains("exceeded time limit") || message.Contains("execution terminated"))
        {
            return new ExecutionTimeoutException("Operation exceeded time limit.");
        }
    }

    // Not a recognized timeout response.
    return null;
}
public void DiffAFieldAddedAndAFieldModified()
{
    const string existingField = "existingField";
    const string newField = "newField";
    const int newExistingFieldValue = 1;
    const int oldExistingFieldValue = 2;
    const int newFieldValue = 3;

    // "a" carries the shared field with a new value plus a brand-new field;
    // "b" only carries the shared field with the old value.
    var a = new BsonDocument();
    a.SetValue(existingField, newExistingFieldValue);
    a.SetValue(newField, newFieldValue);

    var b = new BsonDocument();
    b.SetValue(existingField, oldExistingFieldValue);

    // The diff must report the added field and the differing values of the shared field.
    var expectedDiff = new BsonDocument
    {
        { "+a:newField", newFieldValue },
        {
            existingField,
            new BsonDocument
            {
                { "values differ", new BsonDocument { { "a", newExistingFieldValue }, { "b", oldExistingFieldValue } } }
            }
        }
    };

    var actualDiff = a.Diff(b);

    Assert.That(actualDiff, Is.EqualTo(expectedDiff));
}
// Verifies that Insert, Update, and Save all round-trip an element whose name starts
// with '$' when client-side element-name checking is explicitly disabled.
public void TestInsertUpdateAndSaveWithElementNameStartingWithDollarSign()
{
    var server = Configuration.TestServer;
    var database = Configuration.TestDatabase;
    var collection = Configuration.TestCollection;
    collection.Drop();
    var document = new BsonDocument
    {
        { "_id", 1 },
        { "v", new BsonDocument("$x", 1) } // server doesn't allow "$" at top level
    };
    // CheckElementNames = false suppresses the driver's '$'-name validation.
    var insertOptions = new MongoInsertOptions { CheckElementNames = false };
    collection.Insert(document, insertOptions);
    document = collection.FindOne();
    Assert.AreEqual(1, document["v"]["$x"].AsInt32);

    // Update path: replace the whole document with a modified copy.
    document["v"]["$x"] = 2;
    var query = Query.EQ("_id", 1);
    var update = Update.Replace(document);
    var updateOptions = new MongoUpdateOptions { CheckElementNames = false };
    collection.Update(query, update, updateOptions);
    document = collection.FindOne();
    Assert.AreEqual(2, document["v"]["$x"].AsInt32);

    // Save (upsert) path, reusing the relaxed insert options.
    document["v"]["$x"] = 3;
    collection.Save(document, insertOptions);
    document = collection.FindOne();
    Assert.AreEqual(3, document["v"]["$x"].AsInt32);
}
// Loads a user by Facebook id, ensures their picture is cached, records the visit
// (unless whitelisted), stamps LastVisit, and logs a user event. Returns null when
// the user does not exist.
public DatingBookUserInfo GetDatingBookUserInfo(string appName, ObjectId userId)
{
    var datingBookUserInfo = _userDataProvider.GetDatingBookUserInfoByFacebookId(appName, userId);
    if (datingBookUserInfo == null) return null;
    // Make sure the profile picture is available locally before recording the visit.
    if (!CheckIfUserPictureExist(appName, datingBookUserInfo)) DownloadUserPicture(appName, datingBookUserInfo);
    // NOTE(review): DateTime.Now is local time — confirm whether visits should use UTC.
    datingBookUserInfo.LastVisit = DateTime.Now;
    ObjectId visitId = ObjectId.Empty;
    // Whitelisted users do not generate visit records (visitId stays Empty).
    var whiteList = _userDataProvider.GetUsersWhitelist(appName);
    if (!whiteList.Exists((usr) => usr == datingBookUserInfo.FacebookId))
    {
        BsonDocument visit = new BsonDocument();
        visit.Add("dating_book_id", datingBookUserInfo.ObjectId);
        visit.Add("entered_at", datingBookUserInfo.LastVisit);
        visitId = _userDataProvider.InsertVisit(appName, visit);
    }
    datingBookUserInfo = _userDataProvider.UpdateDatingBookLastVisit(appName, datingBookUserInfo, visitId);
    // Record an event of type 1 — presumably "visit/login"; verify against the event-type enum.
    var userEventInfo = new UserEventInfo { UserId = datingBookUserInfo.ObjectId, EventType = 1, DateCreated = DateTime.Now };
    _userDataProvider.InsertUserEvent(appName, userEventInfo);
    return datingBookUserInfo;
}
// Stores the user's profile and a flattened list of movie titles in MongoDB.
public void Post(UserModel model)
{
    // NOTE(review): creating a MongoClient per request is wasteful; MongoClient is
    // thread-safe and meant to be reused as a single shared instance.
    var mongoDbClient = new MongoClient("mongodb://127.0.0.1:27017");
    var mongoDbServer = mongoDbClient.GetDatabase("SocialNetworks");

    // Flatten the nested movie JSON structure into a flat array of titles.
    BsonArray arr = new BsonArray();
    dynamic jobj = JsonConvert.DeserializeObject<dynamic>(model.Movies.ToString());
    foreach (var item in jobj)
    {
        foreach (var subitem in item)
        {
            arr.Add(subitem.Title.ToString());
        }
    }

    var document = new BsonDocument
    {
        { "Facebook_ID", model.Facebook_ID },
        { "Ime", model.Ime },
        { "Prezime", model.Prezime },
        { "Email", model.Email },
        { "DatumRodjenja", model.DatumRodjenja },
        { "Hometown", model.Hometown },
        { "ProfilePictureLink", model.ProfilePictureLink },
        { "Movies", arr },
    };

    var collection = mongoDbServer.GetCollection<BsonDocument>("UserInfo");

    // BUG FIX: the original called InsertOneAsync without awaiting it, so the request
    // could complete before the insert ran and any insert exception was silently lost.
    // This void method uses the synchronous InsertOne so errors surface to the caller.
    collection.InsertOne(document);
}
// Verifies that CountOperation.CreateCommand emits a count command containing every
// configured option (query, limit, skip, hint, maxTimeMS).
public void CreateCommand_should_create_the_correct_command()
{
    // Arrange: a count operation with all optional knobs set.
    var filter = new BsonDocument("x", 1);
    var hint = "funny";
    var limit = 10;
    var skip = 30;
    var maxTime = TimeSpan.FromSeconds(20);
    var subject = new CountOperation(_collectionNamespace, _messageEncoderSettings)
    {
        Filter = filter,
        Hint = hint,
        Limit = limit,
        MaxTime = maxTime,
        Skip = skip
    };
    var expectedResult = new BsonDocument
    {
        { "count", _collectionNamespace.CollectionName },
        { "query", filter },
        { "limit", limit },
        { "skip", skip },
        { "hint", hint },
        { "maxTimeMS", maxTime.TotalMilliseconds } // MaxTime is sent in milliseconds
    };

    var result = subject.CreateCommand();

    result.Should().Be(expectedResult);
}
/// <summary>
/// Gets every user that belongs to the given role.
/// </summary>
/// <param name="UserRoleId">The ObjectId of the role.</param>
/// <returns>All users in the role as a BsonArray.</returns>
public BsonArray UserInRole(ObjectId UserRoleId)
{
    // Match users whose UserRole field references the requested role.
    var query = new BsonDocument { { "UserRole", UserRoleId } };
    return GetUsersToArray(query);
}
// Buckets rentals into $500-wide price bands and counts the rentals in each band,
// ordered by band, using the aggregation pipeline.
public IEnumerable<BsonDocument> Run(MongoCollection<Rental> rentals)
{
    // Band key: Price - (Price mod 500), i.e. the price rounded down to a multiple of 500.
    var bandExpression = new BsonDocument(
        "$subtract",
        new BsonArray
        {
            "$Price",
            new BsonDocument("$mod", new BsonArray { "$Price", 500 })
        });

    // Group by band and count members.
    var groupStage = new BsonDocument(
        "$group",
        new BsonDocument
        {
            { "_id", bandExpression },
            { "count", new BsonDocument("$sum", 1) }
        });

    // Order bands ascending by price.
    var sortStage = new BsonDocument("$sort", new BsonDocument("_id", 1));

    var args = new AggregateArgs
    {
        Pipeline = new[] { groupStage, sortStage }
    };
    return rentals.Aggregate(args);
}
public ExplainOperationTests()
{
    // The explain tests operate on a simple count command against the test collection.
    _command = new BsonDocument("count", _collectionNamespace.CollectionName);
}
public void TestSetDocumentIdInt32()
{
    var document = new BsonDocument("x", "abc");
    var idProvider = (IBsonIdProvider)BsonDocumentSerializer.Instance;

    // SetDocumentId should coerce the CLR int 1 into a BsonInt32 "_id" element.
    idProvider.SetDocumentId(document, 1);

    Assert.IsTrue(document["_id"].IsInt32);
    Assert.AreEqual(1, document["_id"].AsInt32);
}
// constructors
internal BulkWriteError(int index, int code, string message, BsonDocument details)
{
    // Member-wise capture of the per-item error reported by the server.
    _index = index;
    _code = code;
    _message = message;
    _details = details;
}
// Verifies Insert/Update/Save of an element whose name starts with '$' when element-name
// checking is disabled. Skipped (silently) on servers 2.6 and newer.
public void TestInsertUpdateAndSaveWithElementNameStartingWithDollarSign()
{
    // starting with version 2.5.2 the server got stricter about dollars in element names
    // so this test should only be run when testing against older servers
    var server = Configuration.TestServer;
    if (server.BuildInfo.Version < new Version(2, 6, 0))
    {
        var database = Configuration.TestDatabase;
        var collection = Configuration.TestCollection;
        collection.Drop();
        var document = new BsonDocument
        {
            { "_id", 1 },
            { "v", new BsonDocument("$x", 1) } // server doesn't allow "$" at top level
        };
        // CheckElementNames = false disables the client-side '$' validation.
        var insertOptions = new MongoInsertOptions { CheckElementNames = false };
        collection.Insert(document, insertOptions);
        document = collection.FindOne();
        Assert.AreEqual(1, document["v"]["$x"].AsInt32);
        // Update path: replace the whole document with a modified copy.
        document["v"]["$x"] = 2;
        var query = Query.EQ("_id", 1);
        var update = Update.Replace(document);
        var updateOptions = new MongoUpdateOptions { CheckElementNames = false };
        collection.Update(query, update, updateOptions);
        document = collection.FindOne();
        Assert.AreEqual(2, document["v"]["$x"].AsInt32);
        // Save (upsert) path, reusing the relaxed insert options.
        document["v"]["$x"] = 3;
        collection.Save(document, insertOptions);
        document = collection.FindOne();
        Assert.AreEqual(3, document["v"]["$x"].AsInt32);
    }
}
// Older variant of the '$'-element-name round-trip test that connects directly via a
// connection string and uses explicit AsBsonDocument casts for nested access.
public void TestInsertUpdateAndSaveWithElementNameStartingWithDollarSign()
{
    var server = MongoServer.Create("mongodb://localhost/?safe=true;slaveOk=true");
    var database = server["onlinetests"];
    var collection = database["test"];
    collection.Drop();
    var document = new BsonDocument
    {
        { "_id", 1 },
        { "v", new BsonDocument("$x", 1) } // server doesn't allow "$" at top level
    };
    // CheckElementNames = false suppresses the driver's '$'-name validation.
    var insertOptions = new MongoInsertOptions { CheckElementNames = false };
    collection.Insert(document, insertOptions);
    document = collection.FindOne();
    Assert.AreEqual(1, document["v"].AsBsonDocument["$x"].AsInt32);
    // Update path: replace the whole document with a modified copy.
    document["v"].AsBsonDocument["$x"] = 2;
    var query = Query.EQ("_id", 1);
    var update = Update.Replace(document);
    var updateOptions = new MongoUpdateOptions { CheckElementNames = false };
    collection.Update(query, update, updateOptions);
    document = collection.FindOne();
    Assert.AreEqual(2, document["v"].AsBsonDocument["$x"].AsInt32);
    // Save (upsert) path, reusing the relaxed insert options.
    document["v"].AsBsonDocument["$x"] = 3;
    collection.Save(document, insertOptions);
    document = collection.FindOne();
    Assert.AreEqual(3, document["v"].AsBsonDocument["$x"].AsInt32);
}
public async Task QueryAll()
{
    // @begin: query-all
    // @code: start
    var collection = _database.GetCollection<BsonDocument>("restaurants");
    var filter = new BsonDocument();

    // Walk the cursor batch by batch, counting every document in the collection.
    var count = 0;
    using (var cursor = await collection.FindAsync(filter))
    {
        while (await cursor.MoveNextAsync())
        {
            foreach (var document in cursor.Current)
            {
                // process document
                count++;
            }
        }
    }
    // @code: end

    // @results: start
    count.Should().Be(25359);
    // @results: end
    // @end: query-all
}
/// <summary>
/// Adds (or updates) a user in this database's system.users collection.
/// </summary>
/// <param name="Col">The system.users collection to save into.</param>
/// <param name="user">The user.</param>
public static void AddUserEx(MongoCollection Col, User user)
{
    // Reuse the existing user document when present; otherwise start a new one.
    var document = Col.FindOneAs<BsonDocument>(Query.EQ("user", user.Username));
    if (document == null)
    {
        document = new BsonDocument("user", user.Username);
    }
    document["roles"] = user.roles;
    // The legacy "readOnly" flag is superseded by roles; drop it if present.
    if (document.Contains("readOnly"))
    {
        document.Remove("readOnly");
    }
    // The password must be hashed before storage (never stored in plain text).
    document["pwd"] = MongoUser.HashPassword(user.Username, user.Password);
    // otherDBRoles may only be stored in admin.system.users.
    if (Col.Database.Name == MongoDbHelper.DATABASE_NAME_ADMIN)
    {
        document["otherDBRoles"] = user.otherDBRoles;
    }
    // NOTE(review): an empty password appears to mean delegated authentication via
    // userSource — confirm against the user-management semantics in use.
    if (string.IsNullOrEmpty(user.Password))
    {
        document["userSource"] = user.userSource;
    }
    Col.Save(document);
}
public void Wrapped_should_return_the_document_passed_in_the_constructor()
{
    var wrappedDocument = new BsonDocument();

    var subject = new IsMasterResult(wrappedDocument);

    // Wrapped must expose the exact same instance, not a copy.
    subject.Wrapped.Should().BeSameAs(wrappedDocument);
}
// Collects the gamertag form fields into a document, stores it, and shows a link to
// the listing page.
protected void submitButton_Click(object sender, EventArgs e)
{
    delPanel.Visible = false;
    submitButton.Visible = false;

    // Retrieves info from textboxes
    // BUG FIX: the original computed games.Text.Replace("'", "''") into a local
    // ("gamesList") that was never used and then stored the raw games.Text anyway.
    // The dead local is removed — MongoDB documents need no apostrophe escaping.
    var document = new BsonDocument
    {
        { "name", name.Text },
        { "live", live.Text },
        { "psn", psn.Text },
        { "steam", steam.Text },
        { "wiiu", wiiu.Text },
        { "games", games.Text },
        { "delKey", delete.Text }
    };

    // Start a session, allowing for deletion from database
    Session.Add("Name", name.Text);

    var collection = Global.database.GetCollection<BsonDocument>("Tags");

    // Upon successful insertion
    collection.InsertOne(document);
    outputLabel.Text += "<br />" + Session.Contents[0] + ", your gamertag(s) have been added! <br /> <a href=\"./ViewAll.aspx\"> Click Here To View All Entries </a>";
}
// Deletes every document in the named collection (an empty filter matches all).
// Returns whether the server acknowledged the delete.
public async Task<bool> DeleteAllData(string CollectionName)
{
    var collection = _database.GetCollection<BsonDocument>(CollectionName);
    var filter = new BsonDocument();

    var result = await collection.DeleteManyAsync(filter);

    // BUG FIX: the original ignored the delete result and unconditionally returned
    // true; report the server's acknowledgement so the return value is meaningful.
    // (With the default write concern this is still true, so callers are unaffected.)
    return result.IsAcknowledged;
}
// Verifies that nesting two PartiallyRawBsonDocumentSerializers leaves exactly the
// targeted nested element ("b.e") raw, while everything else becomes a plain BsonDocument.
public void Deserialize_should_return_nested_partially_raw_BsonDocument()
{
    var document = new BsonDocument
    {
        { "a", new BsonDocument("x", 1) },
        { "b", new BsonDocument
            {
                { "d", new BsonDocument("z", 1) },
                { "e", new BsonDocument("z", 2) },
                { "f", new BsonDocument("z", 3) },
            }
        },
        { "c", new BsonDocument("x", 3) }
    };
    var bson = document.ToBson();
    // Outer serializer targets "b"; the inner one targets "e" within it.
    var subject = new PartiallyRawBsonDocumentSerializer("b", new PartiallyRawBsonDocumentSerializer("e", RawBsonDocumentSerializer.Instance));

    var result = Deserialize(bson, subject);

    result["a"].Should().BeOfType<BsonDocument>();
    result["b"].Should().BeOfType<BsonDocument>();
    result["c"].Should().BeOfType<BsonDocument>();
    result["b"]["d"].Should().BeOfType<BsonDocument>();
    // Only b.e remains raw.
    result["b"]["e"].Should().BeOfType<RawBsonDocument>();
    result["b"]["f"].Should().BeOfType<BsonDocument>();
}
// Verifies that serializing a nested (non-circular) object graph leaves every writer's
// SerializationDepth back at zero across all three writer types.
public void TestNoCircularReference()
{
    var c2 = new C { X = 2 };
    var c1 = new C { X = 1, NestedDocument = c2 };
    var json = c1.ToJson();
    // "_csharpnull" marks a C# null stored in a BsonValue-typed member.
    var expected = "{ 'X' : 1, 'NestedDocument' : { 'X' : 2, 'NestedDocument' : null, 'BsonArray' : { '_csharpnull' : true } }, 'BsonArray' : { '_csharpnull' : true } }".Replace("'", "\"");
    Assert.AreEqual(expected, json);
    // Binary writer: depth must fully unwind after a complete serialization.
    var memoryStream = new MemoryStream();
    using (var writer = new BsonBinaryWriter(memoryStream))
    {
        BsonSerializer.Serialize(writer, c1);
        Assert.AreEqual(0, writer.SerializationDepth);
    }
    // Document writer: same invariant.
    var document = new BsonDocument();
    using (var writer = new BsonDocumentWriter(document))
    {
        BsonSerializer.Serialize(writer, c1);
        Assert.AreEqual(0, writer.SerializationDepth);
    }
    // JSON writer: same invariant.
    var stringWriter = new StringWriter();
    using (var writer = new JsonWriter(stringWriter))
    {
        BsonSerializer.Serialize(writer, c1);
        Assert.AreEqual(0, writer.SerializationDepth);
    }
}
// Initializes a reader positioned at the top level of the given document.
public BsonDocumentReader(
    BsonDocument document
)
{
    // The root context wraps the whole document; it has no parent context.
    context = new BsonDocumentReaderContext(null, ContextType.TopLevel, document);
    currentValue = document;
}
// Homework-cleanup script: removes each student's lowest homework score from the
// students.grades collection and prints how many students were affected.
static async Task MainAsync(string[] args)
{
    var urlString = "mongodb://localhost:27017";
    var client = new MongoClient(urlString);
    var db = client.GetDatabase("students");
    var collection = db.GetCollection<BsonDocument>("grades");
    var filter = new BsonDocument("type", "homework");
    // var count = 0;
    // Sorting by student_id then ascending score means the FIRST document seen per
    // student is that student's lowest homework score.
    var sort = Builders<BsonDocument>.Sort.Ascending("student_id").Ascending("score");
    var result = await collection.Find(filter).Sort(sort).ToListAsync();
    var previous_id = -1;
    var student_id = -1;
    int count = 0;
    foreach (var doc in result)
    {
        student_id = (int)doc["student_id"];
        //Console.WriteLine(student_id);
        // First doc for a new student == their lowest score: delete it.
        if (student_id != previous_id)
        {
            count++;
            previous_id = student_id;
            Console.WriteLine("removing :{0} ", doc);
            // await collection.DeleteOneAsync(doc);
            // NOTE(review): the entire document is used as the filter, so DeleteMany
            // removes exactly the matching document(s); DeleteOneAsync (commented out
            // above) would express the intent more precisely — confirm which was meant.
            await collection.DeleteManyAsync(doc);
        }
        // process document
    }
    Console.WriteLine(count);
    //Console.WriteLine(coll.FindAsync<"">);
}
// Builds a driver ReadPreference from a spec-test description document.
private ReadPreference BuildReadPreference(BsonDocument readPreferenceDescription)
{
    // Translate every tag-set document into a driver TagSet.
    var tagSets = ((BsonArray)readPreferenceDescription["tag_sets"])
        .Select(x => BuildTagSet((BsonDocument)x));

    var mode = readPreferenceDescription["mode"].ToString();

    // tag sets can't be used with Primary
    if (mode == "Primary")
    {
        return ReadPreference.Primary;
    }

    ReadPreference basePreference;
    switch (mode)
    {
        case "Nearest":
            basePreference = ReadPreference.Nearest;
            break;
        case "PrimaryPreferred":
            basePreference = ReadPreference.PrimaryPreferred;
            break;
        case "Secondary":
            basePreference = ReadPreference.Secondary;
            break;
        case "SecondaryPreferred":
            basePreference = ReadPreference.SecondaryPreferred;
            break;
        default:
            throw new NotSupportedException("Unknown read preference mode: " + readPreferenceDescription["mode"]);
    }

    return basePreference.With(tagSets: tagSets);
}
// Verifies that BsonBinaryReader can consume a stream of 0..3 back-to-back serialized
// documents, reading each one back intact.
public void BsonBinaryReader_should_support_reading_multiple_documents(
    [Range(0, 3)] int numberOfDocuments)
{
    var document = new BsonDocument("x", 1);
    var bson = document.ToBson();
    // Concatenate the same serialized bytes numberOfDocuments times.
    var input = Enumerable.Repeat(bson, numberOfDocuments).Aggregate(Enumerable.Empty<byte>(), (a, b) => a.Concat(b)).ToArray();
    var expectedResult = Enumerable.Repeat(document, numberOfDocuments);

    using (var stream = new MemoryStream(input))
    using (var binaryReader = new BsonBinaryReader(stream))
    {
        var result = new List<BsonDocument>();
        // Read until the stream is exhausted; each document holds exactly one int32 element.
        while (!binaryReader.IsAtEndOfFile())
        {
            binaryReader.ReadStartDocument();
            var name = binaryReader.ReadName();
            var value = binaryReader.ReadInt32();
            binaryReader.ReadEndDocument();
            var resultDocument = new BsonDocument(name, value);
            result.Add(resultDocument);
        }
        result.Should().Equal(expectedResult);
    }
}
// Extra helper code
// Builds the seed data set: one representative #1 hit single per decade.
static BsonDocument[] CreateSeedData()
{
    return new[]
    {
        new BsonDocument
        {
            { "Decade", "1970s" },
            { "Artist", "Debby Boone" },
            { "Title", "You Light Up My Life" },
            { "WeeksAtOne", 10 }
        },
        new BsonDocument
        {
            { "Decade", "1980s" },
            { "Artist", "Olivia Newton-John" },
            { "Title", "Physical" },
            { "WeeksAtOne", 10 }
        },
        new BsonDocument
        {
            { "Decade", "1990s" },
            { "Artist", "Mariah Carey" },
            { "Title", "One Sweet Day" },
            { "WeeksAtOne", 16 }
        }
    };
}
// static methods
/// <summary>
/// Gets the name of the index derived from the keys specification.
/// </summary>
/// <param name="keys">The keys specification.</param>
/// <returns>The name of the index.</returns>
public static string GetIndexName(BsonDocument keys)
{
    Ensure.IsNotNull(keys, nameof(keys));
    var nameBuilder = new StringBuilder();
    foreach (var element in keys)
    {
        var value = element.Value;

        // Numeric values render as their integer direction (1/-1); string values
        // (e.g. "text", "2dsphere") render as the string with spaces replaced;
        // anything else falls back to "x".
        string direction;
        switch (value.BsonType)
        {
            case BsonType.Double:
            case BsonType.Int32:
            case BsonType.Int64:
                direction = value.ToInt32().ToString();
                break;
            case BsonType.String:
                direction = value.ToString().Replace(' ', '_');
                break;
            default:
                direction = "x";
                break;
        }

        // Segments are joined with '_', as are spaces inside key names.
        if (nameBuilder.Length > 0)
        {
            nameBuilder.Append("_");
        }
        nameBuilder.Append(element.Name.Replace(' ', '_')).Append("_").Append(direction);
    }
    return nameBuilder.ToString();
}
// Verifies that every ListDatabaseNames entry point (sync/async, with/without session)
// issues a name-only ListDatabasesOperation and surfaces the returned database names.
public void ListDatabaseNames_should_invoke_the_correct_operation(
    [Values(false, true)] bool usingSession,
    [Values(false, true)] bool async)
{
    var operationExecutor = new MockOperationExecutor();
    var subject = new MongoClient(operationExecutor, DriverTestConfiguration.GetClientSettings());
    var session = CreateClientSession();
    var cancellationToken = new CancellationTokenSource().Token;
    // Canned listDatabases reply (verbatim string — its whitespace is part of the value).
    var listDatabaseNamesResult = @" { ""databases"" : [ { ""name"" : ""admin"", ""sizeOnDisk"" : 131072, ""empty"" : false }, { ""name"" : ""blog"", ""sizeOnDisk"" : 11669504, ""empty"" : false }, { ""name"" : ""test-chambers"", ""sizeOnDisk"" : 222883840, ""empty"" : false }, { ""name"" : ""recipes"", ""sizeOnDisk"" : 73728, ""empty"" : false }, { ""name"" : ""employees"", ""sizeOnDisk"" : 225280, ""empty"" : false } ], ""totalSize"" : 252534784, ""ok"" : 1 }";
    var operationResult = BsonDocument.Parse(listDatabaseNamesResult);
    operationExecutor.EnqueueResult(CreateListDatabasesOperationCursor(operationResult));

    // Exercise whichever of the four entry points this parameterization selects.
    IList<string> databaseNames;
    if (async)
    {
        if (usingSession)
        {
            databaseNames = subject.ListDatabaseNamesAsync(session, cancellationToken).GetAwaiter().GetResult().ToList();
        }
        else
        {
            databaseNames = subject.ListDatabaseNamesAsync(cancellationToken).GetAwaiter().GetResult().ToList();
        }
    }
    else
    {
        if (usingSession)
        {
            databaseNames = subject.ListDatabaseNames(session, cancellationToken).ToList();
        }
        else
        {
            databaseNames = subject.ListDatabaseNames(cancellationToken).ToList();
        }
    }

    // The executed read call must carry the right session (explicit or implicit) and token.
    var call = operationExecutor.GetReadCall<IAsyncCursor<BsonDocument>>();
    if (usingSession)
    {
        call.SessionId.Should().BeSameAs(session.ServerSession.Id);
    }
    else
    {
        call.UsedImplicitSession.Should().BeTrue();
    }
    call.CancellationToken.Should().Be(cancellationToken);

    // The operation itself must be ListDatabasesOperation with NameOnly = true.
    var operation = call.Operation.Should().BeOfType<ListDatabasesOperation>().Subject;
    operation.NameOnly.Should().Be(true);
    databaseNames.Should().Equal(operationResult["databases"].AsBsonArray.Select(record => record["name"].AsString));
}
// Records a hash-to-package mapping document in the hash collection.
public void AddHashAsync(string hash, ObjectId id)
{
    var entry = new HashEntry() { hash = hash, PackageRef = id };
    // BUG FIX: the original discarded the InsertOneAsync task (fire-and-forget), so
    // insert failures were unobservable and the method could return before the write
    // happened. Block until the insert completes so errors surface to the caller.
    // NOTE(review): ideally this method would return the Task and be awaited; that
    // signature change is deferred to avoid touching callers.
    m_Hashcollection.InsertOneAsync(entry.ToBsonDocument()).GetAwaiter().GetResult();
}
// Wires up the players page: open/close of the search and add-player panels, player
// creation (subject to the studio's purchased player quota), audit logging, and
// incremental paging of the player list.
public PagePlayers()
{
    InitializeComponent();
    RecivePlayersList();
    //close panel add player
    // NOTE(review): the lambda parameters here are named (e, s) — reversed from the
    // usual (sender, args) convention — so "s" is actually the event args.
    PanelAddPlayer.MouseDown += (e, s) => { if (s.Source.GetType() == typeof(Grid)) { ShowOffSubpagePlayer(); } };
    //close panel search
    PanelSearch.MouseDown += (s, e) => { if (e.Source.GetType() == typeof(Grid)) { ShowOffPanelSearch(); } };
    //show panel search
    BTNSearch.MouseDown += (s, e) => { ShowOpenPanelSearch(); };
    //show paneladdplayer
    BTNShowPanelAdd.MouseDown += (s, e) => { ShowSubpagePlayer(); };
    //actin add player
    BTNaddPlayer.MouseDown += (s, e) =>
    {
        SDK.SDK_PageDashboards.DashboardGame.PageStudios.ReciveMonetize(result =>
        {
            //limit filter: only allow creation while under the purchased player quota
            if (PlayerCount + 1 <= result["Players"])
            {
                SDK.SDK_PageDashboards.DashboardGame.PagePlayers.CreatPlayer(TextBoxUsername.Text, TextBoxPassword.Password, (resultCreated) =>
                {
                    RecivePlayersList();
                    ShowOffSubpagePlayer();
                    DashboardGame.Notifaction("Player Added", StatusMessage.Ok);
                    //add log
                    // SECURITY NOTE(review): the plaintext password is written into the
                    // audit-log detail document — confirm this is really intended.
                    var Detail = new BsonDocument { { "Username", TextBoxUsername.Text }, { "Password", TextBoxPassword.Password }, { "LocalTime", DateTime.Now } };
                    SDK.SDK_PageDashboards.DashboardGame.PageLog.AddLog("Add Player", $"You have added \" {TextBoxUsername.Text} \" player", Detail, false, resultlog => { });
                }
                );
            }
            else
            {
                DashboardGame.Notifaction("Cannot create new player. Please buy more players than payments", StatusMessage.Error);
            }
        }, () => { });
    };
    // Page 100 more players into the list per click.
    BTNSeeMorePlayer.MouseDown += (s, e) => { ReciveCount += 100; TextSeeMoreNumber.Text = ReciveCount.ToString(); RecivePlayersList(); };
}
// Persists a single document into the backing collection.
public void InsertToMongo(BsonDocument document) => collection.InsertOne(document);
// True when the BSON rendering of o equals doc element-for-element.
private bool Matches(object o, BsonDocument doc) => o.ToBsonDocument().Equals(doc);
// Asserts that the captured operation result matches the expected document.
protected override void VerifyResult(BsonDocument result) => _result.Should().Be(result);
// Runs FindOneAndReplace synchronously, capturing both the returned document and the
// version of the server that handled it (for version-dependent verification).
protected override void ExecuteSync(IMongoCollection<BsonDocument> collection)
{
    _result = collection.FindOneAndReplace(_filter, _replacement, _options);
    _serverVersion = collection.Database.Client.Cluster.Description.Servers[0].Version;
}
// Fetches the latest uploads for a YouTube channel and inserts or refreshes the
// corresponding MongoYoutubeFeeds documents, then pulls comments for each video.
public static void GetYtFeeds(string ChannelId, string AcessToken)
{
    oAuthTokenYoutube ObjoAuthTokenGPlus = new oAuthTokenYoutube(AppSettings.googleClientId, AppSettings.googleClientSecret, AppSettings.googleRedirectionUrl);
    Activities _ObjYtActivities = new Activities(AppSettings.googleClientId, AppSettings.googleClientSecret, AppSettings.googleRedirectionUrl);
    string apiKey = AppSettings.googleApiKey_TestApp;
    try
    {
        MongoYoutubeFeeds _ObjMongoYtFeeds;
        string _ChannelVideos = _ObjYtActivities.GetYtVideos(ChannelId, apiKey);
        JObject J_ChannelVideos = JObject.Parse(_ChannelVideos);
        foreach (var item in J_ChannelVideos["items"])
        {
            // Reset the shared per-video comment accumulator before each video.
            _lstGlobalComVideos.Clear();
            _ObjMongoYtFeeds = new MongoYoutubeFeeds();
            _ObjMongoYtFeeds.Id = ObjectId.GenerateNewId();
            _ObjMongoYtFeeds.YtChannelId = ChannelId;
            // NOTE(review): each field extraction is wrapped in its own empty catch,
            // silently tolerating missing JSON nodes; consider logging failures instead.
            try { _ObjMongoYtFeeds.YtVideoId = item["contentDetails"]["upload"]["videoId"].ToString(); } catch { }
            try { _ObjMongoYtFeeds.VdoTitle = item["snippet"]["title"].ToString(); } catch { }
            try
            {
                _ObjMongoYtFeeds.VdoDescription = item["snippet"]["description"].ToString();
                if (_ObjMongoYtFeeds.VdoDescription == "") { _ObjMongoYtFeeds.VdoDescription = "No Description"; }
            }
            catch { }
            try { _ObjMongoYtFeeds.VdoPublishDate = item["snippet"]["publishedAt"].ToString(); } catch { }
            // Prefer the high-resolution thumbnail; fall back to medium when absent.
            try { _ObjMongoYtFeeds.VdoImage = item["snippet"]["thumbnails"]["high"]["url"].ToString(); }
            catch { _ObjMongoYtFeeds.VdoImage = item["snippet"]["thumbnails"]["medium"]["url"].ToString(); }
            try { _ObjMongoYtFeeds.VdoUrl = "https://www.youtube.com/watch?v=" + _ObjMongoYtFeeds.YtVideoId; } catch { }
            try { _ObjMongoYtFeeds.VdoEmbed = "https://www.youtube.com/embed/" + _ObjMongoYtFeeds.YtVideoId; } catch { }
            try
            {
                MongoRepository YtFeedsRepo = new MongoRepository("YoutubeVideos");
                var ret = YtFeedsRepo.Find<MongoYoutubeFeeds>(t => t.YtVideoId.Equals(_ObjMongoYtFeeds.YtVideoId));
                // NOTE(review): this blocks on the async Find via Task.Run().Result —
                // works, but risks thread starvation; confirm it is intended.
                var task = Task.Run(async () => { return (await ret); });
                int count = task.Result.Count;
                if (count < 1)
                {
                    // Unknown video: insert a brand-new feed document.
                    try { YtFeedsRepo.Add(_ObjMongoYtFeeds); } catch { }
                }
                else
                {
                    // Known video: refresh its mutable fields in place.
                    try
                    {
                        FilterDefinition<BsonDocument> filter = new BsonDocument("YtVideoId", _ObjMongoYtFeeds.YtVideoId);
                        var update = Builders<BsonDocument>.Update.Set("VdoTitle", _ObjMongoYtFeeds.VdoTitle).Set("VdoDescription", _ObjMongoYtFeeds.VdoDescription).Set("VdoImage", _ObjMongoYtFeeds.VdoImage);
                        YtFeedsRepo.Update<MongoYoutubeFeeds>(update, filter);
                    }
                    catch { }
                }
                //new Thread(delegate () {
                // Fetch and refresh this video's comments synchronously.
                GetYtComments(_ObjMongoYtFeeds.YtVideoId, apiKey, ChannelId);
                UpdateYtComments(_ObjMongoYtFeeds.YtVideoId, apiKey, ChannelId);
                //}).Start();
            }
            catch { }
        }
    }
    catch (Exception ex) { }
}
// Fetches the top-level comments of a YouTube video and inserts or refreshes the
// corresponding MongoYoutubeComments documents.
public static void GetYtComments(string VideoId, string apiKey, string ChannelId)
{
    Video _Videos = new Video(AppSettings.googleClientId, AppSettings.googleClientSecret, AppSettings.googleRedirectionUrl);
    try
    {
        //Domain.Socioboard.Domain.GooglePlusActivities _GooglePlusActivities = null;
        MongoYoutubeComments _ObjMongoYtComments;
        string _CommentsData = _Videos.Get_CommentsBy_VideoId(VideoId, "", "", apiKey);
        JObject J_CommentsData = JObject.Parse(_CommentsData);
        foreach (var item in J_CommentsData["items"])
        {
            _ObjMongoYtComments = new MongoYoutubeComments();
            _ObjMongoYtComments.Id = ObjectId.GenerateNewId();
            // NOTE(review): each field extraction is wrapped in its own empty catch,
            // silently tolerating missing JSON nodes; consider logging failures instead.
            try { _ObjMongoYtComments.ChannelId = ChannelId; } catch { }
            try { _ObjMongoYtComments.videoId = item["snippet"]["videoId"].ToString(); } catch { }
            try { _ObjMongoYtComments.commentId = item["id"].ToString(); } catch { }
            try { _ObjMongoYtComments.authorDisplayName = item["snippet"]["topLevelComment"]["snippet"]["authorDisplayName"].ToString(); } catch { }
            // ".jpg" is stripped from the avatar URL — presumably to normalize it; verify.
            try { _ObjMongoYtComments.authorProfileImageUrl = item["snippet"]["topLevelComment"]["snippet"]["authorProfileImageUrl"].ToString().Replace(".jpg", ""); } catch { }
            try { _ObjMongoYtComments.authorChannelUrl = item["snippet"]["topLevelComment"]["snippet"]["authorChannelUrl"].ToString(); } catch { }
            try { _ObjMongoYtComments.authorChannelId = item["snippet"]["topLevelComment"]["snippet"]["authorChannelId"]["value"].ToString(); } catch { }
            try { _ObjMongoYtComments.commentDisplay = item["snippet"]["topLevelComment"]["snippet"]["textDisplay"].ToString(); } catch { }
            try { _ObjMongoYtComments.commentOriginal = item["snippet"]["topLevelComment"]["snippet"]["textOriginal"].ToString(); } catch { }
            try { _ObjMongoYtComments.viewerRating = item["snippet"]["topLevelComment"]["snippet"]["viewerRating"].ToString(); } catch { }
            try { _ObjMongoYtComments.likesCount = item["snippet"]["topLevelComment"]["snippet"]["likeCount"].ToString(); } catch { }
            try { _ObjMongoYtComments.publishTime = item["snippet"]["topLevelComment"]["snippet"]["publishedAt"].ToString(); } catch { }
            try { _ObjMongoYtComments.publishTimeUnix = UnixTimeFromDatetime(Convert.ToDateTime(_ObjMongoYtComments.publishTime)); } catch { }
            try { _ObjMongoYtComments.updatedTime = item["snippet"]["topLevelComment"]["snippet"]["updatedAt"].ToString(); } catch { }
            try { _ObjMongoYtComments.totalReplyCount = item["snippet"]["totalReplyCount"].ToString(); } catch { }
            // New comments start out active, unreviewed, and unassigned.
            _ObjMongoYtComments.active = true;
            _ObjMongoYtComments.review = false;
            _ObjMongoYtComments.sbGrpTaskAssign = false;
            try
            {
                _lstGlobalComVideos.Add(_ObjMongoYtComments);//Global var for update the comments
                MongoRepository youtubecommentsrepo = new MongoRepository("YoutubeVideosComments");
                var ret = youtubecommentsrepo.Find<MongoYoutubeComments>(t => t.commentId.Equals(_ObjMongoYtComments.commentId));
                // NOTE(review): blocks on the async Find via Task.Run().Result.
                var task = Task.Run(async () => { return (await ret); });
                int count = task.Result.Count;
                if (count < 1)
                {
                    // Unknown comment: insert it.
                    try { youtubecommentsrepo.Add(_ObjMongoYtComments); } catch { }
                }
                else
                {
                    // Known comment: refresh its mutable fields.
                    try
                    {
                        FilterDefinition<BsonDocument> filter = new BsonDocument("commentId", _ObjMongoYtComments.commentId);
                        var update = Builders<BsonDocument>.Update.Set("authorDisplayName", _ObjMongoYtComments.authorDisplayName).Set("authorProfileImageUrl", _ObjMongoYtComments.authorProfileImageUrl).Set("commentDisplay", _ObjMongoYtComments.commentDisplay).Set("commentOriginal", _ObjMongoYtComments.commentOriginal).Set("viewerRating", _ObjMongoYtComments.viewerRating).Set("likesCount", _ObjMongoYtComments.likesCount).Set("totalReplyCount", _ObjMongoYtComments.totalReplyCount).Set("updatedTime", _ObjMongoYtComments.updatedTime).Set("publishTimeUnix", _ObjMongoYtComments.publishTimeUnix).Set("active", _ObjMongoYtComments.active);
                        youtubecommentsrepo.Update<MongoYoutubeComments>(update, filter);
                    }
                    catch { }
                }
            }
            catch { }
        }
    }
    catch (Exception ex) { }
}
// Serializes the query document onto the wire through a fresh root serialization context.
private void WriteQuery(BsonBinaryWriter binaryWriter, BsonDocument query)
{
    var serializationContext = BsonSerializationContext.CreateRoot(binaryWriter);
    BsonDocumentSerializer.Instance.Serialize(serializationContext, query);
}
// Aggregation walkthrough: match movies by director, sort by review count, project a
// custom field set, and assert on the shape and content of the results.
public void CountMovies()
{
    // This stage finds all movies that have a specific director
    var matchStage = new BsonDocument("$match", new BsonDocument("directors", "Rob Reiner"));
    // This stage sorts the results by the number of reviews,
    // in descending order
    var sortStage = new BsonDocument("$sort", new BsonDocument("tomatoes.viewer.numReviews", -1));
    // This stage generates the projection we want
    var projectionStage = new BsonDocument("$project", new BsonDocument
    {
        { "_id", 0 },
        { "Movie Title", "$title" },
        { "Year", "$year" },
        { "Average User Rating", "$tomatoes.viewer.rating" }
    });
    /* We now put the stages together in a pipeline. Note that a
     * pipeline definition requires us to specify the input and output
     * types. In this case, the input is of type Movie, but because
     * we are using a Projection with custom fields, our output is
     * a generic BsonDocument object. To be really cool, we could
     * create a mapping class for the output type, which is what we've
     * done for you in the MFlix application. */
    var pipeline = PipelineDefinition<Movie, BsonDocument>.Create(new BsonDocument[] { matchStage, sortStage, projectionStage });
    var result = _moviesCollection.Aggregate(pipeline).ToList();
    /* Note: we're making a synchronous Aggregate() call.
     * If you want a challenge, change the line above to make an
     * asynchronous call (hint: you'll need to make 2 changes),
     * and then confirm the unit test still passes. */
    Assert.AreEqual(14, result.Count);
    var firstMovie = result[0];
    Assert.AreEqual("The Princess Bride", firstMovie.GetValue("Movie Title").AsString);
    Assert.AreEqual(1987, firstMovie.GetValue("Year").AsInt32);
    Assert.AreEqual(4.0, firstMovie.GetValue("Average User Rating").AsDouble);
    /* We specifically excluded the "Id" field in the projection stage
     * that we built in the code above, so let's make sure that field
     * wasn't included in the resulting BsonDocument. We expect the call
     * to GetValue() to throw a KeyNotFoundException exception if the
     * field doesn't exist. */
    Assert.Throws<KeyNotFoundException>(() => firstMovie.GetValue("Id"));
}
// Wraps the "databases" array of a listDatabases reply in a single-batch cursor.
private IAsyncCursor<BsonDocument> CreateListDatabasesOperationCursor(BsonDocument reply)
{
    var databaseDocuments = reply["databases"].AsBsonArray.OfType<BsonDocument>().ToList();
    return new SingleBatchAsyncCursor<BsonDocument>(databaseDocuments);
}
public static string GetAdditionalCollectionName(BsonDocument encryptedFields, CollectionNamespace mainCollectionNamespace, HelperCollectionForEncryption helperCollection) => helperCollection switch {
/// <summary>
/// Streams every document of the "same_structure" collection, deserializes
/// each one into <see cref="Models.MongoDB"/>, and returns them as a queryable.
/// </summary>
/// <returns>200 OK with all deserialized documents.</returns>
public async Task <IHttpActionResult> Get()
{
    // NOTE(review): hard-coded connection string; consider moving it to configuration.
    var client = new MongoClient("mongodb://localhost:32768/");
    var database = client.GetDatabase("test");
    var collection = database.GetCollection <BsonDocument>("same_structure");

    // An empty filter matches every document in the collection.
    var filter = new BsonDocument();
    var allDocuments = new List <Models.MongoDB>();

    // Iterate the cursor batch by batch instead of materializing one huge list.
    using (var cursor = await collection.Find(filter).ToCursorAsync())
    {
        while (await cursor.MoveNextAsync())
        {
            foreach (var document in cursor.Current)
            {
                allDocuments.Add(BsonSerializer.Deserialize <Models.MongoDB>(document));
            }
        }
    }

    // (Removed: large blocks of commented-out alternative implementations and
    // an unused JsonWriterSettings local that was only referenced from them.)
    return Ok(allDocuments.AsQueryable());
}
public void UpdateBasics() { // Update -en fråga, + det som ska uppdateras var collection = GetCollection(MyName); collection.Update( new QueryDocument { { "Name", "Vlad Tepes" } }, new UpdateDocument { { "$set", new BsonDocument("Adress", "SnackBaren") } } ); // Id vs _id //Adress felstavat , kommer skapa ett nytt adress fält // Större frihet innbär större ansvar. // Kan var bättre att köra typat... var typedCollection = GetCollection(this); var query = Query <PersonDetails> .EQ(e => e.Name, "Nick Cave"); var update = Update <PersonDetails> .Set(e => e.Name, "Gonzo"); // update modifiers collection.Update(query, update); var query2 = Query <PersonDetails> .EQ(e => e.Name, "Magnus"); var update2 = Update <PersonDetails> .Set(e => e.Adress, new List <Model.Adress>()); // update modifiers collection.Update(query2, update2); //nästa skulle inte fungera om vi inte tar bort adresserna som vi tilldelade förut // härnäst väljer vi att arbeta otypat med hjälp av en json sträng som vi serialiserar var jsonQuery = "{ Name :{ $in: [ 'Vlad Tepes','Gonzo']}}"; BsonDocument doc = MongoDB.Bson.Serialization .BsonSerializer .Deserialize <BsonDocument>(jsonQuery); collection.Update( new QueryDocument(doc), new UpdateDocument { { "$set", new BsonDocument("Adress", new BsonArray()) } }, UpdateFlags.Multi ); var query3 = Query <PersonDetails> .In(e => e.Name, new List <string> { "Vlad Tepes", "Gonzo" }); var update3 = Update <PersonDetails> .AddToSet( e => e.Adress, new Model.Adress { Street = "Gonzo Street", Nr = 4, City = "Las Vegas", Country = "USA" }); // update modifiers collection.Update(query3, update3); }
private SDBMessage AdminCommand(string command, BsonDocument matcher, BsonDocument selector, BsonDocument orderBy, BsonDocument hint) { BsonDocument dummyObj = new BsonDocument(); IConnection connection = sdb.Connection; SDBMessage sdbMessage = new SDBMessage(); sdbMessage.OperationCode = Operation.OP_QUERY; sdbMessage.CollectionFullName = command; sdbMessage.Version = SequoiadbConstants.DEFAULT_VERSION; sdbMessage.W = SequoiadbConstants.DEFAULT_W; sdbMessage.Padding = 0; sdbMessage.Flags = 0; sdbMessage.NodeID = SequoiadbConstants.ZERO_NODEID; sdbMessage.RequestID = 0; sdbMessage.SkipRowsCount = 0; sdbMessage.ReturnRowsCount = -1; // matcher if (null == matcher) { sdbMessage.Matcher = dummyObj; } else { sdbMessage.Matcher = matcher; } // selector if (null == selector) { sdbMessage.Selector = dummyObj; } else { sdbMessage.Selector = selector; } // orderBy if (null == orderBy) { sdbMessage.OrderBy = dummyObj; } else { sdbMessage.OrderBy = orderBy; } // hint if (null == hint) { sdbMessage.Hint = dummyObj; } else { sdbMessage.Hint = hint; } byte[] request = SDBMessageHelper.BuildQueryRequest(sdbMessage, isBigEndian); connection.SendMessage(request); SDBMessage rtnSDBMessage = SDBMessageHelper.MsgExtractReply(connection.ReceiveMessage(isBigEndian), isBigEndian); rtnSDBMessage = SDBMessageHelper.CheckRetMsgHeader(sdbMessage, rtnSDBMessage); return(rtnSDBMessage); }
public WriteProtocolException(string message, BsonDocument result) : base(message, null) { _result = result; // can be null }
public void SaveBasics() { DropPersons(); var collection = GetCollection(MyName); //Rensa bort gammalt skräp var firstId = ObjectId.GenerateNewId(); collection.Save(new PersonDetails { Id = firstId, Name = "Magnus", DateOfBirth = new DateTime(1969, 03, 20), Adress = new List <Model.Adress> { new Model.Adress { Street = "Vestmannabraut", Nr = 72, City = "Vestmannaeyjar", Country = "Island" }, new Model.Adress { Street = "Funafold", Nr = 47, City = "Reykjavik", Country = "Island" }, new Model.Adress { Street = "Linnegatan", Nr = 26, City = "Göteborg", Country = "Sverige" }, new Model.Adress { Street = "Snäckvägen", Nr = 26, City = "Göteborg", Country = "Island" }, } }); var ActorPerson = new Model.PersonDetails { Name = "Humpfrey Bogart", DateOfBirth = new DateTime(1920, 02, 20), Id = ObjectId.GenerateNewId(), Adress = new List <Model.Adress> { new Model.Adress { Street = "Bogart Street", Nr = 4, City = "Los Angeles", Country = "USA" }, new Model.Adress { Street = "Bogart Bungalow", Nr = 12, City = "Acapulco", Country = "Mexico" }, new Model.Adress { Street = "Bogart Street", Nr = 4, City = "Boston", Country = "USA" } } }; collection.Save(ActorPerson); collection.Insert(new PersonDetails { Name = "Vlad Tepes", Id = ObjectId.GenerateNewId(), DateOfBirth = new DateTime(1969, 3, 20) } ); BsonDocument AnotherGuy = new BsonDocument { { "Name", "Nick Cave" }, { "Adress", "A small town in England" }, { "Occupation", "Musician" }, { "_id", ObjectId.GenerateNewId() } }; collection.Save(AnotherGuy); //AnotherGuy //luktar som json --- // Där väljer jag att helt strunta i autoserialiseringen och skapa ett BsonDokument direkt //om man har ett gigantiskt json doc som man vill dunka in så kan man göra så här:: //string json = LäsFetJsonFil() //BsonDocument document = BsonDocument.Parse(json); //Ej angivna textvärden får null bydefault + // ej angivna numeriska fält får 0 // Save updaterar existerande post, // Insert Failar om post existerar // Kolla in date of birth i db.... }
/// <summary> /// Get all document using an indexInfo as start point (_id index). /// </summary> public IEnumerable <BsonDocument> GetDocuments(IndexInfo index) { var indexPages = this.VisitIndexPages(index.HeadPageID); foreach (var indexPageID in indexPages) { var indexPage = this.ReadPage(indexPageID); foreach (var node in indexPage["nodes"].AsArray) { var dataBlock = node["dataBlock"]; // if datablock link to a data page if (dataBlock["pageID"].AsInt32 != -1) { // read dataPage and data block var dataPage = this.ReadPage((uint)dataBlock["pageID"].AsInt32); if (dataPage["pageType"].AsInt32 != 4) { continue; } var block = dataPage["blocks"].AsArray.FirstOrDefault(x => x["index"] == dataBlock["index"]).AsDocument; if (block == null) { continue; } // read byte[] from block or from extend pages var data = block["extendPageID"] == -1 ? block["data"].AsBinary : this.ReadExtendData((uint)block["extendPageID"].AsInt32); if (data.Length == 0) { continue; } // BSON format still same from all version var doc = BsonSerializer.Deserialize(data); // change _id PK in _chunks collection if (index.Collection == "_chunks") { var parts = doc["_id"].AsString.Split('\\'); if (!int.TryParse(parts[1], out var n)) { throw LiteException.InvalidFormat("_id"); } doc["_id"] = new BsonDocument { ["f"] = parts[0], ["n"] = n }; } yield return(doc); } } } }
protected WriteProtocolException(SerializationInfo info, StreamingContext context) : base(info, context) { _result = BsonSerializer.Deserialize <BsonDocument>((byte[])info.GetValue("_result", typeof(byte[]))); }
public async Task <IActionResult> NodesPublic([FromQuery(Name = "$filter")] string filter, [FromQuery] PaginationRequest request, [FromQuery(Name = "$projection"), ModelBinder(BinderType = typeof(ArrayModelBinder))] IEnumerable <string> fields) { if (!fields?.Select(s => s.ToLower()).Contains("id") ?? false) { var fieldList = new List <string>(); fieldList.AddRange(fields); fieldList.Add("Id"); fields = fieldList; } BsonDocument filterDocument = null; try { filterDocument = new OrganizeFilter().ByQueryString(filter); } catch { Log.Error($"Filter syntax error '{filter}'"); return(BadRequest()); } var projection = fields.ToMongoDBProjection(); var aggregation = _accessPermissions.Aggregate() .Match(new BsonDocument("IsPublic", true)) .Lookup <BsonDocument, BsonDocument>("Nodes", "_id", "_id", "Nodes") .Unwind("Nodes", new AggregateUnwindOptions <BsonDocument> { PreserveNullAndEmptyArrays = true }) .GraphLookup(_nodes, "Nodes.ParentId", "_id", "$Nodes.ParentId", "Parents") .Match(new BsonDocument("$or", new BsonArray( new[] { new BsonDocument("Parents", new BsonDocument("$size", 0)), new BsonDocument("Parents.IsDeleted", new BsonDocument("$ne", true)) }))) .AppendStage <BsonDocument>(BsonDocument.Parse(@" { $addFields: { 'Nodes.Order': { $cond: { if: { $eq: ['$Type', 'Folder'] }, then: 1, else: 100 } }, 'Nodes.AccessPermissions._id': ""$_id"", 'Nodes.AccessPermissions.IsPublic': ""$IsPublic"", 'Nodes.AccessPermissions.Users': ""$Users"", 'Nodes.AccessPermissions.Groups': ""$Groups"" } }")) .ReplaceRoot(new BsonValueAggregateExpressionDefinition <BsonDocument, BsonDocument>(@"$Nodes")) .Sort("{ Order:1, Name:1 }") .Project("{ Order: 0 }") .Match(filterDocument) .Project <dynamic>(projection); var list = await aggregation.ToPagedListAsync(request.PageNumber, request.PageSize); this.AddPaginationHeader(request, list, nameof(NodesPublic), null, filter, fields); return(Ok(list)); }
private ConnectionDescription UpdateConnectionIdWithServerValue(ConnectionDescription description, BsonDocument getLastErrorResult) { if (getLastErrorResult.TryGetValue("connectionId", out var connectionIdBsonValue)) { description = UpdateConnectionIdWithServerValue(description, connectionIdBsonValue.ToInt32()); } return(description); }
/// <summary>
/// Applies a simulated application error (command/network/timeout) to the
/// server identified by the test document's address.
/// </summary>
/// <param name="applicationError">The JSON-driven test description of the error.</param>
private void ApplyApplicationError(BsonDocument applicationError)
{
    var expectedKeys = new[]
    {
        "address",
        "generation", // optional
        "maxWireVersion",
        "when",
        "type",
        "response" // optional
    };
    JsonDrivenHelper.EnsureAllFieldsAreValid(applicationError, expectedKeys);

    var address = applicationError["address"].AsString;
    var endPoint = EndPointHelper.Parse(address);
    var server = (Server)_serverFactory.GetServer(endPoint);
    var connectionId = new ConnectionId(server.ServerId);
    var type = applicationError["type"].AsString;
    var maxWireVersion = applicationError["maxWireVersion"].AsInt32;

    Exception simulatedException = null;
    switch (type)
    {
        case "command":
            var response = applicationError["response"].AsBsonDocument;
            var command = new BsonDocument("Link", "start!");
            simulatedException = ExceptionMapper.MapNotPrimaryOrNodeIsRecovering(connectionId, command, response, "errmsg"); // can return null
            break;
        case "network":
            {
                var innerException = CoreExceptionHelper.CreateException("IOExceptionWithNetworkUnreachableSocketException");
                simulatedException = new MongoConnectionException(connectionId, "Ignorance, yet knowledge.", innerException);
                break;
            }
        case "timeout":
            {
                var innerException = CoreExceptionHelper.CreateException("IOExceptionWithTimedOutSocketException");
                simulatedException = new MongoConnectionException(connectionId, "Chaos, yet harmony.", innerException);
                break;
            }
        default:
            throw new ArgumentException($"Unsupported value of {type} for type");
    }

    var mockConnection = new Mock <IConnectionHandle>();
    var isMasterResult = new IsMasterResult(new BsonDocument { { "compressors", new BsonArray() } });
    var serverVersion = WireVersionHelper.MapWireVersionToServerVersion(maxWireVersion);
    var buildInfoResult = new BuildInfoResult(new BsonDocument { { "version", serverVersion } });
    mockConnection
        .SetupGet(c => c.Description)
        .Returns(new ConnectionDescription(connectionId, isMasterResult, buildInfoResult));

    int generation = 0;
    if (applicationError.TryGetValue("generation", out var generationBsonValue))
    {
        generation = generationBsonValue.AsInt32;
        if (simulatedException is MongoConnectionException mongoConnectionException)
        {
            mongoConnectionException.Generation = generation;
        }
    }
    // BUGFIX: the original set up the Generation getter twice with identical
    // values; one setup is sufficient.
    mockConnection.SetupGet(c => c.Generation).Returns(generation);

    if (simulatedException != null)
    {
        var when = applicationError["when"].AsString;
        switch (when)
        {
            case "beforeHandshakeCompletes":
                server.HandleBeforeHandshakeCompletesException(simulatedException);
                break;
            case "afterHandshakeCompletes":
                server.HandleChannelException(mockConnection.Object, simulatedException);
                break;
            default:
                throw new ArgumentException($"Unsupported value of {when} for when.");
        }
    }
}
/// <summary>
/// Read all database pages from v7 structure into a flexible BsonDocument - only read what really needs
/// </summary>
/// <param name="pageID">Zero-based page number inside the v7 datafile (4k pages).</param>
/// <returns>The parsed page document, or null when pageID points past the end of the stream.</returns>
private BsonDocument ReadPage(uint pageID)
{
    if (pageID * V7_PAGE_SIZE > _stream.Length) return(null);

    _stream.Position = pageID * V7_PAGE_SIZE; // v7 uses 4k page size

    _stream.Read(_buffer, 0, V7_PAGE_SIZE);

    // decrypt encrypted page (except header page - header are plain data)
    if (_aes != null && pageID > 0)
    {
        _buffer = _aes.Decrypt(_buffer);
    }

    var reader = new ByteReader(_buffer);

    // reading page header (common to all page types)
    var page = new BsonDocument
    {
        ["pageID"] = (int)reader.ReadUInt32(),
        ["pageType"] = (int)reader.ReadByte(),
        ["prevPageID"] = (int)reader.ReadUInt32(),
        ["nextPageID"] = (int)reader.ReadUInt32(),
        ["itemCount"] = (int)reader.ReadUInt16()
    };

    // skip freeByte + reserved
    reader.ReadBytes(2 + 8);

    #region Header (1)

    // read header
    if (page["pageType"] == 1)
    {
        var info = reader.ReadString(27);
        var ver = reader.ReadByte();

        // reject anything that is not a valid v7 datafile
        if (string.CompareOrdinal(info, HeaderPage.HEADER_INFO) != 0 || ver != 7)
        {
            throw LiteException.InvalidDatabase();
        }

        // skip ChangeID + FreeEmptyPageID + LastPageID
        reader.ReadBytes(2 + 4 + 4);
        page["userVersion"] = (int)reader.ReadUInt16();
        page["password"] = reader.ReadBytes(20);
        page["salt"] = reader.ReadBytes(16);
        page["collections"] = new BsonArray();

        var cols = reader.ReadByte();

        // collection directory: name + first collection pageID
        for (var i = 0; i < cols; i++)
        {
            page["collections"].AsArray.Add(new BsonDocument
            {
                ["name"] = reader.ReadString(),
                ["pageID"] = (int)reader.ReadUInt32()
            });
        }
    }

    #endregion

    #region Collection (2)

    // collection page
    else if (page["pageType"] == 2)
    {
        page["collectionName"] = reader.ReadString();
        page["indexes"] = new BsonArray();
        reader.ReadBytes(12);

        // 16 fixed index slots per collection page
        for (var i = 0; i < 16; i++)
        {
            var index = new BsonDocument();

            var field = reader.ReadString();
            var eq = field.IndexOf('=');

            // "name=expression" encodes an expression index;
            // a bare name indexes the field itself ($.<field>)
            if (eq > 0)
            {
                index["name"] = field.Substring(0, eq);
                index["expression"] = field.Substring(eq + 1);
            }
            else
            {
                index["name"] = field;
                index["expression"] = "$." + field;
            }

            index["unique"] = reader.ReadBoolean();
            index["headPageID"] = (int)reader.ReadUInt32();

            // skip HeadNode (index) + TailNode + FreeIndexPageID
            reader.ReadBytes(2 + 6 + 4);

            // an empty field name marks an unused index slot
            if (field.Length > 0)
            {
                page["indexes"].AsArray.Add(index);
            }
        }
    }

    #endregion

    #region Index (3)

    else if (page["pageType"] == 3)
    {
        page["nodes"] = new BsonArray();

        for (var i = 0; i < page["itemCount"].AsInt32; i++)
        {
            var node = new BsonDocument
            {
                ["index"] = (int)reader.ReadUInt16()
            };

            // levels = height of this skip-list node (determines trailing Prev/Next pairs)
            var levels = reader.ReadByte();

            // skip Slot + PrevNode + NextNode
            reader.ReadBytes(1 + 6 + 6);

            var length = reader.ReadUInt16();

            // skip DataType + KeyValue
            reader.ReadBytes(1 + length);

            node["dataBlock"] = new BsonDocument
            {
                ["pageID"] = (int)reader.ReadUInt32(),
                ["index"] = (int)reader.ReadUInt16()
            };

            // reading Prev[0]
            node["prev"] = new BsonDocument
            {
                ["pageID"] = (int)reader.ReadUInt32(),
                ["index"] = (int)reader.ReadUInt16()
            };

            // reading Next[0]
            node["next"] = new BsonDocument
            {
                ["pageID"] = (int)reader.ReadUInt32(),
                ["index"] = (int)reader.ReadUInt16()
            };

            // skip Prev/Next[1..N]
            reader.ReadBytes((levels - 1) * (6 + 6));

            page["nodes"].AsArray.Add(node);
        }
    }

    #endregion

    #region Data (4)

    else if (page["pageType"] == 4)
    {
        page["blocks"] = new BsonArray();

        for (var i = 0; i < page["itemCount"].AsInt32; i++)
        {
            var block = new BsonDocument
            {
                ["index"] = (int)reader.ReadUInt16(),
                ["extendPageID"] = (int)reader.ReadUInt32()
            };

            var length = reader.ReadUInt16();

            block["data"] = reader.ReadBytes(length);

            page["blocks"].AsArray.Add(block);
        }
    }

    #endregion

    #region Extend (5)

    else if (page["pageType"] == 5)
    {
        // extend page: itemCount holds the byte length stored in this page
        page["data"] = reader.ReadBytes(page["itemCount"].AsInt32);
    }

    #endregion

    return(page);
}
private void VerifyServerPropertiesNotInServerDescription(IClusterableServer actualServer, BsonDocument expectedServer, string phaseDescription) { if (expectedServer.TryGetValue("pool", out var poolValue)) { switch (poolValue) { case BsonDocument poolDocument: if (poolDocument.Values.Count() == 1 && poolDocument.TryGetValue("generation", out var generationValue) && generationValue is BsonInt32 generation) { VerifyServerGeneration(actualServer, generation.Value, phaseDescription); break; } throw new FormatException($"Invalid schema for pool."); default: throw new FormatException($"Invalid topologyVersion BSON type: {poolValue.BsonType}."); } } }
protected override IEnumerable <JsonDrivenTestCase> CreateTestCases(BsonDocument document) { var name = GetTestCaseName(document, document, 0); yield return(new JsonDrivenTestCase(name, document, document)); }
/// <summary>
/// Verifies the cluster description against the expected "outcome" document
/// of an SDAM JSON spec test phase.
/// </summary>
private void VerifyOutcome(BsonDocument outcome, string phaseDescription)
{
    var expectedNames = new[] { "compatible", "logicalSessionTimeoutMinutes", "pool", "servers", "setName", "topologyType", "topologyVersion", "maxSetVersion", "maxElectionId" };
    JsonDrivenHelper.EnsureAllFieldsAreValid(outcome, expectedNames);

    var expectedTopologyType = (string)outcome["topologyType"];
    VerifyTopology(_cluster, expectedTopologyType, phaseDescription);

    var actualDescription = _cluster.Description;

    // Compare the sets of server endpoints first, then each description in detail.
    var actualServersEndpoints = actualDescription.Servers.Select(x => x.EndPoint).ToList();
    var expectedServers = outcome["servers"].AsBsonDocument.Elements.Select(x => new
    {
        EndPoint = EndPointHelper.Parse(x.Name),
        Description = (BsonDocument)x.Value
    });
    actualServersEndpoints.WithComparer(EndPointHelper.EndPointEqualityComparer).Should().BeEquivalentTo(expectedServers.Select(x => x.EndPoint).WithComparer(EndPointHelper.EndPointEqualityComparer));

    // (Removed an unused, never-enumerated `actualServers` LINQ local.)
    foreach (var actualServerDescription in actualDescription.Servers)
    {
        var expectedServer = expectedServers.Single(x => EndPointHelper.EndPointEqualityComparer.Equals(x.EndPoint, actualServerDescription.EndPoint));
        VerifyServerDescription(actualServerDescription, expectedServer.Description, phaseDescription);
        VerifyServerPropertiesNotInServerDescription(_serverFactory.GetServer(actualServerDescription.EndPoint), expectedServer.Description, phaseDescription);
    }

    if (outcome.TryGetValue("maxSetVersion", out var maxSetVersion))
    {
        if (_cluster is MultiServerCluster multiServerCluster)
        {
            multiServerCluster._maxElectionInfo_setVersion().Should().Be(maxSetVersion.AsInt32);
        }
        else
        {
            throw new Exception($"Expected MultiServerCluster but got {_cluster.GetType()}");
        }
    }

    if (outcome.TryGetValue("maxElectionId", out var maxElectionId))
    {
        if (_cluster is MultiServerCluster multiServerCluster)
        {
            multiServerCluster._maxElectionInfo_electionId().Should().Be(new ElectionId((ObjectId)maxElectionId));
        }
        else
        {
            throw new Exception($"Expected MultiServerCluster but got {_cluster.GetType()}");
        }
    }

    if (outcome.Contains("setName"))
    {
        // TODO: assert something against setName
    }

    if (outcome.Contains("logicalSessionTimeoutMinutes"))
    {
        TimeSpan?expectedLogicalSessionTimeout;
        switch (outcome["logicalSessionTimeoutMinutes"].BsonType)
        {
            case BsonType.Null:
                expectedLogicalSessionTimeout = null;
                break;
            case BsonType.Int32:
            case BsonType.Int64:
                expectedLogicalSessionTimeout = TimeSpan.FromMinutes(outcome["logicalSessionTimeoutMinutes"].ToDouble());
                break;
            default:
                // BUGFIX: the original message reported outcome["setName"].BsonType here.
                throw new FormatException($"Invalid logicalSessionTimeoutMinutes BSON type: {outcome["logicalSessionTimeoutMinutes"].BsonType}.");
        }
        actualDescription.LogicalSessionTimeout.Should().Be(expectedLogicalSessionTimeout);
    }

    if (outcome.Contains("compatible"))
    {
        var expectedIsCompatibleWithDriver = outcome["compatible"].ToBoolean();
        actualDescription.IsCompatibleWithDriver.Should().Be(expectedIsCompatibleWithDriver);
    }
}
/// <summary>
/// Verifies a single server description against its expected spec-test document.
/// </summary>
private void VerifyServerDescription(ServerDescription actualDescription, BsonDocument expectedDescription, string phaseDescription)
{
    JsonDrivenHelper.EnsureAllFieldsAreValid(expectedDescription, "electionId", "pool", "setName", "setVersion", "topologyVersion", "type");

    // Map the spec's type string onto the driver's ServerType enum
    // (anything unrecognized is expected to be Unknown).
    var expectedType = (string)expectedDescription["type"];
    switch (expectedType)
    {
        case "RSPrimary":
            actualDescription.Type.Should().Be(ServerType.ReplicaSetPrimary);
            break;
        case "RSSecondary":
            actualDescription.Type.Should().Be(ServerType.ReplicaSetSecondary);
            break;
        case "RSArbiter":
            actualDescription.Type.Should().Be(ServerType.ReplicaSetArbiter);
            break;
        case "RSGhost":
            actualDescription.Type.Should().Be(ServerType.ReplicaSetGhost);
            break;
        case "RSOther":
            actualDescription.Type.Should().Be(ServerType.ReplicaSetOther);
            break;
        case "Mongos":
            actualDescription.Type.Should().Be(ServerType.ShardRouter);
            break;
        case "Standalone":
            actualDescription.Type.Should().Be(ServerType.Standalone);
            break;
        default:
            actualDescription.Type.Should().Be(ServerType.Unknown);
            break;
    }

    if (expectedDescription.Contains("setName"))
    {
        string expectedSetName;
        switch (expectedDescription["setName"].BsonType)
        {
            case BsonType.Null:
                expectedSetName = null;
                break;
            case BsonType.String:
                // FIX: removed a stray empty statement (";;") from the original.
                expectedSetName = expectedDescription["setName"].AsString;
                break;
            default:
                throw new FormatException($"Invalid setName BSON type: {expectedDescription["setName"].BsonType}.");
        }
        actualDescription.ReplicaSetConfig?.Name.Should().Be(expectedSetName);
    }

    if (expectedDescription.Contains("setVersion"))
    {
        int?expectedSetVersion;
        switch (expectedDescription["setVersion"].BsonType)
        {
            case BsonType.Null:
                expectedSetVersion = null;
                break;
            case BsonType.Int32:
            case BsonType.Int64:
                expectedSetVersion = expectedDescription["setVersion"].ToInt32();
                break;
            default:
                throw new FormatException($"Invalid setVersion BSON type: {expectedDescription["setVersion"].BsonType}.");
        }
        actualDescription.ReplicaSetConfig?.Version.Should().Be(expectedSetVersion);
    }

    if (expectedDescription.Contains("electionId"))
    {
        ElectionId expectedElectionId;
        switch (expectedDescription["electionId"].BsonType)
        {
            case BsonType.Null:
                expectedElectionId = null;
                break;
            case BsonType.ObjectId:
                expectedElectionId = new ElectionId(expectedDescription["electionId"].AsObjectId);
                break;
            default:
                throw new FormatException($"Invalid electionId BSON type: {expectedDescription["electionId"].BsonType}.");
        }
        actualDescription.ElectionId.Should().Be(expectedElectionId);
    }

    if (expectedDescription.TryGetValue("topologyVersion", out var topologyVersionValue))
    {
        switch (topologyVersionValue)
        {
            case BsonDocument topologyVersion:
                TopologyVersion expectedTopologyVersion = TopologyVersion.FromBsonDocument(topologyVersion);
                expectedTopologyVersion.Should().NotBeNull();
                actualDescription.TopologyVersion.Should().Be(expectedTopologyVersion, phaseDescription);
                break;
            case BsonNull _:
                actualDescription.TopologyVersion.Should().BeNull();
                break;
            default:
                throw new FormatException($"Invalid topologyVersion BSON type: {topologyVersionValue.BsonType}.");
        }
    }
}
private void ExecuteOperations(IMongoClient client, Dictionary <string, object> objectMap, BsonDocument test) { var factory = new JsonDrivenTestFactory(client, _databaseName, _collectionName, objectMap); foreach (var operation in test["operations"].AsBsonArray.Cast <BsonDocument>()) { var receiver = operation["object"].AsString; var name = operation["name"].AsString; var jsonDrivenTest = factory.CreateTest(receiver, name); jsonDrivenTest.Arrange(operation); if (test["async"].AsBoolean) { jsonDrivenTest.ActAsync(CancellationToken.None).GetAwaiter().GetResult(); } else { jsonDrivenTest.Act(CancellationToken.None); } jsonDrivenTest.Assert(); } }
protected override void When() { _result = ExecuteOperation(_subject); }