/// <summary>
/// Rebuilds the asker-activity data for the current month: maps the basic
/// info, deletes the previous documents for <c>_month</c>, then inserts the
/// freshly mapped ones.
/// </summary>
public async Task RunAsync()
{
    var result = new Dictionary<string, dynamic>();
    await MapBasicInfo(result);

    // clean all of the old data.
    var filter = Builders<BsonDocument>.Filter.Eq("month", _month);
    await _askerActivities.DeleteManyAsync(filter);

    // NOTE: the original used List<T>.ForEach with an async lambda, which
    // compiles to async void: the inserts were fire-and-forget, exceptions
    // were unobservable, and RunAsync could complete before any insert did.
    // A plain foreach awaits each insert and surfaces failures to the caller.
    foreach (var item in result.Select(m => m.Value))
    {
        item.month = _month;
        var line = Newtonsoft.Json.JsonConvert.SerializeObject(item, Formatting.Indented);
        using (var jsonReader = new MongoDB.Bson.IO.JsonReader(line))
        {
            var context = BsonDeserializationContext.CreateRoot(jsonReader);
            var document = _askerActivities.DocumentSerializer.Deserialize(context);
            await _askerActivities.InsertOneAsync(document);
        }
    }
}
/// <summary>
/// Creates a document in the configured collection, unless a document with
/// the supplied id already exists (in which case the existing one is returned).
/// </summary>
/// <param name="mongoDoc">The mongo doc describing connection, collection, id and payload.</param>
/// <returns>
/// The document, wrapped in a 201 CreatedAtAction result.
/// </returns>
public ActionResult<object> CreateDocument(MongoDocument mongoDoc)
{
    MongoClient client = Helper.GetMongoClient(mongoDoc);
    var database = client.GetDatabase(mongoDoc.DatabaseName);
    var collection = database.GetCollection<BsonDocument>(mongoDoc.CollectionName);

    BsonDocument document = null;
    if (!string.IsNullOrWhiteSpace(mongoDoc.Id))
    {
        // Look up by ObjectId when the id parses as one, otherwise fall back
        // to a plain string _id. A typed filter replaces the original
        // string-interpolated JSON query ("{ _id: ObjectId('…') }"), which is
        // both safer and avoids re-parsing the query text.
        if (ObjectId.TryParse(mongoDoc.Id, out ObjectId oid))
        {
            document = collection.Find(Builders<BsonDocument>.Filter.Eq("_id", oid)).FirstOrDefault();
        }
        else
        {
            document = collection.Find(x => x["_id"] == mongoDoc.Id).FirstOrDefault();
        }
    }

    if (document == null)
    {
        // No existing document: deserialize the supplied JSON payload and insert it.
        using (var reader = new MongoDB.Bson.IO.JsonReader(mongoDoc.DocumentData))
        {
            var context = BsonDeserializationContext.CreateRoot(reader);
            document = BsonDocumentSerializer.Instance.Deserialize(context);
        }
        collection.InsertOne(document);
    }

    var doc = document.ToString();
    return (CreatedAtAction(nameof(GetByDocumentId), new { id = document["_id"].ToString(), mongoDoc = mongoDoc }, doc));
}
/// <summary>
/// Maps an RPC transaction receipt onto the Caladan database model.
/// The raw logs payload, when present, is re-parsed into a BSON array.
/// </summary>
/// <param name="transaction">The source receipt.</param>
/// <returns>The equivalent database receipt.</returns>
public Caladan.Models.TransactionReceipt ConvertToDbTransactionReceipt(TransactionReceipt transaction)
{
    // Copy the scalar receipt fields one-to-one.
    var dbReceipt = new Caladan.Models.TransactionReceipt
    {
        BlockHash = transaction.BlockHash,
        BlockNumber = transaction.BlockNumber,
        From = transaction.From,
        To = transaction.To,
        TransactionHash = transaction.TransactionHash,
        TransactionIndex = transaction.TransactionIndex,
        ContractAddress = transaction.ContractAddress,
        CumulativeGasUsed = transaction.CumulativeGasUsed,
        GasUsed = transaction.GasUsed,
        LogsBloom = transaction.LogsBloom,
        Root = transaction.Root
    };

    // Logs arrive as an opaque object; serialize to text and parse as BSON.
    if (transaction.Logs != null)
    {
        var arraySerializer = new MongoDB.Bson.Serialization.Serializers.BsonArraySerializer();
        using (var reader = new MongoDB.Bson.IO.JsonReader(transaction.Logs.ToString()))
        {
            dbReceipt.Logs = arraySerializer.Deserialize(BsonDeserializationContext.CreateRoot(reader));
        }
    }

    return dbReceipt;
}
/// <summary>
/// Retrieves contributor statistics for the repository, polling until
/// GitHub has finished computing them server-side.
/// </summary>
/// <returns>One BsonDocument per contributor.</returns>
public async Task<List<BsonDocument>> GetStatistics()
{
    // retrieve stats
    var results = new List<BsonDocument>();
    var nextPageUrl = $"repos/{_repositoryUrl}/stats/contributors";

    // retrieve results
    var response = await GitClient.ExecuteAsGitGetTaskAsync(_token, nextPageUrl);

    // make sure the calculation has been completed (GitHub answers 202 while
    // it computes statistics in the background)
    // https://developer.github.com/v3/repos/statistics/#a-word-about-caching
    while (response.StatusCode == HttpStatusCode.Accepted)
    {
        // Task.Delay instead of Thread.Sleep: the original blocked a
        // thread-pool thread inside an async method while waiting.
        await Task.Delay(5000);
        response = await GitClient.ExecuteAsGitGetTaskAsync(_token, nextPageUrl);
    }

    // Deserialize the JSON array and keep only its document elements.
    using (var jsonReader = new MongoDB.Bson.IO.JsonReader(response.Content))
    {
        var serializer = new BsonArraySerializer();
        var bsonArray = serializer.Deserialize(BsonDeserializationContext.CreateRoot(jsonReader));
        results.AddRange(bsonArray.Where(o => o.IsBsonDocument).Select(o => o.AsBsonDocument));
    }
    return results;
}
/// <summary>
/// Serializes a string dictionary to JSON and deserializes it into a
/// BsonDocument filter using the collection's document serializer.
/// </summary>
/// <param name="collection">Collection whose document serializer is used.</param>
/// <param name="dict">Field-name/value pairs forming the filter.</param>
/// <returns>The filter as a BsonDocument.</returns>
private static BsonDocument GenerateFilterDocumentFromDict(IMongoCollection<BsonDocument> collection, Dictionary<string, string> dict)
{
    string conditionJson = JsonConvert.SerializeObject(dict);
    // Dispose the JsonReader: the original leaked it (IDisposable, never closed).
    using (var jr = new MongoDB.Bson.IO.JsonReader(conditionJson))
    {
        var bdc = BsonDeserializationContext.CreateRoot(jr);
        return collection.DocumentSerializer.Deserialize(bdc);
    }
}
/// <summary>
/// Serializes an arbitrary object to JSON and deserializes it into a
/// BsonDocument using the collection's document serializer.
/// </summary>
/// <param name="obj">The object to convert.</param>
/// <param name="collection">Collection whose document serializer is used.</param>
/// <returns>The object as a BsonDocument.</returns>
private static BsonDocument GenerateBsonDocumentFromObject(object obj, IMongoCollection<BsonDocument> collection)
{
    string json = JsonConvert.SerializeObject(obj);
    // Dispose the JsonReader: the original leaked it (IDisposable, never closed).
    using (var jr = new MongoDB.Bson.IO.JsonReader(json))
    {
        var context = BsonDeserializationContext.CreateRoot(jr);
        return collection.DocumentSerializer.Deserialize(context);
    }
}
/// <summary>
/// Deserializes a JSON change-stream payload into a typed
/// <c>ChangeStreamDocument&lt;BsonDocument&gt;</c>.
/// </summary>
/// <param name="json">The raw change-stream event JSON.</param>
/// <returns>The deserialized change-stream document.</returns>
private ChangeStreamDocument<BsonDocument> ConvertJsonToChangeStreamBsonDocument(string json)
{
    var serializer = new ChangeStreamDocumentSerializer<BsonDocument>(BsonDocumentSerializer.Instance);
    using (var jsonReader = new MongoDB.Bson.IO.JsonReader(json))
    {
        var rootContext = BsonDeserializationContext.CreateRoot(jsonReader);
        return serializer.Deserialize(rootContext);
    }
}
/// <summary>
/// Wraps a list in a single-key dictionary ({ listName: list }), serializes
/// it to JSON, and deserializes the result into a BsonDocument using the
/// collection's document serializer.
/// </summary>
/// <param name="list">The list to embed.</param>
/// <param name="listName">Field name under which the list is stored.</param>
/// <param name="collection">Collection whose document serializer is used.</param>
/// <returns>The wrapped list as a BsonDocument.</returns>
private static BsonDocument GenerateBsonDocumentFromListT(object list, string listName, IMongoCollection<BsonDocument> collection)
{
    Dictionary<string, object> obj = new Dictionary<string, object>();
    obj.Add(listName, list);
    string json = JsonConvert.SerializeObject(obj);
    // Dispose the JsonReader: the original leaked it (IDisposable, never closed).
    using (var jr = new MongoDB.Bson.IO.JsonReader(json))
    {
        var context = BsonDeserializationContext.CreateRoot(jr);
        return collection.DocumentSerializer.Deserialize(context);
    }
}
// xLua-generated constructor binding: builds a MongoDB.Bson.IO.JsonReader
// from the Lua arguments, dispatching on argument count/type to the matching
// .NET constructor overload. NOTE(review): generated code — logic should be
// changed by regenerating the bindings, not by hand.
static int __CreateInstance(RealStatePtr L)
{
    try
    {
        ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
        // Overload: JsonReader(string json). Arg 2 may be nil or a string.
        if (LuaAPI.lua_gettop(L) == 2 && (LuaAPI.lua_isnil(L, 2) || LuaAPI.lua_type(L, 2) == LuaTypes.LUA_TSTRING))
        {
            string _json = LuaAPI.lua_tostring(L, 2);
            MongoDB.Bson.IO.JsonReader gen_ret = new MongoDB.Bson.IO.JsonReader(_json);
            translator.Push(L, gen_ret);
            return (1);
        }
        // Overload: JsonReader(string json, JsonReaderSettings settings).
        if (LuaAPI.lua_gettop(L) == 3 && (LuaAPI.lua_isnil(L, 2) || LuaAPI.lua_type(L, 2) == LuaTypes.LUA_TSTRING) && translator.Assignable<MongoDB.Bson.IO.JsonReaderSettings>(L, 3))
        {
            string _json = LuaAPI.lua_tostring(L, 2);
            MongoDB.Bson.IO.JsonReaderSettings _settings = (MongoDB.Bson.IO.JsonReaderSettings)translator.GetObject(L, 3, typeof(MongoDB.Bson.IO.JsonReaderSettings));
            MongoDB.Bson.IO.JsonReader gen_ret = new MongoDB.Bson.IO.JsonReader(_json, _settings);
            translator.Push(L, gen_ret);
            return (1);
        }
        // Overload: JsonReader(TextReader textReader).
        if (LuaAPI.lua_gettop(L) == 2 && translator.Assignable<System.IO.TextReader>(L, 2))
        {
            System.IO.TextReader _textReader = (System.IO.TextReader)translator.GetObject(L, 2, typeof(System.IO.TextReader));
            MongoDB.Bson.IO.JsonReader gen_ret = new MongoDB.Bson.IO.JsonReader(_textReader);
            translator.Push(L, gen_ret);
            return (1);
        }
        // Overload: JsonReader(TextReader textReader, JsonReaderSettings settings).
        if (LuaAPI.lua_gettop(L) == 3 && translator.Assignable<System.IO.TextReader>(L, 2) && translator.Assignable<MongoDB.Bson.IO.JsonReaderSettings>(L, 3))
        {
            System.IO.TextReader _textReader = (System.IO.TextReader)translator.GetObject(L, 2, typeof(System.IO.TextReader));
            MongoDB.Bson.IO.JsonReaderSettings _settings = (MongoDB.Bson.IO.JsonReaderSettings)translator.GetObject(L, 3, typeof(MongoDB.Bson.IO.JsonReaderSettings));
            MongoDB.Bson.IO.JsonReader gen_ret = new MongoDB.Bson.IO.JsonReader(_textReader, _settings);
            translator.Push(L, gen_ret);
            return (1);
        }
    }
    catch (System.Exception gen_e)
    {
        // Translate any .NET exception into a Lua error instead of unwinding
        // through the Lua/C boundary.
        return (LuaAPI.luaL_error(L, "c# exception:" + gen_e));
    }
    // No overload matched the supplied Lua arguments.
    return (LuaAPI.luaL_error(L, "invalid arguments to MongoDB.Bson.IO.JsonReader constructor!"));
}
// xLua-generated binding for JsonReader.SkipValue(): invokes SkipValue on the
// JsonReader held at Lua stack slot 1; returns no Lua values.
// NOTE(review): generated code — regenerate rather than hand-edit.
static int _m_SkipValue(RealStatePtr L)
{
    try
    {
        ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
        MongoDB.Bson.IO.JsonReader gen_to_be_invoked = (MongoDB.Bson.IO.JsonReader)translator.FastGetCSObj(L, 1);
        {
            gen_to_be_invoked.SkipValue( );
            return (0);
        }
    }
    catch (System.Exception gen_e)
    {
        // Translate any .NET exception into a Lua error.
        return (LuaAPI.luaL_error(L, "c# exception:" + gen_e));
    }
}
/// <summary>
/// Queries the geocoding endpoint for the supplied address and returns the
/// parsed geocode components.
/// </summary>
/// <param name="query">The address text to geocode.</param>
/// <returns>The geocode components built from the response document.</returns>
public async Task<GeocodeComponents> LookupGeocode(string query)
{
    // Build the geocode request with the API key and address.
    var request = new RestRequest("geocode/json");
    request.AddQueryParameter("key", _apiKey);
    request.AddQueryParameter("address", query);

    // Execute and parse the JSON payload into a BsonDocument,
    // then wrap it in the geocode components model.
    var response = await RestClient.ExecuteGetTaskAsync(request);
    using (var reader = new MongoDB.Bson.IO.JsonReader(response.Content))
    {
        var documentSerializer = new BsonDocumentSerializer();
        var document = documentSerializer.Deserialize(BsonDeserializationContext.CreateRoot(reader));
        return new GeocodeComponents(document);
    }
}
// xLua-generated binding for JsonReader.ReadRegularExpression(): reads a
// BsonRegularExpression from the JsonReader at Lua stack slot 1 and pushes
// it back to Lua as one return value.
// NOTE(review): generated code — regenerate rather than hand-edit.
static int _m_ReadRegularExpression(RealStatePtr L)
{
    try
    {
        ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
        MongoDB.Bson.IO.JsonReader gen_to_be_invoked = (MongoDB.Bson.IO.JsonReader)translator.FastGetCSObj(L, 1);
        {
            MongoDB.Bson.BsonRegularExpression gen_ret = gen_to_be_invoked.ReadRegularExpression( );
            translator.Push(L, gen_ret);
            return (1);
        }
    }
    catch (System.Exception gen_e)
    {
        // Translate any .NET exception into a Lua error.
        return (LuaAPI.luaL_error(L, "c# exception:" + gen_e));
    }
}
/// <summary>
/// Retrieves every release of the repository, following the GitHub
/// "Link: ...; rel=next" pagination header until exhausted.
/// </summary>
/// <returns>One BsonDocument per release, across all pages.</returns>
public async Task<List<BsonDocument>> GetReleases()
{
    var releases = new List<BsonDocument>();
    var pageUrl = $"repos/{_repositoryUrl}/releases?page=1";

    while (true)
    {
        // Fetch the current page and collect its document entries.
        var response = await GitClient.ExecuteAsGitGetTaskAsync(_token, pageUrl);
        using (var reader = new MongoDB.Bson.IO.JsonReader(response.Content))
        {
            var arraySerializer = new BsonArraySerializer();
            var page = arraySerializer.Deserialize(BsonDeserializationContext.CreateRoot(reader));
            releases.AddRange(page.Where(v => v.IsBsonDocument).Select(v => v.AsBsonDocument));
        }

        // Without a Link header there are no further pages.
        var linkHeader = response.Headers.FirstOrDefault(h => h.Name == "Link")?.Value.ToString();
        if (linkHeader == null)
        {
            break;
        }

        // Stop when the header carries no rel="next" entry.
        var nextLink = Regex.Match(linkHeader, "<https:\\/\\/api\\.github\\.com\\/(.+?)>; rel=\"next\"");
        if (!nextLink.Success)
        {
            break;
        }

        // Continue with the next page's relative URL.
        pageUrl = nextLink.Groups[1].Value;
    }

    return releases;
}
// xLua-generated binding for JsonReader.ReadTimestamp(): reads a BSON
// timestamp (as a long) from the JsonReader at Lua stack slot 1 and pushes
// it to Lua as an int64.
// NOTE(review): generated code — regenerate rather than hand-edit.
static int _m_ReadTimestamp(RealStatePtr L)
{
    try
    {
        ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
        MongoDB.Bson.IO.JsonReader gen_to_be_invoked = (MongoDB.Bson.IO.JsonReader)translator.FastGetCSObj(L, 1);
        {
            long gen_ret = gen_to_be_invoked.ReadTimestamp( );
            LuaAPI.lua_pushint64(L, gen_ret);
            return (1);
        }
    }
    catch (System.Exception gen_e)
    {
        // Translate any .NET exception into a Lua error.
        return (LuaAPI.luaL_error(L, "c# exception:" + gen_e));
    }
}
// xLua-generated binding for JsonReader.ReturnToBookmark(bookmark): rewinds
// the JsonReader at Lua stack slot 1 to the BsonReaderBookmark passed at
// slot 2; returns no Lua values.
// NOTE(review): generated code — regenerate rather than hand-edit.
static int _m_ReturnToBookmark(RealStatePtr L)
{
    try
    {
        ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
        MongoDB.Bson.IO.JsonReader gen_to_be_invoked = (MongoDB.Bson.IO.JsonReader)translator.FastGetCSObj(L, 1);
        {
            MongoDB.Bson.IO.BsonReaderBookmark _bookmark = (MongoDB.Bson.IO.BsonReaderBookmark)translator.GetObject(L, 2, typeof(MongoDB.Bson.IO.BsonReaderBookmark));
            gen_to_be_invoked.ReturnToBookmark(_bookmark);
            return (0);
        }
    }
    catch (System.Exception gen_e)
    {
        // Translate any .NET exception into a Lua error.
        return (LuaAPI.luaL_error(L, "c# exception:" + gen_e));
    }
}
/// <summary>
/// Fetches the full GitHub user record for the author of each contributor
/// statistics document, running the lookups in parallel.
/// </summary>
/// <param name="statistics">Contributor statistics documents containing author.login.</param>
/// <returns>One user BsonDocument per statistics entry.</returns>
public async Task<List<BsonDocument>> GetUsers(List<BsonDocument> statistics)
{
    var lookups = statistics.Select(async stat =>
    {
        // The statistics document carries the author's login name.
        var login = stat["author"]["login"].AsString;

        // Retrieve the user's profile document.
        var response = await GitClient.ExecuteAsGitGetTaskAsync(_token, $"users/{login}");

        // Parse the user payload into a BsonDocument.
        using (var reader = new MongoDB.Bson.IO.JsonReader(response.Content))
        {
            var documentSerializer = new BsonDocumentSerializer();
            return documentSerializer.Deserialize(BsonDeserializationContext.CreateRoot(reader));
        }
    });

    var users = await Task.WhenAll(lookups);
    return new List<BsonDocument>(users);
}
// xLua-generated binding for JsonReader.ReadName(INameDecoder): reads the
// next element name from the JsonReader at Lua stack slot 1 using the
// decoder passed at slot 2, and pushes the name to Lua as a string.
// NOTE(review): generated code — regenerate rather than hand-edit.
static int _m_ReadName(RealStatePtr L)
{
    try
    {
        ObjectTranslator translator = ObjectTranslatorPool.Instance.Find(L);
        MongoDB.Bson.IO.JsonReader gen_to_be_invoked = (MongoDB.Bson.IO.JsonReader)translator.FastGetCSObj(L, 1);
        {
            MongoDB.Bson.IO.INameDecoder _nameDecoder = (MongoDB.Bson.IO.INameDecoder)translator.GetObject(L, 2, typeof(MongoDB.Bson.IO.INameDecoder));
            string gen_ret = gen_to_be_invoked.ReadName(_nameDecoder);
            LuaAPI.lua_pushstring(L, gen_ret);
            return (1);
        }
    }
    catch (System.Exception gen_e)
    {
        // Translate any .NET exception into a Lua error.
        return (LuaAPI.luaL_error(L, "c# exception:" + gen_e));
    }
}
/// <summary>
/// Walks every commit page of the repository, resolves each partial commit
/// entry to its full record, and raises OnRaiseCommitQueryNext once per page.
/// Pagination follows the GitHub "Link" response header.
/// </summary>
/// <exception cref="HttpRequestException">Thrown when a page request does not return 200 OK.</exception>
public async Task GetCommits()
{
    // retrieve all commits
    var pageCount = 1;
    var totalPages = 0;
    var nextPageUrl = $"repos/{_repositoryUrl}/commits?page=1";
    IRestResponse response;
    do
    {
        // retrieve page results
        response = await GitClient.ExecuteAsGitGetTaskAsync(_token, nextPageUrl);
        if (response.StatusCode != HttpStatusCode.OK)
        {
            _logger.LogError(response.Content);
            throw new HttpRequestException("Couldn't retrieve Git data, check error message for details.");
        }
        // Deserialize the page's JSON array and keep only its document entries.
        var results = new List<BsonDocument>();
        using (var jsonReader = new MongoDB.Bson.IO.JsonReader(response.Content))
        {
            var serializer = new BsonArraySerializer();
            var bsonArray = serializer.Deserialize(BsonDeserializationContext.CreateRoot(jsonReader));
            results.AddRange(bsonArray.Where(o => o.IsBsonDocument).Select(o => o.AsBsonDocument));
        }
        // the above request only provides partial commit information, go through all of them
        // and retrieve their full details (one request per commit, run concurrently)
        var fullCommitTasks = results
            .Select(async commit =>
            {
                // build commit request with the SHA as commit ID
                var sha = commit["sha"].AsString;
                var fullResponse = await GitClient.ExecuteAsGitGetTaskAsync(_token, $"repos/{_repositoryUrl}/commits/{sha}");
                // parse and return document
                var document = BsonDocument.Parse(fullResponse.Content);
                return (new Commit(document));
            })
            .ToList();
        // wait for task completion, then notify listeners about this page's commits
        var fullCommits = await Task.WhenAll(fullCommitTasks);
        OnRaiseCommitQueryNext(fullCommits);
        // make sure the response came with a header, otherwise we're done
        var linkHeader = GetHeader(response, "Link");
        if (linkHeader == null)
        {
            break;
        }
        // make sure there's a next page, otherwise we're done
        var nextLinkMatch = Regex.Match(linkHeader, "<https:\\/\\/api\\.github\\.com\\/(.+?)>; rel=\"next\"");
        if (!nextLinkMatch.Success)
        {
            break;
        }
        // get number of total pages (parsed once from the rel="last" entry)
        if (totalPages == 0)
        {
            var totalPagesMatch = Regex.Match(linkHeader, "page=(\\d+)>; rel=\"last");
            if (totalPagesMatch.Success)
            {
                totalPages = int.Parse(totalPagesMatch.Groups[1].Value);
            }
        }
        // log progress and rate-limit headroom for this page
        _logger.LogInformation(
            "Got page {0} of {1}, " +
            "X-RateLimit-Remaining: {2}, " +
            "X-RateLimit-Reset: {3}",
            pageCount,
            totalPages,
            GetHeader(response, "X-RateLimit-Remaining"),
            GetRateLimitReset(response));
        // create new request and keep going
        nextPageUrl = nextLinkMatch.Groups[1].Value;
        pageCount++;
    } while (true);
    // log the final summary after the last page has been processed
    _logger.LogInformation(
        "Got page {0} of {1}\n" +
        "\tX-RateLimit-Remaining: {2}\n" +
        "\tX-RateLimit-Reset: {3}",
        totalPages,
        totalPages,
        GetHeader(response, "X-RateLimit-Remaining"),
        GetRateLimitReset(response));
}