/// <summary>
/// Exports all unexported daily entries to CSV and returns the cursor over them.
/// </summary>
/// <returns>The cursor of exported entries, or null when there was nothing to export.</returns>
private IEnumerable<BsonDocument> ExportDailyEntries()
{
    try
    {
        MongoCursor<BsonDocument> cursor = GetUnexportedEntries("dayentry");
        // Cache the count: each MongoCursor.Count() call issues a separate count
        // command to the server, and the original queried it three times.
        long entryCount = cursor.Count();
        if (entryCount > 0)
        {
            logger.LogInfo("Exporting found unexported daily entries", entryCount);
        }
        else
        {
            logger.LogFineTrace("No unexported daily entries found.");
            // Callers distinguish "nothing to export" via null; kept for compatibility.
            return null;
        }
        logger.LogInfo(entryCount + " daily entries found.");
        ExportDailyEntriesToCSV(cursor);
        return cursor;
    }
    catch (Exception ex)
    {
        logger.LogError("Exporting hours failed.", ex);
        throw;
    }
}
/// <summary>
/// Repeatedly exports unexported timesheet entries to CSV until none remain.
/// A hard cycle cap guards against busy-looping when entries cannot be marked exported.
/// </summary>
private void ExportWorkerHours()
{
    int exportCycles = 0;
    while (true)
    {
        try
        {
            exportedLines.Clear();
            MongoCursor<BsonDocument> cursor = GetUnexportedWorkerHours();
            // Cache the count: each MongoCursor.Count() call is a separate server round trip.
            long entryCount = cursor.Count();
            if (entryCount > 0)
            {
                logger.LogInfo("Exporting found unexported timesheet entries", entryCount);
            }
            else
            {
                // Nothing left to export; normal loop exit.
                logger.LogFineTrace("No unexported timesheet entries found.");
                return;
            }
            List<TimesheetEntryFragment> timesheetEntryFragments = IntegrationHelpers.GetTimesheetFragments(cursor, MinTimeFragmentSize, database, logger, failedExports);
            logger.LogInfo(entryCount + " timesheet entries produced " + timesheetEntryFragments.Count + " fragments.");
            ExportTimesheetFragmentsToCsv(timesheetEntryFragments);
            if (exportedLines.Count > 0)
            {
                logger.LogInfo("Saving exported Visma fragments to disk.", exportedLines.Count);
                WriteExportedDataToDisk();
            }
            else
            {
                logger.LogInfo("No valid timesheet entry fragments found to export. Not saving XML document.");
            }
            MarkEntriesAsExported(cursor);
        }
        catch (Exception ex)
        {
            logger.LogError("Exporting hours failed.", ex);
            // If even a single pay entry fails, fail the whole process unless skipping is specified in config.
            if (!(bool)config["skiperrors"])
            {
                throw;
            }
        }
        exportCycles++;
        if (exportCycles > 100000)
        {
            // Message typo fixed: "i unable" -> "is unable".
            throw new HandlerException("Too many Visma export cycles. Export is likely busy looping and is unable to mark items as exported.");
        }
    }
}
/// <summary>
/// Exports unexported worker timesheet entries as an XML batch, writes the file
/// to disk, updates export counters, and marks the entries as exported.
/// </summary>
private void ExportWorkerHours()
{
    try
    {
        XmlDocument xmlDocument = (XmlDocument)exportHoursDocumentBase.Clone();
        MongoCursor<BsonDocument> cursor = GetUnexportedWorkerHours();
        // Cache the count: each MongoCursor.Count() call is a separate server round trip.
        long entryCount = cursor.Count();
        if (entryCount > 0)
        {
            logger.LogInfo("Exporting found unexported timesheet entries", entryCount);
            StartBatch(xmlDocument, "EfiAifBuffProjJournalTable");
        }
        else
        {
            logger.LogFineTrace("No unexported timesheet entries found.");
            return;
        }
        List<TimesheetEntryFragment> timesheetEntryFragments = IntegrationHelpers.GetTimesheetFragments(cursor, MinTimeFragmentSize, database, logger, failedExports, succeededExports);
        IntegrationHelpers.RemoveUnexportedFragmentTypes(timesheetEntryFragments, logger);
        logger.LogInfo(entryCount + " timesheet entries produced " + timesheetEntryFragments.Count + " fragments.");
        int exportedFragments = ExportTimesheetFragmentsToXml(timesheetEntryFragments, xmlDocument);
        if (exportedFragments > 0)
        {
            logger.LogInfo("Saving exported fragments to disk.", exportedFragments);
            WriteExportedDataToDisk(xmlDocument, FilePrefixHourData);
            WorkerTimesheetEntryFragmentsExported += exportedFragments;
            WorkerTimesheetEntryFilesCreated++;
        }
        else
        {
            logger.LogInfo("No valid timesheet entry fragments found to export. Not saving XML document.");
        }
        MarkEntriesAsExported(cursor, "timesheetentry");
    }
    catch (Exception ex)
    {
        // Failures are counted rather than rethrown; the batch is still closed below.
        logger.LogError("Exporting hours failed.", ex);
        WorkerTimesheetEntryExportsFailed++;
    }
    finally
    {
        EndBatch();
    }
}
/// <summary>
/// Exports unexported asset timesheet entries as an XML batch, writes the file
/// to disk, updates export counters, and marks the entries as exported.
/// </summary>
private void ExportAssetHours()
{
    try
    {
        XmlDocument xmlDocument = (XmlDocument)exportHoursDocumentBase.Clone();
        MongoCursor<BsonDocument> cursor = GetUnexportedAssetHours();
        // Cache the count: each MongoCursor.Count() call is a separate server round trip.
        long entryCount = cursor.Count();
        if (entryCount > 0)
        {
            logger.LogInfo("Exporting found unexported asset timesheet entries", entryCount);
            StartBatch(xmlDocument, "EfiAifBuffProjJournalTable");
        }
        else
        {
            logger.LogFineTrace("No unexported timesheet entries found. (for assets)");
            return;
        }
        List<TimesheetEntryFragment> timesheetEntryFragments = GetTimesheetFragmentsForAssets(cursor);
        logger.LogInfo(entryCount + " timesheet entries produced " + timesheetEntryFragments.Count + " fragments. (for assets)");
        int exportedFragments = ExportAssetTimesheetFragmentsToXml(timesheetEntryFragments, xmlDocument);
        if (exportedFragments > 0)
        {
            logger.LogInfo("Saving exported fragments to disk.", exportedFragments);
            WriteExportedDataToDisk(xmlDocument, FilePrefixHourData);
            AssetTimesheetEntriesExported += exportedFragments;
            AssetTimesheetEntryFilesCreated++;
        }
        else
        {
            logger.LogInfo("No valid timesheet entry fragments found to export. Not saving XML document.");
        }
        MarkEntriesAsExported(cursor, "assetentry");
    }
    catch (Exception ex)
    {
        // Failures are counted rather than rethrown; the batch is still closed below.
        logger.LogError("Exporting hours failed (for assets).", ex);
        AssetTimesheetEntryExportsFailed++;
    }
    finally
    {
        EndBatch();
    }
}
/// <summary>
/// Splits a collection of objects into an unknown number of pages with n items per page
/// (for example, if I have a list of 45 shoes and say 'shoes.Partition(10)' I will now
/// have 4 pages of 10 shoes and 1 page of 5 shoes).
/// </summary>
/// <typeparam name="T">The type of object the collection should contain.</typeparam>
/// <param name="superset">The collection of objects to be divided into subsets.</param>
/// <param name="pageSize">The maximum number of items each page may contain.</param>
/// <returns>A subset of this collection of objects, split into pages of maximum size n.</returns>
public static IEnumerable<IEnumerable<T>> Partition<T>(this MongoCursor<T> superset, int pageSize)
{
    // Cache the count: MongoCursor.Count() issues a count command per call,
    // and the original queried it twice.
    long totalCount = superset.Count();
    if (totalCount < pageSize)
    {
        // Everything fits on one page (this includes an empty cursor).
        yield return superset;
    }
    else
    {
        var numberOfPages = Math.Ceiling(totalCount / (double)pageSize);
        for (var i = 0; i < numberOfPages; i++)
        {
            // Each page is a lazily-evaluated slice of the cursor.
            yield return superset.Skip(pageSize * i).Take(pageSize);
        }
    }
}
/// <summary>
/// Counts every document in the backing collection.
/// </summary>
/// <returns>The total number of documents.</returns>
public long GetTotalCount()
{
    var allDocuments = this.MongoConnectionManager.MongoCollection.FindAll();
    return allDocuments.Count();
}
/// <summary>
/// Fetches every developer whose stack matches the given value.
/// Returns 404 when no developer matches, 500 on unexpected errors.
/// </summary>
public ActionResult<DeveloperResponses> FetchByStack(int stack)
{
    try
    {
        IMongoQuery byStack = Query<Developer>.EQ(d => (int)d.Stack, stack);
        MongoCursor<Developer> matches = context.Developer.Find(byStack);
        if (matches.Count() == 0)
        {
            return NotFound(new { Message = Constants.Non_Exist });
        }
        // Matching developers found: build the success payload.
        DeveloperResponses responses = new DeveloperResponses
        {
            Data = matches.ToList(),
            Status = true
        };
        return Ok(responses);
    }
    catch (Exception ex)
    {
        Log.LogError(ex);
        return StatusCode(500, ex.ToString());
    }
}
/// <summary>
/// Inserts an (orderid, parnr) link document into the "erpOrderPartner" collection,
/// creating the collection and its orderid index on first use. Does nothing when a
/// document for the pair already exists.
/// </summary>
public static void InsertErpOrderPartner(string orderId, string parNr, MongoDatabase mongoDatabase, ILogger logger)
{
    const string collName = "erpOrderPartner";
    // Lazily create the collection with an ascending index on orderid.
    if (!mongoDatabase.CollectionExists(collName))
    {
        logger.LogDebug("Collection doesn't exist. Creating it.", collName);
        mongoDatabase.CreateCollection(collName);
        MongoCollection<BsonDocument> createdCollection = mongoDatabase.GetCollection(collName);
        createdCollection.CreateIndex(IndexKeys.Ascending("orderid"));
    }
    MongoCollection<BsonDocument> erpEntriesCollection = mongoDatabase.GetCollection(collName);
    IMongoQuery byOrderAndPartner = Query.And(Query.EQ("orderid", orderId), Query.EQ("parnr", parNr));
    MongoCursor existing = erpEntriesCollection.Find(byOrderAndPartner);
    // Insert only when no document exists for this (orderid, parnr) pair yet.
    if (existing.Count() == 0)
    {
        BsonDocument orderPartnerDocument = new BsonDocument();
        orderPartnerDocument.Set(DBQuery.Id, ObjectId.GenerateNewId());
        orderPartnerDocument.Set("orderid", orderId);
        orderPartnerDocument.Set("parnr", parNr);
        orderPartnerDocument.Set("created", DateTime.UtcNow);
        erpEntriesCollection.Save(orderPartnerDocument, WriteConcern.Acknowledged);
    }
}
/// <summary>
/// Loads the subjects for the current academic year / class / semester into the
/// sub[] array (name and marks per subject, in insertion order) and displays them.
/// Hides the form and shows a message box when no subjects exist.
/// </summary>
private void load_subject()
{
    MongoClient client = new MongoClient("mongodb://localhost");
    MongoServer server = client.GetServer();
    MongoDatabase db = server.GetDatabase("NBA");

    // Subjects are stored in the order they were added ("srno" ascending).
    IMongoQuery filter = Query.And(
        Query.EQ("academic_year", academic_year),
        Query.EQ("clas", clas),
        Query.EQ("semester", sem));
    MongoCursor<Subjects> put = db.GetCollection<Subjects>("Subjects").Find(filter).SetSortOrder(SortBy.Ascending("srno"));

    long count = put.Count();
    if (count == 0)
    {
        this.Hide();
        MessageBox.Show("No Subjects Added!");
        return;
    }

    // Flatten each subject into two consecutive slots: name, then marks.
    int x = 0;
    foreach (Subjects subject in put)
    {
        sub[x++] = subject.Subject_Name;
        sub[x++] = subject.marks;
    }
    show_sub(Convert.ToInt32(count));
}
/// <summary>
/// Gets a paged list of users.
/// </summary>
/// <param name="page">The page of users to get (1-based; values below 1 are clamped to 1).</param>
/// <param name="pageSize">Size of the page (values below 1 fall back to 25).</param>
/// <param name="count">The number of users in the page. NOTE(review): in the legacy driver,
/// MongoCursor.Count() ignores Limit (Size() respects it), so this looks like the total
/// user count rather than the per-page count — confirm against callers.</param>
/// <returns>The users on the requested page.</returns>
public IEnumerable <T> GetPagedUsers(int page, int pageSize, out long count)
{
    // Optional parameters must appear after all other parameters
    // Clamp out-of-range paging arguments to sane defaults.
    if (page < 1)
    {
        page = 1;
    }
    if (pageSize < 1)
    {
        pageSize = 25;
    }
    MongoCursor <T> accountsQuery = _db.Users().FindAllAs <T>();
    // MongoCursor.Limit(x) restricts how many records can be retrieved
    // But MongoCursor.Skip enumerates the list, removes the first N and returns the rest as an IEnumerable<T>
    // This enumeration counts towards Limit. Once Skip is called, Limit is no longer useful.
    // Therefore to our Limit we have to add the number of records that will be skipped.
    accountsQuery.Limit = page * pageSize;
    // Count() locks the cursor, so Limit can't be called anymore
    // (statement order here is deliberate: set Limit, then Count, then Skip).
    count = accountsQuery.Count();
    // LINQ Skip lazily drops the earlier pages; at most pageSize items remain
    // because of the Limit set above.
    IEnumerable <T> accounts = accountsQuery.Skip((page - 1) * pageSize);
    return(accounts);
}
// Adds the user to the "registerinfo" collection of the "centerdb" database on the
// given server if the userid does not already exist.
// Returns 1 when the record was inserted, 0 when the userid is already taken.
// user[] layout: [0]=userid, [1]=key, [2]=name, [3]=tel, [4]=email.
public static int Add(string IP, string[] user)
{
    MongoServer server = MongoServer.Create(IP);       // connect to the server
    MongoDatabase db = server.GetDatabase("centerdb"); // open the database
    MongoCollection<BsonDocument> collection = db.GetCollection<BsonDocument>("registerinfo");

    // Check whether a document with this userid already exists.
    IMongoQuery query = Query.EQ("userid", user[0]);
    MongoCursor<BsonDocument> mc = collection.Find(query);
    if (mc.Count() != 0)
    {
        return 0; // duplicate userid — nothing inserted
    }

    // New userid: store the registration record.
    BsonDocument doc = new BsonDocument
    {
        { "userid", user[0] },
        { "key", user[1] },
        { "name", user[2] },
        { "tel", user[3] },
        { "email", user[4] }
    };
    collection.Insert(doc);
    return 1;
}
/// <summary>
/// Fetches every developer. Returns 404 when the collection is empty,
/// 500 on unexpected errors.
/// </summary>
public ActionResult<DeveloperResponses> FetchAll()
{
    try
    {
        MongoCursor<Developer> allDevelopers = context.Developer.FindAll();
        if (allDevelopers.Count() == 0)
        {
            return NotFound(new { Message = Constants.Empty_List });
        }
        // Developers found: build the success payload.
        DeveloperResponses responses = new DeveloperResponses
        {
            Status = true,
            Data = allDevelopers.ToList()
        };
        return Ok(responses);
    }
    catch (Exception ex)
    {
        Log.LogError(ex);
        return StatusCode(500, ex.ToString());
    }
}
/// <summary>
/// Runs the query projected to the ID field only and returns the matching IDs.
/// </summary>
/// <param name="query">The filter to apply.</param>
/// <returns>The IDs of all matching documents; empty when nothing matches.</returns>
private List<BsonValue> CollectKeys(IMongoQuery query)
{
    MongoCursor<BsonDocument> cursor = _collection.Find(query).SetFields(InternalField.ID);
    // Projecting an empty cursor already yields an empty list, so the former
    // Count() > 0 guard only added an extra count command to the server.
    return cursor.Select(doc => doc.GetValue(InternalField.ID)).ToList();
}
/// <summary>
/// Finds clients whose name starts with <paramref name="clientName"/> and, for each,
/// the linked physicians whose last name starts with <paramref name="physicianName"/>
/// (both case-insensitive), returning one PhysicianClientName per (physician, client) pair.
/// </summary>
public static YellowstonePathology.Business.Client.Model.PhysicianClientNameCollection GetPhysicianClientNameCollection(string clientName, string physicianName)
{
    YellowstonePathology.Business.Client.Model.PhysicianClientNameCollection result = new YellowstonePathology.Business.Client.Model.PhysicianClientNameCollection();
    YellowstonePathology.Business.Mongo.Server server = new Business.Mongo.TestServer(YellowstonePathology.Business.Mongo.MongoTestServer.LISDatabaseName);
    MongoCollection physicianClientCollection = server.Database.GetCollection<BsonDocument>("PhysicianClient");
    MongoCollection physicianCollection = server.Database.GetCollection<BsonDocument>("Physician");
    MongoCollection clientCollection = server.Database.GetCollection<BsonDocument>("Client");
    // Case-insensitive prefix match on client name, sorted for stable output.
    MongoCursor clientCursor = clientCollection.FindAs<BsonDocument>(Query.Matches("ClientName", BsonRegularExpression.Create("^" + clientName + ".*", "i"))).SetSortOrder(SortBy.Ascending("ClientName"));
    string physicianClientId = string.Empty;
    // (Removed an unused "long count = clientCursor.Count();" local — dead code
    // that cost an extra count command on the server.)
    foreach (BsonDocument clientDocument in clientCursor)
    {
        BsonValue clientId = clientDocument.GetValue("ClientId");
        // All physician links for this client.
        MongoCursor physicianClientCursor = physicianClientCollection.FindAs<BsonDocument>(Query.EQ("ClientId", clientId));
        List<BsonValue> physicianIdList = new List<BsonValue>();
        foreach (BsonDocument physicianClientDocument in physicianClientCursor)
        {
            physicianIdList.Add(physicianClientDocument.GetValue("PhysicianId"));
        }
        // Physicians linked to this client whose last name matches the prefix.
        MongoCursor physicianCursor = physicianCollection.FindAs<BsonDocument>(Query.And(Query.In("PhysicianId", physicianIdList), Query.Matches("LastName", BsonRegularExpression.Create("^" + physicianName + ".*", "i")))).SetSortOrder(SortBy.Ascending("LastName", "FirstName"));
        foreach (BsonDocument physicianDocument in physicianCursor)
        {
            // Look up the PhysicianClientId for this (physician, client) pair.
            // NOTE(review): if no link matches, physicianClientId keeps the value from a
            // previous iteration — confirm whether that carry-over is intended.
            foreach (BsonDocument physicianClientDocument in physicianClientCursor)
            {
                if (physicianClientDocument.GetValue("PhysicianId").Equals(physicianDocument.GetValue("PhysicianId")) &&
                    physicianClientDocument.GetValue("ClientId").Equals(clientId))
                {
                    physicianClientId = Mongo.ValueHelper.GetStringValue(physicianClientDocument.GetValue("PhysicianClientId"));
                    break;
                }
            }
            YellowstonePathology.Business.Client.Model.PhysicianClientName physicianClientName = new YellowstonePathology.Business.Client.Model.PhysicianClientName();
            physicianClientName.ClientId = clientId.AsInt32;
            physicianClientName.ClientName = Mongo.ValueHelper.GetStringValue(clientDocument.GetValue("ClientName"));
            physicianClientName.Fax = Mongo.ValueHelper.GetStringValue(clientDocument.GetValue("Fax"));
            physicianClientName.FirstName = Mongo.ValueHelper.GetStringValue(physicianDocument.GetValue("FirstName"));
            physicianClientName.LastName = Mongo.ValueHelper.GetStringValue(physicianDocument.GetValue("LastName"));
            physicianClientName.PhysicianClientId = physicianClientId;
            physicianClientName.PhysicianId = physicianDocument.GetValue("PhysicianId").AsInt32;
            physicianClientName.Telephone = Mongo.ValueHelper.GetStringValue(clientDocument.GetValue("Telephone"));
            result.Add(physicianClientName);
        }
    }
    return result;
}
// Integration test: uploads an image through the station's attachment handler
// (local port 80) backed by a dummy cloud endpoint (port 8080), then verifies
// both the file saved to disk and the attachment document stored in MongoDB.
public void TestStationRecv_OldOriginalImage()
{
    using (HttpServer cloud = new HttpServer(8080))
    using (HttpServer server = new HttpServer(80))
    {
        // Driver/group fixture the FileStorage resolves its save folder from.
        List <UserGroup> groups = new List <UserGroup>();
        groups.Add(new UserGroup
        {
            creator_id = "id1",
            group_id = "gid1",
            name = "group1",
            description = "none"
        });
        FileStorage fileStore = new FileStorage(new Driver
        {
            email = "*****@*****.**",
            folder = @"resource\group1",
            groups = groups,
            session_token = "session_token1",
            user_id = "id1"
        });
        // Station-side upload handler under test.
        AttachmentUploadHandler handler = new AttachmentUploadHandler();
        server.AddHandler("/test/", handler);
        server.Start();
        // Fake cloud upload endpoint the station forwards to.
        cloud.AddHandler("/" + CloudServer.DEF_BASE_PATH + "/attachments/upload/", new DummyImageUploadHandler());
        cloud.Start();
        ObjectUploadResponse res = Wammer.Model.Attachment.UploadImage(
            "http://localhost:80/test/",
            new ArraySegment <byte>(imageRawData),
            "group1", object_id1, "orig_name2.png", "image/png",
            ImageMeta.Origin, "apikey1", "token1");
        // verify saved file: byte-for-byte equal to the uploaded raw data.
        using (FileStream f = fileStore.Load(object_id1 + ".png"))
        {
            byte[] imageData = new byte[f.Length];
            // NOTE(review): asserts a single Read() fills the whole buffer; Stream.Read
            // is allowed to return fewer bytes — could be flaky for large files.
            Assert.AreEqual(imageData.Length, f.Read(imageData, 0, imageData.Length));
            for (int i = 0; i < f.Length; i++)
            {
                Assert.AreEqual(imageData[i], imageRawData[i]);
            }
        }
        // verify db: exactly one attachment doc with the expected metadata.
        MongoCursor <Attachment> cursor = mongodb.GetDatabase("wammer").GetCollection <Attachment>("attachments")
                                          .Find(new QueryDocument("_id", object_id1));
        Assert.AreEqual <long>(1, cursor.Count());
        foreach (Attachment doc in cursor)
        {
            Assert.AreEqual(object_id1, doc.object_id);
            Assert.AreEqual("orig_desc", doc.description);
            Assert.AreEqual("orig_title", doc.title);
            Assert.AreEqual(AttachmentType.image, doc.type);
            Assert.AreEqual(imageRawData.Length, doc.file_size);
            Assert.AreEqual("image/png", doc.mime_type);
        }
    }
}
// Exercises MongoCursor paging against the "cursors_sample" collection:
// prints three 10-item windows (skip 0, skip 10, skip 0 again), the cursor
// count, and a LINQ-filtered window. Output goes to the console for manual
// inspection; there are no assertions.
public void TestCursors()
{
    this.DbClient = new MongoClient(connectionString);
    this.DbServer = this.DbClient.GetServer();
    this.Database = this.DbServer.GetDatabase("test_client");
    Collection2 = Database.GetCollection <DBObject>("cursors_sample");
    MongoCursor <DBObject> cursor = Collection2.FindAll();
    //Console.WriteLine("cursor count = {0}", cursor.Count());
    // Window 1: first 10 documents.
    cursor.SetSkip(0);
    var lst = cursor.Take(10).ToList();
    foreach (var l in lst)
    {
        Console.Write(l.data + ", ");
    }
    Console.WriteLine();
    // Window 2: a fresh cursor is needed because the previous one was enumerated.
    cursor = Collection2.FindAll();
    cursor.SetSkip(10);
    lst = cursor.Take(10).ToList();
    foreach (var l in lst)
    {
        Console.Write(l.data + ", ");
    }
    Console.WriteLine();
    // Window 3: back to the start, then query the count on the same cursor.
    cursor = Collection2.FindAll();
    cursor.SetSkip(0);
    lst = cursor.Take(10).ToList();
    foreach (var l in lst)
    {
        Console.Write(l.data + ", ");
    }
    Console.WriteLine();
    Console.WriteLine("cursor count = {0}", cursor.Count());
    // Client-side LINQ filter over a fresh cursor, then page within the filtered set.
    cursor = Collection2.FindAll();
    Filter filter = new Filter();
    var filtered = cursor.Where(obj => filter.Check(obj.data)).Skip(10).Take(10).ToList();
    Console.WriteLine("filtered count: " + filtered.Count);
    foreach (var o in filtered)
    {
        Console.Write(o.data + ", ");
    }
    Console.WriteLine();
}
/// <summary>
/// Appends a failure/success record for the current automation to the project's
/// "failures.automation" array, inserting the project document when it does not
/// exist yet or updating it in place otherwise.
/// </summary>
/// <param name="message">The failure message to record.</param>
/// <returns>Always true.</returns>
public bool ProcessFailure(string message)
{
    var query = Query.And(Query.EQ("@project-name", mProjectName), Query.EQ("@runtime-version", mRuntimeVersion));
    MongoCursor<BsonDocument> cursor = collection.Find(query);
    // Cache the count: the original issued two separate count commands.
    long projectCount = cursor.Count();
    BsonDocument project = GetProject(cursor, mProjectName, mRuntimeVersion);
    BsonDocument failure = new BsonDocument();
    if (projectCount != 0)
    {
        failure = project["failures"].AsBsonDocument;
    }
    // Probe for the key instead of using KeyNotFoundException as control flow.
    BsonArray array = failure.Contains("automation")
        ? failure["automation"].AsBsonArray
        : new BsonArray();
    array = AddFailureDetails(array, mAutomationName, mSuccess, mFailureType, message);
    BsonDocument doc = new BsonDocument();
    doc.Add("automation", array);
    if (projectCount == 0)
    {
        // New project: attach the failures document and insert it.
        project.Add("failures", doc);
        collection.Insert(project);
    }
    else
    {
        // Existing project: replace the failures document in place.
        var update = Update.Set("failures", doc);
        collection.Update(query, update);
    }
    return true;
}
/// <summary>
/// Exports all unexported worker timesheet entries to CSV and returns the cursor over them.
/// </summary>
/// <returns>The cursor of exported entries, or null when there was nothing to export.</returns>
private IEnumerable<BsonDocument> ExportWorkerHours()
{
    try
    {
        MongoCursor<BsonDocument> cursor = GetUnexportedEntries("timesheetentry");
        // Cache the count: each MongoCursor.Count() call is a separate server round trip.
        long entryCount = cursor.Count();
        if (entryCount > 0)
        {
            logger.LogInfo("Exporting found unexported timesheet entries", entryCount);
        }
        else
        {
            logger.LogFineTrace("No unexported timesheet entries found.");
            // Callers distinguish "nothing to export" via null; kept for compatibility.
            return null;
        }
        List<TimesheetEntryFragment> timesheetEntryFragments = IntegrationHelpers.GetTimesheetFragments(
            cursor,
            MinTimeFragmentSize,
            database,
            logger,
            failedExports,
            new HashSet<ObjectId>(), // Temp object since this info isn't really needed. Refactor away during payroll handling update.
            false);
        logger.LogInfo(entryCount + " timesheet entries produced " + timesheetEntryFragments.Count + " fragments.");
        ExportWorkTimesheetFragmentsToCsv(timesheetEntryFragments);
        return cursor;
    }
    catch (Exception ex)
    {
        logger.LogError("Exporting hours failed.", ex);
        throw;
    }
}
/// <summary>
/// Performs a paged search over the given cursor.
/// </summary>
/// <param name="pageIndex">1-based page number; 0 is treated as page 1.</param>
/// <param name="pageCount">Receives the total number of pages.</param>
/// <param name="bsonDocQuery">Cursor over the matching documents; may be null.</param>
/// <returns>The documents on the requested page, or null when the cursor is null.</returns>
public List<BsonDocument> Search(int pageIndex, out int pageCount, MongoCursor<BsonDocument> bsonDocQuery)
{
    if (bsonDocQuery == null)
    {
        pageCount = 0;
        return null;
    }
    // Total matching documents; used to derive the page count.
    _totalPages = bsonDocQuery.Count();
    pageCount = GetPageCount(_pageSize, _totalPages);
    if (pageIndex == 0)
    {
        pageIndex = 1;
    }
    // pageIndex is already an int; the original (int) cast was redundant.
    return bsonDocQuery.Skip(_pageSize * (pageIndex - 1)).Take(_pageSize).ToList();
}
/// <summary>
/// Returns the first project document from the cursor, or a stub document carrying
/// the project name and runtime version when the cursor matches nothing.
/// </summary>
public BsonDocument GetProject(MongoCursor<BsonDocument> cursor, string projectName, string runTimeVersion)
{
    if (cursor.Count() != 0)
    {
        return cursor.First();
    }
    // No stored project yet: build a stub the caller can insert later.
    BsonDocument stub = new BsonDocument();
    stub.Add("@project-name", projectName);
    stub.Add("@runtime-version", runTimeVersion);
    return stub;
}
/// <summary>
/// Loads every stored HtmlRecord into the in-memory job set, schedule queue,
/// and status map.
/// </summary>
/// <param name="records">The collection of persisted records to rebuild state from.</param>
public void Initialize(MongoCollection<HtmlRecord> records)
{
    MongoCursor<HtmlRecord> cursor = records.FindAll();
    // foreach over an empty cursor is a no-op, so the former Count() > 0 guard
    // only added an extra count command to the server; iterate directly.
    foreach (HtmlRecord record in cursor)
    {
        Job job = new Job(record.id, JobStatus.None, record.timeStamp);
        jobSet.Add(record.id, job);
        jobSchedule.Enqueue(record.timeStamp, record.id);
        jobStatus.Add(record.id, RecordStatus.Waiting);
    }
}
// Prints CustomerName/Address/Tel for every document in the cursor,
// or a "data not found" notice (in Chinese, as shipped) when it is empty.
static void consoleBsonDocument(MongoCursor <BsonDocument> bsoncursor)
{
    if (bsoncursor.Count() == 0)
    {
        Console.WriteLine("未找到所需要数据");
        return;
    }
    foreach (BsonDocument bson in bsoncursor)
    {
        Console.WriteLine("CustomerName:" + bson["CustomerName"].AsString);
        Console.WriteLine("Address:" + bson["Address"].AsString);
        Console.WriteLine("Tel:" + bson["Tel"].AsString);
    }
}
// Dumps each document's CustomerName, Address and Tel to the console;
// prints the (Chinese) "data not found" notice when the cursor is empty.
static void consoleBsonDocument(MongoCursor<BsonDocument> bsoncursor)
{
    if (bsoncursor.Count() == 0)
    {
        Console.WriteLine("未找到所需要数据");
        return;
    }
    foreach (BsonDocument bson in bsoncursor)
    {
        Console.WriteLine("CustomerName:" + bson["CustomerName"].AsString);
        Console.WriteLine("Address:" + bson["Address"].AsString);
        Console.WriteLine("Tel:" + bson["Tel"].AsString);
    }
}
// Login button handler: looks the user up in Mongo by _id, compares the entered
// password against the stored one, and on success stores the username in session
// and transfers to the room layout page. All failures surface as JS alerts.
// NOTE(review): the password comparison is plaintext (strPass vs stored Password) —
// confirm whether hashing was intended; storing/comparing plaintext passwords is unsafe.
protected void Submit_Click(object sender, EventArgs e)
{
    const string udcErrorMethod = "Submit_Click";
    BsonDocument bsonDoc = new BsonDocument();
    clsUserInfo objMongoData;
    try
    {
        string strUserName = txtUserName.Text.Trim();
        string strPass = txtUserPass.Text.Trim();
        // Only attempt a lookup when both fields are non-blank.
        if (!string.IsNullOrWhiteSpace(strUserName) && !string.IsNullOrWhiteSpace(strPass))
        {
            // The username doubles as the document _id.
            var userId = Query.EQ("_id", strUserName);
            MongoCursor <BsonDocument> cursor = objMongo.bsonReadDocument(strCollectionName, userId);
            if (cursor != null && cursor.Count() > 0)
            {
                bsonDoc = cursor.FirstOrDefault();
                // Round-trip through JSON to hydrate the typed user object.
                objMongoData = BsonSerializer.Deserialize <clsUserInfo>(bsonDoc.ToJson());
                clsLog.blnLogDebug(udcErrorSource, udcErrorMethod, "User Data successfully fetched from mongo database", "", "", "", "");
                if (strPass.Equals(objMongoData.Password.Trim()))
                {
                    //ClientScript.RegisterClientScriptBlock(this.GetType(), "Successfully Saved", "<script language=javascript>alert('Successfully logged in!!!');</script>");
                    //Response.Redirect("/Web Forms/RoomLayout.aspx");
                    // Successful login: remember the user and move to the app page.
                    Session["username"] = objMongoData._id;
                    Server.Transfer("/Web Forms/RoomLayout.aspx");
                }
                else
                {
                    ClientScript.RegisterClientScriptBlock(this.GetType(), "Invalid Password", "<script language=javascript>alert('Invalid password');</script>");
                    clsLog.blnLogError(udcErrorSource, udcErrorMethod, "User Password is not correct", "");
                }
            }
            else
            {
                ClientScript.RegisterClientScriptBlock(this.GetType(), "Invalid User ID or Password", "<script language=javascript>alert('Invalid User ID or Password');</script>");
                clsLog.blnLogError(udcErrorSource, udcErrorMethod, "UserData not received from Mongo database or | User does not exist", "");
            }
        }
    }
    catch (Exception ex)
    {
        // Errors are logged only; the page renders with no feedback to the user.
        clsLog.blnLogError(udcErrorSource, udcErrorMethod, "Exception catched in method blnValidateLogin !! ", " | Exception : " + ex.ToString());
    }
}
/// <summary>
/// Checks whether a link document exists for the given (orderid, parnr) pair
/// in the "erpOrderPartner" collection.
/// </summary>
/// <returns>True when a matching document exists; false otherwise (including
/// when the collection itself does not exist yet).</returns>
public static bool ErpOrderPartnerExists(string orderId, string parNr, MongoDatabase mongoDatabase)
{
    string collName = "erpOrderPartner";
    if (!mongoDatabase.CollectionExists(collName))
    {
        return false;
    }
    MongoCollection<BsonDocument> erpEntriesCollection = mongoDatabase.GetCollection(collName);
    MongoCursor cursor = erpEntriesCollection.Find(Query.And(Query.EQ("orderid", orderId), Query.EQ("parnr", parNr)));
    // Return the comparison directly instead of if/return true/return false.
    return cursor.Count() != 0;
}
/// <summary>
/// Fetches all fleets. An empty collection is still a success response,
/// carrying an explanatory message and an empty data list.
/// </summary>
public static FleetResponses FetchAll()
{
    FleetResponses responses = new FleetResponses();
    responses.Status = true;
    MongoCursor<Fleet> results = context.Fleet.FindAll();
    if (results.Count() == 0)
    {
        // No fleets stored: report success with an empty list and a message.
        responses.Message = Constants.Empty_List;
        responses.Data = new List<Fleet>();
    }
    else
    {
        responses.Data = results.ToList();
    }
    return responses;
}
/// <summary>
/// Returns every user, 404 when the collection is empty, or 500 on unexpected errors.
/// </summary>
public ActionResult<UserResponses> GetAll()
{
    try
    {
        MongoCursor<User> users = context.User.FindAll();
        if (users.Count() == 0)
        {
            return NotFound(new { Message = Constants.Empty_List });
        }
        // Users found: build the success payload.
        UserResponses responses = new UserResponses
        {
            Data = users.ToList(),
            Status = true
        };
        return Ok(responses);
    }
    catch (Exception ex)
    {
        Log.LogError(ex);
        return StatusCode(500, ex.ToString());
    }
}
/// <summary>
/// Paginate mongo cursor
/// </summary>
/// <param name="superset">MongoCursor</param>
/// <param name="pageNumber">Page Number (1-based)</param>
/// <param name="pageSize">Page Size</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when pageNumber or pageSize is below 1.</exception>
public MongoPagedList(MongoCursor <T> superset, int pageNumber, int pageSize)
{
    if (pageNumber < 1)
    {
        throw new ArgumentOutOfRangeException("pageNumber", pageNumber, "PageNumber cannot be below 1.");
    }
    if (pageSize < 1)
    {
        throw new ArgumentOutOfRangeException("pageSize", pageSize, "PageSize cannot be less than 1.");
    }

    // set source to blank list if superset is null to prevent exceptions
    TotalItemCount = superset == null ? 0 : unchecked ((int)superset.Count());
    PageSize = pageSize;
    PageNumber = pageNumber;
    PageCount = TotalItemCount > 0 ? (int)Math.Ceiling(TotalItemCount / (double)PageSize) : 0;
    HasPreviousPage = PageNumber > 1;
    HasNextPage = PageNumber < PageCount;
    IsFirstPage = PageNumber == 1;
    IsLastPage = PageNumber >= PageCount;
    FirstItemOnPage = (PageNumber - 1) * PageSize + 1;
    var numberOfLastItemOnPage = FirstItemOnPage + PageSize - 1;
    LastItemOnPage = numberOfLastItemOnPage > TotalItemCount ? TotalItemCount : numberOfLastItemOnPage;

    // add items to internal list
    if (superset != null && TotalItemCount > 0)
    {
        // Skip(0) and Skip((1 - 1) * pageSize) are identical, so the original's
        // pageNumber == 1 special case was redundant; one expression covers all pages.
        Subset.AddRange(superset.Skip((pageNumber - 1) * pageSize).Take(pageSize).ToList());
    }
}
// Queries the "ImportHouse" collection for houses imported by a given site user,
// filtered by site, user (optionally move status), and optional community /
// building-type / trade-type criteria, sorted by post time descending, and
// returns one page of results. totalSize receives a count for the query.
// NOTE(review): in the legacy driver MongoCursor.Count() ignores SetSkip/SetLimit
// (Size() respects them), so totalSize looks like the overall match count — likely
// intended for paging, but confirm.
public virtual IEnumerable <ImportedHouse> GetImportHouseList(ImportedHouseListReq parames, ref int totalSize)
{
    MongoServer houseCaijiMongoDbServer = GetMongoClient(connectionName);
    MongoDatabase houseDb = houseCaijiMongoDbServer.GetDatabase(databaseName);
    string dbCollection = "ImportHouse";
    MongoCollection <BsonDocument> houseCollection = houseDb.GetCollection(dbCollection);
    IMongoQuery query = new QueryDocument();
    // The RefUser element must match the user id, and optionally the move status.
    IMongoQuery userQuery = Query.And(Query.EQ("UserID", parames.UserId));
    if (parames.status >= 0)
    {
        userQuery = Query.And(userQuery, Query.EQ("MoveStatus", parames.status));
    }
    query = Query.And(query, Query.EQ("SiteId", parames.SiteId));
    query = Query.And(query, Query.ElemMatch("RefUser", userQuery));
    query = Query.And(query, Query.EQ("SiteUserName", parames.SiteUserName));
    // Optional filters: only applied when the request carries a positive value.
    if (parames.CommunityId > 0)
    {
        query = Query.And(query, Query.EQ("CommunityID", parames.CommunityId));
    }
    if (parames.BuildingType > 0)
    {
        query = Query.And(query, Query.EQ("BuildType", parames.BuildingType));
    }
    if (parames.TradeType > 0)
    {
        query = Query.And(query, Query.EQ("TradeType", parames.TradeType));
    }
    // Newest posts first.
    IMongoSortBy sort = new SortByDocument();
    sort = SortBy.Descending("PostTime");
    // Server-side paging: skip the earlier pages, cap at one page of results.
    MongoCursor <ImportedHouse> dataList = houseCollection.FindAs <ImportedHouse>(query).SetSkip((parames.PageIndex - 1) * parames.PageSize).SetSortOrder(sort).SetLimit(parames.PageSize);
    totalSize = (int)dataList.Count();
    return(dataList);
}
/// <summary>
/// Returns a count of incomplete tasks to give an EA an idea of what remains.
/// </summary>
/// <returns>
/// The tasks.
/// </returns>
/// <param name='task_group'>
/// Task_group.
/// </param>
public int incompleteTasks(int task_group)
{
    // getResults(..., false) yields only the tasks that are not yet complete.
    MongoCursor pending = getResults(task_group, false);
    return (int)pending.Count();
}
/// <summary>
/// Number of documents matched by the wrapped cursor; delegates to MongoCursor.Count().
/// </summary>
public long Count()
{
    long matched = _mongoMongoCursor.Count();
    return matched;
}