// Test setup: configure JSON.NET defaults and load the PageList fixture from embedded resources.
public void TestInitialize()
{
    JsonConvert.DefaultSettings = () => new JsonSerializerSettings
    {
        Formatting = Formatting.None,
        Converters = new JsonConverter[] { new JsonKnownTypeConverter() }
    };

    string fixtureJson = AssemblyResourceReader.ReadAsString("Test_Data.PageList.json");
    this.pageList = JsonConvert.DeserializeObject<PageList>(fixtureJson);
}
/// <summary>Populates <c>Pages</c> from the two category trees; no-op when already filled.</summary>
protected override void Start()
{
    if (Pages.Any())
    {
        return;
    }

    var pageList = new PageList(Site);

    pageList.FillAllFromCategoryTree("Kategorie:Schmuck");
    Pages.AddRange(pageList.ToEnumerable());

    pageList.FillAllFromCategoryTree("Kategorie:Rücken");
    Pages.AddRange(pageList.ToEnumerable());
}
/// <summary>
/// Returns the cached <see cref="PageList"/>, loading it from the XML cache on first use.
/// Thread-safe via double-checked locking.
/// </summary>
public static PageList GetAllPage()
{
    // Re-test inside the lock so two concurrent callers cannot both load the
    // cache (the original only checked before acquiring the lock).
    if (s_PageCache == null)
    {
        lock (s_SyncObj)
        {
            if (s_PageCache == null)
            {
                string path = GetConfigPath();
                s_PageCache = XmlHelper.LoadFromXmlCache<PageList>(path);
            }
        }
    }
    return s_PageCache;
}
/// <summary>
/// Creates a new instance of <see cref="PageReplacementAlgorithm"/>.
/// </summary>
/// <param name="pool">The memory pool that blocks will be allocated from.</param>
/// <exception cref="ArgumentNullException"><paramref name="pool"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">The pool's page size is less than 4096.</exception>
/// <exception cref="ArgumentException">The pool's page size is not a power of 2.</exception>
public PageReplacementAlgorithm(MemoryPool pool)
{
    if (pool == null)
        throw new ArgumentNullException(nameof(pool));
    // ArgumentOutOfRangeException takes (paramName, message) — the original
    // passed them swapped, and its message ("greater than 4096") contradicted
    // the check (< 4096 means "at least 4096" is required).
    if (pool.PageSize < 4096)
        throw new ArgumentOutOfRangeException(nameof(pool), "PageSize must be at least 4096");
    if (!BitMath.IsPowerOfTwo(pool.PageSize))
        throw new ArgumentException("PageSize must be a power of 2", nameof(pool));

    m_maxValidPosition = (int.MaxValue - 1) * (long)pool.PageSize; // Max position
    m_syncRoot = new object();
    // Page size is a power of two, so (size - 1) is a low-bit mask and the
    // number of set bits is the shift count for page-index arithmetic.
    m_memoryPageSizeMask = pool.PageSize - 1;
    m_memoryPageSizeShiftBits = BitMath.CountBitsSet((uint)m_memoryPageSizeMask);
    m_pageList = new PageList(pool);
    m_arrayIndexLocks = new WeakList<PageLock>();
}
/// <summary>
/// Entry point: reads credentials from config.txt, optionally regenerates the
/// page list, and runs the bot with the status API toggled around the run.
/// </summary>
static void Main(string[] args)
{
    foreach (var line in File.ReadLines("config.txt"))
    {
        var indexOfSep = line.IndexOf('=');
        // Skip lines without a key=value separator — the original called
        // Substring(0, -1) on such lines and threw.
        if (indexOfSep < 0)
            continue;
        var key = line.Substring(0, indexOfSep).Trim();
        var val = line.Substring(indexOfSep + 1).Trim();
        if (key == "user") User = val;
        if (key == "pass") Pass = val;
        if (key == "apiAuthentication") ApiAuthentication = val;
    }

    if (string.IsNullOrEmpty(User) || string.IsNullOrEmpty(Pass) || string.IsNullOrEmpty(ApiAuthentication))
    {
        Console.WriteLine("Incomplete config");
        return;
    }

    var statusApi = new StatusApi();
    statusApi.SetStatus(false);

    var s = new Site("http://wiki-de.guildwars2.com/wiki/", User, Pass);

    if (args.Contains("-generatePagelist") || !File.Exists("pagelist.txt"))
    {
        Console.WriteLine("Generating pagelist...");
        var pagelist = new PageList(s);
        pagelist.FillFromCategoryTree("Guild Wars 2");
        pagelist.SaveTitlesToFile("pagelist.txt");
    }

    try
    {
        Run(s);
    }
    finally
    {
        // Clear the status even when Run throws.
        statusApi.SetStatus(false);
    }
}
/// <summary>Returns one page of active Function (menu) rows ordered by NodeId.</summary>
public static PageList caidan(int pageIndex, int pageSize)
{
    StorageEntities ent = new StorageEntities();

    var query = from f in ent.Function
                where f.State == true
                orderby f.NodeId ascending
                select new
                {
                    NodeId = f.NodeId,
                    DisplayName = f.DisplayName,
                    NodeURL = f.NodeURL,
                    DisplayOrder = f.DisplayOrder,
                    ParentNodeId = f.ParentNodeId,
                    ADDTime = f.ADDTime,
                    State = f.State,
                };

    var result = new PageList();
    // Deferred query for the requested page plus the total row count.
    result.DataList = query.Skip((pageIndex - 1) * pageSize).Take(pageSize);
    result.PageCount = query.Count();
    return result;
}
/// <summary>
/// Builds the page list, sets a completion cookie so the client can detect the
/// finished download, and returns the list as a downloadable text file.
/// </summary>
public async Task<ActionResult> CreatePageList(long cookieToReturn)
{
    PageList pageList = new PageList();
    string list = await pageList.CreateAsync();
#if MVC6
    Response.Headers.Remove("Cookie");
    Response.Cookies.Append(Basics.CookieDone, cookieToReturn.ToString(), new Microsoft.AspNetCore.Http.CookieOptions { HttpOnly = false, Path = "/" });
#else
    HttpCookie cookie = new HttpCookie(Basics.CookieDone, cookieToReturn.ToString());
    Response.Cookies.Remove(Basics.CookieDone);
    Response.SetCookie(cookie);
#endif
    string contentType = "application/octet-stream";
    byte[] payload = Encoding.UTF8.GetBytes(list);
    return new FileContentResult(payload, contentType)
    {
        FileDownloadName = "FileList.txt"
    };
}
/// <summary>
/// Queries one page of the user's active open-source projects.
/// </summary>
/// <param name="request">User filter and paging parameters.</param>
/// <param name="cancellationToken">Token to cancel the database calls.</param>
/// <returns>A <see cref="PageList{T}"/> with the page items and total count.</returns>
public async Task <PageList <QueryProjectResponseDto> > QueryProjectPageAsync(QueryProjectPageRequestDto request, CancellationToken cancellationToken = default)
{
    // Parameterized queries: the original interpolated request values straight
    // into the SQL text, which is a SQL-injection risk and defeats plan caching.
    var sql = @"
select * from project
where UserId = @UserId and Status = 1
order by SortId desc, CreateTime desc
limit @Offset, @Size
";
    var countSql = @"
select count(*) from project
where UserId = @UserId and Status = 1
";
    var parameters = new
    {
        UserId = request.UserId,
        Offset = (request.PageParm.Page - 1) * request.PageParm.Size,
        Size = request.PageParm.Size,
    };

    var result = await _dapperHelper.QueryAsync <QueryProjectResponseDto>(sql, parameters, CommandFlags.None, cancellationToken);
    var count = await _dapperHelper.QueryFirstAsync <int>(countSql, parameters, CommandFlags.None, cancellationToken);

    return new PageList <QueryProjectResponseDto>(request.PageParm.Page, request.PageParm.Size, count, result);
}
/// <summary>Returns a page of members matching the filters in <paramref name="userParams"/>.</summary>
public async Task<PageList<MemberDto>> GetMembersAsync(UserParams userParams)
{
    // Date-of-birth window derived from the requested age range.
    var minDob = DateTime.Today.AddYears(-userParams.MaxAge - 1);
    var maxDob = DateTime.Today.AddYears(-userParams.MinAge);

    var query = _context.Users.AsQueryable()
        .Where(u => u.UserName != userParams.CurrentUsername)
        .Where(u => u.Gender == userParams.Gender)
        .Where(u => u.DateOfBirth >= minDob && u.DateOfBirth <= maxDob);

    query = userParams.OrderBy switch
    {
        "created" => query.OrderByDescending(u => u.Created),
        _ => query.OrderByDescending(u => u.LastActive)
    };

    var projected = query.ProjectTo<MemberDto>(_mapper.ConfigurationProvider).AsNoTracking();
    return await PageList<MemberDto>.CreateAsync(projected, userParams.PageNumber, userParams.PageSize);
}
}
/// <summary>
/// Loads the article-assessment categories (subcategories of the root
/// assessment category) from huwiki into the <c>categories</c> map.
/// </summary>
static void LoadCategories()
{
    // Load the assessment categories.
    PageList cats = new PageList(huwiki);
    cats.FillAllFromCategory(RootCategory);
    cats.FilterNamespaces(new int[] { 14 });

    // Walk the subcategories of the root assessment category.
    foreach (Page p in cats.pages)
    {
        // Run the regex once — the original called IsMatch and then Match,
        // evaluating the same pattern twice per page.
        Match m = Regex.Match(p.title, QualityCatRegex);
        if (m.Success) // New assessment category found.
        {
            string cat = m.Groups[1].Value;
            categories.Add(cat, new AssessmentCategoryInfo()
            {
                // Whether an importance-based assessment category also exists.
                HasImportance = cats.Contains(string.Format(ImportanceCat, cat)),
                NewCat = false,
                CapitalStart = false
            });
        }
    }
}
/// <summary>
/// Filters storehouses by supplier name, storehouse-type name or storehouse
/// name (OR semantics); returns the unfiltered projection when all three
/// filter strings are empty.
/// </summary>
public static IQueryable querid(string store, string StName, string StoreName)
{
    // The original allocated a PageList here that was never used; removed.
    CangChuEntities1 contxt = new CangChuEntities1();
    var obj = from p in contxt.Storehouse
              select new
              {
                  StorId = p.StorId,
                  StoreNum = p.StoreNum,
                  StoreName = p.StoreName,
                  SupName = p.Supplier.SupName,
                  StName = p.Storehousetype.StName,
                  IsJin = p.Storehousetype.IsJin,
                  IsMoren = p.IsMoren,
                  CreateTime = p.CreateTime
              };

    // Any non-empty filter narrows the result across the three fields.
    if (store != "" || StName != "" || StoreName != "")
    {
        obj = obj.Where(p => p.SupName == store || p.StName == StName || p.StoreName == StoreName);
    }
    return obj;
}
/// <summary>
/// Outbound/shipping record list. cxb 2015-3-3
/// </summary>
/// <returns>The list view bound to one page of shipping records.</returns>
public ActionResult List()
{
    PageNavigate pn = new PageNavigate();
    int limit = 10;
    PageList <Td_Sale_Out_Head_Query> list = new PageList <Td_Sale_Out_Head_Query>(limit);
    try
    {
        Hashtable param = base.GetParameters();
        ParamVessel p = new ParamVessel();
        p.Add("keyword", String.Empty, HandleType.Remove, true); // search keyword
        p.Add("pageIndex", 1, HandleType.DefaultValue);          // current page number
        param = param.Trim(p);
        int pageIndex = Convert.ToInt32(param["pageIndex"]);
        ViewData["keyword"] = GetParameter("keyword");
        ViewData["orderby"] = param["orderby"];
        ViewData["pageIndex"] = pageIndex;
        ViewData["limit"] = limit;
        param.Add("limit", limit);
        param.Add("start", (pageIndex - 1) * limit);
        param.Add("id_cgs", GetLoginInfo <long>("id_buyer"));
        pn = BusinessFactory.ShippingRecord.GetPage(param);
        list = new PageList <Td_Sale_Out_Head_Query>(pn, pageIndex, limit);
    }
    catch (CySoftException)
    {
        // "throw;" preserves the stack trace — the original "throw ex;" reset it.
        throw;
    }
    catch (Exception)
    {
        throw;
    }
    return View(list);
}
/// <summary>Returns a filtered, ordered, paged list of members.</summary>
public async Task<PageList<MemberDto>> GetMembersAsync(UserParams userParams)
{
    // ProjectTo performs the projection server-side, so no Include calls are needed.
    var minDob = DateTime.Today.AddYears(-userParams.MaxAge - 1);
    var maxDob = DateTime.Today.AddYears(-userParams.MinAge);

    var query = _context.Users.AsQueryable()
        .Where(u => u.UserName != userParams.CurrentUsername)
        .Where(u => u.Gender == userParams.Gender)
        .Where(u => u.DateOfBirth >= minDob && u.DateOfBirth <= maxDob);

    query = userParams.OrderBy switch
    {
        "created" => query.OrderByDescending(u => u.Created),
        _ => query.OrderByDescending(u => u.LastActive)
    };

    var source = query.ProjectTo<MemberDto>(_mapper.ConfigurationProvider).AsNoTracking();
    return await PageList<MemberDto>.CreateAsync(source, userParams.PageNumber, userParams.pageSize);
}
/// <summary>
/// Creates a static text shape: stores position, text, font and colors,
/// inherits the last-used fill color and line style, and registers the
/// editable properties shown in the property grid.
/// </summary>
/// <param name="_parent">Owning page container.</param>
/// <param name="x">Left position of the text rectangle.</param>
/// <param name="y">Top position of the text rectangle.</param>
/// <param name="textToDraw">The text to render.</param>
/// <param name="textFont">Font used for rendering.</param>
/// <param name="textColor">Foreground color of the text.</param>
public DrawText(PageList _parent, int x, int y, string textToDraw, Font textFont, Color textColor) : base(_parent)
{
    _shapeoutline = new ShapeOutline(this);
    _shapefill = new ShapeFill(this);
    Resizeable = true;
    _rectangle.X = x;
    _rectangle.Y = y;
    _theText = textToDraw;
    _font = textFont;
    TextColor = textColor;
    this.ShapeType = STATIC_OBJ_TYPE.ID_TEXT;
    // Carry over the most recently used fill/outline so new shapes match.
    shapefill.FillColor = Common.LastFillColor;
    shapeoutline.LineStyle = Common.LastLineStyle;
    // Property-grid entries, format: "FieldName,Display Name,TYPE".
    Propertylist.Add("Text,Text,STRING");
    Propertylist.Add("TextColor,Text Color,Color");
    Propertylist.Add("TextBlinking,Text Blinking,BOOL");
    Propertylist.Add("BorderWidth,Border Width,DINT");
    Propertylist.Add("BorderColor,Border Color,Color");
    Propertylist.Add("BorderBlinking,Border Blinking,BOOL");
    Propertylist.Add("Color1,Fill Color,Color");
    Propertylist.Add("Visible,Visible,BOOL");
    Initialize();
}
/// <summary>
/// Returns a page of posts, applying configured defaults for missing paging
/// values and optional user/date/description filters.
/// </summary>
public PageList <Post> GetPosts(PostQueryFilters filters)
{
    // Fall back to configured defaults when the caller sent no paging values.
    if (filters.PageNumber == 0)
    {
        filters.PageNumber = paginationOptions.DefaultPageNumber;
    }
    if (filters.PageSize == 0)
    {
        filters.PageSize = paginationOptions.DefaultPageSize;
    }

    var posts = unitOfWork.PostRepository.GetAll();

    if (filters.UserId != null)
    {
        posts = posts.Where(p => p.UserId == filters.UserId);
    }
    if (filters.Date != null)
    {
        posts = posts.Where(p => p.Date.ToShortDateString() == filters.Date?.ToShortDateString());
    }
    if (filters.Description != null)
    {
        posts = posts.Where(p => p.Description.ToLower().Contains(filters.Description.ToLower()));
    }

    return PageList <Post> .Create(posts, filters.PageNumber, filters.PageSize);
}
/// <summary>
/// Builds the "what's hot" list: of the pages modified in the last 28 days,
/// the three with the most relationships, projected as <see cref="Activity"/>
/// items (the relationship count is stored in <c>Activity.id</c> and doubles
/// as the ranking key). The original summary wrongly described a SelectList
/// of project statuses.
/// </summary>
public IEnumerable <Activity> WhatsHotList()
{
    DateTime filter = DateTime.Now.AddDays(-28);

    // Recently modified pages, newest first.
    IEnumerable <Page> pages = AllPages()
        .Where(x => x.ModifiedOn.Date > filter)
        .OrderByDescending(x => x.ModifiedOn)
        .ToList();

    List <Activity> whatsHot = new List <Activity>();
    foreach (Page page in pages)
    {
        int relCount = GetRelByPage(page.Id).Count();
        whatsHot.Add(new Activity
        {
            id = relCount,
            projectName = page.Title,
            projectId = page.Id,
            orgName = GetOrgByID(page.orgID).OrgName
        });
    }

    // Highest relationship count first, top three only.
    return whatsHot.OrderByDescending(x => x.id).Take(3).ToList();
}
/// <summary>
/// Returns the form-right rows (namespace field only) matching
/// <paramref name="param"/>, grouped by namespace; returns an empty result
/// list when a non-warning query error occurs (the error is logged).
/// </summary>
public List <Sys_FormRightResult> GetRightTree(Sys_FormRightParam param)
{
    this.CheckSession();
    PageList <Sys_FormRightResult> ret = new PageList <Sys_FormRightResult>();
    List <Field> lstField = new List <Field>() { Sys_FormRight._.NameSpace };
    try
    {
        WhereClip whereClip = GetWhereClip(param);
        ret = this.SelectList <Sys_FormRightResult>(param.PageIndex.GetValueOrDefault(1), int.MaxValue, lstField, whereClip, Sys_FormRightResult._.NameSpace.Asc, Sys_FormRight._.NameSpace.Group, null);
    }
    catch (WarnException)
    {
        // "throw;" preserves the stack trace — the original "throw exp;" reset it.
        throw;
    }
    catch (System.Exception exp)
    {
        LogInfoBLL.WriteLog(this.SessionInfo, exp);
    }
    return ret.ResultList;
}
/// <summary>
/// Legacy seat-ordering page: shows the seats already booked for the target
/// date and a paged list of car lines still open for booking.
/// </summary>
/// <param name="pageIndex">1-based page number.</param>
/// <param name="msg">Optional message to show on the page.</param>
public ActionResult OrderSeat_old(int pageIndex = 1, string msg = "")
{
    ViewBag.msg = msg;
    // Bus date: after 20:00 bookings are for the next day.
    DateTime carDate = DateTime.Now.Hour < 20 ? DateTime.Today : DateTime.Today.AddDays(1);
    // Seats already booked for that date.
    ViewBag.hisOrderSeatRec = Container.Instance.Resolve <IServiceOrderSeatRec>().Qry(new List <ICriterion>() { Expression.Eq("CarDate", carDate) });
    ViewBag.carDate = carDate;
    // Lines still open for booking.
    int count = 0; // record count
    IList <ICriterion> listQuery = new List <ICriterion>() { Expression.Eq(GetFieldNameByDay(carDate), 1) };
    IList <Order> listOrder = new List <Order>() { new Order("Car.ID", true), new Order("ID", true) };
    IList <CarLine> list = Container.Instance.Resolve <IServiceCarLine>().Qry(listQuery, listOrder, pageIndex, PagerHelper.PageSize, out count);
    // Convert to a PageList so the pager control can render page numbers.
    PageList <CarLine> pageList = list.ToPageList <CarLine>(pageIndex, PagerHelper.PageSize, count);
    // Render the page from the pageList collection (mobile vs desktop view).
    if (AppHelper.IsMobileBrowser)
    {
        return(View("OrderSeatMobile", pageList));
    }
    else
    {
        return(View("OrderSeat", pageList));
    }
}
/// <summary>
/// Binds the rights grid for the selected menu node: queries one page
/// (size 50) of employee-right rows, resolves each row's right name from its
/// action code, and refreshes the pager.
/// </summary>
/// <param name="pageIndex">1-based page number to load.</param>
private void BindDataGridView(int pageIndex)
{
    panelShowRight.Hide();
    if (adtMenu.SelectedNode == null)
    {
        return;
    }
    // NOTE: the original declared an unused List<Sys_MenuResult> here; removed.
    Sys_EmpRightParam param = new Sys_EmpRightParam();
    param.EmpID = _empID;
    param.PageIndex = pageIndex;
    param.PageSize = 50;
    // Node names are prefixed; characters from index 3 onward hold the menu id.
    param.MenuID = adtMenu.SelectedNode.Name.Substring(3).ToInt32();
    param.ModuleName = txtModuleName.Text.Trim();
    param.IsNoAction = cboIsNoSet.Checked;
    PageList <Sys_EmpRightResult> pgList = rightLogic.GetPageList(param);
    if (pgList.ResultJoinList != null && pgList.ResultJoinList.Rows.Count > 0)
    {
        foreach (DataRow row in pgList.ResultJoinList.Rows)
        {
            string actionCode = row["ActionCode"].ToStringHasNull();
            if (string.IsNullOrEmpty(actionCode))
            {
                continue;
            }
            // Translate the action code into a display name for the grid.
            row["RightName"] = GetRightName(actionCode);
        }
        dgvModule.DataSource = pgList.ResultJoinList;
        pgEmpRight.RecordCount = pgList.TotalCount;
        pgEmpRight.Refresh();
    }
}
/// <summary>Pages the active suppliers, ordered by supplier id.</summary>
public static PageList Listfenye(int pageindex, int pagesize)
{
    var suppliers = from sup in s.Supplier
                    where sup.State == true
                    orderby sup.GysID
                    select new
                    {
                        GysID = sup.GysID,
                        GysType = sup.GysType,
                        GysName = sup.GysName,
                        Hone = sup.Hone,
                        ChuangZhen = sup.ChuangZhen,
                        Email = sup.Email,
                        Contacts = sup.Contacts,
                        Address = sup.Address,
                        Describe = sup.Describe,
                        State = sup.State
                    };

    PageList result = new PageList();
    result.DataList = suppliers.Skip((pageindex - 1) * pagesize).Take(pagesize);
    result.PageCount = suppliers.Count();
    return result;
}
/// <summary>
/// Returns a page of companies, optionally filtered by exact name and/or a
/// search term matched against name or introduction.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="paras"/> is null.</exception>
public async Task <PageList <Company> > GetCompaniesAsync(CompanyDtoParameters paras)
{
    if (paras == null)
    {
        throw new ArgumentNullException(nameof(paras));
    }

    var companies = context.Companies as IQueryable <Company>;

    if (!string.IsNullOrWhiteSpace(paras.CompanyName))
    {
        paras.CompanyName = paras.CompanyName.Trim();
        companies = companies.Where(c => c.Name == paras.CompanyName);
    }

    if (!string.IsNullOrWhiteSpace(paras.SearchTerm))
    {
        paras.SearchTerm = paras.SearchTerm.Trim();
        companies = companies.Where(c => c.Name.Contains(paras.SearchTerm) || c.Introduction.Contains(paras.SearchTerm));
    }

    return await PageList <Company> .CreateAsync(companies, paras.PageNumber, paras.PageSize);
}
/// <summary>
/// Filtered, paged storehouse query (supplier name OR type name OR storehouse
/// number), ordered by storehouse number.
/// </summary>
public static PageList querid(int pageIndex, int pagesize, string SupName, string StName, string StoreNum)
{
    PageList list = new PageList();
    CangChuEntities1 contxt = new CangChuEntities1();
    var obj = from p in contxt.Storehouse
              orderby p.StoreNum
              where p.Supplier.SupName == SupName || p.Storehousetype.StName == StName || p.StoreNum == StoreNum
              select new
              {
                  StorId = p.StorId,
                  StoreNum = p.StoreNum,
                  StoreName = p.StoreName,
                  SupName = p.Supplier.SupName,
                  StName = p.Storehousetype.StName,
                  IsJin = p.Storehousetype.IsJin,
                  IsMoren = p.IsMoren,
                  CreateTime = p.CreateTime
              };
    list.Datalist = obj.Skip((pageIndex - 1) * pagesize).Take(pagesize);
    // Count the FILTERED rows — the original counted the whole table, so the
    // page count disagreed with the filtered data being shown.
    int row = obj.Count();
    list.PageCount = row % pagesize == 0 ? row / pagesize : row / pagesize + 1;
    return list;
}
/// <summary>
/// Maps a paged list of domain categories to a paged list of
/// <see cref="CategoryModel"/> items and wraps it in the standard result value.
/// </summary>
/// <param name="context">Return context (unused here, required by the override).</param>
/// <param name="data">Expected to be a <c>PageList&lt;DomainModel.Category&gt;</c>.</param>
/// <exception cref="ArgumentException"><paramref name="data"/> is not the expected type.</exception>
public override dynamic GetValue(ReturnContext context, object data)
{
    // Fail fast with a clear message instead of the NullReferenceException the
    // original unchecked "as" cast produced for unexpected payload types.
    var tempList = data as PageList <DomainModel.Category>;
    if (tempList == null)
    {
        throw new ArgumentException("data must be a PageList<Category>", nameof(data));
    }

    int totalRows = tempList.TotalRows;
    IList <DomainModel.Category> list = tempList.Collections;
    List <CategoryModel> result = new List <CategoryModel>();
    foreach (var item in list)
    {
        result.Add(new CategoryModel()
        {
            CategoryName = item.CategoryName,
            Id = item.Id,
            Sort = item.Sort,
            FoodCount = item.FoodCount
        });
    }
    return this.ResultValue(data: new PageList <CategoryModel> { Collections = result, TotalRows = totalRows });
}
// Product inbound-storage ranking, paged.
public static PageList Put(int pageIndex, int PageSize)
{
    CKSJKEntities c = new CKSJKEntities();
    // Paging container.
    PageList list = new PageList();

    var details = from d in c.putWareDetail
                  orderby d.receiptNum
                  select new
                  {
                      receiptNum = d.receiptNum,
                      productNum = d.productNum,
                      productCount = d.productCount,
                      totalMoney = d.totalMoney,
                      LocationNum = d.LocationNum
                  };

    // One page of data.
    list.Datalist = details.Skip((pageIndex - 1) * PageSize).Take(PageSize);

    // Total pages, rounding up.
    int rows = c.putWareDetail.Count();
    list.PageCount = rows % PageSize == 0 ? rows / PageSize : rows / PageSize + 1;
    return list;
}
/// <summary>
/// Returns a page of professors, optionally including their disciplines'
/// students, filtered by name/surname, registration number and active flag.
/// </summary>
public async Task <PageList <Professor> > GetAllProfessoresAsync(PageParams pageParams, bool includeAlunos = false)
{
    IQueryable <Professor> query = _context.Professores;

    if (includeAlunos)
    {
        // Pull the discipline -> enrollment -> student graph.
        query = query.Include(p => p.Disciplinas)
                     .ThenInclude(d => d.AlunosDisciplinas)
                     .ThenInclude(ad => ad.Aluno);
    }

    query = query.AsNoTracking().OrderBy(p => p.Id);

    if (!string.IsNullOrEmpty(pageParams.Nome))
    {
        string upperName = pageParams.Nome.ToUpper();
        query = query.Where(p => p.Nome.ToUpper().Contains(upperName) ||
                                 p.Sobrenome.ToUpper().Contains(upperName));
    }

    if (pageParams.Registro > 0)
    {
        query = query.Where(p => p.Registro == pageParams.Registro);
    }

    if (pageParams.Ativo != null)
    {
        query = query.Where(p => p.Ativo == (pageParams.Ativo != 0));
    }

    return await PageList <Professor> .CreateAsync(query, pageParams.PageNumber, pageParams.PageSize);
}
// QUERY: SELECTs WITH JOINs.
// =======================================================================================
/// <summary>
/// Returns a page of students, optionally including their disciplines and the
/// responsible professor, filtered by name/surname, enrollment number and
/// active flag.
/// </summary>
public async Task <PageList <Aluno> > GetAllAlunosAsync(PageParams pageParams, bool includeProfessor = false)
{
    IQueryable <Aluno> query = _context.Alunos;

    if (includeProfessor)
    {
        // Pull the enrollment -> discipline -> professor graph.
        query = query.Include(a => a.AlunosDisciplinas)
                     .ThenInclude(ad => ad.Disciplina)
                     .ThenInclude(d => d.Professor);
    }

    query = query.AsNoTracking().OrderBy(a => a.Id);

    if (!string.IsNullOrEmpty(pageParams.Nome))
    {
        string upperName = pageParams.Nome.ToUpper();
        query = query.Where(a => a.Nome.ToUpper().Contains(upperName) ||
                                 a.Sobrenome.ToUpper().Contains(upperName));
    }

    if (pageParams.Matricula > 0)
    {
        query = query.Where(a => a.Matricula == pageParams.Matricula);
    }

    if (pageParams.Ativo != null)
    {
        query = query.Where(a => a.Ativo == (pageParams.Ativo != 0));
    }

    return await PageList <Aluno> .CreateAsync(query, pageParams.PageNumber, pageParams.PageSize);
}
/// <summary>
/// Returns a page of messages for the user's selected container:
/// "Inbox" (latest message per sender), "Outbox" (sent messages), or unread
/// received messages by default. Results are newest-first.
/// </summary>
public async Task <PageList <Message> > GetMessageForUser(MessageParams messageParams)
{
    var messages = _context.Messages.Include(u => u.Sender)
        .ThenInclude(p => p.Photos)
        .Include(u => u.Recipient)
        .ThenInclude(p => p.Photos)
        .AsQueryable();

    switch (messageParams.MessageContainer)
    {
        case "Inbox":
            messages = messages.GroupBy(x => x.SenderId)
                .Select(g => g.OrderByDescending(y => y.MessageSent).First())
                .Where(u => u.RecipientId == messageParams.UserId && u.RecipientDeleted == false);
            break;

        case "Outbox":
            // BUG FIX: the original chained equality operators
            // (SenderId == UserId == SenderDeleted == false), which compared
            // booleans instead of combining the two conditions with &&.
            messages = messages.Where(u => u.SenderId == messageParams.UserId && u.SenderDeleted == false);
            break;

        default:
            // Unread: latest received message per sender, not yet read or deleted.
            messages = messages.GroupBy(x => x.SenderId)
                .Select(g => g.OrderByDescending(y => y.MessageSent).First())
                .Where(u => u.RecipientId == messageParams.UserId && u.RecipientDeleted == false && u.isRead == false);
            break;
    }

    messages = messages.OrderByDescending(d => d.MessageSent);
    return await PageList <Message> .CreateAsync(messages, messageParams.PageNumber, messageParams.PageSize);
}
/// <summary>
/// Writes a paged JSON list of a worker's build cases to the response.
/// </summary>
public void GetWebBuidingSingleList()
{
    int pageIndex = string.IsNullOrEmpty(context.Request["pageIndex"]) ? 1 : Convert.ToInt32(context.Request["pageIndex"]);
    int PageSize = Convert.ToInt32(context.Request["PageSize"]);
    string SortOrder = context.Request["SortOrder"];
    string WorkerID = context.Request["WorkerID"];
    string SortParameters = "";
    int pageCount = 0;
    int recordcount = 0;
    string SelectParameters = string.Format("[ID],[WebBuidingStageID],[Title],[TimeStageInfo],[TimeStageContent],[AddTime]" + ",[sortID],[endtime],[FlagDelete],[DeleteOn],[TimeStageThumContent],[WorkerID],[DemandID],[IsUserEnd]" + ",[IsWorkerEnd],[UserID],[Guid],[Price],[Space],[BuidingSingleImage],[thumbnailImage],[EditOn],AddOn");
    // WorkerID comes straight from the request and is concatenated into the
    // WHERE clause — only accept it when it parses as a number (SQL-injection
    // guard; the original concatenated the raw string).
    long workerId;
    bool filterByWorker = long.TryParse(WorkerID, out workerId) && workerId != 0;
    SortParameters = string.Format(" FlagDelete=0 {0} ", filterByWorker ? "and WorkerID=" + workerId : "");
    Pagination paginfo = new Pagination();
    paginfo.SelectParameters = SelectParameters;
    paginfo.PageIndex = pageIndex;
    paginfo.PageSize = PageSize;
    paginfo.EntityName = "WebBuidingSingle";
    paginfo.SortParameters = SortParameters;
    // NOTE(review): SortOrder is still request-supplied text inside the ORDER BY —
    // it should be validated against a whitelist of column names as well.
    paginfo.SortOrder = SortOrder + " AddTime desc,ID desc";
    var result = PageList.GetPageListBySQL <WebBuidingSingle>(paginfo, out recordcount, out pageCount);
    var obj = new { list = result, recordcount = recordcount, pageCount = pageCount };
    string msg = JsonConvert.SerializeObject(obj);
    context.Response.Write(msg);
}
/// <summary>
/// Bank-account list for the current supplier (default accounts first).
/// znt 2015-04-24
/// </summary>
public ActionResult List()
{
    PageNavigate pn = new PageNavigate();
    int limit = 10;
    PageList <Tb_Gys_Account> list = new PageList <Tb_Gys_Account>(limit);
    try
    {
        Hashtable param = base.GetParameters();
        ParamVessel p = new ParamVessel();
        p.Add("id_gys", GetLoginInfo <long>("id_supplier"), HandleType.DefaultValue);
        p.Add("pageIndex", 1, HandleType.DefaultValue);
        param = param.Trim(p);
        int pageIndex = Convert.ToInt32(param["pageIndex"]);
        ViewData["pageIndex"] = pageIndex;
        ViewData["limit"] = limit;
        // Sort so that default-flagged accounts come first.
        param.Add("sort", "flag_default");
        param.Add("dir", "desc");
        param.Add("limit", limit);
        param.Add("start", (pageIndex - 1) * limit);
        pn = BusinessFactory.BankAccount.GetPage(param);
        list = new PageList <Tb_Gys_Account>(pn, pageIndex, limit);
    }
    catch (CySoftException)
    {
        // "throw;" preserves the stack trace — the original "throw ex;" reset it.
        throw;
    }
    catch (Exception)
    {
        throw;
    }
    return View(list);
}
/// <summary>
/// Employee index with name search, name sorting and paging.
/// </summary>
public async Task <IActionResult> Index(string sortOrder, string currentFilter, string searchString, int?pageNumber)
{
    ViewData["CurrentSort"] = sortOrder;
    ViewData["NameSortParm"] = String.IsNullOrEmpty(sortOrder) ? "name_desc" : "";
    // Stores the raw incoming search string (may be null) before any fallback.
    ViewData["CurrentFilter"] = searchString;

    if (searchString != null)
    {
        pageNumber = 1; // a new search starts at the first page
    }
    else
    {
        searchString = currentFilter; // keep the previous filter while paging
    }

    var employees = from e in _context.employees.Include(p => p.Position)
                    select e;

    if (!String.IsNullOrEmpty(searchString))
    {
        searchString = searchString.ToLower();
        employees = employees.Where(e => e.Fullname.ToLower().Contains(searchString));
    }

    employees = sortOrder == "name_desc"
        ? employees.OrderByDescending(e => e.Fullname)
        : employees.OrderBy(e => e.Fullname);

    return View(await PageList <Employee> .CreateAsync(employees.AsNoTracking(), pageNumber ?? 1));
}
/// <summary>
/// Returns a page of customer companies matching <paramref name="param"/>,
/// ordered by company code. Errors are logged and rethrown.
/// </summary>
public PageList <CRM_CompanyResult> GetCustPageList(CRM_CompanyParam param)
{
    this.CheckSession();
    PageList <CRM_CompanyResult> ret = new PageList <CRM_CompanyResult>();
    try
    {
        List <Field> field = new List <Field>() { Field.All };
        ret = this.SelectList <CRM_CompanyResult>(param.PageIndex.GetValueOrDefault(1), param.PageSize.GetValueOrDefault(50), field, GetCustWhereClip(param), CRM_Company._.CompanyCode.Asc);
    }
    catch (WarnException)
    {
        // "throw;" preserves the stack trace — the original "throw exp;" reset it.
        throw;
    }
    catch (System.Exception exp)
    {
        LogInfoBLL.WriteLog(this.SessionInfo, exp);
        throw;
    }
    return ret;
}
/// <summary>
/// Pages the user's messages by container: "Inbox" (received), "Outbox"
/// (sent), or unread received messages by default. Newest first.
/// </summary>
public async Task <PageList <Message> > GetMessagesForUse(MessageParams messageParams)
{
    var query = _context.Messages.Include(u => u.Sender)
        .ThenInclude(p => p.Photos)
        .Include(u => u.Recipient)
        .ThenInclude(p => p.Photos)
        .AsQueryable();

    switch (messageParams.MessageContainer)
    {
        case "Inbox":
            query = query.Where(m => m.RecipientId == messageParams.UserId && m.RecipientDeleted == false);
            break;
        case "Outbox":
            query = query.Where(m => m.SenderId == messageParams.UserId && m.SenderDeleted == false);
            break;
        default:
            // Unread: received, not read, and not deleted on either side.
            query = query.Where(m => m.RecipientId == messageParams.UserId && m.IsRead == false && m.RecipientDeleted == false && m.SenderDeleted == false);
            break;
    }

    query = query.OrderByDescending(m => m.MessageSent);
    return await PageList <Message> .CreateAsync(query, messageParams.PageNumber, messageParams.PageSize);
}
// Fuzzy search of system roles by name, paged, ordered by role id.
public static IQueryable SysRoleQuery(int pageIndex, int pageSize, string RoleName)
{
    StorageEntities entity = new StorageEntities();
    var obj = from p in entity.SysRole
              // Contains is the idiomatic (and SQL-translatable) form of
              // IndexOf(...) != -1 used by the original.
              where p.RoleName.Contains(RoleName) && p.IsDelete == true
              orderby p.SysRoleID
              select new
              {
                  SysRoleID = p.SysRoleID,
                  IsDelete = p.IsDelete,
                  Remark = p.Remark,
                  RoleName = p.RoleName,
                  RoleNum = p.RoleNum,
                  CreateTime = p.CreateTime,
              };
    // The original built a whole PageList only to return its DataList
    // (PageCount was computed but never observable by callers); return the
    // paged query directly.
    return obj.Skip((pageIndex - 1) * pageSize).Take(pageSize);
}
/// <summary>
/// Role list page: builds paging/filter parameters, queries one page of roles
/// for the master user, and returns either the full view or the grid partial
/// when invoked as a search postback.
/// </summary>
public ActionResult RoleList()
{
    Hashtable param = base.GetParameters();
    int limit = 10;
    param.Add("_id_masteruser", id_user_master);
    ParamVessel p = new ParamVessel();
    p.Add("_search_", "0", HandleType.DefaultValue);
    p.Add("_id_masteruser", String.Empty, HandleType.ReturnMsg);
    p.Add("s_role", "", HandleType.Remove, true);
    p.Add("page", 0, HandleType.DefaultValue);
    p.Add("pageSize", limit, HandleType.DefaultValue);
    param = param.Trim(p);
    param.Add("flag_master", 1);
    // NOTE(review): a non-numeric pageSize leaves limit = 0 (TryParse writes
    // 0 to the out parameter on failure) — confirm this is intended.
    int.TryParse(param["pageSize"].ToString(), out limit);
    PageNavigate pn = new PageNavigate();
    int pageIndex = Convert.ToInt32(param["page"]);
    // "start" uses pageIndex * limit, so "page" appears to be 0-based here
    // (other actions in this file use (pageIndex - 1) * limit).
    param.Add("limit", limit);
    param.Add("start", pageIndex * limit);
    pn = BusinessFactory.RoleSetting.GetPage(param);
    var plist = new PageList <Tb_Role_Query>(pn, pageIndex, limit);
    plist.PageIndex = pageIndex;
    plist.PageSize = limit;
    ViewData["List"] = plist;
    // "_search_" == "1" marks an AJAX search; return just the grid partial.
    if (param["_search_"].ToString().Equals("1"))
    {
        return(PartialView("_RoleList"));
    }
    else
    {
        return(View());
    }
}
/// <summary>
/// Returns a page of attachment-file rows matching <paramref name="param"/>,
/// newest first. Errors are logged and rethrown.
/// </summary>
/// <param name="param">Filter and paging parameters.</param>
/// <returns>The requested page of results.</returns>
public PageList <Sys_AttachFilesResult> GetPageList(Sys_AttachFilesParam param)
{
    this.CheckSession();
    PageList <Sys_AttachFilesResult> ret = new PageList <Sys_AttachFilesResult>();
    try
    {
        List <Field> field = new List <Field>() { Field.All };
        ret = this.SelectList <Sys_AttachFilesResult>(param.PageIndex.GetValueOrDefault(1), param.PageSize.GetValueOrDefault(50), field, GetWhereClip(param), Sys_AttachFiles._.CreatedTime.Desc);
    }
    catch (WarnException)
    {
        // "throw;" preserves the stack trace — the original "throw exp;" reset it.
        throw;
    }
    catch (System.Exception exp)
    {
        LogInfoBLL.WriteLog(this.SessionInfo, exp);
        throw;
    }
    return ret;
}
/// <summary>
/// Recursively follows "next page" links and appends the URLs found on each
/// page into <paramref name="r"/> until <paramref name="total"/> entries are
/// collected or no further next-page link matches.
/// </summary>
/// <param name="otherurl">URL of the page currently being scanned.</param>
/// <param name="PageDoc">HTML of the current page.</param>
/// <param name="pattern">Regex with a TARGET group capturing the next-page link.</param>
/// <param name="r">Output array receiving the collected URLs.</param>
/// <param name="total">Capacity of <paramref name="r"/>; collection stops here.</param>
/// <param name="n">Number of entries already filled in <paramref name="r"/>.</param>
private void GetOtherPage(string otherurl, string PageDoc, string pattern, ref string[] r, int total, int n)
{
    Match m = Utility.GetMatchUrl(PageDoc, pattern, "[其他页面]");
    if (m.Success)
    {
        // Resolve the (possibly relative) next-page link against the current URL.
        string obturl = Utility.StickUrl(otherurl, m.Groups["TARGET"].Value);
        // Guard against a self-link, which would recurse forever.
        if (!obturl.Trim().Equals(otherurl.Trim()))
        {
            PageList pglst = new PageList(obturl, _Encode);
            ArrayList arraylist = GetListUrl(pglst);
            if (arraylist != null && arraylist.Count > 0)
            {
                int len = arraylist.Count;
                int j = len + n;
                // Copy everything, or just enough to fill r up to "total".
                if (j < total)
                    arraylist.CopyTo(0, r, n, len);
                else
                {
                    arraylist.CopyTo(0, r, n, total - n);
                    return;
                }
                n = j;
            }
            if (n < total)
            {
                // Keep walking from the newly fetched page's document.
                GetOtherPage(obturl, pglst._Doc, pattern, ref r, total, n);
            }
        }
    }
}
/// <summary>In-place Fisher–Yates shuffle of <paramref name="list"/> driven by <paramref name="r"/>.</summary>
private static void ShufflePageList(Random r, PageList list)
{
    // Walk from the back, swapping each slot with a random earlier (or same) slot.
    for (int i = list.Count() - 1; i >= 1; i--)
    {
        int j = r.Next(0, i + 1);
        Page swapped = list[i];
        list[i] = list[j];
        list[j] = swapped;
    }
}
/// <summary>Undoes the last edit, so page text reverts to previous contents.
/// The function doesn't affect other operations like renaming.</summary>
/// <param name="comment">Revert comment.</param>
/// <param name="isMinorEdit">Minor edit mark (pass true for minor edit).</param>
public void Revert(string comment, bool isMinorEdit)
{
    if (string.IsNullOrEmpty(title))
        throw new WikiBotException(Bot.Msg("No title specified for page to revert."));
    // Fetch the last two revisions; the second one holds the pre-edit text.
    PageList pl = new PageList(site);
    if (Bot.useBotQuery == true && site.botQuery == true &&
        site.botQueryVersions.ContainsKey("ApiQueryRevisions.php"))
        pl.FillFromPageHistoryEx(title, 2, false);
    else
        pl.FillFromPageHistory(title, 2);
    // Fewer than two revisions means there is nothing to revert to.
    if (pl.Count() != 2)
    {
        Console.Error.WriteLine(Bot.Msg("Can't revert page \"{0}\"."), title);
        return;
    }
    pl[1].Load();
    // Save the previous revision's text as a new edit.
    Save(pl[1].text, comment, isMinorEdit);
    Bot.LogEvent(Bot.Msg("Page \"{0}\" was reverted."), title);
}
/// <summary>Gets all MediaWiki messages from "Special:Allmessages" page and loads them into
/// site.messages PageList. The function is not backward compatible.</summary>
public void GetMediaWikiMessages()
{
    if (messages == null)
        messages = new PageList(this);
    Console.WriteLine(Bot.Msg("Updating MediaWiki messages dump. Please, wait..."));
    string res = site + indexPath + "index.php?title=Special:Allmessages";
    string src = "";
    Page p = null;
    // Matches the "next" pagination link of the special page, capturing its offset.
    Regex nextPortionRE = new Regex("offset=([^\"]+)\" title=\"[^\"]+\" rel=\"next\"");
    do
    {
        // First iteration requests a 5000-row page; subsequent iterations
        // follow the "next" offset extracted from the previous response.
        src = GetPageHTM(res + (src != ""
            ? "&offset=" + HttpUtility.HtmlDecode(nextPortionRE.Match(src).Groups[1].Value)
            : "&limit=5000"));
        using (XmlReader reader = GetXMLReader(src))
        {
            reader.ReadToFollowing("tbody");
            while (reader.Read())
            {
                // A <tr id="msg_..."> row starts a new message page object.
                if (reader.Name == "tr" && reader.NodeType == XmlNodeType.Element && reader["id"] != null)
                    p = new Page(this, namespaces["8"].ToString() + ":" + Bot.Capitalize(reader["id"].Replace("msg_", "")));
                // The default/actual message text cell supplies the page text.
                else if (reader.Name == "td" && (reader["class"] == "am_default" || reader["class"] == "am_actual"))
                    p.text = reader.ReadString();
                // Row end: the message is complete, store it.
                else if (reader.Name == "tr" && reader.NodeType == XmlNodeType.EndElement)
                    messages.Add(p);
                else if (reader.Name == "tbody" && reader.NodeType == XmlNodeType.EndElement)
                    break;
            }
        }
    } while (nextPortionRE.IsMatch(src));
    // The last parsed message may not have been flushed by a row-end event.
    if (p != null)
        messages.Add(p);
    Console.WriteLine(Bot.Msg("MediaWiki messages dump updated successfully."));
}
/// <summary>Gets all MediaWiki messages and dumps them to an XML file,
/// reusing a cache file younger than 90 days unless an update is forced.</summary>
/// <param name="forceLoad">If true, the messages are forced to be updated.</param>
public void GetMediaWikiMessages(bool forceLoad)
{
    // Also allocate on the forced path: the original only created the list
    // when forceLoad was false, so a forced first call hit a null "messages"
    // at the final FillAndLoadFromXMLDump.
    if (forceLoad == false || messages == null)
        messages = new PageList(this);
    // Cache file name derived from the site URL.
    string filePathName = "Cache/" + HttpUtility.UrlEncode(site.Replace('/', '_').Replace(':', '_')) + ".xml";
    if (forceLoad == false && File.Exists(filePathName) &&
        (DateTime.Now - File.GetLastWriteTime(filePathName)).Days <= 90)
    {
        // Cache is fresh enough — load it and skip the download.
        messages.FillAndLoadFromXMLDump(filePathName);
        return;
    }
    Console.WriteLine("Updating MediaWiki messages dump. Please, wait...");
    // Namespace 8 holds the MediaWiki messages.
    PageList pl = new PageList(this);
    pl.FillFromAllPages("!", 8, false, 10000);
    File.Delete(filePathName);
    pl.SaveXMLDumpToFile(filePathName);
    Console.WriteLine("MediaWiki messages dump updated successfully.");
    messages.FillAndLoadFromXMLDump(filePathName);
}
/// <summary>
/// Categorizes student-project pages by academic year: for each page under the
/// "Trabajo:" prefix that lacks a course category, derives the course from the
/// date of the page's first edit and (unless in simulation mode) tags and
/// saves it.
/// </summary>
/// <returns>0 on success, 1 when the site login fails.</returns>
public static int Main()
{
    // Log in to the site and configure the bot.
    Site welp = iniciar();
    leerOpciones(Environment.GetCommandLineArgs());
    if (welp == null)
        return 1;
    // Number of edits performed.
    int cuenta = 0;
    // Fetch all project pages (currently in the Main namespace).
    PageList todas = new PageList(welp);
    todas.FillFromAllPages("Trabajo:", 0, false, Int32.MaxValue, "Trabajo;");
    foreach (Page pag in todas)
    {
        pag.Load();
        // Skip pages that already carry a course category.
        List<string> cats = pag.GetCategories();
        if (cats.Exists(patronCCurso.IsMatch))
            continue;
        // To determine the course, get the date of the first edit.
        PageList hist = new PageList(welp);
        hist.FillFromPageHistory(pag.title, Int32.MaxValue);
        DateTime fc = hist[hist.Count() - 1].timestamp;
        // Derive the course year from that date.
        int año = fc.Year;
        // Before ~September 29 the page belongs to the course that started
        // the previous year.
        if (fc.Month < 9 || fc.Month == 9 && fc.Day < 29)
            año--;
        string curso = "Curso " + año + "-" + (año + 1);
        // Report progress on the console.
        Console.Error.WriteLine("«" + pag.title + "» creado en " + fc + " => " + curso);
        cuenta++;
        if (!simulado)
        {
            pag.AddToCategory(curso);
            pag.Save("bot: categorización de trabajos por curso", true);
        }
    }
    // Summary of the operations performed (or that would have been performed
    // in simulation mode).
    Console.Error.WriteLine("El bot " + (simulado ? "hubiera realizado " : "realizó ") + cuenta + " ediciones.");
    return 0;
}
// Compares the local wiki image against the Picasa original (by mean-square
// pixel difference) and, on a close match, uploads the full-size Picasa
// version over the wiki copy unless that was already done and reverted.
// Returns true on a match; on false, failureReason explains why.
private static bool UploadOriginalVersion(out string failureReason, Page page,
    string mediaUrl, string wikiImageFilename, string picasaImageFilename,
    bool fetchThumbnailVersion, bool allowWikiBigger)
{
    // if (!wikiImageFilename.ToLower().EndsWith(".jpg") && !wikiImageFilename.ToLower().EndsWith(".jpeg") &&
    //     !wikiImageFilename.ToLower().EndsWith(".png"))
    // {
    //     failureReason = "Cannot compare non-bitmap files to original source - requires manual validation";
    //     return false;
    // }
    failureReason = null;
    Bitmap wikiBitmap = new Bitmap(wikiImageFilename);
    // First have the Picasa server resize to the desired size - this will
    // ensure exactly the same resizing algorithm is used.
    string thumbnailUrl = new Regex("^(.*)/([^/]*)$").Replace(mediaUrl,
        "${1}/s" + wikiBitmap.Width + "/${2}");
    string filename = "temp_picasa_image";
    if (!fetchThumbnailVersion || !WgetToFile(thumbnailUrl, filename))
    {
        // Server-side thumbnail unavailable: fall back to the locally
        // downloaded full-size Picasa file.
        filename = picasaImageFilename;
    }
    Bitmap picasaBitmap = new Bitmap(filename);
    if (wikiBitmap.Width < picasaBitmap.Width || wikiBitmap.Height < picasaBitmap.Height)
    {
        // Couldn't get version of same size from server - stretch to fit
        Bitmap newPicasaBitmap = new Bitmap(picasaBitmap, wikiBitmap.Width, wikiBitmap.Height);
        picasaBitmap.Dispose();
        picasaBitmap = newPicasaBitmap;
    }
    bool wikiBitmapIsBigger = false;
    // After the resize above the two sizes may be equal, which also lands in
    // this branch; with allowWikiBigger the wiki copy is scaled down for the
    // comparison, otherwise the mismatch is treated as a failure.
    if (wikiBitmap.Width >= picasaBitmap.Width || wikiBitmap.Height >= picasaBitmap.Height)
    {
        if (allowWikiBigger)
        {
            wikiBitmapIsBigger = true;
            Bitmap newWikiBitmap = new Bitmap(wikiBitmap, picasaBitmap.Width, picasaBitmap.Height);
            wikiBitmap.Dispose();
            wikiBitmap = newWikiBitmap;
        }
        else
        {
            // Wiki version is bigger, something odd going on, skip it
            wikiBitmap.Dispose();
            picasaBitmap.Dispose();
            failureReason = "license matches and is valid - but Commons version is of a different size than the Picasa version, may have been edited locally";
            return false;
        }
    }
    // NOTE(review): 0.032 / 0.10 look like empirical match thresholds — confirm.
    double avgDiff = ImagesMeanSquareDifference(wikiBitmap, picasaBitmap);
    wikiBitmap.Dispose();
    picasaBitmap.Dispose();
    if (avgDiff >= 0.032 && avgDiff < 0.10)
    {
        failureReason = "license matches and is valid - but Picasa and Commons image were not a close enough match";
        return false;
    }
    else if (avgDiff < 0.032)
    {
        // Got an approximate match, need to upload the full-size version
        // (unless we've done so before and were reverted)
        PageList pageHistory = new PageList(page.site);
        pageHistory.FillFromPageHistory(page.title, int.MaxValue);
        bool alreadyUploaded = false;
        foreach (Page revision in pageHistory)
        {
            if (revision.lastUser == username && revision.comment.Contains("uploaded a new version"))
            {
                alreadyUploaded = true;
            }
        }
        if (!alreadyUploaded && !wikiBitmapIsBigger)
        {
            string saveText = page.text;
            page.UploadImage(picasaImageFilename,
                "Uploading version from source, revert me if incorrect", "", "", "");
            // Set back to current wikitext
            page.Load();
            page.text = saveText;
        }
        return true;
    }
    else
    {
        failureReason = "license matches and is valid - but Picasa and Commons image do not match";
        return false;
    }
}
// Plugin entry point: starts the notification thread, then polls wikitech
// every 60 seconds for new shell / tools access request pages, keeping the
// Shell and Tools queues in sync with what is still open on the wiki.
public override void Load()
{
    try
    {
        ch = core.getChannel(RequestLabs.RequestCh);
        if (ch == null)
        {
            Log("CRITICAL: the bot isn't in " + RequestLabs.RequestCh + " unloading requests", true);
            return;
        }
        RequestCache.Load();
        // Background thread that delivers notifications for queued requests.
        notifications = new Thread(Run);
        notifications.Start();
        // Read-only connection to wikitech (no password supplied).
        Site wikitech = new Site("https://wikitech.wikimedia.org", "wmib", "");
        while (true)
        {
            try
            {
                List<string> shell = new List<string>();
                List<string> tooldata = new List<string>();
                // Pass 1: shell access requests.
                PageList requests = new PageList(wikitech);
                requests.FillAllFromCategory("Shell Access Requests");
                foreach (Page page in requests)
                {
                    string title = page.title.Replace("Shell Request/", "");
                    if (RequestCache.Contains(title))
                    {
                        continue;
                    }
                    page.Load();
                    if (!Matches(page.text))
                    {
                        // Request no longer open: remember it and drop any
                        // queued entry for it.
                        RequestCache.Insert(title);
                        lock (Shell)
                        {
                            // this one was already processed
                            RequestLabs previous = Contains(title);
                            if (previous != null)
                            {
                                Shell.Remove(previous);
                            }
                        }
                        continue;
                    }
                    else
                    {
                        if (!shell.Contains(title))
                        {
                            shell.Add(title);
                        }
                        lock (Shell)
                        {
                            if (Contains(title) == null)
                            {
                                Shell.Add(new RequestLabs(title));
                            }
                        }
                    }
                }
                // Pass 2: tools access requests (same flow, separate
                // cache/queue and a different title prefix).
                requests = new PageList(wikitech);
                requests.FillAllFromCategory("Tools_Access_Requests");
                foreach (Page page in requests)
                {
                    string title = page.title.Replace("Nova Resource:Tools/Access Request/", "");
                    if (RequestCache.ContainsLabs(title))
                    {
                        continue;
                    }
                    page.Load();
                    if (!(Matches(page.text)))
                    {
                        RequestCache.InsertLabs(title);
                        lock (Tools)
                        {
                            // this one was already processed
                            RequestLabs previous = ContainsLabs(title);
                            if (previous != null)
                            {
                                Tools.Remove(previous);
                            }
                        }
                        continue;
                    }
                    else
                    {
                        if (!tooldata.Contains(title))
                        {
                            tooldata.Add(title);
                        }
                        lock (Tools)
                        {
                            if (ContainsLabs(title) == null)
                            {
                                Tools.Add(new RequestLabs(title));
                            }
                        }
                    }
                }
                // Poll interval: one minute between wikitech scans.
                Thread.Sleep(60000);
            }
            catch (ThreadAbortException)
            {
                // This thread is being aborted: take the notification thread
                // down with it.
                notifications.Abort();
                return;
            }
            catch (Exception fail)
            {
                handleException(fail);
            }
        }
    }
    catch (Exception fail)
    {
        handleException(fail);
        notifications.Abort();
    }
}
/// <summary>Finds all internal wikilinks in page text, excluding interwiki
/// links, links to sister projects, categories, embedded images and links in
/// image descriptions.</summary>
/// <returns>Returns the PageList object, in which page titles are the wikilinks,
/// found in text.</returns>
public PageList GetLinks()
{
    MatchCollection linkMatches = Site.wikiLinkRE.Matches(text);
    // Titles that must not be treated as plain internal links.
    StringCollection excluded = new StringCollection();
    excluded.AddRange(GetInterWikiLinks());
    excluded.AddRange(GetSisterWikiLinks(true));
    PageList result = new PageList(site);
    foreach (Match match in linkMatches)
    {
        string link = match.Groups[1].Value;
        // Skip links into the File (6) and Category (14) namespaces,
        // checking both the localized and canonical prefixes.
        bool isFileOrCategory =
            link.StartsWith(site.namespaces["6"] + ":", true, site.langCulture) ||
            link.StartsWith(Site.wikiNSpaces["6"] + ":", true, site.langCulture) ||
            link.StartsWith(site.namespaces["14"] + ":", true, site.langCulture) ||
            link.StartsWith(Site.wikiNSpaces["14"] + ":", true, site.langCulture);
        if (isFileOrCategory)
            continue;
        link = link.TrimStart(':');
        if (excluded.Contains(link))
            continue;
        // Drop any "#section" fragment from the title.
        int hashIndex = link.IndexOf("#");
        if (hashIndex != -1)
            link = link.Substring(0, hashIndex);
        result.Add(new Page(site, link));
    }
    return result;
}
/// <summary>
/// Initializes the navigation map, the page list and the navigation list
/// that back this control.
/// </summary>
public NavigationCentereXtended()
{
    myNavMap = new NavMap();
    myPageList = new PageList();
    myNavList = new NavList();
}
/// <summary>
/// Creates a new document with an empty page collection and the default
/// Visio measurement system.
/// </summary>
public Document()
{
    this.Pages = new PageList();
    this.measurementSystem = IVisio.VisMeasurementSystem.visMSDefault;
}
/// <summary>
/// Creates an empty paged result: no rows, page 1, zero records.
/// </summary>
/// <returns>A PageList with an empty result set.</returns>
public static PageList GetEmpty()
{
    return new PageList
    {
        Results = new ArrayList(),
        Current = 1,
        RecordCount = 0
    };
}
// Validating only the first page of the form should succeed when the
// required field carries data and the optional text fields are empty.
public void SingleSectionValid()
{
    // Arrange: two empty text fields plus one populated field.
    ApplicationData applicationData = new ApplicationData
    {
        { "text_field_one", string.Empty },
        { "text_field_three", "even more data" },
        { "text_field_two", string.Empty }
    };
    Application application =
        new Application(null, FORM_ID, null, null, applicationData);
    PageList pagesToValidate = new PageList
    {
        this.product.FormDefinition.Pages.First()
    };

    // Act: validate just that single section.
    ValidationResults results = this.ApplicationManager.ValidateApplication(
        this.sessionData,
        application,
        pagesToValidate,
        this.product.FormDefinition.Pages.AllControls,
        2);

    // Assert.
    Assert.IsTrue(results.IsValid);
}
/// <summary>Gets a specified MediaWiki message.</summary>
/// <param name="title">Title of the message.</param>
/// <returns>Returns raw text of the message.
/// If the message doesn't exist, exception is thrown.</returns>
public string GetMediaWikiMessage(string title)
{
    if (string.IsNullOrEmpty(title))
        throw new ArgumentNullException("title");
    // Normalize to the "MediaWiki:" namespace (8) with a capitalized name.
    title = namespaces["8"].ToString() + ":" + Bot.Capitalize(RemoveNSPrefix(title, 8));
    if (messages == null)
        messages = new PageList(this);
    else if (messages.Contains(title))
        // Cache hit: return the previously fetched message text.
        return messages[title].text;
    string src;
    try
    {
        // Fetch the raw message text directly, bypassing rendering.
        src = GetPageHTM(site + indexPath + "index.php?title=" +
            HttpUtility.UrlEncode(title.Replace(" ", "_")) +
            "&action=raw&usemsgcache=1&dontcountme=s");
    }
    catch (WebException e)
    {
        // An HTTP 404 means the message page doesn't exist at all.
        if (e.Message.Contains(": (404) "))
            throw new WikiBotException(
                string.Format(Bot.Msg("MediaWiki message \"{0}\" was not found."), title));
        else
            throw;
    }
    if (string.IsNullOrEmpty(src))
    {
        throw new WikiBotException(
            string.Format(Bot.Msg("MediaWiki message \"{0}\" was not found."), title));
    }
    // Cache the result for subsequent lookups.
    messages.Add(new Page(this, title));
    messages[messages.Count()-1].text = src;
    return src;
}
/// <summary>Gets subcategories titles for this PageList from specified wiki category page,
/// excluding other pages. Use FillFromCategory function to get other pages.</summary>
/// <param name="categoryName">Category name, with or without prefix.</param>
public void FillSubsFromCategory(string categoryName)
{
    int before = pages.Count;
    // Fetch everything in the category, then keep only namespace 14 (Category:).
    PageList subcategories = new PageList(site);
    subcategories.FillAllFromCategory(categoryName);
    subcategories.FilterNamespaces(new int[] {14});
    pages.AddRange(subcategories.pages);
    int added = pages.Count - before;
    if (added != 0)
        Bot.LogEvent(Bot.Msg("PageList filled with {0} subcategory page titles, " +
            "found in \"{1}\" category."), added.ToString(), categoryName);
    else
        Console.Error.WriteLine(
            Bot.Msg("Nothing was found in \"{0}\" category."), categoryName);
}
/// <summary>Gets all modified MediaWiki messages (to be more precise, all messages that are
/// contained in database), loads them into site.messages PageList (both titles and texts)
/// and dumps them to XML file.</summary>
/// <param name="forceLoad">If true, the messages are updated unconditionally. Otherwise
/// the messages are updated only if they are outdated.</param>
public void GetModifiedMediaWikiMessages(bool forceLoad)
{
    if (messages == null)
        messages = new PageList(this);
    string filePathName = "Cache" + Path.DirectorySeparatorChar +
        HttpUtility.UrlEncode(site.Replace("://", ".")) + ".mw_db_msg.xml";
    // Reuse a cached dump younger than 90 days unless an update is forced.
    if (forceLoad == false && File.Exists(filePathName) &&
        (DateTime.Now - File.GetLastWriteTime(filePathName)).Days <= 90)
    {
        messages.FillAndLoadFromXMLDump(filePathName);
        return;
    }
    Console.WriteLine(Bot.Msg("Updating MediaWiki messages dump. Please, wait..."));
    PageList pl = new PageList(this);
    // Temporarily disable the bot query interface while fetching, then
    // restore the previous setting afterwards.
    bool prevBotQueryState = botQuery;
    botQuery = false;    // backward compatibility requirement
    // Namespace 8 is "MediaWiki:"; pull up to 100000 message pages.
    pl.FillFromAllPages("!", 8, false, 100000);
    botQuery = prevBotQueryState;
    File.Delete(filePathName);
    pl.SaveXMLDumpToFile(filePathName);
    messages.FillAndLoadFromXMLDump(filePathName);
    Console.WriteLine(Bot.Msg("MediaWiki messages dump updated successfully."));
}
/// <summary>Gets all MediaWiki messages from "Special:Allmessages" page and dumps
/// them to HTM file.</summary>
/// <param name="forceLoad">If true, the messages are updated unconditionally. Otherwise
/// the messages are updated only if they are outdated.</param>
public void GetMediaWikiMessagesEx(bool forceLoad)
{
    if (messages == null)
        messages = new PageList(this);
    string filePathName = "Cache" + Path.DirectorySeparatorChar +
        HttpUtility.UrlEncode(site.Replace("://", ".")) + ".messages.htm";
    // Refresh the cached HTML dump if forced, missing, or older than 90 days.
    if (forceLoad == true || !File.Exists(filePathName) ||
        (DateTime.Now - File.GetLastWriteTime(filePathName)).Days > 90)
    {
        Bot.LogEvent(Bot.Msg("Updating MediaWiki messages dump. Please, wait..."));
        string res = site + indexPath + "index.php?title=Special:Allmessages";
        File.WriteAllText(filePathName, GetPageHTM(res), Encoding.UTF8);
        Bot.LogEvent(Bot.Msg("MediaWiki messages dump updated successfully."));
    }
    XmlDocument doc = new XmlDocument();
    doc.XmlResolver = null;    // don't resolve external DTDs while parsing
    // NOTE(review): the Replace below appears intended to normalize
    // non-breaking spaces to regular spaces — confirm the first argument
    // survived source encoding (it may have been "&nbsp;" or U+00A0).
    string text = File.ReadAllText(filePathName, Encoding.UTF8).Replace(" ", " "); ;
    doc.LoadXml(text);
    // Rows with class "def"/"orig" carry message names; rows with class
    // "def"/"new" carry the current message texts. Counts must line up.
    XmlNodeList nl1 = doc.DocumentElement.SelectNodes(
        ".//ns:table[@id = 'allmessagestable']/ns:tr[@class = 'def' or @class = 'orig']/" +
        "ns:td/ns:a/ns:span", xmlNS);
    XmlNodeList nl2 = doc.DocumentElement.SelectNodes(
        ".//ns:table[@id = 'allmessagestable']/ns:tr[@class = 'def' or @class = 'new']/" +
        "ns:td[last()]", xmlNS);
    if (nl1.Count == 0 || nl1.Count != nl2.Count)
        throw new WikiBotException(Bot.Msg("MediaWiki messages parsing failed."));
    for (int i = 0; i < nl1.Count; i++)
    {
        messages.Add(new Page(this, HttpUtility.HtmlDecode(nl1[i].InnerXml)));
        messages[messages.Count()-1].text = HttpUtility.HtmlDecode(nl2[i].InnerXml.Trim());
    }
}
/// <summary>Undoes all last edits of last page contributor, so page text reverts to
/// previous contents. The function doesn't affect other operations
/// like renaming or protecting.</summary>
/// <param name="comment">Revert comment.</param>
/// <param name="isMinorEdit">Minor edit mark (pass true for minor edit).</param>
public void UndoLastEdits(string comment, bool isMinorEdit)
{
    if (string.IsNullOrEmpty(title))
        throw new WikiBotException(Bot.Msg("No title specified for page to revert."));
    PageList pl = new PageList(site);
    string lastEditor = "";
    // Widen the history window (50, 500, 5000 revisions) until an edit by a
    // different user is found.
    for (int i = 50; i <= 5000; i *= 10)
    {
        // Prefer the bot query API when available; fall back to page history.
        if (Bot.useBotQuery == true && site.botQuery == true &&
            site.botQueryVersions.ContainsKey("ApiQueryRevisions.php"))
            pl.FillFromPageHistoryEx(title, i, false);
        else
            pl.FillFromPageHistory(title, i);
        // The newest revision's author is the contributor being reverted.
        lastEditor = pl[0].lastUser;
        foreach (Page p in pl)
            if (p.lastUser != lastEditor)
            {
                // First revision by someone else: restore that text.
                p.Load();
                Save(p.text, comment, isMinorEdit);
                Bot.LogEvent(
                    Bot.Msg("Last edits of page \"{0}\" by user {1} were undone."),
                    title, lastEditor);
                return;
            }
        pl.Clear();
    }
    // Entire inspected history was authored by the same user: give up.
    Console.Error.WriteLine(Bot.Msg("Can't undo last edits of page \"{0}\" by user {1}."),
        title, lastEditor);
}
// Finishes the login sequence: reads options from the login form and starts
// a background thread that loads the working page list; completion is
// marshalled back to the UI thread via Invoke.
void CompleteLogin()
{
    if (errorMessage != null)
    {
        // Login failed: report, hide the progress dialog and retry.
        MessageBox.Show(errorMessage);
        loadingDialog.Hide();
        Login();
        return;
    }
    numLoadAheadPages = loginForm.MaxLoadAheadPages;
    numCategoryPagesToRetrieve = loginForm.MaxCategoryPages;
    randomizePageOrder = loginForm.RandomizeOrder;
    // On-demand loading is only used when the order is NOT randomized.
    loadPageListOnDemand = !randomizePageOrder;
    pageList = new PageList(site);
    pageListSource = loginForm.PageListSource;
    pageListSourceValue = loginForm.PageListSourceValue;
    if (loadPageListOnDemand)
    {
        // Actually just loading some of the list of pages, more will be loaded on-demand later
        loadingDialog.Message = "Loading list of pages...";
    }
    else
    {
        loadingDialog.Message = "Loading list of pages (up to " + numCategoryPagesToRetrieve + ")...";
    }
    // Fill the page list off the UI thread (category or file source).
    Thread fillFromCategoryThread = new Thread(new ThreadStart(delegate()
    {
        switch (pageListSource)
        {
            case PageListSource.category:
                if (loadPageListOnDemand)
                {
                    pageList.FillSomeFromCategoryEx(pageListSourceValue, ref pageListNext);
                }
                else
                {
                    pageList.FillAllFromCategoryEx(pageListSourceValue, numCategoryPagesToRetrieve);
                }
                Invoke(new MethodInvoker(CompletePageListLoad));
                break;
            case PageListSource.file:
                pageList.FillFromFile(pageListSourceValue);
                Invoke(new MethodInvoker(CompletePageListLoad));
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
        //pageList.FillFromFile("pages.txt"); // just for debugging to load up some pages
    }));
    fillFromCategoryThread.Start();
}
/// <summary>Gets all MediaWiki messages and dumps them to XML file. This function is
/// obsolete, it won't work with current versions of MediaWiki software.</summary>
/// <param name="forceLoad">If true, the messages are updated unconditionally. Otherwise
/// the messages are updated only if they are outdated.</param>
public void GetMediaWikiMessages(bool forceLoad)
{
    if (messages == null)
        messages = new PageList(this);
    string cachePath = "Cache" + Path.DirectorySeparatorChar +
        HttpUtility.UrlEncode(site.Replace("://", ".")) + ".xml";
    // A dump younger than 90 days is considered fresh and reused as-is.
    bool cacheIsFresh = File.Exists(cachePath) &&
        (DateTime.Now - File.GetLastWriteTime(cachePath)).Days <= 90;
    if (forceLoad == false && cacheIsFresh)
    {
        messages.FillAndLoadFromXMLDump(cachePath);
        return;
    }
    Bot.LogEvent(Bot.Msg("Updating MediaWiki messages dump. Please, wait..."));
    // Namespace 8 holds the "MediaWiki:" message pages.
    PageList freshDump = new PageList(this);
    freshDump.FillFromAllPages("!", 8, false, 100000);
    File.Delete(cachePath);
    freshDump.SaveXMLDumpToFile(cachePath);
    messages.FillAndLoadFromXMLDump(cachePath);
    Bot.LogEvent(Bot.Msg("MediaWiki messages dump updated successfully."));
}
/// <summary>
/// Returns a navigational structures for a book. This is based on the ncx.xml file.
/// </summary>
/// <param name="bookId">Identifier of the book to inspect.</param>
/// <param name="toc">Receives the table of contents, or null if the book has no ncx file.</param>
/// <param name="pageList">Receives the page list, or null if the book has no ncx file.</param>
public void GetNavStructures(string bookId, out TableOfContents toc, out PageList pageList)
{
    toc = null;
    pageList = null;

    // Locate the ncx navigation file inside the book's folder.
    IDirectory bookDirectory = GetBookFolder(bookId);
    IList<IFile> ncxFile = bookDirectory.GetFiles(GetPackage(bookId).NcxXmlPath);
    if (ncxFile.Count == 1)
    {
        // Fix: XDocument.Load(Stream) does not close the underlying stream,
        // so the original leaked a file handle; dispose it explicitly.
        using (var ncxStream = ncxFile[0].Open(FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            // Set the Xml on the TOC. All other properties are derived from this.
            XDocument tocXml = XDocument.Load(ncxStream);
            toc = new TableOfContents(tocXml);
            pageList = new PageList(tocXml);
        }
    }
    // It's OK not to have a TOC. Let the null get returned if no ncxFile found.
}
/// <summary>Finds all internal wikilinks in page text, excluding interwiki
/// links, links to sister projects, categories, embedded images and links in
/// image descriptions.</summary>
/// <returns>Returns the PageList object, where page titles are the wikilinks,
/// found in text.</returns>
public PageList GetLinks()
{
    MatchCollection matches = site.wikiLinkRE.Matches(text);
    // Titles that must be skipped: interwiki and sister-project links.
    StringCollection excluded = new StringCollection();
    excluded.AddRange(GetInterWikiLinks());
    excluded.AddRange(GetSisterWikiLinks(true));
    StringCollection accepted = new StringCollection();
    foreach (Match match in matches)
    {
        string link = match.Groups[1].Value;
        // Skip links found in the exclusion set, with or without a leading colon.
        if (excluded.Contains(link) || excluded.Contains(link.TrimStart(':')))
            continue;
        // Strip any "#section" fragment from the title.
        int hashIndex = link.IndexOf("#");
        if (hashIndex != -1)
            link = link.Substring(0, hashIndex);
        accepted.Add(link);
    }
    PageList result = new PageList(site, accepted);
    // Drop File (6) and Category (14) namespace links, then remove any
    // leading colon left on the remaining titles.
    result.RemoveNamespaces(new int[] {6,14});
    foreach (Page p in result.pages)
        p.title = p.title.TrimStart(':');
    return result;
}
// Loads the Wikipedia page collection from a local XML dump, tokenizes it,
// and updates the UI between stages. Runs on a worker thread; UI access is
// marshalled through Invoke.
void LoadPages()
{
    //site = new Site("http://localhost/index.php/", "ZenithBot", "Bot");
    site = new Site("http://en.wikipedia.org/", "HNCluster", "csce470");
    Invoke(CheckSiteLoaded);
    pageList = new PageList(site);
    //pageList.FillFromFile(@"ComputerScienceWikipediaPagesList");
    // Load titles and texts from a local XML dump instead of the network.
    pageList.FillAndLoadFromXMLDump(@"Wikipedia-ComputerScience.xml");
    wikiCollection.LoadFromPageList(pageList);
    Invoke(IncrementPagesLoadedByVal, wikiCollection.wikiPages.Count);
    pagesLoaded += wikiCollection.wikiPages.Count;
    // Refresh the status text, directly or via the UI thread as needed.
    if (InvokeRequired)
    {
        Invoke(UpdateText);
    }
    else
    {
        UpdateTextMethod();
    }
    Invoke(CheckTitlesLoaded);
    wikiCollection.ExtractTokens();
    Invoke(CheckTokenized);
    if (InvokeRequired)
    {
        Invoke(UpdateText);
    }
    else
    {
        UpdateTextMethod();
    }
    // NOTE(review): two obsolete commented-out loading strategies (parallel
    // batch Load and per-page Load) were removed here as dead code.
}
/// <summary>
/// Runs a paged query for the given entity type and returns one page of results.
/// </summary>
/// <param name="t">Entity type to query.</param>
/// <param name="condition">Query condition (where clause).</param>
/// <param name="pageSize">Rows per page; non-positive values fall back to the
/// pager's default size.</param>
/// <returns>A page list carrying the rows, current page, total record count
/// and page count.</returns>
public static IPageList findPage( Type t, String condition, int pageSize )
{
    ObjectInfo info = new ObjectInfo( t );
    info.includeAll();
    if (pageSize > 0)
        info.Pager.setSize( pageSize );
    IList rows = ObjectDB.FindPage( info, condition );
    IPageList page = new PageList();
    page.Results = rows;
    page.PageCount = info.Pager.PageCount;
    page.RecordCount = info.Pager.RecordCount;
    page.Size = pageSize > 0 ? pageSize : info.Pager.getSize();
    page.PageBar = info.Pager.PageBar;
    page.Current = info.Pager.getCurrent();
    return page;
}
// Entry point: continuously scans Commons for Picasa-sourced files awaiting
// license review, verifies each against the Picasa original, and tags the
// outcome. Never returns; sleeps two minutes between passes.
static void Main(string[] args)
{
    // Credentials come from the command line: <username> <password>.
    username = args[0];
    password = args[1];
    Bot.unsafeHttpHeaderParsingUsed = 0;
    Site site = new Site(wikiSiteUrl, username, password);
    while (true)
    {
        int totalPagesProcessed = 0;
        try
        {
            PageList pageList = new PageList(site);
#if true
            // Either scan a fixed category or fall back to search results.
            if (catScanning != null)
                pageList.FillFromCategory(catScanning);
            else
                pageList.FillFromSearchResults(searchScanning, int.MaxValue);
#endif
            string failureReason = null;
            foreach (Page page in pageList)
            {
                // Optionally skip everything up to (and including) a resume marker.
                if (resumeAfter != null)
                {
                    if (page.title == resumeAfter)
                        resumeAfter = null;
                    continue;
                }
                totalPagesProcessed++;
                if (!page.title.StartsWith("File:"))
                {
                    continue;
                }
                // Retry loop: repeats this page on transient exceptions.
                while (true)
                {
                    try
                    {
                        page.Load();
                        // Already reviewed: nothing to do.
                        if (tagCompletedRegex.Match(page.text).Success)
                        {
                            break;
                        }
                        // Ensure the page carries a {{picasareview}} tag, or
                        // convert an old-style {{LicenseReview}} tag in place.
                        if (!tagRegex.Match(page.text).Success &&
                            !page.text.ToLower().Contains("{{picasareviewunnecessary}}") &&
                            !page.text.ToLower().Contains("user:picasa review bot/reviewed-error"))
                        {
                            Regex licenseReviewRegex = new Regex("{{LicenseReview\\|[^|]*\\|([^|]*)\\|([^}]*)}}");
                            Match m;
                            if ((m = licenseReviewRegex.Match(page.text)).Success)
                            {
                                page.text = licenseReviewRegex.Replace(page.text,
                                    "{{picasareview|" + m.Groups[1].ToString() + "|" + m.Groups[2].ToString() + "}}");
                                SavePage(page, "Converting old LicenseReview tag into picasareview tag");
                                break;
                            }
                            else
                            {
                                page.text += "\n{{picasareview}}\n";
                            }
                        }
                        bool success = false;
                        // Single-pass do/while(false): "continue" acts as a
                        // structured jump to the end of the review steps.
                        do
                        {
                            // Clean up scratch files from any previous attempt.
                            File.Delete("temp_wiki_image");
                            File.Delete("temp_picasa_image");
                            File.Delete("temp_picasa_image_full");
                            string licenseName, mediaUrl;
                            bool reviewUnnecessary = false;
                            if (CheckIfReviewUnnecessary(page))
                            {
                                // Keep running so we can upload the original version, break out later
                                // (unless it has an OTRS tag, in which case we shouldn't raise the resolution,
                                // or is Flickr reviewed, in which case only a lower-resolution version may
                                // be released on Flickr)
                                reviewUnnecessary = true;
                                success = true;
                                if (HasOtrsTag(page) || IsFlickrReviewed(page))
                                    continue;
                            }
                            if (!page.title.ToLower().EndsWith(".jpg") &&
                                !page.title.ToLower().EndsWith(".png") &&
                                !page.title.ToLower().EndsWith(".tiff"))
                            {
                                failureReason = "can only review image files, not other file types";
                                continue;
                            }
                            if (!FetchPicasaImageInfo(page, out licenseName, out mediaUrl))
                            {
                                failureReason = "could not retrieve image information from Picasa";
                                continue;
                            }
                            if (!reviewUnnecessary && !CheckLicense(page, licenseName))
                            {
                                failureReason = "image license on Picasa is invalid";
                                continue;
                            }
                            string licenseChangedFrom = null, licenseChangedTo = null;
                            if (!reviewUnnecessary &&
                                !UpdateLicense(page, licenseName, out licenseChangedFrom, out licenseChangedTo))
                            {
                                failureReason = "could not recognize license on Commons page";
                                continue;
                            }
                            // The "/d/" URL variant returns the original
                            // full-resolution file from Picasa.
                            string mediaUrlFull = new Regex("^(.*)/([^/]*)$").Replace(mediaUrl, "${1}/d/${2}");
                            if (!WgetToFile(mediaUrlFull, "temp_picasa_image_full"))
                            {
                                failureReason = "license matches and is valid - but could not retrieve full size image from Picasa";
                                continue;
                            }
                            page.DownloadImage("temp_wiki_image");
                            if (!FilesAreIdentical("temp_picasa_image_full", "temp_wiki_image"))
                            {
                                // Upload original full res version
                                if (!UploadOriginalVersion(out failureReason, page, mediaUrl,
                                    "temp_wiki_image", "temp_picasa_image_full",
                                    /*fetchThumbnailVersion*/true, /*allowWikiBigger*/false))
                                {
                                    continue;
                                }
                            }
                            if (!reviewUnnecessary)
                            {
                                // It matches! Good to go.
                                UpdateReviewTagSuccess(page, licenseChangedFrom, licenseChangedTo);
                                success = true;
                            }
                        } while (false);
                        if (!success)
                        {
                            UpdateReviewTagFailure(page, failureReason);
                        }
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Encountered exception: " + e.Message);
                        Console.WriteLine("Retrying...");
                        continue;
                    }
                    break;
                }
            }
        }
        catch (Exception e)
        {
            Console.WriteLine("Encountered exception: " + e.Message);
        }
        Console.WriteLine("Total pages processed: " + totalPagesProcessed);
        Console.WriteLine("Sleeping for 2 minutes...");
        Thread.Sleep(new TimeSpan(0, 2, 0));
    }
}