// Exports all pages present in the given wiki category to expDir.
// Resolves the category to a PageList, collects each page's title, and
// delegates to the ExportPages(string[], string, ExportNotify) overload.
public void ExportPages(string expCat, string expDir, WikiMedia.ExportNotify expNotify)
{
    PageList pl = wiki.GetPages(expCat);
    // Hoist Count() out of the loop: the original re-evaluated it for the
    // array allocation and again on every loop test.
    int count = pl.Count();
    string[] pages = new string[count];
    for (int i = 0; i < count; i++)
    {
        pages[i] = GetTitle(pl[i]);
    }
    ExportPages(pages, expDir, expNotify);
}
// Removes page number <Page> from the desktop: deletes every desktop item
// placed on that page, drops the page from PageList, and renumbers all
// higher-numbered pages down by one so the page sequence stays contiguous.
// Persists the result via Save().
public void DeletePage(int Page)
{
    // Iterate backwards so RemoveAt() cannot skip elements (the original
    // compensated with a forward loop plus i--).
    for (int i = DesktopItemList.Count - 1; i >= 0; i--)
    {
        if (DesktopItemList.ElementAt(i).Page == Page)
        {
            DesktopItemList.RemoveAt(i);
        }
    }
    PageList.Remove(Page);
    // Rebuild the dictionary in one pass instead of mutating it while
    // enumerating with ElementAt (O(n^2) and fragile): every key above the
    // deleted page shifts down by one. Keys stay unique because <Page>
    // itself was just removed.
    PageList = PageList.ToDictionary(
        kv => kv.Key > Page ? kv.Key - 1 : kv.Key,
        kv => kv.Value);
    // Keep the keys in ascending order, as the original code guaranteed.
    PageList = PageList.OrderBy(o => o.Key).ToDictionary(o => o.Key, p => p.Value);
    Save();
}
// Click handler for the "load category" button: reads a category name from
// the iCategory text box, fetches its pages from the wiki, and fills the
// iPage text box with one page title per line. Does nothing when the
// category box is empty or the category contains no pages.
void BLoadCategoryClick(object sender, System.EventArgs e)
{
    string category = iCategory.Text;
    if (String.IsNullOrEmpty(category))
    {
        return;
    }
    PageList pages = wiki.GetPages(category);
    // Cache the count: the original re-evaluated PageList.Count() on every
    // loop test as well as for the comparison and the array allocation.
    int count = pages.Count();
    if (count == 0)
    {
        return;
    }
    string[] lines = new string[count];
    for (int i = 0; i < count; i++)
    {
        lines[i] = pages[i].title;
    }
    iPage.Lines = lines;
}
// Removes a single desktop item and its config file. If the item was the
// last one on its page, the page itself is collapsed: every item on a higher
// page is shifted down one page, and PageList keys above the removed page are
// renumbered accordingly. Persists the result via Save().
public void DeleteSubApp(DesktopItem Item)
{
    int page = Item.Page;
    DesktopItemList.Remove(Item);
    if (File.Exists(Item.Config))
    {
        File.Delete(Item.Config);
    }
    // Only collapse the page when no other item still lives on it.
    bool pageStillUsed = DesktopItemList.Any(i => i.Page == page);
    if (!pageStillUsed)
    {
        foreach (DesktopItem i in DesktopItemList)
        {
            if (i.Page > page)
            {
                i.Page--;
            }
        }
        PageList.Remove(page);
        // Rebuild in one pass instead of mutating the dictionary while
        // enumerating it with ElementAt (O(n^2) and fragile): keys above the
        // removed page shift down by one; uniqueness holds because <page>
        // was just removed.
        PageList = PageList.ToDictionary(
            kv => kv.Key > page ? kv.Key - 1 : kv.Key,
            kv => kv.Value);
    }
    Save();
}
// Returns the id of the most recent revision of the given page on huwiki,
// or "" when the page does not exist. Progress messages are written to the
// console in Hungarian ("fetching id of the page's last revision",
// "succeeded"/"failed: the article does not exist").
static string LastRev(string title)
{
    Console.Write(title + " legutolsó változata azonosítójának lekérése ... ");
    PageList lastRev = new PageList(huwiki);
    lastRev.FillFromPageHistory(title, 1);
    if (lastRev.Count() == 0)
    {
        // No history entry: the article does not exist.
        Console.WriteLine(" sikertelen: a szócikk nem létezik");
        return "";
    }
    Console.WriteLine(" sikerült (" + lastRev[0].revision + ")");
    return lastRev[0].revision;
}
// Fills <pl> with every page in <categoryName> and, recursively, in all of
// its subcategories, skipping any category already listed in <doneCats>.
// Visited category titles are appended to <doneCats>, so repeated calls can
// share one "already done" list across category trees. Duplicate pages are
// removed at the end via RemoveRecurring().
// NOTE(review): the for-loop deliberately iterates while FillAllFromCategory
// appends newly discovered subcategory members to pl.pages — that growing
// list IS the tree walk. Do not convert this to foreach.
public static void FillAllFromCategoryTreeExceptDone(string categoryName, Site site, PageList pl, List <string> doneCats)
{
    pl.Clear();
    categoryName = site.CorrectNsPrefix(categoryName);
    //List<string> doneCats = new List<string>();
    Console.WriteLine("doneCats " + doneCats.Count.ToString());
    pl.FillAllFromCategory(categoryName);
    doneCats.Add(categoryName);
    for (int i = 0; i < pl.Count(); i++)
    {
        // Namespace 14 is the Category namespace.
        if (pl.pages[i].GetNamespace() == 14 && !doneCats.Contains(pl.pages[i].title))
        {
            //Console.WriteLine(pl.pages[i].title);
            pl.FillAllFromCategory(pl.pages[i].title);
            doneCats.Add(pl.pages[i].title);
        }
    }
    pl.RemoveRecurring();
}
// Console entry point for a DotNetWikiBot mass-deletion run against
// ceb.wikipedia: fills a PageList from the category "Pagklaro paghimo ni bot",
// filters pages through the requiretitle/requireword/requireone/vetoword
// lists, deletes each surviving page with a fixed reason, and busy-waits so
// edits are at least 10 seconds apart. The first 10 deletions pause for a
// manual <ret> confirmation.
// NOTE(review): the fragment `"Password: "******"Lsj"` below is a
// credential-scrubbing artifact (the original read the password from the
// console and set the bot account name); it does not compile as-is and must
// be restored before this file is built.
public static void Main() {
    Console.Write("Password: "******"Lsj";
    string makelang = "ceb";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Deleting bot test run";
    site.minorEditByDefault = true;
    string reason = "Deleting bot test series";
    PageList pl = new PageList(site);
    PageList pl1 = new PageList(site);
    //Select how to get pages. Uncomment as needed.
    //Add pages "by hand":
    //addpages(site,pl);
    //Find articles from a category
    //pl.FillAllFromCategoryTree("Phasmatodea");
    //pl1.FillAllFromCategoryTree("Eufriesea");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Euglossa");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Eulaema");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Exaerete");
    //foreach (Page p in pl1)
    // pl.Add(p);
    pl.FillFromCategory("Pagklaro paghimo ni bot");
    //Find subcategories of a category
    //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
    //Find articles from all the links to an article, mostly useful on very small wikis
    //pl.FillFromLinksToPage("Boidae");
    //Find articles containing a specific string
    //pl.FillFromSearchResults("insource:\" och Amp; \"",4999);
    //Set specific article:
    //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
    //Skip all namespaces except articles:
    //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
    Dictionary <string, string> replacedict = new Dictionary <string, string>();
    List <string> linkword = new List <string>();
    //linkword.Add("Catalogue of Life");
    //Require title to contain one in requiretitle list:
    List <string> requiretitle = new List <string>();
    //requiretitle.Add("Radioprogram nerlagda");
    //Require ALL in requireword list:
    List <string> requireword = new List <string>();
    requireword.Add("Location map+");
    //Require AT LEAST ONE in requireone list:
    List <string> requireone = new List <string>();
    List <string> vetoword = new List <string>();
    //vetoword.Add("</i>");
    DateTime oldtime = DateTime.Now;
    oldtime = oldtime.AddSeconds(10);
    Console.WriteLine("Pages to change : " + pl.Count().ToString());
    int iremain = pl.Count();
    foreach (Page p in pl) {
        //Skip start of alphabet:
        //if (String.Compare(p.title,"Sicydium") < 0 )
        // continue;
        if (!tryload(p, 2)) { continue; }
        if (!p.Exists()) { continue; }
        string origtitle = p.title;
        //Follow redirect:
        //if (p.IsRedirect())
        //{
        // p.title = p.RedirectsTo();
        // if (!tryload(p, 2))
        // continue;
        // if (!p.Exists())
        // continue;
        //}
        //Check so required title actually present:
        if (requiretitle.Count > 0) {
            bool onefound = false;
            foreach (string s in requiretitle) { if (p.title.Contains(s)) { onefound = true; } }
            if (!onefound) { Console.WriteLine("requiretitle not found"); continue; }
        }
        //Check so all required strings actually present:
        bool allfound = true;
        foreach (string s in requireword) { if (!p.text.Contains(s)) { allfound = false; } }
        if (!allfound) { Console.WriteLine("requireword not found"); continue; }
        if (requireone.Count > 0) {
            bool onefound = false;
            foreach (string s in requireone) { if (p.text.Contains(s)) { onefound = true; } }
            if (!onefound) { Console.WriteLine("requireone not found"); continue; }
        }
        //Check so no vetoword are present:
        bool vetofound = false;
        foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
        if (vetofound) { Console.WriteLine("vetoword found"); continue; }
        //If redirect, go back to redirect page:
        //if (origtitle != p.title)
        //{
        // p.title = origtitle;
        // p.Load();
        //}
        string origtext = p.text;
        //Do the actual deletion:
        p.Delete(reason);
        nedit++;
        // Manual confirmation for the first few deletions.
        if (nedit < 10) { Console.Write("<ret>"); Console.ReadLine(); }
        // Busy-wait throttle: at least 10 seconds between edits.
        DateTime newtime = DateTime.Now;
        while (newtime < oldtime) { newtime = DateTime.Now; }
        oldtime = newtime.AddSeconds(10);
        iremain--;
        Console.WriteLine(iremain.ToString() + " remaining.");
    }
    Console.WriteLine("Total # edits = " + nedit.ToString());
}
// Pulls the current game master data from the asset server and (optionally)
// regenerates list/detail pages on thealchemistcode.gamepedia.com via
// DotNetWikiBot. Flow: client version check -> download ASSETLIST ->
// download and decompress Data/MasterParam (JSON) plus the English
// localization tables -> build the Loc_english_LocalizedMasterParam lookup ->
// dump every asset under text\ -> rebuild the Units/Jobs/Equipment/Skills
// wiki pages. Only the Skills branch is currently live; the other three are
// parked behind `if (false)` toggles.
// NOTE(review): the wiki credentials below are placeholder strings, and
// Skip(3) assumes the unit table starts with a UTF-8 BOM — confirm both
// before enabling more branches.
public static void Update() {
    Networking.TicketID = 1;
    GameManager.Instance.AssetVersion = GameManager.Instance.SessionID = null;
    var CheckVersion = Networking.RequestAPI(new ReqCheckVersion(GameManager.Instance.Version, "android"));
    GameManager.Instance.AssetVersion = CheckVersion["body"]["assets"].ToString();
    //var AssetList = File.ReadAllBytes("ASSETLIST");
    var AssetList = DownloadFile("ASSETLIST", "Text");
    //File.WriteAllBytes("ASSETLIST", AssetList);
    List<AssetList.Item> ItemList = new List<AssetList.Item>();
    ReadAssetListProc(AssetList, ref ItemList);
    var Data_MasterParam_Id = ItemList.Find(i => i.Path.Contains("Data/MasterParam")).IDStr;
    //var Data_MasterParam_Compressed = File.ReadAllBytes("Data_MasterParam");
    var Data_MasterParam_Compressed = DownloadFile(Data_MasterParam_Id, "Text");
    //File.WriteAllBytes("Data_MasterParam", Data_MasterParam_Compressed);
    var Data_MasterParam_Json = Encoding.UTF8.GetString(Decompress(Data_MasterParam_Compressed));
    Data_MasterParam = (JObject)JsonConvert.DeserializeObject(Data_MasterParam_Json);
    var Loc_english_LocalizedMasterParam_Id = ItemList.Find(i => i.Path.Contains("Loc/english/LocalizedMasterParam")).IDStr;
    //var Loc_english_LocalizedMasterParam_Compressed = File.ReadAllBytes("LocalizedMasterParam");
    var Loc_english_LocalizedMasterParam_Compressed = DownloadFile(Loc_english_LocalizedMasterParam_Id, "Text");
    //File.WriteAllBytes("LocalizedMasterParam", Loc_english_LocalizedMasterParam_Compressed);
    var Loc_english_LocalizedMasterParam_String = Encoding.UTF8.GetString(Decompress(Loc_english_LocalizedMasterParam_Compressed));
    File.WriteAllText("translation.txt", Loc_english_LocalizedMasterParam_String);
    // Localization file is tab-separated "key<TAB>value", one entry per line.
    var Loc_english_LocalizedMasterParam_Lines = Loc_english_LocalizedMasterParam_String.Replace("\r", "").Split(new char[1] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
    var Loc_english_LocalizedMasterParam_KVP = Loc_english_LocalizedMasterParam_Lines.Select(l => l.Split(new char[1] { '\t' }));
    // Keys are trimmed to the part after "Param" (+6 skips "Param" and the separator).
    Loc_english_LocalizedMasterParam = Loc_english_LocalizedMasterParam_KVP.ToDictionary(a => a[0].Substring(a[0].IndexOf("Param") + 6), b => b[1]);
    var Loc_english_unit_Id = ItemList.Find(i => i.Path.Contains("Loc/english/unit")).IDStr;
    //var Loc_english_unit_Compressed = File.ReadAllBytes("unit");
    var Loc_english_unit_Compressed = DownloadFile(Loc_english_unit_Id, "Text");
    //File.WriteAllBytes("unit", Loc_english_unit_Compressed);
    // Skip(3) drops what is presumably a UTF-8 BOM — TODO confirm.
    var Loc_english_unit_String = Encoding.UTF8.GetString(Decompress(Loc_english_unit_Compressed).Skip(3).ToArray());
    var Loc_english_unit_Lines = Loc_english_unit_String.Replace("\r", "").Split(new char[1] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
    var Loc_english_unit_KVP = Loc_english_unit_Lines.Select(l => l.Split(new char[1] { '\t' }));
    Loc_english_unit_KVP.ToList().ForEach(u => Loc_english_LocalizedMasterParam.Add(u[0], u[1]));
    //Loc_english_LocalizedMasterParam.add = Loc_english_LocalizedMasterParam_KVP.ToDictionary(a => a[0], b => b[1]);
    // Dump every decompressed asset to text\<path>_<id> for inspection.
    foreach (var item in ItemList)
        File.WriteAllText(@"text\" + item.Path.Replace("/", "") + "_" + item.IDStr, Encoding.UTF8.GetString(Decompress(DownloadFile(item.IDStr, "Text"))));
    // DotNetWikiBot -- http://dotnetwikibot.sourceforge.net/
    var site = new Site("https://thealchemistcode.gamepedia.com", "wikiusername", "wikipassword");
    site.defaultEditComment = "unitlist element + ";
    site.minorEditByDefault = true;
    string content = "";
    var pageList = new PageList(site);
    // Units branch — currently disabled.
    if (false) {
        var units = Data_MasterParam["Unit"];
        var unitList = units.ToList().FindAll(u => Loc_english_LocalizedMasterParam.ContainsKey(u["iname"] + "_NAME") && u["ai"]?.ToString() == "AI_PLAYER" && u["iname"].ToString().Split(new char[] { '_' }).Count() == 3);
        pageList = new PageList(site, new List<string> { "Units" });
        pageList.LoadWithMetadata();
        content = Wiki.Units.CreateUnitListPage(unitList);
        if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        foreach (var unit in unitList) {
            pageList = new PageList(site, new List<string> { Loc_english_LocalizedMasterParam[unit["iname"] + "_NAME"] });
            pageList.LoadWithMetadata();
            if (pageList.Count() == 0) { var page = new Page(Loc_english_LocalizedMasterParam[unit["iname"] + "_NAME"]); pageList.Add(page); }
            content = Wiki.Units.CreateUnitPage(unit);
            if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        }
    }
    // Jobs branch — currently disabled.
    if (false) {
        var jobs = Data_MasterParam["Job"];
        var jobList = jobs.ToList().FindAll(u => Loc_english_LocalizedMasterParam.ContainsKey(u["ranks"][0]["eqid1"] + "_NAME"));
        pageList = new PageList(site, new List<string> { "Jobs" });
        pageList.LoadWithMetadata();
        content = Wiki.Jobs.CreateJobListPage(jobList);
        if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        foreach (var job in jobList) {
            pageList = new PageList(site, new List<string> { job["iname"].ToString() == job["origin"]?.ToString() ? Loc_english_LocalizedMasterParam[job["iname"] + "_NAME"] : (Loc_english_LocalizedMasterParam[job["iname"] + "_NAME"].Replace("[", "").Replace("]", "") + " (" + job["iname"].ToString().Substring(3) + ")") });
            pageList.LoadWithMetadata();
            if (pageList.Count() == 0) { var page = new Page(job["iname"].ToString() == job["origin"]?.ToString() ? Loc_english_LocalizedMasterParam[job["iname"] + "_NAME"] : (Loc_english_LocalizedMasterParam[job["iname"] + "_NAME"].Replace("[", "").Replace("]", "") + " (" + job["iname"].ToString().Substring(3) + ")")); pageList.Add(page); }
            content = Wiki.Jobs.CreateJobPage(job);
            if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        }
    }
    // Equipment branch — currently disabled.
    if (false) {
        var equipment = Data_MasterParam["Item"];
        var equipmentList = equipment.ToList().FindAll(u => u["type"].ToString() == "3" && Loc_english_LocalizedMasterParam.ContainsKey(u["iname"] + "_NAME"));
        pageList = new PageList(site, new List<string> { "Equipment" });
        pageList.LoadWithMetadata();
        content = Equipments.CreateEquipmentListPage(equipmentList);
        if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        foreach (var equip in equipmentList) {
            pageList = new PageList(site, new List<string> { Loc_english_LocalizedMasterParam[equip["iname"] + "_NAME"] });
            pageList.LoadWithMetadata();
            if (pageList.Count() == 0) { var page = new Page(Loc_english_LocalizedMasterParam[equip["iname"] + "_NAME"]); pageList.Add(page); }
            content = Wiki.Equipments.CreateEquipmentPage(equip);
            if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        }
    }
    // Skills branch — the only one currently enabled.
    if (true) {
        var skills = Data_MasterParam["Skill"];
        var skillList = skills.ToList().FindAll(u => /*u["type"].ToString() == "3" &&*/ Loc_english_LocalizedMasterParam.ContainsKey(u["iname"] + "_NAME"));
        pageList = new PageList(site, new List<string> { "Skills" });
        pageList.LoadWithMetadata();
        if (pageList.Count() == 0) { var page = new Page(site, "Skills"); pageList.Add(page); }
        content = Skills.CreateSkillListPage(skillList);
        if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        foreach (var skill in skillList) {
            pageList = new PageList(site, new List<string> { Loc_english_LocalizedMasterParam[skill["iname"] + "_NAME"] });
            pageList.LoadWithMetadata();
            if (pageList.Count() == 0) { var page = new Page(Loc_english_LocalizedMasterParam[skill["iname"] + "_NAME"]); pageList.Add(page); }
            content = Skills.CreateSkillPage(skill);
            if (pageList[0].text != content) { pageList[0].text = content; pageList[0].Save(); }
        }
    }
}
// Console entry point for a page-move run on sv.wikipedia: a hard-coded list
// of titles beginning with the Icelandic letter Ð is moved to the same title
// with the Bosnian letter Đ (talk pages included). Redirects are followed and
// pages matching a veto word (Iceland-related articles, redirects, pages
// already containing Đ) are skipped. Moves are throttled to one per ~10 s via
// a busy-wait, and the very first move pauses for a manual <ret>.
// NOTE(review): the fragment `"Password: "******"Lsjbot"` below is a
// credential-scrubbing artifact (the password was read from the console);
// it does not compile as-is and must be restored before this file is built.
public static void Main() {
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    PageList pl = new PageList(site);
    PageList pl1 = new PageList(site);
    site.defaultEditComment = "Flyttar sidor";
    site.minorEditByDefault = true;
    //Select how to get pages. Uncomment as needed.
    //Find articles from a category
    //pl.FillAllFromCategory("Robotskapade artiklar 2014-10");
    //pl1.FillAllFromCategoryTree("Eufriesea");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Euglossa");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Eulaema");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Exaerete");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl.FillFromCategory("Samtliga artiklar föreslagna för sammanslagningar och delningar");
    //Find subcategories of a category
    //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
    //Find articles from all the links to an article, mostly useful on very small wikis
    //pl.FillFromLinksToPage("Boidae");
    //Set specific article:
    //Page pp = new Page(site, "Användare:Lsjbot/Flytt-test");pl.Add(pp);
    List <string> sl = new List <string>();
    //sl.Add("Lista över insjöar i Arjeplogs kommun (1-1000)");
    // Hard-coded work list: titles starting with Icelandic Ð to be renamed.
    sl.Add("Ðakovića Kosa"); sl.Add("Ðanuša"); sl.Add("Ðaprovica"); sl.Add("Ðatla"); sl.Add("Ðatlo");
    sl.Add("Ðatlo (bergstopp)"); sl.Add("Ðatlo (grotta i Bosnien och Hercegovina, Republika Srpska, lat 43,06, long 18,49)");
    sl.Add("Ðavat"); sl.Add("Ðavato"); sl.Add("Ðavolica"); sl.Add("Ðavor-Konak"); sl.Add("Ðed");
    sl.Add("Ðed (berg i Bosnien och Hercegovina, Republika Srpska, lat 43,11, long 18,41)");
    sl.Add("Ðed (kulle)"); sl.Add("Ðed (ås)"); sl.Add("Ðedov Do"); sl.Add("Ðedov Do (dal i Bosnien och Hercegovina)");
    sl.Add("Ðedovac"); sl.Add("Ðedovac (kulle)"); sl.Add("Ðedovac (källa)"); sl.Add("Ðedovića Voda");
    sl.Add("Ðedovo Brdo"); sl.Add("Ðekića Brdo"); sl.Add("Ðekića Vis"); sl.Add("Ðenića Brdo");
    sl.Add("Ðera"); sl.Add("Ðerina Voda"); sl.Add("Ðerinac"); sl.Add("Ðerinac (bergstopp)");
    sl.Add("Ðermašica"); sl.Add("Ðernovača"); sl.Add("Ðeropa"); sl.Add("Ðerzelovica"); sl.Add("Ðeva");
    sl.Add("Ðevice"); sl.Add("Ðevigrad"); sl.Add("Ðidovi"); sl.Add("Ðilas"); sl.Add("Ðipovac");
    sl.Add("Ðipuša"); sl.Add("Ðogat"); sl.Add("Ðon"); sl.Add("Ðorđe Stratimirović"); sl.Add("Ðorđo Lavrnić");
    sl.Add("Ðubino Brdo"); sl.Add("Ðukanov Vis"); sl.Add("Ðukanov Vrh"); sl.Add("Ðukina Voda");
    sl.Add("Ðukino Brdo"); sl.Add("Ðukića Brdo"); sl.Add("Ðukića Brdo (bergstopp)"); sl.Add("Ðukića Glavica");
    sl.Add("Ðulanova Rijeka"); sl.Add("Ðulanovo Brdo"); sl.Add("Ðuletske Kose"); sl.Add("Ðulina Kosa");
    sl.Add("Ðulina Rupa"); sl.Add("Ðulinac"); sl.Add("Ðupska Čuka"); sl.Add("Ðupska Čuka (kulle i Makedonien)");
    sl.Add("Ðuranovac"); sl.Add("Ðuranovina"); sl.Add("Ðuranđa"); sl.Add("Ðuraš"); sl.Add("Ðuraš (kulle)");
    sl.Add("Ðurendića Vis"); sl.Add("Ðurevac"); sl.Add("Ðurevac (vattendrag i Bosnien och Hercegovina)");
    sl.Add("Ðurica"); sl.Add("Ðurin Sjek"); sl.Add("Ðurina Voda"); sl.Add("Ðurinovača");
    sl.Add("Ðurinovača (bergstopp)"); sl.Add("Ðurinovača (utlöpare)"); sl.Add("Ðurića Brdo");
    sl.Add("Ðurića Brdo (bergstopp)"); sl.Add("Ðurića Brdo (kulle)"); sl.Add("Ðurića Kuk");
    sl.Add("Ðurića Vis"); sl.Add("Ðurića Vrelo"); sl.Add("Ðuričin Do"); sl.Add("Ðurkelina Jama");
    sl.Add("Ðurkovac"); sl.Add("Ðurkovac (berg i Bosnien och Hercegovina)"); sl.Add("Ðuroje");
    sl.Add("Ðurov"); sl.Add("Ðurov Ras"); sl.Add("Ðurđev Do"); sl.Add("Ðurđeva Glava");
    sl.Add("Ðurđeva Glavica"); sl.Add("Ðurđeva Glavica (berg i Bosnien och Hercegovina)");
    sl.Add("Ðurđevac"); sl.Add("Ðurđevac (utlöpare)"); sl.Add("Ðurđevica"); sl.Add("Ðurđevica (berg)");
    sl.Add("Ðurđeviča Vis"); sl.Add("Ðurđevo Brdo");
    sl.Add("Ðurđevo Brdo (berg i Bosnien och Hercegovina, Republika Srpska, lat 43,58, long 19,20)");
    sl.Add("Ðurđevo Brdo (berg i Bosnien och Hercegovina, Republika Srpska, lat 44,04, long 19,58)");
    sl.Add("Ðurđevo Brdo (kulle i Bosnien och Hercegovina, Republika Srpska, lat 42,76, long 18,27)");
    sl.Add("Ðurđevo Brdo (kulle i Bosnien och Hercegovina, Republika Srpska, lat 43,49, long 18,82)");
    sl.Add("Ðurđovac"); sl.Add("Ðusin Vrh"); sl.Add("Ðuvića Vrh"); sl.Add("Ðvogrla Jama");
    foreach (string s in sl) { Page pp = new Page(site, s); pl.Add(pp); }
    //Skip all namespaces except articles:
    //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
    Dictionary <string, string> replacedict = new Dictionary <string, string>();
    //replacedict.Add("-", "–");
    replacedict.Add("Ð", "Đ"); //from Icelandic Ð to Bosnian Đ
    //Require ALL in requireword list:
    List <string> requireword = new List <string>();
    //requireword.Add("obotskapad");
    //Require AT LEAST ONE in requireone list:
    List <string> requireone = new List <string>();
    //Vetowords should NOT be in article
    List <string> vetoword = new List <string>();
    vetoword.Add("Island"); vetoword.Add("isländska"); vetoword.Add("OMDIRIGERING"); vetoword.Add("Đ");
    DateTime oldtime = DateTime.Now;
    oldtime = oldtime.AddSeconds(10);
    Console.WriteLine("Pages to change : " + pl.Count().ToString());
    int iremain = pl.Count();
    bool first = true;
    foreach (Page p in pl) {
        //Skip start of alphabet:
        string skipuntil = "";
        if ((skipuntil != "") && String.Compare(p.title, skipuntil) < 0) { continue; }
        if (!tryload(p, 2)) { continue; }
        if (!p.Exists()) { continue; }
        string origtitle = p.title;
        ////Follow redirect:
        if (p.IsRedirect()) {
            p.title = p.RedirectsTo();
            if (!tryload(p, 2)) { continue; }
            if (!p.Exists()) { continue; }
        }
        //Check so all required strings actually present:
        bool allfound = true;
        foreach (string s in requireword) { if (!p.text.Contains(s)) { allfound = false; } }
        if (!allfound) { continue; }
        if (requireone.Count > 0) {
            bool onefound = false;
            foreach (string s in requireone) { if (p.text.Contains(s)) { onefound = true; } }
            if (!onefound) { continue; }
        }
        //Check so no vetoword are present:
        bool vetofound = false;
        foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
        if (vetofound) { continue; }
        //If redirect, go back to redirect page:
        //if (origtitle != p.title)
        //{
        // p.title = origtitle;
        // p.Load();
        //}
        //string origtext = p.text;
        //Do the actual replacement:
        string frompage = p.title;
        string topage = p.title;
        foreach (KeyValuePair <string, string> replacepair in replacedict) { topage = topage.Replace(replacepair.Key, replacepair.Value); }
        //Do the actual move:
        movepage(site, frompage, topage);
        //Move discussion too:
        bool movedisk = true;
        if (movedisk) {
            Page pd = new Page(site, "Diskussion:" + frompage);
            tryload(pd, 1);
            if (pd.Exists()) { movepage(site, "Diskussion:" + frompage, "Diskussion:" + topage); }
        }
        //Wait:
        if (first) { Console.WriteLine("<ret"); Console.ReadLine(); first = false; }
        // Busy-wait throttle: at least 10 seconds between moves.
        DateTime newtime = DateTime.Now;
        while (newtime < oldtime) { newtime = DateTime.Now; }
        oldtime = newtime.AddSeconds(10);
        iremain--;
        Console.WriteLine(iremain.ToString() + " remaining.");
    }
    Console.WriteLine("Total # edits = " + nedit.ToString());
}
// Moves the content of one category to another on the given wiki: loads every
// page (and subcategory) in <fromcat> and rewrites its "[[Kategori:...]]"
// link to point at <tocat>, saving only pages that actually changed and
// throttling saves to roughly one per 5 seconds. Category names may be passed
// with or without the "Kategori:" prefix.
public static void movecat(string fromcat, string tocat, Site site)
{
    Console.WriteLine("Moving from " + fromcat + " to " + tocat);
    PageList pl = new PageList(site);
    // Find subcategories and articles in the source category.
    pl.FillAllFromCategory(fromcat);
    Dictionary <string, string> replacedict = new Dictionary <string, string>();
    replacedict.Add("[[Kategori:" + fromcat.Replace("Kategori:", ""),
                    "[[Kategori:" + tocat.Replace("Kategori:", ""));
    DateTime oldtime = DateTime.Now;
    oldtime = oldtime.AddSeconds(10);
    Console.WriteLine("Pages to change : " + pl.Count().ToString());
    int iremain = pl.Count();
    foreach (Page p in pl)
    {
        if (!tryload(p, 2)) { continue; }
        if (!p.Exists()) { continue; }
        // (removed an unused local `origtitle` that the original kept around
        // for a commented-out redirect-handling block)
        string origtext = p.text;
        // Do the actual replacement:
        foreach (KeyValuePair <string, string> replacepair in replacedict)
        {
            p.text = p.text.Replace(replacepair.Key, replacepair.Value);
        }
        // Save only when the replacement changed something.
        if (p.text != origtext)
        {
            if (trysave(p, 4))
            {
                // NOTE(review): busy-wait throttle — burns CPU until the
                // edit interval has elapsed.
                DateTime newtime = DateTime.Now;
                while (newtime < oldtime) { newtime = DateTime.Now; }
                oldtime = newtime.AddSeconds(5);
            }
        }
        iremain--;
        Console.WriteLine(iremain.ToString() + " remaining.");
    }
}
// Console entry point for a backlink-audit run on sv.wikipedia: walks the
// bot-created articles in category "Robotskapade artiklar 2016-05" (resuming
// at the title "Valhermoso"), counts incoming wikilinks from pages that are
// not redirects, not bot pages, and not themselves in the category, and adds
// or removes the maintenance category "Kontrollbehov inkommande wikilänkar"
// accordingly. It can also wikilink the first occurrence of each `linkword`.
// Saves are busy-wait throttled to ~5 s; the first few pause for <ret>.
// NOTE(review): the fragment `"Password: "******"Lsjbot"` below is a
// credential-scrubbing artifact and does not compile as-is; it must be
// restored before this file is built.
public static void Main() {
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Ersätter och wikilänkar";
    site.minorEditByDefault = true;
    do {
        nedit = 0;
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site);
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Phasmatodea");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        // pl.Add(p);
        pl.FillAllFromCategory("Robotskapade artiklar 2016-05");
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        //pl.FillFromLinksToPage("Maltese Islands");
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"http://www.itis.gov;http://\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
        Dictionary <string, string> replacedict = new Dictionary <string, string>();
        List <string> linkword = new List <string>();
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List <string> requiretitle = new List <string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Require ALL in requireword list:
        List <string> requireword = new List <string>();
        requireword.Add("botskapad");
        //requireword.Add("Burkina Faso");
        //Require AT LEAST ONE in requireone list:
        List <string> requireone = new List <string>();
        List <string> vetoword = new List <string>();
        //vetoword.Add("vitrea");
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to change : " + pl.Count().ToString());
        int iremain = pl.Count();
        bool resume = false;
        foreach (Page p in pl) {
            iremain--;
            //Skip start of alphabet:
            //if (String.Compare(p.title,"Sicydium") < 0 )
            // continue;
            // Resume support: skip everything until the marker title is seen.
            if (!resume) { if (p.title == "Valhermoso") { resume = true; } else { continue; } }
            if (is_disambig(p.title)) { continue; }
            if (!tryload(p, 2)) { continue; }
            if (!p.Exists()) { continue; }
            string origtitle = p.title;
            //Follow redirect:
            //if (p.IsRedirect())
            //{
            // p.title = p.RedirectsTo();
            // if (!tryload(p, 2))
            // continue;
            // if (!p.Exists())
            // continue;
            //}
            //Check so required title actually present:
            if (requiretitle.Count > 0) {
                bool onefound = false;
                foreach (string s in requiretitle) { if (p.title.Contains(s)) { onefound = true; } }
                if (!onefound) { Console.WriteLine("requiretitle not found"); continue; }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword) { if (!p.text.Contains(s)) { allfound = false; } }
            if (!allfound) { Console.WriteLine("requireword not found"); continue; }
            if (requireone.Count > 0) {
                bool onefound = false;
                foreach (string s in requireone) { if (p.text.Contains(s)) { onefound = true; } }
                if (!onefound) { Console.WriteLine("requireone not found"); continue; }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
            if (vetofound) { Console.WriteLine("vetoword found"); continue; }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            // p.title = origtitle;
            // p.Load();
            //}
            string origtext = p.text;
            //Find articles from all the links to an article, mostly useful on very small wikis
            PageList pllink = new PageList(site);
            // NOTE(review): a failed backlink query is swallowed here and
            // treated as "no incoming links" — confirm that is intended.
            try { pllink.FillFromLinksToPage(p.title); } catch (WebException e) { }
            int nlink = 0;
            foreach (Page plink in pllink) {
                if (plink.title.Contains("Lsjbot")) { continue; }
                if (!pl.Contains(plink.title)) {
                    if (tryload(plink, 1)) {
                        if (!plink.IsRedirect() && !plink.text.Contains("obotskapad")) { Console.WriteLine("plink.title = " + plink.title); nlink++; }
                    }
                }
            }
            Console.WriteLine(p.title + " :" + pllink.Count().ToString() + ", " + nlink.ToString());
            if (nlink > 0) { p.AddToCategory("Kontrollbehov inkommande wikilänkar"); } else { p.RemoveFromCategory("Kontrollbehov inkommande wikilänkar"); }
            //special for mismatching tags:
            //int itag = p.text.ToLower().IndexOf("<i>");
            //int refend = p.text.IndexOf("</ref>", itag);
            //int bend = p.text.ToLower().IndexOf("</b>", itag);
            //if (refend < 0)
            // refend = 999999;
            //if (bend < 0)
            // bend = 999999;
            //if (refend < bend)
            //{
            // p.text = ReplaceOne(p.text, "</ref>", "''</ref>", itag);
            // p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else if (bend < refend)
            //{
            // p.text = ReplaceOne(p.text, "</b>", "''</b>", itag);
            // p.text = ReplaceOne(p.text, "</B>", "''</B>", itag);
            // p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else
            // p.text = p.text.Replace("<i>", "").Replace("<I>", "");
            //Wikilink first occurrence of each word, if not linked already:
            foreach (string s in linkword) {
                if (p.text.IndexOf(s) < 0) { continue; }
                string slinked = "[[" + s + "]]";
                if (p.text.IndexOf(slinked) < 0) {
                    p.text = p.text.Insert(p.text.IndexOf(s), "[[");
                    p.text = p.text.Replace("[[" + s, slinked);
                }
            }
            //Save the result:
            if (p.text != origtext) {
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4)) {
                    nedit++;
                    if (nedit < 4) { Console.Write("<ret>"); Console.ReadLine(); }
                    // Busy-wait throttle: at least 5 seconds between saves.
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime) { newtime = DateTime.Now; }
                    oldtime = newtime.AddSeconds(5);
                }
            }
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
    }while (false);// (nedit > 0);
}
// Console entry point that copies the "Geobox2" template category tree from
// en.wikipedia to ceb.wikipedia, translating the "Template:" prefix to the
// target language's ("Plantilya:" for ceb, "Batakan:" for war). For each
// template that does not yet exist on the target wiki, the page text is
// copied over, and its "/doc" subpage is imported as well when one exists.
// Edits are busy-wait throttled (10 s initially, 5 s after a save).
// FIX(review): the /doc subpage was previously created with `newtitle` (the
// template page itself, which always exists right after being saved above),
// so the doc import was a no-op; it now uses the computed `doctitle`.
// NOTE(review): the fragment `"Password: "******"Lsjbot"` below is a
// credential-scrubbing artifact and does not compile as-is.
public static void Main() {
    Console.Write("Password: "******"Lsjbot";
    string fromlang = "en";
    Site fsite = new Site("https://" + fromlang + ".wikipedia.org", botkonto, password);
    string tolang = "ceb";
    Site tsite = new Site("https://" + tolang + ".wikipedia.org", botkonto, password);
    PageList pl = new PageList(fsite);
    tsite.defaultEditComment = "Importing template category";
    tsite.minorEditByDefault = false;
    //Select how to get pages. Uncomment as needed.
    //Find articles from a category
    pl.FillAllFromCategoryTree("Geobox2");
    //pl1.FillAllFromCategoryTree("Eufriesea");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Euglossa");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Eulaema");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl1.FillAllFromCategoryTree("Exaerete");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl.FillFromCategory("Samtliga artiklar föreslagna för sammanslagningar och delningar");
    //Find subcategories of a category
    //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
    //Find articles from all the links to an article, mostly useful on very small wikis
    //pl.FillFromLinksToPage("Boidae");
    //Find articles containing a specific string
    //pl.FillFromSearchResults("cdata",9999);
    //Set specific article:
    //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
    //Skip all namespaces except articles:
    //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
    Dictionary <string, string> replacedict = new Dictionary <string, string>();
    if (tolang == "ceb") { replacedict.Add("Template:", "Plantilya:"); }
    if (tolang == "war") { replacedict.Add("Template:", "Batakan:"); }
    List <string> linkword = new List <string>();
    //linkword.Add("Catalogue of Life");
    //Require title to contain one in requiretitle list:
    List <string> requiretitle = new List <string>();
    //requiretitle.Add("Radioprogram nerlagda");
    //Require ALL in requireword list:
    List <string> requireword = new List <string>();
    requireword.Add("Template:");
    //Require AT LEAST ONE in requireone list:
    List <string> requireone = new List <string>();
    List <string> vetoword = new List <string>();
    //vetoword.Add("nedlagda");
    DateTime oldtime = DateTime.Now;
    oldtime = oldtime.AddSeconds(10);
    Console.WriteLine("Pages to change : " + pl.Count().ToString());
    int iremain = pl.Count();
    foreach (Page p in pl) {
        //Skip start of alphabet:
        //if (String.Compare(p.title,"Sicydium") < 0 )
        // continue;
        if (!tryload(p, 2)) { continue; }
        if (!p.Exists()) { continue; }
        string origtitle = p.title;
        //Follow redirect:
        if (p.IsRedirect()) {
            p.title = p.RedirectsTo();
            if (!tryload(p, 2)) { continue; }
            if (!p.Exists()) { continue; }
        }
        //Check so all required strings actually present:
        // (note: this bot checks the TITLE for requireword, keeping only
        // pages in the Template namespace)
        bool allfound = true;
        foreach (string s in requireword) { if (!p.title.Contains(s)) { allfound = false; } }
        if (!allfound) { continue; }
        if (requireone.Count > 0) {
            bool onefound = false;
            foreach (string s in requireone) { if (p.text.Contains(s)) { onefound = true; } }
            if (!onefound) { continue; }
        }
        //Check so no vetoword are present:
        bool vetofound = false;
        foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
        if (vetofound) { continue; }
        //If redirect, go back to redirect page:
        //if (origtitle != p.title)
        //{
        // p.title = origtitle;
        // p.Load();
        //}
        //Do the actual replacement:
        string newtitle = origtitle;
        foreach (KeyValuePair <string, string> replacepair in replacedict) { newtitle = newtitle.Replace(replacepair.Key, replacepair.Value); }
        // Skip templates that already exist on the target wiki.
        Page pt = new Page(tsite, newtitle);
        tryload(pt, 2);
        if (pt.Exists()) { continue; }
        pt.text = p.text;
        //Save the result:
        //Bot.editComment = "Importing category";
        //isMinorEdit = false;
        if (trysave(pt, 4)) {
            nedit++;
            DateTime newtime = DateTime.Now;
            while (newtime < oldtime) { newtime = DateTime.Now; }
            oldtime = newtime.AddSeconds(5);
        }
        // Import the template's /doc subpage when it exists on the source wiki.
        p.title += "/doc";
        tryload(p, 2);
        if (p.Exists()) {
            string doctitle = p.title;
            foreach (KeyValuePair <string, string> replacepair in replacedict) { doctitle = doctitle.Replace(replacepair.Key, replacepair.Value); }
            // FIX: was `new Page(tsite, newtitle)` — the just-saved template
            // page, so pd.Exists() was always true and the doc was skipped.
            Page pd = new Page(tsite, doctitle);
            tryload(pd, 2);
            if (pd.Exists()) { continue; }
            pd.text = p.text;
            //Save the result:
            //Bot.editComment = "Importing category";
            //isMinorEdit = false;
            if (trysave(pd, 4)) {
                nedit++;
                DateTime newtime = DateTime.Now;
                while (newtime < oldtime) { newtime = DateTime.Now; }
                oldtime = newtime.AddSeconds(5);
            }
        }
        iremain--;
        Console.WriteLine(iremain.ToString() + " remaining.");
    }
    Console.WriteLine("Total # edits = " + nedit.ToString());
}
//Bot job entry point: walks [[Kategori:Sidnamn med kinesiska tecken]] on Swedish
//Wikipedia and, for each article that has no zh sitelink on Wikidata and no inline
//[[zh:...]] link, looks the (Chinese-character) title up on Chinese Wikipedia and
//appends an inline [[zh:...]] interwiki link. If the zh article already sitelinks
//some sv article, the pair is logged on [[Användare:Lsjbot/Kinadubletter]] as a
//possible duplicate instead.
//NOTE(review): the "******" fragment below is a redacted credential-reading sequence
//(password prompt plus bot-account literal) and does not compile as shown; botkonto
//and password were presumably declared inside it.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Länkar Kinaartiklar";
    site.minorEditByDefault = true;
    Site zhsite = new Site("https://zh.wikipedia.org", botkonto, password);
    //Round counter; the first few edits of round one pause for manual review below.
    int nround = 1;
    do
    {
        nedit = 0;
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site);
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Geografi i Goiás");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        pl.FillFromCategory("Sidnamn med kinesiska tecken");
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        //pl.FillFromLinksToPage("Användare:Lsjbot/Algoritmer");
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"Användare:Lsjbot/Algoritmer\"", 4999);
        //pl.FillFromSearchResults("insource:\"http://www.itis.gov;http://\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        pl.RemoveNamespaces(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 100, 101 });
        //replacedict/regexdict/linkword are part of the shared script template but are
        //not referenced by this particular job.
        Dictionary<string, string> replacedict = new Dictionary<string, string>();
        //replacedict.Add("[[Kategoriya:Kabukiran sa Awstralya nga mas taas kay sa 8000 metros ibabaw sa dagat nga lebel]]", "");
        Dictionary<string, string> regexdict = new Dictionary<string, string>();
        //regexdict.Add(@"\| timezone *= \[\[Fernando de Noronha Time\|FNT\]\]", "| timezone = [[Brasilia Time|BRT]]");
        //regexdict.Add(@"\| timezone_DST *= \[\[Amazon Summer Time\|AMST\]\]", "| timezone_DST = [[Brasilia Summer Time|BRST]]");
        //regexdict.Add(@"\| utc_offset *= -2", "| utc_offset = -3");
        //regexdict.Add(@"\| utc_offset_DST *= -3", "| utc_offset_DST = -2");
        List<string> linkword = new List<string>();
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List<string> requiretitle = new List<string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Require ALL in requireword list:
        List<string> requireword = new List<string>();
        requireword.Add("obotskapad"); //substring of "Robotskapad(e)" — presumably matches bot-created markers; TODO confirm
        //requireword.Add("= -3\n");
        //requireword.Add("Brasilien");
        //Require AT LEAST ONE in requireone list:
        List<string> requireone = new List<string>();
        List<string> vetoword = new List<string>();
        vetoword.Add("[[zh:"); //skip pages that already carry an inline zh interwiki
        vetoword.Add("förgrening"); //skip disambiguation pages
        //vetoword.Add("isländska");
        //Log page for sv/zh duplicate candidates:
        Page pd = new Page(site, "Användare:Lsjbot/Kinadubletter");
        tryload(pd, 2);
        //Edit throttle: oldtime is the earliest moment the next save may happen.
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to check : " + pl.Count().ToString());
        int iremain = pl.Count();
        foreach (Page p in pl)
        {
            //Skip start of alphabet:
            if (String.Compare(p.title, "莫力庙") < 0)
            {
                continue;
            }
            //tryload/trysave: helpers declared elsewhere in this file; the int argument
            //is presumably a retry count — TODO confirm against their definitions.
            if (!tryload(p, 2))
            {
                continue;
            }
            if (!p.Exists())
            {
                continue;
            }
            string origtitle = p.title;
            //Follow redirect:
            if (p.IsRedirect())
            {
                p.title = p.RedirectsTo();
                Console.WriteLine("RedirectsTo = " + p.RedirectsTo());
                Page p2 = new Page(site, p.RedirectsTo());
                if (!tryload(p2, 2))
                {
                    continue;
                }
                if (!p2.Exists())
                {
                    continue;
                }
                p.title = p2.title;
                p.text = p2.text;
                //Console.WriteLine(p2.text);
            }
            //Check so required title actually present:
            if (requiretitle.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requiretitle)
                {
                    if (p.title.Contains(s))
                    {
                        onefound = true;
                    }
                }
                if (!onefound)
                {
                    Console.WriteLine("requiretitle not found");
                    continue;
                }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword)
            {
                if (!p.text.Contains(s))
                {
                    allfound = false;
                }
            }
            if (!allfound)
            {
                Console.WriteLine("requireword not found");
                continue;
            }
            if (requireone.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requireone)
                {
                    if (p.text.Contains(s))
                    {
                        onefound = true;
                    }
                }
                if (!onefound)
                {
                    Console.WriteLine("requireone not found");
                    continue;
                }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword)
            {
                if (p.text.Contains(s))
                {
                    vetofound = true;
                }
            }
            if (vetofound)
            {
                Console.WriteLine("vetoword found");
                continue;
            }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            //    p.title = origtitle;
            //    p.Load();
            //}
            //Check wikidata: skip pages that already have a zh sitelink there.
            List<string> wdlinks;
            try
            {
                wdlinks = p.GetWikidataLinks();
            }
            catch (WebException e)
            {
                string message = e.Message;
                Console.Error.WriteLine("ts we " + message);
                continue;
            }
            if (wdlinks.Count > 0)
            {
                bool zhfound = false;
                Console.WriteLine("wdlinks:");
                foreach (string iwl in wdlinks)
                {
                    Console.WriteLine(iwl);
                    if (iwl == "zh")
                    {
                        zhfound = true;
                    }
                }
                if (zhfound)
                {
                    continue;
                }
                //Console.ReadLine();
            }
            //Find in Chinese Wikipedia: the sv title (Chinese characters) doubles as the zh title.
            Page pzh = new Page(zhsite, origtitle);
            tryload(pzh, 2);
            if (!pzh.Exists())
            {
                Console.WriteLine("Not found on zhwp");
                continue;
            }
            if (pzh.text.Contains("disambig"))
            {
                Console.WriteLine("Disambig on zhwp");
                continue;
            }
            //If the zh article already sitelinks an sv article, record the pair on the
            //duplicates log page rather than adding a link.
            wdlinks = pzh.GetWikidataLinks();
            if (wdlinks.Count > 0)
            {
                bool svfound = false;
                Console.WriteLine("zh-wdlinks:");
                foreach (string iwl in wdlinks)
                {
                    Console.WriteLine(iwl);
                    if (iwl == "sv")
                    {
                        svfound = true;
                    }
                }
                if (svfound)
                {
                    pd.text += "\n* [[" + p.title + "]] - [[:zh:" + pzh.title + "]]";
                    trysave(pd, 1);
                    continue;
                }
                //Console.ReadLine();
            }
            string origtext = p.text;
            //Do the actual replacement: insert the zh link just before a ceb interwiki
            //if one exists, otherwise append it at the end of the page.
            string zhlink = "[[zh:" + pzh.title + "]]";
            if (p.text.Contains("[[ceb:"))
            {
                p.text = p.text.Replace("[[ceb:", zhlink + "\n[[ceb:");
            }
            else
            {
                p.text += "\n" + zhlink;
            }
            //Save the result:
            if (p.text != origtext)
            {
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4))
                {
                    nedit++;
                    //Pause for manual review of the first few edits of round one.
                    if ((nedit < 4) && (nround == 1))
                    {
                        Console.Write("<ret>");
                        Console.ReadLine();
                    }
                    //Busy-wait until the 5-second throttle window has elapsed.
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime)
                    {
                        newtime = DateTime.Now;
                    }
                    oldtime = newtime.AddSeconds(5);
                }
            }
            iremain--;
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
        nround++;
    } while (nedit > 0); //repeat until a round makes no edits
}
//Bot job entry point: in robot-created Belarus articles, takes the page title,
//strips any parenthesised disambiguator (remove_disambig, declared elsewhere in
//this file), replaces the Cyrillic letters і/І with their Latin look-alikes i/I
//(cyrillic_i/latin_i/cyrillic_I/latin_I are declared elsewhere), and substitutes
//that latinised title for every occurrence of the original title in the page text.
//NOTE(review): the "******" fragment below is a redacted credential-reading sequence
//(password prompt plus bot-account literal) and does not compile as shown.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Ersätter och wikilänkar";
    site.minorEditByDefault = true;
    //Sanity check: confirm the Cyrillic and Latin letter constants really differ.
    if (cyrillic_i == latin_i)
    {
        Console.WriteLine("same i");
    }
    else
    {
        Console.WriteLine("different i");
    }
    if (cyrillic_I == latin_I)
    {
        Console.WriteLine("same I");
    }
    else
    {
        Console.WriteLine("different I");
    }
    do
    {
        nedit = 0;
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site);
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Geografi i Goiás");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        pl.FillFromCategory("Robotskapade Vitrysslandartiklar");//done: 2012-10, 2013-01, 2013-02, 2013-03, 2013-04, 2013-06, 2013-07, 2013-08, 2013-09, 2013-10, 2014-06, 2014-07, 2014-08
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        //pl.FillFromLinksToPage("Användare:Lsjbot/Algoritmer");
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"http://www.itis.gov;http://\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        pl.RemoveNamespaces(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 100, 101 });
        //replacedict is unused by this job; the commented entries below are the
        //configurations of earlier runs of this shared script, kept for reference.
        Dictionary<string, string> replacedict = new Dictionary<string, string>();
        //replacedict.Add("ligger på ön [[Maltese Islands]]", "ligger på ön [[Malta (ö)|Malta]]");
        //replacedict.Add("[http://www.itis.gov;http://www.cbif.gc.ca/itis (Canada);http://siit.conabio.gob.mx (Mexico) ITIS Global: The Integrated Taxonomic Information System]", "[http://www.itis.gov ITIS Global: The Integrated Taxonomic Information System], [http://www.cbif.gc.ca/eng/integrated-taxonomic-information-system-itis/ (Canada)], [http://www.conabio.gob.mx (Mexico)]");
        //replacedict.Add("[http://www.itis.gov;http://www.cbif.gc.ca/itis (Canada);http://siit.conabio.gob.mx (Mexico) ITIS Regional: The Integrated Taxonomic Information System]", "[http://www.itis.gov ITIS Global: The Integrated Taxonomic Information System], [http://www.cbif.gc.ca/eng/integrated-taxonomic-information-system-itis/ (Canada)], [http://www.conabio.gob.mx (Mexico)]");
        //replacedict.Add("[[djur|djur]]", "[[djur]]");
        //replacedict.Add("är ett [[släkte]] av [[djur]]", "är ett [[släkte]] av [[slemmaskar]]");
        //replacedict.Add("Phylum nga naglalakip la hin", "Ini nga phylum in naglalakip la hin");
        //replacedict.Add("[[Kategori:Leddjur]]", "[[Kategori:Kräftdjur]]");
        //replacedict.Add("[[Kategori:Kräftdjur]]", "[[Kategori:Hoppkräftor]]");
        //replacedict.Add("[[koralldjur|korall]]art", "[[havsanemon]]art");
        //replacedict.Add("| familia_sv = [[Havsormar]]\n| familia = Hydrophiidae", "| familia_sv = [[Giftsnokar]]\n| familia = Elapidae\n| subfamilia_sv = [[Havsormar]]\n| subfamilia = Hydrophiinae");
        //replacedict.Add("[[familj (biologi)|familjen]] [[havsormar]]", "[[familj (biologi)|familjen]] [[giftsnokar]] och underfamiljen [[havsormar]]");
        //replacedict.Add("| familia_sv = [[Giftsnokar]]\n familia = Elapidae","| familia_sv = [[Giftsnokar]]\n| familia = Elapidae");
        //replacedict.Add("| familia = Muridae", "| familia = Cricetidae\n| subfamilia_sv = [[Hamstrar]]\n| subfamilia = Cricetinae");
        //replacedict.Add("| familia = Muridae", "| familia = Cricetidae\n| subfamilia_sv = \n| subfamilia = [[Tylomyinae]]");
        //replacedict.Add("Råttdjur", "Hamsterartade gnagare");
        //replacedict.Add("[[Muridae|råttdjur", "[[Cricetidae|hamsterartade gnagare");
        //replacedict.Add("råttdjur", "hamsterartade gnagare");
        //replacedict.Add("[[Muridae|hamsterartade gnagare", "[[Cricetidae|hamsterartade gnagare");
        //replacedict.Add(" (Muridae)]]", "]]");
        ////replacedict.Add("| genus_sv = \n| genus = [[Arborimus]]","| genus_sv = \n| genus = [[Arborimus]]"
        //replacedict.Add("[[Weaver (auktor)]]", "[[Weaver (auktor)|Weaver]]");
        //replacedict.Add("| familia = Muridae", "| familia = [[Nesomyidae]]\n| subfamilia_sv = [[Afrikanska klippmöss]]\n| subfamilia = Petromyscinae");
        //replacedict.Add("| familia_sv = [[Råttdjur]]", "| familia_sv =");
        //replacedict.Add("[[familj (biologi)|familjen]] [[råttdjur]]", "[[familj (biologi)|familjen]] [[Nesomyidae]]");
        //replacedict.Add("| familia_sv = [[Råttdjur]]\n| familia = Muridae", "| familia_sv =\n| familia = [[Nesomyidae]]\n| subfamilia_sv = [[Trädmöss]]| subfamilia = Dendromurinae");
        //replacedict.Add("| subfamilia_sv = [[Hamsterråttor]]", "| subfamilia_sv = [[Trädmöss]]");
        //replacedict.Add("[[Kategori:Långtungebin]]", "[[Kategori:Orkidébin]]");
        //replacedict.Add("och [[familj (biologi)|familjen]] [[långtungebin]]", "[[tribus]] [[orkidébin]], och [[familj (biologi)|familjen]] [[långtungebin]]");
        //replacedict.Add("| familia_sv = [[Bladhorningar]]", "| superfamilia_sv = [[Bladhorningar]]\n| superfamilia = Scarabaeoidea");
        //replacedict.Add("och [[familj (biologi)|familjen]] [[bladhorningar]]", "[[familj (biologi)|familjen]] [[Scarabaeidae]] och [[överfamilj]]en [[bladhorningar]]");
        //replacedict.Add("ingår i [[familj (biologi)|familjen]] [[Scarabaeidae|bladhorningar]]", "ingår i [[familj (biologi)|familjen]] [[Scarabaeidae]] och [[överfamilj]]en [[bladhorningar]]");
        //replacedict.Add("Inga underarter finns listade.", "Inga [[underart]]er finns listade i [[Catalogue of Life]].");
        //replacedict.Add("[[Kategori:Egentliga insekter]]", "[[Kategori:Termiter]]");
        //replacedict.Add("av Linnaeus ", "av [[Carl von Linné]] ");
        //replacedict.Add(" = Linnaeus,", " = [[Carl von Linné|Linnaeus]],");
        //replacedict.Add("av [[Carl von Linné|Linnaeus]]", "av [[Carl von Linné]]");
        //replacedict.Add(" taxon_authority = [[Linnaeus (auktor)|Linnaeus]]", " taxon_authority = [[Carl von Linné|Linnaeus]]");
        //replacedict.Add("av [[Linnaeus (auktor)|Linnaeus]] ", "av [[Carl von Linné]] ");
        //replacedict.Add(" och Amp; ", " och ");
        //replacedict.Add("[[[[", "[[");
        //replacedict.Add("]]]]", "]]");
        //replacedict.Add("<noinclude>{{Kartposition/Info}}", "<noinclude>\n{{Kartposition/Info}}");
        //replacedict.Add("[[Eulalia]]", "[[Eulalia (växter)|Eulalia]]");
        //replacedict.Add("Anomalepidae]]", "Anomalepididae]]");
        //replacedict.Add("[[Kategori:Egentliga insekter]]", "[[Kategori:Spökskräckor]]");
        //replacedict.Add("| ordo_sv = \n| ordo = [[Phasmida]]", "| ordo_sv = [[Spökskräckor]]\n| ordo = Phasmida");
        //replacedict.Add("| ordo_sv = \n| ordo = [[Phasmatodea]]", "| ordo_sv = [[Spökskräckor]]\n| ordo = Phasmatodea");
        //replacedict.Add("| ordo_sv = \n| ordo = [[Phasmatodea]]", "| ordo_sv = [[Spökskräckor]]\n| ordo = Phasmatodea");
        //replacedict.Add("<I>", "''");
        //replacedict.Add("<i>", "''");
        //replacedict.Add("Collection Patrimoines ,</ref>", "Collection Patrimoines.''</ref>");
        //replacedict.Add("Expedition 1907-1908</b>", "Expedition 1907-1908.''");
        //replacedict.Add("[[Further-eastern European Time|FET]]", "[[Östafrikansk tid|EAT]]");
        //replacedict.Add("{{Sidnamn annan skrift|latinska alfabetet}}", "{{Sidnamn annan skrift|kyrilliska alfabetet}}");
        //replacedict.Add("[[Bangladesh Standard Time|BST]]", "[[Bhutan Time|BTT]]");
        //replacedict.Add("<ref name = \"vp\">{{Cite web |url= {{Viewfinderlink}}|title= Viewfinder Panoramas Digital elevation Model|date= 2015-06-21|format= }}</ref>", "");
        //replacedict.Add("== Saysay ==","");
        //replacedict.Add("<references group=\"saysay\"/>","");
        //replacedict.Add("administratibo nga mga dibisyon sa Bangladesh", "administratibo nga mga dibisyon sa Burkina Faso");
        //replacedict.Add("image = Бесцветный богомол.jpg", "image =");
        //replacedict.Add("bild = Бесцветный богомол.jpg", "bild =");
        //replacedict.Add(" = [[Brčko]]", " = [[Brčko (distrikt)|Brčko]]");
        //replacedict.Add(" = Entitet", " = Distrikt");
        //replacedict.Add("entiteten <!--ADM1-->[[Brčko]]", "distriktet <!--ADM1-->[[Brčko (distrikt)|Brčko]]");
        //replacedict.Add(" = [[Brčko]]", "| state = [[Brčko (distrikt)|Brčko]]");
        //replacedict.Add("En underart finns: ''", "Utöver nominatformen finns också underarten ''");
        //replacedict.Add("| timezone = [[Fernando de Noronha Time|FNT]]", "| timezone = [[Brasilia Time|BRT]]");
        //replacedict.Add("| timezone_DST = [[Amazon Summer Time|AMST]]", "| timezone_DST = [[Brasilia Summer Time|BRST]]");
        //replacedict.Add("| utc_offset = -2", "| utc_offset = -3");
        //replacedict.Add("| utc_offset_DST = -3", "| utc_offset_DST = -2");
        //replacedict.Add("[[Kungariket Olanda]]", "[[Olanda]]");
        //replacedict.Add("Ð", "Đ"); //from Icelandic Ð to Bosnian Đ
        //replacedict.Add("Schweizs administrativa indelning", "Schweiz administrativa indelning");
        //replacedict.Add("Mer om algoritmen finns här: [[Användare:Lsjbot/Algoritmer]].", "{{Lsjbot-algoritmnot}}");
        Dictionary<string, string> regexdict = new Dictionary<string, string>();
        //regexdict.Add(@"\| timezone *= \[\[Fernando de Noronha Time\|FNT\]\]", "| timezone = [[Brasilia Time|BRT]]");
        //regexdict.Add(@"\| timezone_DST *= \[\[Amazon Summer Time\|AMST\]\]", "| timezone_DST = [[Brasilia Summer Time|BRST]]");
        //regexdict.Add(@"\| utc_offset *= -2", "| utc_offset = -3");
        //regexdict.Add(@"\| utc_offset_DST *= -3", "| utc_offset_DST = -2");
        List<string> linkword = new List<string>();
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List<string> requiretitle = new List<string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Only titles containing a Cyrillic і/І are candidates for latinisation.
        requiretitle.Add(cyrillic_i);
        requiretitle.Add(cyrillic_I);
        //Require ALL in requireword list:
        List<string> requireword = new List<string>();
        requireword.Add("obotskapad");
        //requireword.Add("= -3\n");
        //requireword.Add("Brasilien");
        //Require AT LEAST ONE in requireone list:
        List<string> requireone = new List<string>();
        List<string> vetoword = new List<string>();
        //vetoword.Add("Argentina");
        //vetoword.Add("Island");
        //vetoword.Add("isländska");
        //Edit throttle: oldtime is the earliest moment the next save may happen.
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to change : " + pl.Count().ToString());
        int iremain = pl.Count();
        foreach (Page p in pl)
        {
            //Skip start of alphabet:
            //if (String.Compare(p.title,"Sicydium") < 0 )
            //    continue;
            //Check so required title actually present (title check runs before the page is loaded):
            if (requiretitle.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requiretitle)
                {
                    if (p.title.Contains(s))
                    {
                        onefound = true;
                    }
                }
                if (!onefound)
                {
                    Console.WriteLine("requiretitle not found");
                    continue;
                }
            }
            if (!tryload(p, 2))
            {
                continue;
            }
            if (!p.Exists())
            {
                continue;
            }
            string origtitle = p.title;
            //Follow redirect:
            if (p.IsRedirect())
            {
                p.title = p.RedirectsTo();
                if (!tryload(p, 2))
                {
                    continue;
                }
                if (!p.Exists())
                {
                    continue;
                }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword)
            {
                if (!p.text.Contains(s))
                {
                    allfound = false;
                }
            }
            if (!allfound)
            {
                Console.WriteLine("requireword not found");
                continue;
            }
            if (requireone.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requireone)
                {
                    if (p.text.Contains(s))
                    {
                        onefound = true;
                    }
                }
                if (!onefound)
                {
                    Console.WriteLine("requireone not found");
                    continue;
                }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword)
            {
                if (p.text.Contains(s))
                {
                    vetofound = true;
                }
            }
            if (vetofound)
            {
                Console.WriteLine("vetoword found");
                continue;
            }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            //    p.title = origtitle;
            //    p.Load();
            //}
            string origtext = p.text;
            //Strip the disambiguator from the title, latinise і/І, and replace every
            //occurrence of the stripped title inside the article text.
            string tit = remove_disambig(p.title);
            string latintit = tit.Replace(cyrillic_i, latin_i).Replace(cyrillic_I, latin_I);
            p.text = p.text.Replace(tit, latintit);
            //Do the actual replacement:
            //foreach (KeyValuePair<string, string> replacepair in replacedict)
            //{
            //    p.text = p.text.Replace(replacepair.Key, replacepair.Value);
            //}
            //foreach (KeyValuePair<string, string> replacepair in regexdict)
            //{
            //    p.text = Regex.Replace(p.text, replacepair.Key, replacepair.Value);
            //}
            //special for mismatching tags:
            //int itag = p.text.ToLower().IndexOf("<i>");
            //int refend = p.text.IndexOf("</ref>", itag);
            //int bend = p.text.ToLower().IndexOf("</b>", itag);
            //if (refend < 0)
            //    refend = 999999;
            //if (bend < 0)
            //    bend = 999999;
            //if (refend < bend)
            //{
            //    p.text = ReplaceOne(p.text, "</ref>", "''</ref>", itag);
            //    p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else if (bend < refend)
            //{
            //    p.text = ReplaceOne(p.text, "</b>", "''</b>", itag);
            //    p.text = ReplaceOne(p.text, "</B>", "''</B>", itag);
            //    p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else
            //    p.text = p.text.Replace("<i>", "").Replace("<I>", "");
            //Wikilink first occurrence of each word, if not linked already:
            foreach (string s in linkword)
            {
                if (p.text.IndexOf(s) < 0)
                {
                    continue;
                }
                string slinked = "[[" + s + "]]";
                if (p.text.IndexOf(slinked) < 0)
                {
                    p.text = p.text.Insert(p.text.IndexOf(s), "[[");
                    p.text = p.text.Replace("[[" + s, slinked);
                }
            }
            //Save the result:
            if (p.text != origtext)
            {
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4))
                {
                    nedit++;
                    //Pause for manual review of the first few edits.
                    if (nedit < 4)
                    {
                        Console.Write("<ret>");
                        Console.ReadLine();
                    }
                    //Busy-wait until the 5-second throttle window has elapsed.
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime)
                    {
                        newtime = DateTime.Now;
                    }
                    oldtime = newtime.AddSeconds(5);
                }
            }
            iremain--;
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
    } while (false);// (nedit > 0);
}
//Bot job entry point: for articles tagged "är <!--U.SHSU-->" (sandbanks), renames the
//category "Landformer på havets botten i X" to "Sandbankar i X" in the article text,
//neutralises the U.SHSU tag, and creates the new province-level category page (plus a
//country-level "Sandbankar i <country>" parent) the first time each is needed.
//NOTE(review): the "******" fragment below is a redacted credential-reading sequence
//(password prompt plus bot-account literal) and does not compile as shown.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Ersätter och wikilänkar";
    site.minorEditByDefault = true;
    //Categories already created or verified during this run, to avoid repeat lookups.
    List<string> donecat = new List<string>();
    int nround = 1;
    do
    {
        nedit = 0;
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site);
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Robotskapade Cypernartiklar");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl.FillFromCategory("Robotskapade Australienartiklar");//done: 2012-10, 2013-01, 2013-02, 2013-03, 2013-04, 2013-06, 2013-07, 2013-08, 2013-09, 2013-10, 2014-06, 2014-07, 2014-08
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        //pl.FillFromLinksToPage("Användare:Lsjbot/Algoritmer");
        //pl.FillFromLinksToPage("Nicosia (huvudstaden)");
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"Användare:Lsjbot/Algoritmer\"", 4999);
        pl.FillFromSearchResults("insource:\"är <!--U.SHSU-->\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        //pl.RemoveNamespaces(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 100, 101 });
        Dictionary<string, string> replacedict = new Dictionary<string, string>();
        //replacedict.Add(" delen av landet, ", " delen av landskapet, ");
        //replacedict.Add("[[Nicosia (huvudstaden)|Nicosia]]", "[[Nicosia]]");
        //replacedict.Add("Den ligger i regionen <!--ADM2-->[[", "Den ligger i kommunen <!--ADM2-->[[");
        replacedict.Add("Landformer på havets botten i", "Sandbankar i");
        //The xxx prefix presumably stops the insource search above from matching
        //already-processed pages — TODO confirm.
        replacedict.Add("är <!--U.SHSU-->", "är <!--xxxU.SHSU-->");
        replacedict.Add("är <!--U.SHSU.-->", "är <!--xxxU.SHSU-->");
        Dictionary<string, string> regexdict = new Dictionary<string, string>();
        //regexdict.Add(@"\| timezone *= \[\[Fernando de Noronha Time\|FNT\]\]", "| timezone = [[Brasilia Time|BRT]]");
        //regexdict.Add(@"\| timezone_DST *= \[\[Amazon Summer Time\|AMST\]\]", "| timezone_DST = [[Brasilia Summer Time|BRST]]");
        //regexdict.Add(@"\| utc_offset *= -2", "| utc_offset = -3");
        //regexdict.Add(@"\| category *= Terass", "| category = Terrass");
        List<string> linkword = new List<string>();
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List<string> requiretitle = new List<string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Require ALL in requireword list:
        List<string> requireword = new List<string>();
        requireword.Add("obotskapad");
        //requireword.Add("= -3\n");
        //requireword.Add("Brasilien");
        //Require AT LEAST ONE in requireone list:
        List<string> requireone = new List<string>();
        List<string> vetoword = new List<string>();
        //vetoword.Add("Argentina");
        //vetoword.Add("Island");
        //vetoword.Add("isländska");
        //Edit throttle: oldtime is the earliest moment the next save may happen.
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to change : " + pl.Count().ToString());
        int iremain = pl.Count();
        foreach (Page p in pl)
        {
            //Note: decremented up front here, unlike the sibling jobs.
            iremain--;
            //Skip start of alphabet:
            //if (String.Compare(p.title, "Vivienne") < 0)
            //{
            //    continue;
            //}
            if (!tryload(p, 2))
            {
                continue;
            }
            if (!p.Exists())
            {
                continue;
            }
            string origtitle = p.title;
            //Follow redirect:
            if (p.IsRedirect())
            {
                p.title = p.RedirectsTo();
                if (!tryload(p, 2))
                {
                    continue;
                }
                if (!p.Exists())
                {
                    continue;
                }
            }
            //Check so required title actually present:
            if (requiretitle.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requiretitle)
                {
                    if (p.title.Contains(s))
                    {
                        onefound = true;
                    }
                }
                if (!onefound)
                {
                    Console.WriteLine("requiretitle not found");
                    continue;
                }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword)
            {
                if (!p.text.Contains(s))
                {
                    allfound = false;
                }
            }
            if (!allfound)
            {
                Console.WriteLine("requireword not found");
                continue;
            }
            if (requireone.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requireone)
                {
                    if (p.text.Contains(s))
                    {
                        onefound = true;
                    }
                }
                if (!onefound)
                {
                    Console.WriteLine("requireone not found");
                    continue;
                }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword)
            {
                if (p.text.Contains(s))
                {
                    vetofound = true;
                }
            }
            if (vetofound)
            {
                Console.WriteLine("vetoword found");
                continue;
            }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            //    p.title = origtitle;
            //    p.Load();
            //}
            //Extract the province name from the old category link in the article text.
            string expr = @"\[\[Kategori:Landformer på havets botten i (.*?)\]\]";
            //string expr = @"\[\[Kategori:Landformer på havets (botten)";
            Match m = Regex.Match(p.text, expr);
            Console.WriteLine(m);
            string province = m.Groups[1].Value;
            Console.WriteLine(province);
            //Take the last "country" parameter of the geobox template, if any.
            string country = "";
            foreach (string c in p.GetTemplateParameter("geobox", "country"))
            {
                country = c;
            }
            Console.WriteLine("country = " + country);
            //Console.ReadLine();
            string origtext = p.text;
            //Do the actual replacement:
            foreach (KeyValuePair<string, string> replacepair in replacedict)
            {
                p.text = p.text.Replace(replacepair.Key, replacepair.Value);
            }
            foreach (KeyValuePair<string, string> replacepair in regexdict)
            {
                p.text = Regex.Replace(p.text, replacepair.Key, replacepair.Value);
            }
            //Create the new province category on first use, parented under the old
            //seabed-landform category and (when distinct) the country-level category.
            if (!String.IsNullOrEmpty(province))
            {
                string newcat = "Sandbankar i " + province;
                if (!donecat.Contains(newcat))
                {
                    Page pcat = new Page(site, "Kategori:" + newcat);
                    tryload(pcat, 1);
                    if (!pcat.Exists())
                    {
                        string catsea = "Landformer på havets botten i " + province;
                        pcat.AddToCategory(catsea);
                        string catcountry = "Sandbankar i " + country;
                        if (country != province)
                        {
                            pcat.AddToCategory(catcountry);
                        }
                        trysave(pcat, 2);
                        donecat.Add(newcat);
                        //Also create the country-level parent category if missing.
                        if (country != province)
                        {
                            if (!donecat.Contains(catcountry))
                            {
                                Page pcat2 = new Page(site, "Kategori:" + catcountry);
                                tryload(pcat2, 1);
                                if (!pcat2.Exists())
                                {
                                    pcat2.AddToCategory("Landformer på havets botten i " + country);
                                    pcat2.AddToCategory("Sandbankar efter land");
                                    trysave(pcat2, 2);
                                    donecat.Add(catcountry);
                                }
                                else
                                {
                                    donecat.Add(catcountry);
                                }
                            }
                        }
                    }
                    else
                    {
                        donecat.Add(newcat);
                    }
                }
            }
            //special for mismatching tags:
            //int itag = p.text.ToLower().IndexOf("<i>");
            //int refend = p.text.IndexOf("</ref>", itag);
            //int bend = p.text.ToLower().IndexOf("</b>", itag);
            //if (refend < 0)
            //    refend = 999999;
            //if (bend < 0)
            //    bend = 999999;
            //if (refend < bend)
            //{
            //    p.text = ReplaceOne(p.text, "</ref>", "''</ref>", itag);
            //    p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else if (bend < refend)
            //{
            //    p.text = ReplaceOne(p.text, "</b>", "''</b>", itag);
            //    p.text = ReplaceOne(p.text, "</B>", "''</B>", itag);
            //    p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else
            //    p.text = p.text.Replace("<i>", "").Replace("<I>", "");
            //Wikilink first occurrence of each word, if not linked already:
            foreach (string s in linkword)
            {
                if (p.text.IndexOf(s) < 0)
                {
                    continue;
                }
                string slinked = "[[" + s + "]]";
                if (p.text.IndexOf(slinked) < 0)
                {
                    p.text = p.text.Insert(p.text.IndexOf(s), "[[");
                    p.text = p.text.Replace("[[" + s, slinked);
                }
            }
            //Save the result:
            if (p.text != origtext)
            {
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4))
                {
                    nedit++;
                    //Pause for manual review of the first few edits of round one.
                    if ((nedit < 4) && (nround == 1))
                    {
                        Console.Write("<ret>");
                        Console.ReadLine();
                    }
                    //Busy-wait until the 5-second throttle window has elapsed.
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime)
                    {
                        newtime = DateTime.Now;
                    }
                    oldtime = newtime.AddSeconds(5);
                }
                //if ( p.title.Contains("(terass"))
                //{
                //    movepage(site, p.title, p.title.Replace("(terass", "(terrass"));
                //    DateTime newtime = DateTime.Now;
                //    while (newtime < oldtime)
                //        newtime = DateTime.Now;
                //    oldtime = newtime.AddSeconds(5);
                //}
            }
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
        nround++;
    } while (nedit > 0); //repeat until a round makes no edits
}
//Bot job entry point: renames Lithuania's "provins" categories to "län". For every
//category in the [[Kategori:Litauens län]] tree whose title contains "provins", it
//creates the corresponding "län" category (copying the old category text) if it does
//not exist yet, then moves the members with movecat (declared elsewhere in this file).
//NOTE(review): the "******" fragment below is a redacted credential-reading sequence
//(password prompt plus bot-account literal) and does not compile as shown.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Flyttar Litauens provinskategorier till län";
    site.minorEditByDefault = false;
    PageList pl = new PageList(site);
    //Select how to get pages. Uncomment as needed.
    //Find articles from a category
    //pl.FillAllFromCategoryTree("Nesomyidae");
    //pl1.FillAllFromCategoryTree("Siphonostomatoida");
    //foreach (Page p in pl1)
    //    pl.Add(p);
    //pl.FillFromCategory("Samtliga artiklar föreslagna för sammanslagningar och delningar");
    //Find subcategories of a category
    pl.FillAllFromCategoryTree("Litauens län");
    //Skip all namespaces except categories:
    pl.RemoveNamespaces(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 100, 101 });
    Console.WriteLine(pl.Count().ToString() + " kategorier att kolla.");
    int ncat = pl.Count();
    foreach (Page p in pl)
    {
        if (p.title.Contains("provins"))
        {
            Page pnew = new Page(site, p.title.Replace("provins", "län"));
            tryload(p, 1);
            tryload(pnew, 1);
            //Create the target category with the old category's text if it is missing.
            if (!pnew.Exists())
            {
                pnew.text = p.text;
                trysave(pnew, 1);
            }
            //Recategorise the members of the old category into the new one.
            movecat(p.title, pnew.title, site);
        }
        ncat--;
        Console.WriteLine("Categories remaining: " + ncat.ToString());
    }
    //Find articles from all the links to a template, mostly useful on very small wikis
    //pl.FillFromLinksToPage("Hersilia (djur)");
    //Set specific article:
    //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
    //Skip all namespaces except articles:
    //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
    Console.WriteLine("Total # edits = " + nedit.ToString());
}
// Collects the rated articles of a WikiProject identified by `key` into `arts`.
// First pass: every quality ("besorolás") category is scanned; each talk page whose
// title matches TalkPageRegex is stored under its article name with today's date,
// the matching quality level, and no importance. Second pass: the importance
// ("fontosság") categories are scanned (skipping the first, which carries no
// importance rating) and the importance is set only on articles already found in a
// quality category — importance-only ratings are deliberately ignored.
// QualityCategories, ImportanceCategories, DefaultCat, TalkPageRegex and huwiki are
// declared elsewhere in this file.
static void CollectArticlesFromCategories(string key, Dictionary<string, Article> arts)
{
    Console.WriteLine("\nBesorolási kategóriák vizsgálata ...");
    foreach (string s in QualityCategories)
    {
        PageList p = new PageList(huwiki);
        p.FillFromCategory(string.Format(DefaultCat, key, s));
        Console.WriteLine(" : " + s + ": " + (p.Count() > 0 ? string.Format("{0} szócikket tartalmaz", p.Count()) : "nem tartalmaz szócikkeket"));
        foreach (Page art in p)
        {
            // Extract the article name from the talk-page title.
            Match match = Regex.Match(art.title, TalkPageRegex);
            if (match.Success)
            {
                string pageName = match.Groups["page"].Value;
                // First quality category wins. Skip duplicates explicitly instead of
                // letting Dictionary.Add throw and swallowing the exception.
                if (!arts.ContainsKey(pageName))
                {
                    arts.Add(pageName, new Article()
                    {
                        Name = pageName,
                        Date = DateTime.Today,
                        Importance = null,
                        Quality = new Quality(QualityCategories.IndexOf(s)),
                        OldId = ""
                    });
                }
            }
        }
    }
    Console.WriteLine("\nFontossági kategóriák vizsgálata ...");
    bool skipped = false;
    foreach (string s in ImportanceCategories)
    {
        if (!skipped)
        {
            // Skip the first category: it has no importance rating.
            skipped = true;
            continue;
        }
        PageList p = new PageList(huwiki);
        p.FillFromCategory(string.Format(DefaultCat, key, s));
        Console.WriteLine(" : " + s + ": " + (p.Count() > 0 ? string.Format("{0} szócikket tartalmaz", p.Count()) : "nem tartalmaz szócikkeket"));
        foreach (Page art in p)
        {
            Match match = Regex.Match(art.title, TalkPageRegex);
            if (match.Success)
            {
                string pageName = match.Groups["page"].Value;
                if (arts.ContainsKey(pageName))
                {
                    // Only set importance when the article was rated for quality above.
                    arts[pageName].Importance = new Importance(ImportanceCategories.IndexOf(s));
                }
            }
        }
    }
}
// Replaces every occurrence of the misspelling "अप्रैल" with "अप्रिल" on pages of
// the logged-in wiki found via a Google search, reporting progress through the
// OnFeedbackForBot callback, then dumps the "Astronomy" category tree to a local
// XML file and signals completion through OnBotProcessComplete.
//
// Throws InvalidOperationException when no Site has been established (not logged in).
public void FindAndReplaceThread()
{
    string findText = "अप्रैल";
    string replaceText = "अप्रिल";
    if (site == null)
    {
        // Specific exception type instead of the original bare Exception;
        // callers catching Exception still catch this.
        throw new InvalidOperationException("Not logged in ");
    }
    Site enWiki = this.site;
    // Collect up to 100 pages mentioning the target text and load their content.
    PageList pl = new PageList(enWiki);
    pl.FillFromGoogleSearchResults(findText, 100);
    //pl.FillFromSearchResults(findText,100);
    pl.LoadEx();
    int thisCounter = 0;
    int maxValue = pl.Count();
    int percentComplete = 0;
    foreach (Page i in pl)
    {
        thisCounter++;
        if (maxValue > 0)
        {
            percentComplete = ((thisCounter * 100) / maxValue);
        }
        else
        {
            // Original fallback value, kept as-is; only reachable if Count()
            // disagrees with the enumeration.
            percentComplete = 5;
        }
        OnFeedbackForBot?.Invoke(percentComplete);
        i.text = i.text.Replace(findText, replaceText);
    }
    // Save all changed pages back to the wiki with a 5-second interval.
    pl.SaveSmoothly(5, ("comment: mistake autocorrection " + findText + " with " + replaceText), true);
    // Re-use the list for an XML dump of the Astronomy category tree.
    pl.Clear();
    pl.FillFromCategoryTree("Astronomy");
    pl.SaveXMLDumpToFile("Dumps\\ArticlesAboutAstronomy.xml");
    OnBotProcessComplete?.Invoke("Completed");
}
// One-off maintenance entry point (DotNetWikiBot script) for Swedish Wikipedia:
// runs a single pass over the pages of "Robotskapade artiklar 2015-11" and, for
// pages whose geobox "population_note" parameter looks imported from another wiki,
// blanks the "population_date" parameter. The many commented-out replacedict
// entries are a record of earlier replacement campaigns and are all inactive.
// NOTE(review): the credential-reading code below was redacted to "******" in this
// copy and does not parse as C#; presumably it read the password from the console
// and set the bot account name — confirm against the original file.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Ersätter och wikilänkar";
    site.minorEditByDefault = true;
    do
    {
        nedit = 0; // edit counter; declared outside this block
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site); // scratch list for merging category trees (unused this run)
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Phasmatodea");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        // pl.Add(p);
        // Active page source for this run:
        pl.FillFromCategory("Robotskapade artiklar 2015-11");
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        //pl.FillFromLinksToPage("Maltese Islands");
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"http://www.itis.gov;http://\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
        // Literal search -> replace pairs; every entry is currently disabled and
        // kept only as a history of previous runs.
        Dictionary <string, string> replacedict = new Dictionary <string, string>();
        //replacedict.Add("ligger på ön [[Maltese Islands]]", "ligger på ön [[Malta (ö)|Malta]]");
        //replacedict.Add("[http://www.itis.gov;http://www.cbif.gc.ca/itis (Canada);http://siit.conabio.gob.mx (Mexico) ITIS Global: The Integrated Taxonomic Information System]", "[http://www.itis.gov ITIS Global: The Integrated Taxonomic Information System], [http://www.cbif.gc.ca/eng/integrated-taxonomic-information-system-itis/ (Canada)], [http://www.conabio.gob.mx (Mexico)]");
        //replacedict.Add("[http://www.itis.gov;http://www.cbif.gc.ca/itis (Canada);http://siit.conabio.gob.mx (Mexico) ITIS Regional: The Integrated Taxonomic Information System]", "[http://www.itis.gov ITIS Global: The Integrated Taxonomic Information System], [http://www.cbif.gc.ca/eng/integrated-taxonomic-information-system-itis/ (Canada)], [http://www.conabio.gob.mx (Mexico)]");
        //replacedict.Add("[[djur|djur]]", "[[djur]]");
        //replacedict.Add("är ett [[släkte]] av [[djur]]", "är ett [[släkte]] av [[slemmaskar]]");
        //replacedict.Add("Phylum nga naglalakip la hin", "Ini nga phylum in naglalakip la hin");
        //replacedict.Add("[[Kategori:Leddjur]]", "[[Kategori:Kräftdjur]]");
        //replacedict.Add("[[Kategori:Kräftdjur]]", "[[Kategori:Hoppkräftor]]");
        //replacedict.Add("[[koralldjur|korall]]art", "[[havsanemon]]art");
        //replacedict.Add("| familia_sv = [[Havsormar]]\n| familia = Hydrophiidae", "| familia_sv = [[Giftsnokar]]\n| familia = Elapidae\n| subfamilia_sv = [[Havsormar]]\n| subfamilia = Hydrophiinae");
        //replacedict.Add("[[familj (biologi)|familjen]] [[havsormar]]", "[[familj (biologi)|familjen]] [[giftsnokar]] och underfamiljen [[havsormar]]");
        //replacedict.Add("| familia_sv = [[Giftsnokar]]\n familia = Elapidae","| familia_sv = [[Giftsnokar]]\n| familia = Elapidae");
        //replacedict.Add("| familia = Muridae", "| familia = Cricetidae\n| subfamilia_sv = [[Hamstrar]]\n| subfamilia = Cricetinae");
        //replacedict.Add("| familia = Muridae", "| familia = Cricetidae\n| subfamilia_sv = \n| subfamilia = [[Tylomyinae]]");
        //replacedict.Add("Råttdjur", "Hamsterartade gnagare");
        //replacedict.Add("[[Muridae|råttdjur", "[[Cricetidae|hamsterartade gnagare");
        //replacedict.Add("råttdjur", "hamsterartade gnagare");
        //replacedict.Add("[[Muridae|hamsterartade gnagare", "[[Cricetidae|hamsterartade gnagare");
        //replacedict.Add(" (Muridae)]]", "]]");
        ////replacedict.Add("| genus_sv = \n| genus = [[Arborimus]]","| genus_sv = \n| genus = [[Arborimus]]"
        //replacedict.Add("[[Weaver (auktor)]]", "[[Weaver (auktor)|Weaver]]");
        //replacedict.Add("| familia = Muridae", "| familia = [[Nesomyidae]]\n| subfamilia_sv = [[Afrikanska klippmöss]]\n| subfamilia = Petromyscinae");
        //replacedict.Add("| familia_sv = [[Råttdjur]]", "| familia_sv =");
        //replacedict.Add("[[familj (biologi)|familjen]] [[råttdjur]]", "[[familj (biologi)|familjen]] [[Nesomyidae]]");
        //replacedict.Add("| familia_sv = [[Råttdjur]]\n| familia = Muridae", "| familia_sv =\n| familia = [[Nesomyidae]]\n| subfamilia_sv = [[Trädmöss]]| subfamilia = Dendromurinae");
        //replacedict.Add("| subfamilia_sv = [[Hamsterråttor]]", "| subfamilia_sv = [[Trädmöss]]");
        //replacedict.Add("[[Kategori:Långtungebin]]", "[[Kategori:Orkidébin]]");
        //replacedict.Add("och [[familj (biologi)|familjen]] [[långtungebin]]", "[[tribus]] [[orkidébin]], och [[familj (biologi)|familjen]] [[långtungebin]]");
        //replacedict.Add("| familia_sv = [[Bladhorningar]]", "| superfamilia_sv = [[Bladhorningar]]\n| superfamilia = Scarabaeoidea");
        //replacedict.Add("och [[familj (biologi)|familjen]] [[bladhorningar]]", "[[familj (biologi)|familjen]] [[Scarabaeidae]] och [[överfamilj]]en [[bladhorningar]]");
        //replacedict.Add("ingår i [[familj (biologi)|familjen]] [[Scarabaeidae|bladhorningar]]", "ingår i [[familj (biologi)|familjen]] [[Scarabaeidae]] och [[överfamilj]]en [[bladhorningar]]");
        //replacedict.Add("Inga underarter finns listade.", "Inga [[underart]]er finns listade i [[Catalogue of Life]].");
        //replacedict.Add("[[Kategori:Egentliga insekter]]", "[[Kategori:Termiter]]");
        //replacedict.Add("av Linnaeus ", "av [[Carl von Linné]] ");
        //replacedict.Add(" = Linnaeus,", " = [[Carl von Linné|Linnaeus]],");
        //replacedict.Add("av [[Carl von Linné|Linnaeus]]", "av [[Carl von Linné]]");
        //replacedict.Add(" taxon_authority = [[Linnaeus (auktor)|Linnaeus]]", " taxon_authority = [[Carl von Linné|Linnaeus]]");
        //replacedict.Add("av [[Linnaeus (auktor)|Linnaeus]] ", "av [[Carl von Linné]] ");
        //replacedict.Add(" och Amp; ", " och ");
        //replacedict.Add("[[[[", "[[");
        //replacedict.Add("]]]]", "]]");
        //replacedict.Add("<noinclude>{{Kartposition/Info}}", "<noinclude>\n{{Kartposition/Info}}");
        //replacedict.Add("[[Eulalia]]", "[[Eulalia (växter)|Eulalia]]");
        //replacedict.Add("Anomalepidae]]", "Anomalepididae]]");
        //replacedict.Add("[[Kategori:Egentliga insekter]]", "[[Kategori:Spökskräckor]]");
        //replacedict.Add("| ordo_sv = \n| ordo = [[Phasmida]]", "| ordo_sv = [[Spökskräckor]]\n| ordo = Phasmida");
        //replacedict.Add("| ordo_sv = \n| ordo = [[Phasmatodea]]", "| ordo_sv = [[Spökskräckor]]\n| ordo = Phasmatodea");
        //replacedict.Add("| ordo_sv = \n| ordo = [[Phasmatodea]]", "| ordo_sv = [[Spökskräckor]]\n| ordo = Phasmatodea");
        //replacedict.Add("<I>", "''");
        //replacedict.Add("<i>", "''");
        //replacedict.Add("Collection Patrimoines ,</ref>", "Collection Patrimoines.''</ref>");
        //replacedict.Add("Expedition 1907-1908</b>", "Expedition 1907-1908.''");
        //replacedict.Add("[[Further-eastern European Time|FET]]", "[[Östafrikansk tid|EAT]]");
        //replacedict.Add("{{Sidnamn annan skrift|latinska alfabetet}}", "{{Sidnamn annan skrift|kyrilliska alfabetet}}");
        // Words to wikilink on first occurrence (none active this run).
        List <string> linkword = new List <string>();
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List <string> requiretitle = new List <string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Require ALL in requireword list:
        List <string> requireword = new List <string>();
        requireword.Add("obotskapad");
        //Require AT LEAST ONE in requireone list:
        List <string> requireone = new List <string>();
        List <string> vetoword = new List <string>();
        //vetoword.Add("<B>");
        // Throttle baseline: the next save may not happen before oldtime.
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to change : " + pl.Count().ToString());
        int iremain = pl.Count();
        foreach (Page p in pl)
        {
            //Skip start of alphabet:
            //if (String.Compare(p.title,"Sicydium") < 0 )
            // continue;
            // tryload/trysave are retrying load/save helpers defined elsewhere in this file.
            if (!tryload(p, 2)) { continue; }
            if (!p.Exists()) { continue; }
            string origtitle = p.title;
            //Follow redirect:
            //if (p.IsRedirect())
            //{
            // p.title = p.RedirectsTo();
            // if (!tryload(p, 2))
            // continue;
            // if (!p.Exists())
            // continue;
            //}
            //Check so required title actually present:
            if (requiretitle.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requiretitle) { if (p.title.Contains(s)) { onefound = true; } }
                if (!onefound) { Console.WriteLine("requiretitle not found"); continue; }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword) { if (!p.text.Contains(s)) { allfound = false; } }
            if (!allfound) { Console.WriteLine("requireword not found"); continue; }
            if (requireone.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requireone) { if (p.text.Contains(s)) { onefound = true; } }
                if (!onefound) { Console.WriteLine("requireone not found"); continue; }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
            if (vetofound) { Console.WriteLine("vetoword found"); continue; }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            // p.title = origtitle;
            // p.Load();
            //}
            string origtext = p.text;
            //Do the actual replacement:
            //foreach (KeyValuePair<string, string> replacepair in replacedict)
            //{
            // p.text = p.text.Replace(replacepair.Key, replacepair.Value);
            //}
            // Active fix for this run: if the geobox "population_note" parameter
            // contains both "från" and "wiki" (i.e. looks imported from a wiki),
            // set "population_date" to an empty value.
            List <string> pn = p.GetTemplateParameter("geobox", "population_note");
            foreach (string pnn in pn)
            {
                if (pnn.Contains("från") && pnn.Contains("wiki"))
                {
                    p.SetTemplateParameter("geobox", "population_date", "", true);
                }
            }
            //special for mismatching tags:
            //int itag = p.text.ToLower().IndexOf("<i>");
            //int refend = p.text.IndexOf("</ref>", itag);
            //int bend = p.text.ToLower().IndexOf("</b>", itag);
            //if (refend < 0)
            // refend = 999999;
            //if (bend < 0)
            // bend = 999999;
            //if (refend < bend)
            //{
            // p.text = ReplaceOne(p.text, "</ref>", "''</ref>", itag);
            // p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else if (bend < refend)
            //{
            // p.text = ReplaceOne(p.text, "</b>", "''</b>", itag);
            // p.text = ReplaceOne(p.text, "</B>", "''</B>", itag);
            // p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else
            // p.text = p.text.Replace("<i>", "").Replace("<I>", "");
            //Wikilink first occurrence of each word, if not linked already:
            foreach (string s in linkword)
            {
                if (p.text.IndexOf(s) < 0) { continue; }
                string slinked = "[[" + s + "]]";
                if (p.text.IndexOf(slinked) < 0)
                {
                    p.text = p.text.Insert(p.text.IndexOf(s), "[[");
                    p.text = p.text.Replace("[[" + s, slinked);
                }
            }
            //Save the result:
            if (p.text != origtext)
            {
                p.text = p.text.Replace("= <!-", "=\n<!-");
                p.text = p.text.Replace("| 0 = ", "| 1 = ");
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4))
                {
                    nedit++;
                    // Manual confirmation for the first few edits of a run.
                    if (nedit < 4) { Console.Write("<ret>"); Console.ReadLine(); }
                    // Busy-wait until the 5-second throttle window has passed.
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime) { newtime = DateTime.Now; }
                    oldtime = newtime.AddSeconds(5);
                }
            }
            iremain--;
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
    } while (false); // (nedit > 0);
}
// Interactive category-move script (DotNetWikiBot) for Cebuano Wikipedia: asks
// on the console for a source category, a target category and a required title
// substring, then rewrites the category links on every matching page of the
// source category (including its subcategories) to point at the target category.
// NOTE(review): the credential-reading code below was redacted to "******" in this
// copy and does not parse as C#; presumably it read the password from the console
// and set the bot account name — confirm against the original file.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "ceb";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    PageList pl = new PageList(site);
    //PageList pl1 = new PageList(site);
    //Select how to get pages. Uncomment as needed.
    //Find articles from a category
    //pl.FillAllFromCategoryTree("Nesomyidae");
    //pl1.FillAllFromCategoryTree("Siphonostomatoida");
    //foreach (Page p in pl1)
    // pl.Add(p);
    //pl.FillFromCategory("Samtliga artiklar föreslagna för sammanslagningar och delningar");
    //Find subcategories of a category
    //pl.FillSubsFromCategory("Muridae (Rodentia)");
    // Interactive parameters for this run.
    Console.WriteLine("Move FROM category:");
    string fromcat = Console.ReadLine();
    Console.WriteLine("Move TO category:");
    string tocat = Console.ReadLine();
    Console.WriteLine("Move ONLY articles with title containing:");
    string required = Console.ReadLine(); // empty input matches every title (Contains(""))
    //Find subcategories and articles in a category
    pl.FillAllFromCategory(fromcat);
    //site.defaultEditComment = "Flyttar kategori " + fromcat + " till " + tocat;
    site.minorEditByDefault = false;
    site.defaultEditComment = "Moving category " + fromcat + " to " + tocat;
    //Find articles from all the links to a template, mostly useful on very small wikis
    //pl.FillFromLinksToPage("Hersilia (djur)");
    //Set specific article:
    //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
    //Skip all namespaces except articles:
    //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
    // Category-link prefixes in the spellings seen on this wiki.
    // NOTE(review): the third entry maps the "Category:" prefix to "Kategoriya:" —
    // looks like deliberate normalisation to the local prefix, confirm.
    Dictionary <string, string> replacedict = new Dictionary <string, string>();
    replacedict.Add("[[Kategori:" + fromcat, "[[Kategori:" + tocat);
    replacedict.Add("[[Kategoriya:" + fromcat, "[[Kategoriya:" + tocat);
    replacedict.Add("[[Category:" + fromcat, "[[Kategoriya:" + tocat);
    //replacedict.Add("Sommarflicksländor", "Dammflicksländor");
    //replacedict.Add("sommarflicksländor", "dammflicksländor");
    List <string> linkword = new List <string>();
    //linkword.Add("Stillahavssluttningen");
    //Require ALL in requireword list:
    List <string> requireword = new List <string>();
    //requireword.Add("obotskapad");
    //Require AT LEAST ONE in requireone list:
    List <string> requireone = new List <string>();
    List <string> vetoword = new List <string>();
    //vetoword.Add("Kategori:Fungi");
    // Throttle: 10 s before the first save, 5 s between subsequent saves.
    DateTime oldtime = DateTime.Now;
    oldtime = oldtime.AddSeconds(10);
    Console.WriteLine("Pages to change : " + pl.Count().ToString());
    int iremain = pl.Count();
    foreach (Page p in pl)
    {
        //Skip start of alphabet:
        //if (String.Compare(p.title,"Pseudanthessius") < 0 )
        // continue;
        if (!p.title.Contains(required)) { continue; }
        // tryload/trysave are retrying load/save helpers defined elsewhere in this file.
        if (!tryload(p, 2)) { continue; }
        if (!p.Exists()) { continue; }
        string origtitle = p.title;
        //Check so all required strings actually present:
        bool allfound = true;
        foreach (string s in requireword) { if (!p.text.Contains(s)) { allfound = false; } }
        if (!allfound) { continue; }
        if (requireone.Count > 0)
        {
            bool onefound = false;
            // NOTE(review): this script checks p.title for requireone, while the
            // sibling scripts check p.text — confirm which is intended.
            foreach (string s in requireone) { if (p.title.Contains(s)) { onefound = true; } }
            if (!onefound) { continue; }
        }
        //Check so no vetoword are present:
        bool vetofound = false;
        foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
        if (vetofound) { continue; }
        //If redirect, go back to redirect page:
        //if (origtitle != p.title)
        //{
        // p.title = origtitle;
        // p.Load();
        //}
        string origtext = p.text;
        //Do the actual replacement:
        foreach (KeyValuePair <string, string> replacepair in replacedict)
        {
            p.text = p.text.Replace(replacepair.Key, replacepair.Value);
        }
        //Wikilink first occurrence of each word, if not linked already:
        foreach (string s in linkword)
        {
            if (p.text.IndexOf(s) < 0) { continue; }
            string slinked = "[[" + s + "]]";
            if (p.text.IndexOf(slinked) < 0)
            {
                p.text = p.text.Insert(p.text.IndexOf(s), "[[");
                p.text = p.text.Replace("[[" + s, slinked);
            }
        }
        //Save the result:
        if (p.text != origtext)
        {
            //Bot.editComment = "Byter kategori";
            //isMinorEdit = true;
            if (trysave(p, 4))
            {
                nedit++;
                // Busy-wait until the throttle window has passed.
                DateTime newtime = DateTime.Now;
                while (newtime < oldtime) { newtime = DateTime.Now; }
                oldtime = newtime.AddSeconds(5);
            }
        }
        iremain--;
        Console.WriteLine(iremain.ToString() + " remaining.");
    }
    Console.WriteLine("Total # edits = " + nedit.ToString());
}
// Country-rename script (DotNetWikiBot) for Cebuano Wikipedia: for each entry in
// countrydict (old name -> new name), collects every page that links to the old
// country article and rewrites the plain wikilink "[[OldName]]" to "[[NewName]]".
// The commented-out countrydict entries document previously processed renames.
// NOTE(review): the credential-reading code below was redacted to "******" in this
// copy and does not parse as C#; presumably it read the password from the console
// and set the bot account name — confirm against the original file.
public static void Main()
{
    Console.Write("Password: "******"Lsjbot";
    string makelang = "ceb";
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Fixing country categories";
    site.minorEditByDefault = true;
    int nround = 1; // round counter; manual edit confirmation happens only in round 1
    Dictionary <string, string> countrydict = new Dictionary <string, string>();
    //countrydict.Add("Myanmar", "Burma");
    //countrydict.Add("Norwega", "Noruwega");
    //countrydict.Add("Marwekos", "Maruwekos");
    //countrydict.Add("Habagatang Koreya", "Habagatang Korea");
    //countrydict.Add("Amihanang Koreya", "Amihanang Korea");
    //countrydict.Add("Malaysia", "Malasya");
    //countrydict.Add("Mosambike", "Mozambique");
    //countrydict.Add("Kuba", "Cuba");
    //countrydict.Add("Aserbayan", "Aserbaiyan");
    //countrydict.Add("Bruney", "Brunei");
    //countrydict.Add("Indonesya", "Indonesia");
    //countrydict.Add("Iraq", "Irak");
    //countrydict.Add("Bolivia", "Bolibya");
    //countrydict.Add("Chile", "Tsile");
    //countrydict.Add("Georgia (nasud)", "Heyorhiya");
    countrydict.Add("Ireland", "Irlanda");
    foreach (string fromcountry in countrydict.Keys)
    {
        string tocountry = countrydict[fromcountry];
        nedit = 0; // edit counter; declared outside this block
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site); // scratch list for merging category trees (unused this run)
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Geografi i Goiás");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        // pl.Add(p);
        //pl.FillAllFromCategory(fromcountry);
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        pl.FillFromLinksToPage(fromcountry);
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"Användare:Lsjbot/Algoritmer\"", 4999);
        //pl.FillFromSearchResults("insource:\"http://www.itis.gov;http://\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        pl.RemoveNamespaces(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 100, 101 });
        Dictionary <string, string> replacedict = new Dictionary <string, string>();
        //replacedict.Add("Åboland-Turunmaa", "Åboland");
        //replacedict.Add("[[Kategoriya:" + fromcountry + "]]", "[[Kategoriya:" + tocountry + "]]");
        //replacedict.Add("[[kategoriya:" + fromcountry + "]]", "[[Kategoriya:" + tocountry + "]]");
        //replacedict.Add("[[Category:" + fromcountry + "]]", "[[Kategoriya:" + tocountry + "]]");
        //replacedict.Add("[[category:" + fromcountry + "]]", "[[Kategoriya:" + tocountry + "]]");
        // Active replacement: plain wikilink to the old country name.
        replacedict.Add("[[" + fromcountry + "]]", "[[" + tocountry + "]]");
        // Regex-based replacements (none active this run).
        Dictionary <string, string> regexdict = new Dictionary <string, string>();
        //regexdict.Add(@"\| timezone *= \[\[Fernando de Noronha Time\|FNT\]\]", "| timezone = [[Brasilia Time|BRT]]");
        //regexdict.Add(@"\| timezone_DST *= \[\[Amazon Summer Time\|AMST\]\]", "| timezone_DST = [[Brasilia Summer Time|BRST]]");
        //regexdict.Add(@"\| utc_offset *= -2", "| utc_offset = -3");
        //regexdict.Add(@"\| utc_offset_DST *= -3", "| utc_offset_DST = -2");
        List <string> linkword = new List <string>();
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List <string> requiretitle = new List <string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Require ALL in requireword list:
        List <string> requireword = new List <string>();
        //requireword.Add("obotskapad");
        //requireword.Add("= -3\n");
        //requireword.Add("Brasilien");
        //Require AT LEAST ONE in requireone list:
        List <string> requireone = new List <string>();
        List <string> vetoword = new List <string>();
        //vetoword.Add("Argentina");
        //vetoword.Add("Island");
        //vetoword.Add("isländska");
        // Throttle baseline: the next save may not happen before oldtime.
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to change : " + pl.Count().ToString());
        int iremain = pl.Count();
        // When resume is initialised to false, pages are skipped until the hard-coded
        // resume title ("Moylan Lough") is reached; true here means process from the start.
        bool resume = true;
        foreach (Page p in pl)
        {
            iremain--;
            //Skip start of alphabet:
            //if (String.Compare(p.title,"Sicydium") < 0 )
            // continue;
            if (!resume)
            {
                if (p.title == "Moylan Lough") { resume = true; }
                else { continue; }
            }
            // tryload/trysave are retrying load/save helpers defined elsewhere in this file.
            if (!tryload(p, 2)) { continue; }
            if (!p.Exists()) { continue; }
            string origtitle = p.title;
            //Follow redirect:
            if (p.IsRedirect())
            {
                p.title = p.RedirectsTo();
                if (!tryload(p, 2)) { continue; }
                if (!p.Exists()) { continue; }
            }
            //Check so required title actually present:
            if (requiretitle.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requiretitle) { if (p.title.Contains(s)) { onefound = true; } }
                if (!onefound) { Console.WriteLine("requiretitle not found"); continue; }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword) { if (!p.text.Contains(s)) { allfound = false; } }
            if (!allfound) { Console.WriteLine("requireword not found"); continue; }
            if (requireone.Count > 0)
            {
                bool onefound = false;
                foreach (string s in requireone) { if (p.text.Contains(s)) { onefound = true; } }
                if (!onefound) { Console.WriteLine("requireone not found"); continue; }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword) { if (p.text.Contains(s)) { vetofound = true; } }
            if (vetofound) { Console.WriteLine("vetoword found"); continue; }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            // p.title = origtitle;
            // p.Load();
            //}
            string origtext = p.text;
            //Do the actual replacement:
            foreach (KeyValuePair <string, string> replacepair in replacedict)
            {
                p.text = p.text.Replace(replacepair.Key, replacepair.Value);
            }
            foreach (KeyValuePair <string, string> replacepair in regexdict)
            {
                p.text = Regex.Replace(p.text, replacepair.Key, replacepair.Value);
            }
            //special for mismatching tags:
            //int itag = p.text.ToLower().IndexOf("<i>");
            //int refend = p.text.IndexOf("</ref>", itag);
            //int bend = p.text.ToLower().IndexOf("</b>", itag);
            //if (refend < 0)
            // refend = 999999;
            //if (bend < 0)
            // bend = 999999;
            //if (refend < bend)
            //{
            // p.text = ReplaceOne(p.text, "</ref>", "''</ref>", itag);
            // p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else if (bend < refend)
            //{
            // p.text = ReplaceOne(p.text, "</b>", "''</b>", itag);
            // p.text = ReplaceOne(p.text, "</B>", "''</B>", itag);
            // p.text = p.text.Replace("<i>", "''").Replace("<I>", "''");
            //}
            //else
            // p.text = p.text.Replace("<i>", "").Replace("<I>", "");
            //Wikilink first occurrence of each word, if not linked already:
            foreach (string s in linkword)
            {
                if (p.text.IndexOf(s) < 0) { continue; }
                string slinked = "[[" + s + "]]";
                if (p.text.IndexOf(slinked) < 0)
                {
                    p.text = p.text.Insert(p.text.IndexOf(s), "[[");
                    p.text = p.text.Replace("[[" + s, slinked);
                }
            }
            //Save the result:
            if (p.text != origtext)
            {
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4))
                {
                    nedit++;
                    // Manual confirmation for the first few edits of round 1.
                    if ((nedit < 4) && (nround == 1)) { Console.Write("<ret>"); Console.ReadLine(); }
                    // Busy-wait until the 5-second throttle window has passed.
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime) { newtime = DateTime.Now; }
                    oldtime = newtime.AddSeconds(5);
                }
            }
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
        nround++;
    } //while (nedit > 0);
}
public static void Main() { string makelang = "sv"; string botaccount = "Lsjbot"; Console.Write("Password: "******"https://" + makelang + ".wikipedia.org", botaccount, password); Site cmsite = new Site("https://commons.wikimedia.org", botaccount, password); //Site wdsite = new Site("http://wikidata.org", botaccount, password); //while (true) //{ // string fn = Console.ReadLine(); // Page ppp = new Page(cmsite, fn); // tryload(ppp, 1); // Console.WriteLine(ppp.text); //} //string cattodo = "Persoon naar beroep"; string cattodo = "Robotskapade svampartiklar"; string editcomment = "Fixar bilder från iw"; string logpage = "Användare:Lsjbot/imagelog"; string resume_at = ""; //string resume_at = ""; List <string> doneCats = new List <string>(); //doneCats.Add("Svedesi"); //doneCats.Add("Tedeschi"); //doneCats.Add("Spagnoli"); switch (makelang) { case "sv": editcomment = "Fixar bilder från iw, Kategori:" + cattodo; break; case "ceb": editcomment = "Galeriya sa hulagway"; break; case "nl": editcomment = "Fotogalerij van interwiki, Categorie:" + cattodo; break; case "it": editcomment = "Galleria di immagini da interwiki, Categoria:" + cattodo; break; default: editcomment = "Image gallery from interwiki"; break; } svsite.defaultEditComment = editcomment; svsite.minorEditByDefault = false; Console.WriteLine("apipath = " + svsite.apiPath); //Skip images in blacklist: List <string> blacklist = new List <string>(); List <string> vetocatlist = new List <string>(); bool blackwrite = false; bool blackread = true; if (blackread) { int nblack = 0; using (StreamReader sr = new StreamReader("blacklist.txt")) { while (!sr.EndOfStream) { string s = sr.ReadLine(); blacklist.Add(s); nblack++; } } Console.WriteLine("nblack=" + nblack.ToString()); } else { vetocatlist.Add("Image placeholders"); vetocatlist.Add("Icons by subject"); vetocatlist.Add("Logos of Eurovision"); vetocatlist.Add("Flags by country"); vetocatlist.Add("Audio files"); //vetocatlist.Add(""); foreach (string vc in 
vetocatlist) { PageList pldummy = new PageList(cmsite); bool loaded = false; do { try { pldummy.FillFromCategoryTree(vc); loaded = true; } catch (WebException e) { string message = e.Message; Console.Error.WriteLine(message); } }while (!loaded); foreach (Page pd in pldummy) { //Console.WriteLine(pd.title); blacklist.Add(pd.title.Replace("File:", "").Replace(" ", "_")); } pldummy.Clear(); } //Console.ReadLine(); if (blackwrite) { using (StreamWriter sw = new StreamWriter("blacklist.txt")) { foreach (string s in blacklist) { sw.WriteLine(s); } } } } //Skip pages in watchlist: svsite.watchList = new PageList(svsite); svsite.watchList.FillFromWatchList(); Console.WriteLine("Watchlist pages: " + svsite.watchList.Count()); List <string> blacktype = new List <string>(); //blacktype.Add(".svg"); //blacktype.Add(".png"); PageList pl = new PageList(svsite); //////////////////////////////////// //Select how to get pages. Uncomment as needed. //////////////////////////////////// //Find articles from a category bool loaded2 = false; do { try { FillAllFromCategoryTreeExceptDone(cattodo, svsite, pl, doneCats); loaded2 = true; } catch (WebException e) { string message = e.Message; Console.Error.WriteLine(message); } }while (!loaded2); //Find articles from all the links to a template, mostly useful on very small wikis // pl.FillFromLinksToPage("Mall:Taxobox"); //Set specific article: //Page ppp = new Page(svsite, "Dina Tersago");pl.Add(ppp); //Skip all namespaces except regular articles: pl.RemoveNamespaces(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 100, 101 }); /////////////////////////////////////// //Choose what to do with the pix that are found: // nchoice = 0: do nothing, except list on standard output // nchoice = 1: add as gallery in target article // nchoice = 2: add as separate pix in target article // nchoice = 3: add in article discussion // nchoice = 4: list in separate workpage "Användare:Botaccount/Gallery" //////////////////////////////////////// 
int nchoice = 1; // If ntop is non-zero, don't use all pix but only the ntop most used ones. int ntop = 20; // Skip pictures with size smaller than minsize. int minsize = 50; //Skip articles that already have at least one pic: bool skipillustrated = true; Page pwork = new Page(svsite, "Användare:" + botaccount + "/Gallery"); if (nchoice == 4) { pwork.Load(); } Dictionary <string, Site> sitedict = new Dictionary <string, Site>(); string sbrack = "[]'† ?"; char[] brackets = sbrack.ToCharArray(); //int nfound = 0; DateTime oldtime = DateTime.Now; int nedit = 0; int iremain = pl.Count(); //Console.ReadLine(); foreach (Page p in pl) { iremain--; Console.WriteLine(iremain.ToString() + " remaining."); //DateTime nexttime = oldtime.AddSeconds(7); //Skip start of alphabet: //if (String.Compare(p.title,"Acacia tortilis") < 0 ) // continue; //skip until specific article if (resume_at != "") { if (resume_at == p.title) { resume_at = ""; } else { continue; } } //Skip pages in watchlist if (p.watched) { Console.WriteLine("Skip watched"); continue; } if (svsite.watchList.Contains(p)) { Console.WriteLine("Skip page in watchlist"); continue; } if (!tryload(p, 1)) { continue; } if (!p.Exists()) { continue; } string origtext = p.text; //find images already in page: List <string> oldpix = p.GetImages(); List <string> oldpix2 = GetImagesInTemplates(svsite, p); //Console.WriteLine("Oldpix:"); int npix = 0; foreach (string oldpic in oldpix) { //Console.WriteLine(oldpic); npix++; } foreach (string pic in oldpix2) { npix++; } Console.WriteLine("npix = " + npix.ToString()); if (skipillustrated) { if (npix > 0) { continue; } if (p.text.Contains(".jp")) { continue; } if (p.text.Contains(".gif")) { continue; } if (p.text.Contains(".JP")) { continue; } if (p.text.Contains(".GIF")) { continue; } if (p.text.Contains(".png")) { continue; } if (p.text.Contains(".PNG")) { continue; } } //if it already has a gallery, skip it: if (p.text.Contains("<gallery>")) { continue; } //if it doesn't contain 
"Lsjbot", skip it: //if (!p.text.Contains("Lsjbot")) // continue; //find iw: Dictionary <string, string> newpix = new Dictionary <string, string>(); //string[] iw = p.GetInterWikiLinks(); List <string> iwlist = new List <string>(); try { iwlist = p.GetInterLanguageLinks(); } catch (WebException e) { string message = e.Message; Console.Error.WriteLine(message); Thread.Sleep(10000);//milliseconds } Console.WriteLine("iwlist.Count " + iwlist.Count); //if (iw.Length == 0) // iwlist = Interwiki(wdsite, p.title); //else //{ // foreach (string iws in iw) // iwlist.Add(iws); //} foreach (string iws in iwlist) { string[] ss = iws.Split(':'); string iwcode = ss[0]; string iwtitle = ss[1]; Console.WriteLine("iw - " + iwcode + ":" + iwtitle); if (iwcode == "nah") { continue; } if (!sitedict.ContainsKey(iwcode)) { string iwurl = "https://" + iwcode + ".wikipedia.org"; try { try { sitedict.Add(iwcode, new Site(iwurl, botaccount, password)); } catch (WebException e) { Console.WriteLine(e.Message); continue; } } catch (WikiBotException e) { Console.WriteLine(e.Message); continue; } } Page piw = new Page(sitedict[iwcode], iwtitle); try { piw.Load(); } catch (WebException e) { Console.WriteLine(e.Message); continue; } if (!piw.Exists()) { Console.WriteLine("Not found despite iw"); continue; } List <string> iwpix = piw.GetImages(); List <string> iwpix2 = GetImagesInTemplates(sitedict[iwcode], piw); foreach (string pic in iwpix2) { iwpix.Add(pic); } foreach (string iwpicture in iwpix) { string iwpic = iwpicture; //Remove file prefix: if (iwpic.Contains(":")) { iwpic = iwpic.Split(':')[1]; } //Skip if smaller than minsize: int size = 999; foreach (string pp in GetImageParams(piw, iwpic)) { if (pp.Contains("px")) { size = tryconvert(pp.Replace("px", "")); break; } } if ((size > 0) && (size < minsize)) { continue; } //Replace space with underscore: iwpic = iwpic.Replace(" ", "_"); //Add to list: if (newpix.ContainsKey(iwpic)) { newpix[iwpic] = newpix[iwpic] + ":" + iwcode; } else { 
newpix.Add(iwpic, iwcode); } } } bool fromcommons = false; if (newpix.Count == 0) { if (p.text.Contains("ommonscat|")) { fromcommons = true; string s = ""; if (p.text.IndexOf("{{commonscat|") > 0) { s = p.text.Remove(0, p.text.IndexOf("{{commonscat|")); s = s.Remove(s.IndexOf("}}")); s = s.Remove(0, "{{commonscat|".Length); } else if (p.text.IndexOf("{{Commonscat|") > 0) { s = p.text.Remove(0, p.text.IndexOf("{{Commonscat|")); s = s.Remove(s.IndexOf("}}")); s = s.Remove(0, "{{Commonscat|".Length); } if (String.IsNullOrEmpty(s)) { continue; } if (s.Contains("|")) { s = s.Remove(s.IndexOf("|")); } s = "Category:" + s; //Console.WriteLine(s); //Console.ReadLine(); PageList plc = new PageList(cmsite); try { plc.FillFromCategory(s); } catch (WebException e) { Console.WriteLine(e.Message); Thread.Sleep(10000);//milliseconds //continue; } foreach (Page pc in plc) { Console.WriteLine("pc = " + pc.title); newpix.Add(pc.title.Replace(" ", "_"), "cm"); } } } //Check if pix from iw is already used in target article: // //Workaround because a Dictionary can't be modified while iterating over its keys: List <string> dummykeys = new List <string>(); foreach (string dk in newpix.Keys) { dummykeys.Add(dk); } foreach (string newpic in dummykeys) { //Check if pix from iw is already used in target article: if (p.text.Contains(newpic)) { newpix[newpic] = "/// ALREADY USED"; } else if (newpic.Contains(":")) { if (p.text.Contains(newpic.Remove(0, newpic.IndexOf(':') + 1))) { newpix[newpic] = "/// ALREADY USED"; } } if ((!newpic.Contains(".")) || (newpic.LastIndexOf('.') < newpic.Length - 5)) { newpix[newpic] = "/// NOT A FILE"; } //Check if pic in blacklist: if (blacklist.Contains(newpic)) { newpix[newpic] = "/// BLACKLISTED IMAGE"; } foreach (string filetype in blacktype) { if (newpic.Contains(filetype)) { newpix[newpic] = "/// BLACKLISTED FILETYPE"; } } if (newpix[newpic].Contains("///")) { continue; } //Check if pic really exists on Commons: if (!fromcommons) { string res = 
cmsite.indexPath + "?title=" + HttpUtility.UrlEncode("File:" + newpic); //Console.WriteLine("commonsres = " + res); string src = ""; try { src = cmsite.GetWebPage(res); // cmsite.GetPageHTM(res); } catch (WebException e) { newpix[newpic] = "/// NOT FOUND ON COMMONS"; string message = e.Message; if (message.Contains(": (404) ")) { // Not Found Console.Error.WriteLine(Bot.Msg("Page \"{0}\" doesn't exist."), newpic); Console.WriteLine("Image not found " + newpic); continue; } else { Console.Error.WriteLine(message); continue; } } } } int nnew = 0; foreach (string newpic in newpix.Keys) { Console.WriteLine(newpic + " ! " + newpix[newpic]); if (!newpix[newpic].Contains("///")) { nnew++; } } Console.WriteLine("# new pix = " + nnew.ToString()); if (nnew == 0) { continue; } //OK, so we found some pix. Now what do we do with them? //First get rid of the ones we don't want: foreach (string newpic in dummykeys) { if (newpix[newpic].Contains("///")) { newpix[newpic] = ""; } } //Then figure out which new pix have the most interwiki use: List <string> pixtouse = new List <string>(); if ((ntop > 0) && (ntop < nnew)) { int nused = 0; while (nused < ntop) { string longest = ""; int maxlength = -1; foreach (string newpic in dummykeys) { if (newpix[newpic].Length > maxlength) { longest = newpic; maxlength = newpix[newpic].Length; } } pixtouse.Add(longest); newpix[longest] = ""; nused++; } } else { foreach (string newpic in newpix.Keys) { if (newpix[newpic] != "") { pixtouse.Add(newpic); } } } //Then actually use them, according to nchoice value: string gallerylabel = "Bildgalleri"; string talkpage = "Diskussion"; string disktext = "\n\n==Bilder från interwiki==\nBoten " + botaccount + " har identifierat följande bilder som används på andra språkversioner av den här artikeln:\n\n"; string disksig = "~~~~"; switch (makelang) { case "sv": gallerylabel = "Bildgalleri"; talkpage = "Diskussion"; disktext = "\n\n==Bilder från interwiki==\nBoten " + botaccount + " har identifierat följande 
bilder som används på andra språkversioner av den här artikeln:\n\n"; break; case "ceb": gallerylabel = "Galeriya sa hulagway"; talkpage = "Hisgot"; break; case "war": gallerylabel = "Image gallery"; talkpage = "Hiruhimangraw"; break; case "it": gallerylabel = "Galleria di immagini"; talkpage = "Discussione"; disktext = "== Suggerimento di immagini ==\n{{Suggerimento immagini}}"; disksig = "Cordiali saluti, ~~~~"; logpage = "Utente:Lsjbot/imagelog"; break; case "nl": gallerylabel = "Galleria di immagini"; talkpage = "Discussione"; disktext = "== Immagine suggerimento ==\n{{Immaginesuggerimento2015}}"; disksig = " -- ~~~~"; logpage = "Utente:Lsjbot/imagelog"; break; default: gallerylabel = "Image gallery"; break; } string gallery = "\n\n== " + gallerylabel + " ==\n\n<gallery>\n"; switch (nchoice) { case 1: foreach (string newpic in pixtouse) { gallery = gallery + newpic + "\n"; } gallery = gallery + "</gallery>\n\n"; int ipos = p.text.IndexOf("[[Kategori"); if ((ipos < 0) && (makelang == "war")) { ipos = p.text.IndexOf("[[Kaarangay"); } string botendtext = "== Källor =="; if (p.text.Contains(botendtext)) { ipos = p.text.IndexOf(botendtext); } if (ipos > 0) { p.text = p.text.Insert(ipos, gallery); } else { p.text += gallery; } break; case 2: foreach (string newpic in pixtouse) { p.text = p.text.Replace("[[Kategori", "[[Fil:" + newpic + "|thumb|right|]]\n\n" + "[[Kategori"); } break; case 3: Page pdisk = new Page(svsite, talkpage + ":" + p.title); if (!tryload(pdisk, 2)) { continue; } //Skip if already processed by the bot: if (pdisk.text.Contains(disktext) || pdisk.text.Contains(botaccount)) { continue; } if (!String.IsNullOrEmpty(pdisk.text)) { pdisk.text += "\n\n"; } pdisk.text = pdisk.text + disktext; gallery = gallery.Replace("\n== " + gallerylabel + " ==\n\n", ""); //"=== " + gallerylabel + " ==="); foreach (string newpic in pixtouse) { gallery = gallery + newpic + "\n"; } gallery = gallery + "</gallery>\n" + disksig + "\n"; pdisk.text = pdisk.text + gallery; 
//Bot.editComment = "Fixar bildförslag från iw"; //isMinorEdit = false; trysave(pdisk, 2); p.text = ""; try { p.text = ""; p.Watch(); } catch (WebException e) { string message = e.Message; Console.Error.WriteLine(message); Thread.Sleep(10000); //milliseconds } //Thread.Sleep(55000);//milliseconds //Console.WriteLine("<ret>"); //Console.ReadLine(); break; case 4: pwork.text = pwork.text + "===" + p.title + "===\n"; foreach (string newpic in pixtouse) { gallery = gallery + newpic + "\n"; } gallery = gallery + "</gallery>\n\n"; pwork.text = pwork.text + gallery; break; } //DONE! Now save if needed. //Bot.editComment = editcomment; //isMinorEdit = false; if ((nchoice == 1) || (nchoice == 2)) { int ntry = 0; if (p.text != origtext) { while (ntry < 3) { try { p.Save(); ntry = 999; } catch (WebException e) { Console.WriteLine(e.Message); ntry++; continue; } } } } if (nchoice == 4) { trysave(pwork, 3); } //Thread.Sleep(4000);//milliseconds //Console.WriteLine("nexttime = "+nexttime.ToLongTimeString()); //Console.WriteLine("Now = " + DateTime.Now.ToLongTimeString()); //while (DateTime.Now.CompareTo(nexttime) < 0) // continue; //oldtime = DateTime.Now; nedit++; } Console.WriteLine("Total #edits = " + nedit.ToString()); Page plog = new Page(svsite, logpage); tryload(plog, 2); plog.text += "\n# Category:" + cattodo + "; Total # pages = " + pl.Count().ToString() + "; Total #edits = " + nedit.ToString() + "\n"; trysave(plog, 2); }
// Entry point: logs the bot in to the target Wikipedia edition and adds maintenance
// categories to every page in the bot-created-disambiguations category
// ("Robotskapade förgreningssidor"). For each page it adds a dated category and one
// per-header country category, creating the category pages themselves on first use.
// Relies on sibling helpers tryload/trysave/yearmonth/getheaders and a static edit
// counter nedit, all declared elsewhere in this file.
public static void Main()
{
    // NOTE(review): the "******" token is a credential redacted by the source export —
    // this statement is not valid C# as written. Presumably the original prompted for
    // the password on the console and assigned botkonto = "Lsjbot"; restore before building.
    Console.Write("Password: "******"Lsjbot";
    string makelang = "sv"; // target language edition; "sv" and "ceb" are handled below
    Site site = new Site("https://" + makelang + ".wikipedia.org", botkonto, password);
    site.defaultEditComment = "Kategoriserar förgreningar";
    site.minorEditByDefault = true;
    // Category pages already created/verified during this run, so each is handled once.
    List <string> donecat = new List <string>();
    do {
        nedit = 0; // presumably a static field counting successful saves — declared elsewhere; TODO confirm
        PageList pl = new PageList(site);
        PageList pl1 = new PageList(site); // only used by the commented-out alternatives below
        //Select how to get pages. Uncomment as needed.
        //Add pages "by hand":
        //addpages(site,pl);
        //Find articles from a category
        //pl.FillAllFromCategoryTree("Phasmatodea");
        //pl1.FillAllFromCategoryTree("Eufriesea");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Euglossa");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Eulaema");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        //pl1.FillAllFromCategoryTree("Exaerete");
        //foreach (Page p in pl1)
        //    pl.Add(p);
        // Work list: every page directly in the bot-created-disambiguations category.
        pl.FillFromCategory("Robotskapade förgreningssidor");
        //Find subcategories of a category
        //pl.FillSubsFromCategory("Svampars vetenskapliga namn");
        //Find articles from all the links to an article, mostly useful on very small wikis
        //pl.FillFromLinksToPage("Brčko");
        //Find articles containing a specific string
        //pl.FillFromSearchResults("insource:\"http://www.itis.gov;http://\"", 4999);
        //Set specific article:
        //Page pp = new Page(site, "Citrontrogon");pl.Add(pp);
        //Skip all namespaces except articles:
        //pl.RemoveNamespaces(new int[] {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,100,101});
        Dictionary <string, string> replacedict = new Dictionary <string, string>(); // unused in this configuration
        //replacedict.Add("En underart finns: ''", "Utöver nominatformen finns också underarten ''");
        List <string> linkword = new List <string>(); // unused in this configuration
        //linkword.Add("Catalogue of Life");
        //Require title to contain one in requiretitle list:
        List <string> requiretitle = new List <string>();
        //requiretitle.Add("Radioprogram nerlagda");
        //Require ALL in requireword list:
        List <string> requireword = new List <string>();
        requireword.Add("obotskapad"); // substring matches both "Robotskapad" and "robotskapad" ("bot-created")
        //requireword.Add("Burkina Faso");
        //Require AT LEAST ONE in requireone list:
        List <string> requireone = new List <string>();
        // Skip any page whose text contains one of these strings:
        List <string> vetoword = new List <string>();
        vetoword.Add("[[Kategori:Robotskapade auktorsförkortningar]]");
        // Throttle state: earliest time at which the next save is allowed (5 s spacing).
        DateTime oldtime = DateTime.Now;
        oldtime = oldtime.AddSeconds(5);
        Console.WriteLine("Pages to change : " + pl.Count().ToString());
        int iremain = pl.Count(); // countdown printed as a progress indicator
        foreach (Page p in pl) {
            //Skip start of alphabet:
            //if (String.Compare(p.title,"Sicydium") < 0 )
            //    continue;
            if (!tryload(p, 2)) {
                continue;
            }
            if (!p.Exists()) {
                continue;
            }
            // Retained for the commented-out redirect handling below; otherwise unused.
            string origtitle = p.title;
            //Follow redirect:
            //if (p.IsRedirect())
            //{
            //    p.title = p.RedirectsTo();
            //    if (!tryload(p, 2))
            //        continue;
            //    if (!p.Exists())
            //        continue;
            //}
            //Check so required title actually present:
            if (requiretitle.Count > 0) {
                bool onefound = false;
                foreach (string s in requiretitle) {
                    if (p.title.Contains(s)) {
                        onefound = true;
                    }
                }
                if (!onefound) {
                    Console.WriteLine("requiretitle not found");
                    continue;
                }
            }
            //Check so all required strings actually present:
            bool allfound = true;
            foreach (string s in requireword) {
                if (!p.text.Contains(s)) {
                    allfound = false;
                }
            }
            if (!allfound) {
                Console.WriteLine("requireword not found");
                continue;
            }
            if (requireone.Count > 0) {
                bool onefound = false;
                foreach (string s in requireone) {
                    if (p.text.Contains(s)) {
                        onefound = true;
                    }
                }
                if (!onefound) {
                    Console.WriteLine("requireone not found");
                    continue;
                }
            }
            //Check so no vetoword are present:
            bool vetofound = false;
            foreach (string s in vetoword) {
                if (p.text.Contains(s)) {
                    vetofound = true;
                }
            }
            if (vetofound) {
                Console.WriteLine("vetoword found");
                continue;
            }
            //If redirect, go back to redirect page:
            //if (origtitle != p.title)
            //{
            //    p.title = origtitle;
            //    p.Load();
            //}
            // Snapshot so we only save when the categorization actually changed the text.
            string origtext = p.text;
            //Do the actual replacement:
            // Dated maintenance category, e.g. "Robotskapade förgreningar <year month>".
            string datecat = "Robotskapade förgreningar " + yearmonth(p, site);
            string catstring = "Kategori"; // category-namespace prefix for the target wiki
            if (makelang == "ceb") {
                datecat = "Pagklaro paghimo ni bot " + yearmonth(p, site);
                catstring = "Kategoriya:";
            }
            p.AddToCategory(datecat);
            // Create the dated category page itself the first time it is seen this run.
            if (!donecat.Contains(datecat)) {
                // NOTE(review): for makelang == "sv" catstring is "Kategori" with no
                // trailing ':' (the "ceb" branch has one), so this title appears to lack
                // the namespace separator — possible bug; verify against the live wiki.
                Page pcat = new Page(site, catstring + datecat);
                tryload(pcat, 1);
                if (!pcat.Exists()) {
                    pcat.text = "";
                    if (makelang == "sv") {
                        pcat.AddToCategory("Robotskapade förgreningar efter datum");
                    } else if (makelang == "ceb") {
                        pcat.AddToCategory("Pagklaro paghimo ni bot");
                    }
                    trysave(pcat, 1);
                    // NOTE(review): only recorded when newly created — an already-existing
                    // category is re-loaded on every page; harmless but redundant.
                    donecat.Add(datecat);
                }
            }
            // One country category per section header extracted from the page text.
            foreach (string country in getheaders(p.text)) {
                string countrycat = "Robotskapade " + country + "förgreningar";
                if (makelang == "ceb") {
                    countrycat = "Pagklaro paghimo ni bot " + country;
                }
                p.AddToCategory(countrycat);
                if (!donecat.Contains(countrycat)) {
                    Page pcat = new Page(site, catstring + countrycat);
                    tryload(pcat, 1);
                    if (!pcat.Exists()) {
                        pcat.text = "";
                        if (makelang == "sv") {
                            // Sort key after '|' groups the category under its country name.
                            pcat.AddToCategory("Robotskapade förgreningar efter land|" + country);
                        } else if (makelang == "ceb") {
                            pcat.AddToCategory("Pagklaro paghimo ni bot sa nasud");
                        }
                        trysave(pcat, 1);
                        donecat.Add(countrycat);
                    }
                }
            }
            //Save the result:
            if (p.text != origtext) {
                //Bot.editComment = "Ersätter och wikilänkar";
                //isMinorEdit = true;
                if (trysave(p, 4)) {
                    nedit++;
                    // Pause for manual inspection of the first few edits of a run.
                    if (nedit < 4) {
                        Console.Write("<ret>");
                        Console.ReadLine();
                    }
                    // Busy-wait until 5 s have elapsed since the previous save (rate limit).
                    DateTime newtime = DateTime.Now;
                    while (newtime < oldtime) {
                        newtime = DateTime.Now;
                    }
                    oldtime = newtime.AddSeconds(5);
                }
            }
            iremain--;
            Console.WriteLine(iremain.ToString() + " remaining.");
        }
        Console.WriteLine("Total # edits = " + nedit.ToString());
    } while (false); // (nedit > 0); -- runs once; re-enable the condition to repeat until no edits remain
}