/// <summary>Populates the sample data source with one group built from the
/// given page; at most the first 20 linked pages become items.</summary>
/// <param name="initialPage">Page whose outgoing links are turned into data items.</param>
public void setDataSource(DotNetWikiBot.Page initialPage)
{
    var group1 = new DataCollection(initialPage);
    PageList links = initialPage.GetLinks();
    if (links == null)
    {
        Console.WriteLine("AW SHIT NO DATA");
        return;
    }
    int count = 0;
    foreach (DotNetWikiBot.Page page in links)
    {
        count++;
        group1.Items.Add(new DataItem(page, group1));
        if (count >= 20)
            break;
    }
    // Replace whatever groups were shown before with this single group.
    sampleDataSource.AllGroups.Clear();
    sampleDataSource.AllGroups.Add(group1);
}
/// <summary>Converts a Zero-K forum-backed wiki page (BBCode) to MediaWiki markup
/// and saves it to the target wiki under a new name.</summary>
/// <param name="pageName">WikiKey of the source forum thread.</param>
/// <param name="newName">Title of the destination wiki page.</param>
/// <param name="overwrite">When true, an existing destination page is overwritten.</param>
public static void ConvertPage(string pageName, string newName, bool overwrite = false)
{
    // FIX: ZkDataContext is IDisposable — dispose it deterministically.
    using (var db = new ZkDataContext())
    {
        ForumThread thread = db.ForumThreads.FirstOrDefault(x => x.WikiKey == pageName);
        if (thread == null)
        {
            Console.WriteLine("No ZK wiki page with name {0} found", pageName);
            return;
        }
        string text = BBCodeToMediaWiki(thread.ForumPosts.First().Text);
        Page page = new Page(newWiki, newName);
        page.Load();
        bool update = false;
        if (!page.IsEmpty())
        {
            if (!overwrite)
            {
                Console.WriteLine("Page already exists, exiting");
                return;
            }
            update = true;
        }
        if (newName.StartsWith("Mission Editor", true, System.Globalization.CultureInfo.CurrentCulture))
            page.AddToCategory("Mission Editor");
        // New pages get the "Ported from..." summary; updates get an empty summary.
        page.Save(text, update ? "" : "Ported from ZK wiki by DotNetWikiBot", update);
    }
}
/// <summary>Builds a template wrapper from its page and raw wikitext, parsing
/// its parameters and extracting the namespace-stripped title.</summary>
internal Template(Page p, string s)
{
    Page = p;
    Text = s;
    Parameters = p.site.ParseTemplate(s);
    // Title is everything up to the first '|' (or end of line), with the
    // template namespace prefix (ns 10) removed.
    var titleMatch = Regex.Match(s, @"^(.+?)($|\|)", RegexOptions.Multiline);
    Title = p.site.RemoveNSPrefix(titleMatch.Groups[1].Value.Trim(), 10);
}
/// <summary>Applies the configured literal and regex replacements to a main
/// namespace page and records an edit comment listing every change.</summary>
/// <param name="p">Page to process; only namespace 0 is handled.</param>
/// <param name="edit">Edit status receiving the save flag and comment.</param>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0) return;
    p.Load();
    var changes = new List<string>();
    p.InsertPlaceholders(GeneralExtensions.Placeholder.Default);
    foreach (var replacement in Replacements.Where(replacement => p.text.Contains(replacement.Key)))
    {
        p.text = p.text.Replace(replacement.Key, replacement.Value);
        changes.Add(replacement.Key + " → " + replacement.Value);
    }
    foreach (var replacement in RegexReplacements)
    {
        var pattern = replacement.Key;
        var replace = replacement.Value;
        // BUG FIX: Regex.Replace returns a new string; the result was previously
        // discarded, so regex replacements were logged but never applied.
        p.text = pattern.Replace(p.text, match =>
        {
            var replaceWith = RegexParseReplaceWithString(match, replace);
            changes.Add(match.Value + " → " + replaceWith);
            return replaceWith;
        });
    }
    p.RemovePlaceholders();
    if (changes.Count > 0)
    {
        edit.Save = true;
        edit.EditComment = "Ersetzt: " + string.Join(", ", changes);
    }
}
/// <summary>Folds "Xg Ys Zk" coin notations on a main-namespace page into
/// {{Münzen|N}} templates, where N is the total value in copper.</summary>
/// <param name="p">Page to process; only namespace 0 is handled.</param>
/// <param name="edit">Edit status receiving the save flag and comment.</param>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0) return;
    p.Load();
    var changes = new List<string>();
    Match m;
    while ((m = MuenzenRegex.Match(p.text)).Success)
    {
        int kupfer;
        int silber;
        int gold;
        // Missing/unparsable groups fall back to 0 via TryParse.
        int.TryParse(m.Groups["kupfer"].Value, out kupfer);
        int.TryParse(m.Groups["silber"].Value, out silber);
        int.TryParse(m.Groups["gold"].Value, out gold);
        // Total in copper: 1 silver = 100 copper, 1 gold = 10000 copper.
        var muenzen = kupfer + 100 * silber + 10000 * gold;
        var replaced = p.text.Replace(m.Value.Trim(), "{{Münzen|" + muenzen + "}}");
        // FIX: if the trimmed match cannot be found, Replace is a no-op and the
        // regex would keep matching the same spot forever — bail out instead.
        if (replaced == p.text)
            break;
        p.text = replaced;
        changes.Add(string.Format("{0}g {1}s {2}k → {3}", gold, silber, kupfer, muenzen));
    }
    if (changes.Count > 0)
    {
        edit.EditComment = string.Format("Münzen ({0}x): {1}", changes.Count, string.Join(", ", changes));
        edit.Save = true;
    }
}
/// <summary>Initializes the common data fields from a wiki page: the page is
/// loaded and its id, title and text are captured.</summary>
/// <param name="srcPage">Source wiki page backing this data object.</param>
protected DataCommon(DotNetWikiBot.Page srcPage)
{
    this.srcPage = srcPage;
    srcPage.Load();
    uniqueId = srcPage.pageId;
    title = srcPage.title;
    description = srcPage.text;
}
/// <summary>Creates a section from its raw wikitext, remembering the original
/// content and lazily discovering nested sections one heading level deeper.</summary>
public Section(string content, Page page, int level)
{
    _originalContent = content;
    Content = content;
    Level = level;
    Page = page;
    // Deferred enumeration: subsections are parsed on first iteration.
    Subsections = SectionExtensions.GetAllSections(Content, page, level + 1);
}
/// <summary>Walks the AfC page list, appends the missing-template category to
/// pages that lack any AfC marker, and stops after ~100 saves per run.
/// Progress is tracked via the "resultsig"/"results" files; failures land in "errors".</summary>
public static void exec()
{
    string[] titles = System.IO.File.ReadAllLines("list");
    var ignorelist = new List<string>();
    if (System.IO.File.Exists("resultsig"))
        ignorelist.AddRange(System.IO.File.ReadAllLines("resultsig"));
    if (System.IO.File.Exists("results"))
        ignorelist.AddRange(System.IO.File.ReadAllLines("results"));
    int processed = 1;
    foreach (string c in titles)
    {
        if (ignorelist.Contains(c))
            continue;
        try
        {
            var afc = new Page(MainClass.en, "Wikipedia talk:" + c);
            afc.Load();
            bool alreadyHandled =
                afc.text == null
                || afc.text.Contains("REDIRECT")
                || afc.text.Contains("{{AFC")
                || afc.text.Contains("{{WPAFC")
                || afc.text == ""
                || afc.text.Contains("[[Category:AfC_submissions_with_missing_AfC_template]]");
            if (alreadyHandled)
            {
                MainClass.DebugLog("OK:" + c);
                System.IO.File.AppendAllText("resultsig", c + "\n");
            }
            else
            {
                MainClass.DebugLog("Category missing:" + c);
                afc.text = afc.text + "\n[[Category:AfC_submissions_with_missing_AfC_template]]";
                afc.Save(afc.text, "Bot: inserting [[Category:AfC_submissions_with_missing_AfC_template]]", false);
                processed++;
                System.IO.File.AppendAllText("results", c + "\n");
                if (processed > 100)
                {
                    MainClass.DebugLog("Finished for today");
                    return;
                }
            }
        }
        catch (Exception fail)
        {
            Console.WriteLine(fail.ToString());
            System.IO.File.AppendAllText("errors", c + Environment.NewLine);
        }
    }
    MainClass.Log("end");
}
/// <summary>Builds a fresh sample data source from the page and binds the
/// matching group to the items control.</summary>
private void setData(DotNetWikiBot.Page page)
{
    var root = new SampleDataSource();
    root.setDataSource(page);
    this.itemsControl.ItemsSource = SampleDataSource.GetGroup(page.pageId);
}
/// <summary>Decides whether a Picasa review is unnecessary for the page
/// (already reviewed elsewhere, OTRS-tagged, or public domain) and, if so,
/// replaces the review tags with {{picasareviewunnecessary}} and saves.</summary>
/// <param name="page">Loaded page to inspect and possibly update.</param>
/// <returns>True when the review is unnecessary (including the already-marked case).</returns>
private static bool CheckIfReviewUnnecessary(Page page)
{
    // Hoisted: the lowercase copy was previously recomputed on every Contains call.
    string lowerText = page.text.ToLower();
    if (lowerText.Contains("{{picasareviewunnecessary}}"))
    {
        // Already marked unnecessary, assume it is
        return true;
    }
    bool unnecessary = false;
    string unnecessaryReason = "";
    if (IsFlickrReviewed(page))
    {
        unnecessary = true;
        unnecessaryReason = "Also on Flickr and already Flickr reviewed";
    }
    if (HasOtrsTag(page))
    {
        unnecessary = true;
        unnecessaryReason = "Has OTRS tag";
    }
    foreach (string tag in new string[] { "{{pd-usgov", "{{pd-art}}", "{{pd-art|", "{{pd-scan}}", "{{pd-scan|", "{{pd-old", "{{pd-us}}", "{{anonymous-eu}}" })
    {
        if (lowerText.Contains(tag))
        {
            unnecessary = true;
            unnecessaryReason = "Is public domain";
            break; // one hit is enough; reason is identical for all PD tags
        }
    }
    if (!unnecessary)
        return false;
    string newPageText = tagRegex.Replace(page.text, "{{picasareviewunnecessary}}");
    newPageText = new Regex("\\{\\{User:Picasa Review Bot/reviewed-error\\|[^}]*\\}\\}", RegexOptions.IgnoreCase)
        .Replace(newPageText, "{{picasareviewunnecessary}}");
    // Only save when the replacement actually changed something.
    if (page.text != newPageText)
    {
        page.text = newPageText;
        SavePage(page, "Marked {{picasareviewunnecessary}}: " + unnecessaryReason);
    }
    return true;
}
/// <summary>Links attribute names inside the "beschreibung" parameter of every
/// "Infobox Gegenstand" template on a main-namespace page.</summary>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0)
        return;
    p.Load();
    foreach (var template in p.GetAllTemplates())
    {
        if (template.Title.ToLower() != "infobox gegenstand")
            continue;
        if (!template.Parameters.ContainsKey("beschreibung"))
            continue;
        var beschreibung = template.Parameters["beschreibung"];
        if (!_regex.IsMatch(beschreibung))
            continue;
        template.Parameters["beschreibung"] = _regex.Replace(beschreibung, "[[$1]]");
        template.Save();
        edit.Save = true;
        edit.EditComment = "Attribute in Gegenstandsbeschreibung verlinkt";
    }
}
/// <summary>Adds "aufwertung = nein" to ascended ("Aufgestiegen") equipment and
/// recipe templates that do not declare the parameter yet.</summary>
protected override void ProcessPage(Page p, Job.EditStatus edit)
{
    if (p.GetNamespace() != 0)
        return;
    p.Load();
    foreach (var template in p.GetAllTemplates())
    {
        bool relevantTemplate =
            template.Title.Equals("Ausrüstungswerte", StringComparison.OrdinalIgnoreCase)
            || template.Title.Equals("Rezept", StringComparison.OrdinalIgnoreCase);
        if (!relevantTemplate)
            continue;
        if (!template.Parameters.HasValueIgnoreCase("seltenheit", "Aufgestiegen"))
            continue;
        if (template.Parameters.ContainsKey("aufwertung"))
            continue;
        // Keep parameter order stable: insert after infusion2/infusion if present.
        template.InsertParameterAfter("aufwertung", "nein", "infusion2", "infusion");
        template.Save();
        edit.EditComment = "'aufwertung = nein' hinzugefügt";
        edit.Save = true;
    }
}
/// <summary>Scans the AfC page list and records, per page, whether an AfC
/// template/redirect is present ("resultsig") or missing ("results");
/// failures are appended to "errors".</summary>
public static void exec()
{
    string[] titles = System.IO.File.ReadAllLines("list");
    var ignorelist = new List<string>();
    if (System.IO.File.Exists("resultsig"))
        ignorelist.AddRange(System.IO.File.ReadAllLines("resultsig"));
    if (System.IO.File.Exists("results"))
        ignorelist.AddRange(System.IO.File.ReadAllLines("results"));
    foreach (string c in titles)
    {
        if (ignorelist.Contains(c))
            continue;
        try
        {
            var afc = new Page(MainClass.en, "Wikipedia talk:" + c);
            afc.Load();
            bool hasMarker = afc.text.Contains("REDIRECT") || afc.text.Contains("{{AFC");
            if (hasMarker)
            {
                MainClass.DebugLog("OK:" + c);
                System.IO.File.AppendAllText("resultsig", c + "\n");
            }
            else
            {
                MainClass.DebugLog("Template missing:" + c);
                System.IO.File.AppendAllText("results", c + "\n");
            }
        }
        catch (Exception fail)
        {
            Console.WriteLine(fail.ToString());
            System.IO.File.AppendAllText("errors", c + "." + Environment.NewLine);
        }
    }
    MainClass.Log("end");
}
/// <summary>Merges a single-use offer subpage back into its NPC article, or removes
/// the transclusion when the subpage does not exist. Only articles that use
/// Template:Infobox NSC are considered.</summary>
/// <param name="p">Article page to process; only the main namespace is handled.</param>
/// <param name="edit">Edit status receiving the save flag and edit comment.</param>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0) return;
    p.Load();
    // Only pages that use Template:Infobox NSC (the NPC infobox).
    if (p.GetAllTemplates().All(t => t.Title.ToLower() != "infobox nsc")) return;
    // Only pages that transclude a subpage of themselves, e.g. {{:Title/Angebot}}.
    // Group 1 captures the subpage name after the slash.
    var m = Regex.Match(p.text, "\\{\\{:" + p.title + "/([^}]+)}}");
    if (!m.Success) return;
    var subpageTitle = m.Groups[1].Value;
    var subpage = new Page(p.site, p.title + "/" + subpageTitle);
    subpage.Load();
    if (!subpage.Exists())
    {
        // Dead transclusion — drop it from the article.
        p.text = p.text.Replace(m.Value, "");
        edit.EditComment = "Verweis auf nicht vorhandene Angebots-Unterseite „" + subpage.title + "“ entfernt";
        edit.Save = true;
    }
    else
    {
        // Leave the subpage alone if any other page links to it.
        var pl2 = new PageList(p.site);
        pl2.FillFromLinksToPage(subpage.title);
        if (pl2.Count() > 1) return;
        // Inline the subpage content (minus its <noinclude> part) into the article
        // and tag the now-redundant subpage for deletion ("Löschantrag").
        var subpageContent = Regex.Replace(subpage.text, "<noinclude>.*?</noinclude>", "").Trim();
        p.text = p.text.Replace(m.Value, subpageContent);
        subpage.text = "{{Löschantrag|[Bot] In den Hauptartikel „[[" + p.title + "]]“ verschoben}}\n" + subpage.text;
        subpage.Save("[Bot] In Hauptartikel „[[" + p.title + "]]“ verschoben", true);
        edit.EditComment = "Angebot von „" + subpage.title + "“ in den Hauptartikel verschoben";
        edit.Save = true;
    }
}
/// <summary>Replaces #dpl queries (category "Trophäe"/"Behälter" with linksto and
/// format parameters) by the equivalent {{Beschaffung}} template call and warns
/// on the console when dpl residue remains.</summary>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0)
        return;
    p.Load();
    var before = p.text;
    foreach (var template in p.GetAllTemplates())
    {
        if (template.Title != "#dpl:"
            || !template.Parameters.ContainsKey("category")
            || !template.Parameters.ContainsKey("linksto")
            || !template.Parameters.ContainsKey("format"))
            continue;
        var linksTo = template.Parameters["linksto"];
        if (linksTo == p.title)
            linksTo = "{{PAGENAME}}"; // self-reference → magic word
        var category = template.Parameters["category"].ToLower();
        if (category == "trophäe")
            p.text = p.text.Replace(template.Text, "Beschaffung|gegenstand=" + linksTo + "|kategorie=Trophäe");
        else if (category == "behälter")
            p.text = p.text.Replace(template.Text, "Beschaffung|gegenstand=" + linksTo + "|kategorie=Behälter");
    }
    if (p.text.Contains("#dpl:") && p.text.Contains("Behälter"))
    {
        Console.ForegroundColor = ConsoleColor.Yellow;
        Console.WriteLine("{0} still contains dpl", p.title);
        Console.ResetColor();
    }
    if (p.text != before)
    {
        edit.Save = true;
        edit.EditComment = "DPL durch {{Beschaffung}} ersetzt";
    }
}
/// <summary>Renames template parameters on a main-namespace page according to the
/// configured Replacements map (template title → old name → new name).</summary>
/// <param name="p">Page to process; only namespace 0 is handled.</param>
/// <param name="edit">Edit status receiving the save flag and comment.</param>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0) return;
    p.Load();
    var allChanges = new List<string>();
    var before = p.text;
    foreach (var template in p.GetAllTemplates())
    {
        var templateChanges = new List<string>();
        if (Replacements.ContainsKey(template.Title))
        {
            // BUG FIX: ChangeParametername mutates template.Parameters; iterating the
            // live collection while renaming throws InvalidOperationException.
            // Take a snapshot of the keys first.
            foreach (var parameterKey in template.Parameters.Keys.ToList())
            {
                if (Replacements[template.Title].ContainsKey(parameterKey))
                {
                    template.ChangeParametername(parameterKey, Replacements[template.Title][parameterKey]);
                    templateChanges.Add(parameterKey + " → " + Replacements[template.Title][parameterKey]);
                }
            }
        }
        if (templateChanges.Count > 0)
        {
            template.Save();
            allChanges.Add(template.Title + ": " + string.Join(", ", templateChanges));
        }
    }
    if (allChanges.Count > 0)
    {
        edit.Save = true;
        edit.EditComment = "Parameter umbenannt: " + string.Join("; ", allChanges);
    }
}
/// <summary>Fills this PageList with the titles of every wiki link found on the
/// given page, across all standard namespaces; interwiki links to other sites are
/// skipped. Use FillFromPageLinks instead to filter namespaces automatically.</summary>
/// <param name="pageTitle">Page title as string.</param>
/// <example><code>pageList.FillFromAllPageLinks("Art");</code></example>
public void FillFromAllPageLinks(string pageTitle)
{
    if (string.IsNullOrEmpty(pageTitle))
        throw new ArgumentNullException("pageTitle");
    if (string.IsNullOrEmpty(Site.WMLangsStr))
        site.GetWikimediaWikisList();
    var linkPattern = new Regex(@"\[\[:*(.+?)(]]|\|)");
    Page sourcePage = new Page(site, pageTitle);
    sourcePage.Load();
    // Matches link targets that start with a Wikimedia language prefix,
    // i.e. interwiki links that must not become local pages.
    var interwikiPattern = new Regex("^(" + Site.WMLangsStr + "):");
    foreach (Match match in linkPattern.Matches(sourcePage.text))
    {
        string target = match.Groups[1].Value;
        if (!interwikiPattern.IsMatch(target))
            pages.Add(new Page(site, target));
    }
    Bot.LogEvent(
        Bot.Msg("PageList filled with links, found on \"{0}\" page."), pageTitle);
}
/// <summary>Gets page titles and page texts from all ".txt" files in the specified
/// directory (folder). Each file becomes a page. Page titles are constructed from
/// file names. Page text is read from file contents. If any Unicode numeric codes
/// (also known as numeric character references or NCRs) of the forbidden characters
/// (forbidden in filenames) are recognized in filenames, those codes are converted
/// to characters (e.g. "&#x7c;" is converted to "|").</summary>
/// <param name="dirPath">The path and name of a directory (folder)
/// to load files from.</param>
public void FillAndLoadFromFiles(string dirPath)
{
    foreach (string fileName in Directory.GetFiles(dirPath, "*.txt"))
    {
        Page p = new Page(site, Path.GetFileNameWithoutExtension(fileName));
        // FIX: the NCR source literals had been entity-decoded into invalid code
        // (e.g. Replace(""", "\"")); restored to the hex NCR forms the summary
        // describes, mapping each filename-forbidden character back.
        p.title = p.title.Replace("&#x22;", "\"");
        p.title = p.title.Replace("&#x3c;", "<");
        p.title = p.title.Replace("&#x3e;", ">");
        p.title = p.title.Replace("&#x3f;", "?");
        p.title = p.title.Replace("&#x3a;", ":");
        p.title = p.title.Replace("&#x5c;", "\\");
        p.title = p.title.Replace("&#x2f;", "/");
        p.title = p.title.Replace("&#x2a;", "*");
        p.title = p.title.Replace("&#x7c;", "|");
        p.LoadFromFile(fileName);
        pages.Add(p);
    }
}
/// <summary>Gets page titles and page text from local XML dump.
/// This function consumes much resources.</summary>
/// <param name="filePathName">The path to and name of the XML dump file as string.</param>
public void FillAndLoadFromXMLDump(string filePathName)
{
    Bot.LogEvent(Bot.Msg("Loading pages from XML dump..."));
    // FIX: dispose the reader even when parsing throws (was a bare Close()).
    using (XmlReader reader = XmlReader.Create(filePathName))
    {
        while (reader.ReadToFollowing("page"))
        {
            Page p = new Page(site, "");
            p.ParsePageXML(reader.ReadOuterXml());
            pages.Add(p);
        }
    }
    Bot.LogEvent(Bot.Msg("XML dump loaded successfully."));
}
/// <summary>Checks whether this PageList already holds a page with the same title
/// as the given page. Namespace prefixes are normalized on both sides first.</summary>
/// <param name="page">Page whose title is searched for.</param>
/// <returns>True when a page with an identical title exists in the list.</returns>
public bool Contains(Page page)
{
    page.CorrectNSPrefix();
    CorrectNSPrefixes();
    foreach (Page item in pages)
    {
        if (item.title == page.title)
            return true;
    }
    return false;
}
/// <summary>Checks whether a page with the specified title exists in this
/// PageList. Namespace prefixes are normalized before comparison.</summary>
/// <param name="title">Title of page to check.</param>
/// <returns>True when a page with an identical (normalized) title exists.</returns>
public bool Contains(string title)
{
    // Wrap the raw title in a Page so it gets the same prefix normalization.
    Page probe = new Page(site, title);
    probe.CorrectNSPrefix();
    CorrectNSPrefixes();
    foreach (Page item in pages)
    {
        if (item.title == probe.title)
            return true;
    }
    return false;
}
/// <summary>This function adds specified page to the end of this PageList.</summary>
/// <param name="page">Page object to add.</param>
public void Add(Page page) { pages.Add(page); }
/// <summary>This internal function compares pages by titles (alphabetically),
/// using the site's language culture and case-sensitive comparison.</summary>
/// <returns>Returns 1 if x is greater, -1 if y is greater, 0 if equal.</returns>
public int ComparePagesByTitles(Page x, Page y)
{
    int r = string.Compare(x.title, y.title, false, site.langCulture);
    // Math.Sign yields exactly -1/0/1 — same result as the old r/Math.Abs(r)
    // normalization, without the division.
    return Math.Sign(r);
}
/// <summary>This function allows to set individual pages in this PageList.
/// But it's better to use simple pageList[i] index, when it is possible.</summary>
/// <param name="page">Page object to set in this PageList.</param>
/// <param name="index">Zero-based index.</param>
/// <returns>Returns the Page object.</returns>
public void SetPageAtIndex(Page page, int index) { pages[index] = page; }
/// <summary>Creates a data collection backed by the given wiki page and starts
/// listening for changes to its Items collection.</summary>
/// <param name="page">Wiki page this collection represents.</param>
public DataCollection(DotNetWikiBot.Page page) : base(page) { this.Items.CollectionChanged += this.ItemsCollectionChanged; }
/// <summary>Inserts an element into this PageList at the specified index.</summary>
/// <param name="page">Page object to insert.</param>
/// <param name="index">Zero-based index.</param>
public void Insert(Page page, int index) { pages.Insert(index, page); }
/// <summary>Gets page history and fills this PageList with specified number of last page
/// revisions. But only revision identifiers, user names, timestamps and comments are
/// loaded, not the texts. Call Load() (but not LoadEx) to load the texts of page revisions.
/// The function combines XML (XHTML) parsing and regular expressions matching.</summary>
/// <param name="pageTitle">Page to get history of.</param>
/// <param name="lastRevisions">Number of last page revisions to get.</param>
public void FillFromPageHistory(string pageTitle, int lastRevisions)
{
    if (string.IsNullOrEmpty(pageTitle))
        throw new ArgumentNullException("pageTitle");
    // NOTE(review): the parameter name reported here is "quantity" although the
    // parameter is called lastRevisions — confirm before changing the message.
    if (lastRevisions <= 0)
        throw new ArgumentOutOfRangeException("quantity", Bot.Msg("Quantity must be positive."));
    Bot.LogEvent(
        Bot.Msg("Getting {0} last revisons of \"{1}\" page..."), lastRevisions, pageTitle);
    // Fetch the rendered history page via index.php.
    string res = site.site + site.indexPath + "index.php?title=" + HttpUtility.UrlEncode(pageTitle) + "&limit=" + lastRevisions.ToString() + "&action=history";
    XmlDocument doc = new XmlDocument();
    doc.XmlResolver = null; // don't resolve external entities/DTDs
    // Normalize characters that would upset XML loading / DateTime parsing.
    // NOTE(review): the first Replace argument appears to be a non-breaking
    // space (U+00A0) and the second an en dash — confirm the exact bytes.
    string text = site.GetPageHTM(res).Replace(" ", " ").Replace("–", "-");
    doc.LoadXml(text);
    XmlNodeList nl = doc.DocumentElement.SelectNodes("//ns:ul[@id='pagehistory']/ns:li", site.xmlNS);
    // Group 1: revision id after "oldid="; group 2: the link text (timestamp).
    Regex revisionLinkRE = new Regex(@"(?<!diff=\d+&)oldid=(\d+).+?>(.+?)<");
    XmlNode subn;
    foreach (XmlNode n in nl)
    {
        Page p = new Page(site, pageTitle);
        p.lastRevisionID = revisionLinkRE.Match(n.InnerXml).Groups[1].Value;
        DateTime.TryParse(revisionLinkRE.Match(n.InnerXml).Groups[2].Value,
            site.regCulture, DateTimeStyles.AssumeLocal, out p.timestamp);
        p.lastUser = n.SelectSingleNode("ns:span[@class='history-user']/ns:a", site.xmlNS).InnerText;
        // Byte delta, e.g. "(+123)" → 123; stripped of everything but sign and digits.
        if ((subn = n.SelectSingleNode("ns:span[@class='history-size']", site.xmlNS)) != null)
            int.TryParse(Regex.Replace(subn.InnerText, @"[^-+\d]", ""), out p.lastBytesModified);
        p.lastMinorEdit = (n.SelectSingleNode("ns:span[@class='minor']", site.xmlNS) != null) ? true : false;
        p.comment = (n.SelectSingleNode("ns:span[@class='comment']", site.xmlNS) != null) ?
            n.SelectSingleNode("ns:span[@class='comment']", site.xmlNS).InnerText : "";
        pages.Add(p);
    }
    Bot.LogEvent(Bot.Msg("PageList filled with {0} last revisons of \"{1}\" page..."), nl.Count, pageTitle);
}
/// <summary>Interactive Discord wizard that builds an original-character wiki page:
/// collects free-text sections, infobox fields and an image from the user, then
/// saves the assembled page and posts its URL. Typing "stop" at (most) prompts
/// cancels the whole flow.</summary>
/// <param name="e">Command context (channel, member, client).</param>
/// <param name="charName">Title of the wiki page to create.</param>
public async Task AddOCAsync(CommandContext e, [RemainingText] string charName)
{
    string role = "";
    Wiki.Page page = new Wiki.Page(WikiSite, charName) { text = "" };
    var interactivity = e.Client.GetInteractivity();
    // Two preview embeds (sections + infobox) and a status message driving the wizard.
    var embed = new DiscordEmbedBuilder() { Title = "Character Data", Color = DiscordColor.Red };
    var infoBoxEmbed = new DiscordEmbedBuilder() { Title = "Infobox", Color = DiscordColor.SpringGreen };
    DiscordMessage embedMessage = await e.RespondAsync(embed : embed);
    DiscordMessage infoBoxEmbedMessage = await e.RespondAsync(embed : infoBoxEmbed);
    DiscordMessage mainMessage = await e.RespondAsync("Okay, setting up!");
    // Phase 1: free-text wiki sections. pair.Key[1] is the prompt, pair.Key[0]
    // the embed field name, pair.Value writes the content into the page.
    foreach (var pair in WikiFields)
    {
        bool errored = false;
        await mainMessage.ModifyAsync(pair.Key[1]);
        string content = "";
    AnotherMessage:
        var msg = await interactivity.WaitForMessageAsync(x => x.Author == e.Member, TimeSpan.FromSeconds(120));
        if (msg != null)
        {
            // "¬" marks a continuation: keep collecting messages for this section.
            content += msg.Message.Content.Replace("¬", "") + Environment.NewLine;
            if (msg.Message.Content.Contains("¬"))
            {
                await msg.Message.DeleteAsync();
                goto AnotherMessage;
            }
            else if (msg.Message.Content.ToLower() == "stop")
            {
                goto End;
            }
        Errored:
            // Discord embed fields max out at 1024 chars; split longer content.
            // NOTE(review): Split(1024) looks like a chunking extension method — confirm.
            if (content.Length < 1024)
            {
                embed.AddField(pair.Key[0], content);
            }
            else
            {
                var strings = content.Split(1024);
                foreach (var s in strings)
                {
                    embed.AddField(pair.Key[0], new string(s.ToArray()));
                }
            }
            pair.Value(content, page);
            if (errored)
            {
                // Embed overflowed earlier: repost all three messages fresh.
                embedMessage = await e.RespondAsync(embed : embed);
                await infoBoxEmbedMessage.DeleteAsync();
                await mainMessage.DeleteAsync();
                infoBoxEmbedMessage = await e.RespondAsync(embed : infoBoxEmbed);
                mainMessage = await e.RespondAsync("Okay, setting up!");
            }
            else
            {
                try
                {
                    await embedMessage.ModifyAsync(embed : embed.Build());
                }
                catch
                {
                    // Embed limit hit — clear fields and retry once in "errored" mode.
                    embed.ClearFields();
                    errored = true;
                    goto Errored;
                }
            }
            await mainMessage.ModifyAsync("Added field!");
            await msg.Message.DeleteAsync();
            await Task.Delay(1000);
        }
    }
    // Phase 2: structured infobox fields.
    foreach (var infoBoxField in InfoBox.Fields)
    {
        await mainMessage.ModifyAsync(infoBoxField.Question + "\nSend `-` if the field should be omitted.");
    Failed:
        var msg = await interactivity.WaitForMessageAsync(x => x.Author == e.Member, TimeSpan.FromSeconds(120));
        if (msg != null)
        {
            if (infoBoxField.FieldId == "affiliation")
            {
                // Affiliation decides the category role; "rouge" accepted as a
                // common misspelling of "rogue".
                string message = msg.Message.Content.ToLower();
                if (message.Contains("hero")) { role = "Heroes"; infoBoxField.FieldValue = "Pro Hero"; }
                else if (message.Contains("rogue") || message.Contains("rouge")) { role = "Rogues"; infoBoxField.FieldValue = "Rogue"; }
                else if (message.Contains("villain")) { role = "Villains"; infoBoxField.FieldValue = "Villain"; }
                else if (message.Contains("academy student")) { role = "Academy Students"; infoBoxField.FieldValue = "Academy Student"; }
                else { await msg.Message.DeleteAsync(); goto Failed; }
                infoBoxEmbed.AddField(infoBoxField.FieldId, infoBoxField.FieldValue);
            }
            else
            {
                if (msg.Message.Content.ToLower() == "stop") { goto End; }
                infoBoxField.FieldValue = msg.Message.Content;
                try
                {
                    if (infoBoxField.FieldValue.Length < 1024)
                    {
                        infoBoxEmbed.AddField(infoBoxField.FieldId, infoBoxField.FieldValue);
                    }
                    else
                    {
                        // NOTE(review): unlike phase 1, the full FieldValue (not the
                        // chunk s) is added per chunk here — confirm intended.
                        var strings = infoBoxField.FieldValue.Split(1024);
                        foreach (var s in strings)
                        {
                            infoBoxEmbed.AddField(infoBoxField.FieldId, infoBoxField.FieldValue);
                        }
                    }
                }
                catch
                {
                    await msg.Message.DeleteAsync();
                    goto Failed;
                }
            }
            await infoBoxEmbedMessage.ModifyAsync(embed : infoBoxEmbed.Build());
            await msg.Message.DeleteAsync();
            await mainMessage.ModifyAsync("Added field!");
            await Task.Delay(1000);
        }
    }
// Phase 3: infobox image, upload, category tagging and final save.
imageMessage:
    await mainMessage.ModifyAsync("Please link an image for the infobox.");
    var imageMessage = await interactivity.WaitForMessageAsync(x => x.Author == e.Member, TimeSpan.FromSeconds(120));
    if (imageMessage != null)
    {
        string imageName;
        if (imageMessage.Message.Content.ToLower() == "stop") { goto End; }
        // Accept only links ending in a known image extension.
        Regex ItemRegex = new Regex(@"\.(png|gif|jpg|jpeg|tiff|webp)");
        if (ItemRegex.IsMatch(imageMessage.Message.Content))
        {
            imageName = imageMessage.Message.Content.Split('/').Last();
            Wiki.Page p = new Wiki.Page(WikiSite, "File:" + imageName);
            p.UploadImageFromWeb(imageMessage.Message.Content, "N/A", "N/A", "N/A");
            infoBoxEmbed.ImageUrl = imageMessage.Message.Content;
            await imageMessage.Message.DeleteAsync();
            await infoBoxEmbedMessage.ModifyAsync(embed : infoBoxEmbed.Build());
            await mainMessage.ModifyAsync("Added field!");
            await Task.Delay(1000);
        }
        else
        {
            await imageMessage.Message.DeleteAsync();
            await mainMessage.ModifyAsync("That is invalid.");
            goto imageMessage;
        }
        // Prepend the infobox markup and append the role categories.
        string infoboxStuff = InfoBox.BuildInfoBox("image = " + imageName + "|");
        page.text = page.text.Insert(0, infoboxStuff);
        page.text += $"{Environment.NewLine}[[Category:OC]] [[Category:All Characters]] [[Category:OC {role}]] [[Category:{role}]]";
        if (role == "Academy Students")
        {
        academyMessage:
            await mainMessage.ModifyAsync("Please state which year the academy student is in: 1, 2, 3 or 4.");
            var academyMessage = await interactivity.WaitForMessageAsync(x => x.Author == e.Member, TimeSpan.FromSeconds(120));
            if (academyMessage != null)
            {
                if (academyMessage.Message.Content.ToLower() == "stop") { goto End; }
                switch (academyMessage.Message.Content)
                {
                    case "1": page.text += $"[[Category:1st Year Student]]"; break;
                    case "2": page.text += $"[[Category:2nd Year Student]]"; break;
                    case "3": page.text += $"[[Category:3rd Year Student]]"; break;
                    case "4": page.text += $"[[Category:4th Year Student]]"; break;
                    default: await mainMessage.ModifyAsync("That is invalid."); goto academyMessage;
                }
                await academyMessage.Message.DeleteAsync();
            }
        }
        page.Save();
        await mainMessage.ModifyAsync("Complete!");
        await e.RespondAsync($"http://roleplay-heroes-and-villains.wikia.com/wiki/{page.title}");
        return;
    }
// Cancellation path: tear down all wizard messages.
End:
    await mainMessage.ModifyAsync("Action cancelled.");
    await Task.Delay(2000);
    await mainMessage.DeleteAsync();
    await infoBoxEmbedMessage.DeleteAsync();
    await embedMessage.DeleteAsync();
}
/// <summary>Removes obsolete template parameters on a main-namespace page:
/// 'gebunden = benutzung' from higher-rarity recipes, and outdated reward/
/// task/dye parameters from their respective templates.</summary>
/// <param name="p">Page to process; only namespace 0 is handled.</param>
/// <param name="edit">Edit status receiving the save flag and comment.</param>
protected override void ProcessPage(Page p, EditStatus edit)
{
    if (p.GetNamespace() != 0) return;
    p.Load();
    var changes = new List<string>();
    foreach (var template in p.GetAllTemplates())
    {
        if (template.Title == "Rezept")
        {
            // Recipes of masterwork rarity or above that are use-bound: the
            // 'gebunden' parameter is redundant there.
            if (template.Parameters.ContainsKey("seltenheit")
                && new[] { "meisterwerk", "selten", "exotisch", "legendär" }.Contains(template.Parameters["seltenheit"].ToLower())
                && template.Parameters.ContainsKey("gebunden")
                && template.Parameters["gebunden"].ToLower() == "benutzung")
            {
                template.Parameters.Remove("gebunden");
                template.Save();
                changes.Add("Rezept: 'gebunden = benutzung' entfernt");
            }
        }
        else if (template.Title == "Eventbelohnung")
        {
            // Refactored: the three identical remove-and-log branches now share one helper.
            RemoveObsoleteParameters(template, "Eventbelohnung", new[]
            {
                "ep-gold", "ep-silber", "ep-bronze",
                "ep-gold-niederlage", "ep-silber-niederlage", "ep-bronze-niederlage", "ep-niederlage",
                "karma-gold", "karma-silber", "karma-bronze",
                "karma-gold-niederlage", "karma-silber-niederlage", "karma-bronze-niederlage", "karma-niederlage",
                "münzen-gold", "münzen-silber", "münzen-bronze",
                "münzen-gold-niederlage", "münzen-silber-niederlage", "münzen-bronze-niederlage", "münzen-niederlage"
            }, changes);
        }
        else if (template.Title == "Infobox Aufgabe")
        {
            RemoveObsoleteParameters(template, "Infobox Aufgabe", new[] { "erfahrung", "münzen" }, changes);
        }
        else if (template.Title == "Infobox Farbstoff")
        {
            RemoveObsoleteParameters(template, "Infobox Farbstoff", new[] { "seltenheit" }, changes);
        }
    }
    if (changes.Count == 0)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("\tUnbekannt...");
        Console.ResetColor();
    }
    else
    {
        Console.ForegroundColor = ConsoleColor.White;
        var comment = "Überflüssige Parameter entfernt: " + string.Join("; ", changes);
        Console.WriteLine("\t" + comment);
        Console.ResetColor();
        edit.EditComment = comment;
        edit.Save = true;
    }
}

/// <summary>Removes every listed parameter present on the template; if anything
/// was removed, saves the template and records a change entry under the label.</summary>
private static void RemoveObsoleteParameters(Template template, string templateLabel, string[] parametersToRemove, List<string> changes)
{
    var removed = new List<string>();
    foreach (var parameter in parametersToRemove)
    {
        if (template.Parameters.ContainsKey(parameter))
        {
            template.Parameters.Remove(parameter);
            removed.Add(parameter);
        }
    }
    if (removed.Any())
    {
        template.Save();
        changes.Add(templateLabel + ": '" + string.Join("', '", removed) + "' entfernt");
    }
}
/// <summary>Creates a data item for the given wiki page, remembering the group
/// (collection) it belongs to.</summary>
/// <param name="page">Wiki page this item represents.</param>
/// <param name="group">Owning collection.</param>
public DataItem(DotNetWikiBot.Page page, DataCollection group) : base(page)
{
    this.page = page;
    this.group = group;
}
/// <summary>Gets page history using bot query interface ("api.php" MediaWiki extension)
/// and fills this PageList with specified number of last page revisions, optionally loading
/// revision texts as well. On most sites not more than 50 last revisions can be obtained.
/// Thanks to Jutiphan Mongkolsuthree for idea and outline of this function.</summary>
/// <param name="pageTitle">Page to get history of.</param>
/// <param name="lastRevisions">Number of last page revisions to obtain.</param>
/// <param name="loadTexts">Load revision texts right away.</param>
public void FillFromPageHistoryEx(string pageTitle, int lastRevisions, bool loadTexts)
{
    if (!site.botQuery)
        throw new WikiBotException(Bot.Msg("The \"api.php\" MediaWiki extension is not available."));
    if (string.IsNullOrEmpty(pageTitle))
        throw new ArgumentNullException("pageTitle");
    if (lastRevisions <= 0)
        throw new ArgumentOutOfRangeException("lastRevisions", Bot.Msg("Quantity must be positive."));
    Bot.LogEvent(
        Bot.Msg("Getting {0} last revisons of \"{1}\" page..."), lastRevisions, pageTitle);
    // api.php revisions query; revision texts are requested only when loadTexts is set.
    string queryUri = site.site + site.indexPath + "api.php?action=query&prop=revisions&titles=" + HttpUtility.UrlEncode(pageTitle) + "&rvprop=ids|user|comment|timestamp" + (loadTexts ? "|content" : "") + "&format=xml&rvlimit=" + lastRevisions.ToString();
    Page p;
    using (XmlReader reader = XmlReader.Create(queryUri))
    {
        reader.ReadToFollowing("api");
        reader.Read();
        // API-level errors are reported but do not abort the call.
        if (reader.Name == "error")
            Console.Error.WriteLine(Bot.Msg("Error: {0}"), reader.GetAttribute("info"));
        // One <rev> element per revision; attributes map directly onto Page fields.
        while (reader.ReadToFollowing("rev"))
        {
            p = new Page(site, pageTitle);
            p.lastRevisionID = reader.GetAttribute("revid");
            p.lastUser = reader.GetAttribute("user");
            p.comment = reader.GetAttribute("comment");
            p.timestamp = DateTime.Parse(reader.GetAttribute("timestamp")).ToUniversalTime();
            if (loadTexts)
                p.text = reader.ReadString();
            pages.Add(p);
        }
    }
    Bot.LogEvent(Bot.Msg("PageList filled with {0} last revisons of \"{1}\" page."),
        pages.Count, pageTitle);
}
/// <summary>Gets page history and fills this PageList with specified number of recent page
/// revisions. Only revision identifiers, user names, timestamps and comments are
/// loaded, not the texts. Call Load() (but not LoadEx) to load the texts of page revisions.
/// The function combines XML (XHTML) parsing and regular expressions matching.</summary>
/// <param name="pageTitle">Page to get history of.</param>
/// <param name="lastRevisions">Number of last page revisions to get.</param>
public void FillFromPageHistory(string pageTitle, int lastRevisions)
{
    if (string.IsNullOrEmpty(pageTitle))
        throw new ArgumentNullException("pageTitle");
    // NOTE(review): the reported parameter name "quantity" does not match the
    // actual parameter lastRevisions — confirm before changing.
    if (lastRevisions <= 0)
        throw new ArgumentOutOfRangeException("quantity", Bot.Msg("Quantity must be positive."));
    Console.WriteLine(
        Bot.Msg("Getting {0} last revisons of \"{1}\" page..."), lastRevisions, pageTitle);
    string res = site.site + site.indexPath + "index.php?title=" + HttpUtility.UrlEncode(pageTitle) + "&limit=" + lastRevisions.ToString() + "&action=history";
    // Cut the fetched HTML down to just the <ul id="pagehistory"> list.
    string src = site.GetPageHTM(res);
    src = src.Substring(src.IndexOf("<ul id=\"pagehistory\">"));
    src = src.Substring(0, src.IndexOf("</ul>") + 5);
    Page p = null;
    using (XmlReader reader = site.GetXMLReader(src))
    {
        while (reader.Read())
        {
            // Each <li> opens a new revision entry; it is added when the </li> is seen.
            if (reader.Name == "li" && reader.NodeType == XmlNodeType.Element)
            {
                p = new Page(site, pageTitle);
                p.lastMinorEdit = false;
                p.comment = "";
            }
            else if (reader.Name == "span" && reader["class"] == "mw-history-histlinks")
            {
                // Revision link: oldid from the href, timestamp from the link text.
                reader.ReadToFollowing("a");
                p.lastRevisionID = reader["href"].Substring(reader["href"].IndexOf("oldid=") + 6);
                DateTime.TryParse(reader.ReadString(), site.regCulture, DateTimeStyles.AssumeLocal, out p.timestamp);
            }
            else if (reader.Name == "span" && reader["class"] == "history-user")
            {
                reader.ReadToFollowing("a");
                p.lastUser = reader.ReadString();
            }
            else if (reader.Name == "abbr")
                // Minor-edit marker is rendered as an <abbr> element.
                p.lastMinorEdit = true;
            else if (reader.Name == "span" && reader["class"] == "history-size")
                // Byte delta: strip everything except sign characters and digits.
                int.TryParse(Regex.Replace(reader.ReadString(), @"[^-+\d]", ""), out p.lastBytesModified);
            else if (reader.Name == "span" && reader["class"] == "comment")
            {
                // Strip inner markup, then the surrounding brackets.
                p.comment = Regex.Replace(reader.ReadInnerXml().Trim(), "<.+?>", "");
                p.comment = p.comment.Substring(1, p.comment.Length - 2); // brackets
            }
            if (reader.Name == "li" && reader.NodeType == XmlNodeType.EndElement)
                pages.Add(p);
        }
    }
    Console.WriteLine(Bot.Msg("PageList filled with {0} last revisons of \"{1}\" page..."),
        pages.Count, pageTitle);
}
/// <summary>Loads text and metadata for pages in PageList via XML export interface.
/// Non-existent pages will be automatically removed from the PageList.
/// Please, don't use this function when going to edit big amounts of pages on
/// popular public wikis, as it compromises edit conflict detection. In that case,
/// each page's text should be loaded individually right before its processing
/// and saving.</summary>
public void LoadEx()
{
    if (IsEmpty())
        throw new WikiBotException(Bot.Msg("The PageList is empty. Nothing to load."));
    Bot.LogEvent(Bot.Msg("Loading {0} pages..."), pages.Count);
    string res = site.site + site.indexPath + "index.php?title=Special:Export&action=submit";
    // StringBuilder instead of repeated string concatenation — the old code was
    // O(n²) for large page lists.
    var postData = new System.Text.StringBuilder("curonly=True&pages=");
    foreach (Page page in pages)
        postData.Append(HttpUtility.UrlEncode(page.title)).Append("\r\n");
    string result = site.PostDataAndGetResultHTM(res, postData.ToString());
    Clear();
    // FIX: dispose the reader even when parsing throws (was a bare Close()).
    using (XmlReader reader = XmlReader.Create(new StringReader(result)))
    {
        while (reader.ReadToFollowing("page"))
        {
            Page p = new Page(site, "");
            p.ParsePageXML(reader.ReadOuterXml());
            pages.Add(p);
        }
    }
}
/// <summary>Gets all MediaWiki messages from "Special:Allmessages" page and loads them into
/// site.messages PageList. The function is not backward compatible.</summary>
public void GetMediaWikiMessages()
{
    if (messages == null)
        messages = new PageList(this);
    Console.WriteLine(Bot.Msg("Updating MediaWiki messages dump. Please, wait..."));
    string res = site + indexPath + "index.php?title=Special:Allmessages";
    string src = "";
    Page p = null;
    // Matches the "next page" pagination link so we can follow the offset chain.
    Regex nextPortionRE = new Regex("offset=([^\"]+)\" title=\"[^\"]+\" rel=\"next\"");
    do
    {
        // First request asks for up to 5000 messages; subsequent requests reuse
        // the offset extracted from the previous response's "next" link.
        src = GetPageHTM(res + (src != "" ? "&offset=" +
            HttpUtility.HtmlDecode(nextPortionRE.Match(src).Groups[1].Value) : "&limit=5000"));
        using (XmlReader reader = GetXMLReader(src))
        {
            // Message rows live inside the table body; skip ahead to it.
            reader.ReadToFollowing("tbody");
            while (reader.Read())
            {
                // A <tr> with an id attribute starts a new message; the id holds
                // the message key ("msg_<name>"), placed into the MediaWiki
                // namespace (namespaces["8"]).
                if (reader.Name == "tr" && reader.NodeType == XmlNodeType.Element
                    && reader["id"] != null)
                    p = new Page(this, namespaces["8"].ToString() + ":" +
                        Bot.Capitalize(reader["id"].Replace("msg_", "")));
                // The message body is in a <td> marked either default or
                // customized ("am_actual" overwrites "am_default" if both occur).
                else if (reader.Name == "td" &&
                    (reader["class"] == "am_default" || reader["class"] == "am_actual"))
                    p.text = reader.ReadString();
                // </tr> commits the assembled message page.
                else if (reader.Name == "tr" && reader.NodeType == XmlNodeType.EndElement)
                    messages.Add(p);
                else if (reader.Name == "tbody" && reader.NodeType == XmlNodeType.EndElement)
                    break;
            }
        }
    } while (nextPortionRE.IsMatch(src));
    // NOTE(review): p still references the last message added inside the loop, so
    // this may add the final message a second time — confirm whether the last row
    // can end without a </tr> (the case this line appears intended to cover).
    if (p != null)
        messages.Add(p);
    Console.WriteLine(Bot.Msg("MediaWiki messages dump updated successfully."));
}
/// <summary>Splits wiki text into sections, cutting at the shallowest heading level
/// present that is at or above <paramref name="minLevel"/>. Text before the first
/// such heading is not emitted as a section.</summary>
/// <param name="text">Wiki text to split.</param>
/// <param name="page">Page the sections belong to.</param>
/// <param name="minLevel">Minimum heading level to consider (default 1).</param>
/// <returns>Lazily enumerated sections, each starting at a heading.</returns>
public static IEnumerable<Section> GetAllSections(string text, Page page, int minLevel = 1)
{
    // Collect every heading whose level (length of the '=' run) qualifies.
    var candidates = new List<Match>();
    foreach (Match m in HeaderRegex.Matches(text))
    {
        if (m.Groups[1].Length >= minLevel)
            candidates.Add(m);
    }
    if (candidates.Count == 0)
        yield break;

    // Sections are delimited by the shallowest qualifying heading level.
    int sectionLevel = candidates.Min(m => m.Groups[1].Length);

    int sectionStart = 0;
    int emittedHeadings = 0;
    foreach (Match heading in candidates)
    {
        if (heading.Groups[1].Length != sectionLevel)
            continue;
        emittedHeadings++;
        // Skip the span before the first section heading; every later heading
        // closes the section that began at the previous one.
        if (heading.Index > 0 && emittedHeadings > 1)
            yield return new Section(
                text.Substring(sectionStart, heading.Index - sectionStart),
                page, sectionLevel);
        sectionStart = heading.Index;
    }
    // The last heading's section runs to the end of the text.
    if (sectionStart < text.Length)
        yield return new Section(
            text.Substring(sectionStart, text.Length - sectionStart),
            page, sectionLevel);
}
/// <summary>Renames the page on the wiki.</summary>
/// <param name="newTitle">Title the page should carry after the move.</param>
/// <param name="reason">Reason recorded in the move log.</param>
/// <exception cref="ArgumentNullException">Thrown when newTitle is null or empty.</exception>
/// <exception cref="WikiBotException">Thrown when no source title is set, no edit
/// token could be obtained, or the wiki rejected the move.</exception>
public void RenameTo(string newTitle, string reason)
{
    if (string.IsNullOrEmpty(newTitle))
        throw new ArgumentNullException("newTitle");
    if (string.IsNullOrEmpty(title))
        throw new WikiBotException(Bot.Msg("No title specified for page to rename."));

    // The move form supplies the edit session token required for the POST below.
    Page movePage = new Page(site, "Special:Movepage/" + title);
    movePage.GetEditSessionData();
    if (string.IsNullOrEmpty(movePage.editSessionToken))
        throw new WikiBotException(string.Format(
            Bot.Msg("Unable to rename page \"{0}\" to \"{1}\"."), title, newTitle));

    string postData = string.Format("wpNewTitle={0}&wpOldTitle={1}&wpEditToken={2}" +
        "&wpReason={3}",
        HttpUtility.UrlEncode(newTitle),
        HttpUtility.UrlEncode(title),
        HttpUtility.UrlEncode(movePage.editSessionToken),
        HttpUtility.UrlEncode(reason));
    string response = site.PostDataAndGetResultHTM(
        site.indexPath + "index.php?title=Special:Movepage&action=submit", postData);

    // A response that still matches the edit-token pattern means the move form
    // was re-rendered, i.e. the rename was rejected.
    if (site.editSessionTokenRE2.IsMatch(response))
        throw new WikiBotException(string.Format(
            Bot.Msg("Failed to rename page \"{0}\" to \"{1}\"."), title, newTitle));

    Bot.LogEvent(
        Bot.Msg("Page \"{0}\" was successfully renamed to \"{1}\"."), title, newTitle);
    title = newTitle;
}