/// <summary>
/// Edits or creates the given wiki page, but only when it is safe to do so:
/// the page must not have been edited with the same content before (per
/// <paramref name="history"/>), and an existing page must still carry the
/// BotFlag marker (i.e. it has not been manually edited).
/// </summary>
/// <param name="client">MediaWiki client used to parse and edit pages.</param>
/// <param name="history">Edit history used to detect and record duplicate edits.</param>
/// <param name="pageTitle">Title of the page to create or edit.</param>
/// <param name="pageContent">New content for the page.</param>
/// <returns><c>true</c> if an edit was made; <c>false</c> otherwise.</returns>
private static async Task<bool> _editPageAsync(MediaWikiClient client, EditHistory history, string pageTitle, string pageContent) {

    // Check to see if we've made this edit before.
    // If we've already made this page before, don't do anything.
    EditRecord record = await history.GetEditRecordAsync(pageTitle, pageContent);

    if (record is null) {

        // Get existing page content.
        // This allows us to make sure that no one has removed the "{{BotGenerated}}" flag.
        // If it has been removed, do not modify the page.
        MediaWikiApiParseRequestResult parseResult = client.Parse(pageTitle, new ParseParameters());

        if (parseResult.ErrorCode == ErrorCode.MissingTitle || parseResult.Text.Contains(BotFlag)) {

            // MissingTitle means the page doesn't exist yet, so this edit creates it.
            if (parseResult.ErrorCode == ErrorCode.MissingTitle)
                _log($"creating page \"{pageTitle}\"");
            else
                _log($"editing page \"{pageTitle}\"");

            try {

                client.Edit(pageTitle, new EditParameters {
                    Action = EditAction.Text,
                    Text = pageContent
                });

                // Make a record of the edit.
                await history.AddEditRecordAsync(pageTitle, pageContent);

                // Return true to indicate that edits have occurred.
                return true;

            }
            catch (Exception ex) {
                // Log and fall through — a failed edit is reported as "no edit occurred".
                _log(ex.ToString());
            }

        }
        else {
            // The bot flag was removed by a human editor; leave the page alone.
            _log($"skipping page \"{pageTitle}\" (manually edited)");
        }

    }
    else {
        // Identical content was already written in a previous run.
        _log($"skipping page \"{pageTitle}\" (previously edited)");
    }

    // Return false to indicate that no edits have occurred.
    return false;

}
/// <summary>
/// Edits or creates the wiki page for a species, and performs cleanup that is
/// specific to species pages: associates the edit with the species, deletes
/// bot-generated pages left behind under the species' previous titles (e.g.
/// after a rename), and deletes bot-generated redirects that have become
/// invalid.
/// </summary>
/// <param name="client">MediaWiki client used to parse, edit, and delete pages.</param>
/// <param name="history">Edit history used to track edits, titles, and redirects.</param>
/// <param name="species">The species this page describes.</param>
/// <param name="pageTitle">Current title of the species' page.</param>
/// <param name="pageContent">New content for the page.</param>
private static async Task _editSpeciesPageAsync(MediaWikiClient client, EditHistory history, Species species, string pageTitle, string pageContent) {

    if (await _editPageAsync(client, history, pageTitle, pageContent)) {

        // If the edit was successful, associate it with this species.
        EditRecord record = await history.GetEditRecordAsync(pageTitle, pageContent);

        if (record != null) {

            await history.AddEditRecordAsync(species.Id, record);

            // Because it's possible that the species was renamed, we need to look at past edits to find previous titles of the same page.
            // Old pages for renamed species will be deleted.
            // Use an ordinal case-insensitive comparison for titles (culture-sensitive
            // ToLower() can mis-compare titles, e.g. the Turkish "I" problem).
            EditRecord[] editRecords = (await history.GetEditRecordsAsync(species.Id))
                .Where(x => x.Id != record.Id && !string.Equals(x.Title, record.Title, StringComparison.OrdinalIgnoreCase))
                .ToArray();

            // Delete all created pages where the old title does not match the current title.
            foreach (EditRecord editRecord in editRecords) {

                MediaWikiApiParseRequestResult parseResult = client.Parse(editRecord.Title, new ParseParameters());

                if (parseResult.Text.Contains(BotFlag)) {

                    // Only delete pages that haven't been manually edited.
                    client.Delete(editRecord.Title, new DeleteParameters {
                        Reason = $"species page moved to {pageTitle}"
                    });

                    // Add an edit record for this page so that we can restore the content later without it thinking we've already made this edit.
                    // This is important, because this step can delete redirects when a page with redirects is updated. By creating a new edit record, the redirect will be recreated.
                    await history.AddEditRecordAsync(editRecord.Title, string.Empty);

                }

            }

            // We also need to delete any redirect pages that are now invalid (i.e. when specific epithet that points to a common name is changed).
            // Delete all redirects that point to this page (or one of this page's previous titles).
            RedirectRecord[] redirectRecords = (await history.GetRedirectRecordsAsync())
                .Where(i => i.Target == pageTitle || editRecords.Any(j => j.Title == i.Target)) // points to the title of this page, or one of its previous titles
                .Where(i => i.Title != species.FullName) // the title doesn't match this species' full name (the species has been renamed)
                .ToArray();

            foreach (RedirectRecord redirectRecord in redirectRecords) {

                MediaWikiApiParseRequestResult parseResult = client.Parse(redirectRecord.Title, new ParseParameters());

                if (parseResult.IsRedirect && parseResult.Text.Contains(BotFlag)) {

                    // Only delete pages that haven't been manually edited.
                    client.Delete(redirectRecord.Title, new DeleteParameters {
                        Reason = "outdated redirect"
                    });

                }

            }

        }

    }

}