/// <summary>Loads texts and metadata (revision ID, timestamp, last comment,
        /// last contributor, minor edit mark) for pages in this PageList
        /// via the wiki's "Special:Export" interface.
        /// Non-existent pages will be automatically removed from the PageList.
        /// Please, don't use this function when going to edit big amount of pages on
        /// popular public wikis, as it compromises edit conflict detection. In that case,
        /// each page's text should be loaded individually right before its processing
        /// and saving.</summary>
        /// <exception cref="WikiBotException">Thrown when this PageList is empty.</exception>
        public void LoadWithMetadata()
        {
            if (IsEmpty())
                throw new WikiBotException(Bot.Msg("The PageList is empty. Nothing to load."));
            Console.WriteLine(Bot.Msg("Loading {0} pages..."), pages.Count);

            // Request all page texts in a single batch through Special:Export.
            string res = site.indexPath + "?title=Special:Export&action=submit";
            StringBuilder postData = new StringBuilder("curonly=True&pages=");
            foreach (Page page in pages)
                postData.Append(HttpUtility.UrlEncode(page.title)).Append("\r\n");
            string src = site.PostDataAndGetResult(res, postData.ToString());

            // Primary parsing way: stream the export XML page-by-page.
            PageList pl = new PageList(site);
            using (XmlReader reader = XmlReader.Create(new StringReader(src))) {
                while (reader.ReadToFollowing("page")) {
                    Page p = new Page(site);
                    p.ParsePageXml(reader.ReadOuterXml());
                    pl.Add(p);
                }
            }
            // Test the freshly parsed list, not this PageList: "pages" is
            // guaranteed non-empty by the IsEmpty() check above, so testing it
            // would make the XPath fallback below unreachable.
            if (pl.pages.Count > 0) {
                Clear();
                pages = pl.pages;
                return;
            }
            else {    // FALLBACK, use alternative parsing way, XPath
                Console.WriteLine(
                    Bot.Msg("XML parsing failed, switching to alternative parser..."));
                src = Bot.RemoveXMLRootAttributes(src);
                StringReader strReader = new StringReader(src);
                XPathDocument doc = new XPathDocument(strReader);
                strReader.Close();
                XPathNavigator nav = doc.CreateNavigator();
                foreach (Page page in pages) {
                    if (page.title.Contains("'")) {    // There's no good way to escape "'" in XPath
                        page.LoadWithMetadata();
                        continue;
                    }
                    string query = "//page[title='" + page.title + "']/";
                    try {
                        page.text =
                            nav.SelectSingleNode(query + "revision/text").InnerXml;
                    }
                    catch (System.NullReferenceException) {
                        continue;    // page is absent from the export dump
                    }
                    page.text = HttpUtility.HtmlDecode(page.text);
                    page.pageId = nav.SelectSingleNode(query + "id").InnerXml;
                    try {
                        // Registered contributor: user name plus numeric user ID.
                        page.lastUser = nav.SelectSingleNode(query +
                            "revision/contributor/username").InnerXml;
                        page.lastUserId = nav.SelectSingleNode(query +
                            "revision/contributor/id").InnerXml;
                    }
                    catch (System.NullReferenceException) {
                        // Anonymous contributor: only an IP address is present.
                        page.lastUser = nav.SelectSingleNode(query +
                            "revision/contributor/ip").InnerXml;
                    }
                    page.lastUser = HttpUtility.HtmlDecode(page.lastUser);
                    page.revision = nav.SelectSingleNode(query + "revision/id").InnerXml;
                    page.lastMinorEdit = nav.SelectSingleNode(query +
                        "revision/minor") != null;
                    try {
                        page.comment = nav.SelectSingleNode(query + "revision/comment").InnerXml;
                        page.comment = HttpUtility.HtmlDecode(page.comment);
                    }
                    catch (System.NullReferenceException) {;}    // no edit summary — leave as is
                    page.timestamp =
                        nav.SelectSingleNode(query + "revision/timestamp").ValueAsDateTime;
                }

                if (string.IsNullOrEmpty(pages[0].text)) {    // FALLBACK 2, load pages one-by-one
                    foreach (Page page in pages)
                        page.LoadWithMetadata();
                }
            }
        }
 /// <summary>Finds all internal wikilinks in page text, excluding interwiki
 /// links, links to sister projects, categories, embedded images and links in
 /// image descriptions.</summary>
 /// <returns>Returns the PageList object, in which page titles are the wikilinks,
 /// found in text.</returns>
 public PageList GetLinks()
 {
     MatchCollection linkMatches = Site.wikiLinkRE.Matches(text);
     // Links that must not appear in the result: interwiki and sister-project links.
     StringCollection excludedLinks = new StringCollection();
     excludedLinks.AddRange(GetInterWikiLinks());
     excludedLinks.AddRange(GetSisterWikiLinks(true));
     PageList result = new PageList(site);
     foreach (Match linkMatch in linkMatches) {
         string title = linkMatch.Groups[1].Value;
         // Skip embedded images (ns 6) and category links (ns 14), in both
         // the local and the canonical English namespace prefix.
         bool isImage =
             title.StartsWith(site.namespaces["6"] + ":", true, site.langCulture) ||
             title.StartsWith(Site.wikiNSpaces["6"] + ":", true, site.langCulture);
         bool isCategory =
             title.StartsWith(site.namespaces["14"] + ":", true, site.langCulture) ||
             title.StartsWith(Site.wikiNSpaces["14"] + ":", true, site.langCulture);
         if (isImage || isCategory)
             continue;
         title = title.TrimStart(':');
         if (excludedLinks.Contains(title))
             continue;
         // Drop the "#section" fragment, if any, keeping the bare page title.
         int fragmentIndex = title.IndexOf("#");
         if (fragmentIndex != -1)
             title = title.Substring(0, fragmentIndex);
         result.Add(new Page(site, title));
     }
     return result;
 }
 /// <summary>Finds all wikilinks in page text, excluding interwiki
 /// links, categories, embedded images and links in
 /// image descriptions.</summary>
 /// <returns>Returns the PageList object, in which page titles are the wikilinks,
 /// found in text.</returns>
 public PageList GetLinks()
 {
     MatchCollection wikiLinks = site.regexes["wikiLink"].Matches(text);
     // Collect links that must be excluded: sister-wiki and interlanguage links.
     var excluded = GetSisterwikiLinks();
     excluded.AddRange(GetInterLanguageLinks());
     PageList linkedPages = new PageList(site);
     foreach (Match wikiLink in wikiLinks) {
         string linkTitle = wikiLink.Groups["title"].Value;
         // Image (ns 6) and category (ns 14) links are not page links —
         // check both localized and English namespace prefixes.
         if (linkTitle.StartsWith(site.GetNsPrefix(6), true, site.langCulture)
             || linkTitle.StartsWith(site.GetEnglishNsPrefix(6), true, site.langCulture)
             || linkTitle.StartsWith(site.GetNsPrefix(14), true, site.langCulture)
             || linkTitle.StartsWith(site.GetEnglishNsPrefix(14), true, site.langCulture))
         {
             continue;
         }
         linkTitle = linkTitle.TrimStart(':');
         if (excluded.Contains(linkTitle))
             continue;
         // Strip a "#section" fragment so only the page title remains.
         int fragmentIndex = linkTitle.IndexOf("#");
         if (fragmentIndex != -1)
             linkTitle = linkTitle.Substring(0, fragmentIndex);
         linkedPages.Add(new Page(site, linkTitle));
     }
     return linkedPages;
 }