Example no. 1
0
        //Builds a complete HackInfo record for the hack page at the given URL.
        //Each field is produced by its own dedicated parser helper.
        public HackInfo ParseInfo(string url)
        {
            HtmlDocument document = new HtmlDocument();
            document.LoadHtml(CallUrl(url));

            //Page layout varies between hacks; the modifier offsets node indices
            //for the helpers that walk the child-node collection.
            int                modifier = GetNodeModifier(document);
            HtmlNodeCollection nodes    = GetChildNodes(document);

            HackInfo info = new HackInfo
            {
                //imageURLs = GetScreenshots(url),
                rating      = GetRating(document),
                author      = GetAuthor(modifier, nodes),
                type        = GetHackType(modifier, nodes),
                exits       = GetExits(modifier, nodes),
                description = GetDescription(modifier, nodes),
                downloadURL = GetDownloadURL(modifier, nodes),
            };

            return info;
        }
Example no. 2
0
        //To be used only if the cache file gets deleted
        //or to replace a cache that contains errors.
        //Scrapes every listing page of SMWCentral's hack section, parses each
        //hack's detail page, and serializes the result to ./resources/cache.json.
        private void GenerateCache()
        {
            List <string>        urls     = new List <string>();
            List <string>        titles   = new List <string>();
            List <html.HackInfo> hackList = new List <html.HackInfo>();

            html.HtmlParser parser = new html.HtmlParser();

            string       response = parser.CallUrl("https://www.smwcentral.net/?p=section&s=smwhacks&n=0");
            HtmlDocument htmlDoc  = new HtmlDocument();

            htmlDoc.LoadHtml(response);

            //Pager links are titled "Go to page N"; the last one carries the
            //total page count.
            var pageNumbers = htmlDoc.DocumentNode.Descendants("a")
                              .Where(node => node.GetAttributeValue("title", "").Contains("Go to page "))
                              .ToList();

            //Only the final pager link matters. Converting just that one (instead
            //of every link, as before) also avoids throwing if an intermediate
            //pager link ever carries non-numeric text.
            if (pageNumbers.Count > 0)
            {
                finalPage = Convert.ToInt32(pageNumbers[pageNumbers.Count - 1].InnerHtml);
            }

            for (int i = 1; i <= finalPage; i++)
            {
                response = parser.CallUrl("https://www.smwcentral.net/?p=section&s=smwhacks&n=" + i);

                //Generate HTML doc from URL and parse only sections containing hack links
                htmlDoc.LoadHtml(response);
                var hackURLs = htmlDoc.DocumentNode.Descendants("a")
                               //Checks that the link is not from a Tip section of the page
                               .Where(node => !node.ParentNode.GetAttributeValue("class", "").Contains("rope"))
                               //Grabs urls of hacks based on their url containing "details"
                               .Where(node => node.GetAttributeValue("href", "").Contains("details"))
                               //Checks that smwcentral is not contained
                               .Where(node => !node.GetAttributeValue("href", "").Contains("smwcentral"));

                //Add URLs to hacks and the names of hacks into HackList object
                foreach (var hackURL in hackURLs)
                {
                    urls.Add("https://www.smwcentral.net" + WebUtility.HtmlDecode(hackURL.GetAttributeValue("href", "")));
                    titles.Add(WebUtility.HtmlDecode(hackURL.InnerText));
                }
            }

            //Loop-invariant: fetch the invalid-character set once rather than on
            //every iteration of the parse loop below.
            char[] invalidFileNameChars = System.IO.Path.GetInvalidFileNameChars();

            for (int i = 0; i < urls.Count; i++)
            {
                try
                {
                    hackInfo       = parser.ParseInfo(urls[i]);
                    hackInfo.title = titles[i];
                    //Titles double as file names elsewhere, so strip characters
                    //the file system rejects.
                    foreach (var c in invalidFileNameChars)
                    {
                        hackInfo.title = hackInfo.title.Replace(c, ' ');
                    }
                    hackList.Add(hackInfo);
                }
                catch (Exception)
                {
                    //Best-effort: one unparsable hack page must not abort the
                    //whole cache rebuild. TODO(review): log the failed URL so
                    //gaps in the cache are diagnosable.
                }
            }

            string jsonString = JsonSerializer.Serialize(hackList);

            System.IO.File.WriteAllText("./resources/cache.json", jsonString);
        }
Example no. 3
0
        //Checks if the local cache file matches SMWCentral's most recent files.
        //If not, it adds all hacks not currently included into the cache file.
        //If the cache file is very old, this function can take several minutes.
        private void UpdateCache()
        {
            List <html.HackInfo> newHacks = new List <html.HackInfo>();
            List <string>        urls     = new List <string>();
            List <string>        titles   = new List <string>();

            html.HtmlParser parser = new html.HtmlParser();

            //Newest hack we already have cached; listing pages are scanned until
            //its title shows up.
            string newestCachedTitle = cache[0].title;

            int titleIndex = -1;

            int i = 1;

            //NOTE(review): if the newest cached hack is ever removed from the
            //site, this loop never finds it and runs forever — consider capping
            //the page count. Behavior preserved here.
            while (titleIndex < 0)
            {
                string response = parser.CallUrl("https://www.smwcentral.net/?p=section&s=smwhacks&n=" + i);

                //Generate HTML doc from URL and parse only sections containing hack links
                HtmlDocument htmlDoc = new HtmlDocument();
                htmlDoc.LoadHtml(response);
                var hackURLs = htmlDoc.DocumentNode.Descendants("a")
                               //Checks that the link is not from a Tip section of the page
                               .Where(node => !node.ParentNode.GetAttributeValue("class", "").Contains("rope"))
                               //Grabs urls of hacks based on their url containing "details"
                               .Where(node => node.GetAttributeValue("href", "").Contains("details"))
                               //Checks that smwcentral is not contained
                               .Where(node => !node.GetAttributeValue("href", "").Contains("smwcentral"));

                //Add URLs to hacks and the names of hacks into HackList object
                foreach (var hackURL in hackURLs)
                {
                    urls.Add("https://www.smwcentral.net" + WebUtility.HtmlDecode(hackURL.GetAttributeValue("href", "")));
                    titles.Add(WebUtility.HtmlDecode(hackURL.InnerText));
                }

                //IndexOf returns -1 when absent, so one lookup replaces the
                //original Contains + IndexOf double scan of the growing list.
                titleIndex = titles.IndexOf(newestCachedTitle);

                i++;
            }

            //Loop-invariant: fetch the invalid-character set once, not per hack.
            char[] invalidFileNameChars = System.IO.Path.GetInvalidFileNameChars();

            //Everything listed before the newest cached title is new to us.
            for (i = 0; i < titleIndex; i++)
            {
                try
                {
                    hackInfo       = parser.ParseInfo(urls[i]);
                    hackInfo.title = titles[i];
                    //Titles double as file names elsewhere, so strip characters
                    //the file system rejects.
                    foreach (var c in invalidFileNameChars)
                    {
                        hackInfo.title = hackInfo.title.Replace(c, ' ');
                    }
                    newHacks.Add(hackInfo);
                }
                catch (Exception)
                {
                    //Best-effort: one unparsable hack page must not abort the
                    //cache update. TODO(review): log the failed URL.
                }
            }

            //New hacks go first so index 0 stays the most recent entry.
            newHacks.AddRange(cache);

            string jsonString = JsonSerializer.Serialize(newHacks);

            System.IO.File.WriteAllText("./resources/cache.json", jsonString);

            LoadCache();
        }