Code example #1
0
        public void Scrape()
        {
            // Runs every registered web scraper in turn, accumulating results
            // in _scrapedShoes, then always raises ScrapeCompletedEvent with a
            // summary of the run (success flag, message, total shoe count).
            _scrapedShoes.Clear();

            ScrapeResponse serviceResponse = new ScrapeResponse();

            try
            {
                string[,]          websites     = WebScraperFactory.Websites;
                ScrapePageResponse pageResponse = new ScrapePageResponse();

                for (int site = 0; site < WebScraperFactory.WebsiteCount; ++site)
                {
                    WebScraper scraper = WebScraperFactory.CreateWebScraper(websites[site, 0]);

                    ScrapePageRequest request = new ScrapePageRequest
                    {
                        Website  = new string[] { websites[site, 0], websites[site, 1] },
                        ShoeList = _scrapedShoes
                    };

                    pageResponse = scraper.ScrapeWebpage(request);

                    // Stop at the first site that fails; its status becomes ours.
                    if (!pageResponse.Success)
                    {
                        break;
                    }
                }

                serviceResponse.Success     = pageResponse.Success;
                serviceResponse.Message     = pageResponse.Message;
                serviceResponse.ScrapeCount = _scrapedShoes.Count;
            }
            catch (Exception ex)
            {
                // Unexpected failure: report it and discard any partial results.
                serviceResponse.Success     = false;
                serviceResponse.Message     = ex.Message;
                serviceResponse.ScrapeCount = 0;
                _scrapedShoes.Clear();
            }
            finally
            {
                FilterShoes("");

                ScrapeCompletedEvent?.Invoke(this, new ScrapeResponseEventArgs(serviceResponse));
            }
        }
Code example #2
0
File: AIOBotScraper.cs  Project: vietle2111/SnkrBot
        /// <summary>
        /// Scrapes the AIO Bot release listing. <c>request.Website[0]</c> is the
        /// site name, <c>request.Website[1]</c> the page URL. Parsed shoes are
        /// appended to <c>request.ShoeList</c>.
        /// </summary>
        /// <param name="request">Site name/URL pair plus the shared shoe list to fill.</param>
        /// <returns>Success flag, error message (if any), and number of shoes found.</returns>
        public override ScrapePageResponse ScrapeWebpage(ScrapePageRequest request)
        {
            ScrapePageResponse response = new ScrapePageResponse {
                Success = true, Message = ""
            };

            try
            {
                HtmlDocument webpage = _webscraper.Load(request.Website[1]);

                // HtmlAgilityPack's SelectNodes returns null (not an empty
                // collection) when nothing matches — guard so an empty page is
                // reported as a successful scrape of zero shoes, not a failure.
                HtmlNodeCollection nodes = webpage.DocumentNode.SelectNodes("//div[contains(@class, 'rt-holder')]");
                _shoeCount           = (nodes != null) ? nodes.Count : 0;
                response.ScrapeCount = _shoeCount;

                if (_shoeCount != 0)
                {
                    // "//" XPath searches from the document root even when run on
                    // a child node, so the original per-iteration queries were
                    // document-wide anyway — evaluate each list once up front.
                    HtmlNodeCollection titles = webpage.DocumentNode.SelectNodes("//h3 [contains(@class, 'entry-title')]");
                    HtmlNodeCollection images = webpage.DocumentNode.SelectNodes("//div[contains(@class, 'rt-holder')]//img");

                    ShoeModel[] tmpArray = new ShoeModel[_shoeCount];

                    for (int i = 0; i < _shoeCount; ++i)
                    {
                        ShoeModel tmpShoe   = new ShoeModel();
                        tmpShoe.ID          = 0;
                        tmpShoe.WebsiteName = request.Website[0];
                        tmpShoe.WebsiteURL  = request.Website[1];
                        tmpShoe.ModelName   = WebScraper.CleanHTMLString(titles[i].InnerText);
                        // NOTE(review): Description currently mirrors ModelName; the
                        // commented-out post-meta selector below looks like the
                        // intended source — confirm before switching.
                        tmpShoe.Description = WebScraper.CleanHTMLString(titles[i].InnerText);
                        //tmpShoe.Description = WebScraper.CleanHTMLString(nodes[i].SelectNodes("//div[contains(@class, 'post-meta-user')]//i")[i].InnerText);
                        tmpShoe.ImageURI    = images[i].GetAttributeValue("src", "");
                        tmpShoe.ImageBinary = WebScraper.LoadImageBinary(tmpShoe.ImageURI);
                        tmpArray[i]         = tmpShoe;
                    }
                    request.ShoeList.AddRange(tmpArray);
                }
            }
            catch (Exception ex)
            {
                _shoeCount = 0;

                response.Success     = false;
                response.Message     = ex.Message;
                // Keep the response consistent with failure: nothing was committed
                // to the shoe list, so don't leave a pre-failure count behind.
                response.ScrapeCount = 0;
            }

            return response;
        }
Code example #3
0
 /// <summary>
 /// Scrapes one website described by <paramref name="request"/>; concrete
 /// scrapers append parsed shoes to <c>request.ShoeList</c> (see the
 /// AIOBotScraper override) and report the outcome in the response.
 /// </summary>
 /// <param name="request">Site name/URL pair plus the shoe list to fill.</param>
 /// <returns>Success flag, message, and (presumably) the per-site scrape count — confirm per implementation.</returns>
 public abstract ScrapePageResponse ScrapeWebpage(ScrapePageRequest request);