Example #1
        public async Task<IActionResult> Edit(int id, [Bind("CrawlID,name,DateCreated")] Crawl crawl)
        {
            if (id != crawl.CrawlID)
            {
                return NotFound();
            }

            if (ModelState.IsValid)
            {
                try
                {
                    _context.Update(crawl);
                    await _context.SaveChangesAsync();
                }
                catch (DbUpdateConcurrencyException)
                {
                    if (!CrawlExists(crawl.CrawlID))
                    {
                        return NotFound();
                    }
                    else
                    {
                        throw;
                    }
                }
                return RedirectToAction(nameof(Index));
            }
            return View(crawl);
        }
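The Edit action relies on a CrawlExists helper that this example does not include. A minimal sketch of what such a helper usually looks like in an EF Core controller, assuming _context exposes a DbSet<Crawl> named Crawl:

        // Hypothetical helper matching the call site above (requires System.Linq).
        private bool CrawlExists(int id)
        {
            return _context.Crawl.Any(e => e.CrawlID == id);
        }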
Example #2
 /// <summary>
 ///     Outputs the state of the engine.
 /// </summary>
 /// <param name = "crawl">The crawl.</param>
 public override void OutputEngineState(Crawl<TArachnodeDAO> crawl)
 {
     if (ApplicationSettings.EnableConsoleOutput)
     {
         Console.WriteLine(BuildOutputString("ot", OutputType.EngineState, "tn", crawl.CrawlInfo.ThreadNumber, "cr", crawl.UncrawledCrawlRequests.Count, "State", crawl.Crawler.Engine.State));
     }
 }
Example #3
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            Cursor = Cursors.Wait;

            _crawlDel = CrawlFolders;
            _crawlDel.BeginInvoke(RootPath, CrawlCallback, null);
        }
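Window_Loaded starts the folder crawl on a background thread via the classic BeginInvoke/EndInvoke (APM) pattern, which only works on the .NET Framework. The delegate field and callback are not shown; a plausible sketch, where only _crawlDel, CrawlFolders, RootPath and CrawlCallback come from the snippet:

        // Hypothetical declarations matching the call above.
        private delegate void CrawlDelegate(string rootPath);
        private CrawlDelegate _crawlDel;

        private void CrawlCallback(IAsyncResult ar)
        {
            _crawlDel.EndInvoke(ar); // rethrows any exception from CrawlFolders
            Dispatcher.Invoke(() => Cursor = Cursors.Arrow); // restore the cursor on the UI thread
        }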
Example #4
        public static string CreateUrl(string site, Crawl outbound, Crawl inbound, CabinClass bclass, uint pax)
        {
            //Todo: handle citycodes
            if (site.IsNullOrWhiteSpace())
            {
                site = "https://sas.se";
            }
            string dtFormat   = "yyyyMMdd";
            var    lang       = site.Contains("flysas.com", StringComparison.InvariantCultureIgnoreCase) ? "gb-en" : "en";
            var    shortClass = ClassStringShort(bclass);
            var    longClass  = ClassStringLong(bclass);
            bool   roundtrip  = inbound != null;
            var    url        = new System.Text.StringBuilder(site + $"/{lang}/book/flights?search=");

            url.Append(roundtrip ? "RT" : "OW");
            url.Append($"_{outbound.Origin}-{outbound.Destination}-{outbound.TravelDate.ToString(dtFormat)}");
            if (roundtrip)
            {
                url.Append($"-{inbound.TravelDate.ToString(dtFormat)}");
            }
            url.Append($"_a{pax}c0i0y0&view=upsell&bookingFlow=points&out_flight_number={outbound.Flight}&out_sub_class={longClass}&out_class={shortClass}");
            if (roundtrip)
            {
                url.Append($"&in_flight_number={inbound.Flight}&in_sub_class={longClass}&in_class={shortClass}");
            }
            bool hasLink = !roundtrip || (outbound.Origin == inbound.Destination && outbound.Destination == inbound.Origin);

            return hasLink ? url.ToString() : null;
        }
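A usage sketch for CreateUrl; the Crawl property names come from the method body above, while the sample values and the CabinClass member are assumptions:

            // Hypothetical one-way booking link (site == null falls back to https://sas.se).
            var outbound = new Crawl
            {
                Origin      = "ARN",
                Destination = "EWR",
                TravelDate  = new DateTime(2020, 6, 1),
                Flight      = "SK903"
            };
            string url = CreateUrl(null, outbound, null, CabinClass.Business, 2);
            // -> https://sas.se/en/book/flights?search=OW_ARN-EWR-20200601_a2c0i0y0&view=upsell&...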
Example #5
 bool routeFilter(IEnumerable<string> FromList, IEnumerable<string> ToList, Crawl c)
 {
     return
         (c.Departure >= DateTime.Now &&
          (match(FromList, c.Origin) || FromList.Contains(c.Return ? c.Route.ToAirport.Zone : c.Route.FromAirport.Zone)) &&
          (match(ToList, c.Destination) || ToList.Contains(c.Return ? c.Route.FromAirport.Zone : c.Route.ToAirport.Zone)));
 }
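routeFilter accepts a leg when either the airport code matches or the list names the corresponding zone. The match helper is not part of this example; one plausible reading, treating an empty list as a wildcard:

 // Hypothetical helper (requires System.Linq); the real match() may differ.
 bool match(IEnumerable<string> list, string airport)
 {
     return !list.Any() || list.Contains(airport, StringComparer.OrdinalIgnoreCase);
 }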
Example #6
    public Crawl CreateCrawl(Vector3 position, int maxCrawl, float growthMultiplier)
    {
        Crawl crawlScript = new Crawl(this, maxCrawl, position, growthMultiplier, growthDivisor);

        crawls.Add(crawlScript);

        return crawlScript;
    }
Example #7
        /// <summary>
        ///     Determines whether the specified crawl request is disallowed.
        /// </summary>
        /// <param name = "crawlRequest">The crawl request.</param>
        /// <param name = "arachnodeDAO">The arachnode DAO.</param>
        /// <returns>
        ///     <c>true</c> if the specified crawl request is disallowed; otherwise, <c>false</c>.
        /// </returns>
        public override bool IsDisallowed(CrawlRequest<TArachnodeDAO> crawlRequest, IArachnodeDAO arachnodeDAO)
        {
            //ANODET: When you add the multi-server caching, the robots.txt file will need to be sent to all other CachePeers.

            //if we're not being called by the Engine prior to assigning to a Crawl...
            if (crawlRequest.Crawl != null)
            {
                string robotsDotTextAbsoluteUri = crawlRequest.Discovery.Uri.Scheme + Uri.SchemeDelimiter + crawlRequest.Discovery.Uri.Host + "/robots.txt";

                crawlRequest.OutputIsDisallowedReason = OutputIsDisallowedReason;

                if (!UserDefinedFunctions.IsDisallowedForAbsoluteUri(robotsDotTextAbsoluteUri, false, false))
                {
                    if (crawlRequest.Politeness.DisallowedPaths == null || DateTime.Now.Subtract(crawlRequest.Politeness.DisallowedPathsSince) > TimeSpan.FromDays(1))
                    {
                        CrawlRequest<TArachnodeDAO> robotsDotTextRequest = new CrawlRequest<TArachnodeDAO>(crawlRequest, crawlRequest.Crawl.Crawler.Cache.GetDiscovery(robotsDotTextAbsoluteUri, arachnodeDAO), 1, 1, (short)UriClassificationType.Host, (short)UriClassificationType.Host, double.MaxValue, RenderType.None, RenderType.None);
                        robotsDotTextRequest.Discovery.DiscoveryState = DiscoveryState.Undiscovered;
                        robotsDotTextRequest.Politeness = crawlRequest.Politeness;

                        Crawl<TArachnodeDAO> crawl = new Crawl<TArachnodeDAO>(crawlRequest.Crawl.Crawler.ApplicationSettings, crawlRequest.Crawl.Crawler.WebSettings, crawlRequest.Crawl.Crawler, crawlRequest.Crawl.Crawler.ActionManager, crawlRequest.Crawl.Crawler.ConsoleManager, crawlRequest.Crawl.Crawler.CookieManager, crawlRequest.Crawl.Crawler.CrawlRequestManager, crawlRequest.Crawl.Crawler.DataTypeManager, crawlRequest.Crawl.Crawler.DiscoveryManager, crawlRequest.Crawl.Crawler.EncodingManager, crawlRequest.Crawl.Crawler.HtmlManager, crawlRequest.Crawl.Crawler.PolitenessManager, crawlRequest.Crawl.Crawler.ProxyManager, crawlRequest.Crawl.Crawler.RuleManager, false);

                        robotsDotTextRequest.Crawl = crawl;

                        crawl.ProcessCrawlRequest(robotsDotTextRequest, false, false);

                        crawlRequest.Politeness.DisallowedPathsSince = DateTime.Now;

                        //The DataManager will not download the byte stream if ApplicationSettings.AssignFileAndImageDiscoveries is set to false.  This is by design.
                        if (robotsDotTextRequest.Data != null && robotsDotTextRequest.Data.Length == 0 && robotsDotTextRequest.WebClient.WebException == null)
                        {
                            robotsDotTextRequest.Data = robotsDotTextRequest.WebClient.DownloadHttpData(crawlRequest.Discovery.Uri.AbsoluteUri, robotsDotTextRequest.WebClient.HttpWebResponse.ContentEncoding.ToLowerInvariant() == "gzip", robotsDotTextRequest.WebClient.HttpWebResponse.ContentEncoding.ToLowerInvariant() == "deflate", crawlRequest.Crawl.Crawler.CookieContainer);
                        }

                        SiteCrawler.Value.RobotsDotText robotsDotText = _robotsDotTextManager.ParseRobotsDotTextSource(new Uri(crawlRequest.Discovery.Uri.Scheme + Uri.SchemeDelimiter + crawlRequest.Discovery.Uri.Host), robotsDotTextRequest.Data);

                        crawlRequest.Politeness.CrawlDelayInMilliseconds = robotsDotText.CrawlDelay * 1000;
                        crawlRequest.Politeness.DisallowedPaths          = robotsDotText.DisallowedPaths;
                    }

                    if (crawlRequest.Politeness != null)
                    {
                        if (crawlRequest.Politeness.DisallowedPaths != null)
                        {
                            foreach (string disallowedPath in crawlRequest.Politeness.DisallowedPaths)
                            {
                                if (HttpUtility.UrlDecode(crawlRequest.Discovery.Uri.AbsoluteUri).StartsWith(HttpUtility.UrlDecode(disallowedPath)))
                                {
                                    crawlRequest.IsDisallowedReason = "Prohibited by robots.txt.";
                                    return true;
                                }
                            }
                        }
                    }
                }
            }

            return false;
        }
Example #8
        public IActionResult CreateCrawlDetail(string name, DateTime crawlDate, bool haveTheme, string Theme)
        {
            Crawl c = new Crawl {
                name = name, datetime = crawlDate, Theme = Theme
            };

            c.UserID = User.FindFirstValue(ClaimTypes.NameIdentifier);


            //Auto join creator
            c.crawlUser.Add(new CrawlUser
            {
                crawl   = c,
                usersID = c.UserID
            });

            Barcrawl bc = new Barcrawl();
            List<Barcrawl> listBarcrawl = new List<Barcrawl>();
            List<Bar> bar = PossibleBars;

            foreach (Bar b in bar)
            {
                if (!db.Bar.Where(x => x.BarId == b.BarId).Any())
                {
                    db.Bar.Add(b);
                }
                c.barCrawl.Add(new Barcrawl
                {
                    BarId   = b.BarId,
                    CrawlID = c.CrawlID,
                });
            }
            // Derive the crawl's Location from the first bar's Location string:
            // join the second and third comma-separated parts, strip ']' and '"',
            // then take the third and fourth space-separated tokens as coordinates.
            string[] locationParts = bar[0].Location.Split(',');

            string coordinates = (locationParts[1] + locationParts[2]).Replace("]", "").Replace("\"", "");

            string[] coordinateTokens = coordinates.Split(' ');
            c.Location = coordinateTokens[2] + ", " + coordinateTokens[3];

            db.Crawl.Add(c);
            db.SaveChanges();

            string id = c.CrawlID.ToString();

            return RedirectToAction("CrawlDetails", new { id });
        }
Example #9
        /// <summary>
        ///     Processes a FilesRow after crawling.
        /// </summary>
        /// <param name = "filesRow">The files row.</param>
        /// <param name="webClient"></param>
        /// <param name="actionManager"></param>
        /// <param name="consoleManager"></param>
        /// <param name="discoveryManager"></param>
        /// <param name = "fileManager">The file manager.</param>
        /// <param name = "fileManager">The file manager.</param>
        /// <param name="memoryManager"></param>
        /// <param name="ruleManager"></param>
        /// <param name = "arachnodeDAO">The arachnode DAO.</param>
        /// <param name = "imageManager">The image manager.</param>
        public static void ProcessFile(ApplicationSettings applicationSettings, WebSettings webSettings, Crawler<TArachnodeDAO> crawler, ArachnodeDataSet.FilesRow filesRow, WebClient<TArachnodeDAO> webClient, Cache<TArachnodeDAO> cache, ActionManager<TArachnodeDAO> actionManager, ConsoleManager<TArachnodeDAO> consoleManager, CrawlerPeerManager<TArachnodeDAO> crawlerPeerManager, DiscoveryManager<TArachnodeDAO> discoveryManager, FileManager<TArachnodeDAO> fileManager, MemoryManager<TArachnodeDAO> memoryManager, RuleManager<TArachnodeDAO> ruleManager, IArachnodeDAO arachnodeDAO)
        {
            CacheManager<TArachnodeDAO> cacheManager = new CacheManager<TArachnodeDAO>(applicationSettings, webSettings);
            CookieManager cookieManager = new CookieManager();
            CrawlRequestManager<TArachnodeDAO> crawlRequestManager = new CrawlRequestManager<TArachnodeDAO>(applicationSettings, webSettings, cache, consoleManager, discoveryManager);
            DataTypeManager<TArachnodeDAO>     dataTypeManager     = new DataTypeManager<TArachnodeDAO>(applicationSettings, webSettings);
            EncodingManager<TArachnodeDAO>     encodingManager     = new EncodingManager<TArachnodeDAO>(applicationSettings, webSettings);
            PolitenessManager<TArachnodeDAO>   politenessManager   = new PolitenessManager<TArachnodeDAO>(applicationSettings, webSettings, cache);
            ProxyManager<TArachnodeDAO>        proxyManager        = new ProxyManager<TArachnodeDAO>(applicationSettings, webSettings, consoleManager);
            HtmlManager<TArachnodeDAO>         htmlManager         = new HtmlManager<TArachnodeDAO>(applicationSettings, webSettings, discoveryManager);
            Crawl<TArachnodeDAO> crawl = new Crawl<TArachnodeDAO>(applicationSettings, webSettings, crawler, actionManager, consoleManager, cookieManager, crawlRequestManager, dataTypeManager, discoveryManager, encodingManager, htmlManager, politenessManager, proxyManager, ruleManager, true);

            //create a CrawlRequest as this is what the internals of SiteCrawler.dll expect to operate on...
            CrawlRequest<TArachnodeDAO> crawlRequest = new CrawlRequest<TArachnodeDAO>(new Discovery<TArachnodeDAO>(filesRow.AbsoluteUri), 1, UriClassificationType.Host, UriClassificationType.Host, 0, RenderType.None, RenderType.None);

            crawlRequest.Crawl = crawl;
            crawlRequest.Discovery.DiscoveryType = DiscoveryType.File;
            crawlRequest.Discovery.ID            = filesRow.ID;
            crawlRequest.Data        = filesRow.Source;
            crawlRequest.ProcessData = true;
            crawlRequest.WebClient   = webClient;

            crawlRequest.WebClient.HttpWebResponse.Headers.Clear();

            //parse the ResponseHeaders from the FilesRow.ResponseHeaders string...
            foreach (string responseHeader in filesRow.ResponseHeaders.Split("\r\n".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))
            {
                string[] responseHeaderSplit = responseHeader.Split(":".ToCharArray());

                string name  = responseHeaderSplit[0];
                string value = UserDefinedFunctions.ExtractResponseHeader(filesRow.ResponseHeaders, name, true).Value;

                crawlRequest.WebClient.HttpWebResponse.Headers.Add(name, value);
            }

            //refresh the DataTypes in the DataTypeManager... (if necessary)...
            if (dataTypeManager.AllowedDataTypes.Count == 0)
            {
                dataTypeManager.RefreshDataTypes();
            }

            crawlRequest.DataType = dataTypeManager.DetermineDataType(crawlRequest);

            if (applicationSettings.InsertFiles)
            {
                crawlRequest.Discovery.ID = arachnodeDAO.InsertFile(crawlRequest.Parent.Uri.AbsoluteUri, crawlRequest.Discovery.Uri.AbsoluteUri, crawlRequest.WebClient.HttpWebResponse.Headers.ToString(), applicationSettings.InsertFileSource ? crawlRequest.Data : new byte[] { }, crawlRequest.DataType.FullTextIndexType, applicationSettings.ClassifyAbsoluteUris);
            }

            crawlRequest.ManagedDiscovery = fileManager.ManageFile(crawlRequest, crawlRequest.Discovery.ID.Value, crawlRequest.Discovery.Uri.AbsoluteUri, crawlRequest.Data, crawlRequest.DataType.FullTextIndexType, applicationSettings.ExtractFileMetaData, applicationSettings.InsertFileMetaData, applicationSettings.SaveDiscoveredFilesToDisk);

            actionManager.PerformCrawlActions(crawlRequest, CrawlActionType.PostRequest, arachnodeDAO);

            discoveryManager.CloseAndDisposeManagedDiscovery(crawlRequest, arachnodeDAO);
        }
Example #10
        private void MenuItem_Spider_Go_Click(object sender, RoutedEventArgs e)
        {
            var spider = new Crawl();

            //ChatterVolumeDataGrid.ItemsSource = spider.Chatters;
            WebNodesDataGrid.ItemsSource = spider.WebNodes;
            BindingOperations.EnableCollectionSynchronization(spider.WebNodes, WebNodesDataGrid.ItemsSource);

            spider.Seed(Settings.Default.StartingUrl, Settings.Default.MaxAttempts,
                        Settings.Default.SecondsDelay, Settings.Default.Steps, Settings.Default.DatabaseFile);
            spider.Start();
        }
Example #11
        public async Task<IActionResult> Create([Bind("CrawlID,name,DateCreated")] Crawl crawl)
        {
            crawl.UserID = User.FindFirstValue(ClaimTypes.NameIdentifier);

            if (ModelState.IsValid)
            {
                _context.Add(crawl);
                await _context.SaveChangesAsync();

                return RedirectToAction(nameof(Index));
            }
            return View(crawl);
        }
Example #12
        public Crawl GetCrawl()
        {
            Crawl crawl = new Crawl();

            // checks if crawl section exists
            if (PsaFile.DataSection[MiscSectionLocation + 15] >= 8096 && PsaFile.DataSection[MiscSectionLocation + 15] < PsaFile.DataSectionSize)
            {
                crawl.Offset = PsaFile.DataSection[MiscSectionLocation + 15];
                int crawlLocation = PsaFile.DataSection[MiscSectionLocation + 15] / 4;
                crawl.Forward  = PsaFile.DataSection[crawlLocation];
                crawl.Backward = PsaFile.DataSection[crawlLocation + 1];
            }
            Console.WriteLine(crawl);
            return crawl;
        }
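GetCrawl fills a small data object and writes it to the console, which implies Crawl overrides ToString(). A hypothetical shape consistent with the members used above:

        public class Crawl
        {
            public int Offset   { get; set; } // offset of the crawl section in the data section
            public int Forward  { get; set; } // value read at the crawl location
            public int Backward { get; set; } // value read at the crawl location + 1

            public override string ToString() =>
                $"Crawl(Offset=0x{Offset:X}, Forward={Forward}, Backward={Backward})";
        }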
Example #13
        public IActionResult CrawlDetails(string ID)
        {
            int crawlID = int.Parse(ID);

            ViewBag.CrawlID = crawlID;
            Crawl c = db.Crawl.Find(crawlID);

            ViewBag.CrawlName  = c.name;
            ViewBag.CrawlTheme = c.Theme;

            List<Barcrawl> bc    = db.Barcrawl.Include(g => g.bar).Where(a => a.crawl.CrawlID == crawlID).ToList();
            List<Bar>      cool  = new List<Bar>();
            List<string>   users = new List<string>();

            foreach (Barcrawl ayy in bc)
            {
                Bar b = new Bar {
                    Name = ayy.bar.Name, Location = ayy.bar.Location
                };
                cool.Add(b);
            }

            List <CrawlUser> cu = db.CrawlUser.Where(x => x.crawl.CrawlID == crawlID).ToList();

            foreach (CrawlUser crus in cu)
            {
                string un = db.Users.Find(crus.usersID).UserName;
                un = un.Split("@")[0];
                users.Add(un);
            }
            ViewBag.Users   = users;
            ViewBag.gmapurl = GMapUrlBuilder(cool);

            ViewBag.MapBars = cool;
            return View(bc);


            //Crawl c = db.Crawl.FirstOrDefault(i => i.CrawlID == 14);

            //List<Bar> bars = new List<Bar>();
            //foreach(Barcrawl bc in c.barCrawl)
            //{
            //    Bar b = db.Bar.Find(bc.bar.BarId);
            //    bars.Add(b);
            //}
            //return View(c);
        }
Example #14
        public static void TestMethod1(string url)
        {
            Crawl c = new Crawl();


            c.EnterQueue(new HttpItem()
            {
                URL = url,

                HandleResult = (result) =>
                {
                    Console.WriteLine(result);
                },
                Method = "Get",
            });

            c.Start();
        }
Example #15
        static void Auto()
        {
            Parametter para = new Parametter();

            para = para.DEserialize();

            // hide code when the user needs it

            Create_Folder.Delete();
            Create_Folder.Create(para.From);

            Console.WriteLine("waiting for create folder....");


            if (para.Type.Equals("manhua"))
            {
                Manhua.Start(para.To, para.From);
            }
            else
            {
                //crawl
                // check parameters from para.json
                Console.WriteLine("waiting for crawling....");
                Crawl crawl = new Crawl();
                crawl.Auto_Crawl(para);


                ////  render
                ////    check parameters from para.json
                Console.WriteLine("Waiting for Render....");
                Render.Instance.Start(para);
            }

            ////    upload
            Console.WriteLine("Waiting for Upload....");
            Upload_Console.Start(para);
        }
Example #16
        public void PerformActionTest()
        {
            ApplicationSettings applicationSettings = new ApplicationSettings();
            WebSettings         webSettings         = new WebSettings();

            ArachnodeDAO arachnodeDAO = new ArachnodeDAO(applicationSettings.ConnectionString, applicationSettings, webSettings, true, true);

            Crawler<ArachnodeDAO> crawler = new Crawler<ArachnodeDAO>(applicationSettings, webSettings, CrawlMode.BreadthFirstByPriority, false);

            CrawlRequest<ArachnodeDAO> crawlRequest = new CrawlRequest<ArachnodeDAO>(new Discovery<ArachnodeDAO>("http://trycatchfail.com/blog/post/2008/11/12/Deep-web-crawling-with-NET-Getting-Started.aspx"), 1, UriClassificationType.Host, UriClassificationType.Host, 1, RenderType.None, RenderType.None);

            Crawl<ArachnodeDAO> crawl = new Crawl<ArachnodeDAO>(applicationSettings, webSettings, crawler, crawler.ActionManager, crawler.ConsoleManager, crawler.CookieManager, crawler.CrawlRequestManager, crawler.DataTypeManager, crawler.DiscoveryManager, crawler.EncodingManager, crawler.HtmlManager, crawler.PolitenessManager, crawler.ProxyManager, crawler.RuleManager, true);

            applicationSettings.MaximumNumberOfCrawlThreads = 0;

            UserDefinedFunctions.ConnectionString = "Data Source=.;Initial Catalog=arachnode.net;Integrated Security=True;Connection Timeout=3600;";
            crawler.Engine.Start();

            crawl.BeginCrawl(crawlRequest, false, false, false);

            Templater <ArachnodeDAO> target = new Templater <ArachnodeDAO>(applicationSettings, webSettings);

            target.PerformAction(crawlRequest, arachnodeDAO);
        }
Example #17
        public ActionResult Import(int id)
        {
            var flyer        = _flyerRepository.GetById(id);
            var merchantCode = Helper.FlyerLink.MerchantCode(flyer.Url);

            if (flyer.IsActive)
            {
                var crawlObject = Crawl.GetData(flyer.Url);
                flyer.Valid_from = crawlObject.valid_from;
                flyer.Valid_to   = crawlObject.valid_to;

                var merchantName = crawlObject.merchant;
                var merchant     = _merchantRepository.GetByCondition(m => m.MerchantCode == merchantCode).FirstOrDefault();
                if (merchant == null)
                {
                    merchant = new Merchant
                    {
                        Name         = merchantName,
                        MerchantCode = merchantCode,
                        Flyers       = new List <Flyer> {
                            flyer
                        }
                    };
                    _merchantRepository.Add(merchant);
                }
                else
                {
                    merchant.Name = merchantName;
                }
                merchant.Url      = crawlObject.merchant_url;
                merchant.LogoFile = crawlObject.merchant_logo;
                merchant.Flyers.Add(flyer);


                foreach (var crawlcate in crawlObject.categories)
                {
                    var category = _categoryReposity.GetByCondition(c => c.Name == crawlcate.name).FirstOrDefault();
                    if (category == null)
                    {
                        category      = new Category();
                        category.Name = crawlcate.name;
                        category.Merchants.Add(merchant);
                        _categoryReposity.Add(category);

                        merchant.Categories.Add(category);
                        _merchantRepository.Update(merchant);
                    }
                    else
                    {
                        if (!merchant.Categories.Contains(category))
                        {
                            merchant.Categories.Add(category);
                        }
                    }
                }

                foreach (var item in crawlObject.items)
                {
                    if ((item.current_price != null || item.sale_story != null) && item.category_names.Count > 0)
                    {
                        var pcate = item.category_names[0];
                        var pCate = _categoryReposity.GetByCondition(c => c.Name.Contains(pcate)).FirstOrDefault();
                        if (pCate == null)
                        {
                            pCate = new Category
                            {
                                Name = pcate
                            };
                            _categoryReposity.Add(pCate);
                        }
                        var product = new Product
                        {
                            Name                  = item.name,
                            CurrentPrice          = Convert.ToDecimal(item.current_price),
                            Brand                 = item.brand,
                            Description           = item.description,
                            Discount_percent      = item.discount_percent.ToString(),
                            DisplayName           = item.display_name,
                            Dist_coupon_image_url = item.dist_coupon_image_url,
                            Image                 = item.large_image_url,
                            Url               = item.url,
                            InStoreOnly       = item.in_store_only,
                            X_large_image_url = item.x_large_image_url,
                            Sale_Story        = item.sale_story,
                            Flyer             = flyer,
                            Valid_from        = DateTime.Parse(item.valid_from),
                            Valid_to          = DateTime.Parse(item.valid_to),
                            SKU               = item.sku,
                            CategoryId        = pCate.Id,
                            Category          = pCate,
                            MerchantId        = merchant.Id,
                            Merchant          = merchant
                        };
                        _productRepository.Add(product);
                        pCate.Products.Add(product);
                        _categoryReposity.Update(pCate);
                        merchant.Products.Add(product);
                        if (!merchant.Categories.Contains(pCate))
                        {
                            merchant.Categories.Add(pCate);
                        }
                        flyer.Products.Add(product);
                    }
                }
                flyer.Merchant = merchant;
                flyer.IsActive = false;
                _merchantRepository.Update(merchant);
                _flyerRepository.Update(flyer);
            }
            return(RedirectToAction("Index"));
        }
Example #18
 public CrawlingHostedService(IConfiguration configuration, Channel<SearchTerm> crawlingMessageChannel, Crawl crawl)
 {
     Configuration     = configuration;
     SearchTermChannel = crawlingMessageChannel;
     Crawl             = crawl;
 }
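For this constructor the host's container must provide a Channel<SearchTerm> and a Crawl instance. A minimal registration sketch, assuming CrawlingHostedService implements IHostedService and System.Threading.Channels is referenced:

 // Hypothetical Program/Startup wiring; only the type names above are given.
 services.AddSingleton(Channel.CreateUnbounded<SearchTerm>());
 services.AddSingleton<Crawl>();
 services.AddHostedService<CrawlingHostedService>();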
Example #19
 /// <summary>
 ///     Outputs the state of the engine.
 /// </summary>
 /// <param name = "crawl">The crawl.</param>
 public abstract void OutputEngineState(Crawl<TArachnodeDAO> crawl);
Example #20
 public void CrawlComplete(Crawl crawl)
 {
     //Debug.Log("removing crawl");
     //crawls.Remove(crawl);
 }
Example #21
		public void Stop()
		{
			(new Logger()).WriteDebug3("in");
			IsChatTerminating = true;
			_cMat.Stop();
			try
			{
				_cSMSRoll.Stop();
				if (IsChatOnAir)
					_mreChatOnAir.Set();
			}
			catch (Exception ex)
			{
				(new Logger()).WriteError(ex);
			}
			try
			{
				_cInfoCrawl.Stop();
				_cInfoCrawl = null;
			}
			catch (Exception ex)
			{
				(new Logger()).WriteError(ex);
			}
			GC.Collect();
			(new Logger()).WriteDebug4("return");
		}
Example #22
		public void Init()
		{
			(new Logger()).WriteDebug3("in");
			_cMat = new Mat(_cPreferences.cMat) { OnStop = MatStopped };
			_cSMSRoll = new SMSRoll(_cPreferences.cRoll) { OnRollStandby = _cMat.Standby, OnRollShow = _cMat.Start };
			_cInfoCrawl = new Crawl(_cPreferences.cCrawl, _cPreferences.eBroadcastType);
			bReleased = false;
			_mreChatOnAir = new ManualResetEvent(true);
			_mreInfoOnAir = new ManualResetEvent(true);
			_mreChatSetuping = new ManualResetEvent(true);
			_cSMSRoll.Init();
			QueueLength = 0;
			_cInfoCrawl.Init();
			_cMat.Init();

			(new Logger()).WriteDebug4("return");
		}
Example #23
        public static void Commands(Simulator sim)
        {
            string command = String.Empty;

            Console.WriteLine("Type HELP for a list of commands.\n");
            while (command != "Q")
            {
                bool secure = false;
                Console.Write("#: ");
                // Commands can have parameters separated by spaces
                string[] parts = Console.ReadLine().Split(' ');
                command = parts[0].ToUpper();

                try {
                    if (command.Equals("S"))
                    {
                        secure  = true;
                        command = parts[1].ToUpper();
                    }

                    switch (command)
                    {
                    case "B":
                        int       forwarders = (parts.Length >= 2) ? Int32.Parse(parts[1]) : -1;
                        Broadcast bcast      = new Broadcast(sim.SimBroadcastHandler,
                                                             sim.RandomNode().Node, forwarders, TaskFinished);
                        bcast.Start();
                        RunUntilTaskFinished();
                        break;

                    case "C":
                        sim.CheckRing(true);
                        break;

                    case "P":
                        sim.PrintConnections();
                        break;

                    case "M":
                        Console.WriteLine("Memory Usage: " + GC.GetTotalMemory(true));
                        break;

                    case "CR":
                        NodeMapping nm = sim.Nodes.Values[0];
                        SymphonySecurityOverlord bso = null;
                        if (secure)
                        {
                            bso = nm.Sso;
                        }
                        Crawl c = new Crawl(nm.Node, sim.Nodes.Count, bso, TaskFinished);
                        c.Start();
                        RunUntilTaskFinished();
                        break;

                    case "A2A":
                        AllToAll atoa = new AllToAll(sim.Nodes, secure, TaskFinished);
                        atoa.Start();
                        RunUntilTaskFinished();
                        break;

                    case "A":
                        sim.AddNode();
                        break;

                    case "D":
                        sim.RemoveNode(true, true);
                        break;

                    case "R":
                        sim.RemoveNode(false, true);
                        break;

                    case "REVOKE":
                        sim.Revoke(true);
                        break;

                    case "RUN":
                        int steps = (parts.Length >= 2) ? Int32.Parse(parts[1]) : 0;
                        if (steps > 0)
                        {
                            SimpleTimer.RunSteps(steps);
                        }
                        else
                        {
                            SimpleTimer.RunStep();
                        }
                        break;

                    case "Q":
                        break;

                    case "CONSTATE":
                        sim.PrintConnectionState();
                        break;

                    case "H":
                        Console.WriteLine("Commands: \n");
                        Console.WriteLine("A - add a node");
                        Console.WriteLine("D - remove a node");
                        Console.WriteLine("R - abort a node");
                        Console.WriteLine("C - check the ring using ConnectionTables");
                        Console.WriteLine("P - Print connections for each node to the screen");
                        Console.WriteLine("M - Current memory usage according to the garbage collector");
                        Console.WriteLine("[S] CR - Perform a (secure) crawl of the network using RPC");
                        Console.WriteLine("[S] A2A - Perform all-to-all measurement of the network using RPC");
                        Console.WriteLine("Q - Quit");
                        break;

                    default:
                        Console.WriteLine("Invalid command");
                        break;
                    }
                } catch (Exception e) {
                    Console.WriteLine("Error: " + e);
                }
                Console.WriteLine();
            }
        }
Example #24
        public Preferences(string sWorkFolder, string sData)
        {
            _sWorkFolder = sWorkFolder;

            XmlDocument cXmlDocument = new XmlDocument();

            cXmlDocument.LoadXml(sData);
            XmlNode cXmlNode = cXmlDocument.NodeGet("data");

            cDBCredentials = new DB.Credentials(cXmlNode.NodeGet("database"));

            bMessagesRelease = cXmlNode.AttributeOrDefaultGet <bool>("release", false);
            eBroadcastType   = cXmlNode.AttributeOrDefaultGet <BroadcastType>("type", BroadcastType.linear);
            nSMSQtty         = cXmlNode.AttributeGet <int>("queue");
            bAnotherAirError = cXmlNode.AttributeOrDefaultGet <bool>("another_air_err", false);

            XmlNode cNodeChild = cXmlNode.NodeGet("vip");

            if (null != cNodeChild)
            {
                cVIP = new VIP();
                XmlNode cXNPromos = cNodeChild.NodeGet("promos", false);
                if (null != cXNPromos)
                {
                    cVIP.tsPromoPeriod = cXNPromos.AttributeGet <TimeSpan>("period");
                    cVIP.aPromos       = new List <Promo>();
                    List <Promo>       aPromos = new List <Promo>();
                    List <WeeklyRange> aWeeklyRanges;
                    foreach (XmlNode cPromo in cXNPromos.NodesGet("promo", false))
                    {
                        aWeeklyRanges = new List <WeeklyRange>();
                        foreach (XmlNode cWeeklyRange in cPromo.NodesGet("weeklyrange", false))
                        {
                            aWeeklyRanges.Add(new WeeklyRange(
                                                  cWeeklyRange.AttributeValueGet("dayin"),
                                                  cWeeklyRange.AttributeValueGet("timein"),
                                                  cWeeklyRange.AttributeValueGet("dayout"),
                                                  cWeeklyRange.AttributeValueGet("timeout")
                                                  ));
                        }
                        aPromos.Add(new Promo()
                        {
                            nID          = cPromo.InnerXml.GetHashCode(),
                            aWeeklyRange = aWeeklyRanges,
                            bEnabled     = cPromo.AttributeGet <bool>("enabled"),
                            sText        = cPromo.NodeGet("text").InnerXml
                        });
                    }
                    cVIP.aPromos.AddRange(aPromos);
                }
                cVIP.sFile   = System.IO.Path.Combine(_sWorkFolder, "data/vip.dat");
                cVIP.sPrefix = "VIP";
                cVIP.cFont   = FontParse(cNodeChild.NodeGet("font"));
            }
            #region . ROLL .
            cNodeChild           = cXmlNode.NodeGet("roll");
            cRoll                = new Roll();
            cRoll.nLayer         = cNodeChild.AttributeGet <ushort>("layer");
            cRoll.nSpeed         = cNodeChild.AttributeGet <float>("speed");
            cRoll.nUseHistorySMS = cNodeChild.AttributeOrDefaultGet <int>("use_history", 0); // 0 is off
            string sEx = cNodeChild.AttributeOrDefaultGet<string>("exceptions", ""); // empty means no exceptions
            cRoll.cExceptions = new Roll.HoursExceptions(sEx);
            cRoll.stMerging   = new MergingMethod(cNodeChild);
            XmlNode cXNGrandChild = cNodeChild.NodeGet("holds");
            cRoll.SecondsPerLineSet(cXNGrandChild.AttributeGet <float>("line"));
            cRoll.nSecondsPerPause = cXNGrandChild.AttributeGet <float>("pause");
            cXNGrandChild          = cNodeChild.NodeGet("masks");
            cRoll.sMaskPageSingle  = cXNGrandChild.NodeGet("single").InnerXml;
            cRoll.sMaskPageMulti   = cXNGrandChild.NodeGet("multi").InnerXml;
            cRoll.stSize           = SizeParse(cNodeChild.NodeGet("size"));

            cRoll.cSMSCommon = RollCategoryGet(cNodeChild.NodeGet("common"));
            cRoll.cSMSVIP    = RollCategoryGet(cNodeChild.NodeGet("vip"));
            cRoll.cSMSPromo  = RollCategoryGet(cNodeChild.NodeGet("promo"));
            cRoll.cSMSPhoto  = RollCategoryGet(cNodeChild.NodeGet("photo"));
            #endregion
            #region . CRAWL .
            cNodeChild = cXmlNode.NodeGet("crawl");
            cCrawl     = new Crawl();
            cCrawl.sInfoStringNight = System.IO.Path.Combine(sWorkFolder, "data/info_night.dat");
            cCrawl.sCrowlLastStart  = System.IO.Path.Combine(sWorkFolder, "data/crowl_last_start.dat");
            cCrawl.stMerging        = new MergingMethod(cNodeChild);
            cCrawl.nSpeed           = cNodeChild.AttributeGet <float>("speed");
            cCrawl.nLayer           = cNodeChild.AttributeGet <ushort>("layer");
            cCrawl.nShiftTop        = cXmlNode.AttributeOrDefaultGet <short>("shift_top", 0);
            cCrawl.nPressBottom     = cXmlNode.AttributeOrDefaultGet <short>("press_bot", 0);
            cCrawl.bRenderFields    = cXmlNode.AttributeOrDefaultGet <bool>("render_fields", false);
            cCrawl.stSize           = SizeParse(cNodeChild.NodeGet("size"));
            cCrawl.sText            = cNodeChild.NodeGet("text").InnerXml;
            cCrawl.sRegistryInfo    = cNodeChild.NodeGet("registration").InnerXml;

            cCrawl.cFont         = FontParse(cNodeChild = cNodeChild.NodeGet("font"));
            cCrawl.stColor       = ColorParse(cNodeChild.NodeGet("color"));
            cNodeChild           = cNodeChild.NodeGet("border");
            cCrawl.nBorderWidth  = cNodeChild.AttributeGet <float>("width");
            cCrawl.stBorderColor = ColorParse(cNodeChild.NodeGet("color", false));
            #endregion
            #region . ANIMATION .
            cNodeChild       = cXmlNode.NodeGet("mat");
            cMat             = new Mat();
            cXNGrandChild    = cNodeChild.NodeGet("badge");
            cMat.nBadgeLayer = cXNGrandChild.AttributeGet <ushort>("layer");
            cMat.stBadgeSize = SizeParse(cXNGrandChild.NodeGet("size"));
            cMat.sBadgeIn    = cXNGrandChild.NodeGet("in").InnerXml;
            cMat.sBadgeLoop  = cXNGrandChild.NodeGet("loop").InnerXml;
            cMat.sBadgeOut   = cXNGrandChild.NodeGet("out").InnerXml;

            cXNGrandChild        = cNodeChild.NodeGet("show");
            cMat.nShowLayer      = cXNGrandChild.AttributeGet <ushort>("layer");
            cMat.stShowSize      = SizeParse(cXNGrandChild.NodeGet("size"));
            cMat.sShowLoop       = cXNGrandChild.NodeGet("loop").InnerXml;
            cMat.sShowOut        = cXNGrandChild.NodeGet("out").InnerXml;
            cMat.sShowTransition = cXNGrandChild.NodeGet("transition").InnerXml;

            cXNGrandChild           = cXNGrandChild.NodeGet("standby");
            cMat.sStandbyIn         = cXNGrandChild.NodeGet("in").InnerXml;
            cMat.sStandbyLoop       = cXNGrandChild.NodeGet("loop").InnerXml;
            cMat.sStandbyOut        = cXNGrandChild.NodeGet("out").InnerXml;
            cMat.sStandbyTransition = cXNGrandChild.NodeGet("transition").InnerXml;

            cXNGrandChild  = cNodeChild.NodeGet("out");
            cMat.nOutLayer = cXNGrandChild.AttributeGet <ushort>("layer");
            cMat.stOutSize = SizeParse(cXNGrandChild.NodeGet("size"));
            cMat.sOut      = cXNGrandChild.NodeGet("animation").InnerXml;
            #endregion
        }
Example #25
        public static void Seed(AwardData.AwardContext ctx, Microsoft.AspNetCore.Identity.UserManager <ApplicationUser> userManager)
        {
            var routes = getRoutes();

            ctx.Routes.AddRange(routes);
            ctx.SaveChanges();
            var client = new HttpClient();

            using (HttpResponseMessage response = client.GetAsync("https://awardhacks.se/export/flights").Result)
            {
                using (HttpContent content = response.Content)
                {
                    var json   = content.ReadAsStringAsync().Result;
                    var export = Newtonsoft.Json.JsonConvert.DeserializeObject <List <AwardExport> >(json);
                    foreach (var e in export)
                    {
                        var tmp = new Crawl
                        {
                            Id         = e.Id,
                            Departure  = e.Departure,
                            Arrival    = e.Arrival,
                            Business   = e.Business,
                            Plus       = e.Plus,
                            Go         = e.Go,
                            Flight     = e.Flight,
                            TravelDate = e.Departure.Date,
                            CrawlDate  = DateTime.Now,
                            Equipment  = e.Equipment,
                            Success    = true
                        };
                        tmp.RouteId = routes.Where(r => r.To == e.Destination && r.From == e.Origin).Select(r => r.Id).FirstOrDefault();
                        if (tmp.RouteId == 0)
                        {
                            tmp.RouteId = routes.Where(r => r.To == e.Origin && r.From == e.Destination).Select(r => r.Id).FirstOrDefault();
                            tmp.Return  = true;
                        }
                        ctx.Crawls.Add(tmp);
                    }
                }
            }
            ctx.SaveChanges();
            var ofData = new OpenFlightsData.OFData();

            ofData.LoadData();
            int id = 0;

            foreach (var iata in ctx.Routes.Select(r => r.To).Union(ctx.Routes.Select(r => r.From)).Distinct())
            {
                var ap = ofData.Airports.FirstOrDefault(ap => iata.Equals(ap.IATA, StringComparison.OrdinalIgnoreCase));
                if (ap != null)
                {
                    ctx.Airports.Add(new Airport
                    {
                        Id      = ++id,
                        City    = ap.City,
                        IATA    = ap.IATA,
                        Country = ap.Country,
                        //This is not correct for Africa etc but will do for the current routes.
                        Zone = ap.Country == "United States" ? "North & Central America" : ap.Timezone > -4 && ap.Timezone < 5 ? "Europe" : "Central Asia & Far East Asia"
                    });
                }
            }
            ctx.SaveChanges();
            userManager.CreateAsync(new ApplicationUser {
                Email = "*****@*****.**", EmailConfirmed = true, UserName = "******"
            }, "someweaktestpwd").Wait(); // block so the seed user exists before we continue
            ctx.SaveChanges();
        }
Example #26
        /// <summary>
        ///     Processes a WebPagesRow after crawling.
        /// </summary>
        /// <param name = "webPagesRow">The web pages row.</param>
        /// <param name="webClient"></param>
        /// <param name="actionManager"></param>
        /// <param name="consoleManager"></param>
        /// <param name="discoveryManager"></param>
        /// <param name="memoryManager"></param>
        /// <param name="ruleManager"></param>
        /// <param name = "webPageManager">The web page manager.</param>
        /// <param name = "arachnodeDAO">The arachnode DAO.</param>
        /// <param name = "fileManager">The file manager.</param>
        /// <param name = "imageManager">The image manager.</param>
        public static void ProcessWebPage(ApplicationSettings applicationSettings, WebSettings webSettings, Crawler<TArachnodeDAO> crawler, ArachnodeDataSet.WebPagesRow webPagesRow, WebClient<TArachnodeDAO> webClient, Cache<TArachnodeDAO> cache, ActionManager<TArachnodeDAO> actionManager, ConsoleManager<TArachnodeDAO> consoleManager, CrawlerPeerManager<TArachnodeDAO> crawlerPeerManager, DiscoveryManager<TArachnodeDAO> discoveryManager, MemoryManager<TArachnodeDAO> memoryManager, RuleManager<TArachnodeDAO> ruleManager, WebPageManager<TArachnodeDAO> webPageManager, IArachnodeDAO arachnodeDAO)
        {
            CacheManager<TArachnodeDAO> cacheManager = new CacheManager<TArachnodeDAO>(applicationSettings, webSettings);
            CookieManager cookieManager = new CookieManager();
            CrawlRequestManager<TArachnodeDAO> crawlRequestManager = new CrawlRequestManager<TArachnodeDAO>(applicationSettings, webSettings, cache, consoleManager, discoveryManager);
            DataTypeManager<TArachnodeDAO>     dataTypeManager     = new DataTypeManager<TArachnodeDAO>(applicationSettings, webSettings);
            EncodingManager<TArachnodeDAO>     encodingManager     = new EncodingManager<TArachnodeDAO>(applicationSettings, webSettings);
            PolitenessManager<TArachnodeDAO>   politenessManager   = new PolitenessManager<TArachnodeDAO>(applicationSettings, webSettings, cache);
            ProxyManager<TArachnodeDAO>        proxyManager        = new ProxyManager<TArachnodeDAO>(applicationSettings, webSettings, consoleManager);
            HtmlManager<TArachnodeDAO>         htmlManager         = new HtmlManager<TArachnodeDAO>(applicationSettings, webSettings, discoveryManager);
            Crawl<TArachnodeDAO> crawl = new Crawl<TArachnodeDAO>(applicationSettings, webSettings, crawler, actionManager, consoleManager, cookieManager, crawlRequestManager, dataTypeManager, discoveryManager, encodingManager, htmlManager, politenessManager, proxyManager, ruleManager, true);

            //create a CrawlRequest as this is what the internals of SiteCrawler.dll expect to operate on...
            CrawlRequest<TArachnodeDAO> crawlRequest = new CrawlRequest<TArachnodeDAO>(new Discovery<TArachnodeDAO>(webPagesRow.AbsoluteUri), webPagesRow.CrawlDepth, UriClassificationType.Host, UriClassificationType.Host, 0, RenderType.None, RenderType.None);

            crawlRequest.Crawl = crawl;
            crawlRequest.Discovery.DiscoveryType = DiscoveryType.WebPage;
            crawlRequest.Discovery.ID            = webPagesRow.ID;
            crawlRequest.Data         = webPagesRow.Source;
            crawlRequest.CurrentDepth = webPagesRow.CrawlDepth;
            crawlRequest.Encoding     = Encoding.GetEncoding(webPagesRow.CodePage);
            crawlRequest.ProcessData  = true;
            crawlRequest.WebClient    = webClient;

            crawlRequest.WebClient.HttpWebResponse.Headers.Clear();

            //parse the ResponseHeaders from the WebPagesRow.ResponseHeaders string...
            foreach (string responseHeader in webPagesRow.ResponseHeaders.Split("\r\n".ToCharArray(), StringSplitOptions.RemoveEmptyEntries))
            {
                string[] responseHeaderSplit = responseHeader.Split(":".ToCharArray());

                string name  = responseHeaderSplit[0];
                string value = UserDefinedFunctions.ExtractResponseHeader(webPagesRow.ResponseHeaders, name, true).Value;

                crawlRequest.WebClient.HttpWebResponse.Headers.Add(name, value);
            }

            //refresh the DataTypes in the DataTypeManager... (if necessary)...
            if (dataTypeManager.AllowedDataTypes.Count == 0)
            {
                dataTypeManager.RefreshDataTypes();
            }

            crawlRequest.DataType = dataTypeManager.DetermineDataType(crawlRequest);

            //now, process the bytes...
            encodingManager.ProcessCrawlRequest(crawlRequest, arachnodeDAO);

            if (applicationSettings.InsertWebPages)
            {
                crawlRequest.Discovery.ID = arachnodeDAO.InsertWebPage(crawlRequest.Discovery.Uri.AbsoluteUri, crawlRequest.WebClient.HttpWebResponse.Headers.ToString(), applicationSettings.InsertWebPageSource ? crawlRequest.Data : new byte[] { }, crawlRequest.Encoding.CodePage, crawlRequest.DataType.FullTextIndexType, crawlRequest.CurrentDepth, applicationSettings.ClassifyAbsoluteUris);
            }

            crawlRequest.ManagedDiscovery = webPageManager.ManageWebPage(crawlRequest.Discovery.ID.Value, crawlRequest.Discovery.Uri.AbsoluteUri, crawlRequest.Data, crawlRequest.Encoding, crawlRequest.DataType.FullTextIndexType, applicationSettings.ExtractWebPageMetaData, applicationSettings.InsertWebPageMetaData, applicationSettings.SaveDiscoveredWebPagesToDisk);

            //assigning FileAndImageDiscoveries isn't applicable because Files and Images need to be crawled to be properly classified... without classification we don't know whether they belong in dbo.Files or dbo.Images...
            crawlRequestManager.ProcessEmailAddresses(crawlRequest, arachnodeDAO);
            crawlRequestManager.ProcessHyperLinks(crawlRequest, arachnodeDAO);

            actionManager.PerformCrawlActions(crawlRequest, CrawlActionType.PostRequest, arachnodeDAO);

            discoveryManager.CloseAndDisposeManagedDiscovery(crawlRequest, arachnodeDAO);
        }
Example #27
        static void PerformanceTestWith(Type eventStoreType, int parallel, bool useCaching = false)
        {
            Console.WriteLine("PERFTEST: {0}, {1} client(s)", eventStoreType.Name, parallel);
            var container = Container(new DatabaseServiceInstaller());

            container.Register(Component.For<IBlobStorage>().ImplementedBy<NullBlobStorage>().IsDefault());
            container.Register(Component.For<ILogger>().ImplementedBy<NullLogger>().IsDefault());
            if (useCaching)
                container.Register(Component.For<IEventStore>().ImplementedBy<CachingEventStore>());

            container.Register(Component.For<IEventStore>().ImplementedBy(eventStoreType).Named("es"));

            _eventStore = container.Resolve<IEventStore>();
            var repository = container.Resolve<IRepository<Crawl>>();
            var setupService = container.Resolve<ISetupService>();
            var crawlService = container.Resolve<ICrawlService>();
            var msgbus = container.Resolve<IMessageBus>();
            var blobstorage = container.Resolve<IBlobStorage>();

            msgbus.ClearSubscribers();

            var tasks = new List<Task>();
            var random = new Random();

            var shouldCancel = false;

            Task.Run(async () =>
            {
                await Task.Delay(10000);
                shouldCancel = true;
            });

            for (var y = 1; y <= parallel; y++)
            {
                var x = y;
                var t = Task.Run(() =>
                {
                    //await Task.Delay(random.Next(0, 750));
                    //Console.WriteLine("Staring task {0}", x);

                    var siteId = Guid.NewGuid();
                    var crawlId = Guid.NewGuid();
                    var pageid = Guid.NewGuid();

                    setupService.CreateNewSite(new CreateNewSite(siteId, "Perftest", "Perftest"));

                    var crawl = new Crawl(crawlId, siteId, DateTime.UtcNow);
                    crawl.StartCrawl("Perftest crawler", DateTime.UtcNow);

                    crawl.AddNewPage(pageid, "perftesturl", "nocontent", DateTime.UtcNow, blobstorage);

                    repository.Save(crawl);

                    for (var i = 0; i < 50000000000; i++)
                    {
                        //crawl.PageCheckedWithoutChange(pageid, DateTime.UtcNow);
                        crawlService.PageCheckedWithoutChanges(new RegisterCheckWithoutChange(crawlId, pageid, DateTime.UtcNow));

                        if (i%100 == 0 && shouldCancel)
                        {
                            break;
                        }
                    }
                });

                tasks.Add(t);
            }

            Task.WaitAll(tasks.ToArray());
            container.Dispose();

            Console.WriteLine("... done\n");
        }