/// <summary>
/// Crawler uses WorkManager to get URLs to crawl through.
/// </summary>
/// <param name="manager">Work source the crawler pulls URLs from. Must not be null.</param>
/// <param name="config">Worker configuration; <c>MaxLoggedDownloads</c> sizes the recent-download buffer. Must not be null.</param>
/// <param name="plugins">Optional plugin manager; may be null when no plugins are loaded.</param>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="manager"/> or <paramref name="config"/> is null.</exception>
public Crawler(WorkManager manager, WorkerConfiguration config, PluginManager plugins = null)
{
    // Fail fast on missing required dependencies instead of a later,
    // harder-to-diagnose NullReferenceException (config is dereferenced below).
    if (manager == null) throw new System.ArgumentNullException(nameof(manager));
    if (config == null) throw new System.ArgumentNullException(nameof(config));

    Config = config;
    Manager = manager;
    this.plugins = plugins;

    // Bounded sliding buffer: only the most recent downloads are kept,
    // capped by the configured MaxLoggedDownloads.
    RecentDownloads = new ConcurrentSlidingBuffer<DownloadedWork>(config.MaxLoggedDownloads);
}
/// <summary>
/// Starts a TCP listener for clients. Uses WorkManager to get URLs to distribute among clients.
/// </summary>
/// <param name="manager">Work source providing URLs to distribute to connected clients.</param>
/// <param name="config">Top-level configuration; supplies both <c>HostConfig</c> and <c>WorkerConfig</c>.</param>
/// <param name="workerPicker">Strategy used to pick which worker receives work.</param>
/// <param name="plugins">Optional plugin manager; may be null when no plugins are loaded.</param>
public WorkerManager(WorkManager manager, Configuration config, IWorkerPicker workerPicker, PluginManager plugins = null)
{
    // parameters — capture dependencies and split the composite configuration
    this.plugins = plugins;
    this.manager = manager;
    this.picker = workerPicker;
    this.config = config.HostConfig;
    this.WorkerConfig = config.WorkerConfig;

    // Derive listener settings from the host config captured above
    // (order matters: this.config must be assigned first).
    var password = this.config.ListenerConfiguration.Password;
    var endpoint = new IPEndPoint(
        IPAddress.Parse(this.config.ListenerConfiguration.IP),
        this.config.ListenerConfiguration.Port);

    // Bounded sliding buffer of the most recent downloads, capped by MaxLoggedDownloads.
    RecentDownloads = new ConcurrentSlidingBuffer<DownloadedWork>(config.WorkerConfig.MaxLoggedDownloads);

    // Push the worker configuration out before accepting connections.
    UpdateWorkerConfigurations(WorkerConfig);

    // initialize everything
    listener = new TcpListener(endpoint);
    // NOTE(review): self-signed certificate — presumably used to encrypt client
    // connections; confirm against the connection-handshake code.
    certificate = SecurityUtils.BuildSelfSignedCertificate("crycrawler");
    // No configured password means no hash — clients connect unauthenticated.
    passwordHash = string.IsNullOrEmpty(password) ? null : SecurityUtils.GetHash(password);

    // prepare checking timer — fires every minute to check for old clients
    timer = new Timer(TimeSpan.FromMinutes(1).TotalMilliseconds);
    timer.Elapsed += OldClientCheck;

    // prepare worker timer — fires every second to check worker status
    workerTimer = new Timer(TimeSpan.FromSeconds(1).TotalMilliseconds);
    workerTimer.Elapsed += WorkerStatusCheck;

    // subscribe to events
    this.ClientRemoved += clientRemoved;
    this.ClientLeft += clientDisconnected;
}