/// <summary>
/// Scheduling loop: while scheduling is enabled and the crawl budget
/// (<c>crawlsToPerform</c>) is not exhausted, pairs URLs from <c>urlQueue</c>
/// with crawlers — reusing an idle crawler from <c>crawlerQueue</c> when one
/// is available, otherwise creating a new <c>WebCrawler</c> up to
/// <c>maxCrawlers</c>. Each crawl runs on its own thread. When the loop
/// exits, the crawler pool is cleared.
/// NOTE(review): assumes a single scheduler thread mutates
/// <c>crawlsScheduled</c>/<c>numCrawlers</c> — confirm with callers.
/// NOTE(review): when the URL queue is empty (or all crawlers are busy) this
/// loop busy-spins on the semaphores; consider a wait/notify if CPU use matters.
/// </summary>
public void schedule()
{
    // Schedule a URL
    while (keepScheduling && crawlsScheduled < crawlsToPerform)
    {
        queueSema.WaitOne();
        try
        {
            if (urlQueue.Count != 0)
            {
                crawlerSema.WaitOne();
                try
                {
                    if (crawlerQueue.Count != 0)
                    {
                        Console.WriteLine("Using available crawler");
                        // Use an available (idle) crawler from the pool.
                        WebCrawler nextCrawler = crawlerQueue.Dequeue();
                        string nextURL = urlQueue.Dequeue();
                        new Thread(nextCrawler.Crawl).Start(nextURL);
                        crawlsScheduled++;
                    }
                    else if (numCrawlers < maxCrawlers)
                    {
                        // FIX: was `numCrawlers <= maxCrawlers`, an off-by-one
                        // that allowed maxCrawlers + 1 crawlers to be created.
                        // Create a new crawler; its id is the pre-increment count.
                        WebCrawler newCrawler = new WebCrawler(numCrawlers++);
                        string nextURL = urlQueue.Dequeue();
                        new Thread(newCrawler.Crawl).Start(nextURL);
                        crawlsScheduled++;
                    }
                    // Otherwise every crawler slot is busy; retry on the next pass.
                }
                finally
                {
                    // FIX: release in finally so an exception from Dequeue /
                    // WebCrawler construction / Thread.Start cannot leave the
                    // crawler semaphore held forever (deadlocking other threads).
                    crawlerSema.Release();
                }
            }
        }
        finally
        {
            // FIX: same leak-proofing for the URL-queue semaphore.
            queueSema.Release();
        }
    }

    // Stop scheduling: drain the crawler pool under the pool semaphore.
    crawlerSema.WaitOne();
    try
    {
        crawlerQueue.Clear();
    }
    finally
    {
        crawlerSema.Release();
    }
}