public static void Main()
    {
        Thread thread = ThreadSleeper.DoSleep(5);

        Console.WriteLine("Waiting for thread to join");
        thread.Join();
        Console.WriteLine("Thread Joined");
    }
    public static Thread DoSleep(int seconds)
    {
        ThreadSleeper ts     = new ThreadSleeper(seconds);
        Thread        thread = new Thread(new ThreadStart(ts.Nap));

        thread.Start();
        return thread;
    }
    public static WaitHandle DoSleep(int seconds)
    {
        ThreadSleeper ts     = new ThreadSleeper(seconds);
        Thread        thread = new Thread(new ThreadStart(ts.Nap));

        thread.Start();
        return ts.napDone;
    }
    public static void Main()
    {
        WaitHandle[] waits = new WaitHandle[2];
        waits[0] = ThreadSleeper.DoSleep(8);
        waits[1] = ThreadSleeper.DoSleep(4);

        Console.WriteLine("Waiting for threads to finish");
        WaitHandle.WaitAll(waits);
        Console.WriteLine("Threads finished");
    }
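The two DoSleep overloads above differ only by return type, so they come from separate versions of the ThreadSleeper class rather than a single compilable file (C# does not allow overloading on return type alone). Both assume a Nap method that does the actual sleeping and, for the WaitHandle variant, a napDone event that is signaled when the nap ends. Below is a minimal sketch of such a class; the member names Nap and napDone come from the snippets, while the ManualResetEvent choice and console output are assumptions:

using System;
using System.Threading;

public class ThreadSleeper
{
    private readonly int seconds;

    // Signaled when Nap() finishes; the WaitHandle overload of DoSleep hands this out.
    public readonly ManualResetEvent napDone = new ManualResetEvent(false);

    public ThreadSleeper(int seconds)
    {
        this.seconds = seconds;
    }

    public void Nap()
    {
        Console.WriteLine("Napping for {0} seconds", seconds);
        Thread.Sleep(seconds * 1000);
        napDone.Set(); // wake anyone blocked on the handle
    }
}

Because ManualResetEvent derives from WaitHandle, the napDone field can be returned directly from the second DoSleep overload and passed to WaitHandle.WaitAll as in the example above.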
Example #5
 /// <summary>
 /// Sets the appropriate timeout for the given host: a first-time host gets
 /// a new sleeper at the minimum timeout; a known host has its timeout
 /// extended (up to the maximum) and its countdown restarted.
 /// </summary>
 /// <param name="hostname">Host whose timeout entry is created or extended.</param>
 public void markHostForTimeout(string hostname)
 {
     if (webpageHosts.get(hostname) == null)
     {
         webpageHosts.put(hostname, new ThreadSleeper(MIN_THREAD_SLEEP));
     }
     else
     {
         ThreadSleeper sleeper = webpageHosts.get(hostname);
         if (sleeper.getTimeoutLimit() < MAX_THREAD_SLEEP)
         {
             sleeper.addTenSeconds();
         }
         sleeper.restart();
     }
 }
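markHostForTimeout relies on a per-host ThreadSleeper exposing getTimeoutLimit, addTenSeconds, and restart, plus a webpageHosts map with get/put accessors, and MIN_THREAD_SLEEP/MAX_THREAD_SLEEP bounds. A hypothetical sketch of that surface, assuming the timeout is tracked in milliseconds and restart re-arms a stopwatch; only the member names are taken from the snippet, the bodies are guesses:

using System.Collections.Generic;
using System.Diagnostics;

public class ThreadSleeper
{
    private int timeoutLimit;                              // current backoff window, assumed ms
    private readonly Stopwatch clock = Stopwatch.StartNew();

    public ThreadSleeper(int initialTimeout) { timeoutLimit = initialTimeout; }

    public int getTimeoutLimit() { return timeoutLimit; }

    public void addTenSeconds() { timeoutLimit += 10000; }

    // Re-arm the countdown so the host serves its full (possibly extended) timeout again.
    public void restart() { clock.Restart(); }

    // Hypothetical helper showing what restart() is for: the host stays
    // marked until the stopwatch outruns the timeout window.
    public bool isTimedOut() { return clock.ElapsedMilliseconds < timeoutLimit; }
}

// Hypothetical map wrapper matching the get/put calls in the snippet.
public class HostMap
{
    private readonly Dictionary<string, ThreadSleeper> hosts = new Dictionary<string, ThreadSleeper>();

    public ThreadSleeper get(string hostname)
    {
        ThreadSleeper sleeper;
        return hosts.TryGetValue(hostname, out sleeper) ? sleeper : null;
    }

    public void put(string hostname, ThreadSleeper sleeper) { hosts[hostname] = sleeper; }
}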
        /// <summary>
        /// Explores outward from the current URL, searching each page once for
        /// information, until the page limit is reached, the crawler is no
        /// longer callable, or the URL queue is empty.
        /// </summary>
        private void explore()
        {
            WebUtils      utils     = new WebUtils();
            WebCache      cache     = new WebCache();
            WebCrawler    crawler   = new WebCrawler();
            WebHostPolicy policy    = new WebHostPolicy();
            WebProcessor  processor = new WebProcessor(configuredSettings);
            ThreadSleeper sleeper   = new ThreadSleeper(5000);

            // initialize the URL queue if it has not been created yet
            if (sizeOfQueue() < 1)
            {
                initQueue(cache, currentUrl);
            }

            // traverse while the number of visited URLs is below the limit, the crawler is still callable, and the URL queue is not empty
            while (amountOfWebpageUrlsTraversed() < maxPageSearchLimit && callable && !isQueueEmpty())
            {
                string currentWebpageUrl = dequeueWebpageUrl(traversalStyle, cache);

                // ensure the url is valid and has not been visited already
                if (!utils.isValidWebpageURL(currentWebpageUrl) || hasWebpageUrlBeenVisied(currentWebpageUrl))
                {
                    continue;
                }

                // check the shared timeout policy and, if this page's host is marked, put the current thread into timeout
                handlePotentialTimeout(policy, utils, currentWebpageUrl);

                // if the crawl returns false, then it is an unsupported url
                if (!crawler.tryCrawl(currentWebpageUrl))
                {
                    continue;
                }

                setWebpageUrlAsVisited(currentWebpageUrl);

                // retrieve the texts, page links, image links, and host found by the crawler
                Queue<string> texts              = crawler.releaseTexts();
                Queue<string> webpageUrls        = crawler.releaseWebpages();
                Queue<string> imageUrls          = crawler.releaseImages();
                string        currentWebpageHost = crawler.releaseHost();

                // construct the website from the crawl results, then apply the basic text filter
                WebPage page = processor.constructWebsite(texts, webpageUrls, imageUrls, currentWebpageUrl, currentWebpageHost);
                processor.tryBasicFilter(texts);

                // route the newly discovered page and image URLs through the cache into the queue
                addWebpageUrlsToQueue(cache, page, webpageUrls, imageUrls);
                // enqueue the website to the hub
                sendToHub(page);

                // Update the state object
                sharedSearchContext.getContextInfo().addToThreadScore(contextualId, page.getSearchPhraseCount());
                sharedSearchContext.getContextInfo().incrementUrlsTraversed();

                // construct the display for the end user
                mainDisplayQueue.Enqueue(utils.createPrimaryDisplayView(page, contextualId));

                // construct the secondary display for the end user
                secondaryDisplayQueue.Enqueue(utils.createSecondaryDisplayView(sharedSearchContext));

                // mark this host in the shared timeout policy so all threads throttle it
                addOrUpdatePolicy(policy, currentWebpageHost);
                sleeper.trySleeping();
            }
            secondaryDisplayQueue.Enqueue(utils.createSecondaryDisplayView(sharedSearchContext));
        }
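explore() constructs a ThreadSleeper(5000) and calls trySleeping() at the bottom of each loop iteration, which reads as a fixed politeness delay between page fetches. A minimal sketch under that assumption; only the constructor argument and method name come from the snippet, the delay semantics and exception handling are guesses:

using System.Threading;

public class ThreadSleeper
{
    private readonly int delayMilliseconds;

    public ThreadSleeper(int delayMilliseconds)
    {
        this.delayMilliseconds = delayMilliseconds;
    }

    // Pause the crawling thread between pages; swallow interrupts so a
    // shutdown signal does not tear down the crawl loop mid-iteration.
    public void trySleeping()
    {
        try
        {
            Thread.Sleep(delayMilliseconds);
        }
        catch (ThreadInterruptedException)
        {
            // interrupted: fall through and let the loop re-check its flags
        }
    }
}

Catching ThreadInterruptedException (rather than letting it propagate) matches the "try" in the method name: the sleep is best-effort, and the while condition on callable remains the loop's single exit point.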