コード例 #1
0
ファイル: Program.cs プロジェクト: mgamlem3/Wikipedia-Race
        // search for the finishing website
        // inputs: strings for start and end, ArticleCollection to store webpages in, ForbiddenLinks object
        void searchForWebsite(string StartPageName, string FinishPageName, ArticleCollection Webpages, ForbiddenLinks Forbidden) {
            // get data for first webpage
            WikipediaWebRequest request = new WikipediaWebRequest("wiki/"+StartPageName, Webpages, Forbidden, null);          
            
            // successful request, start program
            if (request.SuccessfulWebRequest()) {
                WikipediaSearcher s = null;
                
                // create article crawler
                ArticleCrawler c = new ArticleCrawler(Webpages, Forbidden);
                Thread crawler = new Thread(() => c.Start());
                crawler.Start();
                crawler.Priority = ThreadPriority.Normal;

                // create searcher
                Console.WriteLine("Creating Searcher....");
                Thread searcher = new Thread(() => {s = new WikipediaSearcher(Webpages, StartPageName, FinishPageName, Forbidden, "BFS");});
                searcher.Start();
                searcher.Priority = ThreadPriority.AboveNormal;

                // wait for searcher to complete   
                searcher.Join();

                // try to stop crawler
                try {
                    crawler.Abort();
                }
                catch (PlatformNotSupportedException) {
                    // TODO: investigate this further
                    Console.WriteLine("System does not support abort of threads. Please terminate manually");
                }
            }
        }
コード例 #2
0
    // creates request to get webpage
    // note: only to be used within this class, use GetWebpage external to class
    // creates request to get webpage
    // note: only to be used within this class, use GetWebpage external to class
    // The WikipediaWebRequest constructor performs the download and records the
    // page in this collection as a side effect.
    // returns: always true — in C# `new` never yields null, so the original
    // `r == null` branch was dead code. A failed download is NOT observable
    // here; TODO(review): surface it, e.g. via r.SuccessfulWebRequest().
    private bool TryToGetWebpage(string requestedPage, Webpage parent)
    {
        // discard: the object is only constructed for its side effects
        _ = new WikipediaWebRequest(requestedPage, this, ForbiddenLinksCollection, parent);
        return true;
    }
コード例 #3
0
    // depth first search
    // slow
    // may contain bugs... abandoned when realized BFS was better
    // depth first search
    // slow
    // may contain bugs... abandoned when realized BFS was better
    //
    // Walks from StartPage using PathTaken as the explicit DFS stack: the top of
    // the stack is the page currently being expanded. Each page carries its own
    // queue of candidate links (WebpagesToBeSearched); when a page's queue is
    // exhausted the page is popped (backtrack). Depth is capped at 7 stack
    // entries, after which the top page is forcibly popped.
    // Ends by printing results; answerFound stays false if the stack empties
    // without reaching FinishPage.
    private void SearchDFS()
    {
        Webpage currentPage, nextPage;
        string  nextPageString;
        bool    successfulTake, answerFound = false;

        // seed the stack with the start page, if it is already in the database
        currentPage = Articles.GetWebpage(StartPage, null);
        if (currentPage != null)
        {
            PathTaken.Push(currentPage);
        }

        while (PathTaken.Count > 0)
        {
            // expand the page on top of the stack (Peek, not Pop: it stays
            // until its link queue is exhausted)
            currentPage = PathTaken.Peek();
            // get next page to be searched
            try {
                successfulTake = currentPage.WebpagesToBeSearched.TryDequeue(out nextPageString);

                if (successfulTake)
                {
                    // get next page down
                    nextPage = Articles.GetWebpage(nextPageString, currentPage);
                    // don't search pages that are not in database or that are the page I am currently on
                    if (nextPage == null || nextPage.Title == currentPage.Title)
                    {
                        continue;
                    }
                    PathTaken.Push(nextPage);
                    answerFound = checkIfAnswerFound(nextPage);
                    if (answerFound)
                    {
                        // target reached; PathTaken now holds the route
                        break;
                    }
                }
                else
                {
                    // link queue empty: backtrack one level
                    PathTaken.Pop();
                }
                // depth limit: never let the path grow beyond 7 pages
                // NOTE(review): this also pops right after a successful Push,
                // so depth-7 discoveries are immediately discarded — presumably
                // intentional pruning, but worth confirming
                if (PathTaken.Count >= 7)
                {
                    PathTaken.Pop();
                }
            } catch (NullReferenceException e) {
                // currentPage (or its queue) was null — likely nothing crawled
                // yet; re-request the start page and retry the loop
                Console.WriteLine("No webpages have been located yet. " + e);
                WikipediaWebRequest r = new WikipediaWebRequest(StartPage, Articles, ForbiddenLinksCollection, currentPage);
                currentPage = Articles.GetWebpage(StartPage, null);
            }
        }
        PrintResultsDFS(answerFound);
    }
コード例 #4
0
    // function given to thread
    // input: GetPageObject
    // function given to thread
    // input: GetPageObject (wraps the page name string + parent Webpage, since
    //        Thread entry points take a single object parameter)
    // Downloads the named page unless it is already cached in Articles, then
    // releases this worker's slot in threadCount.
    private void GetPage(object o)
    {
        // cast generic object back to GetPageObject
        GetPageObject obj    = (GetPageObject)o;
        string        _str   = obj.Str;
        Webpage       parent = obj.Page;

        try
        {
            // check again to see if webpage already in dictionary
            Webpage w = Articles.WebpageInDictionary(_str);

            if (w == null)
            {
                // download webpage (the constructor performs the request and
                // stores the result in Articles as a side effect)
                Console.WriteLine("requesting: " + _str);
                WikipediaWebRequest r = new WikipediaWebRequest(_str, Articles, ForbiddenLinksCollection, parent);
            }
        }
        finally
        {
            // Decrement on EVERY exit path: the original only decremented on the
            // download branch, so cache hits leaked the counter. Also use an
            // atomic decrement — plain `threadCount--` is not thread-safe and
            // this method runs on many threads concurrently.
            // NOTE(review): assumes threadCount is incremented once per GetPage
            // thread at the spawn site — confirm against the caller.
            System.Threading.Interlocked.Decrement(ref threadCount);
        }
    }