/// <summary>
/// Drives the dictionary-based crawl. Optionally probes the target server for the
/// "Xtream Codes" marker (skipped when <c>force_crawl</c> is set), then processes
/// <c>dictionary.entries</c> in batches of up to <c>concurrent_tasks</c> background
/// workers. Every non-empty worker result is appended to a numbered .m3u file under
/// "output/". Sets <c>status</c> to reflect the overall outcome.
/// </summary>
private void Crawl_Manager()
        {
            bool found_result = false;
            progress = 0;
            num_results = 0;
            try
            {
                // Site validation is skipped entirely when a forced crawl was requested.
                bool valid_site;
                if (force_crawl)
                {
                    valid_site = true;
                }
                else
                {
                    string page = client.DownloadString(server.Trim());
                    valid_site = !string.IsNullOrEmpty(page) && page.Contains("Xtream Codes");
                }

                if (valid_site && dictionary.entries.Count > 0)
                {
                    while (progress < dictionary.entries.Count)
                    {
                        // Next batch size: full concurrency, or whatever entries remain.
                        int increment = Math.Min(concurrent_tasks, dictionary.entries.Count - progress);

                        crawl_tasks = new CrawlTaskData[increment];

                        for (int j = 0; j < crawl_tasks.Length; j++)
                        {
                            int x = progress + j;

                            // NOTE(review): this flag looks inverted — the first character is
                            // capitalized when 'uppercase' is FALSE. Behavior preserved as-is;
                            // confirm the intent against the UI that sets 'uppercase'.
                            string search_string;
                            if (uppercase)
                            {
                                search_string = dictionary.entries[x];
                            }
                            else
                            {
                                search_string = char.ToUpper(dictionary.entries[x][0]) + dictionary.entries[x].Substring(1);
                            }

                            crawl_tasks[j] = new CrawlTaskData();
                            crawl_tasks[j].task.WorkerSupportsCancellation = true;
                            crawl_tasks[j].task.WorkerReportsProgress = true;
                            crawl_tasks[j].task.DoWork += new DoWorkEventHandler(Crawl_Method);
                            // Worker receives its search term plus its slot index within the batch.
                            crawl_tasks[j].task.RunWorkerAsync(Tuple.Create(search_string, j));
                        }

                        // Block until every worker in this batch has finished.
                        WaitWorkers();

                        foreach (CrawlTaskData crawl_task in crawl_tasks)
                        {
                            // IsNullOrEmpty also guards against a worker leaving 'result' null.
                            if (!string.IsNullOrEmpty(crawl_task.result))
                            {
                                found_result = true;
                                num_results++;
                                if (!Directory.Exists("output"))
                                {
                                    Directory.CreateDirectory("output");
                                }
                                string output = "output/tv_channels_" + num_results + ".m3u";
                                // 'using' guarantees the writer is flushed and closed even if
                                // WriteLine throws (the original leaked the handle on exception).
                                using (StreamWriter outputFile = new StreamWriter(output, true))
                                {
                                    outputFile.WriteLine(crawl_task.result);
                                }
                            }
                        }

                        progress += increment;
                    }

                    status = found_result
                        ? CrawlerStatus.CompletedWithResults
                        : CrawlerStatus.CompletedWithoutResults;
                }
                else
                {
                    status = CrawlerStatus.InvalidSite;
                }
            }
            catch (Exception ex)
            {
                // Any unexpected failure (network, IO, indexing) is surfaced as InvalidSite.
                Console.WriteLine(ex.Message);
                status = CrawlerStatus.InvalidSite;
            }
        }
Example #2
0
        /// <summary>
        /// Drives the dictionary-based crawl. Optionally probes the target server for the
        /// "Xtream Codes" marker (skipped when <c>force_crawl</c> is set), then processes
        /// <c>dictionary.entries</c> in batches of up to <c>concurrent_tasks</c> background
        /// workers. Every non-empty worker result is appended to a numbered .m3u file under
        /// "output/". Sets <c>status</c> to reflect the overall outcome.
        /// </summary>
        private void Crawl_Manager()
        {
            bool found_result = false;

            progress    = 0;
            num_results = 0;
            try
            {
                // Site validation is skipped entirely when a forced crawl was requested.
                bool valid_site;
                if (force_crawl)
                {
                    valid_site = true;
                }
                else
                {
                    string page = client.DownloadString(server.Trim());
                    valid_site = !string.IsNullOrEmpty(page) && page.Contains("Xtream Codes");
                }

                if (valid_site && dictionary.entries.Count > 0)
                {
                    while (progress < dictionary.entries.Count)
                    {
                        // Next batch size: full concurrency, or whatever entries remain.
                        int increment = Math.Min(concurrent_tasks, dictionary.entries.Count - progress);

                        crawl_tasks = new CrawlTaskData[increment];

                        for (int j = 0; j < crawl_tasks.Length; j++)
                        {
                            int x = progress + j;

                            // NOTE(review): this flag looks inverted — the first character is
                            // capitalized when 'uppercase' is FALSE. Behavior preserved as-is;
                            // confirm the intent against the UI that sets 'uppercase'.
                            string search_string;
                            if (uppercase)
                            {
                                search_string = dictionary.entries[x];
                            }
                            else
                            {
                                search_string = char.ToUpper(dictionary.entries[x][0]) + dictionary.entries[x].Substring(1);
                            }

                            crawl_tasks[j] = new CrawlTaskData();
                            crawl_tasks[j].task.WorkerSupportsCancellation = true;
                            crawl_tasks[j].task.WorkerReportsProgress      = true;
                            crawl_tasks[j].task.DoWork += new DoWorkEventHandler(Crawl_Method);
                            // Worker receives its search term plus its slot index within the batch.
                            crawl_tasks[j].task.RunWorkerAsync(Tuple.Create(search_string, j));
                        }

                        // Block until every worker in this batch has finished.
                        WaitWorkers();

                        foreach (CrawlTaskData crawl_task in crawl_tasks)
                        {
                            // IsNullOrEmpty also guards against a worker leaving 'result' null.
                            if (!string.IsNullOrEmpty(crawl_task.result))
                            {
                                found_result = true;
                                num_results++;
                                if (!Directory.Exists("output"))
                                {
                                    Directory.CreateDirectory("output");
                                }
                                string output = "output/tv_channels_" + num_results + ".m3u";
                                // 'using' guarantees the writer is flushed and closed even if
                                // WriteLine throws (the original leaked the handle on exception).
                                using (StreamWriter outputFile = new StreamWriter(output, true))
                                {
                                    outputFile.WriteLine(crawl_task.result);
                                }
                            }
                        }

                        progress += increment;
                    }

                    status = found_result
                        ? CrawlerStatus.CompletedWithResults
                        : CrawlerStatus.CompletedWithoutResults;
                }
                else
                {
                    status = CrawlerStatus.InvalidSite;
                }
            }
            catch (Exception ex)
            {
                // Any unexpected failure (network, IO, indexing) is surfaced as InvalidSite.
                Console.WriteLine(ex.Message);
                status = CrawlerStatus.InvalidSite;
            }
        }