Example no. 1
0
        /// <summary>
        /// Entry point: prepares a search query against the configured engine,
        /// opens a BlackWidow session to it, and logs progress to the console.
        /// Any exception during setup is written to errorlog.LOG beside the
        /// executable for later analysis.
        /// </summary>
        static void Main(string[] args)
        {
            IWidow info = new ConsoleInformative();

            try
            {
                string searchQuery         = "test";
                string searchEngine        = "https://google.com";
                NameValueCollection search = new NameValueCollection();
                // Matches absolute http(s) URLs as well as bare "www." links.
                Regex linkParser           = new Regex(@"\b(?:https?://|www\.)\S+\b", RegexOptions.Compiled | RegexOptions.IgnoreCase);
                info.Say("Attempting to connect to the site..");
                BlackWidow blackWidow = new BlackWidow(searchEngine);
                info.Success(string.Format("Connected to site, writing HTML to file, and searching {0} with query {1}.", searchEngine, searchQuery));
                search.Add("q", searchQuery);
            }

            /*
             * Catch all exceptions and write them
             * to a file for further analysis if any
             * occur during the process.
             */
            catch (Exception e)
            {
                var filePath = AppDomain.CurrentDomain.BaseDirectory;
                info.FatalErr(string.Format("Exception thrown: {0}", e.ToString()));
                // Path.Combine avoids the doubled separator the old
                // @"{0}\errorlog.LOG" format produced (BaseDirectory already
                // ends with a directory separator) and is platform-safe.
                File.WriteAllText(Path.Combine(filePath, "errorlog.LOG"), e.ToString());
                info.Debug(string.Format("Wrote Exception to file located in {0}", filePath));
            }
            Console.ReadLine();
        }
Example no. 2
0
        /*
         * Issues an HTTP GET against the given URL (in this case Google),
         * authenticating through any configured proxy with the cached default
         * credentials, writes the returned HTML to html.txt next to the
         * executable, and returns that HTML to the caller.
         */
        private static string GetWebInfo(string url)
        {
            // BaseDirectory ends with a separator, so GetDirectoryName strips
            // it and yields the directory itself; Path.Combine then builds the
            // file paths without the hand-rolled backslash formatting.
            string baseDir = Path.GetDirectoryName(System.AppDomain.CurrentDomain.BaseDirectory);
            string logPath = Path.Combine(baseDir, "html.txt");
            string errPath = Path.Combine(baseDir, "error.txt");

            // WebRequest.Create is the factory that actually declares Create;
            // calling it through HttpWebRequest was misleading.
            HttpWebRequest requests = (HttpWebRequest)WebRequest.Create(url);

            requests.ProtocolVersion = HttpVersion.Version10;
            requests.UserAgent       = "A .NET Web Crawler";
            IWebProxy proxy = requests.Proxy;
            IWidow    info  = new ConsoleInformative();

            /*
             * Use cached credentials to access the
             * proxy if there is one.
             */
            info.Say("Checking if you're behind a proxy");
            if (proxy != null)
            {
                try
                {
                    info.Say("Proxy found attempting to login with cached credentials..");
                    string proxyUri = proxy.GetProxy(requests.RequestUri).ToString();
                    requests.UseDefaultCredentials = true;
                    requests.Proxy             = new WebProxy(proxyUri, false);
                    requests.Proxy.Credentials = System.Net.CredentialCache.DefaultCredentials;
                }

                /*
                 * Record the failure for later analysis if the cached
                 * credentials fail to load. NOTE(review): despite the
                 * "exiting process" message, execution continues below.
                 */
                catch (Exception e)
                {
                    info.FatalErr("Unable to verify cached credentials..");
                    File.WriteAllText(errPath, e.ToString());
                    info.Debug("Wrote error to file for further analysis, exiting process..");
                }
            }
            info.Success("Logged in with cached credentials, continuing process.");

            // Dispose response, stream, and reader deterministically; the
            // original leaked all three (no using/Dispose), which exhausts
            // connections under repeated calls.
            using (WebResponse providedResponse = requests.GetResponse())
            using (Stream stream = providedResponse.GetResponseStream())
            using (StreamReader readInformation = new StreamReader(stream))
            {
                string htmlOutput = readInformation.ReadToEnd();
                File.WriteAllText(logPath, htmlOutput);
                return htmlOutput;
            }
        }