/// <summary>
/// Creates a crawler bound to a single target. <paramref name="inpParams"/>
/// presumably pairs the domain to crawl with the user agent to crawl as —
/// TODO confirm against the caller that builds it.
/// </summary>
/// <param name="inpParams">Target <see cref="Uri"/> paired with its <c>IUserAgent</c> (value type, cannot be null).</param>
/// <param name="logMessanger">Log-message dispatcher used by this crawler.</param>
/// <param name="httpParser">Parser applied to fetched HTTP content.</param>
/// <param name="output">Output destination string; semantics (path vs. identifier) not visible here.</param>
/// <exception cref="ArgumentNullException">If any reference-type argument is null.</exception>
public Crawler(KeyValuePair<Uri, IUserAgent> inpParams, LogMessanger logMessanger, IHttpParser httpParser, string output)
{
    // Fail fast at construction instead of surfacing a NullReferenceException
    // later, far from the faulty call site. KeyValuePair is a struct and
    // needs no guard.
    _inpParams = inpParams;
    _logMessanger = logMessanger ?? throw new ArgumentNullException(nameof(logMessanger));
    _httpParser = httpParser ?? throw new ArgumentNullException(nameof(httpParser));
    _output = output ?? throw new ArgumentNullException(nameof(output));
}
/// <summary>
/// Entry point: wires a console logger into a <c>LogMessanger</c>, starts an
/// interactive <c>Client</c>, and feeds console input lines to its command
/// interpreter until <c>cancelConsole</c> is set (flag declared elsewhere in
/// this class) or standard input is exhausted.
/// </summary>
static void Main(string[] args)
{
    SimpleStringCombiner stringCombiner = new SimpleStringCombiner();
    ConsoleWriterLogger consoleWriterLogger = new ConsoleWriterLogger(stringCombiner);

    // NOTE(review): logMessanger is populated but never passed to the Client
    // below — the client receives the console logger directly. Looks like a
    // leftover from an earlier wiring; confirm before removing.
    LogMessanger logMessanger = new LogMessanger();
    logMessanger.Add("consoleWriter", consoleWriterLogger);

    Client client = new Client("me", consoleWriterLogger);
    client.InitInterpreter();

    // Read-eval loop. Console.ReadLine() returns null at end of stream
    // (e.g. redirected input runs out); the original passed that null
    // straight into Interpret — exit the loop instead.
    while (!cancelConsole)
    {
        string line = Console.ReadLine();
        if (line == null)
        {
            break;
        }

        client.Interpreter.Interpret(line);
    }
}
/// <summary>
/// Creates a robots.txt parser wired to its collaborators. Presumably
/// <paramref name="request"/> fetches the robots file and
/// <paramref name="inputFields"/> carries the crawl configuration — TODO
/// confirm against the types' definitions.
/// </summary>
/// <param name="inputFields">Parsed input configuration for this parser.</param>
/// <param name="logMessanger">Log-message dispatcher.</param>
/// <param name="request">HTTP request helper used by the parser.</param>
/// <exception cref="ArgumentNullException">If any argument is null.</exception>
public RobotsParser(InputFields inputFields, LogMessanger logMessanger, IRequest request)
{
    // Guard clauses so a mis-wired dependency fails at construction time
    // rather than as a NullReferenceException during parsing.
    _inputFields = inputFields ?? throw new ArgumentNullException(nameof(inputFields));
    _logMessanger = logMessanger ?? throw new ArgumentNullException(nameof(logMessanger));
    _request = request ?? throw new ArgumentNullException(nameof(request));
}