public void NScrapySuccessfullyExitTest()
{
    // Start from a clean slate so the assertion below only sees output from this run.
    var logPath = Path.Combine(Directory.GetCurrentDirectory(), "log-file.txt");
    if (File.Exists(logPath))
    {
        File.Delete(logPath);
    }

    Shell.NScrapy scrapy = NScrapy.Shell.NScrapy.GetInstance();
    scrapy.Crawl("UrlFilterTestSpider");

    // Make a copy of the log file since the original is still held open by log4net;
    // reading the copy avoids a sharing violation.
    var copiedLogFile = Path.Combine(Directory.GetCurrentDirectory(), "log-file.test.txt");
    File.Copy(logPath, copiedLogFile);

    string logFileContent;
    try
    {
        // File.ReadAllText replaces the previous FileStream/MemoryStream/Encoding
        // round-trip; it detects a BOM and defaults to UTF-8, matching the old behavior.
        logFileContent = File.ReadAllText(copiedLogFile);
    }
    finally
    {
        // Ensure the temporary copy is removed even if reading throws,
        // so a failed run does not leave stale files behind.
        File.Delete(copiedLogFile);
    }

    // The spider is expected to have visited (and logged) this filtered URL.
    Assert.IsTrue(logFileContent.Contains("https://www.liepin.com/zhaopin/?d_sfrom=search_fp_nvbar&init=1"));
}
public void NScrapySuccessfullyExitTest()
{
    // Clear any previous log output; DeleteLog returns the path of the log file.
    var logPath = DeleteLog();

    var crawler = NScrapy.Shell.NScrapy.GetInstance();
    crawler.Crawl("UrlFilterTestSpider");

    // Read via a copy because log4net still holds the original log file open.
    var logFileContent = GetLogContent(logPath);

    Assert.IsTrue(logFileContent.Contains("https://www.liepin.com/zhaopin/?d_sfrom=search_fp_nvbar&init=1"));
}
public void UrlFilterTest()
{
    // Resolve the shared NScrapy instance and run the UrlFilterTestSpider crawl.
    var crawler = NScrapy.Shell.NScrapy.GetInstance();
    crawler.Crawl("UrlFilterTestSpider");
}