public void AddBot(string watchLink, string title, int updateTime)
{
    SeriesWebCrawler crawler = new SeriesWebCrawler(watchLink, title, updateTime, CreateUnigueID());
    CrawlerFileHandler.WriteNewBotToFile(crawler);
    CrawlersRunning.Add(crawler);
}
public static void SaveShows(SeriesWebCrawler crawler)
{
    string path = @"bots\" + crawler.ID.ToString() + ".txt";
    if (!Directory.Exists("bots"))
    {
        Directory.CreateDirectory("bots");
    }

    // Write the crawler's state to a temp file first, then swap it into place,
    // so an interrupted write cannot leave a half-written bot file behind.
    var tempFile = Path.GetTempFileName();
    using (StreamWriter sw1 = File.AppendText(tempFile))
    {
        sw1.WriteLine(crawler.WatchLink);
        sw1.WriteLine(crawler.SeriesName);
        sw1.WriteLine(crawler.UpdateTime);
        sw1.WriteLine(crawler.ID);
        foreach (var link in crawler.Episodes)
        {
            sw1.WriteLine(link);
        }
    }
    File.Delete(path); // no-op if the file does not exist yet
    File.Move(tempFile, path);
}
private static SeriesWebCrawler CreateBotFromFile(string file)
{
    // Rebuild a crawler from its saved file: header fields first, then the episode links.
    SeriesWebCrawler bot = new SeriesWebCrawler(GetCrawlerUrl(file), GetSeriesName(file), GetUpdateTime(file), GetCrawlerId(file));
    bot.Episodes = ReadLink(file);
    return bot;
}
public static void DeleteFile(SeriesWebCrawler crawler)
{
    try
    {
        // File.Delete does not throw when the file is already gone; it only fails
        // if the "bots" directory is missing or the file is locked, so catch IOException
        // (which covers both) rather than FileNotFoundException.
        File.Delete(@"bots\" + crawler.ID.ToString() + ".txt");
    }
    catch (IOException e)
    {
        Console.WriteLine(e);
        throw;
    }
}
public static void WriteNewBotToFile(SeriesWebCrawler crawler)
{
    string path = @"bots\" + crawler.ID.ToString() + ".txt";
    if (!Directory.Exists("bots"))
    {
        Directory.CreateDirectory("bots");
    }

    using (StreamWriter sw = File.AppendText(path))
    {
        sw.WriteLine(crawler.WatchLink);
        sw.WriteLine(crawler.SeriesName);
        sw.WriteLine(crawler.UpdateTime);
        sw.WriteLine(crawler.ID);
    }
}
public void RemoveBot(SeriesWebCrawler active)
{
    active.StopWatching();
    CrawlerFileHandler.DeleteFile(active);
    CrawlersRunning.Remove(active);
}

public void MarkAsSeen(SeriesWebCrawler active)
{
    active.Seen();
}

public void StartBot(SeriesWebCrawler active)
{
    active.StartWatching();
}

public void EndBot(SeriesWebCrawler active)
{
    CrawlerFileHandler.SaveShows(active);
    active.StopWatching();
}
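// A minimal usage sketch, not part of the original source: it assumes the class that
// holds AddBot/RemoveBot/StartBot/EndBot is named CrawlerManager and that its
// CrawlersRunning list is publicly readable; adjust the names to match the real types.
public static void ExampleUsage()
{
    var manager = new CrawlerManager();
    manager.AddBot("https://example.com/show", "Example Show", 60); // persist and track a new crawler
    SeriesWebCrawler bot = manager.CrawlersRunning[0];

    manager.StartBot(bot);    // begin watching the link
    manager.MarkAsSeen(bot);  // mark the current episodes as seen
    manager.EndBot(bot);      // save collected episodes, then stop watching
    manager.RemoveBot(bot);   // stop, delete its file, and untrack it
}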