public void loadPages(Models.Book s)
{
    // Block until the source reports it is safe to pull pages.
    while (!sf.isSafePull())
    {
    }

    // Only adopt the book as the current one if it has a last-chapter address.
    if (s.BookLastChapterAddress != null)
    {
        this.CurrentBook = s;
    }
}
public void LoadCompleted(string url)
{
    // Block until the source reports it is safe to pull pages.
    while (!sf.isSafePull())
    {
    }

    try
    {
        helper = new MainHelper();
        helper.loadPages(CurrentBook);

        string HTML = DownloadPage(url);
        if (string.IsNullOrEmpty(HTML))
        {
            return;
        }

        // Parse the downloaded page, then re-load just the <html> element's
        // contents into the helper's document for further processing.
        HtmlDocument doc = new HtmlDocument();
        doc.LoadHtml(HTML);
        HtmlDocument doc2 = new HtmlDocument();
        doc2.LoadHtml(doc.DocumentNode.ChildNodes["html"].InnerHtml);
        helper.Document.LoadHtml(doc2.DocumentNode.InnerHtml);

        Logging.Logging.LogMessage("",
            Environment.GetFolderPath(Environment.SpecialFolder.Desktop) + "\\Logs",
            "BookScraper_ChapterScraped",
            " BookID: " + this.CurrentBook.BookID +
            "\r\n\r\n Book Next Chapter Address: " + this.CurrentBook.NextChapterAddress +
            "\r\n\r\n Book Previous Chapter Address: " + this.CurrentBook.PreviousChapterAddress +
            "\r\n\r\n Book LastChapterAddress: " + this.CurrentBook.BookLastChapterAddress +
            "\r\n\r\n Book SourceID: " + this.CurrentBook.SourceID +
            "\r\n\r\n Chapter HTML: " + HTML);

        Console.WriteLine("(" + DateTime.Now + ") " + "Grabbed HTML About to Parse...");

        this.NextChapter = this.helper.prepareBook();
    }
    catch (WebException)
    {
        // A download failure means this chapter cannot be scraped; signal the
        // outer loop to move on to the next book.
        ProgramStatics.NextBook = true;
    }
}
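// A minimal sketch (not part of the original class) of how the repeated
// "wait until sf.isSafePull() returns true" spin used by both loadPages and
// LoadCompleted could be factored into a single helper. The SpinWait-based
// back-off is an assumption; the original code busy-waits without yielding.
private void WaitUntilSafeToPull()
{
    var spinner = new System.Threading.SpinWait();
    while (!sf.isSafePull())
    {
        // Yields the thread periodically instead of spinning flat out.
        spinner.SpinOnce();
    }
}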