// Downloads the page list for a_chapter from this fake server.
// Fault injection: a chapter title containing "error pages none" throws
// after the simulated network delay.
internal override IEnumerable<Page> DownloadPages(Chapter a_chapter)
{
    a_chapter.Token.ThrowIfCancellationRequested();

    // Throttle through the shared limiter while simulating network latency.
    Limiter.Aquire(a_chapter);
    try
    {
        Sleep();
    }
    finally
    {
        Limiter.Release(a_chapter);
    }

    a_chapter.Token.ThrowIfCancellationRequested();

    a_chapter.State = ChapterState.DownloadingPagesList;

    // Resolve the fake serie/chapter backing this request by title.
    var serie = m_series.First(s => s.Title == a_chapter.Serie.Title);
    var chapter = serie.GetChapters().First(c => c.Title == a_chapter.Title);
    var pages = chapter.GeneratePages().ToList();

    // Deliberate failure injection for tests.
    if (a_chapter.Title.Contains("error pages none"))
    {
        throw new Exception();
    }

    // BUGFIX: the original numbered pages with pages.IndexOf(page), which is
    // O(n^2) and assigns the first-match index to duplicate page values; the
    // positional Select overload yields each page's true 1-based position.
    var result = pages.Select(
        (page, index) => new Page(a_chapter, "fake_page_url", index + 1, page));

    return result;
}
// Renders a fake page image (random-sized bitmap with identifying text) and
// returns it as a JPEG-encoded MemoryStream. Caller owns the returned stream.
// Fault injection: page index 45 of a chapter whose title contains
// "error page getimagestream" throws before any work is done.
internal override MemoryStream GetImageStream(Page a_page)
{
    // Deliberate failure injection for tests.
    if (a_page.Chapter.Title.Contains("error page getimagestream"))
    {
        if (a_page.Index == 45)
        {
            throw new Exception();
        }
    }

    a_page.Chapter.Token.ThrowIfCancellationRequested();

    using (Bitmap bmp = new Bitmap(NextInt(600, 2000), NextInt(600, 2000)))
    {
        // BUGFIX: the Font was previously created inline and never disposed,
        // leaking a GDI handle on every call; dispose it alongside Graphics.
        using (Graphics g = Graphics.FromImage(bmp))
        using (Font font = new Font(FontFamily.GenericSansSerif, 25, FontStyle.Bold))
        {
            string str =
                "server: " + a_page.Server.Name + Environment.NewLine +
                "serie: " + a_page.Serie.Title + Environment.NewLine +
                "chapter: " + a_page.Chapter.Title + Environment.NewLine +
                "page: " + a_page.Name;

            g.DrawString(
                str,
                font,
                Brushes.White,
                new RectangleF(10, 10, bmp.Width - 20, bmp.Height - 20));
        }

        // Throttle through the shared limiter while simulating network latency.
        Limiter.Aquire(a_page);
        try
        {
            Sleep();
        }
        finally
        {
            Limiter.Release(a_page);
        }

        MemoryStream ms = new MemoryStream();
        bmp.SaveJPEG(ms, 75);

        return ms;
    }
}
// Downloads the chapter list for a_serie, reporting progress through
// a_progress_callback as (percent-complete, chapters-so-far-in-order).
// Fault injection via magic substrings in the serie title:
//   "error chapters none" - throws before producing any chapters;
//   "error chapters few"  - throws partway through reporting;
//   "few chapters"        - limits the list to 3 items.
internal override void DownloadChapters(Serie a_serie, Action<int, IEnumerable<Chapter>> a_progress_callback)
{
    // Throttle through the shared limiter while simulating network latency.
    Limiter.Aquire(a_serie);
    try
    {
        Sleep();
    }
    finally
    {
        Limiter.Release(a_serie);
    }

    a_serie.State = SerieState.Downloading;

    Debug.Assert(a_serie.Server.Name == m_name);

    var serie = m_series.FirstOrDefault(s => s.Title == a_serie.Title);

    if (serie == null)
    {
        throw new Exception();
    }

    // Deliberate failure injection: fail before any chapters are produced.
    if (serie.Title.Contains("error chapters none"))
    {
        throw new Exception();
    }

    bool gen_exc = serie.Title.Contains("error chapters few");

    // count == -1 presumably means "all chapters" — confirm against the fake
    // serie's GetChapters implementation.
    int count = -1;
    if (gen_exc)
    {
        count = m_items_per_page * 8 + m_items_per_page / 3;
    }
    if (serie.Title.Contains("few chapters"))
    {
        count = 3;
    }

    var toreport = (from chapter in serie.GetChapters(count)
                    select new Chapter(a_serie, chapter.URL, chapter.Title)).ToArray();

    int total = toreport.Length;

    if (m_slow_chapters)
    {
        // Slow path: split the chapters into pages of m_items_per_page and
        // deliver them from parallel workers, simulating a paginated server.
        List<List<Chapter>> listlist = new List<List<Chapter>>();
        while (toreport.Any())
        {
            var part = toreport.Take(m_items_per_page).ToList();
            toreport = toreport.Skip(m_items_per_page).ToArray();
            listlist.Add(part);
        }

        // (page-index, index-within-page, chapter) tuples; the two ordering
        // keys let workers rebuild original order from the unordered bag.
        ConcurrentBag<Tuple<int, int, Chapter>> chapters = new ConcurrentBag<Tuple<int, int, Chapter>>();
        // NOTE(review): exc is read and written by parallel workers with no
        // synchronization; under a race the injected exception could fire more
        // than once, or a worker could report after the throw. Looks tolerated
        // for test purposes — confirm.
        bool exc = false;

        Parallel.ForEach(listlist,
            new ParallelOptions()
            {
                MaxDegreeOfParallelism = MaxConnectionsPerServer
            },
            (list) =>
            {
                foreach (var el in list)
                {
                    chapters.Add(new Tuple<int, int, Chapter>(listlist.IndexOf(list), list.IndexOf(el), el));
                }

                // Per-page throttle/delay, same as the method-level one above.
                Limiter.Aquire(a_serie);
                try
                {
                    Sleep();
                }
                finally
                {
                    Limiter.Release(a_serie);
                }

                // Snapshot of everything downloaded so far, in original order.
                var result = (from s in chapters
                              orderby s.Item1, s.Item2
                              select s.Item3).ToList();

                // After the injected exception has fired, later workers stop
                // reporting progress.
                if (gen_exc)
                {
                    if (exc)
                    {
                        return;
                    }
                }

                a_progress_callback(
                    result.Count * 100 / total,
                    result);

                // Deliberate failure injection: the first worker to get here
                // throws once.
                if (gen_exc)
                {
                    if (!exc)
                    {
                        exc = true;
                        throw new Exception();
                    }
                }
            });
    }
    else
    {
        // Fast path: report everything at once; the injected failure (if any)
        // fires AFTER the callback here (unlike DownloadSeries' fast path).
        a_progress_callback(100, toreport);

        if (gen_exc)
        {
            throw new Exception();
        }
    }
}
// Downloads the serie list for a_server, reporting progress through
// a_progress_callback as (percent-complete, series-so-far-in-order).
// Fault injection via magic substrings in the server name:
//   "error series none" - throws before producing any series;
//   "error series few"  - throws partway through reporting.
internal override void DownloadSeries(Server a_server, Action<int, IEnumerable<Serie>> a_progress_callback)
{
    // Throttle through the shared limiter while simulating network latency.
    Limiter.Aquire(a_server);
    try
    {
        Sleep();
    }
    finally
    {
        Limiter.Release(a_server);
    }

    a_server.State = ServerState.Downloading;

    // Deliberate failure injection: fail before any series are produced.
    if (a_server.Name.Contains("error series none"))
    {
        throw new Exception();
    }

    Debug.Assert(a_server.Name == m_name);

    bool gen_exc = a_server.Name.Contains("error series few");

    var toreport = (from serie in m_series
                    select new Serie(a_server, serie.URL, serie.Title)).ToArray();

    // NOTE(review): exc is read and written by parallel workers with no
    // synchronization; under a race the injected exception could fire more
    // than once. Looks tolerated for test purposes — confirm.
    bool exc = false;

    int total = toreport.Length;

    if (m_slow_series)
    {
        // Slow path: split the series into pages of m_items_per_page and
        // deliver them from parallel workers, simulating a paginated server.
        List<List<Serie>> listlist = new List<List<Serie>>();
        while (toreport.Any())
        {
            var part = toreport.Take(m_items_per_page).ToList();
            toreport = toreport.Skip(m_items_per_page).ToArray();
            listlist.Add(part);
        }

        // (page-index, index-within-page, serie) tuples; the two ordering
        // keys let workers rebuild original order from the unordered bag.
        ConcurrentBag<Tuple<int, int, Serie>> series = new ConcurrentBag<Tuple<int, int, Serie>>();

        Parallel.ForEach(listlist,
            new ParallelOptions()
            {
                MaxDegreeOfParallelism = MaxConnectionsPerServer
            },
            (list) =>
            {
                foreach (var el in list)
                {
                    series.Add(new Tuple<int, int, Serie>(listlist.IndexOf(list), list.IndexOf(el), el));
                }

                // Per-page throttle/delay, same as the method-level one above.
                Limiter.Aquire(a_server);
                try
                {
                    Sleep();
                }
                finally
                {
                    Limiter.Release(a_server);
                }

                // Snapshot of everything downloaded so far, in original order.
                var result = (from s in series
                              orderby s.Item1, s.Item2
                              select s.Item3).ToList();

                // After the injected exception has fired, later workers stop
                // reporting progress.
                if (exc)
                {
                    if (gen_exc)
                    {
                        return;
                    }
                }

                a_progress_callback(
                    result.Count * 100 / total,
                    result);

                // Deliberate failure injection: the first worker to get here
                // throws once.
                if (!exc)
                {
                    if (gen_exc)
                    {
                        exc = true;
                        throw new Exception();
                    }
                }
            });
    }
    else
    {
        // Fast path: the injected failure (if any) fires BEFORE the callback
        // here (unlike DownloadChapters' fast path, which reports first).
        if (gen_exc)
        {
            throw new Exception();
        }

        a_progress_callback(100, toreport);
    }
}