/// <summary>
/// Builds (and caches) the <see cref="MetaData"/> object for this novel:
/// title, cover image and placeholder fields the site does not expose.
/// </summary>
/// <returns>The cached or newly scraped MetaData instance.</returns>
public override MetaData GetMetaData() // MetaData done
{
    // Cached: only scrape once per instance.
    if (mdata != null)
        return mdata;

    ADLUpdates.CallLogUpdate("Creating MetaData Object");
    pageEnumerator.Reset();
    Dictionary<string, LinkedList<HtmlNode>> baseInfo =
        pageEnumerator.GetElementsByClassNames(new string[] { "entry-title", "thumb" });

    mdata = new MetaData();
    this.mdata.url = this.url.ToString();
    mdata.name = baseInfo["entry-title"].First().InnerText;
    mdata.author = "www.asianhobbyist.com"; // site exposes no author; use the host as placeholder
    mdata.type = "unknown";                 // fixed: was misspelled "uknown" (genre below already used "unknown")
    mdata.genre = "unknown";
    mdata.rating = "-1";

    // The cover image is lazy-loaded; the real URL lives in data-lazy-src.
    string x = baseInfo["thumb"].First().ChildNodes[1].Attributes["data-lazy-src"].Value;
    //x = x.Remove(x.IndexOf('?'));
    GenerateHeaders();
    mdata.cover = webClient.DownloadData(x);

    pageEnumerator.Reset();
    baseInfo.Clear();
    ADLUpdates.CallLogUpdate($"Got MetaData Object for {mdata.name} by {mdata.author}");
    sU(taskIndex, $"Got MetaData Object for {mdata.name} by {mdata.author}");
    return mdata;
}
/// <summary>
/// Entry point: parses the command line and dispatches to the novel or anime
/// downloader based on the mn flag, inferring the flag from the website when absent.
/// </summary>
/// <param name="arguments">Raw command-line arguments.</param>
/// <param name="ti">Task index for status updates, or -1 when none.</param>
/// <param name="u">Optional status callback paired with ti.</param>
public Main(string[] arguments, int ti = -1, Action<int, string> u = null)
{
    ArgumentObject args = new ArgumentObject(arguments);

    // Keep resolving until the mn flag routes us to a downloader; searchMN may
    // rewrite args, after which we dispatch again (replaces the original goto).
    while (true)
    {
        if (args.arguments.mn == "nvl")
        {
            NovelDownload(args.arguments, ti, u);
            return;
        }

        if (args.arguments.mn == "ani")
        {
            AnimeDownload(args.arguments, ti, u);
            return;
        }

        // mn flag missing/unrecognised: try to infer it from the website.
        if (!searchMN(ref args))
        {
            u?.Invoke(ti, "Error: could not parse command (Failure to parse website to ani/nvl flag.. you can retry with ani/nvl flag)");
            ADLUpdates.CallError(new Exception("Error: Could not parse command (mn selector)"));
            return;
        }
        // searchMN succeeded and updated args — loop to dispatch.
    }
}
/// <summary>
/// Builds (and caches) the <see cref="MetaData"/> object for this novel by
/// scraping the wuxiaworld book page, then downloads the cover image.
/// </summary>
/// <returns>The cached or newly scraped MetaData instance (fields may be null if scraping failed).</returns>
public override MetaData GetMetaData()
{
    // Cached: only scrape once per instance.
    if (mdata != null)
        return mdata;

    pageEnumerator.Reset();
    Dictionary<string, LinkedList<HtmlNode>> baseInfo =
        pageEnumerator.GetElementsByClassNames(new string[] { "book-name", "author", "book-state", "book-catalog", "score" });

    mdata = new MetaData();
    this.mdata.url = this.url.ToString();
    try
    {
        mdata.name = baseInfo["book-name"].First().InnerText.DeleteFollowingWhiteSpaceA();
        mdata.author = baseInfo["author"].First().InnerText.SkipPreceedingAndChar(':').Sanitize();
        mdata.type = baseInfo["book-state"].First().InnerText.SkipPreceedingAndChar(' ').DeleteFollowingWhiteSpaceA().Sanitize();
        mdata.genre = baseInfo["book-catalog"].First().InnerText.DeleteFollowingWhiteSpaceA().Sanitize();
        mdata.rating = baseInfo["score"].First().InnerText.Sanitize();
    }
    catch
    {
        updateStatus(taskIndex, "Failed to load some values, failed");
    }

    // Guard fix: when the scrape above failed, mdata.name is null and the
    // Replace chain would previously throw NullReferenceException here.
    if (mdata.name != null)
        mdata.cover = webClient.DownloadData($"https://img.wuxiaworld.co/BookFiles/BookImages/{mdata.name.Replace(' ', '-').Replace('\'', '-')}.jpg");

    pageEnumerator.Reset();
    baseInfo.Clear();
    ADLUpdates.CallLogUpdate($"Got MetaData Object for {mdata.name} by {mdata.author}");
    sU(taskIndex, $"Got MetaData Object for {mdata.name} by {mdata.author}");
    return mdata;
}
/// <summary>
/// Get general information about the novel, cover, title, author, etc
/// </summary>
/// <returns></returns>
public override MetaData GetMetaData()
{
    // Cached: only scrape once per instance.
    if (mdata != null) { return(mdata); }
    pageEnumerator.Reset();
    Dictionary <string, LinkedList <HtmlNode> > baseInfo = pageEnumerator.GetElementsByClassNames(new string[] { "book-img", "book-info", "total" });
    // NOTE(review): "//span" / "//a" XPath queries on a node search the whole
    // document, not just the "total" subtree (HtmlAgilityPack behaviour) — the
    // positional indexes below rely on the page's overall ordering.
    HtmlNode[] t = baseInfo["total"].First().SelectNodes("//span[@class=\"blue\"]").ToArray();
    HtmlNode[] to = baseInfo["total"].First().SelectNodes("//a[@class=\"red\"]").ToArray();
    mdata = new MetaData();
    this.mdata.url = this.url.ToString();
    try
    {
        mdata.name = baseInfo["book-info"].First().SelectSingleNode("//h1").InnerText;
        mdata.author = t[0].InnerText;  // assumes first blue span is the author — TODO confirm against live page
        mdata.type = t[1].InnerText;    // assumes second blue span is the status/type — TODO confirm
        mdata.genre = to[0].InnerText;
        mdata.rating = " ";             // site exposes no rating here
    }
    catch { updateStatus(taskIndex, "Failed to load some values, failed"); }
    // Presumably the second <img src> on the page is the cover — verify against site layout.
    mdata.cover = webClient.DownloadData(baseInfo["book-img"].First().SelectNodes("//img/@src").ToArray()[1].Attributes.ToArray()[0].Value);
    pageEnumerator.Reset();
    baseInfo.Clear();
    ADLUpdates.CallUpdate($"Got MetaData Object for {mdata.name} by {mdata.author}", false);
    return(mdata);
}
/// <summary>
/// Builds (and caches) the <see cref="MetaData"/> object for this novel from
/// the page's title/info/book elements, then downloads the cover.
/// </summary>
/// <returns>The cached or newly scraped MetaData instance.</returns>
public override MetaData GetMetaData()
{
    // Reuse the cached object when we already scraped this page.
    if (mdata != null)
        return mdata;

    ADLUpdates.CallUpdate("Creating MetaData Object", false);
    pageEnumerator.Reset();
    Dictionary<string, LinkedList<HtmlNode>> baseInfo =
        pageEnumerator.GetElementsByClassNames(new string[] { "title", "info", "book" });

    mdata = new MetaData();
    this.mdata.url = this.url.ToString();
    mdata.name = baseInfo["title"].First().InnerText;

    // The info element is colon-delimited; pick the author/genre/type fields out.
    string[] infoParts = baseInfo["info"].First().InnerText.Split(":");
    mdata.author = infoParts[1].Replace("Genre", string.Empty);
    mdata.type = infoParts.Last();
    mdata.genre = infoParts[2];
    mdata.rating = "-1";

    // Cover path is site-relative; prepend scheme + host.
    string coverUrl = $"http://{url.Host}{Regex.Match(baseInfo["book"].First().OuterHtml, @"<img[^>]+src=""([^"">]+)""").Groups[1].Value}";
    //x = x.Remove(x.IndexOf('?'));
    GenerateHeaders();
    mdata.cover = webClient.DownloadData(coverUrl);

    pageEnumerator.Reset();
    baseInfo.Clear();
    ADLUpdates.CallUpdate($"Got MetaData Object for {mdata.name} by {mdata.author}", false);
    return mdata;
}
/// <summary>
/// Lists the search hits on the console and prompts the user for a command:
/// "select {n}" returns that hit's video URL, "page {n}" updates np and
/// returns the sentinel "CNT" so the caller re-queries.
/// </summary>
/// <param name="sj">Current page of search results.</param>
/// <param name="np">Page number, updated when the user pages.</param>
/// <returns>A video URL, or "CNT" to request another page.</returns>
private string SearchPrompt(SearchReq sj, ref int np)
{
    // Show every hit with its rating, tags and a 60-char description preview.
    for (int idx = 0; idx < sj.actualHits.Count; idx++)
    {
        ADLUpdates.CallLogUpdate($"{idx} -- {sj.actualHits[idx].name} | Ratings: {sj.actualHits[idx].GetRating()}/10\n tags:{sj.actualHits[idx].tagsAsString()}\n desc:{new string(sj.actualHits[idx].description.Replace("<p>", string.Empty).Replace("</p>", string.Empty).Replace("\n", string.Empty).Take(60).ToArray())}\n\n");
    }
    ADLUpdates.CallLogUpdate($"\nCommands: \n page {{page}}/{sj.nbPages}\n select {{episode num}}");

    // Re-prompt until a recognised command arrives (replaces the original goto).
    while (true)
    {
        String[] input = Console.ReadLine().ToLower().Split(' ');
        switch (input[0])
        {
            case "select":
                videoInfo = new Constructs.Video() { hentai_video = new HentaiVideo() { slug = $"https://hanime.tv/videos/hentai/{sj.actualHits[int.Parse(input[1])].slug}" } };
                ADLUpdates.CallThreadChange(false);
                return $"https://hanime.tv/videos/hentai/{sj.actualHits[int.Parse(input[1])].slug}";
            case "page":
                Console.Clear();
                np = int.Parse(input[1]);
                return "CNT";
        }
    }
}
/// <summary>
/// Sets the request headers, then pulls the anime's info and episode list from
/// the twist.moe API (Brotli-compressed JSON).
/// </summary>
public override void GenerateHeaders()
{
    whc = new WebHeaderCollection();
    whc.Add("DNT", "1");
    whc.Add("Sec-Fetch-Dest", "document");
    whc.Add("Sec-Fetch-Site", "none");
    whc.Add("Accept", "video/webm,video/ogg,video/*;q=0.9,application/ogg;q=0.7,audio/*;q=0.6,*/*;q=0.5");

    //Get anime slug to use for api
    ADLUpdates.CallLogUpdate("Getting anime title and episode list from api.twist.moe");
    string k = ao.term.TrimToSlash(keepSlash: false).SkipCharSequence("https://twist.moe/a/".ToCharArray());
    string uri = $"https://api.twist.moe/api/anime/{k}";

    wRequest = (HttpWebRequest)WebRequest.Create(uri);
    wRequestSet();
    string decodedContent;
    // Fix: responses were never disposed, leaking the underlying connections.
    using (WebResponse wb = wRequest.GetResponse())
        decodedContent = M3U.DecryptBrotliStream(wb.GetResponseStream());
    info = JsonSerializer.Deserialize<TwistMoeAnimeInfo>(decodedContent);

    // Second call: the per-episode source list.
    wRequest = (HttpWebRequest)WebRequest.Create($"https://api.twist.moe/api/anime/{k}/sources");
    wRequestSet();
    using (WebResponse wb = wRequest.GetResponse())
        decodedContent = M3U.DecryptBrotliStream(wb.GetResponseStream());
    info.episodes = JsonSerializer.Deserialize<List<Episode>>(decodedContent);
}
/// <summary>
/// Creates a manga download instance: validates the status-callback pairing,
/// prepares the web client, optionally pre-loads the page, and sets up the archive.
/// </summary>
/// <param name="args">Parsed arguments; args.term is the target URL.</param>
/// <param name="taskIndex">Task slot for status updates, or -1 when none.</param>
/// <param name="act">Status callback paired with taskIndex (both set or both absent).</param>
public MangaBase(argumentList args, int taskIndex, Action<int, string> act)
{
    // taskIndex and act travel together: a real slot with a callback, or the
    // -1 sentinel with none — anything else is a caller bug.
    bool validPair = (taskIndex > -1 && act != null) || (taskIndex == -1 && act == null);
    if (!validPair)
        throw new Exception("Invalid statusUpdate args");

    this.taskIndex = taskIndex;
    this.updateStatus = act;

    ADLUpdates.CallLogUpdate("Creating Manga Download Instance");
    this.url = new Uri(args.term);
    webClient = new WebClient();
    GenerateHeaders();

    // Pre-load the landing page only when downloading from a valid URI.
    if (args.d && args.term.IsValidUri())
    {
        string html = webClient.DownloadString(url);
        LoadPage(html);
        html = null;
    }

    this.args = args;
    ADLUpdates.CallLogUpdate("Generating ADL Archive");
    archive = new ArchiveManager() { args = args };
}
/// <summary>
/// Builds (and caches) the <see cref="MetaData"/> object for this fic:
/// title, author, genre and cover scraped from the page.
/// </summary>
/// <returns>The cached or newly scraped MetaData instance.</returns>
public override MetaData GetMetaData()
{
    // Reuse the cached object when we already scraped this page.
    if (mdata != null)
        return mdata;

    pageEnumerator.Reset();
    var baseInfo = pageEnumerator.GetElementsByClassNames(new string[] { "fic_title", "auth_name_fic", "fic_image", "fic_genre" });

    mdata = new MetaData();
    this.mdata.url = this.url.ToString();
    mdata.name = baseInfo["fic_title"].First().InnerText;
    mdata.author = baseInfo["auth_name_fic"].First().InnerText;
    mdata.type = "unknown"; // site does not expose a type
    mdata.genre = baseInfo["fic_genre"].First().InnerText;
    mdata.rating = "-1";

    // Extract the cover URL from the <img src="..."> attribute.
    string coverUrl = Regex.Match(baseInfo["fic_image"].First().OuterHtml, @"<img[^>]+src=""([^"">]+)""").Groups[1].Value;
    //x = x.Remove(x.IndexOf('?'));
    GenerateHeaders();
    mdata.cover = webClient.DownloadData(coverUrl);

    pageEnumerator.Reset();
    baseInfo.Clear();
    ADLUpdates.CallLogUpdate($"Got MetaData Object for {mdata.name} by {mdata.author}");
    sU(taskIndex, $"Got MetaData Object for {mdata.name} by {mdata.author}");
    return mdata;
}
/// <summary>
/// Builds (and caches) the MetaData object by splitting the novel-body text
/// into lines and picking fields by fixed position, then downloads the cover.
/// </summary>
/// <returns>The cached or newly scraped MetaData instance.</returns>
public override MetaData GetMetaData()
{
    // Cached: only scrape once per instance.
    if (mdata != null) { return(mdata); }
    pageEnumerator.Reset();
    Dictionary <string, LinkedList <HtmlNode> > baseInfo = pageEnumerator.GetElementsByClassNames(new string[] { "novel-body", "media-object" });
    mdata = new MetaData();
    this.mdata.url = this.url.ToString();
    // Collapse trailing whitespace and duplicate newlines, then split: the
    // fixed indexes below (1 = title, 7 = author, 10 = genre) assume the
    // site's current novel-body layout — TODO confirm against a live page.
    string[] novelInfo = baseInfo["novel-body"].First().InnerText.DeleteFollowingWhiteSpaceA().DeleteConDuplicate('\n').Split("\n");
    mdata.name = novelInfo[1];
    mdata.author = novelInfo[7];
    mdata.type = "unknown";
    mdata.genre = novelInfo[10];
    mdata.rating = "-1";
    // First '\r'-separated chunk of the media-object markup is expected to hold the <img> tag.
    novelInfo = baseInfo["media-object"].First().OuterHtml.Split('\r');
    string x = Regex.Match(novelInfo[0], @"<img[^>]+src=""([^"">]+)""").Groups[1].Value;
    //x = x.Remove(x.IndexOf('?'));
    // ".jpg" is appended — presumably the scraped src lacks the extension; verify.
    mdata.cover = webClient.DownloadData($"{x}.jpg");
    pageEnumerator.Reset();
    baseInfo.Clear();
    ADLUpdates.CallUpdate($"Got MetaData Object for {mdata.name} by {mdata.author}", false);
    return(mdata);
}
/// <summary>
/// Pulls the book's metadata and chapter list from the web via the configured
/// downloader, reporting progress through the optional status callback.
/// </summary>
/// <param name="url">Unused here; kept for interface compatibility.</param>
/// <returns>Always true.</returns>
public bool ParseBookFromWeb(string url)
{
    // Metadata first — the chapter fetch and file location both depend on it.
    statusUpdate?.Invoke(ti, $"{metaData?.name} Getting MetaData");
    metaData = dBase.GetMetaData();

    statusUpdate?.Invoke(ti, $"{metaData?.name} Getting Chapter links");
    chapters = dBase.GetChapterLinks();

    fileLocation = $"{chapterDir}/{metaData.name}";
    ADLUpdates.CallLogUpdate($"Downloading Chapters for {metaData.name}", ADLUpdates.LogLevel.TaskiOnly);
    return true;
}
/// <summary>
/// Queries the htv search API for ao.term. When promptUser is true the user
/// picks a hit interactively (with paging); otherwise the first hit is taken.
/// </summary>
/// <param name="promptUser">Interactively prompt instead of taking the first hit.</param>
/// <returns>The selected video URL, or null when nothing matched or the API kept failing.</returns>
public override string Search(bool promptUser = true)
{
    int np = 0;
    int failures = 0; // fix: the original `catch { goto a; }` retried forever — a dead API hung the process
    string a;
a:
    try
    {
        HttpWebRequest httpWebRequest = (HttpWebRequest)WebRequest.Create("https://search.htv-services.com/");
        httpWebRequest.ContentType = "application/json";
        httpWebRequest.Method = "POST";
        string json = $"{{\"search_text\":\"{ao.term}\",\"tags\":[],\"tags_mode\":\"AND\",\"brands\":[],\"blacklist\":[],\"order_by\":\"released_at_unix\",\"ordering\":\"asc\",\"page\":{np.ToString()}}}";
        using (StreamWriter sw = new StreamWriter(httpWebRequest.GetRequestStream()))
            sw.Write(json);
        HttpWebResponse response = (HttpWebResponse)httpWebRequest.GetResponse();
        using (StreamReader sr = new StreamReader(response.GetResponseStream()))
            a = sr.ReadToEnd();
        SearchReq sj = JsonSerializer.Deserialize<SearchReq>(a);
        if (sj.actualHits.Count <= 0)
        {
            ADLUpdates.CallLogUpdate($"No videos matching search query.");
            return null;
        }
        ADLUpdates.CallLogUpdate($"Hits: {sj.actualHits.Count} {np}/{sj.nbPages} page");
        if (promptUser)
        {
            ADLUpdates.CallThreadChange(true);
            while (true)
            {
                string searchResponse = SearchPrompt(sj, ref np);
                if (searchResponse == "CNT")
                {
                    goto a; // user paged; re-query with the new np
                }
                return searchResponse;
            }
        }
        else
        {
            return $"https://hanime.tv/videos/hentai/{sj.actualHits[0].slug}"; // Else return first video returned.
        }
    }
    catch
    {
        // Transient network/JSON failure: retry a bounded number of times,
        // then give up instead of spinning forever.
        if (++failures < 5)
            goto a;
        ADLUpdates.CallLogUpdate("Search failed repeatedly; giving up.");
        return null;
    }
}
/// <summary>
/// Opens (dc == true) or creates the backing book file and attaches a zip
/// archive in update mode; failures are routed to the ADL error channel.
/// </summary>
/// <param name="loc">Path of the archive file.</param>
/// <param name="dc">True to open an existing file, false to create a new one.</param>
public void InitializeZipper(string loc, bool dc = false)
{
    FileMode mode = dc ? FileMode.Open : FileMode.Create;
    try
    {
        bookStream = new FileStream(loc, mode, FileAccess.ReadWrite, FileShare.ReadWrite);
        zapive = new ZipArchive(bookStream, ZipArchiveMode.Update, true);
    }
    catch
    {
        // Best-effort: report rather than crash the caller.
        ADLUpdates.CallError(new Exception("Failed to initialize stream."));
    }
}
/// <summary>
/// Instantiates the site-specific novel downloader for the current site, then
/// pulls metadata and chapter links from the web.
/// </summary>
/// <param name="url">The novel's page URL.</param>
/// <returns>False when the site is unsupported or unknown; true otherwise.</returns>
public bool ParseBookFromWeb(string url)
{
    statusUpdate(ti, $"{metaData?.name} Creating Novel Object");

    // Pick the downloader implementation for this site; unsupported/unknown
    // sites abort early.
    DownloaderBase dbase;
    switch (site)
    {
        case Site.AsianHobbyist: dbase = new AsianHobbyist(url, ti, statusUpdate); break;
        case Site.wuxiaWorldA:   dbase = new dWuxiaWorld(url, ti, statusUpdate); break;
        case Site.wuxiaWorldB:   dbase = new cWuxiaWorld(url, ti, statusUpdate); break;
        case Site.ScribbleHub:   dbase = new cScribbleHub(url, ti, statusUpdate); break;
        case Site.NovelFull:     dbase = new cNovelFull(url, ti, statusUpdate); break;
        case Site.NovelHall:     dbase = new NovelHall(url, ti, statusUpdate); break;
        case Site.Error:
            ADLUpdates.CallUpdate("Error: This site doesn't seem to be supported.");
            return false;
        default:
            ADLUpdates.CallUpdate("Unknown error");
            return false;
    }

    statusUpdate(ti, $"{metaData?.name} Getting MetaData");
    metaData = dbase.GetMetaData();

    statusUpdate(ti, $"{metaData?.name} Getting Chapter links");
    chapters = dbase.GetChapterLinks();

    fileLocation = $"{chapterDir}/{metaData.name}";
    ADLUpdates.CallUpdate($"Downloading Chapters for {metaData.name}");
    return true;
}
/// <summary>
/// Creates a novel downloader from a parsed argument list: stores the status
/// callback, fetches the target page (args.term) and loads it into the parser.
/// </summary>
/// <param name="args">Parsed arguments; args.term is the novel URL.</param>
/// <param name="taskIndex">Task slot for status updates, or -1 when none.</param>
/// <param name="act">Status callback paired with taskIndex (both set or both absent).</param>
/// <exception cref="Exception">Thrown when taskIndex and act are mismatched.</exception>
public DownloaderBase(argumentList args, int taskIndex, Action<int, string> act)
{
    // Consistency fix: validate the (taskIndex, act) pairing exactly like the
    // (string url, ...) overload does, instead of silently accepting mismatches.
    if (taskIndex > -1 && act != null || taskIndex == -1 && act == null)
    {
        this.taskIndex = taskIndex;
        this.updateStatus = act;
    }
    else
        throw new Exception("Invalid statusUpdate args");

    ao = args;
    ADLUpdates.CallLogUpdate("Creating Novel Download Instance");
    this.url = new Uri(args.term);
    webClient = new WebClient();
    GenerateHeaders();

    // Fetch and parse the landing page up front.
    string html = webClient.DownloadString(args.term);
    LoadPage(html);
    html = null;
}
/// <summary>
/// Creates a novel downloader for a raw URL: stores the status callback,
/// fetches the page and loads it into the parser.
/// </summary>
/// <param name="url">The novel's page URL.</param>
/// <param name="taskIndex">Task slot for status updates, or -1 when none.</param>
/// <param name="act">Status callback paired with taskIndex (both set or both absent).</param>
/// <exception cref="Exception">Thrown when taskIndex and act are mismatched.</exception>
public DownloaderBase(string url, int taskIndex, Action<int, string> act)
{
    // taskIndex and act must be supplied together: a real slot with a
    // callback, or the -1 sentinel with none.
    bool validPair = (taskIndex > -1 && act != null) || (taskIndex == -1 && act == null);
    if (!validPair)
        throw new Exception("Invalid statusUpdate args");

    this.taskIndex = taskIndex;
    this.updateStatus = act;

    ADLUpdates.CallUpdate("Creating Novel Download Instance", false);
    this.url = new Uri(url);
    webClient = new WebClient();
    GenerateHeaders();

    // Fetch and parse the landing page up front.
    string html = webClient.DownloadString(url);
    LoadPage(html);
    html = null;
}
/// <summary>
/// Walks the paginated chapter list (50 entries per page) collecting chapter
/// names and absolute links, stopping either when a page is empty or when a
/// duplicate link appears (the site wraps around past the last page).
/// </summary>
/// <param name="sort">Unused here; kept for interface compatibility.</param>
/// <returns>All discovered chapters in site order.</returns>
public override Chapter[] GetChapterLinks(bool sort = false)
{
    int idx = 0;
    List<Chapter> chaps = new List<Chapter>();
    Regex reg = new Regex("href=\"(.*?)\"");
    ADLUpdates.CallLogUpdate($"Getting Chapter Links for {mdata.name}");
    while (true)
    {
        idx++;
        MovePage($"{mdata.url}?page={idx.ToString()}&per-page=50"); // limited to 50
        Dictionary<string, LinkedList<HtmlNode>> chapterInfo = pageEnumerator.GetElementsByClassNames(new string[] { "list-chapter" });
        if (chapterInfo["list-chapter"].Count <= 0)
        {
            break; // past the last page
        }
        IEnumerator<HtmlNode> a = chapterInfo["list-chapter"].GetEnumerator();
        while (a.MoveNext())
        {
            LoadPage(a.Current.InnerHtml);
            foreach (HtmlNode ele in page.DocumentNode.SelectNodes("//li"))
            {
                Chapter ch = new Chapter(this)
                {
                    name = ele.InnerText.SkipCharSequence(new char[] { ' ' }),
                    chapterLink = new Uri("https://" + url.Host + reg.Match(ele.InnerHtml).Groups[1].Value)
                };
                // A repeated link means the site wrapped around — we are done.
                if (chaps.Where(x => x.chapterLink == ch.chapterLink).Count() == 0)
                {
                    chaps.Add(ch);
                }
                else
                {
                    goto exit;
                }
            }
        }
    }
exit:
    ADLUpdates.CallLogUpdate($"Found {chaps.Count} Chapters for {mdata.name}");
    // Fixed: this status line previously reported "Got MetaData Object ..."
    // (copy/paste from GetMetaData); it should report the chapter result.
    sU(taskIndex, $"Found {chaps.Count} Chapters for {mdata.name}");
    return chaps.ToArray();
}
/// <summary>
/// Exports the downloaded chapters to an EPUB at <paramref name="location"/>,
/// sorting them by chapter number first when the download order lost trust.
/// </summary>
/// <param name="location">Directory/path the EPUB is written to.</param>
public void ExportToEPUB(string location)
{
    //SORT
    if (sortedTrustFactor)
    {
        statusUpdate?.Invoke(ti, "Trust Lost, Sorting Chapters.");
        ADLCore.Alert.ADLUpdates.CallLogUpdate("Trust Lost, discrepancy in chapter numbering. Sorting Chapters.", ADLUpdates.LogLevel.High);
        // Replaced a hand-rolled O(n^2) exchange sort with Array.Sort; the
        // original's swap condition produced ascending chapterNum order, which
        // this comparison preserves.
        Array.Sort(chapters, (l, r) => l.chapterNum.CompareTo(r.chapterNum));
    }
    statusUpdate?.Invoke(ti, $"{metaData?.name} Exporting to EPUB");
    Epub.Epub e = new Epub.Epub(metaData.name, metaData.author, new Image() { bytes = metaData.cover }, new Uri(metaData.url));
    e.AddPage(CreditsPage());
    foreach (Chapter chp in chapters)
    {
        statusUpdate?.Invoke(ti, $"{metaData?.name} Generating page for {chp.name.Replace('_', ' ')}");
        ADLUpdates.CallLogUpdate($"{metaData?.name} Generating page for {chp.name.Replace('_', ' ')}");
        // A chapter carries either text or a single image — pass whichever is present.
        e.AddPage(Page.AutoGenerate(chp.image == null ? chp.text : null, chp.name.Replace('_', ' '), chp.image != null ? new Image[] { Image.GenerateImageFromByte(chp.image, "IMG_" + chp.name) } : null));
    }
    e.CreateEpub(new OPFMetaData(this.metaData.name, this.metaData.author, "Chay#3670", "null", DateTime.Now.ToString()));
    statusUpdate?.Invoke(ti, $"{metaData?.name} EPUB Created!");
    ADLUpdates.CallLogUpdate($"{metaData?.name} EPUB Created!", ADLUpdates.LogLevel.Middle);
    e.ExportToEpub(location);
}
/// <summary>
/// Called when a download thread finishes: closes its archive slot and either
/// releases the slot for reuse or, when every thread is done, reports completion.
/// </summary>
/// <param name="i">Index of the finished thread.</param>
private void Book_onThreadFinish(int i)
{
    ZipArchiveFinish(i);
    finishedThreads++;

    // Not the last thread yet: hand the slot back to the pool and return.
    if (finishedThreads < limiter)
    {
        UnlockThread(i);
        return;
    }

    // All download threads completed — stop timing and announce the result.
    sw.Stop();
    if (statusUpdate != null)
    {
        statusUpdate?.CommitMessage(ti, $"Done! Download of {metaData.name} finished in {sw.Elapsed}");
    }
    else
    {
        ADLUpdates.CallLogUpdate($"Done! Download of {metaData.name} finished in {sw.Elapsed}");
    }
    dwnldFinished = true;
    onDownloadFinish?.Invoke();
}
/// <summary>
/// Scrapes the video page's embedded NUXT state JSON and rewrites
/// <paramref name="vid"/>'s slug to the m3u8 manifest URL of its first stream.
/// </summary>
/// <param name="vid">Video whose page is fetched; its slug is replaced with the manifest link.</param>
/// <returns>The manifest URL now stored in vid.slug.</returns>
public override string GetDownloadUri(HentaiVideo vid)
{
    ADLUpdates.CallLogUpdate($"Extracting Download URL for {vid.slug}");
    string pageHtml = webClient.DownloadString(vid.slug);

    // The page embeds its state as <script>window.__NUXT__=...;</script>.
    Match stateJson = new Regex("(?<=<script>window\\.__NUXT__=)(.*)(?=;</script>)").Match(pageHtml);

    rootObj = JsonSerializer.Deserialize<Root>(stateJson.Value);
    rootObj.state.data.video.hentai_video.name = rootObj.state.data.video.hentai_video.name.RemoveSpecialCharacters();
    rootObj.linkToManifest = $"https://weeb.hanime.tv/weeb-api-cache/api/v8/m3u8s/{rootObj.state.data.video.videos_manifest.servers[0].streams[0].id.ToString()}.m3u8";
    vid.slug = rootObj.linkToManifest;

    // First call caches the whole video object; later calls only refresh the inner record.
    if (videoInfo == null)
        videoInfo = rootObj.state.data.video;
    else
        videoInfo.hentai_video = rootObj.state.data.video.hentai_video;

    return vid.slug;
}
//episodes to download: 0-12, 1-12, 5-6 etc.
//TODO: Implement download ranges for GoGoStream and TwistMoe (and novel downloaders)
//key MjY3MDQxZGY1NWNhMmIzNmYyZTMyMmQwNWVlMmM5Y2Y= -> search for atob(e) and floating-player
/// <summary>
/// Creates a TwistMoe downloader; all heavy setup happens in the base
/// constructor, this only reports progress.
/// </summary>
/// <param name="args">Parsed arguments; args.term is the anime URL.</param>
/// <param name="ti">Task slot for status updates, or -1 when none.</param>
/// <param name="u">Optional status callback paired with ti.</param>
public TwistMoe(argumentList args, int ti = -1, Action<int, string> u = null)
    : base(args, ti, u, Site.TwistMoe)
{
    ADLUpdates.CallLogUpdate("Beginning instantiation of TwistMoe Object");
    updateStatus?.Invoke(taskIndex, "Proceeding with setup");
}
/// <summary>
/// Resolves the m3u8 manifest for the current video, picks the output
/// directory from the flags, then downloads every segment — optionally
/// serving them to a local stream at the same time — and finally chains to
/// the next episode of the same series when continuous mode is on.
/// </summary>
/// <param name="path">Used as the video slug when no video is selected yet.</param>
/// <param name="mt">Forwarded to the next episode's download in continuous mode.</param>
/// <param name="continuos">Continue with the next episode when it belongs to the same series.</param>
/// <returns>False when aborted via allStop; true on completion.</returns>
public override bool Download(string path, bool mt, bool continuos)
{
    // No video selected yet -> treat `path` as the slug to resolve.
    GetDownloadUri(videoInfo == null ? new HentaiVideo { slug = path } : videoInfo.hentai_video);

    // Choose the output directory: default CWD layout, android export layout,
    // or a brand-named folder under the export path.
    if (!ao.l)
    {
        downloadTo = $"{Directory.GetCurrentDirectory()}{Path.DirectorySeparatorChar}HAnime{Path.DirectorySeparatorChar}{videoInfo.hentai_video.name.TrimIntegrals()}{Path.DirectorySeparatorChar}";
    }
    else if (ao.android)
    {
        downloadTo = Path.Combine(ao.export, "HAnime", videoInfo.hentai_video.name.TrimIntegrals());
    }
    else
    {
        downloadTo = Path.Combine(ao.export, videoInfo.hentai_video.brand);
    }
    Directory.CreateDirectory(downloadTo);

    M3U m3 = new M3U(webClient.DownloadString(rootObj.linkToManifest));
    Byte[] b;
    int l = m3.Size;
    double prg; // NOTE(review): only assigned in the non-stream branch and never read afterwards
    updateStatus?.Invoke(taskIndex, $"Beginning download of {videoInfo.hentai_video.name}");
    ADLUpdates.CallLogUpdate($"Beginning download of {videoInfo.hentai_video.name}");

    if (ao.stream)
    {
        // Streaming mode: publish each segment to the local stream server as
        // well as appending it to the output file.
        startStreamServer();
        while ((b = m3.getNext()) != null)
        {
            if (allStop) { invoker(); return(false); } // cooperative abort
            updateStatus?.Invoke(taskIndex, $"{videoInfo.hentai_video.name} {Strings.calculateProgress('#', m3.location, l)}");
            ADLUpdates.CallLogUpdate($"{videoInfo.hentai_video.name} {Strings.calculateProgress('#', m3.location, l)}");
            publishToStream(b);
            mergeToMain(downloadTo + Path.DirectorySeparatorChar + videoInfo.hentai_video.name + ".mp4", b);
        }
    }
    else
    {
        // Plain download: append each segment to the output file.
        while ((b = m3.getNext()) != null)
        {
            if (allStop) { invoker(); return(false); } // cooperative abort
            prg = (double)m3.location / (double)l;
            updateStatus?.Invoke(taskIndex, $"{videoInfo.hentai_video.name} {Strings.calculateProgress('#', m3.location, l)}");
            ADLUpdates.CallLogUpdate($"{videoInfo.hentai_video.name} {Strings.calculateProgress('#', m3.location, l)}");
            mergeToMain(downloadTo + Path.DirectorySeparatorChar + videoInfo.hentai_video.name + ".mp4", b);
        }
    }

    // Continuous mode: chain to the next episode only when it belongs to the
    // same series (names match once integers are trimmed).
    if (continuos && videoInfo.next_hentai_video.name.RemoveSpecialCharacters().TrimIntegrals() == videoInfo.hentai_video.name.TrimIntegrals())
    {
        HAnime h = new HAnime(new argumentList { term = $"https://hanime.tv/videos/hentai/{videoInfo.next_hentai_video.slug}", mt = mt, export = downloadTo, cc = continuos }, taskIndex, updateStatus);
        h.Begin();
    }
    return(true);
}