/// <summary> /// Execute our search query /// </summary> /// <param name="query">Query</param> /// <returns>Releases</returns> protected override async Task <IEnumerable <ReleaseInfo> > PerformQuery(TorznabQuery query) { var releases = new List <ReleaseInfo>(); var torrentRowList = new List <CQ>(); var searchTerm = query.GetQueryString(); var searchUrl = SearchUrl; int nbResults = 0; int pageLinkCount = 0; // Check cache first so we don't query the server (if search term used or not in dev mode) if (!DevMode && !string.IsNullOrEmpty(searchTerm)) { lock (cache) { // Remove old cache items CleanCache(); // Search in cache var cachedResult = cache.Where(i => i.Query == searchTerm).FirstOrDefault(); if (cachedResult != null) { return(cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray()); } } } // Add emulated XHR request emulatedBrowserHeaders.Add("X-Requested-With", "XMLHttpRequest"); // Build our query var request = buildQuery(searchTerm, query, searchUrl); // Getting results & Store content WebClientStringResult results = await queryExec(request); fDom = results.Content; try { // Find number of results nbResults = ParseUtil.CoerceInt(Regex.Match(fDom["div.ajaxtotaltorrentcount"].Text(), @"\d+").Value); // Find torrent rows var firstPageRows = findTorrentRows(); // Add them to torrents list torrentRowList.AddRange(firstPageRows.Select(fRow => fRow.Cq())); // Check if there are pagination links at bottom Boolean pagination = (nbResults != 0); // If pagination available if (pagination) { // Calculate numbers of pages available for this search query (Based on number results and number of torrents on first page) pageLinkCount = (int)Math.Ceiling((double)nbResults / firstPageRows.Length); } else { // Check if we have a minimum of one result if (firstPageRows.Length >= 1) { // Set page count arbitrary to one pageLinkCount = 1; } else { output("\nNo result found for your query, please try another search term ...\n", "info"); // No result found for this query return(releases); } } output("\nFound " + nbResults + " result(s) in " + pageLinkCount + " page(s) for this query !"); output("\nThere are " + firstPageRows.Length + " results on the first page !"); // If we have a term used for search and pagination result superior to one if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1) { // Starting with page #2 for (int i = 2; i <= Math.Min(Int32.Parse(ConfigData.Pages.Value), pageLinkCount); i++) { output("\nProcessing page #" + i); // Request our page latencyNow(); // Build our query var pageRequest = buildQuery(searchTerm, query, searchUrl, i); // Getting results & Store content WebClientStringResult pageResults = await queryExec(pageRequest); // Assign response fDom = pageResults.Content; // Process page results var additionalPageRows = findTorrentRows(); // Add them to torrents list torrentRowList.AddRange(additionalPageRows.Select(fRow => fRow.Cq())); } } // Loop on results foreach (CQ tRow in torrentRowList) { output("\n=>> Torrent #" + (releases.Count + 1)); // Release Name string name = tRow.Find(".torrent-h3 > h3 > a").Attr("title").ToString(); output("Release: " + name); // Category string categoryID = tRow.Find(".category > img").Attr("src").Split('/').Last().ToString(); string categoryName = tRow.Find(".category > img").Attr("title").ToString(); output("Category: " + MapTrackerCatToNewznab(mediaToCategory(categoryID, categoryName)) + " (" + categoryName + ")"); // Uploader string uploader = tRow.Find(".uploader > span > a").Attr("title").ToString(); 
output("Uploader: " + uploader); // Seeders int seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find(".seeders")[0].LastChild.ToString(), @"\d+").Value); output("Seeders: " + seeders); // Leechers int leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find(".leechers")[0].LastChild.ToString(), @"\d+").Value); output("Leechers: " + leechers); // Completed int completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find(".completed")[0].LastChild.ToString(), @"\d+").Value); output("Completed: " + completed); // Comments int comments = ParseUtil.CoerceInt(Regex.Match(tRow.Find(".comments")[0].LastChild.ToString(), @"\d+").Value); output("Comments: " + comments); // Size & Publish Date string infosData = tRow.Find(".torrent-h3 > span")[0].LastChild.ToString().Trim(); IList <string> infosList = infosData.Split('-').Select(s => s.Trim()).Where(s => s != String.Empty).ToList(); // --> Size var size = ReleaseInfo.GetBytes(infosList[1].Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb")); output("Size: " + infosList[1] + " (" + size + " bytes)"); // --> Publish Date IList <string> clockList = infosList[0].Replace("Il y a", "").Split(',').Select(s => s.Trim()).Where(s => s != String.Empty).ToList(); var clock = agoToDate(clockList); output("Released on: " + clock.ToString()); // Torrent Details URL string details = tRow.Find(".torrent-h3 > h3 > a").Attr("href").ToString().TrimStart('/'); Uri detailsLink = new Uri(SiteLink + details); output("Details: " + detailsLink.AbsoluteUri); // Torrent Comments URL Uri commentsLink = new Uri(SiteLink + details + "#tab_2"); output("Comments Link: " + commentsLink.AbsoluteUri); // Torrent Download URL string download = tRow.Find(".download-item > a").Attr("href").ToString().TrimStart('/'); Uri downloadLink = new Uri(SiteLink + download); output("Download Link: " + downloadLink.AbsoluteUri); // Building release infos var release = new ReleaseInfo(); release.Category = MapTrackerCatToNewznab(mediaToCategory(categoryID, categoryName)); release.Title = name; release.Seeders = seeders; release.Peers = seeders + leechers; release.MinimumRatio = 1; release.MinimumSeedTime = 345600; release.PublishDate = clock; release.Size = size; release.Guid = detailsLink; release.Comments = commentsLink; release.Link = downloadLink; releases.Add(release); } } catch (Exception ex) { OnParseError("Error, unable to parse result \n" + ex.StackTrace, ex); } finally { // Remove our XHR request header emulatedBrowserHeaders.Remove("X-Requested-With"); } // Return found releases return(releases); }
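// Illustration of the relative-date handling above (the exact string is an assumption, only the
// splitting logic is taken from the code): a span text such as "Il y a 5 heures, 32 minutes - 1.2 Go"
// yields infosList = { "Il y a 5 heures, 32 minutes", "1.2 Go" }, so clockList becomes
// { "5 heures", "32 minutes" }, which agoToDate converts back into an absolute DateTime
// (that helper's implementation is not shown in this section).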
/// <summary>
/// Parses the torrents from the content
/// </summary>
/// <returns>The parsed torrents.</returns>
/// <param name="results">The result of the query</param>
/// <param name="alreadyFound">Number of already found torrents (used for the limit)</param>
/// <param name="limit">The limit on the number of torrents to download</param>
/// <param name="previouslyParsedOnPage">Current position in parsed results</param>
private async Task<List<ReleaseInfo>> ParseTorrentsAsync(WebClientStringResult results, int alreadyFound, int limit, int previouslyParsedOnPage)
{
    var releases = new List<ReleaseInfo>();
    var queryParams = new NameValueCollection
    {
        { "func", "getToggle" },
        { "w", "F" },
        { "pg", "0" }
    };
    try
    {
        /* The content looks like this:
         * 2\15\2\1\1727\207244\1x08 \[WebDL-720p - Eng - AJP69]\gb\2018-03-09 08:11:53\akció, kaland, sci-fi \0\0\1\191170047\1\0\Anonymous\50\0\0\\0\4\0\174\0\
         * 1\ 0\0\1\1727\207243\1x08 \[WebDL-1080p - Eng - AJP69]\gb\2018-03-09 08:11:49\akció, kaland, sci-fi \0\0\1\305729738\1\0\Anonymous\50\0\0\\0\8\0\102\0\0\0\0\1\\\
         * The first 3 items per page are the total results, the results per page, and the results on this page.
         * There is also a tail of ~4 items after the results for some reason. Looks like \1\\\
         */
        var parameters = results.Content.Split('\\');
        var torrentsThisPage = int.Parse(parameters[2]);
        var maxTorrents = Math.Min(torrentsThisPage, limit - alreadyFound);
        var rows = parameters.Skip(3) // Skip the page info
            .Select((str, index) => (index, str)) // Index each string for grouping
            .GroupBy(n => n.index / 27) // Each torrent is divided into 27 parts
            .Skip(previouslyParsedOnPage).Take(maxTorrents) // Only parse the rows we want
            // Convert the query above into a List<string>(27) in prep for parsing
            .Select(entry => entry.Select(item => item.str).ToList());
        foreach (var row in rows)
        {
            var torrentId = row[(int)TorrentParts.TorrentId];
            var downloadLink = new Uri(DownloadUrl + "?id=" + torrentId);
            var imdbId = _imdbLookup.TryGetValue(int.Parse(row[(int)TorrentParts.InternalId]), out var imdb)
                ? (long?)imdb
                : null;
            var files = int.Parse(row[(int)TorrentParts.Files]);
            var size = long.Parse(row[(int)TorrentParts.SizeBytes]);
            var seeders = int.Parse(row[(int)TorrentParts.Seeders]);
            var leechers = int.Parse(row[(int)TorrentParts.Leechers]);
            var grabs = int.Parse(row[(int)TorrentParts.Grabs]);
            var publishDate = DateTime.Parse(row[(int)TorrentParts.PublishDate]);
            var isSeasonPack = row[(int)TorrentParts.EpisodeInfo].Contains("évad");
            queryParams["id"] = torrentId;
            queryParams["now"] = DateTimeUtil.DateTimeToUnixTimestamp(DateTime.UtcNow)
                .ToString(CultureInfo.InvariantCulture);
            var filesList = (await RequestStringWithCookiesAndRetry(SearchUrl + "?" + queryParams.GetQueryString()))
                .Content;
            var firstFileName = filesList.Split(
                new[] { @"\\" }, StringSplitOptions.None)[1];
            // Delete the file extension. Many first files are either mkv or nfo.
            // Cannot confirm these are the only extensions, so generically remove any 3-character extension at the end of the section.
            firstFileName = Regex.Replace(firstFileName, @"\.\w{3}$", string.Empty);
            if (isSeasonPack)
            {
                firstFileName = Regex.Replace(
                    firstFileName, @"(?<=S\d+)E\d{2,3}", string.Empty, RegexOptions.IgnoreCase);
            }
            var category = new[] { TvCategoryParser.ParseTvShowQuality(firstFileName) };
            var release = new ReleaseInfo
            {
                Title = firstFileName,
                Link = downloadLink,
                Guid = downloadLink,
                PublishDate = publishDate,
                Files = files,
                Size = size,
                Category = category,
                Seeders = seeders,
                Peers = leechers + seeders,
                Grabs = grabs,
                MinimumRatio = 1,
                MinimumSeedTime = 172800, // 48 hours
                DownloadVolumeFactor = 1,
                UploadVolumeFactor = UploadFactorCalculator(publishDate, isSeasonPack),
                Imdb = imdbId
            };
            releases.Add(release);
        }
    }
    catch (Exception ex)
    {
        OnParseError(results.Content, ex);
    }
    return(releases);
}
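// The parser above indexes each 27-field row through a TorrentParts enum that is not shown in this
// section. A minimal sketch of what such an enum could look like follows: the member names match the
// casts above, but the numeric offsets are only inferred from the sample rows and from the sibling
// parser further down (which uses absolute offsets 4, 5, 6, 9, 13, 14, 23, 24 and 25 before the
// three-field page header is skipped), so treat them as assumptions rather than the indexer's real values.
internal enum TorrentParts
{
    InternalId = 1,   // tracker-internal series id, mapped to IMDb via _imdbLookup
    TorrentId = 2,
    EpisodeInfo = 3,  // e.g. "1x08 ", or a season marker containing "évad"
    PublishDate = 6,
    Files = 10,
    SizeBytes = 11,
    Seeders = 20,
    Leechers = 21,
    Grabs = 22
}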
private bool IsAuthorized(WebClientStringResult result) => result.Content.Contains("index.php?action=logout");
private List <ReleaseInfo> ParseResponse(TorznabQuery query, WebClientStringResult response, bool includePremium) { List <ReleaseInfo> releases = new List <ReleaseInfo>(); var torrents = CheckResponse(response); try { foreach (var torrent in torrents) { var release = new ReleaseInfo(); release.Title = (string)torrent["titulo"] + " " + (string)torrent["titulo_extra"]; // for downloading "premium" torrents you need special account if ((string)torrent["premium"] == "si") { if (includePremium) { release.Title += " [PREMIUM]"; } else { continue; } } release.Comments = new Uri(CommentsUrl + (string)torrent["id"]); release.Guid = release.Comments; release.PublishDate = DateTime.Now; if (torrent["created_at"] != null) { release.PublishDate = DateTime.Parse((string)torrent["created_at"]); } release.Category = MapTrackerCatToNewznab((string)torrent["categoria"]); release.Size = (long)torrent["size"]; release.Seeders = (int)torrent["seeders"]; release.Peers = release.Seeders + (int)torrent["leechers"]; release.Grabs = (long)torrent["snatched"]; release.InfoHash = (string)torrent["plain_info_hash"]; release.Link = new Uri(DownloadUrl + (string)torrent["id"]); var files = (JArray)JsonConvert.DeserializeObject <dynamic>((string)torrent["files_list"]); release.Files = files.Count; release.DownloadVolumeFactor = (string)torrent["freetorrent"] == "0" ? 1 : 0; release.UploadVolumeFactor = (string)torrent["doubletorrent"] == "0" ? 1 : 2; release.MinimumRatio = 1; release.MinimumSeedTime = 172800; // 48 hours releases.Add(release); } } catch (Exception ex) { OnParseError(response.Content, ex); } return(releases); }
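// Illustrative shape of one entry in the JSON response consumed by ParseResponse above. The field
// names are exactly the ones the parser reads; the values, and anything about the response wrapper,
// are invented for illustration only:
// {
//   "id": "12345",
//   "titulo": "Some.Movie.2019.1080p", "titulo_extra": "[Dual]",
//   "premium": "no", "created_at": "2019-05-01 12:00:00", "categoria": "movies_hd",
//   "size": 4831838208, "seeders": 12, "leechers": 3, "snatched": 40,
//   "plain_info_hash": "0123456789abcdef0123456789abcdef01234567",
//   "files_list": "[\"file1.mkv\"]", "freetorrent": "0", "doubletorrent": "0"
// }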
private List <ReleaseInfo> ParseTorrents(WebClientStringResult results, string episodeString, TorznabQuery query, int alreadyFound, int limit, int previouslyParsedOnPage) { var releases = new List <ReleaseInfo>(); try { var parser = new HtmlParser(); var dom = parser.ParseDocument(results.Content); var rows = dom.QuerySelectorAll(".box_torrent").Skip(previouslyParsedOnPage).Take(limit - alreadyFound); var key = ParseUtil.GetArgumentFromQueryString( dom.QuerySelector("link[rel=alternate]").GetAttribute("href"), "key"); // Check torrents only till we reach the query Limit foreach (var row in rows) { try { var torrentTxt = row.QuerySelector(".torrent_txt, .torrent_txt2").QuerySelector("a"); //if (torrentTxt == null) continue; var infoLink = row.QuerySelector("a.infolink"); var imdbId = ParseUtil.GetLongFromString(infoLink?.GetAttribute("href")); var desc = row.QuerySelector("span")?.GetAttribute("title") + " " + infoLink?.TextContent; var downloadLink = SiteLink + torrentTxt.GetAttribute("href"); var downloadId = ParseUtil.GetArgumentFromQueryString(downloadLink, "id"); //Build site links var baseLink = SiteLink + "torrents.php?action=download&id=" + downloadId; var commentsUri = new Uri(baseLink); var guidUri = new Uri(baseLink + "#comments"); var linkUri = new Uri(QueryHelpers.AddQueryString(baseLink, "key", key)); var seeders = ParseUtil.CoerceInt(row.QuerySelector(".box_s2 a").TextContent); var leechers = ParseUtil.CoerceInt(row.QuerySelector(".box_l2 a").TextContent); var publishDate = DateTime.Parse( row.QuerySelector(".box_feltoltve2").InnerHtml.Replace("<br>", " "), CultureInfo.InvariantCulture); var sizeSplit = row.QuerySelector(".box_meret2").TextContent.Split(' '); var size = ReleaseInfo.GetBytes(sizeSplit[1].ToLower(), ParseUtil.CoerceFloat(sizeSplit[0])); var catLink = row.QuerySelector("a:has(img[class='categ_link'])").GetAttribute("href"); var cat = ParseUtil.GetArgumentFromQueryString(catLink, "tipus"); var title = torrentTxt.GetAttribute("title"); // if the release name does not contain the language we add from the category if (cat.Contains("hun") && !title.ToLower().Contains("hun")) { title += ".hun"; } // Minimum seed time is 48 hours + 24 minutes (.4 hours) per GB of torrent size if downloaded in full. // Or a 1.0 ratio on the torrent var seedTime = TimeSpan.FromHours(48) + TimeSpan.FromMinutes(24 * ReleaseInfo.GigabytesFromBytes(size).Value); var release = new ReleaseInfo { Title = title, Description = desc.Trim(), MinimumRatio = 1, MinimumSeedTime = (long)seedTime.TotalSeconds, DownloadVolumeFactor = 0, UploadVolumeFactor = 1, Link = linkUri, Comments = commentsUri, Guid = guidUri, Seeders = seeders, Peers = leechers + seeders, Imdb = imdbId, PublishDate = publishDate, Size = size, Category = MapTrackerCatToNewznab(cat) }; var banner = row.QuerySelector("img.infobar_ico")?.GetAttribute("onmouseover"); if (banner != null) { // static call to Regex.Match caches the pattern, so we aren't recompiling every loop. var bannerMatch = Regex.Match(banner, @"mutat\('(.*?)', '", RegexOptions.Compiled); release.BannerUrl = new Uri(bannerMatch.Groups[1].Value); } //TODO there is room for improvement here. 
if (episodeString != null && query.MatchQueryStringAND(release.Title, queryStringOverride: episodeString) && !query.IsImdbQuery) { // For Sonarr if the search query was english the title must be english also // The description holds the alternate language name // so we need to swap title and description names var tempTitle = release.Title; // releaseData everything after Name.S0Xe0X var releaseIndex = tempTitle.IndexOf(episodeString, StringComparison.OrdinalIgnoreCase) + episodeString.Length; var releaseData = tempTitle.Substring(releaseIndex).Trim(); // release description contains [imdb: ****] but we only need the data before it for title var description = new[] { release.Description, "" }; if (release.Description.Contains("[imdb:")) { description = release.Description.Split('['); description[1] = "[" + description[1]; } var match = Regex.Match(releaseData, @"^E\d\d?"); // if search is done for S0X than we don't want to put . between S0X and E0X var episodeSeparator = episodeString.Length == 3 && match.Success ? null : "."; release.Title = (description[0].Trim() + "." + episodeString.Trim() + episodeSeparator + releaseData.Trim('.')).Replace(' ', '.'); // add back imdb points to the description [imdb: 8.7] release.Description = tempTitle + " " + description[1]; release.Description = release.Description.Trim(); } releases.Add(release); } catch (FormatException ex) { logger.Error("Problem of parsing Torrent:" + row.InnerHtml); logger.Error("Exception was the following:" + ex); } } } catch (Exception ex) { OnParseError(results.Content, ex); } return(releases); }
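// The seed-time rule above, worked through with illustrative numbers: a 10 GB torrent gives
// 48 h + 10 * 24 min = 52 h, i.e. MinimumSeedTime = 187200 seconds. A self-contained sketch of the
// same calculation (the helper name is hypothetical and not part of the indexer):
private static long MinimumSeedTimeSeconds(long sizeBytes)
{
    var gigabytes = sizeBytes / (1024d * 1024d * 1024d);
    var seedTime = TimeSpan.FromHours(48) + TimeSpan.FromMinutes(24 * gigabytes);
    return (long)seedTime.TotalSeconds;
}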
private bool IsSessionIsClosed(WebClientStringResult result) { return(result.IsRedirect && result.RedirectingTo.Contains("login.php")); }
protected override async Task <IEnumerable <ReleaseInfo> > PerformQuery(TorznabQuery query) { var releases = new List <ReleaseInfo>(); var queryString = query.GetQueryString(); var url = TorrentsUrl; WebClientStringResult results = null; var searchUrls = new List <string>(); if (!string.IsNullOrWhiteSpace(query.SanitizedSearchTerm)) { var pairs = new Dictionary <string, string>(); pairs.Add("search", query.SanitizedSearchTerm); results = await PostDataWithCookiesAndRetry(SearchUrl, pairs, null, TorrentsUrl); results = await ReloginIfNecessary(results); CQ dom = results.Content; var shows = dom.Find("div.show[data-id]"); foreach (var show in shows) { var showUrl = ShowUrl + show.GetAttribute("data-id"); searchUrls.Add(showUrl); } } else { searchUrls.Add(TorrentsUrl); } try { foreach (var searchUrl in searchUrls) { results = await RequestStringWithCookies(searchUrl); results = await ReloginIfNecessary(results); CQ dom = results.Content; var rows = dom["#torrent-table tr"]; if (!string.IsNullOrWhiteSpace(queryString)) { rows = dom["table tr"]; } var globalFreeleech = dom.Find("span:contains(\"Freeleech until:\"):has(span.datetime)").Any(); foreach (var row in rows.Skip(1)) { var release = new ReleaseInfo(); var qRow = row.Cq(); var titleRow = qRow.Find("td:eq(2)").First(); titleRow.Children().Remove(); release.Title = titleRow.Text().Trim(); if ((query.ImdbID == null || !TorznabCaps.SupportsImdbSearch) && !query.MatchQueryStringAND(release.Title)) { continue; } var qBanner = qRow.Find("div[style^=\"cursor: pointer; background-image:url\"]"); var qBannerStyle = qBanner.Attr("style"); if (!string.IsNullOrEmpty(qBannerStyle)) { var bannerImg = Regex.Match(qBannerStyle, @"url\('(.*?)'\);").Groups[1].Value; release.BannerUrl = new Uri(SiteLink + bannerImg); } var qLink = row.Cq().Find("td:eq(4) a:eq(0)"); release.Link = new Uri(SiteLink + qLink.Attr("href")); release.Guid = release.Link; var qLinkComm = row.Cq().Find("td:eq(4) a:eq(1)"); release.Comments = new Uri(SiteLink + qLinkComm.Attr("href")); var dateString = qRow.Find(".datetime").Attr("data-timestamp"); if (dateString != null) { release.PublishDate = DateTimeUtil.UnixTimestampToDateTime(ParseUtil.CoerceDouble(dateString)).ToLocalTime(); } var infoString = row.Cq().Find("td:eq(3)").Text(); release.Size = ParseUtil.CoerceLong(Regex.Match(infoString, "\\((\\d+)\\)").Value.Replace("(", "").Replace(")", "")); var infosplit = infoString.Replace("/", string.Empty).Split(":".ToCharArray()); release.Seeders = ParseUtil.CoerceInt(infosplit[1]); release.Peers = release.Seeders + ParseUtil.CoerceInt(infosplit[2]); if (globalFreeleech) { release.DownloadVolumeFactor = 0; } else { release.DownloadVolumeFactor = 1; } release.UploadVolumeFactor = 1; // var tags = row.Cq().Find(".label-tag").Text(); These don't see to parse - bad tags? 
releases.Add(release); } } } catch (Exception ex) { OnParseError(results.Content, ex); } /* else * { * var rssUrl = SiteLink + "rss/recent?passkey=" + configData.RSSKey.Value; * * results = await RequestStringWithCookiesAndRetry(rssUrl); * try * { * var doc = XDocument.Parse(results.Content); * foreach (var result in doc.Descendants("item")) * { * var xTitle = result.Element("title").Value; * var xLink = result.Element("link").Value; * var xGUID = result.Element("guid").Value; * var xDesc = result.Element("description").Value; * var xDate = result.Element("pubDate").Value; * var release = new ReleaseInfo(); * release.Guid =release.Link = new Uri(xLink); * release.MinimumRatio = 1; * release.Seeders = 1; // We are not supplied with peer info so just mark it as one. * foreach (var element in xDesc.Split(";".ToCharArray())) * { * var split = element.IndexOf(':'); * if (split > -1) * { * var key = element.Substring(0, split).Trim(); * var value = element.Substring(split+1).Trim(); * * switch (key) * { * case "Filename": * release.Title = release.Description = value; * break; * } * } * } * * //"Thu, 24 Sep 2015 18:07:07 +0000" * release.PublishDate = DateTime.ParseExact(xDate, "ddd, dd MMM yyyy HH:mm:ss +0000", CultureInfo.InvariantCulture); * * if (!string.IsNullOrWhiteSpace(release.Title)) * { * releases.Add(release); * } * } * } * catch (Exception ex) * { * OnParseError(results.Content, ex); * }*/ foreach (var release in releases) { if (release.Title.Contains("1080p") || release.Title.Contains("720p")) { release.Category = new List <int> { TorznabCatType.TVHD.ID }; } else { release.Category = new List <int> { TorznabCatType.TVSD.ID }; } } return(releases); }
List<ReleaseInfo> parseTorrents(WebClientStringResult results, String seasonep, TorznabQuery query, int already_founded, int limit)
{
    var releases = new List<ReleaseInfo>();
    try
    {
        CQ dom = results.Content;
        ReleaseInfo release;
        var rows = dom[".box_torrent_all"].Find(".box_torrent");
        // Check torrents only until we reach the query limit
        for (int i = 0; (i < rows.Length && ((already_founded + releases.Count) < limit)); i++)
        {
            try
            {
                CQ qRow = rows[i].Cq();
                var key = dom["link[rel=alternate]"].First().Attr("href").Split('=').Last();
                release = new ReleaseInfo();
                var torrentTxt = qRow.Find(".torrent_txt, .torrent_txt2").Find("a").Get(0);
                //if (torrentTxt == null) continue;
                release.Title = torrentTxt.GetAttribute("title");
                release.Description = qRow.Find("span").Get(0).GetAttribute("title") + " " + qRow.Find("a.infolink").Text();
                release.MinimumRatio = 1;
                release.MinimumSeedTime = 172800;
                release.DownloadVolumeFactor = 0;
                release.UploadVolumeFactor = 1;
                string downloadLink = SiteLink + torrentTxt.GetAttribute("href");
                string downloadId = downloadLink.Substring(downloadLink.IndexOf("&id=") + 4);
                release.Link = new Uri(SiteLink.ToString() + "torrents.php?action=download&id=" + downloadId + "&key=" + key);
                release.Comments = new Uri(SiteLink.ToString() + "torrents.php?action=details&id=" + downloadId);
                release.Guid = new Uri(release.Comments.ToString() + "#comments");
                release.Seeders = ParseUtil.CoerceInt(qRow.Find(".box_s2").Find("a").First().Text());
                release.Peers = ParseUtil.CoerceInt(qRow.Find(".box_l2").Find("a").First().Text()) + release.Seeders;
                var imdblink = qRow.Find("a[href*=\".imdb.com/title\"]").Attr("href");
                release.Imdb = ParseUtil.GetLongFromString(imdblink);
                var banner = qRow.Find("img.infobar_ico").Attr("onmouseover");
                if (banner != null)
                {
                    Regex BannerRegEx = new Regex(@"mutat\('(.*?)', '", RegexOptions.Compiled);
                    var BannerMatch = BannerRegEx.Match(banner);
                    var bannerurl = BannerMatch.Groups[1].Value;
                    release.BannerUrl = new Uri(bannerurl);
                }
                release.PublishDate = DateTime.Parse(qRow.Find(".box_feltoltve2").Get(0).InnerHTML.Replace("<br />", " "), CultureInfo.InvariantCulture);
                string[] sizeSplit = qRow.Find(".box_meret2").Get(0).InnerText.Split(' ');
                release.Size = ReleaseInfo.GetBytes(sizeSplit[1].ToLower(), ParseUtil.CoerceFloat(sizeSplit[0]));
                string catlink = qRow.Find("a:has(img[class='categ_link'])").First().Attr("href");
                string cat = ParseUtil.GetArgumentFromQueryString(catlink, "tipus");
                release.Category = MapTrackerCatToNewznab(cat);
                // If the release name does not contain the language, add it, since it is known from the category
                if (cat.Contains("hun") && !release.Title.Contains("hun"))
                {
                    release.Title += ".hun";
                }
                if (seasonep == null)
                {
                    releases.Add(release);
                }
                else
                {
                    if (query.MatchQueryStringAND(release.Title, null, seasonep))
                    {
                        // For Sonarr: if the search query was English, the title must be English as well,
                        // so we need to swap the Description and Title
                        var temp = release.Title;
                        // releasedata: everything after Name.S0XE0X
                        String releasedata = release.Title.Split(new[] { seasonep }, StringSplitOptions.None)[1].Trim();
                        // If the release name does not contain the language, add it, since it is known from the category
                        if (cat.Contains("hun") && !releasedata.Contains("hun"))
                        {
                            releasedata += ".hun";
                        }
                        // The release description contains [imdb: ****] but we only need the data before it for the title
                        String[] description = { release.Description, "" };
                        if (release.Description.Contains("[imdb:"))
                        {
                            description = release.Description.Split('[');
                            description[1] = "[" + description[1];
                        }
                        release.Title = (description[0].Trim() + "." + seasonep.Trim() + "." + releasedata.Trim('.')).Replace(' ', '.');
                        // If the search is done for S0X, we don't want to put a '.' between S0X and E0X
                        Match match = Regex.Match(releasedata, @"^E\d\d?");
                        if (seasonep.Length == 3 && match.Success)
                        {
                            release.Title = (description[0].Trim() + "." + seasonep.Trim() + releasedata.Trim('.')).Replace(' ', '.');
                        }
                        // Add back the imdb points to the description [imdb: 8.7]
                        release.Description = temp + " " + description[1];
                        release.Description = release.Description.Trim();
                        releases.Add(release);
                    }
                }
            }
            catch (FormatException ex)
            {
                logger.Error("Problem of parsing Torrent:" + rows[i].InnerHTML);
                logger.Error("Exception was the following:" + ex);
            }
        }
    }
    catch (Exception ex)
    {
        OnParseError(results.Content, ex);
    }
    return(releases);
}
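// Worked example of the title rebuild above (all values are illustrative and assume a non-"hun"
// category): with release.Title = "Magyar Cím S02E03 720p WEB-DL" (seasonep = "S02E03") and
// release.Description = "English Title [imdb: 8.7]", releasedata becomes "720p WEB-DL",
// description[0] becomes "English Title ", the rebuilt title is "English.Title.S02E03.720p.WEB-DL",
// and the description ends up as "Magyar Cím S02E03 720p WEB-DL [imdb: 8.7]".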
/// <summary>
/// Parses the torrents from the content
/// </summary>
/// <returns>The parsed torrents.</returns>
/// <param name="results">The result of the query</param>
/// <param name="query">Query.</param>
/// <param name="already_found">Number of already found torrents (used for the limit)</param>
/// <param name="limit">The limit on the number of torrents to download</param>
private async Task<List<ReleaseInfo>> ParseTorrents(WebClientStringResult results, TorznabQuery query, int already_found, int limit, int previously_parsed_on_page)
{
    var releases = new List<ReleaseInfo>();
    try
    {
        var content = results.Content;
        /* The content looks like this:
         * 2\15\2\1\1727\207244\1x08 \[WebDL-720p - Eng - AJP69]\gb\2018-03-09 08:11:53\akció, kaland, sci-fi \0\0\1\191170047\1\0\Anonymous\50\0\0\\0\4\0\174\0\
         * 1\ 0\0\1\1727\207243\1x08 \[WebDL-1080p - Eng - AJP69]\gb\2018-03-09 08:11:49\akció, kaland, sci-fi \0\0\1\305729738\1\0\Anonymous\50\0\0\\0\8\0\102\0\0\0\0\1\\\
         */
        var parameters = content.Split(new string[] { "\\" }, StringSplitOptions.None);
        var type = "normal";
        /*
         * Split the releases by '\' and go through them.
         * 27 elements belong to one torrent.
         */
        for (var j = previously_parsed_on_page * 27; (j + 27 < parameters.Length && ((already_found + releases.Count) < limit)); j = j + 27)
        {
            var release = new ReleaseInfo();
            var imdb_id = 4 + j;
            var torrent_id = 5 + j;
            var is_season_id = 6 + j;
            var publish_date_id = 9 + j;
            var files_id = 13 + j;
            var size_id = 14 + j;
            var seeders_id = 23 + j;
            var peers_id = 24 + j;
            var grabs_id = 25 + j;
            type = "normal";
            // IMDB id of the series
            var seriesinfo = series.Find(x => x.id.Contains(parameters[imdb_id]));
            if (seriesinfo != null && !parameters[imdb_id].Equals(""))
            {
                release.Imdb = long.Parse(seriesinfo.imdbid);
            }
            // ID of the torrent
            var unixTimestamp = (int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
            var fileinfoURL = SearchUrl + "?func=getToggle&id=" + parameters[torrent_id] + "&w=F&pg=0&now=" + unixTimestamp;
            var fileinfo = (await RequestStringWithCookiesAndRetry(fileinfoURL)).Content;
            release.Link = new Uri(DownloadUrl + "?id=" + parameters[torrent_id]);
            release.Guid = release.Link;
            release.Comments = release.Link;
            var fileinf = fileinfo.Split(new string[] { "\\\\" }, StringSplitOptions.None);
            if (fileinf.Length > 1)
            {
                release.Title = fileinf[1];
                if (fileinf[1].Length > 5 && fileinf[1].Substring(fileinf[1].Length - 4).Contains("."))
                {
                    release.Title = fileinf[1].Substring(0, fileinf[1].Length - 4);
                }
            }
            // Season pack check
            if (parameters[is_season_id].Contains("évad/"))
            {
                type = "season";
                // If this is a season pack, remove the episode number from the title.
                release.Title = Regex.Replace(release.Title, "s0?(\\d+)(e0?(\\d+))", "S$1", RegexOptions.IgnoreCase);
            }
            release.PublishDate = DateTime.Parse(parameters[publish_date_id], CultureInfo.InvariantCulture);
            release.Files = int.Parse(parameters[files_id]);
            release.Size = long.Parse(parameters[size_id]);
            release.Seeders = int.Parse(parameters[seeders_id]);
            release.Peers = (int.Parse(parameters[peers_id]) + release.Seeders);
            release.Grabs = int.Parse(parameters[grabs_id]);
            release.MinimumRatio = 1;
            release.MinimumSeedTime = 172800; // 48 hours
            release.DownloadVolumeFactor = 1;
            release.UploadVolumeFactor = UploadFactorCalculator(release.PublishDate, type);
            release.Category = new List<int> { TvCategoryParser.ParseTvShowQuality(release.Title) };
            if ((already_found + releases.Count) < limit)
            {
                releases.Add(release);
            }
            else
            {
                return(releases);
            }
        }
    }
    catch (Exception ex)
    {
        OnParseError(results.Content, ex);
    }
    return(releases);
}
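// Worked example of the season-pack title rewrite in the method above (the title is illustrative):
// for "Show.Name.S02E01.720p.WEB-DL", the pattern "s0?(\d+)(e0?(\d+))" matches "S02E01" and the
// replacement "S$1" yields "Show.Name.S2.720p.WEB-DL": the episode marker is dropped and the
// season number loses its leading zero.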
protected override async Task <IEnumerable <ReleaseInfo> > PerformQuery(TorznabQuery query) { var releases = new List <ReleaseInfo>(); var searchString = query.GetQueryString(); WebClientStringResult results = null; var queryCollection = new NameValueCollection(); queryCollection.Add("st", "0"); queryCollection.Add("sd", "d"); queryCollection.Add("sk", "t"); queryCollection.Add("tracker_search", "torrent"); queryCollection.Add("t", "0"); queryCollection.Add("submit", "Search"); queryCollection.Add("sr", "topics"); //queryCollection.Add("sr", "posts"); //queryCollection.Add("ch", "99999"); // if the search string is empty use the getnew view if (string.IsNullOrWhiteSpace(searchString)) { queryCollection.Add("search_id", "active_topics"); queryCollection.Add("ot", "1"); } else // use the normal search { searchString = searchString.Replace("-", " "); queryCollection.Add("keywords", searchString); queryCollection.Add("sf", "titleonly"); queryCollection.Add("sr", "topics"); queryCollection.Add("pt", "t"); queryCollection.Add("ot", "1"); } var searchUrl = SearchUrl + "?" + queryCollection.GetQueryString(); results = await RequestStringWithCookies(searchUrl); if (!results.Content.Contains("ucp.php?mode=logout")) { await ApplyConfiguration(null); results = await RequestStringWithCookies(searchUrl); } try { string RowsSelector = "ul.topics > li.row"; var ResultParser = new HtmlParser(); var SearchResultDocument = ResultParser.Parse(results.Content); var Rows = SearchResultDocument.QuerySelectorAll(RowsSelector); foreach (var Row in Rows) { try { var release = new ReleaseInfo(); release.MinimumRatio = 1; release.MinimumSeedTime = 0; var qDetailsLink = Row.QuerySelector("a.topictitle"); release.Title = qDetailsLink.TextContent; release.Comments = new Uri(SiteLink + qDetailsLink.GetAttribute("href")); release.Guid = release.Comments; var detailsResult = await RequestStringWithCookies(SiteLink + qDetailsLink.GetAttribute("href")); var DetailsResultDocument = ResultParser.Parse(detailsResult.Content); var qDownloadLink = DetailsResultDocument.QuerySelector("table.table2 > tbody > tr > td > a[href^=\"/download/torrent.php?id\"]"); release.Link = new Uri(SiteLink + qDownloadLink.GetAttribute("href")); release.Seeders = ParseUtil.CoerceInt(Row.QuerySelector("span.seed").TextContent); release.Peers = ParseUtil.CoerceInt(Row.QuerySelector("span.leech").TextContent) + release.Seeders; release.Grabs = ParseUtil.CoerceLong(Row.QuerySelector("span.complet").TextContent); var author = Row.QuerySelector("dd.lastpost > span"); var timestr = author.TextContent.Split('\n')[4].Trim(); release.PublishDate = DateTimeUtil.FromUnknown(timestr, "UK"); var forum = Row.QuerySelector("a[href^=\"./viewforum.php?f=\"]"); var forumid = forum.GetAttribute("href").Split('=')[1]; release.Category = MapTrackerCatToNewznab(forumid); var size = Row.QuerySelector("dl.row-item > dt > div.list-inner > div[style^=\"float:right\"]").TextContent; size = size.Replace("GiB", "GB"); size = size.Replace("MiB", "MB"); size = size.Replace("KiB", "KB"); release.Size = ReleaseInfo.GetBytes(size); release.DownloadVolumeFactor = 1; release.UploadVolumeFactor = 1; releases.Add(release); } catch (Exception ex) { logger.Error(string.Format("{0}: Error while parsing row '{1}':\n\n{2}", ID, Row.OuterHtml, ex)); } } } catch (Exception ex) { OnParseError(results.Content, ex); } return(releases); }
/// <summary> /// Parses the torrents from the content /// </summary> /// <returns>The parsed torrents.</returns> /// <param name="results">The result of the query</param> /// <param name="query">Query.</param> /// <param name="already_found">Number of the already found torrents.(used for limit)</param> /// <param name="limit">The limit to the number of torrents to download </param> async Task <List <ReleaseInfo> > ParseTorrents(WebClientStringResult results, TorznabQuery query, int already_found, int limit) { var releases = new List <ReleaseInfo>(); try { String content = results.Content; /* Content Looks like this * 2\15\2\1\1727\207244\1x08 \[WebDL-720p - Eng - AJP69]\gb\2018-03-09 08:11:53\akció, kaland, sci-fi \0\0\1\191170047\1\0\Anonymous\50\0\0\\0\4\0\174\0\ * 1\ 0\0\1\1727\207243\1x08 \[WebDL-1080p - Eng - AJP69]\gb\2018-03-09 08:11:49\akció, kaland, sci-fi \0\0\1\305729738\1\0\Anonymous\50\0\0\\0\8\0\102\0\0\0\0\1\\\ */ var splits = content.Split('\\'); int i = 0; ReleaseInfo release = new ReleaseInfo(); /* Split the releases by '\' and go through them. * 26 element belongs to one torrent */ foreach (var s in splits) { switch (i) { case 4: //ID of the series //Get IMDB id form site series database SeriesDetail seriesinfo = series.Find(x => x.id.Contains(s)); if (seriesinfo != null && !s.Equals("")) { release.Imdb = long.Parse(seriesinfo.imdbid); } goto default; case 5: //ID of the torrent Int32 unixTimestamp = (Int32)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds; string fileinfoURL = SearchUrl + "?func=getToggle&id=" + s + "&w=F&pg=0&now=" + unixTimestamp; string fileinfo = (await RequestStringWithCookiesAndRetry(fileinfoURL)).Content; release.Link = new Uri(DownloadUrl + "?id=" + s); release.Guid = release.Link; release.Comments = release.Link; string[] fileinf = fileinfo.Split(new string[] { "\\\\" }, StringSplitOptions.None); if (fileinf.Length > 1) { release.Title = fileinf[1]; } goto default; /*case 6: * Console.WriteLine("Series season/ep =" + s); --> 9x10 * goto default;*/ /*case 7: * Console.WriteLine("Releaseinfo =" + s); --->Releaseinfo =[HDTV - Rip - Eng - SVA] * goto default;*/ case 9: release.PublishDate = DateTime.Parse(s, CultureInfo.InvariantCulture); goto default; case 13: release.Files = int.Parse(s); goto default; case 14: release.Size = long.Parse(s); goto default; case 23: release.Seeders = int.Parse(s); goto default; case 24: release.Peers = (int.Parse(s) + release.Seeders); goto default; case 25: release.Grabs = int.Parse(s); goto default; case 26: /* This is the last element for the torrent. So add it to releases and start parsing to new torrent */ i = 0; release.Category = new List <int> { TvCategoryParser.ParseTvShowQuality(release.Title) }; //todo Added some basic configuration need to improve it release.MinimumRatio = 1; release.MinimumSeedTime = 172800; release.DownloadVolumeFactor = 1; release.UploadVolumeFactor = 1; if ((already_found + releases.Count) < limit) { releases.Add(release); } else { return(releases); } release = new ReleaseInfo(); break; default: i++; break; } } } catch (Exception ex) { OnParseError(results.Content, ex); } return(releases); }
public async Task <IEnumerable <ReleaseInfo> > PerformQuery(TorznabQuery query) { var releases = new List <ReleaseInfo>(); string qryString = query.GetQueryString(); Match matchQry = new Regex(@".*\s[Ss]{1}\d{2}$").Match(qryString); if (matchQry.Success) { //If search string ends in S## eg. S03 (season search) add an asterix to search term qryString += "*"; } var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(qryString); WebClientStringResult response = await RequestStringWithCookiesAndRetry(episodeSearchUrl); try { string decodedResponse = WebUtility.HtmlDecode(response.Content); var json = JObject.Parse(decodedResponse); foreach (JObject r in json["response"]["results"]) { DateTime pubDate = DateTime.MinValue; double dateNum; if (double.TryParse((string)r["groupTime"], out dateNum)) { pubDate = DateTimeUtil.UnixTimestampToDateTime(dateNum); pubDate = DateTime.SpecifyKind(pubDate, DateTimeKind.Utc).ToLocalTime(); } string groupName = (string)r["groupName"]; if (r["torrents"] is JArray) { string showName = (string)r["artist"]; foreach (JObject t in r["torrents"]) { var release = new ReleaseInfo(); release.PublishDate = pubDate; release.Title = $"{showName} {groupName}"; release.Description = $"{showName} {groupName}"; FillReleaseInfoFromJson(release, t); releases.Add(release); } } else { var release = new ReleaseInfo(); release.PublishDate = pubDate; release.Title = groupName; release.Description = groupName; FillReleaseInfoFromJson(release, r); releases.Add(release); } } } catch (Exception ex) { OnParseError(response.Content, ex); } return(releases); }
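// Example of the season-search tweak above (the query text is illustrative): a season search such as
// "Breaking Bad S03" becomes "Breaking Bad S03*" before being URL-encoded, the assumption being that
// the site treats the trailing '*' as a wildcard so that "S03E01", "S03E02", and so on all match.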
private async Task <IEnumerable <ReleaseInfo> > MovieSearch(Uri siteLink, TorznabQuery query) { var releases = new List <NewpctRelease>(); var searchStr = query.SanitizedSearchTerm; if (_removeMovieAccents) { searchStr = RemoveDiacritics(searchStr); } Uri validUri = null; var validUriUsesJson = false; var pg = 1; while (pg <= _maxMoviesPages) { var queryCollection = new Dictionary <string, string>(); queryCollection.Add("q", searchStr); queryCollection.Add("s", searchStr); queryCollection.Add("pg", pg.ToString()); WebClientStringResult results = null; IEnumerable <NewpctRelease> items = null; if (validUri != null) { if (validUriUsesJson) { var uri = new Uri(validUri, _searchJsonUrl); results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection); if (results == null || string.IsNullOrEmpty(results.Content)) { break; } items = ParseSearchJsonContent(uri, results.Content); } else { var uri = new Uri(validUri, _searchUrl); results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection); if (results == null || string.IsNullOrEmpty(results.Content)) { break; } items = ParseSearchContent(results.Content); } } else { using (var jsonUris = GetLinkUris(new Uri(siteLink, _searchJsonUrl)).GetEnumerator()) { using (var uris = GetLinkUris(new Uri(siteLink, _searchUrl)).GetEnumerator()) { var resultFound = false; while (jsonUris.MoveNext() && uris.MoveNext() && !resultFound) { for (var i = 0; i < 2 && !resultFound; i++) { var usingJson = i == 0; Uri uri; if (usingJson) { uri = jsonUris.Current; } else { uri = uris.Current; } try { results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection); } catch { results = null; } if (results != null && !string.IsNullOrEmpty(results.Content)) { if (usingJson) { items = ParseSearchJsonContent(uri, results.Content); } else { items = ParseSearchContent(results.Content); } if (items != null) { validUri = uri; validUriUsesJson = usingJson; resultFound = true; } } } } } } } if (items == null) { break; } releases.AddRange(items); pg++; } ScoreReleases(releases, searchStr); if (_filterMovies) { releases = releases.Where(r => r.Score < _wordNotFoundScore).ToList(); } return(releases); }
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
    var releases = new List<ReleaseInfo>();
    var searchString = query.GetQueryString();
    WebClientStringResult results = null;
    var queryCollection = new NameValueCollection();
    queryCollection.Add("act", "search");
    queryCollection.Add("forums", "all");
    queryCollection.Add("torrents", "1");
    queryCollection.Add("search_in", "titles");
    queryCollection.Add("result_type", "topics");
    // if the search string is empty use the getnew view
    if (string.IsNullOrWhiteSpace(searchString))
    {
        queryCollection.Add("CODE", "getnew");
        queryCollection.Add("active", "1");
    }
    else // use the normal search
    {
        searchString = searchString.Replace("-", " ");
        queryCollection.Add("CODE", "01");
        queryCollection.Add("keywords", searchString);
    }
    var searchUrl = IndexUrl + "?" + queryCollection.GetQueryString();
    results = await RequestStringWithCookies(searchUrl);
    if (results.IsRedirect && results.RedirectingTo.Contains("CODE=show"))
    {
        results = await RequestStringWithCookies(results.RedirectingTo);
    }
    try
    {
        string RowsSelector = "div.borderwrap:has(div.maintitle) > table > tbody > tr:has(a[href*=\"index.php?showtopic=\"])";
        var SearchResultParser = new HtmlParser();
        var SearchResultDocument = SearchResultParser.Parse(results.Content);
        var Rows = SearchResultDocument.QuerySelectorAll(RowsSelector);
        foreach (var Row in Rows)
        {
            try
            {
                var release = new ReleaseInfo();
                var StatsElements = Row.QuerySelector("td:nth-child(5)");
                var stats = StatsElements.TextContent.Split('·');
                if (stats.Length != 3) // not a torrent
                {
                    continue;
                }
                release.Seeders = ParseUtil.CoerceInt(stats[0]);
                release.Peers = ParseUtil.CoerceInt(stats[1]) + release.Seeders;
                release.Grabs = ParseUtil.CoerceInt(stats[2]);
                release.MinimumRatio = 0.51;
                release.MinimumSeedTime = 0;
                var qDetailsLink = Row.QuerySelector("a[onmouseover][href*=\"index.php?showtopic=\"]");
                release.Title = qDetailsLink.TextContent;
                release.Comments = new Uri(qDetailsLink.GetAttribute("href"));
                release.Link = release.Comments;
                release.Guid = release.Link;
                release.DownloadVolumeFactor = 1;
                release.UploadVolumeFactor = 1;
                var id = QueryHelpers.ParseQuery(release.Comments.Query)["showtopic"].FirstOrDefault();
                var desc = Row.QuerySelector("span.desc");
                var forange = desc.QuerySelector("font.forange");
                if (forange != null)
                {
                    var DownloadVolumeFactor = forange.QuerySelector("i:contains(\"freeleech\")");
                    if (DownloadVolumeFactor != null)
                    {
                        release.DownloadVolumeFactor = 0;
                    }
                    var UploadVolumeFactor = forange.QuerySelector("i:contains(\"x upload]\")");
                    if (UploadVolumeFactor != null)
                    {
                        release.UploadVolumeFactor = ParseUtil.CoerceDouble(UploadVolumeFactor.TextContent.Split(' ')[0].Substring(1).Replace("x", ""));
                    }
                    forange.Remove();
                }
                var format = desc.TextContent;
                release.Title += " [" + format + "]";
                var preview = SearchResultDocument.QuerySelector("div#d21-tph-preview-data-" + id);
                if (preview != null)
                {
                    release.Description = "";
                    foreach (var e in preview.ChildNodes)
                    {
                        if (e.NodeType == NodeType.Text)
                        {
                            release.Description += e.NodeValue;
                        }
                        else
                        {
                            release.Description += e.TextContent + "\n";
                        }
                    }
                }
                release.Description = WebUtility.HtmlEncode(release.Description.Trim());
                release.Description = release.Description.Replace("\n", "<br>");
                if (format.Contains("MP3"))
                {
                    release.Category = new List<int> { TorznabCatType.AudioMP3.ID };
                }
                else if (format.Contains("AAC"))
                {
                    release.Category = new List<int> { TorznabCatType.AudioOther.ID };
                }
                else if (format.Contains("Lossless"))
                {
                    release.Category = new List<int> { TorznabCatType.AudioLossless.ID };
                }
                else
                {
                    release.Category = new List<int> { TorznabCatType.AudioOther.ID };
                }
                var lastAction = Row.QuerySelector("td:nth-child(9) > span").FirstChild.NodeValue;
                release.PublishDate = DateTimeUtil.FromUnknown(lastAction, "UK");
                releases.Add(release);
            }
            catch (Exception ex)
            {
                logger.Error(string.Format("{0}: Error while parsing row '{1}':\n\n{2}", ID, Row.OuterHtml, ex));
            }
        }
    }
    catch (Exception ex)
    {
        OnParseError(results.Content, ex);
    }
    return(releases);
}
private async Task <IEnumerable <ReleaseInfo> > PerformQuery(Uri siteLink, TorznabQuery query, int attempts) { var releases = new List <ReleaseInfo>(); _includeVo = ((BoolItem)configData.GetDynamic("IncludeVo")).Value; _filterMovies = ((BoolItem)configData.GetDynamic("FilterMovies")).Value; _removeMovieAccents = ((BoolItem)configData.GetDynamic("RemoveMovieAccents")).Value; _dailyNow = DateTime.Now; _dailyResultIdx = 0; bool rssMode = string.IsNullOrEmpty(query.SanitizedSearchTerm); if (rssMode) { int pg = 1; Uri validUri = null; while (pg <= _maxDailyPages) { IEnumerable <NewpctRelease> items = null; WebClientStringResult results = null; if (validUri != null) { Uri uri = new Uri(validUri, string.Format(_dailyUrl, pg)); results = await RequestStringWithCookiesAndRetry(uri.AbsoluteUri); if (results == null || string.IsNullOrEmpty(results.Content)) { break; } await FollowIfRedirect(results); items = ParseDailyContent(results.Content); } else { foreach (Uri uri in GetLinkUris(new Uri(siteLink, string.Format(_dailyUrl, pg)))) { results = await RequestStringWithCookiesAndRetry(uri.AbsoluteUri); if (results != null && !string.IsNullOrEmpty(results.Content)) { await FollowIfRedirect(results); items = ParseDailyContent(results.Content); if (items != null && items.Any()) { validUri = uri; break; } } } } if (items == null || !items.Any()) { break; } releases.AddRange(items); //Check if we need to go to next page bool recentFound = _mostRecentRelease != null && items.Any(r => r.Title == _mostRecentRelease.Title && r.Link.AbsoluteUri == _mostRecentRelease.Link.AbsoluteUri); if (pg == 1) { _mostRecentRelease = (NewpctRelease)items.First().Clone(); } if (recentFound) { break; } pg++; } } else { bool isTvSearch = query.Categories == null || query.Categories.Length == 0 || query.Categories.Any(c => _allTvCategories.Contains(c)); if (isTvSearch) { releases.AddRange(await TvSearch(siteLink, query)); } bool isMovieSearch = query.Categories == null || query.Categories.Length == 0 || query.Categories.Any(c => _allMoviesCategories.Contains(c)); if (isMovieSearch) { releases.AddRange(await MovieSearch(siteLink, query)); } } return(releases); }
protected async Task <bool> DoLogin() { var Login = Definition.Login; if (Login == null) { return(false); } if (Login.Method == "post") { var pairs = new Dictionary <string, string>(); foreach (var Input in Definition.Login.Inputs) { var value = applyGoTemplateText(Input.Value); pairs.Add(Input.Key, value); } var LoginUrl = SiteLink + Login.Path; configData.CookieHeader.Value = null; var loginResult = await RequestLoginAndFollowRedirect(LoginUrl, pairs, null, true, null, SiteLink, true); configData.CookieHeader.Value = loginResult.Cookies; checkForLoginError(loginResult); } else if (Login.Method == "form") { var LoginUrl = SiteLink + Login.Path; var pairs = new Dictionary <string, string>(); var CaptchaConfigItem = (RecaptchaItem)configData.GetDynamic("Captcha"); if (CaptchaConfigItem != null) { if (!string.IsNullOrWhiteSpace(CaptchaConfigItem.Cookie)) { // for remote users just set the cookie and return CookieHeader = CaptchaConfigItem.Cookie; return(true); } var CloudFlareCaptchaChallenge = landingResultDocument.QuerySelector("script[src=\"/cdn-cgi/scripts/cf.challenge.js\"]"); if (CloudFlareCaptchaChallenge != null) { var CloudFlareQueryCollection = new NameValueCollection(); CloudFlareQueryCollection["id"] = CloudFlareCaptchaChallenge.GetAttribute("data-ray"); CloudFlareQueryCollection["g-recaptcha-response"] = CaptchaConfigItem.Value; var ClearanceUrl = resolvePath("/cdn-cgi/l/chk_captcha?" + CloudFlareQueryCollection.GetQueryString()); var ClearanceResult = await RequestStringWithCookies(ClearanceUrl.ToString(), null, SiteLink); if (ClearanceResult.IsRedirect) // clearance successfull { // request real login page again landingResult = await RequestStringWithCookies(LoginUrl, null, SiteLink); var htmlParser = new HtmlParser(); landingResultDocument = htmlParser.Parse(landingResult.Content); } else { throw new ExceptionWithConfigData(string.Format("Login failed: Cloudflare clearance failed using cookies {0}: {1}", CookieHeader, ClearanceResult.Content), configData); } } else { pairs.Add("g-recaptcha-response", CaptchaConfigItem.Value); } } var FormSelector = Login.Form; if (FormSelector == null) { FormSelector = "form"; } var form = landingResultDocument.QuerySelector(FormSelector); if (form == null) { throw new ExceptionWithConfigData(string.Format("Login failed: No form found on {0} using form selector {1}", LoginUrl, FormSelector), configData); } var inputs = form.QuerySelectorAll("input"); if (inputs == null) { throw new ExceptionWithConfigData(string.Format("Login failed: No inputs found on {0} using form selector {1}", LoginUrl, FormSelector), configData); } var submitUrl = resolvePath(form.GetAttribute("action")); foreach (var input in inputs) { var name = input.GetAttribute("name"); if (name == null) { continue; } var value = input.GetAttribute("value"); if (value == null) { value = ""; } pairs[name] = value; } foreach (var Input in Definition.Login.Inputs) { var value = applyGoTemplateText(Input.Value); pairs[Input.Key] = value; } // automatically solve simpleCaptchas, if used var simpleCaptchaPresent = landingResultDocument.QuerySelector("script[src*=\"simpleCaptcha\"]"); if (simpleCaptchaPresent != null) { var captchaUrl = resolvePath("simpleCaptcha.php?numImages=1"); var simpleCaptchaResult = await RequestStringWithCookies(captchaUrl.ToString(), null, LoginUrl); var simpleCaptchaJSON = JObject.Parse(simpleCaptchaResult.Content); var captchaSelection = simpleCaptchaJSON["images"][0]["hash"].ToString(); pairs["captchaSelection"] = captchaSelection; pairs["submitme"] = "X"; } 
var loginResult = await RequestLoginAndFollowRedirect(submitUrl.ToString(), pairs, configData.CookieHeader.Value, true, null, SiteLink, true); configData.CookieHeader.Value = loginResult.Cookies; checkForLoginError(loginResult); } else if (Login.Method == "cookie") { configData.CookieHeader.Value = ((StringItem)configData.GetDynamic("cookie")).Value; } else { throw new NotImplementedException("Login method " + Definition.Login.Method + " not implemented"); } return(true); }
private async Task<IEnumerable<ReleaseInfo>> MovieSearch(Uri siteLink, TorznabQuery query)
{
    var releases = new List<NewpctRelease>();
    string searchStr = query.SanitizedSearchTerm;
    if (_removeMovieAccents)
    {
        searchStr = RemoveDiacritics(searchStr);
    }
    Uri validUri = null;
    int pg = 1;
    while (pg <= _maxMoviesPages)
    {
        var queryCollection = new Dictionary<string, string>();
        queryCollection.Add("q", searchStr);
        queryCollection.Add("pg", pg.ToString());
        WebClientStringResult results = null;
        IEnumerable<NewpctRelease> items = null;
        if (validUri != null)
        {
            Uri uri = new Uri(validUri, string.Format(_searchUrl, pg));
            results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection);
            if (results == null || string.IsNullOrEmpty(results.Content))
            {
                break;
            }
            items = ParseSearchContent(results.Content);
        }
        else
        {
            foreach (Uri uri in GetLinkUris(new Uri(siteLink, string.Format(_searchUrl, pg))))
            {
                results = await PostDataWithCookies(uri.AbsoluteUri, queryCollection);
                if (results != null && !string.IsNullOrEmpty(results.Content))
                {
                    items = ParseSearchContent(results.Content);
                    if (items != null && items.Any())
                    {
                        validUri = uri;
                        break;
                    }
                }
            }
        }
        if (items == null)
        {
            break;
        }
        releases.AddRange(items);
        pg++;
    }
    ScoreReleases(releases, searchStr);
    if (_filterMovies)
    {
        releases = releases.Where(r => r.Score < _wordNotFoundScore).ToList();
    }
    return(releases);
}
/// <summary> /// Execute our search query /// </summary> /// <param name="query">Query</param> /// <returns>Releases</returns> public async Task <IEnumerable <ReleaseInfo> > PerformQuery(TorznabQuery query) { var releases = new List <ReleaseInfo>(); var torrentRowList = new List <CQ>(); var searchTerm = query.GetQueryString(); var searchUrl = SearchUrl; int nbResults = 0; int pageLinkCount = 0; // Check cache first so we don't query the server (if search term used or not in dev mode) if (!DevMode && !string.IsNullOrEmpty(searchTerm)) { lock (cache) { // Remove old cache items CleanCache(); // Search in cache var cachedResult = cache.Where(i => i.Query == searchTerm).FirstOrDefault(); if (cachedResult != null) { return(cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray()); } } } // Build our query var request = buildQuery(searchTerm, query, searchUrl); // Getting results & Store content WebClientStringResult results = await queryExec(request); fDom = results.Content; try { // Find torrent rows var firstPageRows = findTorrentRows(); // Add them to torrents list torrentRowList.AddRange(firstPageRows.Select(fRow => fRow.Cq())); // Check if there are pagination links at bottom Boolean pagination = (fDom[".linkbox > a"].Length != 0); // If pagination available if (pagination) { // Calculate numbers of pages available for this search query (Based on number results and number of torrents on first page) pageLinkCount = ParseUtil.CoerceInt(Regex.Match(fDom[".linkbox > a"].Last().Attr("href").ToString(), @"\d+").Value); // Calculate average number of results (based on torrents rows lenght on first page) nbResults = firstPageRows.Count() * pageLinkCount; } else { // Check if we have a minimum of one result if (firstPageRows.Length >= 1) { // Retrieve total count on our alone page nbResults = firstPageRows.Count(); pageLinkCount = 1; } else { output("\nNo result found for your query, please try another search term ...\n", "info"); // No result found for this query return(releases); } } output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query !"); output("\nThere are " + firstPageRows.Length + " results on the first page !"); // If we have a term used for search and pagination result superior to one if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1) { // Starting with page #2 for (int i = 2; i <= Math.Min(Int32.Parse(ConfigData.Pages.Value), pageLinkCount); i++) { output("\nProcessing page #" + i); // Request our page latencyNow(); // Build our query var pageRequest = buildQuery(searchTerm, query, searchUrl, i); // Getting results & Store content WebClientStringResult pageResults = await queryExec(pageRequest); // Assign response fDom = pageResults.Content; // Process page results var additionalPageRows = findTorrentRows(); // Add them to torrents list torrentRowList.AddRange(additionalPageRows.Select(fRow => fRow.Cq())); } } else { // No search term, maybe testing... 
so registring autkey and torrentpass for future uses string infosData = firstPageRows.First().Find("td:eq(3) > a").Attr("href"); IList <string> infosList = infosData.Split('&').Select(s => s.Trim()).Where(s => s != String.Empty).ToList(); IList <string> infosTracker = infosList.Select(s => s.Split(new[] { '=' }, 2)[1].Trim()).ToList(); output("\nStoring Authkey for future uses..."); ConfigData.AuthKey.Value = infosTracker[2]; output("\nStoring TorrentPass for future uses..."); ConfigData.TorrentPass.Value = infosTracker[3]; } // Loop on results foreach (CQ tRow in torrentRowList) { output("\n=>> Torrent #" + (releases.Count + 1)); // ID int id = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1) > a").Attr("href").ToString(), @"\d+").Value); output("ID: " + id); // Release Name string name = tRow.Find("td:eq(1) > a").Text().ToString(); output("Release: " + name); // Category string categoryID = tRow.Find("td:eq(0) > a").Attr("href").Replace("torrents.php?cat[]=", String.Empty); output("Category: " + MapTrackerCatToNewznab(categoryID) + " (" + categoryID + ")"); // Seeders int seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value); output("Seeders: " + seeders); // Leechers int leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(6)").Text(), @"\d+").Value); output("Leechers: " + leechers); // Completed int completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5)").Text(), @"\d+").Value); output("Completed: " + completed); // Size string sizeStr = tRow.Find("td:eq(4)").Text().Replace("Go", "gb").Replace("Mo", "mb").Replace("Ko", "kb"); long size = ReleaseInfo.GetBytes(sizeStr); output("Size: " + sizeStr + " (" + size + " bytes)"); // Publish DateToString IList <string> clockList = tRow.Find("td:eq(2) > span").Text().Replace("Il y a", "").Split(',').Select(s => s.Trim()).Where(s => s != String.Empty).ToList(); var date = agoToDate(clockList); output("Released on: " + date.ToLocalTime()); // Torrent Details URL Uri detailsLink = new Uri(TorrentDescriptionUrl + id); output("Details: " + detailsLink.AbsoluteUri); // Torrent Comments URL Uri commentsLink = new Uri(TorrentCommentUrl + id); output("Comments Link: " + commentsLink.AbsoluteUri); // Torrent Download URL Uri downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()).Replace("{auth_key}", ConfigData.AuthKey.Value).Replace("{torrent_pass}", ConfigData.TorrentPass.Value)); output("Download Link: " + downloadLink.AbsoluteUri); // Building release infos var release = new ReleaseInfo(); release.Category = MapTrackerCatToNewznab(categoryID.ToString()); release.Title = name; release.Seeders = seeders; release.Peers = seeders + leechers; release.MinimumRatio = 1; release.MinimumSeedTime = 172800; release.PublishDate = date; release.Size = size; release.Guid = detailsLink; release.Comments = commentsLink; release.Link = downloadLink; releases.Add(release); } } catch (Exception ex) { OnParseError("Error, unable to parse result \n" + ex.StackTrace, ex); } // Return found releases return(releases); }
public async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
    var releases = new List<ReleaseInfo>();
    var queryString = query.GetQueryString();
    var url = TorrentsUrl;
    WebClientStringResult results = null;
    var pairs = new Dictionary<string, string>
    {
        { "portlet", "true" }
    };

    if (!string.IsNullOrWhiteSpace(queryString))
    {
        pairs.Add("search", queryString);
        results = await PostDataWithCookiesAndRetry(SearchUrl, pairs, null, TorrentsUrl);
    }
    else
    {
        results = await PostDataWithCookiesAndRetry(TorrentsUrl, pairs, null, TorrentsUrl);
    }

    try
    {
        CQ dom = results.Content;
        var rows = dom["#torrent-table tr"];
        if (!string.IsNullOrWhiteSpace(queryString))
        {
            rows = dom["table tr"];
        }
        foreach (var row in rows.Skip(1))
        {
            var release = new ReleaseInfo();
            var qRow = row.Cq();

            var titleRow = qRow.Find("td:eq(2)").First();
            titleRow.Children().Remove();
            release.Title = titleRow.Text().Trim();
            if (string.IsNullOrWhiteSpace(release.Title))
            {
                continue;
            }
            release.Description = release.Title;

            var qLink = row.Cq().Find("td:eq(4) a:eq(0)");
            release.Link = new Uri(SiteLink + qLink.Attr("href"));
            release.Guid = release.Link;

            var qLinkComm = row.Cq().Find("td:eq(4) a:eq(1)");
            release.Comments = new Uri(SiteLink + qLinkComm.Attr("href"));

            var dateString = qRow.Find(".datetime").Attr("data-timestamp");
            release.PublishDate = DateTimeUtil.UnixTimestampToDateTime(ParseUtil.CoerceDouble(dateString));

            var infoString = row.Cq().Find("td:eq(3)").Text();
            release.Size = ParseUtil.CoerceLong(Regex.Match(infoString, "\\((\\d+)\\)").Value.Replace("(", "").Replace(")", ""));

            var infosplit = infoString.Replace("/", string.Empty).Split(":".ToCharArray());
            release.Seeders = ParseUtil.CoerceInt(infosplit[1]);
            release.Peers = release.Seeders + ParseUtil.CoerceInt(infosplit[2]);

            // var tags = row.Cq().Find(".label-tag").Text(); These don't seem to parse - bad tags?

            releases.Add(release);
        }
    }
    catch (Exception ex)
    {
        OnParseError(results.Content, ex);
    }
    /* else
     * {
     *     var rssUrl = SiteLink + "rss/recent?passkey=" + configData.RSSKey.Value;
     *
     *     results = await RequestStringWithCookiesAndRetry(rssUrl);
     *     try
     *     {
     *         var doc = XDocument.Parse(results.Content);
     *         foreach (var result in doc.Descendants("item"))
     *         {
     *             var xTitle = result.Element("title").Value;
     *             var xLink = result.Element("link").Value;
     *             var xGUID = result.Element("guid").Value;
     *             var xDesc = result.Element("description").Value;
     *             var xDate = result.Element("pubDate").Value;
     *             var release = new ReleaseInfo();
     *             release.Guid = release.Link = new Uri(xLink);
     *             release.MinimumRatio = 1;
     *             release.Seeders = 1; // We are not supplied with peer info so just mark it as one.
     *             foreach (var element in xDesc.Split(";".ToCharArray()))
     *             {
     *                 var split = element.IndexOf(':');
     *                 if (split > -1)
     *                 {
     *                     var key = element.Substring(0, split).Trim();
     *                     var value = element.Substring(split + 1).Trim();
     *
     *                     switch (key)
     *                     {
     *                         case "Filename":
     *                             release.Title = release.Description = value;
     *                             break;
     *                     }
     *                 }
     *             }
     *
     *             // "Thu, 24 Sep 2015 18:07:07 +0000"
     *             release.PublishDate = DateTime.ParseExact(xDate, "ddd, dd MMM yyyy HH:mm:ss +0000", CultureInfo.InvariantCulture);
     *
     *             if (!string.IsNullOrWhiteSpace(release.Title))
     *             {
     *                 releases.Add(release);
     *             }
     *         }
     *     }
     *     catch (Exception ex)
     *     {
     *         OnParseError(results.Content, ex);
     *     }*/

    foreach (var release in releases)
    {
        if (release.Title.Contains("1080p") || release.Title.Contains("720p"))
        {
            release.Category = TorznabCatType.TVHD.ID;
        }
        else
        {
            release.Category = TorznabCatType.TVSD.ID;
        }
    }
    return releases;
}
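// Minimal sketch of the resolution-based category fallback used above: this tracker exposes no
// usable category column, so HD vs. SD is inferred from the release title. The helper name is
// hypothetical; TorznabCatType is the same type already used in the loop above.
private static int GuessTvCategory(string title)
{
    // 1080p/720p releases map to TV HD, everything else to TV SD
    return (title.Contains("1080p") || title.Contains("720p"))
        ? TorznabCatType.TVHD.ID
        : TorznabCatType.TVSD.ID;
}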
/// <summary>
/// Execute our search query
/// </summary>
/// <param name="query">Query</param>
/// <returns>Releases</returns>
public async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
    var releases = new List<ReleaseInfo>();
    var torrentRowList = new List<CQ>();
    var searchTerm = query.GetQueryString();
    var searchUrl = SearchUrl;

    // Check login before performing a query
    await CheckLogin();

    // Check cache first so we don't query the server (if a search term is used and we are not in dev mode)
    if (!DevMode && !string.IsNullOrEmpty(searchTerm))
    {
        lock (cache)
        {
            // Remove old cache items
            CleanCache();

            // Search in cache
            var cachedResult = cache.FirstOrDefault(i => i.Query == searchTerm);
            if (cachedResult != null)
            {
                return cachedResult.Results.Select(s => (ReleaseInfo)s.Clone()).ToArray();
            }
        }
    }

    // Build our query
    var request = BuildQuery(searchTerm, query, searchUrl);

    // Get results & store content
    var results = await QueryExec(request);
    _fDom = results.Content;

    try
    {
        // Find torrent rows
        var firstPageRows = FindTorrentRows();

        // Add them to the torrents list
        torrentRowList.AddRange(firstPageRows.Select(fRow => fRow.Cq()));

        // Check if there are pagination links at the bottom
        var pagination = (_fDom["#quicknavpage_menu"].Length != 0);

        // If pagination is available
        int nbResults;
        int pageLinkCount;
        if (pagination)
        {
            // Retrieve available pages (3 pages shown max)
            pageLinkCount = _fDom["#navcontainer_f:first > ul"].Find("a").Not(".smalltext").Not("#quicknavpage").Length;

            // Last button? (so more than 3 pages are available)
            var more = _fDom["#navcontainer_f:first > ul"].Find("a.smalltext").Length > 1;

            // More than 3 pages?
            if (more)
            {
                // Get the total page count from the last link
                pageLinkCount = ParseUtil.CoerceInt(Regex.Match(_fDom["#navcontainer_f:first > ul"].Find("a:eq(4)").Attr("href"), @"\d+").Value);
            }

            // Estimate the number of results (based on the torrent row count of the first page)
            nbResults = firstPageRows.Count() * pageLinkCount;
        }
        else
        {
            nbResults = 1;
            pageLinkCount = 1;

            // Check if we have at least one result
            if (firstPageRows.Length > 1)
            {
                // Retrieve the total count from our single page
                nbResults = firstPageRows.Count();
            }
            else
            {
                // Check if there is no result
                if (torrentRowList.First().Find("td").Length == 1)
                {
                    // No results found
                    Output("\nNo result found for your query, please try another search term ...\n", "info");

                    // No result found for this query
                    return releases;
                }
            }
        }
        Output("\nFound " + nbResults + " result(s) (+/- " + firstPageRows.Length + ") in " + pageLinkCount + " page(s) for this query!");
        Output("\nThere are " + firstPageRows.Length + " results on the first page!");

        // If we have a search term and more than one page of results
        if (!string.IsNullOrWhiteSpace(query.GetQueryString()) && pageLinkCount > 1)
        {
            // Starting with page #2
            for (var i = 2; i <= Math.Min(int.Parse(ConfigData.Pages.Value), pageLinkCount); i++)
            {
                Output("\nProcessing page #" + i);

                // Request our page
                LatencyNow();

                // Build our query -- minus 1 on the page due to the tracker's odd pagination numbering, starting with page 0...
                var pageRequest = BuildQuery(searchTerm, query, searchUrl, i);

                // Get results & store content
                WebClientStringResult pageResults = await QueryExec(pageRequest);

                // Assign response
                _fDom = pageResults.Content;

                // Process page results
                var additionalPageRows = FindTorrentRows();

                // Add them to the torrents list
                torrentRowList.AddRange(additionalPageRows.Select(fRow => fRow.Cq()));
            }
        }

        // Loop on results
        foreach (var tRow in torrentRowList)
        {
            Output("\n=>> Torrent #" + (releases.Count + 1));

            // ID
            var id = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(1) > div:first > a").Attr("name"), @"\d+").Value);
            Output("ID: " + id);

            // Check if the torrent was nuked by the tracker or the rules; if so we can't download it
            if (tRow.Find("td:eq(2) > a").Length == 0)
            {
                // Next item
                Output("Torrent is nuked, we can't download it, going to next torrent...");
                continue;
            }

            // Release Name
            var name = tRow.Find("td:eq(2) > a").Attr("title").Substring(24).Trim();
            Output("Release: " + name);

            // Category
            var categoryId = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(0) > a").Attr("href"), @"\d+").Value);
            var categoryName = tRow.Find("td:eq(0) > a > img").Attr("title").Split(new[] { ':' }, 2)[1].Trim();
            Output("Category: " + MapTrackerCatToNewznab(categoryId.ToString()) + " (" + categoryId + " - " + categoryName + ")");

            // Seeders (use the cell text so the regex runs on the cell contents, not on an enumerable's type name)
            var seeders = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(5) > div > font").Text(), @"\d+").Value);
            Output("Seeders: " + seeders);

            // Leechers
            var leechers = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(6) > div > font").Text(), @"\d+").Value);
            Output("Leechers: " + leechers);

            // Completed
            var completed = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(4)").Text(), @"\d+").Value);
            Output("Completed: " + completed);

            // Files
            var files = 1;
            if (tRow.Find("td:eq(3) > a").Length == 1)
            {
                files = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(3) > a").Text(), @"\d+").Value);
            }
            Output("Files: " + files);

            // Health
            var percent = ParseUtil.CoerceInt(Regex.Match(tRow.Find("td:eq(7) > img").Attr("src"), @"\d+").Value) * 10;
            Output("Health: " + percent + "%");

            // Size
            var humanSize = tRow.Find("td:eq(8)").Text().ToLowerInvariant();
            var size = ReleaseInfo.GetBytes(humanSize);
            Output("Size: " + humanSize + " (" + size + " bytes)");

            // Date & IMDB & Genre
            var infosData = tRow.Find("td:eq(1) > div:last").Text();
            var infosList = Regex.Split(infosData, "\\|").ToList();
            var infosTorrent = infosList.Select(s => s.Split(new[] { ':' }, 2)[1].Trim()).ToList();

            // --> Date
            var date = FormatDate(infosTorrent.First());
            Output("Released on: " + date.ToLocalTime());

            // --> Genre
            var genre = infosTorrent.Last();
            Output("Genre: " + genre);

            // Torrent Details URL
            var detailsLink = new Uri(TorrentDescriptionUrl.Replace("{id}", id.ToString()));
            Output("Details: " + detailsLink.AbsoluteUri);

            // Torrent Comments URL
            var commentsLink = new Uri(TorrentCommentUrl.Replace("{id}", id.ToString()));
            Output("Comments Link: " + commentsLink.AbsoluteUri);

            // Torrent Download URL
            var downloadLink = new Uri(TorrentDownloadUrl.Replace("{id}", id.ToString()));
            Output("Download Link: " + downloadLink.AbsoluteUri);

            // Build the release info
            var release = new ReleaseInfo
            {
                Category = MapTrackerCatToNewznab(categoryId.ToString()),
                Title = name,
                Seeders = seeders,
                Peers = seeders + leechers,
                MinimumRatio = 1,
                MinimumSeedTime = 172800,
                PublishDate = date,
                Size = size,
                Guid = detailsLink,
                Comments = commentsLink,
                Link = downloadLink
            };
            releases.Add(release);
        }
    }
    catch (Exception ex)
    {
        OnParseError("Error, unable to parse result \n" + ex.StackTrace, ex);
    }

    // Return found releases
    return releases;
}
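// Convenience sketch (not part of the original indexer): the details, comments and download
// links above all perform the same "{id}" placeholder substitution, so a small helper could be
// factored out. The template arguments would be the constants already used in the method.
private static Uri BuildTorrentUri(string template, int id)
{
    // Replace the {id} placeholder and return the absolute URI
    return new Uri(template.Replace("{id}", id.ToString()));
}
// Usage example: var downloadLink = BuildTorrentUri(TorrentDownloadUrl, id);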
protected override async Task<IEnumerable<ReleaseInfo>> PerformQuery(TorznabQuery query)
{
    var releases = new List<ReleaseInfo>(); // list of releases
    var searchString = query.GetQueryString(); // get the search string from the query
    WebClientStringResult results = null;
    var queryCollection = new NameValueCollection();
    List<string> catList = MapTorznabCapsToTrackers(query); // map categories from the query to indexer-specific ones
    var categ = String.Join(",", catList);

    // create GET request - search URI
    queryCollection.Add("offset", "0");
    queryCollection.Add("keyword", searchString);
    queryCollection.Add("category", categ.TrimStart(','));
    queryCollection.Add("option", "");
    queryCollection.Add("ns", "true");

    // concatenate the base search url with the query
    var searchUrl = SearchUrl + "?" + queryCollection.GetQueryString();

    // log search URL
    logger.Info(string.Format("Search URL Partis: {0}", searchUrl));

    // add necessary headers
    var header = new Dictionary<string, string>
    {
        { "X-requested-with", "XMLHttpRequest" }
    };

    // get results and follow redirect
    results = await RequestStringWithCookies(searchUrl, null, SearchUrl, header);
    await FollowIfRedirect(results, null, null, null, true);

    // are we logged in? check based on cookies
    if (!results.Cookies.Contains(configData.Username.Value))
    {
        await ApplyConfiguration(null);
    }

    // another request with a specific query - NEEDED for a successful response
    results = await RequestStringWithCookies(SiteLink + "brskaj/?rs=false&offset=0", null, SearchUrl, header);
    await FollowIfRedirect(results, null, null, null, true);

    // parse results
    try
    {
        string RowsSelector = "div.list > div[name=\"torrrow\"]";
        var ResultParser = new HtmlParser();
        var SearchResultDocument = ResultParser.ParseDocument(results.Content);
        var Rows = SearchResultDocument.QuerySelectorAll(RowsSelector);
        foreach (var Row in Rows)
        {
            try
            {
                // initialize ReleaseInfo
                var release = new ReleaseInfo
                {
                    MinimumRatio = 1,
                    MinimumSeedTime = 0
                };

                // Get Category
                var catega = Row.QuerySelector("div.likona div").GetAttribute("alt");
                release.Category = MapTrackerCatDescToNewznab(catega);

                // Title and torrent link
                var qDetailsLink = Row.QuerySelector("div.listeklink a");
                release.Title = qDetailsLink.TextContent;
                release.Comments = new Uri(SiteLink + qDetailsLink.GetAttribute("href").TrimStart('/'));
                release.Guid = release.Comments;

                // Date of torrent creation
                var liopis = Row.QuerySelector("div.listeklink div span.middle");
                int ind = liopis.TextContent.IndexOf("Naloženo:");
                String reldate = liopis.TextContent.Substring(ind + 10, 22);
                release.PublishDate = DateTime.ParseExact(reldate, "dd.MM.yyyy ob HH:mm:ss", CultureInfo.InvariantCulture);

                // Is freeleech?
                var checkIfFree = Row.QuerySelector("div.listeklink div.liopisl img[title=\"freeleech\"]") != null;

                // Download link
                var qDownloadLink = Row.QuerySelector("div.data3t a").GetAttribute("href");
                release.Link = new Uri(SiteLink + qDownloadLink.TrimStart('/'));

                // Various data - size, seeders, leechers, download count
                var sel = Row.QuerySelectorAll("div.datat");
                var size = sel[0].TextContent;
                release.Size = ReleaseInfo.GetBytes(size);
                release.Seeders = ParseUtil.CoerceInt(sel[1].TextContent);
                release.Peers = ParseUtil.CoerceInt(sel[2].TextContent) + release.Seeders;
                release.Grabs = ParseUtil.CoerceLong(sel[3].TextContent);

                // Set download/upload factor
                release.DownloadVolumeFactor = checkIfFree ? 0 : 1;
                release.UploadVolumeFactor = 1;

                // Add the current release to the list
                releases.Add(release);
            }
            catch (Exception ex)
            {
                logger.Error(string.Format("{0}: Error while parsing row '{1}':\n\n{2}", ID, Row.OuterHtml, ex));
            }
        }
    }
    catch (Exception ex)
    {
        OnParseError(results.Content, ex);
    }
    return releases;
}
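// Illustrative sketch (not part of the Partis indexer): a self-contained check of the date
// slicing used above. "Naloženo:" is 9 characters long, so ind + 10 skips the label plus one
// separator character, and the next 22 characters hold exactly "dd.MM.yyyy ob HH:mm:ss".
// The sample text is an assumption, not captured from the live site.
private static DateTime ParseNalozenoDateSample()
{
    var sampleText = "Naloženo: 01.02.2020 ob 12:34:56";
    var ind = sampleText.IndexOf("Naloženo:");
    return DateTime.ParseExact(
        sampleText.Substring(ind + 10, 22),
        "dd.MM.yyyy ob HH:mm:ss",
        CultureInfo.InvariantCulture); // => 2020-02-01 12:34:56
}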