// Wrapper for Run which takes care of CloudFlare challenges, calls RunCurl
override protected async Task<WebClientByteResult> Run(WebRequest request)
{
    var result = await RunCurl(request);

    // A CloudFlare challenge shows up as a 503 served by "cloudflare-nginx".
    string[] serverHeader;
    var isCloudFlareChallenge = result.Status == HttpStatusCode.ServiceUnavailable
        && result.Headers.TryGetValue("server", out serverHeader)
        && serverHeader[0] == "cloudflare-nginx";

    if (isCloudFlareChallenge)
    {
        logger.Info("UnixLibCurlWebClient: Received a new CloudFlare challenge");

        // Extract the clearance URL from the challenge page.
        var challengePage = Encoding.UTF8.GetString(result.Content);
        var clearanceUri = CloudFlareChallengeSolverSolve(challengePage, new Uri(request.Url));
        logger.Info(string.Format("UnixLibCurlWebClient: CloudFlare clearanceUri: {0}", clearanceUri));

        // CloudFlare expects the client to wait before requesting clearance.
        await Task.Delay(5000);

        // Request the clearance URL to obtain the cf_clearance cookie.
        var clearanceResponse = await CurlHelper.GetAsync(clearanceUri, request.Cookies, request.Referer);
        logger.Info(string.Format("UnixLibCurlWebClient: received CloudFlare clearance cookie: {0}", clearanceResponse.Cookies));

        // Attach the new cf_clearance cookie and retry the original request.
        request.Cookies = clearanceResponse.Cookies + request.Cookies;
        result = await RunCurl(request);

        // Surface the clearance cookie on the result so the stored config is
        // updated for subsequent requests.
        result.Cookies = clearanceResponse.Cookies + " " + result.Cookies;
    }

    return result;
}
// Wrapper for Run which takes care of CloudFlare challenges
private async Task<WebClientByteResult> RunCloudFlare(WebRequest request)
{
    var result = await Run(request);

    // Heuristic: a 503 on a session carrying the __cfduid cookie (either on
    // the request we sent or on the response we got) means CloudFlare is
    // challenging us.
    var hasCfCookie = (request.Cookies != null && request.Cookies.Contains("__cfduid"))
        || result.Cookies.Contains("__cfduid");

    if (result.Status == HttpStatusCode.ServiceUnavailable && hasCfCookie)
    {
        logger.Info("UnixLibCurlWebClient: Received a new CloudFlare challenge");

        // Extract the clearance URL from the challenge page.
        var challengePage = Encoding.UTF8.GetString(result.Content);
        var clearanceUri = CloudFlareChallengeSolverSolve(challengePage, new Uri(request.Url));
        logger.Info(string.Format("UnixLibCurlWebClient: CloudFlare clearanceUri: {0}", clearanceUri));

        // CloudFlare expects the client to wait before requesting clearance.
        await Task.Delay(5000);

        // Request the clearance URL to obtain the cf_clearance cookie.
        var clearanceResponse = await CurlHelper.GetAsync(clearanceUri, request.Cookies, request.Referer);
        logger.Info(string.Format("UnixLibCurlWebClient: received CloudFlare clearance cookie: {0}", clearanceResponse.Cookies));

        // Attach the new cf_clearance cookie and retry the original request.
        request.Cookies = clearanceResponse.Cookies + request.Cookies;
        result = await Run(request);

        // Surface the clearance cookie on the result so the stored config is
        // updated for subsequent requests.
        result.Cookies = clearanceResponse.Cookies + " " + result.Cookies;
    }

    return result;
}
// Downloads the raw bytes behind a link. On Windows the shared HttpClient is
// used; on other platforms the request goes through curl so the stored cookie
// header is honoured.
public async Task<byte[]> Download(Uri link)
{
    if (!Program.IsWindows)
    {
        var curlResponse = await CurlHelper.GetAsync(link.ToString(), cookieHeader);
        return curlResponse.Content;
    }

    return await client.GetByteArrayAsync(link);
}
// Executes a web request through curl and converts the curl response into a
// WebClientByteResult, capturing any Location header as a redirect target.
private async Task<WebClientByteResult> Run(WebRequest request)
{
    Jackett.CurlHelper.CurlResponse response;

    if (request.Type == RequestType.GET)
    {
        response = await CurlHelper.GetAsync(request.Url, request.Cookies, request.Referer);
    }
    else
    {
        // Log the outgoing body; a raw body takes precedence over form data.
        if (!string.IsNullOrEmpty(request.RawBody))
        {
            logger.Debug("UnixLibCurlWebClient: Posting " + request.RawBody);
        }
        else if (request.PostData != null && request.PostData.Count() > 0)
        {
            logger.Debug("UnixLibCurlWebClient: Posting " + StringUtil.PostDataFromDict(request.PostData));
        }

        response = await CurlHelper.PostAsync(request.Url, request.PostData, request.Cookies, request.Referer, request.RawBody);
    }

    var result = new WebClientByteResult()
    {
        Content = response.Content,
        Cookies = response.Cookies,
        Status = response.Status
    };

    // Only the Location header is of interest here.
    if (response.HeaderList != null)
    {
        foreach (var header in response.HeaderList)
        {
            if (header[0].ToLowerInvariant() == "location")
            {
                result.RedirectingTo = header[1];
            }
        }
    }

    ServerUtil.ResureRedirectIsFullyQualified(request, result);
    return result;
}
// Executes a web request through curl and converts the curl response into a
// WebClientByteResult. Captures response headers, follows the CloudFlare
// "Refresh" convention (a 503 carrying a Refresh header is turned into a
// Redirect after honouring the advertised delay), and fully qualifies any
// relative redirect target.
protected async Task<WebClientByteResult> RunCurl(WebRequest request)
{
    Jackett.CurlHelper.CurlResponse response;

    if (request.Type == RequestType.GET)
    {
        response = await CurlHelper.GetAsync(request.Url, request.Cookies, request.Referer, request.Headers);
    }
    else
    {
        // Log the outgoing body; a raw body takes precedence over form data.
        if (!string.IsNullOrEmpty(request.RawBody))
        {
            logger.Debug("UnixLibCurlWebClient: Posting " + request.RawBody);
        }
        else if (request.PostData != null && request.PostData.Count() > 0)
        {
            logger.Debug("UnixLibCurlWebClient: Posting " + StringUtil.PostDataFromDict(request.PostData));
        }

        response = await CurlHelper.PostAsync(request.Url, request.PostData, request.Cookies, request.Referer, request.Headers, request.RawBody);
    }

    var result = new WebClientByteResult()
    {
        Content = response.Content,
        Cookies = response.Cookies,
        Status = response.Status
    };

    if (response.HeaderList != null)
    {
        foreach (var header in response.HeaderList)
        {
            var key = header[0].ToLowerInvariant();
            result.Headers[key] = new string[] { header[1] }; // doesn't support multiple identical headers?

            switch (key)
            {
                case "location":
                    result.RedirectingTo = header[1];
                    break;

                case "refresh":
                    if (response.Status == System.Net.HttpStatusCode.ServiceUnavailable)
                    {
                        // CloudFlare sends e.g.
                        // "Refresh: 8;URL=/cdn-cgi/l/chk_jschl?pass=1451000679.092-1vJFUJLb9R"
                        // i.e. "<delay-seconds>;URL=<target>".
                        var value = header[1];
                        var start = value.IndexOf("=");
                        var end = value.IndexOf(";");
                        // BUGFIX: the original only checked 'start' and then called
                        // value.Substring(0, end), which throws when ';' is absent
                        // (end == -1). Require both delimiters before parsing.
                        if (start > -1 && end > -1)
                        {
                            result.RedirectingTo = value.Substring(start + 1);
                            // normally we don't want a serviceunavailable (503) to be a redirect, but that's the nature
                            // of this cloudflare approach..don't want to alter BaseWebResult.IsRedirect because normally
                            // it shoudln't include service unavailable..only if we have this redirect header.
                            result.Status = System.Net.HttpStatusCode.Redirect;
                            var redirtime = Int32.Parse(value.Substring(0, end), System.Globalization.CultureInfo.InvariantCulture);
                            // BUGFIX: Thread.Sleep blocked a thread-pool thread inside
                            // an async method; await the delay instead.
                            await Task.Delay(redirtime * 1000);
                        }
                    }
                    break;
            }
        }
    }

    ServerUtil.ResureRedirectIsFullyQualified(request, result);
    return result;
}
// Runs the Torznab query against the site's HTML torrent table and parses each
// result row into a ReleaseInfo. Episode searches use the "browse" section,
// everything else the "archive" section.
public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
{
    List<ReleaseInfo> releases = new List<ReleaseInfo>();

    foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
    {
        var searchString = title + " " + query.GetEpisodeSearchString();
        var searchSection = string.IsNullOrEmpty(query.Episode) ? "archive" : "browse";
        var searchCategory = string.IsNullOrEmpty(query.Episode) ? "26" : "27";
        var searchUrl = string.Format(SearchUrl, searchSection, searchCategory, searchString);

        string results;
        if (Program.IsWindows)
        {
            results = await client.GetStringAsync(searchUrl);
        }
        else
        {
            var response = await CurlHelper.GetAsync(searchUrl, cookieHeader);
            results = Encoding.UTF8.GetString(response.Content);
        }

        try
        {
            CQ dom = results;
            var rows = dom["#torrents-table > tbody > tr.tt_row"];
            foreach (var row in rows)
            {
                CQ qRow = row.Cq();
                var release = new ReleaseInfo();
                release.MinimumRatio = 1;
                release.MinimumSeedTime = 129600;
                release.Title = qRow.Find(".ttr_name > a").Text();
                release.Description = release.Title;
                release.Guid = new Uri(BaseUrl + "/" + qRow.Find(".ttr_name > a").Attr("href"));
                release.Comments = release.Guid;
                release.Link = new Uri(BaseUrl + "/" + qRow.Find(".td_dl > a").Attr("href"));

                // Size cell is "<value> <unit>", e.g. "1.5 GB".
                var sizeStr = qRow.Find(".ttr_size").Contents()[0].NodeValue;
                var sizeParts = sizeStr.Split(' ');
                // BUGFIX: parse site-generated numbers with the invariant culture so
                // "1.5" is read the same way regardless of the host locale.
                release.Size = ReleaseInfo.GetBytes(sizeParts[1], float.Parse(sizeParts[0], NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture));

                var timeStr = qRow.Find(".ttr_added").Text();
                DateTime time;
                if (DateTime.TryParseExact(timeStr, "yyyy-MM-ddHH:mm:ss", CultureInfo.InvariantCulture, DateTimeStyles.None, out time))
                {
                    release.PublishDate = time;
                }

                // BUGFIX: invariant culture here too — thousands separators differ
                // between locales.
                release.Seeders = int.Parse(qRow.Find(".ttr_seeders").Text(), NumberStyles.AllowThousands, CultureInfo.InvariantCulture);
                release.Peers = int.Parse(qRow.Find(".ttr_leechers").Text(), NumberStyles.AllowThousands, CultureInfo.InvariantCulture) + release.Seeders;
                releases.Add(release);
            }
        }
        catch (Exception ex)
        {
            OnResultParsingError(this, results, ex);
            throw; // BUGFIX: 'throw ex;' resets the stack trace; 'throw;' preserves it
        }
    }

    return releases.ToArray();
}
// Runs the Torznab query against the site's JSON search API and converts each
// result group (and each torrent inside a group, when present) into a
// ReleaseInfo.
public async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query)
{
    List<ReleaseInfo> releases = new List<ReleaseInfo>();

    foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
    {
        var searchString = title + " " + query.GetEpisodeSearchString();
        var episodeSearchUrl = SearchUrl + HttpUtility.UrlEncode(searchString);

        string results;
        if (Program.IsWindows)
        {
            var request = CreateHttpRequest(new Uri(episodeSearchUrl));
            request.Method = HttpMethod.Get;
            var response = await client.SendAsync(request);
            results = await response.Content.ReadAsStringAsync();
        }
        else
        {
            var response = await CurlHelper.GetAsync(episodeSearchUrl, cookieHeader);
            results = Encoding.UTF8.GetString(response.Content);
        }

        try
        {
            var json = JObject.Parse(results);
            foreach (JObject r in json["response"]["results"])
            {
                // groupTime is a unix timestamp in seconds.
                DateTime pubDate = DateTime.MinValue;
                double dateNum;
                // BUGFIX: parse with the invariant culture — the API always uses '.'
                // as the decimal separator, independent of the host locale.
                if (double.TryParse((string)r["groupTime"], System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out dateNum))
                {
                    pubDate = UnixTimestampToDateTime(dateNum);
                }

                var groupName = (string)r["groupName"];

                if (r["torrents"] is JArray)
                {
                    // A group can hold several torrents (e.g. different qualities).
                    foreach (JObject t in r["torrents"])
                    {
                        var release = new ReleaseInfo();
                        release.PublishDate = pubDate;
                        release.Title = groupName;
                        release.Description = groupName;
                        FillReleaseInfoFromJson(release, t);
                        releases.Add(release);
                    }
                }
                else
                {
                    var release = new ReleaseInfo();
                    release.PublishDate = pubDate;
                    release.Title = groupName;
                    release.Description = groupName;
                    FillReleaseInfoFromJson(release, r);
                    releases.Add(release);
                }
            }
        }
        catch (Exception ex)
        {
            OnResultParsingError(this, results, ex);
            throw; // BUGFIX: 'throw ex;' resets the stack trace; 'throw;' preserves it
        }
    }

    return releases.ToArray();
}
// Runs the Torznab query against both search-URL variants of the site, using a
// single-view switch request whose Referer carries the actual search, and
// parses the resulting HTML table rows into ReleaseInfos.
async Task<ReleaseInfo[]> PerformQuery(TorznabQuery query, string baseUrl)
{
    List<ReleaseInfo> releases = new List<ReleaseInfo>();
    List<string> searchUrls = new List<string>();

    // Build both search-URL variants for every show title in the query.
    foreach (var title in query.ShowTitles ?? new string[] { string.Empty })
    {
        var searchString = title + " " + query.GetEpisodeSearchString();
        var queryStr = HttpUtility.UrlEncode(searchString);
        searchUrls.Add(baseUrl + string.Format(SearchUrl, queryStr));
        searchUrls.Add(baseUrl + string.Format(SearchUrl2, queryStr));
    }

    foreach (var episodeSearchUrl in searchUrls)
    {
        // The site serves the single-page view based on the Referer header,
        // which carries the actual search URL.
        var message = new HttpRequestMessage
        {
            Method = HttpMethod.Get,
            RequestUri = new Uri(baseUrl + SwitchSingleViewUrl)
        };
        message.Headers.Referrer = new Uri(episodeSearchUrl);

        string results;
        if (Program.IsWindows)
        {
            var response = await client.SendAsync(message);
            results = await response.Content.ReadAsStringAsync();
        }
        else
        {
            var response = await CurlHelper.GetAsync(baseUrl + SwitchSingleViewUrl, null, episodeSearchUrl);
            results = Encoding.UTF8.GetString(response.Content);
        }

        try
        {
            CQ dom = results;
            var rows = dom["#searchResult > tbody > tr"];
            foreach (var row in rows)
            {
                var release = new ReleaseInfo();
                CQ qLink = row.ChildElements.ElementAt(1).Cq().Children("a").First();
                release.MinimumRatio = 1;
                release.MinimumSeedTime = 172800;
                release.Title = qLink.Text().Trim();
                release.Description = release.Title;
                release.Comments = new Uri(baseUrl + "/" + qLink.Attr("href").TrimStart('/'));
                release.Guid = release.Comments;

                // Upload time is either relative ("x mins ago", "Today hh:mm",
                // "Y-day hh:mm") or absolute in the site's local zone (UTC+2).
                var timeString = row.ChildElements.ElementAt(2).Cq().Text();
                if (timeString.Contains("mins ago"))
                {
                    release.PublishDate = (DateTime.Now - TimeSpan.FromMinutes(int.Parse(timeString.Split(' ')[0])));
                }
                else if (timeString.Contains("Today"))
                {
                    release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(2) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
                }
                else if (timeString.Contains("Y-day"))
                {
                    release.PublishDate = (DateTime.UtcNow - TimeSpan.FromHours(26) - TimeSpan.Parse(timeString.Split(' ')[1])).ToLocalTime();
                }
                else if (timeString.Contains(':'))
                {
                    var utc = DateTime.ParseExact(timeString, "MM-dd HH:mm", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
                    release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
                }
                else
                {
                    var utc = DateTime.ParseExact(timeString, "MM-dd yyyy", CultureInfo.InvariantCulture) - TimeSpan.FromHours(2);
                    release.PublishDate = DateTime.SpecifyKind(utc, DateTimeKind.Utc).ToLocalTime();
                }

                // The info hash is embedded in the magnet link:
                // magnet:?xt=urn:btih:<hash>&...
                var downloadCol = row.ChildElements.ElementAt(3).Cq().Find("a");
                release.MagnetUri = new Uri(downloadCol.Attr("href"));
                release.InfoHash = release.MagnetUri.ToString().Split(':')[3].Split('&')[0];

                var sizeString = row.ChildElements.ElementAt(4).Cq().Text().Split(' ');
                var sizeVal = float.Parse(sizeString[0], CultureInfo.InvariantCulture);
                var sizeUnit = sizeString[1];
                release.Size = ReleaseInfo.GetBytes(sizeUnit, sizeVal);

                release.Seeders = int.Parse(row.ChildElements.ElementAt(5).Cq().Text());
                release.Peers = int.Parse(row.ChildElements.ElementAt(6).Cq().Text()) + release.Seeders;
                releases.Add(release);
            }
        }
        catch (Exception ex)
        {
            OnResultParsingError(this, results, ex);
            throw; // BUGFIX: 'throw ex;' resets the stack trace; 'throw;' preserves it
        }
    }

    return releases.ToArray();
}