/// <summary>
/// Parses a bencoded HTTP scrape response from <paramref name="responseStream"/>
/// into an <see cref="IScrapeResponse"/>. The stream is disposed when parsing completes.
/// </summary>
/// <param name="responseStream">Raw tracker response body; consumed and disposed by this call.</param>
/// <returns>An <see cref="HttpScrapeResponse"/> whose <c>Files</c> maps each info-hash to its statistics.</returns>
/// <exception cref="TrackerFailureException">Thrown when the tracker reported a "failure reason".</exception>
public IScrapeResponse CreateResponse(Stream responseStream)
{
    using (responseStream)
    {
        // Loose mode tolerates minor bencoding deviations from real-world trackers.
        BEncodingSettings.ParserMode = BEncodingParserMode.Loose;
        BEncodedDictionary root = BEncodedDictionary.Decode(responseStream);

        // A tracker-level failure replaces the whole payload; surface it as an exception.
        if (root.ContainsKey("failure reason"))
            throw new TrackerFailureException((BEncodedString)root["failure reason"]);

        InternalTorrentStatisticCollection statistics = new InternalTorrentStatisticCollection();
        foreach (KeyValuePair<BEncodedString, IBEncodedValue> entry in (BEncodedDictionary)root["files"])
        {
            BEncodedDictionary detail = (BEncodedDictionary)entry.Value;
            // "name" is optional in the scrape dictionary.
            string name = detail.ContainsKey("name") ? ((string)(BEncodedString)detail["name"]) : String.Empty;
            statistics.Add(
                new InfoHash((BEncodedString)entry.Key),
                new TorrentStatistic(
                    (BEncodedInteger)detail["complete"],
                    (BEncodedInteger)detail["downloaded"],
                    (BEncodedInteger)detail["incomplete"],
                    name));
        }

        HttpScrapeResponse response = new HttpScrapeResponse();
        response.Files = new TorrentStatisticCollection(statistics);
        return response;
    }
}
/// <summary>
/// Builds an <see cref="IScrapeResponse"/> from a UDP scrape exchange by pairing each
/// requested info-hash with the statistics entry at the same index in the response packet.
/// </summary>
/// <param name="requestPacket">The request that was sent; supplies the info-hash order.</param>
/// <param name="responsePacket">The tracker's reply; supplies one statistic per requested hash.</param>
/// <returns>A <see cref="UdpScrapeResponse"/> whose <c>Files</c> maps info-hashes to statistics.</returns>
public IScrapeResponse CreateResponse(ref UdpScrapeRequestPacket requestPacket, ref UdpScrapeResponsePacket responsePacket)
{
    InternalTorrentStatisticCollection statistics = new InternalTorrentStatisticCollection();

    // The UDP protocol returns statistics positionally, in request order.
    int count = responsePacket.files.Length;
    for (int index = 0; index < count; index++)
        statistics.Add(requestPacket.info_hash[index], responsePacket.files[index]);

    UdpScrapeResponse response = new UdpScrapeResponse();
    response.Files = new TorrentStatisticCollection(statistics);
    return response;
}
/// <summary>
/// Initializes a new <see cref="CoalescedScrapeResponse"/> over the per-torrent statistics
/// accumulated by a coalesced scrape; forwards them unchanged to the base class.
/// </summary>
/// <param name="torrentStatistics">Statistics keyed by info-hash, owned by the base class after construction.</param>
internal CoalescedScrapeResponse(InternalTorrentStatisticCollection torrentStatistics) : base(torrentStatistics) { }
/// <summary>
/// Scrapes <paramref name="tracker"/> for every info-hash in <paramref name="scrapeList"/>,
/// choosing the cheapest strategy the tracker advertises: a single full scrape, batched
/// multi-scrapes, or one request per info-hash. Results are coalesced into one response.
/// </summary>
/// <param name="tracker">The tracker to scrape.</param>
/// <param name="scrapeList">Info-hashes to look up; an empty list yields an empty response.</param>
/// <returns>A <see cref="CoalescedScrapeResponse"/> containing the statistics found.</returns>
/// <exception cref="NotSupportedException">The tracker does not support scraping at all.</exception>
/// <exception cref="TimeoutException">A full scrape produced no response.</exception>
public static CoalescedScrapeResponse CoalescedScrape(this Tracker tracker, params InfoHash[] scrapeList)
{
    object syncRoot = new object();
    TrackerBehavior extendedBehavior = tracker.GetBehavior();
    InternalTorrentStatisticCollection torrentStatistics = new InternalTorrentStatisticCollection();

    if (scrapeList.Length > 0)
    {
        if (!extendedBehavior.SupportsScrape)
            throw new NotSupportedException("Tracker does not support scraping");

        if (extendedBehavior.SupportsFullScrape
            && !(extendedBehavior.SupportsMultiScrape && scrapeList.Length < extendedBehavior.MultiScrapeThreshold)
            && !(extendedBehavior.SupportsMultiScrape == false && scrapeList.Length < extendedBehavior.ScrapeThreshold))
        {
            // --- Full scrape: one request covering every torrent the tracker knows about.
#if DEBUG
            lock (debugSyncRoot)
            {
                System.Diagnostics.Debug.WriteLine(String.Format("Full scrape[{0}]", scrapeList.Length));
                System.Diagnostics.Debug.Write(tracker.AnnounceUrl);
                System.Diagnostics.Debug.Write(" ");
                System.Diagnostics.Debug.WriteLine("{");
                System.Diagnostics.Debug.Indent();
                foreach (InfoHash infoHash in scrapeList)
                    System.Diagnostics.Debug.WriteLine(infoHash.Hex);
                System.Diagnostics.Debug.Unindent();
                System.Diagnostics.Debug.WriteLine("}");
            }
#endif
            IAsyncResult result = tracker.BeginScrape(tracker.CreateScrapeRequest(scrapeList), null, null);
            // BUG FIX: the original wrapped this in `catch (Exception e) { throw e; }`,
            // which only destroyed the stack trace; let failures propagate naturally.
            IScrapeResponse response = tracker.EndScrape(result);

            if (response == null)
                throw new TimeoutException(String.Format("Full scrape[{0}] failed", scrapeList.Length));

            // BUG FIX: the original declared `files` as null and never assigned it,
            // guaranteeing a NullReferenceException below. It must come from the response.
            TorrentStatisticCollection files = response.Files;
            lock (syncRoot)
            {
                // Keep only the hashes we were asked about, first-writer-wins.
                foreach (InfoHash infoHash in scrapeList)
                    if (!torrentStatistics.ContainsKey(infoHash) && files.ContainsKey(infoHash))
                        torrentStatistics.Add(infoHash, files[infoHash]);
            }
        }
        else if (extendedBehavior.SupportsMultiScrape && scrapeList.Length > 1)
        {
            // --- Multi scrape: batch the info-hashes into requests of at most `range` each.
            int range = extendedBehavior.MultiScrapeRange > scrapeList.Length
                ? scrapeList.Length
                : (extendedBehavior.MultiScrapeRange > 0 ? extendedBehavior.MultiScrapeRange : scrapeList.Length);
            // BUG FIX: batch count is ceil(total / batchSize); the original had the
            // division inverted (range / total), which always yielded 1 and made
            // `results[k]` overflow for any list needing more than one batch.
            int coalescedCount = (int)Math.Ceiling((decimal)scrapeList.Length / (decimal)range);
            // Throttles in-flight requests; NOTE(review): intentionally not disposed here
            // because the Release() callback may still fire after this loop completes.
            Semaphore requestSemaphore = new Semaphore(TrackerSettings.MaxConcurrency, TrackerSettings.MaxConcurrency);
            IAsyncResult[] results = new IAsyncResult[coalescedCount];
            List<Exception> exceptions = new List<Exception>();

            for (int i = 0, k = 0; i < scrapeList.Length; k++)
            {
                // BUG FIX: the original recomputed the batch size from the stale `i`
                // before advancing it, so the last batch could read past the end of
                // `scrapeList` in Array.Copy. Compute it fresh at the top of each pass.
                int j = Math.Min(scrapeList.Length - i, range);
                requestSemaphore.WaitOne();
                InfoHash[] coalescingBuffer = new InfoHash[j];
                Array.Copy(scrapeList, i, coalescingBuffer, 0, j);
#if DEBUG
                lock (debugSyncRoot)
                {
                    System.Diagnostics.Debug.WriteLine(String.Format("Multi scrape[{0}]({1} - {2}) {3} out of {4}", scrapeList.Length, i, i + j, k + 1, coalescedCount));
                    System.Diagnostics.Debug.Write(tracker.AnnounceUrl);
                    System.Diagnostics.Debug.Write(" ");
                    System.Diagnostics.Debug.WriteLine("{");
                    System.Diagnostics.Debug.Indent();
                    foreach (InfoHash infoHash in coalescingBuffer)
                        System.Diagnostics.Debug.WriteLine(infoHash.Hex);
                    System.Diagnostics.Debug.Unindent();
                    System.Diagnostics.Debug.WriteLine("}");
                }
#endif
                // State records [total, start, end, batchIndex] for diagnostics on failure.
                object state = new int[4] { scrapeList.Length, i, i + j, k };
                results[k] = tracker.BeginScrape(tracker.CreateScrapeRequest(coalescingBuffer), new AsyncCallback((result) => requestSemaphore.Release()), state);
                i += j;
            }

            foreach (IAsyncResult result in results)
            {
                int[] state = (int[])result.AsyncState;
                IScrapeResponse response = null;
                try
                {
                    response = tracker.EndScrape(result);
                }
                catch (Exception e)
                {
                    // Individual batch failures are collected; only total failure throws.
                    exceptions.Add(e);
                    continue;
                }
                if (response != null)
                {
                    lock (syncRoot)
                    {
                        foreach (KeyValuePair<InfoHash, TorrentStatistic> file in response.Files)
                            if (!torrentStatistics.ContainsKey(file.Key))
                                torrentStatistics.Add(file.Key, file.Value);
                    }
                }
                else
                {
                    exceptions.Add(new TimeoutException(String.Format("Multi scrape[{0}]({1} - {2}) {3} out of {4} failed", state[0], state[1], state[2], state[3] + 1, coalescedCount)));
                }
            }

            // Only throw when every batch failed; partial results are still returned.
            if (exceptions.Count == coalescedCount)
                ThrowMultipleException(exceptions);
        }
        else
        {
            // --- Single scrape: one request per info-hash.
            // NOTE(review): intentionally not disposed here — see multi-scrape branch.
            Semaphore requestSemaphore = new Semaphore(TrackerSettings.MaxConcurrency, TrackerSettings.MaxConcurrency);
            IAsyncResult[] results = new IAsyncResult[scrapeList.Length];
            List<Exception> exceptions = new List<Exception>();

            for (int i = 0; i < scrapeList.Length; i++)
            {
                requestSemaphore.WaitOne();
#if DEBUG
                lock (debugSyncRoot)
                {
                    System.Diagnostics.Debug.WriteLine("Single scrape[1]");
                    System.Diagnostics.Debug.Write(tracker.AnnounceUrl);
                    System.Diagnostics.Debug.Write(" ");
                    System.Diagnostics.Debug.WriteLine("{");
                    System.Diagnostics.Debug.Indent();
                    System.Diagnostics.Debug.WriteLine(scrapeList[i].Hex);
                    System.Diagnostics.Debug.Unindent();
                    System.Diagnostics.Debug.WriteLine("}");
                }
#endif
                results[i] = tracker.BeginScrape(tracker.CreateScrapeRequest(scrapeList[i]), new AsyncCallback((result) => requestSemaphore.Release()), scrapeList[i]);
            }

            foreach (IAsyncResult result in results)
            {
                InfoHash infoHash = (InfoHash)result.AsyncState;
                IScrapeResponse response = null;
                try
                {
                    response = tracker.EndScrape(result);
                }
                catch (Exception e)
                {
                    exceptions.Add(e);
                    continue;
                }
                if (response != null)
                {
                    if (!torrentStatistics.ContainsKey(infoHash) && response.Files.ContainsKey(infoHash))
                        torrentStatistics.Add(infoHash, response.Files[infoHash]);
                }
                else
                {
                    exceptions.Add(new TimeoutException("Single scrape[1] failed"));
                }
            }

            // Only throw when every request failed; partial results are still returned.
            if (exceptions.Count == scrapeList.Length)
                ThrowMultipleException(exceptions);
        }
    }

    return new CoalescedScrapeResponse(torrentStatistics);
}