/// <summary>
/// Scrapes the tracker, first establishing the UDP connection if one
/// has not been made yet.
/// </summary>
/// <param name="parameters">The infohash to scrape.</param>
/// <param name="state">Opaque caller state passed back on completion.</param>
public override void Scrape(ScrapeParameters parameters, object state)
{
    // Already connected: go straight to the scrape request.
    if (hasConnected) {
        DoScrape(parameters, state);
        return;
    }

    // A connect attempt is already in flight; its callback will resume the scrape.
    if (amConnecting)
        return;

    amConnecting = true;
    try {
        Connect(new ConnectScrapeState(parameters, ConnectScrapeCallback, state));
    } catch (SocketException) {
        // Connection failed outright — report the scrape as unsuccessful.
        DoScrapeComplete(false, state);
    }
}
/// <summary>
/// Verifies the tracker's scrape counters (Complete / Incomplete / Downloaded)
/// update correctly as peers announce Started and Completed events.
/// </summary>
public async Task Scrape()
{
    // make sure it's a unique infohash as the listener isn't re-created for every test.
    infoHash = new InfoHash(Enumerable.Repeat((byte)1, 20).ToArray());
    server.Add(new InfoHashTrackable("Test", infoHash));
    scrapeParams = new ScrapeParameters(infoHash);

    // Scrape once and check the three counters, labelling asserts
    // "#step", "#step+1", "#step+2" (same labels as before).
    async Task ScrapeAndCheck(int complete, int incomplete, int downloaded, int step)
    {
        await tracker.ScrapeAsync(scrapeParams);
        Assert.AreEqual(complete, tracker.Complete, "#" + step);
        Assert.AreEqual(incomplete, tracker.Incomplete, "#" + (step + 1));
        Assert.AreEqual(downloaded, tracker.Downloaded, "#" + (step + 2));
    }

    // No peers yet: everything is zero.
    await ScrapeAndCheck(0, 0, 0, 1);

    // One leecher (100 bytes remaining) announces.
    await tracker.AnnounceAsync(new AnnounceParameters (0, 0, 100, TorrentEvent.Started, infoHash, false, "peer1", null, 1, false));
    await ScrapeAndCheck(0, 1, 0, 4);

    // A seeder (0 bytes remaining) announces.
    await tracker.AnnounceAsync(new AnnounceParameters (0, 0, 0, TorrentEvent.Started, infoHash, false, "peer2", null, 2, false));
    await ScrapeAndCheck(1, 1, 0, 7);

    // A peer reports Completed: Complete and Downloaded both increase.
    await tracker.AnnounceAsync(new AnnounceParameters (0, 0, 0, TorrentEvent.Completed, infoHash, false, "peer3", null, 3, false));
    await ScrapeAndCheck(2, 1, 1, 10);
}
/// <summary>
/// Per-test setup: builds a fresh tracker server and HTTP tracker client,
/// and prepares announce/scrape parameters with an infohash containing
/// characters that must be URL-encoded.
/// </summary>
public void Setup()
{
    keys.Clear();
    listener.IncompleteAnnounce = listener.IncompleteScrape = false;

    server = new TrackerServer(trackerId) {
        AllowUnregisteredTorrents = true
    };
    server.RegisterListener(listener);

    tracker = (HTTPTracker)TrackerFactory.Create(AnnounceUrl);

    // Deliberately awkward bytes (space, %, &, ?, =) to exercise URL encoding.
    byte[] half = new[] { ' ', '%', '&', '?', '&', '&', '?', '5', '1', '=' }
        .Select(c => (byte)c)
        .ToArray();
    infoHash = new InfoHash(half.Concat(half).ToArray());

    announceParams = new AnnounceParameters()
        .WithPort(5555)
        .WithPeerId(peerId)
        .WithInfoHash(infoHash);
    scrapeParams = new ScrapeParameters(new InfoHash(new byte[20]));
}
/// <summary>
/// Scrapes the tracker by delegating to the subclass implementation.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <returns>A task that completes when the scrape finishes.</returns>
public Task ScrapeAsync(ScrapeParameters parameters)
{
    return DoScrapeAsync(parameters);
}
/// <summary>
/// Captures a pending scrape request so it can be resumed once the
/// UDP connect handshake has completed.
/// </summary>
/// <param name="parameters">The scrape request being deferred.</param>
/// <param name="callback">Invoked when the async operation completes.</param>
/// <param name="state">Opaque caller state.</param>
public ConnectScrapeState(ScrapeParameters parameters, AsyncCallback callback, object state)
    : base(callback, state)
{
    Parameters = parameters;
}
/// <summary>
/// Stub scrape implementation that immediately reports a successful result
/// without contacting anything.
/// </summary>
public override void Scrape(ScrapeParameters parameters, object state)
{
    var response = new ScrapeResponseEventArgs(this, state, true);
    RaiseScrapeComplete(response);
}
/// <summary>
/// Scraping is not supported by this tracker type.
/// </summary>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override void Scrape(ScrapeParameters parameters, object state)
{
    throw new NotSupportedException();
}
/// <summary>
/// Sends a scrape request for a single infohash over the already-established
/// UDP connection.
/// </summary>
/// <param name="parameters">The infohash to scrape.</param>
/// <param name="state">Opaque caller state passed back on completion.</param>
private void DoScrape(ScrapeParameters parameters, object state)
{
    // Strange because here only one infohash??? Or get all torrent infohashes
    // and loop over the client engine's torrents instead.
    var hashes = new List<byte[]>(1) { parameters.InfoHash.Hash };

    var connectState = new ConnectScrapeState(parameters, ScrapeCallback, state);
    // NOTE(review): DateTime.Now.GetHashCode() serves as the transaction id —
    // presumably just a cheap pseudo-random value; confirm uniqueness is not required.
    connectState.Message = new ScrapeMessage(DateTime.Now.GetHashCode(), connectionId, hashes);

    try {
        SendAndReceive(connectState);
    } catch (SocketException) {
        // Network failure — report the scrape as unsuccessful.
        DoScrapeComplete(false, state);
    }
}
/// <summary>
/// Test fake: records the scrape timestamp and reports success unless
/// FailScrape has been set.
/// </summary>
public override void Scrape(ScrapeParameters parameters, object state)
{
    RaiseBeforeScrape();
    ScrapedAt.Add(DateTime.Now);

    bool succeeded = !FailScrape;
    RaiseScrapeComplete(new ScrapeResponseEventArgs(this, state, succeeded));
}
/// <summary>
/// Asynchronously scrapes the tracker for the torrent(s) described by
/// <paramref name="parameters"/>.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <param name="state">Connection identifier associated with this request.</param>
/// <returns>A task that completes when the scrape finishes.</returns>
public abstract Task ScrapeAsync(ScrapeParameters parameters, TrackerConnectionID state);
/// <summary>
/// Verifies that scraping an unreachable UDP tracker eventually completes
/// with an unsuccessful result instead of hanging.
/// </summary>
void OfflineScrapeTest()
{
    UdpTracker t = (UdpTracker)TrackerFactory.Create(new Uri("udp://127.0.0.1:57532/announce"));
    // Short retry delay so the failure path resolves quickly.
    t.RetryDelay = TimeSpan.FromMilliseconds(500);
    TrackerConnectionID id = new TrackerConnectionID(t, false, TorrentEvent.Started, new ManualResetEvent(false));

    ScrapeResponseEventArgs p = null;
    t.ScrapeComplete += delegate (object o, ScrapeResponseEventArgs e) {
        // FIX: this previously called Console.ReadLine() when e.Successful was
        // true, blocking the test run forever on an unexpected success. Just
        // record the result and let the assertions below report it.
        p = e;
        id.WaitHandle.Set();
    };

    ScrapeParameters pars = new ScrapeParameters(new InfoHash(new byte[20]));
    t.Scrape(pars, id);
    Wait(id.WaitHandle);

    Assert.IsNotNull(p, "#1");
    Assert.IsFalse(p.Successful, "#2");
}
/// <summary>
/// Verifies a basic scrape against a running UDP tracker succeeds and
/// returns zeroed statistics when no peers have announced.
/// </summary>
public void ScrapeTest()
{
    UdpTracker t = (UdpTracker)TrackerFactory.Create(new Uri(prefix));
    Assert.IsTrue(t.CanScrape, "#1");

    TrackerConnectionID id = new TrackerConnectionID(t, false, TorrentEvent.Started, new ManualResetEvent(false));
    ScrapeResponseEventArgs p = null;
    t.ScrapeComplete += delegate (object o, ScrapeResponseEventArgs e) {
        p = e;
        id.WaitHandle.Set();
    };

    ScrapeParameters pars = new ScrapeParameters(new InfoHash(new byte[20]));
    t.Scrape(pars, id);
    Wait(id.WaitHandle);

    Assert.IsNotNull(p, "#2");
    Assert.IsTrue(p.Successful, "#3");
    // FIX: these labels previously reused "#1"-"#3", making failure messages
    // ambiguous with the assertions above; renumbered to "#4"-"#6".
    Assert.AreEqual(0, t.Complete, "#4");
    Assert.AreEqual(0, t.Incomplete, "#5");
    Assert.AreEqual(0, t.Downloaded, "#6");
}
/// <summary>
/// Asynchronously scrapes the tracker for the torrent(s) described by
/// <paramref name="parameters"/>.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <returns>A task that completes when the scrape finishes.</returns>
public abstract Task ScrapeAsync(ScrapeParameters parameters);
/// <summary>
/// Issues an HTTP scrape request, appending the torrent's infohash as a
/// query parameter to the tracker's scrape URL.
/// </summary>
/// <param name="parameters">The infohash to scrape.</param>
/// <param name="state">Opaque caller state passed back on completion.</param>
public override void Scrape(ScrapeParameters parameters, object state)
{
    try {
        string url = scrapeUrl.OriginalString;
        // If you want to scrape the tracker for *all* torrents, don't append the info_hash.
        char separator = url.IndexOf('?') == -1 ? '?' : '&';
        url += separator + "info_hash=" + parameters.InfoHash.UrlEncode ();

        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
        request.UserAgent = MonoTorrent.Common.VersionInfo.ClientVersion;
        BeginRequest(request, ScrapeReceived, new object[] { request, state });
    } catch {
        // Any failure building or starting the request is reported as an
        // unsuccessful scrape rather than thrown to the caller.
        RaiseScrapeComplete(new ScrapeResponseEventArgs(this, state, false));
    }
}
// Intentionally a no-op: this tracker implementation ignores scrape requests
// and never raises ScrapeComplete. NOTE(review): a caller waiting on a
// completion event would wait forever — confirm that is intended.
public override void Scrape(ScrapeParameters parameters, object state) { }
/// <summary>
/// Subclass hook that performs the actual scrape described by
/// <paramref name="parameters"/>.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <returns>A task that completes when the scrape finishes.</returns>
protected abstract Task DoScrapeAsync(ScrapeParameters parameters);
/// <summary>
/// Begins a scrape for the torrent described by <paramref name="parameters"/>;
/// completion is reported via an event/callback rather than a return value.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <param name="state">Opaque caller state passed back on completion.</param>
public abstract void Scrape(ScrapeParameters parameters, object state);
/// <summary>
/// Subclass hook that performs the actual scrape described by
/// <paramref name="parameters"/>.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <param name="token">Cancels the in-flight scrape request.</param>
/// <returns>The scrape response from the tracker.</returns>
protected abstract ReusableTask <ScrapeResponse> DoScrapeAsync(ScrapeParameters parameters, CancellationToken token);
/// <summary>
/// Scrapes the tracker by delegating to the subclass implementation.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <param name="token">Cancels the in-flight scrape request.</param>
/// <returns>The scrape response from the tracker.</returns>
public ReusableTask<ScrapeResponse> ScrapeAsync(ScrapeParameters parameters, CancellationToken token)
    => DoScrapeAsync(parameters, token);
/// <summary>
/// Scrapes the tracker by delegating to the subclass implementation.
/// </summary>
/// <param name="parameters">The infohash(es) to scrape.</param>
/// <returns>A task that completes when the scrape finishes.</returns>
public Task ScrapeAsync(ScrapeParameters parameters)
    => DoScrapeAsync(parameters);