/// <summary>
/// Handles an incoming UDP scrape, dropping it entirely when <c>IgnoreScrapes</c> is set.
/// </summary>
protected override async Task ReceiveScrape(UdpClient client, ScrapeMessage scrapeMessage, IPEndPoint remotePeer)
{
    // Guard clause: a tracker configured to ignore scrapes simply never replies.
    if (IgnoreScrapes)
        return;

    await base.ReceiveScrape(client, scrapeMessage, remotePeer);
}
/// <summary>
/// Performs a UDP scrape for a single infohash and copies the returned
/// seed/leech/download counts onto this tracker instance.
/// </summary>
/// <param name="parameters">Carries the infohash to scrape.</param>
/// <exception cref="TrackerException">Thrown when the scrape cannot be completed.</exception>
protected override async Task DoScrapeAsync(ScrapeParameters parameters)
{
    try {
        // Connection ids are only valid for about a minute, so reconnect
        // when the cached one is missing or stale.
        if (ConnectionIdTask == null || LastConnected.Elapsed > TimeSpan.FromMinutes(1))
            ConnectionIdTask = ConnectAsync();
        long id = await ConnectionIdTask;

        var hashes = new List<byte[]> { parameters.InfoHash.Hash };
        var request = new ScrapeMessage(DateTime.Now.GetHashCode(), id, hashes);
        var reply = (ScrapeResponseMessage) await SendAndReceiveAsync(request);

        // One infohash was requested, so a well-formed reply has exactly one entry.
        if (reply.Scrapes.Count == 1) {
            Complete = reply.Scrapes[0].Seeds;
            Downloaded = reply.Scrapes[0].Complete;
            Incomplete = reply.Scrapes[0].Leeches;
        }
        Status = TrackerState.Ok;
    } catch (OperationCanceledException e) {
        // Cancellation here means the tracker never answered in time.
        Status = TrackerState.Offline;
        ConnectionIdTask = null;
        throw new TrackerException("Scrape could not be completed", e);
    } catch (Exception e) {
        Status = TrackerState.InvalidResponse;
        ConnectionIdTask = null;
        throw new TrackerException("Scrape could not be completed", e);
    }
}
/// <summary>
/// Scrapes this tracker for the given infohash, updates the local
/// Complete/Downloaded/Incomplete counters and raises the ScrapeComplete event.
/// </summary>
/// <param name="parameters">Carries the infohash to scrape.</param>
/// <param name="state">Opaque connection id passed back through the completion event.</param>
/// <exception cref="Exception">Wraps any failure; the cached connection id is invalidated first.</exception>
public override async Task ScrapeAsync(ScrapeParameters parameters, TrackerConnectionID state)
{
    try {
        // Connection ids expire after roughly a minute, so reconnect when stale.
        if (ConnectionIdTask == null || LastConnected.Elapsed > TimeSpan.FromMinutes(1))
            ConnectionIdTask = ConnectAsync();
        // FIX: capture the awaited value instead of reading ConnectionIdTask.Result
        // afterwards — accessing .Result on a faulted Task wraps the failure in an
        // AggregateException and is a blocking anti-pattern. This also matches the
        // sibling DoScrapeAsync implementation.
        long connectionId = await ConnectionIdTask;

        var infohashes = new List<byte[]> { parameters.InfoHash.Hash };
        var message = new ScrapeMessage(DateTime.Now.GetHashCode(), connectionId, infohashes);
        var response = (ScrapeResponseMessage) await SendAndReceiveAsync(message);

        // One infohash was requested, so a well-formed reply has exactly one entry.
        if (response.Scrapes.Count == 1) {
            Complete = response.Scrapes[0].Seeds;
            Downloaded = response.Scrapes[0].Complete;
            Incomplete = response.Scrapes[0].Leeches;
        }
        RaiseScrapeComplete(new ScrapeResponseEventArgs(this, state, true));
    } catch (Exception e) {
        // Invalidate the cached connection id so the next attempt reconnects.
        ConnectionIdTask = null;
        RaiseScrapeComplete(new ScrapeResponseEventArgs(this, state, false));
        throw new Exception("Scrape could not be completed", e);
    }
}
/// <summary>
/// Processes an incoming scrape unless this tracker is configured to ignore them.
/// </summary>
protected override void ReceiveScrape(ScrapeMessage scrapeMessage)
{
    // Early-out when scrapes are suppressed; otherwise defer to the base handler.
    if (IgnoreScrapes)
        return;

    base.ReceiveScrape(scrapeMessage);
}
/// <summary>
/// Decodes a raw UDP tracker packet into the concrete message type indicated
/// by its action field.
/// </summary>
/// <param name="buffer">Raw packet bytes.</param>
/// <param name="offset">Offset of the packet within <paramref name="buffer"/>.</param>
/// <param name="count">Length of the packet.</param>
/// <param name="type">Whether the packet is a request or a response (the action
/// field sits at offset 8 in requests, offset 0 in responses).</param>
/// <returns>The decoded message, or an <see cref="ErrorMessage"/> when decoding fails.</returns>
/// <exception cref="ProtocolException">Thrown for an unrecognised action value.</exception>
public static UdpTrackerMessage DecodeMessage(byte[] buffer, int offset, int count, MessageType type)
{
    UdpTrackerMessage m = null;
    int action = type == MessageType.Request ? ReadInt(buffer, offset + 8) : ReadInt(buffer, offset);
    switch (action) {
        case 0:
            if (type == MessageType.Request) {
                m = new ConnectMessage();
            } else {
                m = new ConnectResponseMessage();
            }
            break;
        case 1:
            if (type == MessageType.Request) {
                m = new AnnounceMessage();
            } else {
                m = new AnnounceResponseMessage();
            }
            break;
        case 2:
            if (type == MessageType.Request) {
                m = new ScrapeMessage();
            } else {
                m = new ScrapeResponseMessage();
            }
            break;
        case 3:
            m = new ErrorMessage();
            break;
        default:
            // FIX: report the decoded action value. The previous code printed
            // buffer[offset], which for requests is a byte of the connection id,
            // not the action that was actually rejected.
            throw new ProtocolException(string.Format("Invalid udp message received: {0}", action));
    }

    try {
        m.Decode(buffer, offset, count);
    } catch {
        // A malformed payload is surfaced as an error message rather than an exception.
        m = new ErrorMessage(0, "Couldn't decode the tracker response");
    }
    return(m);
}
/// <summary>
/// Answers a UDP scrape request: runs the scrape through the tracker core and
/// streams the per-torrent statistics back to the requester, splitting the
/// reply into batches of at most 74 entries as the protocol requires.
/// </summary>
protected virtual async Task ReceiveScrape(UdpClient client, ScrapeMessage scrapeMessage, IPEndPoint remotePeer)
{
    BEncodedDictionary result = Handle(getCollection(scrapeMessage), remotePeer.Address, true);
    UdpTrackerMessage reply;

    if (result.ContainsKey(TrackerRequest.FailureKey)) {
        // The tracker core rejected the request — relay its failure reason.
        reply = new ErrorMessage(scrapeMessage.TransactionId, result[TrackerRequest.FailureKey].ToString());
    } else {
        var details = new List<ScrapeDetails>();
        foreach (KeyValuePair<BEncodedString, BEncodedValue> torrent in result) {
            var stats = (BEncodedDictionary) torrent.Value;
            int seeds = 0;
            int leeches = 0;
            int downloads = 0;
            foreach (KeyValuePair<BEncodedString, BEncodedValue> field in stats) {
                switch (field.Key.Text) {
                    case "complete": // currently connected seeds
                        seeds = Convert.ToInt32(field.Value.ToString());
                        break;
                    case "downloaded": // total number of completed downloads
                        downloads = Convert.ToInt32(field.Value.ToString());
                        break;
                    case "incomplete": // currently connected leeches
                        leeches = Convert.ToInt32(field.Value.ToString());
                        break;
                }
            }
            details.Add(new ScrapeDetails(seeds, leeches, downloads));

            // The protocol does not support more than 74 scrape entries per
            // response, so flush a full batch immediately and start a new one.
            if (details.Count == 74) {
                byte[] batch = new ScrapeResponseMessage(scrapeMessage.TransactionId, details).Encode();
                await client.SendAsync(batch, batch.Length, remotePeer);
                details.Clear();
            }
        }
        // Send whatever remains (possibly an empty trailing batch, matching
        // the original behaviour).
        reply = new ScrapeResponseMessage(scrapeMessage.TransactionId, details);
    }

    byte[] payload = reply.Encode();
    await client.SendAsync(payload, payload.Length, remotePeer);
}
/// <summary>
/// Test hook: when <c>IncompleteScrape</c> is set, replies with 50 bytes of
/// junk (0xC8) to simulate a corrupt response; when <c>IgnoreScrapes</c> is
/// set, stays silent; otherwise behaves like a normal tracker.
/// </summary>
protected override async Task ReceiveScrape(UdpClient client, ScrapeMessage scrapeMessage, IPEndPoint remotePeer)
{
    if (IncompleteScrape) {
        // Deliberately garbage payload — exercises the caller's decode-failure path.
        byte[] junk = Enumerable.Repeat((byte) 200, 50).ToArray();
        await client.SendAsync(junk, 50, remotePeer);
    } else if (!IgnoreScrapes) {
        await base.ReceiveScrape(client, scrapeMessage, remotePeer);
    }
}
/// <summary>
/// Converts a UDP scrape request into the name/value parameter set the
/// HTTP-style tracker core expects. Only the first infohash is forwarded.
/// </summary>
NameValueCollection getCollection(ScrapeMessage scrapeMessage)
{
    var parameters = new NameValueCollection();

    // No infohash in the request — nothing to translate.
    if (scrapeMessage.InfoHashes.Count == 0)
        return parameters;

    // TODO: handle more than one infohash; the response ordering must then
    // match the request ordering.
    var hash = new InfoHash(scrapeMessage.InfoHashes[0]);
    parameters.Add("info_hash", hash.UrlEncode());
    return parameters;
}
/// <summary>
/// Scrapes the tracker for a single infohash and maps the outcome onto a
/// <see cref="ScrapeResponse"/>: Ok with counters, InvalidResponse on a
/// tracker-reported error or decode failure, Offline on cancellation.
/// Never throws for tracker failures — errors are encoded in the response.
/// </summary>
public async ReusableTask <ScrapeResponse> ScrapeAsync(ScrapeRequest parameters, CancellationToken token)
{
    try {
        // Connection ids expire after roughly a minute, so reconnect when stale.
        if (ConnectionIdTask == null || LastConnected.Elapsed > TimeSpan.FromMinutes(1))
            ConnectionIdTask = ConnectAsync();
        long connectionId = await ConnectionIdTask;

        var hashes = new List<InfoHash> { parameters.InfoHash };
        var message = new ScrapeMessage(DateTime.Now.GetHashCode(), connectionId, hashes);
        (var rawResponse, var errorString) = await SendAndReceiveAsync(message);

        if (errorString != null) {
            // The tracker replied with an ErrorMessage — surface it as a failure
            // and force a reconnect next time.
            ConnectionIdTask = null;
            return new ScrapeResponse(TrackerState.InvalidResponse, failureMessage: errorString);
        }

        if (rawResponse is ScrapeResponseMessage response) {
            int? complete = null, incomplete = null, downloaded = null;
            // One infohash was requested, so a well-formed reply has exactly one entry.
            if (response.Scrapes.Count == 1) {
                complete = response.Scrapes[0].Seeds;
                downloaded = response.Scrapes[0].Complete;
                incomplete = response.Scrapes[0].Leeches;
            }
            return new ScrapeResponse(TrackerState.Ok, complete: complete, downloaded: downloaded, incomplete: incomplete);
        }

        throw new InvalidOperationException($"There was no error and no {nameof (ScrapeResponseMessage)} was received");
    } catch (OperationCanceledException) {
        ConnectionIdTask = null;
        return new ScrapeResponse(TrackerState.Offline, failureMessage: "Scrape could not be completed");
    } catch (Exception) {
        ConnectionIdTask = null;
        return new ScrapeResponse(TrackerState.InvalidResponse, failureMessage: "Scrape could not be completed");
    }
}
/// <summary>
/// Decodes a raw UDP tracker packet into the concrete message type indicated
/// by its action field.
/// </summary>
/// <param name="buffer">Raw packet bytes.</param>
/// <param name="offset">Offset of the packet within <paramref name="buffer"/>.</param>
/// <param name="count">Length of the packet.</param>
/// <param name="type">Whether the packet is a request or a response (the action
/// field sits at offset 8 in requests, offset 0 in responses).</param>
/// <returns>The decoded message, or an <see cref="ErrorMessage"/> when decoding fails.</returns>
/// <exception cref="ProtocolException">Thrown for an unrecognised action value.</exception>
public static UdpTrackerMessage DecodeMessage(byte[] buffer, int offset, int count, MessageType type)
{
    UdpTrackerMessage m = null;
    var action = type == MessageType.Request ? ReadInt(buffer, offset + 8) : ReadInt(buffer, offset);
    switch (action) {
        case 0:
            if (type == MessageType.Request)
                m = new ConnectMessage();
            else
                m = new ConnectResponseMessage();
            break;
        case 1:
            if (type == MessageType.Request)
                m = new AnnounceMessage();
            else
                m = new AnnounceResponseMessage();
            break;
        case 2:
            if (type == MessageType.Request)
                m = new ScrapeMessage();
            else
                m = new ScrapeResponseMessage();
            break;
        case 3:
            m = new ErrorMessage();
            break;
        default:
            // FIX: report the decoded action value. The previous code printed
            // buffer[offset], which for requests is a byte of the connection id,
            // not the action that was actually rejected.
            throw new ProtocolException(string.Format("Invalid udp message received: {0}", action));
    }

    try {
        m.Decode(buffer, offset, count);
    } catch {
        // A malformed payload is surfaced as an error message rather than an exception.
        m = new ErrorMessage(0, "Couldn't decode the tracker response");
    }
    return m;
}
/// <summary>
/// Sends a scrape request for one document to the message queue, waits (up to
/// 10 minutes) for completion, then clears the document's references and its
/// pending marker. All failures are logged and swallowed — the pending marker
/// is always removed in the finally block.
/// </summary>
/// <param name="documentId">Identifier of the document to scrape.</param>
/// <param name="depth">Crawl depth forwarded to the scraper.</param>
private async Task ScrapeArticleAsync(string documentId, int depth)
{
    try {
        await TryCreateRequestSenderAsync();
        var messageContent = new ScrapeMessage {
            DocumentId = documentId,
            Depth = depth,
            ReplyTo = _completeQueueName
        };
        var messageJson = JsonConvert.SerializeObject(messageContent);
        var message = new Message(Encoding.UTF8.GetBytes(messageJson)) {
            TimeToLive = TimeSpan.FromMinutes(10),
            MessageId = Guid.NewGuid().ToString()
        };
        _logger.LogInformation($"Sending scrape message for document {documentId}");
        // FIX: dispose the CancellationTokenSource so its underlying timer is
        // released — the previous code leaked one CTS (and timer) per request.
        using (var timeout = new CancellationTokenSource(TimeSpan.FromMinutes(10))) {
            await _requestSender.RequestAsync(message, rsp => Task.FromResult(true), timeout.Token);
        }
        await MarkReferencesNotPendingAsync(documentId);
        _logger.LogInformation($"Finished scrape for document {documentId}");
    } catch (OperationCanceledException) {
        // The 10-minute timeout elapsed before a reply arrived.
        _logger.LogWarning($"Scrape task for document {documentId} took too long.");
    } catch (Exception e) {
        _logger.LogError(e, "Scrape message failure.");
    } finally {
        // Always clear the pending marker, even on failure or timeout.
        _pendingScrapes.TryRemove(documentId, out _);
    }
}
public void ScrapeMessage_TryDecode()
{
    // Packet layout (hex, spaces for readability): 8-byte connection id,
    // action = 2 (scrape), transaction id, then two 20-byte infohashes.
    const string raw = "0000041727101980 00000002 00003300 551914F642D7E0B5781FB9A9BCF6AC7DE4BDE7ED F5E7C4D5A7BD299CD36CB7547F1C0B8145AE0C9D";
    byte[] data = raw.Replace(" ", string.Empty).ToByteArray();

    if (!ScrapeMessage.TryDecode(data, 0, out ScrapeMessage message))
        Assert.Fail();

    Assert.AreEqual(56, message.Length);
    Assert.AreEqual(2, (int)message.Action);
    Assert.AreEqual(13056, message.TransactionId);
    Assert.AreEqual(2, message.InfoHashes.Count());
    Assert.AreEqual("551914F642D7E0B5781FB9A9BCF6AC7DE4BDE7ED", message.InfoHashes.ElementAt(0));
    Assert.AreEqual("F5E7C4D5A7BD299CD36CB7547F1C0B8145AE0C9D", message.InfoHashes.ElementAt(1));
    // Round-trip: re-encoding must reproduce the original bytes exactly.
    CollectionAssert.AreEqual(data, message.Encode());
}
/// <summary>
/// Sends a UDP scrape for a single infohash and dispatches the decoded
/// response to <c>CompleteScrape</c>, or signals failure when the reply is
/// not a scrape response.
/// </summary>
private async Task DoScrapeAsync(ScrapeParameters parameters, object state)
{
    // NOTE(review): only one infohash is scraped even though the UDP protocol
    // supports batching several — presumably intentional; could loop over the
    // engine's torrents instead. Confirm with callers.
    var hashes = new List<byte[]>(1) { parameters.InfoHash.Hash };
    var request = new ScrapeMessage(DateTime.Now.GetHashCode(), _connectionId, hashes);

    var responseBytes = await SendAndReceiveAsync(request);
    var decoded = Receive(request, responseBytes);

    if (decoded is ScrapeResponseMessage)
        CompleteScrape(decoded, state);
    else
        DoScrapeComplete(false, state);
}
public void ScrapeMessageTest()
{
    // Build three random 20-byte infohashes for the request.
    var rand = new Random();
    var hashes = new List<byte[]>();
    for (int i = 0; i < 3; i++) {
        var hash = new byte[20];
        rand.NextBytes(hash);
        hashes.Add(hash);
    }

    var m = new ScrapeMessage(12345, 123, hashes);
    var d = (ScrapeMessage)UdpTrackerMessage.DecodeMessage(m.Encode(), 0, m.ByteLength, MessageType.Request);

    Check(m, MessageType.Request);
    // Action 2 identifies a scrape request and must survive the round trip.
    Assert.AreEqual(2, m.Action);
    Assert.AreEqual(m.Action, d.Action);
    Assert.IsTrue(Toolbox.ByteMatch(m.Encode(), d.Encode()));
}
/// <summary>
/// Answers a UDP scrape request: runs it through the tracker core via
/// Handle(), then sends either an ErrorMessage (on failure) or one or more
/// ScrapeResponseMessages back to the stored endpoint, batching at most 74
/// entries per packet as the protocol requires.
/// </summary>
protected virtual void ReceiveScrape(ScrapeMessage scrapeMessage)
{
    BEncodedDictionary val = Handle(getCollection(scrapeMessage), endpoint.Address, true);
    UdpTrackerMessage m;
    byte[] data;
    if (val.ContainsKey(RequestParameters.FailureKey)) {
        // The tracker core rejected the request — relay its failure reason.
        m = new ErrorMessage(scrapeMessage.TransactionId, val[RequestParameters.FailureKey].ToString());
    } else {
        List<ScrapeDetails> scrapes = new List<ScrapeDetails>();
        foreach (KeyValuePair<BEncodedString, BEncodedValue> keypair in val) {
            BEncodedDictionary dict = (BEncodedDictionary)keypair.Value;
            int seeds = 0;
            int leeches = 0;
            int complete = 0;
            foreach (KeyValuePair<BEncodedString, BEncodedValue> keypair2 in dict) {
                switch (keypair2.Key.Text) {
                    case "complete": // currently connected seeds
                        seeds = Convert.ToInt32(keypair2.Value.ToString());
                        break;
                    case "downloaded": // total number of completed downloads
                        complete = Convert.ToInt32(keypair2.Value.ToString());
                        break;
                    case "incomplete": // currently connected leeches
                        leeches = Convert.ToInt32(keypair2.Value.ToString());
                        break;
                }
            }
            ScrapeDetails sd = new ScrapeDetails(seeds, leeches, complete);
            scrapes.Add(sd);
            // The protocol does not support more than 74 scrape entries per
            // response, so flush a full batch immediately and start a new one.
            if (scrapes.Count == 74)
            {
                m = new ScrapeResponseMessage(scrapeMessage.TransactionId, scrapes);
                data = m.Encode();
#if NETSTANDARD1_5
                // NOTE(review): fire-and-forget — the returned Task is not awaited,
                // so send failures go unobserved; confirm this is intentional.
                listener.SendAsync(data, data.Length, endpoint);
#else
                listener.Send(data, data.Length, endpoint);
#endif
                scrapes.Clear();
            }
        }
        // Final (possibly partial, possibly empty) batch.
        m = new ScrapeResponseMessage(scrapeMessage.TransactionId, scrapes);
    }
    data = m.Encode();
#if NETSTANDARD1_5
    // NOTE(review): fire-and-forget send — see batch send above.
    listener.SendAsync(data, data.Length, endpoint);
#else
    listener.Send(data, data.Length, endpoint);
#endif
}
/// <summary>
/// Forwards the scrape to the base tracker unless scrapes are being ignored.
/// </summary>
protected override void ReceiveScrape(ScrapeMessage scrapeMessage)
{
    if (IgnoreScrapes)
        return;

    base.ReceiveScrape(scrapeMessage);
}
public void ScrapeMessageTest()
{
    // Round-trip a scrape request containing three random 20-byte infohashes.
    var rnd = new Random();
    var infoHashes = new List<byte[]>();
    for (var i = 0; i < 3; i++) {
        var infoHash = new byte[20];
        rnd.NextBytes(infoHash);
        infoHashes.Add(infoHash);
    }

    var original = new ScrapeMessage(12345, 123, infoHashes);
    var decoded = (ScrapeMessage) UdpTrackerMessage.DecodeMessage(original.Encode(), 0, original.ByteLength, MessageType.Request);

    Check(original, MessageType.Request);
    // Action 2 identifies a scrape request and must survive the round trip.
    Assert.AreEqual(2, original.Action);
    Assert.AreEqual(original.Action, decoded.Action);
    Assert.IsTrue(Toolbox.ByteMatch(original.Encode(), decoded.Encode()));
}