/// <summary>
/// Gathers statistics about the torrent. This is known as "Scraping".
/// </summary>
/// <param name="infofile">Metainfo file on the torrent</param>
/// <param name="numSeeds">Number of seeds on the torrent</param>
/// <param name="numLeechers">Number of peers (leechers) on the torrent</param>
/// <param name="numFinished">Number of successful downloads so far</param>
/// <param name="name">Name of the torrent</param>
/// <exception cref="TrackerException">Thrown when the tracker reports a failure reason.</exception>
public static void Scrape(MetainfoFile infofile, out int numSeeds, out int numLeechers, out int numFinished, out string name)
{
	numSeeds = numLeechers = numFinished = 0;
	name = "";

	// Determine the scrape url: by convention the scrape endpoint is the announce
	// url with the final path segment "announce" replaced by "scrape".
	string announceUrl = infofile.AnnounceUrl;
	int lastSlashIndex = announceUrl.LastIndexOf('/');

	if (lastSlashIndex < 0)
	{
		return;
	}

	const string announce = "announce";
	string tail = announceUrl.Substring(lastSlashIndex + 1);

	// Check that "announce" begins the segment after the last slash - if it doesn't,
	// scraping isn't supported by this tracker.
	// BUGFIX: the original fixed-length Substring threw ArgumentOutOfRangeException
	// when the tail was shorter than "announce"; StartsWith handles that safely.
	// Ordinal comparison is used because this is a protocol token, not display text.
	if (!tail.StartsWith(announce, StringComparison.Ordinal))
	{
		return;
	}

	string scrapeUrl = announceUrl.Substring(0, lastSlashIndex + 1) + "scrape" + tail.Substring(announce.Length);
	scrapeUrl += "?";
	scrapeUrl += "info_hash=" + UriEscape(infofile.InfoDigest.Data);

	Net.WebRequest request = Net.WebRequest.Create(scrapeUrl);

	// BUGFIX: the response, its stream and the temporary MemoryStream were never
	// disposed, leaking the underlying connection. "using" guarantees cleanup.
	using (Net.WebResponse response = request.GetResponse())
	using (IO.Stream stream = response.GetResponseStream())
	using (IO.MemoryStream responseStream = new IO.MemoryStream())
	{
		// Because the response stream does not support seeking, we copy the contents
		// to a MemoryStream to send to the bencoder. This shouldn't cause too much of
		// a performance penalty as scrape responses are small.
		byte[] data = new byte[1024];
		int dataRead;

		while ((dataRead = stream.Read(data, 0, data.Length)) > 0)
		{
			responseStream.Write(data, 0, dataRead);
		}

		responseStream.Seek(0, IO.SeekOrigin.Begin);

		BEncode.Dictionary mainDic = BEncode.NextDictionary(responseStream);

		if (mainDic.Contains("files"))
		{
			// Extract file information - as we supplied the info_hash value, this
			// dictionary should only contain one entry.
			BEncode.Dictionary filesDic = mainDic.GetDictionary("files");

			foreach (BEncode.String infoHash in filesDic.Keys)
			{
				BEncode.Dictionary dic = filesDic.GetDictionary(infoHash);

				if (dic.Contains("downloaded"))
				{
					numFinished = dic.GetInteger("downloaded");
				}

				if (dic.Contains("incomplete"))
				{
					numLeechers = dic.GetInteger("incomplete");
				}

				if (dic.Contains("complete"))
				{
					numSeeds = dic.GetInteger("complete");
				}

				if (dic.Contains("name"))
				{
					name = dic.GetString("name");
				}
			}
		}
		else if (mainDic.Contains("failure reason"))
		{
			throw new TrackerException("Tracker connection failed: " + mainDic.GetString("failure reason"));
		}
	}
}
/// <summary>
/// Parses the bencoded response from the tracker, and repopulates the peer list.
/// </summary>
/// <param name="stream">IO stream from the tracker response</param>
/// <exception cref="IO.IOException">Thrown when the tracker reports a failure reason.</exception>
/// <exception cref="TrackerException">Thrown when the "peers" element has an unrecognised type.</exception>
private void ParseTrackerResponse(IO.Stream stream)
{
	this.peerList.Clear();

	BEncode.Dictionary dic = BEncode.NextDictionary(stream);

	if (dic.Contains("failure reason"))
	{
		throw new IO.IOException("Tracker connection failed: " + dic.GetString("failure reason"));
	}
	else
	{
		this.updateInterval = dic.GetInteger("interval");

		BEncode.Element peers = dic["peers"];

		if (peers is BEncode.List)
		{
			// Peer list comes as a list of dictionaries, one per peer.
			BEncode.List dicList = (BEncode.List)peers;

			foreach (BEncode.Dictionary dicPeer in dicList)
			{
				ByteField20 peerId = new ByteField20(dicPeer.GetBytes("peer id"));
				string peerIp = dicPeer.GetString("ip");
				int port = dicPeer.GetInteger("port");

				PeerInformation peerinfo = new PeerInformation(peerIp, port, peerId);

				// Note: sometimes IPs can be duplicated in quick disconnection,
				// so there is a check for any duplications.
				if (!this.peerList.Contains(peerinfo))
				{
					this.peerList.Add(peerinfo);
				}
			}
		}
		else if (peers is BEncode.String)
		{
			// Else it's the compact form (this is pretty common):
			// 6 bytes per peer - 4 for the IPv4 address, 2 for the port (big-endian).
			byte[] compactPeers = ((BEncode.String)peers).Data;

			// BUGFIX: the loop bound is "i + 6 <= Length" rather than "i < Length",
			// so a truncated final entry no longer causes an IndexOutOfRangeException.
			for (int i = 0; i + 6 <= compactPeers.Length; i += 6)
			{
				int ip1 = 0xFF & compactPeers[i];
				int ip2 = 0xFF & compactPeers[i + 1];
				int ip3 = 0xFF & compactPeers[i + 2];
				int ip4 = 0xFF & compactPeers[i + 3];
				int po1 = 0xFF & compactPeers[i + 4];
				int po2 = 0xFF & compactPeers[i + 5];

				string peerIp = ip1 + "." + ip2 + "." + ip3 + "." + ip4;
				int port = (po1 * 256) + po2;

				PeerInformation peerinfo = new PeerInformation(peerIp, port);

				// Same duplicate guard as the dictionary-model branch above.
				if (!this.peerList.Contains(peerinfo))
				{
					this.peerList.Add(peerinfo);
				}
			}
		}
		else
		{
			// BUGFIX: corrected misspelled message ("Unexcepted" -> "Unexpected").
			throw new TrackerException("Unexpected error");
		}
	}
}