/// <summary>
/// Gathers statistics about the torrent. This is known as "Scraping".
/// </summary>
/// <param name="infofile">Metainfo file on the torrent</param>
/// <param name="numSeeds">Number of seeds on the torrent</param>
/// <param name="numLeechers">Number of peers (leechers) on the torrent</param>
/// <param name="numFinished">Number of successful downloads so far</param>
/// <param name="name">Name of the torrent</param>
/// <exception cref="TrackerException">Thrown when the tracker reports a failure reason</exception>
public static void Scrape(MetainfoFile infofile, out int numSeeds, out int numLeechers, out int numFinished, out string name)
{
    numSeeds = numLeechers = numFinished = 0;
    name = "";

    // Determine the scrape url by convention: replace the trailing "announce"
    // path component of the announce url with "scrape". If the url does not
    // follow that convention, the tracker does not support scraping.
    string announceUrl = infofile.AnnounceUrl;
    int lastSlashIndex = announceUrl.LastIndexOf('/');
    if (lastSlashIndex < 0)
    {
        return;
    }

    const string announce = "announce";

    // Check that "announce" follows the last slash. CompareOrdinal treats the
    // length as a maximum, so a tail shorter than "announce" simply compares
    // unequal instead of throwing (the original Substring call could throw
    // ArgumentOutOfRangeException on a short tail).
    if (string.CompareOrdinal(announceUrl, lastSlashIndex + 1, announce, 0, announce.Length) != 0)
    {
        return;
    }

    // Fixed local typo: "scapeUrl" -> "scrapeUrl".
    string scrapeUrl = announceUrl.Substring(0, lastSlashIndex + 1)
        + "scrape"
        + announceUrl.Substring(lastSlashIndex + 1 + announce.Length);

    scrapeUrl += "?";
    scrapeUrl += "info_hash=" + UriEscape(infofile.InfoDigest.Data);

    Net.WebRequest request = Net.WebRequest.Create(scrapeUrl);

    // Dispose the response and both streams deterministically - the original
    // leaked all three.
    using (Net.WebResponse response = request.GetResponse())
    using (IO.Stream stream = response.GetResponseStream())
    using (IO.MemoryStream responseStream = new IO.MemoryStream())
    {
        // Because the response stream does not support seeking, copy the
        // contents to a MemoryStream before handing it to the bencoder. Scrape
        // responses are small, so the extra copy is cheap.
        byte[] data = new byte[1024];
        int dataRead;
        while ((dataRead = stream.Read(data, 0, data.Length)) > 0)
        {
            responseStream.Write(data, 0, dataRead);
        }
        responseStream.Seek(0, IO.SeekOrigin.Begin);

        BEncode.Dictionary mainDic = BEncode.NextDictionary(responseStream);
        if (mainDic.Contains("files"))
        {
            // Extract file information - as we supplied the info_hash value,
            // this dictionary should only contain one entry.
            BEncode.Dictionary filesDic = mainDic.GetDictionary("files");
            foreach (BEncode.String infoHash in filesDic.Keys)
            {
                BEncode.Dictionary dic = filesDic.GetDictionary(infoHash);
                if (dic.Contains("downloaded"))
                {
                    numFinished = dic.GetInteger("downloaded");
                }
                if (dic.Contains("incomplete"))
                {
                    numLeechers = dic.GetInteger("incomplete");
                }
                if (dic.Contains("complete"))
                {
                    numSeeds = dic.GetInteger("complete");
                }
                if (dic.Contains("name"))
                {
                    name = dic.GetString("name");
                }
            }
        }
        else if (mainDic.Contains("failure reason"))
        {
            throw new TrackerException("Tracker connection failed: " + mainDic.GetString("failure reason"));
        }
    }
}
/// <summary>Constructs a MetainfoFile by parsing a bencoded metainfo (".torrent") stream.</summary>
/// <param name="istream">Seekable stream to read the bencoded data from</param>
public MetainfoFile(IO.Stream istream)
{
    BEncode.Dictionary mainDictionary = (BEncode.Dictionary)BEncode.NextElement(istream);

    this.announceUrl = mainDictionary.GetString(new BEncode.String("announce"));

    if (mainDictionary.Contains("comment"))
    {
        this.comment = mainDictionary.GetString("comment");
    }
    if (mainDictionary.Contains("created by"))
    {
        this.createdBy = mainDictionary.GetString("created by");
    }
    if (mainDictionary.Contains("creation date"))
    {
        // "creation date" is encoded as seconds since the Unix epoch.
        int creation = mainDictionary.GetInteger("creation date");
        this.creationDate = new System.DateTime(1970, 1, 1, 0, 0, 0);
        this.creationDate = this.creationDate.AddSeconds(creation);
    }

    BEncode.Dictionary infoDictionary = mainDictionary.GetDictionary("info");

    this.name = infoDictionary.GetString("name");
    this.pieceLength = infoDictionary.GetInteger("piece length");
    this.pieceFileName = this.name.ToLower().Replace(' ', '_');

    // "pieces" is a flat concatenation of 20-byte SHA-1 digests, one per piece.
    byte[] pieces = infoDictionary.GetBytes("pieces");
    int numPieces = pieces.Length / 20;

    this.shaDigestList.Capacity = numPieces;
    for (int i = 0; i < numPieces; ++i)
    {
        this.shaDigestList.Add(new ByteField20(pieces, i * 20));
    }

    // Get filenames and lengths.
    if (infoDictionary.Contains("length"))
    {
        // Single-file torrent: "length" holds the size of the one file.
        this.fileList.Add(name);

        int fileLength = infoDictionary.GetInteger("length");
        this.fileLengthList.Add(fileLength);
        this.totalSize = fileLength;
    }
    else
    {
        // Multi-file torrent: "files" is a list of dictionaries, each holding
        // the file's path components and its length.
        BEncode.List files = infoDictionary.GetList("files");

        this.fileList.Capacity = this.fileLengthList.Capacity = files.Count;
        this.totalSize = 0;

        foreach (BEncode.Dictionary fileDic in files)
        {
            // Join the path components under the torrent's name directory.
            BEncode.List pathList = fileDic.GetList("path");
            string path = this.name + IO.Path.DirectorySeparatorChar;
            for (int i = 0; i < pathList.Count - 1; ++i)
            {
                path += pathList[i].ToString() + IO.Path.DirectorySeparatorChar;
            }
            path += pathList[pathList.Count - 1];
            this.fileList.Add(path);

            int fileLength = fileDic.GetInteger("length");
            this.fileLengthList.Add(fileLength);
            this.totalSize += fileLength;
        }
    }

    // Calculate the SHA-1 digest of the raw info dictionary - this is the
    // torrent's "info hash", required for the tracker protocol.
    istream.Seek(infoDictionary.Position, IO.SeekOrigin.Begin);
    byte[] infoData = new byte[infoDictionary.Length];

    // Stream.Read is not guaranteed to fill the buffer in a single call, so
    // loop until every byte is read. The original issued one Read, which could
    // silently leave a partially-filled buffer and produce a wrong info hash.
    int offset = 0;
    while (offset < infoData.Length)
    {
        int read = istream.Read(infoData, offset, infoData.Length - offset);
        if (read <= 0)
        {
            throw new IO.IOException("Unexpected end of stream while reading the info dictionary.");
        }
        offset += read;
    }

    this.infoDigest = ByteField20.ComputeSHAHash(infoData);
}
/// <summary>
/// Parses the response from the tracker, and updates the peer list.
/// </summary>
/// <param name="stream">IO stream from response</param>
/// <exception cref="IO.IOException">Thrown when the tracker reports a failure reason</exception>
/// <exception cref="TrackerException">Thrown when the "peers" value has an unrecognised type</exception>
private void ParseTrackerResponse(IO.Stream stream)
{
    this.peerList.Clear();

    BEncode.Dictionary dic = BEncode.NextDictionary(stream);

    if (dic.Contains("failure reason"))
    {
        throw new IO.IOException("Tracker connection failed: " + dic.GetString("failure reason"));
    }
    else
    {
        this.updateInterval = dic.GetInteger("interval");

        // Note: sometimes IPs can be duplicated after a quick disconnection,
        // so each peer is checked against the list before being added.
        BEncode.Element peers = dic["peers"];
        if (peers is BEncode.List)
        {
            // Peer list comes as a list of dictionaries.
            BEncode.List dicList = (BEncode.List)peers;
            foreach (BEncode.Dictionary dicPeer in dicList)
            {
                ByteField20 peerId = new ByteField20(dicPeer.GetBytes("peer id"));
                string peerIp = dicPeer.GetString("ip");
                int port = dicPeer.GetInteger("port");

                PeerInformation peerinfo = new PeerInformation(peerIp, port, peerId);
                if (!this.peerList.Contains(peerinfo))
                {
                    this.peerList.Add(peerinfo);
                }
            }
        }
        else if (peers is BEncode.String)
        {
            // Compact form (this is pretty common): 6 bytes per peer -
            // a 4-byte IPv4 address followed by a 2-byte big-endian port.
            byte[] compactPeers = ((BEncode.String)peers).Data;

            // Loop condition uses "i + 6 <= Length" so a truncated trailing
            // entry cannot cause an IndexOutOfRangeException (the original
            // assumed the data length was an exact multiple of 6).
            for (int i = 0; i + 6 <= compactPeers.Length; i += 6)
            {
                int ip1 = 0xFF & compactPeers[i];
                int ip2 = 0xFF & compactPeers[i + 1];
                int ip3 = 0xFF & compactPeers[i + 2];
                int ip4 = 0xFF & compactPeers[i + 3];
                int po1 = 0xFF & compactPeers[i + 4];
                int po2 = 0xFF & compactPeers[i + 5];

                string peerIp = ip1 + "." + ip2 + "." + ip3 + "." + ip4;
                int port = (po1 * 256) + po2;

                PeerInformation peerinfo = new PeerInformation(peerIp, port);
                if (!this.peerList.Contains(peerinfo))
                {
                    this.peerList.Add(peerinfo);
                }
            }
        }
        else
        {
            // Fixed typo in the message (was "Unexcepted error").
            throw new TrackerException("Unexpected error");
        }
    }
}