/// <summary>
/// Constructs an UploadManager and starts the periodic choke timer.
/// </summary>
/// <param name="infofile">Metainfo file for the torrent</param>
/// <param name="downloadFile">Download file being uploaded from</param>
public UploadManager(MetainfoFile infofile, DownloadFile downloadFile)
{
    this.infofile = infofile;
    this.downloadFile = downloadFile;

    // Fire OnChokeTimer on a fixed cadence; the first tick happens after one full interval.
    System.Threading.TimerCallback chokeCallback = new System.Threading.TimerCallback(OnChokeTimer);
    this.chokeTimer = new System.Threading.Timer(chokeCallback, null, Config.ChokeInterval, Config.ChokeInterval);
}
/// <summary>
/// Constructs a piece, allocating its data buffer from the piece length
/// recorded in the metainfo file.
/// </summary>
/// <param name="infofile">Metainfo file for torrent</param>
/// <param name="pieceId">ID for piece</param>
public Piece(MetainfoFile infofile, int pieceId)
{
    this.infofile = infofile;
    this.pieceId = pieceId;

    int pieceLength = this.infofile.GetPieceLength(pieceId);
    this.data = new byte[pieceLength];
}
/// <summary>
/// Constructs a piece for the given metainfo file. The backing buffer is
/// sized to exactly hold this piece.
/// </summary>
/// <param name="infofile">Metainfo file for torrent</param>
/// <param name="pieceId">ID for piece</param>
public Piece(MetainfoFile infofile, int pieceId)
{
    this.pieceId = pieceId;
    this.infofile = infofile;
    this.data = new byte[infofile.GetPieceLength(pieceId)];
}
/// <summary>
/// Constructs a peer wrapper around an already-connected socket and starts
/// both transfer throttles.
/// </summary>
/// <param name="infofile">Metainfo file for the torrent</param>
/// <param name="downloadFile">Download file being transferred</param>
/// <param name="socket">Connected socket for this peer</param>
/// <param name="netStream">Network stream wrapping the socket</param>
/// <param name="peerInformation">Identifying information for the peer</param>
public Peer(MetainfoFile infofile, DownloadFile downloadFile, Sockets.Socket socket, Sockets.NetworkStream netStream, PeerInformation peerInformation)
{
    this.peerInformation = peerInformation;
    this.netStream = netStream;
    this.socket = socket;
    this.downloadFile = downloadFile;
    this.infofile = infofile;

    // Track which pieces this peer has; one bit per piece, all initially unset.
    this.piecesDownloaded = new BitField(this.infofile.PieceCount);

    // Throttles must be running before any traffic flows.
    this.peerProtocol.UpThrottle.Start();
    this.peerProtocol.DownThrottle.Start();
}
/// <summary>Constructs a DownloadFile</summary>
/// <param name="infofile">Metainfo file for the torrent</param>
public DownloadFile(MetainfoFile infofile)
{
    this.infofile = infofile;
    this.piecesDownloaded = new BitField(this.infofile.PieceCount);

    // Start from "nothing downloaded": clear every piece bit and count the
    // full torrent size as remaining.
    this.numBytesLeft = 0;
    for (int pieceIndex = 0; pieceIndex < this.piecesDownloaded.Count; ++pieceIndex)
    {
        this.piecesDownloaded.Set(pieceIndex, false);
        this.numBytesLeft += this.infofile.GetPieceLength(pieceIndex);
    }

    // Reconcile against any pieces already present on disk.
    GetPieceInfoFromFile(null);
}
/// <summary>Constructs a DownloadFile</summary>
/// <param name="infofile">Metainfo file for the torrent</param>
public DownloadFile(MetainfoFile infofile)
{
    this.infofile = infofile;
    this.piecesDownloaded = new BitField(this.infofile.PieceCount);
    this.numBytesLeft = 0;

    int pieceCount = this.piecesDownloaded.Count;
    for (int i = 0; i < pieceCount; ++i)
    {
        // Mark the piece as not yet downloaded and add its length to the
        // outstanding byte count.
        this.piecesDownloaded.Set(i, false);
        this.numBytesLeft += this.infofile.GetPieceLength(i);
    }

    GetPieceInfoFromFile(null);
}
/// <summary>Constructs a torrent using the metainfo filename</summary>
/// <param name="session">Session this torrent belongs to</param>
/// <param name="metafilename">Filename of the metainfo file</param>
internal Torrent(Session session, string metafilename)
{
    this.mSession = session;

    // Build the core objects in dependency order: the metainfo file feeds
    // everything else.
    this.infofile = new MetainfoFile(metafilename);
    this.downloadFile = new DownloadFile(infofile);
    this.peerManager = new PeerManager();
    this.mDownloadStrategy = new DownloadStrategyManager(this);
    this.uploadManager = new UploadManager(infofile, downloadFile);
    this.tp = new TrackerProtocol(this, infofile, downloadFile);

    // this.downloadManager.PieceFinished += new PieceFinishedCallback(downloadManager_PieceFinished);

    // Wire up event plumbing.
    this.uploadManager.PieceSectionFinished += new PieceSectionFinishedCallback(uploadManager_PieceSectionFinished);
    this.tp.TrackerUpdate += new TrackerUpdateCallback(tp_TrackerUpdate);
}
/// <summary>Constructs a torrent using the metainfo filename</summary>
/// <param name="session">Session this torrent belongs to</param>
/// <param name="metafilename">Filename of the metainfo file</param>
internal Torrent(Session session, string metafilename)
{
    this.mSession = session;

    // The metainfo file must exist before the objects that consume it.
    this.infofile = new MetainfoFile(metafilename);
    this.downloadFile = new DownloadFile(this.infofile);
    this.peerManager = new PeerManager();
    this.mDownloadStrategy = new DownloadStrategyManager(this);
    this.uploadManager = new UploadManager(this.infofile, this.downloadFile);
    this.tp = new TrackerProtocol(this, this.infofile, this.downloadFile);

    // this.downloadManager.PieceFinished += new PieceFinishedCallback(downloadManager_PieceFinished);

    // Subscribe to upload and tracker notifications.
    this.uploadManager.PieceSectionFinished += new PieceSectionFinishedCallback(uploadManager_PieceSectionFinished);
    this.tp.TrackerUpdate += new TrackerUpdateCallback(tp_TrackerUpdate);
}
/// <summary>
/// Gathers statistics about the torrent. This is known as "Scraping".
/// </summary>
/// <param name="infofile">Metainfo file on the torrent</param>
/// <param name="numSeeds">Number of seeds on the torrent</param>
/// <param name="numLeechers">Number of peers (leechers) on the torrent</param>
/// <param name="numFinished">Number of successful downloads so far</param>
/// <param name="name">Name of the torrent</param>
/// <exception cref="TrackerException">Thrown when the tracker reports a failure reason</exception>
public static void Scrape(MetainfoFile infofile, out int numSeeds, out int numLeechers, out int numFinished, out string name)
{
    numSeeds = numLeechers = numFinished = 0;
    name = "";

    // Determine the scrape url from the announce url.
    string announceUrl = infofile.AnnounceUrl;
    int lastSlashIndex = announceUrl.LastIndexOf('/');
    if (lastSlashIndex < 0)
        return;

    const string announce = "announce";

    // Scraping is only supported when "announce" follows the last slash in the url.
    // Length guard first: the unguarded Substring call used to throw
    // ArgumentOutOfRangeException when the tail was shorter than "announce".
    if (announceUrl.Length < lastSlashIndex + 1 + announce.Length)
        return;
    if (announceUrl.Substring(lastSlashIndex + 1, announce.Length).CompareTo(announce) != 0)
        return;

    string scrapeUrl = announceUrl.Substring(0, lastSlashIndex + 1) + "scrape" + announceUrl.Substring(lastSlashIndex + 1 + announce.Length);
    scrapeUrl += "?";
    scrapeUrl += "info_hash=" + UriEscape(infofile.InfoDigest.Data);

    Net.WebRequest request = Net.WebRequest.Create(scrapeUrl);

    // Dispose the response and both streams; the original leaked them on every call.
    using (Net.WebResponse response = request.GetResponse())
    using (IO.Stream stream = response.GetResponseStream())
    using (IO.MemoryStream responseStream = new IO.MemoryStream())
    {
        // The response stream does not support seeking, so copy the contents to a
        // MemoryStream before handing it to the bencoder. The payload is small, so
        // the extra copy is cheap.
        byte[] data = new byte[1024];
        int dataRead;
        while ((dataRead = stream.Read(data, 0, data.Length)) > 0)
        {
            responseStream.Write(data, 0, dataRead);
        }
        responseStream.Seek(0, IO.SeekOrigin.Begin);

        BEncode.Dictionary mainDic = BEncode.NextDictionary(responseStream);
        if (mainDic.Contains("files"))
        {
            // We supplied the info_hash value, so this dictionary should contain
            // exactly one entry.
            BEncode.Dictionary filesDic = mainDic.GetDictionary("files");
            foreach (BEncode.String infoHash in filesDic.Keys)
            {
                BEncode.Dictionary dic = filesDic.GetDictionary(infoHash);
                if (dic.Contains("downloaded"))
                    numFinished = dic.GetInteger("downloaded");
                if (dic.Contains("incomplete"))
                    numLeechers = dic.GetInteger("incomplete");
                if (dic.Contains("complete"))
                    numSeeds = dic.GetInteger("complete");
                if (dic.Contains("name"))
                    name = dic.GetString("name");
            }
        }
        else if (mainDic.Contains("failure reason"))
            throw new TrackerException("Tracker connection failed: " + mainDic.GetString("failure reason"));
    }
}
/// <summary>
/// Gathers statistics about the torrent ("scraping"), discarding the torrent name.
/// </summary>
/// <param name="infofile">Metainfo file on the torrent</param>
/// <param name="numSeeds">Number of seeds on the torrent</param>
/// <param name="numLeechers">Number of peers (leechers) on the torrent</param>
/// <param name="numFinished">Number of successful downloads so far</param>
public static void Scrape(MetainfoFile infofile, out int numSeeds, out int numLeechers, out int numFinished)
{
    // Delegate to the full overload; the name is not needed by this caller.
    string unusedName;
    Scrape(infofile, out numSeeds, out numLeechers, out numFinished, out unusedName);
}
/// <summary>
/// Constructs a TrackerProtocol bound to a torrent and its data files.
/// </summary>
/// <param name="torrent">Torrent this protocol instance serves</param>
/// <param name="infofile">Metainfo file for the torrent</param>
/// <param name="file">Download file being tracked</param>
public TrackerProtocol(Torrent torrent, MetainfoFile infofile, DownloadFile file)
{
    this.file = file;
    this.infofile = infofile;
    this.torrent = torrent;
}
/// <summary>
/// Gathers statistics about the torrent. This is known as "Scraping".
/// </summary>
/// <param name="infofile">Metainfo file on the torrent</param>
/// <param name="numSeeds">Number of seeds on the torrent</param>
/// <param name="numLeechers">Number of peers (leechers) on the torrent</param>
/// <param name="numFinished">Number of successful downloads so far</param>
/// <param name="name">Name of the torrent</param>
/// <exception cref="TrackerException">Thrown when the tracker reports a failure reason</exception>
public static void Scrape(MetainfoFile infofile, out int numSeeds, out int numLeechers, out int numFinished, out string name)
{
    numSeeds = numLeechers = numFinished = 0;
    name = "";

    // Determine the scrape url from the announce url.
    string announceUrl = infofile.AnnounceUrl;
    int lastSlashIndex = announceUrl.LastIndexOf('/');
    if (lastSlashIndex < 0)
    {
        return;
    }

    const string announce = "announce";

    // Scraping is only supported when "announce" follows the last slash in the url.
    // Length guard first: the unguarded Substring call used to throw
    // ArgumentOutOfRangeException when the tail was shorter than "announce".
    if (announceUrl.Length < lastSlashIndex + 1 + announce.Length)
    {
        return;
    }

    if (announceUrl.Substring(lastSlashIndex + 1, announce.Length).CompareTo(announce) != 0)
    {
        return;
    }

    string scrapeUrl = announceUrl.Substring(0, lastSlashIndex + 1) + "scrape" + announceUrl.Substring(lastSlashIndex + 1 + announce.Length);
    scrapeUrl += "?";
    scrapeUrl += "info_hash=" + UriEscape(infofile.InfoDigest.Data);

    Net.WebRequest request = Net.WebRequest.Create(scrapeUrl);

    // Dispose the response and both streams; the original leaked them on every call.
    using (Net.WebResponse response = request.GetResponse())
    using (IO.Stream stream = response.GetResponseStream())
    using (IO.MemoryStream responseStream = new IO.MemoryStream())
    {
        // The response stream does not support seeking, so copy the contents to a
        // MemoryStream before handing it to the bencoder. The payload is small, so
        // the extra copy is cheap.
        byte[] data = new byte[1024];
        int dataRead;
        while ((dataRead = stream.Read(data, 0, data.Length)) > 0)
        {
            responseStream.Write(data, 0, dataRead);
        }

        responseStream.Seek(0, IO.SeekOrigin.Begin);

        BEncode.Dictionary mainDic = BEncode.NextDictionary(responseStream);
        if (mainDic.Contains("files"))
        {
            // We supplied the info_hash value, so this dictionary should contain
            // exactly one entry.
            BEncode.Dictionary filesDic = mainDic.GetDictionary("files");
            foreach (BEncode.String infoHash in filesDic.Keys)
            {
                BEncode.Dictionary dic = filesDic.GetDictionary(infoHash);
                if (dic.Contains("downloaded"))
                {
                    numFinished = dic.GetInteger("downloaded");
                }

                if (dic.Contains("incomplete"))
                {
                    numLeechers = dic.GetInteger("incomplete");
                }

                if (dic.Contains("complete"))
                {
                    numSeeds = dic.GetInteger("complete");
                }

                if (dic.Contains("name"))
                {
                    name = dic.GetString("name");
                }
            }
        }
        else if (mainDic.Contains("failure reason"))
        {
            throw new TrackerException("Tracker connection failed: " + mainDic.GetString("failure reason"));
        }
    }
}