/// <summary>
/// Creates a DownloadData for the given URL: determines the remote file size,
/// derives a destination filename (Content-Disposition header, then the URL
/// path, then a random temp name), and — for HTTP responses with a known
/// size — attempts to resume a partially-downloaded file in destFolder.
/// </summary>
/// <param name="url">The HTTP or FTP URL to download.</param>
/// <param name="destFolder">The folder the file will be saved into.</param>
/// <returns>A DownloadData with an open response and Filename set.</returns>
/// <exception cref="Exception">Wraps any failure contacting the server.</exception>
public static DownloadData Create(string url, string destFolder)
{
    DownloadData downloadData = new DownloadData();
    WebRequest req = GetRequest(url);

    try
    {
        if (req is FtpWebRequest)
        {
            // FTP can't report the size on the download response itself, so
            // issue a dedicated SIZE request first, then a fresh request
            // for the actual file data.
            req.Method = WebRequestMethods.Ftp.GetFileSize;
            downloadData.response = req.GetResponse();
            downloadData.GetFileSize();

            // FIX: close the size-query response before replacing it;
            // the original overwrote it unclosed, leaking the FTP connection.
            downloadData.response.Close();

            // new request for downloading the FTP file
            req = GetRequest(url);
            downloadData.response = req.GetResponse();
        }
        else
        {
            downloadData.response = req.GetResponse();
            downloadData.GetFileSize();
        }
    }
    catch (Exception e)
    {
        throw new Exception(string.Format("Error downloading \"{0}\": {1}", url, e.Message), e);
    }

    // Check to make sure the response isn't an error. If it is this method
    // will throw exceptions.
    ValidateResponse(downloadData.response, url);

    // Prefer the filename the web server supplies in Content-Disposition.
    string fileName = downloadData.response.Headers["Content-Disposition"];

    if (fileName != null)
    {
        int fileLoc = fileName.IndexOf("filename=", StringComparison.OrdinalIgnoreCase);

        if (fileLoc != -1)
        {
            // go past "filename="
            fileLoc += 9;

            if (fileName.Length > fileLoc)
            {
                // trim off an ending semicolon if it exists
                int end = fileName.IndexOf(';', fileLoc);

                if (end == -1)
                    end = fileName.Length - fileLoc;
                else
                    end -= fileLoc;

                fileName = fileName.Substring(fileLoc, end).Trim();
            }
            else
                fileName = null;
        }
        else
            fileName = null;
    }

    if (string.IsNullOrEmpty(fileName))
    {
        // brute force the filename from the url
        fileName = Path.GetFileName(downloadData.response.ResponseUri.LocalPath);
    }

    // trim out non-standard filename characters
    if (!string.IsNullOrEmpty(fileName) && fileName.IndexOfAny(invalidFilenameChars.ToArray()) != -1)
    {
        // make a new string builder (with at least one bad character)
        StringBuilder newText = new StringBuilder(fileName.Length - 1);

        // remove the bad characters
        for (int i = 0; i < fileName.Length; i++)
        {
            if (invalidFilenameChars.IndexOf(fileName[i]) == -1)
                newText.Append(fileName[i]);
        }

        fileName = newText.ToString().Trim();
    }

    // if filename *still* is null or empty, then generate some random temp filename
    if (string.IsNullOrEmpty(fileName))
        fileName = Path.GetFileName(Path.GetTempFileName());

    string downloadTo = Path.Combine(destFolder, fileName);
    downloadData.Filename = downloadTo;

    // If we don't know how big the file is supposed to be,
    // we can't resume, so delete what we already have if something is on disk already.
    if (!downloadData.IsProgressKnown && File.Exists(downloadTo))
        File.Delete(downloadTo);

    if (downloadData.IsProgressKnown && File.Exists(downloadTo))
    {
        // We only support resuming on http requests
        if (!(downloadData.Response is HttpWebResponse))
        {
            File.Delete(downloadTo);
        }
        else
        {
            // Try and start where the file on disk left off
            downloadData.start = new FileInfo(downloadTo).Length;

            // If we have a file that's bigger than what is online, then something
            // strange happened. Delete it and start again.
            if (downloadData.start > downloadData.size)
            {
                File.Delete(downloadTo);
            }
            else if (downloadData.start < downloadData.size)
            {
                // Try and resume by creating a new request with a new start position
                downloadData.response.Close();
                req = GetRequest(url);

                // FIX: use the long overload of AddRange (.NET 4+); the original
                // cast the offset to int, which corrupts the requested range for
                // partially-downloaded files larger than 2 GB.
                ((HttpWebRequest)req).AddRange(downloadData.start);
                downloadData.response = req.GetResponse();

                if (((HttpWebResponse)downloadData.Response).StatusCode != HttpStatusCode.PartialContent)
                {
                    // They didn't support our resume request.
                    File.Delete(downloadTo);
                    downloadData.start = 0;
                }
            }
        }
    }

    return downloadData;
}
/// <summary>
/// BackgroundWorker entry point: configures the proxy, then tries each URL in
/// urlList until one downloads and validates successfully (or, when
/// DownloadAll is set, downloads every URL). If every attempt failed while
/// still waiting for a response, the whole list is retried without a proxy.
/// Reports ProgressStatus.Success or ProgressStatus.Failure when done.
/// </summary>
void bw_DoWork(object sender, DoWorkEventArgs e)
{
    // validate input
    if (urlList == null || urlList.Count == 0)
    {
        if (string.IsNullOrEmpty(url))
        {
            // no sites specified, bail out
            if (!bw.CancellationPending)
                bw.ReportProgress(0, new object[] { -1, -1, string.Empty, ProgressStatus.Failure, new Exception("No download URLs are specified.") });

            return;
        }

        // single site specified, add it to the list
        urlList = new List<string> { url };
    }

    // use the custom proxy if provided
    if (CustomProxy != null)
    {
        WebRequest.DefaultWebProxy = CustomProxy;
    }
    else
    {
        IWebProxy proxy = WebRequest.GetSystemWebProxy();

        if (proxy.Credentials == null)
            proxy.Credentials = CredentialCache.DefaultNetworkCredentials;

        WebRequest.DefaultWebProxy = proxy;
    }

    if (DownloadAll)
    {
        totalDownloadSize = 0;
        totalBytesDownloaded = 0;

        foreach (string s in urlList)
        {
            long size = DownloadData.GetFileSize(s);

            // only count sizes the server actually reported
            if (size > -1)
                totalDownloadSize += size;
        }
    }
    else
    {
        totalDownloadSize = -1;
    }

    // try each url in the list until one succeeds
    bool allFailedWaitingForResponse = true;
    Exception ex = null;

    // FIX: iterate by index instead of foreach + IndexOf. IndexOf always
    // returns the FIRST occurrence, so duplicate URLs in the list made
    // CurrentURLIndex wrong for every duplicate after the first.
    for (int i = 0; i < urlList.Count; i++)
    {
        CurrentURLIndex = i;
        ex = null;

        try
        {
            url = urlList[i];
            BeginDownload();
            ValidateDownload();

            // FIX: a successful download means we did get a response. Without
            // this, a fully successful DownloadAll run left the flag true and
            // redundantly re-downloaded every URL without the proxy below.
            allFailedWaitingForResponse = false;
        }
        catch (Exception except)
        {
            ex = except;

            if (!waitingForResponse)
                allFailedWaitingForResponse = false;
        }

        // If we got through that without an exception, we found a good url
        if (!DownloadAll && (ex == null || bw.CancellationPending))
        {
            allFailedWaitingForResponse = false;
            break;
        }
    }

    /*
     * If all the sites failed before a response was received then either the
     * internet connection is shot, or the Proxy is shot. Either way it can't
     * hurt to try downloading without the proxy:
     */
    if (allFailedWaitingForResponse && WebRequest.DefaultWebProxy != null)
    {
        // try the sites again without a proxy
        WebRequest.DefaultWebProxy = null;

        foreach (string s in urlList)
        {
            ex = null;

            try
            {
                url = s;
                BeginDownload();
                ValidateDownload();
            }
            catch (Exception except)
            {
                ex = except;
            }

            // If we got through that without an exception, we found a good url
            if (ex == null || bw.CancellationPending)
                break;
        }
    }

    // Process complete (either successfully or failed), report back
    if (bw.CancellationPending || ex != null)
        bw.ReportProgress(0, new object[] { -1, -1, string.Empty, ProgressStatus.Failure, ex });
    else
        bw.ReportProgress(0, new object[] { -1, -1, string.Empty, ProgressStatus.Success, null });
}
/// <summary>
/// Creates a DownloadData for the given URL: determines the remote file size,
/// derives a destination filename (Content-Disposition header, then the URL
/// path, then a random temp name), and — for HTTP responses with a known
/// size — attempts to resume a partially-downloaded file in destFolder.
/// </summary>
/// <param name="url">The HTTP or FTP URL to download.</param>
/// <param name="destFolder">The folder the file will be saved into.</param>
/// <returns>A DownloadData with an open response and Filename set.</returns>
/// <exception cref="Exception">Wraps any failure contacting the server.</exception>
public static DownloadData Create(string url, string destFolder)
{
    DownloadData downloadData = new DownloadData();
    WebRequest req = GetRequest(url);

    try
    {
        if (req is FtpWebRequest)
        {
            // FTP can't report the size on the download response itself, so
            // issue a dedicated SIZE request first, then a fresh request
            // for the actual file data.
            req.Method = WebRequestMethods.Ftp.GetFileSize;
            downloadData.response = req.GetResponse();
            downloadData.GetFileSize();

            // FIX: close the size-query response before replacing it;
            // the original overwrote it unclosed, leaking the FTP connection.
            downloadData.response.Close();

            // new request for downloading the FTP file
            req = GetRequest(url);
            downloadData.response = req.GetResponse();
        }
        else
        {
            downloadData.response = req.GetResponse();
            downloadData.GetFileSize();
        }
    }
    catch (Exception e)
    {
        throw new Exception(string.Format("Error downloading \"{0}\": {1}", url, e.Message), e);
    }

    // Check to make sure the response isn't an error. If it is this method
    // will throw exceptions.
    ValidateResponse(downloadData.response, url);

    // Prefer the filename the web server supplies in Content-Disposition.
    string fileName = downloadData.response.Headers["Content-Disposition"];

    if (fileName != null)
    {
        int fileLoc = fileName.IndexOf("filename=", StringComparison.OrdinalIgnoreCase);

        if (fileLoc != -1)
        {
            // go past "filename="
            fileLoc += 9;

            if (fileName.Length > fileLoc)
            {
                // trim off an ending semicolon if it exists
                int end = fileName.IndexOf(';', fileLoc);

                if (end == -1)
                    end = fileName.Length - fileLoc;
                else
                    end -= fileLoc;

                fileName = fileName.Substring(fileLoc, end).Trim();
            }
            else
                fileName = null;
        }
        else
            fileName = null;
    }

    if (string.IsNullOrEmpty(fileName))
    {
        // brute force the filename from the url
        fileName = Path.GetFileName(downloadData.response.ResponseUri.LocalPath);
    }

    // trim out non-standard filename characters
    if (!string.IsNullOrEmpty(fileName) && fileName.IndexOfAny(invalidFilenameChars.ToArray()) != -1)
    {
        // make a new string builder (with at least one bad character)
        StringBuilder newText = new StringBuilder(fileName.Length - 1);

        // remove the bad characters
        for (int i = 0; i < fileName.Length; i++)
        {
            if (invalidFilenameChars.IndexOf(fileName[i]) == -1)
                newText.Append(fileName[i]);
        }

        fileName = newText.ToString().Trim();
    }

    // if filename *still* is null or empty, then generate some random temp filename
    if (string.IsNullOrEmpty(fileName))
        fileName = Path.GetFileName(Path.GetTempFileName());

    string downloadTo = Path.Combine(destFolder, fileName);
    downloadData.Filename = downloadTo;

    // If we don't know how big the file is supposed to be,
    // we can't resume, so delete what we already have if something is on disk already.
    if (!downloadData.IsProgressKnown && File.Exists(downloadTo))
        File.Delete(downloadTo);

    if (downloadData.IsProgressKnown && File.Exists(downloadTo))
    {
        // We only support resuming on http requests
        if (!(downloadData.Response is HttpWebResponse))
        {
            File.Delete(downloadTo);
        }
        else
        {
            // Try and start where the file on disk left off
            downloadData.start = new FileInfo(downloadTo).Length;

            // If we have a file that's bigger than what is online, then something
            // strange happened. Delete it and start again.
            if (downloadData.start > downloadData.size)
            {
                File.Delete(downloadTo);
            }
            else if (downloadData.start < downloadData.size)
            {
                // Try and resume by creating a new request with a new start position
                downloadData.response.Close();
                req = GetRequest(url);

                // FIX: use the long overload of AddRange (.NET 4+); the original
                // cast the offset to int, which corrupts the requested range for
                // partially-downloaded files larger than 2 GB.
                ((HttpWebRequest)req).AddRange(downloadData.start);
                downloadData.response = req.GetResponse();

                if (((HttpWebResponse)downloadData.Response).StatusCode != HttpStatusCode.PartialContent)
                {
                    // They didn't support our resume request.
                    File.Delete(downloadTo);
                    downloadData.start = 0;
                }
            }
        }
    }

    return downloadData;
}