/// <summary>
/// Main entry point. Repeatedly prompts for a parallel-download count, runs the
/// download configured in app settings, prints a summary, and loops while the
/// user answers "y" to the retry prompt.
/// </summary>
/// <param name="args">Arguments to be passed to the application (unused).</param>
public static void Main(string[] args)
{
    try
    {
        Downloader.ProgressUpdateEvent += Print;
        string retry;
        do
        {
            Print("How many parallel downloads do you want to execute?");
            // BUG FIX: int.Parse threw an unhandled FormatException on non-numeric
            // input (only FriendlyException is caught below). Re-prompt instead.
            int numOfParallelDownloads;
            while (!int.TryParse(Console.ReadLine(), out numOfParallelDownloads))
            {
                Print("Please enter a valid whole number.");
            }

            DownloadResult result = Downloader.Download(
                ConfigurationManager.AppSettings["fileUrl"],
                ConfigurationManager.AppSettings["downloadLocation"],
                numOfParallelDownloads);

            Print($"Download Summary:\n FileSize: {DisplayFormatHelper.FormatSize(result.Size)}\n Number of chunks: {numOfParallelDownloads}" +
                  $"\n Chunk size: {DisplayFormatHelper.FormatSize(result.ChunkSize)}\n Time taken : {DisplayFormatHelper.TimeSpanDisplayFormat(result.TimeTaken)} \n Downloaded File: {result.FilePath}");
            Print("Try again? (Y/N)");
            retry = Console.ReadLine();
            // BUG FIX: ToLower() == "y" was a culture-sensitive comparison (fails for
            // "Y" under tr-TR); use an ordinal case-insensitive comparison instead.
        } while (!string.IsNullOrWhiteSpace(retry) && retry.Equals("y", StringComparison.OrdinalIgnoreCase));
    }
    catch (FriendlyException ex)
    {
        // FriendlyException messages are written for end users; show them verbatim.
        Console.WriteLine(ex.Message);
    }
    Console.Read();
}
/// <summary>
/// Downloads <paramref name="fileUrl"/> in <paramref name="numberOfParallelDownloads"/>
/// byte-range chunks (one temp file per chunk, downloaded in parallel) and merges the
/// chunks, in range order, into a single file under <paramref name="destinationFolderPath"/>.
/// </summary>
/// <param name="fileUrl">URL of the file; the last URI segment becomes the local file name.</param>
/// <param name="destinationFolderPath">Existing folder that receives the merged file.</param>
/// <param name="numberOfParallelDownloads">Number of chunks and max degree of parallelism; must be &gt; 0.</param>
/// <returns>A <see cref="DownloadResult"/> with size, chunk size, elapsed time and destination path.</returns>
/// <exception cref="FriendlyException">
/// Invalid folder, non-positive chunk count, missing Content-Length header, or a file
/// smaller than the requested chunk count.
/// </exception>
public static DownloadResult Download(string fileUrl, string destinationFolderPath, int numberOfParallelDownloads)
{
    downloadedChunks = 0;
    Uri uri = new Uri(fileUrl);

    //Input validation
    if (!Directory.Exists(destinationFolderPath))
    {
        throw new FriendlyException($"Invalid value for destinationFolderPath. Directory {destinationFolderPath} does not exist.");
    }
    if (numberOfParallelDownloads <= 0)
    {
        throw new FriendlyException("Invalid value for numberOfParallelDownloads. Please enter a value greater than zero.");
    }

    //Calculate destination path
    String destinationFilePath = Path.Combine(destinationFolderPath, uri.Segments.Last());
    DownloadResult result = new DownloadResult() { FilePath = destinationFilePath };

    #region Get file size
    // HEAD request: fetch only the headers so we can size the chunks up front.
    WebRequest webRequest = WebRequest.Create(fileUrl);
    webRequest.Method = "HEAD";
    long responseLength;
    using (WebResponse webResponse = webRequest.GetResponse())
    {
        if (!webResponse.Headers.AllKeys.Contains("Content-Length"))
        {
            throw new FriendlyException("Unable to download file. Content-Length not present.");
        }
        responseLength = long.Parse(webResponse.Headers.Get("Content-Length"));
        result.Size = responseLength;
    }
    #endregion

    UpdateProgress($"File Size:{responseLength} bytes");

    if (File.Exists(destinationFilePath))
    {
        File.Delete(destinationFilePath);
    }

    if (responseLength < numberOfParallelDownloads)
    {
        // BUG FIX: the old message told the user to "select a value less than
        // {numberOfParallelDownloads}" — i.e. less than the value they just entered.
        // The actual upper bound is the file size in bytes.
        throw new FriendlyException($"The file is too small to be divided into chunks to have {numberOfParallelDownloads} parallel downloads. Please select a value less than or equal to {responseLength}.");
    }

    UpdateProgress("Dividing in to chunks...");
    ConcurrentDictionary<long, string> tempFilesDictionary = new ConcurrentDictionary<long, string>();

    #region Calculate ranges
    // First n-1 chunks are equally sized; the last chunk absorbs the remainder so the
    // ranges cover [0, responseLength - 1] exactly. Start/End are inclusive offsets.
    List<DataRange> readRanges = new List<DataRange>();
    for (int chunk = 0; chunk < numberOfParallelDownloads - 1; chunk++)
    {
        DataRange range = new DataRange()
        {
            Start = chunk * (responseLength / numberOfParallelDownloads),
            End = ((chunk + 1) * (responseLength / numberOfParallelDownloads)) - 1
        };
        readRanges.Add(range);
    }
    readRanges.Add(new DataRange()
    {
        Start = readRanges.Any() ? readRanges.Last().End + 1 : 0,
        End = responseLength - 1
    });
    // BUG FIX: ranges are inclusive, so the chunk size is End - Start + 1
    // (the old End - Start under-reported every chunk by one byte).
    result.ChunkSize = readRanges[0].End - readRanges[0].Start + 1;
    #endregion

    UpdateProgress($"Divided into {numberOfParallelDownloads} chunks of {readRanges[0].End - readRanges[0].Start + 1} bytes each.");
    DateTime startTime = DateTime.Now;

    #region Parallel download
    long total = readRanges.Count();
    UpdateProgress("Starting downloads...");
    Parallel.ForEach(readRanges, new ParallelOptions() { MaxDegreeOfParallelism = numberOfParallelDownloads }, readRange =>
    {
        DateTime chunkStart = DateTime.Now;
        HttpWebRequest httpWebRequest = WebRequest.Create(fileUrl) as HttpWebRequest;
        httpWebRequest.Method = "GET";
        // Range header makes the server return only this chunk's bytes (inclusive).
        httpWebRequest.AddRange(readRange.Start, readRange.End);
        using (HttpWebResponse httpWebResponse = httpWebRequest.GetResponse() as HttpWebResponse)
        {
            String tempFilePath = Path.GetTempFileName();
            // BUG FIX: FileShare.Write allowed other writers into our private temp
            // file; the chunk file must be exclusively owned while we write it.
            using (FileStream fileStream = new FileStream(tempFilePath, FileMode.Create, FileAccess.Write, FileShare.None))
            {
                httpWebResponse.GetResponseStream().CopyTo(fileStream);
                tempFilesDictionary.TryAdd(readRange.Start, tempFilePath);
            }
        }
        // BUG FIX: ++ on a shared field is a non-atomic read-modify-write; under
        // Parallel.ForEach increments could be lost. Interlocked makes it atomic.
        System.Threading.Interlocked.Increment(ref downloadedChunks);
        UpdateProgress(readRange.End - readRange.Start, DateTime.Now - chunkStart, DateTime.Now - startTime, total, numberOfParallelDownloads);
    });
    #endregion

    result.TimeTaken = DateTime.Now.Subtract(startTime);
    UpdateProgress($"Total time for downloading : {DisplayFormatHelper.TimeSpanDisplayFormat(result.TimeTaken)}");
    UpdateProgress("Merging chunks..");

    #region Merge to single file
    // Chunks are keyed by their starting offset, so ordering by key reassembles
    // the file in the correct byte order regardless of download completion order.
    using (FileStream destinationStream = new FileStream(destinationFilePath, FileMode.Append))
    {
        foreach (KeyValuePair<long, string> tempFile in tempFilesDictionary.OrderBy(b => b.Key))
        {
            byte[] tempFileBytes = File.ReadAllBytes(tempFile.Value);
            destinationStream.Write(tempFileBytes, 0, tempFileBytes.Length);
            File.Delete(tempFile.Value);
        }
    }
    #endregion

    // Re-measure so TimeTaken also includes the merge phase.
    result.TimeTaken = DateTime.Now.Subtract(startTime);
    UpdateProgress("Process complete!");
    return (result);
}
/// <summary>
/// Downloads <paramref name="fileUrl"/> in parallel byte-range chunks and merges them
/// into a single file under <paramref name="destinationFolderPath"/>. When
/// <paramref name="numberOfParallelDownloads"/> is non-positive, the processor count is used.
/// </summary>
/// <param name="fileUrl">URL of the file; the last URI segment becomes the local file name.</param>
/// <param name="destinationFolderPath">Folder that receives the merged file.</param>
/// <param name="numberOfParallelDownloads">Chunk count / max parallelism; &lt;= 0 means Environment.ProcessorCount.</param>
/// <param name="validateSSL">When false (default), server certificate validation is disabled process-wide.</param>
/// <returns>A <see cref="DownloadResult"/> with size, parallel-download count, elapsed time and destination path.</returns>
/// <exception cref="FriendlyException">The server response has no Content-Length header.</exception>
public static DownloadResult Download(String fileUrl, String destinationFolderPath, int numberOfParallelDownloads = 0, bool validateSSL = false)
{
    if (!validateSSL)
    {
        // SECURITY: this callback disables TLS certificate validation for the ENTIRE
        // process (ServicePointManager is global), not just this request — and the
        // insecure path is the default. Pass validateSSL: true unless you are
        // deliberately talking to a self-signed/test endpoint.
        ServicePointManager.ServerCertificateValidationCallback = delegate { return (true); };
    }

    Uri uri = new Uri(fileUrl);

    //Calculate destination path
    String destinationFilePath = Path.Combine(destinationFolderPath, uri.Segments.Last());
    DownloadResult result = new DownloadResult() { FilePath = destinationFilePath };

    //Handle number of parallel downloads
    if (numberOfParallelDownloads <= 0)
    {
        numberOfParallelDownloads = Environment.ProcessorCount;
    }

    #region Get file size
    // HEAD request: fetch only the headers so we can size the chunks up front.
    WebRequest webRequest = WebRequest.Create(fileUrl);
    webRequest.Method = "HEAD";
    long responseLength;
    using (WebResponse webResponse = webRequest.GetResponse())
    {
        // BUG FIX: long.Parse(null) threw an opaque ArgumentNullException when the
        // header was absent; fail with the same FriendlyException the sibling
        // overload uses.
        if (!webResponse.Headers.AllKeys.Contains("Content-Length"))
        {
            throw new FriendlyException("Unable to download file. Content-Length not present.");
        }
        responseLength = long.Parse(webResponse.Headers.Get("Content-Length"));
        result.Size = responseLength;
    }
    #endregion

    if (File.Exists(destinationFilePath))
    {
        File.Delete(destinationFilePath);
    }

    using (FileStream destinationStream = new FileStream(destinationFilePath, FileMode.Append))
    {
        ConcurrentDictionary<long, String> tempFilesDictionary = new ConcurrentDictionary<long, String>();
        var readRanges = CalculateRanges(numberOfParallelDownloads, responseLength);
        DateTime startTime = DateTime.Now;

        #region Parallel download
        int index = 0;
        Parallel.ForEach(readRanges, new ParallelOptions() { MaxDegreeOfParallelism = numberOfParallelDownloads }, readRange =>
        {
            var tempFilePath = string.Empty;
            if (WebRequest.Create(fileUrl) is HttpWebRequest httpWebRequest)
            {
                tempFilePath = DownloadChunk(httpWebRequest, readRange);
            }
            tempFilesDictionary.TryAdd(readRange.Start, tempFilePath);
            // BUG FIX: index++ on a captured local shared across parallel iterations
            // is a non-atomic read-modify-write; increments could be lost.
            System.Threading.Interlocked.Increment(ref index);
        });
        result.ParallelDownloads = index;
        #endregion

        result.TimeTaken = DateTime.Now.Subtract(startTime);

        #region Merge to single file
        // Chunks are keyed by starting offset; ordering by key reassembles the file
        // in correct byte order regardless of download completion order.
        foreach (var tempFile in tempFilesDictionary.OrderBy(b => b.Key))
        {
            byte[] tempFileBytes = File.ReadAllBytes(tempFile.Value);
            destinationStream.Write(tempFileBytes, 0, tempFileBytes.Length);
            File.Delete(tempFile.Value);
        }
        #endregion

        return (result);
    }
}