/// <summary>
/// Returns a refreshed version of the local metadata.
/// Reads from disk every file named in <paramref name="filesPaths"/> and computes its MD5 hash
/// concurrently, producing a dictionary keyed by file path.
/// </summary>
/// <param name="filesPaths">Paths of the files to read and hash.</param>
/// <param name="concurrencyLevel">Estimated number of threads that will update the dictionary concurrently.</param>
/// <returns>A dictionary mapping each path in <paramref name="filesPaths"/> to its freshly computed metadata.</returns>
public static ConcurrentDictionary<string, FileMetadata> GenerateLocalMetadata(string[] filesPaths, int concurrencyLevel)
{
    // BUG FIX: the ConcurrentDictionary constructor signature is (concurrencyLevel, capacity).
    // The arguments were previously swapped — (filesPaths.Length, concurrencyLevel) — which sized
    // the dictionary by the concurrency level and allocated one lock stripe per file.
    ConcurrentDictionary<string, FileMetadata> metadata =
        new ConcurrentDictionary<string, FileMetadata>(concurrencyLevel, filesPaths.Length);

    Parallel.ForEach(filesPaths, currentPath =>
    {
        using (FileStream stream = File.OpenRead(currentPath))
        {
            // The bigger the files to hash the bigger the speedup!
            metadata[currentPath] = new FileMetadata(currentPath, Md5HashFactory.GeneratedMd5HashFromStream(stream));
        }
    });

    return metadata;
}
/// <summary>
/// This method tries to return a <c>Task</c> responsible to download a specific file (<c>address</c>).
/// If it completes the download, it checks if the hash of the downloaded file equals the expected hash
/// of that same file (<paramref name="expectedHash"/>), throwing an <c>InvalidDataException</c> if it doesn't.
/// If the expected hash is null, it assumes that the downloaded file is the server metadata and executes the
/// respective sanity check on it, throwing an <c>InvalidDataException</c> if something is wrong with it.
/// This method also logs the download progress to the respective progress bar through the
/// BackgroundWorker (<paramref name="bw"/>), whenever it assumes it is necessary.
/// </summary>
/// <param name="bw">Worker used to report progress and log messages to the UI.</param>
/// <param name="address">URL of the file to download.</param>
/// <param name="expectedHash">Expected MD5 hash of the file, or null for the server-metadata sanity check.</param>
/// <returns>The downloaded file contents as a byte array.</returns>
/// <exception cref="InvalidDataException">The downloaded bytes fail the hash or sanity check.</exception>
private static async Task<byte[]> Download(BackgroundWorker bw, string address, string expectedHash)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();
    Utils.Progress(bw, 0, ProgressiveWidgetsEnum.ProgressBar.DownloadProgressBar);

    // BUG FIX: the response was previously obtained with .Result, which blocks the calling thread
    // inside an async method (sync-over-async) and risks deadlock / thread-pool starvation.
    using (HttpResponseMessage response = await HttpClient.GetAsync(address, HttpCompletionOption.ResponseHeadersRead))
    {
        response.EnsureSuccessStatusCode();

        // ROBUSTNESS: ContentLength is absent on chunked responses; the old direct cast of the
        // nullable would throw InvalidOperationException. Fall back to 0 and skip percent progress.
        long fileSize = response.Content.Headers.ContentLength ?? 0;

        using (Stream contentStream = await response.Content.ReadAsStreamAsync())
        using (MemoryStream ms = new MemoryStream())
        {
            long totalRead = 0;
            long totalReads = 0;
            byte[] buffer = new byte[DownloaderConfigs.BUFFER_SIZE];
            bool moreLeftToRead = true;
            int lastMark = 0;
            float speedAverage = 0;

            do
            {
                // A CancellationTokenSource is used to close the contentStream by force if it doesn't finish some ReadAsync()
                // under a specific amount of time (DownloaderConfigs.TIMEOUT_MS_WAITING_FOR_READ),
                // throwing an ObjectDisposedException or an AggregateException containing it, if that's the case.
                using (var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(DownloaderConfigs.TIMEOUT_MS_WAITING_FOR_READ)))
                {
                    cts.Token.Register(() => contentStream.Close());

                    int read = await contentStream.ReadAsync(buffer, 0, buffer.Length, cts.Token);
                    if (read == 0)
                    {
                        moreLeftToRead = false;
                    }
                    else
                    {
                        await ms.WriteAsync(buffer, 0, read);
                        totalRead += read;
                        totalReads++;

                        // It only attempts to log the download progress every x reads (DownloaderConfigs.INFORM_PROGRESS_EVERY_X_READS), due to performance reasons.
                        if (totalReads % DownloaderConfigs.INFORM_PROGRESS_EVERY_X_READS == 0)
                        {
                            // BUG FIX: guard against ElapsedMilliseconds == 0 on very fast first reads,
                            // which produced an Infinity speed sample via float division by zero.
                            speedAverage = RecalculateSpeedAverage((float)totalRead / Math.Max(1L, sw.ElapsedMilliseconds), speedAverage);
                            Utils.Log(bw,
                                      string.Format(DownloaderResources.DOWNLOAD_DATA,
                                                    Utils.BytesToString(totalRead, 2),
                                                    Utils.BytesToString(fileSize, 2),
                                                    Utils.BytesToString(Convert.ToInt64(speedAverage * 1000), 1)),
                                      ProgressiveWidgetsEnum.Label.DownloadSpeedLogger);

                            // Percent progress is only meaningful when the server reported a size.
                            if (fileSize > 0 && (float)totalRead / fileSize * 100 > lastMark)
                            {
                                lastMark = Convert.ToInt32((float)totalRead / fileSize * 100);
                                Utils.Progress(bw, lastMark, ProgressiveWidgetsEnum.ProgressBar.DownloadProgressBar);
                            }
                        }
                    }
                }
            }
            while (moreLeftToRead);

            sw.Stop();
            byte[] result = ms.ToArray();

            // expectedHash == null signals the server-metadata download; otherwise verify the MD5.
            if (expectedHash == null)
            {
                Utils.PerformPatchDirectorySanityCheck(result);
            }
            else if (!Md5HashFactory.GeneratedMd5HashFromByteArray(result).Equals(expectedHash))
            {
                throw new InvalidDataException("Downloaded file hash does not match the expected hash: " + address);
            }

            Utils.Progress(bw, 100, ProgressiveWidgetsEnum.ProgressBar.DownloadProgressBar);
            return result;
        }
    }
}