// Copy constructor: builds an independent FileData from a manifest file entry.
// Chunks are cloned element-by-element via ChunkData's copy constructor; the
// remaining fields are plain assignments (NOTE(review): FileHash appears to be
// copied by reference — confirm callers never mutate it in place).
public FileData(DepotManifest.FileData sourceData) : this()
{
    FileName = sourceData.FileName;
    Flags = sourceData.Flags;
    TotalSize = sourceData.TotalSize;
    FileHash = sourceData.FileHash;

    foreach (var sourceChunk in sourceData.Chunks)
    {
        Chunks.Add(new ChunkData(sourceChunk));
    }
}
/// <summary>
/// Downloads a single depot file into a temp file, reusing unchanged chunks from a
/// previously downloaded copy (tracked in a per-file JSON chunk cache), verifies the
/// final SHA-1 against the manifest, and moves the result into place.
/// </summary>
/// <param name="job">The depot job providing depot id/name and the CDN server used for logging.</param>
/// <param name="file">Manifest entry describing the file, its chunks and expected SHA-1.</param>
/// <param name="hash">SHA-1 of the copy we already have on disk, or null if unknown.</param>
/// <returns>
/// <see cref="EResult.SameAsPreviousValue"/> when nothing needed downloading,
/// <see cref="EResult.DataCorruption"/> when the downloaded data failed hash verification,
/// <see cref="EResult.OK"/> on success.
/// </returns>
private static async Task<EResult> DownloadFile(DepotProcessor.ManifestJob job, DepotManifest.FileData file, byte[] hash)
{
    var directory = Path.Combine(Application.Path, "files", DownloadFolders[job.DepotID], Path.GetDirectoryName(file.FileName));
    var finalPath = new FileInfo(Path.Combine(directory, Path.GetFileName(file.FileName)));
    var downloadPath = new FileInfo(Path.Combine(Path.GetTempPath(), Path.ChangeExtension(Path.GetRandomFileName(), ".steamdb_tmp")));

    if (!Directory.Exists(directory))
    {
        Directory.CreateDirectory(directory);
    }
    else if (file.TotalSize == 0)
    {
        // Zero-byte manifest entry: just make sure an empty file exists on disk.
        if (!finalPath.Exists)
        {
            using (var _ = finalPath.Create())
            {
                // FileInfo.Create returns a stream but we don't need it
            }

            Log.WriteInfo("FileDownloader", "{0} created an empty file", file.FileName);

            return EResult.SameAsPreviousValue;
        }
        else if (finalPath.Length == 0)
        {
#if DEBUG
            Log.WriteDebug("FileDownloader", "{0} is already empty", file.FileName);
#endif

            return EResult.SameAsPreviousValue;
        }
    }
    else if (hash != null && file.FileHash.SequenceEqual(hash))
    {
#if DEBUG
        Log.WriteDebug("FileDownloader", "{0} already matches the file we have", file.FileName);
#endif

        return EResult.SameAsPreviousValue;
    }

    // SHA-1 of the file *name* is used only to key the on-disk chunk cache file.
    byte[] checksum;

    using (var sha = SHA1.Create())
    {
        checksum = sha.ComputeHash(Encoding.UTF8.GetBytes(file.FileName));
    }

    var neededChunks = new List<DepotManifest.ChunkData>();
    var chunks = file.Chunks.OrderBy(x => x.Offset).ToList();
    var oldChunksFile = Path.Combine(Application.Path, "files", ".support", "chunks",
        string.Format("{0}-{1}.json", job.DepotID, BitConverter.ToString(checksum)));

    using (var fs = downloadPath.Open(FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        fs.SetLength((long)file.TotalSize);

        if (finalPath.Exists && File.Exists(oldChunksFile))
        {
            // Reuse chunks from the previous copy whose Adler hash still matches the manifest.
            var oldChunks = JsonConvert.DeserializeObject<List<DepotManifest.ChunkData>>(File.ReadAllText(oldChunksFile), JsonHandleAllReferences);

            using (var fsOld = finalPath.Open(FileMode.Open, FileAccess.Read))
            {
                foreach (var chunk in chunks)
                {
                    var oldChunk = oldChunks.Find(c => c.ChunkID.SequenceEqual(chunk.ChunkID));

                    if (oldChunk != null)
                    {
                        var oldData = new byte[oldChunk.UncompressedLength];
                        fsOld.Seek((long)oldChunk.Offset, SeekOrigin.Begin);
                        // Return value deliberately unchecked: a short read leaves zero padding,
                        // the Adler hash then differs and the chunk is simply re-downloaded.
                        fsOld.Read(oldData, 0, oldData.Length);

                        var existingChecksum = Utils.AdlerHash(oldData);

                        if (existingChecksum.SequenceEqual(chunk.Checksum))
                        {
                            fs.Seek((long)chunk.Offset, SeekOrigin.Begin);
                            fs.Write(oldData, 0, oldData.Length);
#if DEBUG
                            Log.WriteDebug("FileDownloader", "{0} Found chunk ({1}), not downloading", file.FileName, chunk.Offset);
#endif
                        }
                        else
                        {
                            neededChunks.Add(chunk);
#if DEBUG
                            Log.WriteDebug("FileDownloader", "{0} Found chunk ({1}), but checksum differs", file.FileName, chunk.Offset);
#endif
                        }
                    }
                    else
                    {
                        neededChunks.Add(chunk);
                    }
                }
            }
        }
        else
        {
            neededChunks = chunks;
        }
    }

    var downloadedSize = file.TotalSize - (ulong)neededChunks.Sum(x => x.UncompressedLength);

    Log.WriteInfo("FileDownloader", "Downloading {0} ({1} bytes, {2} out of {3} chunks)", file.FileName, downloadedSize, neededChunks.Count, chunks.Count);

    // FIX: the CancellationTokenSource was previously never disposed.
    using (var chunkCancellation = new CancellationTokenSource())
    {
        var chunkTasks = new Task[neededChunks.Count];

        for (var i = 0; i < chunkTasks.Length; i++)
        {
            var chunk = neededChunks[i];
            chunkTasks[i] = TaskManager.Run(async () =>
            {
                chunkCancellation.Token.ThrowIfCancellationRequested();

                // FIX: acquire the semaphore *before* entering the try/finally. The old code
                // put WaitAsync inside the try block, so a cancelled WaitAsync (or the
                // ThrowIfCancellationRequested above) still ran Release() in the finally,
                // over-releasing a semaphore that was never acquired.
                await ChunkDownloadingSemaphore.WaitAsync(chunkCancellation.Token).ConfigureAwait(false);

                try
                {
                    var result = await DownloadChunk(job, chunk, downloadPath).ConfigureAwait(false);

                    if (!result)
                    {
                        Log.WriteWarn("FileDownloader", "Failed to download chunk for {0}", file.FileName);

                        chunkCancellation.Cancel();
                    }
                    else
                    {
                        // Benign race between chunk tasks: the counter only feeds the
                        // approximate console progress line below.
                        downloadedSize += chunk.UncompressedLength;

                        // Do not write progress info to log file
                        Console.WriteLine("{2} [{0,6:#00.00}%] {1}", downloadedSize / (float)file.TotalSize * 100.0f, file.FileName, job.DepotName);
                    }
                }
                finally
                {
                    ChunkDownloadingSemaphore.Release();
                }
            }).Unwrap();

            // Register error handler on inner task
            TaskManager.RegisterErrorHandler(chunkTasks[i]);
        }

        await Task.WhenAll(chunkTasks).ConfigureAwait(false);
    }

    // Verify the assembled temp file against the manifest's SHA-1 before moving it into place.
    using (var fs = downloadPath.Open(FileMode.Open, FileAccess.ReadWrite))
    {
        fs.Seek(0, SeekOrigin.Begin);

        using (var sha = SHA1.Create())
        {
            checksum = sha.ComputeHash(fs);
        }
    }

    if (!file.FileHash.SequenceEqual(checksum))
    {
        IRC.Instance.SendOps("{0}[{1}]{2} Failed to correctly download {3}{4}", Colors.OLIVE, job.DepotName, Colors.NORMAL, Colors.BLUE, file.FileName);
        Log.WriteWarn("FileDownloader", "Failed to download file {0} ({1})", file.FileName, job.Server);

        downloadPath.Delete();

        return EResult.DataCorruption;
    }

    Log.WriteInfo("FileDownloader", "Downloaded {0} from {1}", file.FileName, job.DepotName);

    finalPath.Delete();
    downloadPath.MoveTo(finalPath.FullName);

    if (chunks.Count > 1)
    {
        // Persist the chunk layout so the next download of this file can reuse unchanged chunks.
        File.WriteAllText(oldChunksFile, JsonConvert.SerializeObject(chunks, Formatting.None, JsonHandleAllReferences));
    }
    else if (File.Exists(oldChunksFile))
    {
        File.Delete(oldChunksFile);
    }

    return EResult.OK;
}
/// <summary>
/// Downloads a single depot file into a temp file, reusing chunks from the existing
/// on-disk copy when their SHA-1 matches the manifest's ChunkID, verifies the final
/// file hash, and moves the result into place. Updates <paramref name="existingFile"/>
/// with the new per-offset chunk ids on success, or clears it on corruption.
/// </summary>
/// <param name="job">Depot job providing depot id/name, server, and the shared DownloadCorrupted flag.</param>
/// <param name="file">Manifest entry describing the file, its chunks, and expected SHA-1.</param>
/// <param name="existingFile">Known state of the copy we already have (file hash + offset→ChunkID map).</param>
/// <returns>SameAsPreviousValue when nothing needed downloading, DataCorruption on hash mismatch, OK on success.</returns>
private static async Task <EResult> DownloadFile(DepotProcessor.ManifestJob job, DepotManifest.FileData file, ExistingFileData existingFile)
{
    var directory = Path.Combine(Application.Path, "files", DownloadFolders[job.DepotID], Path.GetDirectoryName(file.FileName));
    var finalPath = new FileInfo(Path.Combine(directory, Path.GetFileName(file.FileName)));
    var downloadPath = new FileInfo(Path.Combine(Path.GetTempPath(), Path.ChangeExtension(Path.GetRandomFileName(), ".steamdb_tmp")));

    if (!Directory.Exists(directory))
    {
        Directory.CreateDirectory(directory);
    }
    else if (file.TotalSize == 0)
    {
        // Zero-byte manifest entry: just make sure an empty file exists on disk.
        if (!finalPath.Exists)
        {
            await using (var _ = finalPath.Create())
            {
                // FileInfo.Create returns a stream but we don't need it
            }

            Log.WriteInfo($"FileDownloader {job.DepotID}", $"{file.FileName} created an empty file");

            return(EResult.SameAsPreviousValue);
        }
        else if (finalPath.Length == 0)
        {
#if DEBUG
            Log.WriteDebug($"FileDownloader {job.DepotID}", $"{file.FileName} is already empty");
#endif

            return(EResult.SameAsPreviousValue);
        }
    }
    else if (existingFile.FileHash != null && file.FileHash.SequenceEqual(existingFile.FileHash))
    {
#if DEBUG
        Log.WriteDebug($"FileDownloader {job.DepotID}", $"{file.FileName} already matches the file we have");
#endif

        return(EResult.SameAsPreviousValue);
    }

    // Single SHA1 instance reused for per-chunk verification below and the final
    // whole-file hash; all uses are sequential, never concurrent.
    using var sha = SHA1.Create();

    var neededChunks = new List <DepotManifest.ChunkData>();
    var chunks = file.Chunks.OrderBy(x => x.Offset).ToList();

    await using (var fs = downloadPath.Open(FileMode.OpenOrCreate, FileAccess.ReadWrite))
    {
        // Pre-size the temp file so chunks can be written at their final offsets.
        fs.SetLength((long)file.TotalSize);

        if (finalPath.Exists)
        {
            await using var fsOld = finalPath.Open(FileMode.Open, FileAccess.Read);

            foreach (var chunk in chunks)
            {
                // existingFile.Chunks maps offset → ChunkID; find the same chunk in the old copy.
                var oldChunk = existingFile.Chunks.FirstOrDefault(c => c.Value.SequenceEqual(chunk.ChunkID));

                if (oldChunk.Value != null)
                {
                    var oldData = new byte[chunk.UncompressedLength];
                    fsOld.Seek((long)oldChunk.Key, SeekOrigin.Begin);
                    // NOTE(review): the return value of Read is ignored — a short read would
                    // leave zero padding, make the hash below differ, and force a re-download.
                    fsOld.Read(oldData, 0, oldData.Length);

                    // Comparing the SHA-1 of the old data against chunk.ChunkID implies ChunkID
                    // is itself the SHA-1 of the uncompressed chunk — TODO confirm upstream.
                    var existingChecksum = sha.ComputeHash(oldData);

                    if (existingChecksum.SequenceEqual(chunk.ChunkID))
                    {
                        fs.Seek((long)chunk.Offset, SeekOrigin.Begin);
                        fs.Write(oldData, 0, oldData.Length);
#if DEBUG
                        Log.WriteDebug($"FileDownloader {job.DepotID}", $"{file.FileName} Found chunk ({chunk.Offset}), not downloading");
#endif
                    }
                    else
                    {
                        neededChunks.Add(chunk);
#if DEBUG
                        Log.WriteDebug($"FileDownloader {job.DepotID}", $"{file.FileName} Found chunk ({chunk.Offset}), but checksum differs");
#endif
                    }
                }
                else
                {
                    neededChunks.Add(chunk);
                }
            }
        }
        else
        {
            // No previous copy at all — every chunk must be downloaded.
            neededChunks = chunks;
        }
    }

    using var chunkCancellation = new CancellationTokenSource();
    var downloadedSize = file.TotalSize - (ulong)neededChunks.Sum(x => x.UncompressedLength);
    var chunkTasks = new Task[neededChunks.Count];

    Log.WriteInfo($"FileDownloader {job.DepotID}", $"Downloading {file.FileName} ({neededChunks.Count} out of {chunks.Count} chunks to download)");

    for (var i = 0; i < chunkTasks.Length; i++)
    {
        var chunk = neededChunks[i];
        chunkTasks[i] = TaskManager.Run(async() =>
        {
            // Cancellation is shared: first failed chunk cancels the remaining downloads.
            var result = await DownloadChunk(job, chunk, downloadPath, chunkCancellation);

            if (!result)
            {
                Log.WriteWarn($"FileDownloader {job.DepotID}", $"Failed to download chunk for {file.FileName} ({chunk.Offset})");

                chunkCancellation.Cancel();
            }
            else
            {
                // NOTE(review): unsynchronized increment across chunk tasks — only feeds
                // the approximate console progress line, so the race looks intentional.
                downloadedSize += chunk.UncompressedLength;

                // Do not write progress info to log file
                Console.WriteLine($"{job.DepotName} [{downloadedSize / (float) file.TotalSize * 100.0f,6:#00.00}%] {file.FileName}");
            }
        });
    }

    await Task.WhenAll(chunkTasks).ConfigureAwait(false);

    // Verify the assembled temp file against the manifest's whole-file SHA-1.
    byte[] checksum;

    await using (var fs = downloadPath.Open(FileMode.Open, FileAccess.ReadWrite))
    {
        checksum = await sha.ComputeHashAsync(fs, chunkCancellation.Token);
    }

    if (!file.FileHash.SequenceEqual(checksum))
    {
        // Only announce the first corrupted file per job to avoid spamming IRC.
        if (!job.DownloadCorrupted)
        {
            job.DownloadCorrupted = true;

            IRC.Instance.SendOps($"{Colors.OLIVE}[{job.DepotName}]{Colors.NORMAL} Failed to correctly download {Colors.BLUE}{file.FileName}");
        }

        Log.WriteWarn($"FileDownloader {job.DepotID}", $"Hash check failed for {file.FileName} ({job.Server})");

        downloadPath.Delete();

        // Invalidate cached state so the next attempt starts from scratch.
        existingFile.FileHash = null;
        existingFile.Chunks.Clear();

        return(EResult.DataCorruption);
    }

    Log.WriteInfo($"FileDownloader {job.DepotID}", $"Downloaded {file.FileName}");

    finalPath.Delete();
    downloadPath.MoveTo(finalPath.FullName);

    // Remember the new chunk layout (offset → ChunkID) for future partial downloads.
    if (chunks.Count > 0)
    {
        existingFile.Chunks = chunks.ToDictionary(chunk => chunk.Offset, chunk => chunk.ChunkID);
    }
    else
    {
        existingFile.Chunks.Clear();
    }

    return(EResult.OK);
}