// Round-trip test for multi-part archives: compress into 0.5 MB volumes,
// then DecompressMultiPart must merge the volumes, delete the part files,
// and restore the original file bit-for-bit (verified via SHA1).
// Fix: `async Task` instead of `async void` — with `async void` the test
// runner cannot await the method, so assertion failures and exceptions
// were unobservable (test could silently pass).
public async Task Case5()
{
    var origF = SampleFinder.Get("sample_4.6mb");
    var oldHash = origF.SHA1ForFile();
    var outDir = TempDir.New();

    // NOTE(review): 7 volumes implies the compressed payload is ~3.0-3.5 MB
    // at 0.5 MB/volume — confirm the sample compresses consistently.
    var archives = await SevenZipper1.Compress(origF, outDir, 0.5);
    archives.MustHaveFiles(7);

    var list = await SevenZipper1.DecompressMultiPart(archives, outDir);

    // The merge step is expected to clean up the individual part files.
    foreach (var part in archives)
    {
        File.Exists(part).Should().BeFalse("Should delete parts after merge");
    }

    list.Should().HaveCount(1);
    var newF = Path.Combine(outDir, list[0]);
    var newHash = newF.SHA1ForFile();
    newHash.Should().Be(oldHash, "Hashes should match");

    // NOTE(review): exactly 2 files expected to remain in outDir after the
    // intermediate merged file ("OneBigF") is deleted — verify against
    // SevenZipper1.DecompressMultiPart's cleanup contract.
    Directory.GetFiles(outDir).Length.Should().Be(2, "Should delete OneBigF");

    Directory.Delete(outDir, true);
}
/// <summary>
/// Downloads every split part (ordered by PartNumber) into a fresh temp
/// folder, verifies each downloaded part's SHA1 against its advertised
/// PartHash, then merges and decompresses the parts.
/// </summary>
/// <param name="splitParts">Parts to fetch; order in the list is irrelevant
/// (they are re-sorted by PartNumber here).</param>
/// <param name="expectedHash">SHA1 expected of the final extracted file.</param>
/// <returns>Full path of the extracted file, or null on any download,
/// integrity, or decompression failure (a warning is raised first).</returns>
public async Task<string> DownloadAndExtract(List<R1SplitPart> splitParts, string expectedHash)
{
    var orderedParts = splitParts.OrderBy(x => x.PartNumber).ToList();
    _lastTempDir = CreateTempFolder();

    for (int i = 0; i < orderedParts.Count; i++)
    {
        Status = $"Downloading part {i + 1} of {orderedParts.Count}";
        var part = orderedParts[i];
        var path = Path.Combine(_lastTempDir, part.FileName);

        var byts = await GetPartContentByHash(part.PartHash);
        if (byts == null) { return null; }
        File.WriteAllBytes(path, byts);

        // Integrity check: the bytes written to disk must hash back to the
        // PartHash we requested them by.
        if (path.SHA1ForFile() != part.PartHash)
        {
            Warn("Expected PartHash did not match actual hash.");
            return null;
        }
        part.FullPathOrURL = path;
    }

    Status = "Merging and decompressing downloaded file ...";
    var paths = orderedParts.Select(x => x.FullPathOrURL);
    List<string> list = null;
    try
    {
        list = await SevenZipper1.DecompressMultiPart(paths, _lastTempDir);
    }
    catch (Exception ex)
    {
        Warn("[Decompress Error] Downloaded file may be corrupted." + L.f + ex.Info());
        return null;
    }

    // Fix: also guard against an EMPTY result list — `list[0]` below would
    // otherwise throw ArgumentOutOfRangeException instead of producing the
    // intended warning-and-null failure path.
    if (list == null || list.Count == 0)
    {
        Warn("[Decompress Fail] Downloaded file may be corrupted.");
        return null;
    }

    var exePath = Path.Combine(_lastTempDir, list[0]);
    if (exePath.SHA1ForFile() == expectedHash)
    {
        return exePath;
    }
    else
    {
        Warn("[Hash Mismatch] Downloaded file may be corrupted.");
        return null;
    }
}
// Builds an uploader on top of an existing downloader client: stores both
// configs/collaborators, installs a default message-box error handler, and
// extracts the embedded 7-Zip native library into the local binaries
// directory so later compression calls can find it.
public UploaderClient1(DownloaderClient1 downloaderClient, UploaderCfg uploaderCfg) : base(uploaderCfg)
{
    _downloadr = downloaderClient;
    _upCfg = uploaderCfg;

    // Surface any client error to the user directly.
    OnError = ex => MessageBox.Show(ex.Info());

    // Ensure the native archiver binary exists before first use.
    EmbeddedResrc.ExtractToFile<UploaderClient1>("7za.dll", "Archivers", SevenZipper1.GetLocalBinariesDir());
}
// Compresses a temp copy of the package into fixed-size ".data" volumes,
// then uploads the volumes one at a time in index order.
// Returns false as soon as any part fails to upload; true when all succeed.
public async Task<bool> UploadInParts(R1Package localPkg, double maxVolumeSizeMB)
{
    var tmpCopy = CopyToTemp(localPkg.FullPathOrURL);
    _package = localPkg;
    _partPaths = await SevenZipper1.Compress(tmpCopy, null, maxVolumeSizeMB, ".data");
    _pkgParts = new List<R1PackagePart>();

    var index = 0;
    while (index < _partPaths.Count)
    {
        if (!await UploadPartByIndex(index))
        {
            return false;
        }
        index++;
    }
    return true;
}
// Verifies that passing only the FIRST volume of a multi-part archive to
// Decompress still restores the complete original file (presumably the
// archiver locates the sibling volumes in the same directory — TODO confirm
// against SevenZipper1.Decompress).
// Fix: `async Task` instead of `async void` — with `async void` the test
// runner cannot await the method, so assertion failures and exceptions
// were unobservable (test could silently pass).
public async Task Case4()
{
    var origF = SampleFinder.Get("sample_4.6mb");
    var oldHash = origF.SHA1ForFile();
    var outDir = TempDir.New();

    var archives = await SevenZipper1.Compress(origF, outDir, 0.5);
    archives.MustHaveFiles(7);

    // Decompress from the first volume only.
    var list = await SevenZipper1.Decompress(archives[0], outDir);

    list.Should().HaveCount(1);
    var newF = Path.Combine(outDir, list[0]);
    var newHash = newF.SHA1ForFile();
    newHash.Should().Be(oldHash, "Hashes should match");

    Directory.Delete(outDir, true);
}
/// <summary>
/// Compresses a temp copy of the executable into ".data" volumes, uploads
/// each part (creating its remote node), then validates the whole upload by
/// downloading and hash-checking it against the local file hash.
/// </summary>
/// <param name="localExe">Executable whose file is being uploaded.</param>
/// <param name="maxVolumeSizeMB">Maximum size per volume; null semantics are
/// defined by SevenZipper1.Compress (presumably "no splitting" — confirm).</param>
/// <returns>True when all parts uploaded and the round-trip validation
/// passed; false (with a warning where applicable) otherwise.</returns>
internal async Task<bool> UploadNew(R1Executable localExe, double? maxVolumeSizeMB)
{
    IsBusy = true;
    // Fix: the original left IsBusy == true forever on the early failure
    // returns (node == null, validation failed); try/finally guarantees it
    // is reset on every exit path.
    try
    {
        Status = "Compressing ...";
        var tmpCopy = CopyToTemp(localExe.FullPathOrURL);
        var splitParts = new List<R1SplitPart>();
        var partPaths = await SevenZipper1.Compress(tmpCopy, null, maxVolumeSizeMB, ".data");

        for (int i = 0; i < partPaths.Count; i++)
        {
            Status = $"Uploading part {i + 1} of {partPaths.Count} ...";
            var r1Part = FilePart.ToR1Part(partPaths[i], localExe, i + 1, partPaths.Count);
            var node = await Create(r1Part, () => GetSplitPartIDsByHash(r1Part.PartHash));
            if (node == null) { return false; }
            splitParts.Add(r1Part);
        }

        var ok = await ValidateDownload(splitParts, localExe.FileHash);
        if (!ok)
        {
            //todo: delete corrupted uploaded parts
            return Alerter.Warn("Uploaded parts are invalid.", false);
        }
        return true;
    }
    finally
    {
        IsBusy = false;
    }
}