/// <summary>
/// Decompresses (or copies) this chunk's contents into the caller-supplied buffer.
/// </summary>
/// <param name="memory">Destination buffer; must hold at least <c>UncompressedLength</c> bytes.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="memory"/> is too small.</exception>
/// <exception cref="EndOfStreamException">Thrown when the raw stream ends before the expected byte count is read.</exception>
public void CopyToMemory(Memory<byte> memory)
{
    if (memory.Length < (int)UncompressedLength)
    {
        throw new ArgumentException("Memory buffer must be at least the uncompressed size of the chunk");
    }

    using var rawStream = GetRawStream();

    if (Compression == ArchiveChunkCompression.Zstandard)
    {
        using var zstdDecompressor = new ZstdDecompressor();
        // Rent a pooled buffer for the compressed payload; slice because Rent may over-allocate.
        using var buffer = MemoryPool<byte>.Shared.Rent((int)CompressedLength);
        var compressedMemory = buffer.Memory.Slice(0, (int)CompressedLength);

        ReadExact(rawStream, compressedMemory.Span);
        zstdDecompressor.DecompressData(compressedMemory.Span, memory.Span, out _);
    }
    else
    {
        // Uncompressed chunk: copy exactly UncompressedLength bytes, even if the
        // caller's buffer is larger than the chunk.
        ReadExact(rawStream, memory.Span.Slice(0, (int)UncompressedLength));
    }
}

// Reads exactly buffer.Length bytes from the stream. Stream.Read may legally
// return fewer bytes than requested, so a single call is not sufficient.
private static void ReadExact(Stream stream, Span<byte> buffer)
{
    int totalRead = 0;
    while (totalRead < buffer.Length)
    {
        int bytesRead = stream.Read(buffer.Slice(totalRead));
        if (bytesRead == 0)
        {
            throw new EndOfStreamException("Unexpected end of stream while reading chunk data");
        }
        totalRead += bytesRead;
    }
}
/// <summary>
/// Verifies that decompressing the sample .zst fixture reproduces the original file byte-for-byte.
/// </summary>
public void DecompressSampleFileTest()
{
    // Arrange
    byte[] compressed = File.ReadAllBytes(Path.Combine(TestContext.CurrentContext.TestDirectory, "testfiles/z000107.zst"));
    byte[] uncompressed = File.ReadAllBytes(Path.Combine(TestContext.CurrentContext.TestDirectory, "testfiles/z000107"));

    // Output buffer sized exactly to the expected uncompressed content (no padding).
    byte[] output = new byte[uncompressed.Length];

    // Dispose the decompressor so its native resources are released.
    using ZstdDecompressor decompressor = new ZstdDecompressor();

    // Act
    int decompressedSize = decompressor.Decompress(compressed, 0, compressed.Length, output, 0, output.Length);

    // Assert
    Assert.AreEqual(uncompressed.Length, decompressedSize);
    CollectionAssert.AreEqual(uncompressed, output);
}
/// <summary>
/// Verifies decompression into the middle of a padded output buffer for a frame
/// that carries a content checksum; bytes outside the target window must stay untouched.
/// </summary>
public void DecompressWithOutputPaddingAndChecksumTest()
{
    // Arrange
    int padding = 1021;

    byte[] compressed = File.ReadAllBytes(Path.Combine(TestContext.CurrentContext.TestDirectory, "testfiles/with-checksum.zst"));
    byte[] uncompressed = File.ReadAllBytes(Path.Combine(TestContext.CurrentContext.TestDirectory, "testfiles/with-checksum.txt"));

    byte[] output = new byte[uncompressed.Length + padding * 2]; // pre + post padding

    // Dispose the decompressor so its native resources are released.
    using ZstdDecompressor decompressor = new ZstdDecompressor();

    // Act
    // NOTE(review): the last argument passes the TOTAL buffer length even though writing
    // starts at 'padding' — confirm the API expects total length rather than the
    // remaining space (output.Length - padding).
    int decompressedSize = decompressor.Decompress(compressed, 0, compressed.Length, output, padding, output.Length);

    byte[] outputPaddingRemoved = output.Skip(padding).Take(decompressedSize).ToArray();

    // Assert
    Assert.AreEqual(uncompressed.Length, decompressedSize);
    CollectionAssert.AreEqual(uncompressed, outputPaddingRemoved);
}
/// <summary>
/// Validates the form inputs, configures an <c>ExtendedArchiveWriter</c> from the UI
/// controls, optionally loads the MD5 hash cache, then writes the archive on a
/// background task, reporting progress back to the UI and persisting the cache when done.
/// </summary>
public void Save()
{
    // Both a name and a destination are required; silently bail otherwise (original behavior).
    if (txtArchiveName.Text == "" || txtSaveLocation.Text == "")
    {
        return;
    }

    btnSave.Enabled = false;

    ExtendedArchiveWriter writer = new ExtendedArchiveWriter(txtArchiveName.Text);

    writer.DefaultCompression = (ArchiveChunkCompression)cmbCompression.SelectedIndex;
    writer.ChunkSizeLimit = (ulong)numChunkSize.Value * 1024 * 1024; // UI value is in MiB
    writer.Threads = (int)numThreads.Value;

    Core.Settings.Xx2Precision = (int)numXx2Precision.Value;
    Core.Settings.OpusMusicBitrate = (int)(numMusicBitrate.Value * 1000); // kbps -> bps
    Core.Settings.OpusVoiceBitrate = (int)(numVoiceBitrate.Value * 1000);

    // Progress<T> marshals reports back onto the UI thread that created it.
    IProgress<string> progressStatus = new Progress<string>(x => { txtSaveProg.AppendText(x); });
    IProgress<int> progressPercentage = new Progress<int>(x => { prgSaveProgress.Value = x; });

    // attempt loading md5 cache
    Core.Settings.UseMd5Cache = chkMd5Cache.Checked;

    if (Core.Settings.UseMd5Cache && File.Exists("HashCache.md5.zs"))
    {
        progressStatus.Report("Loading MD5 cache...\r\n");
        progressPercentage.Report(0);

        using (var decom = new ZstdDecompressor())
        {
            string rawCache = Encoding.ASCII.GetString(decom.Unwrap(File.ReadAllBytes("HashCache.md5.zs")));

            Core.Settings.Md5Cache = rawCache.Split('\n').Select(x => CachedMd5.FromString(x)).ToDictionary(x => x.Filename);
        }
    }

    Task.Run(() =>
    {
        FileStream arc = new FileStream(txtSaveLocation.Text, FileMode.Create);

        try
        {
            progressStatus.Report("Performing first pass...\r\n");
            progressPercentage.Report(0);

            // Materialize once: the original lazy SelectMany query was enumerated
            // twice (Count() and then foreach), walking the whole tree twice.
            var allNodes = trvFiles.Nodes
                .Cast<TreeNode>()
                .SelectMany(GetNodeBranch)
                .ToList();

            int i = 1;
            int total = allNodes.Count;

            foreach (TreeNode node in allNodes)
            {
                // Pattern match instead of an unchecked 'as' cast: the original
                // dereferenced the cast result and would NRE if Tag held a
                // non-SubfileHolder value.
                if (node.Tag is not SubfileHolder holder)
                {
                    continue;
                }

                ISubfile subfile = new PPeX.Subfile(
                    holder.Source,
                    node.Text,
                    node.Parent.Text);

                writer.Files.Add(subfile);

                i++;
                // Throttle progress updates to every 20th file.
                if (i % 20 == 0)
                {
                    progressPercentage.Report(100 * i / total);
                }
            }

            writer.Write(arc, progressStatus, progressPercentage);

            btnSave.DynamicInvoke(() => btnSave.Enabled = true);
        }
#if !DEBUG
        catch (Exception ex)
        {
            progressStatus.Report("ERROR: " + ex.Message + "\n");
            progressPercentage.Report(0);
        }
#endif
        finally
        {
            arc.Close();

            // write hash cache
            if (Core.Settings.UseMd5Cache)
            {
                using (var comp = new ZstdCompressor(new ZstdCompressionOptions(3)))
                {
                    var strings = Core.Settings.Md5Cache.Values.Select(x => x.ToWritableString()).ToList();

                    if (strings.Count > 0)
                    {
                        // string.Join is O(n); the original Aggregate concatenation
                        // rebuilt the string on every step (O(n^2)).
                        string rawCache = string.Join("\n", strings);

                        File.WriteAllBytes("HashCache.md5.zs", comp.Wrap(Encoding.ASCII.GetBytes(rawCache)));
                    }
                }
            }

            // Marshal final UI state changes back onto the UI thread.
            this.Invoke(new MethodInvoker(() =>
            {
                currentlyOpenedFile = Path.GetFileName(txtSaveLocation.Text);
                IsModified = false;
                btnSave.Enabled = true;
            }));
        }
    });
}