/// <summary>
/// Recompresses every file in <c>BaseChunk</c> that is not yet present in
/// <c>BaseCache.LoadedFiles</c> and stores the zstd-compressed result in the
/// cache allocator. Work is dispatched to background tasks and this method
/// returns immediately (fire-and-forget by design).
/// </summary>
public void Allocate()
{
	// Distinct MD5 hashes of chunk files missing from the cache; nothing to do
	// when every file is already loaded.
	var filesToAllocate = BaseChunk.Files
		.Where(x => !BaseCache.LoadedFiles.ContainsKey(x.RawSource.Md5))
		.Select(x => (Md5Hash)x.RawSource.Md5)
		.Distinct()
		.ToArray();

	if (filesToAllocate.Length == 0)
	{
		return;
	}

	List<Task> recompressionTasks = new List<Task>();

	// One shared buffer holds the entire uncompressed chunk; every task slices
	// its own file out of it, so it must stay alive until all tasks finish.
	var memoryBuffer = MemoryPool<byte>.Shared.Rent((int)BaseChunk.UncompressedLength);

	BaseChunk.CopyToMemory(memoryBuffer.Memory);

	foreach (var hash in filesToAllocate)
	{
		var subfile = BaseChunk.Files.First(x => x.RawSource.Md5 == hash);

		recompressionTasks.Add(Task.Run(() =>
		{
			// BUGFIX: use the factory overload so a CachedFile is only
			// constructed when the key is actually absent — the value overload
			// allocates unconditionally, even when a racing thread already
			// added the entry.
			var cachedFile = BaseCache.LoadedFiles.GetOrAdd(hash,
				h => new CachedFile(BaseCache, h, (long)subfile.Size));

			using var zstdCompressor = new ZstdCompressor();
			using var compressedMemoryBuffer =
				MemoryPool<byte>.Shared.Rent(ZstdCompressor.GetUpperCompressionBound((int)subfile.Size));

			var uncompressedSource = memoryBuffer.Memory.Slice(
				(int)subfile.RawSource.Offset,
				(int)subfile.RawSource.Size);

			// Level 3 zstd, matching the cache-write level used elsewhere in the project.
			zstdCompressor.CompressData(uncompressedSource.Span,
				compressedMemoryBuffer.Memory.Span,
				3,
				out int compressedSize);

			var pointer = BaseCache.Allocator.Allocate(compressedSize);

			using var pointerBuffer = pointer.GetReference();
			compressedMemoryBuffer.Memory.Slice(0, compressedSize).CopyTo(pointerBuffer.Memory);

			cachedFile.CompressedData = pointer;
		}));
	}

	// Release the shared buffer once every task has completed (success or not),
	// and observe the aggregate exception so a faulted task does not surface
	// later as an UnobservedTaskException on the finalizer thread.
	Task.WhenAll(recompressionTasks).ContinueWith(t =>
	{
		memoryBuffer.Dispose();
		_ = t.Exception; // BUGFIX: mark any fault as observed
	}, TaskContinuationOptions.ExecuteSynchronously);
}
/// <summary>
/// Validates the UI input, configures an ExtendedArchiveWriter from the form
/// controls, then writes the archive on a background task while reporting
/// progress back to the UI through Progress&lt;T&gt; instances.
/// </summary>
public void Save()
{
	// Require both an archive name and a destination path before doing anything.
	if (txtArchiveName.Text == "" || txtSaveLocation.Text == "")
	{
		return;
	}

	// Prevent re-entrant saves while the background write runs.
	btnSave.Enabled = false;

	ExtendedArchiveWriter writer = new ExtendedArchiveWriter(txtArchiveName.Text);

	writer.DefaultCompression = (ArchiveChunkCompression)cmbCompression.SelectedIndex;
	writer.ChunkSizeLimit = (ulong)numChunkSize.Value * 1024 * 1024; // UI value is in MiB
	writer.Threads = (int)numThreads.Value;

	Core.Settings.Xx2Precision = (int)numXx2Precision.Value;
	Core.Settings.OpusMusicBitrate = (int)(numMusicBitrate.Value * 1000); // UI values are in kbps
	Core.Settings.OpusVoiceBitrate = (int)(numVoiceBitrate.Value * 1000);

	// Progress<T> captures the current SynchronizationContext, so these callbacks
	// run on the UI thread even when Report() is called from the task below.
	IProgress<string> progressStatus = new Progress<string>(x =>
	{
		txtSaveProg.AppendText(x);
	});

	IProgress<int> progressPercentage = new Progress<int>(x =>
	{
		prgSaveProgress.Value = x;
	});

	// Attempt to load the MD5 cache so unchanged files can skip re-hashing.
	Core.Settings.UseMd5Cache = chkMd5Cache.Checked;

	if (Core.Settings.UseMd5Cache && File.Exists("HashCache.md5.zs"))
	{
		progressStatus.Report("Loading MD5 cache...\r\n");
		progressPercentage.Report(0);

		using (var decom = new ZstdDecompressor())
		{
			// Cache format: zstd-wrapped ASCII, one CachedMd5 entry per '\n'-separated line.
			string rawCache = Encoding.ASCII.GetString(decom.Unwrap(File.ReadAllBytes("HashCache.md5.zs")));

			Core.Settings.Md5Cache = rawCache.Split('\n').Select(x => CachedMd5.FromString(x)).ToDictionary(x => x.Filename);
		}
	}

	Task.Run(() =>
	{
		FileStream arc = new FileStream(txtSaveLocation.Text, FileMode.Create);

		try
		{
			// First pass: walk the whole tree and queue every subfile with the writer.
			progressStatus.Report("Performing first pass...\r\n");
			progressPercentage.Report(0);

			var allNodes = trvFiles.Nodes
				.Cast<TreeNode>()
				.SelectMany(GetNodeBranch);

			int i = 1;
			// NOTE(review): allNodes is a deferred LINQ query — Count() here plus the
			// foreach below enumerates the tree twice; consider materializing once.
			int total = allNodes.Count();

			foreach (TreeNode node in allNodes)
			{
				// Untagged nodes carry no subfile (presumably directory nodes) — skip them.
				if (node.Tag == null)
				{
					continue;
				}

				// NOTE(review): 'as' cast with no null check — a tagged node whose Tag is
				// not a SubfileHolder would throw NullReferenceException below; confirm
				// the tree only ever tags nodes with SubfileHolder.
				var holder = node.Tag as SubfileHolder;

				ISubfile subfile = new PPeX.Subfile(
					holder.Source,
					node.Text,
					node.Parent.Text);

				writer.Files.Add(subfile);

				i++;
				if (i % 20 == 0)
				{
					// Throttle UI progress updates to every 20th node.
					progressPercentage.Report(100 * i / total);
				}
			}

			writer.Write(arc, progressStatus, progressPercentage);

			// NOTE(review): DynamicInvoke here appears to marshal onto the UI thread
			// (project extension?) — verify; btnSave.Enabled is also reset in finally.
			btnSave.DynamicInvoke(() => btnSave.Enabled = true);
		}
// NOTE(review): in DEBUG builds this catch is compiled out, so any exception
// faults the Task.Run task unobserved (finally still runs).
#if !DEBUG
		catch (Exception ex)
		{
			progressStatus.Report("ERROR: " + ex.Message + "\n");
			progressPercentage.Report(0);
		}
#endif
		finally
		{
			arc.Close();

			// Write the hash cache back out for the next save.
			if (Core.Settings.UseMd5Cache)
			{
				using (var comp = new ZstdCompressor(new ZstdCompressionOptions(3)))
				{
					var strings = Core.Settings.Md5Cache.Values.Select(x => x.ToWritableString());

					if (strings.Count() > 0)
					{
						string rawCache = strings.Aggregate((x, y) => x + '\n' + y);

						File.WriteAllBytes("HashCache.md5.zs", comp.Wrap(Encoding.ASCII.GetBytes(rawCache)));
					}
				}
			}

			// Marshal the final UI state changes back onto the UI thread.
			this.Invoke(new MethodInvoker(() =>
			{
				currentlyOpenedFile = Path.GetFileName(txtSaveLocation.Text);
				IsModified = false;
				btnSave.Enabled = true;
			}));
		}
	});
}
/// <summary>
/// Worker loop for compression thread <paramref name="id"/>: drains chunks from
/// <c>QueuedChunks</c>, stages each chunk's subfiles end-to-end in a single rented
/// buffer (encoding audio to Opus when a conversion is requested, de-duplicating
/// identical content by MD5), optionally zstd-compresses the staged data, and
/// posts a <c>FinishedChunk</c> to <c>ReadyChunks</c>. Completion or failure is
/// signalled through this thread's entry in <c>threadCompletionSources</c>.
/// </summary>
/// <param name="id">Index of this worker's TaskCompletionSource.</param>
public void CompressCallback(int id)
{
	using ZstdCompressor compressor = new ZstdCompressor();

	var completionSource = threadCompletionSources[id];

	try
	{
		while (!QueuedChunks.IsCompleted)
		{
			if (QueuedChunks.TryTake(out var queuedChunk, 500))
			{
				// Rent one buffer large enough for every subfile laid end to end.
				var totalUncompressed = queuedChunk.Subfiles.Sum(x => (int)x.Size);
				var upperBound = ZstdCompressor.GetUpperCompressionBound(totalUncompressed);
				var uncompressedBuffer = MemoryPool<byte>.Shared.Rent(totalUncompressed);

				int currentBufferIndex = 0;

				List<FileReceipt> fileReceipts = new List<FileReceipt>();

				foreach (var subfile in queuedChunk.Subfiles)
				{
					try
					{
						FileReceipt receipt;

						if ((receipt = fileReceipts.Find(x => x.Md5 == subfile.Source.Md5)) != null)
						{
							// Identical content already staged in this chunk:
							// reference it instead of copying it again.
							receipt = FileReceipt.CreateDuplicate(receipt, subfile);
							receipt.Filename = subfile.Name;
							receipt.EmulatedName = subfile.Name;
							receipt.ArchiveName = subfile.ArchiveName;
						}
						else if (subfile.RequestedConversion != null)
						{
							if (subfile.RequestedConversion.TargetEncoding != ArchiveFileType.OpusAudio)
							{
								throw new NotImplementedException("Only supports opus encoding at this time");
							}

							using var opusEncoder = new OpusEncoder();
							using var inputStream = subfile.GetStream();
							// Encode directly into the staging buffer, starting at the
							// current write position.
							using var bufferStream = new MemorySpanStream(uncompressedBuffer.Memory.Slice(currentBufferIndex));

							opusEncoder.Encode(inputStream, bufferStream);

							receipt = new FileReceipt
							{
								// MD5 of the *encoded* bytes, since that is what gets stored.
								Md5 = Utility.GetMd5(bufferStream.SliceToCurrentPosition().Span),
								Length = (ulong)bufferStream.Position,
								// BUGFIX: the encoded data starts at currentBufferIndex, not 0;
								// the old hard-coded 0 was only correct for the first file in a chunk.
								Offset = (ulong)currentBufferIndex,
								Filename = opusEncoder.RealNameTransform(subfile.Name),
								EmulatedName = subfile.Name,
								Encoding = ArchiveFileType.OpusAudio,
								ArchiveName = subfile.ArchiveName,
								Subfile = subfile
							};

							currentBufferIndex += (int)receipt.Length;
						}
						else
						{
							// Raw copy: stream the subfile into the staging buffer.
							using var inputStream = subfile.GetStream();

							int totalRead = 0;
							while (totalRead < (int)subfile.Size)
							{
								// BUGFIX: advance the slice start by totalRead so a partial
								// Read appends after the bytes already copied instead of
								// overwriting them from currentBufferIndex.
								int read = inputStream.Read(
									uncompressedBuffer.Memory.Span.Slice(
										currentBufferIndex + totalRead,
										(int)subfile.Size - totalRead));

								if (read == 0)
								{
									// BUGFIX: a stream shorter than its advertised size used
									// to spin here forever; fail loudly instead.
									throw new EndOfStreamException(
										$"Stream ended after {totalRead} of {subfile.Size} bytes");
								}

								totalRead += read;
							}

							receipt = new FileReceipt
							{
								Md5 = subfile.Source.Md5,
								Length = subfile.Size,
								Offset = (ulong)currentBufferIndex,
								Filename = subfile.Name,
								EmulatedName = subfile.Name,
								Encoding = subfile.Type,
								ArchiveName = subfile.ArchiveName,
								Subfile = subfile
							};

							currentBufferIndex += (int)receipt.Length;
						}

						fileReceipts.Add(receipt);
					}
					catch (Exception ex)
					{
						// Wrap with the file identity so the failure is attributable.
						throw new Exception($"Failed to compress file '{subfile.ArchiveName}/{subfile.Name}'", ex);
					}
				}

				Memory<byte> uncompressedSpan = uncompressedBuffer.Memory.Slice(0, currentBufferIndex);

				IMemoryOwner<byte> compressedBuffer;
				Memory<byte> compressedMemory;

				if (queuedChunk.Compression == ArchiveChunkCompression.Zstandard)
				{
					// Compress into a second rented buffer, then return the staging buffer.
					compressedBuffer = MemoryPool<byte>.Shared.Rent(upperBound);

					compressor.CompressData(uncompressedSpan.Span,
						compressedBuffer.Memory.Span,
						queuedChunk.CompressionLevel,
						out int compressedSize);

					compressedMemory = compressedBuffer.Memory.Slice(0, compressedSize);

					uncompressedBuffer.Dispose();
				}
				else
				{
					// Stored uncompressed: hand the staging buffer itself to the chunk;
					// ownership transfers to the FinishedChunk below.
					compressedBuffer = uncompressedBuffer;
					compressedMemory = uncompressedSpan;
				}

				// CRC is computed over the bytes as they will appear on disk.
				uint crc = CRC32.Compute(compressedMemory.Span);

				var chunkReceipt = new ChunkReceipt
				{
					ID = queuedChunk.ID,
					Compression = queuedChunk.Compression,
					CRC = crc,
					UncompressedSize = (ulong)uncompressedSpan.Length,
					CompressedSize = (ulong)compressedMemory.Length,
					FileReceipts = fileReceipts
				};

				ReadyChunks.Add(new FinishedChunk
				{
					UnderlyingBuffer = compressedBuffer,
					Data = compressedMemory,
					Receipt = chunkReceipt
				});

				threadProgress.Report(
					$"Compressed chunk id:{queuedChunk.ID} ({fileReceipts.Count} files) ({Utility.GetBytesReadable((long)chunkReceipt.CompressedSize)} - {(double)chunkReceipt.CompressedSize / chunkReceipt.UncompressedSize:P} ratio)\r\n");
			}
			else
			{
				// Queue empty but not completed yet; back off briefly.
				Thread.Sleep(50);
			}
		}

		completionSource.SetResult(null);
	}
	catch (Exception ex)
	{
		// Propagate the failure to whoever awaits this worker's task.
		completionSource.SetException(ex);
	}
}