private static void FillLarge(this IStorage input, byte value, long offset, long count, IProgressReport progress = null)
{
    // Writes `count` copies of `value` to the storage starting at `offset`,
    // reusing one pooled buffer and reporting progress per chunk written.
    const int bufferSize = 0x4000;

    if (count < 0)
    {
        throw new ArgumentException("Storage must have an explicit length");
    }

    progress?.SetTotal(count);

    using var rented = new RentedArray<byte>(bufferSize);
    int actualSize = rented.Array.Length;

    // Pre-fill only the portion of the buffer that will ever be written.
    rented.Array.AsSpan(0, (int)Math.Min(count, actualSize)).Fill(value);

    long bytesLeft = count;
    long writePos = offset;

    while (bytesLeft > 0)
    {
        int chunk = (int)Math.Min(actualSize, bytesLeft);
        input.Write(writePos, rented.Array.AsSpan(0, chunk));

        bytesLeft -= chunk;
        writePos += chunk;
        progress?.ReportAdd(chunk);
    }

    progress?.SetTotal(0);
}
public static void CopyTo(this IStorage input, IStorage output, IProgressReport progress = null, int bufferSize = 81920)
{
    // Copies min(input size, output size) bytes between storages in
    // pooled-buffer chunks, reporting progress as bytes are copied.
    input.GetSize(out long inputSize).ThrowIfFailure();
    output.GetSize(out long outputSize).ThrowIfFailure();

    long bytesLeft = Math.Min(inputSize, outputSize);

    if (bytesLeft < 0)
    {
        throw new ArgumentException("Storage must have an explicit length");
    }

    progress?.SetTotal(bytesLeft);

    using var rented = new RentedArray<byte>(bufferSize);
    int actualSize = rented.Array.Length;

    long pos = 0;
    while (bytesLeft > 0)
    {
        int chunk = (int)Math.Min(actualSize, bytesLeft);
        Span<byte> span = rented.Array.AsSpan(0, chunk);

        input.Read(pos, span);
        output.Write(pos, span);

        bytesLeft -= chunk;
        pos += chunk;
        progress?.ReportAdd(chunk);
    }

    progress?.SetTotal(0);
}
public static void CopyTo(this IFile file, IFile dest, IProgressReport logger = null)
{
    // Copies the whole file to dest using a pooled buffer; the copy ends
    // when a read returns zero bytes.
    const int bufferSize = 0x8000;

    logger?.SetTotal(file.GetSize());

    byte[] buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        long offset = 0;
        while (true)
        {
            int bytesRead = file.Read(buffer, offset);
            if (bytesRead == 0) break;

            dest.Write(buffer.AsSpan(0, bytesRead), offset);
            offset += bytesRead;
            logger?.ReportAdd(bytesRead);
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
        logger?.SetTotal(0);
    }
}
public static void CopyTo(this IFile file, IFile dest, IProgressReport logger = null)
{
    // Result-based file copy: every read/write is checked with
    // ThrowIfFailure, and a zero-byte read ends the loop.
    const int bufferSize = 0x8000;

    file.GetSize(out long fileSize).ThrowIfFailure();
    logger?.SetTotal(fileSize);

    byte[] buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        long offset = 0;

        // todo: use result for loop condition
        while (true)
        {
            file.Read(out long bytesRead, offset, buffer).ThrowIfFailure();
            if (bytesRead == 0) break;

            dest.Write(offset, buffer.AsSpan(0, (int)bytesRead)).ThrowIfFailure();
            offset += bytesRead;
            logger?.ReportAdd(bytesRead);
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
        logger?.SetTotal(0);
    }
}
public static void CopyDirectoryWithProgress(FileSystemClient fs, string sourcePath, string destPath, CreateFileOptions options = CreateFileOptions.None, IProgressReport logger = null)
{
    // The progress total is always reset to 0 afterwards, even on failure.
    try
    {
        // Skip the size scan entirely when no logger is attached.
        if (logger != null)
        {
            logger.SetTotal(GetTotalSize(fs, sourcePath));
        }

        CopyDirectoryWithProgressInternal(fs, sourcePath, destPath, options, logger);
    }
    finally
    {
        logger?.SetTotal(0);
    }
}
public static Result CopyDirectoryWithProgress(FileSystemClient fs, U8Span sourcePath, U8Span destPath, CreateFileOptions options = CreateFileOptions.None, IProgressReport logger = null)
{
    // The progress total is always reset to 0 afterwards, even on failure.
    try
    {
        // Skip the size scan entirely when no logger is attached.
        if (logger != null)
        {
            logger.SetTotal(GetTotalSize(fs, sourcePath));
        }

        return CopyDirectoryWithProgressInternal(fs, sourcePath, destPath, options, logger);
    }
    finally
    {
        logger?.SetTotal(0);
    }
}
// Benchmarks encrypting each block separately, initializing a new cipher object for each one
private static void CipherBenchmarkSeparate(ReadOnlySpan<byte> src, Span<byte> dst, CipherTaskSeparate function, int iterations, string label, bool dotNetCrypto, IProgressReport logger)
{
    Debug.Assert(src.Length == dst.Length);

    var timer = new Stopwatch();
    double[] seconds = new double[iterations];

    // All-zero key/IV material; this measures throughput, not security.
    ReadOnlySpan<byte> key1 = stackalloc byte[0x10];
    ReadOnlySpan<byte> key2 = stackalloc byte[0x10];
    ReadOnlySpan<byte> iv = stackalloc byte[0x10];

    logger.SetTotal(iterations);

    const int blockSize = BlockSizeSeparate;
    int blockCount = src.Length / blockSize;

    for (int run = 0; run < iterations; run++)
    {
        timer.Restart();

        for (int block = 0; block < blockCount; block++)
        {
            int start = block * blockSize;
            function(src.Slice(start, blockSize), dst.Slice(start, blockSize), key1, key2, iv, dotNetCrypto);
        }

        timer.Stop();
        logger.ReportAdd(1);
        seconds[run] = timer.Elapsed.TotalSeconds;
    }

    logger.SetTotal(0);

    long srcSize = src.Length;
    double fastestRun = seconds.Min();
    double averageRun = seconds.Average();
    double slowestRun = seconds.Max();

    string fastestRate = Utilities.GetBytesReadable((long)(srcSize / fastestRun));
    string averageRate = Utilities.GetBytesReadable((long)(srcSize / averageRun));
    string slowestRate = Utilities.GetBytesReadable((long)(srcSize / slowestRun));

    logger.LogMessage($"{label}{averageRate}/s, fastest run: {fastestRate}/s, slowest run: {slowestRate}/s");
}
public IEnumerable<byte[]> EnumerateFiles(IProgressReport progress = null)
{
    // Yields the contents of every entry in the file list. Entries that fail
    // to load are logged and skipped; progress advances one step per entry
    // either way.
    FileListTab[] fileListTabs = Table20.FileList;
    progress?.SetTotal(fileListTabs.Length);

    for (int i = 0; i < fileListTabs.Length; i++)
    {
        bool success = false;
        // Array.Empty avoids allocating a fresh zero-length array per entry
        // (the original allocated `new byte[0]` each iteration).
        byte[] data = Array.Empty<byte>();

        // `yield return` is not allowed inside a try block with a catch,
        // so record success and yield afterwards.
        try
        {
            data = GetFileFromIndex(i);
            success = true;
        }
        catch (Exception)
        {
            progress?.LogMessage($"Error getting file {i}");
        }

        if (success)
        {
            yield return data;
        }

        progress?.ReportAdd(1);
    }
}
public static void PrintAllTables(BdatStringCollection bdats, string jsonDir, IProgressReport progress = null)
{
    // Serializes every BDAT table to JSON under <jsonDir>/json, placing a
    // table in a subdirectory named after its source filename when set.
    progress?.LogMessage("Writing BDAT tables as JSON");
    progress?.SetTotal(bdats.Tables.Count);

    string jsonRoot = Path.Combine(jsonDir, "json");
    Directory.CreateDirectory(jsonRoot);

    foreach (string tableName in bdats.Tables.Keys)
    {
        var table = bdats[tableName];
        string json = PrintTable(table);

        string outDir = table.Filename == null
            ? jsonRoot
            : Path.Combine(jsonRoot, table.Filename);

        Directory.CreateDirectory(outDir);
        File.WriteAllText(Path.Combine(outDir, tableName + ".json"), json);

        progress?.ReportAdd(1);
    }
}
private static void CipherBenchmarkBlocked(ReadOnlySpan<byte> src, Span<byte> dst, Func<ICipher> cipherGenerator, int iterations, string label, IProgressReport logger)
{
    // Initial untimed pass before the measured runs.
    cipherGenerator().Transform(src, dst);

    var timer = new Stopwatch();
    double[] seconds = new double[iterations];

    logger.SetTotal(iterations);

    int blockCount = src.Length / BlockSizeBlocked;

    for (int run = 0; run < iterations; run++)
    {
        // A fresh cipher per run; its creation is excluded from the timing.
        ICipher cipher = cipherGenerator();

        timer.Restart();
        for (int block = 0; block < blockCount; block++)
        {
            int start = block * BlockSizeBlocked;
            cipher.Transform(src.Slice(start, BlockSizeBlocked), dst.Slice(start, BlockSizeBlocked));
        }
        timer.Stop();

        logger.ReportAdd(1);
        seconds[run] = timer.Elapsed.TotalSeconds;
    }

    logger.SetTotal(0);

    long srcSize = src.Length;
    double fastestRun = seconds.Min();
    double averageRun = seconds.Average();
    double slowestRun = seconds.Max();

    string fastestRate = Utilities.GetBytesReadable((long)(srcSize / fastestRun));
    string averageRate = Utilities.GetBytesReadable((long)(srcSize / averageRun));
    string slowestRate = Utilities.GetBytesReadable((long)(srcSize / slowestRun));

    logger.LogMessage($"{label}{averageRate}/s, fastest run: {fastestRate}/s, slowest run: {slowestRate}/s");
}
public void Run(IProgressReport progress)
{
    // Generate all parameter combinations, then encode them in parallel.
    Params = GenerateCombinations();
    progress.SetTotal(Params.Length);

    Parallel.ForEach(Params, p => Encode(p, progress));
}
public static void PrintSeparateTables(BdatStringCollection bdats, string htmlDir, IProgressReport progress = null)
{
    // Writes each BDAT table to its own HTML page under <htmlDir>/bdat,
    // plus index pages, reporting one progress step per table.
    progress?.LogMessage("Writing BDAT tables as HTML");
    progress?.SetTotal(bdats.Tables.Count);

    string bdatHtmlDir = Path.Combine(htmlDir, "bdat");
    Directory.CreateDirectory(bdatHtmlDir);

    // XB2 additionally gets a top-level index page.
    if (bdats.Game == Game.XB2)
    {
        PrintIndex(bdats, htmlDir);
    }

    PrintBdatIndex(bdats, bdatHtmlDir);

    foreach (string tableName in bdats.Tables.Keys)
    {
        string outDir = bdatHtmlDir;
        string tableFilename = bdats[tableName].Filename;

        // Tables with a source filename go in a subdirectory, so the link
        // back to the index needs an extra "../".
        string indexPath = tableFilename == null ? "index.html" : "../index.html";

        var sb = new Indenter(2);
        sb.AppendLine("<!DOCTYPE html>");
        sb.AppendLineAndIncrease("<html>");
        sb.AppendLineAndIncrease("<head>");
        sb.AppendLine("<meta charset=\"utf-8\" />");
        sb.AppendLine($"<title>{tableName}</title>");
        sb.AppendLineAndIncrease("<style>");
        sb.AppendLine(CssSticky);
        sb.AppendLine(CssSortable);
        sb.DecreaseAndAppendLine("</style>");
        sb.DecreaseAndAppendLine("</head>");
        sb.AppendLineAndIncrease("<body>");
        sb.AppendLine($"<a href=\"{indexPath}\">Return to BDAT index</a><br/>");
        sb.AppendLine("<input type=\"button\" value=\"Open all references\" onclick=\"openAll(true)\" />");
        sb.AppendLine("<input type=\"button\" value=\"Close all references\" onclick=\"openAll(false)\" />");

        // Table body is emitted between the static header and footer markup.
        PrintTable(bdats[tableName], sb);

        sb.AppendLineAndIncrease("<script>");
        sb.AppendLine(JsOpenAll);
        sb.AppendLine(JsSortable);
        sb.AppendLine(JsAnchor);
        sb.DecreaseAndAppendLine("</script>");
        sb.DecreaseAndAppendLine("</body>");
        sb.DecreaseAndAppendLine("</html>");

        if (tableFilename != null)
        {
            outDir = Path.Combine(outDir, tableFilename);
        }

        string filename = Path.Combine(outDir, tableName + ".html");
        Directory.CreateDirectory(outDir);
        File.WriteAllText(filename, sb.ToString());

        progress?.ReportAdd(1);
    }
}
public static void CopyTo(this IStorage input, IStorage output, IProgressReport progress = null)
{
    const int bufferSize = 81920;

    long remaining = input.GetSize();
    long outSize = output.GetSize();

    // Clamp to the output size when the output reports a positive size.
    if (outSize > 0)
    {
        remaining = Math.Min(remaining, outSize);
    }

    if (remaining < 0)
    {
        throw new ArgumentException("Storage must have an explicit length");
    }

    progress?.SetTotal(remaining);

    byte[] buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        long pos = 0;
        while (remaining > 0)
        {
            int chunk = (int)Math.Min(bufferSize, remaining);
            Span<byte> span = buffer.AsSpan(0, chunk);

            input.Read(span, pos);
            output.Write(span, pos);

            remaining -= chunk;
            pos += chunk;
            progress?.ReportAdd(chunk);
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }

    progress?.SetTotal(0);
}
public override CriHcaFormat EncodeFromPcm16(Pcm16Format pcm16, CriHcaParameters config)
{
    // Copy the source audio's layout and loop info into the encoder config.
    config.ChannelCount = pcm16.ChannelCount;
    config.SampleRate = pcm16.SampleRate;
    config.SampleCount = pcm16.SampleCount;
    config.Looping = pcm16.Looping;
    config.LoopStart = pcm16.LoopStart;
    config.LoopEnd = pcm16.LoopEnd;
    IProgressReport progress = config.Progress;

    CriHcaEncoder encoder = CriHcaEncoder.InitializeNew(config);
    short[][] pcm = pcm16.Channels;
    var pcmBuffer = Helpers.CreateJaggedArray<short[][]>(pcm16.ChannelCount, SamplesPerFrame);

    progress?.SetTotal(encoder.Hca.FrameCount);

    // One output byte buffer per HCA frame.
    var audio = Helpers.CreateJaggedArray<byte[][]>(encoder.Hca.FrameCount, encoder.FrameSize);

    // `i` counts input PCM frames; `frameNum` counts encoded output frames.
    // They can diverge because the encoder may emit extra buffered frames.
    int frameNum = 0;
    for (int i = 0; frameNum < encoder.Hca.FrameCount; i++)
    {
        // The final input frame may be short; copy only what remains.
        int samplesToCopy = Math.Min(pcm16.SampleCount - i * SamplesPerFrame, SamplesPerFrame);

        for (int c = 0; c < pcm.Length; c++)
        {
            Array.Copy(pcm[c], SamplesPerFrame * i, pcmBuffer[c], 0, samplesToCopy);
        }

        int framesWritten = encoder.Encode(pcmBuffer, audio[frameNum]);
        if (framesWritten == 0)
        {
            throw new NotSupportedException("Encoder returned no audio. This should not happen.");
        }

        // The first written frame went directly into audio[frameNum].
        if (framesWritten > 0)
        {
            frameNum++;
            framesWritten--;
            progress?.ReportAdd(1);
        }

        // Any additional frames the encoder buffered are drained here.
        while (framesWritten > 0)
        {
            audio[frameNum] = encoder.GetPendingFrame();
            frameNum++;
            framesWritten--;
            progress?.ReportAdd(1);
        }
    }

    var builder = new CriHcaFormatBuilder(audio, encoder.Hca);
    return (builder.Build());
}
private static void CopyBenchmark(IStorage src, IStorage dst, int iterations, string label, IProgressReport logger)
{
    // One untimed copy first (warmup), then time `iterations` copies and
    // report the average throughput.
    src.CopyTo(dst);

    logger.SetTotal(iterations);

    var watch = Stopwatch.StartNew();
    for (int i = 0; i < iterations; i++)
    {
        src.CopyTo(dst);
        logger.ReportAdd(1);
    }
    watch.Stop();

    logger.SetTotal(0);

    long totalBytes = src.Length * iterations;
    string rate = Util.GetBytesReadable((long)(totalBytes / watch.Elapsed.TotalSeconds));
    logger.LogMessage($"{label}{rate}/s");
}
/// <summary>
/// Checks the hashes of any unchecked blocks and returns the <see cref="Validity"/> of the data.
/// </summary>
/// <param name="returnOnError">If <see langword="true"/>, return as soon as an invalid block is found.</param>
/// <param name="logger">An optional <see cref="IProgressReport"/> for reporting progress.</param>
/// <returns>The <see cref="Validity"/> of the data of the specified hash level.</returns>
public Validity Validate(bool returnOnError, IProgressReport logger = null)
{
    // Validate against the deepest level of the hash hierarchy.
    Validity[] validities = LevelValidities[LevelValidities.Length - 1];
    IntegrityVerificationStream stream = IntegrityStreams[IntegrityStreams.Length - 1];

    // Restore the original position of the stream when we're done validating
    long initialPosition = stream.Position;

    long blockSize = stream.SectorSize;
    int blockCount = (int)Util.DivideByRoundUp(Length, blockSize);
    var buffer = new byte[blockSize];

    var result = Validity.Valid;
    logger?.SetTotal(blockCount);

    for (int i = 0; i < blockCount; i++)
    {
        if (validities[i] == Validity.Unchecked)
        {
            // NOTE(review): the read appears to refresh validities[i]
            // (it is re-checked immediately below); IgnoreOnInvalid keeps
            // the read from aborting on a bad block.
            stream.Position = blockSize * i;
            stream.Read(buffer, 0, buffer.Length, IntegrityCheckLevel.IgnoreOnInvalid);
        }

        if (validities[i] == Validity.Invalid)
        {
            result = Validity.Invalid;
            if (returnOnError) break;
        }

        logger?.ReportAdd(1);
    }

    logger?.SetTotal(0);
    stream.Position = initialPosition;

    return result;
}
private static void FillLarge(this IStorage input, byte value, long offset, long count, IProgressReport progress = null)
{
    // ArrayPool-based variant: writes `count` copies of `value` starting
    // at `offset`, one pooled-buffer chunk at a time.
    const int bufferSize = 0x4000;

    if (count < 0)
    {
        throw new ArgumentException("Storage must have an explicit length");
    }

    progress?.SetTotal(count);

    byte[] buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        // Fill only the portion of the buffer that will actually be written.
        buffer.AsSpan(0, (int)Math.Min(count, bufferSize)).Fill(value);

        long bytesLeft = count;
        long writePos = offset;

        while (bytesLeft > 0)
        {
            int chunk = (int)Math.Min(bufferSize, bytesLeft);
            input.Write(writePos, buffer.AsSpan(0, chunk));

            bytesLeft -= chunk;
            writePos += chunk;
            progress?.ReportAdd(chunk);
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }

    progress?.SetTotal(0);
}
/// <summary>
/// Checks the hashes of any unchecked blocks and returns the <see cref="Validity"/> of the data.
/// </summary>
/// <param name="returnOnError">If <see langword="true"/>, return as soon as an invalid block is found.</param>
/// <param name="logger">An optional <see cref="IProgressReport"/> for reporting progress.</param>
/// <returns>The <see cref="Validity"/> of the data of the specified hash level.</returns>
public Validity Validate(bool returnOnError, IProgressReport logger = null)
{
    // Validate against the deepest level of the hash hierarchy.
    Validity[] validities = LevelValidities[LevelValidities.Length - 1];
    IntegrityVerificationStorage storage = IntegrityStorages[IntegrityStorages.Length - 1];

    long blockSize = storage.SectorSize;
    int blockCount = (int)Utilities.DivideByRoundUp(Length, blockSize);

    // The storage size is loop-invariant; query it once instead of once
    // per unchecked block (the original called GetSize inside the loop).
    storage.GetSize(out long storageSize).ThrowIfFailure();

    var buffer = new byte[blockSize];
    var result = Validity.Valid;

    logger?.SetTotal(blockCount);

    for (int i = 0; i < blockCount; i++)
    {
        if (validities[i] == Validity.Unchecked)
        {
            // The final block may be short; clamp the read to the storage end.
            int toRead = (int)Math.Min(storageSize - blockSize * i, buffer.Length);
            storage.Read(blockSize * i, buffer.AsSpan(0, toRead), IntegrityCheckLevel.IgnoreOnInvalid);
        }

        if (validities[i] == Validity.Invalid)
        {
            result = Validity.Invalid;
            if (returnOnError) break;
        }

        logger?.ReportAdd(1);
    }

    logger?.SetTotal(0);
    return result;
}
public static void CopyFile(this FileSystemManager fs, string sourcePath, string destPath, IProgressReport logger = null)
{
    // Chunked copy through file handles; the destination is flushed once
    // the whole file has been written.
    const int maxBufferSize = 0x10000;

    using (FileHandle sourceHandle = fs.OpenFile(sourcePath, OpenMode.Read))
    using (FileHandle destHandle = fs.OpenFile(destPath, OpenMode.Write | OpenMode.Append))
    {
        long fileSize = fs.GetFileSize(sourceHandle);

        // Never rent more than the file actually needs.
        int bufferSize = (int)Math.Min(maxBufferSize, fileSize);

        logger?.SetTotal(fileSize);

        byte[] buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
        try
        {
            long offset = 0;
            while (offset < fileSize)
            {
                int chunk = (int)Math.Min(fileSize - offset, bufferSize);
                Span<byte> span = buffer.AsSpan(0, chunk);

                fs.ReadFile(sourceHandle, span, offset);
                fs.WriteFile(destHandle, span, offset);

                logger?.ReportAdd(chunk);
                offset += chunk;
            }
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
            logger?.SetTotal(0);
        }

        fs.FlushFile(destHandle);
    }
}
public GuessAdx(string path, string executable, IProgressReport progress = null)
{
    // Builds the key-guessing search space (seeds, multipliers, increments)
    // based on the ADX encryption type detected from the input file(s).
    Progress = progress;
    Progress?.SetTotal(0x1000);

    // `path` may be a directory of .adx files or a single file.
    if (Directory.Exists(path))
    {
        EncryptionType = LoadFiles(Directory.GetFiles(path, "*.adx"));
    }
    else if (File.Exists(path))
    {
        EncryptionType = LoadFiles(path);
    }
    else
    {
        Progress?.LogMessage($"{path} does not exist.");
    }

    switch (EncryptionType)
    {
        case 8:
            // Type 8: candidates are 0x400 primes taken from the first
            // prime >= 0x4000 upward (primes are generated below 0x8000).
            int[] primes = Common.GetPrimes(0x8000);

            // .NET returns the bitwise complement of the index of the first element
            // smaller than the search value if it is not found in the array
            int start = ~Array.BinarySearch(primes, 0x4000);

            PossibleMultipliers = new int[0x400];
            Array.Copy(primes, start, PossibleMultipliers, 0, 0x400);

            // Increments and seeds draw from the same prime pool.
            PossibleIncrements = PossibleMultipliers;
            PossibleSeeds = new HashSet<int>(PossibleMultipliers);
            ValidationMask = 0xE000;
            MaxSeed = 0x8000;
            break;
        case 9:
            // Type 9: seed in [0, 0x2000); multiplier ≡ 1 (mod 4);
            // increment odd.
            PossibleSeeds = new HashSet<int>(Enumerable.Range(0, 0x2000));
            PossibleMultipliers = Enumerable.Range(0, 0x2000).Where(x => (x & 3) == 1).ToArray();
            PossibleIncrements = Enumerable.Range(0, 0x2000).Where(x => (x & 1) == 1).ToArray();
            ValidationMask = 0x1000;
            MaxSeed = 0x2000;
            break;
    }

    // Type 8 keys can also be derived from strings in the game executable.
    if (EncryptionType == 8 && executable != null)
    {
        KeyStrings = LoadStrings(executable, path);
    }
}
public static BdatStringCollection DeserializeTables(BdatTables tables, IProgressReport progress = null)
{
    // Builds a string-typed view of every BDAT table, wiring each item back
    // to its table and resolving the optional display member.
    var collection = new BdatStringCollection { Bdats = tables };

    progress?.LogMessage("Parsing BDAT tables");
    progress?.SetTotal(tables.Tables.Length);

    foreach (BdatTable table in tables.Tables)
    {
        bool hasDisplay = tables.DisplayFields.TryGetValue(table.Name, out string displayMember);

        var items = new BdatStringItem[table.ItemCount];
        var stringTable = new BdatStringTable
        {
            Collection = collection,
            Name = table.Name,
            BaseId = table.BaseId,
            Members = table.Members,
            Items = items,
            Filename = table.Filename
        };

        if (hasDisplay)
        {
            stringTable.DisplayMember = displayMember;
        }

        for (int i = 0; i < table.ItemCount; i++)
        {
            BdatStringItem item = ReadItem(table, i);
            item.Table = stringTable;
            item.Id = table.BaseId + i;

            if (displayMember != null)
            {
                item.Display = item[displayMember];
            }

            items[i] = item;
        }

        collection.Add(stringTable);
        progress?.ReportAdd(1);
    }

    return collection;
}
public static BdatCollection DeserializeTables(BdatTables files, IProgressReport progress = null)
{
    // Reads every table into a new collection, then resolves
    // cross-table references in a second pass.
    progress?.LogMessage("Deserializing BDAT tables");
    progress?.SetTotal(files.Tables.Length);

    var result = new BdatCollection();

    foreach (BdatTable table in files.Tables)
    {
        ReadTable(table, result);
        progress?.ReportAdd(1);
    }

    ReadFunctions.SetReferences(result);
    return result;
}
public static void CopyStream(this Stream input, Stream output, long length, IProgressReport progress = null)
{
    // Copies up to `length` bytes from input to output; stops early if the
    // input runs out of data.
    const int bufferSize = 0x8000;
    var buffer = new byte[bufferSize];

    progress?.SetTotal(length);

    long remaining = length;
    while (true)
    {
        int requested = (int)Math.Min(buffer.Length, remaining);
        int read = input.Read(buffer, 0, requested);
        if (read <= 0) break;

        output.Write(buffer, 0, read);
        remaining -= read;
        progress?.ReportAdd(read);
    }
}
public void ExtractFiles(string outDir, IProgressReport progress = null)
{
    // CSV header retained from a field-dump debugging aid; the dump writes
    // below are currently disabled.
    var sb = new StringBuilder();
    sb.AppendLine("name, file flags, dir offset, file offset, offset, size comp, size, file offset flags, offsetToFile, F1, F9, F17, Is link, F21, Is comp, OF3, OF4, OF5, OF6, bad");

    FileListTab[] fileList = Table20.FileList;
    progress?.SetTotal(fileList.Length);

    for (int index = 0; index < fileList.Length; index++)
    {
        //ExtractFileIndex(index, outDir, progress, sb);
        ExtractFileIndex(index, outDir, progress);
        progress?.ReportAdd(1);
    }

    //File.WriteAllText("list2.csv", sb.ToString());
}
public static void Extract(FileArchive archive, string outDir, IProgressReport progress = null)
{
    // Extracts every named entry to outDir, recreating the archive's
    // internal directory structure. Unnamed entries are skipped.
    FileInfo[] entries = archive.FileInfo
        .Where(x => !string.IsNullOrWhiteSpace(x.Filename))
        .ToArray();

    progress?.SetTotal(entries.Length);
    progress?.LogMessage("Extracting ARD archive");

    foreach (FileInfo entry in entries)
    {
        string outPath = Path.Combine(outDir, entry.Filename.TrimStart('/'));
        string dir = Path.GetDirectoryName(outPath) ?? throw new InvalidOperationException();
        Directory.CreateDirectory(dir);

        using (var outStream = new FileStream(outPath, FileMode.Create, FileAccess.Write))
        {
            archive.OutputFile(entry, outStream);
        }

        progress?.ReportAdd(1);
    }
}
public static void ExtractTextures(string[] filenames, string outDir, IProgressReport progress = null)
{
    // Exports textures from each file; failures are logged and the
    // remaining files are still processed.
    progress?.SetTotal(filenames.Length);

    for (int i = 0; i < filenames.Length; i++)
    {
        string path = filenames[i];
        try
        {
            byte[] contents = File.ReadAllBytes(path);
            string baseName = Path.GetFileNameWithoutExtension(path);
            ExportWilayTextures(contents, baseName, outDir, progress);
        }
        catch (Exception ex)
        {
            progress?.LogMessage($"{ex.Message} (unknown)");
        }

        progress?.ReportAdd(1);
    }
}
public static void CopyToStream(this IStorage input, Stream output, long length, IProgressReport progress = null)
{
    // Copies exactly `length` bytes from the storage into the stream,
    // one fixed-size buffer at a time.
    const int bufferSize = 0x8000;
    var buffer = new byte[bufferSize];

    progress?.SetTotal(length);

    long remaining = length;
    long readPos = 0;

    while (remaining > 0)
    {
        int chunk = (int)Math.Min(buffer.Length, remaining);

        input.Read(readPos, buffer.AsSpan(0, chunk));
        output.Write(buffer, 0, chunk);

        remaining -= chunk;
        readPos += chunk;
        progress?.ReportAdd(chunk);
    }
}
public static void ExtractTextures(FileArchive archive, string texDir, string outDir, IProgressReport progress = null)
{
    // Exports textures from every entry under texDir; failures are logged
    // with the offending filename and the remaining entries still run.
    FileInfo[] entries = archive.GetChildFileInfos(texDir);
    progress?.SetTotal(entries.Length);

    foreach (FileInfo entry in entries)
    {
        try
        {
            byte[] contents = archive.ReadFile(entry);
            string baseName = Path.GetFileNameWithoutExtension(entry.Filename);
            ExportWilayTextures(contents, baseName, outDir, progress);
        }
        catch (Exception ex)
        {
            progress?.LogMessage($"{ex.Message} {entry.Filename}");
        }

        progress?.ReportAdd(1);
    }
}
private short[][] Decode(CodecParameters parameters)
{
    // Decodes every superframe into a per-channel PCM buffer, trimming the
    // encoder delay via CopyBuffer as frames are copied out.
    IProgressReport progress = parameters?.Progress;
    progress?.SetTotal(AudioData.Length);

    var decoder = new Atrac9Decoder();
    decoder.Initialize(Config.ConfigData);
    Atrac9Config config = decoder.Config;

    var pcmOut = CreateJaggedArray<short[][]>(config.ChannelCount, SampleCount);
    var pcmBuffer = CreateJaggedArray<short[][]>(config.ChannelCount, config.SuperframeSamples);

    for (int frame = 0; frame < AudioData.Length; frame++)
    {
        decoder.Decode(AudioData[frame], pcmBuffer);
        CopyBuffer(pcmBuffer, pcmOut, EncoderDelay, frame);
        progress?.ReportAdd(1);
    }

    return pcmOut;
}
public static void CopyToStream(this IStorage input, Stream output, long length, IProgressReport progress = null, int bufferSize = 0x8000)
{
    // Copies exactly `length` bytes from the storage into the stream using
    // a rented buffer of at least `bufferSize` bytes.
    using var rented = new RentedArray<byte>(bufferSize);
    int actualSize = rented.Array.Length;

    progress?.SetTotal(length);

    long remaining = length;
    long readPos = 0;

    while (remaining > 0)
    {
        int chunk = (int)Math.Min(actualSize, remaining);

        input.Read(readPos, rented.Array.AsSpan(0, chunk));
        output.Write(rented.Array, 0, chunk);

        remaining -= chunk;
        readPos += chunk;
        progress?.ReportAdd(chunk);
    }
}