private int CompressAndWriteClusters(long focusVcn, int count, byte[] buffer, int offset)
{
    // Compresses one compression unit's worth of data starting at VCN 'focusVcn'
    // and writes it to the raw stream, returning the net change in allocated
    // clusters (positive = clusters gained, negative = clusters released).
    BlockCompressor compressor = _context.Options.Compressor;
    compressor.BlockSize = _bytesPerCluster;

    int totalAllocated = 0;

    // Compress into the shared I/O buffer; compressedLength is updated in place.
    int compressedLength = _ioBuffer.Length;
    CompressionResult result = compressor.Compress(buffer, offset, _attr.CompressionUnitSize * _bytesPerCluster, _ioBuffer, 0, ref compressedLength);

    if (result == CompressionResult.AllZeros)
    {
        // All-zero data is stored sparsely: release the clusters entirely.
        totalAllocated -= _rawStream.ReleaseClusters(focusVcn, count);
    }
    else if (result == CompressionResult.Compressed && _attr.CompressionUnitSize * _bytesPerCluster - compressedLength > _bytesPerCluster)
    {
        // Compression saved more than one whole cluster, so store compressed:
        // allocate and write just the clusters needed, then release the
        // remainder of the compression unit.
        int compClusters = Utilities.Ceil(compressedLength, _bytesPerCluster);
        totalAllocated += _rawStream.AllocateClusters(focusVcn, compClusters);
        totalAllocated += _rawStream.WriteClusters(focusVcn, compClusters, _ioBuffer, 0);
        totalAllocated -= _rawStream.ReleaseClusters(focusVcn + compClusters, _attr.CompressionUnitSize - compClusters);
    }
    else
    {
        // Incompressible (or saving under one cluster): store the unit raw.
        totalAllocated += _rawStream.AllocateClusters(focusVcn, _attr.CompressionUnitSize);
        totalAllocated += _rawStream.WriteClusters(focusVcn, _attr.CompressionUnitSize, buffer, offset);
    }

    return(totalAllocated);
}
private static void CompareResults(IEnumerable <CompressionResult> results)
{
    // Compares the supplied results against the most recently created XML
    // results file in xmlPath and prints each matching result together with
    // its speed-up versus the previous run (green = improved, red = regressed).
    // NOTE(review): xmlPath and CalculateSpeedup are defined elsewhere in this
    // type; their exact semantics are not visible here.
    var lastResult = Directory.GetFiles(xmlPath, "*.xml", SearchOption.TopDirectoryOnly).Select(f => new { FilePath = f, Creation = File.GetCreationTime(f) }).OrderByDescending(f => f.Creation).FirstOrDefault();
    if (lastResult != null)
    {
        XDocument oldResults;
        using (var file = new FileStream(lastResult.FilePath, FileMode.Open, FileAccess.Read))
        {
            oldResults = XDocument.Load(file);
        }

        foreach (var r in results)
        {
            // Match old and new runs by transfer direction and file name.
            var match = CompressionResult.FromXml(oldResults.Descendants("Result").FirstOrDefault(x => x.Attribute("direction").Value == r.Direction.ToString() && x.Element("FileName").Value == r.FileName));
            if (match != null)
            {
                Console.Write(r.ToString());
                var currentColor = Console.ForegroundColor;
                double speedup = CalculateSpeedup(r.Throughput, match.Throughput);
                if (speedup > 1)
                {
                    // Highlight a change: red when throughput dropped, green otherwise.
                    // (presumably speedup > 1 means "changed by more than 1%" — confirm
                    // against CalculateSpeedup.)
                    Console.ForegroundColor = r.Throughput < match.Throughput ? ConsoleColor.Red : ConsoleColor.Green;
                }

                Console.WriteLine(" [{0:F2}%]", speedup);
                Console.ForegroundColor = currentColor;
            }
        }
    }
}
public void Compress1KBlockSize()
{
    // Exercise LZNT1 with a 1KB block size and verify the compressor's own
    // decompression output matches what the native decompressor produces.
    var lznt1 = (BlockCompressor)typeof(NtfsFileSystem).Assembly.CreateInstance("DiscUtils.Ntfs.LZNT1");

    // Double-check, make sure native code round-trips
    byte[] nativeCompressed = NativeCompress(_uncompressedData, 0, _uncompressedData.Length, 1024);
    Assert.AreEqual(_uncompressedData, NativeDecompress(nativeCompressed, 0, nativeCompressed.Length));

    int compressedLength = 16 * 4096;
    byte[] compressedData = new byte[compressedLength];

    lznt1.BlockSize = 1024;
    CompressionResult outcome = lznt1.Compress(_uncompressedData, 0, _uncompressedData.Length, compressedData, 0, ref compressedLength);
    Assert.AreEqual(CompressionResult.Compressed, outcome);

    byte[] duDecompressed = new byte[_uncompressedData.Length];
    int numDuDecompressed = lznt1.Decompress(compressedData, 0, compressedLength, duDecompressed, 0);
    byte[] rightSizedDuDecompressed = new byte[numDuDecompressed];
    Array.Copy(duDecompressed, rightSizedDuDecompressed, numDuDecompressed);

    // Note: Due to bug in Windows LZNT1, we compare against native decompression, not the original data, since
    // Windows LZNT1 corrupts data on decompression when block size != 4096.
    Assert.AreEqual(rightSizedDuDecompressed, NativeDecompress(compressedData, 0, compressedLength));
}
private static void Processor_ProcessingFinished(object sender, CompressionResult result)
{
    // Reacts to a processor finishing: reports progress on success/cancel,
    // cleans up the partial output and surfaces the error on failure. Any
    // exception (including a failed run's own exception) is routed to PrintError.
    CompressionProcessor processor = (CompressionProcessor)sender;
    try
    {
        switch (result.Type)
        {
            case CompressionResultType.Success:
                PrintProgress("Finished!");
                break;

            case CompressionResultType.Cancelled:
                // A cancelled run leaves a partial destination file; remove it.
                DeleteDestinationFile(processor);
                PrintProgress("Canceled!");
                break;

            case CompressionResultType.Fail:
                DeleteDestinationFile(processor);
                throw result.Exception;

            default:
                throw new NotSupportedException();
        }

        // Fix: removed the original post-switch `if (result.Type == Fail) throw`
        // block — it was unreachable dead code, because the Fail case above
        // always throws before reaching it.
    }
    catch (Exception ex)
    {
        PrintError(ex.Message, true);
    }
}
public void GivenDefaultConfiguration_ShouldNotOptimizeImages()
{
    // Arrange: default repo settings and no compression results at all.
    var configuration = new RepoConfiguration();
    var compressionResults = Array.Empty<CompressionResult>();

    // Act
    var shouldOptimize = Threshold.MeetsThreshold(configuration, compressionResults);

    // Assert: with nothing saved, no optimization should be proposed.
    Assert.IsFalse(shouldOptimize);
}
public void Given0_ShouldOptimizeImages()
{
    // Arrange: a zero minimum-KB threshold means any result set qualifies,
    // even an empty one.
    var configuration = new RepoConfiguration { MinKBReduced = 0 };
    var compressionResults = Array.Empty<CompressionResult>();

    // Act
    var shouldOptimize = Threshold.MeetsThreshold(configuration, compressionResults);

    // Assert
    Assert.IsTrue(shouldOptimize);
}
public void GivenCompressionResultAndFilterArray_ShouldCorrectlyFilter()
{
    // Arrange: three results, the third of which is named in the filter list.
    var images = new[]
    {
        new CompressionResult { Title = "path/to/image.png", SizeBefore = 100.3678, SizeAfter = 95.78743 },
        new CompressionResult { Title = "path/to/image2.png", SizeBefore = 500.3234, SizeAfter = 360.1321987 },
        new CompressionResult { Title = "path/to/image3.png", SizeBefore = 500.3234, SizeAfter = 360.1321987 },
    };
    var filter = new[] { "path/to/image3.png" };

    var expected = new[]
    {
        new CompressionResult { Title = "path/to/image.png", SizeBefore = 100.3678, SizeAfter = 95.78743 },
        new CompressionResult { Title = "path/to/image2.png", SizeBefore = 500.3234, SizeAfter = 360.1321987 },
    };

    // Act
    var filterResult = CompressionResult.Filter(images, filter);

    // Assert: the filtered image is removed and the remainder are untouched.
    Assert.AreEqual(filterResult.Length, expected.Length);
    for (var idx = 0; idx < filterResult.Length; idx++)
    {
        Assert.AreEqual(filterResult[idx].Title, expected[idx].Title);
        Assert.AreEqual(filterResult[idx].SizeBefore, expected[idx].SizeBefore);
        Assert.AreEqual(filterResult[idx].SizeAfter, expected[idx].SizeAfter);
    }
}
public static IBinaryDataAccessor Compress(IReadOnlyBinaryDataAccessor input)
{
    // Compresses 'input' into the GYU0 container format: a "GYU0" magic tag,
    // the uncompressed length, a stream of compression commands, and a
    // 0x7F 0xFF end-of-file marker.
    var output = new MemoryStream((int)input.Length / 2);
    var inputData = input.ReadArray();

    // Local helper: append a whole byte array to the output stream.
    void writeArray(byte[] array)
    {
        output.Write(array, 0, array.Length);
    };

    writeArray(Encoding.ASCII.GetBytes("GYU0"));
    writeArray(BitConverter.GetBytes(inputData.Length));

    long dataOffset = 0;
    var compressionResult = new CompressionResult();
    while (dataOffset < inputData.LongLength)
    {
        // Try each of the compression algorithms without copying data first.
        // If we get a result, write that to the output right away.
        // Otherwise, try copying the least amount of data followed by one of the algorithms.
        TryCompress(inputData, dataOffset, output, ref compressionResult);
        if (!compressionResult.Valid)
        {
            // No command matched at this offset: emit a literal-copy run.
            // Reserve one byte for the copy header, then append literal bytes
            // (at most 31) until some algorithm yields a valid result.
            var copyOffset = dataOffset;
            var copyCommandOffset = output.Position;
            output.Position++;
            while (!compressionResult.Valid && copyOffset - dataOffset < 31 && copyOffset < inputData.LongLength)
            {
                output.WriteByte(inputData[copyOffset]);
                copyOffset++;
                TryCompress(inputData, copyOffset, output, ref compressionResult);
            }

            // Back-fill the reserved header byte: 0x80 + (literal count - 1).
            var currPos = output.Position;
            output.Position = copyCommandOffset;
            output.WriteByte((byte)(0x80 + copyOffset - dataOffset - 1));
            output.Position = currPos;
            dataOffset = copyOffset;
        }

        if (compressionResult.Valid)
        {
            dataOffset += compressionResult.InputByteCount;
        }
    }

    // Write EOF marker
    output.WriteByte(0x7F);
    output.WriteByte(0xFF);

    // Trim any excess bytes that may have been written by the TryCompress* methods
    output.SetLength(output.Position);

    return(new BinaryFile(output.ToArray()));
}
private void HandleResult(CompressionResult result)
{
    // Applies a compression result: overwrites the original file with the
    // smaller output (when there was an actual saving), notifies listeners,
    // then removes the temporary result file.
    try
    {
        // Only replace the original when compression saved space and the
        // result file is non-empty.
        if (result.Saving > 0 && result.ResultFileSize > 0)
        {
            File.Copy(result.ResultFileName, result.OriginalFileName, true);
        }

        OnFinished(result);

        File.Delete(result.ResultFileName);
    }
    catch
    {
        // Best-effort: file-system failures are deliberately swallowed.
        // NOTE(review): if Copy throws, listeners are never notified and the
        // temp file is never deleted — confirm that is intended.
    }
}
public void PrintCompressionResult(CompressionResult result, bool isVerbose)
{
    // Writes a summary of a compression run to the console; verbose mode dumps
    // the full metadata, otherwise only the block count is shown (when metadata
    // is available).
    Console.WriteLine();

    if (isVerbose)
    {
        Console.Write(result.MetaData);
    }
    else if (result.MetaData != null)
    {
        Console.WriteLine($"  # of blocks: {result.MetaData.BlockSizes.Length}");
    }

    Console.WriteLine($" Input file size: {result.InputFileSize}");
    Console.WriteLine($"Output file size: {result.OutputFileSize}");
}
public void TestGifCompression()
{
    // Arrange: locate the sample GIF shipped alongside the test assembly.
    string assemblyDir = Path.GetDirectoryName(GetType().Assembly.Location);
    string inputFile = Path.Combine(assemblyDir, "Test.gif");
    Assert.IsTrue(File.Exists(inputFile));
    var compressor = new Compressor();

    // Act
    CompressionResult result = compressor.CompressFile(inputFile);

    // Assert: a non-empty result file was produced.
    Assert.IsFalse(string.IsNullOrEmpty(result.ResultFileName));
    Assert.AreNotEqual(0, result.ResultFileSize);
}
public void TestNoneImageFile()
{
    // Arrange: a text file that is definitely not an image.
    string assemblyDir = Path.GetDirectoryName(GetType().Assembly.Location);
    string inputFile = Path.Combine(assemblyDir, "NotAnImage.txt");
    Assert.IsTrue(File.Exists(inputFile));
    var compressor = new Compressor();

    // Act
    CompressionResult result = compressor.CompressFile(inputFile);

    // Assert: non-image input yields no result file and no saving.
    Assert.IsTrue(string.IsNullOrEmpty(result.ResultFileName));
    Assert.AreEqual(0, result.Saving);
}
public CompressionResult returnResult()
{
    // Copies this attempt's measured values into a standalone CompressionResult.
    var snapshot = new CompressionResult();
    snapshot.CompressionTestId = CompressionTestId;
    snapshot.AttemptNumber = AttemptNumber;
    snapshot.RelativeReduction = RelativeReduction;
    snapshot.StandardForce = StandardForce;
    snapshot.PlasticRelativeReduction = PlasticRelativeReduction;
    snapshot.XCorrectRelativeReduction = XCorrectRelativeReduction;
    snapshot.D0 = D0;
    snapshot.H0 = H0;
    snapshot.S0 = S0;
    return snapshot;
}
public void CompressMidDestBuffer()
{
    // Verify LZNT1 compression still works when writing into the middle of the
    // destination buffer rather than at offset zero.
    var lznt1 = (BlockCompressor)CreateInstance <NtfsFileSystem>("DiscUtils.Ntfs.LZNT1");

    // Double-check, make sure native code round-trips
    byte[] nativeCompressed = NativeCompress(_uncompressedData, 0, _uncompressedData.Length, 4096);
    Assert.Equal(_uncompressedData, NativeDecompress(nativeCompressed, 0, nativeCompressed.Length));

    int compressedLength = 128 * 1024;
    byte[] compressedData = new byte[compressedLength];

    lznt1.BlockSize = 4096;
    CompressionResult outcome = lznt1.Compress(_uncompressedData, 0, _uncompressedData.Length, compressedData, 32 * 1024, ref compressedLength);
    Assert.Equal(CompressionResult.Compressed, outcome);
    Assert.True(compressedLength < _uncompressedData.Length);
    Assert.Equal(_uncompressedData, NativeDecompress(compressedData, 32 * 1024, compressedLength));
}
public void Compress()
{
    // Compress with the standard 4KB block size and check both round-trip
    // fidelity (via native decompression) and a minimum compression ratio.
    var lznt1 = (BlockCompressor)typeof(NtfsFileSystem).Assembly.CreateInstance("DiscUtils.Ntfs.LZNT1");

    // Double-check, make sure native code round-trips
    byte[] nativeCompressed = NativeCompress(_uncompressedData, 0, _uncompressedData.Length, 4096);
    Assert.AreEqual(_uncompressedData, NativeDecompress(nativeCompressed, 0, nativeCompressed.Length));

    int compressedLength = 16 * 4096;
    byte[] compressedData = new byte[compressedLength];

    lznt1.BlockSize = 4096;
    CompressionResult outcome = lznt1.Compress(_uncompressedData, 0, _uncompressedData.Length, compressedData, 0, ref compressedLength);
    Assert.AreEqual(CompressionResult.Compressed, outcome);
    Assert.AreEqual(_uncompressedData, NativeDecompress(compressedData, 0, compressedLength));

    // Expect at least ~34% size reduction on this data set.
    Assert.Less(compressedLength, _uncompressedData.Length * 0.66);
}
public void Compress1KBlock()
{
    // Compress a single 1KB block with a 1KB block size and verify it
    // decompresses (natively) to the original bytes.
    var lznt1 = (BlockCompressor)typeof(NtfsFileSystem).Assembly.CreateInstance("DiscUtils.Ntfs.LZNT1");

    byte[] uncompressed1K = new byte[1024];
    Array.Copy(_uncompressedData, uncompressed1K, 1024);

    // Double-check, make sure native code round-trips
    byte[] nativeCompressed = NativeCompress(uncompressed1K, 0, 1024, 1024);
    Assert.AreEqual(uncompressed1K, NativeDecompress(nativeCompressed, 0, nativeCompressed.Length));

    int compressedLength = 1024;
    byte[] compressedData = new byte[compressedLength];

    lznt1.BlockSize = 1024;
    CompressionResult outcome = lznt1.Compress(uncompressed1K, 0, 1024, compressedData, 0, ref compressedLength);
    Assert.AreEqual(CompressionResult.Compressed, outcome);
    Assert.AreEqual(uncompressed1K, NativeDecompress(compressedData, 0, compressedLength));
}
public void GivenACommitMessage_ShouldCorrectlyParse()
{
    // Arrange: a realistic ImgBot commit message with a total line and two
    // per-image lines.
    var nl = Environment.NewLine;
    var commitMessage =
        KnownGitHubs.CommitMessageTitle + nl + nl +
        "*Total -- 501.89kb -> 455.68kb (9.21%)" + nl + nl +
        "path/to/image.png -- 200.97kb -> 195.12kb (2.91%)" + nl +
        "path/to/image2.png -- 300.92kb -> 260.56kb (13.41%)" + nl + nl +
        "Signed-off-by: ImgBotApp <*****@*****.**>" + nl;

    var expected = new[]
    {
        new CompressionResult { Title = "path/to/image.png", SizeBefore = 200.97, SizeAfter = 195.12 },
        new CompressionResult { Title = "path/to/image2.png", SizeBefore = 300.92, SizeAfter = 260.56 },
    };

    // Act
    var parsed = CompressionResult.ParseCommitMessage(commitMessage);

    // Assert: only the per-image lines are parsed, with correct sizes.
    Assert.AreEqual(parsed.Length, expected.Length);
    for (var idx = 0; idx < parsed.Length; idx++)
    {
        Assert.AreEqual(parsed[idx].Title, expected[idx].Title);
        Assert.AreEqual(parsed[idx].SizeBefore, expected[idx].SizeBefore);
        Assert.AreEqual(parsed[idx].SizeAfter, expected[idx].SizeAfter);
    }
}
public void GivenAboveThreshold_ShouldNotOptimizeImages()
{
    // Arrange: the combined reduction (100 + 1 KB) is far below the 500KB minimum.
    var configuration = new RepoConfiguration { MinKBReduced = 500 };
    var compressionResults = new[]
    {
        new CompressionResult { SizeBefore = 5000, SizeAfter = 4900, },
        new CompressionResult { SizeBefore = 5000, SizeAfter = 4999, },
    };

    // Act
    var shouldOptimize = Threshold.MeetsThreshold(configuration, compressionResults);

    // Assert
    Assert.IsFalse(shouldOptimize);
}
private long RunCompression(string searchFilter, bool lossy)
{
    // Compresses every file under _temp matching searchFilter, replaces each
    // original with its compressed version, writes a per-extension tab-separated
    // summary report, and returns the total bytes saved.
    var successes = new List <CompressionResult>();
    foreach (string file in Directory.GetFiles(_temp, searchFilter))
    {
        CompressionResult result = _compressor.CompressFile(file, lossy);
        if (File.Exists(result.ResultFileName))
        {
            successes.Add(result);
            File.Copy(result.ResultFileName, result.OriginalFileName, true);
            File.Delete(result.ResultFileName);
        }
    }

    // Build the summary, grouped by (lower-cased) file extension.
    var report = new StringBuilder();
    report.AppendLine("Type\t#\tSavings\tTime");
    report.AppendLine();

    foreach (IGrouping <string, CompressionResult> group in successes.GroupBy(r => Path.GetExtension(r.OriginalFileName).ToLowerInvariant()))
    {
        long savings = group.Sum(g => g.Saving);
        double seconds = group.Average(g => g.Elapsed.TotalSeconds);
        report.AppendLine(group.Key + "\t" + group.Count() + "\t" + savings + "\t" + Math.Round(seconds, 2));
    }

    string testName = searchFilter.Replace("*.*", "all").Trim('.', '*');
    File.WriteAllText("../../" + testName + "-" + (lossy ? "lossy" : "lossless") + ".txt", report.ToString());

    return successes.Sum(r => r.Saving);
}
public void Run()
{
    // Opens the source for reading and stores the compression outcome on this
    // instance; the reader is disposed once compression finishes.
    using (var input = _readable.OpenReader())
    {
        Result = Compressor.Compress(input, Temp);
    }
}
private static void TryCompress(byte[] data, long offset, MemoryStream output, ref CompressionResult result)
{
    // Tries each compression command at 'offset'. Every candidate writes its
    // encoding at the same output position; a candidate only overwrites the
    // previous bytes (and updates 'result') when it beats the best compression
    // ratio so far. At the end, the stream position is advanced past the
    // winning candidate's bytes so they are kept.
    var outputPos = output.Position;
    result.InputByteCount = 0;

    TryCompressSplitCopy(data, offset, output, ref result);
    output.Position = outputPos;

    TryCompressFill(data, offset, output, ref result);
    output.Position = outputPos;

    TryCompressSkip(data, offset, output, ref result);
    output.Position = outputPos;

    // FIXME: Redesign this algorithm; too slow
    // - Move to the main loop
    // - Use a rolling window instead of recomputing every time
    //TryCompressPrevious(data, offset, output, ref result);
    //output.Position = outputPos;

    if (result.Valid)
    {
        output.Position += result.OutputByteCount;
    }
}
public void GivenTwoCompressionResultArrays_ShouldCreateMergedArray()
{
    // Arrange: two result sets sharing one title ("image2.png") — the merge
    // should prefer the first array's entry for the duplicate.
    var images = new[]
    {
        new CompressionResult { Title = "path/to/image.png", SizeBefore = 100.3678, SizeAfter = 95.78743 },
        new CompressionResult { Title = "path/to/image2.png", SizeBefore = 500.3234, SizeAfter = 360.1321987 },
    };
    var images2 = new[]
    {
        new CompressionResult { Title = "path/to/image2.png", SizeBefore = 101.3678, SizeAfter = 96.78743 },
        new CompressionResult { Title = "path/to/image3.png", SizeBefore = 300.3234, SizeAfter = 760.1321987 },
    };

    var expected = new[]
    {
        new CompressionResult { Title = "path/to/image.png", SizeBefore = 100.3678, SizeAfter = 95.78743 },
        new CompressionResult { Title = "path/to/image2.png", SizeBefore = 500.3234, SizeAfter = 360.1321987 },
        new CompressionResult { Title = "path/to/image3.png", SizeBefore = 300.3234, SizeAfter = 760.1321987 },
    };

    // Act
    var mergeResult = CompressionResult.Merge(images, images2);

    // Assert
    Assert.AreEqual(mergeResult.Length, expected.Length);
    for (var idx = 0; idx < mergeResult.Length; idx++)
    {
        Assert.AreEqual(mergeResult[idx].Title, expected[idx].Title);
        Assert.AreEqual(mergeResult[idx].SizeBefore, expected[idx].SizeBefore);
        Assert.AreEqual(mergeResult[idx].SizeAfter, expected[idx].SizeAfter);
    }
}
private static void TryCompressSplitCopy(byte[] data, long offset, MemoryStream output, ref CompressionResult result)
{
    // Encodes an alternating run "sep X sep Y sep Z ..." as a split-copy
    // command: header 0xA0 + (count - 2), the separator byte, then the
    // interleaved non-separator bytes. Consumes count*2 input bytes, emits
    // count+2 output bytes. Only records itself in 'result' when its ratio
    // beats the current best candidate.
    try
    {
        var sep = data[offset];
        var count = 1;
        // Count alternating pairs; max run length is 0x21 pairs.
        while (data[offset + count * 2] == sep && data[offset + count * 2 + 1] != sep && count < 0x21)
        {
            count++;
        }

        if (count >= 2)
        {
            var compressionRatio = count * 2.0f / (2.0f + count);
            if (compressionRatio > result.CompressionRatio)
            {
                result.InputByteCount = count * 2;
                result.OutputByteCount = 2 + count;
                output.WriteByte((byte)(0xA0 + count - 2));
                output.WriteByte(sep);
                for (int i = 0; i < count; i++)
                {
                    output.WriteByte(data[offset + i * 2 + 1]);
                }
            }
        }
    }
    catch (IndexOutOfRangeException)
    {
        // EOF means failure — running off the end of 'data' deliberately aborts
        // this candidate without touching 'result'.
    }
}
private static void TryCompressFill(byte[] data, long offset, MemoryStream output, ref CompressionResult result)
{
    // Encodes a run of a single repeated byte as a fill command:
    // header 0xC0 + (count - 2) followed by the fill byte. Consumes 'count'
    // input bytes, emits 2 output bytes. Only records itself in 'result' when
    // its ratio beats the current best candidate.
    try
    {
        var fill = data[offset];
        var count = 1;
        // Count the repeat run; max run length is 0x21 bytes.
        while (data[offset + count] == fill && count < 0x21)
        {
            count++;
        }

        if (count >= 2)
        {
            var compressionRatio = count * 0.5f;
            if (compressionRatio > result.CompressionRatio)
            {
                result.InputByteCount = count;
                result.OutputByteCount = 2;
                output.WriteByte((byte)(0xC0 + count - 2));
                output.WriteByte(fill);
            }
        }
    }
    catch (IndexOutOfRangeException)
    {
        // EOF means failure — running off the end of 'data' deliberately aborts
        // this candidate without touching 'result'.
    }
}
private static void TryCompressSkip(byte[] data, long offset, MemoryStream output, ref CompressionResult result)
{
    // Encodes a run of zero bytes as a skip command. Short runs (< 0x1F) use a
    // one-byte form: 0xE0 + (count - 1). Longer runs use a two-byte form:
    // 0xFF followed by (count - 0x20). Only records itself in 'result' when its
    // ratio beats the current best candidate.
    try
    {
        var count = 0;
        // Count leading zeros; max run length is 0x11F bytes.
        while (data[offset + count] == 0 && count < 0x11F)
        {
            count++;
        }

        if (count > 0)
        {
            if (count < 0x1F)
            {
                // One-byte skip: ratio is count input bytes per 1 output byte.
                var compressionRatio = count;
                if (compressionRatio > result.CompressionRatio)
                {
                    result.InputByteCount = count;
                    result.OutputByteCount = 1;
                    output.WriteByte((byte)(0xE0 + count - 1));
                }
            }
            else
            {
                // Two-byte extended skip.
                var compressionRatio = count * 0.5f;
                if (compressionRatio > result.CompressionRatio)
                {
                    result.InputByteCount = count;
                    result.OutputByteCount = 2;
                    output.WriteByte(0xFF);
                    output.WriteByte((byte)(count - 0x20));
                }
            }
        }
    }
    catch (IndexOutOfRangeException)
    {
        // EOF means failure — running off the end of 'data' deliberately aborts
        // this candidate without touching 'result'.
    }
}
private static void TryCompressPrevious(byte[] data, long offset, MemoryStream output, ref CompressionResult result)
{
    // Encodes a back-reference to recently seen input as a two-byte command
    // packing the match length and a (negative) lookbehind offset. Finds the
    // longest prefix of data[offset..] that also appears in the preceding
    // 0x400 bytes, via a dynamic-programming common-suffix table.
    // Don't waste time trying to look behind if there's nothing written yet
    if (offset == 0)
    {
        return;
    }

    try
    {
        // Search output up to 0x400 bytes behind for the longest subsequence of bytes found in data starting at offset.
        // The common substring must be between 2 and 33 bytes long.
        var maxLookbehindDistance = Math.Min(0x400, (int)offset);
        if (maxLookbehindDistance < 2)
        {
            return;
        }

        var maxLength = Math.Min(33, (int)Math.Min(maxLookbehindDistance, data.Length - offset));

        var lookbehindData = new Span <byte>(data, (int)(offset - maxLookbehindDistance), maxLookbehindDistance);
        var lookaheadData = new Span <byte>(data, (int)offset, maxLength);

        int matchLength = 0;
        int matchPos = -1;

        // Classic longest-common-suffix DP table; cell [i, j] is the length of
        // the common run ending at lookbehind[i-1] / lookahead[j-1].
        int[,] longestCommonSuffixes = new int[lookbehindData.Length + 1, lookaheadData.Length + 1];
        for (int i = 0; i <= lookbehindData.Length; i++)
        {
            for (int j = 0; j <= lookaheadData.Length; j++)
            {
                if (i == 0 || j == 0)
                {
                    longestCommonSuffixes[i, j] = 0;
                }
                else if (lookbehindData[i - 1] == lookaheadData[j - 1])
                {
                    longestCommonSuffixes[i, j] = longestCommonSuffixes[i - 1, j - 1] + 1;
                    // The match must be a PREFIX of the lookahead (== j ensures
                    // the run starts at offset), and longer than any found so far.
                    if (longestCommonSuffixes[i, j] > matchLength && longestCommonSuffixes[i, j] == j)
                    {
                        matchLength = longestCommonSuffixes[i, j];
                        matchPos = i - matchLength;
                    }
                }
                else
                {
                    longestCommonSuffixes[i, j] = 0;
                }
            }
        }

        if (matchLength >= 2)
        {
            var compressionRatio = matchLength * 0.5f;
            if (compressionRatio > result.CompressionRatio)
            {
                // matchOffset is negative: distance back from 'offset'.
                var matchOffset = matchPos - maxLookbehindDistance;
                result.InputByteCount = matchLength;
                result.OutputByteCount = 2;
                // Pack: bits 2+ hold (length - 2), low 2 bits hold offset bits 8-9,
                // second byte holds offset bits 0-7.
                output.WriteByte((byte)((byte)((matchLength - 2) << 2) | (byte)((matchOffset >> 8) & 3)));
                output.WriteByte((byte)(matchOffset & 0xFF));
            }
        }
    }
    catch (IndexOutOfRangeException)
    {
        // EOF means failure — aborts this candidate without touching 'result'.
    }
}
private void RunProcess(string sourceFile, string targetFile, ProcessStartInfo start)
{
    // Launches the external compression tool described by 'start', waits up to
    // 7 seconds for it to exit, then records the outcome for the given files.
    try
    {
        using (var process = Process.Start(start))
        {
            // NOTE(review): the bool return of WaitForExit is ignored — if the
            // tool is still running after 7s, the result below is built from a
            // possibly incomplete target file. Confirm this is acceptable.
            process.WaitForExit(7000);

            var result = new CompressionResult(sourceFile, targetFile);
            HandleResult(result);
        }
    }
    catch
    {
        // Best-effort: failure to launch or run the tool is deliberately swallowed.
    }
}
private void OnFinished(CompressionResult result)
{
    // Raises the Finished event for a completed compression.
    // Fix: the null-conditional invoke is race-free — the original
    // check-then-call pattern could throw NullReferenceException if the last
    // handler unsubscribed between the null check and the invocation.
    Finished?.Invoke(this, result);
}
private void WriteToLog(object sender, CompressionResult e)
{
    // Queues a CSV log entry for a finished compression (unless reporting is
    // suppressed). The work runs on the thread pool so the event handler
    // returns quickly.
    if (_cmdLineOptions.SuppressCsvReport)
    {
        return;
    }

    ThreadPool.QueueUserWorkItem((o) =>
    {
        // Fix: the null guard must run BEFORE 'e' is dereferenced — the
        // original called _store.Save(e.OriginalFileName) first, so a null
        // result would throw before the guard was ever reached.
        if (e == null /*|| e.ResultFileSize == 0*/)
        {
            return;
        }

        _store.Save(e.OriginalFileName);

        var logItem = new LogItem
        {
            FileName = BuildRelativeFilePath(e.OriginalFileName),
            OriginalSizeBytes = e.OriginalFileSize,
            NewSizeBytes = e.ResultFileSize
        };

        _logger.Write(logItem);
    });
}