/// <summary>
/// Initializes a new instance of the <see cref="PackageDistributer"/> class.
/// </summary>
/// <param name="logger">Logger dependency; must not be null.</param>
/// <param name="downloader">Downloader dependency; must not be null.</param>
/// <param name="decompressor">Decompressor dependency; must not be null.</param>
/// <param name="verifier">Verifier dependency; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public PackageDistributer(ILogger<PackageDistributer> logger, IDownloader downloader, IDecompressor decompressor, IVerifier verifier)
{
    // Fail fast on missing dependencies (matches the guard style used elsewhere in this codebase).
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _downloader = downloader ?? throw new ArgumentNullException(nameof(downloader));
    _decompressor = decompressor ?? throw new ArgumentNullException(nameof(decompressor));
    _verifier = verifier ?? throw new ArgumentNullException(nameof(verifier));
}
/// <summary>
/// Decompresses compressed bytes.
/// </summary>
/// <param name="decompressor">The decompressor to extend.</param>
/// <param name="data">The compressed data to decompress.</param>
/// <returns>The decompressed bytes.</returns>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="decompressor"/> or <paramref name="data"/> is null.
/// </exception>
public static byte[] Decompress(this IDecompressor decompressor, byte[] data)
{
    if (decompressor is null)
    {
        throw new ArgumentNullException(nameof(decompressor));
    }

    if (data is null)
    {
        // MemoryStream would throw with parameter name "buffer"; report the caller's name instead.
        throw new ArgumentNullException(nameof(data));
    }

    // The stream wraps the compressed input, so name it as input (the original called it "outputStream").
    using (var inputStream = new MemoryStream(data))
    {
        return decompressor.Decompress(inputStream);
    }
}
/// <summary>
/// Registers a decompressor unless it is null or a decompressor with the same id
/// is already registered.
/// </summary>
/// <param name="decompressor">The decompressor to register.</param>
/// <returns>True when the decompressor was added; false otherwise.</returns>
public bool RegisterDecompressor(IDecompressor decompressor)
{
    if (decompressor == null)
    {
        return false;
    }

    // Reject duplicates: another decompressor already claims this id.
    if (GetDecompressor(decompressor.GetId()) != null)
    {
        return false;
    }

    Decompressors.Add(decompressor);
    return true;
}
public CompressedContainerImpl(SerializationInfo info, StreamingContext ctxt)
    : base(info, ctxt)
{
    // Workaround: deserialization never restores the compressor/decompressor pair,
    // so fall back to the ZIP implementations here. This constructor is exercised
    // only by the content encryption tests, which use the serialization constructors.
    m_compressor = new ZipCompressor();
    m_decompressor = new ZipDecompressor();
}
/// <summary>
/// Decompresses compressed bytes.
/// </summary>
/// <param name="decompressor">The decompressor to extend.</param>
/// <param name="data">The compressed data to decompress.</param>
/// <returns>A decompressed byte array.</returns>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="decompressor"/> or <paramref name="data"/> is null.
/// </exception>
public static byte[] Decompress(this IDecompressor decompressor, byte[] data)
{
    if (decompressor is null)
    {
        throw new ArgumentNullException(nameof(decompressor));
    }

    if (data is null)
    {
        // Without this guard MemoryStream throws with parameter name "buffer",
        // which hides the real culprit from the caller.
        throw new ArgumentNullException(nameof(data));
    }

    // The stream wraps the compressed input, so name it as input.
    using (var inputStream = new MemoryStream(data))
    {
        return decompressor.Decompress(inputStream);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="FileDecompressor"/> class.
/// </summary>
/// <param name="generator">Generates per-chunk decompression info; must not be null.</param>
/// <param name="reader">Reads the input file; must not be null.</param>
/// <param name="decompressor">Decompresses chunk data; must not be null.</param>
/// <param name="writer">Writes the output file; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public FileDecompressor(IGenerator<ChunkDecompressionInfo> generator, IFileReader reader, IDecompressor decompressor, IFileWriter writer)
{
    // Fail fast on missing dependencies (matches the guard style used elsewhere in this codebase).
    this.generator = generator ?? throw new ArgumentNullException(nameof(generator));
    this.reader = reader ?? throw new ArgumentNullException(nameof(reader));
    this.decompressor = decompressor ?? throw new ArgumentNullException(nameof(decompressor));
    this.writer = writer ?? throw new ArgumentNullException(nameof(writer));
}
public CompressedContainerImpl(FileData fileData, bool expanded, ICompressor compressor, IDecompressor decompressor)
    : base(fileData)
{
    m_compressor = compressor;
    m_decompressor = decompressor;
    m_expanded = expanded;
    m_fileData.FileType = Workshare.Policy.FileType.ZIP;

    // Subscribe so we can mark the container as expanded when files are added.
    this.Files.CollectionChanged += FilesCollectionChanged;
}
/// <summary>
/// Executes the decompression process asynchronously and updates the request based on the result.
/// </summary>
/// <param name="context">
/// The <see cref="HttpContext"/> context.
/// </param>
/// <param name="decompressors">
/// The collection of available decompressors.
/// </param>
/// <param name="cancellationToken">
/// The cancellation token.
/// </param>
public async Task ExecuteAsync(HttpContext context, IEnumerable<IDecompressor> decompressors, CancellationToken cancellationToken)
{
    // If one or more encodings have been applied to a representation, the
    // sender that applied the encodings MUST generate a Content-Encoding
    // header field that lists the content codings in the order in which
    // they were applied. Additional information about the encoding
    // parameters can be provided by other header fields not defined by this
    // specification.
    var contentEncodings = context.Request.Headers.GetCommaSeparatedValues(HeaderNames.ContentEncoding) ?? new string[0];

    // We can only decompress the last encoding on the list, because encodings
    // were applied in order, so they have to be removed in reverse order.
    string contentEncoding = contentEncodings.LastOrDefault();

    // NOTE: when contentEncoding is null (no header), Equals(null, ...) is false
    // for every candidate, so decompressor stays null and the request is untouched.
    IDecompressor decompressor = decompressors.FirstOrDefault(c => c.ContentCoding.Equals(contentEncoding, StringComparison.OrdinalIgnoreCase));
    if (decompressor != null)
    {
        this.logger.LogInformation($"Decompressing request using {decompressor.ContentCoding} decompressor.");
        Stream decompressed = new MemoryStream();
        using (Stream requestBody = context.Request.Body)
        {
            // Decompress the original body into the new stream.
            await decompressor.DecompressAsync(requestBody, decompressed, cancellationToken);

            // Move back to the beginning of the stream so later middleware can read it.
            decompressed.Seek(0, SeekOrigin.Begin);

            // The stream is decompressed, so report its real length.
            context.Request.ContentLength = decompressed.Length;

            // Remove the encoding already processed from the list (the last one).
            contentEncodings = contentEncodings.Take(contentEncodings.Length - 1).ToArray();

            // Update the Content-Encoding header because it is no longer valid.
            if (contentEncodings.Any())
            {
                context.Request.Headers[HeaderNames.ContentEncoding] = new StringValues(contentEncodings);
            }
            else
            {
                context.Request.Headers.Remove(HeaderNames.ContentEncoding);
            }

            // Reassign the new decompressed stream as the request body.
            context.Request.Body = decompressed;
        }

        this.logger.LogInformation("Finished decompressing request.");
    }
}
/// <summary>
/// Runs the decompressor and translates the outcome into a process exit code.
/// </summary>
/// <param name="decompressor">
/// The decompressor to run; disposed afterwards when it implements <see cref="IDisposable"/>.
/// </param>
/// <returns>0 on success; 1 on failure, after writing the error message to the console.</returns>
private static int PerformDecompression(IDecompressor decompressor)
{
    string message;
    bool success;
    try
    {
        (message, success) = decompressor.Decompress();
    }
    finally
    {
        // Dispose even when Decompress throws; the original only disposed on the
        // normal path, leaking the decompressor on exceptions.
        if (decompressor is IDisposable disposableDecompressor)
        {
            disposableDecompressor.Dispose();
        }
    }

    if (success)
    {
        return 0;
    }

    Console.WriteLine($"\r\nError: {message}");
    return 1;
}
/// <summary>
/// Compresses the input file, decompresses it again, reports sizes and timings,
/// and checks that the restored file matches the original.
/// </summary>
/// <param name="compressor">Compressor to run; disposed after use when disposable.</param>
/// <param name="decompressor">Decompressor to run; disposed after use when disposable.</param>
/// <param name="inputFileName">Path of the original input file.</param>
/// <param name="outputFileName">Path of the compressed output file.</param>
/// <param name="restoredFileName">Path of the decompressed (restored) file.</param>
/// <returns>True when both stages succeed and the restored file equals the input file.</returns>
static bool Test(ICompressor compressor, IDecompressor decompressor, string inputFileName, string outputFileName, string restoredFileName)
{
    // StartNew/Restart replace the manual new/Start and Reset/Start sequences.
    var stopWatch = Stopwatch.StartNew();
    var compressResult = compressor.Compress();
    stopWatch.Stop();
    if (compressor is IDisposable disposableCompressor)
    {
        disposableCompressor.Dispose();
    }

    var compressionTime = stopWatch.ElapsedMilliseconds;
    if (!compressResult.success)
    {
        return false;
    }

    Console.WriteLine($"Output file size:\t{new FileInfo(outputFileName).Length} bytes");

    stopWatch.Restart();
    var decompressResult = decompressor.Decompress();
    stopWatch.Stop();
    if (decompressor is IDisposable disposableDecompressor)
    {
        disposableDecompressor.Dispose();
    }

    var decompressionTime = stopWatch.ElapsedMilliseconds;
    if (!decompressResult.success)
    {
        return false;
    }

    Console.WriteLine($"Restored file size:\t{new FileInfo(restoredFileName).Length} bytes");
    Console.WriteLine($"Compression time:\t{compressionTime} ms");
    Console.WriteLine($"Decompression time:\t{decompressionTime} ms");
    return FileEquals(inputFileName, restoredFileName);
}
// Creates a fresh IFileHelper mock and a Decompressor under test.
// NOTE(review): presumably invoked by the test framework before each test
// ([SetUp]/[TestInitialize] attribute not visible in this chunk) — confirm.
public void Setup()
{
    _fileHelperMock = new Mock<IFileHelper>();
    _decompressor = new Decompressor(_fileHelperMock.Object);
}
/// <summary>
/// Initializes a new instance of the <see cref="App"/> class.
/// </summary>
/// <param name="compressor">Compressor dependency; must not be null.</param>
/// <param name="decompressor">Decompressor dependency; must not be null.</param>
/// <param name="fileHelper">File helper dependency; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public App(ICompressor compressor, IDecompressor decompressor, IFileHelper fileHelper)
{
    // Explicit guard clauses instead of the inline ?? throw form; same behavior.
    if (compressor is null)
    {
        throw new ArgumentNullException(nameof(compressor));
    }

    if (decompressor is null)
    {
        throw new ArgumentNullException(nameof(decompressor));
    }

    if (fileHelper is null)
    {
        throw new ArgumentNullException(nameof(fileHelper));
    }

    _compressor = compressor;
    _decompressor = decompressor;
    _fileHelper = fileHelper;
}
/// <summary>
/// Initializes a new instance of the <see cref="ParallelDecompressionHandler"/> class.
/// </summary>
/// <param name="gzipDecompressor">The decompressor to use; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="gzipDecompressor"/> is null.</exception>
public ParallelDecompressionHandler(IDecompressor gzipDecompressor)
{
    // Fail fast on a missing dependency (matches the guard style used elsewhere in this codebase).
    _decompressor = gzipDecompressor ?? throw new ArgumentNullException(nameof(gzipDecompressor));
}
/// <summary>
/// Adds the given decompressor to the store, keyed by <typeparamref name="T"/>.
/// An existing registration for the same type is silently replaced
/// (indexer assignment overwrites the previous entry).
/// </summary>
/// <typeparam name="T">The payload type the decompressor handles.</typeparam>
/// <param name="decompressor">The decompressor to register.</param>
public void Add<T>(IDecompressor<T> decompressor)
{
    Decompressors[typeof(T)] = decompressor;
}
/// <summary>
/// Packs the container's file list back into its compressed form.
/// </summary>
/// <param name="sPassword">Password to protect the packed container.</param>
/// <returns>True when the container is packed (or was already packed); false when packing fails.</returns>
public override bool PackContainer(string sPassword)
{
    if (m_fileList == null || !IsExpanded)
    {
        // Already packed (or nothing to pack), so there is nothing to do.
        // Message fixed: this method packs; the old text said "unpacking".
        Logger.LogInfo(string.Format("NOT packing container, expanded is[{0}]", IsExpanded));
        return true;
    }

    if (!m_compressor.PackContainer(m_fileList, m_fileData, Properties, FileName, sPassword))
    {
        return false;
    }

    Files.Clear();
    m_expanded = false;

    // We don't guarantee to repack in the original format, so create a compatible
    // decompressor that can unpack again whatever we just packed.
    m_decompressor = m_compressor.CreateCompatibleDecompressor();
    return true;
}