public byte[] Output(byte[] bytes)
{
    // Unwind the storage pipeline: inflate the payload first, then decrypt it.
    var inflated = _compressionService.Decompress(bytes);
    return _encryptionService.Decrypt(inflated).ToArray();
}
public static int Main(string[] args)
{
    // Bail out immediately when the command line cannot be parsed.
    if (!parsingService.TryParse(args))
    {
        return 1;
    }

    var mode = parsingService.CompressionMode;

    if (mode == CompressionMode)
    {
        return (int)compressionService.Compress(parsingService.OriginalFileName, parsingService.ResultFileName);
    }

    if (mode == DecompressionMode)
    {
        return (int)compressionService.Decompress(parsingService.OriginalFileName, parsingService.ResultFileName);
    }

    // Unknown mode: warn the user and exit with a non-zero status.
    logger.Warning("Compression mode can be either 'compress' or 'decompress'");
    return 1;
}
public object Output(Span<byte> bytes)
{
    // Decompress the raw bytes, decrypt the result, then rebuild the object.
    var plain = _encryptionService.Decrypt(_compressionService.Decompress(bytes));
    return _serializerService.Deserialize(plain);
}
public void BigFileCompressionTest(string originalFileName, string archiveFileName, string decompressedFileName)
{
    // Round-trip the file: compress it, decompress the archive, then
    // verify the decompressed copy hashes identically to the original.
    compressionService.Compress(originalFileName, archiveFileName);
    compressionService.Decompress(archiveFileName, decompressedFileName);

    Assert.True(CheckHashSum(originalFileName, decompressedFileName));
}
public T GetWithCompression<T>(string key)
{
    // Cache miss yields default(T); a hit is inflated and then deserialized.
    var cached = _distributedCache.Get(key);
    if (cached is null)
    {
        return default(T);
    }

    var raw = _compressionService.Decompress(cached);
    return Deserialize<T>(raw);
}
public async Task<DocumentContent> Get(Guid documentId)
{
    var stored = await _repo.Fetch(documentId);
    if (stored is null)
    {
        return null;
    }

    // Decrypt the stored bytes, then decompress them into the content payload.
    var plainBytes = _encryptionService.Decrypt(stored.Content);

    return new DocumentContent()
    {
        Content = _compressionService.Decompress(plainBytes),
        ContentType = stored.Type,
        Name = stored.Name
    };
}
/// <summary>
/// Worker loop: drains chunk items from the shared read queue, compresses or
/// decompresses each one according to <paramref name="compressionMode"/>, and
/// stores the processed bytes in <paramref name="resultPieces"/> keyed by the
/// chunk's original order. The loop keeps running while no cooperating token
/// has requested cancellation, or while the input queue still has items.
/// On any exception the error is reported and <paramref name="exceptionSrc"/>
/// is cancelled to stop the other workers.
/// </summary>
/// <param name="compressionMode">Selects Compress or Decompress for every chunk.</param>
/// <param name="asyncReadContext">Shared producer/consumer state: input queue plus empty-input and input-overflow events.</param>
/// <param name="resultPieces">Output map from chunk order number to processed bytes.</param>
/// <param name="exceptionSrc">Cancelled here to propagate a failure to the other workers.</param>
/// <param name="cancellationTokens">Tokens of all cooperating parties; any one being cancelled starts shutdown.</param>
/// <param name="outputOverflowEvent">When reset, blocks this worker while the output side is too far behind.</param>
private void CompressChunk(
    CompressionMode compressionMode,
    AsyncReadContext<QueueItem> asyncReadContext,
    ConcurrentDictionary<int, byte[]> resultPieces,
    CancellationTokenSource exceptionSrc,
    CancellationToken[] cancellationTokens,
    ManualResetEventSlim outputOverflowEvent)
{
    try
    {
        while (!cancellationTokens.Any(ct => ct.IsCancellationRequested) || asyncReadContext.Queue.Count > 0)
        {
            // Throttle: blocks while the writer side has signalled overflow.
            outputOverflowEvent.Wait();

            if (!asyncReadContext.Queue.TryTake(out var queueItem))
            {
                // Queue momentarily empty: wait briefly for the producer to
                // signal new input, then re-check the loop condition.
                asyncReadContext.EmptyInputEvent.Reset();
                asyncReadContext.EmptyInputEvent.Wait(TimeSpan.FromMilliseconds(10));
                continue;
            }

            byte[] data;
            switch (compressionMode)
            {
                case CompressionMode.Compress:
                    data = _compressionService.Compress(queueItem.Data);
                    break;
                case CompressionMode.Decompress:
                    data = _compressionService.Decompress(queueItem.Data);
                    break;
                default:
                    // BUGFIX: the original message ended with a stray "'" after
                    // "implemented"; the trailing quote is removed.
                    throw new ApplicationException($"Managing of compression mode '{compressionMode}' not implemented");
            }

            // NOTE(review): the update factory returns the existing value, so a
            // duplicate order number silently keeps the first result — confirm
            // that orders are unique / first-writer-wins is intended.
            resultPieces.AddOrUpdate(queueItem.Order, data, (i1, byteArray) => byteArray);

            ControlInputOverflow(asyncReadContext.Queue, asyncReadContext.InputOverflowEvent);
        }
    }
    catch (Exception e)
    {
        // Report the failure and cancel the shared token so sibling workers stop.
        _statusUpdateService.Error(e.Message);
        exceptionSrc.Cancel();
    }
}