/// <summary>
/// Verifies that CompressionFactory.Reader returns the expected concrete
/// stream type for each supported compression type.
/// </summary>
public void TestReaderGeneration()
{
    Dictionary<CompressionFactory.Type, Type> validReaders = new Dictionary<CompressionFactory.Type, Type>()
    {
        { CompressionFactory.Type.Gz, typeof(DotNetZipGZipInputStream) },
        { CompressionFactory.Type.Bz2, typeof(DotNetZipBZip2InputStream) }
    };

    // Loop-invariant: the same test resource is used for every compression type.
    string target = Path.Combine(Directory.GetCurrentDirectory(), "TestResources", "emptyfile.bz2");

    foreach (KeyValuePair<CompressionFactory.Type, Type> pair in validReaders)
    {
        /*
         * Note: Reading a bzip2 file in as a byte[] works for gzip as well as bzip2 stream
         * as the implementation of bzip2 must be initialised with a string containing a
         * header, EOF etc.. whereas gzip doesn't mind so the following will work despite
         * opening a gzip compression stream with a bzip2 data
         */
        using (MemoryStream ms = new MemoryStream(File.ReadAllBytes(target)))
        {
            using (CompressionStream providedStream = CompressionFactory.Reader(pair.Key, ms))
            {
                // Assert.AreEqual takes (expected, actual); the original had
                // the arguments swapped, which produces misleading failure messages.
                Assert.AreEqual(pair.Value, providedStream.GetType());
            }
        }
    }
}
/// <summary>
/// Compress the given response body bytes with the compressor that matches
/// the supplied encoding type.
/// </summary>
/// <param name="encodingType">Content-encoding name used to select a compressor.</param>
/// <param name="responseBodyStream">Raw response body bytes to compress.</param>
/// <returns>The compressed body bytes.</returns>
private async Task<byte[]> GetCompressedResponseBody(string encodingType, byte[] responseBodyStream)
{
    var factory = new CompressionFactory();
    var compressor = factory.Create(encodingType);
    var compressed = await compressor.Compress(responseBodyStream);
    return compressed;
}
/// <summary>
/// Decode button handler: picks the algorithm selected in the combo box,
/// decompresses the chosen file and displays the before/after sizes.
/// </summary>
private void buttonDecode_Click(object sender, EventArgs e)
{
    // Show the size of the loaded (compressed) data.
    textBoxOriginalSize.Text = Convert.ToUInt32(dataBytes.Length).ToString();

    // The combo box entries mirror algorithmTypeList, so the index maps
    // directly onto the CompressionType enum value.
    string selectedName = comboBoxAlgorithmType.SelectedItem.ToString();
    CompressionType selectedType = (CompressionType)algorithmTypeList.IndexOf(selectedName);
    this.ca = CompressionFactory.CreateAlgorithm(selectedType);

    // Decompress to "<directory><file>.decomp" and show the resulting size.
    string outputPath = this.textBoxDirectoryPath.Text + fileName + ".decomp";
    textBoxNewSize.Text = this.ca.Decompress(textBoxFilePath.Text, outputPath).ToString();
}
/// <summary>
/// Opens a Bz2 reader over an empty stream; construction is expected to fail.
/// NOTE(review): no Assert.Throws here, so the expected exception is
/// presumably declared via a test-framework attribute — confirm.
/// </summary>
public void TestFailingReaderGeneration()
{
    using (MemoryStream empty = new MemoryStream())
    using (CompressionFactory.Reader(CompressionFactory.Type.Bz2, empty))
    {
        // Construction alone should throw; nothing further to do.
    }
}
/// <summary>
/// Creates a writer for the given archive type over an empty stream and
/// reports the concrete stream type the factory produced.
/// </summary>
public Type TestWriterGeneration(int archiveType)
{
    using (MemoryStream sink = new MemoryStream())
    using (var writer = CompressionFactory.Writer((CompressionFactory.Type)archiveType, sink))
    {
        return writer.GetType();
    }
}
/// <summary>
/// Opening a reader over an empty stream must throw an IOException,
/// since there is no compression header to read.
/// </summary>
public void TestReaderGenerationWithoutFile(int archiveType)
{
    using (MemoryStream empty = new MemoryStream())
    {
        Assert.Throws(typeof(IOException), () =>
        {
            using (CompressionFactory.Reader((CompressionFactory.Type)archiveType, empty))
            {
            }
        });
    }
}
// BackgroundWorker entry point: decompresses _unzipFileIn (gzip) into
// _unzipFileOut, reporting progress and honouring cancellation between chunks.
private void _unzipWorker_DoWork(object sender, DoWorkEventArgs e)
{
    FileInfo info = new FileInfo(_unzipFileIn);
    long length = info.Length; // compressed size, used as the progress denominator
    using (Stream inStream = File.Open(_unzipFileIn, FileMode.Open, FileAccess.ReadWrite, FileShare.None))
    using (Stream outStream = File.Open(_unzipFileOut, FileMode.Create, FileAccess.ReadWrite, FileShare.None))
    {
        try
        {
            using (CompressionStream bzis = CompressionFactory.Reader(CompressionFactory.Type.Gz, inStream))
            {
                byte[] buffer = new byte[4 * 1024];
                int bytesRead;
                while ((bytesRead = bzis.Read(buffer, 0, buffer.Length)) > 0)
                {
                    outStream.Write(buffer, 0, bytesRead);
                    // Position is compared against the compressed file length, so the
                    // ratio can fall outside 0-100; clamp before reporting.
                    int percentage = (int)Math.Floor((double)bzis.Position * 100 / length);
                    if (percentage < 0)
                    {
                        _unzipWorker.ReportProgress(0);
                    }
                    else if (percentage > 100)
                    {
                        _unzipWorker.ReportProgress(100);
                    }
                    else
                    {
                        _unzipWorker.ReportProgress(percentage);
                    }
                    // Co-operative cancellation between chunks; partial output remains on disk.
                    if (_unzipWorker.CancellationPending)
                    {
                        e.Cancel = true;
                        return;
                    }
                }
            }
            e.Result = _unzipFileOut; // success: hand the output path to the completion handler
        }
        catch (Exception ex)
        {
            log.Error(string.Format("Error while uncompressing {0} to {1}", _unzipFileIn, _unzipFileOut), ex);
            throw; // rethrow so the worker surfaces the failure
        }
        finally
        {
            outStream.Flush();
        }
    }
}
/// <summary>
/// Exercises each CompressionList implementation with a compress/decompress
/// round-trip check, then reports which compression method the factory
/// considers best for the second data set.
/// </summary>
/// <param name="failCount">Incremented once per failing round-trip.</param>
/// <param name="testCount">Incremented once per executed round-trip.</param>
/// <returns>A message naming the best compression method.</returns>
public string RunTests(ref int failCount, ref int testCount)
{
    string ret = "";

    // First data set: mostly zeros with a single spike — favours constant-style compression.
    List<double> list = new List<double>();
    list.AddRange(new double[6]);
    list.Add(4);
    list.AddRange(new double[5]);

    RunRoundTripTest(new ConstantCompressionList<double>(), list, ref failCount, ref testCount);
    RunRoundTripTest(new NoCompressionList<double>(), list, ref failCount, ref testCount);
    RunRoundTripTest(new LinearCompressionList_double(), list, ref failCount, ref testCount);

    // Second data set: piecewise-linear ramps — exercises the linear compressor.
    list = new List<double>(new double[] { 0, 1, 2, 3, 4, 5, 6, 4, 3, 2, 6, 9, 12, 15, 18, 21, 24, 27 });
    RunRoundTripTest(new LinearCompressionList_double(), list, ref failCount, ref testCount);

    // Ask the factory which method compresses this data best.
    CompressionList<double> cList = CompressionFactory.GetBestCompressionOf<double>(list, 8);
    ret += "Best compression method is " + cList.GetType().Name;
    return ret;
}

/// <summary>
/// One round-trip: compresses <paramref name="data"/> with the given list,
/// decompresses it, and records a failure if the input is not reproduced.
/// </summary>
private void RunRoundTripTest(CompressionList<double> cList, List<double> data, ref int failCount, ref int testCount)
{
    cList.Compress(data);
    List<double> restored = cList.Decompress();
    if (!ListsEqual<double>(data, restored))
    {
        failCount++;
    }
    testCount++;
}
/// <summary>
/// Gzip-compresses <paramref name="inputStream"/> into <paramref name="outputStream"/>.
/// </summary>
/// <param name="inputStream">Source data; must not be null.</param>
/// <param name="outputStream">Destination for the compressed data; must not be null.</param>
/// <exception cref="ArgumentNullException">If either stream is null.</exception>
private void GZipCompress(Stream inputStream, Stream outputStream)
{
    if (inputStream == null || outputStream == null)
    {
        throw new ArgumentNullException();
    }

    // 'using' guarantees the compression stream is disposed (and its gzip
    // trailer flushed) even if StreamCopy throws; the original leaked the
    // stream on error because Dispose was only reached on the success path.
    using (CompressionStream gzos = CompressionFactory.Writer(CompressionFactory.Type.Gz, outputStream))
    {
        StreamCopy(inputStream, gzos);
    }
}
/// <summary>
/// A Bz2 reader over an empty stream has no valid header, so construction
/// must fail with an IOException.
/// </summary>
public void TestFailingReaderGeneration()
{
    Assert.Throws<IOException>(() =>
    {
        using (MemoryStream empty = new MemoryStream())
        using (CompressionFactory.Reader(CompressionFactory.Type.Bz2, empty))
        {
        }
    });
}
/// <summary>
/// Compresses the given body bytes using the stream type selected by
/// <paramref name="encodingType"/>.
/// </summary>
/// <param name="encodingType">Content-encoding name used to select the compression stream.</param>
/// <param name="body">Raw bytes to compress.</param>
/// <returns>The compressed bytes.</returns>
internal byte[] GetCompressedBody(string encodingType, byte[] body)
{
    using (var buffer = new MemoryStream())
    {
        // Dispose the compression stream first so its trailer is flushed
        // into the buffer before we snapshot it with ToArray().
        using (var compressor = CompressionFactory.Create(encodingType, buffer))
        {
            compressor.Write(body, 0, body.Length);
        }
        return buffer.ToArray();
    }
}
/// <summary>
/// Gzip-decompresses <paramref name="inputStream"/> into <paramref name="outputStream"/>.
/// </summary>
/// <param name="inputStream">Gzip-compressed source; must not be null.</param>
/// <param name="outputStream">Destination for the decompressed data; must not be null.</param>
/// <exception cref="ArgumentNullException">If either stream is null.</exception>
private void GZipDecompress(Stream inputStream, Stream outputStream)
{
    if (inputStream == null || outputStream == null)
    {
        throw new ArgumentNullException();
    }

    // 'using' ensures the reader stream is disposed even when StreamCopy
    // throws; the original only reached Dispose on the success path.
    using (CompressionStream gzis = CompressionFactory.Reader(CompressionFactory.Type.Gz, inputStream))
    {
        StreamCopy(gzis, outputStream);
        outputStream.Flush();
    }
}
/// <summary>
/// Compress a Stream
/// </summary>
/// <param name="inputstream">Source Input</param>
/// <param name="method">GZip or BZip2</param>
/// <returns>compressed stream</returns>
/// <exception cref="InvalidDataException">If <paramref name="method"/> is not "gzip" or "bzip2".</exception>
public CompressionStream CompressFileStream(Stream inputstream, string method)
{
    // ToLowerInvariant: the method name is a protocol token, not user-facing
    // text; the culture-sensitive ToLower() breaks under e.g. the Turkish
    // culture, where "GZIP".ToLower() is not "gzip".
    switch (method.ToLowerInvariant())
    {
        case "gzip":
            return CompressionFactory.Writer(CompressionFactory.Type.Gz, inputstream);

        case "bzip2":
            return CompressionFactory.Writer(CompressionFactory.Type.Bz2, inputstream);
    }
    throw new InvalidDataException(string.Format(Messages.COMPRESS_INVALID_METHOD, method));
}
// Construction from byte array (from serialized data).
// Layout: [SerializerConfiguration][(optionally compressed) payload], where the
// payload is three length-prefixed sections: StorageFormatIds, StorageFormatData,
// PackedData, in that order (must match ToByteArray).
public void InitStoresFromSerializedData(byte[] serializedData)
{
    int nextReadPos = 0; // Position of next read operation
    int readBytes = 0;   // bytes consumed by the most recent read

    // Read config
    SerializerConfiguration serConfig = SerializerConfiguration.FromByteArray(serializedData, out readBytes);
    // Update read pos
    nextReadPos += readBytes;

    // Assign original buffer
    byte[] dataBytes = serializedData;

    // Decompress when necessary: everything after the config header is one
    // compressed blob; after decompression, positions restart at 0 because
    // dataBytes no longer contains the config prefix.
    if (serConfig.IsCompressed)
    {
        // All data bytes
        List<byte> compressedBytes = new List<byte>(serializedData.Skip(readBytes));
        dataBytes = new List<byte>(CompressionFactory.GetCompressionEngine(serConfig.CompressionType).Decompress(compressedBytes.ToArray())).ToArray();
        nextReadPos = 0;
    }

    // Restore StorageFormatIds bytes (length prefix, then that many bytes)
    int storageSize = BitToolkit.DecodeSize(dataBytes, out readBytes, nextReadPos);
    nextReadPos += readBytes;
    StorageFormatIds.InitFromSerializedData(dataBytes.Skip(nextReadPos).Take(storageSize));
    nextReadPos += storageSize;

    // Restore StorageFormatData bytes
    storageSize = BitToolkit.DecodeSize(dataBytes, out readBytes, nextReadPos);
    nextReadPos += readBytes;
    StorageFormatData.InitFromSerializedData(dataBytes.Skip(nextReadPos).Take(storageSize));
    nextReadPos += storageSize;

    // Restore PackedData bytes
    storageSize = BitToolkit.DecodeSize(dataBytes, out readBytes, nextReadPos);
    nextReadPos += readBytes;
    PackedData.InitFromSerializedData(dataBytes.Skip(nextReadPos).Take(storageSize));
    nextReadPos += storageSize;

    // Update read positions: rewind all stores to the beginning for reading.
    PackedData.CachedValPosInStore = 0;
    StorageFormatIds.CachedValPosInStore = 0;
    StorageFormatData.CachedValPosInStore = 0;
}
/// <summary>
/// Builds the handler matching the requested command, or null for an
/// unrecognised command.
/// </summary>
/// <param name="command">Whether to compress or decompress.</param>
/// <param name="src">Source file path.</param>
/// <param name="dest">Destination file path.</param>
private static IGZipHandler CreateHandler(CommandType command, string src, string dest)
{
    if (command == CommandType.Compress)
    {
        return new CompressionHandler(new CompressionFactory(src, dest));
    }
    if (command == CommandType.Decompress)
    {
        return new DecompressionHandler(new DecompressionFactory(src, dest));
    }
    return null;
}
// BackgroundWorker entry point: decompresses m_compressedFile (gzip) into
// m_decompressedFile, reporting clamped progress and honouring cancellation.
private void m_worker_DoWork(object sender, DoWorkEventArgs e)
{
    FileInfo info = new FileInfo(m_compressedFile);
    long length = info.Length; // compressed size, used as the progress denominator
    using (Stream inStream = File.Open(m_compressedFile, FileMode.Open, FileAccess.ReadWrite, FileShare.None))
    using (Stream outStream = File.Open(m_decompressedFile, FileMode.Create, FileAccess.ReadWrite, FileShare.None))
    {
        try
        {
            using (CompressionStream bzis = CompressionFactory.Reader(CompressionFactory.Type.Gz, inStream))
            {
                byte[] buffer = new byte[4 * 1024];
                int bytesRead;
                while ((bytesRead = bzis.Read(buffer, 0, buffer.Length)) > 0)
                {
                    outStream.Write(buffer, 0, bytesRead);
                    // Position is compared against the compressed length, so the
                    // ratio can fall outside 0-100; clamp before reporting.
                    int percentage = (int)Math.Floor((double)bzis.Position * 100 / length);
                    if (percentage < 0)
                    {
                        m_worker.ReportProgress(0);
                    }
                    else if (percentage > 100)
                    {
                        m_worker.ReportProgress(100);
                    }
                    else
                    {
                        m_worker.ReportProgress(percentage);
                    }
                    // Co-operative cancellation between chunks.
                    if (m_worker.CancellationPending)
                    {
                        e.Cancel = true;
                        return;
                    }
                }
            }
        }
        finally
        {
            outStream.Flush();
        }
    }
}
/// <summary>
/// Decompresses the zipped resume package: starts 8 consumer tasks running
/// FormatResume, enqueues every mht document extracted from the package,
/// then waits for all consumers to complete.
/// </summary>
/// <param name="path">Path of the zip package to extract.</param>
// NOTE(review): the consumer tasks are started BEFORE the queue is populated;
// this assumes FormatResume polls/blocks until work arrives rather than
// exiting on an empty queue — confirm against FormatResume's implementation.
private void DecompressionResumeZip(string path)
{
    var tasks = new List<Task>();
    for (var i = 0; i < 8; i++)
    {
        tasks.Add(Task.Run(() => FormatResume()));
    }
    // Producer: feed each extracted mht document to the consumers.
    foreach (var doc in CompressionFactory.GetMhtSources(path))
    {
        resumeQueue.Enqueue(doc);
    }
    Task.WaitAll(tasks.ToArray());
}
/// <summary>
/// Opens the empty bzip2 test resource with a reader of the given archive
/// type and returns the concrete stream type the factory produced.
/// </summary>
public Type TestReaderGenerationWithFile(int archiveType)
{
    string target = TestUtils.GetTestResource("emptyfile.bz2");

    /*
     * Note: Reading a bzip2 file in as a byte[] works for gzip as well as bzip2 stream
     * as the implementation of bzip2 must be initialised with a string containing a
     * header, EOF etc. whereas gzip doesn't mind so the following will work despite
     * opening a gzip compression stream with a bzip2 data
     */
    byte[] resourceBytes = File.ReadAllBytes(target);
    using (MemoryStream ms = new MemoryStream(resourceBytes))
    using (var reader = CompressionFactory.Reader((CompressionFactory.Type)archiveType, ms))
    {
        return reader.GetType();
    }
}
/// <summary>
/// Wraps a compressed tar stream: a decompression stream of the requested
/// type is layered over the input, and a TarInputStream over that.
/// </summary>
/// <param name="compressedTarFile">The compressed tar data.</param>
/// <param name="compressionType">Gz or Bz2; anything else is rejected.</param>
/// <exception cref="NotSupportedException">If the compression type is not supported.</exception>
public SharpZipTarArchiveIterator(Stream compressedTarFile, CompressionFactory.Type compressionType)
{
    switch (compressionType)
    {
        case CompressionFactory.Type.Gz:
        case CompressionFactory.Type.Bz2:
            compressionStream = CompressionFactory.Reader(compressionType, compressedTarFile);
            break;

        default:
            throw new NotSupportedException($"Type {compressionType} is not supported by ArchiveIterator");
    }
    tarStream = new TarInputStream(compressionStream);
    disposed = false;
}
/// <summary>
/// Verifies that CompressionFactory.Writer returns the expected concrete
/// stream type for each supported compression type.
/// </summary>
public void TestWriterGeneration()
{
    Dictionary<CompressionFactory.Type, Type> validWriters = new Dictionary<CompressionFactory.Type, Type>()
    {
        { CompressionFactory.Type.Gz, typeof(DotNetZipGZipOutputStream) },
        { CompressionFactory.Type.Bz2, typeof(DotNetZipBZip2OutputStream) }
    };

    foreach (KeyValuePair<CompressionFactory.Type, Type> pair in validWriters)
    {
        using (MemoryStream ms = new MemoryStream())
        {
            using (CompressionStream providedStream = CompressionFactory.Writer(pair.Key, ms))
            {
                // Assert.AreEqual takes (expected, actual); the original had
                // the arguments swapped, which produces misleading failure messages.
                Assert.AreEqual(pair.Value, providedStream.GetType());
            }
        }
    }
}
/// <summary>
/// Instantiate a class that can read a archive type
/// </summary>
/// <param name="archiveType">Type of archive to read</param>
/// <param name="packagedData">The contents of packaged data</param>
/// <exception cref="NotSupportedException">if there is not a iterator for a specified archive type</exception>
/// <returns>ArchiveIterator to allow an archive to be traversed</returns>
public static ArchiveIterator Reader(Type archiveType, Stream packagedData)
{
    switch (archiveType)
    {
        case Type.Tar:
            // Plain tar: no decompression layer needed.
            return new SharpZipTarArchiveIterator(packagedData);

        case Type.TarGz:
            return new SharpZipTarArchiveIterator(CompressionFactory.Reader(CompressionFactory.Type.Gz, packagedData));

        case Type.TarBz2:
            return new SharpZipTarArchiveIterator(CompressionFactory.Reader(CompressionFactory.Type.Bz2, packagedData));

        case Type.Zip:
            return new DotNetZipZipIterator(packagedData);

        default:
            throw new NotSupportedException(String.Format("Type: {0} is not supported by ArchiveIterator", archiveType));
    }
}
// Open a file within the tar.
// Detects .gz/.bz2 extensions and layers the matching decompression stream
// under the tar reader; otherwise reads the tar directly.
public TarFileStream(string tarPath)
{
    _tarStream = File.OpenRead(tarPath);
    var extension = Path.GetExtension(tarPath);

    // OrdinalIgnoreCase: file extensions are not linguistic text. The
    // original String.Compare(a, b, true) is culture-sensitive and can
    // fail to match ".GZ" under e.g. the Turkish culture.
    if (string.Equals(extension, ".gz", StringComparison.OrdinalIgnoreCase))
    {
        _compressionStream = CompressionFactory.Reader(CompressionFactory.Type.Gz, _tarStream);
        _tarInputStream = ArchiveFactory.Reader(ArchiveFactory.Type.Tar, _compressionStream);
    }
    else if (string.Equals(extension, ".bz2", StringComparison.OrdinalIgnoreCase))
    {
        _compressionStream = CompressionFactory.Reader(CompressionFactory.Type.Bz2, _tarStream);
        _tarInputStream = ArchiveFactory.Reader(ArchiveFactory.Type.Tar, _compressionStream);
    }
    else
    {
        _tarInputStream = ArchiveFactory.Reader(ArchiveFactory.Type.Tar, _tarStream);
    }
}
// Constructor: wires up caches, compression defaults, serialization stores,
// and the well-known-type serializers.
public Serializer()
{
    RefObjectsCache = new RefTypeObjectsDictionary();
    ValObjectsCache = new ValueTypeObjectsDictionary();

    // Compression factory
    CompressionFactory = new CompressionFactory();
    ActiveCompressionType = CompressionType.Internal;
    ActiveCompressionLevel = 1; // Fast compression

    // Above this value we start compressing resulting stream
    CompressionThreshold = 100;

    // The three stores that make up the serialized layout
    // (see ToByteArray / InitStoresFromSerializedData).
    StorageFormatIds = new StorageFormatIdsSerStore();
    StorageFormatData = new StorageFormatDataSerStore();
    PackedData = new PackedDataSerStore();

    // Turn off caching by default
    UseCaching = false;

    // Instance serializers
    WKTSerializers = new WKTSerializers(SerializerStorage, ValObjectsCache);
}
/// <summary>
/// Creates a chunk reader from its three required collaborators.
/// </summary>
/// <param name="configuration">Decoding settings; must not be null.</param>
/// <param name="compressionFactory">Factory for decompression; must not be null.</param>
/// <param name="encryptionFactory">Factory for decryption; must not be null.</param>
/// <exception cref="ArgumentNullException">If any argument is null.</exception>
public ChunkReader(
    DecodingConfiguration configuration,
    CompressionFactory compressionFactory,
    EncryptionFactory encryptionFactory)
{
    // Fail fast on missing dependencies.
    if (configuration == null) { throw new ArgumentNullException("configuration"); }
    if (compressionFactory == null) { throw new ArgumentNullException("compressionFactory"); }
    if (encryptionFactory == null) { throw new ArgumentNullException("encryptionFactory"); }

    _configuration = configuration;
    _compressionFactory = compressionFactory;
    _encryptionFactory = encryptionFactory;
}
// Compresses or uncompresses every file referenced by the OVF envelope,
// updating each File_Type's href, compression marker and size in place.
// On decompress, the method is taken from the file's own compression
// attribute; unsupported or unmarked files are skipped.
private static void ProcessCompression(EnvelopeType env, string ovfPath, bool compress, Action cancellingDelegate, CompressionFactory.Type method = CompressionFactory.Type.Gz)
{
    if (env.References?.File == null)
    {
        return;
    }
    string path = Path.GetDirectoryName(ovfPath);
    foreach (File_Type file in env.References.File)
    {
        if (!compress)
        {
            // Decompressing: the per-file attribute decides the method.
            if (file.compression == null)
            {
                log.InfoFormat("File {0} was not marked as compressed, skipped.", file.href);
                continue;
            }
            if (file.compression.ToLower() == "gzip")
            {
                method = CompressionFactory.Type.Gz;
            }
            else if (file.compression.ToLower() == "bzip2")
            {
                method = CompressionFactory.Type.Bz2;
            }
            else
            {
                log.ErrorFormat("File {0} uses unsupported method {1}. Must be Gzip or BZip2. Skipping.", file.href, file.compression);
                continue;
            }
        }
        // hrefs may contain a relative directory part; keep it ("stem") so the
        // rewritten href stays in the same location.
        int slash = file.href.LastIndexOf('/');
        string stem = slash >= 0 ? file.href.Substring(0, slash + 1) : "";
        string filePath = Path.Combine(path, slash >= 0 ? file.href.Substring(slash + 1) : file.href);
        string tempfile = Path.Combine(path, Path.GetRandomFileName());
        try
        {
            // Work via a temp file, then swap it into place under the
            // extension-adjusted name.
            if (compress)
            {
                CompressionFactory.CompressFile(filePath, tempfile, method, cancellingDelegate);
            }
            else
            {
                CompressionFactory.UncompressFile(filePath, tempfile, method, cancellingDelegate);
            }
            File.Delete(filePath);
            var ext = method.FileExtension();
            if (compress)
            {
                filePath += ext;
            }
            else if (filePath.EndsWith(ext))
            {
                filePath = filePath.Substring(0, filePath.Length - ext.Length);
            }
            File.Move(tempfile, filePath);
            // Keep the envelope metadata consistent with the file on disk.
            file.href = stem + Path.GetFileName(filePath);
            file.compression = compress ? method.StringOf() : null;
            FileInfo fi = new FileInfo(filePath);
            file.size = (ulong)fi.Length;
        }
        catch (EndOfStreamException eose)
        {
            // Deliberate best-effort: log and continue with the next file.
            log.Error("End of Stream: ", eose);
        }
        finally
        {
            try
            {
                File.Delete(tempfile);
            }
            catch
            {
                //ignore
            }
        }
    }
}
// Serialization of internal data to byte array (Final step of serialization).
// Output layout: [SerializerConfiguration][payload], where the payload is three
// length-prefixed sections (StorageFormatIds, StorageFormatData, PackedData) and
// is compressed as one blob when the estimated size exceeds CompressionThreshold.
// Must stay in sync with InitStoresFromSerializedData.
public byte[] ToByteArray()
{
    int StorageFormatIds_ByteArraySize = StorageFormatIds.GetByteArraySize();
    int StorageFormatData_ByteArraySize = StorageFormatData.GetByteArraySize();
    int PackedData_ByteArraySize = PackedData.ByteArraySize;
    // Encoded length prefixes for each section.
    byte[] storageFormatIdsSizeBytes = BitToolkit.EncodeSize(StorageFormatIds_ByteArraySize);
    byte[] storageFormatDataSizeBytes = BitToolkit.EncodeSize(StorageFormatData_ByteArraySize);
    byte[] packedDataSizeBytes = BitToolkit.EncodeSize(PackedData_ByteArraySize);
    // Storage estimated size
    int estimatedSize = StorageFormatIds_ByteArraySize + StorageFormatData_ByteArraySize + PackedData_ByteArraySize;
    // Serializer configuration
    SerializerConfiguration serConfig = new SerializerConfiguration();
    serConfig.SerializerFormatVersion = m_SerializerFormatVersion;
    // Compress if we have necessary data already
    if (ActiveCompressionType != CompressionType.NoCompression && estimatedSize > CompressionThreshold)
    {
        serConfig.CompressionType = ActiveCompressionType;
    }
    // Should we compress data?
    if (serConfig.IsCompressed)
    {
        // Aggregate the three sections (with their length prefixes) into one
        // buffer, which is then compressed as a whole.
        byte[] rawDataAggregated = new byte[storageFormatIdsSizeBytes.Length + storageFormatDataSizeBytes.Length + packedDataSizeBytes.Length + StorageFormatIds_ByteArraySize + StorageFormatData_ByteArraySize + PackedData_ByteArraySize];
        int destIndex = 0;
        // Store StorageFormatIds bytes
        byte[] tmpArray = storageFormatIdsSizeBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        StorageFormatIds.StoreBytesInByteArray(rawDataAggregated, destIndex);
        destIndex += StorageFormatIds_ByteArraySize;
        // Store StorageFormatData bytes
        tmpArray = storageFormatDataSizeBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        StorageFormatData.StoreBytesInByteArray(rawDataAggregated, destIndex);
        destIndex += StorageFormatData_ByteArraySize;
        // Store PackedData bytes
        tmpArray = packedDataSizeBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        PackedData.StoreBytesInByteArray(rawDataAggregated, destIndex);
        destIndex += PackedData_ByteArraySize;
        // Compressed result
        byte[] compressedContent = CompressionFactory.GetCompressionEngine(ActiveCompressionType).Compress(rawDataAggregated, ActiveCompressionLevel);
        // Encode config (the config itself is never compressed, so the reader
        // can decode it before knowing whether the payload is compressed).
        byte[] serConfigBytes = serConfig.ToByteArray();
        // Buffer for final result
        byte[] result = new byte[serConfigBytes.Length + compressedContent.Length];
        // Copy config info
        Array.Copy(serConfigBytes, 0, result, 0, serConfigBytes.Length);
        // Copy compressed content
        Array.Copy(compressedContent, 0, result, serConfigBytes.Length, compressedContent.Length);
        return(result);
    }
    else
    {
        // No compression necessary - we store raw data
        // Encode config
        byte[] serConfigBytes = serConfig.ToByteArray();
        byte[] rawDataAggregated = new byte[serConfigBytes.Length + storageFormatIdsSizeBytes.Length + storageFormatDataSizeBytes.Length + packedDataSizeBytes.Length + StorageFormatIds_ByteArraySize +
            StorageFormatData_ByteArraySize + PackedData_ByteArraySize];
        int destIndex = 0;
        // Store Serialization Configuration bytes
        byte[] tmpArray = serConfigBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        // Store StorageFormatIds bytes
        tmpArray = storageFormatIdsSizeBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        StorageFormatIds.StoreBytesInByteArray(rawDataAggregated, destIndex);
        destIndex += StorageFormatIds_ByteArraySize;
        // Store StorageFormatData bytes
        tmpArray = storageFormatDataSizeBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        StorageFormatData.StoreBytesInByteArray(rawDataAggregated, destIndex);
        destIndex += StorageFormatData_ByteArraySize;
        // Store PackedData bytes
        tmpArray = packedDataSizeBytes;
        Array.Copy(tmpArray, 0, rawDataAggregated, destIndex, tmpArray.Length);
        destIndex += tmpArray.Length;
        PackedData.StoreBytesInByteArray(rawDataAggregated, destIndex);
        destIndex += PackedData_ByteArraySize;
        return(rawDataAggregated);
    }
}