// Integration test: builds an in-memory .tar.gz whose single entry path is
// "<container>/<file>" and uploads it via ExtractArchive with an empty upload
// path; per the test name, the provider is expected to create the container
// named in the entry path. The object is then downloaded and verified by MD5.
public void TestExtractArchiveTarGzCreateContainer()
{
    CloudFilesProvider provider = (CloudFilesProvider)Bootstrapper.CreateObjectStorageProvider();
    // Random suffix keeps concurrent test runs from colliding on container names.
    string containerName = TestContainerPrefix + Path.GetRandomFileName();
    string sourceFileName = "DarkKnightRises.jpg";
    byte[] content = File.ReadAllBytes("DarkKnightRises.jpg");
    using (MemoryStream outputStream = new MemoryStream())
    {
        using (GZipOutputStream gzoStream = new GZipOutputStream(outputStream))
        {
            // Leave outputStream open so it can be rewound and uploaded below.
            gzoStream.IsStreamOwner = false;
            gzoStream.SetLevel(9);
            using (TarOutputStream tarOutputStream = new TarOutputStream(gzoStream))
            {
                tarOutputStream.IsStreamOwner = false;
                // Entry path embeds the container name; presumably the server
                // derives the target container from it — confirm against provider docs.
                TarEntry entry = TarEntry.CreateTarEntry(containerName + '/' + sourceFileName);
                entry.Size = content.Length;
                tarOutputStream.PutNextEntry(entry);
                tarOutputStream.Write(content, 0, content.Length);
                tarOutputStream.CloseEntry();
                tarOutputStream.Close();
            }
        }
        outputStream.Flush();
        outputStream.Position = 0;
        ExtractArchiveResponse response = provider.ExtractArchive(outputStream, "", ArchiveFormat.TarGz);
        Assert.IsNotNull(response);
        Assert.AreEqual(1, response.CreatedFiles);
        Assert.IsNotNull(response.Errors);
        Assert.AreEqual(0, response.Errors.Count);
    }
    using (MemoryStream downloadStream = new MemoryStream())
    {
        provider.GetObject(containerName, sourceFileName, downloadStream, verifyEtag: true);
        Assert.AreEqual(content.Length, GetContainerObjectSize(provider, containerName, sourceFileName));
        downloadStream.Position = 0;
        byte[] actualData = new byte[downloadStream.Length];
        downloadStream.Read(actualData, 0, actualData.Length);
        Assert.AreEqual(content.Length, actualData.Length);
        // Compare MD5 digests to confirm the round-tripped bytes are identical.
        using (MD5 md5 = MD5.Create())
        {
            byte[] contentMd5 = md5.ComputeHash(content);
            byte[] actualMd5 = md5.ComputeHash(actualData);
            Assert.AreEqual(BitConverter.ToString(contentMd5), BitConverter.ToString(actualMd5));
        }
    }
    /* Cleanup */
    provider.DeleteContainer(containerName, deleteObjects: true);
}
// Round-trip test: compresses PlainText with GZipOutputStream, decompresses it
// with GZipInputStream, and asserts the text survives unchanged.
public void GZip_Compress_Extract_Test()
{
    // Fix: the original created (and never disposed) an unused stream via
    // PlainText.ToStream() and seeked it; only the UTF-8 bytes are needed.
    var plainData = Encoding.UTF8.GetBytes(PlainText);

    byte[] compressedData;
    byte[] extractedData;

    // Compress
    using (var compressedStream = new MemoryStream())
    using (var gzs = new GZipOutputStream(compressedStream))
    {
        gzs.SetLevel(5);
        gzs.Write(plainData, 0, plainData.Length);
        // Finish() writes the gzip trailer without closing compressedStream.
        gzs.Finish();
        compressedData = compressedStream.ToArray();
    }
    Assert.IsNotNull(compressedData);

    // Extract
    using (var compressedStream = new MemoryStream(compressedData))
    {
        using (var gzs = new GZipInputStream(compressedStream))
        using (var extractedStream = new MemoryStream())
        {
            StreamTool.CopyStreamToStream(gzs, extractedStream);
            extractedData = extractedStream.ToArray();
        }
    }
    Assert.IsNotNull(extractedData);

    string extractedText = Encoding.UTF8.GetString(extractedData).TrimEnd('\0');
    Assert.AreEqual(PlainText, extractedText);
}
/// <summary>
/// Compresses the specified data with GZip.
/// </summary>
/// <param name="input">Data to compress</param>
/// <returns>Compressed data</returns>
public override byte[] Compress(byte[] input)
{
    if(IsDebugEnabled)
        log.Debug(CompressorTool.SR.CompressStartMsg);

    // Null/empty input: nothing to compress, return the shared empty array.
    if(input.IsZeroLength())
    {
        if(IsDebugEnabled)
            log.Debug(CompressorTool.SR.InvalidInputDataMsg);
        return CompressorTool.EmptyBytes;
    }

    byte[] result;
    using(var buffer = new MemoryStream(input.Length))
    {
        using(var gzipStream = new GZipOutputStream(buffer))
        {
            gzipStream.SetLevel(ZipLevel);
            gzipStream.Write(input, 0, input.Length);
            gzipStream.Finish();
        }
        result = buffer.ToArray();
    }

    if(IsDebugEnabled)
        log.Debug(CompressorTool.SR.CompressResultMsg, input.Length, result.Length, result.Length / (double)input.Length);

    return result;
}
private const int COMPRESS_LEVEL = 7; // 0-9, 9 being the highest compression
#endregion
#region ICompressionProvider Members
/// <summary>
/// GZip-compresses <paramref name="data"/> and returns the compressed bytes.
/// </summary>
public byte[] Compress(byte[] data)
{
    using (var resultStream = new MemoryStream())
    {
        using (var gzipStream = new GZipOutputStream(resultStream))
        {
            gzipStream.SetLevel(COMPRESS_LEVEL);
            gzipStream.Write(data, 0, data.Length);
            gzipStream.Finish();
            gzipStream.Close();
            // MemoryStream.ToArray is documented to work after the stream is closed.
            return resultStream.ToArray();
        }
    }
}
/// <summary>
/// GZip-compresses the file at <paramref name="inFilePath"/> into a new file
/// at <paramref name="outFilePath"/> (created/overwritten).
/// </summary>
/// <param name="inFilePath">Path of the file to read.</param>
/// <param name="outFilePath">Path of the compressed file to write.</param>
public void CompressFile(string inFilePath, string outFilePath)
{
    // Fix: removed a large commented-out duplicate implementation of the same
    // copy loop; StreamUtils.Copy below does the buffered copy and the usings
    // flush/close the streams.
    using (FileStream inFileStream = Util.IO.OpenFileStreamForReading(inFilePath))
    using (FileStream outFileStream = new FileStream(outFilePath, FileMode.Create, FileAccess.Write))
    using (var compressStream = new GZipOutputStream(outFileStream))
    {
        compressStream.SetLevel(COMPRESS_LEVEL);
        byte[] buffer = new byte[BUFFER_SIZE];
        ICSharpCode.SharpZipLib.Core.StreamUtils.Copy(inFileStream, compressStream, buffer);
    }
}
/// <summary>
/// GZip-compresses the file at <paramref name="inputPath"/> into a new file at
/// <paramref name="outputPath"/> (created/overwritten).
/// </summary>
/// <param name="inputPath">Path of the file to read.</param>
/// <param name="outputPath">Path of the compressed file to write.</param>
/// <param name="compressionLevel">Deflate level, 0 (store) to 9 (best).</param>
public static void GzipCompressFile(string inputPath, string outputPath, int compressionLevel)
{
    using (var inputStream = new FileStream(inputPath, FileMode.Open, FileAccess.Read))
    using (var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write))
    using (var gzipStream = new GZipOutputStream(outputStream))
    {
        gzipStream.SetLevel(compressionLevel);
        byte[] data = new byte[_defaultBufferSize];
        int size;
        // Fix: while-loop instead of do/while avoids the redundant
        // zero-length Write that the original issued on EOF.
        while ((size = inputStream.Read(data, 0, data.Length)) > 0)
        {
            gzipStream.Write(data, 0, size);
        }
    }
}
/// <summary>
/// Command entry point: gzip-compresses InputFile into OutputFile.
/// Returns 0 on success, 1 on any failure (logged to the console error stream).
/// </summary>
public int OnExecute(IConsole console)
{
    try
    {
        using (var input = Util.OpenInputStream(InputFile))
        using (var output = Util.OpenOutputStream(OutputFile, true))
        using (var gzip = new ICSharpCode.SharpZipLib.GZip.GZipOutputStream(output))
        {
            gzip.SetLevel(Level);
            input.CopyTo(gzip);
        }
    }
    catch (Exception e)
    {
        console.Error.WriteLine($"failed gzip compression:{e}");
        return 1;
    }
    return 0;
}
/// <summary>
/// Compress the <paramref name="inStream">input stream</paramref> sending
/// result data to <paramref name="outStream">output stream</paramref>
/// </summary>
/// <param name="inStream">The readable stream to compress.</param>
/// <param name="outStream">The output stream to receive the compressed data.</param>
/// <param name="isStreamOwner">Both streams are closed on completion if true.</param>
/// <param name="bufferSize">Deflate buffer size, minimum 512</param>
/// <param name="level">Deflate compression level, 0-9</param>
/// <exception cref="ArgumentNullException">Input or output stream is null</exception>
/// <exception cref="ArgumentOutOfRangeException">Buffer Size is smaller than 512</exception>
/// <exception cref="ArgumentOutOfRangeException">Compression level outside 0-9</exception>
public static void Compress(Stream inStream, Stream outStream, bool isStreamOwner, int bufferSize = 512, int level = 6)
{
    // Guard clauses; nameof keeps the parameter names refactor-safe while the
    // resulting exception messages stay identical.
    if (inStream == null)
        throw new ArgumentNullException(nameof(inStream), "Input stream is null");
    if (outStream == null)
        throw new ArgumentNullException(nameof(outStream), "Output stream is null");
    if (bufferSize < 512)
        throw new ArgumentOutOfRangeException(nameof(bufferSize), "Deflate buffer size must be >= 512");
    if (level < Zip.Compression.Deflater.NO_COMPRESSION || level > Zip.Compression.Deflater.BEST_COMPRESSION)
        throw new ArgumentOutOfRangeException(nameof(level), "Compression level must be 0-9");

    try
    {
        using (GZipOutputStream gzipOutput = new GZipOutputStream(outStream, bufferSize))
        {
            gzipOutput.SetLevel(level);
            gzipOutput.IsStreamOwner = isStreamOwner;
            Core.StreamUtils.Copy(inStream, gzipOutput, new byte[bufferSize]);
        }
    }
    finally
    {
        // outStream is closed by the GZipOutputStream if stream owner;
        // the input stream is our responsibility.
        if (isStreamOwner)
        {
            inStream.Dispose();
        }
    }
}
/// <summary>
/// GZip-compresses <paramref name="value"/> (level 9) and appends the
/// compressed bytes to the file at _filePath, serialized by _lock.
/// Triggers Flush() when the file grows past MaxBufferSize.
/// </summary>
public override void Write(byte[] value)
{
    // Compress outside the lock; only file access needs serializing.
    byte[] compressedData;
    using (var compressedStream = new MemoryStream())
    {
        using (var inputStream = new MemoryStream(value))
        using (var compressionStream = new GZipOutputStream(compressedStream))
        {
            compressionStream.SetLevel(9);
            inputStream.CopyTo(compressionStream);
            compressionStream.Flush();
        }
        // Disposing the GZipOutputStream wrote the gzip trailer; ToArray is
        // valid on a closed MemoryStream.
        compressedData = compressedStream.ToArray();
    }

    // Fix: the original's finally block called _lock.Release() even when an
    // exception was thrown BEFORE _lock.Wait() (e.g. during compression),
    // corrupting the semaphore count. Wait now happens outside the try, so
    // Release runs only after a successful acquisition.
    _lock.Wait();
    try
    {
        using (var outputStream = File.OpenWrite(_filePath))
        {
            // Append: OpenWrite does not truncate.
            outputStream.Position = outputStream.Length;
            outputStream.Write(compressedData, 0, compressedData.Length);
            outputStream.Flush();
        }
    }
    finally
    {
        _lock.Release();
    }

    var info = new FileInfo(_filePath);
    var bufferSize = info.Length;
    if (bufferSize > MaxBufferSize)
    {
        Flush();
    }
}
/// <summary>
/// Creates the Transformation Block to GZip compress.
/// </summary>
/// <param name="level">The compression level.</param>
/// <param name="options">The options.</param>
/// <returns>TransformBlock&lt;System.Byte[], System.Byte[]&gt;.</returns>
public static TransformBlock<byte[], byte[]> Create(int level, ExecutionDataflowBlockOptions options)
{
    return new TransformBlock<byte[], byte[]>(payload =>
    {
        using (var compressed = new MemoryStream())
        {
            // Timed scope measures only the compression itself.
            using (var source = new MemoryStream(payload))
            using (var gzip = new GZipOutputStream(compressed))
            using (Logger.BeginTimedOperation("GZipOutputStream", null, LogEventLevel.Debug))
            {
                gzip.SetLevel(level);
                source.CopyTo(gzip);
                gzip.Flush();
            }
            var bytes = compressed.ToArray();
            Logger.Debug("Compressed {InputSize} bytes to {OutputSize} ({Compression:00}%)", payload.Length, bytes.Length, bytes.Length * 100M / payload.Length);
            return bytes;
        }
    }, options);
}
/// <summary>
/// GZip-compresses the field (level 9) into <paramref name="stream"/>.
/// The stream is closed on completion (IsStreamOwner = true).
/// </summary>
public override void Save(Stream stream)
{
    // Fix: the original never finished/disposed the GZipOutputStream, so the
    // gzip trailer was never written and the output was truncated/corrupt.
    // The using block flushes and, per the explicit IsStreamOwner = true,
    // closes the underlying stream as the original declared it would.
    using (GZipOutputStream gzipStream = new GZipOutputStream(stream))
    {
        gzipStream.IsStreamOwner = true;
        gzipStream.SetLevel(9);
        field.Save(gzipStream);
    }
}
/// <summary>
/// Compress a given file into Tar archive.
/// </summary>
public void Compress()
{
    Common.ValidateOverwrite(this.overwriteTarget, this.target);
    DirectoryInfo dir = new DirectoryInfo(Path.GetDirectoryName(this.source));
    this.Log(String.Format("Archiving: [{0}] -> [{1}].", this.source, this.target), LogLevel.Minimal);

    // Wrap the tar stream in the requested compressor, if any.
    switch (tarLevel)
    {
        case TarCompressionLevel.None:
            using (Stream outStream = File.Create(this.target))
            {
                ArchiveFile(outStream, dir);
            }
            break;

        case TarCompressionLevel.BZip2:
            using (BZip2OutputStream bz2Stream = new BZip2OutputStream(File.Create(this.target), 9))
            {
                ArchiveFile(bz2Stream, dir);
            }
            break;

        case TarCompressionLevel.GZip:
            using (GZipOutputStream gzoStream = new GZipOutputStream(File.Create(this.target)))
            {
                gzoStream.SetLevel(9);
                ArchiveFile(gzoStream, dir);
            }
            break;
    }

    this.Log(String.Format("Successfully Archived: [{0}] -> [{1}].", this.source, this.target), LogLevel.Minimal);
    Common.RemoveFile(this.removeSource, this.source);
}
/// <summary>
/// Read data direct from drive to file
/// </summary>
/// <param name="driveLetter">Logical drive letter of the source drive.</param>
/// <param name="fileName">Destination image file path.</param>
/// <param name="eCompType">Compression applied to the image (Zip/Gzip/Targzip/XZ or none).</param>
/// <param name="bUseMBR">If true, parse the MBR in the first block and truncate the read to the partitioned size.</param>
/// <returns>true when the read loop was entered (including user cancellation); false on any setup failure.</returns>
public bool ReadDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool bUseMBR)
{
    IsCancelling = false;
    var dtStart = DateTime.Now;
    //
    // Map to physical drive
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if(string.IsNullOrEmpty(physicalDrive))
    {
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }
    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return false;
    }
    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if(driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return false;
    }
    // readSize may shrink later if the MBR reports a smaller partitioned size.
    var readSize = driveSize;
    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }
    //
    // Start doing the read
    //
    var buffer = new byte[Globals.MaxBufferSize];
    var offset = 0L;
    using(var basefs = (Stream)new FileStream(fileName, FileMode.Create, FileAccess.Write))
    {
        // Pick the output stream: the raw file stream, or a compressing
        // wrapper around it chosen by eCompType. Each wrapper owns basefs.
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zfs = new ZipOutputStream(basefs);
                // Default to middle of the range compression
                zfs.SetLevel(Globals.CompressionLevel);
                var fi = new FileInfo(fileName);
                var entryName = fi.Name;
                entryName = entryName.ToLower().Replace(".zip", "");
                entryName = ZipEntry.CleanName(entryName);
                var zipEntry = new ZipEntry(entryName) {DateTime = fi.LastWriteTime};
                zfs.IsStreamOwner = true;
                // Todo: Consider whether size needs setting for older utils ?
                zfs.PutNextEntry(zipEntry);
                fs = zfs;
                break;
            case EnumCompressionType.Gzip:
                var gzis = new GZipOutputStream(basefs);
                gzis.SetLevel(Globals.CompressionLevel);
                gzis.IsStreamOwner = true;
                fs = gzis;
                break;
            case EnumCompressionType.Targzip:
                // Tar entry is written later, once readSize is final (after
                // the possible MBR truncation on the first block).
                var gzos = new GZipOutputStream(basefs);
                gzos.SetLevel(Globals.CompressionLevel);
                gzos.IsStreamOwner = true;
                var tos = new TarOutputStream(gzos);
                fs = tos;
                break;
            case EnumCompressionType.XZ:
                var xzs = new XZOutputStream(basefs);
                fs = xzs;
                break;
            default:
                // No compression - direct to file stream
                fs = basefs;
                break;
        }
        while (offset < readSize && !IsCancelling)
        {
            // NOTE: If we provide a buffer that extends past the end of the physical device ReadFile() doesn't
            //       seem to do a partial read. Deal with this by reading the remaining bytes at the end of the
            //       drive if necessary
            var readMaxLength = (int) ((((ulong) readSize - (ulong) offset) < (ulong) buffer.Length) ? ((ulong) readSize - (ulong) offset) : (ulong) buffer.Length);
            int readBytes;
            if (_diskAccess.Read(buffer, readMaxLength, out readBytes) < 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive__ + Marshal.GetHRForLastWin32Error());
                goto readfail1;
            }
            if (readBytes == 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive___past_EOF_);
                goto readfail1;
            }
            // Check MBR
            if (bUseMBR && offset == 0)
            {
                // First block: optionally trust the MBR's partition table to
                // shrink readSize so only the partitioned area is imaged.
                var truncatedSize = ParseMBRForSize(buffer);
                if(truncatedSize > driveSize)
                {
                    LogMsg(Resources.Disk_ReadDrive_Problem_with_filesystem__It_reports_it_is_larger_than_the_disk_);
                    goto readfail1;
                }
                if(truncatedSize == 0)
                {
                    LogMsg(Resources.Disk_ReadDrive_No_valid_partitions_on_drive);
                    goto readfail1;
                }
                readSize = truncatedSize;
            }
            if(offset == 0)
            {
                switch (eCompType)
                {
                    case EnumCompressionType.Targzip:
                        // Tar entry size must be known up front, hence the
                        // deferred PutNextEntry after readSize is final.
                        var fi = new FileInfo(fileName);
                        var entryName = fi.Name;
                        entryName = entryName.ToLower().Replace(".tar.gz", "");
                        entryName = entryName.ToLower().Replace(".tgz", "");
                        var tarEntry = TarEntry.CreateTarEntry(entryName);
                        tarEntry.Size = readSize;
                        tarEntry.ModTime = DateTime.SpecifyKind(fi.LastWriteTime, DateTimeKind.Utc);
                        ((TarOutputStream) fs).PutNextEntry(tarEntry);
                        break;
                }
            }
            fs.Write(buffer, 0, readBytes);
            offset += (uint) readBytes;
            // Progress/throughput reporting.
            var percentDone = (int) (100*offset/readSize);
            var tsElapsed = DateTime.Now.Subtract(dtStart);
            var bytesPerSec = offset/tsElapsed.TotalSeconds;
            Progress(percentDone);
            LogMsg(Resources.Disk_ReadDrive_Read + @": " + (offset / Globals.MbModifier) + @" / " + (readSize / Globals.MbModifier) + @" MB " + @"(" + Resources.Disk_ReadDrive_Physical + @": " + (driveSize / Globals.MbModifier) + " MB); " + string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " + Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss"));
        }
        // Finish the compressed container explicitly for each wrapper type.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        else if (fs is TarOutputStream)
        {
            ((TarOutputStream) fs).CloseEntry();
            fs.Close();
        }
        else if (fs is GZipOutputStream)
        {
            fs.Close();
        }
        else if (fs is XZOutputStream)
        {
            fs.Close();
        }
    }
// Error path: jumping here skips the stream-close code above, so a partially
// written compressed image may lack its trailer — NOTE(review): confirm this
// is acceptable for the failure case.
readfail1:
    _diskAccess.Close();
    _diskAccess.UnlockDrive();
    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    else
        LogMsg(Resources.Disk_ReadDrive_All_Done_Read + @" " + offset + @" " + Resources.Disk_WriteDrive_bytes + @". " + Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    Progress(0);
    return true;
}
/// <summary>
/// GZip-compresses <paramref name="buffer"/> (level 9) and returns the result
/// prefixed with the original length as a 4-byte little-endian header, so the
/// decompressor can pre-size its output buffer.
/// </summary>
public static byte[] Compress(byte[] buffer)
{
    Log.DebugFormat("Initial buffer size {0}", buffer.Length);

    byte[] compressed;
    // Fix: the original leaked the MemoryStream (never disposed) and copied
    // its contents via Position=0 + Read; ToArray() does that in one step.
    using (MemoryStream ms = new MemoryStream())
    {
        // IsStreamOwner = false keeps ms usable after the gzip stream closes.
        using (GZipOutputStream zip = new GZipOutputStream(ms) { IsStreamOwner = false })
        {
            zip.SetLevel(9);
            zip.Write(buffer, 0, buffer.Length);
            zip.Close();
        }
        compressed = ms.ToArray();
    }
    Log.DebugFormat("Compressed buffer size {0}", compressed.Length);

    // 4-byte uncompressed-length header followed by the gzip payload.
    byte[] gzBuffer = new byte[compressed.Length + 4];
    Buffer.BlockCopy(compressed, 0, gzBuffer, 4, compressed.Length);
    Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, gzBuffer, 0, 4);
    return gzBuffer;
}
// Writes the buffered uncompressed data to `output` as one length-prefixed,
// gzip-compressed record: a 4-byte (UInt32) uncompressed length, then the
// gzip stream.
// NOTE(review): the BinaryWriter and GZipOutputStream are deliberately not
// disposed — disposing either would presumably close `output`, which the
// caller still owns; Finish() flushes the gzip trailer without closing.
// Confirm with callers that `output` must stay open after flush().
public void flush()
{
    byte[] uncompressed_bytes = uncompressed_output.ToArray();
    // write uncompressed size to output
    new BinaryWriter(output).Write((UInt32)uncompressed_bytes.Length);
    // write the compressed data
    GZipOutputStream zipstream = new GZipOutputStream(this.output);
    zipstream.SetLevel(1); // 0 is no compression, 9 is best compression (slowest)
    zipstream.Write(uncompressed_bytes, 0, uncompressed_bytes.Length);
    zipstream.Finish();
}
/// <summary>
/// Serializes every voxel (in x-major, then y, then z order) into one byte
/// array and returns it GZip-compressed at level 1 (fastest).
/// </summary>
public byte[] ToCompressedByteArray()
{
    byte[] streambuff = new byte[(XSize * YSize * ZSize) * Voxel.BYTE_SIZE];
    int o = 0;
    for (int x = 0; x < XSize; x++)
        for (int y = 0; y < YSize; y++)
            for (int z = 0; z < ZSize; z++)
            {
                // Fix: the original pre-allocated a throwaway byte[] that was
                // immediately overwritten, and copied byte-by-byte in a loop;
                // Buffer.BlockCopy does the same copy in one call.
                byte[] buffer = Voxels[x + XSize * (y + YSize * z)].ToBytes();
                Buffer.BlockCopy(buffer, 0, streambuff, o, Voxel.BYTE_SIZE);
                o += Voxel.BYTE_SIZE;
            }

    byte[] returnArray;
    using (var ms = new MemoryStream())
    {
        using (var gzs = new GZipOutputStream(ms))
        {
            // Level 1: speed over ratio.
            gzs.SetLevel(1);
            gzs.Write(streambuff, 0, streambuff.Length);
        }
        // Disposing gzs wrote the gzip trailer; ToArray is valid on a
        // closed MemoryStream.
        returnArray = ms.ToArray();
    }
    return returnArray;
}
// Creates a .tar.gz writer for the given path: file -> gzip (level 3, a
// speed-oriented setting) -> tar. The returned TarOutputStream owns the chain.
private TarOutputStream newTarOutputStream(string filename)
{
    var fileStream = File.Create(filename);
    var compressor = new GZipOutputStream(fileStream);
    compressor.SetLevel(3);
    return new TarOutputStream(compressor);
}