// Writes one filesystem object (and, for directories, all of its children,
// recursively) into the tar stream. Entry names are fso's path made relative
// to this object's FullName.
private void WriteObjectToTar (TarOutputStream tar_out, FileSystemObject fso, EventTracker tracker)
{
	TarHeader header = new TarHeader ();
	// Strip "<this.FullName>/" off the front to get the in-archive name.
	header.Name = fso.FullName.Substring (this.FullName.Length + 1);
	header.ModTime = fso.Timestamp;

	byte [] payload = null;
	if (fso is DirectoryObject) {
		header.Mode = 511; // 0777
		header.TypeFlag = TarHeader.LF_DIR;
		header.Size = 0;
	} else {
		header.Mode = 438; // 0666
		header.TypeFlag = TarHeader.LF_NORMAL;
		// Buffer the file's content so its exact size is known before the
		// header is emitted; dispose the buffer even if AddToStream throws.
		using (MemoryStream memory = new MemoryStream ()) {
			((FileObject) fso).AddToStream (memory, tracker);
			payload = memory.ToArray ();
		}
		header.Size = payload.Length;
	}

	TarEntry entry = new TarEntry (header);
	tar_out.PutNextEntry (entry);
	if (payload != null)
		tar_out.Write (payload, 0, payload.Length);
	tar_out.CloseEntry ();

	// If this is a directory, write out the children
	if (fso is DirectoryObject)
		foreach (FileSystemObject child in fso.Children)
			WriteObjectToTar (tar_out, child, tracker);
}
/// <summary>
/// Integration test: builds a .tar.gz in memory whose single entry path starts with a
/// not-yet-existing container name, calls ExtractArchive, and verifies the container is
/// created and the object round-trips intact (size and MD5), then cleans up.
/// </summary>
public void TestExtractArchiveTarGzCreateContainer()
{
    CloudFilesProvider provider = (CloudFilesProvider)Bootstrapper.CreateObjectStorageProvider();
    string containerName = TestContainerPrefix + Path.GetRandomFileName();
    string sourceFileName = "DarkKnightRises.jpg";
    // NOTE(review): reads the literal path rather than sourceFileName — keep in sync if renamed.
    byte[] content = File.ReadAllBytes("DarkKnightRises.jpg");
    using (MemoryStream outputStream = new MemoryStream())
    {
        using (GZipOutputStream gzoStream = new GZipOutputStream(outputStream))
        {
            // Leave outputStream open so it can be rewound and uploaded after compression.
            gzoStream.IsStreamOwner = false;
            gzoStream.SetLevel(9);
            using (TarOutputStream tarOutputStream = new TarOutputStream(gzoStream))
            {
                tarOutputStream.IsStreamOwner = false;
                // Entry path "container/file" tells the extract endpoint which container to create.
                TarEntry entry = TarEntry.CreateTarEntry(containerName + '/' + sourceFileName);
                entry.Size = content.Length;
                tarOutputStream.PutNextEntry(entry);
                tarOutputStream.Write(content, 0, content.Length);
                tarOutputStream.CloseEntry();
                tarOutputStream.Close();
            }
        }

        outputStream.Flush();
        outputStream.Position = 0;
        // Empty upload path: container names are taken from the entry paths inside the archive.
        ExtractArchiveResponse response = provider.ExtractArchive(outputStream, "", ArchiveFormat.TarGz);
        Assert.IsNotNull(response);
        Assert.AreEqual(1, response.CreatedFiles);
        Assert.IsNotNull(response.Errors);
        Assert.AreEqual(0, response.Errors.Count);
    }

    using (MemoryStream downloadStream = new MemoryStream())
    {
        provider.GetObject(containerName, sourceFileName, downloadStream, verifyEtag: true);
        Assert.AreEqual(content.Length, GetContainerObjectSize(provider, containerName, sourceFileName));

        downloadStream.Position = 0;
        byte[] actualData = new byte[downloadStream.Length];
        downloadStream.Read(actualData, 0, actualData.Length);
        Assert.AreEqual(content.Length, actualData.Length);

        // Compare MD5 digests to confirm the downloaded bytes match the original exactly.
        using (MD5 md5 = MD5.Create())
        {
            byte[] contentMd5 = md5.ComputeHash(content);
            byte[] actualMd5 = md5.ComputeHash(actualData);
            Assert.AreEqual(BitConverter.ToString(contentMd5), BitConverter.ToString(actualMd5));
        }
    }

    /* Cleanup */
    provider.DeleteContainer(containerName, deleteObjects: true);
}
// Builds the tar writer over the caller-supplied destination stream. The
// caller keeps ownership of outputStream, so the writer must not dispose it.
public TarCore(CloudBlobClient blobClient, Stream outputStream)
    : base(blobClient, outputStream)
{
    _archiveStream = new TarOutputStream(outputStream) { IsStreamOwner = false };
}
// Output-side constructor: remembers the tar stream that entries are written to.
protected TarArchive(TarOutputStream stream)
{
    // Reject a missing output stream up front.
    if (null == stream) {
        throw new ArgumentNullException("stream");
    }

    tarOut = stream;
}
/// <summary>Initialise the archive for output through <paramref name="stream"/>.</summary>
protected TarArchive(TarOutputStream stream)
{
    if (stream == null)
        throw new ArgumentNullException("stream");

    tarOut = stream;
}
/// <summary>
/// Prepares to generate a TAR archive from a directory.
/// </summary>
/// <param name="sourceDirectory">The path of the directory to capture/store in the archive.</param>
/// <param name="stream">The stream to write the generated archive to. Will be disposed when the generator is disposed.</param>
internal TarGenerator([NotNull] string sourceDirectory, [NotNull] Stream stream)
    : base(sourceDirectory)
{
    #region Sanity checks
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }
    #endregion

    // The tar stream takes ownership of the target stream (disposed with the generator).
    _tarStream = new TarOutputStream(stream);
}
// Output-side constructor; validates and stores the destination tar stream.
protected TarArchive(TarOutputStream stream)
{
    if (null == stream)
        throw new ArgumentNullException("stream");

    tarOut = stream;
}
/// <summary>
/// Initialise a TarArchive for output.
/// </summary>
/// <param name="stream">The <see cref="TarOutputStream"/> to use for output.</param>
protected TarArchive(TarOutputStream stream)
{
    // This project routes through a custom nameof shim rather than the C#
    // nameof operator; the call is kept intact.
    if (stream == null)
        throw new ArgumentNullException(SerialportSample.XWUtils.nameof(stream));

    tarOut = stream;
}
/// <summary>
/// Writes an archive at each blocking factor and checks the output length is
/// header + data blocks + zero block, rounded up to a whole record, with the
/// trailing padding all nulls.
/// NOTE(review): the loop condition is exclusive, so factor 64 itself is never
/// exercised despite the constant's name — confirm intent.
/// </summary>
public void BlockFactorHandling()
{
    const int MinimumBlockFactor = 1;
    const int MaximumBlockFactor = 64;
    const int FillFactor = 2;

    for ( int factor = MinimumBlockFactor; factor < MaximumBlockFactor; ++factor)
    {
        MemoryStream ms = new MemoryStream();

        using ( TarOutputStream tarOut = new TarOutputStream(ms, factor) )
        {
            TarEntry entry = TarEntry.CreateTarEntry("TestEntry");
            // Payload exactly fills factor * FillFactor blocks.
            entry.Size = (TarBuffer.BlockSize * factor * FillFactor);
            tarOut.PutNextEntry(entry);

            byte[] buffer = new byte[TarBuffer.BlockSize];

            Random r = new Random();
            r.NextBytes(buffer);

            // Last block is a partial one
            for ( int i = 0; i < factor * FillFactor; ++i)
            {
                tarOut.Write(buffer, 0, buffer.Length);
            }
        }

        byte[] tarData = ms.ToArray();

        Assert.IsNotNull(tarData, "Data written is null");

        // Blocks = Header + Data Blocks + Zero block + Record trailer
        int usedBlocks = 1 + (factor * FillFactor) + 1;
        // Round usedBlocks up to the next whole multiple of the blocking factor.
        int totalBlocks = usedBlocks + (factor - 1);
        totalBlocks /= factor;
        totalBlocks *= factor;

        Assert.AreEqual(TarBuffer.BlockSize * totalBlocks, tarData.Length,
            "Tar file should contain {0} blocks in length", totalBlocks);

        if ( usedBlocks < totalBlocks )
        {
            // Start at first byte after header.
            int byteIndex = TarBuffer.BlockSize * ((factor * FillFactor)+ 1);
            while ( byteIndex < tarData.Length )
            {
                int blockNumber = byteIndex / TarBuffer.BlockSize;
                // NOTE(review): computed from blockNumber, not byteIndex % BlockSize;
                // only used in the failure message, but looks like a typo.
                int offset = blockNumber % TarBuffer.BlockSize;
                Assert.AreEqual(0, tarData[byteIndex],
                    string.Format("Trailing block data should be null iteration {0} block {1} offset {2} index {3}",
                        factor, blockNumber, offset, byteIndex));
                byteIndex += 1;
            }
        }
    }
}
// Creates a TarArchive for writing. An existing TarOutputStream is reused
// directly; any other stream is wrapped with a blocking factor of 20.
public static TarArchive CreateOutputTarArchive(Stream outputStream)
{
    if (outputStream == null) {
        throw new ArgumentNullException("outputStream");
    }

    TarOutputStream asTar = outputStream as TarOutputStream;
    if (asTar != null) {
        return new TarArchive(asTar);
    }
    return CreateOutputTarArchive(outputStream, 20);
}
// Output-side constructor. User/group names default to empty strings; the
// assignments happen before validation, preserving the original order.
protected TarArchive(TarOutputStream stream)
{
    this.userName = string.Empty;
    this.groupName = string.Empty;

    if (null == stream)
    {
        throw new ArgumentNullException("stream");
    }

    this.tarOut = stream;
}
// Wraps the stream in a TarArchive for output, reusing it when it already is
// a TarOutputStream, otherwise deferring to the blocking-factor-20 overload.
public static TarArchive CreateOutputTarArchive(Stream outputStream)
{
    if (outputStream == null)
        throw new ArgumentNullException("outputStream");

    TarOutputStream existing = outputStream as TarOutputStream;
    return existing != null
        ? new TarArchive(existing)
        : CreateOutputTarArchive(outputStream, 20);
}
// Creates an output TarArchive over the given stream.
public static TarArchive CreateOutputTarArchive(Stream outputStream)
{
    if (outputStream == null)
    {
        throw new ArgumentNullException("outputStream");
    }

    var wrapped = outputStream as TarOutputStream;
    if (wrapped == null)
    {
        // Not already a tar stream: delegate to the blocking-factor overload.
        return CreateOutputTarArchive(outputStream, 20);
    }
    return new TarArchive(wrapped);
}
/// <summary>
/// Create a TarArchive for writing to, using the default blocking factor.
/// </summary>
/// <param name="outputStream">The <see cref="Stream"/> to write to.</param>
/// <returns>Returns a <see cref="TarArchive"/> suitable for writing.</returns>
public static TarArchive CreateOutputTarArchive(Stream outputStream)
{
    if (outputStream == null) {
        throw new ArgumentNullException("outputStream");
    }

    TarOutputStream existingTarStream = outputStream as TarOutputStream;
    return (existingTarStream != null)
        ? new TarArchive(existingTarStream)
        : CreateOutputTarArchive(outputStream, TarBuffer.DefaultBlockFactor);
}
// Builds an output TarArchive; reuses an existing TarOutputStream, otherwise
// wraps the stream via the blocking-factor-20 overload.
public static TarArchive CreateOutputTarArchive(Stream outputStream)
{
    if (outputStream == null)
    {
        throw new ArgumentNullException("outputStream");
    }

    TarOutputStream tarStream = outputStream as TarOutputStream;
    if (tarStream != null)
    {
        return new TarArchive(tarStream);
    }
    return TarArchive.CreateOutputTarArchive(outputStream, 20);
}
/// <summary>
/// Verifies the header checksum round-trips for a written entry, and that
/// corrupting the first header byte makes GetNextEntry throw a TarException.
/// </summary>
public void Checksum()
{
    MemoryStream ms = new MemoryStream();
    TarOutputStream tarOut = new TarOutputStream(ms);
    DateTime modTime = DateTime.Now;  // NOTE(review): unused

    TarEntry entry = TarEntry.CreateTarEntry("TestEntry");
    entry.TarHeader.Mode = 12345;
    tarOut.PutNextEntry(entry);
    tarOut.Close();

    // NOTE(review): GetBuffer().Length is the stream's capacity, not ms.Length;
    // the trailing zero bytes copied here are harmless to the tar reader.
    MemoryStream ms2 = new MemoryStream();
    ms2.Write(ms.GetBuffer(), 0, ms.GetBuffer().Length);
    ms2.Seek(0, SeekOrigin.Begin);
    TarInputStream tarIn = new TarInputStream(ms2);
    TarEntry nextEntry = tarIn.GetNextEntry();
    Assert.IsTrue(nextEntry.TarHeader.IsChecksumValid, "Checksum should be valid");

    // Overwrite the first header byte (with 34) to invalidate the checksum.
    MemoryStream ms3 = new MemoryStream();
    ms3.Write(ms.GetBuffer(), 0, ms.GetBuffer().Length);
    ms3.Seek(0, SeekOrigin.Begin);
    ms3.Write(new byte[1] { 34 }, 0, 1);
    ms3.Seek(0, SeekOrigin.Begin);
    tarIn = new TarInputStream(ms3);

    bool trapped = false;
    try
    {
        nextEntry = tarIn.GetNextEntry();
    }
    catch (TarException)
    {
        trapped = true;
    }
    Assert.IsTrue(trapped, "Checksum should be invalid");
}
/// <summary>
/// Writes entries of varying sizes (including a partial final block) and checks
/// that all padding past the last used block is null bytes, and that total file
/// length is rounded up to a whole record of TestBlockFactor blocks.
/// </summary>
public void TrailerContainsNulls()
{
    const int TestBlockFactor = 3;

    for ( int iteration = 0; iteration < TestBlockFactor * 2; ++iteration)
    {
        MemoryStream ms = new MemoryStream();

        using ( TarOutputStream tarOut = new TarOutputStream(ms, TestBlockFactor) )
        {
            TarEntry entry = TarEntry.CreateTarEntry("TestEntry");
            if ( iteration > 0 )
            {
                // (iteration - 1) full blocks plus 9 extra bytes -> partial last block.
                entry.Size = (TarBuffer.BlockSize * (iteration - 1)) + 9;
            }
            tarOut.PutNextEntry(entry);

            byte[] buffer = new byte[TarBuffer.BlockSize];

            Random r = new Random();
            r.NextBytes(buffer);

            if ( iteration > 0 )
            {
                for ( int i = 0; i < iteration - 1; ++i )
                {
                    tarOut.Write(buffer, 0, buffer.Length);
                }

                // Last block is a partial one
                for ( int i = 1; i < 10; ++i)
                {
                    tarOut.WriteByte((byte)i);
                }
            }
        }

        byte[] tarData = ms.ToArray();

        Assert.IsNotNull(tarData, "Data written is null");

        // Blocks = Header + Data Blocks + Zero block + Record trailer
        int usedBlocks = 1 + iteration + 1;
        // Round up to a whole multiple of the blocking factor.
        int totalBlocks = usedBlocks + (TestBlockFactor - 1);
        totalBlocks /= TestBlockFactor;
        totalBlocks *= TestBlockFactor;

        Assert.AreEqual(TarBuffer.BlockSize * totalBlocks, tarData.Length,
            string.Format("Tar file should be {0} blocks in length", totalBlocks));

        if ( usedBlocks < totalBlocks )
        {
            // Start at first byte after header.
            int byteIndex = TarBuffer.BlockSize * (iteration + 1);
            while ( byteIndex < tarData.Length )
            {
                int blockNumber = byteIndex / TarBuffer.BlockSize;
                // NOTE(review): offset is blockNumber % BlockSize (only used in
                // the failure message); byteIndex % BlockSize may have been meant.
                int offset = blockNumber % TarBuffer.BlockSize;
                Assert.AreEqual(0, tarData[byteIndex],
                    string.Format("Trailing block data should be null iteration {0} block {1} offset {2} index {3}",
                        iteration, blockNumber, offset, byteIndex));
                byteIndex += 1;
            }
        }
    }
}
// Rebinds this writer to a new destination stream and marks the instance
// as live again (the two assignments are independent).
public override void SetBaseStream(Stream outputStream)
{
    disposed = false;
    tar = new TarOutputStream(outputStream);
}
// Splits a file or directory into fixed-size fragments of a gzipped tar stream,
// writing "<sal1>.tar.gz.NNN" pieces plus a SHA512 checksum file into dir.
// kb is the fragment size in kilobytes. Progress is reported via OnProgress.
protected override void _Partir(string fichero, string sal1, string dir, long kb)
{
    FileInfo fi = null;
    DirectoryInfo din = null;
    DirectoryInfo dout = new DirectoryInfo (dir);
    // Input may be a file or a directory; anything else is an error.
    if (File.Exists (fichero)) {
        fi = new FileInfo (fichero);
    } else if (Directory.Exists (fichero)) {
        din = new DirectoryInfo (fichero);
    } else {
        throw new Exception ("" + fichero + " not found");
    }
    List<FileInfo> files = load (fichero);
    string baseName = "";
    if (fi != null) {
        baseName = fi.Name;
    } else if (din != null) {
        baseName = din.Name;
    }
    // Default the output base name to the input's name when none was given.
    if ((sal1 == null) || (sal1 == string.Empty)) {
        //
        if (din != null) {
            sal1 = din.Name;
        }
        if (fi != null) {
            sal1 = fi.Name;
        }
    }
    long totalSize = calculateTotalSize (files);
    long fragments = totalSize / (kb * 1024);
    string s = "" + fragments;
    // JoinInfo describes the fragment set so the pieces can be rejoined later.
    JoinInfo info = new JoinInfo ();
    info.OriginalFile = baseName;
    info.InitialFragment = 0;
    // At least 3 digits in the fragment-number suffix.
    info.Digits = Math.Max (s.Length, 3);
    info.BaseName = sal1 + ".tar.gz.";
    info.Directory = dout;
    info.Length = totalSize;
    // Pipeline: tar -> gzip -> SplitStream (fragments + SHA512 sums file).
    Stream stream = new SplitStream (info, kb * 1024, info.Directory.FullName + Path.DirectorySeparatorChar + info.BaseName + "sha512sum.dalle", "SHA512");
    stream = new GZipStream (stream, CompressionMode.Compress);
    TarOutputStream taros = new TarOutputStream (stream);
    foreach (FileInfo f in files) {
        TarEntry te = TarEntry.CreateEntryFromFile (f.FullName);
        // Normalise ownership metadata so archives are reproducible.
        te.UserId = 0;
        te.GroupId = 0;
        te.UserName = String.Empty;
        te.GroupName = String.Empty;
        taros.PutNextEntry (te);
        FileStream fs = f.OpenRead ();
        long leidosTotales = 0;  // total bytes read for this file
        byte[] buffer = new byte[Consts.BUFFER_LENGTH];
        int leidos = 0;          // bytes read per chunk
        // NOTE(review): fs is not in a using block — it leaks if Write throws.
        while ((leidos = fs.Read (buffer, 0, buffer.Length)) > 0) {
            taros.Write (buffer, 0, leidos);
            leidosTotales += leidos;
            OnProgress (leidosTotales, totalSize);
        }
        taros.CloseEntry ();
        fs.Close ();
    }
    taros.Close ();
    OnProgress (totalSize, totalSize);
}
/// <summary>
/// Recursively adds the files under <paramref name="source"/> to the tar stream,
/// descending at most <paramref name="depth"/> levels (depth 0 stops recursion).
/// </summary>
private void RecursiveTarDir(TarOutputStream stream, IPurePath subdir, IPurePath source, int depth)
{
    var sourceStr = source.ToString();
    if (depth == 0)
    {
        return;
    }
    if (!Directory.Exists(sourceStr))
    {
        throw new IOException("Could not find path " + source);
    }

    foreach (var subfile in Directory.EnumerateFiles(sourceStr))
    {
        // NOTE(review): the base dir passed here carries source.Filename, which
        // TarFile then replaces with subfile's own name — confirm the intended
        // in-archive layout.
        TarFile(
            stream,
            subdir.WithFilename(source.Filename),
            Context.LocalEnv.CreatePurePath(subfile));
    }

    foreach (var dirStr in Directory.EnumerateDirectories(sourceStr))
    {
        var dir = Context.LocalEnv.CreatePurePath(dirStr);
        // NOTE(review): GetParent(dirStr).Name is the PARENT directory's name,
        // not the subdirectory's own name — looks suspicious; verify.
        var dirpath = subdir.Join(Directory.GetParent(dirStr).Name);
        RecursiveTarDir(stream, dirpath, dir, depth > 0 ? depth - 1 : depth);
    }
}
// Verifies TarOutputStream's ownership semantics: by default closing the tar
// stream closes/disposes the wrapped stream; with IsStreamOwner = false the
// wrapped stream is left untouched.
public void OutputStreamOwnership()
{
    // Case 1: default ownership — the wrapped stream is closed with the writer.
    TrackedMemoryStream tracked = new TrackedMemoryStream();
    TarOutputStream tarStream = new TarOutputStream(tracked);

    Assert.IsFalse(tracked.IsClosed, "Shouldnt be closed initially");
    Assert.IsFalse(tracked.IsDisposed, "Shouldnt be disposed initially");

    tarStream.Close();

    Assert.IsTrue(tracked.IsClosed, "Should be closed after parent owner close");
    Assert.IsTrue(tracked.IsDisposed, "Should be disposed after parent owner close");

    // Case 2: ownership disabled — the wrapped stream survives the close.
    tracked = new TrackedMemoryStream();
    tarStream = new TarOutputStream(tracked);

    Assert.IsFalse(tracked.IsClosed, "Shouldnt be closed initially");
    Assert.IsFalse(tracked.IsDisposed, "Shouldnt be disposed initially");

    tarStream.IsStreamOwner = false;
    tarStream.Close();

    Assert.IsFalse(tracked.IsClosed, "Should not be closed after parent owner close");
    Assert.IsFalse(tracked.IsDisposed, "Should not be disposed after parent owner close");
}
// Chains to the parameterless constructor for shared setup, then wraps the
// destination stream in a tar writer.
public SharpZipTarArchiveWriter(Stream outputStream)
    : this()
{
    tar = new TarOutputStream(outputStream);
}
/// <summary>
/// Tars the given sources (files or directories, recursed to pathDepth) into
/// outputStream, optionally gzip-compressed at compressionLevel. Only the
/// None and GZip schemes are handled; others throw. outputStream itself is
/// never closed here; only the intermediate gzip wrapper is.
/// </summary>
public void CompressFiles(Stream outputStream, string[] sources, CompressionScheme scheme, int compressionLevel, int pathDepth)
{
    if (scheme == CompressionScheme.None || scheme == CompressionScheme.GZip)
    {
        var innerStream = outputStream;
        try
        {
            switch (scheme)
            {
                case CompressionScheme.None:
                    // Nothing to do...
                    break;
                case CompressionScheme.GZip:
                    // IsStreamOwner = false so closing the gzip wrapper leaves
                    // the caller's stream open.
                    innerStream = new GZipOutputStream(outputStream) { IsStreamOwner = false };
                    ((GZipOutputStream) innerStream).SetLevel(compressionLevel);
                    break;
                default:
                    throw new NotImplementedException(String.Format(
                        "Compression Scheme {0} not yet implemented.", scheme));
            }

            using (var output = new TarOutputStream(innerStream))
            {
                output.IsStreamOwner = false;
                foreach (var source in sources)
                {
                    if (String.IsNullOrWhiteSpace(source))
                    {
                        continue;
                    }

                    var fullpath = Context.LocalEnv.CurrentDirectory.Join(source);
                    if (File.Exists(fullpath.ToString()))
                    {
                        TarFile(output, null, fullpath);
                    }
                    else
                    {
                        RecursiveTarDir(output, null, fullpath, pathDepth);
                    }
                }
            }
        }
        finally
        {
            if (!ReferenceEquals(innerStream, outputStream))
            {
                // We own the stream... close it.
                innerStream.Close();
            }
        }
    }
}
/// <summary>
/// Writes a single file into the tar stream; the entry name combines
/// <paramref name="baseDir"/> with the source file's own filename.
/// NOTE(review): CompressFiles passes baseDir == null for top-level files; this
/// relies on WithFilename tolerating a null receiver (extension method?) — confirm.
/// </summary>
private void TarFile(TarOutputStream stream, IPurePath baseDir, IPurePath sourcePath)
{
    var tarName = baseDir.WithFilename(sourcePath.Filename);
    var entry = TarEntry.CreateTarEntry(tarName.ToString());
    using (var file = File.OpenRead(sourcePath.ToString()))
    {
        // Size must be set before PutNextEntry writes the header.
        entry.Size = file.Length;
        stream.PutNextEntry(entry);
        file.CopyTo(stream);
    }
    stream.CloseEntry();
}
// Writes a single entry with the supplied (possibly very long) name, reads the
// archive back, and asserts the name round-tripped intact.
void TryLongName(string name)
{
    MemoryStream written = new MemoryStream();
    using ( TarOutputStream tarOut = new TarOutputStream(written) )
    {
        DateTime modTime = DateTime.Now;

        TarEntry entry = TarEntry.CreateTarEntry(name);
        tarOut.PutNextEntry(entry);
    }

    // Copy the raw bytes into a fresh stream for reading.
    MemoryStream readBack = new MemoryStream();
    readBack.Write(written.GetBuffer(), 0, written.GetBuffer().Length);
    readBack.Seek(0, SeekOrigin.Begin);

    using (TarInputStream tarIn = new TarInputStream(readBack))
    {
        TarEntry nextEntry = tarIn.GetNextEntry();
        Assert.AreEqual(nextEntry.Name, name, "Name match failure");
    }
}
/// <summary>
/// Round-trips one entry through a tar write/read cycle and verifies the header
/// fields (ids, names, mode, checksum) and second-truncated mod time survive,
/// and that the archive contains exactly one entry.
/// </summary>
public void ValuesPreserved()
{
    MemoryStream ms = new MemoryStream();
    TarEntry entry;
    DateTime modTime = DateTime.Now;

    using (TarOutputStream tarOut = new TarOutputStream(ms))
    {
        entry = TarEntry.CreateTarEntry("TestEntry");
        entry.GroupId = 12;
        entry.UserId = 14;
        entry.ModTime = modTime;
        entry.UserName = "******";
        entry.GroupName = "GroupName";
        entry.TarHeader.Mode = 12345;

        tarOut.PutNextEntry(entry);
    }

    // NOTE(review): GetBuffer().Length is capacity, not ms.Length; the extra
    // trailing zeros are harmless to the tar reader.
    MemoryStream ms2 = new MemoryStream();
    ms2.Write(ms.GetBuffer(), 0, ms.GetBuffer().Length);
    ms2.Seek(0, SeekOrigin.Begin);

    using (TarInputStream tarIn = new TarInputStream(ms2))
    {
        TarEntry nextEntry = tarIn.GetNextEntry();
        Assert.AreEqual(entry.TarHeader.Checksum, nextEntry.TarHeader.Checksum, "Checksum");

        Assert.IsTrue(nextEntry.Equals(entry), "Entries should be equal");
        Assert.IsTrue(nextEntry.TarHeader.Equals(entry.TarHeader), "Headers should match");

        // Tar only stores seconds
        DateTime truncatedTime = new DateTime(modTime.Year, modTime.Month,
            modTime.Day, modTime.Hour, modTime.Minute, modTime.Second);
        Assert.AreEqual(truncatedTime, nextEntry.ModTime, "Modtimes should match");

        int entryCount = 0;
        while ( nextEntry != null )
        {
            ++entryCount;
            nextEntry = tarIn.GetNextEntry();
        }

        Assert.AreEqual(1, entryCount, "Expected 1 entry");
    }
}
// Serialises this object's children into 'stream' as a tar archive.
override public void AddToStream (Stream stream, EventTracker tracker)
{
	if (tracker != null)
		tracker.ExpectingAdded (UriFu.UriToEscapedString (this.Uri));

	// TarOutputStream.Close() also closes the stream it wraps, so shield the
	// caller's stream behind an UnclosableStream first.
	UnclosableStream unclosable = new UnclosableStream (stream);
	TarOutputStream tar_out = new TarOutputStream (unclosable);

	foreach (FileSystemObject fso in Children)
		WriteObjectToTar (tar_out, fso, tracker);

	tar_out.Close ();
}
/// <summary>
/// Initialise a TarArchive for output.
/// </summary>
/// <param name="stream">The <see cref="TarOutputStream"/> to use for output.</param>
protected TarArchive(TarOutputStream stream)
{
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }
    tarOut = stream;
}
/// <summary>
/// Read data direct from drive to file
/// </summary>
/// <param name="driveLetter"></param>
/// <param name="fileName"></param>
/// <param name="eCompType"></param>
/// <returns></returns>
public bool ReadDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool bUseMBR)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    //
    // Map to physical drive
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if(string.IsNullOrEmpty(physicalDrive))
    {
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return false;
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if(driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return false;
    }

    // readSize may later be truncated to the size reported by the MBR.
    var readSize = driveSize;

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Start doing the read
    //
    var buffer = new byte[Globals.MaxBufferSize];
    var offset = 0L;

    using(var basefs = (Stream)new FileStream(fileName, FileMode.Create, FileAccess.Write))
    {
        Stream fs;

        // Choose the output pipeline for the requested compression type.
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zfs = new ZipOutputStream(basefs);

                // Default to middle of the range compression
                zfs.SetLevel(Globals.CompressionLevel);

                var fi = new FileInfo(fileName);
                var entryName = fi.Name;
                entryName = entryName.ToLower().Replace(".zip", "");
                entryName = ZipEntry.CleanName(entryName);
                var zipEntry = new ZipEntry(entryName) {DateTime = fi.LastWriteTime};
                zfs.IsStreamOwner = true;

                // Todo: Consider whether size needs setting for older utils ?

                zfs.PutNextEntry(zipEntry);
                fs = zfs;
                break;

            case EnumCompressionType.Gzip:
                var gzis = new GZipOutputStream(basefs);
                gzis.SetLevel(Globals.CompressionLevel);
                gzis.IsStreamOwner = true;
                fs = gzis;
                break;

            case EnumCompressionType.Targzip:
                var gzos = new GZipOutputStream(basefs);
                gzos.SetLevel(Globals.CompressionLevel);
                gzos.IsStreamOwner = true;
                var tos = new TarOutputStream(gzos);
                fs = tos;
                break;

            case EnumCompressionType.XZ:
                var xzs = new XZOutputStream(basefs);
                fs = xzs;
                break;

            default:
                // No compression - direct to file stream
                fs = basefs;
                break;
        }

        while (offset < readSize && !IsCancelling)
        {
            // NOTE: If we provide a buffer that extends past the end of the physical device ReadFile() doesn't
            //       seem to do a partial read. Deal with this by reading the remaining bytes at the end of the
            //       drive if necessary
            var readMaxLength =
                (int)
                ((((ulong) readSize - (ulong) offset) < (ulong) buffer.Length)
                     ? ((ulong) readSize - (ulong) offset)
                     : (ulong) buffer.Length);

            int readBytes;
            if (_diskAccess.Read(buffer, readMaxLength, out readBytes) < 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive__ +
                       Marshal.GetHRForLastWin32Error());
                goto readfail1;
            }

            if (readBytes == 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive___past_EOF_);
                goto readfail1;
            }

            // Check MBR
            if (bUseMBR && offset == 0)
            {
                // Trust the partition table to tell us how much data actually
                // needs reading, rather than imaging the whole device.
                var truncatedSize = ParseMBRForSize(buffer);

                if(truncatedSize > driveSize)
                {
                    LogMsg(Resources.Disk_ReadDrive_Problem_with_filesystem__It_reports_it_is_larger_than_the_disk_);
                    goto readfail1;
                }

                if(truncatedSize == 0)
                {
                    LogMsg(Resources.Disk_ReadDrive_No_valid_partitions_on_drive);
                    goto readfail1;
                }

                readSize = truncatedSize;
            }

            if(offset == 0)
            {
                // The tar entry header needs the (possibly MBR-truncated) size,
                // which is only known once the first block has been examined.
                switch (eCompType)
                {
                    case EnumCompressionType.Targzip:
                        var fi = new FileInfo(fileName);
                        var entryName = fi.Name;
                        entryName = entryName.ToLower().Replace(".tar.gz", "");
                        entryName = entryName.ToLower().Replace(".tgz", "");
                        var tarEntry = TarEntry.CreateTarEntry(entryName);
                        tarEntry.Size = readSize;
                        tarEntry.ModTime = DateTime.SpecifyKind(fi.LastWriteTime, DateTimeKind.Utc);
                        ((TarOutputStream) fs).PutNextEntry(tarEntry);
                        break;
                }
            }

            fs.Write(buffer, 0, readBytes);
            offset += (uint) readBytes;

            var percentDone = (int) (100*offset/readSize);
            var tsElapsed = DateTime.Now.Subtract(dtStart);
            var bytesPerSec = offset/tsElapsed.TotalSeconds;

            Progress(percentDone);
            LogMsg(Resources.Disk_ReadDrive_Read + @": " + (offset / Globals.MbModifier) + @" / " +
                       (readSize / Globals.MbModifier) + @" MB " + @"(" + Resources.Disk_ReadDrive_Physical + @": " + (driveSize / Globals.MbModifier) + " MB); " +
                       string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " + Resources.Disk_Elapsed_time + ": " +
                       tsElapsed.ToString(@"hh\:mm\:ss"));
        }

        // Flush/close the compression wrappers in the right order for each type.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        else if (fs is TarOutputStream)
        {
            ((TarOutputStream) fs).CloseEntry();
            fs.Close();
        }
        else if (fs is GZipOutputStream)
        {
            fs.Close();
        }
        else if (fs is XZOutputStream)
        {
            fs.Close();
        }
    }

    // NOTE(review): the read-failure gotos land here and then fall through to
    // "return true" — failures inside the read loop report success to callers.
    readfail1:

    _diskAccess.Close();
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    else
        LogMsg(Resources.Disk_ReadDrive_All_Done_Read + @" " + offset + @" " + Resources.Disk_WriteDrive_bytes + @". " + Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    Progress(0);
    return true;
}