/// <summary>
/// Infers the image compression format from the file name's extension and
/// stores it in <c>_eCompType</c>: ".tar.gz"/".tgz" → Targzip, ".gz" → Gzip,
/// ".zip" → Zip, ".xz" → XZ, anything else → None.
/// </summary>
/// <param name="file">File name or path whose extension is examined.</param>
private void DefineCompressionType(string file)
{
    // Ordinal, case-insensitive comparison instead of repeated ToLower()
    // calls: avoids one string allocation per test and culture-sensitive
    // casing surprises (e.g. the Turkish dotless-I problem).
    if (file.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase) ||
        file.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.Targzip;
    }
    else if (file.EndsWith(".gz", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.Gzip;
    }
    else if (file.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.Zip;
    }
    else if (file.EndsWith(".xz", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.XZ;
    }
    else
    {
        _eCompType = EnumCompressionType.None;
    }
}
/// <summary>
/// Keeps the compression radio buttons and <c>_eCompType</c> in sync with the
/// extension typed into the file-name textbox. Unrecognized extensions leave
/// the current radio selection untouched.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event data (unused).</param>
private void TextBoxFileNameTextChanged(object sender, EventArgs e)
{
    // Read the textbox once and compare ordinally/case-insensitively instead
    // of re-lowering the whole string for every extension test.
    var fileName = textBoxFileName.Text;

    if (fileName.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase) ||
        fileName.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase))
    {
        radioButtonCompTgz.Checked = true;
    }
    else if (fileName.EndsWith(".gz", StringComparison.OrdinalIgnoreCase))
    {
        radioButtonCompGz.Checked = true;
    }
    else if (fileName.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
    {
        radioButtonCompZip.Checked = true;
    }
    else if (fileName.EndsWith(".img", StringComparison.OrdinalIgnoreCase) ||
             fileName.EndsWith(".bin", StringComparison.OrdinalIgnoreCase) ||
             fileName.EndsWith(".sdcard", StringComparison.OrdinalIgnoreCase))
    {
        radioButtonCompNone.Checked = true;
    }

    // Mirror the (possibly unchanged) radio selection into the field.
    if (radioButtonCompNone.Checked)
    {
        _eCompType = EnumCompressionType.None;
    }
    else if (radioButtonCompTgz.Checked)
    {
        _eCompType = EnumCompressionType.Targzip;
    }
    else if (radioButtonCompGz.Checked)
    {
        _eCompType = EnumCompressionType.Gzip;
    }
    else if (radioButtonCompZip.Checked)
    {
        _eCompType = EnumCompressionType.Zip;
    }
}
/// <summary>
/// Writes an image file to the physical drive that backs the given logical
/// drive letter, transparently decompressing Zip/Gzip/Tar-gzip sources.
/// </summary>
/// <param name="driveLetter">Logical drive letter of the target volume.</param>
/// <param name="fileName">Path to the source image file; must exist.</param>
/// <param name="eCompType">Compression format of the source image.</param>
/// <returns>true when the write loop completed without error; false otherwise.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="fileName"/> does not exist.</exception>
public bool WriteDrive(string driveLetter, string fileName, EnumCompressionType eCompType)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    if (!File.Exists(fileName))
    {
        throw new ArgumentException(fileName + " doesn't exist");
    }

    //
    // Get physical drive partition for logical partition
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if (string.IsNullOrEmpty(physicalDrive))
    {
        // NOTE(review): UnlockDrive is called before LockDrive has ever
        // succeeded - presumably a harmless no-op in the disk-access layer; confirm.
        LogMsg(@"Error: Couldn't map partition to physical drive");
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(@"Failed to lock drive");
        return(false);
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if (driveSize <= 0)
    {
        LogMsg(@"Failed to get device size");
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(@"Failed to open physical drive");
        _diskAccess.UnlockDrive();
        return(false);
    }

    var buffer = new byte[Globals.MaxBufferSize];
    long offset = 0;

    var fileLength = new FileInfo(fileName).Length;

    // Best guess at the total number of bytes to write; corrected per-format
    // below once the compressed entry/stream length is known.
    var uncompressedlength = fileLength;

    var errored = true;
    using (var basefs = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    {
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                {
                    var zipFile = new ZipFile(basefs);

                    // The image is expected to be the first file entry in the archive.
                    var ze = (from ZipEntry zipEntry in zipFile
                              where zipEntry.IsFile
                              select zipEntry).FirstOrDefault();

                    if (ze == null)
                    {
                        // NOTE(review): jumping to readfail2 skips _diskAccess.Close(),
                        // so the physical handle opened above is never closed - confirm intended.
                        LogMsg(@"Error reading zip input stream");
                        goto readfail2;
                    }

                    var zis = zipFile.GetInputStream(ze);

                    uncompressedlength = ze.Size;

                    fs = zis;
                }
                break;

            case EnumCompressionType.Gzip:
                {
                    var gzis = new GZipInputStream(basefs)
                    {
                        IsStreamOwner = true
                    };

                    // NOTE(review): Length on a gzip *input* stream is not supported by
                    // all stream implementations - confirm this does not throw.
                    uncompressedlength = gzis.Length;

                    fs = gzis;
                }
                break;

            case EnumCompressionType.Targzip:
                {
                    var gzos = new GZipInputStream(basefs)
                    {
                        IsStreamOwner = true
                    };

                    var tis = new TarInputStream(gzos);

                    // Skip directory entries; the first file entry is taken as the image.
                    // NOTE(review): if GetNextEntry returns null (empty or directory-only
                    // archive) this loop throws NullReferenceException - confirm upstream
                    // guarantees a file entry exists.
                    TarEntry tarEntry;
                    do
                    {
                        tarEntry = tis.GetNextEntry();
                    } while (tarEntry.IsDirectory);

                    uncompressedlength = tarEntry.Size;

                    fs = tis;
                }
                break;

            default:

                // No compression - direct to file stream
                fs = basefs;
                uncompressedlength = fs.Length;

                break;
        }

        var bufferOffset = 0;
        using (var br = new BinaryReader(fs))
        {
            while (offset < uncompressedlength && !IsCancelling)
            {
                // Note: There's a problem writing certain lengths to the underlying physical drive.
                //       This appears when we try to read from a compressed stream as it gives us
                //       "strange" lengths which then fail to be written via Writefile() so try to build
                //       up a decent block of bytes here...
                int readBytes;
                do
                {
                    readBytes = br.Read(buffer, bufferOffset, buffer.Length - bufferOffset);
                    bufferOffset += readBytes;
                } while (bufferOffset < Globals.MaxBufferSize && readBytes != 0);

                int wroteBytes;
                var bytesToWrite = bufferOffset;
                var trailingBytes = 0;

                // Assume that the underlying physical drive will at least accept powers of two!
                if (!IsPowerOfTwo((ulong)bufferOffset))
                {
                    // Find highest bit (32-bit max)
                    var highBit = 31;
                    for (; ((bufferOffset & (1 << highBit)) == 0) && highBit >= 0; highBit--)
                    {
                        ;
                    }

                    // Work out trailing bytes after last power of two
                    var lastPowerOf2 = 1 << highBit;

                    bytesToWrite = lastPowerOf2;
                    trailingBytes = bufferOffset - lastPowerOf2;
                }

                if (_diskAccess.Write(buffer, bytesToWrite, out wroteBytes) < 0)
                {
                    LogMsg(@"Error writing data to drive: " + Marshal.GetHRForLastWin32Error());
                    goto readfail1;
                }

                if (wroteBytes != bytesToWrite)
                {
                    LogMsg(@"Error writing data to drive - past EOF?");
                    goto readfail1;
                }

                // Move trailing bytes up - Todo: Suboptimal
                if (trailingBytes > 0)
                {
                    Buffer.BlockCopy(buffer, bufferOffset - trailingBytes, buffer, 0, trailingBytes);
                    bufferOffset = trailingBytes;
                }
                else
                {
                    bufferOffset = 0;
                }

                offset += (uint)wroteBytes;

                var percentDone = (int)(100 * offset / uncompressedlength);
                var tsElapsed = DateTime.Now.Subtract(dtStart);
                var bytesPerSec = offset / tsElapsed.TotalSeconds;

                Progress(percentDone);
                LogMsg(@"Wrote " + percentDone + @"%, " + (offset / (1024 * 1024)) + @" MB / " +
                       (uncompressedlength / (1024 * 1024) + " MB, " +
                        string.Format("{0:F}", (bytesPerSec / (1024 * 1024))) + @" MB/sec, Elapsed time: " + tsElapsed.ToString(@"dd\.hh\:mm\:ss")));
            }
        }
    }
    errored = false;

readfail1:
    _diskAccess.Close();
readfail2:
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
    {
        LogMsg("Cancelled");
    }
    else
    {
        LogMsg("All Done - Wrote " + offset + " bytes. Elapsed time " + tstotalTime.ToString(@"dd\.hh\:mm\:ss"));
    }
    Progress(0);
    return(!errored);
}
/// <summary>
/// Read data direct from drive to file
/// </summary>
/// <param name="driveLetter">Logical drive letter of the source volume.</param>
/// <param name="fileName">Destination image file (created/overwritten).</param>
/// <param name="eCompType">Compression to apply to the output (Zip, Gzip, Targzip or none).</param>
/// <param name="bUseMBR">When true, parse the MBR in the first sector and truncate the read to the partitioned size.</param>
/// <param name="start">NOTE(review): currently unused by this implementation - confirm intended.</param>
/// <param name="length">NOTE(review): currently unused by this implementation - confirm intended.</param>
/// <returns>true in all cases, even after a read failure via readfail1 - NOTE(review): callers cannot detect failure from the return value.</returns>
public bool ReadDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool bUseMBR, long start, long length)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    //
    // Map to physical drive
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if (string.IsNullOrEmpty(physicalDrive))
    {
        // NOTE(review): UnlockDrive before any successful LockDrive - presumably a no-op; confirm.
        LogMsg(@"Error: Couldn't map partition to physical drive");
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(@"Failed to lock drive");
        return(false);
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if (driveSize <= 0)
    {
        LogMsg(@"Failed to get device size");
        _diskAccess.UnlockDrive();
        return(false);
    }

    // Number of bytes to read; may be reduced below when bUseMBR finds a
    // smaller partitioned size in the first sector.
    var readSize = driveSize;

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(@"Failed to open physical drive");
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Start doing the read
    //
    var buffer = new byte[Globals.MaxBufferSize];
    var offset = 0L;

    using (var basefs = (Stream) new FileStream(fileName, FileMode.Create, FileAccess.Write))
    {
        Stream fs;

        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                {
                    var zfs = new ZipOutputStream(basefs);

                    // Default to middle of the range compression
                    zfs.SetLevel(Globals.CompressionLevel);

                    // Entry name is the output file name without its ".zip" suffix.
                    var fi = new FileInfo(fileName);
                    var entryName = fi.Name;
                    entryName = entryName.ToLower().Replace(".zip", "");
                    entryName = ZipEntry.CleanName(entryName);
                    var zipEntry = new ZipEntry(entryName)
                    {
                        DateTime = fi.LastWriteTime
                    };
                    zfs.IsStreamOwner = true;

                    // Todo: Consider whether size needs setting for older utils ?

                    zfs.PutNextEntry(zipEntry);

                    fs = zfs;
                }
                break;

            case EnumCompressionType.Gzip:
                {
                    var gzos = new GZipOutputStream(basefs);
                    gzos.SetLevel(Globals.CompressionLevel);
                    gzos.IsStreamOwner = true;

                    fs = gzos;
                }
                break;

            case EnumCompressionType.Targzip:
                {
                    // Tar wrapped in gzip. The tar header entry is deliberately NOT
                    // written here: it needs readSize, which the MBR check below may
                    // still shrink, so it is emitted on the first pass of the read loop.
                    var gzos = new GZipOutputStream(basefs);
                    gzos.SetLevel(Globals.CompressionLevel);
                    gzos.IsStreamOwner = true;

                    var tos = new TarOutputStream(gzos);

                    fs = tos;
                }
                break;

            default:

                // No compression - direct to file stream
                fs = basefs;

                break;
        }

        while (offset < readSize && !IsCancelling)
        {
            // NOTE: If we provide a buffer that extends past the end of the physical device ReadFile() doesn't
            //       seem to do a partial read. Deal with this by reading the remaining bytes at the end of the
            //       drive if necessary
            var readMaxLength =
                (int)
                ((((ulong)readSize - (ulong)offset) < (ulong)buffer.Length)
                     ? ((ulong)readSize - (ulong)offset)
                     : (ulong)buffer.Length);

            int readBytes;
            if (_diskAccess.Read(buffer, readMaxLength, out readBytes) < 0)
            {
                LogMsg(@"Error reading data from drive: " + Marshal.GetHRForLastWin32Error());
                goto readfail1;
            }

            if (readBytes == 0)
            {
                LogMsg(@"Error reading data from drive - past EOF?");
                goto readfail1;
            }

            // Check MBR
            if (bUseMBR && offset == 0)
            {
                var truncatedSize = ParseMBRForSize(buffer);

                if (truncatedSize > driveSize)
                {
                    LogMsg("Problem with filesystem. It reports it is larger than the disk!");
                    goto readfail1;
                }

                if (truncatedSize == 0)
                {
                    LogMsg("No valid partitions on drive");
                    goto readfail1;
                }

                readSize = truncatedSize;
            }

            if (offset == 0)
            {
                switch (eCompType)
                {
                    case EnumCompressionType.Targzip:
                        // Deferred tar header - written once readSize is final.
                        var fi = new FileInfo(fileName);
                        var entryName = fi.Name;
                        entryName = entryName.ToLower().Replace(".tar.gz", "");
                        entryName = entryName.ToLower().Replace(".tgz", "");
                        var tarEntry = TarEntry.CreateTarEntry(entryName);
                        tarEntry.Size = readSize;
                        tarEntry.ModTime = DateTime.SpecifyKind(fi.LastWriteTime, DateTimeKind.Utc);
                        ((TarOutputStream)fs).PutNextEntry(tarEntry);
                        break;
                }
            }

            fs.Write(buffer, 0, readBytes);

            offset += (uint)readBytes;

            var percentDone = (int)(100 * offset / readSize);
            var tsElapsed = DateTime.Now.Subtract(dtStart);
            var bytesPerSec = offset / tsElapsed.TotalSeconds;

            Progress(percentDone);
            LogMsg(@"Read " + percentDone + @"%, " + (offset / (1024 * 1024)) + @" MB / " +
                   (readSize / (1024 * 1024) + " MB (Physical: " + (driveSize / (1024 * 1024)) + " MB), " +
                    string.Format("{0:F}", (bytesPerSec / (1024 * 1024))) + @" MB/sec, Elapsed time: " + tsElapsed.ToString(@"dd\.hh\:mm\:ss")));
        }

        // Todo: Do we need this?
        // Flush/close the compression wrappers so footers are written before the
        // enclosing using disposes basefs.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        if (fs is TarOutputStream)
        {
            ((TarOutputStream)fs).CloseEntry();
            ((TarOutputStream)fs).Close();
        }
        if (fs is GZipOutputStream)
        {
            // ((GZipOutputStream) fs).Finish();
            ((GZipOutputStream)fs).Close();
        }
    }

readfail1:
    _diskAccess.Close();
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
    {
        LogMsg("Cancelled");
    }
    else
    {
        LogMsg("All Done - Read " + offset + " bytes. Elapsed time " + tstotalTime.ToString(@"dd\.hh\:mm\:ss"));
    }
    Progress(0);
    return(true);
}
/// <summary>
/// Writes an image file to the physical drive behind the given logical drive
/// letter, transparently decompressing Zip/Gzip/Tar-gzip/XZ sources.
/// </summary>
/// <param name="driveLetter">Logical drive letter of the target volume.</param>
/// <param name="fileName">Path to the source image file; must exist.</param>
/// <param name="eCompType">Compression format of the source image.</param>
/// <param name="removeAfter">When true and not cancelled, unmounts the drive after a successful write.</param>
/// <returns>true when the write loop completed without error; false otherwise.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="fileName"/> does not exist.</exception>
public bool WriteDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool removeAfter)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    if (!File.Exists(fileName))
    {
        throw new ArgumentException(fileName + Resources.Disk_WriteDrive__doesn_t_exist);
    }

    //
    // Get physical drive partition for logical partition
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if (string.IsNullOrEmpty(physicalDrive))
    {
        // NOTE(review): UnlockDrive before any successful LockDrive - presumably a no-op; confirm.
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return(false);
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if (driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return(false);
    }

    var buffer = new byte[Globals.MaxBufferSize];
    long offset = 0;

    var fileLength = new FileInfo(fileName).Length;

    // Best guess at the total bytes to write; corrected per-format below.
    var uncompressedlength = fileLength;

    var errored = true;
    using (var basefs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zipFile = new ZipFile(basefs);

                // The image is expected to be the first file entry in the archive.
                var ze = (from ZipEntry zipEntry in zipFile
                          where zipEntry.IsFile
                          select zipEntry).FirstOrDefault();

                if (ze == null)
                {
                    // NOTE(review): readfail2 skips _diskAccess.Close(), so the physical
                    // handle opened above is never closed on this path - confirm intended.
                    LogMsg(Resources.Disk_WriteDrive_Error_reading_zip_input_stream);
                    goto readfail2;
                }

                var zis = zipFile.GetInputStream(ze);

                uncompressedlength = ze.Size;

                fs = zis;
                break;

            case EnumCompressionType.Gzip:
                var gzis = new GZipInputStream(basefs)
                {
                    IsStreamOwner = true
                };

                // NOTE(review): Length on a gzip *input* stream is not supported by all
                // stream implementations - confirm this does not throw.
                uncompressedlength = gzis.Length;

                fs = gzis;
                break;

            case EnumCompressionType.Targzip:
                var gzos = new GZipInputStream(basefs)
                {
                    IsStreamOwner = true
                };

                var tis = new TarInputStream(gzos);

                // Skip directory entries; the first file entry is taken as the image.
                // NOTE(review): a null from GetNextEntry (empty/dir-only archive) would
                // throw NullReferenceException here - confirm.
                TarEntry tarEntry;
                do
                {
                    tarEntry = tis.GetNextEntry();
                } while (tarEntry.IsDirectory);

                uncompressedlength = tarEntry.Size;

                fs = tis;
                break;

            case EnumCompressionType.XZ:
                var xzs = new XZInputStream(basefs);

                uncompressedlength = xzs.Length;

                fs = xzs;
                break;

            default:

                // No compression - direct to file stream
                fs = basefs;
                uncompressedlength = fs.Length;

                break;
        }

        var bufferOffset = 0;
        using (var br = new BinaryReader(fs))
        {
            while (offset < uncompressedlength && !IsCancelling)
            {
                // Note: There's a problem writing certain lengths to the underlying physical drive.
                //       This appears when we try to read from a compressed stream as it gives us
                //       "strange" lengths which then fail to be written via Writefile() so try to build
                //       up a decent block of bytes here...
                int readBytes;
                do
                {
                    readBytes = br.Read(buffer, bufferOffset, buffer.Length - bufferOffset);
                    bufferOffset += readBytes;
                } while (bufferOffset < Globals.MaxBufferSize && readBytes != 0);

                int wroteBytes;
                var bytesToWrite = bufferOffset;
                var trailingBytes = 0;

                // Assume that the underlying physical drive will at least accept powers of two!
                if (!IsPowerOfTwo((ulong)bufferOffset))
                {
                    // Find highest bit (32-bit max)
                    var highBit = 31;
                    for (; ((bufferOffset & (1 << highBit)) == 0) && highBit >= 0; highBit--)
                    {
                    }

                    // Work out trailing bytes after last power of two
                    var lastPowerOf2 = 1 << highBit;

                    bytesToWrite = lastPowerOf2;
                    trailingBytes = bufferOffset - lastPowerOf2;
                }

                if (_diskAccess.Write(buffer, bytesToWrite, out wroteBytes) < 0)
                {
                    LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive__ + Marshal.GetHRForLastWin32Error());
                    goto readfail1;
                }

                if (wroteBytes != bytesToWrite)
                {
                    LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive___past_EOF_);
                    goto readfail1;
                }

                // Move trailing bytes up - Todo: Suboptimal
                if (trailingBytes > 0)
                {
                    Buffer.BlockCopy(buffer, bufferOffset - trailingBytes, buffer, 0, trailingBytes);
                    bufferOffset = trailingBytes;
                }
                else
                {
                    bufferOffset = 0;
                }

                offset += (uint)wroteBytes;

                var percentDone = (int)(100 * offset / uncompressedlength);
                var tsElapsed = DateTime.Now.Subtract(dtStart);
                var bytesPerSec = offset / tsElapsed.TotalSeconds;

                Progress(percentDone);
                LogMsg(Resources.Disk_WriteDrive_Wrote + @": " + (offset / Globals.MbModifier) + @" / " +
                       (uncompressedlength / Globals.MbModifier) + " MB; " +
                       string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " +
                       Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss"));
            }
        }

        // NOTE(review): fs is an *input* stream on this (write) path, so none of
        // these output-stream branches can ever match - dead code; confirm before removing.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        else if (fs is TarOutputStream)
        {
            ((TarOutputStream)fs).CloseEntry();
            fs.Close();
        }
        else if (fs is GZipOutputStream)
        {
            fs.Close();
        }
        else if (fs is XZOutputStream)
        {
            fs.Close();
        }
    }
    errored = false;

    if (removeAfter && !IsCancelling)
    {
        _diskAccess.UnmountDrive();
    }

readfail1:
    _diskAccess.Close();
readfail2:
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
    {
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    }
    else
    {
        LogMsg(Resources.Disk_WriteDrive_Wrote + @" " + offset + @" " +
               Resources.Disk_WriteDrive_bytes + @". " +
               Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    }
    Progress(0);
    return(!errored);
}
/// <summary>
/// Read data direct from drive to file
/// </summary>
/// <param name="driveLetter">Logical drive letter of the source volume.</param>
/// <param name="fileName">Destination image file (created/overwritten).</param>
/// <param name="eCompType">Compression to apply to the output (Zip, Gzip, Targzip, XZ or none).</param>
/// <param name="bUseMBR">When true, parse the MBR in the first sector and truncate the read to the partitioned size.</param>
/// <returns>true in all cases, even after a read failure via readfail1 - NOTE(review): callers cannot detect failure from the return value.</returns>
public bool ReadDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool bUseMBR)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    //
    // Map to physical drive
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if (string.IsNullOrEmpty(physicalDrive))
    {
        // NOTE(review): UnlockDrive before any successful LockDrive - presumably a no-op; confirm.
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return(false);
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if (driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return(false);
    }

    // May be reduced below when bUseMBR finds a smaller partitioned size.
    var readSize = driveSize;

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return(false);
    }

    //
    // Start doing the read
    //
    var buffer = new byte[Globals.MaxBufferSize];
    var offset = 0L;

    using (var basefs = (Stream) new FileStream(fileName, FileMode.Create, FileAccess.Write))
    {
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zfs = new ZipOutputStream(basefs);

                // Default to middle of the range compression
                zfs.SetLevel(Globals.CompressionLevel);

                // Entry name is the output file name without its ".zip" suffix.
                var fi = new FileInfo(fileName);
                var entryName = fi.Name;
                entryName = entryName.ToLower().Replace(".zip", "");
                entryName = ZipEntry.CleanName(entryName);
                var zipEntry = new ZipEntry(entryName)
                {
                    DateTime = fi.LastWriteTime
                };
                zfs.IsStreamOwner = true;

                // Todo: Consider whether size needs setting for older utils ?

                zfs.PutNextEntry(zipEntry);

                fs = zfs;
                break;

            case EnumCompressionType.Gzip:
                // NOTE(review): variable is named like an input stream but is a
                // GZipOutputStream - naming only, behavior unaffected.
                var gzis = new GZipOutputStream(basefs);
                gzis.SetLevel(Globals.CompressionLevel);
                gzis.IsStreamOwner = true;

                fs = gzis;
                break;

            case EnumCompressionType.Targzip:
                // Tar wrapped in gzip. The tar header is deliberately deferred to
                // the first pass of the read loop, after readSize is final.
                var gzos = new GZipOutputStream(basefs);
                gzos.SetLevel(Globals.CompressionLevel);
                gzos.IsStreamOwner = true;

                var tos = new TarOutputStream(gzos);

                fs = tos;
                break;

            case EnumCompressionType.XZ:
                var xzs = new XZOutputStream(basefs);

                fs = xzs;
                break;

            default:

                // No compression - direct to file stream
                fs = basefs;

                break;
        }

        while (offset < readSize && !IsCancelling)
        {
            // NOTE: If we provide a buffer that extends past the end of the physical device ReadFile() doesn't
            //       seem to do a partial read. Deal with this by reading the remaining bytes at the end of the
            //       drive if necessary
            var readMaxLength =
                (int)
                ((((ulong)readSize - (ulong)offset) < (ulong)buffer.Length)
                     ? ((ulong)readSize - (ulong)offset)
                     : (ulong)buffer.Length);

            int readBytes;
            if (_diskAccess.Read(buffer, readMaxLength, out readBytes) < 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive__ + Marshal.GetHRForLastWin32Error());
                goto readfail1;
            }

            if (readBytes == 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive___past_EOF_);
                goto readfail1;
            }

            // Check MBR
            if (bUseMBR && offset == 0)
            {
                var truncatedSize = ParseMBRForSize(buffer);

                if (truncatedSize > driveSize)
                {
                    LogMsg(Resources.Disk_ReadDrive_Problem_with_filesystem__It_reports_it_is_larger_than_the_disk_);
                    goto readfail1;
                }

                if (truncatedSize == 0)
                {
                    LogMsg(Resources.Disk_ReadDrive_No_valid_partitions_on_drive);
                    goto readfail1;
                }

                readSize = truncatedSize;
            }

            if (offset == 0)
            {
                switch (eCompType)
                {
                    case EnumCompressionType.Targzip:
                        // Deferred tar header - written once readSize is final.
                        var fi = new FileInfo(fileName);
                        var entryName = fi.Name;
                        entryName = entryName.ToLower().Replace(".tar.gz", "");
                        entryName = entryName.ToLower().Replace(".tgz", "");
                        var tarEntry = TarEntry.CreateTarEntry(entryName);
                        tarEntry.Size = readSize;
                        tarEntry.ModTime = DateTime.SpecifyKind(fi.LastWriteTime, DateTimeKind.Utc);
                        ((TarOutputStream)fs).PutNextEntry(tarEntry);
                        break;
                }
            }

            fs.Write(buffer, 0, readBytes);

            offset += (uint)readBytes;

            var percentDone = (int)(100 * offset / readSize);
            var tsElapsed = DateTime.Now.Subtract(dtStart);
            var bytesPerSec = offset / tsElapsed.TotalSeconds;

            Progress(percentDone);
            LogMsg(Resources.Disk_ReadDrive_Read + @": " + (offset / Globals.MbModifier) + @" / " +
                   (readSize / Globals.MbModifier) + @" MB " + @"(" + Resources.Disk_ReadDrive_Physical + @": " +
                   (driveSize / Globals.MbModifier) + " MB); " +
                   string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " +
                   Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss"));
        }

        // Flush/close the compression wrappers so footers are written before the
        // enclosing using disposes basefs.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        else if (fs is TarOutputStream)
        {
            ((TarOutputStream)fs).CloseEntry();
            fs.Close();
        }
        else if (fs is GZipOutputStream)
        {
            fs.Close();
        }
        else if (fs is XZOutputStream)
        {
            fs.Close();
        }
    }

readfail1:
    _diskAccess.Close();
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
    {
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    }
    else
    {
        LogMsg(Resources.Disk_ReadDrive_All_Done_Read + @" " + offset + @" " +
               Resources.Disk_WriteDrive_bytes + @". " +
               Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    }
    Progress(0);
    return(true);
}
/// <summary>
/// Infers the image compression format from the file name's extension and
/// stores it in <c>_eCompType</c>: ".tar.gz"/".tgz" → Targzip, ".gz" → Gzip,
/// ".zip" → Zip, ".xz" → XZ, anything else → None.
/// </summary>
/// <param name="file">File name or path whose extension is examined.</param>
private void DefineCompressionType(string file)
{
    // Ordinal, case-insensitive comparison instead of repeated ToLower()
    // calls: avoids one string allocation per test and culture-sensitive
    // casing surprises (e.g. the Turkish dotless-I problem).
    if (file.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase) ||
        file.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.Targzip;
    }
    else if (file.EndsWith(".gz", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.Gzip;
    }
    else if (file.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.Zip;
    }
    else if (file.EndsWith(".xz", StringComparison.OrdinalIgnoreCase))
    {
        _eCompType = EnumCompressionType.XZ;
    }
    else
    {
        _eCompType = EnumCompressionType.None;
    }
}
/// <summary>
/// Writes an image file to the physical drive behind the given logical drive
/// letter, transparently decompressing Zip/Gzip/Tar-gzip/XZ sources.
/// </summary>
/// <param name="driveLetter">Logical drive letter of the target volume.</param>
/// <param name="fileName">Path to the source image file; must exist.</param>
/// <param name="eCompType">Compression format of the source image.</param>
/// <param name="removeAfter">When true and not cancelled, unmounts the drive after a successful write.</param>
/// <returns>true when the write loop completed without error; false otherwise.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="fileName"/> does not exist.</exception>
public bool WriteDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool removeAfter)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    if(!File.Exists(fileName))
        throw new ArgumentException(fileName + Resources.Disk_WriteDrive__doesn_t_exist);

    //
    // Get physical drive partition for logical partition
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if (string.IsNullOrEmpty(physicalDrive))
    {
        // NOTE(review): UnlockDrive before any successful LockDrive - presumably a no-op; confirm.
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return false;
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if (driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    var buffer = new byte[Globals.MaxBufferSize];
    long offset = 0;

    var fileLength = new FileInfo(fileName).Length;

    // Best guess at the total bytes to write; corrected per-format below.
    var uncompressedlength = fileLength;

    var errored = true;
    using (var basefs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zipFile = new ZipFile(basefs);

                // The image is expected to be the first file entry in the archive.
                var ze = (from ZipEntry zipEntry in zipFile
                          where zipEntry.IsFile
                          select zipEntry).FirstOrDefault();

                if(ze == null)
                {
                    // NOTE(review): readfail2 skips _diskAccess.Close(), so the physical
                    // handle opened above is never closed on this path - confirm intended.
                    LogMsg(Resources.Disk_WriteDrive_Error_reading_zip_input_stream);
                    goto readfail2;
                }

                var zis = zipFile.GetInputStream(ze);

                uncompressedlength = ze.Size;

                fs = zis;
                break;

            case EnumCompressionType.Gzip:
                var gzis = new GZipInputStream(basefs) {IsStreamOwner = true};

                // NOTE(review): Length on a gzip *input* stream is not supported by all
                // stream implementations - confirm this does not throw.
                uncompressedlength = gzis.Length;

                fs = gzis;
                break;

            case EnumCompressionType.Targzip:
                var gzos = new GZipInputStream(basefs) {IsStreamOwner = true};

                var tis = new TarInputStream(gzos);

                // Skip directory entries; the first file entry is taken as the image.
                // NOTE(review): a null from GetNextEntry (empty/dir-only archive) would
                // throw NullReferenceException here - confirm.
                TarEntry tarEntry;
                do
                {
                    tarEntry = tis.GetNextEntry();
                } while (tarEntry.IsDirectory);

                uncompressedlength = tarEntry.Size;

                fs = tis;
                break;

            case EnumCompressionType.XZ:
                var xzs = new XZInputStream(basefs);

                uncompressedlength = xzs.Length;

                fs = xzs;
                break;

            default:

                // No compression - direct to file stream
                fs = basefs;
                uncompressedlength = fs.Length;

                break;
        }

        var bufferOffset = 0;
        using (var br = new BinaryReader(fs))
        {
            while (offset < uncompressedlength && !IsCancelling)
            {
                // Note: There's a problem writing certain lengths to the underlying physical drive.
                //       This appears when we try to read from a compressed stream as it gives us
                //       "strange" lengths which then fail to be written via Writefile() so try to build
                //       up a decent block of bytes here...
                int readBytes;
                do
                {
                    readBytes = br.Read(buffer, bufferOffset, buffer.Length - bufferOffset);
                    bufferOffset += readBytes;
                } while (bufferOffset < Globals.MaxBufferSize && readBytes != 0);

                int wroteBytes;
                var bytesToWrite = bufferOffset;
                var trailingBytes = 0;

                // Assume that the underlying physical drive will at least accept powers of two!
                if(!IsPowerOfTwo((ulong)bufferOffset))
                {
                    // Find highest bit (32-bit max)
                    var highBit = 31;
                    for (; ((bufferOffset & (1 << highBit)) == 0) && highBit >= 0; highBit--)
                    {
                    }

                    // Work out trailing bytes after last power of two
                    var lastPowerOf2 = 1 << highBit;

                    bytesToWrite = lastPowerOf2;
                    trailingBytes = bufferOffset - lastPowerOf2;
                }

                if (_diskAccess.Write(buffer, bytesToWrite, out wroteBytes) < 0)
                {
                    LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive__ + Marshal.GetHRForLastWin32Error());
                    goto readfail1;
                }

                if (wroteBytes != bytesToWrite)
                {
                    LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive___past_EOF_);
                    goto readfail1;
                }

                // Move trailing bytes up - Todo: Suboptimal
                if (trailingBytes > 0)
                {
                    Buffer.BlockCopy(buffer, bufferOffset - trailingBytes, buffer, 0, trailingBytes);
                    bufferOffset = trailingBytes;
                }
                else
                {
                    bufferOffset = 0;
                }

                offset += (uint)wroteBytes;

                var percentDone = (int)(100 * offset / uncompressedlength);
                var tsElapsed = DateTime.Now.Subtract(dtStart);
                var bytesPerSec = offset / tsElapsed.TotalSeconds;

                Progress(percentDone);
                LogMsg(Resources.Disk_WriteDrive_Wrote + @": " + (offset / Globals.MbModifier) + @" / " +
                       (uncompressedlength / Globals.MbModifier) + " MB; " +
                       string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " +
                       Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss"));
            }
        }

        // NOTE(review): fs is an *input* stream on this (write) path, so none of
        // these output-stream branches can ever match - dead code; confirm before removing.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        else if (fs is TarOutputStream)
        {
            ((TarOutputStream)fs).CloseEntry();
            fs.Close();
        }
        else if (fs is GZipOutputStream)
        {
            fs.Close();
        }
        else if (fs is XZOutputStream)
        {
            fs.Close();
        }
    }
    errored = false;

    if (removeAfter && !IsCancelling)
        _diskAccess.UnmountDrive();

readfail1:
    _diskAccess.Close();
readfail2:
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    else
        LogMsg(Resources.Disk_WriteDrive_Wrote + @" " + offset + @" " +
               Resources.Disk_WriteDrive_bytes + @". " +
               Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    Progress(0);
    return !errored;
}
/// <summary>
/// Read data direct from drive to file
/// </summary>
/// <param name="driveLetter">Logical drive letter of the source volume.</param>
/// <param name="fileName">Destination image file (created/overwritten).</param>
/// <param name="eCompType">Compression to apply to the output (Zip, Gzip, Targzip, XZ or none).</param>
/// <param name="bUseMBR">When true, parse the MBR in the first sector and truncate the read to the partitioned size.</param>
/// <returns>true in all cases, even after a read failure via readfail1 - NOTE(review): callers cannot detect failure from the return value.</returns>
public bool ReadDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool bUseMBR)
{
    IsCancelling = false;

    var dtStart = DateTime.Now;

    //
    // Map to physical drive
    //
    var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter);
    if(string.IsNullOrEmpty(physicalDrive))
    {
        // NOTE(review): UnlockDrive before any successful LockDrive - presumably a no-op; confirm.
        LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Lock logical drive
    //
    var success = _diskAccess.LockDrive(driveLetter);
    if (!success)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive);
        return false;
    }

    //
    // Get drive size
    //
    var driveSize = _diskAccess.GetDriveSize(physicalDrive);
    if(driveSize <= 0)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size);
        _diskAccess.UnlockDrive();
        return false;
    }

    // May be reduced below when bUseMBR finds a smaller partitioned size.
    var readSize = driveSize;

    //
    // Open the physical drive
    //
    var physicalHandle = _diskAccess.Open(physicalDrive);
    if (physicalHandle == null)
    {
        LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive);
        _diskAccess.UnlockDrive();
        return false;
    }

    //
    // Start doing the read
    //
    var buffer = new byte[Globals.MaxBufferSize];
    var offset = 0L;

    using(var basefs = (Stream)new FileStream(fileName, FileMode.Create, FileAccess.Write))
    {
        Stream fs;
        switch (eCompType)
        {
            case EnumCompressionType.Zip:
                var zfs = new ZipOutputStream(basefs);

                // Default to middle of the range compression
                zfs.SetLevel(Globals.CompressionLevel);

                // Entry name is the output file name without its ".zip" suffix.
                var fi = new FileInfo(fileName);
                var entryName = fi.Name;
                entryName = entryName.ToLower().Replace(".zip", "");
                entryName = ZipEntry.CleanName(entryName);
                var zipEntry = new ZipEntry(entryName) {DateTime = fi.LastWriteTime};
                zfs.IsStreamOwner = true;

                // Todo: Consider whether size needs setting for older utils ?

                zfs.PutNextEntry(zipEntry);

                fs = zfs;
                break;

            case EnumCompressionType.Gzip:
                // NOTE(review): variable is named like an input stream but is a
                // GZipOutputStream - naming only, behavior unaffected.
                var gzis = new GZipOutputStream(basefs);
                gzis.SetLevel(Globals.CompressionLevel);
                gzis.IsStreamOwner = true;

                fs = gzis;
                break;

            case EnumCompressionType.Targzip:
                // Tar wrapped in gzip. The tar header is deliberately deferred to
                // the first pass of the read loop, after readSize is final.
                var gzos = new GZipOutputStream(basefs);
                gzos.SetLevel(Globals.CompressionLevel);
                gzos.IsStreamOwner = true;

                var tos = new TarOutputStream(gzos);

                fs = tos;
                break;

            case EnumCompressionType.XZ:
                var xzs = new XZOutputStream(basefs);

                fs = xzs;
                break;

            default:

                // No compression - direct to file stream
                fs = basefs;

                break;
        }

        while (offset < readSize && !IsCancelling)
        {
            // NOTE: If we provide a buffer that extends past the end of the physical device ReadFile() doesn't
            //       seem to do a partial read. Deal with this by reading the remaining bytes at the end of the
            //       drive if necessary
            var readMaxLength =
                (int)
                ((((ulong) readSize - (ulong) offset) < (ulong) buffer.Length)
                     ? ((ulong) readSize - (ulong) offset)
                     : (ulong) buffer.Length);

            int readBytes;
            if (_diskAccess.Read(buffer, readMaxLength, out readBytes) < 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive__ + Marshal.GetHRForLastWin32Error());
                goto readfail1;
            }

            if (readBytes == 0)
            {
                LogMsg(Resources.Disk_ReadDrive_Error_reading_data_from_drive___past_EOF_);
                goto readfail1;
            }

            // Check MBR
            if (bUseMBR && offset == 0)
            {
                var truncatedSize = ParseMBRForSize(buffer);

                if(truncatedSize > driveSize)
                {
                    LogMsg(Resources.Disk_ReadDrive_Problem_with_filesystem__It_reports_it_is_larger_than_the_disk_);
                    goto readfail1;
                }

                if(truncatedSize == 0)
                {
                    LogMsg(Resources.Disk_ReadDrive_No_valid_partitions_on_drive);
                    goto readfail1;
                }

                readSize = truncatedSize;
            }

            if(offset == 0)
            {
                switch (eCompType)
                {
                    case EnumCompressionType.Targzip:
                        // Deferred tar header - written once readSize is final.
                        var fi = new FileInfo(fileName);
                        var entryName = fi.Name;
                        entryName = entryName.ToLower().Replace(".tar.gz", "");
                        entryName = entryName.ToLower().Replace(".tgz", "");
                        var tarEntry = TarEntry.CreateTarEntry(entryName);
                        tarEntry.Size = readSize;
                        tarEntry.ModTime = DateTime.SpecifyKind(fi.LastWriteTime, DateTimeKind.Utc);
                        ((TarOutputStream) fs).PutNextEntry(tarEntry);
                        break;
                }
            }

            fs.Write(buffer, 0, readBytes);

            offset += (uint) readBytes;

            var percentDone = (int) (100*offset/readSize);
            var tsElapsed = DateTime.Now.Subtract(dtStart);
            var bytesPerSec = offset/tsElapsed.TotalSeconds;

            Progress(percentDone);
            LogMsg(Resources.Disk_ReadDrive_Read + @": " + (offset / Globals.MbModifier) + @" / " +
                   (readSize / Globals.MbModifier) + @" MB " + @"(" + Resources.Disk_ReadDrive_Physical + @": " +
                   (driveSize / Globals.MbModifier) + " MB); " +
                   string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " +
                   Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss"));
        }

        // Flush/close the compression wrappers so footers are written before the
        // enclosing using disposes basefs.
        if (fs is ZipOutputStream)
        {
            ((ZipOutputStream)fs).CloseEntry();
            ((ZipOutputStream)fs).Close();
        }
        else if (fs is TarOutputStream)
        {
            ((TarOutputStream) fs).CloseEntry();
            fs.Close();
        }
        else if (fs is GZipOutputStream)
        {
            fs.Close();
        }
        else if (fs is XZOutputStream)
        {
            fs.Close();
        }
    }

readfail1:
    _diskAccess.Close();
    _diskAccess.UnlockDrive();

    var tstotalTime = DateTime.Now.Subtract(dtStart);
    if (IsCancelling)
        LogMsg(Resources.Disk_WriteDrive_Cancelled);
    else
        LogMsg(Resources.Disk_ReadDrive_All_Done_Read + @" " + offset + @" " +
               Resources.Disk_WriteDrive_bytes + @". " +
               Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss"));
    Progress(0);
    return true;
}