// Static initializer: unpacks the embedded gzipped TAR resource bundle.
// XML entries are parsed into resourceMapping; every other non-empty,
// named entry is stored as raw bytes in otherResources.
static Resources()
{
    using ( MemoryStream memStream = new MemoryStream( FFTPatcher.TextEditor.Properties.Resources.Resources_tar, false ) )
    using ( GZipInputStream gzStream = new GZipInputStream( memStream ) )
    using ( TarInputStream tarStream = new TarInputStream( gzStream ) )
    {
        TarEntry entry;
        while ( ( entry = tarStream.GetNextEntry() ) != null )
        {
            // Skip empty or unnamed entries (e.g. pure directory markers).
            if ( entry.Size == 0 || string.IsNullOrEmpty( entry.Name ) )
            {
                continue;
            }

            if ( entry.Name.EndsWith( ".xml" ) )
            {
                XmlDocument doc = new XmlDocument();
                doc.Load( tarStream );
                resourceMapping[entry.Name] = doc;
            }
            else
            {
                byte[] contents = new byte[entry.Size];
                ICSharpCode.SharpZipLib.Core.StreamUtils.ReadFully( tarStream, contents );
                otherResources[entry.Name] = contents;
            }
        }
    }
}
/// <summary>
/// Adds a layer around a stream that isolates the <code>data.tar.gz</code> file from a TAR stream.
/// </summary>
/// <param name="stream">The TAR stream.</param>
/// <returns>A stream representing the <code>data.tar.gz</code> data.</returns>
/// <exception cref="IOException">The compressed stream contains invalid data.</exception>
private static Stream GetPartialStream(Stream stream)
{
    try
    {
        var tar = new TarInputStream(stream);
        while (true)
        {
            var entry = tar.GetNextEntry();
            if (entry == null) throw new IOException(Resources.RubyGemInvalid);
            // Position the tar stream at the embedded archive and hand it back.
            if (entry.Name == "data.tar.gz") return tar;
        }
    }
    #region Error handling
    // Wrap exception since only certain exception types are allowed.
    // A single filtered catch replaces three byte-identical catch blocks.
    catch (Exception ex) when (ex is SharpZipBaseException ||
                               ex is InvalidDataException ||
                               ex is ArgumentOutOfRangeException)
    {
        throw new IOException(Resources.ArchiveInvalid, ex);
    }
    #endregion
}
// Static initializer: scans the embedded gzipped TAR resource bundle for
// "FFTPackFiles.xml" and parses it into resourcesDoc.
static FFTPack()
{
    using ( MemoryStream memStream = new MemoryStream( PatcherLib.Properties.Resources.Resources_tar, false ) )
    using ( GZipInputStream gzStream = new GZipInputStream( memStream ) )
    using ( TarInputStream tarStream = new TarInputStream( gzStream ) )
    {
        TarEntry entry;
        while ( ( entry = tarStream.GetNextEntry() ) != null )
        {
            if ( entry.Name == "FFTPackFiles.xml" )
            {
                XmlDocument parsed = new XmlDocument();
                parsed.Load( tarStream );
                resourcesDoc = parsed;
            }
        }
    }
}
// Static initializer: parses every non-empty, named entry of the embedded
// gzipped TAR bundle as an XML document and stores it in resourceMapping.
static Resources()
{
    using ( MemoryStream memStream = new MemoryStream( FFTPatcher.TextEditor.Properties.Resources.Resources_tar, false ) )
    using ( GZipInputStream gzStream = new GZipInputStream( memStream ) )
    using ( TarInputStream tarStream = new TarInputStream( gzStream ) )
    {
        for ( TarEntry entry = tarStream.GetNextEntry(); entry != null; entry = tarStream.GetNextEntry() )
        {
            if ( entry.Size != 0 && !string.IsNullOrEmpty( entry.Name ) )
            {
                XmlDocument doc = new XmlDocument();
                doc.Load( tarStream );
                resourceMapping[entry.Name] = doc;
            }
        }
    }
}
/// <summary>
/// Downloads the archive at <c>Url</c>, verifies its SHA-256 hash,
/// extracts the bzip2'd TAR into <c>Root</c>, then records the hash
/// in <c>aegisub.hash</c> so subsequent runs can skip the update.
/// </summary>
/// <returns><c>true</c> on success or when no update is needed.</returns>
public override bool Execute()
{
    if (!NeedsUpdate())
    {
        return true;
    }

    try
    {
        var ms = new MemoryStream();
        // Dispose the client and download stream once buffered
        // (the original leaked both).
        using (var client = new System.Net.WebClient())
        using (var downloadStream = client.OpenRead(Url))
        {
            downloadStream.CopyTo(ms);
        }

        ms.Seek(0, SeekOrigin.Begin);
        byte[] hash;
        using (var sha = new SHA256Managed())
        {
            hash = sha.ComputeHash(ms);
        }
        if (BitConverter.ToString(hash).Replace("-", "").ToLower() != this.Hash)
        {
            Log.LogError("Got wrong hash for {0}", Url);
            return false;
        }

        try
        {
            Directory.Delete(Root, true);
        }
        catch (DirectoryNotFoundException)
        {
            // Obviously not an issue
        }

        ms.Seek(0, SeekOrigin.Begin);
        // using-blocks ensure the decompression streams are released
        // even if extraction throws.
        using (var bzStream = new ICSharpCode.SharpZipLib.BZip2.BZip2InputStream(ms))
        using (var tarStream = new ICSharpCode.SharpZipLib.Tar.TarInputStream(bzStream))
        {
            while (true)
            {
                TarEntry entry = tarStream.GetNextEntry();
                if (entry == null)
                {
                    break;
                }
                ExtractEntry(Root, entry, tarStream);
            }
        }

        File.WriteAllText(Path.Combine(Root, "aegisub.hash"), Hash);
        return true;
    }
    catch (Exception e)
    {
        Log.LogErrorFromException(e);
        return false;
    }
}
/// <summary>
/// Decompresses <paramref name="CompressedData"/> and extracts the resulting
/// TAR archive into <paramref name="OutputPath"/>. Directory entries are
/// skipped; parent directories are created as needed.
/// </summary>
public override void Extract(Stream CompressedData, string OutputPath)
{
    using (TarInputStream tarFile = new TarInputStream(inputDecompressor.Decompress(CompressedData)))
    {
        if (!OutputPath.EndsWith("" + Path.DirectorySeparatorChar))
            OutputPath = OutputPath + Path.DirectorySeparatorChar;

        while (true)
        {
            TarEntry entry = tarFile.GetNextEntry();
            if (entry == null)
                break;

            string outputFile = OutputPath + entry.Name;
            if (entry.IsDirectory)
                continue;

            EnsureDirectoryExists(Path.GetDirectoryName(outputFile));
            // File.Create truncates any existing file and the using-block closes
            // the handle; the original used File.OpenWrite, which neither
            // truncates (leaving stale trailing bytes) nor was ever disposed.
            using (FileStream output = File.Create(outputFile))
            {
                CopyStream(tarFile, output);
            }
        }
    }
}
// Streams the freedb dump (tar.bz2), parses each per-disk file and feeds it
// to addToBatch; returns the running stopwatch so the caller can report timing.
private static Stopwatch ParseDisks(Action<Disk> addToBatch)
{
    int processed = 0;
    var parser = new Parser();
    var fileBuffer = new byte[1024 * 1024]; // more than big enough for all files
    var watch = Stopwatch.StartNew();

    using (var bz2 = new BZip2InputStream(File.Open(@"D:\Data\freedb-complete-20120101.tar.bz2", FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        for (TarEntry entry = tar.GetNextEntry(); entry != null; entry = tar.GetNextEntry())
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Read the whole entry; Read may return short counts, so loop.
            int total = 0;
            int lastRead;
            while ((lastRead = tar.Read(fileBuffer, total, ((int)entry.Size) - total)) != 0)
            {
                total += lastRead;
            }

            // we do it in this fashion to have the stream reader detect the BOM / unicode / other stuff
            // so we can read the values properly
            var fileText = new StreamReader(new MemoryStream(fileBuffer, 0, total)).ReadToEnd();
            try
            {
                var disk = parser.Parse(fileText);
                addToBatch(disk);
                if (processed++ % BatchSize == 0)
                    Console.Write("\r{0} {1:#,#} {2} ", entry.Name, processed, watch.Elapsed);
            }
            catch (Exception e)
            {
                Console.WriteLine();
                Console.WriteLine(entry.Name);
                Console.WriteLine(e);
                return watch;
            }
        }
    }
    return watch;
}
/// <summary>
/// Verifies that a tar header checksum round-trips as valid, and that a
/// corrupted header byte is detected as a checksum failure (TarException).
/// </summary>
public void Checksum()
{
    MemoryStream ms = new MemoryStream();
    TarOutputStream tarOut = new TarOutputStream(ms);
    DateTime modTime = DateTime.Now;
    TarEntry entry = TarEntry.CreateTarEntry("TestEntry");
    entry.TarHeader.Mode = 12345;
    tarOut.PutNextEntry(entry);
    tarOut.Close();

    // ToArray() copies only the bytes actually written; the original copied
    // GetBuffer(), whose length includes unused trailing buffer capacity.
    byte[] archive = ms.ToArray();

    MemoryStream ms2 = new MemoryStream();
    ms2.Write(archive, 0, archive.Length);
    ms2.Seek(0, SeekOrigin.Begin);
    TarInputStream tarIn = new TarInputStream(ms2);
    TarEntry nextEntry = tarIn.GetNextEntry();
    Assert.IsTrue(nextEntry.TarHeader.IsChecksumValid, "Checksum should be valid");

    MemoryStream ms3 = new MemoryStream();
    ms3.Write(archive, 0, archive.Length);
    ms3.Seek(0, SeekOrigin.Begin);
    // Overwrite the first header byte so the stored checksum no longer matches.
    ms3.Write(new byte[1] { 34 }, 0, 1);
    ms3.Seek(0, SeekOrigin.Begin);
    tarIn = new TarInputStream(ms3);
    bool trapped = false;
    try
    {
        nextEntry = tarIn.GetNextEntry();
    }
    catch (TarException)
    {
        trapped = true;
    }
    Assert.IsTrue(trapped, "Checksum should be invalid");
}
/// <summary>
/// Streams the freedb dump (tar.bz2), parsing each per-disk file and storing
/// the result through the supplied bulk-insert operation. Parse failures are
/// logged to the console and skipped.
/// </summary>
private static void ParseDisks(BulkInsertOperation insert)
{
    var parser = new Parser();
    // More than big enough for all files in the dump.
    var buffer = new byte[1024 * 1024];

    using (var bz2 = new BZip2InputStream(File.Open(@"D:\Scratch\freedb-complete-20150101.tar.bz2", FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        TarEntry entry;
        while ((entry = tar.GetNextEntry()) != null)
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Read the whole entry; Read may return short counts, so loop.
            var readSoFar = 0;
            int read;
            while ((read = tar.Read(buffer, readSoFar, ((int)entry.Size) - readSoFar)) != 0)
            {
                readSoFar += read;
            }

            // we do it in this fashion to have the stream reader detect the BOM / unicode / other stuff
            // so we can read the values properly
            var fileText = new StreamReader(new MemoryStream(buffer, 0, readSoFar)).ReadToEnd();
            try
            {
                var disk = parser.Parse(fileText);
                insert.Store(disk);
            }
            catch (Exception e)
            {
                Console.WriteLine();
                Console.WriteLine(entry.Name);
                Console.WriteLine(e);
            }
        }
    }
}
/// <summary>
/// Downloads the archive at <c>Url</c>, verifies its SHA-256 hash,
/// extracts the bzip2'd TAR into <c>Root</c>, then records the hash
/// in <c>aegisub.hash</c> so subsequent runs can skip the update.
/// </summary>
/// <returns><c>true</c> on success or when no update is needed.</returns>
public override bool Execute()
{
    if (!NeedsUpdate())
        return true;

    try
    {
        var ms = new MemoryStream();
        // Dispose the client and download stream once buffered
        // (the original leaked both).
        using (var client = new System.Net.WebClient())
        using (var downloadStream = client.OpenRead(Url))
        {
            downloadStream.CopyTo(ms);
        }

        ms.Seek(0, SeekOrigin.Begin);
        byte[] hash;
        using (var sha = new SHA256Managed())
        {
            hash = sha.ComputeHash(ms);
        }
        if (BitConverter.ToString(hash).Replace("-", "").ToLower() != this.Hash)
        {
            Log.LogError("Got wrong hash for {0}", Url);
            return false;
        }

        try
        {
            Directory.Delete(Root, true);
        }
        catch (DirectoryNotFoundException)
        {
            // Obviously not an issue
        }

        ms.Seek(0, SeekOrigin.Begin);
        // using-blocks ensure the decompression streams are released
        // even if extraction throws.
        using (var bzStream = new ICSharpCode.SharpZipLib.BZip2.BZip2InputStream(ms))
        using (var tarStream = new ICSharpCode.SharpZipLib.Tar.TarInputStream(bzStream))
        {
            while (true)
            {
                TarEntry entry = tarStream.GetNextEntry();
                if (entry == null)
                    break;
                ExtractEntry(Root, entry, tarStream);
            }
        }

        File.WriteAllText(Path.Combine(Root, "aegisub.hash"), Hash);
        return true;
    }
    catch (Exception e)
    {
        Log.LogErrorFromException(e);
        return false;
    }
}
/// <summary>
/// Extracts a gzipped TAR split archive into the destination directory,
/// reporting progress as the underlying stream advances.
/// </summary>
/// <param name="fichero">The source archive file.</param>
/// <param name="dirDest">The destination directory.</param>
public void Unir(FileInfo fichero, DirectoryInfo dirDest)
{
    DalleStream dstream = new DalleStream (fichero);
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    OnProgress (0, dstream.Length);

    // using-blocks close the tar/gzip streams even on error
    // (the original only called tarStream.Close() on the success path).
    using (Stream gzipStream = new GZipStream (dstream, CompressionMode.Decompress))
    using (TarInputStream tarStream = new TarInputStream (gzipStream))
    {
        TarEntry tarEntry = null;
        OnProgress (0, 1);
        // Note: TarInputStream silently skips some entries, so the reported
        // progress will not be completely accurate.
        while ((tarEntry = tarStream.GetNextEntry ()) != null) {
            if (tarEntry.IsDirectory) {
                continue;
            }
            Stream entrada = new SizeLimiterStream (tarStream, tarEntry.Size);
            Stream salida = UtilidadesFicheros.CreateWriter (dirDest.FullName + Path.DirectorySeparatorChar + tarEntry.Name);
            try {
                int leidos = 0;
                while ((leidos = entrada.Read (buffer, 0, buffer.Length)) > 0) {
                    salida.Write (buffer, 0, leidos);
                    // +1 avoids reaching 100% before the extraction is done.
                    OnProgress (dstream.Position, dstream.Length + 1);
                }
            } finally {
                // Close the output even if reading fails (the original leaked it on error).
                salida.Close ();
            }
        }
    }
    OnProgress (1, 1);
}
// Appends one FILENAME metadata item for every entry in the TAR stream.
// Note: directory entries can be identified by a trailing slash in the name.
private static void GetTarMetadata(Stream s, List<MetadataItem> metadata)
{
    using (TarInputStream s2 = new TarInputStream(s))
    {
        for (TarEntry e = s2.GetNextEntry(); e != null; e = s2.GetNextEntry())
        {
            metadata.Add(new MetadataItem(MetadataType.FILENAME, e.Name));
        }
    }
}
// Parses an uploaded test-case archive (.zip, .tgz/.tar.gz, or .tar.bz2) into
// paired input/output byte arrays keyed by integer id, validates that inputs
// and outputs correspond one-to-one, and registers the resulting TestCases on
// the problem. Returns null (after recording a ModelState error, in Chinese)
// on any validation failure: no file, unsupported archive type, a DealEntry
// rejection, or mismatched input/output id sets.
// NOTE(review): each archive entry is buffered fully in memory before being
// handed to DealEntry; presumably DealEntry parses the id from the entry name
// and routes bytes into inputFiles/outputFiles — confirm against DealEntry.
// NOTE(review): for CF-type contests the created test cases start unavailable.
List<TestCase> AddTestCase(Contest contest, Problem problem, HttpPostedFileBase file) { if (file == null) { ModelState.AddModelError("File", "请选择文件"); return null; } Dictionary<int, byte[]> inputFiles = new Dictionary<int, byte[]>(); Dictionary<int, byte[]> outputFiles = new Dictionary<int, byte[]>(); if (new[] { "application/zip", "application/x-zip-compressed", "application/x-zip" }.Contains(file.ContentType) || file.ContentType == "application/octet-stream" && file.FileName.EndsWith(".zip", StringComparison.OrdinalIgnoreCase)) { using (ZipInputStream stream = new ZipInputStream(file.InputStream)) { ZipEntry entry; while ((entry = stream.GetNextEntry()) != null) { byte[] bytes; using (MemoryStream mem = new MemoryStream()) { stream.CopyTo(mem); bytes = mem.ToArray(); } if (!DealEntry(entry.Name, bytes, inputFiles, outputFiles)) { return null; } } } } else if (file.FileName.EndsWith(".tgz") || file.FileName.EndsWith(".tar.gz")) { using (GZipStream stream = new GZipStream(file.InputStream, CompressionMode.Decompress)) { using (TarInputStream tar = new TarInputStream(stream)) { TarEntry entry; while ((entry = tar.GetNextEntry()) != null) { byte[] bytes; using (MemoryStream mem = new MemoryStream()) { tar.CopyTo(mem); bytes = mem.ToArray(); } if (!DealEntry(entry.Name, bytes, inputFiles, outputFiles)) { return null; } } } } } else if (file.FileName.EndsWith(".tar.bz2")) { using (BZip2InputStream stream = new BZip2InputStream(file.InputStream)) { using (TarInputStream tar = new TarInputStream(stream)) { TarEntry entry; while ((entry = tar.GetNextEntry()) != null) { byte[] bytes; using (MemoryStream mem = new MemoryStream()) { tar.CopyTo(mem); bytes = mem.ToArray(); } if (!DealEntry(entry.Name, bytes, inputFiles, outputFiles)) { return null; } } } } } else { ModelState.AddModelError("File", "不支持的压缩文件类型"); return null; } if (!inputFiles.Keys.OrderBy(x => x).SequenceEqual(outputFiles.Keys.OrderBy(x => x))) { ModelState.AddModelError("File", "输入与输出文件没有一一对应"); return null; 
} var testCases = inputFiles.Keys.Select(id => new TestCase { Input = inputFiles[id], Data = outputFiles[id], MemoryLimit = DEFAULT_TEST_CASE_MEMORY_LIMIT, TimeLimit = DEFAULT_TEST_CASE_TIME_LIMIT, Available = contest.Type == Contest.ContestType.CF ? false : true }).ToList(); foreach (var t in testCases) { t.ID = problem.AddTestCase(t); } return testCases; }
// Extracts a TAR archive (optionally bz2/gzip/LZMA-compressed, detected by
// file extension) into dirDest, reporting progress per write. When the
// uncompressed size is known (gzip/LZMA), progress counts transferred bytes
// including 512-byte tar header/padding blocks; otherwise (bz2/plain tar) it
// falls back to the compressed-stream position over the file length.
protected override void _Unir (string fichero, string dirDest) { if (!File.Exists (fichero)) { return; } FileInfo fi = new FileInfo (fichero); long datosTotales = fi.Length; long uncompressedSize = 0; FileStream input = File.OpenRead (fichero); Stream input2 = input; if (fichero.ToLower ().EndsWith (".bz2") || fichero.ToLower ().EndsWith (".tbz2") || fichero.ToLower ().EndsWith (".tbz")) { // There is no immediate way to know the uncompressed size of a bz2 input2 = new BZip2InputStream (input); } else if (fichero.ToLower ().EndsWith (".gz") || fichero.ToLower ().EndsWith (".tgz")) { uncompressedSize = Dalle.Formatos.GZip.GZip.GetUncompressedSize (input); input2 = new GZipStream (input, CompressionMode.Decompress); } else if (fichero.ToLower ().EndsWith (".tar.lzma") || fichero.ToLower ().EndsWith ("tlz")) { input2 = new LZMAInputStream (input); uncompressedSize = ((LZMAInputStream)input2).UncompressedSize; } TarInputStream tarInput = new TarInputStream (input2); TarEntry tarEntry = null; byte[] buffer = new byte[Consts.BUFFER_LENGTH]; OnProgress (0, 1); long transferidos = 0; while ((tarEntry = tarInput.GetNextEntry ()) != null) { // Entry header size. // Note: TarInputStream silently ignores some entries, // so the progress will not be completely accurate. transferidos += 512; if (tarEntry.IsDirectory) { continue; } Stream entrada = new SizeLimiterStream (tarInput, tarEntry.Size); Stream salida = UtilidadesFicheros.CreateWriter (dirDest + Path.DirectorySeparatorChar + tarEntry.Name); int leidos = 0; while ((leidos = entrada.Read (buffer, 0, buffer.Length)) > 0) { salida.Write (buffer, 0, leidos); transferidos += leidos; if (uncompressedSize > 0) { OnProgress (transferidos, uncompressedSize); } else { OnProgress (input.Position, datosTotales); } } salida.Close (); transferidos += 512 - (tarEntry.Size % 512); } tarInput.Close (); OnProgress (1, 1); }
// Extracts every entry of the TAR stream into `desitination`, updating the
// form's label and progress bar via Invoke from this (worker) context.
// Directory entries are recreated fresh (an existing directory is deleted
// first); file entries are written in 32 KiB chunks. For entries larger than
// Int32.MaxValue the progress values are scaled down by 65536 so they fit the
// progress bar's Int32 range.
// NOTE(review): paths are built with "\\" concatenation and entry.Name taken
// verbatim from the archive — assumes trusted archives (no path traversal
// check) and Windows separators; confirm inputs are trusted.
/** * extract tar file * */ private void extract_tar(TarInputStream tar, string desitination) { delegateProgressBar increment_bar = new delegateProgressBar((int v) => { Cursor.Current = Cursors.WaitCursor; progressBar1.Value = v; }); ICSharpCode.SharpZipLib.Tar.TarEntry entry = tar.GetNextEntry(); while (entry != null) { if (entry.IsDirectory == true) { if (Directory.Exists(desitination + "\\" + entry.Name)) Directory.Delete(desitination + "\\" + entry.Name, true); if (!Directory.Exists(desitination + "\\" + entry.Name)) Directory.CreateDirectory(desitination + "\\" + entry.Name); entry = tar.GetNextEntry(); continue; } Invoke(new delegateUpdateLabel((string s) => { this.label2.Text = s; }), new Object[] { entry.Name + " extracting ..." }); Invoke(new delegateProgressBar((Int32 v) => { progressBar1.Value = 0; progressBar1.Minimum = 0; progressBar1.Maximum = v; }), new Object[] { (Int32.MaxValue < entry.Size) ? (Int32)(entry.Size / 65536) : (Int32)entry.Size }); using (FileStream dest = new FileStream(desitination + "\\" + entry.Name, FileMode.Create, FileAccess.Write)) { Int32 count = 0; Int32 write_total = 0; byte[] buffer = new byte[32768]; using (BinaryWriter br = new BinaryWriter(dest)) { while ((count = tar.Read(buffer, 0, 32768)) > 0) { br.Write(buffer, 0, count); write_total += count; if (Int32.MaxValue < entry.Size) { Object[] inc_arg = { (Int32)write_total / 65536 }; Invoke(increment_bar, inc_arg); } else { Object[] inc_arg = { write_total }; Invoke(increment_bar, inc_arg); } } br.Flush(); br.Close(); } dest.Close(); } entry = tar.GetNextEntry(); } }
// Task body: opens SrcFile, wraps it in the decompression stream selected by
// CompressionMethod (GZip, BZip2, or none), and extracts each TAR entry into
// DestinationDirectory via ExtractDirectory/ExtractFile. Archive errors
// (IOException, TarException, BZip2Exception, GZipException) are rethrown as
// BuildExceptions with the task's Location; the raw FileStream is closed in
// the finally block (the TarInputStream using-block closes the wrappers).
/// <summary> /// Extracts the files from the archive. /// </summary> protected override void ExecuteTask() { Stream fs = null; Stream instream = null; try { // ensure archive exists if (!SrcFile.Exists) throw new BuildException(string.Format(CultureInfo.InvariantCulture, "Tar file '{0}' does not exist.", SrcFile.FullName), Location); fs = SrcFile.OpenRead(); // wrap inputstream with corresponding compression method switch (CompressionMethod) { case TarCompressionMethod.GZip: instream = new GZipInputStream(fs); break; case TarCompressionMethod.BZip2: instream = new BZip2InputStream(fs); break; default: instream = fs; break; } using (TarInputStream s = new TarInputStream(instream)) { Log(Level.Info, "Expanding '{0}' to '{1}'.", SrcFile.FullName, DestinationDirectory.FullName); TarEntry entry; // extract the file or directory entry while ((entry = s.GetNextEntry()) != null) { if (entry.IsDirectory) { ExtractDirectory(s, DestinationDirectory.FullName, entry.Name, entry.ModTime); } else { ExtractFile(s, DestinationDirectory.FullName, entry.Name, entry.ModTime, entry.Size); } } } } catch (IOException ex) { throw new BuildException(string.Format(CultureInfo.InvariantCulture, "Failed to expand '{0}' to '{1}'.", SrcFile.FullName, DestinationDirectory.FullName), Location, ex); } catch (TarException ex) { throw new BuildException(string.Format(CultureInfo.InvariantCulture, "Invalid tar file '{0}'.", SrcFile.FullName), Location, ex); } catch (BZip2Exception ex) { throw new BuildException(string.Format(CultureInfo.InvariantCulture, "Invalid bzip2'd tar file '{0}'.", SrcFile.FullName), Location, ex); } catch (GZipException ex) { throw new BuildException(string.Format(CultureInfo.InvariantCulture, "Invalid gzipped tar file '{0}'.", SrcFile.FullName), Location, ex); } finally { // close the filestream if (fs != null) fs.Close (); } }
// Entry point. Two modes, selected by whether args[0] is a directory:
//  1) <region dir> <output json>  — scans *.mca region files in the directory,
//     gathers every chunk's Level/TileEntities values and serializes them all
//     to a single JSON file.
//  2) <backup tar.gz> <world json> <nether json> <end json> — streams a
//     gzipped TAR backup, buffering each world's .mca entries in memory and
//     extracting their TileEntities into three separate JSON array files
//     ("world", "world_nether", "world_the_end"), flushed incrementally via
//     FlushStreams every 4096 accumulated entries and finally at 0.
// NOTE(review): in mode 2 the JSON arrays are opened with "[" and closed with
// "]" manually around the FlushStreams output — confirm FlushStreams emits
// the element separators.
static void Main(string[] args) { if (args.Length < 2) { Console.WriteLine("USAGE: NBTExtractor <region dir> <output json>"); Console.WriteLine("USAGE: NBTExtractor <backup tar.gz> <world json> <nether json> <end json>"); return; } //args = new string[] { @"A:\Games\MC18DJ\saves\Civcraft\region", @"A:\Games\MC18DJ\saves\Civcraft\tiles.json" }; if (File.GetAttributes(args[0]).HasFlag(FileAttributes.Directory)) { var allTileEntities = new List<object>(); var files = Directory.GetFiles(args[0], "*.mca"); for (int i = 0; i < files.Length; i++) { var file = files[i]; Console.WriteLine("[{0,4}/{1,4}] {2}", i + 1, files.Length, file); using (var fs = new FileStream(file, FileMode.Open)) { foreach (var lst1 in ExtractRegion(fs)) { var lst = (List<KeyValuePair<string, object>>)lst1.Where(x => x.Key == "Level").Single().Value; foreach (var tileEntities in lst.Where(x => x.Key == "TileEntities" && x.Value is List<KeyValuePair<string, object>>).Select(x => (List<KeyValuePair<string, object>>)x.Value)) { allTileEntities.AddRange(tileEntities.Select(x => x.Value)); } foreach (var tileEntities in lst.Where(x => x.Key == "TileEntities" && x.Value is object[]).Select(x => (object[])x.Value)) { allTileEntities.AddRange(tileEntities); } } } } Console.WriteLine("{0} TileEntities gathered", allTileEntities.Count); Console.WriteLine("Converting to JSON..."); var js = new JSONSerializer(); using (var sw = new StreamWriter(args[1])) js.Serialize(ConvertObject(allTileEntities), sw); Console.WriteLine("Successfully exported to {0}", args[1]); } else { if (args.Length < 4) { Console.WriteLine("USAGE: NBTExtractor <backup tar.gz> <world json> <nether json> <end json>"); return; } var allTileEntities = new SortedDictionary<string, List<Object>>(); var outputStreams = new SortedDictionary<string, TextWriter>(); var firsts = new SortedDictionary<string, bool>(); string[] worlds = { "world", "world_nether", "world_the_end" }; foreach (var world in worlds) { allTileEntities[world] = new 
List<object>(); firsts[world] = true; } for (int i = 0; i < worlds.Length; i++) { var sw = new StreamWriter(args[i + 1]); outputStreams[worlds[i]] = sw; sw.WriteLine("["); } using (var fs = new FileStream(args[0], FileMode.Open)) using (var gis = new GZipInputStream(fs)) using(var tis = new TarInputStream(gis)) { //Console.WriteLine("Start"); TarEntry tarEntry; while((tarEntry = tis.GetNextEntry()) != null) { //Console.WriteLine(tarEntry.Name); if (tarEntry.IsDirectory) continue; foreach(var world in worlds) { FlushStreams(worlds, firsts, outputStreams, allTileEntities, 4096); if (tarEntry.Name.Contains("/" + world + "/") && tarEntry.Name.EndsWith(".mca")) { Console.WriteLine("[/] {0}", tarEntry.Name); var currentList = allTileEntities[world]; using (var fss = new MemoryStream()) { int numRead; byte[] buffer = new byte[4096]; numRead = tis.Read(buffer, 0, buffer.Length); while(numRead > 0) { fss.Write(buffer, 0, numRead); numRead = tis.Read(buffer, 0, buffer.Length); } fss.Seek(0, SeekOrigin.Begin); foreach (var lst1 in ExtractRegion(fss)) { var lst = (List<KeyValuePair<string, object>>)lst1.Where(x => x.Key == "Level").Single().Value; foreach (var tileEntities in lst.Where(x => x.Key == "TileEntities" && x.Value is List<KeyValuePair<string, object>>).Select(x => (List<KeyValuePair<string, object>>)x.Value)) { currentList.AddRange(tileEntities.Select(x => x.Value)); } foreach (var tileEntities in lst.Where(x => x.Key == "TileEntities" && x.Value is object[]).Select(x => (object[])x.Value)) { currentList.AddRange(tileEntities); } } } break; } } } Console.WriteLine("Done"); FlushStreams(worlds, firsts, outputStreams, allTileEntities, 0); foreach(var kvp in outputStreams) { kvp.Value.WriteLine("]"); kvp.Value.Close(); } } } }
// Decompresses a gzipped TAR from the supplied stream and returns a map of
// entry name to raw entry bytes. Zero-length entries are skipped.
public static IDictionary<string, byte[]> UntarGz( Stream stream )
{
    var result = new Dictionary<string, byte[]>();
    using ( GZipInputStream gzStream = new GZipInputStream( stream ) )
    using ( TarInputStream tarStream = new TarInputStream( gzStream ) )
    {
        for ( TarEntry entry = tarStream.GetNextEntry(); entry != null; entry = tarStream.GetNextEntry() )
        {
            if ( entry.Size == 0 )
            {
                continue;
            }
            byte[] contents = new byte[entry.Size];
            StreamUtils.ReadFully( tarStream, contents );
            result[entry.Name] = contents;
        }
    }
    return result;
}
// Writes an image file (optionally zip/gzip/tar.gz/xz compressed, selected by
// eCompType) directly to the physical drive backing the given logical drive
// letter. Sequence: map logical->physical, lock the drive, size-check, open
// the physical handle, stream-decompress, then write in power-of-two sized
// chunks (the underlying device rejects the odd lengths compressed streams
// produce); bytes past the last power of two are carried into the next chunk.
// Progress and throughput are logged per chunk; goto labels implement the
// shared cleanup/unlock paths.
// NOTE(review): the post-loop cleanup tests for *Output* stream types
// (ZipOutputStream/TarOutputStream/GZipOutputStream/XZOutputStream) although
// `fs` can only ever be an *input*/decompression stream here, so those
// branches never run and the compressed input streams are left to the
// BinaryReader using-block — confirm this is intentional.
// NOTE(review): in the Targzip case GetNextEntry() can return null for an
// archive containing no file entries, which would throw a
// NullReferenceException in the do/while loop — confirm inputs always
// contain at least one file entry.
/// <summary> /// /// </summary> /// <param name="driveLetter"></param> /// <param name="fileName"></param> /// <param name="eCompType"></param> /// <param name="removeAfter"></param> /// <returns></returns> public bool WriteDrive(string driveLetter, string fileName, EnumCompressionType eCompType, bool removeAfter) { IsCancelling = false; var dtStart = DateTime.Now; if(!File.Exists(fileName)) throw new ArgumentException(fileName + Resources.Disk_WriteDrive__doesn_t_exist); // // Get physical drive partition for logical partition // var physicalDrive = _diskAccess.GetPhysicalPathForLogicalPath(driveLetter); if (string.IsNullOrEmpty(physicalDrive)) { LogMsg(Resources.Disk_WriteDrive_Error__Couldn_t_map_partition_to_physical_drive); _diskAccess.UnlockDrive(); return false; } // // Lock logical drive // var success = _diskAccess.LockDrive(driveLetter); if (!success) { LogMsg(Resources.Disk_WriteDrive_Failed_to_lock_drive); return false; } // // Get drive size // var driveSize = _diskAccess.GetDriveSize(physicalDrive); if (driveSize <= 0) { LogMsg(Resources.Disk_WriteDrive_Failed_to_get_device_size); _diskAccess.UnlockDrive(); return false; } // // Open the physical drive // var physicalHandle = _diskAccess.Open(physicalDrive); if (physicalHandle == null) { LogMsg(Resources.Disk_WriteDrive_Failed_to_open_physical_drive); _diskAccess.UnlockDrive(); return false; } var buffer = new byte[Globals.MaxBufferSize]; long offset = 0; var fileLength = new FileInfo(fileName).Length; var uncompressedlength = fileLength; var errored = true; using (var basefs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read)) { Stream fs; switch (eCompType) { case EnumCompressionType.Zip: var zipFile = new ZipFile(basefs); var ze = (from ZipEntry zipEntry in zipFile where zipEntry.IsFile select zipEntry).FirstOrDefault(); if(ze == null) { LogMsg(Resources.Disk_WriteDrive_Error_reading_zip_input_stream); goto readfail2; } var zis = zipFile.GetInputStream(ze); 
uncompressedlength = ze.Size; fs = zis; break; case EnumCompressionType.Gzip: var gzis = new GZipInputStream(basefs) {IsStreamOwner = true}; uncompressedlength = gzis.Length; fs = gzis; break; case EnumCompressionType.Targzip: var gzos = new GZipInputStream(basefs) {IsStreamOwner = true}; var tis = new TarInputStream(gzos); TarEntry tarEntry; do { tarEntry = tis.GetNextEntry(); } while (tarEntry.IsDirectory); uncompressedlength = tarEntry.Size; fs = tis; break; case EnumCompressionType.XZ: var xzs = new XZInputStream(basefs); uncompressedlength = xzs.Length; fs = xzs; break; default: // No compression - direct to file stream fs = basefs; uncompressedlength = fs.Length; break; } var bufferOffset = 0; using (var br = new BinaryReader(fs)) { while (offset < uncompressedlength && !IsCancelling) { // Note: There's a problem writing certain lengths to the underlying physical drive. // This appears when we try to read from a compressed stream as it gives us // "strange" lengths which then fail to be written via Writefile() so try to build // up a decent block of bytes here... int readBytes; do { readBytes = br.Read(buffer, bufferOffset, buffer.Length - bufferOffset); bufferOffset += readBytes; } while (bufferOffset < Globals.MaxBufferSize && readBytes != 0); int wroteBytes; var bytesToWrite = bufferOffset; var trailingBytes = 0; // Assume that the underlying physical drive will at least accept powers of two! 
if(!IsPowerOfTwo((ulong)bufferOffset)) { // Find highest bit (32-bit max) var highBit = 31; for (; ((bufferOffset & (1 << highBit)) == 0) && highBit >= 0; highBit--) { } // Work out trailing bytes after last power of two var lastPowerOf2 = 1 << highBit; bytesToWrite = lastPowerOf2; trailingBytes = bufferOffset - lastPowerOf2; } if (_diskAccess.Write(buffer, bytesToWrite, out wroteBytes) < 0) { LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive__ + Marshal.GetHRForLastWin32Error()); goto readfail1; } if (wroteBytes != bytesToWrite) { LogMsg(Resources.Disk_WriteDrive_Error_writing_data_to_drive___past_EOF_); goto readfail1; } // Move trailing bytes up - Todo: Suboptimal if (trailingBytes > 0) { Buffer.BlockCopy(buffer, bufferOffset - trailingBytes, buffer, 0, trailingBytes); bufferOffset = trailingBytes; } else { bufferOffset = 0; } offset += (uint)wroteBytes; var percentDone = (int)(100 * offset / uncompressedlength); var tsElapsed = DateTime.Now.Subtract(dtStart); var bytesPerSec = offset / tsElapsed.TotalSeconds; Progress(percentDone); LogMsg(Resources.Disk_WriteDrive_Wrote + @": " + (offset / Globals.MbModifier) + @" / " + (uncompressedlength / Globals.MbModifier) + " MB; " + string.Format("{0:F}", (bytesPerSec / Globals.MbModifier)) + @" MB/s; " + Resources.Disk_Elapsed_time + ": " + tsElapsed.ToString(@"hh\:mm\:ss")); } } if (fs is ZipOutputStream) { ((ZipOutputStream)fs).CloseEntry(); ((ZipOutputStream)fs).Close(); } else if (fs is TarOutputStream) { ((TarOutputStream)fs).CloseEntry(); fs.Close(); } else if (fs is GZipOutputStream) { fs.Close(); } else if (fs is XZOutputStream) { fs.Close(); } } errored = false; if (removeAfter && !IsCancelling) _diskAccess.UnmountDrive(); readfail1: _diskAccess.Close(); readfail2: _diskAccess.UnlockDrive(); var tstotalTime = DateTime.Now.Subtract(dtStart); if (IsCancelling) LogMsg(Resources.Disk_WriteDrive_Cancelled); else LogMsg(Resources.Disk_WriteDrive_Wrote + @" " + offset + @" " + 
Resources.Disk_WriteDrive_bytes + @". " + Resources.Disk_Elapsed_time + @": " + tstotalTime.ToString(@"hh\:mm\:ss")); Progress(0); return !errored; }
// Static initializer, three stages:
//  1) Unpacks the embedded gzipped TAR bundle into DefaultZipFileContents,
//     a read-only map of entry name -> read-only bytes.
//  2) If a "Resources.zip" sits beside the executable, loads its entries and
//     fills in any missing keys from the defaults to form ZipFileContents;
//     any failure while reading the zip falls back to the defaults entirely.
//  3) Parses the ability-formulas XML document from the selected contents.
static ResourcesClass() { using( MemoryStream memStream = new MemoryStream( PatcherLib.Resources.Properties.Resources.ZippedResources, false ) ) using( GZipInputStream gzStream = new GZipInputStream( memStream ) ) using( TarInputStream tarStream = new TarInputStream( gzStream ) ) { var tempDefault = new Dictionary<string, IList<byte>>(); TarEntry entry; entry = tarStream.GetNextEntry(); while( entry != null ) { if( entry.Size != 0 ) { byte[] bytes = new byte[entry.Size]; StreamUtils.ReadFully( tarStream, bytes ); tempDefault[entry.Name] = bytes.AsReadOnly(); } entry = tarStream.GetNextEntry(); } DefaultZipFileContents = new PatcherLib.Datatypes.ReadOnlyDictionary<string, IList<byte>>(tempDefault); } string defaultsFile = Path.Combine( Path.GetDirectoryName( System.Windows.Forms.Application.ExecutablePath ), "Resources.zip" ); if( File.Exists( defaultsFile ) ) { var tempContents = new Dictionary<string, IList<byte>>(); try { using( FileStream file = File.Open( defaultsFile, FileMode.Open, FileAccess.Read ) ) using( ZipInputStream zipStream = new ZipInputStream( file ) ) { ZipEntry entry = zipStream.GetNextEntry(); while( entry != null ) { if( entry.Size != 0 ) { byte[] bytes = new byte[entry.Size]; StreamUtils.ReadFully( zipStream, bytes ); tempContents[entry.Name] = bytes.AsReadOnly(); } entry = zipStream.GetNextEntry(); } foreach (KeyValuePair<string, IList<byte>> kvp in DefaultZipFileContents) { if (!tempContents.ContainsKey(kvp.Key)) { tempContents[kvp.Key] = kvp.Value; } } } ZipFileContents = new PatcherLib.Datatypes.ReadOnlyDictionary<string, IList<byte>>(tempContents); } catch( Exception ) { ZipFileContents = DefaultZipFileContents; } } else { ZipFileContents = DefaultZipFileContents; } abilityFormulasDoc = ZipFileContents[Paths.AbilityFormulasXML].ToUTF8String().ToXmlDocument(); }
// Reads every per-disk file out of the tar.bz2 at _path into _entries,
// incrementing the shared read counter; a trailing null marks completion.
private void ReadFile()
{
    var scratch = new byte[1024 * 1024]; // more than big enough for all files
    using (var bz2 = new BZip2InputStream(File.Open(_path, FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        for (TarEntry entry = tar.GetNextEntry(); entry != null; entry = tar.GetNextEntry())
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Read the whole entry; Read may return short counts, so loop.
            int filled = 0;
            int got;
            while ((got = tar.Read(scratch, filled, ((int)entry.Size) - filled)) != 0)
            {
                filled += got;
            }

            // we do it in this fashion to have the stream reader detect the BOM / unicode / other stuff
            // so we can read the values properly
            var fileText = new StreamReader(new MemoryStream(scratch, 0, filled)).ReadToEnd();
            _entries.Add(fileText);
            Interlocked.Increment(ref reads);
        }
    }
    _entries.Add(null);
}
/// <summary>
/// Updates the supplied registry from the supplied zip file.
/// This will *clear* the registry of available modules first.
/// This does not *save* the registry. For that, you probably want Repo.Update
/// </summary>
internal static void UpdateRegistryFromTarGz(string path, Registry registry)
{
    log.DebugFormat("Starting registry update from tar.gz file: \"{0}\".", path);

    // Open the gzip'ed file.
    using (Stream inputStream = File.OpenRead(path))
    {
        // Create a gzip stream.
        using (GZipInputStream gzipStream = new GZipInputStream(inputStream))
        {
            // Create a handle for the tar stream.
            using (TarInputStream tarStream = new TarInputStream(gzipStream))
            {
                // Walk the archive, looking for .ckan files.
                const string filter = @"\.ckan$";

                while (true)
                {
                    TarEntry entry = tarStream.GetNextEntry();

                    // Check for EOF.
                    if (entry == null)
                    {
                        break;
                    }

                    string filename = entry.Name;

                    // Skip things we don't want.
                    if (!Regex.IsMatch(filename, filter))
                    {
                        log.DebugFormat("Skipping archive entry {0}", filename);
                        continue;
                    }

                    log.DebugFormat("Reading CKAN data from {0}", filename);

                    // Read each file into a buffer.
                    int buffer_size;
                    try
                    {
                        buffer_size = Convert.ToInt32(entry.Size);
                    }
                    catch (OverflowException)
                    {
                        log.ErrorFormat("Error processing {0}: Metadata size too large.", entry.Name);
                        continue;
                    }

                    byte[] buffer = new byte[buffer_size];

                    // Stream.Read may return fewer bytes than requested; loop
                    // until the entry is fully read (the original issued a
                    // single Read call and could truncate larger metadata).
                    int bytesRead = 0;
                    while (bytesRead < buffer_size)
                    {
                        int read = tarStream.Read(buffer, bytesRead, buffer_size - bytesRead);
                        if (read == 0)
                        {
                            break; // unexpected end of entry
                        }
                        bytesRead += read;
                    }

                    // Convert the buffer data to a string.
                    string metadata_json = Encoding.ASCII.GetString(buffer);

                    ProcessRegistryMetadataFromJSON(metadata_json, registry, filename);
                }
            }
        }
    }
}
// TODO: Create a class Archive.cs and do all the archiving stuff there! This is just copy and paste crap
/// <summary>
/// Extracts the given TAR file under the <paramref name="To"/> prefix and
/// returns the list of directory names encountered. On error, logs the
/// exception and returns whatever was collected so far.
/// </summary>
public static ArrayList UncompressTarFile(string Filename, string To, Gtk.ProgressBar bar)
{
    ArrayList entries = new ArrayList();
    try
    {
        // using-blocks guarantee the archive and each output file are closed
        // even when an exception aborts extraction (the original leaked both).
        using (TarInputStream tarIn = new TarInputStream(File.OpenRead(Filename)))
        {
            TarEntry entry;
            while ((entry = tarIn.GetNextEntry()) != null)
            {
                string savepath = Path.GetDirectoryName(To + entry.Name);
                if (!Directory.Exists(savepath))
                {
                    Directory.CreateDirectory(savepath);
                }
                entries.Add(Path.GetDirectoryName(entry.Name));
                if (!entry.IsDirectory)
                {
                    using (FileStream streamWriter = File.Create(To + entry.Name))
                    {
                        byte[] data = new byte[entry.Size];
                        int read;
                        while ((read = tarIn.Read(data, 0, data.Length)) > 0)
                        {
                            streamWriter.Write(data, 0, read);
                        }
                    }
                }
            }
        }
        Console.WriteLine("Deflating the tar file done!");
        return entries;
    }
    catch (Exception e)
    {
        Console.WriteLine("An exception occured while deflating the tar file: " + e.Message);
        return entries;
    }
}
/// <summary>
/// Verifies that entry metadata (ids, names, mode, mod time) written to a
/// tar archive round-trips intact, and that exactly one entry is present.
/// </summary>
public void ValuesPreserved()
{
    MemoryStream archive = new MemoryStream();
    DateTime modTime = DateTime.Now;
    TarEntry written;

    using (TarOutputStream writer = new TarOutputStream(archive))
    {
        written = TarEntry.CreateTarEntry("TestEntry");
        written.GroupId = 12;
        written.UserId = 14;
        written.ModTime = modTime;
        written.UserName = "******";
        written.GroupName = "GroupName";
        written.TarHeader.Mode = 12345;
        writer.PutNextEntry(written);
    }

    // Replay the raw archive bytes through a fresh stream.
    MemoryStream replay = new MemoryStream();
    replay.Write(archive.GetBuffer(), 0, archive.GetBuffer().Length);
    replay.Seek(0, SeekOrigin.Begin);

    using (TarInputStream reader = new TarInputStream(replay))
    {
        TarEntry readBack = reader.GetNextEntry();
        Assert.AreEqual(written.TarHeader.Checksum, readBack.TarHeader.Checksum, "Checksum");
        Assert.IsTrue(readBack.Equals(written), "Entries should be equal");
        Assert.IsTrue(readBack.TarHeader.Equals(written.TarHeader), "Headers should match");

        // Tar only stores seconds
        DateTime truncatedTime = new DateTime(modTime.Year, modTime.Month, modTime.Day,
                                              modTime.Hour, modTime.Minute, modTime.Second);
        Assert.AreEqual(truncatedTime, readBack.ModTime, "Modtimes should match");

        int entryCount = 0;
        while (readBack != null)
        {
            ++entryCount;
            readBack = reader.GetNextEntry();
        }

        Assert.AreEqual(1, entryCount, "Expected 1 entry");
    }
}
// Streams the tar.bz2 archive at dataLocation, parses every disk-description
// file in it, and feeds each parsed Disk to addToBatch. Returns the stopwatch
// measuring elapsed time; a parse failure logs the error and returns early.
private Stopwatch ParseDisks(Action<Disk> addToBatch)
{
    int i = 0;
    var parser = new Parser();
    var buffer = new byte[1024 * 1024];// more than big enough for all files
    var sp = Stopwatch.StartNew();
    using (var bz2 = new BZip2InputStream(File.Open(dataLocation, FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        TarEntry entry;
        while ((entry = tar.GetNextEntry()) != null)
        {
            // Skip empty entries and the archive's documentation files.
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;
            // Read the whole entry into buffer: Read may return short counts,
            // so loop until it reports 0 (entry exhausted).
            var readSoFar = 0;
            while (true)
            {
                var read = tar.Read(buffer, readSoFar, ((int)entry.Size) - readSoFar);
                if (read == 0)
                    break;
                readSoFar += read;
            }
            // we do it in this fashion to have the stream reader detect the BOM / unicode / other stuff
            // so we can read the values properly
            var fileText = new StreamReader(new MemoryStream(buffer, 0, readSoFar)).ReadToEnd();
            try
            {
                var disk = parser.Parse(fileText);
                addToBatch(disk);
                // Every BatchSize items (including the very first, since i
                // starts at 0): sample working-set memory and log progress.
                if (i++ % BatchSize == 0)
                {
                    process.Refresh();
                    MemoryUsage.Add(process.WorkingSet64);
                    logger.Info("\r{0} {1:#,#} {2} ", entry.Name, i, sp.Elapsed);
                }
            }
            catch (Exception e)
            {
                // A single parse failure aborts the whole run; the elapsed
                // time accumulated so far is still returned to the caller.
                logger.Error("");
                logger.Error(entry.Name);
                logger.Error(e);
                return sp;
            }
        }
    }
    return sp;
}
/// <summary>
/// Static initializer: unpacks the embedded default resources, then — when a
/// "Resources.zip" exists next to the executable — overlays its contents on
/// top of the defaults, falling back to the defaults on any read failure.
/// </summary>
static Resources()
{
    // Unpack the embedded gzipped tar of default resources.
    using( MemoryStream memStream = new MemoryStream( PatcherLib.Properties.Resources.ZippedResources, false ) )
    using( GZipInputStream gzStream = new GZipInputStream( memStream ) )
    using( TarInputStream tarStream = new TarInputStream( gzStream ) )
    {
        DefaultZipFileContents = new Dictionary<string, byte[]>();
        for( TarEntry entry = tarStream.GetNextEntry(); entry != null; entry = tarStream.GetNextEntry() )
        {
            if( entry.Size == 0 )
                continue;
            byte[] contents = new byte[entry.Size];
            StreamUtils.ReadFully( tarStream, contents );
            DefaultZipFileContents[entry.Name] = contents;
        }
    }

    string defaultsFile = Path.Combine(
        Path.GetDirectoryName( System.Windows.Forms.Application.ExecutablePath ),
        "Resources.zip" );

    if( !File.Exists( defaultsFile ) )
    {
        ZipFileContents = DefaultZipFileContents;
        return;
    }

    try
    {
        using( FileStream file = File.Open( defaultsFile, FileMode.Open, FileAccess.Read ) )
        using( ZipInputStream zipStream = new ZipInputStream( file ) )
        {
            ZipFileContents = new Dictionary<string, byte[]>();
            for( ZipEntry entry = zipStream.GetNextEntry(); entry != null; entry = zipStream.GetNextEntry() )
            {
                if( entry.Size == 0 )
                    continue;
                byte[] contents = new byte[entry.Size];
                StreamUtils.ReadFully( zipStream, contents );
                ZipFileContents[entry.Name] = contents;
            }

            // Fill in any entries the override archive does not provide.
            foreach( KeyValuePair<string, byte[]> kvp in DefaultZipFileContents )
            {
                if( !ZipFileContents.ContainsKey( kvp.Key ) )
                    ZipFileContents[kvp.Key] = kvp.Value;
            }
        }
    }
    catch( Exception )
    {
        // Deliberate best-effort: if the override archive cannot be read,
        // keep the embedded defaults.
        ZipFileContents = DefaultZipFileContents;
    }
}
/// <summary>
/// Downloads the catalog archive (a tar nested inside a zip) from
/// DownloadUrl and lazily yields one GutCatDoc per file entry in the tar.
/// Directory entries are skipped.
/// </summary>
private IEnumerable<GutCatDoc> getRdfFiles()
{
    var request = (HttpWebRequest)WebRequest.Create(DownloadUrl);
    using (var response = (HttpWebResponse)request.GetResponse())
    using (var zip = new ZipInputStream(response.GetResponseStream()))
    {
        // Position the zip stream at its first (and only expected) entry,
        // which contains the tar data.
        zip.GetNextEntry();
        using (var tar = new TarInputStream(zip))
        {
            while (true)
            {
                TarEntry current = tar.GetNextEntry();
                if (current == null)
                    yield break;
                if (!current.IsDirectory)
                    yield return new GutCatDoc(tar);
            }
        }
    }
}
/// <summary>
/// Enumerate through the files of a TAR and get a list of KVP names-byte arrays
/// </summary>
/// <param name="stream">The input tar stream</param>
/// <param name="isTarGz">True if the input stream is a .tar.gz or .tgz</param>
/// <returns>An enumerable containing each tar entry and it's contents</returns>
public static IEnumerable<KeyValuePair<string, byte[]>> UnTar(Stream stream, bool isTarGz)
{
    // Wrap in a gunzip layer first when the input is compressed.
    Stream source = isTarGz ? (Stream)new GZipInputStream(stream) : stream;
    using (var tar = new TarInputStream(source))
    {
        for (TarEntry entry = tar.GetNextEntry(); entry != null; entry = tar.GetNextEntry())
        {
            if (entry.IsDirectory)
                continue;

            using (var contents = new MemoryStream())
            {
                tar.CopyEntryContents(contents);
                yield return new KeyValuePair<string, byte[]>(entry.Name, contents.ToArray());
            }
        }
    }
}
// Round-trips a single entry with the given name through an in-memory tar
// archive and verifies the name is preserved (exercises long-name handling
// for names over the classic 100-character header limit).
void TryLongName(string name)
{
    MemoryStream ms = new MemoryStream();
    using ( TarOutputStream tarOut = new TarOutputStream(ms) )
    {
        TarEntry entry = TarEntry.CreateTarEntry(name);
        tarOut.PutNextEntry(entry);
    }

    MemoryStream ms2 = new MemoryStream();
    ms2.Write(ms.GetBuffer(), 0, ms.GetBuffer().Length);
    ms2.Seek(0, SeekOrigin.Begin);

    using (TarInputStream tarIn = new TarInputStream(ms2))
    {
        TarEntry nextEntry = tarIn.GetNextEntry();
        // BUG FIX: NUnit's Assert.AreEqual takes (expected, actual); the
        // arguments were reversed, producing a misleading failure message.
        // Also removed the unused modTime local.
        Assert.AreEqual(name, nextEntry.Name, "Name match failure");
    }
}
// Extracts a tar archive from the given stream into m_Dir, optionally
// dropping the leading path component m_NamePrefixToDrop and filtering
// entries through _Filter; updates progressBar2 as it goes.
// NOTE: the TarInputStream is deliberately not disposed, because disposing
// it would also close the caller-owned input stream.
void UnpackTar(Stream strm)
{
    TarInputStream tar = new TarInputStream(strm);
    progressBar2.Style = ProgressBarStyle.Marquee;
    int done = 0;
    for (; ; )
    {
        TarEntry entry = tar.GetNextEntry();
        if (entry == null)
            break;

        string strName = entry.Name;

        // BUG FIX: entries without a '/' (top-level files) made IndexOf
        // return -1, which crashed Substring. Only attempt prefix-stripping
        // when a separator is present.
        int slash = strName.IndexOf('/');
        if (slash >= 0)
        {
            string firstComponent = strName.Substring(0, slash);
            // BUG FIX: culture-sensitive ToUpper comparison replaced with an
            // ordinal case-insensitive comparison (avoids e.g. the Turkish-I
            // problem).
            if (string.Equals(firstComponent, m_NamePrefixToDrop, StringComparison.OrdinalIgnoreCase))
                strName = strName.Substring(m_NamePrefixToDrop.Length + 1);
        }
        if (strName == "")
            continue;

        string fn = m_Dir + "\\" + strName;
        if ((_Filter == null) || (_Filter(strName)))
        {
            if (entry.IsDirectory)
            {
                if (!Directory.Exists(fn))
                    Directory.CreateDirectory(fn);
            }
            else
            {
                using (FileStream ostrm = new FileStream(fn, FileMode.Create))
                    tar.CopyEntryContents(ostrm);
            }
        }

        // Clamp progress to the bar's maximum.
        if ((done += (int)entry.Size) > progressBar2.Maximum)
            done = progressBar2.Maximum;
        progressBar2.Value = done;
        Application.DoEvents();
    }
}
/// <summary>
/// Extracts a tar archive to a directory.
/// </summary>
/// <param name="strFilePath">Path of the tar archive.</param>
/// <param name="strUnpackDir">Directory to extract into (created if missing).</param>
/// <returns>true on success; false if the archive is missing or any error occurs.</returns>
public bool UnpackTarFiles(string strFilePath, string strUnpackDir)
{
    try
    {
        if (!File.Exists(strFilePath))
        {
            return(false);
        }

        // Normalize the target directory to backslashes and a trailing one.
        strUnpackDir = strUnpackDir.Replace("/", "\\");
        if (!strUnpackDir.EndsWith("\\"))
        {
            strUnpackDir += "\\";
        }
        if (!Directory.Exists(strUnpackDir))
        {
            Directory.CreateDirectory(strUnpackDir);
        }

        // BUG FIX: the streams were only closed on the success path, leaking
        // file handles whenever extraction threw; using-blocks guarantee
        // disposal on every path.
        using (FileStream fr = new FileStream(strFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (ICSharpCode.SharpZipLib.Tar.TarInputStream s = new ICSharpCode.SharpZipLib.Tar.TarInputStream(fr))
        {
            ICSharpCode.SharpZipLib.Tar.TarEntry theEntry;
            while ((theEntry = s.GetNextEntry()) != null)
            {
                string directoryName = Path.GetDirectoryName(theEntry.Name);
                string fileName = Path.GetFileName(theEntry.Name);
                if (directoryName != String.Empty)
                {
                    Directory.CreateDirectory(strUnpackDir + directoryName);
                }
                if (fileName != String.Empty)
                {
                    // BUG FIX: the output stream is likewise disposed even if
                    // a write throws.
                    using (FileStream streamWriter = File.Create(strUnpackDir + theEntry.Name))
                    {
                        byte[] data = new byte[2048];
                        int size;
                        while ((size = s.Read(data, 0, data.Length)) > 0)
                        {
                            streamWriter.Write(data, 0, size);
                        }
                    }
                }
            }
        }
        return(true);
    }
    catch (Exception)
    {
        // Deliberate best-effort contract: any failure reports false.
        return(false);
    }
}
/// <summary>
/// Restores the files a RestoreTask requests from its gzipped tar archive
/// into the task's output directory, then queues a RecoverResult.
/// </summary>
/// <param name="task">The restore task describing archive, files, and output dir.</param>
private void processTask(RestoreTask task)
{
    Logger.Debug("StorageThread:processTask:RestoreTask");

    List<string> list = task.RelativeFilenames();
    RecoverResult recover = new RecoverResult(task.OutputDir, true);

    // BUG FIX: the three chained streams were only closed on the success
    // path; using-blocks dispose them even when extraction throws.
    using (Stream inStream = File.OpenRead(task.ArchivePath))
    using (Stream gzipStream = new GZipInputStream(inStream))
    using (TarInputStream tarStream = new TarInputStream(gzipStream))
    {
        TarEntry entry;
        while ((entry = tarStream.GetNextEntry()) != null)
        {
            if (entry.IsDirectory)
                continue;
            // Only restore entries the task asked for.
            if (list.IndexOf(entry.Name) != -1)
            {
                string name = entry.Name.Replace('/', Path.DirectorySeparatorChar);
                name = Path.Combine(task.OutputDir, name);
                Directory.CreateDirectory(Path.GetDirectoryName(name));

                // BUG FIX: the output stream is now disposed even if the copy
                // throws. NOTE(review): FileMode.CreateNew still throws when
                // the file already exists — confirm that is intended.
                using (FileStream outStream = new FileStream(name, FileMode.CreateNew))
                {
                    tarStream.CopyEntryContents(outStream);
                }

                // Treat the entry's mod time as UTC before applying it to the
                // restored file, as the original code did.
                DateTime myDt = DateTime.SpecifyKind(entry.ModTime, DateTimeKind.Utc);
                File.SetLastWriteTime(name, myDt);
            }
        }
    }

    lock (_lock)
    {
        recoverResults.Enqueue(recover);
    }
}