public Read ( byte[] buffer, int offset, int count ) : int | ||
buffer | byte[] | /// The buffer into which to place bytes read. /// |
offset | int | /// The offset at which to place bytes read. /// |
count | int | /// The number of bytes to read. /// |
return | int |
/// <summary>
/// Streams the freedb tar.bz2 dump, parses each entry into a Disk and hands it
/// to the batch callback, printing progress once per batch.
/// </summary>
/// <param name="addToBatch">Callback invoked once per successfully parsed disk.</param>
/// <returns>The stopwatch measuring total elapsed processing time.</returns>
private static Stopwatch ParseDisks(Action<Disk> addToBatch)
{
    int i = 0;
    var parser = new Parser();
    var buffer = new byte[1024 * 1024]; // more than big enough for all files
    var sp = Stopwatch.StartNew();

    using (var bz2 = new BZip2InputStream(File.Open(@"D:\Data\freedb-complete-20120101.tar.bz2", FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        TarEntry entry;
        while ((entry = tar.GetNextEntry()) != null)
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Grow the buffer for the rare entry larger than the initial 1 MB,
            // instead of letting Read throw on offset + count > buffer length.
            if (entry.Size > buffer.Length)
                buffer = new byte[entry.Size];

            // TarInputStream.Read may return fewer bytes than requested, so
            // accumulate until the entry is exhausted.
            var readSoFar = 0;
            while (true)
            {
                var read = tar.Read(buffer, readSoFar, ((int)entry.Size) - readSoFar);
                if (read == 0)
                    break;
                readSoFar += read;
            }

            // we do it in this fashion to have the stream reader detect the BOM /
            // unicode / other stuff so we can read the values properly
            string fileText;
            using (var reader = new StreamReader(new MemoryStream(buffer, 0, readSoFar)))
                fileText = reader.ReadToEnd();

            try
            {
                var disk = parser.Parse(fileText);
                addToBatch(disk);
                if (i++ % BatchSize == 0)
                    Console.Write("\r{0} {1:#,#} {2} ", entry.Name, i, sp.Elapsed);
            }
            catch (Exception e)
            {
                // Abort on the first parse failure, reporting which entry broke.
                Console.WriteLine();
                Console.WriteLine(entry.Name);
                Console.WriteLine(e);
                return sp;
            }
        }
    }
    return sp;
}
/// <summary>
/// Streams the freedb tar.bz2 dump, parses each entry into a Disk and stores it
/// through the supplied bulk-insert operation.
/// </summary>
/// <param name="insert">Bulk-insert operation receiving each parsed disk.</param>
private static void ParseDisks(BulkInsertOperation insert)
{
    var parser = new Parser();
    var buffer = new byte[1024 * 1024]; // more than big enough for all files

    using (var bz2 = new BZip2InputStream(File.Open(@"D:\Scratch\freedb-complete-20150101.tar.bz2", FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        TarEntry entry;
        while ((entry = tar.GetNextEntry()) != null)
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Grow the buffer for the rare entry larger than the initial 1 MB,
            // instead of letting Read throw on offset + count > buffer length.
            if (entry.Size > buffer.Length)
                buffer = new byte[entry.Size];

            // TarInputStream.Read may return fewer bytes than requested, so
            // accumulate until the entry is exhausted.
            var readSoFar = 0;
            while (true)
            {
                var read = tar.Read(buffer, readSoFar, ((int)entry.Size) - readSoFar);
                if (read == 0)
                    break;
                readSoFar += read;
            }

            // we do it in this fashion to have the stream reader detect the BOM /
            // unicode / other stuff so we can read the values properly
            string fileText;
            using (var reader = new StreamReader(new MemoryStream(buffer, 0, readSoFar)))
                fileText = reader.ReadToEnd();

            try
            {
                var disk = parser.Parse(fileText);
                insert.Store(disk);
            }
            catch (Exception e)
            {
                // Log the failing entry but keep processing the remaining files.
                Console.WriteLine();
                Console.WriteLine(entry.Name);
                Console.WriteLine(e);
            }
        }
    }
}
/// <summary>
/// Extracts a tar archive into the given directory.
/// </summary>
/// <param name="strFilePath">Path of the tar archive.</param>
/// <param name="strUnpackDir">Directory to extract into (created if missing).</param>
/// <returns>true on success; false if the archive is missing or any error occurs.</returns>
public bool UnpackTarFiles(string strFilePath, string strUnpackDir)
{
    try
    {
        if (!File.Exists(strFilePath))
        {
            return(false);
        }

        // Normalize to a backslash-terminated Windows-style directory prefix.
        strUnpackDir = strUnpackDir.Replace("/", "\\");
        if (!strUnpackDir.EndsWith("\\"))
        {
            strUnpackDir += "\\";
        }
        if (!Directory.Exists(strUnpackDir))
        {
            Directory.CreateDirectory(strUnpackDir);
        }

        // using guarantees the streams are closed even when an entry fails —
        // the original explicit Close() calls were skipped on exceptions.
        using (FileStream fr = new FileStream(strFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (ICSharpCode.SharpZipLib.Tar.TarInputStream s = new ICSharpCode.SharpZipLib.Tar.TarInputStream(fr))
        {
            ICSharpCode.SharpZipLib.Tar.TarEntry theEntry;
            while ((theEntry = s.GetNextEntry()) != null)
            {
                // NOTE(review): entry names are concatenated verbatim; a crafted
                // archive containing "..\" segments could escape strUnpackDir
                // (tar-slip). Consider validating the resulting paths.
                string directoryName = Path.GetDirectoryName(theEntry.Name);
                string fileName = Path.GetFileName(theEntry.Name);

                if (directoryName != String.Empty)
                {
                    Directory.CreateDirectory(strUnpackDir + directoryName);
                }
                if (fileName != String.Empty)
                {
                    using (FileStream streamWriter = File.Create(strUnpackDir + theEntry.Name))
                    {
                        byte[] data = new byte[2048];
                        while (true)
                        {
                            int size = s.Read(data, 0, data.Length);
                            if (size <= 0)
                            {
                                break;
                            }
                            streamWriter.Write(data, 0, size);
                        }
                    }
                }
            }
        }
        return(true);
    }
    catch (Exception)
    {
        // Deliberate best-effort contract: any failure is reported as false.
        return(false);
    }
}
/// <summary>
/// Extract an entry from the archive. This method assumes that the
/// tarIn stream has been properly set with a call to getNextEntry().
/// </summary>
/// <param name="destDir">
/// The destination directory into which to extract.
/// </param>
/// <param name="entry">
/// The TarEntry returned by tarIn.getNextEntry().
/// </param>
void ExtractEntry(string destDir, TarEntry entry)
{
    OnProgressMessageEvent(entry, null);

    var name = entry.Name;
    if (Path.IsPathRooted(name) == true)
    {
        // NOTE:
        // for UNC names... \\machine\share\zoom\beet.txt gives \zoom\beet.txt
        name = name.Substring(Path.GetPathRoot(name).Length);
    }
    name = name.Replace('/', Path.DirectorySeparatorChar);
    var destFile = Path.Combine(destDir, name);

    if (entry.IsDirectory)
    {
        EnsureDirectoryExists(destFile);
        return;
    }

    var parentDirectory = Path.GetDirectoryName(destFile);
    EnsureDirectoryExists(parentDirectory);

    // Skip the entry when an existing destination must be preserved.
    var fileInfo = new FileInfo(destFile);
    if (fileInfo.Exists)
    {
        if (keepOldFiles)
        {
            OnProgressMessageEvent(entry, "Destination file already exists");
            return;
        }
        if ((fileInfo.Attributes & FileAttributes.ReadOnly) != 0)
        {
            OnProgressMessageEvent(entry, "Destination file already exists, and is read-only");
            return;
        }
    }

    var asciiTrans = false;
    // using guarantees the output file is closed even if tarIn.Read throws;
    // the original explicit Close() calls leaked the stream on exceptions.
    using (Stream outputStream = File.Create(destFile))
    {
        if (asciiTranslate)
        {
            asciiTrans = !IsBinary(destFile);
        }

        StreamWriter outw = null;
        try
        {
            if (asciiTrans)
            {
                outw = new StreamWriter(outputStream);
            }

            var rdbuf = new byte[32 * 1024];
            while (true)
            {
                var numRead = tarIn.Read(rdbuf, 0, rdbuf.Length);
                if (numRead <= 0)
                {
                    break;
                }

                if (asciiTrans)
                {
                    // Re-emit each LF-terminated run through WriteLine so line
                    // endings are translated to the platform convention.
                    // NOTE(review): bytes after the final '\n' in a buffer are
                    // dropped, as in the original — confirm entries end with '\n'.
                    for (int off = 0, b = 0; b < numRead; ++b)
                    {
                        if (rdbuf[b] == 10)
                        {
                            var s = Encoding.ASCII.GetString(rdbuf, off, (b - off));
                            outw.WriteLine(s);
                            off = b + 1;
                        }
                    }
                }
                else
                {
                    outputStream.Write(rdbuf, 0, numRead);
                }
            }
        }
        finally
        {
            if (outw != null)
            {
                // Closing the writer flushes it and closes the underlying stream.
                outw.Close();
            }
        }
    }
}
// TODO: Create a class Archive.cs and do all the archiving stuff there! This is just copy and paste crap
/// <summary>
/// Extracts the given tar file underneath the <paramref name="To"/> directory.
/// </summary>
/// <param name="Filename">Path of the tar archive to extract.</param>
/// <param name="To">Directory prefix the entries are written under.</param>
/// <param name="bar">Progress bar (currently unused by this method).</param>
/// <returns>Directory names of the entries processed (possibly partial on error).</returns>
public static ArrayList UncompressTarFile(string Filename, string To, Gtk.ProgressBar bar)
{
    ArrayList entries = new ArrayList();
    try
    {
        // using ensures the archive stream is closed — the original never
        // closed tarIn at all, leaking the file handle.
        using (TarInputStream tarIn = new TarInputStream(File.OpenRead(Filename)))
        {
            TarEntry entry;
            while ((entry = tarIn.GetNextEntry()) != null)
            {
                string savepath = Path.GetDirectoryName(To + entry.Name);
                if (!Directory.Exists(savepath))
                {
                    Directory.CreateDirectory(savepath);
                }
                entries.Add(Path.GetDirectoryName(entry.Name));

                if (!entry.IsDirectory)
                {
                    // using closes the output file even when Read throws mid-entry.
                    using (FileStream streamWriter = File.Create(To + entry.Name))
                    {
                        long size = entry.Size;
                        byte[] data = new byte[size];
                        while (true)
                        {
                            size = tarIn.Read(data, 0, data.Length);
                            if (size <= 0)
                                break;
                            streamWriter.Write(data, 0, (int)size);
                        }
                    }
                }
            }
        }
        Console.WriteLine("Deflating the tar file done!");
        return entries;
    }
    catch (Exception e)
    {
        // Best effort: report the failure and return whatever was gathered.
        Console.WriteLine("An exception occured while deflating the tar file: " + e.Message);
        return entries;
    }
}
/// <summary>
/// Extracts a single archive entry (already positioned via tarIn.GetNextEntry())
/// into the destination directory, honouring keepOldFiles / read-only checks
/// and optional ASCII line-ending translation.
/// </summary>
/// <param name="destDir">Destination directory to extract into.</param>
/// <param name="entry">The current tar entry.</param>
private void ExtractEntry(string destDir, TarEntry entry)
{
    OnProgressMessageEvent(entry, null);

    string name = entry.Name;
    if (Path.IsPathRooted(name))
    {
        // Strip the root (drive or UNC prefix) so the entry lands under destDir.
        name = name.Substring(Path.GetPathRoot(name).Length);
    }
    name = name.Replace('/', Path.DirectorySeparatorChar);
    string destFile = Path.Combine(destDir, name);

    if (entry.IsDirectory)
    {
        EnsureDirectoryExists(destFile);
        return;
    }

    EnsureDirectoryExists(Path.GetDirectoryName(destFile));

    // Skip the entry when an existing destination must be preserved.
    FileInfo fileInfo = new FileInfo(destFile);
    if (fileInfo.Exists)
    {
        if (keepOldFiles)
        {
            OnProgressMessageEvent(entry, "Destination file already exists");
            return;
        }
        if ((fileInfo.Attributes & FileAttributes.ReadOnly) != 0)
        {
            OnProgressMessageEvent(entry, "Destination file already exists, and is read-only");
            return;
        }
    }

    bool asciiTrans = false;
    // using guarantees the file is closed on exceptions; the original leaked
    // the stream if tarIn.Read threw.
    using (Stream outputStream = File.Create(destFile))
    {
        if (asciiTranslate)
        {
            asciiTrans = !IsBinary(destFile);
        }

        StreamWriter writer = null;
        try
        {
            if (asciiTrans)
            {
                writer = new StreamWriter(outputStream);
            }

            byte[] buffer = new byte[32768];
            while (true)
            {
                int numRead = tarIn.Read(buffer, 0, buffer.Length);
                if (numRead <= 0)
                {
                    break;
                }

                if (asciiTrans)
                {
                    // Rewrite LF-delimited text through WriteLine so line endings
                    // follow the platform convention.
                    // NOTE(review): bytes after the last '\n' in a buffer are
                    // dropped, as in the original — confirm entries end with '\n'.
                    int start = 0;
                    for (int i = 0; i < numRead; i++)
                    {
                        if (buffer[i] == 10)
                        {
                            writer.WriteLine(Encoding.ASCII.GetString(buffer, start, i - start));
                            start = i + 1;
                        }
                    }
                }
                else
                {
                    outputStream.Write(buffer, 0, numRead);
                }
            }
        }
        finally
        {
            if (writer != null)
            {
                // Closing the writer flushes it and closes the underlying stream.
                writer.Close();
            }
        }
    }
}
/// <summary>
/// Updates the supplied registry from the supplied zip file.
/// This will *clear* the registry of available modules first.
/// This does not *save* the registry. For that, you probably want Repo.Update
/// </summary>
internal static void UpdateRegistryFromTarGz(string path, Registry registry)
{
    log.DebugFormat("Starting registry update from tar.gz file: \"{0}\".", path);

    // Open the gzip'ed file and layer the tar reader on top of it.
    using (Stream inputStream = File.OpenRead(path))
    using (GZipInputStream gzipStream = new GZipInputStream(inputStream))
    using (TarInputStream tarStream = new TarInputStream(gzipStream))
    {
        // Walk the archive, looking for .ckan files.
        const string filter = @"\.ckan$";

        while (true)
        {
            TarEntry entry = tarStream.GetNextEntry();

            // Check for EOF.
            if (entry == null)
            {
                break;
            }

            string filename = entry.Name;

            // Skip things we don't want.
            if (!Regex.IsMatch(filename, filter))
            {
                log.DebugFormat("Skipping archive entry {0}", filename);
                continue;
            }

            log.DebugFormat("Reading CKAN data from {0}", filename);

            int buffer_size;
            try
            {
                buffer_size = Convert.ToInt32(entry.Size);
            }
            catch (OverflowException)
            {
                log.ErrorFormat("Error processing {0}: Metadata size too large.", entry.Name);
                continue;
            }

            // Read the whole entry. A single Read() call is NOT guaranteed to
            // fill the buffer — the original relied on that and could silently
            // truncate metadata — so loop until the entry is exhausted.
            byte[] buffer = new byte[buffer_size];
            int offset = 0;
            while (offset < buffer_size)
            {
                int read = tarStream.Read(buffer, offset, buffer_size - offset);
                if (read <= 0)
                {
                    break;
                }
                offset += read;
            }

            // Convert the buffer data to a string.
            string metadata_json = Encoding.ASCII.GetString(buffer, 0, offset);
            ProcessRegistryMetadataFromJSON(metadata_json, registry, filename);
        }
    }
}
/// <summary>
/// Streams the freedb dump at dataLocation, parses each entry into a Disk and
/// hands it to the batch callback, logging progress and working-set size once
/// per batch.
/// </summary>
/// <param name="addToBatch">Callback invoked once per successfully parsed disk.</param>
/// <returns>The stopwatch measuring total elapsed processing time.</returns>
private Stopwatch ParseDisks(Action<Disk> addToBatch)
{
    int i = 0;
    var parser = new Parser();
    var buffer = new byte[1024 * 1024]; // more than big enough for all files
    var sp = Stopwatch.StartNew();

    using (var bz2 = new BZip2InputStream(File.Open(dataLocation, FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        TarEntry entry;
        while ((entry = tar.GetNextEntry()) != null)
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Grow the buffer for the rare entry larger than the initial 1 MB,
            // instead of letting Read throw on offset + count > buffer length.
            if (entry.Size > buffer.Length)
                buffer = new byte[entry.Size];

            // TarInputStream.Read may return fewer bytes than requested, so
            // accumulate until the entry is exhausted.
            var readSoFar = 0;
            while (true)
            {
                var read = tar.Read(buffer, readSoFar, ((int)entry.Size) - readSoFar);
                if (read == 0)
                    break;
                readSoFar += read;
            }

            // we do it in this fashion to have the stream reader detect the BOM /
            // unicode / other stuff so we can read the values properly
            string fileText;
            using (var reader = new StreamReader(new MemoryStream(buffer, 0, readSoFar)))
                fileText = reader.ReadToEnd();

            try
            {
                var disk = parser.Parse(fileText);
                addToBatch(disk);
                if (i++ % BatchSize == 0)
                {
                    process.Refresh();
                    MemoryUsage.Add(process.WorkingSet64);
                    logger.Info("\r{0} {1:#,#} {2} ", entry.Name, i, sp.Elapsed);
                }
            }
            catch (Exception e)
            {
                // Abort on the first parse failure, reporting the offending entry.
                logger.Error("");
                logger.Error(entry.Name);
                logger.Error(e);
                return sp;
            }
        }
    }
    return sp;
}
/// <summary>
/// Reads every non-trivial entry from the bz2-compressed tar at _path, queues
/// its decoded text into _entries, and finally queues a null sentinel to
/// signal completion to the consumer.
/// </summary>
private void ReadFile()
{
    var buffer = new byte[1024 * 1024]; // more than big enough for all files

    using (var bz2 = new BZip2InputStream(File.Open(_path, FileMode.Open)))
    using (var tar = new TarInputStream(bz2))
    {
        TarEntry entry;
        while ((entry = tar.GetNextEntry()) != null)
        {
            if (entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                continue;

            // Grow the buffer for the rare entry larger than the initial 1 MB,
            // instead of letting Read throw on offset + count > buffer length.
            if (entry.Size > buffer.Length)
                buffer = new byte[entry.Size];

            // TarInputStream.Read can return short counts, so accumulate until done.
            var readSoFar = 0;
            while (true)
            {
                var bytes = tar.Read(buffer, readSoFar, ((int)entry.Size) - readSoFar);
                if (bytes == 0)
                    break;
                readSoFar += bytes;
            }

            // we do it in this fashion to have the stream reader detect the BOM /
            // unicode / other stuff so we can reads the values properly
            string fileText;
            using (var reader = new StreamReader(new MemoryStream(buffer, 0, readSoFar)))
                fileText = reader.ReadToEnd();

            _entries.Add(fileText);
            Interlocked.Increment(ref reads);
        }
    }

    // Null sentinel: tells the consumer there is nothing more to read.
    _entries.Add(null);
}
/// <summary>
/// Entry point. Two modes:
///   NBTExtractor &lt;region dir&gt; &lt;output json&gt; — scan loose .mca region files;
///   NBTExtractor &lt;backup tar.gz&gt; &lt;world json&gt; &lt;nether json&gt; &lt;end json&gt; —
///     scan a gzipped tar backup, writing one JSON array per world.
/// Collects every TileEntity compound found and serializes it to JSON.
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 2)
    {
        Console.WriteLine("USAGE: NBTExtractor <region dir> <output json>");
        Console.WriteLine("USAGE: NBTExtractor <backup tar.gz> <world json> <nether json> <end json>");
        return;
    }

    if (File.GetAttributes(args[0]).HasFlag(FileAttributes.Directory))
    {
        // ---- Directory mode: scan loose region files, gather in memory ----
        var allTileEntities = new List<object>();
        var files = Directory.GetFiles(args[0], "*.mca");
        for (int i = 0; i < files.Length; i++)
        {
            var file = files[i];
            Console.WriteLine("[{0,4}/{1,4}] {2}", i + 1, files.Length, file);
            using (var fs = new FileStream(file, FileMode.Open))
            {
                foreach (var chunk in ExtractRegion(fs))
                {
                    // Each chunk carries a "Level" compound; its TileEntities may
                    // be stored either as a KeyValuePair list or as an object[].
                    var level = (List<KeyValuePair<string, object>>)chunk.Where(x => x.Key == "Level").Single().Value;
                    foreach (var tileEntities in level.Where(x => x.Key == "TileEntities" && x.Value is List<KeyValuePair<string, object>>).Select(x => (List<KeyValuePair<string, object>>)x.Value))
                    {
                        allTileEntities.AddRange(tileEntities.Select(x => x.Value));
                    }
                    foreach (var tileEntities in level.Where(x => x.Key == "TileEntities" && x.Value is object[]).Select(x => (object[])x.Value))
                    {
                        allTileEntities.AddRange(tileEntities);
                    }
                }
            }
        }

        Console.WriteLine("{0} TileEntities gathered", allTileEntities.Count);
        Console.WriteLine("Converting to JSON...");
        var js = new JSONSerializer();
        using (var sw = new StreamWriter(args[1]))
            js.Serialize(ConvertObject(allTileEntities), sw);
        Console.WriteLine("Successfully exported to {0}", args[1]);
    }
    else
    {
        // ---- Archive mode: scan a gzipped tar backup, one output per world ----
        if (args.Length < 4)
        {
            Console.WriteLine("USAGE: NBTExtractor <backup tar.gz> <world json> <nether json> <end json>");
            return;
        }

        var allTileEntities = new SortedDictionary<string, List<Object>>();
        var outputStreams = new SortedDictionary<string, TextWriter>();
        var firsts = new SortedDictionary<string, bool>();
        string[] worlds = { "world", "world_nether", "world_the_end" };

        foreach (var world in worlds)
        {
            allTileEntities[world] = new List<object>();
            firsts[world] = true;
        }
        for (int i = 0; i < worlds.Length; i++)
        {
            var sw = new StreamWriter(args[i + 1]);
            outputStreams[worlds[i]] = sw;
            sw.WriteLine("[");
        }

        using (var fs = new FileStream(args[0], FileMode.Open))
        using (var gis = new GZipInputStream(fs))
        using (var tis = new TarInputStream(gis))
        {
            TarEntry tarEntry;
            while ((tarEntry = tis.GetNextEntry()) != null)
            {
                if (tarEntry.IsDirectory)
                    continue;

                foreach (var world in worlds)
                {
                    // Periodically flush collected entities to keep memory bounded.
                    FlushStreams(worlds, firsts, outputStreams, allTileEntities, 4096);

                    if (tarEntry.Name.Contains("/" + world + "/") && tarEntry.Name.EndsWith(".mca"))
                    {
                        Console.WriteLine("[/] {0}", tarEntry.Name);
                        var currentList = allTileEntities[world];
                        using (var regionData = new MemoryStream())
                        {
                            // Copy the whole tar entry into memory so ExtractRegion can seek.
                            byte[] copyBuffer = new byte[4096];
                            int numRead = tis.Read(copyBuffer, 0, copyBuffer.Length);
                            while (numRead > 0)
                            {
                                regionData.Write(copyBuffer, 0, numRead);
                                numRead = tis.Read(copyBuffer, 0, copyBuffer.Length);
                            }
                            regionData.Seek(0, SeekOrigin.Begin);

                            foreach (var chunk in ExtractRegion(regionData))
                            {
                                var level = (List<KeyValuePair<string, object>>)chunk.Where(x => x.Key == "Level").Single().Value;
                                foreach (var tileEntities in level.Where(x => x.Key == "TileEntities" && x.Value is List<KeyValuePair<string, object>>).Select(x => (List<KeyValuePair<string, object>>)x.Value))
                                {
                                    currentList.AddRange(tileEntities.Select(x => x.Value));
                                }
                                foreach (var tileEntities in level.Where(x => x.Key == "TileEntities" && x.Value is object[]).Select(x => (object[])x.Value))
                                {
                                    currentList.AddRange(tileEntities);
                                }
                            }
                        }
                        break;
                    }
                }
            }

            Console.WriteLine("Done");
            // Final flush, then close each world's JSON array.
            FlushStreams(worlds, firsts, outputStreams, allTileEntities, 0);
            foreach (var kvp in outputStreams)
            {
                kvp.Value.WriteLine("]");
                kvp.Value.Close();
            }
        }
    }
}
/**
 * Extracts every entry of the given tar stream into the destination
 * directory, updating label2 and progressBar1 on the UI thread as it goes.
 */
private void extract_tar(TarInputStream tar, string desitination)
{
    // Shared UI callback that advances the progress bar.
    delegateProgressBar increment_bar = new delegateProgressBar((int v) =>
    {
        Cursor.Current = Cursors.WaitCursor;
        progressBar1.Value = v;
    });

    for (ICSharpCode.SharpZipLib.Tar.TarEntry entry = tar.GetNextEntry(); entry != null; entry = tar.GetNextEntry())
    {
        string targetPath = desitination + "\\" + entry.Name;

        if (entry.IsDirectory)
        {
            // Recreate directory entries from scratch.
            if (Directory.Exists(targetPath))
                Directory.Delete(targetPath, true);
            if (!Directory.Exists(targetPath))
                Directory.CreateDirectory(targetPath);
            continue;
        }

        // Announce the file and size the bar; entries too large for an int are
        // tracked in 64 KB units so the maximum fits in Int32.
        Invoke(new delegateUpdateLabel((string s) => { this.label2.Text = s; }),
               new Object[] { entry.Name + " extracting ..." });
        Invoke(new delegateProgressBar((Int32 v) =>
        {
            progressBar1.Value = 0;
            progressBar1.Minimum = 0;
            progressBar1.Maximum = v;
        }), new Object[] { (Int32.MaxValue < entry.Size) ? (Int32)(entry.Size / 65536) : (Int32)entry.Size });

        using (FileStream dest = new FileStream(targetPath, FileMode.Create, FileAccess.Write))
        using (BinaryWriter writer = new BinaryWriter(dest))
        {
            byte[] chunk = new byte[32768];
            Int32 bytesRead;
            Int32 totalWritten = 0;
            while ((bytesRead = tar.Read(chunk, 0, 32768)) > 0)
            {
                writer.Write(chunk, 0, bytesRead);
                totalWritten += bytesRead;

                // Report progress in the same units the bar maximum was set with.
                Object[] progressArg = (Int32.MaxValue < entry.Size)
                    ? new Object[] { (Int32)totalWritten / 65536 }
                    : new Object[] { totalWritten };
                Invoke(increment_bar, progressArg);
            }
            writer.Flush();
        }
    }
}