/// <summary>
/// Opens the ARC archive referenced by <c>_archiveFile</c> and reads its
/// header and file table into <c>_header</c> and <c>_files</c>.
/// Errors are logged via Unity's <c>Debug</c>; the method does not rethrow.
/// </summary>
public void OpenArchive()
{
    string assetPath = AssetDatabase.GetAssetPath(_archiveFile);
    try
    {
        // Fix: the original only closed the reader inside the catch block,
        // leaking the file handle on the success path. `using` disposes it
        // on every path.
        using (BinaryReader reader = new BinaryReader(new FileStream(assetPath, FileMode.Open, FileAccess.Read, FileShare.Read)))
        {
            _header = new ArcHeader(reader);
            _files = new List<ArcFile>((int)_header.fileCount);
            for (int fileI = 0; fileI < (int)_header.fileCount; fileI++)
            {
                ArcFile file = new ArcFile(reader);
                file.ArchiveName = Path.GetFileName(assetPath);
                file.FileNumber = fileI;
                _files.Add(file);
            }
        }
    }
    catch (Exception e)
    {
        Debug.LogError("Path " + assetPath);
        Debug.LogException(e);
    }
}
/// <summary>
/// Decompresses an ARC container.
/// </summary>
/// <param name="source">Source format.</param>
/// <returns>The uncompressed format.</returns>
public BinaryFormat Convert(BinaryFormat source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    source.Stream.Position = 0;
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    var reader = new DataReader(source.Stream) {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    // Read and validate the container header.
    ArcHeader header = reader.Read<ArcHeader>() as ArcHeader;
    this.CheckHeader(header);

    if (header.CompressionType == 0x00) {
        // Already stored uncompressed: nothing to do.
        return source;
    }

    if (header.CompressionType != 0x02) {
        throw new FormatException($"ARC: Unknown compression {header.CompressionType:X4}");
    }

    // LZ4-compressed payload: rewrite the header as "uncompressed" and
    // inflate the data section that follows the per-file info table.
    DataStream outputStream = DataStreamFactory.FromMemory();
    var writer = new DataWriter(outputStream) {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    header.CompressionType = 0x00;
    writer.WriteOfType(header);

    // The file info records (0x70 bytes each) are stored uncompressed.
    byte[] fileTable = reader.ReadBytes(0x70 * header.FileCount);
    byte[] packed = reader.ReadBytes((int)(source.Stream.Length - source.Stream.Position));

    var unpacked = new byte[header.OriginalSize];
    int produced = LZ4Codec.Decode(packed, unpacked);
    if (produced != header.OriginalSize) {
        throw new FormatException($"ARC: Bad LZ4 compression.");
    }

    writer.Write(fileTable);
    writer.Write(unpacked);
    return new BinaryFormat(outputStream);
}
/// <summary>
/// Creates a new archive on <paramref name="output"/> from the given entries,
/// compressing file data on background thread-pool workers.
/// </summary>
/// <param name="output">Destination stream; written at absolute positions.</param>
/// <param name="files">Files to pack; must contain at least one entry.</param>
/// <param name="flags">Archive-level flags stored in the header.</param>
/// <param name="compress">Compression strategy handed to the workers.</param>
/// <param name="onProgress">Progress callback: fraction complete plus a status message.</param>
public static void Create(System.IO.Stream output, IEnumerable<ArchiveCreateEntry> files, ArchiveFlags flags, CompressType compress, Action<double, string> onProgress) {
    if (!files.Any()) throw new IrosArcException("Can't create an archive that contains no files");
    var sw = new System.Diagnostics.Stopwatch();
    sw.Start();
    // +2 progress steps beyond the files: one for the header, one for the
    // final directory write.
    double total = files.Count() + 2;
    int count = 0;
    onProgress(count / total, "");
    ArcHeader h = new ArcHeader() { Flags = flags, Version = MAX_VERSION, Directory = 16 };
    // One directory entry per input file; Offset/Length/Flags are filled in
    // later once each file's (possibly compressed) data is known.
    List<DirectoryEntry> entries = files.Select(f => new DirectoryEntry() { Filename = f.Filename, Flags = 0, }).ToList();
    int dsize = entries.Select(e => (int)e.GetSize()).Sum();
    h.Save(output);
    output.WriteInt(entries.Count);
    // File data begins after the header's directory offset, the 4-byte entry
    // count, and the serialized directory itself.
    long position = h.Directory + dsize + 4;
    onProgress(++count / total, "Wrote header");
    int index = 0;
    var combined = entries.Zip(files, (d,e) => new { Dir = d, ACE = e }).ToList();
    // Shared work queue: the BlockingCollection bound of 8 limits the number
    // of in-flight compressed results held in memory at once.
    var cw = new CompressWork() { Input = new System.Collections.Concurrent.ConcurrentBag<CompressEntry>(), Compressed = new System.Collections.Concurrent.BlockingCollection<CompressEntry>(8), Compress = compress };
    foreach (var comb in combined) {
        cw.Input.Add(new CompressEntry() { ACE = comb.ACE, Dir = comb.Dir });
    }
    // Spin up 8 workers. Results are consumed below in completion order, so
    // file data may be written in a different order than listed.
    foreach (int _ in Enumerable.Range(0, 8)) System.Threading.ThreadPool.QueueUserWorkItem(CompressWorkThread, cw);
    int filesDone = 0;
    while (filesDone < combined.Count) {
        // Blocks until a worker finishes compressing one file.
        var entry = cw.Compressed.Take();
        entry.Dir.Offset = position;
        var data = entry.DataRec.Data;
        // The worker reports whether compression actually shrank the data.
        if (entry.DataRec.Compressed) entry.Dir.Flags |= FileFlags.CompressLZMA;
        entry.Dir.Length = data.Length;
        output.Position = position;
        output.Write(data, 0, data.Length);
        position += entry.Dir.Length;
        onProgress(++count / total, "Written " + entry.ACE.Filename);
        index++;
        filesDone++;
    }
    // Now that every offset/length is known, go back and write the directory
    // (just past the 4-byte entry count).
    output.Position = h.Directory + 4;
    foreach (var entry in entries) {
        entry.Save(output);
    }
    sw.Stop();
    onProgress(++count / total, String.Format("Complete: {0} files, {1:0.0}MB in {2} seconds", entries.Count, output.Length / (1024f*1024f),
        sw.Elapsed.TotalSeconds));
}
/// <summary>
/// Converts an ARC container into a node tree (one node per packed file).
/// </summary>
/// <param name="source">Source binary format; must not be compressed.</param>
/// <returns>The container holding the extracted file hierarchy.</returns>
public NodeContainerFormat Convert(BinaryFormat source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    source.Stream.Position = 0;
    var result = new NodeContainerFormat();
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    var reader = new DataReader(source.Stream) {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    // Read the file header
    ArcHeader header = reader.Read<ArcHeader>() as ArcHeader;
    this.CheckHeader(header);

    if (header.CompressionType == 0x02) {
        // File is compressed
        throw new FormatException($"ARC: File is compressed.");
    }

    // Data section starts after the 0x10-byte header and the info table
    // (0x70 bytes per file).
    int startData = 0x10 + (header.FileCount * 0x070);
    for (var i = 0; i < header.FileCount; i++) {
        ArcFileInfo info = reader.Read<ArcFileInfo>() as ArcFileInfo;
        var binaryFormat = new BinaryFormat(source.Stream, startData + info.Offset, info.Size);

        string path = info.FileName.Trim('\0').Replace("\\", "/");
        int lastSeparator = path.LastIndexOf('/');
        string name = path.Substring(lastSeparator + 1);

        // Fix: entries stored at the archive root have no separator; the
        // original Substring(0, -1) threw ArgumentOutOfRangeException.
        path = lastSeparator >= 0 ? path.Substring(0, lastSeparator) : string.Empty;

        var node = new Node(name, binaryFormat) {
            Tags = {
                ["Unknown1"] = info.Unknown1,
                ["Unknown2"] = info.Unknown2,
            },
        };

        NodeFactory.CreateContainersForChild(result.Root, path, node);
    }

    return result;
}
/// <summary>
/// Initializes an empty archive description bound to the given header.
/// </summary>
/// <param name="h">Header the archive info is built around.</param>
public ArcInfo(ArcHeader h)
{
    head = h;
    cid = 0;
    dataoffset = 0;

    // Start with empty tables; they are populated while the archive is built.
    nodes = new List<ArcNode>();
    files = new List<ArcFileEntry>();
    strings = new List<string>();
    fileSizes = new List<long>();
}
/// <summary>
/// Opens an existing IRO archive and parses its directory into memory.
/// </summary>
/// <param name="filename">Path of the archive on disk.</param>
/// <param name="patchable">When true the file is opened read/write so it can be patched in place.</param>
/// <param name="progressAction">Optional callback invoked as (currentIndex, totalFiles) while parsing entries.</param>
public IrosArc(string filename, bool patchable = false, Action<int, int> progressAction = null) {
    _source = filename;
    var sw = new System.Diagnostics.Stopwatch();
    sw.Start();
    if (patchable) {
        _data = new System.IO.FileStream(filename, System.IO.FileMode.Open, System.IO.FileAccess.ReadWrite);
    } else {
        _data = new System.IO.FileStream(filename, System.IO.FileMode.Open, System.IO.FileAccess.Read);
    }
    _header = new ArcHeader();
    _header.Open(_data);
    int numfiles;
    _data.Position = _header.Directory;
    // A count of -1 is a redirect: the real directory lives at the 64-bit
    // offset that follows. Keep chasing redirects until a real (non-negative)
    // count appears.
    do {
        numfiles = _data.ReadInt();
        if (numfiles == -1) {
            _data.Position = _data.ReadLong();
        }
    } while (numfiles < 0);
    _entries = new List<DirectoryEntry>();
    // Lookups are case-insensitive, matching how callers request filenames.
    _lookup = new Dictionary<string, DirectoryEntry>(StringComparer.InvariantCultureIgnoreCase);
    _folderNames = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
    for (int i = 0; i < numfiles; i++) {
        progressAction?.Invoke(i, numfiles);
        DirectoryEntry e = new DirectoryEntry();
        e.Open(_data, _header.Version);
#if !RUDE
        // Reject entries carrying flags this build does not support.
        if ((e.Flags & FileFlags.RudeFlags) != 0) {
            throw new IrosArcException(String.Format("Archive {0} entry {1} has invalid flags", filename, e.Filename));
        }
#endif
        _entries.Add(e);
        _lookup[e.Filename] = e;
        // Entry names use '\' separators; record each entry's parent folder
        // so folder existence checks don't have to scan all entries.
        int lpos = e.Filename.LastIndexOf('\\');
        if (lpos > 0) {
            _folderNames.Add(e.Filename.Substring(0, lpos));
        }
    }
    sw.Stop();
    System.Diagnostics.Debug.WriteLine("IrosArc: opened {0}, contains {1} files, took {2} ms to parse", filename, _lookup.Count, sw.ElapsedMilliseconds);
}
/// <summary>
/// Validates an ARC header, ensuring it is present and carries the expected
/// magic identifier.
/// </summary>
/// <param name="header">Header to validate.</param>
private void CheckHeader(ArcHeader header)
{
    if (header is null) {
        throw new ArgumentNullException(nameof(header));
    }

    if (header.MagicId != "TGP0") {
        throw new FormatException($"ARC: Bad magic Id ({header.MagicId} != TGP0)");
    }
}
/// <summary>
/// Reads one 20-byte file-entry record from the archive stream.
/// </summary>
/// <param name="arcstr">Archive stream; this constructor seeks absolutely.</param>
/// <param name="head">Parsed archive header supplying the entry-table offset.</param>
/// <param name="i">Zero-based index of the record to read.</param>
public ArcFileEntry(Stream arcstr, ArcHeader head, int i) {
    // Records are 20 bytes each; 0x20 skips the fixed preamble before the
    // header-relative offsets.
    arcstr.Seek(head.entryOffset + i * 20 + 0x20, SeekOrigin.Begin);
    // Field order mirrors the on-disk layout; do not reorder these reads.
    id = Data.ReadUInt16(arcstr);            // 0xFFFF marks a subdirectory entry
    unknown = Data.ReadUInt16(arcstr);
    unknown2 = Data.ReadUInt16(arcstr);
    filenameOffset = Data.ReadUInt16(arcstr);
    dataOffset = Data.ReadUInt32(arcstr);
    dataSize = Data.ReadUInt32(arcstr);
    zero = Data.ReadUInt32(arcstr);
    // Not stored in the file; presumably assigned later when the entry is
    // scheduled for writing — TODO confirm against callers.
    WritePath = null;
}
/// <summary>
/// Packs the contents of directory <paramref name="dir"/> into a new ARC
/// archive written to <paramref name="dest"/>.
/// </summary>
/// <param name="dir">Root directory to pack.</param>
/// <param name="dest">Path of the archive file to create (overwritten if present).</param>
public static void CreateArc(string dir, string dest)
{
    ArcHeader head = new ArcHeader();

    // Build the directory info. "." and ".." conventionally occupy the first
    // two slots of the string table.
    ArcInfo ai = new ArcInfo(head);
    ai.strings.Add(".");
    ai.strings.Add("..");
    ai = BuildArcInfo(dir, "", 0xFFFF, new Dictionary<string, ushort>(), ai);
    ai.AlignHead();

    // Fix: dispose the output stream even when WriteArc throws; the original
    // never closed the FileStream it created.
    using (FileStream arcstr = new FileStream(dest, FileMode.Create))
    {
        WriteArc(arcstr, dir, ai);
    }
}
/// <summary>
/// Recursively extracts one directory node of an ARC archive to disk.
/// </summary>
/// <param name="arcstr">Archive stream; seeked absolutely throughout.</param>
/// <param name="head">Parsed archive header supplying section offsets.</param>
/// <param name="node">Directory node to dump.</param>
/// <param name="destroot">Destination directory; the node's folder is created beneath it.</param>
/// <param name="nameoverride">Optional name to use instead of the node's stored name.</param>
private static void DumpArcNode(Stream arcstr, ArcHeader head, ArcNode node, string destroot, string nameoverride = null)
{
    string nodeName = nameoverride;
    arcstr.Seek(node.filenameOffset + head.stringOffset + 0x20, SeekOrigin.Begin);
    if (nodeName == null) {
        nodeName = Data.ReadString(arcstr);
    }

    destroot += "\\" + nodeName;
    Directory.CreateDirectory(destroot);

    for (int i = 0; i < node.entryCount; ++i) {
        ArcFileEntry curr = new ArcFileEntry(arcstr, head, (int)(node.entryOffset + i));
        if (curr.id == 0xFFFF) { // subdirectory
            if (curr.filenameOffset != 0 && curr.filenameOffset != 2) { // don't go to "." and ".."
                ArcNode dirNode = new ArcNode(arcstr, (int)curr.dataOffset);
                // Some arc packing programs have a glitch; people use them, so
                // work around it by reading the name from this entry's own
                // filename offset and passing it down as an override.
                arcstr.Seek(curr.filenameOffset + head.stringOffset + 0x20, SeekOrigin.Begin);
                DumpArcNode(arcstr, head, dirNode, destroot, Data.ReadString(arcstr));
            }
        } else { // file
            arcstr.Seek(curr.filenameOffset + head.stringOffset + 0x20, SeekOrigin.Begin);
            string currName = Data.ReadString(arcstr);

            // Fix: `using` disposes the output file even if a read throws;
            // the original only closed it on the success path.
            using (FileStream dest = new FileStream(destroot + "\\" + currName, FileMode.Create))
            {
                int read = 0;
                byte[] buff = new byte[1024];
                arcstr.Seek(curr.dataOffset + head.dataStart + 0x20, SeekOrigin.Begin);
                while (read < curr.dataSize) {
                    int r = arcstr.Read(buff, 0, (int)Math.Min(1024, curr.dataSize - read));
                    if (r <= 0) {
                        // Fix: a truncated archive previously spun forever here
                        // because Read returns 0 at EOF.
                        throw new EndOfStreamException("Unexpected end of archive while extracting " + currName);
                    }
                    dest.Write(buff, 0, r);
                    read += r;
                }
            }
        }
    }
}
/// <summary>
/// Extracts an ARC archive to the destination directory, validating the
/// "RARC" magic before parsing.
/// </summary>
/// <param name="arc">Path of the archive to extract.</param>
/// <param name="dest">Destination directory for the extracted tree.</param>
public static void ExtractArc(string arc, string dest)
{
    // Fix: the original leaked the stream on the invalid-magic early return
    // and whenever extraction threw; `using` closes it on all paths.
    using (FileStream arcstr = new FileStream(arc, FileMode.Open, FileAccess.Read))
    {
        byte[] magic = Data.Read(arcstr, 4);
        if (Data.CompareBytes(magic, RARC) != 0)
        {
            Console.WriteLine(arc + " is not a valid arc file.");
            return;
        }

        ArcHeader head = new ArcHeader(arcstr);
        ArcNode root = new ArcNode(arcstr, 0);
        DumpArcNode(arcstr, head, root, dest);
    }
}
/// <summary>
/// Converts a NodeContainerFormat into an ARC container.
/// </summary>
/// <param name="source">The source format.</param>
/// <returns>The binary format.</returns>
public BinaryFormat Convert(NodeContainerFormat source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    var files = new List<Node>();
    var tags = new Dictionary<string, Dictionary<string, int>>();
    var totalSize = 0;

    // First pass: collect the real files and, for each "<name>.arcinfo"
    // sidecar, parse its "key=value" lines into the tags map keyed by the
    // path of the file it describes.
    foreach (Node node in Navigator.IterateNodes(source.Root, NavigationMode.DepthFirst)) {
        if (node.IsContainer) {
            continue;
        }

        if (node.Name.EndsWith(".arcinfo", StringComparison.InvariantCulture)) {
            string path = node.Path.Replace(".arcinfo", string.Empty);
            var dict = new Dictionary<string, int>();
            var reader = new TextDataReader(node.Stream);
            while (!node.Stream.EndOfStream) {
                string line = reader.ReadLine();
                if (string.IsNullOrEmpty(line)) {
                    continue;
                }

                string[] split = line.Split('=');
                dict.Add(split[0], int.Parse(split[1], NumberStyles.Integer, CultureInfo.InvariantCulture));
            }

            tags.Add(path, dict);
        } else {
            files.Add(node);
            totalSize += (int)node.Stream.Length;
        }
    }

    // CompressionType 0 means the data section is stored as-is;
    // OriginalSize records the total payload size.
    var header = new ArcHeader {
        MagicId = "TGP0",
        Version = 3,
        CompressionType = 0,
        FileCount = files.Count,
        OriginalSize = totalSize,
    };

    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    DataStream outputStream = DataStreamFactory.FromMemory();
    var writer = new DataWriter(outputStream) {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    writer.WriteOfType(header);

    // Second pass: one info record per file — a 0x60-byte path field,
    // offset, size, and the two tag values (0 when no .arcinfo was found).
    var currentOffset = 0;
    foreach (Node node in files) {
        // Archive paths are relative to the root and use '\' separators.
        string path = node.Path.Substring(source.Root.Path.Length + 1).Replace("/", "\\");
        var size = (int)node.Stream.Length;
        writer.Write(path, 0x60, false);
        writer.Write(currentOffset);
        writer.Write(size);
        if (tags.ContainsKey(node.Path)) {
            writer.Write(tags[node.Path]["Unknown1"]);
            writer.Write(tags[node.Path]["Unknown2"]);
        } else {
            writer.Write(0);
            writer.Write(0);
        }

        currentOffset += size;
    }

    // Finally the raw file data, in the same order as the info records.
    foreach (Node node in files)
    {
        node.Stream.WriteTo(outputStream);
    }

    return (new BinaryFormat(outputStream));
}
/// <summary>
/// Compresses an ARC container using LZ4.
/// </summary>
/// <param name="source">Source format.</param>
/// <returns>The compressed format.</returns>
public BinaryFormat Convert(BinaryFormat source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    source.Stream.Position = 0;
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    var reader = new DataReader(source.Stream) {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    // Read the file header
    ArcHeader header = reader.Read<ArcHeader>() as ArcHeader;
    this.CheckHeader(header);

    switch (header.CompressionType) {
        case 0x00: // Not compressed
        {
            // Build a new container: same header marked as LZ4 (0x02),
            // the file info table copied verbatim, and the data section
            // compressed.
            DataStream outputStream = DataStreamFactory.FromMemory();
            var writer = new DataWriter(outputStream) {
                DefaultEncoding = Encoding.GetEncoding(1252),
                Endianness = EndiannessMode.LittleEndian,
            };

            header.CompressionType = 0x02;
            writer.WriteOfType(header);

            // The per-file info records (0x70 bytes each) stay uncompressed.
            byte[] filesInfo = reader.ReadBytes(0x70 * header.FileCount);
            byte[] decompressedData = reader.ReadBytes((int)(source.Stream.Length - source.Stream.Position));

            // Allocate the worst-case output size; Encode returns the
            // actual number of compressed bytes (negative on failure).
            var compressedData = new byte[LZ4Codec.MaximumOutputSize(decompressedData.Length)];
            int bytesWritten = LZ4Codec.Encode(decompressedData, compressedData, LZ4Level.L11_OPT);
            if (bytesWritten < 0) {
                throw new FormatException($"ARC: Error in LZ4 compression.");
            }

            writer.Write(filesInfo);

            // Trim the buffer to the bytes actually produced before writing.
            var data = new byte[bytesWritten];
            Array.Copy(compressedData, data, bytesWritten);
            writer.Write(data);

            return new BinaryFormat(outputStream);
        }

        case 0x02: // LZ4 Compression
            // Already compressed
            return source;

        default:
            throw new FormatException($"ARC: Unknown compression {header.CompressionType:X4}");
    }
}
/// <summary>
/// Releases the parsed archive data so it can be garbage collected.
/// OpenArchive() repopulates both fields.
/// </summary>
public void UnloadArchive()
{
    _files = null;
    _header = null;
}