/// <summary>
/// Returns true when the archive contains a file with the given (case-insensitive) name.
/// Logs a detailed trace line on a hit.
/// </summary>
public bool HasFile(string name) {
    if (!_lookup.ContainsKey(name)) {
        return false;
    }
    DebugLogger.DetailedWriteLine($"ARCHIVE: {_source} contains file {name}");
    return true;
}
/// <summary>
/// Returns true when the archive contains a folder with the given (case-insensitive) name.
/// Logs a detailed trace line on a hit.
/// </summary>
public bool HasFolder(string name) {
    if (!_folderNames.Contains(name)) {
        return false;
    }
    DebugLogger.DetailedWriteLine($"ARCHIVE: {_source} contains folder {name}");
    return true;
}
/// <summary>
/// Opens an IRO archive and indexes its directory.
/// </summary>
/// <param name="filename">Path of the archive on disk.</param>
/// <param name="patchable">When true the file is opened read/write so it can be patched in place.</param>
/// <param name="progressAction">Optional callback invoked as (entryIndex, totalEntries) while the directory is parsed.</param>
public IrosArc(string filename, bool patchable = false, Action <int, int> progressAction = null) {
    _source = filename;
    var sw = new System.Diagnostics.Stopwatch(); // timing only, for the trace line at the end
    sw.Start();
    if (patchable) {
        _data = new System.IO.FileStream(filename, System.IO.FileMode.Open, System.IO.FileAccess.ReadWrite);
    } else {
        _data = new System.IO.FileStream(filename, System.IO.FileMode.Open, System.IO.FileAccess.Read);
    }
    _header = new ArcHeader();
    _header.Open(_data);
    int numfiles;
    // Seek to the directory. A count of -1 is a "forwarder": the next 8 bytes
    // hold the real directory position (used when the directory was relocated
    // past the 32-bit range the header can store). Follow forwarders until a
    // non-negative count is read.
    _data.Position = _header.Directory;
    do {
        numfiles = _data.ReadInt();
        if (numfiles == -1) {
            _data.Position = _data.ReadLong();
        }
    } while (numfiles < 0);
    // File and folder lookups are case-insensitive, matching Windows path semantics.
    _entries = new List <DirectoryEntry>();
    _lookup = new Dictionary <string, DirectoryEntry>(StringComparer.InvariantCultureIgnoreCase);
    _folderNames = new HashSet <string>(StringComparer.InvariantCultureIgnoreCase);
    for (int i = 0; i < numfiles; i++) {
        progressAction?.Invoke(i, numfiles);
        DirectoryEntry e = new DirectoryEntry();
        e.Open(_data, _header.Version);
#if !RUDE
        // Reject entries carrying flags this build does not support.
        if ((e.Flags & FileFlags.RudeFlags) != 0) {
            throw new IrosArcException(String.Format("Archive {0} entry {1} has invalid flags", filename, e.Filename));
        }
#endif
        _entries.Add(e);
        _lookup[e.Filename] = e; // last entry wins on duplicate names
        // Register the entry's parent folder (backslash-separated paths).
        int lpos = e.Filename.LastIndexOf('\\');
        if (lpos > 0) {
            _folderNames.Add(e.Filename.Substring(0, lpos));
        }
    }
    sw.Stop();
    DebugLogger.DetailedWriteLine($"IrosArc: opened (unknown), contains {_lookup.Count} files, took {sw.ElapsedMilliseconds} ms to parse");
}
/// <summary>
/// Evicts decompressed-data cache entries that have not been accessed in the
/// last 60 seconds.
/// </summary>
private void CleanCache() {
    // BUGFIX: evaluate the cutoff once. The original computed
    // DateTime.Now.AddSeconds(-60) inside the Where lambda for every entry,
    // so the eviction threshold drifted while the scan was running.
    DateTime cutoff = DateTime.Now.AddSeconds(-60);
    long[] remove = _cache
                    .ToArray() // snapshot: safe even if other threads mutate the ConcurrentDictionary
                    .Where(kv => kv.Value.LastAccess < cutoff)
                    .Select(kv => kv.Key)
                    .ToArray();
    if (remove.Any()) {
        DebugLogger.DetailedWriteLine($"Removing {remove.Length} compressed files from cache: ");
        CacheEntry _;
        foreach (long r in remove) {
            _cache.TryRemove(r, out _); // entry may already be gone; ignore result
        }
    }
}
/// <summary>
/// Reads a hex-patch script from <paramref name="source"/> and applies it.
/// A line starting with "Delay" (optionally "Delay = N") postpones application
/// by N milliseconds on a background thread; otherwise the patch is applied
/// immediately.
/// </summary>
public static void Apply(System.IO.Stream source) {
    // Slurp the whole script so the stream can be released before any delay.
    List <string> lines = new List <string>();
    using (var sr = new System.IO.StreamReader(source)) {
        string line;
        while ((line = sr.ReadLine()) != null) {
            lines.Add(line);
        }
    }
    string delay = lines.FirstOrDefault(s => s.Trim().StartsWith("Delay", StringComparison.InvariantCultureIgnoreCase));
    if (delay != null) {
        // Accept both "Delay 500" and "Delay = 500" forms.
        delay = delay.Trim().Substring(5).Trim();
        if (delay.StartsWith("=")) {
            delay = delay.Substring(1).Trim();
        }
        int time;
        if (int.TryParse(delay, out time)) {
            new System.Threading.Thread(() => {
                System.Threading.Thread.Sleep(time);
                DebugLogger.DetailedWriteLine("Applying delayed hex patch");
                Apply(lines);
            }) { IsBackground = true, Name = "ApplyDelayedHexPatch" }
            .Start();
        } else {
            // BUGFIX: a malformed delay value previously caused the whole patch
            // to be silently dropped. Treat it as "no delay" and apply now.
            Apply(lines);
        }
    } else {
        Apply(lines);
    }
}
/// <summary>
/// Reads up to <paramref name="length"/> bytes of <paramref name="file"/>,
/// starting at <paramref name="offset"/>, into the unmanaged buffer
/// <paramref name="dest"/>. <paramref name="bytesRead"/> receives the number
/// of bytes actually delivered (0 when the file is unknown or the offset is
/// past the end of the data).
/// </summary>
public void RawRead(string file, uint offset, uint length, IntPtr dest, ref uint bytesRead) {
    DirectoryEntry e;
    if (!_lookup.TryGetValue(file, out e)) {
        bytesRead = 0; // unknown file
        return;
    }
    if ((e.Flags & FileFlags.COMPRESSION_FLAGS) != 0) {
        // Compressed entry: serve from the decompressed in-memory cache copy.
        var cache = GetCache(e);
        // BUGFIX: if offset is beyond the end of the data, the unsigned
        // subtraction below wrapped around to a huge value and Marshal.Copy
        // could read past the end of cache.Data. Report a zero-byte read instead.
        if (offset >= (uint)cache.Data.Length) {
            bytesRead = 0;
            DebugLogger.DetailedWriteLine($"IrosArc RawRead file {file} offset {offset} length {length} read 0 bytes - cache data size {cache.Data.Length}");
            return;
        }
        uint readLen = Math.Min(length, (uint)cache.Data.Length - offset);
        System.Runtime.InteropServices.Marshal.Copy(cache.Data, (int)offset, dest, (int)readLen);
        bytesRead = readLen;
        if (readLen == 0) {
            DebugLogger.DetailedWriteLine($"IrosArc RawRead file {file} offset {offset} length {length} read {readLen} bytes - cache data size {cache.Data.Length}");
        }
    } else {
        // Uncompressed entry: read straight from the archive stream via Win32
        // ReadFile so the caller's unmanaged buffer is filled without an
        // intermediate managed copy. The OVERLAPPED structure carries the
        // 64-bit absolute file position split into low/high halves.
        if (offset >= (uint)e.Length) {
            bytesRead = 0; // BUGFIX: same unsigned wrap-around guard as above
            return;
        }
        uint readLen = Math.Min(length, (uint)e.Length - offset);
        long Loffset = offset + e.Offset;
        Win32.OVERLAPPED ov = new Win32.OVERLAPPED() {
            EventHandle = IntPtr.Zero,
            Internal = UIntPtr.Zero,
            InternalHigh = UIntPtr.Zero,
            Offset = (uint)(Loffset & 0xffffffff),
            OffsetHigh = (uint)(Loffset >> 32)
        };
        Win32.ReadFile(_data.SafeFileHandle.DangerousGetHandle(), dest, readLen, ref bytesRead, ref ov);
    }
}
/// <summary>
/// Builds a virtual LGP archive for <paramref name="name"/> by merging the
/// original LGP (read via <paramref name="handle"/>) with override files
/// supplied by the active mods, producing a VFile that maps every byte range
/// of the virtual archive to its source (inline data, the original handle,
/// a loose file, an archive, or a conditional/chunked range).
/// </summary>
/// <param name="handle">OS file handle of the original LGP (not owned; not disposed here).</param>
/// <param name="name">LGP archive name, used as the folder prefix for override lookups.</param>
/// <param name="profile">Runtime profile whose mods supply overrides.</param>
public LGPWrapper(IntPtr handle, string name, RuntimeProfile profile) {
    if (!AnyOverridesFor(name, profile)) {
        DebugLogger.WriteLine($"LGPWrapper: no overrides for {name}, early out");
        return; //optimisation, don't run anything else, if no override files
    }
    IsActive = true;
    DebugLogger.WriteLine(" LGPWrapper: Parsing");
    // Parse the original LGP's table of contents. SafeFileHandle is created
    // with ownsHandle=false so disposing the stream does not close the handle.
    var fs = new System.IO.FileStream(new Microsoft.Win32.SafeHandles.SafeFileHandle(handle, false), FileAccess.Read);
    ProcMonParser.DataFile df = ProcMonParser.FF7Files.LoadLGP(fs, name);
    fs.Position = 0;
    // Index the original entries: sort order, data start position, and the raw items.
    Dictionary <string, int> sortKeys = df.Items.ToDictionary(i => i.Name, i => i.Index, StringComparer.InvariantCulture);
    Dictionary <string, long> dataKeys = df.Items.ToDictionary(i => i.Name, i => i.Start, StringComparer.InvariantCulture);
    Dictionary <string, int> filesSizes = new Dictionary <string, int>(StringComparer.InvariantCultureIgnoreCase);
    Dictionary <string, int> filesOptions = new Dictionary <string, int>(StringComparer.InvariantCultureIgnoreCase);
    Dictionary <string, ProcMonParser.DataItem> lgpItems = df.Items.ToDictionary(i => i.Name, i => i, StringComparer.InvariantCulture);
    // Seed sizes/options from the original entries. Each LGP record carries a
    // 24-byte per-file header, hence the -24 to get the payload size.
    foreach (var item in df.Items) {
        filesOptions[item.Name] = 0;
        filesSizes[item.Name] = item.Length - 24;
        DebugLogger.DetailedWriteLine($"Checking chunk support for {name}~{item.Name}~");
        if (profile.Mods.Any(m => m.SupportsChunks(System.IO.Path.Combine(name, item.Name)))) {
            // Chunk-patched files get a generous 2MB minimum slot.
            filesSizes[item.Name] = Math.Max(filesSizes[item.Name], 1024 * 1024 * 2); //This is a horrible hack. TODO.
            filesOptions[item.Name] |= 0x1; // option bit 0x1 = "chunked"
        }
    }
    DebugLogger.WriteLine(" LGPWrapper: Prepared structures");
    // Fold in override files the mods provide, sizing each slot to the largest
    // candidate. ".chunk." files are handled by the chunk path and skipped here.
    List <string> names = profile.Mods.SelectMany(m => m.GetPathOverrideNames(name)).Distinct(StringComparer.InvariantCultureIgnoreCase).ToList();
    foreach (string fname in names) {
        if (fname.IndexOf(".chunk.", StringComparison.InvariantCultureIgnoreCase) >= 0) {
            continue;
        }
        if (!filesSizes.ContainsKey(fname)) {
            filesSizes[fname] = 0; // file exists only in mods, not the original LGP
            DebugLogger.WriteLine($"Added LGP file {name} {fname}");
        }
        var overrides = profile.Mods.SelectMany(m => m.GetOverrides(System.IO.Path.Combine(name, fname)));
        foreach (var over in overrides) {
            filesSizes[fname] = Math.Max(filesSizes[fname], over.Size);
            // An unconditional override (CFolder == null) wins outright; no need
            // to size for lower-priority candidates beyond it.
            if (over.CFolder == null) {
                break;
            }
        }
    }
    // Recompute the LGP header block for the merged entry set. Sort order and
    // data order fall back to the original archive's values where available.
    List <LGPEntry> entries = filesSizes.Select(kv => new LGPEntry(kv.Key, kv.Value)).ToList();
    List <LGP.LGPEntryMade> newentries;
    DebugLogger.WriteLine(" LGPWrapper: creating new headers");
    byte[] headers = LGP.CalculateHeaders(entries,
        s => { int index; sortKeys.TryGetValue(s, out index); return(index); },
        s => { long index; dataKeys.TryGetValue(s, out index); return((uint)index); },
        out newentries
    );
    int datastart = headers.Length;
    DebugLogger.WriteLine($" LGPWrapper: Calculated new LGP headers for {name} with {entries.Count} file entries");
    /*
     * int datastart = df.Items[0].Start;
     * byte[] headers = new byte[datastart];
     * fs.Read(headers, 0, datastart);
     * fs.Position = 0;
     */
    // Lay out the virtual file: headers first, then each entry's data region in
    // DataIndex order. `offset` must track the positions CalculateHeaders
    // committed to (checked against item.DataOffset below).
    uint offset = (uint)datastart;
    VFile = new VFile();
    try {
        VFile.Add(new VRangeInline() { Start = 0, Length = offset, Data = headers, Tag = "Headers" });
        int count = 0;
        foreach (var item in newentries.OrderBy(em => em.DataIndex)) {
            DebugLogger.DetailedWriteLine($"LGPWrapper calculate {item.Entry.Name}");
            //Bytes.WriteUInt(headers, 16 + 20 + 27 * item.Index, offset);
            string fn = System.IO.Path.Combine(name, item.Entry.Name);
            var overrides = profile.Mods.SelectMany(m => m.GetOverrides(fn));
            int fOptions;
            filesOptions.TryGetValue(item.Entry.Name, out fOptions);
            bool chunked = (fOptions & 0x1) != 0;
            //var overrides = Enumerable.Empty<OverrideFile>();
            //DebugLogger.WriteLine("Virtualizing LGP entry {0} at offset {1}", item.Entry.Name, offset);
            // Sanity check: our running offset must agree with the header layout.
            if (item.DataOffset != offset) {
                throw new Exception("LGPWrapper mismatch on offset for " + item.Entry.Name + " offset=" + offset + " hoffset=" + item.DataOffset);
            }
            if (chunked) {
                // Chunk-patched: serve original payload (past the 24-byte record
                // header) with mod chunks applied on top.
                long pos = lgpItems[item.Entry.Name].Start + 24;
                int len = lgpItems[item.Entry.Name].Length - 24;
                VFile.Add(new VRangeChunked(item.Entry.Name, profile.Mods, handle, (uint)pos, len) { Start = offset, Length = (uint)(item.Entry.MaxSize + 24), Name = fn, Tag = "Chunked" });
                //DebugLogger.WriteLine("File {0} initialized with chunks", item.Entry.Name, 0);
                offset += (uint)item.Entry.MaxSize + 24;
            } else if (!overrides.Any()) //take from original LGP
            {
                long pos = lgpItems[item.Entry.Name].Start;
                VFile.Add(new VRangeHandle() { Start = offset, Length = (uint)(item.Entry.MaxSize + 24), Handle = handle, Offset = (uint)pos, Tag = item.Entry.Name });
                offset += (uint)item.Entry.MaxSize + 24;
                //DebugLogger.WriteLine("--VRangeHandle");
            } else if (overrides.First().CFolder == null) //only one override, replace directly
            {
                // Unconditional override: synthesize the 24-byte record header
                // (name + payload size), then map the payload from file or archive.
                var ov = overrides.First();
                byte[] fheader = new byte[24];
                System.Text.Encoding.ASCII.GetBytes(item.Entry.Name, 0, item.Entry.Name.Length, fheader, 0);
                Bytes.WriteUInt(fheader, 20, (uint)ov.Size);
                VFile.Add(new VRangeInline() { Start = offset, Length = 24, Data = fheader, Tag = item.Entry.Name + "%header" });
                offset += 24;
                VRange vr;
                if (ov.Archive == null) {
                    var vf = new VRangeFile() { Start = offset, Length = (uint)ov.Size, Filename = ov.File, Tag = ov.File };
                    VFile.Add(vf);
                    _wFiles.Add(vf);
                    vr = vf;
                    //DebugLogger.WriteLine("LGP entry {0} coming from file {1}", item.Entry.Name, ov.File);
                } else {
                    vr = new VRangeArchive() { Start = offset, Length = (uint)ov.Size, File = ov.File, Archive = ov.Archive, Tag = ov.File };
                    VFile.Add(vr);
                    //DebugLogger.WriteLine("LGP entry {0} coming from archive file {1} with size {2}", item.Entry.Name, ov.File, ov.Size);
                }
                //if (vr.Length != item.Entry.MaxSize)
                //DebugLogger.WriteLine("Entry {0} size difference {1} vs {2}", item.Entry.Name, vr.Length, item.Entry.MaxSize);
                // Pad out to the slot size the headers reserved for this entry.
                if (vr.Length < item.Entry.MaxSize) {
                    uint diff = (uint)item.Entry.MaxSize - vr.Length;
                    VFile.Add(new VRangeNull() { Length = diff, Start = vr.Start + vr.Length, Tag = "Padding" });
                }
                offset += (uint)item.Entry.MaxSize; // +24 already added for the header above
                //DebugLogger.WriteLine("--VRangeFile");
            } else //multiple overrides; tricky!
            //DebugLogger.WriteLine("Add VRangeConditional for " + item.Entry.Name);
            {
                // Conditional overrides: resolved at read time, falling back to
                // the original LGP record when no condition matches.
                ProcMonParser.DataItem di;
                lgpItems.TryGetValue(item.Entry.Name, out di);
                uint fbOffset = (di == null) ? 0 : (uint)di.Start;
                var vcond = new VRangeConditional(item.Entry.Name, overrides.ToList(), handle, fbOffset) { Length = (uint)item.Entry.MaxSize + 24, Start = offset, Name = item.Entry.Name, Tag = item.Entry.Name };
                VFile.Add(vcond);
                _wFiles.Add(vcond);
                offset += (uint)item.Entry.MaxSize + 24;
                //DebugLogger.WriteLine("--VRangeConditional");
            }
            count++;
            /*
             * string file = MapFile(System.IO.Path.Combine(name, item.Name), profile);
             * if (file != null) {
             * uint length = (uint)new System.IO.FileInfo(file).Length;
             * byte[] fheader = new byte[24];
             * System.Text.Encoding.ASCII.GetBytes(item.Name, 0, item.Name.Length, fheader, 0);
             * Bytes.WriteUInt(fheader, 20, length);
             * VFile.Add(new VRangeInline() { Start = offset, Length = 24, Data = fheader });
             * offset += 24;
             * var vf = new VRangeFile() { Start = offset, Length = length, Filename = file };
             * VFile.Add(vf);
             * _wFiles.Add(vf);
             * offset += length;
             * } else {
             * VFile.Add(new VRangeHandle() { Start = offset, Length = (uint)(item.Length), Handle = handle, Offset = (uint)item.Start });
             * offset += (uint)item.Length;
             * }*/
        }
        // LGP archives end with a fixed ASCII signature.
        byte[] footer = System.Text.Encoding.ASCII.GetBytes("FINAL FANTASY7");
        VFile.Add(new VRangeInline() { Start = offset, Length = (uint)footer.Length, Data = footer, Tag = "footer" });
        DebugLogger.WriteLine("Created: " + VFile.ToString());
    } catch {
        // Dump the partial layout for diagnosis, then let the failure propagate.
        VFile.Dump();
        throw;
    }
}
/// <summary>
/// Merges <paramref name="patch"/> into this archive in place: removes files
/// listed in the patch's "%IrosPatch:Deleted" manifest, updates or appends
/// every other file, then rewrites the directory and header.
/// </summary>
/// <param name="patch">Archive containing the replacement/new files.</param>
/// <param name="onProgress">Callback receiving (fraction complete, status text).</param>
public void ApplyPatch(IrosArc patch, Action <double, string> onProgress) {
    int currentDirSize = _entries.Sum(e => e.GetSize());
    // Deletions first: the patch stores a newline-separated UTF-16 name list.
    byte[] deldata = patch.GetBytes("%IrosPatch:Deleted");
    if (deldata != null) {
        string[] delfile = System.Text.Encoding.Unicode.GetString(deldata).Split(new[] { "\n" }, StringSplitOptions.RemoveEmptyEntries);
        foreach (string del in delfile) {
            DebugLogger.DetailedWriteLine($"Removing file {del} from archive");
            _entries.RemoveAll(e => e.Filename.Equals(del, StringComparison.InvariantCultureIgnoreCase));
            // BUGFIX: _lookup was left stale, so HasFile()/lookups kept reporting
            // deleted entries. The dictionary's case-insensitive comparer makes
            // this Remove match the same entries RemoveAll just dropped.
            _lookup.Remove(del);
        }
        onProgress(0, "Removed " + delfile.Length + " deleted files");
    }
    int count = 0;
    // "%"-prefixed names are patch metadata, not payload files.
    var files = patch.AllFileNames().Where(s => !s.StartsWith("%")).ToArray();
    foreach (string file in files) {
        var patchEntry = patch._lookup[file];
        byte[] data = new byte[patchEntry.Length];
        patch._data.Position = patchEntry.Offset;
        patch._data.Read(data, 0, data.Length);
        if (HasFile(file)) //update existing
        {
            DebugLogger.DetailedWriteLine($"File {file} is already in archive...");
            DirectoryEntry exist = _lookup[file];
            if (exist.Length >= data.Length) //new data fits in the existing slot
            {
                DebugLogger.DetailedWriteLine("...updating in place");
                _data.Position = exist.Offset;
            } else //grew: relocate to end of file (old slot becomes dead space)
            {
                _data.Position = _data.Length;
                exist.Offset = _data.Position;
                DebugLogger.DetailedWriteLine("...size increase: writing to end of file");
            }
            _data.Write(data, 0, data.Length);
            exist.Length = data.Length;
            exist.Flags = patchEntry.Flags;
        } else //new file, just append
        {
            DebugLogger.DetailedWriteLine($"File {file} is new, appending");
            DirectoryEntry de = new DirectoryEntry() { Filename = file, Flags = patchEntry.Flags, Length = patchEntry.Length, Offset = _data.Length };
            _data.Position = de.Offset;
            _data.Write(data, 0, data.Length);
            _entries.Add(de);
            _lookup[file] = de;
            // BUGFIX: register the new file's parent folder so HasFolder() stays
            // consistent with the indexing the constructor performs.
            int lpos = file.LastIndexOf('\\');
            if (lpos > 0) {
                _folderNames.Add(file.Substring(0, lpos));
            }
        }
        count++;
        onProgress(1.0 * count / files.Length, "Processed " + file);
    }
    // Rewrite the directory: reuse the existing slot when the new directory
    // still fits; otherwise append it. If the file has outgrown what the
    // header's 32-bit directory field can address, write a -1 forwarder at the
    // old location pointing (as a long) at the appended directory instead.
    int newDirSize = _entries.Sum(e => e.GetSize());
    if (newDirSize <= currentDirSize) {
        DebugLogger.DetailedWriteLine("Directory will fit in existing location");
        _data.Position = _header.Directory;
    } else {
        DebugLogger.DetailedWriteLine("Directory size increase, appending");
        if (_data.Length >= int.MaxValue) //write forwarder
        {
            _data.Position = _header.Directory;
            _data.WriteInt(-1);
            _data.WriteLong(_data.Length);
            _data.Position = _data.Length;
        } else //write direct location
        {
            _header.Directory = (int)_data.Length;
            _data.Position = _header.Directory;
        }
    }
    _data.WriteInt(_entries.Count);
    foreach (var e in _entries) {
        e.Save(_data);
    }
    _header.Version = MAX_VERSION;
    _data.Position = 0;
    _header.Save(_data);
    onProgress(1.0, "Wrote directory");
}
//TODO: track blank spaces in file and reuse where possible...