public void ReadData(BinaryReader reader, GpkPackage package)
{
    // Array property: the payload is kept as an opaque byte blob of the
    // size announced in the property header; no per-element parsing is done.
    // (Removed: a dead `new byte[size]` allocation that was immediately
    // overwritten by ReadBytes, and a commented-out element-parsing experiment.)
    value = reader.ReadBytes(size);
    RecalculateSize();
}
public void ReadData(GpkPackage package, GpkExport export)
{
    // Object-reference payload: a single 4-byte object index at the start
    // of the export data, resolved to a name via the package's object tables.
    // The reader/stream pair is now disposed deterministically (it leaked before).
    using (BinaryReader reader = new BinaryReader(new MemoryStream(export.Data)))
    {
        ObjectIndex = reader.ReadInt32();
        ObjectName = package.GetObjectName(ObjectIndex);
    }
}
public void ReadData(BinaryReader reader, GpkPackage package)
{
    // Name property: a 4-byte string-table index followed by 4 bytes of padding.
    long nameIndex = reader.ReadInt32();
    value = package.GetString(nameIndex);
    padding = reader.ReadInt32();
}
private void WriteChunkContent(BinaryWriter writer, GpkPackage package)
{
    // Serializes every compressed chunk: per chunk a block header (signature,
    // blocksize, sizes), then the block size table, then all compressed
    // payloads back to back. Also records each chunk's offset/size as written.
    logger.Debug("WriteChunkBlocks");
    for (int i = 0; i < package.Header.ChunkHeaders.Count; i++)
    {
        GpkCompressedChunkHeader chunk = package.Header.ChunkHeaders[i];
        // Stream end is where this chunk starts — assumes the writer only
        // ever appends here (Length == write position); TODO confirm.
        chunk.CompressedOffset = (int)writer.BaseStream.Length;
        writer.Write(chunk.writableChunkblock.signature);
        writer.Write(chunk.writableChunkblock.blocksize);
        writer.Write(chunk.writableChunkblock.compressedSize);
        writer.Write(chunk.writableChunkblock.uncompressedSize_chunkheader);
        // Size table for all blocks first ...
        foreach (var block in chunk.writableChunkblock.chunkBlocks)
        {
            writer.Write(block.compressedSize);
            writer.Write(block.uncompressedDataSize);
        }
        // ... then the actual compressed payloads, in the same block order.
        foreach (var block in chunk.writableChunkblock.chunkBlocks)
        {
            writer.Write(block.compressedData);
        }
        // Actual on-disk size = bytes emitted since CompressedOffset.
        chunk.CompressedSize = (int)writer.BaseStream.Length - chunk.CompressedOffset; //?????
        logger.Debug("Chunk {0}: UncompressedOffset {1}, UncompressedSize {2}, UncompressedEnd {3}, CompressedOffset {4}, CompressedSize {5}, CompressedEnd {6}, Blocks {7}",
            i, chunk.UncompressedOffset, chunk.UncompressedSize, chunk.UncompressedOffset + chunk.UncompressedSize,
            chunk.CompressedOffset, chunk.CompressedSize, chunk.CompressedOffset + chunk.CompressedSize, chunk.writableChunkblock.chunkBlocks.Count);
    }
    logger.Debug("WriteChunkBlocks done");
}
private void WriteImports(BinaryWriter writer, GpkPackage package)
{
    // If the stream position drifted from the header's recorded import
    // offset, patch the header field in place and continue from the
    // actual position.
    if (writer.BaseStream.Position != package.Header.ImportOffset)
    {
        package.Header.ImportOffset = (int)writer.BaseStream.Position;
        writer.BaseStream.Seek(offsetImportPos, SeekOrigin.Begin);
        writer.Write(package.Header.ImportOffset);
        writer.BaseStream.Seek(package.Header.ImportOffset, SeekOrigin.Begin);
        logger.Debug("import offset mismatch, fixed!");
    }

    // One fixed-size record per import; note the object name index is
    // written as a 4-byte value while the class indices are written wide.
    foreach (GpkImport entry in package.ImportList.Values)
    {
        writer.Write(package.GetStringIndex(entry.ClassPackage));
        writer.Write(package.GetStringIndex(entry.ClassName));
        writer.Write(entry.PackageRef);
        writer.Write((int)package.GetStringIndex(entry.ObjectName));
        writer.Write(entry.Unk);
        stat.progress++;
    }

    logger.Debug("Wrote imports pos " + writer.BaseStream.Position);
}
public static void ParsePayload(GpkPackage package, GpkExport export)
{
    // Attach a typed payload parser based on the export's class name,
    // then let it read its data. Unknown classes are left without payload.
    switch (export.ClassName)
    {
        case "Core.SoundNodeWave":
            export.Payload = new Soundwave();
            break;
        case "Core.SoundCue":
            export.Payload = new SoundCue();
            break;
        case "Core.Texture2D":
            // Texture parsing is optional; the odd compressed variant is
            // skipped entirely (gmp hack).
            if (Settings.Default.EnableTexture2D && !isStrangeCompressedTexture(export))
            {
                export.Payload = new Texture2D();
            }
            break;
    }

    if (export.Payload != null)
    {
        export.Payload.ReadData(package, export);
    }
}
private void ReadImports(BinaryReader reader, GpkPackage package)
{
    logger.Debug("Reading Imports at {0}....", package.Header.ImportOffset);
    reader.BaseStream.Seek(package.Header.ImportOffset, SeekOrigin.Begin);

    for (int i = 0; i < package.Header.ImportCount; i++)
    {
        GpkImport imp = new GpkImport();

        // Fixed record layout: two 8-byte string indices, then three ints.
        long classPackageIdx = reader.ReadInt64();
        long classIdx = reader.ReadInt64();
        imp.PackageRef = reader.ReadInt32();
        long objectIdx = reader.ReadInt32();
        imp.Unk = reader.ReadInt32();

        // Resolve the raw indices against the package string table.
        imp.ClassPackage = package.GetString(classPackageIdx);
        imp.ClassName = package.GetString(classIdx);
        imp.ObjectName = package.GetString(objectIdx);
        imp.UID = GenerateUID(package, imp);

        package.ImportList.Add(i, imp);
        logger.Debug("Import {0}: ClassPackage {1} Class: {2} Object: {3}", i, imp.ClassPackage, imp.ClassName, imp.ObjectName);
        stat.progress++;
    }
}
private void WriteFilePadding(BinaryWriter writer, GpkPackage package, int compuSize)
{
    long newSize = writer.BaseStream.Position;
    logger.Debug("New size: {0}, Old size: {1}", newSize, package.OrginalSize);
    logger.Debug("Compu Size: {0}, Diff: {1} -", compuSize, newSize - compuSize);

    if (newSize < package.OrginalSize)
    {
        // Shorter than the original file: pad the tail with zero bytes.
        long missing = package.OrginalSize - newSize;
        writer.Write(new byte[missing]);
        logger.Info(String.Format("Package was filled up with {0} bytes..", missing));
    }
    else if (newSize == package.OrginalSize)
    {
        logger.Info(String.Format("Package size is the old size..."));
    }
    else if (newSize > package.OrginalSize)
    {
        // Bigger than the original: the game may reject the file, so warn.
        logger.Info("The new package size is bigger than the orginal one! Tera may not acccept this file.");
        logger.Info("New size {0} bytes, Old size {1} bytes. +{2} bytes", newSize, package.OrginalSize, newSize - package.OrginalSize);
    }
}
private void ReadNames(BinaryReader reader, GpkPackage package)
{
    logger.Debug("Reading Namelist at {0}....", package.Header.NameOffset);
    reader.BaseStream.Seek(package.Header.NameOffset, SeekOrigin.Begin);

    for (int i = 0; i < package.Header.NameCount; i++)
    {
        GpkString entry = new GpkString();

        // Sign of the length prefix selects the encoding: positive =
        // single-byte string of len chars, negative = UTF-16 string of
        // |len| * 2 bytes.
        int len = reader.ReadInt32();
        if (len > 0)
        {
            entry.name = Reader.ReadString(reader, len);
        }
        else
        {
            entry.name = Reader.ReadUnicodeString(reader, (len * -1) * 2);
        }
        entry.flags = reader.ReadInt64();

        package.NameList.Add(i, entry);
        logger.Debug("Name {0}: {1}", i, entry.name);
        stat.progress++;
    }
}
public static string GenerateUID(GpkPackage package, GpkExport export)
{
    // Base name is "Package.Object", or just the object name when the
    // export has no package.
    string baseName;
    if (export.PackageName == "none")
    {
        baseName = export.ObjectName;
    }
    else
    {
        baseName = export.PackageName + "." + export.ObjectName;
    }

    // Append _1, _2, ... until the name is unique within the package,
    // then reserve it in the UID list and return it.
    for (int counter = 0; ; counter++)
    {
        string candidate = counter == 0 ? baseName : baseName + "_" + counter;
        if (!package.UidList.ContainsKey(candidate))
        {
            package.UidList.Add(candidate, "");
            return candidate;
        }
    }
}
private void replaceSaveToolStripMenuItem_Click(object sender, EventArgs e)
{
    // PatchMode save: for every loaded package that has changed exports,
    // write a "<path>_patched" copy containing the replacements.
    bool savedAnything = false;

    if (changedExports != null)
    {
        for (int i = 0; i < changedExports.Length; i++)
        {
            List<GpkExport> exports = changedExports[i];
            if (exports.Count == 0)
            {
                continue; // nothing changed in this package
            }

            try
            {
                Writer tmpS = new Writer();
                GpkPackage package = loadedGpkPackages[i];
                string savepath = package.Path + "_patched";
                tmpS.SaveReplacedExport(package, savepath, exports);
                logger.Info(String.Format("Saved the changed data of package '{0} to {1}'!", package.Filename, savepath));
                savedAnything = true;
            }
            catch (Exception ex)
            {
                // A failed save of one package must not abort the others.
                logger.FatalException("Save failure! " + ex, ex);
            }
        }
    }

    if (!savedAnything)
    {
        logger.Info("Nothing to save in PatchMode!");
    }
}
// Parses a GPK package embedded as a byte buffer inside another package.
// Same pipeline as ReadGpk, but reading from memory, with an extra Depends
// pass and an optional skip of the (potentially large) export data.
// Returns the populated package, or null on any parse failure.
private GpkPackage ReadSubGpkPackage(GpkPackage package, byte[] data, bool skipExportData, Status stat)
{
    BinaryReader reader = null;
    try
    {
        reader = new BinaryReader(new MemoryStream(data));
        Stopwatch pkgWatch = new Stopwatch();
        logger = LogManager.GetLogger("[ReadSubGpkPackage:" + package.Filename + "]");
        logger.Debug("Reading Start");
        stat.name = package.Filename;
        pkgWatch.Start();

        //parsing
        ReadHeader(reader, package);
        var file = CheckAndDecompress(reader, package);
        if (file != null)
        {
            // Package was compressed: continue on the decompressed buffer.
            reader.Close();
            reader.Dispose();
            reader = new BinaryReader(new MemoryStream(file));
        }
        ReadNames(reader, package);
        ReadImports(reader, package);
        ReadExports(reader, package);
        ReadDepends(reader, package);
        if (!skipExportData)
        {
            ReadExportData(reader, package);
        }
        reader.Close();
        reader.Dispose();

        //boring log stuff
        pkgWatch.Stop();
        stat.time = pkgWatch.ElapsedMilliseconds;
        stat.finished = true;
        logger.Info("Reading of package {0} complete, took {1}ms!", package.Filename, pkgWatch.ElapsedMilliseconds);
        return(package);
    }
    catch (Exception ex)
    {
        logger.Fatal("Parse failure!");
        logger.Fatal(ex);
    }
    finally
    {
        // Safety net: release the reader even when parsing throws.
        // (Closing an already-closed reader on the success path is harmless.)
        if (reader != null)
        {
            reader.Close();
        }
    }
    return(null);
}
public void ReadData(BinaryReader reader, GpkPackage package)
{
    // Struct property: an 8-byte string index naming the struct type,
    // followed by the struct contents kept as an opaque byte blob.
    // (Removed: a dead `new byte[size]` allocation that was immediately
    // overwritten by ReadBytes.)
    long structType = reader.ReadInt64();
    innerType = package.GetString(structType);
    value = reader.ReadBytes(size);
}
private void openToolStripMenuItem_Click(object sender, EventArgs e)
{
    String[] files = MiscFuncs.GenerateOpenDialog();
    if (files.Length == 0) { return; }

    DateTime start = DateTime.Now;
    List<IProgress> runningReaders = new List<IProgress>();
    List<Task> runningTasks = new List<Task>();
    // Guards runningReaders and loadedGpkPackages: both are List<T>
    // (not thread-safe) and were mutated from multiple reader tasks
    // concurrently without synchronization.
    object sync = new object();

    foreach (var path in files)
    {
        if (!File.Exists(path)) { continue; }

        Task newTask = new Task(delegate ()
        {
            Reader reader = new Reader();
            lock (sync)
            {
                runningReaders.Add(reader);
            }
            GpkPackage tmpPack = reader.ReadGpk(path);
            if (tmpPack != null)
            {
                if (Settings.Default.Debug)
                {
                    tmpPack.Changes = true; //tmp, remove after tests
                }
                lock (sync)
                {
                    loadedGpkPackages.Add(tmpPack);
                }
            }
        });
        newTask.Start();
        runningTasks.Add(newTask);
    }

    // Pump the UI and refresh progress while the readers run.
    while (!Task.WaitAll(runningTasks.ToArray(), 50))
    {
        Application.DoEvents();
        DisplayStatus(runningReaders, "Loading", start);
    }

    // Final status update once everything has finished.
    DisplayStatus(runningReaders, "Loading", start);

    // PatchMode bookkeeping: one changed-export list per loaded package.
    Array.Resize(ref changedExports, loadedGpkPackages.Count);
    for (int i = 0; i < changedExports.Length; i++)
    {
        changedExports[i] = new List<GpkExport>();
    }

    // Refresh the GUI tree with the newly loaded packages.
    DrawPackages();
}
private void WriteFileEnding(BinaryWriter writer, GpkPackage package, int compuSize)
{
    // Trailing 4-byte size field; it counts itself, hence Position + 4.
    long finalSize = writer.BaseStream.Position + 4;
    writer.Write((int)finalSize);
    logger.Debug("New size: {0}, Old size: {1}", finalSize, package.OrginalSize);
    logger.Debug("Compu Size: {0}, Diff: {1} -", compuSize, finalSize - compuSize);
}
public void WriteData(BinaryWriter writer, GpkPackage package, GpkExport export)
{
    // Serializes a Texture2D payload: tga path, then every mipmap as a
    // compressed chunk (info, header, block size table, block data,
    // dimensions), then the trailing guid.
    writer.Write(startUnk);
    if (inUnicode)
    {
        Writer.WriteUnicodeString(writer, tgaPath, true);
    }
    else
    {
        Writer.WriteString(writer, tgaPath, true);
    }
    writer.Write(maps.Count);
    foreach (var map in maps)
    {
        //refressh block info, compress blocks
        map.generateBlocks();
        //chunk
        //info
        writer.Write(map.compFlag);
        writer.Write(map.uncompressedSize);
        // Recompute the chunk size from its parts: 16-byte chunk header +
        // 8 bytes per block-table entry + the compressed payload size.
        int chunkSize = 16 + map.blocks.Count * 8 + map.compressedSize;
        if (chunkSize != map.compChunkSize)
        {
            logger.Info("fixing chunksize");
            map.compChunkSize = chunkSize;
        }
        writer.Write(map.compChunkSize);
        writer.Write((int)(writer.BaseStream.Position + 4)); //chunkoffset
        //header
        writer.Write(map.signature);
        writer.Write(map.blocksize);
        writer.Write(map.compressedSize);
        writer.Write(map.uncompressedSize_chunkheader);
        // Block size table first, then all compressed payloads in order.
        foreach (var block in map.blocks)
        {
            writer.Write(block.compressedSize);
            writer.Write(block.uncompressedDataSize);
        }
        foreach (var block in map.blocks)
        {
            writer.Write(block.compressedData);
        }
        writer.Write(map.sizeX);
        writer.Write(map.sizeY);
    }
    writer.Write(guid);
}
public void WriteData(BinaryWriter writer, GpkPackage package, GpkExport export)
{
    // Cue list: a count, then one (objectIndex, Unk2, Unk3) record per cue.
    writer.Write(cues.Count);
    foreach (SoundCueObject entry in cues)
    {
        writer.Write((int)package.GetObjectIndex(entry.objectName));
        writer.Write(entry.Unk2);
        writer.Write(entry.Unk3);
    }
}
// Reads a GPK package from disk: header, optional decompression, name /
// import / export tables and export data. Returns the populated package,
// or null on any parse failure.
public GpkPackage ReadGpk(string path)
{
    BinaryReader reader = null;
    try
    {
        Stopwatch watch = new Stopwatch();
        GpkPackage package = new GpkPackage();
        stat = new Status();
        watch.Start();

        package.Filename = Path.GetFileName(path);
        package.Path = path;
        stat.name = package.Filename;
        logger = LogManager.GetLogger("[Reader:" + package.Filename + "]");
        logger.Info("Reading Start");

        reader = new BinaryReader(new FileStream(path, FileMode.Open, FileAccess.Read));
        package.OrginalSize = reader.BaseStream.Length;

        //parsing
        ReadHeader(reader, package);
        var file = CheckAndDecompress(reader, package);
        if (file != null)
        {
            // Package was compressed: switch to the decompressed buffer.
            reader.Close();
            reader = new BinaryReader(new MemoryStream(file));
        }
        ReadNames(reader, package);
        ReadImports(reader, package);
        ReadExports(reader, package);
        ReadExportData(reader, package);

        watch.Stop();
        stat.time = watch.ElapsedMilliseconds;
        stat.finished = true;
        logger.Info("Reading of {0} complete, took {1}ms!", path, watch.ElapsedMilliseconds);
        return package;
    }
    catch (Exception ex)
    {
        logger.Fatal(ex, "Parse failure! ");
    }
    finally
    {
        // The file handle previously leaked when parsing threw; release it
        // here (matches the cleanup pattern used by ReadSubGpkPackage).
        if (reader != null)
        {
            reader.Close();
        }
    }
    return null;
}
public void WriteData(BinaryWriter writer, GpkPackage package)
{
    // size == 8 means the value is a name-table reference; anything else
    // is written as a single raw byte.
    if (size != 8)
    {
        writer.Write(byteValue);
        return;
    }
    writer.Write(package.GetStringIndex(nameValue));
}
public void WriteData(BinaryWriter writer, GpkPackage package)
{
    // 64-bit packages store the full value; 32-bit packages narrow it to int.
    if (!package.x64)
    {
        writer.Write(Convert.ToInt32(value));
    }
    else
    {
        writer.Write(value);
    }
}
private void ReadExports(BinaryReader reader, GpkPackage package)
{
    // Reads the export table, then resolves cross-references (class /
    // super / package names) in a second pass, since those indices may
    // point at other exports that are not parsed yet during the first pass.
    logger.Debug("Reading Exports at {0}....", package.Header.ExportOffset);
    reader.BaseStream.Seek(package.Header.ExportOffset, SeekOrigin.Begin);
    for (int i = 0; i < package.Header.ExportCount; i++)
    {
        GpkExport export = new GpkExport(package);
        export.ClassIndex = reader.ReadInt32();
        export.SuperIndex = reader.ReadInt32();
        export.PackageIndex = reader.ReadInt32();
        long nameIndex = reader.ReadInt32();
        export.ObjectName = package.GetString(nameIndex);
        export.Unk1 = reader.ReadInt64();
        export.Unk2 = reader.ReadInt64();
        export.SerialSize = reader.ReadInt32();
        // The offset field is only present when the export has data.
        if (export.SerialSize > 0)
        {
            export.SerialOffset = reader.ReadInt32();
        }
        export.Unk3 = reader.ReadInt32();
        export.UnkHeaderCount = reader.ReadInt32();
        export.Unk4 = reader.ReadInt32();
        export.Guid = reader.ReadBytes(16);
        // Variable-length tail: UnkHeaderCount 4-byte values, meaning unknown.
        export.UnkExtraInts = reader.ReadBytes(export.UnkHeaderCount * 4);
        package.ExportList.Add(i, export);
        logger.Debug("Export {0}: ObjectName: {1}, Data_Size: {2}, Data_Offset {3}, Export_offset {4}", i, export.ObjectName, export.SerialSize, export.SerialOffset, reader.BaseStream.Position);
        stat.progress++;
    }

    //post-processing. needed if a object points to another export.
    logger.Debug("Linking Exports..");
    foreach (KeyValuePair <long, GpkExport> pair in package.ExportList)
    {
        GpkExport export = pair.Value;
        // Only fill in names that are still unresolved from the first pass.
        if (export.ClassName == null || export.SuperName == null || export.PackageName == null || export.UID == null)
        {
            export.ClassName = package.GetObjectName(export.ClassIndex);
            export.SuperName = package.GetObjectName(export.SuperIndex);
            export.PackageName = package.GetObjectName(export.PackageIndex);
            export.UID = GenerateUID(package, export);
        }
        stat.progress++;
    }
}
private void FixNameCount(GpkPackage package)
{
    // Tera quirk: the header stores NameCount offset-encoded (WriteHeader
    // writes NameCount + NameOffset back out), so undo that here.
    // (Removed: an unused `PackageFlags & 8` local and a commented-out
    // flag-conditional variant of this adjustment.)
    package.Header.NameCount -= package.Header.NameOffset;
}
public void ReadData(GpkPackage package, GpkExport export)
{
    // Partial Texture2D parse: extracts the pixel format (from the "Format"
    // byte property) and the texture's source name from the export data.
    // NOTE(review): several locals (format, name, mipMapSize) are computed
    // but never used — this looks like work-in-progress parsing code.
    BinaryReader reader = new BinaryReader(new MemoryStream(export.Data));
    IProperty formatProp = export.Properties.Find(t => ((GpkBaseProperty)t).name == "Format");
    String format = ((GpkByteProperty)formatProp).nameValue;
    // Skip 16 bytes — presumably a fixed header/GUID; TODO confirm layout.
    reader.ReadBytes(16);
    // Negative length convention: UTF-16 string of |len| chars = len * -1 * 2 bytes.
    int len = reader.ReadInt32() * -1 * 2;
    string name = Reader.ReadUnicodeString(reader, len);
    int mipMapSize = reader.ReadInt32();
}
public void ReadData(BinaryReader reader, GpkPackage package)
{
    // size == 8: the value is an 8-byte name-table index; anything else
    // is a single raw byte.
    if (size != 8)
    {
        byteValue = reader.ReadByte();
        return;
    }
    long nameIndex = reader.ReadInt64();
    nameValue = package.GetString(nameIndex);
}
public void ResetGUI()
{
    // Returns the GUI to its idle state: clears the current selection,
    // empties the info panel, disables all action button groups and
    // resets the progress indicators and data grid.
    selectedExport = null;
    selectedPackage = null;
    selectedClass = "";
    boxInfo.Text = "";
    boxGeneralButtons.Enabled = false;
    boxDataButtons.Enabled = false;
    boxPropertyButtons.Enabled = false;
    ProgressBar.Value = 0;
    lblStatus.Text = "Ready";
    ClearGrid();
}
public void WriteData(BinaryWriter writer, GpkPackage package)
{
    // Length prefix first; its sign tells the reader which encoding
    // follows (positive = single-byte string, otherwise UTF-16).
    writer.Write(length);
    if (length <= 0)
    {
        Writer.WriteUnicodeString(writer, value, false);
    }
    else
    {
        Writer.WriteString(writer, value, false);
    }
}
public void ReadData(BinaryReader reader, GpkPackage package)
{
    // Bool encoding differs by package bitness: one raw byte on 64-bit
    // packages, a 4-byte int on 32-bit ones. realSize records which.
    if (!package.x64)
    {
        realSize = 4;
        value = Convert.ToBoolean(reader.ReadInt32());
    }
    else
    {
        realSize = 1;
        value = reader.ReadBoolean();
    }
}
private void WriteHeaderSize(BinaryWriter writer, GpkPackage package)
{
    // Nothing to do when the stream is already where the header claims.
    if (writer.BaseStream.Position == package.Header.HeaderSize)
    {
        return;
    }

    // Patch the stored header size in place, then return to the current
    // write position so serialization can continue.
    package.Header.HeaderSize = (int)writer.BaseStream.Position;
    writer.BaseStream.Seek(headerSizeOffset, SeekOrigin.Begin);
    writer.Write(package.Header.HeaderSize);
    writer.BaseStream.Seek(package.Header.HeaderSize, SeekOrigin.Begin);
    logger.Debug("headersize mismatch, fixed!");
}
//http://forums.nexusmods.com/index.php?/topic/1964864-sound-replacement-possible/#entry18577584
public void ReadData(GpkPackage package, GpkExport export)
{
    // Extracts the embedded OGG stream from a SoundNodeWave export.
    // (Removed: a dead `new byte[ogg_length1]` allocation that was
    // immediately overwritten by ReadBytes, and two unused locals — the
    // reads themselves remain so the stream position is unchanged.)
    BinaryReader reader = new BinaryReader(new MemoryStream(export.Data));
    reader.ReadBytes(20); //(12bytes 00)
    int oggLength = reader.ReadInt32();
    reader.ReadInt32(); // second length field — value unused, read to advance the stream
    reader.ReadInt32(); // offset field — value unused, read to advance the stream
    oggdata = reader.ReadBytes(oggLength);
    reader.ReadBytes(32); //2x(12bytes 00 + offset +4)
}
private void WriteHeader(BinaryWriter writer, GpkPackage package)
{
    // Serializes the package header. The stream positions of the name /
    // export / import offset fields are remembered in offset*Pos so later
    // passes can patch them in place (see WriteImports / WriteHeaderSize).
    writer.Write(package.Header.Tag);
    writer.Write(package.Header.FileVersion);
    writer.Write(package.Header.LicenseVersion);
    writer.Write(package.Header.PackageFlags);
    // Length prefix includes the terminator byte.
    writer.Write(package.Header.PackageName.Length + 1);
    WriteString(writer, package.Header.PackageName);
    writer.Write(package.Header.Unk1);
    writer.Write(package.Header.Unk2);
    // NameCount is stored offset-encoded on disk (see FixNameCount).
    writer.Write(package.Header.NameCount + package.Header.NameOffset); //tera thing
    offsetNamePos = writer.BaseStream.Position;
    writer.Write(package.Header.NameOffset);
    writer.Write(package.Header.ExportCount);
    offsetExportPos = writer.BaseStream.Position;
    writer.Write(package.Header.ExportOffset);
    writer.Write(package.Header.ImportCount);
    offsetImportPos = writer.BaseStream.Position;
    writer.Write(package.Header.ImportOffset);
    writer.Write(package.Header.DependsOffset);
    writer.Write(package.Header.FGUID);
    writer.Write(package.Header.Generations.Count);
    for (int i = 0; i < package.Header.Generations.Count; i++)
    {
        GpkGeneration tmpgen = package.Header.Generations[i];
        writer.Write(tmpgen.ExportCount);
        writer.Write(tmpgen.NameCount);
        writer.Write(tmpgen.NetObjectCount);
    }
    writer.Write(package.Header.Unk3);
    writer.Write(package.Header.Unk4);
    writer.Write(package.Header.Unk5);
    writer.Write(package.Header.Unk6);
    //writer.Write(package.Header.EngineVersion);
    // Deliberately replaces the engine version with a marker value.
    writer.Write(0xC0FFEEAA); //my signature ^^
    writer.Write(package.Header.CookerVersion);
    logger.Debug("Wrote header pos " + writer.BaseStream.Position);
}