/// <summary>
/// Gets the DBRecord for a particular ID.
/// </summary>
/// <param name="recordId">string ID of the record will be normalized internally</param>
/// <returns>DBRecord corresponding to the string ID, or null when the ID is empty or the record is not found.</returns>
public DBRecordCollection GetItem(string recordId)
{
    if (string.IsNullOrEmpty(recordId))
    {
        return null;
    }

    recordId = TQData.NormalizeRecordPath(recordId);

    // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
    if (this.cache.TryGetValue(recordId, out DBRecordCollection databaseRecord))
    {
        return databaseRecord;
    }

    if (!this.recordInfo.TryGetValue(recordId, out RecordInfo rawRecord))
    {
        // record not found
        return null;
    }

    // Decompress on first access and cache the result for later calls.
    databaseRecord = rawRecord.Decompress(this);
    this.cache.Add(recordId, databaseRecord);
    return databaseRecord;
}
/// <summary>
/// Gets a DBRecord for the specified item ID string.
/// </summary>
/// <remarks>
/// Changed by VillageIdiot
/// Changed search order so that IT records have precedence of TQ records.
/// Add Custom Map database.  Custom Map records have precedence over IT records.
/// </remarks>
/// <param name="itemId">Item Id which we are looking up</param>
/// <returns>Returns the DBRecord for the item Id</returns>
public DBRecordCollection GetRecordFromFile(string itemId)
{
    itemId = TQData.NormalizeRecordPath(itemId);

    // Lookup order: custom map database, then Immortal Throne, then base TQ.
    DBRecordCollection record = this.ArzFileMod?.GetItem(itemId);
    if (record != null)
    {
        // Custom Map records have highest precedence.
        return record;
    }

    // see if it's in IT ARZ file
    record = this.ArzFileIT?.GetItem(itemId);
    if (record != null)
    {
        // IT file takes precedence over TQ.
        return record;
    }

    // Fall back to the base Titan Quest database.
    return ArzFile.GetItem(itemId);
}
/// <summary>
/// Decodes the ARZ file.
/// </summary>
/// <param name="inReader">input BinaryReader</param>
/// <param name="baseOffset">Offset in the file.</param>
/// <param name="arzFile">ArzFile instance which we are operating.</param>
public void Decode(BinaryReader inReader, int baseOffset, ArzFile arzFile)
{
    // Record Entry Format
    // 0x0000 int32 stringEntryID (dbr filename)
    // 0x0004 int32 string length
    // 0x0008 string (record type)
    // 0x00?? int32 offset
    // 0x00?? int32 length in bytes
    // 0x00?? int32 timestamp?
    // 0x00?? int32 timestamp?
    this.idStringIndex = inReader.ReadInt32();
    this.RecordType = TQData.ReadCString(inReader);
    this.offset = inReader.ReadInt32() + baseOffset;

    // Skip the next three int32 fields (compressed size and two probable
    // timestamps); they are unused, we only advance the stream position.
    for (int skip = 0; skip < 3; ++skip)
    {
        inReader.ReadInt32();
    }

    // Resolve the ID string from the ARZ file's string table.
    this.ID = arzFile.Getstring(this.idStringIndex);
}
/// <summary>
/// Parses the binary equipment block data
/// </summary>
/// <param name="pc">player collection being populated</param>
/// <param name="offset">offset of the block within the player file.</param>
/// <param name="reader">BinaryReader instance</param>
private static void ParseEquipmentBlock(PlayerCollection pc, int offset, BinaryReader reader)
{
    try
    {
        pc.equipmentBlockStart = offset;
        reader.BaseStream.Seek(offset, SeekOrigin.Begin);

        // Immortal Throne characters carry an extra stream-version header.
        if (pc.IsImmortalThrone)
        {
            TQData.ValidateNextString("equipmentCtrlIOStreamVersion", reader);
            pc.equipmentCtrlIOStreamVersion = reader.ReadInt32();
        }

        pc.EquipmentSack = new SackCollection
        {
            SackType = SackType.Equipment,
            IsImmortalThrone = pc.IsImmortalThrone
        };
        SackCollectionProvider.Parse(pc.EquipmentSack, reader);

        pc.equipmentBlockEnd = (int)reader.BaseStream.Position;
    }
    catch (ArgumentException ex)
    {
        // ValidateNextString throws ArgumentException on a tag mismatch;
        // log the failing offset and let the caller handle it.
        Log.Error($"ParseEquipmentBlock fail ! offset={offset}", ex);
        throw;
    }
}
/// <summary>
/// Encodes the internal item data back into raw data
/// </summary>
/// <param name="sta">stash instance being encoded</param>
/// <returns>raw data for the item data</returns>
private static byte[] EncodeItemData(Stash sta)
{
    // Encode the item data into a memory stream
    using (MemoryStream writeStream = new MemoryStream(2048))
    {
        using (BinaryWriter writer = new BinaryWriter(writeStream))
        {
            // Write zero into the checksum value
            writer.Write(Convert.ToInt32(0, CultureInfo.InvariantCulture));

            TQData.WriteCString(writer, "begin_block");
            writer.Write(sta.beginBlockCrap);

            TQData.WriteCString(writer, "stashVersion");
            writer.Write(sta.stashVersion);

            TQData.WriteCString(writer, "fName");

            // Changed to raw data to support extended characters
            writer.Write(sta.name.Length);
            writer.Write(sta.name);

            TQData.WriteCString(writer, "sackWidth");
            writer.Write(sta.Width);

            TQData.WriteCString(writer, "sackHeight");
            writer.Write(sta.Height);

            // SackType should already be set at this point
            SackCollectionProvider.Encode(sta.sack, writer);
        }

        // ToArray() returns a right-sized copy of the written bytes, which is
        // exactly what the old GetBuffer() + Array.Copy resize was emulating.
        // It is documented to remain valid after the BinaryWriter closes the stream.
        return writeStream.ToArray();
    }
}
/// <summary>
/// Gets the Info for a specific item id.
/// </summary>
/// <param name="itemId">Item ID which we are looking up.  Will be normalized internally.</param>
/// <returns>Returns Info for item ID and NULL if not found.</returns>
public Info GetInfo(string itemId)
{
    if (string.IsNullOrEmpty(itemId))
    {
        return null;
    }

    itemId = TQData.NormalizeRecordPath(itemId);

    // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
    if (this.infoDB.TryGetValue(itemId, out Info info))
    {
        return info;
    }

    // Search order: custom map database first, then the expansion pack, then base TQ.
    DBRecordCollection record = null;
    if (this.ArzFileMod != null)
    {
        record = this.ArzFileMod.GetItem(itemId);
    }

    if (record == null && this.ArzFileIT != null)
    {
        record = this.ArzFileIT.GetItem(itemId);
    }

    // BUGFIX: the old condition (record == null || this.ArzFileIT == null)
    // overwrote a record already found in the custom map database whenever the
    // IT database was absent, breaking the documented precedence.  Only fall
    // back to the TQ database when nothing has been found yet.
    if (record == null)
    {
        record = this.ArzFile.GetItem(itemId);
    }

    if (record == null)
    {
        return null;
    }

    info = new Info(record);
    this.infoDB.Add(itemId, info);
    return info;
}
/// <summary>
/// Encodes the live item data into raw binary format
/// </summary>
/// <param name="pc">player collection being encoded</param>
/// <returns>byte array holding the converted binary data</returns>
private static byte[] EncodeItemData(PlayerCollection pc)
{
    // Encode the item data into a memory stream
    using (MemoryStream writeStream = new MemoryStream(2048))
    {
        using (BinaryWriter writer = new BinaryWriter(writeStream))
        {
            TQData.WriteCString(writer, "numberOfSacks");
            writer.Write(pc.numberOfSacks);

            TQData.WriteCString(writer, "currentlyFocusedSackNumber");
            writer.Write(pc.currentlyFocusedSackNumber);

            TQData.WriteCString(writer, "currentlySelectedSackNumber");
            writer.Write(pc.currentlySelectedSackNumber);

            for (int i = 0; i < pc.numberOfSacks; ++i)
            {
                // SackType should already be set at this point
                SackCollectionProvider.Encode(pc.sacks[i], writer);
            }
        }

        // ToArray() returns a right-sized copy of the written bytes, replacing
        // the old GetBuffer() + Array.Copy resize.  It is documented to remain
        // valid after the BinaryWriter closes the stream.
        return writeStream.ToArray();
    }
}
/// <summary>
/// Reads the whole string table into memory from a stream.
/// </summary>
/// <remarks>
/// string Table Format
/// first 4 bytes is the number of entries
/// then
/// one string followed by another...
/// </remarks>
/// <param name="pos">position within the file.</param>
/// <param name="reader">input BinaryReader</param>
/// <param name="outStream">output StreamWriter.  May be null; used only for diagnostics.</param>
private void ReadStringTable(int pos, BinaryReader reader, StreamWriter outStream)
{
    reader.BaseStream.Seek(pos, SeekOrigin.Begin);

    int count = reader.ReadInt32();
    this.strings = new string[count];

    outStream?.WriteLine("stringTable located at 0x{1:X} numstrings= {0:n0}", count, pos);

    for (int index = 0; index < count; ++index)
    {
        this.strings[index] = TQData.ReadCString(reader);
        outStream?.WriteLine("{0},{1}", index, this.strings[index]);
    }
}
/// <summary>
/// Parses an item block within the file and converts raw item data into internal item data
/// </summary>
/// <param name="sta">stash instance being populated</param>
/// <param name="fileOffset">Offset into the file</param>
/// <param name="reader">BinaryReader instance</param>
/// <exception cref="ArgumentException">
/// Thrown by ValidateNextString on a tag mismatch; propagates to the caller.
/// (The previous catch(ArgumentException){throw;} was a no-op and has been removed.)
/// </exception>
private static void ParseItemBlock(Stash sta, int fileOffset, BinaryReader reader)
{
    reader.BaseStream.Seek(fileOffset, SeekOrigin.Begin);

    // Skip the leading int32 — the checksum slot written as zero by EncodeItemData.
    reader.ReadInt32();

    TQData.ValidateNextString("begin_block", reader);
    sta.beginBlockCrap = reader.ReadInt32();

    TQData.ValidateNextString("stashVersion", reader);
    sta.stashVersion = reader.ReadInt32();

    TQData.ValidateNextString("fName", reader);

    // Changed to raw data to support extended characters
    int stringLength = reader.ReadInt32();
    sta.name = reader.ReadBytes(stringLength);

    TQData.ValidateNextString("sackWidth", reader);
    sta.Width = reader.ReadInt32();

    TQData.ValidateNextString("sackHeight", reader);
    sta.Height = reader.ReadInt32();

    sta.numberOfSacks = 1;
    sta.sack = new SackCollection();
    sta.sack.SackType = SackType.Stash;
    sta.sack.IsImmortalThrone = true;
    SackCollectionProvider.Parse(sta.sack, reader);
}
/// <summary>
/// Gets a database record without adding it to the cache.
/// </summary>
/// <remarks>
/// The Item property caches the DBRecords, which is great when you are only using a few 100 (1000?) records and are requesting
/// them many times.  Not great if you are looping through all the records as it eats alot of memory.  This method will create
/// the record on the fly if it is not in the cache so when you are done with it, it can be reclaimed by the garbage collector.
/// Great for when you want to loop through all the records for some reason.  It will take longer, but use less memory.
/// </remarks>
/// <param name="recordId">String ID of the record.  Will be normalized internally.</param>
/// <returns>Decompressed RecordInfo record, or null when the record does not exist.</returns>
public DBRecordCollection GetRecordNotCached(string recordId)
{
    recordId = TQData.NormalizeRecordPath(recordId);

    // Use TryGetValue rather than catching KeyNotFoundException: a cache miss
    // is the expected case here and exceptions should not drive control flow.
    if (this.cache.TryGetValue(recordId, out DBRecordCollection cached))
    {
        // Already cached — reuse it, but do not add anything new to the cache.
        return cached;
    }

    if (this.recordInfo.TryGetValue(recordId, out RecordInfo info))
    {
        // Decompress on the fly without caching so the GC can reclaim it.
        return info.Decompress(this);
    }

    // In neither the cache nor the record table.
    Log.Debug($"GetRecordNotCached: record not found: {recordId}");
    return null;
}
/// <summary>
/// Encodes the live equipment data into raw binary
/// </summary>
/// <param name="pc">player collection being encoded</param>
/// <returns>byte array holding the converted binary data</returns>
private static byte[] EncodeEquipmentData(PlayerCollection pc)
{
    // Encode the equipment data into a memory stream
    using (MemoryStream writeStream = new MemoryStream(2048))
    {
        using (BinaryWriter writer = new BinaryWriter(writeStream))
        {
            // Immortal Throne characters carry an extra stream-version header.
            if (pc.IsImmortalThrone)
            {
                TQData.WriteCString(writer, "equipmentCtrlIOStreamVersion");
                writer.Write(pc.equipmentCtrlIOStreamVersion);
            }

            SackCollectionProvider.Encode(pc.EquipmentSack, writer);
        }

        // ToArray() returns a right-sized copy of the written bytes, replacing
        // the old GetBuffer() + Array.Copy resize.  It is documented to remain
        // valid after the BinaryWriter closes the stream.
        return writeStream.ToArray();
    }
}
/// <summary>
/// Parses the item block
/// </summary>
/// <param name="pc">player collection being populated</param>
/// <param name="offset">offset in the player file</param>
/// <param name="reader">BinaryReader instance</param>
private static void ParseItemBlock(PlayerCollection pc, int offset, BinaryReader reader)
{
    try
    {
        pc.itemBlockStart = offset;
        reader.BaseStream.Seek(offset, SeekOrigin.Begin);

        TQData.ValidateNextString("numberOfSacks", reader);
        pc.numberOfSacks = reader.ReadInt32();

        TQData.ValidateNextString("currentlyFocusedSackNumber", reader);
        pc.currentlyFocusedSackNumber = reader.ReadInt32();

        TQData.ValidateNextString("currentlySelectedSackNumber", reader);
        pc.currentlySelectedSackNumber = reader.ReadInt32();

        // Read each sack in order from the stream.
        pc.sacks = new SackCollection[pc.numberOfSacks];
        for (int sackNumber = 0; sackNumber < pc.numberOfSacks; ++sackNumber)
        {
            var sack = new SackCollection
            {
                SackType = SackType.Sack,
                IsImmortalThrone = pc.IsImmortalThrone
            };
            SackCollectionProvider.Parse(sack, reader);
            pc.sacks[sackNumber] = sack;
        }

        pc.itemBlockEnd = (int)reader.BaseStream.Position;
    }
    catch (ArgumentException ex)
    {
        // ValidateNextString throws ArgumentException on a tag mismatch;
        // log it and let the caller handle it.
        Log.Debug("ValidateNextString fail !", ex);
        throw;
    }
}
/// <summary>
/// Reads the entire record table into memory from a stream.
/// </summary>
/// <param name="pos">position within the file.</param>
/// <param name="numEntries">number of entries in the file.</param>
/// <param name="reader">input BinaryReader</param>
/// <param name="outStream">output StreamWriter.  May be null; used only for diagnostics.</param>
private void ReadRecordTable(int pos, int numEntries, BinaryReader reader, StreamWriter outStream)
{
    // Pre-size the dictionary with ~20% headroom to limit rehashing.
    this.recordInfo = new Dictionary<string, RecordInfo>((int)Math.Round(numEntries * 1.2));
    reader.BaseStream.Seek(pos, SeekOrigin.Begin);

    outStream?.WriteLine("RecordTable located at 0x{0:X}", pos);

    for (int entryNumber = 0; entryNumber < numEntries; ++entryNumber)
    {
        // Local renamed so it no longer shadows the recordInfo field.
        RecordInfo entry = new RecordInfo();
        entry.Decode(reader, 24, this); // 24 is the offset of where all record data begins

        this.recordInfo.Add(TQData.NormalizeRecordPath(entry.ID), entry);

        // output this record
        outStream?.WriteLine("{0},{1},{2}", entryNumber, entry.ID, entry.RecordType);
    }
}
/// <summary>
/// Parses the binary sack data to internal data
/// </summary>
/// <param name="sc">sack collection being populated</param>
/// <param name="reader">BinaryReader instance positioned at the start of the sack data</param>
public static void Parse(SackCollection sc, BinaryReader reader)
{
    try
    {
        sc.isModified = false;

        // The header layout differs by sack kind: stash has only an item count,
        // equipment has a fixed size with no header, regular sacks have a full block header.
        if (sc.sackType == SackType.Stash)
        {
            // IL decided to use a different format for the stash files.
            TQData.ValidateNextString("numItems", reader);
            sc.size = reader.ReadInt32();
        }
        else if (sc.sackType == SackType.Equipment)
        {
            // Equipment slot count is fixed: 12 with Immortal Throne, 11 without.
            if (sc.isImmortalThrone)
            {
                sc.size = 12;
                sc.slots = 12;
            }
            else
            {
                sc.size = 11;
                sc.slots = 11;
            }
        }
        else
        {
            // This is just a regular sack.
            TQData.ValidateNextString("begin_block", reader); // make sure we just read a new block
            sc.beginBlockCrap = reader.ReadInt32();

            TQData.ValidateNextString("tempBool", reader);
            sc.tempBool = reader.ReadInt32();

            TQData.ValidateNextString("size", reader);
            sc.size = reader.ReadInt32();
        }

        sc.items = new List<Item>(sc.size);

        // prevItem tracks the last distinct item so repeated stackable items
        // at position (-1,-1) can be collapsed into its StackSize.
        Item prevItem = null;
        for (int i = 0; i < sc.size; ++i)
        {
            // Additional logic to decode the weapon slots in the equipment section:
            // slots 7 and 9 open an extra block carrying the "alternate" flag.
            if (sc.sackType == SackType.Equipment && (i == 7 || i == 9))
            {
                TQData.ValidateNextString("begin_block", reader);
                sc.beginBlockCrap = reader.ReadInt32();

                // Eat the alternate tag and flag
                TQData.ValidateNextString("alternate", reader);

                // Skip over the alternateCrap
                reader.ReadInt32();
            }

            Item item = new Item();
            item.ContainerType = sc.sackType;
            ItemProvider.Parse(item, reader);

            // Stack this item with the previous item if necessary
            if ((prevItem != null) && item.DoesStack && (item.PositionX == -1) && (item.PositionY == -1))
            {
                prevItem.StackSize++;
            }
            else
            {
                prevItem = item;
                sc.items.Add(item);
                if (sc.sackType == SackType.Equipment)
                {
                    // Get the item location from the table
                    item.PositionX = SackCollection.GetEquipmentLocationOffset(i).X;
                    item.PositionY = SackCollection.GetEquipmentLocationOffset(i).Y;

                    // Eat the itemAttached tag and flag
                    TQData.ValidateNextString("itemAttached", reader);

                    // Skip over the itemAttachedCrap
                    reader.ReadInt32();
                }
            }

            // Additional logic to decode the weapon slots in the equipment section:
            // slots 8 and 10 close the extra block opened at 7 and 9.
            if (sc.sackType == SackType.Equipment && (i == 8 || i == 10))
            {
                TQData.ValidateNextString("end_block", reader);
                sc.endBlockCrap = reader.ReadInt32();
            }
        }

        TQData.ValidateNextString("end_block", reader);
        sc.endBlockCrap = reader.ReadInt32();
    }
    catch (ArgumentException ex)
    {
        // The ValidateNextString Method can throw an ArgumentException.
        // We just pass it along at this point.
        Log.Debug("ValidateNextString fail !", ex);
        throw;
    }
}
/// <summary>
/// Encodes the sack into binary form
/// </summary>
/// <param name="sc">sack collection being encoded</param>
/// <param name="writer">BinaryWriter instance</param>
public static void Encode(SackCollection sc, BinaryWriter writer)
{
    // Header: the layout depends on the kind of sack being written.
    switch (sc.sackType)
    {
        case SackType.Stash:
            // Item stacks are stored as single items in the stash
            TQData.WriteCString(writer, "numItems");
            writer.Write(sc.Count);
            break;

        case SackType.Equipment:
            // Nothing special except to skip all of the other header crap
            // since the number of items is always fixed.
            break;

        default:
            TQData.WriteCString(writer, "begin_block");
            writer.Write(sc.beginBlockCrap);
            TQData.WriteCString(writer, "tempBool");
            writer.Write(sc.tempBool);
            TQData.WriteCString(writer, "size");
            writer.Write(sc.CountTQItems());
            break;
    }

    int slotNumber = -1;
    foreach (Item item in sc)
    {
        ++slotNumber;
        item.ContainerType = sc.sackType;

        // Weapon slots 7 and 9 open an extra block carrying the "alternate"
        // flag; the flag is set only for the second weapon set (slot 9).
        if (sc.sackType == SackType.Equipment && (slotNumber == 7 || slotNumber == 9))
        {
            TQData.WriteCString(writer, "begin_block");
            writer.Write(sc.beginBlockCrap);
            TQData.WriteCString(writer, "alternate");
            writer.Write(slotNumber == 9 ? 1 : 0);
        }

        ItemProvider.Encode(item, writer);

        if (sc.sackType == SackType.Equipment)
        {
            // Flag the slot as occupied unless the slot is empty or is one of
            // the secondary weapon slots (9 and 10), which are never flagged.
            TQData.WriteCString(writer, "itemAttached");
            bool attached = !string.IsNullOrEmpty(item.BaseItemId) && slotNumber != 9 && slotNumber != 10;
            writer.Write(attached ? 1 : 0);
        }

        // Weapon slots 8 and 10 close the extra block opened at 7 and 9.
        if (sc.sackType == SackType.Equipment && (slotNumber == 8 || slotNumber == 10))
        {
            TQData.WriteCString(writer, "end_block");
            writer.Write(sc.endBlockCrap);
        }
    }

    TQData.WriteCString(writer, "end_block");
    writer.Write(sc.endBlockCrap);
}
/// <summary>
/// Read the table of contents of the ARC file
/// </summary>
/// <remarks>
/// Populates this.directoryEntries from the file named by this.FileName.
/// Returns early (leaving directoryEntries untouched) when the file header
/// or TOC offset is invalid.  IOExceptions are caught and logged.
/// </remarks>
private void ReadARCToC()
{
    // Format of an ARC file
    // 0x08 - 4 bytes = # of files
    // 0x0C - 4 bytes = # of parts
    // 0x18 - 4 bytes = offset to directory structure
    //
    // Format of directory structure
    // 4-byte int = offset in file where this part begins
    // 4-byte int = size of compressed part
    // 4-byte int = size of uncompressed part
    // these triplets repeat for each part in the arc file
    // After these triplets are a bunch of null-terminated strings
    // which are the sub filenames.
    // After the subfilenames comes the subfile data:
    // 4-byte int = 3 == indicates start of subfile item (maybe compressed flag??)
    //              1 == maybe uncompressed flag??
    // 4-byte int = offset in file where first part of this subfile begins
    // 4-byte int = compressed size of this file
    // 4-byte int = uncompressed size of this file
    // 4-byte crap
    // 4-byte crap
    // 4-byte crap
    // 4-byte int = numParts this file uses
    // 4-byte int = part# of first part for this file (starting at 0).
    // 4-byte int = length of filename string
    // 4-byte int = offset in directory structure for filename

    this.fileHasBeenRead = true;

    if (TQDebug.ArcFileDebugLevel > 0)
    {
        Log.DebugFormat(CultureInfo.InvariantCulture, "ARCFile.ReadARCToC({0})", this.FileName);
    }

    try
    {
        using (FileStream arcFile = new FileStream(this.FileName, FileMode.Open, FileAccess.Read))
        {
            using (BinaryReader reader = new BinaryReader(arcFile))
            {
                if (TQDebug.ArcFileDebugLevel > 1)
                {
                    Log.DebugFormat(CultureInfo.InvariantCulture, "File Length={0}", arcFile.Length);
                }

                // check the file header: 0x41 0x52 0x43 are the ASCII bytes 'A' 'R' 'C'
                if (reader.ReadByte() != 0x41)
                {
                    return;
                }

                if (reader.ReadByte() != 0x52)
                {
                    return;
                }

                if (reader.ReadByte() != 0x43)
                {
                    return;
                }

                // 0x21 is the minimum plausible file size; anything smaller cannot hold the header.
                if (arcFile.Length < 0x21)
                {
                    return;
                }

                reader.BaseStream.Seek(0x08, SeekOrigin.Begin);
                int numEntries = reader.ReadInt32();
                int numParts = reader.ReadInt32();

                if (TQDebug.ArcFileDebugLevel > 1)
                {
                    Log.DebugFormat(CultureInfo.InvariantCulture, "numEntries={0}, numParts={1}", numEntries, numParts);
                }

                ARCPartEntry[] parts = new ARCPartEntry[numParts];
                ARCDirEntry[] records = new ARCDirEntry[numEntries];

                if (TQDebug.ArcFileDebugLevel > 2)
                {
                    Log.Debug("Seeking to tocOffset location");
                }

                reader.BaseStream.Seek(0x18, SeekOrigin.Begin);
                int tocOffset = reader.ReadInt32();

                if (TQDebug.ArcFileDebugLevel > 1)
                {
                    Log.DebugFormat(CultureInfo.InvariantCulture, "tocOffset = {0}", tocOffset);
                }

                // Make sure all 3 entries exist for the toc entry.
                if (arcFile.Length < (tocOffset + 12))
                {
                    return;
                }

                // Read in all of the part data (offset / compressedSize / realSize triplets)
                reader.BaseStream.Seek(tocOffset, SeekOrigin.Begin);
                int i;
                for (i = 0; i < numParts; ++i)
                {
                    parts[i] = new ARCPartEntry();
                    parts[i].FileOffset = reader.ReadInt32();
                    parts[i].CompressedSize = reader.ReadInt32();
                    parts[i].RealSize = reader.ReadInt32();

                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "parts[{0}]", i);
                        Log.DebugFormat(CultureInfo.InvariantCulture
                            , " fileOffset={0}, compressedSize={1}, realSize={2}"
                            , parts[i].FileOffset
                            , parts[i].CompressedSize
                            , parts[i].RealSize
                            );
                    }
                }

                // Now record this offset so we can come back and read in the filenames
                // after we have read in the file records
                int fileNamesOffset = (int)arcFile.Position;

                // Now seek to the location where the file record data is
                // This offset is from the end of the file.
                // 44 bytes = size of one directory record (11 int32 fields described above).
                int fileRecordOffset = 44 * numEntries;

                if (TQDebug.ArcFileDebugLevel > 1)
                {
                    Log.DebugFormat(CultureInfo.InvariantCulture
                        , "fileNamesOffset = {0}. Seeking to {1} to read file record data."
                        , fileNamesOffset
                        , fileRecordOffset
                        );
                }

                arcFile.Seek(-1 * fileRecordOffset, SeekOrigin.End);
                for (i = 0; i < numEntries; ++i)
                {
                    records[i] = new ARCDirEntry();

                    // storageType = 3 - compressed / 1- non compressed
                    int storageType = reader.ReadInt32();

                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "StorageType={0}", storageType);
                    }

                    // Added by VillageIdiot to support stored types
                    records[i].StorageType = storageType;
                    records[i].FileOffset = reader.ReadInt32();
                    records[i].CompressedSize = reader.ReadInt32();
                    records[i].RealSize = reader.ReadInt32();

                    // Three unused int32 fields follow; read them only to advance the stream.
                    int crap = reader.ReadInt32(); // crap
                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "Crap2={0}", crap);
                    }

                    crap = reader.ReadInt32(); // crap
                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "Crap3={0}", crap);
                    }

                    crap = reader.ReadInt32(); // crap
                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "Crap4={0}", crap);
                    }

                    int numberOfParts = reader.ReadInt32();
                    if (numberOfParts < 1)
                    {
                        records[i].Parts = null;
                        if (TQDebug.ArcFileDebugLevel > 2)
                        {
                            Log.DebugFormat(CultureInfo.InvariantCulture, "File {0} is not compressed.", i);
                        }
                    }
                    else
                    {
                        records[i].Parts = new ARCPartEntry[numberOfParts];
                    }

                    int firstPart = reader.ReadInt32();

                    // Filename length and offset are read but unused; the names are
                    // read sequentially from fileNamesOffset below instead.
                    crap = reader.ReadInt32(); // filename length
                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "Filename Length={0}", crap);
                    }

                    crap = reader.ReadInt32(); // filename offset
                    if (TQDebug.ArcFileDebugLevel > 2)
                    {
                        Log.DebugFormat(CultureInfo.InvariantCulture, "Filename Offset={0}", crap);
                        Log.DebugFormat(CultureInfo.InvariantCulture, "record[{0}]", i);
                        Log.DebugFormat(
                            CultureInfo.InvariantCulture,
                            " offset={0} compressedSize={1} realSize={2}",
                            records[i].FileOffset,
                            records[i].CompressedSize,
                            records[i].RealSize);

                        if (storageType != 1 && records[i].IsActive)
                        {
                            Log.DebugFormat(
                                CultureInfo.InvariantCulture,
                                " numParts={0} firstPart={1} lastPart={2}",
                                records[i].Parts.Length,
                                firstPart,
                                firstPart + records[i].Parts.Length - 1);
                        }
                        else
                        {
                            Log.DebugFormat(CultureInfo.InvariantCulture, " INACTIVE firstPart={0}", firstPart);
                        }
                    }

                    // Wire this record's parts to the shared part table read earlier.
                    if (storageType != 1 && records[i].IsActive)
                    {
                        for (int ip = 0; ip < records[i].Parts.Length; ++ip)
                        {
                            records[i].Parts[ip] = parts[ip + firstPart];
                        }
                    }
                }

                // Now read in the record names
                arcFile.Seek(fileNamesOffset, SeekOrigin.Begin);
                byte[] buffer = new byte[2048];
                ASCIIEncoding ascii = new ASCIIEncoding();
                for (i = 0; i < numEntries; ++i)
                {
                    // only Active files have a filename entry
                    if (records[i].IsActive)
                    {
                        // For each string, read bytes until I hit a 0x00 byte.
                        if (TQDebug.ArcFileDebugLevel > 2)
                        {
                            Log.DebugFormat(CultureInfo.InvariantCulture, "Reading entry name {0:n0}", i);
                        }

                        int bufferSize = 0;
                        while ((buffer[bufferSize++] = reader.ReadByte()) != 0x00)
                        {
                            if (buffer[bufferSize - 1] == 0x03)
                            {
                                // File is null?
                                arcFile.Seek(-1, SeekOrigin.Current); // backup
                                bufferSize--;
                                buffer[bufferSize] = 0x00;
                                if (TQDebug.ArcFileDebugLevel > 2)
                                {
                                    Log.Debug("Null file - inactive?");
                                }

                                break;
                            }

                            // NOTE(review): when bufferSize reaches buffer.Length this only logs;
                            // the next loop iteration indexes past the 2048-byte buffer and throws
                            // IndexOutOfRangeException (not caught by the IOException handler).
                            // Confirm whether entry names can legitimately exceed 2048 bytes.
                            if (bufferSize >= buffer.Length)
                            {
                                Log.Debug("ARCFile.ReadARCToC() Error - Buffer size of 2048 has been exceeded.");
                                if (TQDebug.ArcFileDebugLevel > 2)
                                {
                                    var content = buffer.Select(b => string.Format(CultureInfo.InvariantCulture, "0x{0:X}", b)).ToArray();
                                    Log.Debug($"Buffer contents:{Environment.NewLine}{string.Join(string.Empty, content)}{Environment.NewLine}{string.Empty}");
                                }
                            }
                        }

                        if (TQDebug.ArcFileDebugLevel > 2)
                        {
                            Log.DebugFormat(
                                CultureInfo.InvariantCulture,
                                "Read {0:n0} bytes for name. Converting to string.",
                                bufferSize);
                        }

                        string newfile;
                        if (bufferSize >= 1)
                        {
                            // Now convert the buffer to a string (excluding the terminating 0x00)
                            char[] chars = new char[ascii.GetCharCount(buffer, 0, bufferSize - 1)];
                            ascii.GetChars(buffer, 0, bufferSize - 1, chars, 0);
                            newfile = new string(chars);
                        }
                        else
                        {
                            // Empty name — synthesize a placeholder so the entry stays addressable.
                            newfile = string.Format(CultureInfo.InvariantCulture, "Null File {0}", i);
                        }

                        records[i].FileName = TQData.NormalizeRecordPath(newfile);

                        if (TQDebug.ArcFileDebugLevel > 2)
                        {
                            Log.DebugFormat(CultureInfo.InvariantCulture, "Name {0:n0} = '{1}'", i, records[i].FileName);
                        }
                    }
                }

                // Now convert the array of records into a Dictionary.
                Dictionary<string, ARCDirEntry> dictionary = new Dictionary<string, ARCDirEntry>(numEntries);
                if (TQDebug.ArcFileDebugLevel > 1)
                {
                    Log.Debug("Creating Dictionary");
                }

                for (i = 0; i < numEntries; ++i)
                {
                    if (records[i].IsActive)
                    {
                        dictionary.Add(records[i].FileName, records[i]);
                    }
                }

                this.directoryEntries = dictionary;

                if (TQDebug.ArcFileDebugLevel > 0)
                {
                    Log.Debug("Exiting ARCFile.ReadARCToC()");
                }
            }
        }
    }
    catch (IOException exception)
    {
        Log.Error("ARCFile.ReadARCToC() - Error reading arcfile", exception);
    }
}
/// <summary>
/// Parses the raw binary data for use within TQVault
/// </summary>
/// <param name="pc">player collection whose rawData byte array is decoded; item/equipment/player-info fields are populated in place</param>
private static void ParseRawData(PlayerCollection pc)
{
    // First create a memory stream so we can decode the binary data as needed.
    using (MemoryStream stream = new MemoryStream(pc.rawData, false))
    {
        using (BinaryReader reader = new BinaryReader(stream))
        {
            // Find the block pairs until we find the block that contains the item data.
            int blockNestLevel = 0;
            int currentOffset = 0;
            int itemOffset = 0;
            int equipmentOffset = 0;

            var playerReader = new PlayerInfoReader();

            // vaults start at the item data with no crap
            bool foundItems = pc.IsVault;
            bool foundEquipment = pc.IsVault;

            // Walk the begin_block/end_block delimiters, recording the offsets of the
            // blocks we care about (item positions, equipment, player info).
            while ((!foundItems || !foundEquipment) && (currentOffset = FindNextBlockDelim(pc, currentOffset)) != -1)
            {
                if (pc.rawData[currentOffset] == beginBlockPattern[0])
                {
                    // begin block
                    ++blockNestLevel;
                    currentOffset += beginBlockPattern.Length;

                    // skip past the 4 bytes of noise after begin_block
                    currentOffset += 4;

                    // Seek our stream to the correct position
                    stream.Seek(currentOffset, SeekOrigin.Begin);

                    // Now get the string for this block
                    string blockName = TQData.ReadCString(reader).ToUpperInvariant();

                    // Assign loc to our new stream position
                    currentOffset = (int)stream.Position;

                    // See if we accidentally got a begin_block or end_block
                    if (blockName.Equals("BEGIN_BLOCK"))
                    {
                        blockName = "(NONAME)";
                        currentOffset -= beginBlockPattern.Length;
                    }
                    else if (blockName.Equals("END_BLOCK"))
                    {
                        blockName = "(NONAME)";
                        currentOffset -= endBlockPattern.Length;
                    }
                    else if (blockName.Equals("ITEMPOSITIONSSAVEDASGRIDCOORDS"))
                    {
                        currentOffset += 4;
                        itemOffset = currentOffset; // skip value for itemPositionsSavedAsGridCoords
                        foundItems = true;
                    }
                    else if (blockName.Equals("USEALTERNATE"))
                    {
                        currentOffset += 4;
                        equipmentOffset = currentOffset; // skip value for useAlternate
                        foundEquipment = true;
                    }
                    else if (!pc.IsVault && playerReader.Match(blockName))
                    {
                        // Remember where this player-info block starts for the later Read().
                        playerReader.Record(blockName, currentOffset);
                    }

                    // Print the string with a nesting level indicator
                    ////string levelString = new string ('-', System.Math.Max(0,blockNestLevel*2-2));
                    ////out.WriteLine ("{0} {2:n0} '{1}'", levelString, blockName, loc);
                }
                else
                {
                    // end block
                    --blockNestLevel;
                    currentOffset += endBlockPattern.Length;
                    ////if (blockNestLevel < 0)
                    ////{
                    ////    out.WriteLine ("{0:n0} Block Nest Level < 0!!!", loc);
                    ////}
                }
            }

            ////out.WriteLine ("Final Block Level = {0:n0}", blockNestLevel);

            if (foundItems)
            {
                try
                {
                    ParseItemBlock(pc, itemOffset, reader);
                }
                catch (ArgumentException exception)
                {
                    // Wrap with the player name for context, log, and rethrow the wrapper.
                    var ex = new ArgumentException(string.Format(CultureInfo.InvariantCulture, "Error parsing player file Item Block- '{0}'", pc.PlayerName), exception);
                    Log.ErrorFormat(CultureInfo.InvariantCulture, "Error parsing player file Item Block - '{0}'", pc.PlayerName);
                    Log.ErrorException(exception);
                    throw ex;
                }

                // Best-effort export of the parsed sacks to a text file; failures are logged only.
                try
                {
                    string outfile = string.Concat(Path.Combine(TQData.TQVaultSaveFolder, pc.PlayerName), " Export.txt");
                    using (StreamWriter outStream = new StreamWriter(outfile, false))
                    {
                        outStream.WriteLine("Number of Sacks = {0}", pc.numberOfSacks);
                        int sackNumber = 0;
                        if (pc.sacks != null)
                        {
                            foreach (SackCollection sack in pc.sacks)
                            {
                                if (!sack.IsEmpty)
                                {
                                    outStream.WriteLine();
                                    outStream.WriteLine("SACK {0}", sackNumber);

                                    int itemNumber = 0;
                                    foreach (Item item in sack)
                                    {
                                        // params1 is oversized (20 slots) but the format string
                                        // only consumes {0} and {1}; the rest stay null.
                                        object[] params1 = new object[20];

                                        params1[0] = itemNumber;
                                        params1[1] = ItemProvider.ToFriendlyName(item);
                                        params1[2] = item.PositionX;
                                        params1[3] = item.PositionY;
                                        params1[4] = item.Seed;
                                        ////params1[5] =

                                        outStream.WriteLine(" {0,5:n0} {1}", params1);
                                        itemNumber++;
                                    }
                                }

                                sackNumber++;
                            }
                        }
                    }
                }
                catch (IOException exception)
                {
                    Log.ErrorFormat(exception, "Error writing Export file - '{0}'"
                        , string.Concat(Path.Combine(TQData.TQVaultSaveFolder, pc.PlayerName), " Export.txt")
                        );
                }
            }

            // Process the equipment block
            if (foundEquipment && !pc.IsVault)
            {
                try
                {
                    ParseEquipmentBlock(pc, equipmentOffset, reader);
                }
                catch (ArgumentException exception)
                {
                    var ex = new ArgumentException(string.Format(CultureInfo.InvariantCulture, "Error parsing player file Equipment Block - '{0}'", pc.PlayerName), exception);
                    Log.ErrorFormat(ex, "Error parsing player file Equipment Block - '{0}'", pc.PlayerName);
                    throw ex;
                }

                // Best-effort export of the equipment sack; failures are logged only.
                try
                {
                    string outfile = string.Concat(Path.Combine(TQData.TQVaultSaveFolder, pc.PlayerName), " Equipment Export.txt");
                    using (StreamWriter outStream = new StreamWriter(outfile, false))
                    {
                        if (!pc.EquipmentSack.IsEmpty)
                        {
                            int itemNumber = 0;
                            foreach (Item item in pc.EquipmentSack)
                            {
                                // Same oversized-array pattern as the sack export above.
                                object[] params1 = new object[20];

                                params1[0] = itemNumber;
                                params1[1] = ItemProvider.ToFriendlyName(item);
                                params1[2] = item.PositionX;
                                params1[3] = item.PositionY;
                                params1[4] = item.Seed;
                                ////params1[5] =

                                outStream.WriteLine(" {0,5:n0} {1}", params1);
                                itemNumber++;
                            }
                        }
                    }
                }
                catch (IOException exception)
                {
                    Log.ErrorFormat(exception, "Error writing Export file - '{0}'"
                        , string.Concat(Path.Combine(TQData.TQVaultSaveFolder, pc.PlayerName), " Equipment Export.txt")
                        );
                }
            }

            // Finally decode the player-info blocks located during the scan above.
            if (playerReader.FoundPlayerInfo && !pc.IsVault)
            {
                try
                {
                    playerReader.Read(reader);
                    pc.PlayerInfo = playerReader.GetPlayerInfo();
                }
                catch (ArgumentException exception)
                {
                    var rethrowex = new ArgumentException(string.Format(CultureInfo.InvariantCulture, "Error parsing player player info Block - '{0}'", pc.PlayerName), exception);
                    Log.ErrorException(rethrowex);
                    throw rethrowex;
                }
            }
        }
    }
}
/// <summary>
/// Gets a resource from the database using the resource Id.
/// Modified by VillageIdiot to support loading resources from a custom map folder.
/// </summary>
/// <remarks>
/// Search order: custom map resources, then the expansion folder named by the
/// resource Id (XPack/XPack2/XPack3), then the XPack folder regardless of the Id,
/// then Ragnarok (XPack2) and Atlantis (XPack3) if installed, then the base TQ folder.
/// </remarks>
/// <param name="resourceId">Resource which we are fetching</param>
/// <returns>Returns a byte array of the resource, or null if it could not be found.</returns>
public byte[] LoadResource(string resourceId)
{
	if (TQDebug.DatabaseDebugLevel > 0)
	{
		Log.DebugFormat(CultureInfo.InvariantCulture, "Database.LoadResource({0})", resourceId);
	}

	resourceId = TQData.NormalizeRecordPath(resourceId);
	if (TQDebug.DatabaseDebugLevel > 1)
	{
		Log.DebugFormat(CultureInfo.InvariantCulture, " Normalized({0})", resourceId);
	}

	// First we need to figure out the correct file to
	// open, by grabbing it off the front of the resourceID
	int backslashLocation = resourceId.IndexOf('\\');
	if (backslashLocation <= 0)
	{
		// not a proper resourceID.
		return null;
	}

	string arcFileBase = resourceId.Substring(0, backslashLocation);
	if (TQDebug.DatabaseDebugLevel > 1)
	{
		Log.DebugFormat(CultureInfo.InvariantCulture, "arcFileBase = {0}", arcFileBase);
	}

	byte[] arcFileData = null;

	// Added by VillageIdiot
	// Check the mod folder for the image resource.
	// Custom Map resources have the highest precedence.
	if (TQData.IsCustom)
	{
		if (TQDebug.DatabaseDebugLevel > 1)
		{
			Log.Debug("Checking Custom Resources.");
		}

		arcFileData = this.ReadArcResource(Path.Combine(TQData.MapName, "resources"), arcFileBase, resourceId);
	}

	// We either didn't load the resource or didn't find what we were looking for so check the normal game resources.
	if (arcFileData == null)
	{
		// See if this guy is from an expansion pack.
		if (TQDebug.DatabaseDebugLevel > 1)
		{
			Log.Debug("Checking IT Resources.");
		}

		string rootFolder = TQData.ImmortalThronePath;

		// Map the expansion prefix in the resource Id to its resource sub-folder.
		// string.Equals with OrdinalIgnoreCase avoids the per-call ToUpperInvariant allocation.
		string expansionFolder = null;
		if (string.Equals(arcFileBase, "XPACK", StringComparison.OrdinalIgnoreCase))
		{
			// Comes from Immortal Throne
			expansionFolder = "XPack";
		}
		else if (string.Equals(arcFileBase, "XPACK2", StringComparison.OrdinalIgnoreCase))
		{
			// Comes from Ragnarok
			expansionFolder = "XPack2";
		}
		else if (string.Equals(arcFileBase, "XPACK3", StringComparison.OrdinalIgnoreCase))
		{
			// Comes from Atlantis
			expansionFolder = "XPack3";
		}

		if (expansionFolder != null)
		{
			rootFolder = Path.Combine(Path.Combine(rootFolder, "Resources"), expansionFolder);

			// The leading XPACKn segment only names the expansion; the real arc file
			// name is the next path segment, so throw that value away and re-split.
			int previousBackslash = backslashLocation;
			backslashLocation = resourceId.IndexOf('\\', backslashLocation + 1);
			if (backslashLocation <= 0)
			{
				// not a proper resourceID
				return null;
			}

			arcFileBase = resourceId.Substring(previousBackslash + 1, backslashLocation - previousBackslash - 1);
			resourceId = resourceId.Substring(previousBackslash + 1);
		}
		else
		{
			// Changed by VillageIdiot to search the IT resources folder for updated resources
			// if IT is installed otherwise just the TQ folder.
			rootFolder = Path.Combine(rootFolder, "Resources");
		}

		arcFileData = this.ReadArcResource(rootFolder, arcFileBase, resourceId);
	}

	// Added by VillageIdiot
	// Maybe the arc file is in the XPack folder even though the record does not state it.
	// Also could be that it says xpack in the record but the file is in the root.
	if (arcFileData == null)
	{
		arcFileData = this.ReadArcResource(
			Path.Combine(Path.Combine(TQData.ImmortalThronePath, "Resources"), "XPack"), arcFileBase, resourceId);
	}

	// Now, let's check if the item is in Ragnarok DLC
	if (arcFileData == null && TQData.IsRagnarokInstalled)
	{
		arcFileData = this.ReadArcResource(
			Path.Combine(Path.Combine(TQData.ImmortalThronePath, "Resources"), "XPack2"), arcFileBase, resourceId);
	}

	// ... or in the Atlantis DLC.
	if (arcFileData == null && TQData.IsAtlantisInstalled)
	{
		arcFileData = this.ReadArcResource(
			Path.Combine(Path.Combine(TQData.ImmortalThronePath, "Resources"), "XPack3"), arcFileBase, resourceId);
	}

	if (arcFileData == null)
	{
		// We are either vanilla TQ or have not found our resource yet.
		// Fall back to the original TQ folder.
		if (TQDebug.DatabaseDebugLevel > 1)
		{
			Log.Debug("Checking TQ Resources.");
		}

		arcFileData = this.ReadArcResource(Path.Combine(TQData.TQPath, "Resources"), arcFileBase, resourceId);
	}

	if (TQDebug.DatabaseDebugLevel > 0)
	{
		Log.Debug("Exiting Database.LoadResource()");
	}

	return arcFileData;
}

/// <summary>
/// Builds the path of the .arc file named by <paramref name="arcFileBase"/> inside
/// <paramref name="rootFolder"/> and attempts to read <paramref name="resourceId"/> from it.
/// </summary>
/// <param name="rootFolder">Folder which should contain the .arc file.</param>
/// <param name="arcFileBase">Base name of the .arc file (without extension).</param>
/// <param name="resourceId">Resource to read out of the .arc file.</param>
/// <returns>Byte array of the resource data, or null when the file or resource is not found.</returns>
private byte[] ReadArcResource(string rootFolder, string arcFileBase, string resourceId)
{
	string arcFile = Path.Combine(rootFolder, Path.ChangeExtension(arcFileBase, ".arc"));
	return this.ReadARCFile(arcFile, resourceId);
}
/// <summary>
/// Reads data from an ARC file and puts it into a Byte array (or NULL if not found)
/// </summary>
/// <param name="dataId">The string ID for the data which we are retrieving.</param>
/// <returns>Returns byte array of the data corresponding to the string ID, or null when not found.</returns>
public byte[] GetData(string dataId)
{
	if (TQDebug.ArcFileDebugLevel > 0)
	{
		Log.DebugFormat(CultureInfo.InvariantCulture, "ARCFile.GetData({0})", dataId);
	}

	// Lazily read the table of contents on first use.
	if (!this.fileHasBeenRead)
	{
		this.ReadARCToC();
	}

	if (this.directoryEntries == null)
	{
		if (TQDebug.ArcFileDebugLevel > 1)
		{
			Log.DebugFormat(CultureInfo.InvariantCulture, "Error - Could not read {0}", this.FileName);
		}

		// could not read the file
		return null;
	}

	// First normalize the filename
	dataId = TQData.NormalizeRecordPath(dataId);
	if (TQDebug.ArcFileDebugLevel > 1)
	{
		Log.DebugFormat(CultureInfo.InvariantCulture, "Normalized dataID = {0}", dataId);
	}

	// Find our file in the toc.
	// First strip off the leading folder since it is just the ARC name
	int firstPathDelim = dataId.IndexOf('\\');
	if (firstPathDelim != -1)
	{
		dataId = dataId.Substring(firstPathDelim + 1);
	}

	// Now see if this file is in the toc.
	// TryGetValue avoids the ContainsKey + indexer double lookup.
	ARCDirEntry directoryEntry;
	if (!this.directoryEntries.TryGetValue(dataId, out directoryEntry))
	{
		// record not found
		if (TQDebug.ArcFileDebugLevel > 1)
		{
			Log.DebugFormat(CultureInfo.InvariantCulture, "Error - {0} not found.", dataId);
		}

		return null;
	}

	// Now open the ARC file and read in the record.
	using (FileStream arcFile = new FileStream(this.FileName, FileMode.Open, FileAccess.Read))
	{
		// Allocate memory for the uncompressed data
		byte[] data = new byte[directoryEntry.RealSize];

		// Now process each part of this record
		int startPosition = 0;

		// First see if the data was just stored without compression.
		if ((directoryEntry.StorageType == 1) && (directoryEntry.CompressedSize == directoryEntry.RealSize))
		{
			if (TQDebug.ArcFileDebugLevel > 1)
			{
				Log.DebugFormat(
					CultureInfo.InvariantCulture,
					"Offset={0} Size={1}",
					directoryEntry.FileOffset,
					directoryEntry.RealSize);
			}

			arcFile.Seek(directoryEntry.FileOffset, SeekOrigin.Begin);

			// Stream.Read may return fewer bytes than requested, so loop
			// until the whole record has been read (or EOF is hit).
			int totalRead = 0;
			while (totalRead < directoryEntry.RealSize)
			{
				int bytesRead = arcFile.Read(data, totalRead, directoryEntry.RealSize - totalRead);
				if (bytesRead == 0)
				{
					// Unexpected end of file; return what we have.
					break;
				}

				totalRead += bytesRead;
			}
		}
		else
		{
			// The data was compressed so we attempt to decompress it.
			foreach (ARCPartEntry partEntry in directoryEntry.Parts)
			{
				// seek to the part we want
				arcFile.Seek(partEntry.FileOffset, SeekOrigin.Begin);

				// Ignore the zlib compression method.
				arcFile.ReadByte();

				// Ignore the zlib compression flags.
				arcFile.ReadByte();

				// Create a deflate stream (leaveOpen so we can keep reading parts from arcFile).
				using (DeflateStream deflate = new DeflateStream(arcFile, CompressionMode.Decompress, true))
				{
					int bytesRead;
					int partLength = 0;
					while ((bytesRead = deflate.Read(data, startPosition, data.Length - startPosition)) > 0)
					{
						startPosition += bytesRead;
						partLength += bytesRead;

						// break out of the read loop if we have processed this part completely.
						if (partLength >= partEntry.RealSize)
						{
							break;
						}
					}
				}
			}
		}

		if (TQDebug.ArcFileDebugLevel > 0)
		{
			Log.Debug("Exiting ARCFile.GetData()");
		}

		return data;
	}
}