/// <summary>
/// Gets a DBRecord for the specified item ID string.
/// </summary>
/// <remarks>
/// Changed by VillageIdiot
/// Changed search order so that IT records have precedence over TQ records.
/// Added the Custom Map database.  Custom Map records have precedence over IT records.
/// </remarks>
/// <param name="itemId">Item Id which we are looking up</param>
/// <returns>Returns the DBRecord for the item Id</returns>
public DBRecordCollection GetRecordFromFile(string itemId)
{
    itemId = TQData.NormalizeRecordPath(itemId);

    if (this.ArzFileMod != null)
    {
        DBRecordCollection recordMod = this.ArzFileMod.GetItem(itemId);
        if (recordMod != null)
        {
            // Custom Map records have the highest precedence.
            return(recordMod);
        }
    }

    if (this.ArzFileIT != null)
    {
        // See if it's in the IT ARZ file.
        DBRecordCollection recordIT = this.ArzFileIT.GetItem(itemId);
        if (recordIT != null)
        {
            // The IT file takes precedence over TQ.
            return(recordIT);
        }
    }

    return(ArzFile.GetItem(itemId));
}
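// Usage sketch (hypothetical; neither this method nor the record path below is
// part of the original file): the first loaded database that knows the ID wins,
// in the order Custom Map, then Immortal Throne, then the base Titan Quest file.
public DBRecordCollection ExampleLookup()
{
    return(this.GetRecordFromFile(@"records\item\equipmentweapon\sword\sword.dbr"));
}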
/// <summary>
/// Gets the DBRecord for a particular ID.
/// </summary>
/// <param name="file">ArzFile containing the record.</param>
/// <param name="recordId">string ID of the record; it will be normalized internally.</param>
/// <returns>DBRecord corresponding to the string ID.</returns>
public DBRecordCollection GetItem(ArzFile file, string recordId)
{
    if (string.IsNullOrEmpty(recordId))
    {
        return(null);
    }

    recordId = TQData.NormalizeRecordPath(recordId);

    return(file.Cache.GetOrAddAtomic(recordId, k =>
    {
        RecordInfo rawRecord;
        if (file.RecordInfo.ContainsKey(k))
        {
            rawRecord = file.RecordInfo[k].Value;
        }
        else
        {
            // Record not found.
            return null;
        }

        return infoProv.Decompress(file, rawRecord);
    }));
}
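// GetOrAddAtomic is defined elsewhere in the codebase.  Below is a minimal
// lock-based sketch of what such a get-or-add extension might look like; it is
// an assumption for illustration, not the original implementation (requires
// System and System.Collections.Generic).
public static class DictionaryExtensionsSketch
{
    public static TValue GetOrAddAtomic<TKey, TValue>(this IDictionary<TKey, TValue> cache, TKey key, Func<TKey, TValue> valueFactory)
    {
        // Serialize lookups and inserts so the factory runs at most once per key
        // for callers contending on this dictionary instance.
        lock (cache)
        {
            TValue value;
            if (!cache.TryGetValue(key, out value))
            {
                value = valueFactory(key);
                cache[key] = value;
            }

            return value;
        }
    }
}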
/// <summary>
/// Decodes the ARZ file.
/// </summary>
/// <param name="inReader">input BinaryReader</param>
/// <param name="baseOffset">Offset in the file.</param>
/// <param name="arzFile">ArzFile instance on which we are operating.</param>
public void Decode(BinaryReader inReader, int baseOffset, ArzFile arzFile)
{
    // Record Entry Format
    // 0x0000 int32 stringEntryID (dbr filename)
    // 0x0004 int32 string length
    // 0x0008 string (record type)
    // 0x00?? int32 offset
    // 0x00?? int32 length in bytes
    // 0x00?? int32 timestamp?
    // 0x00?? int32 timestamp?
    this.idStringIndex = inReader.ReadInt32();
    this.RecordType = TQData.ReadCString(inReader);
    this.offset = inReader.ReadInt32() + baseOffset;

    // Compressed size.  We throw it away and just advance the offset in the file.
    inReader.ReadInt32();

    // Crap1 - timestamp?  We throw it away and just advance the offset in the file.
    inReader.ReadInt32();

    // Crap2 - timestamp?  We throw it away and just advance the offset in the file.
    inReader.ReadInt32();

    // Get the ID string.
    this.ID = arzFile.Getstring(this.idStringIndex);
}
/// <summary>
/// Reads the entire record table into memory from a stream.
/// </summary>
/// <param name="file">ArzFile which we are reading.</param>
/// <param name="pos">position within the file.</param>
/// <param name="numEntries">number of entries in the file.</param>
/// <param name="reader">input BinaryReader</param>
/// <param name="outStream">output StreamWriter.</param>
private void ReadRecordTable(ArzFile file, int pos, int numEntries, BinaryReader reader, StreamWriter outStream)
{
    // Size the dictionary slightly larger than the entry count to limit rehashing.
    file.RecordInfo = new Dictionary<string, RecordInfo>((int)Math.Round(numEntries * 1.2));
    reader.BaseStream.Seek(pos, SeekOrigin.Begin);

    if (outStream != null)
    {
        outStream.WriteLine("RecordTable located at 0x{0:X}", pos);
    }

    for (int i = 0; i < numEntries; ++i)
    {
        RecordInfo recordInfo = new RecordInfo();

        // 24 is the offset of where all record data begins.
        infoProv.Decode(recordInfo, reader, 24, file);
        file.RecordInfo.Add(TQData.NormalizeRecordPath(recordInfo.ID), recordInfo);

        // Output this record.
        if (outStream != null)
        {
            outStream.WriteLine("{0},{1},{2}", i, recordInfo.ID, recordInfo.RecordType);
        }
    }
}
/// <summary>
/// Gets the DBRecord for a particular ID.
/// </summary>
/// <param name="file">ArzFile containing the record.</param>
/// <param name="recordId">string ID of the record; it will be normalized internally.</param>
/// <returns>DBRecord corresponding to the string ID.</returns>
public DBRecordCollection GetItem(ArzFile file, string recordId)
{
    if (string.IsNullOrEmpty(recordId))
    {
        return(null);
    }

    recordId = TQData.NormalizeRecordPath(recordId);

    DBRecordCollection databaseRecord;
    if (!file.Cache.TryGetValue(recordId, out databaseRecord))
    {
        RecordInfo rawRecord;
        if (!file.RecordInfo.TryGetValue(recordId, out rawRecord))
        {
            // Record not found.
            return(null);
        }

        databaseRecord = infoProv.Decompress(file, rawRecord);
        file.Cache.Add(recordId, databaseRecord);
    }

    return(databaseRecord);
}
/// <summary>
/// Gets the list of keys from the recordInfo dictionary.
/// </summary>
/// <param name="file">ArzFile whose keys are requested.</param>
/// <returns>string array holding the sorted list</returns>
public string[] GetKeyTable(ArzFile file)
{
    if (file.Keys == null || file.Keys.Length == 0)
    {
        this.BuildKeyTable(file);
    }

    return((string[])file.Keys.Clone());
}
/// <summary>
/// Builds a list of the keys for this file.  Used to help build the tree structure.
/// </summary>
private void BuildKeyTable(ArzFile file)
{
    if (file.RecordInfo == null || file.RecordInfo.Count == 0)
    {
        return;
    }

    int index = 0;
    file.Keys = new string[file.RecordInfo.Count];

    foreach (string recordID in file.RecordInfo.Keys)
    {
        file.Keys[index] = recordID;
        index++;
    }

    Array.Sort(file.Keys);
}
/// <summary>
/// Reads the whole string table into memory from a stream.
/// </summary>
/// <remarks>
/// string Table Format
/// first 4 bytes is the number of entries
/// then one string followed by another...
/// </remarks>
/// <param name="file">ArzFile which we are reading.</param>
/// <param name="pos">position within the file.</param>
/// <param name="reader">input BinaryReader</param>
/// <param name="outStream">output StreamWriter.</param>
private void ReadStringTable(ArzFile file, int pos, BinaryReader reader, StreamWriter outStream)
{
    reader.BaseStream.Seek(pos, SeekOrigin.Begin);
    int numstrings = reader.ReadInt32();
    file.Strings = new string[numstrings];

    if (outStream != null)
    {
        outStream.WriteLine("stringTable located at 0x{0:X} numstrings = {1:n0}", pos, numstrings);
    }

    for (int i = 0; i < numstrings; ++i)
    {
        file.Strings[i] = TQData.ReadCString(reader);

        if (outStream != null)
        {
            outStream.WriteLine("{0},{1}", i, file.Strings[i]);
        }
    }
}
/// <summary>
/// Decompresses the ARZ file into an array of bytes.
/// </summary>
/// <param name="arzFile">ArzFile which we are decompressing.</param>
/// <returns>Returns a byte array containing the raw data.</returns>
private byte[] DecompressBytes(ArzFile arzFile)
{
    if (arzFile == null)
    {
        throw new ArgumentNullException("arzFile", "arzFile is null.");
    }

    // Read in the compressed data and decompress it, storing the results in a memorystream.
    using (FileStream arzStream = new FileStream(arzFile.fileName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        arzStream.Seek(this.offset, SeekOrigin.Begin);

        // Ignore the zlib compression method.
        arzStream.ReadByte();

        // Ignore the zlib compression flags.
        arzStream.ReadByte();

        // Create a deflate stream.
        using (DeflateStream deflate = new DeflateStream(arzStream, CompressionMode.Decompress))
        {
            // Create a memorystream to hold the decompressed data.
            using (MemoryStream outStream = new MemoryStream())
            {
                // Now decompress.
                byte[] buffer = new byte[1024];
                int len;
                while ((len = deflate.Read(buffer, 0, 1024)) > 0)
                {
                    outStream.Write(buffer, 0, len);
                }

                // Return the decompressed data.
                return(outStream.ToArray());
            }
        }
    }
}
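// DecompressBytes skips the two-byte zlib header (CMF/FLG) because DeflateStream
// only understands the raw deflate data that follows.  A standalone sketch of
// the same technique (hypothetical helper, assuming the input stream is
// positioned at a zlib block as in the ARZ format):
private static byte[] InflateZlibBlock(Stream zlibStream)
{
    // Skip the zlib wrapper: the compression method/info byte and the flags byte.
    zlibStream.ReadByte();
    zlibStream.ReadByte();

    using (DeflateStream deflate = new DeflateStream(zlibStream, CompressionMode.Decompress))
    using (MemoryStream outStream = new MemoryStream())
    {
        // Inflate the raw deflate payload; the trailing Adler-32 checksum is simply left unread.
        deflate.CopyTo(outStream);
        return(outStream.ToArray());
    }
}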
/// <summary>
/// Gets a database record without adding it to the cache.
/// </summary>
/// <remarks>
/// The Item property caches the DBRecords, which is great when you are only using a few hundred (thousand?) records and are requesting
/// them many times.  Not great if you are looping through all the records as it eats a lot of memory.  This method will create
/// the record on the fly if it is not in the cache so when you are done with it, it can be reclaimed by the garbage collector.
/// Great for when you want to loop through all the records for some reason.  It will take longer, but use less memory.
/// </remarks>
/// <param name="file">ArzFile containing the record.</param>
/// <param name="recordId">String ID of the record.  Will be normalized internally.</param>
/// <returns>Decompressed RecordInfo record</returns>
public DBRecordCollection GetRecordNotCached(ArzFile file, string recordId)
{
    recordId = TQData.NormalizeRecordPath(recordId);

    try
    {
        // If it is already in the cache there is no need to decompress it again.
        return(file.Cache[recordId]);
    }
    catch (KeyNotFoundException ex)
    {
        Log.Debug("record not found first attempt", ex);

        try
        {
            return(infoProv.Decompress(file, file.RecordInfo[recordId]));
        }
        catch (KeyNotFoundException exx)
        {
            Log.Debug("record not found second attempt", exx);
            return(null);
        }
    }
}
/// <summary>
/// Decompresses an individual record.
/// </summary>
/// <param name="arzFile">ARZ file which we are decompressing.</param>
/// <returns>decompressed DBRecord.</returns>
public DBRecordCollection Decompress(ArzFile arzFile)
{
    // Record variables have this format:
    // 0x00 int16 specifies data type:
    //      0x0000 = int - data will be an int32
    //      0x0001 = float - data will be a Single
    //      0x0002 = string - data will be an int32 that is an index into the string table
    //      0x0003 = bool - data will be an int32
    // 0x02 int16 specifies number of values (usually 1, but sometimes more for arrays)
    // 0x04 int32 key string ID (the ID into the string table for this variable name)
    // 0x08 data value
    byte[] data = this.DecompressBytes(arzFile);

    int numberOfDWords = data.Length / 4;
    if (data.Length % 4 != 0)
    {
        var ex = new ArgumentException(string.Format(CultureInfo.InvariantCulture, "Error while parsing arz record {0}, data Length = {1} which is not a multiple of 4", this.ID, (int)data.Length));
        Log.ErrorException(ex);
        throw ex;
    }

    DBRecordCollection record = new DBRecordCollection(this.ID, this.RecordType);

    // Create a memory stream to read the binary data.
    using (BinaryReader inReader = new BinaryReader(new MemoryStream(data, false)))
    {
        int i = 0;
        while (i < numberOfDWords)
        {
            short dataType = inReader.ReadInt16();
            short valCount = inReader.ReadInt16();
            int variableID = inReader.ReadInt32();
            string variableName = arzFile.Getstring(variableID);

            if (variableName == null)
            {
                var ex = new ArgumentNullException(string.Format(CultureInfo.InvariantCulture, "Error while parsing arz record {0}, variable is NULL", this.ID));
                Log.ErrorFormat(CultureInfo.InvariantCulture, "Error in ARZFile - {0}", arzFile.fileName);
                Log.ErrorException(ex);
                throw ex;
            }

            if (dataType < 0 || dataType > 3)
            {
                var ex = new ArgumentOutOfRangeException(string.Format(CultureInfo.InvariantCulture, "Error while parsing arz record {0}, variable {1}, bad dataType {2}", this.ID, variableName, dataType));
                Log.ErrorFormat(CultureInfo.InvariantCulture, "Error in ARZFile - {0}", arzFile.fileName);
                Log.ErrorException(ex);
                throw ex;
            }

            Variable v = new Variable(variableName, (VariableDataType)dataType, valCount);

            if (valCount < 1)
            {
                var ex = new ArgumentException(string.Format(CultureInfo.InvariantCulture, "Error while parsing arz record {0}, variable {1}, bad valCount {2}", this.ID, variableName, valCount));
                Log.ErrorFormat(CultureInfo.InvariantCulture, "Error in ARZFile - {0}", arzFile.fileName);
                Log.ErrorException(ex);
                throw ex;
            }

            // Increment our dword count: two header dwords plus one dword per value.
            i += 2 + valCount;

            for (int j = 0; j < valCount; ++j)
            {
                switch (v.DataType)
                {
                    case VariableDataType.Integer:
                    case VariableDataType.Boolean:
                    {
                        int val = inReader.ReadInt32();
                        v[j] = val;
                        break;
                    }

                    case VariableDataType.Float:
                    {
                        float val = inReader.ReadSingle();
                        v[j] = val;
                        break;
                    }

                    case VariableDataType.StringVar:
                    {
                        int id = inReader.ReadInt32();
                        string val = arzFile.Getstring(id);
                        val = (val == null) ? string.Empty : val.Trim();
                        v[j] = val;
                        break;
                    }

                    default:
                    {
                        int val = inReader.ReadInt32();
                        v[j] = val;
                        break;
                    }
                }
            }

            record.Set(v);
        }
    }

    return(record);
}
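// Worked example (illustrative only; this helper is not part of the original
// file): serializes one float-array variable in the exact layout Decompress
// parses above, which is why the parser advances 2 + valCount dwords per
// variable (one dword of type/count header, one dword of name ID, one dword per value).
private static byte[] ExampleVariableBytes(int nameStringId, float[] values)
{
    using (MemoryStream stream = new MemoryStream())
    using (BinaryWriter writer = new BinaryWriter(stream))
    {
        writer.Write((short)1);              // 0x00 dataType: 1 = float
        writer.Write((short)values.Length);  // 0x02 valCount
        writer.Write(nameStringId);          // 0x04 key string ID into the string table
        foreach (float value in values)
        {
            writer.Write(value);             // 0x08.. one dword per value
        }

        return(stream.ToArray());
    }
}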
/// <summary>
/// Reads the ARZ file.
/// </summary>
/// <param name="file">ArzFile which we are reading.</param>
/// <returns>true on success</returns>
public bool Read(ArzFile file)
{
    StreamWriter outStream = null;

    if (TQDebug.DatabaseDebugLevel > 2)
    {
        outStream = new StreamWriter("arzOut.txt", false);
    }

    try
    {
        // ARZ header file format
        //
        // 0x000000 int32
        // 0x000004 int32 start of dbRecord table
        // 0x000008 int32 size in bytes of dbRecord table
        // 0x00000c int32 numEntries in dbRecord table
        // 0x000010 int32 start of string table
        // 0x000014 int32 size in bytes of string table
        using (FileStream instream = new FileStream(file.FileName, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (BinaryReader reader = new BinaryReader(instream))
        {
            try
            {
                int[] header = new int[6];
                for (int i = 0; i < 6; ++i)
                {
                    header[i] = reader.ReadInt32();

                    if (outStream != null)
                    {
                        outStream.WriteLine("Header[{0}] = {1:n0} (0x{1:X})", i, header[i]);
                    }
                }

                int firstTableStart = header[1];
                int firstTableCount = header[3];
                int secondTableStart = header[4];

                this.ReadStringTable(file, secondTableStart, reader, outStream);
                this.ReadRecordTable(file, firstTableStart, firstTableCount, reader, outStream);

                // Four final int32s from the file:
                // the first is numstrings in the stringtable,
                // the second is something unknown,
                // the third and fourth look like junk (timestamps maybe?).
                for (int i = 0; i < 4; ++i)
                {
                    int val = reader.ReadInt32();

                    if (outStream != null)
                    {
                        outStream.WriteLine("{0:n0} 0x{0:X}", val);
                    }
                }
            }
            catch (IOException ex)
            {
                Log.ErrorException(ex);
                throw;
            }
        }
    }
    catch (IOException exception)
    {
        Log.ErrorException(exception);
        return(false);
    }
    finally
    {
        if (outStream != null)
        {
            outStream.Close();
        }
    }

    return(true);
}
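// Usage sketch (hypothetical; assumes this provider instance and an already
// constructed ArzFile): loads a file and dumps its sorted record keys using
// the GetKeyTable method above.
public void ExampleDumpKeys(ArzFile file)
{
    if (this.Read(file))
    {
        foreach (string recordId in this.GetKeyTable(file))
        {
            Console.WriteLine(recordId);
        }
    }
}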
/// <summary>
/// Gets a database record without adding it to the cache.
/// </summary>
/// <remarks>
/// The Item property caches the DBRecords, which is great when you are only using a few hundred (thousand?) records and are requesting
/// them many times.  Not great if you are looping through all the records as it eats a lot of memory.  This method will create
/// the record on the fly if it is not in the cache so when you are done with it, it can be reclaimed by the garbage collector.
/// Great for when you want to loop through all the records for some reason.  It will take longer, but use less memory.
/// </remarks>
/// <param name="file">ArzFile containing the record.</param>
/// <param name="recordId">String ID of the record.  Will be normalized internally.</param>
/// <returns>Decompressed RecordInfo record</returns>
public DBRecordCollection GetRecordNotCached(ArzFile file, string recordId)
{
    recordId = TQData.NormalizeRecordPath(recordId);

    // Note: this variant resolves through the cache via GetOrAddAtomic, so the
    // decompressed record is in fact retained in the cache.
    return(file.Cache.GetOrAddAtomic(recordId, k => infoProv.Decompress(file, file.RecordInfo[k].Value)));
}
/// <summary>
/// Gets a database record without adding it to the cache.
/// </summary>
/// <remarks>
/// The Item property caches the DBRecords, which is great when you are only using a few hundred (thousand?) records and are requesting
/// them many times.  Not great if you are looping through all the records as it eats a lot of memory.  This method will create
/// the record on the fly if it is not in the cache so when you are done with it, it can be reclaimed by the garbage collector.
/// Great for when you want to loop through all the records for some reason.  It will take longer, but use less memory.
/// </remarks>
/// <param name="file">ArzFile containing the record.</param>
/// <param name="recordId">String ID of the record.  Will be normalized internally.</param>
/// <returns>Decompressed RecordInfo record</returns>
public DBRecordCollection GetRecordNotCached(ArzFile file, string recordId)
{
    recordId = TQData.NormalizeRecordPath(recordId);

    // Decompress straight from the record table, bypassing the cache entirely.
    return(infoProv.Decompress(file, file.RecordInfo[recordId].Value));
}
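// Usage sketch (hypothetical helper, not part of the original file): the
// low-memory full scan the remarks describe.  Each record is decompressed on
// the fly and becomes eligible for garbage collection as soon as the loop
// moves on, because nothing is added to the cache.
public int ExampleCountRecords(ArzFile file)
{
    int total = 0;
    foreach (string recordId in this.GetKeyTable(file))
    {
        DBRecordCollection record = this.GetRecordNotCached(file, recordId);
        if (record != null)
        {
            ++total;
        }
    }

    return(total);
}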