/// <summary>
/// Reads the locale strings for this language out of the cache file.
/// </summary>
/// <param name="reader">The reader positioned over the cache file.</param>
/// <returns>A <see cref="LocaleTable"/> holding every string that was read.</returns>
public LocaleTable LoadStrings(IReader reader)
{
    var table = new LocaleTable(this);
    if (StringCount == 0)
        return table;

    // Pull the (possibly decrypted/decoded) string data into memory first
    byte[] rawStrings = ReadLocaleData(reader);
    using (var stringReader = new EndianReader(new MemoryStream(rawStrings), Endian.BigEndian))
    {
        reader.SeekTo(LocaleIndexTableLocation.AsOffset());

        // Walk the pointer table, resolving each entry into the string buffer
        for (int index = 0; index < StringCount; index++)
        {
            StringID stringId;
            int stringOffset;
            ReadLocalePointer(reader, out stringId, out stringOffset);

            // An offset past the end of the buffer means the table is corrupt -
            // bail out so we don't end up in a huge memory-hogging loop
            if (stringOffset >= stringReader.Length)
                break;

            stringReader.SeekTo(stringOffset);
            table.Strings.Add(new Locale(stringId, stringReader.ReadUTF8()));
        }
    }
    return table;
}
/// <summary>
/// Serializes a locale table back into the cache file, resizing the
/// pointer-table and string-data areas and refreshing their hashes.
/// </summary>
/// <param name="stream">The stream to write the cache file data to.</param>
/// <param name="locales">The locale table to save.</param>
public void SaveStrings(IStream stream, LocaleTable locales)
{
    var offsetBuffer = new MemoryStream();
    var stringBuffer = new MemoryStream();
    IWriter offsetWriter = new EndianWriter(offsetBuffer, Endian.BigEndian);
    IWriter stringWriter = new EndianWriter(stringBuffer, Endian.BigEndian);
    try
    {
        // Serialize every locale into the two scratch buffers:
        // (ID, offset) pointers on one side, raw UTF-8 text on the other
        foreach (Locale entry in locales.Strings)
        {
            WriteLocalePointer(offsetWriter, entry.ID, (int)stringWriter.Position);
            stringWriter.WriteUTF8(entry.Value);
        }

        // Pad the string data out to the alignment boundary
        // (assumes _sizeAlign is a power of two - TODO confirm)
        int dataSize = (int)((stringBuffer.Position + _sizeAlign - 1) & ~(_sizeAlign - 1));
        stringBuffer.SetLength(dataSize);

        // Refresh the integrity hashes when the target build uses them
        // (a null hash array means the build doesn't need one)
        if (IndexTableHash != null)
            IndexTableHash = SHA1.Transform(offsetBuffer.GetBuffer(), 0, (int)offsetBuffer.Length);
        if (StringDataHash != null)
            StringDataHash = SHA1.Transform(stringBuffer.GetBuffer(), 0, dataSize);

        // Ensure there is room for the pointer table, then write it out
        LocaleIndexTable.Resize((int)offsetBuffer.Length, stream);
        stream.SeekTo(LocaleIndexTableLocation.AsOffset());
        stream.WriteBlock(offsetBuffer.GetBuffer(), 0, (int)offsetBuffer.Length);

        // Encrypt the string data when the build requires it
        byte[] stringBytes = stringBuffer.GetBuffer();
        if (_encryptionKey != null)
            stringBytes = AES.Encrypt(stringBytes, 0, dataSize, _encryptionKey.Key, _encryptionKey.IV);

        // Ensure there is room for the string data, then write it out
        LocaleData.Resize(dataSize, stream);
        stream.SeekTo(LocaleDataLocation.AsOffset());
        stream.WriteBlock(stringBytes, 0, dataSize);

        // Record the new string count
        StringCount = locales.Strings.Count;
    }
    finally
    {
        offsetWriter.Close();
        stringWriter.Close();
    }
}
/// <summary>
/// Builds a per-language list of locale tables from the map's localization
/// section, decrypting the string data when the cache version requires it.
/// </summary>
/// <param name="reader">Reader positioned over the base map file.</param>
/// <param name="baseMapFile">The map whose localization section is read.</param>
/// <param name="matg">The globals tag that describes each language's locale block.</param>
/// <returns>One <see cref="LocaleTable"/> per language that has strings; empty if the map has no localization section.</returns>
public static List <LocaleTable> CreateLocalesTable(EndianReader reader, MapFile baseMapFile, Globals matg)
{
    List <LocaleTable> localesTable = new List <LocaleTable>();

    // Only HaloReach encrypts its locale data; Halo3Retail/ODST use an empty key
    string localesKey = "";
    switch (baseMapFile.Version)
    {
        case CacheVersion.Halo3Retail:
            localesKey = "";
            break;
        case CacheVersion.Halo3ODST:
            localesKey = "";
            break;
        case CacheVersion.HaloReach:
            localesKey = "BungieHaloReach!";
            break;
    }

    var sectionTable = baseMapFile.Header.SectionTable;
    if (sectionTable.Sections[(int)CacheFileSectionType.LocalizationSection].Size == 0)
        return new List <LocaleTable>();

    foreach (var language in Enum.GetValues(typeof(GameLanguage)))
    {
        LocaleTable table = new LocaleTable();
        var languageIndex = (int)language;
        var localeBlock = matg.LocaleGlobals[languageIndex];
        if (localeBlock.StringCount == 0)
            continue;

        var stringCount = localeBlock.StringCount;
        var tableSize = localeBlock.LocaleTableSize;
        var offsetsTableOffset = sectionTable.GetOffset(CacheFileSectionType.LocalizationSection, localeBlock.LocaleIndexTableAddress);
        var tableOffset = sectionTable.GetOffset(CacheFileSectionType.LocalizationSection, localeBlock.LocaleDataIndexAddress);

        // First pass: read the (stringId, dataOffset) pairs for this language
        reader.SeekTo(offsetsTableOffset);
        var stringOffsets = new int[stringCount];
        for (var i = 0; i < stringCount; i++)
        {
            table.Add(new CacheLocalizedString(reader.ReadInt32(), "", i));
            stringOffsets[i] = reader.ReadInt32();
        }

        // Second pass: resolve each offset into the (optionally decrypted) data blob.
        // BUGFIX: the reader is now wrapped in `using` so it is disposed even when a
        // read throws - the old code only Closed/Disposed it on the success path.
        reader.SeekTo(tableOffset);
        using (var newReader = localesKey == ""
            ? new EndianReader(new MemoryStream(reader.ReadBytes(tableSize)), EndianFormat.BigEndian)
            : new EndianReader(reader.DecryptAesSegment(tableSize, localesKey)))
        {
            for (var i = 0; i < stringOffsets.Length; i++)
            {
                if (stringOffsets[i] == -1)
                {
                    // -1 marks a string with no data in this language
                    table[i].String = "<null>";
                    continue;
                }
                newReader.SeekTo(stringOffsets[i]);
                table[i].String = newReader.ReadNullTerminatedString();
            }
        }

        localesTable.Add(table);
    }
    return localesTable;
}
/// <summary>
/// Constructs a locale handler for the given map: locates the matg tag,
/// reads the 12 per-language table headers, then loads every string.
/// </summary>
/// <param name="map">The map whose locale tables should be loaded.</param>
public LocaleHandler(HaloMap map)
{
    // Keep a reference to the map this handler operates on
    Map = map;

    // BUGFIX: the IO is now closed in a finally block so it is released even
    // when parsing throws partway through (the old code leaked it on error)
    Map.OpenIO();
    try
    {
        // --- Locate the matg (globals) tag, which owns the locale tables ---
        int matgIndex = -1;
        for (int i = 0; i < Map.IndexItems.Count; i++)
        {
            if (Map.IndexItems[i].Class == "matg")
            {
                matgIndex = i;
                break;
            }
        }

        // --- Read the per-language table headers (there are 12 languages) ---
        LocaleTables = new List <LocaleTable>();
        for (int i = 0; i < 12; i++)
        {
            LocaleTable unicTable = new LocaleTable();
            unicTable.Language = (LanguageType)i;
            if (matgIndex != -1)
            {
                // The language block sits at a version-specific offset inside matg;
                // each language entry is 68 bytes long
                if (Map.Halo_Map_Version == HaloMap.HaloMapVersion.Halo3Retail)
                    unicTable.Offset = Map.IndexItems[matgIndex].Offset + 452 + ((int)unicTable.Language * 68);
                else if (Map.Halo_Map_Version == HaloMap.HaloMapVersion.Halo3ODST)
                    unicTable.Offset = Map.IndexItems[matgIndex].Offset + 508 + ((int)unicTable.Language * 68);

                Map.IO.SeekTo(unicTable.Offset);
                unicTable.LocaleCount = Map.IO.In.ReadInt32();
                unicTable.LocaleTableSize = Map.IO.In.ReadInt32();
                // Raw addresses become file offsets once the map's modifier is applied
                unicTable.LocaleTableIndexOffset = Map.IO.In.ReadInt32() + Map.MapHeader.localeTableAddressModifier;
                unicTable.LocaleTableOffset = Map.IO.In.ReadInt32() + Map.MapHeader.localeTableAddressModifier;
            }
            LocaleTables.Add(unicTable);
        }

        // --- Load the strings for every table ---
        for (int currentTableIndex = 0; currentTableIndex < LocaleTables.Count; currentTableIndex++)
        {
            LocaleTable currentTable = LocaleTables[currentTableIndex];

            // First pass: read each string's offset from the index table
            Map.IO.In.BaseStream.Position = currentTable.LocaleTableIndexOffset;
            currentTable.LocaleStrings = new List <LocaleTable.LocaleString>();
            for (int currentLocaleIndex = 0; currentLocaleIndex < currentTable.LocaleCount; currentLocaleIndex++)
            {
                // Skip the 4-byte ID; only the offset is needed here
                Map.IO.In.BaseStream.Position += 4;
                LocaleTable.LocaleString currentLocaleString = new LocaleTable.LocaleString();
                currentLocaleString.Offset = Map.IO.In.ReadInt32();
                currentTable.LocaleStrings.Add(currentLocaleString);
            }

            // Second pass: derive each string's length from the next string's
            // offset (or the table end for the last one) and read the text
            for (int currentLocaleIndex = 0; currentLocaleIndex < currentTable.LocaleCount; currentLocaleIndex++)
            {
                Map.IO.In.BaseStream.Position = currentTable.LocaleTableOffset + currentTable.LocaleStrings[currentLocaleIndex].Offset;
                if (currentLocaleIndex != currentTable.LocaleCount - 1)
                {
                    // length = nextOffset - (thisOffset + 1); the +1 appears to
                    // exclude a terminator byte - TODO confirm against the format
                    currentTable.LocaleStrings[currentLocaleIndex].Length =
                        currentTable.LocaleStrings[currentLocaleIndex + 1].Offset -
                        (currentTable.LocaleStrings[currentLocaleIndex].Offset + 1);
                }
                else
                {
                    // Last string runs to the end of the table
                    currentTable.LocaleStrings[currentLocaleIndex].Length =
                        currentTable.LocaleTableSize -
                        (currentTable.LocaleStrings[currentLocaleIndex].Offset + 1);
                }
                currentTable.LocaleStrings[currentLocaleIndex].Name =
                    Map.IO.In.ReadAsciiString(currentTable.LocaleStrings[currentLocaleIndex].Length);
            }
        }
    }
    finally
    {
        // Always release the IO, even when parsing fails
        Map.CloseIO();
    }
}
/// <summary>
/// Serializes a locale table back into the cache file: writes the pointer
/// table and (optionally encrypted) string data, making free space in the
/// file as needed, refreshing the data hashes, and recalculating the
/// language table offsets.
/// </summary>
/// <param name="stream">The stream to write the cache file data to.</param>
/// <param name="locales">The locale table to save.</param>
public void SaveStrings(IStream stream, LocaleTable locales)
{
    MemoryStream offsetData = new MemoryStream();
    MemoryStream stringData = new MemoryStream();
    IWriter offsetWriter = new EndianWriter(offsetData, Endian.BigEndian);
    IWriter stringWriter = new EndianWriter(stringData, Endian.BigEndian);
    try
    {
        // Write the string and offset data to buffers
        foreach (Locale locale in locales.Strings)
        {
            WriteLocalePointer(offsetWriter, locale.ID, (int)stringWriter.Position);
            stringWriter.WriteUTF8(locale.Value);
        }

        // Round the size of the string data up to the nearest multiple of 0x10 (AES block size)
        stringData.SetLength((stringData.Position + 0xF) & ~0xF);

        // Update the two locale data hashes if we need to
        // (the hash arrays are set to null if the build doesn't need them)
        if (IndexTableHash != null)
            IndexTableHash = SHA1.Transform(offsetData.GetBuffer(), 0, (int)offsetData.Length);
        if (StringDataHash != null)
            StringDataHash = SHA1.Transform(stringData.GetBuffer(), 0, (int)stringData.Length);

        // Make sure there's free space for the offset table and then write it to the file.
        // NOTE(review): growing the offset table shifts the string data region, so the
        // data location is adjusted here; presumably MakeFreeSpace returns the number
        // of bytes inserted - verify against StreamUtil.
        LocaleDataLocation += StreamUtil.MakeFreeSpace(stream, LocaleIndexTableLocation.AsOffset(), LocaleDataLocation.AsOffset(), offsetData.Length, _pageSize);
        stream.SeekTo(LocaleIndexTableLocation.AsOffset());
        stream.WriteBlock(offsetData.GetBuffer(), 0, (int)offsetData.Length);

        // Encrypt the string data if necessary
        byte[] strings = stringData.GetBuffer();
        if (_encryptionKey != null)
            strings = AES.Encrypt(strings, 0, (int)stringData.Length, _encryptionKey.Key, _encryptionKey.IV);

        // Make free space for the string data
        uint oldDataEnd = (uint)((LocaleDataLocation.AsOffset() + LocaleTableSize + _pageSize - 1) & ~(_pageSize - 1)); // Add the old table size and round it up
        StreamUtil.MakeFreeSpace(stream, LocaleDataLocation.AsOffset(), oldDataEnd, stringData.Length, _pageSize);
        LocaleTableSize = (int)stringData.Length;

        // Write it to the file
        stream.SeekTo(LocaleDataLocation.AsOffset());
        stream.WriteBlock(strings, 0, (int)stringData.Length);

        // Update the string count and recalculate the language table offsets
        StringCount = locales.Strings.Count;
        int localePointerSize = (int)(offsetData.Length / locales.Strings.Count);
        _languageGlobals.RecalculateLanguageOffsets(localePointerSize);
    }
    finally
    {
        offsetWriter.Close();
        stringWriter.Close();
    }
}
/// <summary>
/// Materializes this record as a <see cref="Log"/>, resolving the stored
/// part and locale ids through the supplied tables.
/// </summary>
public Log GetLog(PartTable parts, LocaleTable locales)
{
    var part = parts.GetPart(PartId);
    var locale = locales.GetLocale(LocaleId);
    var qualifier = new Qualifier.Unique(part, locale, Key);
    return new Log(qualifier, Date);
}
/// <summary>
/// Re-establishes the database connection, creates the configured schema
/// if it does not exist yet, and rebuilds the table wrappers against the
/// current database.
/// </summary>
public void Reload()
{
    using (var connector = Connect())
    {
        var connection = connector.Connection;
        var database = GetDatabase(GetServer(connection));

        // Create the configured schema on first use
        if (!database.Schemas.Contains(_tableConfiguration.Schema))
        {
            var schema = new Schema(database, _tableConfiguration.Schema);
            database.Schemas.Add(schema);
            schema.Create();
        }

        _partTable = new PartTable(connection, database, _tableConfiguration);
        _localeTable = new LocaleTable(connection, database, _tableConfiguration);
        _valueTable = new ValueTable(connection, database, _tableConfiguration);
        _logTable = new LogTable(database, _tableConfiguration);
    }
}
/// <summary>
/// Looks up the value for the given part/locale/key combination, falling
/// back through the part's ancestors when the part itself has no entry.
/// </summary>
/// <returns>The matching value, or null when no ancestor defines one.</returns>
public DBValue GetValue(PartTable.DBPart part, LocaleTable.DBLocale locale, String key)
{
    // Walk up the part hierarchy with TryGetValue instead of catching
    // KeyNotFoundException once per missing level - exceptions were being
    // used for ordinary control flow, which is both slow and obscures
    // genuine lookup failures.
    for (var current = part; current != null; current = current.Parent as PartTable.DBPart)
    {
        DBValue value;
        if (_valuesByQualifier.TryGetValue(new InternalQualifier(current.Id, locale.Id, key), out value))
            return value;
    }
    return null;
}
/// <summary>
/// Resolves every stored value into a <see cref="QualifiedValue"/>, looking
/// the part and locale up by their ids. The result is fully materialized
/// before being returned.
/// </summary>
public IEnumerable<QualifiedValue> GetQualifiedValues(PartTable partTable, LocaleTable localeTable)
{
    var result = new List<QualifiedValue>();
    foreach (var pair in _valuesByQualifier)
    {
        var qualifier = new Qualifier.Unique(
            partTable.GetPart(pair.Key.PartId),
            localeTable.GetLocale(pair.Key.LocaleId),
            pair.Key.Key);
        result.Add(new QualifiedValue(qualifier, pair.Value.Content));
    }
    return result;
}