/// <summary>
/// Reads the map-files section of the mod package: one table entry per map,
/// each pointing at a raw map blob, which is copied into its own MemoryStream
/// in <c>MapFileStreams</c>. Returns silently if the section is absent.
/// </summary>
/// <param name="reader">Reader positioned anywhere in the package stream.</param>
private void ReadMapFilesSection(EndianReader reader)
{
    var section = GetSectionHeader(reader, ModPackageSection.MapFiles);
    if (!GoToSectionHeaderOffset(reader, section))
    {
        return;
    }

    var entry = new GenericSectionEntry(reader);
    MapFileStreams = new List<MemoryStream>();

    for (int i = 0; i < entry.Count; i++)
    {
        // Each table entry is 8 bytes: (size, offset) — written by WriteMapsSection.
        reader.BaseStream.Position = entry.TableOffset + 0x8 * i;
        var tableEntry = new GenericTableEntry(reader);

        reader.BaseStream.Position = tableEntry.Offset;

        // BUG FIX: read this map's own size (tableEntry.Size), not section.Size.
        // The old code read the entire section's byte count for every map,
        // over-reading and duplicating data when more than one map is present.
        byte[] data = new byte[tableEntry.Size];
        reader.Read(data, 0, data.Length);

        var stream = new MemoryStream();
        stream.Write(data, 0, data.Length);
        MapFileStreams.Add(stream);
    }
}
/// <summary>
/// Writes the map-files section: a section header, a reserved (size, offset)
/// table with one 8-byte entry per map, then each map's raw bytes padded to a
/// 4-byte boundary. The table is backfilled as each map is written.
/// </summary>
/// <param name="writer">Writer positioned at the start of the section.</param>
private void WriteMapsSection(EndianWriter writer)
{
    // Header records the map count and the (absolute) position of the table,
    // which begins immediately after the 8-byte header itself.
    var header = new GenericSectionEntry(MapFileStreams.Count, (int)writer.BaseStream.Position + 0x8);
    header.Write(writer);

    // Reserve room for the table; entries are filled in below once each map's
    // final offset and size are known.
    writer.Write(new byte[0x8 * header.Count]);

    for (int index = 0; index < MapFileStreams.Count; index++)
    {
        var source = MapFileStreams[index];
        source.Position = 0;

        int mapOffset = (int)writer.BaseStream.Position;
        int mapSize = (int)source.Length;

        StreamUtil.Copy(source, writer.BaseStream, mapSize);
        StreamUtil.Align(writer.BaseStream, 4);

        // Backfill this map's (size, offset) pair in the reserved table,
        // then return to the end of the stream for the next map.
        writer.BaseStream.Seek(header.TableOffset + 0x8 * index, SeekOrigin.Begin);
        new GenericTableEntry(mapSize, mapOffset).Write(writer);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }

    writer.BaseStream.Seek(0, SeekOrigin.End);
}
/// <summary>
/// Writes the tag-names section: an outer header plus a reserved table with one
/// 8-byte entry per cache, followed by each cache's name block (an inner header
/// and serialized <c>ModPackageTagNamesEntry</c> records). All stored offsets
/// are relative to the section start, matching ReadTagNamesSection.
/// </summary>
/// <param name="writer">Writer positioned at the start of the section.</param>
/// <param name="context">Serialization context for the name entries.</param>
/// <param name="serializer">Serializer used to emit each name entry.</param>
private void WriteTagNamesSection(EndianWriter writer, DataSerializationContext context, TagSerializer serializer)
{
    uint sectionStart = (uint)writer.BaseStream.Position;

    // Outer header: cache count + section-relative table offset (table starts
    // right after this 8-byte header).
    var fileHeader = new GenericSectionEntry(TagCacheNames.Count, 0x8);
    fileHeader.Write(writer);

    // Reserve the per-cache table; entries are backfilled after each cache's
    // name block has been serialized.
    writer.Write(new byte[0x8 * TagCacheNames.Count]);

    for (int cacheIndex = 0; cacheIndex < TagCacheNames.Count; cacheIndex++)
    {
        // Collect (tag index -> name) pairs for this cache's tag table.
        var nameLookup = new Dictionary<int, string>();
        foreach (var tag in TagCaches[cacheIndex].TagTable)
        {
            if (tag?.Name != null)
            {
                nameLookup.Add(tag.Index, tag.Name);
            }
        }

        uint blockStart = (uint)writer.BaseStream.Position;

        // Inner header: entry count + section-relative offset of the entries
        // (they follow immediately after this 8-byte header).
        new GenericSectionEntry(nameLookup.Count, blockStart - sectionStart + 0x8).Write(writer);

        foreach (var pair in nameLookup)
        {
            serializer.Serialize(context, new ModPackageTagNamesEntry(pair.Key, pair.Value));
        }

        uint blockSize = (uint)(writer.BaseStream.Position - blockStart);

        // Backfill this cache's (size, section-relative offset) table entry,
        // then return to the end of the stream for the next cache.
        writer.BaseStream.Seek(fileHeader.TableOffset + 0x8 * cacheIndex + sectionStart, SeekOrigin.Begin);
        new GenericTableEntry(blockSize, blockStart - sectionStart).Write(writer);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }
}
/// <summary>
/// Reads the tag-names section back into <c>TagCacheNames</c>: for each cache,
/// follows its table entry to an inner header and deserializes that many
/// <c>ModPackageTagNamesEntry</c> records into a (tag index -> name) map.
/// All stored offsets are relative to the section start, matching
/// WriteTagNamesSection. Returns silently if the section is absent.
/// </summary>
/// <param name="reader">Reader positioned anywhere in the package stream.</param>
/// <param name="context">Serialization context for the name entries.</param>
/// <param name="deserializer">Deserializer used to read each name entry.</param>
/// <exception cref="Exception">Thrown when a table entry has a zero size.</exception>
private void ReadTagNamesSection(EndianReader reader, DataSerializationContext context, TagDeserializer deserializer)
{
    var section = GetSectionHeader(reader, ModPackageSection.TagNames);
    if (!GoToSectionHeaderOffset(reader, section))
    {
        return;
    }

    var header = new GenericSectionEntry(reader);
    TagCacheNames = new List<Dictionary<int, string>>();

    for (int cacheIndex = 0; cacheIndex < header.Count; cacheIndex++)
    {
        // Table entries and all stored offsets are section-relative.
        reader.BaseStream.Position = header.TableOffset + 0x8 * cacheIndex + section.Offset;
        var tableEntry = new GenericTableEntry(reader);
        if (tableEntry.Size == 0)
        {
            throw new Exception("invalid tag name table entry size!");
        }

        reader.BaseStream.Position = tableEntry.Offset + section.Offset;
        var namesHeader = new GenericSectionEntry(reader);

        reader.BaseStream.Position = namesHeader.TableOffset + section.Offset;
        var names = new Dictionary<int, string>();
        for (int nameIndex = 0; nameIndex < namesHeader.Count; nameIndex++)
        {
            var nameEntry = deserializer.Deserialize<ModPackageTagNamesEntry>(context);
            names.Add(nameEntry.TagIndex, nameEntry.Name);
        }

        TagCacheNames.Add(names);
    }
}