/// <summary>
/// Writes the tag-caches section: a generic header, a reserved table of
/// 0x28-byte entries (one per cache), then each cache blob 4-byte aligned.
/// Table entries are backpatched once each blob's size is known.
/// </summary>
private void WriteTagsSection(EndianWriter writer, DataSerializationContext context, TagSerializer serializer)
{
    uint sectionStart = (uint)writer.BaseStream.Position;

    var header = new GenericSectionEntry(TagCaches.Count, 0x8);
    header.Write(writer);

    // Reserve space for the table up front; entries are filled in below.
    writer.Write(new byte[0x28 * TagCaches.Count]);

    for (int cacheIndex = 0; cacheIndex < TagCaches.Count; cacheIndex++)
    {
        uint blobStart = (uint)writer.BaseStream.Position;

        var cacheStream = TagCachesStreams[cacheIndex];
        cacheStream.Position = 0;
        StreamUtil.Copy(cacheStream, writer.BaseStream, (int)cacheStream.Length);
        StreamUtil.Align(writer.BaseStream, 4);

        uint blobSize = (uint)(writer.BaseStream.Position - blobStart);

        // Backpatch this cache's table entry, then return to the stream end.
        writer.BaseStream.Seek(header.TableOffset + 0x28 * cacheIndex + sectionStart, SeekOrigin.Begin);
        var tableEntry = new CacheTableEntry(blobSize, blobStart - sectionStart, CacheNames[cacheIndex]);
        serializer.Serialize(context, tableEntry);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }
}
/// <summary>
/// Writes the files section: generic header, a reserved table of 0x108-byte
/// entries, then each file's contents 0x10-aligned. Each table entry is
/// backpatched with the file's path, size, and section-relative offset.
/// </summary>
private void WriteFileEntries(EndianWriter writer, ISerializationContext context, TagSerializer serializer)
{
    const int kFileTableEntrySize = 0x108;

    uint sectionStart = (uint)writer.BaseStream.Position;

    var table = new GenericSectionEntry(Files.Count, 0x8);
    table.Write(writer);

    // Skip past the (still empty) table; file data is laid down first.
    writer.BaseStream.Position = sectionStart + table.TableOffset + Files.Count * kFileTableEntrySize;

    var entryIndex = 0;
    foreach (var file in Files)
    {
        StreamUtil.Align(writer.BaseStream, 0x10);
        uint dataOffset = (uint)(writer.BaseStream.Position - sectionStart);

        // Write the file contents at the aligned position.
        file.Value.CopyTo(writer.BaseStream);

        // Backpatch this file's table entry.
        writer.BaseStream.Position = sectionStart + table.TableOffset + entryIndex * kFileTableEntrySize;
        entryIndex++;

        var tableEntry = new FileTableEntry();
        tableEntry.Path = file.Key;
        tableEntry.Size = (uint)file.Value.Length;
        tableEntry.Offset = dataOffset;
        serializer.Serialize(context, tableEntry);

        // Resume writing at the end of the stream.
        writer.Seek(0, SeekOrigin.End);
    }
}
/// <summary>
/// Reads the map-files section into <see cref="MapFileStreams"/> and records
/// each map's owning cache index in <see cref="MapToCacheMapping"/>.
/// Table entries are 0x10 bytes; offsets are relative to the section start
/// (matching WriteMapsSection's layout).
/// </summary>
private void ReadMapFilesSection(EndianReader reader)
{
    var section = GetSectionHeader(reader, ModPackageSection.MapFiles);
    if (!GoToSectionHeaderOffset(reader, section))
        return;

    var entry = new GenericSectionEntry(reader);
    var mapCount = entry.Count;
    MapFileStreams = new List<Stream>();
    MapToCacheMapping = new Dictionary<int, int>();

    for (int i = 0; i < mapCount; i++)
    {
        reader.BaseStream.Position = entry.TableOffset + 0x10 * i + section.Offset;
        var tableEntry = new CacheMapTableEntry(reader);

        reader.BaseStream.Position = tableEntry.Offset + section.Offset;
        MapToCacheMapping.Add(i, tableEntry.CacheIndex);

        // BUGFIX: read only this map's bytes. The previous code used
        // section.Size for every entry, so each stream pulled in trailing
        // data belonging to neighbouring maps (the writer stores a per-entry
        // size — see WriteMapsSection's CacheMapTableEntry).
        int size = (int)tableEntry.Size;
        var stream = new MemoryStream();
        byte[] data = new byte[size];
        reader.Read(data, 0, size);
        stream.Write(data, 0, size);
        stream.Position = 0; // leave the stream rewound for consumers (matches ReadTagsSection)
        MapFileStreams.Add(stream);
    }
}
/// <summary>
/// Writes the maps section: generic header, a reserved table of 0x10-byte
/// entries, then each map blob 4-byte aligned. Each entry is backpatched
/// with the blob's size, section-relative offset, cache index, and map id.
/// </summary>
private void WriteMapsSection(EndianWriter writer)
{
    uint sectionStart = (uint)writer.BaseStream.Position;

    var header = new GenericSectionEntry(MapFileStreams.Count, 0x8);
    header.Write(writer);

    // Reserve the table; entries are filled in after each blob is written.
    writer.Write(new byte[0x10 * header.Count]);

    for (int mapIndex = 0; mapIndex < MapFileStreams.Count; mapIndex++)
    {
        var source = MapFileStreams[mapIndex];
        uint blobOffset = (uint)writer.BaseStream.Position;
        int blobSize = (int)source.Length;

        source.Position = 0;
        StreamUtil.Copy(source, writer.BaseStream, blobSize);
        StreamUtil.Align(writer.BaseStream, 4);

        // Backpatch this map's table entry, then return to the stream end.
        writer.BaseStream.Seek(header.TableOffset + 0x10 * mapIndex + sectionStart, SeekOrigin.Begin);
        var tableEntry = new CacheMapTableEntry(blobSize, blobOffset - sectionStart, MapToCacheMapping[mapIndex], MapIds[mapIndex]);
        tableEntry.Write(writer);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }

    writer.BaseStream.Seek(0, SeekOrigin.End);
}
/// <summary>
/// Reads the map-files section into <see cref="MapFileStreams"/>.
/// Table entries are 8 bytes (GenericTableEntry); offsets are relative to
/// the section start, consistent with the other section readers.
/// </summary>
private void ReadMapFilesSection(EndianReader reader)
{
    var section = GetSectionHeader(reader, ModPackageSection.MapFiles);
    if (!GoToSectionHeaderOffset(reader, section))
        return;

    var entry = new GenericSectionEntry(reader);
    var mapCount = entry.Count;
    MapFileStreams = new List<MemoryStream>();

    for (int i = 0; i < mapCount; i++)
    {
        // BUGFIX: apply the section offset — table and data offsets are
        // section-relative (every other reader adds section.Offset), but it
        // was previously omitted here.
        reader.BaseStream.Position = entry.TableOffset + 0x8 * i + section.Offset;
        var tableEntry = new GenericTableEntry(reader);
        reader.BaseStream.Position = tableEntry.Offset + section.Offset;

        // BUGFIX: read only this entry's bytes (tableEntry.Size); using
        // section.Size copied the same whole-section span into every stream.
        int size = (int)tableEntry.Size;
        var stream = new MemoryStream();
        byte[] data = new byte[size];
        reader.Read(data, 0, size);
        stream.Write(data, 0, size);
        stream.Position = 0; // rewind for consumers
        MapFileStreams.Add(stream);
    }
}
/// <summary>
/// Reads the tag-names section into <see cref="TagNames"/> (tag index -> name).
/// Entries are packed back-to-back after the table offset; each Deserialize
/// call advances the stream to the next entry.
/// </summary>
private void ReadTagNamesSection(EndianReader reader, DataSerializationContext context, TagDeserializer deserializer)
{
    var section = GetSectionHeader(reader, ModPackageSection.TagNames);
    if (!GoToSectionHeaderOffset(reader, section))
        return;

    var header = new GenericSectionEntry(reader);
    reader.BaseStream.Position = header.TableOffset;

    for (int entryIndex = 0; entryIndex < header.Count; entryIndex++)
    {
        var nameEntry = deserializer.Deserialize<ModPackageTagNamesEntry>(context);
        TagNames.Add(nameEntry.TagIndex, nameEntry.Name);
    }
}
/// <summary>
/// Writes the per-cache tag-names section: an outer header plus a reserved
/// table of 8-byte entries (one per cache); each cache gets a nested
/// sub-section (header + name entries) whose size/offset is backpatched
/// into the outer table.
/// </summary>
private void WriteTagNamesSection(EndianWriter writer, DataSerializationContext context, TagSerializer serializer)
{
    uint sectionStart = (uint)writer.BaseStream.Position;

    var header = new GenericSectionEntry(TagCacheNames.Count, 0x8);
    header.Write(writer);

    // Reserve one 8-byte outer table entry per cache; backpatched below.
    writer.Write(new byte[0x8 * TagCacheNames.Count]);

    for (int cacheIndex = 0; cacheIndex < TagCacheNames.Count; cacheIndex++)
    {
        // Gather every named tag in this cache, keyed by tag index.
        var names = new Dictionary<int, string>();
        foreach (var tag in TagCaches[cacheIndex].TagTable)
        {
            if (tag != null && tag.Name != null)
                names.Add(tag.Index, tag.Name);
        }

        uint blockStart = (uint)writer.BaseStream.Position;

        // Nested sub-section: its own header followed directly by the entries.
        var subHeader = new GenericSectionEntry(names.Count, blockStart - sectionStart + 0x8);
        subHeader.Write(writer);

        foreach (var pair in names)
            serializer.Serialize(context, new ModPackageTagNamesEntry(pair.Key, pair.Value));

        uint blockSize = (uint)(writer.BaseStream.Position - blockStart);

        // Backpatch the outer table entry, then return to the stream end.
        writer.BaseStream.Seek(header.TableOffset + 0x8 * cacheIndex + sectionStart, SeekOrigin.Begin);
        var tableEntry = new GenericTableEntry(blockSize, blockStart - sectionStart);
        tableEntry.Write(writer);
        writer.BaseStream.Seek(0, SeekOrigin.End);
    }
}
/// <summary>
/// Reads the per-cache tag-names section into <see cref="TagCacheNames"/>:
/// an outer table of 8-byte entries, each pointing at a nested header plus
/// name-entry list for one cache. All offsets are section-relative.
/// </summary>
private void ReadTagNamesSection(EndianReader reader, DataSerializationContext context, TagDeserializer deserializer)
{
    var section = GetSectionHeader(reader, ModPackageSection.TagNames);
    if (!GoToSectionHeaderOffset(reader, section))
        return;

    var outerHeader = new GenericSectionEntry(reader);
    TagCacheNames = new List<Dictionary<int, string>>();

    for (int cacheIndex = 0; cacheIndex < outerHeader.Count; cacheIndex++)
    {
        // Outer table entry for this cache.
        reader.BaseStream.Position = outerHeader.TableOffset + 0x8 * cacheIndex + section.Offset;
        var tableEntry = new GenericTableEntry(reader);

        if (tableEntry.Size == 0)
            throw new Exception("invalid tag name table entry size!");

        // Follow the entry to the nested header, then to its entry list.
        reader.BaseStream.Position = tableEntry.Offset + section.Offset;
        var innerHeader = new GenericSectionEntry(reader);
        reader.BaseStream.Position = innerHeader.TableOffset + section.Offset;

        var names = new Dictionary<int, string>();
        for (int nameIndex = 0; nameIndex < innerHeader.Count; nameIndex++)
        {
            var nameEntry = deserializer.Deserialize<ModPackageTagNamesEntry>(context);
            names.Add(nameEntry.TagIndex, nameEntry.Name);
        }

        TagCacheNames.Add(names);
    }
}
/// <summary>
/// Reads the tags section: one 0x28-byte table entry per cache, each giving
/// a name plus the size and section-relative offset of its cache blob.
/// Populates <see cref="TagCachesStreams"/> and <see cref="CacheNames"/>.
/// </summary>
private void ReadTagsSection(EndianReader reader, DataSerializationContext context, TagDeserializer deserializer)
{
    var section = GetSectionHeader(reader, ModPackageSection.Tags);
    if (!GoToSectionHeaderOffset(reader, section))
        return;

    var entry = new GenericSectionEntry(reader);
    var cacheCount = entry.Count;
    TagCachesStreams = new List<Stream>();
    CacheNames = new List<string>();

    for (int i = 0; i < cacheCount; i++)
    {
        reader.BaseStream.Position = entry.TableOffset + 0x28 * i + section.Offset;
        var tableEntry = deserializer.Deserialize<CacheTableEntry>(context);
        CacheNames.Add(tableEntry.CacheName);

        reader.BaseStream.Position = tableEntry.Offset + section.Offset;

        // BUGFIX: size each cache from its own table entry. The previous
        // code used section.Size for every cache, which over-allocated and
        // read past a cache's blob into the next one whenever more than one
        // cache is present (the writer stores a per-entry size — see
        // WriteTagsSection's CacheTableEntry).
        if (tableEntry.Size > int.MaxValue)
            throw new Exception("Tag cache size not supported");

        int size = (int)tableEntry.Size;
        byte[] data = new byte[size];
        reader.Read(data, 0, size);

        var tagStream = new MemoryStream();
        tagStream.Write(data, 0, size);
        tagStream.Position = 0; // rewind for consumers
        TagCachesStreams.Add(tagStream);
    }
}
/// <summary>
/// Reads the files section into <see cref="Files"/> (path -> content stream).
/// The table entries are contiguous; each entry's Offset points at the
/// 0x10-aligned file data placed after the whole table (see WriteFileEntries).
/// </summary>
private void ReadFileEntries(EndianReader reader, ISerializationContext context, TagDeserializer deserializer)
{
    Files = new Dictionary<string, Stream>();

    var section = GetSectionHeader(reader, ModPackageSection.Files);
    if (!GoToSectionHeaderOffset(reader, section))
        return;

    var fileTable = new GenericSectionEntry(reader);
    reader.BaseStream.Position = fileTable.TableOffset + section.Offset;

    for (int i = 0; i < fileTable.Count; i++)
    {
        var tableEntry = deserializer.Deserialize<FileTableEntry>(context);

        // BUGFIX: the file contents live at tableEntry.Offset (relative to
        // the section start), not immediately after the table entry — the
        // writer lays out all table entries contiguously and appends the
        // aligned file data after the table. Previously the copy started at
        // the next table entry, capturing table bytes instead of file data.
        long nextEntryPosition = reader.BaseStream.Position;
        reader.BaseStream.Position = tableEntry.Offset + section.Offset;

        var stream = new MemoryStream();
        StreamUtil.Copy(reader.BaseStream, stream, tableEntry.Size);
        stream.Position = 0; // rewind for consumers
        Files.Add(tableEntry.Path, stream);

        // Return to the next table entry.
        reader.BaseStream.Position = nextEntryPosition;
    }
}
/// <summary>
/// Writes the flat tag-names section: a header whose table offset points
/// just past itself, followed immediately by one entry per named tag.
/// </summary>
private void WriteTagNamesSection(EndianWriter writer, DataSerializationContext context, TagSerializer serializer)
{
    // Collect every tag that has a name, keyed by tag index.
    var tagNames = new Dictionary<int, string>();
    foreach (var tag in Tags.Index)
    {
        if (tag != null && tag.Name != null)
            tagNames.Add(tag.Index, tag.Name);
    }

    // Header first; the name entries start 8 bytes after the current position.
    var header = new GenericSectionEntry(tagNames.Count, (int)writer.BaseStream.Position + 0x8);
    header.Write(writer);

    foreach (var pair in tagNames)
        serializer.Serialize(context, new ModPackageTagNamesEntry(pair.Key, pair.Value));
}