/// <summary>
/// Loads an LM2 archive: parses the file header and file-entry table, reads the
/// chunk table from the first file entry, then walks every sub chunk in order to
/// build textures, models, skeletons, material names and message data.
/// </summary>
/// <param name="stream">The raw (possibly compressed) archive stream.</param>
public void Load(System.IO.Stream stream)
{
    modelFolder = new LM2_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM2_Renderer();
    DrawableContainer.Drawables.Add(Renderer);
    Text = FileName;

    //Shared hash -> readable-name lookup used for model and bone labels.
    var HashNames = NLG_Common.HashNames;

    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;

        //--- Archive header ---
        uint Identifier = reader.ReadUInt32();
        ushort Unknown = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint FileCount = reader.ReadUInt32();
        uint LargestCompressedFile = reader.ReadUInt32();

        reader.SeekBegin(0x2C);
        byte[] Unknowns = reader.ReadBytes((int)FileCount); //One unknown byte per file entry.

        TreeNode tableNodes = new TreeNode("File Section Entries");

        //--- File entry table ---
        long FileTablePos = reader.Position;
        for (int i = 0; i < FileCount; i++)
        {
            var file = new FileEntry(this);
            file.Text = $"entry {i}";
            file.Read(reader);
            fileEntries.Add(file);
            tableNodes.Nodes.Add(file);

            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM2_ChunkTable();
                    ChunkTable.Read(tableReader);

                    //Debug views of the parsed chunk table.
                    TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                    Nodes.Add(debugFolder);

                    TreeNode list1 = new TreeNode("Entry List 1");
                    TreeNode list2 = new TreeNode("Entry List 2 ");
                    debugFolder.Nodes.Add(tableNodes);
                    debugFolder.Nodes.Add(list1);
                    debugFolder.Nodes.Add(list2);
                    debugFolder.Nodes.Add(chunkFolder);

                    foreach (var chunk in ChunkTable.ChunkEntries)
                    {
                        list1.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkOffset {chunk.ChunkOffset} Unknown1 {chunk.Unknown1} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                    }
                    foreach (var chunk in ChunkTable.ChunkSubEntries)
                    {
                        list2.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkSize {chunk.ChunkSize} ChunkOffset {chunk.ChunkOffset}");
                    }
                }
            }
        }

        //Set an instance of our current data
        //Chunks are in order, so you build off of when an instance gets loaded
        TexturePOWE currentTexture = new TexturePOWE();
        LM2_Model currentModel = new LM2_Model(this);

        //Each part of the file is divided into multiple file/section entries
        //The first entry being the chunk table parsed before this
        //The second file being a duplicate (sometimes slightly larger than the first)
        //The third file stores texture headers, while the fourth one usually has the rest of the main data
        //Any additional ones currently are unknown how they work. Some of which have unknown compression aswell
        byte[] File002Data = fileEntries[2].GetData(); //Get the third file
        byte[] File003Data = fileEntries[3].GetData(); //Get the fourth file

        //Collect the hash id of every model chunk so models can be named below.
        List<uint> ModelHashes = new List<uint>();
        for (int i = 0; i < ChunkTable.ChunkEntries.Count; i++)
        {
            //BUGFIX: the type check previously had an EMPTY body and the hash read
            //below it ran for EVERY chunk entry, polluting ModelHashes and breaking
            //the ModelHashes[modelIndex] alignment used when naming models. Only
            //model chunks carry a hash here (the LM3 loader guards the read the
            //same way), so the read now sits inside the check.
            if (ChunkTable.ChunkEntries[i].ChunkType == DataType.Model)
            {
                using (var chunkReader = new FileReader(File002Data))
                {
                    chunkReader.SeekBegin(ChunkTable.ChunkEntries[i].ChunkOffset);
                    uint magic = chunkReader.ReadUInt32();
                    uint hash = chunkReader.ReadUInt32();
                    ModelHashes.Add(hash);

                    Console.WriteLine($"{ChunkTable.ChunkEntries[i].ChunkType} {hash}");
                }
            }
        }

        int chunkId = 0;
        uint ImageHeaderIndex = 0;
        uint modelIndex = 0;
        uint messageIndex = 0;

        //Walk every sub chunk in order. currentTexture/currentModel carry state
        //across iterations because data chunks follow their header chunks.
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            chunkEntry.Text = $"Chunk {chunk.ChunkType.ToString("X")} {chunk.ChunkType} {chunkId++}";
            chunkEntries.Add(chunkEntry);
            chunkFolder.Nodes.Add(chunkEntry);

            //BlockIndex selects which file section this chunk's data lives in.
            if (chunk.BlockIndex == 0)
                chunkEntry.DataFile = File002Data;
            else if (chunk.BlockIndex == 1)
                chunkEntry.DataFile = File003Data;

            switch (chunk.ChunkType)
            {
                case SubDataType.TextureHeader:
                    //Read the info
                    using (var textureReader = new FileReader(chunkEntry.FileData))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);
                        currentTexture.Text = $"Texture {ImageHeaderIndex}";
                        textureFolder.Nodes.Add(currentTexture);
                        Renderer.TextureList.Add(currentTexture);

                        Console.WriteLine(currentTexture.ID2);
                        ImageHeaderIndex++;
                    }
                    break;
                case SubDataType.TextureData:
                    //Raw image data for the texture header parsed just before this chunk.
                    currentTexture.ImageData = chunkEntry.FileData;
                    break;
                case SubDataType.MaterialData:
                    //Starts a new model; subsequent chunks fill it in.
                    currentModel = new LM2_Model(this);
                    currentModel.ModelInfo = new LM2_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData;
                    modelFolder.Nodes.Add(currentModel);

                    //Prefer the model hash (or its known readable name) as the label.
                    if (ModelHashes.Count > modelIndex)
                    {
                        currentModel.Text = $"Model {modelIndex} {ModelHashes[(int)modelIndex].ToString("x")}";
                        if (HashNames.ContainsKey(ModelHashes[(int)modelIndex]))
                            currentModel.Text = HashNames[ModelHashes[(int)modelIndex]];
                    }
                    modelIndex++;
                    break;
                case SubDataType.ModelData:
                    //16 bytes per model record: hash, mesh count, two unknowns.
                    uint numModels = chunk.ChunkSize / 16;
                    using (var dataReader = new FileReader(chunkEntry.FileData))
                    {
                        for (int i = 0; i < numModels; i++)
                        {
                            uint hashID = dataReader.ReadUInt32();
                            uint numMeshes = dataReader.ReadUInt32();
                            dataReader.ReadUInt32();
                            dataReader.ReadUInt32(); //0

                            Console.WriteLine(hashID);

                            string text = hashID.ToString("X");
                            if (HashNames.ContainsKey(hashID))
                                text = HashNames[hashID];

                            //Only the first record renames the current model.
                            if (i == 0)
                                currentModel.Text = text;
                        }
                    }
                    break;
                case SubDataType.MeshBuffers:
                    //Raw buffer range; parsed later by ReadVertexBuffers().
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;
                case SubDataType.BoneData:
                    //0x44 (68) bytes per bone; only the first bone chunk per model is used.
                    if (chunk.ChunkSize > 0x40 && currentModel.Skeleton == null)
                    {
                        using (var boneReader = new FileReader(chunkEntry.FileData))
                        {
                            currentModel.Skeleton = new STSkeleton();
                            DrawableContainer.Drawables.Add(currentModel.Skeleton);

                            uint numBones = chunk.ChunkSize / 68;
                            for (int i = 0; i < numBones; i++)
                            {
                                boneReader.SeekBegin(i * 68);
                                uint HashID = boneReader.ReadUInt32();
                                boneReader.ReadUInt32(); //unk
                                boneReader.ReadUInt32(); //unk
                                boneReader.ReadUInt32(); //unk
                                boneReader.ReadSingle(); //0
                                var Scale = new OpenTK.Vector3(
                                    boneReader.ReadSingle(),
                                    boneReader.ReadSingle(),
                                    boneReader.ReadSingle());
                                boneReader.ReadSingle(); //0
                                var Rotate = new OpenTK.Vector3(
                                    boneReader.ReadSingle(),
                                    boneReader.ReadSingle(),
                                    boneReader.ReadSingle());
                                boneReader.ReadSingle(); //0
                                var Position = new OpenTK.Vector3(
                                    boneReader.ReadSingle(),
                                    boneReader.ReadSingle(),
                                    boneReader.ReadSingle());
                                boneReader.ReadSingle(); //1

                                STBone bone = new STBone(currentModel.Skeleton);
                                bone.Text = HashID.ToString("X");
                                if (NLG_Common.HashNames.ContainsKey(HashID))
                                    bone.Text = NLG_Common.HashNames[HashID];

                                //Axis swap on import; the file's Scale value is ignored
                                //and a fixed display scale is used instead.
                                bone.position = new float[3] { Position.X, Position.Z, -Position.Y };
                                bone.rotation = new float[4] { Rotate.X, Rotate.Z, -Rotate.Y, 1 };
                                bone.scale = new float[3] { 0.2f, 0.2f, 0.2f };
                                bone.RotationType = STBone.BoneRotationType.Euler;
                                currentModel.Skeleton.bones.Add(bone);
                            }

                            currentModel.Skeleton.reset();
                            currentModel.Skeleton.update();
                        }
                    }
                    break;
                case SubDataType.VertexStartPointers:
                    //A packed list of uint offsets, one per vertex buffer.
                    using (var vtxPtrReader = new FileReader(chunkEntry.FileData))
                    {
                        while (!vtxPtrReader.EndOfStream)
                            currentModel.VertexBufferPointers.Add(vtxPtrReader.ReadUInt32());
                    }
                    break;
                case SubDataType.SubmeshInfo:
                    //0x28 bytes per mesh header.
                    int MeshCount = chunkEntry.FileData.Length / 0x28;
                    using (var meshReader = new FileReader(chunkEntry.FileData))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            LM2_Mesh mesh = new LM2_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);
                        }
                    }
                    currentModel.ModelInfo.Read(new FileReader(currentModel.ModelInfo.Data), currentModel.Meshes);
                    break;
                case SubDataType.ModelTransform:
                    using (var transformReader = new FileReader(chunkEntry.FileData))
                    {
                        //This is possibly very wrong
                        //The data isn't always per mesh, but sometimes is
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                        }
                    }
                    break;
                case SubDataType.BoneHashes:
                    //Debug-only scan; the resolved names are not stored anywhere yet.
                    using (var chunkReader = new FileReader(chunkEntry.FileData))
                    {
                        while (chunkReader.Position <= chunkReader.BaseStream.Length - 4)
                        {
                            uint hash = chunkReader.ReadUInt32();
                            string strHash = hash.ToString("X");
                            if (NLG_Common.HashNames.ContainsKey(hash))
                                strHash = NLG_Common.HashNames[hash];
                        }
                    }
                    break;
                case (SubDataType)0x7105:
                    //Unknown chunk type; intentionally skipped for now.
                    using (var chunkReader = new FileReader(chunkEntry.FileData))
                    {
                    }
                    break;
                case SubDataType.MaterialName:
                    using (var matReader = new FileReader(chunkEntry.FileData))
                    {
                        string mat = matReader.ReadZeroTerminatedString();
                        materialNamesFolder.Nodes.Add(mat);
                    }
                    break;
                case SubDataType.MessageData:
                    messageFolder.Nodes.Add(new NLOC_Wrapper($"Message Data {messageIndex++}",
                        new System.IO.MemoryStream(chunkEntry.FileData)));
                    break;
                default:
                    break;
            }
        }

        //Buffers can only be decoded once every pointer/mesh chunk has been seen.
        foreach (LM2_Model model in modelFolder.Nodes)
            model.ReadVertexBuffers();

        if (messageFolder.Nodes.Count > 0)
            Nodes.Add(messageFolder);
        if (modelFolder.Nodes.Count > 0)
            Nodes.Add(modelFolder);
        if (textureFolder.Nodes.Count > 0)
            Nodes.Add(textureFolder);
        if (materialNamesFolder.Nodes.Count > 0)
            Nodes.Add(materialNamesFolder);
    }
}
/// <summary>
/// Loads an LM2 archive (older/simpler variant): parses the header and file-entry
/// table, reads the chunk table from the first entry, then walks the sub chunks
/// in order to build textures, models and material names.
/// </summary>
/// <param name="stream">The raw (possibly compressed) archive stream.</param>
public void Load(System.IO.Stream stream)
{
    modelFolder = new LM2_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM2_Renderer();
    DrawableContainer.Drawables.Add(Renderer);
    Text = FileName;

    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;

        //--- Archive header ---
        uint Identifier = reader.ReadUInt32();
        ushort Unknown = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint FileCount = reader.ReadUInt32();
        uint LargestCompressedFile = reader.ReadUInt32();

        reader.SeekBegin(0x2C);
        byte[] Unknowns = reader.ReadBytes((int)FileCount); //One unknown byte per file entry.

        TreeNode tableNodes = new TreeNode("File Section Entries");

        //--- File entry table ---
        long FileTablePos = reader.Position;
        for (int i = 0; i < FileCount; i++)
        {
            var file = new FileEntry(this);
            file.Text = $"entry {i}";
            file.Read(reader);
            fileEntries.Add(file);
            tableNodes.Nodes.Add(file);

            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM2_ChunkTable();
                    ChunkTable.Read(tableReader);

                    //Debug views of the parsed chunk table.
                    TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                    Nodes.Add(debugFolder);

                    TreeNode list1 = new TreeNode("Entry List 1");
                    TreeNode list2 = new TreeNode("Entry List 2 ");
                    debugFolder.Nodes.Add(tableNodes);
                    debugFolder.Nodes.Add(list1);
                    debugFolder.Nodes.Add(list2);
                    debugFolder.Nodes.Add(chunkFolder);

                    foreach (var chunk in ChunkTable.ChunkEntries)
                    {
                        list1.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkOffset {chunk.ChunkOffset} Unknown1 {chunk.Unknown1} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                    }
                    foreach (var chunk in ChunkTable.ChunkSubEntries)
                    {
                        list2.Nodes.Add($"ChunkType {chunk.ChunkType} ChunkSize {chunk.ChunkSize} Unknown {chunk.ChunkOffset}");
                    }
                }
            }
        }

        //Set an instance of our current data
        //Chunks are in order, so you build off of when an instance gets loaded
        TexturePOWE currentTexture = new TexturePOWE();
        LM2_Model currentModel = new LM2_Model(this);

        //Each part of the file is divided into multiple file/section entries
        //The first entry being the chunk table parsed before this
        //The second file being a duplicate (sometimes slightly larger than the first)
        //The third file stores texture headers, while the fourth one usually has the rest of the main data
        //Any additional ones currently are unknown how they work. Some of which have unknown compression aswell
        byte[] File002Data = fileEntries[2].GetData(); //Get the third file
        byte[] File003Data = fileEntries[3].GetData(); //Get the fourth file

        int chunkId = 0;
        uint ImageHeaderIndex = 0;
        uint modelIndex = 0;

        //Walk every sub chunk in order. currentTexture/currentModel carry state
        //across iterations because data chunks follow their header chunks.
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            //Default data source is the fourth file; texture headers override this below.
            chunkEntry.DataFile = File003Data;
            chunkEntry.Text = $"Chunk {chunk.ChunkType} {chunkId++}";
            chunkEntries.Add(chunkEntry);
            chunkFolder.Nodes.Add(chunkEntry);

            switch (chunk.ChunkType)
            {
                case SubDataType.TextureHeader:
                    //Texture headers live in the third file, not the fourth.
                    chunkEntry.DataFile = File002Data;
                    //Read the info
                    using (var textureReader = new FileReader(chunkEntry.FileData))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);
                        currentTexture.Text = $"Texture {ImageHeaderIndex}";
                        textureFolder.Nodes.Add(currentTexture);
                        Renderer.TextureList.Add(currentTexture);
                        ImageHeaderIndex++;
                    }
                    break;
                case SubDataType.TextureData:
                    //Raw image data for the texture header parsed just before this chunk.
                    currentTexture.ImageData = chunkEntry.FileData;
                    break;
                case SubDataType.ModelStart:
                    //Starts a new model; subsequent chunks fill it in.
                    currentModel = new LM2_Model(this);
                    currentModel.ModelInfo = new LM2_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData;
                    modelFolder.Nodes.Add(currentModel);
                    modelIndex++;
                    break;
                case SubDataType.MeshBuffers:
                    //Raw buffer range; parsed later by ReadVertexBuffers().
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;
                case SubDataType.VertexStartPointers:
                    //A packed list of uint offsets, one per vertex buffer.
                    using (var vtxPtrReader = new FileReader(chunkEntry.FileData))
                    {
                        while (!vtxPtrReader.EndOfStream)
                        {
                            currentModel.VertexBufferPointers.Add(vtxPtrReader.ReadUInt32());
                        }
                    }
                    break;
                case SubDataType.SubmeshInfo:
                    //0x28 bytes per mesh header.
                    int MeshCount = chunkEntry.FileData.Length / 0x28;
                    using (var meshReader = new FileReader(chunkEntry.FileData))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            LM2_Mesh mesh = new LM2_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);
                        }
                    }
                    currentModel.ModelInfo.Read(new FileReader(currentModel.ModelInfo.Data), currentModel.Meshes);
                    break;
                case SubDataType.ModelTransform:
                    using (var transformReader = new FileReader(chunkEntry.FileData))
                    {
                        //This is possibly very wrong
                        //The data isn't always per mesh, but sometimes is
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                            {
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                            }
                        }
                    }
                    break;
                case SubDataType.MaterialName:
                    using (var matReader = new FileReader(chunkEntry.FileData))
                    {
                        materialNamesFolder.Nodes.Add(matReader.ReadZeroTerminatedString());
                    }
                    break;
                default:
                    break;
            }
        }

        //Buffers can only be decoded once every pointer/mesh chunk has been seen.
        foreach (LM2_Model model in modelFolder.Nodes)
        {
            model.ReadVertexBuffers();
        }

        if (modelFolder.Nodes.Count > 0)
        {
            Nodes.Add(modelFolder);
        }
        if (textureFolder.Nodes.Count > 0)
        {
            Nodes.Add(textureFolder);
        }
        if (materialNamesFolder.Nodes.Count > 0)
        {
            Nodes.Add(materialNamesFolder);
        }
    }
}
/// <summary>
/// Loads an LM3 archive: parses the header, chunk infos, file entries and string
/// table, then walks the chunk sub entries in order, routing each chunk's data to
/// the file section it lives in (052/053/054/063/065) and building textures,
/// models and Havok physics nodes.
/// </summary>
/// <param name="stream">The raw (possibly compressed) archive stream.</param>
public void Load(System.IO.Stream stream)
{
    CanSave = false;

    modelFolder = new LM3_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM3_Renderer();
    DrawableContainer.Drawables.Add(Renderer);
    Text = FileName;

    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;

        //--- Archive header ---
        uint Identifier = reader.ReadUInt32();
        Unknown0x4 = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint SizeLargestFile = reader.ReadUInt32();
        byte numFiles = reader.ReadByte();
        byte numChunkInfos = reader.ReadByte();
        byte numStrings = reader.ReadByte();
        reader.ReadByte(); //padding

        //Start of the chunk info. A fixed list of chunk information
        for (int i = 0; i < numChunkInfos; i++)
        {
            ChunkInfo chunk = new ChunkInfo();
            chunk.Read(reader);
            ChunkInfos.Add(chunk);
        }

        TreeNode tableNodes = new TreeNode("File Section Entries");
        TreeNode chunkLookupNodes = new TreeNode("Chunk Lookup Files");
        tableNodes.Nodes.Add(chunkLookupNodes);
        Nodes.Add(tableNodes);

        TreeNode stringFolder = new TreeNode("Strings");
        TreeNode chunkTexFolder = new TreeNode("Texture");
        TreeNode chunkModelFolder = new TreeNode("Model");

        //--- File entry table ---
        long FileTablePos = reader.Position;
        for (int i = 0; i < numFiles; i++)
        {
            var file = new FileEntry(this);
            file.Read(reader);
            fileEntries.Add(file);

            if (file.DecompressedSize > 0)
            {
                file.Text = $"entry {i}";
                //Entries below index 52 appear to be chunk lookup files (see the
                //fixed indices 52..65 used further down).
                if (i < 52)
                {
                    chunkLookupNodes.Nodes.Add(file);
                }
                else
                {
                    tableNodes.Nodes.Add(file);
                }
            }

            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM3_ChunkTable();
                    ChunkTable.Read(tableReader);

                    if (DebugMode)
                    {
                        //Debug views of the parsed chunk table.
                        TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                        Nodes.Add(debugFolder);

                        TreeNode list1 = new TreeNode("Entry List 1");
                        TreeNode list2 = new TreeNode("Entry List 2 ");
                        debugFolder.Nodes.Add(list1);
                        debugFolder.Nodes.Add(list2);
                        debugFolder.Nodes.Add(chunkFolder);

                        foreach (var chunk in ChunkTable.ChunkEntries)
                        {
                            list1.Nodes.Add($"ChunkType {chunk.ChunkType.ToString("X")} ChunkOffset {chunk.ChunkOffset} ChunkSize {chunk.ChunkSize} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                        }
                        foreach (var chunk in ChunkTable.ChunkSubEntries)
                        {
                            list2.Nodes.Add($"ChunkType 0x{chunk.ChunkType.ToString("X")} Size {chunk.ChunkSize} Offset {chunk.ChunkOffset}");
                        }
                    }
                }
            }
        }

        //--- String table ---
        for (int i = 0; i < numStrings; i++)
        {
            StringList.Add(reader.ReadZeroTerminatedString());
            stringFolder.Nodes.Add(StringList[i]);
        }

        TreeNode havokFolder = new TreeNode("Havok Physics");

        //Model data block
        //Contains texture hash refs and model headers
        var File052Data = fileEntries[52].GetData();
        //Unsure, layout data??
        var File053Data = fileEntries[53].GetData();
        //Contains model data
        var File054Data = fileEntries[54].GetData();
        //Image header block. Also contains shader data
        var File063Data = fileEntries[63].GetData();
        //Image data block
        var File065Data = fileEntries[65].GetData();

        //Get a list of chunk hashes
        List<uint> ModelHashes = new List<uint>();
        for (int i = 0; i < ChunkTable.ChunkEntries.Count; i++)
        {
            if (ChunkTable.ChunkEntries[i].ChunkType == DataType.Model)
            {
                using (var chunkReader = new FileReader(File052Data, true))
                {
                    chunkReader.SeekBegin(ChunkTable.ChunkEntries[i].ChunkOffset);
                    uint magic = chunkReader.ReadUInt32();
                    uint hash = chunkReader.ReadUInt32();
                    ModelHashes.Add(hash);
                }
            }
        }

        //Set an instance of our current data
        //Chunks are in order, so you build off of when an instance gets loaded
        LM3_Model currentModel = new LM3_Model(this);
        //NOTE(review): currentModelChunk stays null until the first MaterailData
        //chunk; a model-related chunk arriving before that would NRE below — this
        //presumably never happens in real files, but confirm.
        TreeNode currentModelChunk = null;
        TexturePOWE currentTexture = new TexturePOWE();
        //NOTE(review): likewise, SubmeshInfo assumes a VertexStartPointers chunk
        //was already seen (currentVertexPointerList non-null).
        ChunkDataEntry currentVertexPointerList = null;

        List<uint> TextureHashes = new List<uint>();

        int chunkId = 0;
        uint modelIndex = 0;
        uint ImageHeaderIndex = 0;
        uint havokFileIndex = 0;

        //Walk every sub chunk in order; state carries across iterations because
        //data chunks follow their header chunks.
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            chunkEntry.Text = $"{chunkId} {chunk.ChunkType.ToString("X")} {chunk.ChunkType} {chunk.ChunkOffset} {chunk.ChunkSize}";

            switch (chunk.ChunkType)
            {
                case SubDataType.HavokPhysics:
                    chunkEntry.DataFile = File052Data;
                    chunkEntry.Text = $"File_{havokFileIndex++}.hkx";
                    havokFolder.Nodes.Add(chunkEntry);
                    break;
                case SubDataType.TextureHeader:
                    chunkEntry.DataFile = File063Data;
                    //Read the info
                    using (var textureReader = new FileReader(chunkEntry.FileData, true))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.HeaderOffset = chunk.ChunkOffset;
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);

                        if (DebugMode)
                        {
                            currentTexture.Text = $"Texture {ImageHeaderIndex} {currentTexture.Unknown} {currentTexture.Unknown2} {currentTexture.Unknown3.ToString("X")}";
                        }
                        else
                        {
                            currentTexture.Text = $"Texture {currentTexture.ID2.ToString("X")}";
                        }
                        //Prefer the known readable name when the hash is recognized.
                        if (NLG_Common.HashNames.ContainsKey(currentTexture.ID2))
                        {
                            currentTexture.Text = NLG_Common.HashNames[currentTexture.ID2];
                        }

                        textureFolder.Nodes.Add(currentTexture);
                        //Textures are keyed by hash; skip duplicates.
                        if (!Renderer.TextureList.ContainsKey(currentTexture.ID2.ToString("x")))
                        {
                            Renderer.TextureList.Add(currentTexture.ID2.ToString("x"), currentTexture);
                        }

                        TextureHashes.Add(currentTexture.ID2);
                        ImageHeaderIndex++;
                    }
                    break;
                case SubDataType.TextureData:
                    chunkEntry.DataFile = File065Data;
                    currentTexture.DataOffset = chunk.ChunkOffset;
                    currentTexture.ImageData = chunkEntry.FileData.ToBytes();
                    break;
                case SubDataType.ModelInfo:
                    chunkEntry.DataFile = File052Data;
                    //12 bytes per model record: hash, mesh count, one zero.
                    uint numModels = chunk.ChunkSize / 12;
                    using (var dataReader = new FileReader(chunkEntry.FileData, true))
                    {
                        for (int i = 0; i < numModels; i++)
                        {
                            uint hashID = dataReader.ReadUInt32();
                            uint numMeshes = dataReader.ReadUInt32();
                            dataReader.ReadUInt32(); //0

                            string text = hashID.ToString("X");
                            if (NLG_Common.HashNames.ContainsKey(hashID))
                            {
                                text = NLG_Common.HashNames[hashID];
                            }
                            //Append the hash name(s) to the model label.
                            currentModel.Text = $"{currentModel.Text} [{text}]";
                        }
                    }
                    break;
                case SubDataType.MaterailData:
                    //Starts a new model; subsequent chunks fill it in.
                    currentModelChunk = new TreeNode($"Model {modelIndex}");
                    chunkFolder.Nodes.Add(currentModelChunk);

                    chunkEntry.DataFile = File052Data;
                    currentModel = new LM3_Model(this);
                    currentModel.ModelInfo = new LM3_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData.ToBytes();

                    //Prefer the model hash (or its known readable name) as the label.
                    if (ModelHashes.Count > modelIndex)
                    {
                        currentModel.Text = $"Model {modelIndex} {ModelHashes[(int)modelIndex].ToString("x")}";
                        if (NLG_Common.HashNames.ContainsKey(ModelHashes[(int)modelIndex]))
                        {
                            currentModel.Text = NLG_Common.HashNames[ModelHashes[(int)modelIndex]];
                        }
                    }
                    modelIndex++;
                    break;
                case SubDataType.MeshBuffers:
                    chunkEntry.DataFile = File054Data;
                    //Raw buffer range; decoded later via ModelInfo.Read below.
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;
                case SubDataType.VertexStartPointers:
                    chunkEntry.DataFile = File052Data;
                    //Kept for the following SubmeshInfo chunk to consume.
                    currentVertexPointerList = chunkEntry;
                    break;
                case SubDataType.SubmeshInfo:
                    chunkEntry.DataFile = File052Data;
                    //0x40 bytes per mesh header, read in lockstep with the pointer list.
                    int MeshCount = (int)chunkEntry.FileData.Length / 0x40;
                    using (var vtxPtrReader = new FileReader(currentVertexPointerList.FileData, true))
                    using (var meshReader = new FileReader(chunkEntry.FileData, true))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            meshReader.SeekBegin(i * 0x40);
                            LM3_Mesh mesh = new LM3_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);

                            var buffer = new LM3_Model.PointerInfo();
                            //0xFFFFFFFF marks a mesh without the extra pointer entry.
                            buffer.Read(vtxPtrReader, mesh.Unknown3 != 4294967295);
                            currentModel.VertexBufferPointers.Add(buffer);
                        }
                    }
                    modelFolder.Nodes.Add(currentModel);
                    break;
                case SubDataType.ModelTransform:
                    chunkEntry.DataFile = File052Data;
                    using (var transformReader = new FileReader(chunkEntry.FileData, true))
                    {
                        //This is possibly very wrong
                        //The data isn't always per mesh, but sometimes is
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                            {
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                            }
                        }
                    }
                    break;
                case SubDataType.MaterialName:
                    chunkEntry.DataFile = File053Data;
                    /* using (var matReader = new FileReader(chunkEntry.FileData))
                     * {
                     *     materialNamesFolder.Nodes.Add(matReader.ReadZeroTerminatedString());
                     * }*/
                    break;
                case SubDataType.UILayoutMagic:
                    chunkEntry.DataFile = File053Data;
                    break;
                case SubDataType.UILayout:
                    chunkEntry.DataFile = File053Data;
                    break;
                case SubDataType.BoneData:
                    //Bone parsing is disabled for LM3; only the data file routing
                    //is kept so the chunk still shows in the UI.
                    if (chunk.ChunkSize > 0x40 && currentModel.Skeleton == null)
                    {
                        chunkEntry.DataFile = File052Data;
                        using (var boneReader = new FileReader(chunkEntry.FileData, true))
                        {
                            /* currentModel.Skeleton = new STSkeleton();
                             * DrawableContainer.Drawables.Add(currentModel.Skeleton);
                             *
                             * uint numBones = chunk.ChunkSize / 0x40;
                             * for (int i = 0; i < numBones; i++)
                             * {
                             *     boneReader.SeekBegin(i * 0x40);
                             *     uint HashID = boneReader.ReadUInt32();
                             *     boneReader.ReadUInt32(); //unk
                             *     boneReader.ReadUInt32(); //unk
                             *     boneReader.ReadSingle(); //0
                             *     var Scale = new OpenTK.Vector3(
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle());
                             *     boneReader.ReadSingle(); //0
                             *     var Rotate = new OpenTK.Vector3(
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle());
                             *     boneReader.ReadSingle(); //0
                             *     var Position = new OpenTK.Vector3(
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle());
                             *     float test = boneReader.ReadSingle(); //1
                             *     STBone bone = new STBone(currentModel.Skeleton);
                             *     bone.Text = HashID.ToString("x");
                             *     // if (HashNames.ContainsKey(HashID))
                             *     //     bone.Text = HashNames[HashID];
                             *     // else
                             *     //     Console.WriteLine($"bone hash {HashID}");
                             *
                             *     bone.position = new float[3] { Position.X, Position.Z, Position.Y };
                             *     bone.rotation = new float[4] { Rotate.X, Rotate.Y, Rotate.Z, 1 };
                             *     bone.scale = new float[3] { 0.2f, 0.2f, 0.2f };
                             *
                             *     bone.RotationType = STBone.BoneRotationType.Euler;
                             *     currentModel.Skeleton.bones.Add(bone);
                             * }
                             *
                             * currentModel.Skeleton.reset();
                             * currentModel.Skeleton.update();*/
                        }
                    }
                    break;
                case (SubDataType)0x5012:
                case (SubDataType)0x5013:
                case (SubDataType)0x5014:
                    //Unknown texture-block chunks; routed to the image header block.
                    chunkEntry.DataFile = File063Data;
                    break;
                case (SubDataType)0x7101:
                case (SubDataType)0x7102:
                case (SubDataType)0x7103:
                case (SubDataType)0x7104:
                case (SubDataType)0x7106:
                case (SubDataType)0x6503:
                case (SubDataType)0x6501:
                    //Unknown chunks; routed to file 053.
                    chunkEntry.DataFile = File053Data;
                    break;
                /* case (SubDataType)0x7105:
                 *     chunkEntry.DataFile = File053Data;
                 *     using (var chunkReader = new FileReader(chunkEntry.FileData))
                 *     {
                 *         while (chunkReader.Position <= chunkReader.BaseStream.Length - 8)
                 *         {
                 *             uint hash = chunkReader.ReadUInt32();
                 *             uint unk = chunkReader.ReadUInt32();
                 *
                 *             if (HashNames.ContainsKey(hash))
                 *                 Console.WriteLine("Hash Match! " + HashNames[hash]);
                 *         }
                 *     }
                 *     break;*/
                case SubDataType.BoneHashList:
                    chunkEntry.DataFile = File053Data;
                    //Debug-only scan; the hashes are read but not stored anywhere yet.
                    using (var chunkReader = new FileReader(chunkEntry.FileData, true))
                    {
                        while (chunkReader.Position <= chunkReader.BaseStream.Length - 4)
                        {
                            uint hash = chunkReader.ReadUInt32();
                            // if (HashNames.ContainsKey(hash))
                            //     Console.WriteLine("Hash Match! " + HashNames[hash]);
                        }
                    }
                    break;
                default:
                    chunkEntry.DataFile = File052Data;
                    break;
            }

            //Model-related chunks nest under the current model node; everything
            //else (except Havok, already added above) goes in the flat chunk list.
            if (chunk.ChunkType == SubDataType.MaterailData ||
                chunk.ChunkType == SubDataType.ModelInfo ||
                chunk.ChunkType == SubDataType.MeshBuffers ||
                chunk.ChunkType == SubDataType.MeshIndexTable ||
                chunk.ChunkType == SubDataType.SubmeshInfo ||
                chunk.ChunkType == SubDataType.BoneHashList ||
                chunk.ChunkType == SubDataType.BoneData)
            {
                currentModelChunk.Nodes.Add(chunkEntry);
            }
            else if (chunk.ChunkType != SubDataType.HavokPhysics)
            {
                chunkFolder.Nodes.Add(chunkEntry);
            }

            chunkId++;
        }

        //Buffers can only be decoded once every chunk has been routed.
        foreach (var model in modelFolder.Nodes)
        {
            ((LM3_Model)model).ModelInfo.Read(new FileReader(
                ((LM3_Model)model).ModelInfo.Data), ((LM3_Model)model), TextureHashes);
        }

        if (havokFolder.Nodes.Count > 0)
        {
            Nodes.Add(havokFolder);
        }
        if (textureFolder.Nodes.Count > 0)
        {
            Nodes.Add(textureFolder);
        }
        if (modelFolder.Nodes.Count > 0)
        {
            Nodes.Add(modelFolder);
        }
        if (stringFolder.Nodes.Count > 0)
        {
            Nodes.Add(stringFolder);
        }
    }
}
/// <summary>
/// Parses the chunk table: a flat stream of file entries (tagged with
/// ChunkInfoIdenfier) and raw data entries, then wires children onto the
/// entries that reference them by global index.
/// </summary>
/// <param name="reader">Reader positioned at the start of the table data.</param>
void Read(FileReader reader)
{
    //File is empty so return
    if (reader.BaseStream.Length <= 4)
    {
        return;
    }

    //Maps each entry's global index to the entry parsed there. File entries
    //occupy two index slots; every other entry occupies one.
    Dictionary<int, Chunk> globalChunkList = new Dictionary<int, Chunk>();
    int globalIndex = 0;

    reader.SetByteOrder(false);
    while (reader.Position <= reader.BaseStream.Length - 12)
    {
        //Read through all sections that use an identifier
        //These sections determine when a file is used or else using raw data.
        ushort identifier = reader.ReadUInt16();
        if (identifier == ChunkInfoIdenfier)
        {
            reader.ReadUInt16(); //Skip padding flag

            ChunkEntry entry = new ChunkEntry();
            entry.ChunkSize = reader.ReadUInt32();
            entry.ChunkOffset = reader.ReadUInt32();
            entry.ChunkType = (ChunkFileType)reader.ReadUInt16();
            entry.Flags = reader.ReadUInt16();
            entry.Flags2 = reader.ReadUInt32(); //Child Count or File Size
            entry.Flags3 = reader.ReadUInt32(); //Child Start Index or File Offset
            Files.Add(entry);
            globalChunkList.Add(globalIndex, entry);

            //File entries shift global index by 2
            globalIndex += 2;

            //Additional chunk entry
            if ((int)entry.ChunkType == 0x11) //This file seems to use same hash as some of the model files.
            {
                ChunkEntry secondaryEntry = new ChunkEntry();
                secondaryEntry.Flags2 = reader.ReadUInt32(); //Child Count or File Size
                secondaryEntry.Flags3 = reader.ReadUInt32(); //Child Start Index or File Offset
                secondaryEntry.ChunkType = (ChunkFileType)reader.ReadUInt16();
                secondaryEntry.Flags = reader.ReadUInt16();
                Files.Add(secondaryEntry);

                //BUGFIX: register the secondary entry itself at this slot. The
                //original code re-added the primary `entry` here (copy/paste
                //slip), leaving secondaryEntry unreachable via the index map.
                globalChunkList.Add(globalIndex, secondaryEntry);

                //Extra entries shift global index by 1
                globalIndex += 1;
            }

            //Extension to the existing file entry?
            //Possibly includes both sub chunks and data offset/size chunks in one
            if ((int)entry.ChunkType == 0x20)
            {
                //The 12 extension bytes are not stored anywhere yet; they are read
                //only to advance the stream (layout mirrors the 0x11 case above).
                reader.ReadUInt32(); //Child Count or File Size
                reader.ReadUInt32(); //Child Start Index or File Offset
                reader.ReadUInt16(); //ChunkFileType
                reader.ReadUInt16(); //Flags

                //Extra entries shift global index by 1
                //NOTE(review): unlike 0x11, nothing is registered in globalChunkList
                //for this slot — confirm whether that is intentional.
                globalIndex += 1;
            }
        }
        else
        {
            //Not a file entry: rewind the identifier and parse a raw data entry.
            reader.Seek(-2);

            ChunkDataEntry subEntry = new ChunkDataEntry();
            subEntry.ChunkType = reader.ReadEnum<ChunkDataType>(false); //The type of chunk. 0x8701B5 for example for texture info
            subEntry.Flags = reader.ReadUInt16();
            subEntry.ChunkSize = reader.ReadUInt32();
            subEntry.ChunkOffset = reader.ReadUInt32();

            //The top 4 bits of the flags select which data block the chunk reads from.
            byte blockFlag = (byte)((subEntry.Flags >> 12));
            if (blockFlag < 8)
            {
                subEntry.BlockIndex = blockFlag;
            }

            DataEntries.Add(subEntry);
            globalChunkList.Add(globalIndex, subEntry);
            globalIndex += 1;
        }
    }

    //Some tables contain no file entries at all; fall back to one synthetic
    //file entry that owns every data entry.
    if (Files.Count == 0)
    {
        var file = new ChunkEntry();
        file.SubData.AddRange(DataEntries);
        Files.Add(file);
    }

    //Attach children to container data entries. For these types ChunkOffset is
    //the child start index and ChunkSize is the child count.
    for (int i = 0; i < DataEntries.Count; i++)
    {
        if (DataEntries[i].ChunkType == ChunkDataType.BoneStart ||
            DataEntries[i].ChunkType == (ChunkDataType)0xC800 ||
            DataEntries[i].ChunkType == (ChunkDataType)0x6200 ||
            DataEntries[i].ChunkType == (ChunkDataType)0x6500)
        {
            // Console.WriteLine($"BONEFLAGS {DataEntries[i].ChunkFlags} {((DataEntries[i].ChunkFlags >> 12) > 2)}");
            for (int f = 0; f < DataEntries[i].ChunkSize; f++)
            {
                DataEntries[i].SubData.Add((ChunkDataEntry)globalChunkList[(int)DataEntries[i].ChunkOffset + f]);
            }
        }
    }

    //Attach children to file entries (Flags3 = child start index, Flags2 = count).
    for (int i = 0; i < Files.Count; i++)
    {
        if (Files[i].HasSubData && globalChunkList.ContainsKey((int)Files[i].Flags3))
        {
            Files[i].BeginIndex = (int)Files[i].Flags3;
            for (int f = 0; f < Files[i].Flags2; f++)
            {
                Files[i].SubData.Add((ChunkDataEntry)globalChunkList[Files[i].BeginIndex + f]);
            }
        }
    }
}