/// <summary>
/// Parses an LM3 archive from <paramref name="stream"/> and populates the UI tree:
/// file-table entries, string list, and — driven by the chunk table stored in file 0 —
/// textures, models, havok physics files and miscellaneous chunk data.
/// </summary>
/// <param name="stream">Stream positioned at the start of the LM3 archive header.</param>
public void Load(System.IO.Stream stream)
{
    CanSave = false;
    modelFolder = new LM3_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM3_Renderer();
    DrawableContainer.Drawables.Add(Renderer);
    Text = FileName;

    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;

        // ---- Archive header ----
        uint Identifier = reader.ReadUInt32(); //File magic; value currently unused but the read advances the stream.
        Unknown0x4 = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint SizeLargestFile = reader.ReadUInt32(); //Unused, but must be consumed to stay aligned.
        byte numFiles = reader.ReadByte();
        byte numChunkInfos = reader.ReadByte();
        byte numStrings = reader.ReadByte();
        reader.ReadByte(); //padding

        //Start of the chunk info. A fixed list of chunk information.
        for (int i = 0; i < numChunkInfos; i++)
        {
            ChunkInfo chunk = new ChunkInfo();
            chunk.Read(reader);
            ChunkInfos.Add(chunk);
        }

        TreeNode tableNodes = new TreeNode("File Section Entries");
        TreeNode chunkLookupNodes = new TreeNode("Chunk Lookup Files");
        tableNodes.Nodes.Add(chunkLookupNodes);
        Nodes.Add(tableNodes);

        TreeNode stringFolder = new TreeNode("Strings");

        // ---- File table ----
        for (int i = 0; i < numFiles; i++)
        {
            var file = new FileEntry(this);
            file.Read(reader);
            fileEntries.Add(file);

            if (file.DecompressedSize > 0)
            {
                file.Text = $"entry {i}";
                //Entries below index 52 are chunk-lookup files; the rest are data sections.
                if (i < 52)
                    chunkLookupNodes.Nodes.Add(file);
                else
                    tableNodes.Nodes.Add(file);
            }

            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM3_ChunkTable();
                    ChunkTable.Read(tableReader);

                    if (DebugMode)
                    {
                        TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                        Nodes.Add(debugFolder);

                        TreeNode list1 = new TreeNode("Entry List 1");
                        TreeNode list2 = new TreeNode("Entry List 2 ");
                        debugFolder.Nodes.Add(list1);
                        debugFolder.Nodes.Add(list2);
                        debugFolder.Nodes.Add(chunkFolder);

                        foreach (var chunk in ChunkTable.ChunkEntries)
                        {
                            list1.Nodes.Add($"ChunkType {chunk.ChunkType.ToString("X")} ChunkOffset {chunk.ChunkOffset} ChunkSize {chunk.ChunkSize} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                        }
                        foreach (var chunk in ChunkTable.ChunkSubEntries)
                        {
                            list2.Nodes.Add($"ChunkType 0x{chunk.ChunkType.ToString("X")} Size {chunk.ChunkSize} Offset {chunk.ChunkOffset}");
                        }
                    }
                }
            }
        }

        // ---- String table (zero-terminated strings directly after the file table) ----
        for (int i = 0; i < numStrings; i++)
        {
            StringList.Add(reader.ReadZeroTerminatedString());
            stringFolder.Nodes.Add(StringList[i]);
        }

        TreeNode havokFolder = new TreeNode("Havok Physics");

        //Fixed file indices referenced by chunk entries:
        //Model data block. Contains texture hash refs and model headers.
        var File052Data = fileEntries[52].GetData();
        //Unsure, layout data??
        var File053Data = fileEntries[53].GetData();
        //Contains model data.
        var File054Data = fileEntries[54].GetData();
        //Image header block. Also contains shader data.
        var File063Data = fileEntries[63].GetData();
        //Image data block.
        var File065Data = fileEntries[65].GetData();

        //Get a list of model chunk hashes, in chunk order, so models can be named later.
        List<uint> ModelHashes = new List<uint>();
        for (int i = 0; i < ChunkTable.ChunkEntries.Count; i++)
        {
            if (ChunkTable.ChunkEntries[i].ChunkType == DataType.Model)
            {
                using (var chunkReader = new FileReader(File052Data, true))
                {
                    chunkReader.SeekBegin(ChunkTable.ChunkEntries[i].ChunkOffset);
                    uint magic = chunkReader.ReadUInt32(); //Skipped; hash follows it.
                    uint hash = chunkReader.ReadUInt32();
                    ModelHashes.Add(hash);
                }
            }
        }

        //Set an instance of our current data.
        //Chunks are in order, so you build off of when an instance gets loaded.
        LM3_Model currentModel = new LM3_Model(this);
        TreeNode currentModelChunk = null;
        TexturePOWE currentTexture = new TexturePOWE();
        ChunkDataEntry currentVertexPointerList = null;
        List<uint> TextureHashes = new List<uint>();

        int chunkId = 0;
        uint modelIndex = 0;
        uint ImageHeaderIndex = 0;
        uint havokFileIndex = 0;
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            chunkEntry.Text = $"{chunkId} {chunk.ChunkType.ToString("X")} {chunk.ChunkType} {chunk.ChunkOffset} {chunk.ChunkSize}";

            switch (chunk.ChunkType)
            {
                case SubDataType.HavokPhysics:
                    chunkEntry.DataFile = File052Data;
                    chunkEntry.Text = $"File_{havokFileIndex++}.hkx";
                    havokFolder.Nodes.Add(chunkEntry);
                    break;

                case SubDataType.TextureHeader:
                    chunkEntry.DataFile = File063Data;
                    //Read the texture header; the pixel data arrives later via TextureData.
                    using (var textureReader = new FileReader(chunkEntry.FileData, true))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.HeaderOffset = chunk.ChunkOffset;
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);
                        if (DebugMode)
                            currentTexture.Text = $"Texture {ImageHeaderIndex} {currentTexture.Unknown} {currentTexture.Unknown2} {currentTexture.Unknown3.ToString("X")}";
                        else
                            currentTexture.Text = $"Texture {currentTexture.ID2.ToString("X")}";

                        //Prefer a known hash name over the generated label when one exists.
                        if (NLG_Common.HashNames.TryGetValue(currentTexture.ID2, out string textureName))
                            currentTexture.Text = textureName;

                        textureFolder.Nodes.Add(currentTexture);
                        if (!Renderer.TextureList.ContainsKey(currentTexture.ID2.ToString("x")))
                            Renderer.TextureList.Add(currentTexture.ID2.ToString("x"), currentTexture);
                        TextureHashes.Add(currentTexture.ID2);

                        ImageHeaderIndex++;
                    }
                    break;

                case SubDataType.TextureData:
                    //Pixel data for the most recently read TextureHeader.
                    chunkEntry.DataFile = File065Data;
                    currentTexture.DataOffset = chunk.ChunkOffset;
                    currentTexture.ImageData = chunkEntry.FileData.ToBytes();
                    break;

                case SubDataType.ModelInfo:
                    chunkEntry.DataFile = File052Data;
                    uint numModels = chunk.ChunkSize / 12; //12 bytes per entry: hash, mesh count, padding.
                    using (var dataReader = new FileReader(chunkEntry.FileData, true))
                    {
                        for (int i = 0; i < numModels; i++)
                        {
                            uint hashID = dataReader.ReadUInt32();
                            uint numMeshes = dataReader.ReadUInt32(); //Unused here; read to stay aligned.
                            dataReader.ReadUInt32(); //0

                            string text = hashID.ToString("X");
                            if (NLG_Common.HashNames.TryGetValue(hashID, out string hashName))
                                text = hashName;

                            currentModel.Text = $"{currentModel.Text} [{text}]";
                        }
                    }
                    break;

                case SubDataType.MaterailData:
                    //Material data marks the start of a new model's chunk group.
                    currentModelChunk = new TreeNode($"Model {modelIndex}");
                    chunkFolder.Nodes.Add(currentModelChunk);

                    chunkEntry.DataFile = File052Data;
                    currentModel = new LM3_Model(this);
                    currentModel.ModelInfo = new LM3_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData.ToBytes();
                    if (ModelHashes.Count > modelIndex)
                    {
                        currentModel.Text = $"Model {modelIndex} {ModelHashes[(int)modelIndex].ToString("x")}";
                        if (NLG_Common.HashNames.TryGetValue(ModelHashes[(int)modelIndex], out string modelName))
                            currentModel.Text = modelName;
                    }
                    modelIndex++;
                    break;

                case SubDataType.MeshBuffers:
                    chunkEntry.DataFile = File054Data;
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;

                case SubDataType.VertexStartPointers:
                    //Saved so the following SubmeshInfo chunk can pair meshes with buffer pointers.
                    chunkEntry.DataFile = File052Data;
                    currentVertexPointerList = chunkEntry;
                    break;

                case SubDataType.SubmeshInfo:
                    chunkEntry.DataFile = File052Data;
                    int MeshCount = (int)chunkEntry.FileData.Length / 0x40; //One 0x40-byte header per mesh.
                    //NOTE(review): assumes a VertexStartPointers chunk always precedes this one;
                    //currentVertexPointerList would be null otherwise.
                    using (var vtxPtrReader = new FileReader(currentVertexPointerList.FileData, true))
                    using (var meshReader = new FileReader(chunkEntry.FileData, true))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            meshReader.SeekBegin(i * 0x40);
                            LM3_Mesh mesh = new LM3_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);

                            var buffer = new LM3_Model.PointerInfo();
                            buffer.Read(vtxPtrReader, mesh.Unknown3 != 4294967295);
                            currentModel.VertexBufferPointers.Add(buffer);
                        }
                    }
                    modelFolder.Nodes.Add(currentModel);
                    break;

                case SubDataType.ModelTransform:
                    chunkEntry.DataFile = File052Data;
                    using (var transformReader = new FileReader(chunkEntry.FileData, true))
                    {
                        //This is possibly very wrong.
                        //The data isn't always per mesh, but sometimes is.
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                        }
                    }
                    break;

                case SubDataType.MaterialName:
                    chunkEntry.DataFile = File053Data;
                    //Zero-terminated material name strings; parsing currently disabled.
                    break;

                case SubDataType.UILayoutMagic:
                    chunkEntry.DataFile = File053Data;
                    break;

                case SubDataType.UILayout:
                    chunkEntry.DataFile = File053Data;
                    break;

                case SubDataType.BoneData:
                    if (chunk.ChunkSize > 0x40 && currentModel.Skeleton == null)
                    {
                        chunkEntry.DataFile = File052Data;
                        //Bone parsing is disabled until the layout is confirmed.
                        //Research so far: 0x40-byte entries of hash, scale (vec3),
                        //rotation (vec3, euler) and position (vec3), each padded with
                        //a float, intended to fill an STSkeleton of STBones.
                    }
                    break;

                case (SubDataType)0x5012:
                case (SubDataType)0x5013:
                case (SubDataType)0x5014:
                    chunkEntry.DataFile = File063Data;
                    break;

                case (SubDataType)0x7101:
                case (SubDataType)0x7102:
                case (SubDataType)0x7103:
                case (SubDataType)0x7104:
                case (SubDataType)0x7106:
                case (SubDataType)0x6503:
                case (SubDataType)0x6501:
                    chunkEntry.DataFile = File053Data;
                    break;
                //(SubDataType)0x7105 looked like hash/unknown uint pairs in File053Data;
                //parsing is disabled until confirmed.

                case SubDataType.BoneHashList:
                    chunkEntry.DataFile = File053Data;
                    using (var chunkReader = new FileReader(chunkEntry.FileData, true))
                    {
                        //Scan the uint32 hash list; currently only consumed, not stored.
                        while (chunkReader.Position <= chunkReader.BaseStream.Length - 4)
                        {
                            uint hash = chunkReader.ReadUInt32();
                            // if (HashNames.ContainsKey(hash))
                            //     Console.WriteLine("Hash Match! " + HashNames[hash]);
                        }
                    }
                    break;

                default:
                    chunkEntry.DataFile = File052Data;
                    break;
            }

            //Model-related chunks nest under the current model's node; everything
            //else (except havok, which has its own folder) goes in the flat chunk list.
            if (chunk.ChunkType == SubDataType.MaterailData ||
                chunk.ChunkType == SubDataType.ModelInfo ||
                chunk.ChunkType == SubDataType.MeshBuffers ||
                chunk.ChunkType == SubDataType.MeshIndexTable ||
                chunk.ChunkType == SubDataType.SubmeshInfo ||
                chunk.ChunkType == SubDataType.BoneHashList ||
                chunk.ChunkType == SubDataType.BoneData)
            {
                //NOTE(review): assumes a MaterailData chunk has already created
                //currentModelChunk; null otherwise.
                currentModelChunk.Nodes.Add(chunkEntry);
            }
            else if (chunk.ChunkType != SubDataType.HavokPhysics)
            {
                chunkFolder.Nodes.Add(chunkEntry);
            }
            chunkId++;
        }

        //Resolve model info (materials/texture refs) now that all texture hashes are known.
        foreach (var node in modelFolder.Nodes)
        {
            var model = (LM3_Model)node;
            model.ModelInfo.Read(new FileReader(model.ModelInfo.Data), model, TextureHashes);
        }

        //Only attach folders that actually have content.
        if (havokFolder.Nodes.Count > 0)
            Nodes.Add(havokFolder);
        if (textureFolder.Nodes.Count > 0)
            Nodes.Add(textureFolder);
        if (modelFolder.Nodes.Count > 0)
            Nodes.Add(modelFolder);
        if (stringFolder.Nodes.Count > 0)
            Nodes.Add(stringFolder);
    }
}