//Parses an LM3 archive stream into the tool's tree view and renderer.
//Flow: fixed header -> chunk infos -> file entry table (file 0 holds the chunk
//table) -> string list -> walk of ChunkTable.ChunkSubEntries building texture,
//model and havok nodes.
//NOTE(review): relies on members declared outside this view (textureFolder,
//chunkFolder, fileEntries, StringList, ChunkInfos, DebugMode, the SubDataType /
//DataType enums, FileReader, TexturePOWE, LM3_* types) — verify against the
//rest of the class.
public void Load(System.IO.Stream stream)
{
    CanSave = false; //Archive is treated as read-only by this loader.
    modelFolder = new LM3_ModelFolder(this);
    DrawableContainer.Name = FileName;
    Renderer = new LM3_Renderer();
    DrawableContainer.Drawables.Add(Renderer);
    Text = FileName;
    using (var reader = new FileReader(stream))
    {
        reader.ByteOrder = Syroot.BinaryData.ByteOrder.LittleEndian;
        uint Identifier = reader.ReadUInt32(); //Magic; not validated here.
        Unknown0x4 = reader.ReadUInt16(); //Could also be 2 bytes, not sure. Always 0x0401
        IsCompressed = reader.ReadByte() == 1;
        reader.ReadByte(); //Padding
        uint SizeLargestFile = reader.ReadUInt32();
        byte numFiles = reader.ReadByte();
        byte numChunkInfos = reader.ReadByte();
        byte numStrings = reader.ReadByte();
        reader.ReadByte(); //padding

        //Start of the chunk info. A fixed list of chunk information
        for (int i = 0; i < numChunkInfos; i++)
        {
            ChunkInfo chunk = new ChunkInfo();
            chunk.Read(reader);
            ChunkInfos.Add(chunk);
        }

        //Tree scaffolding for the UI.
        TreeNode tableNodes = new TreeNode("File Section Entries");
        TreeNode chunkLookupNodes = new TreeNode("Chunk Lookup Files");
        tableNodes.Nodes.Add(chunkLookupNodes);
        Nodes.Add(tableNodes);
        TreeNode stringFolder = new TreeNode("Strings");
        //NOTE(review): chunkTexFolder/chunkModelFolder are created but not
        //referenced again in this method (textureFolder/chunkFolder fields are
        //used instead) — confirm whether they are dead locals.
        TreeNode chunkTexFolder = new TreeNode("Texture");
        TreeNode chunkModelFolder = new TreeNode("Model");
        long FileTablePos = reader.Position;
        for (int i = 0; i < numFiles; i++)
        {
            var file = new FileEntry(this);
            file.Read(reader);
            fileEntries.Add(file);
            if (file.DecompressedSize > 0)
            {
                file.Text = $"entry {i}";
                //Entries below index 52 are presumably chunk-lookup files;
                //52+ are the data sections read below — TODO confirm the
                //magic threshold 52 against the format spec.
                if (i < 52)
                {
                    chunkLookupNodes.Nodes.Add(file);
                }
                else
                {
                    tableNodes.Nodes.Add(file);
                }
            }
            //The first file stores a chunk layout
            //The second one seems to be a duplicate?
            if (i == 0)
            {
                using (var tableReader = new FileReader(file.GetData()))
                {
                    ChunkTable = new LM3_ChunkTable();
                    ChunkTable.Read(tableReader);
                    if (DebugMode)
                    {
                        //Dump both chunk entry lists for format research.
                        TreeNode debugFolder = new TreeNode("DEBUG TABLE INFO");
                        Nodes.Add(debugFolder);
                        TreeNode list1 = new TreeNode("Entry List 1");
                        TreeNode list2 = new TreeNode("Entry List 2 ");
                        debugFolder.Nodes.Add(list1);
                        debugFolder.Nodes.Add(list2);
                        debugFolder.Nodes.Add(chunkFolder);
                        foreach (var chunk in ChunkTable.ChunkEntries)
                        {
                            list1.Nodes.Add($"ChunkType {chunk.ChunkType.ToString("X")} ChunkOffset {chunk.ChunkOffset} ChunkSize {chunk.ChunkSize} ChunkSubCount {chunk.ChunkSubCount} Unknown3 {chunk.Unknown3}");
                        }
                        foreach (var chunk in ChunkTable.ChunkSubEntries)
                        {
                            list2.Nodes.Add($"ChunkType 0x{chunk.ChunkType.ToString("X")} Size {chunk.ChunkSize} Offset {chunk.ChunkOffset}");
                        }
                    }
                }
            }
        }

        //Null-terminated string pool directly after the file table.
        for (int i = 0; i < numStrings; i++)
        {
            StringList.Add(reader.ReadZeroTerminatedString());
            stringFolder.Nodes.Add(StringList[i]);
        }

        TreeNode havokFolder = new TreeNode("Havok Physics");

        //Fixed file indices below (52/53/54/63/65) are assumed layout slots —
        //an archive with fewer entries would throw; TODO confirm these are
        //always present.
        //Model data block
        //Contains texture hash refs and model headers
        var File052Data = fileEntries[52].GetData();
        //Unsure, layout data??
        var File053Data = fileEntries[53].GetData();
        //Contains model data
        var File054Data = fileEntries[54].GetData();
        //Image header block. Also contains shader data
        var File063Data = fileEntries[63].GetData();
        //Image data block
        var File065Data = fileEntries[65].GetData();

        //Get a list of chunk hashes
        List<uint> ModelHashes = new List<uint>();
        for (int i = 0; i < ChunkTable.ChunkEntries.Count; i++)
        {
            if (ChunkTable.ChunkEntries[i].ChunkType == DataType.Model)
            {
                using (var chunkReader = new FileReader(File052Data, true))
                {
                    chunkReader.SeekBegin(ChunkTable.ChunkEntries[i].ChunkOffset);
                    uint magic = chunkReader.ReadUInt32();
                    uint hash = chunkReader.ReadUInt32();
                    ModelHashes.Add(hash);
                }
            }
        }

        //Set an instance of our current data
        //Chunks are in order, so you build off of when an instance gets loaded
        LM3_Model currentModel = new LM3_Model(this);
        TreeNode currentModelChunk = null;
        TexturePOWE currentTexture = new TexturePOWE();
        ChunkDataEntry currentVertexPointerList = null;
        List<uint> TextureHashes = new List<uint>();
        int chunkId = 0;
        uint modelIndex = 0;
        uint ImageHeaderIndex = 0;
        uint havokFileIndex = 0;
        foreach (var chunk in ChunkTable.ChunkSubEntries)
        {
            var chunkEntry = new ChunkDataEntry(this, chunk);
            chunkEntry.Text = $"{chunkId} {chunk.ChunkType.ToString("X")} {chunk.ChunkType} {chunk.ChunkOffset} {chunk.ChunkSize}";
            switch (chunk.ChunkType)
            {
                case SubDataType.HavokPhysics:
                    chunkEntry.DataFile = File052Data;
                    chunkEntry.Text = $"File_{havokFileIndex++}.hkx";
                    havokFolder.Nodes.Add(chunkEntry);
                    break;
                case SubDataType.TextureHeader:
                    chunkEntry.DataFile = File063Data;
                    //Read the info
                    using (var textureReader = new FileReader(chunkEntry.FileData, true))
                    {
                        currentTexture = new TexturePOWE();
                        currentTexture.HeaderOffset = chunk.ChunkOffset;
                        currentTexture.ImageKey = "texture";
                        currentTexture.SelectedImageKey = currentTexture.ImageKey;
                        currentTexture.Index = ImageHeaderIndex;
                        currentTexture.Read(textureReader);
                        if (DebugMode)
                        {
                            currentTexture.Text = $"Texture {ImageHeaderIndex} {currentTexture.Unknown} {currentTexture.Unknown2} {currentTexture.Unknown3.ToString("X")}";
                        }
                        else
                        {
                            currentTexture.Text = $"Texture {currentTexture.ID2.ToString("X")}";
                        }
                        //Prefer a known hash name when the texture ID resolves.
                        if (NLG_Common.HashNames.ContainsKey(currentTexture.ID2))
                        {
                            currentTexture.Text = NLG_Common.HashNames[currentTexture.ID2];
                        }
                        textureFolder.Nodes.Add(currentTexture);
                        //Guard against duplicate IDs so Add doesn't throw.
                        if (!Renderer.TextureList.ContainsKey(currentTexture.ID2.ToString("x")))
                        {
                            Renderer.TextureList.Add(currentTexture.ID2.ToString("x"), currentTexture);
                        }
                        TextureHashes.Add(currentTexture.ID2);
                        ImageHeaderIndex++;
                    }
                    break;
                case SubDataType.TextureData:
                    //Pixel data for the most recently read TextureHeader.
                    chunkEntry.DataFile = File065Data;
                    currentTexture.DataOffset = chunk.ChunkOffset;
                    currentTexture.ImageData = chunkEntry.FileData.ToBytes();
                    break;
                case SubDataType.ModelInfo:
                    chunkEntry.DataFile = File052Data;
                    uint numModels = chunk.ChunkSize / 12; //12 bytes per record: hash, mesh count, zero.
                    using (var dataReader = new FileReader(chunkEntry.FileData, true))
                    {
                        for (int i = 0; i < numModels; i++)
                        {
                            uint hashID = dataReader.ReadUInt32();
                            uint numMeshes = dataReader.ReadUInt32();
                            dataReader.ReadUInt32(); //0
                            string text = hashID.ToString("X");
                            if (NLG_Common.HashNames.ContainsKey(hashID))
                            {
                                text = NLG_Common.HashNames[hashID];
                            }
                            currentModel.Text = $"{currentModel.Text} [{text}]";
                        }
                    }
                    break;
                case SubDataType.MaterailData: //[sic] enum member is misspelled in the project.
                    //Starts a new model; subsequent chunks attach to it.
                    currentModelChunk = new TreeNode($"Model {modelIndex}");
                    chunkFolder.Nodes.Add(currentModelChunk);
                    chunkEntry.DataFile = File052Data;
                    currentModel = new LM3_Model(this);
                    currentModel.ModelInfo = new LM3_ModelInfo();
                    currentModel.Text = $"Model {modelIndex}";
                    currentModel.ModelInfo.Data = chunkEntry.FileData.ToBytes();
                    if (ModelHashes.Count > modelIndex)
                    {
                        currentModel.Text = $"Model {modelIndex} {ModelHashes[(int)modelIndex].ToString("x")}";
                        if (NLG_Common.HashNames.ContainsKey(ModelHashes[(int)modelIndex]))
                        {
                            currentModel.Text = NLG_Common.HashNames[ModelHashes[(int)modelIndex]];
                        }
                    }
                    modelIndex++;
                    break;
                case SubDataType.MeshBuffers:
                    chunkEntry.DataFile = File054Data;
                    currentModel.BufferStart = chunkEntry.Entry.ChunkOffset;
                    currentModel.BufferSize = chunkEntry.Entry.ChunkSize;
                    break;
                case SubDataType.VertexStartPointers:
                    //Saved for the following SubmeshInfo chunk, which reads it.
                    chunkEntry.DataFile = File052Data;
                    currentVertexPointerList = chunkEntry;
                    break;
                case SubDataType.SubmeshInfo:
                    chunkEntry.DataFile = File052Data;
                    int MeshCount = (int)chunkEntry.FileData.Length / 0x40; //0x40 bytes per mesh header.
                    //NOTE(review): currentVertexPointerList would be null here
                    //if no VertexStartPointers chunk preceded this one.
                    using (var vtxPtrReader = new FileReader(currentVertexPointerList.FileData, true))
                    using (var meshReader = new FileReader(chunkEntry.FileData, true))
                    {
                        for (uint i = 0; i < MeshCount; i++)
                        {
                            meshReader.SeekBegin(i * 0x40);
                            LM3_Mesh mesh = new LM3_Mesh();
                            mesh.Read(meshReader);
                            currentModel.Meshes.Add(mesh);
                            var buffer = new LM3_Model.PointerInfo();
                            //4294967295 == uint.MaxValue sentinel in Unknown3.
                            buffer.Read(vtxPtrReader, mesh.Unknown3 != 4294967295);
                            currentModel.VertexBufferPointers.Add(buffer);
                        }
                    }
                    modelFolder.Nodes.Add(currentModel);
                    break;
                case SubDataType.ModelTransform:
                    chunkEntry.DataFile = File052Data;
                    using (var transformReader = new FileReader(chunkEntry.FileData, true))
                    {
                        //This is possibly very wrong
                        //The data isn't always per mesh, but sometimes is
                        if (transformReader.BaseStream.Length / 0x40 == currentModel.Meshes.Count)
                        {
                            for (int i = 0; i < currentModel.Meshes.Count; i++)
                            {
                                currentModel.Meshes[i].Transform = transformReader.ReadMatrix4();
                            }
                        }
                    }
                    break;
                case SubDataType.MaterialName:
                    chunkEntry.DataFile = File053Data;
                    /* using (var matReader = new FileReader(chunkEntry.FileData))
                     * {
                     *     materialNamesFolder.Nodes.Add(matReader.ReadZeroTerminatedString());
                     * }*/
                    break;
                case SubDataType.UILayoutMagic:
                    chunkEntry.DataFile = File053Data;
                    break;
                case SubDataType.UILayout:
                    chunkEntry.DataFile = File053Data;
                    break;
                case SubDataType.BoneData:
                    //Skeleton parsing is disabled; only the data-file mapping
                    //is kept. Only reads the first skeleton per model.
                    if (chunk.ChunkSize > 0x40 && currentModel.Skeleton == null)
                    {
                        chunkEntry.DataFile = File052Data;
                        using (var boneReader = new FileReader(chunkEntry.FileData, true))
                        {
                            /* currentModel.Skeleton = new STSkeleton();
                             * DrawableContainer.Drawables.Add(currentModel.Skeleton);
                             *
                             * uint numBones = chunk.ChunkSize / 0x40;
                             * for (int i = 0; i < numBones; i++)
                             * {
                             *     boneReader.SeekBegin(i * 0x40);
                             *     uint HashID = boneReader.ReadUInt32();
                             *     boneReader.ReadUInt32(); //unk
                             *     boneReader.ReadUInt32(); //unk
                             *     boneReader.ReadSingle(); //0
                             *     var Scale = new OpenTK.Vector3(
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle());
                             *     boneReader.ReadSingle(); //0
                             *     var Rotate = new OpenTK.Vector3(
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle());
                             *     boneReader.ReadSingle(); //0
                             *     var Position = new OpenTK.Vector3(
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle(),
                             *         boneReader.ReadSingle());
                             *     float test = boneReader.ReadSingle(); //1
                             *     STBone bone = new STBone(currentModel.Skeleton);
                             *     bone.Text = HashID.ToString("x");
                             *     // if (HashNames.ContainsKey(HashID))
                             *     //     bone.Text = HashNames[HashID];
                             *     // else
                             *     //     Console.WriteLine($"bone hash {HashID}");
                             *
                             *     bone.position = new float[3] { Position.X, Position.Z, Position.Y };
                             *     bone.rotation = new float[4] { Rotate.X, Rotate.Y, Rotate.Z, 1 };
                             *     bone.scale = new float[3] { 0.2f, 0.2f, 0.2f };
                             *
                             *     bone.RotationType = STBone.BoneRotationType.Euler;
                             *     currentModel.Skeleton.bones.Add(bone);
                             * }
                             *
                             * currentModel.Skeleton.reset();
                             * currentModel.Skeleton.update();*/
                        }
                    }
                    break;
                //Unnamed chunk types mapped by raw value — meaning unknown.
                case (SubDataType)0x5012:
                case (SubDataType)0x5013:
                case (SubDataType)0x5014:
                    chunkEntry.DataFile = File063Data;
                    break;
                case (SubDataType)0x7101:
                case (SubDataType)0x7102:
                case (SubDataType)0x7103:
                case (SubDataType)0x7104:
                case (SubDataType)0x7106:
                case (SubDataType)0x6503:
                case (SubDataType)0x6501:
                    chunkEntry.DataFile = File053Data;
                    break;
                /* case (SubDataType)0x7105:
                 *     chunkEntry.DataFile = File053Data;
                 *     using (var chunkReader = new FileReader(chunkEntry.FileData))
                 *     {
                 *         while (chunkReader.Position <= chunkReader.BaseStream.Length - 8)
                 *         {
                 *             uint hash = chunkReader.ReadUInt32();
                 *             uint unk = chunkReader.ReadUInt32();
                 *
                 *             if (HashNames.ContainsKey(hash))
                 *                 Console.WriteLine("Hash Match! " + HashNames[hash]);
                 *         }
                 *     }
                 *     break;*/
                case SubDataType.BoneHashList:
                    chunkEntry.DataFile = File053Data;
                    //Hashes are read but currently discarded (research code).
                    using (var chunkReader = new FileReader(chunkEntry.FileData, true))
                    {
                        while (chunkReader.Position <= chunkReader.BaseStream.Length - 4)
                        {
                            uint hash = chunkReader.ReadUInt32();
                            // if (HashNames.ContainsKey(hash))
                            //     Console.WriteLine("Hash Match! " + HashNames[hash]);
                        }
                    }
                    break;
                default:
                    chunkEntry.DataFile = File052Data;
                    break;
            }

            //Attach model-related chunks under the current model node;
            //everything else (except havok, already attached) under the
            //generic chunk folder.
            //NOTE(review): if a model-related chunk appears before the first
            //MaterailData chunk, currentModelChunk is still null here.
            if (chunk.ChunkType == SubDataType.MaterailData ||
                chunk.ChunkType == SubDataType.ModelInfo ||
                chunk.ChunkType == SubDataType.MeshBuffers ||
                chunk.ChunkType == SubDataType.MeshIndexTable ||
                chunk.ChunkType == SubDataType.SubmeshInfo ||
                chunk.ChunkType == SubDataType.BoneHashList ||
                chunk.ChunkType == SubDataType.BoneData)
            {
                currentModelChunk.Nodes.Add(chunkEntry);
            }
            else if (chunk.ChunkType != SubDataType.HavokPhysics)
            {
                chunkFolder.Nodes.Add(chunkEntry);
            }
            chunkId++;
        }

        //Second pass: parse each model's material/info blob now that all
        //texture hashes are collected.
        foreach (var model in modelFolder.Nodes)
        {
            ((LM3_Model)model).ModelInfo.Read(new FileReader(
                ((LM3_Model)model).ModelInfo.Data),
                ((LM3_Model)model), TextureHashes);
        }

        //Only surface folders that actually have content.
        if (havokFolder.Nodes.Count > 0)
        {
            Nodes.Add(havokFolder);
        }
        if (textureFolder.Nodes.Count > 0)
        {
            Nodes.Add(textureFolder);
        }
        if (modelFolder.Nodes.Count > 0)
        {
            Nodes.Add(modelFolder);
        }
        if (stringFolder.Nodes.Count > 0)
        {
            Nodes.Add(stringFolder);
        }
    }
}
//Decodes this model's index and vertex buffers from the shared mesh-buffer
//file and populates child tree nodes (skeleton, textures, one wrapper per
//mesh) plus the renderer's mesh list.
//NOTE(review): uses instance state set during Load (Meshes,
//VertexBufferPointers, BufferStart, TextureHashes, Skeleton, DataDictionary)
//— assumes Load ran first.
public void ReadVertexBuffers()
{
    Nodes.Clear();
    using (var reader = new FileReader(DataDictionary.GetFileBufferData(), true))
    {
        TreeNode texturesList = new TreeNode("Texture Maps");
        TreeNode skeletonNode = new TreeNode("Skeleton");
        //Skeleton may be null (bone parsing is disabled in Load); the
        //nullable comparison makes the loop a no-op in that case.
        for (int t = 0; t < Skeleton?.bones.Count; t++)
        {
            skeletonNode.Nodes.Add(Skeleton.bones[t]);
        }
        //List referenced textures; unresolved hashes are added as plain text.
        for (int t = 0; t < TextureHashes.Count; t++)
        {
            if (DataDictionary.Renderer.TextureList.ContainsKey(TextureHashes[t].ToString("x")))
            {
                var tex = DataDictionary.Renderer.TextureList[TextureHashes[t].ToString("x")];
                texturesList.Nodes.Add(new TreeNode(tex.Text)
                {
                    ImageKey = tex.ImageKey,
                    SelectedImageKey = tex.ImageKey,
                    Tag = tex
                });
            }
            else
            {
                Nodes.Add(TextureHashes[t].ToString("x"));
            }
        }
        if (skeletonNode.Nodes.Count > 0)
        {
            Nodes.Add(skeletonNode);
        }
        if (texturesList.Nodes.Count > 0)
        {
            Nodes.Add(texturesList);
        }
        for (int i = 0; i < Meshes.Count; i++)
        {
            LM3_Mesh mesh = Meshes[i];
            RenderableMeshWrapper genericObj = new RenderableMeshWrapper();
            genericObj.Mesh = mesh;
            genericObj.Checked = true;
            genericObj.Text = $"Mesh {i} {mesh.HashID.ToString("X")}";
            if (NLG_Common.HashNames.ContainsKey(mesh.HashID))
            {
                genericObj.Text = NLG_Common.HashNames[mesh.HashID];
            }
            genericObj.SetMaterial(mesh.Material);
            RenderedMeshes.Add(genericObj);
            Nodes.Add(genericObj);
            DataDictionary.Renderer.Meshes.Add(genericObj);
            STGenericPolygonGroup polyGroup = new STGenericPolygonGroup();
            genericObj.PolygonGroups.Add(polyGroup);
            //Pointers are relative to BufferStart within the shared buffer file.
            uint vertexBufferPointer = VertexBufferPointers[i].VertexBufferPointer;
            uint weightTablePointer = VertexBufferPointers[i].WeightTablePointer;
            //Debug node: raw dump of 80 bytes per vertex (research aid; 80 is
            //presumably an upper bound on stride — TODO confirm).
            using (reader.TemporarySeek(BufferStart + vertexBufferPointer, System.IO.SeekOrigin.Begin))
            {
                var bufferNodeDebug = new DebugVisualBytes(reader.ReadBytes((int)80 * (int)mesh.VertexCount));
                bufferNodeDebug.Text = $"Buffer {mesh.DataFormat.ToString("x")}";
                genericObj.Nodes.Add(bufferNodeDebug);
            }
            LM3_Mesh.FormatInfo formatInfo;
            if (!LM3_Mesh.FormatInfos.ContainsKey(mesh.DataFormat))
            {
                //Unknown format: fall back to a guessed layout instead of
                //skipping the mesh entirely.
                Console.WriteLine($"Unsupported data format! " + mesh.DataFormat.ToString("x"));
                formatInfo = new LM3_Mesh.FormatInfo(VertexDataFormat.Float32_32_32, 0x30);
                //  continue;
            }
            else
            {
                formatInfo = LM3_Mesh.FormatInfos[mesh.DataFormat];
            }
            if (formatInfo.BufferLength > 0)
            {
                Console.WriteLine($"BufferStart {BufferStart} IndexStartOffset {mesh.IndexStartOffset}");
                //Read the index buffer first.
                reader.BaseStream.Position = BufferStart + mesh.IndexStartOffset;
                switch (mesh.IndexFormat)
                {
                    case IndexFormat.Index_8:
                        for (int f = 0; f < mesh.IndexCount; f++)
                        {
                            polyGroup.faces.Add(reader.ReadByte());
                        }
                        break;
                    case IndexFormat.Index_16:
                        for (int f = 0; f < mesh.IndexCount; f++)
                        {
                            polyGroup.faces.Add(reader.ReadUInt16());
                        }
                        break;
                    /* case IndexFormat.Index_32:
                     *     for (int f = 0; f < mesh.IndexCount; f++)
                     *         polyGroup.faces.Add((int)reader.ReadUInt32());
                     *     break;*/
                }
                Console.WriteLine($"Mesh {genericObj.Text} Format {formatInfo.Format} BufferLength {formatInfo.BufferLength}");
                Console.WriteLine($"BufferStart {BufferStart} VertexBufferPointers {vertexBufferPointer}");
                uint bufferOffet = BufferStart + vertexBufferPointer;
                /* for (int v = 0; v < mesh.VertexCount; v++)
                 * {
                 *     reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                 *
                 * }*/
                //Decode vertices; each case seeks per-vertex by the format's
                //stride so trailing unknown attributes are skipped safely.
                switch (formatInfo.Format)
                {
                    case VertexDataFormat.Float16:
                        //Half-float positions, packed snorm normals, u16 UVs.
                        for (int v = 0; v < mesh.VertexCount; v++)
                        {
                            reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                            Vertex vert = new Vertex();
                            genericObj.vertices.Add(vert);
                            vert.pos = new Vector3(
                                UShortToFloatDecode(reader.ReadInt16()),
                                UShortToFloatDecode(reader.ReadInt16()),
                                UShortToFloatDecode(reader.ReadInt16()));
                            Vector4 nrm = Read_8_8_8_8_Snorm(reader);
                            vert.nrm = nrm.Xyz.Normalized();
                            vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                            vert.uv0 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                            if (formatInfo.BufferLength == 22)
                            {
                                //Extra 8 bytes of unknown data in this variant.
                                Console.WriteLine("unk 1 " + reader.ReadUInt16());
                                Console.WriteLine("unk 2 " + reader.ReadUInt16());
                                Console.WriteLine("unk 3 " + reader.ReadUInt16());
                                Console.WriteLine("unk 4 " + reader.ReadUInt16());
                            }
                        }
                        break;
                    case VertexDataFormat.Float32:
                        //Position only.
                        for (int v = 0; v < mesh.VertexCount; v++)
                        {
                            reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                            Vertex vert = new Vertex();
                            genericObj.vertices.Add(vert);
                            vert.pos = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                            vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                        }
                        break;
                    case VertexDataFormat.Float32_32:
                        //NOTE(review): this pre-seek (+0x08) is immediately
                        //overridden by the per-vertex SeekBegin below — likely
                        //leftover; confirm before removing.
                        reader.BaseStream.Position = BufferStart + vertexBufferPointer + 0x08;
                        for (int v = 0; v < mesh.VertexCount; v++)
                        {
                            reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                            Vertex vert = new Vertex();
                            genericObj.vertices.Add(vert);
                            vert.pos = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                            vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                            vert.uv0 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                            vert.uv1 = NormalizeUvCoordsToFloat(reader.ReadUInt16(), reader.ReadUInt16());
                            vert.col = Read_8_8_8_8_Unorm(reader);
                        }
                        break;
                    case VertexDataFormat.Float32_32_32:
                        for (int v = 0; v < mesh.VertexCount; v++)
                        {
                            reader.SeekBegin(bufferOffet + (v * formatInfo.BufferLength));
                            Vertex vert = new Vertex();
                            genericObj.vertices.Add(vert);
                            vert.pos = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                            vert.pos = Vector3.TransformPosition(vert.pos, mesh.Transform);
                            //Texture coordinates are stored between normals, WHY NLG
                            var texCoordU = reader.ReadSingle();
                            vert.nrm = new Vector3(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                            var texCoordV = reader.ReadSingle();
                            vert.tan = new Vector4(reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle(), reader.ReadSingle());
                            vert.uv0 = new Vector2(texCoordU, texCoordV);
                        }
                        break;
                }
                //Rotate -90 about X (Z-up source data, presumably).
                genericObj.TransformPosition(new Vector3(0), new Vector3(-90, 0, 0), new Vector3(1));
            }
            //Weight table: uint.MaxValue pointer means "no weights".
            if (weightTablePointer != uint.MaxValue)
            {
                using (reader.TemporarySeek(BufferStart + weightTablePointer, System.IO.SeekOrigin.Begin))
                {
                    //Currently only tracks the highest bone index seen;
                    //assigning weights to vertices is disabled.
                    byte maxIndex = 0;
                    for (int v = 0; v < genericObj.vertices.Count; v++)
                    {
                        byte[] boneIndices = reader.ReadBytes(4);
                        float[] boneWeights = reader.ReadSingles(4);
                        for (int j = 0; j < 4; j++)
                        {
                            maxIndex = Math.Max(maxIndex, boneIndices[j]);
                            //  genericObj.vertices[v].boneIds.Add(boneIndices[j]);
                            //  genericObj.vertices[v].boneWeights.Add(boneWeights[j]);
                        }
                    }
                    Console.WriteLine("maxIndex " + maxIndex);
                }
            }
            genericObj.RemoveDuplicateVertices();
        }
    }
}