// Parses the GEOS chunk: a first pass counts the sub-geosets so the array
// can be allocated exactly, then a second pass parses each geoset.
private void ParseChunk_GEOS(BinaryReader data, Stream stream, uint chunkLength)
{
    long chunkEnd = stream.Position + chunkLength;

    // First pass: hop over each geoset via its leading byte-size field,
    // counting as we go. The -4 accounts for the size field just consumed.
    uint geosetCount = 0;
    while (stream.Position < chunkEnd)
    {
        geosetCount++;
        stream.Seek(data.ReadUInt32() - 4, SeekOrigin.Current);
    }

    // Rewind to the start of the chunk for the real parse.
    stream.Position = chunkEnd - chunkLength;

    Geoset[] geosets = new Geoset[geosetCount];
    uint cursor = 0;
    while (stream.Position < chunkEnd)
    {
        long geosetStart = stream.Position;
        long geosetEnd = geosetStart + data.ReadUInt32();
        geosets[cursor++] = ParseGeoset(data, stream, geosetEnd);
        // Snap to the declared end in case the parser under-consumed.
        stream.Position = geosetEnd;
    }

    model.geosets = geosets;
}
// Parses the MATS chunk: skips the (not yet implemented) matrix index
// list, then reads the geoset's material index and name.
private void ParseChunk_MATS(BinaryReader data, ref Geoset geoset)
{
    // Matrix indices — not yet implemented; skip count * 4 bytes.
    data.Skip((int)data.ReadUInt32() * 4);

    // This seems version specific (900?), may need additional checks.
    geoset.materialIndex = data.ReadUInt32();
    data.Skip(12);

    // 112 name bytes? Strip NUL padding after decoding.
    byte[] nameBytes = data.ReadBytes(112);
    geoset.name = Encoding.UTF8.GetString(nameBytes).Replace("\0", string.Empty);
}
// Parses the PVTX chunk: the triangle index list of the geoset.
private void ParseChunk_PVTX(BinaryReader data, ref Geoset geoset)
{
    uint primitiveVertCount = data.ReadUInt32();

    // Technically this 3 maps from the PCNT, but it's always 3 so.
    uint primitiveCount = primitiveVertCount / 3;

    Primitive[] primitives = new Primitive[primitiveCount];
    for (uint tri = 0; tri < primitiveCount; tri++)
    {
        primitives[tri] = new Primitive
        {
            v1 = data.ReadUInt16(),
            v2 = data.ReadUInt16(),
            v3 = data.ReadUInt16()
        };
    }

    geoset.primitives = primitives;
}
// Parses the NRMS chunk: per-vertex normals of the geoset.
private void ParseChunk_NRMS(BinaryReader data, ref Geoset geoset)
{
    uint normalsCount = data.ReadUInt32();

    Normal[] normals = new Normal[normalsCount];
    for (uint n = 0; n < normalsCount; n++)
    {
        normals[n] = new Normal
        {
            x = data.ReadSingle(),
            y = data.ReadSingle(),
            z = data.ReadSingle()
        };
    }

    geoset.normals = normals;
}
// Parses the VRTX chunk: vertex positions of the geoset.
private void ParseChunk_VRTX(BinaryReader data, ref Geoset geoset)
{
    uint vertCount = data.ReadUInt32();

    Vert[] verts = new Vert[vertCount];
    for (uint v = 0; v < vertCount; v++)
    {
        verts[v] = new Vert
        {
            x = data.ReadSingle(),
            y = data.ReadSingle(),
            z = data.ReadSingle()
        };
    }

    geoset.verts = verts;
}
// Parses the UVBS chunk: texture coordinates of the geoset.
private void ParseChunk_UVBS(BinaryReader data, ref Geoset geoset)
{
    uint uvCount = data.ReadUInt32();

    UV[] uvs = new UV[uvCount];
    for (uint u = 0; u < uvCount; u++)
    {
        uvs[u] = new UV
        {
            x = data.ReadSingle(),
            y = data.ReadSingle()
        };
    }

    geoset.uvs = uvs;
}
}
// Parses the UVAS chunk. In version-900 files UVAS wraps UVBS sub-chunks;
// older files carry the UV data directly.
private void ParseChunk_UVAS(BinaryReader data, Stream stream, ref Geoset geoset)
{
    uint chunkCount = data.ReadUInt32();

    // Peek the next tag. If it is not a UVBS header, treat this as an old
    // UVAS: rewind over both the count and the peeked tag so that
    // ParseChunk_UVBS re-reads those 8 bytes as its own count header.
    if ((MDXChunks)data.ReadUInt32() != MDXChunks.UVBS)
        stream.Seek(-8, SeekOrigin.Current);

    ParseChunk_UVBS(data, ref geoset);

    // Note: No idea what the use of auxiliary UVBS chunks are, so we
    // ignore them — but their bytes still need to be skipped.
    // NOTE(review): in the old-UVAS path chunkCount was re-read as the UV
    // count above; whether this loop bound is still meaningful there is
    // unverified from this file alone.
    for (int i = 0; i < chunkCount - 1; i++)
    {
        data.Skip(4);                          // UVBS header tag.
        data.Skip((int)data.ReadUInt32() * 8); // uvCount * 2 floats.
    }
}
// Parses a single geoset: dispatches each sub-chunk tag to its handler
// until the declared end of the geoset is reached.
private Geoset ParseGeoset(BinaryReader data, Stream stream, long end)
{
    Geoset geoset = new Geoset { };

    while (stream.Position < end)
    {
        MDXChunks chunk = (MDXChunks)data.ReadUInt32();
        switch (chunk)
        {
            case MDXChunks.VRTX: ParseChunk_VRTX(data, ref geoset); break;
            case MDXChunks.NRMS: ParseChunk_NRMS(data, ref geoset); break;
            case MDXChunks.PTYP: ParseChunk_PTYP(data); break;
            case MDXChunks.PCNT: ParseChunk_PCNT(data); break;
            case MDXChunks.PVTX: ParseChunk_PVTX(data, ref geoset); break;
            case MDXChunks.GNDX: ParseChunk_GNDX(data); break;
            case MDXChunks.MTGC: ParseChunk_MTGC(data); break;
            case MDXChunks.MATS: ParseChunk_MATS(data, ref geoset); break;
            case MDXChunks.TANG: ParseChunk_TANG(data); break;
            case MDXChunks.SKIN: ParseChunk_SKIN(data); break;
            case MDXChunks.UVAS: ParseChunk_UVAS(data, stream, ref geoset); break;

            default:
                // Unknown/unimplemented geoset data. Breakpoint here to
                // evaluate. Skip the remainder of this geoset.
                stream.Position = end;
                break;
        }
    }

    return geoset;
}
/// <summary>
/// Exports a parsed MDX model to a Wavefront OBJ file, plus a companion
/// MTL material library when the model has textures.
/// </summary>
/// <param name="reader">Reader holding the already-parsed MDX model.</param>
/// <param name="outFile">Destination path of the .obj file; the .mtl is written alongside it.</param>
/// <param name="worker">Optional progress reporter; a local stand-in is created when null.</param>
public static void ExportMDX(MDXReader reader, string outFile, BackgroundWorker worker = null)
{
    if (worker == null)
        worker = new BackgroundWorker { WorkerReportsProgress = true };

    // Force '.' as the decimal separator so OBJ floats parse everywhere.
    // NOTE(review): this permanently alters the calling thread's culture;
    // kept as-is for compatibility with existing callers.
    var customCulture = (CultureInfo)Thread.CurrentThread.CurrentCulture.Clone();
    customCulture.NumberFormat.NumberDecimalSeparator = ".";
    Thread.CurrentThread.CurrentCulture = customCulture;

    string mtlFile = Path.GetFileNameWithoutExtension(outFile) + ".mtl";
    string mtlPath = Path.Combine(Path.GetDirectoryName(outFile), mtlFile);

    worker.ReportProgress(30, "Reading MDX file...");
    MDXModel model = reader.model;

    // Empty model: nothing to export.
    if (model.geosets == null)
        return;

    worker.ReportProgress(60, "Writing material library...");
    if (model.textures != null)
    {
        // 'using' guarantees the writer is flushed and closed even when an
        // exception is thrown mid-write (the old code leaked on failure).
        using (StreamWriter writerMTL = new StreamWriter(mtlPath))
        {
            for (int i = 0; i < model.textures.Length; i++)
            {
                string rawFile = Path.GetFileNameWithoutExtension(model.textures[i]);
                writerMTL.WriteLine("newmtl {0}", rawFile);
                writerMTL.WriteLine("illum 1");
                writerMTL.WriteLine("map_Kd {0}.dds\n", rawFile);
            }
        }
    }

    worker.ReportProgress(90, "Writing OBJ...");
    using (StreamWriter writerOBJ = new StreamWriter(outFile))
    {
        writerOBJ.WriteLine("# Exported using Marlamin's WoW Export Tools. MDX Exporter by Kruithne.");
        writerOBJ.WriteLine("# Model: {0} (MDX version {1})\n", model.name, model.version);
        writerOBJ.WriteLine("mtllib {0}\n", mtlFile);

        // Object Name
        writerOBJ.WriteLine("o {0}", model.name);

        // Instead of writing verts/normals/uvs for each geoset in order,
        // batch all of them together in three large lists at the top.
        writerOBJ.WriteLine("\n# Vertices");
        foreach (Geoset geoset in model.geosets)
            for (int i = 0; i < geoset.verts.Length; i++)
                // Axis swap: MDX Y-forward to OBJ's expected orientation.
                writerOBJ.WriteLine("v {0} {1} {2}", geoset.verts[i].x, geoset.verts[i].z, -geoset.verts[i].y);

        writerOBJ.WriteLine("\n# Normals");
        foreach (Geoset geoset in model.geosets)
            for (int i = 0; i < geoset.normals.Length; i++)
                writerOBJ.WriteLine("vn {0} {1} {2}", geoset.normals[i].x, geoset.normals[i].y, geoset.normals[i].z);

        writerOBJ.WriteLine("\n# UVs");
        foreach (Geoset geoset in model.geosets)
            for (int i = 0; i < geoset.uvs.Length; i++)
                writerOBJ.WriteLine("vt {0} {1}", geoset.uvs[i].x, geoset.uvs[i].y * -1); // Flip the Y UV, because it's backwards?

        // Write geoset meshes together. Faces index into the batched
        // global vertex list, so track an absolute base offset per geoset.
        long vertexBase = 0;
        foreach (Geoset geoset in model.geosets)
        {
            writerOBJ.WriteLine("\ng {0}", geoset.name);

            if (model.textures != null)
            {
                string textureFile = model.textures[model.materials[geoset.materialIndex].textureID];
                writerOBJ.WriteLine("usemtl {0}", Path.GetFileNameWithoutExtension(textureFile));
                writerOBJ.WriteLine("s 1");
            }

            // +1 to each face index to account for OBJ being one-based.
            for (int i = 0; i < geoset.primitives.Length; i++)
            {
                writerOBJ.WriteLine("f {0}/{0}/{0} {1}/{1}/{1} {2}/{2}/{2}",
                    vertexBase + geoset.primitives[i].v1 + 1,
                    vertexBase + geoset.primitives[i].v2 + 1,
                    vertexBase + geoset.primitives[i].v3 + 1);
            }

            // Maintain absolute offset rather than relative.
            vertexBase += geoset.verts.Length;
        }
    }
}
}