public void BuildNormals()
{
    if (HasNormals) return;

    // Find vertices to share.
    // We use a very simple algorithm here: share all vertices with the same position,
    // regardless of the normals of the faces that share the vertex.
    CVec3[] tmpNorm = new CVec3[NumVerts]; // will actually use only Points.Count items, which is smaller than NumVerts
    CVertexShare Share = new CVertexShare();
    Share.Prepare(Verts, NumVerts, 48);
    for (int i = 0; i < NumVerts; i++)
    {
        CPackedNormal NullVec = new CPackedNormal { Data = 0 };
        Share.AddVertex(Verts[i].Position, NullVec);
    }

    for (int i = 0; i < Indices.Length / 3; i++)
    {
        CSkelMeshVertex[] V = new CSkelMeshVertex[3];
        CVec3[] N = new CVec3[3];
        for (int j = 0; j < 3; j++)
        {
            int idx = (int)Indices[i * 3 + j]; // index in Verts[]
            V[j] = Verts[idx];
            N[j] = tmpNorm[Share.WedgeToVert[idx]]; // remap to shared verts
        }

        // compute edges
        CVec3[] D = new CVec3[]
        {
            V[1].Position - V[0].Position,
            V[2].Position - V[1].Position,
            V[0].Position - V[2].Position
        };
        // compute face normal
        CVec3 norm = CVec3.Cross(D[1], D[0]);
        norm.Normalize();
        // compute corner angles
        for (int j = 0; j < 3; j++)
        {
            D[j].Normalize();
        }
        float[] angle = new float[]
        {
            (float)Math.Acos(-CVec3.Dot(D[0], D[2])),
            (float)Math.Acos(-CVec3.Dot(D[0], D[1])),
            (float)Math.Acos(-CVec3.Dot(D[1], D[2]))
        };
        // add the face normal to each triangle vertex, weighted by its corner angle
        for (int j = 0; j < 3; j++)
        {
            N[j].VectorMA(angle[j], norm);
        }
        for (int j = 0; j < 3; j++)
        {
            int idx = (int)Indices[i * 3 + j];
            Verts[idx] = V[j];
            tmpNorm[Share.WedgeToVert[idx]] = N[j];
        }
    }

    // TODO: add a "hard angle threshold" - do not share a vertex between faces when
    // the angle between them is too large.

    // normalize shared normals ...
    for (int i = 0; i < Share.Points.Count; i++)
    {
        tmpNorm[i].Normalize();
    }
    // ... then place ("unshare") the normals back into Verts
    for (int i = 0; i < NumVerts; i++)
    {
        Verts[i].Normal.Pack(tmpNorm[Share.WedgeToVert[i]]);
    }

    HasNormals = true;
}
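// The welding contract assumed throughout this file: Share.WedgeToVert maps every
// wedge (per-face vertex) to its shared point in Share.Points, and Share.VertToWedge
// maps a shared point back to one representative wedge. Below is a minimal sketch of
// a sanity check for that contract (assumption: this helper is illustrative only and
// is not part of the original exporter; it uses only members already referenced above).
private static void ValidateShare(CVertexShare Share, int NumVerts)
{
    for (int i = 0; i < NumVerts; i++)
    {
        int point = Share.WedgeToVert[i];
        // every wedge must remap to a valid shared point
        if (point < 0 || point >= Share.Points.Count)
            throw new InvalidOperationException($"Wedge {i} maps to invalid shared point {point}");
        // the representative wedge of that point must itself map back to the point
        if (Share.WedgeToVert[Share.VertToWedge[point]] != point)
            throw new InvalidOperationException($"Shared point {point} has an inconsistent representative wedge");
    }
}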
static void ExportCommonMeshData(BinaryWriter writer, CMeshSection[] Sections, CSkelMeshVertex[] Verts, CIndexBuffer Indices, CVertexShare Share)
{
    VChunkHeader MainHdr = new VChunkHeader(), PtsHdr, WedgHdr, FacesHdr, MatrHdr;

    // main psk header
    SAVE_CHUNK(writer, MainHdr, "ACTRHEAD");

    PtsHdr = new VChunkHeader { DataCount = Share.Points.Count, DataSize = 12 };
    SAVE_CHUNK(writer, PtsHdr, "PNTS0000");
    for (int i = 0; i < Share.Points.Count; i++)
    {
        FVector V = Share.Points[i];
        V.Y = -V.Y; // mirror the Y axis for the coordinate system conversion
        V.Write(writer);
    }

    // get the number of faces (some Gears3 meshes may have an index buffer larger than needed)
    // and build the wedge-to-material mapping
    int numFaces = 0;
    int[] WedgeMat = new int[Verts.Length];
    for (int i = 0; i < Sections.Length; i++)
    {
        CMeshSection Sec = Sections[i];
        numFaces += Sec.NumFaces;
        for (int j = 0; j < Sec.NumFaces * 3; j++)
        {
            WedgeMat[Indices[j + Sec.FirstIndex]] = i;
        }
    }

    WedgHdr = new VChunkHeader { DataCount = Verts.Length, DataSize = 16 };
    SAVE_CHUNK(writer, WedgHdr, "VTXW0000");
    for (int i = 0; i < Verts.Length; i++)
    {
        CSkelMeshVertex S = Verts[i];
        VVertex W = new VVertex
        {
            PointIndex = Share.WedgeToVert[i],
            U = S.UV.U,
            V = S.UV.V,
            MatIndex = (byte)WedgeMat[i],
            Reserved = 0,
            Pad = 0
        };
        W.Write(writer);
    }

    if (Verts.Length <= 65536)
    {
        // 16-bit wedge indices
        FacesHdr = new VChunkHeader { DataCount = numFaces, DataSize = 12 };
        SAVE_CHUNK(writer, FacesHdr, "FACE0000");
        for (int i = 0; i < Sections.Length; i++)
        {
            CMeshSection Sec = Sections[i];
            for (int j = 0; j < Sec.NumFaces; j++)
            {
                VTriangle16 T = new VTriangle16 { WedgeIndex = new ushort[3] };
                for (int k = 0; k < 3; k++)
                {
                    int idx = (int)Indices[Sec.FirstIndex + j * 3 + k];
                    if (idx < 0 || idx >= 65536)
                    {
                        throw new FileLoadException("Invalid section index");
                    }
                    T.WedgeIndex[k] = (ushort)idx;
                }
                T.MatIndex = (byte)i;
                T.AuxMatIndex = 0;
                T.SmoothingGroups = 1;
                // reverse the winding order to compensate for the mirrored Y axis
                ushort tmp = T.WedgeIndex[0];
                T.WedgeIndex[0] = T.WedgeIndex[1];
                T.WedgeIndex[1] = tmp;
                T.Write(writer);
            }
        }
    }
    else
    {
        // pskx extension: 32-bit wedge indices
        FacesHdr = new VChunkHeader { DataCount = numFaces, DataSize = 18 };
        SAVE_CHUNK(writer, FacesHdr, "FACE3200");
        for (int i = 0; i < Sections.Length; i++)
        {
            CMeshSection Sec = Sections[i];
            for (int j = 0; j < Sec.NumFaces; j++)
            {
                VTriangle32 T = new VTriangle32 { WedgeIndex = new int[3] };
                for (int k = 0; k < 3; k++)
                {
                    int idx = (int)Indices[Sec.FirstIndex + j * 3 + k];
                    T.WedgeIndex[k] = idx;
                }
                T.MatIndex = (byte)i;
                T.AuxMatIndex = 0;
                T.SmoothingGroups = 1;
                // reverse the winding order to compensate for the mirrored Y axis
                int tmp = T.WedgeIndex[0];
                T.WedgeIndex[0] = T.WedgeIndex[1];
                T.WedgeIndex[1] = tmp;
                T.Write(writer);
            }
        }
    }

    MatrHdr = new VChunkHeader { DataCount = Sections.Length, DataSize = 88 };
    SAVE_CHUNK(writer, MatrHdr, "MATT0000");
    for (int i = 0; i < Sections.Length; i++)
    {
        VMaterial M = new VMaterial { MaterialName = new byte[64] };
        UUnrealMaterial Tex = Sections[i].Material;
        M.TextureIndex = i; // could be required for UT99
        //!! This will not handle (UMaterialWithPolyFlags.Material == null) correctly - it will produce MaterialName == "None"
        //!! (the same applies to md5mesh export)
        Tex = null; // material export is disabled here, so always fall back to a generated name
        if (Tex != null)
        {
            //Extensions.StrCpy(M.MaterialName, Tex.Name);
            //ExportObject(Tex);
        }
        else
        {
            Extensions.StrCpy(M.MaterialName, $"material_{i}");
        }
        M.Write(writer);
    }
}
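// All chunks above go through SAVE_CHUNK, which is defined elsewhere in this project.
// Below is a minimal sketch of the psk chunk header it is assumed to emit (assumption:
// the classic layout of a 20-byte zero-padded ASCII id followed by three int32 fields;
// the TypeFlag constant and exact field order follow common psk tooling, not this file).
static void SaveChunkSketch(BinaryWriter writer, VChunkHeader hdr, string id)
{
    byte[] chunkId = new byte[20]; // zero-padded chunk identifier, e.g. "PNTS0000"
    System.Text.Encoding.ASCII.GetBytes(id).CopyTo(chunkId, 0);
    writer.Write(chunkId);
    writer.Write(1999801);       // TypeFlag commonly written by psk exporters
    writer.Write(hdr.DataSize);  // size of one record in bytes
    writer.Write(hdr.DataCount); // number of records that follow
}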
public static void ExportMesh(BinaryWriter writer, CSkeletalMesh Mesh, CSkelMeshLod Lod)
{
    VChunkHeader BoneHdr, InfHdr;

    // Weld vertices.
    // The code below differs from the similar code for StaticMesh export: it relies on
    // vertex weight information to avoid occasionally welding vertices which have the
    // same position and normal but belong to different bones.
    CVertexShare Share = new CVertexShare();
    Share.Prepare(Lod.Verts, Lod.NumVerts, 48);
    for (int i = 0; i < Lod.NumVerts; i++)
    {
        CSkelMeshVertex S = Lod.Verts[i];
        // Here we rely on the high probability that vertices which should be shared
        // between triangles have the same order of weights and bones (most likely
        // these vertices were duplicated by copying). A more complicated comparison
        // would reduce performance while possibly shrinking the exported mesh by only
        // a few more vertices.
        uint WeightsHash = S.PackedWeights;
        for (int j = 0; j < S.Bone.Length; j++)
        {
            WeightsHash ^= (uint)(S.Bone[j] << j);
        }
        Share.AddVertex(S.Position, S.Normal, WeightsHash);
    }

    ExportCommonMeshData(writer, Lod.Sections, Lod.Verts, Lod.Indices, Share);

    int numBones = Mesh.RefSkeleton.Length;
    BoneHdr = new VChunkHeader { DataCount = numBones, DataSize = 120 };
    SAVE_CHUNK(writer, BoneHdr, "REFSKELT");
    for (int i = 0; i < numBones; i++)
    {
        VBone B = new VBone { Name = new byte[64] };
        CSkelMeshBone S = Mesh.RefSkeleton[i];
        Extensions.StrCpy(B.Name, S.Name);
        // count the children of this bone
        int NumChildren = 0;
        for (int j = 0; j < numBones; j++)
        {
            if ((j != i) && (Mesh.RefSkeleton[j].ParentIndex == i))
            {
                NumChildren++;
            }
        }
        B.NumChildren = NumChildren;
        B.ParentIndex = S.ParentIndex;
        B.BonePos.Position = S.Position;
        B.BonePos.Orientation = S.Orientation;
        // mirror the Y axis (same coordinate system conversion as for points)
        B.BonePos.Orientation.Y *= -1;
        B.BonePos.Orientation.W *= -1;
        B.BonePos.Position.Y *= -1;
        B.Write(writer);
    }

    // count influences
    int NumInfluences = 0;
    for (int i = 0; i < Share.Points.Count; i++)
    {
        int WedgeIndex = Share.VertToWedge[i];
        CSkelMeshVertex V = Lod.Verts[WedgeIndex];
        for (int j = 0; j < 4; j++)
        {
            if (V.Bone[j] < 0) break;
            NumInfluences++;
        }
    }

    // write influences
    InfHdr = new VChunkHeader { DataCount = NumInfluences, DataSize = 12 };
    SAVE_CHUNK(writer, InfHdr, "RAWWEIGHTS");
    for (int i = 0; i < Share.Points.Count; i++)
    {
        int WedgeIndex = Share.VertToWedge[i];
        CSkelMeshVertex V = Lod.Verts[WedgeIndex];
        CVec4 UnpackedWeights = V.UnpackWeights();
        for (int j = 0; j < 4; j++)
        {
            if (V.Bone[j] < 0) break;
            NumInfluences--; // just for verification
            VRawBoneInfluence I;
            I.Weight = UnpackedWeights.v[j];
            I.BoneIndex = V.Bone[j];
            I.PointIndex = i;
            I.Write(writer);
        }
    }
    if (NumInfluences != 0)
    {
        throw new FileLoadException("Did not write all influences");
    }

    ExportExtraUV(writer, Lod.ExtraUV, Lod.NumVerts, Lod.NumTexCoords);
}
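// Usage sketch (assumption: the file path and the way the mesh and LOD are obtained
// are hypothetical; only ExportMesh itself is defined above).
public static void ExportMeshToFile(string path, CSkeletalMesh Mesh, CSkelMeshLod Lod)
{
    using (var writer = new BinaryWriter(File.Create(path)))
    {
        ExportMesh(writer, Mesh, Lod);
    }
}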