/// <summary>
/// Serializes one static-mesh LOD in PSK layout: common chunks (points, wedges,
/// faces, materials), then empty skeleton/weight chunks, vertex colors and extra UVs.
/// </summary>
/// <param name="lod">LOD to export.</param>
/// <param name="Ar">Destination archive writer.</param>
/// <param name="materialExports">Optional collector for referenced materials.</param>
/// <param name="platform">Texture platform forwarded to material export.</param>
private void ExportStaticMeshLods(CStaticMeshLod lod, FArchiveWriter Ar, List<MaterialExporter>? materialExports, ETexturePlatform platform = ETexturePlatform.DesktopMobile)
{
    // Weld vertices sharing position+normal so the point list is minimal.
    var vertexShare = new CVertexShare();
    vertexShare.Prepare(lod.Verts);
    for (var v = 0; v < lod.Verts.Length; v++)
    {
        vertexShare.AddVertex(lod.Verts[v].Position, lod.Verts[v].Normal);
    }

    ExportCommonMeshData(Ar, lod.Sections.Value, lod.Verts, lod.Indices.Value, vertexShare, materialExports, platform);

    // Static meshes have no skeleton: emit empty REFSKELT/RAWWEIGHTS chunks so the
    // chunk sequence stays valid for importers that expect them.
    var skeletonHdr = new VChunkHeader { DataCount = 0, DataSize = 120 };
    Ar.SerializeChunkHeader(skeletonHdr, "REFSKELT");

    var weightsHdr = new VChunkHeader { DataCount = 0, DataSize = 12 };
    Ar.SerializeChunkHeader(weightsHdr, "RAWWEIGHTS");

    ExportVertexColors(Ar, lod.VertexColors, lod.NumVerts);
    ExportExtraUV(Ar, lod.ExtraUV.Value, lod.NumVerts, lod.NumTexCoords);
}
/// <summary>
/// Serializes one skeletal-mesh LOD in PSK layout: common chunks, skeleton data,
/// raw bone influences (RAWWEIGHTS), vertex colors and extra UV channels.
/// </summary>
/// <param name="lod">LOD to export.</param>
/// <param name="bones">Reference skeleton bones.</param>
/// <param name="Ar">Destination archive writer.</param>
/// <param name="materialExports">Optional collector for referenced materials.</param>
/// <param name="platform">Texture platform forwarded to material export.</param>
private void ExportSkeletalMeshLod(CSkelMeshLod lod, List<CSkelMeshBone> bones, FArchiveWriter Ar, List<MaterialExporter>? materialExports, ETexturePlatform platform = ETexturePlatform.DesktopMobile)
{
    // Weld vertices. The skinning hash keeps vertices with identical position and
    // normal but different bone/weight data from being merged together.
    var share = new CVertexShare();
    share.Prepare(lod.Verts);
    for (var v = 0; v < lod.Verts.Length; v++)
    {
        var vertex = lod.Verts[v];
        var skinHash = vertex.PackedWeights;
        for (var b = 0; b < vertex.Bone.Length; b++)
        {
            skinHash ^= (uint)vertex.Bone[b] << b;
        }
        share.AddVertex(vertex.Position, vertex.Normal, skinHash);
    }

    ExportCommonMeshData(Ar, lod.Sections.Value, lod.Verts, lod.Indices.Value, share, materialExports, platform);
    ExportSkeletonData(Ar, bones);

    // Pass 1: count influences so the chunk header carries the exact count.
    // A bone index < 0 terminates a vertex's influence list.
    var influenceCount = 0;
    for (var p = 0; p < share.Points.Count; p++)
    {
        var vertex = lod.Verts[share.VertToWedge.Value[p]];
        for (var b = 0; b < Constants.NUM_INFLUENCES_UE4; b++)
        {
            if (vertex.Bone[b] < 0)
            {
                break;
            }
            influenceCount++;
        }
    }

    var weightsHdr = new VChunkHeader { DataCount = influenceCount, DataSize = 12 };
    Ar.SerializeChunkHeader(weightsHdr, "RAWWEIGHTS");

    // Pass 2: write (weight, point index, bone index) triples per influence.
    for (var p = 0; p < share.Points.Count; p++)
    {
        var vertex = lod.Verts[share.VertToWedge.Value[p]];
        var weights = vertex.UnpackWeights();
        for (var b = 0; b < Constants.NUM_INFLUENCES_UE4; b++)
        {
            if (vertex.Bone[b] < 0)
            {
                break;
            }
            Ar.Write(weights[b]);
            Ar.Write(p);
            Ar.Write((int)vertex.Bone[b]);
        }
    }

    ExportVertexColors(Ar, lod.VertexColors, lod.NumVerts);
    ExportExtraUV(Ar, lod.ExtraUV.Value, lod.NumVerts, lod.NumTexCoords);
}
// Writes the PSK chunks shared by static and skeletal mesh export: main header
// (ACTRHEAD), shared points (PNTS0000), wedges (VTXW0000), faces (FACE0000 or
// 32-bit FACE3200) and the material table (MATT0000).
// NOTE(review): this definition is truncated in the visible chunk — the material
// loop's else-branch and the method's closing braces are outside this view.
private void ExportCommonMeshData(FArchiveWriter Ar, CMeshSection[] sections, CMeshVertex[] verts, FRawStaticIndexBuffer indices, CVertexShare share, List<MaterialExporter>? materialExports, ETexturePlatform platform = ETexturePlatform.DesktopMobile) {
    var mainHdr = new VChunkHeader();
    var ptsHdr = new VChunkHeader();
    var wedgHdr = new VChunkHeader();
    var facesHdr = new VChunkHeader();
    var matrHdr = new VChunkHeader();
    var normHdr = new VChunkHeader(); // NOTE(review): never used in the visible portion
    mainHdr.TypeFlag = Constants.PSK_VERSION;
    Ar.SerializeChunkHeader(mainHdr, "ACTRHEAD");
    // PNTS0000: one 12-byte vector per welded point.
    var numPoints = share.Points.Count;
    ptsHdr.DataCount = numPoints;
    ptsHdr.DataSize = 12;
    Ar.SerializeChunkHeader(ptsHdr, "PNTS0000");
    for (var i = 0; i < numPoints; i++) {
        var point = share.Points[i];
        point.Y = -point.Y; // MIRROR_MESH
        point.Serialize(Ar);
    }
    // Count faces and record, for every wedge, which section (= material) uses it.
    var numFaces = 0;
    var numVerts = verts.Length;
    var numSections = sections.Length;
    var wedgeMat = new int[numVerts];
    for (var i = 0; i < numSections; i++) {
        var faces = sections[i].NumFaces;
        numFaces += faces;
        for (var j = 0; j < faces * 3; j++) {
            wedgeMat[indices[j + sections[i].FirstIndex]] = i;
        }
    }
    // VTXW0000: one 16-byte wedge (point index, UV, material) per vertex.
    wedgHdr.DataCount = numVerts;
    wedgHdr.DataSize = 16;
    Ar.SerializeChunkHeader(wedgHdr, "VTXW0000");
    for (var i = 0; i < numVerts; i++) {
        Ar.Write(share.WedgeToVert[i]);
        Ar.Write(verts[i].UV.U);
        Ar.Write(verts[i].UV.V);
        Ar.Write((byte)wedgeMat[i]);
        Ar.Write((byte)0);  // reserved/padding
        Ar.Write((short)0); // reserved/padding
    }
    facesHdr.DataCount = numFaces;
    if (numVerts <= 65536) {
        // FACE0000: 16-bit wedge indices, 12 bytes per triangle.
        facesHdr.DataSize = 12;
        Ar.SerializeChunkHeader(facesHdr, "FACE0000");
        for (var i = 0; i < numSections; i++) {
            for (var j = 0; j < sections[i].NumFaces; j++) {
                var wedgeIndex = new ushort[3];
                for (var k = 0; k < wedgeIndex.Length; k++) {
                    wedgeIndex[k] = (ushort)indices[sections[i].FirstIndex + j * 3 + k];
                }
                // First two indices swapped to flip winding after the Y mirror.
                Ar.Write(wedgeIndex[1]); // MIRROR_MESH
                Ar.Write(wedgeIndex[0]); // MIRROR_MESH
                Ar.Write(wedgeIndex[2]);
                Ar.Write((byte)i);  // material index
                Ar.Write((byte)0);  // aux material index
                Ar.Write((uint)1);  // smoothing groups
            }
        }
    } else {
        // FACE3200: pskx extension with 32-bit wedge indices, 18 bytes per triangle.
        facesHdr.DataSize = 18;
        Ar.SerializeChunkHeader(facesHdr, "FACE3200");
        for (var i = 0; i < numSections; i++) {
            for (var j = 0; j < sections[i].NumFaces; j++) {
                var wedgeIndex = new int[3];
                for (var k = 0; k < wedgeIndex.Length; k++) {
                    wedgeIndex[k] = indices[sections[i].FirstIndex + j * 3 + k];
                }
                Ar.Write(wedgeIndex[1]); // MIRROR_MESH
                Ar.Write(wedgeIndex[0]); // MIRROR_MESH
                Ar.Write(wedgeIndex[2]);
                Ar.Write((byte)i);  // material index
                Ar.Write((byte)0);  // aux material index
                Ar.Write((uint)1);  // smoothing groups
            }
        }
    }
    // MATT0000: one 88-byte VMaterial entry per mesh section.
    matrHdr.DataCount = numSections;
    matrHdr.DataSize = 88;
    Ar.SerializeChunkHeader(matrHdr, "MATT0000");
    for (var i = 0; i < numSections; i++) {
        string materialName;
        if (sections[i].Material?.Load<UMaterialInterface>() is { } tex) {
            materialName = tex.Name;
            materialExports?.Add(new MaterialExporter(tex, true, platform));
        }
// BinaryWriter-based variant of the common PSK chunk writer: main header,
// welded points, wedges, faces (16- or 32-bit) and the material table.
static void ExportCommonMeshData(BinaryWriter writer, CMeshSection[] Sections, CSkelMeshVertex[] Verts, CIndexBuffer Indices, CVertexShare Share) {
    VChunkHeader MainHdr = new VChunkHeader(), PtsHdr, WedgHdr, FacesHdr, MatrHdr;
    int i;
    // main psk header
    SAVE_CHUNK(writer, MainHdr, "ACTRHEAD");
    // PNTS0000: welded points; Y is mirrored for handedness conversion.
    PtsHdr = new VChunkHeader { DataCount = Share.Points.Count, DataSize = 12 };
    SAVE_CHUNK(writer, PtsHdr, "PNTS0000");
    for (i = 0; i < Share.Points.Count; i++) {
        FVector V = Share.Points[i];
        V.Y = -V.Y;
        V.Write(writer);
    }
    // get number of faces (some Gears3 meshes may have index buffer larger than needed)
    // get wedge-material mapping
    int numFaces = 0;
    int[] WedgeMat = new int[Verts.Length];
    for (i = 0; i < Sections.Length; i++) {
        CMeshSection Sec = Sections[i];
        numFaces += Sec.NumFaces;
        for (int j = 0; j < Sec.NumFaces * 3; j++) {
            WedgeMat[Indices[j + Sec.FirstIndex]] = i;
        }
    }
    // VTXW0000: one wedge per vertex (point index, UV, material index).
    WedgHdr = new VChunkHeader { DataCount = Verts.Length, DataSize = 16 };
    SAVE_CHUNK(writer, WedgHdr, "VTXW0000");
    for (i = 0; i < Verts.Length; i++) {
        CSkelMeshVertex S = Verts[i];
        VVertex W = new VVertex { PointIndex = Share.WedgeToVert[i], U = S.UV.U, V = S.UV.V, MatIndex = (byte)WedgeMat[i], Reserved = 0, Pad = 0 };
        W.Write(writer);
    }
    if (Verts.Length <= 65536) {
        // FACE0000: 16-bit wedge indices; out-of-range indices are rejected.
        FacesHdr = new VChunkHeader { DataCount = numFaces, DataSize = 12 };
        SAVE_CHUNK(writer, FacesHdr, "FACE0000");
        for (i = 0; i < Sections.Length; i++) {
            CMeshSection Sec = Sections[i];
            for (int j = 0; j < Sec.NumFaces; j++) {
                VTriangle16 T = new VTriangle16 { WedgeIndex = new ushort[3] };
                for (int k = 0; k < 3; k++) {
                    int idx = (int)Indices[Sec.FirstIndex + j * 3 + k];
                    if (idx < 0 || idx >= 65536) { throw new FileLoadException("Invalid section index"); }
                    T.WedgeIndex[k] = (ushort)idx;
                }
                T.MatIndex = (byte)i;
                T.AuxMatIndex = 0;
                T.SmoothingGroups = 1;
                // Swap the first two indices to flip winding after the Y mirror.
                ushort tmp = T.WedgeIndex[0];
                T.WedgeIndex[0] = T.WedgeIndex[1];
                T.WedgeIndex[1] = tmp;
                T.Write(writer);
            }
        }
    } else {
        // pskx extension
        // FACE3200: 32-bit wedge indices, 18 bytes per triangle.
        FacesHdr = new VChunkHeader { DataCount = numFaces, DataSize = 18 };
        SAVE_CHUNK(writer, FacesHdr, "FACE3200");
        for (i = 0; i < Sections.Length; i++) {
            CMeshSection Sec = Sections[i];
            for (int j = 0; j < Sec.NumFaces; j++) {
                VTriangle32 T = new VTriangle32 { WedgeIndex = new int[3] };
                for (int k = 0; k < 3; k++) {
                    int idx = (int)Indices[Sec.FirstIndex + j * 3 + k];
                    T.WedgeIndex[k] = idx;
                }
                T.MatIndex = (byte)i;
                T.AuxMatIndex = 0;
                T.SmoothingGroups = 1;
                int tmp = T.WedgeIndex[0];
                T.WedgeIndex[0] = T.WedgeIndex[1];
                T.WedgeIndex[1] = tmp;
                T.Write(writer);
            }
        }
    }
    // MATT0000: one 88-byte VMaterial entry per section.
    MatrHdr = new VChunkHeader { DataCount = Sections.Length, DataSize = 88 };
    SAVE_CHUNK(writer, MatrHdr, "MATT0000");
    for (i = 0; i < Sections.Length; i++) {
        VMaterial M = new VMaterial { MaterialName = new byte[64] };
        UUnrealMaterial Tex = Sections[i].Material;
        M.TextureIndex = i; // could be required for UT99
        //!! this will not handle (UMaterialWithPolyFlags->Material==NULL) correctly - will make MaterialName=="None"
        //!! (the same valid for md5mesh export)
        // NOTE(review): Tex is unconditionally nulled here, so the branch below is
        // dead and every section gets the placeholder name "material_{i}". This
        // looks deliberate (real-name export is commented out) — confirm intent.
        Tex = null;
        if (Tex != null) {
            //Extensions.StrCpy(M.MaterialName, Tex.Name);
            //ExportObject(Tex);
        } else {
            Extensions.StrCpy(M.MaterialName, $"material_{i}");
        }
        M.Write(writer);
    }
}
// Computes smooth per-vertex normals when the mesh has none: weld vertices by
// position, accumulate angle-weighted face normals at each shared point, then
// normalize and scatter the results back to every vertex.
public void BuildNormals() {
    if (HasNormals) { return; } // nothing to do if normals already exist
    int i, j;
    // Find vertices to share.
    // We are using very simple algorithm here: to share all vertices with the same position
    // independently on normals of faces which share this vertex.
    CVec3[] tmpNorm = new CVec3[NumVerts]; // really will use Points.Num() items, which value is smaller than NumVerts
    CVertexShare Share = new CVertexShare();
    Share.Prepare(Verts, NumVerts, 48);
    for (i = 0; i < NumVerts; i++) {
        CPackedNormal NullVec;
        NullVec.Data = 0;
        // Normal is zero for every vertex, so sharing keys on position only.
        Share.AddVertex(Verts[i].Position, NullVec);
    }
    // Accumulate per-triangle contributions into the shared normal slots.
    for (i = 0; i < Indices.Length / 3; i++) {
        CSkelMeshVertex[] V = new CSkelMeshVertex[3];
        CVec3[] N = new CVec3[3];
        for (j = 0; j < 3; j++) {
            int idx = (int)Indices[i * 3 + j]; // index in Verts[]
            V[j] = Verts[idx];
            N[j] = tmpNorm[Share.WedgeToVert[idx]]; // remap to shared verts
        }
        // compute edges
        CVec3[] D = new CVec3[] { V[1].Position - V[0].Position, V[2].Position - V[1].Position, V[0].Position - V[2].Position };
        // compute face normal
        CVec3 norm = CVec3.Cross(D[1], D[0]);
        norm.Normalize();
        // compute angles
        for (j = 0; j < 3; j++) {
            D[j].Normalize();
        }
        float[] angle = new float[] { (float)Math.Acos(-CVec3.Dot(D[0], D[2])), (float)Math.Acos(-CVec3.Dot(D[0], D[1])), (float)Math.Acos(-CVec3.Dot(D[1], D[2])) };
        // add normals for triangle verts, each weighted by its corner angle
        for (j = 0; j < 3; j++) {
            N[j].VectorMA(angle[j], norm);
        }
        // Write accumulated values back (presumably V/N hold value copies, so the
        // write-back is required to persist the accumulation — TODO confirm).
        for (j = 0; j < 3; j++) {
            int idx = (int)Indices[i * 3 + j];
            Verts[idx] = V[j];
            tmpNorm[Share.WedgeToVert[idx]] = N[j];
        }
    }
    // TODO: add "hard angle threshold" - do not share vertex between faces when angle between them
    // is too large.
    // normalize shared normals ...
    for (i = 0; i < Share.Points.Count; i++) {
        tmpNorm[i].Normalize();
    }
    // ... then place ("unshare") normals to Verts
    for (i = 0; i < NumVerts; i++) {
        Verts[i].Normal.Pack(tmpNorm[Share.WedgeToVert[i]]);
    }
    HasNormals = true;
}
// Exports a skeletal mesh LOD to PSK: common chunks, the reference skeleton
// (REFSKELT) and raw bone influences (RAWWEIGHTS), followed by extra UV data.
public static void ExportMesh(BinaryWriter writer, CSkeletalMesh Mesh, CSkelMeshLod Lod) {
    VChunkHeader BoneHdr, InfHdr;
    int i, j;
    CVertexShare Share = new CVertexShare();
    // weld vertices
    // The code below differs from similar code for StaticMesh export: it relies on vertex weight
    // information to not perform occasional welding of vertices which has the same position and
    // normal, but belongs to different bones.
    Share.Prepare(Lod.Verts, Lod.NumVerts, 48);
    for (i = 0; i < Lod.NumVerts; i++) {
        CSkelMeshVertex S = Lod.Verts[i];
        // Here we relies on high possibility that vertices which should be shared between
        // triangles will have the same order of weights and bones (because most likely
        // these vertices were duplicated by copying). Doing more complicated comparison
        // will reduce performance with possibly reducing size of exported mesh by a few
        // more vertices.
        uint WeightsHash = S.PackedWeights;
        for (j = 0; j < S.Bone.Length; j++) {
            WeightsHash ^= (uint)(S.Bone[j] << j);
        }
        Share.AddVertex(S.Position, S.Normal, WeightsHash);
    }
    ExportCommonMeshData(writer, Lod.Sections, Lod.Verts, Lod.Indices, Share);
    // REFSKELT: one 120-byte VBone per reference-skeleton bone.
    int numBones = Mesh.RefSkeleton.Length;
    BoneHdr = new VChunkHeader { DataCount = numBones, DataSize = 120 };
    SAVE_CHUNK(writer, BoneHdr, "REFSKELT");
    for (i = 0; i < numBones; i++) {
        VBone B = new VBone { Name = new byte[64] };
        CSkelMeshBone S = Mesh.RefSkeleton[i];
        Extensions.StrCpy(B.Name, S.Name);
        // count NumChildren (children are not stored; scan all bones' parents)
        int NumChildren = 0;
        for (j = 0; j < numBones; j++) {
            if ((j != i) && (Mesh.RefSkeleton[j].ParentIndex == i)) { NumChildren++; }
        }
        B.NumChildren = NumChildren;
        B.ParentIndex = S.ParentIndex;
        B.BonePos.Position = S.Position;
        B.BonePos.Orientation = S.Orientation;
        // Mirror: flip Y (and quaternion W) for handedness conversion.
        B.BonePos.Orientation.Y *= -1;
        B.BonePos.Orientation.W *= -1;
        B.BonePos.Position.Y *= -1;
        B.Write(writer);
    }
    // count influences (a bone index < 0 terminates a vertex's influence list)
    int NumInfluences = 0;
    for (i = 0; i < Share.Points.Count; i++) {
        int WedgeIndex = Share.VertToWedge[i];
        CSkelMeshVertex V = Lod.Verts[WedgeIndex];
        for (j = 0; j < 4; j++) {
            if (V.Bone[j] < 0) { break; }
            NumInfluences++;
        }
    }
    // write influences
    InfHdr = new VChunkHeader { DataCount = NumInfluences, DataSize = 12 };
    SAVE_CHUNK(writer, InfHdr, "RAWWEIGHTS");
    for (i = 0; i < Share.Points.Count; i++) {
        int WedgeIndex = Share.VertToWedge[i];
        CSkelMeshVertex V = Lod.Verts[WedgeIndex];
        CVec4 UnpackedWeights = V.UnpackWeights();
        for (j = 0; j < 4; j++) {
            if (V.Bone[j] < 0) { break; }
            NumInfluences--; // just for verification
            VRawBoneInfluence I;
            I.Weight = UnpackedWeights.v[j];
            I.BoneIndex = V.Bone[j];
            I.PointIndex = i;
            I.Write(writer);
        }
    }
    // Counter must return to zero: both passes saw the same influence count.
    if (NumInfluences != 0) { throw new FileLoadException("Did not write to all influences"); }
    ExportExtraUV(writer, Lod.ExtraUV, Lod.NumVerts, Lod.NumTexCoords);
}
// FCustomArchiveWriter variant of the common PSK chunk writer: main header,
// shared points, wedges, faces (16-/32-bit) and the material table.
// NOTE(review): this definition is truncated in the visible chunk — the material
// loop's else-branch and the method's closing braces are outside this view.
private void ExportCommonMeshData(FCustomArchiveWriter writer, CMeshSection[] sections, CMeshVertex[] verts, FRawStaticIndexBuffer indices, CVertexShare share, List<MaterialExporter>? materialExports) {
    var mainHdr = new VChunkHeader();
    var ptsHdr = new VChunkHeader();
    var wedgHdr = new VChunkHeader();
    var facesHdr = new VChunkHeader();
    var matrHdr = new VChunkHeader();
    mainHdr.TypeFlag = _PSK_VERSION;
    writer.SerializeChunkHeader(mainHdr, "ACTRHEAD");
    // PNTS0000: one 12-byte vector per welded point.
    var numPoints = share.Points.Count;
    ptsHdr.DataCount = numPoints;
    ptsHdr.DataSize = 12;
    writer.SerializeChunkHeader(ptsHdr, "PNTS0000");
    for (var i = 0; i < numPoints; i++) {
        var point = share.Points[i];
        point.Y = -point.Y; // MIRROR_MESH
        point.Serialize(writer);
    }
    // Count faces and record, for every wedge, which section (= material) uses it.
    var numFaces = 0;
    var numVerts = verts.Length;
    var numSections = sections.Length;
    var wedgeMat = new int[numVerts];
    for (var i = 0; i < numSections; i++) {
        var faces = sections[i].NumFaces;
        numFaces += faces;
        for (var j = 0; j < faces * 3; j++) {
            wedgeMat[indices[j + sections[i].FirstIndex]] = i;
        }
    }
    // VTXW0000: one 16-byte wedge per vertex.
    wedgHdr.DataCount = numVerts;
    wedgHdr.DataSize = 16;
    writer.SerializeChunkHeader(wedgHdr, "VTXW0000");
    for (var i = 0; i < numVerts; i++) {
        writer.Write(share.WedgeToVert[i]);
        // NOTE(review): the sibling FArchiveWriter exporter writes UV.U/UV.V
        // without a cast; casting to int would truncate a float UV. The original
        // comment below claims the int *is* the needed value — confirm whether UV
        // here is integer-encoded, otherwise this cast is a bug.
        writer.Write((int)verts[i].UV.U); // the 4 bit int value is the actual needed float value
        writer.Write((int)verts[i].UV.V); // the 4 bit int value is the actual needed float value
        writer.Write((byte)wedgeMat[i]);
        writer.Write((byte)0);  // reserved/padding
        writer.Write((short)0); // reserved/padding
    }
    facesHdr.DataCount = numFaces;
    if (numVerts <= 65536) {
        // FACE0000: 16-bit wedge indices, 12 bytes per triangle.
        facesHdr.DataSize = 12;
        writer.SerializeChunkHeader(facesHdr, "FACE0000");
        for (var i = 0; i < numSections; i++) {
            for (var j = 0; j < sections[i].NumFaces; j++) {
                var wedgeIndex = new ushort[3];
                for (var k = 0; k < wedgeIndex.Length; k++) {
                    wedgeIndex[k] = (ushort)indices[sections[i].FirstIndex + j * 3 + k];
                }
                // First two indices swapped to flip winding after the Y mirror.
                writer.Write(wedgeIndex[1]); // MIRROR_MESH
                writer.Write(wedgeIndex[0]); // MIRROR_MESH
                writer.Write(wedgeIndex[2]);
                writer.Write((byte)i);  // material index
                writer.Write((byte)0);  // aux material index
                writer.Write((uint)1);  // smoothing groups
            }
        }
    } else {
        // FACE3200: pskx extension with 32-bit wedge indices.
        facesHdr.DataSize = 18;
        writer.SerializeChunkHeader(facesHdr, "FACE3200");
        for (var i = 0; i < numSections; i++) {
            for (var j = 0; j < sections[i].NumFaces; j++) {
                var wedgeIndex = new int[3];
                for (var k = 0; k < wedgeIndex.Length; k++) {
                    wedgeIndex[k] = indices[sections[i].FirstIndex + j * 3 + k];
                }
                writer.Write(wedgeIndex[1]); // MIRROR_MESH
                writer.Write(wedgeIndex[0]); // MIRROR_MESH
                writer.Write(wedgeIndex[2]);
                writer.Write((byte)i);  // material index
                writer.Write((byte)0);  // aux material index
                writer.Write((uint)1);  // smoothing groups
            }
        }
    }
    // MATT0000: one 88-byte VMaterial entry per section.
    matrHdr.DataCount = numSections;
    matrHdr.DataSize = 88;
    writer.SerializeChunkHeader(matrHdr, "MATT0000");
    for (var i = 0; i < numSections; i++) {
        string materialName;
        if (sections[i].Material?.Value is { } tex) {
            materialName = tex.Name;
            materialExports?.Add(new MaterialExporter(tex, true));
        }
/// <summary>
/// Serializes one skeletal-mesh LOD in PSK layout using the custom archive
/// writer: common chunks, reference skeleton (REFSKELT), raw bone influences
/// (RAWWEIGHTS), vertex colors and extra UV channels.
/// </summary>
/// <param name="lod">LOD to export.</param>
/// <param name="bones">Reference skeleton bones.</param>
/// <param name="writer">Destination archive writer.</param>
/// <param name="materialExports">Optional collector for referenced materials.</param>
private void ExportSkeletalMeshLod(CSkelMeshLod lod, CSkelMeshBone[] bones, FCustomArchiveWriter writer, List<MaterialExporter>? materialExports)
{
    // Weld vertices. The skinning hash keeps identically-placed vertices with
    // different bone/weight data from being merged.
    var share = new CVertexShare();
    share.Prepare(lod.Verts);
    foreach (var vertex in lod.Verts)
    {
        var skinHash = vertex.PackedWeights;
        for (var b = 0; b < vertex.Bone.Length; b++)
        {
            skinHash ^= (uint)vertex.Bone[b] << b;
        }
        share.AddVertex(vertex.Position, vertex.Normal, skinHash);
    }

    ExportCommonMeshData(writer, lod.Sections.Value, lod.Verts, lod.Indices.Value, share, materialExports);

    // REFSKELT: one VBone per reference-skeleton bone.
    var boneHdr = new VChunkHeader { DataCount = bones.Length, DataSize = 120 };
    writer.SerializeChunkHeader(boneHdr, "REFSKELT");
    for (var boneIndex = 0; boneIndex < bones.Length; boneIndex++)
    {
        // Children are not stored directly; count bones whose parent is this one.
        var childCount = 0;
        for (var other = 0; other < bones.Length; other++)
        {
            if (other != boneIndex && bones[other].ParentIndex == boneIndex)
            {
                childCount++;
            }
        }

        var bone = new VBone
        {
            Name = bones[boneIndex].Name.Text,
            NumChildren = childCount,
            ParentIndex = bones[boneIndex].ParentIndex,
            BonePos = new VJointPosPsk { Position = bones[boneIndex].Position, Orientation = bones[boneIndex].Orientation }
        };
        // MIRROR_MESH: flip Y (and quaternion W) for handedness conversion.
        bone.BonePos.Orientation.Y *= -1;
        bone.BonePos.Orientation.W *= -1;
        bone.BonePos.Position.Y *= -1;
        bone.Serialize(writer);
    }

    // Pass 1: count influences so the chunk header carries the exact count.
    // A bone index < 0 terminates a vertex's influence list.
    var influenceCount = 0;
    for (var p = 0; p < share.Points.Count; p++)
    {
        var vertex = lod.Verts[share.VertToWedge.Value[p]];
        for (var b = 0; b < 4; b++)
        {
            if (vertex.Bone[b] < 0)
            {
                break;
            }
            influenceCount++;
        }
    }

    var infHdr = new VChunkHeader { DataCount = influenceCount, DataSize = 12 };
    writer.SerializeChunkHeader(infHdr, "RAWWEIGHTS");

    // Pass 2: write (weight, point index, bone index) triples per influence.
    for (var p = 0; p < share.Points.Count; p++)
    {
        var vertex = lod.Verts[share.VertToWedge.Value[p]];
        var weights = vertex.UnpackWeights();
        for (var b = 0; b < 4; b++)
        {
            if (vertex.Bone[b] < 0)
            {
                break;
            }
            writer.Write(weights[b]);
            writer.Write(p);
            writer.Write((int)vertex.Bone[b]);
        }
    }

    ExportVertexColors(writer, lod.VertexColors, lod.NumVerts);
    ExportExtraUV(writer, lod.ExtraUV.Value, lod.NumVerts, lod.NumTexCoords);
}