/// <summary>
/// Merge any positions in the <see cref="PositionCollection"/> of the
/// specified mesh that are at a distance less than the specified tolerance
/// from each other.
/// </summary>
/// <param name="mesh">Mesh to be processed.</param>
/// <param name="tolerance">Tolerance value that determines how close
/// positions must be to each other to be merged.</param>
/// <exception cref="ArgumentNullException"><paramref name="mesh"/> is null.</exception>
/// <remarks>
/// This method will also update the <see cref="VertexContent.PositionIndices"/>
/// in the <see cref="GeometryContent"/> of the specified mesh.
/// </remarks>
public static void MergeDuplicatePositions(MeshContent mesh, float tolerance)
{
    if (mesh == null)
    {
        throw new ArgumentNullException(nameof(mesh));
    }

    // TODO Improve performance with spatial partitioning scheme

    // Collect an index-update list per geometry so every geometry's
    // PositionIndices can be remapped when a position is merged away.
    var indexLists = new List<IndexUpdateList>();
    foreach (var geom in mesh.Geometry)
    {
        var list = new IndexUpdateList(geom.Vertices.PositionIndices);
        indexLists.Add(list);
    }

    // Iterate from the end so RemoveAt(i) never shifts positions we have
    // yet to visit.
    for (var i = mesh.Positions.Count - 1; i >= 1; i--)
    {
        var pi = mesh.Positions[i];
        for (var j = i - 1; j >= 0; j--)
        {
            var pj = mesh.Positions[j];
            if (Vector3.Distance(pi, pj) <= tolerance)
            {
                // Redirect all references from position i to the surviving
                // position j, then drop the duplicate.
                foreach (var list in indexLists)
                {
                    list.Update(i, j);
                }

                mesh.Positions.RemoveAt(i);

                // BUGFIX: position i no longer exists. Without this break the
                // inner loop could match again and call RemoveAt(i) a second
                // time, deleting an unrelated position that shifted into
                // slot i (or indexing past the shrunken collection).
                break;
            }
        }
    }
}
/// <summary>
/// Merge vertices with the same <see cref="VertexContent.PositionIndices"/> and
/// <see cref="VertexChannel"/> data within the specified
/// <see cref="GeometryContent"/>.
/// </summary>
/// <param name="geometry">Geometry to be processed.</param>
/// <exception cref="ArgumentNullException"><paramref name="geometry"/> is null.</exception>
public static void MergeDuplicateVertices(GeometryContent geometry)
{
    if (geometry == null)
    {
        throw new ArgumentNullException(nameof(geometry));
    }

    var verts = geometry.Vertices;

    // Buckets of previously-seen vertices keyed by content hash; only
    // content-distinct vertices are ever stored in the same bucket.
    var hashMap = new Dictionary<int, List<VertexData>>();
    var indices = new IndexUpdateList(geometry.Indices);

    // vIndex tracks the current vertex slot. It only advances when the
    // vertex is kept: a merged vertex is removed, so the next vertex
    // slides into the same slot.
    var vIndex = 0;
    for (var i = 0; i < geometry.Indices.Count; i++)
    {
        var iIndex = geometry.Indices[i];

        // Snapshot the vertex: its position index plus every channel value.
        var iData = new VertexData
        {
            Index = iIndex,
            PositionIndex = verts.PositionIndices[vIndex],
            ChannelData = new object[verts.Channels.Count]
        };
        for (var channel = 0; channel < verts.Channels.Count; channel++)
        {
            iData.ChannelData[channel] = verts.Channels[channel][vIndex];
        }

        var hash = iData.ComputeHash();
        var merged = false;
        if (hashMap.TryGetValue(hash, out var candidates))
        {
            foreach (var c in candidates)
            {
                if (!iData.ContentEquals(c))
                {
                    continue;
                }

                // Match! Update the corresponding indices and remove the vertex
                indices.Update(iIndex, c.Index);
                verts.RemoveAt(vIndex);
                merged = true;

                // BUGFIX: the vertex at vIndex has been removed, so stop
                // scanning. Continuing could call RemoveAt(vIndex) again on a
                // further match and delete an unrelated vertex; candidates
                // are content-distinct so no further match is expected anyway.
                break;
            }

            if (!merged)
            {
                // Same hash but different content: remember it as a new
                // candidate for future vertices.
                candidates.Add(iData);
            }
        }
        else
        {
            // no vertices with the same hash yet, create a new list for the data
            hashMap.Add(hash, new List<VertexData> { iData });
        }

        if (!merged)
        {
            vIndex++;
        }
    }

    // update the indices because of the vertices we removed
    indices.Pack();
}