/// <summary>
/// Evaluates the collapse cost of every edge leaving the given vertex in a
/// single working buffer, and caches the cheapest candidate on the vertex.
/// </summary>
/// <remarks>
/// Only the least-cost edge matters for the greedy reduction step, so we store
/// just the best target (collapseTo) and its cost (collapseCost) on the vertex.
/// </remarks>
float ComputeEdgeCostAtVertexForBuffer(PMWorkingData workingData, uint vertIndex)
{
    PMVertex v = workingData.vertList[(int)vertIndex];

    if (v.neighbors.Count == 0)
    {
        // Isolated vertex: there is nothing to collapse to, so retire it.
        v.NotifyRemoved();
        return v.collapseCost;
    }

    // Reset cached metrics before scanning
    v.collapseCost = float.MaxValue;
    v.collapseTo = null;

    // Scan all neighboring edges and keep the cheapest collapse target
    foreach (PMVertex candidate in v.neighbors)
    {
        float candidateCost = ComputeEdgeCollapseCost(v, candidate);
        if (v.collapseTo == null || candidateCost < v.collapseCost)
        {
            v.collapseTo = candidate;       // best collapse target so far
            v.collapseCost = candidateCost; // and the cost of taking it
        }
    }

    return v.collapseCost;
}
/// <summary>
/// Records <paramref name="n"/> as a neighbor of this vertex unless the
/// adjacency is already known.
/// </summary>
internal void AddIfNonNeighbor(PMVertex n)
{
    if (!this.neighbors.Contains(n))
    {
        this.neighbors.Add(n);
    }
}
// is edge this->src a manifold edge?
internal bool IsManifoldEdgeWith(PMVertex v)
{
    // Count triangles that use both vertices; exactly one shared
    // triangle means the edge lies on a border (manifold edge).
    ushort sharedFaces = 0;
    foreach (var face in this.faces)
    {
        if (face.HasCommonVertex(v))
        {
            ++sharedFaces;
        }
    }
    return sharedFaces == 1;
}
/// <summary>
/// Returns the face-local vertex whose common vertex matches
/// <paramref name="commonVert"/>, or null when this triangle does not use it.
/// </summary>
internal PMFaceVertex GetFaceVertexFromCommon(PMVertex commonVert)
{
    // Check each of the three corners in turn.
    for (int i = 0; i < 3; ++i)
    {
        if (this.vertex[i].commonVertex == commonVert)
        {
            return this.vertex[i];
        }
    }
    return null;
}
/// Internal method for initialising the edge collapse costs
void InitialiseEdgeCollapseCosts()
{
    worstCosts = new float[vertexData.vertexCount];

    foreach (PMWorkingData data in workingDataList)
    {
        for (int i = 0; i < data.vertList.Length; ++i)
        {
            // The tail of the list typically holds nulls for vertices that
            // were merged into a common position — fill every empty slot so
            // later passes can assume a vertex is always present.
            if (data.vertList[i] == null)
            {
                data.vertList[i] = new PMVertex();
            }

            PMVertex vert = data.vertList[i];
            vert.collapseTo = null;
            vert.collapseCost = float.MaxValue;
        }
    }
}
/// <summary>
/// Drops <paramref name="n"/> from this vertex's neighbor list, but only when
/// no remaining face still connects the two vertices.
/// </summary>
internal void RemoveIfNonNeighbor(PMVertex n)
{
    if (!neighbors.Contains(n))
    {
        return; // not recorded as a neighbor anyway
    }

    // If any surviving face still joins us, keep the adjacency.
    bool stillConnected = false;
    foreach (PMTriangle face in faces)
    {
        if (face.HasCommonVertex(n))
        {
            stillConnected = true;
            break;
        }
    }
    if (stillConnected)
    {
        return;
    }

    neighbors.Remove(n);

    if (neighbors.Count == 0 && !toBeRemoved)
    {
        // Losing the last neighbor means this vertex became isolated by the
        // collapses around it, so retire it now.
        this.NotifyRemoved();
    }
}
/// <summary>
/// Internal method for building PMWorkingData from geometry data
/// </summary>
/// <remarks>
/// Reads vertex positions straight out of the locked hardware buffer to build
/// a "common vertex" list (positions deduplicated by exact equality) and a
/// per-original-vertex face-vertex list, then reads the index buffer to build
/// the triangle list. Both hardware buffers are locked read-only and unlocked
/// before returning.
/// NOTE(review): duplicate positions are keyed by exact Vector3 equality —
/// presumably positions from the same mesh repeat bit-identically; verify if
/// this is fed generated/merged geometry.
/// </remarks>
/// <param name="vertexData">Source vertex data; must contain a Position element.</param>
/// <param name="indexData">Source index data; assumed to be a triangle list (indexCount divisible by 3).</param>
void AddWorkingData(VertexData vertexData, IndexData indexData)
{
    // Insert blank working data, then fill
    PMWorkingData work = new PMWorkingData();
    this.workingDataList.Add(work);

    // Build vertex list
    // Resize face list (this will always be this big)
    work.faceVertList = new PMFaceVertex[vertexData.vertexCount];
    // Also resize common vert list to max, to avoid reallocations
    work.vertList = new PMVertex[vertexData.vertexCount];

    // locate position element & the buffer to go with it
    VertexElement posElem = vertexData.vertexDeclaration.FindElementBySemantic(VertexElementSemantic.Position);
    HardwareVertexBuffer vbuf = vertexData.vertexBufferBinding.GetBuffer(posElem.Source);

    // lock the buffer for reading
    IntPtr bufPtr = vbuf.Lock(BufferLocking.ReadOnly);
    uint numCommon = 0;
    unsafe
    {
        byte *pVertex = (byte *)bufPtr.ToPointer();
        float * pFloat;
        Vector3 pos;
        // Map for identifying duplicate position vertices
        Dictionary <Vector3, uint> commonVertexMap = new Dictionary <Vector3, uint>();
        for (uint i = 0; i < vertexData.vertexCount; ++i, pVertex += vbuf.VertexSize)
        {
            // Position element starts posElem.Offset bytes into each vertex
            pFloat = (float *)(pVertex + posElem.Offset);
            pos.x = *pFloat++;
            pos.y = *pFloat++;
            pos.z = *pFloat++;
            work.faceVertList[(int)i] = new PMFaceVertex();
            // Try to find this position in the existing map
            if (!commonVertexMap.ContainsKey(pos))
            {
                // Doesn't exist, so create it
                PMVertex commonVert = new PMVertex();
                commonVert.SetDetails(pos, numCommon);
                commonVert.removed = false;
                commonVert.toBeRemoved = false;
                commonVert.seam = false;
                // Add it to our working set
                work.vertList[(int)numCommon] = commonVert;
                // Enter it in the map
                commonVertexMap.Add(pos, numCommon);
                // Increment common index
                ++numCommon;
                work.faceVertList[(int)i].commonVertex = commonVert;
                work.faceVertList[(int)i].realIndex = i;
            }
            else
            {
                // Exists already, reference it
                PMVertex existingVert = work.vertList[(int)commonVertexMap[pos]];
                work.faceVertList[(int)i].commonVertex = existingVert;
                work.faceVertList[(int)i].realIndex = i;
                // Also tag original as a seam since duplicates at this location
                work.faceVertList[(int)i].commonVertex.seam = true;
            }
        }
    }
    vbuf.Unlock();

    numCommonVertices = numCommon;

    // Build tri list
    uint numTris = (uint)indexData.indexCount / 3;
    HardwareIndexBuffer ibuf = indexData.indexBuffer;
    bool use32bitindexes = (ibuf.Type == IndexType.Size32);
    IntPtr indexBufferPtr = ibuf.Lock(BufferLocking.ReadOnly);
    unsafe
    {
        // Only one of these pointers is used, chosen by the index width
        ushort *pShort = null;
        uint * pInt = null;
        if (use32bitindexes)
        {
            pInt = (uint *)indexBufferPtr.ToPointer();
        }
        else
        {
            pShort = (ushort *)indexBufferPtr.ToPointer();
        }
        work.triList = new PMTriangle[(int)numTris]; // assumed tri list
        for (uint i = 0; i < numTris; ++i)
        {
            // use 32-bit index always since we're not storing
            uint vindex = use32bitindexes ? *pInt++ : *pShort++;
            PMFaceVertex v0 = work.faceVertList[(int)vindex];
            vindex = use32bitindexes ? *pInt++ : *pShort++;
            PMFaceVertex v1 = work.faceVertList[(int)vindex];
            vindex = use32bitindexes ? *pInt++ : *pShort++;
            PMFaceVertex v2 = work.faceVertList[(int)vindex];
            work.triList[(int)i] = new PMTriangle();
            work.triList[(int)i].SetDetails(i, v0, v1, v2);
            work.triList[(int)i].removed = false;
        }
    }
    ibuf.Unlock();
}
/// <summary>
/// Internal calculation method for deriving a collapse cost from u to v.
/// </summary>
/// <param name="src">Vertex that would be moved (collapsed away).</param>
/// <param name="dest">Vertex that src would be merged onto.</param>
/// <returns>
/// A non-negative cost; 1.0 marks strongly discouraged collapses (border
/// pinches, seam rips) and float.MaxValue marks forbidden ones (shape
/// destruction, normal flips).
/// </returns>
float ComputeEdgeCollapseCost(PMVertex src, PMVertex dest)
{
    // if we collapse edge uv by moving src to dest then how
    // much different will the model change, i.e. how much "error".
    // The method of determining cost was designed in order
    // to exploit small and coplanar regions for
    // effective polygon reduction.
    Vector3 edgeVector = src.position - dest.position;

    float cost;
    float curvature = 0.001f; // minimum curvature floor so flat areas still rank

    // find the "sides" triangles that are on the edge uv
    List<PMTriangle> sides = new List<PMTriangle>();
    // Iterate over src's faces and find 'sides' of the shared edge which is being collapsed
    foreach (PMTriangle srcFace in src.faces)
    {
        // Check if this tri also has dest in it (shared edge)
        if (srcFace.HasCommonVertex(dest))
            sides.Add(srcFace);
    }

    // Special cases
    // If we're looking at a border vertex
    if (src.IsBorder)
    {
        if (sides.Count > 1)
        {
            // src is on a border, but the src-dest edge has more than one tri on it
            // So it must be collapsing inwards
            // Mark as very high-value cost
            // curvature = 1.0f;
            cost = 1.0f;
        }
        else
        {
            // Collapsing ALONG a border
            // We can't use curvature to measure the effect on the model
            // Instead, see what effect it has on 'pulling' the other border edges
            // The more colinear, the less effect it will have
            // So measure the 'kinkiness' (for want of a better term)
            // Normally there can be at most 1 other border edge attached to this
            // However in weird cases there may be more, so find the worst
            Vector3 collapseEdge, otherBorderEdge;
            float kinkiness, maxKinkiness;
            maxKinkiness = 0.0f;
            edgeVector.Normalize(); // NOTE(review): relies on in-place normalize
            collapseEdge = edgeVector;
            foreach (PMVertex neighbor in src.neighbors)
            {
                if (neighbor != dest && neighbor.IsManifoldEdgeWith(src))
                {
                    otherBorderEdge = src.position - neighbor.position;
                    otherBorderEdge.Normalize();
                    // This time, the nearer the dot is to -1, the better, because that means
                    // the edges are opposite each other, therefore less kinkiness
                    // Scale into [0..1]
                    kinkiness = (otherBorderEdge.Dot(collapseEdge) + 1.002f) * 0.5f;
                    maxKinkiness = (float)Math.Max(kinkiness, maxKinkiness);
                }
            }
            cost = maxKinkiness;
        }
    }
    else // not a border
    {
        // Standard inner vertex
        // Calculate curvature
        // use the triangle facing most away from the sides
        // to determine our curvature term
        // Iterate over src's faces again
        foreach (PMTriangle srcFace in src.faces)
        {
            float mincurv = 1.0f; // curve for face i and closer side to it
            // Iterate over the sides
            foreach (PMTriangle sideFace in sides)
            {
                // Dot product of face normal gives a good delta angle
                float dotprod = srcFace.normal.Dot(sideFace.normal);
                // NB we do (1-..) to invert curvature where 1 is high curvature [0..1]
                // Whilst dot product is high when angle difference is low
                mincurv = (float)Math.Min(mincurv, (1.002f - dotprod) * 0.5f);
            }
            curvature = (float)Math.Max(curvature, mincurv);
        }
        cost = curvature;
    }

    // check for texture seam ripping
    if (src.seam && !dest.seam)
        cost = 1.0f;

    // Check for singular triangle destruction
    // If src and dest both only have 1 triangle (and it must be a shared one)
    // then this would destroy the shape, so don't do this
    if (src.faces.Count == 1 && dest.faces.Count == 1)
        cost = float.MaxValue;

    // Degenerate case check
    // Are we going to invert a face normal of one of the neighbouring faces?
    // Can occur when we have a very small remaining edge and collapse crosses it
    // Look for a face normal changing by > 90 degrees
    foreach (PMTriangle srcFace in src.faces)
    {
        // Ignore the deleted faces (those including src & dest)
        if (!srcFace.HasCommonVertex(dest))
        {
            // Test the new face normal
            PMVertex v0, v1, v2;
            // Replace src with dest wherever it is
            v0 = (srcFace.vertex[0].commonVertex == src) ? dest : srcFace.vertex[0].commonVertex;
            v1 = (srcFace.vertex[1].commonVertex == src) ? dest : srcFace.vertex[1].commonVertex;
            v2 = (srcFace.vertex[2].commonVertex == src) ? dest : srcFace.vertex[2].commonVertex;

            // Cross-product 2 edges
            Vector3 e1 = v1.position - v0.position;
            Vector3 e2 = v2.position - v1.position;

            Vector3 newNormal = e1.Cross(e2);
            newNormal.Normalize();

            // Dot old and new face normal
            // If < 0 then more than 90 degree difference
            if (newNormal.Dot(srcFace.normal) < 0.0f)
            {
                // Don't do it!
                cost = float.MaxValue;
                break; // No point continuing
            }
        }
    }

    Debug.Assert(cost >= 0);
    return cost;
}
/// <summary>
/// Removes <paramref name="n"/> from the neighbor list once no surviving face
/// still connects the two vertices.
/// </summary>
internal void RemoveIfNonNeighbor(PMVertex n)
{
    if (!neighbors.Contains(n))
    {
        return; // never was a neighbor
    }

    foreach (PMTriangle face in faces)
    {
        if (face.HasCommonVertex(n))
        {
            return; // a face still joins us — keep the adjacency
        }
    }

    neighbors.Remove(n);

    // An empty neighbor list means the surrounding collapses isolated this
    // vertex; retire it unless it is already flagged for removal.
    if (neighbors.Count == 0 && !toBeRemoved)
    {
        this.NotifyRemoved();
    }
}
/// <summary>
/// True when <paramref name="v"/> is the common vertex behind any of this
/// triangle's three corners.
/// </summary>
internal bool HasCommonVertex(PMVertex v)
{
    return this.vertex[0].commonVertex == v
        || this.vertex[1].commonVertex == v
        || this.vertex[2].commonVertex == v;
}
// is edge this->src a manifold edge?
internal bool IsManifoldEdgeWith(PMVertex v)
{
    // A border (manifold) edge is shared by exactly one triangle, so bail
    // out as soon as a second shared face shows up.
    int shared = 0;
    foreach (PMTriangle face in faces)
    {
        if (face.HasCommonVertex(v) && ++shared > 1)
        {
            return false; // interior edge: two (or more) shared faces
        }
    }
    return shared == 1;
}
/// <summary>
/// Detaches this vertex from the mesh: unlinks it from every neighbor's
/// adjacency list, marks it removed, and clears its collapse candidacy.
/// </summary>
internal void NotifyRemoved()
{
    foreach (PMVertex neighbor in neighbors)
    {
        // Take ourselves out of the neighbor's adjacency list
        neighbor.neighbors.Remove(this);
    }
    removed = true;
    this.collapseTo = null;
    this.collapseCost = float.MaxValue;
}
/// <summary>
/// Builds the progressive mesh with the specified number of levels.
/// </summary>
/// <param name="numLevels">Number of LOD levels to bake; one IndexData is added per level even if reduction is abandoned.</param>
/// <param name="lodFaceList">Receives one baked IndexData per level.</param>
/// <param name="quota">Whether reductionValue is a proportion of remaining verts or an absolute collapse count.</param>
/// <param name="reductionValue">Per-level reduction amount, interpreted per <paramref name="quota"/>.</param>
public void Build(ushort numLevels, List <IndexData> lodFaceList, VertexReductionQuota quota, float reductionValue)
{
    ComputeAllCosts();

    // Init
    currNumIndexes = (uint)indexData.indexCount;
    // Use COMMON vert count, not original vert count
    // Since collapsing 1 common vert position is equivalent to collapsing them all
    uint numVerts = numCommonVertices;

    uint numCollapses = 0;
    bool abandon = false;
    while (numLevels-- != 0)
    {
        // NB if 'abandon' is set, we stop reducing
        // However, we still bake the number of LODs requested, even if it
        // means they are the same
        if (!abandon)
        {
            if (quota == VertexReductionQuota.Proportional)
            {
                numCollapses = (uint)(numVerts * reductionValue);
            }
            else
            {
                numCollapses = (uint)reductionValue;
            }
            // Minimum 3 verts!
            if ((numVerts - numCollapses) < 3)
            {
                numCollapses = numVerts - 3;
            }
            // Store new number of verts
            numVerts = numVerts - numCollapses;
            Debug.Assert(numVerts >= 3);
            while (numCollapses-- != 0 && !abandon)
            {
                int nextIndex = GetNextCollapser();
                // Collapse on every buffer
                foreach (PMWorkingData data in workingDataList)
                {
                    PMVertex collapser = data.vertList[nextIndex];
                    // This will reduce currNumIndexes and recalc costs as required
                    if (collapser.collapseTo == null)
                    {
                        // Must have run out of valid collapsables
                        abandon = true;
                        break; // NOTE: breaks buffer loop; outer loop exits via !abandon
                    }
                    Debug.Assert(collapser.collapseTo.removed == false);
                    Collapse(collapser);
                }
            }
        }
        // Bake a new LOD and add it to the list
        IndexData newLod = new IndexData();
        BakeNewLOD(newLod);
        lodFaceList.Add(newLod);
    }
}
/// <summary>
/// Adds <paramref name="n"/> to the neighbor list, ignoring duplicates.
/// </summary>
internal void AddIfNonNeighbor(PMVertex n)
{
    if (!neighbors.Contains(n))
    {
        neighbors.Add(n);
    }
}
/// <summary>
/// Internal calculation method for deriving a collapse cost from u to v.
/// </summary>
/// <param name="src">Vertex that would be moved (collapsed away).</param>
/// <param name="dest">Vertex that src would be merged onto.</param>
/// <returns>
/// Non-negative cost; 1.0 flags strongly discouraged collapses and
/// float.MaxValue flags forbidden ones.
/// </returns>
private float ComputeEdgeCollapseCost(PMVertex src, PMVertex dest)
{
    // if we collapse edge uv by moving src to dest then how
    // much different will the model change, i.e. how much "error".
    // The method of determining cost was designed in order
    // to exploit small and coplanar regions for
    // effective polygon reduction.
    var edgeVector = src.position - dest.position;

    float cost;
    var curvature = 0.001f; // minimum curvature floor so flat areas still rank

    // find the "sides" triangles that are on the edge uv
    var sides = new List <PMTriangle>();
    // Iterate over src's faces and find 'sides' of the shared edge which is being collapsed
    foreach (var srcFace in src.faces)
    {
        // Check if this tri also has dest in it (shared edge)
        if (srcFace.HasCommonVertex(dest))
        {
            sides.Add(srcFace);
        }
    }

    // Special cases
    // If we're looking at a border vertex
    if (src.IsBorder)
    {
        if (sides.Count > 1)
        {
            // src is on a border, but the src-dest edge has more than one tri on it
            // So it must be collapsing inwards
            // Mark as very high-value cost
            // curvature = 1.0f;
            cost = 1.0f;
        }
        else
        {
            // Collapsing ALONG a border
            // We can't use curvature to measure the effect on the model
            // Instead, see what effect it has on 'pulling' the other border edges
            // The more colinear, the less effect it will have
            // So measure the 'kinkiness' (for want of a better term)
            // Normally there can be at most 1 other border edge attached to this
            // However in weird cases there may be more, so find the worst
            Vector3 collapseEdge, otherBorderEdge;
            float kinkiness, maxKinkiness;
            maxKinkiness = 0.0f;
            edgeVector.Normalize(); // NOTE(review): relies on in-place normalize
            collapseEdge = edgeVector;
            foreach (var neighbor in src.neighbors)
            {
                if (neighbor != dest && neighbor.IsManifoldEdgeWith(src))
                {
                    otherBorderEdge = src.position - neighbor.position;
                    otherBorderEdge.Normalize();
                    // This time, the nearer the dot is to -1, the better, because that means
                    // the edges are opposite each other, therefore less kinkiness
                    // Scale into [0..1]
                    kinkiness = (otherBorderEdge.Dot(collapseEdge) + 1.002f) * 0.5f;
                    maxKinkiness = Utility.Max(kinkiness, maxKinkiness);
                }
            }
            cost = maxKinkiness;
        }
    }
    else // not a border
    {
        // Standard inner vertex
        // Calculate curvature
        // use the triangle facing most away from the sides
        // to determine our curvature term
        // Iterate over src's faces again
        foreach (var srcFace in src.faces)
        {
            var mincurv = 1.0f; // curve for face i and closer side to it
            // Iterate over the sides
            foreach (var sideFace in sides)
            {
                // Dot product of face normal gives a good delta angle
                float dotprod = srcFace.normal.Dot(sideFace.normal);
                // NB we do (1-..) to invert curvature where 1 is high curvature [0..1]
                // Whilst dot product is high when angle difference is low
                mincurv = Utility.Min(mincurv, (1.002f - dotprod) * 0.5f);
            }
            curvature = Utility.Max(curvature, mincurv);
        }
        cost = curvature;
    }

    // check for texture seam ripping
    if (src.seam && !dest.seam)
    {
        cost = 1.0f;
    }

    // Check for singular triangle destruction
    // If src and dest both only have 1 triangle (and it must be a shared one)
    // then this would destroy the shape, so don't do this
    if (src.faces.Count == 1 && dest.faces.Count == 1)
    {
        cost = float.MaxValue;
    }

    // Degenerate case check
    // Are we going to invert a face normal of one of the neighbouring faces?
    // Can occur when we have a very small remaining edge and collapse crosses it
    // Look for a face normal changing by > 90 degrees
    foreach (var srcFace in src.faces)
    {
        // Ignore the deleted faces (those including src & dest)
        if (!srcFace.HasCommonVertex(dest))
        {
            // Test the new face normal
            PMVertex v0, v1, v2;
            // Replace src with dest wherever it is
            v0 = (srcFace.vertex[0].commonVertex == src) ? dest : srcFace.vertex[0].commonVertex;
            v1 = (srcFace.vertex[1].commonVertex == src) ? dest : srcFace.vertex[1].commonVertex;
            v2 = (srcFace.vertex[2].commonVertex == src) ? dest : srcFace.vertex[2].commonVertex;

            // Cross-product 2 edges
            var e1 = v1.position - v0.position;
            var e2 = v2.position - v1.position;

            var newNormal = e1.Cross(e2);
            newNormal.Normalize();

            // Dot old and new face normal
            // If < 0 then more than 90 degree difference
            if (newNormal.Dot(srcFace.normal) < 0.0f)
            {
                // Don't do it!
                cost = float.MaxValue;
                break; // No point continuing
            }
        }
    }

    Debug.Assert(cost >= 0);
    return(cost);
}
/// <summary>
/// Finds the face-local vertex corresponding to the given common vertex, or
/// null when this triangle does not reference it.
/// </summary>
internal PMFaceVertex GetFaceVertexFromCommon( PMVertex commonVert )
{
    // Scan the three corners for the one mapped onto this common vertex.
    for ( var i = 0; i < 3; ++i )
    {
        if ( this.vertex[ i ].commonVertex == commonVert )
        {
            return this.vertex[ i ];
        }
    }
    return null;
}
/// <summary>
/// Internal method, collapses vertex onto it's saved collapse target.
/// </summary>
/// <remarks>
/// This updates the working triangle list to drop a triangle and recalculates
/// the edge collapse costs around the collapse target.
/// This also updates all the working vertex lists for the relevant buffer.
/// Order matters here: src's candidacy is cleared first, faces are queued
/// (not mutated) during iteration, and costs are recomputed only at the end.
/// </remarks>
/// <param name="src">the collapser</param>
void Collapse(PMVertex src)
{
    PMVertex dest = src.collapseTo;
    List<PMVertex> recomputeSet = new List<PMVertex>();

    // Abort if we're never supposed to collapse
    if (src.collapseCost == float.MaxValue)
        return;

    // Remove this vertex from the running for the next check
    src.collapseTo = null;
    src.collapseCost = float.MaxValue;
    worstCosts[(int)src.index] = float.MaxValue;

    // Collapse the edge uv by moving vertex u onto v
    // Actually remove tris on uv, then update tris that
    // have u to have v, and then remove u.
    if (dest == null)
    {
        // src is a vertex all by itself
        return;
    }

    // Add dest and all the neighbours of source and dest to recompute list
    recomputeSet.Add(dest);
    // PMVertex temp; (unused)
    foreach (PMVertex neighbor in src.neighbors)
    {
        if (!recomputeSet.Contains(neighbor)) recomputeSet.Add(neighbor);
    }
    foreach (PMVertex neighbor in dest.neighbors)
    {
        if (!recomputeSet.Contains(neighbor)) recomputeSet.Add(neighbor);
    }

    // delete triangles on edge src-dest
    // Notify others to replace src with dest
    // Queue of faces for removal / replacement
    // prevents us screwing up the iterators while we parse
    List<PMTriangle> faceRemovalList = new List<PMTriangle>();
    List<PMTriangle> faceReplacementList = new List<PMTriangle>();
    foreach (PMTriangle face in src.faces)
    {
        if (face.HasCommonVertex(dest))
        {
            // Tri is on src-dest therefore is gone
            faceRemovalList.Add(face);
            // Reduce index count by 3 (useful for quick allocation later)
            currNumIndexes -= 3;
        }
        else
        {
            // Only src involved, replace with dest
            faceReplacementList.Add(face);
        }
    }

    src.toBeRemoved = true;
    // Replace all the faces queued for replacement
    foreach (PMTriangle face in faceReplacementList)
    {
        /* Locate the face vertex which corresponds with the common 'dest' vertex
           To to this, find a removed face which has the FACE vertex corresponding with
           src, and use it's FACE vertex version of dest. */
        PMFaceVertex srcFaceVert = face.GetFaceVertexFromCommon(src);
        PMFaceVertex destFaceVert = null;
        foreach (PMTriangle removed in faceRemovalList)
        {
            // NOTE(review): original filter deliberately disabled — last
            // removed face's dest face-vertex wins. Left as-is.
            //if (removed.HasFaceVertex(srcFaceVert))
            //{
            destFaceVert = removed.GetFaceVertexFromCommon(dest);
            //}
        }
        Debug.Assert(destFaceVert != null);
        face.ReplaceVertex(srcFaceVert, destFaceVert);
    }
    // Remove all the faces queued for removal
    foreach (PMTriangle face in faceRemovalList)
    {
        face.NotifyRemoved();
    }

    // Notify the vertex that it is gone
    src.NotifyRemoved();

    // recompute costs
    foreach (PMVertex recomp in recomputeSet)
    {
        ComputeEdgeCostAtVertex(recomp.index);
    }
}
/// <summary>
/// Internal method for building PMWorkingData from geometry data
/// </summary>
/// <remarks>
/// Deduplicates vertex positions into a "common vertex" list while recording
/// one face-vertex per original vertex, then reads the (assumed) triangle-list
/// index buffer to build the working triangle list. Both hardware buffers are
/// locked read-only and unlocked before returning.
/// </remarks>
/// <param name="vertexData">Source vertex data; must contain a Position element.</param>
/// <param name="indexData">Source index data; indexCount assumed divisible by 3.</param>
void AddWorkingData(VertexData vertexData, IndexData indexData)
{
    // Insert blank working data, then fill
    PMWorkingData work = new PMWorkingData();
    this.workingDataList.Add(work);

    // Build vertex list
    // Resize face list (this will always be this big)
    work.faceVertList = new PMFaceVertex[vertexData.vertexCount];
    // Also resize common vert list to max, to avoid reallocations
    work.vertList = new PMVertex[vertexData.vertexCount];

    // locate position element & the buffer to go with it
    VertexElement posElem = vertexData.vertexDeclaration.FindElementBySemantic(VertexElementSemantic.Position);
    HardwareVertexBuffer vbuf = vertexData.vertexBufferBinding.GetBuffer(posElem.Source);

    // lock the buffer for reading
    IntPtr bufPtr = vbuf.Lock(BufferLocking.ReadOnly);
    uint numCommon = 0;
    unsafe
    {
        byte *pVertex = (byte *)bufPtr.ToPointer();
        float* pFloat;
        Vector3 pos;
        // Map for identifying duplicate position vertices
        Dictionary<Vector3, uint> commonVertexMap = new Dictionary<Vector3,uint>();
        for (uint i = 0; i < vertexData.vertexCount; ++i, pVertex += vbuf.VertexSize)
        {
            // Position element starts posElem.Offset bytes into each vertex
            pFloat = (float *)(pVertex + posElem.Offset);
            pos.x = *pFloat++;
            pos.y = *pFloat++;
            pos.z = *pFloat++;
            work.faceVertList[(int)i] = new PMFaceVertex();
            // Try to find this position in the existing map
            if (!commonVertexMap.ContainsKey(pos))
            {
                // Doesn't exist, so create it
                PMVertex commonVert = new PMVertex();
                commonVert.SetDetails(pos, numCommon);
                commonVert.removed = false;
                commonVert.toBeRemoved = false;
                commonVert.seam = false;
                // Add it to our working set
                work.vertList[(int)numCommon] = commonVert;
                // Enter it in the map
                commonVertexMap.Add(pos, numCommon);
                // Increment common index
                ++numCommon;
                work.faceVertList[(int)i].commonVertex = commonVert;
                work.faceVertList[(int)i].realIndex = i;
            }
            else
            {
                // Exists already, reference it
                PMVertex existingVert = work.vertList[(int)commonVertexMap[pos]];
                work.faceVertList[(int)i].commonVertex = existingVert;
                work.faceVertList[(int)i].realIndex = i;
                // Also tag original as a seam since duplicates at this location
                work.faceVertList[(int)i].commonVertex.seam = true;
            }
        }
    }
    vbuf.Unlock();

    numCommonVertices = numCommon;

    // Build tri list
    uint numTris = (uint)indexData.indexCount / 3;
    HardwareIndexBuffer ibuf = indexData.indexBuffer;
    bool use32bitindexes = (ibuf.Type == IndexType.Size32);
    IntPtr indexBufferPtr = ibuf.Lock(BufferLocking.ReadOnly);
    unsafe
    {
        // Only one of these pointers is used, chosen by the index width
        ushort* pShort = null;
        uint* pInt = null;
        if (use32bitindexes)
        {
            pInt = (uint *)indexBufferPtr.ToPointer();
        }
        else
        {
            pShort = (ushort *)indexBufferPtr.ToPointer();
        }
        work.triList = new PMTriangle[(int)numTris]; // assumed tri list
        for (uint i = 0; i < numTris; ++i)
        {
            // use 32-bit index always since we're not storing
            uint vindex = use32bitindexes ? *pInt++ : *pShort++;
            PMFaceVertex v0 = work.faceVertList[(int)vindex];
            vindex = use32bitindexes ? *pInt++ : *pShort++;
            PMFaceVertex v1 = work.faceVertList[(int)vindex];
            vindex = use32bitindexes ? *pInt++ : *pShort++;
            PMFaceVertex v2 = work.faceVertList[(int)vindex];
            work.triList[(int)i] = new PMTriangle();
            work.triList[(int)i].SetDetails(i, v0, v1, v2);
            work.triList[(int)i].removed = false;
        }
    }
    ibuf.Unlock();
}
/// <summary>
/// Internal method for building PMWorkingData from geometry data
/// </summary>
/// <remarks>
/// Safe/unsafe dual-mode version: under AXIOM_SAFE_ONLY the pointer wrappers
/// (ToFloatPointer / ToUIntPointer / ToUShortPointer) replace raw pointer
/// arithmetic. Deduplicates positions into a common-vertex list, then builds
/// the working triangle list from the (assumed) triangle-list index buffer.
/// </remarks>
/// <param name="vertexData">Source vertex data; must contain a Position element.</param>
/// <param name="indexData">Source index data; indexCount assumed divisible by 3.</param>
private void AddWorkingData( VertexData vertexData, IndexData indexData )
{
    // Insert blank working data, then fill
    var work = new PMWorkingData();
    this.workingDataList.Add( work );

    // Build vertex list
    // Resize face list (this will always be this big)
    work.faceVertList = new PMFaceVertex[vertexData.vertexCount];
    // Also resize common vert list to max, to avoid reallocations
    work.vertList = new PMVertex[vertexData.vertexCount];

    // locate position element & the buffer to go with it
    var posElem = vertexData.vertexDeclaration.FindElementBySemantic( VertexElementSemantic.Position );
    var vbuf = vertexData.vertexBufferBinding.GetBuffer( posElem.Source );

    // lock the buffer for reading
    var bufPtr = vbuf.Lock( BufferLocking.ReadOnly );
    uint numCommon = 0;
#if !AXIOM_SAFE_ONLY
    unsafe
#endif
    {
        var pVertex = bufPtr;
        Vector3 pos;
        // Map for identifying duplicate position vertices
        var commonVertexMap = new Dictionary<Vector3, uint>();
        for ( uint i = 0; i < vertexData.vertexCount; ++i, pVertex += vbuf.VertexSize )
        {
            // Position element starts posElem.Offset bytes into each vertex
            var pFloat = ( pVertex + posElem.Offset ).ToFloatPointer();
            pos.x = pFloat[ 0 ];
            pos.y = pFloat[ 1 ];
            pos.z = pFloat[ 2 ];
            work.faceVertList[ (int)i ] = new PMFaceVertex();
            // Try to find this position in the existing map
            if ( !commonVertexMap.ContainsKey( pos ) )
            {
                // Doesn't exist, so create it
                var commonVert = new PMVertex();
                commonVert.SetDetails( pos, numCommon );
                commonVert.removed = false;
                commonVert.toBeRemoved = false;
                commonVert.seam = false;
                // Add it to our working set
                work.vertList[ (int)numCommon ] = commonVert;
                // Enter it in the map
                commonVertexMap.Add( pos, numCommon );
                // Increment common index
                ++numCommon;
                work.faceVertList[ (int)i ].commonVertex = commonVert;
                work.faceVertList[ (int)i ].realIndex = i;
            }
            else
            {
                // Exists already, reference it
                var existingVert = work.vertList[ (int)commonVertexMap[ pos ] ];
                work.faceVertList[ (int)i ].commonVertex = existingVert;
                work.faceVertList[ (int)i ].realIndex = i;
                // Also tag original as a seam since duplicates at this location
                work.faceVertList[ (int)i ].commonVertex.seam = true;
            }
        }
    }
    vbuf.Unlock();

    this.numCommonVertices = numCommon;

    // Build tri list
    var numTris = (uint)indexData.indexCount/3;
    var ibuf = indexData.indexBuffer;
    var use32bitindexes = ( ibuf.Type == IndexType.Size32 );
    var indexBufferPtr = ibuf.Lock( BufferLocking.ReadOnly );
#if !AXIOM_SAFE_ONLY
    unsafe
#endif
    {
        // Both views are created; use32bitindexes selects which one is read
        var pInt = indexBufferPtr.ToUIntPointer();
        var pShort = indexBufferPtr.ToUShortPointer();
        var idx = 0;
        work.triList = new PMTriangle[(int)numTris]; // assumed tri list
        for ( uint i = 0; i < numTris; ++i )
        {
            // use 32-bit index always since we're not storing
            var vindex = use32bitindexes ? pInt[ idx++ ] : pShort[ idx++ ];
            var v0 = work.faceVertList[ (int)vindex ];
            vindex = use32bitindexes ? pInt[ idx++ ] : pShort[ idx++ ];
            var v1 = work.faceVertList[ (int)vindex ];
            vindex = use32bitindexes ? pInt[ idx++ ] : pShort[ idx++ ];
            var v2 = work.faceVertList[ (int)vindex ];
            work.triList[ (int)i ] = new PMTriangle();
            work.triList[ (int)i ].SetDetails( i, v0, v1, v2 );
            work.triList[ (int)i ].removed = false;
        }
    }
    ibuf.Unlock();
}
/// <summary>
/// Internal method for building PMWorkingData from geometry data
/// </summary>
/// <remarks>
/// Safe/unsafe dual-mode version (AXIOM_SAFE_ONLY drops the unsafe block and
/// relies on the pointer-wrapper accessors). Builds the deduplicated common
/// vertex list, then the working triangle list from the index buffer.
/// </remarks>
/// <param name="vertexData">Source vertex data; must contain a Position element.</param>
/// <param name="indexData">Source index data; indexCount assumed divisible by 3.</param>
private void AddWorkingData(VertexData vertexData, IndexData indexData)
{
    // Insert blank working data, then fill
    var work = new PMWorkingData();
    this.workingDataList.Add(work);

    // Build vertex list
    // Resize face list (this will always be this big)
    work.faceVertList = new PMFaceVertex[vertexData.vertexCount];
    // Also resize common vert list to max, to avoid reallocations
    work.vertList = new PMVertex[vertexData.vertexCount];

    // locate position element & the buffer to go with it
    var posElem = vertexData.vertexDeclaration.FindElementBySemantic(VertexElementSemantic.Position);
    var vbuf = vertexData.vertexBufferBinding.GetBuffer(posElem.Source);

    // lock the buffer for reading
    var bufPtr = vbuf.Lock(BufferLocking.ReadOnly);
    uint numCommon = 0;
#if !AXIOM_SAFE_ONLY
    unsafe
#endif
    {
        var pVertex = bufPtr;
        Vector3 pos;
        // Map for identifying duplicate position vertices
        var commonVertexMap = new Dictionary <Vector3, uint>();
        for (uint i = 0; i < vertexData.vertexCount; ++i, pVertex += vbuf.VertexSize)
        {
            // Position element starts posElem.Offset bytes into each vertex
            var pFloat = (pVertex + posElem.Offset).ToFloatPointer();
            pos.x = pFloat[0];
            pos.y = pFloat[1];
            pos.z = pFloat[2];
            work.faceVertList[(int)i] = new PMFaceVertex();
            // Try to find this position in the existing map
            if (!commonVertexMap.ContainsKey(pos))
            {
                // Doesn't exist, so create it
                var commonVert = new PMVertex();
                commonVert.SetDetails(pos, numCommon);
                commonVert.removed = false;
                commonVert.toBeRemoved = false;
                commonVert.seam = false;
                // Add it to our working set
                work.vertList[(int)numCommon] = commonVert;
                // Enter it in the map
                commonVertexMap.Add(pos, numCommon);
                // Increment common index
                ++numCommon;
                work.faceVertList[(int)i].commonVertex = commonVert;
                work.faceVertList[(int)i].realIndex = i;
            }
            else
            {
                // Exists already, reference it
                var existingVert = work.vertList[(int)commonVertexMap[pos]];
                work.faceVertList[(int)i].commonVertex = existingVert;
                work.faceVertList[(int)i].realIndex = i;
                // Also tag original as a seam since duplicates at this location
                work.faceVertList[(int)i].commonVertex.seam = true;
            }
        }
    }
    vbuf.Unlock();

    this.numCommonVertices = numCommon;

    // Build tri list
    var numTris = (uint)indexData.indexCount / 3;
    var ibuf = indexData.indexBuffer;
    var use32bitindexes = (ibuf.Type == IndexType.Size32);
    var indexBufferPtr = ibuf.Lock(BufferLocking.ReadOnly);
#if !AXIOM_SAFE_ONLY
    unsafe
#endif
    {
        // Both views are created; use32bitindexes selects which one is read
        var pInt = indexBufferPtr.ToUIntPointer();
        var pShort = indexBufferPtr.ToUShortPointer();
        var idx = 0;
        work.triList = new PMTriangle[(int)numTris]; // assumed tri list
        for (uint i = 0; i < numTris; ++i)
        {
            // use 32-bit index always since we're not storing
            var vindex = use32bitindexes ? pInt[idx++] : pShort[idx++];
            var v0 = work.faceVertList[(int)vindex];
            vindex = use32bitindexes ? pInt[idx++] : pShort[idx++];
            var v1 = work.faceVertList[(int)vindex];
            vindex = use32bitindexes ? pInt[idx++] : pShort[idx++];
            var v2 = work.faceVertList[(int)vindex];
            work.triList[(int)i] = new PMTriangle();
            work.triList[(int)i].SetDetails(i, v0, v1, v2);
            work.triList[(int)i].removed = false;
        }
    }
    ibuf.Unlock();
}
/// <summary>
/// Maps a common (position-deduplicated) vertex back to this triangle's
/// face-local vertex, or null when the triangle does not use it.
/// </summary>
internal PMFaceVertex GetFaceVertexFromCommon(PMVertex commonVert)
{
    foreach (PMFaceVertex corner in vertex)
    {
        if (corner.commonVertex == commonVert)
        {
            return corner;
        }
    }
    return null;
}
/// <summary>
/// Internal method, collapses vertex onto it's saved collapse target.
/// </summary>
/// <remarks>
/// This updates the working triangle list to drop a triangle and recalculates
/// the edge collapse costs around the collapse target.
/// This also updates all the working vertex lists for the relevant buffer.
/// Ordering is deliberate: src's candidacy is cleared first, faces are queued
/// rather than mutated during iteration, and costs are recomputed last.
/// </remarks>
/// <param name="src">the collapser</param>
private void Collapse(PMVertex src)
{
    var dest = src.collapseTo;
    var recomputeSet = new List <PMVertex>();

    // Abort if we're never supposed to collapse
    if (src.collapseCost == float.MaxValue)
    {
        return;
    }

    // Remove this vertex from the running for the next check
    src.collapseTo = null;
    src.collapseCost = float.MaxValue;
    this.worstCosts[(int)src.index] = float.MaxValue;

    // Collapse the edge uv by moving vertex u onto v
    // Actually remove tris on uv, then update tris that
    // have u to have v, and then remove u.
    if (dest == null)
    {
        // src is a vertex all by itself
        return;
    }

    // Add dest and all the neighbours of source and dest to recompute list
    recomputeSet.Add(dest);
    foreach (var neighbor in src.neighbors)
    {
        if (!recomputeSet.Contains(neighbor))
        {
            recomputeSet.Add(neighbor);
        }
    }
    foreach (var neighbor in dest.neighbors)
    {
        if (!recomputeSet.Contains(neighbor))
        {
            recomputeSet.Add(neighbor);
        }
    }

    // delete triangles on edge src-dest
    // Notify others to replace src with dest
    // Queue of faces for removal / replacement
    // prevents us screwing up the iterators while we parse
    var faceRemovalList = new List <PMTriangle>();
    var faceReplacementList = new List <PMTriangle>();
    foreach (var face in src.faces)
    {
        if (face.HasCommonVertex(dest))
        {
            // Tri is on src-dest therefore is gone
            faceRemovalList.Add(face);
            // Reduce index count by 3 (useful for quick allocation later)
            this.currNumIndexes -= 3;
        }
        else
        {
            // Only src involved, replace with dest
            faceReplacementList.Add(face);
        }
    }

    src.toBeRemoved = true;
    // Replace all the faces queued for replacement
    foreach (var face in faceReplacementList)
    {
        /* Locate the face vertex which corresponds with the common 'dest' vertex
         * To to this, find a removed face which has the FACE vertex corresponding with
         * src, and use it's FACE vertex version of dest.
         */
        var srcFaceVert = face.GetFaceVertexFromCommon(src);
        PMFaceVertex destFaceVert = null;
        foreach (var removed in faceRemovalList)
        {
            // NOTE(review): original filter deliberately disabled — last
            // removed face's dest face-vertex wins. Left as-is.
            //if (removed.HasFaceVertex(srcFaceVert))
            //{
            destFaceVert = removed.GetFaceVertexFromCommon(dest);
            //}
        }
        Debug.Assert(destFaceVert != null);
        face.ReplaceVertex(srcFaceVert, destFaceVert);
    }
    // Remove all the faces queued for removal
    foreach (var face in faceRemovalList)
    {
        face.NotifyRemoved();
    }

    // Notify the vertex that it is gone
    src.NotifyRemoved();

    // recompute costs
    foreach (var recomp in recomputeSet)
    {
        ComputeEdgeCostAtVertex(recomp.index);
    }
}
/// <summary>
/// Checks whether any of this triangle's three corners maps onto the given
/// common vertex.
/// </summary>
internal bool HasCommonVertex(PMVertex v)
{
    foreach (PMFaceVertex corner in vertex)
    {
        if (corner.commonVertex == v)
        {
            return true;
        }
    }
    return false;
}