// Guard helper: throws when the given container has NOT been allocated.
// BUG FIX: the condition was inverted — it previously threw when IsCreated
// was true, i.e. exactly when the container WAS allocated, which contradicts
// both the method name and the exception message.
internal static void CheckAllocated(HashedVertices otherHashedVertices)
{
    if (!otherHashedVertices.IsCreated)
    {
        throw new ArgumentException($"Value {otherHashedVertices} is not allocated.");
    }
}
/// <summary>
/// Copy constructor: allocates capacity matching the source container's
/// vertex and chained-index list lengths (minimum 1 when a source list
/// pointer is null), then bulk-copies both lists from the source.
/// </summary>
/// <param name="otherHashedVertices">Source container to copy from.</param>
/// <param name="allocator">Allocator used for the new container's storage.</param>
// NOTE(review): CheckAllocated only runs after the chained ":this(...)" call
// has already dereferenced the source's internal pointers — confirm whether
// the check is intended to happen this late.
public HashedVertices(HashedVertices otherHashedVertices, Allocator allocator = Allocator.Persistent)
    : this((otherHashedVertices.m_Vertices != null) ? otherHashedVertices.m_Vertices->Length : 1,
           (otherHashedVertices.m_ChainedIndices != null) ? otherHashedVertices.m_ChainedIndices->Length : 1,
           allocator, 2)
{
    CheckAllocated(otherHashedVertices);
    m_ChainedIndices->AddRangeNoResize(*otherHashedVertices.m_ChainedIndices);
    m_Vertices->AddRangeNoResize(*otherHashedVertices.m_Vertices);
}
// Returns the average position (centroid) of the polygon whose vertex
// indices occupy indices[offset .. offset + indicesCount).
static float3 FindPolygonCentroid([NoAlias] HashedVertices vertices, [NoAlias] NativeArray <ushort> indices, int offset, int indicesCount)
{
    var sum = float3.zero;
    for (int n = 0; n < indicesCount; n++)
    {
        sum += vertices[indices[offset + n]];
    }
    return sum / indicesCount;
}
// Makes sure 'hashedVertices' exists with at least 'desiredCapacity' and is
// empty: creates it if never allocated, reallocates if too small, otherwise
// just clears it in place.
// BUG FIX: the reallocation branch previously hard-coded Allocator.Temp,
// ignoring the caller-supplied 'allocator' — a container originally requested
// with a different allocator would silently come back Temp-allocated.
public static void EnsureCapacityAndClear(ref HashedVertices hashedVertices, int desiredCapacity, Allocator allocator = Allocator.Temp)
{
    if (!hashedVertices.IsCreated)
    {
        hashedVertices = new HashedVertices(desiredCapacity, allocator);
    }
    else if (hashedVertices.Capacity < desiredCapacity)
    {
        // Too small: dispose and rebuild with the requested allocator.
        hashedVertices.Dispose();
        hashedVertices = new HashedVertices(desiredCapacity, allocator);
    }
    else
    {
        // Large enough already: reuse the existing storage.
        hashedVertices.Clear();
    }
}
// Returns true when the edge loop is degenerate: it has fewer than 3 edges,
// or some vertex lies within epsilon of a non-adjacent edge segment.
static bool IsDegenerate(HashedVertices hashedVertices, NativeArray <Edge> edges, int edgeCount)
{
    // A polygon loop needs at least 3 edges to enclose any area.
    if (edgeCount < 3)
        return true;

    for (int a = 0; a < edgeCount; a++)
    {
        var testIndex  = edges[a].index1;
        var testVertex = hashedVertices[testIndex];

        for (int b = 0; b < edgeCount; b++)
        {
            if (a == b)
                continue;

            var segIndexA = edges[b].index1;
            var segIndexB = edges[b].index2;

            // Loop loops back on same vertex: skip edges sharing the test
            // vertex, and zero-length edges.
            if (testIndex == segIndexA || testIndex == segIndexB || segIndexA == segIndexB)
                continue;

            var segVertexA  = hashedVertices[segIndexA];
            var segVertexB  = hashedVertices[segIndexB];
            var sqrDistance = GeometryMath.SqrDistanceFromPointToLineSegment(testVertex, segVertexA, segVertexB);
            if (sqrDistance <= CSGConstants.kSqrDistanceEpsilon)
                return true;
        }
    }
    return false;
}
public void Execute(int index) { //var brushNodeIndex = treeBrushNodeIndices[index]; var count = input.BeginForEachIndex(index); if (count == 0) { return; } HashedVertices brushVertices; NativeListArray <int> surfaceLoopIndices; NativeList <SurfaceInfo> surfaceLoopAllInfos; NativeListArray <Edge> surfaceLoopAllEdges; var brushNodeIndex = input.Read <int>(); var vertexCount = input.Read <int>(); brushVertices = new HashedVertices(vertexCount, allocator); for (int v = 0; v < vertexCount; v++) { var vertex = input.Read <float3>(); brushVertices.AddNoResize(vertex); } var surfaceOuterCount = input.Read <int>(); surfaceLoopIndices = new NativeListArray <int>(surfaceOuterCount, allocator); surfaceLoopIndices.ResizeExact(surfaceOuterCount); for (int o = 0; o < surfaceOuterCount; o++) { var surfaceInnerCount = input.Read <int>(); if (surfaceInnerCount > 0) { var inner = surfaceLoopIndices.AllocateWithCapacityForIndex(o, surfaceInnerCount); //inner.ResizeUninitialized(surfaceInnerCount); for (int i = 0; i < surfaceInnerCount; i++) { inner.AddNoResize(input.Read <int>()); } } } var surfaceLoopCount = input.Read <int>(); surfaceLoopAllInfos = new NativeList <SurfaceInfo>(surfaceLoopCount, allocator); surfaceLoopAllEdges = new NativeListArray <Edge>(surfaceLoopCount, allocator); surfaceLoopAllInfos.ResizeUninitialized(surfaceLoopCount); surfaceLoopAllEdges.ResizeExact(surfaceLoopCount); for (int l = 0; l < surfaceLoopCount; l++) { surfaceLoopAllInfos[l] = input.Read <SurfaceInfo>(); var edgeCount = input.Read <int>(); if (edgeCount > 0) { var edgesInner = surfaceLoopAllEdges.AllocateWithCapacityForIndex(l, edgeCount); //edgesInner.ResizeUninitialized(edgeCount); for (int e = 0; e < edgeCount; e++) { edgesInner.AddNoResize(input.Read <Edge>()); } } } input.EndForEachIndex(); var maxLoops = 0; var maxIndices = 0; for (int s = 0; s < surfaceLoopIndices.Length; s++) { if (!surfaceLoopIndices.IsIndexCreated(s)) { continue; } var length = surfaceLoopIndices[s].Length; maxIndices += 
length; maxLoops = math.max(maxLoops, length); } ref var baseSurfaces = ref basePolygons[brushNodeIndex].Value.surfaces;
// Computes candidate intersection vertices between two brushes: every
// (plane-pair of brush 1) x (intersecting plane of brush 0) triple is
// intersected, filtered, snapped, and recorded as (planeIndex, vertexIndex)
// pairs for both brushes (results are appended via the ref length counters).
// Relies on the instance scratch buffers foundVertices / foundEdges /
// foundIntersections, resized up front.
void FindIntersectionVertices(ref NativeArray <float4> intersectingPlanes0, int intersectingPlanes0Length, int intersectingPlanesAndEdges0Length,
                              ref NativeArray <float4> intersectingPlanes1, int intersectingPlanes1Length, int intersectingPlanesAndEdges1Length,
                              ref NativeArray <PlanePair> usedPlanePairs1, int usedPlanePairs1Length,
                              ref NativeArray <int> intersectingPlaneIndices0, int intersectingPlaneIndices0Length,
                              float4x4 nodeToTreeSpaceMatrix0,
                              ref HashedVertices hashedTreeSpaceVertices, ref HashedVertices snapHashedVertices,
                              NativeArray <PlaneVertexIndexPair> foundIndices0, ref int foundIndices0Length,
                              NativeArray <PlaneVertexIndexPair> foundIndices1, ref int foundIndices1Length)
{
    // Worst case: every plane pair of brush 1 meets every plane of brush 0.
    int foundVerticesCount = usedPlanePairs1Length * intersectingPlanes0Length;
    NativeCollectionHelpers.EnsureMinimumSize(ref foundVertices, foundVerticesCount);
    NativeCollectionHelpers.EnsureMinimumSize(ref foundEdges, foundVerticesCount);
    NativeCollectionHelpers.EnsureMinimumSize(ref foundIntersections, foundVerticesCount);

    // Phase 1: intersect each (pair.plane0, pair.plane1, plane2) triple,
    // skipping near-parallel combinations and NaN results.
    var n = 0;
    for (int i = 0; i < usedPlanePairs1Length; i++)
    {
        for (int j = 0; j < intersectingPlanes0Length; j++)
        {
            var plane0 = usedPlanePairs1[i].plane0;
            var plane1 = usedPlanePairs1[i].plane1;
            var plane2 = intersectingPlanes0[j];
            foundIntersections[n] = new IntersectionPlanes
            {
                //plane0 = plane0,
                //plane1 = plane1,
                plane2      = plane2,
                planeIndex0 = usedPlanePairs1[i].planeIndex0,
                planeIndex1 = usedPlanePairs1[i].planeIndex1,
                planeIndex2 = intersectingPlaneIndices0[j]
            };
            foundEdges[n] = new IntersectionEdge
            {
                edgeVertex0 = usedPlanePairs1[i].edgeVertex0,
                edgeVertex1 = usedPlanePairs1[i].edgeVertex1
            };
            // Nearly parallel planes produce numerically unstable intersections.
            if (math.abs(math.dot(plane2.xyz, plane0.xyz)) >= CSGConstants.kNormalDotAlignEpsilon ||
                math.abs(math.dot(plane2.xyz, plane1.xyz)) >= CSGConstants.kNormalDotAlignEpsilon ||
                math.abs(math.dot(plane0.xyz, plane1.xyz)) >= CSGConstants.kNormalDotAlignEpsilon)
            {
                continue;
            }
            var localVertex = PlaneExtensions.Intersection(plane2, plane0, plane1);
            if (double.IsNaN(localVertex.x))
            {
                continue;
            }
            foundVertices[n] = new float4((float3)localVertex, 1);
            n++;
        }
    }

    // Phase 2: drop vertices whose plane2 lies within the fat-plane width of
    // BOTH edge endpoints of the plane pair (compact by swap-with-last).
    for (int k = n - 1; k >= 0; k--)
    {
        var edgeVertex0 = foundEdges[k].edgeVertex0;
        var edgeVertex1 = foundEdges[k].edgeVertex1;
        var plane2      = foundIntersections[k].plane2;
        if (math.abs(math.dot(plane2, edgeVertex0)) <= kFatPlaneWidthEpsilon &&
            math.abs(math.dot(plane2, edgeVertex1)) <= kFatPlaneWidthEpsilon)
        {
            if (k < n - 1)
            {
                foundIntersections[k] = foundIntersections[n - 1];
                foundVertices[k]      = foundVertices[n - 1];
            }
            n--;
        }
    }

    // TODO: since we're using a pair in the outer loop, we could also determine which
    //       2 planes it intersects at both ends and just check those two planes ..
    // NOTE: for brush2, the intersection will always be only on two planes
    //       UNLESS it's a corner vertex along that edge (we can compare to the two vertices)
    //       in which case we could use a pre-calculated list of planes ..
    //       OR when the intersection is outside of the edge ..

    // Phase 3: drop vertices outside either brush's plane set (swap-with-last).
    for (int k = n - 1; k >= 0; k--)
    {
        if (IsOutsidePlanes(intersectingPlanes0, intersectingPlanesAndEdges0Length, foundVertices[k]) ||
            IsOutsidePlanes(intersectingPlanes1, intersectingPlanesAndEdges1Length, foundVertices[k]))
        {
            if (k < n - 1)
            {
                foundIntersections[k] = foundIntersections[n - 1];
                foundVertices[k]      = foundVertices[n - 1];
            }
            n--;
        }
    }

    // Phase 4: snap survivors, transform to tree space, register them and
    // record unique (planeIndex, vertexIndex) pairs for both brushes.
    for (int k = 0; k < n; k++)
    {
        var planeIndex0 = (ushort)foundIntersections[k].planeIndex0;
        var planeIndex1 = (ushort)foundIntersections[k].planeIndex1;
        var planeIndex2 = (ushort)foundIntersections[k].planeIndex2;
        var localVertex = foundVertices[k];

        // TODO: should be having a Loop for each plane that intersects this vertex, and add that vertex
        //       to ensure they are identical
        var treeSpaceVertex = math.mul(nodeToTreeSpaceMatrix0, localVertex).xyz;
        // Snap first so identical positions collapse to one canonical vertex.
        treeSpaceVertex = snapHashedVertices[snapHashedVertices.AddNoResize(treeSpaceVertex)];
        var treeSpaceVertexIndex = hashedTreeSpaceVertices.AddNoResize(treeSpaceVertex);
        {
            // TODO: optimize
            // Linear dedup: append only if (plane2, vertex) is not recorded yet.
            for (int f = 0; f < foundIndices0Length; f++)
            {
                if (foundIndices0[f].vertexIndex == treeSpaceVertexIndex &&
                    foundIndices0[f].planeIndex == planeIndex2)
                {
                    goto skip0;
                }
            }
            foundIndices0[foundIndices0Length] = new PlaneVertexIndexPair { planeIndex = planeIndex2, vertexIndex = treeSpaceVertexIndex };
            foundIndices0Length++;
        skip0: ;
        }
        {
            // TODO: optimize
            // Same dedup for the first plane of the brush-1 pair.
            for (int f = 0; f < foundIndices1Length; f++)
            {
                if (foundIndices1[f].vertexIndex == treeSpaceVertexIndex &&
                    foundIndices1[f].planeIndex == planeIndex0)
                {
                    goto skip1;
                }
            }
            foundIndices1[foundIndices1Length] = new PlaneVertexIndexPair { planeIndex = planeIndex0, vertexIndex = treeSpaceVertexIndex };
            foundIndices1Length++;
        skip1: ;
        }
        {
            // TODO: optimize
            // And for the second plane of the brush-1 pair.
            for (int f = 0; f < foundIndices1Length; f++)
            {
                if (foundIndices1[f].vertexIndex == treeSpaceVertexIndex &&
                    foundIndices1[f].planeIndex == planeIndex1)
                {
                    goto skip2;
                }
            }
            foundIndices1[foundIndices1Length] = new PlaneVertexIndexPair { planeIndex = planeIndex1, vertexIndex = treeSpaceVertexIndex };
            foundIndices1Length++;
        skip2: ;
        }
    }
}
void GenerateLoop(IndexOrder brushIndexOrder0, IndexOrder brushIndexOrder1, bool invertedTransform, [NoAlias] NativeArray <SurfaceInfo> surfaceInfos, [NoAlias] int surfaceInfosLength, [NoAlias] ref BrushTreeSpacePlanes brushTreeSpacePlanes0, [NoAlias] NativeArray <PlaneVertexIndexPair> foundIndices0, [NoAlias] ref int foundIndices0Length, [NoAlias] ref HashedVertices hashedTreeSpaceVertices, [NoAlias] NativeList <BrushIntersectionLoop> .ParallelWriter outputSurfaces) { // Why is the unity NativeSort slower than bubble sort? // TODO: revisit this assumption //* for (int i = 0; i < foundIndices0Length - 1; i++) { for (int j = i + 1; j < foundIndices0Length; j++) { var x = foundIndices0[i]; var y = foundIndices0[j]; if (x.planeIndex > y.planeIndex) { continue; } if (x.planeIndex == y.planeIndex) { if (x.vertexIndex <= y.vertexIndex) { continue; } } var t = x; foundIndices0[i] = foundIndices0[j]; foundIndices0[j] = t; } } /*/ * foundIndices0.Sort(comparer); * //*/ NativeCollectionHelpers.EnsureMinimumSize(ref planeIndexOffsets, foundIndices0Length); NativeCollectionHelpers.EnsureCapacityAndClear(ref uniqueIndices, foundIndices0Length); var planeIndexOffsetsLength = 0; //var planeIndexOffsets = stackalloc PlaneIndexOffsetLength[foundIndices0Length]; //var uniqueIndices = stackalloc ushort[foundIndices0Length]; // Now that our indices are sorted by planeIndex, we can segment them by start/end offset var previousPlaneIndex = foundIndices0[0].planeIndex; var previousVertexIndex = foundIndices0[0].vertexIndex; uniqueIndices.Add(previousVertexIndex); var loopStart = 0; for (int i = 1; i < foundIndices0Length; i++) { var indices = foundIndices0[i]; var planeIndex = indices.planeIndex; var vertexIndex = indices.vertexIndex; // TODO: why do we have soooo many duplicates sometimes? 
if (planeIndex == previousPlaneIndex && vertexIndex == previousVertexIndex) { continue; } if (planeIndex != previousPlaneIndex) { var currLength = RemoveDuplicateEdges(ref uniqueIndices, loopStart, uniqueIndices.Length); //var currLength = (uniqueIndices.Length - loopStart); if (currLength > 2) { planeIndexOffsets[planeIndexOffsetsLength] = new PlaneIndexOffsetLength { length = (ushort)currLength, offset = (ushort)loopStart, planeIndex = previousPlaneIndex }; planeIndexOffsetsLength++; } loopStart = uniqueIndices.Length; } uniqueIndices.Add(vertexIndex); previousVertexIndex = vertexIndex; previousPlaneIndex = planeIndex; } { var currLength = RemoveDuplicateEdges(ref uniqueIndices, loopStart, uniqueIndices.Length); //var currLength = (uniqueIndices.Length - loopStart); if (currLength > 2) { planeIndexOffsets[planeIndexOffsetsLength] = new PlaneIndexOffsetLength { length = (ushort)currLength, offset = (ushort)loopStart, planeIndex = previousPlaneIndex }; planeIndexOffsetsLength++; } } var maxLength = 0; for (int i = 0; i < planeIndexOffsetsLength; i++) { maxLength = math.max(maxLength, planeIndexOffsets[i].length); } NativeCollectionHelpers.EnsureMinimumSize(ref sortedStack, maxLength * 2); var uniqueIndicesArray = uniqueIndices.AsArray(); // For each segment, we now sort our vertices within each segment, // making the assumption that they are convex //var sortedStack = stackalloc int2[maxLength * 2]; ref var vertices = ref hashedTreeSpaceVertices;//.GetUnsafeReadOnlyPtr();
// TODO: sort by using plane information instead of unreliable floating point math ..
// Sorts indices[offset .. offset + indicesCount) in place so the referenced
// vertices are ordered by angle around the polygon centroid, within the plane
// implied by 'normal'. Implemented as an iterative quicksort whose pending
// (left, right) ranges live in 'sortedStack'; the comparison key is atan2 of
// each vertex's 2D position relative to the centroid, expressed in a tangent
// basis derived from the dominant axis of 'normal'.
static void SortIndices([NoAlias] HashedVertices vertices, [NoAlias] NativeArray <int2> sortedStack, [NoAlias] NativeArray <ushort> indices, int offset, int indicesCount, float3 normal)
{
    // There's no point in trying to sort a point or a line
    if (indicesCount < 3)
    {
        return;
    }

    // Build a tangent basis: cross the normal with a world axis chosen by the
    // normal's dominant component, then derive the second tangent from that.
    float3 tangentX, tangentY;
    if (normal.x > normal.y)
    {
        if (normal.x > normal.z)
        {
            tangentX = math.cross(normal, new float3(0, 1, 0));
            tangentY = math.cross(normal, tangentX);
        }
        else
        {
            tangentX = math.cross(normal, new float3(0, 0, 1));
            tangentY = math.cross(normal, tangentX);
        }
    }
    else
    {
        if (normal.y > normal.z)
        {
            tangentX = math.cross(normal, new float3(1, 0, 0));
            tangentY = math.cross(normal, tangentX);
        }
        else
        {
            tangentX = math.cross(normal, new float3(0, 1, 0));
            tangentY = math.cross(normal, tangentX);
        }
    }

    var centroid = FindPolygonCentroid(vertices, indices, offset, indicesCount);
    var center = new float2(math.dot(tangentX, centroid),  // distance in direction of tangentX
                            math.dot(tangentY, centroid)); // distance in direction of tangentY

    // Iterative quicksort over index ranges; the pivot is the angle of the
    // range's middle element.
    var sortedStackLength = 1;
    sortedStack[0] = new int2(0, indicesCount - 1);
    while (sortedStackLength > 0)
    {
        var top = sortedStack[sortedStackLength - 1];
        sortedStackLength--;
        var l = top.x;
        var r = top.y;
        var left = l;
        var right = r;
        var va = vertices[indices[offset + (left + right) / 2]];
        while (true)
        {
            var a_angle = math.atan2(math.dot(tangentX, va) - center.x, math.dot(tangentY, va) - center.y);
            {
                // Advance 'left' past elements already on the correct side.
                var vb = vertices[indices[offset + left]];
                var b_angle = math.atan2(math.dot(tangentX, vb) - center.x, math.dot(tangentY, vb) - center.y);
                while (b_angle > a_angle)
                {
                    left++;
                    vb = vertices[indices[offset + left]];
                    b_angle = math.atan2(math.dot(tangentX, vb) - center.x, math.dot(tangentY, vb) - center.y);
                }
            }
            {
                // Retreat 'right' past elements already on the correct side.
                var vb = vertices[indices[offset + right]];
                var b_angle = math.atan2(math.dot(tangentX, vb) - center.x, math.dot(tangentY, vb) - center.y);
                while (a_angle > b_angle)
                {
                    right--;
                    vb = vertices[indices[offset + right]];
                    b_angle = math.atan2(math.dot(tangentX, vb) - center.x, math.dot(tangentY, vb) - center.y);
                }
            }
            if (left <= right)
            {
                if (left != right)
                {
                    // Swap the pair that straddles the pivot.
                    var t = indices[offset + left];
                    indices[offset + left] = indices[offset + right];
                    indices[offset + right] = t;
                }
                left++;
                right--;
            }
            if (left > right)
            {
                break;
            }
        }
        // Push the two remaining sub-ranges (when non-trivial) for later passes.
        if (l < right)
        {
            sortedStack[sortedStackLength] = new int2(l, right);
            sortedStackLength++;
        }
        if (left < r)
        {
            sortedStack[sortedStackLength] = new int2(left, r);
            sortedStackLength++;
        }
    }
}
//[MethodImpl(MethodImplOptions.NoInlining)]
// Finds the vertices of brush 0 that lie inside brush 1's intersecting-plane
// volume, snaps them, registers them in hashedTreeSpaceVertices, and appends
// a unique (planeIndex, vertexIndex) pair to foundIndices0 for every plane
// listed in the vertex's vertexIntersectionSegments range.
// Uses the instance scratch arrays localVertices / usedVertexIndices.
void FindInsideVertices([NoAlias] NativeArray <float3> usedVertices0, int usedVertices0Length,
                        [NoAlias] NativeArray <ushort> vertexIntersectionPlanes, int vertexIntersectionPlanesLength,
                        [NoAlias] NativeArray <int2> vertexIntersectionSegments, int vertexIntersectionSegmentsLength,
                        [NoAlias] NativeArray <float4> intersectingPlanes1, int intersectingPlanes1Length, int intersectingPlanesAndEdges1Length,
                        float4x4 nodeToTreeSpaceMatrix1, float4x4 vertexToLocal0,
                        [NoAlias] ref HashedVertices hashedTreeSpaceVertices, [NoAlias] ref HashedVertices snapHashedVertices,
                        [NoAlias] NativeArray <PlaneVertexIndexPair> foundIndices0, ref int foundIndices0Length)
{
    NativeCollectionHelpers.EnsureMinimumSize(ref localVertices, usedVertices0Length);
    NativeCollectionHelpers.EnsureMinimumSize(ref usedVertexIndices, usedVertices0Length);

    // Transform every candidate vertex into brush-0 local space, remembering
    // its original index.
    for (int j = 0; j < usedVertices0Length; j++)
    {
        var brushVertex1 = new float4(usedVertices0[j], 1);
        localVertices[j] = math.mul(vertexToLocal0, brushVertex1);
        usedVertexIndices[j] = (ushort)j;
    }

    // Compact away vertices outside brush 1's planes (swap-with-last removal).
    var foundVertexCount = usedVertices0Length;
    for (int j = foundVertexCount - 1; j >= 0; j--)
    {
        if (IsOutsidePlanes(intersectingPlanes1, intersectingPlanesAndEdges1Length, localVertices[j]))
        {
            if (j < foundVertexCount - 1)
            {
                localVertices[j] = localVertices[foundVertexCount - 1];
                usedVertexIndices[j] = usedVertexIndices[foundVertexCount - 1];
            }
            foundVertexCount--;
        }
    }

    for (int j = 0; j < foundVertexCount; j++)
    {
        var usedVertexIndex = usedVertexIndices[j];
        var segment = vertexIntersectionSegments[usedVertexIndex];
        // segment = (start, count) into vertexIntersectionPlanes; an empty
        // segment means the vertex touches no recorded planes.
        if (segment.y == 0)
        {
            continue;
        }

        // Snap first so identical positions collapse to one canonical vertex,
        // then register in the shared tree-space set.
        var treeSpaceVertex = math.mul(nodeToTreeSpaceMatrix1, localVertices[j]).xyz;
        treeSpaceVertex = snapHashedVertices[snapHashedVertices.AddNoResize(treeSpaceVertex)];
        var treeSpaceVertexIndex = hashedTreeSpaceVertices.AddNoResize(treeSpaceVertex);
        for (int i = segment.x; i < segment.x + segment.y; i++)
        {
            var planeIndex = vertexIntersectionPlanes[i];

            // TODO: optimize
            // Linear dedup: append only if (plane, vertex) is not recorded yet.
            for (int k = 0; k < foundIndices0Length; k++)
            {
                if (foundIndices0[k].planeIndex == planeIndex &&
                    foundIndices0[k].vertexIndex == treeSpaceVertexIndex)
                {
                    goto skipMe;
                }
            }
            foundIndices0[foundIndices0Length] = new PlaneVertexIndexPair { planeIndex = (ushort)planeIndex, vertexIndex = (ushort)treeSpaceVertexIndex };
            foundIndices0Length++;
        skipMe: ;
        }
    }
}
// Allocates a blob array of data.Length elements inside 'builder' and fills
// it with a raw memory copy of the HashedVertices contents.
public static ChiselBlobBuilderArray <T> Construct <T>(this ChiselBlobBuilder builder, ref ChiselBlobArray <T> blobArray, HashedVertices data) where T : unmanaged
{
    var result = builder.Allocate(ref blobArray, data.Length);
    // Nothing to copy for an empty container.
    if (data.Length == 0)
        return result;
    UnsafeUtility.MemCpy(result.GetUnsafePtr(), data.GetUnsafeReadOnlyPtr(), result.Length * sizeof(T));
    return result;
}
// Presumably copies the given brush-intersection loop into dst[index],
// reserving 'extraCapacity' extra slots — TODO(review): body is truncated in
// this chunk; confirm against the full source before relying on this summary.
void CopyFrom(NativeList <UnsafeList <Edge> > dst, int index, ref BrushIntersectionLoop brushIntersectionLoop, HashedVertices hashedTreeSpaceVertices, int extraCapacity) { Debug.Assert(extraCapacity >= 0); ref var vertexIndex = ref brushIntersectionLoop.loopVertexIndex;
// Presumably converts one brush-mesh polygon into edges (appending to 'edges'
// via ref edgeCount) and returns its tree-space bounds — TODO(review): body is
// truncated in this chunk; confirm against the full source.
MinMaxAABB CopyPolygonToIndices(BlobAssetReference <BrushMeshBlob> mesh, int polygonIndex, float4x4 nodeToTreeSpaceMatrix, HashedVertices hashedVertices, NativeArray <Edge> edges, ref int edgeCount) { ref var halfEdges = ref mesh.Value.halfEdges;
void IntersectLoopsJob(HashedVertices brushVertices, NativeListArray <int> .NativeList loopIndices, int surfaceLoopIndex, NativeListArray <int> holeIndices, NativeList <SurfaceInfo> allInfos, NativeListArray <Edge> allEdges, NativeListArray <Edge> .NativeList intersectionLoop, CategoryGroupIndex intersectionCategory, SurfaceInfo intersectionInfo) { if (intersectionLoop.Length == 0) { return; } //Debug.Assert(allEdges.Length == allInfos.Length); //Debug.Assert(allInfos.Length == holeIndices.Length); var currentLoopEdges = allEdges[surfaceLoopIndex]; var currentInfo = allInfos[surfaceLoopIndex]; var currentHoleIndices = holeIndices[surfaceLoopIndex]; // It might look like we could just set the interiorCategory of brush_intersection here, and let all other cut loops copy from it below, // but the same brush_intersection might be used by another categorized_loop and then we'd try to reroute it again, which wouldn't work //brush_intersection.interiorCategory = newHoleCategory; if (currentLoopEdges.Length == 0) { return; } var maxLength = math.max(intersectionLoop.Length, currentLoopEdges.Length); if (maxLength < 3) { return; } int inside2 = 0, outside2 = 0; var categories2 = stackalloc EdgeCategory[currentLoopEdges.Length]; var treeSpacePlanes1 = brushTreeSpacePlanes[intersectionInfo.brushNodeIndex]; for (int e = 0; e < currentLoopEdges.Length; e++) { var category = BooleanEdgesUtility.CategorizeEdge(currentLoopEdges[e], ref treeSpacePlanes1.Value.treeSpacePlanes, intersectionLoop, brushVertices); categories2[e] = category; if (category == EdgeCategory.Inside) { inside2++; } else if (category == EdgeCategory.Outside) { outside2++; } } var aligned2 = currentLoopEdges.Length - (inside2 + outside2); int inside1 = 0, outside1 = 0; var categories1 = stackalloc EdgeCategory[intersectionLoop.Length]; var treeSpacePlanes2 = brushTreeSpacePlanes[currentInfo.brushNodeIndex]; for (int e = 0; e < intersectionLoop.Length; e++) { var category = 
BooleanEdgesUtility.CategorizeEdge(intersectionLoop[e], ref treeSpacePlanes2.Value.treeSpacePlanes, currentLoopEdges, brushVertices); categories1[e] = category; if (category == EdgeCategory.Inside) { inside1++; } else if (category == EdgeCategory.Outside) { outside1++; } } var aligned1 = intersectionLoop.Length - (inside1 + outside1); // Completely outside if ((inside1 + aligned1) == 0 && (aligned2 + inside2) == 0) { return; } if ((inside1 + (inside2 + aligned2)) < 3) { return; } // Completely aligned if (((outside1 + inside1) == 0 && (outside2 + inside2) == 0) || // polygon1 edges Completely inside polygon2 (inside1 == 0 && outside2 == 0)) { // New polygon overrides the existing polygon currentInfo.interiorCategory = intersectionCategory; allInfos[surfaceLoopIndex] = currentInfo; //Debug.Assert(holeIndices.IsAllocated(surfaceLoopIndex)); return; } var outEdges = stackalloc Edge[maxLength]; var outEdgesLength = 0; // polygon2 edges Completely inside polygon1 if (outside1 == 0 && inside2 == 0) { // polygon1 Completely inside polygon2 for (int n = 0; n < intersectionLoop.Length; n++) { outEdges[outEdgesLength] = intersectionLoop[n]; outEdgesLength++; } //OperationResult.Polygon1InsidePolygon2; } else { //int outEdgesLength = 0; // Can't read from outEdges.Length since it's marked as WriteOnly for (int e = 0; e < intersectionLoop.Length; e++) { var category = categories1[e]; if (category == EdgeCategory.Inside) { outEdges[outEdgesLength] = intersectionLoop[e]; outEdgesLength++; } } for (int e = 0; e < currentLoopEdges.Length; e++) { var category = categories2[e]; if (category != EdgeCategory.Outside) { outEdges[outEdgesLength] = currentLoopEdges[e]; outEdgesLength++; } } //OperationResult.Cut; } if (outEdgesLength < 3) { return; } // FIXME: when brush_intersection and categorized_loop are grazing each other, // technically we cut it but we shouldn't be creating it as a separate polygon + hole (bug7) // the output of cutting operations are both holes for the original 
polygon (categorized_loop) // and new polygons on the surface of the brush that need to be categorized intersectionInfo.interiorCategory = intersectionCategory; if (currentHoleIndices.Length > 0 && // TODO: fix touching not being updated properly brushesTouchedByBrushes.ContainsKey(currentInfo.brushNodeIndex)) { // Figure out why this is seemingly not necessary? var intersectedHoleIndices = stackalloc int[currentHoleIndices.Length]; var intersectedHoleIndicesLength = 0; // the output of cutting operations are both holes for the original polygon (categorized_loop) // and new polygons on the surface of the brush that need to be categorized ref var brushesTouchedByBrush = ref brushesTouchedByBrushes[currentInfo.brushNodeIndex].Value; ref var brushIntersections = ref brushesTouchedByBrushes[currentInfo.brushNodeIndex].Value.brushIntersections;
// Presumably copies the given brush-intersection loop into dst[index],
// reserving 'extraCapacity' extra slots (NativeListArray overload) —
// TODO(review): body is truncated in this chunk; confirm against the full source.
void CopyFrom(NativeListArray <Edge> dst, int index, ref BrushIntersectionLoop brushIntersectionLoop, HashedVertices hashedTreeSpaceVertices, int extraCapacity) { ref var vertexIndex = ref brushIntersectionLoop.loopVertexIndex;
// Builds the intersection loops between brush 0 and brush 1 from the
// collected (planeIndex, vertexIndex) pairs:
//   1. sort the pairs so equal plane indices become contiguous,
//   2. de-duplicate consecutive pairs and segment them into per-plane runs
//      (runs with fewer than 3 vertices are dropped),
//   3. sort each run into winding order around its plane normal,
//   4. emit one BrushIntersectionLoop per surviving run.
void GenerateLoop(IndexOrder brushIndexOrder0,
                  IndexOrder brushIndexOrder1,
                  bool invertedTransform,
                  [NoAlias] NativeArray <SurfaceInfo> surfaceInfos, [NoAlias] int surfaceInfosLength,
                  [NoAlias] ref BrushTreeSpacePlanes brushTreeSpacePlanes0,
                  [NoAlias] NativeArray <PlaneVertexIndexPair> foundIndices0, [NoAlias] ref int foundIndices0Length,
                  [NoAlias] ref HashedVertices hashedTreeSpaceVertices,
                  [NoAlias] NativeList <BrushIntersectionLoop> .ParallelWriter outputSurfaces)
{
    // Why is the unity NativeSort slower than bubble sort?
    // Exchange sort: within equal planeIndex groups, vertexIndex ascending.
    for (int i = 0; i < foundIndices0Length - 1; i++)
    {
        for (int j = i + 1; j < foundIndices0Length; j++)
        {
            var x = foundIndices0[i];
            var y = foundIndices0[j];
            if (x.planeIndex > y.planeIndex)
            {
                continue;
            }
            if (x.planeIndex == y.planeIndex)
            {
                if (x.vertexIndex <= y.vertexIndex)
                {
                    continue;
                }
            }
            var t = x;
            foundIndices0[i] = foundIndices0[j];
            foundIndices0[j] = t;
        }
    }

    NativeCollectionHelpers.EnsureMinimumSize(ref planeIndexOffsets, foundIndices0Length);
    NativeCollectionHelpers.EnsureMinimumSize(ref uniqueIndices, foundIndices0Length);
    var planeIndexOffsetsLength = 0;
    //var planeIndexOffsets = stackalloc PlaneIndexOffsetLength[foundIndices0Length];
    var uniqueIndicesLength = 0;
    //var uniqueIndices = stackalloc ushort[foundIndices0Length];

    // Now that our indices are sorted by planeIndex, we can segment them by start/end offset
    var previousPlaneIndex = foundIndices0[0].planeIndex;
    var previousVertexIndex = foundIndices0[0].vertexIndex;
    uniqueIndices[uniqueIndicesLength] = previousVertexIndex;
    uniqueIndicesLength++;
    var loopStart = 0;
    for (int i = 1; i < foundIndices0Length; i++)
    {
        var indices = foundIndices0[i];
        var planeIndex = indices.planeIndex;
        var vertexIndex = indices.vertexIndex;
        // TODO: why do we have soooo many duplicates sometimes?
        if (planeIndex == previousPlaneIndex && vertexIndex == previousVertexIndex)
        {
            continue;
        }
        if (planeIndex != previousPlaneIndex)
        {
            // Close the previous plane's run; keep only runs that can form a polygon.
            var currLength = (uniqueIndicesLength - loopStart);
            if (currLength > 2)
            {
                planeIndexOffsets[planeIndexOffsetsLength] = new PlaneIndexOffsetLength
                {
                    length = (ushort)currLength,
                    offset = (ushort)loopStart,
                    planeIndex = previousPlaneIndex
                };
                planeIndexOffsetsLength++;
            }
            loopStart = uniqueIndicesLength;
        }
        uniqueIndices[uniqueIndicesLength] = vertexIndex;
        uniqueIndicesLength++;
        previousVertexIndex = vertexIndex;
        previousPlaneIndex = planeIndex;
    }
    {
        // Close the final run.
        var currLength = (uniqueIndicesLength - loopStart);
        if (currLength > 2)
        {
            planeIndexOffsets[planeIndexOffsetsLength] = new PlaneIndexOffsetLength
            {
                length = (ushort)currLength,
                offset = (ushort)loopStart,
                planeIndex = previousPlaneIndex
            };
            planeIndexOffsetsLength++;
        }
    }

    var maxLength = 0;
    for (int i = 0; i < planeIndexOffsetsLength; i++)
    {
        maxLength = math.max(maxLength, planeIndexOffsets[i].length);
    }
    NativeCollectionHelpers.EnsureMinimumSize(ref sortedStack, maxLength * 2);

    // For each segment, we now sort our vertices within each segment,
    // making the assumption that they are convex
    //var sortedStack = stackalloc int2[maxLength * 2];
    var vertices = hashedTreeSpaceVertices;//.GetUnsafeReadOnlyPtr();
    for (int n = planeIndexOffsetsLength - 1; n >= 0; n--)
    {
        var planeIndexOffset = planeIndexOffsets[n];
        var length = planeIndexOffset.length;
        var offset = planeIndexOffset.offset;
        var planeIndex = planeIndexOffset.planeIndex;
        // Winding normal is flipped depending on invertedTransform.
        float3 normal = brushTreeSpacePlanes0.treeSpacePlanes[planeIndex].xyz * (invertedTransform ? 1 : -1); // TODO: use plane information instead
        SortIndices(vertices, sortedStack, uniqueIndices, offset, length, normal);
    }

    // NOTE(review): totalLoopsSize/totalSize are computed but never used below —
    // possibly leftover from an earlier serialization scheme; confirm before removing.
    var totalLoopsSize = 16 + (planeIndexOffsetsLength * UnsafeUtility.SizeOf <BrushIntersectionLoop>());
    var totalSize = totalLoopsSize;
    for (int j = 0; j < planeIndexOffsetsLength; j++)
    {
        var planeIndexLength = planeIndexOffsets[j];
        var loopLength = planeIndexLength.length;
        totalSize += (loopLength * UnsafeUtility.SizeOf <float3>());
    }

    // Emit one BrushIntersectionLoop per surviving per-plane run; vertices are
    // copied out to outputSurfaceVertices and referenced by index + count.
    var srcVertices = hashedTreeSpaceVertices;
    for (int j = 0; j < planeIndexOffsetsLength; j++)
    {
        var planeIndexLength = planeIndexOffsets[j];
        var offset = planeIndexLength.offset;
        var loopLength = planeIndexLength.length;
        var basePlaneIndex = planeIndexLength.planeIndex;
        var surfaceInfo = surfaceInfos[basePlaneIndex];
        NativeCollectionHelpers.EnsureMinimumSize(ref temporaryVertices, loopLength);
        for (int d = 0; d < loopLength; d++)
        {
            temporaryVertices[d] = srcVertices[uniqueIndices[offset + d]];
        }
        outputSurfaces.AddNoResize(new BrushIntersectionLoop
        {
            indexOrder0 = brushIndexOrder0,
            indexOrder1 = brushIndexOrder1,
            surfaceInfo = surfaceInfo,
            loopVertexIndex = outputSurfaceVertices.AddRangeNoResize(temporaryVertices.GetUnsafePtr(), loopLength),
            loopVertexCount = loopLength
        });
    }
}
// Presumably converts one brush-mesh polygon into edges (appending to 'edges'
// via ref edgeCount), returning success — TODO(review): body is truncated in
// this chunk; confirm against the full source.
bool CopyPolygonToIndices(ChiselBlobAssetReference <BrushMeshBlob> mesh, ref ChiselBlobArray <float3> treeSpaceVertices, int polygonIndex, HashedVertices hashedTreeSpaceVertices, NativeArray <Edge> edges, ref int edgeCount) { ref var halfEdges = ref mesh.Value.halfEdges;