// Merges the explicitly rebuilt brushes with the brushes that need an indirect
// update into one duplicate-free, sorted list of index orders, then appends the
// result to allUpdateBrushIndexOrders.
public void Execute()
{
    // Scratch containers sized for the worst case: every brush in the tree.
    NativeCollectionHelpers.EnsureCapacityAndClear(ref requiredTemporaryBullShitByDOTS, allTreeBrushIndexOrders.Length);
    NativeCollectionHelpers.EnsureMinimumSizeAndClear(ref foundBrushes, allTreeBrushIndexOrders.Length);

    // Every explicitly rebuilt brush is always part of the update set; mark it
    // seen in the bitmask so the indirect pass below cannot add it twice.
    for (int index = 0; index < rebuildTreeBrushIndexOrders.Length; index++)
    {
        var rebuildOrder = rebuildTreeBrushIndexOrders[index];
        foundBrushes.Set(rebuildOrder.nodeOrder, true);
        requiredTemporaryBullShitByDOTS.AddNoResize(rebuildOrder);
    }

    // Indirectly affected brushes are appended only when not already present.
    for (int index = 0; index < brushesThatNeedIndirectUpdate.Length; index++)
    {
        var indirectOrder = brushesThatNeedIndirectUpdate[index];
        if (foundBrushes.IsSet(indirectOrder.nodeOrder))
            continue;
        foundBrushes.Set(indirectOrder.nodeOrder, true);
        requiredTemporaryBullShitByDOTS.AddNoResize(indirectOrder);
    }

    // Deterministic ordering before handing the combined set downstream.
    requiredTemporaryBullShitByDOTS.Sort(new IntersectionUtility.IndexOrderComparer());
    allUpdateBrushIndexOrders.AddRangeNoResize(requiredTemporaryBullShitByDOTS);
}
// Flattens the per-brush intersection lists into a single sorted list of
// symmetric brush pairs, then writes them to brushIntersectionsWith together
// with a (start, length) range per source brush in brushIntersectionsWithRange.
public void Execute() {
    // Heuristic initial capacity: 16 pairs per brush — TODO confirm this
    // matches typical intersection counts; intersections.Add below can still
    // grow the list if it is exceeded.
    var minCount = brushBrushIntersections.Count * 16;
    NativeCollectionHelpers.EnsureCapacityAndClear(ref intersections, minCount);
    for (int i = 0; i < brushBrushIntersections.Count; i++) {
        // Skip brushes that have no allocated intersection sub-list.
        if (!brushBrushIntersections.IsAllocated(i)) { continue; }
        var subArray = brushBrushIntersections[i];
        for (int j = 0; j < subArray.Count; j++) {
            var intersectWith = subArray[j];
            var pair = new BrushPair {
                brushNodeOrder0 = i,
                brushNodeOrder1 = intersectWith.brushNodeOrder1,
                type            = intersectWith.type
            };
            // Store both directions so every brush can look up the pairs in
            // which it is the first element.
            intersections.Add(pair);
            pair.Flip();
            intersections.Add(pair);
        }
    }
    brushIntersectionsWith->Clear();
    if (intersections.Length == 0) { return; }

    // Sort by brushNodeOrder0 (presumably — ListComparer is defined elsewhere;
    // the range bookkeeping below relies on pairs being grouped by it).
    intersections.Sort(new ListComparer());

    // Walk the sorted pairs, appending each to brushIntersectionsWith while
    // tracking a contiguous (start index, count) range per brushNodeOrder0.
    var currentPair = intersections[0];
    int previousOrder = currentPair.brushNodeOrder0;
    brushIntersectionsWith->Add(new BrushIntersectWith {
        brushNodeOrder1 = currentPair.brushNodeOrder1,
        type            = currentPair.type,
    });
    // range.x = start offset into brushIntersectionsWith, range.y = count.
    int2 range = new int2(0, 1);
    for (int i = 1; i < intersections.Length; i++) {
        currentPair = intersections[i];
        int currentOrder = currentPair.brushNodeOrder0;
        brushIntersectionsWith->Add(new BrushIntersectWith {
            brushNodeOrder1 = currentPair.brushNodeOrder1,
            type            = currentPair.type,
        });
        if (currentOrder != previousOrder) {
            // Close out the range for the previous brush and start a new one
            // at the current element index.
            //Debug.Log($"{previousOrder} {range}");
            brushIntersectionsWithRange[previousOrder] = range;
            previousOrder = currentOrder;
            range.x = i;
            range.y = 1;
        } else {
            range.y++;
        }
    }
    // Flush the final open range.
    brushIntersectionsWithRange[previousOrder] = range;
}
// Builds intersection loops from the (plane, vertex) pairs found for a brush:
// sorts the pairs by plane, segments them into per-plane vertex runs, and
// prepares per-plane offset/length records for the convex sorting below.
// NOTE(review): this method is truncated in the visible chunk — the tail that
// consumes sortedStack/outputSurfaces is not shown here.
void GenerateLoop(IndexOrder brushIndexOrder0,
                  IndexOrder brushIndexOrder1,
                  bool invertedTransform,
                  [NoAlias] NativeArray<SurfaceInfo> surfaceInfos,
                  [NoAlias] int surfaceInfosLength,
                  [NoAlias] ref BrushTreeSpacePlanes brushTreeSpacePlanes0,
                  [NoAlias] NativeArray<PlaneVertexIndexPair> foundIndices0,
                  [NoAlias] ref int foundIndices0Length,
                  [NoAlias] ref HashedVertices hashedTreeSpaceVertices,
                  [NoAlias] NativeList<BrushIntersectionLoop>.ParallelWriter outputSurfaces)
{
    // Why is the unity NativeSort slower than bubble sort?
    // TODO: revisit this assumption
    // The //* ... /*/ ... //*/ markers are a comment toggle: as written the
    // bubble sort runs and the NativeSort call is commented out; changing
    // "//*" to "/*" swaps them. Do not reformat these markers.
    //*
    // In-place sort of the first foundIndices0Length entries by planeIndex,
    // then by vertexIndex (ascending).
    for (int i = 0; i < foundIndices0Length - 1; i++) {
        for (int j = i + 1; j < foundIndices0Length; j++) {
            var x = foundIndices0[i];
            var y = foundIndices0[j];
            if (x.planeIndex > y.planeIndex) { continue; }
            if (x.planeIndex == y.planeIndex) {
                if (x.vertexIndex <= y.vertexIndex) { continue; }
            }
            var t = x;
            foundIndices0[i] = foundIndices0[j];
            foundIndices0[j] = t;
        }
    }
    /*/
    * foundIndices0.Sort(comparer);
    * //*/

    // Scratch buffers sized to the worst case (one entry per found index).
    NativeCollectionHelpers.EnsureMinimumSize(ref planeIndexOffsets, foundIndices0Length);
    NativeCollectionHelpers.EnsureCapacityAndClear(ref uniqueIndices, foundIndices0Length);
    var planeIndexOffsetsLength = 0;
    //var planeIndexOffsets = stackalloc PlaneIndexOffsetLength[foundIndices0Length];
    //var uniqueIndices = stackalloc ushort[foundIndices0Length];

    // Now that our indices are sorted by planeIndex, we can segment them by start/end offset
    var previousPlaneIndex  = foundIndices0[0].planeIndex;
    var previousVertexIndex = foundIndices0[0].vertexIndex;
    uniqueIndices.Add(previousVertexIndex);
    var loopStart = 0;
    for (int i = 1; i < foundIndices0Length; i++) {
        var indices     = foundIndices0[i];
        var planeIndex  = indices.planeIndex;
        var vertexIndex = indices.vertexIndex;
        // TODO: why do we have soooo many duplicates sometimes?
        // Skip exact (plane, vertex) duplicates — sorting made them adjacent.
        if (planeIndex == previousPlaneIndex && vertexIndex == previousVertexIndex) { continue; }
        if (planeIndex != previousPlaneIndex) {
            // Plane changed: close out the previous plane's vertex run.
            var currLength = RemoveDuplicateEdges(ref uniqueIndices, loopStart, uniqueIndices.Length);
            //var currLength = (uniqueIndices.Length - loopStart);
            // A loop needs at least 3 vertices to form a polygon.
            if (currLength > 2) {
                planeIndexOffsets[planeIndexOffsetsLength] = new PlaneIndexOffsetLength {
                    length     = (ushort)currLength,
                    offset     = (ushort)loopStart,
                    planeIndex = previousPlaneIndex
                };
                planeIndexOffsetsLength++;
            }
            loopStart = uniqueIndices.Length;
        }
        uniqueIndices.Add(vertexIndex);
        previousVertexIndex = vertexIndex;
        previousPlaneIndex  = planeIndex;
    }
    {
        // Close out the final plane's vertex run (same logic as in the loop).
        var currLength = RemoveDuplicateEdges(ref uniqueIndices, loopStart, uniqueIndices.Length);
        //var currLength = (uniqueIndices.Length - loopStart);
        if (currLength > 2) {
            planeIndexOffsets[planeIndexOffsetsLength] = new PlaneIndexOffsetLength {
                length     = (ushort)currLength,
                offset     = (ushort)loopStart,
                planeIndex = previousPlaneIndex
            };
            planeIndexOffsetsLength++;
        }
    }

    // Size the sorting stack by the longest per-plane run.
    var maxLength = 0;
    for (int i = 0; i < planeIndexOffsetsLength; i++) {
        maxLength = math.max(maxLength, planeIndexOffsets[i].length);
    }
    NativeCollectionHelpers.EnsureMinimumSize(ref sortedStack, maxLength * 2);
    var uniqueIndicesArray = uniqueIndices.AsArray();

    // For each segment, we now sort our vertices within each segment,
    // making the assumption that they are convex
    //var sortedStack = stackalloc int2[maxLength * 2];
    ref var vertices = ref hashedTreeSpaceVertices;//.GetUnsafeReadOnlyPtr();
// Deserializes one brush's worth of data from the NativeStream chunk at
// `index`: vertices, per-surface loop-index lists, and per-loop info + edges,
// into the job's scratch containers. Read order must mirror the writer's
// order exactly — do not reorder the input.Read calls.
// NOTE(review): this method is truncated in the visible chunk — processing of
// baseSurfaces continues past the last visible line.
public void Execute(int index) {
    var count = input.BeginForEachIndex(index);
    // Empty chunk: nothing was written for this index.
    if (count == 0) { return; }
    var brushIndexOrder = input.Read<IndexOrder>();
    var brushNodeOrder = brushIndexOrder.nodeOrder;

    // --- vertices ---
    var vertexCount = input.Read<int>();
    NativeCollectionHelpers.EnsureCapacityAndClear(ref brushVertices, vertexCount);
    for (int v = 0; v < vertexCount; v++) {
        var vertex = input.Read<float3>();
        brushVertices.AddNoResize(vertex);
    }

    // --- per-surface loop index lists ---
    var surfaceOuterCount = input.Read<int>();
    NativeCollectionHelpers.EnsureSizeAndClear(ref surfaceLoopIndices, surfaceOuterCount);
    for (int o = 0; o < surfaceOuterCount; o++) {
        // `default` (IsCreated == false) marks a surface with no loops.
        UnsafeList<int> inner = default;
        var surfaceInnerCount = input.Read<int>();
        if (surfaceInnerCount > 0) {
            inner = new UnsafeList<int>(surfaceInnerCount, Allocator.Temp);
            //inner.ResizeUninitialized(surfaceInnerCount);
            for (int i = 0; i < surfaceInnerCount; i++) {
                inner.AddNoResize(input.Read<int>());
            }
        }
        surfaceLoopIndices[o] = inner;
    }

    // --- per-loop surface info and edge lists ---
    var surfaceLoopCount = input.Read<int>();
    NativeCollectionHelpers.EnsureMinimumSizeAndClear(ref surfaceLoopAllInfos, surfaceLoopCount);
    NativeCollectionHelpers.EnsureSizeAndClear(ref surfaceLoopAllEdges, surfaceLoopCount);
    for (int l = 0; l < surfaceLoopCount; l++) {
        surfaceLoopAllInfos[l] = input.Read<SurfaceInfo>();
        var edgeCount = input.Read<int>();
        if (edgeCount > 0) {
            var edgesInner = new UnsafeList<Edge>(edgeCount, Allocator.Temp);
            //edgesInner.ResizeUninitialized(edgeCount);
            for (int e = 0; e < edgeCount; e++) {
                edgesInner.AddNoResize(input.Read<Edge>());
            }
            surfaceLoopAllEdges[l] = edgesInner;
        }
    }
    input.EndForEachIndex();

    // No cached base polygons for this brush: nothing further to build.
    if (!basePolygonCache[brushNodeOrder].IsCreated) { return; }

    // Compute sizing hints: total loop-index count and the largest
    // per-surface loop count.
    var maxLoops = 0;
    var maxIndices = 0;
    for (int s = 0; s < surfaceLoopIndices.Length; s++) {
        if (!surfaceLoopIndices[s].IsCreated) { continue; }
        var length = surfaceLoopIndices[s].Length;
        maxIndices += length;
        maxLoops = math.max(maxLoops, length);
    }
    ref var baseSurfaces = ref basePolygonCache[brushNodeOrder].Value.surfaces;