// @TODO: Use parallel for job... (Need to expose combine jobs)
/// <summary>
/// Job-system entry point. Dispatches to one of two modes based on the
/// sentinel stored in <c>jobData.appendCount</c>: -1 means "re-filter the
/// indices already in the output list", any other value means "append new
/// passing indices". <paramref name="additionalPtr"/>, <paramref name="ranges"/>
/// and <paramref name="jobIndex"/> are required by the job-system delegate
/// signature but unused here.
/// </summary>
public static unsafe void Execute(ref JobDataWithFiltering jobData, System.IntPtr additionalPtr, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    // appendCount == -1 is the "filter existing results" sentinel.
    if (jobData.appendCount != -1)
    {
        ExecuteAppend(ref jobData, bufferRangePatchData);
    }
    else
    {
        ExecuteFilter(ref jobData, bufferRangePatchData);
    }
}
/// <summary>
/// Append mode: evaluates the filter predicate for indices [0, appendCount)
/// and appends every index for which it returns true to the end of
/// <c>jobData.outputIndices</c>.
/// </summary>
/// <remarks>
/// Capacity is reserved up front for the worst case (every candidate passes),
/// then entries are written past the current Length through a raw pointer and
/// committed at the end with <c>ResizeUninitialized</c>. The pointer must be
/// taken AFTER the capacity change, since growing may reallocate the buffer.
/// Fix: the loop now uses <c>i &lt; appendCount</c> instead of the original
/// <c>i != appendCount</c>, which would spin forever (writing unboundedly
/// through the raw pointer) if appendCount were ever a negative value other
/// than the -1 sentinel handled by the dispatcher.
/// </remarks>
public static unsafe void ExecuteAppend(ref JobDataWithFiltering jobData, System.IntPtr bufferRangePatchData)
{
    int oldLength = jobData.outputIndices.Length;
    // Reserve worst-case room before taking the pointer (growth may reallocate).
    jobData.outputIndices.Capacity = math.max(jobData.appendCount + oldLength, jobData.outputIndices.Capacity);
    int* outputPtr = (int*)jobData.outputIndices.GetUnsafePtr();
    int outputIndex = oldLength;
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), 0, jobData.appendCount);
#endif
    // '<' (not '!=') so a malformed negative appendCount produces an empty
    // append instead of an infinite loop with out-of-bounds writes.
    for (int i = 0; i < jobData.appendCount; i++)
    {
        if (jobData.data.Execute(i))
        {
            outputPtr[outputIndex] = i;
            outputIndex++;
        }
    }
    // Commit only the entries actually written.
    jobData.outputIndices.ResizeUninitialized(outputIndex);
}
/// <summary>
/// Filter mode: re-evaluates the predicate for each index already stored in
/// <c>jobData.outputIndices</c> and compacts the list in place, keeping only
/// the indices for which the predicate still returns true.
/// </summary>
/// <remarks>
/// The write cursor (<c>keptCount</c>) never overtakes the read cursor, so the
/// in-place compaction through the raw pointer is safe; the final length is
/// committed with <c>ResizeUninitialized</c>.
/// </remarks>
public static unsafe void ExecuteFilter(ref JobDataWithFiltering jobData, System.IntPtr bufferRangePatchData)
{
    int* indices = (int*)jobData.outputIndices.GetUnsafePtr();
    int totalCount = jobData.outputIndices.Length;
    int keptCount = 0;
    for (int readPos = 0; readPos < totalCount; readPos++)
    {
        int candidate = indices[readPos];
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), candidate, 1);
#endif
        if (jobData.data.Execute(candidate))
        {
            indices[keptCount++] = candidate;
        }
    }
    jobData.outputIndices.ResizeUninitialized(keptCount);
}