internal unsafe static void ExecuteInternal(
    ref JobEntityBatchIndexWrapper<T> jobWrapper,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

    bool isParallel = jobWrapper.IsParallel == 1;
    while (true)
    {
        int beginBatchIndex = 0;
        int endBatchIndex = batchCount;

        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }
        }

        // Do the actual user work.
        for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
        {
            var batch = filteredChunks[batchIndex];
            Assert.IsTrue(batch.Count > 0); // Empty batches are expected to be skipped by the prefilter job!
            var entityOffset = entityIndices[batchIndex];
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            if (isParallel)
            {
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), entityOffset, batch.Count);
            }
#endif
            jobWrapper.JobData.Execute(batch, batchIndex, entityOffset);
        }

        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchIndexWrapper<T> jobWrapper,
    ref JobRanges ranges,
    int jobIndex)
{
    ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

    bool isParallel = jobWrapper.IsParallel == 1;
    while (true)
    {
        int beginBatchIndex = 0;
        int endBatchIndex = batchCount;

        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }
        }

        // Do the actual user work.
        for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
        {
            jobWrapper.JobData.Execute(filteredChunks[batchIndex], batchIndex, entityIndices[batchIndex]);
        }

        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}
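Both overloads above end in jobWrapper.JobData.Execute(batch, batchIndex, entityOffset), which is the user-facing IJobEntityBatchWithIndex.Execute(ArchetypeChunk, int, int) callback. The sketch below shows what that user side might look like, assuming the Entities 0.17/0.50-era IJobEntityBatchWithIndex API; the Heading component, NormalizeHeadingsJob, and NormalizeHeadingsSystem names are invented for illustration and are not part of the source above.

// A minimal, hypothetical IJobEntityBatchWithIndex job and system; names are
// illustrative, only the interface and scheduling calls are assumed Unity API.
using Unity.Burst;
using Unity.Entities;
using Unity.Mathematics;

public struct Heading : IComponentData
{
    public float3 Value;
}

[BurstCompile]
struct NormalizeHeadingsJob : IJobEntityBatchWithIndex
{
    public ComponentTypeHandle<Heading> HeadingTypeHandle;

    // batchIndex and indexOfFirstEntityInQuery receive the batchIndex and
    // entityOffset values that ExecuteInternal passes through.
    public void Execute(ArchetypeChunk batchInChunk, int batchIndex, int indexOfFirstEntityInQuery)
    {
        var headings = batchInChunk.GetNativeArray(HeadingTypeHandle);
        for (int i = 0; i < batchInChunk.Count; i++)
        {
            headings[i] = new Heading { Value = math.normalizesafe(headings[i].Value) };
        }
    }
}

public partial class NormalizeHeadingsSystem : SystemBase
{
    EntityQuery _query;

    protected override void OnCreate()
    {
        _query = GetEntityQuery(ComponentType.ReadWrite<Heading>());
    }

    protected override void OnUpdate()
    {
        var job = new NormalizeHeadingsJob
        {
            HeadingTypeHandle = GetComponentTypeHandle<Heading>()
        };
        // ScheduleParallel takes the isParallel work-stealing path in
        // ExecuteInternal; Schedule (single-threaded) would run every batch
        // in one pass and exit the loop immediately.
        Dependency = job.ScheduleParallel(_query, Dependency);
    }
}

Note that which ExecuteInternal branch runs is decided entirely by how the job is scheduled: only the parallel path calls JobsUtility.GetWorkStealingRange, and only the checks-enabled parallel path in the first overload patches buffer min/max ranges per batch.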