/// <summary>
/// Runs the user body-pairs job over every phased dispatch pair, skipping
/// joints and already-invalidated pairs, and invalidates any pair the user
/// job disables.
/// </summary>
public unsafe static void Execute(ref BodyPairsJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    int pairCount = jobData.PhasedDispatchPairs.Length;
    for (int i = 0; i < pairCount; i++)
    {
        DispatchPairSequencer.DispatchPair dispatchPair = jobData.PhasedDispatchPairs[i];

        // Only valid body-vs-body pairs are surfaced to the user job.
        bool isBodyPair = !dispatchPair.IsJoint && dispatchPair.IsValid;
        if (!isBodyPair)
        {
            continue;
        }

        int indexA = dispatchPair.BodyIndexA;
        int indexB = dispatchPair.BodyIndexB;
        var pair = new ModifiableBodyPair
        {
            BodyIndexPair = new BodyIndexPair { BodyIndexA = indexA, BodyIndexB = indexB },
            EntityPair = new EntityPair
            {
                EntityA = jobData.Bodies[indexA].Entity,
                EntityB = jobData.Bodies[indexB].Entity
            }
        };

        jobData.UserJobData.Execute(ref pair);

        // The user job disables a pair by clearing either body index to -1.
        if (pair.BodyIndexA == -1 || pair.BodyIndexB == -1)
        {
            jobData.PhasedDispatchPairs[i] = DispatchPairSequencer.DispatchPair.Invalid;
        }
    }
}
/// <summary>
/// Streams collision events to the user job, materializing each event's
/// narrow-phase contact points into a temporary array and resolving the
/// involved entities and input velocities.
/// </summary>
public unsafe static void Execute(ref CollisionEventJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    foreach (ref LowLevel.CollisionEvent eventData in jobData.EventReader)
    {
        // Copy contact points out of the event stream; Allocator.Temp is
        // scoped to the job, so no explicit Dispose is required.
        int pointCount = eventData.NumNarrowPhaseContactPoints;
        var points = new NativeArray <ContactPoint>(pointCount, Allocator.Temp);
        for (int p = 0; p < pointCount; p++)
        {
            points[p] = eventData.AccessContactPoint(p);
        }

        int indexA = eventData.BodyIndices.BodyAIndex;
        int indexB = eventData.BodyIndices.BodyBIndex;

        // Bodies beyond the dynamic range have no input velocity recorded.
        Velocity velocityA = indexA < jobData.InputVelocities.Length ? jobData.InputVelocities[indexA] : Velocity.Zero;
        Velocity velocityB = indexB < jobData.InputVelocities.Length ? jobData.InputVelocities[indexB] : Velocity.Zero;

        var collisionEvent = new CollisionEvent
        {
            EventData = eventData,
            Entities = new EntityPair
            {
                EntityA = jobData.Bodies[indexA].Entity,
                EntityB = jobData.Bodies[indexB].Entity
            },
            TimeStep = jobData.TimeStep,
            InputVelocityA = velocityA,
            InputVelocityB = velocityB,
            NarrowPhaseContactPoints = points
        };
        jobData.UserJobData.Execute(collisionEvent);
    }
}
// Entry point for a deferred-array job: the array length is read through a
// pointer at execute time (it may have been produced by an earlier job).
public static unsafe void Execute(void *structPtr, int jobIndex)
{
    var jobStruct = UnsafeUtility.AsRef <JobStructDefer <T> >(structPtr);
    var jobData = jobStruct.JobData;

    // Build the ranges locally from the deferred length rather than
    // receiving them from the scheduler.
    var ranges = new JobRanges();
    var arrayLength = UnsafeUtility.AsRef <int>(jobStruct.ArrayLengthPtr.ToPointer());
    ranges.ArrayLength = arrayLength;
    ranges.IndicesPerPhase = JobsUtility.GetDefaultIndicesPerPhase(arrayLength);

    while (true)
    {
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out var begin, out var end))
        {
            break;
        }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // NOTE(review): passes IntPtr.Zero instead of real patch data —
        // presumably no buffer range patching is available here; confirm.
        JobsUtility.PatchBufferMinMaxRanges(IntPtr.Zero, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif

        for (var i = begin; i < end; ++i)
        {
            jobData.Execute(i);
        }

        // NOTE(review): this unconditional break means only the FIRST stolen
        // range is executed per invocation — looks intentional for a runtime
        // without work stealing, but verify against the scheduler's contract.
        break;
    }
}
// Runs the user body-pairs job over the phased dispatch pairs, advancing past
// joint pairs between user callbacks via the trailing do/while.
public unsafe static void Execute(ref BodyPairsJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    int currentIdx = 0;
    // NOTE(review): index 0 is processed without an IsJoint check — presumably
    // the sequencer guarantees the first pair is never a joint; confirm.
    while (currentIdx < jobData.PhasedDispatchPairs.Length)
    {
        DispatchPairSequencer.DispatchPair dispatchPair = jobData.PhasedDispatchPairs[currentIdx];

        // Surface the pair's body indices and entities to the user job.
        var pair = new ModifiableBodyPair
        {
            BodyIndices = new BodyIndexPair { BodyAIndex = dispatchPair.BodyAIndex, BodyBIndex = dispatchPair.BodyBIndex },
            Entities = new EntityPair
            {
                EntityA = jobData.Bodies[dispatchPair.BodyAIndex].Entity,
                EntityB = jobData.Bodies[dispatchPair.BodyBIndex].Entity
            }
        };

        jobData.UserJobData.Execute(ref pair);

        // User job disables the pair by clearing either body index to -1.
        if (pair.BodyIndices.BodyAIndex == -1 || pair.BodyIndices.BodyBIndex == -1)
        {
            jobData.PhasedDispatchPairs[currentIdx] = DispatchPairSequencer.DispatchPair.Invalid;
        }

        // Advance at least one slot, then keep skipping joint pairs — only
        // body-vs-body pairs are handed to the user job.
        do
        {
            currentIdx++;
        }
        while (currentIdx < jobData.PhasedDispatchPairs.Length && jobData.PhasedDispatchPairs[currentIdx].IsJoint);
    }
}
/// <summary>
/// Walks work-stolen ranges of hash-map buckets and invokes the user job's
/// ExecuteNext for every (key, value) entry found in each bucket's chain.
/// </summary>
private static unsafe void Execute(ref JobMultiHashMap fullData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    int begin, end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
        // Raw views into the hash map's internal storage.
        var bucketArray = (int *)fullData.HashMap.m_Buffer->buckets;
        var chainLinks = (int *)fullData.HashMap.m_Buffer->next;
        var keyStore = fullData.HashMap.m_Buffer->keys;
        var valueStore = fullData.HashMap.m_Buffer->values;

        for (int bucket = begin; bucket < end; bucket++)
        {
            // Follow the bucket's entry chain; -1 terminates the chain.
            for (int entry = bucketArray[bucket]; entry != -1; entry = chainLinks[entry])
            {
                var key = UnsafeUtility.ReadArrayElement <TKey>(keyStore, entry);
                var value = UnsafeUtility.ReadArrayElement <TValue>(valueStore, entry);
                fullData.JobData.ExecuteNext(key, value);
            }
        }
    }
}
/// <summary>
/// Executes the job once for every unordered pair (i, j) with i &lt; j inside
/// each work-stolen index range. Pairs spanning two different stolen ranges
/// are not visited.
/// </summary>
public static unsafe void Execute(
    ref T jobData,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    // Keep stealing ranges until the scheduler reports none remain.
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out int begin, out int end))
    {
        // All unordered pairs within [begin, end).
        for (var first = begin; first < end; ++first)
        {
            for (var second = first + 1; second < end; ++second)
            {
                jobData.Execute(first, second);
            }
        }
    }
}
/// <summary>
/// Drains all available work stealing ranges, patching buffer min/max
/// ranges (when collection checks are enabled) and handing each stolen
/// [start, end) range to the job in a single call.
/// </summary>
///
/// <param name="jobData">
/// The job to execute
/// </param>
///
/// <param name="additionalPtr">
/// TBD. Unused.
/// </param>
///
/// <param name="bufferRangePatchData">
/// TBD. Unused.
/// </param>
///
/// <param name="ranges">
/// Work stealing ranges to execute from
/// </param>
///
/// <param name="jobIndex">
/// Index of this job
/// </param>
public static unsafe void Execute(
    ref TJob jobData,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    while (true)
    {
        int rangeStart;
        int rangeEnd;
        bool gotRange = JobsUtility.GetWorkStealingRange(
            ref ranges,
            jobIndex,
            out rangeStart,
            out rangeEnd);
        if (!gotRange)
        {
            return;
        }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict native-container access checks to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(
            bufferRangePatchData,
            UnsafeUtility.AddressOf(ref jobData),
            rangeStart,
            rangeEnd - rangeStart);
#endif

        // The job receives the whole range at once, not per-index calls.
        jobData.Execute(rangeStart, rangeEnd);
    }
}
// Visits every entry in work-stolen bucket ranges of a multi-hash-map.
// For each key's chain, the entry that is the key's FIRST entry (by iterator
// EntryIndex) gets ExecuteFirst(value); all other entries for that key get
// ExecuteNext(firstValue, value).
private static unsafe void Execute(ref JobMultiHashMap fullData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin;
        int end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            return;
        }

        // Raw views into the hash map's internal storage.
        var buckets = (int *)fullData.HashMap.m_Buffer->buckets;
        var nextPtrs = (int *)fullData.HashMap.m_Buffer->next;
        var keys = fullData.HashMap.m_Buffer->keys;
        var values = fullData.HashMap.m_Buffer->values;

        for (int i = begin; i < end; i++)
        {
            int entryIndex = buckets[i];
            // Walk the bucket's entry chain; -1 terminates.
            while (entryIndex != -1)
            {
                var key = UnsafeUtility.ReadArrayElement <TKey>(keys, entryIndex);
                var value = UnsafeUtility.ReadArrayElement <int>(values, entryIndex);
                int firstValue;

                NativeMultiHashMapIterator <TKey> it;
                fullData.HashMap.TryGetFirstValue(key, out firstValue, out it);

                // [macton] Didn't expect a usecase for this with multiple same values
                // (since it's intended use was for unique indices.)
                // https://forum.unity.com/threads/ijobnativemultihashmapmergedsharedkeyindices-unexpected-behavior.569107/#post-3788170
                // Comparing entry indices (not values) makes exactly one entry
                // per key take the ExecuteFirst branch even when values repeat.
                if (entryIndex == it.EntryIndex)
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    // Values are treated as indices: narrow the writable range
                    // to just the element being touched.
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), value, 1);
#endif
                    fullData.JobData.ExecuteFirst(value);
                }
                else
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    // Widen the writable range to cover both indices involved.
                    var startIndex = Math.Min(firstValue, value);
                    var lastIndex = Math.Max(firstValue, value);
                    var rangeLength = (lastIndex - startIndex) + 1;
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), startIndex, rangeLength);
#endif
                    fullData.JobData.ExecuteNext(firstValue, value);
                }

                entryIndex = nextPtrs[entryIndex];
            }
        }
    }
}
/// <summary>
/// Forwards every collision event from the reader straight to the user job.
/// </summary>
public unsafe static void Execute(ref CollisionEventJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    foreach (var collisionEvent in jobData.EventReader)
    {
        jobData.UserJobData.Execute(collisionEvent);
    }
}
/// <summary>
/// Generated producer entry point: copies the job struct out of the
/// scheduler-owned buffer, runs it with empty ranges, then frees the buffer.
/// </summary>
static unsafe void ProducerExecuteFn_Gen(void *structPtr)
{
    // Work on a local copy; the original allocation is released below.
    var localJob = *(CustomJobData <T> *)structPtr;
    var emptyRanges = new JobRanges();
    Execute(ref localJob, IntPtr.Zero, IntPtr.Zero, ref emptyRanges, 0);
    UnsafeUtility.Free(structPtr, Allocator.TempJob);
}
/// <summary>
/// Forwards every trigger event from the reader straight to the user job.
/// </summary>
public unsafe static void Execute(ref TriggerEventJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges jobRanges, int jobIndex)
{
    foreach (var currentEvent in jobData.EventReader)
    {
        jobData.UserJobData.Execute(currentEvent);
    }
}
/// <summary>
/// Single-shot wrapper: hands the reader and its pre-assigned index to the
/// user job exactly once (no work stealing).
/// </summary>
public static void Execute(
    ref EventJobReaderStruct <TJob, T> fullData,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    fullData.JobData.Execute(fullData.Reader, fullData.Index);
}
// Visits every entry in work-stolen bucket ranges of a multi-hash-map.
// The entry whose value equals the key's first value gets ExecuteFirst(value);
// other entries for that key get ExecuteNext(firstValue, value).
private static unsafe void Execute(ref JobMultiHashMap fullData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin;
        int end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            return;
        }

        // Raw views into the hash map's internal storage.
        var buckets = (int *)fullData.HashMap.m_Buffer->buckets;
        var nextPtrs = (int *)fullData.HashMap.m_Buffer->next;
        var keys = fullData.HashMap.m_Buffer->keys;
        var values = fullData.HashMap.m_Buffer->values;

        for (int i = begin; i < end; i++)
        {
            int entryIndex = buckets[i];
            // Walk the bucket's entry chain; -1 terminates.
            while (entryIndex != -1)
            {
                var key = UnsafeUtility.ReadArrayElement <TKey>(keys, entryIndex);
                var value = UnsafeUtility.ReadArrayElement <int>(values, entryIndex);
                int firstValue;

                NativeMultiHashMapIterator <TKey> it;
                fullData.HashMap.TryGetFirstValue(key, out firstValue, out it);

                // NOTE(review): comparing by VALUE means that if the same value
                // is stored under a key more than once, every duplicate takes
                // the ExecuteFirst branch — presumably values are intended to
                // be unique indices; confirm before relying on duplicates.
                if (firstValue == value)
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    // Values are treated as indices: narrow the writable range
                    // to just the element being touched.
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), value, 1);
#endif
                    fullData.JobData.ExecuteFirst(value);
                }
                else
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    // Widen the writable range to cover both indices involved.
                    var startIndex = Math.Min(firstValue, value);
                    var lastIndex = Math.Max(firstValue, value);
                    var rangeLength = (lastIndex - startIndex) + 1;
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), startIndex, rangeLength);
#endif
                    fullData.JobData.ExecuteNext(firstValue, value);
                }

                entryIndex = nextPtrs[entryIndex];
            }
        }
    }
}
/// <summary>
/// Thin producer thunk: forwards to ExecuteInternal, dropping the unused
/// additional/patch pointers.
/// </summary>
public static void Execute(
    ref JobEntityBatchWrapper <T> jobWrapper,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
    => ExecuteInternal(ref jobWrapper, ref ranges, jobIndex);
// Runs the wrapped user job once, optionally bracketed by codegen-sample
// prepare/cleanup hooks.
public static void Execute(ref CustomJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
#if JOBS_CODEGEN_SAMPLE
    jobData.UserJobData.PrepareJobAtExecuteTimeFn_Gen(jobIndex);
#endif

    jobData.UserJobData.Execute(ref jobData.abData);

#if JOBS_CODEGEN_SAMPLE
    // NOTE(review): `void *structPtr` is not a valid C# argument expression —
    // this line will not compile if JOBS_CODEGEN_SAMPLE is ever defined.
    // Presumably a placeholder from the codegen sample; fix before enabling.
    jobData.UserJobData.CleanupJobFn_Gen(void *structPtr);
#endif
}
// Executes the user job over prefiltered entity batches. When parallel,
// batches are taken via work stealing and native-container access ranges are
// patched to each batch's entity window; when single-threaded, all batches
// run in one pass.
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchIndexWrapper <T> jobWrapper,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    // Unpack the batches, per-batch entity offsets, and batch count produced
    // by the prefilter job.
    ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

    bool isParallel = jobWrapper.IsParallel == 1;
    while (true)
    {
        int beginBatchIndex = 0;
        int endBatchIndex = batchCount;

        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }
        }

        // Do the actual user work.
        for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
        {
            var batch = filteredChunks[batchIndex];
            Assert.IsTrue(batch.Count > 0); // Empty batches are expected to be skipped by the prefilter job!

            // First-entity index of this batch within the filtered query.
            var entityOffset = entityIndices[batchIndex];

#if ENABLE_UNITY_COLLECTIONS_CHECKS
            if (isParallel)
            {
                // Restrict writable container ranges to this batch's entities.
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), entityOffset, batch.Count);
            }
#endif
            jobWrapper.JobData.Execute(batch, batchIndex, entityOffset);
        }

        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}
/// <summary>
/// Streams trigger events to the user job, resolving the two involved
/// entities from the bodies array.
/// </summary>
public unsafe static void Execute(ref TriggerEventJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    foreach (LowLevel.TriggerEvent eventData in jobData.EventReader)
    {
        int indexA = eventData.BodyIndices.BodyAIndex;
        int indexB = eventData.BodyIndices.BodyBIndex;
        var triggerEvent = new TriggerEvent
        {
            EventData = eventData,
            Entities = new EntityPair
            {
                EntityA = jobData.Bodies[indexA].Entity,
                EntityB = jobData.Bodies[indexB].Entity
            }
        };
        jobData.UserJobData.Execute(triggerEvent);
    }
}
// Executes the user job over cached chunks, splitting each chunk into
// JobsPerChunk batches. When parallel, batch ranges come from work stealing.
//
// FIX: the chunk-filter test was inverted — the original skipped chunks that
// MATCHED the filter and processed those that did not. With a change filter
// active (Filter.RequiresMatchesFilter), only chunks that pass MatchesFilter
// should be processed, so we skip when the chunk does NOT match.
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchWrapper <T> jobWrapper,
    ref JobRanges ranges,
    int jobIndex)
{
    var chunks = jobWrapper.CachedChunks;

    bool isParallel = jobWrapper.IsParallel == 1;
    bool isFiltering = jobWrapper.Filter.RequiresMatchesFilter;
    while (true)
    {
        int beginBatchIndex = 0;
        int endBatchIndex = chunks.Length;

        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }
        }

        // Do the actual user work.
        for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
        {
            // Map the flat batch index to a chunk and a sub-batch within it.
            var chunkIndex = batchIndex / jobWrapper.JobsPerChunk;
            var batchIndexInChunk = batchIndex % jobWrapper.JobsPerChunk;
            var chunk = chunks.Ptr[chunkIndex];

            // Skip chunks that fail the active query filter.
            if (isFiltering && !chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]], ref jobWrapper.Filter))
            {
                continue;
            }

            jobWrapper.JobData.Execute(ArchetypeChunk.EntityBatchFromChunk(chunk, jobWrapper.JobsPerChunk, batchIndexInChunk, chunks.EntityComponentStore), batchIndex);
        }

        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}
// Visits every hash-map entry in work-stolen bucket ranges, giving the user
// job the key and a mutable reference to the value, plus a running index
// unique within this Execute invocation.
public static unsafe void Execute (
    ref JobNativeMultiHashMapVisitKeyMutableValueProducer <TJob, TKey, TValue> producer,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex
)
{
    // NOTE(review): resets to 0 per worker invocation and accumulates across
    // all ranges this worker steals — indices are unique per worker, not
    // globally across workers; confirm callers expect that.
    var uniqueIndex = 0;
    while (true)
    {
        int begin;
        int end;

        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            return;
        }

        // Raw views into the hash map's internal bucket storage.
        var bucketData = producer.HashMap.GetUnsafeBucketData();
        var buckets = (int *)bucketData.buckets;
        var nextPtrs = (int *)bucketData.next;
        var keys = bucketData.keys;
        var values = bucketData.values;

        for (int i = begin; i < end; i++)
        {
            int entryIndex = buckets[i];

            // Walk the bucket's entry chain; -1 terminates.
            while (entryIndex != -1)
            {
                var key = UnsafeUtility.ReadArrayElement <TKey>(keys, entryIndex);

                // Value is passed by ref so the user job can mutate it in place.
                producer.JobData.ExecuteNext(uniqueIndex++, key, ref UnsafeUtility.ArrayElementAsRef <TValue>(values, entryIndex));

                entryIndex = nextPtrs[entryIndex];
            }
        }
    }
}
/// <summary>
/// Drains work-stolen index ranges and calls the user job once per index,
/// handing it the shared event reader.
/// </summary>
public static void Execute(
    ref JobEventReaderForEachStructParallel <TJob, T> fullData,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out var begin, out var end))
    {
        for (int index = begin; index < end; index++)
        {
            fullData.JobData.Execute(fullData.Reader, index);
        }
    }
}
/// <summary>
/// Reads events per foreach-index and forwards each one to the user job.
/// In parallel mode, foreach-index ranges come from work stealing; otherwise
/// the full range is processed in one pass.
/// </summary>
public static void Execute(
    ref JobEventProducer <TJob, T> fullData,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    while (true)
    {
        // Default to the whole foreach range for the single-threaded path.
        int begin = 0;
        int end = fullData.Reader.ForEachCount;

        // If we are running the job in parallel, steal some work.
        if (fullData.IsParallel)
        {
            bool gotRange = JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end);
            if (!gotRange)
            {
                return;
            }
        }

        for (int forEachIndex = begin; forEachIndex < end; forEachIndex++)
        {
            // Each foreach-index owns a run of serialized events.
            var eventCount = fullData.Reader.BeginForEachIndex(forEachIndex);
            for (var eventIdx = 0; eventIdx < eventCount; eventIdx++)
            {
                var item = fullData.Reader.Read <T>();
                fullData.JobData.Execute(item);
            }

            fullData.Reader.EndForEachIndex();
        }

        // Single-threaded: one pass over the full range is all the work.
        if (!fullData.IsParallel)
        {
            break;
        }
    }
}
// Drains all work-stolen index ranges, calling the job once per index, then
// releases the job's [DeallocateOnJobCompletion] resources.
//
// FIX: DoDeallocateOnJobCompletion was called INSIDE the work-stealing loop,
// so after the first stolen range the job's native containers were already
// deallocated while later ranges could still execute against them (and the
// deallocation itself could run more than once). It now runs exactly once,
// after all ranges are exhausted.
public static unsafe void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin;
        int end;

        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            break;
        }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        //JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif

        for (var i = begin; i < end; ++i)
        {
            jobData.Execute(i);
        }
    }

    // Release resources only after every stolen range has been processed.
    DoDeallocateOnJobCompletion(jobData);
}
/// <summary>
/// Executes the user job over precomputed entity batches. In parallel mode
/// the batch ranges come from work stealing; single-threaded mode processes
/// every batch in one pass.
/// </summary>
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchWrapper <T> jobWrapper,
    ref JobRanges ranges,
    int jobIndex)
{
    var batches = jobWrapper.Batches;
    bool runParallel = jobWrapper.IsParallel == 1;

    while (true)
    {
        // Single-threaded default: the whole batch array.
        int firstBatch = 0;
        int lastBatchExclusive = batches.Length;

        // Parallel: take the next stolen range, or stop if none remain.
        if (runParallel && !JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out firstBatch, out lastBatchExclusive))
        {
            break;
        }

        for (int batchIndex = firstBatch; batchIndex < lastBatchExclusive; ++batchIndex)
        {
            jobWrapper.JobData.Execute(batches[batchIndex], batchIndex);
        }

        // Single-threaded: one pass covers everything.
        if (!runParallel)
        {
            break;
        }
    }
}
/// <summary>
/// Executes the user job over prefiltered batches, passing each batch its
/// index and first-entity offset. Parallel mode steals batch ranges;
/// single-threaded mode runs all batches once.
/// </summary>
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchIndexWrapper <T> jobWrapper,
    ref JobRanges ranges,
    int jobIndex)
{
    // Unpack batches, per-batch entity offsets, and the batch count produced
    // by the prefilter job.
    ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

    bool runParallel = jobWrapper.IsParallel == 1;
    while (true)
    {
        // Single-threaded default: all batches.
        int firstBatch = 0;
        int lastBatchExclusive = batchCount;

        // Parallel: take the next stolen range, or stop if none remain.
        if (runParallel && !JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out firstBatch, out lastBatchExclusive))
        {
            break;
        }

        for (int batchIndex = firstBatch; batchIndex < lastBatchExclusive; ++batchIndex)
        {
            jobWrapper.JobData.Execute(filteredChunks[batchIndex], batchIndex, entityIndices[batchIndex]);
        }

        // Single-threaded: one pass covers everything.
        if (!runParallel)
        {
            break;
        }
    }
}
/// <summary>
/// Drains work-stolen index ranges and calls the inner job once per index.
/// With collection checks on, first validates the producer's sentinel against
/// the scheduled array length.
/// </summary>
public static unsafe void Execute(ref JobParallelForProducer <T> jobParallelForProducer, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Sentinel was set to ArrayLength + 37 at schedule time; verify transport.
    Assert.AreEqual(jobParallelForProducer.Sentinel - ranges.ArrayLength, 37);
#endif

    // TODO Tiny doesn't currently support work stealing. https://unity3d.atlassian.net/browse/DOTSR-286
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out int begin, out int end))
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // No patch data is available here; pass IntPtr.Zero.
        JobsUtility.PatchBufferMinMaxRanges(IntPtr.Zero, UnsafeUtility.AddressOf(ref jobParallelForProducer), begin, end - begin);
#endif
        for (var index = begin; index < end; ++index)
        {
            jobParallelForProducer.JobData.Execute(index);
        }
    }
}
// Reads Havok plugin body-ID pairs from a block stream, remaps them to local
// body indices, and runs the user body-pairs job on each pair.
public unsafe static void Execute(ref BodyPairsJobData <T> jobData, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    // Nothing to do when the stream is absent or empty.
    if (jobData.BlockStreamStart == null || !jobData.BlockStreamStart->HasElements)
    {
        return;
    }

    var blockStreamReader = new Havok.Physics.HpBlockStreamReader(jobData.BlockStreamStart);
    int *pluginIndexToLocal = jobData.PluginIndexToLocal->Data;
    while (blockStreamReader.HasItems)
    {
        BodyIndexPair indices = blockStreamReader.Read <BodyIndexPair>(); // Really an hknpBodyIdPair

        // The low 24 bits hold the plugin body id; the table maps it to the
        // local body index. NOTE(review): upper-8-bit flags are discarded —
        // presumably metadata; confirm against the Havok plugin layout.
        int bodyIndexA = pluginIndexToLocal[indices.BodyIndexA & 0x00ffffff];
        int bodyIndexB = pluginIndexToLocal[indices.BodyIndexB & 0x00ffffff];

        var pair = new ModifiableBodyPair
        {
            BodyIndexPair = new BodyIndexPair { BodyIndexA = bodyIndexA, BodyIndexB = bodyIndexB },
            EntityPair = new EntityPair
            {
                EntityA = jobData.Bodies[bodyIndexA].Entity,
                EntityB = jobData.Bodies[bodyIndexB].Entity
            }
        };

        jobData.UserJobData.Execute(ref pair);

        // NOTE(review): writing through the reader appears to overwrite the
        // just-read pair in place to mark it disabled — verify that
        // HpBlockStreamReader.Write targets the element just consumed.
        if (pair.BodyIndexA == -1 || pair.BodyIndexB == -1)
        {
            blockStreamReader.Write(BodyIndexPair.Invalid);
        }
    }
}
/// <summary>
/// Single-shot wrapper: invokes the wrapped user job exactly once, ignoring
/// the range and patch parameters.
/// </summary>
public static void Execute(ref JobProducer <T> jobProducer, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
    => jobProducer.JobData.Execute();
/// <summary>
/// Dispatches the three-component iteration job: in parallel-for mode it
/// drains work-stolen ranges (patching access ranges under collection
/// checks); otherwise it runs the whole iterator length in one pass.
/// </summary>
static unsafe void Execute(ref JobStruct_Process3 <T, U0, U1, U2> jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    if (!jobData.Iterator.m_IsParallelFor)
    {
        // Single-threaded: one pass over everything.
        ExecuteInnerLoop(ref jobData, 0, jobData.Iterator.m_Length);
        return;
    }

    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out int begin, out int end))
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict native-container access checks to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif
        ExecuteInnerLoop(ref jobData, begin, end);
    }
}
// Drains work-stolen ranges; for each stolen index, repeatedly calls the job
// with a per-item work budget, striding forward by jobData.Chunks until the
// budget is spent or the item range is exhausted.
public static unsafe void Execute(ref T jobData, System.IntPtr additionalPtr, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out var beginIndex, out var endIndex))
    {
        // Restrict native-container access checks to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), beginIndex, endIndex - beginIndex);
        for (int i = beginIndex; i < endIndex; i++)
        {
            var index = i;
            var budget = jobData.ChunkBudget;

            // Execute reports how much budget each call consumed; stop when
            // the budget runs out or we stride past the total item count.
            // NOTE(review): striding by jobData.Chunks implies items are
            // interleaved across chunk slots — confirm the intended layout.
            while (budget > 0 && index < jobData.TotalItems)
            {
                budget -= jobData.Execute(index, budget, jobIndex);
                index += jobData.Chunks;
            }
        }
    }
}
/// <summary>
/// Drains all work-stolen index ranges, patching native-container access
/// ranges for each, and calls the job once per index.
/// </summary>
public static void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    // Decompiled names (num/num2/flag) replaced with meaningful ones;
    // behavior is unchanged.
    int begin;
    int end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
        // Restrict native-container access checks to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf <T>(ref jobData), begin, end - begin);
        for (int index = begin; index < end; index++)
        {
            jobData.Execute(index);
        }
    }
}