/// <summary>
/// Executes a particle-system parallel-for job over work-stealing index ranges,
/// then tears down the job's atomic safety handle.
/// </summary>
/// <param name="data">User job struct whose Execute is invoked once per particle index.</param>
/// <param name="listDataPtr">Pointer to the NativeListData holding the particle system reference.</param>
/// <param name="bufferRangePatchData">Opaque data used to patch container min/max ranges for safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public unsafe static void Execute(ref T data, IntPtr listDataPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    var listData = (NativeListData *)(void *)listDataPtr;

    // Pull the native particle buffers for the managed particle system.
    NativeParticleData particleData;
    ParticleSystem.CopyManagedJobData(listData->system, out particleData);
    var jobData = new ParticleSystemJobData(ref particleData);

    int begin, end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
        // Restrict checked container access to the stolen [begin, end) range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf<T>(ref data), begin, end - begin);
        for (int i = begin; i < end; ++i)
            data.Execute(jobData, i);
    }

    // The safety handle created for jobData above is owned here; release it once all ranges are done.
    AtomicSafetyHandle.CheckDeallocateAndThrow(jobData.m_Safety);
    AtomicSafetyHandle.Release(jobData.m_Safety);
}
/// <summary>
/// Executes a particle-system parallel-for-batch job: each stolen range is handed
/// to the user job as (start, count), then the safety handle is released.
/// </summary>
/// <param name="data">User batch job struct.</param>
/// <param name="listDataPtr">Pointer to the NativeListData holding the particle system reference.</param>
/// <param name="bufferRangePatchData">Opaque data used to patch container min/max ranges for safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(ref T data, IntPtr listDataPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    var listData = (NativeListData *)listDataPtr;

    // Fetch the native-side particle buffers for the system referenced by the list data.
    NativeParticleData particleData;
    ParticleSystem.CopyManagedJobData(listData->system, out particleData);
    var jobData = new ParticleSystemJobData(ref particleData);

    int rangeBegin, rangeEnd;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out rangeBegin, out rangeEnd))
    {
        // Limit checked container access to the stolen range before running user code.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref data), rangeBegin, rangeEnd - rangeBegin);

        // Batch variant: the user job receives the start index and element count.
        data.Execute(jobData, rangeBegin, rangeEnd - rangeBegin);
    }

    AtomicSafetyHandle.CheckDeallocateAndThrow(jobData.m_Safety);
    AtomicSafetyHandle.Release(jobData.m_Safety);
}
/// <summary>
/// Executes a deferred-length parallel-for job. The array length is read through
/// a pointer at execution time (it is not known at schedule time); a local
/// JobRanges is built from it and indices are executed range by range.
/// </summary>
/// <param name="structPtr">Pointer to the JobStructDefer&lt;T&gt; wrapper holding the job and the length pointer.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(void *structPtr, int jobIndex)
{
    var jobStruct = UnsafeUtility.AsRef <JobStructDefer <T> >(structPtr);
    var jobData = jobStruct.JobData;
    // Ranges are constructed locally from the deferred length rather than passed in.
    var ranges = new JobRanges();
    var arrayLength = UnsafeUtility.AsRef <int>(jobStruct.ArrayLengthPtr.ToPointer());
    ranges.ArrayLength = arrayLength;
    ranges.IndicesPerPhase = JobsUtility.GetDefaultIndicesPerPhase(arrayLength);
    while (true)
    {
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out var begin, out var end))
        {
            break;
        }
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict checked container access to the current range.
        JobsUtility.PatchBufferMinMaxRanges(IntPtr.Zero, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif
        for (var i = begin; i < end; ++i)
        {
            jobData.Execute(i);
        }
        // NOTE(review): this unconditional break means only the first stolen range
        // is processed per call — presumably intentional on a runtime without work
        // stealing, where the first range covers everything; confirm.
        break;
    }
}
/// <summary>
/// Execute the job over every work-stealing range available for this job index.
/// </summary>
/// <param name="jobData">The job to execute.</param>
/// <param name="additionalPtr">Unused by this wrapper.</param>
/// <param name="bufferRangePatchData">
/// Range-patch data forwarded to the safety system when collection checks are enabled.
/// </param>
/// <param name="ranges">Work-stealing ranges to draw from.</param>
/// <param name="jobIndex">Index of this job.</param>
public static unsafe void Execute(
    ref TJob jobData,
    IntPtr additionalPtr,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    while (true)
    {
        int begin;
        int end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            return;
        }
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict checked container access to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(
            bufferRangePatchData,
            UnsafeUtility.AddressOf(ref jobData),
            begin,
            end - begin);
#endif
        // Note: the user job receives (begin, end) — the exclusive end index, not a count.
        jobData.Execute(begin, end);
    }
}
/// <summary>
/// Executes a merged-shared-key-indices multi-hash-map job: each stolen range is a
/// range of hash-map buckets; every entry in each bucket chain is read and
/// dispatched to ExecuteFirst (head entry for its key) or ExecuteNext (subsequent
/// entries, paired with the head's value).
/// </summary>
/// <param name="fullData">Wrapper holding the hash map and the user job.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing bucket ranges.</param>
/// <param name="jobIndex">Index of this job instance.</param>
private static unsafe void Execute(ref JobMultiHashMap fullData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin;
        int end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            return;
        }
        // Raw views over the hash map's internal bucket/chain/key/value arrays.
        var buckets = (int *)fullData.HashMap.m_Buffer->buckets;
        var nextPtrs = (int *)fullData.HashMap.m_Buffer->next;
        var keys = fullData.HashMap.m_Buffer->keys;
        var values = fullData.HashMap.m_Buffer->values;
        for (int i = begin; i < end; i++)
        {
            // Walk the chain of entries in bucket i (-1 terminates the chain).
            int entryIndex = buckets[i];
            while (entryIndex != -1)
            {
                var key = UnsafeUtility.ReadArrayElement <TKey>(keys, entryIndex);
                var value = UnsafeUtility.ReadArrayElement <int>(values, entryIndex);
                int firstValue;
                NativeMultiHashMapIterator <TKey> it;
                fullData.HashMap.TryGetFirstValue(key, out firstValue, out it);
                // [macton] Didn't expect a usecase for this with multiple same values
                // (since it's intended use was for unique indices.)
                // https://forum.unity.com/threads/ijobnativemultihashmapmergedsharedkeyindices-unexpected-behavior.569107/#post-3788170
                // "First" is detected by entry identity: this entry is the one the
                // iterator reports as the head entry for its key.
                if (entryIndex == it.EntryIndex)
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), value, 1);
#endif
                    fullData.JobData.ExecuteFirst(value);
                }
                else
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    // Widen the patched range to cover both indices involved in the pair.
                    var startIndex = Math.Min(firstValue, value);
                    var lastIndex = Math.Max(firstValue, value);
                    var rangeLength = (lastIndex - startIndex) + 1;
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), startIndex, rangeLength);
#endif
                    fullData.JobData.ExecuteNext(firstValue, value);
                }
                entryIndex = nextPtrs[entryIndex];
            }
        }
    }
}
/// <summary>
/// Executes a merged-shared-key-indices multi-hash-map job: each stolen range is a
/// range of hash-map buckets; every entry in each bucket chain is read and
/// dispatched to ExecuteFirst or ExecuteNext depending on whether its value equals
/// the first value stored for its key.
/// </summary>
/// <param name="fullData">Wrapper holding the hash map and the user job.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing bucket ranges.</param>
/// <param name="jobIndex">Index of this job instance.</param>
private static unsafe void Execute(ref JobMultiHashMap fullData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin;
        int end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            return;
        }
        // Raw views over the hash map's internal bucket/chain/key/value arrays.
        var buckets = (int *)fullData.HashMap.m_Buffer->buckets;
        var nextPtrs = (int *)fullData.HashMap.m_Buffer->next;
        var keys = fullData.HashMap.m_Buffer->keys;
        var values = fullData.HashMap.m_Buffer->values;
        for (int i = begin; i < end; i++)
        {
            // Walk the chain of entries in bucket i (-1 terminates the chain).
            int entryIndex = buckets[i];
            while (entryIndex != -1)
            {
                var key = UnsafeUtility.ReadArrayElement <TKey>(keys, entryIndex);
                var value = UnsafeUtility.ReadArrayElement <int>(values, entryIndex);
                int firstValue;
                NativeMultiHashMapIterator <TKey> it;
                fullData.HashMap.TryGetFirstValue(key, out firstValue, out it);
                // NOTE(review): "first" is decided by value equality, not entry
                // identity — if the same value occurs more than once under a key,
                // each occurrence compares equal to firstValue and is treated as
                // first. Presumably fine for the intended unique-index use case;
                // confirm (compare the sibling variant that tests it.EntryIndex).
                if (firstValue == value)
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), value, 1);
#endif
                    fullData.JobData.ExecuteFirst(value);
                }
                else
                {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    // Widen the patched range to cover both indices involved in the pair.
                    var startIndex = Math.Min(firstValue, value);
                    var lastIndex = Math.Max(firstValue, value);
                    var rangeLength = (lastIndex - startIndex) + 1;
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref fullData), startIndex, rangeLength);
#endif
                    fullData.JobData.ExecuteNext(firstValue, value);
                }
                entryIndex = nextPtrs[entryIndex];
            }
        }
    }
}
/// <summary>
/// Parallel-for executor: drains work-stealing ranges, invoking the user job once
/// per index in each stolen range.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin;
        int end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
            return;

        // Restrict checked container access to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf<T>(ref jobData), begin, end - begin);
        for (int index = begin; index < end; ++index)
            jobData.Execute(index);
    }
}
/// <summary>
/// Executes an entity-batch-with-index job over prefiltered batches. Prefilter
/// data is unpacked into the batch list and per-batch entity offsets; in parallel
/// mode batch ranges are drawn from the work-stealing system, otherwise a single
/// pass covers all batches.
/// </summary>
/// <param name="jobWrapper">Wrapper holding the user job, prefilter data, and the parallel flag.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing batch ranges (used only when parallel).</param>
/// <param name="jobIndex">Index of this job instance.</param>
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchIndexWrapper <T> jobWrapper,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

    bool isParallel = jobWrapper.IsParallel == 1;
    while (true)
    {
        // Sequential default: one pass over every prefiltered batch.
        int beginBatchIndex = 0;
        int endBatchIndex = batchCount;

        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }
        }

        // Do the actual user work.
        for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
        {
            var batch = filteredChunks[batchIndex];
            Assert.IsTrue(batch.Count > 0); // Empty batches are expected to be skipped by the prefilter job!
            var entityOffset = entityIndices[batchIndex];
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            if (isParallel)
            {
                // Patch container access to the entity range covered by this batch.
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), entityOffset, batch.Count);
            }
#endif
            jobWrapper.JobData.Execute(batch, batchIndex, entityOffset);
        }

        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}
/// <summary>
/// Transform parallel-for executor. Read-only transforms are processed via work
/// stealing; writable transforms use this job's statically assigned range so each
/// transform is touched by exactly one job.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="jobData2">Pointer to the TransformJobData (transform array handle + read-only flag).</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Job ranges (stolen or fixed depending on access mode).</param>
/// <param name="jobIndex">Index of this job instance.</param>
public unsafe static void Execute(ref T jobData, IntPtr jobData2, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    IJobParallelForTransformExtensions.TransformParallelForLoopStruct <T> .TransformJobData transformJobData;
    UnsafeUtility.CopyPtrToStructure <IJobParallelForTransformExtensions.TransformParallelForLoopStruct <T> .TransformJobData>((void *)jobData2, out transformJobData);

    var sortedToUserIndex = (int *)TransformAccessArray.GetSortedToUserIndex(transformJobData.TransformAccessArray);
    var sortedTransforms = (TransformAccess *)TransformAccessArray.GetSortedTransformAccess(transformJobData.TransformAccessArray);

    if (transformJobData.IsReadOnly == 1)
    {
        // Read-only access: safe for any worker to steal arbitrary ranges.
        int begin, end;
        while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            for (int sorted = begin; sorted < end; ++sorted)
            {
                int user = sortedToUserIndex[sorted];
                // Containers may only be touched at the single user index.
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf<T>(ref jobData), user, 1);
                var access = sortedTransforms[sorted];
                access.MarkReadOnly();
                jobData.Execute(user, access);
            }
        }
    }
    else
    {
        // Read-write access: stick to this job's fixed range.
        int begin, end;
        JobsUtility.GetJobRange(ref ranges, jobIndex, out begin, out end);
        for (int sorted = begin; sorted < end; ++sorted)
        {
            int user = sortedToUserIndex[sorted];
            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf<T>(ref jobData), user, 1);
            var access = sortedTransforms[sorted];
            access.MarkReadWrite();
            jobData.Execute(user, access);
        }
    }
}
/// <summary>
/// Parallel-for executor: drains stolen ranges, invoking the user job once per index.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public unsafe static void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    int begin, end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict checked container access to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif
        for (int i = begin; i < end; i++)
            jobData.Execute(i);
    }
}
/// <summary>
/// Runs a component-group process job either as a parallel-for (draining stolen
/// ranges) or as one sequential pass over the whole iterator.
/// </summary>
/// <param name="jobData">Wrapper holding the user job and its iterator.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing ranges (parallel mode only).</param>
/// <param name="jobIndex">Index of this job instance.</param>
static unsafe void Execute(ref JobStruct_Process3 <T, U0, U1, U2> jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    if (!jobData.Iterator.m_IsParallelFor)
    {
        // Sequential: walk the entire iterator in a single inner-loop call.
        ExecuteInnerLoop(ref jobData, 0, jobData.Iterator.m_Length);
        return;
    }

    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out var begin, out var end))
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict checked container access to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif
        ExecuteInnerLoop(ref jobData, begin, end);
    }
}
/// <summary>
/// Work-stealing executor for chunked, budgeted jobs: for each stolen index the
/// user job is invoked repeatedly, striding forward by Chunks, until the per-item
/// budget is spent or the cursor passes TotalItems.
/// </summary>
/// <param name="jobData">User job struct exposing ChunkBudget, Chunks, and TotalItems.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance (also forwarded to the user job).</param>
public static unsafe void Execute(ref T jobData, System.IntPtr additionalPtr, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    while (true)
    {
        int begin, end;
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
            break;

        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);

        for (int i = begin; i < end; i++)
        {
            int cursor = i;
            int remainingBudget = jobData.ChunkBudget;
            // Execute returns how much budget it consumed; keep striding until the
            // budget runs out or the cursor steps past the total item count.
            while (remainingBudget > 0 && cursor < jobData.TotalItems)
            {
                remainingBudget -= jobData.Execute(cursor, remainingBudget, jobIndex);
                cursor += jobData.Chunks;
            }
        }
    }
}
/// <summary>
/// Parallel-for executor: processes every stolen range, one user Execute call per index.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    int begin, end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
        // Restrict checked container access to the stolen range.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
        for (int index = begin; index < end; index++)
            jobData.Execute(index);
    }
}
/// <summary>
/// Transform job executor: walks this job's statically assigned index range and
/// invokes the user job once per transform, mapping sorted order back to user indices.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="jobData2">Pointer to data beginning with the TransformAccessArray handle (only that IntPtr is read here).</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Job ranges; this job's fixed range is used (no stealing).</param>
/// <param name="jobIndex">Index of this job instance.</param>
public unsafe static void Execute(ref T jobData, IntPtr jobData2, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    IntPtr transformArray;
    UnsafeUtility.CopyPtrToStructure <IntPtr>(jobData2, out transformArray);

    var sortedToUserIndex = (int *)TransformAccessArray.GetSortedToUserIndex(transformArray);
    var sortedTransforms = (TransformAccess *)TransformAccessArray.GetSortedTransformAccess(transformArray);

    int begin, end;
    JobsUtility.GetJobRange(ref ranges, jobIndex, out begin, out end);
    for (int sorted = begin; sorted < end; sorted++)
    {
        int user = sortedToUserIndex[sorted];
        // One-element patch: user code may only touch containers at this user index.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf<T>(ref jobData), user, 1);
        jobData.Execute(user, sortedTransforms[sorted]);
    }
}
/// <summary>
/// Transform parallel-for executor: read-only transforms are processed via work
/// stealing, writable transforms via this job's fixed range.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="jobData2">Pointer to the TransformJobData (transform array handle + read-only flag).</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Job ranges (stolen or fixed depending on access mode).</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(ref T jobData, System.IntPtr jobData2, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure((void *)jobData2, out TransformJobData transformJobData);

    var toUserIndex = (int *)TransformAccessArray.GetSortedToUserIndex(transformJobData.TransformAccessArray);
    var transforms = (TransformAccess *)TransformAccessArray.GetSortedTransformAccess(transformJobData.TransformAccessArray);

    if (transformJobData.IsReadOnly == 1)
    {
        // Read-only: any worker may process any transform, so steal ranges.
        int begin, end;
        while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
        {
            for (int sorted = begin; sorted < end; ++sorted)
            {
                int user = toUserIndex[sorted];
                // Containers may only be touched at the single user index.
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), user, 1);
                var access = transforms[sorted];
                access.MarkReadOnly();
                jobData.Execute(user, access);
            }
        }
    }
    else
    {
        // Read-write: stick to this job's statically assigned range.
        JobsUtility.GetJobRange(ref ranges, jobIndex, out var begin, out var end);
        for (int sorted = begin; sorted < end; ++sorted)
        {
            int user = toUserIndex[sorted];
            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), user, 1);
            var access = transforms[sorted];
            access.MarkReadWrite();
            jobData.Execute(user, access);
        }
    }
}
/// <summary>
/// Executes a parallel-for-batch job from a raw wrapper pointer; each stolen
/// range is handed to the user job as (start, count).
/// </summary>
/// <param name="structPtr">Pointer to the ParallelForBatchJobStruct&lt;T&gt; wrapper.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(void *structPtr, int jobIndex)
{
    var jobStruct = UnsafeUtility.AsRef <ParallelForBatchJobStruct <T> >(structPtr);
    // NOTE(review): ranges is a local copy of the wrapper's ranges, so stealing
    // state is not shared with other workers — presumably fine on a
    // single-threaded runtime; confirm.
    var ranges = jobStruct.Ranges;
    var jobData = jobStruct.JobData;
    while (true)
    {
        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out int begin, out int end))
        {
            break;
        }
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict checked container access to the current range.
        JobsUtility.PatchBufferMinMaxRanges(IntPtr.Zero, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif
        jobData.Execute(begin, end - begin);
        // NOTE(review): unconditional break — only the first stolen range is
        // executed per call; presumably intentional where one range covers the
        // whole array. Confirm before relying on multi-range behavior.
        break;
    }
}
/// <summary>
/// Batch executor: hands each stolen [begin, end) range to the user job as (start, count).
/// </summary>
/// <param name="jobData">User batch job struct; its allowBoundsChecks flag gates range patching.</param>
/// <param name="additionalPtr">Unused.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing ranges shared between workers.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public unsafe static void Execute(ref T jobData, System.IntPtr additionalPtr, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    int begin, end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // The job can opt out of range patching via allowBoundsChecks.
        if (jobData.allowBoundsChecks)
        {
            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
        }
#endif
        jobData.Execute(begin, end - begin);
    }
}
/// <summary>
/// Transform job executor: walks this job's fixed index range, invoking the user
/// job once per transform in sorted order, mapped back to user indices.
/// </summary>
/// <param name="jobData">User job struct.</param>
/// <param name="jobData2">Pointer to data beginning with the TransformAccessArray handle (only that IntPtr is read here).</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Job ranges; this job's fixed range is used (no stealing).</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(ref T jobData, System.IntPtr jobData2, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    IntPtr transformAccessArray;
    UnsafeUtility.CopyPtrToStructure((void *)jobData2, out transformAccessArray);

    var toUserIndex = (int *)TransformAccessArray.GetSortedToUserIndex(transformAccessArray);
    var transforms = (TransformAccess *)TransformAccessArray.GetSortedTransformAccess(transformAccessArray);

    JobsUtility.GetJobRange(ref ranges, jobIndex, out var begin, out var end);
    for (int sorted = begin; sorted < end; ++sorted)
    {
        int user = toUserIndex[sorted];
        // One-element patch: user code may only touch containers at this user index.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), user, 1);
        jobData.Execute(user, transforms[sorted]);
    }
}
/// <summary>
/// Append pass for a filtering job: evaluates candidate indices [0, appendCount)
/// and appends the passing ones to outputIndices, reserving capacity up front.
/// </summary>
/// <param name="jobData">Wrapper holding the predicate job, the append count, and the output index list.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
public unsafe static void ExecuteAppend(ref JobDataWithFiltering jobData, System.IntPtr bufferRangePatchData)
{
    int existingCount = jobData.outputIndices.Length;
    // Reserve enough room for the worst case (every candidate passes).
    jobData.outputIndices.Capacity = math.max(jobData.appendCount + existingCount, jobData.outputIndices.Capacity);

    int *output = (int *)jobData.outputIndices.GetUnsafePtr();
    int writeCursor = existingCount;

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // The predicate may read anywhere in [0, appendCount).
    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), 0, jobData.appendCount);
#endif

    for (int candidate = 0; candidate != jobData.appendCount; candidate++)
    {
        if (jobData.data.Execute(candidate))
        {
            output[writeCursor] = candidate;
            writeCursor++;
        }
    }

    // Shrink the list to the number of indices actually written.
    jobData.outputIndices.ResizeUninitialized(writeCursor);
}
/// <summary>
/// Executor for a parallel-for producer job: validates a marshalling sentinel,
/// then drains work-stealing ranges, invoking the user job per index.
/// </summary>
/// <param name="jobParallelForProducer">Producer wrapper holding the user job and sentinel.</param>
/// <param name="additionalData">Unused.</param>
/// <param name="bufferRangePatchData">Unused here; IntPtr.Zero is passed to the patcher instead.</param>
/// <param name="ranges">Work-stealing ranges.</param>
/// <param name="jobIndex">Index of this job instance.</param>
public static unsafe void Execute(ref JobParallelForProducer <T> jobParallelForProducer, IntPtr additionalData, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Sanity check that the wrapper survived marshalling intact.
    Assert.AreEqual(jobParallelForProducer.Sentinel - ranges.ArrayLength, 37);
#endif
    // TODO Tiny doesn't currently support work stealing. https://unity3d.atlassian.net/browse/DOTSR-286
    int begin, end;
    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Restrict checked container access to the current range.
        JobsUtility.PatchBufferMinMaxRanges(IntPtr.Zero, UnsafeUtility.AddressOf(ref jobParallelForProducer), begin, end - begin);
#endif
        for (int i = begin; i < end; i++)
            jobParallelForProducer.JobData.Execute(i);
    }
}
/// <summary>
/// Filter pass: re-runs the predicate over the indices already stored in
/// outputIndices and compacts the list in place down to the survivors.
/// </summary>
/// <param name="jobWrapper">Wrapper holding the predicate job and the index list being filtered.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
public static unsafe void ExecuteFilter(ref JobWrapper jobWrapper, System.IntPtr bufferRangePatchData)
{
    int *indices = (int *)jobWrapper.outputIndices.GetUnsafePtr();
    int candidateCount = jobWrapper.outputIndices.Length;
    int survivorCount = 0;

    for (int i = 0; i != candidateCount; i++)
    {
        int candidate = indices[i];
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // The predicate may only touch containers at the single candidate index.
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), candidate, 1);
#endif
        if (jobWrapper.JobData.Execute(candidate))
        {
            // Compact in place: survivors are written back at the front of the list.
            indices[survivorCount] = candidate;
            survivorCount++;
        }
    }

    jobWrapper.outputIndices.ResizeUninitialized(survivorCount);
}
/// <summary>
/// Executes an entity-batch job over either a prebuilt batch list or cached
/// chunks (one or more batches per chunk), with optional change filtering.
/// In parallel mode batch ranges are drawn from the work-stealing system;
/// otherwise a single pass covers all batches.
/// </summary>
/// <param name="jobWrapper">Wrapper holding the user job, chunk cache, batch list, filter, and mode flags.</param>
/// <param name="bufferRangePatchData">Range-patch data for collection safety checks.</param>
/// <param name="ranges">Work-stealing batch ranges (used only when parallel).</param>
/// <param name="jobIndex">Index of this job instance.</param>
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchWrapper <T> jobWrapper,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    var chunks = jobWrapper.CachedChunks;
    var prebuiltBatches = (ArchetypeChunk *)jobWrapper.PrebuiltBatchList.Ptr;

    bool isParallel = jobWrapper.IsParallel == 1;
    bool isFiltering = jobWrapper.Filter.RequiresMatchesFilter;
    while (true)
    {
        // Sequential default: one pass over every batch of the active source
        // (prebuilt list or cached chunks).
        int beginBatchIndex = 0;
        int endBatchIndex = jobWrapper.UsePrebuiltBatchList == 1 ? jobWrapper.PrebuiltBatchList.Length : chunks.Length;

        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }

            // NOTE(review): patched with (0, 0) rather than the stolen range —
            // presumably deliberate (batch indices don't map to entity indices
            // here); confirm against the safety system's expectations.
            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), 0, 0);
        }

        // Do the actual user work.
        if (jobWrapper.UsePrebuiltBatchList == 1)
        {
            // Mode 1: batches were prebuilt by the caller; filter per batch.
            for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
            {
                var batch = prebuiltBatches[batchIndex];

                if (isFiltering && !batch.m_Chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[jobWrapper.PrebuiltBatchListMatchingArchetypeIndices.Ptr[batchIndex]], ref jobWrapper.Filter))
                {
                    continue;
                }

                Assert.AreNotEqual(0, batch.Count);
                jobWrapper.JobData.Execute(batch, batchIndex);
            }
        }
        else
        {
            if (jobWrapper.JobsPerChunk == 1)
            {
                // 1 batch per chunk, with/without filtering
                for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                {
                    var chunkIndex = batchIndex;

                    var chunk = chunks.Ptr[chunkIndex];
                    if (isFiltering && !chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]], ref jobWrapper.Filter))
                    {
                        continue;
                    }

                    // Whole chunk becomes one batch.
                    var batch = new ArchetypeChunk(chunk, chunks.EntityComponentStore);
                    Assert.AreNotEqual(0, batch.Count);
                    jobWrapper.JobData.Execute(batch, batchIndex);
                }
            }
            else
            {
                // 2+ batches per chunk, with/without filtering
                // This is the most general case; if only one code path survives,
                // it should be this one.
                for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                {
                    // Map the flat batch index to (chunk, batch-within-chunk).
                    var chunkIndex = batchIndex / jobWrapper.JobsPerChunk;
                    var batchIndexInChunk = batchIndex % jobWrapper.JobsPerChunk;

                    var chunk = chunks.Ptr[chunkIndex];
                    if (isFiltering && !chunk->MatchesFilter(
                        jobWrapper.MatchingArchetypes.Ptr[
                            chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]],
                        ref jobWrapper.Filter))
                    {
                        continue;
                    }

                    // EntityBatchFromChunk may fail to produce a batch (returns false); skip those.
                    if (ArchetypeChunk.EntityBatchFromChunk(chunk, chunk->Count, jobWrapper.JobsPerChunk, batchIndexInChunk, chunks.EntityComponentStore, out var batch))
                    {
                        jobWrapper.JobData.Execute(batch, batchIndex);
                    }
                }
            }
        }

        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}