Example 1
            private static unsafe void Execute(ref JobMultiHashMap fullData, IntPtr additionalPtr,
                                               IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
            {
                while (true)
                {
                    int begin;
                    int end;

                    if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
                    {
                        return;
                    }

                    var buckets  = (int *)fullData.HashMap.m_Buffer->buckets;
                    var nextPtrs = (int *)fullData.HashMap.m_Buffer->next;
                    var keys     = fullData.HashMap.m_Buffer->keys;
                    var values   = fullData.HashMap.m_Buffer->values;

                    for (int i = begin; i < end; i++)
                    {
                        int entryIndex = buckets[i];

                        while (entryIndex != -1)
                        {
                            var key   = UnsafeUtility.ReadArrayElement <TKey>(keys, entryIndex);
                            var value = UnsafeUtility.ReadArrayElement <int>(values, entryIndex);
                            int firstValue;

                            NativeMultiHashMapIterator <TKey> it;
                            fullData.HashMap.TryGetFirstValue(key, out firstValue, out it);

                            // [macton] Didn't expect a use case for this with multiple same values
                            // (since its intended use was for unique indices.)
                            // https://forum.unity.com/threads/ijobnativemultihashmapmergedsharedkeyindices-unexpected-behavior.569107/#post-3788170
                            if (entryIndex == it.EntryIndex)
                            {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData,
                                                                    UnsafeUtility.AddressOf(ref fullData), value, 1);
#endif
                                fullData.JobData.ExecuteFirst(value);
                            }
                            else
                            {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
                                var startIndex  = Math.Min(firstValue, value);
                                var lastIndex   = Math.Max(firstValue, value);
                                var rangeLength = (lastIndex - startIndex) + 1;

                                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData,
                                                                    UnsafeUtility.AddressOf(ref fullData), startIndex, rangeLength);
#endif
                                fullData.JobData.ExecuteNext(firstValue, value);
                            }

                            entryIndex = nextPtrs[entryIndex];
                        }
                    }
                }
            }
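
For context, here is a minimal user-side sketch of the job type this producer drives. It assumes the IJobNativeMultiHashMapMergedSharedKeyIndices interface referenced in the forum link above; the method shapes are inferred from the ExecuteFirst(value) and ExecuteNext(firstValue, value) calls in the producer, and the job name and field are hypothetical.

    // Assumes: using Unity.Collections; using Unity.Entities;
    // Job name and IsFirstForKey field are hypothetical.
    struct DeduplicateIndicesJob : IJobNativeMultiHashMapMergedSharedKeyIndices
    {
        // Hypothetical output: one flag per index, set when that index holds the
        // first value stored under its key.
        public NativeArray<byte> IsFirstForKey;

        // Invoked once per key, for the first value the producer finds.
        public void ExecuteFirst(int index)
        {
            IsFirstForKey[index] = 1;
        }

        // Invoked for every further value under the same key; firstIndex is the
        // value that ExecuteFirst received.
        public void ExecuteNext(int firstIndex, int index)
        {
            IsFirstForKey[index] = 0;
        }
    }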
Example 2
        public unsafe static void Execute(ref T data, IntPtr listDataPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
        {
            NativeListData *   listData = (NativeListData *)((void *)listDataPtr);
            NativeParticleData nativeParticleData;

            ParticleSystem.CopyManagedJobData(listData->system, out nativeParticleData);
            ParticleSystemJobData particleSystemJobData = new ParticleSystemJobData(ref nativeParticleData);

            int begin;
            int end;

            while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
            {
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref data), begin, end - begin);

                for (int i = begin; i < end; i++)
                {
                    data.Execute(particleSystemJobData, i);
                }
            }
            AtomicSafetyHandle.CheckDeallocateAndThrow(particleSystemJobData.m_Safety);
            AtomicSafetyHandle.Release(particleSystemJobData.m_Safety);
        }
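Example 3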
            static unsafe void Execute(ref JobStruct <T, U0> jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
            {
                int begin;
                int end;
                var entity = default(U0);

                while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
                {
                    while (begin != end)
                    {
                        if (begin < jobData.Array.CacheBeginIndex || begin >= jobData.Array.CacheEndIndex)
                        {
                            jobData.Array.UpdateCache(begin);
                        }

                        var endLoop = Math.Min(end, jobData.Array.CacheEndIndex);

                        for (var i = begin; i != endLoop; i++)
                        {
                            jobData.Array.PatchPtrs(i, (byte *)UnsafeUtility.AddressOf(ref entity));
                            jobData.Data.Execute(entity);
                        }

                        begin = endLoop;
                    }
                }
            }
Example 4
            internal unsafe static void ExecuteInternal(ref JobChunkData <T> jobData, ref JobRanges ranges, int jobIndex)
            {
                ComponentChunkIterator.UnpackPrefilterData(jobData.PrefilterData, out var filteredChunks, out var entityIndices, out var chunkCount);

                int chunkIndex, end;

                while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out chunkIndex, out end))
                {
                    var chunk        = filteredChunks[chunkIndex];
                    var entityOffset = entityIndices[chunkIndex];

                    jobData.Data.Execute(chunk, chunkIndex, entityOffset);
                }
            }
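
The chunk wrappers in this and the following examples all forward to the same user-facing Execute(chunk, chunkIndex, firstEntityIndex) shape. Below is a minimal, hypothetical sketch of such a job, assuming the Entities IJobChunk interface these JobChunkData/JobChunkExtensions wrappers appear to drive; the job name and output field are not from the source.

    // Assumes: using Unity.Collections; using Unity.Entities;
    // Job name and CountsPerChunk are hypothetical.
    struct RecordChunkSizesJob : IJobChunk
    {
        public NativeArray<int> CountsPerChunk;   // hypothetical per-chunk output

        public void Execute(ArchetypeChunk chunk, int chunkIndex, int firstEntityIndex)
        {
            // chunk.Count is the number of entities in this chunk; firstEntityIndex
            // is the running entity offset the wrapper reads from its prefilter data.
            CountsPerChunk[chunkIndex] = chunk.Count;
        }
    }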
Example 5
            internal static void ExecuteInternal(ref JobChunkExtensions.JobDataLiveFilter <T> jobData, ref JobRanges ranges, int jobIndex)
            {
                int num;
                int num2;

                while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out num, out num2))
                {
                    jobData.iterator.MoveToChunkWithoutFiltering(num);
                    if (jobData.iterator.MatchesFilter())
                    {
                        ArchetypeChunk currentChunk = jobData.iterator.GetCurrentChunk();
                        jobData.data.Execute(currentChunk, num);
                    }
                }
            }
Example 6
            internal unsafe static void ExecuteInternal(ref JobChunkData <T> jobData, ref JobRanges ranges, int jobIndex)
            {
                var filteredChunks = (ArchetypeChunk *)NativeArrayUnsafeUtility.GetUnsafePtr(jobData.PrefilterData);
                var entityIndices  = (int *)(filteredChunks + ranges.TotalIterationCount);

                int chunkIndex, end;

                while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out chunkIndex, out end))
                {
                    var chunk        = filteredChunks[chunkIndex];
                    var entityOffset = entityIndices[chunkIndex];

                    jobData.Data.Execute(chunk, chunkIndex, entityOffset);
                }
            }
Example 7
            internal unsafe static void ExecuteInternal(
                ref JobEntityBatchIndexWrapper <T> jobWrapper,
                IntPtr bufferRangePatchData,
                ref JobRanges ranges,
                int jobIndex)
            {
                ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

                bool isParallel = jobWrapper.IsParallel == 1;

                while (true)
                {
                    int beginBatchIndex = 0;
                    int endBatchIndex   = batchCount;

                    // If we are running the job in parallel, steal some work.
                    if (isParallel)
                    {
                        // If we have no range to steal, exit the loop.
                        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
                        {
                            break;
                        }
                    }

                    // Do the actual user work.
                    for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                    {
                        var batch = filteredChunks[batchIndex];
                        Assert.IsTrue(batch.Count > 0); // Empty batches are expected to be skipped by the prefilter job!
                        var entityOffset = entityIndices[batchIndex];

#if ENABLE_UNITY_COLLECTIONS_CHECKS
                        if (isParallel)
                        {
                            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), entityOffset, batch.Count);
                        }
#endif
                        jobWrapper.JobData.Execute(batch, batchIndex, entityOffset);
                    }

                    // If we are not running in parallel, our job is done.
                    if (!isParallel)
                    {
                        break;
                    }
                }
            }
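Example 8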
            internal unsafe static void ExecuteInternal(ref JobDataLiveFilter <T> jobData, ref JobRanges ranges, int jobIndex)
            {
                int chunkIndex, end;

                while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out chunkIndex, out end))
                {
                    jobData.iterator.MoveToChunkWithoutFiltering(chunkIndex);
                    if (!jobData.iterator.MatchesFilter())
                    {
                        continue;
                    }

                    var chunk = jobData.iterator.GetCurrentChunk();
                    jobData.data.Execute(chunk, chunkIndex);
                }
            }
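Example 9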
            internal unsafe static void ExecuteInternal(
                ref JobEntityBatchWrapper <T> jobWrapper,
                ref JobRanges ranges,
                int jobIndex)
            {
                var chunks = jobWrapper.CachedChunks;

                bool isParallel  = jobWrapper.IsParallel == 1;
                bool isFiltering = jobWrapper.Filter.RequiresMatchesFilter;

                while (true)
                {
                    int beginBatchIndex = 0;
                    int endBatchIndex   = chunks.Length;

                    // If we are running the job in parallel, steal some work.
                    if (isParallel)
                    {
                        // If we have no range to steal, exit the loop.
                        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
                        {
                            break;
                        }
                    }

                    // Do the actual user work.
                    for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                    {
                        var chunkIndex        = batchIndex / jobWrapper.JobsPerChunk;
                        var batchIndexInChunk = batchIndex % jobWrapper.JobsPerChunk;
                        var chunk             = chunks.Ptr[chunkIndex];

                        if (isFiltering && !chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]], ref jobWrapper.Filter))
                        {
                            continue;
                        }

                        jobWrapper.JobData.Execute(ArchetypeChunk.EntityBatchFromChunk(chunk, jobWrapper.JobsPerChunk, batchIndexInChunk, chunks.EntityComponentStore), batchIndex);
                    }

                    // If we are not running in parallel, our job is done.
                    if (!isParallel)
                    {
                        break;
                    }
                }
            }
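Example 10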
            public unsafe static void Execute(ref T jobData, IntPtr jobData2, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
            {
                IJobParallelForTransformExtensions.TransformParallelForLoopStruct<T>.TransformJobData transformJobData;
                UnsafeUtility.CopyPtrToStructure((void *)jobData2, out transformJobData);

                int *            sortedToUserIndex     = (int *)((void *)TransformAccessArray.GetSortedToUserIndex(transformJobData.TransformAccessArray));
                TransformAccess *sortedTransformAccess = (TransformAccess *)((void *)TransformAccessArray.GetSortedTransformAccess(transformJobData.TransformAccessArray));
                bool             isReadOnly            = transformJobData.IsReadOnly == 1;

                if (isReadOnly)
                {
                    // Read-only transform access: ranges are work-stolen, so any worker
                    // may process any index.
                    int begin;
                    int end;

                    while (JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
                    {
                        for (int i = begin; i < end; i++)
                        {
                            int userIndex = sortedToUserIndex[i];
                            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), userIndex, 1);

                            TransformAccess transform = sortedTransformAccess[i];
                            transform.MarkReadOnly();
                            jobData.Execute(userIndex, transform);
                        }
                    }
                }
                else
                {
                    // Read-write transform access: each job processes a fixed range
                    // instead of stealing work.
                    int begin;
                    int end;

                    JobsUtility.GetJobRange(ref ranges, jobIndex, out begin, out end);
                    for (int i = begin; i < end; i++)
                    {
                        int userIndex = sortedToUserIndex[i];
                        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), userIndex, 1);

                        TransformAccess transform = sortedTransformAccess[i];
                        transform.MarkReadWrite();
                        jobData.Execute(userIndex, transform);
                    }
                }
            }
Example 11
            public unsafe static void Execute(ref T jobData, System.IntPtr additionalPtr, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
            {
                while (true)
                {
                    int begin;
                    int end;
                    if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
                    {
                        return;
                    }

                    #if ENABLE_UNITY_COLLECTIONS_CHECKS
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
                    #endif

                    jobData.Execute(begin, end - begin);
                }
            }
Example 12
            public unsafe static void Execute(ref T self, System.IntPtr additionalPtr, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
            {
                while (true)
                {
                    int begin;
                    int end;
                    if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
                    {
                        break;
                    }

                    #if ENABLE_UNITY_COLLECTIONS_CHECKS
                    JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref self), begin, end - begin);
                    #endif

                    for (var i = begin; i < end; ++i)
                    {
                        self.Execute(i);
                    }
                }
            }
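
Example 12 is the classic parallel-for shape: each stolen range is patched into the safety system and then walked one index at a time through the user's Execute(int). A hypothetical user job with that shape, written against the stock IJobParallelFor interface purely for illustration:

    // Assumes: using Unity.Collections; using Unity.Jobs;
    // Job name and fields are hypothetical.
    struct AddArraysJob : IJobParallelFor
    {
        [ReadOnly] public NativeArray<float> A;
        [ReadOnly] public NativeArray<float> B;
        public NativeArray<float> Result;

        public void Execute(int index)
        {
            // Called once per index inside the stolen [begin, end) range.
            Result[index] = A[index] + B[index];
        }
    }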
Example 13
            public static unsafe void Execute(ref T jobData, IntPtr additionalPtr, IntPtr bufferRangePatchData,
                                              ref JobRanges ranges, int jobIndex)
            {
                while (true)
                {
                    int begin;
                    int end;
                    if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out begin, out end))
                    {
                        break;
                    }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
                    //JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), begin, end - begin);
#endif

                    for (var i = begin; i < end; ++i)
                    {
                        jobData.Execute(i);
                    }
                    DoDeallocateOnJobCompletion(jobData);
                }
            }
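Example 14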
            internal unsafe static void ExecuteInternal(
                ref JobEntityBatchIndexWrapper <T> jobWrapper,
                ref JobRanges ranges,
                int jobIndex)
            {
                ChunkIterationUtility.UnpackPrefilterData(jobWrapper.PrefilterData, out var filteredChunks, out var entityIndices, out var batchCount);

                bool isParallel = jobWrapper.IsParallel == 1;

                while (true)
                {
                    int beginBatchIndex = 0;
                    int endBatchIndex   = batchCount;

                    // If we are running the job in parallel, steal some work.
                    if (isParallel)
                    {
                        // If we have no range to steal, exit the loop.
                        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
                        {
                            break;
                        }
                    }

                    // Do the actual user work.
                    for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                    {
                        jobWrapper.JobData.Execute(filteredChunks[batchIndex], batchIndex, entityIndices[batchIndex]);
                    }

                    // If we are not running in parallel, our job is done.
                    if (!isParallel)
                    {
                        break;
                    }
                }
            }
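Example 15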
            internal unsafe static void ExecuteInternal(
                ref JobEntityBatchWrapper <T> jobWrapper,
                IntPtr bufferRangePatchData,
                ref JobRanges ranges,
                int jobIndex)
            {
                var chunks          = jobWrapper.CachedChunks;
                var prebuiltBatches = (ArchetypeChunk *)jobWrapper.PrebuiltBatchList.Ptr;

                bool isParallel  = jobWrapper.IsParallel == 1;
                bool isFiltering = jobWrapper.Filter.RequiresMatchesFilter;

                while (true)
                {
                    int beginBatchIndex = 0;
                    int endBatchIndex   = jobWrapper.UsePrebuiltBatchList == 1 ? jobWrapper.PrebuiltBatchList.Length : chunks.Length;

                    // If we are running the job in parallel, steal some work.
                    if (isParallel)
                    {
                        // If we have no range to steal, exit the loop.
                        if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
                        {
                            break;
                        }

                        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), 0, 0);
                    }

                    // Do the actual user work.
                    if (jobWrapper.UsePrebuiltBatchList == 1)
                    {
                        for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                        {
                            var batch = prebuiltBatches[batchIndex];

                            if (isFiltering && !batch.m_Chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[jobWrapper.PrebuiltBatchListMatchingArchetypeIndices.Ptr[batchIndex]], ref jobWrapper.Filter))
                            {
                                continue;
                            }

                            Assert.AreNotEqual(0, batch.Count);
                            jobWrapper.JobData.Execute(batch, batchIndex);
                        }
                    }
                    else
                    {
                        if (jobWrapper.JobsPerChunk == 1)
                        {
                            // 1 batch per chunk, with/without filtering
                            for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                            {
                                var chunkIndex = batchIndex;
                                var chunk      = chunks.Ptr[chunkIndex];

                                if (isFiltering && !chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]], ref jobWrapper.Filter))
                                {
                                    continue;
                                }

                                var batch = new ArchetypeChunk(chunk, chunks.EntityComponentStore);
                                Assert.AreNotEqual(0, batch.Count);
                                jobWrapper.JobData.Execute(batch, batchIndex);
                            }
                        }
                        else
                        {
                            // 2+ batches per chunk, with/without filtering
                            // This is the most general case; if only one code path survives, it should be this one.
                            for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                            {
                                var chunkIndex        = batchIndex / jobWrapper.JobsPerChunk;
                                var batchIndexInChunk = batchIndex % jobWrapper.JobsPerChunk;
                                var chunk             = chunks.Ptr[chunkIndex];

                                if (isFiltering && !chunk->MatchesFilter(
                                        jobWrapper.MatchingArchetypes.Ptr[
                                            chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]],
                                        ref jobWrapper.Filter))
                                {
                                    continue;
                                }

                                if (ArchetypeChunk.EntityBatchFromChunk(chunk, chunk->Count, jobWrapper.JobsPerChunk,
                                                                        batchIndexInChunk, chunks.EntityComponentStore, out var batch))
                                {
                                    jobWrapper.JobData.Execute(batch, batchIndex);
                                }
                            }
                        }
                    }

                    // If we are not running in parallel, our job is done.
                    if (!isParallel)
                    {
                        break;
                    }
                }
            }