Example #1
        protected override void OnCreate()
        {
            m_NewGhosts        = new NativeList <T>(16, Allocator.Persistent);
            m_NewGhostIds      = new NativeList <int>(16, Allocator.Persistent);
            m_InitialArchetype = EntityManager.CreateArchetype(ComponentType.ReadWrite <T>(),
                                                               ComponentType.ReadWrite <GhostComponent>());

            m_GhostUpdateSystemGroup = World.GetOrCreateSystem <GhostUpdateSystemGroup>();
            m_GhostMap           = m_GhostUpdateSystemGroup.GhostEntityMap;
            m_ConcurrentGhostMap = m_GhostMap.AsParallelWriter();
            m_DestroyGroup       = GetEntityQuery(ComponentType.ReadOnly <T>(),
                                                  ComponentType.Exclude <GhostComponent>(),
                                                  ComponentType.Exclude <PredictedGhostSpawnRequestComponent>());
            m_SpawnRequestGroup = GetEntityQuery(ComponentType.ReadOnly <T>(),
                                                 ComponentType.ReadOnly <PredictedGhostSpawnRequestComponent>());
            m_PlayerGroup = GetEntityQuery(ComponentType.ReadOnly <NetworkStreamConnection>(),
                                           ComponentType.ReadOnly <NetworkIdComponent>(), ComponentType.Exclude <NetworkStreamDisconnected>());

            m_InvalidGhosts                 = new NativeList <Entity>(1024, Allocator.Persistent);
            m_DelayedSpawnQueue             = new NativeQueue <DelayedSpawnGhost>(Allocator.Persistent);
            m_CurrentDelayedSpawnList       = new NativeList <DelayedSpawnGhost>(1024, Allocator.Persistent);
            m_ConcurrentDelayedSpawnQueue   = m_DelayedSpawnQueue.AsParallelWriter();
            m_PredictedSpawnQueue           = new NativeQueue <DelayedSpawnGhost>(Allocator.Persistent);
            m_CurrentPredictedSpawnList     = new NativeList <DelayedSpawnGhost>(1024, Allocator.Persistent);
            m_ConcurrentPredictedSpawnQueue = m_PredictedSpawnQueue.AsParallelWriter();
            m_Barrier = World.GetOrCreateSystem <EndSimulationEntityCommandBufferSystem>();

            m_PredictSpawnGhosts        = new NativeList <PredictSpawnGhost>(16, Allocator.Persistent);
            m_PredictionSpawnCleanupMap = new NativeHashMap <int, int>(16, Allocator.Persistent);

            m_ClientSimulationSystemGroup = World.GetOrCreateSystem <ClientSimulationSystemGroup>();
        }
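The containers created above use Allocator.Persistent, so the system must release them when it is torn down. A matching OnDestroy is not part of this listing; a minimal sketch inferred from the fields initialized in OnCreate (m_GhostMap is owned by the GhostUpdateSystemGroup, so it is not disposed here) might look like:

        protected override void OnDestroy()
        {
            // Release everything this system allocated with Allocator.Persistent.
            m_NewGhosts.Dispose();
            m_NewGhostIds.Dispose();
            m_InvalidGhosts.Dispose();
            m_DelayedSpawnQueue.Dispose();
            m_CurrentDelayedSpawnList.Dispose();
            m_PredictedSpawnQueue.Dispose();
            m_CurrentPredictedSpawnList.Dispose();
            m_PredictSpawnGhosts.Dispose();
            m_PredictionSpawnCleanupMap.Dispose();
        }
Example #2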
        public void TestIJobFor([Values(0, 1, 2)] int mode)
        {
            const int N = 1000;

            NativeHashMap <int, int> output = new NativeHashMap <int, int>(N, Allocator.TempJob);
            SimpleJobFor             job    = new SimpleJobFor()
            {
                result = output.AsParallelWriter()
            };

            if (mode == 0)
            {
                job.Run(N);
            }
            else if (mode == 1)
            {
                job.Schedule(N, new JobHandle()).Complete();
            }
            else
            {
                job.ScheduleParallel(N, 13, new JobHandle()).Complete();
            }

            Assert.AreEqual(N, output.Count());
            for (int i = 0; i < N; ++i)
            {
                Assert.AreEqual(123, output[i]);
            }

            output.Dispose();
        }
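The SimpleJobFor struct is not included in this listing. A minimal reconstruction consistent with the assertions above (N entries, each key mapping to 123) would be an IJobFor whose every iteration adds its own index through the parallel writer:

        struct SimpleJobFor : IJobFor
        {
            public NativeHashMap <int, int> .ParallelWriter result;

            public void Execute(int index)
            {
                // Each iteration uses a unique key, so all N adds succeed.
                result.TryAdd(index, 123);
            }
        }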
Example #3
        public void RunHashWriterParallelFor()
        {
            const int MAPSIZE = 100;
            // Make sure that each iteration was called and the parallel write worked.
            NativeHashMap <int, int> map = new NativeHashMap <int, int>(MAPSIZE, Allocator.TempJob);
            // Tracks the threadIndex used for each job.
            NativeHashMap <int, bool> threadMap = new NativeHashMap <int, bool>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);

            HashWriterParallelFor job = new HashWriterParallelFor()
            {
                result    = map.AsParallelWriter(),
                threadMap = threadMap.AsParallelWriter()
            };

            JobHandle handle = job.Schedule(MAPSIZE, 10);

            handle.Complete();

            for (int i = 0; i < MAPSIZE; ++i)
            {
                Assert.AreEqual(17, map[i]);
            }

#if !UNITY_SINGLETHREADED_JOBS
            Assert.IsTrue(threadMap.Length > 1);     // should have run in parallel, and used different thread indices
#else
            Assert.IsTrue(threadMap.Length == 1);    // only have one thread.
            Assert.IsTrue(threadMap[0] == true);     // and it should be thread index 0
#endif

            map.Dispose();
            threadMap.Dispose();
        }
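HashWriterParallelFor is likewise not shown. Inferred from the assertions (every key maps to 17, and threadMap gains one entry per worker thread that ran), a plausible shape uses [NativeSetThreadIndex] to capture the executing thread's index:

        struct HashWriterParallelFor : IJobParallelFor
        {
            public NativeHashMap <int, int> .ParallelWriter  result;
            public NativeHashMap <int, bool> .ParallelWriter threadMap;

#pragma warning disable 649
            [NativeSetThreadIndex] int m_ThreadIndex;    // filled in by the job system
#pragma warning restore 649

            public void Execute(int index)
            {
                result.TryAdd(index, 17);
                threadMap.TryAdd(m_ThreadIndex, true);
            }
        }
Example #4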
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            var glyphMap = new NativeHashMap <int, Entity>(glyphQuery.CalculateEntityCount(), Allocator.TempJob);

            var glyphMapDeps = new BuildGlyphMapJob {
                GlyphMap = glyphMap.AsParallelWriter()
            }.Schedule(glyphQuery, inputDeps);

            var textMeshDeps = new BuildTextMeshJob {
                GlyphMap           = glyphMap,
                GlyphData          = GetBufferFromEntity <GlyphElement>(true),
                FontFaces          = GetComponentDataFromEntity <FontFaceInfo>(true),
                EntityType         = GetArchetypeChunkEntityType(),
                CharBufferType     = GetArchetypeChunkBufferType <CharElement>(true),
                TextOptionType     = GetArchetypeChunkComponentType <TextOptions>(true),
                TxtFontIDType      = GetArchetypeChunkComponentType <TextFontID>(true),
                ColorType          = GetArchetypeChunkComponentType <AppliedColor>(true),
                LTWType            = GetArchetypeChunkComponentType <LocalToWorld>(true),
                DimensionType      = GetArchetypeChunkComponentType <Dimensions>(true),
                MeshVertexDataType = GetArchetypeChunkBufferType <MeshVertexData>(),
                TriangleIndexType  = GetArchetypeChunkBufferType <TriangleIndexElement>(),
                CmdBuffer          = cmdBufferSystem.CreateCommandBuffer().ToConcurrent()
            }.Schedule(textQuery, glyphMapDeps);

            var finalDeps = glyphMap.Dispose(textMeshDeps);

            cmdBufferSystem.AddJobHandleForProducer(finalDeps);

            return(finalDeps);
        }
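The glyphMap.Dispose(textMeshDeps) call above is the deferred-dispose idiom: the TempJob container is deallocated by a follow-up job once its last consumer finishes, so the system never blocks on Complete(). The same idiom in isolation (FillJob is hypothetical):

        struct FillJob : IJob
        {
            public NativeHashMap <int, Entity> .ParallelWriter Map;

            public void Execute()
            {
                Map.TryAdd(0, Entity.Null);
            }
        }

        JobHandle ScheduleAndForget(JobHandle inputDeps)
        {
            var map  = new NativeHashMap <int, Entity>(1, Allocator.TempJob);
            var fill = new FillJob {
                Map = map.AsParallelWriter()
            }.Schedule(inputDeps);

            // Dispose(JobHandle) returns a handle that also guards the deallocation.
            return(map.Dispose(fill));
        }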
Example #5
        protected override void OnCreate()
        {
            m_NewGhosts          = new NativeList <T>(16, Allocator.Persistent);
            m_NewGhostIds        = new NativeList <int>(16, Allocator.Persistent);
            m_Archetype          = GetGhostArchetype();
            m_PredictedArchetype = GetPredictedGhostArchetype();
            m_InitialArchetype   = EntityManager.CreateArchetype(ComponentType.ReadWrite <T>(), ComponentType.ReadWrite <ReplicatedEntityComponent>());

            m_GhostMap           = World.GetOrCreateSystem <GhostReceiveSystemGroup>().GhostEntityMap;
            m_ConcurrentGhostMap = m_GhostMap.AsParallelWriter();
            m_SpawnRequestGroup  = GetEntityQuery(ComponentType.ReadOnly <T>(),
                                                  ComponentType.ReadOnly <PredictedSpawnRequestComponent>());

            m_InvalidGhosts                 = new NativeList <Entity>(1024, Allocator.Persistent);
            m_DelayedSpawnQueue             = new NativeQueue <DelayedSpawnGhost>(Allocator.Persistent);
            m_CurrentDelayedSpawnList       = new NativeList <DelayedSpawnGhost>(1024, Allocator.Persistent);
            m_ConcurrentDelayedSpawnQueue   = m_DelayedSpawnQueue.AsParallelWriter();
            m_PredictedSpawnQueue           = new NativeQueue <DelayedSpawnGhost>(Allocator.Persistent);
            m_CurrentPredictedSpawnList     = new NativeList <DelayedSpawnGhost>(1024, Allocator.Persistent);
            m_ConcurrentPredictedSpawnQueue = m_PredictedSpawnQueue.AsParallelWriter();
            m_Barrier = World.GetOrCreateSystem <EndSimulationEntityCommandBufferSystem>();

            m_PredictSpawnGhosts        = new NativeList <PredictSpawnGhost>(16, Allocator.Persistent);
            m_PredictionSpawnCleanupMap = new NativeHashMap <int, int>(16, Allocator.Persistent);

            m_TimeSystem = World.GetOrCreateSystem <NetworkTimeSystem>();
        }
Example #6
        /// <summary>
        /// Constructs a set of changes between the given src and dst chunks.
        /// </summary>
        /// <remarks>
        /// A chunk is considered unchanged if the <see cref="Chunk.SequenceNumber"/> matches and all type change versions match.
        /// </remarks>
        internal static ArchetypeChunkChanges GetArchetypeChunkChanges(
            NativeArray <ArchetypeChunk> srcChunks,
            NativeArray <ArchetypeChunk> dstChunks,
            Allocator allocator,
            out JobHandle jobHandle,
            JobHandle dependsOn = default)
        {
            var archetypeChunkChanges     = new ArchetypeChunkChanges(allocator);
            var srcChunksBySequenceNumber = new NativeHashMap <ulong, ArchetypeChunk>(srcChunks.Length, Allocator.TempJob);

            var buildChunkSequenceNumberMap = new BuildChunkSequenceNumberMap
            {
                Chunks = srcChunks,
                ChunksBySequenceNumber = srcChunksBySequenceNumber.AsParallelWriter()
            }.Schedule(srcChunks.Length, 64, dependsOn);

            var gatherArchetypeChunkChanges = new GatherArchetypeChunkChanges
            {
                SrcChunks = srcChunks,
                DstChunks = dstChunks,
                SrcChunksBySequenceNumber  = srcChunksBySequenceNumber,
                CreatedChunks              = archetypeChunkChanges.CreatedSrcChunks.Chunks,
                CreatedChunkFlags          = archetypeChunkChanges.CreatedSrcChunks.Flags,
                CreatedChunkEntityCounts   = archetypeChunkChanges.CreatedSrcChunks.EntityCounts,
                DestroyedChunks            = archetypeChunkChanges.DestroyedDstChunks.Chunks,
                DestroyedChunkFlags        = archetypeChunkChanges.DestroyedDstChunks.Flags,
                DestroyedChunkEntityCounts = archetypeChunkChanges.DestroyedDstChunks.EntityCounts,
            }.Schedule(buildChunkSequenceNumberMap);

            jobHandle = srcChunksBySequenceNumber.Dispose(gatherArchetypeChunkChanges);

            return(archetypeChunkChanges);
        }
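A hypothetical call site for the helper above (names illustrative; ArchetypeChunkChanges is assumed to own and dispose the lists it allocates):

            var changes = GetArchetypeChunkChanges(srcChunks, dstChunks, Allocator.TempJob, out var changesHandle);
            // The out-handle guards both the gather job and the disposal of the
            // internal sequence-number map, so complete it before reading results.
            changesHandle.Complete();
            // ... consume changes.CreatedSrcChunks / changes.DestroyedDstChunks ...
            changes.Dispose();
Example #7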
        public void EvaluateCumulativeRewardEstimatorMultipleStates()
        {
            const int kStateCount     = 10;
            var       states          = new NativeList <int>(kStateCount, Allocator.TempJob);
            var       stateInfoLookup = new NativeHashMap <int, StateInfo>(kStateCount, Allocator.TempJob);
            var       binnedStateKeys = new NativeMultiHashMap <int, int>(kStateCount, Allocator.TempJob);

            for (int i = 0; i < kStateCount; i++)
            {
                states.Add(i);
            }

            var stateEvaluationJob = new EvaluateNewStatesJob <int, int, TestStateDataContext, StateValueAsCumulativeRewardEstimatorValue, DefaultTerminalStateEvaluator <int> >
            {
                StateDataContext = new TestStateDataContext(),
                StateInfoLookup  = stateInfoLookup.AsParallelWriter(),
                States           = states.AsDeferredJobArray(),
                BinnedStateKeys  = binnedStateKeys.AsParallelWriter(),
            };

            stateEvaluationJob.Schedule(states, default).Complete();

            for (int i = 0; i < states.Length; i++)
            {
                stateInfoLookup.TryGetValue(i, out var stateInfo);

                Assert.AreEqual(new BoundedValue(i, i, i), stateInfo.CumulativeRewardEstimate);
            }

            states.Dispose();
            stateInfoLookup.Dispose();
            binnedStateKeys.Dispose();
        }
Example #8
    public void NativeHashMap_Clear_And_Write()
    {
        var hashMap     = new NativeHashMap <int, int>(hashMapSize / 2, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var clearJob = new Clear
        {
            hashMap = hashMap
        };

        var clearJobHandle = clearJob.Schedule();

        var writeJob = new HashMapWriteJob
        {
            hashMap = hashMap.AsParallelWriter(),
            status  = writeStatus,
            keyMod  = hashMapSize,
        };

        var writeJobHandle = writeJob.Schedule(clearJobHandle);

        writeJobHandle.Complete();

        writeStatus.Dispose();
        hashMap.Dispose();
    }
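Neither Clear nor HashMapWriteJob appears in this listing. A reconstruction consistent with the way they are used here and asserted on in Examples #9 and #25 (status 0 on a successful TryAdd, -1 on a duplicate key) could be:

    struct Clear : IJob
    {
        public NativeHashMap <int, int> hashMap;

        public void Execute()
        {
            hashMap.Clear();
        }
    }

    struct HashMapWriteJob : IJob
    {
        public NativeHashMap <int, int> .ParallelWriter hashMap;
        public NativeArray <int> status;
        public int keyMod;

        public void Execute()
        {
            for (int i = 0; i < status.Length; i++)
            {
                // Keys repeat every keyMod entries; duplicate adds fail with -1.
                status[i] = hashMap.TryAdd(i % keyMod, i) ? 0 : -1;
            }
        }
    }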
Example #9
    public void NativeHashMap_Read_And_Write()
    {
        var hashMap     = new NativeHashMap <int, int>(hashMapSize, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var writeData = new HashMapWriteJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status  = writeStatus,
            keyMod  = hashMapSize,
        };

        var readData = new HashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values  = readValues,
            keyMod  = writeData.keyMod,
        };

        var writeJob = writeData.Schedule();
        var readJob  = readData.Schedule(hashMapSize, 1, writeJob);

        readJob.Complete();

        for (int i = 0; i < hashMapSize; ++i)
        {
            Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
            Assert.AreEqual(i, readValues[i], "Job failed to read from hash map");
        }

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
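HashMapReadParallelForJob can be reconstructed the same way: each parallel iteration reads back the key the writer is expected to have added. Reading through the indexer is safe here because every key in [0, keyMod) is present:

    struct HashMapReadParallelForJob : IJobParallelFor
    {
        [ReadOnly] public NativeHashMap <int, int> hashMap;
        public NativeArray <int> values;
        public int keyMod;

        public void Execute(int i)
        {
            values[i] = hashMap[i % keyMod];
        }
    }
Example #10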
        protected override void OnUpdate()
        {
            var glyphMap = new NativeHashMap <int, Entity>(glyphQuery.CalculateEntityCount(), Allocator.TempJob);

            Dependency = new BuildGlyphMapJobChunk {
                GlyphMap   = glyphMap.AsParallelWriter(),
                EntityType = GetEntityTypeHandle(),
                FontType   = GetComponentTypeHandle <FontID>(true)
            }.Schedule(glyphQuery, Dependency);

            Dependency = new BuildTextMeshJob {
                GlyphMap           = glyphMap,
                GlyphData          = GetBufferFromEntity <GlyphElement>(true),
                FontFaces          = GetComponentDataFromEntity <FontFaceInfo>(true),
                Parents            = GetComponentDataFromEntity <Parent>(true),
                EntityType         = GetEntityTypeHandle(),
                CharBufferType     = GetBufferTypeHandle <CharElement>(true),
                TextOptionType     = GetComponentTypeHandle <TextOptions>(true),
                TxtFontIDType      = GetComponentTypeHandle <TextFontID>(true),
                ColorType          = GetComponentTypeHandle <AppliedColor>(true),
                LTWType            = GetComponentTypeHandle <LocalToWorld>(true),
                DimensionType      = GetComponentTypeHandle <Dimensions>(true),
                MeshVertexDataType = GetBufferTypeHandle <LocalVertexData>(),
                TriangleIndexType  = GetBufferTypeHandle <LocalTriangleIndexElement>(),
                CmdBuffer          = cmdBufferSystem.CreateCommandBuffer().AsParallelWriter()
            }.Schedule(textQuery, Dependency);

            Dependency = glyphMap.Dispose(Dependency);
            cmdBufferSystem.AddJobHandleForProducer(Dependency);
        }
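Note: this is the same text-mesh system as Example #4, ported to the renamed APIs of later Entities releases - GetEntityTypeHandle/GetComponentTypeHandle/GetBufferTypeHandle replace the GetArchetypeChunk*Type calls, and EntityCommandBuffer.AsParallelWriter() replaces ToConcurrent().
Example #11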
    protected override void OnUpdate()
    {
        m_StaticCollidableHashMapJobHandle  = Dependency;
        m_DynamicCollidableHashMapJobHandle = Dependency;

        int staticCollidableCount = m_StaticCollidableEntityQuery.CalculateEntityCount();

        if (staticCollidableCount != 0)
        {
            if (m_StaticCollidableHashMap.IsCreated)
            {
                m_StaticCollidableHashMap.Dispose();
            }

            m_StaticCollidableHashMap = new NativeHashMap <int, int>(staticCollidableCount, Allocator.Persistent);
            var parallelWriter = m_StaticCollidableHashMap.AsParallelWriter();

            m_StaticCollidableHashMapJobHandle = Entities
                                                 .WithName("HashStaticCollidables")
                                                 .WithAll <StaticCollidable>()
                                                 .WithChangeFilter <StaticCollidable>()
                                                 .WithStoreEntityQueryInField(ref m_StaticCollidableEntityQuery)
                                                 .WithBurst()
                                                 .ForEach((int entityInQueryIndex, in GridPosition gridPosition) =>
            {
                var hash = (int)math.hash(gridPosition.Value);
                parallelWriter.TryAdd(hash, entityInQueryIndex);
            })
                                                 .ScheduleParallel(Dependency);
        }

        int dynamicCollidableCount = m_DynamicCollidableEntityQuery.CalculateEntityCount();

        if (dynamicCollidableCount != 0)
        {
            if (m_DynamicCollidableHashMap.IsCreated)
            {
                m_DynamicCollidableHashMap.Dispose();
            }

            m_DynamicCollidableHashMap = new NativeHashMap <int, int>(dynamicCollidableCount, Allocator.Persistent);
            var parallelWriter = m_DynamicCollidableHashMap.AsParallelWriter();

            m_DynamicCollidableHashMapJobHandle = Entities
                                                  .WithName("HashDynamicCollidables")
                                                  .WithAll <DynamicCollidable>()
                                                  .WithStoreEntityQueryInField(ref m_DynamicCollidableEntityQuery)
                                                  .WithBurst()
                                                  .ForEach((int entityInQueryIndex, in GridPosition gridPosition) =>
            {
                var hash = (int)math.hash(gridPosition.Value);
                parallelWriter.TryAdd(hash, entityInQueryIndex);
            })
                                                  .ScheduleParallel(Dependency);
        }

        Dependency = JobHandle.CombineDependencies(m_StaticCollidableHashMapJobHandle, m_DynamicCollidableHashMapJobHandle);
    }
Example #12
        void BackpropagateParallel(float discountFactor)
        {
            JobHandle jobHandle = default;
            int       maxDepth  = 0;

            using (var depths = m_DepthMap.GetValueArray(Allocator.Temp))
            {
                for (int i = 0; i < depths.Length; i++)
                {
                    maxDepth = math.max(maxDepth, depths[i]);
                }
            }

            // Containers
            var m_SelectedStatesByHorizon = new NativeMultiHashMap <int, int>(m_DepthMap.Count(), Allocator.TempJob);
            var predecessorStates         = new NativeHashMap <int, byte>(m_DepthMap.Count(), Allocator.TempJob);
            var horizonStateList          = new NativeList <int>(m_DepthMap.Count(), Allocator.TempJob);

            jobHandle = new UpdateDepthMapAndResizeContainersJob <int>
            {
                SelectedStates = m_SelectedStates,
                MaxDepth       = maxDepth,

                DepthMap = m_DepthMap,
                SelectedStatesByHorizon = m_SelectedStatesByHorizon,
                PredecessorStates       = predecessorStates,
                HorizonStateList        = horizonStateList,
            }.Schedule(jobHandle);

            // Backpropagate horizon by horizon, from the deepest states up to the root
            for (int horizon = maxDepth + 1; horizon >= 0; horizon--)
            {
                // Prepare info
                jobHandle = new PrepareBackpropagationHorizon <int>
                {
                    Horizon = horizon,
                    SelectedStatesByHorizon = m_SelectedStatesByHorizon,
                    PredecessorInputStates  = predecessorStates,
                    OutputStates            = horizonStateList,
                }.Schedule(jobHandle);

                // Compute updated values
                jobHandle = new ParallelBackpropagationJob <int, int>
                {
                    DiscountFactor = discountFactor,
                    StatesToUpdate = horizonStateList.AsDeferredJobArray(),

                    // plan graph info
                    ActionLookup              = m_PlanGraph.ActionLookup,
                    PredecessorGraph          = m_PlanGraph.PredecessorGraph,
                    ResultingStateLookup      = m_PlanGraph.ResultingStateLookup,
                    StateInfoLookup           = m_PlanGraph.StateInfoLookup,
                    ActionInfoLookup          = m_PlanGraph.ActionInfoLookup,
                    StateTransitionInfoLookup = m_PlanGraph.StateTransitionInfoLookup,

                    PredecessorStatesToUpdate = predecessorStates.AsParallelWriter(),
                }.Schedule(horizonStateList, default, jobHandle);
Example #13
    public JobHandle CheckAbilityGrantedJob(EntityQuery query, JobHandle inputDeps, NativeHashMap <Entity, bool> AbilityGranted)
    {
        var job = new GenericCheckAbilityGrantedJob <T1>
        {
            AbilityGranted = AbilityGranted.AsParallelWriter()
        };

        return(job.Schedule(query, inputDeps));
    }
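GenericCheckAbilityGrantedJob<T1> is not shown. Since the call site only fills in AbilityGranted, a plausible reconstruction is an IJobForEachWithEntity, where the entity parameter comes from the iteration itself (sketch, assuming any entity matched by the query has the ability component T1):

    struct GenericCheckAbilityGrantedJob <T1> : IJobForEachWithEntity <T1>
        where T1 : struct, IComponentData
    {
        public NativeHashMap <Entity, bool> .ParallelWriter AbilityGranted;

        public void Execute(Entity entity, int index, ref T1 ability)
        {
            // Matching the query at all means the ability is granted.
            AbilityGranted.TryAdd(entity, true);
        }
    }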
Example #14
 public static JobHandle ToEntityComponentMap <T>(
     this EntityQuery entityQuery,
     ref NativeHashMap <Entity, T> resultEntityComponentMap,
     JobHandle inputDeps)
     where T : struct, IComponentData
 {
     inputDeps = resultEntityComponentMap.Clear(inputDeps, entityQuery.CalculateEntityCountWithoutFiltering());
     inputDeps = new GatherEntityComponentMap <T> {
         Result = resultEntityComponentMap.AsParallelWriter()
     }.Schedule(entityQuery, inputDeps);
     return(inputDeps);
 }
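Clear(JobHandle, int) is not part of Unity.Collections; it is presumably a project extension that empties the map and reserves capacity before the gather runs. One plausible shape chains the work as a job so no sync point is introduced (generic-job registration concerns aside):

 struct ClearHashMapJob <TKey, TValue> : IJob
     where TKey : struct, IEquatable <TKey>
     where TValue : struct
 {
     public NativeHashMap <TKey, TValue> Map;
     public int Capacity;

     public void Execute()
     {
         Map.Clear();
         if (Map.Capacity < Capacity)
         {
             Map.Capacity = Capacity;     // grow before the writer needs the room
         }
     }
 }

 public static JobHandle Clear <TKey, TValue>(
     this NativeHashMap <TKey, TValue> map, JobHandle inputDeps, int capacity)
     where TKey : struct, IEquatable <TKey>
     where TValue : struct
 {
     return(new ClearHashMapJob <TKey, TValue> {
         Map = map, Capacity = capacity
     }.Schedule(inputDeps));
 }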
Example #15
        public ParallelWriter AsParallelWriter()
        {
            return(new ParallelWriter
            {
                ActionLookup = ActionLookup.AsParallelWriter(),
                ResultingStateLookup = ResultingStateLookup.AsParallelWriter(),
                PredecessorGraph = PredecessorGraph.AsParallelWriter(),

                StateInfoLookup = StateInfoLookup.AsParallelWriter(),
                ActionInfoLookup = ActionInfoLookup.AsParallelWriter(),
                StateTransitionInfoLookup = StateTransitionInfoLookup.AsParallelWriter()
            });
        }
Example #16
 public static JobHandle ToEntityIndexMap(
     this EntityQuery entityQuery,
     EntityManager entityManager,
     ref NativeHashMap <Entity, int> resultEntityIndexMap,
     JobHandle inputDeps)
 {
     inputDeps = resultEntityIndexMap.Clear(inputDeps, entityQuery.CalculateEntityCountWithoutFiltering());
     inputDeps = new GatherEntityIndexMap {
         EntityType     = entityManager.GetArchetypeChunkEntityType(),
         EntityIndexMap = resultEntityIndexMap.AsParallelWriter()
     }.Schedule(entityQuery, inputDeps);
     return(inputDeps);
 }
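Example #17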
    protected override void OnUpdate()
    {
        var resethandle = new ResetAttackFlag()
        {
        }.Schedule(this);

        var cnt = query.CalculateEntityCount();

        var detectedActionEntities   = new NativeHashMap <Entity, MeleeDetectData>(cnt, Allocator.TempJob);
        var detectingActionJobHandle = new DetectAnimaionActionJobCunk()
        {
            detectedActionEntities = detectedActionEntities.AsParallelWriter(),
            actionType             = GetArchetypeChunkComponentType <ActionOnAnimationFrameComponentData>(true),
            animationType          = GetArchetypeChunkComponentType <SpriteSheetAnimationComponentData>(true),
            attackType             = GetArchetypeChunkComponentType <MeleeAttackComponentData>(),
            entityType             = GetArchetypeChunkEntityType(),
            targetType             = GetArchetypeChunkComponentType <MeleeTargetComponentData>()
        }.Schedule(query, resethandle);

        detectingActionJobHandle.Complete();
        var keys = detectedActionEntities.GetKeyArray(Allocator.TempJob);

        for (int i = 0; i < keys.Length; i++)
        {
            MeleeDetectData detect;
            if (!detectedActionEntities.TryGetValue(keys[i], out detect))
            {
                continue;
            }

            var target = detect.target.target;
            if (target == Entity.Null)
            {
                continue;
            }

            if (!EntityManager.HasComponent <HealthComponentData>(target))
            {
                continue;
            }

            var attackData = detect.attack;
            // deal damage to the target
            var enemyHealth = EntityManager.GetComponentData <HealthComponentData>(target);
            enemyHealth.value -= attackData.damage;
            EntityManager.SetComponentData(target, enemyHealth);
        }

        keys.Dispose();
        detectedActionEntities.Dispose();
    }
Example #18
        protected override JobHandle OnUpdate(JobHandle inputDependencies)
        {
            var parentMatrices = new NativeHashMap <int, float4x4>(16, Allocator.TempJob);

            // Gather parent matrices in parallel first, then consume them, and
            // finally schedule the map's disposal after the consumer finishes.
            var collectParents = new CollectParentMatricesJob
            {
                output = parentMatrices.AsParallelWriter()
            }.Schedule(this, inputDependencies);

            var collectMatrices = new CollectMatricesJob
            {
                parentMatrices = parentMatrices
            }.Schedule(this, collectParents);

            return(parentMatrices.Dispose(collectMatrices));
        }
Example #19
    public void NativeHashMap_Full_HashMap_Throws()
    {
        var hashMap = new NativeHashMap <int, int>(16, Allocator.Temp);

        // Fill the hash map
        for (int i = 0; i < 16; ++i)
        {
            Assert.IsTrue(hashMap.TryAdd(i, i), "Failed to add value");
        }
        // Make sure overallocating throws an exception when using the parallel writer - a normal hash map would grow
        var cHashMap = hashMap.AsParallelWriter();

        Assert.Throws <System.InvalidOperationException>(() => { cHashMap.TryAdd(100, 100); });
        hashMap.Dispose();
    }
Example #20
 public static JobHandle ToEntityComponentMap <T>(
     this EntityQuery entityQuery,
     EntityManager entityManager,
     ref NativeHashMap <Entity, T> resultEntityComponentMap,
     JobHandle inputDeps)
     where T : struct, IComponentData
 {
     inputDeps = resultEntityComponentMap.Clear(inputDeps, entityQuery.CalculateEntityCountWithoutFiltering());
     inputDeps = new GatherEntityComponentMap <T> {
         ChunkEntityType = entityManager.GetArchetypeChunkEntityType(),
         ChunkDataType   = entityManager.GetArchetypeChunkComponentType <T>(true),
         Result          = resultEntityComponentMap.AsParallelWriter()
     }.Schedule(entityQuery, inputDeps);
     return(inputDeps);
 }
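GatherEntityComponentMap<T> itself is not listed. From the fields assigned above, it is plausibly an IJobChunk that pairs each entity with its component value (sketch):

 struct GatherEntityComponentMap <T> : IJobChunk
     where T : struct, IComponentData
 {
     [ReadOnly] public ArchetypeChunkEntityType EntityType;
     [ReadOnly] public ArchetypeChunkComponentType <T> ChunkDataType;
     public NativeHashMap <Entity, T> .ParallelWriter Result;

     public void Execute(ArchetypeChunk chunk, int chunkIndex, int firstEntityIndex)
     {
         var entities = chunk.GetNativeArray(EntityType);
         var data     = chunk.GetNativeArray(ChunkDataType);

         for (int i = 0; i < chunk.Count; i++)
         {
             Result.TryAdd(entities[i], data[i]);
         }
     }
 }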
Example #21
    protected override unsafe void OnUpdate()
    {
        int   bucketWidth    = (int)StaticValues.BUCKET_SIZE;
        int   bucketsPerAxis = (int)(StaticValues.SIZE / StaticValues.BUCKET_SIZE);
        int   bucketCount    = (int)math.pow(bucketsPerAxis, 3);
        int   bucketsPAHalf  = bucketsPerAxis / 2;
        int   count          = obstacleQuery.CalculateEntityCount();
        float et             = Convert.ToSingle(Time.ElapsedTime);
        float dt             = Convert.ToSingle(Time.DeltaTime);

        NativeArray <float3>     translations     = new NativeArray <float3>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray <quaternion> rotations        = new NativeArray <quaternion>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray <float>      cohesionValues   = new NativeArray <float>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray <float>      alignmentValues  = new NativeArray <float>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray <float>      separationValues = new NativeArray <float>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        NativeMultiHashMap <int, int>    bucketEntityMap       = new NativeMultiHashMap <int, int>(count, Allocator.TempJob);         // bucketIndex -> entityInQueryIndex
        NativeHashMap <int, float3>      bucketCenterPositions = new NativeHashMap <int, float3>(count, Allocator.TempJob);           // bucketIndex -> bucket center pos
        NativeHashMap <int, float3>      bucketAvergeHeading   = new NativeHashMap <int, float3>(count, Allocator.TempJob);           // bucketIndex -> average heading in the bucket
        NativeHashMap <int, int>         entityBucketIndexMap  = new NativeHashMap <int, int>(count, Allocator.TempJob);              // entityInQueryIndex -> bucketIndex
        NativeArray <AnimalMovementData> mvmtData = new NativeArray <AnimalMovementData>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeHashMap <int, float3>      entityToCenterTargetDirection   = new NativeHashMap <int, float3>(count, Allocator.TempJob); // entityInQueryIndex -> bucketCenterDirection
        NativeHashMap <int, float3>      entitySeparationTargetDirection = new NativeHashMap <int, float3>(count, Allocator.TempJob); // entityInQueryIndex -> separationDirection

        NativeMultiHashMap <int, int> .ParallelWriter parallelBucketMap = bucketEntityMap.AsParallelWriter();
        NativeHashMap <int, int> .ParallelWriter      parallelEntityMap = entityBucketIndexMap.AsParallelWriter();

        JobHandle fillLists = Entities
                              .WithName("fillListsJob")
                              .WithAll <ObstacleTag>()
                              .ForEach((int entityInQueryIndex, in Translation translation, in Rotation rotation, in AnimalMovementData movData, in BoidBehaviourData boidData) =>
        {
            translations[entityInQueryIndex]     = translation.Value;
            rotations[entityInQueryIndex]        = rotation.Value;
            cohesionValues[entityInQueryIndex]   = boidData.cohesion;
            alignmentValues[entityInQueryIndex]  = boidData.alignmewnt;
            separationValues[entityInQueryIndex] = boidData.separation;
            mvmtData[entityInQueryIndex]         = movData;

            int3 roundedPosition = new int3((int)translation.Value.x, (int)translation.Value.y, (int)translation.Value.z);
            int3 bucket3D        = (roundedPosition / bucketWidth);
            int3 offsetBucket3D  = bucket3D + new int3(bucketsPAHalf, bucketsPAHalf, bucketsPAHalf);
            int bucketIndex      = (offsetBucket3D.x - 1) * bucketsPerAxis + offsetBucket3D.z + (bucketsPerAxis * bucketsPerAxis * offsetBucket3D.y);

            //Debug.Log(string.Format("fill bucket: idx {0}  pos {1}   pos {2}", bucketIndex, roundedPosition, bucket3D));
            parallelBucketMap.Add(bucketIndex, entityInQueryIndex);
            bool result = parallelEntityMap.TryAdd(entityInQueryIndex, bucketIndex);
        }).ScheduleParallel(Dependency);
Example #22
 public static JobHandle GetChangedComponentDataFromEntity <T>(
     this EntityQuery query,
     ComponentSystemBase system,
     ref NativeHashMap <Entity, T> resultHashMap,
     JobHandle inputDeps)
     where T : struct, IComponentData
 {
     inputDeps = resultHashMap.Clear(inputDeps, query.CalculateEntityCount());
     inputDeps = new ChangedComponentToEntity <T> {
         EntityType        = system.GetArchetypeChunkEntityType(),
         ChunkType         = system.GetArchetypeChunkComponentType <T>(true),
         ChangedComponents = resultHashMap.AsParallelWriter(),
         LastSystemVersion = system.LastSystemVersion
     }.Schedule(query, inputDeps);
     return(inputDeps);
 }
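ChangedComponentToEntity<T> is likewise unlisted; given the LastSystemVersion field, it is presumably an IJobChunk that gathers entities only from chunks whose T data changed since this system last ran (sketch):

 struct ChangedComponentToEntity <T> : IJobChunk
     where T : struct, IComponentData
 {
     [ReadOnly] public ArchetypeChunkEntityType EntityType;
     [ReadOnly] public ArchetypeChunkComponentType <T> ChunkType;
     public NativeHashMap <Entity, T> .ParallelWriter ChangedComponents;
     public uint LastSystemVersion;

     public void Execute(ArchetypeChunk chunk, int chunkIndex, int firstEntityIndex)
     {
         // Skip chunks whose component version has not moved past the system's.
         if (!chunk.DidChange(ChunkType, LastSystemVersion))
         {
             return;
         }

         var entities = chunk.GetNativeArray(EntityType);
         var values   = chunk.GetNativeArray(ChunkType);

         for (int i = 0; i < chunk.Count; i++)
         {
             ChangedComponents.TryAdd(entities[i], values[i]);
         }
     }
 }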
Example #23
        /// <summary>
        /// Builds high level chunk changes between the given chunk sets.
        ///
        /// Created chunks are from the srcWorld while destroyed chunks point to the dstWorld.
        /// </summary>
        /// <param name="srcChunks">A set of chunks from the srcWorld to consider.</param>
        /// <param name="dstChunks">A set of chunks from the dstWorld to consider.</param>
        /// <param name="dstChunkToSrcChunkSequenceNumbers">Mapping of sequence numbers for dstChunk to srcChunk.</param>
        /// <param name="allocator">The allocator that should be used for the returned structure.</param>
        /// <returns>A set of chunks which should have been created and/or destroyed from the srcWorld.</returns>
        public static unsafe ArchetypeChunkChanges GetArchetypeChunkChanges(
            NativeArray <ArchetypeChunk> srcChunks,
            NativeArray <ArchetypeChunk> dstChunks,
            Allocator allocator)
        {
            var createdEntityCount   = 0;
            var destroyedEntityCount = 0;

            var createdChunks          = new NativeList <ArchetypeChunk>(srcChunks.Length, allocator);
            var destroyedChunks        = new NativeList <ArchetypeChunk>(dstChunks.Length, allocator);
            var createdChunkFlags      = new NativeList <ChunkChangeFlags>(srcChunks.Length, allocator);
            var destroyedChunkFlags    = new NativeList <ChunkChangeFlags>(dstChunks.Length, allocator);
            var createdChunksOffsets   = new NativeArray <int>(srcChunks.Length, allocator, NativeArrayOptions.UninitializedMemory);
            var destroyedChunksOffsets = new NativeArray <int>(dstChunks.Length, allocator, NativeArrayOptions.UninitializedMemory);

            using (var srcChunksBySequenceNumber = new NativeHashMap <ulong, ArchetypeChunk>(srcChunks.Length, Allocator.TempJob))
                using (var visitedChunks = new NativeHashMap <ulong, byte>(srcChunks.Length, Allocator.TempJob))
                {
                    var buildSrcChunkSequenceNumberMap = new BuildChunkSequenceNumberMap
                    {
                        Chunks = srcChunks,
                        ChunksBySequenceNumber = srcChunksBySequenceNumber.AsParallelWriter()
                    }.Schedule(srcChunks.Length, 64);

                    var buildArchetypeChunkChanges = new BuildArchetypeChunkChanges
                    {
                        SrcChunks = srcChunks,
                        DstChunks = dstChunks,
                        SrcChunksBySequenceNumber = srcChunksBySequenceNumber,
                        CreatedChunks             = createdChunks,
                        CreatedChunkFlags         = createdChunkFlags,
                        CreatedChunkOffsets       = createdChunksOffsets,
                        DestroyedChunks           = destroyedChunks,
                        DestroyedChunkFlags       = destroyedChunkFlags,
                        DestroyedChunkOffsets     = destroyedChunksOffsets,
                        VisitedChunks             = visitedChunks,
                        CreateEntityCount         = &createdEntityCount,
                        DestroyedEntityCount      = &destroyedEntityCount
                    }.Schedule(buildSrcChunkSequenceNumberMap);

                    buildArchetypeChunkChanges.Complete();
                }

            return(new ArchetypeChunkChanges(
                       new ArchetypeChunkCollection(createdChunks, createdChunkFlags, createdChunksOffsets, createdEntityCount),
                       new ArchetypeChunkCollection(destroyedChunks, destroyedChunkFlags, destroyedChunksOffsets, destroyedEntityCount)));
        }
Example #24
    protected override void OnUpdate()
    {
        var query      = GetEntityQuery(typeof(SquadTagSharedComponentData), typeof(SquadComponentData), typeof(LerpShootTargetProvederComponentData), typeof(ArcherTargetPositionComponentData));
        var chunkCount = query.CalculateChunkCount();

        var sharedIndicesMap = new NativeHashMap <int, int>(chunkCount, Allocator.TempJob);
        var indicesJobH      = new SharedIndicesJob()
        {
            indices      = sharedIndicesMap.AsParallelWriter(),
            squadTagType = GetArchetypeChunkSharedComponentType <SquadTagSharedComponentData>()
        }.Schedule(query);

        var sharedData = new NativeHashMap <int, SquadFormationData>(chunkCount, Allocator.TempJob);

        indicesJobH.Complete();

        var indices = sharedIndicesMap.GetKeyArray(Allocator.TempJob);

        for (int i = 0; i < indices.Length; i++)
        {
            var data = EntityManager.GetSharedComponentData <SquadTagSharedComponentData>(indices[i]);
            sharedData.TryAdd(indices[i], new SquadFormationData()
            {
                yUnitCount = data.data.heightUnitsCount,
                xUnitCount = data.unitCount.value / data.data.heightUnitsCount + (data.unitCount.value % data.data.heightUnitsCount == 0 ? 0 : 1),
                bot2top    = data.data.directionBottomToTop,
                left2right = data.data.directionLeftToRight
            });
        }

        var setTargetJobH = new SetTargetJob()
        {
            sharedData         = sharedData,
            squadTagType       = GetArchetypeChunkSharedComponentType <SquadTagSharedComponentData>(),
            squadType          = GetArchetypeChunkComponentType <SquadComponentData>(true),
            targetProviderType = GetArchetypeChunkComponentType <LerpShootTargetProvederComponentData>(true),
            targetTypr         = GetArchetypeChunkComponentType <ArcherTargetPositionComponentData>(false),
            rnd = new Unity.Mathematics.Random((uint)UnityEngine.Random.Range(0, int.MaxValue))
        }.Schedule(query);

        setTargetJobH.Complete();

        indices.Dispose();
        sharedData.Dispose();
        sharedIndicesMap.Dispose();
    }
Example #25
    public void NativeHashMap_Key_Collisions()
    {
        var hashMap     = new NativeHashMap <int, int>(hashMapSize, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var writeData = new HashMapWriteJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status  = writeStatus,
            keyMod  = 16,
        };

        var readData = new HashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values  = readValues,
            keyMod  = writeData.keyMod,
        };

        var writeJob = writeData.Schedule();
        var readJob  = readData.Schedule(hashMapSize, 1, writeJob);

        readJob.Complete();

        var missing = new HashSet <int>();

        for (int i = 0; i < hashMapSize; ++i)
        {
            if (writeStatus[i] == -1)
            {
                missing.Add(i);
                Assert.AreNotEqual(i, readValues[i], "Job read a value from hash map which should not be there");
            }
            else
            {
                Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
                Assert.AreEqual(i, readValues[i], "Job failed to read from hash map");
            }
        }
        Assert.AreEqual(hashMapSize - writeData.keyMod, missing.Count, "Wrong indices written to hash map");

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
Example #26
        public static JobHandle GetChangedTransformFromEntity(
            this EntityQuery query,
            ComponentSystemBase system,
            ref NativeHashMap <Entity, LocalToWorld> resultHashMap,
            JobHandle inputDeps)
        {
            inputDeps = resultHashMap.Clear(inputDeps, query.CalculateEntityCount());
            var entities = query.ToEntityArrayAsync(Allocator.TempJob, out var toEntityHandle);

            inputDeps = JobHandle.CombineDependencies(inputDeps, toEntityHandle);
            inputDeps = new ChangedTransformsToEntity {
                Entities = entities,
                LocalToWorldFromEntity = system.GetComponentDataFromEntity <LocalToWorld>(true),
                ChangedComponents      = resultHashMap.AsParallelWriter()
            }.Schedule(query.GetTransformAccessArray(), inputDeps);
            return(inputDeps);
        }
Example #27
        public void RunHashWriterJob()
        {
            NativeHashMap <int, int> map = new NativeHashMap <int, int>(HashWriterJob.N, Allocator.TempJob);

            HashWriterJob job = new HashWriterJob();

            job.result = map.AsParallelWriter();
            JobHandle handle = job.Schedule();

            handle.Complete();

            for (int i = 0; i < HashWriterJob.N; ++i)
            {
                Assert.AreEqual(map[i], 47);
            }
            map.Dispose();
        }
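HashWriterJob is not shown; the assertions above (HashWriterJob.N entries, each mapping to 47) imply a single-threaded IJob that still writes through the parallel writer:

        struct HashWriterJob : IJob
        {
            public const int N = 1000;     // assumed; only referenced as HashWriterJob.N above

            public NativeHashMap <int, int> .ParallelWriter result;

            public void Execute()
            {
                for (int i = 0; i < N; ++i)
                {
                    result.TryAdd(i, 47);
                }
            }
        }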
Example #28
        void SelectParallel()
        {
            int maxDepth = 0;

            using (var depths = m_DepthMap.GetValueArray(Allocator.Temp))
            {
                for (int i = 0; i < depths.Length; i++)
                {
                    maxDepth = math.max(maxDepth, depths[i]);
                }
            }

            var inputStates = new NativeList <int>(1, Allocator.TempJob);

            inputStates.Add(rootState);
            var inputBudgets = new NativeList <int>(1, Allocator.TempJob);

            inputBudgets.Add(1);

            var outputStateBudgets = new NativeMultiHashMap <int, int>(1, Allocator.TempJob);
            var selectedUnexpanded = new NativeHashMap <int, byte>(1, Allocator.TempJob);

            JobHandle jobHandle = default;

            for (int iteration = 0; iteration <= maxDepth; iteration++)
            {
                // Selection job
                jobHandle = new ParallelSelectionJob <int, int>()
                {
                    StateDepthLookup          = m_DepthMap,
                    StateInfoLookup           = m_PlanGraph.StateInfoLookup,
                    ActionInfoLookup          = m_PlanGraph.ActionInfoLookup,
                    ActionLookup              = m_PlanGraph.ActionLookup,
                    ResultingStateLookup      = m_PlanGraph.ResultingStateLookup,
                    StateTransitionInfoLookup = m_PlanGraph.StateTransitionInfoLookup,

                    Horizon      = iteration,
                    InputStates  = inputStates.AsDeferredJobArray(),
                    InputBudgets = inputBudgets.AsDeferredJobArray(),

                    OutputStateBudgets       = outputStateBudgets.AsParallelWriter(),
                    SelectedStateHorizons    = m_SelectedStateHorizons.AsParallelWriter(),
                    SelectedUnexpandedStates = selectedUnexpanded.AsParallelWriter(),
                }.Schedule(inputStates, default, jobHandle);
Example #29
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            if (m_KeyboardEventGroup.CalculateEntityCount() > 0 || m_PointerEventGroup.CalculateEntityCount() > 0)
            {
                m_TargetToKeyboardEvent.Clear();
                m_TargetToPointerEvent.Clear();
                var entityType = GetArchetypeChunkEntityType();
                using (new Profiling.ProfilerSample("ScheduleJobs"))
                {
                    CreateTargetToKeyboardEvent createTargetToKeyboardEvent = new CreateTargetToKeyboardEvent()
                    {
                        EntityType    = entityType,
                        KbdEventType  = GetArchetypeChunkComponentType <KeyboardEvent>(true),
                        TargetToEvent = m_TargetToKeyboardEvent.AsParallelWriter()
                    };
                    inputDeps = createTargetToKeyboardEvent.Schedule(m_KeyboardEventGroup, inputDeps);
                    CreateTargetToPointerEvent createTargetToPointerEvent = new CreateTargetToPointerEvent()
                    {
                        EntityType       = entityType,
                        PointerEventType = GetArchetypeChunkComponentType <PointerEvent>(true),
                        TargetToEvent    = m_TargetToPointerEvent.AsParallelWriter()
                    };
                    inputDeps = createTargetToPointerEvent.Schedule(m_PointerEventGroup, inputDeps);
                    EventProcessor inputEventProcessor = new EventProcessor()
                    {
                        KeyboardInputBufferFromEntity = GetBufferFromEntity <KeyboardInputBuffer>(true),
                        PointerInputBufferFromEntity  = GetBufferFromEntity <PointerInputBuffer>(true),
                        InputFieldCaretLinkFromEntity = GetComponentDataFromEntity <InputFieldCaretEntityLink>(true),
                        EntityType            = entityType,
                        CaretStateType        = GetArchetypeChunkComponentType <InputFieldCaretState>(),
                        InputFieldType        = GetArchetypeChunkComponentType <InputField>(),
                        TextDataFromEntity    = GetBufferFromEntity <TextData>(),
                        TargetToKeyboardEvent = m_TargetToKeyboardEvent,
                        TargetToPointerEvent  = m_TargetToPointerEvent,
                        CommandBuff           = m_InputSystemBarrier.CreateCommandBuffer().ToConcurrent(),
                        CaretArchetype        = m_CaretArchetype
                    };
                    inputDeps = inputEventProcessor.Schedule(m_InputFieldGroup, inputDeps);
                    m_InputSystemBarrier.AddJobHandleForProducer(inputDeps);
                }
            }

            return(inputDeps);
        }
Example #30
    protected override void OnUpdate()
    {
        var query = GetEntityQuery(
            typeof(SquadTagSharedComponentData),
            typeof(SquadComponentData),
            typeof(LinearMovementComponentData),
            ComponentType.Exclude <SequenceMovementSharedComponentData>()
            );

        var sharedIndices = new NativeHashMap <int, int>(query.CalculateChunkCount(), Allocator.TempJob);

        new SharedIndicesJobChunk()
        {
            sharedIndices = sharedIndices.AsParallelWriter(),
            squadTagType  = GetArchetypeChunkSharedComponentType <SquadTagSharedComponentData>()
        }.Schedule(query).Complete();

        var indices     = sharedIndices.GetKeyArray(Allocator.TempJob);
        var sharedDatas = new NativeHashMap <int, SquadFormationData>(indices.Length, Allocator.TempJob);

        for (int i = 0; i < indices.Length; i++)
        {
            var sharedData = EntityManager.GetSharedComponentData <SquadTagSharedComponentData>(indices[i]);
            sharedDatas.TryAdd(indices[i], new SquadFormationData()
            {
                mainData = sharedData.data,
                count    = sharedData.unitCount != null ? sharedData.unitCount.value : 0
            });
        }

        new SetPositionJobChunk()
        {
            squadTagType = GetArchetypeChunkSharedComponentType <SquadTagSharedComponentData>(),
            squadTagMap  = sharedDatas,
            rnd          = new Unity.Mathematics.Random((uint)UnityEngine.Random.Range(0, int.MaxValue)),
            moveType     = GetArchetypeChunkComponentType <LinearMovementComponentData>(),
            scaleType    = GetArchetypeChunkComponentType <Scale>(true),
            squadType    = GetArchetypeChunkComponentType <SquadComponentData>()
        }.Schedule(query).Complete();

        indices.Dispose();
        sharedIndices.Dispose();
        sharedDatas.Dispose();
    }
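SharedIndicesJobChunk is not included; from its two fields it is plausibly an IJobChunk that records each chunk's shared-component index, which the main thread then resolves through EntityManager.GetSharedComponentData (sketch):

    struct SharedIndicesJobChunk : IJobChunk
    {
        public NativeHashMap <int, int> .ParallelWriter sharedIndices;
        [ReadOnly] public ArchetypeChunkSharedComponentType <SquadTagSharedComponentData> squadTagType;

        public void Execute(ArchetypeChunk chunk, int chunkIndex, int firstEntityIndex)
        {
            // One entry per distinct shared component; duplicate adds simply fail.
            int sharedIndex = chunk.GetSharedComponentIndex(squadTagType);
            sharedIndices.TryAdd(sharedIndex, sharedIndex);
        }
    }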