Code example #1
        protected override JobHandle CooldownJobs(JobHandle inputDeps)
        {
            NativeMultiHashMap <Entity, GameplayEffectDurationComponent> Cooldowns = new NativeMultiHashMap <Entity, GameplayEffectDurationComponent>(CooldownEffectsQuery.CalculateEntityCount() * 2 + GrantedAbilityQuery.CalculateEntityCount(), Allocator.TempJob);

            // Collect all effects which act as cooldowns for this ability
            inputDeps = new GatherCooldownGameplayEffectsJob
            {
                GameplayEffectDurations = Cooldowns.AsParallelWriter()
            }.Schedule(CooldownEffectsQuery, inputDeps);

            // Add a default value of '0' for all entities as well
            inputDeps = new CooldownAbilityIsZeroIfAbsentJob
            {
                GameplayEffectDurations = Cooldowns.AsParallelWriter()
            }.Schedule(GrantedAbilityQuery, inputDeps);

            // Get the effect with the longest cooldown remaining
            inputDeps = new GatherLongestCooldownPerEntity
            {
                GameplayEffectDurationComponent = Cooldowns
            }.Schedule(GrantedAbilityQuery, inputDeps);

            Cooldowns.Dispose(inputDeps);
            return(inputDeps);
        }
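
All of the examples on this page share one pattern: size the NativeMultiHashMap up front, hand AsParallelWriter() to a job, and dispose the map once the job chain is done. As a reference point, here is a minimal self-contained sketch of that pattern; the job and the key/value types are invented for illustration and are not taken from the example above.

using Unity.Burst;
using Unity.Collections;
using Unity.Jobs;

[BurstCompile]
struct HashIndicesJob : IJobParallelFor
{
    public NativeMultiHashMap<int, int>.ParallelWriter Writer;

    public void Execute(int index)
    {
        // Several indices can land on the same key; the multi hash map keeps all of them.
        Writer.Add(index % 8, index);
    }
}

static class ParallelWriterSketch
{
    public static void Run()
    {
        // Capacity must cover every Add up front: a ParallelWriter cannot grow the map.
        var map = new NativeMultiHashMap<int, int>(64, Allocator.TempJob);

        var handle = new HashIndicesJob { Writer = map.AsParallelWriter() }.Schedule(64, 16);

        // Dispose can be chained on the job handle instead of calling Complete() first,
        // which is the same technique example #1 uses with Cooldowns.Dispose(inputDeps).
        map.Dispose(handle).Complete();
    }
}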
Code example #2
        protected sealed override void OnUpdate()
        {
            Dependency = JobHandle.CombineDependencies(Dependency, TriggerJobHandle);

            // If the producer did not actually write anything to the stream, the native stream will not be flagged as created.
            // In that case we don't need to do anything.
            // Skipping this check would result in unauthorized memory access and crash Unity.
            if (!_effectStream.IsCreated)
            {
                return;
            }
            NativeStream.Reader effectReader = GetEffectReader();
            SetupEffectMap      AllocateJob  = new SetupEffectMap()
            {
                EffectReader = effectReader,
                Effects      = _effects
            };

            Dependency = AllocateJob.Schedule(Dependency);


            NativeMultiHashMap <Entity, EFFECT_CTX> .ParallelWriter effectsWriter = _effects.AsParallelWriter();
            RemapEffects RemapEffectsJob = new RemapEffects()
            {
                EffectReader  = effectReader,
                EffectsWriter = effectsWriter
            };

            Dependency = RemapEffectsJob.Schedule(_forEachCount, 1, Dependency);

            // Call the effect consumption logic defined in the derived class.
            Consume();

            Dependency = _effectStream.Dispose(Dependency);
        }
Code example #3
        public void EvaluateCumulativeRewardEstimatorMultipleStates()
        {
            const int kStateCount     = 10;
            var       states          = new NativeList <int>(kStateCount, Allocator.TempJob);
            var       stateInfoLookup = new NativeHashMap <int, StateInfo>(kStateCount, Allocator.TempJob);
            var       binnedStateKeys = new NativeMultiHashMap <int, int>(kStateCount, Allocator.TempJob);

            for (int i = 0; i < kStateCount; i++)
            {
                states.Add(i);
            }

            var stateEvaluationJob = new EvaluateNewStatesJob <int, int, TestStateDataContext, StateValueAsCumulativeRewardEstimatorValue, DefaultTerminalStateEvaluator <int> >
            {
                StateDataContext = new TestStateDataContext(),
                StateInfoLookup  = stateInfoLookup.AsParallelWriter(),
                States           = states.AsDeferredJobArray(),
                BinnedStateKeys  = binnedStateKeys.AsParallelWriter(),
            };

            stateEvaluationJob.Schedule(states, default).Complete();

            for (int i = 0; i < states.Length; i++)
            {
                stateInfoLookup.TryGetValue(i, out var stateInfo);

                Assert.AreEqual(new BoundedValue(i, i, i), stateInfo.CumulativeRewardEstimate);
            }

            states.Dispose();
            stateInfoLookup.Dispose();
            binnedStateKeys.Dispose();
        }
Code example #4
        protected override void OnUpdate()
        {
            if (m_AttributeModifiers.CalculateEntityCount() == 0)
            {
                Dependency = new ResetAttributeModificationsForPlayer()
                {
                    AttributeModifierValuesHandle = GetComponentTypeHandle <TAttributeModifier>(false),
                    EntitiesHandle = GetEntityTypeHandle()
                }.ScheduleParallel(m_AttributesGroup, Dependency);
                return;
            }

            NativeMultiHashMap <Entity, TGameplayAttributesModifier> AttributeModifiersNMHM = new NativeMultiHashMap <Entity, TGameplayAttributesModifier>(m_AttributeModifiers.CalculateEntityCount(), Allocator.TempJob);

            Dependency = new CollectAllAttributeModifiers()
            {
                AttributeModifiersNMHMWriter    = AttributeModifiersNMHM.AsParallelWriter(),
                GameplayAttributeModifierHandle = GetComponentTypeHandle <TGameplayAttributesModifier>(true),
                GameplayEffectContextHandle     = GetComponentTypeHandle <GameplayEffectContextComponent>(true)
            }.ScheduleParallel(m_AttributeModifiers, Dependency);
            // Now write the collected modifications back to the attributes
            Dependency = new MapAttributeModificationsToPlayer()
            {
                AttributeModifierValuesHandle = GetComponentTypeHandle <TAttributeModifier>(false),
                AttributeModifierCollection   = AttributeModifiersNMHM,
                EntitiesHandle = GetEntityTypeHandle()
            }.ScheduleParallel(m_AttributesGroup, Dependency);
            AttributeModifiersNMHM.Dispose(Dependency);
        }
Code example #5
        public void Tick()
        {
            if (ActiveCamera == null || !_batcher.IsCreated)
            {
                return;
            }
            //shared component ids can only be accessed through ArchetypeChunks,
            //so we iterate over ArchetypeChunks here
            //https://github.com/Unity-Technologies/EntityComponentSystemSamples/blob/8f94d72d1fd9b8db896646d9d533055917dc265a/Documentation/reference/chunk_iteration.md
            _batcher.Clear();
            UnityEngine.Profiling.Profiler.BeginSample("gather chunks");
            NativeArray <ArchetypeChunk> chunks = _queryGroup.CreateArchetypeChunkArray(Allocator.TempJob);

            UnityEngine.Profiling.Profiler.EndSample();
            UnityEngine.Profiling.Profiler.BeginSample("start cull");
            var cullJob = new CullJob()
            {
                EntityType       = GetArchetypeChunkEntityType(),
                Chunks           = chunks,
                RenderTypes      = GetArchetypeChunkSharedComponentType <InstanceRendererData>(),
                LocalToWorldType = GetArchetypeChunkComponentType <LocalToWorld>(true),
                Batcher          = _batcher.AsParallelWriter(),
                CamPos           = ActiveCamera.transform.position,
                CullDistance     = _cullDistance
            };
            var deps = cullJob.Schedule(chunks.Length, 1);

            deps.Complete();
            UnityEngine.Profiling.Profiler.EndSample();
            UnityEngine.Profiling.Profiler.BeginSample("start render");
            Render();
            UnityEngine.Profiling.Profiler.EndSample();
        }
Code example #6
        protected override void OnUpdate()
        {
            EntityQuery eq = GetEntityQuery(typeof(Translation), typeof(CollisionComponent));

            Debug.Log("Entity count = " + eq.CalculateEntityCount());

            quadTreeMap.Clear();
            if (eq.CalculateEntityCount() > quadTreeMap.Capacity)
            {
                quadTreeMap.Capacity = eq.CalculateEntityCount();
            }

            SetQuadrantHashMapJob sqj = new SetQuadrantHashMapJob
            {
                quadTreeMap = quadTreeMap.AsParallelWriter(),
            };
            JobHandle job = JobForEachExtensions.Schedule(sqj, eq);

            job.Complete();

            //Debug.Log("Total number of entities = " + quadTreeMap.Length);

            DrawQuadrant(Camera.main.ScreenToWorldPoint(Input.mousePosition));
            //Debug.Log("Entities in Quadrant " + GetPositionHashMapKey(Camera.main.ScreenToWorldPoint(Input.mousePosition)) + " = " + GetEntityCount(quadTreeMap, GetPositionHashMapKey(Camera.main.WorldToScreenPoint(Input.mousePosition))));
        }
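
Examples #6, #7, #9 and #11 all reuse one persistently allocated map across frames instead of allocating a TempJob map every update: Clear() empties it without freeing memory, and Capacity is grown only when the entity count exceeds it. A minimal sketch of just that reuse logic, with invented names:

using Unity.Collections;
using Unity.Entities;

class QuadrantMapCache
{
    // Allocated once (e.g. in OnCreate) and reused every frame.
    NativeMultiHashMap<int, Entity> _map =
        new NativeMultiHashMap<int, Entity>(256, Allocator.Persistent);

    public void BeginFrame(int entityCount)
    {
        _map.Clear();                     // drops the entries, keeps the allocation
        if (entityCount > _map.Capacity)  // growing reallocates and copies, so grow rarely
        {
            _map.Capacity = entityCount;
        }
    }
}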
Code example #7
    protected override JobHandle OnUpdate(JobHandle inputDependencies)
    {
        var cmndBuffer = commandBuffer.CreateCommandBuffer().ToConcurrent();

        int unitCount = damageableUnits.CalculateEntityCount();

        DamageMap.Clear();

        if (unitCount > DamageMap.Capacity)
        {
            DamageMap.Capacity = unitCount;
        }

        var job = new ZombieAttackSystemJob
        {
            CommandBuffer   = cmndBuffer,
            MinDistance     = GameGlobals.zombieAttackReach,
            DamageTakerData = GetComponentDataFromEntity <DamageTakerTag>(true),
            HealthData      = GetComponentDataFromEntity <HealthComponent>(true),
            EntityDamageMap = DamageMap.AsParallelWriter()
        }.Schedule(this, inputDependencies);

        commandBuffer.AddJobHandleForProducer(job);
        return(job);
    }
Code example #8
        protected override void OnUpdate()
        {
            var cellSize = Environment.cellSize;
            var offset   = Environment.minXY;
            var numCell  = Environment.numCell;

            var entityQuery = this.GetEntityQuery(typeof(VehicleData));

            cellEntityElementHashMap.Clear();
            if (entityQuery.CalculateEntityCount() > cellEntityElementHashMap.Capacity)
            {
                cellEntityElementHashMap.Capacity = entityQuery.CalculateEntityCount();
            }
            var parallelWriter = cellEntityElementHashMap.AsParallelWriter();

            this.Entities.WithAll <VehicleData>().ForEach((Entity entity, in EntityData entityData) =>
            {
                var index = GetCellRawIndex(entityData.position, offset, numCell, cellSize);
                parallelWriter.Add(index, new CellEntityElement
                {
                    entity     = entity,
                    entityData = entityData,
                    position   = entityData.position,
                    radius     = entityData.radius
                });
            }).ScheduleParallel();
        }
Code example #9
        protected override void OnUpdate()
        {
            QuadrantHashMap.Clear();

            var navFlockingSettings = navSystem.FlockingSettings;
            var entityCount         = GetEntityQuery(typeof(NavAgent)).CalculateEntityCount();

            if (entityCount > QuadrantHashMap.Capacity)
            {
                QuadrantHashMap.Capacity = entityCount;
            }

            var parallelHashMap = QuadrantHashMap.AsParallelWriter();

            Entities
            .WithAll <NavAgent>()
            .ForEach((Entity entity, in LocalToWorld localToWorld) =>
            {
                parallelHashMap.Add(
                    HashPosition(localToWorld.Position, navFlockingSettings),
                    new QuadrantData { LocalToWorld = localToWorld });
            })
            .WithName("NavHashPositionJob")
            .ScheduleParallel();
        }
Code example #10
    protected override JobHandle OnUpdate(JobHandle inputDeps)
    {
        int massCount   = massQuery.CalculateEntityCount();
        int springCount = springQuery.CalculateEntityCount();

        if (massCount == 0 || springCount == 0)
        {
            return(inputDeps);
        }

        NativeMultiHashMap <Entity, float3> hashMap = new NativeMultiHashMap <Entity, float3>(massCount * 4, Allocator.TempJob);

        HashSpringForceJob hashMassSpringJob = new HashSpringForceJob {
            _hashMap = hashMap.AsParallelWriter()
        };

        JobHandle hashMassSpringHandle = hashMassSpringJob.Schedule(this, inputDeps);

        MassSpringForceJob massSpringForceJob = new MassSpringForceJob {
            _massSpringHashMap = hashMap
        };

        JobHandle massSpringForceHandle = massSpringForceJob.Schedule(this, hashMassSpringHandle);

        massSpringForceHandle.Complete();
        hashMap.Dispose();
        return(massSpringForceHandle);
    }
Code example #11
    protected override void OnUpdate()
    {
        EntityQuery entityQuery = GetEntityQuery(typeof(Translation), typeof(QuadrantEntity));

        quadrantMap.Clear();
        if (entityQuery.CalculateEntityCount() > quadrantMap.Capacity)
        {
            quadrantMap.Capacity = entityQuery.CalculateEntityCount();
        }

        // Create Job
        SetQuadrantDataHashMapJob job = new SetQuadrantDataHashMapJob
        {
            hashMap = quadrantMap.AsParallelWriter()
        };

        // Schedule and complete Job
        JobHandle jobHandle = JobForEachExtensions.Schedule(job, entityQuery);

        jobHandle.Complete();

        DebugDrawQuadrant(UtilsClass.GetMouseWorldPosition());

        int quadrantKey = GetKeyFromPosition(UtilsClass.GetMouseWorldPosition());
        int entityCount = GetEntityCountInQuadrant(quadrantKey);

        Debug.Log("There are " + entityCount + " in Quadrant " + quadrantKey);
    }
Code example #12
    protected override void OnUpdate()
    {
        var unitCount = query.CalculateEntityCount();
        var nextGridPositionHashMap = new NativeMultiHashMap <int, int>(unitCount, Allocator.TempJob);
        var parallelWriter          = nextGridPositionHashMap.AsParallelWriter();

        Entities
        .WithName("HashNextGridPositions")
        .WithStoreEntityQueryInField(ref query)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in NextGridPosition nextGridPosition) =>
        {
            var hash = (int)math.hash(nextGridPosition.Value);
            parallelWriter.Add(hash, entityInQueryIndex);
        })
        .ScheduleParallel();

        Entities
        .WithName("FinalizeMovement")
        .WithReadOnly(nextGridPositionHashMap)
        .WithDisposeOnCompletion(nextGridPositionHashMap)
        .WithBurst()
        .ForEach((ref NextGridPosition nextGridPosition, in GridPosition gridPosition) =>
        {
            int hash = (int)math.hash(nextGridPosition.Value);
            if (nextGridPositionHashMap.TryGetFirstValue(hash, out _, out var iter))
            {
                if (nextGridPositionHashMap.TryGetNextValue(out _, ref iter))
                {
                    nextGridPosition.Value = gridPosition.Value;
                }
            }
        })
        .ScheduleParallel();
    }
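
The second ForEach in example #12 relies on the reader-side API: TryGetFirstValue starts an iteration over every value stored under one key, and TryGetNextValue continues it (finding a second value is how the example detects that two units want the same cell). A standalone, main-thread sketch of that iteration with made-up data:

using Unity.Collections;
using UnityEngine;

static class MultiHashMapReadSketch
{
    public static void Run()
    {
        var map = new NativeMultiHashMap<int, int>(4, Allocator.Temp);
        map.Add(1, 10);
        map.Add(1, 11);

        // Walk every value stored under key 1.
        if (map.TryGetFirstValue(1, out int value, out var it))
        {
            do
            {
                Debug.Log(value); // prints 10 and 11 (order not guaranteed)
            }
            while (map.TryGetNextValue(out value, ref it));
        }
        map.Dispose();
    }
}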
Code example #13
    public void NativeMultiHashMap_ForEach_Throws_When_Modified_From_Job()
    {
        using (var container = new NativeMultiHashMap <int, int>(32, Allocator.TempJob))
        {
            var iter = container.GetEnumerator();

            var jobHandle = new ParallelWriteToMultiHashMapJob
            {
                Writer = container.AsParallelWriter()
            }.Schedule(1, 2);

#if UNITY_2020_2_OR_NEWER
            Assert.Throws <ObjectDisposedException>(() =>
#else
            Assert.Throws <InvalidOperationException>(() =>
#endif
            {
                while (iter.MoveNext())
                {
                }
            });

            jobHandle.Complete();
        }
    }
Code example #14
        protected override void OnUpdate()
        {
            int targetNumber    = TargetQuery.CalculateEntityCount();
            var targetMap       = new NativeMultiHashMap <int, Target>(targetNumber, Allocator.TempJob);
            var targetMapWriter = targetMap.AsParallelWriter();

            // Store target information in hash map.
            Dependency = Entities
                         .WithAll <Targetable>()
                         .ForEach(
                (
                    Entity targetEntity,
                    in LocalToWorld localToWorld,
                    in Team team,
                    in AgentCategory category,
                    in Health health,
                    in MaxHealth maxHealth
                ) =>
            {
                var healthFraction = health.Value / maxHealth.Value;
                var pos            = localToWorld.Position;
                targetMapWriter.Add(
                    HashPosition(pos), new Target
                {
                    Entity        = targetEntity,
                    Category      = category,
                    HealthPercent = healthFraction,
                    Position      = pos,
                    Team          = team.ID
                });
            }).ScheduleParallel(Dependency);

            // The source sample is truncated here; disposing the temporary map on the
            // job handle, as the other examples on this page do, is the presumed missing tail.
            targetMap.Dispose(Dependency);
        }
Code example #15
File: BoidHashMap.cs  Project: Amix2/Fight-The-Flock
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            BoidHashMap.cellSize = Settings.Instance.Boid.surroundingsViewRange;
            //EntityArchetype archetype = EntityManager.CreateArchetype(typeof(BoidComponent), typeof(Translation), typeof(PhysicsVelocity));
            EntityQuery query = EntityManager.CreateEntityQuery(typeof(BoidComponent), typeof(Translation), typeof(PhysicsVelocity));

            BoidMap.Clear();
            int queryCount = query.CalculateEntityCount();

            if (queryCount > BoidMap.Capacity)
            {
                BoidMap.Capacity = queryCount;
            }

            NativeMultiHashMap <int, BoidData> .ParallelWriter parallelWriter = BoidMap.AsParallelWriter();
            float cellSize = BoidHashMap.cellSize;

            inputDeps = Entities.WithAny <BoidComponent>().ForEach((Entity entity, ref Translation translation, ref PhysicsVelocity velocity) =>
            {
                parallelWriter.Add(Utils.GetHash(translation.Value, cellSize), new BoidData {
                    entity = entity, position = translation.Value, velocity = velocity.Linear
                });
            }).Schedule(inputDeps);


            return(inputDeps);
        }
Code example #16
    public void Read_And_Write_Without_Fences()
    {
        var hashMap     = new NativeMultiHashMap <int, int>(hashMapSize, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var writeData = new MultiHashMapWriteParallelForJob();

        writeData.hashMap = hashMap.AsParallelWriter();
        writeData.status  = writeStatus;
        writeData.keyMod  = hashMapSize;
        var readData = new MultiHashMapReadParallelForJob();

        readData.hashMap = hashMap;
        readData.values  = readValues;
        readData.keyMod  = writeData.keyMod;
        var writeJob = writeData.Schedule(hashMapSize, 1);

        Assert.Throws <InvalidOperationException> (() => { readData.Schedule(hashMapSize, 1); });
        writeJob.Complete();

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
Code example #17
    protected override JobHandle OnUpdate(JobHandle inputDeps)
    {
        var query = GetEntityQuery(typeof(ShiftCastShadowsZoneComponentTagData));
        var shiftedEntitiesQuery = GetEntityQuery(typeof(ShiftCastShadowsTagComponentData));

        zones.Dispose();
        JobHandle toArrayHandle;

        zones = query.ToComponentDataArray <ShiftCastShadowsZoneComponentTagData>(Allocator.TempJob, out toArrayHandle);

        additionalOffsets.Dispose();
        //the size will have to be set to the maximum...
        additionalOffsets = new NativeMultiHashMap <Entity, float3>(
            shiftedEntitiesQuery.CalculateEntityCount() * query.CalculateEntityCount(),
            Allocator.TempJob
            );

        var resetHandler = new ResetOffsetJob().Schedule(this, JobHandle.CombineDependencies(toArrayHandle, inputDeps));

        var triggerUpdaterHandler = new ShiftTriggerZoneUpdateJob()
        {
            triggerZones      = zones,
            additionalOffsets = additionalOffsets.AsParallelWriter()
        }.Schedule(this, resetHandler);

        var offsetJob = new DoOffsetJob()
        {
            additionalOffsets = additionalOffsets
        }.Schedule(this, triggerUpdaterHandler);

        return(offsetJob);
    }
Code example #18
        protected override void OnUpdate()
        {
            // Compute each entity's hash, then add it to the corresponding map
            EntityQuery query = GetEntityQuery(typeof(Translation), typeof(QuardrantType));

            /* Done in a job instead of:
             * Entities.ForEach((Entity entity, ref Translation translation) =>
             * {
             *  int hasMapKey = GetPositionHasMapKey(translation.Value);
             *  quardrantMultiHasMap.Add(hasMapKey, entity);
             * });
             */

            quardrantMultiHasMap.Clear();
            if (quardrantMultiHasMap.Capacity < query.CalculateEntityCount())
            {
                quardrantMultiHasMap.Capacity = query.CalculateEntityCount();
            }
            //quardrantMultiHasMap = new NativeMultiHashMap<int, QuardrantData>(query.CalculateEntityCount(), Allocator.TempJob);

            SetQuadrantDataHasMapJob job = new SetQuadrantDataHasMapJob()
            {
                map = quardrantMultiHasMap.AsParallelWriter(),
            };
            var handle = JobForEachExtensions.Schedule(job, query);

            handle.Complete();

            // DebugDrawQuadrant(UtilClass.GetMouseWorldPosition(Camera.main));
            // Number of entities in the quadrant under the cursor
            // Debug.Log(GetEntityCountInHasMap(quardrantMultiHasMap, GetPositionHasMapKey(UtilClass.GetMouseWorldPosition())));
        }
Code example #19
    public void NativeMultiHashMap_Key_Collisions()
    {
        var hashMap     = new NativeMultiHashMap <int, int>(hashMapSize, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var writeData = new MultiHashMapWriteParallelForJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status  = writeStatus,
            keyMod  = 16,
        };

        var readData = new MultiHashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values  = readValues,
            keyMod  = writeData.keyMod,
        };

        var writeJob = writeData.Schedule(hashMapSize, 1);
        var readJob  = readData.Schedule(hashMapSize, 1, writeJob);

        readJob.Complete();

        for (int i = 0; i < hashMapSize; ++i)
        {
            Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
            Assert.AreEqual(hashMapSize / readData.keyMod, readValues[i], "Job failed to read from hash map");
        }

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
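
Example #19 verifies collision handling by counting, per key, how many values the read job saw (hashMapSize / keyMod of them). When only that count is needed, CountValuesForKey — the call example #21 leaves commented out — returns it directly. A minimal sketch:

using Unity.Collections;
using UnityEngine;

static class CountValuesSketch
{
    public static void Run()
    {
        var map = new NativeMultiHashMap<int, int>(4, Allocator.Temp);
        map.Add(7, 1);
        map.Add(7, 2);

        // Reports how many values share one key, without enumerating them yourself.
        Debug.Log(map.CountValuesForKey(7)); // prints 2
        map.Dispose();
    }
}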
Code example #20
        protected override void OnUpdate()
        {
            var config = Config.Get <DebugConfig>();

            _renderData.Clear();
            var writer = _renderData.AsParallelWriter();

            DebugConfig.HitboxColor colors = config.HitboxColors;

            var hitboxJob = Entities
                            .WithBurst(FloatMode.Fast)
                            .ForEach((in Hitbox hitbox, in HitboxState state, in LocalToWorld transform) => {
                if (!state.Enabled)
                {
                    return;
                }
                float3 currentPosition = math.transform(transform.Value, float3.zero);
                float3 prevPosition    = state.PreviousPosition ?? currentPosition;
                float3 center          = (currentPosition + prevPosition) / 2;
                float height           = math.length(prevPosition - currentPosition) / hitbox.Radius;
                var scale = new float3(hitbox.Radius);

                var trs = (Matrix4x4)float4x4.TRS(center, quaternion.identity, scale);
                writer.Add(height, new HitboxRenderData {
                    Color = colors.GetHitboxColor(), Transform = trs
                });
            }).ScheduleParallel(Dependency);
Code example #21
    protected override JobHandle OnUpdate(JobHandle inputDependencies)
    {
        quandrantMultiHashMap.Clear();
        EntityQuery query = GetEntityQuery(typeof(QuadrantEntity));
        int         quandrantEntityLength = query.CalculateEntityCount();

        if (quandrantEntityLength > quandrantMultiHashMap.Capacity)
        {
            Debug.Log("Setting Capacity " + quandrantEntityLength);
            quandrantMultiHashMap.Capacity = quandrantEntityLength;
        }
        var job = new QuadrantSystemJob()
        {
            entityCommandBuffer   = endSimulationEntityCommandBuffer.CreateCommandBuffer().ToConcurrent(),
            quandrantMultiHashMap = quandrantMultiHashMap.AsParallelWriter(),
        };
        var schedule = job.Schedule(this, inputDependencies);

        endSimulationEntityCommandBuffer.AddJobHandleForProducer(schedule);
        schedule.Complete();
        var currentMousePosition = Camera.main.ScreenToWorldPoint(
            new Vector3(Input.mousePosition.x,
                        Input.mousePosition.y,
                        Camera.main.nearClipPlane));

        DebugDrawQuadrant(currentMousePosition);
        //Debug.Log(quandrantMultiHashMap.CountValuesForKey(GetPositionHasMapKey(currentMousePosition)));
        /*   quandrantMultiHashMap.Dispose(); */
        return(schedule);
    }
Code example #22
 public void Update()
 {
     if (this.m_listIndex > 0)
     {
          //fully parallel computation path
         if (this.FullParallel)
         {
             var paths       = new NativeArray <Path2D>(this.m_listIndex, Allocator.TempJob);
             var targetPaths = new NativeMultiHashMap <int, float2>(this.m_listIndex * this.MaxFindPathPointCount, Allocator.TempJob);
             for (int i = 0; i < this.m_listIndex; i++)
             {
                 var path = this.m_list2dPaths[i];
                 paths[i] = path;
             }
             var job = new Job2DPathParallelProcess();
             job.ABPath = paths;
             job.Paths  = targetPaths.AsParallelWriter();
             var handle = job.Schedule(this.m_listIndex, 32);
             handle.Complete();
              //then hand the results back to the main thread
             for (int i = 0; i < this.m_listIndex; i++)
             {
                 var callback = this.m_callbacks[i];
                 var array    = new NativeList <float2>(Allocator.Persistent);
                 foreach (var temp in targetPaths.GetValuesForKey(i))
                 {
                     array.Add(temp);
                 }
                 callback?.Invoke(array);
             }
             this.m_listIndex = 0;
             this.m_list2dPaths.Clear();
             paths.Dispose();
             targetPaths.Dispose();
         }
         else
         {
             var allJobs = new NativeArray <JobHandle>(this.m_listIndex, Allocator.Temp);
             var data    = new NativeList <float2> [this.m_listIndex];
             for (int i = 0; i < this.m_listIndex; i++)
             {
                 var path = this.m_list2dPaths[i];
                 var job  = new Job2DPathIJobProcess();
                 job.ABPath = path;
                 job.Paths  = new NativeList <float2>(5, Allocator.TempJob);
                 data[i]    = job.Paths;
                 allJobs[i] = job.Schedule();
             }
             JobHandle.CompleteAll(allJobs);
             for (int i = 0; i < this.m_listIndex; i++)
             {
                 var d        = data[i];
                 var callback = this.m_callbacks[i];
                 callback?.Invoke(d);
             }
             allJobs.Dispose();
         }
     }
 }
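
Example #22 copies each path out of the map with GetValuesForKey, which returns an enumerator over all values stored under one key — a foreach-friendly alternative to the TryGetFirstValue/TryGetNextValue pair shown earlier. A standalone sketch with invented data:

using Unity.Collections;
using Unity.Mathematics;
using UnityEngine;

static class GetValuesForKeySketch
{
    public static void Run()
    {
        var paths = new NativeMultiHashMap<int, float2>(8, Allocator.Temp);
        paths.Add(0, new float2(1f, 2f));
        paths.Add(0, new float2(3f, 4f));

        // Enumerates every value stored under key 0.
        foreach (float2 point in paths.GetValuesForKey(0))
        {
            Debug.Log(point);
        }
        paths.Dispose();
    }
}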
Code example #23
    protected override JobHandle OnUpdate(JobHandle inputDeps)
    {
        var pheromones = GetSingleton <PheromoneGrid>();
        var settings   = GetSingleton <AntManagerSettings>();

        int   mapSize       = settings.MapSize;
        float antSpeed      = settings.AntSpeed;
        float trailAddSpeed = settings.TrailAddSpeed;
        float deltaTime     = Time.DeltaTime;

        var gridUpdates = new NativeMultiHashMap <int, float>(mapSize * mapSize, Allocator.TempJob);

        var jobDropLow = new PheromoneDropJob
        {
            GridUpdates   = gridUpdates.AsParallelWriter(),
            MapSize       = mapSize,
            AntSpeed      = antSpeed,
            TrailAddSpeed = trailAddSpeed,
            Excitement    = .3f,
            DeltaTime     = deltaTime,
        };

        var jobDropHigh = new PheromoneDropJob
        {
            GridUpdates   = gridUpdates.AsParallelWriter(),
            MapSize       = mapSize,
            AntSpeed      = antSpeed,
            TrailAddSpeed = trailAddSpeed,
            Excitement    = 1f,
            DeltaTime     = deltaTime,
        };

        var jobGather = new PheromoneGatherUpdatesJob
        {
            Grid        = pheromones.Values,
            GridUpdates = gridUpdates,
        };

        var h1 = jobDropLow.Schedule(m_FoodSeekerQuery, inputDeps);
        var h2 = jobDropHigh.Schedule(m_FoodHolderQuery, h1);

        var h3 = jobGather.Schedule(mapSize * mapSize, mapSize * mapSize / 8, h2);

        PheromoneUpdateDep = h3;
        return(gridUpdates.Dispose(h3));
    }
Code example #24
 /// <summary>
 /// Schedules an attribute job
 /// </summary>
 /// <param name="inputDependencies">JobHandle</param>
 /// <param name="query">The EntityQuery used for filtering group</param>
 /// <param name="AttributeHash">Attribute MultiHashMap mapping entity to attribute value</param>
 /// <param name="job">Returned job handle</param>
 /// <typeparam name="TOper">The type of operator for this attribute job</typeparam>
 private void ScheduleAttributeJob <TOper>(JobHandle inputDependencies, EntityQuery query, out NativeMultiHashMap <Entity, float> AttributeHash, out JobHandle job)
     where TOper : struct, IAttributeOperator, IComponentData
 {
     AttributeHash = new NativeMultiHashMap <Entity, float>(query.CalculateEntityCount(), Allocator.TempJob);
     job           = new GetAttributeValuesJob_Sum <TOper, TAttributeTag>
     {
         AttributeModifierValues = AttributeHash.AsParallelWriter()
     }.Schedule(query, inputDependencies);
 }
Code example #25
        protected override void OnUpdate()
        {
            if (_expandingTilesQuery.IsEmpty)
            {
                return;
            }

            NativeArray <Entity> tilesArray = _expandingTilesQuery.ToEntityArray(Allocator.Temp);
            EntityCommandBuffer  ecb        = ecbSystem.CreateCommandBuffer();

            foreach (Entity tile in tilesArray)
            {
                ecb.AddSharedComponent(
                    tile,
                    _ite < 4
                            ? GridGenerationComponent.InnerNodeLinkingPhase
                            : GridGenerationComponent.ReadyPhase
                    );
            }

            tilesArray.Dispose();

            var maxTileLinksCount = _expandingTilesQuery.CalculateEntityCount() * 6;
            var adjTileLinksMap   = new NativeMultiHashMap <GridPosition, TileLink>(
                maxTileLinksCount,
                Allocator.TempJob
                );

            Dependency =
                new ComputeAdjacentTilesJob
            {
                entityTypeHandle        = GetEntityTypeHandle(),
                tileComponentTypeHandle = GetComponentTypeHandle <TileComponent>(true),
                hexTileOffsets          = _hexTileOffsets,
                mapWriter = adjTileLinksMap.AsParallelWriter()
            }.Schedule(_expandingTilesQuery, Dependency);

            var uniqueKeys = new NativeList <GridPosition>(maxTileLinksCount, Allocator.TempJob);

            Dependency = new GetUniqueMultHMapKeysJob <GridPosition, TileLink>
            {
                multiHashMap = adjTileLinksMap, keys = uniqueKeys
            }.Schedule(Dependency);

            Dependency = new InstantiateAdjacentTilesJob
            {
                adjTileLinksKeys = uniqueKeys,
                adjTileLinksMap  = adjTileLinksMap,
                ecbWriter        = ecbSystem.CreateCommandBuffer()
            }.Schedule(Dependency);

            adjTileLinksMap.Dispose(Dependency);
            uniqueKeys.Dispose(Dependency);

            ecbSystem.AddJobHandleForProducer(Dependency);
        }
コード例 #26
0
        private void CpClearingHouse()
        {
            // For all entities with an inventory, process consumption and production (C and P), then generate offers/bids for supply/demand.
            // Factories and province RGOs first.
            var bidCapacity = EntityManager.CreateEntityQuery(typeof(Factory)).CalculateEntityCount()
                              * GoodsCount;

            var factoryBids = new NativeMultiHashMap <BidKey, BidOffers>(bidCapacity, Allocator.TempJob);
            var fbCon       = factoryBids.AsParallelWriter();

            Entities
            .WithName("Factory_CP_OB")
            .ForEach((Entity facEntity, ref DynamicBuffer <Inventory> inventory, in Identity identity,
                      in Factory factory, in Location location) =>
            {
                ref var deltas = ref identity.MarketIdentity.Value.Deltas;

                // Calculate maximum production capacity in terms of workers depending on current inventory.
                var maximumPossibleManPower = float.PositiveInfinity;
                for (var goodIndex = 0; goodIndex < inventory.Length; goodIndex++)
                {
                    if (deltas[goodIndex] >= 0)
                    {
                        continue;
                    }

                    // Consumption is indicated by negative delta value.
                    maximumPossibleManPower = math.min(maximumPossibleManPower,
                                                       inventory[goodIndex].Value / -deltas[goodIndex]);
                }

                // Determine if there is enough workers to work maximum or if there is too much.
                var goodsMultiplier = math.min(factory.TotalEmployed, maximumPossibleManPower);

                var directInventory = inventory.Reinterpret <float>();

                for (var goodIndex = 0; goodIndex < inventory.Length; goodIndex++)
                {
                    // Apply consumption production pattern.
                    directInventory[goodIndex] += goodsMultiplier * deltas[goodIndex];

                    if (math.abs(inventory[goodIndex].Value - factory.TotalEmployed * deltas[goodIndex]) < 1)
                    {
                        continue;
                    }

                    // Add bids to collector categorized by region and goods for region first exchange.
                    var quantity = math.min(factory.TotalEmployed * deltas[goodIndex], 0) + directInventory[goodIndex];
                    fbCon.Add(new BidKey(location.State, goodIndex, quantity), new BidOffers
                    {
                        Source   = facEntity,
                        Quantity = math.abs(quantity)
                    });
                }
            }).ScheduleParallel();
コード例 #27
0
        public JobHandle Schedule(NativeArray <ArchetypeChunk> chunks)
        {
            var handle = new BuildComponentDataToEntityLookup
            {
                Chunks = chunks,
                ComponentDataToEntity = m_ComponentDataToEntity.AsParallelWriter(),
                ComponentTypeIndex    = TypeManager.GetTypeIndex <TComponentData>()
            }.Schedule(chunks.Length, 64);

            return(handle);
        }
コード例 #28
0
    protected override void OnUpdate()
    {
        // Alternates between clearing the two buckets on a separate thread

        /*
         * NativeMultiHashMap<uint, Entity>.ParallelWriter buckets;
         * JobHandle clearBucketJob;
         * if (useFirstBuffer) {
         *  useFirstBuffer = false;
         *  var bufferBucket = _buckets2;
         *  clearBucketJob = Job.WithCode(() => {
         *      bufferBucket.Clear();
         *  })
         *      .WithName("ClearBucketsOneJob")
         *      .Schedule(Dependency);
         *  buckets = _buckets1.AsParallelWriter();
         *  World.GetOrCreateSystem<UnitMovementSystem>().SetBucketsBuffer(_buckets1);
         * } else {
         *  useFirstBuffer = true; // Use the 1st buffer for the next OnUpdate()
         *  var bufferBucket = _buckets1;
         *  clearBucketJob = Job.WithCode(() => {
         *      bufferBucket.Clear();
         *  })
         *      .WithName("ClearBucketsTwoJob")
         *      .Schedule(Dependency);
         *  buckets = _buckets2.AsParallelWriter();
         *  World.GetOrCreateSystem<UnitMovementSystem>().SetBucketsBuffer(_buckets2);
         * }
         */
        Profiler.BeginSample("Clear buckets");
        _buckets.Clear();
        Profiler.EndSample();
        // Resize if needed, before taking the ParallelWriter
        _buckets.Capacity = math.max(_buckets.Capacity, _unitQuery.CalculateEntityCount());
        var buckets = _buckets.AsParallelWriter();

        var conversionFactor = _conversionFactor;
        var cellWidth        = _cellWidth;

        Entities
        .WithName("HashUnitsJob")
        .WithStoreEntityQueryInField(ref _unitQuery)
        .WithAll <UnitTag>()
        .WithChangeFilter <LocalToWorld>()
        .WithNativeDisableParallelForRestriction(buckets)
        .ForEach((Entity unitEntity, ref AABBPositionHash hash, in UnitDataBlobReference unitAABB, in LocalToWorld ltw) => {
            var aabb        = unitAABB.Value;
            hash.MinPosHash = SpatialHashHelper.Hash(ltw.Position - aabb.Extents, conversionFactor, cellWidth);
            hash.MaxPosHash = SpatialHashHelper.Hash(ltw.Position + aabb.Extents, conversionFactor, cellWidth);
            var posHash     = SpatialHashHelper.Hash(ltw.Position, conversionFactor, cellWidth);
            buckets.Add(posHash, unitEntity);
        }).ScheduleParallel();
Code example #29
    protected override void OnUpdate()
    {
        //calculate the number of entities we have to store (entities with translation component and QuadrantEntity component)
        EntityQuery                        entityQuery        = GetEntityQuery(quadrantQueryDesc);
        NativeArray <Entity>               entityArray        = entityQuery.ToEntityArray(Allocator.TempJob);                               // create the entity array
        NativeArray <Translation>          transArray         = entityQuery.ToComponentDataArray <Translation>(Allocator.TempJob);
        NativeArray <MovingQuadrantEntity> movingQuadEntArray = entityQuery.ToComponentDataArray <MovingQuadrantEntity>(Allocator.TempJob); // create the stationary quadrant entities array
        NativeArray <PreviousMovement>     prevMoveArray      = entityQuery.ToComponentDataArray <PreviousMovement>(Allocator.TempJob);

        //the length is calculated from above
        //NativeMultiHashMap<int, QuadrantData> quadrantMultiHashMap = new NativeMultiHashMap<int, QuadrantData>(entityQuery.CalculateLength(),Allocator.TempJob);

        quadrantMultiHashMap.Clear(); // clear the hashmap

        // if the amount of stuff to add to the hashmap is larger than the capacity of the hashmap
        if (entityQuery.CalculateEntityCount() > quadrantMultiHashMap.Capacity)
        {
            quadrantMultiHashMap.Capacity = entityQuery.CalculateEntityCount(); //Increase the hashmap to hold everything
        }

        //using jobs
        //Cycle through all entities and get their positions
        //selects all entities with a translation component and adds them to the hashmap
        SetQuadrantDataHashMapJob setQuadrantDataHashMapJob = new SetQuadrantDataHashMapJob {
            quadrantMultiHashMap = quadrantMultiHashMap.AsParallelWriter(), //AsParallelWriter allows concurrent writes from the parallel job
            entities             = entityArray,
            translations         = transArray,
            quadEntTypes         = movingQuadEntArray,
            prevMovements        = prevMoveArray
        };
        JobHandle jobHandle = IJobParallelForExtensions.Schedule(setQuadrantDataHashMapJob, entityArray.Length, 32, this.Dependency);

        //JobForEachExtensions.Schedule(setQuadrantDataHashMapJob, entityQuery);

        jobHandle.Complete();



        //Cycle through all entities and get their positions
        //selects all entities with a translation component
        //without jobs

        /*Entities.ForEach((Entity entity, ref Translation translation) =>{
         *  int hashMapKey = GetPositionHashMapKey(translation.Value);
         *  quadrantMultiHashMap.Add(hashMapKey, entity);
         * });*/

        //Debug.Log(GetPositionHashMapKey(MousePosition.GetMouseWorldPositionOnPlane(50)) + " Mouse position: " + MousePosition.GetMouseWorldPositionOnPlane(50));
        //DebugDrawQuadrant(MousePosition.GetMouseWorldPositionOnPlane(50));
        //Debug.Log(GetEntityCountInHashMap(quadrantMultiHashMap,GetPositionHashMapKey(MousePosition.GetMouseWorldPositionOnPlane(50))));

        //quadrantMultiHashMap.Dispose();
    }
Code example #30
        protected override void OnUpdate()
        {
            var inputDeps = JobHandle.CombineDependencies(Dependency, JobHandle, CrossFrameJobHandle);

            if (CommandsMap.IsCreated)
            {
                JobHandle = CommandsMap.Dispose(inputDeps);
            }
            CommandsMap = new NativeMultiHashMap <MapKey, COMMAND>(0, Allocator.TempJob);

            // Schedule, in sequence, the reallocation of the memory needed to handle each command queue, based on the queue sizes.
            // Not done in parallel: a resize consists of a new allocation plus a copy,
            // and doing it in parallel would result in branching allocations.
            NativeArray <int> counter = new NativeArray <int>(1, Allocator.TempJob);

            for (int i = 0; i < CommandsQueues.Count; i++)
            {
                JobHandle = new CountCommands()
                {
                    CommandsQueue     = CommandsQueues[i],
                    TotalCommandCount = counter
                }.Schedule(JobHandle);
            }

            JobHandle AllocationJH = new AllocateCommandsMap()
            {
                TotalCommandCount = counter,
                CommandsMap       = CommandsMap
            }.Schedule(JobHandle);

            JobHandle CounterDisposedJH = counter.Dispose(AllocationJH);

            NativeArray <JobHandle> MapperJobHanldes = new NativeArray <JobHandle>(CommandsQueues.Count, Allocator.TempJob);
            var CommandsMapParallelWriter            = CommandsMap.AsParallelWriter();

            for (int i = 0; i < CommandsQueues.Count; i++)
            {
                var jh = new MapCommands()
                {
                    CommandsMap   = CommandsMapParallelWriter,
                    CommandsQueue = CommandsQueues[i]
                }.Schedule(AllocationJH);

                MapperJobHandles[i] = CommandsQueues[i].Dispose(jh);
            }

            CommandsQueues.Clear();

            Dependency     = JobHandle.CombineDependencies(AllocationJH, JobHandle.CombineDependencies(MapperJobHandles));
            finalJobHandle = Dependency;
            MapperJobHandles.Dispose();
        }