protected override JobHandle OnUpdate(JobHandle inputDependencies)
    {
        var cmndBuffer = commandBuffer.CreateCommandBuffer().ToConcurrent();

        int unitCount = damageableUnits.CalculateEntityCount();

        DamageMap.Clear();

        if (unitCount > DamageMap.Capacity)
        {
            DamageMap.Capacity = unitCount;
        }

        var job = new ZombieAttackSystemJob
        {
            CommandBuffer   = cmndBuffer,
            MinDistance     = GameGlobals.zombieAttackReach,
            DamageTakerData = GetComponentDataFromEntity <DamageTakerTag>(true),
            HealthData      = GetComponentDataFromEntity <HealthComponent>(true),
            EntityDamageMap = DamageMap.AsParallelWriter()
        }.Schedule(this, inputDependencies);

        commandBuffer.AddJobHandleForProducer(job);
        return(job);
    }
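Nearly every snippet on this page repeats the same "clear, then grow Capacity to the query's entity count" step before refilling a NativeMultiHashMap. A minimal helper sketch of that pattern (the name and placement are hypothetical, not taken from any of these projects):

    static void ClearAndEnsureCapacity<TKey, TValue>(ref NativeMultiHashMap<TKey, TValue> map, int count)
        where TKey : struct, System.IEquatable<TKey>
        where TValue : struct
    {
        // Remove last frame's entries, then grow (never shrink) the capacity to fit the new count.
        map.Clear();
        if (count > map.Capacity)
        {
            map.Capacity = count;
        }
    }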
Example #2
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            BoidHashMap.cellSize = Settings.Instance.Boid.surroundingsViewRange;
            //EntityArchetype archetype = EntityManager.CreateArchetype(typeof(BoidComponent), typeof(Translation), typeof(PhysicsVelocity));
            EntityQuery query = EntityManager.CreateEntityQuery(typeof(BoidComponent), typeof(Translation), typeof(PhysicsVelocity));

            BoidMap.Clear();
            int queryCount = query.CalculateEntityCount();

            if (queryCount > BoidMap.Capacity)
            {
                BoidMap.Capacity = queryCount;
            }

            NativeMultiHashMap <int, BoidData> .ParallelWriter parallelWriter = BoidMap.AsParallelWriter();
            float cellSize = BoidHashMap.cellSize;

            inputDeps = Entities.WithAny <BoidComponent>().ForEach((Entity entity, ref Translation translation, ref PhysicsVelocity velocity) =>
            {
                parallelWriter.Add(Utils.GetHash(translation.Value, cellSize), new BoidData {
                    entity = entity, position = translation.Value, velocity = velocity.Linear
                });
            }).Schedule(inputDeps);


            return(inputDeps);
        }
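The example above only writes into BoidMap; reading the candidates for one cell back out is usually done with TryGetFirstValue/TryGetNextValue. A rough sketch, reusing BoidData and Utils.GetHash from the snippet (the helper itself is hypothetical):

    static void CollectCell(NativeMultiHashMap<int, BoidData> boidMap, float3 position, float cellSize, NativeList<BoidData> results)
    {
        // Hash the position to the same cell key the writer used, then iterate every value stored under it.
        int key = Utils.GetHash(position, cellSize);
        if (boidMap.TryGetFirstValue(key, out BoidData data, out var iterator))
        {
            do
            {
                results.Add(data);
            } while (boidMap.TryGetNextValue(out data, ref iterator));
        }
    }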
Example #3
        protected override void OnUpdate()
        {
            EntityQuery eq = GetEntityQuery(typeof(Translation), typeof(CollisionComponent));

            Debug.Log("Entity count = " + eq.CalculateEntityCount());

            quadTreeMap.Clear();
            if (eq.CalculateEntityCount() > quadTreeMap.Capacity)
            {
                quadTreeMap.Capacity = eq.CalculateEntityCount();
            }

            SetQuadrantHashMapJob sqj = new SetQuadrantHashMapJob
            {
                quadTreeMap = quadTreeMap.AsParallelWriter(),
            };
            JobHandle job = JobForEachExtensions.Schedule(sqj, eq);

            job.Complete();

            //Debug.Log("Total number of entities = " + quadTreeMap.Length);

            DrawQuadrant(Camera.main.ScreenToWorldPoint(Input.mousePosition));
            //Debug.Log("Entities in Quadrant " + GetPositionHashMapKey(Camera.main.ScreenToWorldPoint(Input.mousePosition)) + " = " + GetEntityCount(quadTreeMap, GetPositionHashMapKey(Camera.main.WorldToScreenPoint(Input.mousePosition))));
        }
        protected override void Prepare(ref VoronoiJob job, float delta)
        {
            if (!TryGetFirstInCompound(out m_verticesProvider) ||
                !TryGetFirstInCompound(out m_triadProvider))
            {
                throw new System.Exception("Missing providers");
            }

            //Clear previously built output data
            m_outputVertices.Clear();
            m_outputSites.Clear();
            m_outputEdges.Clear();

            job.centroidWeight = m_centroidWeight;

            job.inputVertices = m_verticesProvider.outputVertices;

            job.inputTriangles     = m_triadProvider.outputTriangles;
            job.inputHullVertices  = m_triadProvider.outputHullVertices;
            job.inputUnorderedHull = m_triadProvider.outputUnorderedHull;

            job.outputVertices = m_outputVertices;
            job.outputSites    = m_outputSites;
            job.outputEdges    = m_outputEdges;
        }
Example #5
        protected override void OnUpdate()
        {
            QuadrantHashMap.Clear();

            var navFlockingSettings = navSystem.FlockingSettings;
            var entityCount         = GetEntityQuery(typeof(NavAgent)).CalculateEntityCount();

            if (entityCount > QuadrantHashMap.Capacity)
            {
                QuadrantHashMap.Capacity = entityCount;
            }

            var parallelHashMap = QuadrantHashMap.AsParallelWriter();

            Entities
            .WithAll <NavAgent>()
            .ForEach((Entity entity, in LocalToWorld localToWorld) =>
            {
                parallelHashMap.Add(
                    HashPosition(localToWorld.Position, navFlockingSettings),
                    new QuadrantData
                    {
                        LocalToWorld = localToWorld
                    });
            })
            .WithName("NavHashPositionJob")
            .ScheduleParallel();
        }
Example #6
        protected override void OnUpdate()
        {
            EnemyHashCodes.Clear();
            PlayerBulletCodes.Clear();
            SnowBulletCodes.Clear();
            PlayerBulletPositionHashCodeSet.Clear();
            SnowBulletPositionHashCodeSet.Clear();
            var manager = EntityManager;

            manager.AddMatchingArchetypes(qEnemy, fEnemy);
            manager.AddMatchingArchetypes(qPlayerBullet, fPlayerBullet);
            manager.AddMatchingArchetypes(qSnowBullet, fSnowBullet);
            var PositionTypeRO = manager.GetArchetypeChunkComponentType <Position>(true);
            var EntityType     = manager.GetArchetypeChunkEntityType();

            using (var enemyChunks = manager.CreateArchetypeChunkArray(fEnemy, Allocator.Temp))
            {
                AddHashCode(PositionTypeRO, EntityType, enemyChunks, EnemyHashCodes);
            }
            using (var playerBulletChunks = manager.CreateArchetypeChunkArray(fPlayerBullet, Allocator.Temp))
            {
                AddHashCode(PositionTypeRO, EntityType, playerBulletChunks, PlayerBulletCodes, PlayerBulletPositionHashCodeSet);
            }
            using (var snowBulletChunks = manager.CreateArchetypeChunkArray(fSnowBullet, Allocator.Temp))
            {
                AddHashCode(PositionTypeRO, EntityType, snowBulletChunks, SnowBulletCodes, SnowBulletPositionHashCodeSet);
            }
        }
    protected override void OnUpdate()
    {
        EntityQuery entityQuery = GetEntityQuery(typeof(Translation), typeof(QuadrantEntity));

        quadrantMap.Clear();
        if (entityQuery.CalculateEntityCount() > quadrantMap.Capacity)
        {
            quadrantMap.Capacity = entityQuery.CalculateEntityCount();
        }

        // Create Job
        SetQuadrantDataHashMapJob job = new SetQuadrantDataHashMapJob
        {
            hashMap = quadrantMap.AsParallelWriter()
        };

        // Schedule and complete Job
        JobHandle jobHandle = JobForEachExtensions.Schedule(job, entityQuery);

        jobHandle.Complete();

        DebugDrawQuadrant(UtilsClass.GetMouseWorldPosition());

        int quadrantKey = GetKeyFromPosition(UtilsClass.GetMouseWorldPosition());
        int entityCount = GetEntityCountInQuadrant(quadrantKey);

        Debug.Log("There are " + entityCount + " in Quadrant " + quadrantKey);
    }
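GetEntityCountInQuadrant is defined elsewhere in that project; one straightforward way to implement such a helper is to ask the hashmap directly (a sketch, not the project's actual code):

    int GetEntityCountInQuadrant(int quadrantKey)
    {
        // CountValuesForKey walks every value stored under the key and returns how many there are.
        return quadrantMap.CountValuesForKey(quadrantKey);
    }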
Example #8
        /// <summary>
        /// Schedules background jobs to move all agents using the given delta time
        /// </summary>
        public JobHandle MoveAgents(float dt, JobHandle dependsOn = default)
        {
            m_spatialMap.Clear();

            AgentWorld agentWorld = new AgentWorld();

            agentWorld.offset   = world.transform.position;
            agentWorld.rotation = world.transform.rotation;
            agentWorld.center   = world.data.center;
            agentWorld.scale    = world.scale;
            agentWorld.size     = new int3(
                world.size.x,
                world.size.y,
                world.size.z
                );

            if (!movingAllAgents.IsCompleted)
            {
                movingAllAgents.Complete();
            }

            // update the spatial map with all agent positions
            JobHandle spatialMaps = UpdateSpatialMap(agentWorld, dependsOn);

            // update each agents position by archetype
            for (int i = 0; i < archetypes.Count; i++)
            {
                m_movingByArchetype[i] = MoveByArchetype(i, agentWorld, dt, spatialMaps);
            }

            movingAllAgents = JobHandle.CombineDependencies(m_movingByArchetype);
            return(movingAllAgents);
        }
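A hedged usage sketch for MoveAgents (the driver class and field names are assumptions, not part of the project): schedule the movement jobs once per frame and complete the combined handle before anything reads the agents back.

    public class AgentMoverDriver : UnityEngine.MonoBehaviour
    {
        public AgentMovementSystem movers; // assumed type exposing MoveAgents(float, JobHandle)
        JobHandle m_moveHandle;

        void Update()
        {
            // Kick off the background movement jobs with this frame's delta time.
            m_moveHandle = movers.MoveAgents(UnityEngine.Time.deltaTime);
        }

        void LateUpdate()
        {
            // Make sure all agent movement has finished before the frame is rendered.
            m_moveHandle.Complete();
        }
    }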
        public void Tick()
        {
            if (ActiveCamera == null || !_batcher.IsCreated)
            {
                return;
            }
            //shared component ids can only be visited through archetype chunks,
            //so we iterate over archetype chunks here
            //https://github.com/Unity-Technologies/EntityComponentSystemSamples/blob/8f94d72d1fd9b8db896646d9d533055917dc265a/Documentation/reference/chunk_iteration.md
            _batcher.Clear();
            UnityEngine.Profiling.Profiler.BeginSample("gather chunks");
            NativeArray <ArchetypeChunk> chunks = EntityManager.CreateArchetypeChunkArray(_query, Allocator.TempJob);

            UnityEngine.Profiling.Profiler.EndSample();
            UnityEngine.Profiling.Profiler.BeginSample("start cull");
            var cullJob = new CullJob()
            {
                EntityType       = GetArchetypeChunkEntityType(),
                Chunks           = chunks,
                RenderTypes      = GetArchetypeChunkSharedComponentType <InstanceRendererData>(),
                LocalToWorldType = GetArchetypeChunkComponentType <LocalToWorld>(true),
                Batcher          = _batcher.ToConcurrent(),
                CamPos           = ActiveCamera.transform.position,
                CullDistance     = _cullDistance
            };
            var deps = cullJob.Schedule(chunks.Length, 1);

            deps.Complete();
            UnityEngine.Profiling.Profiler.EndSample();
            UnityEngine.Profiling.Profiler.BeginSample("start render");
            Render();
            UnityEngine.Profiling.Profiler.EndSample();
        }
Example #10
        protected override void OnUpdate()
        {
            // Compute each object's hash, then put it into the corresponding map
            EntityQuery query = GetEntityQuery(typeof(Translation), typeof(QuardrantType));

            /* Moved into a job for execution:
             * Entities.ForEach((Entity entity, ref Translation translation) =>
             * {
             *  int hasMapKey = GetPositionHasMapKey(translation.Value);
             *  quardrantMultiHasMap.Add(hasMapKey, entity);
             * });
             */

            quardrantMultiHasMap.Clear();
            if (query.CalculateEntityCount() > quardrantMultiHasMap.Capacity)
            {
                quardrantMultiHasMap.Capacity = query.CalculateEntityCount();
            }
            //quardrantMultiHasMap = new NativeMultiHashMap<int, QuardrantData>(query.CalculateEntityCount(), Allocator.TempJob);

            SetQuadrantDataHasMapJob job = new SetQuadrantDataHasMapJob()
            {
                map = quardrantMultiHasMap.AsParallelWriter(),
            };
            var handle = JobForEachExtensions.Schedule(job, query);

            handle.Complete();

            // DebugDrawQuadrant(UtilClass.GetMouseWorldPosition(Camera.main));
            // Number of entities in the quadrant under the cursor
            // Debug.Log(GetEntityCountInHasMap(quardrantMultiHasMap, GetPositionHasMapKey(UtilClass.GetMouseWorldPosition())));
        }
        protected override void OnUpdate()
        {
            if (ActiveCamera == null)
            {
                return;
            }
            _batcher.Clear();
            NativeArray <ArchetypeChunk> chunks = EntityManager.CreateArchetypeChunkArray(_query, Allocator.TempJob);
            var cullJob = new CullJob()
            {
                EntityType       = GetArchetypeChunkEntityType(),
                Chunks           = chunks,
                RenderTypes      = GetArchetypeChunkSharedComponentType <InstanceRendererData>(),
                LocalToWorldType = GetArchetypeChunkComponentType <LocalToWorld>(true),
                Batcher          = _batcher.ToConcurrent(),
                CamPos           = ActiveCamera.transform.position,
                CullDistance     = _cullDistance
            };

            var deps = cullJob.Schedule(chunks.Length, 32);

            deps.Complete();

            Render();
        }
Example #12
        protected override void OnUpdate()
        {
            var config = Config.Get <DebugConfig>();

            _renderData.Clear();
            var writer = _renderData.AsParallelWriter();

            DebugConfig.HitboxColor colors = config.HitboxColors;

            var hitboxJob = Entities
                            .WithBurst(FloatMode.Fast)
                            .ForEach((in Hitbox hitbox, in HitboxState state, in LocalToWorld transform) => {
                if (!state.Enabled)
                {
                    return;
                }
                float3 currentPosition = math.transform(transform.Value, float3.zero);
                float3 prevPosition    = state.PreviousPosition ?? currentPosition;
                float3 center          = (currentPosition + prevPosition) / 2;
                float height           = math.length(prevPosition - currentPosition) / hitbox.Radius;
                var scale = new float3(hitbox.Radius);

                var trs = (Matrix4x4)float4x4.TRS(center, quaternion.identity, scale);
                writer.Add(height, new HitboxRenderData {
                    Color = colors.GetHitboxColor(), Transform = trs
                });
            }).ScheduleParallel(Dependency);
Example #13
    protected override JobHandle OnUpdate(JobHandle inputDependencies)
    {
        var job = inputDependencies;

        spatialMap_.Clear();
        keysList_.Clear();

        // Build our spatial map
        job = new BuildSpatialMap
        {
            spatialMap = spatialMap_.ToConcurrent(),
        }.Schedule(this, job);

        // Initialize the size of our keys list. We can't know the size of our list
        // during schedule time so we need to use "DeferredJobArray"
        // Example in Packages/Jobs/Unity.Jobs.Test/NativeListDeferredArrayTests
        job = new InitializeKeysList
        {
            outKeys    = keysList_,
            spatialMap = spatialMap_,
        }.Schedule(job);

        // Check for collisions and tag entities
        job = new GenerateCollisionData
        {
            spatialMap         = spatialMap_,
            keys               = keysList_.AsDeferredJobArray(),
            colliderFromEntity = GetComponentDataFromEntity <ECSCollider>(true),
            posFromEntity      = GetComponentDataFromEntity <Translation>(true),
            commandBuffer      = initBufferSystem_.CreateCommandBuffer().ToConcurrent(),
        }.Schedule(keysList_, 5, job);

        return(job);
    }
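The InitializeKeysList job isn't shown above; a single-threaded job that rebuilds the key list from the spatial map could look roughly like this (key and value types are assumptions):

    struct CollectUniqueKeysJob : IJob
    {
        [ReadOnly] public NativeMultiHashMap<int, Entity> spatialMap; // value type assumed
        public NativeList<int> outKeys;

        public void Execute()
        {
            // Copy the map's unique keys into the list so a later job can iterate them as a deferred array.
            outKeys.Clear();
            var unique = spatialMap.GetUniqueKeyArray(Allocator.Temp);
            for (int i = 0; i < unique.Item2; i++)
            {
                outKeys.Add(unique.Item1[i]);
            }
            unique.Item1.Dispose();
        }
    }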
        /// <summary>
        /// Update the store with the recorded BlobAsset/UnityObject associations.
        /// </summary>
        /// <remarks>
        /// Users don't have to call this method because <see cref="Dispose"/> will do it.
        /// This method can be called multiple times; only the first call will matter.
        /// </remarks>
        public void UpdateBlobStore()
        {
            var keys = m_BlobPerUnityObject.GetUniqueKeyArray(Allocator.Temp);

            using (keys.Item1)
            {
                for (var k = 0; k < keys.Item2; ++k)
                {
                    var key        = keys.Item1[k];
                    var valueCount = m_BlobPerUnityObject.CountValuesForKey(key);
                    var valueArray = new NativeArray <Hash128>(valueCount, Allocator.Temp);
                    var i          = 0;
                    if (m_BlobPerUnityObject.TryGetFirstValue(key, out var value, out var iterator))
                    {
                        do
                        {
                            valueArray[i++] = value;
                        }while (m_BlobPerUnityObject.TryGetNextValue(out value, ref iterator));

                        valueArray.Sort();
                    }

                    m_BlobAssetStore.UpdateBlobAssetForUnityObject <TB>(key, valueArray);
                    valueArray.Dispose();
                }
            }

            m_BlobPerUnityObject.Clear();
        }
Example #15
        /// <summary>
        /// Schedules an attribute job
        /// </summary>
        /// <param name="inputDependencies">JobHandle</param>
        /// <param name="query">The EntityQuery used for filtering group</param>
        /// <param name="AttributeHash">Attribute MultiHashMap mapping entity to attribute value</param>
        /// <param name="job">Returned job handle</param>
        /// <typeparam name="TOper">The type of operator for this attribute job</typeparam>
        private void ScheduleAttributeJob <TOper>(ref JobHandle inputDependencies, ref EntityQuery query, ref NativeMultiHashMap <Entity, float> AttributeHash, out JobHandle job)
            where TOper : struct, IAttributeOperator, IComponentData
        {
            var nEntities    = query.CalculateEntityCount();
            var hashCapacity = AttributeHash.Capacity;

            AttributeHash.Clear();
            if (nEntities == 0)
            {
                job = inputDependencies;
                return;
            }
            if (hashCapacity < nEntities)   // We need to increase hash capacity
            {
                AttributeHash.Capacity = (int)(nEntities * 1.1);
            }
            else if (hashCapacity > nEntities * 4)     // We need to reduce hash capacity
            {
                AttributeHash.Dispose();
                AttributeHash = new NativeMultiHashMap <Entity, float>(nEntities, Allocator.Persistent);
            }
            // // AttributeHash = new NativeMultiHashMap<Entity, float>(query.CalculateEntityCount(), Allocator.TempJob);
            inputDependencies = new GetAttributeValuesJob_Sum <TOper, TAttributeTag>
            {
                owners                  = GetArchetypeChunkComponentType <AttributesOwnerComponent>(false),
                attributeModifiers      = GetArchetypeChunkComponentType <AttributeModifier <TOper, TAttributeTag> >(false),
                AttributeModifierValues = AttributeHash.AsParallelWriter()
            }.Schedule(query, inputDependencies);
            job = inputDependencies;
        }
Example #16
    protected override void OnUpdate()
    {
        EntityQuery entityQuery = GetEntityQuery(typeof(Translation), typeof(QuadrantEntity));

        quadrantMultiHashMap.Clear();
        if (entityQuery.CalculateEntityCount() > quadrantMultiHashMap.Capacity)
        {
            quadrantMultiHashMap.Capacity = entityQuery.CalculateEntityCount();
        }

        NativeMultiHashMap <int, QuadrantData> .ParallelWriter quadrantMultiHashMap2 = quadrantMultiHashMap.AsParallelWriter(); //TODO smells

        Entities.ForEach((Entity entity, Translation t, ref QuadrantEntity qe, in InfectionComponent ic) => {
            if (ic.infected)
            {
                int hashMapKey = GetPositionHashMapKey(t.Value);
                //Debug.Log(hashMapKey);
                quadrantMultiHashMap2.Add(hashMapKey, new QuadrantData {
                    entity         = entity,
                    position       = t.Value,
                    quadrantEntity = qe
                });
            }
        }).ScheduleParallel();
    }
Example #17
        public void Clear()
        {
            if (ActionLookup.IsCreated)
            {
                ActionLookup.Clear();
            }
            if (ResultingStateLookup.IsCreated)
            {
                ResultingStateLookup.Clear();
            }
            if (PredecessorGraph.IsCreated)
            {
                PredecessorGraph.Clear();
            }

            if (StateInfoLookup.IsCreated)
            {
                StateInfoLookup.Clear();
            }
            if (ActionInfoLookup.IsCreated)
            {
                ActionInfoLookup.Clear();
            }
            if (StateTransitionInfoLookup.IsCreated)
            {
                StateTransitionInfoLookup.Clear();
            }
        }
        protected override void OnUpdate()
        {
            VehiclesSegmentsHashMap.Clear();
            EntityQuery entityQuery = GetEntityQuery(typeof(VehiclePositionComponent));

            if (entityQuery.CalculateEntityCount() > VehiclesSegmentsHashMap.Capacity)
            {
                VehiclesSegmentsHashMap.Capacity = entityQuery.CalculateEntityCount();
            }

            NativeMultiHashMap <Entity, VehicleSegmentData> .ParallelWriter multiHashMap = VehiclesSegmentsHashMap.AsParallelWriter();
            Dependency = Entities.ForEach((Entity entity, int entityInQueryIndex,
                                           in VehicleSegmentInfoComponent vehicleSegmentInfoComponent,
                                           in VehiclePositionComponent vehiclePositionComponent,
                                           in VehicleConfigComponent vehicleConfigComponent) =>
            {
                Entity segmentEntity = vehicleSegmentInfoComponent.IsBackInPreviousSegment
                    ? vehicleSegmentInfoComponent.PreviousSegment
                    : vehicleSegmentInfoComponent.HeadSegment;
                multiHashMap.Add(segmentEntity, new VehicleSegmentData
                {
                    Entity          = entity,
                    BackSegPosition = vehiclePositionComponent.BackSegPos,
                    VehicleSize     = vehicleConfigComponent.Length
                });
            }).ScheduleParallel(Dependency);
 public void Execute()
 {
     TradeAsks.Clear();
     TradeBids.Clear();
     DeltaMoney.Clear();
     ProfitsByLogic.Clear();
 }
        protected override void OnUpdate()
        {
            var cellSize = Environment.cellSize;
            var offset   = Environment.minXY;
            var numCell  = Environment.numCell;

            var entityQuery = this.GetEntityQuery(typeof(VehicleData));

            cellEntityElementHashMap.Clear();
            if (entityQuery.CalculateEntityCount() > cellEntityElementHashMap.Capacity)
            {
                cellEntityElementHashMap.Capacity = entityQuery.CalculateEntityCount();
            }
            var parallelWriter = cellEntityElementHashMap.AsParallelWriter();

            this.Entities.WithAll <VehicleData>().ForEach((Entity entity, in EntityData entityData) =>
            {
                var index = GetCellRawIndex(entityData.position, offset, numCell, cellSize);
                parallelWriter.Add(index, new CellEntityElement
                {
                    entity     = entity,
                    entityData = entityData,
                    position   = entityData.position,
                    radius     = entityData.radius
                });
            }).ScheduleParallel();
        }
 protected override JobHandle OnUpdate(JobHandle inputDeps)
 {
     if (agent.Length > 0)
     {
         indexMap.Clear();
         nextPositionMap.Clear();
         var hashPositionsJob = new HashPositionsJob
         {
             mapSize         = querySystem.MaxMapWidth,
             agents          = agent.Agents,
             avoidances      = agent.Avoidances,
             indexMap        = indexMap,
             nextPositionMap = nextPositionMap
         };
         var dt = Time.deltaTime;
         var hashPositionsJobHandle = hashPositionsJob.Schedule(agent.Length, 64, inputDeps);
         var avoidanceJob           = new NavAgentAvoidanceJob
         {
             dt              = dt,
             indexMap        = indexMap,
             nextPositionMap = nextPositionMap,
             agents          = agent.Agents,
             avoidances      = agent.Avoidances,
             entities        = agent.Entities,
             navMeshQuery    = navMeshQuery
         };
         var avoidanceJobHandle = avoidanceJob.Schedule(indexMap, 64, hashPositionsJobHandle);
         return(avoidanceJobHandle);
     }
     return(inputDeps);
 }
Example #22
    protected override JobHandle OnUpdate(JobHandle inputDependencies)
    {
        quandrantMultiHashMap.Clear();
        EntityQuery query = GetEntityQuery(typeof(QuadrantEntity));
        int         quandrantEntityLength = query.CalculateEntityCount();

        if (quandrantEntityLength > quandrantMultiHashMap.Capacity)
        {
            Debug.Log("Setting Capacity " + quandrantEntityLength);
            quandrantMultiHashMap.Capacity = quandrantEntityLength;
        }
        var job = new QuadrantSystemJob()
        {
            entityCommandBuffer   = endSimulationEntityCommandBuffer.CreateCommandBuffer().ToConcurrent(),
            quandrantMultiHashMap = quandrantMultiHashMap.AsParallelWriter(),
        };
        var schedule = job.Schedule(this, inputDependencies);

        endSimulationEntityCommandBuffer.AddJobHandleForProducer(schedule);
        schedule.Complete();
        var currentMousePosition = Camera.main.ScreenToWorldPoint(
            new Vector3(Input.mousePosition.x,
                        Input.mousePosition.y,
                        Camera.main.nearClipPlane));

        DebugDrawQuadrant(currentMousePosition);
        //Debug.Log(quandrantMultiHashMap.CountValuesForKey(GetPositionHasMapKey(currentMousePosition)));
        /*   quandrantMultiHashMap.Dispose(); */
        return(schedule);
    }
Example #23
 public void Execute()
 {
     if (Capacity != default && Capacity > Source.Capacity)
     {
         Source.Capacity = Capacity;
     }
     Source.Clear();
 }
 public void Execute()
 {
     Effects.Clear();
     if (Effects.Capacity < EffectReader.ComputeItemCount())
     {
         Effects.Capacity = EffectReader.ComputeItemCount();
     }
 }
Example #25
 /// <inheritdoc />
 public void Execute()
 {
     TopologicalListFrontToBack.Clear();
     IsometricDataPresentOnCameraView.Clear();
     IsometricDepthAssigned.Clear();
     IsometricElementFromTopologicalList.Clear();
     HelpTopologicalList.Clear();
 }
Example #26
 public void EndSim()
 {
     foreach (var line in Lines)
     {
         Destroy(line.gameObject);
     }
     Lines.Clear();
     hashmap.Clear();
 }
Example #27
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            int cellTagMap_size = (int)m_cloudTagDesiredQuantitySystem.TotalTags * 2;

            if (lastsize_cellTagMap != cellTagMap_size)
            {
                cellTagMap.Dispose();
                cellTagMap = new NativeMultiHashMap <int, int>(cellTagMap_size, Allocator.Persistent);
            }
            else
            {
                cellTagMap.Clear();
            }
            lastsize_cellTagMap = cellTagMap_size;


            if (lastsize_tagQuantityByCloud != m_tagCloudGroup.Length)
            {
                tagQuantityByCloud.Dispose();

                tagQuantityByCloud = new NativeArray <int>(m_tagCloudGroup.Length, Allocator.Persistent);
            }
            lastsize_tagQuantityByCloud = m_tagCloudGroup.Length;

            if (lastsize_cloudIDPositions != m_tagCloudGroup.Length)
            {
                cloudIDPositions.Dispose();

                cloudIDPositions = new NativeHashMap <int, CloudIDPosRadius>(m_tagCloudGroup.Length, Allocator.Persistent);
            }
            else
            {
                cloudIDPositions.Clear();
            }
            lastsize_cloudIDPositions = m_tagCloudGroup.Length;


            FillMapLists fillMapListsJob = new FillMapLists
            {
                cellTagMap  = cellTagMap.ToConcurrent(),
                tagQuantity = tagQuantityByCloud,
                CloudData   = m_tagCloudGroup.CloudData,
                Position    = m_tagCloudGroup.Position,
                cloudPos    = cloudIDPositions.ToConcurrent(),
                cellIDmap   = m_cellIdMapSystem.cellId2Cellfloat3
            };
            var fillMapDep = fillMapListsJob.Schedule(m_tagCloudGroup.Length, 64, inputDeps);

            fillMapDep.Complete();


            return(fillMapDep);
        }
Example #28
    protected override void OnUpdate()
    {
        //calculate the number of entities we have to store (entities with translation component and QuadrantEntity component)
        EntityQuery                        entityQuery        = GetEntityQuery(quadrantQueryDesc);
        NativeArray <Entity>               entityArray        = entityQuery.ToEntityArray(Allocator.TempJob);                               // create the entity array
        NativeArray <Translation>          transArray         = entityQuery.ToComponentDataArray <Translation>(Allocator.TempJob);
        NativeArray <MovingQuadrantEntity> movingQuadEntArray = entityQuery.ToComponentDataArray <MovingQuadrantEntity>(Allocator.TempJob); // create the stationary quadrant entities array
        NativeArray <PreviousMovement>     prevMoveArray      = entityQuery.ToComponentDataArray <PreviousMovement>(Allocator.TempJob);

        //the length is calculated from above
        //NativeMultiHashMap<int, QuadrantData> quadrantMultiHashMap = new NativeMultiHashMap<int, QuadrantData>(entityQuery.CalculateLength(),Allocator.TempJob);

        quadrantMultiHashMap.Clear(); // clear the hashmap

        // if the amount of stuff to add to the hashmap is larger than the capacity of the hashmap
        if (entityQuery.CalculateEntityCount() > quadrantMultiHashMap.Capacity)
        {
            quadrantMultiHashMap.Capacity = entityQuery.CalculateEntityCount(); //Increase the hashmap to hold everything
        }

        //using jobs
        //Cycle through all entities and get their positions
        //selects all entities with a translation component and adds them to the hashmap
        SetQuadrantDataHashMapJob setQuadrantDataHashMapJob = new SetQuadrantDataHashMapJob {
            quadrantMultiHashMap = quadrantMultiHashMap.AsParallelWriter(), //AsParallelWriter used to allow for concurrent writing
            entities             = entityArray,
            translations         = transArray,
            quadEntTypes         = movingQuadEntArray,
            prevMovements        = prevMoveArray
        };
        JobHandle jobHandle = IJobParallelForExtensions.Schedule(setQuadrantDataHashMapJob, entityArray.Length, 32, this.Dependency);

        //JobForEachExtensions.Schedule(setQuadrantDataHashMapJob, entityQuery);

        jobHandle.Complete();



        //Cycle through all entities and get their positions
        //selects all entities with a translation component
        //without jobs

        /*Entities.ForEach((Entity entity, ref Translation translation) =>{
         *  int hashMapKey = GetPositionHashMapKey(translation.Value);
         *  quadrantMultiHashMap.Add(hashMapKey, entity);
         * });*/

        //Debug.Log(GetPositionHashMapKey(MousePosition.GetMouseWorldPositionOnPlane(50)) + " Mouse position: " + MousePosition.GetMouseWorldPositionOnPlane(50));
        //DebugDrawQuadrant(MousePosition.GetMouseWorldPositionOnPlane(50));
        //Debug.Log(GetEntityCountInHashMap(quadrantMultiHashMap,GetPositionHashMapKey(MousePosition.GetMouseWorldPositionOnPlane(50))));

        //quadrantMultiHashMap.Dispose();
    }
    protected override void OnUpdate()
    {
        // Alternates between clearing the two buckets in a separate thread

        /*
         * NativeMultiHashMap<uint, Entity>.ParallelWriter buckets;
         * JobHandle clearBucketJob;
         * if (useFirstBuffer) {
         *  useFirstBuffer = false;
         *  var bufferBucket = _buckets2;
         *  clearBucketJob = Job.WithCode(() => {
         *      bufferBucket.Clear();
         *  })
         *      .WithName("ClearBucketsOneJob")
         *      .Schedule(Dependency);
         *  buckets = _buckets1.AsParallelWriter();
         *  World.GetOrCreateSystem<UnitMovementSystem>().SetBucketsBuffer(_buckets1);
         * } else {
         *  useFirstBuffer = true; // Use the 1st buffer for the next OnUpdate()
         *  var bufferBucket = _buckets1;
         *  clearBucketJob = Job.WithCode(() => {
         *      bufferBucket.Clear();
         *  })
         *      .WithName("ClearBucketsTwoJob")
         *      .Schedule(Dependency);
         *  buckets = _buckets2.AsParallelWriter();
         *  World.GetOrCreateSystem<UnitMovementSystem>().SetBucketsBuffer(_buckets2);
         * }
         */
        Profiler.BeginSample("Clear buckets");
        _buckets.Clear();
        Profiler.EndSample();
        var buckets = _buckets.AsParallelWriter();

        // Resize if needed
        _buckets.Capacity = math.max(_buckets.Capacity, _unitQuery.CalculateEntityCount());

        var conversionFactor = _conversionFactor;
        var cellWidth        = _cellWidth;

        Entities
        .WithName("HashUnitsJob")
        .WithStoreEntityQueryInField(ref _unitQuery)
        .WithAll <UnitTag>()
        .WithChangeFilter <LocalToWorld>()
        .WithNativeDisableParallelForRestriction(buckets)
        .ForEach((Entity unitEntity, ref AABBPositionHash hash, in UnitDataBlobReference unitAABB, in LocalToWorld ltw) => {
            var aabb        = unitAABB.Value;
            hash.MinPosHash = SpatialHashHelper.Hash(ltw.Position - aabb.Extents, conversionFactor, cellWidth);
            hash.MaxPosHash = SpatialHashHelper.Hash(ltw.Position + aabb.Extents, conversionFactor, cellWidth);
            var posHash     = SpatialHashHelper.Hash(ltw.Position, conversionFactor, cellWidth);
            buckets.Add(posHash, unitEntity);
        }).ScheduleParallel();
Example #30
        public IEnumerator TestPerformanceOnLargeGraphBudget10()
        {
            var planGraph = PlanGraphUtility.BuildLattice(midLatticeDepth: 10);

            var nodeCount = planGraph.Size;
            var depthMap  = new NativeHashMap <int, int>(nodeCount, Allocator.TempJob);
            var queue     = new NativeQueue <StateHorizonPair <int> >(Allocator.TempJob);

            planGraph.GetExpandedDepthMap(0, depthMap, queue);

            var selectedUnexpandedStates = new NativeList <int>(1, Allocator.Persistent);
            var allExpandedStates        = new NativeMultiHashMap <int, int>(1, Allocator.Persistent);

            yield return(null);

            // Set up performance test
            Measure.Method(() =>
            {
                var selectJob = new SelectionJob <int, int>()
                {
                    StateExpansionBudget      = 10,
                    RootStateKey              = 0,
                    StateDepthLookup          = depthMap,
                    StateInfoLookup           = planGraph.StateInfoLookup,
                    ActionLookup              = planGraph.ActionLookup,
                    ActionInfoLookup          = planGraph.ActionInfoLookup,
                    ResultingStateLookup      = planGraph.ResultingStateLookup,
                    StateTransitionInfoLookup = planGraph.StateTransitionInfoLookup,

                    SelectedUnexpandedStates = selectedUnexpandedStates,
                    AllSelectedStates        = allExpandedStates
                };
                selectJob.Schedule().Complete();
            }).WarmupCount(1).MeasurementCount(1).IterationsPerMeasurement(1).CleanUp(() =>
            {
                depthMap.Clear();
                queue.Clear();
                planGraph.GetExpandedDepthMap(0, depthMap, queue);

                selectedUnexpandedStates.Clear();
                allExpandedStates.Clear();
            }).Run();

            queue.Dispose();
            depthMap.Dispose();
            planGraph.Dispose();
            selectedUnexpandedStates.Dispose();
            allExpandedStates.Dispose();

            // Check performance times
            PerformanceUtility.AssertRange(0.00, 5);
        }