Example #1
    public void NativeMultiHashMap_Double_Deallocate_Throws()
    {
        var hashMap = new NativeMultiHashMap <int, int>(16, Allocator.TempJob);

        hashMap.Dispose();
        Assert.Throws <InvalidOperationException>(() => { hashMap.Dispose(); });
    }
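A second Dispose() on an already-disposed container throws, which is why several of the examples below (e.g. #5, #8, #20, #26) guard the call with IsCreated. A minimal sketch of that guard pattern; the SafeDispose helper name is hypothetical:

    // IsCreated is false both before allocation and after Dispose(),
    // so the guarded call below can never double-dispose the container.
    static void SafeDispose(ref NativeMultiHashMap<int, int> map)
    {
        if (map.IsCreated)
        {
            map.Dispose();
        }
    }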
Example #2
 protected override void OnDestroy()
 {
     foreach (NativeQueue <COMMAND> CommandsQueue in CommandsQueues)
     {
         CommandsQueue.Dispose();
     }
     CommandsMap.Dispose();
 }
Example #3
    protected override void OnDestroy()
    {
        base.OnDestroy();

        sharedIndices.Dispose();
        entityCountBySharedIndex.Dispose();
        chunkCountDataBySharedIndex.Dispose();
        indicesInSquadBySharedIndices.Dispose();
    }
Example #4
 public NativeMultiHashMap <int, int> CreateHashMap()
 {
     if (_HasInitMap)
     {
         _HashMap.Dispose();
     }
     _HasInitMap = true;
     _HashMap    = new NativeMultiHashMap <int, int>(_context.CurBoidCount,
                                                     Unity.Collections.Allocator.Persistent);
     return(_HashMap);
 }
Example #5
        protected override void OnDestroyManager()
        {
            if (cloudID2MarkedCellsMap.IsCreated)
            {
                cloudID2MarkedCellsMap.Dispose();
            }

            if (Cell2OwningCloud.IsCreated)
            {
                Cell2OwningCloud.Dispose();
            }
        }
Example #6
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            int cellTagMap_size = (int)m_cloudTagDesiredQuantitySystem.TotalTags * 2;

            if (lastsize_cellTagMap != cellTagMap_size)
            {
                cellTagMap.Dispose();
                cellTagMap = new NativeMultiHashMap <int, int>(cellTagMap_size, Allocator.Persistent);
            }
            else
            {
                cellTagMap.Clear();
            }
            lastsize_cellTagMap = cellTagMap_size;


            if (lastsize_tagQuantityByCloud != m_tagCloudGroup.Length)
            {
                tagQuantityByCloud.Dispose();

                tagQuantityByCloud = new NativeArray <int>(m_tagCloudGroup.Length, Allocator.Persistent);
            }
            lastsize_tagQuantityByCloud = m_tagCloudGroup.Length;

            if (lastsize_cloudIDPositions != m_tagCloudGroup.Length)
            {
                cloudIDPositions.Dispose();

                cloudIDPositions = new NativeHashMap <int, CloudIDPosRadius>(m_tagCloudGroup.Length, Allocator.Persistent);
            }
            else
            {
                cloudIDPositions.Clear();
            }
            lastsize_cloudIDPositions = m_tagCloudGroup.Length;


            FillMapLists fillMapListsJob = new FillMapLists
            {
                cellTagMap  = cellTagMap.ToConcurrent(),
                tagQuantity = tagQuantityByCloud,
                CloudData   = m_tagCloudGroup.CloudData,
                Position    = m_tagCloudGroup.Position,
                cloudPos    = cloudIDPositions.ToConcurrent(),
                cellIDmap   = m_cellIdMapSystem.cellId2Cellfloat3
            };
            var fillMapDep = fillMapListsJob.Schedule(m_tagCloudGroup.Length, 64, inputDeps);

            fillMapDep.Complete();


            return(fillMapDep);
        }
Example #7
    public void NativeMultiHashMap_Double_Deallocate_Throws()
    {
        var hashMap = new NativeMultiHashMap <int, int>(16, Allocator.TempJob);

        hashMap.Dispose();
#if UNITY_2020_2_OR_NEWER
        Assert.Throws <ObjectDisposedException>(
#else
        Assert.Throws <InvalidOperationException>(
#endif
            () => { hashMap.Dispose(); });
    }
Example #8
    void Update()
    {
        if (Input.GetKeyDown(KeyCode.F5))
        {
            Setup();
        }

        if (Input.GetKeyDown(KeyCode.F6))
        {
            ResetPositionsOnly();
        }

        if (hashMap.IsCreated)
        {
            hashMap.Dispose();
        }
        hashMap = new NativeMultiHashMap <int, int>(NumCells, Allocator.TempJob);

        _jobHashMap = new HashCellsJob()
        {
            hashMap     = hashMap.ToConcurrent(),
            position    = _cellPositions,
            envSettings = envSettings
        };

        _jobCellularForce = new CellularForceJob()
        {
            numCells    = NumCells,
            numGroups   = NumGroups,
            position    = _cellPositions,
            velocity    = _cellVelocities,
            groupIndex  = _cellGroupIndex,
            forceMatrix = _cellGroupsForceMatrix,

            hashMap     = hashMap,
            envSettings = envSettings,
            deltaTime   = Time.deltaTime,
        };

        _jobPos = new PositionUpdateJob()
        {
            position  = _cellPositions,
            velocity  = _cellVelocities,
            deltaTime = Time.deltaTime,
        };

        _jobsHandleHashMap       = _jobHashMap.Schedule(NumCells, 128);
        _jobsHandleCellularForce = _jobCellularForce.Schedule(NumCells, 128, _jobsHandleHashMap);
        _jobHandlePosition       = _jobPos.Schedule(_cellTfmAccessArray, _jobsHandleCellularForce);
    }
Example #9
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            if (AgentMarkers.Capacity < agentGroup.Agents.Length * qtdMarkers * 4)
            {
                AgentMarkers.Dispose();
                AgentMarkers = new NativeMultiHashMap <int, float3>(agentGroup.Agents.Length * qtdMarkers * 4, Allocator.Persistent);
            }
            else
            {
                AgentMarkers.Clear();
            }


            if (!CellTagSystem.AgentIDToPos.IsCreated)
            {
                return(inputDeps);
            }



            if (createDict)
            {
                createDict = false;
                CreateCells();
            }

            //Get QuadTree quadrants that need to be scheduled
            var schedule = new NativeArray <int3>(qt.GetScheduled(), Allocator.Temp);
            //Debug.Log(schedule.Length);
            //list<quadrants> --> [[cell1, cell2, ..., celln], [celln+1, ...], ...]
            //[cell1, cell2, ..., celln] --> ComponentDataArray<Position>
            //Job <-- Position, MarkedCells { markerCell --> checkAgents }

            var job = new TakeMarkers
            {
                AgentMarkers = AgentMarkers.ToConcurrent(),
                Densities    = LocalDensities.ToConcurrent(),
                cellToAgent  = CellTagSystem.CellToMarkedAgents,
                cells        = schedule,
                AgentIDToPos = CellTagSystem.AgentIDToPos
            };

            var sq = job.Schedule(schedule.Length, Settings.BatchSize, inputDeps);

            sq.Complete();

            schedule.Dispose();
            frame++;
            return(sq);
        }
Example #10
 protected override void OnDestroy()
 {
     if (translations.IsCreated)
     {
         translations.Dispose();
     }
     if (velocities.IsCreated)
     {
         velocities.Dispose();
     }
     if (CollisionBuckets.IsCreated)
     {
         CollisionBuckets.Dispose();
     }
 }
Example #11
    protected override JobHandle OnUpdate(JobHandle inputDeps)
    {
        if (_massData.Length == 0 || _springData.Length == 0)
        {
            return(inputDeps);
        }

        NativeMultiHashMap <Entity, float3> hashMap = new NativeMultiHashMap <Entity, float3>(_massData.Length * 4, Allocator.Temp);

        HashSpringForceJob hashMassSpringJob = new HashSpringForceJob {
            _hashMap            = hashMap.ToConcurrent(),
            _springEntityPairs  = _springData.EntityPairs,
            _springElasticities = _springData.Elasticities,
            _springLines        = _springData.Lines
        };

        JobHandle hashMassSPringHandle = hashMassSpringJob.Schedule(_springData.Length, 64, inputDeps);

        MassSpringForceJob massSpringForceJob = new MassSpringForceJob {
            _massSpringHashMap = hashMap,
            _massEntities      = _massData.Entities,
            _massPhysicals     = _massData.Physicals
        };

        JobHandle massSpringForceHandle = massSpringForceJob.Schedule(_massData.Length, 64, hashMassSPringHandle);

        massSpringForceHandle.Complete();
        hashMap.Dispose();
        return(massSpringForceHandle);
    }
Example #12
    public void NativeMultiHashMap_Key_Collisions()
    {
        var hashMap     = new NativeMultiHashMap <int, int>(hashMapSize, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var writeData = new MultiHashMapWriteParallelForJob()
        {
            hashMap = hashMap.AsParallelWriter(),
            status  = writeStatus,
            keyMod  = 16,
        };

        var readData = new MultiHashMapReadParallelForJob()
        {
            hashMap = hashMap,
            values  = readValues,
            keyMod  = writeData.keyMod,
        };

        var writeJob = writeData.Schedule(hashMapSize, 1);
        var readJob  = readData.Schedule(hashMapSize, 1, writeJob);

        readJob.Complete();

        for (int i = 0; i < hashMapSize; ++i)
        {
            Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
            Assert.AreEqual(hashMapSize / readData.keyMod, readValues[i], "Job failed to read from hash map");
        }

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
Example #13
        public void Dispose()
        {
            // Free all allocated chunks for all allocated archetypes
            while (m_LastArchetype != null)
            {
                while (!m_LastArchetype->ChunkList.IsEmpty)
                {
                    var chunk = m_LastArchetype->ChunkList.Begin;
                    chunk->Remove();
                    UnsafeUtility.Free(chunk, Allocator.Persistent);
                }
                m_LastArchetype = m_LastArchetype->PrevArchetype;
            }

            // And all pooled chunks
            while (!m_EmptyChunkPool->IsEmpty)
            {
                var chunk = m_EmptyChunkPool->Begin;
                chunk->Remove();
                UnsafeUtility.Free(chunk, Allocator.Persistent);
            }

            m_TypeLookup.Dispose();
            m_ArchetypeChunkAllocator.Dispose();
        }
Example #14
    public void NativeHashMap_MergeCountShared()
    {
        var count            = 1024;
        var sharedKeyCount   = 16;
        var sharedCount      = new NativeArray <int>(count, Allocator.TempJob);
        var sharedIndices    = new NativeArray <int>(count, Allocator.TempJob);
        var totalSharedCount = new NativeArray <int>(1, Allocator.TempJob);
        var hashMap          = new NativeMultiHashMap <int, int>(count, Allocator.TempJob);

        for (int i = 0; i < count; i++)
        {
            hashMap.Add(i & (sharedKeyCount - 1), i);
            sharedCount[i] = 1;
        }

        var mergeSharedValuesJob = new MergeSharedValues
        {
            sharedCount   = sharedCount,
            sharedIndices = sharedIndices,
        };

        var mergedSharedValuesJobHandle = mergeSharedValuesJob.Schedule(hashMap, 64);

        mergedSharedValuesJobHandle.Complete();

        for (int i = 0; i < count; i++)
        {
            Assert.AreEqual(count / sharedKeyCount, sharedCount[sharedIndices[i]]);
        }

        sharedCount.Dispose();
        sharedIndices.Dispose();
        totalSharedCount.Dispose();
        hashMap.Dispose();
    }
Example #15
 public void Dispose()
 {
     _nodeToParent.Dispose();
     _nodeStates.Dispose();
     _preconditions.Dispose();
     _effects.Dispose();
 }
Example #16
    public void NativeMultiHashMap_RemoveKeyAndValue()
    {
        var hashMap = new NativeMultiHashMap <int, long> (1, Allocator.Temp);

        hashMap.Add(10, 0);
        hashMap.Add(10, 1);
        hashMap.Add(10, 2);

        hashMap.Add(20, 2);
        hashMap.Add(20, 2);
        hashMap.Add(20, 1);
        hashMap.Add(20, 2);
        hashMap.Add(20, 1);

        hashMap.Remove(10, 1L);
        ExpectValues(hashMap, 10, new [] { 0L, 2L });
        ExpectValues(hashMap, 20, new [] { 1L, 1L, 2L, 2L, 2L });

        hashMap.Remove(20, 2L);
        ExpectValues(hashMap, 10, new [] { 0L, 2L });
        ExpectValues(hashMap, 20, new [] { 1L, 1L });

        hashMap.Remove(20, 1L);
        ExpectValues(hashMap, 10, new [] { 0L, 2L });
        ExpectValues(hashMap, 20, new long [0]);

        hashMap.Dispose();
    }
Example #17
    public void NativeMultiHashMap_RemoveKeyValueThrowsInvalidParam()
    {
        var hashMap = new NativeMultiHashMap <int, long>(1, Allocator.Temp);

        Assert.Throws <ArgumentException>(() => hashMap.Remove(5, 5));
        hashMap.Dispose();
    }
Example #18
        public void Dispose(JobHandle jobHandle = default)
        {
            if (ActionLookup.IsCreated)
            {
                ActionLookup.Dispose(jobHandle);
            }
            if (ResultingStateLookup.IsCreated)
            {
                ResultingStateLookup.Dispose(jobHandle);
            }
            if (PredecessorGraph.IsCreated)
            {
                PredecessorGraph.Dispose(jobHandle);
            }

            if (StateInfoLookup.IsCreated)
            {
                StateInfoLookup.Dispose(jobHandle);
            }
            if (ActionInfoLookup.IsCreated)
            {
                ActionInfoLookup.Dispose(jobHandle);
            }
            if (StateTransitionInfoLookup.IsCreated)
            {
                StateTransitionInfoLookup.Dispose(jobHandle);
            }
        }
Example #19
 /// <summary>
 /// Disposes the data; if isReset is true, the data is reloaded.
 /// Note: this operation produces a large amount of GC garbage.
 /// </summary>
 /// <param name="isReset"></param>
 public void Dispose(bool isReset)
 {
     JobHandle.CompleteAll(handleList.AsArray());
     pointReadList.Dispose();
     pointReadWriteList.Dispose();
     pointTransformsList.Dispose();
     constraintReadList1.Dispose();
     ConstraintReadByPointIndex.Dispose();
     for (int i = 0; i < constraintReadList.Length; i++)
     {
         constraintReadList[i].Dispose();
     }
     if (isReset)
     {
         pointTransformsList  = new TransformAccessArray(0);
         m_constraintList     = new List <ConstraintRead[]>();
         m_pointReadList      = new List <PointRead>();
         m_pointReadWriteList = new List <PointReadWrite>();
     }
     else
     {
         handleList.Dispose();
         collidersReadList.Dispose();
         collidersReadWriteList.Dispose();
         colliderTransformsList.Dispose();
     }
 }
Example #20
 public void Dispose()
 {
     if (HashMap.IsCreated)
     {
         HashMap.Dispose();
     }
 }
Example #21
        protected override JobHandle CooldownJobs(JobHandle inputDeps)
        {
            NativeMultiHashMap <Entity, GameplayEffectDurationComponent> Cooldowns = new NativeMultiHashMap <Entity, GameplayEffectDurationComponent>(CooldownEffectsQuery.CalculateEntityCount() * 2 + GrantedAbilityQuery.CalculateEntityCount(), Allocator.TempJob);

            // Collect all effects which act as cooldowns for this ability
            inputDeps = new GatherCooldownGameplayEffectsJob
            {
                GameplayEffectDurations = Cooldowns.AsParallelWriter()
            }.Schedule(CooldownEffectsQuery, inputDeps);

            // Add a default value of '0' for all entities as well
            inputDeps = new CooldownAbilityIsZeroIfAbsentJob
            {
                GameplayEffectDurations = Cooldowns.AsParallelWriter()
            }.Schedule(GrantedAbilityQuery, inputDeps);

            // Get the effect with the longest cooldown remaining
            inputDeps = new GatherLongestCooldownPerEntity
            {
                GameplayEffectDurationComponent = Cooldowns
            }.Schedule(GrantedAbilityQuery, inputDeps);

            Cooldowns.Dispose(inputDeps);
            return(inputDeps);
        }
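Examples #18 and #21 pass a JobHandle into Dispose(), which schedules the release to happen after the pending jobs finish instead of blocking the main thread with Complete(). A minimal sketch, assuming a Unity.Collections version that supports Dispose(JobHandle); the FillJob struct and FillAndRelease helper are hypothetical:

    using Unity.Collections;
    using Unity.Jobs;

    struct FillJob : IJobParallelFor    // hypothetical job, for illustration only
    {
        public NativeMultiHashMap<int, int>.ParallelWriter map;

        public void Execute(int index)
        {
            map.Add(index & 7, index);
        }
    }

    static JobHandle FillAndRelease(JobHandle inputDeps)
    {
        var map    = new NativeMultiHashMap<int, int>(64, Allocator.TempJob);
        var handle = new FillJob { map = map.AsParallelWriter() }.Schedule(64, 8, inputDeps);

        // Dispose(JobHandle) defers the deallocation until 'handle' completes,
        // so no Complete() call is needed on the main thread just to free memory.
        return map.Dispose(handle);
    }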
Example #22
        public void Dispose()
        {
            // Move all chunks to become pooled chunks
            while (m_LastArchetype != null)
            {
                while (!m_LastArchetype->ChunkList.IsEmpty)
                {
                    var chunk = (Chunk *)m_LastArchetype->ChunkList.Begin;
                    SetChunkCount(chunk, 0);
                }

                m_LastArchetype->FreeChunksBySharedComponents.Dispose();
                m_LastArchetype = m_LastArchetype->PrevArchetype;
            }

            // And all pooled chunks
            while (!m_EmptyChunkPool->IsEmpty)
            {
                var chunk = m_EmptyChunkPool->Begin;
                chunk->Remove();
                UnsafeUtility.Free(chunk, Allocator.Persistent);
            }

            m_ManagedArrays = null;
            m_TypeLookup.Dispose();
            m_ArchetypeChunkAllocator.Dispose();
        }
Example #23
 void dispose_resources()
 {
     if (enemy_bullet_hashmap_.IsCreated)
     {
         enemy_bullet_hashmap_.Dispose();
     }
     if (player_bullet_hashmap_.IsCreated)
     {
         player_bullet_hashmap_.Dispose();
     }
     if (enemy_hashmap_.IsCreated)
     {
         enemy_hashmap_.Dispose();
     }
     if (player_hashmap_.IsCreated)
     {
         player_hashmap_.Dispose();
     }
     if (enemy_bullet_colliders_.IsCreated)
     {
         enemy_bullet_colliders_.Dispose();
     }
     if (player_bullet_colliders_.IsCreated)
     {
         player_bullet_colliders_.Dispose();
     }
     if (enemy_colliders_.IsCreated)
     {
         enemy_colliders_.Dispose();
     }
     if (player_colliders_.IsCreated)
     {
         player_colliders_.Dispose();
     }
 }
Example #24
        /// <summary>
        /// Schedules an attribute job
        /// </summary>
        /// <param name="inputDependencies">JobHandle</param>
        /// <param name="query">The EntityQuery used for filtering group</param>
        /// <param name="AttributeHash">Attribute MultiHashMap mapping entity to attribute value</param>
        /// <param name="job">Returned job handle</param>
        /// <typeparam name="TOper">The type of operator for this attribute job</typeparam>
        private void ScheduleAttributeJob <TOper>(ref JobHandle inputDependencies, ref EntityQuery query, ref NativeMultiHashMap <Entity, float> AttributeHash, out JobHandle job)
            where TOper : struct, IAttributeOperator, IComponentData
        {
            var nEntities    = query.CalculateEntityCount();
            var hashCapacity = AttributeHash.Capacity;

            AttributeHash.Clear();
            if (nEntities == 0)
            {
                job = inputDependencies;
                return;
            }
            if (hashCapacity < nEntities)   // We need to increase hash capacity
            {
                AttributeHash.Capacity = (int)(nEntities * 1.1);
            }
            else if (hashCapacity > nEntities * 4)     // We need to reduce hash capacity
            {
                AttributeHash.Dispose();
                AttributeHash = new NativeMultiHashMap <Entity, float>(nEntities, Allocator.Persistent);
            }
            // // AttributeHash = new NativeMultiHashMap<Entity, float>(query.CalculateEntityCount(), Allocator.TempJob);
            inputDependencies = new GetAttributeValuesJob_Sum <TOper, TAttributeTag>
            {
                owners                  = GetArchetypeChunkComponentType <AttributesOwnerComponent>(false),
                attributeModifiers      = GetArchetypeChunkComponentType <AttributeModifier <TOper, TAttributeTag> >(false),
                AttributeModifierValues = AttributeHash.AsParallelWriter()
            }.Schedule(query, inputDependencies);
            job = inputDependencies;
        }
Example #25
    public void NativeMultiHashMap_ValueIterator()
    {
        var hashMap = new NativeMultiHashMap <int, int> (1, Allocator.Temp);

        hashMap.Add(5, 0);
        hashMap.Add(5, 1);
        hashMap.Add(5, 2);

        var list = new NativeList <int>(Allocator.TempJob);

        GCAllocRecorder.ValidateNoGCAllocs(() =>
        {
            list.Clear();
            foreach (var value in hashMap.GetValuesForKey(5))
            {
                list.Add(value);
            }
        });

        list.Sort();
        Assert.AreEqual(list.ToArray(), new int[] { 0, 1, 2 });

        foreach (var value in hashMap.GetValuesForKey(6))
        {
            Assert.Fail();
        }

        list.Dispose();
        hashMap.Dispose();
    }
Example #26
 protected override void OnDestroy()
 {
     if (Walkables.IsCreated)
     {
         Walkables.Dispose();
     }
 }
Example #27
    protected override JobHandle OnUpdate(JobHandle inputDeps)
    {
        int massCount   = massQuery.CalculateEntityCount();
        int springCount = springQuery.CalculateEntityCount();

        if (massCount == 0 || springCount == 0)
        {
            return(inputDeps);
        }

        NativeMultiHashMap <Entity, float3> hashMap = new NativeMultiHashMap <Entity, float3>(massCount * 4, Allocator.TempJob);

        HashSpringForceJob hashMassSpringJob = new HashSpringForceJob {
            _hashMap = hashMap.AsParallelWriter()
        };

        JobHandle hashMassSPringHandle = hashMassSpringJob.Schedule(this, inputDeps);

        MassSpringForceJob massSpringForceJob = new MassSpringForceJob {
            _massSpringHashMap = hashMap
        };

        JobHandle massSpringForceHandle = massSpringForceJob.Schedule(this, hashMassSPringHandle);

        massSpringForceHandle.Complete();
        hashMap.Dispose();
        return(massSpringForceHandle);
    }
Example #28
    public void Read_And_Write_Without_Fences()
    {
        var hashMap     = new NativeMultiHashMap <int, int>(hashMapSize, Allocator.Temp);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.Temp);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.Temp);

        var writeData = new MultiHashMapWriteParallelForJob();

        writeData.hashMap = hashMap;
        writeData.status  = writeStatus;
        writeData.keyMod  = hashMapSize;
        var readData = new MultiHashMapReadParallelForJob();

        readData.hashMap = hashMap;
        readData.values  = readValues;
        readData.keyMod  = writeData.keyMod;
        var writeJob = writeData.Schedule(hashMapSize, 1);

        Assert.Throws <InvalidOperationException> (() => { readData.Schedule(hashMapSize, 1); });
        writeJob.Complete();

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
Example #29
 /// <summary>
 ///
 /// </summary>
 public void Dispose()
 {
     buffer.Dispose();
     offset.Dispose();
     length.Dispose();
     hash.Dispose();
 }
Example #30
    public void Read_And_Write()
    {
        var hashMap     = new NativeMultiHashMap <int, int>(hashMapSize, Allocator.TempJob);
        var writeStatus = new NativeArray <int>(hashMapSize, Allocator.TempJob);
        var readValues  = new NativeArray <int>(hashMapSize, Allocator.TempJob);

        var writeData = new MultiHashMapWriteParallelForJob();

        writeData.hashMap = hashMap;
        writeData.status  = writeStatus;
        writeData.keyMod  = hashMapSize;
        var readData = new MultiHashMapReadParallelForJob();

        readData.hashMap = hashMap;
        readData.values  = readValues;
        readData.keyMod  = writeData.keyMod;
        var writeJob = writeData.Schedule(hashMapSize, 1);
        var readJob  = readData.Schedule(hashMapSize, 1, writeJob);

        readJob.Complete();

        for (int i = 0; i < hashMapSize; ++i)
        {
            Assert.AreEqual(0, writeStatus[i], "Job failed to write value to hash map");
            Assert.AreEqual(1, readValues[i], "Job failed to read from hash map");
        }

        hashMap.Dispose();
        writeStatus.Dispose();
        readValues.Dispose();
    }
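When a container lives only for the duration of one method, a using block scopes the Dispose() call automatically and rules out the double-dispose errors tested above. A minimal sketch, assuming a Collections version in which NativeMultiHashMap implements IDisposable; the UsingScopedMap helper name is hypothetical:

    using Unity.Collections;

    static void UsingScopedMap()
    {
        // Dispose() runs exactly once when the using block exits,
        // even if an exception is thrown inside it.
        using (var map = new NativeMultiHashMap<int, int>(16, Allocator.Temp))
        {
            map.Add(1, 10);
            map.Add(1, 11);
        }
    }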