/// <summary>
/// Rebuilds the GPU deformation buffer layout and pushes per-entity buffer
/// indices into material-property components. Runs the resize/push child
/// systems first, then (only when <c>m_RebuildDeformedMeshBuffers</c> is set)
/// lays out each unique shared mesh contiguously in the deformation buffers
/// and writes every chunk's instance offsets.
/// </summary>
protected override void OnUpdate()
{
    m_ResizeBuffersSystem.Update();

#if ENABLE_COMPUTE_DEFORMATIONS
    k_PushSharedMeshData.Begin();
    if (m_RebuildSharedMeshBuffers)
    {
        m_PushSharedMeshDataSystem.Update();
    }
    k_PushSharedMeshData.End();
#endif

    if (m_RebuildDeformedMeshBuffers)
    {
        k_MaterialPropertyUpdate.Begin();

        // Layout Deformed Meshes in buffer.
        // Each unique mesh gets a contiguous region sized instanceCount * per-instance
        // element count (vertices / blend-shape weights / bone matrices respectively).
#if ENABLE_COMPUTE_DEFORMATIONS
        MeshHashToDeformedMeshIndex.Clear();
        MeshHashToBlendWeightIndex.Clear();
        uint deformedMeshOffset = 0;
        int blendShapeOffset = 0;
#endif
        MeshHashToSkinMatrixIndex.Clear();
        int skinMatrixOffset = 0;

        foreach (var meshData in UniqueSharedMeshData)
        {
            // Hash 0 is the default/null shared component value; nothing to lay out.
            if (meshData.RenderMeshHash == 0)
            {
                continue;
            }

            int instanceCount = MeshHashToInstanceCount[meshData.RenderMeshHash];

#if ENABLE_COMPUTE_DEFORMATIONS
            MeshHashToDeformedMeshIndex.Add(meshData.RenderMeshHash, deformedMeshOffset);
            deformedMeshOffset += (uint)instanceCount * (uint)meshData.VertexCount;

            if (meshData.HasBlendShapes)
            {
                MeshHashToBlendWeightIndex.Add(meshData.RenderMeshHash, blendShapeOffset);
                blendShapeOffset += instanceCount * meshData.BlendShapeCount;
            }
#endif
            if (meshData.HasSkinning)
            {
                MeshHashToSkinMatrixIndex.Add(meshData.RenderMeshHash, skinMatrixOffset);
                skinMatrixOffset += instanceCount * meshData.BoneCount;
            }
        }

        // Write deformed mesh index to material property
#if ENABLE_COMPUTE_DEFORMATIONS
        var deformedMeshIndexType = GetComponentTypeHandle <DeformedMeshIndex>();
        var blendWeightIndexType = GetComponentTypeHandle <BlendWeightBufferIndex>();
#endif
        var skinMatrixIndexType = GetComponentTypeHandle <SkinMatrixBufferIndex>();
        var sharedMeshDataType = GetSharedComponentTypeHandle <SharedMeshData>();

        // Main-thread chunk iteration below reads/writes component data directly.
        m_BufferIndexQuery.CompleteDependency();

        using (var chunks = m_BufferIndexQuery.CreateArchetypeChunkArray(Allocator.TempJob))
        {
            // Per-mesh running offsets (in buffer elements, not instances) of how much
            // of each mesh's region has already been handed out to earlier chunks.
            var skinMatrixInstancesMap = new NativeHashMap <int, int>(chunks.Length, Allocator.Temp);
#if ENABLE_COMPUTE_DEFORMATIONS
            var deformedMeshInstancesMap = new NativeHashMap <int, int>(chunks.Length, Allocator.Temp);
            var blendShapeWeightInstancesMap = new NativeHashMap <int, int>(chunks.Length, Allocator.Temp);
#endif
            foreach (var chunk in chunks)
            {
                var sharedMeshData = chunk.GetSharedComponentData(sharedMeshDataType, EntityManager);
#if ENABLE_COMPUTE_DEFORMATIONS
                // NOTE(review): despite the name, `count` accumulates in vertex units
                // (chunk.Count * VertexCount), consistent with the index math below.
                deformedMeshInstancesMap.TryGetValue(sharedMeshData.RenderMeshHash, out int count);
                var deformedMeshIndices = chunk.GetNativeArray(deformedMeshIndexType);
                var deformedMeshIndex = MeshHashToDeformedMeshIndex[sharedMeshData.RenderMeshHash];
                for (int i = 0; i < chunk.Count; i++)
                {
                    // mesh region start + already-consumed portion + this instance's slot.
                    var index = deformedMeshIndex + count + (i * sharedMeshData.VertexCount);
                    deformedMeshIndices[i] = new DeformedMeshIndex { Value = (uint)index };
                }
                if (count == 0)
                {
                    deformedMeshInstancesMap.Add(sharedMeshData.RenderMeshHash, chunk.Count * sharedMeshData.VertexCount);
                }
                else
                {
                    deformedMeshInstancesMap[sharedMeshData.RenderMeshHash] += chunk.Count * sharedMeshData.VertexCount;
                }

                if (sharedMeshData.HasBlendShapes)
                {
                    // `instanceCount` here is in blend-shape-weight units, see accumulation below.
                    blendShapeWeightInstancesMap.TryGetValue(sharedMeshData.RenderMeshHash, out int instanceCount);
                    var blendWeightIndices = chunk.GetNativeArray(blendWeightIndexType);
                    int blendShapeIndex = MeshHashToBlendWeightIndex[sharedMeshData.RenderMeshHash];
                    for (int i = 0; i < chunk.Count; i++)
                    {
                        var index = blendShapeIndex + instanceCount + (i * sharedMeshData.BlendShapeCount);
                        blendWeightIndices[i] = new BlendWeightBufferIndex { Value = index };
                    }
                    if (instanceCount == 0)
                    {
                        blendShapeWeightInstancesMap.Add(sharedMeshData.RenderMeshHash, chunk.Count * sharedMeshData.BlendShapeCount);
                    }
                    else
                    {
                        blendShapeWeightInstancesMap[sharedMeshData.RenderMeshHash] += chunk.Count * sharedMeshData.BlendShapeCount;
                    }
                }
#endif
                if (sharedMeshData.HasSkinning)
                {
                    // `instanceCount` here is in skin-matrix units (chunk.Count * BoneCount).
                    skinMatrixInstancesMap.TryGetValue(sharedMeshData.RenderMeshHash, out int instanceCount);
                    var skinMatrixIndices = chunk.GetNativeArray(skinMatrixIndexType);
                    int skinMatrixIndex = MeshHashToSkinMatrixIndex[sharedMeshData.RenderMeshHash];
                    for (int i = 0; i < chunk.Count; i++)
                    {
                        var index = skinMatrixIndex + instanceCount + (i * sharedMeshData.BoneCount);
                        skinMatrixIndices[i] = new SkinMatrixBufferIndex { Value = index };
                    }
                    if (instanceCount == 0)
                    {
                        skinMatrixInstancesMap.Add(sharedMeshData.RenderMeshHash, chunk.Count * sharedMeshData.BoneCount);
                    }
                    else
                    {
                        skinMatrixInstancesMap[sharedMeshData.RenderMeshHash] += chunk.Count * sharedMeshData.BoneCount;
                    }
                }
            }

            skinMatrixInstancesMap.Dispose();
#if ENABLE_COMPUTE_DEFORMATIONS
            deformedMeshInstancesMap.Dispose();
            blendShapeWeightInstancesMap.Dispose();
#endif
            // Layout is now valid; don't rebuild again until something changes it.
            m_RebuildDeformedMeshBuffers = false;
        }

        k_MaterialPropertyUpdate.End();
    }
}
/// <summary>
/// Resets all intermediate buffers so a subsequent run starts from an
/// empty state: the weld map plus the vertex and index buffers.
/// </summary>
public virtual void Schedule()
{
    weldMap.Clear();
    vertexBuffer.Clear();
    indexBuffer.Clear();
}
/// <summary>
/// Builds a planner state from the supplied trait-based objects, or from every
/// entity carrying <c>SemanticObjectData</c> when none are supplied. Runs two
/// passes: first creating planner objects (re-using previously assigned object
/// ids where possible), then copying trait property values.
/// </summary>
/// <param name="planningAgent">Entity that additionally receives the PlanningAgent trait.</param>
/// <param name="traitBasedObjects">Optional subset of entities to convert; null or empty means "all".</param>
/// <returns>The newly created state data.</returns>
public TStateData CreateStateData(Entity planningAgent, IEnumerable <Entity> traitBasedObjects = null)
{
    m_ObjectIdToObject.Clear();
    m_EntityToObjectId.Clear();
    var sourceEntityManager = World.DefaultGameObjectInjectionWorld.EntityManager;

    // Materialize the input once. The original code enumerated the IEnumerable
    // up to three times (foreach, Any(), ToArray()), which is wasteful and
    // incorrect for one-shot/lazy sequences.
    var objectList = traitBasedObjects?.ToList();

    // Retrieve known ObjectId for a specific trait-based object data
    if (objectList != null)
    {
        foreach (var objectData in objectList)
        {
            // Reverse lookup (id -> entity map, searched by entity) to recover a
            // previously assigned id so object identity stays stable across rebuilds.
            foreach (var kvp in m_ObjectIdToEntity)
            {
                if (kvp.Value == objectData)
                {
                    m_EntityToObjectId[objectData] = kvp.Key;
                    break;
                }
            }
        }
    }
    m_ObjectIdToEntity.Clear();

    var state = m_StateManager.CreateStateData();

    NativeArray <Entity> entities;
    if (objectList != null && objectList.Count > 0)
    {
        entities = new NativeArray <Entity>(objectList.ToArray(), Allocator.TempJob);
    }
    else
    {
        entities = sourceEntityManager
            .CreateEntityQuery(ComponentType.ReadOnly <SemanticObjectData>())
            .ToEntityArray(Allocator.TempJob);
    }

    // First pass - create planner objects, mapping ECS component types to planner trait types.
    for (int i = 0; i < entities.Length; i++)
    {
        var entity = entities[i];
        if (entity == default)
        {
            continue;
        }

        var sourceTypes = sourceEntityManager.GetComponentTypes(entity);
        var plannerTypes = new NativeList <ComponentType>(sourceTypes.Length, Allocator.TempJob);
        for (int j = 0; j < sourceTypes.Length; j++)
        {
            if (m_TypeLookup.TryGetValue(sourceTypes[j], out var plannerType))
            {
                plannerTypes.Add(plannerType);
            }
        }

        if (entity == planningAgent)
        {
            plannerTypes.Add(typeof(PlanningAgent));
        }

        TObject traitBasedObject;
        FixedString64 entityName = default;
#if UNITY_EDITOR
        entityName = sourceEntityManager.GetName(entity);
#endif
        if (m_EntityToObjectId.TryGetValue(entity, out var traitBasedObjectId))
        {
            // Re-use the id recovered in the reverse-lookup step above.
            state.AddObject(plannerTypes, out traitBasedObject, traitBasedObjectId, entityName);
        }
        else
        {
            state.AddObject(plannerTypes, out traitBasedObject, out traitBasedObjectId, entityName);
            m_EntityToObjectId[entity] = traitBasedObjectId;
        }
        plannerTypes.Dispose();

        m_ObjectIdToObject[traitBasedObjectId.Id] = traitBasedObject;
        m_ObjectIdToEntity[traitBasedObjectId] = entity;
    }

    // Second pass - set all properties
    for (int i = 0; i < entities.Length; i++)
    {
        var entity = entities[i];
        if (entity == default)
        {
            continue;
        }

        var sourceTraitTypes = sourceEntityManager.GetComponentTypes(entity);
        var traitBasedObjectId = m_EntityToObjectId[entity];
        var traitBasedObject = m_ObjectIdToObject[traitBasedObjectId.Id];
        state.ConvertAndSetPlannerTrait(entity, sourceEntityManager, sourceTraitTypes, m_EntityToObjectId, ref traitBasedObject);
    }

    entities.Dispose();
    return state;
}
// Job entry point: empties the hash map so it can be repopulated this frame.
public void Execute() { hashMap.Clear(); }
/// <summary>
/// Initializes newly created client-predicted ghost entities (matching them
/// against the predicted-spawn list) and destroys predicted spawns that were
/// never confirmed by the server before the interpolation tick passed them.
/// </summary>
protected override unsafe void OnUpdate()
{
    EntityCommandBuffer commandBuffer = m_BeginSimulationBarrier.CreateCommandBuffer();
    var spawnListEntity = GetSingletonEntity <PredictedGhostSpawnList>();
    var spawnListFromEntity = GetBufferFromEntity <PredictedGhostSpawn>();

    if (!m_GhostInitQuery.IsEmptyIgnoreFilter)
    {
        // Rebuild the child-entity lookup used to patch entity references in
        // linked entity groups; resized up-front to avoid parallel-writer overflow.
        m_ChildEntityLookup.Clear();
        var childCount = m_ChildEntityQuery.CalculateEntityCountWithoutFiltering();
        if (childCount > m_ChildEntityLookup.Capacity)
        {
            m_ChildEntityLookup.Capacity = childCount;
        }
        var buildChildJob = new BuildChildEntityLookupJob
        {
            entityType = GetEntityTypeHandle(),
            childEntityLookup = m_ChildEntityLookup.AsParallelWriter()
        };
        Dependency = buildChildJob.ScheduleParallel(m_ChildEntityQuery, Dependency);

        var initJob = new InitGhostJob
        {
            GhostCollectionSingleton = GetSingletonEntity <GhostCollection>(),
            GhostComponentCollectionFromEntity = GetBufferFromEntity <GhostComponentSerializer.State>(true),
            GhostTypeCollectionFromEntity = GetBufferFromEntity <GhostCollectionPrefabSerializer>(true),
            GhostComponentIndexFromEntity = GetBufferFromEntity <GhostCollectionComponentIndex>(true),
            GhostCollectionFromEntity = GetBufferFromEntity <GhostCollectionPrefab>(true),
            entityType = GetEntityTypeHandle(),
            snapshotDataType = GetComponentTypeHandle <SnapshotData>(),
            snapshotDataBufferType = GetBufferTypeHandle <SnapshotDataBuffer>(),
            snapshotDynamicDataBufferType = GetBufferTypeHandle <SnapshotDynamicDataBuffer>(),
            spawnListFromEntity = spawnListFromEntity,
            spawnListEntity = spawnListEntity,
            ghostFromEntity = GetComponentDataFromEntity <GhostComponent>(),
            ghostTypeFromEntity = GetComponentDataFromEntity <GhostTypeComponent>(true),
            commandBuffer = commandBuffer,
            spawnTick = m_SpawnTick,
            linkedEntityGroupType = GetBufferTypeHandle <LinkedEntityGroup>(),
            childEntityLookup = m_ChildEntityLookup
        };
        var ghostComponentCollection = EntityManager.GetBuffer <GhostCollectionComponentType>(initJob.GhostCollectionSingleton);
        var listLength = ghostComponentCollection.Length;
        // DynamicTypeList comes in fixed capacities; pick the smallest that fits.
        if (listLength <= 32)
        {
            var dynamicListJob = new InitGhostJob32 { Job = initJob };
            DynamicTypeList.PopulateList(this, ghostComponentCollection, true, ref dynamicListJob.List);
            Dependency = dynamicListJob.ScheduleSingle(m_GhostInitQuery, Dependency);
        }
        else if (listLength <= 64)
        {
            var dynamicListJob = new InitGhostJob64 { Job = initJob };
            DynamicTypeList.PopulateList(this, ghostComponentCollection, true, ref dynamicListJob.List);
            Dependency = dynamicListJob.ScheduleSingle(m_GhostInitQuery, Dependency);
        }
        else if (listLength <= 128)
        {
            var dynamicListJob = new InitGhostJob128 { Job = initJob };
            DynamicTypeList.PopulateList(this, ghostComponentCollection, true, ref dynamicListJob.List);
            Dependency = dynamicListJob.ScheduleSingle(m_GhostInitQuery, Dependency);
        }
        else
        {
            throw new System.InvalidOperationException(
                $"Too many ghost component types present in project, limit is {DynamicTypeList.MaxCapacity} types. This is any struct which has a field marked with GhostField attribute.");
        }
    }

    // Validate all ghosts in the list of predictive spawn ghosts and destroy the ones which are too old
    uint interpolatedTick = m_ClientSimulationSystemGroup.InterpolationTick;
    Dependency = Job.WithCode(() =>
    {
        var spawnList = spawnListFromEntity[spawnListEntity];
        for (int i = 0; i < spawnList.Length; ++i)
        {
            var ghost = spawnList[i];
            if (SequenceHelpers.IsNewer(interpolatedTick, ghost.spawnTick))
            {
                // Destroy entity and remove from list (swap-with-last, then re-check index i)
                commandBuffer.DestroyEntity(ghost.entity);
                spawnList[i] = spawnList[spawnList.Length - 1];
                spawnList.RemoveAt(spawnList.Length - 1);
                --i;
            }
        }
    }).Schedule(Dependency);
    m_BeginSimulationBarrier.AddJobHandleForProducer(Dependency);
    m_SpawnTick = m_ClientSimulationSystemGroup.ServerTick;
}
/// <summary>
/// Applies a gem swap to the board state: swaps the two cell types, destroys
/// any resulting match-3 groups, lets gems above fall in to fill the holes
/// ("stitching"), repeats for chained explosions, and finally writes the
/// accumulated score into the Game trait.
/// </summary>
public static void SwapCellAndUpdateBoard(ActionKey action, StateData state, Cell cell1, Cell cell2)
{
    // Swap cell types
    (cell1.Type, cell2.Type) = (cell2.Type, cell1.Type);
    state.SetTraitOnObjectAtIndex(cell1, action[Cell1Index]);
    state.SetTraitOnObjectAtIndex(cell2, action[Cell2Index]);

    int newScore = 0;
    var cellsToDestroy = new NativeList <int>(1, Allocator.Temp);

    // Check match3 and destroy used Gem (set to Type = None)
    CheckMatchOnGem(state, cell1, action[Cell1Index], ref cellsToDestroy);
    CheckMatchOnGem(state, cell2, action[Cell2Index], ref cellsToDestroy);

    if (cellsToDestroy.Length > 0)
    {
        // Unset all destroyed cells
        var cellQueue = new NativeList <int>(Allocator.Temp);
        var cellsQueued = new NativeHashMap <int, byte>(3, Allocator.Temp);   // membership set for cellQueue
        var cellChanged = new NativeHashMap <int, byte>(3, Allocator.Temp);   // cells mutated by stitching, re-checked for matches

        // Outer loop handles chained explosions: stitching can create new matches.
        while (cellsToDestroy.Length > 0)
        {
            for (int i = 0; i < cellsToDestroy.Length; i++)
            {
                var cellIndex = cellsToDestroy[i];
                if (cellsQueued.ContainsKey(cellIndex))
                {
                    continue;
                }
                var cellTrait = state.GetTraitOnObjectAtIndex <Cell>(cellIndex);
                newScore += GetScore(cellTrait.Type);
                cellTrait.Type = CellType.None;
                state.SetTraitOnObjectAtIndex(cellTrait, cellIndex);
                cellQueue.Add(cellIndex);
                cellsQueued.TryAdd(cellIndex, default);
            }
            cellsToDestroy.Clear();

            // Stitch Unset Gems with Top Gem
            while (cellQueue.Length > 0)
            {
                var cellIndex = cellQueue[0];
                cellQueue.RemoveAtSwapBack(0);
                cellsQueued.Remove(cellIndex);
                var cell = state.GetTraitOnObjectAtIndex <Cell>(cellIndex);
                if (cell.Top.Id == ObjectId.None)
                {
                    continue;    // top row: nothing above to fall down
                }
                if (cell.Type == CellType.None)
                {
                    var cellTopObject = state.GetTraitBasedObject(cell.Top);
                    var cellTop = state.GetTraitOnObject <Cell>(cellTopObject);

                    // Find first cell with a known type on top
                    while (cellTop.Type == CellType.None)
                    {
                        if (cellTop.Top.Id == ObjectId.None)
                        {
                            break;
                        }
                        cellTopObject = state.GetTraitBasedObject(cellTop.Top);
                        cellTop = state.GetTraitOnObject <Cell>(cellTopObject);
                    }

                    if (cellTop.Type != CellType.None)
                    {
                        // Pull that gem down into this cell and empty its old slot,
                        // then queue the emptied slot for further stitching.
                        cell.Type = cellTop.Type;
                        state.SetTraitOnObjectAtIndex(cell, cellIndex);
                        var newCellTop = cellTop;
                        newCellTop.Type = CellType.None;
                        state.SetTraitOnObject(newCellTop, ref cellTopObject);
                        var index = state.GetTraitBasedObjectIndex(cellTopObject);
                        cellQueue.Add(index);
                        cellsQueued.TryAdd(index, default);

                        // Queue all vertical cells for checking
                        var cellTopIndex = state.GetTraitBasedObjectIndex(cell.Top);
                        while (cellTop.Type != CellType.None)
                        {
                            cellChanged.TryAdd(cellTopIndex, default);
                            if (cellTop.Top == TraitBasedObjectId.None)
                            {
                                break;
                            }
                            cellTopIndex = state.GetTraitBasedObjectIndex(cellTop.Top);
                            cellTop = state.GetTraitOnObjectAtIndex <Cell>(cellTopIndex);
                        }
                    }
                }
            }

            // Check cells affected by stitching for chained-explosion
            var changedKeys = cellChanged.GetKeyArray(Allocator.Temp);
            for (int i = 0; i < changedKeys.Length; i++)
            {
                var cellIndex = changedKeys[i];
                var cell = state.GetTraitOnObjectAtIndex <Cell>(cellIndex);
                CheckMatchOnGem(state, cell, cellIndex, ref cellsToDestroy);
            }
            changedKeys.Dispose();
            cellChanged.Clear();
        }

        cellQueue.Dispose();
        cellsQueued.Dispose();
        cellChanged.Dispose();
    }

    // Store information in Game state
    var gameId = state.GetTraitBasedObjectId(action[GameIndex]);
    var game = state.GetTraitBasedObject(gameId);
    var gameTrait = state.GetTraitOnObject <Game>(game);
    // Score is stored in the Game Object and apply later in the reward function
    gameTrait.Score = newScore;
    state.SetTraitOnObject(gameTrait, ref game);
    cellsToDestroy.Dispose();
}
// Resets the post-process remap table at the start of a postprocess pass.
private void HandleBeginPostprocess() { m_PostProcessRemap.Clear(); }
/// <summary>
/// Recenters the streamed-chunk window around the follow target: chunks that
/// are finished and still inside the new window are kept; every other window
/// slot is marked incorrect and recycled by pairing it with a leftover entity,
/// producing fresh FinishedBlocks/AvailableBlocks queues and a rebuilt
/// position map.
/// </summary>
public void Execute()
{
    // Snap the follow target to the spiral-chunk grid to get the new window center.
    var offsetIndex = Location.SpiralChunkIndex((int2)math.round(FollowTargetPosition));
    var newOffset = Location.SpiralChunkPosition(offsetIndex);
    CentroidPosition.Value = newOffset;

    // Index finished blocks by offset for O(1) membership checks below.
    while (FinishedBlocks.TryDequeue(out var blockData))
    {
        _finishedBlocksRemapped.TryAddAndThrowOnFail(blockData.Offset, blockData.Entity);
    }

    for (var i = 0; i < BaseOffsetPositions.Length; i++)
    {
        var offset = BaseOffsetPositions[i] + newOffset;
        if (ActualPositions.TryGetValue(offset, out var entity))
        {
            var blockData = new BlockData { Entity = entity, Offset = offset };
            if (_finishedBlocksRemapped.TryGetValue(offset, out _))
            {
                // Finished and still in the window: keep as-is.
                _correctBlocks.Enqueue(blockData);
                ActualPositions.Remove(offset);
            }
            else
            {
                _incorrectBlocks.Enqueue(blockData);
            }
        }
        else
        {
            // No entity at this window slot yet; placeholder to be filled below.
            _incorrectBlocks.Enqueue(new BlockData { Entity = Entity.Null, Offset = offset });
        }
    }

    // Remaining ActualPositions entries are entities that fell outside the window;
    // recycle them into the incorrect-block slots (replacing placeholder entities).
    var stubData = ActualPositions.GetEnumerator();
    {
        while (stubData.MoveNext())
        {
            var blockData = _incorrectBlocks.Dequeue();
            blockData.Entity = stubData.Current.Value;
            _incorrectBlocks.Enqueue(blockData);
        }
    }
    stubData.Dispose();

    // Rebuild outputs from the two queues.
    FinishedBlocks.Clear();
    AvailableBlocks.Clear();
    ActualPositions.Clear();
    while (_correctBlocks.TryDequeue(out var blockData))
    {
        ActualPositions.TryAddAndThrowOnFail(blockData.Offset, blockData.Entity);
        FinishedBlocks.Enqueue(blockData);
    }
    while (_incorrectBlocks.TryDequeue(out var blockData))
    {
        ActualPositions.TryAddAndThrowOnFail(blockData.Offset, blockData.Entity);
        AvailableBlocks.Enqueue(blockData);
    }
}
// Job entry point: empties the ghost map before it is repopulated.
public void Execute() { ghostMap.Clear(); }
/// <summary>
/// Empties every backing collection (key list, value list, and hash map),
/// returning the container to its initial empty state.
/// </summary>
public void Clear()
{
    keyList.Clear();
    list.Clear();
    hashMap.Clear();
}
/// <summary>
/// Endless-terrain chunk streaming. Each frame computes the chunk neighbourhood
/// around the main camera and double-buffers it: chunks still in range move
/// from set A to set B, missing chunks are created, and whatever remains in A
/// (now out of range) is destroyed. B is then copied back into A.
/// </summary>
protected override void OnUpdate()
{
    if (!GameSettings.EndlessSettingsInstance.isActivated)
    {
        return;
    }

    var mainCamera = Camera.main;
    var mapSize = GameSettings.MapSettingsInstance.mapSize - 1;
    // Falls back to the origin when no main camera exists.
    var cameraPosition = mainCamera != null ? (float3)mainCamera.transform.position : new float3(0);

    // Convert camera position
    var cameraChunkPosition = ToChunkPosition(cameraPosition, mapSize);

    // View camera position
    Debug.DrawLine(cameraPosition, cameraChunkPosition, Color.blue);

    var chunkPerAxis = GameSettings.EndlessSettingsInstance.chunkPerAxis;

    // Generate neighbours chunk position...
    var neighbours = GenerateNeighbourChunksPosition(cameraChunkPosition, mapSize, chunkPerAxis, Allocator.Temp);
    var chunksB = new NativeHashMap <float3, Entity>(neighbours.Length, Allocator.Temp);
    if (!_initialized)
    {
        // Persistent set A survives across frames; created lazily on first run.
        _chunksA = new NativeHashMap <float3, Entity>(neighbours.Length, Allocator.Persistent);
    }

    // View neighbours position...
    for (var i = 0; i < neighbours.Length; i++)
    {
        Debug.DrawLine(neighbours[i], cameraPosition, Color.green);
    }

    for (var i = 0; i < neighbours.Length; i++)
    {
        var key = neighbours[i];
        if (!_initialized)
        {
            _chunksA.Add(key, CreateChunkEntity(key));
        }
        else if (_chunksA.ContainsKey(key))
        {
            // Still in range: transfer to B so it escapes destruction below.
            chunksB.Add(key, _chunksA[key]);
            _chunksA.Remove(key);
        }
        else
        {
            chunksB.Add(key, CreateChunkEntity(key));
        }
    }
    if (!_initialized)
    {
        _initialized = true;
    }

    // Destroy all entities in chunks A
    EntityManager.DestroyEntity(_chunksA.GetValueArray(Allocator.Temp));

    // Clear chunksA
    _chunksA.Clear();

    // Copy all key value from chunksB to chunkA
    var keyValue = chunksB.GetKeyValueArrays(Allocator.Temp);
    for (var i = 0; i < keyValue.Keys.Length; i++)
    {
        _chunksA.Add(keyValue.Keys[i], keyValue.Values[i]);
    }
}
/// <summary>
/// Prepares this instance for a new pass: captures the command buffer and
/// component-data accessor and discards any state tracked by a previous pass.
/// </summary>
public void Begin(EntityCommandBuffer buffer, ComponentDataFromEntity <T> source)
{
    this.buffer = buffer;
    this.source = source;
    allState.Clear();
}
/// <summary>
/// Resets the container: removes all map entries and used keys, and zeroes
/// the tracked native length.
/// </summary>
public void Clear()
{
    useKeySet.Clear();
    nativeHashMap.Clear();
    nativeLength = 0;
}
// Job entry point: empties the container so it can be refilled.
public void Execute() { container.Clear(); }
/// <summary>
/// Per-frame crowd management: counts agents per cloud inside the window,
/// computes each cloud's desired agent count, spawns/queues the difference,
/// and accumulates per-cloud spawn totals. Jobs are completed synchronously
/// in sequence on the main thread.
/// </summary>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Recreate the persistent per-cloud maps only when the cloud count changed;
    // otherwise just reset them for this frame.
    if (DesiredCloudID2AgentInWindow.Capacity != m_CloudDataGroup.Length * 2)
    {
        DesiredCloudID2AgentInWindow.Dispose();
        DesiredCloudID2AgentInWindow = new NativeHashMap <int, int>(m_CloudDataGroup.Length * 2, Allocator.Persistent);
        CloudID2AgentInWindow.Dispose();
        CloudID2AgentInWindow = new NativeHashMap <int, int>(m_CloudDataGroup.Length * 2, Allocator.Persistent);
    }
    else
    {
        DesiredCloudID2AgentInWindow.Clear();
        CloudID2AgentInWindow.Clear();
    }
    parameterBuffer.Clear();
    SpawnedAgentsInFrame.Clear();

    var cg = ComponentGroups[m_AgentDataGroup.GroupIndex];

    //JobHandle[] jobs = new JobHandle[m_CloudDataGroup.Length];
    // Count the agents currently in the window for each cloud by filtering the
    // agent group on AgentCloudID.
    for (int i = 0; i < m_CloudDataGroup.Length; i++)
    {
        AgentCloudID newCloudID = new AgentCloudID { CloudID = i };
        cg.SetFilter <AgentCloudID>(newCloudID);

        //CurrentInAreaJob newAreajob = new CurrentInAreaJob
        //{
        //    AgentData = cg.GetComponentDataArray<BioCrowds.AgentData>(),
        //    CloudData = m_CloudDataGroup.CloudData,
        //    CloudID2AgentInWindow = CloudID2AgentInWindow.ToConcurrent(),
        //    CloudMarkersMap = m_CellMarkSystem.cloudID2MarkedCellsMap,
        //    Position = cg.GetComponentDataArray<Position>()
        //};
        //jobs[i] = newAreajob.Schedule(m_CloudDataGroup.Length, 1, inputDeps);

        int quantity = cg.CalculateLength();
        //Debug.Log("Cloud " + i + " has " + quantity + " agents");
        CloudID2AgentInWindow.TryAdd(i, quantity);
    }
    //for (int i = 0; i < m_CloudDataGroup.Length; i++)
    //{
    //    jobs[i].Complete();
    //}

    // Window moved since last frame: reset per-cloud spawn accumulators.
    if (_ChangedWindow)
    {
        var resetAcummulators = new ResetCloudAccumulator
        {
            AgentsPerCloud = CloudID2AgentInWindow,
            CloudData = m_CloudDataGroup.CloudData,
            Counter = m_CloudDataGroup.SpawnedAgents
        };
        var resetJob = resetAcummulators.Schedule(m_CloudDataGroup.Length, 1, inputDeps);
        resetJob.Complete();
    }

    // How many agents each cloud *should* have inside the window.
    var desiredJob = new DesiredCloudAgent2CrowdAgentJob
    {
        cellid2pos = m_CellID2PosSystem.cellId2Cellfloat3,
        cloudDensities = m_heatmap.cloudDensities,
        CloudData = m_CloudDataGroup.CloudData,
        CloudMarkersMap = m_CellMarkSystem.cloudID2MarkedCellsMap,
        desiredQuantity = DesiredCloudID2AgentInWindow.ToConcurrent()
    };
    var desiredJobHandle = desiredJob.Schedule(m_CloudDataGroup.Length, 1, inputDeps);
    desiredJobHandle.Complete();

    // Queue spawn parameters for the desired-vs-actual difference per cloud.
    AddDifferencePerCloudJob differenceJob = new AddDifferencePerCloudJob
    {
        buffer = parameterBuffer.ToConcurrent(),
        CloudData = m_CloudDataGroup.CloudData,
        CloudGoal = m_CloudDataGroup.CloudGoal,
        CloudID2AgentInWindow = CloudID2AgentInWindow,
        CloudMarkersMap = m_CellMarkSystem.cloudID2MarkedCellsMap,
        DesiredCloudID2AgentInWindow = DesiredCloudID2AgentInWindow,
        AddedAgentsPerCloud = SpawnedAgentsInFrame.ToConcurrent(),
        Counter = m_CloudDataGroup.SpawnedAgents
    };
    var diffJobHandle = differenceJob.Schedule(m_CloudDataGroup.Length, 1, inputDeps);
    diffJobHandle.Complete();

    // Fold this frame's spawns into the running per-cloud totals.
    CloudAgentAccumulator accumulatorJob = new CloudAgentAccumulator
    {
        AddedAgentsInFramePerCloud = SpawnedAgentsInFrame,
        Counter = m_CloudDataGroup.SpawnedAgents,
        CloudData = m_CloudDataGroup.CloudData
    };
    var accumJobHandle = accumulatorJob.Schedule(m_CloudDataGroup.Length, 1, diffJobHandle);
    accumJobHandle.Complete();

    //Debug.Log("L1 " + parameterBuffer.Length);
    //for(int i = 0; i < m_CloudDataGroup.Length; i++)
    //{
    //    Debug.Log("Spawned Agents in Cloud" + m_CloudDataGroup.CloudData[i].ID + " : " + m_CloudDataGroup.SpawnedAgents[i].Quantity);
    //}

    _ChangedWindow = false;
    return(accumJobHandle);
}
/// <summary>
/// Spatial target-acquisition update. Grows the persistent node/translation
/// maps when the target count exceeds capacity (otherwise just clears them),
/// then schedules the consolidate → engage-nearest → add-target job chain.
/// </summary>
/// <param name="inputDeps">Incoming job dependency.</param>
/// <returns>The combined handle of the scheduled jobs.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var targetCount = m_TargetGroup.CalculateLength();
    if (m_Capacity < targetCount)
    {
        if (m_NodeMap.IsCreated)
        {
            m_NodeMap.Dispose();
        }
        if (m_TranslationMap.IsCreated)
        {
            m_TranslationMap.Dispose();
        }

        // Grow to 1.5x the target count, with a floor of 100.
        // BUG FIX: the original `targetCount + targetCount >> 1` parsed as
        // `(targetCount + targetCount) >> 1` (additive binds tighter than shift),
        // which is exactly targetCount — no growth headroom, forcing a
        // dispose/reallocate on every single increase.
        m_Capacity = math.max(100, targetCount + (targetCount >> 1));
        m_NodeMap = new NativeMultiHashMap <int2, Entity>(m_Capacity, Allocator.Persistent);
        m_TranslationMap = new NativeHashMap <Entity, float3>(m_Capacity, Allocator.Persistent);
    }
    else
    {
        m_NodeMap.Clear();
        m_TranslationMap.Clear();
    }

    var count = m_Group.CalculateLength();
    if (count > 0)
    {
        var entityArray = new NativeArray <Entity>(count, Allocator.TempJob);
        var targetEntityArray = new NativeArray <Entity>(count, Allocator.TempJob);
        var engagedArray = new NativeArray <@bool>(count, Allocator.TempJob);
        var commandBufferSystem = World.GetExistingManager <BeginSimulationEntityCommandBufferSystem>();
        var commandBuffer = commandBufferSystem.CreateCommandBuffer();

        // 1) Bucket entities into spatial nodes.
        inputDeps = new ConsolidateNodesJob
        {
            NodeMap = m_NodeMap.ToConcurrent(),
            TranslationMap = m_TranslationMap.ToConcurrent(),
            NodeSize = NodeSize
        }.Schedule(this, inputDeps);

        // 2) Find the nearest valid (hostile, alive) target per entity.
        inputDeps = new EngageNearestTargetJob
        {
            NodeMap = m_NodeMap,
            TranslationMap = m_TranslationMap,
            EntityArray = entityArray,
            TargetArray = targetEntityArray,
            EngagedArray = engagedArray,
            TargetFromEntity = GetComponentDataFromEntity <Target>(),
            FactionFromEntity = GetComponentDataFromEntity <Faction>(true),
            DeadFromEntity = GetComponentDataFromEntity <Dead>(true),
            NodeSize = NodeSize
        }.Schedule(this, inputDeps);

        // 3) Record the chosen targets via the command buffer.
        inputDeps = new AddTargetJob
        {
            CommandBuffer = commandBuffer.ToConcurrent(),
            EntityArray = entityArray,
            TargetArray = targetEntityArray,
            EngagedArray = engagedArray,
            TargetFromEntity = GetComponentDataFromEntity <Target>()
        }.Schedule(count, 64, inputDeps);

        commandBufferSystem.AddJobHandleForProducer(inputDeps);
    }
    return inputDeps;
}
/// <summary>
/// A* pathfinding job for one entity. Uses the entity's SearchNode dynamic
/// buffer as a binary min-heap of open nodes; OpenSet/CloseSet track
/// membership by coordinate hash. On success fills the PathNode buffer by
/// walking predecessors back from the goal.
/// NOTE(review): CloseSet appears to be populated inside RemoveFirst (not
/// visible here) — confirm against that helper.
/// </summary>
public void Execute(Entity entity, int index, ref Path path)
{
    if (!path.InProgress)
    {
        return;
    }

    CloseSet.Clear();
    OpenSet.Clear();

    //setp 1
    DynamicBuffer <SearchNode> searchNodeBuffer = SearchNodeBuffer[entity];
#if UNITY_EDITOR
    DynamicBuffer <DebugNode> debugNodeBuffer = DebugNodeBuffer[entity];
#endif

    //step2: seed the heap with the start node.
    searchNodeBuffer.Add(new SearchNode
    {
        Coord = path.StartCoord,
        HeapIndex = 0,
        PrevNode = path.StartCoord,
        HeuristicDistance = GetHeuristicPathLength(path.StartCoord, path.GoalCoord),
        DistanceFromStart = 0
    });
    OpenSet.TryAdd(path.StartCoord.GetHashCode(), path.StartCoord);

    while (searchNodeBuffer.Length > 0)
    {
        //step 3: pop the lowest-cost node from the heap.
        int2 currentNode = RemoveFirst(ref searchNodeBuffer, out int2 prevCoord, out int prevDistanceFromStart);
#if UNITY_EDITOR
        debugNodeBuffer.Add(new DebugNode { Coord = currentNode, PrevCoord = prevCoord });
#endif
        //step 4: goal reached — reconstruct the path via CloseSet predecessors.
        if (currentNode.Equals(path.GoalCoord))
        {
            DynamicBuffer <PathNode> pathNodeBuffer = PathNodeBuffer[entity];
            pathNodeBuffer.Add(new PathNode { Coord = currentNode });
            int2 pathNode = currentNode;
            while (!path.StartCoord.Equals(pathNode))
            {
                if (CloseSet.TryGetValue(pathNode.GetHashCode(), out int2 prevNode))
                {
                    pathNodeBuffer.Add(new PathNode { Coord = prevNode });
                    pathNode = prevNode;
                }
                else
                {
                    // Broken predecessor chain — give up on this path.
                    path.InProgress = false;
                    return;
                }
            }
            path.Reachable = true;
            path.InProgress = false;
            searchNodeBuffer.Clear();
            OpenSet.Clear();
            CloseSet.Clear();
            return;
        }

        //step 5: expand neighbours through this node's links.
        if (Nodes.TryGetValue(currentNode, out NodeWithEntity nodeWithEntity))
        {
            DynamicBuffer <NodeLink> nodeLinkBuffer = NodeLinkBuffer[nodeWithEntity.Entity];

            // calculate heuristic distance for neighbour nodes
            // add neighbour nodes to open set buffer (binary heap)
            for (var i = 0; i < nodeLinkBuffer.Length; i++)
            {
                var nodeLink = nodeLinkBuffer[i];
                if (Nodes.TryGetValue(nodeLink.LinkedEntityCoord, out NodeWithEntity neighbourNodeWithEntity))
                {
                    // Skip unwalkable or already-closed neighbours.
                    if (!neighbourNodeWithEntity.Node.Walkable || CloseSet.ContainsKey(neighbourNodeWithEntity.Node.Coord.GetHashCode()))
                    {
                        continue;
                    }
                }
                else
                {
                    continue;
                }

                // if open set not contain node, add it
                if (!OpenSet.ContainsKey(nodeLink.LinkedEntityCoord.GetHashCode()))
                {
                    OpenSet.TryAdd(nodeLink.LinkedEntityCoord.GetHashCode(), nodeLink.LinkedEntityCoord);
                    int heapIndex = searchNodeBuffer.Length;
                    searchNodeBuffer.Add(new SearchNode
                    {
                        HeapIndex = heapIndex,
                        Coord = nodeLink.LinkedEntityCoord,
                        HeuristicDistance = GetHeuristicPathLength(nodeLink.LinkedEntityCoord, path.GoalCoord),
                        PrevNode = currentNode,
                        DistanceFromStart = prevDistanceFromStart + GetHeuristicPathLength(nodeLink.LinkedEntityCoord, currentNode)
                    });
                    // Restore the min-heap property after appending.
                    SortUp(ref searchNodeBuffer, heapIndex);
                }
            }
        }
    }

    // Heap exhausted without reaching the goal: unreachable.
    path.InProgress = false;
}
/// <summary>
/// Per-frame bubble simulation: snapshots bubbles, applies gravity on a
/// throttled schedule, rebuilds the bubble lookup map, then runs movement,
/// coloring and (optionally) absorption/collision jobs.
/// </summary>
/// <param name="handle">Incoming job dependency.</param>
/// <returns>The final job handle (collision job when absorb is enabled).</returns>
protected override JobHandle OnUpdate(JobHandle handle)
{
    bubCount = boublesQuery.CalculateEntityCount();
    var sm = GetSingleton <SpawnerRndAreaComp>();

    // Nothing to simulate until settings/prefab are ready and bubbles exist.
    if (BoubleAuth._grad == 0 || bubCount == 0 || spawnSystem._prefabEntity == Entity.Null)
    {
        return(handle);
    }
    if (sizeToGrav == 0)
    {
        sizeToGrav = .5f / BoubleAuth._grad;
    }

    bubMap.Clear();
    dt = Time.DeltaTime;

    // Snapshot current bubble state into bubList.
    new InitBubs { bubs = bubList }.Schedule(this, handle).Complete();

    int workers = 14;
    int countPerJob = bubCount / workers;
    // BUG FIX: with fewer bubbles than workers the integer division yields 0,
    // which is not a valid innerloopBatchCount for IJobParallelFor.Schedule.
    if (countPerJob < 1)
    {
        countPerJob = 1;
    }

    if (gravWaitTime >= spawnSystem.sd.gravUpdateDelay)
    {
        // Gravity step, scaled by the frames elapsed since the last gravity update.
        new GravJob
        {
            t = framesSinceLastGrav > 0 ? dt * framesSinceLastGrav : dt,
            bubsIn = bubList,
            bubsOut = bubList2,
            numOfBubs = bubCount,
            gravMultiplier = bd.gravityMult,
            gravDivider = bd.gravityDiv
        }.Schedule(bubCount, countPerJob, handle).Complete();
        gravWaitTime = 0;
        for (int i = 0; i < bubCount; i++)
        {
            var item = bubList2[i];
            bubMap.TryAdd(item.entity, item);
        }
        framesSinceLastGrav = 0;
    }
    else
    {
        // Skip gravity this frame; accumulate elapsed time and frame count.
        gravWaitTime += Time.DeltaTime;
        framesSinceLastGrav++;
        for (int i = 0; i < bubCount; i++)
        {
            var item = bubList[i];
            bubMap.TryAdd(item.entity, item);
        }
    }

    new UpdateBubsJob
    {
        maxRad = spawnSystem.sd.cylinderRadius + bd.boundryBuffer,
        maxHeight = spawnSystem.sd.cylinderHeight + bd.boundryBuffer,
        sizeToGrav = sizeToGrav,
        bubMap = bubMap,
        bounce = bd.bounce,
        absorb = bd.absorb,
        maxScale = bd.maxScale
    }.Schedule(this, handle).Complete();

    new ChangeColors { sm = sm }.Schedule(this, handle).Complete();

    if (bd.absorb)
    {
        var cj = new CollisionJob
        {
            ecb = _ecbs.CreateCommandBuffer().ToConcurrent(),
            bubs = bubMap,
            prefabEntity = spawnSystem._prefabEntity,
            scaleInc = bd.scaleInc,
            explosionLifeTime = spawnSystem.sd.explosionLifeTime,
            explosionEndScale = spawnSystem.sd.explosionEndScale,
            explode = spawnSystem.sd.explode
        };
        handle = cj.Schedule(this, handle);
        collisionSystem.AddDependingJobHandle(handle);
        _ecbs.AddJobHandleForProducer(handle);
    }
    return(handle);
}
/// <summary>
/// Board generation pipeline: resets and resizes the shared maps, then
/// schedules floor/homebase generation, hole punching, wall generation,
/// direction-map baking, board building and final cleanup as a dependent
/// job chain.
/// </summary>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    //var count = m_GeneratorQuery.CalculateEntityCount();
    //if (count <= 0)
    //    return inputDeps;

    // Get the generator data
    var generator = m_GeneratorQuery.GetSingleton <LbBoardGenerator>();

    // Clean up our structs
    m_FloorMap.Clear();
    m_WallMap.Clear();
    m_HomebaseMap.Clear();
    m_DirectionMap.Clear();

    // Resize them
    var size = generator.SizeX * generator.SizeY;
    m_FloorMap.Capacity = size;
    m_DirectionMap.Capacity = size;
    m_DirectionMap.Resize(size, NativeArrayOptions.UninitializedMemory);
    // Wall grid has one extra row/column of edges.
    m_WallMap.Capacity = (generator.SizeX + 1) * (generator.SizeY + 1);

    //
    // Cell, Homebases and holes
    var floorCommandBuffer = m_Barrier.CreateCommandBuffer();
    var floorJobHandle = new GenerateFloorJob()
    {
        Generator = generator,
        Seed = m_Random.NextUInt(),
        CommandBuffer = floorCommandBuffer,
        FloorMap = m_FloorMap,
    }.Schedule(inputDeps);

    var homebaseHandle = new GenerateHomebasesJob()
    {
        Generator = generator,
        Seed = m_Random.NextUInt(),
        CommandBuffer = m_Barrier.CreateCommandBuffer(),
        HomebaseMap = m_HomebaseMap,
    }.Schedule(inputDeps);

    // Holes must come after both floor and homebases (they read both maps).
    var holeJobHandle = new GenerateHoleJob()
    {
        Generator = generator,
        Seed = m_Random.NextUInt(),
        CommandBuffer = floorCommandBuffer,
        FloorMap = m_FloorMap,
        HomebaseMap = m_HomebaseMap,
    }.Schedule(JobHandle.CombineDependencies(floorJobHandle, homebaseHandle));

    //
    // Walls
    var wallJobHandle = new GenerateWallsJob()
    {
        Generator = generator,
        Seed = m_Random.NextUInt(),
        CommandBuffer = m_Barrier.CreateCommandBuffer(),
        WallMap = m_WallMap,
    }.Schedule(inputDeps);

    //
    // DirectionMap and Board
    var directionMapJob = new DirectionMapJob()
    {
        Generator = generator,
        FloorMap = m_FloorMap,
        WallMap = m_WallMap,
        HomebaseMap = m_HomebaseMap,
        DirectionMap = m_DirectionMap.AsParallelWriter(),
    }.Schedule(generator.SizeY, 1, JobHandle.CombineDependencies(holeJobHandle, wallJobHandle));

    var boardJob = new BoardBuilderJob()
    {
        Generator = generator,
        CommandBuffer = m_Barrier.CreateCommandBuffer(),
        DirectionMap = m_DirectionMap,
    }.Schedule(directionMapJob);

    //
    // Clean UP
    var cleanUpHandle = new CleanUpGenerationJob()
    {
        CommandBuffer = m_Barrier.CreateCommandBuffer().ToConcurrent(),
    }.Schedule(this, boardJob);

    m_Barrier.AddJobHandleForProducer(cleanUpHandle);
    return(cleanUpHandle);
}
// Drops all cached build groups so they will be rebuilt on next use.
public void InvalidateAllGroups() { m_buildGroups.Clear(); }
/// <summary>
/// Drops every cached instance-id/entity association, in both directions.
/// </summary>
public void Clear()
{
    m_EntityToInstanceId.Clear();
    m_InstanceIdToEntity.Clear();
}
/// <summary>
/// Builds a hierarchy search filter from name filters and component filters.
/// For each component filter, collects the matching component type indices
/// (value = true means "exclude"), then batches them into Any/None entity
/// queries of at most 4 component types each. A filter that matches nothing
/// gets a deliberately-unsatisfiable None(Entity) query.
/// </summary>
internal static HierarchySearchFilter GenerateSearchFilter(EntityManager entityManager, List <NameFilter> names, List <ComponentFilter> components)
{
    var queries = new List <ComplexEntityQuery>();
    using (var map = new NativeHashMap <int, bool>(components.Count, Allocator.Temp))
    {
        foreach (var filter in components)
        {
            // map: type index -> inverted? (true = must NOT have, false = may have)
            map.Clear();
            foreach (var type in FilterUtility.AllTypes)
            {
                if (filter.Inverted)
                {
                    if (!filter.Keep(type.Name))
                    {
                        map.TryAdd(type.TypeIndex, true);
                    }
                }
                else
                {
                    if (filter.Keep(type.Name))
                    {
                        map.TryAdd(type.TypeIndex, false);
                    }
                }
            }

            var query = new ComplexEntityQuery();
            if (map.Length > 0)
            {
                using (var types = map.GetKeyArray(Allocator.TempJob))
                {
                    var all = new NativeList <ComponentType>(0, Allocator.TempJob);
                    var none = new NativeList <ComponentType>(0, Allocator.TempJob);
                    try
                    {
                        // Split matched types into included ("all") and excluded ("none").
                        for (var i = 0; i < types.Length; ++i)
                        {
                            if (map[types[i]])
                            {
                                none.Add(TypeManager.GetTypeInfo(types[i]).Type);
                            }
                            else
                            {
                                var typeInfo = TypeManager.GetTypeInfo(types[i]);
                                all.Add(typeInfo.Type);
                            }
                        }

                        // Batch into Any-queries of up to 4 types each.
                        for (var i = 0; i < all.Length; i += 4)
                        {
                            var count = Mathf.Min(all.Length - i, 4);
                            var t = new ComponentType[count];
                            for (var j = 0; j < count; ++j)
                            {
                                t[j] = all[i + j];
                            }
                            query.AddAnyQuery(entityManager.CreateEntityQuery(new EntityQueryDesc
                            {
                                Any = t,
                                Options = EntityQueryOptions.IncludeDisabled
                            }));
                        }

                        // Batch into None-queries of up to 4 types each.
                        for (var i = 0; i < none.Length; i += 4)
                        {
                            var count = Mathf.Min(none.Length - i, 4);
                            var t = new ComponentType[count];
                            for (var j = 0; j < count; ++j)
                            {
                                t[j] = none[i + j];
                            }
                            query.AddNoneQuery(entityManager.CreateEntityQuery(new EntityQueryDesc
                            {
                                None = t,
                                Options = EntityQueryOptions.IncludeDisabled
                            }));
                        }
                    }
                    finally
                    {
                        all.Dispose();
                        none.Dispose();
                    }
                }
            }
            else
            {
                // No type matched: None(Entity) can never be satisfied, so the
                // filter intentionally matches no entities.
                query.AddNoneQuery(entityManager.CreateEntityQuery(new EntityQueryDesc
                {
                    None = new ComponentType[] { typeof(Entity) },
                    Options = EntityQueryOptions.IncludeDisabled
                }));
            }
            queries.Add(query);
        }
    }
    return(new HierarchySearchFilter(entityManager, names, queries.ToArray()));
}
/// <summary>
/// Per-frame ghost spawn processing: destroys entities invalidated last frame,
/// promotes delayed/predicted ghosts whose time has come, creates entities for
/// newly received ghosts, and schedules the jobs that copy snapshot data into
/// them and clean up afterwards.
/// </summary>
/// <param name="inputDeps">Handle for jobs this system depends on.</param>
/// <returns>Handle of the final cleanup job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Remove entities invalidated during the previous frame before creating replacements.
    EntityManager.DestroyEntity(m_InvalidGhosts);
    m_InvalidGhosts.Clear();

    var targetTick = NetworkTimeSystem.interpolateTargetTick;

    // Move delayed ghosts whose spawn tick is no longer in the future into the
    // current list; their existing entities are queued for destruction so they
    // can be replaced with fully set-up ones.
    m_CurrentDelayedSpawnList.Clear();
    while (m_DelayedSpawnQueue.Count > 0 && !SequenceHelpers.IsNewer(m_DelayedSpawnQueue.Peek().spawnTick, targetTick))
    {
        var ghost = m_DelayedSpawnQueue.Dequeue();
        GhostEntity gent;
        if (m_GhostMap.TryGetValue(ghost.ghostId, out gent))
        {
            m_CurrentDelayedSpawnList.Add(ghost);
            m_InvalidGhosts.Add(gent.entity);
        }
    }

    // Predicted spawns are drained unconditionally (no tick gate).
    m_CurrentPredictedSpawnList.Clear();
    while (m_PredictedSpawnQueue.Count > 0)
    {
        var ghost = m_PredictedSpawnQueue.Dequeue();
        GhostEntity gent;
        if (m_GhostMap.TryGetValue(ghost.ghostId, out gent))
        {
            m_CurrentPredictedSpawnList.Add(ghost);
            m_InvalidGhosts.Add(gent.entity);
        }
    }

    // Entity arrays are always allocated (the clear job below consumes them),
    // but entities are only created when there is actually something to spawn.
    var delayedEntities = default(NativeArray <Entity>);
    delayedEntities = new NativeArray <Entity>(m_CurrentDelayedSpawnList.Length, Allocator.TempJob);
    if (m_CurrentDelayedSpawnList.Length > 0)
    {
        EntityManager.CreateEntity(m_Archetype, delayedEntities);
    }
    var predictedEntities = default(NativeArray <Entity>);
    predictedEntities = new NativeArray <Entity>(m_CurrentPredictedSpawnList.Length, Allocator.TempJob);
    if (m_CurrentPredictedSpawnList.Length > 0)
    {
        EntityManager.CreateEntity(m_PredictedArchetype, predictedEntities);
    }
    var predictSpawnRequests = m_SpawnRequestGroup.ToEntityArray(Allocator.TempJob);
    var predictSpawnEntities = new NativeArray <Entity>(predictSpawnRequests.Length, Allocator.TempJob);
    if (predictSpawnEntities.Length > 0)
    {
        EntityManager.CreateEntity(m_PredictedArchetype, predictSpawnEntities);
    }
    var newEntities = default(NativeArray <Entity>);
    newEntities = new NativeArray <Entity>(m_NewGhosts.Length, Allocator.TempJob);
    if (m_NewGhosts.Length > 0)
    {
        EntityManager.CreateEntity(m_InitialArchetype, newEntities);
    }

    // Copy snapshot data into the re-created delayed (interpolated) entities.
    if (m_CurrentDelayedSpawnList.Length > 0)
    {
        var delayedjob = new DelayedSpawnJob { entities = delayedEntities, delayedGhost = m_CurrentDelayedSpawnList, snapshotFromEntity = GetBufferFromEntity <T>(), ghostMap = m_GhostMap, ghostType = GhostType };
        inputDeps = delayedjob.Schedule(inputDeps);
        inputDeps = UpdateNewInterpolatedEntities(delayedEntities, inputDeps);
    }
    // FIXME: current and predicted can run in parallel I think
    if (m_CurrentPredictedSpawnList.Length > 0)
    {
        var delayedjob = new DelayedSpawnJob { entities = predictedEntities, delayedGhost = m_CurrentPredictedSpawnList, snapshotFromEntity = GetBufferFromEntity <T>(), ghostMap = m_GhostMap, ghostType = GhostType };
        inputDeps = delayedjob.Schedule(inputDeps);
        inputDeps = UpdateNewPredictedEntities(predictedEntities, inputDeps);
    }

    // Handle client-side predicted spawn requests.
    if (predictSpawnRequests.Length > 0)
    {
        var spawnJob = new PredictSpawnJob { requests = predictSpawnRequests, entities = predictSpawnEntities, snapshotFromEntity = GetBufferFromEntity <T>(), commandBuffer = m_Barrier.CreateCommandBuffer(), predictSpawnGhosts = m_PredictSpawnGhosts };
        inputDeps = spawnJob.Schedule(inputDeps);
        inputDeps = UpdateNewPredictedEntities(predictSpawnEntities, inputDeps);
    }

    // Copy initial state for freshly received ghosts; predictionMask marks which
    // of them should be treated as predicted rather than interpolated.
    m_PredictionSpawnCleanupMap.Clear();
    if (m_NewGhosts.Length > 0)
    {
        if (m_PredictionSpawnCleanupMap.Capacity < m_NewGhosts.Length)
        {
            m_PredictionSpawnCleanupMap.Capacity = m_NewGhosts.Length;
        }
        var predictionMask = new NativeArray <int>(m_NewGhosts.Length, Allocator.TempJob);
        inputDeps = MarkPredictedGhosts(m_NewGhosts, predictionMask, m_PredictSpawnGhosts, inputDeps);
        var job = new CopyInitialStateJob { entities = newEntities, newGhosts = m_NewGhosts, newGhostIds = m_NewGhostIds, snapshotFromEntity = GetBufferFromEntity <T>(), ghostMap = m_ConcurrentGhostMap, ghostType = GhostType, pendingSpawnQueue = m_ConcurrentDelayedSpawnQueue, predictedSpawnQueue = m_ConcurrentPredictedSpawnQueue, predictionMask = predictionMask, predictionSpawnGhosts = m_PredictSpawnGhosts, predictionSpawnCleanupMap = m_PredictionSpawnCleanupMap.ToConcurrent(), commandBuffer = m_Barrier.CreateCommandBuffer().ToConcurrent() };
        inputDeps = job.Schedule(newEntities.Length, 8, inputDeps);
    }

    // Clean up predicted-spawn bookkeeping once the interpolation target passes.
    var spawnClearJob = new PredictSpawnCleanupJob { predictionSpawnCleanupMap = m_PredictionSpawnCleanupMap, predictionSpawnGhosts = m_PredictSpawnGhosts, interpolationTarget = targetTick, commandBuffer = m_Barrier.CreateCommandBuffer(), replicatedEntityComponentType = ComponentType.ReadWrite <ReplicatedEntityComponent>() };
    inputDeps = spawnClearJob.Schedule(inputDeps);
    m_Barrier.AddJobHandleForProducer(inputDeps);

    // Final job disposes/clears the per-frame arrays and lists.
    var clearJob = new ClearNewJob { entities = newEntities, visibleEntities = delayedEntities, visiblePredictedEntities = predictedEntities, newGhosts = m_NewGhosts, newGhostIds = m_NewGhostIds, predictSpawnEntities = predictSpawnEntities, predictSpawnRequests = predictSpawnRequests };
    return(clearJob.Schedule(inputDeps));
}
/// <summary>
/// Resets all search state so the structure can be reused for a new query.
/// </summary>
public void Clear()
{
    // Empty each container; none of them depend on the others.
    _frontier.Clear();
    _costs.Clear();
    _parents.Clear();
}
/// <summary>
/// Gathers all profiler samples belonging to this job's thread, copies the stack
/// frames they reference into a thread-local table sorted by depth, and remaps
/// every caller/trace index from the global frame table to the local one.
/// </summary>
public void Execute()
{
    {
        var collectedFrames = new NativeList <FrameSortData>(Allocator.Temp);
        // Maps global frame index -> thread-local index (-1 while unassigned;
        // also doubles as a "seen" set during collection).
        var oldToNewFrameIndex = new NativeHashMap <int, int>(StackFrames.Length, Allocator.Temp);
        var inSamples = (SampleData *)Samples.GetUnsafeReadOnlyPtr();
        var inFrames = (StackFrameData *)StackFrames.GetUnsafeReadOnlyPtr();

        // collect all samples in this thread and remap the stack frame indices
        for (int s = 0, n = Samples.Length; s < n; s++)
        {
            if (inSamples[s].ThreadIdx == Thread)
            {
                int frameIndex = inSamples[s].StackTrace;
                // -1 means "no stack trace"; the TryGetValue guards against duplicates.
                if (frameIndex != -1 && !oldToNewFrameIndex.TryGetValue(frameIndex, out int _))
                {
                    oldToNewFrameIndex.Add(frameIndex, -1);
                    collectedFrames.Add(new FrameSortData { Depth = inFrames[frameIndex].Depth, Index = frameIndex });
                }
                SamplesInThread.Add(inSamples[s]);
            }
        }

        // collect all remaining stack frames and remap the caller indices
        // NB this loop adds new entries to FramesInThread during the iteration
        for (int s = 0; s < collectedFrames.Length; s++)
        {
            int caller = inFrames[collectedFrames[s].Index].CallerStackFrame;
            if (caller != -1 && !oldToNewFrameIndex.TryGetValue(caller, out int _))
            {
                oldToNewFrameIndex.Add(caller, -1);
                collectedFrames.Add(new FrameSortData { Depth = inFrames[caller].Depth, Index = caller });
            }
        }

        // sort all frames by their depth so that the lowest depth stack frames are at the beginning
        NativeSortExtension.Sort((FrameSortData *)collectedFrames.GetUnsafePtr(), collectedFrames.Length, new FrameComp());

        // map old indices to new indices
        oldToNewFrameIndex.Clear();
        for (int i = 0; i < collectedFrames.Length; i++)
        {
            oldToNewFrameIndex.Add(collectedFrames[i].Index, i);
        }

        {
            // copy stack frames to output and adjust caller indices
            FramesInThread.ResizeUninitialized(collectedFrames.Length);
            var outputPtr = (StackFrameSamples *)FramesInThread.GetUnsafePtr();
            for (int i = 0, n = collectedFrames.Length; i < n; i++)
            {
                outputPtr[i] = new StackFrameSamples { FrameData = StackFrames[collectedFrames[i].Index], };
                // Rewrite the caller reference in place through a ref local.
                ref int caller = ref outputPtr[i].FrameData.CallerStackFrame;
                if (oldToNewFrameIndex.TryGetValue(caller, out var newCaller)) { caller = newCaller; }
            }
        }
        {
            // adjust stack frame references in the samples
            var outputPtr = (SampleData *)SamplesInThread.GetUnsafePtr();
            for (int i = 0, n = SamplesInThread.Length; i < n; i++)
            {
                ref int trace = ref outputPtr[i].StackTrace;
                if (oldToNewFrameIndex.TryGetValue(trace, out var newTrace)) { trace = newTrace; }
            }
        }
/// <summary>
/// Lays out cell positions for any playfields that have not yet been initialized
/// and schedules the job chain that creates and finalizes their cell entities.
/// </summary>
/// <param name="inputDeps">Handle for jobs this system depends on.</param>
/// <returns>Handle of the last job in the scheduled chain.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // One-time setup on the first update.
    if (!initialized)
    {
        Initialize();
        initialized = true;
    }

    // Nothing to do when no uninitialized playfield exists.
    if (uninitializedPlayfieldGroup.CalculateLength() == 0)
    {
        return (inputDeps);
    }

    resultPositions.Clear();
    playfieldIndex.Clear();

    SharedComponentDataArray <Playfield> playfieldArray = uninitializedPlayfieldGroup.GetSharedComponentDataArray <Playfield>();
    EntityArray entityArray = uninitializedPlayfieldGroup.GetEntityArray();
    // FIX: removed an unused local ("Playfield playfield = entityManager.GetSharedComponentData<Playfield>(entityArray[0]);")
    // that performed a redundant lookup and was never read.

    // Map each playfield's authored index to its position in playfieldArray
    // so the creation job can look it up.
    for (int i = 0; i < playfieldArray.Length; i++)
    {
        playfieldIndex.TryAdd(playfieldArray[i].index, i);
    }

    // Schedule one cell-position job per playfield and combine their handles.
    JobHandle combinedHandle = new JobHandle();
    int totalNumCells = 0;
    for (int i = 0; i < playfieldArray.Length; i++)
    {
        int size = playfieldArray[i].width * playfieldArray[i].height;
        totalNumCells += size;
        JobHandle pHandle = new PlayfieldCellPositionJob
        {
            width = playfieldArray[i].width,
            height = playfieldArray[i].height,
            playfieldIndex = i,
            cellDatas = positions.ToConcurrent(),
        }.Schedule(size, 64, inputDeps);
        combinedHandle = i == 0 ? pHandle : JobHandle.CombineDependencies(combinedHandle, pHandle);
    }

    // Drain the concurrent queue into a list the creation job can index into.
    JobHandle qlHandle = new NativeQueueToNativeListJob <CellData>
    {
        queue = positions,
        out_list = resultPositions,
    }.Schedule(combinedHandle);

    // Create one cell entity per computed position.
    JobHandle pijHandle = new PlayfieldCellCreationJob
    {
        commandBuffer = initBarrier.CreateCommandBuffer().ToConcurrent(),
        positions = resultPositions,
        playfieldCellArchetype = playfieldCellArchetype,
        playfieldArray = playfieldArray,
        playfieldIndex = playfieldIndex,
    }.Schedule(totalNumCells, 64, qlHandle);

    // Finalize each playfield so it is not picked up again next frame.
    JobHandle pfjHandle = new PlayfieldFinalizeJob
    {
        commandBuffer = finalizeBarrier.CreateCommandBuffer().ToConcurrent(),
        playfields = entityArray,
    }.Schedule(entityArray.Length, 64, pijHandle);

    return (pfjHandle);
}
/// <summary>
/// Reinitializes this world for the given body counts: resizes storage,
/// resets the broadphase, and drops all entity-to-body-index associations.
/// </summary>
/// <param name="numStaticBodies">Number of static bodies to reserve room for.</param>
/// <param name="numDynamicBodies">Number of dynamic bodies to reserve room for.</param>
public void Reset(int numStaticBodies, int numDynamicBodies)
{
    // Capacity covers statics and dynamics together.
    var totalBodies = numStaticBodies + numDynamicBodies;
    SetCapacity(totalBodies);
    Broadphase.Reset(numStaticBodies, numDynamicBodies);
    EntityBodyIndexMap.Clear();
}
/// <summary>
/// Builds chunk mesh geometry with greedy meshing: for each of the six face
/// directions it sweeps depth slices, merges runs of voxels with identical type
/// and matching face lighting first along the direction-aligned Y axis (height)
/// and then along the X axis (width), emitting one quad per merged rectangle.
/// </summary>
public void Execute()
{
    // Tracks voxels already covered by an emitted quad in the current slice.
    NativeHashMap <int3, Empty> hashMap = new NativeHashMap <int3, Empty>(chunkSize * chunkSize, Allocator.Temp);
    for (int direction = 0; direction < 6; direction++)
    {
        for (int depth = 0; depth < chunkSize; depth++)
        {
            for (int x = 0; x < chunkSize; x++)
            {
                // y advances by the merged quad height, not by 1.
                for (int y = 0; y < chunkSize;)
                {
                    // Compose the grid position from the direction-aligned axes.
                    int3 gridPosition = new int3 { [DirectionAlignedX[direction]] = x, [DirectionAlignedY[direction]] = y, [DirectionAlignedZ[direction]] = depth };
                    Voxel voxel = voxels[VoxelUtil.To1DIndex(gridPosition, chunkSize)];

                    // Air produces no face.
                    if (voxel.data == Voxel.VoxelType.Air) { y++; continue; }
                    // Already merged into a previous quad.
                    if (hashMap.ContainsKey(gridPosition)) { y++; continue; }

                    // The face is only visible when the neighbor in this
                    // direction passes the transparency check.
                    int3 neighborPosition = gridPosition + VoxelDirectionOffsets[direction];
                    if (TransparencyCheck(voxels, neighborPosition, chunkSize)) { y++; continue; }

                    VoxelLight light = lightData[VoxelUtil.To1DIndex(gridPosition, chunkSize)];
                    hashMap.TryAdd(gridPosition, new Empty());

                    // Grow the quad along Y while voxel type and face lighting match
                    // and the next voxel is not yet covered.
                    int height;
                    for (height = 1; height + y < chunkSize; height++)
                    {
                        int3 nextPosition = gridPosition;
                        nextPosition[DirectionAlignedY[direction]] += height;
                        Voxel nextVoxel = voxels[VoxelUtil.To1DIndex(nextPosition, chunkSize)];
                        VoxelLight nextLight = lightData[VoxelUtil.To1DIndex(nextPosition, chunkSize)];
                        if (nextVoxel.data != voxel.data) { break; }
                        if (!nextLight.CompareFace(light, direction)) { break; }
                        if (hashMap.ContainsKey(nextPosition)) { break; }
                        hashMap.TryAdd(nextPosition, new Empty());
                    }

                    // Grow along X: a new column is accepted only if every cell
                    // over the full height matches.
                    bool isDone = false;
                    int width;
                    for (width = 1; width + x < chunkSize; width++)
                    {
                        for (int dy = 0; dy < height; dy++)
                        {
                            int3 nextPosition = gridPosition;
                            nextPosition[DirectionAlignedX[direction]] += width;
                            nextPosition[DirectionAlignedY[direction]] += dy;
                            Voxel nextVoxel = voxels[VoxelUtil.To1DIndex(nextPosition, chunkSize)];
                            VoxelLight nextLight = lightData[VoxelUtil.To1DIndex(nextPosition, chunkSize)];
                            if (nextVoxel.data != voxel.data || hashMap.ContainsKey(nextPosition) || !nextLight.CompareFace(light, direction)) { isDone = true; break; }
                        }
                        if (isDone) { break; }
                        // Whole column matched: mark it as consumed.
                        for (int dy = 0; dy < height; dy++)
                        {
                            int3 nextPosition = gridPosition;
                            nextPosition[DirectionAlignedX[direction]] += width;
                            nextPosition[DirectionAlignedY[direction]] += dy;
                            hashMap.TryAdd(nextPosition, new Empty());
                        }
                    }

                    AddQuadByDirection(direction, voxel.data, light, width, height, gridPosition, counter, vertices, normals, uvs, colors, indices);
                    y += height;
                }
            }
            // Coverage tracking is per depth slice: the next slice has a different
            // depth coordinate, so the map is emptied to stay small.
            hashMap.Clear();
        }
    }
    hashMap.Dispose();
}
/// <summary>
/// Converts authoring shape components into collider blob assets in several
/// profiled passes: collect inputs, hash them, register/create blobs only for
/// hashes not already computed, convert hulls and meshes, then clear all
/// per-update state.
/// </summary>
protected override void OnUpdate()
{
    var shapeCount = m_ShapeQuery.CalculateEntityCount();
    if (shapeCount == 0) { return; }

    // First pass
    Profiler.BeginSample("Collect Inputs from Authoring Components");
    using (var shapeEntities = m_ShapeQuery.ToEntityArray(Allocator.TempJob))
    {
        foreach (var shapeEntity in shapeEntities)
        {
            var shape = EntityManager.GetComponentObject <T>(shapeEntity);
            GetInputDataFromAuthoringComponent(shape);
        }
    }
    Profiler.EndSample();

    // Second pass
    Profiler.BeginSample("Generate Hashes for Inputs");
    var hashes = new NativeArray <Hash128>(shapeCount, Allocator.TempJob);
    GenerateShapesHash(hashes);
    Profiler.EndSample();

    // Third pass
    Profiler.BeginSample("Determine New Colliders to Create");
    var shapeIndicesNeedingNewBlobs = new NativeArray <int>(shapeCount, Allocator.TempJob);
    var numNewBlobAssets = 0;
    // Parse all the entries, associate the computed hash with its GameObject, check if we need to compute the BlobAsset
    for (var i = 0; i < shapeCount; i++)
    {
        var hash = hashes[i];
        // Write the hash back into the stored computation data; these are value
        // types, so the copies must be written back explicitly.
        var shapeData = m_ShapeComputationData[i];
        var instance = shapeData.Instance;
        instance.Hash = hash;
        shapeData.Instance = instance;
        m_ShapeComputationData[i] = shapeData;

        var convertedIndex = shapeData.Instance.ConvertedAuthoringComponentIndex;
        var gameObject = m_EndColliderConversionSystem.GetConvertedAuthoringComponent(convertedIndex).gameObject;
        BlobComputationContext.AssociateBlobAssetWithUnityObject(hash, gameObject);

        // Only schedule blob computation for hashes not seen before.
        if (BlobComputationContext.NeedToComputeBlobAsset(hash))
        {
            BlobComputationContext.AddBlobAssetToCompute(hash, 0);
            switch (shapeData.ShapeType)
            {
                case ShapeType.ConvexHull:
                case ShapeType.Mesh:
                    // jobs already registered when input data were collected
                    break;
                default:
                    shapeIndicesNeedingNewBlobs[numNewBlobAssets++] = i;
                    break;
            }
        }
        m_BuildCompoundsSystem.SetLeafDirty(instance);

        // Detect re-parenting of a shape (the shape has a different body than the current one, if any)
        if (m_AllBodiesByLeaf.TryGetValue(instance.ShapeEntity, out var bodyEntity))
        {
            if (!bodyEntity.Equals(instance.BodyEntity))
            {
                // Mark the former body dirty to trigger re-computation of its compound
                // NOTE(review): ShapeEntity is set to the former body entity here,
                // not to a shape — presumably intentional for the dirty marker; confirm.
                m_BuildCompoundsSystem.SetLeafDirty(
                    new ColliderInstance { BodyEntity = bodyEntity, ShapeEntity = bodyEntity }
                    );
                m_AllBodiesByLeaf[instance.ShapeEntity] = instance.BodyEntity;
            }
        }
        else
        {
            m_AllBodiesByLeaf.Add(instance.ShapeEntity, instance.BodyEntity);
        }
    }
    Profiler.EndSample();

    // Compute the BlobAssets
    Profiler.BeginSample("Create New Colliders");
    using (var blobAssets = new NativeArray <BlobAssetReference <Collider> >(shapeCount, Allocator.TempJob))
    {
        CreateNewBlobAssets(m_ShapeComputationData, shapeIndicesNeedingNewBlobs, numNewBlobAssets, blobAssets);
        for (var i = 0; i < numNewBlobAssets; i++)
        {
            var index = shapeIndicesNeedingNewBlobs[i];
            BlobComputationContext.AddComputedBlobAsset(hashes[index], blobAssets[i]);
        }
    }
    shapeIndicesNeedingNewBlobs.Dispose();
    hashes.Dispose();
    Profiler.EndSample();

    // Convert convex hulls and meshes
    Profiler.BeginSample("Convert Hulls and Meshes");
    ConvertHullsAndMeshes();
    Profiler.EndSample();

    // Per-update caches are rebuilt from scratch on the next update.
    m_AllBodiesByLeaf.Clear();
    m_ShapeComputationData.Clear();
    m_ConvexColliderJobs.Clear();
    m_ConvexColliderPoints.Clear();
    m_MeshColliderJobs.Clear();
    m_MeshColliderVertices.Clear();
    m_MeshColliderTriangles.Clear();
}
/// <summary>
/// Job entry point: empties the container <c>m</c>.
/// </summary>
public void Execute()
{
    m.Clear();
}