public void Execute()
{
    var currentCandidateCapacity = MaxFlybys - ActiveFlybyEntities.Length;
    if (currentCandidateCapacity == 0)
    {
        FlyByCandidates.Clear();
        return;
    }

    var candidates = FlyByCandidates.Count;
    NativeArray<Entity> sortedCandidates = new NativeArray<Entity>(candidates, Allocator.Temp);
    for (int i = 0; i < candidates; ++i)
    {
        // TODO: Cull things that already exist
        sortedCandidates[i] = FlyByCandidates.Dequeue();
    }

    var comparitor = new DistanceComparitor { EmitterFromEntity = EmitterFromEntity };
    sortedCandidates.Sort(comparitor);

    var maxEmittersToConsider = math.min(sortedCandidates.Length, currentCandidateCapacity);
    for (int i = 0, c = 0; i < sortedCandidates.Length; ++i)
    {
        if (c >= maxEmittersToConsider)
        {
            break;
        }

        var emitter = sortedCandidates[i];
        bool alreadyExists = false;
        for (int e = 0; e < ActiveFlybyEntities.Length; ++e)
        {
            if (emitter == ActiveFlybyEntities[e])
            {
                alreadyExists = true;
                break;
            }
        }

        if (alreadyExists)
        {
            continue;
        }

        c++;
        OutputFlyBys.Add(emitter);
    }

    FlyByCandidates.Clear();
    sortedCandidates.Dispose();
}
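The `DistanceComparitor` type used above is defined elsewhere in that codebase. A minimal sketch of what such a comparer could look like, assuming a hypothetical `Emitter` component that caches a squared distance to the listener (both the component and its field are assumptions, not the original code):

using System.Collections.Generic;
using Unity.Entities;

// Assumed stand-in for the real component; the original is not shown.
public struct Emitter : IComponentData
{
    public float ListenerDistanceSq;
}

// Hedged sketch: sorts candidate entities so the closest emitters come first
// and win the limited flyby slots.
public struct DistanceComparitor : IComparer<Entity>
{
    public ComponentDataFromEntity<Emitter> EmitterFromEntity;

    public int Compare(Entity lhs, Entity rhs)
    {
        return EmitterFromEntity[lhs].ListenerDistanceSq
            .CompareTo(EmitterFromEntity[rhs].ListenerDistanceSq);
    }
}

`NativeArray.Sort` accepts any struct implementing `IComparer<T>`, which is why the snippet can sort entities by data looked up on the fly.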
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // inputDeps = new KillerCountJob { actorDeathByKillerCount = playerScoreServerSystem.scoreCount, }.Schedule(this, inputDeps);
    outs.Clear();
    ins.Clear();

    inputDeps = new KillInfoSerializeJob
    {
        outs = outs.AsParallelWriter(),
    }
    .Schedule(this, inputDeps);

    inputDeps = outs.ToListJob(ref ins, inputDeps);

    inputDeps = new SyncKillInfoJob
    {
        ins = ins.AsDeferredJobArray(),
    }
    .Schedule(this, inputDeps);

    return inputDeps;
}
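`ToListJob` here is an extension from the surrounding codebase (`NativeQueueEx`), not a Unity API. A plausible sketch of the pattern, assuming it simply drains a `NativeQueue<T>` into a `NativeList<T>` on one worker so downstream jobs can consume the list via `AsDeferredJobArray()`:

using Unity.Burst;
using Unity.Collections;
using Unity.Jobs;

// Hedged sketch only; the real NativeQueueEx.ToListJob may differ.
[BurstCompile]
public struct QueueToListJob<T> : IJob where T : struct
{
    public NativeQueue<T> Queue;
    public NativeList<T> List;

    public void Execute()
    {
        // Drain the queue into the list; the list's deferred array is then
        // safe to read in jobs scheduled against this one.
        List.Clear();
        while (Queue.TryDequeue(out var item))
        {
            List.Add(item);
        }
    }
}

Usage mirroring the call above (the `KillInfo` element type is assumed): `inputDeps = new QueueToListJob<KillInfo> { Queue = outs, List = ins }.Schedule(inputDeps);`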
protected override void OnUpdate()
{
    var ecb = m_EndSimulationEcbSystem.CreateCommandBuffer().AsParallelWriter();
    ScheduledTicks.Clear();
    var ScheduledTicksWriter = ScheduledTicks.AsParallelWriter();
    var dt = Time.DeltaTime;

    Entities
        .ForEach((int entityInQueryIndex, Entity entity, ref TimeDurationComponent durationComponent, ref DurationStateComponent state) =>
        {
            if (durationComponent.Tick(dt))
            {
                ScheduledTicksWriter.Enqueue(entity);
                state.MarkTick();
            }
            else
            {
                state.State &= ~(EDurationState.TICKED_THIS_FRAME);
            }

            if (durationComponent.IsExpired())
            {
                ecb.DestroyEntity(entityInQueryIndex, entity);
                state.MarkExpired();
            }
        })
        .WithBurst()
        .ScheduleParallel();
}
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    outAs.Clear();
    outBs.Clear();
    ins.Clear();

    var inputDepsA = new WeaponOnMessageJobA
    {
        outs = outAs.AsParallelWriter(),
    }
    .Schedule(this, inputDeps);

    var inputDepsB = new WeaponOnMessageJobB
    {
        outs = outBs.AsParallelWriter(),
    }
    .Schedule(this, inputDeps);

    inputDeps = NativeQueueEx.ToListJob(outAs, outBs, ref ins, JobHandle.CombineDependencies(inputDepsA, inputDepsB));

    inputDeps = new SyncWeaponInstalledStateJob
    {
        ins = ins.AsDeferredJobArray(),
    }
    .Schedule(this, inputDeps);

    return inputDeps;
}
public override void UpdateBehaviour()
{
#if UNITY_EDITOR
    debugDrawDependencies.Complete();

    NativeArray<DebugDrawCommand> debugDrawCommands = m_debugDrawCommandQueue.ToArray(Allocator.Temp);
    m_debugDrawCommandQueue.Clear();

    for (int debugDrawCommandIndex = 0; debugDrawCommandIndex < debugDrawCommands.Length; debugDrawCommandIndex++)
    {
        DebugDrawCommand debugDrawCommand = debugDrawCommands[debugDrawCommandIndex];
        switch (debugDrawCommand.debugDrawCommandType)
        {
            case DebugDrawCommandType.Line:
                DebugDrawLineData debugDrawLineData = debugDrawCommand.DebugCommandData<DebugDrawLineData>();
                Debug.DrawLine(debugDrawLineData.start, debugDrawLineData.end, debugDrawLineData.colour);
                break;

            case DebugDrawCommandType.Sphere:
                break;

            default:
                throw new Exception("Invalid debug draw command type");
        }
    }

    debugDrawCommands.Dispose();
#endif
}
internal void RemoveState(TStateKey stateKey)
{
    var predecessorQueue = new NativeQueue<TStateKey>(Allocator.Temp);

    // State Info
    StateInfoLookup.Remove(stateKey);

    // Actions
    if (ActionLookup.TryGetFirstValue(stateKey, out var actionKey, out var actionIterator))
    {
        do
        {
            var stateActionPair = new StateActionPair<TStateKey, TActionKey>(stateKey, actionKey);

            // Action Info
            ActionInfoLookup.Remove(stateActionPair);

            // Results
            if (ResultingStateLookup.TryGetFirstValue(stateActionPair, out var resultingStateKey, out var resultIterator))
            {
                do
                {
                    // Remove Predecessor Link
                    if (PredecessorGraph.TryGetFirstValue(resultingStateKey, out var predecessorKey, out var predecessorIterator))
                    {
                        predecessorQueue.Clear();

                        do
                        {
                            if (!stateKey.Equals(predecessorKey))
                            {
                                predecessorQueue.Enqueue(predecessorKey);
                            }
                        }
                        while (PredecessorGraph.TryGetNextValue(out predecessorKey, ref predecessorIterator));

                        // Reset Predecessors
                        PredecessorGraph.Remove(resultingStateKey);

                        // Requeue Predecessors
                        while (predecessorQueue.TryDequeue(out var queuedPredecessorKey))
                        {
                            PredecessorGraph.Add(resultingStateKey, queuedPredecessorKey);
                        }
                    }

                    // Action Result Info
                    StateTransitionInfoLookup.Remove(new StateTransition<TStateKey, TActionKey>(stateKey, stateActionPair.ActionKey, resultingStateKey));
                }
                while (ResultingStateLookup.TryGetNextValue(out resultingStateKey, ref resultIterator));

                ResultingStateLookup.Remove(stateActionPair);
            }
        }
        while (ActionLookup.TryGetNextValue(out actionKey, ref actionIterator));

        ActionLookup.Remove(stateKey);
    }

    // Predecessors
    PredecessorGraph.Remove(stateKey);

    predecessorQueue.Dispose();
}
protected override void OnUpdate()
{
    if (!m_TickThisFrame)
    {
        return;
    }

    var ecb = m_EndSimulationEcbSystem.CreateCommandBuffer().AsParallelWriter();
    ScheduledTicks.Clear();
    var tickEvents = ScheduledTicks.AsParallelWriter();

    Entities
        .ForEach((int entityInQueryIndex, Entity entity, ref TurnDurationComponent durationComponent, ref DurationStateComponent state) =>
        {
            if (durationComponent.Tick())
            {
                tickEvents.Enqueue(entity);
                state.MarkTick();
            }
            else
            {
                state.State &= ~(EDurationState.TICKED_THIS_FRAME);
            }

            if (durationComponent.IsExpired())
            {
                ecb.DestroyEntity(entityInQueryIndex, entity);
                state.MarkExpired();
            }
        })
        .WithBurst()
        .ScheduleParallel();
}
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    CollisionManifoldsQueue.Clear();

    var computeSphereSphereContactsJob = new ComputeSphereSphereContacts
    {
        CollisionPairsArray = PhysicsSystem.SphereSphereCollisionPairsArray,
        ColliderFromEntity = ColliderFromEntity,
        RigidBodyFromEntity = RigidBodyFromEntity,
        ColliderPositionFromEntity = ColliderPositionFromEntity,
        ColliderPhysicsPropertiesFromEntity = ColliderPhysicsPropertiesFromEntity,
        SphereColliderFromEntity = SphereColliderFromEntity,
        VelocityFromEntity = VelocityFromEntity,
        CollisionManifoldsQueue = CollisionManifoldsQueue,
    };
    var computeSphereSphereContacts = computeSphereSphereContactsJob.Schedule(
        PhysicsSystem.SphereSphereCollisionPairsArray.Length,
        PhysicsSystem.Settings.ContactsGenerationSystemBatchCount,
        inputDeps);
    computeSphereSphereContacts.Complete();

    if (PhysicsSystem.CollisionManifoldsArray.IsCreated)
    {
        PhysicsSystem.CollisionManifoldsArray.Dispose();
    }
    PhysicsSystem.CollisionManifoldsArray = new NativeArray<CollisionManifold>(CollisionManifoldsQueue.Count, Allocator.TempJob);

    DequeueIntoArray<CollisionManifold> dequeueManifoldsJob = new DequeueIntoArray<CollisionManifold>
    {
        InputQueue = CollisionManifoldsQueue,
        OutputArray = PhysicsSystem.CollisionManifoldsArray,
    };
    JobHandle dequeueCollisionManifolds = dequeueManifoldsJob.Schedule(computeSphereSphereContacts);

    // Need to complete jobs here because the counter will be read in the next system
    dequeueCollisionManifolds.Complete();

    return dequeueCollisionManifolds;
}
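`DequeueIntoArray<T>` is a helper job from that project. Since the snippet sizes the output array from `CollisionManifoldsQueue.Count` right before scheduling, a minimal sketch could look like this (an assumption, not the project's actual code):

using Unity.Burst;
using Unity.Collections;
using Unity.Jobs;

// Hedged sketch: empties a NativeQueue into a pre-sized NativeArray.
[BurstCompile]
public struct DequeueIntoArray<T> : IJob where T : struct
{
    public NativeQueue<T> InputQueue;
    public NativeArray<T> OutputArray;

    public void Execute()
    {
        // OutputArray was allocated with InputQueue.Count elements, so every
        // dequeued value has a slot; the bounds check is just a guard.
        int index = 0;
        while (InputQueue.TryDequeue(out var value) && index < OutputArray.Length)
        {
            OutputArray[index++] = value;
        }
    }
}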
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Clear all three containers; queue A must be cleared too, or it grows every frame.
    destroyActorOutsA.Clear();
    destroyActorOutsB.Clear();
    destroyActors.Clear();

    inputDeps = new GetDestroyActorJobA
    {
        destroyActorOuts = destroyActorOutsA.AsParallelWriter(),
    }
    .Schedule(this, inputDeps);

    inputDeps = new GetDestroyActorJobB
    {
        destroyActorOuts = destroyActorOutsB.AsParallelWriter(),
    }
    .Schedule(this, inputDeps);

    inputDeps = NativeQueueEx.ToListJob(destroyActorOutsA, destroyActorOutsB, ref destroyActors, inputDeps);

    inputDeps = new DestroyVisibleDistanceJob
    {
        destroyActors = destroyActors.AsDeferredJobArray(),
    }
    .Schedule(this, inputDeps);

    return inputDeps;
}
/// <inheritdoc />
public void Execute()
{
    TopologicalListFrontToBack.Clear();
    IsometricDataPresentOnCameraView.Clear();
    IsometricDepthAssigned.Clear();
    IsometricElementFromTopologicalList.Clear();
    HelpTopologicalList.Clear();
}
protected override void OnDestroyManager()
{
    m_ApplyGroupIdFromSceneGroupIds.Clear();
    m_Groups.Clear();
    m_PooledGroups.Clear();

    m_ApplyGroupIdFromSceneGroupIds = null;
    m_Groups = null;
}
private void ClearQueue()
{
#if UseArrayAsQueue
    _queueTail = 0;
    _queueHead = 0;
#else
    PositionsToValidate.Clear();
#endif
}
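When `UseArrayAsQueue` is defined, clearing reduces to resetting two indices. Below is a sketch of the array-backed queue this implies; everything except `_queueHead` and `_queueTail` (the buffer, the element type, the method names) is an assumption for illustration:

using Unity.Mathematics;

// Hedged sketch of a flat-array queue that is reset once per pass. Clearing
// is O(1): both indices snap to zero and stale entries are overwritten later.
public class PositionQueue
{
    private int3[] _queueBuffer = new int3[1024]; // assumed capacity
    private int _queueHead; // next slot to dequeue
    private int _queueTail; // next slot to enqueue

    public void Clear()
    {
        _queueTail = 0;
        _queueHead = 0;
    }

    public void Enqueue(int3 position)
    {
        _queueBuffer[_queueTail++] = position;
    }

    public bool TryDequeue(out int3 position)
    {
        if (_queueHead == _queueTail)
        {
            position = default;
            return false;
        }
        position = _queueBuffer[_queueHead++];
        return true;
    }
}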
// NOTE: If `NativeMultiHashMap.Clear()` is too slow we can try
// using `NativeHashMap` + entities with dynamic buffers instead.
void ClearMapsIfCreated()
{
    if (HasCreatedMaps)
    {
        ColliderMap.Clear();
        TargetMap.Clear();
        LargeColliders.Clear();
    }
}
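The NOTE sketches an alternative if clearing one big `NativeMultiHashMap` becomes the bottleneck: give each cell its own entity with a `DynamicBuffer`, so resets can happen per cell. A hedged reading of that idea (all names below are illustrative, not from the original code):

using Unity.Entities;

// Assumed buffer element; one entity per grid cell would hold a buffer of these.
public struct ColliderElement : IBufferElementData
{
    public Entity Value;
}

// Per-cell reset inside a system, instead of wiping the whole map:
// var buffer = EntityManager.GetBuffer<ColliderElement>(cellEntity);
// buffer.Clear();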
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    _queue.Clear();
    var ha = new ChunkJob { cq = _queue.ToConcurrent() }.Schedule(query, inputDeps);
    _waitHandle = ha;
    return _waitHandle;
}
public unsafe void Test()
{
    const int cnt = 1000;

    using var queue = new NativeQueue<int>(0);
    var managed = new Queue<int>(cnt);
    var arr = new int[cnt];
    var rand = new Random();

    for (int i = 0; i < cnt; i++)
    {
        arr[i] = rand.Next(cnt);
    }

    for (int i = 0; i < cnt; i++)
    {
        managed.Enqueue(arr[i]);
        queue.Enqueue(arr[i]);
    }

    var se = queue.GetEnumerator();
    var me = managed.GetEnumerator();
    var toArr = queue.ToArray();

    for (var i = 0; i < cnt; i++)
    {
        se.MoveNext();
        me.MoveNext();
        Assert.True(se.Current == me.Current);
        Assert.True(toArr[i] == se.Current);
    }

    se.Dispose();
    me.Dispose();

    for (int i = 0; i < cnt; i++)
    {
        Assert.True(managed.Count == queue.Count);
        var l = managed.Dequeue();
        var r = queue.Peek();
        Assert.True(l == r);
        queue.Dequeue();
    }

    queue.Clear();
    queue.TrimExcess();
    Assert.True(!queue.IsDisposed);

    queue.Dispose();
    Assert.True(queue.Ptr == null);
    Assert.True(queue.IsDisposed);
}
public IEnumerator TestPerformanceOnLargeGraphBudget10()
{
    var planGraph = PlanGraphUtility.BuildLattice(midLatticeDepth: 10);
    var nodeCount = planGraph.Size;

    var depthMap = new NativeHashMap<int, int>(nodeCount, Allocator.TempJob);
    var queue = new NativeQueue<StateHorizonPair<int>>(Allocator.TempJob);
    planGraph.GetExpandedDepthMap(0, depthMap, queue);

    var selectedUnexpandedStates = new NativeList<int>(1, Allocator.Persistent);
    var allExpandedStates = new NativeMultiHashMap<int, int>(1, Allocator.Persistent);

    yield return null;

    // Set up performance test
    Measure.Method(() =>
    {
        var selectJob = new SelectionJob<int, int>
        {
            StateExpansionBudget = 10,
            RootStateKey = 0,
            StateDepthLookup = depthMap,
            StateInfoLookup = planGraph.StateInfoLookup,
            ActionLookup = planGraph.ActionLookup,
            ActionInfoLookup = planGraph.ActionInfoLookup,
            ResultingStateLookup = planGraph.ResultingStateLookup,
            StateTransitionInfoLookup = planGraph.StateTransitionInfoLookup,
            SelectedUnexpandedStates = selectedUnexpandedStates,
            AllSelectedStates = allExpandedStates
        };
        selectJob.Schedule().Complete();
    })
    .WarmupCount(1)
    .MeasurementCount(1)
    .IterationsPerMeasurement(1)
    .CleanUp(() =>
    {
        depthMap.Clear();
        queue.Clear();
        planGraph.GetExpandedDepthMap(0, depthMap, queue);
        selectedUnexpandedStates.Clear();
        allExpandedStates.Clear();
    })
    .Run();

    queue.Dispose();
    depthMap.Dispose();
    planGraph.Dispose();
    selectedUnexpandedStates.Dispose();
    allExpandedStates.Dispose();

    // Check performance times
    PerformanceUtility.AssertRange(0.00, 5);
}
public static void GetExpandedDepthMap<TStateKey, TStateInfo, TActionKey, TActionInfo, TStateTransitionInfo>(
    this PlanGraph<TStateKey, TStateInfo, TActionKey, TActionInfo, TStateTransitionInfo> planGraph,
    TStateKey rootKey,
    NativeHashMap<TStateKey, int> depthMap,
    NativeQueue<StateHorizonPair<TStateKey>> queue)
    where TStateKey : struct, IEquatable<TStateKey>
    where TStateInfo : struct, IStateInfo
    where TActionKey : struct, IEquatable<TActionKey>
    where TActionInfo : struct, IActionInfo
    where TStateTransitionInfo : struct
{
    depthMap.Clear();
    queue.Clear();

    var actionLookup = planGraph.ActionLookup;
    var resultingStateLookup = planGraph.ResultingStateLookup;

    depthMap.TryAdd(rootKey, 0);
    queue.Enqueue(new StateHorizonPair<TStateKey> { StateKey = rootKey, Horizon = 0 });

    while (queue.TryDequeue(out var stateHorizonPair))
    {
        var stateKey = stateHorizonPair.StateKey;
        var horizon = stateHorizonPair.Horizon;
        var nextHorizon = horizon + 1;

        if (actionLookup.TryGetFirstValue(stateKey, out var actionKey, out var iterator))
        {
            do
            {
                var stateActionPair = new StateActionPair<TStateKey, TActionKey>(stateKey, actionKey);
                if (resultingStateLookup.TryGetFirstValue(stateActionPair, out var resultingStateKey, out var resultIterator))
                {
                    do
                    {
                        // Skip unexpanded states
                        if (!actionLookup.TryGetFirstValue(resultingStateKey, out _, out _))
                        {
                            continue;
                        }

                        // The first add will be the most shallow due to BFS
                        if (depthMap.TryAdd(resultingStateKey, nextHorizon))
                        {
                            queue.Enqueue(new StateHorizonPair<TStateKey> { StateKey = resultingStateKey, Horizon = nextHorizon });
                        }
                    }
                    while (resultingStateLookup.TryGetNextValue(out resultingStateKey, ref resultIterator));
                }
            }
            while (actionLookup.TryGetNextValue(out actionKey, ref iterator));
        }
    }
}
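A usage sketch for this extension, mirroring the performance test earlier in this section (the `int`-keyed plan graph and capacities come from that test):

// Usage sketch: BFS depth map from root state 0.
var depthMap = new NativeHashMap<int, int>(planGraph.Size, Allocator.TempJob);
var queue = new NativeQueue<StateHorizonPair<int>>(Allocator.TempJob);

planGraph.GetExpandedDepthMap(0, depthMap, queue);
// depthMap now maps each expanded state key to its shallowest horizon;
// the queue has been drained and can be reused for the next call.

queue.Dispose();
depthMap.Dispose();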
public void Clear()
{
    if (Verticies.IsCreated)
    {
        Verticies.Clear();
    }

    if (Indicies.IsCreated)
    {
        Indicies.Clear();
    }

    //if (mapping.IsCreated)
    //    mapping.CopyFrom(_clear);
}
protected override void OnUpdate()
{
    if (_systemStateGroup.IsEmptyIgnoreFilter || _emptyCellsGroup.IsEmptyIgnoreFilter)
    {
        return;
    }

    var systemState = _systemStateGroup.GetSingletonEntity();
    EntityCommandBuffer ecb = new EntityCommandBuffer(Allocator.TempJob);

    var gravity = GetComponentDataFromEntity<Gravity>(true)[systemState];
    ComponentDataFromEntity<GridPosition> gridPositions = GetComponentDataFromEntity<GridPosition>(true);
    ComponentDataFromEntity<BallLink> ballLinks = GetComponentDataFromEntity<BallLink>(true);

    var startY = gravity.Value.y > 0 ? _field.Height - 1 : 0;
    NativeQueue<Entity> cellsToFallIn = new NativeQueue<Entity>(Allocator.TempJob);

    for (int x = 0; x < _field.Width; x++)
    {
        cellsToFallIn.Clear();
        for (var y = startY; y < _field.Height && y >= 0; y -= gravity.Value.y)
        {
            var cell = _field.GetCell(x, y);
            if (!ballLinks.Exists(cell))
            {
                cellsToFallIn.Enqueue(cell);
            }
            else if (cellsToFallIn.Count > 0)
            {
                ecb.RemoveComponent<BallLink>(cell);

                var ball = ballLinks[cell].Value;
                var cellToFall = cellsToFallIn.Dequeue();
                ecb.SetComponent(ball, new CellLink { Value = cellToFall });
                ecb.AddComponent(ball, new Destination { Value = _field.GetWorldPosition(gridPositions[cellToFall].Value) });
                ecb.AddComponent(cellToFall, new BallLink { Value = ball });

                ecb.RemoveComponent<Interactable>(systemState);
            }
        }
    }

    cellsToFallIn.Dispose();
    ecb.Playback(EntityManager);
    ecb.Dispose();
}
public void Execute()
{
    LitVoxels.Clear();

    // Enqueue lit voxels for processing
    // TODO: parallel job filter, or store lit voxels already in the calculate-light-ray job
    for (var x = GeometryConsts.LIGHTS_CLUSTER_MIN; x < GeometryConsts.LIGHTS_CLUSTER_MAX; x++)
    {
        for (var z = GeometryConsts.LIGHTS_CLUSTER_MIN; z < GeometryConsts.LIGHTS_CLUSTER_MAX; z++)
        {
            for (var y = GeometryConsts.CHUNK_HEIGHT - 1; y >= 0; y--)
            {
                var index = ArrayHelper.ToCluster1D(x, y, z);
                if (LightLevels[index] > GeometryConsts.LIGHT_FALL_OFF)
                {
                    LitVoxels.Enqueue(new int3(x, y, z));
                }
            }
        }
    }

    // Iterate through lit voxels and project light to neighbours
    while (LitVoxels.Count > 0)
    {
        var litVoxel = LitVoxels.Dequeue();
        var litVoxelId = ArrayHelper.ToCluster1D(litVoxel.x, litVoxel.y, litVoxel.z);
        var neighbourLightValue = LightLevels[litVoxelId] - GeometryConsts.LIGHT_FALL_OFF;

        // Iterate through neighbours
        for (int iF = 0; iF < GeometryConsts.FACES_PER_VOXEL; iF++)
        {
            var neighbour = litVoxel + Neighbours[iF];

            if (CheckVoxelBounds(neighbour.x, neighbour.y, neighbour.z))
            {
                var neighbourId = ArrayHelper.ToCluster1D(neighbour.x, neighbour.y, neighbour.z);
                var neighbourType = MapData[neighbourId];
                if (!BlockDataLookup[neighbourType].IsSolid && LightLevels[neighbourId] < neighbourLightValue)
                {
                    LightLevels[neighbourId] = neighbourLightValue;
                    if (neighbourLightValue > GeometryConsts.LIGHT_FALL_OFF)
                    {
                        LitVoxels.Enqueue(neighbour);
                    }
                }
            }
        }
    }
}
protected override void OnUpdate()
{
    float deltaTime = Time.DeltaTime;
    NonUniformScale goldUiScale = EntityManager.GetComponentData<NonUniformScale>(_entityGold);
    float3 goldUiPosition = EntityManager.GetComponentData<LocalToWorld>(_entityGold).Position;
    float3 posCam = EntityManager.GetComponentData<Translation>(_entityCam).Value;

    var commandBuffer = new EntityCommandBuffer(Allocator.TempJob);
    var score = new NativeQueue<bool>(Allocator.TempJob);

    JobHandle jobHandle = Entities
        .WithAll<GoldToScoreComponent>()
        .WithBurst()
        .ForEach((Entity entity, DynamicBuffer<Child> children, ref Translation position, ref NonUniformScale scale, ref GoldToScoreComponent goldToScoreComponent) =>
        {
            goldToScoreComponent.Lerp += deltaTime;
            float3 t3CameraFixPosition = goldToScoreComponent.GoldFixPosition + posCam - goldToScoreComponent.CameraFixPosition;
            position.Value = math.lerp(t3CameraFixPosition, goldUiPosition, goldToScoreComponent.Lerp);
            scale.Value = math.lerp(goldToScoreComponent.StartScale, goldUiScale.Value, goldToScoreComponent.Lerp);

            if (goldToScoreComponent.Lerp >= 1)
            {
                commandBuffer.DestroyEntity(entity);
                foreach (Child child in children)
                {
                    commandBuffer.DestroyEntity(child.Value);
                }
                score.Enqueue(true);
            }
        }).Schedule(Dependency);

    jobHandle.Complete();
    commandBuffer.Playback(EntityManager);
    commandBuffer.Dispose();

    if (score.Count <= 0)
    {
        // TempJob allocations must always be disposed before returning.
        score.Dispose();
        return;
    }

    Entities
        .WithAll<GameDataComponent>()
        .ForEach((ref GameDataComponent gameData) =>
        {
            gameData.Score += score.Count;
        }).Run();

    TextLayout.SetEntityTextRendererString(EntityManager, _text, GetSingleton<GameDataComponent>().Score.ToString());

    score.Clear();
    score.Dispose();
}
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    m_StatsCollection.AddSnapshotReceiveStats(m_NetStats);
#endif
    var commandBuffer = m_Barrier.CreateCommandBuffer();

    if (playerGroup.IsEmptyIgnoreFilter)
    {
        m_DelayedDespawnQueue.Clear();

        var clearMapJob = new ClearMapJob
        {
            ghostMap = m_ghostEntityMap
        };
        var clearHandle = clearMapJob.Schedule(inputDeps);

        var clearJob = new ClearGhostsJob
        {
            commandBuffer = commandBuffer.ToConcurrent()
        };
        inputDeps = clearJob.Schedule(this, inputDeps);
        m_Barrier.AddJobHandleForProducer(inputDeps);

        return JobHandle.CombineDependencies(inputDeps, clearHandle);
    }

    serializers.BeginDeserialize(this);

    JobHandle playerHandle;
    var readJob = new ReadStreamJob
    {
        commandBuffer = commandBuffer,
        players = playerGroup.ToEntityArray(Allocator.TempJob, out playerHandle),
        snapshotFromEntity = GetBufferFromEntity<IncomingSnapshotDataStreamBufferComponent>(),
        snapshotAckFromEntity = GetComponentDataFromEntity<NetworkSnapshotAckComponent>(),
        ghostEntityMap = m_ghostEntityMap,
        compressionModel = m_CompressionModel,
        serializers = serializers,
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        netStats = m_NetStats,
#endif
        replicatedEntityType = ComponentType.ReadWrite<ReplicatedEntityComponent>(),
        delayedDespawnQueue = m_DelayedDespawnQueue,
        targetTick = m_TimeSystem.interpolateTargetTick,
        predictedFromEntity = GetComponentDataFromEntity<PredictedEntityComponent>(true)
    };
    inputDeps = readJob.Schedule(JobHandle.CombineDependencies(inputDeps, playerHandle));
    m_Barrier.AddJobHandleForProducer(inputDeps);
    return inputDeps;
}
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var concurrentFreeQueue = freeNetworkIds.AsParallelWriter();
    inputDeps = m_Driver.ScheduleUpdate(inputDeps);

    if (m_DriverListening)
    {
        // Schedule accept job
        var acceptJob = new ConnectionAcceptJob();
        acceptJob.driver = m_Driver;
        acceptJob.commandBuffer = m_Barrier.CreateCommandBuffer();
        inputDeps = acceptJob.Schedule(inputDeps);

        // Schedule job to assign network ids to new connections
        var assignJob = new AssignNetworkIdJob();
        assignJob.commandBuffer = m_Barrier.CreateCommandBuffer();
        assignJob.numNetworkId = numNetworkIds;
        assignJob.freeNetworkIds = freeNetworkIds;
        assignJob.rpcQueue = rpcQueue;
        assignJob.rpcBuffer = GetBufferFromEntity<OutgoingRpcDataStreamBufferComponent>();
        inputDeps = assignJob.ScheduleSingle(this, inputDeps);
    }
    else
    {
        freeNetworkIds.Clear();
    }

    // Schedule parallel update job
    var recvJob = new ConnectionReceiveJob();
    recvJob.commandBuffer = m_Barrier.CreateCommandBuffer().ToConcurrent();
    recvJob.driver = m_ConcurrentDriver;
    recvJob.freeNetworkIds = concurrentFreeQueue;
    recvJob.networkId = GetComponentDataFromEntity<NetworkIdComponent>();
    recvJob.rpcBuffer = GetBufferFromEntity<IncomingRpcDataStreamBufferComponent>();
    recvJob.cmdBuffer = GetBufferFromEntity<IncomingCommandDataStreamBufferComponent>();
    recvJob.snapshotBuffer = GetBufferFromEntity<IncomingSnapshotDataStreamBufferComponent>();
    recvJob.localTime = NetworkTimeSystem.TimestampMS;

    // FIXME: because it uses buffer from entity
    var handle = recvJob.ScheduleSingle(this, inputDeps);
    m_Barrier.AddJobHandleForProducer(handle);
    return handle;
}
public void Enqueue_Dequeue_Clear()
{
    var queue = new NativeQueue<int>(Allocator.Temp);
    Assert.AreEqual(0, queue.Count);
    Assert.Throws<System.InvalidOperationException>(() => { queue.Dequeue(); });

    for (int i = 0; i < 16; ++i)
    {
        queue.Enqueue(i);
    }
    Assert.AreEqual(16, queue.Count);

    for (int i = 0; i < 8; ++i)
    {
        Assert.AreEqual(i, queue.Dequeue(), "Got the wrong value from the queue");
    }
    Assert.AreEqual(8, queue.Count);

    queue.Clear();
    Assert.AreEqual(0, queue.Count);
    Assert.Throws<System.InvalidOperationException>(() => { queue.Dequeue(); });

    queue.Dispose();
}
public void Initialize(World world, Vector3i bp, Vector3i cp)
{
    this.world = world; // maybe world can change? I dunno, it would probably have its own pool
    this.bp = bp;
    this.cp = cp;

    loaded = false;
    update = false;
    lightUpdate = false;
    rendered = false;
    blockReaders = 0;
    lightReaders = 0;
    blockWriter = false;
    lightWriter = false;
    builtStructures = false;
    needToUpdateSave = false;
    dying = false;
    needNewCollider = true;
    loadedNeighbors = 0;

    faces.Clear();
    // just to make sure the list doesn't get too large
    faces.Capacity = 32;

    sunBFS.Clear();
    sunRBFS.Clear();

    gameObject.transform.position = bp.ToVector3() / BPU;
    gameObject.name = string.Format("Chunk {0}.{1}.{2}", cp.x, cp.y, cp.z);
    gameObject.SetActive(true);

    for (int i = 0; i < 6; ++i)
    {
        neighbors[i] = null;
    }

    mr.material = Chunk.beGreedy ? world.TileMatGreedy : world.TileMat;
}
public void Execute()
{
    srcQue.Clear();
    dstQue.Clear();

    /* System.Diagnostics.Stopwatch w0 = new System.Diagnostics.Stopwatch();
     * System.Diagnostics.Stopwatch w1 = new System.Diagnostics.Stopwatch();
     * System.Diagnostics.Stopwatch w2 = new System.Diagnostics.Stopwatch();
     * System.Diagnostics.Stopwatch w3 = new System.Diagnostics.Stopwatch();
     * System.Diagnostics.Stopwatch w4 = new System.Diagnostics.Stopwatch();
     * System.Diagnostics.Stopwatch w5 = new System.Diagnostics.Stopwatch();
     * w3.Start(); */

    int w = field.width;
    int d = field.depth;
    int wd = w * d;
    int spanCount = field.spans.Length;
    int expandIterations = 8;

    var srcReg = new NativeArray<ushort>(spanCount, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var srcDist = new NativeArray<ushort>(spanCount, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var closed = new NativeArray<bool>(spanCount, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var spanFlags = new NativeArray<int>(spanCount, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var stack = new NativeArray<Int3>(spanCount, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

    // The array pool arrays may contain arbitrary data. We need to zero it out.
    for (int i = 0; i < spanCount; i++)
    {
        srcReg[i] = (ushort)0;
        srcDist[i] = (ushort)0xFFFF;
        closed[i] = false;
        spanFlags[i] = 0;
    }

    var spanDistances = distanceField;
    var areaTypes = field.areaTypes;
    var compactCells = field.cells;
    const ushort BorderReg = VoxelUtilityBurst.BorderReg;

    ushort regionId = 2;
    MarkRectWithRegion(0, borderSize, 0, d, (ushort)(regionId | BorderReg), srcReg);
    regionId++;
    MarkRectWithRegion(w - borderSize, w, 0, d, (ushort)(regionId | BorderReg), srcReg);
    regionId++;
    MarkRectWithRegion(0, w, 0, borderSize, (ushort)(regionId | BorderReg), srcReg);
    regionId++;
    MarkRectWithRegion(0, w, d - borderSize, d, (ushort)(regionId | BorderReg), srcReg);
    regionId++;

    // TODO: Can be optimized
    int maxDistance = 0;
    for (int i = 0; i < distanceField.Length; i++)
    {
        maxDistance = math.max((int)distanceField[i], maxDistance);
    }

    // A distance is 2 to an adjacent span and 1 for a diagonally adjacent one.
    NativeArray<int> sortedSpanCounts = new NativeArray<int>(maxDistance / 2 + 1, Allocator.Temp);
    for (int i = 0; i < field.spans.Length; i++)
    {
        // Do not take borders or unwalkable spans into account.
        if ((srcReg[i] & BorderReg) == BorderReg || areaTypes[i] == CompactVoxelField.UnwalkableArea)
        {
            continue;
        }

        sortedSpanCounts[distanceField[i] / 2]++;
    }

    var distanceIndexOffsets = new NativeArray<int>(sortedSpanCounts.Length, Allocator.Temp);
    for (int i = 1; i < distanceIndexOffsets.Length; i++)
    {
        distanceIndexOffsets[i] = distanceIndexOffsets[i - 1] + sortedSpanCounts[i - 1];
    }
    var totalRelevantSpans = distanceIndexOffsets[distanceIndexOffsets.Length - 1] + sortedSpanCounts[sortedSpanCounts.Length - 1];
    var bucketSortedSpans = new NativeArray<Int3>(totalRelevantSpans, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

    // Bucket sort the spans based on distance
    for (int z = 0, pz = 0; z < wd; z += w, pz++)
    {
        for (int x = 0; x < field.width; x++)
        {
            CompactVoxelCell c = compactCells[z + x];

            for (int i = (int)c.index, ni = (int)(c.index + c.count); i < ni; i++)
            {
                // Do not take borders or unwalkable spans into account.
                if ((srcReg[i] & BorderReg) == BorderReg || areaTypes[i] == CompactVoxelField.UnwalkableArea)
                {
                    continue;
                }

                int distIndex = distanceField[i] / 2;
                bucketSortedSpans[distanceIndexOffsets[distIndex]++] = new Int3(x, i, z);
            }
        }
    }

    if (distanceIndexOffsets[distanceIndexOffsets.Length - 1] != totalRelevantSpans)
    {
        throw new System.Exception("Unexpected span count");
    }

    // Go through spans in reverse order (i.e. largest distances first)
    for (int distIndex = sortedSpanCounts.Length - 1; distIndex >= 0; distIndex--)
    {
        var level = (uint)distIndex * 2;
        var spansAtLevel = sortedSpanCounts[distIndex];

        for (int i = 0; i < spansAtLevel; i++)
        {
            // Go through the spans stored in bucketSortedSpans for this distance index.
            // Note that distanceIndexOffsets[distIndex] will point to the element after the end of the group of spans.
            // There is no particular reason for this, the code just turned out to be a bit simpler to implement that way.
            var spanInfo = bucketSortedSpans[distanceIndexOffsets[distIndex] - i - 1];
            int spanIndex = spanInfo.y;

            // This span is adjacent to a region, so we should start the BFS search from it
            if (spanFlags[spanIndex] != 0 && srcReg[spanIndex] == 0)
            {
                srcReg[spanIndex] = (ushort)spanFlags[spanIndex];
                srcQue.Enqueue(spanInfo);
                closed[spanIndex] = true;
            }
        }

        // Expand a few iterations out from every known node
        for (int expansionIteration = 0; expansionIteration < expandIterations && srcQue.Count > 0; expansionIteration++)
        {
            while (srcQue.Count > 0)
            {
                Int3 spanInfo = srcQue.Dequeue();
                var area = areaTypes[spanInfo.y];
                var span = field.spans[spanInfo.y];
                var region = srcReg[spanInfo.y];
                closed[spanInfo.y] = true;
                ushort nextDist = (ushort)(srcDist[spanInfo.y] + 2);

                // Go through the neighbours of the span
                for (int dir = 0; dir < 4; dir++)
                {
                    var neighbour = span.GetConnection(dir);
                    if (neighbour == CompactVoxelField.NotConnected)
                    {
                        continue;
                    }

                    int nx = spanInfo.x + VoxelUtilityBurst.DX[dir];
                    int nz = spanInfo.z + VoxelUtilityBurst.DZ[dir] * field.width;
                    int ni = (int)compactCells[nx + nz].index + neighbour;

                    // Do not take borders into account.
                    if ((srcReg[ni] & BorderReg) == BorderReg)
                    {
                        continue;
                    }

                    // Do not combine different area types
                    if (area == areaTypes[ni])
                    {
                        if (nextDist < srcDist[ni])
                        {
                            if (spanDistances[ni] < level)
                            {
                                srcDist[ni] = nextDist;
                                spanFlags[ni] = region;
                            }
                            else if (!closed[ni])
                            {
                                srcDist[ni] = nextDist;
                                if (srcReg[ni] == 0)
                                {
                                    dstQue.Enqueue(new Int3(nx, ni, nz));
                                }
                                srcReg[ni] = region;
                            }
                        }
                    }
                }
            }

            Util.Memory.Swap(ref srcQue, ref dstQue);
        }

        // Find the first span that has not been seen yet and start a new region that expands from there
        for (int i = 0; i < spansAtLevel; i++)
        {
            var info = bucketSortedSpans[distanceIndexOffsets[distIndex] - i - 1];
            if (srcReg[info.y] == 0)
            {
                if (FloodRegion(info.x, info.z, info.y, level, regionId, field, distanceField, srcReg, srcDist, stack, spanFlags, closed))
                {
                    regionId++;
                }
                else
                {
                    // The starting voxel was already adjacent to an existing region, so we skip flooding it.
                    // It will be visited in the next area expansion.
                }
            }
        }
    }

    var maxRegions = regionId;

    // Filter out small regions.
    FilterSmallRegions(field, srcReg, minRegionSize, maxRegions);

    // Write the result out.
    for (int i = 0; i < spanCount; i++)
    {
        var span = field.spans[i];
        span.reg = srcReg[i];
        field.spans[i] = span;
    }

    // TODO:
    // field.maxRegions = maxRegions;

    // #if ASTAR_DEBUGREPLAY
    // DebugReplay.BeginGroup("Regions");
    // for (int z = 0, pz = 0; z < wd; z += field.width, pz++) {
    //     for (int x = 0; x < field.width; x++) {
    //         CompactVoxelCell c = field.cells[x+z];
    //         for (int i = (int)c.index; i < c.index+c.count; i++) {
    //             CompactVoxelSpan s = field.spans[i];
    //             DebugReplay.DrawCube(CompactSpanToVector(x, pz, i), UnityEngine.Vector3.one*cellSize, AstarMath.IntToColor(s.reg, 1.0f));
    //         }
    //     }
    // }
    // DebugReplay.EndGroup();

    // int maxDist = 0;
    // for (int i = 0; i < srcDist.Length; i++) if (srcDist[i] != 0xFFFF) maxDist = Mathf.Max(maxDist, srcDist[i]);

    // DebugReplay.BeginGroup("Distances");
    // for (int z = 0, pz = 0; z < wd; z += field.width, pz++) {
    //     for (int x = 0; x < field.width; x++) {
    //         CompactVoxelCell c = field.cells[x+z];
    //         for (int i = (int)c.index; i < c.index+c.count; i++) {
    //             CompactVoxelSpan s = field.spans[i];
    //             float f = (float)srcDist[i]/maxDist;
    //             DebugReplay.DrawCube(CompactSpanToVector(x, z/field.width, i), Vector3.one*cellSize, new Color(f, f, f));
    //         }
    //     }
    // }
    // DebugReplay.EndGroup();
    // #endif
}
public void Execute()
{
    nativeQueue.Clear();
}
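One-line jobs like this exist so a container can be cleared at the right point in a dependency chain rather than on the main thread. A usage sketch (the job and handle names are assumptions):

using Unity.Burst;
using Unity.Collections;
using Unity.Jobs;

// Hedged sketch: schedule the clear to run only after the queue's readers finish.
[BurstCompile]
public struct ClearQueueJob : IJob
{
    public NativeQueue<int> nativeQueue;

    public void Execute()
    {
        nativeQueue.Clear();
    }
}

// readerHandle is whatever job last consumed the queue:
// var clearHandle = new ClearQueueJob { nativeQueue = queue }.Schedule(readerHandle);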
public void Execute()
{
    var commandQueueArray = commandQueue.ToArray(Allocator.Temp);
    {
        // Mass destroy
        weaponToDestroyCache.Clear();
        for (int i = 0; i < commandQueueArray.Length; ++i)
        {
            var command = commandQueueArray[i];
            if (command.currentWeaponRef.Value != Entity.Null)
            {
                weaponToDestroyCache.Add(command.currentWeaponRef.Value);
            }
        }

        if (weaponToDestroyCache.Length > 0)
        {
            // To be replaced with a batched ECB destroy when the API ships
            {
                for (int i = 0; i < weaponToDestroyCache.Length; ++i)
                {
                    commandBuffer.DestroyEntity(weaponToDestroyCache[i]);
                }
            }
        }

        // Instantiate has to be 1:1, since mass instantiate does not work with LinkedEntityGroup
        newWeaponInstantiatedCache.Clear();
        for (int i = 0; i < commandQueueArray.Length; ++i)
        {
            var command = commandQueueArray[i];
            newWeaponInstantiatedCache.Add(commandBuffer.Instantiate(weaponPrefabs[command.newWeaponIdx]));
        }

        // Mass add component
        {
            // To be replaced with a batched ECB add component when the API ships
            {
                for (int i = 0; i < newWeaponInstantiatedCache.Length; ++i)
                {
                    commandBuffer.AddComponent<Parent>(newWeaponInstantiatedCache[i]);
                }
                for (int i = 0; i < newWeaponInstantiatedCache.Length; ++i)
                {
                    commandBuffer.AddComponent<LocalToParent>(newWeaponInstantiatedCache[i]);
                }
            }
        }

        // There is no mass set component, but setting a component costs far less than adding one
        for (int i = 0; i < newWeaponInstantiatedCache.Length; ++i)
        {
            var command = commandQueueArray[i];
            var newWeapon = newWeaponInstantiatedCache[i];
            commandBuffer.SetComponent(newWeapon, new Parent { Value = command.weaponOwner });
            commandBuffer.SetComponent(newWeapon, new LocalToParent { Value = command.localToParent });
            commandBuffer.SetComponent(command.weaponOwner, new Weapon { Value = newWeapon });
        }
    }

    commandQueueArray.Dispose();
    commandQueue.Clear();
}
public void Execute()
{
    Source.Clear();
}
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Coloring algorithm:
    // Loop through all connections: MultiHashMap<node, out_con_of_node>, MultiHashMap<node, in_con_of_node>.
    // Start with a node, breadth-first search with its in_con & out_con, add all the nodes to the visited set
    // and the connections to one color group.
    // All connections must have the same level as the first connection selected!
    // Repeat with the remaining nodes & connections.
    // A node connecting two connections of different levels is an exit node.
    // Nothing can have two IndexInNetwork components.
    // Can a node belong to two networks, and have two indexes?
    //
    // Proposal 1: treat exit & entrance nodes differently.
    // - entrance: contains a Next list to all network nodes
    // - exit: every node has a Next to every exit node (indexed differently)
    // - need a way to connect an exit of this network to an entrance of another
    //
    // Path-finding algorithm:
    // target:
    // - finalTarget: conEnt
    // - nextTarget: entity
    // - targetIdx: int
    // - curLoc: entity
    // - curLevel: int
    // - curNet: int
    //
    // NetworkGroup[connection].Index
    // Entrances[node].NetId
    // Exits[node].NetId
    //
    // Connection[connection].Level
    // Entrances[node].Level
    // Exits[node].Level
    //
    // curNet = curCon.Network
    // curLevel = curCon.Level
    // curLoc = curCon
    //
    // if curCon == finalTarget: done
    // elseif curCon.endNode == nextTarget:
    //     curNet = Entrances[nextTarget].NetId
    //     curLevel = Entrances[nextTarget].Level
    //     curLoc = nextTarget
    //     find_target
    // if curNet == finalTarget.Network: targetIdx = Indices[finalTarget]
    // else
    //     if curLevel <= finalTarget.Level // climb
    //         nextTarget = curLoc.exit
    //         targetIdx = Exits[nextTarget].Idx
    //     else // descend
    //         nextTarget = finalTarget
    //         do
    //             nextTarget = nextTarget.entrance
    //         while (curLevel > Exits[nextTarget].Level)
    //         var exitInfo = Exits[nextTarget]
    //         if curNet == exitInfo.NetId
    //             targetIdx = exitInfo.Idx
    //         else // climb
    //             nextTarget = nextTarget.exit
    //             targetIdx = Exits[nextTarget].Idx
    // Next[curCon][targetIdx]
    //
    // Proposal 2: treat exit & entrance like every other connection in the network.
    // nearest_exit or entrance will have to choose 1 among multiples (this also happens with nodes, but less often).
    // The exit/entrance connection is still in the same network; it has to be an OnlyNext connection to push the agent
    // to the other network => can't have an entrance as an intersection! Must buffer it with a straight road!
    //
    // Proposal 3: create a new connection with startNode == endNode == exitNode.
    // This is not good!
    //
    // Compute direct pointer: for connections with 1 exit.
    // Scan through nodes with 1 out_con_node: set all connections in in_con_node to have a direct pointer.
    //
    // Compute index in network:
    // All connections without a direct pointer will be indexed incrementally based on network.
    // Follow the direct_pointer to compute the combined distance into the Dist array.
    // Compute Dist & Next.
    // Compute node.Exit = nearest_exit_node, node.Entrance = nearest_entrance_node.
    // During computation of Dist, record the smallest Dist[i,j] to Exit[i] if j is an exit node.
    //
    // Path finding:
    // conTarget:
    // - finalTarget: connection
    // - nextNode: Null
    //
    // if Direct[node] != Entity.Null => use Direct[node]
    // if node == conTarget.nextNode => conTarget.nextNode = null
    // if conTarget.nextNode == Null
    //     nextNode = finalTarget.endNode
    //     while (node.network != nextNode.network)
    //         if node.Level == nextNode.Level => nextNode = node.exit; break
    //         elseif node.Level > nextNode.Level => nextNode = nextNode.entrance
    //     conTarget.nextNode = nextNode
    // use Next[node][IndexInNetwork[conTarget.nextNode]]

    _outCons.Clear();
    _inCons.Clear();
    _newCons.Clear();

    // Fill _outCons and _inCons: a multi hash map per node, storing all outward / inward connections.
    var inout = new OutConAndInCon
    {
        OutCons = _outCons.ToConcurrent(),
        InCons = _inCons.ToConcurrent(),
        NewCons = _newCons.ToConcurrent(),
    }.Schedule(this, inputDeps);
    inout.Complete();

    if (_newCons.Count > 0)
    {
        var addOnlyNext = new AddOnlyNext
        {
            OutCons = _outCons,
        }.Schedule(this, inputDeps);
        addOnlyNext.Complete();

        EntityManager.AddSharedComponentData(_query, new NetworkGroup());
        _conToNets.Clear();
        _bfsOpen.Clear();

        while (_newCons.Count > 0)
        {
            // Each loop iteration creates a new network group.
            var newCon = _newCons.Dequeue();
            int level = EntityManager.GetComponentData<Connection>(newCon).Level;
            if (_conToNets.TryGetValue(newCon, out int _))
            {
                continue; // already has a net group!
            }

            // Breadth-first search here
            _networkCount++;
            _networkCons.Clear();
            _entrances.Clear();
            _exits.Clear();

            _conToNets.TryAdd(newCon, _networkCount);
            _bfsOpen.Enqueue(newCon);
            _networkCons.Add(newCon);

            // Use BFS to scan all connections belonging to the same network as "newCon".
            while (_bfsOpen.Count > 0)
            {
                var curConEnt = _bfsOpen.Dequeue();
                var connection = EntityManager.GetComponentData<Connection>(curConEnt);
                BFS(ref _outCons, ref _networkCons, connection.EndNode, EntityManager, level, true, ref _exits);
                BFS(ref _inCons, ref _networkCons, connection.StartNode, EntityManager, level, false, ref _entrances);
            }

            var networkEnt = EntityManager.CreateEntity(_networkArchetype);
            EntityManager.SetComponentData(networkEnt, new Network
            {
                Index = _networkCount,
            });
            var networkGroup = new NetworkGroup
            {
                NetworkId = _networkCount,
            };
            var networkGroupState = new NetworkGroupState
            {
                NetworkId = _networkCount,
                Network = networkEnt,
            };

            // Add NetworkGroup & assign OnlyNext ==> this applies to ALL connections, jobify this!
            for (int i = 0; i < _networkCons.Length; i++)
            {
                var conEnt = _networkCons[i];
                EntityManager.SetSharedComponentData(conEnt, networkGroup);
                EntityManager.SetComponentData(conEnt, networkGroupState);
            }

            var networkCache = NetworkCache.Create(networkEnt);
            for (int i = 0; i < _networkCons.Length; i++)
            {
                var conEnt = _networkCons[i];
                var connection = EntityManager.GetComponentData<Connection>(conEnt);
                var conLen = EntityManager.GetComponentData<ConnectionLengthInt>(conEnt);
                var conSpeed = EntityManager.GetComponentData<ConnectionSpeedInt>(conEnt);
                networkCache.AddConnection(connection.StartNode, connection.EndNode, (float)conLen.Length / conSpeed.Speed, conEnt, connection.OnlyNext);
            }

            // Add entrances
            var entrances = _entrances.GetKeyArray(Allocator.Temp);
            EntityManager.AddComponent(entrances, _entranceType);
            for (int i = 0; i < entrances.Length; i++)
            {
                var node = entrances[i];
                EntityManager.SetComponentData(node, new Entrance
                {
                    NetIdx = _networkCount,
                    Network = networkEnt,
                    Level = level,
                });
            }

            var indexToTarget = EntityManager.AddBuffer<IndexToTargetBuffer>(networkEnt);
            int conCount = networkCache.ConnectionCount();
            for (int i = 0; i < conCount; i++)
            {
                indexToTarget.Add(new IndexToTargetBuffer
                {
                    Target = networkCache.GetConnection(i),
                });
            }

            var exits = _exits.GetKeyArray(Allocator.Temp);
            for (int i = 0; i < exits.Length; i++)
            {
                var exitNode = exits[i];
                indexToTarget.Add(new IndexToTargetBuffer
                {
                    Target = exitNode,
                });
            }

            // Add exits
            EntityManager.AddComponent(exits, _exitType);
            EntityManager.AddComponent(exits, _indexInNetworkType);
            for (int i = 0; i < exits.Length; i++)
            {
                var exitNode = exits[i];
                EntityManager.SetComponentData(exitNode, new Exit
                {
                    NetIdx = _networkCount,
                    Level = level,
                });
                EntityManager.SetComponentData(exitNode, new IndexInNetwork
                {
                    Index = i + conCount,
                });
            }

            networkCache.Compute2(EntityManager, ref entrances, ref exits);
            networkCache.Dispose();
            entrances.Dispose();
            exits.Dispose();
        }
    }

    return inout;
}