/// <summary>
/// Roughly once per second, schedules a job that counts crowd entities and a
/// follow-up job that outputs the result. Outside the sampling window the
/// input dependencies are returned untouched.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The dependency handle of the scheduled jobs. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    float time = (float)Time.ElapsedTime;
    const float frequency = 1f; // sampling interval in seconds
    //Debug.Log("Freq "+frequency+" diff " +(time - lastCountTime));

    // Not yet time to sample again: pass the dependencies straight through.
    if (time - lastCountTime <= frequency)
    {
        return inputDeps;
    }

    lastCountTime = time;

    // Single-slot accumulator the count job writes into.
    // NOTE(review): a fresh TempJob array is assigned to `count` every sample
    // without disposing the previous one here — presumably one of the jobs
    // deallocates it on completion; verify, otherwise this leaks.
    count = new NativeArray<int>(new[] { 0 }, Allocator.TempJob);

    EntityQuery query = GetEntityQuery(
        ComponentType.ReadOnly<Crowd>(),
        ComponentType.ReadOnly<Translation>());
    NativeArray<Translation> crowdPositions =
        query.ToComponentDataArray<Translation>(Allocator.TempJob);

    // The count job feeds its result to the output job through `count`.
    JobHandle countHandle = new CountJob
    {
        targetArray = crowdPositions,
        count = count
    }.Schedule(this, inputDeps);

    return new OutputCountJob
    {
        time = time,
        count = count
    }.Schedule(this, countHandle);
}
/// <summary>
/// Ensure a <see cref="NativeMultiHashMap{TKey,TValue}"/> has the capacity to be filled with all events of a specific type.
/// If the hash map already has elements, it will increase the size so that all events and existing elements can fit.
/// </summary>
/// <param name="handle"> Input dependencies. </param>
/// <param name="hashMap"> The <see cref="NativeMultiHashMap{TKey,TValue}"/> to ensure capacity of. </param>
/// <typeparam name="TKey"> The key type of the <see cref="NativeMultiHashMap{TKey,TValue}"/>. </typeparam>
/// <typeparam name="TValue"> The value type of the <see cref="NativeMultiHashMap{TKey,TValue}"/>. </typeparam>
/// <returns> The dependency handle. </returns>
public JobHandle EnsureHashMapCapacity<TKey, TValue>(JobHandle handle, NativeMultiHashMap<TKey, TValue> hashMap)
    where TKey : struct, IEquatable<TKey>
    where TValue : struct
{
    var readerCount = this._eventSystem.GetEventReadersCount<T>();

    if (readerCount != 0)
    {
        // One counter slot per reader; each CountJob instance writes its own slot.
        var counter = new NativeArray<int>(readerCount, Allocator.TempJob);

        handle = new CountJob { Counter = counter }
            .ScheduleSimultaneous<CountJob, T>(this._eventSystem, handle);

        handle = new EnsureMultiHashMapCapacityJob<TKey, TValue>
        {
            Counter = counter,
            HashMap = hashMap,
        }
        .Schedule(handle);

        // Chain the disposal job into the returned dependency so callers wait on it.
        handle = counter.Dispose(handle);
    }

    return handle;
}
/// <summary>
/// Schedules the counting job over the group's <see cref="CharaMotion"/> data
/// and blocks until it has finished before returning.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The completed dependency handle. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var countJob = new CountJob
    {
        m_charaMotions = m_group.GetComponentDataArray<CharaMotion>()
    };

    inputDeps = countJob.Schedule(inputDeps);

    // Force the job to finish this frame — its results are needed immediately.
    inputDeps.Complete();

    return inputDeps;
}
/// <summary>
/// Runs the banish counting job over a snapshot of <see cref="FieldBanish"/>
/// components, then writes the modified data back onto the entities.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The completed dependency handle. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Snapshot the banish state into a temporary array the job can mutate.
    NativeArray<FieldBanish> banishes =
        m_query.ToComponentDataArray<FieldBanish>(Allocator.TempJob);

    var countJob = new CountJob
    {
        fieldBanishs = banishes,
        BanishEndCount = Settings.Instance.Common.BanishEndCount,
    };

    inputDeps = countJob.Schedule(inputDeps);
    inputDeps.Complete(); // results are copied back synchronously below

    // Push the (possibly modified) data back onto the entities, then clean up.
    m_query.CopyFromComponentDataArray(countJob.fieldBanishs);
    banishes.Dispose();

    return inputDeps;
}
/// <summary>
/// Runs the counting job over a snapshot of <see cref="CharaMotion"/>
/// components, then writes the modified data back onto the entities.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The completed dependency handle. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Snapshot the motion state into a temporary array the job can mutate.
    NativeArray<CharaMotion> motions =
        m_query.ToComponentDataArray<CharaMotion>(Allocator.TempJob);

    var countJob = new CountJob
    {
        m_charaMotions = motions
    };

    inputDeps = countJob.Schedule(inputDeps);
    inputDeps.Complete(); // results are copied back synchronously below

    // Push the (possibly modified) data back onto the entities, then clean up.
    m_query.CopyFromComponentDataArray(countJob.m_charaMotions);
    motions.Dispose();

    return inputDeps;
}
/// <summary>
/// Runs the counting job over a snapshot of <see cref="PieceState"/>
/// components, then writes the modified data back onto the entities.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The completed dependency handle. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Snapshot the piece state into a temporary array the job can mutate.
    NativeArray<PieceState> pieces =
        m_queryPiece.ToComponentDataArray<PieceState>(Allocator.TempJob);

    var countJob = new CountJob
    {
        pieceStates = pieces,
        BanishEndCount = Settings.Instance.Common.BanishEndCount,
        BanishImageCount = Settings.Instance.Common.BanishImageCount,
    };

    inputDeps = countJob.Schedule(inputDeps);
    inputDeps.Complete(); // results are copied back synchronously below

    // Push the (possibly modified) data back onto the entities, then clean up.
    m_queryPiece.CopyFromComponentDataArray(countJob.pieceStates);
    pieces.Dispose();

    return inputDeps;
}
/// <summary>
/// Counts live asteroids and schedules a spawn job to top the count up to the
/// configured maximum. With no players connected, all asteroids are destroyed
/// instead and no jobs are scheduled.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The dependency handle of the spawn job. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    if (m_connectionGroup.IsEmptyIgnoreFilter)
    {
        // No connected players, just destroy all asteroids to save CPU.
        inputDeps.Complete();
        World.GetExistingManager<EntityManager>().DestroyEntity(asteroidGroup);
        return default(JobHandle);
    }

    var settings = GetSingleton<ServerSettings>();

    // Count the live asteroids chunk by chunk.
    JobHandle gatherJob;
    var countJob = new CountJob
    {
        chunks = asteroidGroup.CreateArchetypeChunkArray(Allocator.TempJob, out gatherJob),
        count = count,
        entityType = GetArchetypeChunkEntityType()
    };
    inputDeps = countJob.Schedule(JobHandle.CombineDependencies(inputDeps, gatherJob));

    // Spawn enough new asteroids to reach the configured target count.
    JobHandle levelHandle;
    var spawnJob = new SpawnJob
    {
        commandBuffer = barrier.CreateCommandBuffer(),
        count = count,
        targetCount = settings.numAsteroids,
        asteroidArchetype = settings.asteroidArchetype,
        asteroidRadius = settings.asteroidRadius,
        asteroidVelocity = settings.asteroidVelocity,
        level = m_LevelGroup.ToComponentDataArray<LevelComponent>(Allocator.TempJob, out levelHandle),
        rand = new Unity.Mathematics.Random((uint)Stopwatch.GetTimestamp())
    };

    var spawnHandle = spawnJob.Schedule(JobHandle.CombineDependencies(inputDeps, levelHandle));
    barrier.AddJobHandleForProducer(spawnHandle);
    return spawnHandle;
}
/// <summary> Get the total number of events of a specific type. </summary>
/// <param name="handle"> Input dependencies. </param>
/// <param name="count"> The output array. This must be length of at least 1 and the result will be stored in the index of 0. </param>
/// <returns> The dependency handle. </returns>
public JobHandle GetEventCount(JobHandle handle, NativeArray<int> count)
{
    var readerCount = this._eventSystem.GetEventReadersCount<T>();

    if (readerCount != 0)
    {
        // One counter slot per reader; each CountJob instance writes its own slot.
        var counter = new NativeArray<int>(readerCount, Allocator.TempJob);

        handle = new CountJob { Counter = counter }
            .ScheduleSimultaneous<CountJob, T>(this._eventSystem, handle);

        handle = new SumJob { Counter = counter, Count = count }.Schedule(handle);

        // BUG FIX: the handle returned by Dispose was previously discarded, so
        // the disposal job was never part of the returned dependency chain and
        // callers could race it. Chain it, matching EnsureHashMapCapacity.
        handle = counter.Dispose(handle);
    }

    return handle;
}
/// <summary>
/// Runs the banish counting job over snapshots of the field, grid and piece
/// component data, then writes every modified array back onto the entities.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The completed dependency handle. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var common = Settings.Instance.Common;

    // Temporary working copies of every component the job reads and writes.
    NativeArray<int> fallLengths = new NativeArray<int>(common.GridRowLength, Allocator.TempJob);
    NativeArray<FieldBanish> banishes = m_queryField.ToComponentDataArray<FieldBanish>(Allocator.TempJob);
    NativeArray<GridState> grids = m_queryGrid.ToComponentDataArray<GridState>(Allocator.TempJob);
    NativeArray<PieceState> pieces = m_queryPiece.ToComponentDataArray<PieceState>(Allocator.TempJob);
    NativeArray<PiecePosition> piecePositions = m_queryPiece.ToComponentDataArray<PiecePosition>(Allocator.TempJob);

    var countJob = new CountJob
    {
        fieldBanishs = banishes,
        gridStates = grids,
        pieceStates = pieces,
        piecePositions = piecePositions,
        fallCount = fallLengths,
        BanishEndCount = common.BanishEndCount,
        BanishImageCount = common.BanishImageCount,
        GridRowLength = common.GridRowLength,
        GridSize = common.GridSize,
        FieldHeight = common.FieldHeight,
    };

    inputDeps = countJob.Schedule(inputDeps);
    inputDeps.Complete(); // results are copied back synchronously below

    // Push the (possibly modified) data back onto the entities.
    m_queryField.CopyFromComponentDataArray(countJob.fieldBanishs);
    m_queryPiece.CopyFromComponentDataArray(countJob.pieceStates);
    m_queryPiece.CopyFromComponentDataArray(countJob.piecePositions);
    m_queryGrid.CopyFromComponentDataArray(countJob.gridStates);

    fallLengths.Dispose();
    banishes.Dispose();
    grids.Dispose();
    pieces.Dispose();
    piecePositions.Dispose();

    return inputDeps;
}
/// <summary>
/// Rebuilds the per-tile agent counts, the map cache and the flow field each
/// frame, chaining all work into a single job dependency that is returned.
/// </summary>
/// <param name="inputDeps"> Input dependencies. </param>
/// <returns> The handle of the final flow-field generation job. </returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Nothing to do until the map data exists.
    if (!mapSystem.mapData.IsValid)
    {
        return inputDeps;
    }

    // Last frame's flow-field generation must finish before we rebuild.
    generateField.Complete();

    var calculateAgentTile = new CalculateNavAgentTile();
    var handle = calculateAgentTile.Schedule(this, inputDeps);

    var clearTileAgentsCount = new ClearTileAgentsCount();
    var handle2 = clearTileAgentsCount.Schedule(this, inputDeps);

    handle = JobHandle.CombineDependencies(handle, handle2);

    if (!counts.IsCreated)
    {
        // BUG FIX: this map is created once and reused every frame (guarded by
        // IsCreated), so it must use Allocator.Persistent. Allocator.TempJob
        // allocations must be disposed within a few frames and a long-lived
        // TempJob container triggers leak errors.
        counts = new NativeMultiHashMap<int2, int>(mapSystem.mapData.width * mapSystem.mapData.height, Allocator.Persistent);
    }

    counts.Clear();
    handle = new CountJob { hashMap = counts.AsParallelWriter() }.Schedule(this, handle);
    handle = new UpdateCounts { hashMap = counts }.Schedule(this, handle);

    var updateJob = new UpdateMapCache
    {
        mapTiles = mapSystem.mapData.tiles,
        width = mapSystem.mapData.width
    };
    handle = updateJob.Schedule(this, handle);

    if (!flowField.isValid)
    {
        // Default target: roughly the centre of the map.
        var target = new int2(mapSystem.mapData.width / 2 + 1, mapSystem.mapData.height / 2 + 1);
        flowField = new FlowField(target, mapSystem.mapData);
    }

    handle = new SetTargetJob
    {
        target = flowField.targetTile,
        flowField = flowField
    }.Schedule(handle);

    var initJob = new InitializeFieldJob
    {
        stepField = flowField.stepField,
        flowField = flowField.flowField,
        target = flowField.targetTile.x + flowField.targetTile.y * mapSystem.mapData.width
    };
    handle = initJob.Schedule(flowField.flowField.Length, 64, handle);

    var generateCostFieldJob = new GenerateCostField
    {
        map = mapSystem.mapData,
        blob = mapSystem.mapData.mapDataBlob,
        stepField = flowField.stepField,
        openSet = flowField.openSet,
        nextSet = flowField.nextSet
    };
    handle = generateCostFieldJob.Schedule(handle);

    var generateFlowFieldJob = new GenerateFlowField
    {
        map = mapSystem.mapData,
        stepField = flowField.stepField,
        flowField = flowField.flowField,
        blob = mapSystem.mapData.mapDataBlob
    };
    generateField = generateFlowFieldJob.Schedule(flowField.flowField.Length, 64, handle);

    return generateField;
}