/// <summary>
/// Per-frame update for the boid flocking system (legacy ComponentGroup API).
/// For each unique Boid shared-component variant it: copies headings/positions into
/// scratch arrays, hashes boid positions into spatial cells, merges per-cell data,
/// then steers each boid — all expressed as a chained job graph returned to the caller.
/// </summary>
/// <param name="inputDeps">Incoming dependency all first-stage jobs are scheduled against.</param>
/// <returns>The handle of the final Steer job of the last processed variant (or
/// <paramref name="inputDeps"/> unchanged when no variant was processed).</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    EntityManager.GetAllUniqueSharedComponentData(m_UniqueTypes);
    var obstacleCount = m_ObstacleGroup.CalculateLength();
    var targetCount = m_TargetGroup.CalculateLength();

    // Ignore typeIndex 0, can't use the default for anything meaningful.
    for (int typeIndex = 1; typeIndex < m_UniqueTypes.Count; typeIndex++)
    {
        var settings = m_UniqueTypes[typeIndex];
        m_BoidGroup.SetFilter(settings);
        var boidCount = m_BoidGroup.CalculateLength();

        // m_PrevCells is indexed from 0 while the variant loop starts at 1.
        var cacheIndex = typeIndex - 1;

        // Per-variant working buffers. Ownership is transferred into m_PrevCells below,
        // so they are NOT disposed this frame — the previous frame's set is disposed
        // instead (see the else-branch further down).
        var cellIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var hashMap = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);
        var cellObstacleDistance = new NativeArray<float>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstaclePositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellTargetPositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellCount = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellAlignment = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellSeparation = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyTargetPositions = new NativeArray<float3>(targetCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyObstaclePositions = new NativeArray<float3>(obstacleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // The four copy jobs below all depend only on inputDeps, so the scheduler is
        // free to run them concurrently with one another.
        var initialCellAlignmentJob = new CopyHeadings
        {
            headings = cellAlignment
        };
        var initialCellAlignmentJobHandle = initialCellAlignmentJob.Schedule(m_BoidGroup, inputDeps);

        var initialCellSeparationJob = new CopyPositions
        {
            positions = cellSeparation
        };
        var initialCellSeparationJobHandle = initialCellSeparationJob.Schedule(m_BoidGroup, inputDeps);

        var copyTargetPositionsJob = new CopyPositions
        {
            positions = copyTargetPositions
        };
        var copyTargetPositionsJobHandle = copyTargetPositionsJob.Schedule(m_TargetGroup, inputDeps);

        var copyObstaclePositionsJob = new CopyPositions
        {
            positions = copyObstaclePositions
        };
        var copyObstaclePositionsJobHandle = copyObstaclePositionsJob.Schedule(m_ObstacleGroup, inputDeps);

        // Bundle this frame's containers so they can be cached and disposed next frame.
        var nextCells = new PrevCells
        {
            cellIndices = cellIndices,
            hashMap = hashMap,
            copyObstaclePositions = copyObstaclePositions,
            copyTargetPositions = copyTargetPositions,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount
        };

        if (cacheIndex > (m_PrevCells.Count - 1))
        {
            // First time this variant index is seen: grow the cache.
            m_PrevCells.Add(nextCells);
        }
        else
        {
            // Dispose the previous frame's containers for this variant before the
            // slot is overwritten below.
            m_PrevCells[cacheIndex].hashMap.Dispose();
            m_PrevCells[cacheIndex].cellIndices.Dispose();
            m_PrevCells[cacheIndex].cellObstaclePositionIndex.Dispose();
            m_PrevCells[cacheIndex].cellTargetPositionIndex.Dispose();
            m_PrevCells[cacheIndex].copyTargetPositions.Dispose();
            m_PrevCells[cacheIndex].copyObstaclePositions.Dispose();
            m_PrevCells[cacheIndex].cellAlignment.Dispose();
            m_PrevCells[cacheIndex].cellSeparation.Dispose();
            m_PrevCells[cacheIndex].cellObstacleDistance.Dispose();
            m_PrevCells[cacheIndex].cellCount.Dispose();
        }
        // NOTE(review): redundant after the Add() branch above (the slot already holds
        // nextCells in that case), but harmless; required after the else-branch.
        m_PrevCells[cacheIndex] = nextCells;

        // Bucket each boid's position into a spatial-hash cell of radius CellRadius.
        var hashPositionsJob = new HashPositions
        {
            hashMap = hashMap.ToConcurrent(),
            cellRadius = settings.CellRadius
        };
        var hashPositionsJobHandle = hashPositionsJob.Schedule(m_BoidGroup, inputDeps);

        // Seed every cell's neighbour count with 1 (each boid counts itself).
        var initialCellCountJob = new MemsetNativeArray<int>
        {
            Source = cellCount,
            Value = 1
        };
        var initialCellCountJobHandle = initialCellCountJob.Schedule(boidCount, 64, inputDeps);

        // Barrier handles: MergeCells must not start until hashing, the initial
        // alignment/separation/count fills, and both position copies are done.
        var initialCellBarrierJobHandle = JobHandle.CombineDependencies(initialCellAlignmentJobHandle, initialCellSeparationJobHandle, initialCellCountJobHandle);
        var copyTargetObstacleBarrierJobHandle = JobHandle.CombineDependencies(copyTargetPositionsJobHandle, copyObstaclePositionsJobHandle);
        var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, initialCellBarrierJobHandle, copyTargetObstacleBarrierJobHandle);

        var mergeCellsJob = new MergeCells
        {
            cellIndices = cellIndices,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions
        };
        var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, mergeCellsBarrierJobHandle);

        // Steer consumes the merged per-cell data to compute each boid's new heading.
        var steerJob = new Steer
        {
            cellIndices = nextCells.cellIndices,
            settings = settings,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions,
            dt = Time.deltaTime,
        };
        var steerJobHandle = steerJob.Schedule(m_BoidGroup, mergeCellsJobHandle);

        // Chain variants: the next iteration's jobs depend on this variant's Steer.
        inputDeps = steerJobHandle;
        m_BoidGroup.AddDependency(inputDeps);
    }
    m_UniqueTypes.Clear();

    return(inputDeps);
}
/// <summary>
/// Advances one simulation frame: samples cursor input, resets per-frame accumulators,
/// (re)builds the KNN neighbour structure when dirty, runs the cursor/blob force and
/// integration jobs, then copies results out to a particle system and transforms.
/// The whole frame is expressed as a job graph; see the handle chaining for ordering.
/// </summary>
/// <param name="deltaTime">Elapsed time forwarded to UpdateRuntimeValues.</param>
private void UpdateSimulation(float deltaTime)
{
    //todo: break this down better. use delta time plus last sim time to figure out a list of game frames to step through, using a starting state.
    //Can be jobified, but it's not super necessary

    // Sample input for every cursor up front (main thread, before any jobs run).
    for (int index = 0; index < NUM_CURSORS; index++)
    {
        //_cursorTeamIDs[index] = index;
        _cursorInputDeltas[index] = InputMan.ListOfSources[index].GetInputAxis() * CursorAccel;//todo: needs a game frame to reference
        //_cursorAccelerations[index] = float2.zero;
        //_cursorVelocities[index] = float2.zero;
        //_cursorPositions[index] = Random.insideUnitCircle;
        //_cursorRadii[index] = 1.0f;
    }

    UpdateRuntimeValues(deltaTime);

    #region Job Kickoff and Dependancy
    //
    // Fire off jobs with all the data that has been set up above. Prefer not to in-line job data and job scheduling due to dependancies
    //
    #region ResetBeginningOfSimFrame
    // Zero out the acceleration accumulators; these two are independent of each other.
    _jobHandleResetBlobAccelerations = _jobDataResetBlobAccelerations.Schedule(_blobAccelerations.Length, 64);
    _jobHandleResetCursorAccelerations = _jobDataResetCursorAccelerations.Schedule(_cursorAccelerations.Length, 1);
    #endregion //ResetBeginningOfSimFrame

    //We need to copy values of positions over into the knn tree (one day we might be able to rule this out)
    // _jobHandleResetJobs acts as a rolling barrier that later stages depend on.
    _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetBlobAccelerations, _jobHandleResetCursorAccelerations);

    //_jobDataQueryNearestNeighboursKNN
    if (bNearestNeighboursDirty || DynamicallyUpdateNearestNeighbours) //HACK: see what happens when we maintain the initial lattice
    {
        #region KNN Tree
        // Rebuild the KNN tree from current blob positions, then query neighbours.
        _jobHandleCopyBlobInfoToFloat3 = _jobDataCopyBlobInfoToFloat3.Schedule(_blobPositionsV3.Length, 64, _jobHandleResetJobs);
        _jobHandleBuildKNNTree = _jobBuildKnnTree.Schedule(_jobHandleCopyBlobInfoToFloat3);
        // _jobHandleSetBlobRadii = _jobDataCopyBlobRadii.Schedule(_blobRadii.Length, 64);
        // JobHandle jobHandleResetRadiiAndBuildKNNTree = JobHandle.CombineDependencies(_jobHandleBuildKNNTree, _jobHandleBuildKNNTree);

        //now query nearest neighbours
        JobHandle jobHandleQueryKNN = _jobDataQueryNearestNeighboursKNN.Schedule(_blobPositionsV3.Length, 64, _jobHandleBuildKNNTree);
        _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, jobHandleQueryKNN);
        #endregion

        // Group IDs are recomputed from scratch whenever neighbours change.
        _jobHandleResetGroupIDs = _jobDataResetGooGroups.Schedule(_blobGroupIDs.Length, 64);
        _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, _jobHandleResetGroupIDs);

        if (UseUniqueEdges)
        {
            // Debug.Log($"Unique Blob edges length { _uniqueBlobEdges.Count() }");
            // Edge-set path: dedupe edges first, then flood-fill groups over the hash map.
            _uniqueBlobEdgesHashSet.Clear();//maybe if this was a job it'd be less slow?
            _uniqueBlobEdges.Clear();
            JobHandle jobHandFindUniqueEdges = _jobCompileDataUniqueEdges.Schedule(_blobPositionsV3.Length, 64, _jobHandleResetJobs);
            _jobHandleFloodFillGroupiID = _jobDataFloodFillGroupIDsMultiHashMap.Schedule(jobHandFindUniqueEdges);
            _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, _jobHandleFloodFillGroupiID);
            // Debug.Log($"Num Groups: {_jobDataFloodFillGroupIDsMultiHashMap.NumGroups[0]}");
        }
        else
        {
            // KNN path: flood-fill groups directly from neighbour query results.
            _jobHandleFloodFillGroupiID = _jobDataFloodFillGroupIDsKnn.Schedule(_jobHandleResetJobs);
            _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, _jobHandleFloodFillGroupiID);
            // Debug.Log($"Num Groups: {_jobDataFloodFillGroupIDsKnn.NumGroups[0]}");
        }
        bNearestNeighboursDirty = false;
    }

    #region SimUpdateFrame
    //
    // Cursors must be done first. Luckily there's very few
    //
    //update cursors//todo: treat more like ECS. cursors happen to have positions/velocities/radii. But they out to be "type" tagged somehow.
    // Strict cursor pipeline: acceleration -> friction -> position, then cursors push blobs.
    _jobHandleSetCursorAcceleration = _jobDataSetCursorAcceleration.Schedule(_cursorInputDeltas.Length, 1, _jobHandleResetJobs);
    _jobHandleApplyCursorFriction = _jobDataApplyCursorFriction.Schedule(_cursorInputDeltas.Length, 1, _jobHandleSetCursorAcceleration);
    _jobHandleUpdateCursorPositions = _jobDataUpdateCursorPositions.Schedule(_cursorInputDeltas.Length, 1, _jobHandleApplyCursorFriction);
    _jobHandleCursorsInfluenceBlobs = _jobDataCursorsInfluenceBlobs.Schedule(_blobPositions.Length, 64, _jobHandleUpdateCursorPositions);//todo: give cursors knnquery data.

    //Cursor Influences blobs once it's ready
    //Blob sim gets updated after cursor influence
    //blobs all figure out how much push and pull is coming from neighbouring blobs.
    if (UseUniqueEdges)
    {
        _jobHandleSpringForces = _jobDataSpringForcesUniqueEdges.Schedule(_blobAccelerations.Length, 64, _jobHandleCursorsInfluenceBlobs);
    }
    else
    {
        _jobHandleSpringForces = _jobDataSpringForcesUsingKnn.Schedule(_blobAccelerations.Length, 64, _jobHandleCursorsInfluenceBlobs);
    }
    // Blob pipeline continues: friction -> fluid influence -> integrate positions.
    _jobHandleApplyBlobFriction = _jobDataApplyFrictionToBlobs.Schedule(_blobAccelerations.Length, 64, _jobHandleSpringForces);
    _jobHandleFluidInfluences = _jobDataFluidInfluence.Schedule(_blobAccelerations.Length, 64, _jobHandleApplyBlobFriction);
    _jobHandleUpdateBlobPositions = _jobDataUpdateBlobPositions.Schedule(_blobAccelerations.Length, 64, _jobHandleFluidInfluences);
    #endregion //SimUpdateFrame

    //temp - needs an interpolator job
    //Todo: spit out into a particle effect instead of transforms, which are probably slow as heck
    //but this is still somewhat useful for debug
    // Pick the debug colourisation job from the active style; all variants depend on
    // the finished position update. Note Velocity and Acceleration reuse the same job
    // data instance (_jobDataDebugColorisationFloat2Magnitude) with different lengths.
    JobHandle jobHandleDebugColorization;
    switch (DebugStyle)
    {
        case BlobColorDebugStyle.Edges:
            jobHandleDebugColorization = _jobDataDebugColorisationKNNLength.Schedule(_blobKNNNearestNeighbourQueryResults.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.Velocity:
            jobHandleDebugColorization = _jobDataDebugColorisationFloat2Magnitude.Schedule(_blobVelocities.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.Acceleration:
            jobHandleDebugColorization = _jobDataDebugColorisationFloat2Magnitude.Schedule(_blobAccelerations.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.TeamID:
            jobHandleDebugColorization = _jobDataDebugColorisationInt.Schedule(_blobTeamIDs.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.GroupID:
            jobHandleDebugColorization = _jobDataDebugColorisationInt.Schedule(_blobGroupIDs.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    // Output stage: copy results to the particle system and cursor transforms, then
    // block the main thread until they (and the AABB rebuild) are finished.
    _jobHandleCopyBlobsToParticleSystem = _jobDataCopyBlobsToParticleSystem.ScheduleBatch(BlobParticleSystemOutput, 64, jobHandleDebugColorization);
    _jobHandleCopyCursorsToTransforms = _jobDataCopyCursorsToTransforms.Schedule(_cursorTransformAccessArray, _jobHandleCursorsInfluenceBlobs);
    _jobHandleCopyBlobsToParticleSystem.Complete();
    _jobHandleCopyCursorsToTransforms.Complete();
    _jobHandleBuildAABB = _jobDataCalculateAABB.Schedule(_jobHandleUpdateBlobPositions);
    _jobHandleBuildAABB.Complete();
    // Debug.Log($"Unique Blob edges length { _uniqueBlobEdges.Count() }");
    #endregion // Job Kickoff and Dependancy

    //No. You must call "complete" on any handle that has something dependant on it. Which is all of them, you'd expect.
    //maybe i only need to complete the last, since that's dependant.
}
/// <summary>
/// Per-frame update for the boid flocking system (EntityQuery API). Disposes the
/// previous frame's hash maps, then for each non-empty Boid variant builds a spatial
/// hash, merges per-cell data, and steers the boids via a chained job graph.
/// NOTE(review): only the hash maps are cached for cleanup here; the TempJob
/// NativeArrays are presumably freed via [DeallocateOnJobCompletion] attributes
/// inside the consuming job structs (not visible in this chunk) — confirm.
/// </summary>
/// <param name="inputDeps">Incoming dependency all first-stage jobs are scheduled against.</param>
/// <returns>The handle of the final Steer job of the last processed variant.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    EntityManager.GetAllUniqueSharedComponentData(m_UniqueTypes);
    var obstacleCount = m_ObstacleQuery.CalculateEntityCount();
    var targetCount = m_TargetQuery.CalculateEntityCount();

    // Cannot call [DeallocateOnJobCompletion] on Hashmaps yet, so doing own cleanup here
    // of the hashes created in the previous iteration.
    for (int i = 0; i < m_PrevFrameHashmaps.Count; ++i)
    {
        m_PrevFrameHashmaps[i].Dispose();
    }
    m_PrevFrameHashmaps.Clear();

    // Each variant of the Boid represents a different value of the SharedComponentData and is self-contained,
    // meaning Boids of the same variant only interact with one another. Thus, this loop processes each
    // variant type individually.
    for (int boidVariantIndex = 0; boidVariantIndex < m_UniqueTypes.Count; boidVariantIndex++)
    {
        var settings = m_UniqueTypes[boidVariantIndex];
        m_BoidQuery.SetFilter(settings);
        var boidCount = m_BoidQuery.CalculateEntityCount();

        if (boidCount == 0)
        {
            // Early out. If the given variant includes no Boids, move on to the next loop.
            // For example, variant 0 will always exit early bc it's it represents a default, uninitialized
            // Boid struct, which does not appear in this sample.
            continue;
        }

        // The following calculates spatial cells of neighboring Boids
        // note: working with a sparse grid and not a dense bounded grid so there
        // are no predefined borders of the space.
        var hashMap = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);
        var cellIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstaclePositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellTargetPositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellCount = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstacleDistance = new NativeArray<float>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellAlignment = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellSeparation = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyTargetPositions = new NativeArray<float3>(targetCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyObstaclePositions = new NativeArray<float3>(obstacleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // The following jobs all run in parallel because the same JobHandle is passed for their
        // input dependencies when the jobs are scheduled; thus, they can run in any order (or concurrently).
        // The concurrency is property of how they're scheduled, not of the job structs themselves.
        var initialCellAlignmentJob = new CopyHeadings
        {
            headings = cellAlignment
        };
        var initialCellAlignmentJobHandle = initialCellAlignmentJob.Schedule(m_BoidQuery, inputDeps);

        var initialCellSeparationJob = new CopyPositions
        {
            positions = cellSeparation
        };
        var initialCellSeparationJobHandle = initialCellSeparationJob.Schedule(m_BoidQuery, inputDeps);

        var copyTargetPositionsJob = new CopyPositions
        {
            positions = copyTargetPositions
        };
        var copyTargetPositionsJobHandle = copyTargetPositionsJob.Schedule(m_TargetQuery, inputDeps);

        var copyObstaclePositionsJob = new CopyPositions
        {
            positions = copyObstaclePositions
        };
        var copyObstaclePositionsJobHandle = copyObstaclePositionsJob.Schedule(m_ObstacleQuery, inputDeps);

        // Cannot call [DeallocateOnJobCompletion] on Hashmaps yet, so adding resolved hashes to the list
        // so that theyre usable in the upcoming cell jobs and also have a straight forward cleanup.
        m_PrevFrameHashmaps.Add(hashMap);

        // setting up the jobs for position and cell count
        var hashPositionsJob = new HashPositions
        {
            hashMap = hashMap.AsParallelWriter(),
            cellRadius = settings.CellRadius
        };
        var hashPositionsJobHandle = hashPositionsJob.Schedule(m_BoidQuery, inputDeps);

        // Seed every cell's neighbour count with 1 (each boid counts itself).
        var initialCellCountJob = new MemsetNativeArray<int>
        {
            Source = cellCount,
            Value = 1
        };
        var initialCellCountJobHandle = initialCellCountJob.Schedule(boidCount, 64, inputDeps);

        // Barriers: MergeCells requires hashing, the initial fills, and both copies.
        var initialCellBarrierJobHandle = JobHandle.CombineDependencies(initialCellAlignmentJobHandle, initialCellSeparationJobHandle, initialCellCountJobHandle);
        var copyTargetObstacleBarrierJobHandle = JobHandle.CombineDependencies(copyTargetPositionsJobHandle, copyObstaclePositionsJobHandle);
        var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, initialCellBarrierJobHandle, copyTargetObstacleBarrierJobHandle);

        var mergeCellsJob = new MergeCells
        {
            cellIndices = cellIndices,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions
        };
        var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, mergeCellsBarrierJobHandle);

        // Steer consumes the merged per-cell data to compute each boid's new heading.
        var steerJob = new Steer
        {
            cellIndices = cellIndices,
            settings = settings,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions,
            dt = Time.deltaTime,
        };
        var steerJobHandle = steerJob.Schedule(m_BoidQuery, mergeCellsJobHandle);

        // Chain variants: the next iteration's jobs depend on this variant's Steer.
        inputDeps = steerJobHandle;
        m_BoidQuery.AddDependency(inputDeps);
    }
    m_UniqueTypes.Clear();

    return(inputDeps);
}
/// <summary>
/// Per-frame update for the boid flocking system (SystemBase + Entities.ForEach).
/// For each non-empty Boid variant: copies headings/positions into scratch arrays,
/// spatially hashes the boids, merges per-cell data, steers every boid in a parallel
/// lambda, then chains dispose jobs for all temporary containers onto Dependency.
/// </summary>
protected override void OnUpdate()
{
    var obstacleCount = m_ObstacleQuery.CalculateEntityCount();
    var targetCount = m_TargetQuery.CalculateEntityCount();

    EntityManager.GetAllUniqueSharedComponentData(m_UniqueTypes);

    // Each variant of the Boid represents a different value of the SharedComponentData and is self-contained,
    // meaning Boids of the same variant only interact with one another. Thus, this loop processes each
    // variant type individually.
    for (int boidVariantIndex = 0; boidVariantIndex < m_UniqueTypes.Count; boidVariantIndex++)
    {
        var settings = m_UniqueTypes[boidVariantIndex];
        m_BoidQuery.AddSharedComponentFilter(settings);

        var boidCount = m_BoidQuery.CalculateEntityCount();
        if (boidCount == 0)
        {
            // Early out. If the given variant includes no Boids, move on to the next loop.
            // For example, variant 0 will always exit early bc it's it represents a default, uninitialized
            // Boid struct, which does not appear in this sample.
            m_BoidQuery.ResetFilter();
            continue;
        }

        // The following calculates spatial cells of neighboring Boids
        // note: working with a sparse grid and not a dense bounded grid so there
        // are no predefined borders of the space.
        var hashMap = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);
        var cellIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstaclePositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellTargetPositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellCount = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstacleDistance = new NativeArray<float>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellAlignment = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellSeparation = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyTargetPositions = new NativeArray<float3>(targetCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyObstaclePositions = new NativeArray<float3>(obstacleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // The following jobs all run in parallel because the same JobHandle is passed for their
        // input dependencies when the jobs are scheduled; thus, they can run in any order (or concurrently).
        // The concurrency is property of how they're scheduled, not of the job structs themselves.

        // These jobs extract the relevant position, heading component
        // to NativeArrays so that they can be randomly accessed by the `MergeCells` and `Steer` jobs.
        // These jobs are defined inline using the Entities.ForEach lambda syntax.
        var initialCellAlignmentJobHandle = Entities
            .WithSharedComponentFilter(settings)
            .WithName("InitialCellAlignmentJob")
            .ForEach((int entityInQueryIndex, in LocalToWorld localToWorld) =>
            {
                cellAlignment[entityInQueryIndex] = localToWorld.Forward;
            })
            .ScheduleParallel(Dependency);

        var initialCellSeparationJobHandle = Entities
            .WithSharedComponentFilter(settings)
            .WithName("InitialCellSeparationJob")
            .ForEach((int entityInQueryIndex, in LocalToWorld localToWorld) =>
            {
                cellSeparation[entityInQueryIndex] = localToWorld.Position;
            })
            .ScheduleParallel(Dependency);

        var copyTargetPositionsJobHandle = Entities
            .WithName("CopyTargetPositionsJob")
            .WithAll<BoidTarget>()
            .WithStoreEntityQueryInField(ref m_TargetQuery)
            .ForEach((int entityInQueryIndex, in LocalToWorld localToWorld) =>
            {
                copyTargetPositions[entityInQueryIndex] = localToWorld.Position;
            })
            .ScheduleParallel(Dependency);

        var copyObstaclePositionsJobHandle = Entities
            .WithName("CopyObstaclePositionsJob")
            .WithAll<BoidObstacle>()
            .WithStoreEntityQueryInField(ref m_ObstacleQuery)
            .ForEach((int entityInQueryIndex, in LocalToWorld localToWorld) =>
            {
                copyObstaclePositions[entityInQueryIndex] = localToWorld.Position;
            })
            .ScheduleParallel(Dependency);

        // Populates a hash map, where each bucket contains the indices of all Boids whose positions quantize
        // to the same value for a given cell radius so that the information can be randomly accessed by
        // the `MergeCells` and `Steer` jobs.
        // This is useful in terms of the algorithm because it limits the number of comparisons that will
        // actually occur between the different boids. Instead of for each boid, searching through all
        // boids for those within a certain radius, this limits those by the hash-to-bucket simplification.
        var parallelHashMap = hashMap.AsParallelWriter();
        var hashPositionsJobHandle = Entities
            .WithName("HashPositionsJob")
            .WithAll<Boid>()
            .ForEach((int entityInQueryIndex, in LocalToWorld localToWorld) =>
            {
                var hash = (int)math.hash(new int3(math.floor(localToWorld.Position / settings.CellRadius)));
                parallelHashMap.Add(hash, entityInQueryIndex);
            })
            .ScheduleParallel(Dependency);

        // Seed every cell's neighbour count with 1 (each boid counts itself).
        var initialCellCountJob = new MemsetNativeArray<int>
        {
            Source = cellCount,
            Value = 1
        };
        var initialCellCountJobHandle = initialCellCountJob.Schedule(boidCount, 64, Dependency);

        // Barriers: MergeCells requires hashing, the initial fills, and both copies.
        var initialCellBarrierJobHandle = JobHandle.CombineDependencies(initialCellAlignmentJobHandle, initialCellSeparationJobHandle, initialCellCountJobHandle);
        var copyTargetObstacleBarrierJobHandle = JobHandle.CombineDependencies(copyTargetPositionsJobHandle, copyObstaclePositionsJobHandle);
        var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, initialCellBarrierJobHandle, copyTargetObstacleBarrierJobHandle);

        var mergeCellsJob = new MergeCells
        {
            cellIndices = cellIndices,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions
        };
        var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, mergeCellsBarrierJobHandle);

        // This reads the previously calculated boid information for all the boids of each cell to update
        // the `localToWorld` of each of the boids based on their newly calculated headings using
        // the standard boid flocking algorithm.
        float deltaTime = math.min(0.05f, Time.DeltaTime);
        var steerJobHandle = Entities
            .WithName("Steer")
            .WithSharedComponentFilter(settings) // implies .WithAll<Boid>()
            .WithReadOnly(cellIndices)
            .WithReadOnly(cellCount)
            .WithReadOnly(cellAlignment)
            .WithReadOnly(cellSeparation)
            .WithReadOnly(cellObstacleDistance)
            .WithReadOnly(cellObstaclePositionIndex)
            .WithReadOnly(cellTargetPositionIndex)
            .WithReadOnly(copyObstaclePositions)
            .WithReadOnly(copyTargetPositions)
            .ForEach((int entityInQueryIndex, ref LocalToWorld localToWorld) =>
            {
                // temporarily storing the values for code readability
                var forward = localToWorld.Forward;
                var currentPosition = localToWorld.Position;
                var cellIndex = cellIndices[entityInQueryIndex];
                var neighborCount = cellCount[cellIndex];
                var alignment = cellAlignment[cellIndex];
                var separation = cellSeparation[cellIndex];
                var nearestObstacleDistance = cellObstacleDistance[cellIndex];
                var nearestObstaclePositionIndex = cellObstaclePositionIndex[cellIndex];
                var nearestTargetPositionIndex = cellTargetPositionIndex[cellIndex];
                var nearestObstaclePosition = copyObstaclePositions[nearestObstaclePositionIndex];
                var nearestTargetPosition = copyTargetPositions[nearestTargetPositionIndex];

                // Setting up the directions for the three main biocrowds influencing directions adjusted based
                // on the predefined weights:
                // 1) alignment - how much should it move in a direction similar to those around it?
                // note: we use `alignment/neighborCount`, because we need the average alignment in this case; however
                // alignment is currently the summation of all those of the boids within the cellIndex being considered.
                var alignmentResult = settings.AlignmentWeight * math.normalizesafe((alignment / neighborCount) - forward);

                // 2) separation - how close is it to other boids and are there too many or too few for comfort?
                // note: here separation represents the summed possible center of the cell. We perform the multiplication
                // so that both `currentPosition` and `separation` are weighted to represent the cell as a whole and not
                // the current individual boid.
                var separationResult = settings.SeparationWeight * math.normalizesafe((currentPosition * neighborCount) - separation);

                // 3) target - is it still towards its destination?
                var targetHeading = settings.TargetWeight * math.normalizesafe(nearestTargetPosition - currentPosition);

                // creating the obstacle avoidant vector s.t. it's pointing towards the nearest obstacle
                // but at the specified 'ObstacleAversionDistance'. If this distance is greater than the
                // current distance to the obstacle, the direction becomes inverted. This simulates the
                // idea that if `currentPosition` is too close to an obstacle, the weight of this pushes
                // the current boid to escape in the fastest direction; however, if the obstacle isn't
                // too close, the weighting denotes that the boid doesnt need to escape but will move
                // slower if still moving in that direction (note: we end up not using this move-slower
                // case, because of `targetForward`'s decision to not use obstacle avoidance if an obstacle
                // isn't close enough).
                var obstacleSteering = currentPosition - nearestObstaclePosition;
                var avoidObstacleHeading = (nearestObstaclePosition + math.normalizesafe(obstacleSteering) * settings.ObstacleAversionDistance) - currentPosition;

                // the updated heading direction. If not needing to be avoidant (ie obstacle is not within
                // predefined radius) then go with the usual defined heading that uses the amalgamation of
                // the weighted alignment, separation, and target direction vectors.
                var nearestObstacleDistanceFromRadius = nearestObstacleDistance - settings.ObstacleAversionDistance;
                var normalHeading = math.normalizesafe(alignmentResult + separationResult + targetHeading);
                var targetForward = math.select(normalHeading, avoidObstacleHeading, nearestObstacleDistanceFromRadius < 0);

                // updates using the newly calculated heading direction
                var nextHeading = math.normalizesafe(forward + deltaTime * (targetForward - forward));
                localToWorld = new LocalToWorld
                {
                    Value = float4x4.TRS(
                        new float3(localToWorld.Position + (nextHeading * settings.MoveSpeed * deltaTime)),
                        quaternion.LookRotationSafe(nextHeading, math.up()),
                        new float3(1.0f, 1.0f, 1.0f))
                };
            }).ScheduleParallel(mergeCellsJobHandle);

        // Dispose allocated containers with dispose jobs.
        Dependency = steerJobHandle;
        var disposeJobHandle = hashMap.Dispose(Dependency);
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellIndices.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellObstaclePositionIndex.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellTargetPositionIndex.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellCount.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellObstacleDistance.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellAlignment.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, cellSeparation.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, copyObstaclePositions.Dispose(Dependency));
        disposeJobHandle = JobHandle.CombineDependencies(disposeJobHandle, copyTargetPositions.Dispose(Dependency));
        Dependency = disposeJobHandle;

        // We pass the job handle and add the dependency so that we keep the proper ordering between the jobs
        // as the looping iterates. For our purposes of execution, this ordering isn't necessary; however, without
        // the add dependency call here, the safety system will throw an error, because we're accessing multiple
        // pieces of boid data and it would think there could possibly be a race condition.
        m_BoidQuery.AddDependency(Dependency);
        m_BoidQuery.ResetFilter();
    }
    m_UniqueTypes.Clear();
}
/// <summary>
/// Per-frame update for the faction boid combat system. For every unique faction
/// (shared component) it: copies boid data into TempJob buffers, hashes positions
/// into a spatial grid, merges per-cell data (which also raycasts via the physics
/// world and records damage / bullet spawns), then steers, kills and applies damage
/// through follow-up jobs. Returns the combined handle of the last jobs scheduled.
/// </summary>
/// <param name="inputDependencies">Handle this system's jobs must wait on.</param>
/// <returns>Combined handle of the kill / damage / bullet-spawn jobs of the last processed faction.</returns>
protected override JobHandle OnUpdate(JobHandle inputDependencies)
{
    // Hard-coded simulation tuning values; `Settings` is a plain struct, so every
    // field must be assigned before it is handed to the jobs below (CS0165).
    Settings settings;
    settings.CellRadius = 16;
    settings.SeparationWeight = 1;
    settings.AlignmentWeight = 1;
    settings.TargetWeight = 2;
    settings.MaxTargetDistance = 10000;
    // FIX: this assignment was commented out, leaving the struct partially
    // unassigned before use. Restored with its original value.
    settings.ObstacleAversionDistance = 35;
    settings.MoveSpeed = 25;
    settings.boidRadius = 0.5f;

    EntityManager.GetAllUniqueSharedComponentData(uniqueFactions);

    // Hash maps allocated last frame could not use [DeallocateOnJobCompletion],
    // so they are disposed manually at the start of the next frame.
    for (int i = 0; i < prevFrameHashmaps.Count; i++)
    {
        prevFrameHashmaps[i].Dispose();
    }
    prevFrameHashmaps.Clear();

    for (int index = 0; index < uniqueFactions.Count; index++)
    {
        boidQuery.SetFilter(uniqueFactions[index]);
        int boidCount = boidQuery.CalculateEntityCount();
        if (boidCount == 0)
        {
            continue; // nothing to simulate for this faction
        }

        // Per-frame scratch buffers. The plain NativeArrays are expected to be
        // released by the jobs that consume them; the two hash maps are tracked
        // in prevFrameHashmaps and disposed next frame (see above).
        // NOTE(review): `bulletSpawns` is shared by MergeCells and
        // ApplyBulletSpawnData — verify exactly one of them deallocates it.
        var cellIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var hashMap = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);
        var cellObstacleDistance = new NativeArray<float>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellCount = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var killTrigger = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellAlignment = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellSeparation = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var boidsData = new NativeArray<Boid>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellTargetPositions = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstaclePositions = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var bulletSpawns = new NativeArray<BulletSpawn>(boidCount, Allocator.TempJob, NativeArrayOptions.ClearMemory);
        var damageDict = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);

        // --- Initial copy jobs: snapshot headings, positions and boid data. ---
        var initialCellAlignmentJob = new CopyHeadings { headings = cellAlignment };
        var initialCellAlignmentJobHandle = initialCellAlignmentJob.Schedule(boidQuery, inputDependencies);
        var initialCellSeparationJob = new CopyPositions { positions = cellSeparation };
        var initialCellSeparationJobHandle = initialCellSeparationJob.Schedule(boidQuery, inputDependencies);
        var initialBoidData = new CopyBoids { boids = boidsData };
        var initialBoidDataJobHandle = initialBoidData.Schedule(boidQuery, inputDependencies);

        // Cannot call [DeallocateOnJobCompletion] on Hashmaps yet.
        prevFrameHashmaps.Add(hashMap);

        // Bucket every boid into a spatial grid cell of side CellRadius.
        var hashPositionsJob = new HashPositions { hashMap = hashMap.AsParallelWriter(), cellRadius = settings.CellRadius };
        var hashPositionsJobHandle = hashPositionsJob.Schedule(boidQuery, inputDependencies);

        // Each cell starts with a population of 1; kill triggers start cleared.
        var initialCellCountJob = new MemsetNativeArray<int> { Source = cellCount, Value = 1 };
        var initialCellCountJobHandle = initialCellCountJob.Schedule(boidCount, 64, inputDependencies);
        var killTriggerJob = new MemsetNativeArray<int> { Source = killTrigger, Value = 0 };
        var killTriggerJobHandle = killTriggerJob.Schedule(boidCount, 64, inputDependencies);

        // Barriers: MergeCells may only run once every buffer above is populated.
        var initialCellBarrierJobHandle = JobHandle.CombineDependencies(initialCellAlignmentJobHandle, initialCellSeparationJobHandle, initialCellCountJobHandle);
        var initialBoidBarrierJobHandle = JobHandle.CombineDependencies(initialBoidDataJobHandle, killTriggerJobHandle);
        var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, initialCellBarrierJobHandle, initialBoidBarrierJobHandle);

        ref PhysicsWorld physicsWorld = ref Unity.Entities.World.Active.GetExistingSystem<BuildPhysicsWorld>().PhysicsWorld;
        var commandBuffer = m_Barrier.CreateCommandBuffer().ToConcurrent();
        prevFrameHashmaps.Add(damageDict); // disposed next frame, like hashMap

        var mergeCellsJob = new MergeCells
        {
            cellIndices = cellIndices,
            cellObstaclePositions = cellObstaclePositions,
            cellTargetPositions = cellTargetPositions,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellCount = cellCount,
            boidsData = boidsData,
            killTrigger = killTrigger,
            physicsWorld = physicsWorld,
            damageDict = damageDict.AsParallelWriter(),
            bulletSpawns = bulletSpawns,
            commandBuffer = commandBuffer,
            bulletPrefab = BulletPrefabAuthoring.Prefab,
            //enemyEntityLook = Setup.enemyEntityLook,
            // Faction 0 collides against group 8, every other faction against 4.
            groupIndex = math.select(4u, 8u, uniqueFactions[index].Value == 0),
            time = Time.time,
            settings = settings,
        };
        var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, mergeCellsBarrierJobHandle);
        m_Barrier.AddJobHandleForProducer(mergeCellsJobHandle);

        // Instantiate bullets recorded by MergeCells.
        var applyBulletSpawnDataJob = new ApplyBulletSpawnData
        {
            bulletSpawns = bulletSpawns,
            destroyAtTime = Time.time + 5, // bullets self-destruct after 5 seconds
            commandBuffer = commandBuffer,
            bulletPrefab = BulletPrefabAuthoring.Prefab
        };
        var applyBulletSpawnDataJobHandle = applyBulletSpawnDataJob.Schedule(boidCount, 64, mergeCellsJobHandle);
        m_Barrier.AddJobHandleForProducer(applyBulletSpawnDataJobHandle);

        // Write merged boid data back to the entities, then steer.
        var updateBoidData = new UpdateBoidData { boidsData = boidsData };
        var updateBoidDataJobHandle = updateBoidData.Schedule(boidQuery, applyBulletSpawnDataJobHandle);

        var steerJob = new Steer
        {
            cellIndices = cellIndices,
            settings = settings,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellCount = cellCount,
            targetPositions = cellTargetPositions,
            obstaclePositions = cellObstaclePositions,
            boidsData = boidsData,
            dt = Time.deltaTime,
        };
        var steerJobHandle = steerJob.Schedule(boidQuery, updateBoidDataJobHandle);

        // Destroy boids whose kill trigger was set by MergeCells.
        var killJob = new Kill { killTrigger = killTrigger, commandBuffer = commandBuffer, };
        var killJobHandle = killJob.Schedule(boidQuery, steerJobHandle);
        m_Barrier.AddJobHandleForProducer(killJobHandle);

        // Apply accumulated damage to health entities.
        var applyDamageJob = new ApplyDamage { damageDict = damageDict };
        var applyDamageJobHandle = applyDamageJob.Schedule(healthQuery, mergeCellsJobHandle);

        inputDependencies = JobHandle.CombineDependencies(killJobHandle, applyDamageJobHandle, applyBulletSpawnDataJobHandle);
        // Register the dependency so the safety system orders this faction's jobs
        // against the next iteration's jobs on the same query.
        boidQuery.AddDependency(inputDependencies);
    }

    // GetAllUniqueSharedComponentData appends; clear for next frame
    // (matches the sibling OnUpdate implementations in this file).
    uniqueFactions.Clear();
    // FIX: the method is declared to return JobHandle but had no return statement.
    return inputDependencies;
}
/// <summary>
/// SPH fluid simulation step (legacy ECS API). For every unique SPHParticle
/// shared-component value: copies particle data into TempJob buffers, hashes
/// positions into a spatial grid, then runs the pipeline
/// density/pressure -> forces -> integrate -> colliders -> apply, all as chained
/// jobs. Returns the handle of the last job scheduled for the last chunk.
/// </summary>
/// <param name="inputDeps">Handle this system's jobs must wait on.</param>
/// <returns>Handle of the final ApplyPositions job (or <paramref name="inputDeps"/> if no chunks ran).</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Lazily cache the camera transform (scene lookup only happens once).
    if (cameraTransform == null) { cameraTransform = GameObject.Find("Main Camera").transform; }

    EntityManager.GetAllUniqueSharedComponentData(uniqueTypes);
    ComponentDataArray<SPHCollider> colliders = SPHColliderGroup.GetComponentDataArray<SPHCollider>();
    int colliderCount = colliders.Length;

    // Index 0 is the default shared-component value; start at 1.
    for (int typeIndex = 1; typeIndex < uniqueTypes.Count; typeIndex++)
    {
        // Get the current chunk setting
        SPHParticle settings = uniqueTypes[typeIndex];
        SPHCharacterGroup.SetFilter(settings);

        // Cache the data
        ComponentDataArray<Position> positions = SPHCharacterGroup.GetComponentDataArray<Position>();
        ComponentDataArray<SPHVelocity> velocities = SPHCharacterGroup.GetComponentDataArray<SPHVelocity>();
        int cacheIndex = typeIndex - 1;
        int particleCount = positions.Length;

        // Per-frame scratch buffers. Ownership is recorded in previousParticles
        // below so everything allocated here is disposed on the next frame.
        NativeMultiHashMap<int, int> hashMap = new NativeMultiHashMap<int, int>(particleCount, Allocator.TempJob);
        NativeArray<Position> particlesPosition = new NativeArray<Position>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<SPHVelocity> particlesVelocity = new NativeArray<SPHVelocity>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<float3> particlesForces = new NativeArray<float3>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<float> particlesPressure = new NativeArray<float>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<float> particlesDensity = new NativeArray<float>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<int> particleIndices = new NativeArray<int>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<int> cellOffsetTableNative = new NativeArray<int>(cellOffsetTable, Allocator.TempJob);
        NativeArray<SPHCollider> copyColliders = new NativeArray<SPHCollider>(colliderCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // Add new or dispose previous particle chunks
        PreviousParticle nextParticles = new PreviousParticle
        {
            hashMap = hashMap,
            particlesPosition = particlesPosition,
            particlesVelocity = particlesVelocity,
            particlesForces = particlesForces,
            particlesPressure = particlesPressure,
            particlesDensity = particlesDensity,
            particleIndices = particleIndices,
            cellOffsetTable = cellOffsetTableNative,
            copyColliders = copyColliders
        };
        if (cacheIndex > previousParticles.Count - 1)
        {
            // First time we see this chunk index: just remember the buffers.
            previousParticles.Add(nextParticles);
        }
        else
        {
            // Release last frame's buffers for this chunk index before replacing them.
            previousParticles[cacheIndex].hashMap.Dispose();
            previousParticles[cacheIndex].particlesPosition.Dispose();
            previousParticles[cacheIndex].particlesVelocity.Dispose();
            previousParticles[cacheIndex].particlesForces.Dispose();
            previousParticles[cacheIndex].particlesPressure.Dispose();
            previousParticles[cacheIndex].particlesDensity.Dispose();
            previousParticles[cacheIndex].particleIndices.Dispose();
            previousParticles[cacheIndex].cellOffsetTable.Dispose();
            previousParticles[cacheIndex].copyColliders.Dispose();
        }
        previousParticles[cacheIndex] = nextParticles;

        // Copy the component data to native arrays
        CopyComponentData<Position> particlesPositionJob = new CopyComponentData<Position> { Source = positions, Results = particlesPosition };
        JobHandle particlesPositionJobHandle = particlesPositionJob.Schedule(particleCount, 64, inputDeps);
        CopyComponentData<SPHVelocity> particlesVelocityJob = new CopyComponentData<SPHVelocity> { Source = velocities, Results = particlesVelocity };
        JobHandle particlesVelocityJobHandle = particlesVelocityJob.Schedule(particleCount, 64, inputDeps);
        CopyComponentData<SPHCollider> copyCollidersJob = new CopyComponentData<SPHCollider> { Source = colliders, Results = copyColliders };
        JobHandle copyCollidersJobHandle = copyCollidersJob.Schedule(colliderCount, 64, inputDeps);

        // Zero the accumulators the solver jobs add into.
        MemsetNativeArray<float> particlesPressureJob = new MemsetNativeArray<float> { Source = particlesPressure, Value = 0.0f };
        JobHandle particlesPressureJobHandle = particlesPressureJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<float> particlesDensityJob = new MemsetNativeArray<float> { Source = particlesDensity, Value = 0.0f };
        JobHandle particlesDensityJobHandle = particlesDensityJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<int> particleIndicesJob = new MemsetNativeArray<int> { Source = particleIndices, Value = 0 };
        JobHandle particleIndicesJobHandle = particleIndicesJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<float3> particlesForcesJob = new MemsetNativeArray<float3> { Source = particlesForces, Value = new float3(0, 0, 0) };
        JobHandle particlesForcesJobHandle = particlesForcesJob.Schedule(particleCount, 64, inputDeps);

        // Put positions into a hashMap (spatial grid of cell size settings.radius).
        HashPositions hashPositionsJob = new HashPositions { positions = particlesPosition, hashMap = hashMap.ToConcurrent(), cellRadius = settings.radius };
        JobHandle hashPositionsJobHandle = hashPositionsJob.Schedule(particleCount, 64, particlesPositionJobHandle);

        JobHandle mergedPositionIndicesJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, particleIndicesJobHandle);
        MergeParticles mergeParticlesJob = new MergeParticles { particleIndices = particleIndices };
        JobHandle mergeParticlesJobHandle = mergeParticlesJob.Schedule(hashMap, 64, mergedPositionIndicesJobHandle);
        JobHandle mergedMergedParticlesDensityPressure = JobHandle.CombineDependencies(mergeParticlesJobHandle, particlesPressureJobHandle, particlesDensityJobHandle);

        // Compute density pressure
        ComputeDensityPressure computeDensityPressureJob = new ComputeDensityPressure
        {
            particlesPosition = particlesPosition,
            densities = particlesDensity,
            pressures = particlesPressure,
            hashMap = hashMap,
            cellOffsetTable = cellOffsetTableNative,
            settings = settings
        };
        JobHandle computeDensityPressureJobHandle = computeDensityPressureJob.Schedule(particleCount, 64, mergedMergedParticlesDensityPressure);
        JobHandle mergeComputeDensityPressureVelocityForces = JobHandle.CombineDependencies(computeDensityPressureJobHandle, particlesForcesJobHandle, particlesVelocityJobHandle);

        // Compute forces
        ComputeForces computeForcesJob = new ComputeForces
        {
            particlesPosition = particlesPosition,
            particlesVelocity = particlesVelocity,
            particlesForces = particlesForces,
            particlesPressure = particlesPressure,
            particlesDensity = particlesDensity,
            cellOffsetTable = cellOffsetTableNative,
            hashMap = hashMap,
            settings = settings
        };
        JobHandle computeForcesJobHandle = computeForcesJob.Schedule(particleCount, 64, mergeComputeDensityPressureVelocityForces);

        // Integrate
        Integrate integrateJob = new Integrate { particlesPosition = particlesPosition, particlesVelocity = particlesVelocity, particlesDensity = particlesDensity, particlesForces = particlesForces };
        JobHandle integrateJobHandle = integrateJob.Schedule(particleCount, 64, computeForcesJobHandle);
        JobHandle mergedIntegrateCollider = JobHandle.CombineDependencies(integrateJobHandle, copyCollidersJobHandle);

        // Compute Colliders
        ComputeColliders computeCollidersJob = new ComputeColliders { particlesPosition = particlesPosition, particlesVelocity = particlesVelocity, copyColliders = copyColliders, settings = settings };
        JobHandle computeCollidersJobHandle = computeCollidersJob.Schedule(particleCount, 64, mergedIntegrateCollider);

        // Apply positions (write results back to the entity components).
        ApplyPositions applyPositionsJob = new ApplyPositions { particlesPosition = particlesPosition, particlesVelocity = particlesVelocity, positions = positions, velocities = velocities };
        JobHandle applyPositionsJobHandle = applyPositionsJob.Schedule(particleCount, 64, computeCollidersJobHandle);

        // Chain chunks: the next chunk's jobs wait on this chunk's final job.
        inputDeps = applyPositionsJobHandle;
    }

    // Done
    uniqueTypes.Clear();
    return (inputDeps);
}
// The particle data (SMBProperties) is a shared component: one filter pass per unique value.
/// <summary>
/// Crowd-simulation step combining an SPH-style particle solver with A* waypoint
/// navigation. On the first frame it computes a waypoint path per agent on the
/// main thread; every frame it copies agent data into TempJob buffers, runs the
/// density/force/integrate/collider jobs, then the waypoint-following jobs
/// (ComputeNewPoint / RecomputeNewPoint / ComputePosition), and applies results.
/// NOTE(review): this method calls inputDeps.Complete() at the end of each chunk,
/// which blocks the main thread — presumably required so NwayPointspaths and
/// finalposition can be disposed immediately; confirm before changing.
/// </summary>
/// <param name="inputDeps">Handle this system's jobs must wait on.</param>
/// <returns>Handle of the final ApplyPositions job (already completed).</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Lazily cache the camera transform (scene lookup only happens once).
    if (cameraTransform == null) { cameraTransform = GameObject.Find("Main Camera").transform; }

    EntityManager.GetAllUniqueSharedComponentData(uniqueTypes);
    ComponentDataArray<SMBCollider> colliders = SMBColliderGroup.GetComponentDataArray<SMBCollider>();
    int colliderCount = colliders.Length;

    // Index 0 is the default shared-component value; start at 1.
    for (int typeIndex = 1; typeIndex < uniqueTypes.Count; typeIndex++)
    {
        // Get the current chunk setting
        SMBProperties settings = uniqueTypes[typeIndex];
        //SMBDestination smbdestination = _destination[typeIndex];
        SMBCharacterGroup.SetFilter(settings);

        // Cache the data
        ComponentDataArray<Position> positions = SMBCharacterGroup.GetComponentDataArray<Position>();
        ComponentDataArray<SMBVelocity> velocities = SMBCharacterGroup.GetComponentDataArray<SMBVelocity>();
        ComponentDataArray<SMBDestination> SMBdestinations = SMBCharacterGroup.GetComponentDataArray<SMBDestination>();
        ComponentDataArray<SMBSspeed> SMBSspeeds = SMBCharacterGroup.GetComponentDataArray<SMBSspeed>();
        ComponentDataArray<SMBPath> indexPaths = SMBCharacterGroup.GetComponentDataArray<SMBPath>();
        int cacheIndex = typeIndex - 1;
        int particleCount = positions.Length;

        // Per-frame scratch buffers; ownership is recorded in previousParticles
        // so they can be disposed on the next frame (except finalposition and
        // NwayPointspaths, which are disposed at the end of this iteration).
        NativeMultiHashMap<int, int> hashMap = new NativeMultiHashMap<int, int>(particleCount, Allocator.TempJob);
        NativeArray<Position> particlesPosition = new NativeArray<Position>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<Position> finalposition = new NativeArray<Position>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<SMBVelocity> particlesVelocity = new NativeArray<SMBVelocity>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<SMBDestination> particlesDestination = new NativeArray<SMBDestination>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<SMBSspeed> particlesSspeed = new NativeArray<SMBSspeed>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<SMBPath> particlesindexPaths = new NativeArray<SMBPath>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<float3> particlesForces = new NativeArray<float3>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<float> particlesPressure = new NativeArray<float>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<float> particlesDensity = new NativeArray<float>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<int> particleIndices = new NativeArray<int>(particleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<int> cellOffsetTableNative = new NativeArray<int>(cellOffsetTable, Allocator.TempJob);
        NativeArray<SMBCollider> copyColliders = new NativeArray<SMBCollider>(colliderCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // Add new or dispose previous particle chunks
        PreviousParticle nextParticles = new PreviousParticle
        {
            hashMap = hashMap,
            particlesPosition = particlesPosition,
            particlesVelocity = particlesVelocity,
            particlesDestination = particlesDestination,
            particlesSspeed = particlesSspeed,
            particlesindexPaths = particlesindexPaths,
            particlesForces = particlesForces,
            particlesPressure = particlesPressure,
            particlesDensity = particlesDensity,
            particleIndices = particleIndices,
            cellOffsetTable = cellOffsetTableNative,
            copyColliders = copyColliders
        };
        if (cacheIndex > previousParticles.Count - 1)
        {
            // First time we see this chunk index: just remember the buffers.
            previousParticles.Add(nextParticles);
        }
        else
        {
            // Release last frame's buffers for this chunk index before replacing them.
            previousParticles[cacheIndex].hashMap.Dispose();
            previousParticles[cacheIndex].particlesPosition.Dispose();
            previousParticles[cacheIndex].particlesVelocity.Dispose();
            previousParticles[cacheIndex].particlesDestination.Dispose();
            previousParticles[cacheIndex].particlesSspeed.Dispose();
            previousParticles[cacheIndex].particlesindexPaths.Dispose();
            previousParticles[cacheIndex].particlesForces.Dispose();
            previousParticles[cacheIndex].particlesPressure.Dispose();
            previousParticles[cacheIndex].particlesDensity.Dispose();
            previousParticles[cacheIndex].particleIndices.Dispose();
            previousParticles[cacheIndex].cellOffsetTable.Dispose();
            previousParticles[cacheIndex].copyColliders.Dispose();
        }
        previousParticles[cacheIndex] = nextParticles;

        // Copy the component data to native arrays
        CopyComponentData<Position> particlesPositionJob = new CopyComponentData<Position> { Source = positions, Results = particlesPosition };
        JobHandle particlesPositionJobHandle = particlesPositionJob.Schedule(particleCount, 64, inputDeps);
        CopyComponentData<SMBVelocity> particlesVelocityJob = new CopyComponentData<SMBVelocity> { Source = velocities, Results = particlesVelocity };
        JobHandle particlesVelocityJobHandle = particlesVelocityJob.Schedule(particleCount, 64, inputDeps);
        CopyComponentData<SMBDestination> particlesDestinationJob = new CopyComponentData<SMBDestination> { Source = SMBdestinations, Results = particlesDestination };
        JobHandle particlesDestinationJobHandle = particlesDestinationJob.Schedule(particleCount, 64, inputDeps);
        CopyComponentData<SMBSspeed> particlesSspeedJob = new CopyComponentData<SMBSspeed> { Source = SMBSspeeds, Results = particlesSspeed };
        JobHandle particlesSspeedJobHandle = particlesSspeedJob.Schedule(particleCount, 64, inputDeps);
        CopyComponentData<SMBCollider> copyCollidersJob = new CopyComponentData<SMBCollider> { Source = colliders, Results = copyColliders };
        JobHandle copyCollidersJobHandle = copyCollidersJob.Schedule(colliderCount, 64, inputDeps);

        // Zero the accumulators the solver jobs add into.
        MemsetNativeArray<float> particlesPressureJob = new MemsetNativeArray<float> { Source = particlesPressure, Value = 0.0f };
        JobHandle particlesPressureJobHandle = particlesPressureJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<float> particlesDensityJob = new MemsetNativeArray<float> { Source = particlesDensity, Value = 0.0f };
        JobHandle particlesDensityJobHandle = particlesDensityJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<int> particleIndicesJob = new MemsetNativeArray<int> { Source = particleIndices, Value = 0 };
        JobHandle particleIndicesJobHandle = particleIndicesJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<float3> particlesForcesJob = new MemsetNativeArray<float3> { Source = particlesForces, Value = new float3(0, 0, 0) };
        JobHandle particlesForcesJobHandle = particlesForcesJob.Schedule(particleCount, 64, inputDeps);
        MemsetNativeArray<Position> finalpositionJob = new MemsetNativeArray<Position> { Source = finalposition, Value = new Position { Value = new float3() } };
        JobHandle finalpositionJobHandle = finalpositionJob.Schedule(particleCount, 64, inputDeps);
        //JobHandle computepathsJobHandle = particlesPositionJobHandle;

        // One-time A* pass on the main thread: build a waypoint path per agent and
        // record its [indexIni, indexFin) slice into the shared wayPointsPath list.
        if (first)
        {
            int index = 0, firsTriangle = 1;
            for (int i = 0; i < particleCount; ++i)
            {
                astar.cleanStructures();
                astar.setOrigin(positions[i].Value);
                astar.setDestination(SMBdestinations[i].destination);
                astar.trianglePath2();
                //Allpaths.AddRange(astar.trianglePath2());
                wayPointsPath.AddRange(astar.getWayPoints());
                int aux = wayPointsPath.Count;
                // Empty path slice => mark with Firsttriangle = -1.
                if (aux - index == 0) { firsTriangle = -1; }
                else { firsTriangle = 1; }
                // NOTE(review): Random is re-seeded with 1 on every iteration, so
                // ola.NextInt(15) yields the same value for every agent — confirm
                // whether a per-agent seed was intended.
                Unity.Mathematics.Random ola = new Unity.Mathematics.Random(1);
                indexPaths[i] = new SMBPath { indexIni = index, indexFin = aux, NextPoint = new float3(), Firsttriangle = firsTriangle, recalculate = ola.NextInt(15), fordwarDir = new float3() };
                index = aux;
            }
            first = false;
        }
        // A commented-out re-pathing block lived here ("once the destination is
        // reached, go to another; works but is very slow"): it re-ran A* for agents
        // whose destination was reached and spliced the new waypoints into
        // wayPointsPath. Removed as dead code; recover from version control if needed.

        // Snapshot the waypoint list into a native array the jobs can read.
        NativeArray<SMBWaypoint> NwayPointspaths = new NativeArray<SMBWaypoint>(wayPointsPath.Count, Allocator.TempJob);
        NativeArray<SMBWaypoint>.Copy(wayPointsPath.ToArray(), NwayPointspaths, wayPointsPath.Count);

        CopyComponentData<SMBPath> particlesIndexPathJob = new CopyComponentData<SMBPath> { Source = indexPaths, Results = particlesindexPaths };
        JobHandle particlesIndexPathJobHandle = particlesIndexPathJob.Schedule(particleCount, 64, inputDeps);

        // Put positions into a hashMap (spatial grid of cell size settings.radius).
        HashPositions hashPositionsJob = new HashPositions { positions = particlesPosition, hashMap = hashMap.ToConcurrent(), cellRadius = settings.radius };
        JobHandle hashPositionsJobHandle = hashPositionsJob.Schedule(particleCount, 64, particlesPositionJobHandle);

        JobHandle mergedPositionIndicesJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, particleIndicesJobHandle);
        MergeParticles mergeParticlesJob = new MergeParticles { particleIndices = particleIndices };
        JobHandle mergeParticlesJobHandle = mergeParticlesJob.Schedule(hashMap, 64, mergedPositionIndicesJobHandle);
        JobHandle mergedMergedParticlesDensityPressure = JobHandle.CombineDependencies(mergeParticlesJobHandle, particlesPressureJobHandle, particlesDensityJobHandle);

        // Compute density pressure
        ComputeDensityPressure computeDensityPressureJob = new ComputeDensityPressure
        {
            particlesPosition = particlesPosition,
            densities = particlesDensity,
            pressures = particlesPressure,
            hashMap = hashMap,
            cellOffsetTable = cellOffsetTableNative,
            settings = settings
        };
        JobHandle computeDensityPressureJobHandle = computeDensityPressureJob.Schedule(particleCount, 64, mergedMergedParticlesDensityPressure);
        JobHandle mergeComputeDensityPressureVelocityForces = JobHandle.CombineDependencies(computeDensityPressureJobHandle, particlesForcesJobHandle, particlesVelocityJobHandle);

        // Compute forces
        ComputeForces computeForcesJob = new ComputeForces
        {
            particlesPosition = particlesPosition,
            particlesVelocity = particlesVelocity,
            particlesForces = particlesForces,
            particlesPressure = particlesPressure,
            particlesDensity = particlesDensity,
            cellOffsetTable = cellOffsetTableNative,
            hashMap = hashMap,
            settings = settings
        };
        JobHandle computeForcesJobHandle = computeForcesJob.Schedule(particleCount, 64, mergeComputeDensityPressureVelocityForces);

        // Integrate
        Integrate integrateJob = new Integrate { particlesPosition = particlesPosition, particlesVelocity = particlesVelocity, particlesDensity = particlesDensity, particlesForces = particlesForces };
        JobHandle integrateJobHandle = integrateJob.Schedule(particleCount, 64, computeForcesJobHandle);
        JobHandle mergedIntegrateCollider = JobHandle.CombineDependencies(integrateJobHandle, copyCollidersJobHandle);
        //JobHandle mergedIntegrateCollider = JobHandle.CombineDependencies(particlesPositionJobHandle, particlesVelocityJobHandle, copyCollidersJobHandle);

        // Compute Colliders
        ComputeColliders computeCollidersJob = new ComputeColliders { particlesPosition = particlesPosition, particlesVelocity = particlesVelocity, copyColliders = copyColliders, settings = settings };
        JobHandle computeCollidersJobHandle = computeCollidersJob.Schedule(particleCount, 64, mergedIntegrateCollider);

        // Waypoint following: pick the next waypoint per agent, correct it, then
        // compute the final position from speed/velocity and the spatial grid.
        JobHandle allReady = JobHandle.CombineDependencies(computeCollidersJobHandle, particlesIndexPathJobHandle, particlesDestinationJobHandle);
        ComputeNewPoint computeNewPointJob = new ComputeNewPoint { particlesPosition = particlesPosition, waypoints = NwayPointspaths, indexPaths = particlesindexPaths, particlesDestination = particlesDestination };
        JobHandle computeNewPointJobHandle = computeNewPointJob.Schedule(particleCount, 64, allReady);
        computeNewPointJobHandle = JobHandle.CombineDependencies(computeNewPointJobHandle, finalpositionJobHandle);
        RecomputeNewPoint RecomputeNewPointJob = new RecomputeNewPoint { particlesPosition = particlesPosition, waypoints = NwayPointspaths, indexPaths = particlesindexPaths };
        JobHandle RecomputeNewPointJobHandle = RecomputeNewPointJob.Schedule(particleCount, 64, computeNewPointJobHandle);
        JobHandle preparedToComputePositions = JobHandle.CombineDependencies(RecomputeNewPointJobHandle, particlesSspeedJobHandle);
        ComputePosition computePositionJob = new ComputePosition
        {
            particlesPosition = particlesPosition,
            particlesDestination = particlesDestination,
            particlesSspeed = particlesSspeed,
            particlesVelocity = particlesVelocity,
            indexPaths = particlesindexPaths,
            hashMap = hashMap,
            settings = settings,
            cellOffsetTable = cellOffsetTableNative,
            finalPosition = finalposition
        };
        JobHandle comptePositionJobHandle = computePositionJob.Schedule(particleCount, 64, preparedToComputePositions);

        // Apply positions (write results back to the entity components).
        ApplyPositions applyPositionsJob = new ApplyPositions
        {
            particlesPosition = finalposition,
            particlesVelocity = particlesVelocity,
            particlesindexPaths = particlesindexPaths,
            //particlesDestination = particlesDestination,
            particlesSspeed = particlesSspeed,
            positions = positions,
            velocities = velocities,
            indexPaths = indexPaths,
            SMBSspeeds = SMBSspeeds,
            //SMBdestinations = SMBdestinations,
        };
        JobHandle applyPositionsJobHandle = applyPositionsJob.Schedule(particleCount, 64, comptePositionJobHandle);
        inputDeps = applyPositionsJobHandle;

        // Block until the chain finishes so the two short-lived buffers can be freed now.
        inputDeps.Complete();
        NwayPointspaths.Dispose();
        finalposition.Dispose();
    }

    // Done
    uniqueTypes.Clear();
    return (inputDeps);
}
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var settings = ECSController.FlockParams;
    var gameSettings = GlobalSettings.Instance;

    EntityManager.GetAllUniqueSharedComponentData(UniqueTypes);

    int targetsCount = BoidTargetsGroup.CalculateLength();
    int obstaclesCount = BoidObstaclesGroup.CalculateLength();
    UIControl.Instance.NrOfObstacles = obstaclesCount;

    // Ignore typeIndex 0, can't use the default for anything meaningful.
    for (int typeIndex = 1; typeIndex < UniqueTypes.Count; typeIndex++)
    {
        Boid boid = UniqueTypes[typeIndex];
        BoidGroup.SetFilter(boid);
        var boidCount = BoidGroup.CalculateLength();
        UIControl.Instance.NrOfBoidsAlive = boidCount;

        var cacheIndex = typeIndex - 1;

        // Containers that store all the per-boid / per-cell working data for this frame.
        var cellIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var hashMap = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);
        var cellCount = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellAlignment = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellPositions = new NativeArray<float3>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var targetsPositions = new NativeArray<float3>(targetsCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var closestTargetIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var obstaclesPositions = new NativeArray<float3>(obstaclesCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var closestObstacleIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var closestObstacleSqDistances = new NativeArray<float>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // NOTE(review): float3 is a value type, so SumPositions below receives a
        // COPY of this local — writes made inside that job cannot flow back here,
        // and MoveBoids would then always see float3.zero for sumOfAllPositions.
        // Verify SumPositions accumulates through a shared container instead;
        // if not, this needs to become a NativeArray<float3> of length 1.
        float3 sumOfAllBoidsPositions = float3.zero;

        // Copy current boid headings/positions into the cell buffers.
        var initialCellAlignmentJob = new CopyHeadingsInBuffer
        {
            headingsResult = cellAlignment
        };
        var initialCellAlignmentJobHandle = initialCellAlignmentJob.Schedule(BoidGroup, inputDeps);

        var initialCopyPositionJob = new CopyPositionsInBuffer
        {
            positionsResult = cellPositions
        };
        var initialCopyPositionJobHandle = initialCopyPositionJob.Schedule(BoidGroup, inputDeps);

        var sumPositionsJob = new SumPositions
        {
            positionsSum = sumOfAllBoidsPositions
        };
        var sumPositionsJobHandle = sumPositionsJob.Schedule(BoidGroup, inputDeps);

        // Copy targets positions.
        var copyPositionsOfTargetsJob = new CopyPositionsInBuffer
        {
            positionsResult = targetsPositions
        };
        var copyPositionsOfTargetsJobHandle = copyPositionsOfTargetsJob.Schedule(BoidTargetsGroup, inputDeps);

        // Copy obstacles positions.
        var copyPositionsOfObstaclesJob = new CopyPositionsInBuffer
        {
            positionsResult = obstaclesPositions
        };
        var copyPositionsOfObstaclesJobHandle = copyPositionsOfObstaclesJob.Schedule(BoidObstaclesGroup, inputDeps);

        // Cache the containers so they can be disposed on a later frame,
        // after all jobs reading them have completed.
        var newCellData = new CellsData
        {
            indicesOfCells = cellIndices,
            hashMapBlockIndexWithBoidsIndex = hashMap,
            sumOfDirectionsOnCells = cellAlignment,
            sumOfPositionsOnCells = cellPositions,
            nrOfBoidsOnCells = cellCount,
            targetsPositions = targetsPositions,
            closestTargetIndices = closestTargetIndices,
            closestObstacleIndices = closestObstacleIndices,
            closestObstacleSqDistances = closestObstacleSqDistances,
            obstaclesPositions = obstaclesPositions,
        };
        if (cacheIndex > (_CellsData.Count - 1))
        {
            _CellsData.Add(newCellData);
        }
        else
        {
            // Free last frame's containers for this cache slot before overwriting.
            DisposeCellData(_CellsData[cacheIndex]);
        }
        _CellsData[cacheIndex] = newCellData;

        // Hash each entity position into its spatial-grid cell.
        var hashPositionsJob = new HashPositionsToHashMap
        {
            hashMap = hashMap.ToConcurrent(),
            cellRadius = ECSController.Instance.CellSizeVaried,
            positionOffsetVary = ECSController.Instance.PositionNeighbourCubeOffset
        };
        var hashPositionsJobHandle = hashPositionsJob.Schedule(BoidGroup, inputDeps);

        // Every cell starts with a count of 1 (its own boid).
        var initialCellCountJob = new MemsetNativeArray<int>
        {
            Source = cellCount,
            Value = 1
        };
        var initialCellCountJobHandle = initialCellCountJob.Schedule(boidCount, 64, inputDeps);

        // Barriers: from here on the jobs consume the buffers filled above,
        // so they must wait until those writers have finished.
        var initialCellBarrierJobHandle = JobHandle.CombineDependencies(
            initialCellAlignmentJobHandle, initialCopyPositionJobHandle, initialCellCountJobHandle);
        var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(
            hashPositionsJobHandle, initialCellBarrierJobHandle, sumPositionsJobHandle);
        var targetsJobHandle = JobHandle.CombineDependencies(mergeCellsBarrierJobHandle,
            copyPositionsOfTargetsJobHandle, copyPositionsOfObstaclesJobHandle);

        var mergeCellsJob = new MergeCellsJob
        {
            indicesOfCells = cellIndices,
            cellAlignment = cellAlignment,
            cellPositions = cellPositions,
            cellCount = cellCount,
            targetsPositions = targetsPositions,
            closestTargetIndexToCells = closestTargetIndices,
            closestObstacleSqDistanceToCells = closestObstacleSqDistances,
            closestObstacleIndexToCells = closestObstacleIndices,
            obstaclesPositions = obstaclesPositions
        };
        // Job depends on the last barrier.
        var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, targetsJobHandle);

        EntityCommandBuffer.Concurrent commandBuffer = barrierCommand.CreateCommandBuffer().ToConcurrent();
        NativeQueue<float3> killedPositionsQueue = new NativeQueue<float3>(Allocator.TempJob);

        var steerJob = new MoveBoids
        {
            cellIndices = newCellData.indicesOfCells,
            alignmentWeight = gameSettings.AlignmentWeight,
            separationWeight = gameSettings.SeparationWeight,
            cohesionWeight = gameSettings.CohesionWeight,
            cellSize = ECSController.Instance.CellSizeVaried,
            sphereBoundarySize = gameSettings.SphereBoundarySize,
            sphereBoundaryWeight = gameSettings.BoundaryWeight,
            moveSpeed = gameSettings.MoveSpeed,
            cellAlignment = cellAlignment,
            cellPositions = cellPositions,
            cellCount = cellCount,
            dt = Time.deltaTime,
            walkToFlockCenterWeight = gameSettings.WalkToFlockCenterWeight,
            sumOfAllPositions = sumOfAllBoidsPositions,
            nrOfTotalBoids = boidCount,
            maintainYWeight = gameSettings.maintainYWeight,
            yLength = gameSettings.yLength,
            perlinNoiseScale = settings.perlinNoiseScale,
            targetsPositions = targetsPositions,
            cellClosestTargetsIndices = closestTargetIndices,
            goToTargetsWeight = gameSettings.goToTargetsWeight,
            obstaclesPositions = obstaclesPositions,
            cellClosestObstaclesIndices = closestObstacleIndices,
            cellClosestObstaclesSqDistances = closestObstacleSqDistances,
            startAvoidingObstacleAtDistance = gameSettings.avoidDistanceObstacles,
            avoidObstaclesWeight = gameSettings.avoidObstaclesWeight,
            terrainY = ECSController.TerrainY,
            distanceToAvoidTerrain = settings.distanceToAvoidTerrain,
            avoidTerrainWeight = gameSettings.avoidTerrainWeight,
            avoidXZwhileHeightBiggerThan = settings.avoidXZwhileHeightBiggerThan,
            avoidXZwhileHeightBiggerFade = settings.avoidXZwhileHeightBiggerFade,
            obstacleKillRadius = settings.obstacleKillRadius,
            commandBuffer = commandBuffer,
            diedPositions = killedPositionsQueue.ToConcurrent(),
        };
        // Job depends on the merge-cells job.
        var steerJobHandle = steerJob.Schedule(BoidGroup, mergeCellsJobHandle);
        barrierCommand.AddJobHandleForProducer(steerJobHandle);

        steerJobHandle.Complete();

        // FIX: drain the whole queue. The previous `if` reported at most one
        // killed boid per frame and then disposed the queue, silently dropping
        // every other death recorded by the parallel MoveBoids job.
        while (killedPositionsQueue.TryDequeue(out float3 pos))
        {
            GameController.Instance.KilledBoidAt(pos);
        }
        killedPositionsQueue.Dispose();

        inputDeps = steerJobHandle;
        BoidGroup.AddDependency(inputDeps);
    }
    UniqueTypes.Clear();

    return inputDeps;
}
protected override JobHandle OnUpdate(JobHandle inputDependencies)
{
    EntityManager.GetAllUniqueSharedComponentData(uniqueTypes);

    var obstacleCount = obstacleQuery.CalculateEntityCount();
    var targetCount = targetQuery.CalculateEntityCount();

    // Hash maps written by last frame's jobs can only be freed now,
    // once those jobs have been completed by the dependency chain.
    for (int i = 0; i < prevFrameHashmaps.Count; i++)
    {
        prevFrameHashmaps[i].Dispose();
    }
    prevFrameHashmaps.Clear();

    for (int hordeVariantIndex = 0; hordeVariantIndex < uniqueTypes.Count; hordeVariantIndex++)
    {
        var settings = uniqueTypes[hordeVariantIndex];
        hordeQuery.SetFilter(settings);
        var hordeCount = hordeQuery.CalculateEntityCount();
        if (hordeCount == 0)
        {
            continue;
        }

        #region Initial vars
        var hashMap = new NativeMultiHashMap<int, int>(hordeCount, Allocator.TempJob);
        var cellIndices = new NativeArray<int>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstaclePositionIndex = new NativeArray<int>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellTargetPositionIndex = new NativeArray<int>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellCount = new NativeArray<int>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellObstacleDistance = new NativeArray<float>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellAlignment = new NativeArray<float3>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var cellSeparation = new NativeArray<float3>(hordeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyTargetPositions = new NativeArray<float3>(targetCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var copyObstaclePositions = new NativeArray<float3>(obstacleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        #endregion

        #region Initial jobs
        // Snapshot current headings/positions into the per-cell buffers.
        var initialCellAlignmentJob = new CopyHeadings
        {
            headings = cellAlignment
        };
        var initialCellAlignmentJobHandle = initialCellAlignmentJob.Schedule(hordeQuery, inputDependencies);

        var initialCellSeparationJob = new CopyPositions
        {
            positions = cellSeparation
        };
        var initialCellSeparationJobHandle = initialCellSeparationJob.Schedule(hordeQuery, inputDependencies);

        var copyTargetPositionsJob = new CopyPositions
        {
            positions = copyTargetPositions
        };
        var copyTargetPositionsJobHandle = copyTargetPositionsJob.Schedule(targetQuery, inputDependencies);

        var copyObstaclePositionsJob = new CopyPositions
        {
            positions = copyObstaclePositions
        };
        var copyObstaclePositionsJobHandle = copyObstaclePositionsJob.Schedule(obstacleQuery, inputDependencies);

        // Keep the hash map alive until next frame; disposed at the top of OnUpdate.
        prevFrameHashmaps.Add(hashMap);

        // Bucket every horde member into its spatial-grid cell.
        var hashPositionsJob = new HashPositions
        {
            hashMap = hashMap.AsParallelWriter(),
            cellRadius = settings.CellRadius
        };
        var hashPositionsJobHandle = hashPositionsJob.Schedule(hordeQuery, inputDependencies);

        // Every cell starts with a count of 1 (its own member).
        var initialCellCountJob = new MemsetNativeArray<int>
        {
            Source = cellCount,
            Value = 1
        };
        var initialCellCountJobHandle = initialCellCountJob.Schedule(hordeCount, 64, inputDependencies);
        #endregion

        // Barriers: everything below consumes the buffers filled above.
        var initialCellBarrierJobHandle = JobHandle.CombineDependencies(
            initialCellAlignmentJobHandle, initialCellSeparationJobHandle, initialCellCountJobHandle);
        var copyTargetObstacleBarrierJobHandle = JobHandle.CombineDependencies(
            copyTargetPositionsJobHandle, copyObstaclePositionsJobHandle);
        var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(
            hashPositionsJobHandle, initialCellBarrierJobHandle, copyTargetObstacleBarrierJobHandle);

        var mergeCellsJob = new MergeCells
        {
            cellIndices = cellIndices,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions
        };
        var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, mergeCellsBarrierJobHandle);

        var steerJob = new Steer
        {
            cellIndices = cellIndices,
            settings = settings,
            cellAlignment = cellAlignment,
            cellSeparation = cellSeparation,
            cellObstacleDistance = cellObstacleDistance,
            cellObstaclePositionIndex = cellObstaclePositionIndex,
            cellTargetPositionIndex = cellTargetPositionIndex,
            cellCount = cellCount,
            targetPositions = copyTargetPositions,
            obstaclePositions = copyObstaclePositions,
            dt = Time.deltaTime,
        };
        var steerJobHandle = steerJob.Schedule(hordeQuery, mergeCellsJobHandle);

        // FIX: the TempJob NativeArrays above were never disposed (only the hash
        // map was tracked in prevFrameHashmaps), leaking native memory every
        // frame. Chain their disposal to run after the steer job — the last
        // reader — has finished with them.
        var disposeJobHandle = steerJobHandle;
        disposeJobHandle = cellIndices.Dispose(disposeJobHandle);
        disposeJobHandle = cellObstaclePositionIndex.Dispose(disposeJobHandle);
        disposeJobHandle = cellTargetPositionIndex.Dispose(disposeJobHandle);
        disposeJobHandle = cellCount.Dispose(disposeJobHandle);
        disposeJobHandle = cellObstacleDistance.Dispose(disposeJobHandle);
        disposeJobHandle = cellAlignment.Dispose(disposeJobHandle);
        disposeJobHandle = cellSeparation.Dispose(disposeJobHandle);
        disposeJobHandle = copyTargetPositions.Dispose(disposeJobHandle);
        disposeJobHandle = copyObstaclePositions.Dispose(disposeJobHandle);

        // Returned/chained dependency now includes the dispose jobs.
        inputDependencies = disposeJobHandle;
        hordeQuery.AddDependency(steerJobHandle);
    }
    uniqueTypes.Clear();
    return inputDependencies;
}
// Legacy injected-group flocking update: hashes boids into spatial cells,
// merges per-cell data, then steers. Runs synchronously — Complete() is called
// before returning, so all containers can be disposed here and inputDeps is
// returned unchanged.
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var boidCount = m_Boids.boidTag.Length;

    // Per-boid / per-cell scratch buffers, freed at the end of this method.
    var cellIndices = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var hashMap = new NativeMultiHashMap<int, int>(boidCount, Allocator.TempJob);
    var copyTargetPositions = new NativeArray<Position>(m_Boids.boidTarget.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var copyObstaclePositions = new NativeArray<Position>(m_Obstacles.obstaclePositions.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var cellSeparation = new NativeArray<Position>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var cellObstacleDistance = new NativeArray<float>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var cellObstaclePositionIndex = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var cellCount = new NativeArray<int>(boidCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

    // Bucket every boid position into its grid cell.
    // NOTE(review): hashMap is passed directly (not a concurrent writer) to a
    // parallel-for schedule — presumably HashPositions' field is already a
    // concurrent type; verify against the job struct.
    var hashPositionsJob = new HashPositions
    {
        positions = m_Boids.boidPositions,
        hashMap = hashMap,
        cellRadius = ECSBootstrapper.boidSettings.cellRadius
    };
    var hashPositionsJobHandle = hashPositionsJob.Schedule(boidCount, 64, inputDeps);

    // Snapshot boid positions into the separation buffer.
    var initialCellSeparationJob = new CopyComponentData<Position>
    {
        Source = m_Boids.boidPositions,
        Results = cellSeparation
    };
    var initialCellSeparationJobHandle = initialCellSeparationJob.Schedule(boidCount, 64, inputDeps);

    // Every cell starts with a count of 1 (its own boid).
    var initialCellCountJob = new MemsetNativeArray<int>
    {
        Source = cellCount,
        Value = 1
    };
    var initialCellCountJobHandle = initialCellCountJob.Schedule(boidCount, 64, inputDeps);

    var initialCellBarrierJobHandle = JobHandle.CombineDependencies(initialCellSeparationJobHandle, initialCellCountJobHandle);

    // Copy target / obstacle positions (small arrays — batch size 2).
    var copyTargetPositionJob = new CopyTargetPositions
    {
        Source = m_Boids.boidTarget,
        Results = copyTargetPositions
    };
    var copyTargetPositionsJobHandle = copyTargetPositionJob.Schedule(m_Boids.boidTarget.Length, 2, inputDeps);

    var copyObstaclePositionsJob = new CopyComponentData<Position>
    {
        Source = m_Obstacles.obstaclePositions,
        Results = copyObstaclePositions
    };
    var copyObstaclePositionsJobHandle = copyObstaclePositionsJob.Schedule(m_Obstacles.obstaclePositions.Length, 2, inputDeps);

    var copyTargetObstacleBarrierJobHandle = JobHandle.CombineDependencies(copyTargetPositionsJobHandle, copyObstaclePositionsJobHandle);

    // Merge runs only after hashing, the initial buffers, and the copies are done.
    var mergeCellsBarrierJobHandle = JobHandle.CombineDependencies(hashPositionsJobHandle, initialCellBarrierJobHandle, copyTargetObstacleBarrierJobHandle);

    var mergeCellsJob = new MergeCells
    {
        cellIndices = cellIndices,
        cellSeparation = cellSeparation,
        cellObstacleDistance = cellObstacleDistance,
        cellObstaclePositionIndex = cellObstaclePositionIndex,
        cellCount = cellCount,
        obstaclePositions = copyObstaclePositions
    };
    var mergeCellsJobHandle = mergeCellsJob.Schedule(hashMap, 64, mergeCellsBarrierJobHandle);

    // Steer writes new positions/headings directly into the injected arrays.
    var steerJob = new Steer
    {
        cellIndices = cellIndices,
        settings = ECSBootstrapper.boidSettings,
        cellSeparation = cellSeparation,
        cellObstacleDistance = cellObstacleDistance,
        cellObstaclePositionIndex = cellObstaclePositionIndex,
        cellCount = cellCount,
        targetPositions = copyTargetPositions,
        obstaclePositions = copyObstaclePositions,
        dt = Time.deltaTime,
        positions = m_Boids.boidPositions,
        headings = m_Boids.boidHeadings,
    };
    var steerJobHandle = steerJob.Schedule(boidCount, 64, mergeCellsJobHandle);

    // Block until steering finishes so every container can be freed now.
    steerJobHandle.Complete();

    cellIndices.Dispose();
    hashMap.Dispose();
    copyTargetPositions.Dispose();
    copyObstaclePositions.Dispose();
    cellSeparation.Dispose();
    cellObstacleDistance.Dispose();
    cellObstaclePositionIndex.Dispose();
    cellCount.Dispose();

    return inputDeps;
}
// Rebuilds the spatial index grid and moves entities, as a three-job chain:
// reset grid -> fill grid from entities -> apply movement.
// (The old commented-out main-thread loop was removed in review — it was dead
// code superseded by the jobified FillDataArrayJob/PositionJob path.)
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Step 1: mark every grid slot as empty (-1).
    // MemsetNativeArray assigns one value across the whole array;
    // 64 is the parallel-for inner-loop batch count.
    var resetGridJob = new MemsetNativeArray<int>
    {
        Source = gridIndexArray,
        Value = -1
    };
    var resetGridHandle = resetGridJob.Schedule(ARRAY_SIZE, 64, inputDeps);

    // Step 2: write current entity indices into the grid; depends on the reset.
    var populateGridJob = new FillDataArrayJob
    {
        gridIndexData = gridIndexArray
    };
    var populateGridHandle = populateGridJob.Schedule(this, resetGridHandle);

    // Step 3: compute movement using the freshly built grid; depends on the fill.
    var moveJob = new PositionJob(gridIndexArray, entityPositionsArray);
    return moveJob.Schedule(this, populateGridHandle);
}