/// <summary>
/// Fills a <c>FindZonesJob</c> with its inputs and schedules it, one find-zone
/// request per iteration (inner batch size 1).
/// </summary>
public static JobHandle BeginJob(NativeArray<FindZoneController.FindZoneData> findZones, int findZoneCount, NativeArray<ZoneController.ZoneData> zones, NativeMultiHashMap<int, int> neiboringZones) {
    var job = new FindZonesJob {
        findZones = findZones,
        zones = zones,
        neiboringZones = neiboringZones
    };
    // Extension-method call form of IJobParallelForExtensions.Schedule.
    return job.Schedule(findZoneCount, 1);
}
/// <summary>
/// Schedules a <c>ZoneUpdateJobTest</c> over every zone position, writing
/// neighbouring-zone pairs through the supplied parallel writer.
/// </summary>
public static JobHandle BeginJob(NativeArray<float3> zonePositions, NativeMultiHashMap<int, int>.ParallelWriter neiboringZones, int maxNeiboringZones, float distance) {
    var job = new ZoneUpdateJobTest {
        zonePositions = zonePositions,
        neiboringZones = neiboringZones,
        maxNeiboringZones = maxNeiboringZones,
        distance = distance
    };
    // One job iteration per zone position, inner batch size 1.
    return job.Schedule(zonePositions.Length, 1);
}
/// <summary>
/// Schedules a <c>MoveJob</c> over all positions with the caller-chosen batch size.
/// </summary>
public static JobHandle Begin(NativeArray<float3> positions, int batchSize) {
    // Time.deltaTime is sampled here on the main thread and copied into the job,
    // since jobs cannot read Time directly.
    var job = new MoveJob {
        Positions = positions,
        DeltaTime = Time.deltaTime
    };
    return job.Schedule(positions.Length, batchSize);
}
/// <summary>
/// Schedules a <c>ZoneSetupJobByDistance</c> over every zone, recording
/// neighbouring zones within <paramref name="distance"/>.
/// </summary>
public static JobHandle BeginJob(NativeArray<ZoneController.ZoneData> zones, NativeMultiHashMap<int, int>.ParallelWriter neiboringZones, int maxNeiboringZones, double distance, NativeArray<float> maxZoneSize) {
    var job = new ZoneSetupJobByDistance {
        zones = zones,
        neiboringZones = neiboringZones,
        maxNeiboringZones = maxNeiboringZones,
        distance = distance,
        maxZoneSize = maxZoneSize
    };
    // One iteration per zone, inner batch size 1.
    return job.Schedule(zones.Length, 1);
}
/// <summary>
/// Rebuilds the moving-entity quadrant multi-hash-map every frame: gathers all
/// entities matching <c>quadrantQueryDesc</c>, grows the map if needed, then
/// runs <c>SetQuadrantDataHashMapJob</c> to insert them in parallel.
/// </summary>
protected override void OnUpdate() {
    // Snapshot the query results into TempJob arrays for the job to read.
    EntityQuery entityQuery = GetEntityQuery(quadrantQueryDesc);
    NativeArray<Entity> entityArray = entityQuery.ToEntityArray(Allocator.TempJob);
    NativeArray<Translation> transArray = entityQuery.ToComponentDataArray<Translation>(Allocator.TempJob);
    NativeArray<MovingQuadrantEntity> movingQuadEntArray = entityQuery.ToComponentDataArray<MovingQuadrantEntity>(Allocator.TempJob);
    NativeArray<PreviousMovement> prevMoveArray = entityQuery.ToComponentDataArray<PreviousMovement>(Allocator.TempJob);

    // The map is rebuilt from scratch each frame; grow capacity first so the
    // parallel writer never runs out of room.
    quadrantMultiHashMap.Clear();
    int entityCount = entityQuery.CalculateEntityCount(); // hoisted: was computed twice
    if (entityCount > quadrantMultiHashMap.Capacity) {
        quadrantMultiHashMap.Capacity = entityCount;
    }

    // AsParallelWriter allows concurrent writes from the parallel-for job.
    SetQuadrantDataHashMapJob setQuadrantDataHashMapJob = new SetQuadrantDataHashMapJob {
        quadrantMultiHashMap = quadrantMultiHashMap.AsParallelWriter(),
        entities = entityArray,
        translations = transArray,
        quadEntTypes = movingQuadEntArray,
        prevMovements = prevMoveArray
    };
    JobHandle jobHandle = IJobParallelForExtensions.Schedule(setQuadrantDataHashMapJob, entityArray.Length, 32, this.Dependency);
    jobHandle.Complete();

    // FIX: these TempJob allocations were never released, leaking native memory
    // every frame. Safe to dispose here because the job has completed.
    entityArray.Dispose();
    transArray.Dispose();
    movingQuadEntArray.Dispose();
    prevMoveArray.Dispose();
}
/// <summary>
/// Schedules a <c>PlantUpdateJob</c> over <paramref name="plantCount"/> plants,
/// producing per-plant resource gain and growth-stage results.
/// </summary>
public static JobHandle BeginJob(NativeArray<float2> plantReasourceGain, NativeArray<PlantScript.GrowthStage> plantGrowthStage, NativeArray<int> plants, int plantCount, NativeArray<PlantScript.PlantData> allPlants, NativeArray<ZoneController.ZoneData> zones, NativeMultiHashMap<int, int> neiboringZones, NativeMultiHashMap<int, int> plantsInZones, EarthScript.EarthState earthState, NativeArray<PlantSpecies.GrowthStageData> growthStages, PlantSpeciesSeeds.SeedGerminationRequirement seedGerminationRequirement) {
    var job = new PlantUpdateJob {
        plantReasourceGain = plantReasourceGain,
        plantGrowthStage = plantGrowthStage,
        updatePlants = plants,
        allPlants = allPlants,
        zones = zones,
        neiboringZones = neiboringZones,
        plantsInZones = plantsInZones,
        earthState = earthState,
        growthStages = growthStages,
        seedGerminationRequirement = seedGerminationRequirement
    };
    // One iteration per plant to update, inner batch size 1.
    return job.Schedule(plantCount, 1);
}
/// <summary>
/// Adds newly tagged stationary entities (matching <c>addQueryDesc</c>) to the
/// stationary quadrant multi-hash-map via <c>AddStationaryQuadrantEntityToMapJob</c>,
/// growing the map to hold every stationary entity first.
/// </summary>
protected override void OnUpdate() {
    // All stationary entities — used only to size the hashmap.
    EntityQuery entityQuery = GetEntityQuery(typeof(Translation), typeof(StationaryQuadrantEntity));

    // Entities flagged for addition this frame, snapshotted into TempJob arrays.
    EntityQuery addEntityQuery = GetEntityQuery(addQueryDesc);
    NativeArray<Entity> addEntityArray = addEntityQuery.ToEntityArray(Allocator.TempJob);
    NativeArray<Translation> addTransArray = addEntityQuery.ToComponentDataArray<Translation>(Allocator.TempJob);
    NativeArray<StationaryQuadrantEntity> addStatQuadEntArray = addEntityQuery.ToComponentDataArray<StationaryQuadrantEntity>(Allocator.TempJob);

    // Grow capacity if the stationary population outgrew the map.
    int stationaryCount = entityQuery.CalculateEntityCount(); // hoisted: was computed twice
    if (stationaryCount > quadrantMultiHashMap.Capacity) {
        quadrantMultiHashMap.Capacity = stationaryCount;
    }

    // AsParallelWriter allows concurrent writes; the command buffer removes the
    // 'add' tag once an entity has been inserted.
    AddStationaryQuadrantEntityToMapJob addEntityToMapJob = new AddStationaryQuadrantEntityToMapJob {
        quadrantMultiHashMap = quadrantMultiHashMap.AsParallelWriter(),
        entities = addEntityArray,
        translations = addTransArray,
        quadEntTypes = addStatQuadEntArray,
        entityCommandBuffer = commandBufferSystem.CreateCommandBuffer().AsParallelWriter()
    };
    JobHandle addJobHandle = IJobParallelForExtensions.Schedule(addEntityToMapJob, addEntityArray.Length, 32, this.Dependency);
    // Tell the barrier system to play back the command buffer after the job completes.
    commandBufferSystem.AddJobHandleForProducer(addJobHandle);
    addJobHandle.Complete();

    // FIX: these TempJob allocations were never released, leaking native memory
    // every frame. Safe to dispose here because the job has completed.
    addEntityArray.Dispose();
    addTransArray.Dispose();
    addStatQuadEntArray.Dispose();
}
/// <summary>
/// Verifies that events enqueued from a parallel job land in the queue segment
/// of the worker thread that wrote them: each job iteration records its thread id
/// and bumps a per-thread usage counter, then the per-thread component counts in
/// the queue are compared against those counters.
/// </summary>
public void EnqueuesToCorrectThread([Values(1, 10, 100)] int jobCount, [Values(1, 2, 10)] int numPerThread) {
    var threadIds = new NativeArray<int>(jobCount, Allocator.TempJob);
    var threadUsages = new NativeArray<int>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);
    var(baseQueue, componentQueue, bufferQueue) = new QueueRig<EcsTestData, EcsIntElement>(Allocator.TempJob);
    try {
        var handle = IJobParallelForExtensions.Schedule(new EnqueuesToCorrectThreadJob {
            Events = componentQueue,
            ThreadIds = threadIds,
            ThreadUsages = threadUsages
        }, jobCount, numPerThread);
        handle.Complete();
        for (int i = 0; i < jobCount; i++) {
            var threadId = threadIds[i];
            var usageCount = threadUsages[threadId];
            // Skip threads the scheduler never used.
            if (usageCount == 0) { continue; }
            var size = baseQueue.GetComponentsForThread(threadId).Length;
            var componentCountForThread = size / sizeof(EcsTestData);
            // FIX: removed leftover Debugger.Break() debugging residue that fired
            // just before this assertion; the assert itself reports the mismatch.
            Assert.AreEqual(usageCount, componentCountForThread);
        }
    } finally {
        threadIds.Dispose();
        threadUsages.Dispose();
        baseQueue.Dispose();
    }
}
/// <summary>
/// Schedules an <c>AnimalUpdateJob</c> over <paramref name="animalCount"/> animals,
/// computing each animal's next action from species parameters and the spatial
/// zone lookups.
/// </summary>
public static JobHandle BeginJob(NativeArray<AnimalScript.AnimalActions> animalActions, NativeArray<int> updateAnimals, int animalCount, float speciesFullFood, float speciesMaxFood, float speciesSightRange, EyesScript.EyeTypes speciesEyeType, float speciesEatRange, float speciesSmellRange, int speciesFoodType, NativeArray<int> eddibleFoodTypes, NativeArray<int> predatorFoodTypes, NativeArray<AnimalScript.AnimalData> allAnimals, NativeArray<PlantScript.PlantData> allPlants, NativeArray<ZoneController.ZoneData> zones, NativeMultiHashMap<int, int> neiboringZones, NativeMultiHashMap<int, int> animalsInZones, NativeMultiHashMap<int, int> plantsInZones, NativeMultiHashMap<int2, ZoneController.DataLocation> organismsByFoodTypeInZones) {
    var job = new AnimalUpdateJob {
        // Output buffer and the indices of the animals to process this batch.
        animalActions = animalActions,
        updateAnimals = updateAnimals,
        // Species-wide constants shared by every iteration.
        speciesFullFood = speciesFullFood,
        speciesMaxFood = speciesMaxFood,
        speciesSightRange = speciesSightRange,
        speciesEyeType = speciesEyeType,
        speciesEatRange = speciesEatRange,
        speciesSmellRange = speciesSmellRange,
        speciesFoodType = speciesFoodType,
        eddibleFoodTypes = eddibleFoodTypes,
        predatorFoodTypes = predatorFoodTypes,
        // World state and spatial lookup structures.
        allAnimals = allAnimals,
        allPlants = allPlants,
        zones = zones,
        neiboringZones = neiboringZones,
        animalsInZones = animalsInZones,
        plantsInZones = plantsInZones,
        organismsByFoodTypeInZones = organismsByFoodTypeInZones
    };
    // One iteration per animal to update, inner batch size 1.
    return job.Schedule(animalCount, 1);
}
/// <summary>
/// Schedules a <c>ShockwaveSuperpositionJob</c> over every cell, chained after
/// <paramref name="dependency"/>.
/// </summary>
public static JobHandle Begin(NativeArray<ShockwaveData> cells, ShockMasterParams master, JobHandle dependency) {
    // Batch size is derived from the grid dimensions in the master params.
    int batchCount = ShockDataExt.GetInnerLoopCountFromSize(master);
    var job = new ShockwaveSuperpositionJob {
        Cells = cells,
        Params = master,
        deltaTime = Time.deltaTime // sampled on the main thread for the job
    };
    return job.Schedule(cells.Length, batchCount, dependency);
}
/// <summary>
/// Schedules a <c>NoisePositionJob</c> that displaces <paramref name="positions"/>
/// with the selected noise function, time-animated by realtimeSinceStartup.
/// </summary>
public static JobHandle Begin(NativeArray<float3> positions, SizeParams sizes, NoiseFuncKinds noiseFunc) {
    var job = new NoisePositionJob {
        Time = UnityEngine.Time.realtimeSinceStartup,
        Positions = positions,
        Sizes = sizes,
        NoiseFunction = noiseFunc
    };
    // Flag degenerate grid dimensions; the job is still scheduled regardless.
    bool degenerate = sizes.NumberOfRows == 0 || sizes.NumberPerRow == 0;
    if (degenerate) {
        Debug.LogWarning($"Sizes - {sizes}");
    }
    return job.Schedule(positions.Length, BATCH_SIZE);
}
/// <summary>
/// Schedules a <c>ShockwaveSimJob</c> that simulates every cell against the
/// given shockwave <paramref name="centre"/>, chained after <paramref name="dependency"/>.
/// </summary>
public static JobHandle Begin(ShockwaveData centre, ShockMasterParams masterParams, NativeArray<ShockwaveData> cells, JobHandle dependency) {
    // Batch size is derived from the grid dimensions in the master params.
    int batchCount = ShockDataExt.GetInnerLoopCountFromSize(masterParams);
    var job = new ShockwaveSimJob {
        Cells = cells,
        Centre = centre,
        Params = masterParams
    };
    return job.Schedule(cells.Length, batchCount, dependency);
}
/// <summary>
/// Schedules this job over <c>numFragments</c> iterations with an inner batch
/// size of 64 and returns the handle.
/// </summary>
public JobHandle Schedule() =>
    IJobParallelForExtensions.Schedule<CreateTrajectoryFragmentsJob>(this, numFragments, 64);
// NOTE(review): this method is raw decompiler (IL-to-C#) output — explicit operator
// calls (Vector3.op_Subtraction), accessor calls (get_position), ".\u002Ector(...)"
// in place of constructors, and the "(M0)" generic placeholder cast. It will not
// compile as ordinary C# without being cleaned up; the code is kept byte-for-byte
// here and only comments have been added. Reformatted for readability.
private void UpdateParticles2WithJob() { Plane plane = (Plane)null;
    // Iterate every particle except the root, constraining it toward its parent.
    for (int index = 1; index < this.m_Particles.Count; ++index) {
        DynamicBoneV2.Particle particle1 = this.m_Particles[index];
        DynamicBoneV2.Particle particle2 = this.m_Particles[particle1.m_ParentIndex];
        float magnitude1;
        // magnitude1 = rest length to the parent: live transform distance when the
        // particle has a Transform, otherwise the end-offset through the parent's matrix.
        if (Object.op_Inequality((Object)particle1.m_Transform, (Object)null)) {
            Vector3 vector3 = Vector3.op_Subtraction(particle2.m_Transform.get_position(), particle1.m_Transform.get_position());
            magnitude1 = ((Vector3) ref vector3).get_magnitude();
        } else {
            Matrix4x4 localToWorldMatrix = particle2.m_Transform.get_localToWorldMatrix();
            Vector3 vector3 = ((Matrix4x4) ref localToWorldMatrix).MultiplyVector(particle1.m_EndOffset);
            magnitude1 = ((Vector3) ref vector3).get_magnitude();
        }
        // Effective stiffness blended by the overall weight.
        float num1 = Mathf.Lerp(1f, particle1.m_Stiffness, this.m_Weight);
        if ((double)num1 > 0.0 || (double)particle1.m_Elasticity > 0.0) {
            // Rest target position: parent's matrix with its translation column
            // replaced by the simulated parent position.
            Matrix4x4 localToWorldMatrix = particle2.m_Transform.get_localToWorldMatrix();
            ((Matrix4x4) ref localToWorldMatrix).SetColumn(3, Vector4.op_Implicit(particle2.m_Position));
            Vector3 vector3_1 = !Object.op_Inequality((Object)particle1.m_Transform, (Object)null) ? ((Matrix4x4) ref localToWorldMatrix).MultiplyPoint3x4(particle1.m_EndOffset) : ((Matrix4x4) ref localToWorldMatrix).MultiplyPoint3x4(particle1.m_Transform.get_localPosition());
            // Pull toward the rest position proportionally to elasticity.
            Vector3 vector3_2 = Vector3.op_Subtraction(vector3_1, particle1.m_Position);
            DynamicBoneV2.Particle particle3 = particle1;
            particle3.m_Position = Vector3.op_Addition(particle3.m_Position, Vector3.op_Multiply(vector3_2, particle1.m_Elasticity));
            if ((double)num1 > 0.0) {
                // Stiffness clamp: limit how far the particle may stray from rest.
                Vector3 vector3_3 = Vector3.op_Subtraction(vector3_1, particle1.m_Position);
                float magnitude2 = ((Vector3) ref vector3_3).get_magnitude();
                float num2 = (float)((double)magnitude1 * (1.0 - (double)num1) * 2.0);
                if ((double)magnitude2 > (double)num2) {
                    DynamicBoneV2.Particle particle4 = particle1;
                    particle4.m_Position = Vector3.op_Addition(particle4.m_Position, Vector3.op_Multiply(vector3_3, (magnitude2 - num2) / magnitude2));
                }
            }
        }
        if (this.m_Colliders != null) {
            int count = this.m_Colliders.Count;
            // Eight parallel arrays describing the colliders; ".\u002Ector" is the
            // decompiled NativeArray constructor. (Allocator)2 / (NativeArrayOptions)1
            // are presumably Allocator.TempJob and NativeArrayOptions.ClearMemory —
            // TODO confirm against the enum definitions.
            NativeArray <DynamicBoneColliderBase.Bound> nativeArray1;
            ((NativeArray <DynamicBoneColliderBase.Bound>) ref nativeArray1).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <float> nativeArray2;
            ((NativeArray <float>) ref nativeArray2).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <float> nativeArray3;
            ((NativeArray <float>) ref nativeArray3).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <Vector3> nativeArray4;
            ((NativeArray <Vector3>) ref nativeArray4).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <Vector3> nativeArray5;
            ((NativeArray <Vector3>) ref nativeArray5).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <Vector3> nativeArray6;
            ((NativeArray <Vector3>) ref nativeArray6).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <float> nativeArray7;
            ((NativeArray <float>) ref nativeArray7).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            NativeArray <bool> nativeArray8;
            ((NativeArray <bool>) ref nativeArray8).\u002Ector(count, (Allocator)2, (NativeArrayOptions)1);
            // Run the collider job for this single particle and read back its
            // (possibly pushed-out) position.
            DynamicBoneV2.ColliderJob colliderJob = new DynamicBoneV2.ColliderJob() { boundAry = nativeArray1, particlePosition = particle1.m_Position, particleRadiusAry = nativeArray2, capsuleHeightAry = nativeArray3, centerAry = nativeArray4, c0Ary = nativeArray5, c1Ary = nativeArray6, radiusAry = nativeArray7, enabledAry = nativeArray8 };
            // NOTE(review): batch size 0 and a (JobHandle)null dependency look like
            // decompilation artifacts — verify against the original source.
            JobHandle jobHandle = IJobParallelForExtensions.Schedule <DynamicBoneV2.ColliderJob>((M0)colliderJob, count, 0, (JobHandle)null);
            ((JobHandle) ref jobHandle).Complete();
            particle1.m_Position = colliderJob.particlePosition;
            // Release all temporary native arrays.
            ((NativeArray <DynamicBoneColliderBase.Bound>) ref nativeArray1).Dispose();
            ((NativeArray <float>) ref nativeArray2).Dispose();
            ((NativeArray <float>) ref nativeArray3).Dispose();
            ((NativeArray <Vector3>) ref nativeArray4).Dispose();
            ((NativeArray <Vector3>) ref nativeArray5).Dispose();
            ((NativeArray <Vector3>) ref nativeArray6).Dispose();
            ((NativeArray <float>) ref nativeArray7).Dispose();
            ((NativeArray <bool>) ref nativeArray8).Dispose();
        }
        // Optionally project the particle onto the plane through the parent that is
        // perpendicular to the frozen axis.
        if (this.m_FreezeAxis != DynamicBoneV2.FreezeAxis.None) {
            switch (this.m_FreezeAxis) {
                case DynamicBoneV2.FreezeAxis.X:
                    ((Plane) ref plane).SetNormalAndPosition(particle2.m_Transform.get_right(), particle2.m_Position);
                    break;
                case DynamicBoneV2.FreezeAxis.Y:
                    ((Plane) ref plane).SetNormalAndPosition(particle2.m_Transform.get_up(), particle2.m_Position);
                    break;
                case DynamicBoneV2.FreezeAxis.Z:
                    ((Plane) ref plane).SetNormalAndPosition(particle2.m_Transform.get_forward(), particle2.m_Position);
                    break;
            }
            DynamicBoneV2.Particle particle3 = particle1;
            particle3.m_Position = Vector3.op_Subtraction(particle3.m_Position, Vector3.op_Multiply(((Plane) ref plane).get_normal(), ((Plane) ref plane).GetDistanceToPoint(particle1.m_Position)));
        }
        // Length constraint: move the particle along the parent direction so the
        // distance returns to the rest length magnitude1.
        Vector3 vector3_4 = Vector3.op_Subtraction(particle2.m_Position, particle1.m_Position);
        float magnitude3 = ((Vector3) ref vector3_4).get_magnitude();
        if ((double)magnitude3 > 0.0) {
            DynamicBoneV2.Particle particle3 = particle1;
            particle3.m_Position = Vector3.op_Addition(particle3.m_Position, Vector3.op_Multiply(vector3_4, (magnitude3 - magnitude1) / magnitude3));
        }
    }
}