/// <summary>
/// Attempts to remove the job with the given id from the job table.
/// On success, <paramref name="jobHandle"/> holds the removed handle and the
/// removal is logged at debug level; on failure the method simply returns false.
/// </summary>
/// <param name="jobId">Identifier of the job to remove.</param>
/// <param name="jobHandle">The removed job handle, or the default value if not found.</param>
/// <returns>True if a job with that id was present and removed; otherwise false.</returns>
public bool TryRemoveJob(Guid jobId, out JobHandle jobHandle)
{
    // Guard clause: bail out immediately when the id is unknown.
    if (!_jobs.TryRemove(jobId, out jobHandle))
    {
        return false;
    }

    // Only format the message when debug logging is actually enabled.
    if (_log.IsDebugEnabled)
    {
        _log.DebugFormat("Removed job: {0} ({1})", jobId, jobHandle.Status);
    }
    return true;
}
/// <summary>
/// Schedules the per-frame formation update chain:
/// (1) copy navigation output into formation data, (2)+(3) snapshot formation
/// data and entities into TempJob copies, (4) search each formation's closest
/// neighbours against the snapshots, (5) advance formation state.
/// Returns the final handle of that chain as the system's output dependency.
/// </summary>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Realloc();
    // Nothing to schedule when no formations exist this frame.
    if (formations.Length == 0)
    {
        return(inputDeps);
    }
    //NativeArrayExtensions.ResizeNativeArray(ref raycastHits, math.max(raycastHits.Length,minions.Length));
    //NativeArrayExtensions.ResizeNativeArray(ref raycastCommands, math.max(raycastCommands.Length, minions.Length));

    // Step 1: apply the navigation system's results to the live formation data.
    var copyNavigationJob = new CopyNavigationPositionToFormation
    {
        formations = formations.data,
        agents = formations.agents,
        navigators = formations.navigators,
        navigationData = formations.navigationData,
        dt = Time.deltaTime
    };
    var copyNavigationJobHandle = copyNavigationJob.Schedule(formations.Length, SimulationState.SmallBatchSize, inputDeps);

    // Step 2: snapshot formation component data so the closest-formation search
    // reads a stable copy while later jobs write the live arrays.
    // NOTE(review): copyFormations / copyFormationEntities are Allocator.TempJob
    // allocations with no Dispose visible in this method — presumably deallocated
    // by the consuming jobs ([DeallocateOnJobCompletion]) — TODO confirm, else this leaks.
    var copyFormations = new NativeArray<FormationData>(formations.data.Length, Allocator.TempJob);
    var copyFormationsJob = new CopyComponentData<FormationData>
    {
        Source = formations.data,
        Results = copyFormations
    };
    var copyFormationJobHandle = copyFormationsJob.Schedule(formations.data.Length, SimulationState.HugeBatchSize, copyNavigationJobHandle);

    // Step 3: snapshot the formation entities; runs in parallel with step 2
    // since both depend only on the navigation copy.
    var copyFormationEntities = new NativeArray<Entity>(formations.entities.Length, Allocator.TempJob);
    var copyFormationEntitiesJob = new CopyEntities
    {
        Source = formations.entities,
        Results = copyFormationEntities
    };
    var copyFormationEntitiesJobHandle = copyFormationEntitiesJob.Schedule(formations.entities.Length, SimulationState.HugeBatchSize, copyNavigationJobHandle);

    // Both snapshots must finish before the search may read them.
    var copyBarrier = JobHandle.CombineDependencies(copyFormationJobHandle, copyFormationEntitiesJobHandle);

    // Step 4: per-formation search for the closest other formation.
    var closestSearchJob = new SearchClosestFormations
    {
        formations = copyFormations,
        closestFormations = formations.closestFormations,
        formationEntities = copyFormationEntities
    };
    var closestSearchJobHandle = closestSearchJob.Schedule(formations.Length, SimulationState.HugeBatchSize, copyBarrier);

    // Step 5: advance formation state using the closest-formation results.
    var updateFormationsJob = new UpdateFormations
    {
        closestFormations = formations.closestFormations,
        formationNavigators = formations.navigators,
        formationHighLevelPath = formations.highLevelPaths,
        formations = formations.data,
    };
    var updateFormationsJobHandle = updateFormationsJob.Schedule(formations.Length, SimulationState.SmallBatchSize, closestSearchJobHandle);

    // Pass two, rearrangeing the minion indices
    // TODO Split this system into systems that make sense
    // calculate formation movement
    // advance formations
    // calculate minion position and populate the array
    return(updateFormationsJobHandle);
}
/// <summary>
/// Per-frame player control: advances the boost timers, integrates forward
/// speed from vertical input (with decay when idle, clamped to max), writes
/// the physics linear velocity along the player's forward vector, and yaws
/// the player from horizontal input.
/// </summary>
/// <param name="inputDeps">Incoming job dependency chain.</param>
/// <returns>The scheduled ForEach job's handle.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    float deltaTime = Time.DeltaTime;
    JobHandle jobHandle = Entities
        .WithAll<PlayerTag>()
        .ForEach((ref PhysicsVelocity physicsVelocity, ref RotationEulerXYZ rotationEuler, ref PlayerMovementInputData playerMovementInputData, in Rotation rotation) =>
        {
            // Boost timers.
            // NOTE(review): the original code computed a local "boostAmount" from
            // boostMultiplier when a boost started but never applied it anywhere;
            // the dead local has been removed (no behavior change). Confirm whether
            // boost was meant to scale _currentVelocity — if so, that feature was
            // never wired up.
            if (playerMovementInputData._boostRechargeTimeLeft > 0)
            {
                // Recharging: tick the recharge timer down, no boost possible.
                playerMovementInputData._boostRechargeTimeLeft -= deltaTime;
            }
            else
            {
                if (playerMovementInputData._boostTimeLeft > 0)
                {
                    playerMovementInputData._boostTimeLeft -= deltaTime;
                    if (playerMovementInputData._boostTimeLeft <= 0)
                    {
                        // Boost window just ended: start the recharge countdown.
                        playerMovementInputData._boostRechargeTimeLeft = playerMovementInputData.boostRechargeTime;
                    }
                }
                if (playerMovementInputData._boostPressedLastFrame)
                {
                    if (playerMovementInputData._boostTimeLeft <= 0)
                    {
                        // Boost requested and available: open a new boost window.
                        playerMovementInputData._boostTimeLeft = playerMovementInputData.boostLifeTime;
                    }
                }
            }

            // Movement: accelerate from vertical input, decay when there is no
            // input, then clamp speed to [0, maxVelocity].
            playerMovementInputData._currentVelocity += playerMovementInputData._movementData.y * playerMovementInputData.velocityChangeRate * deltaTime;
            if (playerMovementInputData._movementData.y == 0)
            {
                playerMovementInputData._currentVelocity -= playerMovementInputData.velocityChangeRate * deltaTime;
            }
            playerMovementInputData._currentVelocity = math.clamp(playerMovementInputData._currentVelocity, 0, playerMovementInputData.maxVelocity);
            physicsVelocity.Linear = math.forward(rotation.Value) * playerMovementInputData._currentVelocity;

            // Rotation: yaw (Y euler angle) from horizontal input.
            float3 currentRotation = rotationEuler.Value;
            currentRotation.y += playerMovementInputData._movementData.x * playerMovementInputData.rotationSpeed * deltaTime;
            rotationEuler.Value = currentRotation;
        })
        .Schedule(inputDeps);
    return jobHandle;
}
/// <summary>
/// Schedules the multi-phase parallel construction of a CollisionLayer, either
/// from an EntityQuery (hasQueryData) or from an explicit ColliderBody array
/// (hasBodiesArray). The phases are: gather AABBs and bucket elements (Part1),
/// single-threaded bucket prefix pass (Part2), scatter source indices (Part3),
/// per-bucket processing by xmin (Part4), and final element copy into the sorted
/// layer (Part5). Temporary arrays owned by this method are released by chaining
/// Dispose onto the returned handle; caller-provided remap/aabb arrays are left alone.
/// NOTE(review): layerIndices/xmins/aos have no Dispose visible here — presumably
/// deallocated by the jobs themselves ([DeallocateOnJobCompletion]) — TODO confirm.
/// </summary>
/// <param name="config">Validated build configuration (query or bodies source).</param>
/// <param name="layer">The newly allocated collision layer being built.</param>
/// <param name="allocator">Allocator used for the layer's persistent storage.</param>
/// <param name="inputDeps">Incoming job dependency.</param>
/// <returns>Handle completing when the layer is fully built (and temps disposed).</returns>
public static JobHandle ScheduleParallel(this BuildCollisionLayerConfig config, out CollisionLayer layer, Allocator allocator, JobHandle inputDeps = default)
{
    config.ValidateSettings();
    var jh = inputDeps;
    if (config.hasQueryData)
    {
        // Source: EntityQuery.
        int count = config.query.CalculateEntityCount();
        layer = new CollisionLayer(count, config.settings, allocator);
        var layerIndices = new NativeArray<int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var xmins = new NativeArray<float>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var aos = new NativeArray<BuildCollisionLayerInternal.ColliderAoSData>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        // Reuse the caller's remap buffer when supplied; otherwise allocate a temp one.
        NativeArray<int> remapSrcIndices = config.hasRemapSrcIndices ? config.remapSrcIndices : new NativeArray<int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        jh = new BuildCollisionLayerInternal.Part1FromQueryJob
        {
            layer = layer,
            typeGroup = config.typeGroup,
            layerIndices = layerIndices,
            xmins = xmins,
            colliderAoS = aos
        }.ScheduleParallel(config.query, jh);
        // Part2 is intentionally single-threaded (sequential pass over buckets).
        jh = new BuildCollisionLayerInternal.Part2Job { layer = layer, layerIndices = layerIndices }.Schedule(jh);
        jh = new BuildCollisionLayerInternal.Part3Job { layerIndices = layerIndices, unsortedSrcIndices = remapSrcIndices }.ScheduleParallel(count, 512, jh);
        // One thread per bucket.
        jh = new BuildCollisionLayerInternal.Part4Job { unsortedSrcIndices = remapSrcIndices, xmins = xmins, bucketStartAndCounts = layer.bucketStartsAndCounts }.ScheduleParallel(layer.BucketCount, 1, jh);
        jh = new BuildCollisionLayerInternal.Part5FromQueryJob { layer = layer, colliderAoS = aos, remapSrcIndices = remapSrcIndices }.ScheduleParallel(count, 128, jh);
        // Only dispose the remap buffer if we allocated it ourselves.
        if (!config.hasRemapSrcIndices)
        {
            jh = remapSrcIndices.Dispose(jh);
        }
        return(jh);
    }
    else if (config.hasBodiesArray)
    {
        // Source: explicit ColliderBody array (optionally with precomputed AABBs).
        layer = new CollisionLayer(config.bodies.Length, config.settings, allocator);
        int count = config.bodies.Length;
        var layerIndices = new NativeArray<int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var xmins = new NativeArray<float>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<int> remapSrcIndices = config.hasRemapSrcIndices ? config.remapSrcIndices : new NativeArray<int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        NativeArray<Aabb> aabbs = config.hasAabbsArray ? config.aabbs : new NativeArray<Aabb>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        if (config.hasAabbsArray)
        {
            // AABBs supplied by the caller: read both arrays directly.
            jh = new BuildCollisionLayerInternal.Part1FromDualArraysJob { layer = layer, aabbs = aabbs, layerIndices = layerIndices, xmins = xmins }.ScheduleParallel(count, 64, jh);
        }
        else
        {
            // Compute AABBs from the collider bodies into our temp aabbs array.
            jh = new BuildCollisionLayerInternal.Part1FromColliderBodyArrayJob { layer = layer, aabbs = aabbs, colliderBodies = config.bodies, layerIndices = layerIndices, xmins = xmins }.ScheduleParallel(count, 64, jh);
        }
        jh = new BuildCollisionLayerInternal.Part2Job { layer = layer, layerIndices = layerIndices }.Schedule(jh);
        jh = new BuildCollisionLayerInternal.Part3Job { layerIndices = layerIndices, unsortedSrcIndices = remapSrcIndices }.ScheduleParallel(count, 512, jh);
        jh = new BuildCollisionLayerInternal.Part4Job { bucketStartAndCounts = layer.bucketStartsAndCounts, unsortedSrcIndices = remapSrcIndices, xmins = xmins }.ScheduleParallel(layer.BucketCount, 1, jh);
        jh = new BuildCollisionLayerInternal.Part5FromArraysJob { aabbs = aabbs, bodies = config.bodies, layer = layer, remapSrcIndices = remapSrcIndices }.ScheduleParallel(count, 128, jh);
        // Dispose only the temp arrays we allocated; caller-owned arrays survive.
        if ((!config.hasAabbsArray) && (!config.hasRemapSrcIndices))
        {
            jh = JobHandle.CombineDependencies(remapSrcIndices.Dispose(jh), aabbs.Dispose(jh));
        }
        else if (!config.hasRemapSrcIndices)
        {
            jh = remapSrcIndices.Dispose(jh);
        }
        else if (!config.hasAabbsArray)
        {
            jh = aabbs.Dispose(jh);
        }
        return(jh);
    }
    else
    {
        // NOTE(review): "BuildCollisionError" in this message looks like a typo
        // for "BuildCollisionLayer" — left unchanged here; fix the literal separately.
        throw new InvalidOperationException("Something went wrong with the BuildCollisionError configuration.");
    }
}
/// <summary>
/// Moves all chunks from a source EntityManager's world into a destination
/// archetype/entity-data/shared-component manager set. Schedules a chunk-move
/// job and the shared-component migration, walks the source archetype list
/// twice (once to count, once to record remap pairs), then schedules and
/// completes the remap jobs before merging free-chunk lists.
/// Throws ArgumentException if any source archetype uses managed arrays.
/// NOTE(review): remapChunks/remapArchetypes (TempJob) have no Dispose visible
/// here — presumably deallocated by the remap jobs ([DeallocateOnJobCompletion])
/// — TODO confirm, else this leaks.
/// </summary>
public static void MoveChunks(EntityManager srcEntities, ArchetypeManager dstArchetypeManager, EntityGroupManager dstGroupManager, EntityDataManager *dstEntityDataManager, SharedComponentDataManager dstSharedComponents)
{
    var srcArchetypeManager = srcEntities.ArchetypeManager;
    var srcEntityDataManager = srcEntities.Entities;
    var srcSharedComponents = srcEntities.m_SharedComponentManager;
    // Remap table sized to the source capacity; disposed at the end of this method.
    var entityRemapping = new NativeArray<EntityRemapUtility.EntityRemapInfo>(srcEntityDataManager->Capacity, Allocator.TempJob);
    var moveChunksJob = new MoveChunksJob
    {
        srcEntityDataManager = srcEntityDataManager,
        dstEntityDataManager = dstEntityDataManager,
        entityRemapping = entityRemapping
    }.Schedule();
    // Kick the worker threads so the move overlaps the shared-component migration below.
    JobHandle.ScheduleBatchedJobs();

    // Migrate shared components on the main thread (profiled).
    var samplerShared = CustomSampler.Create("MoveAllSharedComponents");
    samplerShared.Begin();
    var remapShared = dstSharedComponents.MoveAllSharedComponents(srcSharedComponents, Allocator.TempJob);
    samplerShared.End();

    // First pass over the (singly linked) archetype list: count archetypes and chunks.
    Archetype *srcArchetype;
    int chunkCount = 0;
    int archetypeCount = 0;
    srcArchetype = srcArchetypeManager.m_LastArchetype;
    while (srcArchetype != null)
    {
        archetypeCount++;
        chunkCount += srcArchetype->ChunkCount;
        srcArchetype = srcArchetype->PrevArchetype;
    }

    var remapChunks = new NativeArray<RemapChunk>(chunkCount, Allocator.TempJob);
    var remapArchetypes = new NativeArray<RemapArchetype>(archetypeCount, Allocator.TempJob);
    int chunkIndex = 0;
    int archetypeIndex = 0;

    // Second pass: record src→dst archetype pairs and every chunk to remap.
    srcArchetype = srcArchetypeManager.m_LastArchetype;
    while (srcArchetype != null)
    {
        if (srcArchetype->ChunkCount != 0)
        {
            // Managed arrays cannot be moved across worlds.
            if (srcArchetype->NumManagedArrays != 0)
            {
                throw new ArgumentException("MoveEntitiesFrom is not supported with managed arrays");
            }
            var dstArchetype = dstArchetypeManager.GetOrCreateArchetype(srcArchetype->Types, srcArchetype->TypesCount, dstGroupManager);
            remapArchetypes[archetypeIndex] = new RemapArchetype { srcArchetype = srcArchetype, dstArchetype = dstArchetype };
            for (var c = srcArchetype->ChunkList.Begin; c != srcArchetype->ChunkList.End; c = c->Next)
            {
                remapChunks[chunkIndex] = new RemapChunk { chunk = (Chunk *)c, dstArchetype = dstArchetype };
                chunkIndex++;
            }
            archetypeIndex++;
            dstEntityDataManager->IncrementComponentTypeOrderVersion(dstArchetype);
        }
        srcArchetype = srcArchetype->PrevArchetype;
    }

    // Remap chunks (after the entity move), then archetypes (after the chunks), then wait.
    var remapChunksJob = new RemapChunksJob
    {
        dstEntityDataManager = dstEntityDataManager,
        remapChunks = remapChunks,
        remapShared = remapShared,
        entityRemapping = entityRemapping
    }.Schedule(remapChunks.Length, 1, moveChunksJob);
    var remapArchetypesJob = new RemapArchetypesJob { remapArchetypes = remapArchetypes }.Schedule(archetypeIndex, 1, remapChunksJob);
    remapArchetypesJob.Complete();

    // Merge per-shared-component free-chunk lists into the destination archetypes.
    srcArchetype = srcArchetypeManager.m_LastArchetype;
    while (srcArchetype != null)
    {
        if (srcArchetype->NumSharedComponents != 0)
        {
            var dstArchetype = dstArchetypeManager.GetOrCreateArchetype(srcArchetype->Types, srcArchetype->TypesCount, dstGroupManager);
            dstArchetype->FreeChunksBySharedComponents.AppendFrom(&srcArchetype->FreeChunksBySharedComponents);
        }
        srcArchetype = srcArchetype->PrevArchetype;
    }

    entityRemapping.Dispose();
    remapShared.Dispose();
}
/// <summary>
/// Allocates a single raw prefilter buffer (chunk array + index array + one
/// trailing count int), schedules or runs the gather job that fills it
/// (filtered or unfiltered depending on the query filter), and returns both
/// the buffer (as a NativeArray for later [DeallocateOnJobCompletion]) and a
/// pointer positioned so ScheduleParallelForDeferArraySize can read the count.
/// </summary>
/// <param name="unfilteredChunkCount">Upper bound on chunks before filtering.</param>
/// <param name="archetypes">Matching archetypes to gather chunks from.</param>
/// <param name="filter">Query filter; decides which gather job is used.</param>
/// <param name="dependsOn">Dependency for batched scheduling.</param>
/// <param name="mode">Batched = schedule as a job; otherwise run inline.</param>
/// <param name="prefilterDataArray">The whole buffer wrapped as a NativeArray (for deallocation).</param>
/// <param name="deferredCountData">Pointer for the deferred-array-size mechanism (see comment below).</param>
/// <returns>Handle of the gather job, or default when it was run inline.</returns>
internal static JobHandle PreparePrefilteredChunkLists(int unfilteredChunkCount, MatchingArchetypeList archetypes, EntityQueryFilter filter, JobHandle dependsOn, ScheduleMode mode, out NativeArray<byte> prefilterDataArray, out void *deferredCountData)
{
    // Allocate one buffer for all prefilter data and distribute it
    // We keep the full buffer as a "dummy array" so we can deallocate it later with [DeallocateOnJobCompletion]
    var sizeofChunkArray = sizeof(ArchetypeChunk) * unfilteredChunkCount;
    var sizeofIndexArray = sizeof(int) * unfilteredChunkCount;
    var prefilterDataSize = sizeofChunkArray + sizeofIndexArray + sizeof(int);
    // 64-byte alignment for the raw allocation.
    var prefilterData = (byte *)UnsafeUtility.Malloc(prefilterDataSize, 64, Allocator.TempJob);
    prefilterDataArray = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<byte>(prefilterData, prefilterDataSize, Allocator.TempJob);
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref prefilterDataArray, AtomicSafetyHandle.Create());
#endif
    JobHandle prefilterHandle = default(JobHandle);
    if (filter.RequiresMatchesFilter)
    {
        // Filtered path: the job must evaluate the filter per chunk.
        var prefilteringJob = new GatherChunksAndOffsetsWithFilteringJob
        {
            Archetypes = archetypes,
            Filter = filter,
            PrefilterData = prefilterData,
            UnfilteredChunkCount = unfilteredChunkCount
        };
        if (mode == ScheduleMode.Batched)
        {
            prefilterHandle = prefilteringJob.Schedule(dependsOn);
        }
        else
        {
            // Synchronous path: run on the calling thread, no handle produced.
            prefilteringJob.Run();
        }
    }
    else
    {
        // Unfiltered path: plain gather.
        var gatherJob = new GatherChunksAndOffsetsJob
        {
            Archetypes = archetypes,
            PrefilterData = prefilterData,
            UnfilteredChunkCount = unfilteredChunkCount,
            entityComponentStore = archetypes.entityComponentStore
        };
        if (mode == ScheduleMode.Batched)
        {
            prefilterHandle = gatherJob.Schedule(dependsOn);
        }
        else
        {
            gatherJob.Run();
        }
    }
    // ScheduleParallelForDeferArraySize expects a ptr to a structure with a void* and a count.
    // It only uses the count, so this is safe to fudge
    // (back the pointer up by one pointer-size so the count lands where the
    // struct's count field would be).
    deferredCountData = prefilterData + sizeofChunkArray + sizeofIndexArray;
    deferredCountData = (byte *)deferredCountData - sizeof(void *);
    return(prefilterHandle);
}
/// <summary>
/// Moves every chunk from a source EntityComponentStore/ManagedComponentStore
/// pair into this world. Overlaps the chunk-move job with the gathering of
/// managed component indices, records src→dst archetype/chunk remap pairs,
/// migrates shared and managed components on the main thread, then schedules
/// the chunk and archetype remap jobs and completes them before cleanup.
/// NOTE(review): remapArchetypes (TempJob) has no Dispose visible in this
/// method — presumably deallocated by RemapAllArchetypesJob
/// ([DeallocateOnJobCompletion]) — TODO confirm.
/// </summary>
/// <param name="entityRemapping">Caller-owned entity remap table filled by the move job.</param>
/// <param name="srcEntityComponentStore">Source world's entity component store.</param>
/// <param name="srcManagedComponentStore">Source world's managed component store.</param>
internal void MoveChunksFromAll(
    NativeArray<EntityRemapUtility.EntityRemapInfo> entityRemapping,
    EntityComponentStore *srcEntityComponentStore,
    ManagedComponentStore srcManagedComponentStore)
{
    // Kick off the bulk chunk move immediately; main thread keeps working below.
    var moveChunksJob = new MoveAllChunksJob
    {
        srcEntityComponentStore = srcEntityComponentStore,
        dstEntityComponentStore = EntityComponentStore,
        entityRemapping = entityRemapping
    }.Schedule();

    int managedComponentCount = srcEntityComponentStore->ManagedComponentIndexUsedCount;
    NativeArray<int> srcManagedIndices = default;
    NativeArray<int> dstManagedIndices = default;
    JobHandle gatherManagedComponentIndices = default;
    if (managedComponentCount > 0)
    {
        // Reserve destination indices up front, then gather src/dst index pairs in a job.
        srcManagedIndices = new NativeArray<int>(managedComponentCount, Allocator.TempJob);
        dstManagedIndices = new NativeArray<int>(managedComponentCount, Allocator.TempJob);
        EntityComponentStore->ReserveManagedComponentIndices(managedComponentCount);
        gatherManagedComponentIndices = new GatherAllManagedComponentIndicesJob
        {
            SrcEntityComponentStore = srcEntityComponentStore,
            DstEntityComponentStore = EntityComponentStore,
            SrcManagedIndices = srcManagedIndices,
            DstManagedIndices = dstManagedIndices
        }.Schedule();
    }
    JobHandle.ScheduleBatchedJobs();

    // Count all source chunks to size the remap table.
    int chunkCount = 0;
    for (var i = 0; i < srcEntityComponentStore->m_Archetypes.Length; ++i)
    {
        var srcArchetype = srcEntityComponentStore->m_Archetypes.Ptr[i];
        chunkCount += srcArchetype->Chunks.Count;
    }
    var remapChunks = new NativeArray<RemapChunk>(chunkCount, Allocator.TempJob);
    var remapArchetypes = new NativeArray<RemapArchetype>(srcEntityComponentStore->m_Archetypes.Length, Allocator.TempJob);
    int chunkIndex = 0;
    int archetypeIndex = 0;
    // Record src→dst archetype pairs and every chunk to remap.
    for (var i = 0; i < srcEntityComponentStore->m_Archetypes.Length; ++i)
    {
        var srcArchetype = srcEntityComponentStore->m_Archetypes.Ptr[i];
        if (srcArchetype->Chunks.Count != 0)
        {
            var dstArchetype = EntityComponentStore->GetOrCreateArchetype(srcArchetype->Types, srcArchetype->TypesCount);
            remapArchetypes[archetypeIndex] = new RemapArchetype { srcArchetype = srcArchetype, dstArchetype = dstArchetype };
            for (var j = 0; j < srcArchetype->Chunks.Count; ++j)
            {
                var srcChunk = srcArchetype->Chunks.p[j];
                remapChunks[chunkIndex] = new RemapChunk { chunk = srcChunk, dstArchetype = dstArchetype };
                chunkIndex++;
            }
            archetypeIndex++;
            EntityComponentStore->IncrementComponentTypeOrderVersion(dstArchetype);
        }
    }

    // The chunk move must be done before playing back managed changes.
    moveChunksJob.Complete();
    ManagedComponentStore.Playback(ref EntityComponentStore->ManagedChangesTracker);
    srcManagedComponentStore.Playback(ref srcEntityComponentStore->ManagedChangesTracker);

    // Shared components are migrated on the main thread (profiled).
    k_ProfileMoveSharedComponents.Begin();
    var remapShared = ManagedComponentStore.MoveAllSharedComponents(srcManagedComponentStore, Allocator.TempJob);
    k_ProfileMoveSharedComponents.End();

    gatherManagedComponentIndices.Complete();
    k_ProfileMoveManagedComponents.Begin();
    m_ManagedComponentStore.MoveManagedComponentsFromDifferentWorld(srcManagedIndices, dstManagedIndices, srcManagedIndices.Length, srcManagedComponentStore);
    // Reset the source store's managed index bookkeeping; its components are gone now.
    srcEntityComponentStore->m_ManagedComponentFreeIndex.Size = 0;
    srcEntityComponentStore->m_ManagedComponentIndex = 1;
    k_ProfileMoveManagedComponents.End();

    // Patch entity references inside the moved chunks (runs inline).
    new ChunkPatchEntities
    {
        RemapChunks = remapChunks,
        EntityRemapping = entityRemapping,
        EntityComponentStore = EntityComponentStore
    }.Run();

    // Remap chunk entity data in parallel, then archetype-level data after it.
    var remapAllChunksJob = new RemapAllChunksJob
    {
        dstEntityComponentStore = EntityComponentStore,
        remapChunks = remapChunks,
        entityRemapping = entityRemapping
    }.Schedule(remapChunks.Length, 1);
    var remapArchetypesJob = new RemapAllArchetypesJob
    {
        remapArchetypes = remapArchetypes,
        remapShared = remapShared,
        dstEntityComponentStore = EntityComponentStore,
        chunkHeaderType = TypeManager.GetTypeIndex<ChunkHeader>()
    }.Schedule(archetypeIndex, 1, remapAllChunksJob);

    ManagedComponentStore.Playback(ref EntityComponentStore->ManagedChangesTracker);
    if (managedComponentCount > 0)
    {
        srcManagedIndices.Dispose();
        dstManagedIndices.Dispose();
    }
    remapArchetypesJob.Complete();
    remapShared.Dispose();
    remapChunks.Dispose();
}
/// <summary>
/// Mouse-spring drag: while the pick system reports a dragged entity, applies
/// a clamped spring impulse that pulls the grabbed point on the body toward
/// the mouse ray position. Runs entirely on the main thread (completes the
/// pick job first) and writes the modified PhysicsVelocity back. Bails out
/// early for: no mouse entities, no PhysicsMass on the target, or an
/// infinite-mass (InverseMass == 0) body.
/// </summary>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    if (m_MouseGroup.CalculateEntityCount() == 0)
    {
        return(inputDeps);
    }
    // Read-only lookups except Velocities, which is written back at the end.
    ComponentDataFromEntity<Translation> Positions = GetComponentDataFromEntity<Translation>(true);
    ComponentDataFromEntity<Rotation> Rotations = GetComponentDataFromEntity<Rotation>(true);
    ComponentDataFromEntity<PhysicsVelocity> Velocities = GetComponentDataFromEntity<PhysicsVelocity>();
    ComponentDataFromEntity<PhysicsMass> Masses = GetComponentDataFromEntity<PhysicsMass>(true);

    // If there's a pick job, wait for it to finish
    if (m_PickSystem.PickJobHandle != null)
    {
        JobHandle.CombineDependencies(inputDeps, m_PickSystem.PickJobHandle.Value).Complete();
    }

    // If there's a picked entity, drag it
    MousePickSystem.SpringData springData = m_PickSystem.SpringDatas[0];
    if (springData.Dragging != 0)
    {
        Entity entity = m_PickSystem.SpringDatas[0].Entity;
        if (!EntityManager.HasComponent<PhysicsMass>(entity))
        {
            return(inputDeps);
        }
        PhysicsMass massComponent = Masses[entity];
        PhysicsVelocity velocityComponent = Velocities[entity];
        // Infinite mass (static/kinematic) bodies cannot be dragged.
        if (massComponent.InverseMass == 0)
        {
            return(inputDeps);
        }
        var worldFromBody = new MTransform(Rotations[entity].Value, Positions[entity].Value);
        // Body to motion transform
        var bodyFromMotion = new MTransform(Masses[entity].InertiaOrientation, Masses[entity].CenterOfMass);
        MTransform worldFromMotion = Mul(worldFromBody, bodyFromMotion);

        // Damp the current velocity
        const float gain = 0.95f;
        velocityComponent.Linear *= gain;
        velocityComponent.Angular *= gain;

        // Get the body and mouse points in world space
        float3 pointBodyWs = Mul(worldFromBody, springData.PointOnBody);
        float3 pointSpringWs = Camera.main.ScreenToWorldPoint(new Vector3(Input.mousePosition.x, Input.mousePosition.y, springData.MouseDepth));

        // Calculate the required change in velocity
        // (spring pulls the grabbed point toward the mouse, damped by the
        // current relative velocity at that point).
        float3 pointBodyLs = Mul(Inverse(bodyFromMotion), springData.PointOnBody);
        float3 deltaVelocity;
        {
            float3 pointDiff = pointBodyWs - pointSpringWs;
            float3 relativeVelocityInWorld = velocityComponent.Linear + math.mul(worldFromMotion.Rotation, math.cross(velocityComponent.Angular, pointBodyLs));
            const float elasticity = 0.1f;
            const float damping = 0.5f;
            deltaVelocity = -pointDiff * (elasticity / Time.fixedDeltaTime) - damping * relativeVelocityInWorld;
        }

        // Build effective mass matrix in world space
        // TODO how are bodies with inf inertia and finite mass represented
        // TODO the aggressive damping is hiding something wrong in this code if dragging non-uniform shapes
        float3x3 effectiveMassMatrix;
        {
            float3 arm = pointBodyWs - worldFromMotion.Translation;
            // Skew-symmetric cross-product matrix of the lever arm.
            var skew = new float3x3(
                new float3(0.0f, arm.z, -arm.y),
                new float3(-arm.z, 0.0f, arm.x),
                new float3(arm.y, -arm.x, 0.0f)
                );
            // world space inertia = worldFromMotion * inertiaInMotionSpace * motionFromWorld
            var invInertiaWs = new float3x3(
                massComponent.InverseInertia.x * worldFromMotion.Rotation.c0,
                massComponent.InverseInertia.y * worldFromMotion.Rotation.c1,
                massComponent.InverseInertia.z * worldFromMotion.Rotation.c2
                );
            invInertiaWs = math.mul(invInertiaWs, math.transpose(worldFromMotion.Rotation));
            float3x3 invEffMassMatrix = math.mul(math.mul(skew, invInertiaWs), skew);
            invEffMassMatrix.c0 = new float3(massComponent.InverseMass, 0.0f, 0.0f) - invEffMassMatrix.c0;
            invEffMassMatrix.c1 = new float3(0.0f, massComponent.InverseMass, 0.0f) - invEffMassMatrix.c1;
            invEffMassMatrix.c2 = new float3(0.0f, 0.0f, massComponent.InverseMass) - invEffMassMatrix.c2;
            effectiveMassMatrix = math.inverse(invEffMassMatrix);
        }

        // Calculate impulse to cause the desired change in velocity
        float3 impulse = math.mul(effectiveMassMatrix, deltaVelocity);

        // Clip the impulse
        // (limit the resulting acceleration so the drag stays stable).
        const float maxAcceleration = 250.0f;
        float maxImpulse = math.rcp(massComponent.InverseMass) * Time.fixedDeltaTime * maxAcceleration;
        impulse *= math.min(1.0f, math.sqrt((maxImpulse * maxImpulse) / math.lengthsq(impulse)));

        // Apply the impulse
        {
            velocityComponent.Linear += impulse * massComponent.InverseMass;
            float3 impulseLs = math.mul(math.transpose(worldFromMotion.Rotation), impulse);
            float3 angularImpulseLs = math.cross(pointBodyLs, impulseLs);
            velocityComponent.Angular += angularImpulseLs * massComponent.InverseInertia;
        }

        // Write back velocity
        Velocities[entity] = velocityComponent;
    }
    return(inputDeps);
}
/// <summary>
/// Blocks until the wrapped job handle has finished executing.
/// (Delegates to <c>JobHandle.Complete()</c> — presumably a member handle of
/// this type; verify against the enclosing declaration.)
/// </summary>
public void Complete()
{
    JobHandle.Complete();
}
// This system performs no per-frame work of its own; it simply forwards the
// incoming dependency chain unchanged.
protected override JobHandle OnUpdate(JobHandle input) => input;
// No-op update: the incoming dependency is passed straight through.
protected override JobHandle OnUpdate(JobHandle dep) => dep;
/// <summary>
/// Schedules all vertex-attribute decoding jobs for one glTF primitive:
/// positions (dense and/or sparse), optional normals, tangents, up to two UV
/// sets, colors, and bone weights/joints — all writing into one interleaved
/// vertex buffer (vData) whose stride is computed from the attributes present.
/// Returns the combined handle of all scheduled jobs, or null if any job
/// could not be created.
/// NOTE(review): on the early "return null" paths neither the handles array
/// nor vData is disposed here — presumably cleaned up by the caller/owner on
/// failure — TODO confirm, else these allocations leak.
/// </summary>
public override unsafe JobHandle? ScheduleVertexJobs(
    IGltfBuffers buffers,
    int positionAccessorIndex,
    int normalAccessorIndex,
    int tangentAccessorIndex,
    int[] uvAccessorIndices,
    int colorAccessorIndex,
    int weightsAccessorIndex,
    int jointsAccessorIndex
    )
{
    buffers.GetAccessor(positionAccessorIndex, out var posAcc, out var posData, out var posByteStride);
    Profiler.BeginSample("ScheduleVertexJobs");
    Profiler.BeginSample("AllocateNativeArray");
    // One interleaved output buffer sized by the position count.
    vData = new NativeArray<VType>(posAcc.count, defaultAllocator);
    var vDataPtr = (byte *)vData.GetUnsafeReadOnlyPtr();
    Profiler.EndSample();
    bounds = posAcc.TryGetBounds();

    // Count the jobs to schedule and accumulate the output vertex stride.
    int jobCount = 1;
    int outputByteStride = 12; // sizeof Vector3
    if (posAcc.isSparse && posAcc.bufferView >= 0)
    {
        // Sparse positions on top of a dense base need a second job.
        jobCount++;
    }
    if (normalAccessorIndex >= 0)
    {
        jobCount++;
        hasNormals = true;
    }
    hasNormals |= calculateNormals;
    if (hasNormals)
    {
        outputByteStride += 12;
    }
    if (tangentAccessorIndex >= 0)
    {
        jobCount++;
        hasTangents = true;
    }
    hasTangents |= calculateTangents;
    if (hasTangents)
    {
        outputByteStride += 16;
    }
    if (uvAccessorIndices != null && uvAccessorIndices.Length > 0)
    {
        // More than two UV sets are not supported yet
        Assert.IsTrue(uvAccessorIndices.Length < 3);
        jobCount += uvAccessorIndices.Length;
        switch (uvAccessorIndices.Length)
        {
        case 1:
            texCoords = new VertexBufferTexCoords<VTexCoord1>(logger);
            break;
        default:
            texCoords = new VertexBufferTexCoords<VTexCoord2>(logger);
            break;
        }
    }
    hasColors = colorAccessorIndex >= 0;
    if (hasColors)
    {
        jobCount++;
        colors = new VertexBufferColors();
    }
    hasBones = weightsAccessorIndex >= 0 && jointsAccessorIndex >= 0;
    if (hasBones)
    {
        jobCount++;
        bones = new VertexBufferBones(logger);
    }

    // Collect every job handle here; combined at the end.
    NativeArray<JobHandle> handles = new NativeArray<JobHandle>(jobCount, defaultAllocator);
    int handleIndex = 0;
    {
        // Positions: dense base first (if present), then sparse overlay
        // chained onto it via dependsOn.
        JobHandle? h = null;
        if (posAcc.bufferView >= 0)
        {
            h = GetVector3sJob(
                posData,
                posAcc.count,
                posAcc.componentType,
                posByteStride,
                (float3 *)vDataPtr,
                outputByteStride,
                posAcc.normalized,
                false // positional data never needs to be normalized
                );
        }
        if (posAcc.isSparse)
        {
            buffers.GetAccessorSparseIndices(posAcc.sparse.indices, out var posIndexData);
            buffers.GetAccessorSparseValues(posAcc.sparse.values, out var posValueData);
            var sparseJobHandle = GetVector3sSparseJob(
                posIndexData,
                posValueData,
                posAcc.sparse.count,
                posAcc.sparse.indices.componentType,
                posAcc.componentType,
                (float3 *)vDataPtr,
                outputByteStride,
                dependsOn: ref h,
                posAcc.normalized
                );
            if (sparseJobHandle.HasValue)
            {
                handles[handleIndex] = sparseJobHandle.Value;
                handleIndex++;
            }
            else
            {
                Profiler.EndSample();
                return(null);
            }
        }
        if (h.HasValue)
        {
            handles[handleIndex] = h.Value;
            handleIndex++;
        }
        else
        {
            Profiler.EndSample();
            return(null);
        }
    }
    if (normalAccessorIndex >= 0)
    {
        buffers.GetAccessor(normalAccessorIndex, out var nrmAcc, out var input, out var inputByteStride);
        if (nrmAcc.isSparse)
        {
            // Sparse normal accessors are not supported; report and continue.
            logger.Error(LogCode.SparseAccessor, "normals");
        }
        // Normals are written 12 bytes after the position in each vertex.
        var h = GetVector3sJob(
            input,
            nrmAcc.count,
            nrmAcc.componentType,
            inputByteStride,
            (float3 *)(vDataPtr + 12),
            outputByteStride,
            nrmAcc.normalized,
            true // normals need to be unit length
            );
        if (h.HasValue)
        {
            handles[handleIndex] = h.Value;
            handleIndex++;
        }
        else
        {
            Profiler.EndSample();
            return(null);
        }
    }
    if (tangentAccessorIndex >= 0)
    {
        buffers.GetAccessor(tangentAccessorIndex, out var tanAcc, out var input, out var inputByteStride);
        if (tanAcc.isSparse)
        {
            logger.Error(LogCode.SparseAccessor, "tangents");
        }
        // Tangents (float4) are written 24 bytes in (after position + normal).
        var h = GetTangentsJob(
            input,
            tanAcc.count,
            tanAcc.componentType,
            inputByteStride,
            (float4 *)(vDataPtr + 24),
            outputByteStride,
            tanAcc.normalized
            );
        if (h.HasValue)
        {
            handles[handleIndex] = h.Value;
            handleIndex++;
        }
        else
        {
            Profiler.EndSample();
            return(null);
        }
    }
    if (texCoords != null)
    {
        // The UV sub-buffer schedules its own jobs into our handles slice.
        texCoords.ScheduleVertexUVJobs(
            buffers,
            uvAccessorIndices,
            posAcc.count,
            new NativeSlice<JobHandle>(
                handles,
                handleIndex,
                uvAccessorIndices.Length
                )
            );
        handleIndex += uvAccessorIndices.Length;
    }
    if (hasColors)
    {
        colors.ScheduleVertexColorJob(
            buffers,
            colorAccessorIndex,
            new NativeSlice<JobHandle>(
                handles,
                handleIndex,
                1
                )
            );
        handleIndex++;
    }
    if (hasBones)
    {
        var h = bones.ScheduleVertexBonesJob(
            buffers,
            weightsAccessorIndex,
            jointsAccessorIndex
            );
        if (h.HasValue)
        {
            handles[handleIndex] = h.Value;
            handleIndex++;
        }
        else
        {
            Profiler.EndSample();
            return(null);
        }
    }
    // Fold all scheduled jobs into one handle; the temporary handle array
    // can be released immediately after combining.
    var handle = (jobCount > 1) ? JobHandle.CombineDependencies(handles) : handles[0];
    handles.Dispose();
    Profiler.EndSample();
    return(handle);
}
/// <summary>
/// VrmLib.Mesh => UnityEngine.Mesh
/// Converts a VrmLib mesh into a UnityEngine.Mesh: interleaves the vertex
/// streams with a parallel job (overlapped with bind-pose setup on the main
/// thread), uploads vertices and indices, builds sub-meshes and blend-shape
/// frames, and recalculates bounds/tangents.
/// </summary>
/// <param name="src">Source VrmLib mesh.</param>
/// <param name="skin">Optional skin; when present its inverse matrices become the bind poses.</param>
/// <returns>The populated UnityEngine.Mesh.</returns>
public static Mesh LoadSharedMesh(VrmLib.Mesh src, Skin skin = null)
{
    Profiler.BeginSample("MeshImporter.LoadSharedMesh");
    var mesh = new Mesh();
    // Optional streams fall back to an uncreated (default) NativeArray.
    var positions = src.VertexBuffer.Positions.AsNativeArray<Vector3>(Allocator.TempJob);
    var normals = src.VertexBuffer.Normals?.AsNativeArray<Vector3>(Allocator.TempJob) ?? default;
    var texCoords = src.VertexBuffer.TexCoords?.AsNativeArray<Vector2>(Allocator.TempJob) ?? default;
    var colors = src.VertexBuffer.Colors?.AsNativeArray<Color>(Allocator.TempJob) ?? default;
    var weights = src.VertexBuffer.Weights?.AsNativeArray<Vector4>(Allocator.TempJob) ?? default;
    var joints = src.VertexBuffer.Joints?.AsNativeArray<SkinJoints>(Allocator.TempJob) ?? default;
    var vertices = new NativeArray<MeshVertex>(positions.Length, Allocator.TempJob);

    // Run the interleaving job and the bind-pose update concurrently.
    var jobHandle = new InterleaveMeshVerticesJob(vertices, positions, normals, texCoords, colors, weights, joints)
                    .Schedule(vertices.Length, 1);
    JobHandle.ScheduleBatchedJobs();

    // Update the bind poses while the job runs.
    if (weights.IsCreated && joints.IsCreated)
    {
        // Skinning streams must match the vertex count exactly.
        if (weights.Length != positions.Length || joints.Length != positions.Length)
        {
            throw new ArgumentException();
        }
        if (skin != null)
        {
            mesh.bindposes = skin.InverseMatrices.GetSpan<Matrix4x4>().ToArray();
        }
    }

    // Wait for the interleaving job to finish.
    jobHandle.Complete();

    // Release the input NativeArrays.
    positions.Dispose();
    if (normals.IsCreated)
    {
        normals.Dispose();
    }
    if (texCoords.IsCreated)
    {
        texCoords.Dispose();
    }
    if (colors.IsCreated)
    {
        colors.Dispose();
    }
    if (weights.IsCreated)
    {
        weights.Dispose();
    }
    if (joints.IsCreated)
    {
        joints.Dispose();
    }

    // Upload the vertices.
    MeshVertex.SetVertexBufferParamsToMesh(mesh, vertices.Length);
    mesh.SetVertexBufferData(vertices, 0, 0, vertices.Length);

    // Release the output NativeArray.
    vertices.Dispose();

    // Upload the index buffer (16- or 32-bit depending on the source).
    switch (src.IndexBuffer.ComponentType)
    {
    case AccessorValueType.UNSIGNED_SHORT:
        var shortIndices = src.IndexBuffer.AsNativeArray<ushort>(Allocator.Temp);
        mesh.SetIndexBufferParams(shortIndices.Length, IndexFormat.UInt16);
        mesh.SetIndexBufferData(shortIndices, 0, 0, shortIndices.Length);
        shortIndices.Dispose();
        break;

    case AccessorValueType.UNSIGNED_INT:
        var intIndices = src.IndexBuffer.AsNativeArray<uint>(Allocator.Temp);
        mesh.SetIndexBufferParams(intIndices.Length, IndexFormat.UInt32);
        mesh.SetIndexBufferData(intIndices, 0, 0, intIndices.Length);
        intIndices.Dispose();
        break;

    default:
        throw new NotImplementedException();
    }

    // Build the sub-meshes.
    mesh.subMeshCount = src.Submeshes.Count;
    for (var i = 0; i < src.Submeshes.Count; ++i)
    {
        var subMesh = src.Submeshes[i];
        mesh.SetSubMesh(i, new SubMeshDescriptor(subMesh.Offset, subMesh.DrawCount));
    }

    // Add the morph targets as blend-shape frames.
    foreach (var morphTarget in src.MorphTargets)
    {
        var morphTargetPositions =
            morphTarget.VertexBuffer.Positions != null
                ? morphTarget.VertexBuffer.Positions.GetSpan<Vector3>().ToArray()
                : new Vector3[mesh.vertexCount] // dummy
        ;
        mesh.AddBlendShapeFrame(morphTarget.Name, 100.0f, morphTargetPositions, null, null);
    }

    // Recalculate derived mesh parameters.
    mesh.RecalculateBounds();
    mesh.RecalculateTangents();

    Profiler.EndSample();
    return(mesh);
}
// Builds look-up tables of second-order (MPC2) and third-order (MPC3)
// propagation paths between multipath components (MPCs):
//   1. schedule a job that fills RaycastCommands between every ordered MPC pair,
//   2. batch-raycast them for line-of-sight,
//   3. compute path parameters in parallel, filter out zero-valued entries,
//   4. compact the survivors into the persistent look-up tables, and
//   5. build per-MPC [min,max] index ranges into those tables.
// Every job is completed immediately after scheduling, so this runs
// synchronously inside Start(); TempJob allocations are disposed at the end
// of each section, while the LookUpTable*/MPC2LUTID/MPC3SeenID fields use
// Allocator.Persistent — presumably disposed elsewhere (OnDestroy); verify.
void Start()
{
    GameObject MPC_Spawner = GameObject.Find("CorrectPolygons");
    Correcting_polygons_Native MPC_Native_Script = MPC_Spawner.GetComponent <Correcting_polygons_Native>();

    int MPC2_num = MPC_Native_Script.ActiveV6_MPC2_NativeList.Length;
    int MPC3_num = MPC_Native_Script.ActiveV6_MPC3_NativeList.Length;

    // Aliases over the spawner's native containers (no copies are made).
    NativeArray <V6> MPC2_Native = MPC_Native_Script.ActiveV6_MPC2_NativeList;
    NativeArray <Vector3> MPC2_Side = MPC_Native_Script.Active_MPC2_Perpendiculars;
    NativeArray <V6> MPC3_Native = MPC_Native_Script.ActiveV6_MPC3_NativeList;
    NativeArray <Vector3> MPC3_Side = MPC_Native_Script.Active_MPC3_Perpendiculars;
    NativeArray <float> MPC3_Att = MPC_Native_Script.ActiveV6_MPC3_Power;

    Debug.Log("Size of MPC2 List = " + MPC2_Native.Length + "; MPC3 List = " + MPC3_Native.Length);

    // all about the LookUpTable for MPC2
    #region Generating LookUp Table for MPC2
    // One slot per ordered pair (i, j), i != j: N * (N - 1) candidates.
    NativeArray <SeenPath2> allpath2 = new NativeArray <SeenPath2>(MPC2_num * (MPC2_num - 1), Allocator.TempJob);
    NativeArray <RaycastCommand> CommandsNativeArray_MPC2 = new NativeArray <RaycastCommand>(MPC2_num * (MPC2_num - 1), Allocator.TempJob);
    NativeArray <RaycastHit> ResultsNativeArray_MPC2 = new NativeArray <RaycastHit>(MPC2_num * (MPC2_num - 1), Allocator.TempJob);
    NativeArray <Vector2Int> MPC2_ID = new NativeArray <Vector2Int>(MPC2_num * (MPC2_num - 1), Allocator.TempJob);

    float t_V6 = Time.realtimeSinceStartup; // timing start (logged below)

    #region Paralle Raycasting MPC2
    // Fill one RaycastCommand (and its pair ids) per ordered MPC2 pair.
    ParallelRayCastingDataV6 RayCastingData_MPC2 = new ParallelRayCastingDataV6
    {
        MPC_Array = MPC2_Native,
        commands = CommandsNativeArray_MPC2,
        ID = MPC2_ID,
    };
    JobHandle jobHandle_RayCastingData_MPC2 = RayCastingData_MPC2.Schedule(MPC2_num * (MPC2_num - 1), 1);
    jobHandle_RayCastingData_MPC2.Complete();

    // parallel raycasting
    // NOTE(review): variable says MPC1 but this is the MPC2 batch — rename candidate.
    JobHandle rayCastJobMPC1 = RaycastCommand.ScheduleBatch(CommandsNativeArray_MPC2, ResultsNativeArray_MPC2, 64, default);
    rayCastJobMPC1.Complete();
    #endregion

    #region Parallel Calculation of Path2 Parameters
    // parallel search of possiblte second order of paths
    ParallelPath2Search parallelPath2Search = new ParallelPath2Search
    {
        MPC_Array = MPC2_Native,
        MPC_Perpendiculars = MPC2_Side, // alias of Active_MPC2_Perpendiculars (see local above)
        commands = CommandsNativeArray_MPC2,
        results = ResultsNativeArray_MPC2,
        ID = MPC2_ID,
        maxDistance = MaxSeenDistance / 3,
        angleThreshold = (float)0.1,
        PossiblePath2 = allpath2,
    };
    JobHandle jobHandlePath2Search = parallelPath2Search.Schedule(MPC2_num * (MPC2_num - 1), 64);
    jobHandlePath2Search.Complete();
    #endregion

    #region Filtering Zero Valued Path2
    // Collect indices of non-zero candidate paths, then compact them into the
    // persistent MPC2 look-up table.
    NativeList <int> indexes = new NativeList <int>(Allocator.TempJob);
    IndexFilter indexFilter = new IndexFilter { Array = allpath2, };
    JobHandle jobHandleIndexFilter = indexFilter.ScheduleAppend(indexes, allpath2.Length, 64);
    jobHandleIndexFilter.Complete();

    LookUpTableMPC2 = new NativeArray <SeenPath2>(indexes.Length, Allocator.Persistent);
    NativeArray <Vector2Int> LUTIndexArrayMPC2 = new NativeArray <Vector2Int>(indexes.Length, Allocator.TempJob);
    LookUpTableFinal lookUpTableFinalMPC2 = new LookUpTableFinal
    {
        InArray = allpath2,
        IndexArray = indexes,
        OutArray = LookUpTableMPC2,
        IndxArray = LUTIndexArrayMPC2,
    };
    JobHandle jobHandleLookUpTableMPC2 = lookUpTableFinalMPC2.Schedule(indexes.Length, 64);
    jobHandleLookUpTableMPC2.Complete();
    #endregion

    #region Testing Function for Preparing Indexes
    // For each source MPC i, find the contiguous [index_min, index_max] range
    // of its entries in LookUpTableMPC2 (the table is grouped by MPC_IDs.x).
    // (-1, -1) marks an MPC with no visible peers. Relies on the table being
    // sorted by source id — the inner loop breaks at the first mismatch.
    MPC2LUTID = new NativeArray <Vector2Int>(MPC2_num, Allocator.Persistent);
    int progressionIndex = 0;
    for (int i = 0; i < MPC2_num; i++)
    {
        int index_min = progressionIndex;
        int index_max = progressionIndex;
        int flagIndex = 0; // if 1 then something is found
        for (int j = progressionIndex; j < LookUpTableMPC2.Length; j++)
        {
            if (i == LookUpTableMPC2[j].MPC_IDs.x)
            {
                index_max = j;
                flagIndex = 1;
            }
            else
            {
                break;
            }
        }
        // check if the MPC can see others or not
        if (flagIndex == 1)
        {
            progressionIndex = index_max + 1;
            MPC2LUTID[i] = new Vector2Int(index_min, index_max);
            // Track the longest per-MPC run (used elsewhere as a buffer bound).
            if (index_max - index_min + 1 > maxlengthMPC2)
            {
                maxlengthMPC2 = index_max - index_min + 1;
            }
        } // in the case, it sees others
        else
        {
            MPC2LUTID[i] = new Vector2Int(-1, -1);
        } // in the case it doesn't see others
    }
    // (commented-out weak-path diagnostic removed)
    #endregion

    Debug.Log("Time spent for parallel raycasting : " + ((Time.realtimeSinceStartup - t_V6) * 1000f) + " ms");

    #region drawing MPC2 connections
    // (commented-out Debug.DrawLine visualization removed)
    #endregion

    #region Disposing MPC2 NativeArrays
    LUTIndexArrayMPC2.Dispose();
    indexes.Dispose();
    MPC2_ID.Dispose();
    CommandsNativeArray_MPC2.Dispose();
    ResultsNativeArray_MPC2.Dispose();
    allpath2.Dispose();
    #endregion
    #endregion

    // all about the LookUpTable for MPC3
    #region Generating LookUp Table for MPC3
    // Same pipeline as MPC2, run twice: first to get pairwise (first level)
    // paths, then expanded to triples for full third-order paths.
    NativeArray <SeenPath2> allpath3_half = new NativeArray <SeenPath2>(MPC3_num * (MPC3_num - 1), Allocator.TempJob);
    NativeArray <RaycastCommand> CommandsNativeArray_MPC3 = new NativeArray <RaycastCommand>(MPC3_num * (MPC3_num - 1), Allocator.TempJob);
    NativeArray <RaycastHit> ResultsNativeArray_MPC3 = new NativeArray <RaycastHit>(MPC3_num * (MPC3_num - 1), Allocator.TempJob);
    NativeArray <Vector2Int> MPC3_ID = new NativeArray <Vector2Int>(MPC3_num * (MPC3_num - 1), Allocator.TempJob);

    // Start: calculation of the first level of path3
    #region Paralle Raycasting MPC3
    ParallelRayCastingDataV6 RayCastingData_MPC3 = new ParallelRayCastingDataV6
    {
        MPC_Array = MPC3_Native,
        commands = CommandsNativeArray_MPC3,
        ID = MPC3_ID,
    };
    JobHandle jobHandle_RayCastingData_MPC3 = RayCastingData_MPC3.Schedule(MPC3_num * (MPC3_num - 1), 1);
    jobHandle_RayCastingData_MPC3.Complete();

    // parallel raycasting
    JobHandle rayCastJobMPC3 = RaycastCommand.ScheduleBatch(CommandsNativeArray_MPC3, ResultsNativeArray_MPC3, 64, default);
    rayCastJobMPC3.Complete();
    #endregion

    #region Parallel Calculation of Path3 Parameters
    // parallel search of possiblte second order of paths
    // (reuses the Path2 job type; tighter maxDistance than the MPC2 pass)
    ParallelPath2Search parallelPath3Search = new ParallelPath2Search
    {
        MPC_Array = MPC3_Native,
        MPC_Perpendiculars = MPC3_Side,
        commands = CommandsNativeArray_MPC3,
        results = ResultsNativeArray_MPC3,
        ID = MPC3_ID,
        maxDistance = MaxSeenDistance / 5,
        angleThreshold = (float)0.1,
        PossiblePath2 = allpath3_half,
    };
    JobHandle jobHandlePath3Search = parallelPath3Search.Schedule(MPC3_num * (MPC3_num - 1), 64);
    jobHandlePath3Search.Complete();
    #endregion

    #region Filtering Zero Valued First Floor Path3
    NativeList <int> indexes3 = new NativeList <int>(Allocator.TempJob);
    IndexFilter indexFilter3 = new IndexFilter { Array = allpath3_half, };
    JobHandle jobHandleIndexFilter3 = indexFilter3.ScheduleAppend(indexes3, allpath3_half.Length, 64);
    jobHandleIndexFilter3.Complete();

    NativeArray <SeenPath2> LookUpTableMPC3_half = new NativeArray <SeenPath2>(indexes3.Length, Allocator.TempJob);
    NativeArray <Vector2Int> LUTIndexArrayMPC3 = new NativeArray <Vector2Int>(indexes3.Length, Allocator.TempJob);
    LookUpTableFinal lookUpTableFinalMPC3 = new LookUpTableFinal
    {
        InArray = allpath3_half,
        IndexArray = indexes3,
        OutArray = LookUpTableMPC3_half,
        IndxArray = LUTIndexArrayMPC3,
    };
    JobHandle jobHandleLookUpTableMPC3 = lookUpTableFinalMPC3.Schedule(indexes3.Length, 64);
    jobHandleLookUpTableMPC3.Complete();
    #endregion

    #region Testing Function for Preparing Indexes for MPC3
    // Same range-extraction pass as for MPC2, over the intermediate table.
    NativeArray <Vector2Int> MPC3LUTID = new NativeArray <Vector2Int>(MPC3_num, Allocator.TempJob);
    int progressionIndex3 = 0;
    for (int i = 0; i < MPC3_num; i++)
    {
        int index_min = progressionIndex3;
        int index_max = progressionIndex3;
        int flagIndex = 0; // if 1 then something is found
        for (int j = progressionIndex3; j < LookUpTableMPC3_half.Length; j++)
        {
            if (i == LookUpTableMPC3_half[j].MPC_IDs.x)
            {
                index_max = j;
                flagIndex = 1;
            }
            else
            {
                break;
            }
        }
        // check if the MPC can see others or not
        if (flagIndex == 1)
        {
            progressionIndex3 = index_max + 1;
            MPC3LUTID[i] = new Vector2Int(index_min, index_max);
        } // in the case, it sees others
        else
        {
            MPC3LUTID[i] = new Vector2Int(-1, -1);
        } // in the case it doesn't see others
    }
    #endregion
    // End: calculation of the first level of path3

    // Start: calculation of the whole path3s
    #region Finding All Possible Path3
    // Expand every first-level pair by every possible third MPC.
    NativeArray <SeenPath3> allpath3 = new NativeArray <SeenPath3>(LookUpTableMPC3_half.Length * MPC3_num, Allocator.TempJob);
    ParallelPath3Search path3Search = new ParallelPath3Search
    {
        In_AttArray = MPC3_Att,
        InArray = LookUpTableMPC3_half,
        MPC_array = MPC3_Native,
        EdgeIndexes = MPC3LUTID,
        OutArray = allpath3,
    };
    JobHandle jobHandle_path3Searc = path3Search.Schedule(LookUpTableMPC3_half.Length * MPC3_num, 64);
    jobHandle_path3Searc.Complete();
    #endregion

    #region Filtering Out Inactive Path3
    NativeList <int> indexes3_full = new NativeList <int>(Allocator.TempJob);
    IndexFilterPath3 indexFilter3_full = new IndexFilterPath3 { Array = allpath3, };
    JobHandle jobHandleFilterPath3 = indexFilter3_full.ScheduleAppend(indexes3_full, allpath3.Length, 64);
    jobHandleFilterPath3.Complete();

    LookUpTableMPC3 = new NativeArray <SeenPath3>(indexes3_full.Length, Allocator.Persistent);
    NativeArray <Vector3Int> LookUpTable_test = new NativeArray <Vector3Int>(indexes3_full.Length, Allocator.TempJob);
    LookUpTableFinalPath3 lookUpTableFinalMPC3_full = new LookUpTableFinalPath3
    {
        InArray = allpath3,
        IndexArray = indexes3_full,
        OutArray = LookUpTableMPC3,
        OutVector3Int = LookUpTable_test
    };
    JobHandle jobHandleLUTMPC3 = lookUpTableFinalMPC3_full.Schedule(indexes3_full.Length, 64);
    jobHandleLUTMPC3.Complete();
    // (commented-out weak-path diagnostic removed)
    #endregion

    #region Testing Function for Preparing Indexes for Full Path3
    // Range extraction over the full path3 table, tracking the longest run.
    MPC3SeenID = new NativeArray <Vector2Int>(MPC3_num, Allocator.Persistent);
    int progressionSeenIndex = 0;
    for (int i = 0; i < MPC3_num; i++)
    {
        int index_min = progressionSeenIndex;
        int index_max = progressionSeenIndex;
        int flagIndex = 0; // if flagIndex = 1, then something is found
        for (int j = progressionSeenIndex; j < LookUpTableMPC3.Length; j++)
        {
            if (i == LookUpTableMPC3[j].MPC_IDs.x)
            {
                index_max = j;
                flagIndex = 1;
            }
            else
            {
                break;
            }
        }
        // check if the MPC can see others or not
        if (flagIndex == 1)
        {
            progressionSeenIndex = index_max + 1;
            MPC3SeenID[i] = new Vector2Int(index_min, index_max);
            if (index_max - index_min + 1 > maxlengthMPC3)
            {
                maxlengthMPC3 = index_max - index_min + 1;
            }
        } // in the case, it sees others
        else
        {
            MPC3SeenID[i] = new Vector2Int(-1, -1);
        } // in the case it doesn't see others
    }
    #endregion

    #region drawing MPC3 connections
    // (commented-out Debug.DrawLine visualization removed)
    #endregion
    // End: calculation of the whole path3s

    #region Disposing MPC3 NativeArrays
    LUTIndexArrayMPC3.Dispose();
    LookUpTableMPC3_half.Dispose();
    indexes3.Dispose();
    MPC3_ID.Dispose();
    CommandsNativeArray_MPC3.Dispose();
    ResultsNativeArray_MPC3.Dispose();
    allpath3_half.Dispose();
    allpath3.Dispose();
    MPC3LUTID.Dispose();
    indexes3_full.Dispose();
    LookUpTable_test.Dispose();
    #endregion
    #endregion
}
/// <summary>
/// Registers a job under its id; a job with the same id must not already exist.
/// </summary>
/// <param name="jobReference">The job handle to track.</param>
/// <exception cref="JobAlreadyExistsException">A job with the same id is already registered.</exception>
public void Add(JobHandle jobReference)
{
    bool added = _jobs.TryAdd(jobReference.JobId, jobReference);
    if (!added)
    {
        throw new JobAlreadyExistsException(jobReference.JobId);
    }
}
/// <summary>
/// Executes the job immediately on the calling thread instead of scheduling it.
/// </summary>
/// <param name="jobData">The job to execute.</param>
/// <param name="dependsOn">Optional dependency; it is not waited on — the job runs right away.</param>
/// <returns>The <paramref name="dependsOn"/> handle, unchanged, so callers can keep chaining.</returns>
public static JobHandle Run <T>(this T jobData, JobHandle dependsOn = default) where T : struct, IJob
{
    // Synchronous execution path: nothing new is scheduled.
    jobData.Execute();
    return dependsOn;
}
/// <summary>
/// Schedules this unit of work against the job system.
/// </summary>
/// <param name="dependency">Handle the scheduled work must wait on; defaults to no dependency.</param>
/// <returns>A handle representing the scheduled work, for further chaining.</returns>
public abstract JobHandle Schedule(JobHandle dependency = default);
/// <summary>
/// Called once per fixed-timestep tick; implementations schedule their work here.
/// </summary>
/// <param name="inputDeps">Dependencies the scheduled work must wait on.</param>
/// <returns>The combined handle of all work scheduled by this update.</returns>
protected abstract JobHandle OnFixedUpdate(JobHandle inputDeps);
/// <summary>
/// Per-frame level hook: on the "arena" level it loads the "chunks" asset
/// bundle and spawns the starter terrain once, then periodically runs a
/// parallel chunk-culling job; off the arena it resets the one-shot flag so
/// the terrain is rebuilt on the next arena load.
/// </summary>
/// <param name="level">The currently active level, forwarded to the base class.</param>
public override void Update(Level level)
{
    base.Update(level);

    if (GameManager.GetCurrentLevel() == "arena")
    {
        if (!done)
        {
            done = true;

            // Reuse an already-loaded "chunks" bundle if one exists; loading
            // the same bundle twice would throw.
            foreach (AssetBundle bundles in AssetBundle.GetAllLoadedAssetBundles())
            {
                if (bundles.name == "chunks")
                {
                    bundle = bundles;
                }
            }
            if (!bundle)
            {
                bundle = AssetBundle.LoadFromFile(Path.Combine(Application.streamingAssetsPath, "Terrain Generation/chunks.assets"));
            }

            // Spawn the starter chunk below the origin and wire up generation.
            GameObject prefab = GameObject.Instantiate(bundle.LoadAsset <GameObject>("StarterChunk"), new Vector3(0, -20, 0), Quaternion.identity);
            Debug.Log("Spawned starter chunk");
            chunkGen = prefab.AddComponent <ChunkGeneration>().Setup(info.chunksToGenerate);
            chunkGen.y = prefab.transform.position.y;
            chunkGen.chunk1 = bundle.LoadAsset <GameObject>("chunk 1.prefab");
            chunkGen.chunk2 = bundle.LoadAsset <GameObject>("chunk5.prefab");
            chunkGen.chunk3 = bundle.LoadAsset <GameObject>("chunk8.prefab");
            Debug.Log("Loaded the terrain");
            foreach (string item in bundle.GetAllAssetNames())
            {
                Debug.Log(item);
            }
            Player.local.transform.position = prefab.transform.position;
        }

        // Throttled culling pass: every info.cullingTimer seconds, hand the
        // chunk positions to a parallel job.
        if (Time.time - timer > info.cullingTimer)
        {
            timer = Time.time;
            NativeArray <Vector3> chunkPositions = new NativeArray <Vector3>(Manager.chunkDictionary.Count, Allocator.TempJob);
            try
            {
                for (int i = 0; i < Manager.chunkDictionary.Count; i++)
                {
                    chunkPositions[i] = Manager.chunkDictionary.Values.ElementAt(i);
                }
                CullingJob job = new CullingJob()
                {
                    playerPos = Player.currentCreature.transform.position,
                    cullingDistance = info.cullingDistance,
                    chunkPositions = chunkPositions
                };
                JobHandle jobHandle = job.Schedule(Manager.chunkDictionary.Count, 10);
                jobHandle.Complete();
            }
            catch (System.Exception e)
            {
                // BUGFIX: the original catch-all hid the actual failure; keep
                // the user-facing message but also log the exception itself.
                Debug.LogError("Something went wrong with multithread please report this!");
                Debug.LogException(e);
            }
            finally
            {
                // BUGFIX: dispose exactly once on every path (the original
                // disposed in both the try and the catch, risking a double
                // dispose if Dispose itself threw).
                chunkPositions.Dispose();
            }
        }
    }
    else if (done)
    {
        // BUGFIX: this reset was previously nested inside the == "arena"
        // branch but guarded by != "arena", making it unreachable; hoisted
        // here so the terrain setup re-runs the next time the arena loads.
        done = false;
    }
}
/// <summary>
/// Creates a NativeArray with all the chunks in a given archetype list.
/// Without a filter the result is produced asynchronously (job handle is
/// returned via <paramref name="jobHandle"/>); with a filter the gather runs
/// to completion here and only the final compaction job is left pending.
/// </summary>
/// <param name="matchingArchetypes">List of matching archetypes.</param>
/// <param name="allocator">Allocator to use for the returned array.</param>
/// <param name="jobHandle">Handle to the job that fills the returned array; callers must complete it before reading.</param>
/// <param name="filter">Query filter; FilterType.None selects the fast unfiltered path.</param>
/// <param name="dependsOn">Optional dependency for the gather jobs.</param>
/// <returns>NativeArray of all the chunks in the matchingArchetypes list.</returns>
public static NativeArray <ArchetypeChunk> CreateArchetypeChunkArray(MatchingArchetypeList matchingArchetypes, Allocator allocator, out JobHandle jobHandle, ref EntityQueryFilter filter,
                                                                     JobHandle dependsOn = default(JobHandle))
{
    var archetypeCount = matchingArchetypes.Count;

    // offsets[i] = index of archetype i's first chunk in the flattened output.
    // NOTE(review): offsets is TempJob and no Dispose is visible in this
    // method — presumably deallocated by the consuming jobs; verify.
    var offsets = new NativeArray <int>(archetypeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var chunkCount = 0;
    {
        for (int i = 0; i < matchingArchetypes.Count; ++i)
        {
            var archetype = matchingArchetypes.p[i]->Archetype;
            offsets[i] = chunkCount;
            chunkCount += archetype->Chunks.Count;
        }
    }

    if (filter.Type == FilterType.None)
    {
        // Fast path: gather every chunk directly into the result, fully async.
        var chunks = new NativeArray <ArchetypeChunk>(chunkCount, allocator, NativeArrayOptions.UninitializedMemory);
        var gatherChunksJob = new GatherChunks
        {
            MatchingArchetypes = matchingArchetypes.p,
            entityComponentStore = matchingArchetypes.entityComponentStore,
            Offsets = offsets,
            Chunks = chunks
        };
        var gatherChunksJobHandle = gatherChunksJob.Schedule(archetypeCount, 1, dependsOn);
        jobHandle = gatherChunksJobHandle;
        return(chunks);
    }
    else
    {
        // Filtered path: gather into a sparse array first (synchronously,
        // because the per-archetype survivor counts are needed on the main
        // thread to size the final array), then compact.
        var filteredCounts = new NativeArray <int>(archetypeCount + 1, Allocator.TempJob);
        var sparseChunks = new NativeArray <ArchetypeChunk>(chunkCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var gatherChunksJob = new GatherChunksWithFiltering
        {
            MatchingArchetypes = matchingArchetypes.p,
            Filter = filter,
            Offsets = offsets,
            FilteredCounts = filteredCounts,
            SparseChunks = sparseChunks,
            entityComponentStore = matchingArchetypes.entityComponentStore
        };
        gatherChunksJob.Schedule(archetypeCount, 1, dependsOn).Complete();

        // accumulated filtered counts: filteredCounts[i] becomes the destination offset
        // (exclusive prefix sum in place; last slot holds the grand total).
        int totalChunks = 0;
        for (int i = 0; i < archetypeCount; ++i)
        {
            int currentCount = filteredCounts[i];
            filteredCounts[i] = totalChunks;
            totalChunks += currentCount;
        }
        filteredCounts[archetypeCount] = totalChunks;

        // Compact the sparse survivors into a tight array asynchronously.
        var joinedChunks = new NativeArray <ArchetypeChunk>(totalChunks, allocator, NativeArrayOptions.UninitializedMemory);
        jobHandle = new JoinChunksJob
        {
            DestinationOffsets = filteredCounts,
            SparseChunks = sparseChunks,
            Offsets = offsets,
            JoinedChunks = joinedChunks
        }.Schedule(archetypeCount, 1);
        return(joinedChunks);
    }
}
/// <summary>
/// Schedules the player-movement job over all matching entities for this frame.
/// </summary>
/// <param name="inputDependencies">Dependencies handed in by the system group.</param>
/// <returns>The handle of the scheduled movement job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDependencies)
{
    var movementJob = new PlayerMovementSystemJob();
    return movementJob.Schedule(this, inputDependencies);
}
// Registers physics simulation callbacks that translate per-body custom-data
// bits into contact/jacobian modifier flags, then apply the modifications.
// Early-outs when no modifier entities exist or physics is disabled.
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    if (m_ContactModifierGroup.CalculateLength() == 0)
    {
        return(inputDeps);
    }
    if (m_StepPhysicsWorld.Simulation.Type == SimulationType.NoPhysics)
    {
        return(inputDeps);
    }

    // Pass 1: walk every contact header and raise jacobian flags from the
    // bodies' custom-data bitmasks.
    SimulationCallbacks.Callback preparationCallback = (ref ISimulation simulation, JobHandle inDeps) =>
    {
        inDeps.Complete(); // shouldn't be needed (jobify the below)

        SimulationData.Contacts.Iterator iterator = simulation.Contacts.GetIterator();
        while (iterator.HasItemsLeft())
        {
            ContactHeader manifold = iterator.GetNextContactHeader();

            // JacobianModifierFlags format for this example
            // UserData 0 - soft contact
            // UserData 1 - surface velocity
            // UserData 2 - infinite inertia
            // UserData 3 - no torque
            // UserData 4 - clip impulse
            // UserData 5 - disabled contact
            if (0 != (manifold.BodyCustomDatas.CustomDataA & (byte)(1 << 0)) ||
                0 != (manifold.BodyCustomDatas.CustomDataB & (byte)(1 << 0)))
            {
                manifold.JacobianFlags |= JacobianFlags.UserFlag0; // Soft Contact
            }
            if (0 != (manifold.BodyCustomDatas.CustomDataA & (byte)(1 << 1)) ||
                0 != (manifold.BodyCustomDatas.CustomDataB & (byte)(1 << 1)))
            {
                manifold.JacobianFlags |= JacobianFlags.EnableSurfaceVelocity;
            }
            if (0 != (manifold.BodyCustomDatas.CustomDataA & (byte)(1 << 2)) ||
                0 != (manifold.BodyCustomDatas.CustomDataB & (byte)(1 << 2)))
            {
                manifold.JacobianFlags |= JacobianFlags.EnableMassFactors;
            }
            if (0 != (manifold.BodyCustomDatas.CustomDataA & (byte)(1 << 3)) ||
                0 != (manifold.BodyCustomDatas.CustomDataB & (byte)(1 << 3)))
            {
                manifold.JacobianFlags |= JacobianFlags.UserFlag1; // No Torque
            }
            if (0 != (manifold.BodyCustomDatas.CustomDataA & (byte)(1 << 4)) ||
                0 != (manifold.BodyCustomDatas.CustomDataB & (byte)(1 << 4)))
            {
                manifold.JacobianFlags |= JacobianFlags.EnableMaxImpulse; // Clip impulse (bit 4)
            }
            if (0 != (manifold.BodyCustomDatas.CustomDataA & (byte)(1 << 5)) ||
                0 != (manifold.BodyCustomDatas.CustomDataB & (byte)(1 << 5)))
            {
                manifold.JacobianFlags |= JacobianFlags.Disabled;
            }
            iterator.UpdatePreviousContactHeader(manifold);

            // Just read contacts (advance the iterator past this manifold's points)
            for (int i = 0; i < manifold.NumContacts; i++)
            {
                iterator.GetNextContact();
            }
        }

        return(inDeps);
    };

    // Pass 2: apply the raised flags to the built jacobians.
    SimulationCallbacks.Callback jacobianModificationCallback = (ref ISimulation simulation, JobHandle inDeps) =>
    {
        inDeps.Complete(); // shouldn't be needed (jobify the below)

        JacobianIterator iterator = simulation.Jacobians.Iterator;
        while (iterator.HasJacobiansLeft())
        {
            // JacobianModifierFlags format for this example
            // UserFlag0 - soft contact
            // UserFlag1 - no torque

            // Jacobian header
            ref JacobianHeader jacHeader = ref iterator.ReadJacobianHeader();

            // Triggers can only be disabled, other modifiers have no effect
            if (jacHeader.Type == JacobianType.Contact)
            {
                // Contact jacobian modification
                ref ContactJacobian contactJacobian = ref jacHeader.AccessBaseJacobian <ContactJacobian>();
                {
                    // Check if NoTorque modifier
                    if ((jacHeader.Flags & JacobianFlags.UserFlag1) != 0)
                    {
                        // Disable all friction angular effects
                        contactJacobian.Friction0.AngularA = 0.0f;
                        contactJacobian.Friction1.AngularA = 0.0f;
                        contactJacobian.AngularFriction.AngularA = 0.0f;
                        contactJacobian.Friction0.AngularB = 0.0f;
                        contactJacobian.Friction1.AngularB = 0.0f;
                        contactJacobian.AngularFriction.AngularB = 0.0f;
                    }

                    // Check if SurfaceVelocity present
                    if (jacHeader.HasSurfaceVelocity)
                    {
                        // Since surface normal can change, make sure angular velocity is always relative to it, not independent
                        float3 angVel = contactJacobian.BaseJacobian.Normal * (new float3(0.0f, 1.0f, 0.0f));
                        float3 linVel = float3.zero;
                        Math.CalculatePerpendicularNormalized(contactJacobian.BaseJacobian.Normal, out float3 dir0, out float3 dir1);
                        float linVel0 = math.dot(linVel, dir0);
                        float linVel1 = math.dot(linVel, dir1);
                        float angVelProj = math.dot(angVel, contactJacobian.BaseJacobian.Normal);
                        jacHeader.SurfaceVelocity = new SurfaceVelocity { ExtraFrictionDv = new float3(linVel0, linVel1, angVelProj) };
                    }
                    // NOTE(review): SOURCE chunk ends here; the remainder of this
                    // method lies outside the visible range of this file view.
// Per-frame render/animation update: waits for last frame's scheduled work,
// issues this frame's draw calls per unit type and LOD, then schedules the
// animator-preparation job plus per-unit-type compute fences and returns the
// combined handle (also cached as next frame's fence).
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    Initialize();
    if (!initialized)
    {
        return(inputDeps);
    }
    if (SimulationSettings.Instance.DisableRendering)
    {
        return(inputDeps);
    }

    float dt = Time.deltaTime;

    if (perUnitTypeDataHolder != null)
    {
        // Make sure the jobs scheduled last frame are finished before we
        // read their results for drawing; then start this frame's chain
        // from the incoming dependencies.
        previousFrameFence.Complete();
        previousFrameFence = inputDeps;

        lod0Count = lod1Count = lod2Count = lod3Count = 0;

        // Issue draws and accumulate per-LOD counts.
        // NOTE(review): Count is set to the running cross-type total here and
        // then overwritten per-type in the switch below — looks suspicious; verify.
        foreach (var data in perUnitTypeDataHolder)
        {
            data.Value.Drawer.Draw();
            data.Value.Lod1Drawer.Draw();
            data.Value.Lod2Drawer.Draw();
            data.Value.Lod3Drawer.Draw();
            lod0Count += data.Value.Drawer.UnitToDrawCount;
            lod1Count += data.Value.Lod1Drawer.UnitToDrawCount;
            lod2Count += data.Value.Lod2Drawer.UnitToDrawCount;
            lod3Count += data.Value.Lod3Drawer.UnitToDrawCount;
            data.Value.Count = lod0Count + lod1Count + lod2Count + lod3Count;
        }

        // Advance animation state for every unit before the per-type culling jobs.
        var prepareAnimatorJob = new PrepareAnimatorDataJob()
        {
            animationClips = animationClipData,
            dt = dt,
            textureAnimatorData = units.animationData,
        };
        var prepareAnimatorFence = prepareAnimatorJob.Schedule(units.Length, SimulationState.BigBatchSize, previousFrameFence);

        // Slots: 0 = animator prep (overwritten by Melee below), 3 = Skeleton;
        // unassigned slots stay default handles, which CombineDependencies tolerates.
        NativeArray <JobHandle> jobHandles = new NativeArray <JobHandle>(4, Allocator.Temp);
        jobHandles[0] = prepareAnimatorFence;

        foreach (var data in perUnitTypeDataHolder)
        {
            switch (data.Key)
            {
            case UnitType.Melee:
                ComputeFences(meleeUnits.animationData, dt, meleeUnits.transforms, data, prepareAnimatorFence, jobHandles, 0);
                data.Value.Count = meleeUnits.Length;
                break;

            case UnitType.Skeleton:
                ComputeFences(skeletonUnits.animationData, dt, skeletonUnits.transforms, data, prepareAnimatorFence, jobHandles, 3);
                data.Value.Count = skeletonUnits.Length;
                break;
            }
        }

        Profiler.BeginSample("Combine all dependencies");
        previousFrameFence = JobHandle.CombineDependencies(jobHandles);
        Profiler.EndSample();
        jobHandles.Dispose();
        return(previousFrameFence);
    }

    return(inputDeps);
}
/// <summary>
/// Schedules (single-threaded) the jobs that build a CollisionLayer from the
/// given config, choosing the source by priority: entity query, AABB+body
/// arrays, or body array alone.
/// </summary>
/// <param name="config">Validated build configuration; determines the source of colliders.</param>
/// <param name="layer">The allocated (but not yet populated) collision layer.</param>
/// <param name="allocator">Allocator for the layer's containers.</param>
/// <param name="inputDeps">Dependency all scheduled jobs wait on.</param>
/// <returns>The final handle of the chained build jobs; callers must complete it before using the layer.</returns>
/// <exception cref="InvalidOperationException">The config specifies no usable collider source.</exception>
public static JobHandle ScheduleSingle(this BuildCollisionLayerConfig config, out CollisionLayer layer, Allocator allocator, JobHandle inputDeps = default)
{
    config.ValidateSettings();

    var jh = inputDeps;

    if (config.hasQueryData)
    {
        // Query path: 5 chained jobs — gather AoS data, bucket, scatter
        // indices, sort by xmin, then write the final layer.
        int count = config.count;
        layer = new CollisionLayer(count, config.settings, allocator);
        var layerIndices = new NativeArray <int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var aos = new NativeArray <BuildCollisionLayerInternal.ColliderAoSData>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var xmins = new NativeArray <float>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        // Use the caller-supplied remap array when present, otherwise a
        // scratch one that is disposed at the end of the chain.
        NativeArray <int> remapSrcIndices = config.hasRemapSrcIndices
            ? config.remapSrcIndices
            : new NativeArray <int>(count, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

        jh = new BuildCollisionLayerInternal.Part1FromQueryJob
        {
            typeGroup = config.typeGroup,
            layer = layer,
            layerIndices = layerIndices,
            colliderAoS = aos,
            xmins = xmins
        }.ScheduleSingle(config.query, jh);
        jh = new BuildCollisionLayerInternal.Part2Job
        {
            layer = layer,
            layerIndices = layerIndices
        }.Schedule(jh);
        jh = new BuildCollisionLayerInternal.Part3Job
        {
            layerIndices = layerIndices,
            unsortedSrcIndices = remapSrcIndices
        }.Schedule(count, jh);
        jh = new BuildCollisionLayerInternal.Part4Job
        {
            bucketStartAndCounts = layer.bucketStartsAndCounts,
            unsortedSrcIndices = remapSrcIndices,
            xmins = xmins
        }.Schedule(count, jh);
        jh = new BuildCollisionLayerInternal.Part5FromQueryJob
        {
            colliderAoS = aos,
            layer = layer,
            remapSrcIndices = remapSrcIndices
        }.Schedule(count, jh);

        if (!config.hasRemapSrcIndices)
        {
            // Scratch remap array: dispose as part of the job chain.
            jh = remapSrcIndices.Dispose(jh);
        }
        return(jh);
    }
    else if (config.hasAabbsArray && config.hasBodiesArray)
    {
        // Precomputed AABBs + bodies: one job builds the whole layer.
        layer = new CollisionLayer(config.aabbs.Length, config.settings, allocator);
        if (config.hasRemapSrcIndices)
        {
            jh = new BuildCollisionLayerInternal.BuildFromDualArraysSingleWithRemapJob
            {
                layer = layer,
                aabbs = config.aabbs,
                bodies = config.bodies,
                remapSrcIndices = config.remapSrcIndices
            }.Schedule(jh);
        }
        else
        {
            jh = new BuildCollisionLayerInternal.BuildFromDualArraysSingleJob
            {
                layer = layer,
                aabbs = config.aabbs,
                bodies = config.bodies
            }.Schedule(jh);
        }
        return(jh);
    }
    else if (config.hasBodiesArray)
    {
        // Bodies only: AABBs are derived inside the job.
        layer = new CollisionLayer(config.bodies.Length, config.settings, allocator);
        if (config.hasRemapSrcIndices)
        {
            jh = new BuildCollisionLayerInternal.BuildFromColliderArraySingleWithRemapJob
            {
                layer = layer,
                bodies = config.bodies,
                remapSrcIndices = config.remapSrcIndices
            }.Schedule(jh);
        }
        else
        {
            jh = new BuildCollisionLayerInternal.BuildFromColliderArraySingleJob
            {
                layer = layer,
                bodies = config.bodies
            }.Schedule(jh);
        }
        return(jh);
    }
    else
    {
        throw new InvalidOperationException("Something went wrong with the BuildCollisionError configuration.");
    }
}
// Schedules the cull-and-compute job for one unit type and stores its fence
// in jobHandles[i]. Two compile-time variants: the !USE_SAFE_JOBS path works
// on raw buffers/counters (parallel-for), the safe path clears and refills
// per-LOD lists (single job).
private void ComputeFences(ComponentDataArray <TextureAnimatorData> textureAnimatorDataForUnitType, float dt, ComponentDataArray <UnitTransformData> unitTransformDataForUnitType,
                           KeyValuePair <UnitType, DataPerUnitType> data, JobHandle previousFence, NativeArray <JobHandle> jobHandles, int i)
{
    Profiler.BeginSample("Scheduling");
    // TODO: Replace this with more efficient search.
    Profiler.BeginSample("Create filtering jobs");
    var cameraPosition = Camera.main.transform.position;

#if !USE_SAFE_JOBS
    // Unsafe path: reset the per-LOD GPU buffer counters, then schedule a
    // parallel-for that culls and writes instance data through raw pointers.
    data.Value.Drawer.BufferCounter.Reset();
    data.Value.Lod1Drawer.BufferCounter.Reset();
    data.Value.Lod2Drawer.BufferCounter.Reset();
    data.Value.Lod3Drawer.BufferCounter.Reset();

    var cullAndComputeJob = new CullAndComputeParameters()
    {
        unitTransformData = unitTransformDataForUnitType,
        textureAnimatorData = textureAnimatorDataForUnitType,
        animationClips = animationClipData,
        dt = dt,
        CameraPosition = cameraPosition,
        // LOD band boundaries come from the baked per-unit-type settings.
        DistanceMaxLod0 = data.Value.BakedData.lods.Lod1Distance,
        DistanceMaxLod1 = data.Value.BakedData.lods.Lod2Distance,
        DistanceMaxLod2 = data.Value.BakedData.lods.Lod3Distance,
        BufferCounterLod0 = data.Value.Drawer.BufferCounter,
        BufferCounterLod1 = data.Value.Lod1Drawer.BufferCounter,
        BufferCounterLod2 = data.Value.Lod2Drawer.BufferCounter,
        BufferCounterLod3 = data.Value.Lod3Drawer.BufferCounter,
        BufferPointers = data.Value.BufferPointers
    };

    Profiler.EndSample();

    Profiler.BeginSample("Schedule compute jobs");
    var computeShaderJobFence0 = cullAndComputeJob.Schedule(unitTransformDataForUnitType.Length, SimulationState.HumongousBatchSize, previousFence);
    Profiler.EndSample();

    Profiler.BeginSample("Create combined dependency");
    jobHandles[i] = computeShaderJobFence0;
    Profiler.EndSample();
    Profiler.EndSample();
#else
    // Safe path: clear the per-LOD output lists, then schedule a single job
    // that appends positions/rotations/texture coords per LOD.
    data.Value.Drawer.ObjectPositions.Clear();
    data.Value.Lod1Drawer.ObjectPositions.Clear();
    data.Value.Lod2Drawer.ObjectPositions.Clear();
    data.Value.Lod3Drawer.ObjectPositions.Clear();

    data.Value.Drawer.ObjectRotations.Clear();
    data.Value.Lod1Drawer.ObjectRotations.Clear();
    data.Value.Lod2Drawer.ObjectRotations.Clear();
    data.Value.Lod3Drawer.ObjectRotations.Clear();

    data.Value.Drawer.TextureCoordinates.Clear();
    data.Value.Lod1Drawer.TextureCoordinates.Clear();
    data.Value.Lod2Drawer.TextureCoordinates.Clear();
    data.Value.Lod3Drawer.TextureCoordinates.Clear();

    var cullAndComputeJob = new CullAndComputeParametersSafe()
    {
        unitTransformData = unitTransformDataForUnitType,
        textureAnimatorData = textureAnimatorDataForUnitType,
        animationClips = animationClipData,
        dt = dt,
        CameraPosition = cameraPosition,
        DistanceMaxLod0 = data.Value.BakedData.lods.Lod1Distance,
        DistanceMaxLod1 = data.Value.BakedData.lods.Lod2Distance,
        DistanceMaxLod2 = data.Value.BakedData.lods.Lod3Distance,
        Lod0Positions = data.Value.Drawer.ObjectPositions,
        Lod0Rotations = data.Value.Drawer.ObjectRotations,
        Lod0TexturePositions = data.Value.Drawer.TextureCoordinates,
        Lod1Positions = data.Value.Lod1Drawer.ObjectPositions,
        Lod1Rotations = data.Value.Lod1Drawer.ObjectRotations,
        Lod1TexturePositions = data.Value.Lod1Drawer.TextureCoordinates,
        Lod2Positions = data.Value.Lod2Drawer.ObjectPositions,
        Lod2Rotations = data.Value.Lod2Drawer.ObjectRotations,
        Lod2TexturePositions = data.Value.Lod2Drawer.TextureCoordinates,
        Lod3Positions = data.Value.Lod3Drawer.ObjectPositions,
        Lod3Rotations = data.Value.Lod3Drawer.ObjectRotations,
        Lod3TexturePositions = data.Value.Lod3Drawer.TextureCoordinates,
    };

    Profiler.EndSample();

    Profiler.BeginSample("Schedule compute jobs");
    var computeShaderJobFence0 = cullAndComputeJob.Schedule(previousFence);
    Profiler.EndSample();

    Profiler.BeginSample("Create combined dependency");
    jobHandles[i] = computeShaderJobFence0;
    Profiler.EndSample();
    Profiler.EndSample();
#endif
}
// Placeholder: this system has no update logic yet and fails loudly rather
// than silently passing the dependency chain through.
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    throw new System.NotImplementedException();
}
/// <summary>
/// Schedules this frame's cube job across all matching entities, feeding it
/// the current frame delta time.
/// </summary>
/// <param name="inputDeps">Dependencies handed in by the system group.</param>
/// <returns>The handle of the scheduled job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var job = new cubeJob
    {
        deltaTime = Time.deltaTime
    };
    return job.Schedule(this, inputDeps);
}
/// <summary>
/// Rebuilds the grid's debug visualization: schedules a job that computes the line mesh
/// for node neighbor connections, configures the renderer and batches per-node quads while
/// that job runs in parallel, then completes the job and applies its results to the mesh.
/// </summary>
public void RecalculateDebug()
{
    // Release any previously allocated native arrays before reallocating below.
    DisposeDebugNativeDatastructures();

    int numNodes = gridDepth * gridWidth;

    // prepare the job that calculates the vertices for the neighbor connection lines
    // (two vertices — one line segment endpoint pair — per neighbor link)
    int arrayLength = numNodes * NodeNeighbors * 2;
    connectionsMeshVertices = new NativeArray<Vector3>(arrayLength, Allocator.Persistent);
    connectionsMeshIndices = new NativeArray<int>(arrayLength, Allocator.Persistent);

    CalculateConnectionMeshJob calcConnectionsMeshJob = new CalculateConnectionMeshJob(NodeNeighbors, nodesTransforms, nodesNeighbors, connectionsMeshVertices, connectionsMeshIndices);
    JobHandle calcConnectionMeshHandle = calcConnectionsMeshJob.Schedule(numNodes, 8);

    // do other required stuff before calling complete so we have actual parallelism
    MeshRenderer mr = Utils.GetOrAddComponent<MeshRenderer>(transform, out bool createdRenderer);
    mr.shadowCastingMode = ShadowCastingMode.Off;
    mr.sharedMaterial = nodeConnectionsMaterial;
    mr.lightProbeUsage = LightProbeUsage.Off;
    mr.reflectionProbeUsage = ReflectionProbeUsage.Off;
    mr.enabled = showNodesConnections;

    MeshFilter filter = Utils.GetOrAddComponent<MeshFilter>(transform, out bool createdFilter);
    filter.sharedMesh = connectionsMesh;

    // the nodes themselves
    nodeBatcher.Clear();
    if (showNodes)
    {
        for (int i = 0; i < numNodes; i++)
        {
            NodeTransform nt = nodesTransforms[i];
            NodeType nodeType = nodesTypes[i];

            // Color-code each node quad by its walkability state.
            Color32 c;
            if (nodeType == NodeType.Invalid)
            {
                c = invalidNodeColor;
            }
            else if (nodeType == NodeType.OccupiedByObstacle)
            {
                c = nonWalkableNodeColor;
            }
            else
            {
                c = walkableNodeColor;
            }

            // Offset the quad along the node's up vector — presumably to keep it
            // visually above the surface; TODO confirm intent of NodeVisualNormalOffset.
            Vector3 pos = nt.Pos + (nt.Up * NodeVisualNormalOffset);
            Matrix4x4 trs = Matrix4x4.TRS(pos, nt.GetRotation(), Vector3.one);

            // batch each node quad debug
            nodeBatcher.AddItem(c, trs);
        }
    }

    // Wait for the connection-mesh job only now, after the independent work above.
    calcConnectionMeshHandle.Complete();

    // set the mesh using the results of the job
    connectionsMesh.SetVertices(calcConnectionsMeshJob.vertices);
    connectionsMesh.SetIndices(calcConnectionsMeshJob.indices, MeshTopology.Lines, 0);
}
/// <summary>
/// Schedules two chained jobs: <c>SetRayTestJob</c> writes a test ray (built from the current
/// mouse position) into each collision-check entity's <c>RayData</c>, then <c>Job</c> performs
/// the octree ray-collision test against the octree buffers.
/// </summary>
/// <param name="inputDeps">Jobs this system's work must wait on.</param>
/// <returns>Handle of the final collision-check job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // EntityCommandBuffer ecb = barrier.CreateCommandBuffer () ;

    EntityArray a_collisionChecksEntities = group.GetEntityArray();

    // Component accessors for entities requesting collision checks.
    ComponentDataFromEntity<OctreeEntityPair4CollisionData> a_octreeEntityPair4CollisionData = GetComponentDataFromEntity<OctreeEntityPair4CollisionData>();
    ComponentDataFromEntity<RayData> a_rayData = GetComponentDataFromEntity<RayData>();
    ComponentDataFromEntity<RayMaxDistanceData> a_rayMaxDistanceData = GetComponentDataFromEntity<RayMaxDistanceData>();
    ComponentDataFromEntity<IsCollidingData> a_isCollidingData = GetComponentDataFromEntity<IsCollidingData>();
    ComponentDataFromEntity<IsActiveTag> a_isActiveTag = GetComponentDataFromEntity<IsActiveTag>();

    // Octree entity pair, for collision checks
    ComponentDataFromEntity<RootNodeData> a_octreeRootNodeData = GetComponentDataFromEntity<RootNodeData>();

    // Octree node/instance buffers read by the collision job.
    BufferFromEntity<NodeBufferElement> nodeBufferElement = GetBufferFromEntity<NodeBufferElement>();
    BufferFromEntity<NodeInstancesIndexBufferElement> nodeInstancesIndexBufferElement = GetBufferFromEntity<NodeInstancesIndexBufferElement>();
    BufferFromEntity<NodeChildrenBufferElement> nodeChildrenBufferElement = GetBufferFromEntity<NodeChildrenBufferElement>();
    BufferFromEntity<InstanceBufferElement> instanceBufferElement = GetBufferFromEntity<InstanceBufferElement>();

    // Test ray
    // Debug
    // ! Ensure test this only with single, or at most few ray entities.
    // NOTE(review): this is a default-constructed (empty) accessor, passed only because
    // _DebugRays is invoked with both flags false — verify it is never dereferenced there.
    ComponentDataFromEntity<RayEntityPair4CollisionData> a_rayEntityPair4CollisionData = new ComponentDataFromEntity<RayEntityPair4CollisionData>(); // As empty.

    IsRayColliding_Common._DebugRays(a_collisionChecksEntities, a_rayData, a_rayMaxDistanceData, a_isCollidingData, a_rayEntityPair4CollisionData, false, false);

    // Test ray
    // Build the ray from the main camera through the current mouse position (main thread only).
    Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
    // Debug.DrawLine ( ray.origin, ray.origin + ray.direction * 100, Color.red ) ;

    int i_groupLength = group.CalculateLength();

    // First job: copy the test ray into each entity's RayData.
    var setRayTestJob = new SetRayTestJob
    {
        a_collisionChecksEntities = a_collisionChecksEntities,
        ray = ray,
        a_rayData = a_rayData,
        // a_rayMaxDistanceData = a_rayMaxDistanceData,
    }.Schedule(i_groupLength, 8, inputDeps);

    // Second job: run the octree collision test; depends on the ray having been written.
    var job = new Job
    {
        a_collisionChecksEntities = a_collisionChecksEntities,
        a_octreeEntityPair4CollisionData = a_octreeEntityPair4CollisionData,
        a_rayData = a_rayData,
        a_rayMaxDistanceData = a_rayMaxDistanceData,
        a_isCollidingData = a_isCollidingData,
        // Octree entity pair, for collision checks
        a_isActiveTag = a_isActiveTag,
        a_octreeRootNodeData = a_octreeRootNodeData,
        nodeBufferElement = nodeBufferElement,
        nodeInstancesIndexBufferElement = nodeInstancesIndexBufferElement,
        nodeChildrenBufferElement = nodeChildrenBufferElement,
        instanceBufferElement = instanceBufferElement
    }.Schedule(i_groupLength, 8, setRayTestJob);

    return(job);
}
/// <summary>
/// Attempts to look up a tracked job by its identifier.
/// </summary>
/// <param name="jobId">Identifier of the job to find.</param>
/// <param name="jobReference">The job's handle when found; default otherwise.</param>
/// <returns><c>true</c> when a job with <paramref name="jobId"/> is currently tracked.</returns>
public bool TryGetJob(Guid jobId, out JobHandle jobReference)
    => _jobs.TryGetValue(jobId, out jobReference);
/// <summary>
/// Adds the specified JobHandle to this system's list of dependencies.
/// </summary>
/// <remarks>
/// When a Job records commands into an <see cref="EntityCommandBuffer"/> created by this system,
/// register that Job's <see cref="JobHandle"/> here. Otherwise this buffer system could play back
/// the pending command buffers while the recording Job is still running.
/// </remarks>
/// <param name="producerJob">The JobHandle of a Job which this buffer system should wait for
/// before playing back its pending command buffers.</param>
/// <example>
/// A system that records into a command buffer from a Job registers the Job's handle after
/// scheduling it:
/// <code>
/// protected override JobHandle OnUpdate(JobHandle inputDeps)
/// {
///     var ecbSystem =
///         World.GetOrCreateSystem<EndSimulationEntityCommandBufferSystem>();
///
///     var job = new ProcessInBackgroundJob
///     {
///         ConcurrentCommands = ecbSystem.CreateCommandBuffer().ToConcurrent()
///     };
///
///     var handle = job.Schedule(this, inputDeps);
///     ecbSystem.AddJobHandleForProducer(handle);
///
///     return handle;
/// }
/// </code>
/// </example>
public void AddJobHandleForProducer(JobHandle producerJob)
{
    // Fold the new producer into the combined handle the system completes before playback.
    var combined = JobHandle.CombineDependencies(m_ProducerHandle, producerJob);
    m_ProducerHandle = combined;
}
/// <summary>
/// Completes all registered producer jobs, then plays back (optionally) and disposes every
/// pending command buffer. With collections checks enabled, playback/dispose errors are
/// collected per buffer and rethrown as a single aggregated exception at the end, so one
/// failing buffer does not prevent the others from being processed.
/// </summary>
/// <param name="playBack">When false, pending buffers are disposed without being played back.</param>
internal void FlushPendingBuffers(bool playBack)
{
    // No producer may still be writing while buffers are played back or disposed.
    m_ProducerHandle.Complete();
    m_ProducerHandle = new JobHandle();

    int length;
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    length = m_PendingBuffers.Count;
#else
    length = m_PendingBuffers.Length;
#endif

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    List<string> playbackErrorLog = null;
    bool completeAllJobsBeforeDispose = false;
#endif
    for (int i = 0; i < length; ++i)
    {
        var buffer = m_PendingBuffers[i];
        if (!buffer.IsCreated)
        {
            continue;
        }
        if (playBack)
        {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            try
            {
                buffer.Playback(EntityManager);
            }
            catch (Exception e)
            {
                // Record the failure (with the recording system's type for context) instead of
                // rethrowing immediately, so the remaining buffers are still processed.
                var system = GetSystemFromSystemID(World, buffer.SystemID);
                var systemType = system != null ? system.GetType().ToString() : "Unknown";
                var error = $"{e.Message}\nEntityCommandBuffer was recorded in {systemType} and played back in {GetType()}.\n" + e.StackTrace;
                if (playbackErrorLog == null)
                {
                    playbackErrorLog = new List<string>();
                }
                playbackErrorLog.Add(error);
                completeAllJobsBeforeDispose = true;
            }
#else
            buffer.Playback(EntityManager);
#endif
        }
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        try
        {
            if (completeAllJobsBeforeDispose)
            {
                // If we get here, there was an error during playback (potentially a race condition on the
                // buffer itself), and we should wait for all jobs writing to this command buffer to complete before attempting
                // to dispose of the command buffer to prevent a potential race condition.
                buffer.WaitForWriterJobs();
                completeAllJobsBeforeDispose = false;
            }
            buffer.Dispose();
        }
        catch (Exception e)
        {
            // Dispose failures are collected the same way as playback failures.
            var system = GetSystemFromSystemID(World, buffer.SystemID);
            var systemType = system != null ? system.GetType().ToString() : "Unknown";
            var error = $"{e.Message}\nEntityCommandBuffer was recorded in {systemType} and disposed in {GetType()}.\n" + e.StackTrace;
            if (playbackErrorLog == null)
            {
                playbackErrorLog = new List<string>();
            }
            playbackErrorLog.Add(error);
        }
#else
        buffer.Dispose();
#endif
        // Write the (now disposed) struct back so the pending list reflects its state.
        m_PendingBuffers[i] = buffer;
    }

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    if (playbackErrorLog != null)
    {
#if !NET_DOTS
        var exceptionMessage = new StringBuilder();
        foreach (var err in playbackErrorLog)
        {
            exceptionMessage.AppendLine(err);
        }
#else
        // NET_DOTS has no StringBuilder; fall back to string concatenation.
        var exceptionMessage = "";
        foreach (var err in playbackErrorLog)
        {
            exceptionMessage += err;
            exceptionMessage += '\n';
        }
#endif
        throw new System.ArgumentException(exceptionMessage.ToString());
    }
#endif
}
/// <summary>
/// Immediately executes <paramref name="jobData"/> on the calling thread, once per element
/// of <paramref name="list"/>, mimicking an IJobParallelFor schedule without worker threads.
/// </summary>
/// <typeparam name="T">The parallel-for job type to execute inline.</typeparam>
/// <typeparam name="U">Element type of the list that provides the iteration count.</typeparam>
/// <param name="jobData">Job whose <c>Execute(int)</c> is invoked for each index.</param>
/// <param name="list">Supplies the iteration count via <c>list.Length</c>.</param>
/// <param name="innerloopBatchCount">Ignored; batching is irrelevant for synchronous execution.</param>
/// <param name="dependsOn">Dependencies that are completed before the job runs inline.</param>
/// <returns>The (completed) <paramref name="dependsOn"/> handle.</returns>
public static JobHandle Run <T, U>(this T jobData, NativeList <U> list, int innerloopBatchCount, JobHandle dependsOn = default) where T : struct, IJobParallelFor where U : struct
{
    // Bug fix: Unity's Run() helpers complete scheduled dependencies before executing inline.
    // Without this, Execute() could read data still being written by a pending job.
    dependsOn.Complete();

    for (int i = 0; i < list.Length; i++)
    {
        jobData.Execute(i);
    }

    return dependsOn;
}