/// <summary>
/// Returns the single entity matched by this query.
/// With collections checks enabled, throws when the match count is not exactly one.
/// </summary>
public Entity GetSingletonEntity()
{
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    var matchCount = CalculateLength();
    if (matchCount != 1)
    {
        throw new System.InvalidOperationException($"GetSingletonEntity() requires that exactly one exists but there are {matchCount}.");
    }
#endif
    // Walk to the first chunk (ignoring filters) and copy the Entity out of
    // component slot 0.
    var chunkIterator = GetComponentChunkIterator();
    chunkIterator.MoveToChunkWithoutFiltering(0);

    var componentPtr = chunkIterator.GetCurrentChunkComponentDataPtr(false, 0);
    Entity singleton;
    UnsafeUtility.CopyPtrToStructure(componentPtr, out singleton);
    return singleton;
}
/// <summary>
/// Job entry point: rehydrates the TransformAccessArray handle from the job
/// payload, then invokes jobData.Execute for every transform in this job's
/// fixed index range, patching buffer min/max ranges per element.
/// </summary>
public unsafe static void Execute(ref T jobData, IntPtr jobData2, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure <IntPtr>(jobData2, out IntPtr transformArrayHandle);
    var sortedToUserIndex = (int *)(void *)TransformAccessArray.GetSortedToUserIndex(transformArrayHandle);
    var sortedAccess = (TransformAccess *)(void *)TransformAccessArray.GetSortedTransformAccess(transformArrayHandle);

    JobsUtility.GetJobRange(ref ranges, jobIndex, out int begin, out int end);
    for (int sorted = begin; sorted < end; sorted++)
    {
        // Map the sorted (cache-friendly) index back to the user's index.
        int userIndex = sortedToUserIndex[sorted];
        JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf <T>(ref jobData), userIndex, 1);
        jobData.Execute(userIndex, sortedAccess[sorted]);
    }
}
public T this[Entity entity]
{
    get
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.CheckReadAndThrow(m_Safety);
#endif
        m_Entities->AssertEntityHasComponent(entity, m_TypeIndex);

        // Zero-sized components carry no payload; hand back default(T) so
        // generic callers never need to branch on zero-sizedness themselves.
        if (m_IsZeroSized)
        {
            return default(T);
        }

        void *source = m_Entities->GetComponentDataWithTypeRO(entity, m_TypeIndex, ref m_TypeLookupCache);
        T component;
        UnsafeUtility.CopyPtrToStructure(source, out component);
        return component;
    }
    set
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.CheckWriteAndThrow(m_Safety);
#endif
        m_Entities->AssertEntityHasComponent(entity, m_TypeIndex);

        // Writing a zero-sized component is a no-op: there is no storage.
        if (m_IsZeroSized)
        {
            return;
        }

        void *destination = m_Entities->GetComponentDataWithTypeRW(entity, m_TypeIndex, m_GlobalSystemVersion, ref m_TypeLookupCache);
        UnsafeUtility.CopyStructureToPtr(ref value, destination);
    }
}
/// <summary>
/// Transform-job entry point. Reads the TransformJobData payload, resolves the
/// sorted transform arrays, and invokes jobData.Execute per transform.
/// Read-only jobs pull batches via work stealing (any worker may touch any
/// index); read-write jobs use a fixed per-job range so writers never race
/// on the same transform.
/// </summary>
public static unsafe void Execute(ref T jobData, System.IntPtr jobData2, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure((void *)jobData2, out TransformJobData transformJobData);
    int * sortedToUserIndex = (int *)TransformAccessArray.GetSortedToUserIndex(transformJobData.TransformAccessArray);
    TransformAccess *sortedTransformAccess = (TransformAccess *)TransformAccessArray.GetSortedTransformAccess(transformJobData.TransformAccessArray);
    if (transformJobData.IsReadOnly == 1)
    {
        // Read-only path: keep stealing index ranges until none remain.
        while (true)
        {
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out var begin, out var end))
            {
                break;
            }

            // Copy 'end' into a local the compiler can prove never changes,
            // so the inner loop bound is treated as invariant during codegen.
            var endThatCompilerCanSeeWillNeverChange = end;
            for (var i = begin; i < endThatCompilerCanSeeWillNeverChange; ++i)
            {
                int sortedIndex = i;
                // Remap sorted (cache-friendly) order back to the user's index.
                int userIndex = sortedToUserIndex[sortedIndex];
                JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), userIndex, 1);
                var transformAccess = sortedTransformAccess[sortedIndex];
                transformAccess.MarkReadOnly();
                jobData.Execute(userIndex, transformAccess);
            }
        }
    }
    else
    {
        // Read-write path: each job index owns a fixed slice of the array.
        JobsUtility.GetJobRange(ref ranges, jobIndex, out var begin, out var end);
        for (int i = begin; i < end; i++)
        {
            int sortedIndex = i;
            int userIndex = sortedToUserIndex[sortedIndex];
            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobData), userIndex, 1);
            var transformAccess = sortedTransformAccess[sortedIndex];
            transformAccess.MarkReadWrite();
            jobData.Execute(userIndex, transformAccess);
        }
    }
}
/// <summary>
/// Memory-maps the serialized file at <paramref name="filePath"/> and
/// reconstructs every stored World. Expected layout: a Header, then
/// WorldCount pairs of (offset, byteCount) longs, then the raw column data
/// each pair points at. Each world's data is copied into a persistent native
/// allocation handed to the World constructor.
/// </summary>
public static unsafe World[] Deserialize(string filePath)
{
    long headerSize = UnsafeUtility.SizeOf <Header>();
    long fileSize = new System.IO.FileInfo(filePath).Length;
    using (MemoryMappedFile file = MemoryMappedFile.CreateFromFile(filePath, System.IO.FileMode.Open, null, fileSize))
    using (MemoryMappedViewAccessor viewAccessor = file.CreateViewAccessor())
    {
        byte *basePtr = (byte *)0;
        viewAccessor.SafeMemoryMappedViewHandle.AcquirePointer(ref basePtr);
        try
        {
            UnsafeUtility.CopyPtrToStructure(basePtr, out Header header);

            // The offset table sits directly after the header: two longs
            // (offset, byteCount) per world.
            long *offsetTable = (long *)(basePtr + headerSize);
            long[] offsets = new long[header.WorldCount * 2];
            for (int i = 0; i < offsets.Length; i++)
            {
                offsets[i] = offsetTable[i];
            }

            int3 dimensions = int3(header.DimensionX, header.DimensionY, header.DimensionZ);
            World[] worlds = new World[header.WorldCount];
            for (int i = 0; i < worlds.Length; i++)
            {
                long offset = offsets[i * 2];
                long byteCount = offsets[i * 2 + 1];
                void *source = basePtr + offset;
                // Copy out of the mapped view: the mapping is released before
                // the returned worlds are used.
                void *columns = UnsafeUtility.Malloc(byteCount, UnsafeUtility.AlignOf <World.RLEColumn>(), Unity.Collections.Allocator.Persistent);
                UnsafeUtility.MemCpy(columns, source, byteCount);
                worlds[i] = new World(dimensions, i, columns);
            }
            return worlds;
        }
        finally
        {
            viewAccessor.SafeMemoryMappedViewHandle.ReleasePointer();
        }
    }
}
/// <summary>
/// Reads the component of type T attached to <paramref name="entity"/>,
/// completing any outstanding write dependency on the type first.
/// </summary>
/// <exception cref="System.ArgumentException">
/// Thrown (checks builds only) when T is zero-sized.
/// </exception>
public T GetComponentData <T>(Entity entity) where T : struct, IComponentData
{
    var typeIndex = TypeManager.GetTypeIndex <T>();
    Entities->AssertEntityHasComponent(entity, typeIndex);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    if (ComponentType.FromTypeIndex(typeIndex).IsZeroSized)
    {
        throw new System.ArgumentException($"GetComponentData<{typeof(T)}> can not be called with a zero sized component.");
    }
#endif

    // Ensure no job is still writing this component type before reading.
    ComponentJobSafetyManager.CompleteWriteDependency(typeIndex);

    var source = Entities->GetComponentDataWithTypeRO(entity, typeIndex);
    T component;
    UnsafeUtility.CopyPtrToStructure(source, out component);
    return component;
}
/// <summary>
/// Job entry point: rehydrates the TransformAccessArray handle from the job
/// payload and invokes jobData.Execute for each transform in this job's
/// fixed index range (no buffer range patching on this path).
/// </summary>
public static unsafe void Execute(ref T jobData, System.IntPtr jobData2, System.IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure((void *)jobData2, out IntPtr transformArrayHandle);
    var sortedToUserIndex = (int *)TransformAccessArray.GetSortedToUserIndex(transformArrayHandle);
    var sortedAccess = (TransformAccess *)TransformAccessArray.GetSortedTransformAccess(transformArrayHandle);

    JobsUtility.GetJobRange(ref ranges, jobIndex, out int begin, out int end);
    for (int sorted = begin; sorted < end; sorted++)
    {
        // Map the sorted (cache-friendly) index back to the user's index.
        int userIndex = sortedToUserIndex[sorted];
        jobData.Execute(userIndex, sortedAccess[sorted]);
    }
}
public T this[int index]
{
    get
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.CheckReadAndThrow(m_Safety);
#endif
        // Layout: one m_AlignedBytes-sized slab per thread; elements of size
        // sizeofT packed inside the current thread's slab.
        void *source = (byte *)m_Buffer + m_AlignedBytes * m_ThreadIndex + index * sizeofT;
        T element;
        UnsafeUtility.CopyPtrToStructure(source, out element);
        return element;
    }
    set
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.CheckWriteAndThrow(m_Safety);
#endif
        void *destination = (byte *)m_Buffer + m_AlignedBytes * m_ThreadIndex + index * sizeofT;
        UnsafeUtility.CopyStructureToPtr <T>(ref value, destination);
    }
}
/// <summary>
/// Gets the <see cref="IComponentData"/> instance of type T for the specified entity.
/// </summary>
/// <param name="entity">The entity.</param>
/// <returns>An <see cref="IComponentData"/> type.</returns>
/// <remarks>You cannot use ComponentDataFromEntity to get zero-sized <see cref="IComponentData"/>.
/// Use <see cref="Exists"/> to check whether an entity has the zero-sized component instead.
///
/// Normally, you cannot write to components accessed using a ComponentDataFromEntity instance
/// in a parallel Job. This restriction is in place because multiple threads could write to the same component,
/// leading to a race condition and nondeterministic results. However, when you are certain that your algorithm
/// cannot write to the same component from different threads, you can manually disable this safety check
/// by putting the
/// [NativeDisableParallelForRestrictions](https://docs.unity3d.com/ScriptReference/Unity.Collections.NativeDisableParallelForRestrictionAttribute.html)
/// attribute on the ComponentDataFromEntity field in the Job.
/// </remarks>
/// <exception cref="System.ArgumentException">Thrown if T is zero-size.</exception>
public T this[Entity entity]
{
    get
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.CheckReadAndThrow(m_Safety);
#endif
        m_EntityComponentStore->AssertEntityHasComponent(entity, m_TypeIndex);
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Zero-sized components carry no data to read; direct users to Exists.
        if (m_IsZeroSized)
        {
            throw new System.ArgumentException($"ComponentDataFromEntity<{typeof(T)}> indexer can not get the component because it is zero sized, you can use Exists instead.");
        }
#endif
        T data;
        void *ptr = m_EntityComponentStore->GetComponentDataWithTypeRO(entity, m_TypeIndex, ref m_TypeLookupCache);
        UnsafeUtility.CopyPtrToStructure(ptr, out data);
        return(data);
    }
    set
    {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.CheckWriteAndThrow(m_Safety);
#endif
        m_EntityComponentStore->AssertEntityHasComponent(entity, m_TypeIndex);
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        // Zero-sized components have no storage to write; direct users to Exists.
        if (m_IsZeroSized)
        {
            throw new System.ArgumentException($"ComponentDataFromEntity<{typeof(T)}> indexer can not set the component because it is zero sized, you can use Exists instead.");
        }
#endif
        // RW access bumps the chunk's change version via m_GlobalSystemVersion.
        void *ptr = m_EntityComponentStore->GetComponentDataWithTypeRW(entity, m_TypeIndex, m_GlobalSystemVersion, ref m_TypeLookupCache);
        UnsafeUtility.CopyStructureToPtr(ref value, ptr);
    }
}
/// <summary>
/// Animation-job entry point: rehydrates the AnimationStream from the raw
/// pointer and dispatches to the job method selected by methodIndex.
/// </summary>
public unsafe static void Execute(ref T data, IntPtr animationStreamPtr, IntPtr methodIndex, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure <AnimationStream>((void *)animationStreamPtr, out AnimationStream stream);
    switch ((JobMethodIndex)methodIndex.ToInt32())
    {
        case JobMethodIndex.ProcessRootMotionMethodIndex:
            data.ProcessRootMotion(stream);
            break;
        case JobMethodIndex.ProcessAnimationMethodIndex:
            data.ProcessAnimation(stream);
            break;
        default:
            throw new NotImplementedException("Invalid Animation jobs method index.");
    }
}
/// <summary>
/// Sifts the element at 1-based position <paramref name="i"/> down the
/// max-heap of <paramref name="n"/> elements rooted at offset
/// <paramref name="low"/>, using array.aux_* as scratch copies.
/// Element k (1-based) lives at array.ptr + (low + k - 1) * sizeof(T).
/// </summary>
unsafe static void DownHeap <T>(ref NativeArrayData <T> array, int i, int n, int low) where T : struct, IComparable <T>
{
    var typeSize = UnsafeUtility.SizeOf <T>();
    // The value being sifted down.
    UnsafeUtility.CopyPtrToStructure(array.ptr + ((low + i - 1) * typeSize), out array.aux_first);
    while (i <= n / 2)
    {
        int child = 2 * i;
        void *childAddr = array.ptr + ((low + child - 1) * typeSize);
        UnsafeUtility.CopyPtrToStructure(childAddr, out array.aux_second);
        if (child < n)
        {
            // FIX: only dereference the right sibling when it exists. The
            // original read (low + child) unconditionally, which touched
            // memory one element past the heap whenever 'child' was the last
            // element — an out-of-bounds native read when the heap ends at
            // the buffer's final element.
            void *siblingAddr = array.ptr + ((low + child) * typeSize);
            UnsafeUtility.CopyPtrToStructure(siblingAddr, out array.aux_third);
            if (array.aux_second.CompareTo(array.aux_third) < 0)
            {
                // Right sibling is larger: it becomes the candidate child.
                ++child;
                childAddr = siblingAddr;
                array.aux_second = array.aux_third;
            }
        }
        // Heap property satisfied: the sifted value is not below the larger child.
        if (!(array.aux_first.CompareTo(array.aux_second) < 0))
        {
            break;
        }
        // Promote the larger child into the current slot and descend.
        UnsafeUtility.MemCpy(array.ptr + ((low + i - 1) * typeSize), childAddr, typeSize);
        i = child;
    }
    // Drop the sifted value into its final slot.
    UnsafeUtility.CopyStructureToPtr(ref array.aux_first, array.ptr + ((low + i - 1) * typeSize));
}
/// <summary>
/// Compares the elements at positions lhs and rhs and swaps them when the
/// left one compares greater, using the array's aux scratch slots.
/// </summary>
static void SwapIfGreater <T>(ref NativeArrayData <T> array, int lhs, int rhs) where T : struct, IComparable <T>
{
    if (lhs == rhs)
    {
        return;
    }
    unsafe
    {
        var typeSize = UnsafeUtility.SizeOf <T>();
        void *lhsAddr = array.ptr + (typeSize * lhs);
        void *rhsAddr = array.ptr + (typeSize * rhs);
        UnsafeUtility.CopyPtrToStructure(lhsAddr, out array.aux_first);
        UnsafeUtility.CopyPtrToStructure(rhsAddr, out array.aux_second);
        if (array.aux_first.CompareTo(array.aux_second) > 0)
        {
            // Swap: copy left over right, then write the saved right to left.
            UnsafeUtility.MemCpy(rhsAddr, lhsAddr, typeSize);
            UnsafeUtility.CopyStructureToPtr(ref array.aux_second, lhsAddr);
        }
    }
}
/// <summary>
/// Animation-job entry point: rehydrates the AnimationStream from the raw
/// pointer and dispatches to the job method selected by methodIndex.
/// </summary>
public static unsafe void Execute(ref T data, IntPtr animationStreamPtr, IntPtr methodIndex, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure((void *)animationStreamPtr, out AnimationStream animationStream);
    var method = (JobMethodIndex)methodIndex.ToInt32();
    if (method == JobMethodIndex.ProcessRootMotionMethodIndex)
    {
        data.ProcessRootMotion(animationStream);
    }
    else if (method == JobMethodIndex.ProcessAnimationMethodIndex)
    {
        data.ProcessAnimation(animationStream);
    }
    else
    {
        throw new NotImplementedException("Invalid Animation jobs method index.");
    }
}
/// <summary>
/// Insertion sort: for each pass, lifts the element at position pass+1 into
/// array.aux_first and shifts larger predecessors (down to position low)
/// one slot right until its place is found.
/// </summary>
unsafe static void InsertionSort <T>(ref NativeArrayData <T> array, int low, int high) where T : struct, IComparable <T>
{
    var typeSize = UnsafeUtility.SizeOf <T>();
    for (int pass = low; pass < high; ++pass)
    {
        UnsafeUtility.CopyPtrToStructure(array.ptr + ((pass + 1) * typeSize), out array.aux_first);
        int slot = pass;
        while (slot >= low)
        {
            UnsafeUtility.CopyPtrToStructure(array.ptr + (slot * typeSize), out array.aux_second);
            if (!(array.aux_first.CompareTo(array.aux_second) < 0))
            {
                break;
            }
            // Predecessor is larger: shift it one position right.
            UnsafeUtility.CopyStructureToPtr(ref array.aux_second, array.ptr + ((slot + 1) * typeSize));
            slot--;
        }
        UnsafeUtility.CopyStructureToPtr(ref array.aux_first, array.ptr + ((slot + 1) * typeSize));
    }
}
/// <summary>
/// Job entry point: resolves the EudiReplicaTransformAccessArray from the
/// GCHandle packed into the job payload and invokes jobData.Execute for each
/// index in this job's range. The range index is passed straight through
/// (no sorted-to-user remapping or buffer range patching on this path).
/// </summary>
public static unsafe void Execute(ref T jobData, IntPtr jobData2, IntPtr bufferRangePatchData, ref JobRanges ranges, int jobIndex)
{
    UnsafeUtility.CopyPtrToStructure <IntPtr>(jobData2, out IntPtr handlePtr);
    var transformArray = (EudiReplicaTransformAccessArray)GCHandle.FromIntPtr(handlePtr).Target;

    JobsUtility.GetJobRange(ref ranges, jobIndex, out int begin, out int end);
    for (int index = begin; index < end; ++index)
    {
        jobData.Execute(index, transformArray[index]);
    }
}
/// <summary>
/// Copies every element from all per-thread block lists into
/// <paramref name="values"/>, in thread order then block order. Every block
/// except the last of a list is assumed full (m_elementsPerBlock elements);
/// the final block holds whatever remains of elementCount.
/// </summary>
public void GetElementValues <T>(NativeArray <T> values) where T : struct
{
    int dst = 0;
    for (int threadBlockId = 0; threadBlockId < JobsUtility.MaxJobThreadCount; threadBlockId++)
    {
        var blockList = m_perThreadBlockLists + threadBlockId;
        if (blockList->elementCount <= 0)
        {
            continue;
        }

        CheckBlockCountMatchesCount(blockList->elementCount, blockList->blocks.Length);
        int consumed = 0;

        // All full blocks (every one except the final block of the list).
        for (int blockId = 0; blockId < blockList->blocks.Length - 1; blockId++)
        {
            var cursor = ((BlockPtr *)blockList->blocks.Ptr)[blockId].ptr;
            for (int i = 0; i < m_elementsPerBlock; i++)
            {
                UnsafeUtility.CopyPtrToStructure(cursor, out T element);
                values[dst] = element;
                cursor += m_elementSize;
                consumed++;
                dst++;
            }
        }

        // Final (possibly partial) block: the remaining elements.
        {
            var cursor = ((BlockPtr *)blockList->blocks.Ptr)[blockList->blocks.Length - 1].ptr;
            for (int i = consumed; i < blockList->elementCount; i++)
            {
                UnsafeUtility.CopyPtrToStructure(cursor, out T element);
                values[dst] = element;
                cursor += m_elementSize;
                dst++;
            }
        }
    }
}
/// <summary>
/// Reads the component of type T attached to <paramref name="entity"/>.
/// Zero-sized component types carry no data, so default(T) is returned for
/// them — callers never need to special-case zero size themselves.
/// </summary>
public T GetComponentData <T>(Entity entity) where T : struct, IComponentData
{
    var typeIndex = TypeManager.GetTypeIndex <T>();
    Entities->AssertEntityHasComponent(entity, typeIndex);

    var componentType = ComponentType.FromTypeIndex(typeIndex);
    if (componentType.IsZeroSized)
    {
        return default(T);
    }

    // Ensure no job is still writing this component type before reading.
    ComponentJobSafetyManager.CompleteWriteDependency(typeIndex);

    var source = Entities->GetComponentDataWithTypeRO(entity, typeIndex);
    T component;
    UnsafeUtility.CopyPtrToStructure(source, out component);
    return component;
}
/// <summary>
/// Reads the entity's component (resolved via the cached _typeIndex) into
/// <paramref name="originalComponent"/> and also returns it.
/// </summary>
public unsafe T For(Entity entity, out T originalComponent)
{
    var source = _manager.EntityComponentStore->GetComponentDataWithTypeRO(entity, _typeIndex);
    UnsafeUtility.CopyPtrToStructure(source, out originalComponent);
    return originalComponent;
}
/// <summary>
/// Deserializes a T job from raw memory, runs it, and writes the (possibly
/// mutated) job state back to the same location.
/// </summary>
private static void Method(ref void *data)
{
    UnsafeUtility.CopyPtrToStructure(data, out T job);
    job.Execute();
    UnsafeUtility.CopyStructureToPtr(ref job, data);
}
/// <summary>
/// Plans NavMesh paths for agents tagged NavPlanning: runs a find-path query
/// on the per-thread NavMeshQuery, straightens the result, and fills either
/// the jump buffer (mid-jump agents) or the path buffer, swapping planning
/// tags for walking/jumping/custom-lerp tags via the command buffer.
/// </summary>
/// <remarks>
/// NOTE(review): this excerpt ends at the ForEach lambda's closing "})" —
/// the scheduling call that terminates the statement lies outside this view.
/// </remarks>
protected override void OnUpdate()
{
    var commandBuffer = barrier.CreateCommandBuffer().AsParallelWriter();
    var localToWorldFromEntity = GetComponentDataFromEntity <LocalToWorld>(true);
    var jumpingFromEntity = GetComponentDataFromEntity <NavJumping>(true);
    var pathBufferFromEntity = GetBufferFromEntity <NavPathBufferElement>();
    var jumpBufferFromEntity = GetBufferFromEntity <NavJumpBufferElement>();
    var navMeshQueryPointerArray = World.GetExistingSystem <NavMeshQuerySystem>().PointerArray;
    var settings = navSystem.Settings;

    Entities
    .WithNone <NavProblem>()
    .WithAll <NavPlanning, LocalToParent>()
    .WithReadOnly(localToWorldFromEntity)
    .WithReadOnly(jumpingFromEntity)
    .WithNativeDisableParallelForRestriction(pathBufferFromEntity)
    .WithNativeDisableParallelForRestriction(jumpBufferFromEntity)
    .WithNativeDisableParallelForRestriction(navMeshQueryPointerArray)
    .ForEach((Entity entity, int entityInQueryIndex, int nativeThreadIndex, ref NavAgent agent, in Parent surface, in NavDestination destination) =>
    {
        // Bail out unless both the agent's surface and its destination
        // surface exist and have transforms.
        if (
            surface.Value.Equals(Entity.Null) ||
            agent.DestinationSurface.Equals(Entity.Null) ||
            !localToWorldFromEntity.HasComponent(surface.Value) ||
            !localToWorldFromEntity.HasComponent(agent.DestinationSurface)
        )
        {
            return;
        }

        var agentPosition = localToWorldFromEntity[entity].Position;
        var worldPosition = agentPosition;
        var worldDestination = agent.LocalDestination.ToWorld(localToWorldFromEntity[agent.DestinationSurface]);

        // For agents mid-jump the query runs in reverse: destination -> agent.
        var jumping = jumpingFromEntity.HasComponent(entity);
        if (jumping)
        {
            worldPosition = worldDestination;
            worldDestination = agentPosition;
        }

        // Each thread owns its own NavMeshQuery, reconstructed from a raw pointer.
        var navMeshQueryPointer = navMeshQueryPointerArray[nativeThreadIndex];
        UnsafeUtility.CopyPtrToStructure(navMeshQueryPointer.Value, out NavMeshQuery navMeshQuery);

        var one = new float3(1);
        var status = navMeshQuery.BeginFindPath(
            navMeshQuery.MapLocation(worldPosition, one * settings.PathSearchMax, agent.TypeID),
            navMeshQuery.MapLocation(worldDestination, one * settings.PathSearchMax, agent.TypeID),
            NavMesh.AllAreas
        );

        // Iterate the query until it leaves the InProgress state.
        while (NavUtil.HasStatus(status, PathQueryStatus.InProgress))
        {
            status = navMeshQuery.UpdateFindPath(
                settings.IterationMax,
                out int iterationsPerformed
            );
        }

        var customLerp = destination.CustomLerp;

        // On failure: stop planning and surface the status as a NavProblem.
        if (!NavUtil.HasStatus(status, PathQueryStatus.Success))
        {
            commandBuffer.RemoveComponent <NavPlanning>(entityInQueryIndex, entity);
            commandBuffer.RemoveComponent <NavDestination>(entityInQueryIndex, entity);
            commandBuffer.AddComponent(entityInQueryIndex, entity, new NavProblem
            {
                Value = status
            });
            return;
        }

        navMeshQuery.EndFindPath(out int pathLength);

        var polygonIdArray = new NativeArray <PolygonId>(
            NavConstants.PATH_NODE_MAX,
            Allocator.Temp
        );

        navMeshQuery.GetPathResult(polygonIdArray);

        var len = pathLength + 1;
        var straightPath = new NativeArray <NavMeshLocation>(len, Allocator.Temp);
        var straightPathFlags = new NativeArray <StraightPathFlags>(len, Allocator.Temp);
        var vertexSide = new NativeArray <float>(len, Allocator.Temp);
        var straightPathCount = 0;

        status = PathUtils.FindStraightPath(
            navMeshQuery,
            worldPosition,
            worldDestination,
            polygonIdArray,
            pathLength,
            ref straightPath,
            ref straightPathFlags,
            ref vertexSide,
            ref straightPathCount,
            NavConstants.PATH_NODE_MAX
        );

        // Lazily create buffers the first time an entity needs them.
        var jumpBuffer = !jumpBufferFromEntity.HasComponent(entity) ?
                         commandBuffer.AddBuffer <NavJumpBufferElement>(entityInQueryIndex, entity) :
                         jumpBufferFromEntity[entity];
        var pathBuffer = !pathBufferFromEntity.HasComponent(entity) ?
                         commandBuffer.AddBuffer <NavPathBufferElement>(entityInQueryIndex, entity) :
                         pathBufferFromEntity[entity];

        if (jumping)
        {
            // Jump target: the last straight-path point whose polygon is
            // still valid, converted into the destination surface's space.
            var lastValidPoint = float3.zero;
            for (var i = 0; i < straightPath.Length; ++i)
            {
                if (navMeshQuery.IsValid(straightPath[i].polygon))
                {
                    lastValidPoint = straightPath[i].position;
                }
                else
                {
                    break;
                }
            }

            jumpBuffer.Add((lastValidPoint + agent.Offset).ToLocal(localToWorldFromEntity[agent.DestinationSurface]));

            if (jumpBuffer.Length > 0)
            {
                commandBuffer.RemoveComponent <NavPlanning>(entityInQueryIndex, entity);

                if (customLerp)
                {
                    commandBuffer.AddComponent <NavCustomLerping>(entityInQueryIndex, entity);
                }
                else
                {
                    commandBuffer.AddComponent <NavJumping>(entityInQueryIndex, entity);
                }
            }
        }
        else if (status == PathQueryStatus.Success)
        {
            // Replace the stale final waypoint, then append the straightened
            // path in reverse (it is consumed back-to-front), in surface space.
            if (pathBuffer.Length > 0)
            {
                pathBuffer.RemoveAt(pathBuffer.Length - 1);
            }

            for (var i = straightPathCount - 1; i > 0; --i)
            {
                pathBuffer.Add(
                    ((float3)straightPath[i].position + agent.Offset).ToLocal(localToWorldFromEntity[surface.Value])
                );
            }

            if (pathBuffer.Length > 0)
            {
                commandBuffer.RemoveComponent <NavPlanning>(entityInQueryIndex, entity);

                if (customLerp)
                {
                    commandBuffer.AddComponent <NavCustomLerping>(entityInQueryIndex, entity);
                }
                else
                {
                    commandBuffer.AddComponent <NavWalking>(entityInQueryIndex, entity);
                    commandBuffer.AddComponent <NavSteering>(entityInQueryIndex, entity);
                }
            }
        }

        polygonIdArray.Dispose();
        straightPath.Dispose();
        straightPathFlags.Dispose();
        vertexSide.Dispose();
    })
/// <summary>
/// Decodes the instruction of type T located just past the current opcode
/// word, executes it against the VM state, then advances the program counter
/// over the opcode word plus the instruction payload.
/// </summary>
public void Exec <T>() where T : struct, IInstruction
{
    var payload = (byte *)pc + sizeof(uint);
    UnsafeUtility.CopyPtrToStructure(payload, out T instruction);
    instruction.Exec(ref stack, ref pos0, ref pos1, ref pos2, ref pos3);
    pc = payload + UnsafeUtility.SizeOf <T>();
}
/// <summary>
/// Plans NavMesh paths for agents tagged PathPlanning: runs a find-path
/// query on the per-thread NavMeshQuery, straightens the result, and appends
/// the waypoints (in reverse) to the agent's path buffer, then clears the
/// planning/destination components via the command buffer.
/// </summary>
/// <remarks>
/// NOTE(review): this excerpt ends at the ForEach lambda's closing "})" —
/// the scheduling call that terminates the statement lies outside this view.
/// </remarks>
protected override void OnUpdate()
{
    var commandBuffer = barrier.CreateCommandBuffer().AsParallelWriter();
    var pathBufferFromEntity = GetBufferFromEntity <PathBufferElement>();
    var navMeshQueryPointerArray = World.GetExistingSystem <PathMeshQuerySystem>().PointerArray;
    var settings = pathSystem.Settings;

    Entities
    .WithNone <PathProblem>()
    .WithAll <PathPlanning>()
    .WithNativeDisableParallelForRestriction(pathBufferFromEntity)
    .WithNativeDisableParallelForRestriction(navMeshQueryPointerArray)
    .ForEach((Entity entity, int entityInQueryIndex, int nativeThreadIndex, ref PathAgent agent, in PathDestination destination, in LocalToWorld localToWorld) =>
    {
        var worldPosition = localToWorld.Position;
        var worldDestination = agent.WorldDestination;

        // Each thread owns its own NavMeshQuery, reconstructed from a raw pointer.
        var navMeshQueryPointer = navMeshQueryPointerArray[nativeThreadIndex];
        UnsafeUtility.CopyPtrToStructure(navMeshQueryPointer.Value, out NavMeshQuery navMeshQuery);

        var one = new float3(1);
        var status = navMeshQuery.BeginFindPath(
            navMeshQuery.MapLocation(worldPosition, one * settings.PathSearchMax, agent.TypeID),
            navMeshQuery.MapLocation(worldDestination, one * settings.PathSearchMax, agent.TypeID),
            NavMesh.AllAreas
        );

        // Iterate the query until it leaves the InProgress state.
        while (PathUtil.HasStatus(status, PathQueryStatus.InProgress))
        {
            status = navMeshQuery.UpdateFindPath(
                settings.IterationMax,
                out int iterationsPerformed
            );
        }

        // On failure: stop planning and surface the status as a PathProblem.
        if (!PathUtil.HasStatus(status, PathQueryStatus.Success))
        {
            commandBuffer.RemoveComponent <PathPlanning>(entityInQueryIndex, entity);
            commandBuffer.RemoveComponent <PathDestination>(entityInQueryIndex, entity);
            commandBuffer.AddComponent(entityInQueryIndex, entity, new PathProblem
            {
                Value = status
            });
            return;
        }

        navMeshQuery.EndFindPath(out int pathLength);

        var polygonIdArray = new NativeArray <PolygonId>(
            PathConstants.PATH_NODE_MAX,
            Allocator.Temp
        );

        navMeshQuery.GetPathResult(polygonIdArray);

        var len = pathLength + 1;
        var straightPath = new NativeArray <NavMeshLocation>(len, Allocator.Temp);
        var straightPathFlags = new NativeArray <StraightPathFlags>(len, Allocator.Temp);
        var vertexSide = new NativeArray <float>(len, Allocator.Temp);
        var straightPathCount = 0;

        status = PathUtils.FindStraightPath(
            navMeshQuery,
            worldPosition,
            worldDestination,
            polygonIdArray,
            pathLength,
            ref straightPath,
            ref straightPathFlags,
            ref vertexSide,
            ref straightPathCount,
            PathConstants.PATH_NODE_MAX
        );

        // Lazily create the path buffer the first time an entity needs it.
        var pathBuffer = !pathBufferFromEntity.HasComponent(entity) ?
                         commandBuffer.AddBuffer <PathBufferElement>(entityInQueryIndex, entity) :
                         pathBufferFromEntity[entity];

        if (status == PathQueryStatus.Success)
        {
            // Replace the stale final waypoint, then append the straightened
            // path in reverse (it is consumed back-to-front).
            if (pathBuffer.Length > 0)
            {
                pathBuffer.RemoveAt(pathBuffer.Length - 1);
            }

            for (var i = straightPathCount - 1; i > 0; --i)
            {
                pathBuffer.Add((float3)straightPath[i].position + agent.Offset);
            }

            if (pathBuffer.Length > 0)
            {
                commandBuffer.RemoveComponent <PathPlanning>(entityInQueryIndex, entity);
                commandBuffer.RemoveComponent <PathDestination>(entityInQueryIndex, entity);
            }
        }

        polygonIdArray.Dispose();
        straightPath.Dispose();
        straightPathFlags.Dispose();
        vertexSide.Dispose();
    })
/// <summary>
/// Schedules (or runs inline) a parallel-for job described by
/// <paramref name="parameters"/>. Exactly one of <paramref name="arrayLength"/>
/// (>= 0) or <paramref name="deferredDataPtr"/> must be provided. Handles the
/// single-threaded/run-on-main-thread paths synchronously, including the
/// managed-to-Burst marshaling path when the job contains non-blittable fields.
/// </summary>
static unsafe JobHandle ScheduleParallelForInternal(ref JobScheduleParameters parameters, int arrayLength, void *deferredDataPtr, int innerloopBatchCount)
{
    // Ensure the user has not set the schedule mode to a currently unsupported type
    Assert.IsTrue(parameters.ScheduleMode != ScheduleMode.Single);
    // May provide an arrayLength (>=0) OR a deferredDataPtr, but both is senseless.
    Assert.IsTrue((arrayLength >= 0 && deferredDataPtr == null) || (arrayLength < 0 && deferredDataPtr != null));

    // Heap memory is required so Cleanup can free() it later.
    UnsafeUtility.AssertHeap(parameters.JobDataPtr);
    UnsafeUtility.AssertHeap(parameters.ReflectionData);

    ReflectionDataProxy jobReflectionData = UnsafeUtility.AsRef <ReflectionDataProxy>(parameters.ReflectionData);
    Assert.IsFalse(jobReflectionData.ExecuteFunctionPtr.ToPointer() == null);
    Assert.IsFalse(jobReflectionData.CleanupFunctionPtr.ToPointer() == null);
#if ENABLE_UNITY_COLLECTIONS_CHECKS && !UNITY_DOTSRUNTIME_IL2CPP
    // UnmanagedSize and the marshal function pointer must agree: either both
    // indicate a marshaling path, or neither does.
    Assert.IsTrue((jobReflectionData.UnmanagedSize != -1 && jobReflectionData.MarshalToBurstFunctionPtr != IntPtr.Zero) || (jobReflectionData.UnmanagedSize == -1 && jobReflectionData.MarshalToBurstFunctionPtr == IntPtr.Zero));
#endif

    JobMetaData *managedJobDataPtr = parameters.JobDataPtr;
    JobMetaData jobMetaData;
    UnsafeUtility.CopyPtrToStructure(parameters.JobDataPtr, out jobMetaData);
    Assert.IsTrue(jobMetaData.jobDataSize > 0); // set by JobScheduleParameters
    Assert.IsTrue(sizeof(JobRanges) <= JobMetaData.kJobMetaDataIsParallelOffset);

    jobMetaData.JobRanges.ArrayLength = (arrayLength >= 0) ? arrayLength : 0;
    jobMetaData.JobRanges.IndicesPerPhase = (arrayLength >= 0) ? GetDefaultIndicesPerPhase(arrayLength) : 1; // TODO indicesPerPhase isn't actually used, except as a flag.

    // If this is set to -1 by codegen, that indicates an error if we schedule the job as parallel for because
    // it potentially consists of write operations which are not parallel compatible
    if (jobMetaData.isParallelFor == -1)
    {
        throw new InvalidOperationException("Parallel writing not supported in this job. Parallel scheduling invalid.");
    }
    jobMetaData.isParallelFor = 1;
    jobMetaData.deferredDataPtr = deferredDataPtr;

    JobHandle jobHandle = default;
#if !UNITY_SINGLETHREADED_JOBS
    bool runSingleThreadSynchronous = parameters.ScheduleMode == ScheduleMode.RunOnMainThread || parameters.ScheduleMode == ScheduleMode.ScheduleOnMainThread;
#else
    bool runSingleThreadSynchronous = true;
#endif
    jobMetaData.JobRanges.runOnMainThread = runSingleThreadSynchronous ? 1 : 0;

    if (runSingleThreadSynchronous)
    {
        bool syncNow = parameters.ScheduleMode == ScheduleMode.Run || parameters.ScheduleMode == ScheduleMode.RunOnMainThread;
#if UNITY_SINGLETHREADED_JOBS
        // Nativejobs needs further support in creating a JobHandle not linked to an actual job in order to support this correctly
        // in multithreaded builds
        if (!syncNow)
        {
            jobHandle.JobGroup = GetFakeJobGroupId();
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            DebugDidScheduleJob(ref jobHandle, (JobHandle *)UnsafeUtility.AddressOf(ref parameters.Dependency), 1);
#endif
        }
#endif
        parameters.Dependency.Complete();
        UnsafeUtility.SetInJob(1);
        try
        {
            // We assume there are no non-blittable fields in a bursted job (i.e. DisposeSentinel) if
            // collections checks are not enabled
#if ENABLE_UNITY_COLLECTIONS_CHECKS && !UNITY_DOTSRUNTIME_IL2CPP
            // If the job was bursted, and the job structure contained non-blittable fields, the UnmanagedSize will
            // be something other than -1 meaning we need to marshal the managed representation before calling the ExecuteFn
            if (jobReflectionData.UnmanagedSize != -1)
            {
                JobMetaData *unmanagedJobData = AllocateJobHeapMemory(jobReflectionData.UnmanagedSize, 1);
                void *dst = (byte *)unmanagedJobData + sizeof(JobMetaData);
                void *src = (byte *)managedJobDataPtr + sizeof(JobMetaData);

                // In the single threaded case, this is synchronous execution.
                UnsafeUtility.EnterTempScope();
                try
                {
                    UnsafeUtility.CallFunctionPtr_pp(jobReflectionData.MarshalToBurstFunctionPtr.ToPointer(), dst, src);
                    CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, unmanagedJobData);
                    UnsafeUtility.CallFunctionPtr_pi(jobReflectionData.ExecuteFunctionPtr.ToPointer(), unmanagedJobData, k_MainThreadWorkerIndex);
                    UnsafeUtility.CallFunctionPtr_p(jobReflectionData.CleanupFunctionPtr.ToPointer(), unmanagedJobData);
                }
                finally
                {
                    UnsafeUtility.ExitTempScope();
                }
            }
            else
#endif
            {
                CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, null);

                // In the single threaded case, this is synchronous execution.
                UnsafeUtility.EnterTempScope();
                try
                {
                    UnsafeUtility.CallFunctionPtr_pi(jobReflectionData.ExecuteFunctionPtr.ToPointer(), managedJobDataPtr, k_MainThreadWorkerIndex);
                    UnsafeUtility.CallFunctionPtr_p(jobReflectionData.CleanupFunctionPtr.ToPointer(), managedJobDataPtr);
                }
                finally
                {
                    UnsafeUtility.ExitTempScope();
                }
            }
        }
        finally
        {
            UnsafeUtility.SetInJob(0);
        }
        return(jobHandle);
    }

#if !UNITY_SINGLETHREADED_JOBS
#if ENABLE_UNITY_COLLECTIONS_CHECKS && !UNITY_DOTSRUNTIME_IL2CPP
    // If the job was bursted, and the job structure contained non-blittable fields, the UnmanagedSize will
    // be something other than -1 meaning we need to marshal the managed representation before calling the ExecuteFn
    if (jobReflectionData.UnmanagedSize != -1)
    {
        int nWorker = JobWorkerCount > 1 ? JobWorkerCount : 1;
        JobMetaData *unmanagedJobData = AllocateJobHeapMemory(jobReflectionData.UnmanagedSize, nWorker);
        // Marshal one copy of the job struct per worker.
        for (int i = 0; i < nWorker; i++)
        {
            void *dst = (byte *)unmanagedJobData + sizeof(JobMetaData) + i * jobReflectionData.UnmanagedSize;
            void *src = (byte *)managedJobDataPtr + sizeof(JobMetaData) + i * jobMetaData.jobDataSize;
            UnsafeUtility.CallFunctionPtr_pp(jobReflectionData.MarshalToBurstFunctionPtr.ToPointer(), dst, src);
        }

        // Need to change the jobDataSize so the job will have the correct stride when finding
        // the correct jobData for a thread.
        JobMetaData unmanagedJobMetaData = jobMetaData;
        unmanagedJobMetaData.jobDataSize = jobReflectionData.UnmanagedSize;
        CopyMetaDataToJobData(ref unmanagedJobMetaData, managedJobDataPtr, unmanagedJobData);

        jobHandle = ScheduleJobParallelFor(jobReflectionData.ExecuteFunctionPtr, jobReflectionData.CleanupFunctionPtr, unmanagedJobData, arrayLength, innerloopBatchCount, parameters.Dependency);
    }
    else
#endif
    {
        CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, null);
        jobHandle = ScheduleJobParallelFor(jobReflectionData.ExecuteFunctionPtr, jobReflectionData.CleanupFunctionPtr, parameters.JobDataPtr, arrayLength, innerloopBatchCount, parameters.Dependency);
    }

    if (parameters.ScheduleMode == ScheduleMode.Run)
    {
        jobHandle.Complete();
    }
#endif
    return(jobHandle);
}
/// <summary>
/// Schedules (or synchronously runs) a single non-parallel job described by
/// <paramref name="parameters"/>. Handles both the single-threaded build
/// (UNITY_SINGLETHREADED_JOBS) and the multithreaded build, and — when
/// collections checks are enabled — marshals managed job structs containing
/// non-blittable fields into an unmanaged copy before handing them to Burst.
/// </summary>
/// <param name="parameters">Schedule parameters; JobDataPtr must be heap
/// memory (Cleanup will free() it) and ReflectionData must point at a
/// ReflectionDataProxy with valid Execute/Cleanup function pointers.</param>
/// <returns>The handle for the scheduled job; already completed when the
/// schedule mode requested synchronous execution.</returns>
public static unsafe JobHandle Schedule(ref JobScheduleParameters parameters)
{
    // Ensure the user has not set the schedule mode to a currently unsupported type
    Assert.IsTrue(parameters.ScheduleMode != ScheduleMode.Single);
    // Heap memory must be passed to schedule, so that Cleanup can free() it.
    UnsafeUtility.AssertHeap(parameters.JobDataPtr);
    UnsafeUtility.AssertHeap(parameters.ReflectionData);
    ReflectionDataProxy jobReflectionData = UnsafeUtility.AsRef <ReflectionDataProxy>(parameters.ReflectionData);
    Assert.IsTrue(jobReflectionData.ExecuteFunctionPtr.ToPointer() != null);
    Assert.IsTrue(jobReflectionData.CleanupFunctionPtr.ToPointer() != null);
#if ENABLE_UNITY_COLLECTIONS_CHECKS && !UNITY_DOTSRUNTIME_IL2CPP
    // UnmanagedSize and MarshalToBurstFunctionPtr must agree: either both are
    // "unset" (-1 / Zero) or both are set — anything else is a reflection bug.
    Assert.IsTrue((jobReflectionData.UnmanagedSize != -1 && jobReflectionData.MarshalToBurstFunctionPtr != IntPtr.Zero) ||
                  (jobReflectionData.UnmanagedSize == -1 && jobReflectionData.MarshalToBurstFunctionPtr == IntPtr.Zero));
#endif
    JobMetaData *managedJobDataPtr = parameters.JobDataPtr;
    JobMetaData jobMetaData;
    // JobRanges is stored in the same header region; it must fit below the
    // isParallelFor flag's offset or the layouts would overlap.
    Assert.IsTrue(sizeof(JobRanges) <= JobMetaData.kJobMetaDataIsParallelOffset);
    UnsafeUtility.CopyPtrToStructure(managedJobDataPtr, out jobMetaData);
    Assert.IsTrue(jobMetaData.jobDataSize > 0); // set by JobScheduleParameters
    // Record the managed pointer and mark this as a non-parallel job, then
    // write the header back so Execute/Cleanup see the updated metadata.
    jobMetaData.managedPtr = managedJobDataPtr;
    jobMetaData.isParallelFor = 0;
    UnsafeUtility.CopyStructureToPtr(ref jobMetaData, managedJobDataPtr);
    JobHandle jobHandle = default;
#if !UNITY_SINGLETHREADED_JOBS
    bool runSingleThreadSynchronous = parameters.ScheduleMode == ScheduleMode.RunOnMainThread ||
                                      parameters.ScheduleMode == ScheduleMode.Run ||
                                      parameters.ScheduleMode == ScheduleMode.ScheduleOnMainThread;
#else
    // With no worker threads everything runs on the main thread regardless of mode.
    bool runSingleThreadSynchronous = true;
#endif
    if (runSingleThreadSynchronous)
    {
        bool syncNow = parameters.ScheduleMode == ScheduleMode.Run || parameters.ScheduleMode == ScheduleMode.RunOnMainThread;
#if UNITY_SINGLETHREADED_JOBS
        if (!syncNow)
        {
            // Deferred single-threaded execution still needs a real-looking
            // handle so later Complete()/dependency tracking works.
            jobHandle.JobGroup = GetFakeJobGroupId();
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            DebugDidScheduleJob(ref jobHandle, (JobHandle *)UnsafeUtility.AddressOf(ref parameters.Dependency), 1);
#endif
        }
#endif
        // Dependencies must be finished before we run the job inline.
        parameters.Dependency.Complete();
        UnsafeUtility.SetInJob(1);
        try
        {
            // We assume there are no non-blittable fields in a bursted job (i.e. DisposeSentinel) if
            // collections checks are not enabled
#if ENABLE_UNITY_COLLECTIONS_CHECKS && !UNITY_DOTSRUNTIME_IL2CPP
            // If the job was bursted, and the job structure contained non-blittable fields, the UnmanagedSize will
            // be something other than -1 meaning we need to marshal the managed representation before calling the ExecuteFn
            if (jobReflectionData.UnmanagedSize != -1)
            {
                JobMetaData *unmanagedJobData = AllocateJobHeapMemory(jobReflectionData.UnmanagedSize, 1);
                // Payloads live immediately after the JobMetaData header.
                void *dst = (byte *)unmanagedJobData + sizeof(JobMetaData);
                void *src = (byte *)managedJobDataPtr + sizeof(JobMetaData);
                UnsafeUtility.EnterTempScope();
                try
                {
                    UnsafeUtility.CallFunctionPtr_pp(jobReflectionData.MarshalToBurstFunctionPtr.ToPointer(), dst, src);
                    // In the single threaded case, this is synchronous execution.
                    // The cleanup *is* bursted, so pass in the unmanangedJobDataPtr
                    CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, unmanagedJobData);
                    UnsafeUtility.CallFunctionPtr_pi(jobReflectionData.ExecuteFunctionPtr.ToPointer(), unmanagedJobData, k_MainThreadWorkerIndex);
                }
                finally
                {
                    UnsafeUtility.ExitTempScope();
                }
            }
            else
#endif
            {
                CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, null);
                // In the single threaded case, this is synchronous execution.
                UnsafeUtility.EnterTempScope();
                try
                {
                    UnsafeUtility.CallFunctionPtr_pi(jobReflectionData.ExecuteFunctionPtr.ToPointer(), managedJobDataPtr, k_MainThreadWorkerIndex);
                }
                finally
                {
                    UnsafeUtility.ExitTempScope();
                }
            }
        }
        finally
        {
            // Always clear the in-job flag, even if Execute threw.
            UnsafeUtility.SetInJob(0);
        }
        return(jobHandle);
    }
#if !UNITY_SINGLETHREADED_JOBS
#if ENABLE_UNITY_COLLECTIONS_CHECKS && !UNITY_DOTSRUNTIME_IL2CPP
    // If the job was bursted, and the job structure contained non-blittable fields, the UnmanagedSize will
    // be something other than -1 meaning we need to marshal the managed representation before calling the ExecuteFn.
    // This time though, we have a whole bunch of jobs that need to be processed.
    if (jobReflectionData.UnmanagedSize != -1)
    {
        JobMetaData *unmanagedJobData = AllocateJobHeapMemory(jobReflectionData.UnmanagedSize, 1);
        void *dst = (byte *)unmanagedJobData + sizeof(JobMetaData);
        void *src = (byte *)managedJobDataPtr + sizeof(JobMetaData);
        UnsafeUtility.CallFunctionPtr_pp(jobReflectionData.MarshalToBurstFunctionPtr.ToPointer(), dst, src);
        CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, unmanagedJobData);
        jobHandle = ScheduleJob(jobReflectionData.ExecuteFunctionPtr, unmanagedJobData, parameters.Dependency);
    }
    else
#endif
    {
        CopyMetaDataToJobData(ref jobMetaData, managedJobDataPtr, null);
        jobHandle = ScheduleJob(jobReflectionData.ExecuteFunctionPtr, parameters.JobDataPtr, parameters.Dependency);
    }
#endif
    return(jobHandle);
}
bool Get(int id, out NativeList <T> items) { UnsafeUtility.CopyPtrToStructure(_lists + id * _sizeOf, out items); return(items.IsCreated); }
/// <summary>
/// Plans NavMesh paths for agents tagged NavPlanning. For each agent it maps
/// its world position and destination onto the NavMesh, runs a find-path
/// query on the per-thread NavMeshQuery, straightens the resulting polygon
/// corridor, and writes the waypoints into the agent's path (or jump) buffer.
/// On failure it tags the entity with NavHasProblem; on success it swaps
/// NavPlanning for NavLerping via the parallel command buffer.
/// </summary>
protected override void OnUpdate()
{
    var commandBuffer = barrier.CreateCommandBuffer().AsParallelWriter();
    var localToWorldFromEntity = GetComponentDataFromEntity <LocalToWorld>(true);
    // NOTE(review): translationFromEntity is captured but never read below — confirm whether it can be removed.
    var translationFromEntity = GetComponentDataFromEntity <Translation>(true);
    var jumpingFromEntity = GetComponentDataFromEntity <NavJumping>(true);
    var pathBufferFromEntity = GetBufferFromEntity <NavPathBufferElement>();
    var jumpBufferFromEntity = GetBufferFromEntity <NavJumpBufferElement>();
    // One NavMeshQuery pointer per worker thread, indexed by nativeThreadIndex below.
    var navMeshQueryPointerArray = World.GetExistingSystem <NavMeshQuerySystem>().PointerArray;
    Entities
        .WithNone <NavHasProblem>()
        .WithAll <NavPlanning, LocalToParent>()
        .WithReadOnly(localToWorldFromEntity)
        .WithReadOnly(jumpingFromEntity)
        .WithNativeDisableParallelForRestriction(pathBufferFromEntity)
        .WithNativeDisableParallelForRestriction(jumpBufferFromEntity)
        .WithNativeDisableParallelForRestriction(navMeshQueryPointerArray)
        .ForEach((Entity entity, int entityInQueryIndex, int nativeThreadIndex, ref NavAgent agent, in Parent surface) =>
        {
            // Skip agents with no surface/destination, or whose surfaces have no transform yet.
            if (
                surface.Value.Equals(Entity.Null) ||
                agent.DestinationSurface.Equals(Entity.Null) ||
                !localToWorldFromEntity.HasComponent(surface.Value) ||
                !localToWorldFromEntity.HasComponent(agent.DestinationSurface)
            )
            {
                return;
            }
            var agentPosition = localToWorldFromEntity[entity].Position;
            var worldPosition = agentPosition;
            // Destination is stored local to its surface; bring it into world space.
            var worldDestination = NavUtil.MultiplyPoint3x4(
                localToWorldFromEntity[agent.DestinationSurface].Value,
                agent.LocalDestination
            );
            var jumping = jumpingFromEntity.HasComponent(entity);
            if (jumping)
            {
                // While jumping, the query runs in reverse: from the landing
                // point back toward the agent.
                worldPosition = worldDestination;
                worldDestination = agentPosition;
            }
            // Rehydrate this thread's NavMeshQuery from its raw pointer.
            var navMeshQueryPointer = navMeshQueryPointerArray[nativeThreadIndex];
            UnsafeUtility.CopyPtrToStructure(navMeshQueryPointer.Value, out NavMeshQuery navMeshQuery);
            var status = navMeshQuery.BeginFindPath(
                navMeshQuery.MapLocation(worldPosition, Vector3.one * NavConstants.PATH_SEARCH_MAX, agent.TypeID),
                navMeshQuery.MapLocation(worldDestination, Vector3.one * NavConstants.PATH_SEARCH_MAX, agent.TypeID),
                NavMesh.AllAreas
            );
            // Iterate the query until it leaves the InProgress state.
            while (NavUtil.HasStatus(status, PathQueryStatus.InProgress))
            {
                status = navMeshQuery.UpdateFindPath(
                    NavConstants.ITERATION_MAX,
                    out int iterationsPerformed
                );
            }
            if (!NavUtil.HasStatus(status, PathQueryStatus.Success))
            {
                // Planning failed: stop planning, drop the destination, and
                // record the failing status for diagnosis.
                commandBuffer.RemoveComponent <NavPlanning>(entityInQueryIndex, entity);
                commandBuffer.RemoveComponent <NavNeedsDestination>(entityInQueryIndex, entity);
                commandBuffer.AddComponent <NavHasProblem>(entityInQueryIndex, entity, new NavHasProblem
                {
                    Value = status
                });
                return;
            }
            navMeshQuery.EndFindPath(out int pathLength);
            var polygonIdArray = new NativeArray <PolygonId>(
                NavConstants.PATH_NODE_MAX,
                Allocator.Temp
            );
            navMeshQuery.GetPathResult(polygonIdArray);
            var len = pathLength + 1;
            var straightPath = new NativeArray <NavMeshLocation>(len, Allocator.Temp);
            var straightPathFlags = new NativeArray <StraightPathFlags>(len, Allocator.Temp);
            var vertexSide = new NativeArray <float>(len, Allocator.Temp);
            var straightPathCount = 0;
            // Convert the polygon corridor into a corner-to-corner straight path.
            status = PathUtils.FindStraightPath(
                navMeshQuery,
                worldPosition,
                worldDestination,
                polygonIdArray,
                pathLength,
                ref straightPath,
                ref straightPathFlags,
                ref vertexSide,
                ref straightPathCount,
                NavConstants.PATH_NODE_MAX
            );
            // Lazily create the buffers on first use.
            var jumpBuffer = !jumpBufferFromEntity.HasComponent(entity) ?
                             commandBuffer.AddBuffer <NavJumpBufferElement>(entityInQueryIndex, entity) :
                             jumpBufferFromEntity[entity];
            var pathBuffer = !pathBufferFromEntity.HasComponent(entity) ?
                             commandBuffer.AddBuffer <NavPathBufferElement>(entityInQueryIndex, entity) :
                             pathBufferFromEntity[entity];
            if (jumping)
            {
                // Walk the straight path until the first invalid polygon; the
                // last valid point becomes the jump target.
                var lastValidPoint = float3.zero;
                for (int i = 0; i < straightPath.Length; ++i)
                {
                    if (navMeshQuery.IsValid(straightPath[i].polygon))
                    {
                        lastValidPoint = straightPath[i].position;
                    }
                    else
                    {
                        break;
                    }
                }
                // Store the jump target local to the destination surface.
                jumpBuffer.Add(
                    NavUtil.MultiplyPoint3x4(
                        math.inverse(localToWorldFromEntity[agent.DestinationSurface].Value),
                        (float3)lastValidPoint + agent.Offset
                    )
                );
                if (jumpBuffer.Length > 0)
                {
                    commandBuffer.RemoveComponent <NavPlanning>(entityInQueryIndex, entity);
                    commandBuffer.AddComponent <NavLerping>(entityInQueryIndex, entity);
                }
            }
            // NOTE(review): exact equality here is stricter than the flag-based
            // NavUtil.HasStatus check used above — confirm this is intentional.
            else if (status == PathQueryStatus.Success)
            {
                pathBuffer.Clear();
                agent.PathBufferIndex = 0;
                for (int i = 0; i < straightPathCount; ++i)
                {
                    // Store waypoints local to the agent's current surface.
                    pathBuffer.Add(
                        NavUtil.MultiplyPoint3x4(
                            math.inverse(localToWorldFromEntity[surface.Value].Value),
                            (float3)straightPath[i].position + agent.Offset
                        )
                    );
                }
                if (pathBuffer.Length > 0)
                {
                    commandBuffer.RemoveComponent <NavPlanning>(entityInQueryIndex, entity);
                    commandBuffer.AddComponent <NavLerping>(entityInQueryIndex, entity);
                }
            }
            polygonIdArray.Dispose();
            straightPath.Dispose();
            straightPathFlags.Dispose();
            vertexSide.Dispose();
        })
        .WithName("NavPlanJob")
        .ScheduleParallel();
    // Keep the NavMesh world from mutating while this job is in flight.
    NavMeshWorld.GetDefaultWorld().AddDependency(Dependency);
    barrier.AddJobHandleForProducer(Dependency);
}