/// <summary>
/// Adds <paramref name="componentType"/> to every entity in the chunks matched by
/// <paramref name="archetypeList"/> and <paramref name="filter"/>, after asserting the
/// add is legal both for the archetypes and for the concrete filtered chunk set.
/// </summary>
/// <param name="archetypeList">Matching archetypes whose chunks receive the component.</param>
/// <param name="filter">Query filter applied when gathering the chunk array.</param>
/// <param name="componentType">Component type to add.</param>
/// <param name="dependencyManager">Dependency manager passed through to the chunk-gathering call.</param>
public void AddComponentWithValidation(UnsafeMatchingArchetypePtrList archetypeList, EntityQueryFilter filter, ComponentType componentType, ComponentDependencyManager *dependencyManager)
{
    // Archetype-level validation happens before chunks are gathered.
    AssertCanAddComponent(archetypeList, componentType);

    using (var chunks = ChunkIterationUtility.CreateArchetypeChunkArray(archetypeList, Collections.Allocator.TempJob, ref filter, dependencyManager))
    {
        // Nothing matched the filter; no structural change needed.
        if (chunks.Length == 0)
        {
            return;
        }

        // Chunk-level validation on the exact filtered set being modified.
        AssertCanAddComponent(chunks, componentType);

        //@TODO the fast path for a chunk that contains a single entity is only possible if the chunk doesn't have a Locked Entity Order
        //but we should still be allowed to add zero sized components to chunks with a Locked Entity Order, even ones that only contain a single entity
        /*
         * if ((chunks.Length == 1) && (chunks[0].Count == 1))
         * {
         *     var entityPtr = (Entity*) chunks[0].m_Chunk->Buffer;
         *     StructuralChange.AddComponentEntity(EntityComponentStore, entityPtr, componentType.TypeIndex);
         * }
         * else
         * {
         */

        // Batch path: hand the raw chunk pointer array to the structural-change implementation.
        AddComponent((ArchetypeChunk *)NativeArrayUnsafeUtility.GetUnsafePtr(chunks), chunks.Length, componentType);

        /*
         * }
         */
    }
}
/// <summary>
/// Destroys every entity in the chunks matched by <paramref name="archetypeList"/> and
/// <paramref name="filter"/>, validating linked-entity-group integrity first and then
/// playing back managed-store changes. Profiler samples bracket each phase.
/// </summary>
/// <param name="archetypeList">Matching archetypes whose entities are destroyed.</param>
/// <param name="filter">Query filter applied when gathering the chunk array.</param>
internal void DestroyEntity(UnsafeMatchingArchetypePtrList archetypeList, EntityQueryFilter filter)
{
    Profiler.BeginSample("DestroyEntity(EntityQuery entityQueryFilter)");

    Profiler.BeginSample("GetAllMatchingChunks");
    var jobHandle = new JobHandle();
    // @TODO: Missing EntityQuery.SyncFilter
    using (var chunks = ComponentChunkIterator.CreateArchetypeChunkArray(archetypeList, Allocator.TempJob, out jobHandle, ref filter))
    {
        // The gather job must finish before the chunk array is read.
        jobHandle.Complete();
        Profiler.EndSample();

        if (chunks.Length != 0)
        {
            // Completes outstanding jobs / invalidates iterators before the structural change.
            BeforeStructuralChange();

            Profiler.BeginSample("EditorOnlyChecks");
            EntityComponentStore->AssertCanDestroy(chunks);
            EntityComponentStore->AssertWillDestroyAllInLinkedEntityGroup(chunks, GetArchetypeChunkBufferType <LinkedEntityGroup>(false));
            Profiler.EndSample();

            // #todo @macton DestroyEntities should support IJobChunk. But internal writes need to be handled.
            Profiler.BeginSample("DeleteChunks");
            new DestroyChunks { EntityComponentStore = EntityComponentStore, Chunks = chunks }.Run();
            Profiler.EndSample();

            // Flush managed component changes queued by the destroy above.
            Profiler.BeginSample("Managed Playback");
            ManagedComponentStore.Playback(ref EntityComponentStore->ManagedChangesTracker);
            Profiler.EndSample();
        }
    }

    Profiler.EndSample();
}
/// <summary>
/// Creates a NativeArray with all the chunks in a given archetype filtered by the provided EntityQueryFilter.
/// This function will not sync the needed types in the EntityQueryFilter so they have to be synced manually before calling this function.
/// </summary>
/// <param name="matchingArchetypes">List of matching archetypes.</param>
/// <param name="allocator">Allocator to use for the returned array.</param>
/// <param name="jobHandle">Handle to the job that fills the returned array; callers must complete it before reading.</param>
/// <param name="filter">Filter used to filter the resulting chunks.</param>
/// <param name="dependsOn">All jobs spawned will depend on this JobHandle.</param>
/// <returns>NativeArray of all the chunks in the matchingArchetypes list.</returns>
public static NativeArray <ArchetypeChunk> CreateArchetypeChunkArrayAsync(UnsafeMatchingArchetypePtrList matchingArchetypes, Allocator allocator, out JobHandle jobHandle, ref EntityQueryFilter filter, JobHandle dependsOn = default(JobHandle))
{
    var archetypeCount = matchingArchetypes.Length;

    // Prefix-sum pass: offsets[i] is where archetype i's chunks start in the dense output.
    var offsets = new NativeArray <int>(archetypeCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    var chunkCount = 0;
    {
        for (int i = 0; i < matchingArchetypes.Length; ++i)
        {
            var archetype = matchingArchetypes.Ptr[i]->Archetype;
            offsets[i] = chunkCount;
            chunkCount += archetype->Chunks.Count;
        }
    }

    if (!filter.RequiresMatchesFilter)
    {
        // Unfiltered fast path: every chunk is included, gather fully asynchronously.
        var chunks = new NativeArray <ArchetypeChunk>(chunkCount, allocator, NativeArrayOptions.UninitializedMemory);
        var gatherChunksJob = new GatherChunksJob
        {
            MatchingArchetypes = matchingArchetypes.Ptr,
            entityComponentStore = matchingArchetypes.entityComponentStore,
            Offsets = offsets,
            Chunks = chunks
        };
        jobHandle = gatherChunksJob.Schedule(archetypeCount, 1, dependsOn);

        return(chunks);
    }
    else
    {
        // Filtered path: gather into a sparse array sized for the worst case, then compact.
        var filteredCounts = new NativeArray <int>(archetypeCount + 1, Allocator.TempJob);
        var sparseChunks = new NativeArray <ArchetypeChunk>(chunkCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
        var gatherChunksJob = new GatherChunksWithFilteringJob
        {
            MatchingArchetypes = matchingArchetypes.Ptr,
            Filter = filter,
            Offsets = offsets,
            FilteredCounts = filteredCounts,
            SparseChunks = sparseChunks,
            entityComponentStore = matchingArchetypes.entityComponentStore
        };
        // The filtered counts are needed on the main thread below, so this gather must complete here.
        gatherChunksJob.Schedule(archetypeCount, 1, dependsOn).Complete();

        // accumulate filtered counts: filteredCounts[i] becomes the destination offset
        int totalChunks = 0;
        for (int i = 0; i < archetypeCount; ++i)
        {
            int currentCount = filteredCounts[i];
            filteredCounts[i] = totalChunks;
            totalChunks += currentCount;
        }
        filteredCounts[archetypeCount] = totalChunks;

        // Compact the sparse per-archetype results into the exactly-sized output array.
        var joinedChunks = new NativeArray <ArchetypeChunk>(totalChunks, allocator, NativeArrayOptions.UninitializedMemory);
        jobHandle = new JoinChunksJob
        {
            DestinationOffsets = filteredCounts,
            SparseChunks = sparseChunks,
            Offsets = offsets,
            JoinedChunks = joinedChunks
        }.Schedule(archetypeCount, 1);

        return(joinedChunks);
    }
}
/// <summary>
/// Destroys all entities matched by the given archetype list and query filter.
/// </summary>
/// <param name="archetypeList">Matching archetypes whose entities are destroyed.</param>
/// <param name="filter">Query filter selecting which chunks are affected.</param>
void DestroyEntity(UnsafeMatchingArchetypePtrList archetypeList, EntityQueryFilter filter)
{
    // All structural changes are funneled through the data access layer.
    m_EntityDataAccess.DestroyEntity(archetypeList, filter);
}
/// <summary>
/// Debugger-proxy constructor; captures the archetype pointer list whose contents the
/// debug view presents. Invoked positionally by the debugger, so the parameter rename
/// (PascalCase -> camelCase, per C# naming conventions) is caller-safe.
/// </summary>
/// <param name="matchingArchetypeList">The list to display in the debugger.</param>
public UnsafeMatchingArchetypePtrListDebugView(UnsafeMatchingArchetypePtrList matchingArchetypeList)
{
    m_MatchingArchetypeList = matchingArchetypeList;
}
/// <summary>
/// Destroys all entities matched by the given archetype list and query filter.
/// </summary>
/// <param name="archetypeList">Matching archetypes whose entities are destroyed.</param>
/// <param name="filter">Query filter selecting which chunks are affected.</param>
void DestroyEntity(UnsafeMatchingArchetypePtrList archetypeList, EntityQueryFilter filter)
{
    // Validate the access object first, then delegate the structural change to it.
    var dataAccess = GetCheckedEntityDataAccess();
    dataAccess->DestroyEntity(archetypeList, filter);
}
/// <summary>
/// Allocates one contiguous prefilter buffer (chunk array + per-chunk base-entity indices
/// + a trailing chunk count) and schedules or runs the job that fills it, with or without
/// filtering. Also derives the pointer Unity's deferred-array scheduling expects.
/// </summary>
/// <param name="unfilteredChunkCount">Upper bound on chunks; sizes the buffer.</param>
/// <param name="archetypes">Matching archetypes to gather chunks from.</param>
/// <param name="filter">Query filter; selects the filtering or non-filtering gather job.</param>
/// <param name="dependsOn">Dependency for the gather job when scheduled.</param>
/// <param name="mode">Batched = schedule the gather job; otherwise run it synchronously.</param>
/// <param name="prefilterDataArray">The whole buffer wrapped as a NativeArray so a later job can deallocate it.</param>
/// <param name="deferredCountData">Pointer formatted for ScheduleParallelForDeferArraySize (see note below).</param>
/// <returns>Handle to the gather job (default when it was run synchronously).</returns>
internal static JobHandle PreparePrefilteredChunkLists(int unfilteredChunkCount, UnsafeMatchingArchetypePtrList archetypes, EntityQueryFilter filter, JobHandle dependsOn, ScheduleMode mode, out NativeArray <byte> prefilterDataArray, out void *deferredCountData)
{
    // Allocate one buffer for all prefilter data and distribute it
    // We keep the full buffer as a "dummy array" so we can deallocate it later with [DeallocateOnJobCompletion]
    var sizeofChunkArray = sizeof(ArchetypeChunk) * unfilteredChunkCount;
    var sizeofIndexArray = sizeof(int) * unfilteredChunkCount;
    var prefilterDataSize = sizeofChunkArray + sizeofIndexArray + sizeof(int);

    var prefilterData = (byte *)UnsafeUtility.Malloc(prefilterDataSize, 64, Allocator.TempJob);
    prefilterDataArray = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray <byte>(prefilterData, prefilterDataSize, Allocator.TempJob);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref prefilterDataArray, AtomicSafetyHandle.Create());
#endif

    JobHandle prefilterHandle = default(JobHandle);

    if (filter.RequiresMatchesFilter)
    {
        var prefilteringJob = new GatherChunksAndOffsetsWithFilteringJob
        {
            Archetypes = archetypes,
            Filter = filter,
            PrefilterData = prefilterData,
            UnfilteredChunkCount = unfilteredChunkCount
        };
        if (mode == ScheduleMode.Batched)
        {
            prefilterHandle = prefilteringJob.Schedule(dependsOn);
        }
        else
        {
            prefilteringJob.Run();
        }
    }
    else
    {
        var gatherJob = new GatherChunksAndOffsetsJob
        {
            Archetypes = archetypes,
            PrefilterData = prefilterData,
            UnfilteredChunkCount = unfilteredChunkCount,
            entityComponentStore = archetypes.entityComponentStore
        };
        if (mode == ScheduleMode.Batched)
        {
            prefilterHandle = gatherJob.Schedule(dependsOn);
        }
        else
        {
            gatherJob.Run();
        }
    }

    // ScheduleParallelForDeferArraySize expects a ptr to a structure with a void* and a count.
    // It only uses the count, so this is safe to fudge
    deferredCountData = prefilterData + sizeofChunkArray + sizeofIndexArray;
    deferredCountData = (byte *)deferredCountData - sizeof(void *);

    return(prefilterHandle);
}
/// <summary>
/// Total number of chunks in a given MatchingArchetype list.
/// </summary>
/// <param name="matchingArchetypes">List of matching archetypes.</param>
/// <param name="filter">Filter applied per chunk; shared-component filters require all values to match (AND), change filters count a chunk if any watched type changed (OR).</param>
/// <returns>Number of chunks in a list of archetypes.</returns>
internal static int CalculateChunkCount(UnsafeMatchingArchetypePtrList matchingArchetypes, ref EntityQueryFilter filter)
{
    var totalChunkCount = 0;

    // If no filter, then fast path it
    if (!filter.RequiresMatchesFilter)
    {
        for (var m = matchingArchetypes.Length - 1; m >= 0; --m)
        {
            var match = matchingArchetypes.Ptr[m];
            totalChunkCount += match->Archetype->Chunks.Count;
        }

        return(totalChunkCount);
    }

    // Otherwise do filtering
    for (var m = matchingArchetypes.Length - 1; m >= 0; --m)
    {
        var match = matchingArchetypes.Ptr[m];
        var archetype = match->Archetype;
        int chunkCount = archetype->Chunks.Count;

        if (filter.Type == FilterType.SharedComponent)
        {
            // Map the query-relative component index to the archetype's shared-component slot.
            var indexInEntityQuery1 = filter.Shared.IndexInEntityQuery[0];
            var sharedComponentIndex1 = filter.Shared.SharedComponentIndex[0];
            var componentIndexInChunk1 = match->IndexInArchetype[indexInEntityQuery1] - archetype->FirstSharedComponent;
            var sharedComponents1 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk1);

            if (filter.Shared.Count == 1)
            {
                // Single shared-component filter: count chunks with the matching value.
                for (var i = 0; i < chunkCount; ++i)
                {
                    if (sharedComponents1[i] == sharedComponentIndex1)
                    {
                        totalChunkCount++;
                    }
                }
            }
            else
            {
                // Two shared-component filters: both values must match (AND).
                var indexInEntityQuery2 = filter.Shared.IndexInEntityQuery[1];
                var sharedComponentIndex2 = filter.Shared.SharedComponentIndex[1];
                var componentIndexInChunk2 = match->IndexInArchetype[indexInEntityQuery2] - archetype->FirstSharedComponent;
                var sharedComponents2 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk2);

                for (var i = 0; i < chunkCount; ++i)
                {
                    if (sharedComponents1[i] == sharedComponentIndex1 && sharedComponents2[i] == sharedComponentIndex2)
                    {
                        totalChunkCount++;
                    }
                }
            }
        }
        else
        {
            // Changed-version filter.
            var indexInEntityQuery1 = filter.Changed.IndexInEntityQuery[0];
            var componentIndexInChunk1 = match->IndexInArchetype[indexInEntityQuery1];
            var changeVersions1 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk1);

            var requiredVersion = filter.RequiredChangeVersion;
            if (filter.Changed.Count == 1)
            {
                for (var i = 0; i < chunkCount; ++i)
                {
                    if (ChangeVersionUtility.DidChange(changeVersions1[i], requiredVersion))
                    {
                        totalChunkCount++;
                    }
                }
            }
            else
            {
                // Two change filters: a chunk counts if either watched type changed (OR).
                var indexInEntityQuery2 = filter.Changed.IndexInEntityQuery[1];
                var componentIndexInChunk2 = match->IndexInArchetype[indexInEntityQuery2];
                var changeVersions2 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk2);

                for (var i = 0; i < chunkCount; ++i)
                {
                    if (ChangeVersionUtility.DidChange(changeVersions1[i], requiredVersion) || ChangeVersionUtility.DidChange(changeVersions2[i], requiredVersion))
                    {
                        totalChunkCount++;
                    }
                }
            }
        }
    }

    return(totalChunkCount);
}
/// <summary>
/// Total number of entities contained in a given MatchingArchetype list.
/// </summary>
/// <param name="matchingArchetypes">List of matching archetypes.</param>
/// <param name="filter">EntityQueryFilter to use when calculating total number of entities. Shared-component filters require all values to match (AND); change filters count a chunk if any watched type changed (OR).</param>
/// <returns>Number of entities</returns>
public static int CalculateEntityCount(UnsafeMatchingArchetypePtrList matchingArchetypes, ref EntityQueryFilter filter)
{
    var filterCopy = filter; // Necessary to avoid a nasty compiler error cause by fixed buffer types

    var length = 0;
    if (!filter.RequiresMatchesFilter)
    {
        // Fast path: archetypes already track their total entity count.
        for (var m = matchingArchetypes.Length - 1; m >= 0; --m)
        {
            var match = matchingArchetypes.Ptr[m];
            length += match->Archetype->EntityCount;
        }
    }
    else
    {
        for (var m = matchingArchetypes.Length - 1; m >= 0; --m)
        {
            var match = matchingArchetypes.Ptr[m];
            if (match->Archetype->EntityCount <= 0)
            {
                continue;
            }

            int filteredCount = 0;
            var archetype = match->Archetype;
            int chunkCount = archetype->Chunks.Count;
            // Per-chunk entity counts; summed for each chunk that passes the filter.
            var chunkEntityCountArray = archetype->Chunks.GetChunkEntityCountArray();

            if (filter.Type == FilterType.SharedComponent)
            {
                // Map the query-relative component index to the archetype's shared-component slot.
                var indexInEntityQuery0 = filterCopy.Shared.IndexInEntityQuery[0];
                var sharedComponentIndex0 = filterCopy.Shared.SharedComponentIndex[0];
                var componentIndexInChunk0 = match->IndexInArchetype[indexInEntityQuery0] - archetype->FirstSharedComponent;
                var sharedComponents0 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk0);

                if (filter.Shared.Count == 1)
                {
                    for (var i = 0; i < chunkCount; ++i)
                    {
                        if (sharedComponents0[i] == sharedComponentIndex0)
                        {
                            filteredCount += chunkEntityCountArray[i];
                        }
                    }
                }
                else
                {
                    // Two shared-component filters: both values must match (AND).
                    var indexInEntityQuery1 = filterCopy.Shared.IndexInEntityQuery[1];
                    var sharedComponentIndex1 = filterCopy.Shared.SharedComponentIndex[1];
                    var componentIndexInChunk1 = match->IndexInArchetype[indexInEntityQuery1] - archetype->FirstSharedComponent;
                    var sharedComponents1 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk1);

                    for (var i = 0; i < chunkCount; ++i)
                    {
                        if (sharedComponents0[i] == sharedComponentIndex0 && sharedComponents1[i] == sharedComponentIndex1)
                        {
                            filteredCount += chunkEntityCountArray[i];
                        }
                    }
                }
            }
            else
            {
                // Changed-version filter.
                var indexInEntityQuery0 = filterCopy.Changed.IndexInEntityQuery[0];
                var componentIndexInChunk0 = match->IndexInArchetype[indexInEntityQuery0];
                var changeVersions0 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk0);

                var requiredVersion = filter.RequiredChangeVersion;
                if (filter.Changed.Count == 1)
                {
                    for (var i = 0; i < chunkCount; ++i)
                    {
                        if (ChangeVersionUtility.DidChange(changeVersions0[i], requiredVersion))
                        {
                            filteredCount += chunkEntityCountArray[i];
                        }
                    }
                }
                else
                {
                    // Two change filters: a chunk counts if either watched type changed (OR).
                    var indexInEntityQuery1 = filterCopy.Changed.IndexInEntityQuery[1];
                    var componentIndexInChunk1 = match->IndexInArchetype[indexInEntityQuery1];
                    var changeVersions1 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk1);

                    for (var i = 0; i < chunkCount; ++i)
                    {
                        if (ChangeVersionUtility.DidChange(changeVersions0[i], requiredVersion) || ChangeVersionUtility.DidChange(changeVersions1[i], requiredVersion))
                        {
                            filteredCount += chunkEntityCountArray[i];
                        }
                    }
                }
            }

            length += filteredCount;
        }
    }

    return(length);
}