/// <summary>
/// Swaps the components of the entity at <paramref name="leftIndex"/> in <paramref name="leftChunk"/>
/// with the entity at <paramref name="rightIndex"/> in <paramref name="rightChunk"/>,
/// after verifying this container may be written to.
/// </summary>
public void SwapComponents(ArchetypeChunk leftChunk, int leftIndex, ArchetypeChunk rightChunk, int rightIndex)
{
    // Validate write access first; the actual swap is delegated to the data access layer.
    CheckWriteAccess();
    m_EntityDataAccess.SwapComponents(leftChunk, leftIndex, rightChunk, rightIndex);
}
/// <summary>
/// Debugger proxy constructor: captures the chunk whose contents this view exposes.
/// </summary>
public ArchetypeChunkDebugView(ArchetypeChunk ArchetypeChunk)
{
    this.m_ArchetypeChunk = ArchetypeChunk;
}
// Single-threaded prefilter job: walks every matching archetype, applies the query's
// shared-component or change-version filter to each chunk, and writes the surviving
// chunks plus a running first-entity offset for each into PrefilterData.
// PrefilterData layout (as used here): [ArchetypeChunk x UnfilteredChunkCount]
// [int x UnfilteredChunkCount] [int filteredChunkCount] — the int region is compacted
// down to sit immediately after the filtered chunk array before the count is written.
public void Execute()
{
    // Chunk array first, then the per-chunk base entity indices.
    var chunks = (ArchetypeChunk *)PrefilterData;
    var entityIndices = (int *)(chunks + UnfilteredChunkCount);
    var filter = Filter;
    var filteredChunkCount = 0;
    var filteredEntityOffset = 0;
    // NOTE(review): archetypes are visited in reverse order here — confirm no caller
    // depends on a particular chunk ordering in the prefiltered output.
    for (var m = Archetypes.Count - 1; m >= 0; --m)
    {
        var match = Archetypes.p[m];
        // Skip archetypes that currently contain no entities.
        if (match->Archetype->EntityCount <= 0) { continue; }
        var archetype = match->Archetype;
        int chunkCount = archetype->Chunks.Count;
        var chunkEntityCountArray = archetype->Chunks.GetChunkEntityCountArray();
        if (filter.Type == FilterType.SharedComponent)
        {
            // Shared-component filter: keep chunks whose shared component value index
            // matches the filter's requested index (one or two filtered components).
            var indexInComponentGroup0 = filter.Shared.IndexInComponentGroup[0];
            var sharedComponentIndex0 = filter.Shared.SharedComponentIndex[0];
            // Shared component value arrays are indexed relative to the archetype's
            // first shared component, hence the subtraction.
            var componentIndexInChunk0 = match->IndexInArchetype[indexInComponentGroup0] - archetype->FirstSharedComponent;
            var sharedComponents0 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk0);
            if (filter.Shared.Count == 1)
            {
                for (var i = 0; i < chunkCount; ++i)
                {
                    if (sharedComponents0[i] == sharedComponentIndex0)
                    {
                        chunks[filteredChunkCount] = new ArchetypeChunk { m_Chunk = archetype->Chunks.p[i] };
                        // Record where this chunk's entities start in the filtered entity range.
                        entityIndices[filteredChunkCount++] = filteredEntityOffset;
                        filteredEntityOffset += chunkEntityCountArray[i];
                    }
                }
            }
            else
            {
                // Two shared-component filters: chunk must match both values.
                var indexInComponentGroup1 = filter.Shared.IndexInComponentGroup[1];
                var sharedComponentIndex1 = filter.Shared.SharedComponentIndex[1];
                var componentIndexInChunk1 = match->IndexInArchetype[indexInComponentGroup1] - archetype->FirstSharedComponent;
                var sharedComponents1 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk1);
                for (var i = 0; i < chunkCount; ++i)
                {
                    if (sharedComponents0[i] == sharedComponentIndex0 && sharedComponents1[i] == sharedComponentIndex1)
                    {
                        chunks[filteredChunkCount] = new ArchetypeChunk { m_Chunk = archetype->Chunks.p[i] };
                        entityIndices[filteredChunkCount++] = filteredEntityOffset;
                        filteredEntityOffset += chunkEntityCountArray[i];
                    }
                }
            }
        }
        else
        {
            // Change-version filter: keep chunks whose tracked component's change version
            // differs from the filter's required version (one or two tracked components).
            var indexInComponentGroup0 = filter.Changed.IndexInComponentGroup[0];
            var componentIndexInChunk0 = match->IndexInArchetype[indexInComponentGroup0];
            var changeVersions0 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk0);
            var requiredVersion = filter.RequiredChangeVersion;
            if (filter.Changed.Count == 1)
            {
                for (var i = 0; i < chunkCount; ++i)
                {
                    if (ChangeVersionUtility.DidChange(changeVersions0[i], requiredVersion))
                    {
                        chunks[filteredChunkCount] = new ArchetypeChunk { m_Chunk = archetype->Chunks.p[i] };
                        entityIndices[filteredChunkCount++] = filteredEntityOffset;
                        filteredEntityOffset += chunkEntityCountArray[i];
                    }
                }
            }
            else
            {
                // Two tracked components: a change in either one keeps the chunk.
                var indexInComponentGroup1 = filter.Changed.IndexInComponentGroup[1];
                var componentIndexInChunk1 = match->IndexInArchetype[indexInComponentGroup1];
                var changeVersions1 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk1);
                for (var i = 0; i < chunkCount; ++i)
                {
                    if (ChangeVersionUtility.DidChange(changeVersions0[i], requiredVersion) || ChangeVersionUtility.DidChange(changeVersions1[i], requiredVersion))
                    {
                        chunks[filteredChunkCount] = new ArchetypeChunk { m_Chunk = archetype->Chunks.p[i] };
                        entityIndices[filteredChunkCount++] = filteredEntityOffset;
                        filteredEntityOffset += chunkEntityCountArray[i];
                    }
                }
            }
        }
    }
    // Compact: move the filteredChunkCount entity indices (which were written at the
    // start of the int region, i.e. at chunks + UnfilteredChunkCount) down so they sit
    // immediately after the filtered chunk array. MemMove is used because the regions
    // may overlap.
    UnsafeUtility.MemMove(chunks + filteredChunkCount, chunks + UnfilteredChunkCount, filteredChunkCount * sizeof(int));
    // The final int slot (past the full-size index region) receives the filtered count.
    var chunkCounter = entityIndices + UnfilteredChunkCount;
    *chunkCounter = filteredChunkCount;
}
// Parallel filter job: for the matching archetype at `index`, copies every chunk that
// passes the query's shared-component or change-version filter into SparseChunks,
// starting at the precomputed per-archetype write offset, and records how many passed.
//
// BUG FIX: in the original, each assignment's statement terminator was misplaced
// outside the closing brace of its if-block ("… = new ArchetypeChunk { … } } ;"),
// which is a syntax error (CS1002, ';' expected). The semicolons now terminate the
// assignment statements; behavior is otherwise unchanged.
public void Execute(int index)
{
    var filter = Filter;
    int filteredCount = 0;
    var match = MatchingArchetypes[index];
    var archetype = match->Archetype;
    int chunkCount = archetype->Chunks.Count;
    // Each archetype writes into its own pre-reserved slice of SparseChunks.
    var writeIndex = Offsets[index];
    var archetypeChunks = archetype->Chunks.p;
    if (filter.Type == FilterType.SharedComponent)
    {
        // Shared-component filter (one or two filtered components).
        var indexInComponentGroup1 = filter.Shared.IndexInComponentGroup[0];
        var sharedComponentIndex1 = filter.Shared.SharedComponentIndex[0];
        // Shared component value arrays are indexed relative to the archetype's
        // first shared component, hence the subtraction.
        var componentIndexInChunk1 = match->IndexInArchetype[indexInComponentGroup1] - archetype->FirstSharedComponent;
        var sharedComponents1 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk1);
        if (filter.Shared.Count == 1)
        {
            for (var i = 0; i < chunkCount; ++i)
            {
                if (sharedComponents1[i] == sharedComponentIndex1)
                {
                    SparseChunks[writeIndex + filteredCount++] = new ArchetypeChunk { m_Chunk = archetypeChunks[i] };
                }
            }
        }
        else
        {
            // Two shared-component filters: chunk must match both values.
            var indexInComponentGroup2 = filter.Shared.IndexInComponentGroup[1];
            var sharedComponentIndex2 = filter.Shared.SharedComponentIndex[1];
            var componentIndexInChunk2 = match->IndexInArchetype[indexInComponentGroup2] - archetype->FirstSharedComponent;
            var sharedComponents2 = archetype->Chunks.GetSharedComponentValueArrayForType(componentIndexInChunk2);
            for (var i = 0; i < chunkCount; ++i)
            {
                if (sharedComponents1[i] == sharedComponentIndex1 && sharedComponents2[i] == sharedComponentIndex2)
                {
                    SparseChunks[writeIndex + filteredCount++] = new ArchetypeChunk { m_Chunk = archetypeChunks[i] };
                }
            }
        }
    }
    else
    {
        // Change-version filter (one or two tracked components).
        var indexInComponentGroup1 = filter.Changed.IndexInComponentGroup[0];
        var componentIndexInChunk1 = match->IndexInArchetype[indexInComponentGroup1];
        var changeVersions1 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk1);
        var requiredVersion = filter.RequiredChangeVersion;
        if (filter.Changed.Count == 1)
        {
            for (var i = 0; i < chunkCount; ++i)
            {
                if (ChangeVersionUtility.DidChange(changeVersions1[i], requiredVersion))
                {
                    SparseChunks[writeIndex + filteredCount++] = new ArchetypeChunk { m_Chunk = archetypeChunks[i] };
                }
            }
        }
        else
        {
            // Two tracked components: a change in either one keeps the chunk.
            var indexInComponentGroup2 = filter.Changed.IndexInComponentGroup[1];
            var componentIndexInChunk2 = match->IndexInArchetype[indexInComponentGroup2];
            var changeVersions2 = archetype->Chunks.GetChangeVersionArrayForType(componentIndexInChunk2);
            for (var i = 0; i < chunkCount; ++i)
            {
                if (ChangeVersionUtility.DidChange(changeVersions1[i], requiredVersion) || ChangeVersionUtility.DidChange(changeVersions2[i], requiredVersion))
                {
                    SparseChunks[writeIndex + filteredCount++] = new ArchetypeChunk { m_Chunk = archetypeChunks[i] };
                }
            }
        }
    }
    // Publish how many chunks from this archetype survived the filter.
    FilteredCounts[index] = filteredCount;
}
}
/// <summary>
/// Swaps the components of the entity at <paramref name="leftIndex"/> in <paramref name="leftChunk"/>
/// with the entity at <paramref name="rightIndex"/> in <paramref name="rightChunk"/>.
/// </summary>
public void SwapComponents(ArchetypeChunk leftChunk, int leftIndex, ArchetypeChunk rightChunk, int rightIndex)
{
    // Obtain validated access to the underlying entity data, then delegate the swap.
    var access = GetCheckedEntityDataAccess();
    access->SwapComponents(leftChunk, leftIndex, rightChunk, rightIndex);
}
/// <summary>
/// Swaps the components of two entities.
/// </summary>
/// <remarks>
/// The entities must have the same components. However, this function can swap the components of entities in
/// different worlds, so they do not need to have identical archetype instances.
///
/// **Important:** This function creates a sync point, which means that the EntityManager waits for all
/// currently running Jobs to complete before swapping the components and no additional Jobs can start before
/// the function is finished. A sync point can cause a drop in performance because the ECS framework may not
/// be able to make use of the processing power of all available cores.
/// </remarks>
/// <param name="leftChunk">A chunk containing one of the entities to swap.</param>
/// <param name="leftIndex">The index within the `leftChunk` of the entity and components to swap.</param>
/// <param name="rightChunk">The chunk containing the other entity to swap. This chunk can be the same as
/// the `leftChunk`. It also does not need to be in the same World as `leftChunk`.</param>
/// <param name="rightIndex">The index within the `rightChunk` of the entity and components to swap.</param>
public void SwapComponents(ArchetypeChunk leftChunk, int leftIndex, ArchetypeChunk rightChunk, int rightIndex)
{
    // Creates the sync point described above before mutating chunk data.
    BeforeStructuralChange();
    // Swap a single entity's components (count = 1); both chunks' change versions are
    // bumped to the current GlobalSystemVersion.
    ChunkDataUtility.SwapComponents(leftChunk.m_Chunk, leftIndex, rightChunk.m_Chunk, rightIndex, 1, GlobalSystemVersion, GlobalSystemVersion);
}
// Intentionally a no-op in this implementation — presumably the counterpart of a
// LockChunkOrder that is also a no-op here, or order-locking is handled elsewhere.
// NOTE(review): confirm this is meant to do nothing rather than clear
// ChunkFlags.LockedEntityOrder like other implementations in this file do.
public void UnlockChunkOrder(ArchetypeChunk chunk) { }
// This must be run after chunks have been remapped since FreeChunksBySharedComponents needs the shared component
// indices in the chunks to be remapped.
// Moves all chunks of one source archetype into the corresponding destination archetype:
// transfers chunk ownership, remaps shared-component indices, rebuilds free-chunk lists,
// bumps change versions, copies per-chunk entity counts, fixes up ChunkHeader back-pointers
// for chunk components, and finally transfers the entity count and disposes the source.
public void Execute(int index)
{
    var srcArchetype = remapArchetypes[index].srcArchetype;
    int srcChunkCount = srcArchetype->Chunks.Count;
    var dstArchetype = remapArchetypes[index].dstArchetype;
    // Capture the destination's chunk count BEFORE the move; all per-chunk arrays for
    // the newly appended chunks start at this offset.
    int dstChunkCount = dstArchetype->Chunks.Count;
    dstArchetype->Chunks.MoveChunks(srcArchetype->Chunks);
    if (srcArchetype->NumSharedComponents == 0)
    {
        // No shared components: partially-full chunks are tracked in a flat list;
        // append the source's list to the destination's and empty the source.
        if (srcArchetype->ChunksWithEmptySlots.Length != 0)
        {
            dstArchetype->ChunksWithEmptySlots.SetCapacity(
                srcArchetype->ChunksWithEmptySlots.Length + dstArchetype->ChunksWithEmptySlots.Length);
            dstArchetype->ChunksWithEmptySlots.AddRange(srcArchetype->ChunksWithEmptySlots);
            srcArchetype->ChunksWithEmptySlots.Resize(0);
        }
    }
    else
    {
        // Remap the shared-component value indices of the moved chunks into the
        // destination world's index space.
        // NOTE(review): iterates dstArchetype->NumSharedComponents while reading the
        // source's arrays at the same type index — assumes src and dst archetypes have
        // the same shared-component layout; confirm against the caller's pairing logic.
        for (int i = 0; i < dstArchetype->NumSharedComponents; ++i)
        {
            var srcArray = srcArchetype->Chunks.GetSharedComponentValueArrayForType(i);
            // Write starting at the first moved chunk (offset dstChunkCount).
            var dstArray = dstArchetype->Chunks.GetSharedComponentValueArrayForType(i) + dstChunkCount;
            for (int j = 0; j < srcChunkCount; ++j)
            {
                int srcIndex = srcArray[j];
                int remapped = remapShared[srcIndex];
                dstArray[j] = remapped;
            }
        }
        // Re-register moved chunks that still have free capacity (must happen after the
        // shared-component remap above — see the comment at the top of this method).
        for (int i = 0; i < srcChunkCount; ++i)
        {
            var chunk = dstArchetype->Chunks[i + dstChunkCount];
            if (chunk->Count < chunk->Capacity)
            {
                dstArchetype->FreeChunksBySharedComponents.Add(dstArchetype->Chunks[i + dstChunkCount]);
            }
        }
        // Reset the source's free-chunk map; its chunks now belong to the destination.
        srcArchetype->FreeChunksBySharedComponents.Init(16);
    }
    var globalSystemVersion = dstEntityComponentStore->GlobalSystemVersion;
    // Set change versions to GlobalSystemVersion
    for (int iType = 0; iType < dstArchetype->TypesCount; ++iType)
    {
        var dstArray = dstArchetype->Chunks.GetChangeVersionArrayForType(iType) + dstChunkCount;
        for (int i = 0; i < srcChunkCount; ++i)
        {
            dstArray[i] = globalSystemVersion;
        }
    }
    // Copy chunk count array
    var dstCountArray = dstArchetype->Chunks.GetChunkEntityCountArray() + dstChunkCount;
    UnsafeUtility.MemCpy(dstCountArray,
        srcArchetype->Chunks.GetChunkEntityCountArray(), sizeof(int) * srcChunkCount);
    // Fix up chunk pointers in ChunkHeaders
    if (dstArchetype->HasChunkComponents)
    {
        var metaArchetype = dstArchetype->MetaChunkArchetype;
        var indexInTypeArray = ChunkDataUtility.GetIndexInTypeArray(metaArchetype, chunkHeaderType);
        var offset = metaArchetype->Offsets[indexInTypeArray];
        var sizeOf = metaArchetype->SizeOfs[indexInTypeArray];
        for (int i = 0; i < srcChunkCount; ++i)
        {
            // Set chunk header without bumping change versions since they are zeroed when processing meta chunk
            // modifying them here would be a race condition
            var chunk = dstArchetype->Chunks[i + dstChunkCount];
            var metaChunkEntity = chunk->metaChunkEntity;
            var metaEntityInChunk = dstEntityComponentStore->GetEntityInChunk(metaChunkEntity);
            // Locate the ChunkHeader component of the meta-entity directly in its chunk buffer.
            var chunkHeader = (ChunkHeader *)(metaEntityInChunk.Chunk->Buffer + (offset + sizeOf * metaEntityInChunk.IndexInChunk));
            chunkHeader->ArchetypeChunk = new ArchetypeChunk(chunk, dstEntityComponentStore);
        }
    }
    // Transfer the entity count and retire the source archetype's chunk storage.
    dstArchetype->EntityCount += srcArchetype->EntityCount;
    srcArchetype->Chunks.Dispose();
    srcArchetype->EntityCount = 0;
}
// Intentionally empty batch callback — presumably a default/stub implementation of an
// IJobEntityBatch-style interface; NOTE(review): confirm this stub is deliberate.
public void Execute(ArchetypeChunk batchInChunk, int batchIndex) { }
/// <summary>
/// Unlocks a chunk
/// </summary>
/// <param name="chunk">The chunk to unlock.</param>
public void UnlockChunk(ArchetypeChunk chunk)
{
    // Clear the Locked flag on this single chunk. The address of a local copy is
    // passed; the parameter is itself a by-value copy, so behavior is unchanged.
    var chunkToUnlock = chunk;
    EntityComponentStore->UnlockChunks(&chunkToUnlock, 1, ChunkFlags.Locked);
}
// Core batch-dispatch loop for the job producer: resolves which batches this worker
// should process (either the whole range, or ranges stolen from the scheduler when
// running in parallel), applies chunk filtering, and invokes the user's Execute for
// each surviving batch. Three dispatch modes: an explicit prebuilt batch list,
// one batch per chunk, or several batches per chunk.
internal unsafe static void ExecuteInternal(
    ref JobEntityBatchWrapper <T> jobWrapper,
    IntPtr bufferRangePatchData,
    ref JobRanges ranges,
    int jobIndex)
{
    var chunks = jobWrapper.CachedChunks;
    var prebuiltBatches = (ArchetypeChunk *)jobWrapper.PrebuiltBatchList.Ptr;
    bool isParallel = jobWrapper.IsParallel == 1;
    bool isFiltering = jobWrapper.Filter.RequiresMatchesFilter;
    while (true)
    {
        // Default: process everything in one pass (non-parallel case).
        int beginBatchIndex = 0;
        int endBatchIndex = jobWrapper.UsePrebuiltBatchList == 1 ? jobWrapper.PrebuiltBatchList.Length : chunks.Length;
        // If we are running the job in parallel, steal some work.
        if (isParallel)
        {
            // If we have no range to steal, exit the loop.
            if (!JobsUtility.GetWorkStealingRange(ref ranges, jobIndex, out beginBatchIndex, out endBatchIndex))
            {
                break;
            }
            JobsUtility.PatchBufferMinMaxRanges(bufferRangePatchData, UnsafeUtility.AddressOf(ref jobWrapper), 0, 0);
        }
        // Do the actual user work.
        if (jobWrapper.UsePrebuiltBatchList == 1)
        {
            // Caller supplied explicit batches; each batch knows its chunk, and its
            // matching-archetype index is looked up for filtering.
            for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
            {
                var batch = prebuiltBatches[batchIndex];
                if (isFiltering && !batch.m_Chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[jobWrapper.PrebuiltBatchListMatchingArchetypeIndices.Ptr[batchIndex]], ref jobWrapper.Filter))
                {
                    continue;
                }
                Assert.AreNotEqual(0, batch.Count);
                jobWrapper.JobData.Execute(batch, batchIndex);
            }
        }
        else
        {
            if (jobWrapper.JobsPerChunk == 1)
            {
                // 1 batch per chunk, with/without filtering
                for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                {
                    var chunkIndex = batchIndex;
                    var chunk = chunks.Ptr[chunkIndex];
                    if (isFiltering && !chunk->MatchesFilter(jobWrapper.MatchingArchetypes.Ptr[chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]], ref jobWrapper.Filter))
                    {
                        continue;
                    }
                    // The whole chunk is one batch.
                    var batch = new ArchetypeChunk(chunk, chunks.EntityComponentStore);
                    Assert.AreNotEqual(0, batch.Count);
                    jobWrapper.JobData.Execute(batch, batchIndex);
                }
            }
            else
            {
                // 2+ batches per chunk, with/without filtering
                // This is the most general case; if only one code path survives, it should be this one.
                for (int batchIndex = beginBatchIndex; batchIndex < endBatchIndex; ++batchIndex)
                {
                    // Map the flat batch index to (chunk, sub-batch within chunk).
                    var chunkIndex = batchIndex / jobWrapper.JobsPerChunk;
                    var batchIndexInChunk = batchIndex % jobWrapper.JobsPerChunk;
                    var chunk = chunks.Ptr[chunkIndex];
                    if (isFiltering && !chunk->MatchesFilter(
                        jobWrapper.MatchingArchetypes.Ptr[
                            chunks.PerChunkMatchingArchetypeIndex.Ptr[chunkIndex]],
                        ref jobWrapper.Filter))
                    {
                        continue;
                    }
                    // EntityBatchFromChunk returns false for empty sub-batches (e.g.
                    // when the chunk has fewer entities than sub-batches); skip those.
                    if (ArchetypeChunk.EntityBatchFromChunk(chunk, chunk->Count, jobWrapper.JobsPerChunk, batchIndexInChunk, chunks.EntityComponentStore, out var batch))
                    {
                        jobWrapper.JobData.Execute(batch, batchIndex);
                    }
                }
            }
        }
        // If we are not running in parallel, our job is done.
        if (!isParallel)
        {
            break;
        }
    }
}
/// <summary>
/// Locks the entity order of a chunk so entities cannot be moved within it.
/// </summary>
/// <param name="chunk">The chunk whose entity order to lock.</param>
public void LockChunkOrder(ArchetypeChunk chunk)
{
    // Set the LockedEntityOrder flag on this single chunk. The address of a local
    // copy is passed; the parameter is itself a by-value copy, so behavior is unchanged.
    var chunkToLock = chunk;
    EntityComponentStore->LockChunks(&chunkToLock, 1, ChunkFlags.LockedEntityOrder);
}
// Intentionally empty batch callback (variant that also receives the index of the
// batch's first entity within the query) — presumably a default/stub implementation;
// NOTE(review): confirm this stub is deliberate.
public void Execute(ArchetypeChunk batchInChunk, int batchIndex, int indexOfFirstEntityInQuery) { }
// Single-threaded prefilter job for batched iteration: walks every matching archetype,
// slices each (optionally filter-matching) chunk into up to BatchesPerChunk batches,
// and writes the surviving batches plus a running first-entity index for each into
// PrefilterData. The final int slot receives the number of batches produced.
// PrefilterData layout (as used here): [ArchetypeChunk x FilteredChunkCount*BatchesPerChunk]
// [int x FilteredChunkCount*BatchesPerChunk] [int batchCount].
public void Execute()
{
    // Batch array first, then the per-batch base entity indices.
    var batches = (ArchetypeChunk *)PrefilterData;
    var entityIndices = (int *)(batches + FilteredChunkCount * BatchesPerChunk);
    var filteredBatchCounter = 0;
    var entityIndexAggregate = 0;
    if (BatchesPerChunk == 1)
    {
        if (Filter.RequiresMatchesFilter)
        {
            // one batch per chunk, filtering enabled
            for (var m = 0; m < MatchingArchetypes.Length; ++m)
            {
                var match = MatchingArchetypes.Ptr[m];
                // Skip archetypes that currently contain no entities.
                if (match->Archetype->EntityCount <= 0)
                {
                    continue;
                }
                var archetype = match->Archetype;
                int chunkCount = archetype->Chunks.Count;
                var chunkEntityCountArray = archetype->Chunks.GetChunkEntityCountArray();
                for (int chunkIndex = 0; chunkIndex < chunkCount; ++chunkIndex)
                {
                    var chunk = archetype->Chunks[chunkIndex];
                    if (match->ChunkMatchesFilter(chunkIndex, ref Filter))
                    {
                        // The whole chunk is one batch.
                        var batch = new ArchetypeChunk(chunk, EntityComponentStore);
                        batches[filteredBatchCounter] = batch;
                        entityIndices[filteredBatchCounter] = entityIndexAggregate;
                        ++filteredBatchCounter;
                        entityIndexAggregate += chunkEntityCountArray[chunkIndex];
                    }
                }
            }
        }
        else
        {
            // one batch per chunk, filtering disabled
            for (var m = 0; m < MatchingArchetypes.Length; ++m)
            {
                var match = MatchingArchetypes.Ptr[m];
                if (match->Archetype->EntityCount <= 0)
                {
                    continue;
                }
                var archetype = match->Archetype;
                int chunkCount = archetype->Chunks.Count;
                var chunkEntityCountArray = archetype->Chunks.GetChunkEntityCountArray();
                for (int chunkIndex = 0; chunkIndex < chunkCount; ++chunkIndex)
                {
                    var chunk = archetype->Chunks[chunkIndex];
                    var batch = new ArchetypeChunk(chunk, EntityComponentStore);
                    batches[filteredBatchCounter] = batch;
                    entityIndices[filteredBatchCounter] = entityIndexAggregate;
                    ++filteredBatchCounter;
                    entityIndexAggregate += chunkEntityCountArray[chunkIndex];
                }
            }
        }
    }
    else
    {
        if (Filter.RequiresMatchesFilter)
        {
            // 2+ batches per chunk, filtering enabled.
            // This is the most general case; if only one code path survives, it should be this one.
            for (var m = 0; m < MatchingArchetypes.Length; ++m)
            {
                var match = MatchingArchetypes.Ptr[m];
                if (match->Archetype->EntityCount <= 0)
                {
                    continue;
                }
                var archetype = match->Archetype;
                int chunkCount = archetype->Chunks.Count;
                var chunkEntityCountArray = archetype->Chunks.GetChunkEntityCountArray();
                for (int chunkIndex = 0; chunkIndex < chunkCount; ++chunkIndex)
                {
                    if (match->ChunkMatchesFilter(chunkIndex, ref Filter))
                    {
                        var chunk = archetype->Chunks[chunkIndex];
                        var chunkEntityCount = chunkEntityCountArray[chunkIndex];
                        for (int batchIndex = 0; batchIndex < BatchesPerChunk; ++batchIndex)
                        {
                            // EntityBatchFromChunk returns false for empty sub-batches
                            // (chunk has fewer entities than sub-batches); skip those.
                            if (ArchetypeChunk.EntityBatchFromChunk(chunk, chunkEntityCount, BatchesPerChunk, batchIndex, EntityComponentStore, out var batch))
                            {
                                batches[filteredBatchCounter] = batch;
                                entityIndices[filteredBatchCounter] = entityIndexAggregate;
                                ++filteredBatchCounter;
                                entityIndexAggregate += batch.Count;
                            }
                        }
                    }
                }
            }
        }
        else
        {
            // 2+ batches per chunk, filtering disabled
            for (var m = 0; m < MatchingArchetypes.Length; ++m)
            {
                var match = MatchingArchetypes.Ptr[m];
                if (match->Archetype->EntityCount <= 0)
                {
                    continue;
                }
                var archetype = match->Archetype;
                int chunkCount = archetype->Chunks.Count;
                var chunkEntityCountArray = archetype->Chunks.GetChunkEntityCountArray();
                for (int chunkIndex = 0; chunkIndex < chunkCount; ++chunkIndex)
                {
                    var chunk = archetype->Chunks[chunkIndex];
                    var chunkEntityCount = chunkEntityCountArray[chunkIndex];
                    for (int batchIndex = 0; batchIndex < BatchesPerChunk; ++batchIndex)
                    {
                        if (ArchetypeChunk.EntityBatchFromChunk(chunk, chunkEntityCount, BatchesPerChunk, batchIndex, EntityComponentStore, out var batch))
                        {
                            batches[filteredBatchCounter] = batch;
                            entityIndices[filteredBatchCounter] = entityIndexAggregate;
                            ++filteredBatchCounter;
                            entityIndexAggregate += batch.Count;
                        }
                    }
                }
            }
        }
    }
    // The final int slot (past the full-size index region) receives the batch count.
    var chunkCounter = entityIndices + FilteredChunkCount * BatchesPerChunk;
    *chunkCounter = filteredBatchCounter;
}
/// <summary>
/// Swaps the components of the entity at <paramref name="leftIndex"/> in <paramref name="leftChunk"/>
/// with the entity at <paramref name="rightIndex"/> in <paramref name="rightChunk"/>,
/// after verifying access.
/// </summary>
public void SwapComponents(ArchetypeChunk leftChunk, int leftIndex, ArchetypeChunk rightChunk, int rightIndex)
{
    CheckAccess();
    // Swap a single entity's components (count = 1) at the raw chunk level.
    var leftRawChunk = leftChunk.m_Chunk;
    var rightRawChunk = rightChunk.m_Chunk;
    ChunkDataUtility.SwapComponents(leftRawChunk, leftIndex, rightRawChunk, rightIndex, 1);
}