// Schedules an IJobEntityBatch with a variable number of batches per chunk and
// verifies that every entity matched by the query is visited exactly once.
[Test]
public void IJobEntityBatch_WithoutFiltering_GeneratesExpectedBatches([Values(1, 4, 17, 100)] int batchesPerChunk)
{
    var archetype = m_Manager.CreateArchetype(typeof(EcsTestData));
    var query = m_Manager.CreateEntityQuery(typeof(EcsTestData));
    var entityCount = 10000;
    using (var entities = m_Manager.CreateEntity(archetype, entityCount, Allocator.TempJob))
    using (var batches = new NativeArray<ArchetypeChunk>(archetype.ChunkCount * batchesPerChunk, Allocator.TempJob))
    {
        for (var i = 0; i < entityCount; ++i)
        {
            m_Manager.SetComponentData(entities[i], new EcsTestData { value = -1 });
        }

        var job = new WriteBatchInfoToArray
        {
            BatchInfos = batches,
        };
        job.ScheduleParallel(query, batchesPerChunk).Complete();

        var entityTypeHandle = m_Manager.GetEntityTypeHandle();
        int markedEntityCount = 0;
        for (int batchIndex = 0; batchIndex < batches.Length; ++batchIndex)
        {
            var batch = batches[batchIndex];
            if (!IsBatchInitialized(batch))
            {
                continue; // this is fine; empty/filtered batches will be skipped and left uninitialized.
            }
            Assert.Greater(batch.Count, 0); // empty batches should not have been Execute()ed
            Assert.LessOrEqual(batch.Count, (batch.ChunkEntityCount / batchesPerChunk) + 1);
            var batchEntities = batch.GetNativeArray(entityTypeHandle);
            for (int i = 0; i < batchEntities.Length; ++i)
            {
                // Each entity must still have its initial value; mark it as visited.
                Assert.AreEqual(-1, m_Manager.GetComponentData<EcsTestData>(batchEntities[i]).value);
                m_Manager.SetComponentData(batchEntities[i], new EcsTestData { value = 1 });
                markedEntityCount++;
            }
        }
        Assert.AreEqual(entities.Length, markedEntityCount);
        for (int i = 0; i < entities.Length; ++i)
        {
            Assert.AreEqual(1, m_Manager.GetComponentData<EcsTestData>(entities[i]).value);
        }
    }
    query.Dispose();
}
// As above, but the job is limited to an explicit entity list and the query has a
// shared-component filter; only filter-matching entities from the list should be batched.
[Test]
public void IJobEntityBatch_GeneratesExpectedBatches_WithEntityList_WithFiltering()
{
    var archetypeA = m_Manager.CreateArchetype(typeof(EcsTestData), typeof(EcsTestSharedComp));
    var archetypeB = m_Manager.CreateArchetype(typeof(EcsTestData2), typeof(EcsTestSharedComp));
    var archetypeC = m_Manager.CreateArchetype(typeof(EcsTestData3), typeof(EcsTestSharedComp));
    using (var query = m_Manager.CreateEntityQuery(typeof(EcsTestData), typeof(EcsTestSharedComp)))
    using (var entitiesA = m_Manager.CreateEntity(archetypeA, 100, Allocator.TempJob))
    using (var entitiesB = m_Manager.CreateEntity(archetypeB, 100, Allocator.TempJob))
    using (var entitiesC = m_Manager.CreateEntity(archetypeC, 100, Allocator.TempJob))
    {
        for (int i = 0; i < entitiesA.Length; ++i)
        {
            m_Manager.SetComponentData(entitiesA[i], new EcsTestData(i));
            var mod = i % 5;
            var val = mod < 3 ? 17 : 7;
            m_Manager.SetSharedComponentData(entitiesA[i], new EcsTestSharedComp(val));
        }
        query.SetSharedComponentFilter(new EcsTestSharedComp(17));

        // AAAAABBBBCAAAAABBBBC...
        // With filtering, it's A1A1A1A2A2BBBBCA1A1A1A2A2BBBBC...
        var limitEntities = new NativeArray<Entity>(100, Allocator.TempJob);
        for (int i = 0; i < 100; ++i)
        {
            var mod = i % 10;
            if (mod < 5)
            {
                limitEntities[i] = entitiesA[i];
            }
            else if (mod < 9)
            {
                limitEntities[i] = entitiesB[i];
            }
            else
            {
                limitEntities[i] = entitiesC[i];
            }
        }

        var batches = new NativeArray<ArchetypeChunk>(20, Allocator.TempJob);
        var job = new WriteBatchInfoToArray
        {
            BatchInfos = batches,
        };
        job.ScheduleParallel(query, limitEntities).Complete();

        using (var matchingEntities = query.ToEntityArray(limitEntities, Allocator.TempJob))
        {
            var entityTypeHandle = m_Manager.GetEntityTypeHandle();
            int markedEntityCount = 0;
            int validBatchCount = 0;
            for (int batchIndex = 0; batchIndex < batches.Length; ++batchIndex)
            {
                var batch = batches[batchIndex];
                if (!IsBatchInitialized(batch))
                {
                    continue; // this is fine; empty/filtered batches will be skipped and left uninitialized.
                }
                // Only the three filter-matching A entities per group of ten should be in each batch.
                Assert.AreEqual(3, batch.Count);
                var batchEntities = batch.GetNativeArray(entityTypeHandle);
                for (int i = 0; i < batchEntities.Length; ++i)
                {
                    Assert.AreEqual(validBatchCount * 10 + i, m_Manager.GetComponentData<EcsTestData>(batchEntities[i]).value);
                    Assert.AreEqual(matchingEntities[markedEntityCount], batchEntities[i]);
                    markedEntityCount++;
                }
                validBatchCount++;
            }
            Assert.AreEqual(query.CalculateEntityCount(limitEntities), markedEntityCount);
        }
        limitEntities.Dispose();
        batches.Dispose();
    }
}
// Limits the job to an explicit entity list without filtering; every batch should
// contain exactly the five matching A entities from each group of ten list entries.
[Test]
public void IJobEntityBatch_GeneratesExpectedBatches_WithEntityList()
{
    var archetypeA = m_Manager.CreateArchetype(typeof(EcsTestData));
    var archetypeB = m_Manager.CreateArchetype(typeof(EcsTestData2));
    var archetypeC = m_Manager.CreateArchetype(typeof(EcsTestData3));
    using (var query = m_Manager.CreateEntityQuery(typeof(EcsTestData)))
    using (var entitiesA = m_Manager.CreateEntity(archetypeA, 100, Allocator.TempJob))
    using (var entitiesB = m_Manager.CreateEntity(archetypeB, 100, Allocator.TempJob))
    using (var entitiesC = m_Manager.CreateEntity(archetypeC, 100, Allocator.TempJob))
    {
        for (int i = 0; i < entitiesA.Length; ++i)
        {
            m_Manager.SetComponentData(entitiesA[i], new EcsTestData(i));
        }

        // AAAAABBBBCAAAAABBBBC...
        var limitEntities = new NativeArray<Entity>(100, Allocator.TempJob);
        for (int i = 0; i < 100; ++i)
        {
            var mod = i % 10;
            if (mod < 5)
            {
                limitEntities[i] = entitiesA[i];
            }
            else if (mod < 9)
            {
                limitEntities[i] = entitiesB[i];
            }
            else
            {
                limitEntities[i] = entitiesC[i];
            }
        }

        var batches = new NativeArray<ArchetypeChunk>(10, Allocator.TempJob);
        var job = new WriteBatchInfoToArray
        {
            BatchInfos = batches,
        };
        job.ScheduleParallel(query, limitEntities).Complete();

        using (var matchingEntities = query.ToEntityArray(limitEntities, Allocator.TempJob))
        {
            var entityTypeHandle = m_Manager.GetEntityTypeHandle();
            int markedEntityCount = 0;
            for (int batchIndex = 0; batchIndex < batches.Length; ++batchIndex)
            {
                var batch = batches[batchIndex];
                Assert.IsTrue(IsBatchInitialized(batch));
                Assert.AreEqual(5, batch.Count);
                var batchEntities = batch.GetNativeArray(entityTypeHandle);
                for (int i = 0; i < batchEntities.Length; ++i)
                {
                    Assert.AreEqual(batchIndex * 10 + i, m_Manager.GetComponentData<EcsTestData>(batchEntities[i]).value);
                    Assert.AreEqual(matchingEntities[markedEntityCount], batchEntities[i]);
                    markedEntityCount++;
                }
            }
            Assert.AreEqual(query.CalculateEntityCount(limitEntities), markedEntityCount);
        }
        limitEntities.Dispose();
        batches.Dispose();
    }
}
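// The tests above reference a WriteBatchInfoToArray job and an IsBatchInitialized()
// helper that are not defined in this excerpt. Below is a minimal sketch of what they
// could look like, assuming the IJobEntityBatch.Execute(ArchetypeChunk, int) signature;
// the fixture's actual implementations may differ.
struct WriteBatchInfoToArray : IJobEntityBatch
{
    // Each Execute() writes to a unique batchIndex, but those indices do not line up
    // with the safety system's expected parallel-for range, so the restriction is disabled.
    [NativeDisableParallelForRestriction]
    public NativeArray<ArchetypeChunk> BatchInfos;

    public void Execute(ArchetypeChunk batchInChunk, int batchIndex)
    {
        // Record each batch so the test can inspect it after the job completes.
        BatchInfos[batchIndex] = batchInChunk;
    }
}

static bool IsBatchInitialized(ArchetypeChunk batch)
{
    // Batches never passed to Execute() keep the cleared (default) value the
    // NativeArray was allocated with.
    return batch != default(ArchetypeChunk);
}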