// Per-frame zombie movement: hashes follow-targets and audibles into spatial maps,
// then moves each active zombie toward the nearest seen/heard target (or randomly).
// NOTE(review): this excerpt ends at the ForEach's closing "})" — the scheduling call
// and the method's closing brace lie outside this view.
protected override void OnUpdate() {
    // Wait for the collidable hashing jobs before reading their maps below.
    Dependency = JobHandle.CombineDependencies(
        Dependency,
        World.GetExistingSystem<HashCollidablesSystem>().m_StaticCollidableHashMapJobHandle,
        World.GetExistingSystem<HashCollidablesSystem>().m_DynamicCollidableHashMapJobHandle
    );
    var staticCollidableHashMap = World.GetExistingSystem<HashCollidablesSystem>().m_StaticCollidableHashMap;
    var dynamicCollidableHashMap = World.GetExistingSystem<HashCollidablesSystem>().m_DynamicCollidableHashMap;

    var viewDistance = GameController.instance.zombieVisionDistance;
    var followTargetCount = m_FollowTargetQuery.CalculateEntityCount();
    // exact-cell hash of each follow target's grid position -> its query index
    var followTargetHashMap = new NativeHashMap<int, int>(followTargetCount, Allocator.TempJob);
    var followTargetParallelWriter = followTargetHashMap.AsParallelWriter();

    // We need either "(X * Y) / visionDistance" or "numUnitsToFollow" hash buckets, whichever is smaller
    var zombieVisionHashMapCellSize = viewDistance * 2 + 1;
    // coarse (cell-size-bucketed) hash -> presence marker; cheap "any target near me?" test
    var zombieVisionHashMap = new NativeHashMap<int, int>(followTargetCount, Allocator.TempJob);
    var zombieVisionParallelWriter = zombieVisionHashMap.AsParallelWriter();

    var hashFollowTargetGridPositionsJobHandle = Entities
        .WithName("HashFollowTargetGridPositions")
        .WithAll<FollowTarget>()
        .WithStoreEntityQueryInField(ref m_FollowTargetQuery)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in GridPosition gridPosition) => {
            var hash = (int)math.hash(gridPosition.Value);
            followTargetParallelWriter.TryAdd(hash, entityInQueryIndex);
        })
        .ScheduleParallel(Dependency);

    var hashFollowTargetVisionJobHandle = Entities
        .WithName("HashFollowTargetVision")
        .WithAll<FollowTarget>()
        .WithStoreEntityQueryInField(ref m_FollowTargetQuery)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in GridPosition gridPosition) => {
            // Bucket targets into view-distance-sized cells for the coarse visibility test.
            var hash = (int)math.hash(gridPosition.Value / zombieVisionHashMapCellSize);
            zombieVisionParallelWriter.TryAdd(hash, entityInQueryIndex);
        })
        .ScheduleParallel(Dependency);

    var hearingDistance = GameController.instance.zombieHearingDistance;
    var audibleCount = m_AudibleQuery.CalculateEntityCount();
    // exact-cell hash of each audible's grid position -> the audible's target position
    var audibleHashMap = new NativeMultiHashMap<int, int3>(audibleCount, Allocator.TempJob);
    var audibleParallelWriter = audibleHashMap.AsParallelWriter();

    // We need either "(X * Y) / visionDistance" or "numAudiblesToFollow" hash buckets, whichever is smaller
    // NOTE(review): cell size is derived from viewDistance, not hearingDistance — looks like a
    // copy/paste of the vision setup above; confirm this is intentional.
    var zombieHearingHashMapCellSize = viewDistance * 2 + 1;
    var zombieHearingHashMap = new NativeHashMap<int, int>(audibleCount, Allocator.TempJob);
    var zombieHearingParallelWriter = zombieHearingHashMap.AsParallelWriter();

    var hashAudiblesJobHandle = Entities
        .WithName("HashAudibles")
        .WithStoreEntityQueryInField(ref m_AudibleQuery)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in Audible audible) => {
            var hash = (int)math.hash(audible.GridPositionValue);
            audibleParallelWriter.Add(hash, audible.Target);
        })
        .ScheduleParallel(Dependency);

    var hashHearingJobHandle = Entities
        .WithName("HashHearing")
        .WithStoreEntityQueryInField(ref m_AudibleQuery)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in Audible audible) => {
            var hash = (int)math.hash(audible.GridPositionValue / zombieHearingHashMapCellSize);
            zombieHearingParallelWriter.TryAdd(hash, entityInQueryIndex);
        })
        .ScheduleParallel(Dependency);

    // All four hashing jobs must finish before the movement job reads the maps.
    // NOTE(review): movementBarrierHandle is presumably passed to the ScheduleParallel call
    // beyond this excerpt — confirm.
    var movementBarrierHandle = JobHandle.CombineDependencies(
        Dependency, hashFollowTargetGridPositionsJobHandle, hashFollowTargetVisionJobHandle
    );
    movementBarrierHandle = JobHandle.CombineDependencies(
        movementBarrierHandle, hashAudiblesJobHandle, hashHearingJobHandle
    );

    var Commands = m_EntityCommandBufferSystem.CreateCommandBuffer().AsParallelWriter();
    var audibleArchetype = Archetypes.AudibleArchetype;
    // NOTE(review): tick is not used in the visible portion of this method.
    var tick = UnityEngine.Time.frameCount;

    var moveTowardsTargetJobHandle = Entities
        .WithName("MoveTowardsTargets")
        .WithAll<MoveTowardsTarget>()
        .WithChangeFilter<TurnsUntilActive>()
        .WithReadOnly(staticCollidableHashMap)
        .WithReadOnly(dynamicCollidableHashMap)
        .WithReadOnly(followTargetHashMap)
        .WithReadOnly(audibleHashMap)
        .WithReadOnly(zombieVisionHashMap)
        .WithReadOnly(zombieHearingHashMap)
        .WithDisposeOnCompletion(followTargetHashMap)
        .WithDisposeOnCompletion(audibleHashMap)
        .WithDisposeOnCompletion(zombieVisionHashMap)
        .WithDisposeOnCompletion(zombieHearingHashMap)
        .WithBurst()
        .ForEach((Entity entity, int entityInQueryIndex, ref NextGridPosition nextGridPosition, ref RandomGenerator random, in TurnsUntilActive turnsUntilActive, in GridPosition gridPosition) => {
            // Only entities whose turn is about to come up move this frame.
            if (turnsUntilActive.Value != 1) {
                return;
            }

            int3 myGridPositionValue = gridPosition.Value;
            int3 nearestTarget = myGridPositionValue;
            bool moved = false;

            // Coarse test: probe own coarse cell plus the four corner cells of the hearing radius.
            bool foundByHearing = zombieHearingHashMap.TryGetValue((int)math.hash(myGridPositionValue / zombieHearingHashMapCellSize), out _) ||
                zombieHearingHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x - hearingDistance, myGridPositionValue.y, myGridPositionValue.z - hearingDistance) / zombieHearingHashMapCellSize), out _) ||
                zombieHearingHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x + hearingDistance, myGridPositionValue.y, myGridPositionValue.z - hearingDistance) / zombieHearingHashMapCellSize), out _) ||
                zombieHearingHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x - hearingDistance, myGridPositionValue.y, myGridPositionValue.z + hearingDistance) / zombieHearingHashMapCellSize), out _) ||
                zombieHearingHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x + hearingDistance, myGridPositionValue.y, myGridPositionValue.z + hearingDistance) / zombieHearingHashMapCellSize), out _);
            // Same coarse test for vision.
            bool foundBySight = zombieVisionHashMap.TryGetValue((int)math.hash(myGridPositionValue / zombieVisionHashMapCellSize), out _) ||
                zombieVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x - viewDistance, myGridPositionValue.y, myGridPositionValue.z - viewDistance) / zombieVisionHashMapCellSize), out _) ||
                zombieVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x + viewDistance, myGridPositionValue.y, myGridPositionValue.z - viewDistance) / zombieVisionHashMapCellSize), out _) ||
                zombieVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x - viewDistance, myGridPositionValue.y, myGridPositionValue.z + viewDistance) / zombieVisionHashMapCellSize), out _) ||
                zombieVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x + viewDistance, myGridPositionValue.y, myGridPositionValue.z + viewDistance) / zombieVisionHashMapCellSize), out _);
            bool foundTarget = foundByHearing || foundBySight;

            if (foundTarget) {
                // Coarse hit: now do the exact (expensive) ring search outward from the zombie.
                foundByHearing = false;
                foundBySight = false;
                foundTarget = false;

                // Get nearest target
                // Check all grid positions that are checkDist away in the x or y direction
                for (int checkDist = 1; (checkDist <= viewDistance || checkDist <= hearingDistance) && !foundTarget; checkDist++) {
                    float nearestDistance = (checkDist + 2) * (checkDist + 2);
                    for (int z = -checkDist; z <= checkDist; z++) {
                        for (int x = -checkDist; x <= checkDist; x++) {
                            // Only the ring's perimeter — interior cells were covered at smaller checkDist.
                            if (math.abs(x) == checkDist || math.abs(z) == checkDist) {
                                var targetGridPosition = new int3(myGridPositionValue.x + x, myGridPositionValue.y, myGridPositionValue.z + z);
                                int targetKey = (int)math.hash(targetGridPosition);
                                if (checkDist <= viewDistance && followTargetHashMap.TryGetValue(targetKey, out _)) {
                                    // Check if we have line of sight to the target
                                    if (LineOfSightUtilities.InLineOfSight(myGridPositionValue, targetGridPosition, staticCollidableHashMap)) {
                                        var distance = math.lengthsq(new float3(myGridPositionValue) - new float3(targetGridPosition));
                                        var nearest = distance < nearestDistance;
                                        nearestDistance = math.select(nearestDistance, distance, nearest);
                                        nearestTarget = math.select(nearestTarget, targetGridPosition, nearest);
                                        foundBySight = true;
                                    }
                                }
                                // Sight wins over hearing; audibles steer toward the sound's own target.
                                if (!foundBySight && checkDist <= hearingDistance && audibleHashMap.TryGetFirstValue(targetKey, out int3 audibleTarget, out _)) {
                                    var distance = math.lengthsq(new float3(myGridPositionValue) - new float3(targetGridPosition));
                                    var nearest = distance < nearestDistance;
                                    nearestDistance = math.select(nearestDistance, distance, nearest);
                                    nearestTarget = math.select(nearestTarget, audibleTarget, nearest);
                                    foundByHearing = true;
                                }
                            }
                            foundTarget = foundByHearing || foundBySight;
                        }
                    }
                }
            }

            // Which of the four neighbour cells are free of static/dynamic collidables?
            var leftMoveAvail = true;
            var rightMoveAvail = true;
            var downMoveAvail = true;
            var upMoveAvail = true;
            int moveLeftKey = (int)math.hash(new int3(myGridPositionValue.x - 1, myGridPositionValue.y, myGridPositionValue.z));
            int moveRightKey = (int)math.hash(new int3(myGridPositionValue.x + 1, myGridPositionValue.y, myGridPositionValue.z));
            int moveDownKey = (int)math.hash(new int3(myGridPositionValue.x, myGridPositionValue.y, myGridPositionValue.z - 1));
            int moveUpKey = (int)math.hash(new int3(myGridPositionValue.x, myGridPositionValue.y, myGridPositionValue.z + 1));
            if (staticCollidableHashMap.TryGetValue(moveLeftKey, out _) || dynamicCollidableHashMap.TryGetValue(moveLeftKey, out _)) {
                leftMoveAvail = false;
            }
            if (staticCollidableHashMap.TryGetValue(moveRightKey, out _) || dynamicCollidableHashMap.TryGetValue(moveRightKey, out _)) {
                rightMoveAvail = false;
            }
            if (staticCollidableHashMap.TryGetValue(moveDownKey, out _) || dynamicCollidableHashMap.TryGetValue(moveDownKey, out _)) {
                downMoveAvail = false;
            }
            if (staticCollidableHashMap.TryGetValue(moveUpKey, out _) || dynamicCollidableHashMap.TryGetValue(moveUpKey, out _)) {
                upMoveAvail = false;
            }

            if (foundTarget) {
                int3 direction = nearestTarget - myGridPositionValue;
                // Prefer the axis with the larger remaining distance.
                if (math.abs(direction.x) >= math.abs(direction.z)) {
                    // Move horizontally
                    if (direction.x < 0) {
                        if (leftMoveAvail) {
                            myGridPositionValue.x--;
                            moved = true;
                        }
                    } else if (direction.x > 0) {
                        if (rightMoveAvail) {
                            myGridPositionValue.x++;
                            moved = true;
                        }
                    }
                }
                // Unit maybe wanted to move horizontally but couldn't, so check if it wants to move vertically
                if (!moved) {
                    // Move vertically
                    if (direction.z < 0) {
                        if (downMoveAvail) {
                            myGridPositionValue.z--;
                            moved = true;
                        }
                    } else if (direction.z > 0) {
                        if (upMoveAvail) {
                            myGridPositionValue.z++;
                            moved = true;
                        }
                    }
                    // Unit wanted to move vertically but couldn't, so check if it wants to move horizontally
                    if (!moved) {
                        // Move horizontally
                        if (direction.x < 0) {
                            if (leftMoveAvail) {
                                myGridPositionValue.x--;
                                moved = true;
                            }
                        } else if (direction.x > 0) {
                            if (rightMoveAvail) {
                                myGridPositionValue.x++;
                                moved = true;
                            }
                        }
                    }
                }
                // If a unit is close, set 'moved = true' so we don't move randomly
                if ((math.abs(direction.x) == 1 && math.abs(direction.z) == 0) || math.abs(direction.x) == 0 && math.abs(direction.z) == 1) {
                    moved = true;
                }
            }

            // No target (or blocked): try the four directions in a random rotation.
            if (!moved) {
                int randomDirIndex = random.Value.NextInt(0, 4);
                for (int i = 0; i < 4 && !moved; i++) {
                    int direction = (randomDirIndex + i) % 4;
                    switch (direction) {
                        case 0:
                            if (upMoveAvail) {
                                myGridPositionValue.z += 1;
                                moved = true;
                            }
                            break;
                        case 1:
                            if (rightMoveAvail) {
                                myGridPositionValue.x += 1;
                                moved = true;
                            }
                            break;
                        case 2:
                            if (downMoveAvail) {
                                myGridPositionValue.z -= 1;
                                moved = true;
                            }
                            break;
                        case 3:
                            if (leftMoveAvail) {
                                myGridPositionValue.x -= 1;
                                moved = true;
                            }
                            break;
                    }
                }
            }

            // Zombies that saw a target emit a sound so nearby zombies can follow by hearing.
            if (foundBySight) {
                Entity audibleEntity = Commands.CreateEntity(entityInQueryIndex, audibleArchetype);
                Commands.SetComponent(entityInQueryIndex, audibleEntity, new Audible { GridPositionValue = myGridPositionValue, Target = nearestTarget, Age = 0 });
            }

            nextGridPosition = new NextGridPosition { Value = myGridPositionValue };
        })
/// <summary>
/// Allocates the set's backing containers with the requested capacity and allocator:
/// a ulong-keyed hash map and a list of stored values.
/// </summary>
/// <param name="capacity">Initial capacity for both containers.</param>
/// <param name="allocator">Allocator used for both native containers.</param>
public NativeULongSet(int capacity, Allocator allocator)
{
    values = new NativeList<ulong>(capacity, allocator);
    hashMap = new NativeHashMap<ulong, int>(capacity, allocator);
}
/// <summary>
/// Parses a Wavefront OBJ file at <paramref name="path"/> into a structure-of-arrays mesh.
/// Two passes over the text: pass 1 counts positions/texcoords/normals/triangles (fans are
/// triangulated, so a face with k verts yields k-2 triangles); pass 2 reads the values.
/// Vertices are then deduplicated via a hash of their (position, texcoord, normal) index
/// triple, either packed by first reference or keeping the file's definition order.
/// Output arrays are Allocator.Persistent; the caller owns and must dispose the result.
/// </summary>
public unsafe static NativeMeshSOA Parse(string path, VertexAttribs vertexAttribs = VertexAttribs.Position, VertexOrder vertexOrder = VertexOrder.ByDefinition)
{
#if VERBOSE
    Debug.LogFormat("trying {0}", path);
#endif

    var text = File.ReadAllText(path); //TODO replace with native variant
    var textSize = text.Length;

    // measure the data (pass 1: counts only, so buffers can be sized exactly)
    int numPositions = 0;
    int numTexCoords = 0;
    int numNormals = 0;
    int numFaces = 0;
    for (int i = 0; i < text.Length; i++)
    {
        if (ReadChar(text, ref i, 'v'))
        {
            // "v " = position, "vt " = texcoord, "vn " = normal
            if (ReadBlank(text, ref i))
            {
                numPositions++;
            }
            else if (ReadChar(text, ref i, 't') && ReadBlank(text, ref i))
            {
                numTexCoords++;
            }
            else if (ReadChar(text, ref i, 'n') && ReadBlank(text, ref i))
            {
                numNormals++;
            }
        }
        else if (ReadChar(text, ref i, 'f') && ReadBlankGreedy(text, ref i))
        {
            // Count verts on the face line; a k-gon fan triangulates to k-2 triangles.
            int readVerts = 0;
            while (ReadDigit(text, ref i))
            {
                ReadUntilNewlineOrBlank(text, ref i);
                ReadBlankGreedy(text, ref i);
                readVerts++;
            }
            if (readVerts > 2)
            {
                numFaces += readVerts - 2;
            }
        }
        ReadUntilNewline(text, ref i);
    }

#if VERBOSE
    Debug.LogFormat("-- numPositions = {0}", numPositions);
    Debug.LogFormat("-- numTexCoords = {0}", numTexCoords);
    Debug.LogFormat("-- numNormals = {0}", numNormals);
    Debug.LogFormat("-- numFaces = {0}", numFaces);
#endif

    // allocate buffers (temp input buffers sized by pass 1; output sized for worst case:
    // every triangle corner unique => numFaces * 3 vertices)
    var inputPositions = new NativeArray<Vector3>(numPositions, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var inputTexCoords = new NativeArray<Vector2>(numTexCoords, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var inputNormals = new NativeArray<Vector3>(numNormals, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var inputFaces = new NativeArray<InputFace>(numFaces, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

    var outputIndicesMax = numFaces * 3;
    var outputIndicesLUT = new NativeHashMap<Hash128, int>(outputIndicesMax, Allocator.Temp);
    var outputPositions = new NativeArray<Vector3>(outputIndicesMax, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var outputTexCoords = new NativeArray<Vector2>(outputIndicesMax, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var outputNormals = new NativeArray<Vector3>(outputIndicesMax, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    var outputIndices = new NativeArray<int>(outputIndicesMax, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

    // read the data (pass 2: same scan, now storing values; counters restart from zero)
    numPositions = 0;
    numTexCoords = 0;
    numNormals = 0;
    numFaces = 0;
    for (int i = 0; i < text.Length; i++)
    {
        if (ReadChar(text, ref i, 'v'))
        {
            if (ReadBlank(text, ref i))
            {
                Vector3 position;
                ReadFloat(text, ref i, out position.x);
                position.x *= -1.0f; //TODO remove this hack
                ReadBlankGreedy(text, ref i);
                ReadFloat(text, ref i, out position.y);
                ReadBlankGreedy(text, ref i);
                ReadFloat(text, ref i, out position.z);
                inputPositions[numPositions++] = position;
            }
            else if (ReadChar(text, ref i, 't') && ReadBlank(text, ref i))
            {
                Vector2 texCoord;
                ReadFloat(text, ref i, out texCoord.x);
                ReadBlankGreedy(text, ref i);
                ReadFloat(text, ref i, out texCoord.y);
                inputTexCoords[numTexCoords++] = texCoord;
            }
            else if (ReadChar(text, ref i, 'n') && ReadBlank(text, ref i))
            {
                Vector3 normal;
                ReadFloat(text, ref i, out normal.x);
                normal.x *= -1.0f; //TODO remove this hack
                ReadBlankGreedy(text, ref i);
                ReadFloat(text, ref i, out normal.y);
                ReadBlankGreedy(text, ref i);
                ReadFloat(text, ref i, out normal.z);
                inputNormals[numNormals++] = normal;
            }
        }
        else if (ReadChar(text, ref i, 'f') && ReadBlankGreedy(text, ref i))
        {
            // Face line "f p/t/n p/t/n ..." — triangulated as a fan around v0:
            // v1 slides to the previous v2 each iteration, emitting (v0, v1, v2).
            InputFace face = new InputFace();
            if (ReadUInt(text, ref i, out face.v0.idxPosition))
            {
                ReadChar(text, ref i, '/');
                ReadUInt(text, ref i, out face.v0.idxTexCoord);
                ReadChar(text, ref i, '/');
                ReadUInt(text, ref i, out face.v0.idxNormal);
                int readVerts = 1;
                while (ReadBlankGreedy(text, ref i))
                {
                    face.v1 = face.v2;
                    if (ReadUInt(text, ref i, out face.v2.idxPosition))
                    {
                        ReadChar(text, ref i, '/');
                        ReadUInt(text, ref i, out face.v2.idxTexCoord);
                        ReadChar(text, ref i, '/');
                        ReadUInt(text, ref i, out face.v2.idxNormal);
                        if (++readVerts > 2)
                        {
                            inputFaces[numFaces++] = face;
                        }
                    }
                }
            }
        }
        ReadUntilNewline(text, ref i);
    }

    // process the data: dedupe corners so each unique (pos, uv, normal) index triple
    // becomes exactly one output vertex
    int numOutputVertices = 0;
    int numOutputIndices = 0;
    if (vertexOrder == VertexOrder.ByReference)
    {
        // Vertices are numbered in the order they are first referenced by a face.
        for (int i = 0; i != numFaces; i++)
        {
            InputFace face = inputFaces[i];

            var key0 = Hash(in face.v0);
            var key1 = Hash(in face.v1);
            var key2 = Hash(in face.v2);

            int idx0, idx1, idx2;
            if (outputIndicesLUT.TryGetValue(key0, out idx0) == false)
            {
                outputIndicesLUT[key0] = idx0 = numOutputVertices++;
            }
            if (outputIndicesLUT.TryGetValue(key1, out idx1) == false)
            {
                outputIndicesLUT[key1] = idx1 = numOutputVertices++;
            }
            if (outputIndicesLUT.TryGetValue(key2, out idx2) == false)
            {
                outputIndicesLUT[key2] = idx2 = numOutputVertices++;
            }

            // OBJ indices are 1-based, hence the "- 1" everywhere below.
            outputPositions[idx0] = inputPositions[(int)face.v0.idxPosition - 1];
            outputPositions[idx1] = inputPositions[(int)face.v1.idxPosition - 1];
            outputPositions[idx2] = inputPositions[(int)face.v2.idxPosition - 1];

            outputTexCoords[idx0] = inputTexCoords[(int)face.v0.idxTexCoord - 1];
            outputTexCoords[idx1] = inputTexCoords[(int)face.v1.idxTexCoord - 1];
            outputTexCoords[idx2] = inputTexCoords[(int)face.v2.idxTexCoord - 1];

            outputNormals[idx0] = inputNormals[(int)face.v0.idxNormal - 1];
            outputNormals[idx1] = inputNormals[(int)face.v1.idxNormal - 1];
            outputNormals[idx2] = inputNormals[(int)face.v2.idxNormal - 1];

            outputIndices[numOutputIndices++] = idx0;
            outputIndices[numOutputIndices++] = idx1;
            outputIndices[numOutputIndices++] = idx2;
        }
    }
    else if (vertexOrder == VertexOrder.ByDefinition)
    {
        // The first corner that uses position slot p keeps index p-1 (file order);
        // later corners sharing the position but differing in uv/normal get appended
        // past numPositions. indexVisited tracks whether a position slot is taken.
        numOutputVertices = numPositions;
        var indexVisited = new NativeArray<bool>(numPositions, Allocator.Temp, NativeArrayOptions.ClearMemory);
        for (int i = 0; i != numFaces; i++)
        {
            InputFace face = inputFaces[i];

            var key0 = Hash(in face.v0);
            var key1 = Hash(in face.v1);
            var key2 = Hash(in face.v2);

            int idx0, idx1, idx2;
            if (outputIndicesLUT.TryGetValue(key0, out idx0) == false)
            {
                if (indexVisited[idx0 = (int)face.v0.idxPosition - 1])
                {
                    outputIndicesLUT[key0] = idx0 = numOutputVertices++;
                }
                else
                {
                    outputIndicesLUT[key0] = idx0;
                }
            }
            if (outputIndicesLUT.TryGetValue(key1, out idx1) == false)
            {
                if (indexVisited[idx1 = (int)face.v1.idxPosition - 1])
                {
                    outputIndicesLUT[key1] = idx1 = numOutputVertices++;
                }
                else
                {
                    outputIndicesLUT[key1] = idx1;
                }
            }
            if (outputIndicesLUT.TryGetValue(key2, out idx2) == false)
            {
                if (indexVisited[idx2 = (int)face.v2.idxPosition - 1])
                {
                    outputIndicesLUT[key2] = idx2 = numOutputVertices++;
                }
                else
                {
                    outputIndicesLUT[key2] = idx2;
                }
            }

            indexVisited[(int)face.v0.idxPosition - 1] = true;
            indexVisited[(int)face.v1.idxPosition - 1] = true;
            indexVisited[(int)face.v2.idxPosition - 1] = true;

            outputPositions[idx0] = inputPositions[(int)face.v0.idxPosition - 1];
            outputPositions[idx1] = inputPositions[(int)face.v1.idxPosition - 1];
            outputPositions[idx2] = inputPositions[(int)face.v2.idxPosition - 1];

            outputTexCoords[idx0] = inputTexCoords[(int)face.v0.idxTexCoord - 1];
            outputTexCoords[idx1] = inputTexCoords[(int)face.v1.idxTexCoord - 1];
            outputTexCoords[idx2] = inputTexCoords[(int)face.v2.idxTexCoord - 1];

            outputNormals[idx0] = inputNormals[(int)face.v0.idxNormal - 1];
            outputNormals[idx1] = inputNormals[(int)face.v1.idxNormal - 1];
            outputNormals[idx2] = inputNormals[(int)face.v2.idxNormal - 1];

            outputIndices[numOutputIndices++] = idx0;
            outputIndices[numOutputIndices++] = idx1;
            outputIndices[numOutputIndices++] = idx2;
        }
        indexVisited.Dispose();
    }

#if VERBOSE
    Debug.LogFormat("output vertex count = {0}", numOutputVertices);
    Debug.LogFormat("output index count = {0}", numOutputIndices);
#endif

    // copy to container (tight, persistent allocations for the caller)
    NativeMeshSOA mesh = new NativeMeshSOA()
    {
        vertexPositions = new NativeArray<Vector3>(numOutputVertices, Allocator.Persistent, NativeArrayOptions.UninitializedMemory),
        vertexTexCoords = new NativeArray<Vector2>(numOutputVertices, Allocator.Persistent, NativeArrayOptions.UninitializedMemory),
        vertexNormals = new NativeArray<Vector3>(numOutputVertices, Allocator.Persistent, NativeArrayOptions.UninitializedMemory),
        vertexCount = numOutputVertices,
        faceIndices = new NativeArray<int>(numOutputIndices, Allocator.Persistent, NativeArrayOptions.UninitializedMemory),
        faceIndicesCount = numOutputIndices,
    };

    NativeArray<Vector3>.Copy(outputPositions, mesh.vertexPositions, numOutputVertices);
    NativeArray<Vector2>.Copy(outputTexCoords, mesh.vertexTexCoords, numOutputVertices);
    NativeArray<Vector3>.Copy(outputNormals, mesh.vertexNormals, numOutputVertices);
    NativeArray<int>.Copy(outputIndices, mesh.faceIndices, numOutputIndices);

    // free buffers
    inputPositions.Dispose();
    inputTexCoords.Dispose();
    inputNormals.Dispose();
    inputFaces.Dispose();
    outputIndicesLUT.Dispose();
    outputPositions.Dispose();
    outputTexCoords.Dispose();
    outputNormals.Dispose();
    outputIndices.Dispose();

    // done
    return (mesh);
}
// Injects elite entities into the parent population: walks the elites once, pairing each
// not-yet-seen elite with the next parent drawn from nmhm_parentEntitiesScore (iterated
// key-by-key via na_parentKeysWithDuplicates, lowest score first per the comment below).
// When the elite's brain score beats the parent's, the two entities swap slots.
public void Execute()
{
    // Guard set so each elite entity (by Entity.Index) is processed at most once.
    NativeHashMap<int, bool> nhm_checkedEliteEntities = new NativeHashMap<int, bool>(na_elities.Length, Allocator.Temp);

    // Cursor into the parent key list; isNextParentUniqueKey == true means the multi-map
    // iterator 'it' is exhausted for the current key and the next key must be fetched.
    int i_parentUniqueKeyIndex = 0;
    bool isNextParentUniqueKey = true;
    NativeMultiHashMapIterator<int> it = default;

    for (int i_eliteIndex = 0; i_eliteIndex < na_elities.Length; i_eliteIndex++)
    {
        EntityIndex currentEntityIndex = na_elities[i_eliteIndex];
        Entity currentEliteEntity = currentEntityIndex.entity;

        // Check if this entity has not been tested already.
        if (nhm_checkedEliteEntities.TryAdd(currentEliteEntity.Index, true))
        {
            int i_currentEntityIndex = currentEntityIndex.i_index;
            int i_currentPopulationBrainScore = a_brainScore[currentEliteEntity].i;

            EntityIndex parentIndex = default;

            // Look up through parents' scores, starting from lowest score ascending.
            if (!isNextParentUniqueKey && nmhm_parentEntitiesScore.TryGetNextValue(out parentIndex, ref it))
            {
                // Current key still has values; parentIndex/it advanced in place.
            }
            else
            {
                // Current key exhausted — move on to the next unique key below.
                isNextParentUniqueKey = true;
            }

            if (isNextParentUniqueKey && nmhm_parentEntitiesScore.TryGetFirstValue(na_parentKeysWithDuplicates[i_parentUniqueKeyIndex], out parentIndex, out it))
            {
                i_parentUniqueKeyIndex++;
                isNextParentUniqueKey = false;
            }

            // Parent is valid.
            // NOTE(review): Version > 0 is used as the "real entity" check — default
            // EntityIndex has Version 0; confirm no live entity can carry Version 0 here.
            if (!isNextParentUniqueKey && parentIndex.entity.Version > 0)
            {
                int i_parentPopulationBrainScore = a_brainScore[parentIndex.entity].i;

                if (i_currentPopulationBrainScore > i_parentPopulationBrainScore)
                {
                    // Elite beats this parent: put the elite into the parent slot...
                    na_parentPopulationEntities[parentIndex.i_index] = currentEliteEntity;
                    // Swap entities. (...and the displaced parent into the elite's slot.)
                    na_currentPopulationEntities[i_currentEntityIndex] = parentIndex.entity;
                }
            }

            if (isNextParentUniqueKey || i_parentUniqueKeyIndex >= na_parentKeysWithDuplicates.Length)
            {
                break; // No more parent entities.
            }
        }
    } // for

    nhm_checkedEliteEntities.Dispose();
}
// One-shot game initialization: loads countries/provinces/states/pops from data files,
// kicks off CPU jobs that turn the province color map into an id map plus per-province
// centroids, drives StateCountryProcessing through its manual steps, then publishes the
// resulting textures and disposes all temporary native containers.
private void Awake()
{
    // Disabling physics. Not used in project.
    Physics.autoSimulation = false;

    // Parsing Countries.
    var (_, tagLookup, _) = CountriesLoad.Names();

    // Creating StateCountryProcessing system
    var stateCountryProcessing = World.DefaultGameObjectInjectionWorld.GetOrCreateSystem(typeof(StateCountryProcessing));

    // (Goods parsing was here but is currently disabled.)

    // Parsing provinces
    // province pixel color -> province id; filled by DefinitionsLoad, read by the pixel jobs
    var colorLookup = new NativeHashMap<Color, int>(1, Allocator.TempJob);
    var oceanDefault = tagLookup["OCEAN"];
    // NOTE(review): provNames is unused in this method.
    var (provNames, idLookup, provEntityLookup) = DefinitionsLoad.Main(colorLookup, oceanDefault);

    // DEBUG: copy the serialized ProvinceMap into a readable, point-filtered texture
    // (replaces loading provinces.png from StreamingAssets).
    var map = new Texture2D(ProvinceMap.width, ProvinceMap.height, TextureFormat.RGBA32, false) { filterMode = FilterMode.Point };
    Graphics.CopyTexture(ProvinceMap, map);

    // Begin CPU pixel processing jobs.
    var colorMap = new NativeArray<Color32>(map.GetPixels32(), Allocator.TempJob);
    // province id -> indices of the pixels belonging to it (for the centroid pass)
    var pixelCollector = new NativeMultiHashMap<int, int>(colorMap.Length, Allocator.TempJob);
    var pixelHandle = new CollectPixels
    {
        ColorMap = colorMap,
        ColorLookup = colorLookup,
        Collector = pixelCollector.AsParallelWriter()
    }.Schedule(colorMap.Length, 32);

    // Parsing states
    var stateLookup = new NativeHashMap<int, int>(1, Allocator.TempJob);
    // NOTE(review): stateNames is unused in this method.
    var (stateNames, stateToProvReference, provToStateReference) = StatesLoad.Main(idLookup, stateLookup, provEntityLookup);
    BlobAssetReferences.Enqueue(stateToProvReference);
    BlobAssetReferences.Enqueue(provToStateReference);

    // Second pass: convert raw colors into the id map, chained after CollectPixels.
    var idMap = new NativeArray<Color32>(colorMap.Length, Allocator.TempJob);
    pixelHandle = new ProcessPixel
    {
        ColorLookup = colorLookup,
        ColorMap = colorMap,
        IdMap = idMap,
        StateLookup = stateLookup
    }.Schedule(colorMap.Length, 32, pixelHandle);
    // Deferred disposal: these are released once the job chain completes.
    stateLookup.Dispose(pixelHandle);
    colorMap.Dispose(pixelHandle);
    colorLookup.Dispose(pixelHandle);

    // Third pass: one centroid color per province, chained after ProcessPixel.
    var centroids = new NativeArray<Color32>(idLookup.Count, Allocator.TempJob);
    pixelHandle = new FindCentroid
    {
        Collector = pixelCollector,
        Width = ProvinceMap.width,
        Centroids = centroids
    }.Schedule(centroids.Length, 2, pixelHandle);
    pixelCollector.Dispose(pixelHandle);

    var (factories, maxEmploy) = AgentsLoad.Main();
    foreach (var blobAssetReference in factories)
    {
        BlobAssetReferences.Enqueue(blobAssetReference);
    }

    ProvinceLoad.Main(provEntityLookup, tagLookup, factories, maxEmploy, provToStateReference);
    // Pops load outputs a blob asset reference. Just inlining the two calls.
    BlobAssetReferences.Enqueue(PopsLoad.Main(provToStateReference));

    // Tag states that are not completely owned.
    // Also attaching owned states (plus incomplete which is duplicated) to countries.
    StateCountryProcessing.CallMethod = StateCountryProcessing.ManualMethodCall.TagOwnedStatesAndAttachToCountry;
    stateCountryProcessing.Update();

    // DEBUG: drive the remaining manual steps of the processing system.
    StateCountryProcessing.MarketIdentities = factories;
    StateCountryProcessing.MaxEmploy = maxEmploy;
    StateCountryProcessing.CallMethod = StateCountryProcessing.ManualMethodCall.SetDebugValues;
    stateCountryProcessing.Update();
    StateCountryProcessing.CallMethod = StateCountryProcessing.ManualMethodCall.DebugSpawnFactories;
    stateCountryProcessing.Update();
    StateCountryProcessing.CallMethod = StateCountryProcessing.ManualMethodCall.DisposeDebugFactoryTemplates;
    stateCountryProcessing.Update();

    // Deleting initialization system.
    World.DefaultGameObjectInjectionWorld.DestroySystem(stateCountryProcessing);

    // Block until the pixel job chain is done, then publish the textures.
    pixelHandle.Complete();
    map.SetPixels32(idMap.ToArray());
    map.Apply();
    var centroidTex = new Texture2D(centroids.Length, 1, TextureFormat.RGBA32, false) { filterMode = FilterMode.Point };
    centroidTex.SetPixels32(centroids.ToArray());
    centroidTex.Apply();
    LoadMap.MapTexture = map;
    ScalarSystem.IdMapTex = map;
    ScalarSystem.CentroidTex = centroidTex;

    idMap.Dispose();
    centroids.Dispose();
}
/// <summary>
/// Allocates the cache's storage: a dynamic blob-asset batch plus a blob-pointer remap table,
/// both owned by <paramref name="allocator"/>.
/// </summary>
public BlobAssetCache(Allocator allocator)
{
    BlobAssetRemap = new NativeHashMap<BlobAssetPtr, BlobAssetPtr>(1, allocator);
    BlobAssetBatch = DynamicBlobAssetBatch.Allocate(allocator);
}
// Runs the shade/fertilization step: builds two physics-driven lookup tables
// (entity -> shade, gamete -> pollen), then applies them to the gamete and shade groups.
// Fixes: the Translation lookup was fetched twice (the local was unused), and entity
// counts were obtained by materializing + disposing TempJob arrays — replaced with
// EntityQuery.CalculateEntityCount(), which this codebase already uses elsewhere.
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    // Component lookup shared with MakeShadeDict below.
    ComponentDataFromEntity<Translation> translations = GetComponentDataFromEntity<Translation>();

    // Only counts are needed here (to size the hash maps), so ask the queries directly.
    var shadesCount = GetEntityQuery(ComponentType.ReadOnly<Shade>()).CalculateEntityCount();
    var unfertilizedCount = GetEntityQuery(
        ComponentType.ReadOnly<TxAutotrophGamete>(),
        ComponentType.ReadOnly<TxAutotroph>()
    ).CalculateEntityCount();

    // entity -> shade value, filled by MakeShadeDict
    var shadeDict = new NativeHashMap<Entity, float>(shadesCount, Allocator.TempJob);
    // gamete entity -> fertilizing entity, filled by MakeTFertilizerDict
    var fertilizeDict = new NativeHashMap<Entity, Entity>(unfertilizedCount, Allocator.TempJob);

    JobHandle makeShadeDictJobHandle = new MakeShadeDict
    {
        translations = translations, // reuse the lookup fetched above
        txAutotrophPhenotype = GetComponentDataFromEntity<TxAutotrophPhenotype>(),
        environmentSettings = Environment.environmentSettings,
        shadeDict = shadeDict,
    }.Schedule(m_StepPhysicsWorldSystem.Simulation, ref m_BuildPhysicsWorldSystem.PhysicsWorld, inputDeps);

    JobHandle makeFertilizeDictJobHandle = new MakeTFertilizerDict()
    {
        txAutotrophChrome2AB = GetComponentDataFromEntity<TxAutotrophChrome2AB>(),
        TxAutotrophPollen = GetComponentDataFromEntity<TxAutotrophPollen>(),
        environmentSettings = Environment.environmentSettings,
        fertilizeDict = fertilizeDict,
        random = new Unity.Mathematics.Random(Environment.environmentSettings[0].random.NextUInt())
    }.Schedule(m_StepPhysicsWorldSystem.Simulation, ref m_BuildPhysicsWorldSystem.PhysicsWorld, inputDeps);

    // The dictionaries must be fully built before the consumer jobs run.
    makeFertilizeDictJobHandle.Complete();
    makeShadeDictJobHandle.Complete();

    JobHandle autotrophFertilize = new AutotrophFertilize()
    {
        fertilizeDict = fertilizeDict,
        txAutotrophChrome1AB = GetComponentDataFromEntity<TxAutotrophChrome1AB>(),
        txAutotrophChrome2AB = GetComponentDataFromEntity<TxAutotrophChrome2AB>(),
        txAutotrophPollen = GetComponentDataFromEntity<TxAutotrophPollen>()
    }.Schedule(m_GroupGamete, makeFertilizeDictJobHandle);

    JobHandle addShadeJobHandle = new AddShade
    {
        shadeDict = shadeDict,
    }.Schedule(m_GroupShade, makeShadeDictJobHandle);

    // Complete both consumers before disposing the maps they read.
    addShadeJobHandle.Complete();
    autotrophFertilize.Complete();
    shadeDict.Dispose();
    fertilizeDict.Dispose();

    return addShadeJobHandle;
}
// Wraps an existing body array and broadphase; the entity -> body-index map starts
// empty, sized to the number of bodies, and is persistently allocated.
internal CollisionWorld(NativeArray<RigidBody> bodies, Broadphase broadphase)
{
    Broadphase = broadphase;
    m_Bodies = bodies;
    EntityBodyIndexMap = new NativeHashMap<Entity, int>(m_Bodies.Length, Allocator.Persistent);
}
/// <summary>
/// Resolves <paramref name="guid"/> to its destination-world entity via
/// <paramref name="lookup"/> and returns that entity's shared component of type
/// <typeparamref name="T"/>. Throws if the guid is not present in the lookup.
/// </summary>
public T GetDstWorldSharedData<T>(EntityGuid guid, NativeHashMap<EntityGuid, Entity> lookup) where T : struct, ISharedComponentData
{
    var dstEntity = lookup[guid];
    return m_DstManager.GetSharedComponentData<T>(dstEntity);
}
/// <summary>
/// Allocates the store's three persistent tables (each with an initial capacity of 128):
/// blob data by hash, hashes grouped by owner id, and a per-hash reference counter.
/// </summary>
public BlobAssetStore()
{
    m_RefCounterPerBlobHash = new NativeHashMap<Hash128, int>(128, Allocator.Persistent);
    m_HashByOwner = new NativeMultiHashMap<int, Hash128>(128, Allocator.Persistent);
    m_BlobAssets = new NativeHashMap<Hash128, BlobAssetReferenceData>(128, Allocator.Persistent);
}
public sfloat CollisionTolerance => sfloat.FromRaw(0x3a83126f); // todo - make this configurable? // Construct a collision world with the given number of uninitialized rigid bodies public CollisionWorld(int numStaticBodies, int numDynamicBodies) { m_Bodies = new NativeArray <RigidBody>(numStaticBodies + numDynamicBodies, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); Broadphase = new Broadphase(numStaticBodies, numDynamicBodies); EntityBodyIndexMap = new NativeHashMap <Entity, int>(m_Bodies.Length, Allocator.Persistent); }
// Per-thread job body: filters Samples down to this Thread, compacts the set
// of stack frames those samples reference, sorts the frames by depth, and
// rewrites all frame indices (sample -> frame and frame -> caller) to match
// the new compacted ordering.
// NOTE(review): this span ends before the method's final closing braces (they
// sit outside the visible chunk).
public void Execute()
{
    {
        var collectedFrames = new NativeList<FrameSortData>(Allocator.Temp);
        // Doubles as a "seen" set during collection; real indices are
        // assigned after sorting (the -1 is only a placeholder).
        var oldToNewFrameIndex = new NativeHashMap<int, int>(StackFrames.Length, Allocator.Temp);
        var inSamples = (SampleData*)Samples.GetUnsafeReadOnlyPtr();
        var inFrames = (StackFrameData*)StackFrames.GetUnsafeReadOnlyPtr();
        // collect all samples in this thread and remap the stack frame indices
        for (int s = 0, n = Samples.Length; s < n; s++)
        {
            if (inSamples[s].ThreadIdx == Thread)
            {
                int frameIndex = inSamples[s].StackTrace;
                if (frameIndex != -1 && !oldToNewFrameIndex.TryGetValue(frameIndex, out int _))
                {
                    oldToNewFrameIndex.Add(frameIndex, -1);
                    collectedFrames.Add(new FrameSortData { Depth = inFrames[frameIndex].Depth, Index = frameIndex });
                }
                SamplesInThread.Add(inSamples[s]);
            }
        }
        // collect all remaining stack frames and remap the caller indices
        // NB this loop adds new entries to collectedFrames during the iteration,
        // so it transitively walks every ancestor frame as well
        for (int s = 0; s < collectedFrames.Length; s++)
        {
            int caller = inFrames[collectedFrames[s].Index].CallerStackFrame;
            if (caller != -1 && !oldToNewFrameIndex.TryGetValue(caller, out int _))
            {
                oldToNewFrameIndex.Add(caller, -1);
                collectedFrames.Add(new FrameSortData { Depth = inFrames[caller].Depth, Index = caller });
            }
        }
        // sort all frames by their depth so that the lowest depth stack frames are at the beginning
        NativeSortExtension.Sort((FrameSortData*)collectedFrames.GetUnsafePtr(), collectedFrames.Length, new FrameComp());
        // map old indices to new indices
        oldToNewFrameIndex.Clear();
        for (int i = 0; i < collectedFrames.Length; i++)
        {
            oldToNewFrameIndex.Add(collectedFrames[i].Index, i);
        }
        {
            // copy stack frames to output and adjust caller indices
            FramesInThread.ResizeUninitialized(collectedFrames.Length);
            var outputPtr = (StackFrameSamples*)FramesInThread.GetUnsafePtr();
            for (int i = 0, n = collectedFrames.Length; i < n; i++)
            {
                outputPtr[i] = new StackFrameSamples { FrameData = StackFrames[collectedFrames[i].Index], };
                // Rewrite the caller index in place through the output pointer.
                ref int caller = ref outputPtr[i].FrameData.CallerStackFrame;
                if (oldToNewFrameIndex.TryGetValue(caller, out var newCaller))
                {
                    caller = newCaller;
                }
            }
        }
        {
            // adjust stack frame references in the samples
            var outputPtr = (SampleData*)SamplesInThread.GetUnsafePtr();
            for (int i = 0, n = SamplesInThread.Length; i < n; i++)
            {
                ref int trace = ref outputPtr[i].StackTrace;
                if (oldToNewFrameIndex.TryGetValue(trace, out var newTrace))
                {
                    trace = newTrace;
                }
            }
        }
public RemapVisitor(NativeHashMap <T, T> remap) { AddAdapter(new Adapter(remap)); }
public Adapter(NativeHashMap <T, T> remap) { m_Remap = remap; }
// Reconciles hierarchy state for entities whose Parent component changed:
// gathers add/remove (parent, child) pairs and the set of affected parents,
// adds a Child buffer to parents that gained their first child (structural
// change), then fixes up each affected parent's Child buffer.
// Each job is completed immediately because the following step depends on
// its output (and step 5 performs a structural change on the main thread).
void UpdateChangeParents()
{
    if (m_ExistingParentsGroup.IsEmptyIgnoreFilter)
    {
        return;
    }

    // Upper bound used to size the temporary containers below.
    var count = m_ExistingParentsGroup.CalculateEntityCount();
    if (count == 0)
    {
        return;
    }

    // 1. Get (Parent,Child) to remove
    // 2. Get (Parent,Child) to add
    // 3. Get unique Parent change list
    // 4. Set PreviousParent to new Parent
    var parentChildrenToAdd = new NativeMultiHashMap<Entity, Entity>(count, Allocator.TempJob);
    var parentChildrenToRemove = new NativeMultiHashMap<Entity, Entity>(count, Allocator.TempJob);
    var uniqueParents = new NativeHashMap<Entity, int>(count, Allocator.TempJob);
    var gatherChangedParentsJob = new GatherChangedParents
    {
        ParentChildrenToAdd = parentChildrenToAdd.AsParallelWriter(),
        ParentChildrenToRemove = parentChildrenToRemove.AsParallelWriter(),
        UniqueParents = uniqueParents.AsParallelWriter(),
        PreviousParentType = GetArchetypeChunkComponentType<PreviousParent>(false),
        ParentType = GetArchetypeChunkComponentType<Parent>(true),
        EntityType = GetArchetypeChunkEntityType(),
        LastSystemVersion = LastSystemVersion
    };
    var gatherChangedParentsJobHandle = gatherChangedParentsJob.Schedule(m_ExistingParentsGroup);
    gatherChangedParentsJobHandle.Complete();

    // 5. (Structural change) Add any missing Child to Parents
    var parentsMissingChild = new NativeList<Entity>(Allocator.TempJob);
    var findMissingChildJob = new FindMissingChild
    {
        UniqueParents = uniqueParents,
        ChildFromEntity = GetBufferFromEntity<Child>(),
        ParentsMissingChild = parentsMissingChild
    };
    var findMissingChildJobHandle = findMissingChildJob.Schedule();
    findMissingChildJobHandle.Complete();

    EntityManager.AddComponent(parentsMissingChild.AsArray(), typeof(Child));

    // 6. Get Child[] for each unique Parent
    // 7. Update Child[] for each unique Parent
    var fixupChangedChildrenJob = new FixupChangedChildren
    {
        ParentChildrenToAdd = parentChildrenToAdd,
        ParentChildrenToRemove = parentChildrenToRemove,
        UniqueParents = uniqueParents,
        ChildFromEntity = GetBufferFromEntity<Child>()
    };
    var fixupChangedChildrenJobHandle = fixupChangedChildrenJob.Schedule();
    fixupChangedChildrenJobHandle.Complete();

    parentChildrenToAdd.Dispose();
    parentChildrenToRemove.Dispose();
    uniqueParents.Dispose();
    parentsMissingChild.Dispose();
}
public bool HasDstWorldData <T>(EntityGuid guid, NativeHashMap <EntityGuid, Entity> lookup) where T : struct { var e = lookup[guid]; return(m_DstManager.HasComponent <T>(e)); }
public void DisposeJob() { var container0 = new NativeHashMap <int, int>(1, Allocator.Persistent); var container1 = new NativeMultiHashMap <int, int>(1, Allocator.Persistent); var disposeJob = container1.Dispose(container0.Dispose(default));
public void SetDstWorldData <T>(EntityGuid guid, T value, NativeHashMap <EntityGuid, Entity> lookup) where T : struct, IComponentData { var e = lookup[guid]; m_DstManager.SetComponentData(e, value); }
public void Execute(Entity entity, int jobIndex, DynamicBuffer <Node> nodes, DynamicBuffer <State> states, DynamicBuffer <ContainedItemRef> containedItems, ref Agent agent) { //执行进度要处于正确的id上 var currentNode = nodes[agent.ExecutingNodeId]; if (!currentNode.Name.Equals(new NativeString64(nameof(CookAction)))) { return; } //从precondition里找原料 var inputItemNames = new NativeHashMap <NativeString64, int>(2, Allocator.Temp); for (var i = 0; i < states.Length; i++) { if ((currentNode.PreconditionsBitmask & (ulong)1 << i) <= 0) { continue; } var precondition = states[i]; if (precondition.Trait != typeof(ItemContainerTrait)) { continue; } var itemName = precondition.ValueString; Assert.IsFalse(itemName.Equals(new NativeString64())); if (!inputItemNames.ContainsKey(itemName)) { inputItemNames.TryAdd(itemName, 1); } else { inputItemNames[precondition.ValueString]++; } } //从effect获取产物 var outputItemName = new NativeString64(); for (var i = 0; i < states.Length; i++) { if ((currentNode.EffectsBitmask & (ulong)1 << i) <= 0) { continue; } var itemName = states[i].ValueString; Assert.IsFalse(itemName.Equals(new NativeString64())); outputItemName = itemName; break; } //从自身找到原料物品引用,并移除 //简便考虑,示例项目就不真的移除物品entity了 for (var i = containedItems.Length - 1; i >= 0; i--) { var containedItemRef = containedItems[i]; if (!inputItemNames.ContainsKey(containedItemRef.ItemName)) { continue; } if (inputItemNames[containedItemRef.ItemName] == 0) { continue; } containedItems.RemoveAt(i); } inputItemNames.Dispose(); //自身获得产物 //简便考虑,示例项目就不真的创建物品entity了 containedItems.Add(new ContainedItemRef { ItemName = outputItemName }); //通知执行完毕 Utils.NextAgentState <ReadyToAct, ReadyToNavigate>( entity, jobIndex, ref ECBuffer, agent, true); }
public PackedCollection(int capacity, Allocator label) { m_List = new NativeList <T>(capacity, label); m_Lookup = new NativeHashMap <T, int>(capacity, label); }
/// <summary>
/// Converts <paramref name="scene"/> into entity form and serializes it to
/// disk, one file per scene section, then writes the scene header and the
/// conversion log. Returns the per-section metadata that was written.
/// </summary>
/// <param name="scene">The Unity scene to convert.</param>
/// <param name="settings">Conversion settings; DestinationWorld is replaced
/// with a fresh conversion world, and a BlobAssetStore is created (and later
/// disposed) here if none was supplied.</param>
/// <param name="sectionRefObjs">Optional output list receiving the referenced
/// UnityEngine.Object container produced for each written section.</param>
public static SceneSectionData[] WriteEntityScene(Scene scene, GameObjectConversionSettings settings, List<ReferencedUnityObjects> sectionRefObjs = null)
{
    int framesToRetainBlobAssets = RetainBlobAssetsSetting.GetFramesToRetainBlobAssets(settings.BuildConfiguration);

    var world = new World("ConversionWorld");
    var entityManager = world.EntityManager;
    settings.DestinationWorld = world;

    bool disposeBlobAssetCache = false;
    if (settings.BlobAssetStore == null)
    {
        settings.BlobAssetStore = new BlobAssetStore();
        disposeBlobAssetCache = true;
    }

    // Capture the conversion journal just before the conversion world is torn down.
    List<(int, LogEventData)> journalData = null;
    settings.ConversionWorldPreDispose += conversionWorld =>
    {
        var mappingSystem = conversionWorld.GetExistingSystem<GameObjectConversionMappingSystem>();
        journalData = mappingSystem.JournalData.SelectLogEventsOrdered().ToList();
    };

    ConvertScene(scene, settings);
    EntitySceneOptimization.Optimize(world);

    // Register every component type in use (plus all systems) as asset-import
    // dependencies so the importer re-runs when they change.
    if (settings.AssetImportContext != null)
    {
        using (var allTypes = new NativeHashMap<ComponentType, int>(100, Allocator.Temp))
        using (var archetypes = new NativeList<EntityArchetype>(Allocator.Temp))
        {
            entityManager.GetAllArchetypes(archetypes);
            foreach (var archetype in archetypes)
            {
                using (var componentTypes = archetype.GetComponentTypes())
                    foreach (var componentType in componentTypes)
                    {
                        if (allTypes.TryAdd(componentType, 0))
                        {
                            TypeDependencyCache.AddDependency(settings.AssetImportContext, componentType);
                        }
                    }
            }
        }
        TypeDependencyCache.AddAllSystemsDependency(settings.AssetImportContext);
    }

    var sceneSections = new List<SceneSectionData>();

    var subSectionList = new List<SceneSection>();
    entityManager.GetAllUniqueSharedComponentData(subSectionList);
    var extRefInfoEntities = new NativeArray<Entity>(subSectionList.Count, Allocator.Temp);

    NativeArray<Entity> entitiesInMainSection;

    var sectionQuery = entityManager.CreateEntityQuery(
        new EntityQueryDesc
        {
            All = new[] { ComponentType.ReadWrite<SceneSection>() },
            Options = EntityQueryOptions.IncludePrefab | EntityQueryOptions.IncludeDisabled
        }
        );

    var sectionBoundsQuery = entityManager.CreateEntityQuery(
        new EntityQueryDesc
        {
            All = new[] { ComponentType.ReadWrite<SceneBoundingVolume>(), ComponentType.ReadWrite<SceneSection>() },
            Options = EntityQueryOptions.IncludePrefab | EntityQueryOptions.IncludeDisabled
        }
        );

    var sceneGUID = settings.SceneGUID;

    {
        var section = new SceneSection { SceneGUID = sceneGUID, Section = 0 };
        sectionQuery.SetSharedComponentFilter(new SceneSection { SceneGUID = sceneGUID, Section = 0 });
        sectionBoundsQuery.SetSharedComponentFilter(new SceneSection { SceneGUID = sceneGUID, Section = 0 });

        entitiesInMainSection = sectionQuery.ToEntityArray(Allocator.TempJob);

        var bounds = GetBoundsAndRemove(entityManager, sectionBoundsQuery);

        // Each section will be serialized in its own world, entities that don't have a section are part of the main scene.
        // An entity that holds the array of external references to the main scene is required for each section.
        // We need to create them all before we start moving entities to section scenes,
        // otherwise they would reuse entities that have been moved and mess up the remapping tables.
        for (int sectionIndex = 1; sectionIndex < subSectionList.Count; ++sectionIndex)
        {
            if (subSectionList[sectionIndex].Section == 0)
            {
                // Main section, the only one that doesn't need an external ref array
                continue;
            }
            var extRefInfoEntity = entityManager.CreateEntity();
            entityManager.AddSharedComponentData(extRefInfoEntity, subSectionList[sectionIndex]);
            extRefInfoEntities[sectionIndex] = extRefInfoEntity;
        }

        // Public references array, only on the main section.
        var refInfoEntity = entityManager.CreateEntity();
        entityManager.AddBuffer<PublicEntityRef>(refInfoEntity);
        entityManager.AddSharedComponentData(refInfoEntity, section);
        var publicRefs = entityManager.GetBuffer<PublicEntityRef>(refInfoEntity);

        // entityManager.Debug.CheckInternalConsistency();

        //@TODO do we need to keep this index? doesn't carry any additional info
        for (int i = 0; i < entitiesInMainSection.Length; ++i)
        {
            PublicEntityRef.Add(ref publicRefs, new PublicEntityRef { entityIndex = i, targetEntity = entitiesInMainSection[i] });
        }
        UnityEngine.Debug.Assert(publicRefs.Length == entitiesInMainSection.Length);

        // Save main section
        var sectionWorld = new World("SectionWorld");
        var sectionManager = sectionWorld.EntityManager;

        var entityRemapping = entityManager.CreateEntityRemapArray(Allocator.TempJob);
        sectionManager.MoveEntitiesFrom(entityManager, sectionQuery, entityRemapping);
        AddRetainBlobAssetsEntity(sectionManager, framesToRetainBlobAssets);

        // The section component is only there to break the conversion world into different sections
        // We don't want to store that on the disk
        //@TODO: Component should be removed but currently leads to corrupt data file. Figure out why.
        //sectionManager.RemoveComponent(sectionManager.UniversalQuery, typeof(SceneSection));

        var sectionFileSize = WriteEntityScene(sectionManager, sceneGUID, "0", settings, out var objectRefCount, out var objRefs);
        sectionRefObjs?.Add(objRefs);

        sceneSections.Add(new SceneSectionData
        {
            FileSize = sectionFileSize,
            SceneGUID = sceneGUID,
            ObjectReferenceCount = objectRefCount,
            SubSectionIndex = 0,
            BoundingVolume = bounds
        });

        entityRemapping.Dispose();
        sectionWorld.Dispose();
    }

    {
        // Index 0 is the default value of the shared component, not an actual section
        for (int subSectionIndex = 0; subSectionIndex < subSectionList.Count; ++subSectionIndex)
        {
            var subSection = subSectionList[subSectionIndex];
            if (subSection.Section == 0)
            {
                continue;
            }

            sectionQuery.SetSharedComponentFilter(subSection);
            sectionBoundsQuery.SetSharedComponentFilter(subSection);

            var bounds = GetBoundsAndRemove(entityManager, sectionBoundsQuery);

            var entitiesInSection = sectionQuery.ToEntityArray(Allocator.TempJob);

            if (entitiesInSection.Length > 0)
            {
                // Fetch back the external reference entity we created earlier to not disturb the mapping
                var refInfoEntity = extRefInfoEntities[subSectionIndex];
                entityManager.AddBuffer<ExternalEntityRef>(refInfoEntity);
                var externRefs = entityManager.GetBuffer<ExternalEntityRef>(refInfoEntity);

                // Store the mapping to everything in the main section
                //@TODO maybe we don't need all that? is this worth worrying about?
                for (int i = 0; i < entitiesInMainSection.Length; ++i)
                {
                    ExternalEntityRef.Add(ref externRefs, new ExternalEntityRef { entityIndex = i });
                }

                var entityRemapping = entityManager.CreateEntityRemapArray(Allocator.TempJob);

                // Entities will be remapped to a contiguous range in the section world, but they will
                // also come with an unpredictable amount of meta entities. We have the guarantee that
                // the entities in the main section won't be moved over, so there's a free range of that
                // size at the end of the remapping table. So we use that range for external references.
                var externEntityIndexStart = entityRemapping.Length - entitiesInMainSection.Length;

                entityManager.AddComponentData(refInfoEntity, new ExternalEntityRefInfo { SceneGUID = sceneGUID, EntityIndexStart = externEntityIndexStart });

                var sectionWorld = new World("SectionWorld");
                var sectionManager = sectionWorld.EntityManager;

                // Insert mapping for external references, conversion world entity to virtual index in section
                for (int i = 0; i < entitiesInMainSection.Length; ++i)
                {
                    EntityRemapUtility.AddEntityRemapping(ref entityRemapping, entitiesInMainSection[i], new Entity { Index = i + externEntityIndexStart, Version = 1 });
                }

                sectionManager.MoveEntitiesFrom(entityManager, sectionQuery, entityRemapping);

                AddRetainBlobAssetsEntity(sectionManager, framesToRetainBlobAssets);

                // Now that all the required entities have been moved over, we can get rid of the gap between
                // real entities and external references. This allows remapping during load to deal with a
                // smaller remap table, containing only useful entries.
                int highestEntityIndexInUse = 0;
                for (int i = 0; i < externEntityIndexStart; ++i)
                {
                    var targetIndex = entityRemapping[i].Target.Index;
                    if (targetIndex < externEntityIndexStart && targetIndex > highestEntityIndexInUse)
                    {
                        highestEntityIndexInUse = targetIndex;
                    }
                }

                var oldExternEntityIndexStart = externEntityIndexStart;
                externEntityIndexStart = highestEntityIndexInUse + 1;

                sectionManager.SetComponentData
                (
                    EntityRemapUtility.RemapEntity(ref entityRemapping, refInfoEntity),
                    new ExternalEntityRefInfo
                    {
                        SceneGUID = sceneGUID,
                        EntityIndexStart = externEntityIndexStart
                    }
                );

                // When writing the scene, references to missing entities are set to Entity.Null by default
                // (but only if they have been used, otherwise they remain untouched)
                // We obviously don't want that to happen to our external references, so we add explicit mapping
                // And at the same time, we put them back at the end of the effective range of real entities.
                for (int i = 0; i < entitiesInMainSection.Length; ++i)
                {
                    var src = new Entity { Index = i + oldExternEntityIndexStart, Version = 1 };
                    var dst = new Entity { Index = i + externEntityIndexStart, Version = 1 };
                    EntityRemapUtility.AddEntityRemapping(ref entityRemapping, src, dst);
                }

                // The section component is only there to break the conversion world into different sections
                // We don't want to store that on the disk
                //@TODO: Component should be removed but currently leads to corrupt data file. Figure out why.
                //sectionManager.RemoveComponent(sectionManager.UniversalQuery, typeof(SceneSection));

                var fileSize = WriteEntityScene(sectionManager, sceneGUID, subSection.Section.ToString(), settings, out var objectRefCount, out var objRefs, entityRemapping);
                sectionRefObjs?.Add(objRefs);

                sceneSections.Add(new SceneSectionData
                {
                    FileSize = fileSize,
                    SceneGUID = sceneGUID,
                    ObjectReferenceCount = objectRefCount,
                    SubSectionIndex = subSection.Section,
                    BoundingVolume = bounds
                });

                entityRemapping.Dispose();
                sectionWorld.Dispose();
            }

            entitiesInSection.Dispose();
        }
    }

    {
        // Warn about entities that will be silently dropped from serialization.
        var noSectionQuery = entityManager.CreateEntityQuery(
            new EntityQueryDesc
            {
                None = new[] { ComponentType.ReadWrite<SceneSection>() },
                Options = EntityQueryOptions.IncludePrefab | EntityQueryOptions.IncludeDisabled
            }
            );
        if (noSectionQuery.CalculateEntityCount() != 0)
        {
            Debug.LogWarning($"{noSectionQuery.CalculateEntityCount()} entities in the scene '{scene.path}' had no SceneSection and as a result were not serialized at all.");
        }
    }

    sectionQuery.Dispose();
    sectionBoundsQuery.Dispose();
    entitiesInMainSection.Dispose();
    world.Dispose();

    // Save the new header
    var sceneSectionsArray = sceneSections.ToArray();
    WriteHeader(sceneGUID, sceneSectionsArray, scene.name, settings.AssetImportContext);

    // If we are writing assets to assets folder directly, then we need to make sure the asset database see them so they can be loaded.
    if (settings.AssetImportContext == null)
    {
        AssetDatabase.Refresh();
    }

    if (disposeBlobAssetCache)
    {
        settings.BlobAssetStore.Dispose();
    }

    // Save the log of issues that happened during conversion
    WriteConversionLog(sceneGUID, journalData, scene.name, settings.AssetImportContext);

    return(sceneSectionsArray);
}
// For every rigid-body entity whose leaf shapes changed this frame, rebuilds
// its PhysicsCollider: merges the changed leaves into the persistent
// m_AllLeafCollidersByBody map, gathers all current children, and either uses
// the single leaf blob directly or bakes the children into a compound collider
// (deduplicated via a hash of child hashes + offsets).
protected override void OnUpdate()
{
    Profiler.BeginSample("Build Compound Colliders");

    // Assign PhysicsCollider components to rigid body entities, merging multiples into compounds as needed
    var changedBodies = m_ChangedLeavesByBody.GetUniqueKeyArray(Allocator.Temp);
    using (changedBodies.Item1)
    {
        // Loop through all the bodies that changed
        for (var k = 0; k < changedBodies.Item2; ++k)
        {
            var body = changedBodies.Item1[k];
            // Reuse the existing PhysicsCollider when present so unrelated fields survive.
            var collider = DstEntityManager.HasComponent<PhysicsCollider>(body) ? DstEntityManager.GetComponentData<PhysicsCollider>(body) : new PhysicsCollider();
            var children = new NativeHashMap<ColliderInstanceId, ChildInstance>(16, Allocator.Temp);
            var isLeafEntityBody = true;

            // The current body that changed may have one or more shape to process, loop through all of them
            if (m_ChangedLeavesByBody.TryGetFirstValue(body, out var shape, out var iterator))
            {
                do
                {
                    var replaced = false;

                    // Look for existing known shape. For this there is no magic, O(n) scan on the body's shapes
                    if (m_AllLeafCollidersByBody.TryGetFirstValue(body, out var existingShape, out var existingIterator))
                    {
                        do
                        {
                            // If the current child is the one we care about then replace its associated data
                            if (existingShape.ShapeEntity.Equals(shape.ShapeEntity))
                            {
                                m_AllLeafCollidersByBody.SetValue(shape, existingIterator);
                                replaced = true;
                                break;
                            }
                        } while (m_AllLeafCollidersByBody.TryGetNextValue(out existingShape, ref existingIterator));
                    }

                    // Add the shape if it did not exist already
                    if (!replaced)
                    {
                        m_AllLeafCollidersByBody.Add(body, shape);
                    }

                    // Add the shape to the list of children to process later
                    if (BlobComputationContext.GetBlobAsset(shape.Hash, out var blobAsset))
                    {
                        var child = new ChildInstance
                        {
                            Hash = shape.Hash,
                            Child = new CompoundCollider.ColliderBlobInstance
                            {
                                Collider = blobAsset,
                                CompoundFromChild = shape.BodyFromShape
                            }
                        };
                        children.Add(shape.ToColliderInstanceId(), child);
                        // Track whether every shape lives directly on the body entity itself.
                        isLeafEntityBody &= shape.ShapeEntity.Equals(body);
                    }
                    else
                    {
                        var gameObject = m_EndColliderConversionSystem.GetConvertedAuthoringComponent(shape.ConvertedAuthoringComponentIndex).gameObject;
                        Debug.LogWarning($"Couldn't convert Collider for GameObject '{gameObject.name}'.");
                    }
                } while (m_ChangedLeavesByBody.TryGetNextValue(out shape, ref iterator));
            }

            // Add all children that did not change
            if (m_AllLeafCollidersByBody.TryGetFirstValue(body, out shape, out var it))
            {
                do
                {
                    isLeafEntityBody &= shape.ShapeEntity.Equals(body);
                    if (BlobComputationContext.GetBlobAsset(shape.Hash, out var blobAsset))
                    {
                        var child = new ChildInstance
                        {
                            Hash = shape.Hash,
                            Child = new CompoundCollider.ColliderBlobInstance
                            {
                                Collider = blobAsset,
                                CompoundFromChild = shape.BodyFromShape
                            }
                        };
                        // TryAdd: changed shapes were already inserted above and win.
                        children.TryAdd(shape.ToColliderInstanceId(), child);
                    }
                } while (m_AllLeafCollidersByBody.TryGetNextValue(out shape, ref it));
            }

            // Get the list of colliders to (re)build
            var colliders = children.GetValueArray(Allocator.TempJob);

            // If the leaf is the same entity as the body, and there is a single shape, use it as-is; otherwise create a compound
            // (assume a single leaf should still be a compound so that local offset values in authoring representation are retained)
            if (colliders.Length > 0)
            {
                if (isLeafEntityBody && colliders.Length == 1)
                {
                    collider.Value = colliders[0].Child.Collider;
                }
                else
                {
                    // otherwise it is a compound
                    var childHashes = new NativeArray<Hash128>(colliders.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
                    var childOffsets = new NativeArray<RigidTransform>(colliders.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
                    var childBlobs = new NativeArray<CompoundCollider.ColliderBlobInstance>(colliders.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
                    // children.Length equals colliders.Length here (colliders is children's value array).
                    for (var i = 0; i < children.Length; ++i)
                    {
                        childHashes[i] = colliders[i].Hash;
                        childOffsets[i] = colliders[i].Child.CompoundFromChild;
                        childBlobs[i] = colliders[i].Child;
                    }

                    Profiler.BeginSample("Generate Hash for Compound");
                    var compoundHash = new NativeArray<Hash128>(1, Allocator.TempJob);
                    new HashChildrenJob { ChildHashes = childHashes, ChildOffsets = childOffsets, Output = compoundHash }.Run();
                    Profiler.EndSample();

                    var gameObject = m_EndColliderConversionSystem.GetConvertedAuthoringComponent(shape.ConvertedBodyTransformIndex).gameObject;
                    BlobComputationContext.AssociateBlobAssetWithUnityObject(compoundHash[0], gameObject);

                    // Reuse an already-computed compound blob when the hash matches;
                    // otherwise bake a new one and register it.
                    if (!BlobComputationContext.NeedToComputeBlobAsset(compoundHash[0]))
                    {
                        BlobComputationContext.GetBlobAsset(compoundHash[0], out collider.Value);
                    }
                    else
                    {
                        BlobComputationContext.AddBlobAssetToCompute(compoundHash[0], 0);
                        using (var compound = new NativeArray<BlobAssetReference<Collider>>(1, Allocator.TempJob))
                        {
                            new CreateCompoundJob { Children = childBlobs, Output = compound }.Run();

                            collider.Value = compound[0];
                            BlobComputationContext.AddComputedBlobAsset(compoundHash[0], collider.Value);
                        }
                    }

                    compoundHash.Dispose();
                    childBlobs.Dispose();
                }
            }

            colliders.Dispose();
            children.Dispose();

            DstEntityManager.AddOrSetComponent(body, collider);
        }
    }

    m_ChangedLeavesByBody.Clear();

    Profiler.EndSample();
}
protected override void OnCreateManager() { base.OnCreateManager(); agents = new NativeHashMap <int, Entity>(maxLimit + 300, Allocator.Persistent); Enabled = false; }
// Builds the maze: one wall entity per position in `p`, a ground plane, and
// the singleton communicator/control entities other systems use to share
// state and support a clean scene restart.
void CreateMaze()
{
    // Allocate space for the entities.
    NativeArray<Entity> entities = new NativeArray<Entity>(p.Count, Allocator.Temp);
    // Create them all in one batch call — much faster than one-by-one creation.
    entityManager.CreateEntity(entityArchetype, entities);
    // Hash grid (cell index -> wall present) so systems can test for walls
    // without iterating every wall entity; a plain array would mostly be empty.
    hashGrid = new NativeHashMap<int, bool>(entities.Length, Allocator.Persistent);
    // Set the required components on every wall.
    for (int i = 0; i < entities.Length; i++)
    {
        Entity entity = entities[i];
        entityManager.SetComponentData(entity, new Translation { Value = new float3(p[i].x, p[i].y + 0.5f, p[i].z) });
        entityManager.SetComponentData(entity, new NonUniformScale { Value = new float3(1, 2, 1) });
        entityManager.SetSharedComponentData(entity, new RenderMesh { mesh = mesh, material = mat, castShadows = UnityEngine.Rendering.ShadowCastingMode.Off });
        // Index like a flattened 2D array: z * width + x.
        hashGrid.TryAdd((int)p[i].z * width + (int)p[i].x, true);
    }
    entities.Dispose(); // Temp array must always be disposed.
    // hashGrid is intentionally NOT disposed here: it is Persistent and needed
    // for the lifetime of the maze (the leak-detection warning is expected).

    // Create the ground plane.
    Entity ground = entityManager.CreateEntity(entityArchetype);
    entityManager.SetComponentData(ground, new Translation { Value = new float3(width / 2, -1, height / 2) });
    entityManager.SetSharedComponentData(ground, new RenderMesh { mesh = mesh, material = mat });
    entityManager.SetComponentData(ground, new NonUniformScale { Value = new float3(width, 1, height) });

    // Communicator entity: shares a few values between the systems.
    var Communicator = entityManager.CreateArchetype(
        typeof(Communicator),
        typeof(Translation)
        );
    Entity comEntity = entityManager.CreateEntity(Communicator);
    entityManager.SetComponentData(comEntity, new Translation { Value = new float3(0, 0, 0) });
    entityManager.SetComponentData(comEntity, new Communicator { generationFinished = true, numberOfWalls = p.Count, numberOfEnemies = numberOfEnemies, width = width, height = height, random = UnityEngine.Random.Range(0, 100) });

    // EnemyControl and PlayerControl entities carry run/initialized flags that
    // give the enemy/player systems restartable state when the scene reloads.
    var EnemyControl = entityManager.CreateArchetype(
        typeof(EnemyControl),
        typeof(Translation)
        );
    Entity enemyEntity = entityManager.CreateEntity(EnemyControl);
    entityManager.SetComponentData(enemyEntity, new Translation { Value = new float3(0, 0, 0) });
    entityManager.SetComponentData(enemyEntity, new EnemyControl { run = true, initialized = false });

    var PlayerControl = entityManager.CreateArchetype(
        typeof(PlayerControl),
        typeof(Translation)
        );
    Entity playerEntity = entityManager.CreateEntity(PlayerControl);
    entityManager.SetComponentData(playerEntity, new Translation { Value = new float3(0, 0, 0) });
    entityManager.SetComponentData(playerEntity, new PlayerControl { run = true, initialized = false });
}
protected override void OnCreate() => needsSurfaceMap = new NativeHashMap <int, bool>( NavConstants.NEEDS_SURFACE_MAP_SIZE, Allocator.Persistent );
private void Boot() { destroyedEntities = World.GetExistingSystem <NetworkSystem>().DestroyedEntities; booted = false; }
// Diffs SrcChunks against DstChunks by chunk sequence number:
//  - chunks only in dst                -> destroyed (flags None)
//  - chunks in both worlds but changed -> destroyed + re-created (flags Cloned)
//  - chunks only in src                -> created (flags Cloned)
// A running entity-count prefix sum is recorded alongside each entry, and the
// grand totals are appended once more as the final element of each count list.
public void Execute()
{
    var visitedChunks = new NativeHashMap<ulong, int>(1, Allocator.Temp);

    var createdChunkEntityCounts = 0;
    var destroyedChunkEntityCount = 0;

    // Scan through the destination chunks.
    for (var i = 0; i < DstChunks.Length; i++)
    {
        var dstChunk = DstChunks[i];
        var srcChunk = default(ArchetypeChunk);

        // Look for a matching chunk in the source world.
        SrcChunksBySequenceNumber.TryGetValue(dstChunk.m_Chunk->SequenceNumber, out srcChunk);

        if (srcChunk.m_Chunk == null)
        {
            // This chunk exists in the destination world but NOT in the source world.
            // This means the chunk was simply destroyed.
            DestroyedChunks.Add(dstChunk);
            DestroyedChunkFlags.Add(ArchetypeChunkChangeFlags.None);
            DestroyedChunkEntityCounts.Add(destroyedChunkEntityCount);
            destroyedChunkEntityCount += dstChunk.m_Chunk->Count;
        }
        else
        {
            if (ChunksAreDifferent(dstChunk.m_Chunk, srcChunk.m_Chunk))
            {
                // The chunk exists in both worlds, but it has been changed in some way.
                // Treat this chunk as being destroyed and re-created.
                DestroyedChunks.Add(dstChunk);
                DestroyedChunkFlags.Add(ArchetypeChunkChangeFlags.Cloned);
                DestroyedChunkEntityCounts.Add(destroyedChunkEntityCount);
                destroyedChunkEntityCount += dstChunk.m_Chunk->Count;

                CreatedChunks.Add(srcChunk);
                CreatedChunkFlags.Add(ArchetypeChunkChangeFlags.Cloned);
                CreatedChunkEntityCounts.Add(createdChunkEntityCounts);
                createdChunkEntityCounts += srcChunk.m_Chunk->Count;
            }

            // Mark the source chunk as handled so the second pass skips it.
            visitedChunks.TryAdd(srcChunk.m_Chunk->SequenceNumber, 1);
        }
    }

    // Scan through the source chunks.
    for (var i = 0; i < SrcChunks.Length; i++)
    {
        var srcChunk = SrcChunks[i];

        // We only care about chunks we have not visited yet.
        if (!visitedChunks.TryGetValue(srcChunk.m_Chunk->SequenceNumber, out _))
        {
            // This chunk exists in the source world but NOT in the destination world.
            // This means the chunk was created.
            CreatedChunks.Add(srcChunk);
            CreatedChunkFlags.Add(ArchetypeChunkChangeFlags.Cloned);
            CreatedChunkEntityCounts.Add(createdChunkEntityCounts);
            createdChunkEntityCounts += srcChunk.m_Chunk->Count;
        }
    }

    // Append the grand totals as the final element of each count list.
    CreatedChunkEntityCounts.Add(createdChunkEntityCounts);
    DestroyedChunkEntityCounts.Add(destroyedChunkEntityCount);
}
// Per-turn "escape" movement: humans that can see a MoveEscapeTarget (e.g. a
// zombie) step one grid cell away from the average direction of visible targets.
// NOTE(review): this block is truncated in the visible chunk — the
// MoveEscapeTargets ForEach below is never scheduled here, and the method never
// closes; presumably a .ScheduleParallel(movementBarrierHandle) plus final
// Dependency assignment follow in the full file. Confirm against the original.
protected override void OnUpdate() {
    // Wait for the collidable hash maps built by HashCollidablesSystem before
    // any job here reads them.
    Dependency = JobHandle.CombineDependencies(
        Dependency,
        World.GetExistingSystem<HashCollidablesSystem>().m_StaticCollidableHashMapJobHandle,
        World.GetExistingSystem<HashCollidablesSystem>().m_DynamicCollidableHashMapJobHandle
    );
    var staticCollidableHashMap = World.GetExistingSystem<HashCollidablesSystem>().m_StaticCollidableHashMap;
    var dynamicCollidableHashMap = World.GetExistingSystem<HashCollidablesSystem>().m_DynamicCollidableHashMap;

    // Map from hashed exact grid cell -> (arbitrary) entity index, one entry per
    // escape target; used for fine-grained membership tests.
    var moveEscapeTargetCount = m_MoveEscapeTargetQuery.CalculateEntityCount();
    var moveEscapeTargetHashMap = new NativeHashMap<int, int>(moveEscapeTargetCount, Allocator.TempJob);
    var moveEscapeTargetParallelWriter = moveEscapeTargetHashMap.AsParallelWriter();

    // We need either "(X * Y) / visionDistance" or "numUnitsToEscapeFrom" hash buckets, whichever is smaller
    var viewDistance = GameController.instance.humanVisionDistance;
    var humanVisionHashMapCellSize = viewDistance * 2 + 1;
    var humanVisionHashMap = new NativeHashMap<int, int>(moveEscapeTargetCount, Allocator.TempJob);
    var humanVisionParallelWriter = humanVisionHashMap.AsParallelWriter();

    // Bucket each escape target by its exact grid position.
    var hashMoveEscapeTargetGridPositionsJobHandle = Entities
        .WithName("HashMoveEscapeTargetGridPositions")
        .WithAll<MoveEscapeTarget>()
        .WithStoreEntityQueryInField(ref m_MoveEscapeTargetQuery)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in GridPosition gridPosition) => {
            var hash = (int)math.hash(gridPosition.Value);
            moveEscapeTargetParallelWriter.TryAdd(hash, entityInQueryIndex);
        })
        .ScheduleParallel(Dependency);

    // Bucket each escape target by a coarse cell of humanVisionHashMapCellSize,
    // so a human can cheaply test "any target possibly within view?".
    var hashMoveEscapeTargetVisionJobHandle = Entities
        .WithName("HashMoveEscapeTargetVision")
        .WithAll<MoveEscapeTarget>()
        .WithStoreEntityQueryInField(ref m_MoveEscapeTargetQuery)
        .WithBurst()
        .ForEach((int entityInQueryIndex, in GridPosition gridPosition) => {
            var hash = (int)math.hash(gridPosition.Value / humanVisionHashMapCellSize);
            humanVisionParallelWriter.TryAdd(hash, entityInQueryIndex);
        })
        .ScheduleParallel(Dependency);

    // Both hashing jobs must finish before the movement job reads the maps.
    var movementBarrierHandle = JobHandle.CombineDependencies(
        Dependency, hashMoveEscapeTargetGridPositionsJobHandle, hashMoveEscapeTargetVisionJobHandle
    );

    // NOTE(review): moveEscapeTargetsJobHandle is assigned from a ForEach chain
    // that is cut off below — the .ScheduleParallel call is outside this view.
    var moveEscapeTargetsJobHandle = Entities
        .WithName("MoveEscapeTargets")
        .WithAll<LineOfSight>()
        .WithReadOnly(staticCollidableHashMap)
        .WithReadOnly(dynamicCollidableHashMap)
        .WithReadOnly(moveEscapeTargetHashMap)
        .WithReadOnly(humanVisionHashMap)
        .WithDisposeOnCompletion(moveEscapeTargetHashMap)
        .WithDisposeOnCompletion(humanVisionHashMap)
        .WithBurst()
        .ForEach((ref NextGridPosition nextGridPosition, in TurnsUntilActive turnsUntilActive, in GridPosition gridPosition) => {
            // Only units acting this turn move.
            if (turnsUntilActive.Value != 1) {
                return;
            }

            int3 myGridPositionValue = gridPosition.Value;
            // int3 -> float3 via Unity.Mathematics implicit conversion.
            float3 averageTarget = new int3(0, 0, 0);
            bool moved = false;

            // Coarse test: check our own vision bucket plus the four buckets that
            // the corners of our view square can fall into.
            bool foundTarget = humanVisionHashMap.TryGetValue((int)math.hash(myGridPositionValue / humanVisionHashMapCellSize), out _) ||
                humanVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x - viewDistance, myGridPositionValue.y, myGridPositionValue.z - viewDistance) / humanVisionHashMapCellSize), out _) ||
                humanVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x + viewDistance, myGridPositionValue.y, myGridPositionValue.z - viewDistance) / humanVisionHashMapCellSize), out _) ||
                humanVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x - viewDistance, myGridPositionValue.y, myGridPositionValue.z + viewDistance) / humanVisionHashMapCellSize), out _) ||
                humanVisionHashMap.TryGetValue((int)math.hash(new int3(myGridPositionValue.x + viewDistance, myGridPositionValue.y, myGridPositionValue.z + viewDistance) / humanVisionHashMapCellSize), out _);

            if (foundTarget) {
                foundTarget = false;
                int targetCount = 0;
                // Fine test: walk the rings of the view square outward and build a
                // running average of the offsets to visible targets.
                for (int checkDist = 1; checkDist <= viewDistance; checkDist++) {
                    for (int z = -checkDist; z <= checkDist; z++) {
                        for (int x = -checkDist; x <= checkDist; x++) {
                            // Only cells on the ring's perimeter.
                            if (math.abs(x) == checkDist || math.abs(z) == checkDist) {
                                int3 targetGridPosition = new int3(myGridPositionValue.x + x, myGridPositionValue.y, myGridPositionValue.z + z);
                                int targetKey = (int)math.hash(targetGridPosition);
                                if (moveEscapeTargetHashMap.TryGetValue(targetKey, out _)) {
                                    // Check if we have line of sight to the target
                                    if (LineOfSightUtilities.InLineOfSight(myGridPositionValue, targetGridPosition, staticCollidableHashMap)) {
                                        averageTarget = averageTarget * targetCount + new float3(x, 0, z);
                                        targetCount++;
                                        averageTarget /= targetCount;
                                        foundTarget = true;
                                    }
                                }
                            }
                        }
                    }
                }
            }

            if (foundTarget) {
                // Negate x/z to flee AWAY from the average target direction.
                int3 direction = new int3((int)-averageTarget.x, (int)averageTarget.y, (int)-averageTarget.z);

                // Check if space is already occupied
                int moveLeftKey = (int)math.hash(new int3(myGridPositionValue.x - 1, myGridPositionValue.y, myGridPositionValue.z));
                int moveRightKey = (int)math.hash(new int3(myGridPositionValue.x + 1, myGridPositionValue.y, myGridPositionValue.z));
                int moveDownKey = (int)math.hash(new int3(myGridPositionValue.x, myGridPositionValue.y, myGridPositionValue.z - 1));
                int moveUpKey = (int)math.hash(new int3(myGridPositionValue.x, myGridPositionValue.y, myGridPositionValue.z + 1));
                if (math.abs(direction.x) >= math.abs(direction.z)) {
                    // Move horizontally
                    if (direction.x < 0) {
                        if (!staticCollidableHashMap.TryGetValue(moveLeftKey, out _) &&
                            !dynamicCollidableHashMap.TryGetValue(moveLeftKey, out _)) {
                            myGridPositionValue.x--;
                            moved = true;
                        }
                    } else {
                        if (!staticCollidableHashMap.TryGetValue(moveRightKey, out _) &&
                            !dynamicCollidableHashMap.TryGetValue(moveRightKey, out _)) {
                            myGridPositionValue.x++;
                            moved = true;
                        }
                    }
                }
                // Unit maybe wanted to move horizontally but couldn't, so check if it wants to move vertically
                if (!moved) {
                    // Move vertically
                    if (direction.z < 0) {
                        if (!staticCollidableHashMap.TryGetValue(moveDownKey, out _) &&
                            !dynamicCollidableHashMap.TryGetValue(moveDownKey, out _)) {
                            myGridPositionValue.z--;
                            moved = true;
                        }
                    } else {
                        if (!staticCollidableHashMap.TryGetValue(moveUpKey, out _) &&
                            !dynamicCollidableHashMap.TryGetValue(moveUpKey, out _)) {
                            myGridPositionValue.z++;
                            moved = true;
                        }
                    }
                }
            }

            nextGridPosition = new NextGridPosition { Value = myGridPositionValue };
        })
// Groups connections into connected "networks" by iterative color merging:
// each connection entity receives a color; colliding colors are unioned through
// the colorToColor redirection table (a crude union-find without path
// compression). One network entity is then created per canonical color, each
// connection is tagged with that network's shared component, and a NetAdjust
// cost entry is appended to the network's buffer.
// NOTE(review): the pairwise loop never consults the Connection data itself —
// the original read connections[i]/connections[j] into locals that were unused —
// so as written EVERY pair of connections is merged and all connections collapse
// into one color. An adjacency test (shared start/end node) looks intended;
// confirm against the full file before relying on this behavior.
protected override void OnUpdate() {
    var conToColor = new NativeHashMap<Entity, int>(SystemConstants.MapNodeSize, Allocator.Temp);
    var colorToColor = new NativeHashMap<int, int>(SystemConstants.MapNodeSize, Allocator.Temp);
    int newColor = 0;

    var connections = _query.ToComponentDataArray<Connection>(Allocator.TempJob);
    var conEnts = _query.ToEntityArray(Allocator.TempJob);
    for (int i = 0; i < connections.Length; i++) {
        var entA = conEnts[i];
        for (int j = 0; j < connections.Length; j++) {
            var entB = conEnts[j];
            // int.MaxValue is the sentinel for "not colored yet".
            if (!conToColor.TryGetValue(entA, out int startColor)) {
                startColor = int.MaxValue;
            }
            if (!conToColor.TryGetValue(entB, out int endColor)) {
                endColor = int.MaxValue;
            }

            if (startColor == endColor) {
                // Both uncolored: start a fresh color for the pair.
                // (If both already share a real color there is nothing to do.)
                if (startColor == int.MaxValue) {
                    conToColor.TryAdd(entA, newColor);
                    conToColor.TryAdd(entB, newColor);
                    newColor++;
                }
            } else {
                // Colors differ: merge the larger color into the smaller one.
                int minColor = math.min(startColor, endColor);
                int maxColor = math.max(startColor, endColor);
                var changedCon = startColor > endColor ? entA : entB;

                // Chase redirections to the canonical color.
                int trueColor = minColor;
                while (colorToColor.TryGetValue(trueColor, out int nextColor)) {
                    trueColor = nextColor;
                }

                // Re-color the connection that held the larger color. It is only
                // present in the map when maxColor is a real color, not the sentinel.
                if (maxColor < int.MaxValue) {
                    conToColor.Remove(changedCon);
                }
                conToColor.TryAdd(changedCon, trueColor);

                // Record the redirection maxColor -> trueColor for later chases.
                if (maxColor < int.MaxValue) {
                    if (colorToColor.TryGetValue(maxColor, out _)) {
                        colorToColor.Remove(maxColor);
                    }
                    colorToColor.TryAdd(maxColor, trueColor);
                }
            }
        }
    }
    connections.Dispose();
    conEnts.Dispose();

    if (conToColor.Length > 0) {
        var finalColor = new NativeHashMap<Entity, int>(SystemConstants.MapNodeSize, Allocator.Temp);
        var colorToNetwork = new NativeHashMap<int, Entity>(SystemConstants.MapNodeSize, Allocator.Temp);

        // Resolve every connection to its canonical color and create one network
        // entity per distinct canonical color.
        var keys = conToColor.GetKeyArray(Allocator.Temp);
        var values = conToColor.GetValueArray(Allocator.Temp);
        for (int i = 0; i < keys.Length; i++) {
            var con = keys[i];
            int trueColor = values[i];
            while (colorToColor.TryGetValue(trueColor, out int nextColor)) {
                trueColor = nextColor;
            }
            finalColor.TryAdd(con, trueColor);
            if (!colorToNetwork.TryGetValue(trueColor, out var network)) {
                network = PostUpdateCommands.CreateEntity(_networkArchetype);
                colorToNetwork.TryAdd(trueColor, network);
            }
        }
        keys.Dispose();
        values.Dispose();

        // Tag each not-yet-grouped connection with its network's shared component
        // and append its traversal cost to the network's NetAdjust buffer.
        var networkToBuffer = new NativeHashMap<Entity, DynamicBuffer<NetAdjust>>(SystemConstants.MapNodeSize, Allocator.Temp);
        Entities.WithNone<NetworkGroup>().ForEach(
            (Entity connectionEnt, ref Connection connection, ref ConnectionLengthInt conLength, ref ConnectionSpeedInt conSpeed) => {
                int color = finalColor[connectionEnt];
                var network = colorToNetwork[color];
                PostUpdateCommands.AddSharedComponent(connectionEnt, new NetworkGroup { NetworkId = network.Index });
                DynamicBuffer<NetAdjust> buffer;
                if (!networkToBuffer.TryGetValue(network, out buffer)) {
                    buffer = PostUpdateCommands.SetBuffer<NetAdjust>(network);
                    networkToBuffer.TryAdd(network, buffer);
                }
                buffer.Add(new NetAdjust {
                    Connection = connectionEnt,
                    Cost = (float)conLength.Length / conSpeed.Speed,
                    StartNode = connection.StartNode,
                    EndNode = connection.EndNode
                });
            });
        colorToNetwork.Dispose();
        networkToBuffer.Dispose();
        finalColor.Dispose();
    }
    conToColor.Dispose();
    colorToColor.Dispose();
}
//TODO:generate flow field data
//TODO:update formation index by current members position and destination position
// Group-formation movement: on a GroupMoveEventData, assigns each group member an
// optimal formation slot at the destination (Hungarian assignment on squared
// distances), then every frame steers members toward their slot's world position.
// NOTE(review): this block is truncated in the visible chunk — the last ForEach
// below is never scheduled and the method never closes; groupMatrixMap is
// presumably disposed after that schedule. Confirm against the full file.
protected override void OnUpdate() {
    //var memberSorter = GroupFormation.FormationMemberSortInstance;
    var ecb = _endSimulationEcbSystem.CreateCommandBuffer().AsParallelWriter();
    var transformDataMap = GetComponentDataFromEntity<TransformData>(true);
    // NOTE(review): steerArriveDataMap is unused in the visible portion of this
    // method — it may be used past the truncation point.
    var steerArriveDataMap = GetComponentDataFromEntity<SteerArriveData>(true);
    //keep formation
    //method 1 use average as group centroid of the formation pivot
    //method 2 use a logic group entity with transform data & steer and find path on this group logic
    #region simple group move formation algorithm reference from https: //www.gdcvault.com/play/1020832/The-Simplest-AI-Trick-in
    //hack use queue disable parallel restriction for now
    //TODO: process each move command on a single group in a job other than entities.foreach parallel running
    var queue = new NativeQueue<int>(Allocator.TempJob);
    var formationSlots = GroupFormation.GetDefaultFormationSlots();
    Entities.WithName("GroupStartMoveJob").WithNativeDisableParallelForRestriction(queue)
        .WithReadOnly(transformDataMap).WithReadOnly(formationSlots).WithAll<GroupFlag>()
        .ForEach((Entity entity, int entityInQueryIndex, ref DynamicBuffer<GroupMemberElement> groupMembers, in GroupMoveEventData groupMoveData) => {
            // World-space placement of the formation at the move destination.
            var localToWorldMatrix = Math.TrsFloat3x3(groupMoveData.Destination, groupMoveData.Forward);
            var destinationArray = new NativeArray<float2>(groupMembers.Length, Allocator.Temp);
            // NOTE(review): destinationList and validFormation are computed but
            // never used (and destinationList is never disposed) in the visible
            // code — possibly leftovers of an earlier approach; verify.
            var destinationList = new NativeList<float2>(groupMembers.Length, Allocator.Temp);
            var validFormation = groupMembers.Length < formationSlots.Length;
            for (int i = 0; i < groupMembers.Length; i++) {
                var position = math.mul(localToWorldMatrix, new float3(formationSlots[i], 1)).xy;
                destinationArray[i] = position;
                destinationList.Add(formationSlots[i]);
            }
            // Cost matrix: squared member->slot distance, scaled and truncated to int.
            var destinationMatrix = new NativeArray2D<int>(new int2(groupMembers.Length, groupMembers.Length), Allocator.Temp);
            for (int memberIndex = 0; memberIndex < groupMembers.Length; memberIndex++) {
                for (int destinationIndex = 0; destinationIndex < groupMembers.Length; destinationIndex++) {
                    var memberPosition = transformDataMap[groupMembers[memberIndex]].Position;
                    var destinationPosition = destinationArray[destinationIndex];
                    destinationMatrix[memberIndex, destinationIndex] = (int)(math.distancesq(memberPosition, destinationPosition) * 10);
                }
            }
            var bestMatchResult = new NativeArray<int>(groupMembers.Length, Allocator.Temp);
            //may be performance bottleneck here
            new HungarianAlgorithm {
                CostMatrix = destinationMatrix,
                MatchX = bestMatchResult,
                Queue = queue,
            }.Run();
            destinationMatrix.Dispose();
            // Hand each member its matched slot: steer goal + local formation offset.
            for (int i = 0; i < groupMembers.Length; i++) {
                ecb.AddComponent(entityInQueryIndex, groupMembers[i], new SteerArriveData {
                    Goal = destinationArray[bestMatchResult[i]],
                    ArriveRadius = groupMoveData.ArriveRadius,
                });
                ecb.SetComponent(entityInQueryIndex, groupMembers[i], new FormationLocalPosition { Value = formationSlots[bestMatchResult[i]] });
            }
            bestMatchResult.Dispose();
            destinationArray.Dispose();
            //memberSortData.Dispose();
            // Consume the move event so the group is not re-processed next frame.
            ecb.RemoveComponent<GroupMoveEventData>(entityInQueryIndex, entity);
        })
        .ScheduleParallel();
    #endregion
    queue.Dispose(Dependency);
    _endSimulationEcbSystem.AddJobHandleForProducer(Dependency);
    //return;
    #region keep formation add force on member to go to position in group(slot)
    var deltaTime = Time.DeltaTime;
    var groupCount = _groupQuery.CalculateEntityCount();
    // Per-group local-to-world formation matrix, rebuilt every update.
    var groupMatrixMap = new NativeHashMap<Entity, float3x3>(groupCount, Allocator.TempJob);
    var groupMatrixMapWriter = groupMatrixMap.AsParallelWriter();
    Entities.WithName("BuildGroupLocalToWorldMatrixJob").WithAll<GroupFlag>().ForEach(
        (Entity groupEntity, in TransformData transformData) => {
            var matrix = Math.TrsFloat3x3(transformData.Position, transformData.Forward);
            groupMatrixMapWriter.TryAdd(groupEntity, matrix);
        }).ScheduleParallel();
    // Steer each member toward its formation slot's current world position.
    Entities.WithReadOnly(groupMatrixMap).ForEach(
        (ref SteerData steerData, in GroupOwner groupOwner, in TransformData transformData, in FormationLocalPosition formationLocalPosition) => {
            var localToWorld = groupMatrixMap[groupOwner.GroupEntity];
            var formationSlotWorldPosition = math.mul(localToWorld, new float3(formationLocalPosition.Value, 1)).xy;
            var curPosition = transformData.Position;
            var weight = 1; //0.5f;
            // Desired velocity points at the slot, capped at max speed * rate.
            var desireVelocity = (formationSlotWorldPosition - curPosition) * steerData.MaxSpeed * steerData.MaxSpeedRate;
            desireVelocity = math.normalizesafe(desireVelocity) * math.min(math.length(desireVelocity), steerData.MaxSpeed * steerData.MaxSpeedRate);
            var steer = (desireVelocity - steerData.Velocity) * deltaTime * weight;
            steerData.Steer += steer;
            // Speed up members lagging behind the slot (local -y) or off to the
            // side, slow down members ahead; clamped to [0.7, 1.3].
            var positionInLocalFormation = math.mul(math.inverse(localToWorld), new float3(transformData.Position, 1)).xy;
            var offset = positionInLocalFormation - formationLocalPosition.Value;
            steerData.MaxSpeedRate = math.clamp(1 + (-offset.y + math.abs(offset.x)) * 1, 0.7f, 1.3f);
        })