/// <summary>
/// Polls pending chunk-generation jobs. A job is finalized when it is marked
/// urgent, has finished, or has been waiting longer than <c>maxJobFrames</c>;
/// finalized jobs complete their chunk and are queued for removal from the map.
/// </summary>
/// <param name="jobsToRemove">Scratch list that receives the keys of finalized jobs.</param>
private void ProcessGeneratorJobs(NativeList <int3> jobsToRemove)
{
    // Snapshot the map so we can mutate/remove entries safely after iterating.
    NativeKeyValueArrays <int3, ChunkJobData> snapshot = generateJobs.GetKeyValueArrays(Allocator.Temp);

    for (int index = 0; index < snapshot.Keys.Length; index++)
    {
        ChunkJobData jobData = snapshot.Values[index];
        bool finalizeNow = jobData.urgent || jobData.job.IsCompleted || jobData.frameCounter >= maxJobFrames;

        if (!finalizeNow)
        {
            // Still pending: bump the wait counter and write the value back.
            jobData.frameCounter++;
            generateJobs[snapshot.Keys[index]] = jobData;
            continue;
        }

        jobData.job.Complete();

        Chunk chunk = chunks[jobData.position];
        chunk.CompleteGenerating();

        if (!chunk.RequestedRemoval && chunk.render)
        {
            TryToQueueChunkRender(chunk, jobData.priority);
        }

        jobsToRemove.Add(snapshot.Keys[index]);
    }

    snapshot.Dispose();

    // Remove finalized entries only after iteration is finished.
    for (int index = 0; index < jobsToRemove.Length; index++)
    {
        generateJobs.Remove(jobsToRemove[index]);
    }
}
/// <summary>
/// Copies every key/value pair in the container into freshly allocated
/// parallel arrays.
/// </summary>
/// <remarks>If a key maps to multiple values, the returned key array contains
/// that key once per value.</remarks>
/// <param name="allocator">A member of the
/// [Unity.Collections.Allocator](https://docs.unity3d.com/ScriptReference/Unity.Collections.Allocator.html) enumeration.</param>
/// <returns>Parallel arrays of keys and values.</returns>
public NativeKeyValueArrays <TKey, TValue> GetKeyValueArrays(Allocator allocator)
{
    // Memory is left uninitialized: GetKeyValueArrays fills every slot below.
    var arrays = new NativeKeyValueArrays <TKey, TValue>(Count(), allocator, NativeArrayOptions.UninitializedMemory);
    UnsafeHashMapData.GetKeyValueArrays(m_Buffer, arrays);
    return arrays;
}
/// <summary>
/// Applies a batch of reparenting operations to the hierarchy in place.
/// Keys of <paramref name="parentChange"/> are child instance ids; values are
/// the new parent instance ids (0 is treated as "move to root" — see below).
/// </summary>
/// <param name="hierarchy">Hierarchy whose parent/child tables are mutated.</param>
/// <param name="parentChange">Parallel arrays: Keys = child instance id, Values = new parent instance id.</param>
/// <param name="outChangeFailed">Receives instance ids whose change could not be applied.</param>
/// <param name="outChangeSuccessful">Receives one record per successfully applied change.</param>
internal static void ChangeParents(IncrementalHierarchy hierarchy, NativeKeyValueArrays <int, int> parentChange, NativeList <int> outChangeFailed, NativeList <IncrementalConversionChanges.ParentChange> outChangeSuccessful)
{
    var instanceIds = parentChange.Keys;
    var parentInstanceIds = parentChange.Values;
    for (int i = 0; i < instanceIds.Length; i++)
    {
        var instanceId = instanceIds[i];
        // Unknown child: report failure and skip.
        if (!hierarchy.IndexByInstanceId.TryGetValue(instanceId, out int idx))
        {
            outChangeFailed.Add(instanceId); // this case might happen when an instance was already removed
            continue;
        }

        // Detach the child from its current parent (if any) before re-attaching.
        int oldParentIdx = hierarchy.ParentIndex[idx];
        int oldParentId = 0; // 0 = child previously had no parent
        if (oldParentIdx != -1)
        {
            oldParentId = hierarchy.InstanceId[oldParentIdx];
            hierarchy.ChildIndicesByIndex.Remove(oldParentIdx, idx);
        }

        int newParentId = parentInstanceIds[i];
        if (hierarchy.IndexByInstanceId.TryGetValue(newParentId, out int newParentIdx))
        {
            // New parent is known: link the child under it and record success.
            hierarchy.ChildIndicesByIndex.Add(newParentIdx, idx);
            hierarchy.ParentIndex[idx] = newParentIdx;
            outChangeSuccessful.Add(new IncrementalConversionChanges.ParentChange
            {
                InstanceId = instanceId, NewParentInstanceId = newParentId, PreviousParentInstanceId = oldParentId
            });
        }
        else
        {
            if (newParentId != 0)
            {
                // Target parent is unknown. NOTE(review): the child was already
                // detached above and is left unparented below even though the
                // change is reported as failed — confirm this is intended.
                outChangeFailed.Add(instanceId);
            }
            else
            {
                // newParentId == 0 means "move to root": record as success.
                outChangeSuccessful.Add(new IncrementalConversionChanges.ParentChange
                {
                    InstanceId = instanceId, NewParentInstanceId = newParentId, PreviousParentInstanceId = oldParentId
                });
            }
            hierarchy.ParentIndex[idx] = -1;
        }
    }
}
/// <summary>
/// Polls pending mesh-render jobs. A job is finalized when it is marked
/// urgent, has finished, or has exceeded <c>maxJobFrames</c>; finalized jobs
/// apply their mesh, queue a collider rebuild, and are removed from the map.
/// </summary>
/// <param name="jobsToRemove">Scratch list that receives the keys of finalized jobs.</param>
private void ProcessRenderJobs(NativeList <int3> jobsToRemove)
{
    // Snapshot the map so we can mutate/remove entries safely after iterating.
    NativeKeyValueArrays <int3, ChunkJobData> snapshot = renderJobs.GetKeyValueArrays(Allocator.Temp);

    for (int index = 0; index < snapshot.Keys.Length; index++)
    {
        ChunkJobData jobData = snapshot.Values[index];
        bool finalizeNow = jobData.urgent || jobData.job.IsCompleted || jobData.frameCounter >= maxJobFrames;

        if (!finalizeNow)
        {
            // Still pending: bump the wait counter and write the value back.
            jobData.frameCounter++;
            renderJobs[snapshot.Keys[index]] = jobData;
            continue;
        }

        jobData.job.Complete();

        Chunk chunk = chunks[jobData.position];
        if (useRendererPrefab)
        {
            CreateChunkRenderer(jobData.position, chunk.CompleteMeshUpdate(chunk.mesh));
        }
        else
        {
            chunk.CompleteMeshUpdate(chunk.mesh);
        }

        if (!chunk.RequestedRemoval)
        {
            AddToQueue(colliderQueue, jobData.position, jobData.priority);
        }

        jobsToRemove.Add(snapshot.Keys[index]);
    }

    snapshot.Dispose();

    // Remove finalized entries only after iteration is finished.
    for (int index = 0; index < jobsToRemove.Length; index++)
    {
        renderJobs.Remove(jobsToRemove[index]);
    }
}
/// <summary>
/// Polls pending collider-mesh jobs. A job is finalized when it is marked
/// urgent, has finished, or has exceeded <c>maxJobFrames</c>; finalized jobs
/// assign their mesh to the chunk's (possibly pooled) MeshCollider and are
/// removed from the map.
/// </summary>
/// <param name="jobsToRemove">Scratch list that receives the keys of finalized jobs.</param>
private void ProcessColliderJobs(NativeList <int3> jobsToRemove)
{
    // Snapshot the map so we can mutate/remove entries safely after iterating.
    NativeKeyValueArrays <int3, ChunkJobData> snapshot = colliderJobs.GetKeyValueArrays(Allocator.Temp);

    for (int index = 0; index < snapshot.Keys.Length; index++)
    {
        ChunkJobData jobData = snapshot.Values[index];
        bool finalizeNow = jobData.urgent || jobData.job.IsCompleted || jobData.frameCounter >= maxJobFrames;

        if (!finalizeNow)
        {
            // Still pending: bump the wait counter and write the value back.
            jobData.frameCounter++;
            colliderJobs[snapshot.Keys[index]] = jobData;
            continue;
        }

        jobData.job.Complete();

        Chunk chunk = chunks[jobData.position];
        if (!chunkColliders.TryGetValue(jobData.position, out MeshCollider collider))
        {
            // First collider for this chunk position: pull one from the pool.
            collider = GetChunkCollider();
            chunkColliders.Add(jobData.position, collider);
#if DEBUG
            collider.gameObject.name = "Collider [" + jobData.position.x + "," + jobData.position.y + "," + jobData.position.z + "]";
#endif
        }
        collider.sharedMesh = chunk.CompleteColliderMeshUpdate(collider.sharedMesh);

        jobsToRemove.Add(snapshot.Keys[index]);
    }

    snapshot.Dispose();

    // Remove finalized entries only after iteration is finished.
    for (int index = 0; index < jobsToRemove.Length; index++)
    {
        colliderJobs.Remove(jobsToRemove[index]);
    }
}
/// <summary>
/// Walks every bucket chain of the hash map and copies each entry's key and
/// value into the preallocated parallel arrays of <paramref name="result"/>.
/// Asserts that exactly <c>result.Keys.Length</c> entries were written.
/// </summary>
internal static void GetKeyValueArrays <TKey, TValue>(UnsafeHashMapData *data, NativeKeyValueArrays <TKey, TValue> result)
    where TKey : struct
    where TValue : struct
{
    var bucketHeads = (int *)data->buckets;
    var chainNext = (int *)data->next;

    int written = 0;
    // bucketCapacityMask is the last valid bucket index, hence <=.
    for (int bucket = 0; bucket <= data->bucketCapacityMask; ++bucket)
    {
        // Follow the collision chain; -1 terminates it.
        for (int entry = bucketHeads[bucket]; entry != -1; entry = chainNext[entry])
        {
            result.Keys[written] = UnsafeUtility.ReadArrayElement <TKey>(data->keys, entry);
            result.Values[written] = UnsafeUtility.ReadArrayElement <TValue>(data->values, entry);
            written++;
        }
    }

    // Every output slot must have been filled exactly once.
    Assert.AreEqual(result.Keys.Length, written);
    Assert.AreEqual(result.Values.Length, written);
}
/// <summary>
/// Runs the A* job for every registered start node, groups the resulting
/// per-start paths, and starts a movement coroutine along each path.
/// </summary>
/// <remarks>
/// Fixes over the previous version: the temporary key/value snapshot returned
/// by <c>GetKeyValueArrays(Allocator.Temp)</c> is now disposed (it was leaked),
/// and grouping uses a single <c>TryGetValue</c> lookup instead of
/// <c>ContainsKey</c> plus indexer with a duplicated Enqueue in both branches.
/// </remarks>
public void FindPath()
{
    NativeHashMap <int2, bool> isObstacle = new NativeHashMap <int2, bool>(obstacles.Count, Allocator.TempJob);
    NativeArray <int2> offsets = new NativeArray <int2>(8, Allocator.TempJob);
    NativeArray <Node> nativeStarts = new NativeArray <Node>(starts.Count, Allocator.TempJob);
    // safeGuard bounds the number of nodes A* may emit per start.
    NativeMultiHashMap <int2, Node> resultList = new NativeMultiHashMap <int2, Node>(starts.Count * safeGuard, Allocator.TempJob);

    // Copy managed obstacle/start data into job-readable native containers.
    foreach (int2 o in obstacles.Keys)
    {
        isObstacle.Add(o, true);
    }

    int counter = 0;
    foreach (Node n in starts.Values)
    {
        nativeStarts[counter] = n;
        counter++;
    }

    // Eight neighbour directions, clockwise starting from "up".
    offsets[0] = new int2(0, 1);
    offsets[1] = new int2(1, 1);
    offsets[2] = new int2(1, 0);
    offsets[3] = new int2(1, -1);
    offsets[4] = new int2(0, -1);
    offsets[5] = new int2(-1, -1);
    offsets[6] = new int2(-1, 0);
    offsets[7] = new int2(-1, 1);

    AStar aStar = new AStar
    {
        isObstacle = isObstacle,
        offsets = offsets,
        starts = nativeStarts,
        resultList = resultList,
        end = end,
        safeGuard = safeGuard,
    };
    // One parallel-for iteration per start node, batched in groups of 16.
    JobHandle handle = aStar.Schedule(starts.Count, 16);
    handle.Complete();

    // Group the flat (start, node) results into one queue per start position.
    NativeKeyValueArrays <int2, Node> keyValueArray = resultList.GetKeyValueArrays(Allocator.Temp);
    Dictionary <int2, Queue <Node> > paths = new Dictionary <int2, Queue <Node> >();
    for (int i = 0; i < keyValueArray.Keys.Length; i++)
    {
        // Single lookup instead of ContainsKey + indexer.
        if (!paths.TryGetValue(keyValueArray.Keys[i], out Queue <Node> path))
        {
            path = new Queue <Node>();
            paths.Add(keyValueArray.Keys[i], path);
        }
        path.Enqueue(keyValueArray.Values[i]);
    }
    // BUG FIX: the snapshot was previously never disposed.
    keyValueArray.Dispose();

    foreach (int2 start in paths.Keys)
    {
        StartCoroutine(MoveUnitCoroutine(start, paths[start]));
    }

    isObstacle.Dispose();
    offsets.Dispose();
    nativeStarts.Dispose();
    resultList.Dispose();
}