Example #1
        public NativeList <Node> GetNodes(Allocator allocator)
        {
            var keys  = _nodeToParent.GetKeyArray(Allocator.Temp);
            var nodes = new NativeList <Node>(keys.Length + 1, allocator)
            {
                _goalNode
            };

            nodes.AddRange(keys);
            keys.Dispose();
            return(nodes);
        }
Example #2
        public void Execute()
        {
            var keys = IntersectionsToRoadsMap.GetKeyArray(Allocator.Temp);

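            // The loop assumes identical keys are adjacent in the key array; one range is emitted
            // per run of equal keys, flushed when the key changes or the end of the array is reached.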
            for (int i = 1, startIndex = 0; i <= keys.Length; i++)
            {
                if (i != keys.Length && keys[i] == keys[i - 1])
                {
                    continue;
                }
                RoadConnectionRanges.Add(new RoadConnectionArrayRange
                {
                    IntersectionEntity = keys[i - 1],
                    StartIndex         = startIndex,
                    Length             = i - startIndex
                });
                startIndex = i;
            }
            keys.Dispose();

            var values = IntersectionsToRoadsMap.GetValueArray(Allocator.Temp);

            RoadConnections.AddRange(values);
            values.Dispose();
        }
Example #3
        public void Execute()
        {
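            // For each pointer target: create an event entity, gather that target's inputs,
            // sort them, then copy them into the entity's PointerInputBuffer.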
            var targets = TargetToEvent.GetKeyArray(Allocator.Temp);
            NativeList <PointerInputBuffer> eventList = new NativeList <PointerInputBuffer>(4, Allocator.Temp);

            for (int i = 0; i < targets.Length; i++)
            {
                var           target        = targets[i];
                EventComparer eventComparer = new EventComparer();
                if (TargetToEvent.TryGetFirstValue(target, out var item, out var it))
                {
                    var eventEntity = Ecb.CreateEntity(EventArchetype);
                    Ecb.SetComponent(eventEntity, new PointerEvent
                    {
                        Target = target
                    });
                    var buffer = Ecb.SetBuffer <PointerInputBuffer>(eventEntity);
                    do
                    {
                        eventList.Add(item);
                    } while (TargetToEvent.TryGetNextValue(out item, ref it));
                    eventList.Sort(eventComparer);
                    buffer.ResizeUninitialized(eventList.Length);
                    for (int j = 0; j < eventList.Length; j++)
                    {
                        buffer[j] = eventList[j];
                    }
                    eventList.Clear();
                }
            }
        }
Example #4
    public void NativeMultiHashMap_GetKeys()
    {
        var container = new NativeMultiHashMap <int, int>(1, Allocator.Temp);

        for (int i = 0; i < 30; ++i)
        {
            container.Add(i, 2 * i);
            container.Add(i, 3 * i);
        }
        var keys = container.GetKeyArray(Allocator.Temp);

#if !NET_DOTS // Tuple is not supported by TinyBCL
        var(unique, uniqueLength) = container.GetUniqueKeyArray(Allocator.Temp);
        Assert.AreEqual(30, uniqueLength);
#endif

        Assert.AreEqual(60, keys.Length);
        keys.Sort();
        for (int i = 0; i < 30; ++i)
        {
            Assert.AreEqual(i, keys[i * 2 + 0]);
            Assert.AreEqual(i, keys[i * 2 + 1]);
#if !NET_DOTS // Tuple is not supported by TinyBCL
            Assert.AreEqual(i, unique[i]);
#endif
        }
    }
Example #5
        public NativeHashMap <int, float3>              averageHeading; // bucketIndex -> average heading of boids

        public void Execute()
        {
            NativeMultiHashMapIterator <int> iterator;
            NativeArray <int> keys = bucketEntityMap.GetKeyArray(Allocator.Temp);
            int    entityIndex     = 0;
            int    bucketIndex     = 0;
            int    counter         = 0;
            float3 sumPos          = float3.zero;
            float3 sumHeading      = float3.zero;

            for (int i = 0; i < keys.Length; i++)
            {
                bucketIndex = keys[i];
                if (bucketEntityMap.TryGetFirstValue(bucketIndex, out entityIndex, out iterator))
                {
                    do
                    {
                        float3 pos = positions[entityIndex];
                        sumPos     += pos;
                        sumHeading += math.forward(rotations[entityIndex]);
                        counter++;
                    }while (bucketEntityMap.TryGetNextValue(out entityIndex, ref iterator));

                    centerPosition[bucketIndex] = sumPos / counter;
                    averageHeading[bucketIndex] = sumHeading / counter;
                    //Debug.DrawLine(sumPos / counter, sumPos / counter + sumHeading / counter, Color.white);
                    //Debug.Log(string.Format("assign position for bucket: {0}    pos {1}  ", bucketIndex, centerPositions[bucketIndex]));
                    counter    = 0;
                    sumPos     = float3.zero;
                    sumHeading = float3.zero;
                }
            }
        }
Example #6
        public void Execute()
        {
            // Resize containers
            int graphSize = DepthMap.Count();

            SelectedStatesByHorizon.Capacity = math.max(SelectedStatesByHorizon.Capacity, MaxDepth + 1);
            PredecessorStates.Capacity       = math.max(PredecessorStates.Capacity, graphSize);
            HorizonStateList.Capacity        = math.max(HorizonStateList.Capacity, graphSize);

            var selectedStateKeys = SelectedStates.GetKeyArray(Allocator.Temp);

            for (int i = 0; i < selectedStateKeys.Length; i++)
            {
                var stateKey   = selectedStateKeys[i];
                int stateDepth = int.MinValue;
                SelectedStates.TryGetFirstValue(stateKey, out var selectedDepth, out var iterator);
                do
                {
                    stateDepth = math.max(stateDepth, selectedDepth);
                }while (SelectedStates.TryGetNextValue(out selectedDepth, ref iterator));

                // Update depth map
                DepthMap[stateKey] = stateDepth;
                SelectedStatesByHorizon.AddValueIfUnique(stateDepth, stateKey);
            }
        }
Example #7
        public void Execute()
        {
            var keys = map.GetKeyArray(Allocator.Temp);

            for (var i = 0; i < keys.Length; i++)
            {
                var n = tree[keys[i]];
                if (n.children.length > 0)
                {
                    continue;
                }

                var start  = tree.Length;
                var values = map.GetValuesForKey(keys[i]);
                var c      = 0;
                while (values.MoveNext())
                {
                    var(node, board) = values.Current;
                    tree.Add(node);
                    boards.Add(board);
                    c++;
                }

                n.children    = new ChildrenRef(start, c);
                tree[keys[i]] = n;
            }
        }
Example #8
        /// <summary>
        /// Smooth mesh.
        /// </summary>
        /// <param name="vert">Vertex array</param>
        /// <param name="tris">Triangle array</param>
        /// <param name="normals">Array that the computed normals will be stored in</param>
        /// <param name="angle">Smoothing angle</param>
        public static void RecalculateNormals(NativeArray <Vertex> vert, NativeArray <Triangle> tris, ref NativeArray <float3> normals, float angle = 60)
        {
            var cosineThreshold = Mathf.Cos(angle * Mathf.Deg2Rad);
            var triNormals      = new NativeArray <float3>(tris.Length, Allocator.Temp);
            var dictionary      = new NativeMultiHashMap <VertexKey, VertexEntry>(vert.Length * 2, Allocator.Temp);

            for (var i = 0; i < tris.Length; i++)
            {
                var i1 = tris[i].indices[0];
                var i2 = tris[i].indices[1];
                var i3 = tris[i].indices[2];

                // Calculate the normal of the triangle
                var p1     = vert[i2].pos - vert[i1].pos;
                var p2     = vert[i3].pos - vert[i1].pos;
                var normal = math.normalize(math.cross(p1, p2));

                triNormals[i] = normal;

                dictionary.Add(new VertexKey(vert[i1].pos), new VertexEntry(0, i, i1));
                dictionary.Add(new VertexKey(vert[i2].pos), new VertexEntry(0, i, i2));
                dictionary.Add(new VertexKey(vert[i3].pos), new VertexEntry(0, i, i3));
            }

            var keys = dictionary.GetKeyArray(Allocator.Temp);

            for (var i = 0; i < keys.Length; i++)
            {
                // MoveNext() must be called before reading Current, so iterate with while loops
                // instead of do/while to avoid reading a default-initialized entry first.
                var enumerator1 = dictionary.GetValuesForKey(keys[i]);
                while (enumerator1.MoveNext())
                {
                    var sum         = new float3();
                    var lhs         = enumerator1.Current;
                    var enumerator2 = dictionary.GetValuesForKey(keys[i]);
                    while (enumerator2.MoveNext())
                    {
                        var rhs = enumerator2.Current;

                        if (lhs.VertexIndex == rhs.VertexIndex)
                        {
                            sum += triNormals[rhs.TriangleIndex];
                        }
                        else
                        {
                            // The dot product is the cosine of the angle between the two triangles.
                            // A larger cosine means a smaller angle.
                            var dot = math.dot(triNormals[lhs.TriangleIndex], triNormals[rhs.TriangleIndex]);

                            if (dot >= cosineThreshold)
                            {
                                sum += triNormals[rhs.TriangleIndex];
                            }
                        }
                    }

                    normals[lhs.VertexIndex] = math.normalize(sum);
                }
            }
        }
Example #9
        public void Execute()
        {
            if (SplitIndicesMap.Count() == 0)
            {
                return;
            }

            var roadIndices          = SplitIndicesMap.GetKeyArray(Allocator.Temp);
            var intersectionMetaData = SplitIndicesMap.GetValueArray(Allocator.Temp);

            // Sort intersection metadata by spline interpolation factor
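            // Keys for the same road index are expected to be contiguous, so each run of equal
            // road indices maps to one sub-array of metadata that is sorted independently.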
            for (var i = 1; i < roadIndices.Length; i++)
            {
                var j         = i - 1;
                var prevIndex = roadIndices[j];
                while (i < roadIndices.Length && roadIndices[i] == prevIndex)
                {
                    i++;
                }
                var subArray = intersectionMetaData.GetSubArray(j, i - j);
                subArray.Sort(new IntersectionPointComparer());
            }

            IntersectionMetaData.AddRange(intersectionMetaData);
            intersectionMetaData.Dispose();

            RemoveDuplicateIntersectionRecords(roadIndices);
            roadIndices.Dispose();

            // Identify all unique intersection points and save them to a list
            for (var i = 0; i < IntersectionMetaData.Length; i++)
            {
                var foundDuplicate          = false;
                var metaData                = IntersectionMetaData[i];
                var intersectionEntityIndex = 0;

                // Search all known unique intersection points for a duplicate
                for (var j = 0; j < UniqueIntersectionPoints.Length; j++)
                {
                    if (!Utilities.GeometryUtility.ApproximatelyEqual(UniqueIntersectionPoints[j], metaData.Point))
                    {
                        continue;
                    }
                    foundDuplicate          = true;
                    intersectionEntityIndex = j;
                    break;
                }

                if (!foundDuplicate)
                {
                    UniqueIntersectionPoints.Add(metaData.Point);
                    intersectionEntityIndex = UniqueIntersectionPoints.Length - 1;
                }

                metaData.IntersectionEntityIndex = intersectionEntityIndex;
                IntersectionMetaData[i]          = metaData;
            }
        }
Example #10
            public unsafe void Execute()
            {
                NativeArray <TKey> withDuplicates = multiHashMap.GetKeyArray(Allocator.Temp);

                withDuplicates.Sort();
                var uniqueCount = withDuplicates.Unique();
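                // Unique() (as used throughout these examples) compacts the distinct keys to the front
                // of the sorted array and returns how many there are; only that prefix is copied below.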

                keys.AddRange(withDuplicates.GetUnsafeReadOnlyPtr(), uniqueCount);
                withDuplicates.Dispose();
            }
Example #11
        public static NativeArray <Tkey> GetUniqueKeys <Tkey, Tval>(this NativeMultiHashMap <Tkey, Tval> map, Allocator allocator) where Tkey : struct, IEquatable <Tkey> where Tval : struct
        {
            var tempKeys = map.GetKeyArray(Allocator.TempJob);
            var tempMap  = new NativeHashMap <Tkey, Tkey>(tempKeys.Length, Allocator.TempJob);

            for (int i = 0; i < tempKeys.Length; i++)
            {
                tempMap.TryAdd(tempKeys[i], tempKeys[i]);
            }
            var res = tempMap.GetKeyArray(allocator);

            tempKeys.Dispose();
            tempMap.Dispose();

            return(res);
        }
Example #12
    public static Dictionary <int, List <Edge> > GetGraph(IEnumerable <Node> nodes, float jumpRadius, float padding = 0)
    {
        var result = new Dictionary <int, List <Edge> >();

        var nativeNodes = new NativeArray <NodeStruct>(nodes.Select(n => new NodeStruct
        {
            start = n.start,
            end   = n.end,
            id    = n.id
        }).ToArray(), Allocator.TempJob);

        var map = new NativeMultiHashMap <int, EdgeStruct>(nodes.Count(), Allocator.TempJob);

        var job = new CalculateEdgeJob
        {
            map        = map,
            nodes      = nativeNodes,
            jumpRadius = jumpRadius,
            padding    = padding
        };

        job.Run();

        var keys = map.GetKeyArray(Allocator.Temp).Distinct().ToArray();
        var c    = 0;

        for (int i = 0; i < keys.Length; i++)
        {
            var edges = new List <Edge>();
            foreach (var item in map.GetValuesForKey(keys[i]))
            {
                var edge = new Edge {
                    start = item.start, end = item.end, jumpAngle = item.jumpAngle
                };
                edge.node = nodes.Single(n => n.id == item.nodeId);
                edges.Add(edge);
                c++;
            }
            result.Add(keys[i], edges);
        }


        nativeNodes.Dispose();
        map.Dispose();

        return(result);
    }
Example #13
        public void Execute()
        {
            NativeMultiHashMapIterator <int> iterator;
            NativeArray <int> keys = bucketEntityMap.GetKeyArray(Allocator.Temp);
            int   entityIndex      = 0;
            int   bucketIndex      = 0;
            float nearestDistance  = StaticValues.NEIGHBOUR_RADIUS;
            float distance         = -1;
            int   nearestEntity    = 0;

            for (int i = 0; i < keys.Length; i++)
            {
                bucketIndex = keys[i];

                if (bucketEntityMap.TryGetFirstValue(bucketIndex, out entityIndex, out iterator))
                {
                    do
                    {
                        float3 ownPos = positions[entityIndex];
                        nearestEntity = entityIndex;
                        NativeMultiHashMap <int, int> .Enumerator e = bucketEntityMap.GetValuesForKey(bucketIndex);
                        while (e.MoveNext())
                        {
                            int nextEntIdx = e.Current;
                            if (nextEntIdx != entityIndex)
                            {
                                float3 compPos = positions[nextEntIdx];
                                distance = math.distancesq(ownPos, compPos);
                                if (distance < StaticValues.NEIGHBOUR_RADIUS)
                                {
                                    nearestEntity   = math.select(nearestEntity, nextEntIdx, distance < nearestDistance);
                                    nearestDistance = math.select(nearestDistance, distance, distance < nearestDistance);
                                }
                            }
                        }
                        float3 dirToNearest = positions[entityIndex] - positions[nearestEntity];
                        float3 ownFwd       = math.forward(rotations[entityIndex]);

                        float3 newValue = math.lerp(ownFwd, dirToNearest, separationValues[entityIndex]);
                        entityTargetDirection.TryAdd(entityIndex, newValue);

                        nearestDistance = StaticValues.NEIGHBOUR_RADIUS;
                    }while (bucketEntityMap.TryGetNextValue(out entityIndex, ref iterator));
                }
            }
        }
Example #14
        public Node this[int hashCode]
        {
            get
            {
                var resultNode = default(Node);
                var keys       = _nodeStates.GetKeyArray(Allocator.Temp);
                foreach (var node in keys)
                {
                    if (node.GetHashCode() == hashCode)
                    {
                        resultNode = node;
                        break;
                    }
                }

                keys.Dispose();
                return(resultNode);
            }
        }
Example #15
        public void Execute()
        {
            if (MaterialToSubMeshMap.Count() == 0)
            {
                return;
            }

            // Copy the values array of MaterialToSubMeshMap to the SubMeshRecords NativeList
            {
                var subMeshRecords = MaterialToSubMeshMap.GetValueArray(Allocator.Temp);
                SubMeshRecords.AddRange(subMeshRecords);
                SubMeshMap.ResizeUninitialized(SubMeshRecords.Length);
                subMeshRecords.Dispose();
            }

            var keys = MaterialToSubMeshMap.GetKeyArray(Allocator.Temp);

            for (var i = 0; i < SubMeshRecords.Length; i++)
            {
                var subMesh = SubMeshRecords[i];
                SubMeshMap[i] = new MappedSubMeshIndices
                {
                    TriangleIndexOffset = m_TriangleIndexOffset,
                    VertexStartIndex    = m_VertexCount,
                    TriangleStartIndex  = m_TriangleCount
                };
                m_VertexCount         += subMesh.VertexCount;
                m_TriangleCount       += subMesh.TriangleCount;
                m_TriangleIndexOffset += subMesh.VertexCount;

                var material = keys[i];
                if (i == SubMeshRecords.Length - 1 || material != keys[i + 1])
                {
                    AddSubMesh(material);
                }
            }

            Vertices.ResizeUninitialized(m_VertexCount);
            Triangles.ResizeUninitialized(m_TriangleCount);
        }
Example #16
    public void NativeMultiHashMap_GetKeys()
    {
        var hashMap = new NativeMultiHashMap <int, int> (1, Allocator.Temp);

        for (int i = 0; i < 30; ++i)
        {
            hashMap.Add(i, 2 * i);
            hashMap.Add(i, 3 * i);
        }
        var keys = hashMap.GetKeyArray(Allocator.Temp);

        hashMap.Dispose();

        Assert.AreEqual(60, keys.Length);
        keys.Sort();
        for (int i = 0; i < 30; ++i)
        {
            Assert.AreEqual(i, keys[i * 2 + 0]);
            Assert.AreEqual(i, keys[i * 2 + 1]);
        }
        keys.Dispose();
    }
Example #17
        public void Execute()
        {
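            // Copy every distinct key of spatialMap into outKeys (keys may repeat in the raw key array).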
            var keys = spatialMap.GetKeyArray(Allocator.Temp);

            if (keys.Length == 0)
            {
                return;
            }

            outKeys.Add(keys[0]);
            int lastKey = keys[0];

            for (int i = 0; i < keys.Length; ++i)
            {
                if (lastKey == keys[i] || outKeys.Contains(keys[i]))
                {
                    continue;
                }
                lastKey = keys[i];
                outKeys.Add(keys[i]);
            }
        }
Example #18
        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            // TODO: Capacity management?
            var hashmap       = new NativeMultiHashMap <Entity, Entity>(32 * 32 * 32, Allocator.TempJob);
            var changedChunks = new NativeMultiHashMap <Entity, Entity>(32 * 32 * 4, Allocator.TempJob);

            var handle = inputDeps;

            handle = new Job
            {
                Chunks = mapSystem.Chunks,
                VolumeToChunksInjectionMap = hashmap.ToConcurrent()
            }.Schedule(this, handle);

            handle = JobNativeMultiHashMapVisitKeyValue.Schedule(new InjectionJob
            {
                UpdateChunksMap = changedChunks.ToConcurrent()
            }, hashmap, 16, handle);

            handle.Complete();

            var chunks       = changedChunks.GetKeyArray(Allocator.TempJob);
            var uniqueChunks = temporaryEntitySet;

            uniqueChunks.Clear();
            foreach (var entity in chunks)
            {
                uniqueChunks.Add(entity);
            }


            Process(uniqueChunks, changedChunks);

            chunks.Dispose();
            hashmap.Dispose();
            changedChunks.Dispose();

            return(inputDeps);
        }
Example #19
        // PRIVATE METHODS

        private void CreateCommandsQueries()
        {
            if (commands.Length > 0)
            {
                var priorities = commands.GetKeyArray(Allocator.Temp);
                var values     = commands.GetValueArray(Allocator.Temp);

                priorities.Sort();

                for (int i = 0; i < priorities.Length; ++i)
                {
                    var priority = priorities[i];
                    var value    = values[i];

                    CacheCommandType(value, priority);
                }

                priorities.Dispose();
                values.Dispose();
            }

            commands.Clear();
        }
Example #20
        public void Execute()
        {
            var keys = connections.GetKeyArray(Allocator.Temp);

            for (int i = 0; i < keys.Length; i++)
            {
                float bestRate  = -1;
                int   bestIndex = -1;
                int2  key       = keys[i];
                var   iter      = connections.GetValuesForKey(key);
                while (iter.MoveNext())
                {
                    int   index = iter.Current;
                    float rate  = dataRate[index];
                    if (rate > bestRate)
                    {
                        bestRate  = rate;
                        bestIndex = index;
                    }
                }
                best.TryAdd(key, bestIndex);
            }
            keys.Dispose();
        }
Example #21
        public void Execute()
        {
            var statesToUpdateLength = SelectedStates.Count();
            var statesToUpdate       = new NativeMultiHashMap <int, TStateKey>(statesToUpdateLength, Allocator.Temp);
            var currentHorizon       = new NativeList <TStateKey>(statesToUpdateLength, Allocator.Temp);
            var nextHorizon          = new NativeList <TStateKey>(statesToUpdateLength, Allocator.Temp);

            var maxDepth          = int.MinValue;
            var selectedStateKeys = SelectedStates.GetKeyArray(Allocator.Temp);

            for (int i = 0; i < selectedStateKeys.Length; i++)
            {
                var stateKey   = selectedStateKeys[i];
                int stateDepth = int.MinValue;
                SelectedStates.TryGetFirstValue(stateKey, out var selectedDepth, out var iterator);
                do
                {
                    stateDepth = math.max(stateDepth, selectedDepth);
                }while (SelectedStates.TryGetNextValue(out selectedDepth, ref iterator));

                // Update depth map
                DepthMap[stateKey] = stateDepth;

                // Queue state and track max depth of backpropagation
                statesToUpdate.AddValueIfUnique(stateDepth, stateKey);
                maxDepth = math.max(maxDepth, stateDepth);
            }
            selectedStateKeys.Dispose();

            var actionLookup              = planGraph.ActionLookup;
            var resultingStateLookup      = planGraph.ResultingStateLookup;
            var actionInfoLookup          = planGraph.ActionInfoLookup;
            var stateInfoLookup           = planGraph.StateInfoLookup;
            var stateTransitionInfoLookup = planGraph.StateTransitionInfoLookup;
            var predecessorLookup         = planGraph.PredecessorGraph;
            var depth = maxDepth;

            // Pull states from statesToUpdate
            if (statesToUpdate.TryGetFirstValue(depth, out var stateToAdd, out var stateIterator))
            {
                do
                {
                    currentHorizon.AddIfUnique(stateToAdd);
                }while (statesToUpdate.TryGetNextValue(out stateToAdd, ref stateIterator));
            }

            // Update values from leaf state(s) to root
            while (depth >= 0)
            {
                for (int i = 0; i < currentHorizon.Length; i++)
                {
                    var stateKey    = currentHorizon[i];
                    var updateState = true;
                    if (actionLookup.TryGetFirstValue(stateKey, out var actionKey, out var stateActionIterator))
                    {
                        // Expanded state. Only update if one or more actions have updated.
                        updateState = false;

                        // Update all actions
                        do
                        {
                            updateState |= UpdateCumulativeReward(new StateActionPair <TStateKey, TActionKey>(stateKey, actionKey), resultingStateLookup, stateInfoLookup, actionInfoLookup, stateTransitionInfoLookup);
                        }while (actionLookup.TryGetNextValue(out actionKey, ref stateActionIterator));
                    }

                    if (!updateState)
                    {
                        continue;
                    }

                    // Update state
                    if (UpdateStateValue(stateKey, actionLookup, stateInfoLookup, actionInfoLookup))
                    {
                        // If a change has occured, update predecessors
                        if (predecessorLookup.TryGetFirstValue(stateKey, out var predecessorStateKey, out var predecessorIterator))
                        {
                            do
                            {
                                nextHorizon.AddIfUnique(predecessorStateKey);
                            }while (predecessorLookup.TryGetNextValue(out predecessorStateKey, ref predecessorIterator));
                        }
                    }
                }

                var temp = currentHorizon;
                currentHorizon = nextHorizon;
                nextHorizon    = temp;
                nextHorizon.Clear();

                depth--;

                // pull out states from statesToUpdate
                if (statesToUpdate.TryGetFirstValue(depth, out stateToAdd, out stateIterator))
                {
                    do
                    {
                        currentHorizon.AddIfUnique(stateToAdd);
                    }while (statesToUpdate.TryGetNextValue(out stateToAdd, ref stateIterator));
                }
            }

            // new: continue propagating complete flag changes
            while (currentHorizon.Length > 0)
            {
                for (int i = 0; i < currentHorizon.Length; i++)
                {
                    var stateKey    = currentHorizon[i];
                    var updateState = false;

                    // Update all actions
                    actionLookup.TryGetFirstValue(stateKey, out var actionKey, out var stateActionIterator);
                    do
                    {
                        var stateActionPair = new StateActionPair <TStateKey, TActionKey>(stateKey, actionKey);
                        if (UpdateSubplanCompleteStatus(stateActionPair, out var updatedActionInfo))
                        {
                            updateState = true;

                            // Write back updated info
                            actionInfoLookup[stateActionPair] = updatedActionInfo;
                        }
                    }while (actionLookup.TryGetNextValue(out actionKey, ref stateActionIterator));

                    // Update state
                    if (updateState && UpdateSubplanCompleteStatus(stateKey, out var updatedStateInfo))
                    {
                        // Write back updated info
                        stateInfoLookup[stateKey] = updatedStateInfo;

                        // If a change has occured, update predecessors
                        if (predecessorLookup.TryGetFirstValue(stateKey, out var predecessorStateKey, out var predecessorIterator))
                        {
                            do
                            {
                                nextHorizon.AddIfUnique(predecessorStateKey);
                            }while (predecessorLookup.TryGetNextValue(out predecessorStateKey, ref predecessorIterator));
                        }
                    }
                }

                var temp = currentHorizon;
                currentHorizon = nextHorizon;
                nextHorizon    = temp;
                nextHorizon.Clear();
            }

            currentHorizon.Dispose();
            nextHorizon.Dispose();
            statesToUpdate.Dispose();
        }
Example #22
 public void Execute()
 {
     MultiHashmap.GetKeyArray(Array);
 }
Example #23
        static void ApplyBlobAssetChanges(
            EntityManager entityManager,
            NativeArray <EntityGuid> packedEntityGuids,
            NativeMultiHashMap <int, Entity> packedEntities,
            NativeArray <ComponentType> packedTypes,
            NativeArray <BlobAssetChange> createdBlobAssets,
            NativeArray <byte> createdBlobAssetData,
            NativeArray <ulong> destroyedBlobAssets,
            NativeArray <BlobAssetReferenceChange> blobAssetReferenceChanges)
        {
            if (createdBlobAssets.Length == 0 && blobAssetReferenceChanges.Length == 0)
            {
                return;
            }

            s_ApplyBlobAssetChangesProfilerMarker.Begin();

            var managedObjectBlobAssetReferencePatches = new NativeMultiHashMap <EntityComponentPair, ManagedObjectBlobAssetReferencePatch>(blobAssetReferenceChanges.Length, Allocator.Temp);

            var patcherBlobAssetSystem = entityManager.World.GetOrCreateSystem <EntityPatcherBlobAssetSystem>();

            var blobAssetDataPtr = (byte *)createdBlobAssetData.GetUnsafePtr();

            for (var i = 0; i < createdBlobAssets.Length; i++)
            {
                if (!patcherBlobAssetSystem.TryGetBlobAsset(createdBlobAssets[i].Hash, out _))
                {
                    patcherBlobAssetSystem.AllocateBlobAsset(blobAssetDataPtr, createdBlobAssets[i].Length, createdBlobAssets[i].Hash);
                }

                blobAssetDataPtr += createdBlobAssets[i].Length;
            }

            for (var i = 0; i < destroyedBlobAssets.Length; i++)
            {
                patcherBlobAssetSystem.ReleaseBlobAsset(entityManager, destroyedBlobAssets[i]);
            }

            for (var i = 0; i < blobAssetReferenceChanges.Length; i++)
            {
                var packedComponent = blobAssetReferenceChanges[i].Component;
                var component       = packedTypes[packedComponent.PackedTypeIndex];
                var targetOffset    = blobAssetReferenceChanges[i].Offset;

                BlobAssetReferenceData targetBlobAssetReferenceData = default;
                if (patcherBlobAssetSystem.TryGetBlobAsset(blobAssetReferenceChanges[i].Value, out var blobAssetPtr))
                {
                    targetBlobAssetReferenceData = new BlobAssetReferenceData {
                        m_Ptr = (byte *)blobAssetPtr.Data
                    };
                }

                if (packedEntities.TryGetFirstValue(packedComponent.PackedEntityIndex, out var entity, out var iterator))
                {
                    do
                    {
                        if (!entityManager.Exists(entity))
                        {
                            Debug.LogWarning($"ApplyBlobAssetReferencePatches<{component}>({packedEntityGuids[packedComponent.PackedEntityIndex]}) but entity to patch does not exist.");
                        }
                        else if (!entityManager.HasComponent(entity, component))
                        {
                            Debug.LogWarning($"ApplyBlobAssetReferencePatches<{component}>({packedEntityGuids[packedComponent.PackedEntityIndex]}) but component in entity to patch does not exist.");
                        }
                        else
                        {
                            if (component.IsBuffer)
                            {
                                var pointer = (byte *)entityManager.GetBufferRawRW(entity, component.TypeIndex);
                                UnsafeUtility.MemCpy(pointer + targetOffset, &targetBlobAssetReferenceData, sizeof(BlobAssetReferenceData));
                            }
                            else if (component.IsManagedComponent || component.IsSharedComponent)
                            {
                                managedObjectBlobAssetReferencePatches.Add(
                                    new EntityComponentPair {
                                    Entity = entity, Component = component
                                },
                                    new ManagedObjectBlobAssetReferencePatch {
                                    Id = targetOffset, Target = blobAssetReferenceChanges[i].Value
                                });
                            }
                            else
                            {
                                var pointer = (byte *)entityManager.GetComponentDataRawRW(entity, component.TypeIndex);
                                UnsafeUtility.MemCpy(pointer + targetOffset, &targetBlobAssetReferenceData, sizeof(BlobAssetReferenceData));
                            }
                        }
                    }while (packedEntities.TryGetNextValue(out entity, ref iterator));
                }
            }
            s_ApplyBlobAssetChangesProfilerMarker.End();

#if !UNITY_DOTSRUNTIME
            var managedObjectPatcher = new ManagedObjectBlobAssetReferencePatcher(patcherBlobAssetSystem);

            // Apply all managed entity patches
            using (var keys = managedObjectBlobAssetReferencePatches.GetKeyArray(Allocator.Temp))
            {
                keys.Sort();
                var uniqueCount = keys.Unique();

                for (var i = 0; i < uniqueCount; i++)
                {
                    var pair    = keys[i];
                    var patches = managedObjectBlobAssetReferencePatches.GetValuesForKey(pair);

                    if (pair.Component.IsManagedComponent)
                    {
                        var obj = entityManager.GetComponentObject <object>(pair.Entity, pair.Component);
                        managedObjectPatcher.ApplyPatches(ref obj, patches);
                    }
                    else if (pair.Component.IsSharedComponent)
                    {
                        var obj = entityManager.GetSharedComponentData(pair.Entity, pair.Component.TypeIndex);
                        managedObjectPatcher.ApplyPatches(ref obj, patches);
                        entityManager.SetSharedComponentDataBoxedDefaultMustBeNull(pair.Entity, pair.Component.TypeIndex, obj);
                    }

                    patches.Dispose();
                }
            }
#endif

            managedObjectBlobAssetReferencePatches.Dispose();

            // Workaround to catch some special cases where the memory is never released. (e.g. reloading a scene, toggling live-link on/off).
            patcherBlobAssetSystem.ReleaseUnusedBlobAssets();
        }
Example #24
        protected override void OnCreate()
        {
            stopwatch = new System.Diagnostics.Stopwatch();
            stopwatch.Restart();

            int i_len = 10000;

            NativeMultiHashMap <int, int> dic  = new NativeMultiHashMap <int, int> (i_len, Allocator.Temp);
            NativeMultiHashMap <int, int> dic2 = new NativeMultiHashMap <int, int> (i_len, Allocator.TempJob);

            NativeHashMap <int, int> dic3 = new NativeHashMap <int, int> (i_len, Allocator.Temp);

            for (int i = 0; i < i_len; i++)
            {
                var v2 = new Vector2Int(Random.Range(10, -10), Random.Range(-1000, 1000));
                dic.Add(v2.x, v2.y);
                dic2.Add(v2.x, v2.y);
                dic3.TryAdd(v2.x, v2.y);
                // Debug.Log ( "v2: " + v2 ) ;
            }

            Debug.Log("populate hashmaps " + stopwatch.ElapsedMilliseconds + "ms");
            stopwatch.Restart();
            // NativeMultiHashMap <int,int> dic = nmhm_newBornEntitiesPerTile ;

            var withDuplicates = dic.GetKeyArray(Allocator.Temp);

            Debug.Log("multi hashmap get keys " + stopwatch.ElapsedMilliseconds + "ms");
            stopwatch.Restart();

            NativeArray <int> withDuplicates2 = new NativeArray <int> (dic2.Count(), Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

            Dependency = new GetArray( )
            {
                dic            = dic2,
                withDuplicates = withDuplicates2
            }.Schedule(Dependency);
            Dependency.Complete();

            Debug.Log("multi hashmap get keys job burst B " + stopwatch.ElapsedMilliseconds + "ms");
            stopwatch.Restart();

            var noDuplicates = dic3.GetKeyArray(Allocator.Temp);

            Debug.Log("hashmap get keys " + stopwatch.ElapsedMilliseconds + "ms");


            /*
             * for ( int i = 0; i < noDuplicates.Length; i ++ )
             * {
             *  Debug.Log ( "no duplicates: " + noDuplicates [i] ) ;
             * }
             */


            stopwatch.Restart();


            withDuplicates.Sort();
            Debug.Log("sort A " + stopwatch.ElapsedMilliseconds + "ms");
            stopwatch.Restart();

            int uniqueCount = withDuplicates.Unique();

            Debug.Log("multi hashmap unique A " + stopwatch.ElapsedMilliseconds + "ms");
            Debug.Log("uniqueCount " + uniqueCount);
            stopwatch.Restart();
            Dependency = new Sort( )
            {
                withDuplicates = withDuplicates2
            }.Schedule(Dependency);
            Dependency.Complete();

            Debug.Log("sort job burst B " + stopwatch.ElapsedMilliseconds + "ms");
            stopwatch.Restart();

            NativeArray <int> na_i = new NativeArray <int> (3, Allocator.TempJob);

            Dependency = new Unique( )
            {
                i = na_i,
                withDuplicates = withDuplicates2
            }.Schedule(Dependency);
            Dependency.Complete();

            uniqueCount = na_i [0];

            Debug.Log("multi hashmap unique job burst B " + stopwatch.ElapsedMilliseconds + "ms");
            Debug.Log("uniqueCount " + uniqueCount);
            stopwatch.Restart();


            Debug.Log("uniqueCount hashmap " + noDuplicates.Length);

            /*
             *  for ( int i = 0; i < i_len; i ++ )
             *  {
             *      Debug.Log ( "B: " + withDuplicates [i] ) ;
             *  }
             */
            na_i.Dispose();
            withDuplicates2.Dispose();
            dic2.Dispose();
            withDuplicates.Dispose();
            noDuplicates.Dispose();
            dic.Dispose();

            Debug.LogError("Stop");
        }
Example #25
 public void Execute()
 {
     withDuplicates = dic.GetKeyArray(Allocator.Temp);
 }
Example #26
        protected override void OnUpdate( )
        {
            if (group_MMMamager.CalculateChunkCount() == 0)
            {
                Debug.LogWarning("There is no active manager.");
                return;
            }

            EntityCommandBuffer ecb = becb.CreateCommandBuffer();

            EntityCommandBuffer.ParallelWriter ecbp = ecb.AsParallelWriter();


            l_managerSharedData.Clear();
            EntityManager.GetAllUniqueSharedComponentData(l_managerSharedData);

            ComponentDataFromEntity <NNManagerBestFitnessComponent> a_managerBestFitness = GetComponentDataFromEntity <NNManagerBestFitnessComponent> (false);
            ComponentDataFromEntity <NNManagerComponent>            a_manager            = GetComponentDataFromEntity <NNManagerComponent> (true);
            ComponentDataFromEntity <NNScoreComponent> a_managerScore = GetComponentDataFromEntity <NNScoreComponent> (true);

            ComponentDataFromEntity <NNBrainScoreComponent> a_brainScore = GetComponentDataFromEntity <NNBrainScoreComponent> (true);

            ComponentDataFromEntity <NNMangerIsSpawningNewGenerationTag> a_mangerIsSpawningNewGeneration = GetComponentDataFromEntity <NNMangerIsSpawningNewGenerationTag> (false);

            BufferFromEntity <NNInput2HiddenLayersWeightsBuffer>  NNInput2HiddenLayersWeightsBuffer  = GetBufferFromEntity <NNInput2HiddenLayersWeightsBuffer> (false);
            BufferFromEntity <NNHidden2OutputLayersWeightsBuffer> NNHidden2OutputLayersWeightsBuffer = GetBufferFromEntity <NNHidden2OutputLayersWeightsBuffer> (false);

            // BufferFromEntity <NNHiddenLayersNeuronsBiasBuffer> NNHiddenLayersNeuronsBiasBuffer           = GetBufferFromEntity <NNHiddenLayersNeuronsBiasBuffer> ( false ) ;


            // ComponentDataFromEntity <NNScoreComponent> a_managerScore                                           = GetComponentDataFromEntity <NNScoreComponent> ( true ) ;

            BufferFromEntity <NNINdexProbabilityBuffer> indexProbabilityBuffer = GetBufferFromEntity <NNINdexProbabilityBuffer> (false);

            // int i_validManagersCount                                                                     = 0 ;
            // bool canCalculateCrossovers                                                                  = false ;

            for (int i = 0; i < l_managerSharedData.Count; i++)
            {
                NNManagerSharedComponent mangerSharedComponent = l_managerSharedData [i];
                Entity nnManagerEntity = new Entity()
                {
                    Index = mangerSharedComponent.i_entityIndex, Version = mangerSharedComponent.i_entityVersion
                };

                if (a_mangerIsSpawningNewGeneration.HasComponent(nnManagerEntity))
                {
                    group_parentPopulation.SetSharedComponentFilter(mangerSharedComponent);
                    group_offspringPopulation.SetSharedComponentFilter(mangerSharedComponent);

                    NativeArray <Entity> na_parentPopulationEntities    = group_parentPopulation.ToEntityArray(Allocator.TempJob);
                    NativeArray <Entity> na_offspringPopulationEntities = group_offspringPopulation.ToEntityArray(Allocator.TempJob);

                    DynamicBuffer <NNINdexProbabilityBuffer> a_indexProbability = indexProbabilityBuffer [nnManagerEntity];


                    NNScoreComponent managerScore = a_managerScore [nnManagerEntity];
                    // int i_eliteScore                                            = managerScore.i ;



                    Debug.Log("Total score: " + managerScore.i + "; elite score: " + managerScore.i_elite);


                    if (managerScore.i_elite <= 1)
                    {
                        Dependency = new CopyLastBestGenerationDNAJob()
                        {
                            na_parentPopulationEntities    = na_parentPopulationEntities,
                            na_offspringPopulationEntities = na_offspringPopulationEntities,

                            // na_indexProbability              = na_indexProbability,

                            input2HiddenLayersWeightsBuffer  = NNInput2HiddenLayersWeightsBuffer,
                            hidden2OutputLayersWeightsBuffer = NNHidden2OutputLayersWeightsBuffer,

                            // hiddenLayersNeuronsBiasBuffer    = NNHiddenLayersNeuronsBiasBuffer
                        }.Schedule(na_parentPopulationEntities.Length, 256, Dependency);

                        Dependency.Complete();
                    }
                    else
                    {
                        // New score is fine.

                        // Calculate index probability, to select the best parents.
                        // Each entity's index appears in the array as many times as its score, e.g.
                        // the 0th entity with 0 points won't be in the array,
                        // the 1st entity with 2 points will appear 2 times,
                        // the nth entity with a score of x will appear x times in the array.

                        NNManagerComponent manager = a_manager [nnManagerEntity];

                        NativeMultiHashMap <int, EntityIndex> nmhm_parentEntitiesScore = new NativeMultiHashMap <int, EntityIndex> (na_parentPopulationEntities.Length, Allocator.TempJob);

// Debug.Log ( "crossover parent score" ) ;
                        Dependency = new CommonJobs.GetPopulationScoreJob( )
                        {
                            canGetEachScore       = false,
                            na_populationEntities = na_parentPopulationEntities,
                            a_brainScore          = a_brainScore,

                            nmhm_populationEntitiesScore = nmhm_parentEntitiesScore.AsParallelWriter()
                        }.Schedule(na_parentPopulationEntities.Length, 256, Dependency);

                        Dependency.Complete();

                        NativeArray <int> na_parentSortedKeysWithDuplicates = nmhm_parentEntitiesScore.GetKeyArray(Allocator.TempJob);
                        // Sort the keys, then compact: Unique() keeps the first occurrence of each key
                        // at the front of the array and returns the unique count; the array length itself
                        // does not change.
                        na_parentSortedKeysWithDuplicates.Sort();
                        int i_uniqueKeyCount = na_parentSortedKeysWithDuplicates.Unique();

                        int i_eltieCountTemp = (int)(na_parentSortedKeysWithDuplicates.Length * manager.f_eliteSize);
                        // A minimum elite size must be met.
                        int i_eltiesCount = i_eltieCountTemp > 0 ? i_eltieCountTemp : na_parentSortedKeysWithDuplicates.Length;

                        if (na_parentSortedKeysWithDuplicates.Length == 0)
                        {
                            Debug.LogError("Not enough elites for training. Please increase the population or the elite percentage.");

                            na_offspringPopulationEntities.Dispose();
                            na_parentPopulationEntities.Dispose();
                            nmhm_parentEntitiesScore.Dispose();
                            na_parentSortedKeysWithDuplicates.Dispose();

                            continue;
                        }

                        NativeArray <EntityIndex> na_elities = new NativeArray <EntityIndex> (i_eltiesCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

                        DynamicBuffer <NNINdexProbabilityBuffer> a_eliteIndexProbability = indexProbabilityBuffer [nnManagerEntity];
                        int i_totalElitesScore = managerScore.i_elite;
                        a_eliteIndexProbability.ResizeUninitialized(i_totalElitesScore);

                        Dependency = new CommonJobs.GetElitesEntitiesJob()
                        {
                            i_eltiesCount = i_eltiesCount,

                            na_elities         = na_elities,
                            nmhm_entitiesScore = nmhm_parentEntitiesScore,
                            na_currentSortedKeysWithDuplicates = na_parentSortedKeysWithDuplicates
                        }.Schedule();



                        Dependency = new CalculateIndexProbabilityOfPopulationJob()
                        {
                            na_populationEntities = na_elities,

                            a_indexProbability = a_eliteIndexProbability,

                            a_brainScore = a_brainScore
                        }.Schedule(Dependency);


                        NativeArray <int> na_randomValues = new NativeArray <int> (na_parentPopulationEntities.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

                        random.NextInt2();
                        Dependency = new RandomIntsJob()
                        {
                            na_randomValues = na_randomValues,
                            random          = random
                        }.Schedule(Dependency);

                        Dependency.Complete();

// Debug.LogError ( "parent pop: " + na_parentPopulationEntities.Length + "; offspring pop: " + na_offspringPopulationEntities.Length ) ;
                        Dependency = new DNACrossOverJob()
                        {
                            na_parentPopulationEntities    = na_parentPopulationEntities,
                            na_offspringPopulationEntities = na_offspringPopulationEntities,

                            na_indexProbability = a_eliteIndexProbability.Reinterpret <int> ().AsNativeArray(),

                            input2HiddenLayersWeightsBuffer  = NNInput2HiddenLayersWeightsBuffer,
                            hidden2OutputLayersWeightsBuffer = NNHidden2OutputLayersWeightsBuffer,

                            na_randomValues = na_randomValues,
                            random          = random,

                            // i_eliteScore                     = i_eliteScore
                        }.Schedule(na_parentPopulationEntities.Length, 256, Dependency);

                        Dependency.Complete();

                        na_randomValues.Dispose();
                        na_elities.Dispose();
                        nmhm_parentEntitiesScore.Dispose();
                        na_parentSortedKeysWithDuplicates.Dispose();
                    }

                    ecb.RemoveComponent <NNMangerIsSpawningNewGenerationTag> (nnManagerEntity);
                    becb.AddJobHandleForProducer(Dependency);


                    na_offspringPopulationEntities.Dispose();
                    na_parentPopulationEntities.Dispose();
                }
            } // for


            Entities
            .WithName("GenerationSpawningIsCompleteJob")
            .WithAll <NNBrainTag, IsSpawningTag> ()
            .ForEach((Entity entity, int entityInQueryIndex) =>
            {
                ecbp.RemoveComponent <IsSpawningTag> (entityInQueryIndex, entity);
                ecbp.AddComponent <IsSpawningCompleteTag> (entityInQueryIndex, entity);
            }).ScheduleParallel();

            becb.AddJobHandleForProducer(Dependency);
        }