Example 1
    protected override void OnUpdate()
    {
        unitQuery = GetEntityQuery(typeof(UnitTag));     // Must be refreshed every frame, as units may die or spawn (in my game they only die)
        // THIS IS THE ARRAY WHICH THE GRID REFERENCES
        // It is perfectly aligned with the unit query: because of how we fill it (see the sketch after this method),
        // any job scheduled with unitQuery visits entities in the same order as this temp array
        // The spatial grid values are indices into this array
        NativeArray <float2> copyPositions = new NativeArray <float2>(unitQuery.CalculateEntityCount(), Allocator.TempJob, NativeArrayOptions.ClearMemory);

        // These three jobs each run single-threaded, but on separate worker threads, so they execute concurrently
        // This is faster than spreading each job over multiple threads
        //
        // So, each frame:
        // We must loop through all units and copy their positions to the temp array
        JobHandle copyJob = new CopyPositionsJob
        {
            archetypeTranslation = GetArchetypeChunkComponentType <Translation>(),
            copyPositions        = copyPositions,
        }.ScheduleSingle(unitQuery, Dependency);
        // At the same time, we must build the map on a separate thread
        // This means we are looping through all units and updating the grid
        JobHandle updateMapJob = new BuildMapJob
        {
            BUCKET_SIZE          = BUCKET_SIZE,
            N_CELLS_ACROSS       = CELLS_ACROSS,
            CELL_SIZE            = CELL_SIZE,
            archetypeTranslation = GetArchetypeChunkComponentType <Translation>(),
            grid = grid,
        }.ScheduleSingle(unitQuery, Dependency);
        // At the same time, we must record which cells have changed
        // This is an optimization, as you will see
        // Basically, we are bucketing each unit the same way as in updateMapJob, but writing to a separate, small list
        JobHandle recordActiveCellsJob = new RecordActiveBucketsJob
        {
            BUCKET_SIZE          = BUCKET_SIZE,
            nCellsAcross         = CELLS_ACROSS,
            cellSize             = CELL_SIZE,
            archetypeTranslation = GetArchetypeChunkComponentType <Translation>(),
            activeBuckets        = activeCells,
        }.ScheduleSingle(unitQuery, Dependency);

        // All three jobs must finish before the collision pass can run, so we sync here on the main thread
        JobHandle barrier = JobHandle.CombineDependencies(updateMapJob, recordActiveCellsJob);

        barrier = JobHandle.CombineDependencies(barrier, copyJob);
        barrier.Complete();

        // With the grid built and positions copied, resolve collisions in parallel, one unit per index
        JobHandle resolveCollisionsJob = new ResolveCollisionsJob
        {
            BUCKET_SIZE    = BUCKET_SIZE,
            CELL_SIZE      = CELL_SIZE,
            N_CELLS_ACROSS = CELLS_ACROSS,
            copyPositions  = copyPositions,
            grid           = grid,
        }.Schedule(copyPositions.Length, 128, barrier);
        JobHandle clearGridCountersJob = new ClearGridCountersJob
        {
            activeCells = activeCells,
            grid        = grid,
        }.Schedule(resolveCollisionsJob);
        JobHandle clearChangesJob = new ClearChangesListJob
        {
            list = activeCells,
        }.Schedule(clearGridCountersJob);
        JobHandle writeToUnitsJob = new WriteToUnitsJob
        {
            N_CELLS_ACROSS       = CELLS_ACROSS,
            CELL_SIZE            = CELL_SIZE,
            archetypeTranslation = GetArchetypeChunkComponentType <Translation>(),
            copyPositions        = copyPositions,
        }.Schedule(unitQuery, resolveCollisionsJob);

        JobHandle disposeHandle = copyPositions.Dispose(writeToUnitsJob);
        JobHandle final         = JobHandle.CombineDependencies(disposeHandle, clearChangesJob);

        Dependency = final;
    }
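
The job structs referenced above are not part of the snippet. The sketch below is a hypothetical reconstruction, not the author's code: it assumes the usual Unity.Entities, Unity.Transforms, Unity.Mathematics, Unity.Collections and Unity.Burst usings, an XZ-plane projection into float2, and a square grid of N_CELLS_ACROSS x N_CELLS_ACROSS cells. It shows why the temp array stays aligned with unitQuery (firstEntityIndex is the chunk's offset within the query) and the kind of bucket indexing BuildMapJob and ResolveCollisionsJob would need.

    // Hypothetical sketch only; the real CopyPositionsJob is not shown in this example
    [BurstCompile]
    struct CopyPositionsJob : IJobChunk
    {
        [ReadOnly] public ArchetypeChunkComponentType<Translation> archetypeTranslation;
        public NativeArray<float2> copyPositions;

        public void Execute(ArchetypeChunk chunk, int chunkIndex, int firstEntityIndex)
        {
            NativeArray<Translation> translations = chunk.GetNativeArray(archetypeTranslation);
            for (int i = 0; i < chunk.Count; i++)
            {
                // firstEntityIndex is this chunk's offset within unitQuery, so the slot written here
                // matches the entity's position in any other job scheduled against the same query
                float3 p = translations[i].Value;
                copyPositions[firstEntityIndex + i] = new float2(p.x, p.z);   // assuming units move on the XZ plane
            }
        }
    }

    // Hypothetical helper showing the kind of flat cell index the grid jobs would compute from a position
    static int CellIndex(float2 position, float cellSize, int nCellsAcross)
    {
        int x = math.clamp((int)(position.x / cellSize), 0, nCellsAcross - 1);
        int y = math.clamp((int)(position.y / cellSize), 0, nCellsAcross - 1);
        return y * nCellsAcross + x;
    }
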
Example 2
    protected override JobHandle OnUpdate(JobHandle inputDeps)
    {
        m_allClothPointsGroup.ResetFilter();

        // todo: preallocate this, garbage is gross
        var garmentList = new List <ClothGarment>();

        EntityManager.GetAllUniqueSharedComponentData(garmentList);
        var garmentCount = garmentList.Count;

        // First project positions for all points
        var dt         = 1.0f / 60.0f;
        var gravity    = new float4(0.0f, -9.8f * dt, 0.0f, 0.0f);
        var projectJob = new ProjectPositionJob
        {
            Dt        = dt,
            GravityDt = gravity,

            WorldToLocals = s_allGarmentWorldToLocals
        };
        var projectHandle = projectJob.Schedule(m_allClothPointsGroup, inputDeps);

        // For each constraint cluster, run the solver iterations (a sketch of a possible solver job follows this method)
        var clusterList = new List <ConstraintCluster>();

        EntityManager.GetAllUniqueSharedComponentData(clusterList);

        var clusterCount             = clusterList.Count;
        var allPreviousSolversHandle = projectHandle;

        const int kIterationCount = 8;

        for (int iterationIndex = 0; iterationIndex < kIterationCount; ++iterationIndex)
        {
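            // Index 0 of the unique shared component list is the default value, so real clusters start at 1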
            for (int clusterIndex = 1; clusterIndex < clusterCount; ++clusterIndex)
            {
                m_clothConstraintsGroup.SetFilter(new ConstraintCluster {
                    ClusterIndex = clusterIndex
                });
                var solverJob = new ConstraintSolverJob
                {
                    Positions = GetComponentDataFromEntity <ProjectedPosition>(),
                    Pins      = GetComponentDataFromEntity <PinWeight>()
                };
                var solverHandle = solverJob.Schedule(m_clothConstraintsGroup, allPreviousSolversHandle);
                allPreviousSolversHandle = JobHandle.CombineDependencies(allPreviousSolversHandle, solverHandle);
            }
        }

        var copyPositionsJob = new CopyPositionsJob();
        var copyHandle       = copyPositionsJob.Schedule(m_allClothPointsGroup, allPreviousSolversHandle);

        // todo: Mesh update currently requires syncing the jobs and causes a stall.
        // better way to do this: copy out the results from the previous frame at the beginning of each frame
        // then run mesh update on main thread at the same time as the sim for the current frame

        #region update mesh

        copyHandle.Complete();

        var vertexPositions = new List <NativeArray <Vector3> >(garmentCount - 1);

        // Write last frame's results to mesh for each garment
        // Slowest part of the process and unfortunately must be done on main thread until we have unmanaged mesh type
        var copyAllVertexPositionsHandle = inputDeps;
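        // Index 0 of the unique shared component list is the default value, so real garments start at 1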
        for (int i = 1; i < garmentCount; ++i)
        {
            var garment     = garmentList[i];
            var garmentMesh = s_allGarmentMeshes[garment.GarmentIndex];
            var vertexCount = garmentMesh.vertexCount;

            var simPointsAsVertices = new NativeArray <Vector3>(vertexCount, Allocator.TempJob);
            vertexPositions.Add(simPointsAsVertices);

            m_allClothPointsGroup.SetFilter(garment);
            var copyToVertexJob = new CopySimPointsToVerticesJob
            {
                Vertices = simPointsAsVertices
            };
            var copyToVertexHandle = copyToVertexJob.Schedule(m_allClothPointsGroup, inputDeps);
            copyAllVertexPositionsHandle = JobHandle.CombineDependencies(copyAllVertexPositionsHandle, copyToVertexHandle);
        }

        // todo: Gross sync point, can we get rid of this?
        // we probably can since all we really need is a copy of the data
        // then we can do the write back to mesh on the main thread while sim runs
        copyAllVertexPositionsHandle.Complete();

        // Write back to mesh (slow and single threaded by necessity :(
        for (int i = 1; i < garmentCount; ++i)
        {
            var garment        = garmentList[i];
            var garmentMesh    = s_allGarmentMeshes[garment.GarmentIndex];
            var verticesNative = vertexPositions[i - 1];
            var newVertexArray = new Vector3[verticesNative.Length]; // todo: preallocate this, garbage is gross
            verticesNative.CopyTo(newVertexArray);

            garmentMesh.vertices = newVertexArray;

            verticesNative.Dispose();
        }
        #endregion

        return allPreviousSolversHandle;
    }
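
The cloth jobs are likewise not shown. The sketch below is an assumed reconstruction of the constraint solver only, in the spirit of position-based dynamics: the DistanceConstraint component and the layouts of ProjectedPosition and PinWeight are invented for illustration (only the names ProjectedPosition, PinWeight and ConstraintSolverJob appear above), and the usual Unity.Entities, Unity.Mathematics, Unity.Collections and Unity.Burst usings are assumed. The cluster filtering presumably exists so that constraints inside one cluster never share a point and can be relaxed without write conflicts, which is why each cluster's job waits on all previous ones.

    // Assumed layouts; the real components are not shown in the snippet
    public struct ProjectedPosition  : IComponentData { public float4 Value; }
    public struct PinWeight          : IComponentData { public float  Value; }   // assumed: 0 = fully pinned, 1 = free
    public struct DistanceConstraint : IComponentData
    {
        public Entity PointA;
        public Entity PointB;
        public float  RestLength;
    }

    // Hypothetical solver: relax each distance constraint in the filtered cluster once per scheduled pass
    [BurstCompile]
    struct ConstraintSolverJob : IJobChunk
    {
        // Looked up by entity because one constraint touches two arbitrary cloth points
        [NativeDisableParallelForRestriction] public ComponentDataFromEntity<ProjectedPosition> Positions;
        [ReadOnly] public ComponentDataFromEntity<PinWeight> Pins;
        [ReadOnly] public ArchetypeChunkComponentType<DistanceConstraint> ConstraintType;

        public void Execute(ArchetypeChunk chunk, int chunkIndex, int firstEntityIndex)
        {
            NativeArray<DistanceConstraint> constraints = chunk.GetNativeArray(ConstraintType);
            for (int i = 0; i < chunk.Count; i++)
            {
                DistanceConstraint c = constraints[i];
                float4 pa = Positions[c.PointA].Value;
                float4 pb = Positions[c.PointB].Value;

                // Classic distance constraint: push or pull both ends toward the rest length,
                // splitting the correction by each end's pin weight
                float4 delta  = pb - pa;
                float  length = math.length(delta.xyz);
                if (length < 1e-6f)
                {
                    continue;
                }
                float4 correction = delta * ((length - c.RestLength) / length);

                float wa   = Pins[c.PointA].Value;
                float wb   = Pins[c.PointB].Value;
                float wSum = wa + wb;
                if (wSum < 1e-6f)
                {
                    continue;
                }

                pa += correction * (wa / wSum);
                pb -= correction * (wb / wSum);

                Positions[c.PointA] = new ProjectedPosition { Value = pa };
                Positions[c.PointB] = new ProjectedPosition { Value = pb };
            }
        }
    }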