public static ITweenJob CreateJob<T>()
{
    ITweenJob job = null;

    if (typeof(RectTransform) == typeof(T))
    {
        job = new RectTransformJob();
    }
    else if (typeof(UnityEngine.Graphics) == typeof(T))
    {
        job = new GraphicsJob();
    }
    else if (typeof(UnityEngine.UI.Graphic) == typeof(T))
    {
        job = new GraphicsJob();
    }
    else if (typeof(Material) == typeof(T))
    {
        job = new MaterialJob();
    }
    else if (typeof(Transform) == typeof(T))
    {
        job = new TransformJob();
    }
    else if (typeof(CanvasGroup) == typeof(T))
    {
        job = new CanvasGroupJob();
    }

    job?.Initialize(typeof(T));
    return job;
}
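The factory above relies on a single member of ITweenJob. A minimal sketch of that contract, assuming Initialize(Type) is all the concrete jobs need (any further members would be specific to the original project):

using System;

public interface ITweenJob
{
    // Called by CreateJob<T> with the component type the job will tween.
    void Initialize(Type targetType);
}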
private void Update()
{
    var movementParameters = movement.Parameters;
    var transformParameters = new NativeArray<TransformData>(movementParameters, Allocator.TempJob);

    var transformJob = new TransformJob(transformParameters);
    var transformJobHandle = transformJob.Schedule(transforms);
    JobHandle.ScheduleBatchedJobs();

    transformJobHandle.Complete();
    transformParameters.Dispose();
}
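A minimal sketch of the TransformJob this snippet constructs from a NativeArray<TransformData> and schedules over a TransformAccessArray. The TransformData field names (position, rotation) and what Execute writes are assumptions, not taken from the original project:

using Unity.Collections;
using UnityEngine.Jobs;

public struct TransformJob : IJobParallelForTransform
{
    [ReadOnly] public NativeArray<TransformData> parameters;

    public TransformJob(NativeArray<TransformData> parameters)
    {
        this.parameters = parameters;
    }

    public void Execute(int index, TransformAccess transform)
    {
        // Apply the precomputed movement data to the transform at this index.
        transform.position = parameters[index].position;
        transform.rotation = parameters[index].rotation;
    }
}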
protected override void _SimulationStep(float timestep)
{
    TransformJob job = new TransformJob()
    {
        initialVertices = simulation.initialVertices,
        vertices = simulation.vertices,
        matrix = Matrix4x4.TRS(this.transform.position, this.transform.rotation, this.transform.lossyScale)
    };

    job.Schedule(simulation.vertices.Length, 128).Complete();

    this.instance.vertices.CpuReference.CopyFrom(simulation.vertices);
    this.instance.vertices.SetGPUDirty();
}
private void Update()
{
    if (useJob == false)
    {
        for (int i = 0; i < players.Count; ++i)
        {
            var p = players[i];
            p.transform.position += new Vector3(0f, p.moveSpeed * UnityEngine.Time.deltaTime, 0f);

            if (p.transform.position.y > 20f)
            {
                p.moveSpeed = -Mathf.Abs(p.moveSpeed);
            }
            else if (p.transform.position.y < -20f)
            {
                p.moveSpeed = Mathf.Abs(p.moveSpeed);
            }
        }
    }
    else
    {
        NativeArray<float3> _positions = new NativeArray<float3>(players.Count, Allocator.TempJob);
        NativeArray<float> _moveSpeeds = new NativeArray<float>(players.Count, Allocator.TempJob);

        for (int i = 0; i < players.Count; ++i)
        {
            _positions[i] = players[i].transform.position;
            _moveSpeeds[i] = players[i].moveSpeed;
        }

        TransformJob job = new TransformJob()
        {
            positions = _positions,
            moveSpeeds = _moveSpeeds,
            deltaTime = UnityEngine.Time.deltaTime,
        };

        JobHandle handle = job.Schedule(players.Count, 1000);
        handle.Complete();

        for (int i = 0; i < players.Count; ++i)
        {
            players[i].transform.position = _positions[i];
            players[i].moveSpeed = _moveSpeeds[i];
        }

        _positions.Dispose();
        _moveSpeeds.Dispose();
    }
}
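A sketch of the job struct the else branch schedules, assuming it mirrors the main-thread loop above (move each player vertically and flip moveSpeed at the +/-20 bounds). The struct body is an illustration, not the original implementation:

using Unity.Collections;
using Unity.Jobs;
using Unity.Mathematics;

public struct TransformJob : IJobParallelFor
{
    public NativeArray<float3> positions;
    public NativeArray<float> moveSpeeds;
    public float deltaTime;

    public void Execute(int index)
    {
        float3 p = positions[index];
        float speed = moveSpeeds[index];

        // Same bounce logic as the main-thread branch, applied per element.
        p.y += speed * deltaTime;
        if (p.y > 20f)
        {
            speed = -math.abs(speed);
        }
        else if (p.y < -20f)
        {
            speed = math.abs(speed);
        }

        positions[index] = p;
        moveSpeeds[index] = speed;
    }
}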
void jobTest4()
{
    TransformAccessArray result = new TransformAccessArray(1);
    result.Add(transform);

    TransformJob tjob = new TransformJob
    {
        vec1 = tr1.transform.position,
        vec2 = tr2.transform.position
    };

    tjob.Schedule(result).Complete();
    result.Dispose();
}
/// <summary>
/// Queues a rebuild of the specified group.
/// </summary>
/// <param name="group">The group to rebuild.</param>
private void QueueRebuild(StaticBatchingGroupKey group)
{
    // Read instances
    List<StaticMeshInstance> instances;
    if (!this.meshInstances.TryGetValue(group, out instances))
    {
        return;
    }

    // Prepare data arrays for the job
    int vCounter = 0, iCounter = 0; // Vertex and index counters
    StaticBatchingRebuildJobData jobData = new StaticBatchingRebuildJobData()
    {
        groupKey = group
    };
    jobData.PreAllocate();

    foreach (var instance in instances)
    {
        var cache = GetVisCache(instance.mesh);
        jobData.meshInstances.Add(instance);
        jobData.vertexOffsets.Add(vCounter);
        jobData.indexOffsets.Add(iCounter);

        vCounter += cache.vertices.Length;
        iCounter += cache.indices.Length;
    }

    // Allocate job data memory
    jobData.Allocate(vCounter, iCounter);

    // Dispatch jobs
    var dep = new TransformJob() { dataId = this.runningJobDataCounter }.Schedule(jobData.meshInstances.Count, 4);
    dep = new CopyJob() { dataId = this.runningJobDataCounter }.Schedule(dep);

    jobData.jobHandle = dep;
    jobData.jobId = this.runningJobDataCounter++;
    this.runningJobs.Add(jobData.jobId, jobData);

    JobHandle.ScheduleBatchedJobs();
}
// Start is called before the first frame update
void Start()
{
    SimpleJob simpleJob = new SimpleJob
    {
        number = myNumber,
        data = myData,
    };

    ParallelJob parallelJob = new ParallelJob
    {
        number = myNumber,
        data = myData,
    };

    TransformJob transformJob = new TransformJob
    {
        number = myNumber,
        data = myData,
        deltaTime = Time.deltaTime,
    };

    // Dependency chain: simpleJob -> parallelJob -> transformJob
    simpleJobHandle = simpleJob.Schedule();
    parallelJobHandle = parallelJob.Schedule(myData.Length, 32, simpleJobHandle);
    transformJobHandle = transformJob.Schedule(transformAccessArray, parallelJobHandle);
    JobHandle.ScheduleBatchedJobs();

    simpleJobHandle.Complete();
    parallelJobHandle.Complete();
    transformJobHandle.Complete();

    if (simpleJobHandle.IsCompleted)
    {
        Debug.Log("simple job result " + simpleJob.data[0]);
    }

    if (parallelJobHandle.IsCompleted)
    {
        for (int i = 0; i < myData.Length; ++i)
        {
            Debug.Log("parallel job result " + parallelJob.data[i]);
        }
    }

    myData.Dispose();
    transformAccessArray.Dispose();
}
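For the Start example above to run, myData and transformAccessArray must be allocated beforehand on the same MonoBehaviour; a hedged setup sketch (the element type float, the array length, and the use of Awake/OnDestroy are assumptions):

using Unity.Collections;
using UnityEngine;
using UnityEngine.Jobs;

NativeArray<float> myData;
TransformAccessArray transformAccessArray;

void Awake()
{
    // Persistent containers so they survive until Start disposes them.
    myData = new NativeArray<float>(64, Allocator.Persistent);
    transformAccessArray = new TransformAccessArray(new Transform[] { transform });
}

void OnDestroy()
{
    // Guard against double disposal in case Start never ran to completion.
    if (myData.IsCreated) myData.Dispose();
    if (transformAccessArray.isCreated) transformAccessArray.Dispose();
}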
private void Update()
{
    TransformJob transformJob = new TransformJob
    {
        number = myNumber,
        data = myData,
        deltaTime = Time.deltaTime,
    };

    transformJobHandle = transformJob.Schedule(transformAccessArray);
    JobHandle.ScheduleBatchedJobs();
    transformJobHandle.Complete();

    if (transformJobHandle.IsCompleted && transform.position.x >= Vector3.one.x * 3f)
    {
        // Debug.Log("Transform job completed");
    }
}
public override void DoAnim(int Frame)
{
    if (instance == null || m_count == 0)
    {
        return;
    }

    // Initialize the job data
    var job = new TransformJob()
    {
        frame = Frame,
        rotate = _Rotate,
        position = _pos,
        MoveSpeed = moveSpeed * Time.deltaTime,
        RotateSpeed = Quaternion.Euler(rotateSpeed * Time.deltaTime)
    };

    JobHandle jobHandle = job.Schedule(m_count, 64);
    jobHandle.Complete();
}
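A sketch of the parallel job DoAnim schedules over m_count instances; the container types for position and rotate, and the per-element update itself, are assumptions based only on how the data is named above:

using Unity.Collections;
using Unity.Jobs;
using UnityEngine;

public struct TransformJob : IJobParallelFor
{
    public int frame;                        // carried through unchanged; its use is not shown above
    public float MoveSpeed;                  // already scaled by Time.deltaTime by the caller
    public Quaternion RotateSpeed;           // per-frame incremental rotation
    public NativeArray<Vector3> position;    // assumed container type
    public NativeArray<Quaternion> rotate;   // assumed container type

    public void Execute(int index)
    {
        // Rotate each instance a little further, then advance it along its new forward axis.
        rotate[index] = rotate[index] * RotateSpeed;
        position[index] += rotate[index] * Vector3.forward * MoveSpeed;
    }
}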
public async Task Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            Progress = 54
        };

        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    var stepFunction = new AmazonStepFunctionsClient();
    var data = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest
    {
        ActivityArn = ACTIVITY_ARN
    });

    var taskToken = data.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }

    @event = JToken.Parse(data.Input);

    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", JOB_PROFILE_NAME));
    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception($"JobProfile '{JOB_PROFILE_NAME}' not found");
    }

    var createProxyJob = new TransformJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["repositoryFile"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = REPOSITORY_BUCKET,
                AwsS3KeyPrefix = "TransformJobResults/"
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    createProxyJob = await resourceManager.CreateAsync(createProxyJob);
}
private void Update()
{
    var scale = 1.0f / Size;

    if (m_useJobSystem)
    {
        m_perlinJob = new PerlineNoiseJob()
        {
            inputs = m_gridInputs,
            output = m_perlins,
            Timestamp = Time.time,
            FractalLevel = m_fractalLevel,
            Scale = scale,
            Seed = m_seed,
        };

        m_transJob = new TransformJob()
        {
            OwnerPos = transform.position,
            GridSize = m_gridSize,
            noiseArray = m_perlins,
            DeltaTime = Time.deltaTime,
        };

        m_JobHandle = m_perlinJob.Schedule(m_gridSize.x * m_gridSize.y * m_gridSize.z, 35);
        m_transJobHandle = m_transJob.Schedule(m_transformsAccessArray, m_JobHandle);
    }
    else
    {
        for (int x = 0; x < m_gridSize.x; x++)
        {
            for (int y = 0; y < m_gridSize.y; y++)
            {
                for (int z = 0; z < m_gridSize.z; z++)
                {
                    m_flowFields[x + m_gridSize.y * (y + m_gridSize.z * z)] =
                        (Perlin.Fbm(m_seed + x * scale, m_seed + y * scale, (z + Time.time) * scale, m_fractalLevel) + 1) * 0.5f;
                }
            }
        }

        foreach (var cube in m_cubes)
        {
            // Wrap the cube to the opposite side when it leaves the grid bounds.
            if (cube.position.x > transform.position.x + m_gridSize.x * 0.5f)
            {
                cube.position = new Vector3(transform.position.x - m_gridSize.x * 0.5f, cube.position.y, cube.position.z);
            }
            if (cube.position.x < transform.position.x - m_gridSize.x * 0.5f)
            {
                cube.position = new Vector3(transform.position.x + m_gridSize.x * 0.5f, cube.position.y, cube.position.z);
            }
            if (cube.position.y > transform.position.y + m_gridSize.y * 0.5f)
            {
                cube.position = new Vector3(cube.position.x, transform.position.y - m_gridSize.y * 0.5f, cube.position.z);
            }
            if (cube.position.y < transform.position.y - m_gridSize.y * 0.5f)
            {
                cube.position = new Vector3(cube.position.x, transform.position.y + m_gridSize.y * 0.5f, cube.position.z);
            }
            if (cube.position.z > transform.position.z + m_gridSize.z * 0.5f)
            {
                cube.position = new Vector3(cube.position.x, cube.position.y, transform.position.z - m_gridSize.z * 0.5f);
            }
            if (cube.position.z < transform.position.z - m_gridSize.z * 0.5f)
            {
                cube.position = new Vector3(cube.position.x, cube.position.y, transform.position.z + m_gridSize.z * 0.5f);
            }

            // Sample the flow field at the cube's grid cell.
            var x = Mathf.Clamp(Mathf.RoundToInt(transform.position.x + cube.position.x + m_gridSize.x * 0.5f), 0, m_gridSize.x - 1);
            var y = Mathf.Clamp(Mathf.RoundToInt(transform.position.y + cube.position.y + m_gridSize.y * 0.5f), 0, m_gridSize.y - 1);
            var z = Mathf.Clamp(Mathf.RoundToInt(transform.position.z + cube.position.z + m_gridSize.z * 0.5f), 0, m_gridSize.z - 1);

            var noiseValue = m_flowFields[x + m_gridSize.y * (y + m_gridSize.z * z)];
            var noiseDir = new Vector3(Mathf.Cos(noiseValue * 2 * Mathf.PI), Mathf.Sin(noiseValue * 2 * Mathf.PI), Mathf.Cos(noiseValue * Mathf.PI));

            var newDir = Vector3.RotateTowards(cube.forward, noiseDir, 5 * Time.deltaTime, 0.0f);
            cube.rotation = Quaternion.LookRotation(newDir);
            cube.position += cube.forward * 15 * Time.deltaTime;
        }
    }
}
public JobDeadException(TransformJob job)
    : base(job, $"Job {job.Uuid} is dead, and will never complete. " +
                "This likely means that it was running on a driver that has crashed.")
{
}

public JobFailedException(TransformJob job)
    : base(job, $"Job {job.Uuid} failed with the following error:\n{job.Error}")
{
}

public JobStatusException(TransformJob job, string message)
    : base(message)
{
    Job = job;
}
void Update()
{
    t += speed * Time.deltaTime;

    var animJob = new AnimationJob()
    {
        Time = t,
        ShapeRadius = shapeRadius,
        ShapeSides = shapeSides,
        SideSize = sideSize,
        Radius = Radius,
        Rings = rings,
        CubesPerRing = cubesPerRing,
        //RingPositions = ringPositions,
        CubePositions = cubePositions,
        //RingRotations = ringRotations,
        CubeRotations = cubeRotations,
        CubeScales = cubeScales,
        MaterialParams = materialParams
    };
    var animJobDep = animJob.Schedule(cubesPerRing * rings, 32);

    var transformJob = new TransformJob()
    {
        WorldMatrix = transform.localToWorldMatrix,
        //RingPositions = ringPositions,
        CubePositions = cubePositions,
        //RingRotations = ringRotations,
        CubeRotations = cubeRotations,
        CubeScales = cubeScales,
        RenderMatrices = renderMatrices,
    };
    var transformJobDep = transformJob.Schedule(cubesPerRing * rings, 32, animJobDep);
    transformJobDep.Complete();

    int objectCount = rings * cubesPerRing;
    int loopRenderBatchSize;
    MaterialPropertyBlock mpb = new MaterialPropertyBlock();

    for (int i = 0; i < objectCount; i += renderBatchSize)
    {
        if ((i + renderBatchSize) < objectCount)
        {
            loopRenderBatchSize = renderBatchSize;
        }
        else
        {
            loopRenderBatchSize = objectCount - i;
        }

        if (renderBatchMat.Length != loopRenderBatchSize)
        {
            renderBatchMat = new Matrix4x4[loopRenderBatchSize];
            instMatParamsArray = new float[loopRenderBatchSize];
        }

        renderMatricesBatch = new NativeSlice<Matrix4x4>(renderMatrices, i, loopRenderBatchSize);
        renderMatricesBatch.CopyTo(renderBatchMat);

        materialParamsBatch = new NativeSlice<float>(materialParams, i, loopRenderBatchSize);
        materialParamsBatch.CopyTo(instMatParamsArray);

        mpb.SetFloatArray("FloatMPB", instMatParamsArray);
        Graphics.DrawMeshInstanced(mesh, 0, material, renderBatchMat, loopRenderBatchSize, mpb);
    }
}