/// <summary>
/// Tears down this component: completes any in-flight job first, then
/// releases the native library and disposes the native containers.
/// </summary>
public void OnDestroy()
{
    // FIX: complete the pending job BEFORE deinitializing the library.
    // The original called Library.Deinitialize() first, which let a
    // still-running job race against library teardown.
    _jobHandle?.Complete();
    Library.Deinitialize();
    // Containers are only safe to dispose once the job has completed.
    _jobCount.Dispose();
    _jobEvents.Dispose();
}
// Renders the transparent pass for the given camera: grabs a blurred copy of the
// current color target (exposed to shaders as the grab texture), draws the custom
// transparent draw-requests that survived culling, then draws all scene renderers
// in the transparent queue, and finally releases the temporary blur RT.
public override void FrameUpdate(PipelineCamera cam, ref PipelineCommandData data)
{
    // Sort transparents back-to-front so alpha blending composites correctly.
    SortingSettings sortSettings = new SortingSettings(cam.cam);
    sortSettings.criteria = SortingCriteria.CommonTransparent;
    // Batching/instancing explicitly off; no per-object data is pushed.
    // NOTE(review): presumably intentional for this custom pipeline — confirm
    // before enabling either flag.
    DrawingSettings drawSettings = new DrawingSettings(new ShaderTagId("Transparent"), sortSettings)
    {
        enableDynamicBatching = false,
        enableInstancing = false,
        perObjectData = UnityEngine.Rendering.PerObjectData.None
    };
    FilteringSettings filter = new FilteringSettings
    {
        excludeMotionVectorObjects = false,
        layerMask = cam.cam.cullingMask,
        renderQueueRange = RenderQueueRange.transparent,
        // NOTE(review): the camera culling mask is reused as the rendering layer
        // mask — verify these two masks are really meant to coincide.
        renderingLayerMask = (uint)cam.cam.cullingMask,
        sortingLayerRange = SortingLayerRange.all
    };
    // Blur the current color target into a temporary RT for grab-pass sampling.
    int blurTex = blur.Render(data.buffer, new Vector2Int(cam.cam.pixelWidth, cam.cam.pixelHeight), cam.targets.renderTargetIdentifier);
    data.buffer.SetGlobalTexture(ShaderIDs._GrabTexture, blurTex);
    // MRT setup: color target + camera depth texture, depth buffer bound for testing.
    transparentOutput[0] = cam.targets.renderTargetIdentifier;
    transparentOutput[1] = ShaderIDs._CameraDepthTexture;
    data.buffer.SetRenderTarget(colors: transparentOutput, depth: ShaderIDs._DepthBufferTexture);
    // Culling must have finished before customCullResults is read below.
    cullJob.Complete();
    var lst = CustomDrawRequest.allEvents;
    foreach (var i in customCullResults)
    {
        lst[i].DrawTransparent(data.buffer);
    }
    // Flush buffered commands before issuing the context-level draw.
    data.ExecuteCommandBuffer();
    data.context.DrawRenderers(proper.cullResults, ref drawSettings, ref filter);
    data.buffer.ReleaseTemporaryRT(blurTex);
}
protected override JobHandle OnUpdate(JobHandle inputDependencies) { _lastAllocation++; LastJob.Complete(); if (_batchQueue.Count > 0) { return(inputDependencies); } Profiler.BeginSample("Dependencies"); var voxelColorType = GetArchetypeChunkComponentType <VoxelColor>(true); var matrixType = GetArchetypeChunkComponentType <LocalToWorld>(true); var chunks = _query.CreateArchetypeChunkArrayAsync(Allocator.TempJob, out var chunksHandle); var chunkCount = _query.CalculateChunkCount(); var spaceRequirement = _query.CalculateEntityCount(); var indexMappings = new NativeArray <int>(chunkCount, Allocator.TempJob); var indexing = new IndexingJob { IndexMappings = indexMappings, Chunks = chunks, ChunksPerBatch = chunksPerBatch }.Schedule(JobHandle.CombineDependencies(inputDependencies, chunksHandle)); Profiler.EndSample(); JobHandle deps = indexing; Profiler.BeginSample("Prepare Space"); if (_lastColors.Length == spaceRequirement && _lastAllocation < 4) { var colorClear = new MemsetNativeArray <VoxelColor> { Source = _lastColors, Value = default }.Schedule(_lastColors.Length, 256, inputDependencies);
/// <summary>
/// Calculates global heights. This method uses the Unity Job System.
/// Each column described by its x and z values has three heights —
/// one each for Bedrock, Stone and Dirt. Heights determine up to where
/// certain types appear: x is Bedrock, y is Stone and z is Dirt.
/// </summary>
internal static ReadonlyVector3Int[] CalculateHeights_JobSystem_NoiseFunction()
{
    // output data
    var heights = new ReadonlyVector3Int[TotalBlockNumberX * TotalBlockNumberZ];

    var heightJob = new HeightJob_NoiseFunction()
    {
        // input
        TotalBlockNumberX = TotalBlockNumberX,

        // output
        Result = new NativeArray<ReadonlyVector3Int>(heights, Allocator.TempJob)
    };

    try
    {
        // One work item per column; batch size 8 keeps worker threads busy.
        JobHandle heightJobHandle = heightJob.Schedule(TotalBlockNumberX * TotalBlockNumberZ, 8);
        heightJobHandle.Complete();
        heightJob.Result.CopyTo(heights);
    }
    finally
    {
        // FIX: dispose in finally so the native buffer cannot leak if
        // scheduling, completion or the copy throws (the original leaked).
        heightJob.Result.Dispose();
    }

    return heights;
}
// Writes Chunk c's data using a Pallete's compression into the given buffer
// and returns the amount of bytes written. Compression runs on a worker
// thread via CompressionJob; this call blocks until it finishes.
public static int CompressBlocks(Chunk c, byte[] buffer, int targetPos = 0)
{
    int bytes;
    Pallete p = Compression.BiomeToPallete(c.biomeName);
    List<ushort> palleteList = Compression.GetPallete(p);

    // Native copies of the managed inputs; writtenBytes[0] is the job's
    // single-slot output for the produced byte count.
    NativeArray<int> writtenBytes = new NativeArray<int>(new int[1] { 0 }, Allocator.TempJob);
    NativeArray<ushort> chunkData = NativeTools.CopyToNative(c.data.GetData());
    NativeArray<byte> buff = NativeTools.CopyToNative(buffer);
    NativeArray<ushort> palleteArray = NativeTools.CopyToNative(palleteList.ToArray());

    try
    {
        CompressionJob cbJob = new CompressionJob
        {
            chunkData = chunkData,
            buffer = buff,
            palleteArray = palleteArray,
            writtenBytes = writtenBytes
        };
        JobHandle handle = cbJob.Schedule();
        handle.Complete();

        // Copy only the bytes the job actually produced into the caller's buffer.
        NativeArray<byte>.Copy(buff, 0, buffer, targetPos, writtenBytes[0]);
        bytes = writtenBytes[0];
    }
    finally
    {
        // FIX: dispose in finally so the four native containers cannot leak
        // if scheduling or the copy-back throws (the original leaked).
        chunkData.Dispose();
        palleteArray.Dispose();
        buff.Dispose();
        writtenBytes.Dispose();
    }
    return bytes;
}
/// <summary>
/// Runs two chained jobs over a single-element native buffer and logs the
/// result: MyJob writes into the buffer, MyJob1 (depending on MyJob)
/// processes it afterwards.
/// </summary>
public void SigleData()
{
    // FIX: the buffer is passed into scheduled jobs, so it must use
    // Allocator.TempJob — Allocator.Temp allocations cannot be used in jobs.
    NativeArray<float> result = new NativeArray<float>(1, Allocator.TempJob);
    // set up the job data
    MyJob myJob = new MyJob();
    myJob.a = 10;
    myJob.b = 20;
    myJob.result = result;
    // schedule the first job
    JobHandle jobHandle = myJob.Schedule();
    MyJob1 myJob1 = new MyJob1();
    myJob1.result = result;
    // the second job depends on the first job's output
    JobHandle jobHandle1 = myJob1.Schedule(jobHandle);
    // wait for the whole chain to finish
    jobHandle1.Complete();
    // every copy of a NativeArray points at the same memory,
    // so the job's result is visible here
    float results = result[0];
    result.Dispose();
    Debug.Log("获取的结果" + results);
}
/// <summary>
/// Exercises the custom scheduling extension: fills a native buffer with
/// 0..ARRAY_SIZE-1, runs AddWithIntJob over it via ScheduleWithInt, prints
/// the first values, then releases the buffer.
/// </summary>
private static void TestCustomJob()
{
    var numbers = new NativeArray<int>(ARRAY_SIZE, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

    // Memory was requested uninitialized, so populate every slot explicitly.
    for (int index = 0; index < ARRAY_SIZE; index++)
    {
        numbers[index] = index;
    }

    var job = new AddWithIntJob { result = numbers };
    JobHandle handle = job.ScheduleWithInt(value: ARRAY_SIZE);
    handle.Complete();

    Print5IntNativeArray(numbers);
    numbers.Dispose();
}
public new void Execute(JobHandle jobHandle) { //Sync Point as we must be sure that jobs that create/swap/remove entities are done jobHandle.Complete(); if (_submissionScheduler.paused) { return; } //prepare the entity command buffer to be used by the registered engines var entityCommandBuffer = _ECBSystem.CreateCommandBuffer(); foreach (var system in _engines) { system.ECB = entityCommandBuffer; system.EM = _ECBSystem.EntityManager; } //Submit Svelto Entities, calls Add/Remove/MoveTo that can be used by the IUECSSubmissionEngines _submissionScheduler.SubmitEntities(); //execute submission engines and complete jobs base.Execute(default).Complete();
// Runs an A* search between `start` and `end` on a background job, then paints
// the result onto the tilemap: all visited nodes in white, the reconstructed
// path (walked back through parent links from end to start) in green.
public void FindPath()
{
    // Native inputs/outputs for the AStar job. `safeGuard` bounds the node
    // capacity; `offsets` holds the 8 neighbour directions.
    // NOTE(review): offsets is allocated but not filled here — presumably the
    // job populates or indexes it internally; confirm against AStar.Execute.
    NativeHashMap<int2, bool> isObstacle = new NativeHashMap<int2, bool>(obstacles.Count, Allocator.TempJob);
    NativeHashMap<int2, Node> nodes = new NativeHashMap<int2, Node>(safeGuard, Allocator.TempJob);
    NativeHashMap<int2, Node> openSet = new NativeHashMap<int2, Node>(safeGuard, Allocator.TempJob);
    NativeArray<int2> offsets = new NativeArray<int2>(8, Allocator.TempJob);
    // Mirror the managed obstacle set into the native map for the job.
    foreach (int2 o in obstacles.Keys)
    {
        isObstacle.Add(o, true);
    }
    AStar aStar = new AStar
    {
        isObstacle = isObstacle,
        offsets = offsets,
        nodes = nodes,
        openSet = openSet,
        start = start,
        end = end,
        safeGuard = safeGuard
    };
    // Single job; block until the search finishes before reading `nodes`.
    JobHandle handle = aStar.Schedule();
    handle.Complete();
    // Paint every explored node white (skipping start, end and obstacles).
    NativeArray<Node> nodeArray = nodes.GetValueArray(Allocator.TempJob);
    for (int i = 0; i < nodeArray.Length; i++)
    {
        Vector3Int currentNode = new Vector3Int(nodeArray[i].coord.x, nodeArray[i].coord.y, 0);
        if (!start.coord.Equals(nodeArray[i].coord) && !end.coord.Equals(nodeArray[i].coord) && !obstacles.ContainsKey(nodeArray[i].coord))
        {
            map.SetTile(currentNode, defaultTile);
            map.SetTileFlags(currentNode, TileFlags.None);
            map.SetColor(currentNode, Color.white);
        }
    }
    // If the goal was reached, walk the parent chain back and paint it green.
    if (nodes.ContainsKey(end.coord))
    {
        int2 currentCoord = end.coord;
        while (!currentCoord.Equals(start.coord))
        {
            currentCoord = nodes[currentCoord].parent;
            Vector3Int currentTile = new Vector3Int(currentCoord.x, currentCoord.y, 0);
            map.SetTile(currentTile, defaultTile);
            map.SetTileFlags(currentTile, TileFlags.None);
            map.SetColor(currentTile, Color.green);
        }
    }
    // Release all native containers created above.
    nodes.Dispose();
    openSet.Dispose();
    isObstacle.Dispose();
    offsets.Dispose();
    nodeArray.Dispose();
}
/// <summary>Blocks the caller until the shared job handle has finished executing.</summary>
public static void Sync() => handle_.Complete();
// Per-physics-tick multipath radio channel simulation between Tx and Rx.
// Computes the line-of-sight (LoS) contribution, then first/second/third-order
// multipath components (MPC1/MPC2/MPC3) via parallel jobs, and finally fills
// the output spectra arrays and redraws the charts.
private void FixedUpdate()
{
    FixUpdateCount += 1;
    // Snapshot the currently visible MPC lists (ids + attenuations) from the
    // Rx/Tx visibility scripts.
    Rx_MPC1 = Rx_Seen_MPC_Script.seen_MPC1; Rx_MPC1_att = Rx_Seen_MPC_Script.seen_MPC1_att;
    Rx_MPC2 = Rx_Seen_MPC_Script.seen_MPC2; Rx_MPC2_att = Rx_Seen_MPC_Script.seen_MPC2_att;
    Rx_MPC3 = Rx_Seen_MPC_Script.seen_MPC3; Rx_MPC3_att = Rx_Seen_MPC_Script.seen_MPC3_att;
    Tx_MPC1 = Tx_Seen_MPC_Script.seen_MPC1; Tx_MPC1_att = Tx_Seen_MPC_Script.seen_MPC1_att;
    Tx_MPC2 = Tx_Seen_MPC_Script.seen_MPC2; Tx_MPC2_att = Tx_Seen_MPC_Script.seen_MPC2_att;
    Tx_MPC3 = Tx_Seen_MPC_Script.seen_MPC3; Tx_MPC3_att = Tx_Seen_MPC_Script.seen_MPC3_att;

    ///////////////////////////////////////////////////////////////////////////////////
    /// LOS
    ///////////////////////////////////////////////////////////////////////////////////
    float dtLoS = 0;
    float PathGainLoS = 0;
    float LOS_distance = 0;
    // Linecast returning false means nothing blocks the direct Tx->Rx ray.
    if (!Physics.Linecast(Tx.transform.position, Rx.transform.position))
    {
        Vector3 LoS_dir = (Tx.transform.position - Rx.transform.position).normalized;
        Vector3 Tx_fwd = Tx.transform.forward;
        Vector3 Rx_fwd = Rx.transform.forward;
        // Antenna pattern factors default to -1 (pattern disabled).
        float cos_Tx = -1;
        float cos_Rx = -1;
        float K_antanne_pattern;
        if (Tx_Antenna_pattern) { cos_Tx = Vector3.Dot(-LoS_dir, Tx_fwd); }
        if (Rx_Antenna_pattern) { cos_Rx = Vector3.Dot(LoS_dir, Rx_fwd); }
        K_antanne_pattern = 0.25f * (1 - cos_Rx - cos_Tx + cos_Rx * cos_Tx);
        flag_LoS = 1;
        if (LoS_Start == 0) { LoS_Start = FixUpdateCount; Debug.Log("LoS Start " + LoS_Start); } // finding the start time of LoS
        if (LOS_Tracer) { Debug.DrawLine(Tx.transform.position, Rx.transform.position, Color.magenta); }
        LOS_distance = (Tx.transform.position - Rx.transform.position).magnitude;
        // Propagation delay and free-space-style path gain (scaled by antennas).
        dtLoS = LOS_distance / SpeedofLight;// + 1000/SpeedofLight;
        PathGainLoS = (1 / (LOS_distance)) * K_antanne_pattern;
        // Ground-reflection coefficients (parallel/perpendicular polarisation).
        float hbyd = 3.4f / LOS_distance; // 2h/d; h = 1.7meters
        float Rparallel = (RelativePermitivity * hbyd - Z) / (RelativePermitivity * hbyd + Z);
        float Rperpendicular = (hbyd - Z) / (hbyd + Z);
        // NOTE(review): Rcoef is computed but its only use is commented out —
        // dead value; confirm whether the reflection correction should apply.
        float Rcoef = (float)Math.Sqrt(0.5f * (Rparallel * Rparallel + Rperpendicular * Rperpendicular));
        EdgeEffect(FixUpdateCount - LoS_Start, out EdgeEffect_LoS);
        // The following can be done due to manual calculation of the LoS end:
        // hard-coded fade-out window for ticks 101..125.
        if (FixUpdateCount > 100 && FixUpdateCount < 126)
        {
            EdgeEffect(124 - FixUpdateCount, out EdgeEffect_LoS);
        }
    }
    // NOTE(review): the three arrays below are filled and immediately disposed
    // without any job ever reading them — dead allocations; candidates for removal.
    var dtLoSParallel = new NativeArray<float>(1, Allocator.TempJob);
    var distanceLoSParallel = new NativeArray<float>(1, Allocator.TempJob);
    var PathGainLoSParallel = new NativeArray<float>(1, Allocator.TempJob);
    for (int i = 0; i < dtLoSParallel.Length; i++)
    {
        dtLoSParallel[i] = dtLoS;
        distanceLoSParallel[i] = LOS_distance;
        PathGainLoSParallel[i] = PathGainLoS;
    }
    dtLoSParallel.Dispose();
    distanceLoSParallel.Dispose();
    PathGainLoSParallel.Dispose();

    ///////////////////////////////////////////////////////////////////////////////////
    /// MPC1
    ///////////////////////////////////////////////////////////////////////////////////
    // First-order paths: every Tx-visible MPC1 is tested against the Rx side
    // by CommonMPC1Parallel (one work item per Tx MPC, batch size 2).
    var RxArray1 = new NativeArray<int>(Rx_MPC1.Count, Allocator.TempJob);
    var RxArray1_att = new NativeArray<float>(Rx_MPC1.Count, Allocator.TempJob);
    var TxArray1 = new NativeArray<int>(Tx_MPC1.Count, Allocator.TempJob);
    var TxArray1_att = new NativeArray<float>(Tx_MPC1.Count, Allocator.TempJob);
    var possiblePath1 = new NativeArray<Path1>(Tx_MPC1.Count, Allocator.TempJob);
    var dtMPC1Array = new NativeArray<float>(Tx_MPC1.Count, Allocator.TempJob);
    var PathGainMPC1 = new NativeArray<float>(Tx_MPC1.Count, Allocator.TempJob);
    for (int i = 0; i < Rx_MPC1.Count; i++)
    {
        RxArray1[i] = Rx_MPC1[i];
        RxArray1_att[i] = Rx_MPC1_att[i];
    }
    for (int i = 0; i < Tx_MPC1.Count; i++)
    {
        TxArray1[i] = Tx_MPC1[i];
        TxArray1_att[i] = Tx_MPC1_att[i];
        possiblePath1[i] = empty_path;
    }
    CommonMPC1Parallel commonMPC1Parallel = new CommonMPC1Parallel
    {
        Speed_of_Light = SpeedofLight,
        MPC1 = SeenMPC1Table,
        Array1 = RxArray1,
        Array1_att = RxArray1_att,
        Array2 = TxArray1,
        Array2_att = TxArray1_att,
        Rx_Point = Rx.transform.position,
        Tx_Point = Tx.transform.position,
        Output = possiblePath1,
        OutputDelays = dtMPC1Array,
        OutputAmplitudes = PathGainMPC1,
    };
    JobHandle jobHandleMPC1 = commonMPC1Parallel.Schedule(TxArray1.Length, 2);
    jobHandleMPC1.Complete();
    // transition from NativeArrays to Lists (Distance > 0 marks a valid path)
    List<Path1> first_order_paths_full_parallel = new List<Path1>();
    if (MPC1_Tracer)
    {
        for (int i = 0; i < possiblePath1.Length; i++)
        {
            if (possiblePath1[i].Distance > 0)
            {
                first_order_paths_full_parallel.Add(possiblePath1[i]);
                Debug.DrawLine(possiblePath1[i].Rx_Point, possiblePath1[i].MPC1, Color.cyan);
                Debug.DrawLine(possiblePath1[i].Tx_Point, possiblePath1[i].MPC1, Color.cyan);
            }
        }
    }
    RxArray1.Dispose();
    RxArray1_att.Dispose();
    TxArray1.Dispose();
    TxArray1_att.Dispose();
    possiblePath1.Dispose();
    dtMPC1Array.Dispose();
    PathGainMPC1.Dispose();

    ///////////////////////////////////////////////////////////////////////////////////
    /// MPC2 Parallel
    ///////////////////////////////////////////////////////////////////////////////////
    // Second-order paths: each Rx MPC2 entry is expanded MaxLengthOfSeenMPC2Lists
    // times so every (Rx MPC2, candidate) pair gets its own work item.
    var level2MPC2 = new NativeArray<int>(Rx_MPC2.Count * MaxLengthOfSeenMPC2Lists, Allocator.TempJob);
    var level2MPC2_att = new NativeArray<float>(Rx_MPC2.Count * MaxLengthOfSeenMPC2Lists, Allocator.TempJob);
    var possiblePath2 = new NativeArray<Path2>(Rx_MPC2.Count * MaxLengthOfSeenMPC2Lists, Allocator.TempJob);
    var dtArrayMPC2 = new NativeArray<float>(Rx_MPC2.Count * MaxLengthOfSeenMPC2Lists, Allocator.TempJob);
    var PathGainMPC2 = new NativeArray<float>(Rx_MPC2.Count * MaxLengthOfSeenMPC2Lists, Allocator.TempJob);
    for (int l = 0; l < Rx_MPC2.Count * MaxLengthOfSeenMPC2Lists; l++)
    {
        level2MPC2[l] = Rx_MPC2[Mathf.FloorToInt(l / MaxLengthOfSeenMPC2Lists)];
        level2MPC2_att[l] = Rx_MPC2_att[Mathf.FloorToInt(l / MaxLengthOfSeenMPC2Lists)];
        possiblePath2[l] = empty_path2;
    }
    var TxMPC2Array = new NativeArray<int>(Tx_MPC2.Count, Allocator.TempJob);
    var TxMPC2Array_att = new NativeArray<float>(Tx_MPC2.Count, Allocator.TempJob);
    for (int l = 0; l < Tx_MPC2.Count; l++)
    {
        TxMPC2Array[l] = Tx_MPC2[l];
        TxMPC2Array_att[l] = Tx_MPC2_att[l];
    }
    Path2ParallelSearch path2ParallelSearch = new Path2ParallelSearch
    {
        // common data
        SeenMPC2Table = SeenMPC2Table,
        LookUpTable2 = LookUpTable2,
        LookUpTable2ID = LookUpTable2ID,
        // must be disposed
        Rx_MPC2Array = level2MPC2,
        Rx_MPC2Array_att = level2MPC2_att,
        Tx_MPC2 = TxMPC2Array,
        Tx_MPC2_att = TxMPC2Array_att,
        // other data
        Rx_Position = Rx.transform.position,
        Tx_Position = Tx.transform.position,
        MaxListsLength = MaxLengthOfSeenMPC2Lists,
        Speed_of_Light = SpeedofLight,
        SecondOrderPaths = possiblePath2,
        OutputDelays = dtArrayMPC2,
        OutputAmplitudes = PathGainMPC2,
    };
    // Chained after MPC1 (already completed above, so this only orders scheduling).
    JobHandle jobHandleMPC2 = path2ParallelSearch.Schedule(level2MPC2.Length, MaxLengthOfSeenMPC2Lists, jobHandleMPC1);
    jobHandleMPC2.Complete();
    /// MPC2
    List<Path2> second_order_paths_full_parallel = new List<Path2>();
    if (MPC2_Tracer)
    {
        for (int l = 0; l < possiblePath2.Length; l++)
        {
            if (possiblePath2[l].Distance > 0)
            {
                second_order_paths_full_parallel.Add(possiblePath2[l]);
                Debug.DrawLine(possiblePath2[l].Rx_Point, possiblePath2[l].MPC2_1, Color.white);
                Debug.DrawLine(possiblePath2[l].MPC2_1, possiblePath2[l].MPC2_2, Color.white);
                Debug.DrawLine(possiblePath2[l].MPC2_2, possiblePath2[l].Tx_Point, Color.white);
            }
        }
    }
    level2MPC2.Dispose();
    level2MPC2_att.Dispose();
    possiblePath2.Dispose();
    TxMPC2Array.Dispose();
    TxMPC2Array_att.Dispose();
    dtArrayMPC2.Dispose();
    PathGainMPC2.Dispose();

    ///////////////////////////////////////////////////////////////////////////////////
    /// MPC3
    ///////////////////////////////////////////////////////////////////////////////////
    if (If_we_need_MPC3 == true)
    {
        // define how many elements should be processed in a single core
        int innerloopBatchCount = MaxLengthOfSeenMPC3Lists;
        Vector3 Rx_Point = Rx.transform.position;
        Vector3 Tx_Point = Tx.transform.position;
        // Half-paths are searched independently from the Rx and Tx sides,
        // then joined into full third-order paths by Path3ActiveSet below.
        NativeArray<int> Rx_Seen_MPC3 = new NativeArray<int>(Rx_MPC3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        NativeArray<float> Rx_Seen_MPC3_att = new NativeArray<float>(Rx_MPC3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        NativeArray<Path3Half> RxReachableHalfPath3Array = new NativeArray<Path3Half>(Rx_MPC3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        for (int l = 0; l < Rx_MPC3.Count * MaxLengthOfSeenMPC3Lists; l++)
        {
            Rx_Seen_MPC3[l] = Rx_MPC3[Mathf.FloorToInt(l / MaxLengthOfSeenMPC3Lists)];
            Rx_Seen_MPC3_att[l] = Rx_MPC3_att[Mathf.FloorToInt(l / MaxLengthOfSeenMPC3Lists)];
            RxReachableHalfPath3Array[l] = empty_path3Half;
        }
        NativeArray<int> Tx_Seen_MPC3 = new NativeArray<int>(Tx_MPC3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        NativeArray<float> Tx_Seen_MPC3_att = new NativeArray<float>(Tx_MPC3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        NativeArray<Path3Half> TxReachableHalfPath3Array = new NativeArray<Path3Half>(Tx_MPC3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        for (int l = 0; l < Tx_MPC3.Count * MaxLengthOfSeenMPC3Lists; l++)
        {
            Tx_Seen_MPC3[l] = Tx_MPC3[Mathf.FloorToInt(l / MaxLengthOfSeenMPC3Lists)];
            Tx_Seen_MPC3_att[l] = Tx_MPC3_att[Mathf.FloorToInt(l / MaxLengthOfSeenMPC3Lists)];
            TxReachableHalfPath3Array[l] = empty_path3Half;
        }
        HalfPath3Set RxhalfPath3Set = new HalfPath3Set
        {
            // common data
            SeenMPC3Table = SeenMPC3Table,
            LookUpTable3 = LookUpTable3,
            LookUpTable3ID = LookUpTable3ID,
            MaxListsLength = MaxLengthOfSeenMPC3Lists,
            // Car specific data
            Point = Rx_Point,
            // must be disposed
            Seen_MPC3 = Rx_Seen_MPC3,
            Seen_MPC3_att = Rx_Seen_MPC3_att,
            ReachableHalfPath3 = RxReachableHalfPath3Array,
        };
        // create a job handle list
        // NOTE(review): jobHandleList (Allocator.Temp) is never disposed —
        // harmless for Temp within a frame, but worth confirming.
        NativeList<JobHandle> jobHandleList = new NativeList<JobHandle>(Allocator.Temp);
        JobHandle RxjobHandleMPC3 = RxhalfPath3Set.Schedule(Rx_Seen_MPC3.Length, innerloopBatchCount, jobHandleMPC2);
        jobHandleList.Add(RxjobHandleMPC3);
        HalfPath3Set TxhalfPath3Set = new HalfPath3Set
        {
            // common data
            SeenMPC3Table = SeenMPC3Table,
            LookUpTable3 = LookUpTable3,
            LookUpTable3ID = LookUpTable3ID,
            MaxListsLength = MaxLengthOfSeenMPC3Lists,
            // Car specific data
            Point = Tx_Point,
            // must be disposed
            Seen_MPC3 = Tx_Seen_MPC3,
            Seen_MPC3_att = Tx_Seen_MPC3_att,
            ReachableHalfPath3 = TxReachableHalfPath3Array,
        };
        JobHandle TxjobHandleMPC3 = TxhalfPath3Set.Schedule(Tx_Seen_MPC3.Length, innerloopBatchCount, jobHandleMPC2);
        jobHandleList.Add(TxjobHandleMPC3);
        JobHandle.CompleteAll(jobHandleList);
        // storing nonempty path3s
        List<Path3Half> TxHalfPath3 = new List<Path3Half>();
        List<Path3Half> RxHalfPath3 = new List<Path3Half>();
        // introducing a little bit of randomness to the third order of paths selection
        int MPC3PathStep = 3; // otherwise, the sets of possible third order of paths become too big
        for (int l = 0; l < TxReachableHalfPath3Array.Length; l += MPC3PathStep)
        {
            if (TxReachableHalfPath3Array[l].Distance > 0)
            {
                TxHalfPath3.Add(TxReachableHalfPath3Array[l]);
            }
        }
        for (int l = 0; l < RxReachableHalfPath3Array.Length; l += MPC3PathStep)
        {
            if (RxReachableHalfPath3Array[l].Distance > 0)
            {
                RxHalfPath3.Add(RxReachableHalfPath3Array[l]);
            }
        }
        // Repack the surviving half-paths into native arrays for the join job.
        NativeArray<Path3Half> RxNativeArray = new NativeArray<Path3Half>(RxHalfPath3.Count, Allocator.TempJob);
        for (int i = 0; i < RxNativeArray.Length; i++)
        {
            RxNativeArray[i] = RxHalfPath3[i];
        }
        NativeArray<Path3Half> TxNativeArray = new NativeArray<Path3Half>(TxHalfPath3.Count, Allocator.TempJob);
        for (int i = 0; i < TxNativeArray.Length; i++)
        {
            TxNativeArray[i] = TxHalfPath3[i];
        }
        NativeArray<Path3> activepath3 = new NativeArray<Path3>(RxHalfPath3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        NativeArray<float> dtArrayMPC3 = new NativeArray<float>(RxHalfPath3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        NativeArray<float> PathGainMPC3 = new NativeArray<float>(RxHalfPath3.Count * MaxLengthOfSeenMPC3Lists, Allocator.TempJob);
        Path3ActiveSet Rx_path3ActiveSet = new Path3ActiveSet
        {
            SeenMPC3Table = SeenMPC3Table,
            InputArray = RxNativeArray,
            CompareArray = TxNativeArray,
            MaxListsLength = MaxLengthOfSeenMPC3Lists,
            EmptyElement = empty_path3,
            Speed_of_Light = SpeedofLight,
            Output = activepath3,
            OutputDelays = dtArrayMPC3,
            OutputAmplitudes = PathGainMPC3
        };
        JobHandle Jobforpath3ActiveSet = Rx_path3ActiveSet.Schedule(activepath3.Length, MaxLengthOfSeenMPC3Lists, TxjobHandleMPC3);
        Jobforpath3ActiveSet.Complete();
        List<Path3> third_order_paths_full_parallel = new List<Path3>();
        if (MPC3_Tracer)
        {
            int trace_count = 0;
            for (int l = 0; l < activepath3.Length; l++)
            {
                if (activepath3[l].Distance > 0)
                {
                    trace_count += 1;
                    third_order_paths_full_parallel.Add(activepath3[l]);
                    Debug.DrawLine(activepath3[l].Rx_Point, activepath3[l].MPC3_1, Color.green);
                    Debug.DrawLine(activepath3[l].MPC3_1, activepath3[l].MPC3_2, Color.blue);
                    Debug.DrawLine(activepath3[l].MPC3_2, activepath3[l].MPC3_3, Color.yellow);
                    Debug.DrawLine(activepath3[l].MPC3_3, activepath3[l].Tx_Point, Color.red);
                }
            }
        }
        TxNativeArray.Dispose();
        RxNativeArray.Dispose();
        activepath3.Dispose();
        dtArrayMPC3.Dispose();
        PathGainMPC3.Dispose();
        Rx_Seen_MPC3.Dispose();
        Rx_Seen_MPC3_att.Dispose();
        RxReachableHalfPath3Array.Dispose();
        Tx_Seen_MPC3.Dispose();
        Tx_Seen_MPC3_att.Dispose();
        TxReachableHalfPath3Array.Dispose();
    }

    ///////////////////////////////////////////////////////////////////////////////////
    /// complete the works
    ///////////////////////////////////////////////////////////////////////////////////
    // NOTE(review): the output spectra are reallocated to all-zero arrays and
    // charted without being filled from the computed paths — looks like a
    // placeholder; confirm where Y_output/H_output should be populated.
    Y_output = new double[Nfft];
    H_output = new double[Nfft];
    Y_noise_output = new double[Nfft];
    H_noise_output = new double[Nfft];
    Drawing.drawChart(tfTime, X_inputValues, Y_output, "time");
    Drawing.drawChart(tfFreq, X_inputValues, H_output, "frequency");
}
// (Removed: a large commented-out earlier implementation of OnUpdate that
// sorted and drew enemies on the main thread.)

// Per-frame sprite render system: culls entities against the camera rect into
// four parallel queues, converts each queue to an array, sorts each array, then
// batches everything through DrawMeshInstanced. Entities flagged by the cull
// job are destroyed at the end, and the player is drawn separately.
protected override void OnUpdate()
{
    matrices.Clear();
    block.Clear();
    int jobHandleindex = 0;
    NativeArray<JobHandle> jobHandles = new NativeArray<JobHandle>(4, Allocator.Temp);
    EntityQuery entityQuery = GetEntityQuery(typeof(Translation), typeof(EnemyComponent));
    // Four queues let the cull job bucket results with parallel writers.
    NativeQueue<RenderData> renderQueue0 = new NativeQueue<RenderData>(Allocator.TempJob);
    NativeQueue<RenderData> renderQueue1 = new NativeQueue<RenderData>(Allocator.TempJob);
    NativeQueue<RenderData> renderQueue2 = new NativeQueue<RenderData>(Allocator.TempJob);
    NativeQueue<RenderData> renderQueue3 = new NativeQueue<RenderData>(Allocator.TempJob);
    NativeQueue<Entity> toRemoveEntity = new NativeQueue<Entity>(Allocator.TempJob);
    Camera camera = Camera.main;
    NativeQueue<RenderData>.ParallelWriter pw0 = renderQueue0.AsParallelWriter();
    NativeQueue<RenderData>.ParallelWriter pw1 = renderQueue1.AsParallelWriter();
    NativeQueue<RenderData>.ParallelWriter pw2 = renderQueue2.AsParallelWriter();
    NativeQueue<RenderData>.ParallelWriter pw3 = renderQueue3.AsParallelWriter();
    NativeQueue<Entity>.ParallelWriter tre = toRemoveEntity.AsParallelWriter();
    // Half-width of the camera view derived from orthographic size and aspect.
    float halfW = camera.orthographicSize / Screen.currentResolution.height * Screen.currentResolution.width;
    SpriteCullJob cullJob = new SpriteCullJob()
    {
        top = camera.transform.position.y + camera.orthographicSize,
        bottom = camera.transform.position.y - camera.orthographicSize,
        left = camera.transform.position.x - halfW,
        right = camera.transform.position.x + halfW,
        nativeQueue0 = pw0,
        nativeQueue1 = pw1,
        nativeQueue2 = pw2,
        nativeQueue3 = pw3,
        typeTranslation = GetArchetypeChunkComponentType<Translation>(),
        entityType = GetArchetypeChunkEntityType(),
        needRemoveEntity = tre,
    };
    JobHandle handle = cullJob.Schedule(entityQuery);
    handle.Complete();
    // Stage 2: drain each queue into a same-sized array (one job per queue).
    NativeArray<JobHandle> switchHandles = new NativeArray<JobHandle>(4, Allocator.TempJob);
    int switchJobIndex = 0;
    NativeArray<RenderData> rd0 = new NativeArray<RenderData>(renderQueue0.Count, Allocator.TempJob),
                            rd1 = new NativeArray<RenderData>(renderQueue1.Count, Allocator.TempJob),
                            rd2 = new NativeArray<RenderData>(renderQueue2.Count, Allocator.TempJob),
                            rd3 = new NativeArray<RenderData>(renderQueue3.Count, Allocator.TempJob);
    NativeQueue2ArrayJob nativeQueue2Array0 = new NativeQueue2ArrayJob() { nativeQueue = renderQueue0, nativeArray = rd0 };
    switchHandles[switchJobIndex++] = nativeQueue2Array0.Schedule();
    NativeQueue2ArrayJob nativeQueue2Array1 = new NativeQueue2ArrayJob() { nativeQueue = renderQueue1, nativeArray = rd1 };
    switchHandles[switchJobIndex++] = nativeQueue2Array1.Schedule();
    NativeQueue2ArrayJob nativeQueue2Array2 = new NativeQueue2ArrayJob() { nativeQueue = renderQueue2, nativeArray = rd2 };
    switchHandles[switchJobIndex++] = nativeQueue2Array2.Schedule();
    NativeQueue2ArrayJob nativeQueue2Array3 = new NativeQueue2ArrayJob() { nativeQueue = renderQueue3, nativeArray = rd3 };
    switchHandles[switchJobIndex++] = nativeQueue2Array3.Schedule();
    JobHandle.CompleteAll(switchHandles);
    switchHandles.Dispose();
    renderQueue0.Dispose();
    renderQueue1.Dispose();
    renderQueue2.Dispose();
    renderQueue3.Dispose();
    // Stage 3: sort each bucket in parallel.
    SpriteSortJobs sortJobs0 = new SpriteSortJobs() { renderDatas = rd0, };
    jobHandles[jobHandleindex++] = sortJobs0.Schedule();
    SpriteSortJobs sortJobs1 = new SpriteSortJobs() { renderDatas = rd1, };
    jobHandles[jobHandleindex++] = sortJobs1.Schedule();
    SpriteSortJobs sortJobs2 = new SpriteSortJobs() { renderDatas = rd2, };
    jobHandles[jobHandleindex++] = sortJobs2.Schedule();
    SpriteSortJobs sortJobs3 = new SpriteSortJobs() { renderDatas = rd3, };
    jobHandles[jobHandleindex++] = sortJobs3.Schedule();
    JobHandle.CompleteAll(jobHandles);
    // Stage 4: accumulate matrices/material data and issue the instanced draw.
    foreach (var item in rd0) { AddData(item); }
    foreach (var item in rd1) { AddData(item); }
    foreach (var item in rd2) { AddData(item); }
    foreach (var item in rd3) { AddData(item); }
    if (matrices.Count > 0)
    {
        // NOTE(review): AddData presumably flushes full 1023-matrix batches
        // itself; this call draws the remainder — confirm against AddData.
        Graphics.DrawMeshInstanced(GameSetting.Instance.entityMesh, 0, GameSetting.Instance.enemyMat, matrices, block);
    }
    rd0.Dispose();
    rd1.Dispose();
    rd2.Dispose();
    rd3.Dispose();
    jobHandles.Dispose();
    // Destroy entities the cull job flagged for removal.
    if (toRemoveEntity.Count > 0)
    {
        NativeArray<Entity> toRemoveEntityArray = toRemoveEntity.ToArray(Allocator.Temp);
        this.EntityManager.DestroyEntity(toRemoveEntityArray);
        toRemoveEntityArray.Dispose();
    }
    toRemoveEntity.Dispose();
    // Player is drawn individually, not instanced.
    Entities.ForEach((ref PlayerComponent playerComponent, ref Translation translation, ref Rotation rotation) =>
    {
        Graphics.DrawMesh(GameSetting.Instance.entityMesh, translation.Value, rotation.Value, GameSetting.Instance.playerMat, LayerMask.GetMask("Default"));
    });
}
// Distance-culls terrain tree instances around the target (explicit Target or
// the main camera) via a parallel job, then positions a pooled set of capsule
// colliders on the trees that remain in range; surplus pooled colliders are
// deactivated.
private void LateUpdate()
{
    // Bail out early if the terrain/foliage data or tree instances are missing.
    if (Terrain == null) { return; }
    if (Terrain.TerrainData == null) { return; }
    if (Terrain.TerrainData.Foliage.Trees == null) { return; }
    if (Terrain.TerrainData.Foliage.Trees.Prototypes.Count == 0) { return; }
    if (treeInstances == null || treeInstances.Length == 0) { return; }
    // Resolve the object to cull around: explicit Target wins, else main camera.
    GameObject actualTarget = null;
    if (Target != null)
    {
        actualTarget = Target;
    }
    else if (Camera.main != null)
    {
        actualTarget = Camera.main.gameObject;
    }
    if (actualTarget == null) { return; }
    // Cull in terrain-local space.
    Vector3 targetLocalPos = Terrain.transform.InverseTransformPoint(actualTarget.transform.position);
    GTreeColliderCullJob job = new GTreeColliderCullJob()
    {
        instances = nativeTreeInstances,
        cullResults = nativeCullResults,
        maxDistance = distance,
        targetPos = targetLocalPos
    };
    JobHandle handle = job.Schedule(nativeTreeInstances.Length, 100);
    handle.Complete();
    // Mirror the native cull results into the managed cache (resized on demand).
    if (cullResults == null || cullResults.Length != nativeCullResults.Length)
    {
        cullResults = new bool[nativeCullResults.Length];
    }
    nativeCullResults.CopyTo(cullResults);
    List<GTreePrototype> prototypes = Terrain.TerrainData.Foliage.Trees.Prototypes;
    int colliderIndex = 0;
    Vector3 terrainPos = Terrain.transform.position;
    Vector3 worldPos = Vector3.zero;
    // NOTE(review): lowercase `terrain` here vs `Terrain` everywhere else —
    // presumably the backing field of the same property; confirm they match.
    if (terrain.TerrainData.Rendering.DrawTrees)
    {
        for (int i = 0; i < treeInstances.Length; ++i)
        {
            if (cullResults[i] == false) { continue; }
            GTreeInstance tree = treeInstances[i];
            GTreePrototype prototype = prototypes[tree.prototypeIndex];
            if (prototype.prefab == null) { continue; }
            if (!prototype.hasCollider) { continue; }
            // Take the next collider from the pool and fit it to this tree.
            CapsuleCollider col = GetCollider(colliderIndex);
            colliderIndex += 1;
            worldPos.Set(
                tree.position.x + terrainPos.x,
                tree.position.y + terrainPos.y,
                tree.position.z + terrainPos.z);
            col.transform.position = worldPos;
            col.transform.rotation = tree.rotation;
            col.transform.localScale = tree.scale;
            GTreeColliderInfo colliderInfo = prototype.colliderInfo;
            col.center = colliderInfo.center;
            col.radius = colliderInfo.radius;
            col.height = colliderInfo.height;
            col.direction = colliderInfo.direction;
            col.gameObject.layer = prototype.layer;
            if (CopyTreeTag)
            {
                col.gameObject.tag = prototype.prefab.tag;
            }
            col.gameObject.SetActive(true);
        }
    }
    // Deactivate any pooled colliders that were not assigned this frame.
    int colliderCount = Colliders.Count;
    for (int i = colliderIndex; i < colliderCount; ++i)
    {
        CapsuleCollider col = GetCollider(i);
        col.gameObject.SetActive(false);
    }
}
//Handles increment and decrement of parameters of HumanComponent
protected override void OnUpdate()
{
    var ecb = ecbSystem.CreateCommandBuffer().ToConcurrent();
    // Copy captured state into locals so the Burst lambdas below capture
    // values rather than 'this'.
    float deltaTime = Time.DeltaTime;
    var width = Width;
    var cellSize = CellSize;
    var grid = Grid;
    var lockdown = Human.conf.lockdown;

    // Job 1: grow every need over time; symptomatic/lockdown humans accumulate
    // needs at reduced (or zero) rates. Each value is clamped to its own cap.
    JobHandle jobhandle = Entities.ForEach((ref HumanComponent hc, in InfectionComponent ic) =>
    {
        if (!lockdown)
        {
            if (ic.symptomatic && hc.socialResposibility > 0.5)
            {
                hc.fatigue = math.min(hc.fatigue + 1f * deltaTime, 17 * 60);
            }
            else
            {
                //increment of 1 value per second for each HumanComponent parameters
                hc.hunger = math.min(hc.hunger + 1f * deltaTime, 7 * 60);
                hc.fatigue = math.min(hc.fatigue + 1f * deltaTime, 17 * 60);
                hc.sociality = math.min(hc.sociality + 1f * deltaTime, 23 * 60);
                hc.sportivity = math.min(hc.sportivity + 1f * deltaTime, 2 * 23 * 60);
                hc.grocery = math.min(hc.grocery + 1f * deltaTime, 3 * 25 * 60);
                hc.work = math.min(hc.work + 1f * deltaTime, 17 * 60);
            }
        }
        else
        {
            if (ic.symptomatic)
            {
                hc.fatigue = math.min(hc.fatigue + 1f * deltaTime, 17 * 60);
            }
            else
            {
                hc.hunger = math.min(hc.hunger + 1f * deltaTime, 7 * 60);
                hc.fatigue = math.min(hc.fatigue + 1f * deltaTime, 17 * 60);
                hc.work = math.min(hc.work + hc.jobEssentiality * deltaTime, 17 * 60);
                hc.grocery = math.min(hc.grocery + 0.5f * deltaTime, 3 * 25 * 60);
                hc.sociality = math.min(hc.sociality + (1 - hc.socialResposibility) * 0.1f * deltaTime, 23 * 60);
                hc.sportivity = math.min(hc.sportivity + (1 - hc.socialResposibility) * 0.1f * deltaTime, 2 * 23 * 60);
            }
        }
    }).ScheduleParallel(Dependency);
    jobhandle.Complete();

    //cycle all the entities without a NeedComponent and assign it according to parameters
    JobHandle jobhandle1 = Entities.WithNone<NeedComponent>().ForEach((Entity entity, int nativeThreadIndex, in HumanComponent hc) =>
    {
        //set searchRadius for retrieving areas in the map included in that radius if the need is over a certain threshold
        // NOTE(review): priority order is fixed (hunger > fatigue > sportivity >
        // sociality > grocery > work) — only the first need over its threshold
        // is assigned this frame.
        if (hc.hunger > 60 * 6)
        {
            ecb.AddComponent<NeedComponent>(nativeThreadIndex, entity, new NeedComponent { currentNeed = NeedType.needForFood });
            ecb.AddComponent<NeedPathParams>(nativeThreadIndex, entity, new NeedPathParams { searchRadius = 2 });
        }
        else if (hc.fatigue > 16 * 60)
        {
            ecb.AddComponent<NeedComponent>(nativeThreadIndex, entity, new NeedComponent { currentNeed = NeedType.needToRest });
            ecb.AddComponent<NeedPathParams>(nativeThreadIndex, entity, new NeedPathParams { searchRadius = 2 });
        }
        else if (hc.sportivity > 2 * 22 * 60)
        {
            ecb.AddComponent<NeedComponent>(nativeThreadIndex, entity, new NeedComponent { currentNeed = NeedType.needForSport });
            ecb.AddComponent<NeedPathParams>(nativeThreadIndex, entity, new NeedPathParams { searchRadius = 2 });
        }
        else if (hc.sociality > 21 * 60)
        {
            ecb.AddComponent<NeedComponent>(nativeThreadIndex, entity, new NeedComponent { currentNeed = NeedType.needForSociality });
            ecb.AddComponent<NeedPathParams>(nativeThreadIndex, entity, new NeedPathParams { searchRadius = 2 });
        }
        else if (hc.grocery > 3 * 24 * 60)
        {
            ecb.AddComponent<NeedComponent>(nativeThreadIndex, entity, new NeedComponent { currentNeed = NeedType.needForGrocery });
            ecb.AddComponent<NeedPathParams>(nativeThreadIndex, entity, new NeedPathParams { searchRadius = 2 });
        }
        else if (hc.work > 16 * 60)
        {
            ecb.AddComponent<NeedComponent>(nativeThreadIndex, entity, new NeedComponent { currentNeed = NeedType.needToWork });
            ecb.AddComponent<NeedPathParams>(nativeThreadIndex, entity, new NeedPathParams { searchRadius = 2 });
        }
    }).ScheduleParallel(jobhandle);
    jobhandle1.Complete();

    //manage satisfied needs, when value for a parameter decreases under 25% as threshold
    JobHandle jobhandle2 = Entities.ForEach((Entity entity, int nativeThreadIndex, ref HumanComponent hc, in Translation t, in NeedComponent needComponent) =>
    {
        //retrieve entity position
        GetXY(t.Value, Vector3.zero, cellSize, out int currentX, out int currentY); //TODO fix hardcoded origin
        //decrement based to position:
        //home -> decrement fatigue
        //park -> decrement sociality and sportivity
        //pub -> decrement hunger and sociality
        //road -> decrement sportivity
        switch (grid[currentX + currentY * width])
        {
            case TileMapEnum.TileMapSprite.Home:
            case TileMapEnum.TileMapSprite.Home2:
                // Needs only recover at the human's OWN home; a stranger's home
                // only drains sociality.
                if (hc.homePosition.x == currentX && hc.homePosition.y == currentY)
                {
                    if (needComponent.currentNeed == NeedType.needToRest)
                    {
                        hc.fatigue = Math.Max(0, hc.fatigue - (2f + 1f) * deltaTime);
                    }
                    else if (needComponent.currentNeed == NeedType.needForFood)
                    {
                        hc.hunger = Math.Max(0, hc.hunger - (7f + 1f) * deltaTime);
                    }
                }
                else
                {
                    hc.sociality = Math.Max(0, hc.sociality - (5f + 1f) * deltaTime);
                }
                break;
            case TileMapEnum.TileMapSprite.Park:
                if (needComponent.currentNeed == NeedType.needForSport)
                {
                    hc.sportivity = Math.Max(0, hc.sportivity - (30f + 1f) * deltaTime);
                }
                else if (needComponent.currentNeed == NeedType.needForSociality)
                {
                    hc.sociality = Math.Max(0, hc.sociality - (15f + 1f) * deltaTime);
                }
                break;
            case TileMapEnum.TileMapSprite.Pub:
                if (needComponent.currentNeed == NeedType.needForFood)
                {
                    hc.hunger = Math.Max(0, hc.hunger - (7f + 1f) * deltaTime);
                }
                else if (needComponent.currentNeed == NeedType.needForSociality)
                {
                    hc.sociality = Math.Max(0, hc.sociality - (15f + 1f) * deltaTime);
                }
                break;
            case TileMapEnum.TileMapSprite.Supermarket:
                hc.grocery = Math.Max(0, hc.grocery - (3 * 24f + 1f) * deltaTime);
                break;
            case TileMapEnum.TileMapSprite.Office:
                hc.work = Math.Max(0, hc.work - (2f + 1f) * deltaTime);
                break;
            case TileMapEnum.TileMapSprite.RoadHorizontal:
            case TileMapEnum.TileMapSprite.RoadVertical:
            case TileMapEnum.TileMapSprite.RoadCrossing:
                break;
        }
        // Remove the NeedComponent once the matching parameter drops below its
        // per-need threshold.
        if (needComponent.currentNeed == NeedType.needForFood && hc.hunger < 25f * 7 * 0.6)
        {
            ecb.RemoveComponent<NeedComponent>(nativeThreadIndex, entity);
        }
        else if (needComponent.currentNeed == NeedType.needToRest && hc.fatigue < 25f * 17 * 0.6)
        {
            ecb.RemoveComponent<NeedComponent>(nativeThreadIndex, entity);
        }
        else if (needComponent.currentNeed == NeedType.needForSport && hc.sportivity < 25f * 23 * 0.6)
        {
            ecb.RemoveComponent<NeedComponent>(nativeThreadIndex, entity);
        }
        else if (needComponent.currentNeed == NeedType.needForSociality && hc.sociality < 25f * 11 * 0.6)
        {
            ecb.RemoveComponent<NeedComponent>(nativeThreadIndex, entity);
        }
        else if (needComponent.currentNeed == NeedType.needForGrocery && hc.grocery < 25f * 25 * 3 * 0.6)
        {
            ecb.RemoveComponent<NeedComponent>(nativeThreadIndex, entity);
        }
        else if (needComponent.currentNeed == NeedType.needToWork && hc.work < 25f * 17 * 0.6)
        {
            ecb.RemoveComponent<NeedComponent>(nativeThreadIndex, entity);
        }
    }).ScheduleParallel(jobhandle1);
    // NOTE(review): jobhandle2's completion / producer registration and the end
    // of this method are outside this view of the file.
/// <summary>
/// Creates the grid of nodes by scanning the area inside the scan collider:
/// raycasts place the nodes, boxcasts bake obstacles into the node types, and
/// a final job links each node to its neighbors. Blocks the main thread until
/// the whole chain has completed.
/// </summary>
public void CreateGrid()
{
    DestroyGrid();
    // TODO: Perhaps we might want to snap the extents value when editing the bounding box
    // in the editor?
    Bounds scanBounds = scanCollider.bounds;
    ScanAreaSettings scanSettings = new ScanAreaSettings((float3)scanBounds.center, (float3)scanBounds.extents, walkableMask);
    int expectedGridDimension = scanSettings.gridDimension;

    // TODO: Could I use nodesTypes invalid to avoid any kind of computation from them?
    // Uninitialized memory is fine: every slot is written by the jobs below.
    nodesTransforms = new NativeArray<NodeTransform>(expectedGridDimension, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
    nodesTypes = new NativeArray<NodeType>(expectedGridDimension, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
    nodesNeighbors = new NativeArray<NodeNeighbor>(expectedGridDimension * NodeNumNeighbors, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);

    // calculate the initial raycast commands
    NativeArray<RaycastCommand> mainCommands = new NativeArray<RaycastCommand>(expectedGridDimension, Allocator.TempJob);
    JobHandle createNodesHandle = new CalculateRaycastCommandsJob
    {
        commands = mainCommands,
        scanSettings = scanSettings,
    }
    .ScheduleParallel(expectedGridDimension, 64, default(JobHandle));

    // schedule the commands to retrieve the initial hits
    NativeArray<RaycastHit> nodeHits = new NativeArray<RaycastHit>(expectedGridDimension, Allocator.TempJob);
    createNodesHandle = RaycastCommand.ScheduleBatch(mainCommands, nodeHits, 32, createNodesHandle);
    // Kick the workers now so the raycasts start while we keep scheduling.
    JobHandle.ScheduleBatchedJobs();

    // build the nodes using the received hits and the main raycast commands
    createNodesHandle = new CreateNodesJob
    {
        nodesTransforms = nodesTransforms,
        nodesTypes = nodesTypes,
        hits = nodeHits,
        commands = mainCommands,
    }
    .ScheduleParallel(expectedGridDimension, 32, createNodesHandle);

    // calculate the boxcasts to bake obstacles
    NativeArray<BoxcastCommand> boxcastCommands = new NativeArray<BoxcastCommand>(expectedGridDimension, Allocator.TempJob);
    JobHandle bakeObstaclesHandle = new CalculateBoxcastCommandsJob
    {
        commands = boxcastCommands,
        nodesTransforms = nodesTransforms,
        mask = obstacleMask,
        boxNodePercentage = boxToNodeObstaclePercentage,
        maxCharacterHeight = maxCharacterHeight,
    }
    .ScheduleParallel(expectedGridDimension, 64, createNodesHandle);

    // schedule the boxcasts to find possible obstacles
    NativeArray<RaycastHit> obstacleHits = new NativeArray<RaycastHit>(expectedGridDimension, Allocator.TempJob);
    bakeObstaclesHandle = BoxcastCommand.ScheduleBatch(boxcastCommands, obstacleHits, 32, bakeObstaclesHandle);

    // prepare the bake obstacles job
    bakeObstaclesHandle = new BakeObstaclesJob
    {
        nodesTypes = nodesTypes,
        boxcastHits = obstacleHits,
    }
    .ScheduleParallel(expectedGridDimension, 128, bakeObstaclesHandle);

    // now calculate the neighbors — depends only on createNodesHandle (it reads
    // nodesTransforms, not nodesTypes), so it runs concurrently with the
    // obstacle-baking chain.
    JobHandle calculateNeighborsHandle = new CalculateNeighborsJob
    {
        neighbors = nodesNeighbors,
        nodesTransforms = nodesTransforms,
        scanSettings = scanSettings,
        maxWalkableHeightWithStep = maxWalkableHeightWithStep,
    }
    .ScheduleParallel(expectedGridDimension, 32, createNodesHandle);

    JobHandle finalHandle = JobHandle.CombineDependencies(calculateNeighborsHandle, bakeObstaclesHandle);
    // Dispose the temporary command/hit buffers once every consumer has finished.
    JobHandle disposeHandle = JobHandle.CombineDependencies(mainCommands.Dispose(finalHandle), nodeHits.Dispose(finalHandle));
    disposeHandle = JobHandle.CombineDependencies(disposeHandle, boxcastCommands.Dispose(finalHandle), obstacleHits.Dispose(finalHandle));

    // wait to complete all the scheduled stuff
    finalHandle.Complete();
    gridWidth = scanSettings.gridWidth;
    gridDepth = scanSettings.gridDepth;
    isGridCreated = true;
    OnGridCreation?.Invoke();
    Logger.LogFormat("Grid was created with dimension {0}. Width: {1}. Height: {2}.", expectedGridDimension, gridWidth, gridDepth);
    // The dispose jobs may still be pending after finalHandle completes; wait
    // for them too before returning.
    disposeHandle.Complete();
}
// Runs one simulation step: gathers cursor input, rebuilds the KNN
// neighbourhood/goo groups when dirty, then schedules the cursor -> blob
// physics job chain and blocks on the final copy-out jobs.
private void UpdateSimulation(float deltaTime)
{
    //todo: break this down better. use delta time plus last sim time to figure out a list of game frames to step through, using a starting state.
    //Can be jobified, but it's not super necessary
    for (int index = 0; index < NUM_CURSORS; index++)
    {
        //_cursorTeamIDs[index] = index;
        _cursorInputDeltas[index] = InputMan.ListOfSources[index].GetInputAxis() * CursorAccel; //todo: needs a game frame to reference
        //_cursorAccelerations[index] = float2.zero;
        //_cursorVelocities[index] = float2.zero;
        //_cursorPositions[index] = Random.insideUnitCircle;
        //_cursorRadii[index] = 1.0f;
    }
    UpdateRuntimeValues(deltaTime);

    #region Job Kickoff and Dependancy
    //
    // Fire off jobs with all the data that has been set up above. Prefer not to in-line job data and job scheduling due to dependancies
    //
    #region ResetBeginningOfSimFrame
    _jobHandleResetBlobAccelerations = _jobDataResetBlobAccelerations.Schedule(_blobAccelerations.Length, 64);
    _jobHandleResetCursorAccelerations = _jobDataResetCursorAccelerations.Schedule(_cursorAccelerations.Length, 1);
    #endregion //ResetBeginningOfSimFrame

    //We need to copy values of positions over into the knn tree (one day we might be able to rule this out)
    _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetBlobAccelerations, _jobHandleResetCursorAccelerations);
    //_jobDataQueryNearestNeighboursKNN
    if (bNearestNeighboursDirty || DynamicallyUpdateNearestNeighbours) //HACK: see what happens when we maintain the initial lattice
    {
        #region KNN Tree
        // Rebuild the KNN tree from current blob positions, then query it.
        _jobHandleCopyBlobInfoToFloat3 = _jobDataCopyBlobInfoToFloat3.Schedule(_blobPositionsV3.Length, 64, _jobHandleResetJobs);
        _jobHandleBuildKNNTree = _jobBuildKnnTree.Schedule(_jobHandleCopyBlobInfoToFloat3);
        // _jobHandleSetBlobRadii = _jobDataCopyBlobRadii.Schedule(_blobRadii.Length, 64);
        // JobHandle jobHandleResetRadiiAndBuildKNNTree = JobHandle.CombineDependencies(_jobHandleBuildKNNTree, _jobHandleBuildKNNTree);
        //now query nearest neighbours
        JobHandle jobHandleQueryKNN = _jobDataQueryNearestNeighboursKNN.Schedule(_blobPositionsV3.Length, 64, _jobHandleBuildKNNTree);
        _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, jobHandleQueryKNN);
        #endregion

        _jobHandleResetGroupIDs = _jobDataResetGooGroups.Schedule(_blobGroupIDs.Length, 64);
        _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, _jobHandleResetGroupIDs);
        // Flood-fill connected blob groups, either from explicit unique edges or
        // straight from the KNN query results.
        if (UseUniqueEdges)
        {
            // Debug.Log($"Unique Blob edges length { _uniqueBlobEdges.Count() }");
            _uniqueBlobEdgesHashSet.Clear(); //maybe if this was a job it'd be less slow?
            _uniqueBlobEdges.Clear();
            JobHandle jobHandFindUniqueEdges = _jobCompileDataUniqueEdges.Schedule(_blobPositionsV3.Length, 64, _jobHandleResetJobs);
            _jobHandleFloodFillGroupiID = _jobDataFloodFillGroupIDsMultiHashMap.Schedule(jobHandFindUniqueEdges);
            _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, _jobHandleFloodFillGroupiID);
            // Debug.Log($"Num Groups: {_jobDataFloodFillGroupIDsMultiHashMap.NumGroups[0]}");
        }
        else
        {
            _jobHandleFloodFillGroupiID = _jobDataFloodFillGroupIDsKnn.Schedule(_jobHandleResetJobs);
            _jobHandleResetJobs = JobHandle.CombineDependencies(_jobHandleResetJobs, _jobHandleFloodFillGroupiID);
            // Debug.Log($"Num Groups: {_jobDataFloodFillGroupIDsKnn.NumGroups[0]}");
        }
        bNearestNeighboursDirty = false;
    }

    #region SimUpdateFrame
    //
    // Cursors must be done first. Luckily there's very few
    //
    //update cursors//todo: treat more like ECS. cursors happen to have positions/velocities/radii. But they out to be "type" tagged somehow.
    _jobHandleSetCursorAcceleration = _jobDataSetCursorAcceleration.Schedule(_cursorInputDeltas.Length, 1, _jobHandleResetJobs);
    _jobHandleApplyCursorFriction = _jobDataApplyCursorFriction.Schedule(_cursorInputDeltas.Length, 1, _jobHandleSetCursorAcceleration);
    _jobHandleUpdateCursorPositions = _jobDataUpdateCursorPositions.Schedule(_cursorInputDeltas.Length, 1, _jobHandleApplyCursorFriction);
    _jobHandleCursorsInfluenceBlobs = _jobDataCursorsInfluenceBlobs.Schedule(_blobPositions.Length, 64, _jobHandleUpdateCursorPositions); //todo: give cursors knnquery data.
    //Cursor Influences blobs once it's ready
    //Blob sim gets updated after cursor influence
    //blobs all figure out how much push and pull is coming from neighbouring blobs.
    if (UseUniqueEdges)
    {
        _jobHandleSpringForces = _jobDataSpringForcesUniqueEdges.Schedule(_blobAccelerations.Length, 64, _jobHandleCursorsInfluenceBlobs);
    }
    else
    {
        _jobHandleSpringForces = _jobDataSpringForcesUsingKnn.Schedule(_blobAccelerations.Length, 64, _jobHandleCursorsInfluenceBlobs);
    }
    _jobHandleApplyBlobFriction = _jobDataApplyFrictionToBlobs.Schedule(_blobAccelerations.Length, 64, _jobHandleSpringForces);
    _jobHandleFluidInfluences = _jobDataFluidInfluence.Schedule(_blobAccelerations.Length, 64, _jobHandleApplyBlobFriction);
    _jobHandleUpdateBlobPositions = _jobDataUpdateBlobPositions.Schedule(_blobAccelerations.Length, 64, _jobHandleFluidInfluences);
    #endregion //SimUpdateFrame

    //temp - needs an interpolator job
    //Todo: spit out into a particle effect instead of transforms, which are probably slow as heck
    //but this is still somewhat useful for debug
    JobHandle jobHandleDebugColorization;
    switch (DebugStyle)
    {
        case BlobColorDebugStyle.Edges:
            jobHandleDebugColorization = _jobDataDebugColorisationKNNLength.Schedule(_blobKNNNearestNeighbourQueryResults.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.Velocity:
            jobHandleDebugColorization = _jobDataDebugColorisationFloat2Magnitude.Schedule(_blobVelocities.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.Acceleration:
            jobHandleDebugColorization = _jobDataDebugColorisationFloat2Magnitude.Schedule(_blobAccelerations.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.TeamID:
            jobHandleDebugColorization = _jobDataDebugColorisationInt.Schedule(_blobTeamIDs.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        case BlobColorDebugStyle.GroupID:
            jobHandleDebugColorization = _jobDataDebugColorisationInt.Schedule(_blobGroupIDs.Length, 64, _jobHandleUpdateBlobPositions);
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    // Copy-out jobs; these are the sync points for the whole frame's graph.
    _jobHandleCopyBlobsToParticleSystem = _jobDataCopyBlobsToParticleSystem.ScheduleBatch(BlobParticleSystemOutput, 64, jobHandleDebugColorization);
    _jobHandleCopyCursorsToTransforms = _jobDataCopyCursorsToTransforms.Schedule(_cursorTransformAccessArray, _jobHandleCursorsInfluenceBlobs);
    _jobHandleCopyBlobsToParticleSystem.Complete();
    _jobHandleCopyCursorsToTransforms.Complete();
    _jobHandleBuildAABB = _jobDataCalculateAABB.Schedule(_jobHandleUpdateBlobPositions);
    _jobHandleBuildAABB.Complete();
    // Debug.Log($"Unique Blob edges length { _uniqueBlobEdges.Count() }");
    #endregion // Job Kickoff and Dependancy
    //No. You must call "complete" on any handle that has something dependant on it. Which is all of them, you'd expect.
    //maybe i only need to complete the last, since that's dependant.
}
// Pumps the async path-request queue: finishes the in-flight pathfinding job
// (forcing completion after ~3 frames), converts its output into a Path,
// releases the job's native containers, then schedules the next queued request.
private void Update()
{
    framesProcessed++;
    if (currentRequest != null)
    {
        //jobHandle.Complete();
        // Harvest either when the job finished on its own, or after a few
        // frames have elapsed (forced sync so requests can't stall forever).
        if (jobHandle.IsCompleted || framesProcessed > 3)
        {
            jobHandle.Complete();
            //make path
            Path path = new Path();
            // result is iterated back-to-front below, so result[0] is the final
            // node; a large gap between it and the requested dst means no path.
            if (job.result.Length == 0 || Vector3.Distance(currentRequest.dst, job.grid.GetNodePosition(job.result[0])) > 3)
            {
                path.failed = true;
            }
            else
            {
                path.nodes = new List<Vector3>(job.result.Length);
                for (int i = job.result.Length - 1; i >= 0; i--)
                {
                    path.nodes.Add(job.grid.GetNodePosition(job.result[i].x, job.result[i].y));
                }
            }
            currentRequest.result = path;
            currentRequest.done = true;
            //Dispos job structs
            job.grid.Dispose();
            job.result.Dispose();
            job.open.Dispose();
            job.closed.Dispose();
            currentRequest = null;
        }
    }
    //Queue a new job if there are requests
    if (currentRequest == null && requests.Count > 0 && this.grid.nodeSize > 0)
    {
        currentRequest = requests.Dequeue();
        job = new ProcessPathJob()
        {
            srcPosition = currentRequest.src,
            dstPosition = currentRequest.dst,
            // The job gets its own TempJob copy of the grid; disposed on harvest.
            grid = grid.Copy(Allocator.TempJob),
            result = new NativeList<int2>(Allocator.TempJob),
            open = new NativeBinaryHeap<ProcessPathJob.NodeCost>((int)(grid.width * grid.height / (grid.nodeSize) / 2), Allocator.TempJob),
            closed = new NativeHashMap<int2, ProcessPathJob.NodeCost>(128, Allocator.TempJob)
        };
        jobHandle = job.Schedule();
        framesProcessed = 0;
    }
}
// Per-frame UI input dispatch: hit-tests pointer positions against every
// canvas root, reduces per-canvas hits to one global hit per pointer, emits
// pointer events through a command buffer, and forwards keyboard input to the
// currently selected entity on the main thread.
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    GatherEvents(Allocator.TempJob, out var pointerEvents, out var keyboardEvents);
    var pointerFrameData = GatherPointerFrameData(Allocator.TempJob);
    NativeArray<Entity> roots = m_RootGroup.ToEntityArray(Allocator.TempJob);
    var childrenFromEntity = GetBufferFromEntity<UIChild>(true);
    var worldSpaceRectFromEntity = GetComponentDataFromEntity<WorldSpaceRect>(true);
    var parentFromEntity = GetComponentDataFromEntity<UIParent>(true);
    var pointerReceiverFromEntity = GetComponentDataFromEntity<PointerInputReceiver>(true);
    var stateComponentFromEntity = GetComponentDataFromEntity<InputSystemState>();
    // NOTE(review): selectableFromEntity is otherwise unused; kept because the
    // GetComponentDataFromEntity call also registers this system's dependency
    // on Selectable — confirm before removing.
    var selectableFromEntity = GetComponentDataFromEntity<Selectable>();
    var stateEntity = GetSingletonEntity<InputSystemState>();

    // One hit slot per (pointer, canvas root) pair, plus one global slot per pointer.
    NativeArray<Entity> perCanvasHits = new NativeArray<Entity>(pointerFrameData.Length * roots.Length, Allocator.TempJob);
    NativeArray<Entity> globalHits = new NativeArray<Entity>(pointerFrameData.Length, Allocator.TempJob);
    LowLevelUtils.MemSet(perCanvasHits, default);
    LowLevelUtils.MemSet(globalHits, default);

    ProcessPerCanvasInput process = new ProcessPerCanvasInput()
    {
        Roots = roots,
        ChildrenFromEntity = childrenFromEntity,
        Hits = perCanvasHits,
        LocalToWorldFromEntity = worldSpaceRectFromEntity,
        PointerInputReceiver = pointerReceiverFromEntity,
        PointersPosition = pointerFrameData
    };
    // One parallel iteration per canvas root; each writes its own slice of perCanvasHits.
    inputDeps = process.Schedule(roots.Length, 1, inputDeps);

    var canvasLayerFromEntity = GetComponentDataFromEntity<CanvasSortLayer>();
    // BUGFIX: removed a second, redundant LowLevelUtils.MemSet(globalHits, default)
    // that was here — globalHits was already cleared above and nothing writes it
    // in between (the scheduled job only writes perCanvasHits).
    CanvasHitsToGlobal canvasHits = new CanvasHitsToGlobal()
    {
        Roots = roots,
        CanvasLayerFromEntity = canvasLayerFromEntity,
        GlobalHits = globalHits,
        PerCanvasHits = perCanvasHits
    };
    inputDeps = canvasHits.Schedule(inputDeps);

    EntityCommandBuffer ecb = m_CommandBufferSystem.CreateCommandBuffer();
    UpdatePointerEvents updatePointerJob = new UpdatePointerEvents()
    {
        ButtonStates = m_ButtonStates,
        EventArchetype = m_PointerEventArchetype,
        StateEntity = stateEntity,
        Hits = globalHits,
        ReceiverFromEntity = pointerReceiverFromEntity,
        StateFromEntity = stateComponentFromEntity,
        Manager = ecb,
        PointerEvents = pointerEvents,
        ParentFromEntity = parentFromEntity,
        PointerFrameData = pointerFrameData,
    };
    inputDeps = updatePointerJob.Schedule(inputDeps);
    // Keyboard handling below uses EntityManager directly, so sync here.
    inputDeps.Complete();

    if (keyboardEvents.Length > 0)
    {
        var selected = GetSingleton<InputSystemState>().SelectedEntity;
        if (selected != default)
        {
            // Batch this frame's key-down events into a buffer targeted at the
            // selected entity.
            var eventEntity = EntityManager.CreateEntity(m_KeyboardEventArchetype);
            var buff = EntityManager.GetBuffer<KeyboardInputBuffer>(eventEntity);
            EntityManager.SetComponentData(eventEntity, new KeyboardEvent() { Target = selected });
            for (int i = 0; i < keyboardEvents.Length; i++)
            {
                if (keyboardEvents[i].EventType == NativeInputEventType.KeyDown)
                {
                    buff.Add(new KeyboardInputBuffer() { Character = keyboardEvents[i].Character, EventType = keyboardEvents[i].KbdEvent, KeyCode = keyboardEvents[i].KeyCode });
                }
            }
        }
    }
    keyboardEvents.Dispose();
    m_LastFrameMousePos = ((float3)UnityEngine.Input.mousePosition).xy;
    return (inputDeps);
}
/// <summary>
/// Waits for the master job handle to finish, then resets it so stale handles
/// are never completed twice.
/// </summary>
public void CompleteJob()
{
    jobHandle.Complete();
    jobHandle = default;
}
// Builds the LBVH for the given AABBs entirely on the job system:
// bounds reduction -> morton codes -> bitonic sort -> node construction ->
// bottom-up AABB refit. Returns the handle of the last scheduled job so the
// caller can chain on it; only the bDebug path blocks the main thread.
public JobHandle Calculate(JobHandle deps, ComponentDataArray<BVHAABB> AABB)
{
    // Reduce all AABBs to a scene bound across 8 parallel batches.
    // BUGFIX: 'AABB.Length / 8' used integer division, so math.ceil was a no-op
    // and up to 7 trailing elements could be excluded from the reduction when
    // Length was not a multiple of 8; divide as float before taking the ceiling.
    var computeBound = new CalculateBoundsJob()
    {
        AABB = AABB,
        batchSize = (int)math.ceil(AABB.Length / 8f),
        results = bounds
    };
    deps = computeBound.Schedule(8, 1, deps);
    var boundMerge = new CalculateBoundsMergedJob() { results = bounds };
    deps = boundMerge.Schedule(deps);

    var resetBVH = new ResetBVH() { BVHArray = BVHArray, };
    deps = resetBVH.Schedule(BVHArray.Length, 32, deps);

    // Compute morton codes for the AABBs (the job reads the merged bounds).
    var computeMortonJob = new ComputeMortonCodesJob
    {
        aabbs = AABB,
        indexConverter = indexConverter,
        mortonCodes = mortonCodes,
        Bounds = bounds
    };
    deps = computeMortonJob.Schedule(mortonCodes.Length, 64, deps);

    // Bitonic merge/sort passes over the morton codes. NOTE(review): bitonic
    // networks assume a power-of-two element count — confirm mortonCodes is
    // padded accordingly where it is allocated.
    var bitonicMergeJob = new BitonicMergeJob() { values = mortonCodes, indexConverter = indexConverter };
    var bitonicSortJob = new BitonicSortJob() { indexConverter = indexConverter, values = mortonCodes };
    int pass = (int)math.log2(mortonCodes.Length);
    for (int i = 0; i < pass - 1; i++)
    {
        for (int j = 0; j <= i; j++)
        {
            bitonicMergeJob.strideSwap = 1 << (i - j);
            bitonicMergeJob.strideRisingGroup = 1 << j;
            deps = bitonicMergeJob.Schedule(mortonCodes.Length / 2, 64, deps);
        }
    }
    for (int i = 0; i < pass; i++)
    {
        bitonicSortJob.strideSwap = 1 << (pass - i - 1);
        deps = bitonicSortJob.Schedule(mortonCodes.Length / 2, 64, deps);
    }

    // Build leaf and internal nodes from the sorted order, then fix up parent
    // links and propagate AABBs upward.
    var constructBVHChild = new ConstructBVHChildNodesJob() { AABB = AABB, BVHArray = BVHArray, indexConverter = indexConverter };
    deps = constructBVHChild.Schedule(BVHArray.Length, 32, deps);
    var constructBVHInternal = new ConstructBVHInternalNodesJob()
    {
        BVHArray = BVHArray,
        mortonCodes = mortonCodes,
        NumObjects = AABB.Length,
        ParentIndex = parentIndex
    };
    // N leaves yield N-1 internal nodes.
    deps = constructBVHInternal.Schedule(AABB.Length - 1, 32, deps);
    var updateParentIndex = new UpdateBVHParentIndexJob() { BVHArray = BVHArray, ParentIndex = parentIndex };
    deps = updateParentIndex.Schedule(BVHArray.Length, 32, deps);
    var updateAABB = new UpdateAABBJob() { BVHArray = BVHArray };
    deps = updateAABB.Schedule(AABB.Length, 32, deps);

    if (bDebug)
    {
        // Debug path blocks the main thread to validate and draw the tree.
        deps.Complete();
        Debug.Assert(DebugUtils.ValidateBVH(BVHArray));
        for (int i = 0; i < BVHArray.Length - 1; i++)
        {
            if (BVHArray[i].IsValid > 0)
            {
                DebugUtils.DrawAABB(BVHArray[i].aabb, UnityEngine.Random.ColorHSV());
            }
        }
    }
    return (deps);
}
/// <summary>
/// Blocks until the in-flight tile-data job has finished so its resources are
/// safe to tear down.
/// </summary>
public void Dispose() => _tileDataJob.Complete();
//1. COMPUTE OVER FRAME
//2. NORMAL CALCULATIONS
//3. SIMD STUFF ComputeTrisNormalsJob
//4. https://forum.unity.com/threads/burst-simd-and-float3-float4-best-practices.527504/
// One-frame-latency pipeline: wait on LAST frame's ocean job, swap the vertex
// double buffer, then schedule this frame's displacement + normal jobs to run
// across the frame (kicked via ScheduleBatchedJobs, harvested next Update).
public void Update()
{
    //DO CALCULATION OVER THE FRAME LENGTH and RENDERING
    Profiler.BeginSample("__WAIT_FOR_JOB_COMPLETE__");
    m_OceanJobHandle.Complete();
    // Swap READ/WRITE roles — presumably READ now holds the vertices the
    // just-completed job wrote into its verticesOut buffer (TODO confirm Swap).
    Swap<NativeArray<Vector3>>(m_Vertices);
    Profiler.EndSample();

    Profiler.BeginSample("__SCHEDULE_JOBS_ASYNC__");
    var oceanJob = new OceanJob()
    {
        verticesIn = m_Vertices[READ],
        verticesOut = m_Vertices[WRITE],
        time = Time.time * m_TimeMult,
        scale = m_Scale,
        perlinStrength = m_PerlinStrength,
        rippleStrength = m_RippleStrength,
    };
    m_OceanJobHandle = oceanJob.Schedule(m_Mesh.vertexCount, 64);
    //limit number of threads to two
    //m_OceanJobHandle = oceanJob.Schedule(m_Mesh.vertexCount, m_Mesh.vertexCount / 2);
    //var recalculateNormalsJob = new RecalculateNormalsJob
    //{
    //    vertices = m_Vertices[READ],
    //    normals = m_Normals,
    //    indices = m_Indices
    //};
    //m_NormalsJobHndl = recalculateNormalsJob.Schedule();

    // Two-stage normals: one job per triangle (m_Indices.Length / 3), then a
    // per-vertex job that combines the triangle results via the mapping tables.
    var triNormalsJob = new ComputeTrisNormalsJobSimd
    {
        vertices = m_Vertices[READ],
        triNormals = m_TriNormals,
        indices = m_Indices,
    };
    m_NormalsJobHndl = triNormalsJob.Schedule(m_Indices.Length / 3, 64);
    var normalsJob = new ComputeNormalsJobSimd
    {
        verticesToTrianglesMapping = m_VerticesToTrianglesMapping,
        verticesToTrianglesMappingCount = m_VerticesToTrianglesMappingCount,
        triNormals = m_TriNormals,
        normals = m_Normals
    };
    m_NormalsJobHndl = normalsJob.Schedule(m_Mesh.vertexCount, 64, m_NormalsJobHndl);
    //m_NormalsJobHndl.Complete();
    //jobs wont be scheduled until below function is called or something starts waiting for them
    JobHandle.ScheduleBatchedJobs();
    Profiler.EndSample();
}
// Mob AI update: (re)sizes the persistent raycast buffers when the mob count
// changes, runs the movement job synchronously, then spawns a spear for every
// mob that reached its Throw state and flips it back to FromTarget.
protected override void OnUpdate()
{
    int curEntityCnt = m_mobQuery.CalculateEntityCount();
    if (na_rayCommands.Length != curEntityCnt)
    {
        // Finish the previous frame's job before reallocating arrays it may use.
        mobMovementJH.Complete();
        na_rayCommands.Dispose();
        na_rayCommands = new NativeArray<RaycastInput>(curEntityCnt, Allocator.Persistent);
        na_rayHits.Dispose();
        na_rayHits = new NativeArray<Unity.Physics.RaycastHit>(curEntityCnt, Allocator.Persistent);
    }
    // Disabled wall-raycast chain, kept for reference; the movement job
    // currently runs without the raycast dependency.
    /*var setupRaycastJob = new SetupRaycastJob
     * {
     * layerMask = 1 << LayerMask.NameToLayer("Wall"),
     * rayCommands = na_rayCommands,
     * wallPos = GameManager.instance.Target
     * };*/
    //var setupJH = setupRaycastJob.Schedule(this, buildPhysicsWorldSystem.FinalJobHandle);
    //var raycastJH = ScheduleBatchRayCast(buildPhysicsWorldSystem.PhysicsWorld.CollisionWorld, na_rayCommands, na_rayHits, setupJH);
    var mobMovementJob = new MobMovementJob
    {
        deltaTime = Time.deltaTime,
        hits = na_rayHits
    };
    //mobMovementJH = mobMovementJob.Schedule(this, raycastJH);
    mobMovementJH = mobMovementJob.Schedule(this);
    // Run synchronously: the component reads below must see this frame's state.
    mobMovementJH.Complete();
    var entities = m_mobQuery.ToEntityArray(Allocator.TempJob);
    for (int i = 0; i < entities.Length; ++i)
    {
        if (EntityManager.GetComponentData<MobStateData>(entities[i]).Value == MobState.Throw)
        {
            // Spawn a spear at the mob's throwing point with a fixed-speed
            // velocity along the configured throw rotation.
            var spearEntity = PostUpdateCommands.Instantiate(ECSManager.Instance.SpearPrefab);
            Vector3 startPoint = EntityManager.GetComponentData<Translation>(entities[i]).Value + (float3)SettingsManager.ThrowingPoint;
            Quaternion initialRotation = Quaternion.Euler(SettingsManager.ThrowingRotation);
            Vector3 initialVelocity = initialRotation * Vector3.forward * 75.0f;
            PostUpdateCommands.SetComponent(spearEntity, new Translation { Value = startPoint });
            PostUpdateCommands.SetComponent(spearEntity, new Rotation { Value = initialRotation });
            PostUpdateCommands.SetComponent(spearEntity, new PhysicsVelocity { Linear = initialVelocity });
            PostUpdateCommands.SetComponent(entities[i], new MobStateData { Value = MobState.FromTarget });
        }
    }
    entities.Dispose();
}
/// <summary>
/// Teardown on disable: wait for the in-flight move job so its data is no
/// longer in use, then release the transform container.
/// </summary>
private void OnDisable()
{
    // Completing first makes the Dispose below safe.
    moveHandle.Complete();
    transforms.Dispose();
}
// Manages the lifetime of the ping network drivers: disposes drivers whose
// PingDriverComponent was removed (destroying live server connections too),
// creates drivers for newly added components, then schedules the per-frame
// driver updates plus accept/cleanup jobs for the server side.
protected override JobHandle OnUpdate(JobHandle inputDep)
{
    var commandBuffer = m_Barrier.CreateCommandBuffer();
    // Destroy drivers if the PingDriverComponents were removed
    if (!m_DestroyedDriverGroup.IsEmptyIgnoreFilter)
    {
        // Driver teardown happens on the main thread, so sync the frame first.
        inputDep.Complete();
        var destroyedDriverEntity = m_DestroyedDriverGroup.ToEntityArray(Allocator.TempJob);
        var destroyedDriverList = m_DestroyedDriverGroup.ToComponentDataArray<PingDriverComponentData>(Allocator.TempJob);
        for (int i = 0; i < destroyedDriverList.Length; ++i)
        {
            if (destroyedDriverList[i].isServer != 0)
            {
                var serverConnectionList = m_ServerConnectionGroup.ToEntityArray(Allocator.TempJob);
                // Also destroy all active connections when the driver dies
                for (int con = 0; con < serverConnectionList.Length; ++con)
                {
                    commandBuffer.DestroyEntity(serverConnectionList[con]);
                }
                serverConnectionList.Dispose();
                ServerDriver.Dispose();
            }
            else
            {
                ClientDriver.Dispose();
            }
            commandBuffer.RemoveComponent<PingDriverStateComponent>(destroyedDriverEntity[i]);
        }
        destroyedDriverList.Dispose();
        destroyedDriverEntity.Dispose();
    }
    // Create drivers if new PingDriverComponents were added
    if (!m_NewDriverGroup.IsEmptyIgnoreFilter)
    {
        inputDep.Complete();
        var newDriverEntity = m_NewDriverGroup.ToEntityArray(Allocator.TempJob);
        var newDriverList = m_NewDriverGroup.ToComponentDataArray<PingDriverComponentData>(Allocator.TempJob);
        for (int i = 0; i < newDriverList.Length; ++i)
        {
            if (newDriverList[i].isServer != 0)
            {
                // Only a single server driver may exist at a time.
                if (ServerDriver.IsCreated)
                {
                    throw new InvalidOperationException("Cannot create multiple server drivers");
                }
                var drv = new UdpNetworkDriver(new INetworkParameter[0]);
                var addr = NetworkEndPoint.AnyIpv4;
                addr.Port = 9000;
                if (drv.Bind(addr) != 0)
                {
                    throw new Exception("Failed to bind to port 9000");
                }
                else
                {
                    drv.Listen();
                }
                ServerDriver = drv;
                ConcurrentServerDriver = ServerDriver.ToConcurrent();
            }
            else
            {
                if (ClientDriver.IsCreated)
                {
                    throw new InvalidOperationException("Cannot create multiple client drivers");
                }
                ClientDriver = new UdpNetworkDriver(new INetworkParameter[0]);
                ConcurrentClientDriver = ClientDriver.ToConcurrent();
            }
            // Mark the entity so the driver is not re-created next frame.
            commandBuffer.AddComponent(newDriverEntity[i], new PingDriverStateComponent { isServer = newDriverList[i].isServer });
        }
        newDriverList.Dispose();
        newDriverEntity.Dispose();
    }
    JobHandle clientDep = default(JobHandle);
    JobHandle serverDep = default(JobHandle);
    // Go through and update all drivers, also accept all incoming connections for server drivers
    if (ServerDriver.IsCreated)
    {
        // Schedule a chain with driver update, a job to accept all connections and finally a job to delete all invalid connections
        serverDep = ServerDriver.ScheduleUpdate(inputDep);
        var acceptJob = new DriverAcceptJob { driver = ServerDriver, commandBuffer = commandBuffer };
        serverDep = acceptJob.Schedule(serverDep);
        var cleanupJob = new DriverCleanupJob { commandBuffer = m_Barrier.CreateCommandBuffer().ToConcurrent() };
        serverDep = cleanupJob.Schedule(this, serverDep);
        m_Barrier.AddJobHandleForProducer(serverDep);
    }
    if (ClientDriver.IsCreated)
    {
        clientDep = ClientDriver.ScheduleUpdate(inputDep);
    }
    return (JobHandle.CombineDependencies(clientDep, serverDep));
}
// Destroys collided and out-of-bound entities through an ExclusiveEntityTransaction
// so structural changes can run on worker threads, overlapping the second destroy
// job with main-thread post-destruction logic, then writes the updated score back
// to the UI entity.
protected override void OnUpdate() {
    EntityManager.CompleteAllJobs();
    //We need to call this after EntityManager.CompleteAllJobs so that our uiEntityDataGroup is updated
    UpdateInjectedComponentGroups();
    //Copy our current UI data in a tmp array
    Entity uiEntity = uiEntityDataGroup.GetEntityArray()[0];
    UIData testData = GetComponentDataFromEntity<UIData>()[uiEntity];
    NativeArray<UIData> uiTmpDataArray = new NativeArray<UIData>(1, Allocator.TempJob);
    uiTmpDataArray[0] = testData;
    //Create a tmp list to contain the data needed to do some logic after entities destruction
    NativeList<InfoForLogicAfterDestroy> infoLogicTmpDataList = new NativeList<InfoForLogicAfterDestroy>(entityCollisionQueue.Count, Allocator.TempJob);
    //Tell the EntityManager that we will start doing entity work only via an ExclusiveEntityTransaction (that can be passed to a job)
    ExclusiveEntityTransaction exclusiveEntityTransaction = EntityManager.BeginExclusiveEntityTransaction();
    //Set up our job to destroy our entities and fill the infoLogicTmpDataList with the data we need to do some logic after the destruction
    DestroyEntityWithLogicJob destroyEntityWithLogicJob = new DestroyEntityWithLogicJob {
        entityTransaction = exclusiveEntityTransaction,
        uiDataArray = uiTmpDataArray,
        entityQueue = entityCollisionQueue,
        scoreValue = MonoBehaviourECSBridge.Instance.destroyScoreValue,
        infoForLogic = infoLogicTmpDataList,
    };
    JobHandle destroyHandle = destroyEntityWithLogicJob.Schedule(EntityManager.ExclusiveEntityTransactionDependency);
    // Register the job with the transaction so the EntityManager knows about it.
    EntityManager.ExclusiveEntityTransactionDependency = JobHandle.CombineDependencies(destroyHandle, EntityManager.ExclusiveEntityTransactionDependency);
    //Send the job to the worker thread queue, we need to do this because we need the job to run now
    JobHandle.ScheduleBatchedJobs();
    //Wait for it to be completed
    destroyHandle.Complete();
    //Start a new job to destroy out of bound entities
    DestroyEntityJob destroyEntityJob = new DestroyEntityJob {
        entityTransaction = exclusiveEntityTransaction,
        entityQueue = entityOutOfBoundQueue,
    };
    //Make sure we depend on the previous job (only one job at a time can use the ExclusiveEntityTransaction)
    destroyHandle = destroyEntityJob.Schedule(destroyHandle);
    EntityManager.ExclusiveEntityTransactionDependency = JobHandle.CombineDependencies(destroyHandle, EntityManager.ExclusiveEntityTransactionDependency);
    //Send the job to the worker thread queue, we need to do this because we need the job to run now
    JobHandle.ScheduleBatchedJobs();
    //While the job for the entity out of bound is running, do our logic for the entity destruction on the main thread
    //The list was generated from the first job
    DestroyLogic(infoLogicTmpDataList);
    //wait for the entity out of bound destroy job to finish
    destroyHandle.Complete();
    //Tell the entity manager that we are done with the ExclusiveEntityTransaction
    EntityManager.EndExclusiveEntityTransaction();
    //We need to call this after EndExclusiveEntityTransaction so that our uiEntityDataGroup is updated
    UpdateInjectedComponentGroups();
    //Copy back the UI data with the update score
    testData = uiTmpDataArray[0];
    EntityManager.SetComponentData(uiEntity, testData);
    //dispose of our tmp array/list
    uiTmpDataArray.Dispose();
    infoLogicTmpDataList.Dispose();
}
// Runs one boid simulation step: neighbor detection, wall avoidance,
// separation/alignment/cohesion steering, then movement integration.
// In FullBackground mode the job chain is left running across frames and
// only completed lazily on the next Run; otherwise it completes each frame.
void IEcsRunSystem.Run() {
    if (_filter.EntitiesCount == 0) {
        return;
    }

    // Lazily finish last frame's chain before touching the shared buffers again.
    if (FullBackground && !CanRead) {
        jobHandle.Complete();
        CanRead = true;
    }

    var detectNeighbors = new NeighborsDetectionJob {
        prodThresh = NeighborFov,
        distThresh = NeighborDistance,
        velocities = BoidEntityData.velocities,
        positions = BoidEntityData.positions,
        neighborsFromEntity = BoidEntityData.neighbors,
        entitiesCount = EntitiesCount
    };
    var avoidWalls = new WallJob {
        scale = WallScale,
        thresh = WallDistance,
        weight = WallWeight,
        positions = BoidEntityData.positions,
        accelerations = BoidEntityData.accelerations,
        _right = new Float3(1, 0, 0),
        _up = new Float3(0, 1, 0),
        _fwd = new Float3(0, 0, 1),
        _left = new Float3(-1, 0, 0),
        _down = new Float3(0, -1, 0),
        _back = new Float3(0, 0, -1)
    };
    var steerApart = new SeparationJob {
        separationWeight = SeparationWeight,
        entitiesCount = EntitiesCount,
        neighborsFromEntity = BoidEntityData.neighbors,
        positions = BoidEntityData.positions,
        accelerations = BoidEntityData.accelerations
    };
    var steerAlign = new AlignmentJob {
        alignmentWeight = AlignmentWeight,
        entitiesCount = EntitiesCount,
        neighborsFromEntity = BoidEntityData.neighbors,
        velocities = BoidEntityData.velocities,
        accelerations = BoidEntityData.accelerations
    };
    var steerTogether = new CohesionJob {
        cohesionWeight = CohesionWeight,
        entitiesCount = EntitiesCount,
        neighborsFromEntity = BoidEntityData.neighbors,
        positions = BoidEntityData.positions,
        accelerations = BoidEntityData.accelerations
    };
    var integrate = new MoveJob {
        dt = _timeSystem.DeltaTime,
        minSpeed = MinSpeed,
        maxSpeed = MaxSpeed,
        scale = BoidScale,
        positions = BoidEntityData.positions,
        velocities = BoidEntityData.velocities,
        accelerations = BoidEntityData.accelerations,
        matrices = BoidsVisualisationSystem._nativeMatrices
    };

    // Chain the jobs strictly in order: each stage reads what the previous wrote.
    jobHandle = detectNeighbors.Schedule(EntitiesCount, NumBoidsPerJob);
    jobHandle = avoidWalls.Schedule(EntitiesCount, NumBoidsPerJob, jobHandle);
    jobHandle = steerApart.Schedule(EntitiesCount, NumBoidsPerJob, jobHandle);
    jobHandle = steerAlign.Schedule(EntitiesCount, NumBoidsPerJob, jobHandle);
    jobHandle = steerTogether.Schedule(EntitiesCount, NumBoidsPerJob, jobHandle);
    jobHandle = integrate.Schedule(EntitiesCount, NumBoidsPerJob, jobHandle);
    JobHandle.ScheduleBatchedJobs();

    if (FullBackground) {
        // Leave the chain running; results become readable next frame.
        CanRead = false;
    } else {
        jobHandle.Complete();
        CanRead = true;
    }
}
// Culls entities to the camera view, buckets them into POSITION_SLICES horizontal
// bands, sorts each band by position, then renders everything with
// Graphics.DrawMeshInstanced in batches of DRAW_MESH_INSTANCED_SLICE_COUNT.
protected override void OnUpdate() {
    // Reset the per-slice queues from last frame.
    for (int i = 0; i < POSITION_SLICES; i++) {
        ClearQueueJob clearQueueJob = new ClearQueueJob {
            nativeQueue = nativeQueueArray[i]
        };
        jobHandleArray[i] = clearQueueJob.Schedule();
    }
    JobHandle.CompleteAll(jobHandleArray);
    // Compute the camera-aligned cull rectangle with a 10% margin on each side.
    Camera camera = Camera.main;
    float cameraWidth = camera.aspect * camera.orthographicSize;
    float3 cameraPosition = camera.transform.position;
    float marginX = cameraWidth / 10f;
    float xMin = cameraPosition.x - cameraWidth - marginX;
    float xMax = cameraPosition.x + cameraWidth + marginX;
    // Height of one horizontal band; yTop_N is the upper edge of band N.
    float cameraSliceSize = camera.orthographicSize * 2f / POSITION_SLICES;
    float yBottom = cameraPosition.y - camera.orthographicSize; // Bottom cull position
    float yTop_1 = cameraPosition.y + camera.orthographicSize; // Top most cull position
    float yTop_2 = yTop_1 - cameraSliceSize * 1f;
    float yTop_3 = yTop_1 - cameraSliceSize * 2f;
    float yTop_4 = yTop_1 - cameraSliceSize * 3f;
    float yTop_5 = yTop_1 - cameraSliceSize * 4f;
    float yTop_6 = yTop_1 - cameraSliceSize * 5f;
    float yTop_7 = yTop_1 - cameraSliceSize * 6f;
    float yTop_8 = yTop_1 - cameraSliceSize * 7f;
    float yTop_9 = yTop_1 - cameraSliceSize * 8f;
    float yTop_10 = yTop_1 - cameraSliceSize * 9f;
    float yTop_11 = yTop_1 - cameraSliceSize * 10f;
    float yTop_12 = yTop_1 - cameraSliceSize * 11f;
    float yTop_13 = yTop_1 - cameraSliceSize * 12f;
    float yTop_14 = yTop_1 - cameraSliceSize * 13f;
    float yTop_15 = yTop_1 - cameraSliceSize * 14f;
    float yTop_16 = yTop_1 - cameraSliceSize * 15f;
    float yTop_17 = yTop_1 - cameraSliceSize * 16f;
    float yTop_18 = yTop_1 - cameraSliceSize * 17f;
    float yTop_19 = yTop_1 - cameraSliceSize * 18f;
    float yTop_20 = yTop_1 - cameraSliceSize * 19f;
    // Only the outermost edges get the vertical margin, so interior band
    // boundaries stay exact.
    float marginY = camera.orthographicSize / 10f;
    yTop_1 += marginY;
    yBottom -= marginY;
    // One pass over all entities: cull against the rectangle and enqueue each
    // survivor into the band its y position falls in. The job struct exposes one
    // named field per band, hence the unrolled assignments.
    CullAndSortNativeQueueJob cullAndSortNativeQueueJob = new CullAndSortNativeQueueJob {
        xMin = xMin,
        xMax = xMax,
        yBottom = yBottom,
        yTop_1 = yTop_1,
        yTop_2 = yTop_2,
        yTop_3 = yTop_3,
        yTop_4 = yTop_4,
        yTop_5 = yTop_5,
        yTop_6 = yTop_6,
        yTop_7 = yTop_7,
        yTop_8 = yTop_8,
        yTop_9 = yTop_9,
        yTop_10 = yTop_10,
        yTop_11 = yTop_11,
        yTop_12 = yTop_12,
        yTop_13 = yTop_13,
        yTop_14 = yTop_14,
        yTop_15 = yTop_15,
        yTop_16 = yTop_16,
        yTop_17 = yTop_17,
        yTop_18 = yTop_18,
        yTop_19 = yTop_19,
        yTop_20 = yTop_20,
        nativeQueue_1 = nativeQueueArray[0].AsParallelWriter(),
        nativeQueue_2 = nativeQueueArray[1].AsParallelWriter(),
        nativeQueue_3 = nativeQueueArray[2].AsParallelWriter(),
        nativeQueue_4 = nativeQueueArray[3].AsParallelWriter(),
        nativeQueue_5 = nativeQueueArray[4].AsParallelWriter(),
        nativeQueue_6 = nativeQueueArray[5].AsParallelWriter(),
        nativeQueue_7 = nativeQueueArray[6].AsParallelWriter(),
        nativeQueue_8 = nativeQueueArray[7].AsParallelWriter(),
        nativeQueue_9 = nativeQueueArray[8].AsParallelWriter(),
        nativeQueue_10 = nativeQueueArray[9].AsParallelWriter(),
        nativeQueue_11 = nativeQueueArray[10].AsParallelWriter(),
        nativeQueue_12 = nativeQueueArray[11].AsParallelWriter(),
        nativeQueue_13 = nativeQueueArray[12].AsParallelWriter(),
        nativeQueue_14 = nativeQueueArray[13].AsParallelWriter(),
        nativeQueue_15 = nativeQueueArray[14].AsParallelWriter(),
        nativeQueue_16 = nativeQueueArray[15].AsParallelWriter(),
        nativeQueue_17 = nativeQueueArray[16].AsParallelWriter(),
        nativeQueue_18 = nativeQueueArray[17].AsParallelWriter(),
        nativeQueue_19 = nativeQueueArray[18].AsParallelWriter(),
        nativeQueue_20 = nativeQueueArray[19].AsParallelWriter()
    };
    JobHandle cullAndSortNativeQueueJobHandle = cullAndSortNativeQueueJob.Schedule(this);
    cullAndSortNativeQueueJobHandle.Complete();
    // Total visible count across all bands, used to size the render buffers.
    int visibleEntityTotal = 0;
    for (int i = 0; i < POSITION_SLICES; i++) {
        visibleEntityTotal += nativeQueueArray[i].Count;
    }
    // One TempJob array per band, sized to that band's queue.
    for (int i = 0; i < POSITION_SLICES; i++) {
        NativeArray<RenderData> nativeArray = new NativeArray<RenderData>(nativeQueueArray[i].Count, Allocator.TempJob);
        nativeArrayArray[i] = nativeArray;
    }
    // Drain each queue into its array (queues are not random access).
    for (int i = 0; i < POSITION_SLICES; i++) {
        NativeQueueToArrayJob nativeQueueToArrayJob = new NativeQueueToArrayJob {
            nativeQueue = nativeQueueArray[i],
            nativeArray = nativeArrayArray[i],
        };
        jobHandleArray[i] = nativeQueueToArrayJob.Schedule();
    }
    JobHandle.CompleteAll(jobHandleArray);
    // Sort by position
    for (int i = 0; i < POSITION_SLICES; i++) {
        SortByPositionJob sortByPositionJob = new SortByPositionJob {
            sortArray = nativeArrayArray[i],
            comparer = positionComparer
        };
        jobHandleArray[i] = sortByPositionJob.Schedule();
    }
    JobHandle.CompleteAll(jobHandleArray);
    // Fill up individual Arrays
    NativeArray<Matrix4x4> matrixArray = new NativeArray<Matrix4x4>(visibleEntityTotal, Allocator.TempJob);
    NativeArray<Vector4> uvArray = new NativeArray<Vector4>(visibleEntityTotal, Allocator.TempJob);
    // Each band writes into a disjoint region [startingIndex, startingIndex + Length).
    int startingIndex = 0;
    for (int i = 0; i < POSITION_SLICES; i++) {
        FillArraysParallelJob fillArraysParallelJob = new FillArraysParallelJob {
            nativeArray = nativeArrayArray[i],
            matrixArray = matrixArray,
            uvArray = uvArray,
            startingIndex = startingIndex
        };
        startingIndex += nativeArrayArray[i].Length;
        jobHandleArray[i] = fillArraysParallelJob.Schedule(nativeArrayArray[i].Length, 10);
    }
    JobHandle.CompleteAll(jobHandleArray);
    for (int i = 0; i < POSITION_SLICES; i++) {
        nativeArrayArray[i].Dispose();
    }
    // Slice Arrays and Draw
    InitDrawMeshInstancedSlicedData();
    // DrawMeshInstanced has a per-call instance cap, so draw in fixed-size chunks.
    for (int i = 0; i < visibleEntityTotal; i += DRAW_MESH_INSTANCED_SLICE_COUNT) {
        int sliceSize = math.min(visibleEntityTotal - i, DRAW_MESH_INSTANCED_SLICE_COUNT);
        NativeArray<Matrix4x4>.Copy(matrixArray, i, matrixInstancedArray, 0, sliceSize);
        NativeArray<Vector4>.Copy(uvArray, i, uvInstancedArray, 0, sliceSize);
        materialPropertyBlock.SetVectorArray(shaderMainTexUVid, uvInstancedArray);
        Graphics.DrawMeshInstanced(mesh, 0, material, matrixInstancedArray, sliceSize, materialPropertyBlock);
    }
    matrixArray.Dispose();
    uvArray.Dispose();
}
// Unity callback, runs after all Update calls each frame. Blocks the main
// thread until the position job scheduled earlier this frame has finished,
// so the data it writes is safe to read from here on.
public void LateUpdate() { m_PositionJobHandle.Complete(); }
// Advances the physical scene by one explicit-Euler step.
// Spring and gravitational forces are evaluated synchronously on the main
// thread (their return values feed the energy total); the remaining force
// accumulation, integration and transform update run as a chained job
// sequence that is completed before returning.
public void StepOneFrame(PhysicalScene scene) {
    // Velocity-proportional drag contribution.
    m_DragDampingJob = new DragDampingJob {
        b = scene.m_DragDamping,
        velocity = scene.m_Velocities,
        gradiant = scene.m_gradiants,
    };
    // Edge-spring forces over the scene's edge list.
    m_SpringForceJob = new SpringForceJob {
        position = scene.m_Positions,
        velocity = scene.m_Velocities,
        springForces = scene.springForces,
        edges = scene.edgeIndexList,
        gradiant = scene.m_gradiants,
    };
    // Pairwise gravitational attraction.
    m_GravitationalForceJob = new GravitationalForceJob {
        mass = scene.m_masses,
        position = scene.m_Positions,
        gradiant = scene.m_gradiants,
        gravitationalforces = scene.gravitationalforces,
    };
    // Uniform gravity, skipping fixed vertices.
    m_GravityGradiantJob = new SimpleGravityJob {
        gravity = scene.m_Gravity,
        gradiant = scene.m_gradiants,
        mass = scene.m_masses,
        fixes = scene.m_fixes,
    };
    // Explicit Euler integration of positions/velocities.
    m_ExplicitEulerJob = new ExplicitEulerJob {
        deltaTime = scene.dt,
        velocity = scene.m_Velocities,
        fixes = scene.m_fixes,
        position = scene.m_Positions,
        gradiant = scene.m_gradiants,
        mass = scene.m_masses,
    };
    // Pushes integrated positions into the Transforms.
    m_UpdateJob = new UpdateJob {
        position = scene.m_Positions,
        gradiant = scene.m_gradiants,
    };
    m_PotentialEnergyJob = new PotentialEnergyJob {
        mass = scene.m_masses,
        velocity = scene.m_Velocities,
        position = scene.m_Positions,
        gravity = scene.m_Gravity,
    };

    // Main-thread force passes; their energies roll into the scene total.
    float eSpring = m_SpringForceJob.Execute();
    float eGravitational = m_GravitationalForceJob.Execute();
    scene.totalEnergy = m_PotentialEnergyJob.Execute(eSpring, eGravitational);

    // drag -> gravity -> integrate -> write transforms, then block until done.
    m_DragDampingJobHandle = m_DragDampingJob.Schedule(scene.objectCount, 64);
    m_GravityGradiantJobHandle = m_GravityGradiantJob.Schedule(scene.objectCount, 64, m_DragDampingJobHandle);
    m_ExplicitEulerJobHandle = m_ExplicitEulerJob.Schedule(scene.objectCount, 64, m_GravityGradiantJobHandle);
    m_UpdateJobHandle = m_UpdateJob.Schedule(scene.m_TransformsAccessArray, m_ExplicitEulerJobHandle);
    m_UpdateJobHandle.Complete();
}