/// Schedules HashWriterParallelFor across MAPSIZE iterations and verifies that
/// every iteration executed (each key maps to 17) and that the parallel writers
/// behaved correctly; also checks which job thread indices were used.
public void RunHashWriterParallelFor()
{
    const int MAPSIZE = 100;

    // Holds one entry per iteration so we can confirm the parallel write worked.
    var resultMap = new NativeHashMap<int, int>(MAPSIZE, Allocator.TempJob);
    // Records the threadIndex used for each job slice.
    var threadsSeen = new NativeHashMap<int, bool>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);

    var writerJob = new HashWriterParallelFor
    {
        result = resultMap.AsParallelWriter(),
        threadMap = threadsSeen.AsParallelWriter()
    };

    JobHandle handle = writerJob.Schedule(MAPSIZE, 10);
    handle.Complete();

    // Every iteration must have written its sentinel value.
    for (int key = 0; key < MAPSIZE; key++)
    {
        Assert.AreEqual(17, resultMap[key]);
    }

#if !UNITY_SINGLETHREADED_JOBS
    Assert.IsTrue(threadsSeen.Length > 1); // should have run in parallel, and used different thread indices
#else
    Assert.IsTrue(threadsSeen.Length == 1); // only have one thread.
    Assert.IsTrue(threadsSeen[0] == true); // and it should be job index 0
#endif

    resultMap.Dispose();
    threadsSeen.Dispose();
}
/// Schedules HashWriterParallelFor with a smaller batch size (5) and verifies
/// that every iteration wrote its sentinel value into the map.
/// NOTE(review): this method has the same name as another RunHashWriterParallelFor
/// in this file — duplicate definitions will not compile if both are in the same
/// class; confirm they belong to different fixtures or rename one.
public void RunHashWriterParallelFor()
{
    const int MAPSIZE = 100;

    // One entry per iteration; used to confirm the parallel write worked.
    var resultMap = new NativeHashMap<int, int>(MAPSIZE, Allocator.TempJob);
    // Records the threadIndex used for each job slice (not asserted on here).
    var threadsSeen = new NativeHashMap<int, bool>(JobsUtility.MaxJobThreadCount, Allocator.TempJob);

    var writerJob = new HashWriterParallelFor
    {
        result = resultMap.AsParallelWriter(),
        threadMap = threadsSeen.AsParallelWriter()
    };

    // Batch size 5, completed inline rather than via a named handle.
    writerJob.Schedule(MAPSIZE, 5).Complete();

    for (int key = 0; key < MAPSIZE; key++)
    {
        Assert.AreEqual(17, resultMap[key]);
    }

    resultMap.Dispose();
    threadsSeen.Dispose();
}