/// <summary>
/// Unity lifecycle entry point: validates inspector-assigned assets, allocates
/// the simulation data, wires up the task pipeline and (optionally) profiler
/// tracks, then seeds the initial cube positions.
/// </summary>
protected void Start()
{
    // Fail fast when required assets were not assigned in the inspector.
    if (mesh == null)
    {
        Debug.LogError("[SampleController] No 'mesh' provided");
        return;
    }
    if (material == null)
    {
        Debug.LogError("[SampleController] No 'material' provided");
        return;
    }

    //Allocate arrays
    cubeData = new CubeData[cubeCount];
    bucketedCubes = new BucketSet<CubeData>(bucketCount: avoidanceBucketCount, maxBucketSize: avoidanceMaxBucketSize);
    // Unity instanced rendering draws at most 1023 instances per batch,
    // hence the ceiling division to size the batch list.
    renderSet = new RenderSet(mesh, material, maxBatches: Mathf.CeilToInt(cubeCount / 1023f));

    //Create misc stuff
    // Reserve one core for the main thread; 0 executors = single-threaded.
    int numExecutors = useMultiThreading ? (System.Environment.ProcessorCount - 1) : 0;
    // FIX: log message typo "Staring" -> "Starting".
    Debug.Log(string.Format("[SampleController] Starting 'TaskManager' with '{0}' executors", numExecutors));
    taskManager = new TaskManager(numExecutors);
    avoidanceHasher = new PositionHasher();
    random = new ShiftRandomProvider();

    //Create tasks
    startTask = new StartFrameTask();
    bucketCubeTask = new BucketCubeTask(bucketedCubes, avoidanceHasher);
    moveCubeTask = new MoveCubeTask(avoidanceHasher, bucketedCubes);
    respawnCubeTask = new RespawnCubeTask(random);
    addToRenderSetTask = new AddToRenderSetTask(renderSet);

    //Setup profiler timeline
    if (profiler != null)
    {
        completeProfilerTrack = profiler.CreateTrack<TimelineTrack>("Blocking main-thread to complete tasks");
        renderProfilerTrack = profiler.CreateTrack<TimelineTrack>("Render instanced");
        bucketCubesProfilerTrack = profiler.CreateTrack<TaskTimelineTrack>("Bucket cubes");
        moveCubesProfilerTrack = profiler.CreateTrack<TaskTimelineTrack>("Move cubes");
        respawnCubesProfilerTrack = profiler.CreateTrack<TaskTimelineTrack>("Respawn cubes");
        addToRenderSetProfilerTrack = profiler.CreateTrack<TaskTimelineTrack>("Creating render batches");
        profiler.StartTimers();
    }

    //Setup initial data
    Rect spawnArea = MathUtils.FromCenterAndSize(Vector2.zero, spawnAreaSize);
    for (int i = 0; i < cubeCount; i++)
    {
        // 999f marks "has not hit target yet" so the respawn logic treats
        // fresh cubes as long-overdue. NOTE(review): confirm against
        // RespawnCubeTask's threshold.
        cubeData[i] = new CubeData
        {
            ID = i,
            Position = random.Inside(spawnArea),
            TimeNotHitTarget1 = 999f,
            TimeNotHitTarget2 = 999f
        }; // FIX: semicolon must terminate the assignment (was misplaced after the loop brace).
    }
}
/// <summary>
/// Adds every value produced by <paramref name="items"/> to the set.
/// </summary>
/// <returns>True when at least one addition changed the set.</returns>
private static bool AddFromEnumerator(ref BucketSet buckets, IEnumerator<AnalysisValue> items, IEqualityComparer<AnalysisValue> comparer) {
    var anyChanged = false;
    while (items.MoveNext()) {
        if (AddOne(ref buckets, items.Current, comparer)) {
            anyChanged = true;
        }
    }
    return anyChanged;
}
/// <summary>
/// Grows the hash table when the load factor threshold has been reached.
/// No-op for an unallocated (zero-capacity) table.
/// </summary>
private static void CheckGrow(ref BucketSet buckets, IEqualityComparer<AnalysisValue> comparer) {
    int capacity = buckets.Capacity;
    if (capacity == 0) {
        return;
    }
    if (buckets.Count < capacity * Load) {
        return;
    }
    // grow the hash table
    EnsureSize(ref buckets, (int)(capacity / Load) * ResizeMultiplier, comparer);
}
/// <summary>
/// Adds a single key, lazily allocating the bucket table on first use and
/// growing it after a successful insert.
/// </summary>
/// <returns>True when the set was changed by the addition.</returns>
private static bool AddOne(ref BucketSet buckets, AnalysisValue key, IEqualityComparer<AnalysisValue> comparer) {
    if (buckets.Buckets == null) {
        buckets = new BucketSet(InitialBucketSize);
    }
    // Mask off the sign bit so the hash is usable as a non-negative index.
    int hash = comparer.GetHashCode(key) & Int32.MaxValue;
    if (!AddOne(ref buckets, key, hash, comparer)) {
        return false;
    }
    CheckGrow(ref buckets, comparer);
    return true;
}
/// <summary>
/// Reclaims buckets whose keys have been collected and, if the requested size
/// still falls outside the current table's range, rehashes all live entries
/// into a new prime-sized table.
/// </summary>
private static void EnsureSize(ref BucketSet buckets, int newSize, IEqualityComparer <AnalysisValue> comparer) {
    // see if we can reclaim collected buckets before growing...
    if (buckets.Capacity == 0) {
        // No table allocated yet: just create one at the requested size.
        buckets = new BucketSet(newSize);
        return;
    }
    // First pass: tombstone entries whose key is no longer alive, shrinking
    // both the live count and the size we actually need.
    // NOTE(review): keys appear to be weak references (IsAlive) -- confirm
    // against the Bucket/AnalysisValue declarations.
    for (int i = 0; i < buckets.Capacity; i++) {
        var key = buckets.Buckets[i].Key;
        if (key != null && !key.IsAlive) {
            buckets.Buckets[i].Key = _removed;
            newSize--;
            buckets.Count--;
        }
    }
    // Only rehash when the (possibly reduced) requested size is larger than
    // the current table, or small enough (< 1/4) to make shrinking worthwhile.
    if (newSize > buckets.Buckets.Length || newSize < buckets.Buckets.Length / 4) {
        newSize = AnalysisDictionary <object, object> .GetPrime(newSize);
        var newBuckets = new BucketSet(newSize);
        for (int i = 0; i < buckets.Buckets.Length; i++) {
            var curBucket = buckets.Buckets[i];
            // Re-insert only live, non-tombstoned entries, reusing the
            // cached hash code to avoid recomputing it.
            if (curBucket.Key != null && curBucket.Key != _removed && curBucket.Key.IsAlive) {
                AddOne(ref newBuckets, curBucket.Key, curBucket.HashCode, comparer);
            }
        }
        buckets = newBuckets;
    }
}
/// <summary>
/// Creates a hash set pre-sized to hold <paramref name="count"/> items
/// without immediate growth, using the supplied comparer.
/// </summary>
public AnalysisHashSet(int count, IEqualityComparer<AnalysisValue> comparer) {
    _comparer = comparer;
    // Pick a prime capacity large enough that 'count' entries stay under the
    // load factor.
    int initialCapacity = AnalysisDictionary<object, object>.GetPrime((int)(count / Load + 2));
    Buckets = new BucketSet(initialCapacity);
}
/// <summary>
/// Creates a new hash set pre-sized to hold <paramref name="count"/> items
/// without immediate growth, using the default object comparer.
/// </summary>
/// <remarks>
/// FIX: the previous summary ("no buckets") was wrong -- this overload
/// allocates a prime-sized bucket table based on <paramref name="count"/>.
/// </remarks>
public AnalysisHashSet(int count) {
    Buckets = new BucketSet(AnalysisDictionary<object, object>.GetPrime((int)(count / Load + 2)));
    _comparer = ObjectComparer.Instance;
}
/// <summary>
/// Add helper which adds the given key/value (where the key is not null) with
/// a pre-computed hash code.
/// </summary>
/// <returns>
/// True when the set changed (new insert, or merge-and-replace via a
/// UnionComparer); false when an equal key was already present.
/// </returns>
private static bool AddOne(ref BucketSet buckets, AnalysisValue /*!*/ key, int hc, IEqualityComparer <AnalysisValue> comparer) {
    Debug.Assert(key != null);
    // Open addressing: probe from the hashed slot until we find the key,
    // an empty slot, or wrap all the way around.
    int index = hc % buckets.Capacity;
    int startIndex = index;
    // First reusable slot (empty, tombstoned, or dead key) seen while probing.
    int addIndex = -1;
    for (; ;) {
        Bucket cur = buckets.Buckets[index];
        var existingKey = cur.Key;
        if (existingKey == null || existingKey == _removed || !existingKey.IsAlive) {
            // Reusable slot: remember the first one, but only a truly empty
            // slot ends the probe -- a tombstone may hide a later match.
            if (addIndex == -1) {
                addIndex = index;
            }
            if (cur.Key == null) {
                break;
            }
        } else if (Object.ReferenceEquals(key, existingKey)) {
            // Same instance already stored; nothing to do.
            return(false);
        } else if (cur.HashCode == hc && comparer.Equals(key, existingKey)) {
            // An equal key is present.  With a UnionComparer, equality may
            // still require merging the two values into a combined one.
            var uc = comparer as UnionComparer;
            if (uc == null) {
                return(false);
            }
            bool changed;
            var newKey = uc.MergeTypes(existingKey, key, out changed);
            if (!changed) {
                return(false);
            }
            // merging values has changed the one we should store, so
            // replace it.
            var newHc = comparer.GetHashCode(newKey) & Int32.MaxValue;
            if (newHc != buckets.Buckets[index].HashCode) {
                // The hash code should not change, but if it does, we
                // need to keep things consistent
                Debug.Fail("Hash code changed when merging AnalysisValues");
            }
            // Tombstone the old entry, then re-insert the merged key.
            Thread.MemoryBarrier();
            buckets.Buckets[index].Key = _removed;
            buckets.Count -= 1;
            return(AddOne(ref buckets, newKey, newHc, comparer));
        }
        index = ProbeNext(buckets.Buckets, index);
        // NOTE(review): if the probe wraps with every slot live, addIndex
        // stays -1 and the indexing below would throw; presumably the load
        // factor (CheckGrow) guarantees a free slot -- confirm.
        if (index == startIndex) {
            break;
        }
    }
    // Count bookkeeping: inserting into an empty/tombstoned slot adds an
    // entry; overwriting a dead key is a remove+add with no net change.
    if (buckets.Buckets[addIndex].Key == null || buckets.Buckets[addIndex].Key == _removed) {
        // Removal has been counted already
        buckets.Count += 1;
    } else if (!buckets.Buckets[addIndex].Key.IsAlive) {
        // Remove/add means no change to count
    } else {
        buckets.Count += 1;
    }
    buckets.Buckets[addIndex].HashCode = hc;
    Thread.MemoryBarrier();
    // we write the key last so that we can check for null to
    // determine if a bucket is available.
    buckets.Buckets[addIndex].Key = key;
    return(true);
}
/// <summary>
/// Captures the bucket set and position hasher this task operates on.
/// </summary>
public BucketCubeTask(BucketSet<CubeData> bucketSet, PositionHasher hasher) {
    this.hasher = hasher;
    this.bucketSet = bucketSet;
}
/// <summary>
/// Captures the position hasher and the cube lookup this task operates on.
/// </summary>
public MoveCubeTask(PositionHasher hasher, BucketSet<CubeData> cubeLookup) {
    this.cubeLookup = cubeLookup;
    this.hasher = hasher;
}