/// <summary>
/// Returns a new Query where each new element in the sequence is an instance of the PrevPair struct.
/// The value field of the pair will point to an element in the current sequence, and the prev field
/// will point to an element that comes 'offset' elements before the current element.  If
/// 'includeStart' is true, the sequence will also include elements that have no previous element.
///
/// For example, with an offset of 2 and with includeStart as true, the sequence:
///   A, B, C, D, E, F
/// is transformed into:
///   (A,_) (B,_) (C,A) (D,B) (E,C) (F,D)
/// </summary>
public static Query<PrevPair<T>> WithPrevious<T>(this Query<T> query, int offset = 1, bool includeStart = false) {
  //Mirror WithNext: treat a negative offset as its magnitude so the two
  //operators behave consistently instead of producing a negative loop bound.
  offset = Math.Abs(offset);

  using (var slice = query.Deconstruct()) {
    //Skipping the start shortens the result by 'offset', clamped so that a
    //sequence shorter than 'offset' yields an empty result.
    int resultCount = includeStart ? slice.Count : Mathf.Max(0, slice.Count - offset);
    var dstArray = ArrayPool<PrevPair<T>>.Spawn(resultCount);

    int dstIndex = 0;

    if (includeStart) {
      //The first 'offset' elements have no previous element.
      for (int i = 0; i < Mathf.Min(slice.Count, offset); i++) {
        dstArray[dstIndex++] = new PrevPair<T>() {
          value = slice[i],
          prev = default(T),
          hasPrev = false
        };
      }
    }

    //Every remaining element is paired with the element 'offset' behind it.
    for (int i = offset; i < slice.Count; i++) {
      dstArray[dstIndex++] = new PrevPair<T>() {
        value = slice[i],
        prev = slice[i - offset],
        hasPrev = true
      };
    }

    return new Query<PrevPair<T>>(dstArray, resultCount);
  }
}
/// <summary>
/// Returns a new Query representing the current Query where each element has been
/// mapped onto a new Query, and then all Queries are concatenated into a
/// single long sequence.
///
/// For example:
///   (1, 2, 3, 4).Query().SelectMany(count => new List().Fill(count, count.ToString()).Query())
/// Would result in:
///   ("1", "2", "2", "3", "3", "3", "4", "4", "4", "4")
/// </summary>
public static Query<K> SelectMany<T, K>(this Query<T> query, Func<T, Query<K>> selector) {
  using (var slice = query.Deconstruct()) {
    //Deconstruct every inner Query up front so the total output length is
    //known before the destination buffer is spawned.
    var innerSlices = ArrayPool<Query<K>.QuerySlice>.Spawn(slice.Count);
    int combinedLength = 0;
    for (int srcIndex = 0; srcIndex < slice.Count; srcIndex++) {
      innerSlices[srcIndex] = selector(slice[srcIndex]).Deconstruct();
      combinedLength += innerSlices[srcIndex].Count;
    }

    //Copy each inner slice into the destination, disposing it as soon as its
    //contents have been consumed.
    var result = ArrayPool<K>.Spawn(combinedLength);
    int writeIndex = 0;
    for (int srcIndex = 0; srcIndex < slice.Count; srcIndex++) {
      int innerCount = innerSlices[srcIndex].Count;
      Array.Copy(innerSlices[srcIndex].BackingArray, 0, result, writeIndex, innerCount);
      writeIndex += innerCount;
      innerSlices[srcIndex].Dispose();
    }

    ArrayPool<Query<K>.QuerySlice>.Recycle(innerSlices);

    return new Query<K>(result, combinedLength);
  }
}
/// <summary>
/// Returns a Query containing a single element.
/// </summary>
public static Query<T> Single<T>(T value) {
  var backing = ArrayPool<T>.Spawn(1);
  backing[0] = value;
  return new Query<T>(backing, 1);
}
/// <summary>
/// Constructs a new query of the given collection.
/// </summary>
public Query(ICollection<T> collection) {
  int elementCount = collection.Count;

  _array = ArrayPool<T>.Spawn(elementCount);
  _count = elementCount;
  collection.CopyTo(_array, 0);

  _validator = Validator.Spawn();
}
/// <summary>
/// Constructs a new Query containing a copy of the elements of the
/// given ReadonlyList.
/// </summary>
public static Query<T> Query<T>(this ReadonlyList<T> rList) {
  int elementCount = rList.Count;
  var backing = ArrayPool<T>.Spawn(elementCount);

  for (int index = 0; index < elementCount; index++) {
    backing[index] = rList[index];
  }

  return new Query<T>(backing, elementCount);
}
/// <summary>
/// Returns a Query containing a single element repeated
/// 0 or more times.
/// </summary>
public static Query<T> Repeat<T>(T value, int times) {
  //Match the guard used by the Query.Repeat(query, times) operator so both
  //overloads fail the same way, rather than relying on ArrayPool to reject
  //a negative size.
  if (times < 0) {
    throw new ArgumentException("The repetition count must be non-negative.");
  }

  var array = ArrayPool<T>.Spawn(times);
  for (int i = 0; i < times; i++) {
    array[i] = value;
  }
  return new Query<T>(array, times);
}
/// <summary>
/// Returns a new Query operation representing the concatenation of the current Query to
/// the argument ICollection.
///
/// For example
///   (A, B, C, D).Query().Concat((E, F, G, H))
/// would result in
///   (A, B, C, D, E, F, G, H)
/// </summary>
public static Query<T> Concat<T>(this Query<T> query, ICollection<T> collection) {
  using (var slice = query.Deconstruct()) {
    int combinedCount = slice.Count + collection.Count;
    var combined = ArrayPool<T>.Spawn(combinedCount);

    //Query elements first, then the collection appended after them.
    Array.Copy(slice.BackingArray, combined, slice.Count);
    collection.CopyTo(combined, slice.Count);

    return new Query<T>(combined, combinedCount);
  }
}
/// <summary>
/// Constructs a query that is an exact copy of another query.
/// </summary>
public Query(Query<T> other) {
  //Ensure the source query has not already been consumed.
  other._validator.Validate();

  int elementCount = other._count;
  _array = ArrayPool<T>.Spawn(elementCount);
  _count = elementCount;
  Array.Copy(other._array, _array, elementCount);

  _validator = Validator.Spawn();
}
/// <summary>
/// Returns a new Query representing the concatenation of the current Query to
/// the argument Query.
///
/// For example
///   (A, B, C, D).Query().Concat((E, F, G, H))
/// would result in
///   (A, B, C, D, E, F, G, H)
/// </summary>
public static Query<T> Concat<T>(this Query<T> query, Query<T> other) {
  using (var firstSlice = query.Deconstruct())
  using (var secondSlice = other.Deconstruct()) {
    int combinedCount = firstSlice.Count + secondSlice.Count;
    var combined = ArrayPool<T>.Spawn(combinedCount);

    //First query's elements, followed immediately by the second query's.
    Array.Copy(firstSlice.BackingArray, combined, firstSlice.Count);
    Array.Copy(secondSlice.BackingArray, 0, combined, firstSlice.Count, secondSlice.Count);

    return new Query<T>(combined, combinedCount);
  }
}
/// <summary>
/// Returns a new Query representing the current Query mapped element-by-element
/// into a new Query by a mapping operation.
///
/// For example:
///   (1, 2, 3, 4).Query().Select(num => (num * 2).ToString())
/// Would result in:
///   ("2", "4", "6", "8")
/// </summary>
public static Query<K> Select<T, K>(this Query<T> query, Func<T, K> selector) {
  using (var slice = query.Deconstruct()) {
    int elementCount = slice.Count;
    var mapped = ArrayPool<K>.Spawn(elementCount);

    for (int index = 0; index < elementCount; index++) {
      mapped[index] = selector(slice[index]);
    }

    return new Query<K>(mapped, elementCount);
  }
}
/// <summary>
/// Returns a new Query representing the current Query mapped element-by-element
/// into a new Query by a mapping operation.  This variant accepts four auxiliary
/// argument slots for the selector function, to prevent allocation.
///
/// For example:
///   (1, 2, 3, 4).Query().Select(10, 0, 0, 0, (num, a, b, c, d) => (num * 2 + a + b + c + d).ToString())
/// Would result in:
///   ("12", "14", "16", "18")
/// </summary>
public static Query<K> Select<T, Aux1, Aux2, Aux3, Aux4, K>(this Query<T> query, Aux1 aux1, Aux2 aux2, Aux3 aux3, Aux4 aux4, Func<T, Aux1, Aux2, Aux3, Aux4, K> selectorWithArg) {
  using (var slice = query.Deconstruct()) {
    int elementCount = slice.Count;
    var mapped = ArrayPool<K>.Spawn(elementCount);

    //The aux values are threaded through on every call so the caller's
    //selector can be a non-capturing (allocation-free) delegate.
    for (int index = 0; index < elementCount; index++) {
      mapped[index] = selectorWithArg(slice[index], aux1, aux2, aux3, aux4);
    }

    return new Query<K>(mapped, elementCount);
  }
}
public void RecycleArray() {
  //Prime the pool with a recycled array of the requested size.
  var pooled = ArrayPool<byte>.Spawn(4);
  ArrayPool<byte>.Recycle(pooled);

  //A subsequent request of the same size should be satisfied from the pool,
  //so total managed memory must not grow across the second Spawn.
  var memBefore = GC.GetTotalMemory(true);
  ArrayPool<byte>.Spawn(4);
  var memAfter = GC.GetTotalMemory(true);

  Assert.IsTrue(memAfter <= memBefore);
}
/// <summary>
/// Returns a new Query that represents the combination of this query sequence with another Query.
/// The two sequences are combined element-by-element using a selector function.
/// The resulting sequence has a length equal to the smaller of the two sequences.
///
/// For example:
///   sequenceA = (A, B, C, D)
///   sequenceB = (E, F, G, H)
///   sequenceA.Query().Zip(sequenceB.Query(), (a, b) => a + b)
/// Would result in:
///   (AE, BF, CG, DH)
/// </summary>
public static Query<V> Zip<T, K, V>(this Query<T> query, Query<K> otherQuery, Func<T, K, V> selector) {
  using (var sliceA = query.Deconstruct())
  using (var sliceB = otherQuery.Deconstruct()) {
    //The output stops at the end of the shorter input.
    int pairCount = Mathf.Min(sliceA.Count, sliceB.Count);
    var combined = ArrayPool<V>.Spawn(pairCount);

    for (int index = 0; index < pairCount; index++) {
      combined[index] = selector(sliceA[index], sliceB[index]);
    }

    return new Query<V>(combined, pairCount);
  }
}
/// <summary>
/// Constructs a new Query from the given two dimensional array.  Elements are
/// visited in row-major order (all of row 0, then row 1, ...).
/// </summary>
public static Query<T> Query<T>(this T[,] array) {
  //Hoist the dimension lookups: the original re-evaluated GetLength in every
  //loop-condition check and twice more when building the result.
  int rows = array.GetLength(0);
  int cols = array.GetLength(1);
  int total = rows * cols;

  var dst = ArrayPool<T>.Spawn(total);
  int dstIndex = 0;
  for (int i = 0; i < rows; i++) {
    for (int j = 0; j < cols; j++) {
      dst[dstIndex++] = array[i, j];
    }
  }

  return new Query<T>(dst, total);
}
/// <summary>
/// Returns a new Query representing all of the elements paired with their index that
/// they are located within the query.  This can be useful if you want to retrieve
/// the original index later.
///
/// For example:
///   (A, B, C).Query().WithIndices()
/// Would result in:
///   ((0, A), (1, B), (2, C))
/// </summary>
public static Query<IndexedValue<T>> WithIndices<T>(this Query<T> query) {
  using (var slice = query.Deconstruct()) {
    int elementCount = slice.Count;
    var indexed = ArrayPool<IndexedValue<T>>.Spawn(elementCount);

    for (int i = 0; i < elementCount; i++) {
      indexed[i] = new IndexedValue<T>() {
        index = i,
        value = slice[i]
      };
    }

    return new Query<IndexedValue<T>>(indexed, elementCount);
  }
}
/// <summary>
/// Returns a new Query representing the current Query repeated a number of
/// times.
///
/// For example:
///   (1, 2, 3).Query().Repeat(3)
/// Would result in:
///   (1, 2, 3, 1, 2, 3, 1, 2, 3)
/// </summary>
public static Query<T> Repeat<T>(this Query<T> query, int times) {
  if (times < 0) {
    throw new ArgumentException("The repetition count must be non-negative.");
  }

  using (var slice = query.Deconstruct()) {
    int blockLength = slice.Count;
    var repeated = ArrayPool<T>.Spawn(blockLength * times);

    //Copy the whole source once per repetition, each at its own offset.
    for (int rep = 0; rep < times; rep++) {
      Array.Copy(slice.BackingArray, 0, repeated, rep * blockLength, blockLength);
    }

    return new Query<T>(repeated, blockLength * times);
  }
}
/// <summary>
/// Returns a new Query representing only the items of the current Query that
/// are of a specific type.
///
/// For example
///   ("A", 1, null, 5.0f, 900, "hello").Query().OfType(typeof(string))
/// would result in
///   ("A", "hello")
/// </summary>
public static Query<T> OfType<T>(this Query<T> query, Type type) {
  using (var slice = query.Deconstruct()) {
    var filtered = ArrayPool<T>.Spawn(slice.Count);
    int filteredCount = 0;

    //Null elements are always dropped; everything else is kept when its
    //runtime type is assignable to the requested type.
    for (int index = 0; index < slice.Count; index++) {
      if (slice[index] != null && type.IsAssignableFrom(slice[index].GetType())) {
        filtered[filteredCount++] = slice[index];
      }
    }

    return new Query<T>(filtered, filteredCount);
  }
}
//Certain operators cannot be implemented as extension methods due to the way
//the generic arguments are to be consumed by the user, so there are implemented
//directly here in the Query class.
#region DIRECT IMPLEMENTED OPERATORS

/// <summary>
/// Returns a new Query representing only the items of the current Query that
/// are of a specific type.
///
/// For example
///   ("A", 1, null, 5.0f, 900, "hello").Query().OfType<string>()
/// would result in
///   ("A", "hello")
/// </summary>
public Query<K> OfType<K>() where K : T {
  _validator.Validate();

  var matches = ArrayPool<K>.Spawn(_count);
  int matchCount = 0;
  for (int index = 0; index < _count; index++) {
    if (_array[index] is K) {
      matches[matchCount++] = (K)_array[index];
    }
  }

  //This query is consumed by the operation; release it before handing back
  //the filtered result.
  Dispose();
  return new Query<K>(matches, matchCount);
}
/// <summary>
/// Returns a new Query that represents the combination of this query sequence with a Collection.
/// The two sequences are combined element-by-element using a selector function.
/// The resulting sequence has a length equal to the smaller of the two sequences.
///
/// For example:
///   sequenceA = (A, B, C, D)
///   sequenceB = (E, F, G, H)
///   sequenceA.Query().Zip(sequenceB, (a, b) => a + b)
/// Would result in:
///   (AE, BF, CG, DH)
/// </summary>
public static Query<V> Zip<T, K, V>(this Query<T> query, ICollection<K> collection, Func<T, K, V> selector) {
  using (var slice = query.Deconstruct()) {
    //The output stops at the end of the shorter input.
    int pairCount = Mathf.Min(slice.Count, collection.Count);
    var combined = ArrayPool<V>.Spawn(pairCount);

    //ICollection exposes no indexer, so stage it in a pooled scratch array.
    var scratch = ArrayPool<K>.Spawn(collection.Count);
    collection.CopyTo(scratch, 0);

    for (int index = 0; index < pairCount; index++) {
      combined[index] = selector(slice[index], scratch[index]);
    }

    ArrayPool<K>.Recycle(scratch);
    return new Query<V>(combined, pairCount);
  }
}
/// <summary>
/// Returns a new Query representing the current Query where each element has been
/// mapped onto a new Collection, and then all Collections are concatenated into a
/// single long sequence.
///
/// For example:
///   (1, 2, 3, 4).Query().SelectMany(count => new List().Fill(count, count.ToString()))
/// Would result in:
///   ("1", "2", "2", "3", "3", "3", "4", "4", "4", "4")
/// </summary>
public static Query<K> SelectMany<T, K>(this Query<T> query, Func<T, ICollection<K>> selector) {
  using (var slice = query.Deconstruct()) {
    //Invoke the selector exactly once per element and cache the results
    //(mirrors the Query-returning SelectMany overload).  The previous
    //implementation called the selector twice per element -- once to size
    //the buffer and once to copy -- doubling the work and risking a buffer
    //overrun if the selector is not pure.
    var collections = ArrayPool<ICollection<K>>.Spawn(slice.Count);
    int totalCount = 0;
    for (int i = 0; i < slice.Count; i++) {
      collections[i] = selector(slice[i]);
      totalCount += collections[i].Count;
    }

    var dstArray = ArrayPool<K>.Spawn(totalCount);
    int targetIndex = 0;
    for (int i = 0; i < slice.Count; i++) {
      collections[i].CopyTo(dstArray, targetIndex);
      targetIndex += collections[i].Count;
      collections[i] = null;  //don't keep the caller's collections alive inside the pool
    }
    ArrayPool<ICollection<K>>.Recycle(collections);

    return new Query<K>(dstArray, totalCount);
  }
}
/// <summary>
/// Returns a new Query where each new element in the sequence is an instance of the
/// NextPair struct.  The value field of the pair will point to an element in the
/// current sequence, and the next field will point to an element that comes
/// 'offset' elements after the current element.  If 'includeEnd' is true, the
/// sequence will also include elements that have no next element.
///
/// For example, with an offset of 2 and with includeEnd as true, the sequence:
///   A, B, C, D, E, F
/// is transformed into:
///   (A,C) (B,D) (C,E) (D,F) (E,_) (F,_)
/// </summary>
public static Query<NextPair<T>> WithNext<T>(this Query<T> query, int offset = 1, bool includeEnd = false) {
  //A negative offset is treated the same as its magnitude.
  offset = Math.Abs(offset);

  using (var slice = query.Deconstruct()) {
    //Skipping the end shortens the result by 'offset', clamped so that a
    //sequence shorter than 'offset' yields an empty result.
    int resultCount = includeEnd ? slice.Count : Mathf.Max(0, slice.Count - offset);
    var dstArray = ArrayPool<NextPair<T>>.Spawn(resultCount);

    int dstIndex = 0;

    //Every element that has a partner 'offset' elements ahead of it.
    for (int i = 0; i < slice.Count - offset; i++) {
      dstArray[dstIndex++] = new NextPair<T>() {
        value = slice[i],
        next = slice[i + offset],
        hasNext = true
      };
    }

    if (includeEnd) {
      //The final 'offset' elements have no next element.
      for (int i = slice.Count - offset; i < slice.Count; i++) {
        dstArray[dstIndex++] = new NextPair<T>() {
          value = slice[i],
          next = default(T),
          hasNext = false
        };
      }
    }

    return(new Query<NextPair<T>>(dstArray, resultCount));
  }
}
public void RequestArray() {
  //Requesting 4 bytes is expected to yield an array of length 8 -- the pool
  //hands back a larger bucket than the exact requested size.
  var pooled = ArrayPool<byte>.Spawn(4);

  Assert.AreEqual(8, pooled.Length);
}
/// <summary>
/// Returns a Query containing no elements.
/// </summary>
public static Query<T> Empty<T>() {
  return new Query<T>(ArrayPool<T>.Spawn(0), 0);
}
private void Update() {
  //Manual reset hotkey.
  if (Input.GetKeyDown(resetKeycode)) {
    ResetSimulation();
  }

  //Restart when the loop time limit has been passed, or while respawn mode
  //is active; nothing else runs on a reset frame.
  if ((loop && mainState->time > loopTime) || respawnMode) {
    ResetSimulation();
    return;
  }

  //Reseed Unity's RNG from the frame counter, then draw this frame's seed.
  Random.InitState(Time.frameCount);
  _seed = Random.Range(int.MinValue, int.MaxValue);

  if (simulate) {
    stepSimulation();
  }

  if (_enableTrails) {
    using (new ProfilerSample("Simulate Trails")) {
      if (_profileTrails) {
        //Profiling path: time a fixed number of galaxy steps and log the
        //resulting step rate.
        var stopwatch = new System.Diagnostics.Stopwatch();
        stopwatch.Reset();
        stopwatch.Start();

        const int FRAMES_TO_TEST = 1000;
        for (int i = 0; i < FRAMES_TO_TEST; i++) {
          NBodyC.StepGalaxy(_trailState);
        }

        double seconds = stopwatch.ElapsedTicks / (double)System.Diagnostics.Stopwatch.Frequency;
        double framesPerSecond = FRAMES_TO_TEST / seconds;
        _trailFramerate = framesPerSecond;
        Debug.Log("#####: " + _trailFramerate);
      } else {
        //Normal path: step the trail simulation until it is _maxTrailLength
        //frames ahead of the main simulation, recording each black hole's
        //position, but doing at most _trailUpdateRate steps per frame.
        int simTime = 0;
        while (_trailState->frames < mainState->frames + _maxTrailLength) {
          NBodyC.StepGalaxy(_trailState);

          unsafe {
            BlackHole * src = _trailState->blackHoles;
            TrailRecord trail;
            for (int j = 0; j < _trailState->numBlackHoles; j++, src++) {
              //Lazily create a trail record the first time an id is seen.
              if (!_trails.TryGetValue(src->id, out trail)) {
                trail = new TrailRecord() {
                  startFrame = _trailState->frames
                };
                _trails[src->id] = trail;
              }
              trail.queue.PushBack(src->position);
            }
          }

          simTime++;
          if (simTime >= _trailUpdateRate) {
            break;
          }
        }
      }
    }

    //Build and display trail mesh
    //but only if it's already reached its max length
    if (_trailState->frames - mainState->frames >= _trailShowLength) {
      using (new ProfilerSample("Display Trails")) {
        _trailVerts.Clear();
        _trailIndices.Clear();

        using (new ProfilerSample("Build Vertex List")) {
          //Each trail becomes a polyline: every vertex after the first adds
          //one line segment back to the previous vertex.
          foreach (var pair in _trails) {
            for (int i = 0; i < pair.Value.queue.Count; i++) {
              if (i != 0) {
                _trailIndices.Add(_trailVerts.Count);
                _trailIndices.Add(_trailVerts.Count - 1);
              }
              _trailVerts.Add(pair.Value.queue[i]);
            }
          }
        }

        int[] indexArray;
        using (new ProfilerSample("Build Index Array")) {
          indexArray = ArrayPool<int>.Spawn(_trailIndices.Count);
          for (int i = 0; i < _trailIndices.Count; i++) {
            indexArray[i] = _trailIndices[i];
          }
          //The pooled array may be larger than requested, so zero the tail
          //beyond the valid indices.
          for (int i = _trailIndices.Count; i < indexArray.Length; i++) {
            indexArray[i] = 0;
          }
        }

        using (new ProfilerSample("Upload Mesh")) {
          _trailMesh.Clear();
          _trailMesh.SetVertices(_trailVerts);
          _trailMesh.SetIndices(indexArray, MeshTopology.Lines, 0);
          //The index data now lives in the mesh; return the scratch array.
          ArrayPool<int>.Recycle(indexArray);
          indexArray = null;
        }

        if (_trailResetQueued) {
          ResetTrails(forceReset: true);
        }
      }
    }

    _trailPropertyBlock.SetColor("_Color", _trailColor);
    Graphics.DrawMesh(_trailMesh, galaxyRenderer.displayAnchor.localToWorldMatrix, _trailMaterial, 0, null, 0, _trailPropertyBlock);
  }

  //Render the black holes themselves
  unsafe {
    BlackHole *prevSrc = prevState->blackHoles;
    BlackHole *mainSrc = mainState->blackHoles;

    //Interpolate each position between the previous and current simulation
    //states based on where simulationTime falls between their timestamps.
    float fraction = Mathf.InverseLerp(prevState->time, mainState->time, simulationTime);
    for (int j = 0; j < mainState->numBlackHoles; j++, prevSrc++, mainSrc++) {
      Vector3 position = Vector3.Lerp(prevSrc->position, mainSrc->position, fraction);
      galaxyRenderer.DrawBlackHole(position);
    }
  }
}