// Only for tests
public IUserComparer Create()
{
    // Both the user comparer and the enumeration comparer share the same
    // primitive comparer instance.
    var scalarComparer = new PrimitiveComparer();
    return new UserComparer(scalarComparer, new EnumerationComparer(scalarComparer));
}
/// <summary>
/// Tries to load the entities matching <paramref name="searchConditions"/> from a cached Redis
/// index: a full index first, then (if <paramref name="projectedFields"/> is provided) a
/// projection index. Fires OnHit/OnMiss accordingly.
/// </summary>
/// <param name="searchConditions">Conditions used to derive the index key.</param>
/// <param name="projectedFields">Optional projected field names; enables the projection-index fallback.</param>
/// <param name="orderByFieldName">Optional field to sort the results by; null/empty skips sorting.</param>
/// <param name="orderByDesc">True to sort descending.</param>
/// <returns>The cached documents (sorted if requested), or null on any cache miss or failure.</returns>
public IEnumerable <Document> GetEntities(SearchConditions searchConditions, IEnumerable <string> projectedFields, string orderByFieldName, bool orderByDesc)
{
    string indexKey = this.GetIndexKey(searchConditions);
    string indexListKey = this.GetIndexListKeyInCache();
    try
    {
        Document[] result = null;

        // if a full index was found
        if (this._redis.HashFieldExistsWithRetries(indexListKey, indexKey))
        {
            result = RedisIndex.LoadIndexEntities(this._redis, indexKey, indexListKey);
        }
        else if (projectedFields != null)
        {
            // then trying to use a projection index
            indexKey = this.GetIndexKey(searchConditions, projectedFields);
            if (this._redis.HashFieldExistsWithRetries(indexListKey, indexKey))
            {
                result = RedisProjectionIndex.LoadProjectionIndexEntities(this._redis, indexKey, indexListKey);
            }
        }

        // if we failed to load both full and projection index
        if (result == null)
        {
            this.OnMiss.FireSafely();
            return null;
        }

        this.OnHit.FireSafely();
        this.Log("Index ({0}) with {1} items successfully loaded from cache", indexKey, result.Length);

        if (string.IsNullOrEmpty(orderByFieldName))
        {
            return result;
        }

        // creating a comparer to sort the results
        var comparer = PrimitiveComparer.GetComparer(this._tableEntityType, orderByFieldName);

        // BUGFIX: materialize the sorted sequence with .ToArray() while still inside the try.
        // OrderBy/OrderByDescending are lazily evaluated, so previously any exception thrown
        // during sorting (e.g. a document missing the orderBy field) surfaced at the CALLER's
        // enumeration, escaping this try/catch and bypassing the OnMiss fallback below.
        return orderByDesc
            ? result.OrderByDescending(doc => doc[orderByFieldName].AsPrimitive(), comparer).ToArray()
            : result.OrderBy(doc => doc[orderByFieldName].AsPrimitive(), comparer).ToArray();
    }
    catch (Exception ex)
    {
        this.Log("Failed to load index ({0}) from cache. {1}", indexKey, ex);
        this.OnMiss.FireSafely();
        return null;
    }
}
/// <summary>
/// Tries to load the entities matching <paramref name="searchConditions"/> from a cached index:
/// a healthy full index first, otherwise (when <paramref name="projectedFields"/> is given)
/// a projection index. Fires OnHit/OnMiss accordingly and optionally sorts the result.
/// Returns null when neither index could be loaded.
/// </summary>
public IEnumerable <Document> GetEntities(SearchConditions searchConditions, IEnumerable <string> projectedFields, string orderByFieldName, bool orderByDesc)
{
    // first trying to find a full index
    string indexKey = searchConditions.Key;
    var fullIndex = this.TryLoadHealthyIndex(indexKey);

    Document[] entities;
    if (fullIndex != null)
    {
        entities = this.TryLoadIndexEntities(fullIndex, indexKey);
    }
    else if (projectedFields != null)
    {
        // no full index exists, but there still might be a projection index
        indexKey = this.GetProjectionIndexKey(searchConditions, projectedFields);
        entities = this.TryLoadProjectionIndexEntities(indexKey);
    }
    else
    {
        entities = null;
    }

    // if we failed to load both full and projection index
    if (entities == null)
    {
        this.OnMiss.FireSafely();
        return null;
    }

    this.OnHit.FireSafely();
    this.Log("Index ({0}) with {1} items successfully loaded from cache", indexKey, entities.Length);

    if (string.IsNullOrEmpty(orderByFieldName))
    {
        return entities;
    }

    // creating a comparer to sort the results
    var comparer = PrimitiveComparer.GetComparer(this._tableEntityType, orderByFieldName);
    if (orderByDesc)
    {
        return entities.OrderByDescending(doc => doc[orderByFieldName].AsPrimitive(), comparer);
    }
    return entities.OrderBy(doc => doc[orderByFieldName].AsPrimitive(), comparer);
}
/// <summary>
/// Recursively renders an object graph as an indented string: primitives as (Type, Value) pairs,
/// enumerables as bracketed lists, and other objects as their selected fields/properties.
/// Cycles are broken via <paramref name="visitedObjects"/> (object -> id map); a revisited object
/// is rendered as "[Object id]" instead of being expanded again.
/// </summary>
/// <param name="object">The object to render; null renders as "null".</param>
/// <param name="visitedObjects">Objects already rendered, mapped to their hash-based ids.</param>
/// <param name="currentIndentationLevel">Indentation depth for this node.</param>
/// <returns>The rendered text, or an error marker if rendering threw.</returns>
private string ToString(object @object, IDictionary <object, int> visitedObjects, int currentIndentationLevel)
{
    if (@object == null)
    {
        return this.GetIndentation(currentIndentationLevel) + "null";
    }

    Type type = @object.GetType();
    if (PrimitiveComparer.TypeIsTreatedAsPrimitive(type))
    {
        // Primitive-like values are rendered inline; embedded quotes are escaped.
        return this.GetIndentation(currentIndentationLevel) + $"(Type: {@object.GetType().Name}, Value: \"{@object.ToString().Replace("\"", "\\\"")}\")";
    }

    if (visitedObjects.ContainsKey(@object))
    {
        // Already rendered somewhere above in the graph: emit a back-reference only.
        return this.GetIndentation(currentIndentationLevel) + $"[Object {visitedObjects[@object]}]";
    }

    try
    {
        int id = this.PropertyEqualsCalculator.GetHashCode(@object);
        visitedObjects.Add(@object, id);
        if (EnumerableTools.ObjectIsEnumerable(@object))
        {
            IList <object> objectAsEnumerable = EnumerableTools.ObjectToEnumerable <object>(@object).ToList();

            // PERF: use a StringBuilder instead of repeated string concatenation; the old
            // "result +=" in a loop was O(n^2) in the rendered length. Output is unchanged.
            var builder = new System.Text.StringBuilder();
            builder.Append(this.GetIndentation(currentIndentationLevel)).Append("[").Append(Environment.NewLine);
            int count = objectAsEnumerable.Count;
            for (int i = 0; i < count; i++)
            {
                builder.Append(this.ToString(objectAsEnumerable[i], visitedObjects, currentIndentationLevel + 1));
                if (i < count - 1)
                {
                    builder.Append(",").Append(Environment.NewLine);
                }
            }
            builder.Append(Environment.NewLine).Append(this.GetIndentation(currentIndentationLevel)).Append("]");
            return builder.ToString();
        }
        else
        {
            // Collect the fields and properties chosen by the configured selectors.
            List <(string /*Propertyname*/, object)> propertyValues = new();
            foreach (FieldInfo field in type.GetFields())
            {
                if (this.FieldSelector(field))
                {
                    propertyValues.Add((field.Name, field.GetValue(@object)));
                }
            }
            foreach (PropertyInfo property in type.GetProperties())
            {
                if (this.PropertySelector(property))
                {
                    propertyValues.Add((property.Name, property.GetValue(@object)));
                }
            }

            // PERF: StringBuilder here as well; same output as the original concatenation.
            var builder = new System.Text.StringBuilder();
            builder.Append(this.GetIndentation(currentIndentationLevel)).Append($"{{ (ObjectId: {id}, Type: {type.FullName}) ");
            foreach ((string, object)entry in propertyValues)
            {
                builder.Append(Environment.NewLine)
                    .Append(this.GetIndentation(currentIndentationLevel + 1))
                    .Append(entry.Item1)
                    .Append(": ")
                    .Append(Environment.NewLine)
                    .Append(this.ToString(entry.Item2, visitedObjects, currentIndentationLevel + 1));
            }
            builder.Append(Environment.NewLine).Append(this.GetIndentation(currentIndentationLevel)).Append("}");
            return builder.ToString();
        }
    }
    catch
    {
        // Deliberate best-effort: a dump helper must never throw; report the failure inline.
        return $"[Error while executing {nameof(ToString)} for object of type {type.FullName}]";
    }
}
/// <summary>
/// Finds the lowest-cost split of <paramref name="count"/> subtrees along one axis:
/// sorts the subtrees by centroid, then sweeps every split position evaluating
/// leafCount * boundsMetric for each side and keeping the cheapest.
/// </summary>
/// <param name="boundingBoxes">Per-subtree bounds, addressed through <paramref name="indexMap"/>.</param>
/// <param name="aMerged">Scratch buffer; slot i receives the union of the sorted subtrees 0..i.</param>
/// <param name="centroids">Per-subtree centroid coordinate on this axis; sorted in place.</param>
/// <param name="indexMap">Subtree indices; permuted in place alongside <paramref name="centroids"/>.</param>
/// <param name="count">Number of subtrees; must be at least 2 (asserted below).</param>
/// <param name="splitIndex">Chosen split: sorted entries [0, splitIndex) go to A, [splitIndex, count) to B.</param>
/// <param name="cost">Cost of the chosen split (sum of both sides' leafCount-weighted metrics).</param>
/// <param name="a">Merged bounds of side A at the chosen split.</param>
/// <param name="b">Merged bounds of side B at the chosen split.</param>
/// <param name="leafCountA">Number of subtrees on side A.</param>
/// <param name="leafCountB">Number of subtrees on side B.</param>
unsafe void FindPartitionForAxis(BoundingBox *boundingBoxes, BoundingBox *aMerged, float *centroids, int *indexMap, int count, out int splitIndex, out float cost, out BoundingBox a, out BoundingBox b, out int leafCountA, out int leafCountB)
{
    Debug.Assert(count > 1);
    //TODO: Note that sorting at every level isn't necessary. Like in one of the much older spatial splitting implementations we did, you can just sort once, and thereafter
    //just do an O(n) operation to shuffle leaf data to the relevant location on each side of the partition. That allows us to punt all sort work to a prestep.
    //There, we could throw an optimized parallel sort at it. Or just do the three axes independently, hidden alongside some other work maybe.
    //I suspect the usual problems with parallel sorts would be mitigated somewhat by having three of them going on at the same time- more chances for load balancing.
    //Also note that, at each step, both the above partitioning scheme and the sort result in a contiguous block of data to work on.
    //If you're already doing a gather like that, you might as well throw wider SIMD at the problem. This version only goes up to 3 wide, which is unfortunate for AVX2 and AVX512.
    //With those changes, we can probably get the sweep builder to be faster than v1's insertion builder- it's almost there already.
    //(You'll also want to bench it against similarly simd accelerated binned approaches for use in incremental refinement. If it's not much slower, the extra quality benefits
    //might make it faster on net by virtue of speeding up self-tests, which are a dominant cost.)

    // Sort centroids ascending and permute indexMap identically, so indexMap[i] is the
    // subtree with the i-th smallest centroid on this axis.
    var comparer = new PrimitiveComparer <float>();
    QuickSort.Sort(ref centroids[0], ref indexMap[0], 0, count - 1, ref comparer);

    //Search for the best split.
    //Sweep across from low to high, caching the merged size and leaf count at each point.
    //Index N includes every subtree from 0 to N, inclusive. So index 0 contains subtree 0's information.
    // Prefix pass: aMerged[i] = union of bounds of sorted subtrees 0..i.
    // Only filled up to lastIndex - 1; the sweep below never reads beyond that.
    var lastIndex = count - 1;
    aMerged[0] = boundingBoxes[indexMap[0]];
    for (int i = 1; i < lastIndex; ++i)
    {
        var index = indexMap[i];
        BoundingBox.CreateMerged(ref aMerged[i - 1], ref boundingBoxes[index], out aMerged[i]);
    }

    //Sweep from high to low.
    // bMerged starts as an 'inverted' box (Min = +max, Max = -max) so the first merge
    // yields exactly the first subtree's bounds.
    BoundingBox bMerged = new BoundingBox { Min = new Vector3(float.MaxValue), Max = new Vector3(float.MinValue) };
    cost = float.MaxValue;
    splitIndex = 0;
    a = bMerged;
    b = bMerged;
    leafCountA = 0;
    leafCountB = 0;
    for (int i = lastIndex; i >= 1; --i)
    {
        int aIndex = i - 1;
        // Grow B's bounds with the subtree at sorted position i (suffix union i..lastIndex).
        var subtreeIndex = indexMap[i];
        BoundingBox.CreateMerged(ref bMerged, ref boundingBoxes[subtreeIndex], out bMerged);
        // Split at i: A holds the i subtrees [0, i), B holds the (count - i) subtrees [i, count).
        // Each side's cost is its leaf count weighted by its bounds metric.
        var aCost = i * ComputeBoundsMetric(ref aMerged[aIndex]);
        var bCost = (count - i) * ComputeBoundsMetric(ref bMerged);
        var totalCost = aCost + bCost;
        if (totalCost < cost)
        {
            cost = totalCost;
            splitIndex = i;
            a = aMerged[aIndex];
            b = bMerged;
            leafCountA = i;
            leafCountB = count - i;
        }
    }
}