/// <summary>
/// Compares two comparator slots by delegating to the wrapped comparator,
/// emitting an Executing/Executed trace pair around the call.
/// </summary>
/// <param name="slot1">First comparator slot.</param>
/// <param name="slot2">Second comparator slot.</param>
/// <returns>The wrapped comparator's result for the two slots.</returns>
public override int Compare(int slot1, int slot2)
{
    // Build the trace argument string once; both log calls receive identical text.
    string arguments = $"{nameof(slot1)} = {slot1}, {nameof(slot2)} = {slot2}";
    Executing(nameof(Compare), arguments);

    int outcome = _comparator.Compare(slot1, slot2);

    Executed(nameof(Compare), arguments, outcome);
    return outcome;
}
/// <summary>
/// Orders two collected groups by the configured sort fields, applying each
/// field's reverse multiplier; the first non-zero field comparison decides.
/// When every sort field ties, the group with the lower top doc id wins,
/// matching the doc-id-order visitation of the collector.
/// </summary>
public int Compare(ICollectedSearchGroup o1, ICollectedSearchGroup o2)
{
    int idx = 0;
    while (true)
    {
        FieldComparator comparator = outerInstance.comparators[idx];
        int cmp = outerInstance.reversed[idx] * comparator.Compare(o1.ComparatorSlot, o2.ComparatorSlot);

        if (cmp != 0)
        {
            return cmp;
        }

        if (idx == outerInstance.compIDXEnd)
        {
            // All sort fields tie: break the tie on doc id (earlier doc wins).
            return o1.TopDoc - o2.TopDoc;
        }

        idx++;
    }
}
/// <summary>
/// Compares two hits by handing their doc ids to the wrapped Lucene comparator.
/// </summary>
public override int Compare(ScoreDoc doc1, ScoreDoc doc2) =>
    _luceneComparator.Compare(doc1.Doc, doc2.Doc);
/// <summary>
/// Collects one document for first-pass grouping: either short-circuits when the
/// doc cannot beat the current bottom group, inserts it as a new group (replacing
/// the bottom group once <c>topNGroups</c> have been seen), or updates the group it
/// already belongs to, maintaining <c>orderedGroups</c> and the comparators' bottom slot.
/// </summary>
/// <param name="doc">Segment-relative doc id; combined with <c>docBase</c> to form a global top doc.</param>
public override void Collect(int doc)
{
    //System.out.println("FP.collect doc=" + doc);

    // If orderedGroups != null we already have collected N groups and
    // can short circuit by comparing this document to the bottom group,
    // without having to find what group this document belongs to.

    // Even if this document belongs to a group in the top N, we'll know that
    // we don't have to update that group.

    // Downside: if the number of unique groups is very low, this is
    // wasted effort as we will most likely be updating an existing group.
    if (orderedGroups != null)
    {
        for (int compIDX = 0; ; compIDX++)
        {
            int c = reversed[compIDX] * comparators[compIDX].CompareBottom(doc);
            if (c < 0)
            {
                // Definitely not competitive. So don't even bother to continue
                return;
            }
            else if (c > 0)
            {
                // Definitely competitive.
                break;
            }
            else if (compIDX == compIDXEnd)
            {
                // Here c=0. If we're at the last comparator, this doc is not
                // competitive, since docs are visited in doc Id order, which means
                // this doc cannot compete with any other document in the queue.
                return;
            }
        }
    }

    // TODO: should we add option to mean "ignore docs that
    // don't have the group field" (instead of stuffing them
    // under null group)?
    TGroupValue groupValue = GetDocGroupValue(doc);

    CollectedSearchGroup<TGroupValue> group;
    if (!groupMap.TryGetValue(groupValue, out group))
    {
        // First time we are seeing this group, or, we've seen
        // it before but it fell out of the top N and is now
        // coming back
        if (groupMap.Count < topNGroups)
        {
            // Still in startup transient: we have not
            // seen enough unique groups to start pruning them;
            // just keep collecting them

            // Add a new CollectedSearchGroup:
            CollectedSearchGroup<TGroupValue> sg = new CollectedSearchGroup<TGroupValue>();
            sg.GroupValue = CopyDocGroupValue(groupValue, default(TGroupValue));
            // Each group owns a dedicated comparator slot; during startup the slot
            // index simply tracks insertion order.
            sg.ComparatorSlot = groupMap.Count;
            sg.TopDoc = docBase + doc;
            foreach (FieldComparator fc in comparators)
            {
                fc.Copy(sg.ComparatorSlot, doc);
            }
            groupMap[sg.GroupValue] = sg;

            if (groupMap.Count == topNGroups)
            {
                // End of startup transient: we now have max
                // number of groups; from here on we will drop
                // bottom group when we insert new one:
                BuildSortedSet();
            }
            return;
        }

        // We already tested that the document is competitive, so replace
        // the bottom group with this new group.
        //CollectedSearchGroup<TGroupValue> bottomGroup = orderedGroups.PollLast();
        // NOTE(review): locking on the collection object itself is generally
        // discouraged; confirm whether concurrent access to orderedGroups is
        // actually expected here.
        CollectedSearchGroup<TGroupValue> bottomGroup;
        lock (orderedGroups)
        {
            bottomGroup = orderedGroups.Last();
            orderedGroups.Remove(bottomGroup);
        }
        Debug.Assert(orderedGroups.Count == topNGroups - 1);

        groupMap.Remove(bottomGroup.GroupValue);

        // reuse the removed CollectedSearchGroup
        bottomGroup.GroupValue = CopyDocGroupValue(groupValue, bottomGroup.GroupValue);
        bottomGroup.TopDoc = docBase + doc;

        // Overwrite the evicted group's slot with this doc's sort values.
        foreach (FieldComparator fc in comparators)
        {
            fc.Copy(bottomGroup.ComparatorSlot, doc);
        }

        groupMap[bottomGroup.GroupValue] = bottomGroup;
        orderedGroups.Add(bottomGroup);
        Debug.Assert(orderedGroups.Count == topNGroups);

        // The bottom may have changed after the re-insert; refresh it on every comparator.
        int lastComparatorSlot = orderedGroups.Last().ComparatorSlot;
        foreach (FieldComparator fc in comparators)
        {
            fc.Bottom = lastComparatorSlot;
        }
        return;
    }

    // Update existing group:
    for (int compIDX = 0; ; compIDX++)
    {
        FieldComparator fc = comparators[compIDX];
        // Stage this doc's value in the spare slot so it can be compared (and later
        // swapped in) without clobbering the group's current slot.
        fc.Copy(spareSlot, doc);

        int c = reversed[compIDX] * fc.Compare(group.ComparatorSlot, spareSlot);
        if (c < 0)
        {
            // Definitely not competitive.
            return;
        }
        else if (c > 0)
        {
            // Definitely competitive; set remaining comparators:
            for (int compIDX2 = compIDX + 1; compIDX2 < comparators.Length; compIDX2++)
            {
                comparators[compIDX2].Copy(spareSlot, doc);
            }
            break;
        }
        else if (compIDX == compIDXEnd)
        {
            // Here c=0. If we're at the last comparator, this doc is not
            // competitive, since docs are visited in doc Id order, which means
            // this doc cannot compete with any other document in the queue.
            return;
        }
    }

    // Remove before updating the group since lookup is done via comparators
    // TODO: optimize this
    CollectedSearchGroup<TGroupValue> prevLast;
    if (orderedGroups != null)
    {
        lock (orderedGroups)
        {
            prevLast = orderedGroups.Last();
            orderedGroups.Remove(group);
        }
        Debug.Assert(orderedGroups.Count == topNGroups - 1);
    }
    else
    {
        prevLast = null;
    }

    group.TopDoc = docBase + doc;

    // Swap slots
    int tmp = spareSlot;
    spareSlot = group.ComparatorSlot;
    group.ComparatorSlot = tmp;

    // Re-add the changed group
    if (orderedGroups != null)
    {
        orderedGroups.Add(group);
        Debug.Assert(orderedGroups.Count == topNGroups);

        var newLast = orderedGroups.Last();
        // If we changed the value of the last group, or changed which group was last, then update bottom:
        if (group == newLast || prevLast != newLast)
        {
            foreach (FieldComparator fc in comparators)
            {
                fc.Bottom = newLast.ComparatorSlot;
            }
        }
    }
}