Example #1
        // Pulls out child doc and scores for all join queries:
        private void CopyGroups(OneGroup og)
        {
            // While rare, it's possible top arrays could be too
            // short if join query had null scorer on first
            // segment(s) but then became non-null on later segments
            int numSubScorers = joinScorers.Length;

            if (og.docs.Length < numSubScorers)
            {
                // While rare, this could happen if join query had
                // null scorer on first segment(s) but then became
                // non-null on later segments
                og.docs = ArrayUtil.Grow(og.docs);
            }
            if (og.counts.Length < numSubScorers)
            {
                og.counts = ArrayUtil.Grow(og.counts);
            }
            if (trackScores && og.scores.Length < numSubScorers)
            {
                og.scores = ArrayUtil.Grow(og.scores);
            }

            //System.out.println("\ncopyGroups parentDoc=" + og.doc);
            for (int scorerIDX = 0; scorerIDX < numSubScorers; scorerIDX++)
            {
                ToParentBlockJoinQuery.BlockJoinScorer joinScorer = joinScorers[scorerIDX];
                //System.out.println("  scorer=" + joinScorer);
                if (joinScorer != null && docBase + joinScorer.ParentDoc == og.Doc)
                {
                    og.counts[scorerIDX] = joinScorer.ChildCount;
                    //System.out.println("    count=" + og.counts[scorerIDX]);
                    og.docs[scorerIDX] = joinScorer.SwapChildDocs(og.docs[scorerIDX]);
                    if (Debugging.AssertsEnabled)
                    {
                        Debugging.Assert(og.docs[scorerIDX].Length >= og.counts[scorerIDX], "length={0} vs count={1}", og.docs[scorerIDX].Length, og.counts[scorerIDX]);
                    }
                    //System.out.println("    len=" + og.docs[scorerIDX].length);

                    /*
                     * for(int idx=0;idx<og.counts[scorerIDX];idx++) {
                     * System.out.println("    docs[" + idx + "]=" + og.docs[scorerIDX][idx]);
                     * }
                     */
                    if (trackScores)
                    {
                        //System.out.println("    copy scores");
                        og.scores[scorerIDX] = joinScorer.SwapChildScores(og.scores[scorerIDX]);
                        if (Debugging.AssertsEnabled)
                        {
                            Debugging.Assert(og.scores[scorerIDX].Length >= og.counts[scorerIDX], "length={0} vs count={1}", og.scores[scorerIDX].Length, og.counts[scorerIDX]);
                        }
                    }
                }
                else
                {
                    og.counts[scorerIDX] = 0;
                }
            }
        }
        /// <summary>
        /// Accumulates groups for the BlockJoinQuery specified by its slot.
        /// </summary>
        /// <param name="slot"> Search query's slot </param>
        /// <param name="offset"> Parent docs offset </param>
        /// <param name="maxDocsPerGroup"> Upper bound of documents per group number </param>
        /// <param name="withinGroupOffset"> Offset within each group of child docs </param>
        /// <param name="withinGroupSort"> Sort criteria within groups </param>
        /// <param name="fillSortFields"> Specifies whether to add sort fields or not </param>
        /// <returns> <see cref="ITopGroups{T}"/> for the query specified by slot </returns>
        /// <exception cref="IOException"> if there is a low-level I/O error </exception>
        private ITopGroups<int> AccumulateGroups(int slot, int offset, int maxDocsPerGroup, int withinGroupOffset, Sort withinGroupSort, bool fillSortFields)
        {
            var groups     = new GroupDocs<int>[sortedGroups.Length - offset];
            var fakeScorer = new FakeScorer();

            int totalGroupedHitCount = 0;

            //System.out.println("slot=" + slot);

            for (int groupIdx = offset; groupIdx < sortedGroups.Length; groupIdx++)
            {
                OneGroup og = sortedGroups[groupIdx];
                int      numChildDocs;
                if (slot == -1 || slot >= og.counts.Length)
                {
                    numChildDocs = 0;
                }
                else
                {
                    numChildDocs = og.counts[slot];
                }

                // Number of documents in group should be bounded to prevent redundant memory allocation
                int numDocsInGroup = Math.Max(1, Math.Min(numChildDocs, maxDocsPerGroup));
                //System.out.println("parent doc=" + og.doc + " numChildDocs=" + numChildDocs + " maxDocsPG=" + maxDocsPerGroup);

                // At this point we hold all docs w/ in each group, unsorted; we now sort them:
                ICollector collector;
                if (withinGroupSort == null)
                {
                    //System.out.println("sort by score");
                    // Sort by score
                    if (!trackScores)
                    {
                        throw new ArgumentException("cannot sort by relevance within group: trackScores=false");
                    }
                    collector = TopScoreDocCollector.Create(numDocsInGroup, true);
                }
                else
                {
                    // Sort by fields
                    collector = TopFieldCollector.Create(withinGroupSort, numDocsInGroup, fillSortFields, trackScores, trackMaxScore, true);
                }

                collector.SetScorer(fakeScorer);
                collector.SetNextReader(og.readerContext);
                for (int docIdx = 0; docIdx < numChildDocs; docIdx++)
                {
                    //System.out.println("docIDX=" + docIDX + " vs " + og.docs[slot].length);
                    int doc = og.docs[slot][docIdx];
                    fakeScorer.doc = doc;
                    if (trackScores)
                    {
                        fakeScorer._score = og.scores[slot][docIdx];
                    }
                    collector.Collect(doc);
                }
                totalGroupedHitCount += numChildDocs;

                object[] groupSortValues;

                if (fillSortFields)
                {
                    groupSortValues = new object[comparers.Length];
                    for (int sortFieldIdx = 0; sortFieldIdx < comparers.Length; sortFieldIdx++)
                    {
                        groupSortValues[sortFieldIdx] = comparers[sortFieldIdx][og.Slot];
                    }
                }
                else
                {
                    groupSortValues = null;
                }

                TopDocs topDocs;
                if (withinGroupSort == null)
                {
                    var tempCollector = (TopScoreDocCollector)collector;
                    topDocs = tempCollector.GetTopDocs(withinGroupOffset, numDocsInGroup);
                }
                else
                {
                    var tempCollector = (TopFieldCollector)collector;
                    topDocs = tempCollector.GetTopDocs(withinGroupOffset, numDocsInGroup);
                }

                groups[groupIdx - offset] = new GroupDocs<int>(og.Score, topDocs.MaxScore, numChildDocs, topDocs.ScoreDocs, og.Doc, groupSortValues);
            }

            return new TopGroups<int>(new TopGroups<int>(sort.GetSort(), withinGroupSort == null ? null : withinGroupSort.GetSort(), 0, totalGroupedHitCount, groups, maxScore), totalHitCount);
        }
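        // The methods above belong to a ToParentBlockJoinCollector-style collector:
        // CopyGroups stashes each join query's child docs/scores per parent hit, and
        // AccumulateGroups later assembles one join query's slot into ITopGroups<int>.
        // Below is a minimal usage sketch, not part of the collector itself; it assumes
        // the Lucene.NET 4.8 block-join API, and "searcher", "parentQuery" and
        // "childJoinQuery" are hypothetical inputs supplied by the caller.
        private static ITopGroups<int> SearchChildGroups(IndexSearcher searcher, Query parentQuery, ToParentBlockJoinQuery childJoinQuery)
        {
            // Keep the top 10 parent hits; track scores so children can be ranked by
            // relevance within each group (the withinGroupSort == null path above).
            var collector = new ToParentBlockJoinCollector(Sort.RELEVANCE, 10, true, true);
            searcher.Search(parentQuery, collector);

            // Top 5 child docs per parent for this join query; internally this walks
            // the slots filled by CopyGroups and calls AccumulateGroups.
            return collector.GetTopGroups(childJoinQuery, null, 0, 5, 0, true);
        }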
        public virtual void Collect(int parentDoc)
        {
            //System.out.println("\nC parentDoc=" + parentDoc);
            totalHitCount++;

            float score = float.NaN;

            if (trackMaxScore)
            {
                score    = scorer.GetScore();
                maxScore = Math.Max(maxScore, score);
            }

            // TODO: we could sweep all joinScorers here and
            // aggregate total child hit count, so we can fill this
            // in getTopGroups (we wire it to 0 now)

            if (queueFull)
            {
                //System.out.println("  queueFull");
                // Fastmatch: return if this hit is not competitive
                for (int i = 0; ; i++)
                {
                    int c = reverseMul[i] * comparers[i].CompareBottom(parentDoc);
                    if (c < 0)
                    {
                        // Definitely not competitive.
                        //System.out.println("    skip");
                        return;
                    }
                    if (c > 0)
                    {
                        // Definitely competitive.
                        break;
                    }
                    if (i == compEnd)
                    {
                        // Here c=0. If we're at the last comparer, this doc is not
                        // competitive, since docs are visited in doc Id order, which means
                        // this doc cannot compete with any other document in the queue.
                        //System.out.println("    skip");
                        return;
                    }
                }

                //System.out.println("    competes!  doc=" + (docBase + parentDoc));

                // This hit is competitive - replace bottom element in queue & adjustTop
                for (int i = 0; i < comparers.Length; i++)
                {
                    comparers[i].Copy(bottom.Slot, parentDoc);
                }
                if (!trackMaxScore && trackScores)
                {
                    score = scorer.GetScore();
                }
                bottom.Doc           = docBase + parentDoc;
                bottom.readerContext = currentReaderContext;
                bottom.Score         = score;
                CopyGroups(bottom);
                bottom = queue.UpdateTop();

                for (int i = 0; i < comparers.Length; i++)
                {
                    comparers[i].SetBottom(bottom.Slot);
                }
            }
            else
            {
                // Startup transient: queue is not yet full:
                int comparerSlot = totalHitCount - 1;

                // Copy hit into queue
                for (int i = 0; i < comparers.Length; i++)
                {
                    comparers[i].Copy(comparerSlot, parentDoc);
                }
                //System.out.println("  startup: new OG doc=" + (docBase+parentDoc));
                if (!trackMaxScore && trackScores)
                {
                    score = scorer.GetScore();
                }
                OneGroup og = new OneGroup(comparerSlot, docBase + parentDoc, score, joinScorers.Length, trackScores);
                og.readerContext = currentReaderContext;
                CopyGroups(og);
                bottom    = queue.Add(og);
                queueFull = totalHitCount == numParentHits;
                if (queueFull)
                {
                    // End of startup transient: queue just filled up:
                    for (int i = 0; i < comparers.Length; i++)
                    {
                        comparers[i].SetBottom(bottom.Slot);
                    }
                }
            }
        }
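        // Collect above is Lucene's standard bounded top-N pattern: while the queue is
        // filling, every parent hit gets its own comparer slot; once full, a hit must
        // beat the current bottom before it overwrites that slot and UpdateTop re-heapifies.
        // The same idea, reduced to a self-contained illustration (not the collector's
        // code; uses .NET 6+ PriorityQueue, which dequeues/enqueues instead of mutating
        // the bottom entry in place the way Lucene's priority queue does):
        private static List<int> KeepTopN(IEnumerable<int> hits, int n)
        {
            // Min-heap keyed by the value itself, so Peek() is the current "bottom".
            var queue = new PriorityQueue<int, int>();
            foreach (int hit in hits)
            {
                if (queue.Count < n)
                {
                    queue.Enqueue(hit, hit);        // startup transient: queue not yet full
                }
                else if (hit > queue.Peek())        // fastmatch: skip non-competitive hits
                {
                    queue.EnqueueDequeue(hit, hit); // replace the bottom element
                }
            }
            var result = new List<int>(queue.Count);
            while (queue.Count > 0)
            {
                result.Add(queue.Dequeue());        // ascending order; reverse for best-first
            }
            return result;
        }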
        /// <summary>
        /// Returns the grouped results.  Returns null if the
        /// number of groups collected is &lt;= groupOffset.
        ///
        /// <para>
        /// <b>NOTE</b>: This collector is unable to compute
        /// the groupValue per group so it will always be null.
        /// This is normally not a problem, as you can obtain the
        /// value just like you obtain other values for each
        /// matching document (eg, via stored fields, via
        /// FieldCache, etc.)
        /// </para>
        /// </summary>
        /// <typeparam name="TGroupValue">The expected return type for group value</typeparam>
        /// <param name="withinGroupSort">
        /// The <see cref="Sort"/> used to sort
        /// documents within each group.  Passing null is
        /// allowed, to sort by relevance.
        /// </param>
        /// <param name="groupOffset">Which group to start from</param>
        /// <param name="withinGroupOffset">
        /// Which document to start from within each group
        /// </param>
        /// <param name="maxDocsPerGroup">
        /// How many top documents to keep within each group.
        /// </param>
        /// <param name="fillSortFields">
        /// If true then the Comparable values for the sort fields will be set
        /// </param>
        public virtual ITopGroups<TGroupValue> GetTopGroups<TGroupValue>(Sort withinGroupSort, int groupOffset, int withinGroupOffset, int maxDocsPerGroup, bool fillSortFields)
        {
            //if (queueFull) {
            //System.out.println("getTopGroups groupOffset=" + groupOffset + " topNGroups=" + topNGroups);
            //}
            if (subDocUpto != 0)
            {
                ProcessGroup();
            }
            if (groupOffset >= groupQueue.Count)
            {
                return null;
            }
            int totalGroupedHitCount = 0;

            FakeScorer fakeScorer = new FakeScorer();

            float maxScore = float.Epsilon; // LUCENENET: Epsilon in .NET is the same as MIN_VALUE in Java

            GroupDocs<TGroupValue>[] groups = new GroupDocs<TGroupValue>[groupQueue.Count - groupOffset];
            for (int downTo = groupQueue.Count - groupOffset - 1; downTo >= 0; downTo--)
            {
                OneGroup og = groupQueue.Pop();

                // At this point we hold all docs w/ in each group,
                // unsorted; we now sort them:
                ITopDocsCollector collector;
                if (withinGroupSort == null)
                {
                    // Sort by score
                    if (!needsScores)
                    {
                        throw new ArgumentException("cannot sort by relevance within group: needsScores=false");
                    }
                    collector = TopScoreDocCollector.Create(maxDocsPerGroup, true);
                }
                else
                {
                    // Sort by fields
                    collector = TopFieldCollector.Create(withinGroupSort, maxDocsPerGroup, fillSortFields, needsScores, needsScores, true);
                }

                collector.SetScorer(fakeScorer);
                collector.SetNextReader(og.readerContext);
                for (int docIDX = 0; docIDX < og.count; docIDX++)
                {
                    int doc = og.docs[docIDX];
                    fakeScorer.doc = doc;
                    if (needsScores)
                    {
                        fakeScorer.score = og.scores[docIDX];
                    }
                    collector.Collect(doc);
                }
                totalGroupedHitCount += og.count;

                object[] groupSortValues;

                if (fillSortFields)
                {
                    groupSortValues = new IComparable[comparers.Length];
                    for (int sortFieldIDX = 0; sortFieldIDX < comparers.Length; sortFieldIDX++)
                    {
                        groupSortValues[sortFieldIDX] = comparers[sortFieldIDX][og.comparerSlot];
                    }
                }
                else
                {
                    groupSortValues = null;
                }

                TopDocs topDocs = collector.GetTopDocs(withinGroupOffset, maxDocsPerGroup);

                // TODO: we could aggregate scores across children
                // by Sum/Avg instead of passing NaN:
                groups[downTo] = new GroupDocs<TGroupValue>(float.NaN,
                                                            topDocs.MaxScore,
                                                            og.count,
                                                            topDocs.ScoreDocs,
                                                            default(TGroupValue),
                                                            groupSortValues);
                maxScore = Math.Max(maxScore, topDocs.MaxScore);
            }

            return new TopGroups<TGroupValue>(new TopGroups<TGroupValue>(groupSort.GetSort(),
                                                                         withinGroupSort == null ? null : withinGroupSort.GetSort(),
                                                                         totalHitCount, totalGroupedHitCount, groups, maxScore),
                                              totalGroupCount);
        }
        // Called when we transition to another group; if the
        // group is competitive we insert into the group queue
        private void ProcessGroup()
        {
            totalGroupCount++;
            //System.out.println("    processGroup ord=" + lastGroupOrd + " competes=" + groupCompetes + " count=" + subDocUpto + " groupDoc=" + topGroupDoc);
            if (groupCompetes)
            {
                if (!queueFull)
                {
                    // Startup transient: always add a new OneGroup
                    OneGroup og = new OneGroup();
                    og.count       = subDocUpto;
                    og.topGroupDoc = docBase + topGroupDoc;
                    og.docs        = pendingSubDocs;
                    pendingSubDocs = new int[10];
                    if (needsScores)
                    {
                        og.scores        = pendingSubScores;
                        pendingSubScores = new float[10];
                    }
                    og.readerContext = currentReaderContext;
                    //og.groupOrd = lastGroupOrd;
                    og.comparerSlot = bottomSlot;
                    OneGroup bottomGroup = groupQueue.Add(og);
                    //System.out.println("      ADD group=" + getGroupString(lastGroupOrd) + " newBottom=" + getGroupString(bottomGroup.groupOrd));
                    queueFull = groupQueue.Count == topNGroups;
                    if (queueFull)
                    {
                        // Queue just became full; now set the real bottom
                        // in the comparers:
                        bottomSlot = bottomGroup.comparerSlot;
                        //System.out.println("    set bottom=" + bottomSlot);
                        for (int i = 0; i < comparers.Length; i++)
                        {
                            comparers[i].SetBottom(bottomSlot);
                        }
                        //System.out.println("     QUEUE FULL");
                    }
                    else
                    {
                        // Queue not full yet -- just advance bottomSlot:
                        bottomSlot = groupQueue.Count;
                    }
                }
                else
                {
                    // Replace bottom element in PQ and then updateTop
                    OneGroup og = groupQueue.Top;
                    if (Debugging.AssertsEnabled)
                    {
                        Debugging.Assert(og != null);
                    }
                    og.count       = subDocUpto;
                    og.topGroupDoc = docBase + topGroupDoc;
                    // Swap pending docs
                    int[] savDocs = og.docs;
                    og.docs        = pendingSubDocs;
                    pendingSubDocs = savDocs;
                    if (needsScores)
                    {
                        // Swap pending scores
                        float[] savScores = og.scores;
                        og.scores        = pendingSubScores;
                        pendingSubScores = savScores;
                    }
                    og.readerContext = currentReaderContext;
                    //og.groupOrd = lastGroupOrd;
                    bottomSlot = groupQueue.UpdateTop().comparerSlot;

                    //System.out.println("    set bottom=" + bottomSlot);
                    for (int i = 0; i < comparers.Length; i++)
                    {
                        comparers[i].SetBottom(bottomSlot);
                    }
                }
            }
            subDocUpto = 0;
        }
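        // ProcessGroup above never copies the child-doc buffers: a competitive group
        // either takes ownership of the pending arrays (a fresh buffer is allocated while
        // the queue is still filling) or swaps buffers with the evicted bottom group.
        // The core idea as a hypothetical helper (illustration only, not part of the API):
        private static void SwapBuffers(ref int[] pending, ref int[] owned)
        {
            int[] tmp = owned;
            owned = pending;    // the group takes the freshly filled buffer
            pending = tmp;      // the group's old buffer is reused for the next group
        }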
        // Pulls out child doc and scores for all join queries:
        private void CopyGroups(OneGroup og)
        {
            // While rare, it's possible top arrays could be too
            // short if join query had null scorer on first
            // segment(s) but then became non-null on later segments
            int numSubScorers = joinScorers.Length;
            if (og.docs.Length < numSubScorers)
            {
                // While rare, this could happen if join query had
                // null scorer on first segment(s) but then became
                // non-null on later segments
                og.docs = ArrayUtil.Grow(og.docs);
            }
            if (og.counts.Length < numSubScorers)
            {
                og.counts = ArrayUtil.Grow(og.counts);
            }
            if (trackScores && og.scores.Length < numSubScorers)
            {
                og.scores = ArrayUtil.Grow(og.scores);
            }

            //System.out.println("\ncopyGroups parentDoc=" + og.doc);
            for (int scorerIDX = 0; scorerIDX < numSubScorers; scorerIDX++)
            {
                ToParentBlockJoinQuery.BlockJoinScorer joinScorer = joinScorers[scorerIDX];
                //System.out.println("  scorer=" + joinScorer);
                if (joinScorer != null && docBase + joinScorer.ParentDoc == og.Doc)
                {
                    og.counts[scorerIDX] = joinScorer.ChildCount;
                    //System.out.println("    count=" + og.counts[scorerIDX]);
                    og.docs[scorerIDX] = joinScorer.SwapChildDocs(og.docs[scorerIDX]);
                    Debug.Assert(og.docs[scorerIDX].Length >= og.counts[scorerIDX], "length=" + og.docs[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]);
                    //System.out.println("    len=" + og.docs[scorerIDX].length);
                    /*
                      for(int idx=0;idx<og.counts[scorerIDX];idx++) {
                      System.out.println("    docs[" + idx + "]=" + og.docs[scorerIDX][idx]);
                      }
                    */
                    if (trackScores)
                    {
                        //System.out.println("    copy scores");
                        og.scores[scorerIDX] = joinScorer.SwapChildScores(og.scores[scorerIDX]);
                        Debug.Assert(og.scores[scorerIDX].Length >= og.counts[scorerIDX], "length=" + og.scores[scorerIDX].Length + " vs count=" + og.counts[scorerIDX]);
                    }
                }
                else
                {
                    og.counts[scorerIDX] = 0;
                }
            }
        }
        public override void Collect(int parentDoc)
        {
            //System.out.println("\nC parentDoc=" + parentDoc);
            totalHitCount++;

            float score = float.NaN;

            if (trackMaxScore)
            {
                score = scorer.Score();
                maxScore = Math.Max(maxScore, score);
            }

            // TODO: we could sweep all joinScorers here and
            // aggregate total child hit count, so we can fill this
            // in getTopGroups (we wire it to 0 now)

            if (queueFull)
            {
                //System.out.println("  queueFull");
                // Fastmatch: return if this hit is not competitive
                for (int i = 0; ; i++)
                {
                    int c = reverseMul[i] * comparators[i].CompareBottom(parentDoc);
                    if (c < 0)
                    {
                        // Definitely not competitive.
                        //System.out.println("    skip");
                        return;
                    }
                    if (c > 0)
                    {
                        // Definitely competitive.
                        break;
                    }
                    if (i == compEnd)
                    {
                        // Here c=0. If we're at the last comparator, this doc is not
                        // competitive, since docs are visited in doc Id order, which means
                        // this doc cannot compete with any other document in the queue.
                        //System.out.println("    skip");
                        return;
                    }
                }

                //System.out.println("    competes!  doc=" + (docBase + parentDoc));

                // This hit is competitive - replace bottom element in queue & adjustTop
                for (int i = 0; i < comparators.Length; i++)
                {
                    comparators[i].Copy(bottom.Slot, parentDoc);
                }
                if (!trackMaxScore && trackScores)
                {
                    score = scorer.Score();
                }
                bottom.Doc = docBase + parentDoc;
                bottom.readerContext = currentReaderContext;
                bottom.Score = score;
                CopyGroups(bottom);
                bottom = queue.UpdateTop();

                for (int i = 0; i < comparators.Length; i++)
                {
                    comparators[i].Bottom = bottom.Slot;
                }
            }
            else
            {
                // Startup transient: queue is not yet full:
                int comparatorSlot = totalHitCount - 1;

                // Copy hit into queue
                for (int i = 0; i < comparators.Length; i++)
                {
                    comparators[i].Copy(comparatorSlot, parentDoc);
                }
                //System.out.println("  startup: new OG doc=" + (docBase+parentDoc));
                if (!trackMaxScore && trackScores)
                {
                    score = scorer.Score();
                }
                OneGroup og = new OneGroup(comparatorSlot, docBase + parentDoc, score, joinScorers.Length, trackScores);
                og.readerContext = currentReaderContext;
                CopyGroups(og);
                bottom = queue.Add(og);
                queueFull = totalHitCount == numParentHits;
                if (queueFull)
                {
                    // End of startup transient: queue just filled up:
                    for (int i = 0; i < comparators.Length; i++)
                    {
                        comparators[i].Bottom = bottom.Slot;
                    }
                }
            }
        }
Example #8
        private void loadEvents(MARS_Meet_dbEntities meetDb, OneMeet oneMeet)
        {
            var query = from ev in meetDb.mars_MEventEntries
                        join gr in meetDb.mars_MGroups on new { ev.MeetId, ev.GroupId } equals new { gr.MeetId, gr.GroupId }
                        where ev.MeetId == oneMeet.MeetId && ev.HeatGroupNo == 0 && (oneMeet.CurrentDay == 0 || ev.DayNo == oneMeet.CurrentDay)
                        orderby gr.SortKey, ev.SortKey, ev.RoundNo
                        select new
                        {
                            ev.EventEntryId,
                            ev.GroupId,
                            ev.EventId,
                            ev.RoundNo,
                            ev.NoOfRounds,
                            ev.DayNo,
                            ev.Seconds,
                            ev.Text,
                            ev.RoundText,
                            ev.State,
                            GroupText    = gr.Text,
                            GroupSortKey = gr.SortKey,
                            EventSortKey = ev.SortKeyEventId
                        };

            oneMeet.Groups = new List<OneGroup>();
            oneMeet.Events = new List<OneEvent>();

            OneGroup currentGroup = null;

            foreach (var e in query)
            {
                if (currentGroup == null || currentGroup.GroupId != e.GroupId)
                {
                    currentGroup = new OneGroup {
                        GroupId = e.GroupId, GroupText = e.GroupText, SortKey = e.GroupSortKey, Events = new List<OneEvent>()
                    };
                    oneMeet.Groups.Add(currentGroup);
                }

                OneEvent t = new OneEvent
                {
                    Id           = e.EventEntryId,
                    EventId      = e.EventId,
                    GroupId      = e.GroupId,
                    EventText    = e.Text,
                    GroupText    = e.GroupText,
                    RoundText    = e.RoundText,
                    RoundNo      = e.RoundNo,
                    NoOfRounds   = (int)e.NoOfRounds,
                    Day          = (int)e.DayNo,
                    Seconds      = (int)e.Seconds,
                    State        = e.State,
                    GroupSortKey = e.GroupSortKey,
                    EventSortKey = e.EventSortKey
                };

                currentGroup.Events.Add(t);

                if (oneMeet.CurrentEventId == t.Id)
                {
                    oneMeet.Events.Add(t);
                }
                else
                {
                    if (oneMeet.CurrentEventId == Guid.Empty)
                    {
                        if (oneMeet.CurrentGroupId == null || oneMeet.CurrentGroupId == currentGroup.GroupId)
                        {
                            oneMeet.Events.Add(t);
                        }
                    }
                }
            }

            oneMeet.Events.Sort(CompareTimeSchedule);
        }
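        // A sketch of how loadEvents might be driven. Hypothetical wrapper: it assumes
        // MARS_Meet_dbEntities is a disposable Entity Framework context and that the
        // OneMeet instance already carries MeetId, CurrentDay, CurrentEventId and
        // CurrentGroupId as used by the query above; the method name is illustrative.
        public OneMeet LoadMeetSchedule(OneMeet oneMeet)
        {
            using (var meetDb = new MARS_Meet_dbEntities())
            {
                // Fills oneMeet.Groups (every group with its events) and oneMeet.Events
                // (the current-day/event view, sorted by CompareTimeSchedule).
                loadEvents(meetDb, oneMeet);
            }
            return oneMeet;
        }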
Example #9
        /// <summary>
        /// Returns the grouped results.  Returns null if the
        /// number of groups collected is &lt;= groupOffset.
        ///
        /// <para>
        /// <b>NOTE</b>: This collector is unable to compute
        /// the groupValue per group so it will always be null.
        /// This is normally not a problem, as you can obtain the
        /// value just like you obtain other values for each
        /// matching document (eg, via stored fields, via
        /// FieldCache, etc.)
        /// </para>
        /// </summary>
        /// <typeparam name="TGroupValue">The expected return type for group value</typeparam>
        /// <param name="withinGroupSort">
        /// The <see cref="Sort"/> used to sort
        /// documents within each group.  Passing null is
        /// allowed, to sort by relevance.
        /// </param>
        /// <param name="groupOffset">Which group to start from</param>
        /// <param name="withinGroupOffset">
        /// Which document to start from within each group
        /// </param>
        /// <param name="maxDocsPerGroup">
        /// How many top documents to keep within each group.
        /// </param>
        /// <param name="fillSortFields">
        /// If true then the Comparable values for the sort fields will be set
        /// </param>
        public virtual ITopGroups<TGroupValue> GetTopGroups<TGroupValue>(Sort withinGroupSort, int groupOffset, int withinGroupOffset, int maxDocsPerGroup, bool fillSortFields)
        {
            //if (queueFull) {
            //System.out.println("getTopGroups groupOffset=" + groupOffset + " topNGroups=" + topNGroups);
            //}
            if (subDocUpto != 0)
            {
                ProcessGroup();
            }
            if (groupOffset >= groupQueue.Size())
            {
                return null;
            }
            int totalGroupedHitCount = 0;

            FakeScorer fakeScorer = new FakeScorer();

            float maxScore = float.MinValue;

            GroupDocs<TGroupValue>[] groups = new GroupDocs<TGroupValue>[groupQueue.Size() - groupOffset];
            for (int downTo = groupQueue.Size() - groupOffset - 1; downTo >= 0; downTo--)
            {
                OneGroup og = groupQueue.Pop();

                // At this point we hold all docs w/ in each group,
                // unsorted; we now sort them:
                ITopDocsCollector collector;
                if (withinGroupSort == null)
                {
                    // Sort by score
                    if (!needsScores)
                    {
                        throw new ArgumentException("cannot sort by relevance within group: needsScores=false");
                    }
                    collector = TopScoreDocCollector.Create(maxDocsPerGroup, true);
                }
                else
                {
                    // Sort by fields
                    collector = TopFieldCollector.Create(withinGroupSort, maxDocsPerGroup, fillSortFields, needsScores, needsScores, true);
                }

                collector.Scorer     = fakeScorer;
                collector.NextReader = og.readerContext;
                for (int docIDX = 0; docIDX < og.count; docIDX++)
                {
                    int doc = og.docs[docIDX];
                    fakeScorer.doc = doc;
                    if (needsScores)
                    {
                        fakeScorer.score = og.scores[docIDX];
                    }
                    collector.Collect(doc);
                }
                totalGroupedHitCount += og.count;

                object[] groupSortValues;

                if (fillSortFields)
                {
                    groupSortValues = new IComparable[comparators.Length];
                    for (int sortFieldIDX = 0; sortFieldIDX < comparators.Length; sortFieldIDX++)
                    {
                        groupSortValues[sortFieldIDX] = comparators[sortFieldIDX].Value(og.comparatorSlot);
                    }
                }
                else
                {
                    groupSortValues = null;
                }

                TopDocs topDocs = collector.TopDocs(withinGroupOffset, maxDocsPerGroup);

                // TODO: we could aggregate scores across children
                // by Sum/Avg instead of passing NaN:
                groups[downTo] = new GroupDocs<TGroupValue>(float.NaN,
                                                            topDocs.MaxScore,
                                                            og.count,
                                                            topDocs.ScoreDocs,
                                                            default(TGroupValue),
                                                            groupSortValues);
                maxScore = Math.Max(maxScore, topDocs.MaxScore);
            }

            /*
             * while (groupQueue.size() != 0) {
             * final OneGroup og = groupQueue.pop();
             * //System.out.println("  leftover: og ord=" + og.groupOrd + " count=" + og.count);
             * totalGroupedHitCount += og.count;
             * }
             */

            return new TopGroups<TGroupValue>(new TopGroups<TGroupValue>(groupSort.GetSort(),
                                                                         withinGroupSort == null ? null : withinGroupSort.GetSort(),
                                                                         totalHitCount, totalGroupedHitCount, groups, maxScore),
                                              totalGroupCount);
        }
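        // Consuming the grouped result might look like this sketch. Assumptions:
        // "groupingCollector" is a BlockGroupingCollector-style instance that has already
        // been filled by a search, and the ITopGroups/GroupDocs member names follow the
        // Lucene.NET grouping API; the group value is always null here, as the summary notes.
        private static void PrintGroups(BlockGroupingCollector groupingCollector)
        {
            // null withinGroupSort = rank child docs by relevance (requires needsScores=true)
            ITopGroups<object> topGroups = groupingCollector.GetTopGroups<object>(null, 0, 0, 5, true);
            if (topGroups == null)
            {
                return; // fewer groups were collected than groupOffset
            }
            foreach (var group in topGroups.Groups)
            {
                // group.GroupValue is always null for this collector
                foreach (ScoreDoc sd in group.ScoreDocs)
                {
                    Console.WriteLine("doc={0} score={1}", sd.Doc, sd.Score);
                }
            }
        }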