Example #1
        public override Explanation Explain(IndexSearcher searcher, Explanation firstPassExplanation, int docID)
        {
            TopDocs oneHit = new TopDocs(1, new ScoreDoc[] { new ScoreDoc(docID, firstPassExplanation.Value) });
            TopDocs hits   = Rescore(searcher, oneHit, 1);

            Debug.Assert(hits.TotalHits == 1);

            // TODO: if we could ask the Sort to explain itself then
            // we wouldn't need the separate ExpressionRescorer...
            Explanation result = new Explanation(0.0f, "sort field values for sort=" + sort.ToString());

            // Add first pass:
            Explanation first = new Explanation(firstPassExplanation.Value, "first pass score");

            first.AddDetail(firstPassExplanation);
            result.AddDetail(first);

            FieldDoc fieldDoc = (FieldDoc)hits.ScoreDocs[0];

            // Add sort values:
            SortField[] sortFields = sort.GetSort();
            for (int i = 0; i < sortFields.Length; i++)
            {
                result.AddDetail(new Explanation(0.0f, "sort field " + sortFields[i].ToString() + " value=" + fieldDoc.Fields[i]));
            }

            return result;
        }
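
This Explain implementation appears to come from a Sort-based rescorer (the TODO mentions the separate ExpressionRescorer). A minimal sketch of how such a two-pass rescorer might be driven follows; the query, field name, and the SortRescorer/Type_e names are assumptions based on the Lucene.Net 4.x API, not code taken from this example's project:

        // First pass: top 100 hits by relevance. Second pass: re-sort those hits by a stored field.
        TopDocs firstPass = searcher.Search(query, 100);
        Rescorer rescorer = new SortRescorer(new Sort(new SortField("popularity", SortField.Type_e.INT, true)));
        TopDocs rescored = rescorer.Rescore(searcher, firstPass, 10);

        // Explain for one hit combines the first-pass score explanation with the sort field values:
        int docId = rescored.ScoreDocs[0].Doc;
        Explanation why = rescorer.Explain(searcher, searcher.Explain(query, docId), docId);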
Example #2
 // Builds an equivalent Sort that forces the legacy (pre-2.9) sorting path,
 // so the test can compare the old implementation against the new one.
 private Sort GetOldSort(Sort sort)
 {
     SortField[] fields    = sort.GetSort();
     SortField[] oldFields = new SortField[fields.Length];
     for (int i = 0; i < fields.Length; i++)
     {
         int sortType;
         if (fields[i].GetField() != null && fields[i].GetField().Equals("string"))
         {
             sortType = SortField.STRING;
         }
         else
         {
             sortType = fields[i].GetType();
         }
         oldFields[i] = new SortField(fields[i].GetField(), sortType, fields[i].GetReverse());
         oldFields[i].SetUseLegacySearch(true);
     }
     return new Sort(oldFields);
 }
Example #3
            public MergeSortQueue(Sort sort, TopDocs[] shardHits)
                : base(shardHits.Length)
            {
                this.shardHits = new ScoreDoc[shardHits.Length][];
                for (int shardIDX = 0; shardIDX < shardHits.Length; shardIDX++)
                {
                    ScoreDoc[] shard = shardHits[shardIDX].ScoreDocs;
                    //System.out.println("  init shardIdx=" + shardIDX + " hits=" + shard);
                    if (shard != null)
                    {
                        this.shardHits[shardIDX] = shard;
                        // Fail gracefully if API is misused:
                        for (int hitIDX = 0; hitIDX < shard.Length; hitIDX++)
                        {
                            ScoreDoc sd = shard[hitIDX];
                            if (!(sd is FieldDoc))
                            {
                                throw new System.ArgumentException("shard " + shardIDX + " was not sorted by the provided Sort (expected FieldDoc but got ScoreDoc)");
                            }
                            FieldDoc fd = (FieldDoc)sd;
                            if (fd.Fields == null)
                            {
                                throw new System.ArgumentException("shard " + shardIDX + " did not set sort field values (FieldDoc.fields is null); you must pass fillFields=true to IndexSearcher.search on each shard");
                            }
                        }
                    }
                }

                SortField[] sortFields = sort.GetSort();
                comparers  = new FieldComparer[sortFields.Length];
                reverseMul = new int[sortFields.Length];
                for (int compIDX = 0; compIDX < sortFields.Length; compIDX++)
                {
                    SortField sortField = sortFields[compIDX];
                    comparers[compIDX]  = sortField.GetComparer(1, compIDX);
                    reverseMul[compIDX] = sortField.IsReverse ? -1 : 1;
                }
            }
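
The constructor above only wires up the per-field comparers and reverse multipliers; the actual ordering happens in the queue's LessThan override. The following is a rough sketch of that comparison (paraphrased, not the verbatim library code; it assumes FieldComparer.CompareValues and the ShardRef properties used elsewhere in this listing):

            // Sketch: order two shard references by their FieldDoc sort values, field by field.
            protected internal override bool LessThan(ShardRef first, ShardRef second)
            {
                FieldDoc firstFD  = (FieldDoc)shardHits[first.ShardIndex][first.HitIndex];
                FieldDoc secondFD = (FieldDoc)shardHits[second.ShardIndex][second.HitIndex];

                for (int compIDX = 0; compIDX < comparers.Length; compIDX++)
                {
                    int cmp = reverseMul[compIDX] * comparers[compIDX].CompareValues(firstFD.Fields[compIDX], secondFD.Fields[compIDX]);
                    if (cmp != 0)
                    {
                        return cmp < 0;
                    }
                }

                // Tie-break: earlier shards win, then earlier hits within the same shard,
                // so the merge stays stable across shards.
                if (first.ShardIndex != second.ShardIndex)
                {
                    return first.ShardIndex < second.ShardIndex;
                }
                return first.HitIndex < second.HitIndex;
            }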
Example #4
        public virtual void  TestSort()
        {
            this.r = NewRandom();

            // reverse & not
            // all types
            // restrictive & non restrictive searches (on contents)

            Create();

            Sort[] sorts     = new Sort[50];
            int    sortCount = 0;

            for (int r = 0; r < 2; r++)
            {
                Sort sort;
                bool reverse = 1 == r;

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("byte", SortField.BYTE, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("short", SortField.SHORT, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("int", SortField.INT, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("long", SortField.LONG, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("float", SortField.FLOAT, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("double", SortField.DOUBLE, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("string", SortField.STRING_VAL, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField("stringIdx", SortField.STRING, reverse) });

                //sorts[sortCount++] = sort = new Sort();
                //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD, reverse)});

                //sorts[sortCount++] = sort = new Sort();
                //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL, reverse)});

                //sorts[sortCount++] = sort = new Sort();
                //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM, reverse)});

                //sorts[sortCount++] = sort = new Sort();
                //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM2, reverse)});

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField(null, SortField.SCORE, reverse) });

                sorts[sortCount++] = sort = new Sort();
                sort.SetSort(new SortField[] { new SortField(null, SortField.DOC, reverse) });
            }

            Query[] queries = new Query[4];
            queries[0] = new MatchAllDocsQuery();
            queries[1] = new TermQuery(new Term("contents", "x"));             // matches every 10th doc
            queries[2] = new TermQuery(new Term("contents", "y"));             // matches every 100th doc
            queries[3] = new TermQuery(new Term("contents", "z"));             // matches every 1000th doc

            for (int sx = 0; sx < 3; sx++)
            {
                IndexSearcher searcher;
                if (sx == 0)
                {
                    searcher = searcherSingleSegment;
                }
                else if (sx == 1)
                {
                    searcher = searcherFewSegment;
                }
                else
                {
                    searcher = searcherMultiSegment;
                }

                for (int qx = 0; qx < queries.Length; qx++)
                {
                    Query query = queries[qx];

                    for (int q = 0; q < 3; q++)
                    {
                        int queueSize;
                        if (q == 0)
                        {
                            queueSize = 10;
                        }
                        else if (q == 1)
                        {
                            queueSize = 100;
                        }
                        else
                        {
                            queueSize = 1000;
                        }

                        for (int s = 0; s < sortCount; s++)
                        {
                            Sort sort1 = sorts[s];

                            for (int s2 = -1; s2 < sortCount; s2++)
                            {
                                Sort sort;
                                if (s2 == -1)
                                {
                                    // Single field sort
                                    sort = sort1;
                                }
                                else
                                {
                                    sort = new Sort(new SortField[] { sort1.GetSort()[0], sorts[s2].GetSort()[0] });
                                }

                                // Old
                                Sort oldSort = GetOldSort(sort);

                                if (VERBOSE)
                                {
                                    System.Console.Out.WriteLine("query=" + query);
                                    if (sx == 0)
                                    {
                                        System.Console.Out.WriteLine("  single-segment index");
                                    }
                                    else if (sx == 1)
                                    {
                                        System.Console.Out.WriteLine("  few-segment index");
                                    }
                                    else
                                    {
                                        System.Console.Out.WriteLine("  many-segment index");
                                    }
                                    System.Console.Out.WriteLine("  numHit=" + queueSize);
                                    System.Console.Out.WriteLine("  old=" + oldSort);
                                    System.Console.Out.WriteLine("  new=" + sort);
                                }

                                TopDocs newHits = searcher.Search(query, null, queueSize, sort);
                                TopDocs oldHits = searcher.Search(query, null, queueSize, oldSort);

                                Compare(oldHits, newHits);
                            }
                        }
                    }
                }
            }

            // we explicitly test the old sort method and
            // compare with the new, so we expect to see SUBREADER
            // sanity checks fail.
            Insanity[] insanity = FieldCacheSanityChecker.CheckSanity(Lucene.Net.Search.FieldCache_Fields.DEFAULT);
            try
            {
                int ignored = 0;
                for (int i = 0; i < insanity.Length; i++)
                {
                    if (insanity[i].GetType() == InsanityType.SUBREADER)
                    {
                        insanity[i] = new Insanity(InsanityType.EXPECTED, insanity[i].GetMsg(), insanity[i].GetCacheEntries());
                        ignored++;
                    }
                }
                Assert.AreEqual(ignored, insanity.Length, "Not all insane field cache usage was expected");

                insanity = null;
            }
            finally
            {
                // report this in the event of any exception/failure
                // if no failure, then insanity will be null
                if (null != insanity)
                {
                    System.IO.StreamWriter temp_writer;
                    temp_writer           = new System.IO.StreamWriter(System.Console.OpenStandardError(), System.Console.Error.Encoding);
                    temp_writer.AutoFlush = true;
                    DumpArray(GetTestLabel() + ": Insane FieldCache usage(s)", insanity, temp_writer);
                }
            }
            // we've already checked FieldCache, purge so tearDown doesn't complain
            PurgeFieldCache(Lucene.Net.Search.FieldCache_Fields.DEFAULT);

            Close();
        }
Example #5
        /// <summary>
        /// Same as <seealso cref="Merge(Sort, int, TopDocs[])"/> but also slices the result at the same time, based
        /// on the provided start and size. The returned TopDocs will always have a ScoreDocs array whose length is at most size.
        /// </summary>
        public static TopDocs Merge(Sort sort, int start, int size, TopDocs[] shardHits)
        {
            Util.PriorityQueue<ShardRef> queue;
            if (sort == null)
            {
                queue = new ScoreMergeSortQueue(shardHits);
            }
            else
            {
                queue = new MergeSortQueue(sort, shardHits);
            }

            int   totalHitCount = 0;
            int   availHitCount = 0;
            float maxScore      = float.MinValue;

            for (int shardIDX = 0; shardIDX < shardHits.Length; shardIDX++)
            {
                TopDocs shard = shardHits[shardIDX];
                // totalHits can be non-zero even if no hits were
                // collected, when searchAfter was used:
                totalHitCount += shard.TotalHits;
                if (shard.ScoreDocs != null && shard.ScoreDocs.Length > 0)
                {
                    availHitCount += shard.ScoreDocs.Length;
                    queue.Add(new ShardRef(shardIDX));
                    maxScore = Math.Max(maxScore, shard.MaxScore);
                    //System.out.println("  maxScore now " + maxScore + " vs " + shard.getMaxScore());
                }
            }

            if (availHitCount == 0)
            {
                maxScore = float.NaN;
            }

            ScoreDoc[] hits;
            if (availHitCount <= start)
            {
                hits = new ScoreDoc[0];
            }
            else
            {
                hits = new ScoreDoc[Math.Min(size, availHitCount - start)];
                int requestedResultWindow = start + size;
                int numIterOnHits         = Math.Min(availHitCount, requestedResultWindow);
                int hitUpto = 0;
                while (hitUpto < numIterOnHits)
                {
                    Debug.Assert(queue.Count > 0);
                    ShardRef @ref = queue.Pop();
                    ScoreDoc hit  = shardHits[@ref.ShardIndex].ScoreDocs[@ref.HitIndex++];
                    hit.ShardIndex = @ref.ShardIndex;
                    if (hitUpto >= start)
                    {
                        hits[hitUpto - start] = hit;
                    }

                    //System.out.println("  hitUpto=" + hitUpto);
                    //System.out.println("    doc=" + hits[hitUpto].doc + " score=" + hits[hitUpto].score);

                    hitUpto++;

                    if (@ref.HitIndex < shardHits[@ref.ShardIndex].ScoreDocs.Length)
                    {
                        // Not done with these TopDocs yet:
                        queue.Add(@ref);
                    }
                }
            }

            if (sort == null)
            {
                return new TopDocs(totalHitCount, hits, maxScore);
            }
            else
            {
                return new TopFieldDocs(totalHitCount, hits, sort.GetSort(), maxScore);
            }
        }
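
A minimal sketch of how this merge might be called when combining per-shard results; the shard searchers, query, and field name are hypothetical, and it assumes this Merge overload is exposed on TopDocs as in Lucene.Net 4.x:

        // Each shard is searched with the same query and the same Sort, then merged.
        // start/size slice the merged result (here: the second page of 10 hits).
        Sort sort = new Sort(new SortField("timestamp", SortField.Type_e.LONG, true));
        TopDocs shard0Hits = shardSearcher0.Search(query, null, 20, sort);
        TopDocs shard1Hits = shardSearcher1.Search(query, null, 20, sort);
        TopDocs page2 = TopDocs.Merge(sort, 10, 10, new TopDocs[] { shard0Hits, shard1Hits });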
        /// <summary>
        ///  Accumulates groups for the BlockJoinQuery specified by its slot.
        /// </summary>
        /// <param name="slot"> Search query's slot </param>
        /// <param name="offset"> Parent docs offset </param>
        /// <param name="maxDocsPerGroup"> Upper bound of documents per group number </param>
        /// <param name="withinGroupOffset"> Offset within each group of child docs </param>
        /// <param name="withinGroupSort"> Sort criteria within groups </param>
        /// <param name="fillSortFields"> Specifies whether to add sort fields or not </param>
        /// <returns> TopGroups for the query specified by slot </returns>
        /// <exception cref="IOException"> if there is a low-level I/O error </exception>
        private TopGroups<int> AccumulateGroups(int slot, int offset, int maxDocsPerGroup, int withinGroupOffset, Sort withinGroupSort, bool fillSortFields)
        {
            var groups = new GroupDocs<int>[sortedGroups.Length - offset];
            var fakeScorer = new FakeScorer();

            int totalGroupedHitCount = 0;
            //System.out.println("slot=" + slot);

            for (int groupIdx = offset; groupIdx < sortedGroups.Length; groupIdx++)
            {
                OneGroup og = sortedGroups[groupIdx];
                int numChildDocs;
                if (slot == -1 || slot >= og.counts.Length)
                {
                    numChildDocs = 0;
                }
                else
                {
                    numChildDocs = og.counts[slot];
                }

                // Number of documents in group should be bounded to prevent redundant memory allocation
                int numDocsInGroup = Math.Max(1, Math.Min(numChildDocs, maxDocsPerGroup));
                //System.out.println("parent doc=" + og.doc + " numChildDocs=" + numChildDocs + " maxDocsPG=" + maxDocsPerGroup);

                // At this point we hold all docs w/ in each group, unsorted; we now sort them:
                Collector collector;
                if (withinGroupSort == null)
                {
                    //System.out.println("sort by score");
                    // Sort by score
                    if (!trackScores)
                    {
                        throw new ArgumentException("cannot sort by relevance within group: trackScores=false");
                    }
                    collector = TopScoreDocCollector.Create(numDocsInGroup, true);
                }
                else
                {
                    // Sort by fields
                    collector = TopFieldCollector.Create(withinGroupSort, numDocsInGroup, fillSortFields, trackScores, trackMaxScore, true);
                }

                collector.Scorer = fakeScorer;
                collector.NextReader = og.readerContext;
                for (int docIdx = 0; docIdx < numChildDocs; docIdx++)
                {
                    //System.out.println("docIDX=" + docIDX + " vs " + og.docs[slot].length);
                    int doc = og.docs[slot][docIdx];
                    fakeScorer.doc = doc;
                    if (trackScores)
                    {
                        fakeScorer._score = og.scores[slot][docIdx];
                    }
                    collector.Collect(doc);
                }
                totalGroupedHitCount += numChildDocs;

                object[] groupSortValues;

                if (fillSortFields)
                {
                    groupSortValues = new object[comparators.Length];
                    for (int sortFieldIdx = 0; sortFieldIdx < comparators.Length; sortFieldIdx++)
                    {
                        groupSortValues[sortFieldIdx] = comparators[sortFieldIdx].Value(og.Slot);
                    }
                }
                else
                {
                    groupSortValues = null;
                }

                TopDocs topDocs;
                if (withinGroupSort == null)
                {
                    var tempCollector = (TopScoreDocCollector) collector;
                    topDocs = tempCollector.TopDocs(withinGroupOffset, numDocsInGroup);
                }
                else
                {
                    var tempCollector = (TopFieldCollector) collector;
                    topDocs = tempCollector.TopDocs(withinGroupOffset, numDocsInGroup);
                }

                groups[groupIdx - offset] = new GroupDocs<int>(og.Score, topDocs.MaxScore, numChildDocs, topDocs.ScoreDocs, og.Doc, groupSortValues);
            }

            return new TopGroups<int>(new TopGroups<int>(sort.GetSort(), withinGroupSort == null ? null : withinGroupSort.GetSort(), 0, totalGroupedHitCount, groups, maxScore), totalHitCount);
        }
 /// <summary>
 ///  Creates a ToParentBlockJoinCollector. The provided sort must
 ///  not be null. If you pass trackScores=true, all
 ///  ToParentBlockJoinQuery instances must not use
 ///  ScoreMode.None.
 /// </summary>
 public ToParentBlockJoinCollector(Sort sort, int numParentHits, bool trackScores, bool trackMaxScore)
 {
     // TODO: allow null sort to be specialized to relevance
     // only collector
     this.sort = sort;
     this.trackMaxScore = trackMaxScore;
     if (trackMaxScore)
     {
         maxScore = float.MinValue;
     }
     //System.out.println("numParentHits=" + numParentHits);
     this.trackScores = trackScores;
     this.numParentHits = numParentHits;
     queue = FieldValueHitQueue.Create<OneGroup>(sort.GetSort(), numParentHits);
     comparators = queue.Comparators;
     reverseMul = queue.ReverseMul;
     compEnd = comparators.Length - 1;
 }
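
For context, a rough sketch of how this collector is typically driven; the parent join query, searcher, and GetTopGroups arguments are illustrative and assume the Lucene.Net join API rather than code from this file:

 // Collect the top 10 parent hits of a block join, tracking scores so groups can be sorted by relevance.
 var collector = new ToParentBlockJoinCollector(Sort.RELEVANCE, 10, true, true);
 searcher.Search(parentJoinQuery, collector);

 // Retrieve up to 5 child docs per parent group, sorted by score within each group
 // (withinGroupSort = null requires trackScores = true, as AccumulateGroups above enforces).
 var groups = collector.GetTopGroups(parentJoinQuery, null, 0, 5, 0, true);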
 internal virtual void AssertQuery(Query query, Filter filter, Sort sort)
 {
     int size = TestUtil.NextInt(Random(), 1, searcher.IndexReader.MaxDoc / 5);
     TopDocs expected = searcher.Search(query, filter, size, sort, Random().NextBoolean(), Random().NextBoolean());
     // make our actual sort, mutating original by replacing some of the 
     // sortfields with equivalent expressions
     SortField[] original = sort.GetSort();
     SortField[] mutated = new SortField[original.Length];
     for (int i = 0; i < mutated.Length; i++)
     {
         if (Random().Next(3) > 0)
         {
             SortField s = original[i];
             Expression expr = JavascriptCompiler.Compile(s.Field);
             SimpleBindings simpleBindings = new SimpleBindings();
             simpleBindings.Add(s);
             bool reverse = s.Type == SortField.Type_e.SCORE || s.Reverse;
             mutated[i] = expr.GetSortField(simpleBindings, reverse);
         }
         else
         {
             mutated[i] = original[i];
         }
     }
     Sort mutatedSort = new Sort(mutated);
     TopDocs actual = searcher.Search(query, filter, size, mutatedSort, Random().NextBoolean(), Random().NextBoolean());
     CheckHits.CheckEqual(query, expected.ScoreDocs, actual.ScoreDocs);
     if (size < actual.TotalHits)
     {
         expected = searcher.SearchAfter(expected.ScoreDocs[size - 1], query, filter, size, sort);
         actual = searcher.SearchAfter(actual.ScoreDocs[size - 1], query, filter, size, mutatedSort);
         CheckHits.CheckEqual(query, expected.ScoreDocs, actual.ScoreDocs);
     }
 }
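
The test above mutates a Sort by swapping individual SortFields for equivalent compiled expressions. A standalone sketch of that pattern follows; the field names and the formula are made up for illustration, and the Type_e member names are assumed to match the usage earlier in this listing:

 // Sort by a computed value: relevance score boosted by a stored numeric field.
 Expression expr = JavascriptCompiler.Compile("_score * ln(popularity + 1)");
 SimpleBindings bindings = new SimpleBindings();
 bindings.Add(new SortField("_score", SortField.Type_e.SCORE));
 bindings.Add(new SortField("popularity", SortField.Type_e.INT));

 Sort sort = new Sort(expr.GetSortField(bindings, true)); // true = reverse (descending)
 TopDocs hits = searcher.Search(query, null, 10, sort);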