// TODO: ideally we'd test > Short.MAX_VALUE too, but compilation is currently recursive,
// so to test such huge expressions we'd need to change the parser to use an explicit Stack.
/// <summary>
/// Builds the expression "x0+x1+...+x(n-1)", binds every variable to the score,
/// and verifies that the sort value of each top hit equals n * score.
/// </summary>
/// <exception cref="System.Exception"></exception>
private void DoTestLotsOfBindings(int n)
{
    SimpleBindings bindings = new SimpleBindings();
    StringBuilder exprText = new StringBuilder();
    for (int varIndex = 0; varIndex < n; varIndex++)
    {
        if (varIndex > 0)
        {
            exprText.Append("+");
        }
        string varName = "x" + varIndex;
        exprText.Append(varName);
        bindings.Add(new SortField(varName, SortFieldType.SCORE));
    }

    var expr = JavascriptCompiler.Compile(exprText.ToString());
    var sort = new Sort(expr.GetSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.Search(query, null, 3, sort, true, true);

    for (int hit = 0; hit < 3; hit++)
    {
        FieldDoc fieldDoc = (FieldDoc)td.ScoreDocs[hit];
        float expected = n * fieldDoc.Score;
        float actual = (float)((double)fieldDoc.Fields[0]);
        AreEqual(expected, actual, CheckHits.ExplainToleranceDelta(expected, actual));
    }
}
/// <summary>
/// Parses a single &lt;member&gt; element from an XML documentation file and files it
/// into the matching collection by its kind prefix ('T' type, 'M' method, 'F' field,
/// 'P' property, 'E' event). Members with a missing or malformed name are skipped.
/// </summary>
private void ParseMember(XElement element)
{
    // Names look like "T:Namespace.Type" — a one-char kind, a colon, then the id.
    string rawName = element.AttributeOrDefault("name")?.Trim();
    if (string.IsNullOrEmpty(rawName) || rawName.Length < 2 || rawName[1] != ':')
    {
        return; // missing or malformed name attribute
    }

    char memberKind = rawName[0];
    NameInfo parsedName = new NameParser().Parse(rawName.Substring(2), memberKind == 'M');

    switch (memberKind)
    {
        case 'T':
        {
            var doc = new TypeDoc(parsedName, ParseMeta(element), typeParameters: ParseTypeParameters(element));
            _types.Add(doc.Name, doc);
            break;
        }
        case 'M':
        {
            var doc = new MethodDoc(parsedName, ParseMeta(element), ParseTypeParameters(element), ParseParameters(element), ParseExceptions(element));
            _methods.Add(doc.Name, doc);
            break;
        }
        case 'F':
        {
            var doc = new FieldDoc(parsedName, ParseMeta(element));
            _fields.Add(doc.Name, doc);
            break;
        }
        case 'P':
        {
            var doc = new PropertyDoc(parsedName, ParseMeta(element), ParseExceptions(element));
            _properties.Add(doc.Name, doc);
            break;
        }
        case 'E':
        {
            var doc = new EventDoc(parsedName, ParseMeta(element), ParseExceptions(element));
            _events.Add(doc.Name, doc);
            break;
        }
    }
}
/// <summary>
/// Sorts by haversine distance from a fixed point (40.7143528, -74.0059731) and
/// checks the computed distance of each of the three hits against known values.
/// </summary>
public virtual void TestDistanceSort()
{
    var distance = JavascriptCompiler.Compile("haversin(40.7143528,-74.0059731,latitude,longitude)");

    SimpleBindings bindings = new SimpleBindings();
    bindings.Add(new SortField("latitude", SortFieldType.DOUBLE));
    bindings.Add(new SortField("longitude", SortFieldType.DOUBLE));

    Sort sort = new Sort(distance.GetSortField(bindings, false));
    TopFieldDocs td = searcher.Search(new MatchAllDocsQuery(), null, 3, sort);

    // Expected distances for the top three hits, in sort order.
    double[] expectedDistances = { 0.4619D, 1.0546D, 5.2842D };
    for (int hit = 0; hit < expectedDistances.Length; hit++)
    {
        FieldDoc fieldDoc = (FieldDoc)td.ScoreDocs[hit];
        AreEqual(expectedDistances[hit], (double)fieldDoc.Fields[0], 1E-4);
    }
}
/// <summary>
/// Sorts by the expression "sqrt(_score)" and verifies each top hit's sort value
/// equals the square root of that hit's score.
/// </summary>
public virtual void TestSortValues()
{
    var expr = JavascriptCompiler.Compile("sqrt(_score)");

    SimpleBindings bindings = new SimpleBindings();
    bindings.Add(new SortField("_score", SortFieldType.SCORE));

    Sort sort = new Sort(expr.GetSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.Search(query, null, 3, sort, true, true);

    for (int hit = 0; hit < 3; hit++)
    {
        FieldDoc fieldDoc = (FieldDoc)td.ScoreDocs[hit];
        float expected = (float)Math.Sqrt(fieldDoc.Score);
        float actual = (float)((double)fieldDoc.Fields[0]);
        AreEqual(expected, actual, CheckHits.ExplainToleranceDelta(expected, actual));
    }
}
/// <summary>
/// An expression that references the same binding twice ("_score + _score")
/// should produce sort values equal to twice each hit's score.
/// </summary>
public virtual void TestTwoOfSameBinding()
{
    var expr = JavascriptCompiler.Compile("_score + _score");

    SimpleBindings bindings = new SimpleBindings();
    bindings.Add(new SortField("_score", SortFieldType.SCORE));

    Sort sort = new Sort(expr.GetSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.Search(query, null, 3, sort, true, true);

    for (int hit = 0; hit < 3; hit++)
    {
        FieldDoc fieldDoc = (FieldDoc)td.ScoreDocs[hit];
        float expected = 2 * fieldDoc.Score;
        float actual = ((J2N.Numerics.Double)fieldDoc.Fields[0]).ToSingle();
        Assert.AreEqual(expected, actual, CheckHits.ExplainToleranceDelta(expected, actual));
    }
}
/// <summary>
/// A compiled expression may itself be bound as a variable inside another
/// expression: "2*expr1" where expr1 = "_score" should yield 2 * score.
/// </summary>
public virtual void TestExpressionRefersToExpression()
{
    var expr1 = JavascriptCompiler.Compile("_score");
    var expr2 = JavascriptCompiler.Compile("2*expr1");

    var bindings = new SimpleBindings();
    bindings.Add(new SortField("_score", SortFieldType.SCORE));
    bindings.Add("expr1", expr1);

    Sort sort = new Sort(expr2.GetSortField(bindings, true));
    Query query = new TermQuery(new Term("body", "contents"));
    TopFieldDocs td = searcher.Search(query, null, 3, sort, true, true);

    for (int hit = 0; hit < 3; hit++)
    {
        FieldDoc fieldDoc = (FieldDoc)td.ScoreDocs[hit];
        float expected = 2 * fieldDoc.Score;
        float actual = (float)((double)fieldDoc.Fields[0]);
        Assert.AreEqual(expected, actual, CheckHits.ExplainToleranceDelta(expected, actual));
    }
}
/// <summary>
/// Indexes docs in decreasing value order, sorts them ascending via a FieldCache
/// value source, then verifies that <c>SearchAfter</c> paging returns only hits
/// strictly after the given anchor hit.
/// </summary>
public void TestSearchAfterWhenSortingByFunctionValues()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
    iwc.SetMergePolicy(NewLogMergePolicy()); // depends on docid order
    RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);

    // Save docs unsorted (decreasing value: NUM_VALS, NUM_VALS-1, ..., 1).
    const int NUM_VALS = 5;
    Document doc = new Document();
    Field field = new StringField("value", "", Field.Store.YES);
    doc.Add(field);
    for (int val = NUM_VALS; val > 0; val--)
    {
        field.SetStringValue(Convert.ToString(val));
        writer.AddDocument(doc);
    }

    // Open the index.
    IndexReader reader = writer.GetReader();
    writer.Dispose();
    IndexSearcher searcher = NewSearcher(reader);

    // Build a sort criterion from a FieldCache value source, ascending.
    Int32FieldSource src = new Int32FieldSource("value");
    SortField sf = src.GetSortField(false).Rewrite(searcher);
    Sort orderBy = new Sort(sf);

    // Get hits sorted by our FunctionValues (ascending values).
    Query q = new MatchAllDocsQuery();
    TopDocs hits = searcher.Search(q, reader.MaxDoc, orderBy);
    assertEquals(NUM_VALS, hits.ScoreDocs.Length);

    // Sanity check: sorting works in general (values come back 1..NUM_VALS).
    int expectedValue = 0;
    foreach (ScoreDoc hit in hits.ScoreDocs)
    {
        int valueFromDoc = Convert.ToInt32(reader.Document(hit.Doc).Get("value"));
        assertEquals(++expectedValue, valueFromDoc);
    }

    // Now page past hit #2 using IndexSearcher.SearchAfter().
    int afterIdx = 1;
    FieldDoc afterHit = (FieldDoc)hits.ScoreDocs[afterIdx];
    hits = searcher.SearchAfter(afterHit, q, reader.MaxDoc, orderBy);

    // Expected # of hits: NUM_VALS - 2.
    assertEquals(NUM_VALS - (afterIdx + 1), hits.ScoreDocs.Length);

    // Verify that the remaining hits are actually "after" the anchor.
    int afterValue = (int)((double?)afterHit.Fields[0]);
    foreach (ScoreDoc hit in hits.ScoreDocs)
    {
        int val = Convert.ToInt32(reader.Document(hit.Doc).Get("value"));
        assertTrue(afterValue <= val);
        assertFalse(hit.Doc == afterHit.Doc);
    }

    reader.Dispose();
    dir.Dispose();
}
/// <summary>
/// Create the results based on the search hits.
/// Can be overridden by subclass to add particular behavior (e.g. weight transformation).
/// </summary>
/// <exception cref="System.IO.IOException"> If there are problems reading fields from the underlying Lucene index. </exception>
protected internal virtual IList<LookupResult> CreateResults(IndexSearcher searcher, TopFieldDocs hits, int num,
    string charSequence, bool doHighlight, IEnumerable<string> matchedTokens, string prefixToken)
{
    BinaryDocValues textDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, TEXT_FIELD_NAME);

    // This will just be null if the app didn't pass payloads to build():
    // TODO: maybe just stored fields? they compress...
    BinaryDocValues payloadsDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, "payloads");

    IList<AtomicReaderContext> leaves = searcher.IndexReader.Leaves;
    List<LookupResult> results = new List<LookupResult>();
    BytesRef scratch = new BytesRef();

    foreach (ScoreDoc scoreDoc in hits.ScoreDocs)
    {
        FieldDoc fd = (FieldDoc)scoreDoc;
        textDV.Get(fd.Doc, scratch);
        string text = scratch.Utf8ToString();
        long score = (long)fd.Fields[0];

        BytesRef payload = null;
        if (payloadsDV != null)
        {
            payload = new BytesRef();
            payloadsDV.Get(fd.Doc, payload);
        }

        // Contexts are stored per segment, so resolve the leaf reader first.
        int segment = ReaderUtil.SubIndex(fd.Doc, leaves);
        SortedSetDocValues contextsDV = leaves[segment].AtomicReader.GetSortedSetDocValues(CONTEXTS_FIELD_NAME);

        HashSet<BytesRef> contexts = null;
        if (contextsDV != null)
        {
            contexts = new HashSet<BytesRef>();
            contextsDV.SetDocument(fd.Doc - leaves[segment].DocBase);
            long ord;
            while ((ord = contextsDV.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
            {
                BytesRef context = new BytesRef();
                contextsDV.LookupOrd(ord, context);
                contexts.Add(context);
            }
        }

        LookupResult result;
        if (doHighlight)
        {
            object highlightKey = Highlight(text, matchedTokens, prefixToken);
            result = new LookupResult(highlightKey.ToString(), highlightKey, score, payload, contexts);
        }
        else
        {
            result = new LookupResult(text, score, payload, contexts);
        }
        results.Add(result);
    }

    return results;
}
/// <summary>
/// Search, sorting by <see cref="Sort"/>, and computing
/// drill down and sideways counts.
/// </summary>
public virtual DrillSidewaysResult Search(DrillDownQuery query, Filter filter, FieldDoc after, int topN, Sort sort, bool doDocScores, bool doMaxScore)
{
    if (filter != null)
    {
        query = new DrillDownQuery(m_config, filter, query);
    }

    if (sort == null)
    {
        return Search(after, query, topN);
    }

    // TopFieldCollector does not allow numHits = 0, so clamp the limit to at least 1.
    int limit = m_searcher.IndexReader.MaxDoc;
    if (limit == 0)
    {
        limit = 1;
    }
    topN = Math.Min(topN, limit);

    TopFieldCollector hitCollector = TopFieldCollector.Create(sort, topN, after, true, doDocScores, doMaxScore, true);
    DrillSidewaysResult r = Search(query, hitCollector);
    return new DrillSidewaysResult(r.Facets, hitCollector.GetTopDocs());
}
/// <summary>
/// Creates results from the search hits, scaling each hit's stored weight by a
/// coefficient that penalizes suggestions which do not start with the search key.
/// Results are collected into a bounded sorted set and returned best-first.
/// </summary>
protected internal override IList<Lookup.LookupResult> CreateResults(IndexSearcher searcher, TopFieldDocs hits, int num,
    string key, bool doHighlight, ICollection<string> matchedTokens, string prefixToken)
{
    BinaryDocValues textDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, TEXT_FIELD_NAME);
    Debug.Assert(textDV != null);

    // This will just be null if app didn't pass payloads to build():
    // TODO: maybe just stored fields? they compress...
    BinaryDocValues payloadsDV = MultiDocValues.GetBinaryValues(searcher.IndexReader, "payloads");

    JCG.SortedSet<Lookup.LookupResult> results = new JCG.SortedSet<Lookup.LookupResult>(LOOKUP_COMP);

    // we reduce the num to the one initially requested
    int actualNum = num / numFactor;

    BytesRef scratch = new BytesRef();
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        FieldDoc fd = (FieldDoc)hits.ScoreDocs[i];
        textDV.Get(fd.Doc, scratch);
        string text = scratch.Utf8ToString();
        long weight = (long)fd.Fields[0];

        BytesRef payload;
        if (payloadsDV != null)
        {
            payload = new BytesRef();
            payloadsDV.Get(fd.Doc, payload);
        }
        else
        {
            payload = null;
        }

        double coefficient;
        // 'key' is already a string, so no ToString() call is needed here.
        if (text.StartsWith(key, StringComparison.Ordinal))
        {
            // if hit starts with the key, we don't change the score
            coefficient = 1;
        }
        else
        {
            coefficient = CreateCoefficient(searcher, fd.Doc, matchedTokens, prefixToken);
        }

        long score = (long)(weight * coefficient);

        LookupResult result;
        if (doHighlight)
        {
            object highlightKey = Highlight(text, matchedTokens, prefixToken);
            result = new LookupResult(highlightKey.ToString(), highlightKey, score, payload);
        }
        else
        {
            result = new LookupResult(text, score, payload);
        }

        BoundedTreeAdd(results, result, actualNum);
    }

    // The set is ordered worst-first by LOOKUP_COMP; reverse for best-first output.
    return new List<LookupResult>(results.Reverse());
}