public Search (
| parameter | type                       |
| --------- | -------------------------- |
| query     |                            |
| filter    |                            |
| n         | int                        |
| Result    | Lucene.Net.Search.TopDocs  |
/// <summary>
/// Runs <paramref name="query"/> and verifies that every document id listed in
/// <paramref name="results"/> appears among the returned hits. Extra
/// (unexpected) hits are not flagged, and nothing is asserted when
/// <paramref name="results"/> is empty — this mirrors the existing contract.
/// </summary>
public static void CheckHits_(Query query, System.String defaultFieldName, Searcher searcher, int[] results, TestCase testCase)
{
    Hits hits = searcher.Search(query);

    // Expected document ids, keyed for fast lookup (values are unused).
    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    foreach (int expectedId in results)
    {
        correct.Add((System.Int32) expectedId, null);
    }

    // Document ids actually returned by the search.
    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    for (int hit = 0; hit < hits.Length(); hit++)
    {
        actual.Add((System.Int32) hits.Id(hit), null);
    }

    //Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    if (correct.Count != 0)
    {
        // Every expected id must be present in the actual set; scanning stops
        // at the first miss and the final status is asserted.
        bool status = false;
        System.Collections.IDictionaryEnumerator iter = correct.GetEnumerator();
        while (iter.MoveNext())
        {
            status = actual.ContainsKey(iter.Key);
            if (!status)
                break;
        }
        Assert.IsTrue(status, query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Searches for documents of the given <paramref name="type"/> using the
/// specified query, additional filter, result limit and sort order.
/// </summary>
/// <param name="searcher">The Searcher to search on.</param>
/// <param name="type">The type name used to build the type filter.</param>
/// <param name="query">The Query which selects the documents.</param>
/// <param name="filter">An additional Query combined into the type filter.</param>
/// <param name="numResults">The maximum number of results to return.</param>
/// <param name="sort">A Sort object that defines how to sort the results.</param>
/// <returns>An instance of TopDocs.</returns>
public static TopDocs Search(this Searcher searcher, string type, Query query, Query filter, int numResults, Sort sort)
{
    try
    {
        return searcher.Search(query, JsonMappingUtils.GetTypeFilter(type, filter), numResults, sort);
    }
    catch (Exception ex)
    {
        // Fix: 'ex' was previously caught but never used, so the failure
        // details were lost from the log entry.
        Log.Logger.Error($"Error while searching {type}, {query}: {ex}");
        throw; // rethrow without resetting the stack trace
    }
}
/// <summary>
/// Searches for documents of the given <paramref name="type"/> using the
/// specified query, additional filter, result limit and sort order.
/// </summary>
/// <param name="searcher">The Searcher to search on.</param>
/// <param name="type">The type name used to build the type filter.</param>
/// <param name="query">The Query which selects the documents.</param>
/// <param name="filter">An additional Query combined into the type filter.</param>
/// <param name="numResults">The maximum number of results to return.</param>
/// <param name="sort">A Sort object that defines how to sort the results.</param>
/// <returns>An instance of TopDocs.</returns>
public static TopDocs Search(this Searcher searcher, string type, Query query, Query filter, int numResults, Sort sort)
{
    try
    {
        return searcher.Search(query, JsonMappingUtils.GetTypeFilter(type, filter), numResults, sort);
    }
    catch (Exception ex)
    {
        // Fix: include the exception in the log entry so the failure cause is
        // not lost.
        Log.Logger.ErrorFormat("Error while searching {0}, {1}: {2}", type, query.ToString(), ex);
        // Fix: 'throw;' instead of 'throw ex;' — rethrowing the variable
        // resets the original stack trace.
        throw;
    }
}
// make sure the documents returned by the search match the expected list
private void MatchHits(Searcher searcher, Sort sort)
{
    // Baseline: run the query without any sorting.
    ScoreDoc[] hitsByRank = searcher.Search(query, null, 1000).scoreDocs;
    CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates

    // Map doc id -> hit index. Indexer assignment silently overwrites existing
    // entries (mirrors Java TreeMap.put), so duplicates do not throw here.
    System.Collections.IDictionary resultMap = new System.Collections.SortedList();
    for (int hitid = 0; hitid < hitsByRank.Length; ++hitid)
    {
        resultMap[(System.Int32) hitsByRank[hitid].doc] = (System.Int32) hitid; // value: index into hitsByRank
    }

    // Now run the same query with the sort criteria applied.
    ScoreDoc[] resultSort = searcher.Search(query, null, 1000, sort).scoreDocs;
    CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates

    // Apart from ordering, both result sets must contain exactly the same ids.
    for (int hitid = 0; hitid < resultSort.Length; ++hitid)
    {
        System.Int32 idHitDate = (System.Int32) resultSort[hitid].doc; // document ID from sorted search
        if (!resultMap.Contains(idHitDate))
        {
            Log("ID " + idHitDate + " not found. Possibly a duplicate."); // fixed typo "Possibliy"
        }
        Assert.IsTrue(resultMap.Contains(idHitDate)); // same ID must be in the map from the rank-sorted search

        // Every hit must appear exactly once in both result sets, so remove it
        // from the map; the map must be empty when we are done.
        resultMap.Remove(idHitDate);
    }

    if (resultMap.Count != 0)
    {
        Log("Couldn't match " + resultMap.Count + " hits.");
    }
    // Fix: NUnit's Assert.AreEqual takes the expected value first.
    Assert.AreEqual(0, resultMap.Count);
}
// Verifies that the "tracer" field values of the sorted hits, concatenated in
// hit order, spell out exactly the expected result string.
private void AssertMatches(Searcher searcher, Query query, Sort sort, System.String expectedResult)
{
    Hits result = searcher.Search(query, sort);
    System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
    int hitCount = result.Length();
    for (int hit = 0; hit < hitCount; ++hit)
    {
        Document doc = result.Doc(hit);
        foreach (System.String tracer in doc.GetValues("tracer"))
        {
            buff.Append(tracer);
        }
    }
    Assert.AreEqual(expectedResult, buff.ToString());
}
// Verifies that the "tracer" field values of the sorted hits, concatenated in
// hit order, match the given regular-expression pattern.
private void AssertMatchesPattern(Searcher searcher, Query query, Sort sort, System.String pattern)
{
    Hits result = searcher.Search(query, sort);
    System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
    int hitCount = result.Length();
    for (int hit = 0; hit < hitCount; ++hit)
    {
        Document doc = result.Doc(hit);
        foreach (System.String tracer in doc.GetValues("tracer"))
        {
            buff.Append(tracer);
        }
    }
    // System.out.println ("matching \""+buff+"\" against pattern \""+pattern+"\"");
    Assert.IsTrue(new System.Text.RegularExpressions.Regex(pattern).IsMatch(buff.ToString()));
}
/// <summary>
/// Searches for documents of the given <paramref name="type"/>, restricting the
/// query with the type filter produced by <c>JsonMappingUtils.GetTypeFilter</c>.
/// </summary>
/// <param name="searcher">The Searcher to search on.</param>
/// <param name="type">The type name used to build the type filter.</param>
/// <param name="query">The Query which selects the documents.</param>
/// <param name="numResults">The maximum number of results to return.</param>
/// <returns>An instance of TopDocs.</returns>
public static TopDocs Search(this Searcher searcher, string type, Query query, int numResults)
{
    var typeFilter = JsonMappingUtils.GetTypeFilter(type);
    return searcher.Search(query, typeFilter, numResults);
}
// make sure the documents returned by the search match the expected list
private void MatchHits(Searcher searcher, Sort sort)
{
    // Baseline: run the query without any sorting.
    ScoreDoc[] hitsByRank = searcher.Search(query, null, 1000).ScoreDocs;
    CheckHits(hitsByRank, "Sort by rank: "); // check for duplicates

    // Map doc id -> hit index. Indexer assignment silently overwrites existing
    // entries (mirrors Java TreeMap.put), so duplicates do not throw here.
    System.Collections.IDictionary resultMap = new System.Collections.SortedList();
    for (int hitid = 0; hitid < hitsByRank.Length; ++hitid)
    {
        resultMap[hitsByRank[hitid].Doc] = hitid; // value: index into hitsByRank
    }

    // Now run the same query with the sort criteria applied.
    ScoreDoc[] resultSort = searcher.Search(query, null, 1000, sort).ScoreDocs;
    CheckHits(resultSort, "Sort by custom criteria: "); // check for duplicates

    // Apart from ordering, both result sets must contain exactly the same ids.
    for (int hitid = 0; hitid < resultSort.Length; ++hitid)
    {
        System.Int32 idHitDate = (System.Int32) resultSort[hitid].Doc; // document ID from sorted search
        if (!resultMap.Contains(idHitDate))
        {
            Log("ID " + idHitDate + " not found. Possibly a duplicate."); // fixed typo "Possibliy"
        }
        Assert.IsTrue(resultMap.Contains(idHitDate)); // same ID must be in the map from the rank-sorted search

        // Every hit must appear exactly once in both result sets, so remove it
        // from the map; the map must be empty when we are done.
        resultMap.Remove(idHitDate);
    }

    if (resultMap.Count != 0)
    {
        Log("Couldn't match " + resultMap.Count + " hits.");
    }
    // Fix: NUnit's Assert.AreEqual takes the expected value first.
    Assert.AreEqual(0, resultMap.Count);
}
/// <summary>
/// Verifies that a multi-searcher copes with an empty sub-index in three
/// scenarios: (1) an empty index paired with a populated one, (2) after adding
/// a document to the previously empty index, and (3) after deleting that
/// document again. Scenario 2 also exercises <c>SubSearcher</c> to map hits
/// back to the sub-searcher that produced them.
/// </summary>
public virtual void TestEmptyIndex()
{
    // creating two directories for indices
    Directory indexStoreA = new MockRAMDirectory();
    Directory indexStoreB = new MockRAMDirectory();

    // creating a document to store
    Document lDoc = new Document();
    lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.ANALYZED));
    lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

    // creating a document to store
    Document lDoc2 = new Document();
    lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.ANALYZED));
    lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.NOT_ANALYZED));
    lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

    // creating a document to store
    Document lDoc3 = new Document();
    lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.ANALYZED));
    lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.NOT_ANALYZED));
    lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

    // creating an index writer for the first index
    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    // creating an index writer for the second index, but writing nothing
    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

    //--------------------------------------------------------------------
    // scenario 1: index B stays completely empty
    //--------------------------------------------------------------------

    // writing the documents to the first index
    writerA.AddDocument(lDoc);
    writerA.AddDocument(lDoc2);
    writerA.AddDocument(lDoc3);
    writerA.Optimize();
    writerA.Close();

    // closing the second index
    writerB.Close();

    // creating the query
    QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Query query = parser.Parse("handle:1");

    // building the searchables
    Searcher[] searchers = new Searcher[2];
    // VITAL STEP:adding the searcher for the empty index first, before the searcher for the populated index
    searchers[0] = new IndexSearcher(indexStoreB, true);
    searchers[1] = new IndexSearcher(indexStoreA, true);
    // creating the multiSearcher
    Searcher mSearcher = GetMultiSearcherInstance(searchers);
    // performing the search
    ScoreDoc[] hits = mSearcher.Search(query, null, 1000).ScoreDocs;

    Assert.AreEqual(3, hits.Length);

    // iterating over the hit documents; fetching each must not throw even
    // though the first sub-searcher is empty
    for (int i = 0; i < hits.Length; i++)
    {
        mSearcher.Doc(hits[i].Doc);
    }
    mSearcher.Close();

    //--------------------------------------------------------------------
    // scenario 2: one document added to the formerly empty index B
    //--------------------------------------------------------------------

    // adding one document to the empty index
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
    writerB.AddDocument(lDoc);
    writerB.Optimize();
    writerB.Close();

    // building the searchables
    Searcher[] searchers2 = new Searcher[2];
    // VITAL STEP:adding the searcher for the empty index first, before the searcher for the populated index
    searchers2[0] = new IndexSearcher(indexStoreB, true);
    searchers2[1] = new IndexSearcher(indexStoreA, true);
    // creating the mulitSearcher
    MultiSearcher mSearcher2 = GetMultiSearcherInstance(searchers2);
    // performing the same search
    ScoreDoc[] hits2 = mSearcher2.Search(query, null, 1000).ScoreDocs;

    Assert.AreEqual(4, hits2.Length);

    // iterating over the hit documents
    for (int i = 0; i < hits2.Length; i++)
    {
        // no exception should happen at this point
        mSearcher2.Doc(hits2[i].Doc);
    }

    // test the subSearcher() method: doc1 now exists in BOTH indices, so the
    // two hits must come from sub-searchers 0 and 1 respectively
    Query subSearcherQuery = parser.Parse("id:doc1");
    hits2 = mSearcher2.Search(subSearcherQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits2.Length);
    Assert.AreEqual(0, mSearcher2.SubSearcher(hits2[0].Doc)); // hit from searchers2[0]
    Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[1].Doc)); // hit from searchers2[1]

    // doc2 only exists in index A, i.e. sub-searcher 1
    subSearcherQuery = parser.Parse("id:doc2");
    hits2 = mSearcher2.Search(subSearcherQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits2.Length);
    Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[0].Doc)); // hit from searchers2[1]
    mSearcher2.Close();

    //--------------------------------------------------------------------
    // scenario 3: the document added in scenario 2 is deleted again
    //--------------------------------------------------------------------

    // deleting the document just added, this will cause a different exception to take place
    Term term = new Term("id", "doc1");
    IndexReader readerB = IndexReader.Open(indexStoreB, false);
    readerB.DeleteDocuments(term);
    readerB.Close();

    // optimizing the index with the writer
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
    writerB.Optimize();
    writerB.Close();

    // building the searchables
    Searcher[] searchers3 = new Searcher[2];
    searchers3[0] = new IndexSearcher(indexStoreB, true);
    searchers3[1] = new IndexSearcher(indexStoreA, true);
    // creating the mulitSearcher
    Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);
    // performing the same search
    ScoreDoc[] hits3 = mSearcher3.Search(query, null, 1000).ScoreDocs;

    Assert.AreEqual(3, hits3.Length);

    // iterating over the hit documents
    for (int i = 0; i < hits3.Length; i++)
    {
        mSearcher3.Doc(hits3[i].Doc);
    }
    mSearcher3.Close();
    indexStoreA.Close();
    indexStoreB.Close();
}
/// <summary>
/// Verifies (on the legacy Hits API) that a multi-searcher copes with an empty
/// sub-index in three scenarios: (1) an empty index paired with a populated
/// one, (2) after adding a document to the previously empty index, and
/// (3) after deleting that document again.
/// </summary>
public virtual void TestEmptyIndex()
{
    // creating two directories for indices
    Directory indexStoreA = new RAMDirectory();
    Directory indexStoreB = new RAMDirectory();

    // creating a document to store
    Document lDoc = new Document();
    lDoc.Add(Field.Text("fulltext", "Once upon a time....."));
    lDoc.Add(Field.Keyword("id", "doc1"));
    lDoc.Add(Field.Keyword("handle", "1"));

    // creating a document to store
    Document lDoc2 = new Document();
    lDoc2.Add(Field.Text("fulltext", "in a galaxy far far away....."));
    lDoc2.Add(Field.Keyword("id", "doc2"));
    lDoc2.Add(Field.Keyword("handle", "1"));

    // creating a document to store
    Document lDoc3 = new Document();
    lDoc3.Add(Field.Text("fulltext", "a bizarre bug manifested itself...."));
    lDoc3.Add(Field.Keyword("id", "doc3"));
    lDoc3.Add(Field.Keyword("handle", "1"));

    // creating an index writer for the first index
    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true);
    // creating an index writer for the second index, but writing nothing
    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true);

    //--------------------------------------------------------------------
    // scenario 1: index B stays completely empty
    //--------------------------------------------------------------------

    // writing the documents to the first index
    writerA.AddDocument(lDoc);
    writerA.AddDocument(lDoc2);
    writerA.AddDocument(lDoc3);
    writerA.Optimize();
    writerA.Close();

    // closing the second index
    writerB.Close();

    // creating the query
    Query query = Lucene.Net.QueryParsers.QueryParser.Parse("handle:1", "fulltext", new StandardAnalyzer());

    // building the searchables
    Searcher[] searchers = new Searcher[2];
    // VITAL STEP:adding the searcher for the empty index first, before the searcher for the populated index
    searchers[0] = new IndexSearcher(indexStoreB);
    searchers[1] = new IndexSearcher(indexStoreA);
    // creating the multiSearcher
    Searcher mSearcher = GetMultiSearcherInstance(searchers);
    // performing the search
    Hits hits = mSearcher.Search(query);

    Assert.AreEqual(3, hits.Length());

    try
    {
        // iterating over the hit documents
        for (int i = 0; i < hits.Length(); i++)
        {
            Document d = hits.Doc(i);
        }
    }
    catch (System.IndexOutOfRangeException e)
    {
        Assert.Fail("ArrayIndexOutOfBoundsException thrown: " + e.Message);
        // NOTE(review): Assert.Fail normally throws, so this WriteLine looks
        // unreachable — confirm intent.
        System.Console.Error.WriteLine(e.Source);
    }
    finally
    {
        mSearcher.Close();
    }

    //--------------------------------------------------------------------
    // scenario 2: one document added to the formerly empty index B
    //--------------------------------------------------------------------

    // adding one document to the empty index
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
    writerB.AddDocument(lDoc);
    writerB.Optimize();
    writerB.Close();

    // building the searchables
    Searcher[] searchers2 = new Searcher[2];
    // VITAL STEP:adding the searcher for the empty index first, before the searcher for the populated index
    searchers2[0] = new IndexSearcher(indexStoreB);
    searchers2[1] = new IndexSearcher(indexStoreA);
    // creating the mulitSearcher
    Searcher mSearcher2 = GetMultiSearcherInstance(searchers2);
    // performing the same search
    Hits hits2 = mSearcher2.Search(query);

    Assert.AreEqual(4, hits2.Length());

    try
    {
        // iterating over the hit documents
        for (int i = 0; i < hits2.Length(); i++)
        {
            // no exception should happen at this point
            Document d = hits2.Doc(i);
        }
    }
    catch (System.Exception e)
    {
        Assert.Fail("Exception thrown: " + e.Message);
        // NOTE(review): unreachable after Assert.Fail — see note above.
        System.Console.Error.WriteLine(e.Source);
    }
    finally
    {
        mSearcher2.Close();
    }

    //--------------------------------------------------------------------
    // scenario 3: the document added in scenario 2 is deleted again
    //--------------------------------------------------------------------

    // deleting the document just added, this will cause a different exception to take place
    Term term = new Term("id", "doc1");
    IndexReader readerB = IndexReader.Open(indexStoreB);
    readerB.Delete(term);
    readerB.Close();

    // optimizing the index with the writer
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
    writerB.Optimize();
    writerB.Close();

    // building the searchables
    Searcher[] searchers3 = new Searcher[2];
    searchers3[0] = new IndexSearcher(indexStoreB);
    searchers3[1] = new IndexSearcher(indexStoreA);
    // creating the mulitSearcher
    Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);
    // performing the same search
    Hits hits3 = mSearcher3.Search(query);

    Assert.AreEqual(3, hits3.Length());

    try
    {
        // iterating over the hit documents
        for (int i = 0; i < hits3.Length(); i++)
        {
            Document d = hits3.Doc(i);
        }
    }
    catch (System.IO.IOException e)
    {
        Assert.Fail("IOException thrown: " + e.Message);
        // NOTE(review): unreachable after Assert.Fail — see note above.
        System.Console.Error.WriteLine(e.Source);
    }
    finally
    {
        mSearcher3.Close();
    }
}
// Asserts that appending the "tracer" field values of all sorted hits, in hit
// order, yields exactly the expected result string.
private void AssertMatches(Searcher searcher, Query query, Sort sort, System.String expectedResult)
{
    Hits result = searcher.Search(query, sort);
    System.Text.StringBuilder concatenated = new System.Text.StringBuilder(10);
    for (int i = 0, total = result.Length(); i < total; ++i)
    {
        System.String[] values = result.Doc(i).GetValues("tracer");
        for (int j = 0; j < values.Length; ++j)
        {
            concatenated.Append(values[j]);
        }
    }
    Assert.AreEqual(expectedResult, concatenated.ToString());
}
/// <summary> Tests that a query matches the expected set of documents using a
/// HitCollector.
///
/// <p/>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// <p/>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query,HitCollector)">
/// </seealso>
/// <seealso cref="checkHits">
/// </seealso>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Sanity-check the query itself before exercising the collector path.
    QueryUtils.Check(query, searcher);

    // Build the set of expected document ids.
    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(correct, (System.Int32) results[i]);
    }

    // 'c' fills 'actual' with each collected doc id.
    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    Collector c = new SetCollector(actual);

    searcher.Search(query, c);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Repeat with the searcher wrapped in various ways (i = -1, 0, 1);
    // results must be unchanged.
    for (int i = - 1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapSearcher(searcher, i).Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Searcher " + i + ": " + query.ToString(defaultFieldName));
    }

    // Reader-wrapping variants only apply to a plain IndexSearcher.
    if (!(searcher is IndexSearcher))
        return ;

    for (int i = - 1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapUnderlyingReader((IndexSearcher) searcher, i).Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary> Tests that a query matches the expected set of documents using Hits.
///
/// <p/>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// <p/>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query)">
/// </seealso>
/// <seealso cref="checkHitCollector">
/// </seealso>
public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, searcher);
    }

    ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;

    // Expected ids, deduplicated and sorted for comparison.
    System.Collections.ArrayList correct = new System.Collections.ArrayList();
    foreach (int expectedId in results)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(correct, expectedId);
    }
    correct.Sort();

    // Actual ids, deduplicated and sorted the same way.
    System.Collections.ArrayList actual = new System.Collections.ArrayList();
    foreach (ScoreDoc hit in hits)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(actual, hit.doc);
    }
    actual.Sort();

    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    QueryUtils.Check(query, searcher);
}
/// <summary>
/// Verifies that normalized relevancy scores are unchanged by sorting, across a
/// local searcher ('full'), a remote-backed MultiSearcher and a two-way local
/// MultiSearcher, for three queries and eight different sort criteria.
/// </summary>
public virtual void TestNormalizedScores()
{
    // capture relevancy scores
    System.Collections.Hashtable scoresX = GetScores(full.Search(queryX));
    System.Collections.Hashtable scoresY = GetScores(full.Search(queryY));
    System.Collections.Hashtable scoresA = GetScores(full.Search(queryA));

    // we'll test searching locally, remote and multi
    // note: the multi test depends on each separate index containing
    // the same documents as our local index, so the computed normalization
    // will be the same.  so we make a multi searcher over two equal document
    // sets - not realistic, but necessary for testing.
    MultiSearcher remote = new MultiSearcher(new Lucene.Net.Search.Searchable[] { Remote });
    MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[] { full, full });

    // change sorting and make sure relevancy stays the same

    // sort criterion 1: default (relevance) sort
    sort = new Sort();
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 2: by document number
    sort.SetSort(SortField.FIELD_DOC);
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 3: by the "int" field
    sort.SetSort("int");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 4: by the "float" field
    sort.SetSort("float");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 5: by the "string" field
    sort.SetSort("string");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 6: by "int" then "float"
    sort.SetSort(new System.String[] { "int", "float" });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 7: by "int" descending, then doc number descending
    sort.SetSort(new SortField[] { new SortField("int", true), new SortField(null, SortField.DOC, true) });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort criterion 8: by "float" then "string"
    sort.SetSort(new System.String[] { "float", "string" });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));
}
/// <summary>
/// Searches for documents of the given <paramref name="type"/> using the
/// specified query, result limit and sort order.
/// </summary>
/// <param name="searcher">The Searcher to search on.</param>
/// <param name="type">The type name used to build the type filter.</param>
/// <param name="query">The Query which selects the documents.</param>
/// <param name="numResults">The maximum number of results to return.</param>
/// <param name="sort">A Sort object that defines how to sort the results.</param>
/// <returns>An instance of TopDocs.</returns>
public static TopDocs Search(this Searcher searcher, string type, Query query, int numResults, Sort sort)
{
    return searcher.Search(query, JsonMappingUtils.GetTypeFilter(type), numResults, sort);
}
/// <summary>
/// Searches for documents mapped from the given type using the specified query,
/// feeding every match into the supplied <see cref="Collector"/>.
/// </summary>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="type">
/// The type of the object to search documents for.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="results">
/// The Collector to use to gather results.
/// </param>
public static void Search(this Searcher searcher, string type, Query query, Collector results)
{
    var typeFilter = JsonMappingUtils.GetTypeFilter(type);
    searcher.Search(query, typeFilter, results);
}
/// <summary>
/// Searches for documents mapped from <typeparamref name="TObject"/> using the
/// specified query.
/// </summary>
/// <typeparam name="TObject">
/// The type of the object to search documents for.
/// </typeparam>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="numResults">
/// The number of results to return.
/// </param>
/// <returns>
/// An instance of TopDocs.
/// </returns>
public static TopDocs Search <TObject>(this Searcher searcher, Query query, int numResults)
{
    var typeFilter = ObjectMapping.GetTypeFilter <TObject>();
    return searcher.Search(query, typeFilter, numResults);
}
/// <summary> Perform synonym expansion on a query: each distinct query term is
/// added as a SHOULD clause together with every distinct synonym found for it
/// in the synonym index.
/// </summary>
/// <param name="query">the original query text</param>
/// <param name="syns">searcher over the synonym index</param>
/// <param name="a">analyzer used to tokenize the query</param>
/// <param name="field">field the expanded terms are created in</param>
/// <param name="boost">boost applied to each synonym clause; ignored when &lt;= 0</param>
public static Query Expand(System.String query, Searcher syns, Analyzer a, System.String field, float boost)
{
    System.Collections.Hashtable already = new System.Collections.Hashtable(); // avoid dups
    System.Collections.IList top = new System.Collections.ArrayList(); // needs to be separately listed..

    // [1] Parse query into separate words so that when we expand we can avoid dups
    TokenStream ts = a.TokenStream(field, new System.IO.StringReader(query));
    Lucene.Net.Analysis.Token t;
    while ((t = ts.Next()) != null)
    {
        System.String word = t.TermText();
        if (already.Contains(word) == false)
        {
            already.Add(word, word);
            top.Add(word);
        }
    }

    BooleanQuery tmp = new BooleanQuery();

    // [2] form query
    System.Collections.IEnumerator it = top.GetEnumerator();
    while (it.MoveNext())
    {
        // [2a] add the original term itself
        System.String word = (System.String) it.Current;
        TermQuery tq = new TermQuery(new Term(field, word));
        tmp.Add(tq, BooleanClause.Occur.SHOULD);

        // [2b] add in unique synonyms, looked up in the synonym index
        Hits hits = syns.Search(new TermQuery(new Term(Syns2Index.F_WORD, word)));
        for (int i = 0; i < hits.Length(); i++)
        {
            Document doc = hits.Doc(i);
            System.String[] values = doc.GetValues(Syns2Index.F_SYN);
            for (int j = 0; j < values.Length; j++)
            {
                System.String syn = values[j];
                if (already.Contains(syn) == false)
                {
                    already.Add(syn, syn);
                    tq = new TermQuery(new Term(field, syn));
                    if (boost > 0)
                        // else keep normal 1.0
                        tq.SetBoost(boost);
                    tmp.Add(tq, BooleanClause.Occur.SHOULD);
                }
            }
        }
    }
    return tmp;
}
/// <summary>
/// Searches for documents mapped from the given type using the specified query
/// and sort order.
/// </summary>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="type">
/// The type of the object to search documents for.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="numResults">
/// The number of results to return.
/// </param>
/// <param name="sort">
/// A Sort object that defines how to sort the results.
/// </param>
/// <returns>
/// An instance of TopDocs.
/// </returns>
public static TopDocs Search(this Searcher searcher, Type type, Query query, int numResults, Sort sort)
{
    var typeFilter = ObjectMapping.GetTypeFilter(type);
    return searcher.Search(query, typeFilter, numResults, sort);
}
/// <summary>
/// Perform synonym expansion on a query: each distinct query term is added as a
/// SHOULD clause, and a CollectorImpl is run against the synonym index for it.
/// </summary>
/// <param name="query">the original query text</param>
/// <param name="syns">searcher over the synonym index</param>
/// <param name="a">analyzer used to tokenize the query</param>
/// <param name="field">field the expanded terms are created in</param>
/// <param name="boost">boost passed through to the CollectorImpl</param>
public static Query Expand(String query, Searcher syns, Analyzer a, String field, float boost)
{
    // NOTE(review): 'already' and 'tmp' are assigned without declaration here,
    // so they appear to be fields of the enclosing type. That makes this method
    // non-reentrant and not thread-safe — confirm callers serialize access.
    already = new List<String>(); // avoid dups
    var top = new List<String>(); // needs to be separately listed..

    // [1] tokenize the query, collecting each distinct term once
    var ts = a.TokenStream(field, new StringReader(query));
    var termAtt = ts.AddAttribute<TermAttribute>();
    while (ts.IncrementToken())
    {
        var word = termAtt.Term;
        if (!already.Contains(word))
        {
            already.Add(word);
            top.Add(word);
        }
    }
    tmp = new BooleanQuery();

    // [2] form query
    System.Collections.IEnumerator it = top.GetEnumerator();
    while (it.MoveNext())
    {
        // [2a] add the original term itself
        var word = (String)it.Current;
        var tq = new TermQuery(new Term(field, word));
        tmp.Add(tq, Occur.SHOULD);

        // [2b] run the collector over the synonym index for this word;
        // presumably CollectorImpl adds synonym clauses to 'tmp' — confirm.
        var c = new CollectorImpl(field, boost);
        syns.Search(new TermQuery(new Term(Syns2Index.F_WORD, word)), c);
    }
    return tmp;
}
/// <summary>
/// Searches for documents mapped from <typeparamref name="TObject"/> using the
/// specified query and sort order, restricted to the given kind of type.
/// </summary>
/// <typeparam name="TObject">
/// The type of the object to search documents for.
/// </typeparam>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="kind">
/// The kind of type to restrict the search to.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="numResults">
/// The number of results to return.
/// </param>
/// <param name="sort">
/// A Sort object that defines how to sort the results.
/// </param>
/// <returns>
/// An instance of TopDocs.
/// </returns>
public static TopDocs Search <TObject>(this Searcher searcher, DocumentObjectTypeKind kind, Query query, int numResults, Sort sort)
{
    var typeFilter = ObjectMapping.GetTypeFilter <TObject>(kind);
    return searcher.Search(query, typeFilter, numResults, sort);
}
/// <summary>
/// Verifies that document scores are normalized consistently whether the documents
/// live in a single index or are spread across two indices searched through a
/// MultiSearcher: the same query must yield (approximately) the same scores.
/// </summary>
/// <param name="nDocs">Number of documents to put into each index.</param>
/// <param name="message">Assertion message identifying the calling scenario.</param>
private void TestNormalization(int nDocs, System.String message)
{
    Query query = new TermQuery(new Term("contents", "doc0"));

    RAMDirectory ramDirectory1;
    IndexSearcher indexSearcher1;
    ScoreDoc[] hits;

    ramDirectory1 = new MockRAMDirectory();

    // First put the documents in the same index
    InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
    InitIndex(ramDirectory1, nDocs, false, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...

    indexSearcher1 = new IndexSearcher(ramDirectory1, true);
    // Enable score tracking so ScoreDoc.Score is populated below.
    indexSearcher1.SetDefaultFieldSortScoring(true, true);

    hits = indexSearcher1.Search(query, null, 1000).ScoreDocs;

    Assert.AreEqual(2, hits.Length, message);

    // Store the scores for use later; the single-token doc must outrank the two-token doc.
    float[] scores = new float[] { hits[0].Score, hits[1].Score };

    Assert.IsTrue(scores[0] > scores[1], message);

    indexSearcher1.Close();
    ramDirectory1.Close();
    hits = null;

    RAMDirectory ramDirectory2;
    IndexSearcher indexSearcher2;

    ramDirectory1 = new MockRAMDirectory();
    ramDirectory2 = new MockRAMDirectory();

    // Now put the documents in a different index
    InitIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
    InitIndex(ramDirectory2, nDocs, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...

    indexSearcher1 = new IndexSearcher(ramDirectory1, true);
    indexSearcher1.SetDefaultFieldSortScoring(true, true);
    indexSearcher2 = new IndexSearcher(ramDirectory2, true);
    indexSearcher2.SetDefaultFieldSortScoring(true, true);

    Searcher searcher = GetMultiSearcherInstance(new Searcher[] { indexSearcher1, indexSearcher2 });

    hits = searcher.Search(query, null, 1000).ScoreDocs;

    Assert.AreEqual(2, hits.Length, message);

    // The scores should be the same (within reason)
    Assert.AreEqual(scores[0], hits[0].Score, 1e-6, message); // This will be a document from ramDirectory1
    Assert.AreEqual(scores[1], hits[1].Score, 1e-6, message); // This will be a document from ramDirectory2

    // Adding a Sort.RELEVANCE object should not change anything
    hits = searcher.Search(query, null, 1000, Sort.RELEVANCE).ScoreDocs;

    Assert.AreEqual(2, hits.Length, message);

    Assert.AreEqual(scores[0], hits[0].Score, 1e-6, message); // This will be a document from ramDirectory1
    Assert.AreEqual(scores[1], hits[1].Score, 1e-6, message); // This will be a document from ramDirectory2

    searcher.Close();

    ramDirectory1.Close();
    ramDirectory2.Close();
}
/// <summary>
/// Searches for documents mapped from the given type using the specified query
/// and streams every match into the supplied Collector.
/// </summary>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="type">
/// The type of the object to search documents for.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="results">
/// The Collector to use to gather results.
/// </param>
public static void Search(this Searcher searcher, Type type, Query query, Collector results)
{
    // Restrict hits to documents mapped from the given CLR type.
    var typeFilter = ObjectMapping.GetTypeFilter(type);
    searcher.Search(query, typeFilter, results);
}
/// <summary>
/// Searches the index for the English spelling of <paramref name="n"/> and prints
/// the total hit count plus the "id" field of up to the first three hits.
/// Used by the thread-safety test to exercise concurrent searching.
/// </summary>
/// <param name="n">The number whose English text is used as the query.</param>
/// <param name="searcher">The Searcher to run the query against.</param>
private void SearchFor(int n, Searcher searcher)
{
    System.Console.Out.WriteLine("Searching for " + n);
    string text = English.IntToEnglish(n);
    Hits matches = searcher.Search(QueryParsers.QueryParser.Parse(text, "contents", Lucene.Net.ThreadSafetyTest.ANALYZER));
    System.Console.Out.WriteLine("Search for " + n + ": total=" + matches.Length());

    // Show at most the first three hits.
    int shown = System.Math.Min(3, matches.Length());
    for (int hit = 0; hit < shown; hit++)
    {
        System.Console.Out.WriteLine("Hit for " + n + ": " + matches.Doc(hit).Get("id"));
    }
}
/// <summary>
/// Searches for documents mapped from the given type using the specified query
/// and streams every match into the supplied Collector.
/// </summary>
/// <typeparam name="TObject">
/// The type of the object to search documents for.
/// </typeparam>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="results">
/// The Collector to use to gather results.
/// </param>
public static void Search<TObject>(this Searcher searcher, Query query, Collector results)
{
    // Restrict hits to documents mapped from TObject.
    var typeFilter = ObjectMapping.GetTypeFilter<TObject>();
    searcher.Search(query, typeFilter, results);
}
/// <summary> Asserts that the explanation value for every document matching a
/// query corresponds with the true score. Optionally does "deep"
/// testing of the explanation details.
///
/// </summary>
/// <seealso cref="ExplanationAsserter">
/// </seealso>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaing the query in assertion messages
/// </param>
/// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed
/// </param>
public static void CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher, bool deep)
{
    // The asserter validates each hit's Explain() value against its actual score.
    var asserter = new ExplanationAsserter(query, defaultFieldName, searcher, deep);
    searcher.Search(query, asserter);
}
/// <summary>
/// Searches for documents mapped from the given type using the specified query
/// and streams every match into the supplied Collector.
/// </summary>
/// <typeparam name="TObject">
/// The type of the object to search documents for.
/// </typeparam>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="kind">
/// The kind of type to restrict the search to.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="results">
/// The Collector to use to gather results.
/// </param>
public static void Search<TObject>(this Searcher searcher, DocumentObjectTypeKind kind, Query query, Collector results)
{
    // Restrict hits to documents mapped from TObject with the requested type kind.
    var typeFilter = ObjectMapping.GetTypeFilter<TObject>(kind);
    searcher.Search(query, typeFilter, results);
}
/// <summary>
/// Asserts that a sorted search returns exactly the documents identified by
/// <paramref name="expectedResult"/>: one hit per character, where each hit's
/// "tracer" field values, concatenated in result order, must equal the string.
/// </summary>
/// <param name="searcher">The Searcher to run the query against.</param>
/// <param name="query">The query to execute.</param>
/// <param name="sort">The sort criteria applied to the search.</param>
/// <param name="expectedResult">Expected concatenation of the hits' "tracer" values.</param>
private void AssertMatches(Searcher searcher, Query query, Sort sort, System.String expectedResult)
{
    //ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
    TopDocs hits = searcher.Search(query, null, expectedResult.Length, sort);
    ScoreDoc[] result = hits.ScoreDocs;
    // Fixed: NUnit's Assert.AreEqual takes (expected, actual); the arguments were
    // previously swapped, which produced misleading failure messages.
    Assert.AreEqual(expectedResult.Length, hits.TotalHits);
    System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
    int n = result.Length;
    for (int i = 0; i < n; ++i)
    {
        // Append every "tracer" value of each hit, in result order.
        Document doc = searcher.Doc(result[i].Doc);
        System.String[] v = doc.GetValues("tracer");
        for (int j = 0; j < v.Length; ++j)
        {
            buff.Append(v[j]);
        }
    }
    Assert.AreEqual(expectedResult, buff.ToString());
}
/// <summary>
/// Searches for documents mapped from the given type using the specified query.
/// </summary>
/// <param name="searcher">
/// The Searcher to search on.
/// </param>
/// <param name="type">
/// The type of the object to search documents for.
/// </param>
/// <param name="kind">
/// The kind of type to restrict the search to.
/// </param>
/// <param name="query">
/// The Query which selects the documents.
/// </param>
/// <param name="numResults">
/// The number of results to return.
/// </param>
/// <returns>
/// An instance of TopDocs.
/// </returns>
public static TopDocs Search(this Searcher searcher, Type type, DocumentObjectTypeKind kind, Query query, int numResults)
{
    // Restrict the search to documents mapped from the given type and kind.
    var typeFilter = ObjectMapping.GetTypeFilter(type, kind);
    return searcher.Search(query, typeFilter, numResults);
}
/// <summary>
/// Searches for documents of the given JSON-mapped type using the specified query,
/// additionally restricted by the supplied filter query.
/// </summary>
/// <param name="searcher">The Searcher to search on.</param>
/// <param name="type">The JSON type name to restrict the search to.</param>
/// <param name="Filter">A Query used to further restrict the type filter (may be combined by JsonMappingUtils).</param>
/// <param name="query">The Query which selects the documents.</param>
/// <param name="numResults">The number of results to return.</param>
/// <returns>An instance of TopDocs.</returns>
public static TopDocs Search(this Searcher searcher, string type, Query Filter, Query query, int numResults)
{
    // Fixed: removed the dead local `Filter filter = new QueryWrapperFilter(query);`
    // — it was constructed but never used, and confusingly shadowed the Filter type.
    return searcher.Search(query, JsonMappingUtils.GetTypeFilter(type, Filter), numResults);
}
/// <summary>
/// This method uses a custom HitCollector implementation which simply prints out
/// the docId and score of every matching document.
///
/// This simulates the streaming search use case, where all hits are supposed to
/// be processed, regardless of their relevance.
/// </summary>
public static void DoStreamingSearch(Searcher searcher, Query query)
{
    // The anonymous collector prints each matching docId/score as it is collected.
    Collector printingCollector = new AnonymousClassCollector();
    searcher.Search(query, printingCollector);
}
/// <summary> This demonstrates a typical paging search scenario, where the search engine presents
/// pages of size n to the user. The user can then go to the next page if interested in
/// the next hits.
///
/// When the query is executed for the first time, then only enough results are collected
/// to fill 5 result pages. If the user wants to page beyond this limit, then the query
/// is executed another time and all hits are collected.
///
/// </summary>
/// <param name="input">Reader for interactive user commands (p/n/q/page number).</param>
/// <param name="searcher">The Searcher to run the query against.</param>
/// <param name="query">The query whose hits are paged through.</param>
/// <param name="hitsPerPage">Number of hits displayed per page.</param>
/// <param name="raw">If true, print raw docId/score instead of the stored "path"/"title" fields.</param>
/// <param name="interactive">If false, print only the first page and return.</param>
public static void DoPagingSearch(StreamReader input, Searcher searcher, Query query, int hitsPerPage, bool raw, bool interactive)
{
    // Collect enough docs to show 5 pages
    var collector = TopScoreDocCollector.Create(5 * hitsPerPage, false);
    searcher.Search(query, collector);
    var hits = collector.TopDocs().ScoreDocs;

    int numTotalHits = collector.TotalHits;
    Console.Out.WriteLine(numTotalHits + " total matching documents");

    // start/end delimit the half-open window of hits shown on the current page.
    int start = 0;
    int end = Math.Min(numTotalHits, hitsPerPage);

    while (true)
    {
        if (end > hits.Length)
        {
            // The user paged past what was collected; offer to re-run and collect everything.
            Console.Out.WriteLine("Only results 1 - " + hits.Length + " of " + numTotalHits + " total matching documents collected.");
            Console.Out.WriteLine("Collect more (y/n) ?");
            String line = input.ReadLine();
            if (String.IsNullOrEmpty(line) || line[0] == 'n')
            {
                break;
            }

            collector = TopScoreDocCollector.Create(numTotalHits, false);
            searcher.Search(query, collector);
            hits = collector.TopDocs().ScoreDocs;
        }

        end = Math.Min(hits.Length, start + hitsPerPage);

        // Display the current page.
        for (int i = start; i < end; i++)
        {
            if (raw)
            {
                // output raw format
                Console.Out.WriteLine("doc=" + hits[i].Doc + " score=" + hits[i].Score);
                continue;
            }

            Document doc = searcher.Doc(hits[i].Doc);
            String path = doc.Get("path");
            if (path != null)
            {
                Console.Out.WriteLine((i + 1) + ". " + path);
                String title = doc.Get("title");
                if (title != null)
                {
                    Console.Out.WriteLine("   Title: " + doc.Get("title"));
                }
            }
            else
            {
                Console.Out.WriteLine((i + 1) + ". " + "No path for this document");
            }
        }

        if (!interactive)
        {
            break;
        }

        if (numTotalHits >= end)
        {
            bool quit = false;
            // Prompt loop: repeat until the user gives a valid navigation command.
            while (true)
            {
                Console.Out.Write("Press ");
                if (start - hitsPerPage >= 0)
                {
                    Console.Out.Write("(p)revious page, ");
                }
                if (start + hitsPerPage < numTotalHits)
                {
                    Console.Out.Write("(n)ext page, ");
                }
                Console.Out.WriteLine("(q)uit or enter number to jump to a page.");

                String line = input.ReadLine();
                if (String.IsNullOrEmpty(line) || line[0] == 'q')
                {
                    quit = true;
                    break;
                }
                if (line[0] == 'p')
                {
                    start = Math.Max(0, start - hitsPerPage);
                    break;
                }
                else if (line[0] == 'n')
                {
                    if (start + hitsPerPage < numTotalHits)
                    {
                        start += hitsPerPage;
                    }
                    break;
                }
                else
                {
                    // Anything else is interpreted as a 1-based page number.
                    int page;
                    if (Int32.TryParse(line, out page))
                    {
                        if ((page - 1)*hitsPerPage < numTotalHits)
                        {
                            start = (page - 1)*hitsPerPage;
                            break;
                        }
                        else
                        {
                            Console.Out.WriteLine("No such page");
                        }
                    }
                    else
                    {
                        Console.Out.WriteLine("Unrecognized page number. Quitting.");
                        quit = true;
                        break;
                    }
                }
            }
            if (quit)
                break;
            end = Math.Min(numTotalHits, start + hitsPerPage);
        }
    }
}
/// <summary> Tests that a query matches the an expected set of documents using a
/// HitCollector.
///
/// <p>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </p>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaing the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query,HitCollector)">
/// </seealso>
/// <seealso cref="checkHits">
/// </seealso>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Expected document ids.
    System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
    foreach (int docId in results)
    {
        correct.Add(docId);
    }

    // The collector records each collected doc id as a key in this table.
    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    searcher.Search(query, new AnonymousClassHitCollector(actual));

    // Every collected id must be among the expected ones.
    foreach (object collectedId in actual.Keys)
    {
        Assert.Contains(collectedId, correct, query.ToString(defaultFieldName));
    }

    QueryUtils.Check(query, searcher);
}
/// <summary>
/// Builds a small RAMDirectory index of N_DOCS documents cycling through a fixed
/// set of texts, creates the searcher, parses a deliberately large query (so the
/// search takes measurable time), and runs it once to warm the searcher.
/// </summary>
public override void SetUp()
{
    base.SetUp();
    System.String[] docText = new System.String[]{"docThatNeverMatchesSoWeCanRequireLastDocCollectedToBeGreaterThanZero", "one blah three", "one foo three multiOne", "one foobar three multiThree", "blueberry pancakes", "blueberry pie", "blueberry strudel", "blueberry pizza"};
    Directory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);

    // Cycle through docText until N_DOCS documents have been added.
    for (int doc = 0; doc < N_DOCS; doc++)
    {
        Add(docText[doc % docText.Length], writer);
    }
    writer.Close();
    searcher = new IndexSearcher(directory);

    // Build "one <doc0 text> <doc1 text> ..." — a large query so that search will be longer.
    System.Text.StringBuilder qtxt = new System.Text.StringBuilder("one");
    foreach (System.String text in docText)
    {
        qtxt.Append(' ').Append(text);
    }
    QueryParser queryParser = new QueryParser(FIELD_NAME, new WhitespaceAnalyzer());
    query = queryParser.Parse(qtxt.ToString());

    // warm the searcher
    searcher.Search(query, null, 1000);
}
/// <summary> Tests that a query matches the an expected set of documents using Hits.
///
/// <p>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// </p>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaing the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query)">
/// </seealso>
/// <seealso cref="CheckHitCollector">
/// </seealso>
public static void CheckHits_Renamed(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Deep-check the query itself when possible.
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, (IndexSearcher) searcher);
    }

    Hits hits = searcher.Search(query);

    // Expected ids.
    System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
    foreach (int docId in results)
    {
        correct.Add(docId);
    }

    // Actual ids returned by the search.
    System.Collections.ArrayList actual = new System.Collections.ArrayList(hits.Length());
    for (int hit = 0; hit < hits.Length(); hit++)
    {
        actual.Add(hits.Id(hit));
    }

    // Compare as sorted multisets: same size, same elements in the same sorted order.
    Assert.AreEqual(correct.Count, actual.Count);
    correct.Sort();
    actual.Sort();
    for (int i = 0; i < correct.Count; i++)
    {
        Assert.AreEqual(correct[i], actual[i]);
    }

    QueryUtils.Check(query, searcher);
}
/// <summary>
/// Asserts that the documents returned by the sorted search match the expected
/// pattern: the hits' "tracer" field values, concatenated in result order, must
/// match the given regular expression.
/// </summary>
/// <param name="searcher">The Searcher to run the query against.</param>
/// <param name="query">The query to execute.</param>
/// <param name="sort">The sort criteria applied to the search.</param>
/// <param name="pattern">Regular expression the concatenated tracer values must match.</param>
private void AssertMatchesPattern(Searcher searcher, Query query, Sort sort, System.String pattern)
{
    Hits result = searcher.Search(query, sort);
    System.Text.StringBuilder buff = new System.Text.StringBuilder(10);
    int total = result.Length();
    for (int hit = 0; hit < total; hit++)
    {
        // Concatenate every "tracer" value of each hit, in result order.
        Document doc = result.Doc(hit);
        foreach (System.String tracer in doc.GetValues("tracer"))
        {
            buff.Append(tracer);
        }
    }
    // System.out.println ("matching \""+buff+"\" against pattern \""+pattern+"\"");
    System.Text.RegularExpressions.Regex regex = new System.Text.RegularExpressions.Regex(pattern);
    Assert.IsTrue(regex.IsMatch(buff.ToString()));
}
/// <summary> Asserts that the explanation value for every document matching a
/// query corresponds with the true score. Optionally does "deep"
/// testing of the explanation details.
///
/// </summary>
/// <seealso cref="ExplanationAsserter">
/// </seealso>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaing the query in assertion messages
/// </param>
/// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed
/// </param>
public static void CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher, bool deep)
{
    // Collecting through the asserter verifies Explain() against each hit's score.
    searcher.Search(
        query,
        new ExplanationAsserter(query, defaultFieldName, searcher, deep));
}
/// <summary> Checks to see if the hits are what we expected: same document ids in
/// order, scores equal within a small tolerance, and each hit's Explain() value
/// consistent with its score.
///
/// </summary>
/// <param name="s">the searcher to run the query against
/// </param>
/// <param name="query">the query to execute
/// </param>
/// <param name="description">the description of the search
/// </param>
/// <param name="expectedIds">the expected document ids of the hits
/// </param>
/// <param name="expectedScores">the expected scores of the hits
///
/// </param>
/// <throws> IOException </throws>
protected internal static void assertHits(Searcher s, Query query, System.String description, System.String[] expectedIds, float[] expectedScores)
{
    QueryUtils.Check(query, s);

    float tolerance = 1e-5f;

    // Hits hits = searcher.search(query);
    // hits normalizes and throws things off if one score is greater than 1.0
    TopDocs topdocs = s.Search(query, null, 10000);

    /***
    // display the hits
    System.out.println(hits.length() + " hits for search: \"" + description + '\"');
    for (int i = 0; i < hits.length(); i++)
    {
    System.out.println("  " + FIELD_ID + ':' + hits.doc(i).get(FIELD_ID) + " (score:" + hits.score(i) + ')');
    }
    *****/

    // did we get the hits we expected
    Assert.AreEqual(expectedIds.Length, topdocs.TotalHits);
    for (int i = 0; i < topdocs.TotalHits; i++)
    {
        //System.out.println(i + " exp: " + expectedIds[i]);
        //System.out.println(i + " field: " + hits.doc(i).get(FIELD_ID));

        int id = topdocs.ScoreDocs[i].Doc;
        float score = topdocs.ScoreDocs[i].Score;
        Document doc = s.Doc(id);
        // Hits must come back in the expected order (by stored FIELD_ID).
        Assert.AreEqual(expectedIds[i], doc.Get(FIELD_ID));
        // Print diagnostics (including the full explanation) before a score mismatch fails.
        bool scoreEq = System.Math.Abs(expectedScores[i] - score) < tolerance;
        if (!scoreEq)
        {
            System.Console.Out.WriteLine(i + " warning, expected score: " + expectedScores[i] + ", actual " + score);
            System.Console.Out.WriteLine(s.Explain(query, id));
        }
        Assert.AreEqual(expectedScores[i], score, tolerance);
        // The explanation's value must agree with the score it explains.
        Assert.AreEqual(s.Explain(query, id).Value, score, tolerance);
    }
}
/// <summary> Tests that a query matches the an expected set of documents using Hits.
///
/// <p/>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// <p/>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaing the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query)">
/// </seealso>
/// <seealso cref="checkHitCollector">
/// </seealso>
public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, searcher);
    }

    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;

    // Expected ids as a sorted set for order-insensitive comparison.
    SortedSet<int> correct = new SortedSet<int>(results);

    // Actual ids returned by the search.
    SortedSet<int> actual = new SortedSet<int>();
    foreach (ScoreDoc hit in hits)
    {
        actual.Add(hit.Doc);
    }

    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    QueryUtils.Check(query, searcher);
}