/// <summary>
/// Verifies QueryFilter behavior against the published remote index: filtering
/// by a term present in the index must yield the matching document, while
/// filtering by a non-existent term must yield zero hits.
/// </summary>
public virtual void TestQueryFilter()
{
    // try to search the published index
    Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[] { Remote };
    Searcher searcher = new MultiSearcher(searchables);

    Hits hits = searcher.Search(new TermQuery(new Term("test", "text")),
                                new QueryFilter(new TermQuery(new Term("test", "test"))));
    // Fix: the positive case was previously computed but never asserted,
    // leaving half of the test silently unverified.
    Assert.AreEqual(1, hits.Length());

    Hits nohits = searcher.Search(new TermQuery(new Term("test", "text")),
                                  new QueryFilter(new TermQuery(new Term("test", "non-existent-term"))));
    Assert.AreEqual(0, nohits.Length());
}
// Exercises a remoted searcher over TCP with a sorted field query and records
// any failure for the test harness: an unexpected hit count is reported as a
// SupportClassException, and LUCENENET_100_testFinished signals completion
// whether or not the search succeeded.
void LUCENENET_100_ClientSearch()
{
    try
    {
        var remote = (Lucene.Net.Search.Searchable)Activator.GetObject(
            typeof(Lucene.Net.Search.Searchable),
            @"tcp://localhost:" + ANYPORT + "/Searcher");

        var multiSearcher = new Lucene.Net.Search.MultiSearcher(
            new Lucene.Net.Search.Searchable[] { remote });

        var moonQuery = new Lucene.Net.Search.TermQuery(
            new Lucene.Net.Index.Term("field1", "moon"));

        var intSort = new Lucene.Net.Search.Sort();
        intSort.SetSort(new Lucene.Net.Search.SortField("field2", Lucene.Net.Search.SortField.INT));

        Lucene.Net.Search.TopDocs topDocs = multiSearcher.Search(moonQuery, null, 100, intSort);
        if (topDocs.ScoreDocs.Length != 2)
        {
            LUCENENET_100_Exception = new SupportClassException("Test_Search_FieldDoc Error. ");
        }
    }
    catch (SupportClassException ex)
    {
        LUCENENET_100_Exception = ex;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        LUCENENET_100_testFinished = true;
    }
}
// Ensures that when the same custom Similarity is installed on both an
// IndexSearcher and a MultiSearcher wrapping it, both report the same
// maximum score for the same query.
public virtual void TestCustomSimilarity()
{
    RAMDirectory dir = new RAMDirectory();
    InitIndex(dir, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...

    IndexSearcher single = new IndexSearcher(dir, true);
    MultiSearcher multi = GetMultiSearcherInstance(new Searcher[] { single });

    Similarity customSimilarity = new AnonymousClassDefaultSimilarity(this);
    single.Similarity = customSimilarity;
    multi.Similarity = customSimilarity;

    Query query = new TermQuery(new Term("contents", "doc0"));

    // Score reported by the plain IndexSearcher...
    float singleScore = single.Search(query, null, 1).MaxScore;
    // ...and by the MultiSearcher delegating to the same index.
    float multiScore = multi.Search(query, null, 1).MaxScore;

    // With an identical similarity, both search paths must normalize identically.
    Assert.AreEqual(singleScore, multiScore, 1e-6, "MultiSearcher score must be equal to single searcher score!");
}
/// <summary>
/// Searches all configured index directories for completions of <paramref name="text"/>.
/// Directories that fail to open are logged and skipped, as are individual documents
/// that cannot be rehydrated. Returns an ordered result, or an empty result for blank
/// input or an unparsable query.
/// </summary>
/// <param name="text">The (possibly partial) input to autocomplete.</param>
/// <param name="includeExplanation">When true, attaches the Lucene score explanation to each result.</param>
public AutoCompletionResult Autocomplete(string text, bool includeExplanation = false)
{
    if (string.IsNullOrWhiteSpace(text)) return AutoCompletionResult.NoResult(text);

    var searchers = _directoryFactory.GetAllDirectories()
        .Select(d =>
            {
                try
                {
                    return new IndexSearcher(d, true);
                }
                catch (Exception e)
                {
                    // A single unreadable directory should not break autocompletion.
                    _log.Error(e, "While searching directory {0}", d);
                    return null;
                }
            })
        .Where(s => s != null)
        .ToArray();

    // NOTE(review): assumes disposing the MultiSearcher also releases the child
    // searchers (Lucene's MultiSearcher closes its sub-searchers on Close) — confirm.
    using (var searcher = new MultiSearcher(searchers))
    {
        try
        {
            BooleanQuery query = GetQueryForText(text);
            var results = searcher.Search(query, 10);
            var commands = results.ScoreDocs
                .Select(d =>
                    {
                        var document = searcher.Doc(d.Doc);
                        try
                        {
                            Explanation explanation = null;
                            if (includeExplanation)
                            {
                                explanation = searcher.Explain(query, d.Doc);
                            }
                            var coreDoc = CoreDocument.Rehydrate(document);
                            var command = _converterRepository.FromDocumentToItem(coreDoc);
                            return new AutoCompletionResult.CommandResult(command, coreDoc.GetDocumentId(), explanation);
                        }
                        catch (Exception e)
                        {
                            // Skip documents that cannot be converted instead of failing the whole query.
                            _log.Error(e,
                                       "Error getting command result for document {0}:{1}",
                                       document.GetField(SpecialFields.ConverterId).StringValue,
                                       document.GetField(SpecialFields.Id).StringValue);
                            return null;
                        }
                    })
                .Where(r => r != null)
                // Fix: materialize inside the using-block. The query was previously
                // returned as a deferred enumerable; enumerating it after disposal
                // would hit a closed searcher.
                .ToList();
            return AutoCompletionResult.OrderedResult(text, commands);
        }
        catch (ParseException e)
        {
            _log.Error(e, "Error parsing '{0}'", text);
            return AutoCompletionResult.NoResult(text);
        }
    }
}
// Runs the given query against the published remote index and verifies that
// exactly one document matches and that its stored "test" field holds the
// expected text.
private static void Search(Query query)
{
    Searcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[] { Remote });
    Hits result = searcher.Search(query);

    Assert.AreEqual(1, result.Length());
    Assert.AreEqual("test text", result.Doc(0).Get("test"));
}
// Searches the published remote index and asserts the single expected hit
// carries the expected stored field value.
private static void Search(Query query)
{
    // try to search the published index
    Lucene.Net.Search.Searchable[] remotes = { Remote };
    Searcher multi = new MultiSearcher(remotes);
    Hits hits = multi.Search(query);

    Assert.AreEqual(1, hits.Length());
    Assert.AreEqual("test text", hits.Doc(0).Get("test"));
}
/// <summary>
/// Verifies that Doc(n, FieldSelector) on a MultiSearcher loads only the
/// selected stored fields, for hits coming from either underlying index.
/// </summary>
public virtual void TestFieldSelector()
{
    RAMDirectory ramDirectory1, ramDirectory2;
    IndexSearcher indexSearcher1, indexSearcher2;
    ramDirectory1 = new RAMDirectory();
    ramDirectory2 = new RAMDirectory();
    Query query = new TermQuery(new Term("contents", "doc0"));
    // Now put the documents in a different index
    InitIndex(ramDirectory1, 10, true, null); // documents with a single token "doc0", "doc1", etc...
    InitIndex(ramDirectory2, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc...
    indexSearcher1 = new IndexSearcher(ramDirectory1, true);
    indexSearcher2 = new IndexSearcher(ramDirectory2, true);
    MultiSearcher searcher = GetMultiSearcherInstance(new Searcher[] { indexSearcher1, indexSearcher2 });
    Assert.IsTrue(searcher != null, "searcher is null and it shouldn't be");
    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    // "doc0" exists once in each index, so the MultiSearcher sees two hits.
    Assert.IsTrue(hits.Length == 2, hits.Length + " does not equal: " + 2);
    // Without a selector the full document — both stored fields — is loaded.
    Document document = searcher.Doc(hits[0].Doc);
    Assert.IsTrue(document != null, "document is null and it shouldn't be");
    Assert.IsTrue(document.GetFields().Count == 2, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 2);
    //Should be one document from each directory
    //they both have two fields, contents and other
    ISet<string> ftl = Support.Compatibility.SetFactory.CreateHashSet<string>();
    ftl.Add("other");
    SetBasedFieldSelector fs = new SetBasedFieldSelector(ftl, Support.Compatibility.SetFactory.CreateHashSet<string>());
    // Selector loads only "other": "contents" must come back null.
    document = searcher.Doc(hits[0].Doc, fs);
    Assert.IsTrue(document != null, "document is null and it shouldn't be");
    Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
    System.String value_Renamed = document.Get("contents");
    Assert.IsTrue(value_Renamed == null, "value is not null and it should be");
    value_Renamed = document.Get("other");
    Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
    // Flip the selector to "contents" and check the second hit symmetrically.
    ftl.Clear();
    ftl.Add("contents");
    fs = new SetBasedFieldSelector(ftl, Support.Compatibility.SetFactory.CreateHashSet<string>());
    document = searcher.Doc(hits[1].Doc, fs);
    value_Renamed = document.Get("contents");
    Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
    value_Renamed = document.Get("other");
    Assert.IsTrue(value_Renamed == null, "value is not null and it should be");
}
// Connects to the remoted searcher over TCP and runs a sorted search. The
// result itself is deliberately not inspected — this variant only verifies
// the remote round trip completes without throwing. Any exception is recorded
// for the harness, and LUCENENET_100_testFinished signals completion.
void LUCENENET_100_ClientSearch()
{
    try
    {
        var remote = (Lucene.Net.Search.Searchable)Activator.GetObject(
            typeof(Lucene.Net.Search.Searchable),
            @"tcp://localhost:" + ANYPORT + "/Searcher");

        var multiSearcher = new Lucene.Net.Search.MultiSearcher(
            new Lucene.Net.Search.Searchable[] { remote });

        var moonQuery = new Lucene.Net.Search.TermQuery(
            new Lucene.Net.Index.Term("field1", "moon"));

        var sortByField2 = new Lucene.Net.Search.Sort();
        sortByField2.SetSort(new Lucene.Net.Search.SortField("field2", Lucene.Net.Search.SortField.INT));

        Lucene.Net.Search.TopDocs topDocs = multiSearcher.Search(moonQuery, null, 100, sortByField2);
    }
    catch (Exception ex)
    {
        LUCENENET_100_Exception = ex;
    }
    finally
    {
        LUCENENET_100_testFinished = true;
    }
}
/// <summary>
/// Exercises a MultiSearcher whose first sub-index is (1) completely empty,
/// (2) holds a single document, and (3) has had that document deleted again —
/// verifying search counts, Doc() retrieval and SubSearcher() mapping in each case.
/// </summary>
public virtual void TestEmptyIndex()
{
    // creating two directories for indices
    Directory indexStoreA = new MockRAMDirectory();
    Directory indexStoreB = new MockRAMDirectory();
    // creating a document to store
    Document lDoc = new Document();
    lDoc.Add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.ANALYZED));
    lDoc.Add(new Field("id", "doc1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    lDoc.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    // creating a document to store
    Document lDoc2 = new Document();
    lDoc2.Add(new Field("fulltext", "in a galaxy far far away.....", Field.Store.YES, Field.Index.ANALYZED));
    lDoc2.Add(new Field("id", "doc2", Field.Store.YES, Field.Index.NOT_ANALYZED));
    lDoc2.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    // creating a document to store
    Document lDoc3 = new Document();
    lDoc3.Add(new Field("fulltext", "a bizarre bug manifested itself....", Field.Store.YES, Field.Index.ANALYZED));
    lDoc3.Add(new Field("id", "doc3", Field.Store.YES, Field.Index.NOT_ANALYZED));
    lDoc3.Add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    // creating an index writer for the first index
    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    // creating an index writer for the second index, but writing nothing
    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

    //--------------------------------------------------------------------
    // scenario 1: index B is completely empty
    //--------------------------------------------------------------------
    // writing the documents to the first index
    writerA.AddDocument(lDoc);
    writerA.AddDocument(lDoc2);
    writerA.AddDocument(lDoc3);
    writerA.Optimize();
    writerA.Close();
    // closing the second index
    writerB.Close();
    // creating the query
    QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Query query = parser.Parse("handle:1");
    // building the searchables
    Searcher[] searchers = new Searcher[2];
    // VITAL STEP: adding the searcher for the empty index first, before the searcher for the populated index
    searchers[0] = new IndexSearcher(indexStoreB, true);
    searchers[1] = new IndexSearcher(indexStoreA, true);
    // creating the multiSearcher
    Searcher mSearcher = GetMultiSearcherInstance(searchers);
    // performing the search; all three docs live in index A
    ScoreDoc[] hits = mSearcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    // iterating over the hit documents; retrieval must not throw
    for (int i = 0; i < hits.Length; i++)
    {
        mSearcher.Doc(hits[i].Doc);
    }
    mSearcher.Close();

    //--------------------------------------------------------------------
    // scenario 2: index B now holds one document (a copy of doc1)
    //--------------------------------------------------------------------
    // adding one document to the empty index
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
    writerB.AddDocument(lDoc);
    writerB.Optimize();
    writerB.Close();
    // building the searchables
    Searcher[] searchers2 = new Searcher[2];
    // VITAL STEP: adding the searcher for the (formerly) empty index first
    searchers2[0] = new IndexSearcher(indexStoreB, true);
    searchers2[1] = new IndexSearcher(indexStoreA, true);
    // creating the multiSearcher
    MultiSearcher mSearcher2 = GetMultiSearcherInstance(searchers2);
    // performing the same search; now four matching docs in total
    ScoreDoc[] hits2 = mSearcher2.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, hits2.Length);
    // iterating over the hit documents
    for (int i = 0; i < hits2.Length; i++)
    {
        // no exception should happen at this point
        mSearcher2.Doc(hits2[i].Doc);
    }
    // test the subSearcher() method: doc1 exists in both sub-indexes
    Query subSearcherQuery = parser.Parse("id:doc1");
    hits2 = mSearcher2.Search(subSearcherQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits2.Length);
    Assert.AreEqual(0, mSearcher2.SubSearcher(hits2[0].Doc)); // hit from searchers2[0]
    Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[1].Doc)); // hit from searchers2[1]
    subSearcherQuery = parser.Parse("id:doc2");
    hits2 = mSearcher2.Search(subSearcherQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits2.Length);
    Assert.AreEqual(1, mSearcher2.SubSearcher(hits2[0].Doc)); // hit from searchers2[1]
    mSearcher2.Close();

    //--------------------------------------------------------------------
    // scenario 3: the document added to B is deleted again
    //--------------------------------------------------------------------
    // deleting the document just added, this will cause a different exception to take place
    Term term = new Term("id", "doc1");
    IndexReader readerB = IndexReader.Open(indexStoreB, false);
    readerB.DeleteDocuments(term);
    readerB.Close();
    // optimizing the index with the writer
    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
    writerB.Optimize();
    writerB.Close();
    // building the searchables
    Searcher[] searchers3 = new Searcher[2];
    searchers3[0] = new IndexSearcher(indexStoreB, true);
    searchers3[1] = new IndexSearcher(indexStoreA, true);
    // creating the multiSearcher
    Searcher mSearcher3 = GetMultiSearcherInstance(searchers3);
    // performing the same search; back to the original three hits
    ScoreDoc[] hits3 = mSearcher3.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits3.Length);
    // iterating over the hit documents
    for (int i = 0; i < hits3.Length; i++)
    {
        mSearcher3.Doc(hits3[i].Doc);
    }
    mSearcher3.Close();
    indexStoreA.Close();
    indexStoreB.Close();
}
// Legacy .NET Remoting setup kept for reference (commented out):
//private static Lucene.Net.Search.Searchable GetRemote()
//{
//    try
//    {
//        return LookupRemote();
//    }
//    catch (System.Exception)
//    {
//        StartServer();
//        return LookupRemote();
//    }
//}
//private static Lucene.Net.Search.Searchable LookupRemote()
//{
//    return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), "http://" + "//localhost/Searchable");
//}
//private static void StartServer()
//{
//    // construct an index
//    RAMDirectory indexStore = new RAMDirectory();
//    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
//    Document doc = new Document();
//    doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
//    doc.Add(new Field("type", "A", Field.Store.YES, Field.Index.TOKENIZED));
//    doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
//    writer.AddDocument(doc);
//    // Need a second document to search for
//    doc = new Document();
//    doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
//    doc.Add(new Field("type", "B", Field.Store.YES, Field.Index.TOKENIZED));
//    doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
//    writer.AddDocument(doc);
//    writer.Optimize();
//    writer.Close();
//    try
//    {
//        System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099), false);
//    }
//    catch (System.Net.Sockets.SocketException ex)
//    {
//        if (ex.ErrorCode == 10048)
//            return; // EADDRINUSE?
//        throw ex;
//    }
//    // publish it
//    Lucene.Net.Search.Searchable local = new IndexSearcher(indexStore);
//    RemoteSearchable impl = new RemoteSearchable(local);
//    System.Runtime.Remoting.RemotingServices.Marshal(impl, "Searchable");
//}

/// <summary>
/// Runs the query+filter against the remote index and verifies that exactly
/// one document matches, that the hit at <paramref name="hitNumber"/> has the
/// expected "type" field, and that it carries exactly three stored fields.
/// </summary>
private static void Search(Query query, Filter filter, int hitNumber, System.String typeValue)
{
    Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[] { GetRemote() };
    Searcher searcher = new MultiSearcher(searchables);
    Hits result = searcher.Search(query, filter);
    Assert.AreEqual(1, result.Length());
    Document document = result.Doc(hitNumber);
    Assert.IsTrue(document != null, "document is null and it shouldn't be");
    Assert.AreEqual(typeValue, document.Get("type"));
    Assert.IsTrue(document.GetFields().Count == 3, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 3);
}
// Legacy .NET Remoting setup kept for reference (commented out):
//private Lucene.Net.Search.Searchable GetRemote()
//{
//    try
//    {
//        return LookupRemote();
//    }
//    catch (System.Exception)
//    {
//        StartServer();
//        return LookupRemote();
//    }
//}
//private Lucene.Net.Search.Searchable LookupRemote()
//{
//    return (Lucene.Net.Search.Searchable) Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"http://localhost:1099/Searchable");
//}
//[SetUp]
//public void StartServer()
//{
//    try
//    {
//        System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Http.HttpChannel(1099), false);
//    }
//    catch (System.Net.Sockets.SocketException ex)
//    {
//        if (ex.ErrorCode == 10048)
//            return; // EADDRINUSE?
//        throw ex;
//    }
//    // construct an index
//    RAMDirectory indexStore = new RAMDirectory();
//    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
//    Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
//    doc.Add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
//    doc.Add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));
//    writer.AddDocument(doc);
//    writer.Optimize();
//    writer.Close();
//    // publish it
//    Lucene.Net.Search.Searchable local = new IndexSearcher(indexStore);
//    RemoteSearchable impl = new RemoteSearchable(local);
//    System.Runtime.Remoting.RemotingServices.Marshal(impl, "Searchable");
//}

/// <summary>
/// Runs the query against the remote index, verifies the single expected hit
/// and its stored fields, then checks that field selectors restrict which
/// stored fields are loaded from the remote document.
/// </summary>
private void Search(Query query)
{
    // try to search the published index
    Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[] { GetRemote() };
    Searcher searcher = new MultiSearcher(searchables);
    Hits result = searcher.Search(query);
    Assert.AreEqual(1, result.Length());
    Document document = result.Doc(0);
    Assert.IsTrue(document != null, "document is null and it shouldn't be");
    Assert.AreEqual(document.Get("test"), "test text");
    // The full document has both stored fields: "test" and "other".
    Assert.IsTrue(document.GetFields().Count == 2, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 2);
    // A set-based selector loading only "other" must yield a single field.
    System.Collections.Hashtable ftl = new System.Collections.Hashtable();
    ftl.Add("other", "other");
    FieldSelector fs = new SetBasedFieldSelector(ftl, new System.Collections.Hashtable());
    document = searcher.Doc(0, fs);
    Assert.IsTrue(document != null, "document is null and it shouldn't be");
    Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
    // A map-based selector for "other" must behave the same way.
    fs = new MapFieldSelector(new System.String[] { "other" });
    document = searcher.Doc(0, fs);
    Assert.IsTrue(document != null, "document is null and it shouldn't be");
    Assert.IsTrue(document.GetFields().Count == 1, "document.getFields() Size: " + document.GetFields().Count + " is not: " + 1);
}
/// <summary>
/// Verifies that sorting does not change relevancy scores: scores captured
/// from an unsorted search must match those from every sorted search variant,
/// whether run locally ("full"), via the remote searchable, or through a
/// MultiSearcher over duplicated indexes.
/// </summary>
public virtual void TestNormalizedScores()
{
    // capture relevancy scores
    System.Collections.Hashtable scoresX = GetScores(full.Search(queryX));
    System.Collections.Hashtable scoresY = GetScores(full.Search(queryY));
    System.Collections.Hashtable scoresA = GetScores(full.Search(queryA));
    // we'll test searching locally, remote and multi
    // note: the multi test depends on each separate index containing
    // the same documents as our local index, so the computed normalization
    // will be the same. so we make a multi searcher over two equal document
    // sets - not realistic, but necessary for testing.
    MultiSearcher remote = new MultiSearcher(new Lucene.Net.Search.Searchable[] { Remote });
    MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[] { full, full });
    // change sorting and make sure relevancy stays the same

    // default relevance sort
    sort = new Sort();
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by index (document) order
    sort.SetSort(SortField.FIELD_DOC);
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by int field
    sort.SetSort("int");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by float field
    sort.SetSort("float");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by string field
    sort.SetSort("string");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // compound sort: int, then float
    sort.SetSort(new System.String[] { "int", "float" });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // reversed int, then reversed doc order
    sort.SetSort(new SortField[] { new SortField("int", true), new SortField(null, SortField.DOC, true) });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // compound sort: float, then string
    sort.SetSort(new System.String[] { "float", "string" });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));
}
/// <summary>
/// Verifies QueryFilter behavior against the published remote index: filtering
/// by a term present in the index must yield the matching document, while
/// filtering by a non-existent term must yield zero hits.
/// </summary>
public virtual void TestQueryFilter()
{
    // try to search the published index
    Lucene.Net.Search.Searchable[] searchables = new Lucene.Net.Search.Searchable[] { Remote };
    Searcher searcher = new MultiSearcher(searchables);

    Hits hits = searcher.Search(new TermQuery(new Term("test", "text")),
                                new QueryFilter(new TermQuery(new Term("test", "test"))));
    // Fix: the positive case was previously computed but never asserted,
    // leaving half of the test silently unverified.
    Assert.AreEqual(1, hits.Length());

    Hits nohits = searcher.Search(new TermQuery(new Term("test", "text")),
                                  new QueryFilter(new TermQuery(new Term("test", "non-existent-term"))));
    Assert.AreEqual(0, nohits.Length());
}
// Reproduces a SpanRegexQuery rewrite bug in MultiSearcher: the regex query is
// rewritten against the first sub-searcher only, so matches in the second
// index are missed and the expected hit count is not reached.
public void TestSpanRegexBug()
{
    CreateRamDirectories();

    SpanRegexQuery firstRegex = new SpanRegexQuery(new Term("field", "a.*"));
    SpanRegexQuery secondRegex = new SpanRegexQuery(new Term("field", "b.*"));
    SpanNearQuery nearQuery = new SpanNearQuery(new SpanQuery[] { firstRegex, secondRegex }, 6, true);

    // 1. Search the same store which works
    IndexSearcher searcherA = new IndexSearcher(indexStoreA, true);
    IndexSearcher searcherB = new IndexSearcher(indexStoreB, true);
    MultiSearcher multi = new MultiSearcher(new IndexSearcher[] { searcherA, searcherB });

    int numHits = multi.Search(nearQuery, null, 1000).TotalHits;

    searcherA.Close();
    searcherB.Close();

    // Will fail here
    // We expect 2 but only one matched
    // The rewriter function only write it once on the first IndexSearcher
    // So it's using term: a1 b1 to search on the second IndexSearcher
    // As a result, it won't match the document in the second IndexSearcher
    Assert.AreEqual(2, numHits);

    indexStoreA.Close();
    indexStoreB.Close();
}
/// <summary>
/// Verifies that sorting does not change relevancy scores: scores captured
/// from an unsorted search must match those from every sorted search variant,
/// whether run locally ("full"), via the remote searchable, or through a
/// MultiSearcher over duplicated indexes.
/// </summary>
public virtual void TestNormalizedScores()
{
    // capture relevancy scores
    System.Collections.Hashtable scoresX = GetScores(full.Search(queryX));
    System.Collections.Hashtable scoresY = GetScores(full.Search(queryY));
    System.Collections.Hashtable scoresA = GetScores(full.Search(queryA));
    // we'll test searching locally, remote and multi
    // note: the multi test depends on each separate index containing
    // the same documents as our local index, so the computed normalization
    // will be the same. so we make a multi searcher over two equal document
    // sets - not realistic, but necessary for testing.
    MultiSearcher remote = new MultiSearcher(new Lucene.Net.Search.Searchable[] { Remote });
    MultiSearcher multi = new MultiSearcher(new Lucene.Net.Search.Searchable[] { full, full });
    // change sorting and make sure relevancy stays the same

    // default relevance sort
    sort = new Sort();
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by index (document) order
    sort.SetSort(SortField.FIELD_DOC);
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by int field
    sort.SetSort("int");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by float field
    sort.SetSort("float");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // sort by string field
    sort.SetSort("string");
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // compound sort: int, then float
    sort.SetSort(new System.String[] { "int", "float" });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // reversed int, then reversed doc order
    sort.SetSort(new SortField[] { new SortField("int", true), new SortField(null, SortField.DOC, true) });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));

    // compound sort: float, then string
    sort.SetSort(new System.String[] { "float", "string" });
    AssertSameValues(scoresX, GetScores(full.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(remote.Search(queryX, sort)));
    AssertSameValues(scoresX, GetScores(multi.Search(queryX, sort)));
    AssertSameValues(scoresY, GetScores(full.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(remote.Search(queryY, sort)));
    AssertSameValues(scoresY, GetScores(multi.Search(queryY, sort)));
    AssertSameValues(scoresA, GetScores(full.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(remote.Search(queryA, sort)));
    AssertSameValues(scoresA, GetScores(multi.Search(queryA, sort)));
}
// A ConstantScoreQuery wrapping a QueryFilter should locate the single
// matching document in the published remote index.
public virtual void TestConstantScoreQuery()
{
    // try to search the published index
    Searcher searcher = new MultiSearcher(new Lucene.Net.Search.Searchable[] { GetRemote() });

    var filter = new QueryFilter(new TermQuery(new Term("test", "test")));
    Hits hits = searcher.Search(new ConstantScoreQuery(filter));

    Assert.AreEqual(1, hits.Length());
}
public virtual void TestNormalizedScores()
{
    // Capture the baseline relevancy scores for each query with no sort applied.
    System.Collections.Hashtable xScores = GetScores(full.Search(queryX, null, 1000).ScoreDocs, full);
    System.Collections.Hashtable yScores = GetScores(full.Search(queryY, null, 1000).ScoreDocs, full);
    System.Collections.Hashtable aScores = GetScores(full.Search(queryA, null, 1000).ScoreDocs, full);

    // Search both directly and through a MultiSearcher; scores must agree either way.
    MultiSearcher multiSearcher = new MultiSearcher(new Searchable[] { searchX, searchY });

    // Changing the sort criteria must never change the relevancy scores.
    sort = new Sort();
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort(SortField.FIELD_DOC);
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort("int");
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort("float");
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort("string");
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort(new System.String[] { "int", "float" });
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort(new SortField[] { new SortField("int", true), new SortField(null, SortField.DOC, true) });
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);

    sort.SetSort(new System.String[] { "float", "string" });
    AssertScoresUnaffectedBySort(multiSearcher, xScores, yScores, aScores);
}

// Asserts that the captured unsorted scores equal the scores produced under the
// current value of the 'sort' field, both for the local searcher ('full') and
// for the given MultiSearcher, for all three test queries.
private void AssertScoresUnaffectedBySort(MultiSearcher multiSearcher, System.Collections.Hashtable xScores, System.Collections.Hashtable yScores, System.Collections.Hashtable aScores)
{
    AssertSameValues(xScores, GetScores(full.Search(queryX, null, 1000, sort).ScoreDocs, full));
    AssertSameValues(xScores, GetScores(multiSearcher.Search(queryX, null, 1000, sort).ScoreDocs, multiSearcher));
    AssertSameValues(yScores, GetScores(full.Search(queryY, null, 1000, sort).ScoreDocs, full));
    AssertSameValues(yScores, GetScores(multiSearcher.Search(queryY, null, 1000, sort).ScoreDocs, multiSearcher));
    AssertSameValues(aScores, GetScores(full.Search(queryA, null, 1000, sort).ScoreDocs, full));
    AssertSameValues(aScores, GetScores(multiSearcher.Search(queryA, null, 1000, sort).ScoreDocs, multiSearcher));
}
void LUCENENET_100_ClientSearch()
{
    try
    {
        // Connect to the searcher published over .NET Remoting on the test port.
        Lucene.Net.Search.Searchable remote =
            (Lucene.Net.Search.Searchable)Activator.GetObject(
                typeof(Lucene.Net.Search.Searchable),
                @"tcp://localhost:" + ANYPORT + "/Searcher");
        Lucene.Net.Search.MultiSearcher searcher =
            new Lucene.Net.Search.MultiSearcher(new Lucene.Net.Search.Searchable[] { remote });

        // Query "field1:moon" sorted numerically by "field2"; the fixture index
        // is expected to contain exactly two matching documents.
        Lucene.Net.Search.Query query =
            new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("field1", "moon"));
        Lucene.Net.Search.Sort sort = new Lucene.Net.Search.Sort();
        sort.SetSort(new Lucene.Net.Search.SortField("field2", Lucene.Net.Search.SortField.INT));

        Lucene.Net.Search.TopDocs hits = searcher.Search(query, null, 100, sort);
        if (hits.ScoreDocs.Length != 2)
        {
            LUCENENET_100_Exception = new Exception("Test_Search_FieldDoc Error. ");
        }
    }
    catch (Exception ex)
    {
        // Surface any failure to the main test thread through the shared field.
        LUCENENET_100_Exception = ex;
    }
    finally
    {
        // Signal the waiting test that this worker thread has finished.
        LUCENENET_100_testFinished = true;
    }
}
public virtual void TestTermQueryMultiSearcherExplain()
{
    // Two separate indices: A receives two documents, B receives one.
    // All three documents carry the same analyzed "handle" field value "1 2".
    Directory indexStoreA = new MockRAMDirectory();
    Directory indexStoreB = new MockRAMDirectory();

    Document docA1 = new Document();
    docA1.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
    Document docA2 = new Document();
    docA2.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
    Document docB1 = new Document();
    docB1.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));

    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writerA.AddDocument(docA1);
    writerA.AddDocument(docA2);
    writerA.Optimize();
    writerA.Close();
    writerB.AddDocument(docB1);
    writerB.Close();

    QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
    Query query = parser.Parse("handle:1");

    Searcher[] searchers = new Searcher[2];
    searchers[0] = new IndexSearcher(indexStoreB, true);
    searchers[1] = new IndexSearcher(indexStoreA, true);
    Searcher mSearcher = new MultiSearcher(searchers);

    // Term query: the explanation must reflect the combined statistics of both sub-indices.
    ScoreDoc[] hits = mSearcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Explanation explain = mSearcher.Explain(query, hits[0].Doc);
    System.String exp = explain.ToString(0);
    Assert.IsTrue(exp.IndexOf("maxDocs=3") > -1, exp);
    Assert.IsTrue(exp.IndexOf("docFreq=3") > -1, exp);

    // Phrase query against the merged searcher.
    query = parser.Parse("handle:\"1 2\"");
    hits = mSearcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    explain = mSearcher.Explain(query, hits[0].Doc);
    exp = explain.ToString(0);
    Assert.IsTrue(exp.IndexOf("1=3") > -1, exp);
    Assert.IsTrue(exp.IndexOf("2=3") > -1, exp);

    // Span-near query against the merged searcher.
    query = new SpanNearQuery(new SpanQuery[] { new SpanTermQuery(new Term("handle", "1")), new SpanTermQuery(new Term("handle", "2")) }, 0, true);
    hits = mSearcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    explain = mSearcher.Explain(query, hits[0].Doc);
    exp = explain.ToString(0);
    Assert.IsTrue(exp.IndexOf("1=3") > -1, exp);
    Assert.IsTrue(exp.IndexOf("2=3") > -1, exp);

    mSearcher.Close();
}
public virtual void TestTermQueryMultiSearcherExplain()
{
    // Two separate indices: A receives two documents, B receives one.
    // All three documents carry the same analyzed "handle" field value "1 2".
    // NOTE: this variant uses the legacy (unversioned) analyzer/parser API and
    // the lowercase scoreDocs/doc members of that API generation.
    Directory indexStoreA = new MockRAMDirectory();
    Directory indexStoreB = new MockRAMDirectory();

    Document docA1 = new Document();
    docA1.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
    Document docA2 = new Document();
    docA2.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
    Document docB1 = new Document();
    docB1.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));

    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    writerA.AddDocument(docA1);
    writerA.AddDocument(docA2);
    writerA.Optimize();
    writerA.Close();
    writerB.AddDocument(docB1);
    writerB.Close();

    QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer());
    Query query = parser.Parse("handle:1");

    Searcher[] searchers = new Searcher[2];
    searchers[0] = new IndexSearcher(indexStoreB);
    searchers[1] = new IndexSearcher(indexStoreA);
    Searcher mSearcher = new MultiSearcher(searchers);

    // Term query: the explanation must reflect the combined statistics of both sub-indices.
    ScoreDoc[] hits = mSearcher.Search(query, null, 1000).scoreDocs;
    Assert.AreEqual(3, hits.Length);
    Explanation explain = mSearcher.Explain(query, hits[0].doc);
    System.String exp = explain.ToString(0);
    Assert.IsTrue(exp.IndexOf("maxDocs=3") > -1, exp);
    Assert.IsTrue(exp.IndexOf("docFreq=3") > -1, exp);

    // Phrase query against the merged searcher.
    query = parser.Parse("handle:\"1 2\"");
    hits = mSearcher.Search(query, null, 1000).scoreDocs;
    Assert.AreEqual(3, hits.Length);
    explain = mSearcher.Explain(query, hits[0].doc);
    exp = explain.ToString(0);
    Assert.IsTrue(exp.IndexOf("1=3") > -1, exp);
    Assert.IsTrue(exp.IndexOf("2=3") > -1, exp);

    // Span-near query against the merged searcher.
    query = new SpanNearQuery(new SpanQuery[] { new SpanTermQuery(new Term("handle", "1")), new SpanTermQuery(new Term("handle", "2")) }, 0, true);
    hits = mSearcher.Search(query, null, 1000).scoreDocs;
    Assert.AreEqual(3, hits.Length);
    explain = mSearcher.Explain(query, hits[0].doc);
    exp = explain.ToString(0);
    Assert.IsTrue(exp.IndexOf("1=3") > -1, exp);
    Assert.IsTrue(exp.IndexOf("2=3") > -1, exp);

    mSearcher.Close();
}