/// <summary>
/// Runs <paramref name="query"/> against the index and returns the "__key"
/// field of each hit within the requested page window.
/// An empty query string matches all documents.
/// </summary>
public string[] Query(string query, string[] sortFields, int start, int pageSize, out int totalResults)
{
    IndexSearcher searcher;
    using (GetSearcher(out searcher))
    {
        // Empty string means "everything"; otherwise parse with the Raven parser.
        Query parsedQuery;
        if (string.IsNullOrEmpty(query))
        {
            parsedQuery = new MatchAllDocsQuery();
        }
        else
        {
            parsedQuery = new RavenQueryParser(analyzer, NumericIndexFields).Parse(query);
        }

        // Ask Lucene for enough hits to cover the skipped pages plus this one.
        var topDocs = ExecuteQuery(searcher, sortFields, parsedQuery, pageSize + start);
        totalResults = topDocs.TotalHits;

        var keys = new List<string>();
        for (var i = start; i < pageSize + start && i < topDocs.TotalHits; i++)
        {
            keys.Add(searcher.Doc(topDocs.ScoreDocs[i].Doc).Get("__key"));
        }
        return keys.ToArray();
    }
}
// Scorer that matches every document in the index.
// 'count' is primed to -1 — presumably the iteration cursor, advanced before
// each read so the first document visited is doc 0; confirm against NextDoc().
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity) : base(similarity)
{
    InitBlock(enclosingInstance); // wire up the reference to the outer query
    this.reader = reader;
    count = - 1;
    maxDoc = reader.MaxDoc();
}
// Match-all scorer variant driven by a TermDocs enumeration.
// Every document receives the weight's constant value as its score; the norms
// array is stored for later use during scoring.
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms) : base(similarity)
{
    InitBlock(enclosingInstance); // wire up the reference to the outer query
    // TermDocs(null) enumerates all non-deleted documents in the index.
    this.termDocs = reader.TermDocs(null);
    score = w.Value;
    this.norms = norms;
}
// Match-all scorer variant that iterates doc ids directly.
// 'id' is primed one before the first document; 'maxId' is the last valid doc id.
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w) : base(similarity)
{
    InitBlock(enclosingInstance); // wire up the reference to the outer query
    this.reader = reader;
    id = - 1;
    maxId = reader.MaxDoc() - 1;
    score = w.GetValue(); // constant score applied to every matching doc
}
// Lucene 4.x-style match-all scorer: stores the liveDocs bitset (presumably
// consulted during iteration to skip deleted documents — confirm in NextDoc)
// and a single precomputed score applied to every document.
internal MatchAllScorer(MatchAllDocsQuery outerInstance, IndexReader reader, Bits liveDocs, Weight w, float score) : base(w)
{
    this.OuterInstance = outerInstance;
    this.LiveDocs = liveDocs;
    this.Score_Renamed = score;
    MaxDoc = reader.MaxDoc;
}
// Equality contract: two default MatchAllDocsQuery instances are equal, and
// changing the boost on one of them breaks that equality.
public virtual void TestEquals()
{
    Query first = new MatchAllDocsQuery();
    Query second = new MatchAllDocsQuery();
    Assert.IsTrue(first.Equals(second));

    // A differing boost must make the queries unequal.
    first.SetBoost(1.5f);
    Assert.IsFalse(first.Equals(second));
}
// Boosting a match-all query with a constant value source of 2.0 must
// exactly double the hit's score.
public virtual void TestBasic()
{
    Query matchAll = new MatchAllDocsQuery();

    TopDocs docs = @is.Search(matchAll, 10);
    assertEquals(1, docs.TotalHits);
    float baseScore = docs.ScoreDocs[0].Score;

    Query boosted = new BoostedQuery(matchAll, new ConstValueSource(2.0f));
    AssertHits(boosted, new float[] { baseScore * 2 });
}
// Asserts that two filters select exactly the same documents under a
// match-all query, then repeats the search to verify the cached path agrees.
private void AssertFilterEquals(Filter f1, Filter f2)
{
    Query matchAll = new MatchAllDocsQuery();

    TopDocs firstPass1 = @is.Search(matchAll, f1, Ir.MaxDoc());
    TopDocs firstPass2 = @is.Search(matchAll, f2, Ir.MaxDoc());
    Assert.AreEqual(firstPass1.TotalHits, firstPass2.TotalHits);
    CheckHits.CheckEqual(matchAll, firstPass1.ScoreDocs, firstPass2.ScoreDocs);

    // now do it again to confirm caching works
    TopDocs secondPass1 = @is.Search(matchAll, f1, Ir.MaxDoc());
    TopDocs secondPass2 = @is.Search(matchAll, f2, Ir.MaxDoc());
    Assert.AreEqual(secondPass1.TotalHits, secondPass2.TotalHits);
    CheckHits.CheckEqual(matchAll, secondPass1.ScoreDocs, secondPass2.ScoreDocs);
}
// MVC application start-up: registers areas/filters/routes, then checks the
// age of the Lucene index and rebuilds it when it is older than the configured
// number of days (or when no index exists yet).
protected void Application_Start()
{
    AreaRegistration.RegisterAllAreas();
    RegisterGlobalFilters(GlobalFilters.Filters);
    RegisterRoutes(RouteTable.Routes);

    CreateLuceneIndexFolder();

    // NOTE(review): Convert.ToInt32 throws on a missing or malformed setting,
    // which would abort application start-up — confirm the setting is always
    // present, or switch to int.TryParse with a sensible default.
    string rebuildEvery = ConfigurationManager.AppSettings["RebuildLuceneIndexEveryXDays"];
    int rebuildEveryXDays = Convert.ToInt32(rebuildEvery);

    string path = HttpContext.Current.Server.MapPath("~/App_Data/LuceneIndex");
    try
    {
        // Dispose the directory and searcher deterministically; the original
        // code leaked both (they were never closed).
        using (FSDirectory fsd = FSDirectory.Open(new DirectoryInfo(path)))
        using (var indexSearcher = new IndexSearcher(fsd, true))
        {
            // Fetch the single most relevant doc sorted by last-modified to
            // read the index's newest modification stamp.
            var termQuery = new MatchAllDocsQuery();
            var sort = new Sort(new SortField(LuceneIndexFieldMap.LastModifiedField, SortField.STRING));
            TopDocs topDocs = indexSearcher.Search(termQuery, null, 1, sort);

            DateTime lastUpdateDate = DateTime.MinValue;
            foreach (ScoreDoc match in topDocs.ScoreDocs)
            {
                Document doc = indexSearcher.Doc(match.doc);
                string lastUpdate = doc.Get(LuceneIndexFieldMap.LastModifiedField);
                if (lastUpdate != null)
                {
                    lastUpdateDate = DateTools.StringToDate(lastUpdate);
                    Debug.WriteLine(lastUpdate);
                }
            }

            TimeSpan ts = DateTime.Now - lastUpdateDate;
            if (ts.TotalDays > rebuildEveryXDays)
            {
                var generator = new DataGenerator(path);
                generator.WriteIndex();
            }
        }
    }
    catch (FileNotFoundException)
    {
        // No index exists yet: make sure the folder is there and build the
        // index from scratch. (The caught exception itself is not needed.)
        CreateLuceneIndexFolder();
        var generator = new DataGenerator(path);
        generator.WriteIndex();
    }
}
// Returns up to 25 topics: when the filter carries tags, every tag must match
// (boolean MUST clauses); with no tags, all topics are candidates.
public virtual IEnumerable<Topic> GetTopics(TopicFilter topicFilter)
{
    Query query;
    if (topicFilter.Tags == null || !topicFilter.Tags.Any())
    {
        // No tags requested — match everything.
        query = new MatchAllDocsQuery();
    }
    else
    {
        var tagQuery = new BooleanQuery();
        foreach (var tag in topicFilter.Tags)
        {
            tagQuery.Add(new TermQuery(new Term("Tags", tag)), Occur.MUST);
        }
        query = tagQuery;
    }
    return _dbProvider.GetRecords<Topic>(query, n : 25);
}
public virtual void TestSortWithoutFillFields()
{
    // There was previously a bug in TopFieldCollector when fillFields was set
    // to false - the same doc and score was set in ScoreDoc[] array. this test
    // asserts that if fillFields is false, the documents are set properly. It
    // does not use Searcher's default search methods (with Sort) since all set
    // fillFields to true.
    Sort[] sorts = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() };
    foreach (Sort sort in sorts)
    {
        Query matchAll = new MatchAllDocsQuery();
        TopDocsCollector<Entry> collector = TopFieldCollector.Create(sort, 10, false, false, false, true);
        @is.Search(matchAll, collector);

        // With fillFields == false, consecutive entries must still reference
        // distinct documents.
        ScoreDoc[] scoreDocs = collector.TopDocs().ScoreDocs;
        for (int j = 1; j < scoreDocs.Length; j++)
        {
            Assert.IsTrue(scoreDocs[j].Doc != scoreDocs[j - 1].Doc);
        }
    }
}
// FieldCacheTermsFilter behavior: the index holds terms 0,10,...,990, so a
// filter on "5" matches nothing, "10" matches one doc, and {"10","20"} two.
public virtual void TestMissingTerms()
{
    string fieldName = "field1";
    Directory rd = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), rd);
    for (int i = 0; i < 100; i++)
    {
        Document doc = new Document();
        int term = i * 10; //terms are units of 10;
        doc.Add(NewStringField(fieldName, "" + term, Field.Store.YES));
        w.AddDocument(doc);
    }
    IndexReader reader = w.Reader;
    w.Dispose();
    IndexSearcher searcher = NewSearcher(reader);
    int numDocs = reader.NumDocs;
    ScoreDoc[] results;
    MatchAllDocsQuery q = new MatchAllDocsQuery();

    // "5" is never indexed (all terms are multiples of 10) -> zero matches.
    List<string> terms = new List<string>();
    terms.Add("5");
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
    Assert.AreEqual(0, results.Length, "Must match nothing");

    terms = new List<string>();
    terms.Add("10");
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
    Assert.AreEqual(1, results.Length, "Must match 1");

    terms = new List<string>();
    terms.Add("10");
    terms.Add("20");
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, terms.ToArray()), numDocs).ScoreDocs;
    Assert.AreEqual(2, results.Length, "Must match 2");

    reader.Dispose();
    rd.Dispose();
}
// Legacy-API (pre-3.x) twin of the FieldCacheTermsFilter test above:
// terms 0,10,...,990 are indexed, so filters on "5", "10", {"10","20"}
// must match 0, 1, and 2 documents respectively.
public virtual void TestMissingTerms()
{
    System.String fieldName = "field1";
    MockRAMDirectory rd = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(rd, new KeywordAnalyzer(), MaxFieldLength.UNLIMITED);
    for (int i = 0; i < 100; i++)
    {
        Document doc = new Document();
        int term = i * 10; //terms are units of 10;
        doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.NOT_ANALYZED));
        w.AddDocument(doc);
    }
    w.Close();
    IndexReader reader = IndexReader.Open(rd, true); // true = read-only reader
    IndexSearcher searcher = new IndexSearcher(reader);
    int numDocs = reader.NumDocs();
    ScoreDoc[] results;
    MatchAllDocsQuery q = new MatchAllDocsQuery();

    // "5" is never indexed (all terms are multiples of 10) -> zero matches.
    System.Collections.ArrayList terms = new System.Collections.ArrayList();
    terms.Add("5");
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, (System.String[]) terms.ToArray(typeof(System.String))), numDocs).ScoreDocs;
    Assert.AreEqual(0, results.Length, "Must match nothing");

    terms = new System.Collections.ArrayList();
    terms.Add("10");
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, (System.String[])terms.ToArray(typeof(System.String))), numDocs).ScoreDocs;
    Assert.AreEqual(1, results.Length, "Must match 1");

    terms = new System.Collections.ArrayList();
    terms.Add("10");
    terms.Add("20");
    results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, (System.String[]) terms.ToArray(typeof(System.String))), numDocs).ScoreDocs;
    Assert.AreEqual(2, results.Length, "Must match 2");

    reader.Close();
    rd.Close();
}
// Weight for MatchAllDocsQuery; captures the searcher's Similarity so it is
// available when scorers are created.
public MatchAllDocsWeight(MatchAllDocsQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance); // wire up the reference to the outer query
    this.similarity = searcher.Similarity;
}
/// <summary>
/// Returns one page of index items for a site, optionally restricted by a
/// last-modified date window and/or a feature id. The date/feature criteria
/// are applied as a QueryWrapperFilter so they do not influence scoring.
/// <paramref name="totalHits"/> receives the total number of matching docs.
/// </summary>
public static IndexItemCollection Browse(
    int siteId,
    Guid featureGuid,
    DateTime modifiedBeginDate,
    DateTime modifiedEndDate,
    int pageNumber,
    int pageSize,
    out int totalHits)
{
    totalHits = 0;
    IndexItemCollection results = new IndexItemCollection();
    using (Lucene.Net.Store.Directory searchDirectory = GetDirectory(siteId))
    {
        Filter filter = null;
        BooleanQuery filterQuery = null;

        // Only build the date clause when the caller actually narrowed the window.
        if ((modifiedBeginDate.Date > DateTime.MinValue.Date) || (modifiedEndDate.Date < DateTime.MaxValue.Date))
        {
            filterQuery = new BooleanQuery(); // won't be used to score the results
            TermRangeQuery lastModifiedDateFilter = new TermRangeQuery(
                "LastModUtc",
                modifiedBeginDate.Date.ToString("s"),
                modifiedEndDate.Date.ToString("s"),
                true,
                true);
            filterQuery.Add(lastModifiedDateFilter, Occur.MUST);
        }

        if (featureGuid != Guid.Empty)
        {
            if (filterQuery == null) { filterQuery = new BooleanQuery(); }
            BooleanQuery featureFilter = new BooleanQuery();
            featureFilter.Add(new TermQuery(new Term("FeatureId", featureGuid.ToString())), Occur.MUST);
            filterQuery.Add(featureFilter, Occur.MUST);
        }

        if (filterQuery != null)
        {
            filter = new QueryWrapperFilter(filterQuery); // filterQuery won't affect result scores
        }

        MatchAllDocsQuery matchAllQuery = new MatchAllDocsQuery();
        using (IndexSearcher searcher = new IndexSearcher(searchDirectory))
        {
            int maxResults = int.MaxValue;
            TopDocs hits = searcher.Search(matchAllQuery, filter, maxResults);

            // Translate 1-based pageNumber into a hit offset, clamped to the
            // available hits so an out-of-range page yields an empty result.
            int startHit = 0;
            if (pageNumber > 1)
            {
                startHit = ((pageNumber - 1) * pageSize);
            }
            totalHits = hits.TotalHits;
            if (startHit > totalHits)
            {
                startHit = totalHits;
            }
            int end = startHit + pageSize;
            if (totalHits <= end)
            {
                end = totalHits;
            }

            int itemsAdded = 0;
            int itemsToAdd = end;
            for (int i = startHit; i < itemsToAdd; i++)
            {
                Document doc = searcher.Doc(hits.ScoreDocs[i].Doc);
                IndexItem indexItem = new IndexItem(doc, hits.ScoreDocs[i].Score);
                results.Add(indexItem);
                itemsAdded += 1;
            }

            results.ItemCount = itemsAdded;
            results.PageIndex = pageNumber;
            results.ExecutionTime = DateTime.Now.Ticks;
        }
    }
    return results;
}
/// <summary>
/// Returns up to <paramref name="maxItems"/> publicly visible index items
/// modified since <paramref name="modifiedSinceDate"/>, optionally limited to
/// the given features. All criteria go into a QueryWrapperFilter so scoring is
/// untouched. Note: every matching document is materialized and sorted before
/// the list is trimmed to maxItems.
/// </summary>
public static List<IndexItem> GetRecentModifiedContent(
    int siteId,
    Guid[] featureGuids,
    DateTime modifiedSinceDate,
    int maxItems)
{
    int totalHits = 0;
    List<IndexItem> results = new List<IndexItem>();
    using (Lucene.Net.Store.Directory searchDirectory = GetDirectory(siteId))
    {
        Filter filter = null;
        BooleanQuery filterQuery = new BooleanQuery(); // won't be used to score the results

        // Exclude items explicitly flagged out of "recent content".
        BooleanQuery excludeFilter = new BooleanQuery();
        excludeFilter.Add(new TermQuery(new Term("ExcludeFromRecentContent", "false")), Occur.MUST);
        filterQuery.Add(excludeFilter, Occur.MUST);

        // Last-modified must fall in [modifiedSinceDate, DateTime.MaxValue].
        TermRangeQuery lastModifiedDateFilter = new TermRangeQuery(
            "LastModUtc",
            modifiedSinceDate.Date.ToString("s"),
            DateTime.MaxValue.ToString("s"),
            true,
            true);
        filterQuery.Add(lastModifiedDateFilter, Occur.MUST);

        // we only want public content, that is both page and module roles must have "All Users"
        // which means even unauthenticated users
        Term pageRole = new Term("Role", "All Users");
        TermQuery pageRoleFilter = new TermQuery(pageRole);
        filterQuery.Add(pageRoleFilter, Occur.MUST);
        Term moduleRole = new Term("ModuleRole", "All Users");
        TermQuery moduleRoleFilter = new TermQuery(moduleRole);
        filterQuery.Add(moduleRoleFilter, Occur.MUST);

        // Feature restriction: any one of the given features may match (SHOULD
        // inside a MUST clause).
        if ((featureGuids != null) && (featureGuids.Length > 0))
        {
            BooleanQuery featureFilter = new BooleanQuery();
            foreach (Guid featureGuid in featureGuids)
            {
                featureFilter.Add(new TermQuery(new Term("FeatureId", featureGuid.ToString())), Occur.SHOULD);
            }
            filterQuery.Add(featureFilter, Occur.MUST);
        }

        filter = new QueryWrapperFilter(filterQuery); // filterQuery won't affect result scores
        MatchAllDocsQuery matchAllQuery = new MatchAllDocsQuery();
        using (IndexSearcher searcher = new IndexSearcher(searchDirectory))
        {
            int maxResults = int.MaxValue;
            TopDocs hits = searcher.Search(matchAllQuery, filter, maxResults);
            totalHits = hits.TotalHits;
            for (int i = 0; i < totalHits; i++)
            {
                Document doc = searcher.Doc(hits.ScoreDocs[i].Doc);
                IndexItem indexItem = new IndexItem(doc, hits.ScoreDocs[i].Score);
                results.Add(indexItem);
            }
        }
    }

    // sort all descending on lastmodutc
    results.Sort();
    if (results.Count <= maxItems)
    {
        return results;
    }
    else
    {
        // Trim to the newest maxItems entries.
        List<IndexItem> finalResults = new List<IndexItem>();
        for (int i = 0; i < maxItems; i++)
        {
            finalResults.Add(results[i]);
        }
        return finalResults;
    }
}
// Builds the Lucene query for the given query string: an empty string maps to
// MatchAllDocsQuery; anything else is parsed with an analyzer that each
// registered AnalyzerGenerator gets a chance to replace. Index triggers are
// applied to the result before returning.
private Query GetLuceneQuery(string query, IndexQuery indexQuery)
{
    Query luceneQuery;
    if (String.IsNullOrEmpty(query))
    {
        logQuerying.Debug("Issuing query on index {0} for all documents", parent.name);
        luceneQuery = new MatchAllDocsQuery();
    }
    else
    {
        logQuerying.Debug("Issuing query on index {0} for: {1}", parent.name, query);
        // Every analyzer created along the way registers a cleanup action in
        // toDispose so it is released even when query building throws.
        var toDispose = new List<Action>();
        RavenPerFieldAnalyzerWrapper searchAnalyzer = null;
        try
        {
            searchAnalyzer = parent.CreateAnalyzer(new LowerCaseKeywordAnalyzer(), toDispose, true);
            // Let each generator transform the analyzer in turn; when one
            // produces a replacement, the superseded analyzer is disposed.
            searchAnalyzer = parent.AnalyzerGenerators.Aggregate(searchAnalyzer, (currentAnalyzer, generator) =>
            {
                Analyzer newAnalyzer = generator.GenerateAnalyzerForQuerying(parent.name, indexQuery.Query, currentAnalyzer);
                if (newAnalyzer != currentAnalyzer)
                {
                    DisposeAnalyzerAndFriends(toDispose, currentAnalyzer);
                }
                return parent.CreateAnalyzer(newAnalyzer, toDispose, true);
            });
            luceneQuery = QueryBuilder.BuildQuery(query, indexQuery, searchAnalyzer);
        }
        finally
        {
            DisposeAnalyzerAndFriends(toDispose, searchAnalyzer);
        }
    }
    return ApplyIndexTriggers(luceneQuery);
}
/// <summary>
/// Searches the file index and returns the "__key" field of each hit within
/// the requested page window; <paramref name="totalResults"/> receives the
/// total hit count. An empty query string matches all files.
/// </summary>
public string[] Query(string query, string[] sortFields, int start, int pageSize, out int totalResults)
{
    IndexSearcher searcher;
    using (GetSearcher(out searcher))
    {
        Query fileQuery;
        if (string.IsNullOrEmpty(query))
        {
            Log.Debug("Issuing query on index for all files");
            fileQuery = new MatchAllDocsQuery();
        }
        else
        {
            Log.Debug("Issuing query on index for: {0}", query);
            fileQuery = new SimpleFilesQueryParser(analyzer).Parse(query);
        }

        // Request enough hits to cover the skipped pages plus this one.
        var topDocs = ExecuteQuery(searcher, sortFields, fileQuery, pageSize + start);
        totalResults = topDocs.TotalHits;

        var keys = new List<string>();
        for (var i = start; i < pageSize + start && i < topDocs.TotalHits; i++)
        {
            keys.Add(searcher.Doc(topDocs.ScoreDocs[i].Doc).Get("__key"));
        }
        return keys.ToArray();
    }
}
// Faceting on a field that does not exist in the index must produce zero
// total hits and an empty per-facet array.
public void Test6()
{
    var sfs = new SimpleFacetedSearch(_Reader, "nosuchfield");
    SimpleFacetedSearch.Hits hits = sfs.Search(new MatchAllDocsQuery());

    Assert.AreEqual(0, hits.TotalHitCount);
    Assert.AreEqual(0, hits.HitsPerFacet.Length);
}
// Stores the reference to the enclosing MatchAllDocsQuery (ports Java's
// implicit inner-class access to the outer instance).
private void InitBlock(MatchAllDocsQuery enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
}
// Exhaustive comparison of the new sorting implementation against the old one:
// every combination of (index segmentation) x (query selectivity) x (queue
// size) x (single/two-field sort, reversed or not) must produce identical
// TopDocs from both code paths.
public virtual void TestSort()
{
    this.r = NewRandom();
    // reverse & not
    // all types
    // restrictive & non restrictive searches (on contents)
    Create();

    // Build every single-field sort, in both directions.
    Sort[] sorts = new Sort[50];
    int sortCount = 0;
    for (int r = 0; r < 2; r++)
    {
        Sort sort;
        bool reverse = 1 == r;
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("byte", SortField.BYTE, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("short", SortField.SHORT, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("int", SortField.INT, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("long", SortField.LONG, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("float", SortField.FLOAT, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("double", SortField.DOUBLE, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("string", SortField.STRING_VAL, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField("stringIdx", SortField.STRING, reverse)});
        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD, reverse)});
        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL, reverse)});
        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM, reverse)});
        //sorts[sortCount++] = sort = new Sort();
        //sort.setSort(new SortField[] {new SortField("string", SortField.STRING_ORD_VAL_DEM2, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField(null, SortField.SCORE, reverse)});
        sorts[sortCount++] = sort = new Sort();
        sort.SetSort(new SortField[]{new SortField(null, SortField.DOC, reverse)});
    }

    // Queries with decreasing selectivity.
    Query[] queries = new Query[4];
    queries[0] = new MatchAllDocsQuery();
    queries[1] = new TermQuery(new Term("contents", "x")); // matches every 10th doc
    queries[2] = new TermQuery(new Term("contents", "y")); // matches every 100th doc
    queries[3] = new TermQuery(new Term("contents", "z")); // matches every 1000th doc

    for (int sx = 0; sx < 3; sx++)
    {
        IndexSearcher searcher;
        if (sx == 0)
        {
            searcher = searcherSingleSegment;
        }
        else if (sx == 1)
        {
            searcher = searcherFewSegment;
        }
        else
        {
            searcher = searcherMultiSegment;
        }

        for (int qx = 0; qx < queries.Length; qx++)
        {
            Query query = queries[qx];
            for (int q = 0; q < 3; q++)
            {
                int queueSize;
                if (q == 0)
                {
                    queueSize = 10;
                }
                else if (q == 1)
                {
                    queueSize = 100;
                }
                else
                {
                    queueSize = 1000;
                }

                for (int s = 0; s < sortCount; s++)
                {
                    Sort sort1 = sorts[s];
                    // s2 == -1 tests the single-field sort on its own; every
                    // other s2 appends a secondary sort field.
                    for (int s2 = - 1; s2 < sortCount; s2++)
                    {
                        Sort sort;
                        if (s2 == - 1)
                        {
                            // Single field sort
                            sort = sort1;
                        }
                        else
                        {
                            sort = new Sort(new SortField[]{sort1.GetSort()[0], sorts[s2].GetSort()[0]});
                        }

                        // Old
                        Sort oldSort = GetOldSort(sort);

                        if (VERBOSE)
                        {
                            System.Console.Out.WriteLine("query=" + query);
                            if (sx == 0)
                            {
                                System.Console.Out.WriteLine(" single-segment index");
                            }
                            else if (sx == 1)
                            {
                                System.Console.Out.WriteLine(" few-segment index");
                            }
                            else
                            {
                                System.Console.Out.WriteLine(" many-segment index");
                            }
                            System.Console.Out.WriteLine(" numHit=" + queueSize);
                            System.Console.Out.WriteLine(" old=" + oldSort);
                            System.Console.Out.WriteLine(" new=" + sort);
                        }

                        TopDocs newHits = searcher.Search(query, null, queueSize, sort);
                        TopDocs oldHits = searcher.Search(query, null, queueSize, oldSort);
                        Compare(oldHits, newHits);
                    }
                }
            }
        }
    }

    // we explicitly test the old sort method and
    // compare with the new, so we expect to see SUBREADER
    // sanity checks fail.
    Insanity[] insanity = FieldCacheSanityChecker.CheckSanity(Lucene.Net.Search.FieldCache_Fields.DEFAULT);
    try
    {
        // Reclassify the expected SUBREADER insanities; anything else left
        // over is a genuine field-cache misuse and fails the assertion.
        int ignored = 0;
        for (int i = 0; i < insanity.Length; i++)
        {
            if (insanity[i].GetType() == InsanityType.SUBREADER)
            {
                insanity[i] = new Insanity(InsanityType.EXPECTED, insanity[i].GetMsg(), insanity[i].GetCacheEntries());
                ignored++;
            }
        }
        Assert.AreEqual(ignored, insanity.Length, "Not all insane field cache usage was expected");
        insanity = null;
    }
    finally
    {
        // report this in the event of any exception/failure
        // if no failure, then insanity will be null
        if (null != insanity)
        {
            System.IO.StreamWriter temp_writer;
            temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardError(), System.Console.Error.Encoding);
            temp_writer.AutoFlush = true;
            DumpArray(GetTestLabel() + ": Insane FieldCache usage(s)", insanity, temp_writer);
        }
    }

    // we've already checked FieldCache, purge so tearDown doesn't complain
    PurgeFieldCache(Lucene.Net.Search.FieldCache_Fields.DEFAULT);
    // so
    Close();
}
// Faceting on the "category" field over a match-all query must see all
// seven documents in the fixture index.
public void Test5()
{
    var sfs = new SimpleFacetedSearch(_Reader, "category");
    SimpleFacetedSearch.Hits hits = sfs.Search(new MatchAllDocsQuery());

    Assert.AreEqual(7, hits.TotalHitCount);
}
// Legacy-API MatchAllDocsQuery test: verifies match-all behavior with and
// without norms-based scoring, norm changes, boolean combinations that force
// skipTo(), deletions, and round-tripping the query through its ToString().
public virtual void TestQuery()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    iw.SetMaxBufferedDocs(2); // force multi-segment
    AddDoc("one", iw, 1f);
    AddDoc("two", iw, 20f);
    AddDoc("three four", iw, 300f);
    iw.Close();

    IndexReader ir = IndexReader.Open(dir);
    IndexSearcher is_Renamed = new IndexSearcher(ir);
    ScoreDoc[] hits;

    // assert with norms scoring turned off: hits come back in doc order.
    hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "one");
    Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "two");
    Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "three four");

    // assert with norms scoring turned on: highest boost ("three four") first.
    MatchAllDocsQuery normsQuery = new MatchAllDocsQuery("key");
    hits = is_Renamed.Search(normsQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "three four");
    Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "two");
    Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "one");

    // change norm & retest: doc 0 ("one") now outranks the others.
    ir.SetNorm(0, "key", 400f);
    normsQuery = new MatchAllDocsQuery("key");
    hits = is_Renamed.Search(normsQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "one");
    Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "three four");
    Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "two");

    // some artificial queries to trigger the use of skipTo():
    BooleanQuery bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
    bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
    hits = is_Renamed.Search(bq, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);

    bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
    bq.Add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
    hits = is_Renamed.Search(bq, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);

    // delete a document:
    is_Renamed.GetIndexReader().DeleteDocument(0);
    hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    // test parsable toString()
    QueryParser qp = new QueryParser("key", analyzer);
    hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    // test parsable toString() with non default boost
    Query maq = new MatchAllDocsQuery();
    maq.SetBoost(2.3f);
    Query pq = qp.Parse(maq.ToString());
    hits = is_Renamed.Search(pq, null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    is_Renamed.Close();
    ir.Close();
    dir.Close();
}
public virtual void TestSortWithScoreAndMaxScoreTracking()
{
    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sorts = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() };
    foreach (Sort sort in sorts)
    {
        Query matchAll = new MatchAllDocsQuery();
        TopFieldCollector collector = TopFieldCollector.Create(sort, 10, true, true, true, true);
        full.Search(matchAll, collector);

        // Score tracking was enabled, so every hit's score and the max score
        // must be real numbers, never NaN.
        TopDocs topDocs = collector.TopDocs();
        foreach (ScoreDoc hit in topDocs.ScoreDocs)
        {
            Assert.IsTrue(!System.Single.IsNaN(hit.Score));
        }
        Assert.IsTrue(!System.Single.IsNaN(topDocs.GetMaxScore()));
    }
}
// Randomized equivalence check: for a randomly chosen indexed term, a
// MatchAllDocsQuery combined with a TermFilter must return exactly the same
// documents, in the same order, as a plain TermQuery on that term.
public void TestRandom()
{
    Directory dir = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), dir);
    int num = AtLeast(100);
    var terms = new List<Term>();
    // One doc per term; each doc uses its own field name "field<i>".
    for (int i = 0; i < num; i++)
    {
        string field = @"field" + i;
        string str = TestUtil.RandomRealisticUnicodeString(Random());
        terms.Add(new Term(field, str));
        Document doc = new Document();
        doc.Add(NewStringField(field, str, Field.Store.NO));
        w.AddDocument(doc);
    }
    IndexReader reader = w.Reader;
    w.Dispose();
    IndexSearcher searcher = NewSearcher(reader);
    int numQueries = AtLeast(10);
    for (int i = 0; i < numQueries; i++)
    {
        Term term = terms[Random().nextInt(num)];
        TopDocs queryResult = searcher.Search(new TermQuery(term), reader.MaxDoc);
        MatchAllDocsQuery matchAll = new MatchAllDocsQuery();
        TermFilter filter = TermFilter(term);
        TopDocs filterResult = searcher.Search(matchAll, filter, reader.MaxDoc);
        assertEquals(filterResult.TotalHits, queryResult.TotalHits);
        // The filtered match-all must reproduce the TermQuery hits doc-for-doc.
        ScoreDoc[] scoreDocs = filterResult.ScoreDocs;
        for (int j = 0; j < scoreDocs.Length; j++)
        {
            assertEquals(scoreDocs[j].Doc, queryResult.ScoreDocs[j].Doc);
        }
    }
    reader.Dispose();
    dir.Dispose();
}
// An empty search string must be translated into a match-all query.
public void EmptyQueryMatchesAllDocuments()
{
    // arrange
    var expected = new MatchAllDocsQuery();

    // act
    var actual = NuGetQuery.MakeQuery(string.Empty);

    // assert
    Assert.Equal(expected, actual);
}
// Combines the accumulated clauses and filters into the final Lucene query:
// - no clauses, no filters  -> match all documents
// - only filters            -> the filter clauses become the query itself
// - clauses (+/- filters)   -> boolean query, optionally wrapped in a
//                              FilteredQuery so the filters don't affect scores
private Query CreateQuery()
{
    CreatePendingClause(); // flush any clause still being built
    var booleanQuery = new BooleanQuery();
    Query resultQuery = booleanQuery;
    if (_clauses.Count == 0)
    {
        if (_filters.Count > 0)
        {
            // only filters applied => transform to a boolean query
            foreach (var clause in _filters)
            {
                booleanQuery.Add(clause);
            }
            resultQuery = booleanQuery;
        }
        else
        {
            // search all documents, without filter or clause
            resultQuery = new MatchAllDocsQuery(null);
        }
    }
    else
    {
        foreach (var clause in _clauses)
            booleanQuery.Add(clause);
        if (_filters.Count > 0)
        {
            var filter = new BooleanQuery();
            foreach (var clause in _filters)
                filter.Add(clause);
            var queryFilter = new QueryWrapperFilter(filter);
            resultQuery = new FilteredQuery(booleanQuery, queryFilter);
        }
    }
    Logger.Debug("New search query: {0}", resultQuery.ToString());
    return resultQuery;
}
// A boosted MatchAllDocsQuery must still match all four fixture documents.
public virtual void TestMA2()
{
    Query matchAll = new MatchAllDocsQuery();
    matchAll.Boost = 1000;
    Qtest(matchAll, new int[] { 0, 1, 2, 3 });
}
// Weight for MatchAllDocsQuery; variant that keeps the searcher itself rather
// than capturing its Similarity up front.
public MatchAllDocsWeight(MatchAllDocsQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance); // wire up the reference to the outer query
    this.searcher = searcher;
}
// Runs a match-all query through the custom collector under test and returns
// the collector so callers can inspect its TopDocs.
private TopDocsCollector doSearch(int numResults)
{
    Query q = new MatchAllDocsQuery();
    IndexSearcher searcher = new IndexSearcher(dir);
    try
    {
        TopDocsCollector tdc = new MyTopsDocCollector(numResults);
        searcher.Search(q, tdc);
        return tdc;
    }
    finally
    {
        // Close even when Search throws; the original skipped Close() on
        // failure and leaked the searcher.
        searcher.Close();
    }
}
// Lucene 3.x-API MatchAllDocsQuery test: verifies match-all behavior with and
// without norms-based scoring, norm changes, boolean combinations that force
// skipTo(), deletions, and round-tripping the query through its ToString().
public virtual void TestQuery()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
    iw.SetMaxBufferedDocs(2); // force multi-segment
    AddDoc("one", iw, 1f);
    AddDoc("two", iw, 20f);
    AddDoc("three four", iw, 300f);
    iw.Close();

    IndexReader ir = IndexReader.Open(dir, false); // writable reader: SetNorm/Delete below
    IndexSearcher is_Renamed = new IndexSearcher(ir);
    ScoreDoc[] hits;

    // assert with norms scoring turned off: hits come back in doc order.
    hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "one");
    Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "two");
    Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "three four");

    // assert with norms scoring turned on: highest boost ("three four") first.
    MatchAllDocsQuery normsQuery = new MatchAllDocsQuery("key");
    hits = is_Renamed.Search(normsQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "three four");
    Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "two");
    Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "one");

    // change norm & retest: doc 0 ("one") now outranks the others.
    ir.SetNorm(0, "key", 400f);
    normsQuery = new MatchAllDocsQuery("key");
    hits = is_Renamed.Search(normsQuery, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);
    Assert.AreEqual(ir.Document(hits[0].Doc).Get("key"), "one");
    Assert.AreEqual(ir.Document(hits[1].Doc).Get("key"), "three four");
    Assert.AreEqual(ir.Document(hits[2].Doc).Get("key"), "two");

    // some artificial queries to trigger the use of skipTo():
    BooleanQuery bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), Occur.MUST);
    bq.Add(new MatchAllDocsQuery(), Occur.MUST);
    hits = is_Renamed.Search(bq, null, 1000).ScoreDocs;
    Assert.AreEqual(3, hits.Length);

    bq = new BooleanQuery();
    bq.Add(new MatchAllDocsQuery(), Occur.MUST);
    bq.Add(new TermQuery(new Term("key", "three")), Occur.MUST);
    hits = is_Renamed.Search(bq, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);

    // delete a document:
    is_Renamed.IndexReader.DeleteDocument(0);
    hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    // test parsable toString()
    QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "key", analyzer);
    hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    // test parsable toString() with non default boost
    Query maq = new MatchAllDocsQuery();
    maq.Boost = 2.3f;
    Query pq = qp.Parse(maq.ToString());
    hits = is_Renamed.Search(pq, null, 1000).ScoreDocs;
    Assert.AreEqual(2, hits.Length);

    is_Renamed.Close();
    ir.Close();
    dir.Close();
}