public virtual void TestConstantScoreQueryAndFilter()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);

    // Index two single-term docs: field:a and field:b.
    Document doc = new Document();
    doc.Add(NewStringField("field", "a", Field.Store.NO));
    writer.AddDocument(doc);
    doc = new Document();
    doc.Add(NewStringField("field", "b", Field.Store.NO));
    writer.AddDocument(doc);

    IndexReader reader = writer.Reader;
    writer.Dispose();

    Filter filterB = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "b"))));
    Query query = new ConstantScoreQuery(filterB);
    IndexSearcher searcher = NewSearcher(reader);

    // Query over filter field:b, combined with filter field:b -> one hit.
    Assert.AreEqual(1, searcher.Search(query, filterB, 1).TotalHits);

    Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));
    query = new ConstantScoreQuery(filterA);

    // Query over filter field:a, combined with filter field:b -> disjoint, no hit.
    Assert.AreEqual(0, searcher.Search(query, filterB, 1).TotalHits);

    reader.Dispose();
    dir.Dispose();
}
public virtual Query GetQuery(XmlElement e)
{
    // The single (mandatory) child element describes the filter to wrap.
    XmlElement filterElem = DOMUtils.GetFirstChildOrFail(e);
    Filter filter = filterFactory.GetFilter(filterElem);

    // Wrap it so every match gets the same score, scaled by an optional boost.
    Query result = new ConstantScoreQuery(filter);
    result.Boost = DOMUtils.GetAttribute(e, "boost", 1.0f);
    return result;
}
public virtual void Test1()
{
    BooleanQuery bq = new BooleanQuery();

    // Required sloppy phrase "w1 w2"~1.
    PhraseQuery phrase = new PhraseQuery();
    phrase.Slop = 1;
    phrase.Add(new Term(FIELD, "w1"));
    phrase.Add(new Term(FIELD, "w2"));
    bq.Add(phrase, Occur.MUST);

    bq.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true), Occur.SHOULD);
    bq.Add(Snear(Sf("w3", 2), St("w2"), St("w3"), 5, true), Occur.SHOULD);

    // Heavily boosted filtered term query.
    Query extra = new FilteredQuery(new TermQuery(new Term(FIELD, "xx")), new ItemizedFilter(new int[] { 1, 3 }));
    extra.Boost = 1000;
    bq.Add(extra, Occur.SHOULD);

    // Boosted constant-score clause over an itemized filter.
    extra = new ConstantScoreQuery(new ItemizedFilter(new int[] { 0, 2 }));
    extra.Boost = 30;
    bq.Add(extra, Occur.SHOULD);

    // Disjunction-max over spans, a term, and two nested boolean clauses.
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(0.2f);
    dmq.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true));
    dmq.Add(new TermQuery(new Term(FIELD, "QQ")));

    BooleanQuery xxYYZZ = new BooleanQuery();
    xxYYZZ.Add(new TermQuery(new Term(FIELD, "xx")), Occur.SHOULD);
    xxYYZZ.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
    xxYYZZ.Add(new TermQuery(new Term(FIELD, "zz")), Occur.MUST_NOT);
    dmq.Add(xxYYZZ);

    BooleanQuery xxW1 = new BooleanQuery();
    xxW1.Add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST_NOT);
    xxW1.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST_NOT);
    dmq.Add(xxW1);

    DisjunctionMaxQuery nestedDmq = new DisjunctionMaxQuery(0.5f);
    nestedDmq.Add(new TermQuery(new Term(FIELD, "w1")));
    nestedDmq.Add(new TermQuery(new Term(FIELD, "w2")));
    nestedDmq.Add(new TermQuery(new Term(FIELD, "w3")));
    dmq.Add(nestedDmq);

    bq.Add(dmq, Occur.SHOULD);

    // At least two of the three near-spans must match.
    BooleanQuery atLeastTwo = new BooleanQuery();
    atLeastTwo.MinimumNumberShouldMatch = 2;
    atLeastTwo.Add(Snear("w1", "w2", 1, true), Occur.SHOULD);
    atLeastTwo.Add(Snear("w2", "w3", 1, true), Occur.SHOULD);
    atLeastTwo.Add(Snear("w1", "w3", 3, true), Occur.SHOULD);
    bq.Add(atLeastTwo, Occur.SHOULD);

    Qtest(bq, new int[] { 0, 1, 2 });
}
public virtual void TestCSQ()
{
    // Two query-backed constant-score queries and one filter-backed one.
    Query q1 = new ConstantScoreQuery(new TermQuery(new Term("a", "b")));
    Query q2 = new ConstantScoreQuery(new TermQuery(new Term("a", "c")));
    Query q3 = new ConstantScoreQuery(TermRangeFilter.NewStringRange("a", "b", "c", true, true));

    QueryUtils.Check(q1);
    QueryUtils.Check(q2);

    // Each query is equal to itself...
    QueryUtils.CheckEqual(q1, q1);
    QueryUtils.CheckEqual(q2, q2);
    QueryUtils.CheckEqual(q3, q3);

    // ...and unequal to every other, including the raw (unwrapped) term query.
    QueryUtils.CheckUnequal(q1, q2);
    QueryUtils.CheckUnequal(q2, q3);
    QueryUtils.CheckUnequal(q1, q3);
    QueryUtils.CheckUnequal(q1, new TermQuery(new Term("a", "b")));
}
public virtual void Test1()
{
    BooleanQuery bq = new BooleanQuery();
    bq.Add(qp.Parse("\"w1 w2\"~1"), Occur.MUST);
    bq.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true), Occur.SHOULD);
    bq.Add(Snear(Sf("w3", 2), St("w2"), St("w3"), 5, true), Occur.SHOULD);

    // Heavily boosted filtered clause.
    Query extra = new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[] { 1, 3 }));
    extra.SetBoost(1000);
    bq.Add(extra, Occur.SHOULD);

    // Boosted constant-score clause over an itemized filter.
    extra = new ConstantScoreQuery(new ItemizedFilter(new int[] { 0, 2 }));
    extra.SetBoost(30);
    bq.Add(extra, Occur.SHOULD);

    // Disjunction-max over spans, parsed terms, and nested clauses.
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(0.2f);
    dmq.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true));
    dmq.Add(qp.Parse("QQ"));
    dmq.Add(qp.Parse("xx yy -zz"));
    dmq.Add(qp.Parse("-xx -w1"));

    DisjunctionMaxQuery nestedDmq = new DisjunctionMaxQuery(0.5f);
    nestedDmq.Add(qp.Parse("w1"));
    nestedDmq.Add(qp.Parse("w2"));
    nestedDmq.Add(qp.Parse("w3"));
    dmq.Add(nestedDmq);

    bq.Add(dmq, Occur.SHOULD);

    // At least two of the three near-spans must match.
    BooleanQuery atLeastTwo = new BooleanQuery();
    atLeastTwo.SetMinimumNumberShouldMatch(2);
    atLeastTwo.Add(Snear("w1", "w2", 1, true), Occur.SHOULD);
    atLeastTwo.Add(Snear("w2", "w3", 1, true), Occur.SHOULD);
    atLeastTwo.Add(Snear("w1", "w3", 3, true), Occur.SHOULD);
    bq.Add(atLeastTwo, Occur.SHOULD);

    Qtest(bq, new int[] { 0, 1, 2 });
}
/// <summary>
/// Verifies which Scorer implementation ends up driving collection when a
/// ConstantScoreQuery is nested inside another ConstantScoreQuery and inside
/// a BooleanQuery, and that the boosts survive each level of wrapping.
/// </summary>
public virtual void TestWrapped2Times()
{
    Directory directory = null;
    IndexReader reader = null;
    IndexSearcher searcher = null;
    try
    {
        directory = NewDirectory();
        RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
        Document doc = new Document();
        doc.Add(NewStringField("field", "term", Field.Store.NO));
        writer.AddDocument(doc);
        reader = writer.Reader;
        writer.Dispose();

        // we don't wrap with AssertingIndexSearcher in order to have the original scorer in setScorer.
        searcher = NewSearcher(reader, true, false);

        // set a similarity that does not normalize our boost away
        searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);

        // csq2 wraps csq1; each carries its own boost.
        Query csq1 = new ConstantScoreQuery(new TermQuery(new Term("field", "term")));
        csq1.Boost = 2.0f;
        Query csq2 = new ConstantScoreQuery(csq1);
        csq2.Boost = 5.0f;

        BooleanQuery bq = new BooleanQuery();
        bq.Add(csq1, BooleanClause.Occur.SHOULD);
        bq.Add(csq2, BooleanClause.Occur.SHOULD);

        Query csqbq = new ConstantScoreQuery(bq);
        csqbq.Boost = 17.0f;

        CheckHits(searcher, csq1, csq1.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, null);
        CheckHits(searcher, csq2, csq2.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, typeof(ConstantScoreQuery.ConstantScorer).Name);

        // for the combined BQ, the scorer should always be BooleanScorer's BucketScorer, because our scorer supports out-of order collection!
        string bucketScorerClass = typeof(FakeScorer).Name;
        CheckHits(searcher, bq, csq1.Boost + csq2.Boost, bucketScorerClass, null);
        CheckHits(searcher, csqbq, csqbq.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, bucketScorerClass);
    }
    finally
    {
        // Dispose whatever was actually created; fields may still be null if
        // an earlier step threw.
        if (reader != null)
        {
            reader.Dispose();
        }
        if (directory != null)
        {
            directory.Dispose();
        }
    }
}
public virtual void TestCSQ4()
{
    // A zero boost must not prevent the constant-score query from matching doc 3.
    Query csq = new ConstantScoreQuery(new ItemizedFilter(new int[] { 3 }));
    csq.Boost = 0;
    Bqtest(csq, new int[] { 3 });
}
/// <summary>
/// Runs the same range both as a doc-values-backed constant-score query and
/// as an inverted-index range query, and checks both return identical hits.
/// </summary>
protected internal virtual void AssertSame(BytesRef lowerVal, BytesRef upperVal, bool includeLower, bool includeUpper)
{
    Query docValues = new ConstantScoreQuery(DocTermOrdsRangeFilter.NewBytesRefRange(FieldName, lowerVal, upperVal, includeLower, includeUpper));

    // Force the inverted query through the filter rewrite so both sides
    // produce constant scores.
    MultiTermQuery inverted = new TermRangeQuery(FieldName, lowerVal, upperVal, includeLower, includeUpper);
    inverted.SetRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);

    TopDocs invertedDocs = Searcher1.Search(inverted, 25);
    TopDocs docValuesDocs = Searcher2.Search(docValues, 25);
    CheckHits.CheckEqual(inverted, invertedDocs.ScoreDocs, docValuesDocs.ScoreDocs);
}
// Scorer that walks the filter's bit set and reports the same (constant)
// score for every matching document.
public ConstantScorer(ConstantScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, Weight w) : base(similarity)
{
    InitBlock(enclosingInstance);
    // The constant score handed out for every hit comes from the weight.
    theScore = w.GetValue();
    // Document membership comes straight from the wrapped filter.
    bits = Enclosing_Instance.filter.Bits(reader);
}
private static void VisitQuery(ConstantScoreQuery query, AzureQueryLogger.IndentedTextWriter writer)
{
    // Only the wrapped filter is worth logging; the score is constant.
    object filter = query.Filter;
    writer.WriteLine("Filter: {0}", filter);
}
/// <summary>
/// Rewrites a MultiTermQuery adaptively: visits terms until either the term
/// count or the cumulative doc count crosses a cutoff (then falls back to a
/// filter-backed ConstantScoreQuery), otherwise builds a constant-score
/// BooleanQuery from the collected terms.
/// Fix: replaced the non-generic System.Collections.ArrayList (which needs
/// per-element casts) with a typed List&lt;Term&gt;.
/// </summary>
public override Query Rewrite(IndexReader reader, MultiTermQuery query)
{
    // Get the enum and start visiting terms. If we exhaust the enum before
    // hitting either of the cutoffs, we use the BooleanQuery path; else the
    // filter path:
    System.Collections.Generic.List<Term> pendingTerms = new System.Collections.Generic.List<Term>();
    int docCountCutoff = (int) ((docCountPercent / 100.0) * reader.MaxDoc());
    int termCountLimit = System.Math.Min(BooleanQuery.GetMaxClauseCount(), termCountCutoff);
    int docVisitCount = 0;
    FilteredTermEnum enumerator = query.GetEnum(reader);
    try
    {
        while (true)
        {
            Term t = enumerator.Term();
            if (t != null)
            {
                pendingTerms.Add(t);
                // Loading the TermInfo from the terms dict here
                // should not be costly, because 1) the
                // query/filter will load the TermInfo when it
                // runs, and 2) the terms dict has a cache:
                docVisitCount += reader.DocFreq(t);
            }

            if (pendingTerms.Count >= termCountLimit || docVisitCount >= docCountCutoff)
            {
                // Too many terms -- make a filter.
                Query result = new ConstantScoreQuery(new MultiTermQueryWrapperFilter(query));
                result.SetBoost(query.GetBoost());
                return result;
            }
            else if (!enumerator.Next())
            {
                // Enumeration is done, and we hit a small
                // enough number of terms & docs -- just make a
                // BooleanQuery, now
                BooleanQuery bq = new BooleanQuery(true);
                foreach (Term pendingTerm in pendingTerms)
                {
                    bq.Add(new TermQuery(pendingTerm), BooleanClause.Occur.SHOULD);
                }
                // Strip scores by wrapping the BooleanQuery in a filter.
                Query result = new ConstantScoreQuery(new QueryWrapperFilter(bq));
                result.SetBoost(query.GetBoost());
                query.IncTotalNumberOfTerms(pendingTerms.Count);
                return result;
            }
        }
    }
    finally
    {
        enumerator.Close();
    }
}
public virtual void TestPrefixFilter_Mem()
{
    Directory directory = NewDirectory();
    string[] categories = new string[] { "/Computers/Linux", "/Computers/Mac/One", "/Computers/Mac/Two", "/Computers/Windows" };
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
    for (int i = 0; i < categories.Length; i++)
    {
        Document doc = new Document();
        doc.Add(NewStringField("category", categories[i], Field.Store.YES));
        writer.AddDocument(doc);
    }
    IndexReader reader = writer.Reader;

    // PrefixFilter combined with ConstantScoreQuery: shared ancestor matches all docs.
    PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
    Query query = new ConstantScoreQuery(filter);
    IndexSearcher searcher = NewSearcher(reader);
    ScoreDoc[] results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, results.Length);

    // Prefix from the middle of the value range.
    filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(2, results.Length);

    // Prefix equal to the first value.
    filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length);

    // Prefix equal to the last value.
    filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length);

    // Non-existent prefix inside the range.
    filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Non-existent prefix sorting before all values.
    filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Non-existent prefix sorting after all values.
    filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Zero-length prefix matches every document.
    filter = new PrefixFilter(new Term("category", ""));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, results.Length);

    // Field that does not exist in the index.
    filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    writer.Dispose();
    reader.Dispose();
    directory.Dispose();
}
/// <summary>
/// Exercises CachingWrapperFilter's interaction with deletions: the cached
/// entry must be reused when the reopened reader's deletions are unchanged,
/// and new deletions must be applied dynamically without a fresh cache miss.
/// </summary>
public virtual void TestEnforceDeletions()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy(10)));
    // asserts below requires no unexpected merges:

    // NOTE: cannot use writer.getReader because RIW (on
    // flipping a coin) may give us a newly opened reader,
    // but we use .reopen on this reader below and expect to
    // (must) get an NRT reader:
    DirectoryReader reader = DirectoryReader.Open(writer.IndexWriter, true);
    // same reason we don't wrap?
    IndexSearcher searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        reader, false);

    // add a doc, refresh the reader, and check that it's there
    Document doc = new Document();
    doc.Add(NewStringField("id", "1", Field.Store.YES));
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        reader, false);

    TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
    Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

    Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));
    CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.IsTrue(filter.GetSizeInBytes() > 0);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

    Query constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // make sure we get a cache hit when we reopen reader
    // that had no change to deletions

    // fake delete (deletes nothing):
    writer.DeleteDocuments(new Term("foo", "bar"));

    IndexReader oldReader = reader;
    reader = RefreshReader(reader);
    Assert.IsTrue(reader == oldReader);
    int missCount = filter.missCount;
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // cache hit:
    Assert.AreEqual(missCount, filter.missCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));

    // NOTE: important to hold ref here so GC doesn't clear
    // the cache entry!  Else the assert below may sometimes
    // fail:
    oldReader = reader;
    reader = RefreshReader(reader);
    searcher = NewSearcher(reader, false);

    missCount = filter.missCount;
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

    // cache hit
    Assert.AreEqual(missCount, filter.missCount);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

    // apply deletes dynamically:
    filter = new CachingWrapperFilter(startFilter);
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = NewSearcher(reader, false);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    missCount = filter.missCount;
    Assert.IsTrue(missCount > 0);
    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    Assert.AreEqual(missCount, filter.missCount);

    writer.AddDocument(doc);

    // NOTE: important to hold ref here so GC doesn't clear
    // the cache entry!  Else the assert below may sometimes
    // fail:
    oldReader = reader;
    reader = RefreshReader(reader);
    searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        reader, false);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(2, docs.TotalHits, "[query + filter] Should find 2 hits...");
    Assert.IsTrue(filter.missCount > missCount);
    missCount = filter.missCount;

    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(2, docs.TotalHits, "[just filter] Should find a hit...");
    Assert.AreEqual(missCount, filter.missCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        reader, false);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
    // CWF reused the same entry (it dynamically applied the deletes):
    Assert.AreEqual(missCount, filter.missCount);

    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
    // CWF reused the same entry (it dynamically applied the deletes):
    Assert.AreEqual(missCount, filter.missCount);

    // NOTE: silliness to make sure JRE does not eliminate
    // our holding onto oldReader to prevent
    // CachingWrapperFilter's WeakHashMap from dropping the
    // entry:
    Assert.IsTrue(oldReader != null);

    reader.Dispose();
    writer.Dispose();
    dir.Dispose();
}
public virtual void TestPrefixFilter_Renamed()
{
    RAMDirectory directory = new RAMDirectory();
    System.String[] categories = new System.String[] { "/Computers/Linux", "/Computers/Mac/One", "/Computers/Mac/Two", "/Computers/Windows" };
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    foreach (System.String category in categories)
    {
        Document doc = new Document();
        doc.Add(new Field("category", category, Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }
    writer.Close();

    // PrefixFilter combined with ConstantScoreQuery: shared ancestor matches all docs.
    PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
    Query query = new ConstantScoreQuery(filter);
    IndexSearcher searcher = new IndexSearcher(directory);
    ScoreDoc[] results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, results.Length);

    // Prefix from the middle of the value range.
    filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(2, results.Length);

    // Prefix equal to the first value.
    filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length);

    // Prefix equal to the last value.
    filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length);

    // Non-existent prefix inside the range.
    filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Non-existent prefix sorting before all values.
    filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Non-existent prefix sorting after all values.
    filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Zero-length prefix matches every document.
    filter = new PrefixFilter(new Term("category", ""));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, results.Length);

    // Field that does not exist in the index.
    filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);
}
public virtual Query VisitConstantScoreQuery(ConstantScoreQuery constantScoreq)
{
    // Translation of constant-score queries is not supported by this visitor.
    throw new NotImplementedException();
}
public ConstantWeight(ConstantScoreQuery outerInstance, IndexSearcher searcher)
{
    this.outerInstance = outerInstance;

    // When the constant-score query wraps a filter rather than a query,
    // there is no inner query and hence no inner weight to delegate to.
    if (outerInstance.m_query == null)
    {
        this.innerWeight = null;
    }
    else
    {
        this.innerWeight = outerInstance.m_query.CreateWeight(searcher);
    }
}
/// <summary>
/// Tests the three CachingWrapperFilter deletion modes (IGNORE, RECACHE,
/// DYNAMIC) against an index that repeatedly gains and loses one document.
/// </summary>
public void TestEnforceDeletions()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    IndexReader reader = writer.GetReader();
    IndexSearcher searcher = new IndexSearcher(reader);

    // add a doc, refresh the reader, and check that its there
    Document doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
    Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

    Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));

    // ignore deletions
    CachingWrapperFilter filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

    // IGNORE mode: the cached bits still claim the deleted doc matches.
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // force cache to regenerate:
    filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);

    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // make sure we get a cache hit when we reopen reader
    // that had no change to deletions
    IndexReader newReader = RefreshReader(reader);
    Assert.IsTrue(reader != newReader);
    reader = newReader;
    searcher = new IndexSearcher(reader);
    int missCount = filter.missCount;
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    Assert.AreEqual(missCount, filter.missCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    missCount = filter.missCount;
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    // RECACHE mode: the reopen with changed deletions costs one cache miss.
    Assert.AreEqual(missCount + 1, filter.missCount);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

    // apply deletions dynamically
    filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.DYNAMIC);

    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

    missCount = filter.missCount;
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

    // doesn't count as a miss
    Assert.AreEqual(missCount, filter.missCount);
}
public virtual void TestCSQ2()
{
    // Constant-score over an itemized filter must match exactly docs 1 and 3.
    Query csq = new ConstantScoreQuery(new ItemizedFilter(new int[] { 1, 3 }));
    Qtest(csq, new int[] { 1, 3 });
}
public virtual void TestPrefixFilter_Mem()
{
    Directory directory = NewDirectory();
    string[] categories = new string[] { "/Computers/Linux", "/Computers/Mac/One", "/Computers/Mac/Two", "/Computers/Windows" };
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory);
    foreach (string category in categories)
    {
        Document doc = new Document();
        doc.Add(NewStringField("category", category, Field.Store.YES));
        writer.AddDocument(doc);
    }
    IndexReader reader = writer.Reader;

    // PrefixFilter combined with ConstantScoreQuery: shared ancestor matches all docs.
    PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
    Query query = new ConstantScoreQuery(filter);
    IndexSearcher searcher = NewSearcher(reader);
    ScoreDoc[] results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, results.Length);

    // Prefix from the middle of the value range.
    filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(2, results.Length);

    // Prefix equal to the first value.
    filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length);

    // Prefix equal to the last value.
    filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, results.Length);

    // Non-existent prefix inside the range.
    filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Non-existent prefix sorting before all values.
    filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Non-existent prefix sorting after all values.
    filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    // Zero-length prefix matches every document.
    filter = new PrefixFilter(new Term("category", ""));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(4, results.Length);

    // Field that does not exist in the index.
    filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
    query = new ConstantScoreQuery(filter);
    results = searcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(0, results.Length);

    writer.Dispose();
    reader.Dispose();
    directory.Dispose();
}
// Scorer that iterates the filter's doc id set and reports the same
// (constant) score for every matching document.
// Fix: Filter.GetDocIdSet (and DocIdSet.Iterator) may return null to mean
// "no documents"; the original chained call would then throw a
// NullReferenceException. Guard both cases by substituting the shared empty
// iterator (same pattern as the later revision of this constructor).
public ConstantScorer(ConstantScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, Weight w) : base(similarity)
{
    InitBlock(enclosingInstance);
    // The constant score handed out for every hit comes from the weight.
    theScore = w.GetValue();

    DocIdSet docIdSet = Enclosing_Instance.filter.GetDocIdSet(reader);
    if (docIdSet == null)
    {
        docIdSetIterator = DocIdSet.EMPTY_DOCIDSET.Iterator();
    }
    else
    {
        DocIdSetIterator iter = docIdSet.Iterator();
        docIdSetIterator = (iter == null) ? DocIdSet.EMPTY_DOCIDSET.Iterator() : iter;
    }
}
// Wires up the back-reference to the enclosing ConstantScoreQuery; this is
// the ported equivalent of a Java non-static inner class's implicit outer
// reference.
private void InitBlock(ConstantScoreQuery enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
}
public override Query Rewrite(IndexReader reader, MultiTermQuery query)
{
    // Let the base class produce its rewritten form first, then wrap it in a
    // filter-backed constant-score query so per-term scores are stripped.
    Query rewritten = base.Rewrite(reader, query);
    Query result = new ConstantScoreQuery(new QueryWrapperFilter(rewritten));
    result.SetBoost(query.GetBoost());
    return result;
}
// Weight for a filter-backed ConstantScoreQuery; only the searcher's
// Similarity is needed, wired through the enclosing query.
public ConstantWeight(ConstantScoreQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
}
public override Query Rewrite(IndexReader reader, MultiTermQuery query)
{
    // Always rewrite to a filter-backed constant-score query, carrying the
    // original query's boost over to the result.
    MultiTermQueryWrapperFilter wrapper = new MultiTermQueryWrapperFilter(query);
    Query result = new ConstantScoreQuery(wrapper);
    result.SetBoost(query.GetBoost());
    return result;
}
public override Query Rewrite(IndexReader reader)
{
    if (query != null)
    {
        // Query-backed: only re-wrap when rewriting actually changed the
        // inner query; otherwise we are already fully rewritten.
        Query inner = query.Rewrite(reader);
        if (inner == query)
        {
            return this;
        }
        Query result = new ConstantScoreQuery(inner);
        result.Boost = this.Boost;
        return result;
    }

    Debug.Assert(filter != null);
    // Fix outdated usage pattern from Lucene 2.x/early-3.x:
    // because ConstantScoreQuery only accepted filters,
    // QueryWrapperFilter was used to wrap queries.
    if (filter is QueryWrapperFilter)
    {
        QueryWrapperFilter qwf = (QueryWrapperFilter)filter;
        Query result = new ConstantScoreQuery(qwf.Query.Rewrite(reader));
        result.Boost = this.Boost;
        return result;
    }

    return this;
}
public ConstantScorer(ConstantScoreQuery outerInstance, DocIdSetIterator docIdSetIterator, Weight w, float theScore)
    : base(w)
{
    // Iterates the wrapped filter/query's doc ids while handing back a
    // fixed score for every document.
    this.TheScore = theScore;
    this.DocIdSetIterator = docIdSetIterator;
    this.OuterInstance = outerInstance;
}
public ConstantBulkScorer(ConstantScoreQuery outerInstance, BulkScorer bulkScorer, Weight weight, float theScore)
{
    // Delegates collection to the wrapped bulk scorer while substituting a
    // constant score for every collected document.
    this.OuterInstance = outerInstance;
    this.TheScore = theScore;
    this.Weight = weight;
    this.BulkScorer = bulkScorer;
}
public virtual void TestPrefixFilter_Renamed_Method()
{
    RAMDirectory directory = new RAMDirectory();
    System.String[] categories = new System.String[] { "/Computers/Linux", "/Computers/Mac/One", "/Computers/Mac/Two", "/Computers/Windows" };
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    foreach (System.String category in categories)
    {
        Document doc = new Document();
        doc.Add(new Field("category", category, Field.Store.YES, Field.Index.UN_TOKENIZED));
        writer.AddDocument(doc);
    }
    writer.Close();

    // PrefixFilter combined with ConstantScoreQuery: shared ancestor matches all docs.
    PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
    Query query = new ConstantScoreQuery(filter);
    IndexSearcher searcher = new IndexSearcher(directory);
    Hits hits = searcher.Search(query);
    Assert.AreEqual(4, hits.Length());

    // Prefix from the middle of the value range.
    filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(2, hits.Length());

    // Prefix equal to the first value.
    filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(1, hits.Length());

    // Prefix equal to the last value.
    filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(1, hits.Length());

    // Non-existent prefix inside the range.
    filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(0, hits.Length());

    // Non-existent prefix sorting before all values.
    filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(0, hits.Length());

    // Non-existent prefix sorting after all values.
    filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(0, hits.Length());

    // Zero-length prefix matches every document.
    filter = new PrefixFilter(new Term("category", ""));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(4, hits.Length());

    // Field that does not exist in the index.
    filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
    query = new ConstantScoreQuery(filter);
    hits = searcher.Search(query);
    Assert.AreEqual(0, hits.Length());
}
public ConstantScorer(ConstantScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, Weight w) : base(similarity)
{
    InitBlock(enclosingInstance);
    // The constant score handed out for every hit comes from the weight.
    theScore = w.GetValue();

    // Filters may return a null DocIdSet, or a DocIdSet with a null
    // iterator, to mean "no matches"; normalize both to the shared empty
    // iterator.
    DocIdSet docIdSet = Enclosing_Instance.filter.GetDocIdSet(reader);
    DocIdSetIterator iter = (docIdSet == null) ? null : docIdSet.Iterator();
    docIdSetIterator = (iter == null) ? DocIdSet.EMPTY_DOCIDSET.Iterator() : iter;
}
// Verifies how CachingSpanFilter's DeletesMode interacts with deletions:
// IGNORE keeps serving stale cached hits after a document is deleted, while
// RECACHE (forced here by constructing a fresh filter) rebuilds the cache
// against the refreshed reader and reflects deletions.
public void TestEnforceDeletions()
{
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    IndexReader reader = writer.GetReader();
    IndexSearcher searcher = new IndexSearcher(reader);

    // add a doc, refresh the reader, and check that its there
    Document doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
    Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

    SpanFilter startFilter = new SpanQueryFilter(new SpanTermQuery(new Term("id", "1")));

    // ignore deletions
    CachingSpanFilter filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);

    // query+filter still applies deletions, so the hit disappears...
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

    // ...but the pure constant-score path uses the IGNORE-mode cache, which
    // still reports the deleted doc.
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // force cache to regenerate:
    filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // make sure we get a cache hit when we reopen readers
    // that had no new deletions
    IndexReader newReader = RefreshReader(reader);
    Assert.IsTrue(reader != newReader);
    reader = newReader;
    searcher = new IndexSearcher(reader);
    // missCount unchanged across the search => the cached entry was reused
    int missCount = filter.missCount;
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    Assert.AreEqual(missCount, filter.missCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));
    reader = RefreshReader(reader);
    searcher = new IndexSearcher(reader);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
    // RECACHE mode: the cache is rebuilt, so the deletion is now visible
    // through the constant-score path as well.
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
}
public virtual void TestCSQ3() { Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{0, 2})); q.Boost = 1000; Qtest(q, new int[]{0, 2}); }
public ConstantWeight(ConstantScoreQuery outerInstance, IndexSearcher searcher) { this.OuterInstance = outerInstance; this.InnerWeight = (outerInstance.query == null) ? null : outerInstance.query.CreateWeight(searcher); }
public override Query Rewrite(IndexReader reader, MultiTermQuery query) { Query result = new ConstantScoreQuery(new MultiTermQueryDocTermOrdsWrapperFilter(query)); result.Boost = query.Boost; return result; }
internal virtual BitArray AddClause(BooleanQuery bq, BitArray result) { BitArray rnd = Sets[Random().Next(Sets.Length)]; Query q = new ConstantScoreQuery(new FilterAnonymousInnerClassHelper(this, rnd)); bq.Add(q, BooleanClause.Occur.MUST); if (Validate) { if (result == null) { result = (BitArray)rnd.Clone(); } else { result = result.And(rnd); } } return result; }
public virtual void TestCSQ2() { Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{1, 3})); Qtest(q, new int[]{1, 3}); }
// Verifies CachingWrapperFilter's cache behavior across reader refreshes and
// deletions: the cache entry is reused when deletions did not change, and
// deletes are applied dynamically (without recaching) when they did.
public virtual void TestEnforceDeletions()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy(10)));
    // asserts below requires no unexpected merges:

    // NOTE: cannot use writer.getReader because RIW (on
    // flipping a coin) may give us a newly opened reader,
    // but we use .reopen on this reader below and expect to
    // (must) get an NRT reader:
    DirectoryReader reader = DirectoryReader.Open(writer.w, true);
    // same reason we don't wrap?
    IndexSearcher searcher = NewSearcher(reader, false);

    // add a doc, refresh the reader, and check that it's there
    Document doc = new Document();
    doc.Add(NewStringField("id", "1", Field.Store.YES));
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = NewSearcher(reader, false);
    TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
    Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

    Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));
    CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    // cache must have been populated by the search above
    Assert.IsTrue(filter.SizeInBytes() > 0);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    Query constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // make sure we get a cache hit when we reopen reader
    // that had no change to deletions

    // fake delete (deletes nothing):
    writer.DeleteDocuments(new Term("foo", "bar"));

    IndexReader oldReader = reader;
    reader = RefreshReader(reader);
    Assert.IsTrue(reader == oldReader);
    int missCount = filter.MissCount;
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

    // cache hit:
    Assert.AreEqual(missCount, filter.MissCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));

    // NOTE: important to hold ref here so GC doesn't clear
    // the cache entry! Else the assert below may sometimes
    // fail:
    oldReader = reader;
    reader = RefreshReader(reader);

    searcher = NewSearcher(reader, false);

    missCount = filter.MissCount;
    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

    // cache hit
    Assert.AreEqual(missCount, filter.MissCount);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

    // apply deletes dynamically:
    filter = new CachingWrapperFilter(startFilter);
    writer.AddDocument(doc);
    reader = RefreshReader(reader);
    searcher = NewSearcher(reader, false);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
    // fresh filter => first search must be a cache miss
    missCount = filter.MissCount;
    Assert.IsTrue(missCount > 0);
    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
    Assert.AreEqual(missCount, filter.MissCount);

    writer.AddDocument(doc);

    // NOTE: important to hold ref here so GC doesn't clear
    // the cache entry! Else the assert below may sometimes
    // fail:
    oldReader = reader;

    reader = RefreshReader(reader);
    searcher = NewSearcher(reader, false);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(2, docs.TotalHits, "[query + filter] Should find 2 hits...");
    // new segment => the filter had to recompute for it
    Assert.IsTrue(filter.MissCount > missCount);
    missCount = filter.MissCount;

    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(2, docs.TotalHits, "[just filter] Should find a hit...");
    Assert.AreEqual(missCount, filter.MissCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.DeleteDocuments(new Term("id", "1"));

    reader = RefreshReader(reader);
    searcher = NewSearcher(reader, false);

    docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
    Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
    // CWF reused the same entry (it dynamically applied the deletes):
    Assert.AreEqual(missCount, filter.MissCount);

    docs = searcher.Search(constantScore, 1);
    Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
    // CWF reused the same entry (it dynamically applied the deletes):
    Assert.AreEqual(missCount, filter.MissCount);

    // NOTE: silliness to make sure JRE does not eliminate
    // our holding onto oldReader to prevent
    // CachingWrapperFilter's WeakHashMap from dropping the
    // entry:
    Assert.IsTrue(oldReader != null);

    reader.Dispose();
    writer.Dispose();
    dir.Dispose();
}
public override Query Rewrite(IndexReader reader) { // Map to RangeFilter semantics which are slightly different... RangeFilter rangeFilt = new RangeFilter(fieldName, lowerVal != null ? lowerVal : "", upperVal, (System.Object) lowerVal == (System.Object) ""?false:includeLower, upperVal == null?false:includeUpper); Query q = new ConstantScoreQuery(rangeFilt); q.SetBoost(GetBoost()); return q; }
internal virtual System.Collections.BitArray AddClause(BooleanQuery bq, System.Collections.BitArray result) { System.Collections.BitArray rnd = sets[r.Next(sets.Length)]; Query q = new ConstantScoreQuery(new BitSetFilter(rnd)); bq.Add(q, BooleanClause.Occur.MUST); if (validate) { if (result == null) result = (System.Collections.BitArray) rnd.Clone(); else { result.And(rnd); } } return result; }