Example #1
        public virtual void TestEmpty3()
        {
            Filter expected = new PrefixFilter(new Term("bogusField", "bogusVal"));
            Filter actual   = new CachingWrapperFilter(expected);

            AssertFilterEquals(expected, actual);
        }
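These tests compare each filter against its cached wrapper through an AssertFilterEquals helper that is not shown on this page. A minimal sketch of what such a helper presumably does, assuming the Lucene.NET 4.x API and the test class's shared reader ir (the exact signature is an assumption):

        private void AssertFilterEquals(Filter expected, Filter actual)
        {
            // Both filters must produce the same doc IDs on every segment.
            foreach (AtomicReaderContext context in ir.Leaves)
            {
                DocIdSet expectedSet = expected.GetDocIdSet(context, context.AtomicReader.LiveDocs);
                DocIdSet actualSet   = actual.GetDocIdSet(context, context.AtomicReader.LiveDocs);

                DocIdSetIterator expectedIt = expectedSet == null ? null : expectedSet.GetIterator();
                DocIdSetIterator actualIt   = actualSet == null ? null : actualSet.GetIterator();

                if (expectedIt == null || actualIt == null)
                {
                    // A null set (or iterator) means "matches nothing";
                    // simplification: require both sides to agree.
                    Assert.IsTrue(expectedIt == null && actualIt == null);
                    continue;
                }

                int doc;
                while ((doc = expectedIt.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                {
                    Assert.AreEqual(doc, actualIt.NextDoc());
                }
                Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, actualIt.NextDoc());
            }
        }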
        private IEnumerable<int> GetDocIDs(IndexReader reader, string groupByField, string group)
        {
            var query = new TermQuery(new Term(groupByField, group));
            Filter filter = new CachingWrapperFilter(new QueryWrapperFilter(query));

            return filter.GetDocIdSet(reader).AsEnumerable().ToList();
        }
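The AsEnumerable() call above is not part of Lucene.NET; it is an extension method defined in the surrounding project. A plausible sketch of such an extension against the 3.x API (the name and shape are assumptions):

        public static class DocIdSetExtensions
        {
            // Walks the DocIdSetIterator and yields each matching document ID.
            public static IEnumerable<int> AsEnumerable(this DocIdSet docIdSet)
            {
                DocIdSetIterator iterator = docIdSet.Iterator();
                if (iterator == null)
                    yield break;

                int doc;
                while ((doc = iterator.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                    yield return doc;
            }
        }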
        public virtual void TestConstantScoreQueryAndFilter()
        {
            Directory         d   = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(Random, d, Similarity, TimeZone);
            Document          doc = new Document();

            doc.Add(NewStringField("field", "a", Field.Store.NO));
            w.AddDocument(doc);
            doc = new Document();
            doc.Add(NewStringField("field", "b", Field.Store.NO));
            w.AddDocument(doc);
            IndexReader r = w.GetReader();

            w.Dispose();

            Filter filterB = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "b"))));
            Query  query   = new ConstantScoreQuery(filterB);

            IndexSearcher s = NewSearcher(r);

            Assert.AreEqual(1, s.Search(query, filterB, 1).TotalHits); // Query for field:b, Filter field:b

            Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));

            query = new ConstantScoreQuery(filterA);

            Assert.AreEqual(0, s.Search(query, filterB, 1).TotalHits); // Query field:b, Filter field:a

            r.Dispose();
            d.Dispose();
        }
Example #5
        OpenBitSetDISI GetBitSet(string groupByField, string group)
        {
            TermQuery query  = new TermQuery(new Term(groupByField, group));
            Filter    filter = new CachingWrapperFilter(new QueryWrapperFilter(query));

            return(new OpenBitSetDISI(filter.GetDocIdSet(_Reader).Iterator(), _Reader.MaxDoc));
        }
		public virtual void TestCachingWorks()
		{
			Directory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			writer.Close();
			
			IndexReader reader = IndexReader.Open(dir);
			
			MockFilter filter = new MockFilter();
			CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
			
			// first time, nested filter is called
			cacher.GetDocIdSet(reader);
			Assert.IsTrue(filter.WasCalled(), "first time");
			
			// make sure no exception if cache is holding the wrong bitset
			cacher.Bits(reader);
			cacher.GetDocIdSet(reader);
			
			// second time, nested filter should not be called
			filter.Clear();
			cacher.GetDocIdSet(reader);
			Assert.IsFalse(filter.WasCalled(), "second time");
			
			reader.Close();
		}
Example #7
        public virtual void TestCachingWorks()
        {
            Directory   dir    = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

            writer.Close();

            IndexReader reader = IndexReader.Open(dir, true);

            MockFilter           filter = new MockFilter();
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // first time, nested filter is called
            cacher.GetDocIdSet(reader);
            Assert.IsTrue(filter.WasCalled(), "first time");

            // make sure no exception if cache is holding the wrong DocIdSet
            cacher.GetDocIdSet(reader);

            // second time, nested filter should not be called
            filter.Clear();
            cacher.GetDocIdSet(reader);
            Assert.IsFalse(filter.WasCalled(), "second time");

            reader.Close();
        }
Example #8
        public virtual void TestCachingWorks()
        {
            Directory         dir    = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            writer.Dispose();

            IndexReader          reader  = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
            AtomicReaderContext  context = (AtomicReaderContext)reader.Context;
            MockFilter           filter  = new MockFilter();
            CachingWrapperFilter cacher  = new CachingWrapperFilter(filter);

            // first time, nested filter is called
            DocIdSet strongRef = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs); // strong ref so GC cannot clear the cache entry

            Assert.IsTrue(filter.WasCalled(), "first time");

            // make sure no exception if cache is holding the wrong docIdSet
            cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);

            // second time, nested filter should not be called
            filter.Clear();
            cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
            Assert.IsFalse(filter.WasCalled(), "second time");

            reader.Dispose();
            dir.Dispose();
        }
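MockFilter is a test double from the Lucene test suite and is not reproduced on this page. A minimal sketch of the 4.x variant these tests assume (the details are assumptions):

        // Records whether the wrapped filter was actually consulted, so the
        // tests can verify that CachingWrapperFilter only delegates on a miss.
        internal class MockFilter : Filter
        {
            private bool wasCalled;

            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
            {
                wasCalled = true;
                // An empty but cacheable set is enough for these tests.
                return new FixedBitSet(context.AtomicReader.MaxDoc);
            }

            public bool WasCalled()
            {
                return wasCalled;
            }

            public void Clear()
            {
                wasCalled = false;
            }
        }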
Example #9
        private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
        {
            Assert.IsTrue(reader.Context is AtomicReaderContext);
            AtomicReaderContext  context     = (AtomicReaderContext)reader.Context;
            CachingWrapperFilter cacher      = new CachingWrapperFilter(filter);
            DocIdSet             originalSet = filter.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
            DocIdSet             cachedSet   = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);

            if (originalSet == null)
            {
                Assert.IsNull(cachedSet);
            }
            if (cachedSet == null)
            {
                Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
            }
            else
            {
                Assert.IsTrue(cachedSet.IsCacheable);
                Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);
                //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
                if (originalSet.IsCacheable)
                {
                    Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of the same class as the uncached one, if cacheable");
                }
                else
                {
                    Assert.IsTrue(cachedSet is FixedBitSet, "Cached DocIdSet must be a FixedBitSet if the original one was not cacheable");
                }
            }
        }
Example #10
        public void TestNullDocIdSetIterator()
        {
            Directory   dir    = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

            writer.Close();

            IndexReader reader = IndexReader.Open(dir, true);

            // AnonymousFilter2 ports the Java anonymous Filter whose
            // getDocIdSet returns a DocIdSet with a null iterator.
            Filter filter = new AnonymousFilter2();
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, cacher.GetDocIdSet(reader));

            reader.Close();
        }
        public virtual void TestConstantScoreQueryAndFilter()
        {
            Directory d = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), d);
            Document doc = new Document();
            doc.Add(NewStringField("field", "a", Field.Store.NO));
            w.AddDocument(doc);
            doc = new Document();
            doc.Add(NewStringField("field", "b", Field.Store.NO));
            w.AddDocument(doc);
            IndexReader r = w.Reader;
            w.Dispose();

            Filter filterB = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "b"))));
            Query query = new ConstantScoreQuery(filterB);

            IndexSearcher s = NewSearcher(r);
            Assert.AreEqual(1, s.Search(query, filterB, 1).TotalHits); // Query for field:b, Filter field:b

            Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));
            query = new ConstantScoreQuery(filterA);

            Assert.AreEqual(0, s.Search(query, filterB, 1).TotalHits); // Query field:b, Filter field:a

            r.Dispose();
            d.Dispose();
        }
Example #12
        public virtual void TestDense()
        {
            Query  query    = new MatchAllDocsQuery();
            Filter expected = new QueryWrapperFilter(query);
            Filter actual   = new CachingWrapperFilter(expected);

            AssertFilterEquals(expected, actual);
        }
Example #13
        public virtual void TestEmpty()
        {
            Query  query    = new BooleanQuery();
            Filter expected = new QueryWrapperFilter(query);
            Filter actual   = new CachingWrapperFilter(expected);

            AssertFilterEquals(expected, actual);
        }
Example #14
        public OpenBitSetDISI TermToBitSet(string term, IndexReader indexReader)
        {
            var facetQuery = new TermQuery(new Term(this.Field, term));
            var facetQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(facetQuery));
            var bitSet = new OpenBitSetDISI(facetQueryFilter.GetDocIdSet(indexReader).Iterator(), indexReader.MaxDoc());

            return bitSet;
        }
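Per-term bit sets built this way are typically intersected with the bit set of the main query to produce facet counts. A short usage sketch (mainQueryBitSet and its setup are assumed):

        OpenBitSetDISI termBits = TermToBitSet("red", indexReader);
        termBits.And(mainQueryBitSet);            // in-place intersection
        long facetCount = termBits.Cardinality(); // documents matching both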
Example #15
 /// <param name="filter">Filter to cache results of.</param>
 /// <param name="deletesMode">See <see cref="CachingWrapperFilter.DeletesMode"/>.</param>
 public CachingSpanFilter(SpanFilter filter, CachingWrapperFilter.DeletesMode deletesMode)
 {
     this.filter = filter;
     if (deletesMode == CachingWrapperFilter.DeletesMode.DYNAMIC)
     {
         throw new System.ArgumentException("DeletesMode.DYNAMIC is not supported");
     }
     this.cache = new AnonymousFilterCache(deletesMode);
 }
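For reference, a hedged construction example against this 2.9/3.x API (the span query itself is illustrative):

 // RECACHE rebuilds the cached result whenever deletions change;
 // DYNAMIC is rejected by the constructor above.
 var spanFilter = new CachingSpanFilter(
     new SpanQueryFilter(new SpanTermQuery(new Term("field", "value"))),
     CachingWrapperFilter.DeletesMode.RECACHE);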
Example #16
 public virtual void TestSingle()
 {
     for (int i = 0; i < 10; i++)
     {
         int    id       = Random.Next(ir.MaxDoc);
         Query  query    = new TermQuery(new Term("id", Convert.ToString(id)));
         Filter expected = new QueryWrapperFilter(query);
         Filter actual   = new CachingWrapperFilter(expected);
         AssertFilterEquals(expected, actual);
     }
 }
Example #17
        public virtual void TestEmpty2()
        {
            BooleanQuery query = new BooleanQuery();

            query.Add(new TermQuery(new Term("id", "0")), Occur.MUST);
            query.Add(new TermQuery(new Term("id", "0")), Occur.MUST_NOT);
            Filter expected = new QueryWrapperFilter(query);
            Filter actual   = new CachingWrapperFilter(expected);

            AssertFilterEquals(expected, actual);
        }
Example #18
 public virtual void TestSparse()
 {
     for (int i = 0; i < 10; i++)
     {
         int    id_start = Random.Next(ir.MaxDoc - 1);
         int    id_end   = id_start + 1;
         Query  query    = TermRangeQuery.NewStringRange("id", Convert.ToString(id_start), Convert.ToString(id_end), true, true);
         Filter expected = new QueryWrapperFilter(query);
         Filter actual   = new CachingWrapperFilter(expected);
         AssertFilterEquals(expected, actual);
     }
 }
 /// <param name="filter">Filter to cache results of.</param>
 /// <param name="deletesMode">See <see cref="CachingWrapperFilter.DeletesMode"/>.</param>
 public CachingSpanFilter(SpanFilter filter, CachingWrapperFilter.DeletesMode deletesMode)
 {
     this.filter = filter;
     if (deletesMode == CachingWrapperFilter.DeletesMode.DYNAMIC)
     {
         throw new System.ArgumentException("DeletesMode.DYNAMIC is not supported");
     }
     this.cache = new CachingWrapperFilter.FilterCache<SpanFilterResult>(deletesMode, 
         (r, value) =>
         {
             throw new System.ArgumentException("DeletesMode.DYNAMIC is not supported");
         });
 }
        public void TestNullDocIdSet()
        {
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            writer.Close();

            IndexReader reader = IndexReader.Open(dir, true);

            Filter filter = new AnonymousFilter();

            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, cacher.GetDocIdSet(reader));

            reader.Close();
        }
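AnonymousFilter stands in for a Java anonymous Filter whose getDocIdSet returns null. A plausible sketch against the 2.9/3.x API (the class body is an assumption):

        internal class AnonymousFilter : Filter
        {
            // Returning null signals "matches nothing"; CachingWrapperFilter
            // then substitutes the DocIdSet.EMPTY_DOCIDSET constant.
            public override DocIdSet GetDocIdSet(IndexReader reader)
            {
                return null;
            }
        }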
        public void TestNullDocIdSet()
        {
            Directory   dir    = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.Close();

            IndexReader reader = IndexReader.Open(dir, true, null);

            Filter filter = new AnonymousFilter();

            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, cacher.GetDocIdSet(reader, null));

            reader.Close();
        }
Example #22
        private static void assertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
        {
            CachingWrapperFilter cacher      = new CachingWrapperFilter(filter);
            DocIdSet             originalSet = filter.GetDocIdSet(reader);
            DocIdSet             cachedSet   = cacher.GetDocIdSet(reader);

            Assert.IsTrue(cachedSet.IsCacheable);
            Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);
            //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
            if (originalSet.IsCacheable)
            {
                Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of the same class as the uncached one, if cacheable");
            }
            else
            {
                Assert.IsTrue(cachedSet is OpenBitSetDISI, "Cached DocIdSet must be an OpenBitSet if the original one was not cacheable");
            }
        }
Example #23
        private IEnumerable<FacetHits> SearchInternal(Query query = null, int maxDocPerGroup = DefaultMaxDocPerGroup)
        {
            if (query == null)
            {
                query = new MatchAllDocsQuery();
            }

            var queryDocidSet =
                new CachingWrapperFilter(new QueryWrapperFilter(query))
                .GetDocIdSet(reader)
                .AsEnumerable()
                .ToList();

            var hitsPerFacet = groups.Select(g => new FacetHits(g.Item1, reader, queryDocidSet, g.Item2, maxDocPerGroup)).ToList();

            Parallel.ForEach(hitsPerFacet, h => h.Calculate());

            return(hitsPerFacet);
        }
Example #24
        public virtual void TestNullDocIdSetIterator()
        {
            Directory         dir    = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

            writer.Dispose();

            IndexReader         reader  = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
            AtomicReaderContext context = (AtomicReaderContext)reader.Context;

            Filter filter = new FilterAnonymousInnerClassHelper2(this, context);
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.IsNull(cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs));

            reader.Dispose();
            dir.Dispose();
        }
Example #25
        public static List<Tuple<string, Filter>> ExtractTermsForField(this IndexReader indexReader, string field)
        {
            Throw.IfArgumentNull(indexReader);
            Throw.IfArgumentNullOrEmpty(field);

            var allterms = new List<Tuple<string, Filter>>();
            var termReader = indexReader.Terms(new Term(field, String.Empty));

            do
            {
                if (termReader.Term() == null || termReader.Term().Field() != field)
                    break;

                var facetQuery = new TermQuery(termReader.Term().CreateTerm(termReader.Term().Text()));
                var facetQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(facetQuery));
                allterms.Add(new Tuple<string, Filter>(termReader.Term().Text(), facetQueryFilter));
            }
            while (termReader.Next());

            return allterms;
        }
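The (term, filter) pairs this returns can then be intersected with a base result set to compute per-term counts. A sketch under the same 3.x API assumptions (baseFilter is assumed):

        var baseBits = new OpenBitSetDISI(baseFilter.GetDocIdSet(indexReader).Iterator(), indexReader.MaxDoc());
        foreach (var pair in indexReader.ExtractTermsForField("color"))
        {
            var termBits = new OpenBitSetDISI(pair.Item2.GetDocIdSet(indexReader).Iterator(), indexReader.MaxDoc());
            termBits.And(baseBits);
            Console.WriteLine("{0}: {1}", pair.Item1, termBits.Cardinality());
        }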
Example #26
		public virtual void TestCachingWorks()
		{
			Directory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
			writer.Close();
			
			IndexReader reader = IndexReader.Open(dir);
			
			MockFilter filter = new MockFilter();
			CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
			
			// first time, nested filter is called
			cacher.Bits(reader);
			Assert.IsTrue(filter.WasCalled(), "first time");
			
			// second time, nested filter should not be called
			filter.Clear();
			cacher.Bits(reader);
			Assert.IsFalse(filter.WasCalled(), "second time");
			
			reader.Close();
		}
Example #28
        public virtual void TestNullDocIdSetIterator()
        {
            Directory         dir    = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            writer.Dispose();

            IndexReader         reader  = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
            AtomicReaderContext context = (AtomicReaderContext)reader.Context;

            Filter filter = new FilterAnonymousInnerClassHelper2(this, context);
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.IsNull(cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs));

            reader.Dispose();
            dir.Dispose();
        }
 public virtual void TestEmpty2()
 {
     BooleanQuery query = new BooleanQuery();
     query.Add(new TermQuery(new Term("id", "0")), BooleanClause.Occur.MUST);
     query.Add(new TermQuery(new Term("id", "0")), BooleanClause.Occur.MUST_NOT);
     Filter expected = new QueryWrapperFilter(query);
     Filter actual = new CachingWrapperFilter(expected);
     AssertFilterEquals(expected, actual);
 }
        public Hits Search(Query query, int maxDocPerGroup = DefaultMaxDocPerGroup)
        {
            List<HitsPerFacet> hitsPerGroup = new List<HitsPerFacet>();

            DocIdSet queryDocidSet = new CachingWrapperFilter(new QueryWrapperFilter(query)).GetDocIdSet(_Reader);
            Action[] actions = new Action[_Groups.Count];           
            for (int i = 0; i < _Groups.Count; i++)
            {
                HitsPerFacet h = new HitsPerFacet(new FacetName(_Groups[i].Key.ToArray()), _Reader, queryDocidSet, _Groups[i].Value, maxDocPerGroup);
                hitsPerGroup.Add(h);
                actions[i] = () => h.Calculate();
            }
            
            Parallel.Invoke(actions);
            
            Hits hits = new Hits();
            hits.HitsPerFacet = hitsPerGroup.ToArray();

            return hits;
        }
Example #32
        public void TestWithCachingFilter()
        {
            Directory dir = new RAMDirectory();
            Analyzer analyzer = new WhitespaceAnalyzer();

            IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
            writer.Close();

            Searcher searcher = new IndexSearcher(dir, true);

            Query query = new TermQuery(new Term("none", "none"));

            QueryWrapperFilter queryFilter = new QueryWrapperFilter(query);
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(queryFilter);

            searcher.Search(query, cachingFilter, 1);

            CachingWrapperFilter cachingFilter2 = new CachingWrapperFilter(queryFilter);
            Filter[] chain = new Filter[2];
            chain[0] = cachingFilter;
            chain[1] = cachingFilter2;
            ChainedFilter cf = new ChainedFilter(chain);

            // throws java.lang.ClassCastException: org.apache.lucene.util.OpenBitSet cannot be cast to java.util.BitSet
            searcher.Search(new MatchAllDocsQuery(), cf, 1);
        }
        public Hits Search(Query query, int maxDocPerGroup)
        {
            var hitsPerGroup = new List<HitsPerFacet>();

            DocIdSet queryDocidSet = new CachingWrapperFilter(new QueryWrapperFilter(query)).GetDocIdSet(_Reader);
            var actions = new Action[_Groups.Count];           
            for (int i = 0; i < _Groups.Count; i++)
            {
                var h = new HitsPerFacet(new FacetName(_Groups[i].Key.ToArray()), _Reader, queryDocidSet, _Groups[i].Value, maxDocPerGroup);
                hitsPerGroup.Add(h);
                actions[i] = h.Calculate;
            }
            
#if !NET35
            Parallel.Invoke(actions);
#else
            foreach (var action in actions)
                action();
#endif
            
            Hits hits = new Hits {HitsPerFacet = hitsPerGroup.ToArray()};

            return hits;
        }
        private IEnumerable<FacetMatch> FindMatchesInQuery(Query baseQueryWithoutFacetDrilldown, IList<FacetFieldInfo> allFacetFieldInfos, FacetFieldInfo facetFieldInfoToCalculateFor)
        {
            var calculations = 0;

            var queryFilter = new CachingWrapperFilter(new QueryWrapperFilter(CreateFacetedQuery(baseQueryWithoutFacetDrilldown, allFacetFieldInfos, facetFieldInfoToCalculateFor.FieldName)));
            var bitsQueryWithoutFacetDrilldown = new OpenBitSetDISI(queryFilter.GetDocIdSet(IndexReader).Iterator(), IndexReader.MaxDoc);
            var baseQueryWithoutFacetDrilldownCopy = new OpenBitSetDISI(bitsQueryWithoutFacetDrilldown.Bits.Length)
            {
                Bits = new long[bitsQueryWithoutFacetDrilldown.Bits.Length]
            };

            var calculatedFacetCounts = new ResultCollection(facetFieldInfoToCalculateFor);
            foreach (var facetValueBitSet in GetOrCreateFacetBitSet(facetFieldInfoToCalculateFor.FieldName).FacetValueBitSetList)
            {
                var isSelected = calculatedFacetCounts.IsSelected(facetValueBitSet.Value);

                if (!isSelected && facetValueBitSet.Count < calculatedFacetCounts.MinCountForNonSelected) //Impossible to get a better result
                {
                    if (calculatedFacetCounts.HaveEnoughResults)
                        break;
                }

                bitsQueryWithoutFacetDrilldown.Bits.CopyTo(baseQueryWithoutFacetDrilldownCopy.Bits, 0);
                baseQueryWithoutFacetDrilldownCopy.NumWords = bitsQueryWithoutFacetDrilldown.NumWords;

                var bitset = facetValueBitSet.Bitset ?? CalculateOpenBitSetDisi(facetFieldInfoToCalculateFor.FieldName, facetValueBitSet.Value);
                baseQueryWithoutFacetDrilldownCopy.And(bitset);
                var count = baseQueryWithoutFacetDrilldownCopy.Cardinality();
                if (count == 0)
                    continue;
                var match = new FacetMatch
                {
                    Count = count,
                    Value = facetValueBitSet.Value,
                    FacetFieldName = facetFieldInfoToCalculateFor.FieldName
                };

                calculations++;
                if (isSelected)
                    calculatedFacetCounts.AddToSelected(match);
                else
                    calculatedFacetCounts.AddToNonSelected(match);
            }

            return calculatedFacetCounts.GetList();
        }
        public virtual void TestCachingWorks()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
            writer.Dispose();

            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
            MockFilter filter = new MockFilter();
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // first time, nested filter is called
            DocIdSet strongRef = cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs); // strong ref so GC cannot clear the cache entry
            Assert.IsTrue(filter.WasCalled(), "first time");

            // make sure no exception if cache is holding the wrong docIdSet
            cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);

            // second time, nested filter should not be called
            filter.Clear();
            cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
            Assert.IsFalse(filter.WasCalled(), "second time");

            reader.Dispose();
            dir.Dispose();
        }
Example #38
        public virtual void TestWithCachingFilter()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
            IndexReader reader = writer.Reader;
            writer.Dispose();

            IndexSearcher searcher = NewSearcher(reader);

            Query query = new TermQuery(new Term("none", "none"));

            QueryWrapperFilter queryFilter = new QueryWrapperFilter(query);
            CachingWrapperFilter cachingFilter = new CachingWrapperFilter(queryFilter);

            searcher.Search(query, cachingFilter, 1);

            CachingWrapperFilter cachingFilter2 = new CachingWrapperFilter(queryFilter);
            Filter[] chain = new Filter[2];
            chain[0] = cachingFilter;
            chain[1] = cachingFilter2;
            ChainedFilter cf = new ChainedFilter(chain);

            // throws java.lang.ClassCastException: org.apache.lucene.util.OpenBitSet cannot be cast to java.util.BitSet
            searcher.Search(new MatchAllDocsQuery(), cf, 1);
            reader.Dispose();
            dir.Dispose();
        }
        public virtual void TestNullDocIdSetIterator()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
            writer.Dispose();

            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
            AtomicReaderContext context = (AtomicReaderContext)reader.Context;

            Filter filter = new FilterAnonymousInnerClassHelper2(this, context);
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // the caching filter should return the empty set constant
            Assert.IsNull(cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs));

            reader.Dispose();
            dir.Dispose();
        }
 public Filter GetFilter(string tenantId, string[] types, bool includePrerelease, bool includeUnlisted)
 {
     Filter visibilityFilter = GetVisibilityFilter(tenantId);
     Filter typeFilter = new CachingWrapperFilter(new TypeFilter(types));
     Filter versionFilter = GetVersionFilter(includePrerelease, includeUnlisted);
     
     return new ChainedFilter(new[] { visibilityFilter, versionFilter, typeFilter }, ChainedFilter.Logic.AND);
 }
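ChainedFilter (from the Lucene.NET contrib package) applies one boolean operator across the whole chain; with Logic.AND a document must pass every clause. A hedged usage sketch (the searcher setup is assumed):

 Filter combined = GetFilter("tenant-1", new[] { "Package" },
     includePrerelease: false, includeUnlisted: false);
 TopDocs hits = searcher.Search(new MatchAllDocsQuery(), combined, 10);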
Example #42
        public IEnumerable<IHit> Query(int pageIndex, int pageSize, out int totalCount, out IEnumerable<FacetGroup> facetedResults)
        {
            totalCount = 0;
            facetedResults = null;

            if (searchPaths == null)
                searchPaths = new List<string>();
            if (searchPaths.Count == 0)
                searchPaths.AddRange(indexPaths.Values.Select(o => o.Path));

            List<LuceneHit> results = new List<LuceneHit>();

            List<IndexSearcher> subSearchs = new List<IndexSearcher>();

            searchPaths.ForEach(o => subSearchs.Add(new IndexSearcher(FSDirectory.Open(o))));

            if (facetFields != null && facetFields.Count > 0)
            {
                var facetGroups = new List<FacetGroup>();
                var mainQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(query));
                MultiReader readers = new MultiReader(subSearchs.Select(o => o.IndexReader).ToArray());

                foreach (var facetField in facetFields)
                {
                    FacetGroup fg = new FacetGroup();
                    fg.FieldName = facetFieldNameProvider.GetMapName(TypeName, facetField);
                    var items = new List<FacetItem>();

                    var allDistinctField = FieldCache_Fields.DEFAULT.GetStrings(readers, facetField).Distinct().ToArray();
                    int totalHits = 0;

                    Parallel.ForEach(allDistinctField, fieldValue =>
                        {
                            var facetQuery = new TermQuery(new Term(facetField, fieldValue));
                            var facetQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(facetQuery));

                            var bs = new OpenBitSetDISI(facetQueryFilter.GetDocIdSet(readers).Iterator(), readers.MaxDoc);
                            bs.InPlaceAnd(mainQueryFilter.GetDocIdSet(readers).Iterator());
                            int count = (Int32)bs.Cardinality();

                            FacetItem item = new FacetItem();
                            item.GroupValue = fieldValue;
                            item.Count = count;

                            // List<T>.Add and += are not thread-safe inside Parallel.ForEach;
                            // synchronize updates to the shared collection and counter.
                            lock (items)
                            {
                                items.Add(item);
                                totalHits += count;
                            }
                        }
                    );

                    fg.FacetItems = items.OrderByDescending(o => o.Count);
                    fg.TotalHits = totalHits;

                    facetGroups.Add(fg);
                }

                facetedResults = facetGroups.OrderBy(o => o.FieldName);
            }
            ParallelMultiSearcher searcher = new ParallelMultiSearcher(subSearchs.ToArray());
            Sort sort = null;
            if (sortFields != null && sortFields.Count > 0)
            {
                sort = new Sort(sortFields.ToArray());
            }

            int maxDoc = searcher.MaxDoc;
            int startIndex = 0;
            if (pageIndex >= 0 && pageSize > 0)
            {
                startIndex = pageIndex * pageSize;
                maxDoc = pageSize * (pageIndex + 1);
            }
            var docs = sort == null ?  searcher.Search(query, null, maxDoc) : searcher.Search(query, null, maxDoc, sort);
            totalCount = docs.TotalHits;
            int endIndex = docs.ScoreDocs.Length; // TotalHits may exceed the ScoreDocs actually returned
            for (int i = startIndex; i < endIndex; i++)
            {
                LuceneHit h = new LuceneHit(TypeName, DocumentBuilder, searcher.Doc(docs.ScoreDocs[i].Doc));
                results.Add(h);
            }
            return results;
        }
Example #43
        public void TestEnforceDeletions()
        {
            Directory     dir      = new MockRAMDirectory();
            IndexWriter   writer   = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            IndexReader   reader   = writer.GetReader();
            IndexSearcher searcher = new IndexSearcher(reader);

            // add a doc, refresh the reader, and check that its there
            Document doc = new Document();

            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);

            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

            Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));

            // ignore deletions
            CachingWrapperFilter filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");


            // force cache to regenerate:
            filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);

            writer.AddDocument(doc);
            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

            constantScore = new ConstantScoreQuery(filter);
            docs          = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // make sure we get a cache hit when we reopen reader
            // that had no change to deletions
            IndexReader newReader = RefreshReader(reader);

            Assert.IsTrue(reader != newReader);
            reader   = newReader;
            searcher = new IndexSearcher(reader);
            int missCount = filter.missCount;

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.missCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            missCount = filter.missCount;
            docs      = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(missCount + 1, filter.missCount);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");


            // apply deletions dynamically
            filter = new CachingWrapperFilter(startFilter, CachingWrapperFilter.DeletesMode.DYNAMIC);

            writer.AddDocument(doc);
            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            constantScore = new ConstantScoreQuery(filter);
            docs          = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            missCount = filter.missCount;
            docs      = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

            // doesn't count as a miss
            Assert.AreEqual(missCount, filter.missCount);
        }
        /// <summary>
        /// Creates facets.
        /// </summary>
        /// <param name="reader">The reader.</param>
        /// <param name="query">The query.</param>
        private void CreateFacets(IndexReader reader, Query query)
        {
            var groups = new List<FacetGroup>();
            var baseQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(query));
            var baseDocIdSet = baseQueryFilter.GetDocIdSet(reader);

            #region Subcategory filters


            /* 
            var catalogCriteria = Results.SearchCriteria as CatalogItemSearchCriteria;
            if (catalogCriteria != null && catalogCriteria.ChildCategoryFilters.Any())
            {
                var group = new FacetGroup("Subcategory");
                var groupCount = 0;

                foreach (var value in catalogCriteria.ChildCategoryFilters)
                {
                    var q = LuceneQueryHelper.CreateQuery(catalogCriteria.OutlineField, value);

                    if (q == null) continue;

                    var queryFilter = new CachingWrapperFilter(new QueryWrapperFilter(q));
                    var filterArray = queryFilter.GetDocIdSet(reader);
                    var newCount = (int)CalculateFacetCount(baseDocIdSet, filterArray);
                    if (newCount == 0) continue;

                    var newFacet = new Facet(group, value.Code, value.Name, newCount);
                    group.Facets.Add(newFacet);
                    groupCount += newCount;
                }

                // Add only if items exist under
                if (groupCount > 0)
                {
                    groups.Add(group);
                }
            }
             * */

            #endregion

            if (Results.SearchCriteria.Filters != null && Results.SearchCriteria.Filters.Length > 0)
            {
                foreach (var filter in Results.SearchCriteria.Filters)
                {

                    if (!string.IsNullOrEmpty(Results.SearchCriteria.Currency) && filter is PriceRangeFilter)
                    {
                        var valCurrency = ((PriceRangeFilter)filter).Currency;
                        if (!valCurrency.Equals(Results.SearchCriteria.Currency, StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                    }

                    var facetGroup = CalculateResultCount(reader, baseDocIdSet, filter, Results.SearchCriteria);
                    if (facetGroup != null)
                    {
                        groups.Add(facetGroup);
                    }
                }
            }

            Results.FacetGroups = groups.ToArray();
        }
 public virtual void TestSparse()
 {
     for (int i = 0; i < 10; i++)
     {
         int id_start = Random().Next(Ir.MaxDoc() - 1);
         int id_end = id_start + 1;
         Query query = TermRangeQuery.NewStringRange("id", Convert.ToString(id_start), Convert.ToString(id_end), true, true);
         Filter expected = new QueryWrapperFilter(query);
         Filter actual = new CachingWrapperFilter(expected);
         AssertFilterEquals(expected, actual);
     }
 }
 private static void assertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
 {
     CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
     DocIdSet originalSet = filter.GetDocIdSet(reader);
     DocIdSet cachedSet = cacher.GetDocIdSet(reader);
     Assert.IsTrue(cachedSet.IsCacheable());
     Assert.AreEqual(shouldCacheable, originalSet.IsCacheable());
     //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
     if (originalSet.IsCacheable())
     {
         Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of the same class as the uncached one, if cacheable");
     }
     else
     {
         Assert.IsTrue(cachedSet is OpenBitSetDISI, "Cached DocIdSet must be an OpenBitSet if the original one was not cacheable");
     }
 }
 public virtual void TestSingle()
 {
     for (int i = 0; i < 10; i++)
     {
         int id = Random().Next(Ir.MaxDoc());
         Query query = new TermQuery(new Term("id", Convert.ToString(id)));
         Filter expected = new QueryWrapperFilter(query);
         Filter actual = new CachingWrapperFilter(expected);
         AssertFilterEquals(expected, actual);
     }
 }
 private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
 {
     Assert.IsTrue(reader.Context is AtomicReaderContext);
     AtomicReaderContext context = (AtomicReaderContext)reader.Context;
     CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
     DocIdSet originalSet = filter.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
     DocIdSet cachedSet = cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
     if (originalSet == null)
     {
         Assert.IsNull(cachedSet);
     }
     if (cachedSet == null)
     {
         Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
     }
     else
     {
         Assert.IsTrue(cachedSet.Cacheable);
         Assert.AreEqual(shouldCacheable, originalSet.Cacheable);
         //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
         if (originalSet.Cacheable)
         {
             Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of the same class as the uncached one, if cacheable");
         }
         else
         {
             Assert.IsTrue(cachedSet is FixedBitSet, "Cached DocIdSet must be a FixedBitSet if the original one was not cacheable");
         }
     }
 }
        public Filter GetFilter(string tenantId, string[] types)
        {
            Filter visibilityFilter = GetVisibilityFilter(tenantId);
            Filter typeFilter = new CachingWrapperFilter(new TypeFilter(types));

            return new ChainedFilter(new[] { visibilityFilter, typeFilter }, ChainedFilter.Logic.AND);
        }
Example #50
        public virtual void TestEnforceDeletions()
        {
            Directory         dir    = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy(10)));
            // asserts below requires no unexpected merges:

            // NOTE: cannot use writer.getReader because RIW (on
            // flipping a coin) may give us a newly opened reader,
            // but we use .reopen on this reader below and expect to
            // (must) get an NRT reader:
            DirectoryReader reader = DirectoryReader.Open(writer.IndexWriter, true);
            // same reason we don't wrap?
            IndexSearcher searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                reader, false);

            // add a doc, refresh the reader, and check that it's there
            Document doc = new Document();

            doc.Add(NewStringField("id", "1", Field.Store.YES));
            writer.AddDocument(doc);

            reader   = RefreshReader(reader);
            searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                reader, false);

            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

            Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));

            CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.IsTrue(filter.GetSizeInBytes() > 0);

            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

            Query constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // make sure we get a cache hit when we reopen reader
            // that had no change to deletions

            // fake delete (deletes nothing):
            writer.DeleteDocuments(new Term("foo", "bar"));

            IndexReader oldReader = reader;
            reader = RefreshReader(reader);
            Assert.IsTrue(reader == oldReader);
            int missCount = filter.missCount;
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // cache hit:
            Assert.AreEqual(missCount, filter.missCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            // NOTE: important to hold ref here so GC doesn't clear
            // the cache entry!  Else the assert below may sometimes
            // fail:
            oldReader = reader;
            reader    = RefreshReader(reader);

            searcher = NewSearcher(reader, false);

            missCount = filter.missCount;
            docs      = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            // cache hit
            Assert.AreEqual(missCount, filter.missCount);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

            // apply deletes dynamically:
            filter = new CachingWrapperFilter(startFilter);
            writer.AddDocument(doc);
            reader   = RefreshReader(reader);
            searcher = NewSearcher(reader, false);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            missCount = filter.missCount;
            Assert.IsTrue(missCount > 0);
            constantScore = new ConstantScoreQuery(filter);
            docs          = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.missCount);

            writer.AddDocument(doc);

            // NOTE: important to hold ref here so GC doesn't clear
            // the cache entry!  Else the assert below may sometimes
            // fail:
            oldReader = reader;

            reader   = RefreshReader(reader);
            searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                reader, false);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(2, docs.TotalHits, "[query + filter] Should find 2 hits...");
            Assert.IsTrue(filter.missCount > missCount);
            missCount = filter.missCount;

            constantScore = new ConstantScoreQuery(filter);
            docs          = searcher.Search(constantScore, 1);
            Assert.AreEqual(2, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.missCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader   = RefreshReader(reader);
            searcher = NewSearcher(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                reader, false);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
            // CWF reused the same entry (it dynamically applied the deletes):
            Assert.AreEqual(missCount, filter.missCount);

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
            // CWF reused the same entry (it dynamically applied the deletes):
            Assert.AreEqual(missCount, filter.missCount);

            // NOTE: silliness to make sure JRE does not eliminate
            // our holding onto oldReader to prevent
            // CachingWrapperFilter's WeakHashMap from dropping the
            // entry:
            Assert.IsTrue(oldReader != null);

            reader.Dispose();
            writer.Dispose();
            dir.Dispose();
        }
        public virtual void TestEnforceDeletions()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy(10)));
            // asserts below requires no unexpected merges:

            // NOTE: cannot use writer.getReader because RIW (on
            // flipping a coin) may give us a newly opened reader,
            // but we use .reopen on this reader below and expect to
            // (must) get an NRT reader:
            DirectoryReader reader = DirectoryReader.Open(writer.w, true);
            // same reason we don't wrap?
            IndexSearcher searcher = NewSearcher(reader, false);

            // add a doc, refresh the reader, and check that it's there
            Document doc = new Document();
            doc.Add(NewStringField("id", "1", Field.Store.YES));
            writer.AddDocument(doc);

            reader = RefreshReader(reader);
            searcher = NewSearcher(reader, false);

            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

            Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));

            CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.IsTrue(filter.SizeInBytes() > 0);

            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

            Query constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // make sure we get a cache hit when we reopen reader
            // that had no change to deletions

            // fake delete (deletes nothing):
            writer.DeleteDocuments(new Term("foo", "bar"));

            IndexReader oldReader = reader;
            reader = RefreshReader(reader);
            Assert.IsTrue(reader == oldReader);
            int missCount = filter.MissCount;
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // cache hit:
            Assert.AreEqual(missCount, filter.MissCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            // NOTE: important to hold ref here so GC doesn't clear
            // the cache entry!  Else the assert below may sometimes
            // fail:
            oldReader = reader;
            reader = RefreshReader(reader);

            searcher = NewSearcher(reader, false);

            missCount = filter.MissCount;
            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            // cache hit
            Assert.AreEqual(missCount, filter.MissCount);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");

            // apply deletes dynamically:
            filter = new CachingWrapperFilter(startFilter);
            writer.AddDocument(doc);
            reader = RefreshReader(reader);
            searcher = NewSearcher(reader, false);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            missCount = filter.MissCount;
            Assert.IsTrue(missCount > 0);
            constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.MissCount);

            writer.AddDocument(doc);

            // NOTE: important to hold ref here so GC doesn't clear
            // the cache entry!  Else the assert below may sometimes
            // fail:
            oldReader = reader;

            reader = RefreshReader(reader);
            searcher = NewSearcher(reader, false);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(2, docs.TotalHits, "[query + filter] Should find 2 hits...");
            Assert.IsTrue(filter.MissCount > missCount);
            missCount = filter.MissCount;

            constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(2, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.MissCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader = RefreshReader(reader);
            searcher = NewSearcher(reader, false);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
            // CWF reused the same entry (it dynamically applied the deletes):
            Assert.AreEqual(missCount, filter.MissCount);

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
            // CWF reused the same entry (it dynamically applied the deletes):
            Assert.AreEqual(missCount, filter.MissCount);

            // NOTE: silliness to make sure JRE does not eliminate
            // our holding onto oldReader to prevent
            // CachingWrapperFilter's WeakHashMap from dropping the
            // entry:
            Assert.IsTrue(oldReader != null);

            reader.Dispose();
            writer.Dispose();
            dir.Dispose();
        }