public abstract GetDocIdSet(Lucene.Net.Index.IndexReader reader) : Lucene.Net.Search.DocIdSet
reader | Lucene.Net.Index.IndexReader
return | Lucene.Net.Search.DocIdSet
// Returns the cached DocIdSet for this reader, computing and caching it on a miss.
// Legacy cache entries may be either a DocIdSet or a raw BitArray (older storage
// format), so both shapes are handled on a hit.
public override DocIdSet GetDocIdSet(IndexReader reader)
{
    // The core key identifies the reader's underlying segment data; the deletions
    // key differs only when the reader has deletions, so sets computed against
    // different deletion states are cached separately.
    object coreKey = reader.GetFieldCacheKey();
    object delCoreKey = reader.HasDeletions() ? reader.GetDeletesCacheKey() : coreKey;
    object cached = cache.Get(reader, coreKey, delCoreKey);
    if (cached != null)
    {
        hitCount++;
        if (cached is DocIdSet)
        {
            return ((DocIdSet)cached);
        }
        else
        {
            // Legacy entry format: wrap the raw BitArray so callers always see a DocIdSet.
            return (new DocIdBitSet((System.Collections.BitArray)cached));
        }
    }
    missCount++;
    // cache miss
    DocIdSet docIdSet = DocIdSetToCache(filter.GetDocIdSet(reader), reader);
    if (docIdSet != null)
    {
        cache.Put(coreKey, delCoreKey, docIdSet);
    }
    return (docIdSet);
}
// Explains the score of document i for the filtered query: the inner weight's
// explanation (scaled by the boost when != 1), or a zero-score "failure to match
// filter" explanation when the filter rejects the document.
// NOTE(review): f.GetDocIdSet(ir) is dereferenced without a null check — assumes
// this filter never returns null; confirm before reuse.
public virtual Explanation Explain(IndexReader ir, int i)
{
    Explanation inner = weight.Explain(ir, i);
    if (Enclosing_Instance.GetBoost() != 1)
    {
        // Fold the boost into the explanation as a "product of:" node.
        Explanation preBoost = inner;
        inner = new Explanation(inner.GetValue() * Enclosing_Instance.GetBoost(), "product of:");
        inner.AddDetail(new Explanation(Enclosing_Instance.GetBoost(), "boost"));
        inner.AddDetail(preBoost);
    }
    Filter f = Enclosing_Instance.filter;
    DocIdSetIterator docIdSetIterator = f.GetDocIdSet(ir).Iterator();
    // SkipTo positions at the first filtered doc >= i; equality means the filter accepts i.
    if (docIdSetIterator.SkipTo(i) && docIdSetIterator.Doc() == i)
    {
        return (inner);
    }
    else
    {
        Explanation result = new Explanation(0.0f, "failure to match filter: " + f.ToString());
        result.AddDetail(inner);
        return (result);
    }
}
// Collects all documents accepted by both the filter and the scorer by leapfrogging
// the two iterators, stopping early once the group limit is reached.
// NOTE(review): collector is unconditionally cast to GroupCollector — this path
// assumes it is only ever called with that collector type; confirm at call sites.
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector)
{
    DocIdSet docIdSet = filter.GetDocIdSet(reader);
    if (docIdSet == null) return; // filter accepts no documents
    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null) return; // query matches no documents
    // Result discarded — presumably initializes/asserts the scorer position; TODO confirm.
    scorer.DocID();
    DocIdSetIterator docIdSetIterator = docIdSet.Iterator();
    if (docIdSetIterator == null) return; // filter accepts no documents
    int target = docIdSetIterator.NextDoc();
    int num = scorer.Advance(target);
    collector.SetScorer(scorer);
    while (true)
    {
        // Leapfrog until both iterators agree on a doc id. NO_MORE_DOCS also ends this
        // inner loop: both sides eventually advance to it and become equal.
        while (num != target)
        {
            if (num > target) target = docIdSetIterator.Advance(num);
            else num = scorer.Advance(target);
        }
        if (num != DocIdSetIterator.NO_MORE_DOCS && !((GroupCollector)collector).GroupLimitReached)
        {
            collector.Collect(num);
            target = docIdSetIterator.NextDoc();
            num = scorer.Advance(target);
        }
        else break;
    }
}
/// <summary>
/// Verifies that wrapping <paramref name="filter"/> in a CachingWrapperFilter yields
/// a cacheable DocIdSet, and that the cached set's concrete type matches expectations
/// based on whether the original set was itself cacheable. Null sets are only legal
/// when the original filter matched nothing.
/// </summary>
private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
{
    Assert.IsTrue(reader.Context is AtomicReaderContext);
    var context = (AtomicReaderContext)reader.Context;
    var cacher = new CachingWrapperFilter(filter);

    DocIdSet originalSet = filter.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
    DocIdSet cachedSet = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);

    if (originalSet == null)
    {
        Assert.IsNull(cachedSet);
    }
    if (cachedSet == null)
    {
        // A null cached set is only legal when the original produced no documents.
        Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
    }
    else
    {
        Assert.IsTrue(cachedSet.IsCacheable);
        Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);
        if (originalSet.IsCacheable)
        {
            Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
        }
        else
        {
            // cachedSet is known non-null in this branch.
            Assert.IsTrue(cachedSet is FixedBitSet, "Cached DocIdSet must be an FixedBitSet if the original one was not cacheable");
        }
    }
}
/// <summary>
/// Returns the DocIdSet for <paramref name="reader"/>, serving it from the cache when
/// possible, otherwise delegating to the wrapped filter and caching the result.
/// </summary>
public override DocIdSet GetDocIdSet(IndexReader reader)
{
    // Sets computed against different deletion states must not be mixed, so the
    // deletions key only equals the core key when the reader has no deletions.
    object coreKey = reader.GetFieldCacheKey();
    object delCoreKey = reader.HasDeletions() ? reader.GetDeletesCacheKey() : coreKey;

    DocIdSet result = cache.Get(reader, coreKey, delCoreKey);
    if (result != null)
    {
        hitCount++;
        return result;
    }

    // cache miss: build a cacheable representation and remember it.
    missCount++;
    result = DocIdSetToCache(filter.GetDocIdSet(reader), reader);
    if (result != null)
    {
        cache.Put(coreKey, delCoreKey, result);
    }
    return result;
}
/// <summary>
/// Explains the score for document <paramref name="i"/>: the inner weight's
/// explanation (wrapped with the boost when it is not 1), or a zero-score failure
/// explanation when the filter rejects the document.
/// </summary>
public override Explanation Explain(IndexReader ir, int i)
{
    Explanation inner = weight.Explain(ir, i);
    if (Enclosing_Instance.GetBoost() != 1)
    {
        // Fold the boost into the explanation tree as a product node.
        Explanation boosted = new Explanation(inner.GetValue() * Enclosing_Instance.GetBoost(), "product of:");
        boosted.AddDetail(new Explanation(Enclosing_Instance.GetBoost(), "boost"));
        boosted.AddDetail(inner);
        inner = boosted;
    }

    Filter f = Enclosing_Instance.filter;
    DocIdSet docIdSet = f.GetDocIdSet(ir);
    // Both a null set and a null iterator mean "matches nothing".
    DocIdSetIterator iterator = docIdSet == null ? DocIdSet.EMPTY_DOCIDSET.Iterator() : docIdSet.Iterator();
    if (iterator == null)
    {
        iterator = DocIdSet.EMPTY_DOCIDSET.Iterator();
    }

    if (iterator.Advance(i) == i)
    {
        return inner;
    }

    Explanation result = new Explanation(0.0f, "failure to match filter: " + f.ToString());
    result.AddDetail(inner);
    return result;
}
// Returns the (wrapped) cached DocIdSet for this segment, computing and caching it
// on a miss. Caching keys on the reader's core cache key.
public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
{
    var reader = context.AtomicReader;
    object key = reader.CoreCacheKey;
    if (_cache.TryGetValue(key, out DocIdSet docIdSet) && docIdSet != null)
    {
        hitCount++;
    }
    else
    {
        missCount++;
        // Build the set without acceptDocs so the cached entry is deletion-independent;
        // acceptDocs are re-applied on every return below.
        docIdSet = DocIdSetToCache(_filter.GetDocIdSet(context, null), reader);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(docIdSet.IsCacheable);
        }
#if FEATURE_CONDITIONALWEAKTABLE_ADDORUPDATE
        _cache.AddOrUpdate(key, docIdSet);
#else
        _cache[key] = docIdSet;
#endif
    }
    // EMPTY_DOCIDSET is a sentinel stored to remember "no documents"; surface it as null.
    return (docIdSet == EMPTY_DOCIDSET ? null : BitsFilteredDocIdSet.Wrap(docIdSet, acceptDocs));
}
/// <summary>
/// Returns the DocIdSet for <paramref name="reader"/>, caching one set per reader.
/// </summary>
/// <remarks>
/// Fix: the original lazily initialized <c>cache</c> with an unsynchronized null
/// check, so two racing threads could each install a different Hashtable and then
/// lock on different SyncRoot objects (losing each other's entries and breaking
/// mutual exclusion). Interlocked.CompareExchange now publishes exactly one shared
/// instance. Two threads may still both compute the same set on a simultaneous
/// miss; the second Put overwrites the first, which is benign.
/// </remarks>
public override DocIdSet GetDocIdSet(IndexReader reader)
{
    if (cache == null)
    {
        // Atomically publish a single cache instance; losers discard theirs.
        System.Threading.Interlocked.CompareExchange(ref cache, new System.Collections.Hashtable(), null);
    }

    lock (cache.SyncRoot)
    {
        // check cache
        DocIdSet cached = (DocIdSet)cache[reader];
        if (cached != null)
        {
            return cached;
        }
    }

    // Compute outside the lock so a slow filter does not block other readers.
    DocIdSet docIdSet = filter.GetDocIdSet(reader);

    lock (cache.SyncRoot)
    {
        // update cache
        cache[reader] = docIdSet;
    }
    return docIdSet;
}
// Scores only documents accepted by the filter by leapfrogging the filter iterator
// and the scorer: whichever is behind advances to the other's position, and a doc
// is collected whenever they agree.
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector, IState state)
{
    System.Diagnostics.Debug.Assert(filter != null);

    Scorer scorer = weight.Scorer(reader, true, false, state);
    if (scorer == null)
    {
        return;
    }
    int docID = scorer.DocID();
    // A fresh scorer must be unpositioned (-1) or already exhausted.
    System.Diagnostics.Debug.Assert(docID == -1 || docID == DocIdSetIterator.NO_MORE_DOCS);

    // CHECKME: use ConjunctionScorer here?
    DocIdSet filterDocIdSet = filter.GetDocIdSet(reader, state);
    if (filterDocIdSet == null)
    {
        // this means the filter does not accept any documents.
        return;
    }
    DocIdSetIterator filterIter = filterDocIdSet.Iterator(state);
    if (filterIter == null)
    {
        // this means the filter does not accept any documents.
        return;
    }
    int filterDoc = filterIter.NextDoc(state);
    int scorerDoc = scorer.Advance(filterDoc, state);
    collector.SetScorer(scorer);
    while (true)
    {
        if (scorerDoc == filterDoc)
        {
            // Check if scorer has exhausted, only before collecting.
            if (scorerDoc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
            collector.Collect(scorerDoc, state);
            filterDoc = filterIter.NextDoc(state);
            scorerDoc = scorer.Advance(filterDoc, state);
        }
        else if (scorerDoc > filterDoc)
        {
            // Filter is behind: jump it forward to the scorer's position.
            filterDoc = filterIter.Advance(scorerDoc, state);
        }
        else
        {
            // Scorer is behind: jump it forward to the filter's position.
            scorerDoc = scorer.Advance(filterDoc, state);
        }
    }
}
/// <summary>
/// Asserts that <paramref name="filt"/> matches exactly <paramref name="expected"/>
/// documents in <c>reader</c>.
/// </summary>
/// <remarks>
/// Fix: Filter.GetDocIdSet (and DocIdSet.Iterator) may return null to signal
/// "matches nothing"; the original dereferenced both unconditionally and threw a
/// NullReferenceException instead of reporting a cardinality of zero.
/// </remarks>
private void TstFilterCard(String mes, int expected, Filter filt)
{
    int actual = 0;
    DocIdSet docIdSet = filt.GetDocIdSet(reader);
    if (docIdSet != null)
    {
        DocIdSetIterator disi = docIdSet.Iterator();
        if (disi != null)
        {
            while (disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
            {
                actual++;
            }
        }
    }
    Assert.AreEqual(expected, actual, mes);
}
/// <summary>
/// Asserts that <paramref name="filt"/> matches exactly <paramref name="expected"/>
/// documents in <c>reader</c>.
/// </summary>
/// <remarks>
/// Fix: Filter.GetDocIdSet (and DocIdSet.Iterator) may return null to signal
/// "matches nothing"; the original dereferenced both unconditionally and threw a
/// NullReferenceException instead of reporting a cardinality of zero.
/// </remarks>
private void TstFilterCard(String mes, int expected, Filter filt)
{
    int actual = 0;
    DocIdSet docIdSet = filt.GetDocIdSet(reader);
    if (docIdSet != null)
    {
        DocIdSetIterator disi = docIdSet.Iterator();
        if (disi != null)
        {
            while (disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
            {
                actual++;
            }
        }
    }
    Assert.AreEqual(expected, actual, mes);
}
/// <summary>
/// Materializes the wrapped filter's DocIdSet into a BitArray sized to the reader's
/// maximum document id (legacy Bits API bridge).
/// </summary>
public override BitArray Bits(IndexReader reader)
{
    var result = new BitArray(reader.MaxDoc());
    DocIdSetIterator iterator = filter.GetDocIdSet(reader).Iterator();
    for (int doc = iterator.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.NextDoc())
    {
        result.Set(doc, true);
    }
    return result;
}
/// <summary>
/// Checks that a CachingWrapperFilter produces a cacheable DocIdSet and that its
/// concrete type matches whether the original filter's set was itself cacheable.
/// </summary>
private static void assertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
{
    var cacher = new CachingWrapperFilter(filter);
    DocIdSet originalSet = filter.GetDocIdSet(reader);
    DocIdSet cachedSet = cacher.GetDocIdSet(reader);

    // The wrapper must always hand back something safe to cache.
    Assert.IsTrue(cachedSet.IsCacheable);
    Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);

    if (originalSet.IsCacheable)
    {
        Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
    }
    else
    {
        Assert.IsTrue(cachedSet is OpenBitSetDISI, "Cached DocIdSet must be an OpenBitSet if the original one was not cacheable");
    }
}
// inherit javadoc
// Scores the query against the reader; when a filter is present, only documents its
// DocIdSet accepts are passed to the HitCollector (leapfrog between the filter
// iterator and the scorer).
// NOTE(review): filter.GetDocIdSet(reader) is dereferenced without a null check —
// assumes filters on this path never return null; confirm before reuse.
public override void Search(Weight weight, Filter filter, HitCollector results)
{
    Scorer scorer = weight.Scorer(reader);
    if (scorer == null)
    {
        return;
    }

    if (filter == null)
    {
        // Unfiltered: let the scorer drive collection directly.
        scorer.Score(results);
        return;
    }

    DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here?

    bool more = filterDocIdIterator.Next() && scorer.SkipTo(filterDocIdIterator.Doc());

    while (more)
    {
        int filterDocId = filterDocIdIterator.Doc();
        if (filterDocId > scorer.Doc() && !scorer.SkipTo(filterDocId))
        {
            // Scorer exhausted before reaching the filter's position.
            more = false;
        }
        else
        {
            int scorerDocId = scorer.Doc();
            if (scorerDocId == filterDocId) // permitted by filter
            {
                results.Collect(scorerDocId, scorer.Score());
                more = filterDocIdIterator.Next();
            }
            else
            {
                // Filter is behind: jump it forward to the scorer's position.
                more = filterDocIdIterator.SkipTo(scorerDocId);
            }
        }
    }
}
/// <summary>
/// Counts, per facet value, how many documents match the query (and optional
/// filter) intersected with that value's precomputed bitset.
/// </summary>
/// <remarks>
/// Fix: the filter bitset was sized with a hard-coded 1000 (flagged by the original
/// TODO), which silently truncated filter matches for documents with ids >= 1000
/// and produced wrong facet counts on larger indexes. It is now sized by
/// indexReader.MaxDoc(), consistent with the query bitset.
/// </remarks>
private List<FacetMatch> FindMatchesInQuery(Facet facet, Filter query, Filter filter, IndexReader indexReader)
{
    int maxDoc = indexReader.MaxDoc();
    var matches = facet.Values.Select(value =>
    {
        // Intersect the query's matches with this facet value's bitset (value.Item2).
        var bitsQuery = new OpenBitSetDISI(query.GetDocIdSet(indexReader).Iterator(), maxDoc);
        bitsQuery.And(value.Item2);

        if (filter != null)
        {
            var bitsFilter = new OpenBitSetDISI(filter.GetDocIdSet(indexReader).Iterator(), maxDoc);
            bitsQuery.And(bitsFilter);
        }

        var count = bitsQuery.Cardinality();
        return new FacetMatch() { Count = count, Value = value.Item1, Id = facet.Id };
    }).ToList();
    return matches;
}
// Returns the (wrapped) cached DocIdSet for this segment, computing and caching it
// on a miss, keyed by the reader's core cache key.
// NOTE(review): assumes the _cache indexer yields null (rather than throwing) for a
// missing key — confirm against the cache type's contract.
public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
{
    var reader = context.AtomicReader;
    object key = reader.CoreCacheKey;
    DocIdSet docIdSet = _cache[key];
    if (docIdSet != null)
    {
        hitCount++;
    }
    else
    {
        missCount++;
        // Build without acceptDocs so the cached set is deletion-independent;
        // acceptDocs are re-applied on every return below.
        docIdSet = DocIdSetToCache(_filter.GetDocIdSet(context, null), reader);
        Debug.Assert(docIdSet.IsCacheable);
        _cache[key] = docIdSet;
    }
    // EMPTY_DOCIDSET is a sentinel for "no documents"; surface it as null.
    return (docIdSet == EMPTY_DOCIDSET ? null : BitsFilteredDocIdSet.Wrap(docIdSet, acceptDocs));
}
/// <summary>
/// Explains document <paramref name="i"/>: returns the inner weight's explanation
/// when the filter accepts the document, otherwise a zero-score failure explanation
/// wrapping it.
/// </summary>
public override Explanation Explain(AtomicReaderContext ir, int i)
{
    Explanation inner = weight.Explain(ir, i);
    Filter f = outerInstance.filter;
    DocIdSet docIdSet = f.GetDocIdSet(ir, ir.AtomicReader.LiveDocs);

    // A null set or a null iterator both mean "matches nothing".
    DocIdSetIterator iterator = docIdSet?.GetIterator() ?? DocIdSetIterator.GetEmpty();
    if (iterator == null)
    {
        iterator = DocIdSetIterator.GetEmpty();
    }

    if (iterator.Advance(i) == i)
    {
        return inner;
    }

    Explanation result = new Explanation(0.0f, "failure to match filter: " + f.ToString());
    result.AddDetail(inner);
    return result;
}
// Returns the cached DocIdSet for this reader, computing and caching it on a miss.
// Entries are held weakly per reader. Legacy entries may be a raw BitArray, which
// is wrapped in a DocIdBitSet on the way out.
// NOTE(review): the null check on 'cache' and the separate locked read/write are not
// one atomic step — threads may race to create the table or compute the same set.
// Duplicate computation is benign, but two threads can install different tables;
// confirm whether callers guarantee single-threaded first use.
public override DocIdSet GetDocIdSet(IndexReader reader)
{
    if (cache == null)
    {
        cache = new SupportClass.WeakHashTable();
    }
    System.Object cached = null;
    lock (cache.SyncRoot)
    {
        // check cache
        cached = cache[reader];
    }
    if (cached != null)
    {
        if (cached is DocIdSet)
        {
            return ((DocIdSet)cached);
        }
        else
        {
            // Legacy entry format: a raw BitArray.
            return (new DocIdBitSet((System.Collections.BitArray)cached));
        }
    }
    DocIdSet docIdSet = DocIdSetToCache(filter.GetDocIdSet(reader), reader);
    if (docIdSet != null)
    {
        lock (cache.SyncRoot)
        {
            // update cache
            cache[reader] = docIdSet;
        }
    }
    return (docIdSet);
}
// Returns the (wrapped) cached DocIdSet for this segment, computing and caching it
// on a miss. The cache is keyed by the reader's core cache key so entries expire
// with the segment core.
public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
{
    var reader = context.AtomicReader;
    object key = reader.CoreCacheKey;
    if (cache.TryGetValue(key, out DocIdSet docIdSet))
    {
        hitCount++;
    }
    else
    {
        missCount++;
        // Build without acceptDocs so the cached set is deletion-independent;
        // acceptDocs are re-applied on every return below.
        docIdSet = DocIdSetToCache(filter.GetDocIdSet(context, null), reader);
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(docIdSet.IsCacheable);
        }
#if FEATURE_CONDITIONALWEAKTABLE_ENUMERATOR
        cache.AddOrUpdate(key, docIdSet);
#else
        UninterruptableMonitor.Enter(cache);
        try
        {
            cache.AddOrUpdate(key, docIdSet);
            // LUCENENET specific - since .NET Standard 2.0 and .NET Framework don't have a CondtionalWeakTable enumerator,
            // we use a weak event to retrieve the DocIdSet instances
            reader.SubscribeToGetCacheKeysEvent(eventAggregator.GetEvent <Events.GetCacheKeysEvent>());
        }
        finally
        {
            UninterruptableMonitor.Exit(cache);
        }
#endif
    }
    // EMPTY_DOCIDSET is a sentinel for "no documents"; surface it as null.
    return (docIdSet == EMPTY_DOCIDSET ? null : BitsFilteredDocIdSet.Wrap(docIdSet, acceptDocs));
}
// Scores the query, counting facet hits per collected document. Without a filter
// every scorer hit is considered; with a filter the scorer and the filter's
// iterator are leapfrogged and only agreeing doc ids are considered.
// NOTE(review): filter.GetDocIdSet(reader) is dereferenced without a null check —
// assumes filters on this path never return null; confirm before reuse.
public override void Search(Weight weight, Filter filter, Collector results)
{
    IndexReader reader = IndexReader;

    // Validation is only needed when at least one facet collector post-filters docs.
    bool doValidate = false;
    FacetHitCollector[] facetCollectors = this.facetCollectors.ToArray();
    foreach (FacetHitCollector facetCollector in facetCollectors)
    {
        if (facetCollector.PostDocIDSetIterator != null)
        {
            doValidate = true;
            break;
        }
    }

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        return;
    }
    results.SetScorer(scorer);

    if (filter == null)
    {
        // Unfiltered path: collect every scorer hit that passes facet validation.
        while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
        {
            int doc = scorer.DocID();
            if (validateAndIncrement(doc, facetCollectors, doValidate))
            {
                results.Collect(doc);
            }
        }
        return;
    }

    DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here?

    bool more = filterDocIdIterator.NextDoc() != DocIdSetIterator.NO_MORE_DOCS &&
        scorer.Advance(filterDocIdIterator.DocID()) != DocIdSetIterator.NO_MORE_DOCS;

    while (more)
    {
        int filterDocId = filterDocIdIterator.DocID();
        if (filterDocId > scorer.DocID() && scorer.Advance(filterDocId) == DocIdSetIterator.NO_MORE_DOCS)
        {
            // Scorer exhausted before reaching the filter's position.
            more = false;
        }
        else
        {
            int scorerDocId = scorer.DocID();
            if (scorerDocId == filterDocId) // permitted by filter
            {
                if (validateAndIncrement(scorerDocId, facetCollectors, doValidate))
                {
                    results.Collect(scorerDocId);
                }
                more = filterDocIdIterator.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
            }
            else
            {
                // Filter is behind: jump it forward to the scorer's position.
                more = filterDocIdIterator.Advance(scorerDocId) != DocIdSetIterator.NO_MORE_DOCS;
            }
        }
    }
}
/// <summary>
/// Returns an iterator over the filter's matching documents, substituting the empty
/// iterator when the filter yields no set (or a set without an iterator).
/// </summary>
private DocIdSetIterator getDISI(Filter filter, IndexReader reader)
{
    DocIdSet docIdSet = filter.GetDocIdSet(reader);
    if (docIdSet != null)
    {
        DocIdSetIterator iterator = docIdSet.Iterator();
        if (iterator != null)
        {
            return iterator;
        }
    }
    // Either no set or no iterator: behave as "matches nothing".
    return DocIdSet.EMPTY_DOCIDSET.Iterator();
}
/// <summary>
/// Uses the <see cref="FilterManager"/> to keep the cache for a filter on the
/// searcher side of a remote connection.
/// </summary>
/// <param name="reader">the index reader for the Filter</param>
/// <returns>the DocIdSet</returns>
public override DocIdSet GetDocIdSet(IndexReader reader)
{
    // Resolve the (possibly shared) managed instance of this filter, then delegate.
    return FilterManager.GetInstance().GetFilter(filter).GetDocIdSet(reader);
}
/// <summary>
/// Materializes <paramref name="filter"/> into an OpenBitSetDISI sized to the
/// reader's maximum document id.
/// </summary>
public OpenBitSetDISI GetBitSetFromFilter(Filter filter, IndexReader indexReader)
{
    DocIdSetIterator iterator = filter.GetDocIdSet(indexReader).Iterator();
    return new OpenBitSetDISI(iterator, indexReader.MaxDoc());
}
/// <summary>
/// Returns the parent document of <paramref name="childDocID"/> by locating the
/// child's segment and scanning the parents bitset forward from the child's
/// segment-relative position.
/// </summary>
private Document GetParentDoc(IndexReader reader, Filter parents, int childDocID)
{
    IList<AtomicReaderContext> leaves = reader.Leaves;
    AtomicReaderContext leaf = leaves[ReaderUtil.SubIndex(childDocID, leaves)];
    // The parents filter is expected to produce a FixedBitSet (no acceptDocs applied).
    FixedBitSet parentBits = (FixedBitSet)parents.GetDocIdSet(leaf, null);
    int parentDoc = parentBits.NextSetBit(childDocID - leaf.DocBase);
    return leaf.AtomicReader.Document(parentDoc);
}
// inherit javadoc
// Scores the query against the reader; with a filter, only documents its DocIdSet
// accepts are passed to the HitCollector (leapfrog between filter iterator and scorer).
// NOTE(review): filter.GetDocIdSet(reader) is dereferenced without a null check —
// assumes filters on this path never return null; confirm before reuse.
public override void Search(Weight weight, Filter filter, HitCollector results)
{
    Scorer scorer = weight.Scorer(reader);
    if (scorer == null)
        return;

    if (filter == null)
    {
        // Unfiltered: let the scorer drive collection directly.
        scorer.Score(results);
        return;
    }

    DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here?

    bool more = filterDocIdIterator.Next() && scorer.SkipTo(filterDocIdIterator.Doc());

    while (more)
    {
        int filterDocId = filterDocIdIterator.Doc();
        if (filterDocId > scorer.Doc() && !scorer.SkipTo(filterDocId))
        {
            // Scorer exhausted before reaching the filter's position.
            more = false;
        }
        else
        {
            int scorerDocId = scorer.Doc();
            if (scorerDocId == filterDocId) // permitted by filter
            {
                results.Collect(scorerDocId, scorer.Score());
                more = filterDocIdIterator.Next();
            }
            else
            {
                // Filter is behind: jump it forward to the scorer's position.
                more = filterDocIdIterator.SkipTo(scorerDocId);
            }
        }
    }
}
/// <summary>
/// Checks that a CachingWrapperFilter produces a cacheable DocIdSet whose concrete
/// type matches whether the original filter's set was itself cacheable.
/// </summary>
private static void assertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
{
    var cacher = new CachingWrapperFilter(filter);
    DocIdSet originalSet = filter.GetDocIdSet(reader);
    DocIdSet cachedSet = cacher.GetDocIdSet(reader);

    // The wrapper must always hand back something safe to cache.
    Assert.IsTrue(cachedSet.IsCacheable());
    Assert.AreEqual(shouldCacheable, originalSet.IsCacheable());

    if (originalSet.IsCacheable())
    {
        Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
    }
    else
    {
        Assert.IsTrue(cachedSet is OpenBitSetDISI, "Cached DocIdSet must be an OpenBitSet if the original one was not cacheable");
    }
}
/// <summary>
/// Asserts that <paramref name="filt"/> matches exactly <paramref name="expected"/>
/// documents; a null DocIdSet counts as zero matches.
/// </summary>
private void TstFilterCard(string mes, int expected, Filter filt)
{
    int actual = 0;
    DocIdSet docIdSet = filt.GetDocIdSet(reader.AtomicContext, reader.LiveDocs);
    if (docIdSet != null)
    {
        DocIdSetIterator iterator = docIdSet.GetIterator();
        for (int doc = iterator.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.NextDoc())
        {
            actual++;
        }
    }
    assertEquals(mes, expected, actual);
}
// Scores the query with facet validation. Unfiltered: every scorer hit is validated
// and either collected or skipped forward to the validator's next candidate.
// Filtered: the scorer and the filter's iterator are leapfrogged, with agreement
// additionally subject to the validator.
// NOTE(review): filter.GetDocIdSet(reader) is dereferenced without a null check —
// assumes filters on this path never return null; confirm before reuse.
public override void Search(Weight weight, Filter filter, Collector results)
{
    IndexReader reader = IndexReader;

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        return;
    }
    results.SetScorer(scorer);
    results.SetNextReader(reader, 0);

    FacetValidator validator = CreateFacetValidator();
    int target = 0;
    bool more;

    if (filter == null)
    {
        more = scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
        while (more)
        {
            target = scorer.DocID();
            if (validator.Validate(target))
            {
                results.Collect(target);
                more = scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS;
            }
            else
            {
                // Validator rejected this doc; jump to its next possible candidate.
                target = validator.NextTarget;
                more = scorer.Advance(target) != DocIdSetIterator.NO_MORE_DOCS;
            }
        }
        return;
    }

    DocIdSetIterator filterDocIdIterator = filter.GetDocIdSet(reader).Iterator(); // CHECKME: use ConjunctionScorer here?

    target = filterDocIdIterator.NextDoc();
    if (target == DocIdSetIterator.NO_MORE_DOCS)
    {
        return;
    }

    int doc = -1;
    while (true)
    {
        if (doc < target)
        {
            doc = scorer.Advance(target);
            if (doc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
        }

        if (doc == target) // permitted by filter
        {
            if (validator.Validate(doc))
            {
                results.Collect(doc);
                target = filterDocIdIterator.NextDoc();
                if (target == DocIdSetIterator.NO_MORE_DOCS)
                {
                    break;
                }
                else
                {
                    continue;
                }
            }
            else
            {
                // skip to the next possible docid
                target = validator.NextTarget;
            }
        }
        else // doc > target
        {
            target = doc;
        }

        // Re-synchronize the filter iterator with the new target.
        target = filterDocIdIterator.Advance(target);
        if (target == DocIdSetIterator.NO_MORE_DOCS)
        {
            break;
        }
    }
}
/// <summary>
/// Returns an iterator over the filter's matching documents for this segment,
/// falling back to the empty iterator when the filter yields nothing. acceptDocs is
/// deliberately not passed here; live-doc filtering happens later via an additional
/// filter.
/// </summary>
private DocIdSetIterator GetDISI(Filter filter, AtomicReaderContext context)
{
    // we dont pass acceptDocs, we will filter at the end using an additional filter
    DocIdSet docIdSet = filter.GetDocIdSet(context, null);
    if (docIdSet != null)
    {
        DocIdSetIterator iterator = docIdSet.GetIterator();
        if (iterator != null)
        {
            return iterator;
        }
    }
    // Either no set or no iterator: behave as "matches nothing".
    return DocIdSetIterator.Empty();
}
// Builds the effective live-docs for a filtered reader view: documents accepted by
// preserveFilter (optionally negated), further intersected with the delegate
// reader's existing live docs.
public DocumentFilteredAtomicIndexReader(AtomicReaderContext context, Filter preserveFilter, bool negateFilter) : base(context.AtomicReader)
{
    int maxDoc = @in.MaxDoc;
    FixedBitSet bits = new FixedBitSet(maxDoc);
    // ignore livedocs here, as we filter them later:
    DocIdSet docs = preserveFilter.GetDocIdSet(context, null);
    if (docs != null)
    {
        DocIdSetIterator it = docs.GetIterator();
        if (it != null)
        {
            bits.Or(it);
        }
    }
    if (negateFilter)
    {
        bits.Flip(0, maxDoc);
    }

    if (@in.HasDeletions)
    {
        Bits oldLiveDocs = @in.LiveDocs;
        Debug.Assert(oldLiveDocs != null);
        // Walk only the currently-set bits; NO_MORE_DOCS (>= maxDoc) ends the loop.
        DocIdSetIterator it = bits.GetIterator();
        for (int i = it.NextDoc(); i < maxDoc; i = it.NextDoc())
        {
            if (!oldLiveDocs.Get(i))
            {
                // we can safely modify the current bit, as the iterator already stepped over it:
                bits.Clear(i);
            }
        }
    }

    this.liveDocs = bits;
    this.numDocs_Renamed = bits.Cardinality();
}
// Scores only documents accepted by the filter, leapfrogging the filter iterator
// and the scorer and collecting whenever they land on the same doc id.
private void SearchWithFilter(IndexReader reader, Weight weight, Filter filter, Collector collector)
{
    System.Diagnostics.Debug.Assert(filter != null);

    Scorer scorer = weight.Scorer(reader, true, false);
    if (scorer == null)
    {
        return ;
    }
    int docID = scorer.DocID();
    // A fresh scorer must be unpositioned (-1) or already exhausted.
    System.Diagnostics.Debug.Assert(docID == - 1 || docID == DocIdSetIterator.NO_MORE_DOCS);

    // CHECKME: use ConjunctionScorer here?
    DocIdSet filterDocIdSet = filter.GetDocIdSet(reader);
    if (filterDocIdSet == null)
    {
        // this means the filter does not accept any documents.
        return ;
    }
    DocIdSetIterator filterIter = filterDocIdSet.Iterator();
    if (filterIter == null)
    {
        // this means the filter does not accept any documents.
        return ;
    }
    int filterDoc = filterIter.NextDoc();
    int scorerDoc = scorer.Advance(filterDoc);
    collector.SetScorer(scorer);
    while (true)
    {
        if (scorerDoc == filterDoc)
        {
            // Check if scorer has exhausted, only before collecting.
            if (scorerDoc == DocIdSetIterator.NO_MORE_DOCS)
            {
                break;
            }
            collector.Collect(scorerDoc);
            filterDoc = filterIter.NextDoc();
            scorerDoc = scorer.Advance(filterDoc);
        }
        else if (scorerDoc > filterDoc)
        {
            // Filter is behind: jump it forward to the scorer's position.
            filterDoc = filterIter.Advance(scorerDoc);
        }
        else
        {
            // Scorer is behind: jump it forward to the filter's position.
            scorerDoc = scorer.Advance(filterDoc);
        }
    }
}
// Verifies CachingWrapperFilter caching behavior: the cached set must be cacheable,
// and its concrete type must match expectations based on whether the original set
// was itself cacheable. Null sets are only legal when the original matched nothing.
private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
{
    Assert.IsTrue(reader.Context is AtomicReaderContext);
    AtomicReaderContext context = (AtomicReaderContext)reader.Context;
    CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
    DocIdSet originalSet = filter.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
    DocIdSet cachedSet = cacher.GetDocIdSet(context, ((AtomicReader)context.Reader()).LiveDocs);
    if (originalSet == null)
    {
        Assert.IsNull(cachedSet);
    }
    if (cachedSet == null)
    {
        // A null cached set is only legal when the original produced no documents.
        Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
    }
    else
    {
        Assert.IsTrue(cachedSet.Cacheable);
        Assert.AreEqual(shouldCacheable, originalSet.Cacheable);
        //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
        if (originalSet.Cacheable)
        {
            Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
        }
        else
        {
            // NOTE: the 'cachedSet == null' disjunct is unreachable here (guarded above).
            Assert.IsTrue(cachedSet is FixedBitSet || cachedSet == null, "Cached DocIdSet must be an FixedBitSet if the original one was not cacheable");
        }
    }
}