// Tests a sparse index (one containing deleted docs). A range that contains
// the value 0 forces the filter to consult TermDocs (to honor deletions), so
// its DocIdSet must NOT be cacheable; ranges that exclude 0 read the field
// cache directly and their DocIdSet IS cacheable.
public void TestSparseIndex()
{
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED);

    // Index one doc per id in [-20, 20].
    for (int d = -20; d <= 20; d++)
    {
        Document doc = new Document();
        doc.Add(new Field("id", d.ToString(), Field.Store.NO, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("body", "body", Field.Store.NO, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }

    writer.Optimize();
    // Delete id 0 so the reader really has deletions.
    writer.DeleteDocuments(new Term("id", "0"));
    writer.Close();

    IndexReader reader = IndexReader.Open(dir, true);
    IndexSearcher search = new IndexSearcher(reader);
    Assert.True(reader.HasDeletions);

    ScoreDoc[] result;
    Query q = new TermQuery(new Term("body", "body"));
    FieldCacheRangeFilter<sbyte?> fcrf;
    // The index was optimized, so there is exactly one segment to inspect.
    IndexReader segmentReader = reader.GetSequentialSubReaders()[0];

    // [-20, 20] contains the deleted value 0 -> not cacheable; 40 live docs match.
    result = search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, 20, T, T), 100).ScoreDocs;
    Assert.False(fcrf.GetDocIdSet(segmentReader).IsCacheable, "DocIdSet must be not cacheable");
    Assert.AreEqual(40, result.Length, "find all");

    // [0, 20] contains 0 -> not cacheable; 20 docs (0 itself is deleted).
    result = search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", 0, 20, T, T), 100).ScoreDocs;
    Assert.False(fcrf.GetDocIdSet(segmentReader).IsCacheable, "DocIdSet must be not cacheable");
    Assert.AreEqual(20, result.Length, "find values in [0, 20] (0 is deleted)");

    // [-20, 0] contains 0 -> not cacheable; 20 docs (0 itself is deleted).
    result = search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, 0, T, T), 100).ScoreDocs;
    Assert.False(fcrf.GetDocIdSet(segmentReader).IsCacheable, "DocIdSet must be not cacheable");
    Assert.AreEqual(20, result.Length, "find values in [-20, 0] (0 is deleted)");

    // [10, 20] excludes 0 -> pure field-cache lookup, cacheable; 11 docs.
    result = search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", 10, 20, T, T), 100).ScoreDocs;
    Assert.True(fcrf.GetDocIdSet(segmentReader).IsCacheable, "DocIdSet must be cacheable");
    Assert.AreEqual(11, result.Length, "find values in [10, 20]");

    // [-20, -10] excludes 0 -> pure field-cache lookup, cacheable; 11 docs.
    result = search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, -10, T, T), 100).ScoreDocs;
    Assert.True(fcrf.GetDocIdSet(segmentReader).IsCacheable, "DocIdSet must be cacheable");
    Assert.AreEqual(11, result.Length, "find values in [-20, -10]");
}
// Resolves values for the given reader: a composite reader (one exposing
// sequential sub-readers) is wrapped in a MultiDocValues spanning its
// segments, while an atomic reader delegates straight to the wrapped source.
public override DocValues GetValues(IndexReader reader)
{
    IndexReader[] segments = reader.GetSequentialSubReaders();
    return segments == null
        ? other.GetValues(reader)              // atomic reader -- just delegate
        : new MultiDocValues(this, segments);  // composite reader
}
// Resolves values for the given reader: atomic readers delegate straight to
// the wrapped source; composite readers get a MultiDocValues spanning their
// sequential sub-readers.
public override DocValues GetValues(IndexReader reader)
{
    IndexReader[] segments = reader.GetSequentialSubReaders();
    if (segments == null)
    {
        // Already an atomic reader -- just delegate.
        return other.GetValues(reader);
    }
    // Composite reader: cover every sub-reader.
    return new MultiDocValues(this, segments);
}
/// <summary>
/// Recursively flattens <paramref name="reader"/> into its atomic (leaf)
/// sub-readers, appending each leaf to <paramref name="allSubReaders"/>.
/// </summary>
/// <param name="allSubReaders">list that receives every leaf reader found</param>
/// <param name="reader">the (possibly composite) reader to flatten</param>
public static void GatherSubReaders(System.Collections.IList allSubReaders, IndexReader reader)
{
    IndexReader[] children = reader.GetSequentialSubReaders();
    if (children != null)
    {
        // Composite reader: descend into each child in order.
        foreach (IndexReader child in children)
        {
            GatherSubReaders(allSubReaders, child);
        }
        return;
    }
    // Leaf reader: record it and stop recursing.
    allSubReaders.Add(reader);
}