GetSequentialSubReaders() public method

Expert: returns the sequential sub readers that this reader is logically composed of. For example, IndexSearcher uses this API to drive searching by one sub reader at a time. If this reader is not composed of sequential child readers, it should return null. If this method returns an empty array, that means this reader is a null reader (for example a MultiReader that has no sub readers).

NOTE: You should not try using sub-readers returned by this method to make any changes (setNorm, deleteDocument, etc.). While this might succeed for one composite reader (like MultiReader), it will most likely lead to index corruption for other readers (like a DirectoryReader obtained through IndexReader.Open(Lucene.Net.Store.Directory, bool)). Use the parent reader directly.

public GetSequentialSubReaders ( ) : Lucene.Net.Index.IndexReader[]
return Lucene.Net.Index.IndexReader[]
        /// <summary>
        /// Builds one <see cref="Filter"/> per feed: for each feed key, an
        /// OpenBitSet per segment marks the documents belonging to that feed.
        /// </summary>
        /// <param name="reader">Composite reader whose segments are scanned; assumes it exposes sub-readers (no null check here) — TODO confirm callers only pass composite readers.</param>
        /// <param name="feeds">Feed name -> set of package ids (consumed by CreateOpenBitSets).</param>
        /// <returns>Case-insensitive map of feed name to its lookup filter.</returns>
        public static IDictionary<string, Filter> CreateFilters(IndexReader reader, IDictionary<string, HashSet<string>> feeds)
        {
            var bitSetLookup = new Dictionary<string, IDictionary<string, OpenBitSet>>(StringComparer.OrdinalIgnoreCase);

            // Hoisted: previously GetSequentialSubReaders() was re-invoked for every feed key.
            var subReaders = reader.GetSequentialSubReaders();

            foreach (var key in feeds.Keys)
            {
                bitSetLookup[key] = new Dictionary<string, OpenBitSet>();

                // Pre-create an empty bit set per segment so CreateOpenBitSets can index freely.
                foreach (SegmentReader segmentReader in subReaders)
                {
                    bitSetLookup[key][segmentReader.SegmentName] = new OpenBitSet();
                }
            }

            foreach (SegmentReader segmentReader in subReaders)
            {
                CreateOpenBitSets(segmentReader, feeds, bitSetLookup);
            }

            var filters = new Dictionary<string, Filter>(StringComparer.OrdinalIgnoreCase);

            foreach (var key in feeds.Keys)
            {
                filters[key] = new OpenBitSetLookupFilter(bitSetLookup[key]);
            }

            return filters;
        }
        /// <summary>
        /// Runs every registered handler over the index: Begin on all handlers,
        /// then per-reader processing (segment by segment when enabled), then End.
        /// </summary>
        /// <param name="indexReader">Reader to process; may be composite or flat.</param>
        public void Process(IndexReader indexReader)
        {
            var perIndexDocumentNumber = 0;

            foreach (var handler in _handlers)
            {
                handler.Begin(indexReader);
            }

            // Only probe for sub-readers when segment enumeration is enabled; hoisting
            // the call also avoids invoking GetSequentialSubReaders() twice.
            IndexReader[] subReaders = _enumerateSubReaders ? indexReader.GetSequentialSubReaders() : null;

            if (subReaders != null)
            {
                foreach (SegmentReader segmentReader in subReaders)
                {
                    ProcessReader(segmentReader, segmentReader.SegmentName, ref perIndexDocumentNumber);
                }
            }
            else
            {
                // Enumeration disabled or non-composite reader: treat the whole index as one unit.
                ProcessReader(indexReader, string.Empty, ref perIndexDocumentNumber);
            }

            foreach (var handler in _handlers)
            {
                handler.End(indexReader);
            }
        }
        /// <summary>
        /// Creates a filter that matches only the latest version of each package,
        /// expressed as one OpenBitSet per segment (keyed by segment name, or the
        /// empty string for a non-composite reader).
        /// </summary>
        /// <param name="indexReader">Reader to scan.</param>
        /// <param name="includePrerelease">Whether prerelease versions may count as latest.</param>
        /// <param name="includeUnlisted">Whether unlisted packages may count as latest.</param>
        public static Filter Create(IndexReader indexReader, bool includePrerelease, bool includeUnlisted)
        {
            IDictionary<string, OpenBitSet> openBitSetLookup = new Dictionary<string, OpenBitSet>();

            // Hoisted: avoid calling GetSequentialSubReaders() for both the null check and the loop.
            IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

            if (subReaders != null)
            {
                foreach (SegmentReader segmentReader in subReaders)
                {
                    openBitSetLookup.Add(segmentReader.SegmentName, new OpenBitSet());
                }
            }
            else
            {
                // Non-composite reader: single pseudo-segment keyed by the empty string.
                openBitSetLookup.Add(string.Empty, new OpenBitSet());
            }

            IDictionary<string, Tuple<NuGetVersion, string, int>> lookup = MakeLatestVersionLookup(indexReader, includePrerelease, includeUnlisted);

            // Item2 = segment (reader) name, Item3 = document id within that segment.
            foreach (Tuple<NuGetVersion, string, int> entry in lookup.Values)
            {
                string readerName = entry.Item2;
                int readerDocumentId = entry.Item3;

                openBitSetLookup[readerName].Set(readerDocumentId);
            }

            return new OpenBitSetLookupFilter(openBitSetLookup);
        }
        /// <summary>
        /// Initializes the per-feed, per-segment bit-set lookup before processing
        /// starts: one empty OpenBitSet per (feed, segment) pair, with the empty
        /// string standing in for the segment name on non-composite readers.
        /// </summary>
        /// <param name="indexReader">Reader about to be processed.</param>
        public void Begin(IndexReader indexReader)
        {
            _bitSetLookup = new Dictionary<string, IDictionary<string, OpenBitSet>>(StringComparer.OrdinalIgnoreCase);

            // Hoisted: previously GetSequentialSubReaders() was re-invoked per feed key.
            IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

            foreach (var key in _feeds.Keys)
            {
                _bitSetLookup[key] = new Dictionary<string, OpenBitSet>();

                if (subReaders != null)
                {
                    foreach (SegmentReader segmentReader in subReaders)
                    {
                        _bitSetLookup[key][segmentReader.SegmentName] = new OpenBitSet();
                    }
                }
                else
                {
                    // Non-composite reader: single pseudo-segment keyed by the empty string.
                    _bitSetLookup[key][string.Empty] = new OpenBitSet();
                }
            }
        }
 /// <summary>
 /// Allocates a document-number mapping array per segment (sized MaxDoc),
 /// keyed by segment name — or by the empty string for a non-composite reader.
 /// </summary>
 /// <param name="indexReader">Reader about to be processed.</param>
 public void Begin(IndexReader indexReader)
 {
     // Hoisted: avoid calling GetSequentialSubReaders() for both the null check and the loop.
     IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

     if (subReaders != null)
     {
         foreach (SegmentReader segmentReader in subReaders)
         {
             _mapping[segmentReader.SegmentName] = new int[segmentReader.MaxDoc];
         }
     }
     else
     {
         _mapping[string.Empty] = new int[indexReader.MaxDoc];
     }
 }
        /// <summary>
        /// Allocates a Ranking slot per document for each segment (sized MaxDoc),
        /// keyed by segment name — or by the empty string for a non-composite reader.
        /// </summary>
        /// <param name="indexReader">Reader about to be processed.</param>
        public void Begin(IndexReader indexReader)
        {
            _rankingBySegmentReaderName = new RankingBySegment();

            // Hoisted: avoid calling GetSequentialSubReaders() for both the null check and the loop.
            IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

            if (subReaders != null)
            {
                foreach (SegmentReader segmentReader in subReaders)
                {
                    _rankingBySegmentReaderName[segmentReader.SegmentName] = new Ranking[segmentReader.MaxDoc];
                }
            }
            else
            {
                _rankingBySegmentReaderName[string.Empty] = new Ranking[indexReader.MaxDoc];
            }
        }
        // test using a sparse index (with deleted docs). The DocIdSet should be not cacheable, as it uses TermDocs if the range contains 0
        public void TestSparseIndex()
        {
            RAMDirectory dir    = new RAMDirectory();
            IndexWriter  writer = new IndexWriter(dir, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED, null);

            // Index ids -20..20, then delete id 0 so the index has a hole in the range.
            for (int d = -20; d <= 20; d++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", d.ToString(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("body", "body", Field.Store.NO, Field.Index.NOT_ANALYZED));
                writer.AddDocument(doc, null);
            }

            writer.Optimize(null);
            writer.DeleteDocuments(null, new Term("id", "0"));
            writer.Close();

            IndexReader   reader = IndexReader.Open((Directory)dir, true, null);
            IndexSearcher Search = new IndexSearcher(reader);

            Assert.True(reader.HasDeletions);

            ScoreDoc[] result;
            Query      q = new TermQuery(new Term("body", "body"));

            FieldCacheRangeFilter <sbyte?> fcrf;

            // Ranges that contain 0 must produce a non-cacheable DocIdSet (TermDocs-backed).
            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, 20, T, T), 100, null).ScoreDocs;
            Assert.False(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be not cacheable");
            Assert.AreEqual(40, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", 0, 20, T, T), 100, null).ScoreDocs;
            Assert.False(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be not cacheable");
            Assert.AreEqual(20, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, 0, T, T), 100, null).ScoreDocs;
            Assert.False(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be not cacheable");
            Assert.AreEqual(20, result.Length, "find all");

            // Ranges that exclude 0 must produce a cacheable DocIdSet.
            // BUG FIX: these two assertions check True, so the failure message must
            // read "must be cacheable" (was copy-pasted from the False cases above).
            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", 10, 20, T, T), 100, null).ScoreDocs;
            Assert.True(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be cacheable");
            Assert.AreEqual(11, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, -10, T, T), 100, null).ScoreDocs;
            Assert.True(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be cacheable");
            Assert.AreEqual(11, result.Length, "find all");
        }
        /// <summary>
        /// Resets per-run state: one empty OpenBitSet per segment (keyed by segment
        /// name, or the empty string for a non-composite reader) plus a fresh
        /// latest-version lookup table.
        /// </summary>
        /// <param name="indexReader">Reader about to be processed.</param>
        public void Begin(IndexReader indexReader)
        {
            _openBitSetLookup = new Dictionary<string, OpenBitSet>();
            _lookup = new Dictionary<string, Tuple<NuGetVersion, string, int>>();

            // Hoisted: avoid calling GetSequentialSubReaders() for both the null check and the loop.
            IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

            if (subReaders != null)
            {
                foreach (SegmentReader segmentReader in subReaders)
                {
                    _openBitSetLookup.Add(segmentReader.SegmentName, new OpenBitSet());
                }
            }
            else
            {
                _openBitSetLookup.Add(string.Empty, new OpenBitSet());
            }
        }
        /// <summary>
        /// Resets owner-tracking state: a known-owner set and, per segment, a
        /// case-insensitive owner -> DynamicDocIdSet map (keyed by segment name,
        /// or the empty string for a non-composite reader).
        /// </summary>
        /// <param name="indexReader">Reader about to be processed.</param>
        public void Begin(IndexReader indexReader)
        {
            _knownOwners = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
            _ownerTuples = new Dictionary<string, IDictionary<string, DynamicDocIdSet>>();

            // Hoisted: avoid calling GetSequentialSubReaders() for both the null check and the loop.
            IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

            if (subReaders != null)
            {
                foreach (SegmentReader segmentReader in subReaders)
                {
                    _ownerTuples.Add(segmentReader.SegmentName, new Dictionary<string, DynamicDocIdSet>(StringComparer.OrdinalIgnoreCase));
                }
            }
            else
            {
                _ownerTuples.Add(string.Empty, new Dictionary<string, DynamicDocIdSet>(StringComparer.OrdinalIgnoreCase));
            }
        }
        /// <summary>
        /// Builds a package-id -> (latest version, segment name, doc id) table by
        /// delegating to the per-reader scan for each segment (or once for the
        /// whole reader, keyed by the empty string, when it is not composite).
        /// </summary>
        /// <param name="indexReader">Reader to scan.</param>
        /// <param name="includePrerelease">Whether prerelease versions may count as latest.</param>
        /// <param name="includeUnlisted">Whether unlisted packages may count as latest.</param>
        static IDictionary<string, Tuple<NuGetVersion, string, int>> MakeLatestVersionLookup(IndexReader indexReader, bool includePrerelease, bool includeUnlisted)
        {
            IDictionary<string, Tuple<NuGetVersion, string, int>> lookup = new Dictionary<string, Tuple<NuGetVersion, string, int>>();

            // Hoisted: avoid calling GetSequentialSubReaders() for both the null check and the loop.
            IndexReader[] subReaders = indexReader.GetSequentialSubReaders();

            if (subReaders != null)
            {
                foreach (SegmentReader segmentReader in subReaders)
                {
                    MakeLatestVersionLookupPerReader(lookup, segmentReader, segmentReader.SegmentName, includePrerelease, includeUnlisted);
                }
            }
            else
            {
                MakeLatestVersionLookupPerReader(lookup, indexReader, string.Empty, includePrerelease, includeUnlisted);
            }

            return lookup;
        }
        // Builds a package-id -> (highest version, segment name, doc id) table by
        // scanning every live document in every segment. Only documents that pass
        // the listed/prerelease gates compete; assumes the reader is composite
        // (no null check on GetSequentialSubReaders) — TODO confirm callers.
        static IDictionary<string, Tuple<NuGetVersion, string, int>> MakeLatestVersionLookup(IndexReader indexReader, bool includePrerelease, bool includeUnlisted)
        {
            IDictionary<string, Tuple<NuGetVersion, string, int>> lookup = new Dictionary<string, Tuple<NuGetVersion, string, int>>();

            foreach (SegmentReader segmentReader in indexReader.GetSequentialSubReaders())
            {
                // Walk every doc slot in the segment; MaxDoc includes deleted slots.
                for (int n = 0; n < segmentReader.MaxDoc; n++)
                {
                    if (segmentReader.IsDeleted(n))
                    {
                        continue;
                    }

                    Document document = segmentReader.Document(n);

                    NuGetVersion version = GetVersion(document);

                    // Documents without a parseable version cannot compete for "latest".
                    if (version == null)
                    {
                        continue;
                    }

                    bool isListed = GetListed(document);

                    if (isListed || includeUnlisted)
                    {
                        if (!version.IsPrerelease || includePrerelease)
                        {
                            string id = GetId(document);

                            // Skip documents with no package id.
                            if (id == null)
                            {
                                continue;
                            }

                            // Keep the highest version seen for this id, remembering
                            // which segment and doc number it came from.
                            Tuple<NuGetVersion, string, int> existingVersion;
                            if (lookup.TryGetValue(id, out existingVersion))
                            {
                                if (version > existingVersion.Item1)
                                {
                                    lookup[id] = Tuple.Create(version, segmentReader.SegmentName, n);
                                }
                            }
                            else
                            {
                                lookup.Add(id, Tuple.Create(version, segmentReader.SegmentName, n));
                            }
                        }
                    }
                }
            }

            return lookup;
        }
        /// <summary>
        /// Builds per-framework-compatibility-key bit sets (release-only and
        /// including prerelease) of matching documents, keyed by segment name.
        /// </summary>
        /// <param name="reader">Composite reader whose segments are scanned; assumes sub-readers are present — TODO confirm callers only pass composite readers.</param>
        /// <param name="frameworkCompatibility">Compatibility key -> set of compatible frameworks (consumed by UpdateMatchingDocs).</param>
        public static IDictionary<string, BitSetsLookupEntry> CreateBitSetsLookup(IndexReader reader, IDictionary<string, ISet<string>> frameworkCompatibility)
        {
            //  This is a two step process because we first need to calculate the highest version across the whole data set (i.e. across every segment)

            //  STEP 1. Create a lookup table of compatible documents (identified by SegmentName and Doc) per entry in the framework compatibility table
            //  (The results include separate structures for release-only and including pre-release.)

            IDictionary<string, MatchingDocsEntry> matchingDocsLookup = new Dictionary<string, MatchingDocsEntry>();

            foreach (string key in frameworkCompatibility.Keys)
            {
                matchingDocsLookup[key] = new MatchingDocsEntry();
            }

            // Hoisted: previously GetSequentialSubReaders() was re-invoked for every compatibility key in STEP 2.
            var subReaders = reader.GetSequentialSubReaders();

            foreach (SegmentReader segmentReader in subReaders)
            {
                UpdateMatchingDocs(matchingDocsLookup, segmentReader, frameworkCompatibility);
            }

            //  STEP 2. From the globally created MatchingDocsLookup table we create per-segment lookups

            IDictionary<string, BitSetsLookupEntry> bitSetsLookup = new Dictionary<string, BitSetsLookupEntry>();

            foreach (string key in frameworkCompatibility.Keys)
            {
                BitSetsLookupEntry newBitSetsLookupEntry = new BitSetsLookupEntry();

                foreach (SegmentReader segmentReader in subReaders)
                {
                    newBitSetsLookupEntry.MatchingDocs.Add(segmentReader.SegmentName, new OpenBitSet());
                    newBitSetsLookupEntry.MatchingDocsPre.Add(segmentReader.SegmentName, new OpenBitSet());
                }

                bitSetsLookup[key] = newBitSetsLookupEntry;
            }

            // Project the global matches into the per-segment bit sets.
            foreach (KeyValuePair<string, MatchingDocsEntry> entry in matchingDocsLookup)
            {
                foreach (MatchingDoc matchingDoc in entry.Value.MatchingDocs.Values)
                {
                    bitSetsLookup[entry.Key].MatchingDocs[matchingDoc.SegmentName].Set(matchingDoc.Doc);
                }
                foreach (MatchingDoc matchingDocPre in entry.Value.MatchingDocsPre.Values)
                {
                    bitSetsLookup[entry.Key].MatchingDocsPre[matchingDocPre.SegmentName].Set(matchingDocPre.Doc);
                }
            }

            return bitSetsLookup;
        }
Example #13
0
        /// <summary>
        /// Clones every sub-reader of an open DirectoryReader while the write lock
        /// is held, then verifies the clones can still be closed after the parent
        /// reader is closed.
        /// </summary>
        public virtual void TestCloneSubreaders()
        {
            Directory dir1 = new MockRAMDirectory();

            TestIndexReaderReopen.CreateIndex(dir1, true);
            IndexReader reader = IndexReader.Open(dir1, false);

            reader.DeleteDocument(1);             // acquire write lock
            IndexReader[] subs = reader.GetSequentialSubReaders();
            System.Diagnostics.Debug.Assert(subs.Length > 1);

            // Clone each sub-reader before the parent is closed.
            IndexReader[] clones = new IndexReader[subs.Length];
            for (int i = 0; i < subs.Length; i++)
            {
                clones[i] = (IndexReader)subs[i].Clone();
            }

            reader.Close();

            // The clones must remain independently closable.
            foreach (IndexReader clone in clones)
            {
                clone.Close();
            }

            dir1.Close();
        }
Example #14
0
 /// <summary>
 /// Delegates to the wrapped reader so callers see its sequential sub-readers.
 /// </summary>
 public override IndexReader[] GetSequentialSubReaders()
 {
     return in_Renamed.GetSequentialSubReaders();
 }
Example #15
0
		/// <summary>
		/// Unwraps <paramref name="reader"/> to its single SegmentReader: returns it
		/// directly, or the sole sub-reader of a single-segment DirectoryReader;
		/// throws for anything else.
		/// </summary>
		public /*internal*/ static SegmentReader GetOnlySegmentReader(IndexReader reader)
		{
			// Fast path: the reader already is a segment reader.
			SegmentReader segmentReader = reader as SegmentReader;
			if (segmentReader != null)
			{
				return segmentReader;
			}

			DirectoryReader directoryReader = reader as DirectoryReader;
			if (directoryReader != null)
			{
				IndexReader[] subReaders = directoryReader.GetSequentialSubReaders();
				if (subReaders.Length == 1)
				{
					return (SegmentReader) subReaders[0];
				}

				throw new System.ArgumentException(reader + " has " + subReaders.Length + " segments instead of exactly one");
			}

			throw new System.ArgumentException(reader + " is not a SegmentReader or a single-segment DirectoryReader");
		}
		/// <summary>
		/// Verifies that <paramref name="reader"/> is fully released: ref count is
		/// zero, norms are closed on segment readers when requested, and (when
		/// <paramref name="checkSubReaders"/> is set) the same holds recursively
		/// for every sub-reader of composite readers.
		/// </summary>
		private void AssertReaderClosed(IndexReader reader, bool checkSubReaders, bool checkNormsClosed)
		{
			Assert.AreEqual(0, reader.GetRefCount());

			if (checkNormsClosed && reader is SegmentReader)
			{
				Assert.IsTrue(((SegmentReader) reader).NormsClosed());
			}

			if (!checkSubReaders)
			{
				return;
			}

			if (reader is DirectoryReader)
			{
				foreach (IndexReader sub in reader.GetSequentialSubReaders())
				{
					AssertReaderClosed(sub, checkSubReaders, checkNormsClosed);
				}
			}

			if (reader is MultiReader)
			{
				foreach (IndexReader sub in reader.GetSequentialSubReaders())
				{
					AssertReaderClosed(sub, checkSubReaders, checkNormsClosed);
				}
			}

			if (reader is ParallelReader)
			{
				// ParallelReader exposes its children through GetSubReaders instead.
				foreach (IndexReader sub in ((ParallelReader) reader).GetSubReaders())
				{
					AssertReaderClosed(sub, checkSubReaders, checkNormsClosed);
				}
			}
		}