`public DocsAndPositionsEnum TermPositionsEnum(Lucene.Net.Index.Term term)`

| Name   | Type                                    | Description |
|--------|-----------------------------------------|-------------|
| term   | `Lucene.Net.Index.Term`                 |             |
| return | `Lucene.Net.Index.DocsAndPositionsEnum` |             |
// Verifies that a DocsAndPositionsEnum reports DocID() == -1 (unpositioned)
// before the first NextDoc() call — both for a freshly obtained enum and for
// one reused through TermsEnum.DocsAndPositions.
public virtual void TestDocsAndPositionsEnumStart()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir);
    Document doc = new Document();
    doc.Add(NewTextField("foo", "bar", Field.Store.NO));
    writer.AddDocument(doc);
    DirectoryReader reader = writer.Reader;
    AtomicReader segmentReader = GetOnlySegmentReader(reader);

    // A brand-new positions enum must start before the first document.
    DocsAndPositionsEnum positions = segmentReader.TermPositionsEnum(new Term("foo", "bar"));
    int startDoc = positions.DocID();
    Assert.AreEqual(-1, startDoc);
    Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    // Reusing the enum via the TermsEnum must reset it to the unpositioned state.
    TermsEnum termsEnum = segmentReader.Terms("foo").Iterator(null);
    Assert.IsTrue(termsEnum.SeekExact(new BytesRef("bar")));
    positions = termsEnum.DocsAndPositions(null, positions);
    startDoc = positions.DocID();
    Assert.AreEqual(-1, startDoc);
    Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    writer.Dispose();
    segmentReader.Dispose();
    dir.Dispose();
}
// Opens a pre-built index (unzipped from the Bogus24 embedded resource) whose
// "field3" postings contain broken positions, and verifies that phrase search
// still works and that the broken term enumerates at doc 0 / position 0.
public virtual void TestNegativePositions()
{
    DirectoryInfo oldIndexDir = CreateTempDir("negatives");
    using (Stream dataFile = this.GetType().Assembly.GetManifestResourceStream(CURRENT_RESOURCE_DIRECTORY + Bogus24IndexName))
    {
        TestUtil.Unzip(dataFile, oldIndexDir);
    }
    Directory dir = NewFSDirectory(oldIndexDir);
    DirectoryReader ir = DirectoryReader.Open(dir);
    IndexSearcher @is = new IndexSearcher(ir);

    PhraseQuery pq = new PhraseQuery();
    pq.Add(new Term("field3", "more"));
    pq.Add(new Term("field3", "text"));
    TopDocs td = @is.Search(pq, 10);
    Assert.AreEqual(1, td.TotalHits);

    AtomicReader wrapper = SlowCompositeReaderWrapper.Wrap(ir);
    DocsAndPositionsEnum de = wrapper.TermPositionsEnum(new Term("field3", "broken"));
    // Fix: use a test assertion rather than Debug.Assert — Debug.Assert is
    // compiled out in Release builds, so a null enum would previously surface
    // as an opaque NullReferenceException on the next line instead of a
    // meaningful test failure.
    Assert.IsNotNull(de);
    Assert.AreEqual(0, de.NextDoc());
    Assert.AreEqual(0, de.NextPosition());

    ir.Dispose();
    TestUtil.CheckIndex(dir);
    dir.Dispose();
}
// Builds a single-segment index containing 5000 identical docs and checks that
// advancing a TermPositionsEnum reads no more than the expected number of bytes
// (tracked via the Counter field) for skips that exercise each skip-list level.
public virtual void TestSimpleSkip()
{
    Directory dir = new CountingRAMDirectory(this, new RAMDirectory());
    IndexWriter writer = new IndexWriter(dir,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer())
            .SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()))
            .SetMergePolicy(NewLogMergePolicy()));
    Term term = new Term("test", "a");
    for (int i = 0; i < 5000; i++)
    {
        Document d1 = new Document();
        d1.Add(NewTextField(term.Field(), term.Text(), Field.Store.NO));
        writer.AddDocument(d1);
    }
    writer.Commit();
    writer.ForceMerge(1); // one segment, so the skip lists are fully built
    writer.Dispose();

    AtomicReader reader = GetOnlySegmentReader(DirectoryReader.Open(dir));

    for (int i = 0; i < 2; i++)
    {
        Counter = 0; // reset the byte counter before each pass
        DocsAndPositionsEnum tp = reader.TermPositionsEnum(term);
        CheckSkipTo(tp, 14, 185); // no skips
        CheckSkipTo(tp, 17, 190); // one skip on level 0
        CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0

        // this test would fail if we had only one skip level,
        // because than more bytes would be read from the freqStream
        CheckSkipTo(tp, 4800, 250); // one skip on level 2
    }

    // Fix: the original never disposed the reader or the directory; release
    // them so the test does not leak and does not trip resource tracking.
    reader.Dispose();
    dir.Dispose();
}
// Indexes three docs through the same Field instance — payload-free, with a
// payload, payload-free again — and verifies the payload is still readable on
// the middle doc's term. Guards against payload state bleeding between docs.
public virtual void TestMixupDocs()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
    iwc.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc);

    // doc 0: tokens without payloads
    Document doc = new Document();
    Field field = new TextField("field", "", Field.Store.NO);
    TokenStream ts = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);
    // Fix: query the attribute interface (IPayloadAttribute), matching the
    // positive check below. Lucene.NET registers token-stream attributes by
    // interface, so HasAttribute<PayloadAttribute> (the concrete class) was
    // trivially false and asserted nothing.
    Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
    field.TokenStream = ts;
    doc.Add(field);
    writer.AddDocument(doc);

    // doc 1: a single token that carries a payload
    Token withPayload = new Token("withPayload", 0, 11);
    withPayload.Payload = new BytesRef("test");
    ts = new CannedTokenStream(withPayload);
    Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
    field.TokenStream = ts;
    writer.AddDocument(doc);

    // doc 2: tokens without payloads again
    ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
    Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
    field.TokenStream = ts;
    writer.AddDocument(doc);

    // The payload written for doc 1 must survive the surrounding payload-free docs.
    DirectoryReader reader = writer.Reader;
    AtomicReader sr = SlowCompositeReaderWrapper.Wrap(reader);
    DocsAndPositionsEnum de = sr.TermPositionsEnum(new Term("field", "withPayload"));
    de.NextDoc();
    de.NextPosition();
    Assert.AreEqual(new BytesRef("test"), de.Payload);

    writer.Dispose();
    reader.Dispose();
    dir.Dispose();
}