public virtual void TestNegativePositions()
{
    DirectoryInfo unpackDir = CreateTempDir("negatives");

    // Extract the pre-built "bogus" 2.4-era index (known to contain negative
    // positions) from the embedded resource into the temp directory.
    using (Stream dataFile = this.GetType().FindAndGetManifestResourceStream(Bogus24IndexName))
    {
        TestUtil.Unzip(dataFile, unpackDir);
    }

    Directory dir = NewFSDirectory(unpackDir);
    DirectoryReader reader = DirectoryReader.Open(dir);
    IndexSearcher searcher = new IndexSearcher(reader);

    // A phrase query on field3 should still locate exactly one matching document.
    PhraseQuery phrase = new PhraseQuery();
    phrase.Add(new Term("field3", "more"));
    phrase.Add(new Term("field3", "text"));
    TopDocs hits = searcher.Search(phrase, 10);
    Assert.AreEqual(1, hits.TotalHits);

    // The positions enum for the "broken" term must still start cleanly at
    // doc 0 / position 0 despite the damaged index data.
    AtomicReader atomic = SlowCompositeReaderWrapper.Wrap(reader);
    DocsAndPositionsEnum positions = atomic.GetTermPositionsEnum(new Term("field3", "broken"));
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(positions != null);
    }
    Assert.AreEqual(0, positions.NextDoc());
    Assert.AreEqual(0, positions.NextPosition());

    reader.Dispose();
    TestUtil.CheckIndex(dir);
    dir.Dispose();
}
// Esempio n. 2  (scraped example-site separator; commented out — bare text is not valid C#)
// 0
        public virtual void TestDocsAndPositionsEnumStart()
        {
            // Index a single document containing one term so the positions
            // enum for that term has exactly one hit to iterate.
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);
            Document doc = new Document();
            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
            writer.AddDocument(doc);

            DirectoryReader reader = writer.GetReader();
            AtomicReader segment = GetOnlySegmentReader(reader);

            // A freshly obtained enum must be positioned before the first
            // document, i.e. DocID == -1, and then advance successfully.
            DocsAndPositionsEnum positions = segment.GetTermPositionsEnum(new Term("foo", "bar"));
            Assert.AreEqual(-1, positions.DocID);
            Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

            // Reusing the enum through TermsEnum.DocsAndPositions must reset
            // it to the same pre-first state.
            TermsEnum terms = segment.GetTerms("foo").GetIterator(null);
            Assert.IsTrue(terms.SeekExact(new BytesRef("bar")));
            positions = terms.DocsAndPositions(null, positions);
            Assert.AreEqual(-1, positions.DocID);
            Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

            writer.Dispose();
            segment.Dispose();
            dir.Dispose();
        }
        /// <summary>
        /// Verifies multi-level skip lists: advancing a positions enum must read
        /// only a bounded number of bytes from the freq stream (counted via the
        /// wrapping <c>CountingRAMDirectory</c> / <c>counter</c> field), which
        /// only holds when skips at multiple levels are actually used.
        /// </summary>
        public virtual void TestSimpleSkip()
        {
            // Wrap the directory so reads from the freq stream can be counted.
            Directory   dir    = new CountingRAMDirectory(this, new RAMDirectory());
            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())).SetMergePolicy(NewLogMergePolicy()));
            Term        term   = new Term("test", "a");

            // 5000 docs with the same term produce a postings list deep enough
            // to exercise three skip levels.
            for (int i = 0; i < 5000; i++)
            {
                Document d1 = new Document();
                d1.Add(NewTextField(term.Field, term.Text(), Field.Store.NO));
                writer.AddDocument(d1);
            }
            writer.Commit();
            writer.ForceMerge(1); // single segment so skip data is contiguous
            writer.Dispose();

            AtomicReader reader = GetOnlySegmentReader(DirectoryReader.Open(dir));

            // Run twice to verify enum re-acquisition does not change skip behavior.
            for (int i = 0; i < 2; i++)
            {
                counter = 0;
                DocsAndPositionsEnum tp = reader.GetTermPositionsEnum(term);
                CheckSkipTo(tp, 14, 185);  // no skips
                CheckSkipTo(tp, 17, 190);  // one skip on level 0
                CheckSkipTo(tp, 287, 200); // one skip on level 1, two on level 0

                // this test would fail if we had only one skip level,
                // because than more bytes would be read from the freqStream
                CheckSkipTo(tp, 4800, 250); // one skip on level 2
            }

            // BUGFIX: the reader and directory were previously never disposed,
            // leaking the open segment reader and the RAMDirectory.
            reader.Dispose();
            dir.Dispose();
        }
// Esempio n. 4  (scraped example-site separator; commented out — bare text is not valid C#)
// 0
        /// <summary>
        /// Verifies that mixing documents with and without payloads in the same
        /// field does not corrupt the payload of the document that has one: the
        /// "withPayload" term must still return its <c>BytesRef("test")</c> payload.
        /// </summary>
        public virtual void TestMixupDocs()
        {
            Directory         dir = NewDirectory();
            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
            Document          doc    = new Document();
            Field             field  = new TextField("field", "", Field.Store.NO);
            TokenStream       ts     = new MockTokenizer(new StringReader("here we go"), MockTokenizer.WHITESPACE, true);

            // BUGFIX: token attributes are registered by their interface type in
            // Lucene.NET, so querying the concrete PayloadAttribute class (as the
            // original did here and below) is vacuously false. Use IPayloadAttribute
            // consistently, matching the IsTrue check further down.
            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
            field.SetTokenStream(ts);
            doc.Add(field);
            writer.AddDocument(doc); // doc 1: no payload

            // doc 2: a single canned token carrying a payload.
            Token withPayload = new Token("withPayload", 0, 11);
            withPayload.Payload = new BytesRef("test");
            ts = new CannedTokenStream(withPayload);
            Assert.IsTrue(ts.HasAttribute<IPayloadAttribute>());
            field.SetTokenStream(ts);
            writer.AddDocument(doc);

            // doc 3: again without a payload, sandwiching the payloaded doc.
            ts = new MockTokenizer(new StringReader("another"), MockTokenizer.WHITESPACE, true);
            Assert.IsFalse(ts.HasAttribute<IPayloadAttribute>());
            field.SetTokenStream(ts);
            writer.AddDocument(doc);

            DirectoryReader      reader = writer.GetReader();
            AtomicReader         sr     = SlowCompositeReaderWrapper.Wrap(reader);
            DocsAndPositionsEnum de     = sr.GetTermPositionsEnum(new Term("field", "withPayload"));

            de.NextDoc();
            de.NextPosition();
            Assert.AreEqual(new BytesRef("test"), de.GetPayload());
            writer.Dispose();
            reader.Dispose();
            dir.Dispose();
        }