Example #1
        public static void BeforeClass()
        {
            Directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));

            for (int i = 0; i < DocFields.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewTextField(field, DocFields[i], Field.Store.NO));
                writer.AddDocument(doc);
            }
            writer.Dispose();
            LittleReader = DirectoryReader.Open(Directory);
            Searcher     = NewSearcher(LittleReader);
            // this is intentionally using the baseline sim, because it compares against bigSearcher (which uses a random one)
            Searcher.Similarity = new DefaultSimilarity();

            // Make big index
            Dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory(Directory, IOContext.DEFAULT));

            // First multiply small test index:
            MulFactor = 1;
            int docCount = 0;

            if (VERBOSE)
            {
                Console.WriteLine("\nTEST: now copy index...");
            }
            do
            {
                if (VERBOSE)
                {
                    Console.WriteLine("\nTEST: cycle...");
                }
                Directory         copy = new MockDirectoryWrapper(Random(), new RAMDirectory(Dir2, IOContext.DEFAULT));
                RandomIndexWriter w    = new RandomIndexWriter(Random(), Dir2);
                w.AddIndexes(copy);
                docCount = w.MaxDoc();
                w.Dispose();
                MulFactor *= 2;
            } while (docCount < 3000);

            RandomIndexWriter riw  = new RandomIndexWriter(Random(), Dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
            Document          doc_ = new Document();

            doc_.Add(NewTextField("field2", "xxx", Field.Store.NO));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                riw.AddDocument(doc_);
            }
            doc_ = new Document();
            doc_.Add(NewTextField("field2", "big bad bug", Field.Store.NO));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                riw.AddDocument(doc_);
            }
            Reader      = riw.Reader;
            BigSearcher = NewSearcher(Reader);
            riw.Dispose();
        }
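
The do/while loop above is the core trick: each cycle snapshots the current contents of Dir2 into a RAMDirectory and feeds that copy back in through AddIndexes, so the document count doubles per pass until it crosses 3000 (MulFactor records how many times the small index was multiplied). Below is a minimal standalone sketch of the same idiom, assuming Lucene.NET 4.8 with a plain IndexWriter and StandardAnalyzer in place of the test framework's randomized components; the IndexMultiplier name is made up for illustration.

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

public static class IndexMultiplier
{
    // Doubles the index per pass by merging a snapshot of it back into itself,
    // until it holds at least targetDocCount documents.
    public static int Multiply(Directory dir, int targetDocCount)
    {
        int docCount;
        do
        {
            // AddIndexes refuses the writer's own directory as a source,
            // so snapshot the index into a RAMDirectory first.
            using (Directory copy = new RAMDirectory(dir, IOContext.DEFAULT))
            using (var w = new IndexWriter(dir, new IndexWriterConfig(
                LuceneVersion.LUCENE_48,
                new StandardAnalyzer(LuceneVersion.LUCENE_48))))
            {
                w.AddIndexes(copy);   // appends a full copy of the snapshot
                docCount = w.MaxDoc;  // total docs, including the new copies
            }
        } while (docCount < targetDocCount);
        return docCount;
    }
}
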
Example #2
 public virtual void TestAddEmpty()
 {
     Directory d1 = NewDirectory();
     RandomIndexWriter w = new RandomIndexWriter(Random(), d1);
     MultiReader empty = new MultiReader();
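     // adding a reader that contains no documents must not leave an empty segment behind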
     w.AddIndexes(empty);
     w.Dispose();
     DirectoryReader dr = DirectoryReader.Open(d1);
     foreach (AtomicReaderContext ctx in dr.Leaves)
     {
         Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
     }
     dr.Dispose();
     d1.Dispose();
 }
Example #3
        public virtual void TestAddIndexes()
        {
            Directory d1 = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewStringField("id", "1", Field.Store.YES));
            doc.Add(new NumericDocValuesField("dv", 1));
            w.AddDocument(doc);
            IndexReader r1 = w.Reader;
            w.Dispose();

            Directory d2 = NewDirectory();
            w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
            doc = new Document();
            doc.Add(NewStringField("id", "2", Field.Store.YES));
            doc.Add(new NumericDocValuesField("dv", 2));
            w.AddDocument(doc);
            IndexReader r2 = w.Reader;
            w.Dispose();

            Directory d3 = NewDirectory();
            w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
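            // SlowCompositeReaderWrapper presents each multi-segment DirectoryReader
            // as a single (slower) AtomicReader view before it is merged in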
            w.AddIndexes(SlowCompositeReaderWrapper.Wrap(r1), SlowCompositeReaderWrapper.Wrap(r2));
            r1.Dispose();
            d1.Dispose();
            r2.Dispose();
            d2.Dispose();

            w.ForceMerge(1);
            DirectoryReader r3 = w.Reader;
            w.Dispose();
            AtomicReader sr = GetOnlySegmentReader(r3);
            Assert.AreEqual(2, sr.NumDocs);
            NumericDocValues docValues = sr.GetNumericDocValues("dv");
            Assert.IsNotNull(docValues);
            r3.Dispose();
            d3.Dispose();
        }
Example #4
        public virtual void TestLocksBlock()
        {
            Directory src = NewDirectory();
            RandomIndexWriter w1 = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
            w1.AddDocument(new Document());
            w1.Commit();

            Directory dest = NewDirectory();

            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
            iwc.SetWriteLockTimeout(1);
            RandomIndexWriter w2 = new RandomIndexWriter(Random(), dest, iwc);

            try
            {
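                // AddIndexes(Directory...) must acquire the write lock on each source
                // directory; w1 still holds src's lock, so the 1 ms timeout trips at once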
                w2.AddIndexes(src);
                Assert.Fail("did not hit expected exception");
            }
            catch (LockObtainFailedException)
            {
                // expected
            }

            IOUtils.Close(w1, w2, src, dest);
        }
Example #5
        public virtual void TestFakeAllDeleted()
        {
            Directory src = NewDirectory(), dest = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), src, Similarity, TimeZone);
            w.AddDocument(new Document());
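            // wrap the freshly written one-doc segment so it reports every document as deleted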
            IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.Reader.Leaves[0].Reader);
            w.Dispose();

            w = new RandomIndexWriter(Random(), dest, Similarity, TimeZone);
            w.AddIndexes(allDeletedReader);
            w.Dispose();
            DirectoryReader dr = DirectoryReader.Open(dest);
            foreach (AtomicReaderContext ctx in dr.Leaves)
            {
                Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
            }
            dr.Dispose();
            allDeletedReader.Dispose();
            src.Dispose();
            dest.Dispose();
        }
Example #6
        public virtual void TestFieldNamesChanged()
        {
            Directory d1 = NewDirectory();
            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewStringField("f1", "doc1 field1", Field.Store.YES));
            doc.Add(NewStringField("id", "1", Field.Store.YES));
            w.AddDocument(doc);
            IndexReader r1 = w.Reader;
            w.Dispose();

            Directory d2 = NewDirectory();
            w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
            doc = new Document();
            doc.Add(NewStringField("f2", "doc2 field2", Field.Store.YES));
            doc.Add(NewStringField("id", "2", Field.Store.YES));
            w.AddDocument(doc);
            IndexReader r2 = w.Reader;
            w.Dispose();

            Directory d3 = NewDirectory();
            w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
            w.AddIndexes(r1, r2);
            r1.Dispose();
            d1.Dispose();
            r2.Dispose();
            d2.Dispose();

            IndexReader r3 = w.Reader;
            w.Dispose();
            Assert.AreEqual(2, r3.NumDocs);
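            // fetch each stored document and branch on its "id" field rather than assuming docID order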
            for (int docID = 0; docID < 2; docID++)
            {
                Document d = r3.Document(docID);
                if (d.Get("id").Equals("1"))
                {
                    Assert.AreEqual("doc1 field1", d.Get("f1"));
                }
                else
                {
                    Assert.AreEqual("doc2 field2", d.Get("f2"));
                }
            }
            r3.Dispose();
            d3.Dispose();
        }
Example #7
        public override void BeforeClass()
        {
            base.BeforeClass();

            directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random, directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy()));

            for (int i = 0; i < docFields.Length; i++)
            {
                Document doc = new Document();
                doc.Add(NewTextField(field, docFields[i], Field.Store.NO));
                writer.AddDocument(doc);
            }
            writer.Dispose();
            littleReader = DirectoryReader.Open(directory);
            searcher     = NewSearcher(littleReader);
            // this is intentionally using the baseline sim, because it compares against bigSearcher (which uses a random one)
            searcher.Similarity = new DefaultSimilarity();

            // Make big index
            dir2 = new MockDirectoryWrapper(Random, new RAMDirectory(directory, IOContext.DEFAULT));

            // First multiply small test index:
            mulFactor = 1;
            int docCount = 0;

            if (Verbose)
            {
                Console.WriteLine("\nTEST: now copy index...");
            }
            do
            {
                if (Verbose)
                {
                    Console.WriteLine("\nTEST: cycle...");
                }
                Directory         copy = new MockDirectoryWrapper(Random, new RAMDirectory(dir2, IOContext.DEFAULT));
                RandomIndexWriter w    = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                    this,
#endif
                    Random, dir2);
                w.AddIndexes(copy);
                docCount = w.MaxDoc;
                w.Dispose();
                mulFactor *= 2;
            } while (docCount < 3000);

            RandomIndexWriter riw  = new RandomIndexWriter(Random, dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000)));
            Document          doc_ = new Document();

            doc_.Add(NewTextField("field2", "xxx", Field.Store.NO));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                riw.AddDocument(doc_);
            }
            doc_ = new Document();
            doc_.Add(NewTextField("field2", "big bad bug", Field.Store.NO));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                riw.AddDocument(doc_);
            }
            reader      = riw.GetReader();
            bigSearcher = NewSearcher(reader);
            riw.Dispose();
        }
Example #8
        /// <summary>
        /// The purpose of this test is to make sure that bulk merge doesn't accumulate useless data over runs.
        /// </summary>
        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
        public virtual void TestMergeStability()
        {
            Directory dir = NewDirectory();
            // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
            MergePolicy mp = NewTieredMergePolicy();
            mp.NoCFSRatio = 0;
            var cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(false).SetMergePolicy(mp);
            using (var w = new RandomIndexWriter(Random(), dir, cfg))
            {
                var numDocs = AtLeast(500);
                for (var i = 0; i < numDocs; ++i)
                {
                    var d = new Document();
                    AddRandomFields(d);
                    w.AddDocument(d);
                }
                w.ForceMerge(1);
                w.Commit();
            }
            IndexReader reader = DirectoryReader.Open(dir);

            Directory dir2 = NewDirectory();
            mp = NewTieredMergePolicy();
            mp.NoCFSRatio = 0;
            cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(false).SetMergePolicy(mp);

            using (var w = new RandomIndexWriter(Random(), dir2, cfg))
            {
                w.AddIndexes(reader);
                w.Commit();
            }

            assertEquals(BytesUsedByExtension(dir), BytesUsedByExtension(dir2));

            reader.Dispose();
            dir.Dispose();
            dir2.Dispose();
        }
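
BytesUsedByExtension is a helper from the Lucene.NET test framework: it reduces a directory to a map from file extension to total bytes, so the assertion compares the shape of the two indexes rather than their raw files. After ForceMerge(1) on the first index and one bulk AddIndexes into the second, both hold a single segment, and any extra bytes per extension would mean the bulk merge accumulated useless data. A rough sketch of such a helper, under the assumption that it simply sums Directory.FileLength per extension (the framework's real version may also skip bookkeeping files such as segments_N):

using System.Collections.Generic;
using Lucene.Net.Store;

public static class DirStats
{
    // Sums file sizes per extension, e.g. { "tim" -> 12345, "doc" -> 678 }.
    // Two structurally identical indexes produce equal maps even when the
    // individual file names differ.
    public static IDictionary<string, long> BytesByExtension(Directory dir)
    {
        var bytesUsed = new Dictionary<string, long>();
        foreach (string file in dir.ListAll())
        {
            int dot = file.LastIndexOf('.');
            string ext = dot == -1 ? string.Empty : file.Substring(dot + 1);
            bytesUsed.TryGetValue(ext, out long current);
            bytesUsed[ext] = current + dir.FileLength(file);
        }
        return bytesUsed;
    }
}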