Example #1
        public virtual void TestFloatNorms()
        {
            Directory    dir      = NewDirectory();
            MockAnalyzer analyzer = new MockAnalyzer(Random);

            analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);

            IndexWriterConfig config   = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            Similarity        provider = new MySimProvider(this);

            config.SetSimilarity(provider);
            RandomIndexWriter writer = new RandomIndexWriter(Random, dir, config);
            LineFileDocs      docs   = new LineFileDocs(Random);
            int num = AtLeast(100);

            for (int i = 0; i < num; i++)
            {
                Document doc       = docs.NextDoc();
                float    nextFloat = Random.NextFloat();
                // Cast to a double to get more precision in the string output.
                Field f = new TextField(floatTestField, "" + ((double)nextFloat).ToString(CultureInfo.InvariantCulture), Field.Store.YES);
                f.Boost = nextFloat;

                doc.Add(f);
                writer.AddDocument(doc);
                doc.RemoveField(floatTestField);
                if (Rarely())
                {
                    writer.Commit();
                }
            }
            writer.Commit();
            writer.Dispose();
            AtomicReader     open  = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
            NumericDocValues norms = open.GetNormValues(floatTestField);

            Assert.IsNotNull(norms);
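            // Each norm, decoded from its raw int bits, must equal the boost that was also stored as the field's text.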
            for (int i = 0; i < open.MaxDoc; i++)
            {
                Document document = open.Document(i);
                float    expected = Convert.ToSingle(document.Get(floatTestField), CultureInfo.InvariantCulture);
                Assert.AreEqual(expected, J2N.BitConversion.Int32BitsToSingle((int)norms.Get(i)), 0.0f);
            }
            open.Dispose();
            dir.Dispose();
            docs.Dispose();
        }
Example #2
        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
        public virtual void TestReadSkip()
        {
            Directory         dir    = NewDirectory();
            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

            FieldType ft = new FieldType();

            ft.IsStored = true;
            ft.Freeze();

            string @string = TestUtil.RandomSimpleString(Random(), 50);
            var    bytes   = @string.GetBytes(Encoding.UTF8);
            long   l       = Random().NextBoolean() ? Random().Next(42) : Random().NextLong();
            int    i       = Random().NextBoolean() ? Random().Next(42) : Random().Next();
            float  f       = Random().NextFloat();
            double d       = Random().NextDouble();

            IList <Field> fields = Arrays.AsList(new Field("bytes", bytes, ft), new Field("string", @string, ft), new Int64Field("long", l, Field.Store.YES), new Int32Field("int", i, Field.Store.YES), new SingleField("float", f, Field.Store.YES), new DoubleField("double", d, Field.Store.YES));

            for (int k = 0; k < 100; ++k)
            {
                Document doc = new Document();
                foreach (Field fld in fields)
                {
                    doc.Add(fld);
                }
                iw.w.AddDocument(doc);
            }
            iw.Commit();

            DirectoryReader reader = DirectoryReader.Open(dir);
            int             docID  = Random().Next(100);
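            // Load a random doc field-by-field (field-selective stored-fields access); each loaded field must match the original.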

            foreach (Field fld in fields)
            {
                string          fldName = fld.Name;
                Document        sDoc    = reader.Document(docID, Collections.Singleton(fldName));
                IIndexableField sField  = sDoc.GetField(fldName);
                if (typeof(Field) == fld.GetType())
                {
                    Assert.AreEqual(fld.GetBinaryValue(), sField.GetBinaryValue());
                    Assert.AreEqual(fld.GetStringValue(), sField.GetStringValue());
                }
                else
                {
#pragma warning disable 612, 618
                    Assert.AreEqual(fld.GetNumericValue(), sField.GetNumericValue());
#pragma warning restore 612, 618
                }
            }
            reader.Dispose();
            iw.Dispose();
            dir.Dispose();
        }
Example #3
        public virtual void TestSorted()
        {
            Directory dir   = NewDirectory();
            Document  doc   = new Document();
            BytesRef  @ref  = new BytesRef();
            Field     field = new SortedDocValuesField("bytes", @ref);

            doc.Add(field);

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                @ref.CopyChars(TestUtil.RandomUnicodeString(Random()));
                if (DefaultCodecSupportsDocsWithField() && Random().Next(7) == 0)
                {
                    iw.AddDocument(new Document());
                }
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.Reader;
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            SortedDocValues multi  = MultiDocValues.GetSortedValues(ir, "bytes");
            SortedDocValues single = merged.GetSortedDocValues("bytes");
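            // The composite (multi) view must match the force-merged single segment: same value count, and the same ord and value for every document.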

            Assert.AreEqual(single.ValueCount, multi.ValueCount);
            BytesRef actual   = new BytesRef();
            BytesRef expected = new BytesRef();

            for (int i = 0; i < numDocs; i++)
            {
                // check ord
                Assert.AreEqual(single.GetOrd(i), multi.GetOrd(i));
                // check value
                single.Get(i, expected);
                multi.Get(i, actual);
                Assert.AreEqual(expected, actual);
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Example #4
        public virtual void TestDeleteByTermIsCurrent()
        {
            // get reader
            DirectoryReader reader = Writer.GetReader();

            // assert index has a document and reader is up to date
            Assert.AreEqual(1, Writer.NumDocs, "One document should be in the index");
            Assert.IsTrue(reader.IsCurrent(), "One document added, reader should be current");

            // remove document
            Term idTerm = new Term("UUID", "1");

            Writer.DeleteDocuments(idTerm);
            Writer.Commit();

            // assert document has been deleted (index changed), reader is stale
            Assert.AreEqual(0, Writer.NumDocs, "Document should be removed");
            Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");

            reader.Dispose();
        }
Example #5
        // [Test, Timeout(300000)] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
        public virtual void TestConcurrentReads()
        {
            Directory         dir    = NewDirectory();
            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

            // make sure the readers are properly cloned
            Document doc   = new Document();
            Field    field = new StringField("fld", "", Field.Store.YES);

            doc.Add(field);
            int numDocs = AtLeast(1000);

            for (int i = 0; i < numDocs; ++i)
            {
                field.SetStringValue("" + i);
                iw.AddDocument(doc);
            }
            iw.Commit();

            DirectoryReader          rd              = DirectoryReader.Open(dir);
            IndexSearcher            searcher        = new IndexSearcher(rd);
            int                      concurrentReads = AtLeast(5);
            int                      readsPerThread  = AtLeast(50);
            IList <ThreadClass>      readThreads     = new List <ThreadClass>();
            AtomicObject <Exception> ex              = new AtomicObject <Exception>();

            for (int i = 0; i < concurrentReads; ++i)
            {
                readThreads.Add(new ThreadAnonymousInnerClassHelper(numDocs, rd, searcher, readsPerThread, ex, i));
            }
            foreach (ThreadClass thread in readThreads)
            {
                thread.Start();
            }
            foreach (ThreadClass thread in readThreads)
            {
                thread.Join();
            }
            rd.Dispose();
            if (ex.Value != null)
            {
                throw ex.Value;
            }

            iw.Dispose();
            dir.Dispose();
        }
Example #6
        public virtual void TestConcurrentReads()
        {
            using Directory dir = NewDirectory();
            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));

            iwConf.SetMaxBufferedDocs(RandomInts.RandomInt32Between(Random, 2, 30));
            using RandomIndexWriter iw = new RandomIndexWriter(Random, dir, iwConf);

            // make sure the readers are properly cloned
            Document doc   = new Document();
            Field    field = new StringField("fld", "", Field.Store.YES);

            doc.Add(field);
            int numDocs = AtLeast(1000);

            for (int i = 0; i < numDocs; ++i)
            {
                field.SetStringValue("" + i);
                iw.AddDocument(doc);
            }
            iw.Commit();
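            // Several reader threads now search the same DirectoryReader/IndexSearcher concurrently; the first failure is captured and rethrown below.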

            AtomicReference <Exception> ex = new AtomicReference <Exception>();

            using (DirectoryReader rd = DirectoryReader.Open(dir))
            {
                IndexSearcher     searcher        = new IndexSearcher(rd);
                int               concurrentReads = AtLeast(5);
                int               readsPerThread  = AtLeast(50);
                IList <ThreadJob> readThreads     = new JCG.List <ThreadJob>();

                for (int i = 0; i < concurrentReads; ++i)
                {
                    readThreads.Add(new ThreadAnonymousClass(numDocs, rd, searcher, readsPerThread, ex));
                }
                foreach (ThreadJob thread in readThreads)
                {
                    thread.Start();
                }
                foreach (ThreadJob thread in readThreads)
                {
                    thread.Join();
                }
            } // rd.Dispose();
            if (ex.Value != null)
            {
                ExceptionDispatchInfo.Capture(ex.Value).Throw(); // LUCENENET: Rethrow to preserve stack details from the other thread
            }
        }
Example #7
        public virtual void TestMerge()
        {
            RandomDocumentFactory docFactory = new RandomDocumentFactory(this, 5, 20);
            int            numDocs           = AtLeast(100);
            int            numDeletes        = Random.Next(numDocs);
            HashSet <int?> deletes           = new HashSet <int?>();

            while (deletes.Count < numDeletes)
            {
                deletes.Add(Random.Next(numDocs));
            }
            foreach (Options options in ValidOptions())
            {
                RandomDocument[] docs = new RandomDocument[numDocs];
                for (int i = 0; i < numDocs; ++i)
                {
                    docs[i] = docFactory.NewDocument(TestUtil.NextInt32(Random, 1, 3), AtLeast(10), options);
                }
                using (Directory dir = NewDirectory())
                    using (RandomIndexWriter writer = new RandomIndexWriter(Random, dir, ClassEnvRule.similarity, ClassEnvRule.timeZone))
                    {
                        for (int i = 0; i < numDocs; ++i)
                        {
                            writer.AddDocument(AddId(docs[i].ToDocument(), "" + i));
                            if (Rarely())
                            {
                                writer.Commit();
                            }
                        }
                        foreach (int delete in deletes)
                        {
                            writer.DeleteDocuments(new Term("id", "" + delete));
                        }
                        // merge with deletes
                        writer.ForceMerge(1);
                        using (IndexReader reader = writer.GetReader())
                        {
                            for (int i = 0; i < numDocs; ++i)
                            {
                                if (!deletes.Contains(i))
                                {
                                    int docID = DocID(reader, "" + i);
                                    AssertEquals(docs[i], reader.GetTermVectors(docID));
                                }
                            }
                        } // reader.Dispose();
                    }// writer.Dispose();, dir.Dispose();
            }
        }
Example #8
        public virtual void TestMixing()
        {
            // no positions
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);

            ft.IndexOptions = IndexOptions.DOCS_AND_FREQS;

            Directory         dir = NewDirectory();
            RandomIndexWriter iw  = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            for (int i = 0; i < 20; i++)
            {
                Document doc = new Document();
                if (i < 19 && Random.NextBoolean())
                {
                    for (int j = 0; j < 50; j++)
                    {
                        doc.Add(new TextField("foo", "i have positions", Field.Store.NO));
                    }
                }
                else
                {
                    for (int j = 0; j < 50; j++)
                    {
                        doc.Add(new Field("foo", "i have no positions", ft));
                    }
                }
                iw.AddDocument(doc);
                iw.Commit();
            }

            if (Random.NextBoolean())
            {
                iw.ForceMerge(1);
            }
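            // Whatever the segment layout, the merged FieldInfos for "foo" must report DOCS_AND_FREQS and no payloads.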

            DirectoryReader ir  = iw.GetReader();
            FieldInfos      fis = MultiFields.GetMergedFieldInfos(ir);

            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS, fis.FieldInfo("foo").IndexOptions);
            Assert.IsFalse(fis.FieldInfo("foo").HasPayloads);
            iw.Dispose();
            ir.Dispose();
            dir.Dispose(); // checkindex
        }
Example #9
        public override void SetUp()
        {
            base.SetUp();

            // initialize directory
            directory = NewDirectory();
            writer    = new RandomIndexWriter(Random, directory);

            // write document
            Document doc = new Document();

            doc.Add(NewTextField("UUID", "1", Field.Store.YES));
            writer.AddDocument(doc);
            writer.Commit();
        }
Example #10
        public virtual void TestBinary()
        {
            Directory dir   = NewDirectory();
            Document  doc   = new Document();
            BytesRef  @ref  = new BytesRef();
            Field     field = new BinaryDocValuesField("bytes", @ref);

            doc.Add(field);

            IndexWriterConfig iwc = NewIndexWriterConfig(Random, TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random, dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                @ref.CopyChars(TestUtil.RandomUnicodeString(Random));
                iw.AddDocument(doc);
                if (Random.Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.GetReader();

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.GetReader();
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            BinaryDocValues multi    = MultiDocValues.GetBinaryValues(ir, "bytes");
            BinaryDocValues single   = merged.GetBinaryDocValues("bytes");
            BytesRef        actual   = new BytesRef();
            BytesRef        expected = new BytesRef();
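            // Per-document binary values from the multi view and the merged segment must be identical.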

            for (int i = 0; i < numDocs; i++)
            {
                single.Get(i, expected);
                multi.Get(i, actual);
                Assert.AreEqual(expected, actual);
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Example #11
        public virtual void TestMixing()
        {
            // no positions
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);

            ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;

            Directory         dir = NewDirectory();
            RandomIndexWriter iw  = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

            for (int i = 0; i < 20; i++)
            {
                Document doc = new Document();
                if (i < 19 && Random().NextBoolean())
                {
                    for (int j = 0; j < 50; j++)
                    {
                        doc.Add(new TextField("foo", "i have positions", Field.Store.NO));
                    }
                }
                else
                {
                    for (int j = 0; j < 50; j++)
                    {
                        doc.Add(new Field("foo", "i have no positions", ft));
                    }
                }
                iw.AddDocument(doc);
                iw.Commit();
            }

            if (Random().NextBoolean())
            {
                iw.ForceMerge(1);
            }

            DirectoryReader ir  = iw.Reader;
            FieldInfos      fis = MultiFields.GetMergedFieldInfos(ir);

            Assert.AreEqual(FieldInfo.IndexOptions.DOCS_AND_FREQS, fis.FieldInfo("foo").FieldIndexOptions);
            Assert.IsFalse(fis.FieldInfo("foo").HasPayloads());
            iw.Dispose();
            ir.Dispose();
            dir.Dispose(); // checkindex
        }
Example #12
        public override void SetUp()
        {
            base.SetUp();

            // initialize directory
            Directory = NewDirectory();
            Writer    = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, Directory);

            // write document
            Document doc = new Document();
            doc.Add(NewTextField("UUID", "1", Field.Store.YES));
            Writer.AddDocument(doc);
            Writer.Commit();
        }
Example #13
        public virtual void TestNumerics()
        {
            Directory dir   = NewDirectory();
            Document  doc   = new Document();
            Field     field = new NumericDocValuesField("numbers", 0);

            doc.Add(field);

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                field.SetInt64Value(Random().NextLong());
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.Reader;
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            NumericDocValues multi  = MultiDocValues.GetNumericValues(ir, "numbers");
            NumericDocValues single = merged.GetNumericDocValues("numbers");
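            // Per-document numeric values from the multi view and the merged segment must be identical.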

            for (int i = 0; i < numDocs; i++)
            {
                Assert.AreEqual(single.Get(i), multi.Get(i));
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Example #14
        public virtual void TestMergeStability()
        {
            Directory dir = NewDirectory();
            // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
            MergePolicy mp = NewTieredMergePolicy();

            mp.NoCFSRatio = 0;
            var cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(false).SetMergePolicy(mp);

            using (var w = new RandomIndexWriter(Random(), dir, cfg))
            {
                var numDocs = AtLeast(500);
                for (var i = 0; i < numDocs; ++i)
                {
                    var d = new Document();
                    AddRandomFields(d);
                    w.AddDocument(d);
                }
                w.ForceMerge(1);
                w.Commit();
            }
            IndexReader reader = DirectoryReader.Open(dir);

            Directory dir2 = NewDirectory();

            mp            = NewTieredMergePolicy();
            mp.NoCFSRatio = 0;
            cfg           = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetUseCompoundFile(false).SetMergePolicy(mp);

            using (var w = new RandomIndexWriter(Random(), dir2, cfg))
            {
                w.AddIndexes(reader);
                w.Commit();
            }

            assertEquals(BytesUsedByExtension(dir), BytesUsedByExtension(dir2));

            reader.Dispose();
            dir.Dispose();
            dir2.Dispose();
        }
Example #15
        public virtual void TestMergeStability()
        {
            using Directory dir = NewDirectory();
            // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
            MergePolicy mp = NewTieredMergePolicy();

            mp.NoCFSRatio = 0;
            var cfg = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(false).SetMergePolicy(mp);

            using (var w = new RandomIndexWriter(Random, dir, cfg))
            {
                var numDocs = AtLeast(500);
                for (var i = 0; i < numDocs; ++i)
                {
                    var d = new Document();
                    AddRandomFields(d);
                    w.AddDocument(d);
                }
                w.ForceMerge(1);
                w.Commit();
            }
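            // Add the force-merged index into a fresh directory via AddIndexes; both directories should then use the same bytes per file extension.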
            using IndexReader reader = DirectoryReader.Open(dir);
            using Directory dir2     = NewDirectory();
            mp            = NewTieredMergePolicy();
            mp.NoCFSRatio = 0;
            cfg           = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetUseCompoundFile(false).SetMergePolicy(mp);

            using (var w = new RandomIndexWriter(Random, dir2, cfg))
            {
                w.AddIndexes(reader);
                w.Commit();
            }

            // LUCENENET: We need to explicitly call Equals() and use HashMap in order to ensure our
            // equality check is done correctly. Calling Assert.AreEqual doesn't guarantee this is done.
            Assert.True(BytesUsedByExtension(dir).Equals(BytesUsedByExtension(dir2)));
        }
Example #16
 public override void Run()
 {
     try
     {
         Document        doc = new Document();
         DirectoryReader r   = DirectoryReader.Open(dir);
         Field           f   = newStringField("f", "", Field.Store.NO);
         doc.Add(f);
         int count = 0;
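         // Add docs one at a time, committing and reopening the reader after each, and verify each new term is visible; stop at the deadline or if another thread failed.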
         do
         {
             if (failed)
             {
                 break;
             }
             for (int j = 0; j < 10; j++)
             {
                 string s = finalI + "_" + Convert.ToString(count++);
                 f.SetStringValue(s);
                 w.AddDocument(doc);
                 w.Commit();
                 DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
                 Assert.IsNotNull(r2);
                 Assert.IsTrue(!r2.Equals(r));
                 r.Dispose();
                 r = r2;
                 Assert.AreEqual(1, r.DocFreq(new Term("f", s)), "term=f:" + s + "; r=" + r);
             }
         } while ((J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) < endTime); // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
         r.Dispose();
     }
     catch (Exception t) when(t.IsThrowable())
     {
         failed.Value = true;
         throw RuntimeException.Create(t);
     }
 }
Example #17
 public override void Run()
 {
     try
     {
         Document        doc = new Document();
         DirectoryReader r   = DirectoryReader.Open(Dir);
         Field           f   = NewStringField("f", "", Field.Store.NO);
         doc.Add(f);
         int count = 0;
         do
         {
             if (Failed.Get())
             {
                 break;
             }
             for (int j = 0; j < 10; j++)
             {
                 string s = FinalI + "_" + Convert.ToString(count++);
                 f.SetStringValue(s);
                 w.AddDocument(doc);
                 w.Commit();
                 DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
                 Assert.IsNotNull(r2);
                 Assert.IsTrue(!r2.Equals(r));
                 r.Dispose();
                 r = r2;
                 Assert.AreEqual(1, r.DocFreq(new Term("f", s)), "term=f:" + s + "; r=" + r);
             }
         } while (Environment.TickCount < EndTime);
         r.Dispose();
     }
     catch (Exception t)
     {
         Failed.Set(true);
         throw new Exception(t.Message, t);
     }
 }
Example #18
            public override void Run()
            {
                try
                {
                    int docCount = 0;
                    while (Environment.TickCount < StopTime)
                    {
                        Document doc = new Document();
                        doc.Add(NewStringField("dc", "" + docCount, Field.Store.YES));
                        doc.Add(NewTextField("field", "here is some text", Field.Store.YES));
                        w.AddDocument(doc);

                        if (docCount % 13 == 0)
                        {
                            w.Commit();
                        }
                        docCount++;
                    }
                }
                catch (Exception e)
                {
                    throw new Exception(e.Message, e);
                }
            }
Example #19
            public override void Run()
            {
                try
                {
                    int docCount = 0;
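                    // Keep adding small documents until the deadline, committing every 13 docs.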
                    while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
                    {
                        Document doc = new Document();
                        doc.Add(newStringField("dc", "" + docCount, Field.Store.YES));
                        doc.Add(newTextField("field", "here is some text", Field.Store.YES));
                        w.AddDocument(doc);

                        if (docCount % 13 == 0)
                        {
                            w.Commit();
                        }
                        docCount++;
                    }
                }
                catch (Exception e) when(e.IsException())
                {
                    throw RuntimeException.Create(e);
                }
            }
Example #20
        // [Test, LongRunningTest, Timeout(int.MaxValue)] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
        public virtual void TestBigDocuments()
        {
            // "big" as "much bigger than the chunk size"
            // for this test we force a FS dir
            // we can't just use newFSDirectory, because this test doesn't really index anything.
            // so if we get NRTCachingDir+SimpleText, we make massive stored fields and OOM (LUCENE-4484)
            Directory         dir    = new MockDirectoryWrapper(Random(), new MMapDirectory(CreateTempDir("testBigDocuments")));
            IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));

            iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }

            Document emptyDoc = new Document(); // emptyDoc
            Document bigDoc1  = new Document(); // lot of small fields
            Document bigDoc2  = new Document(); // 1 very big field

            Field idField = new StringField("id", "", Field.Store.NO);

            emptyDoc.Add(idField);
            bigDoc1.Add(idField);
            bigDoc2.Add(idField);

            FieldType onlyStored = new FieldType(StringField.TYPE_STORED);

            onlyStored.IsIndexed = false;
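            // "fld" is stored but not indexed, so the verification below relies on stored-fields retrieval only.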

            Field smallField = new Field("fld", RandomByteArray(Random().Next(10), 256), onlyStored);
            int   numFields  = RandomInts.NextIntBetween(Random(), 500000, 1000000);

            for (int i = 0; i < numFields; ++i)
            {
                bigDoc1.Add(smallField);
            }

            Field bigField = new Field("fld", RandomByteArray(RandomInts.NextIntBetween(Random(), 1000000, 5000000), 2), onlyStored);

            bigDoc2.Add(bigField);

            int numDocs = AtLeast(5);

            Document[] docs = new Document[numDocs];
            for (int i = 0; i < numDocs; ++i)
            {
                docs[i] = RandomInts.RandomFrom(Random(), Arrays.AsList(emptyDoc, bigDoc1, bigDoc2));
            }
            for (int i = 0; i < numDocs; ++i)
            {
                idField.SetStringValue("" + i);
                iw.AddDocument(docs[i]);
                if (Random().Next(numDocs) == 0)
                {
                    iw.Commit();
                }
            }
            iw.Commit();
            iw.ForceMerge(1); // look at what happens when big docs are merged
            DirectoryReader rd       = DirectoryReader.Open(dir);
            IndexSearcher   searcher = new IndexSearcher(rd);

            for (int i = 0; i < numDocs; ++i)
            {
                Query   query   = new TermQuery(new Term("id", "" + i));
                TopDocs topDocs = searcher.Search(query, 1);
                Assert.AreEqual(1, topDocs.TotalHits, "" + i);
                Document doc = rd.Document(topDocs.ScoreDocs[0].Doc);
                Assert.IsNotNull(doc);
                IIndexableField[] fieldValues = doc.GetFields("fld");
                Assert.AreEqual(docs[i].GetFields("fld").Length, fieldValues.Length);
                if (fieldValues.Length > 0)
                {
                    Assert.AreEqual(docs[i].GetFields("fld")[0].GetBinaryValue(), fieldValues[0].GetBinaryValue());
                }
            }
            rd.Dispose();
            iw.Dispose();
            dir.Dispose();
        }
Example #21
        public virtual void TestTermUTF16SortOrder()
        {
            Random            rnd    = Random;
            Directory         dir    = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                rnd, dir);
            Document d = new Document();
            // Single segment
            Field f = NewStringField("f", "", Field.Store.NO);

            d.Add(f);
            char[]        chars    = new char[2];
            ISet <string> allTerms = new JCG.HashSet <string>();

            int num = AtLeast(200);
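            // Terms are random single code units below/above the surrogate block, or full surrogate pairs, to exercise ordering across the surrogate boundary.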

            for (int i = 0; i < num; i++)
            {
                string s;
                if (rnd.NextBoolean())
                {
                    // Single char
                    if (rnd.NextBoolean())
                    {
                        // Above surrogates
                        chars[0] = (char)GetInt(rnd, 1 + UnicodeUtil.UNI_SUR_LOW_END, 0xffff);
                    }
                    else
                    {
                        // Below surrogates
                        chars[0] = (char)GetInt(rnd, 0, UnicodeUtil.UNI_SUR_HIGH_START - 1);
                    }
                    s = new string(chars, 0, 1);
                }
                else
                {
                    // Surrogate pair
                    chars[0] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_HIGH_START, UnicodeUtil.UNI_SUR_HIGH_END);
                    Assert.IsTrue(((int)chars[0]) >= UnicodeUtil.UNI_SUR_HIGH_START && ((int)chars[0]) <= UnicodeUtil.UNI_SUR_HIGH_END);
                    chars[1] = (char)GetInt(rnd, UnicodeUtil.UNI_SUR_LOW_START, UnicodeUtil.UNI_SUR_LOW_END);
                    s        = new string(chars, 0, 2);
                }
                allTerms.Add(s);
                f.SetStringValue(s);

                writer.AddDocument(d);

                if ((1 + i) % 42 == 0)
                {
                    writer.Commit();
                }
            }

            IndexReader r = writer.GetReader();

            // Test each sub-segment
            foreach (AtomicReaderContext ctx in r.Leaves)
            {
                CheckTermsOrder(ctx.Reader, allTerms, false);
            }
            CheckTermsOrder(r, allTerms, true);

            // Test multi segment
            r.Dispose();

            writer.ForceMerge(1);

            // Test single segment
            r = writer.GetReader();
            CheckTermsOrder(r, allTerms, true);
            r.Dispose();

            writer.Dispose();
            dir.Dispose();
        }
Example #22
        public virtual void Test()
        {
            // update variables
            int commitPercent        = Random().Next(20);
            int softCommitPercent    = Random().Next(100); // what percent of the commits are soft
            int deletePercent        = Random().Next(50);
            int deleteByQueryPercent = Random().Next(25);
            int ndocs                = AtLeast(50);
            int nWriteThreads        = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
            int maxConcurrentCommits = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5); // number of committers at a time... needed if we want to avoid commit errors due to exceeding the max

            bool tombstones = Random().NextBoolean();

            // query variables
            AtomicLong operations = new AtomicLong(AtLeast(10000)); // number of query operations to perform in total

            int nReadThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);

            InitModel(ndocs);

            FieldType storedOnlyType = new FieldType();

            storedOnlyType.Stored = true;

            if (VERBOSE)
            {
                Console.WriteLine("\n");
                Console.WriteLine("TEST: commitPercent=" + commitPercent);
                Console.WriteLine("TEST: softCommitPercent=" + softCommitPercent);
                Console.WriteLine("TEST: deletePercent=" + deletePercent);
                Console.WriteLine("TEST: deleteByQueryPercent=" + deleteByQueryPercent);
                Console.WriteLine("TEST: ndocs=" + ndocs);
                Console.WriteLine("TEST: nWriteThreads=" + nWriteThreads);
                Console.WriteLine("TEST: nReadThreads=" + nReadThreads);
                Console.WriteLine("TEST: maxConcurrentCommits=" + maxConcurrentCommits);
                Console.WriteLine("TEST: tombstones=" + tombstones);
                Console.WriteLine("TEST: operations=" + operations);
                Console.WriteLine("\n");
            }

            AtomicInteger numCommitting = new AtomicInteger();

            IList <ThreadClass> threads = new List <ThreadClass>();

            Directory dir = NewDirectory();

            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

            writer.DoRandomForceMergeAssert = false;
            writer.Commit();
            Reader = DirectoryReader.Open(dir);
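            // Start the writer threads (add/delete/commit work) and the reader threads, then wait for all of them to finish.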

            for (int i = 0; i < nWriteThreads; i++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);

                threads.Add(thread);
            }

            for (int i = 0; i < nReadThreads; i++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper2(this, "READER" + i, ndocs, tombstones, operations);

                threads.Add(thread);
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Start();
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            writer.Dispose();
            if (VERBOSE)
            {
                Console.WriteLine("TEST: close reader=" + Reader);
            }
            Reader.Dispose();
            dir.Dispose();
        }
Example #23
            public override void Run()
            {
                try
                {
                    while (Operations.Get() > 0)
                    {
                        int oper = rand.Next(100);

                        if (oper < CommitPercent)
                        {
                            if (NumCommitting.IncrementAndGet() <= MaxConcurrentCommits)
                            {
                                IDictionary <int, long> newCommittedModel;
                                long            version;
                                DirectoryReader oldReader;

                                lock (OuterInstance)
                                {
                                    newCommittedModel = new Dictionary <int, long>(OuterInstance.Model); // take a snapshot
                                    version           = OuterInstance.SnapshotCount++;
                                    oldReader         = OuterInstance.Reader;
                                    oldReader.IncRef(); // increment the reference since we will use this for reopening
                                }

                                DirectoryReader newReader;
                                if (rand.Next(100) < SoftCommitPercent)
                                {
                                    // assertU(h.Commit("softCommit","true"));
                                    if (Random().NextBoolean())
                                    {
                                        if (VERBOSE)
                                        {
                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": call writer.getReader");
                                        }
                                        newReader = Writer.GetReader(true);
                                    }
                                    else
                                    {
                                        if (VERBOSE)
                                        {
                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": reopen reader=" + oldReader + " version=" + version);
                                        }
                                        newReader = DirectoryReader.OpenIfChanged(oldReader, Writer.w, true);
                                    }
                                }
                                else
                                {
                                    // assertU(commit());
                                    if (VERBOSE)
                                    {
                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": commit+reopen reader=" + oldReader + " version=" + version);
                                    }
                                    Writer.Commit();
                                    if (VERBOSE)
                                    {
                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": now reopen after commit");
                                    }
                                    newReader = DirectoryReader.OpenIfChanged(oldReader);
                                }

                                // Code below assumes newReader comes w/
                                // extra ref:
                                if (newReader == null)
                                {
                                    oldReader.IncRef();
                                    newReader = oldReader;
                                }

                                oldReader.DecRef();

                                lock (OuterInstance)
                                {
                                    // install the new reader if it's newest (and check the current version since another reader may have already been installed)
                                    //System.out.println(Thread.currentThread().getName() + ": newVersion=" + newReader.getVersion());
                                    Debug.Assert(newReader.RefCount > 0);
                                    Debug.Assert(OuterInstance.Reader.RefCount > 0);
                                    if (newReader.Version > OuterInstance.Reader.Version)
                                    {
                                        if (VERBOSE)
                                        {
                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": install new reader=" + newReader);
                                        }
                                        OuterInstance.Reader.DecRef();
                                        OuterInstance.Reader = newReader;

                                        // Silly: forces fieldInfos to be
                                        // loaded so we don't hit IOE on later
                                        // reader.toString
                                        newReader.ToString();

                                        // install this snapshot only if it's newer than the current one
                                        if (version >= OuterInstance.CommittedModelClock)
                                        {
                                            if (VERBOSE)
                                            {
                                                Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": install new model version=" + version);
                                            }
                                            OuterInstance.CommittedModel      = newCommittedModel;
                                            OuterInstance.CommittedModelClock = version;
                                        }
                                        else
                                        {
                                            if (VERBOSE)
                                            {
                                                Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": skip install new model version=" + version);
                                            }
                                        }
                                    }
                                    else
                                    {
                                        // if the same reader, don't decRef.
                                        if (VERBOSE)
                                        {
                                            Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": skip install new reader=" + newReader);
                                        }
                                        newReader.DecRef();
                                    }
                                }
                            }
                            NumCommitting.DecrementAndGet();
                        }
                        else
                        {
                            int    id   = rand.Next(Ndocs);
                            object sync = OuterInstance.SyncArr[id];

                            // set the lastId before we actually change it sometimes to try and
                            // uncover more race conditions between writing and reading
                            bool before = Random().NextBoolean();
                            if (before)
                            {
                                OuterInstance.LastId = id;
                            }

                            // We can't concurrently update the same document and retain our invariants of increasing values
                            // since we can't guarantee what order the updates will be executed.
                            lock (sync)
                            {
                                long val     = OuterInstance.Model[id];
                                long nextVal = Math.Abs(val) + 1;

                                if (oper < CommitPercent + DeletePercent)
                                {
                                    // assertU("<delete><id>" + id + "</id></delete>");

                                    // add tombstone first
                                    if (Tombstones)
                                    {
                                        Document d = new Document();
                                        d.Add(OuterInstance.NewStringField("id", "-" + Convert.ToString(id), Documents.Field.Store.YES));
                                        d.Add(OuterInstance.NewField(OuterInstance.Field, Convert.ToString(nextVal), StoredOnlyType));
                                        Writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id)), d);
                                    }

                                    if (VERBOSE)
                                    {
                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": term delDocs id:" + id + " nextVal=" + nextVal);
                                    }
                                    Writer.DeleteDocuments(new Term("id", Convert.ToString(id)));
                                    OuterInstance.Model[id] = -nextVal;
                                }
                                else if (oper < CommitPercent + DeletePercent + DeleteByQueryPercent)
                                {
                                    //assertU("<delete><query>id:" + id + "</query></delete>");

                                    // add tombstone first
                                    if (Tombstones)
                                    {
                                        Document d = new Document();
                                        d.Add(OuterInstance.NewStringField("id", "-" + Convert.ToString(id), Documents.Field.Store.YES));
                                        d.Add(OuterInstance.NewField(OuterInstance.Field, Convert.ToString(nextVal), StoredOnlyType));
                                        Writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id)), d);
                                    }

                                    if (VERBOSE)
                                    {
                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": query delDocs id:" + id + " nextVal=" + nextVal);
                                    }
                                    Writer.DeleteDocuments(new TermQuery(new Term("id", Convert.ToString(id))));
                                    OuterInstance.Model[id] = -nextVal;
                                }
                                else
                                {
                                    // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
                                    Document d = new Document();
                                    d.Add(OuterInstance.NewStringField("id", Convert.ToString(id), Documents.Field.Store.YES));
                                    d.Add(OuterInstance.NewField(OuterInstance.Field, Convert.ToString(nextVal), StoredOnlyType));
                                    if (VERBOSE)
                                    {
                                        Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": u id:" + id + " val=" + nextVal);
                                    }
                                    Writer.UpdateDocument(new Term("id", Convert.ToString(id)), d);
                                    if (Tombstones)
                                    {
                                        // remove tombstone after new addition (this should be optional?)
                                        Writer.DeleteDocuments(new Term("id", "-" + Convert.ToString(id)));
                                    }
                                    OuterInstance.Model[id] = nextVal;
                                }
                            }

                            if (!before)
                            {
                                OuterInstance.LastId = id;
                            }
                        }
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": FAILED: unexpected exception");
                    Console.WriteLine(e.StackTrace);
                    throw new Exception(e.Message, e);
                }
            }
Example #24
        public virtual void TestIndexing()
        {
            DirectoryInfo        tmpDir = CreateTempDir("TestNeverDelete");
            BaseDirectoryWrapper d      = NewFSDirectory(tmpDir);

            // We want to "see" files removed if Lucene removed
            // them. This is still worth running on Windows since
            // the IR opens and closes some files.
            if (d is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)d).NoDeleteOpenFile = false;
            }
            RandomIndexWriter w = new RandomIndexWriter(Random(), d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));

            w.w.Config.SetMaxBufferedDocs(TestUtil.NextInt(Random(), 5, 30));

            w.Commit();
            ThreadClass[] indexThreads = new ThreadClass[Random().Next(4)];
            long          stopTime     = Environment.TickCount + AtLeast(1000);

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x]      = new ThreadAnonymousInnerClassHelper(w, stopTime, NewStringField, NewTextField);
                indexThreads[x].Name = "Thread " + x;
                indexThreads[x].Start();
            }

            HashSet <string> allFiles = new HashSet <string>();

            DirectoryReader r = DirectoryReader.Open(d);

            while (Environment.TickCount < stopTime)
            {
                IndexCommit ic = r.IndexCommit;
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: check files: " + ic.FileNames);
                }
                allFiles.AddAll(ic.FileNames);
                // Make sure no old files were removed
                foreach (string fileName in allFiles)
                {
                    Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
                }
                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
                if (r2 != null)
                {
                    r.Dispose();
                    r = r2;
                }
                Thread.Sleep(1);
            }
            r.Dispose();

            foreach (ThreadClass t in indexThreads)
            {
                t.Join();
            }
            w.Dispose();
            d.Dispose();

            System.IO.Directory.Delete(tmpDir.FullName, true);
        }
Example #25
        public virtual void TestDocsWithField()
        {
            AssumeTrue("codec does not support docsWithField", DefaultCodecSupportsDocsWithField());
            Directory dir = NewDirectory();

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                if (Random().Next(4) >= 0)
                {
                    doc.Add(new NumericDocValuesField("numbers", Random().NextLong()));
                }
                doc.Add(new NumericDocValuesField("numbersAlways", Random().NextLong()));
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.Reader;
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            IBits multi  = MultiDocValues.GetDocsWithField(ir, "numbers");
            IBits single = merged.GetDocsWithField("numbers");

            if (multi == null)
            {
                Assert.IsNull(single);
            }
            else
            {
                Assert.AreEqual(single.Length, multi.Length);
                for (int i = 0; i < numDocs; i++)
                {
                    Assert.AreEqual(single.Get(i), multi.Get(i));
                }
            }

            multi  = MultiDocValues.GetDocsWithField(ir, "numbersAlways");
            single = merged.GetDocsWithField("numbersAlways");
            Assert.AreEqual(single.Length, multi.Length);
            for (int i = 0; i < numDocs; i++)
            {
                Assert.AreEqual(single.Get(i), multi.Get(i));
            }
            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Example #26
        public virtual void TestSortedSetWithDups()
        {
            AssumeTrue("codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
            Directory dir = NewDirectory();

            IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);

            iwc.SetMergePolicy(NewLogMergePolicy());
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);

            int numDocs = AtLeast(500);

            for (int i = 0; i < numDocs; i++)
            {
                Document doc       = new Document();
                int      numValues = Random().Next(5);
                for (int j = 0; j < numValues; j++)
                {
                    doc.Add(new SortedSetDocValuesField("bytes", new BytesRef(TestUtil.RandomSimpleString(Random(), 2))));
                }
                iw.AddDocument(doc);
                if (Random().Next(17) == 0)
                {
                    iw.Commit();
                }
            }
            DirectoryReader ir = iw.Reader;

            iw.ForceMerge(1);
            DirectoryReader ir2    = iw.Reader;
            AtomicReader    merged = GetOnlySegmentReader(ir2);

            iw.Dispose();

            SortedSetDocValues multi  = MultiDocValues.GetSortedSetValues(ir, "bytes");
            SortedSetDocValues single = merged.GetSortedSetDocValues("bytes");
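            // Compare the multi view against the merged segment: same value count, same value per ord, and the same ord sequence per document.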

            if (multi == null)
            {
                Assert.IsNull(single);
            }
            else
            {
                Assert.AreEqual(single.ValueCount, multi.ValueCount);
                BytesRef actual   = new BytesRef();
                BytesRef expected = new BytesRef();
                // check values
                for (long i = 0; i < single.ValueCount; i++)
                {
                    single.LookupOrd(i, expected);
                    multi.LookupOrd(i, actual);
                    Assert.AreEqual(expected, actual);
                }
                // check ord list
                for (int i = 0; i < numDocs; i++)
                {
                    single.SetDocument(i);
                    List <long?> expectedList = new List <long?>();
                    long         ord;
                    while ((ord = single.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                    {
                        expectedList.Add(ord);
                    }

                    multi.SetDocument(i);
                    int upto = 0;
                    while ((ord = multi.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                    {
                        Assert.AreEqual((long)expectedList[upto], ord);
                        upto++;
                    }
                    Assert.AreEqual(expectedList.Count, upto);
                }
            }

            ir.Dispose();
            ir2.Dispose();
            dir.Dispose();
        }
Example #27
        public virtual void TestIndexing()
        {
            DirectoryInfo        tmpDir = CreateTempDir("TestNeverDelete");
            BaseDirectoryWrapper d      = NewFSDirectory(tmpDir);

            // We want to "see" files removed if Lucene removed
            // them. This is still worth running on Windows since
            // the IR opens and closes some files.
            if (d is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)d).NoDeleteOpenFile = false;
            }
            RandomIndexWriter w = new RandomIndexWriter(Random, d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));

            w.IndexWriter.Config.SetMaxBufferedDocs(TestUtil.NextInt32(Random, 5, 30));

            w.Commit();
            ThreadJob[] indexThreads = new ThreadJob[Random.Next(4)];
            long        stopTime     = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) + AtLeast(1000); // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x]      = new ThreadAnonymousClass(w, stopTime, NewStringField, NewTextField);
                indexThreads[x].Name = "Thread " + x;
                indexThreads[x].Start();
            }

            ISet <string> allFiles = new JCG.HashSet <string>();

            DirectoryReader r = DirectoryReader.Open(d);
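            // While the indexing threads run, keep reopening the reader and verify that no file from any earlier commit has been deleted.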

            while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
            {
                IndexCommit ic = r.IndexCommit;
                if (Verbose)
                {
                    Console.WriteLine("TEST: check files: " + ic.FileNames);
                }
                allFiles.UnionWith(ic.FileNames);
                // Make sure no old files were removed
                foreach (string fileName in allFiles)
                {
                    Assert.IsTrue(SlowFileExists(d, fileName), "file " + fileName + " does not exist");
                }
                DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
                if (r2 != null)
                {
                    r.Dispose();
                    r = r2;
                }
                Thread.Sleep(1);
            }
            r.Dispose();

            foreach (ThreadJob t in indexThreads)
            {
                t.Join();
            }
            w.Dispose();
            d.Dispose();

            System.IO.Directory.Delete(tmpDir.FullName, true);
        }