예제 #1
0
        public virtual void TestGetScores()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            iw.Commit();
            IndexReader reader = iw.GetReader();
            iw.Dispose();

            IndexSearcher searcher = NewSearcher(reader);
            Weight fakeWeight = new TermQuery(new Term("fake", "weight")).CreateWeight(searcher);
            Scorer scorer = new SimpleScorer(fakeWeight);
            ScoreCachingCollector collector = new ScoreCachingCollector(Scores.Length);

            collector.SetScorer(scorer);

            // Iterate the scorer so that its doc() advances, collecting every hit.
            int docId;
            while ((docId = scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
            {
                collector.Collect(docId);
            }

            // Each cached score must exactly match the expected score.
            for (int i = 0; i < Scores.Length; i++)
            {
                Assert.AreEqual(Scores[i], collector.Mscores[i], 0f);
            }

            reader.Dispose();
            dir.Dispose();
        }
        public virtual void TestGetScores()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

            iw.Commit();
            IndexReader reader = iw.Reader;
            iw.Dispose();

            IndexSearcher searcher = NewSearcher(reader);
            Weight fakeWeight = new TermQuery(new Term("fake", "weight")).CreateWeight(searcher);
            Scorer scorer = new SimpleScorer(fakeWeight);
            ScoreCachingCollector collector = new ScoreCachingCollector(Scores.Length);

            collector.Scorer = scorer;

            // Iterate the scorer so that its doc() advances, collecting every hit.
            int docId;
            while ((docId = scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
            {
                collector.Collect(docId);
            }

            // Each cached score must exactly match the expected score.
            for (int i = 0; i < Scores.Length; i++)
            {
                Assert.AreEqual(Scores[i], collector.Mscores[i], 0f);
            }

            reader.Dispose();
            dir.Dispose();
        }
예제 #3
0
        public virtual void TestMostlySet()
        {
            Directory dir = NewDirectory();
            int numBits = TestUtil.NextInt(Random(), 30, 1000);

            // For each target number of cleared bits, start from a fully-set
            // vector and clear random distinct bits one at a time.
            for (int numClear = 0; numClear < 20; numClear++)
            {
                BitVector vector = new BitVector(numBits);
                vector.InvertAll();

                int cleared = 0;
                while (cleared < numClear)
                {
                    int bit = Random().Next(numBits);
                    // Don't use getAndClear, so that count is recomputed
                    if (vector.Get(bit))
                    {
                        vector.Clear(bit);
                        cleared++;
                        Assert.AreEqual(numBits - cleared, vector.Count());
                    }
                }
            }

            dir.Dispose();
        }
예제 #4
0
        public virtual void TestExactlyAtLimit()
        {
            Directory dir2 = NewFSDirectory(CreateTempDir("2BDocs2"));
            IndexWriter writer = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));

            // Fill the second index with 262143 empty documents.
            Document emptyDoc = new Document();
            for (int i = 0; i < 262143; i++)
            {
                writer.AddDocument(emptyDoc);
            }
            writer.Dispose();

            DirectoryReader reader1 = DirectoryReader.Open(Dir);
            DirectoryReader reader2 = DirectoryReader.Open(dir2);

            // 8191 copies of the first reader plus the smaller one should put
            // the combined doc count exactly at int.MaxValue.
            IndexReader[] subReaders = new IndexReader[8192];
            Arrays.Fill(subReaders, reader1);
            subReaders[subReaders.Length - 1] = reader2;
            MultiReader mr = new MultiReader(subReaders);

            Assert.AreEqual(int.MaxValue, mr.MaxDoc);
            Assert.AreEqual(int.MaxValue, mr.NumDocs);

            reader1.Dispose();
            reader2.Dispose();
            dir2.Dispose();
        }
예제 #5
0
 public override void TearDown()
 {
     // Dispose the shared reader before the directory that backs it, then
     // clear the directory reference and let the base class finish cleanup.
     Reader.Dispose();
     Dir.Dispose();
     Dir = null;
     base.TearDown();
 }
예제 #6
0
        /// <summary>
        /// Verifies collation-aware term-range behavior: the first range
        /// (collated Farsi/Arabic order) must exclude the single indexed term,
        /// while the second range must include it.
        /// </summary>
        public virtual void TestFarsiRangeQueryCollating(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd)
        {
            Directory   dir    = NewDirectory();
            IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer));
            Document    doc    = new Document();

            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
            // orders the U+0698 character before the U+0633 character, so the single
            // index Term below should NOT be returned by a TermRangeQuery with a Farsi
            // Collator (or an Arabic one for the case when Farsi is not supported).
            doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES));
            writer.AddDocument(doc);
            writer.Dispose();
            IndexReader   reader   = DirectoryReader.Open(dir);
            IndexSearcher searcher = new IndexSearcher(reader);

            Search.Query query = new TermRangeQuery("content", firstBeg, firstEnd, true, true);
            ScoreDoc[]   hits  = searcher.Search(query, null, 1000).ScoreDocs;
            // Fully qualify Assert for consistency with the assertion below.
            Assert.AreEqual(0, hits.Length, "The index Term should not be included.");

            query = new TermRangeQuery("content", secondBeg, secondEnd, true, true);
            hits  = searcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length, "The index Term should be included.");
            reader.Dispose();
            dir.Dispose();
        }
예제 #7
0
        /// <summary>
        /// Writes a FieldInfos to disk, reads it back, and checks the field
        /// count plus the HasVectors/OmitsNorms flags of several known fields.
        /// </summary>
        public virtual void Test()
        {
            string     name       = "testFile";
            Directory  dir        = NewDirectory();
            FieldInfos fieldInfos = CreateAndWriteFieldInfos(dir, name);

            FieldInfos readIn = ReadFieldInfos(dir, name);

            // Assert.AreEqual reports both values on failure, unlike IsTrue(a == b).
            Assert.AreEqual(fieldInfos.Count, readIn.Count);
            FieldInfo info = readIn.FieldInfo("textField1");

            Assert.IsNotNull(info);
            Assert.IsFalse(info.HasVectors);
            Assert.IsFalse(info.OmitsNorms);

            info = readIn.FieldInfo("textField2");
            Assert.IsNotNull(info);
            Assert.IsFalse(info.OmitsNorms);

            info = readIn.FieldInfo("textField3");
            Assert.IsNotNull(info);
            Assert.IsFalse(info.HasVectors);
            Assert.IsTrue(info.OmitsNorms);

            info = readIn.FieldInfo("omitNorms");
            Assert.IsNotNull(info);
            Assert.IsFalse(info.HasVectors);
            Assert.IsTrue(info.OmitsNorms);

            dir.Dispose();
        }
예제 #8
0
        public virtual void TestFarsiTermRangeQuery(Analyzer analyzer, BytesRef firstBeg, BytesRef firstEnd, BytesRef secondBeg, BytesRef secondEnd)
        {
            Directory farsiIndex = NewDirectory();
            IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, analyzer));

            Document doc = new Document();
            doc.Add(new TextField("content", "\u0633\u0627\u0628", Field.Store.YES));
            doc.Add(new StringField("body", "body", Field.Store.YES));
            writer.AddDocument(doc);
            writer.Dispose();

            IndexReader reader = DirectoryReader.Open(farsiIndex);
            IndexSearcher searcher = this.NewSearcher(reader);

            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
            // orders the U+0698 character before the U+0633 character, so the single
            // index Term below should NOT be returned by a TermRangeQuery
            // with a Farsi Collator (or an Arabic one for the case when Farsi is
            // not supported).
            Search.Query rangeQuery = new TermRangeQuery("content", firstBeg, firstEnd, true, true);
            ScoreDoc[] hits = searcher.Search(rangeQuery, null, 1000).ScoreDocs;
            Assert.AreEqual(0, hits.Length, "The index Term should not be included.");

            rangeQuery = new TermRangeQuery("content", secondBeg, secondEnd, true, true);
            hits = searcher.Search(rangeQuery, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length, "The index Term should be included.");

            reader.Dispose();
            farsiIndex.Dispose();
        }
예제 #9
0
        // Test using various international locales with accented characters (which
        // sort differently depending on locale)
        //
        // Copied (and slightly modified) from
        // Lucene.Net.Search.TestSort.testInternationalSort()
        //
        // TODO: this test is really fragile. there are already 3 different cases,
        // depending upon unicode version.
        public virtual void TestCollationKeySort(Analyzer usAnalyzer, Analyzer franceAnalyzer, Analyzer swedenAnalyzer, Analyzer denmarkAnalyzer, string usResult, string frResult, string svResult, string dkResult)
        {
            Directory   indexStore = NewDirectory();
            IndexWriter writer     = new IndexWriter(indexStore, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)));

            // document data:
            // the tracer field is used to determine which document was hit
            // columns: tracer, contents ("x"/"y" selects query), US, France, Sweden, Denmark
            string[][] sortData = new string[][] { new string[] { "A", "x", "p\u00EAche", "p\u00EAche", "p\u00EAche", "p\u00EAche" }, new string[] { "B", "y", "HAT", "HAT", "HAT", "HAT" }, new string[] { "C", "x", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9", "p\u00E9ch\u00E9" }, new string[] { "D", "y", "HUT", "HUT", "HUT", "HUT" }, new string[] { "E", "x", "peach", "peach", "peach", "peach" }, new string[] { "F", "y", "H\u00C5T", "H\u00C5T", "H\u00C5T", "H\u00C5T" }, new string[] { "G", "x", "sin", "sin", "sin", "sin" }, new string[] { "H", "y", "H\u00D8T", "H\u00D8T", "H\u00D8T", "H\u00D8T" }, new string[] { "I", "x", "s\u00EDn", "s\u00EDn", "s\u00EDn", "s\u00EDn" }, new string[] { "J", "y", "HOT", "HOT", "HOT", "HOT" } };

            FieldType customType = new FieldType();

            // stored so the tracer value can be read back from search hits
            customType.IsStored = true;

            // Index one document per row; each locale column is optional and is
            // analyzed with the corresponding locale's analyzer.
            for (int i = 0; i < sortData.Length; ++i)
            {
                Document doc = new Document();
                doc.Add(new Field("tracer", sortData[i][0], customType));
                doc.Add(new TextField("contents", sortData[i][1], Field.Store.NO));
                if (sortData[i][2] != null)
                {
                    doc.Add(new TextField("US", usAnalyzer.GetTokenStream("US", new StringReader(sortData[i][2]))));
                }
                if (sortData[i][3] != null)
                {
                    doc.Add(new TextField("France", franceAnalyzer.GetTokenStream("France", new StringReader(sortData[i][3]))));
                }
                if (sortData[i][4] != null)
                {
                    doc.Add(new TextField("Sweden", swedenAnalyzer.GetTokenStream("Sweden", new StringReader(sortData[i][4]))));
                }
                if (sortData[i][5] != null)
                {
                    doc.Add(new TextField("Denmark", denmarkAnalyzer.GetTokenStream("Denmark", new StringReader(sortData[i][5]))));
                }
                writer.AddDocument(doc);
            }
            writer.ForceMerge(1);
            writer.Dispose();
            IndexReader   reader   = DirectoryReader.Open(indexStore);
            IndexSearcher searcher = new IndexSearcher(reader);

            Sort sort = new Sort();

            Search.Query queryX = new TermQuery(new Term("contents", "x"));
            Search.Query queryY = new TermQuery(new Term("contents", "y"));

            // Sort by each locale field in turn and compare the tracer order
            // against the expected result string for that locale.
            sort.SetSort(new SortField("US", SortFieldType.STRING));
            this.AssertMatches(searcher, queryY, sort, usResult);

            sort.SetSort(new SortField("France", SortFieldType.STRING));
            this.AssertMatches(searcher, queryX, sort, frResult);

            sort.SetSort(new SortField("Sweden", SortFieldType.STRING));
            this.AssertMatches(searcher, queryY, sort, svResult);

            sort.SetSort(new SortField("Denmark", SortFieldType.STRING));
            this.AssertMatches(searcher, queryY, sort, dkResult);
            reader.Dispose();
            indexStore.Dispose();
        }
예제 #10
0
 public static void AfterClass()
 {
     // One-time teardown: dispose the shared reader before the directory
     // that backs it, and null the statics so no state leaks between runs.
     Reader.Dispose();
     Reader = null;
     Directory.Dispose();
     Directory = null;
 }
예제 #11
0
        /// <summary>
        /// Indexes 10 identical two-term documents and verifies that a
        /// positions enum visits every document in order, reporting position 1
        /// for term "b" and position 0 for term "a".
        /// </summary>
        public virtual void TestSeek()
        {
            Directory   directory = NewDirectory();
            IndexWriter writer    = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

            for (int i = 0; i < 10; i++)
            {
                Document doc = new Document();
                doc.Add(NewTextField(this.Field, "a b", Documents.Field.Store.YES));
                writer.AddDocument(doc);
            }

            writer.Dispose();
            IndexReader reader = DirectoryReader.Open(directory);

            DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), this.Field, new BytesRef("b"));

            for (int i = 0; i < 10; i++)
            {
                tp.NextDoc();
                // Expected value first so assertion failures report correctly.
                Assert.AreEqual(i, tp.DocID());
                Assert.AreEqual(1, tp.NextPosition());
            }

            tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), this.Field, new BytesRef("a"));

            for (int i = 0; i < 10; i++)
            {
                tp.NextDoc();
                Assert.AreEqual(i, tp.DocID());
                Assert.AreEqual(0, tp.NextPosition());
            }
            reader.Dispose();
            directory.Dispose();
        }
예제 #12
0
 public override void AfterClass()
 {
     // Dispose the shared reader before the directory that backs it, clear
     // both references, then let the base class finish its own teardown.
     Reader.Dispose();
     Reader = null;
     Directory.Dispose();
     Directory = null;
     base.AfterClass();
 }
예제 #13
0
        public virtual void TestNewestSegment_Mem()
        {
            Directory dir = NewDirectory();
            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

            // A freshly opened writer has no segments yet.
            Assert.IsNull(writer.NewestSegment());

            writer.Dispose();
            dir.Dispose();
        }
예제 #14
0
        public virtual void TestReadOnly()
        {
            const string name = "testFile";
            Directory dir = NewDirectory();

            // Write field infos to disk, read them back, and verify the
            // read-only view matches what was written.
            FieldInfos written  = CreateAndWriteFieldInfos(dir, name);
            FieldInfos readOnly = ReadFieldInfos(dir, name);
            AssertReadOnly(readOnly, written);

            dir.Dispose();
        }
예제 #15
0
 public override void TearDown()
 {
     // Release the reader (if one was opened) before disposing its directory.
     reader?.Dispose();
     reader = null;
     dir.Dispose();
     base.TearDown();
 }
예제 #16
0
        public virtual void TestClearedBitNearEnd()
        {
            Directory dir = NewDirectory();
            int totalBits = TestUtil.NextInt(Random(), 7, 1000);

            BitVector vector = new BitVector(totalBits);
            vector.InvertAll();
            // Clear a single bit within the last 7 positions, then persist.
            vector.Clear(totalBits - TestUtil.NextInt(Random(), 1, 7));
            vector.Write(dir, "test", NewIOContext(Random()));

            Assert.AreEqual(totalBits - 1, vector.Count());
            dir.Dispose();
        }
예제 #17
0
 protected virtual void Dispose(bool disposing)
 {
     // Standard dispose pattern: managed resources are released only when
     // called from Dispose() (disposing == true), not from a finalizer.
     if (disposing)
     {
         // dispose managed resources; the directory goes last since the
         // searcher and writer presumably hold references into it — verify
         searcher.Dispose();
         writer.Dispose();
         analyzer.Dispose();
         luceneIndexDirectory.Dispose();
     }
     // free native resources
 }
예제 #18
0
        public virtual void TestNegativeScores()
        {
            // The Top*Collectors previously filtered out documents with <= scores. this
            // behavior has changed. this test checks that if PositiveOnlyScoresFilter
            // wraps one of these collectors, documents with <= 0 scores are indeed
            // filtered.

            // Count how many of the expected scores are strictly positive.
            int numPositiveScores = 0;
            for (int idx = 0; idx < Scores.Length; idx++)
            {
                if (Scores[idx] > 0)
                {
                    numPositiveScores++;
                }
            }

            Directory dir = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            iw.Commit();
            IndexReader reader = iw.GetReader();
            iw.Dispose();

            IndexSearcher searcher = NewSearcher(reader);
            Weight fakeWeight = new TermQuery(new Term("fake", "weight")).CreateWeight(searcher);
            Scorer scorer = new SimpleScorer(fakeWeight);
            TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.Create(Scores.Length, true);
            ICollector collector = new PositiveScoresOnlyCollector(tdc);

            collector.SetScorer(scorer);
            while (scorer.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
            {
                collector.Collect(0);
            }

            TopDocs topDocs = tdc.GetTopDocs();
            ScoreDoc[] scoreDocs = topDocs.ScoreDocs;

            // Only positive-scoring documents should have been collected.
            Assert.AreEqual(numPositiveScores, topDocs.TotalHits);
            foreach (ScoreDoc scoreDoc in scoreDocs)
            {
                Assert.IsTrue(scoreDoc.Score > 0, "only positive scores should return: " + scoreDoc.Score);
            }

            reader.Dispose();
            dir.Dispose();
        }
예제 #19
0
        public virtual void TestOutOfOrderCollection()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            for (int docNum = 0; docNum < 10; docNum++)
            {
                writer.AddDocument(new Document());
            }

            bool[] inOrder = new bool[] { false, true };
            string[] actualTSDCClass = new string[] { "OutOfOrderTopScoreDocCollector", "InOrderTopScoreDocCollector" };

            BooleanQuery bq = new BooleanQuery();
            // Add a Query with SHOULD, since bw.Scorer() returns BooleanScorer2
            // which delegates to BS if there are no mandatory clauses.
            bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
            // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
            // the clause instead of BQ.
            bq.MinimumNumberShouldMatch = 1;

            IndexReader reader = writer.GetReader();
            IndexSearcher searcher = NewSearcher(reader);

            for (int variant = 0; variant < inOrder.Length; variant++)
            {
                // The factory must hand back the concrete collector matching
                // the requested in-order/out-of-order mode.
                TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.Create(3, inOrder[variant]);
                Assert.AreEqual("Lucene.Net.Search.TopScoreDocCollector+" + actualTSDCClass[variant], tdc.GetType().FullName);

                searcher.Search(new MatchAllDocsQuery(), tdc);

                ScoreDoc[] scoreDocs = tdc.GetTopDocs().ScoreDocs;
                Assert.AreEqual(3, scoreDocs.Length);
                for (int rank = 0; rank < scoreDocs.Length; rank++)
                {
                    Assert.AreEqual(rank, scoreDocs[rank].Doc, "expected doc Id " + rank + " found " + scoreDocs[rank].Doc);
                }
            }

            writer.Dispose();
            reader.Dispose();
            dir.Dispose();
        }
예제 #20
0
        public virtual void TestSparseWrite()
        {
            Directory dir = NewDirectory();
            const int numBits = 10240;

            BitVector vector = new BitVector(numBits);
            vector.InvertAll();

            // Clear up to four random bits so the vector stays almost fully set.
            int numToClear = Random().Next(5);
            for (int i = 0; i < numToClear; i++)
            {
                vector.Clear(Random().Next(numBits));
            }

            vector.Write(dir, "test", NewIOContext(Random()));

            // An almost-fully-set vector should serialize to a tiny file.
            long size = dir.FileLength("test");
            Assert.IsTrue(size < 100, "size=" + size);

            dir.Dispose();
        }
예제 #21
0
        public virtual void TestNoOrds()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random, dir);

            // Index a single document with term vectors enabled on "foo".
            Document doc = new Document();
            FieldType fieldType = new FieldType(TextField.TYPE_NOT_STORED);
            fieldType.StoreTermVectors = true;
            doc.Add(new Field("foo", "this is a test", fieldType));
            writer.AddDocument(doc);

            AtomicReader reader = GetOnlySegmentReader(writer.GetReader());
            Terms vector = reader.GetTermVector(0, "foo");
            Assert.IsNotNull(vector);

            TermsEnum termsEnum = vector.GetEnumerator();
            Assert.AreEqual(TermsEnum.SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef("this")));

            // Term-vector enums do not support ords; both operations must throw.
            try
            {
                var _ = termsEnum.Ord;
                Assert.Fail();
            }
            catch (Exception e) when (e.IsUnsupportedOperationException())
            {
                // expected exception
            }

            try
            {
                termsEnum.SeekExact(0);
                Assert.Fail();
            }
            catch (Exception e) when (e.IsUnsupportedOperationException())
            {
                // expected exception
            }

            reader.Dispose();
            writer.Dispose();
            dir.Dispose();
        }
예제 #22
0
        /// <summary>
        /// Dispose of this object: flush and close the writer first, then
        /// release the searcher, analyzer, and directory.
        /// </summary>
        public void Dispose()
        {
            if (_writer != null)
            {
                // Merge segments and flush all pending state before closing.
                _writer.Optimize();
                _writer.Flush(true, true, true);
                _writer.Dispose();
            }

            _searcher?.Dispose();
            _analyzer?.Dispose();
            _directory?.Dispose();
        }
예제 #23
0
        public void LuceneDispose()
        {
            // Tear down in order: searcher, reader, analyzer, then directory.
            // Each reference is cleared after disposal so repeat calls are safe.
            searcher?.Dispose();
            searcher = null;

            reader?.Dispose();
            reader = null;

            analyzer?.Dispose();
            analyzer = null;

            direc?.Dispose();
            direc = null;
        }
예제 #24
0
 public void SaveAndDispose()
 {
     analyzer?.Dispose();
     analyzer = null;

     if (writer != null)
     {
         // Merge segments and persist all pending changes before closing.
         writer.Optimize();
         writer.Commit();
         writer.Dispose();
         writer = null;
     }

     direc?.Dispose();
     direc = null;
 }
예제 #25
0
        private void Dispose(bool disposing)
        {
            // Idempotent: any call after the first is a no-op.
            if (_disposed)
            {
                return;
            }

            if (disposing)
            {
                // Managed resources are released only when invoked from
                // Dispose(), not from a finalizer.
                _directory?.Dispose();
                _indexReader?.Dispose();
            }

            _directory     = null;
            _indexReader   = null;
            _indexSearcher = null;
            _disposed      = true;
        }
        // Collections.synchronizedMap(new WeakHashMap<SegmentCoreReaders, bool?>());
        /// <summary>
        /// Stress-test driver: launches several concurrent indexing threads
        /// against a single <c>IndexWriter</c> while searching, then verifies
        /// deletions, packID doc grouping, and final doc counts before closing
        /// the writer and checking the index.
        /// </summary>
        /// <param name="testName">Used to name the temp directory for this run.</param>
        public virtual void RunTest(string testName)
        {
            // Reset shared counters/flags so repeated runs start clean.
            Failed.Set(false);
            AddCount.Set(0);
            DelCount.Set(0);
            PackCount.Set(0);

            DateTime t0 = DateTime.UtcNow;

            Random random = new Random(Random().Next());
            LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());
            DirectoryInfo tempDir = CreateTempDir(testName);
            Dir = GetDirectory(NewMockFSDirectory(tempDir)); // some subclasses rely on this being MDW
            if (Dir is BaseDirectoryWrapper)
            {
                ((BaseDirectoryWrapper)Dir).CheckIndexOnClose = false; // don't double-checkIndex, we do it ourselves.
            }
            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetInfoStream(new FailOnNonBulkMergesInfoStream());

            if (LuceneTestCase.TEST_NIGHTLY)
            {
                // newIWConfig makes smallish max seg size, which
                // results in tons and tons of segments for this test
                // when run nightly:
                MergePolicy mp = conf.MergePolicy;
                if (mp is TieredMergePolicy)
                {
                    ((TieredMergePolicy)mp).MaxMergedSegmentMB = 5000.0;
                }
                else if (mp is LogByteSizeMergePolicy)
                {
                    ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1000.0;
                }
                else if (mp is LogMergePolicy)
                {
                    ((LogMergePolicy)mp).MaxMergeDocs = 100000;
                }
            }

            conf.SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(this));

            if (VERBOSE)
            {
                conf.InfoStream = new PrintStreamInfoStreamAnonymousInnerClassHelper(this, Console.Out);
            }
            Writer = new IndexWriter(Dir, conf);
            TestUtil.ReduceOpenFiles(Writer);

            //TaskScheduler es = Random().NextBoolean() ? null : Executors.newCachedThreadPool(new NamedThreadFactory(testName));
            TaskScheduler es = null;

            DoAfterWriter(es);

            int NUM_INDEX_THREADS = TestUtil.NextInt(Random(), 2, 4);

            int RUN_TIME_SEC = LuceneTestCase.TEST_NIGHTLY ? 300 : RANDOM_MULTIPLIER;

            // Thread-safe collections shared with the indexing threads.
            ISet<string> delIDs = new ConcurrentHashSet<string>(new HashSet<string>());
            ISet<string> delPackIDs = new ConcurrentHashSet<string>(new HashSet<string>());
            IList<SubDocs> allSubDocs = new SynchronizedCollection<SubDocs>();

            DateTime stopTime = DateTime.UtcNow.AddSeconds(RUN_TIME_SEC);

            ThreadClass[] indexThreads = LaunchIndexingThreads(docs, NUM_INDEX_THREADS, stopTime, delIDs, delPackIDs, allSubDocs);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: DONE start " + NUM_INDEX_THREADS + " indexing threads [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]");
            }

            // Let index build up a bit
            Thread.Sleep(100);

            DoSearching(es, stopTime);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: all searching done [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]");
            }

            // Wait for all indexing threads to finish before verifying.
            for (int thread = 0; thread < indexThreads.Length; thread++)
            {
                indexThreads[thread].Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine("TEST: done join indexing threads [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]; addCount=" + AddCount + " delCount=" + DelCount);
            }

            IndexSearcher s = FinalSearcher;
            if (VERBOSE)
            {
                Console.WriteLine("TEST: finalSearcher=" + s);
            }

            Assert.IsFalse(Failed.Get());

            bool doFail = false;

            // Verify: make sure delIDs are in fact deleted:
            foreach (string id in delIDs)
            {
                TopDocs hits = s.Search(new TermQuery(new Term("docid", id)), 1);
                if (hits.TotalHits != 0)
                {
                    Console.WriteLine("doc id=" + id + " is supposed to be deleted, but got " + hits.TotalHits + " hits; first docID=" + hits.ScoreDocs[0].Doc);
                    doFail = true;
                }
            }

            // Verify: make sure delPackIDs are in fact deleted:
            foreach (string id in delPackIDs)
            {
                TopDocs hits = s.Search(new TermQuery(new Term("packID", id)), 1);
                if (hits.TotalHits != 0)
                {
                    Console.WriteLine("packID=" + id + " is supposed to be deleted, but got " + hits.TotalHits + " matches");
                    doFail = true;
                }
            }

            // Verify: make sure each group of sub-docs are still in docID order:
            foreach (SubDocs subDocs in allSubDocs.ToList())
            {
                TopDocs hits = s.Search(new TermQuery(new Term("packID", subDocs.PackID)), 20);
                if (!subDocs.Deleted)
                {
                    // We sort by relevance but the scores should be identical so sort falls back to by docID:
                    if (hits.TotalHits != subDocs.SubIDs.Count)
                    {
                        Console.WriteLine("packID=" + subDocs.PackID + ": expected " + subDocs.SubIDs.Count + " hits but got " + hits.TotalHits);
                        doFail = true;
                    }
                    else
                    {
                        int lastDocID = -1;
                        int startDocID = -1;
                        foreach (ScoreDoc scoreDoc in hits.ScoreDocs)
                        {
                            int docID = scoreDoc.Doc;
                            if (lastDocID != -1)
                            {
                                Assert.AreEqual(1 + lastDocID, docID);
                            }
                            else
                            {
                                startDocID = docID;
                            }
                            lastDocID = docID;
                            Document doc = s.Doc(docID);
                            Assert.AreEqual(subDocs.PackID, doc.Get("packID"));
                        }

                        // Each subID must land on consecutive docIDs starting at startDocID.
                        lastDocID = startDocID - 1;
                        foreach (string subID in subDocs.SubIDs)
                        {
                            hits = s.Search(new TermQuery(new Term("docid", subID)), 1);
                            Assert.AreEqual(1, hits.TotalHits);
                            int docID = hits.ScoreDocs[0].Doc;
                            if (lastDocID != -1)
                            {
                                Assert.AreEqual(1 + lastDocID, docID);
                            }
                            lastDocID = docID;
                        }
                    }
                }
                else
                {
                    // Pack was deleted -- make sure its docs are
                    // deleted.  We can't verify packID is deleted
                    // because we can re-use packID for update:
                    foreach (string subID in subDocs.SubIDs)
                    {
                        Assert.AreEqual(0, s.Search(new TermQuery(new Term("docid", subID)), 1).TotalHits);
                    }
                }
            }

            // Verify: make sure all not-deleted docs are in fact
            // not deleted:
            int endID = Convert.ToInt32(docs.NextDoc().Get("docid"));
            docs.Dispose();

            for (int id = 0; id < endID; id++)
            {
                string stringID = "" + id;
                if (!delIDs.Contains(stringID))
                {
                    TopDocs hits = s.Search(new TermQuery(new Term("docid", stringID)), 1);
                    if (hits.TotalHits != 1)
                    {
                        Console.WriteLine("doc id=" + stringID + " is not supposed to be deleted, but got hitCount=" + hits.TotalHits + "; delIDs=" + string.Join(",",  delIDs.ToArray()));
                        doFail = true;
                    }
                }
            }
            Assert.IsFalse(doFail);

            // Final doc count must equal adds minus deletes, both before and
            // after the commit.
            Assert.AreEqual(AddCount.Get() - DelCount.Get(), s.IndexReader.NumDocs, "index=" + Writer.SegString() + " addCount=" + AddCount + " delCount=" + DelCount);
            ReleaseSearcher(s);

            Writer.Commit();

            Assert.AreEqual(AddCount.Get() - DelCount.Get(), Writer.NumDocs(), "index=" + Writer.SegString() + " addCount=" + AddCount + " delCount=" + DelCount);

            DoClose();
            Writer.Dispose(false);

            // Cannot shutdown until after writer is closed because
            // writer has merged segment warmer that uses IS to run
            // searches, and that IS may be using this es!
            /*if (es != null)
            {
              es.shutdown();
              es.awaitTermination(1, TimeUnit.SECONDS);
            }*/

            TestUtil.CheckIndex(Dir);
            Dir.Dispose();
            System.IO.Directory.Delete(tempDir.FullName, true);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: done [" + (DateTime.UtcNow - t0).TotalMilliseconds + " ms]");
            }
        }
예제 #27
0
        /// <summary>
        /// Shuts the index down: marks it disposed under the write lock, waits briefly
        /// for in-flight searches to finish, then tears down the writer, its analyzer,
        /// and the backing directory, logging (but not propagating) teardown failures.
        /// </summary>
        public void Dispose()
        {
            try
            {
                // Try a short non-blocking acquisition first purely so that a slow
                // shutdown can be explained in the log before we block for real.
                if (!Monitor.TryEnter(m_writeLock, 100))
                {
                    var reason = m_waitReason;
                    if (reason != null)
                    {
                        LogIndexing.Warn(
                            "Waiting for {0} to complete before disposing of index {1}, that might take a while if the server is very busy",
                            reason, Name);
                    }

                    Monitor.Enter(m_writeLock);
                }

                m_disposed = true;

                var searcherHolder = m_currentIndexSearcherHolder;
                if (searcherHolder != null)
                {
                    // Detach the searcher and give ongoing searches up to 5s to drain.
                    var searchesDone = searcherHolder.SetIndexSearcher(null, true);
                    if (!searchesDone.WaitOne(TimeSpan.FromSeconds(5)))
                    {
                        LogIndexing.Warn(
                            "After closing the index searching, we waited for 5 seconds for the searching to be done, but it wasn't. Continuing with normal shutdown anyway.");
                    }
                }

                // Swap the writer field out before touching it so no other code path
                // can observe a half-disposed writer.
                var writer = m_indexWriter;
                m_indexWriter = null;
                if (writer != null)
                {
                    try
                    {
                        writer.Analyzer.Close();
                    }
                    catch (Exception ex)
                    {
                        LogIndexing.ErrorException("Error while closing the index (closing the analyzer failed)", ex);
                    }

                    try
                    {
                        writer.Dispose();
                    }
                    catch (Exception ex)
                    {
                        LogIndexing.ErrorException("Error when closing the index", ex);
                    }
                }

                try
                {
                    m_directory.Dispose();
                }
                catch (Exception ex)
                {
                    LogIndexing.ErrorException("Error when closing the directory", ex);
                }
            }
            finally
            {
                Monitor.Exit(m_writeLock);
            }
        }
예제 #28
0
 /// <summary>
 /// Per-test cleanup: releases the reader before the directory that backs it,
 /// then defers to the base class teardown.
 /// </summary>
 public override void TearDown()
 {
     reader.Dispose();    // must precede directory.Dispose(): the reader holds files open in it
     directory.Dispose();
     base.TearDown();
 }
예제 #29
0
 /// <summary>
 /// Per-test cleanup: releases the IndexReader (Ir) before the Directory (Dir)
 /// that backs it, then defers to the base class teardown.
 /// </summary>
 public override void TearDown()
 {
     Ir.Dispose();    // must precede Dir.Dispose(): the reader holds files open in it
     Dir.Dispose();
     base.TearDown();
 }
예제 #30
0
 /// <summary>
 /// One-time fixture cleanup: disposes the shared Directory and clears the
 /// static reference so later code cannot touch the disposed instance.
 /// </summary>
 public static void AfterClass()
 {
     Dir.Dispose();
     Dir = null;    // drop the static reference; a lingering one would mask the disposal
 }
예제 #31
0
        /// <summary>
        /// Indexes mail files from the configured folder (date range taken from
        /// <paramref name="arg"/>) into the "LuceneData" index next to the executable,
        /// running the per-file work in parallel.
        /// </summary>
        /// <param name="arg">Date range plus the IsReIndex flag (wipe and rebuild vs. incremental).</param>
        /// <returns>
        /// A <see cref="WorkerThreadResult"/> carrying the processed files, the number of
        /// skipped duplicates, and a flattened error log. FileDone/ErrLog are only
        /// populated when the parallel loop ran to completion.
        /// </returns>
        public WorkerThreadResult RunMT(bW1Arg arg)
        {
            WorkerThreadResult result = new WorkerThreadResult();

            string indexPath = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(System.Windows.Forms.Application.ExecutablePath), "LuceneData");

            Lucene.Net.Store.Directory directory = Lucene.Net.Store.FSDirectory.Open(indexPath);
            Lucene.Net.Analysis.Standard.StandardAnalyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);

            Worker workerObject = new Worker();

            workerObject.Writer      = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
            workerObject.ConStackLog = ErrLog;
            workerObject.SetRAMBufferSizeMB();

            List <FileMeta> LstMsgFile = null;
            bool            completed  = false;

            try
            {
                LstMsgFile = MyIO.GetFileMsgFromTo(MyConfig.GetMailFolder(), arg.DFrom, arg.DTo);

                if (arg.IsReIndex)
                {
                    workerObject.DeleteAllDocuments();
                }
                else
                {
                    // Drop files that are already present in the index so an
                    // incremental run does not create duplicate documents.
                    using (Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(directory))
                    {
                        for (int i = LstMsgFile.Count - 1; i >= 0; i--)
                        {
                            if (MyLucene.CheckDocExist(searcher, LstMsgFile[i]))
                            {
                                LstMsgFile.RemoveAt(i);
                                result.IncreaseNumDupFiles();
                            }
                        }
                    }
                }

                // Index the remaining files in parallel.
                ParallelLoopResult pResult = Parallel.ForEach(LstMsgFile, (currentFileMeta) => workerObject.DoWork(currentFileMeta));
                completed = pResult.IsCompleted;
            }
            finally
            {
                // BUGFIX: the original only disposed these on the success path, leaking
                // the writer, analyzer, and directory (and leaving the index write-locked)
                // whenever the parallel loop faulted or did not complete.
                workerObject.DisposeWriter();
                analyzer.Dispose();
                directory.Dispose();
            }

            if (completed)
            {
                result.FileDone = LstMsgFile;

                // BUGFIX: the original enumerated ErrLog with foreach while popping from
                // it, never using the enumerated element — the popped item could diverge
                // from the enumerated one. Draining with a TryPop loop is the correct idiom.
                StringBuilder sb = new StringBuilder();
                int           i  = 0;
                string        item;
                while (ErrLog.TryPop(out item))
                {
                    i++;
                    sb.Append(i.ToString());
                    sb.Append(". ");
                    sb.Append(item);
                    sb.Append(Environment.NewLine);
                }
                result.ErrLog = sb.ToString();
            }
            return(result);
        }