GetReader() Public Method

Expert: Returns a read-only reader covering all committed as well as uncommitted changes to the index. This provides "near real-time" searching, in that changes made during an IndexWriter session can be quickly made available for searching without closing the writer or calling Commit().

Note that this is functionally equivalent to calling Commit() and then using IndexReader.Open(string) to open a new reader, but the turnaround time of this method should be faster since it avoids the potentially costly Commit().

You must close the IndexReader returned by this method once you are done using it.
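
As a rough illustration of the points above, here is a minimal sketch (assuming the Lucene.Net 3.x-style API used in the examples below; namespaces are omitted as in those examples, and the directory, analyzer, field name, and document are made up for this sketch):

    var dir = new RAMDirectory();
    var writer = new IndexWriter(dir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);

    var doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc);

    // Near real-time: the uncommitted document is already visible to this reader,
    // without closing the writer or calling Commit().
    IndexReader nrtReader = writer.GetReader();
    var searcher = new IndexSearcher(nrtReader);
    int hits = searcher.Search(new TermQuery(new Term("id", "1")), 1).TotalHits; // 1

    // Functionally equivalent, but typically slower, commit-based alternative:
    // writer.Commit();
    // IndexReader committedReader = IndexReader.Open(dir, true); // true = read-only

    // The caller owns the returned reader and must close/dispose it when done
    // (Dispose() in 3.x and later, Close() in older versions).
    nrtReader.Dispose();
    writer.Dispose();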

It's near real-time because there is no hard guarantee on how quickly you can get a new reader after making changes with IndexWriter. You'll have to experiment in your situation to determine if it's fast enough. As this is a new and experimental feature, please report back on your findings so we can learn, improve and iterate.

The resulting reader supports IndexReader.Reopen(), but that call will simply forward back to this method (though this may change in the future).
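
For instance (a sketch continuing the snippet above; `anotherDoc` is just a placeholder for any further change made through the writer):

    // Reopen() on the NRT reader forwards back to writer.GetReader().
    writer.AddDocument(anotherDoc);             // further uncommitted change

    IndexReader newReader = nrtReader.Reopen(); // effectively another GetReader() call
    if (newReader != nrtReader)
    {
        nrtReader.Dispose();                    // the old reader still has to be closed
        nrtReader = newReader;
    }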

The very first time this method is called, this writer instance will make every effort to pool the readers that it opens for doing merges, applying deletes, etc. This means additional resources (RAM, file descriptors, CPU time) will be consumed.

For lower latency on reopening a reader, you should call SetMergedSegmentWarmer to pre-warm a newly merged segment before it's committed to the index. This is important for minimizing index-to-search delay after a large merge.
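
For example, a rough warmer sketch (assuming the IndexWriter.IndexReaderWarmer base class and Warm(IndexReader) override that the warmer examples further down rely on; the class name and the warm-up query are made up, and whether you use the SetMergedSegmentWarmer method or the MergedSegmentWarmer property depends on the Lucene.Net version):

    // Hypothetical warmer: touch a newly merged segment before it is committed
    // to the index, so an NRT reader that includes it is cheap to open.
    class SimpleWarmer : IndexWriter.IndexReaderWarmer
    {
        public override void Warm(IndexReader reader)
        {
            // Illustrative warm-up only: run a cheap query against the new segment
            // so its files and caches are loaded ahead of the first real search.
            var searcher = new IndexSearcher(reader);
            searcher.Search(new TermQuery(new Term("text", "warmup")), 1);
        }
    }

    // Registration (one of the two, depending on the Lucene.Net version):
    //   writer.SetMergedSegmentWarmer(new SimpleWarmer());
    //   writer.MergedSegmentWarmer = new SimpleWarmer();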

If an addIndexes* call is running in another thread, then this reader will only search those segments from the foreign index that have been successfully copied over, so far.

NOTE: Once the writer is closed, any outstanding readers may continue to be used. However, if you attempt to reopen any of those readers, you'll hit an AlreadyClosedException.

NOTE: This API is experimental and might change in incompatible ways in the next release.

public GetReader ( ) : IndexReader
return IndexReader
Example #1
        public void testMissingTerms()
        {
            String fieldName = "field1";
            Directory rd = new RAMDirectory();
            var w = new IndexWriter(rd, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 100; i++)
            {
                var doc = new Document();
                int term = i*10; //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();
            w.Close();

            TermsFilter tf = new TermsFilter();
            tf.AddTerm(new Term(fieldName, "19"));
            FixedBitSet bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

            tf.AddTerm(new Term(fieldName, "20"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

            tf.AddTerm(new Term(fieldName, "10"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            tf.AddTerm(new Term(fieldName, "00"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            reader.Close();
            rd.Close();
        }
Example #2
        public void TestReadersWriters()
        {
            Directory dir;
            
            using(dir = new RAMDirectory())
            {
                Document doc;
                IndexWriter writer;
                IndexReader reader;

                using (writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    Field field = new Field("name", "value", Field.Store.YES,Field.Index.ANALYZED);
                    doc = new Document();
                    doc.Add(field);
                    writer.AddDocument(doc);
                    writer.Commit();

                    using (reader = writer.GetReader())
                    {
                        IndexReader r1 = reader.Reopen();
                    }

                    Assert.Throws<AlreadyClosedException>(() => reader.Reopen(), "IndexReader shouldn't be open here");
                }
                
                Assert.Throws<AlreadyClosedException>(() => writer.AddDocument(doc), "IndexWriter shouldn't be open here");

                Assert.IsTrue(dir.isOpen_ForNUnit, "RAMDirectory");
            }
            Assert.IsFalse(dir.isOpen_ForNUnit, "RAMDirectory");
        }
Example #3
		public virtual void  TestMultiValueSource()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
			Document doc = new Document();
			Field f = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
			doc.Add(f);
			
			for (int i = 0; i < 17; i++)
			{
				f.SetValue("" + i);
				w.AddDocument(doc);
				w.Commit();
			}
			
			IndexReader r = w.GetReader();
			w.Close();
			
			Assert.IsTrue(r.GetSequentialSubReaders().Length > 1);
			
			ValueSource s1 = new IntFieldSource("field");
			DocValues v1 = s1.GetValues(r);
			DocValues v2 = new MultiValueSource(s1).GetValues(r);
			
			for (int i = 0; i < r.MaxDoc(); i++)
			{
				Assert.AreEqual(v1.IntVal(i), i);
				Assert.AreEqual(v2.IntVal(i), i);
			}
			
			Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();
			
			r.Close();
			dir.Close();
		}
Example #4
        public override Task Load(CancellationToken cancellationToken)
        {
            if (IndexReader.IndexExists(_directory))
            {
                IDictionary<string, string> commitUserData;
                using (IndexWriter indexWriter = new IndexWriter(_directory, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30), false, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    commitUserData = indexWriter.GetReader().CommitUserData;
                }

                string value;
                if (commitUserData != null && commitUserData.TryGetValue("commitTimeStamp", out value))
                {
                    Value = DateTime.ParseExact(value, "o", CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
                }
                else
                {
                    Value = _defaultValue;
                }
            }
            else
            {
                Value = _defaultValue;
            }
            Trace.TraceInformation("LuceneCursor.Load: {0}", this);
            return Task.FromResult(true);
        }
Example #5
        public void Test_IndexReader_IsCurrent()
        {
            RAMDirectory ramDir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(ramDir, new KeywordAnalyzer(), true, new IndexWriter.MaxFieldLength(1000));
            Field field = new Field("TEST", "mytest", Field.Store.YES, Field.Index.ANALYZED);
            Document doc = new Document();
            doc.Add(field);
            writer.AddDocument(doc);

            IndexReader reader = writer.GetReader();

            writer.DeleteDocuments(new Lucene.Net.Index.Term("TEST", "mytest"));

            Assert.IsFalse(reader.IsCurrent());

            int resCount1 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
            Assert.AreEqual(1, resCount1);

            writer.Commit();

            Assert.IsFalse(reader.IsCurrent());

            int resCount2 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
            Assert.AreEqual(1, resCount2, "Reopen not invoked yet, resultCount must still be 1.");

            reader = reader.Reopen();
            Assert.IsTrue(reader.IsCurrent());

            int resCount3 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")), 100).TotalHits;
            Assert.AreEqual(0, resCount3, "After reopen, resultCount must be 0.");

            reader.Close();
            writer.Dispose();
        }
Example #6
		public virtual void  TestSorting()
		{
			Directory directory = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			writer.SetMaxBufferedDocs(2);
			writer.SetMergeFactor(1000);
			writer.AddDocument(Adoc(new System.String[]{"id", "a", "title", "ipod", "str_s", "a"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "b", "title", "ipod ipod", "str_s", "b"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "c", "title", "ipod ipod ipod", "str_s", "c"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "x", "title", "boosted", "str_s", "x"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "y", "title", "boosted boosted", "str_s", "y"}));
			writer.AddDocument(Adoc(new System.String[]{"id", "z", "title", "boosted boosted boosted", "str_s", "z"}));
			
			IndexReader r = writer.GetReader();
			writer.Close();
			
			IndexSearcher searcher = new IndexSearcher(r);
			
			RunTest(searcher, true);
			RunTest(searcher, false);
			
			searcher.Close();
			r.Close();
			directory.Close();
		}
Example #7
 public IndexStore(Type modelType, Directory directory, Analyzer analyzer)
 {
     ModelType = modelType;
     _directory = directory;
     _analyzer = analyzer;
     _writer = new IndexWriter(_directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
     _searcher = new IndexSearcher(_writer.GetReader());
 }
Example #8
        //function to initialize the spell checker functionality
        public void SpellCheckerInit()
        {
            spellchecker = new SpellChecker.Net.Search.Spell.SpellChecker(spellCheckIndexStorage);

            // To index a field of a user index:
            indexReader = writer.GetReader();
            spellchecker.IndexDictionary(new LuceneDictionary(indexReader, WORD_FN));
        }
Example #9
 public SearcherManager(IndexWriter writer, bool applyAllDeletes = true, ISearcherWarmer warmer = null)
 {
     _warmer = warmer;
     _currentSearcher = new IndexSearcher(writer.GetReader());
     if (_warmer != null)
     {
         writer.MergedSegmentWarmer = new WarmerWrapper(_warmer);
     }
 }
Example #10
 protected internal override DirectoryReader DoOpenIfChanged(IndexWriter writer, bool applyAllDeletes)
 {
     EnsureOpen();
     if (writer == this.writer && applyAllDeletes == this.applyAllDeletes)
     {
         return(DoOpenFromWriter(null));
     }
     else
     {
         return(writer.GetReader(applyAllDeletes));
     }
 }
Example #11
        public void TestDeleteByTermIsCurrent()
        {
            // get reader
            IndexReader reader = writer.GetReader();

            // assert index has a document and reader is up2date
            Assert.AreEqual(1, writer.NumDocs(), "One document should be in the index");
            Assert.IsTrue(reader.IsCurrent(), "Document added, reader should be stale ");

            // remove document
            Term idTerm = new Term("UUID", "1");

            writer.DeleteDocuments(idTerm);
            writer.Commit();

            // assert document has been deleted (index changed), reader is stale
            Assert.AreEqual(0, writer.NumDocs(), "Document should be removed");
            Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");

            reader.Close();
        }
Example #12
        public override void  SetUp()
        {
            base.SetUp();
            RAMDirectory directory = new RAMDirectory();
            IndexWriter  writer    = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            for (int i = 0; i < docFields.Length; i++)
            {
                Document document = new Document();
                document.Add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
                writer.AddDocument(document, null);
            }
            writer.Close();
            searcher = new IndexSearcher(directory, true, null);

            // Make big index
            dir2 = new MockRAMDirectory(directory);

            // First multiply small test index:
            mulFactor = 1;
            int docCount = 0;

            do
            {
                Directory   copy        = new RAMDirectory(dir2, null);
                IndexWriter indexWriter = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
                indexWriter.AddIndexesNoOptimize(null, new[] { copy });
                docCount = indexWriter.MaxDoc();
                indexWriter.Close();
                mulFactor *= 2;
            } while (docCount < 3000);

            IndexWriter w   = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
            Document    doc = new Document();

            doc.Add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                w.AddDocument(doc, null);
            }
            doc = new Document();
            doc.Add(new Field("field2", "big bad bug", Field.Store.NO, Field.Index.ANALYZED));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                w.AddDocument(doc, null);
            }
            // optimize to 1 segment
            w.Optimize(null);
            reader = w.GetReader(null);
            w.Close();
            bigSearcher = new IndexSearcher(reader);
        }
Example #13
        [Slow] // (occasionally)
        public virtual void TestIndexing()
        {
            Directory mainDir = NewDirectory();
            var       wrapper = mainDir as MockDirectoryWrapper;

            if (wrapper != null)
            {
                wrapper.AssertNoDeleteOpenFile = true;
            }
            var         writer = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false, 2)));
            IndexReader reader = writer.GetReader(); // start pooling readers

            reader.Dispose();
            var indexThreads = new RunThread[4];

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x]      = new RunThread(this, x % 2, writer);
                indexThreads[x].Name = "Thread " + x;
                indexThreads[x].Start();
            }
            long startTime = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
            long duration  = 1000;

            while (((J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) - startTime) < duration) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results
            {
                Thread.Sleep(100);
            }
            int delCount = 0;
            int addCount = 0;

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x].run = false;
                Assert.IsNull(indexThreads[x].ex, "Exception thrown: " + indexThreads[x].ex);
                addCount += indexThreads[x].addCount;
                delCount += indexThreads[x].delCount;
            }
            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x].Join();
            }
            for (int x = 0; x < indexThreads.Length; x++)
            {
                Assert.IsNull(indexThreads[x].ex, "Exception thrown: " + indexThreads[x].ex);
            }
            //System.out.println("addCount:"+addCount);
            //System.out.println("delCount:"+delCount);
            writer.Dispose();
            mainDir.Dispose();
        }
Example #14
        public virtual void TestIndexing()
        {
            Directory mainDir = NewDirectory();
            var       wrapper = mainDir as MockDirectoryWrapper;

            if (wrapper != null)
            {
                wrapper.AssertNoDeleteOpenFile = true;
            }
            var         writer = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false, 2)));
            IndexReader reader = writer.GetReader(); // start pooling readers

            reader.Dispose();
            var indexThreads = new RunThread[4];

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x]      = new RunThread(this, x % 2, writer);
                indexThreads[x].Name = "Thread " + x;
                indexThreads[x].Start();
            }
            long startTime = Environment.TickCount;
            long duration  = 1000;

            while ((Environment.TickCount - startTime) < duration)
            {
                Thread.Sleep(100);
            }
            int delCount = 0;
            int addCount = 0;

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x].Run_Renamed = false;
                Assert.IsNull(indexThreads[x].Ex, "Exception thrown: " + indexThreads[x].Ex);
                addCount += indexThreads[x].AddCount;
                delCount += indexThreads[x].DelCount;
            }
            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x].Join();
            }
            for (int x = 0; x < indexThreads.Length; x++)
            {
                Assert.IsNull(indexThreads[x].Ex, "Exception thrown: " + indexThreads[x].Ex);
            }
            //System.out.println("addCount:"+addCount);
            //System.out.println("delCount:"+delCount);
            writer.Dispose();
            mainDir.Dispose();
        }
Example #15
        private ScoreDoc[] Search(IndexWriter writer, Query q, int n)
        {
            IndexReader   reader   = writer.GetReader();
            IndexSearcher searcher = NewSearcher(reader);

            try
            {
                return(searcher.Search(q, null, n).ScoreDocs);
            }
            finally
            {
                reader.Dispose();
            }
        }
Example #16
 public void Test_SegmentTermVector_IndexOf()
 {
     Lucene.Net.Store.RAMDirectory directory = new Lucene.Net.Store.RAMDirectory();
     Lucene.Net.Analysis.Analyzer analyzer = new Lucene.Net.Analysis.WhitespaceAnalyzer();
     Lucene.Net.Index.IndexWriter writer = new Lucene.Net.Index.IndexWriter(directory, analyzer, Lucene.Net.Index.IndexWriter.MaxFieldLength.LIMITED);
     Lucene.Net.Documents.Document document = new Lucene.Net.Documents.Document();
     document.Add(new Lucene.Net.Documents.Field("contents", new System.IO.StreamReader(new System.IO.MemoryStream(System.Text.Encoding.ASCII.GetBytes("a_ a0"))), Lucene.Net.Documents.Field.TermVector.WITH_OFFSETS));
     writer.AddDocument(document);
     Lucene.Net.Index.IndexReader reader = writer.GetReader();
     Lucene.Net.Index.TermPositionVector tpv = reader.GetTermFreqVector(0, "contents") as Lucene.Net.Index.TermPositionVector;
     //Console.WriteLine("tpv: " + tpv);
     int index = tpv.IndexOf("a_");
     Assert.AreEqual(index, 1, "See the issue: LUCENENET-183");
 }
Example #17
        public void Test_SegmentTermVector_IndexOf()
        {
            Lucene.Net.Store.RAMDirectory directory = new Lucene.Net.Store.RAMDirectory();
            Lucene.Net.Analysis.Analyzer  analyzer  = new Lucene.Net.Analysis.WhitespaceAnalyzer();
            Lucene.Net.Index.IndexWriter  writer    = new Lucene.Net.Index.IndexWriter(directory, analyzer, Lucene.Net.Index.IndexWriter.MaxFieldLength.LIMITED);
            Lucene.Net.Documents.Document document  = new Lucene.Net.Documents.Document();
            document.Add(new Lucene.Net.Documents.Field("contents", new System.IO.StreamReader(new System.IO.MemoryStream(System.Text.Encoding.ASCII.GetBytes("a_ a0"))), Lucene.Net.Documents.Field.TermVector.WITH_OFFSETS));
            writer.AddDocument(document);
            Lucene.Net.Index.IndexReader        reader = writer.GetReader();
            Lucene.Net.Index.TermPositionVector tpv    = reader.GetTermFreqVector(0, "contents") as Lucene.Net.Index.TermPositionVector;
            //Console.WriteLine("tpv: " + tpv);
            int index = tpv.IndexOf("a_");

            Assert.AreEqual(index, 1, "See the issue: LUCENENET-183");
        }
Example #18
        /// <summary>
        /// Create an instance of the class.
        /// </summary>
        /// <param name="indexPath"></param>
        /// <param name="create"></param>
        /// <param name="ignoreCase"></param>
        public NGramLucene(string indexPath, bool create, bool ignoreCase)
        {
            _indexPath = indexPath;
            var emptyStopWords = new HashSet<string>();
            var dirInfo = new DirectoryInfo(_indexPath);

            // These all must be disposed
            _directory = FSDirectory.Open(dirInfo);

            _analyzer = ignoreCase
                ? new StandardAnalyzer(Version.LUCENE_30, emptyStopWords)
                : new CaseSensitiveStandardAnalyzer(Version.LUCENE_30, emptyStopWords);

            _writer = new IndexWriter(_directory, _analyzer, create, IndexWriter.MaxFieldLength.UNLIMITED);
            _reader = _writer.GetReader();
        }
Example #19
            public DeleteThreads(TestIndexWriterReader enclosingInstance, IndexWriter mainWriter)
            {
                InitBlock(enclosingInstance);
                this.mainWriter = mainWriter;
                IndexReader reader = mainWriter.GetReader();
                int         maxDoc = reader.MaxDoc();

                random = Enclosing_Instance.NewRandom();
                int iter = random.Next(maxDoc);

                for (int x = 0; x < iter; x++)
                {
                    int           doc = random.Next(iter);
                    System.String id  = reader.Document(doc).Get("id");
                    toDeleteTerms.Add(new Term("id", id));
                }
            }
Example #20
        public void NearRealTimeTestTeste()
        {
            var diretorio = new RAMDirectory();
            const int quantidadeItensEsperados = 10;

            using (var escritorIndice = new IndexWriter(diretorio, LuceneUtil.ObterAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED))
            {
                for (int i = 0; i < quantidadeItensEsperados; i++)
                {
                    var documento = new Document();
                    documento.Add(new Field("id", i.ToString(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
                    documento.Add(new Field("text", "aaa", Field.Store.NO, Field.Index.ANALYZED));
                    escritorIndice.AddDocument(documento);
                }

                var leitorIndice = escritorIndice.GetReader();

                var pesquisa = new IndexSearcher(leitorIndice);
                var consulta = new TermQuery(new Term("text", "aaa"));
                var resultado = pesquisa.Search(consulta, 1);

                var mensagemErro = string.Format("Resultado não encontrou {0} itens que se encaixavam", quantidadeItensEsperados);
                Assert.AreEqual(quantidadeItensEsperados, resultado.TotalHits, mensagemErro);

                var outroDocumento = new Document();
                escritorIndice.DeleteDocuments(new Term("id", "7"));
                outroDocumento.Add(new Field("id", "11", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
                outroDocumento.Add(new Field("text", "bbb", Field.Store.NO, Field.Index.ANALYZED));
                escritorIndice.AddDocument(outroDocumento);

                var novoLeitor = leitorIndice.Reopen();
                Assert.AreNotSame(novoLeitor, leitorIndice);
                leitorIndice.Close();

                pesquisa = new IndexSearcher(novoLeitor);
                resultado = pesquisa.Search(consulta, 10);
                Assert.AreEqual(9, resultado.TotalHits, string.Format("Não encontrou {0} como quantidade esperada.", quantidadeItensEsperados - 1));

                consulta = new TermQuery(new Term("text", "bbb"));
                resultado = pesquisa.Search(consulta, 1);
                Assert.AreEqual(1, resultado.TotalHits);

                novoLeitor.Close();
            }
        }
Example #21
        public void TestReadersWriters()
        {
            Directory dir;
            
            using(dir = new RAMDirectory())
            {
                Document doc;
                IndexWriter writer;
                IndexReader reader;

                using (writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true))
                {
                    Field field = new Field("name", "value", Field.Store.YES,Field.Index.ANALYZED);
                    doc = new Document();
                    doc.Add(field);
                    writer.AddDocument(doc);
                    writer.Commit();

                    using (reader = writer.GetReader())
                    {
                        IndexReader r1 =  reader.Reopen();
                    }

                    try
                    {
                        IndexReader r2 = reader.Reopen();
                        Assert.Fail("IndexReader shouldn't be open here");
                    }
                    catch (AlreadyClosedException)
                    {
                    }
                }
                try
                {
                    writer.AddDocument(doc);
                    Assert.Fail("IndexWriter shouldn't be open here");
                }
                catch (AlreadyClosedException)
                {
                }

                Assert.IsTrue(dir.isOpen_ForNUnit, "RAMDirectory");
            }
            Assert.IsFalse(dir.isOpen_ForNUnit, "RAMDirectory");
        }
Example #22
        public virtual void TestEmbeddedFFFF()
        {
            Directory   d   = NewDirectory();
            IndexWriter w   = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
            Document    doc = new Document();

            doc.Add(NewTextField("field", "a a\uffffb", Field.Store.NO));
            w.AddDocument(doc);
            doc = new Document();
            doc.Add(NewTextField("field", "a", Field.Store.NO));
            w.AddDocument(doc);
            IndexReader r = w.GetReader();

            Assert.AreEqual(1, r.DocFreq(new Term("field", "a\uffffb")));
            r.Dispose();
            w.Dispose();
            d.Dispose();
        }
Example #23
        public virtual void  TestIndexing()
        {
            Directory   mainDir = new MockRAMDirectory();
            IndexWriter writer  = new IndexWriter(mainDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);

            writer.UseCompoundFile = false;
            IndexReader reader = writer.GetReader(); // start pooling readers

            reader.Close();
            writer.MergeFactor = 2;
            writer.SetMaxBufferedDocs(10);
            RunThread[] indexThreads = new RunThread[4];
            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x]      = new RunThread(this, x % 2, writer);
                indexThreads[x].Name = "Thread " + x;
                indexThreads[x].Start();
            }
            long startTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
            long duration  = 5 * 1000;

            while (((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) - startTime) < duration)
            {
                System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 100));
            }
            int delCount = 0;
            int addCount = 0;

            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x].run_Renamed_Field = false;
                Assert.IsTrue(indexThreads[x].ex == null);
                addCount += indexThreads[x].addCount;
                delCount += indexThreads[x].delCount;
            }
            for (int x = 0; x < indexThreads.Length; x++)
            {
                indexThreads[x].Join();
            }
            //System.out.println("addCount:"+addCount);
            //System.out.println("delCount:"+delCount);
            writer.Close();
            mainDir.Close();
        }
Example #24
        public void TestSegmentWarmer()
        {
            Directory   dir = new MockRAMDirectory();
            IndexWriter w   = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);

            w.SetMaxBufferedDocs(2);
            w.GetReader(null).Close();
            w.MergedSegmentWarmer = new AnonymousIndexReaderWarmer();

            Document doc = new Document();

            doc.Add(new Field("foo", "bar", Field.Store.YES, Field.Index.NOT_ANALYZED));
            for (int i = 0; i < 20; i++)
            {
                w.AddDocument(doc, null);
            }
            w.WaitForMerges();
            w.Close();
            dir.Close();
        }
Example #25
        public virtual void TestTermDocsEnum()
        {
            Directory   dir = NewDirectory();
            IndexWriter w   = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));

            Documents.Document d = new Documents.Document();
            d.Add(NewStringField("f", "j", Field.Store.NO));
            w.AddDocument(d);
            w.Commit();
            w.AddDocument(d);
            IndexReader r = w.GetReader();

            w.Dispose();
            DocsEnum de = MultiFields.GetTermDocsEnum(r, null, "f", new BytesRef("j"));

            Assert.AreEqual(0, de.NextDoc());
            Assert.AreEqual(1, de.NextDoc());
            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, de.NextDoc());
            r.Dispose();
            dir.Dispose();
        }
Example #26
        public virtual void TestSeparateEnums()
        {
            Directory   dir = NewDirectory();
            IndexWriter w   = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));

            Documents.Document d = new Documents.Document();
            d.Add(NewStringField("f", "j", Field.Store.NO));
            w.AddDocument(d);
            w.Commit();
            w.AddDocument(d);
            IndexReader r = w.GetReader();

            w.Dispose();
            DocsEnum d1 = TestUtil.Docs(Random, r, "f", new BytesRef("j"), null, null, DocsFlags.NONE);
            DocsEnum d2 = TestUtil.Docs(Random, r, "f", new BytesRef("j"), null, null, DocsFlags.NONE);

            Assert.AreEqual(0, d1.NextDoc());
            Assert.AreEqual(0, d2.NextDoc());
            r.Dispose();
            dir.Dispose();
        }
Example #27
		public virtual void  TestIndexing()
		{
			Directory mainDir = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.UseCompoundFile = false;
			IndexReader reader = writer.GetReader(); // start pooling readers
			reader.Close();
			writer.MergeFactor = 2;
			writer.SetMaxBufferedDocs(10);
			RunThread[] indexThreads = new RunThread[4];
			for (int x = 0; x < indexThreads.Length; x++)
			{
				indexThreads[x] = new RunThread(this, x % 2, writer);
				indexThreads[x].Name = "Thread " + x;
				indexThreads[x].Start();
			}
			long startTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
			long duration = 5 * 1000;
			while (((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) - startTime) < duration)
			{
				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 100));
			}
			int delCount = 0;
			int addCount = 0;
			for (int x = 0; x < indexThreads.Length; x++)
			{
				indexThreads[x].run_Renamed_Field = false;
				Assert.IsTrue(indexThreads[x].ex == null);
				addCount += indexThreads[x].addCount;
				delCount += indexThreads[x].delCount;
			}
			for (int x = 0; x < indexThreads.Length; x++)
			{
				indexThreads[x].Join();
			}
			//System.out.println("addCount:"+addCount);
			//System.out.println("delCount:"+delCount);
			writer.Close();
			mainDir.Close();
		}
Example #28
 public virtual DirectoryReader GetReader(bool applyDeletions)
 {
     getReaderCalled = true;
     if (r.Next(20) == 2)
     {
         _DoRandomForceMerge();
     }
     // If we are writing with PreFlexRW, force a full
     // IndexReader.open so terms are sorted in codepoint
     // order during searching:
     if (!applyDeletions || !codec.Name.Equals("Lucene3x", StringComparison.Ordinal) && r.NextBoolean())
     {
         if (LuceneTestCase.VERBOSE)
         {
             Console.WriteLine("RIW.getReader: use NRT reader");
         }
         if (r.Next(5) == 1)
         {
             IndexWriter.Commit();
         }
         return(IndexWriter.GetReader(applyDeletions));
     }
     else
     {
         if (LuceneTestCase.VERBOSE)
         {
             Console.WriteLine("RIW.getReader: open new reader");
         }
         IndexWriter.Commit();
         if (r.NextBoolean())
         {
             return(DirectoryReader.Open(IndexWriter.Directory, TestUtil.NextInt32(r, 1, 10)));
         }
         else
         {
             return(IndexWriter.GetReader(applyDeletions));
         }
     }
 }
Example #29
    /// <summary>
    /// This method indexes the content that is sent across to it. Each piece of content (or "document")
    /// that is indexed has to have a unique identifier (so that the caller can take action based on the
    /// document id). Therefore, this method accepts key-value pairs in the form of a dictionary. The key
    /// is a ulong which uniquely identifies the string to be indexed. The string itself is the value
    /// within the dictionary for that key. Be aware that stop words (like the, this, at, etc.) are _not_
    /// indexed.
    /// </summary>
    /// <param name="txtIdPairToBeIndexed">A dictionary of key-value pairs that are sent by the caller
    /// to uniquely identify each string that is to be indexed.</param>
    /// <returns>The number of documents indexed.</returns>
    public int Index (Dictionary<long, string> txtIdPairToBeIndexed) {

		using (Directory directory = FSDirectory.Open(_indexDir))
		using (Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30))
		using (IndexWriter writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
		using (IndexReader reader = writer.GetReader())
		{

			//writer.DeleteAll();


			Dictionary<long, string>.KeyCollection keys = txtIdPairToBeIndexed.Keys;

			foreach (long id in keys)
			{
				char[] delimiter = { ';' };
				string[] text = txtIdPairToBeIndexed[id].Split(delimiter);
				Document document = new Document();

				Field title = new	Field("title", text[0], Field.Store.YES, Field.Index.NO);
				Field type = new Field("type", text[1], Field.Store.YES, Field.Index.NO);
				Field idField = new Field("date", (id).ToString(), Field.Store.YES, Field.Index.ANALYZED);

				document.Add(title);
				document.Add(type);
				document.Add(idField);

				writer.AddDocument(document);
			}

			int numIndexed = writer.GetDocCount(0);//TODO check number
			writer.Optimize();
			writer.Flush(true,true,true);

			return numIndexed;
		}

		}
Example #30
File: TestFlex.cs Project: ywscr/lucenenet
        public virtual void TestNonFlex()
        {
            Directory d = NewDirectory();

            const int DOC_COUNT = 177;

            IndexWriter w = new IndexWriter(d, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMaxBufferedDocs(7).SetMergePolicy(NewLogMergePolicy()));

            for (int iter = 0; iter < 2; iter++)
            {
                if (iter == 0)
                {
                    Documents.Document doc = new Documents.Document();
                    doc.Add(NewTextField("field1", "this is field1", Field.Store.NO));
                    doc.Add(NewTextField("field2", "this is field2", Field.Store.NO));
                    doc.Add(NewTextField("field3", "aaa", Field.Store.NO));
                    doc.Add(NewTextField("field4", "bbb", Field.Store.NO));
                    for (int i = 0; i < DOC_COUNT; i++)
                    {
                        w.AddDocument(doc);
                    }
                }
                else
                {
                    w.ForceMerge(1);
                }

                IndexReader r = w.GetReader();

                TermsEnum terms = MultiFields.GetTerms(r, "field3").GetEnumerator();
                Assert.AreEqual(TermsEnum.SeekStatus.END, terms.SeekCeil(new BytesRef("abc")));
                r.Dispose();
            }

            w.Dispose();
            d.Dispose();
        }
Example #31
        // [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
        // LUCENE-1727: make sure doc fields are stored in order
        public virtual void TestStoredFieldsOrder()
        {
            Directory   d   = NewDirectory();
            IndexWriter w   = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            Document    doc = new Document();

            FieldType customType = new FieldType();

            customType.IsStored = true;
            doc.Add(NewField("zzz", "a b c", customType));
            doc.Add(NewField("aaa", "a b c", customType));
            doc.Add(NewField("zzz", "1 2 3", customType));
            w.AddDocument(doc);
            IndexReader r    = w.GetReader();
            Document    doc2 = r.Document(0);
            IEnumerator <IIndexableField> it = doc2.Fields.GetEnumerator();

            Assert.IsTrue(it.MoveNext());
            Field f = (Field)it.Current;

            Assert.AreEqual(f.Name, "zzz");
            Assert.AreEqual(f.GetStringValue(), "a b c");

            Assert.IsTrue(it.MoveNext());
            f = (Field)it.Current;
            Assert.AreEqual(f.Name, "aaa");
            Assert.AreEqual(f.GetStringValue(), "a b c");

            Assert.IsTrue(it.MoveNext());
            f = (Field)it.Current;
            Assert.AreEqual(f.Name, "zzz");
            Assert.AreEqual(f.GetStringValue(), "1 2 3");
            Assert.IsFalse(it.MoveNext());
            r.Dispose();
            w.Dispose();
            d.Dispose();
        }
Example #32
        public virtual void  TestMergeWarmer()
        {
            Directory   dir1   = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);

            writer.SetInfoStream(infoStream);

            // create the index
            CreateIndexNoClose(false, "test", writer);

            // get a reader to put writer into near real-time mode
            IndexReader r1 = writer.GetReader();

            // Enroll warmer
            MyWarmer warmer = new MyWarmer();

            writer.SetMergedSegmentWarmer(warmer);
            writer.SetMergeFactor(2);
            writer.SetMaxBufferedDocs(2);

            for (int i = 0; i < 10; i++)
            {
                writer.AddDocument(CreateDocument(i, "test", 4));
            }
            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).Sync();

            Assert.IsTrue(warmer.warmCount > 0);
            int count = warmer.warmCount;

            writer.AddDocument(CreateDocument(17, "test", 4));
            writer.Optimize();
            Assert.IsTrue(warmer.warmCount > count);

            writer.Close();
            r1.Close();
            dir1.Close();
        }
Example #33
        public virtual void  TestAfterCommit()
        {
            Directory   dir1   = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);

            writer.SetInfoStream(infoStream);

            // create the index
            CreateIndexNoClose(false, "test", writer);

            // get a reader to put writer into near real-time mode
            IndexReader r1 = writer.GetReader();

            _TestUtil.CheckIndex(dir1);
            writer.Commit();
            _TestUtil.CheckIndex(dir1);
            Assert.AreEqual(100, r1.NumDocs());

            for (int i = 0; i < 10; i++)
            {
                writer.AddDocument(CreateDocument(i, "test", 4));
            }
            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).Sync();

            IndexReader r2 = r1.Reopen();

            if (r2 != r1)
            {
                r1.Close();
                r1 = r2;
            }
            Assert.AreEqual(110, r1.NumDocs());
            writer.Close();
            r1.Close();
            dir1.Close();
        }
Example #34
 private void ReplaceSearcher(IndexWriter writer)
 {
     currentIndexSearcherHolder.SetIndexSearcher(new IndexSearcher(writer.GetReader()));
 }
Example #35
        public virtual void TestRollingUpdates_Mem()
        {
            Random random = new Random(Random().Next());
            BaseDirectoryWrapper dir = NewDirectory();
            LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());

            //provider.register(new MemoryCodec());
            if ((!"Lucene3x".Equals(Codec.Default.Name)) && Random().NextBoolean())
            {
                Codec.Default =
                    TestUtil.AlwaysPostingsFormat(new MemoryPostingsFormat(Random().nextBoolean(), random.NextFloat()));
            }

            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);

            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
            int SIZE = AtLeast(20);
            int id = 0;
            IndexReader r = null;
            IndexSearcher s = null;
            int numUpdates = (int)(SIZE * (2 + (TEST_NIGHTLY ? 200 * Random().NextDouble() : 5 * Random().NextDouble())));
            if (VERBOSE)
            {
                Console.WriteLine("TEST: numUpdates=" + numUpdates);
            }
            int updateCount = 0;
            // TODO: sometimes update ids not in order...
            for (int docIter = 0; docIter < numUpdates; docIter++)
            {
                Documents.Document doc = docs.NextDoc();
                string myID = "" + id;
                if (id == SIZE - 1)
                {
                    id = 0;
                }
                else
                {
                    id++;
                }
                if (VERBOSE)
                {
                    Console.WriteLine("  docIter=" + docIter + " id=" + id);
                }
                ((Field)doc.GetField("docid")).StringValue = myID;

                Term idTerm = new Term("docid", myID);

                bool doUpdate;
                if (s != null && updateCount < SIZE)
                {
                    TopDocs hits = s.Search(new TermQuery(idTerm), 1);
                    Assert.AreEqual(1, hits.TotalHits);
                    doUpdate = !w.TryDeleteDocument(r, hits.ScoreDocs[0].Doc);
                    if (VERBOSE)
                    {
                        if (doUpdate)
                        {
                            Console.WriteLine("  tryDeleteDocument failed");
                        }
                        else
                        {
                            Console.WriteLine("  tryDeleteDocument succeeded");
                        }
                    }
                }
                else
                {
                    doUpdate = true;
                    if (VERBOSE)
                    {
                        Console.WriteLine("  no searcher: doUpdate=true");
                    }
                }

                updateCount++;

                if (doUpdate)
                {
                    w.UpdateDocument(idTerm, doc);
                }
                else
                {
                    w.AddDocument(doc);
                }

                if (docIter >= SIZE && Random().Next(50) == 17)
                {
                    if (r != null)
                    {
                        r.Dispose();
                    }

                    bool applyDeletions = Random().NextBoolean();

                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: reopen applyDeletions=" + applyDeletions);
                    }

                    r = w.GetReader(applyDeletions);
                    if (applyDeletions)
                    {
                        s = NewSearcher(r);
                    }
                    else
                    {
                        s = null;
                    }
                    Assert.IsTrue(!applyDeletions || r.NumDocs == SIZE, "applyDeletions=" + applyDeletions + " r.NumDocs=" + r.NumDocs + " vs SIZE=" + SIZE);
                    updateCount = 0;
                }
            }

            if (r != null)
            {
                r.Dispose();
            }

            w.Commit();
            Assert.AreEqual(SIZE, w.NumDocs());

            w.Dispose();

            TestIndexWriter.AssertNoUnreferencedFiles(dir, "leftover files after rolling updates");

            docs.Dispose();

            // LUCENE-4455:
            SegmentInfos infos = new SegmentInfos();
            infos.Read(dir);
            long totalBytes = 0;
            foreach (SegmentCommitInfo sipc in infos.Segments)
            {
                totalBytes += sipc.SizeInBytes();
            }
            long totalBytes2 = 0;
            foreach (string fileName in dir.ListAll())
            {
                if (!fileName.StartsWith(IndexFileNames.SEGMENTS))
                {
                    totalBytes2 += dir.FileLength(fileName);
                }
            }
            Assert.AreEqual(totalBytes2, totalBytes);
            dir.Dispose();
        }
Example #36
 public DeleteThreads(TestIndexWriterReader enclosingInstance, IndexWriter mainWriter)
 {
     InitBlock(enclosingInstance);
     this.mainWriter = mainWriter;
     IndexReader reader = mainWriter.GetReader();
     int maxDoc = reader.MaxDoc;
     random = Enclosing_Instance.NewRandom();
     int iter = random.Next(maxDoc);
     for (int x = 0; x < iter; x++)
     {
         int doc = random.Next(iter);
         System.String id = reader.Document(doc).Get("id");
         toDeleteTerms.Add(new Term("id", id));
     }
 }
Example #37
        public virtual void  TestUpdateDocument()
        {
            bool optimize = true;

            Directory   dir1   = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);

            // create the index
            CreateIndexNoClose(!optimize, "index1", writer);

            // writer.flush(false, true, true);

            // get a reader
            IndexReader r1 = writer.GetReader();

            Assert.IsTrue(r1.IsCurrent());

            System.String id10 = r1.Document(10).GetField("id").StringValue();

            Document newDoc = r1.Document(10);

            newDoc.RemoveField("id");
            newDoc.Add(new Field("id", System.Convert.ToString(8000), Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.UpdateDocument(new Term("id", id10), newDoc);
            Assert.IsFalse(r1.IsCurrent());

            IndexReader r2 = writer.GetReader();

            Assert.IsTrue(r2.IsCurrent());
            Assert.AreEqual(0, Count(new Term("id", id10), r2));
            Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r2));

            r1.Close();
            writer.Close();
            Assert.IsTrue(r2.IsCurrent());

            IndexReader r3 = IndexReader.Open(dir1);

            Assert.IsTrue(r3.IsCurrent());
            Assert.IsTrue(r2.IsCurrent());
            Assert.AreEqual(0, Count(new Term("id", id10), r3));
            Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r3));

            writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();

            doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            Assert.IsTrue(r2.IsCurrent());
            Assert.IsTrue(r3.IsCurrent());

            writer.Close();

            Assert.IsFalse(r2.IsCurrent());
            Assert.IsTrue(!r3.IsCurrent());

            r2.Close();
            r3.Close();

            dir1.Close();
        }
Example #38
            public override void Run()
            {
                DirectoryReader currentReader = null;
                Random          random        = LuceneTestCase.Random();

                try
                {
                    Document doc = new Document();
                    doc.Add(new TextField("id", "1", Field.Store.NO));
                    Writer.AddDocument(doc);
                    Holder.Reader = currentReader = Writer.GetReader(true);
                    Term term = new Term("id");
                    for (int i = 0; i < NumOps && !Holder.Stop; i++)
                    {
                        float nextOp = (float)random.NextDouble();
                        if (nextOp < 0.3)
                        {
                            term.Set("id", new BytesRef("1"));
                            Writer.UpdateDocument(term, doc);
                        }
                        else if (nextOp < 0.5)
                        {
                            Writer.AddDocument(doc);
                        }
                        else
                        {
                            term.Set("id", new BytesRef("1"));
                            Writer.DeleteDocuments(term);
                        }
                        if (Holder.Reader != currentReader)
                        {
                            Holder.Reader = currentReader;
                            if (Countdown)
                            {
                                Countdown = false;
                                Latch.Signal();
                            }
                        }
                        if (random.NextBoolean())
                        {
                            Writer.Commit();
                            DirectoryReader newReader = DirectoryReader.OpenIfChanged(currentReader);
                            if (newReader != null)
                            {
                                currentReader.DecRef();
                                currentReader = newReader;
                            }
                            if (currentReader.NumDocs == 0)
                            {
                                Writer.AddDocument(doc);
                            }
                        }
                    }
                }
                catch (Exception e)
                {
                    Failed = e;
                }
                finally
                {
                    Holder.Reader = null;
                    if (Countdown)
                    {
                        Latch.Signal();
                    }
                    if (currentReader != null)
                    {
                        try
                        {
                            currentReader.DecRef();
                        }
                        catch (IOException e)
                        {
                        }
                    }
                }
                if (VERBOSE)
                {
                    Console.WriteLine("writer stopped - forced by reader: " + Holder.Stop);
                }
            }
Example #39
 public virtual void  TestAddIndexes()
 {
     bool optimize = false;
     
     Directory dir1 = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.SetInfoStream(infoStream);
     // create the index
     CreateIndexNoClose(!optimize, "index1", writer);
     writer.Flush(false, true, true);
     
     // create a 2nd index
     Directory dir2 = new MockRAMDirectory();
     IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer2.SetInfoStream(infoStream);
     CreateIndexNoClose(!optimize, "index2", writer2);
     writer2.Close();
     
     IndexReader r0 = writer.GetReader();
     Assert.IsTrue(r0.IsCurrent());
     writer.AddIndexesNoOptimize(new Directory[]{dir2});
     Assert.IsFalse(r0.IsCurrent());
     r0.Close();
     
     IndexReader r1 = writer.GetReader();
     Assert.IsTrue(r1.IsCurrent());
     
     writer.Commit();
     Assert.IsFalse(r1.IsCurrent());
     
     Assert.AreEqual(200, r1.MaxDoc);
     
     int index2df = r1.DocFreq(new Term("indexname", "index2"));
     
     Assert.AreEqual(100, index2df);
     
     // verify the docs are from different indexes
     Document doc5 = r1.Document(5);
     Assert.AreEqual("index1", doc5.Get("indexname"));
     Document doc150 = r1.Document(150);
     Assert.AreEqual("index2", doc150.Get("indexname"));
     r1.Close();
     writer.Close();
     dir1.Close();
 }
Example #40
        public virtual void  TestUpdateDocument()
        {
            bool optimize = true;
            
            Directory dir1 = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            
            // create the index
            CreateIndexNoClose(!optimize, "index1", writer);
            
            // writer.flush(false, true, true);
            
            // get a reader
            IndexReader r1 = writer.GetReader();
            Assert.IsTrue(r1.IsCurrent());
            
            System.String id10 = r1.Document(10).GetField("id").StringValue;
            
            Document newDoc = r1.Document(10);
            newDoc.RemoveField("id");
            newDoc.Add(new Field("id", System.Convert.ToString(8000), Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.UpdateDocument(new Term("id", id10), newDoc);
            Assert.IsFalse(r1.IsCurrent());
            
            IndexReader r2 = writer.GetReader();
            Assert.IsTrue(r2.IsCurrent());
            Assert.AreEqual(0, Count(new Term("id", id10), r2));
            Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r2));
            
            r1.Close();
            writer.Close();
            Assert.IsTrue(r2.IsCurrent());

            IndexReader r3 = IndexReader.Open(dir1, true);
            Assert.IsTrue(r3.IsCurrent());
            Assert.IsTrue(r2.IsCurrent());
            Assert.AreEqual(0, Count(new Term("id", id10), r3));
            Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r3));
            
            writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();
            doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            Assert.IsTrue(r2.IsCurrent());
            Assert.IsTrue(r3.IsCurrent());
            
            writer.Close();
            
            Assert.IsFalse(r2.IsCurrent());
            Assert.IsTrue(!r3.IsCurrent());
            
            r2.Close();
            r3.Close();
            
            dir1.Close();
        }
Example #41
        public void Test()
        {
            DirectoryInfo dir     = CreateTempDir(GetType().Name);
            DirectoryInfo destDir = CreateTempDir(GetType().Name);

            Store.Directory fsDir = NewFSDirectory(dir);
            // IndexSplitter.split makes its own commit directly with SIPC/SegmentInfos,
            // so the unreferenced files are expected.
            if (fsDir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)fsDir).AssertNoUnrefencedFilesOnClose = (false);
            }

            MergePolicy mergePolicy = new LogByteSizeMergePolicy();

            mergePolicy.NoCFSRatio          = 1.0;
            mergePolicy.MaxCFSSegmentSizeMB = double.PositiveInfinity;
            IndexWriter iw = new IndexWriter(
                fsDir,
                new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).
                SetOpenMode(OpenMode.CREATE).
                SetMergePolicy(mergePolicy)
                );

            for (int x = 0; x < 100; x++)
            {
                Document doc = DocHelper.CreateDocument(x, "index", 5);
                iw.AddDocument(doc);
            }
            iw.Commit();
            for (int x = 100; x < 150; x++)
            {
                Document doc = DocHelper.CreateDocument(x, "index2", 5);
                iw.AddDocument(doc);
            }
            iw.Commit();
            for (int x = 150; x < 200; x++)
            {
                Document doc = DocHelper.CreateDocument(x, "index3", 5);
                iw.AddDocument(doc);
            }
            iw.Commit();
            DirectoryReader iwReader = iw.GetReader();

            assertEquals(3, iwReader.Leaves.Count);
            iwReader.Dispose();
            iw.Dispose();
            // we should have 3 segments now, one per commit
            IndexSplitter @is          = new IndexSplitter(dir);
            string        splitSegName = @is.Infos.Info(1).Info.Name;

            @is.Split(destDir, new string[] { splitSegName });
            Store.Directory fsDirDest = NewFSDirectory(destDir);
            DirectoryReader r         = DirectoryReader.Open(fsDirDest);

            assertEquals(50, r.MaxDoc);
            r.Dispose();
            fsDirDest.Dispose();

            // now test cmdline
            DirectoryInfo destDir2 = CreateTempDir(GetType().Name);

            IndexSplitter.Main(new String[] { dir.FullName, destDir2.FullName, splitSegName });
            assertEquals(5, destDir2.GetFiles().Length);
            Store.Directory fsDirDest2 = NewFSDirectory(destDir2);
            r = DirectoryReader.Open(fsDirDest2);
            assertEquals(50, r.MaxDoc);
            r.Dispose();
            fsDirDest2.Dispose();

            // now remove the copied segment from src
            IndexSplitter.Main(new String[] { dir.FullName, "-d", splitSegName });
            r = DirectoryReader.Open(fsDir);
            assertEquals(2, r.Leaves.Count);
            r.Dispose();
            fsDir.Dispose();
        }
예제 #42
0
        public virtual void TestRollingUpdates_Mem()
        {
            Random random             = new Random(Random().Next());
            BaseDirectoryWrapper dir  = NewDirectory();
            LineFileDocs         docs = new LineFileDocs(random, DefaultCodecSupportsDocValues());

            //provider.register(new MemoryCodec());
            if ((!"Lucene3x".Equals(Codec.Default.Name, StringComparison.Ordinal)) && Random().NextBoolean())
            {
                Codec.Default =
                    TestUtil.AlwaysPostingsFormat(new MemoryPostingsFormat(Random().NextBoolean(), random.NextFloat()));
            }

            MockAnalyzer analyzer = new MockAnalyzer(Random());

            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);

            IndexWriter   w          = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
            int           SIZE       = AtLeast(20);
            int           id         = 0;
            IndexReader   r          = null;
            IndexSearcher s          = null;
            int           numUpdates = (int)(SIZE * (2 + (TEST_NIGHTLY ? 200 * Random().NextDouble() : 5 * Random().NextDouble())));

            if (VERBOSE)
            {
                Console.WriteLine("TEST: numUpdates=" + numUpdates);
            }
            int updateCount = 0;

            // TODO: sometimes update ids not in order...
            for (int docIter = 0; docIter < numUpdates; docIter++)
            {
                Documents.Document doc  = docs.NextDoc();
                string             myID = "" + id;
                if (id == SIZE - 1)
                {
                    id = 0;
                }
                else
                {
                    id++;
                }
                if (VERBOSE)
                {
                    Console.WriteLine("  docIter=" + docIter + " id=" + id);
                }
                ((Field)doc.GetField("docid")).SetStringValue(myID);

                Term idTerm = new Term("docid", myID);

                bool doUpdate;
                if (s != null && updateCount < SIZE)
                {
                    TopDocs hits = s.Search(new TermQuery(idTerm), 1);
                    Assert.AreEqual(1, hits.TotalHits);
                    doUpdate = !w.TryDeleteDocument(r, hits.ScoreDocs[0].Doc);
                    if (VERBOSE)
                    {
                        if (doUpdate)
                        {
                            Console.WriteLine("  tryDeleteDocument failed");
                        }
                        else
                        {
                            Console.WriteLine("  tryDeleteDocument succeeded");
                        }
                    }
                }
                else
                {
                    doUpdate = true;
                    if (VERBOSE)
                    {
                        Console.WriteLine("  no searcher: doUpdate=true");
                    }
                }

                updateCount++;

                if (doUpdate)
                {
                    w.UpdateDocument(idTerm, doc);
                }
                else
                {
                    w.AddDocument(doc);
                }

                if (docIter >= SIZE && Random().Next(50) == 17)
                {
                    if (r != null)
                    {
                        r.Dispose();
                    }

                    bool applyDeletions = Random().NextBoolean();

                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: reopen applyDeletions=" + applyDeletions);
                    }

                    r = w.GetReader(applyDeletions);
                    if (applyDeletions)
                    {
                        s = NewSearcher(r);
                    }
                    else
                    {
                        s = null;
                    }
                    Assert.IsTrue(!applyDeletions || r.NumDocs == SIZE, "applyDeletions=" + applyDeletions + " r.NumDocs=" + r.NumDocs + " vs SIZE=" + SIZE);
                    updateCount = 0;
                }
            }

            if (r != null)
            {
                r.Dispose();
            }

            w.Commit();
            Assert.AreEqual(SIZE, w.NumDocs);

            w.Dispose();

            TestIndexWriter.AssertNoUnreferencedFiles(dir, "leftover files after rolling updates");

            docs.Dispose();

            // LUCENE-4455:
            SegmentInfos infos = new SegmentInfos();

            infos.Read(dir);
            long totalBytes = 0;

            foreach (SegmentCommitInfo sipc in infos.Segments)
            {
                totalBytes += sipc.GetSizeInBytes();
            }
            long totalBytes2 = 0;

            foreach (string fileName in dir.ListAll())
            {
                if (!fileName.StartsWith(IndexFileNames.SEGMENTS, StringComparison.Ordinal))
                {
                    totalBytes2 += dir.FileLength(fileName);
                }
            }
            Assert.AreEqual(totalBytes2, totalBytes);
            dir.Dispose();
        }
예제 #43
0
        public virtual void  TestDuringAddIndexes()
        {
            MockRAMDirectory dir1 = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            writer.SetInfoStream(infoStream);
            writer.MergeFactor = 2;
            
            // create the index
            CreateIndexNoClose(false, "test", writer);
            writer.Commit();
            
            Directory[] dirs = new Directory[10];
            for (int i = 0; i < 10; i++)
            {
                dirs[i] = new MockRAMDirectory(dir1);
            }
            
            IndexReader r = writer.GetReader();
            
            int NUM_THREAD = 5;
            float SECONDS = 3;
            
            long endTime = (long) ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 1000.0 * SECONDS);
            System.Collections.IList excs = (System.Collections.IList) System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));
            
            ThreadClass[] threads = new ThreadClass[NUM_THREAD];
            for (int i = 0; i < NUM_THREAD; i++)
            {
                threads[i] = new AnonymousClassThread(endTime, writer, dirs, excs, this);
                threads[i].IsBackground = true;
                threads[i].Start();
            }
            
            int lastCount = 0;
            while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
            {
                IndexReader r2 = r.Reopen();
                if (r2 != r)
                {
                    r.Close();
                    r = r2;
                }
                Query q = new TermQuery(new Term("indexname", "test"));
                int count = new IndexSearcher(r).Search(q, 10).TotalHits;
                Assert.IsTrue(count >= lastCount);
                lastCount = count;
            }
            
            for (int i = 0; i < NUM_THREAD; i++)
            {
                threads[i].Join();
            }
            
            Assert.AreEqual(0, excs.Count);
            r.Close();
            try
            {
                Assert.AreEqual(0, dir1.GetOpenDeletedFiles().Count);
            }
            catch
            {
                //DIGY:
                //I think this is expected behaviour:
                //there aren't any pending files to be deleted after "writer.Close()".
                //But since lucene.java's test case is designed that way,
                //and I might be wrong, I will add a warning.

                // Assert only in debug mode, so that CheckIndex is called during release.
#if DEBUG
                Assert.Inconclusive("", 0, dir1.GetOpenDeletedFiles().Count);
#endif 
            }
            writer.Close();
            
            _TestUtil.CheckIndex(dir1);
            
            dir1.Close();
        }
예제 #44
0
        public virtual void  TestAfterClose()
        {
            Directory dir1 = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
            writer.SetInfoStream(infoStream);
            
            // create the index
            CreateIndexNoClose(false, "test", writer);
            
            IndexReader r = writer.GetReader();
            writer.Close();
            
            _TestUtil.CheckIndex(dir1);
            
            // reader should remain usable even after IndexWriter is closed:
            Assert.AreEqual(100, r.NumDocs());
            Query q = new TermQuery(new Term("indexname", "test"));
            Assert.AreEqual(100, new IndexSearcher(r).Search(q, 10).TotalHits);

            Assert.Throws<AlreadyClosedException>(() => r.Reopen(), "failed to hit AlreadyClosedException");

            r.Close();
            dir1.Close();
        }
예제 #45
0
        public void OpenWriterWithCommit()
        {
            SegmentsGenCommit sgCommit = new SegmentsGenCommit(this.directory);
            IndexWriter writer = new IndexWriter(this.directory, new WhitespaceAnalyzer(), null, IndexWriter.MaxFieldLength.UNLIMITED, sgCommit);
            Assert.AreEqual(10, writer.MaxDoc());
            IndexReader reader = writer.GetReader();

            IndexCommit commit = reader.GetIndexCommit();
            Assert.AreEqual(commit.GetGeneration(), sgCommit.GetGeneration());
            Assert.AreEqual(commit.GetSegmentsFileName(), sgCommit.GetSegmentsFileName());
        }
예제 #46
0
        public void TestEnforceDeletions()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            IndexReader reader = writer.GetReader();
            IndexSearcher searcher = new IndexSearcher(reader);

            // add a doc, refresh the reader, and check that it's there
            Document doc = new Document();
            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);

            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");

            SpanFilter startFilter = new SpanQueryFilter(new SpanTermQuery(new Term("id", "1")));

            // ignore deletions
            CachingSpanFilter filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.IGNORE);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
            ConstantScoreQuery constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");


            // force cache to regenerate:
            filter = new CachingSpanFilter(startFilter, CachingWrapperFilter.DeletesMode.RECACHE);

            writer.AddDocument(doc);
            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");

            constantScore = new ConstantScoreQuery(filter);
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");

            // make sure we get a cache hit when we reopen readers
            // that had no new deletions
            IndexReader newReader = RefreshReader(reader);
            Assert.IsTrue(reader != newReader);
            reader = newReader;
            searcher = new IndexSearcher(reader);
            int missCount = filter.missCount;
            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
            Assert.AreEqual(missCount, filter.missCount);

            // now delete the doc, refresh the reader, and see that it's not there
            writer.DeleteDocuments(new Term("id", "1"));

            reader = RefreshReader(reader);
            searcher = new IndexSearcher(reader);

            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");

            docs = searcher.Search(constantScore, 1);
            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
        }
예제 #47
0
		public virtual void  TestDeleteAllNRT()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
			modifier.SetMaxBufferedDocs(2);
			modifier.SetMaxBufferedDeleteTerms(2);
			
			int id = 0;
			int value_Renamed = 100;
			
			for (int i = 0; i < 7; i++)
			{
				AddDoc(modifier, ++id, value_Renamed);
			}
			modifier.Commit();
			
			IndexReader reader = modifier.GetReader();
			Assert.AreEqual(7, reader.NumDocs());
			reader.Close();
			
			AddDoc(modifier, ++id, value_Renamed);
			AddDoc(modifier, ++id, value_Renamed);
			
			// Delete all
			modifier.DeleteAll();
			
			reader = modifier.GetReader();
			Assert.AreEqual(0, reader.NumDocs());
			reader.Close();
			
			
			// Roll it back
			modifier.Rollback();
			modifier.Close();
			
			// Validate that the docs are still there
			reader = IndexReader.Open(dir);
			Assert.AreEqual(7, reader.NumDocs());
			reader.Close();
			
			dir.Close();
		}
예제 #48
0
 public virtual void  TestDeleteFromIndexWriter()
 {
     bool optimize = true;
     
     Directory dir1 = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.ReaderTermsIndexDivisor = 2;
     writer.SetInfoStream(infoStream);
     // create the index
     CreateIndexNoClose(!optimize, "index1", writer);
     writer.Flush(false, true, true);
     // get a reader
     IndexReader r1 = writer.GetReader();
     
     System.String id10 = r1.Document(10).GetField("id").StringValue;
     
     // deleted IW docs should not show up in the next getReader
     writer.DeleteDocuments(new Term("id", id10));
     IndexReader r2 = writer.GetReader();
     Assert.AreEqual(1, Count(new Term("id", id10), r1));
     Assert.AreEqual(0, Count(new Term("id", id10), r2));
     
     System.String id50 = r1.Document(50).GetField("id").StringValue;
     Assert.AreEqual(1, Count(new Term("id", id50), r1));
     
     writer.DeleteDocuments(new Term("id", id50));
     
     IndexReader r3 = writer.GetReader();
     Assert.AreEqual(0, Count(new Term("id", id10), r3));
     Assert.AreEqual(0, Count(new Term("id", id50), r3));
     
     System.String id75 = r1.Document(75).GetField("id").StringValue;
     writer.DeleteDocuments(new TermQuery(new Term("id", id75)));
     IndexReader r4 = writer.GetReader();
     Assert.AreEqual(1, Count(new Term("id", id75), r3));
     Assert.AreEqual(0, Count(new Term("id", id75), r4));
     
     r1.Close();
     r2.Close();
     r3.Close();
     r4.Close();
     writer.Close();
     
     // reopen the writer to verify the delete made it to the directory
     writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.SetInfoStream(infoStream);
     IndexReader w2r1 = writer.GetReader();
     Assert.AreEqual(0, Count(new Term("id", id10), w2r1));
     w2r1.Close();
     writer.Close();
     dir1.Close();
 }
예제 #49
0
            public override void Run()
            {
                DirectoryReader currentReader = null;
                Random          random        = LuceneTestCase.Random;

                try
                {
                    Document doc = new Document();
                    doc.Add(new TextField("id", "1", Field.Store.NO));
                    writer.AddDocument(doc);
                    holder.reader = currentReader = writer.GetReader(true);
                    Term term = new Term("id");
                    for (int i = 0; i < numOps && !holder.stop; i++)
                    {
                        float nextOp = (float)random.NextDouble();
                        if (nextOp < 0.3)
                        {
                            term.Set("id", new BytesRef("1"));
                            writer.UpdateDocument(term, doc);
                        }
                        else if (nextOp < 0.5)
                        {
                            writer.AddDocument(doc);
                        }
                        else
                        {
                            term.Set("id", new BytesRef("1"));
                            writer.DeleteDocuments(term);
                        }
                        if (holder.reader != currentReader)
                        {
                            holder.reader = currentReader;
                            if (countdown)
                            {
                                countdown = false;
                                latch.Signal();
                            }
                        }
                        if (random.NextBoolean())
                        {
                            writer.Commit();
                            DirectoryReader newReader = DirectoryReader.OpenIfChanged(currentReader);
                            if (newReader != null)
                            {
                                currentReader.DecRef();
                                currentReader = newReader;
                            }
                            if (currentReader.NumDocs == 0)
                            {
                                writer.AddDocument(doc);
                            }
                        }
                    }
                }
                catch (Exception e)
                {
                    failed = e;
                }
                finally
                {
                    holder.reader = null;
                    if (countdown)
                    {
                        latch.Signal();
                    }
                    if (currentReader != null)
                    {
                        try
                        {
                            currentReader.DecRef();
                        }
#pragma warning disable 168
                        catch (IOException e)
#pragma warning restore 168
                        {
                        }
                    }
                }
                if (Verbose)
                {
                    Console.WriteLine("writer stopped - forced by reader: " + holder.stop);
                }
            }
예제 #50
0
 public virtual void  TestAddIndexes2()
 {
     bool optimize = false;
     
     Directory dir1 = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.SetInfoStream(infoStream);
     
     // create a 2nd index
     Directory dir2 = new MockRAMDirectory();
     IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer2.SetInfoStream(infoStream);
     CreateIndexNoClose(!optimize, "index2", writer2);
     writer2.Close();
     
     writer.AddIndexesNoOptimize(new Directory[]{dir2});
     writer.AddIndexesNoOptimize(new Directory[]{dir2});
     writer.AddIndexesNoOptimize(new Directory[]{dir2});
     writer.AddIndexesNoOptimize(new Directory[]{dir2});
     writer.AddIndexesNoOptimize(new Directory[]{dir2});
     
     IndexReader r1 = writer.GetReader();
     Assert.AreEqual(500, r1.MaxDoc);
     
     r1.Close();
     writer.Close();
     dir1.Close();
 }
예제 #51
0
 public virtual void  TestAfterCommit()
 {
     Directory dir1 = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.SetInfoStream(infoStream);
     
     // create the index
     CreateIndexNoClose(false, "test", writer);
     
     // get a reader to put writer into near real-time mode
     IndexReader r1 = writer.GetReader();
     _TestUtil.CheckIndex(dir1);
     writer.Commit();
     _TestUtil.CheckIndex(dir1);
     Assert.AreEqual(100, r1.NumDocs());
     
     for (int i = 0; i < 10; i++)
     {
         writer.AddDocument(CreateDocument(i, "test", 4));
     }
     ((ConcurrentMergeScheduler) writer.MergeScheduler).Sync();
     
     IndexReader r2 = r1.Reopen();
     if (r2 != r1)
     {
         r1.Close();
         r1 = r2;
     }
     Assert.AreEqual(110, r1.NumDocs());
     writer.Close();
     r1.Close();
     dir1.Close();
 }
예제 #52
0
        public virtual void TestDeletes1()
        {
            //IndexWriter.debug2 = System.out;
            Directory         dir = new MockDirectoryWrapper(new J2N.Randomizer(Random.NextInt64()), new RAMDirectory());
            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));

            iwc.SetMergeScheduler(new SerialMergeScheduler());
            iwc.SetMaxBufferedDocs(5000);
            iwc.SetRAMBufferSizeMB(100);
            RangeMergePolicy fsmp = new RangeMergePolicy(this, false);

            iwc.SetMergePolicy(fsmp);
            IndexWriter writer = new IndexWriter(dir, iwc);

            for (int x = 0; x < 5; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "1", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }
            //System.out.println("commit1");
            writer.Commit();
            Assert.AreEqual(1, writer.SegmentCount);
            for (int x = 5; x < 10; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "2", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }
            //System.out.println("commit2");
            writer.Commit();
            Assert.AreEqual(2, writer.SegmentCount);

            for (int x = 10; x < 15; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "3", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }

            writer.DeleteDocuments(new Term("id", "1"));

            writer.DeleteDocuments(new Term("id", "11"));

            // flushing without applying deletes means
            // there will still be deletes in the segment infos
            writer.Flush(false, false);
            Assert.IsTrue(writer.bufferedUpdatesStream.Any());

            // get reader flushes pending deletes
            // so there should not be anymore
            IndexReader r1 = writer.GetReader();

            Assert.IsFalse(writer.bufferedUpdatesStream.Any());
            r1.Dispose();

            // delete id:2 from the first segment
            // merge segments 0 and 1
            // which should apply the delete id:2
            writer.DeleteDocuments(new Term("id", "2"));
            writer.Flush(false, false);
            fsmp         = (RangeMergePolicy)writer.Config.MergePolicy;
            fsmp.doMerge = true;
            fsmp.start   = 0;
            fsmp.length  = 2;
            writer.MaybeMerge();

            Assert.AreEqual(2, writer.SegmentCount);

            // id:2 shouldn't exist anymore because
            // it's been applied in the merge and now it's gone
            IndexReader r2 = writer.GetReader();

            int[] id2docs = ToDocsArray(new Term("id", "2"), null, r2);
            Assert.IsTrue(id2docs is null);
            r2.Dispose();

            /*
             * /// // added docs are in the ram buffer
             * /// for (int x = 15; x < 20; x++) {
             * ///  writer.AddDocument(TestIndexWriterReader.CreateDocument(x, "4", 2));
             * ///  System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
             * /// }
             * /// Assert.IsTrue(writer.numRamDocs() > 0);
             * /// // delete from the ram buffer
             * /// writer.DeleteDocuments(new Term("id", Integer.toString(13)));
             * ///
             * /// Term id3 = new Term("id", Integer.toString(3));
             * ///
             * /// // delete from the 1st segment
             * /// writer.DeleteDocuments(id3);
             * ///
             * /// Assert.IsTrue(writer.numRamDocs() > 0);
             * ///
             * /// //System.out
             * /// //    .println("segdels1:" + writer.docWriter.deletesToString());
             * ///
             * /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
             * ///
             * /// // we cause a merge to happen
             * /// fsmp.doMerge = true;
             * /// fsmp.start = 0;
             * /// fsmp.length = 2;
             * /// System.out.println("maybeMerge "+writer.SegmentInfos);
             * ///
             * /// SegmentInfo info0 = writer.SegmentInfos[0];
             * /// SegmentInfo info1 = writer.SegmentInfos[1];
             * ///
             * /// writer.MaybeMerge();
             * /// System.out.println("maybeMerge after "+writer.SegmentInfos);
             * /// // there should be docs in RAM
             * /// Assert.IsTrue(writer.numRamDocs() > 0);
             * ///
             * /// // assert we've merged the 1 and 2 segments
             * /// // and still have a segment leftover == 2
             * /// Assert.AreEqual(2, writer.SegmentInfos.Size());
             * /// Assert.IsFalse(segThere(info0, writer.SegmentInfos));
             * /// Assert.IsFalse(segThere(info1, writer.SegmentInfos));
             * ///
             * /// //System.out.println("segdels2:" + writer.docWriter.deletesToString());
             * ///
             * /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
             * ///
             * /// IndexReader r = writer.GetReader();
             * /// IndexReader r1 = r.getSequentialSubReaders()[0];
             * /// printDelDocs(r1.GetLiveDocs());
             * /// int[] docs = toDocsArray(id3, null, r);
             * /// System.out.println("id3 docs:"+Arrays.toString(docs));
             * /// // there shouldn't be any docs for id:3
             * /// Assert.IsTrue(docs is null);
             * /// r.Dispose();
             * ///
             * /// part2(writer, fsmp);
             * ///
             */
            // System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
            //System.out.println("close");
            writer.Dispose();
            dir.Dispose();
        }
예제 #53
0
 public virtual void  TestMergeWarmer()
 {
     
     Directory dir1 = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.SetInfoStream(infoStream);
     
     // create the index
     CreateIndexNoClose(false, "test", writer);
     
     // get a reader to put writer into near real-time mode
     IndexReader r1 = writer.GetReader();
     
     // Enroll warmer
     MyWarmer warmer = new MyWarmer();
     writer.MergedSegmentWarmer = warmer;
     writer.MergeFactor = 2;
     writer.SetMaxBufferedDocs(2);
     
     for (int i = 0; i < 100; i++)
     {
         writer.AddDocument(CreateDocument(i, "test", 4));
     }
     ((ConcurrentMergeScheduler) writer.MergeScheduler).Sync();
     
     Assert.IsTrue(warmer.warmCount > 0);
     int count = warmer.warmCount;
     
     writer.AddDocument(CreateDocument(17, "test", 4));
     writer.Optimize();
     Assert.IsTrue(warmer.warmCount > count);
     
     writer.Close();
     r1.Close();
     dir1.Close();
 }
예제 #54
0
        public virtual void  TestDuringAddIndexes()
        {
            Directory   dir1   = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);

            writer.SetInfoStream(infoStream);
            writer.SetMergeFactor(2);

            // create the index
            CreateIndexNoClose(false, "test", writer);
            writer.Commit();

            Directory[] dirs = new Directory[10];
            for (int i = 0; i < 10; i++)
            {
                dirs[i] = new MockRAMDirectory(dir1);
            }

            IndexReader r = writer.GetReader();

            int   NUM_THREAD = 5;
            float SECONDS    = 3;

            long endTime = (long)((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 1000.0 * SECONDS);

            System.Collections.IList excs = (System.Collections.IList)System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));

            SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[NUM_THREAD];
            for (int i = 0; i < NUM_THREAD; i++)
            {
                threads[i] = new AnonymousClassThread(endTime, writer, dirs, excs, this);
                threads[i].IsBackground = true;
                threads[i].Start();
            }

            int lastCount = 0;

            while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
            {
                IndexReader r2 = r.Reopen();
                if (r2 != r)
                {
                    r.Close();
                    r = r2;
                }
                Query q     = new TermQuery(new Term("indexname", "test"));
                int   count = new IndexSearcher(r).Search(q, 10).totalHits;
                Assert.IsTrue(count >= lastCount);
                lastCount = count;
            }

            for (int i = 0; i < NUM_THREAD; i++)
            {
                threads[i].Join();
            }

            Assert.AreEqual(0, excs.Count);
            writer.Close();

            _TestUtil.CheckIndex(dir1);
            r.Close();
            dir1.Close();
        }
예제 #55
0
 /// <summary> Tests creating a segment, then checks that the segment can be seen via
 /// IW.GetReader()
 /// </summary>
 public virtual void  DoTestIndexWriterReopenSegment(bool optimize)
 {
     Directory dir1 = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     writer.SetInfoStream(infoStream);
     IndexReader r1 = writer.GetReader();
     Assert.AreEqual(0, r1.MaxDoc);
     CreateIndexNoClose(false, "index1", writer);
     writer.Flush(!optimize, true, true);
     
     IndexReader iwr1 = writer.GetReader();
     Assert.AreEqual(100, iwr1.MaxDoc);
     
     IndexReader r2 = writer.GetReader();
     Assert.AreEqual(r2.MaxDoc, 100);
     // add 100 documents
     for (int x = 10000; x < 10000 + 100; x++)
     {
         Document d = CreateDocument(x, "index1", 5);
         writer.AddDocument(d);
     }
     writer.Flush(false, true, true);
     // verify the reader was reopened internally
     IndexReader iwr2 = writer.GetReader();
     Assert.IsTrue(iwr2 != r1);
     Assert.AreEqual(200, iwr2.MaxDoc);
     // should have flushed out a segment
     IndexReader r3 = writer.GetReader();
     Assert.IsTrue(r2 != r3);
     Assert.AreEqual(200, r3.MaxDoc);
     
     // dec ref the readers rather than close them because
     // closing flushes changes to the writer
     r1.Close();
     iwr1.Close();
     r2.Close();
     r3.Close();
     iwr2.Close();
     writer.Close();
     
     // test whether the changes made it to the directory
     writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     IndexReader w2r1 = writer.GetReader();
     // ensure the added documents were actually flushed to the directory
     Assert.AreEqual(200, w2r1.MaxDoc);
     w2r1.Close();
     writer.Close();
     
     dir1.Close();
 }
예제 #56
0
        public void TestSegmentWarmer()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            w.SetMaxBufferedDocs(2);
            w.GetReader().Close();
            w.MergedSegmentWarmer = new AnonymousIndexReaderWarmer();

            Document doc = new Document();
            doc.Add(new Field("foo", "bar", Field.Store.YES, Field.Index.NOT_ANALYZED));
            for (int i = 0; i < 20; i++)
            {
                w.AddDocument(doc);
            }
            w.WaitForMerges();
            w.Close();
            dir.Close();
        }
예제 #57
0
        public virtual void TestRandom()
        {
            int num = AtLeast(2);

            for (int iter = 0; iter < num; iter++)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: iter=" + iter);
                }

                Directory dir = NewDirectory();

                IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.COMPOUND_FILES));
                // we can do this because we use NoMergePolicy (and don't merge to "nothing")
                w.KeepFullyDeletedSegments = true;

                IDictionary<BytesRef, IList<int?>> docs = new Dictionary<BytesRef, IList<int?>>();
                ISet<int?>      deleted = new JCG.HashSet<int?>();
                IList<BytesRef> terms   = new List<BytesRef>();

                int numDocs            = TestUtil.NextInt32(Random, 1, 100 * RANDOM_MULTIPLIER);
                Documents.Document doc = new Documents.Document();
                Field f = NewStringField("field", "", Field.Store.NO);
                doc.Add(f);
                Field id = NewStringField("id", "", Field.Store.NO);
                doc.Add(id);

                bool onlyUniqueTerms = Random.NextBoolean();
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: onlyUniqueTerms=" + onlyUniqueTerms + " numDocs=" + numDocs);
                }
                ISet<BytesRef> uniqueTerms = new JCG.HashSet<BytesRef>();
                for (int i = 0; i < numDocs; i++)
                {
                    if (!onlyUniqueTerms && Random.NextBoolean() && terms.Count > 0)
                    {
                        // re-use existing term
                        BytesRef term = terms[Random.Next(terms.Count)];
                        docs[term].Add(i);
                        f.SetStringValue(term.Utf8ToString());
                    }
                    else
                    {
                        string   s    = TestUtil.RandomUnicodeString(Random, 10);
                        BytesRef term = new BytesRef(s);
                        if (!docs.TryGetValue(term, out IList<int?> docsTerm))
                        {
                            docs[term] = docsTerm = new List<int?>();
                        }
                        docsTerm.Add(i);
                        terms.Add(term);
                        uniqueTerms.Add(term);
                        f.SetStringValue(s);
                    }
                    id.SetStringValue("" + i);
                    w.AddDocument(doc);
                    if (Random.Next(4) == 1)
                    {
                        w.Commit();
                    }
                    if (i > 0 && Random.Next(20) == 1)
                    {
                        int delID = Random.Next(i);
                        deleted.Add(delID);
                        w.DeleteDocuments(new Term("id", "" + delID));
                        if (VERBOSE)
                        {
                            Console.WriteLine("TEST: delete " + delID);
                        }
                    }
                }

                if (VERBOSE)
                {
                    List<BytesRef> termsList = new List<BytesRef>(uniqueTerms);
#pragma warning disable 612, 618
                    termsList.Sort(BytesRef.UTF8SortedAsUTF16Comparer);
#pragma warning restore 612, 618
                    Console.WriteLine("TEST: terms in UTF16 order:");
                    foreach (BytesRef b in termsList)
                    {
                        Console.WriteLine("  " + UnicodeUtil.ToHexString(b.Utf8ToString()) + " " + b);
                        foreach (int docID in docs[b])
                        {
                            if (deleted.Contains(docID))
                            {
                                Console.WriteLine("    " + docID + " (deleted)");
                            }
                            else
                            {
                                Console.WriteLine("    " + docID);
                            }
                        }
                    }
                }

                IndexReader reader = w.GetReader();
                w.Dispose();
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: reader=" + reader);
                }

                IBits liveDocs = MultiFields.GetLiveDocs(reader);
                foreach (int delDoc in deleted)
                {
                    Assert.IsFalse(liveDocs.Get(delDoc));
                }

                for (int i = 0; i < 100; i++)
                {
                    BytesRef term = terms[Random.Next(terms.Count)];
                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: seek term=" + UnicodeUtil.ToHexString(term.Utf8ToString()) + " " + term);
                    }

                    DocsEnum docsEnum = TestUtil.Docs(Random, reader, "field", term, liveDocs, null, DocsFlags.NONE);
                    Assert.IsNotNull(docsEnum);

                    foreach (int docID in docs[term])
                    {
                        if (!deleted.Contains(docID))
                        {
                            Assert.AreEqual(docID, docsEnum.NextDoc());
                        }
                    }
                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
                }

                reader.Dispose();
                dir.Dispose();
            }
        }
예제 #58
0
        public virtual void  TestDuringAddIndexes()
        {
            Assert.Ignore("Known issue");

            MockRAMDirectory dir1   = new MockRAMDirectory();
            IndexWriter      writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED, null);

            writer.SetInfoStream(infoStream, null);
            writer.MergeFactor = 2;

            // create the index
            CreateIndexNoClose(false, "test", writer);
            writer.Commit(null);

            Directory[] dirs = new Directory[10];
            for (int i = 0; i < 10; i++)
            {
                dirs[i] = new MockRAMDirectory(dir1);
            }

            IndexReader r = writer.GetReader(null);

            int   NUM_THREAD = 5;
            float SECONDS    = 3;

            long endTime = (long)((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 1000.0 * SECONDS);

            System.Collections.IList excs = (System.Collections.IList)System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));

            ThreadClass[] threads = new ThreadClass[NUM_THREAD];
            for (int i = 0; i < NUM_THREAD; i++)
            {
                threads[i] = new AnonymousClassThread(endTime, writer, dirs, excs, this);
                threads[i].IsBackground = true;
                threads[i].Start();
            }

            int lastCount = 0;

            while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
            {
                IndexReader r2 = r.Reopen(null);
                if (r2 != r)
                {
                    r.Close();
                    r = r2;
                }
                Query q     = new TermQuery(new Term("indexname", "test"));
                int   count = new IndexSearcher(r).Search(q, 10, null).TotalHits;
                Assert.IsTrue(count >= lastCount);
                lastCount = count;
            }

            for (int i = 0; i < NUM_THREAD; i++)
            {
                threads[i].Join();
            }

            Assert.AreEqual(0, excs.Count);
            r.Close();
            try
            {
                Assert.AreEqual(0, dir1.GetOpenDeletedFiles().Count);
            }
            catch
            {
                //DIGY:
                //I think this is expected behaviour:
                //there aren't any pending files to be deleted after "writer.Close()".
                //But since lucene.java's test case is designed that way,
                //and I might be wrong, I will add a warning.

                // Assert only in debug mode, so that CheckIndex is called during release.
#if DEBUG
                Assert.Inconclusive("", 0, dir1.GetOpenDeletedFiles().Count);
#endif
            }
            writer.Close();

            _TestUtil.CheckIndex(dir1);

            dir1.Close();
        }
예제 #59
0
 /// <summary>
 /// Open a near real time <see cref="IndexReader"/> from the <see cref="IndexWriter"/>.
 /// <para/>
 /// @lucene.experimental
 /// </summary>
 /// <param name="writer"> The <see cref="IndexWriter"/> to open from </param>
 /// <param name="applyAllDeletes"> If <c>true</c>, all buffered deletes will
 /// be applied (made visible) in the returned reader.  If
 /// <c>false</c>, the deletes are not applied but remain buffered
 /// (in IndexWriter) so that they will be applied in the
 /// future.  Applying deletes can be costly, so if your app
 /// can tolerate deleted documents being returned you might
 /// gain some performance by passing <c>false</c>. </param>
 /// <returns> The new <see cref="IndexReader"/> </returns>
 /// <exception cref="CorruptIndexException"> if the index is corrupt </exception>
 /// <exception cref="IOException"> if there is a low-level IO error
 /// </exception>
 /// <seealso cref="OpenIfChanged(DirectoryReader, IndexWriter, bool)"/>
 new public static DirectoryReader Open(IndexWriter writer, bool applyAllDeletes)
 {
     return(writer.GetReader(applyAllDeletes));
 }