コード例 #1
0
ファイル: Test2BDocs.cs プロジェクト: zhuthree/lucenenet
        /// <summary>
        /// Builds a MultiReader whose total document count lands exactly on
        /// int.MaxValue and checks MaxDoc/NumDocs report that value.
        /// </summary>
        public virtual void TestExactlyAtLimit()
        {
            Directory secondDir = NewFSDirectory(CreateTempDir("2BDocs2"));
            IndexWriter writer = new IndexWriter(secondDir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
            Document emptyDoc = new Document();

            // Fill the second index with 262,143 (= 2^18 - 1) empty documents.
            for (int added = 0; added < 262143; added++)
            {
                writer.AddDocument(emptyDoc);
            }
            writer.Dispose();

            DirectoryReader baseReader = DirectoryReader.Open(Dir);
            DirectoryReader secondReader = DirectoryReader.Open(secondDir);

            // 8191 copies of the base reader plus the small one; sized so the
            // combined document count is exactly int.MaxValue (asserted below).
            IndexReader[] subReaders = new IndexReader[8192];
            Arrays.Fill(subReaders, baseReader);
            subReaders[subReaders.Length - 1] = secondReader;
            MultiReader combined = new MultiReader(subReaders);

            Assert.AreEqual(int.MaxValue, combined.MaxDoc);
            Assert.AreEqual(int.MaxValue, combined.NumDocs);
            baseReader.Dispose();
            secondReader.Dispose();
            secondDir.Dispose();
        }
コード例 #2
0
        /// <summary>
        /// Full-Text search engine search provider, used to search indexed documents.
        /// </summary>
        /// <param name="directoryIndexInfos">The array directory infomation where the index files are located.</param>
        public SearchProvider(DirectoryInfo[] directoryIndexInfos)
        {
            try
            {
                List <Lucene.Net.Index.IndexReader> readers = new List <IndexReader>();

                // For each directory.
                foreach (DirectoryInfo item in directoryIndexInfos)
                {
                    // Create the index reader.
                    Lucene.Net.Store.Directory   directory = FSDirectory.Open(item);
                    Lucene.Net.Index.IndexReader reader    = Lucene.Net.Index.DirectoryReader.Open(directory);
                    readers.Add(reader);
                }

                // Create the multiple index readers.
                _reader = new Lucene.Net.Index.MultiReader(readers.ToArray(), true);
            }
            catch (Exception)
            {
                if (_reader != null)
                {
                    _reader.Dispose();
                }

                throw;
            }
        }
コード例 #3
0
        /// <summary>
        /// Verifies IsCurrent reflects modifications to either underlying index,
        /// and that GetVersion throws on a MultiReader spanning multiple indexes.
        /// </summary>
        public virtual void  TestIsCurrent()
        {
            RAMDirectory ramDir1 = new RAMDirectory();

            AddDoc(ramDir1, "test foo", true);
            RAMDirectory ramDir2 = new RAMDirectory();

            AddDoc(ramDir2, "test blah", true);
            IndexReader[] readers = new IndexReader[] { IndexReader.Open(ramDir1), IndexReader.Open(ramDir2) };
            MultiReader   mr      = new MultiReader(readers);

            Assert.IsTrue(mr.IsCurrent());             // just opened, must be current
            AddDoc(ramDir1, "more text", false);
            Assert.IsFalse(mr.IsCurrent());            // has been modified, not current anymore
            AddDoc(ramDir2, "even more text", false);
            Assert.IsFalse(mr.IsCurrent());            // has been modified even more, not current anymore
            try
            {
                mr.GetVersion();
                Assert.Fail();
            }
            catch (System.NotSupportedException)
            {
                // expected: no single version exists across multiple indexes
            }
            mr.Close();
        }
コード例 #4
0
        /// <summary>
        /// ParallelCompositeReader must reject readers whose subreader
        /// structures differ, and must not take ownership of them on failure.
        /// </summary>
        public virtual void TestIncompatibleIndexes3()
        {
            Directory dir1 = GetDir1(Random());
            Directory dir2 = GetDir2(Random());

            // ir1 mixes a composite reader with a slow-wrapped (atomic) view,
            // while ir2 combines two plain DirectoryReaders: deliberately
            // different subreader structures.
            CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)));
            CompositeReader ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));

            CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
            try
            {
                new ParallelCompositeReader(readers);
                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
            }
            catch (System.ArgumentException)
            {
                // expected exception
            }
            try
            {
                new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
            }
            catch (System.ArgumentException)
            {
                // expected exception
            }
            // The failed constructors must not have incRef'd / taken ownership.
            Assert.AreEqual(1, ir1.RefCount);
            Assert.AreEqual(1, ir2.RefCount);
            ir1.Dispose();
            ir2.Dispose();
            Assert.AreEqual(0, ir1.RefCount);
            Assert.AreEqual(0, ir2.RefCount);
            dir1.Dispose();
            dir2.Dispose();
        }
コード例 #5
0
 /// <summary>
 /// Smoke test: constructing a MultiReader over the fixture's directory,
 /// segment infos, and readers must not throw.
 /// </summary>
 public virtual void  TestTermVectors()
 {
     try
     {
         MultiReader reader = new MultiReader(dir, sis, false, readers);
         Assert.IsTrue(reader != null);
     }
     catch (System.IO.IOException e)
     {
         System.Console.Error.WriteLine(e.StackTrace);
         // Assert.Fail gives a meaningful failure instead of IsTrue(false).
         Assert.Fail("unexpected IOException while opening MultiReader");
     }
 }
コード例 #6
0
ファイル: TestAddIndexes.cs プロジェクト: Cefa68000/lucenenet
 // Adding an empty MultiReader via AddIndexes must not create empty segments.
 public virtual void TestAddEmpty()
 {
     Directory d1 = NewDirectory();
     RandomIndexWriter w = new RandomIndexWriter(Random(), d1);
     MultiReader empty = new MultiReader();  // zero subreaders => zero documents
     w.AddIndexes(empty);
     w.Dispose();
     DirectoryReader dr = DirectoryReader.Open(d1);
     foreach (AtomicReaderContext ctx in dr.Leaves)
     {
         // Any leaf that survived the add must actually contain documents.
         Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes");
     }
     dr.Dispose();
     d1.Dispose();
 }
コード例 #7
0
        // Fields 1-4 indexed together:
        private IndexSearcher Single(Random random, bool compositeComposite)
        {
            Dir = NewDirectory();
            IndexWriter w  = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
            Document    d1 = new Document();

            d1.Add(NewTextField("f1", "v1", Field.Store.YES));
            d1.Add(NewTextField("f2", "v1", Field.Store.YES));
            d1.Add(NewTextField("f3", "v1", Field.Store.YES));
            d1.Add(NewTextField("f4", "v1", Field.Store.YES));
            w.AddDocument(d1);
            Document d2 = new Document();

            d2.Add(NewTextField("f1", "v2", Field.Store.YES));
            d2.Add(NewTextField("f2", "v2", Field.Store.YES));
            d2.Add(NewTextField("f3", "v2", Field.Store.YES));
            d2.Add(NewTextField("f4", "v2", Field.Store.YES));
            w.AddDocument(d2);
            Document d3 = new Document();

            d3.Add(NewTextField("f1", "v3", Field.Store.YES));
            d3.Add(NewTextField("f2", "v3", Field.Store.YES));
            d3.Add(NewTextField("f3", "v3", Field.Store.YES));
            d3.Add(NewTextField("f4", "v3", Field.Store.YES));
            w.AddDocument(d3);
            Document d4 = new Document();

            d4.Add(NewTextField("f1", "v4", Field.Store.YES));
            d4.Add(NewTextField("f2", "v4", Field.Store.YES));
            d4.Add(NewTextField("f3", "v4", Field.Store.YES));
            d4.Add(NewTextField("f4", "v4", Field.Store.YES));
            w.AddDocument(d4);
            w.Dispose();

            CompositeReader ir;

            if (compositeComposite)
            {
                ir = new MultiReader(DirectoryReader.Open(Dir), DirectoryReader.Open(Dir));
            }
            else
            {
                ir = DirectoryReader.Open(Dir);
            }
            return(NewSearcher(ir));
        }
コード例 #8
0
        // Builds a MultiReader over readers from two separate directories and
        // runs the shared default test suite against it.
        public virtual void  TestMixedReaders()
        {
            Directory dir1 = new MockRAMDirectory();

            TestIndexReaderReopen.CreateIndex(dir1, true);
            Directory dir2 = new MockRAMDirectory();

            TestIndexReaderReopen.CreateIndex(dir2, true);
            // NOTE(review): the boolean presumably is the readOnly flag of the
            // legacy IndexReader.Open overload — confirm against its signature.
            IndexReader r1 = IndexReader.Open(dir1, false);
            IndexReader r2 = IndexReader.Open(dir2, false);

            MultiReader multiReader = new MultiReader(new IndexReader[] { r1, r2 });

            PerformDefaultTests(multiReader);
            multiReader.Close();
            dir1.Close();
            dir2.Close();
        }
コード例 #9
0
        // IsCurrent must go stale when either underlying index changes, and
        // Version must be unsupported on a reader spanning multiple indexes.
        public virtual void  TestIsCurrent()
        {
            RAMDirectory ramDir1 = new RAMDirectory();

            AddDoc(ramDir1, "test foo", true);
            RAMDirectory ramDir2 = new RAMDirectory();

            AddDoc(ramDir2, "test blah", true);
            IndexReader[] readers = new IndexReader[] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false) };
            MultiReader   mr      = new MultiReader(readers);

            Assert.IsTrue(mr.IsCurrent());             // just opened, must be current
            AddDoc(ramDir1, "more text", false);
            Assert.IsFalse(mr.IsCurrent());            // has been modified, not current anymore
            AddDoc(ramDir2, "even more text", false);
            Assert.IsFalse(mr.IsCurrent());            // has been modified even more, not current anymore

            // No single version number exists across multiple indexes.
            Assert.Throws <NotSupportedException>(() => { var ver = mr.Version; });
            mr.Close();
        }
コード例 #10
0
		// Re-reads the segment infos and returns a MultiReader over the first
		// two segments; also records the SegmentReaders into the shared array.
		protected internal override IndexReader OpenReader()
		{
			IndexReader reader;
			
			sis.Read(dir);
			// NOTE(review): first bool arg is presumably the readOnly flag of
			// SegmentReader.Get — confirm against its signature.
			SegmentReader reader1 = SegmentReader.Get(false, sis.Info(0), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
            SegmentReader reader2 = SegmentReader.Get(false, sis.Info(1), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
			readers[0] = reader1;
			readers[1] = reader2;
			Assert.IsTrue(reader1 != null);
			Assert.IsTrue(reader2 != null);
			
			reader = new MultiReader(readers);
			
			Assert.IsTrue(dir != null);
			Assert.IsTrue(sis != null);
			Assert.IsTrue(reader != null);
			
			return reader;
		}
コード例 #11
0
		// Re-reads the segment infos and returns a MultiReader over the first
		// two segments; also records the SegmentReaders into the shared array.
		protected internal override IndexReader OpenReader()
		{
			IndexReader reader;
			
			sis.Read(dir);
			SegmentReader reader1 = SegmentReader.Get(sis.Info(0));
			SegmentReader reader2 = SegmentReader.Get(sis.Info(1));
			readers[0] = reader1;
			readers[1] = reader2;
			Assert.IsTrue(reader1 != null);
			Assert.IsTrue(reader2 != null);
			
			reader = new MultiReader(readers);
			
			Assert.IsTrue(dir != null);
			Assert.IsTrue(sis != null);
			Assert.IsTrue(reader != null);
			
			return reader;
		}
コード例 #12
0
        // Re-reads the segment infos and returns a MultiReader over the first
        // two segments; also records the SegmentReaders into the shared array.
        protected internal override IndexReader OpenReader()
        {
            IndexReader reader;

            sis.Read(dir, null);
            // NOTE(review): first bool arg is presumably the readOnly flag of
            // SegmentReader.Get — confirm against its signature.
            SegmentReader reader1 = SegmentReader.Get(false, sis.Info(0), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);
            SegmentReader reader2 = SegmentReader.Get(false, sis.Info(1), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);

            readers[0] = reader1;
            readers[1] = reader2;
            Assert.IsTrue(reader1 != null);
            Assert.IsTrue(reader2 != null);

            reader = new MultiReader(readers);

            Assert.IsTrue(dir != null);
            Assert.IsTrue(sis != null);
            Assert.IsTrue(reader != null);

            return(reader);
        }
コード例 #13
0
        // Re-reads the segment infos and returns a MultiReader over the first
        // two segments; also records the SegmentReaders into the shared array.
        protected internal override IndexReader OpenReader()
        {
            IndexReader reader;

            sis.Read(dir);
            SegmentReader reader1 = SegmentReader.Get(sis.Info(0));
            SegmentReader reader2 = SegmentReader.Get(sis.Info(1));

            readers[0] = reader1;
            readers[1] = reader2;
            Assert.IsTrue(reader1 != null);
            Assert.IsTrue(reader2 != null);

            reader = new MultiReader(readers);

            Assert.IsTrue(dir != null);
            Assert.IsTrue(sis != null);
            Assert.IsTrue(reader != null);

            return(reader);
        }
コード例 #14
0
ファイル: TestMultiReader.cs プロジェクト: runefs/Marvin
		/// <summary>
		/// Retrieves stored documents through a MultiReader and checks field
		/// counts and that a term-frequency vector is available.
		/// </summary>
		public virtual void  TestDocument()
		{
			try
			{
				sis.Read(dir);
				MultiReader reader = new MultiReader(dir, sis, false, readers);
				Assert.IsTrue(reader != null);
				Document newDoc1 = reader.Document(0);
				Assert.IsTrue(newDoc1 != null);
				// NOTE(review): the "- 2" presumably accounts for two unstored
				// fields per document — confirm against DocHelper.
				Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - 2);
				Document newDoc2 = reader.Document(1);
				Assert.IsTrue(newDoc2 != null);
				Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - 2);
				TermFreqVector vector = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
				Assert.IsTrue(vector != null);
			}
			catch (System.IO.IOException e)
			{
				System.Console.Error.WriteLine(e.StackTrace);
				// Assert.Fail gives a meaningful failure instead of IsTrue(false).
				Assert.Fail("unexpected IOException in TestDocument");
			}
		}
コード例 #15
0
 // Retrieves stored documents through a MultiReader and checks field counts
 // and that a term-frequency vector is available for the first document.
 public virtual void  TestDocument()
 {
     try
     {
         sis.Read(dir);
         MultiReader reader = new MultiReader(dir, sis, false, readers);
         Assert.IsTrue(reader != null);
         Document newDoc1 = reader.Document(0);
         Assert.IsTrue(newDoc1 != null);
         // NOTE(review): the "- 2" presumably accounts for two unstored
         // fields per document — confirm against DocHelper.
         Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - 2);
         Document newDoc2 = reader.Document(1);
         Assert.IsTrue(newDoc2 != null);
         Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - 2);
         TermFreqVector vector = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
         Assert.IsTrue(vector != null);
     }
     catch (System.IO.IOException e)
     {
         System.Console.Error.WriteLine(e.StackTrace);
         Assert.IsTrue(false);
     }
 }
コード例 #16
0
ファイル: Test2BDocs.cs プロジェクト: Cefa68000/lucenenet
 // Builds a MultiReader whose combined document count lands exactly on
 // int.MaxValue and checks MaxDoc/NumDocs report that value.
 public virtual void TestExactlyAtLimit()
 {
     Directory dir2 = NewFSDirectory(CreateTempDir("2BDocs2"));
     IndexWriter iw = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
     Document doc = new Document();
     // 262,143 (= 2^18 - 1) empty documents in the second index.
     for (int i = 0; i < 262143; i++)
     {
         iw.AddDocument(doc);
     }
     iw.Dispose();
     DirectoryReader ir = DirectoryReader.Open(Dir);
     DirectoryReader ir2 = DirectoryReader.Open(dir2);
     // 8191 copies of the big reader plus the small one; sized so the
     // totals below hit int.MaxValue exactly.
     IndexReader[] subReaders = new IndexReader[8192];
     Arrays.Fill(subReaders, ir);
     subReaders[subReaders.Length - 1] = ir2;
     MultiReader mr = new MultiReader(subReaders);
     Assert.AreEqual(int.MaxValue, mr.MaxDoc);
     Assert.AreEqual(int.MaxValue, mr.NumDocs);
     ir.Dispose();
     ir2.Dispose();
     dir2.Dispose();
 }
コード例 #17
0
        // Mixing a TermEnum from one MultiReader with a TermDocs from another:
        // Seek must handle an enum that came from a different reader.
        public virtual void  TestMultiTermDocs()
        {
            RAMDirectory ramDir1 = new RAMDirectory();

            AddDoc(ramDir1, "test foo", true);
            RAMDirectory ramDir2 = new RAMDirectory();

            AddDoc(ramDir2, "test blah", true);
            RAMDirectory ramDir3 = new RAMDirectory();

            AddDoc(ramDir3, "test wow", true);

            // readers2 covers a superset of readers1's directories.
            IndexReader[] readers1 = new IndexReader[] { IndexReader.Open(ramDir1), IndexReader.Open(ramDir3) };
            IndexReader[] readers2 = new IndexReader[] { IndexReader.Open(ramDir1), IndexReader.Open(ramDir2), IndexReader.Open(ramDir3) };
            MultiReader   mr2      = new MultiReader(readers1);
            MultiReader   mr3      = new MultiReader(readers2);

            // test mixing up TermDocs and TermEnums from different readers.
            TermDocs td2 = mr2.TermDocs();
            TermEnum te3 = mr3.Terms(new Term("body", "wow"));

            td2.Seek(te3);
            int ret = 0;

            // This should blow up if we forget to check that the TermEnum is from the same
            // reader as the TermDocs.
            while (td2.Next())
            {
                ret += td2.Doc();
            }
            td2.Close();
            te3.Close();

            // really a dummy assert to ensure that we got some docs and to ensure that
            // nothing is optimized out.
            Assert.IsTrue(ret > 0);
            // NOTE(review): mr2/mr3 and the underlying readers are never closed
            // here — possible fixture leak; confirm teardown handles it.
        }
コード例 #18
0
        // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
        private IndexSearcher Parallel(Random random, bool compositeComposite)
        {
            Dir1 = GetDir1(random);
            Dir2 = GetDir2(random);
            CompositeReader rd1, rd2;

            if (compositeComposite)
            {
                rd1 = new MultiReader(DirectoryReader.Open(Dir1), DirectoryReader.Open(Dir1));
                rd2 = new MultiReader(DirectoryReader.Open(Dir2), DirectoryReader.Open(Dir2));
                Assert.AreEqual(2, rd1.Context.Children.Count);
                Assert.AreEqual(2, rd2.Context.Children.Count);
            }
            else
            {
                rd1 = DirectoryReader.Open(Dir1);
                rd2 = DirectoryReader.Open(Dir2);
                Assert.AreEqual(3, rd1.Context.Children.Count);
                Assert.AreEqual(3, rd2.Context.Children.Count);
            }
            ParallelCompositeReader pr = new ParallelCompositeReader(rd1, rd2);

            return(NewSearcher(pr));
        }
コード例 #19
0
		// Builds a MultiReader over readers from two separate directories and
		// runs the shared default test suite against it.
		public virtual void  TestMixedReaders()
		{
			Directory dir1 = new MockRAMDirectory();
			TestIndexReaderReopen.CreateIndex(dir1, true);
			Directory dir2 = new MockRAMDirectory();
			TestIndexReaderReopen.CreateIndex(dir2, true);
			// NOTE(review): the boolean presumably is the readOnly flag of the
			// legacy IndexReader.Open overload — confirm against its signature.
            IndexReader r1 = IndexReader.Open(dir1, false);
            IndexReader r2 = IndexReader.Open(dir2, false);
			
			MultiReader multiReader = new MultiReader(new IndexReader[]{r1, r2});
			PerformDefaultTests(multiReader);
			multiReader.Close();
			dir1.Close();
			dir2.Close();
		}
コード例 #20
0
        // Mixing a TermsEnum from one MultiReader with a DocsEnum reused from
        // another: the 4.x flex API must handle the cross-reader reuse.
        public virtual void TestMultiTermDocs()
        {
            Directory ramDir1 = NewDirectory();
            AddDoc(Random(), ramDir1, "test foo", true);
            Directory ramDir2 = NewDirectory();
            AddDoc(Random(), ramDir2, "test blah", true);
            Directory ramDir3 = NewDirectory();
            AddDoc(Random(), ramDir3, "test wow", true);

            // readers2 covers a superset of readers1's directories.
            IndexReader[] readers1 = new IndexReader[] { DirectoryReader.Open(ramDir1), DirectoryReader.Open(ramDir3) };
            IndexReader[] readers2 = new IndexReader[] { DirectoryReader.Open(ramDir1), DirectoryReader.Open(ramDir2), DirectoryReader.Open(ramDir3) };
            MultiReader mr2 = new MultiReader(readers1);
            MultiReader mr3 = new MultiReader(readers2);

            // test mixing up TermDocs and TermEnums from different readers.
            TermsEnum te2 = MultiFields.GetTerms(mr2, "body").Iterator(null);
            te2.SeekCeil(new BytesRef("wow"));
            DocsEnum td = TestUtil.Docs(Random(), mr2, "body", te2.Term(), MultiFields.GetLiveDocs(mr2), null, 0);

            // Reuse 'td' (obtained from mr2) while iterating terms of mr3.
            TermsEnum te3 = MultiFields.GetTerms(mr3, "body").Iterator(null);
            te3.SeekCeil(new BytesRef("wow"));
            td = TestUtil.Docs(Random(), te3, MultiFields.GetLiveDocs(mr3), td, 0);

            int ret = 0;

            // this should blow up if we forget to check that the TermEnum is from the same
            // reader as the TermDocs.
            while (td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
            {
                ret += td.DocID();
            }

            // really a dummy assert to ensure that we got some docs and to ensure that
            // nothing is eliminated by hotspot
            Assert.IsTrue(ret > 0);
            readers1[0].Dispose();
            readers1[1].Dispose();
            readers2[0].Dispose();
            readers2[1].Dispose();
            readers2[2].Dispose();
            ramDir1.Dispose();
            ramDir2.Dispose();
            ramDir3.Dispose();
        }
コード例 #21
0
		// Tracks subreader reference counts through MultiReader construction,
		// reopen, and (repeated) close. mode 0: the MultiReader does not close
		// its subreaders; mode 1: it does (the "+ mode" terms below account for
		// the extra reference that difference implies).
		public virtual void  TestReferenceCountingMultiReader()
		{
			for (int mode = 0; mode <= 1; mode++)
			{
				Directory dir1 = new MockRAMDirectory();
				CreateIndex(dir1, false);
				Directory dir2 = new MockRAMDirectory();
				CreateIndex(dir2, true);
				
				IndexReader reader1 = IndexReader.Open(dir1);
				AssertRefCountEquals(1, reader1);
				
				IndexReader initReader2 = IndexReader.Open(dir2);
				// (mode == 0) => closeSubReaders flag passed to MultiReader.
				IndexReader multiReader1 = new MultiReader(new IndexReader[]{reader1, initReader2}, (mode == 0));
				ModifyIndex(0, dir2);
				AssertRefCountEquals(1 + mode, reader1);
				
				IndexReader multiReader2 = multiReader1.Reopen();
				// index1 hasn't changed, so multiReader2 should share reader1 now with multiReader1
				AssertRefCountEquals(2 + mode, reader1);
				
				ModifyIndex(0, dir1);
				IndexReader reader2 = reader1.Reopen();
				AssertRefCountEquals(2 + mode, reader1);
				
				if (mode == 1)
				{
					initReader2.Close();
				}
				
				ModifyIndex(1, dir1);
				IndexReader reader3 = reader2.Reopen();
				AssertRefCountEquals(2 + mode, reader1);
				AssertRefCountEquals(1, reader2);
				
				multiReader1.Close();
				AssertRefCountEquals(1 + mode, reader1);
				
				// Closing the same reader again must be a no-op for refcounts.
				multiReader1.Close();
				AssertRefCountEquals(1 + mode, reader1);
				
				if (mode == 1)
				{
					initReader2.Close();
				}
				
				reader1.Close();
				AssertRefCountEquals(1, reader1);
				
				multiReader2.Close();
				AssertRefCountEquals(0, reader1);
				
				// Double-close again: refcount must stay at zero.
				multiReader2.Close();
				AssertRefCountEquals(0, reader1);
				
				reader3.Close();
				AssertRefCountEquals(0, reader1);
				AssertReaderClosed(reader1, true, false);
				
				reader2.Close();
				AssertRefCountEquals(0, reader1);
				AssertReaderClosed(reader1, true, false);
				
				reader2.Close();
				AssertRefCountEquals(0, reader1);
				
				reader3.Close();
				AssertRefCountEquals(0, reader1);
				AssertReaderClosed(reader1, true, true);
				dir1.Close();
				dir2.Close();
			}
		}
コード例 #22
0
			// Builds a nested composite: a MultiReader containing a
			// ParallelReader (dir1+dir2), a MultiReader (dir3+dir4), and a
			// plain reader over dir5.
			protected internal override IndexReader OpenReader()
			{
				ParallelReader pr = new ParallelReader();
				pr.Add(IndexReader.Open(dir1));
				pr.Add(IndexReader.Open(dir2));
				MultiReader mr = new MultiReader(new IndexReader[]{IndexReader.Open(dir3), IndexReader.Open(dir4)});
				return new MultiReader(new IndexReader[]{pr, mr, IndexReader.Open(dir5)});
			}
コード例 #23
0
 // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
 // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
 private IndexSearcher Parallel(Random random, bool compositeComposite)
 {
     Dir1 = GetDir1(random);
     Dir2 = GetDir2(random);
     CompositeReader rd1, rd2;
     if (compositeComposite)
     {
         // Each side is a MultiReader over the same directory opened twice,
         // giving two children per composite.
         rd1 = new MultiReader(DirectoryReader.Open(Dir1), DirectoryReader.Open(Dir1));
         rd2 = new MultiReader(DirectoryReader.Open(Dir2), DirectoryReader.Open(Dir2));
         Assert.AreEqual(2, rd1.Context.Children.Count);
         Assert.AreEqual(2, rd2.Context.Children.Count);
     }
     else
     {
         rd1 = DirectoryReader.Open(Dir1);
         rd2 = DirectoryReader.Open(Dir2);
         // Plain DirectoryReaders are expected to have three segments here.
         Assert.AreEqual(3, rd1.Context.Children.Count);
         Assert.AreEqual(3, rd2.Context.Children.Count);
     }
     ParallelCompositeReader pr = new ParallelCompositeReader(rd1, rd2);
     return NewSearcher(pr);
 }
コード例 #24
0
        // Fields 1-4 indexed together:
        private IndexSearcher Single(Random random, bool compositeComposite)
        {
            Dir = NewDirectory();
            IndexWriter w = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
            Document d1 = new Document();
            d1.Add(NewTextField("f1", "v1", Field.Store.YES));
            d1.Add(NewTextField("f2", "v1", Field.Store.YES));
            d1.Add(NewTextField("f3", "v1", Field.Store.YES));
            d1.Add(NewTextField("f4", "v1", Field.Store.YES));
            w.AddDocument(d1);
            Document d2 = new Document();
            d2.Add(NewTextField("f1", "v2", Field.Store.YES));
            d2.Add(NewTextField("f2", "v2", Field.Store.YES));
            d2.Add(NewTextField("f3", "v2", Field.Store.YES));
            d2.Add(NewTextField("f4", "v2", Field.Store.YES));
            w.AddDocument(d2);
            Document d3 = new Document();
            d3.Add(NewTextField("f1", "v3", Field.Store.YES));
            d3.Add(NewTextField("f2", "v3", Field.Store.YES));
            d3.Add(NewTextField("f3", "v3", Field.Store.YES));
            d3.Add(NewTextField("f4", "v3", Field.Store.YES));
            w.AddDocument(d3);
            Document d4 = new Document();
            d4.Add(NewTextField("f1", "v4", Field.Store.YES));
            d4.Add(NewTextField("f2", "v4", Field.Store.YES));
            d4.Add(NewTextField("f3", "v4", Field.Store.YES));
            d4.Add(NewTextField("f4", "v4", Field.Store.YES));
            w.AddDocument(d4);
            w.Dispose();

            CompositeReader ir;
            if (compositeComposite)
            {
                ir = new MultiReader(DirectoryReader.Open(Dir), DirectoryReader.Open(Dir));
            }
            else
            {
                ir = DirectoryReader.Open(Dir);
            }
            return NewSearcher(ir);
        }
コード例 #25
0
		// IsCurrent must go stale when either underlying index changes, and
		// GetVersion must throw on a reader spanning multiple indexes.
		public virtual void  TestIsCurrent()
		{
			RAMDirectory ramDir1 = new RAMDirectory();
			AddDoc(ramDir1, "test foo", true);
			RAMDirectory ramDir2 = new RAMDirectory();
			AddDoc(ramDir2, "test blah", true);
			IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir2)};
			MultiReader mr = new MultiReader(readers);
			Assert.IsTrue(mr.IsCurrent()); // just opened, must be current
			AddDoc(ramDir1, "more text", false);
			Assert.IsFalse(mr.IsCurrent()); // has been modified, not current anymore
			AddDoc(ramDir2, "even more text", false);
			Assert.IsFalse(mr.IsCurrent()); // has been modified even more, not current anymore
			try
			{
				mr.GetVersion();
				Assert.Fail();
			}
			catch (System.NotSupportedException e)
			{
				// expected: no single version exists across multiple indexes
			}
			mr.Close();
		}
コード例 #26
0
        // TermsFilter over a MultiReader where only one sub-index contains the
        // filtered field: leaves without the term must yield a null DocIdSet.
        public void TestMissingField()
        {
            string fieldName = "field1";
            Directory rd1 = NewDirectory();
            RandomIndexWriter w1 = new RandomIndexWriter(Random(), rd1, Similarity, TimeZone);
            Document doc = new Document();
            doc.Add(NewStringField(fieldName, "content1", Field.Store.YES));
            w1.AddDocument(doc);
            IndexReader reader1 = w1.Reader;
            w1.Dispose();

            // Second index uses a different field name entirely.
            fieldName = "field2";
            Directory rd2 = NewDirectory();
            RandomIndexWriter w2 = new RandomIndexWriter(Random(), rd2, Similarity, TimeZone);
            doc = new Document();
            doc.Add(NewStringField(fieldName, "content2", Field.Store.YES));
            w2.AddDocument(doc);
            IndexReader reader2 = w2.Reader;
            w2.Dispose();

            TermsFilter tf = new TermsFilter(new Term(fieldName, "content1"));
            MultiReader multi = new MultiReader(reader1, reader2);
            foreach (AtomicReaderContext context in multi.Leaves)
            {
                DocIdSet docIdSet = tf.GetDocIdSet(context, context.AtomicReader.LiveDocs);
                if (context.Reader.DocFreq(new Term(fieldName, "content1")) == 0)
                {
                    // Term absent from this leaf: filter must return null, not empty.
                    assertNull(docIdSet);
                }
                else
                {
                    FixedBitSet bits = (FixedBitSet)docIdSet;
                    assertTrue("Must be >= 0", bits.Cardinality() >= 0);
                }
            }
            multi.Dispose();
            reader1.Dispose();
            reader2.Dispose();
            rd1.Dispose();
            rd2.Dispose();
        }
コード例 #27
0
		/// <summary>
		/// Tries to reopen the subreaders.
		/// <para/>
		/// If one or more subreaders could be re-opened (i.e. subReader.Reopen()
		/// returned a new instance != subReader), then a new MultiReader instance
		/// is returned; otherwise this instance is returned.
		/// <para/>
		/// A re-opened instance might share one or more subreaders with the old
		/// instance. Index modification operations result in undefined behavior
		/// when performed before the old instance is closed
		/// (see <see cref="IndexReader.Reopen()"/>).
		/// <para/>
		/// If subreaders are shared, then the reference count of those
		/// readers is increased to ensure that the subreaders remain open
		/// until the last referring reader is closed.
		/// </summary>
		/// <throws>  CorruptIndexException if the index is corrupt </throws>
		/// <throws>  IOException if there is a low-level IO error  </throws>
		public override IndexReader Reopen()
		{
			EnsureOpen();
			
			bool reopened = false;
			IndexReader[] newSubReaders = new IndexReader[subReaders.Length];
			bool[] newDecrefOnClose = new bool[subReaders.Length];
			
			bool success = false;
			try
			{
				for (int i = 0; i < subReaders.Length; i++)
				{
					newSubReaders[i] = subReaders[i].Reopen();
					// if at least one of the subreaders was updated we remember that
					// and return a new MultiReader
					if (newSubReaders[i] != subReaders[i])
					{
						reopened = true;
						// this is a new subreader instance, so on close() we don't
						// decRef but close it 
						newDecrefOnClose[i] = false;
					}
				}
				
				if (reopened)
				{
					// Second pass: any subreader that did NOT change is shared
					// with this instance, so bump its refcount and mark it for
					// decRef (not close) when the new MultiReader closes.
					for (int i = 0; i < subReaders.Length; i++)
					{
						if (newSubReaders[i] == subReaders[i])
						{
							newSubReaders[i].IncRef();
							newDecrefOnClose[i] = true;
						}
					}
					
					MultiReader mr = new MultiReader(newSubReaders);
					mr.decrefOnClose = newDecrefOnClose;
					success = true;
					return mr;
				}
				else
				{
					// Nothing changed: caller keeps using this instance.
					success = true;
					return this;
				}
			}
			finally
			{
				// On failure, undo whatever was opened/incRef'd so far.
				if (!success && reopened)
				{
					for (int i = 0; i < newSubReaders.Length; i++)
					{
						if (newSubReaders[i] != null)
						{
							try
							{
								if (newDecrefOnClose[i])
								{
									newSubReaders[i].DecRef();
								}
								else
								{
									newSubReaders[i].Close();
								}
							}
							catch (System.IO.IOException ignore)
							{
								// keep going - we want to clean up as much as possible
							}
						}
					}
				}
			}
		}
コード例 #28
0
ファイル: TestMultiReader.cs プロジェクト: runefs/Marvin
		// Smoke test: constructing a MultiReader over the fixture's directory,
		// segment infos, and readers must not throw.
		public virtual void  TestTermVectors()
		{
			try
			{
				MultiReader reader = new MultiReader(dir, sis, false, readers);
				Assert.IsTrue(reader != null);
			}
			catch (System.IO.IOException e)
			{
                System.Console.Error.WriteLine(e.StackTrace);
				Assert.IsTrue(false);
			}
		}
コード例 #29
0
        /// <summary>
        /// Runs a fuzzy full-text search across all non-incremental indexes.
        /// </summary>
        /// <param name="query">Receives the executed query (null if the search failed).</param>
        /// <returns>The matching hits, or null if any step failed.</returns>
        public static Hits FuzzySearchEx(out Query query)
        {
            Hits hits = null;
            query = null;
            try
            {
                List<IndexReader> readerList = new List<IndexReader>();
                // Prefer the explicitly selected indexes; otherwise fall back
                // to every configured index. Incremental indexes are skipped.
                if (searchIndexList.Count > 0)
                {
                    foreach (IndexSet indexSet in searchIndexList)
                    {
                        if (indexSet.Type == IndexTypeEnum.Increment)
                            continue;
                        readerList.Add(IndexReader.Open(indexSet.Path));
                    }
                }
                else
                {
                    foreach (IndexSet indexSet in indexFieldsDict.Keys)
                    {
                        if (indexSet.Type == IndexTypeEnum.Increment)
                            continue;
                        readerList.Add(IndexReader.Open(indexSet.Path));
                    }
                }
                MultiReader multiReader = new MultiReader(readerList.ToArray());
                // NOTE(review): the searcher/readers are not disposed here,
                // presumably because the returned Hits stay bound to them —
                // confirm ownership is handled by the caller.
                IndexSearcher searcher = new IndexSearcher(multiReader);
                query = GetQuery();
#if DEBUG
                System.Console.WriteLine(query.ToString());
#endif
                SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, query.ToString());
                hits = searcher.Search(query);
            }
            catch (Exception e)
            {
                // e.StackTrace is already a string and may be null; the old
                // ".ToString()" call could itself throw NullReferenceException.
                SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace);
            }
            return hits;
        }
コード例 #30
0
ファイル: MultiReader.cs プロジェクト: sinsay/SSE
        /// <summary>
        /// Reopens (or, when <paramref name="doClone"/> is true, clones) every
        /// subreader; if none changed, this same instance is returned.
        /// </summary>
        /// <param name="doClone">If true each subreader is cloned instead of reopened.</param>
        /// <returns> New IndexReader, or same one (this) if
        /// reopen/clone is not necessary
        /// </returns>
        /// <throws>  CorruptIndexException </throws>
        /// <throws>  IOException </throws>
        protected internal virtual IndexReader DoReopen(bool doClone)
        {
            EnsureOpen();

            bool reopened = false;
            IndexReader[] newSubReaders = new IndexReader[subReaders.Length];

            bool success = false;
            try
            {
                for (int i = 0; i < subReaders.Length; i++)
                {
                    if (doClone)
                        newSubReaders[i] = (IndexReader) subReaders[i].Clone();
                    else
                        newSubReaders[i] = subReaders[i].Reopen();
                    // if at least one of the subreaders was updated we remember that
                    // and return a new MultiReader
                    if (newSubReaders[i] != subReaders[i])
                    {
                        reopened = true;
                    }
                }
                success = true;
            }
            finally
            {
                // On failure, close only the subreaders that were actually replaced;
                // unchanged entries are still owned by this reader and must stay open.
                if (!success && reopened)
                {
                    for (int i = 0; i < newSubReaders.Length; i++)
                    {
                        if (newSubReaders[i] != subReaders[i])
                        {
                            try
                            {
                                newSubReaders[i].Close();
                            }
                            catch (System.IO.IOException ignore)
                            {
                                // keep going - we want to clean up as much as possible
                            }
                        }
                    }
                }
            }

            if (reopened)
            {
                // Subreaders shared with the old MultiReader get an extra reference,
                // so the new reader DecRefs (rather than Closes) them on close.
                bool[] newDecrefOnClose = new bool[subReaders.Length];
                for (int i = 0; i < subReaders.Length; i++)
                {
                    if (newSubReaders[i] == subReaders[i])
                    {
                        newSubReaders[i].IncRef();
                        newDecrefOnClose[i] = true;
                    }
                }
                MultiReader mr = new MultiReader(newSubReaders);
                mr.decrefOnClose = newDecrefOnClose;
                mr.SetDisableFakeNorms(GetDisableFakeNorms());
                return mr;
            }
            else
            {
                return this;
            }
        }
コード例 #31
0
        /// <summary>Smoke test: a MultiReader over the shared subreaders can be built.</summary>
        public virtual void  _testTermVectors()
        {
            MultiReader multi = new MultiReader(readers);
            Assert.IsNotNull(multi);
        }
コード例 #32
0
        /// <summary>
        /// Indexes overlapping terms into two directories and searches them
        /// through one MultiReader to check SlowFuzzyQuery hit counting across
        /// sub-indexes (the tie-breaker case).
        /// </summary>
        public void TestTieBreaker()
        {
            Directory dirA = NewDirectory();
            RandomIndexWriter writerA = new RandomIndexWriter(Random(), dirA, Similarity, TimeZone);
            addDoc("a123456", writerA);
            addDoc("c123456", writerA);
            addDoc("d123456", writerA);
            addDoc("e123456", writerA);

            Directory dirB = NewDirectory();
            RandomIndexWriter writerB = new RandomIndexWriter(Random(), dirB, Similarity, TimeZone);
            addDoc("a123456", writerB);
            addDoc("b123456", writerB);
            addDoc("b123456", writerB);
            addDoc("b123456", writerB);
            addDoc("c123456", writerB);
            addDoc("f123456", writerB);

            IndexReader readerA = writerA.Reader;
            IndexReader readerB = writerB.Reader;

            MultiReader multi = new MultiReader(readerA, readerB);
            IndexSearcher searcher = NewSearcher(multi);
            SlowFuzzyQuery fuzzy = new SlowFuzzyQuery(new Term("field", "z123456"), 1f, 0, 2);
            TopDocs docs = searcher.Search(fuzzy, 2);
            assertEquals(5, docs.TotalHits); // 5 docs, from the a and b's
            multi.Dispose();
            readerA.Dispose();
            readerB.Dispose();
            writerA.Dispose();
            writerB.Dispose();
            dirA.Dispose();
            dirB.Dispose();
        }
コード例 #33
0
        /// <summary>
        /// Verifies that ParallelCompositeReader rejects composite readers whose
        /// subreader structures differ, and that failed construction does not
        /// leak references on the inputs.
        /// </summary>
        public virtual void TestIncompatibleIndexes3()
        {
            Directory dir1 = GetDir1(Random());
            Directory dir2 = GetDir2(Random());

            // ir1 mixes a plain composite with a "slow" wrapped one, while ir2
            // wraps two plain composites, so their subreader structures differ.
            CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1))), ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));
            CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
            try
            {
                new ParallelCompositeReader(readers);
                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
            }
            // Exception variable was unused (CS0168); catch by type only.
            catch (System.ArgumentException)
            {
                // expected exception
            }
            try
            {
                new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
                Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
            }
            catch (System.ArgumentException)
            {
                // expected exception
            }
            // Failed constructions must leave the callers' references intact.
            Assert.AreEqual(1, ir1.RefCount);
            Assert.AreEqual(1, ir2.RefCount);
            ir1.Dispose();
            ir2.Dispose();
            Assert.AreEqual(0, ir1.RefCount);
            Assert.AreEqual(0, ir2.RefCount);
            dir1.Dispose();
            dir2.Dispose();
        }
コード例 #34
0
        /// <summary>
        /// Reopens (or, when <paramref name="doClone"/> is true, clones) every
        /// subreader; if none changed, this same instance is returned.
        /// </summary>
        /// <param name="doClone">If true each subreader is cloned instead of reopened.</param>
        /// <returns> New IndexReader, or same one (this) if
        /// reopen/clone is not necessary
        /// </returns>
        /// <throws>  CorruptIndexException </throws>
        /// <throws>  IOException </throws>
        protected internal virtual IndexReader DoReopen(bool doClone)
        {
            EnsureOpen();

            bool reopened = false;

            IndexReader[] newSubReaders = new IndexReader[subReaders.Length];

            bool success = false;

            try
            {
                for (int i = 0; i < subReaders.Length; i++)
                {
                    if (doClone)
                    {
                        newSubReaders[i] = (IndexReader)subReaders[i].Clone();
                    }
                    else
                    {
                        newSubReaders[i] = subReaders[i].Reopen();
                    }
                    // if at least one of the subreaders was updated we remember that
                    // and return a new MultiReader
                    if (newSubReaders[i] != subReaders[i])
                    {
                        reopened = true;
                    }
                }
                success = true;
            }
            finally
            {
                // On failure, close only the subreaders that were actually replaced;
                // unchanged entries are still owned by this reader and must stay open.
                if (!success && reopened)
                {
                    for (int i = 0; i < newSubReaders.Length; i++)
                    {
                        if (newSubReaders[i] != subReaders[i])
                        {
                            try
                            {
                                newSubReaders[i].Close();
                            }
                            catch (System.IO.IOException ignore)
                            {
                                // keep going - we want to clean up as much as possible
                            }
                        }
                    }
                }
            }

            if (reopened)
            {
                // Subreaders shared with the old MultiReader get an extra reference,
                // so the new reader DecRefs (rather than Closes) them on close.
                bool[] newDecrefOnClose = new bool[subReaders.Length];
                for (int i = 0; i < subReaders.Length; i++)
                {
                    if (newSubReaders[i] == subReaders[i])
                    {
                        newSubReaders[i].IncRef();
                        newDecrefOnClose[i] = true;
                    }
                }
                MultiReader mr = new MultiReader(newSubReaders);
                mr.decrefOnClose = newDecrefOnClose;
                mr.SetDisableFakeNorms(GetDisableFakeNorms());
                return(mr);
            }
            else
            {
                return(this);
            }
        }
コード例 #35
0
        /// <summary>
        /// Command-line entry point for MultiPassIndexSplitter: reads one or
        /// more input indexes and writes <c>numParts</c> partial indexes under
        /// the directory given by <c>-out</c>.
        /// </summary>
        /// <param name="args">-out &lt;outputDir&gt; -num &lt;numParts&gt; [-seq] followed by input index paths.</param>
        public static void Main(string[] args)
        {
            if (args.Length < 5)
            {
                // Fixed "ouputDir" -> "outputDir" typo in the two usage lines below.
                Console.Error.WriteLine("Usage: MultiPassIndexSplitter -out <outputDir> -num <numParts> [-seq] <inputIndex1> [<inputIndex2 ...]");
                Console.Error.WriteLine("\tinputIndex\tpath to input index, multiple values are ok");
                Console.Error.WriteLine("\t-out outputDir\tpath to output directory to contain partial indexes");
                Console.Error.WriteLine("\t-num numParts\tnumber of parts to produce");
                Console.Error.WriteLine("\t-seq\tsequential docid-range split (default is round-robin)");
                Environment.Exit(-1);
            }
            List <IndexReader> indexes = new List <IndexReader>();
            string             outDir  = null;
            int  numParts = -1;
            bool seq      = false;

            for (int i = 0; i < args.Length; i++)
            {
                if (args[i].Equals("-out"))
                {
                    outDir = args[++i];
                }
                else if (args[i].Equals("-num"))
                {
                    numParts = Convert.ToInt32(args[++i]);
                }
                else if (args[i].Equals("-seq"))
                {
                    seq = true;
                }
                else
                {
                    // Anything that is not a recognized flag is an input index path.
                    DirectoryInfo file = new DirectoryInfo(args[i]);
                    if (!file.Exists)
                    {
                        Console.Error.WriteLine("Invalid input path - skipping: " + file);
                        continue;
                    }
                    Store.Directory dir = FSDirectory.Open(new DirectoryInfo(args[i]));
                    try
                    {
                        if (!DirectoryReader.IndexExists(dir))
                        {
                            Console.Error.WriteLine("Invalid input index - skipping: " + file);
                            continue;
                        }
                    }
                    catch (Exception)
                    {
                        // Unreadable/corrupt directory: skip it, keep processing the rest.
                        Console.Error.WriteLine("Invalid input index - skipping: " + file);
                        continue;
                    }
                    indexes.Add(DirectoryReader.Open(dir));
                }
            }
            if (outDir == null)
            {
                throw new Exception("Required argument missing: -out outputDir");
            }
            if (numParts < 2)
            {
                throw new Exception("Invalid value of required argument: -num numParts");
            }
            if (indexes.Count == 0)
            {
                throw new Exception("No input indexes to process");
            }
            DirectoryInfo @out = new DirectoryInfo(outDir);

            @out.Create();
            // Re-stat via a fresh DirectoryInfo: Exists is cached at construction time.
            if (!new DirectoryInfo(outDir).Exists)
            {
                throw new Exception("Can't create output directory: " + @out);
            }
            Store.Directory[] dirs = new Store.Directory[numParts];
            for (int i = 0; i < numParts; i++)
            {
                dirs[i] = FSDirectory.Open(new DirectoryInfo(Path.Combine(@out.FullName, "part-" + i)));
            }
            MultiPassIndexSplitter splitter = new MultiPassIndexSplitter();
            IndexReader            input;

            // A single input is used directly; multiple inputs are merged via MultiReader.
            if (indexes.Count == 1)
            {
                input = indexes[0];
            }
            else
            {
                input = new MultiReader(indexes.ToArray());
            }
#pragma warning disable 612, 618
            splitter.Split(LuceneVersion.LUCENE_CURRENT, input, dirs, seq);
#pragma warning restore 612, 618
        }
コード例 #36
0
		/// <summary>Smoke test: a MultiReader over the shared subreaders can be built.</summary>
		public virtual void  _testTermVectors()
		{
			MultiReader multi = new MultiReader(readers);
			Assert.IsNotNull(multi);
		}
コード例 #37
0
ファイル: LuceneSearch.cs プロジェクト: cairabbit/daf
        /// <summary>
        /// Executes the prepared query across all configured index paths,
        /// optionally computing facet counts, and returns one page of hits.
        /// </summary>
        /// <param name="pageIndex">Zero-based page number; negative disables paging.</param>
        /// <param name="pageSize">Hits per page; non-positive disables paging.</param>
        /// <param name="totalCount">Receives the total number of matching documents.</param>
        /// <param name="facetedResults">Receives facet groups ordered by field name, or null when no facet fields are configured.</param>
        public IEnumerable<IHit> Query(int pageIndex, int pageSize, out int totalCount, out IEnumerable<FacetGroup> facetedResults)
        {
            totalCount = 0;
            facetedResults = null;

            // FIX: the original dereferenced searchPaths immediately after
            // finding it null, which threw NullReferenceException.
            if (searchPaths == null)
                searchPaths = new List<string>();
            if (searchPaths.Count <= 0)
                searchPaths.AddRange(indexPaths.Values.Select(o => o.Path));

            List<LuceneHit> results = new List<LuceneHit>();

            List<IndexSearcher> subSearchs = new List<IndexSearcher>();

            searchPaths.ForEach(o => subSearchs.Add(new IndexSearcher(FSDirectory.Open(o))));

            if (facetFields != null && facetFields.Count > 0)
            {
                var facetGroups = new List<FacetGroup>();
                var mainQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(query));
                MultiReader readers = new MultiReader(subSearchs.Select(o => o.IndexReader).ToArray());

                foreach (var facetField in facetFields)
                {
                    FacetGroup fg = new FacetGroup();
                    fg.FieldName = facetFieldNameProvider.GetMapName(TypeName, facetField);
                    var items = new List<FacetItem>();

                    var allDistinctField = FieldCache_Fields.DEFAULT.GetStrings(readers, facetField).Distinct().ToArray();
                    int totalHits = 0;
                    // Guards the shared accumulators mutated by the parallel loop below.
                    object sync = new object();

                    Parallel.ForEach(allDistinctField, fieldValue =>
                        {
                            var facetQuery = new TermQuery(new Term(facetField, fieldValue));
                            var facetQueryFilter = new CachingWrapperFilter(new QueryWrapperFilter(facetQuery));

                            // Count the docs matching both this facet value and the main query.
                            var bs = new OpenBitSetDISI(facetQueryFilter.GetDocIdSet(readers).Iterator(), readers.MaxDoc);
                            bs.InPlaceAnd(mainQueryFilter.GetDocIdSet(readers).Iterator());
                            int count = (Int32)bs.Cardinality();

                            FacetItem item = new FacetItem();
                            item.GroupValue = fieldValue;
                            item.Count = count;

                            // FIX: List<T>.Add and += are not thread-safe; the original
                            // raced inside Parallel.ForEach and could lose or corrupt
                            // facet items and the running total.
                            lock (sync)
                            {
                                items.Add(item);
                                totalHits += count;
                            }
                        }
                    );

                    fg.FacetItems = items.OrderByDescending(o => o.Count);
                    fg.TotalHits = totalHits;

                    facetGroups.Add(fg);
                }

                facetedResults = facetGroups.OrderBy(o => o.FieldName);
            }
            ParallelMultiSearcher searcher = new ParallelMultiSearcher(subSearchs.ToArray());
            Sort sort = null;
            if (sortFields != null && sortFields.Count > 0)
            {
                sort = new Sort(sortFields.ToArray());
            }

            int maxDoc = searcher.MaxDoc;
            int startIndex = 0;
            if (pageIndex >= 0 && pageSize > 0)
            {
                startIndex = pageIndex * pageSize;
                maxDoc = pageSize * (pageIndex + 1);
            }
            var docs = sort == null ? searcher.Search(query, null, maxDoc) : searcher.Search(query, null, maxDoc, sort);
            totalCount = docs.TotalHits;
            // FIX: the original used (TotalHits - startIndex) as the end index, which
            // both truncated pages and could index past ScoreDocs. Clamp to the page
            // end (maxDoc) and to what was actually retrieved.
            int endIndex = Math.Min(maxDoc, docs.ScoreDocs.Length);
            for (int i = startIndex; i < endIndex; i++)
            {
                LuceneHit h = new LuceneHit(TypeName, DocumentBuilder, searcher.Doc(docs.ScoreDocs[i].Doc));
                results.Add(h);
            }
            return results;
        }
コード例 #38
0
		/// <summary>
		/// Seeks a TermDocs from one MultiReader with a TermEnum from a
		/// different MultiReader, exercising the cross-reader compatibility
		/// check in Seek().
		/// </summary>
		public virtual void  TestMultiTermDocs()
		{
			RAMDirectory dirFoo = new RAMDirectory();
			AddDoc(dirFoo, "test foo", true);
			RAMDirectory dirBlah = new RAMDirectory();
			AddDoc(dirBlah, "test blah", true);
			RAMDirectory dirWow = new RAMDirectory();
			AddDoc(dirWow, "test wow", true);
			
			IndexReader[] twoReaders = new IndexReader[]{IndexReader.Open(dirFoo), IndexReader.Open(dirWow)};
			IndexReader[] threeReaders = new IndexReader[]{IndexReader.Open(dirFoo), IndexReader.Open(dirBlah), IndexReader.Open(dirWow)};
			MultiReader multiTwo = new MultiReader(twoReaders);
			MultiReader multiThree = new MultiReader(threeReaders);
			
			// test mixing up TermDocs and TermEnums from different readers.
			TermDocs termDocs = multiTwo.TermDocs();
			TermEnum termEnum = multiThree.Terms(new Term("body", "wow"));
			termDocs.Seek(termEnum);
			int docSum = 0;
			
			// This should blow up if we forget to check that the TermEnum is from the same
			// reader as the TermDocs.
			while (termDocs.Next())
				docSum += termDocs.Doc();
			termDocs.Close();
			termEnum.Close();
			
			// really a dummy assert to ensure that we got some docs and to ensure that
			// nothing is optimized out.
			Assert.IsTrue(docSum > 0);
		}
コード例 #39
0
        /// <summary>
        /// IsCurrent() must be true right after opening and become false once
        /// any underlying directory is modified; Version must be unsupported.
        /// </summary>
        public virtual void  TestIsCurrent()
        {
            RAMDirectory dirOne = new RAMDirectory();
            AddDoc(dirOne, "test foo", true);
            RAMDirectory dirTwo = new RAMDirectory();
            AddDoc(dirTwo, "test blah", true);
            IndexReader[] subReaders = new IndexReader[]{IndexReader.Open(dirOne, false), IndexReader.Open(dirTwo, false)};
            MultiReader multi = new MultiReader(subReaders);
            Assert.IsTrue(multi.IsCurrent()); // just opened, must be current
            AddDoc(dirOne, "more text", false);
            Assert.IsFalse(multi.IsCurrent()); // has been modified, not current anymore
            AddDoc(dirTwo, "even more text", false);
            Assert.IsFalse(multi.IsCurrent()); // has been modified even more, not current anymore

            // A MultiReader spans several indexes and has no single version.
            Assert.Throws<NotSupportedException>(() => { var ver = multi.Version; });
            multi.Close();
        }
コード例 #40
0
        /// <summary>
        /// Stress test: builds a 1000-doc index with huge token counts, then
        /// stacks it 1000x (and the result 2000x) via MultiReader + AddIndexes
        /// to push total postings bytes past 2B and beyond.
        /// </summary>
        /// <param name="scheduler">Merge scheduler variant supplied by the test value source.</param>
        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")] IConcurrentMergeScheduler scheduler)
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPostingsBytes1"));

            // Disable throttling: this test moves far too much data for it.
            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }

            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
                         .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                         .SetRAMBufferSizeMB(256.0)
                         .SetMergeScheduler(scheduler)
                         .SetMergePolicy(NewLogMergePolicy(false, 10))
                         .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE);
            IndexWriter w = new IndexWriter(dir, config);

            MergePolicy mp = w.Config.MergePolicy;

            if (mp is LogByteSizeMergePolicy)
            {
                // 1 petabyte:
                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
            }

            Document  doc = new Document();
            FieldType ft  = new FieldType(TextField.TYPE_NOT_STORED);

            ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;
            ft.OmitNorms    = true;
            MyTokenStream tokenStream = new MyTokenStream();
            Field         field       = new Field("field", tokenStream, ft);

            doc.Add(field);

            const int numDocs = 1000;

            for (int i = 0; i < numDocs; i++)
            {
                if (i % 2 == 1) // trick blockPF's little optimization
                {
                    tokenStream.n = 65536;
                }
                else
                {
                    tokenStream.n = 65537;
                }
                w.AddDocument(doc);
            }
            w.ForceMerge(1);
            w.Dispose();

            DirectoryReader oneThousand = DirectoryReader.Open(dir);

            // 1000 copies of the same reader -> ~1M docs when merged.
            IndexReader[] subReaders = new IndexReader[1000];
            Arrays.Fill(subReaders, oneThousand);
            MultiReader          mr   = new MultiReader(subReaders);
            BaseDirectoryWrapper dir2 = NewFSDirectory(CreateTempDir("2BPostingsBytes2"));

            if (dir2 is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir2).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }
            IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));

            w2.AddIndexes(mr);
            w2.ForceMerge(1);
            w2.Dispose();
            oneThousand.Dispose();

            DirectoryReader oneMillion = DirectoryReader.Open(dir2);

            // 2000 copies of the million-doc index -> ~2B docs' worth of postings.
            subReaders = new IndexReader[2000];
            Arrays.Fill(subReaders, oneMillion);
            mr = new MultiReader(subReaders);
            BaseDirectoryWrapper dir3 = NewFSDirectory(CreateTempDir("2BPostingsBytes3"));

            if (dir3 is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir3).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }
            IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT, null));

            w3.AddIndexes(mr);
            w3.ForceMerge(1);
            w3.Dispose();
            oneMillion.Dispose();

            dir.Dispose();
            dir2.Dispose();
            dir3.Dispose();
        }
コード例 #41
0
ファイル: MultiReader.cs プロジェクト: ferrod20/tprilucene
        /// <summary>
        /// Tries to reopen the subreaders.
        /// <para>
        /// If one or more subreaders could be re-opened (i.e. subReader.Reopen()
        /// returned a new instance != subReader), then a new MultiReader instance
        /// is returned, otherwise this instance is returned.
        /// </para>
        /// <para>
        /// A re-opened instance might share one or more subreaders with the old
        /// instance. Index modification operations result in undefined behavior
        /// when performed before the old instance is closed
        /// (see <see cref="IndexReader.Reopen()"/>).
        /// </para>
        /// <para>
        /// If subreaders are shared, then the reference count of those
        /// readers is increased to ensure that the subreaders remain open
        /// until the last referring reader is closed.
        /// </para>
        /// </summary>
        /// <throws>  CorruptIndexException if the index is corrupt </throws>
        /// <throws>  IOException if there is a low-level IO error  </throws>
        public override IndexReader Reopen()
        {
            EnsureOpen();

            bool reopened = false;

            IndexReader[] newSubReaders    = new IndexReader[subReaders.Length];
            bool[]        newDecrefOnClose = new bool[subReaders.Length];

            bool success = false;

            try
            {
                for (int i = 0; i < subReaders.Length; i++)
                {
                    newSubReaders[i] = subReaders[i].Reopen();
                    // if at least one of the subreaders was updated we remember that
                    // and return a new MultiReader
                    if (newSubReaders[i] != subReaders[i])
                    {
                        reopened = true;
                        // this is a new subreader instance, so on close() we don't
                        // decRef but close it
                        newDecrefOnClose[i] = false;
                    }
                }

                if (reopened)
                {
                    // Shared (unchanged) subreaders get an extra reference so they
                    // survive until the last referring MultiReader is closed.
                    for (int i = 0; i < subReaders.Length; i++)
                    {
                        if (newSubReaders[i] == subReaders[i])
                        {
                            newSubReaders[i].IncRef();
                            newDecrefOnClose[i] = true;
                        }
                    }

                    MultiReader mr = new MultiReader(newSubReaders);
                    mr.decrefOnClose = newDecrefOnClose;
                    success          = true;
                    return(mr);
                }
                else
                {
                    success = true;
                    return(this);
                }
            }
            finally
            {
                // Failure path: undo whatever was created so far.
                // NOTE(review): if a mid-loop Reopen() throws, entries where Reopen()
                // returned the same instance have newDecrefOnClose[i] == false and are
                // Close()d here even though they are still shared with this reader -
                // verify against upstream Lucene whether that is intended.
                if (!success && reopened)
                {
                    for (int i = 0; i < newSubReaders.Length; i++)
                    {
                        if (newSubReaders[i] != null)
                        {
                            try
                            {
                                if (newDecrefOnClose[i])
                                {
                                    newSubReaders[i].DecRef();
                                }
                                else
                                {
                                    newSubReaders[i].Close();
                                }
                            }
                            catch (System.IO.IOException ignore)
                            {
                                // keep going - we want to clean up as much as possible
                            }
                        }
                    }
                }
            }
        }
コード例 #42
0
        /// <summary>
        /// Stress test: builds a 1000-doc index with huge token counts, then
        /// stacks it 1000x (and the result 2000x) via MultiReader + AddIndexes
        /// to push total postings bytes past 2B and beyond.
        /// </summary>
        /// <param name="scheduler">Merge scheduler variant supplied by the test value source.</param>
        public virtual void Test([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BPostingsBytes1"));
            // Disable throttling: this test moves far too much data for it.
            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }

            var config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
                            .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                            .SetRAMBufferSizeMB(256.0)
                            .SetMergeScheduler(scheduler)
                            .SetMergePolicy(NewLogMergePolicy(false, 10))
                            .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE);
            IndexWriter w = new IndexWriter(dir, config);

            MergePolicy mp = w.Config.MergePolicy;
            if (mp is LogByteSizeMergePolicy)
            {
                // 1 petabyte:
                ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1024 * 1024 * 1024;
            }

            Document doc = new Document();
            FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
            ft.IndexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;
            ft.OmitNorms = true;
            MyTokenStream tokenStream = new MyTokenStream();
            Field field = new Field("field", tokenStream, ft);
            doc.Add(field);

            const int numDocs = 1000;
            for (int i = 0; i < numDocs; i++)
            {
                if (i % 2 == 1) // trick blockPF's little optimization
                {
                    tokenStream.n = 65536;
                }
                else
                {
                    tokenStream.n = 65537;
                }
                w.AddDocument(doc);
            }
            w.ForceMerge(1);
            w.Dispose();

            DirectoryReader oneThousand = DirectoryReader.Open(dir);
            // 1000 copies of the same reader -> ~1M docs when merged.
            IndexReader[] subReaders = new IndexReader[1000];
            Arrays.Fill(subReaders, oneThousand);
            MultiReader mr = new MultiReader(subReaders);
            BaseDirectoryWrapper dir2 = NewFSDirectory(CreateTempDir("2BPostingsBytes2"));
            if (dir2 is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir2).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }
            IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
            w2.AddIndexes(mr);
            w2.ForceMerge(1);
            w2.Dispose();
            oneThousand.Dispose();

            DirectoryReader oneMillion = DirectoryReader.Open(dir2);
            // 2000 copies of the million-doc index -> ~2B docs' worth of postings.
            subReaders = new IndexReader[2000];
            Arrays.Fill(subReaders, oneMillion);
            mr = new MultiReader(subReaders);
            BaseDirectoryWrapper dir3 = NewFSDirectory(CreateTempDir("2BPostingsBytes3"));
            if (dir3 is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir3).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
            }
            IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
            w3.AddIndexes(mr);
            w3.ForceMerge(1);
            w3.Dispose();
            oneMillion.Dispose();

            dir.Dispose();
            dir2.Dispose();
            dir3.Dispose();
        }
コード例 #43
0
        /// <summary>
        /// Runs the configured query against every non-incremental index and
        /// returns the matching records at or above the minimum score.
        /// </summary>
        /// <param name="query">Receives the executed query, or null on failure.</param>
        public static List<SearchRecord> ExactFastSearch(out Query query)
        {
            List<SearchRecord> docList = new List<SearchRecord>();
            query = null;
            try
            {
                List<IndexReader> readerList = new List<IndexReader>();
                foreach (IndexSet indexSet in searchIndexList)
                {
                    // Incremental indexes are excluded from this search path.
                    if (indexSet.Type == IndexTypeEnum.Increment)
                        continue;
                    readerList.Add(IndexReader.Open(indexSet.Path));
                }
                MultiReader multiReader = new MultiReader(readerList.ToArray());
                IndexSearcher searcher = new IndexSearcher(multiReader);
                query = GetQuery();
#if DEBUG
                System.Console.WriteLine(query.ToString());
#endif
                // Retrieve up to MaxMatches hits, then filter by minimum score.
                TopDocs topDocs = searcher.Search(query.Weight(searcher), null, searchSet.MaxMatches);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                for (int i = 0; i < scoreDocs.Length; i++)
                {
                    Document doc = searcher.Doc(scoreDocs[i].doc);
                    float score = scoreDocs[i].score;
                    if (score < searchSet.MinScore)
                        continue;
                    // NOTE(review): doc is a Lucene Document added to a
                    // List<SearchRecord>; presumably an implicit conversion
                    // exists - verify, otherwise this cannot compile.
                    docList.Add(doc);
                }
            }
            catch (Exception e)
            {
                // Best-effort: log the failure and return whatever was collected.
                SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
            }
            return docList;
        }