/// <summary>
/// Verifies that <c>ParallelCompositeReader</c> rejects parallel readers whose
/// subreader structures differ (here: a MultiReader over an atomic-wrapped reader
/// vs. a MultiReader over two composite readers), for both constructor overloads,
/// and that the rejected readers' ref counts are untouched so they can be disposed.
/// </summary>
public virtual void TestIncompatibleIndexes3()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    // ir1 mixes a composite subreader with an atomic (slow-wrapped) one; ir2 has two composites.
    CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)));
    CompositeReader ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));
    CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
    try
    {
        new ParallelCompositeReader(readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (System.ArgumentException) // unused variable removed (CS0168)
    {
        // expected exception
    }
    try
    {
        new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (System.ArgumentException)
    {
        // expected exception
    }
    // The failed constructors must not have incRef'd or closed the readers.
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);
    ir1.Dispose();
    ir2.Dispose();
    Assert.AreEqual(0, ir1.RefCount);
    Assert.AreEqual(0, ir2.RefCount);
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Full-Text search engine search provider, used to search indexed documents.
/// </summary>
/// <param name="directoryIndexInfos">The array of directory information where the index files are located.</param>
public SearchProvider(DirectoryInfo[] directoryIndexInfos)
{
    // Declared outside the try so the catch can clean up readers opened before the failure.
    List<Lucene.Net.Index.IndexReader> readers = new List<IndexReader>();
    try
    {
        // Open one reader per index directory.
        foreach (DirectoryInfo item in directoryIndexInfos)
        {
            Lucene.Net.Store.Directory directory = FSDirectory.Open(item);
            Lucene.Net.Index.IndexReader reader = Lucene.Net.Index.DirectoryReader.Open(directory);
            readers.Add(reader);
        }
        // closeSubReaders = true: once constructed, the MultiReader owns the subreaders
        // and disposing _reader disposes all of them.
        _reader = new Lucene.Net.Index.MultiReader(readers.ToArray(), true);
    }
    catch (Exception)
    {
        // BUG FIX: the original disposed only _reader, which is still null when
        // DirectoryReader.Open or the MultiReader constructor throws — leaking
        // every reader opened so far. Dispose each opened subreader explicitly.
        foreach (Lucene.Net.Index.IndexReader reader in readers)
        {
            try
            {
                reader.Dispose();
            }
            catch
            {
                // Best-effort cleanup while propagating the original exception.
            }
        }
        if (_reader != null)
        {
            _reader.Dispose();
        }
        throw; // rethrow preserving the original stack trace
    }
}
/// <summary>
/// Verifies that <c>ParallelCompositeReader</c> rejects parallel readers whose
/// subreader structures differ (here: a MultiReader over an atomic-wrapped reader
/// vs. a MultiReader over two composite readers), for both constructor overloads,
/// and that the rejected readers' ref counts are untouched so they can be disposed.
/// </summary>
public virtual void TestIncompatibleIndexes3()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    // ir1 mixes a composite subreader with an atomic (slow-wrapped) one; ir2 has two composites.
    CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)));
    CompositeReader ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));
    CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
    try
    {
        new ParallelCompositeReader(readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (System.ArgumentException) // unused variable removed (CS0168)
    {
        // expected exception
    }
    try
    {
        new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (System.ArgumentException)
    {
        // expected exception
    }
    // The failed constructors must not have incRef'd or closed the readers.
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);
    ir1.Dispose();
    ir2.Dispose();
    Assert.AreEqual(0, ir1.RefCount);
    Assert.AreEqual(0, ir2.RefCount);
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Applies a TermsFilter over a MultiReader whose segments do not all contain the
/// filtered field: segments with zero doc-freq for the term must yield a null
/// DocIdSet, others a valid FixedBitSet.
/// </summary>
public void TestMissingField()
{
    // First index: one document with field1:content1.
    Directory firstDir = NewDirectory();
    RandomIndexWriter firstWriter = new RandomIndexWriter(Random(), firstDir, Similarity, TimeZone);
    Document document = new Document();
    document.Add(NewStringField("field1", "content1", Field.Store.YES));
    firstWriter.AddDocument(document);
    IndexReader firstReader = firstWriter.Reader;
    firstWriter.Dispose();

    // Second index: one document with field2:content2.
    Directory secondDir = NewDirectory();
    RandomIndexWriter secondWriter = new RandomIndexWriter(Random(), secondDir, Similarity, TimeZone);
    document = new Document();
    document.Add(NewStringField("field2", "content2", Field.Store.YES));
    secondWriter.AddDocument(document);
    IndexReader secondReader = secondWriter.Reader;
    secondWriter.Dispose();

    // The filter targets field2:content1 — absent from both segments (the first
    // lacks field2 entirely; the second holds content2), exercising the null path.
    TermsFilter filter = new TermsFilter(new Term("field2", "content1"));
    MultiReader combined = new MultiReader(firstReader, secondReader);
    foreach (AtomicReaderContext leaf in combined.Leaves)
    {
        DocIdSet matches = filter.GetDocIdSet(leaf, leaf.AtomicReader.LiveDocs);
        if (leaf.Reader.DocFreq(new Term("field2", "content1")) == 0)
        {
            assertNull(matches);
        }
        else
        {
            FixedBitSet bits = (FixedBitSet)matches;
            assertTrue("Must be >= 0", bits.Cardinality() >= 0);
        }
    }

    // Release resources in the same order as before: readers first, then directories.
    combined.Dispose();
    firstReader.Dispose();
    secondReader.Dispose();
    firstDir.Dispose();
    secondDir.Dispose();
}
/// <summary>
/// Runs a SlowFuzzyQuery over a MultiReader spanning two indexes and checks that
/// the total hit count aggregates matches from both underlying indexes.
/// </summary>
public void TestTieBreaker()
{
    // First index: four distinct terms.
    Directory firstDir = NewDirectory();
    RandomIndexWriter firstWriter = new RandomIndexWriter(Random(), firstDir, Similarity, TimeZone);
    addDoc("a123456", firstWriter);
    addDoc("c123456", firstWriter);
    addDoc("d123456", firstWriter);
    addDoc("e123456", firstWriter);

    // Second index: six docs, including three duplicates of b123456.
    Directory secondDir = NewDirectory();
    RandomIndexWriter secondWriter = new RandomIndexWriter(Random(), secondDir, Similarity, TimeZone);
    addDoc("a123456", secondWriter);
    addDoc("b123456", secondWriter);
    addDoc("b123456", secondWriter);
    addDoc("b123456", secondWriter);
    addDoc("c123456", secondWriter);
    addDoc("f123456", secondWriter);

    IndexReader firstReader = firstWriter.Reader;
    IndexReader secondReader = secondWriter.Reader;
    MultiReader combined = new MultiReader(firstReader, secondReader);
    IndexSearcher searcher = NewSearcher(combined);

    SlowFuzzyQuery query = new SlowFuzzyQuery(new Term("field", "z123456"), 1f, 0, 2);
    TopDocs docs = searcher.Search(query, 2);
    assertEquals(5, docs.TotalHits); // 5 docs, from the a and b's

    // Tear down in the same order as before: readers, writers, then directories.
    combined.Dispose();
    firstReader.Dispose();
    secondReader.Dispose();
    firstWriter.Dispose();
    secondWriter.Dispose();
    firstDir.Dispose();
    secondDir.Dispose();
}