/// <summary>
/// Verifies that disposing an inner reader (with closeSubReaders=true) also
/// closes the enclosing <c>ParallelAtomicReader</c>, so any subsequent access
/// throws <see cref="ObjectDisposedException"/>.
/// </summary>
public virtual void TestCloseInnerReader()
{
    Directory dir1 = GetDir1(Random);
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));

    // with overlapping
    ParallelAtomicReader pr = new ParallelAtomicReader(true, new AtomicReader[] { ir1 }, new AtomicReader[] { ir1 });

    ir1.Dispose();

    try
    {
        pr.Document(0);
        Assert.Fail("ParallelAtomicReader should be already closed because inner reader was closed!");
    }
    catch (ObjectDisposedException) // no variable: exception instance is intentionally unused
    {
        // pass
    }

    // noop:
    pr.Dispose();

    dir1.Dispose();
}
/// <summary>
/// Checks that a ParallelAtomicReader built over two single-field-pair indexes
/// exposes the union of all four fields through its FieldInfos.
/// </summary>
public virtual void TestFieldNames()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    ParallelAtomicReader pr = new ParallelAtomicReader(
        SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)),
        SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)));

    // The parallel reader must see every field contributed by either index.
    FieldInfos infos = pr.FieldInfos;
    Assert.AreEqual(4, infos.Size());
    foreach (string fieldName in new string[] { "f1", "f2", "f3", "f4" })
    {
        Assert.IsNotNull(infos.FieldInfo(fieldName));
    }

    pr.Dispose();
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Checks that a ParallelAtomicReader built over two two-field indexes exposes
/// the union of all four fields (f1..f4) through its FieldInfos.
/// </summary>
public virtual void TestFieldNames()
{
    Directory dir1 = GetDir1(Random);
    Directory dir2 = GetDir2(Random);
    ParallelAtomicReader pr = new ParallelAtomicReader(
        SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)),
        SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)));

    // Field infos of the parallel view must contain all fields from both halves.
    FieldInfos mergedInfos = pr.FieldInfos;
    Assert.AreEqual(4, mergedInfos.Count);
    for (int i = 1; i <= 4; i++)
    {
        Assert.IsNotNull(mergedInfos.FieldInfo("f" + i));
    }

    pr.Dispose();
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// With the default constructor (closeSubReaders=true) the ParallelAtomicReader
/// does not bump subreader refcounts on open, but drops them to zero when it is
/// itself disposed.
/// </summary>
public virtual void TestRefCounts1()
{
    Directory dir1 = GetDir1(Random);
    Directory dir2 = GetDir2(Random);

    // close subreaders, ParallelReader will not change refCounts, but close on its own close
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));
    ParallelAtomicReader pr = new ParallelAtomicReader(ir1, ir2);

    // check RefCounts: opening the parallel reader leaves them at 1
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);

    // disposing the parallel reader closes both subreaders
    pr.Dispose();
    Assert.AreEqual(0, ir1.RefCount);
    Assert.AreEqual(0, ir2.RefCount);

    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// With closeSubReaders=false the ParallelAtomicReader increments each
/// subreader's refcount on open and releases exactly one reference on dispose;
/// the caller remains responsible for the final dispose.
/// </summary>
public virtual void TestRefCounts2()
{
    Directory dir1 = GetDir1(Random);
    Directory dir2 = GetDir2(Random);
    AtomicReader reader1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader reader2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelAtomicReader pr = new ParallelAtomicReader(false, reader1, reader2);

    // check RefCounts: construction bumped each subreader to 2
    Assert.AreEqual(2, reader1.RefCount);
    Assert.AreEqual(2, reader2.RefCount);

    // disposing the parallel reader releases one reference from each
    pr.Dispose();
    Assert.AreEqual(1, reader1.RefCount);
    Assert.AreEqual(1, reader2.RefCount);

    // caller-owned dispose drops both refcounts to zero
    reader1.Dispose();
    reader2.Dispose();
    Assert.AreEqual(0, reader1.RefCount);
    Assert.AreEqual(0, reader2.RefCount);

    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Verifies that disposing an inner reader (with closeSubReaders=true) also
/// closes the enclosing <c>ParallelAtomicReader</c>, so any subsequent access
/// throws <c>AlreadyClosedException</c>.
/// </summary>
public virtual void TestCloseInnerReader()
{
    Directory dir1 = GetDir1(Random());
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));

    // with overlapping
    ParallelAtomicReader pr = new ParallelAtomicReader(true, new AtomicReader[] { ir1 }, new AtomicReader[] { ir1 });

    ir1.Dispose();

    try
    {
        pr.Document(0);
        Assert.Fail("ParallelAtomicReader should be already closed because inner reader was closed!");
    }
    catch (AlreadyClosedException) // no variable: avoids CS0168 unused-variable warning
    {
        // pass
    }

    // noop:
    pr.Dispose();

    dir1.Dispose();
}
/// <summary>
/// Regression test: adding a ParallelAtomicReader over an index whose only
/// surviving documents have empty term-vector fields must not crash
/// <c>AddIndexes</c>. When unpatched, Lucene crashed with an
/// ArrayIndexOutOfBoundsException caused by TermVectorsWriter.
/// </summary>
public virtual void TestEmptyIndexWithVectors()
{
    // Build the first index: two docs with an empty vectored "test" field,
    // then delete doc "1" and force-merge so only empty-vector data survives.
    Directory rd1 = NewDirectory();
    {
        if (Verbose)
        {
            Console.WriteLine("\nTEST: make 1st writer");
        }
        IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
        Document doc = new Document();
        Field idField = NewTextField("id", "", Field.Store.NO);
        doc.Add(idField);
        FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
        customType.StoreTermVectors = true;
        doc.Add(NewField("test", "", customType));
        idField.SetStringValue("1");
        iw.AddDocument(doc);
        // second doc additionally carries a non-vectored empty "test" field
        doc.Add(NewTextField("test", "", Field.Store.NO));
        idField.SetStringValue("2");
        iw.AddDocument(doc);
        iw.Dispose();

        // delete doc "1" without merging, so the deletion stays live in the segment
        IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
        if (Verbose)
        {
            Console.WriteLine("\nTEST: make 2nd writer");
        }
        IndexWriter writer = new IndexWriter(rd1, dontMergeConfig);
        writer.DeleteDocuments(new Term("id", "1"));
        writer.Dispose();

        // sanity check: 2 docs max, 1 live
        IndexReader ir = DirectoryReader.Open(rd1);
        Assert.AreEqual(2, ir.MaxDoc);
        Assert.AreEqual(1, ir.NumDocs);
        ir.Dispose();

        // merge away the deleted doc
        iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND));
        iw.ForceMerge(1);
        iw.Dispose();
    }

    // Second index: a single completely empty document.
    Directory rd2 = NewDirectory();
    {
        IndexWriter iw = new IndexWriter(rd2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
        Document doc = new Document();
        iw.AddDocument(doc);
        iw.Dispose();
    }

    // Combine both indexes in parallel and copy them into a fresh index.
    Directory rdOut = NewDirectory();
    IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
    DirectoryReader reader1, reader2;
    ParallelAtomicReader pr = new ParallelAtomicReader(
        SlowCompositeReaderWrapper.Wrap(reader1 = DirectoryReader.Open(rd1)),
        SlowCompositeReaderWrapper.Wrap(reader2 = DirectoryReader.Open(rd2)));

    // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
    iwOut.AddIndexes(pr);

    // ParallelReader closes any IndexReader you added to it:
    pr.Dispose();

    // assert subreaders were closed
    Assert.AreEqual(0, reader1.RefCount);
    Assert.AreEqual(0, reader2.RefCount);

    rd1.Dispose();
    rd2.Dispose();

    iwOut.ForceMerge(1);
    iwOut.Dispose();
    rdOut.Dispose();
}
/// <summary>
/// Verifies that the stored-fields reader list passed to the
/// <c>ParallelAtomicReader</c> constructor controls which stored fields are
/// visible, independently of the indexed (inverted) fields, and that an empty
/// main-reader array is rejected with an <c>ArgumentException</c>.
/// </summary>
public virtual void TestIgnoreStoredFields()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

    // with overlapping: stored fields come only from ir1 (f1/f2)
    ParallelAtomicReader pr = new ParallelAtomicReader(false, new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1 });
    Assert.AreEqual("v1", pr.Document(0).Get("f1"));
    Assert.AreEqual("v1", pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there (inverted fields from both readers)
    Assert.IsNotNull(pr.Terms("f1"));
    Assert.IsNotNull(pr.Terms("f2"));
    Assert.IsNotNull(pr.Terms("f3"));
    Assert.IsNotNull(pr.Terms("f4"));
    pr.Dispose();

    // no stored fields at all
    pr = new ParallelAtomicReader(false, new AtomicReader[] { ir2 }, new AtomicReader[0]);
    Assert.IsNull(pr.Document(0).Get("f1"));
    Assert.IsNull(pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there (only ir2's inverted fields f3/f4)
    Assert.IsNull(pr.Terms("f1"));
    Assert.IsNull(pr.Terms("f2"));
    Assert.IsNotNull(pr.Terms("f3"));
    Assert.IsNotNull(pr.Terms("f4"));
    pr.Dispose();

    // without overlapping: stored fields from ir1, inverted fields from ir2
    pr = new ParallelAtomicReader(true, new AtomicReader[] { ir2 }, new AtomicReader[] { ir1 });
    Assert.AreEqual("v1", pr.Document(0).Get("f1"));
    Assert.AreEqual("v1", pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there
    Assert.IsNull(pr.Terms("f1"));
    Assert.IsNull(pr.Terms("f2"));
    Assert.IsNotNull(pr.Terms("f3"));
    Assert.IsNotNull(pr.Terms("f4"));
    pr.Dispose();

    // no main readers
    try
    {
        new ParallelAtomicReader(true, new AtomicReader[0], new AtomicReader[] { ir1 });
        Assert.Fail("didn't get expected exception: need a non-empty main-reader array");
    }
    catch (System.ArgumentException) // no variable: avoids CS0168 unused-variable warning
    {
        // pass
    }

    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// With closeSubReaders=false the ParallelAtomicReader increments each
/// subreader's refcount on open and releases exactly one reference on dispose;
/// the caller remains responsible for the final dispose.
/// </summary>
public virtual void TestRefCounts2()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    AtomicReader reader1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader reader2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelAtomicReader pr = new ParallelAtomicReader(false, reader1, reader2);

    // check RefCounts: construction bumped each subreader to 2
    Assert.AreEqual(2, reader1.RefCount);
    Assert.AreEqual(2, reader2.RefCount);

    // disposing the parallel reader releases one reference from each
    pr.Dispose();
    Assert.AreEqual(1, reader1.RefCount);
    Assert.AreEqual(1, reader2.RefCount);

    // caller-owned dispose drops both refcounts to zero
    reader1.Dispose();
    reader2.Dispose();
    Assert.AreEqual(0, reader1.RefCount);
    Assert.AreEqual(0, reader2.RefCount);

    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// With the default constructor (closeSubReaders=true) the ParallelAtomicReader
/// does not bump subreader refcounts on open, but drops them to zero when it is
/// itself disposed.
/// </summary>
public virtual void TestRefCounts1()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());

    // close subreaders, ParallelReader will not change refCounts, but close on its own close
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));
    ParallelAtomicReader pr = new ParallelAtomicReader(ir1, ir2);

    // check RefCounts: opening the parallel reader leaves them at 1
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);

    // disposing the parallel reader closes both subreaders
    pr.Dispose();
    Assert.AreEqual(0, ir1.RefCount);
    Assert.AreEqual(0, ir2.RefCount);

    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Verifies that the stored-fields reader list passed to the
/// <c>ParallelAtomicReader</c> constructor controls which stored fields are
/// visible, independently of the indexed (inverted) fields, and that an empty
/// main-reader array is rejected with an <see cref="ArgumentException"/>.
/// </summary>
public virtual void TestIgnoreStoredFields()
{
    Directory dir1 = GetDir1(Random);
    Directory dir2 = GetDir2(Random);
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

    // with overlapping: stored fields come only from ir1 (f1/f2)
    ParallelAtomicReader pr = new ParallelAtomicReader(false, new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1 });
    Assert.AreEqual("v1", pr.Document(0).Get("f1"));
    Assert.AreEqual("v1", pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there (inverted fields from both readers)
    Assert.IsNotNull(pr.GetTerms("f1"));
    Assert.IsNotNull(pr.GetTerms("f2"));
    Assert.IsNotNull(pr.GetTerms("f3"));
    Assert.IsNotNull(pr.GetTerms("f4"));
    pr.Dispose();

    // no stored fields at all
    pr = new ParallelAtomicReader(false, new AtomicReader[] { ir2 }, new AtomicReader[0]);
    Assert.IsNull(pr.Document(0).Get("f1"));
    Assert.IsNull(pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there (only ir2's inverted fields f3/f4)
    Assert.IsNull(pr.GetTerms("f1"));
    Assert.IsNull(pr.GetTerms("f2"));
    Assert.IsNotNull(pr.GetTerms("f3"));
    Assert.IsNotNull(pr.GetTerms("f4"));
    pr.Dispose();

    // without overlapping: stored fields from ir1, inverted fields from ir2
    pr = new ParallelAtomicReader(true, new AtomicReader[] { ir2 }, new AtomicReader[] { ir1 });
    Assert.AreEqual("v1", pr.Document(0).Get("f1"));
    Assert.AreEqual("v1", pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there
    Assert.IsNull(pr.GetTerms("f1"));
    Assert.IsNull(pr.GetTerms("f2"));
    Assert.IsNotNull(pr.GetTerms("f3"));
    Assert.IsNotNull(pr.GetTerms("f4"));
    pr.Dispose();

    // no main readers
    try
    {
        new ParallelAtomicReader(true, new AtomicReader[0], new AtomicReader[] { ir1 });
        Assert.Fail("didn't get expected exception: need a non-empty main-reader array");
    }
    catch (ArgumentException) // no variable: avoids CS0168, so the pragma pair is unnecessary
    {
        // pass
    }

    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Regression test: adding a ParallelAtomicReader over an index whose only
/// surviving documents have empty term-vector fields must not crash
/// <c>AddIndexes</c>. When unpatched, Lucene crashed with an
/// ArrayIndexOutOfBoundsException caused by TermVectorsWriter.
/// </summary>
public virtual void TestEmptyIndexWithVectors()
{
    // Build the first index: two docs with an empty vectored "test" field,
    // then delete doc "1" and force-merge so only empty-vector data survives.
    Directory rd1 = NewDirectory();
    {
        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: make 1st writer");
        }
        IndexWriter iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
        Document doc = new Document();
        Field idField = NewTextField("id", "", Field.Store.NO);
        doc.Add(idField);
        FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
        customType.StoreTermVectors = true;
        doc.Add(NewField("test", "", customType));
        idField.StringValue = "1";
        iw.AddDocument(doc);
        // second doc additionally carries a non-vectored empty "test" field
        doc.Add(NewTextField("test", "", Field.Store.NO));
        idField.StringValue = "2";
        iw.AddDocument(doc);
        iw.Dispose();

        // delete doc "1" without merging, so the deletion stays live in the segment
        IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: make 2nd writer");
        }
        IndexWriter writer = new IndexWriter(rd1, dontMergeConfig);
        writer.DeleteDocuments(new Term("id", "1"));
        writer.Dispose();

        // sanity check: 2 docs max, 1 live
        IndexReader ir = DirectoryReader.Open(rd1);
        Assert.AreEqual(2, ir.MaxDoc);
        Assert.AreEqual(1, ir.NumDocs);
        ir.Dispose();

        // merge away the deleted doc
        iw = new IndexWriter(rd1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode_e.APPEND));
        iw.ForceMerge(1);
        iw.Dispose();
    }

    // Second index: a single completely empty document.
    Directory rd2 = NewDirectory();
    {
        IndexWriter iw = new IndexWriter(rd2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
        Document doc = new Document();
        iw.AddDocument(doc);
        iw.Dispose();
    }

    // Combine both indexes in parallel and copy them into a fresh index.
    Directory rdOut = NewDirectory();
    IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    DirectoryReader reader1, reader2;
    ParallelAtomicReader pr = new ParallelAtomicReader(
        SlowCompositeReaderWrapper.Wrap(reader1 = DirectoryReader.Open(rd1)),
        SlowCompositeReaderWrapper.Wrap(reader2 = DirectoryReader.Open(rd2)));

    // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
    iwOut.AddIndexes(pr);

    // ParallelReader closes any IndexReader you added to it:
    pr.Dispose();

    // assert subreaders were closed
    Assert.AreEqual(0, reader1.RefCount);
    Assert.AreEqual(0, reader2.RefCount);

    rd1.Dispose();
    rd2.Dispose();

    iwOut.ForceMerge(1);
    iwOut.Dispose();
    rdOut.Dispose();
}