public virtual void TestCloseInnerReader()
{
    Directory dir1 = GetDir1(Random);
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));

    // with overlapping
    ParallelAtomicReader pr = new ParallelAtomicReader(true,
        new AtomicReader[] { ir1 },
        new AtomicReader[] { ir1 });

    // Closing the only inner reader must transitively close the parallel reader.
    ir1.Dispose();

    try
    {
        pr.Document(0);
        Assert.Fail("ParallelAtomicReader should be already closed because inner reader was closed!");
    }
    catch (ObjectDisposedException)
    {
        // pass
    }

    // noop:
    pr.Dispose();

    dir1.Dispose();
}
public virtual void TestDocsAndPositionsEnumStart()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, dir);
    Document doc = new Document();
    doc.Add(NewTextField("foo", "bar", Field.Store.NO));
    writer.AddDocument(doc);

    DirectoryReader reader = writer.GetReader();
    AtomicReader segmentReader = GetOnlySegmentReader(reader);

    // A freshly obtained positions enum must start before the first doc (DocID == -1).
    DocsAndPositionsEnum positions = segmentReader.GetTermPositionsEnum(new Term("foo", "bar"));
    Assert.AreEqual(-1, positions.DocID);
    Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    // Reusing the enum via the TermsEnum must reset its position the same way.
    TermsEnum termsEnum = segmentReader.GetTerms("foo").GetIterator(null);
    Assert.IsTrue(termsEnum.SeekExact(new BytesRef("bar")));
    positions = termsEnum.DocsAndPositions(null, positions);
    Assert.AreEqual(-1, positions.DocID);
    Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    writer.Dispose();
    segmentReader.Dispose();
    dir.Dispose();
}
public virtual void TestDocsEnumStart()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
    Document doc = new Document();
    doc.Add(NewStringField("foo", "bar", Field.Store.NO));
    writer.AddDocument(doc);

    DirectoryReader reader = writer.Reader;
    AtomicReader segmentReader = GetOnlySegmentReader(reader);

    // A freshly obtained docs enum must start before the first doc (DocID == -1).
    DocsEnum docs = TestUtil.Docs(Random(), segmentReader, "foo", new BytesRef("bar"), null, null, DocsFlags.NONE);
    Assert.AreEqual(-1, docs.DocID);
    Assert.IsTrue(docs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    // Reusing the enum via the TermsEnum must reset its position the same way.
    TermsEnum termsEnum = segmentReader.GetTerms("foo").GetIterator(null);
    Assert.IsTrue(termsEnum.SeekExact(new BytesRef("bar")));
    docs = TestUtil.Docs(Random(), termsEnum, null, docs, DocsFlags.NONE);
    Assert.AreEqual(-1, docs.DocID);
    Assert.IsTrue(docs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);

    writer.Dispose();
    segmentReader.Dispose();
    dir.Dispose();
}
public virtual void TestDocValuesUnstored()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    iwconfig.SetMergePolicy(NewLogMergePolicy());
    IndexWriter writer = new IndexWriter(dir, iwconfig);

    // Index 50 docs carrying a numeric doc-values field plus a stored text id.
    for (int docId = 0; docId < 50; docId++)
    {
        Document doc = new Document();
        doc.Add(new NumericDocValuesField("dv", docId));
        doc.Add(new TextField("docId", "" + docId, Field.Store.YES));
        writer.AddDocument(doc);
    }

    DirectoryReader reader = writer.GetReader();
    AtomicReader slow = SlowCompositeReaderWrapper.Wrap(reader);
    FieldInfos fieldInfos = slow.FieldInfos;
    FieldInfo dvInfo = fieldInfos.FieldInfo("dv");
    Assert.IsTrue(dvInfo.HasDocValues);

    // Doc values are readable but must not appear among the stored fields.
    NumericDocValues dv = slow.GetNumericDocValues("dv");
    for (int docId = 0; docId < 50; docId++)
    {
        Assert.AreEqual(docId, dv.Get(docId));
        Document stored = slow.Document(docId);
        // cannot use stored.Get("dv") due to another bug!
        Assert.IsNull(stored.GetField("dv"));
        Assert.AreEqual(Convert.ToString(docId), stored.Get("docId"));
    }

    slow.Dispose();
    writer.Dispose();
    dir.Dispose();
}
public override void TearDown()
{
    // Dispose the fixtures created in SetUp (readers first, then the
    // objects that back them — presumably directories; confirm in SetUp),
    // before handing off to the base class teardown.
    Ir1.Dispose();
    Ir2.Dispose();
    Rd1.Dispose();
    Rd2.Dispose();
    base.TearDown();
}
public override void TearDown()
{
    // Dispose the fixtures created in SetUp (readers first, then the
    // objects that back them — presumably directories; confirm in SetUp),
    // before handing off to the base class teardown.
    ir1.Dispose();
    ir2.Dispose();
    rd1.Dispose();
    rd2.Dispose();
    base.TearDown();
}
public virtual void TestIncompatibleIndexes()
{
    // two documents:
    Directory dir1 = GetDir1(Random);

    // one document only:
    Directory dir2 = NewDirectory();
    IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
    Document d3 = new Document();
    d3.Add(NewTextField("f3", "v1", Field.Store.YES));
    w2.AddDocument(d3);
    w2.Dispose();

    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

    // Combining readers over differing doc counts must be rejected.
    try
    {
        new ParallelAtomicReader(ir1, ir2);
        Assert.Fail("didn't get expected exception: indexes don't have same number of documents");
    }
    catch (ArgumentException)
    {
        // expected exception
    }

    try
    {
        new ParallelAtomicReader(Random.NextBoolean(),
            new AtomicReader[] { ir1, ir2 },
            new AtomicReader[] { ir1, ir2 });
        Assert.Fail("didn't get expected exception: indexes don't have same number of documents");
    }
    catch (ArgumentException)
    {
        // expected exception
    }

    // check RefCounts: the failed constructions must not leak references
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);
    ir1.Dispose();
    ir2.Dispose();
    dir1.Dispose();
    dir2.Dispose();
}
public virtual void TestFloatNorms()
{
    Directory dir = NewDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(Random);
    analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);

    IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    Similarity provider = new MySimProvider(this);
    config.SetSimilarity(provider);

    RandomIndexWriter writer = new RandomIndexWriter(Random, dir, config);
    LineFileDocs lineDocs = new LineFileDocs(Random);
    int numDocs = AtLeast(100);
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = lineDocs.NextDoc();
        float boost = Random.nextFloat();
        // Cast to a double to get more precision output to the string.
        Field field = new TextField(floatTestField, ((double)boost).ToString(CultureInfo.InvariantCulture), Field.Store.YES);
        field.Boost = boost;
        doc.Add(field);
        writer.AddDocument(doc);
        doc.RemoveField(floatTestField);
        if (Rarely())
        {
            writer.Commit();
        }
    }
    writer.Commit();
    writer.Dispose();

    // Every norm must round-trip: the stored field holds the boost we indexed.
    AtomicReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
    NumericDocValues norms = reader.GetNormValues(floatTestField);
    Assert.IsNotNull(norms);
    for (int i = 0; i < reader.MaxDoc; i++)
    {
        Document document = reader.Document(i);
        float expected = Convert.ToSingle(document.Get(floatTestField), CultureInfo.InvariantCulture);
        Assert.AreEqual(expected, J2N.BitConversion.Int32BitsToSingle((int)norms.Get(i)), 0.0f);
    }

    reader.Dispose();
    dir.Dispose();
    lineDocs.Dispose();
}
public virtual void TestMaxByteNorms()
{
    Directory dir = NewFSDirectory(CreateTempDir("TestNorms.testMaxByteNorms"));
    BuildIndex(dir);

    AtomicReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
    NumericDocValues normValues = reader.GetNormValues(ByteTestField);
    Assert.IsNotNull(normValues);

    // The stored field mirrors the expected norm; compare only the low 8 bits.
    for (int docId = 0; docId < reader.MaxDoc; docId++)
    {
        Document document = reader.Document(docId);
        int expected = Convert.ToInt32(document.Get(ByteTestField));
        Assert.AreEqual(expected, normValues.Get(docId) & 0xff);
    }

    reader.Dispose();
    dir.Dispose();
}
public virtual void TestWithUnindexedFields()
{
    Directory dir = NewDirectory();
    RandomIndexWriter riw = new RandomIndexWriter(Random, dir, iwc);
    for (int i = 0; i < 100; i++)
    {
        Document doc = new Document();
        // ensure at least one doc is indexed with offsets (the last one always is)
        if (i < 99 && Random.Next(2) == 0)
        {
            // stored only
            FieldType storedOnly = new FieldType();
            storedOnly.IsIndexed = false;
            storedOnly.IsStored = true;
            doc.Add(new Field("foo", "boo!", storedOnly));
        }
        else
        {
            FieldType indexed = new FieldType(TextField.TYPE_STORED);
            indexed.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
            if (Random.NextBoolean())
            {
                // store some term vectors for the checkindex cross-check
                indexed.StoreTermVectors = true;
                indexed.StoreTermVectorPositions = true;
                indexed.StoreTermVectorOffsets = true;
            }
            doc.Add(new Field("foo", "bar", indexed));
        }
        riw.AddDocument(doc);
    }

    // Stored-only docs must not downgrade the field's index options.
    CompositeReader ir = riw.GetReader();
    AtomicReader slow = SlowCompositeReaderWrapper.Wrap(ir);
    FieldInfos fis = slow.FieldInfos;
    Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, fis.FieldInfo("foo").IndexOptions);

    slow.Dispose();
    ir.Dispose();
    riw.Dispose();
    dir.Dispose();
}
public virtual void TestRefCounts2()
{
    Directory dir1 = GetDir1(Random);
    Directory dir2 = GetDir2(Random);
    AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1));
    AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2));

    // don't close subreaders, so ParallelReader will increment refcounts
    ParallelAtomicReader pr = new ParallelAtomicReader(false, ir1, ir2);

    // check RefCounts: held by the parallel reader plus our own reference
    Assert.AreEqual(2, ir1.RefCount);
    Assert.AreEqual(2, ir2.RefCount);

    pr.Dispose();
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);

    ir1.Dispose();
    ir2.Dispose();
    Assert.AreEqual(0, ir1.RefCount);
    Assert.AreEqual(0, ir2.RefCount);

    dir1.Dispose();
    dir2.Dispose();
}
public virtual void TestCloseUnderException()
{
    int iters = 1000 + 1 + Random().Next(20);
    for (int j = 0; j < iters; j++)
    {
        Directory dir = NewDirectory();
        IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
        writer.Commit();
        writer.Dispose();

        DirectoryReader open = DirectoryReader.Open(dir);
        bool throwOnClose = !Rarely();
        AtomicReader wrap = SlowCompositeReaderWrapper.Wrap(open);
        FilterAtomicReader reader = new FilterAtomicReaderAnonymousInnerClassHelper(this, wrap, throwOnClose);

        // Register a random mix of counting and throwing close listeners.
        int listenerCount = Random().Next(20);
        AtomicInteger count = new AtomicInteger();
        bool faultySet = false;
        for (int i = 0; i < listenerCount; i++)
        {
            if (Rarely())
            {
                faultySet = true;
                reader.AddReaderClosedListener(new FaultyListener());
            }
            else
            {
                count.IncrementAndGet();
                reader.AddReaderClosedListener(new CountListener(count));
            }
        }
        // Guarantee at least one failure source: either the reader throws on
        // close, or a faulty listener does.
        if (!faultySet && !throwOnClose)
        {
            reader.AddReaderClosedListener(new FaultyListener());
        }

        try
        {
            reader.Dispose();
            Assert.Fail("expected Exception");
        }
        catch (InvalidOperationException ex)
        {
            if (throwOnClose)
            {
                Assert.AreEqual("BOOM!", ex.Message);
            }
            else
            {
                Assert.AreEqual("GRRRRRRRRRRRR!", ex.Message);
            }
        }

        // Despite the exception during Dispose(), the reader must be closed.
        try
        {
            var aaa = reader.Fields;
            Assert.Fail("we are closed");
        }
        catch (AlreadyClosedException)
        {
            // expected
        }

        if (Random().NextBoolean())
        {
            reader.Dispose(); // call it again
        }

        // Every CountListener must have fired exactly once.
        Assert.AreEqual(0, count.Get());
        wrap.Dispose();
        dir.Dispose();
    }
}
protected internal override void DoClose()
{
    // Forward the close to the instance this filter wraps.
    m_input.Dispose();
}
protected internal override void DoClose()
{
    // Forward the close to the instance this filter wraps.
    @in.Dispose();
}