/// <summary>
/// Verifies that every reader in <paramref name="readers"/> matches the expected
/// structure: the same <c>MaxDoc</c>, the same number of sequential subreaders, and
/// subreaders whose <c>MaxDoc</c> and kind (atomic vs. composite) line up slot-by-slot.
/// </summary>
/// <param name="readers">readers to check against the expected structure</param>
/// <param name="maxDoc">expected top-level <c>MaxDoc</c></param>
/// <param name="childMaxDoc">expected <c>MaxDoc</c> of each subreader slot</param>
/// <param name="childAtomic">whether each subreader slot must hold an <see cref="AtomicReader"/></param>
/// <exception cref="ArgumentException">if any reader deviates from the expected structure</exception>
private static void Validate(CompositeReader[] readers, int maxDoc, int[] childMaxDoc, bool[] childAtomic)
{
    foreach (CompositeReader reader in readers)
    {
        IList<IndexReader> subs = reader.GetSequentialSubReaders();
        if (reader.MaxDoc != maxDoc)
        {
            throw new ArgumentException("All readers must have same MaxDoc: " + maxDoc + "!=" + reader.MaxDoc);
        }
        int subCount = subs.Count;
        if (subCount != childMaxDoc.Length)
        {
            throw new ArgumentException("All readers must have same number of subReaders");
        }
        for (int subIdx = 0; subIdx < subCount; subIdx++)
        {
            IndexReader sub = subs[subIdx];
            if (sub.MaxDoc != childMaxDoc[subIdx])
            {
                throw new ArgumentException("All readers must have same corresponding subReader maxDoc");
            }
            // Atomic slots must hold AtomicReaders, composite slots CompositeReaders.
            bool kindMatches = childAtomic[subIdx] ? sub is AtomicReader : sub is CompositeReader;
            if (!kindMatches)
            {
                throw new ArgumentException("All readers must have same corresponding subReader types (atomic or composite)");
            }
        }
    }
}
/// <summary>
/// Building a ParallelCompositeReader over indexes whose subreader structures differ
/// must throw an exception satisfying IsIllegalArgumentException(), and the failed
/// construction must leave the supplied readers' reference counts untouched.
/// </summary>
public virtual void TestIncompatibleIndexes2()
{
    Directory dir1 = GetDir1(Random);
    Directory dir2 = GetInvalidStructuredDir2(Random);
    DirectoryReader reader1 = DirectoryReader.Open(dir1);
    DirectoryReader reader2 = DirectoryReader.Open(dir2);
    CompositeReader[] readers = new CompositeReader[] { reader1, reader2 };

    try
    {
        new ParallelCompositeReader(readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (Exception e) when (e.IsIllegalArgumentException())
    {
        // expected exception
    }

    try
    {
        new ParallelCompositeReader(Random.NextBoolean(), readers, readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (Exception e) when (e.IsIllegalArgumentException())
    {
        // expected exception
    }

    // The failed constructions must not have taken any extra references.
    Assert.AreEqual(1, reader1.RefCount);
    Assert.AreEqual(1, reader2.RefCount);
    reader1.Dispose();
    reader2.Dispose();
    Assert.AreEqual(0, reader1.RefCount);
    Assert.AreEqual(0, reader2.RefCount);
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Core constructor: wires this context into the tree under <paramref name="parent"/>
/// and records the reader, children, and leaves it represents.
/// </summary>
private CompositeReaderContext(CompositeReaderContext parent, CompositeReader reader, int ordInParent, int docbaseInParent, IList<IndexReaderContext> children, IList<AtomicReaderContext> leaves)
    : base(parent, ordInParent, docbaseInParent)
{
    this.reader = reader;
    // Children are exposed through an unmodifiable view so callers cannot mutate the tree.
    this.children = Collections.UnmodifiableList(children);
    this.leaves = leaves;
}
/// <summary>
/// Core constructor: links this context under <paramref name="parent"/> and snapshots
/// the child list as a read-only collection.
/// </summary>
private CompositeReaderContext(CompositeReaderContext parent, CompositeReader reader, int ordInParent, int docbaseInParent, List<IndexReaderContext> children, IList<AtomicReaderContext> leaves)
    : base(parent, ordInParent, docbaseInParent)
{
    this.reader = reader;
    this.leaves = leaves;
    // Read-only wrapper prevents external mutation of the child contexts.
    this.children = children.AsReadOnly();
}
/// <summary>
/// Core constructor shared by the public-facing constructors: attaches this context
/// below <paramref name="parent"/> and stores an immutable view of its children.
/// </summary>
private CompositeReaderContext(CompositeReaderContext parent, CompositeReader reader, int ordInParent, int docbaseInParent, List<IndexReaderContext> children, IList<AtomicReaderContext> leaves)
    : base(parent, ordInParent, docbaseInParent)
{
    // Wrap before storing so no caller can alter the child list afterwards.
    this.children = children.AsReadOnly();
    this.reader = reader;
    this.leaves = leaves;
}
/// <summary>
/// Like TestIncompatibleIndexes2, but the mismatch is one level down: both
/// MultiReaders have two children, yet one wraps an atomic child
/// (via SlowCompositeReaderWrapper) where the other has a composite child.
/// Construction must fail with ArgumentException and leave ref counts intact.
/// </summary>
public virtual void TestIncompatibleIndexes3()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    CompositeReader ir1 = new MultiReader(DirectoryReader.Open(dir1), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)));
    CompositeReader ir2 = new MultiReader(DirectoryReader.Open(dir2), DirectoryReader.Open(dir2));
    CompositeReader[] readers = new CompositeReader[] { ir1, ir2 };
    try
    {
        new ParallelCompositeReader(readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (System.ArgumentException) // LUCENENET: exception variable removed — it was unused (CS0168)
    {
        // expected exception
    }
    try
    {
        new ParallelCompositeReader(Random().NextBoolean(), readers, readers);
        Assert.Fail("didn't get expected exception: indexes don't have same subreader structure");
    }
    catch (System.ArgumentException)
    {
        // expected exception
    }
    // Neither failed construction may have changed the readers' ref counts.
    Assert.AreEqual(1, ir1.RefCount);
    Assert.AreEqual(1, ir2.RefCount);
    ir1.Dispose();
    ir2.Dispose();
    Assert.AreEqual(0, ir1.RefCount);
    Assert.AreEqual(0, ir2.RefCount);
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Wraps the given composite reader: resolves its merged fields and live docs
/// through <see cref="MultiFields"/> up front, then registers this wrapper as a
/// parent reader of the wrapped instance.
/// </summary>
/// <param name="reader">the composite reader to present as an atomic reader</param>
private SlowCompositeReaderWrapper(CompositeReader reader)
{
    @in = reader;
    fields = MultiFields.GetFields(@in);
    liveDocs = MultiFields.GetLiveDocs(@in);
    @in.RegisterParentReader(this);
}
/// <summary>
/// Checks the ToString() prefix for a ParallelCompositeReader built over a
/// composite of composites (a MultiReader wrapping a DirectoryReader).
/// </summary>
public virtual void TestToStringCompositeComposite()
{
    Directory dir1 = GetDir1(Random());
    CompositeReader ir1 = DirectoryReader.Open(dir1);
    ParallelCompositeReader pr = new ParallelCompositeReader(new CompositeReader[] { new MultiReader(ir1) });
    string s = pr.ToString();
    // LUCENENET: pass an explicit ordinal comparison (CA1310), consistent with TestToString.
    Assert.IsTrue(s.StartsWith("ParallelCompositeReader(ParallelCompositeReaderAnonymousInnerClassHelper(ParallelAtomicReader(", StringComparison.Ordinal), "toString incorrect: " + s);
    pr.Dispose();
    dir1.Dispose();
}
/// <summary>
/// ToString() of a ParallelCompositeReader over a single DirectoryReader must start
/// with the wrapper name followed by its synthetic ParallelAtomicReader child.
/// </summary>
public virtual void TestToString()
{
    Directory dir1 = GetDir1(Random);
    CompositeReader reader = DirectoryReader.Open(dir1);
    ParallelCompositeReader parallelReader = new ParallelCompositeReader(new CompositeReader[] { reader });
    string description = parallelReader.ToString();
    Assert.IsTrue(description.StartsWith("ParallelCompositeReader(ParallelAtomicReader(", StringComparison.Ordinal), "toString incorrect: " + description);
    parallelReader.Dispose();
    dir1.Dispose();
}
/// <summary>
/// This method is sugar for getting an <see cref="AtomicReader"/> from
/// an <see cref="IndexReader"/> of any kind. If the reader is already atomic,
/// it is returned unchanged, otherwise wrapped by this class.
/// </summary>
public static AtomicReader Wrap(IndexReader reader)
{
    CompositeReader composite = reader as CompositeReader;
    if (composite == null)
    {
        // Not composite, so it must already be atomic: hand it back untouched.
        Debug.Assert(reader is AtomicReader);
        return (AtomicReader)reader;
    }
    return new SlowCompositeReaderWrapper(composite);
}
/// <summary>
/// Disposing an inner reader must close the synthetic subreader and the
/// ParallelCompositeReader built on it (Document(0) then throws
/// ObjectDisposedException), while the synthetic subreader's ref count stays
/// unchanged until the parallel reader itself is disposed.
/// </summary>
public virtual void TestCloseInnerReader()
{
    Directory dir1 = GetDir1(Random);
    CompositeReader ir1 = DirectoryReader.Open(dir1);
    Assert.AreEqual(1, ir1.GetSequentialSubReaders()[0].RefCount);

    // with overlapping
    ParallelCompositeReader pr = new ParallelCompositeReader(true, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 });
    IndexReader psub = pr.GetSequentialSubReaders()[0];
    Assert.AreEqual(1, psub.RefCount);

    ir1.Dispose();
    Assert.AreEqual(1, psub.RefCount, "refCount of synthetic subreader should be unchanged");
    try
    {
        psub.Document(0);
        Assert.Fail("Subreader should be already closed because inner reader was closed!");
    }
    catch (ObjectDisposedException) // LUCENENET: parameterless catch replaces the CS0168 pragma hack
    {
        // pass
    }
    try
    {
        pr.Document(0);
        Assert.Fail("ParallelCompositeReader should be already closed because inner reader was closed!");
    }
    catch (ObjectDisposedException)
    {
        // pass
    }

    // noop:
    pr.Dispose();
    Assert.AreEqual(0, psub.RefCount);
    dir1.Dispose();
}
/// <summary>
/// Indexes a mix of stored-only docs and docs indexed with offsets (some with term
/// vectors for the CheckIndex cross-check), then verifies that FieldInfos seen
/// through a SlowCompositeReaderWrapper reports
/// DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS for the field.
/// </summary>
public virtual void TestWithUnindexedFields()
{
    Directory dir = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random, dir, iwc);
    for (int docIdx = 0; docIdx < 100; docIdx++)
    {
        Document doc = new Document();
        // ensure at least one doc is indexed with offsets
        if (docIdx < 99 && Random.Next(2) == 0)
        {
            // stored only
            FieldType storedOnly = new FieldType();
            storedOnly.IsIndexed = false;
            storedOnly.IsStored = true;
            doc.Add(new Field("foo", "boo!", storedOnly));
        }
        else
        {
            FieldType withOffsets = new FieldType(TextField.TYPE_STORED);
            withOffsets.IndexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
            if (Random.NextBoolean())
            {
                // store some term vectors for the checkindex cross-check
                withOffsets.StoreTermVectors = true;
                withOffsets.StoreTermVectorPositions = true;
                withOffsets.StoreTermVectorOffsets = true;
            }
            doc.Add(new Field("foo", "bar", withOffsets));
        }
        writer.AddDocument(doc);
    }
    CompositeReader reader = writer.GetReader();
    AtomicReader slowReader = SlowCompositeReaderWrapper.Wrap(reader);
    FieldInfos fieldInfos = slowReader.FieldInfos;
    Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, fieldInfos.FieldInfo("foo").IndexOptions);
    slowReader.Dispose();
    reader.Dispose();
    writer.Dispose();
    dir.Dispose();
}
/// <summary>
/// Disposing a ParallelCompositeReader with overlapping stored-fields readers must
/// fire the reader-closed listener once per leaf (three leaves expected here).
/// </summary>
public virtual void TestReaderClosedListener2()
{
    Directory dir1 = GetDir1(Random());
    CompositeReader ir1 = DirectoryReader.Open(dir1);

    // with overlapping
    ParallelCompositeReader pr = new ParallelCompositeReader(true, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 });

    int[] closedCounter = new int[1]; // single-element array so the listener can mutate it
    Assert.AreEqual(3, pr.Leaves.Count);
    foreach (AtomicReaderContext leafContext in pr.Leaves)
    {
        leafContext.Reader.AddReaderClosedListener(new ReaderClosedListenerAnonymousInnerClassHelper2(this, closedCounter));
    }
    pr.Dispose();
    Assert.AreEqual(3, closedCounter[0]);
    dir1.Dispose();
}
/// <summary>
/// Recursively builds the context tree for <paramref name="reader"/>. Atomic readers
/// become <see cref="AtomicReaderContext"/> leaves (also appended to the shared
/// leaves list), composite readers become <see cref="CompositeReaderContext"/> nodes
/// whose children are built with running doc bases.
/// </summary>
internal IndexReaderContext Build(CompositeReaderContext parent, IndexReader reader, int ord, int docBase)
{
    var atomicReader = reader as AtomicReader;
    if (atomicReader != null)
    {
        var leaf = new AtomicReaderContext(parent, atomicReader, ord, docBase, leaves.Count, leafDocBase);
        leaves.Add(leaf);
        leafDocBase += reader.MaxDoc;
        return leaf;
    }

    CompositeReader compositeReader = (CompositeReader)reader;
    var subReaders = compositeReader.GetSequentialSubReaders();
    var childContexts = new IndexReaderContext[subReaders.Count];
    // Only the root context (parent == null) carries the shared leaves list.
    CompositeReaderContext context = parent == null
        ? new CompositeReaderContext(compositeReader, childContexts, leaves)
        : new CompositeReaderContext(parent, compositeReader, ord, docBase, childContexts);

    int childDocBase = 0;
    for (int i = 0, count = subReaders.Count; i < count; i++)
    {
        IndexReader subReader = subReaders[i];
        childContexts[i] = Build(context, subReader, i, childDocBase);
        childDocBase += subReader.MaxDoc;
    }
    if (Debugging.AssertsEnabled)
    {
        Debugging.Assert(childDocBase == compositeReader.MaxDoc);
    }
    return context;
}
/// <summary>
/// Sole constructor: captures the top-level reader whose context tree the
/// <c>Build()</c> method will later create.
/// </summary>
/// <param name="reader">the top-level composite reader to build a context tree for</param>
public Builder(CompositeReader reader)
{
    this.Reader = reader;
}
/// <summary>
/// Creates a <see cref="CompositeReaderContext"/> for top-level readers,
/// i.e. with <c>parent</c> set to <c>null</c> and an ord/docBase of 0.
/// </summary>
internal CompositeReaderContext(CompositeReader reader, List<IndexReaderContext> children, IList<AtomicReaderContext> leaves)
    : this(null, reader, 0, 0, children, leaves)
{
}
/// <summary>
/// Creates a <see cref="CompositeReaderContext"/> for top-level readers: the parent
/// is <c>null</c>, and ordInParent/docbaseInParent are both 0.
/// </summary>
internal CompositeReaderContext(CompositeReader reader, List<IndexReaderContext> children, IList<AtomicReaderContext> leaves)
    : this(null, reader, 0, 0, children, leaves)
{
}
/// <summary>
/// Creates a <see cref="CompositeReaderContext"/> for intermediate readers that are
/// not top-level readers in the current context (passes <c>null</c> for the leaves list).
/// </summary>
internal CompositeReaderContext(CompositeReaderContext parent, CompositeReader reader, int ordInParent, int docbaseInParent, List<IndexReaderContext> children)
    : this(parent, reader, ordInParent, docbaseInParent, children, null)
{
}
/// <summary>
/// Builds the full context hierarchy for the given top-level <see cref="CompositeReader"/>.
/// </summary>
internal static CompositeReaderContext Create(CompositeReader reader)
{
    var builder = new Builder(reader);
    return builder.Build();
}
/// <summary>
/// Factory: constructs the context tree for a top-level composite reader.
/// </summary>
internal static CompositeReaderContext Create(CompositeReader reader)
    => new Builder(reader).Build();
/// <summary>
/// Builds the synthetic parallel subreaders mirroring the subreader structure of the
/// first main reader, after validating that all main and stored-fields readers share
/// that structure. Atomic slots yield throw-away ParallelAtomicReaders and composite
/// slots throw-away ParallelCompositeReaders, both invisible to ref-counting.
/// </summary>
/// <exception cref="ArgumentException">if readers disagree on structure, or if
/// storedFieldsReaders are supplied without any main readers</exception>
private static IndexReader[] PrepareSubReaders(CompositeReader[] readers, CompositeReader[] storedFieldsReaders)
{
    if (readers.Length == 0)
    {
        if (storedFieldsReaders.Length > 0)
        {
            throw new ArgumentException("There must be at least one main reader if storedFieldsReaders are used.");
        }
        // LUCENENET: Optimized empty string array creation
        return Arrays.Empty<IndexReader>();
    }

    IList<IndexReader> firstSubReaders = readers[0].GetSequentialSubReaders();

    // check compatibility:
    int maxDoc = readers[0].MaxDoc;
    int subCount = firstSubReaders.Count;
    int[] childMaxDoc = new int[subCount];
    bool[] childAtomic = new bool[subCount];
    for (int i = 0; i < subCount; i++)
    {
        IndexReader firstSub = firstSubReaders[i];
        childMaxDoc[i] = firstSub.MaxDoc;
        childAtomic[i] = firstSub is AtomicReader;
    }
    Validate(readers, maxDoc, childMaxDoc, childAtomic);
    Validate(storedFieldsReaders, maxDoc, childMaxDoc, childAtomic);

    // hierarchically build the same subreader structure as the first CompositeReader with Parallel*Readers:
    IndexReader[] subReaders = new IndexReader[subCount];
    for (int i = 0; i < subReaders.Length; i++)
    {
        if (firstSubReaders[i] is AtomicReader)
        {
            AtomicReader[] atomicSubs = new AtomicReader[readers.Length];
            for (int j = 0; j < readers.Length; j++)
            {
                atomicSubs[j] = (AtomicReader)readers[j].GetSequentialSubReaders()[i];
            }
            AtomicReader[] storedSubs = new AtomicReader[storedFieldsReaders.Length];
            for (int j = 0; j < storedFieldsReaders.Length; j++)
            {
                storedSubs[j] = (AtomicReader)storedFieldsReaders[j].GetSequentialSubReaders()[i];
            }
            // We pass true for closeSubs and we prevent closing of subreaders in doClose():
            // By this the synthetic throw-away readers used here are completely invisible to ref-counting
            subReaders[i] = new ParallelAtomicReaderAnonymousClass(atomicSubs, storedSubs);
        }
        else
        {
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(firstSubReaders[i] is CompositeReader);
            }
            CompositeReader[] compositeSubs = new CompositeReader[readers.Length];
            for (int j = 0; j < readers.Length; j++)
            {
                compositeSubs[j] = (CompositeReader)readers[j].GetSequentialSubReaders()[i];
            }
            CompositeReader[] storedSubs = new CompositeReader[storedFieldsReaders.Length];
            for (int j = 0; j < storedFieldsReaders.Length; j++)
            {
                storedSubs[j] = (CompositeReader)storedFieldsReaders[j].GetSequentialSubReaders()[i];
            }
            // We pass true for closeSubs and we prevent closing of subreaders in doClose():
            // By this the synthetic throw-away readers used here are completely invisible to ref-counting
            subReaders[i] = new ParallelCompositeReaderAnonymousClass(compositeSubs, storedSubs);
        }
    }
    return subReaders;
}
/// <summary>
/// Stored fields must come only from the storedFieldsReaders argument: readers
/// listed solely as main readers contribute terms but no stored fields, an empty
/// storedFieldsReaders array yields no stored fields at all, and an empty
/// main-reader array combined with storedFieldsReaders is rejected.
/// </summary>
public virtual void TestIgnoreStoredFields()
{
    Directory dir1 = GetDir1(Random());
    Directory dir2 = GetDir2(Random());
    CompositeReader ir1 = DirectoryReader.Open(dir1);
    CompositeReader ir2 = DirectoryReader.Open(dir2);

    // with overlapping
    ParallelCompositeReader pr = new ParallelCompositeReader(false, new CompositeReader[] { ir1, ir2 }, new CompositeReader[] { ir1 });
    Assert.AreEqual("v1", pr.Document(0).Get("f1"));
    Assert.AreEqual("v1", pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there
    AtomicReader slow = SlowCompositeReaderWrapper.Wrap(pr);
    Assert.IsNotNull(slow.Terms("f1"));
    Assert.IsNotNull(slow.Terms("f2"));
    Assert.IsNotNull(slow.Terms("f3"));
    Assert.IsNotNull(slow.Terms("f4"));
    pr.Dispose();

    // no stored fields at all
    pr = new ParallelCompositeReader(false, new CompositeReader[] { ir2 }, new CompositeReader[0]);
    Assert.IsNull(pr.Document(0).Get("f1"));
    Assert.IsNull(pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there
    slow = SlowCompositeReaderWrapper.Wrap(pr);
    Assert.IsNull(slow.Terms("f1"));
    Assert.IsNull(slow.Terms("f2"));
    Assert.IsNotNull(slow.Terms("f3"));
    Assert.IsNotNull(slow.Terms("f4"));
    pr.Dispose();

    // without overlapping
    pr = new ParallelCompositeReader(true, new CompositeReader[] { ir2 }, new CompositeReader[] { ir1 });
    Assert.AreEqual("v1", pr.Document(0).Get("f1"));
    Assert.AreEqual("v1", pr.Document(0).Get("f2"));
    Assert.IsNull(pr.Document(0).Get("f3"));
    Assert.IsNull(pr.Document(0).Get("f4"));
    // check that fields are there
    slow = SlowCompositeReaderWrapper.Wrap(pr);
    Assert.IsNull(slow.Terms("f1"));
    Assert.IsNull(slow.Terms("f2"));
    Assert.IsNotNull(slow.Terms("f3"));
    Assert.IsNotNull(slow.Terms("f4"));
    pr.Dispose();

    // no main readers
    try
    {
        new ParallelCompositeReader(true, new CompositeReader[0], new CompositeReader[] { ir1 });
        Assert.Fail("didn't get expected exception: need a non-empty main-reader array");
    }
    catch (System.ArgumentException) // LUCENENET: unused exception variable removed (CS0168)
    {
        // pass
    }
    dir1.Dispose();
    dir2.Dispose();
}
/// <summary>
/// Creates a <see cref="CompositeReaderContext"/> for intermediate readers that are
/// not top-level readers in the current context; the leaves argument of the core
/// constructor is passed as <c>null</c>.
/// </summary>
internal CompositeReaderContext(CompositeReaderContext parent, CompositeReader reader, int ordInParent, int docbaseInParent, List<IndexReaderContext> children)
    : this(parent, reader, ordInParent, docbaseInParent, children, null)
{
}