// Forward disposal of managed state to the wrapped producer.
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        _delegateFieldsProducer.Dispose();
    }
}
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        @in.Dispose();
    }
}
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        @in.Dispose();
    }
}
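// For context, a minimal sketch of the standard C# dispose pattern these
// Dispose(bool) overloads plug into. The public entry point below is an
// assumption about the base class, not code from this file:
//
// public void Dispose()
// {
//     Dispose(true);              // release managed state via the overloads above
//     GC.SuppressFinalize(this);  // no finalizer work left to do
// }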
public virtual void TestRandomPostings()
{
    FieldInfos.Builder builder = new FieldInfos.Builder();

    // Build NUM_FIELDS fields, cycling through the omitTF/storePayloads combinations.
    FieldData[] fields = new FieldData[NUM_FIELDS];
    for (int i = 0; i < NUM_FIELDS; i++)
    {
        bool omitTF = 0 == (i % 3);
        bool storePayloads = 1 == (i % 3);
        fields[i] = new FieldData(this, FieldNames[i], builder, this.MakeRandomTerms(omitTF, storePayloads), omitTF, storePayloads);
    }

    Directory dir = NewDirectory();
    FieldInfos fieldInfos = builder.Finish();

    if (VERBOSE)
    {
        Console.WriteLine("TEST: now write postings");
    }
    this.Write(fieldInfos, dir, fields, false);

    Codec codec = Codec.Default;
    SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);

    if (VERBOSE)
    {
        Console.WriteLine("TEST: now read postings");
    }
    FieldsProducer terms = codec.PostingsFormat().FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));

    // Verify the postings concurrently: NUM_TEST_THREADS - 1 daemon threads
    // plus the current thread all read the same FieldsProducer.
    Verify[] threads = new Verify[NUM_TEST_THREADS - 1];
    for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
    {
        threads[i] = new Verify(this, si, fields, terms);
        threads[i].SetDaemon(true);
        threads[i].Start();
    }

    (new Verify(this, si, fields, terms)).Run();

    for (int i = 0; i < NUM_TEST_THREADS - 1; i++)
    {
        threads[i].Join();
        Debug.Assert(!threads[i].Failed);
    }

    terms.Dispose();
    dir.Dispose();
}
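// A rough sketch of the Verify worker used above, inferred from its usage
// (SetDaemon/Start/Join come from the test framework's thread wrapper, and
// the test reads a Failed flag after Join). Everything here other than
// Run() and Failed is hypothetical:
//
// internal class Verify : ThreadClass
// {
//     internal volatile bool Failed;
//
//     public override void Run()
//     {
//         try
//         {
//             // hypothetical body: walk every field, term, and doc in the
//             // shared FieldsProducer and assert it matches the FieldData
//             // fixtures that were written
//         }
//         catch (Exception e)
//         {
//             Failed = true;  // surfaced via Debug.Assert after Join()
//             throw;
//         }
//     }
// }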
public virtual void TestFixedPostings()
{
    const int NUM_TERMS = 100;

    // Each term i appears in exactly one doc, doc i, with no positions.
    TermData[] terms = new TermData[NUM_TERMS];
    for (int i = 0; i < NUM_TERMS; i++)
    {
        int[] docs = new int[] { i };
        string text = Convert.ToString(i);
        terms[i] = new TermData(this, text, docs, null);
    }

    FieldInfos.Builder builder = new FieldInfos.Builder();
    FieldData field = new FieldData(this, "field", builder, terms, true, false);
    FieldData[] fields = new FieldData[] { field };
    FieldInfos fieldInfos = builder.Finish();

    Directory dir = NewDirectory();
    this.Write(fieldInfos, dir, fields, true);
    Codec codec = Codec.Default;
    SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);

    FieldsProducer reader = codec.PostingsFormat().FieldsProducer(new SegmentReadState(dir, si, fieldInfos, NewIOContext(Random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));

    IEnumerator<string> fieldsEnum = reader.GetEnumerator();
    fieldsEnum.MoveNext();
    string fieldName = fieldsEnum.Current;
    Assert.IsNotNull(fieldName);
    Terms terms2 = reader.Terms(fieldName);
    Assert.IsNotNull(terms2);

    TermsEnum termsEnum = terms2.Iterator(null);

    DocsEnum docsEnum = null;
    for (int i = 0; i < NUM_TERMS; i++)
    {
        BytesRef term = termsEnum.Next();
        Assert.IsNotNull(term);
        Assert.AreEqual(terms[i].Text2, term.Utf8ToString());

        // Do this twice to stress test the codec's reuse, i.e. make sure it
        // properly fully resets (rewinds) its internal state:
        for (int iter = 0; iter < 2; iter++)
        {
            docsEnum = TestUtil.Docs(Random(), termsEnum, null, docsEnum, DocsEnum.FLAG_NONE);
            Assert.AreEqual(terms[i].Docs[0], docsEnum.NextDoc());
            Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.NextDoc());
        }
    }

    Assert.IsNull(termsEnum.Next());

    // Every term written must also be findable by an exact seek.
    for (int i = 0; i < NUM_TERMS; i++)
    {
        Assert.AreEqual(TermsEnum.SeekStatus.FOUND, termsEnum.SeekCeil(new BytesRef(terms[i].Text2)));
    }

    // Only one field was written, so the fields enumerator is exhausted.
    Assert.IsFalse(fieldsEnum.MoveNext());

    reader.Dispose();
    dir.Dispose();
}
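// For reference, a rough sketch of the TermData fixture consumed by both
// tests, inferred from its usage here (the Text2 and Docs members, plus a
// positions argument that is null in TestFixedPostings). The actual shape
// in this file may differ:
//
// internal class TermData
// {
//     internal string Text2;  // the term's text
//     internal int[] Docs;    // the doc IDs the term occurs in
//     // a third member carries per-doc position data; it is unused (null)
//     // when positions are omitted, as in TestFixedPostings above
// }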