// [Test, Timeout(300000)] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestEmptyDocs()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

    // make sure that the fact that documents might be empty is not a problem
    Document emptyDoc = new Document();
    int numDocs = Random().NextBoolean() ? 1 : AtLeast(1000);
    for (int i = 0; i < numDocs; ++i)
    {
        iw.AddDocument(emptyDoc);
    }
    iw.Commit();
    DirectoryReader rd = DirectoryReader.Open(dir);
    for (int i = 0; i < numDocs; ++i)
    {
        Document doc = rd.Document(i);
        Assert.IsNotNull(doc);
        Assert.IsTrue(doc.Fields.Count <= 0);
    }
    rd.Dispose();

    iw.Dispose();
    dir.Dispose();
}
protected internal RandomDocument(BaseTermVectorsFormatTestCase outerInstance, int fieldCount, int maxTermCount, Options options, string[] fieldNames, string[] sampleTerms, BytesRef[] sampleTermBytes)
{
    this.OuterInstance = outerInstance;
    if (fieldCount > fieldNames.Length)
    {
        throw new System.ArgumentException();
    }
    this.FieldNames = new string[fieldCount];
    FieldTypes = new FieldType[fieldCount];
    TokenStreams = new RandomTokenStream[fieldCount];
    Arrays.Fill(FieldTypes, outerInstance.FieldType(options));
    HashSet<string> usedFileNames = new HashSet<string>();
    for (int i = 0; i < fieldCount; ++i)
    {
        // LUCENENET NOTE: Using a simple Linq query to filter rather than using brute force makes this a lot
        // faster (and won't infinitely retry due to poor random distribution).
        this.FieldNames[i] = RandomInts.RandomFrom(Random(), fieldNames.Except(usedFileNames).ToArray());
        //do
        //{
        //    this.FieldNames[i] = RandomInts.RandomFrom(Random(), fieldNames);
        //} while (usedFileNames.Contains(this.FieldNames[i]));

        usedFileNames.Add(this.FieldNames[i]);
        TokenStreams[i] = new RandomTokenStream(outerInstance, TestUtil.NextInt(Random(), 1, maxTermCount), sampleTerms, sampleTermBytes);
    }
}
private void CreateRandomIndexes(int maxSegments)
{
    dir = NewDirectory();
    numDocs = AtLeast(150);
    int numTerms = TestUtil.NextInt(Random(), 1, numDocs / 5);
    ISet<string> randomTerms = new HashSet<string>();
    while (randomTerms.size() < numTerms)
    {
        randomTerms.add(TestUtil.RandomSimpleString(Random()));
    }
    terms = new List<string>(randomTerms);
    int seed = Random().Next();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed)));
    iwc.SetMergePolicy(TestSortingMergePolicy.NewSortingMergePolicy(sort));
    iw = new RandomIndexWriter(new Random(seed), dir, iwc);
    for (int i = 0; i < numDocs; ++i)
    {
        Document doc = RandomDocument();
        iw.AddDocument(doc);
        if (i == numDocs / 2 || (i != numDocs - 1 && Random().nextInt(8) == 0))
        {
            iw.Commit();
        }
        if (Random().nextInt(15) == 0)
        {
            string term = RandomInts.RandomFrom(Random(), terms);
            iw.DeleteDocuments(new Term("s", term));
        }
    }
    reader = iw.Reader;
}
public virtual void TestNoMergeScheduler_Mem()
{
    MergeScheduler ms = NoMergeScheduler.INSTANCE;
    ms.Dispose();
    ms.Merge(null, RandomInts.RandomFrom(Random(), Enum.GetValues(typeof(MergeTrigger)).Cast<MergeTrigger>().ToArray()), Random().NextBoolean());
}
public void TestEarlyTerminationDifferentSorter()
{
    // test that the collector works correctly when the index was sorted by a
    // different sorter than the one specified in the ctor.
    CreateRandomIndexes(5);
    int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
    Sort sort = new Sort(new SortField("ndv2", SortField.Type_e.LONG, false));
    bool fillFields = Random().nextBoolean();
    bool trackDocScores = Random().nextBoolean();
    bool trackMaxScore = Random().nextBoolean();
    bool inOrder = Random().nextBoolean();
    TopFieldCollector collector1 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
    TopFieldCollector collector2 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);

    IndexSearcher searcher = NewSearcher(reader);
    int iters = AtLeast(5);
    for (int i = 0; i < iters; ++i)
    {
        TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
        searcher.Search(query, collector1);
        Sort different = new Sort(new SortField("ndv2", SortField.Type_e.LONG));
        searcher.Search(query, new EarlyTerminatingSortingCollectorHelper(collector2, different, numHits));

        assertTrue(collector1.TotalHits >= collector2.TotalHits);
        AssertTopDocsEquals(collector1.TopDocs().ScoreDocs, collector2.TopDocs().ScoreDocs);
    }
}
internal static long RandomLong()
{
    if (Random().NextBoolean())
    {
        long l = 1;
        if (Random().NextBoolean())
        {
            l *= -1;
        }
        foreach (long i in PRIMES)
        {
            int m = Random().Next(3);
            for (int j = 0; j < m; ++j)
            {
                l *= i;
            }
        }
        return l;
    }
    else if (Random().NextBoolean())
    {
        return Random().NextLong();
    }
    else
    {
        return RandomInts.RandomFrom(Random(), Arrays.AsList(long.MinValue, long.MaxValue, 0L, -1L, 1L));
    }
}
// private String fieldName;

//public PortedSolr3Test(Param param)
//{
//    SpatialStrategy strategy = param.strategy;
//    this.ctx = strategy.SpatialContext;
//    this.strategy = strategy;
//}

public override void SetUp()
{
    base.SetUp();
    SpatialStrategy strategy = ((Param)(RandomInts.RandomFrom(Random(), Parameters()))[0]).strategy;
    this.ctx = strategy.SpatialContext;
    this.strategy = strategy;
}
private Document randomDocument()
{
    Document doc = new Document();
    doc.Add(new NumericDocValuesField("ndv", Random().NextLong()));
    doc.Add(new StringField("s", RandomInts.RandomFrom(Random(), terms), Field.Store.YES));
    return doc;
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestReadSkip()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

    FieldType ft = new FieldType();
    ft.IsStored = true;
    ft.Freeze();

    string @string = TestUtil.RandomSimpleString(Random(), 50);
    var bytes = @string.GetBytes(Encoding.UTF8);
    long l = Random().NextBoolean() ? Random().Next(42) : Random().NextLong();
    int i = Random().NextBoolean() ? Random().Next(42) : Random().Next();
    float f = Random().NextFloat();
    double d = Random().NextDouble();
    IList<Field> fields = Arrays.AsList(
        new Field("bytes", bytes, ft),
        new Field("string", @string, ft),
        new Int64Field("long", l, Field.Store.YES),
        new Int32Field("int", i, Field.Store.YES),
        new SingleField("float", f, Field.Store.YES),
        new DoubleField("double", d, Field.Store.YES)
    );

    for (int k = 0; k < 100; ++k)
    {
        Document doc = new Document();
        foreach (Field fld in fields)
        {
            doc.Add(fld);
        }
        iw.w.AddDocument(doc);
    }
    iw.Commit();

    DirectoryReader reader = DirectoryReader.Open(dir);
    int docID = Random().Next(100);
    foreach (Field fld in fields)
    {
        string fldName = fld.Name;
        Document sDoc = reader.Document(docID, Collections.Singleton(fldName));
        IIndexableField sField = sDoc.GetField(fldName);
        if (typeof(Field) == fld.GetType())
        {
            Assert.AreEqual(fld.GetBinaryValue(), sField.GetBinaryValue());
            Assert.AreEqual(fld.GetStringValue(), sField.GetStringValue());
        }
        else
        {
#pragma warning disable 612, 618
            Assert.AreEqual(fld.GetNumericValue(), sField.GetNumericValue());
#pragma warning restore 612, 618
        }
    }
    reader.Dispose();
    iw.Dispose();
    dir.Dispose();
}
internal static sbyte[] RandomArray(int length, int max)
{
    sbyte[] arr = new sbyte[length];
    for (int i = 0; i < arr.Length; ++i)
    {
        arr[i] = (sbyte)RandomInts.NextIntBetween(Random(), 0, max);
    }
    return arr;
}
public void RandomInts_CreateRandomIntCantBeNegative_ReturnsTrue()
{
    // Check that the generated number is positive.
    RandomInts random = new RandomInts(new LoggerMock());
    var result = random.CreateRandomInt();

    Assert.Positive(result);
}
public virtual void TestIncompressible()
{
    sbyte[] decompressed = new sbyte[RandomInts.NextIntBetween(Random(), 20, 256)];
    for (int i = 0; i < decompressed.Length; ++i)
    {
        decompressed[i] = (sbyte)i;
    }
    Test(decompressed);
}
public void RandomInts_CreateRandomIntIsLowerThan99999_ReturnsTrue()
{
    // Check that the generated number is lower than 99999.
    RandomInts random = new RandomInts(new LoggerMock());
    var result = random.CreateRandomInt();

    Assert.Less(result, 99999);
}
public virtual void TestLongMatchs()
{
    // match length >= 20
    sbyte[] decompressed = new sbyte[RandomInts.NextIntBetween(Random(), 300, 1024)];
    for (int i = 0; i < decompressed.Length; ++i)
    {
        decompressed[i] = (sbyte)i;
    }
    Test(decompressed);
}
internal static byte[] RandomArray(int length, int max)
{
    var arr = new byte[length];
    for (int i = 0; i < arr.Length; ++i)
    {
        arr[i] = (byte)RandomInts.RandomInt32Between(Random, 0, max);
    }
    return arr;
}
public virtual void TestIncompressible()
{
    var decompressed = new byte[RandomInts.RandomInt32Between(Random, 20, 256)];
    for (int i = 0; i < decompressed.Length; ++i)
    {
        decompressed[i] = (byte)i;
    }
    Test(decompressed);
}
public virtual void TestLongLiterals()
{
    // long literals (length >= 16) which are not the last literals
    var decompressed = RandomArray(RandomInts.RandomInt32Between(Random, 400, 1024), 256);
    int matchRef = Random.Next(30);
    int matchOff = RandomInts.RandomInt32Between(Random, decompressed.Length - 40, decompressed.Length - 20);
    int matchLength = RandomInts.RandomInt32Between(Random, 4, 10);
    Array.Copy(decompressed, matchRef, decompressed, matchOff, matchLength);
    Test(decompressed);
}
public virtual void TestLongMatchs()
{
    // match length >= 20
    var decompressed = new byte[RandomInts.RandomInt32Between(Random, 300, 1024)];
    for (int i = 0; i < decompressed.Length; ++i)
    {
        decompressed[i] = (byte)i;
    }
    Test(decompressed);
}
public void RandomInts_CheckLogWhenCalled_MustBeTheSame()
{
    // Check that the logged text matches the fake logger's text.
    var mockLog = new FakeLoggerMock();
    var randomInts = new RandomInts(mockLog);

    randomInts.MakeListNDigitsLong(100);

    Assert.AreEqual("Loggin now", mockLog.Text);
}
public void RandomInts_CheckIfIsNUniqueDigitsLong_MustBeTheSame()
{
    // Check that there are no duplicate integers in the list.
    int n = 100;
    RandomInts random = new RandomInts(new LoggerMock());

    random.MakeListNDigitsLong(n);
    var result = random.ListOfRandomInts.Distinct().Count();

    Assert.AreEqual(result, n);
}
public void RandomInts_CheckIfIntIsUniqueInList_ReturnsTrue()
{
    // Check that a new, unique number can be added to the list.
    List<int> list = new List<int> { 1, 2, 3, 4, 5 };
    var newInt = 6;
    RandomInts random = new RandomInts(new LoggerMock());

    var result = random.CheckIfIntIsUnique(newInt, list);

    Assert.IsTrue(result);
}
public void RandomInts_CheckIfIntIsNotUniqueInList_ReturnsFalse()
{
    // Add a number that is already in the list and check that it is not accepted as unique.
    List<int> list = new List<int> { 1, 2, 3, 4, 5 };
    var alreadyUsedInt = 5;
    RandomInts random = new RandomInts(new LoggerMock());

    var result = random.CheckIfIntIsUnique(alreadyUsedInt, list);

    Assert.IsFalse(result);
}
// [Test, Timeout(300000)] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestConcurrentReads()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

    // make sure the readers are properly cloned
    Document doc = new Document();
    Field field = new StringField("fld", "", Field.Store.YES);
    doc.Add(field);
    int numDocs = AtLeast(1000);
    for (int i = 0; i < numDocs; ++i)
    {
        field.SetStringValue("" + i);
        iw.AddDocument(doc);
    }
    iw.Commit();

    DirectoryReader rd = DirectoryReader.Open(dir);
    IndexSearcher searcher = new IndexSearcher(rd);
    int concurrentReads = AtLeast(5);
    int readsPerThread = AtLeast(50);
    IList<ThreadClass> readThreads = new List<ThreadClass>();
    AtomicObject<Exception> ex = new AtomicObject<Exception>();
    for (int i = 0; i < concurrentReads; ++i)
    {
        readThreads.Add(new ThreadAnonymousInnerClassHelper(numDocs, rd, searcher, readsPerThread, ex, i));
    }

    foreach (ThreadClass thread in readThreads)
    {
        thread.Start();
    }

    foreach (ThreadClass thread in readThreads)
    {
        thread.Join();
    }

    rd.Dispose();
    if (ex.Value != null)
    {
        throw ex.Value;
    }

    iw.Dispose();
    dir.Dispose();
}
public virtual void TestDeletePartiallyWrittenFilesIfAbort()
{
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    iwConf.SetCodec(CompressingCodec.RandomInstance(Random()));
    // disable CFS because this test checks file names
    iwConf.SetMergePolicy(NewLogMergePolicy(false));
    iwConf.SetUseCompoundFile(false);
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf);

    Document validDoc = new Document();
    validDoc.Add(new IntField("id", 0, Field.Store.YES));
    iw.AddDocument(validDoc);
    iw.Commit();

    // make sure that #writeField will fail to trigger an abort
    Document invalidDoc = new Document();
    FieldType fieldType = new FieldType();
    fieldType.Stored = true;
    invalidDoc.Add(new FieldAnonymousInnerClassHelper(this, fieldType));

    Assert.Throws<ArgumentException>(() =>
    {
        try
        {
            iw.AddDocument(invalidDoc);
            iw.Commit();
        }
        finally
        {
            int counter = 0;
            foreach (string fileName in dir.ListAll())
            {
                if (fileName.EndsWith(".fdt") || fileName.EndsWith(".fdx"))
                {
                    counter++;
                }
            }
            // Only one .fdt and one .fdx files must have been found
            Assert.AreEqual(2, counter);
            iw.Dispose();
            dir.Dispose();
        }
    });
}
public virtual void TestStringUnion()
{
    List<BytesRef> strings = new List<BytesRef>();
    for (int i = RandomInts.RandomInt32Between(Random, 0, 1000); --i >= 0;)
    {
        strings.Add(new BytesRef(TestUtil.RandomUnicodeString(Random)));
    }

    strings.Sort();
    Automaton union = BasicAutomata.MakeStringUnion(strings);
    Assert.IsTrue(union.IsDeterministic);
    Assert.IsTrue(BasicOperations.SameLanguage(union, NaiveUnion(strings)));
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestUniqueValuesCompression()
{
    Directory dir = new RAMDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter iwriter = new IndexWriter(dir, iwc);

    int uniqueValueCount = TestUtil.NextInt(Random(), 1, 256);
    IList<long> values = new List<long>();

    Document doc = new Document();
    NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
    doc.Add(dvf);
    for (int i = 0; i < 300; ++i)
    {
        long value;
        if (values.Count < uniqueValueCount)
        {
            value = Random().NextLong();
            values.Add(value);
        }
        else
        {
            value = RandomInts.RandomFrom(Random(), values);
        }
        dvf.LongValue = value;
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size1 = DirSize(dir);

    for (int i = 0; i < 20; ++i)
    {
        dvf.LongValue = RandomInts.RandomFrom(Random(), values);
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size2 = DirSize(dir);

    // make sure the new longs did not cost 8 bytes each
    Assert.IsTrue(size2 < size1 + 8 * 20);
}
public void TestEarlyTerminationDifferentSorter()
{
    // test that the collector works correctly when the index was sorted by a
    // different sorter than the one specified in the ctor.
    CreateRandomIndexes(5);
    int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
    Sort sort = new Sort(new SortField("ndv2", SortFieldType.INT64, false));
    bool fillFields = Random().nextBoolean();
    bool trackDocScores = Random().nextBoolean();
    bool trackMaxScore = Random().nextBoolean();
    bool inOrder = Random().nextBoolean();

    // LUCENENET specific:
    // we are changing this test to use Lucene.Net 4.9-like behavior rather than going through all of the effort to
    // fix a hard-to-find null reference exception problem.
    // https://github.com/apache/lucene-solr/commit/c59f13f9918faeeb4e69acd41731e674ce88f912
    //TopFieldCollector collector1 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
    //TopFieldCollector collector2 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);

    IndexSearcher searcher = NewSearcher(reader);
    int iters = AtLeast(5);
    for (int i = 0; i < iters; ++i)
    {
        // LUCENENET specific:
        // we are changing this test to use Lucene.Net 4.9-like behavior rather than going through all of the effort to
        // fix a hard-to-find null reference exception problem.
        // https://github.com/apache/lucene-solr/commit/c59f13f9918faeeb4e69acd41731e674ce88f912
        TopFieldCollector collector1 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
        TopFieldCollector collector2 = TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);

        TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
        searcher.Search(query, collector1);
        Sort different = new Sort(new SortField("ndv2", SortFieldType.INT64));

        searcher.Search(query, new EarlyTerminatingSortingCollectorHelper(collector2, different, numHits));

        assertTrue(collector1.TotalHits >= collector2.TotalHits);
        AssertTopDocsEquals(collector1.GetTopDocs().ScoreDocs, collector2.GetTopDocs().ScoreDocs);
    }
}
protected internal RandomDocument(BaseTermVectorsFormatTestCase outerInstance, int fieldCount, int maxTermCount, Options options, string[] fieldNames, string[] sampleTerms, BytesRef[] sampleTermBytes)
{
    this.OuterInstance = outerInstance;
    if (fieldCount > fieldNames.Length)
    {
        throw new System.ArgumentException();
    }
    this.FieldNames = new string[fieldCount];
    FieldTypes = new FieldType[fieldCount];
    TokenStreams = new RandomTokenStream[fieldCount];
    Arrays.Fill(FieldTypes, outerInstance.FieldType(options));
    HashSet<string> usedFileNames = new HashSet<string>();
    for (int i = 0; i < fieldCount; ++i)
    {
        do
        {
            this.FieldNames[i] = RandomInts.RandomFrom(Random(), fieldNames);
        } while (usedFileNames.Contains(this.FieldNames[i]));

        usedFileNames.Add(this.FieldNames[i]);
        TokenStreams[i] = new RandomTokenStream(outerInstance, TestUtil.NextInt(Random(), 1, maxTermCount), sampleTerms, sampleTermBytes);
    }
}
public void TestEarlyTermination_()
{
    CreateRandomIndexes(5);
    int numHits = TestUtil.NextInt(Random(), 1, numDocs / 10);
    Sort sort = new Sort(new SortField("ndv1", SortField.Type_e.LONG, false));
    bool fillFields = Random().nextBoolean();
    bool trackDocScores = Random().nextBoolean();
    bool trackMaxScore = Random().nextBoolean();
    bool inOrder = Random().nextBoolean();
    TopFieldCollector collector1 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
    TopFieldCollector collector2 = Search.TopFieldCollector.Create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);

    IndexSearcher searcher = NewSearcher(reader);
    int iters = AtLeast(5);
    for (int i = 0; i < iters; ++i)
    {
        TermQuery query = new TermQuery(new Term("s", RandomInts.RandomFrom(Random(), terms)));
        searcher.Search(query, collector1);
        searcher.Search(query, new EarlyTerminatingSortingCollector(collector2, sort, numHits));
    }
    assertTrue(collector1.TotalHits >= collector2.TotalHits);
    AssertTopDocsEquals(collector1.TopDocs().ScoreDocs, collector2.TopDocs().ScoreDocs);
}
protected internal virtual void AssertEquals(RandomTokenStream tk, FieldType ft, Terms terms)
{
    Assert.AreEqual(1, terms.DocCount);
    int termCount = (new HashSet<string>(Arrays.AsList(tk.Terms))).Count;
    Assert.AreEqual(termCount, terms.Size());
    Assert.AreEqual(termCount, terms.SumDocFreq);
    Assert.AreEqual(ft.StoreTermVectorPositions, terms.HasPositions());
    Assert.AreEqual(ft.StoreTermVectorOffsets, terms.HasOffsets());
    Assert.AreEqual(ft.StoreTermVectorPayloads && tk.HasPayloads(), terms.HasPayloads());
    HashSet<BytesRef> uniqueTerms = new HashSet<BytesRef>();
    foreach (string term in tk.Freqs.Keys)
    {
        uniqueTerms.Add(new BytesRef(term));
    }
    BytesRef[] sortedTerms = uniqueTerms.ToArray(/*new BytesRef[0]*/);
    Array.Sort(sortedTerms, terms.Comparator);
    TermsEnum termsEnum = terms.Iterator(Random().NextBoolean() ? null : this.termsEnum.Value);
    this.termsEnum.Value = termsEnum;
    for (int i = 0; i < sortedTerms.Length; ++i)
    {
        BytesRef nextTerm = termsEnum.Next();
        Assert.AreEqual(sortedTerms[i], nextTerm);
        Assert.AreEqual(sortedTerms[i], termsEnum.Term());
        Assert.AreEqual(1, termsEnum.DocFreq());

        FixedBitSet bits = new FixedBitSet(1);
        DocsEnum docsEnum = termsEnum.Docs(bits, Random().NextBoolean() ? null : this.docsEnum.Value);
        Assert.AreEqual(DocsEnum.NO_MORE_DOCS, docsEnum.NextDoc());
        bits.Set(0);

        docsEnum = termsEnum.Docs(Random().NextBoolean() ? bits : null, Random().NextBoolean() ? null : docsEnum);
        Assert.IsNotNull(docsEnum);
        Assert.AreEqual(0, docsEnum.NextDoc());
        Assert.AreEqual(0, docsEnum.DocID());
        Assert.AreEqual(tk.Freqs[termsEnum.Term().Utf8ToString()], (int?)docsEnum.Freq());
        Assert.AreEqual(DocsEnum.NO_MORE_DOCS, docsEnum.NextDoc());
        this.docsEnum.Value = docsEnum;

        bits.Clear(0);
        DocsAndPositionsEnum docsAndPositionsEnum = termsEnum.DocsAndPositions(bits, Random().NextBoolean() ? null : this.docsAndPositionsEnum.Value);
        Assert.AreEqual(ft.StoreTermVectorOffsets || ft.StoreTermVectorPositions, docsAndPositionsEnum != null);
        if (docsAndPositionsEnum != null)
        {
            Assert.AreEqual(DocsEnum.NO_MORE_DOCS, docsAndPositionsEnum.NextDoc());
        }
        bits.Set(0);

        docsAndPositionsEnum = termsEnum.DocsAndPositions(Random().NextBoolean() ? bits : null, Random().NextBoolean() ? null : docsAndPositionsEnum);
        Assert.AreEqual(ft.StoreTermVectorOffsets || ft.StoreTermVectorPositions, docsAndPositionsEnum != null);
        if (terms.HasPositions() || terms.HasOffsets())
        {
            Assert.AreEqual(0, docsAndPositionsEnum.NextDoc());
            int freq = docsAndPositionsEnum.Freq();
            Assert.AreEqual(tk.Freqs[termsEnum.Term().Utf8ToString()], (int?)freq);
            if (docsAndPositionsEnum != null)
            {
                for (int k = 0; k < freq; ++k)
                {
                    int position = docsAndPositionsEnum.NextPosition();
                    ISet<int?> indexes;
                    if (terms.HasPositions())
                    {
                        indexes = tk.PositionToTerms[position];
                        Assert.IsNotNull(indexes);
                    }
                    else
                    {
                        indexes = tk.StartOffsetToTerms[docsAndPositionsEnum.StartOffset()];
                        Assert.IsNotNull(indexes);
                    }
                    if (terms.HasPositions())
                    {
                        bool foundPosition = false;
                        foreach (int index in indexes)
                        {
                            if (tk.TermBytes[index].Equals(termsEnum.Term()) && tk.Positions[index] == position)
                            {
                                foundPosition = true;
                                break;
                            }
                        }
                        Assert.IsTrue(foundPosition);
                    }
                    if (terms.HasOffsets())
                    {
                        bool foundOffset = false;
                        foreach (int index in indexes)
                        {
                            if (tk.TermBytes[index].Equals(termsEnum.Term()) && tk.StartOffsets[index] == docsAndPositionsEnum.StartOffset() && tk.EndOffsets[index] == docsAndPositionsEnum.EndOffset())
                            {
                                foundOffset = true;
                                break;
                            }
                        }
                        Assert.IsTrue(foundOffset);
                    }
                    if (terms.HasPayloads())
                    {
                        bool foundPayload = false;
                        foreach (int index in indexes)
                        {
                            if (tk.TermBytes[index].Equals(termsEnum.Term()) && Equals(tk.Payloads[index], docsAndPositionsEnum.Payload))
                            {
                                foundPayload = true;
                                break;
                            }
                        }
                        Assert.IsTrue(foundPayload);
                    }
                }
                try
                {
                    docsAndPositionsEnum.NextPosition();
                    Assert.Fail();
                }
                catch (Exception e)
                {
                    // ok
                }
            }
            Assert.AreEqual(DocsEnum.NO_MORE_DOCS, docsAndPositionsEnum.NextDoc());
        }
        this.docsAndPositionsEnum.Value = docsAndPositionsEnum;
    }
    Assert.IsNull(termsEnum.Next());
    for (int i = 0; i < 5; ++i)
    {
        if (Random().NextBoolean())
        {
            Assert.IsTrue(termsEnum.SeekExact(RandomInts.RandomFrom(Random(), tk.TermBytes)));
        }
        else
        {
            Assert.AreEqual(SeekStatus.FOUND, termsEnum.SeekCeil(RandomInts.RandomFrom(Random(), tk.TermBytes)));
        }
    }
}