public virtual void TestIntFieldCache()
{
    Directory dir = NewDirectory();
    IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    cfg.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, cfg);
    Document doc = new Document();
    IntField field = new IntField("f", 0, Field.Store.YES);
    doc.Add(field);
    int[] values = new int[TestUtil.NextInt(Random(), 1, 10)];
    for (int i = 0; i < values.Length; ++i)
    {
        int v;
        switch (Random().Next(10))
        {
            case 0:
                v = int.MinValue;
                break;
            case 1:
                v = 0;
                break;
            case 2:
                v = int.MaxValue;
                break;
            default:
                v = TestUtil.NextInt(Random(), -10, 10);
                break;
        }
        values[i] = v;
        if (v == 0 && Random().NextBoolean())
        {
            // missing
            iw.AddDocument(new Document());
        }
        else
        {
            field.IntValue = v;
            iw.AddDocument(doc);
        }
    }
    iw.ForceMerge(1);
    DirectoryReader reader = iw.Reader;
    Ints ints = FieldCache.DEFAULT.GetInts(GetOnlySegmentReader(reader), "f", false);
    for (int i = 0; i < values.Length; ++i)
    {
        Assert.AreEqual(values[i], ints.Get(i));
    }
    reader.Dispose();
    iw.Dispose();
    dir.Dispose();
}
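// Verifies FieldCache entry accounting: loading doubles, ints and docsWithField bits
// should reuse existing cache entries, and the "sparse"/"numInt" fields should only be
// reported as present for the even-numbered documents that actually contain them.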
public virtual void TestDocsWithField()
{
    IFieldCache cache = FieldCache.DEFAULT;
    cache.PurgeAllCaches();
    Assert.AreEqual(0, cache.CacheEntries.Length);
    cache.GetDoubles(Reader, "theDouble", true);

    // The double[] takes two slots (one w/ null parser, one
    // w/ real parser), and docsWithField should also
    // have been populated:
    Assert.AreEqual(3, cache.CacheEntries.Length);
    Bits bits = cache.GetDocsWithField(Reader, "theDouble");

    // No new entries should appear:
    Assert.AreEqual(3, cache.CacheEntries.Length);
    Assert.IsTrue(bits is Bits_MatchAllBits);

    Ints ints = cache.GetInts(Reader, "sparse", true);
    Assert.AreEqual(6, cache.CacheEntries.Length);
    Bits docsWithField = cache.GetDocsWithField(Reader, "sparse");
    Assert.AreEqual(6, cache.CacheEntries.Length);
    for (int i = 0; i < docsWithField.Length(); i++)
    {
        if (i % 2 == 0)
        {
            Assert.IsTrue(docsWithField.Get(i));
            Assert.AreEqual(i, ints.Get(i));
        }
        else
        {
            Assert.IsFalse(docsWithField.Get(i));
        }
    }

    Ints numInts = cache.GetInts(Reader, "numInt", Random().NextBoolean());
    docsWithField = cache.GetDocsWithField(Reader, "numInt");
    for (int i = 0; i < docsWithField.Length(); i++)
    {
        if (i % 2 == 0)
        {
            Assert.IsTrue(docsWithField.Get(i));
            Assert.AreEqual(i, numInts.Get(i));
        }
        else
        {
            Assert.IsFalse(docsWithField.Get(i));
        }
    }
}
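// Worker body for the concurrent test: each iteration either synchronizes with the other
// threads via Restart.SignalAndWait() (purge & resume), checks the docsWithField bits for
// "sparse", or reloads the "sparse" ints and verifies them against the even-document pattern.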
public override void Run()
{
    try
    {
        while (!Failed.Get())
        {
            int op = Random().Next(3);
            if (op == 0)
            {
                // Purge all caches & resume, once all
                // threads get here:
                Restart.SignalAndWait();
                if (Iters.Get() >= NUM_ITER)
                {
                    break;
                }
            }
            else if (op == 1)
            {
                Bits docsWithField = Cache.GetDocsWithField(Reader, "sparse");
                for (int i = 0; i < docsWithField.Length(); i++)
                {
                    Assert.AreEqual(i % 2 == 0, docsWithField.Get(i));
                }
            }
            else
            {
                Ints ints = Cache.GetInts(Reader, "sparse", true);
                Bits docsWithField = Cache.GetDocsWithField(Reader, "sparse");
                for (int i = 0; i < docsWithField.Length(); i++)
                {
                    if (i % 2 == 0)
                    {
                        Assert.IsTrue(docsWithField.Get(i));
                        Assert.AreEqual(i, ints.Get(i));
                    }
                    else
                    {
                        Assert.IsFalse(docsWithField.Get(i));
                    }
                }
            }
        }
    }
    catch (Exception t)
    {
        Failed.Set(true);
        throw new Exception(t.Message, t);
    }
}
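// Stored-only (non-indexed) fields must come back from the FieldCache as default values
// (0, empty terms, missing bits) without creating any cache entries.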
public virtual void TestNonIndexedFields()
{
    Directory dir = NewDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
    Document doc = new Document();
    doc.Add(new StoredField("bogusbytes", "bogus"));
    doc.Add(new StoredField("bogusshorts", "bogus"));
    doc.Add(new StoredField("bogusints", "bogus"));
    doc.Add(new StoredField("boguslongs", "bogus"));
    doc.Add(new StoredField("bogusfloats", "bogus"));
    doc.Add(new StoredField("bogusdoubles", "bogus"));
    doc.Add(new StoredField("bogusterms", "bogus"));
    doc.Add(new StoredField("bogustermsindex", "bogus"));
    doc.Add(new StoredField("bogusmultivalued", "bogus"));
    doc.Add(new StoredField("bogusbits", "bogus"));
    iw.AddDocument(doc);
    DirectoryReader ir = iw.Reader;
    iw.Dispose();

    AtomicReader ar = GetOnlySegmentReader(ir);

    IFieldCache cache = FieldCache.DEFAULT;
    cache.PurgeAllCaches();
    Assert.AreEqual(0, cache.CacheEntries.Length);

    Bytes bytes = cache.GetBytes(ar, "bogusbytes", true);
    Assert.AreEqual(0, bytes.Get(0));

    Shorts shorts = cache.GetShorts(ar, "bogusshorts", true);
    Assert.AreEqual(0, shorts.Get(0));

    Ints ints = cache.GetInts(ar, "bogusints", true);
    Assert.AreEqual(0, ints.Get(0));

    Longs longs = cache.GetLongs(ar, "boguslongs", true);
    Assert.AreEqual(0, longs.Get(0));

    Floats floats = cache.GetFloats(ar, "bogusfloats", true);
    Assert.AreEqual(0, floats.Get(0), 0.0f);

    Doubles doubles = cache.GetDoubles(ar, "bogusdoubles", true);
    Assert.AreEqual(0, doubles.Get(0), 0.0D);

    BytesRef scratch = new BytesRef();
    BinaryDocValues binaries = cache.GetTerms(ar, "bogusterms", true);
    binaries.Get(0, scratch);
    Assert.AreEqual(0, scratch.Length);

    SortedDocValues sorted = cache.GetTermsIndex(ar, "bogustermsindex");
    Assert.AreEqual(-1, sorted.GetOrd(0));
    sorted.Get(0, scratch);
    Assert.AreEqual(0, scratch.Length);

    SortedSetDocValues sortedSet = cache.GetDocTermOrds(ar, "bogusmultivalued");
    sortedSet.Document = 0;
    Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());

    Bits bits = cache.GetDocsWithField(ar, "bogusbits");
    Assert.IsFalse(bits.Get(0));

    // check that we cached nothing
    Assert.AreEqual(0, cache.CacheEntries.Length);
    ir.Dispose();
    dir.Dispose();
}
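// Each doc-values type must only be readable through the matching FieldCache accessors;
// mismatched accessors (e.g. GetInts on a binary field) are expected to throw.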
public virtual void TestDocValuesIntegration()
{
    AssumeTrue("3.x does not support docvalues", DefaultCodecSupportsDocValues());
    Directory dir = NewDirectory();
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, null);
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
    Document doc = new Document();
    doc.Add(new BinaryDocValuesField("binary", new BytesRef("binary value")));
    doc.Add(new SortedDocValuesField("sorted", new BytesRef("sorted value")));
    doc.Add(new NumericDocValuesField("numeric", 42));
    if (DefaultCodecSupportsSortedSet())
    {
        doc.Add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value1")));
        doc.Add(new SortedSetDocValuesField("sortedset", new BytesRef("sortedset value2")));
    }
    iw.AddDocument(doc);
    DirectoryReader ir = iw.Reader;
    iw.Dispose();

    AtomicReader ar = GetOnlySegmentReader(ir);
    BytesRef scratch = new BytesRef();

    // Binary type: can be retrieved via getTerms()
    try
    {
        FieldCache.DEFAULT.GetInts(ar, "binary", false);
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    BinaryDocValues binary = FieldCache.DEFAULT.GetTerms(ar, "binary", true);
    binary.Get(0, scratch);
    Assert.AreEqual("binary value", scratch.Utf8ToString());

    try
    {
        FieldCache.DEFAULT.GetTermsIndex(ar, "binary");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    try
    {
        FieldCache.DEFAULT.GetDocTermOrds(ar, "binary");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    try
    {
        new DocTermOrds(ar, null, "binary");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    Bits bits = FieldCache.DEFAULT.GetDocsWithField(ar, "binary");
    Assert.IsTrue(bits.Get(0));

    // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
    try
    {
        FieldCache.DEFAULT.GetInts(ar, "sorted", false);
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    try
    {
        new DocTermOrds(ar, null, "sorted");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    binary = FieldCache.DEFAULT.GetTerms(ar, "sorted", true);
    binary.Get(0, scratch);
    Assert.AreEqual("sorted value", scratch.Utf8ToString());

    SortedDocValues sorted = FieldCache.DEFAULT.GetTermsIndex(ar, "sorted");
    Assert.AreEqual(0, sorted.GetOrd(0));
    Assert.AreEqual(1, sorted.ValueCount);
    sorted.Get(0, scratch);
    Assert.AreEqual("sorted value", scratch.Utf8ToString());

    SortedSetDocValues sortedSet = FieldCache.DEFAULT.GetDocTermOrds(ar, "sorted");
    sortedSet.Document = 0;
    Assert.AreEqual(0, sortedSet.NextOrd());
    Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());
    Assert.AreEqual(1, sortedSet.ValueCount);

    bits = FieldCache.DEFAULT.GetDocsWithField(ar, "sorted");
    Assert.IsTrue(bits.Get(0));

    // Numeric type: can be retrieved via getInts() and so on
    Ints numeric = FieldCache.DEFAULT.GetInts(ar, "numeric", false);
    Assert.AreEqual(42, numeric.Get(0));

    try
    {
        FieldCache.DEFAULT.GetTerms(ar, "numeric", true);
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    try
    {
        FieldCache.DEFAULT.GetTermsIndex(ar, "numeric");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    try
    {
        FieldCache.DEFAULT.GetDocTermOrds(ar, "numeric");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    try
    {
        new DocTermOrds(ar, null, "numeric");
        Assert.Fail();
    }
    catch (InvalidOperationException expected)
    {
    }

    bits = FieldCache.DEFAULT.GetDocsWithField(ar, "numeric");
    Assert.IsTrue(bits.Get(0));

    // SortedSet type: can be retrieved via getDocTermOrds()
    if (DefaultCodecSupportsSortedSet())
    {
        try
        {
            FieldCache.DEFAULT.GetInts(ar, "sortedset", false);
            Assert.Fail();
        }
        catch (InvalidOperationException expected)
        {
        }

        try
        {
            FieldCache.DEFAULT.GetTerms(ar, "sortedset", true);
            Assert.Fail();
        }
        catch (InvalidOperationException expected)
        {
        }

        try
        {
            FieldCache.DEFAULT.GetTermsIndex(ar, "sortedset");
            Assert.Fail();
        }
        catch (InvalidOperationException expected)
        {
        }

        try
        {
            new DocTermOrds(ar, null, "sortedset");
            Assert.Fail();
        }
        catch (InvalidOperationException expected)
        {
        }

        sortedSet = FieldCache.DEFAULT.GetDocTermOrds(ar, "sortedset");
        sortedSet.Document = 0;
        Assert.AreEqual(0, sortedSet.NextOrd());
        Assert.AreEqual(1, sortedSet.NextOrd());
        Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, sortedSet.NextOrd());
        Assert.AreEqual(2, sortedSet.ValueCount);

        bits = FieldCache.DEFAULT.GetDocsWithField(ar, "sortedset");
        Assert.IsTrue(bits.Get(0));
    }

    ir.Dispose();
    dir.Dispose();
}