Field that stores a per-document long value for scoring, sorting or value retrieval. Here's an example usage:

document.Add(new NumericDocValuesField(name, 22L));

If you also need to store the value, you should add a separate StoredField instance.
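At search time the value can drive sorting. A minimal sketch, assuming an IndexSearcher named searcher, a Query named query, and a field indexed as above under the hypothetical name "weight":

// Sort results by the per-document long stored in the "weight" doc values field.
// "weight", searcher and query are illustrative; the field must have been indexed
// as a NumericDocValuesField for this sort to work.
Sort sort = new Sort(new SortField("weight", SortField.Type_e.LONG));
TopDocs hits = searcher.Search(query, 10, sort);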
/// <summary>
/// Adds a new <see cref="NumericDocValuesField"/> field with the specified 64-bit <see cref="long"/> value.
/// </summary>
/// <remarks>
/// If you also need to store the value, you should add a
/// separate <see cref="StoredField"/> instance.
/// </remarks>
/// <param name="document">This <see cref="Document"/>.</param>
/// <param name="name">Field name.</param>
/// <param name="value">64-bit <see cref="long"/> value.</param>
/// <returns>The field that was added to this <see cref="Document"/>.</returns>
/// <exception cref="System.ArgumentNullException">If the field <paramref name="name"/> is <c>null</c>.</exception>
public static NumericDocValuesField AddNumericDocValuesField(this Document document, string name, long value)
{
    var field = new NumericDocValuesField(name, value);
    document.Add(field);
    return field;
}
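Because the helper returns the field it adds, the caller can keep the instance and update it for subsequent documents, as the tests below do. A hypothetical usage sketch (the document instance and the "weight" field name are assumptions):

// Add a doc values field through the extension method and keep the returned instance.
NumericDocValuesField dvField = document.AddNumericDocValuesField("weight", 42L);
// Reuse the same field instance for the next document by updating its value in place.
dvField.LongValue = 43L; // some versions expose SetInt64Value(43L) instead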
public virtual void Test()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    conf.SetCodec(new Lucene46Codec());
    RandomIndexWriter riw = new RandomIndexWriter(Random(), dir, conf);
    Document doc = new Document();
    // these fields should sometimes get term vectors, etc
    Field idField = NewStringField("id", "", Field.Store.NO);
    Field bodyField = NewTextField("body", "", Field.Store.NO);
    Field dvField = new NumericDocValuesField("dv", 5);
    doc.Add(idField);
    doc.Add(bodyField);
    doc.Add(dvField);
    for (int i = 0; i < 100; i++)
    {
        idField.StringValue = Convert.ToString(i);
        bodyField.StringValue = TestUtil.RandomUnicodeString(Random());
        riw.AddDocument(doc);
        if (Random().Next(7) == 0)
        {
            riw.Commit();
        }
        // TODO: we should make a new format with a clean header...
        // if (Random().nextInt(20) == 0)
        // {
        //     riw.DeleteDocuments(new Term("id", Integer.toString(i)));
        // }
    }
    riw.Dispose();
    CheckHeaders(dir);
    dir.Dispose();
}
public virtual void TestDateCompression()
{
    Directory dir = new RAMDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter iwriter = new IndexWriter(dir, iwc);
    const long @base = 13; // prime
    long day = 1000L * 60 * 60 * 24;
    Document doc = new Document();
    NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
    doc.Add(dvf);
    for (int i = 0; i < 300; ++i)
    {
        dvf.LongValue = @base + Random().Next(1000) * day;
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size1 = DirSize(dir);
    for (int i = 0; i < 50; ++i)
    {
        dvf.LongValue = @base + Random().Next(1000) * day;
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size2 = DirSize(dir);
    // make sure the new longs cost less than if they had simply been packed
    Assert.IsTrue(size2 < size1 + (PackedInts.BitsRequired(day) * 50) / 8);
}
private IDictionary<string, Document> GenerateIndexDocuments(int ndocs)
{
    IDictionary<string, Document> docs = new HashMap<string, Document>();
    for (int i = 0; i < ndocs; i++)
    {
        Field field = new TextField(FIELD_NAME, "field_" + i, Field.Store.YES);
        Field payload = new StoredField(PAYLOAD_FIELD_NAME, new BytesRef("payload_" + i));
        Field weight1 = new NumericDocValuesField(WEIGHT_FIELD_NAME_1, 10 + i);
        Field weight2 = new NumericDocValuesField(WEIGHT_FIELD_NAME_2, 20 + i);
        Field weight3 = new NumericDocValuesField(WEIGHT_FIELD_NAME_3, 30 + i);
        Field contexts = new StoredField(CONTEXTS_FIELD_NAME, new BytesRef("ctx_" + i + "_0"));
        Document doc = new Document();
        doc.Add(field);
        doc.Add(payload);
        doc.Add(weight1);
        doc.Add(weight2);
        doc.Add(weight3);
        doc.Add(contexts);
        for (int j = 1; j < AtLeast(3); j++)
        {
            contexts.BytesValue = new BytesRef("ctx_" + i + "_" + j);
            doc.Add(contexts);
        }
        docs.Put(field.StringValue, doc);
    }
    return docs;
}
public virtual void TestNumerics([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")] IConcurrentMergeScheduler scheduler)
{
    BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("2BNumerics"));
    if (dir is MockDirectoryWrapper)
    {
        ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
    }
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
        .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .SetRAMBufferSizeMB(256.0)
        .SetMergeScheduler(scheduler)
        .SetMergePolicy(NewLogMergePolicy(false, 10))
        .SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE));
    Document doc = new Document();
    NumericDocValuesField dvField = new NumericDocValuesField("dv", 0);
    doc.Add(dvField);
    for (int i = 0; i < int.MaxValue; i++)
    {
        dvField.LongValue = i;
        w.AddDocument(doc);
        if (i % 100000 == 0)
        {
            Console.WriteLine("indexed: " + i);
            Console.Out.Flush();
        }
    }
    w.ForceMerge(1);
    w.Dispose();
    Console.WriteLine("verifying...");
    Console.Out.Flush();
    DirectoryReader r = DirectoryReader.Open(dir);
    long expectedValue = 0;
    foreach (AtomicReaderContext context in r.Leaves)
    {
        AtomicReader reader = context.AtomicReader;
        NumericDocValues dv = reader.GetNumericDocValues("dv");
        for (int i = 0; i < reader.MaxDoc; i++)
        {
            Assert.AreEqual(expectedValue, dv.Get(i));
            expectedValue++;
        }
    }
    r.Dispose();
    dir.Dispose();
}
public virtual void TestNumericDocValuesField()
{
    NumericDocValuesField field = new NumericDocValuesField("foo", 5L);
    TrySetBoost(field);
    TrySetByteValue(field);
    TrySetBytesValue(field);
    TrySetBytesRefValue(field);
    TrySetDoubleValue(field);
    TrySetIntValue(field);
    TrySetFloatValue(field);
    field.SetInt64Value(6); // ok
    TrySetReaderValue(field);
    TrySetShortValue(field);
    TrySetStringValue(field);
    TrySetTokenStreamValue(field);
    Assert.AreEqual(6L, field.GetInt64Value().Value);
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestUniqueValuesCompression()
{
    Directory dir = new RAMDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter iwriter = new IndexWriter(dir, iwc);
    int uniqueValueCount = TestUtil.NextInt(Random(), 1, 256);
    IList<long> values = new List<long>();
    Document doc = new Document();
    NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
    doc.Add(dvf);
    for (int i = 0; i < 300; ++i)
    {
        long value;
        if (values.Count < uniqueValueCount)
        {
            value = Random().NextLong();
            values.Add(value);
        }
        else
        {
            value = RandomInts.RandomFrom(Random(), values);
        }
        dvf.LongValue = value;
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size1 = DirSize(dir);
    for (int i = 0; i < 20; ++i)
    {
        dvf.LongValue = RandomInts.RandomFrom(Random(), values);
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size2 = DirSize(dir);
    // make sure the new longs did not cost 8 bytes each
    Assert.IsTrue(size2 < size1 + 8 * 20);
}
public virtual void TestSingleBigValueCompression()
{
    Directory dir = new RAMDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    IndexWriter iwriter = new IndexWriter(dir, iwc);
    Document doc = new Document();
    NumericDocValuesField dvf = new NumericDocValuesField("dv", 0);
    doc.Add(dvf);
    for (int i = 0; i < 20000; ++i)
    {
        dvf.LongValue = i & 1023;
        iwriter.AddDocument(doc);
    }
    iwriter.ForceMerge(1);
    long size1 = DirSize(dir);
    dvf.LongValue = long.MaxValue;
    iwriter.AddDocument(doc);
    iwriter.ForceMerge(1);
    long size2 = DirSize(dir);
    // make sure the new value did not grow the bits per value for every other value
    Assert.IsTrue(size2 < size1 + (20000 * (63 - 10)) / 8);
}
public virtual void TestNumerics()
{
    Directory dir = NewDirectory();
    Document doc = new Document();
    Field field = new NumericDocValuesField("numbers", 0);
    doc.Add(field);
    IndexWriterConfig iwc = NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, null);
    iwc.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwc);
    int numDocs = AtLeast(500);
    for (int i = 0; i < numDocs; i++)
    {
        field.LongValue = Random().NextLong();
        iw.AddDocument(doc);
        if (Random().Next(17) == 0)
        {
            iw.Commit();
        }
    }
    DirectoryReader ir = iw.Reader;
    iw.ForceMerge(1);
    DirectoryReader ir2 = iw.Reader;
    AtomicReader merged = GetOnlySegmentReader(ir2);
    iw.Dispose();
    NumericDocValues multi = MultiDocValues.GetNumericValues(ir, "numbers");
    NumericDocValues single = merged.GetNumericDocValues("numbers");
    for (int i = 0; i < numDocs; i++)
    {
        Assert.AreEqual(single.Get(i), multi.Get(i));
    }
    ir.Dispose();
    ir2.Dispose();
    dir.Dispose();
}
public virtual void TestDifferentTypedDocValuesField2()
{
    Directory d = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), d);
    Document doc = new Document();
    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    Field f = new NumericDocValuesField("field", 17);
    doc.Add(f);
    doc.Add(new SortedDocValuesField("field", new BytesRef("hello")));
    try
    {
        w.AddDocument(doc);
        Assert.Fail("didn't hit expected exception");
    }
    catch (System.ArgumentException)
    {
        // expected
    }
    doc = new Document();
    doc.Add(f);
    w.AddDocument(doc);
    w.ForceMerge(1);
    DirectoryReader r = w.Reader;
    Assert.AreEqual(17, GetOnlySegmentReader(r).GetNumericDocValues("field").Get(0));
    r.Dispose();
    w.Dispose();
    d.Dispose();
}
private void DoTestMissingVsFieldCache(LongProducer longs)
{
    AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField());
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
    Field idField = new StringField("id", "", Field.Store.NO);
    Field indexedField = NewStringField("indexed", "", Field.Store.NO);
    Field dvField = new NumericDocValuesField("dv", 0);

    // index some docs
    int numDocs = AtLeast(300);
    // numDocs should always be > 256 so that in case of a codec that optimizes
    // for numbers of values <= 256, all storage layouts are tested
    Debug.Assert(numDocs > 256);
    for (int i = 0; i < numDocs; i++)
    {
        idField.StringValue = Convert.ToString(i);
        long value = longs.Next();
        indexedField.StringValue = Convert.ToString(value);
        dvField.LongValue = value;
        Document doc = new Document();
        doc.Add(idField);
        // 1/4 of the time we neglect to add the fields
        if (Random().Next(4) > 0)
        {
            doc.Add(indexedField);
            doc.Add(dvField);
        }
        writer.AddDocument(doc);
        if (Random().Next(31) == 0)
        {
            writer.Commit();
        }
    }

    // delete some docs
    int numDeletions = Random().Next(numDocs / 10);
    for (int i = 0; i < numDeletions; i++)
    {
        int id = Random().Next(numDocs);
        writer.DeleteDocuments(new Term("id", Convert.ToString(id)));
    }

    // merge some segments and ensure that at least one of them has more than
    // 256 values
    writer.ForceMerge(numDocs / 256);
    writer.Dispose();

    // compare
    DirectoryReader ir = DirectoryReader.Open(dir);
    foreach (var context in ir.Leaves)
    {
        AtomicReader r = context.AtomicReader;
        Bits expected = FieldCache.DEFAULT.GetDocsWithField(r, "indexed");
        Bits actual = FieldCache.DEFAULT.GetDocsWithField(r, "dv");
        AssertEquals(expected, actual);
    }
    ir.Dispose();
    dir.Dispose();
}
public void Test()
{
    RandomIndexWriter writer;
    DirectoryReader indexReader;
    int numParents = AtLeast(200);
    IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    cfg.SetMergePolicy(NewLogMergePolicy());
    using (writer = new RandomIndexWriter(Random(), NewDirectory(), cfg))
    {
        Document parentDoc = new Document();
        NumericDocValuesField parentVal = new NumericDocValuesField("parent_val", 0L);
        parentDoc.Add(parentVal);
        StringField parent = new StringField("parent", "true", Field.Store.YES);
        parentDoc.Add(parent);
        for (int i = 0; i < numParents; ++i)
        {
            List<Document> documents = new List<Document>();
            int numChildren = Random().Next(10);
            for (int j = 0; j < numChildren; ++j)
            {
                Document childDoc = new Document();
                childDoc.Add(new NumericDocValuesField("child_val", Random().Next(5)));
                documents.Add(childDoc);
            }
            parentVal.LongValue = Random().Next(50);
            documents.Add(parentDoc);
            writer.AddDocuments(documents);
        }
        writer.ForceMerge(1);
        indexReader = writer.Reader;
    }

    AtomicReader reader = GetOnlySegmentReader(indexReader);
    Filter parentsFilter = new FixedBitSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("parent", "true"))));
    FixedBitSet parentBits = (FixedBitSet)parentsFilter.GetDocIdSet(reader.AtomicContext, null);
    NumericDocValues parentValues = reader.GetNumericDocValues("parent_val");
    NumericDocValues childValues = reader.GetNumericDocValues("child_val");
    Sort parentSort = new Sort(new SortField("parent_val", SortField.Type_e.LONG));
    Sort childSort = new Sort(new SortField("child_val", SortField.Type_e.LONG));
    Sort sort = new Sort(new SortField("custom", new BlockJoinComparatorSource(parentsFilter, parentSort, childSort)));
    Sorter sorter = new Sorter(sort);
    Sorter.DocMap docMap = sorter.Sort(reader);
    assertEquals(reader.MaxDoc, docMap.Count);

    int[] children = new int[1];
    int numChildren2 = 0;
    int previousParent = -1;
    for (int i = 0; i < docMap.Count; ++i)
    {
        int oldID = docMap.NewToOld(i);
        if (parentBits.Get(oldID))
        {
            // check that we have the right children
            for (int j = 0; j < numChildren2; ++j)
            {
                assertEquals(oldID, parentBits.NextSetBit(children[j]));
            }
            // check that children are sorted
            for (int j = 1; j < numChildren2; ++j)
            {
                int doc1 = children[j - 1];
                int doc2 = children[j];
                if (childValues.Get(doc1) == childValues.Get(doc2))
                {
                    assertTrue(doc1 < doc2); // sort is stable
                }
                else
                {
                    assertTrue(childValues.Get(doc1) < childValues.Get(doc2));
                }
            }
            // check that parents are sorted
            if (previousParent != -1)
            {
                if (parentValues.Get(previousParent) == parentValues.Get(oldID))
                {
                    assertTrue(previousParent < oldID);
                }
                else
                {
                    assertTrue(parentValues.Get(previousParent) < parentValues.Get(oldID));
                }
            }
            // reset
            previousParent = oldID;
            numChildren2 = 0;
        }
        else
        {
            children = ArrayUtil.Grow(children, numChildren2 + 1);
            children[numChildren2++] = oldID;
        }
    }
    indexReader.Dispose();
    writer.w.Directory.Dispose();
}
public virtual void TestNumericDocValuesField()
{
    NumericDocValuesField field = new NumericDocValuesField("foo", 5L);
    TrySetBoost(field);
    TrySetByteValue(field);
    TrySetBytesValue(field);
    TrySetBytesRefValue(field);
    TrySetDoubleValue(field);
    TrySetIntValue(field);
    TrySetFloatValue(field);
    field.LongValue = 6; // ok
    TrySetReaderValue(field);
    TrySetShortValue(field);
    TrySetStringValue(field);
    TrySetTokenStreamValue(field);
    Assert.AreEqual(6L, (long)field.NumericValue);
}
/// <summary>
/// Tests dv against stored fields with threads (all types + missing)
/// </summary>
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
public virtual void TestThreads2()
{
    AssumeTrue("Codec does not support GetDocsWithField", DefaultCodecSupportsDocsWithField());
    AssumeTrue("Codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
    Field idField = new StringField("id", "", Field.Store.NO);
    Field storedBinField = new StoredField("storedBin", new byte[0]);
    Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef());
    Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef());
    Field storedNumericField = new StoredField("storedNum", "");
    Field dvNumericField = new NumericDocValuesField("dvNum", 0);

    // index some docs
    int numDocs = AtLeast(300);
    for (int i = 0; i < numDocs; i++)
    {
        idField.StringValue = Convert.ToString(i);
        int length = TestUtil.NextInt(Random(), 0, 8);
        var buffer = new byte[length];
        Random().NextBytes(buffer);
        storedBinField.BytesValue = new BytesRef(buffer);
        dvBinField.BytesValue = new BytesRef(buffer);
        dvSortedField.BytesValue = new BytesRef(buffer);
        long numericValue = Random().NextLong();
        storedNumericField.StringValue = Convert.ToString(numericValue);
        dvNumericField.LongValue = numericValue;
        Document doc = new Document();
        doc.Add(idField);
        if (Random().Next(4) > 0)
        {
            doc.Add(storedBinField);
            doc.Add(dvBinField);
            doc.Add(dvSortedField);
        }
        if (Random().Next(4) > 0)
        {
            doc.Add(storedNumericField);
            doc.Add(dvNumericField);
        }
        int numSortedSetFields = Random().Next(3);
        SortedSet<string> values = new SortedSet<string>();
        for (int j = 0; j < numSortedSetFields; j++)
        {
            values.Add(TestUtil.RandomSimpleString(Random()));
        }
        foreach (string v in values)
        {
            doc.Add(new SortedSetDocValuesField("dvSortedSet", new BytesRef(v)));
            doc.Add(new StoredField("storedSortedSet", v));
        }
        writer.AddDocument(doc);
        if (Random().Next(31) == 0)
        {
            writer.Commit();
        }
    }

    // delete some docs
    int numDeletions = Random().Next(numDocs / 10);
    for (int i = 0; i < numDeletions; i++)
    {
        int id = Random().Next(numDocs);
        writer.DeleteDocuments(new Term("id", Convert.ToString(id)));
    }
    writer.Dispose();

    // compare
    DirectoryReader ir = DirectoryReader.Open(dir);
    int numThreads = TestUtil.NextInt(Random(), 2, 7);
    ThreadClass[] threads = new ThreadClass[numThreads];
    CountdownEvent startingGun = new CountdownEvent(1);
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i] = new ThreadAnonymousInnerClassHelper2(this, ir, startingGun);
        threads[i].Start();
    }
    startingGun.Signal();
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }
    ir.Dispose();
    dir.Dispose();
}
private void DoTest(FieldInfo.DocValuesType_e type)
{
    Directory d = NewDirectory();
    IndexWriterConfig iwConfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    int nDocs = AtLeast(50);
    Field id = new NumericDocValuesField("id", 0);
    Field f;
    switch (type)
    {
        case FieldInfo.DocValuesType_e.BINARY:
            f = new BinaryDocValuesField("dv", new BytesRef());
            break;
        case FieldInfo.DocValuesType_e.SORTED:
            f = new SortedDocValuesField("dv", new BytesRef());
            break;
        case FieldInfo.DocValuesType_e.NUMERIC:
            f = new NumericDocValuesField("dv", 0);
            break;
        default:
            throw new InvalidOperationException();
    }
    Document document = new Document();
    document.Add(id);
    document.Add(f);

    object[] vals = new object[nDocs];

    RandomIndexWriter iw = new RandomIndexWriter(Random(), d, iwConfig);
    for (int i = 0; i < nDocs; ++i)
    {
        id.LongValue = i;
        switch (type)
        {
            case FieldInfo.DocValuesType_e.SORTED:
            case FieldInfo.DocValuesType_e.BINARY:
                do
                {
                    vals[i] = TestUtil.RandomSimpleString(Random(), 20);
                } while (((string)vals[i]).Length == 0);
                f.BytesValue = new BytesRef((string)vals[i]);
                break;
            case FieldInfo.DocValuesType_e.NUMERIC:
                int bitsPerValue = Random().NextIntBetween(1, 31); // keep it an int
                vals[i] = (long)Random().Next((int)PackedInts.MaxValue(bitsPerValue));
                f.LongValue = (long)vals[i];
                break;
        }
        iw.AddDocument(document);
        if (Random().NextBoolean() && i % 10 == 9)
        {
            iw.Commit();
        }
    }
    iw.Dispose();

    DirectoryReader rd = DirectoryReader.Open(d);
    foreach (AtomicReaderContext leaf in rd.Leaves)
    {
        FunctionValues ids = (new LongFieldSource("id")).GetValues(null, leaf);
        ValueSource vs;
        switch (type)
        {
            case FieldInfo.DocValuesType_e.BINARY:
            case FieldInfo.DocValuesType_e.SORTED:
                vs = new BytesRefFieldSource("dv");
                break;
            case FieldInfo.DocValuesType_e.NUMERIC:
                vs = new LongFieldSource("dv");
                break;
            default:
                throw new InvalidOperationException();
        }
        FunctionValues values = vs.GetValues(null, leaf);
        BytesRef bytes = new BytesRef();
        for (int i = 0; i < leaf.AtomicReader.MaxDoc; ++i)
        {
            assertTrue(values.Exists(i));
            if (vs is BytesRefFieldSource)
            {
                assertTrue(values.ObjectVal(i) is string);
            }
            else if (vs is LongFieldSource)
            {
                assertTrue(values.ObjectVal(i) is long?);
                assertTrue(values.BytesVal(i, bytes));
            }
            else
            {
                throw new InvalidOperationException();
            }

            object expected = vals[ids.IntVal(i)];
            switch (type)
            {
                case FieldInfo.DocValuesType_e.SORTED:
                    values.OrdVal(i); // no exception
                    assertTrue(values.NumOrd() >= 1);
                    goto case FieldInfo.DocValuesType_e.BINARY;
                case FieldInfo.DocValuesType_e.BINARY:
                    assertEquals(expected, values.ObjectVal(i));
                    assertEquals(expected, values.StrVal(i));
                    assertTrue(values.BytesVal(i, bytes));
                    assertEquals(new BytesRef((string)expected), bytes);
                    break;
                case FieldInfo.DocValuesType_e.NUMERIC:
                    assertEquals(Number.ToInt64(expected.ToString()), values.LongVal(i));
                    break;
            }
        }
    }
    rd.Dispose();
    d.Dispose();
}
private void DoTestNumericsVsStoredFields(LongProducer longs)
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
    Document doc = new Document();
    Field idField = new StringField("id", "", Field.Store.NO);
    Field storedField = NewStringField("stored", "", Field.Store.YES);
    Field dvField = new NumericDocValuesField("dv", 0);
    doc.Add(idField);
    doc.Add(storedField);
    doc.Add(dvField);

    // index some docs
    int numDocs = AtLeast(300);
    // numDocs should always be > 256 so that in case of a codec that optimizes
    // for numbers of values <= 256, all storage layouts are tested
    Debug.Assert(numDocs > 256);
    for (int i = 0; i < numDocs; i++)
    {
        idField.StringValue = Convert.ToString(i);
        long value = longs.Next();
        storedField.StringValue = Convert.ToString(value);
        dvField.LongValue = value;
        writer.AddDocument(doc);
        if (Random().Next(31) == 0)
        {
            writer.Commit();
        }
    }

    // delete some docs
    int numDeletions = Random().Next(numDocs / 10);
    for (int i = 0; i < numDeletions; i++)
    {
        int id = Random().Next(numDocs);
        writer.DeleteDocuments(new Term("id", Convert.ToString(id)));
    }

    // merge some segments and ensure that at least one of them has more than
    // 256 values
    writer.ForceMerge(numDocs / 256);
    writer.Dispose();

    // compare
    DirectoryReader ir = DirectoryReader.Open(dir);
    foreach (AtomicReaderContext context in ir.Leaves)
    {
        AtomicReader r = context.AtomicReader;
        NumericDocValues docValues = r.GetNumericDocValues("dv");
        for (int i = 0; i < r.MaxDoc; i++)
        {
            long storedValue = Convert.ToInt64(r.Document(i).Get("stored"));
            Assert.AreEqual(storedValue, docValues.Get(i));
        }
    }
    ir.Dispose();
    dir.Dispose();
}
public virtual void TestMixedTypesDifferentThreads()
{
    Directory dir = NewDirectory();
    IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    CountdownEvent startingGun = new CountdownEvent(1);
    AtomicBoolean hitExc = new AtomicBoolean();
    ThreadClass[] threads = new ThreadClass[3];
    for (int i = 0; i < 3; i++)
    {
        Field field;
        if (i == 0)
        {
            field = new SortedDocValuesField("foo", new BytesRef("hello"));
        }
        else if (i == 1)
        {
            field = new NumericDocValuesField("foo", 0);
        }
        else
        {
            field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
        }
        Document doc = new Document();
        doc.Add(field);
        threads[i] = new ThreadAnonymousInnerClassHelper(this, w, startingGun, hitExc, doc);
        threads[i].Start();
    }
    startingGun.Signal();
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }
    Assert.IsTrue(hitExc.Get());
    w.Dispose();
    dir.Dispose();
}
public virtual void TestMultiValuedDocValuesField()
{
    Directory d = NewDirectory();
    RandomIndexWriter w = new RandomIndexWriter(Random(), d);
    Document doc = new Document();
    Field f = new NumericDocValuesField("field", 17);
    // Index doc values are single-valued so we should not
    // be able to add same field more than once:
    doc.Add(f);
    doc.Add(f);
    try
    {
        w.AddDocument(doc);
        Assert.Fail("didn't hit expected exception");
    }
    catch (System.ArgumentException)
    {
        // expected
    }
    doc = new Document();
    doc.Add(f);
    w.AddDocument(doc);
    w.ForceMerge(1);
    DirectoryReader r = w.Reader;
    w.Dispose();
    Assert.AreEqual(17, FieldCache.DEFAULT.GetInts(GetOnlySegmentReader(r), "field", false).Get(0));
    r.Dispose();
    d.Dispose();
}
public void TestThreads()
{
    Directory dir = NewDirectory();
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
    Document doc = new Document();
    Field idField = new StringField("id", "", Field.Store.NO);
    Field storedBinField = new StoredField("storedBin", new byte[0]);
    Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef());
    Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef());
    Field storedNumericField = new StoredField("storedNum", "");
    Field dvNumericField = new NumericDocValuesField("dvNum", 0);
    doc.Add(idField);
    doc.Add(storedBinField);
    doc.Add(dvBinField);
    doc.Add(dvSortedField);
    doc.Add(storedNumericField);
    doc.Add(dvNumericField);

    // index some docs
    int numDocs = AtLeast(300);
    for (int i = 0; i < numDocs; i++)
    {
        idField.StringValue = Convert.ToString(i);
        int length = TestUtil.NextInt(Random(), 0, 8);
        var buffer = new byte[length];
        Random().NextBytes(buffer);
        storedBinField.BytesValue = new BytesRef(buffer);
        dvBinField.BytesValue = new BytesRef(buffer);
        dvSortedField.BytesValue = new BytesRef(buffer);
        long numericValue = Random().NextLong();
        storedNumericField.StringValue = Convert.ToString(numericValue);
        dvNumericField.LongValue = numericValue;
        writer.AddDocument(doc);
        if (Random().Next(31) == 0)
        {
            writer.Commit();
        }
    }

    // delete some docs
    int numDeletions = Random().Next(numDocs / 10);
    for (int i = 0; i < numDeletions; i++)
    {
        int id = Random().Next(numDocs);
        writer.DeleteDocuments(new Term("id", Convert.ToString(id)));
    }
    writer.Dispose();

    // compare
    DirectoryReader ir = DirectoryReader.Open(dir);
    int numThreads = TestUtil.NextInt(Random(), 2, 7);
    ThreadClass[] threads = new ThreadClass[numThreads];
    CountdownEvent startingGun = new CountdownEvent(1);
    for (int i = 0; i < threads.Length; i++)
    {
        threads[i] = new ThreadAnonymousInnerClassHelper(this, ir, startingGun);
        threads[i].Start();
    }
    startingGun.Signal();
    foreach (ThreadClass t in threads)
    {
        t.Join();
    }
    ir.Dispose();
    dir.Dispose();
}