/// <summary>
/// Verifies that <c>long</c> values written via <see cref="Int64Field"/> round-trip
/// through <see cref="FieldCache"/>: a random mix of extreme values
/// (<see cref="long.MinValue"/>, 0, <see cref="long.MaxValue"/>), small random values,
/// and "missing" documents (which must read back as 0 from the cache).
/// </summary>
public virtual void TestLongFieldCache()
{
    Directory dir = NewDirectory();
    IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    cfg.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(Random, dir, cfg);

    // A single reusable document/field pair; the field value is updated per doc.
    Document doc = new Document();
    Int64Field field = new Int64Field("f", 0L, Field.Store.YES);
    doc.Add(field);

    long[] expected = new long[TestUtil.NextInt32(Random, 1, 10)];
    for (int docId = 0; docId < expected.Length; ++docId)
    {
        long value;
        int choice = Random.Next(10);
        if (choice == 0)
        {
            value = long.MinValue;
        }
        else if (choice == 1)
        {
            value = 0;
        }
        else if (choice == 2)
        {
            value = long.MaxValue;
        }
        else
        {
            value = TestUtil.NextInt64(Random, -10, 10);
        }
        expected[docId] = value;

        if (value == 0 && Random.NextBoolean())
        {
            // Missing value: add an empty document; the cache reports 0 for it,
            // which matches the 0 recorded in expected[docId].
            iw.AddDocument(new Document());
        }
        else
        {
            field.SetInt64Value(value);
            iw.AddDocument(doc);
        }
    }

    // Merge to a single segment so GetOnlySegmentReader succeeds.
    iw.ForceMerge(1);
    DirectoryReader reader = iw.GetReader();
    Int64s longs = FieldCache.DEFAULT.GetInt64s(GetOnlySegmentReader(reader), "f", false);
    for (int docId = 0; docId < expected.Length; ++docId)
    {
        Assert.AreEqual(expected[docId], longs.Get(docId));
    }

    reader.Dispose();
    iw.Dispose();
    dir.Dispose();
}
/// <summary>
/// One-time setup: builds a shared index of <c>NoDocs</c> documents. Each document
/// carries stored long fields at several numeric precision steps (8, 6, 4, 2, and
/// "no trie" = <see cref="int.MaxValue"/>) with values spaced <c>Distance</c> apart,
/// plus unstored ascending fields with a distance of 1 (starting at -NoDocs/2) used
/// to test correct range splitting and inclusive/exclusive bounds.
/// </summary>
public override void BeforeClass()
{
    base.BeforeClass();
    NoDocs = AtLeast(4096);
    // Spread values across most of the long range without overflowing.
    Distance = (1L << 60) / NoDocs;
    Directory = NewDirectory();
    // NOTE: Random / TestUtil.NextInt32 / GetReader() align with the API usage
    // elsewhere in this file (see TestLongFieldCache).
    RandomIndexWriter writer = new RandomIndexWriter(
        Random,
        Directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))
            .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 100, 1000))
            .SetMergePolicy(NewLogMergePolicy()));

    // Base stored field type; frozen, then cloned below with varying precision steps.
    FieldType storedLong = new FieldType(Int64Field.TYPE_NOT_STORED);
    storedLong.IsStored = true;
    storedLong.Freeze();

    FieldType storedLong8 = new FieldType(storedLong);
    storedLong8.NumericPrecisionStep = 8;

    FieldType storedLong4 = new FieldType(storedLong);
    storedLong4.NumericPrecisionStep = 4;

    FieldType storedLong6 = new FieldType(storedLong);
    storedLong6.NumericPrecisionStep = 6;

    FieldType storedLong2 = new FieldType(storedLong);
    storedLong2.NumericPrecisionStep = 2;

    // int.MaxValue disables trie indexing entirely ("no trie").
    FieldType storedLongNone = new FieldType(storedLong);
    storedLongNone.NumericPrecisionStep = int.MaxValue;

    FieldType unstoredLong = Int64Field.TYPE_NOT_STORED;

    FieldType unstoredLong8 = new FieldType(unstoredLong);
    unstoredLong8.NumericPrecisionStep = 8;

    FieldType unstoredLong6 = new FieldType(unstoredLong);
    unstoredLong6.NumericPrecisionStep = 6;

    FieldType unstoredLong4 = new FieldType(unstoredLong);
    unstoredLong4.NumericPrecisionStep = 4;

    FieldType unstoredLong2 = new FieldType(unstoredLong);
    unstoredLong2.NumericPrecisionStep = 2;

    Int64Field field8 = new Int64Field("field8", 0L, storedLong8),
        field6 = new Int64Field("field6", 0L, storedLong6),
        field4 = new Int64Field("field4", 0L, storedLong4),
        field2 = new Int64Field("field2", 0L, storedLong2),
        fieldNoTrie = new Int64Field("field" + int.MaxValue, 0L, storedLongNone),
        ascfield8 = new Int64Field("ascfield8", 0L, unstoredLong8),
        ascfield6 = new Int64Field("ascfield6", 0L, unstoredLong6),
        ascfield4 = new Int64Field("ascfield4", 0L, unstoredLong4),
        ascfield2 = new Int64Field("ascfield2", 0L, unstoredLong2);

    Document doc = new Document();

    // Add fields that have a distance, to test general functionality.
    doc.Add(field8);
    doc.Add(field6);
    doc.Add(field4);
    doc.Add(field2);
    doc.Add(fieldNoTrie);

    // Add ascending fields with a distance of 1, beginning at -NoDocs/2, to test
    // the correct splitting of range and inclusive/exclusive bounds.
    doc.Add(ascfield8);
    doc.Add(ascfield6);
    doc.Add(ascfield4);
    doc.Add(ascfield2);

    // Add NoDocs docs with increasing long values by updating the shared fields.
    for (int l = 0; l < NoDocs; l++)
    {
        long val = Distance * l + StartOffset;
        field8.SetInt64Value(val);
        field6.SetInt64Value(val);
        field4.SetInt64Value(val);
        field2.SetInt64Value(val);
        fieldNoTrie.SetInt64Value(val);

        val = l - (NoDocs / 2);
        ascfield8.SetInt64Value(val);
        ascfield6.SetInt64Value(val);
        ascfield4.SetInt64Value(val);
        ascfield2.SetInt64Value(val);

        writer.AddDocument(doc);
    }

    Reader = writer.GetReader();
    Searcher = NewSearcher(Reader);
    writer.Dispose();
}