/// <summary>
/// Adds a stored or un-stored <see cref="Int32Field"/> with the provided value
/// and default <c>precisionStep</c>
/// <see cref="Util.NumericUtils.PRECISION_STEP_DEFAULT"/> (4).
/// </summary>
/// <param name="document">This <see cref="Document"/>.</param>
/// <param name="name"> field name </param>
/// <param name="value"> 32-bit <see cref="int"/> value </param>
/// <param name="stored"> <see cref="Field.Store.YES"/> if the content should also be stored </param>
/// <returns>The field that was added to this <see cref="Document"/>.</returns>
/// <exception cref="System.ArgumentNullException"> if this <paramref name="document"/> or the field <paramref name="name"/> is <c>null</c>. </exception>
public static Int32Field AddInt32Field(this Document document, string name, int value, Field.Store stored)
{
    // Guard the extension "this" argument so callers get an ArgumentNullException
    // naming the offending parameter instead of a NullReferenceException from Add().
    if (document is null)
    {
        throw new ArgumentNullException(nameof(document));
    }

    // The Int32Field constructor validates name (and throws ArgumentNullException).
    var field = new Int32Field(name, value, stored);
    document.Add(field);
    return field;
}
// Builds the per-thread document state. When reuseFields is true, a fixed set
// of Field instances (text + numeric) and one Document are created up front and
// recycled for every produced document; otherwise nothing is cached.
public DocState(bool reuseFields, FieldType ft, FieldType bodyFt)
{
    this.reuseFields = reuseFields;

    if (!reuseFields)
    {
        // No reuse: fields and documents are created fresh on demand elsewhere.
        fields = null;
        numericFields = null;
        doc = null;
        return;
    }

    // Pre-populate the reusable text fields (body uses its own FieldType).
    fields = new Dictionary<string, Field>
    {
        [BODY_FIELD] = new Field(BODY_FIELD, "", bodyFt),
        [TITLE_FIELD] = new Field(TITLE_FIELD, "", ft),
        [DATE_FIELD] = new Field(DATE_FIELD, "", ft),
        [ID_FIELD] = new StringField(ID_FIELD, "", Field.Store.YES),
        [NAME_FIELD] = new Field(NAME_FIELD, "", ft)
    };

    // Pre-populate the reusable numeric fields.
    numericFields = new Dictionary<string, Field>
    {
        [DATE_MSEC_FIELD] = new Int64Field(DATE_MSEC_FIELD, 0L, Field.Store.NO),
        [TIME_SEC_FIELD] = new Int32Field(TIME_SEC_FIELD, 0, Field.Store.NO)
    };

    doc = new Document();
}
/// <summary>
/// Adds a stored or un-stored <see cref="Int32Field"/> with the provided value.
/// <para/>
/// Expert: allows you to customize the
/// <see cref="FieldType"/>.
/// </summary>
/// <param name="document">This <see cref="Document"/>.</param>
/// <param name="name"> field name </param>
/// <param name="value"> 32-bit <see cref="int"/> value </param>
/// <param name="type"> customized field type: must have <see cref="FieldType.NumericType"/>
/// of <see cref="NumericType.INT32"/>. </param>
/// <returns>The field that was added to this <see cref="Document"/>.</returns>
/// <exception cref="System.ArgumentNullException"> if this <paramref name="document"/>, the field <paramref name="name"/> or <paramref name="type"/> is <c>null</c>. </exception>
/// <exception cref="System.ArgumentException">if the field type does not have a
/// <see cref="FieldType.NumericType"/> of <see cref="NumericType.INT32"/> </exception>
public static Int32Field AddInt32Field(this Document document, string name, int value, FieldType type)
{
    // Guard the extension "this" argument explicitly (matches the other overloads);
    // name/type validation is performed by the Int32Field constructor.
    if (document is null)
    {
        throw new ArgumentNullException(nameof(document));
    }

    var field = new Int32Field(name, value, type);
    document.Add(field);
    return field;
}
// Captures the three row fields used for audit file tracking.
// Only the filename field is mandatory; originalName and size may be null.
public AuditFileFieldInfo(StringField filename, StringField originalName, Int32Field size)
{
    // Fix: the exception previously reported the non-existent parameter name
    // "filenameField"; use nameof so it always matches the real parameter.
    if (filename == null)
        throw new ArgumentNullException(nameof(filename));

    Filename = filename;
    OriginalName = originalName;
    Size = size;
}
// Verifies FieldCache.GetInt32s on a single-segment index: writes a random
// number of int values into field "f" (biased toward the int.MinValue / 0 /
// int.MaxValue edge cases), sometimes substituting an empty document when the
// value is 0 (the "missing" case, which the cache also reports as 0), then
// force-merges and asserts every cached slot matches what was written.
public virtual void TestIntFieldCache()
{
    Directory dir = NewDirectory();
    IndexWriterConfig cfg = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    cfg.SetMergePolicy(NewLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(Random, dir, cfg);
    Document doc = new Document();
    Int32Field field = new Int32Field("f", 0, Field.Store.YES);
    doc.Add(field);
    int[] values = new int[TestUtil.NextInt32(Random, 1, 10)];
    for (int i = 0; i < values.Length; ++i)
    {
        int v;
        // 30% chance of an extreme/zero value, otherwise a small random int.
        switch (Random.Next(10))
        {
            case 0:
                v = int.MinValue;
                break;
            case 1:
                v = 0;
                break;
            case 2:
                v = int.MaxValue;
                break;
            default:
                v = TestUtil.NextInt32(Random, -10, 10);
                break;
        }
        values[i] = v;
        if (v == 0 && Random.NextBoolean())
        {
            // missing
            iw.AddDocument(new Document());
        }
        else
        {
            field.SetInt32Value(v);
            iw.AddDocument(doc);
        }
    }
    // Single segment so GetOnlySegmentReader below is valid.
    iw.ForceMerge(1);
    DirectoryReader reader = iw.GetReader();
    Int32s ints = FieldCache.DEFAULT.GetInt32s(GetOnlySegmentReader(reader), "f", false);
    for (int i = 0; i < values.Length; ++i)
    {
        Assert.AreEqual(values[i], ints.Get(i));
    }
    reader.Dispose();
    iw.Dispose();
    dir.Dispose();
}
// Activates multi-tenant filtering only for rows implementing IMultiTenantRow,
// capturing the row's tenant id field for later use.
public bool ActivateFor(Row row)
{
    if (row is IMultiTenantRow multiTenantRow)
    {
        fldTenantId = multiTenantRow.TenantIdField;
        return true;
    }

    return false;
}
// Confirms AddInt32Field(name, value, Field.Store) adds a field to the target
// document with the given name/value and the stored Int32Field type.
public void TestAddInt32Field_Stored()
{
    const int value = 123;
    var stored = Field.Store.YES;
    Int32Field field = null;

    AssertDocumentExtensionAddsToDocument(document => field = document.AddInt32Field("theName", value, stored));

    Assert.AreEqual("theName", field.Name);
    Assert.AreEqual(value, field.GetInt32ValueOrDefault());
    // Store.YES must map to the shared TYPE_STORED FieldType instance.
    Assert.AreSame(Int32Field.TYPE_STORED, field.FieldType);
}
// Wires up the DPoS consensus state containers exposed by the field-map
// collection so this instance reads and writes the same underlying storage
// as the rest of the consensus machinery.
public DPoS(AElfDPoSFieldMapCollection collection)
{
    _currentRoundNumberField = collection.CurrentRoundNumberField;
    _blockProducerField = collection.BlockProducerField;
    _dPoSInfoMap = collection.DPoSInfoMap;
    _eBPMap = collection.EBPMap;
    _timeForProducingExtraBlockField = collection.TimeForProducingExtraBlockField;
    _firstPlaceMap = collection.FirstPlaceMap;
    _miningIntervalField = collection.MiningIntervalField;
    _roundHashMap = collection.RoundHashMap;
}
// Captures the three row fields used for audit file tracking.
// Only the filename field is mandatory; originalName and size may be null.
public AuditFileFieldInfo(StringField filename, StringField originalName, Int32Field size)
{
    if (filename == null)
    {
        // Fix: previously threw with the non-existent parameter name
        // "filenameField"; nameof keeps it in sync with the signature.
        throw new ArgumentNullException(nameof(filename));
    }

    Filename = filename;
    OriginalName = originalName;
    Size = size;
}
// Enables this behavior only when the row implements IMultiTenantRow,
// remembering its tenant id field for subsequent filtering.
public bool ActivateFor(Row row)
{
    if (row is IMultiTenantRow multiTenantRow)
    {
        fldTenantId = multiTenantRow.TenantIdField;
        return true;
    }

    return false;
}
// Enables this behavior only when the row implements IMultiComercioRow,
// remembering its commerce id field for subsequent filtering.
public bool ActivateFor(Row row)
{
    if (row is IMultiComercioRow multiComercioRow)
    {
        fldIdComercio = multiComercioRow.IdComercioField;
        return true;
    }

    return false;
}
/// <summary>
/// Adds a stored or un-stored <see cref="Int32Field"/> with the provided value.
/// <para/>
/// Expert: allows a fully customized <see cref="FieldType"/>.
/// </summary>
/// <param name="document">This <see cref="Document"/>.</param>
/// <param name="name"> field name </param>
/// <param name="value"> 32-bit <see cref="int"/> value </param>
/// <param name="type"> customized field type: must have <see cref="FieldType.NumericType"/>
/// of <see cref="NumericType.INT32"/>. </param>
/// <returns>The field that was added to this <see cref="Document"/>.</returns>
/// <exception cref="ArgumentNullException"> if this <paramref name="document"/>, the field <paramref name="name"/> or <paramref name="type"/> is <c>null</c>. </exception>
/// <exception cref="ArgumentException">if the field type does not have a
/// <see cref="FieldType.NumericType"/> of <see cref="NumericType.INT32"/> </exception>
public static Int32Field AddInt32Field(this Document document, string name, int value, FieldType type)
{
    if (document is null)
    {
        throw new ArgumentNullException(nameof(document));
    }

    // Construction validates name/type; Add attaches the field to the document.
    var int32Field = new Int32Field(name, value, type);
    document.Add(int32Field);
    return int32Field;
}
// Adds an unstored numeric int field to the document. The field name is
// normalized to lower case and must have been pre-registered in IntFields.
private static void addNumericField(this Document doc, string userField, int fieldValue)
{
    var normalizedName = userField.ToLower(Str.Culture);

    if (!IntFields.Contains(normalizedName))
        throw new ArgumentException($"Numeric int field {normalizedName} not initialized");

    doc.Add(new Int32Field(normalizedName, fieldValue, Field.Store.NO));
}
// Adds an unstored numeric int field to the document. The field name is
// normalized to lower case and must be a registered int field.
private void addNumericField(Document doc, string userField, int fieldValue)
{
    var normalizedName = userField.ToLower(Str.Culture);

    if (!IsIntField(normalizedName))
        throw new ArgumentException($"{normalizedName} is not int");

    doc.Add(new Int32Field(normalizedName, fieldValue, Field.Store.NO));
}
// Adds an id field that is stored but not indexed: the value is only needed
// when documents are read back, never for searching.
private static void addIdField(Document doc, string fieldName, int fieldValue)
{
    var normalizedName = fieldName.ToLower(Str.Culture);

    var storedOnlyType = new FieldType(Int32Field.TYPE_STORED)
    {
        IsIndexed = false
    };

    doc.Add(new Int32Field(normalizedName, fieldValue, storedOnlyType));
}
/// <summary>
/// Deserializes this field's value from JSON into the given row.
/// Null/Undefined tokens clear the value; numeric/boolean tokens are coerced
/// to Int64 (or mapped to the enum type when <c>EnumType</c> is set); string
/// tokens are trimmed, with empty treated as null, then parsed as a number or
/// converted via the enum name. Any other token type is rejected.
/// </summary>
/// <param name="reader">The reader positioned at the value token.</param>
/// <param name="row">The row receiving the value.</param>
/// <param name="serializer">The serializer (unused by this field type).</param>
/// <exception cref="ArgumentNullException">reader</exception>
public override void ValueFromJson(JsonReader reader, IRow row, JsonSerializer serializer)
{
    if (reader == null)
    {
        throw new ArgumentNullException("reader");
    }
    switch (reader.TokenType)
    {
        case JsonToken.Null:
        case JsonToken.Undefined:
            _setValue(row, null);
            break;
        case JsonToken.Integer:
        case JsonToken.Float:
        case JsonToken.Boolean:
            // Invariant culture: JSON numbers are machine-formatted, not UI text.
            var v = Convert.ToInt64(reader.Value, CultureInfo.InvariantCulture);
            if (EnumType == null)
            {
                _setValue(row, v);
            }
            else
            {
                _setValue(row, Int32Field.ConvertEnumFromInt(EnumType, v));
            }
            break;
        case JsonToken.String:
            // Empty/whitespace-only strings are treated as null.
            string s = ((string)reader.Value).TrimToNull();
            if (s == null)
            {
                _setValue(row, null);
            }
            else if (EnumType == null)
            {
                _setValue(row, Convert.ToInt64(s, CultureInfo.InvariantCulture));
            }
            else
            {
                _setValue(row, Int32Field.ConvertEnumFromString(EnumType, s));
            }
            break;
        default:
            throw JsonUnexpectedToken(reader);
    }
    // Mark the field assigned so change tracking sees the update.
    row.FieldAssignedValue(this);
}
// Declares the row's field metadata: storage name, caption, size, flags and
// getter/setter delegates bound to the row's backing fields.
// Captions are localized Turkish UI text and must not be altered here.
public RowFields() : base(R.TableName, "")
{
    // Identity primary key.
    ID = new Int32Field(this, "ID", "ID", 0, FieldFlags.Identity, r => ((R)r)._id, (r, v) => ((R)r)._id = v);
    GroupID = new Int32Field(this, "GroupID", "Değer No", 0, FieldFlags.Required, r => ((R)r)._groupId, (r, v) => ((R)r)._groupId = v);
    DisplayOrder = new Int32Field(this, "DisplayOrder", "Gösterim Sırası", 0, FieldFlags.Required, r => ((R)r)._displayOrder, (r, v) => ((R)r)._displayOrder = v);
    // Active flag stored as a 16-bit integer rather than a boolean.
    IsActive = new Int16Field(this, "IsActive", "Aktif", 0, FieldFlags.Required, r => ((R)r)._isActive, (r, v) => ((R)r)._isActive = v);
}
// Declares the row's field metadata: storage name, caption, size, flags and
// getter/setter delegates bound to the row's backing fields.
// Captions are localized Turkish UI text and must not be altered here.
public RowFields() : base(R.TableName, "")
{
    // NOTE(review): ID is flagged NotNull while GroupID carries Identity —
    // unusual split compared to sibling row types; confirm this is intentional.
    ID = new Int32Field(this, "ID", "ID", 0, FieldFlags.NotNull, r => ((R)r)._id, (r, v) => ((R)r)._id = v);
    GroupID = new Int32Field(this, "GroupID", "Değer No", 0, FieldFlags.Identity, r => ((R)r)._groupId, (r, v) => ((R)r)._groupId = v);
    DisplayOrder = new Int32Field(this, "DisplayOrder", "Gösterim Sırası", 0, FieldFlags.Required, r => ((R)r)._displayOrder, (r, v) => ((R)r)._displayOrder = v);
    // Active flag stored as a 16-bit integer rather than a boolean.
    IsActive = new Int16Field(this, "IsActive", "Aktif", 0, FieldFlags.Required, r => ((R)r)._isActive, (r, v) => ((R)r)._isActive = v);
}
// Initializes the paint style from a serialized field list entry.
// The entry's byte selects the style; Colour and PaletteReference both carry
// an Int32 payload read from the following PaintStyleData field.
// Missing/unknown entries fall back to Inherited (via the : this() defaults).
public PaintStyle(FieldList source, Enum sourceID)
    : this()
{
    if ((source != null) && (sourceID != null))
    {
        PaletteEntryType pst = (PaletteEntryType)(source[sourceID].AsByte() ?? (byte)PaletteEntryType.Inherited);

        switch (pst)
        {
            case PaletteEntryType.DoNotPaint:
                StyleType = PaletteEntryType.DoNotPaint;
                break;

            // De-duplicated: these two branches were identical except for the
            // StyleType value, which is pst itself in both cases.
            case PaletteEntryType.Colour:
            case PaletteEntryType.PaletteReference:
            {
                StyleType = pst;

                Int32Field field = source.GetNextItemAfter<Int32Field>(sourceID, DefAgentFieldID.PaintStyleData);
                if (field != null)
                {
                    Data = field.Data;
                }
                break;
            }

            case PaletteEntryType.Inherited:
            default:
                StyleType = PaletteEntryType.Inherited;
                break;
        }
    }
}
// Materializes custom field definitions into concrete Field instances attached
// to the given collection. Field kind is chosen from the definition's type
// (defaulting to string), names get the optional prefix, and each created
// field receives the definition's default value before the optional
// per-field initialize callback runs.
public static Field[] ToFieldArray(this IEnumerable<ICustomFieldDefinition> items, ICollection<Field> collection, string namePrefix, Action<Field, ICustomFieldDefinition> initialize = null)
{
    var created = new List<Field>();

    foreach (var definition in items)
    {
        var flags = definition.IsRequired ? Required : NotRequired;
        var caption = ToLocalText(definition.Title);
        var fieldName = (namePrefix ?? "") + definition.Name;

        Field field;
        switch (definition.FieldType)
        {
            case CustomFieldType.Boolean:
                field = new BooleanField(collection, fieldName, caption, 0, flags);
                break;

            case CustomFieldType.Date:
                // Date-only: kind left unspecified so no time zone conversion occurs.
                field = new DateTimeField(collection, fieldName, caption, 0, flags) { DateTimeKind = DateTimeKind.Unspecified };
                break;

            case CustomFieldType.DateTime:
                field = new DateTimeField(collection, fieldName, caption, 0, flags) { DateTimeKind = DateTimeKind.Local };
                break;

            case CustomFieldType.Decimal:
                field = new DecimalField(collection, fieldName, caption, definition.Size, flags);
                break;

            case CustomFieldType.Int32:
                field = new Int32Field(collection, fieldName, caption, definition.Size, flags);
                break;

            case CustomFieldType.Int64:
                field = new Int64Field(collection, fieldName, caption, definition.Size, flags);
                break;

            default:
                // Any unrecognized type is stored as a string.
                field = new StringField(collection, fieldName, caption, definition.Size, flags);
                break;
        }

        field.DefaultValue = definition.DefaultValue.TrimToNull();
        created.Add(field);

        if (initialize != null)
            initialize(field, definition);
    }

    return created.ToArray();
}
// Confirms AddInt32Field(name, value, FieldType) adds a field carrying the
// exact custom FieldType instance supplied by the caller.
public void TestAddInt32Field_FieldType()
{
    const int value = 123;
    var fieldType = new FieldType
    {
        IsIndexed = true,
        IsTokenized = true,
        OmitNorms = false,
        IndexOptions = IndexOptions.DOCS_ONLY,
        NumericType = NumericType.INT32,
        IsStored = true
    }.Freeze();
    Int32Field field = null;

    AssertDocumentExtensionAddsToDocument(document => field = document.AddInt32Field("theName", value, fieldType));

    Assert.AreEqual("theName", field.Name);
    Assert.AreEqual(value, field.GetInt32ValueOrDefault());
    // The custom type must be used as-is, not copied or substituted.
    Assert.AreSame(fieldType, field.FieldType);
}
// Returns a numeric field of the requested type for the given name.
// When field reuse is enabled, a cached instance is returned (and newly
// created fields are cached); otherwise a fresh field is created per call.
internal Field GetNumericField(string name, NumericType type)
{
    Field field = null;
    if (reuseFields)
    {
        numericFields.TryGetValue(name, out field);
    }

    if (field == null)
    {
        switch (type)
        {
            case NumericType.INT32:
                field = new Int32Field(name, 0, Field.Store.NO);
                break;
            case NumericType.INT64:
                field = new Int64Field(name, 0L, Field.Store.NO);
                break;
            case NumericType.SINGLE:
                field = new SingleField(name, 0.0F, Field.Store.NO);
                break;
            case NumericType.DOUBLE:
                field = new DoubleField(name, 0.0, Field.Store.NO);
                break;
            default:
                throw new InvalidOperationException("Cannot get here");
        }

        if (reuseFields)
        {
            numericFields[name] = field;
        }
    }

    return field;
}
// Builds the momentary field list for one sensor readout. The raw counter is
// always reported as "Raw"; if a conversion expression is configured, it is
// evaluated with "Raw" bound to the raw value and the result is mapped to the
// matching field class (quantity, boolean, date/time, duration, enum,
// int32/int64 with or without a unit, string, time span — anything else falls
// back to its string representation). The primary "Value" field additionally
// receives module/localization metadata.
// NOTE(review): this.exp is lazily compiled from this.expression on first use
// and then cached for subsequent readouts.
private void CalcMomentary(List <Field> Fields, DateTime Now, ushort Raw)
{
    Fields.Add(new Int32Field(this, Now, "Raw", Raw, FieldType.Momentary, FieldQoS.AutomaticReadout, typeof(Module).Namespace, 26));
    if (this.exp is null && !string.IsNullOrEmpty(this.expression))
    {
        this.exp = new Expression(this.expression);
    }
    if (this.exp != null)
    {
        // Expose the raw counter to the expression as a double named "Raw".
        Variables v = new Variables()
        {
            { "Raw", (double)Raw }
        };
        object Value = this.exp.Evaluate(v);
        Field F;
        if (Value is double dbl)
        {
            F = new QuantityField(this, Now, this.fieldName, dbl, this.nrDecimals, this.unit, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is PhysicalQuantity qty)
        {
            // A physical quantity brings its own unit, overriding this.unit.
            F = new QuantityField(this, Now, this.fieldName, qty.Magnitude, this.nrDecimals, qty.Unit.ToString(), FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is bool b)
        {
            F = new BooleanField(this, Now, this.fieldName, b, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is DateTime DT)
        {
            F = new DateTimeField(this, Now, this.fieldName, DT, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is Duration D)
        {
            F = new DurationField(this, Now, this.fieldName, D, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is Enum E)
        {
            F = new EnumField(this, Now, this.fieldName, E, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is int i32)
        {
            // Integers with a configured unit become quantities (0 decimals).
            if (string.IsNullOrEmpty(this.unit))
            {
                F = new Int32Field(this, Now, this.fieldName, i32, FieldType.Momentary, FieldQoS.AutomaticReadout);
            }
            else
            {
                F = new QuantityField(this, Now, this.fieldName, i32, 0, this.unit, FieldType.Momentary, FieldQoS.AutomaticReadout);
            }
        }
        else if (Value is long i64)
        {
            if (string.IsNullOrEmpty(this.unit))
            {
                F = new Int64Field(this, Now, this.fieldName, i64, FieldType.Momentary, FieldQoS.AutomaticReadout);
            }
            else
            {
                F = new QuantityField(this, Now, this.fieldName, i64, 0, this.unit, FieldType.Momentary, FieldQoS.AutomaticReadout);
            }
        }
        else if (Value is string s)
        {
            F = new StringField(this, Now, this.fieldName, s, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else if (Value is TimeSpan TS)
        {
            F = new TimeField(this, Now, this.fieldName, TS, FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        else
        {
            // Fallback: report the value's string representation.
            F = new StringField(this, Now, this.fieldName, Value.ToString(), FieldType.Momentary, FieldQoS.AutomaticReadout);
        }
        if (this.fieldName == "Value")
        {
            F.Module = typeof(Module).Namespace;
            F.StringIdSteps = new LocalizationStep[] { new LocalizationStep(13) };
        }
        Fields.Add(F);
    }
}
// Randomized all-group-heads collector test. Per iteration: builds documents
// with random group/sort1-3/content fields (optionally doc-values backed),
// indexes them, maps Lucene docids back to logical ids via the "id" field
// cache, then for 100 random search/sort combinations compares the collector's
// retrieved group heads against independently computed expected heads.
// Statement order is load-bearing (shared Field instances are mutated per
// document), so only comments are added here.
public void TestRandom()
{
    int numberOfRuns = TestUtil.NextInt32(Random, 3, 6);
    for (int iter = 0; iter < numberOfRuns; iter++)
    {
        if (VERBOSE)
        {
            Console.WriteLine(string.Format("TEST: iter={0} total={1}", iter, numberOfRuns));
        }
        int numDocs = TestUtil.NextInt32(Random, 100, 1000) * RANDOM_MULTIPLIER;
        int numGroups = TestUtil.NextInt32(Random, 1, numDocs);
        if (VERBOSE)
        {
            Console.WriteLine("TEST: numDocs=" + numDocs + " numGroups=" + numGroups);
        }
        List <BytesRef> groups = new List <BytesRef>();
        for (int i = 0; i < numGroups; i++)
        {
            string randomValue;
            do
            {
                // B/c of DV based impl we can't see the difference between an empty string and a null value.
                // For that reason we don't generate empty string groups.
                randomValue = TestUtil.RandomRealisticUnicodeString(Random);
            } while ("".Equals(randomValue, StringComparison.Ordinal));
            groups.Add(new BytesRef(randomValue));
        }
        string[] contentStrings = new string[TestUtil.NextInt32(Random, 2, 20)];
        if (VERBOSE)
        {
            Console.WriteLine("TEST: create fake content");
        }
        for (int contentIDX = 0; contentIDX < contentStrings.Length; contentIDX++)
        {
            StringBuilder sb = new StringBuilder();
            sb.append("real").append(Random.nextInt(3)).append(' ');
            int fakeCount = Random.nextInt(10);
            for (int fakeIDX = 0; fakeIDX < fakeCount; fakeIDX++)
            {
                sb.append("fake ");
            }
            contentStrings[contentIDX] = sb.toString();
            if (VERBOSE)
            {
                Console.WriteLine(" content=" + sb.toString());
            }
        }
        Directory dir = NewDirectory();
        RandomIndexWriter w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
        // Lucene3x cannot use doc values; everything newer can.
        bool preFlex = "Lucene3x".Equals(w.IndexWriter.Config.Codec.Name, StringComparison.Ordinal);
        bool canUseIDV = !preFlex;
        DocValuesType valueType = vts[Random.nextInt(vts.Length)];
        // "doc" carries the group field; "docNoGroup" shares every other field
        // instance so both documents track the same mutated values.
        Document doc = new Document();
        Document docNoGroup = new Document();
        Field group = NewStringField("group", "", Field.Store.NO);
        doc.Add(group);
        Field valuesField = null;
        if (canUseIDV)
        {
            switch (valueType)
            {
                case DocValuesType.BINARY:
                    valuesField = new BinaryDocValuesField("group_dv", new BytesRef());
                    break;
                case DocValuesType.SORTED:
                    valuesField = new SortedDocValuesField("group_dv", new BytesRef());
                    break;
                default:
                    fail("unhandled type");
                    break;
            }
            doc.Add(valuesField);
        }
        Field sort1 = NewStringField("sort1", "", Field.Store.NO);
        doc.Add(sort1);
        docNoGroup.Add(sort1);
        Field sort2 = NewStringField("sort2", "", Field.Store.NO);
        doc.Add(sort2);
        docNoGroup.Add(sort2);
        Field sort3 = NewStringField("sort3", "", Field.Store.NO);
        doc.Add(sort3);
        docNoGroup.Add(sort3);
        Field content = NewTextField("content", "", Field.Store.NO);
        doc.Add(content);
        docNoGroup.Add(content);
        Int32Field id = new Int32Field("id", 0, Field.Store.NO);
        doc.Add(id);
        docNoGroup.Add(id);
        GroupDoc[] groupDocs = new GroupDoc[numDocs];
        for (int i = 0; i < numDocs; i++)
        {
            BytesRef groupValue;
            if (Random.nextInt(24) == 17)
            {
                // So we test the "doc doesn't have the group'd
                // field" case:
                groupValue = null;
            }
            else
            {
                groupValue = groups[Random.nextInt(groups.size())];
            }
            GroupDoc groupDoc = new GroupDoc(
                i,
                groupValue,
                groups[Random.nextInt(groups.size())],
                groups[Random.nextInt(groups.size())],
                new BytesRef(string.Format(CultureInfo.InvariantCulture, "{0:D5}", i)),
                contentStrings[Random.nextInt(contentStrings.Length)]
            );
            if (VERBOSE)
            {
                Console.WriteLine(" doc content=" + groupDoc.content + " id=" + i + " group=" + (groupDoc.group == null ? "null" : groupDoc.group.Utf8ToString()) + " sort1=" + groupDoc.sort1.Utf8ToString() + " sort2=" + groupDoc.sort2.Utf8ToString() + " sort3=" + groupDoc.sort3.Utf8ToString());
            }
            groupDocs[i] = groupDoc;
            if (groupDoc.group != null)
            {
                group.SetStringValue(groupDoc.group.Utf8ToString());
                if (canUseIDV)
                {
                    valuesField.SetBytesValue(new BytesRef(groupDoc.group.Utf8ToString()));
                }
            }
            sort1.SetStringValue(groupDoc.sort1.Utf8ToString());
            sort2.SetStringValue(groupDoc.sort2.Utf8ToString());
            sort3.SetStringValue(groupDoc.sort3.Utf8ToString());
            content.SetStringValue(groupDoc.content);
            id.SetInt32Value(groupDoc.id);
            if (groupDoc.group == null)
            {
                w.AddDocument(docNoGroup);
            }
            else
            {
                w.AddDocument(doc);
            }
        }
        DirectoryReader r = w.GetReader();
        w.Dispose();
        // NOTE: intentional but temporary field cache insanity!
        FieldCache.Int32s docIdToFieldId = FieldCache.DEFAULT.GetInt32s(SlowCompositeReaderWrapper.Wrap(r), "id", false);
        int[] fieldIdToDocID = new int[numDocs];
        for (int i = 0; i < numDocs; i++)
        {
            int fieldId = docIdToFieldId.Get(i);
            fieldIdToDocID[fieldId] = i;
        }
        try
        {
            IndexSearcher s = NewSearcher(r);
            if (typeof(SlowCompositeReaderWrapper).GetTypeInfo().IsAssignableFrom(s.IndexReader.GetType()))
            {
                canUseIDV = false;
            }
            else
            {
                canUseIDV = !preFlex;
            }
            // Record each doc's score per content term; scores must be unique-set once.
            for (int contentID = 0; contentID < 3; contentID++)
            {
                ScoreDoc[] hits = s.Search(new TermQuery(new Term("content", "real" + contentID)), numDocs).ScoreDocs;
                foreach (ScoreDoc hit in hits)
                {
                    GroupDoc gd = groupDocs[docIdToFieldId.Get(hit.Doc)];
                    assertTrue(gd.score == 0.0);
                    gd.score = hit.Score;
                    int docId = gd.id;
                    assertEquals(docId, docIdToFieldId.Get(hit.Doc));
                }
            }
            foreach (GroupDoc gd in groupDocs)
            {
                assertTrue(gd.score != 0.0);
            }
            for (int searchIter = 0; searchIter < 100; searchIter++)
            {
                if (VERBOSE)
                {
                    Console.WriteLine("TEST: searchIter=" + searchIter);
                }
                string searchTerm = "real" + Random.nextInt(3);
                bool sortByScoreOnly = Random.nextBoolean();
                Sort sortWithinGroup = GetRandomSort(sortByScoreOnly);
                AbstractAllGroupHeadsCollector allGroupHeadsCollector = CreateRandomCollector("group", sortWithinGroup, canUseIDV, valueType);
                s.Search(new TermQuery(new Term("content", searchTerm)), allGroupHeadsCollector);
                int[] expectedGroupHeads = CreateExpectedGroupHeads(searchTerm, groupDocs, sortWithinGroup, sortByScoreOnly, fieldIdToDocID);
                int[] actualGroupHeads = allGroupHeadsCollector.RetrieveGroupHeads();
                // The actual group heads contains Lucene ids. Need to change them into our id value.
                for (int i = 0; i < actualGroupHeads.Length; i++)
                {
                    actualGroupHeads[i] = docIdToFieldId.Get(actualGroupHeads[i]);
                }
                // Allows us the easily iterate and assert the actual and expected results.
                Array.Sort(expectedGroupHeads);
                Array.Sort(actualGroupHeads);
                if (VERBOSE)
                {
                    Console.WriteLine("Collector: " + allGroupHeadsCollector.GetType().Name);
                    Console.WriteLine("Sort within group: " + sortWithinGroup);
                    Console.WriteLine("Num group: " + numGroups);
                    Console.WriteLine("Num doc: " + numDocs);
                    Console.WriteLine("\n=== Expected: \n");
                    foreach (int expectedDocId in expectedGroupHeads)
                    {
                        GroupDoc expectedGroupDoc = groupDocs[expectedDocId];
                        string expectedGroup = expectedGroupDoc.group == null ? null : expectedGroupDoc.group.Utf8ToString();
                        Console.WriteLine(
                            string.Format(CultureInfo.InvariantCulture,
                                "Group:{0,10} score{1:0.0#######,5} Sort1:{2,10} Sort2:{3,10} Sort3:{4,10} doc:{5,10}",
                                expectedGroup, expectedGroupDoc.score, expectedGroupDoc.sort1.Utf8ToString(),
                                expectedGroupDoc.sort2.Utf8ToString(), expectedGroupDoc.sort3.Utf8ToString(), expectedDocId)
                        );
                    }
                    Console.WriteLine("\n=== Actual: \n");
                    foreach (int actualDocId in actualGroupHeads)
                    {
                        GroupDoc actualGroupDoc = groupDocs[actualDocId];
                        string actualGroup = actualGroupDoc.group == null ? null : actualGroupDoc.group.Utf8ToString();
                        Console.WriteLine(
                            string.Format(CultureInfo.InvariantCulture,
                                "Group:{0,10} score{1:0.0#######,5} Sort1:{2,10} Sort2:{3,10} Sort3:{4,10} doc:{5,10}",
                                actualGroup, actualGroupDoc.score, actualGroupDoc.sort1.Utf8ToString(),
                                actualGroupDoc.sort2.Utf8ToString(), actualGroupDoc.sort3.Utf8ToString(), actualDocId)
                        );
                    }
                    Console.WriteLine("\n===================================================================================");
                }
                assertArrayEquals(expectedGroupHeads, actualGroupHeads);
            }
        }
        finally
        {
            QueryUtils.PurgeFieldCache(r);
        }
        r.Dispose();
        dir.Dispose();
    }
}
// Placeholder wrapper for the sp_KarticaSredstvaPojedinacno stored procedure;
// not implemented yet, so every call throws NotImplementedException.
internal object sp_KarticaSredstvaPojedinacno(Int32Field osnovnoId)
{
    throw new NotImplementedException();
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
// Exercises stored-fields merging across two different codecs: documents with
// random binary fields are written while the codec is periodically swapped,
// a range of docs is deleted, the index is force-merged, and every surviving
// document's binary payloads are verified byte-for-byte against the source data.
public virtual void TestWriteReadMerge()
{
    // get another codec, other than the default: so we are merging segments across different codecs
    Codec otherCodec;
    if ("SimpleText".Equals(Codec.Default.Name, StringComparison.Ordinal))
    {
        otherCodec = new Lucene46Codec();
    }
    else
    {
        otherCodec = new SimpleTextCodec();
    }
    Directory dir = NewDirectory();
    IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
    iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30));
    RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
    int docCount = AtLeast(200);
    var data = new byte[docCount][][];
    for (int i = 0; i < docCount; ++i)
    {
        // Rarely produce many fields / long values / full byte range.
        int fieldCount = Rarely() ? RandomInts.NextIntBetween(Random(), 1, 500) : RandomInts.NextIntBetween(Random(), 1, 5);
        data[i] = new byte[fieldCount][];
        for (int j = 0; j < fieldCount; ++j)
        {
            int length = Rarely() ? Random().Next(1000) : Random().Next(10);
            int max = Rarely() ? 256 : 2;
            data[i][j] = RandomByteArray(length, max);
        }
    }
    // Stored-only type: values are retrieved, never searched.
    FieldType type = new FieldType(StringField.TYPE_STORED);
    type.IsIndexed = false;
    type.Freeze();
    Int32Field id = new Int32Field("id", 0, Field.Store.YES);
    for (int i = 0; i < data.Length; ++i)
    {
        Document doc = new Document();
        doc.Add(id);
        id.SetInt32Value(i);
        for (int j = 0; j < data[i].Length; ++j)
        {
            Field f = new Field("bytes" + j, data[i][j], type);
            doc.Add(f);
        }
        iw.w.AddDocument(doc);
        if (Random().NextBoolean() && (i % (data.Length / 10) == 0))
        {
            iw.w.Dispose();
            // test merging against a non-compressing codec
            if (iwConf.Codec == otherCodec)
            {
                iwConf.SetCodec(Codec.Default);
            }
            else
            {
                iwConf.SetCodec(otherCodec);
            }
            iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
        }
    }
    for (int i = 0; i < 10; ++i)
    {
        int min = Random().Next(data.Length);
        int max = min + Random().Next(20);
        iw.DeleteDocuments(NumericRangeQuery.NewInt32Range("id", min, max, true, false));
    }
    iw.ForceMerge(2); // force merges with deletions
    iw.Commit();
    DirectoryReader ir = DirectoryReader.Open(dir);
    Assert.IsTrue(ir.NumDocs > 0);
    int numDocs = 0;
    for (int i = 0; i < ir.MaxDoc; ++i)
    {
        Document doc = ir.Document(i);
        if (doc == null)
        {
            continue;
        }
        ++numDocs;
        int docId = (int)doc.GetField("id").GetNumericValue();
        // +1 accounts for the id field itself.
        Assert.AreEqual(data[docId].Length + 1, doc.Fields.Count);
        for (int j = 0; j < data[docId].Length; ++j)
        {
            var arr = data[docId][j];
            BytesRef arr2Ref = doc.GetBinaryValue("bytes" + j);
            var arr2 = Arrays.CopyOfRange(arr2Ref.Bytes, arr2Ref.Offset, arr2Ref.Offset + arr2Ref.Length);
            Assert.AreEqual(arr, arr2);
        }
    }
    Assert.IsTrue(ir.NumDocs <= numDocs);
    ir.Dispose();
    iw.DeleteAll();
    iw.Commit();
    iw.ForceMerge(1);
    iw.Dispose();
    dir.Dispose();
}
// [Test] // LUCENENET NOTE: For now, we are overriding this test in every subclass to pull it into the right context for the subclass
// Verifies numeric values round-trip through stored fields: each document gets
// a random single/double/int32/int64 value (both as an indexed numeric field
// and a StoredField), plus an "id" field used to look up the expected answer
// via the field cache. On read-back, the stored field must come back as a
// StoredField whose numeric value equals what was written.
public virtual void TestNumericField()
{
    Directory dir = NewDirectory();
    var w = new RandomIndexWriter(Random(), dir, ClassEnvRule.similarity, ClassEnvRule.timeZone);
    var numDocs = AtLeast(500);
    var answers = new object[numDocs];
    NumericType[] typeAnswers = new NumericType[numDocs];
    for (int id = 0; id < numDocs; id++)
    {
        Document doc = new Document();
        Field nf;
        Field sf;
        object answer;
        NumericType typeAnswer;
        if (Random().NextBoolean())
        {
            // float/double
            if (Random().NextBoolean())
            {
                float f = Random().NextFloat();
                answer = Convert.ToSingle(f, CultureInfo.InvariantCulture);
                nf = new SingleField("nf", f, Field.Store.NO);
                sf = new StoredField("nf", f);
                typeAnswer = NumericType.SINGLE;
            }
            else
            {
                double d = Random().NextDouble();
                answer = Convert.ToDouble(d, CultureInfo.InvariantCulture);
                nf = new DoubleField("nf", d, Field.Store.NO);
                sf = new StoredField("nf", d);
                typeAnswer = NumericType.DOUBLE;
            }
        }
        else
        {
            // int/long
            if (Random().NextBoolean())
            {
                int i = Random().Next();
                answer = Convert.ToInt32(i, CultureInfo.InvariantCulture);
                nf = new Int32Field("nf", i, Field.Store.NO);
                sf = new StoredField("nf", i);
                typeAnswer = NumericType.INT32;
            }
            else
            {
                long l = Random().NextLong();
                answer = Convert.ToInt64(l, CultureInfo.InvariantCulture);
                nf = new Int64Field("nf", l, Field.Store.NO);
                sf = new StoredField("nf", l);
                typeAnswer = NumericType.INT64;
            }
        }
        doc.Add(nf);
        doc.Add(sf);
        answers[id] = answer;
        typeAnswers[id] = typeAnswer;
        // Max precision step: the id indexes as a single term per value.
        FieldType ft = new FieldType(Int32Field.TYPE_STORED);
        ft.NumericPrecisionStep = int.MaxValue;
        doc.Add(new Int32Field("id", id, ft));
        w.AddDocument(doc);
    }
    DirectoryReader r = w.Reader;
    w.Dispose();
    Assert.AreEqual(numDocs, r.NumDocs);
    foreach (AtomicReaderContext ctx in r.Leaves)
    {
        AtomicReader sub = (AtomicReader)ctx.Reader;
        FieldCache.Int32s ids = FieldCache.DEFAULT.GetInt32s(sub, "id", false);
        for (int docID = 0; docID < sub.NumDocs; docID++)
        {
            Document doc = sub.Document(docID);
            Field f = (Field)doc.GetField("nf");
            Assert.IsTrue(f is StoredField, "got f=" + f);
            Assert.AreEqual(answers[ids.Get(docID)], f.GetNumericValue());
        }
    }
    r.Dispose();
    dir.Dispose();
}
// Creates the edit control matching a DOB member's type: looks up the member's
// metadata, maps the member type to the corresponding ObjectDataFieldControl
// subclass (all 32-bit physical-unit types share Float32Field, all 64-bit ones
// Float64Field, with the unit's type name passed for display), and attaches
// the control to this edit panel. Returns null for unmapped member types.
// NOTE(review): isArray/typeSize are out-params required by GetInfo; only
// typeSize is consumed (for StringField) — arrLength covers the array case.
private ObjectDataFieldControl CreateControl(long typeId, int member, ObjectInfo objInfo)
{
    ObjectDataFieldControl c = null;
    Safir.Dob.Typesystem.MemberType memberType;
    long complexType;
    int typeSize;
    bool isArray;
    int arrLength;
    string memberName = Safir.Dob.Typesystem.Members.GetInfo(typeId, member, out memberType, out complexType, out typeSize, out isArray, out arrLength);
    switch (memberType)
    {
        case Safir.Dob.Typesystem.MemberType.EnumerationMemberType:
        {
            c = new EnumField(objInfo, member, complexType, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.BooleanMemberType:
        {
            c = new BoolField(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.Int32MemberType:
        {
            c = new Int32Field(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.Int64MemberType:
        {
            c = new Int64Field(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.ChannelIdMemberType:
        {
            c = new ChannelIdField(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.HandlerIdMemberType:
        {
            c = new HandlerIdField(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.EntityIdMemberType:
        {
            c = new EntityIdField(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.InstanceIdMemberType:
        {
            c = new InstanceIdField(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.StringMemberType:
        {
            c = new StringField(objInfo, member, memberName, arrLength, typeSize);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.TypeIdMemberType:
        {
            c = new TypeIdField(objInfo, member, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.ObjectMemberType:
        {
            c = new ObjectField(objInfo, member, complexType, memberName, arrLength);
            break;
        }
        case Safir.Dob.Typesystem.MemberType.BinaryMemberType:
        {
            c = new BinaryField(objInfo, member, memberName, arrLength);
            break;
        }
        // All 32-bit floating-point and physical-unit members share one control.
        case Safir.Dob.Typesystem.MemberType.Float32MemberType:
        case Safir.Dob.Typesystem.MemberType.Ampere32MemberType:
        case Safir.Dob.Typesystem.MemberType.CubicMeter32MemberType:
        case Safir.Dob.Typesystem.MemberType.Hertz32MemberType:
        case Safir.Dob.Typesystem.MemberType.Joule32MemberType:
        case Safir.Dob.Typesystem.MemberType.Kelvin32MemberType:
        case Safir.Dob.Typesystem.MemberType.Kilogram32MemberType:
        case Safir.Dob.Typesystem.MemberType.Meter32MemberType:
        case Safir.Dob.Typesystem.MemberType.MeterPerSecond32MemberType:
        case Safir.Dob.Typesystem.MemberType.MeterPerSecondSquared32MemberType:
        case Safir.Dob.Typesystem.MemberType.Newton32MemberType:
        case Safir.Dob.Typesystem.MemberType.Pascal32MemberType:
        case Safir.Dob.Typesystem.MemberType.Radian32MemberType:
        case Safir.Dob.Typesystem.MemberType.RadianPerSecond32MemberType:
        case Safir.Dob.Typesystem.MemberType.RadianPerSecondSquared32MemberType:
        case Safir.Dob.Typesystem.MemberType.Second32MemberType:
        case Safir.Dob.Typesystem.MemberType.SquareMeter32MemberType:
        case Safir.Dob.Typesystem.MemberType.Steradian32MemberType:
        case Safir.Dob.Typesystem.MemberType.Volt32MemberType:
        case Safir.Dob.Typesystem.MemberType.Watt32MemberType:
        {
            c = new Float32Field(objInfo, member, memberName, arrLength, Safir.Dob.Typesystem.Members.GetTypeName(typeId, member));
            break;
        }
        // All 64-bit floating-point and physical-unit members share one control.
        case Safir.Dob.Typesystem.MemberType.Float64MemberType:
        case Safir.Dob.Typesystem.MemberType.Ampere64MemberType:
        case Safir.Dob.Typesystem.MemberType.CubicMeter64MemberType:
        case Safir.Dob.Typesystem.MemberType.Hertz64MemberType:
        case Safir.Dob.Typesystem.MemberType.Joule64MemberType:
        case Safir.Dob.Typesystem.MemberType.Kelvin64MemberType:
        case Safir.Dob.Typesystem.MemberType.Kilogram64MemberType:
        case Safir.Dob.Typesystem.MemberType.Meter64MemberType:
        case Safir.Dob.Typesystem.MemberType.MeterPerSecond64MemberType:
        case Safir.Dob.Typesystem.MemberType.MeterPerSecondSquared64MemberType:
        case Safir.Dob.Typesystem.MemberType.Newton64MemberType:
        case Safir.Dob.Typesystem.MemberType.Pascal64MemberType:
        case Safir.Dob.Typesystem.MemberType.Radian64MemberType:
        case Safir.Dob.Typesystem.MemberType.RadianPerSecond64MemberType:
        case Safir.Dob.Typesystem.MemberType.RadianPerSecondSquared64MemberType:
        case Safir.Dob.Typesystem.MemberType.Second64MemberType:
        case Safir.Dob.Typesystem.MemberType.SquareMeter64MemberType:
        case Safir.Dob.Typesystem.MemberType.Steradian64MemberType:
        case Safir.Dob.Typesystem.MemberType.Volt64MemberType:
        case Safir.Dob.Typesystem.MemberType.Watt64MemberType:
        {
            c = new Float64Field(objInfo, member, memberName, arrLength, Safir.Dob.Typesystem.Members.GetTypeName(typeId, member));
            break;
        }
    }
    if (c != null)
    {
        c.ParentObjectEditPanel = this;
    }
    return(c);
}
// NOTE: this is likely buggy, and cannot clone fields
// with tokenStreamValues, etc.  Use at your own risk!!
// TODO: is there a pre-existing way to do this!!!
/// <summary>
/// Creates a copy of <paramref name="doc1"/> by re-creating each of its fields:
/// doc-values fields by their doc-values type, numeric fields by their
/// <see cref="FieldType.NumericType"/>, and everything else as a plain
/// <see cref="Field"/> over the stored string value.
/// </summary>
/// <param name="doc1">The document to clone.</param>
/// <returns>A new <see cref="Document"/> with equivalent fields.</returns>
/// <exception cref="IllegalStateException">if a field has an unrecognized
/// doc-values or numeric type.</exception>
public static Document CloneDocument(Document doc1)
{
    Document doc2 = new Document();
    foreach (IIndexableField f in doc1.Fields)
    {
        Field field1 = (Field)f;
        Field field2;
        DocValuesType dvType = field1.FieldType.DocValueType;
        NumericType numType = field1.FieldType.NumericType;
        if (dvType != DocValuesType.NONE)
        {
            switch (dvType)
            {
                case DocValuesType.NUMERIC:
                    field2 = new NumericDocValuesField(field1.Name, field1.GetInt64Value().Value);
                    break;

                case DocValuesType.BINARY:
                    field2 = new BinaryDocValuesField(field1.Name, field1.GetBinaryValue());
                    break;

                case DocValuesType.SORTED:
                    field2 = new SortedDocValuesField(field1.Name, field1.GetBinaryValue());
                    break;

                default:
                    throw IllegalStateException.Create("unknown Type: " + dvType);
            }
        }
        else if (numType != NumericType.NONE)
        {
            // BUGFIX: the SINGLE/INT64/DOUBLE cases previously called
            // GetInt32Value(), which does not return the value for non-int
            // numeric fields; each case must use its matching accessor.
            switch (numType)
            {
                case NumericType.INT32:
                    field2 = new Int32Field(field1.Name, field1.GetInt32Value().Value, field1.FieldType);
                    break;

                case NumericType.SINGLE:
                    field2 = new SingleField(field1.Name, field1.GetSingleValue().Value, field1.FieldType);
                    break;

                case NumericType.INT64:
                    field2 = new Int64Field(field1.Name, field1.GetInt64Value().Value, field1.FieldType);
                    break;

                case NumericType.DOUBLE:
                    field2 = new DoubleField(field1.Name, field1.GetDoubleValue().Value, field1.FieldType);
                    break;

                default:
                    throw IllegalStateException.Create("unknown Type: " + numType);
            }
        }
        else
        {
            field2 = new Field(field1.Name, field1.GetStringValue(), field1.FieldType);
        }
        doc2.Add(field2);
    }
    return (doc2);
}
/// <summary>
/// Walks the flattened property list, collecting per-field analyzers into
/// <paramref name="analyzers"/> and adding one Lucene field per property to
/// <paramref name="doc"/>. Either output argument may be <c>null</c>, in
/// which case that half of the work is skipped.
/// </summary>
/// <param name="props">Flattened properties to index; <c>null</c> entries are skipped.</param>
/// <param name="doc">Target document, or <c>null</c> to only collect analyzers.</param>
/// <param name="analyzers">Receives (field key, analyzer) pairs, or <c>null</c>.</param>
public void GetFieldSettings(List<FlattenedObject> props, Document doc, List<KeyValuePair<string, Analyzer>> analyzers)
{
    foreach (var p in props)
    {
        if (p == null)
        {
            continue;
        }

        if (analyzers != null && p.Analyzer != null)
        {
            analyzers.Add(new KeyValuePair<string, Analyzer>(p.Key, p.Analyzer));
        }

        if (doc == null)
        {
            continue;
        }

        // Numeric values are already boxed primitives (guarded by the type
        // checks), so unbox directly. The previous int.Parse(p.Value.ToString())
        // round-trip was culture-sensitive and, for double, could lose
        // precision on older runtimes.
        if (p.Value is int)
        {
            doc.Add(new Int32Field(p.Key, (int)p.Value, p.FieldStoreSetting));
        }
        else if (p.Value is long)
        {
            doc.Add(new Int64Field(p.Key, (long)p.Value, p.FieldStoreSetting));
        }
        else if (p.Value is float)
        {
            doc.Add(new SingleField(p.Key, (float)p.Value, p.FieldStoreSetting));
        }
        else if (p.Value is double)
        {
            doc.Add(new DoubleField(p.Key, (double)p.Value, p.FieldStoreSetting));
        }
        else if (p.Spatial)
        {
            if (p.Value == null || string.IsNullOrEmpty(p.Value.ToString()))
            {
                continue;
            }

            var name = p.Key;// p.Key.IndexOf('.')>-1?p.Key.Substring(0,p.Key.LastIndexOf('.')):p.Key;
            int maxLevels = 11;
            SpatialPrefixTree grid = new GeohashPrefixTree(ctx, maxLevels);
            var strat = new RecursivePrefixTreeStrategy(grid, name);
            //var strat = new PointVectorStrategy(ctx,name);

            // Expected value format is "x,y"; anything else is skipped.
            var xyArr = p.Value.ToString().Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
            if (xyArr.Length != 2)
            {
                continue;
            }

            // Parse with the invariant culture: the coordinates are
            // machine-formatted ("1.5"), and culture-sensitive parsing would
            // misread them in locales where '.' is a group separator
            // (e.g. "1.5" parses as 15 under de-DE).
            double x;
            double y;
            if (!double.TryParse(xyArr[0], System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out x) ||
                !double.TryParse(xyArr[1], System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out y))
            {
                continue;
            }

            var point = ctx.MakePoint(x, y);
            //var point = ctx.ReadShape(p.Value.ToString());

            // Plain foreach instead of ToList().ForEach(x => ...): avoids an
            // extra list allocation and the lambda parameter colliding with
            // the local 'x' above.
            foreach (var spatialField in strat.CreateIndexableFields(point))
            {
                doc.Add(spatialField);
            }

            IPoint pt = (IPoint)point; // kept for the optional stored-field line below
            //doc.Add(new StoredField(strat.FieldName, pt.X.ToString(CultureInfo.InvariantCulture) + " " + pt.Y.ToString(CultureInfo.InvariantCulture)));
        }
        else
        {
            // NOTE(review): ToLower() is culture-sensitive; kept as-is because
            // query-time normalization elsewhere may depend on the same
            // culture. Consider ToLowerInvariant() for deterministic indexing.
            string value = p.Value == null
                ? null
                : (p.KeepValueCasing ? p.Value.ToString() : p.Value.ToString().ToLower());

            Field f = null;
            if (p.FieldIndexSetting == Field.Index.ANALYZED || p.FieldIndexSetting == Field.Index.ANALYZED_NO_NORMS)
            {
                f = new TextField(p.Key, value ?? string.Empty, p.FieldStoreSetting);
            }
            else
            {
                f = new StringField(p.Key, value ?? string.Empty, p.FieldStoreSetting);
            }
            doc.Add(f);
        }
    }
}
/// <summary>
/// One-time test setup: searches for a random locale/timezone/date-style
/// combination whose date format passes a sanity check, generates one random
/// non-zero number per numeric type, then builds an index containing one
/// document per <c>NumberType</c> with numeric fields for every
/// <see cref="NumericType"/> plus a date field.
/// </summary>
public override void BeforeClass()
{
    base.BeforeClass();
    ANALYZER = new MockAnalyzer(Random());
    qp = new StandardQueryParser(ANALYZER);

    HashMap<String, /*Number*/ object> randomNumberMap = new HashMap<string, object>();

    /*SimpleDateFormat*/ string dateFormat;
    long randomDate;
    bool dateFormatSanityCheckPass;
    int count = 0;

    // Keep drawing random locale/timezone/date-style combinations until the
    // resulting date format round-trips sanely (bounded at 100 attempts).
    do
    {
        if (count > 100)
        {
            fail("This test has problems to find a sane random DateFormat/NumberFormat. Stopped trying after 100 iterations.");
        }

        dateFormatSanityCheckPass = true;
        LOCALE = RandomLocale(Random());
        TIMEZONE = RandomTimeZone(Random());
        DATE_STYLE = randomDateStyle(Random());
        TIME_STYLE = randomDateStyle(Random());

        //// assumes localized date pattern will have at least year, month, day,
        //// hour, minute
        //dateFormat = (SimpleDateFormat)DateFormat.getDateTimeInstance(
        //    DATE_STYLE, TIME_STYLE, LOCALE);

        //// not all date patterns includes era, full year, timezone and second,
        //// so we add them here
        //dateFormat.applyPattern(dateFormat.toPattern() + " G s Z yyyy");
        //dateFormat.setTimeZone(TIMEZONE);

        // assumes localized date pattern will have at least year, month, day,
        // hour, minute
        DATE_FORMAT = new NumberDateFormat(DATE_STYLE, TIME_STYLE, LOCALE)
        {
            TimeZone = TIMEZONE
        };

        // not all date patterns includes era, full year, timezone and second,
        // so we add them here
        DATE_FORMAT.SetDateFormat(DATE_FORMAT.GetDateFormat() + " g s z yyyy");

        dateFormat = DATE_FORMAT.GetDateFormat();

        // Draw a non-zero random date, pruned to a range sane for all
        // calendars and truncated to whole seconds.
        do
        {
            randomDate = Random().nextLong();

            // prune date value so it doesn't pass in insane values to some
            // calendars.
            randomDate = randomDate % 3400000000000L;

            // truncate to second
            randomDate = (randomDate / 1000L) * 1000L;

            // only positive values
            randomDate = Math.Abs(randomDate);
        } while (randomDate == 0L);

        // The format must survive positive, zero, and negative epoch values.
        dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, randomDate);
        dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, 0);
        dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, -randomDate);

        count++;
    } while (!dateFormatSanityCheckPass);

    //NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE);
    //NUMBER_FORMAT.setMaximumFractionDigits((Random().nextInt() & 20) + 1);
    //NUMBER_FORMAT.setMinimumFractionDigits((Random().nextInt() & 20) + 1);
    //NUMBER_FORMAT.setMaximumIntegerDigits((Random().nextInt() & 20) + 1);
    //NUMBER_FORMAT.setMinimumIntegerDigits((Random().nextInt() & 20) + 1);
    NUMBER_FORMAT = new NumberFormat(LOCALE);

    // One random non-zero value per numeric type (zero is re-drawn because it
    // would collide with the default field values used below).
    double randomDouble;
    long randomLong;
    int randomInt;
    float randomFloat;

    while ((randomLong = Convert.ToInt64(NormalizeNumber(Math.Abs(Random().nextLong())))) == 0L)
    {
        ;
    }
    while ((randomDouble = Convert.ToDouble(NormalizeNumber(Math.Abs(Random().NextDouble())))) == 0.0)
    {
        ;
    }
    while ((randomFloat = Convert.ToSingle(NormalizeNumber(Math.Abs(Random().nextFloat())))) == 0.0f)
    {
        ;
    }
    while ((randomInt = Convert.ToInt32(NormalizeNumber(Math.Abs(Random().nextInt())))) == 0)
    {
        ;
    }

    randomNumberMap.Put(NumericType.INT64.ToString(), randomLong);
    randomNumberMap.Put(NumericType.INT32.ToString(), randomInt);
    randomNumberMap.Put(NumericType.SINGLE.ToString(), randomFloat);
    randomNumberMap.Put(NumericType.DOUBLE.ToString(), randomDouble);
    randomNumberMap.Put(DATE_FIELD_NAME, randomDate);

    RANDOM_NUMBER_MAP = Collections.UnmodifiableMap(randomNumberMap);

    directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000))
            .SetMergePolicy(NewLogMergePolicy()));

    Document doc = new Document();
    HashMap<String, NumericConfig> numericConfigMap = new HashMap<String, NumericConfig>();
    HashMap<String, Field> numericFieldMap = new HashMap<String, Field>();
    qp.NumericConfigMap = (numericConfigMap);

    // One numeric field + parser config per NumericType; the shared 'doc' is
    // reused, with field values rewritten per NumberType below.
    foreach (NumericType type in Enum.GetValues(typeof(NumericType)))
    {
        if (type == NumericType.NONE)
        {
            continue;
        }

        numericConfigMap.Put(type.ToString(), new NumericConfig(PRECISION_STEP, NUMBER_FORMAT, type));

        FieldType ft2 = new FieldType(Int32Field.TYPE_NOT_STORED);
        ft2.NumericType = (type);
        ft2.IsStored = (true);
        ft2.NumericPrecisionStep = (PRECISION_STEP);
        ft2.Freeze();

        Field field;
        switch (type)
        {
            case NumericType.INT32:
                field = new Int32Field(type.ToString(), 0, ft2);
                break;

            case NumericType.SINGLE:
                field = new SingleField(type.ToString(), 0.0f, ft2);
                break;

            case NumericType.INT64:
                field = new Int64Field(type.ToString(), 0L, ft2);
                break;

            case NumericType.DOUBLE:
                field = new DoubleField(type.ToString(), 0.0, ft2);
                break;

            default:
                fail();
                field = null;
                break;
        }
        numericFieldMap.Put(type.ToString(), field);
        doc.Add(field);
    }

    // The date field is indexed as an INT64 (epoch millis) with its own config.
    numericConfigMap.Put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP, DATE_FORMAT, NumericType.INT64));

    FieldType ft = new FieldType(Int64Field.TYPE_NOT_STORED);
    ft.IsStored = (true);
    ft.NumericPrecisionStep = (PRECISION_STEP);
    Int64Field dateField = new Int64Field(DATE_FIELD_NAME, 0L, ft);
    numericFieldMap.Put(DATE_FIELD_NAME, dateField);
    doc.Add(dateField);

    // Index one document per NumberType (positive/negative/zero variants),
    // mutating the shared fields in place before each add.
    foreach (NumberType numberType in Enum.GetValues(typeof(NumberType)))
    {
        setFieldValues(numberType, numericFieldMap);
        if (VERBOSE)
        {
            Console.WriteLine("Indexing document: " + doc);
        }
        writer.AddDocument(doc);
    }

    reader = writer.Reader;
    searcher = NewSearcher(reader);
    writer.Dispose();
}
/// <summary>
/// One-time index setup: writes <c>NoDocs</c> documents containing the same
/// set of Int32 fields at precision steps 8/4/2 (stored, with increasing
/// spaced values), a no-trie field, and three unstored "ascfield" variants
/// holding consecutive values centered around zero.
/// </summary>
public override void BeforeClass()
{
    base.BeforeClass();

    NoDocs = AtLeast(4096);
    Distance = (1 << 30) / NoDocs;
    Directory = NewDirectory();

    RandomIndexWriter iw = new RandomIndexWriter(
        Random(),
        Directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000))
            .SetMergePolicy(NewLogMergePolicy()));

    // Stored int field types at various precision steps.
    FieldType storedBase = new FieldType(Int32Field.TYPE_NOT_STORED);
    storedBase.IsStored = true;
    storedBase.Freeze();

    FieldType stored8 = new FieldType(storedBase);
    stored8.NumericPrecisionStep = 8;

    FieldType stored4 = new FieldType(storedBase);
    stored4.NumericPrecisionStep = 4;

    FieldType stored2 = new FieldType(storedBase);
    stored2.NumericPrecisionStep = 2;

    // int.MaxValue precision step effectively disables trie indexing.
    FieldType storedNoTrie = new FieldType(storedBase);
    storedNoTrie.NumericPrecisionStep = int.MaxValue;

    // Unstored counterparts for the ascending fields.
    FieldType unstoredBase = Int32Field.TYPE_NOT_STORED;

    FieldType unstored8 = new FieldType(unstoredBase);
    unstored8.NumericPrecisionStep = 8;

    FieldType unstored4 = new FieldType(unstoredBase);
    unstored4.NumericPrecisionStep = 4;

    FieldType unstored2 = new FieldType(unstoredBase);
    unstored2.NumericPrecisionStep = 2;

    Int32Field field8 = new Int32Field("field8", 0, stored8);
    Int32Field field4 = new Int32Field("field4", 0, stored4);
    Int32Field field2 = new Int32Field("field2", 0, stored2);
    Int32Field fieldNoTrie = new Int32Field("field" + int.MaxValue, 0, storedNoTrie);
    Int32Field ascfield8 = new Int32Field("ascfield8", 0, unstored8);
    Int32Field ascfield4 = new Int32Field("ascfield4", 0, unstored4);
    Int32Field ascfield2 = new Int32Field("ascfield2", 0, unstored2);

    Document template = new Document();

    // add fields, that have a distance to test general functionality
    template.Add(field8);
    template.Add(field4);
    template.Add(field2);
    template.Add(fieldNoTrie);

    // add ascending fields with a distance of 1, beginning at -noDocs/2 to
    // test the correct splitting of range and inclusive/exclusive
    template.Add(ascfield8);
    template.Add(ascfield4);
    template.Add(ascfield2);

    // Add a series of noDocs docs with increasing int values; the same
    // Document instance is reused with its field values rewritten in place.
    for (int i = 0; i < NoDocs; i++)
    {
        int spaced = Distance * i + StartOffset;
        field8.SetInt32Value(spaced);
        field4.SetInt32Value(spaced);
        field2.SetInt32Value(spaced);
        fieldNoTrie.SetInt32Value(spaced);

        int ascending = i - (NoDocs / 2);
        ascfield8.SetInt32Value(ascending);
        ascfield4.SetInt32Value(ascending);
        ascfield2.SetInt32Value(ascending);

        iw.AddDocument(template);
    }

    Reader = iw.Reader;
    Searcher = NewSearcher(Reader);
    iw.Dispose();
}
/// <summary>
/// Round-trip test for numeric fields: indexes random int/long/float/double
/// values (each both as an indexed numeric field and as a StoredField under
/// the same name "nf"), then verifies that the stored value read back from
/// the index matches the original number for every document.
/// </summary>
public virtual void TestNumericField()
{
    using Directory dir = NewDirectory();
    DirectoryReader r = null;
    try
    {
        var numDocs = AtLeast(500);
        var answers = new Number[numDocs];
        using (var w = new RandomIndexWriter(Random, dir))
        {
            NumericType[] typeAnswers = new NumericType[numDocs];
            for (int id = 0; id < numDocs; id++)
            {
                Document doc = new Document();
                Field nf;          // indexed (not stored) numeric field
                Field sf;          // stored twin under the same field name
                Number answer;     // expected value to read back
                NumericType typeAnswer;
                if (Random.NextBoolean())
                {
                    // float/double
                    if (Random.NextBoolean())
                    {
                        float f = Random.NextSingle();
                        answer = Single.GetInstance(f);
                        nf = new SingleField("nf", f, Field.Store.NO);
                        sf = new StoredField("nf", f);
                        typeAnswer = NumericType.SINGLE;
                    }
                    else
                    {
                        double d = Random.NextDouble();
                        answer = Double.GetInstance(d);
                        nf = new DoubleField("nf", d, Field.Store.NO);
                        sf = new StoredField("nf", d);
                        typeAnswer = NumericType.DOUBLE;
                    }
                }
                else
                {
                    // int/long
                    if (Random.NextBoolean())
                    {
                        int i = Random.Next();
                        answer = Int32.GetInstance(i);
                        nf = new Int32Field("nf", i, Field.Store.NO);
                        sf = new StoredField("nf", i);
                        typeAnswer = NumericType.INT32;
                    }
                    else
                    {
                        long l = Random.NextInt64();
                        answer = Int64.GetInstance(l);
                        nf = new Int64Field("nf", l, Field.Store.NO);
                        sf = new StoredField("nf", l);
                        typeAnswer = NumericType.INT64;
                    }
                }
                doc.Add(nf);
                doc.Add(sf);
                answers[id] = answer;
                typeAnswers[id] = typeAnswer;

                // "id" field with trie indexing disabled (max precision step),
                // used below to map each doc back to its expected answer.
                FieldType ft = new FieldType(Int32Field.TYPE_STORED);
                ft.NumericPrecisionStep = int.MaxValue;
                doc.Add(new Int32Field("id", id, ft));
                w.AddDocument(doc);
            }
            r = w.GetReader();
        } // w.Dispose();

        Assert.AreEqual(numDocs, r.NumDocs);

        // Walk every segment and verify the stored "nf" value matches the
        // number that was indexed for that document's id.
        foreach (AtomicReaderContext ctx in r.Leaves)
        {
            AtomicReader sub = ctx.AtomicReader;
            FieldCache.Int32s ids = FieldCache.DEFAULT.GetInt32s(sub, "id", false);
            for (int docID = 0; docID < sub.NumDocs; docID++)
            {
                Document doc = sub.Document(docID);
                Field f = doc.GetField<Field>("nf");
                // Retrieved numeric fields come back as StoredField instances.
                Assert.IsTrue(f is StoredField, "got f=" + f);
#pragma warning disable 612, 618
                Assert.AreEqual(answers[ids.Get(docID)], f.GetNumericValue());
#pragma warning restore 612, 618
            }
        }
    }
    finally
    {
        r?.Dispose();
    }
}