// LUCENE-1219 public void TestBinaryFieldOffsetLength() { Directory dir = NewDirectory(); IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))); var b = new byte[50]; for (int i = 0; i < 50; i++) { b[i] = (byte)(i + 77); } Document doc = new Document(); Field f = new StoredField("binary", b, 10, 17); var bx = f.BinaryValue().Bytes; Assert.IsTrue(bx != null); Assert.AreEqual(50, bx.Length); Assert.AreEqual(10, f.BinaryValue().Offset); Assert.AreEqual(17, f.BinaryValue().Length); doc.Add(f); w.AddDocument(doc); w.Dispose(); IndexReader ir = DirectoryReader.Open(dir); Document doc2 = ir.Document(0); IndexableField f2 = doc2.GetField("binary"); b = f2.BinaryValue().Bytes; Assert.IsTrue(b != null); Assert.AreEqual(17, b.Length, 17); Assert.AreEqual(87, b[0]); ir.Dispose(); dir.Dispose(); }
public static void VerifyEquals(Document d1, Document d2) { List <IndexableField> ff1 = d1.Fields; List <IndexableField> ff2 = d2.Fields; ff1.Sort(fieldNameComparator); ff2.Sort(fieldNameComparator); Assert.AreEqual(ff1.Count, ff2.Count, ff1 + " : " + ff2); for (int i = 0; i < ff1.Count; i++) { IndexableField f1 = ff1[i]; IndexableField f2 = ff2[i]; if (f1.BinaryValue() != null) { Debug.Assert(f2.BinaryValue() != null); } else { string s1 = f1.StringValue; string s2 = f2.StringValue; Assert.AreEqual(s1, s2, ff1 + " : " + ff2); } } }
public override void WriteField(FieldInfo info, IndexableField field) { Debug.Assert(DocStatus == Status.STARTED); @in.WriteField(info, field); Debug.Assert(FieldCount > 0); FieldCount--; }
public override void AddField(int docID, IndexableField field, FieldInfo fieldInfo) { DocValuesType_e?dvType = field.FieldType.DocValueType; if (dvType != null) { fieldInfo.DocValuesType = dvType; if (dvType == DocValuesType_e.BINARY) { AddBinaryField(fieldInfo, docID, field.BinaryValue); } else if (dvType == DocValuesType_e.SORTED) { AddSortedField(fieldInfo, docID, field.BinaryValue); } else if (dvType == DocValuesType_e.SORTED_SET) { AddSortedSetField(fieldInfo, docID, field.BinaryValue); } else if (dvType == DocValuesType_e.NUMERIC) { if (!(field.NumericValue is long?)) { throw new System.ArgumentException("illegal type " + field.NumericValue.GetType() + ": DocValues types must be Long"); } AddNumericField(fieldInfo, docID, (long)field.NumericValue); } else { Debug.Assert(false, "unrecognized DocValues.Type: " + dvType); } } }
public override void AddField(int docID, IndexableField field, FieldInfo fieldInfo) { DocValuesType_e? dvType = field.FieldType().DocValueType; if (dvType != null) { fieldInfo.DocValuesType = dvType; if (dvType == DocValuesType_e.BINARY) { AddBinaryField(fieldInfo, docID, field.BinaryValue()); } else if (dvType == DocValuesType_e.SORTED) { AddSortedField(fieldInfo, docID, field.BinaryValue()); } else if (dvType == DocValuesType_e.SORTED_SET) { AddSortedSetField(fieldInfo, docID, field.BinaryValue()); } else if (dvType == DocValuesType_e.NUMERIC) { if (!(field.NumericValue is long?)) { throw new System.ArgumentException("illegal type " + field.NumericValue.GetType() + ": DocValues types must be Long"); } AddNumericField(fieldInfo, docID, (long)field.NumericValue); } else { Debug.Assert(false, "unrecognized DocValues.Type: " + dvType); } } }
public void BeforeClass() { Directory = NewDirectory(); Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(); RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, analyzer, Similarity, TimeZone); Documents.Document doc = new Documents.Document(); doc.Add(NewTextField("field", "one two three four five", Field.Store.YES)); doc.Add(NewTextField("repeated", "this is a repeated field - first part", Field.Store.YES)); IndexableField repeatedField = NewTextField("repeated", "second part of a repeated field", Field.Store.YES); doc.Add(repeatedField); doc.Add(NewTextField("palindrome", "one two three two one", Field.Store.YES)); writer.AddDocument(doc); doc = new Documents.Document(); doc.Add(NewTextField("nonexist", "phrase exist notexist exist found", Field.Store.YES)); writer.AddDocument(doc); doc = new Documents.Document(); doc.Add(NewTextField("nonexist", "phrase exist notexist exist found", Field.Store.YES)); writer.AddDocument(doc); Reader = writer.Reader; writer.Dispose(); Searcher = NewSearcher(Reader); }
public virtual BytesRef Next() { while (currentDocId < docCount) { currentDocId++; if (liveDocs != null && !liveDocs.Get(currentDocId)) { continue; } Document doc = outerInstance.reader.Document(currentDocId, relevantFields); BytesRef tempPayload = null; BytesRef tempTerm = null; HashSet <BytesRef> tempContexts = new HashSet <BytesRef>(); if (hasPayloads) { IndexableField payload = doc.GetField(outerInstance.payloadField); if (payload == null || (payload.BinaryValue == null && payload.StringValue == null)) { continue; } tempPayload = payload.BinaryValue ?? new BytesRef(payload.StringValue); } if (hasContexts) { IndexableField[] contextFields = doc.GetFields(outerInstance.contextsField); foreach (IndexableField contextField in contextFields) { if (contextField.BinaryValue == null && contextField.StringValue == null) { continue; } else { tempContexts.Add(contextField.BinaryValue ?? new BytesRef(contextField.StringValue)); } } } IndexableField fieldVal = doc.GetField(outerInstance.field); if (fieldVal == null || (fieldVal.BinaryValue == null && fieldVal.StringValue == null)) { continue; } tempTerm = (fieldVal.StringValue != null) ? new BytesRef(fieldVal.StringValue) : fieldVal.BinaryValue; currentPayload = tempPayload; currentContexts = tempContexts; currentWeight = GetWeight(doc, currentDocId); return(tempTerm); } return(null); }
public void TestReadSkip() { Directory dir = NewDirectory(); IndexWriterConfig iwConf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())); iwConf.SetMaxBufferedDocs(RandomInts.NextIntBetween(Random(), 2, 30)); RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, iwConf); FieldType ft = new FieldType(); ft.Stored = true; ft.Freeze(); string @string = TestUtil.RandomSimpleString(Random(), 50); sbyte[] bytes = @string.GetBytes(IOUtils.CHARSET_UTF_8); long l = Random().NextBoolean() ? Random().Next(42) : Random().NextLong(); int i = Random().NextBoolean() ? Random().Next(42) : Random().Next(); float f = Random().NextFloat(); double d = Random().NextDouble(); IList <Field> fields = Arrays.AsList(new Field("bytes", bytes, ft), new Field("string", @string, ft), new LongField("long", l, Field.Store.YES), new IntField("int", i, Field.Store.YES), new FloatField("float", f, Field.Store.YES), new DoubleField("double", d, Field.Store.YES) ); for (int k = 0; k < 100; ++k) { Document doc = new Document(); foreach (Field fld in fields) { doc.Add(fld); } iw.w.AddDocument(doc); } iw.Commit(); DirectoryReader reader = DirectoryReader.Open(dir); int docID = Random().Next(100); foreach (Field fld in fields) { string fldName = fld.Name(); Document sDoc = reader.Document(docID, CollectionsHelper.Singleton(fldName)); IndexableField sField = sDoc.GetField(fldName); if (typeof(Field).Equals(fld.GetType())) { Assert.AreEqual(fld.BinaryValue(), sField.BinaryValue()); Assert.AreEqual(fld.StringValue, sField.StringValue); } else { Assert.AreEqual(fld.NumericValue, sField.NumericValue); } } reader.Dispose(); iw.Dispose(); dir.Dispose(); }
internal override void Start(IndexableField f) { TermAtt = FieldState.AttributeSource_Renamed.GetAttribute <ITermToBytesRefAttribute>(); TermBytesRef = TermAtt.BytesRef; Consumer.Start(f); if (NextPerField != null) { NextPerField.Start(f); } }
public void AddField(IndexableField field) { if (FieldCount == Fields.Length) { int newSize = ArrayUtil.Oversize(FieldCount + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF); IndexableField[] newArray = new IndexableField[newSize]; Array.Copy(Fields, 0, newArray, 0, FieldCount); Fields = newArray; } Fields[FieldCount++] = field; }
public void TestWithContexts() { Directory dir = NewDirectory(); IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())); iwc.SetMergePolicy(NewLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, iwc); KeyValuePair<List<string>, IDictionary<string, Document>> res = GenerateIndexDocuments(AtLeast(1000), true, true); IDictionary<string, Document> docs = res.Value; List<string> invalidDocTerms = res.Key; foreach (Document doc in docs.Values) { writer.AddDocument(doc); } writer.Commit(); writer.Dispose(); IndexReader ir = DirectoryReader.Open(dir); IDictionary dictionary = new DocumentDictionary(ir, FIELD_NAME, WEIGHT_FIELD_NAME, PAYLOAD_FIELD_NAME, CONTEXT_FIELD_NAME); IInputIterator inputIterator = dictionary.EntryIterator; BytesRef f; while ((f = inputIterator.Next()) != null) { string field = f.Utf8ToString(); Document doc = docs.ContainsKey(field) ? docs[field] : null; docs.Remove(field); //Document doc = docs.remove(f.utf8ToString()); assertTrue(f.Equals(new BytesRef(doc.Get(FIELD_NAME)))); IndexableField weightField = doc.GetField(WEIGHT_FIELD_NAME); assertEquals(inputIterator.Weight, (weightField != null) ? Convert.ToInt64(weightField.NumericValue) : 0); assertTrue(inputIterator.Payload.Equals(doc.GetField(PAYLOAD_FIELD_NAME).BinaryValue)); ISet<BytesRef> oriCtxs = new HashSet<BytesRef>(); IEnumerable<BytesRef> contextSet = inputIterator.Contexts; foreach (IndexableField ctxf in doc.GetFields(CONTEXT_FIELD_NAME)) { oriCtxs.Add(ctxf.BinaryValue); } assertEquals(oriCtxs.Count, contextSet.Count()); } foreach (string invalidTerm in invalidDocTerms) { var invalid = docs[invalidTerm]; docs.Remove(invalidTerm); assertNotNull(invalid); } assertTrue(!docs.Any()); ir.Dispose(); dir.Dispose(); }
internal virtual void RemoveAllValueFields() { // .NET enumerators are read-only, so collect the field names first and remove the fields after iterating. IList<string> fieldNames = new List<string>(); foreach (IndexableField field in Document.Fields) { string fieldName = field.Name(); if (!fieldName.Equals(FIELD_ENTITY_ID)) { fieldNames.Add(fieldName); } } foreach (string fieldName in fieldNames) { Document.RemoveFields(fieldName); } }
/// <summary> /// Returns the value of the <code>weightField</code> for the current document. /// Retrieves the value for the <code>weightField</code> if it is stored (using <code>doc</code>) /// or if it is indexed as <seealso cref="NumericDocValues"/> (using <code>docId</code>) for the document. /// If no value is found, then the weight is 0. /// </summary> protected internal virtual long GetWeight(Document doc, int docId) { IndexableField weight = doc.GetField(outerInstance.weightField); if (weight != null) // found weight as stored { return((weight.NumericValue != null) ? (long)weight.NumericValue : 0); } // found weight as NumericDocValue else if (weightValues != null) { return(weightValues.Get(docId)); } // fall back else { return(0); } }
internal override void Start(IndexableField f) { if (FieldState.AttributeSource_Renamed.HasAttribute <IPayloadAttribute>()) { PayloadAttribute = FieldState.AttributeSource_Renamed.GetAttribute <IPayloadAttribute>(); } else { PayloadAttribute = null; } if (HasOffsets) { OffsetAttribute = FieldState.AttributeSource_Renamed.AddAttribute <IOffsetAttribute>(); } else { OffsetAttribute = null; } }
public static void CheckNorms(AtomicReader reader) { // test omit norms for (int i = 0; i < DocHelper.Fields.Length; i++) { IndexableField f = DocHelper.Fields[i]; if (f.FieldType().Indexed) { Assert.AreEqual(reader.GetNormValues(f.Name()) != null, !f.FieldType().OmitNorms); Assert.AreEqual(reader.GetNormValues(f.Name()) != null, !DocHelper.NoNorms.ContainsKey(f.Name())); if (reader.GetNormValues(f.Name()) == null) { // test for norms of null NumericDocValues norms = MultiDocValues.GetNormValues(reader, f.Name()); Assert.IsNull(norms); } } } }
/// <summary> /// Returns the value of the <see cref="Weight"/> property for the current document. /// Retrieves the value for the <see cref="Weight"/> property if its stored (using <paramref name="doc"/>) /// or if its indexed as <see cref="NumericDocValues"/> (using <paramref name="docId"/>) for the document. /// If no value is found, then the weight is 0. /// </summary> protected internal virtual long GetWeight(Document doc, int docId) { IndexableField weight = doc.GetField(outerInstance.weightField); if (weight != null) // found weight as stored { // LUCENENET TODO: See if we can make NumericValue into Decimal (which can be converted to any other type of number) // rather than using object. return((weight.NumericValue != null) ? Convert.ToInt64(weight.NumericValue) : 0); } // found weight as NumericDocValue else if (weightValues != null) { return(weightValues.Get(docId)); } // fall back else { return(0); } }
public void FieldIndexHandler_InTreeIndexHandler() { var fieldValue = "/Root/A/B"; var fieldValueObject = (object)fieldValue; var snField = new IndexableField(fieldValueObject); var fieldIndexHandler = new InTreeIndexHandler(); fieldIndexHandler.OwnerIndexingInfo = new TestPerfieldIndexingInfoString(); var indexed = fieldIndexHandler.GetIndexFields(snField, out _); var parsed = fieldIndexHandler.Parse(fieldValue.ToString()); var termValue = fieldIndexHandler.ConvertToTermValue(fieldValue); Assert.AreEqual(indexed.First().Type, parsed.Type); Assert.AreEqual(parsed.Type, termValue.Type); // get back is not supported }
public void FieldIndexHandler_NumberIndexHandler() { var fieldValue = Convert.ToDecimal(42L + int.MaxValue); var fieldValueObject = (object)fieldValue; var snField = new IndexableField(fieldValueObject); var fieldIndexHandler = new NumberIndexHandler(); fieldIndexHandler.OwnerIndexingInfo = new TestPerfieldIndexingInfoLong(); var indexed = fieldIndexHandler.GetIndexFields(snField, out _); var parsed = fieldIndexHandler.Parse(fieldValue.ToString()); var termValue = fieldIndexHandler.ConvertToTermValue(fieldValue); Assert.AreEqual(indexed.First().Type, parsed.Type); Assert.AreEqual(parsed.Type, termValue.Type); var retrieved = fieldIndexHandler.GetBack(fieldValue.ToString()); Assert.AreEqual(fieldValue, retrieved); }
public void FieldIndexHandler_IntegerIndexHandler() { var fieldValue = 42; var fieldValueObject = (object)fieldValue; var snField = new IndexableField(fieldValueObject); var fieldIndexHandler = new IntegerIndexHandler(); fieldIndexHandler.OwnerIndexingInfo = new TestPerfieldIndexingInfoInt(); var indexed = fieldIndexHandler.GetIndexFields(snField, out _); var parsed = fieldIndexHandler.Parse("42"); var termValue = fieldIndexHandler.ConvertToTermValue(fieldValue); Assert.AreEqual(indexed.First().Type, parsed.Type); Assert.AreEqual(parsed.Type, termValue.Type); var retrieved = fieldIndexHandler.GetBack("42"); Assert.AreEqual(fieldValue, retrieved); }
public void FieldIndexHandler_PermissionChoiceIndexHandler() { var fieldValue = new[] { "href", "text", "title", "target" }; var fieldValueObject = (object)fieldValue; var snField = new IndexableField(fieldValueObject); var fieldIndexHandler = new PermissionChoiceIndexHandler(); fieldIndexHandler.OwnerIndexingInfo = new TestPerfieldIndexingInfoString(); var indexed = fieldIndexHandler.GetIndexFields(snField, out _); Assert.AreEqual(IndexValueType.StringArray, indexed.First().Type); var parsed = fieldIndexHandler.Parse(fieldValue.ToString()); Assert.AreEqual(IndexValueType.String, parsed.Type); var termValue = fieldIndexHandler.ConvertToTermValue(fieldValue.ToString()); Assert.AreEqual(parsed.Type, termValue.Type); // get back is not supported }
public override void AddField(int docID, IndexableField field, FieldInfo fieldInfo) { if (field.FieldType.Stored) { if (NumStoredFields == StoredFields.Length) { int newSize = ArrayUtil.Oversize(NumStoredFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF); IndexableField[] newArray = new IndexableField[newSize]; Array.Copy(StoredFields, 0, newArray, 0, NumStoredFields); StoredFields = newArray; FieldInfo[] newInfoArray = new FieldInfo[newSize]; Array.Copy(FieldInfos, 0, newInfoArray, 0, NumStoredFields); FieldInfos = newInfoArray; } StoredFields[NumStoredFields] = field; FieldInfos[NumStoredFields] = fieldInfo; NumStoredFields++; Debug.Assert(DocState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); } }
public void FieldIndexHandler_RichTextIndexHandler() { var fieldValue = new RichTextFieldValue { Text = "Lorem ipsum", Editor = "{dolor: 'sit amet'}" }; var fieldValueInIndex = "lorem ipsum"; var fieldValueObject = (object)fieldValue; var snField = new IndexableField(fieldValueObject); var fieldIndexHandler = new RichTextIndexHandler(); fieldIndexHandler.OwnerIndexingInfo = new TestPerfieldIndexingInfoString(); var indexed = fieldIndexHandler.GetIndexFields(snField, out _).ToArray(); var parsed = fieldIndexHandler.Parse(fieldValueInIndex.ToString()); var termValue = fieldIndexHandler.ConvertToTermValue(fieldValueInIndex); Assert.AreEqual(indexed[0].Type, parsed.Type); Assert.AreEqual(fieldValueInIndex, indexed[0].StringValue); Assert.AreEqual(parsed.Type, termValue.Type); // get back is not supported }
public void FieldIndexHandler_TagIndexHandler() { var fieldValue = "Tag1,Tag2,Tag3"; var fieldValueObject = (object)fieldValue; var snField = new IndexableField(fieldValueObject); var fieldIndexHandler = new TagIndexHandler(); fieldIndexHandler.OwnerIndexingInfo = new TestPerfieldIndexingInfoString(); var indexed = fieldIndexHandler.GetIndexFields(snField, out _); Assert.AreEqual(IndexValueType.StringArray, indexed.First().Type); var parsed = fieldIndexHandler.Parse(fieldValue.ToString()); Assert.AreEqual(IndexValueType.String, parsed.Type); var termValue = fieldIndexHandler.ConvertToTermValue(fieldValue.ToString()); Assert.AreEqual(parsed.Type, termValue.Type); var retrieved = fieldIndexHandler.GetBack("tag1"); Assert.AreEqual("tag1", retrieved); }
public override void ProcessFields(IndexableField[] fields, int count) { FieldState.Reset(); bool doInvert = Consumer.Start(fields, count); for (int i = 0; i < count; i++) { IndexableField field = fields[i]; IndexableFieldType fieldType = field.FieldType(); // TODO FI: this should be "genericized" to querying // consumer if it wants to see this particular field // tokenized. if (fieldType.Indexed && doInvert) { bool analyzed = fieldType.Tokenized && DocState.Analyzer != null; // if the field omits norms, the boost cannot be indexed. if (fieldType.OmitNorms && field.GetBoost() != 1.0f) { throw new System.NotSupportedException("You cannot set an index-time boost: norms are omitted for field '" + field.Name() + "'"); } // only bother checking offsets if something will consume them. // TODO: after we fix analyzers, also check if termVectorOffsets will be indexed. bool checkOffsets = fieldType.IndexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; int lastStartOffset = 0; if (i > 0) { FieldState.Position_Renamed += analyzed ? DocState.Analyzer.GetPositionIncrementGap(fieldInfo.Name) : 0; } /* * To assist people in tracking down problems in analysis components, we wish to write the field name to the infostream * when we fail. We expect some caller to eventually deal with the real exception, so we don't want any 'catch' clauses, * but rather a finally that takes note of the problem. */ bool succeededInProcessingField = false; TokenStream stream = field.GetTokenStream(DocState.Analyzer); // reset the TokenStream to the first token stream.Reset(); try { bool hasMoreTokens = stream.IncrementToken(); FieldState.AttributeSource_Renamed = stream; IOffsetAttribute offsetAttribute = FieldState.AttributeSource_Renamed.AddAttribute<IOffsetAttribute>(); IPositionIncrementAttribute posIncrAttribute = FieldState.AttributeSource_Renamed.AddAttribute<IPositionIncrementAttribute>(); if (hasMoreTokens) { Consumer.Start(field); do { // If we hit an exception in stream.next below // (which is fairly common, eg if analyzer // chokes on a given document), then it's // non-aborting and (above) this one document // will be marked as deleted, but still // consume a docID int posIncr = posIncrAttribute.PositionIncrement; if (posIncr < 0) { throw new System.ArgumentException("position increment must be >=0 (got " + posIncr + ") for field '" + field.Name() + "'"); } if (FieldState.Position_Renamed == 0 && posIncr == 0) { throw new System.ArgumentException("first position increment must be > 0 (got 0) for field '" + field.Name() + "'"); } int position = FieldState.Position_Renamed + posIncr; if (position > 0) { // NOTE: confusing: this "mirrors" the // position++ we do below position--; } else if (position < 0) { throw new System.ArgumentException("position overflow for field '" + field.Name() + "'"); } // position is legal, we can safely place it in fieldState now. // not sure if anything will use fieldState after non-aborting exc... 
FieldState.Position_Renamed = position; if (posIncr == 0) { FieldState.NumOverlap_Renamed++; } if (checkOffsets) { int startOffset = FieldState.Offset_Renamed + offsetAttribute.StartOffset(); int endOffset = FieldState.Offset_Renamed + offsetAttribute.EndOffset(); if (startOffset < 0 || endOffset < startOffset) { throw new System.ArgumentException("startOffset must be non-negative, and endOffset must be >= startOffset, " + "startOffset=" + startOffset + ",endOffset=" + endOffset + " for field '" + field.Name() + "'"); } if (startOffset < lastStartOffset) { throw new System.ArgumentException("offsets must not go backwards startOffset=" + startOffset + " is < lastStartOffset=" + lastStartOffset + " for field '" + field.Name() + "'"); } lastStartOffset = startOffset; } bool success = false; try { // If we hit an exception in here, we abort // all buffered documents since the last // flush, on the likelihood that the // internal state of the consumer is now // corrupt and should not be flushed to a // new segment: Consumer.Add(); success = true; } finally { if (!success) { DocState.DocWriter.SetAborting(); } } FieldState.Length_Renamed++; FieldState.Position_Renamed++; } while (stream.IncrementToken()); } // trigger streams to perform end-of-stream operations stream.End(); // TODO: maybe add some safety? then again, its already checked // when we come back around to the field... FieldState.Position_Renamed += posIncrAttribute.PositionIncrement; FieldState.Offset_Renamed += offsetAttribute.EndOffset(); if (DocState.MaxTermPrefix != null) { string msg = "Document contains at least one immense term in field=\"" + fieldInfo.Name + "\" (whose UTF8 encoding is longer than the max length " + DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8 + "), all of which were skipped. Please correct the analyzer to not produce such terms. The prefix of the first immense term is: '" + DocState.MaxTermPrefix + "...'"; if (DocState.InfoStream.IsEnabled("IW")) { DocState.InfoStream.Message("IW", "ERROR: " + msg); } DocState.MaxTermPrefix = null; throw new System.ArgumentException(msg); } /* if success was false above there is an exception coming through and we won't get here.*/ succeededInProcessingField = true; } finally { if (!succeededInProcessingField) { IOUtils.CloseWhileHandlingException(stream); } else { stream.Dispose(); } if (!succeededInProcessingField && DocState.InfoStream.IsEnabled("DW")) { DocState.InfoStream.Message("DW", "An exception was thrown while processing field " + fieldInfo.Name); } } FieldState.Offset_Renamed += analyzed ? DocState.Analyzer.GetOffsetGap(fieldInfo.Name) : 0; FieldState.Boost_Renamed *= field.GetBoost(); } // LUCENE-2387: don't hang onto the field, so GC can // reclaim fields[i] = null; } Consumer.Finish(); EndConsumer.Finish(); }
static DocHelper() { CustomType = new FieldType(TextField.TYPE_STORED); TextField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, CustomType); CustomType2 = new FieldType(TextField.TYPE_STORED); CustomType2.StoreTermVectors = true; CustomType2.StoreTermVectorPositions = true; CustomType2.StoreTermVectorOffsets = true; TextField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, CustomType2); CustomType3 = new FieldType(TextField.TYPE_STORED); CustomType3.OmitNorms = true; TextField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, CustomType3); KeyField = new StringField(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES); CustomType5 = new FieldType(TextField.TYPE_STORED); CustomType5.OmitNorms = true; CustomType5.Tokenized = false; NoNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, CustomType5); CustomType6 = new FieldType(TextField.TYPE_STORED); CustomType6.IndexOptions = FieldInfo.IndexOptions.DOCS_ONLY; NoTFField = new Field(NO_TF_KEY, NO_TF_TEXT, CustomType6); CustomType7 = new FieldType(); CustomType7.Stored = true; UnIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, CustomType7); CustomType8 = new FieldType(TextField.TYPE_NOT_STORED); CustomType8.StoreTermVectors = true; UnStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, CustomType8); UnStoredField1 = new TextField(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO); LazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, CustomType); TextUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, CustomType); TextUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, CustomType2); Fields = new Field[] { TextField1, TextField2, TextField3, KeyField, NoNormsField, NoTFField, UnIndField, UnStoredField1, UnStoredField2, TextUtfField1, TextUtfField2, LazyField, LazyFieldBinary, LargeLazyField }; //Initialize the large Lazy Field StringBuilder buffer = new StringBuilder(); for (int i = 0; i < 10000; i++) { buffer.Append("Lazily loading lengths of language in lieu of laughing "); } try { LAZY_FIELD_BINARY_BYTES = "These are some binary field bytes".GetBytes(IOUtils.CHARSET_UTF_8); } catch (EncoderFallbackException e) { } LazyFieldBinary = new StoredField(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES); Fields[Fields.Length - 2] = LazyFieldBinary; LARGE_LAZY_FIELD_TEXT = buffer.ToString(); LargeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, CustomType); Fields[Fields.Length - 1] = LargeLazyField; for (int i = 0; i < Fields.Length; i++) { IndexableField f = Fields[i]; Add(All, f); if (f.FieldType().Indexed) { Add(Indexed, f); } else { Add(Unindexed, f); } if (f.FieldType().StoreTermVectors) { Add(Termvector, f); } if (f.FieldType().Indexed && !f.FieldType().StoreTermVectors) { Add(Notermvector, f); } if (f.FieldType().Stored) { Add(Stored, f); } else { Add(Unstored, f); } if (f.FieldType().IndexOptions == FieldInfo.IndexOptions.DOCS_ONLY) { Add(NoTf, f); } if (f.FieldType().OmitNorms) { Add(NoNorms, f); } if (f.FieldType().IndexOptions == FieldInfo.IndexOptions.DOCS_ONLY) { Add(NoTf, f); } //if (f.isLazy()) add(lazy, f); } NameValues = new Dictionary<string, object>(); NameValues[TEXT_FIELD_1_KEY] = FIELD_1_TEXT; NameValues[TEXT_FIELD_2_KEY] = FIELD_2_TEXT; NameValues[TEXT_FIELD_3_KEY] = FIELD_3_TEXT; NameValues[KEYWORD_FIELD_KEY] = KEYWORD_TEXT; NameValues[NO_NORMS_KEY] = NO_NORMS_TEXT; NameValues[NO_TF_KEY] = NO_TF_TEXT; NameValues[UNINDEXED_FIELD_KEY] = UNINDEXED_FIELD_TEXT; NameValues[UNSTORED_FIELD_1_KEY] = UNSTORED_1_FIELD_TEXT; NameValues[UNSTORED_FIELD_2_KEY] = UNSTORED_2_FIELD_TEXT; NameValues[LAZY_FIELD_KEY] = LAZY_FIELD_TEXT; NameValues[LAZY_FIELD_BINARY_KEY] = LAZY_FIELD_BINARY_BYTES; NameValues[LARGE_LAZY_FIELD_KEY] = LARGE_LAZY_FIELD_TEXT; NameValues[TEXT_FIELD_UTF1_KEY] = FIELD_UTF1_TEXT; NameValues[TEXT_FIELD_UTF2_KEY] = FIELD_UTF2_TEXT; }
public override void WriteField(FieldInfo info, IndexableField field) { FieldsStream.WriteVInt(info.Number); int bits = 0; BytesRef bytes; string @string; // TODO: maybe a field should serialize itself? // this way we don't bake into indexer all these // specific encodings for different fields? and apps // can customize... object number = (object)field.NumericValue; if (number != null) { if (number is sbyte || number is short || number is int) { bits |= FIELD_IS_NUMERIC_INT; } else if (number is long) { bits |= FIELD_IS_NUMERIC_LONG; } else if (number is float) { bits |= FIELD_IS_NUMERIC_FLOAT; } else if (number is double) { bits |= FIELD_IS_NUMERIC_DOUBLE; } else { throw new System.ArgumentException("cannot store numeric type " + number.GetType()); } @string = null; bytes = null; } else { bytes = field.BinaryValue(); if (bytes != null) { bits |= FIELD_IS_BINARY; @string = null; } else { @string = field.StringValue; if (@string == null) { throw new System.ArgumentException("field " + field.Name() + " is stored but does not have binaryValue, stringValue nor numericValue"); } } } FieldsStream.WriteByte((byte)(sbyte)bits); if (bytes != null) { FieldsStream.WriteVInt(bytes.Length); FieldsStream.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length); } else if (@string != null) { FieldsStream.WriteString(field.StringValue); } else { if (number is sbyte || number is short || number is int) { FieldsStream.WriteInt((int)number); } else if (number is long) { FieldsStream.WriteLong((long)number); } else if (number is float) { FieldsStream.WriteInt(Number.FloatToIntBits((float)number)); } else if (number is double) { FieldsStream.WriteLong(BitConverter.DoubleToInt64Bits((double)number)); } else { throw new InvalidOperationException("Cannot get here"); } } }
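For orientation only (not part of the original writer code), a hypothetical document like the following would exercise the binary, string, and numeric branches of WriteField above; the field names are made up for illustration.

// Hypothetical stored fields and the encoding branch of WriteField each would take.
var doc = new Document();
doc.Add(new StoredField("raw", new byte[] { 1, 2, 3 }));    // binary value            -> FIELD_IS_BINARY
doc.Add(new StoredField("title", "hello stored fields"));   // string value            -> plain string
doc.Add(new IntField("count", 42, Field.Store.YES));        // numeric value (int)     -> FIELD_IS_NUMERIC_INT
doc.Add(new DoubleField("price", 9.99, Field.Store.YES));   // numeric value (double)  -> FIELD_IS_NUMERIC_DOUBLE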
// Called once per field, and is given all IndexableField // occurrences for this field in the document. Return // true if you wish to see inverted tokens for these // fields: internal abstract bool Start(IndexableField[] fields, int count);
protected internal override bool IsStoredField(IndexableField field) { return(!field.Name().EndsWith(CustomType.EXACT_FIELD_SUFFIX) && base.IsStoredField(field)); }
public override void WriteField(FieldInfo info, IndexableField field) { int bits = 0; BytesRef bytes; string @string; object number = (object)field.NumericValue; if (number != null) { if (number is string) { string numStr = number.ToString(); sbyte dummySbyte; short dummyShort; int dummyInt; long dummyLong; float dummyFloat; double dummyDouble; if (sbyte.TryParse(numStr, out dummySbyte) || short.TryParse(numStr, out dummyShort) || int.TryParse(numStr, out dummyInt)) { bits = NUMERIC_INT; } else if (long.TryParse(numStr, out dummyLong)) { bits = NUMERIC_LONG; } else if (float.TryParse(numStr, out dummyFloat)) { bits = NUMERIC_FLOAT; } else if (double.TryParse(numStr, out dummyDouble)) { bits = NUMERIC_DOUBLE; } else { throw new System.ArgumentException("cannot store numeric type " + number.GetType()); } } else { if (number is sbyte || number is short || number is int) { bits = NUMERIC_INT; } else if (number is long) { bits = NUMERIC_LONG; } else if (number is float) { bits = NUMERIC_FLOAT; } else if (number is double) { bits = NUMERIC_DOUBLE; } else { throw new System.ArgumentException("cannot store numeric type " + number.GetType()); } } @string = null; bytes = null; } else { bytes = field.BinaryValue(); if (bytes != null) { bits = BYTE_ARR; @string = null; } else { bits = STRING; @string = field.StringValue; if (@string == null) { throw new System.ArgumentException("field " + field.Name() + " is stored but does not have binaryValue, stringValue nor numericValue"); } } } long infoAndBits = (((long)info.Number) << TYPE_BITS) | bits; BufferedDocs.WriteVLong(infoAndBits); if (bytes != null) { BufferedDocs.WriteVInt(bytes.Length); BufferedDocs.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length); } else if (@string != null) { BufferedDocs.WriteString(field.StringValue); } else { if (number is string) { string numStr = number.ToString(); sbyte dummySbyte; short dummyShort; int dummyInt; long dummyLong; float dummyFloat; double dummyDouble; if (sbyte.TryParse(numStr, out dummySbyte) || short.TryParse(numStr, out dummyShort) || int.TryParse(numStr, out dummyInt)) { bits = NUMERIC_INT; } else if (long.TryParse(numStr, out dummyLong)) { bits = NUMERIC_LONG; } else if (float.TryParse(numStr, out dummyFloat)) { bits = NUMERIC_FLOAT; } else if (double.TryParse(numStr, out dummyDouble)) { bits = NUMERIC_DOUBLE; } else { throw new System.ArgumentException("cannot store numeric type " + number.GetType()); } } else { if (number is sbyte || number is short || number is int) { BufferedDocs.WriteInt((int)number); } else if (number is long) { BufferedDocs.WriteLong((long)number); } else if (number is float) { BufferedDocs.WriteInt(Number.FloatToIntBits((float)number)); } else if (number is double) { BufferedDocs.WriteLong(BitConverter.DoubleToInt64Bits((double)number)); } else { throw new Exception("Cannot get here"); } } } }
/// <summary> /// checks that two stored fields are equivalent /// </summary> public void AssertStoredFieldEquals(string info, IndexableField leftField, IndexableField rightField) { Assert.AreEqual(leftField.Name(), rightField.Name(), info); Assert.AreEqual(leftField.BinaryValue(), rightField.BinaryValue(), info); Assert.AreEqual(leftField.StringValue, rightField.StringValue, info); Assert.AreEqual(leftField.NumericValue, rightField.NumericValue, info); // TODO: should we check the FT at all? }
public abstract void AddField(int docID, IndexableField field, FieldInfo fieldInfo);
// Called before a field instance is being processed internal abstract void Start(IndexableField field);
public override void WriteField(FieldInfo info, IndexableField field) { FieldsStream.WriteVInt(info.Number); int bits = 0; BytesRef bytes; string @string; // TODO: maybe a field should serialize itself? // this way we don't bake into indexer all these // specific encodings for different fields? and apps // can customize... object number = field.NumericValue; if (number != null) { if (number is sbyte? || number is short? || number is int?) { bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_INT; } else if (number is long?) { bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_LONG; } else if (number is float?) { bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_FLOAT; } else if (number is double?) { bits |= Lucene3xStoredFieldsReader.FIELD_IS_NUMERIC_DOUBLE; } else { throw new System.ArgumentException("cannot store numeric type " + number.GetType()); } @string = null; bytes = null; } else { bytes = field.BinaryValue; if (bytes != null) { bits |= Lucene3xStoredFieldsReader.FIELD_IS_BINARY; @string = null; } else { @string = field.StringValue; if (@string == null) { throw new System.ArgumentException("field " + field.Name + " is stored but does not have binaryValue, stringValue nor numericValue"); } } } FieldsStream.WriteByte((byte)(sbyte)bits); if (bytes != null) { FieldsStream.WriteVInt(bytes.Length); FieldsStream.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length); } else if (@string != null) { FieldsStream.WriteString(field.StringValue); } else { if (number is sbyte? || number is short? || number is int?) { FieldsStream.WriteInt((int)number); } else if (number is long?) { FieldsStream.WriteLong((long)number); } else if (number is float?) { FieldsStream.WriteInt(Number.FloatToIntBits((float)number)); } else if (number is double?) { FieldsStream.WriteLong(BitConverter.DoubleToInt64Bits((double)number)); } else { Debug.Assert(false); } } }
public override void AddField(int docID, IndexableField field, FieldInfo fieldInfo) { First.AddField(docID, field, fieldInfo); Second.AddField(docID, field, fieldInfo); }
protected internal virtual bool IsStoredField(IndexableField field) { return(isValidKey(field.Name()) && field.FieldType().Stored && !FullTxData.TX_STATE_KEY.Equals(field.Name())); }
public override void AddField(int docID, IndexableField field, FieldInfo fieldInfo) { if (field.FieldType().Stored) { if (NumStoredFields == StoredFields.Length) { int newSize = ArrayUtil.Oversize(NumStoredFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF); IndexableField[] newArray = new IndexableField[newSize]; Array.Copy(StoredFields, 0, newArray, 0, NumStoredFields); StoredFields = newArray; FieldInfo[] newInfoArray = new FieldInfo[newSize]; Array.Copy(FieldInfos, 0, newInfoArray, 0, NumStoredFields); FieldInfos = newInfoArray; } StoredFields[NumStoredFields] = field; FieldInfos[NumStoredFields] = fieldInfo; NumStoredFields++; Debug.Assert(DocState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField")); } }
private object GetFieldValue(IndexableField field) { object numericFieldValue = field.NumericValue; return(numericFieldValue != null ? numericFieldValue : field.StringValue); }
/// <summary> /// Processes all occurrences of a single field </summary> public abstract void ProcessFields(IndexableField[] fields, int count);
internal override void Start(IndexableField f) { if (DoVectorOffsets) { OffsetAttribute = FieldState.AttributeSource_Renamed.AddAttribute<IOffsetAttribute>(); } else { OffsetAttribute = null; } if (DoVectorPayloads && FieldState.AttributeSource_Renamed.HasAttribute<IPayloadAttribute>()) { PayloadAttribute = FieldState.AttributeSource_Renamed.GetAttribute<IPayloadAttribute>(); } else { PayloadAttribute = null; } }
internal override bool Start(IndexableField[] fields, int count) { DoVectors = false; DoVectorPositions = false; DoVectorOffsets = false; DoVectorPayloads = false; HasPayloads = false; for (int i = 0; i < count; i++) { IndexableField field = fields[i]; if (field.FieldType.Indexed) { if (field.FieldType.StoreTermVectors) { DoVectors = true; DoVectorPositions |= field.FieldType.StoreTermVectorPositions; DoVectorOffsets |= field.FieldType.StoreTermVectorOffsets; if (DoVectorPositions) { DoVectorPayloads |= field.FieldType.StoreTermVectorPayloads; } else if (field.FieldType.StoreTermVectorPayloads) { // TODO: move this check somewhere else, and impl the other missing ones throw new System.ArgumentException("cannot index term vector payloads without term vector positions (field=\"" + field.Name + "\")"); } } else { if (field.FieldType.StoreTermVectorOffsets) { throw new System.ArgumentException("cannot index term vector offsets when term vectors are not indexed (field=\"" + field.Name + "\")"); } if (field.FieldType.StoreTermVectorPositions) { throw new System.ArgumentException("cannot index term vector positions when term vectors are not indexed (field=\"" + field.Name + "\")"); } if (field.FieldType.StoreTermVectorPayloads) { throw new System.ArgumentException("cannot index term vector payloads when term vectors are not indexed (field=\"" + field.Name + "\")"); } } } else { if (field.FieldType.StoreTermVectors) { throw new System.ArgumentException("cannot index term vectors when field is not indexed (field=\"" + field.Name + "\")"); } if (field.FieldType.StoreTermVectorOffsets) { throw new System.ArgumentException("cannot index term vector offsets when field is not indexed (field=\"" + field.Name + "\")"); } if (field.FieldType.StoreTermVectorPositions) { throw new System.ArgumentException("cannot index term vector positions when field is not indexed (field=\"" + field.Name + "\")"); } if (field.FieldType.StoreTermVectorPayloads) { throw new System.ArgumentException("cannot index term vector payloads when field is not indexed (field=\"" + field.Name + "\")"); } } } if (DoVectors) { TermsWriter.HasVectors = true; if (TermsHashPerField.BytesHash.Size() != 0) { // Only necessary if previous doc hit a // non-aborting exception while writing vectors in // this field: TermsHashPerField.Reset(); } } // TODO: only if needed for performance //perThread.postingsCount = 0; return DoVectors; }
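A small assumed example (not from the original source) of a FieldType that satisfies the checks in Start above: the field is indexed, and term vector payloads are only enabled together with term vector positions.

// Hypothetical field setup that passes the term vector validation above.
var ft = new FieldType(TextField.TYPE_STORED);
ft.StoreTermVectors = true;
ft.StoreTermVectorPositions = true;
ft.StoreTermVectorOffsets = true;
ft.StoreTermVectorPayloads = true;   // only legal because positions are enabled
ft.Freeze();
doc.Add(new Field("body", "some analyzed text", ft));   // 'doc' is a Document assumed to exist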
private static void Add(IDictionary <string, IndexableField> map, IndexableField field) { map[field.Name()] = field; }
public virtual void SearchIndex(Directory dir, string oldName) { //QueryParser parser = new QueryParser("contents", new MockAnalyzer(random)); //Query query = parser.parse("handle:1"); IndexReader reader = DirectoryReader.Open(dir); IndexSearcher searcher = NewSearcher(reader); TestUtil.CheckIndex(dir); // true if this is a 4.0+ index bool is40Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("content5") != null; // true if this is a 4.2+ index bool is42Index = MultiFields.GetMergedFieldInfos(reader).FieldInfo("dvSortedSet") != null; Debug.Assert(is40Index); // NOTE: currently we can only do this on trunk! Bits liveDocs = MultiFields.GetLiveDocs(reader); for (int i = 0; i < 35; i++) { if (liveDocs.Get(i)) { Document d = reader.Document(i); IList <IndexableField> fields = d.Fields; bool isProxDoc = d.GetField("content3") == null; if (isProxDoc) { int numFields = is40Index ? 7 : 5; Assert.AreEqual(numFields, fields.Count); IndexableField f = d.GetField("id"); Assert.AreEqual("" + i, f.StringValue); f = d.GetField("utf8"); Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue); f = d.GetField("autf8"); Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue); f = d.GetField("content2"); Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue); f = d.GetField("fie\u2C77ld"); Assert.AreEqual("field with non-ascii name", f.StringValue); } Fields tfvFields = reader.GetTermVectors(i); Assert.IsNotNull(tfvFields, "i=" + i); Terms tfv = tfvFields.Terms("utf8"); Assert.IsNotNull(tfv, "docID=" + i + " index=" + oldName); } else { // Only ID 7 is deleted Assert.AreEqual(7, i); } } if (is40Index) { // check docvalues fields NumericDocValues dvByte = MultiDocValues.GetNumericValues(reader, "dvByte"); BinaryDocValues dvBytesDerefFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefFixed"); BinaryDocValues dvBytesDerefVar = MultiDocValues.GetBinaryValues(reader, "dvBytesDerefVar"); SortedDocValues dvBytesSortedFixed = MultiDocValues.GetSortedValues(reader, "dvBytesSortedFixed"); SortedDocValues dvBytesSortedVar = MultiDocValues.GetSortedValues(reader, "dvBytesSortedVar"); BinaryDocValues dvBytesStraightFixed = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightFixed"); BinaryDocValues dvBytesStraightVar = MultiDocValues.GetBinaryValues(reader, "dvBytesStraightVar"); NumericDocValues dvDouble = MultiDocValues.GetNumericValues(reader, "dvDouble"); NumericDocValues dvFloat = MultiDocValues.GetNumericValues(reader, "dvFloat"); NumericDocValues dvInt = MultiDocValues.GetNumericValues(reader, "dvInt"); NumericDocValues dvLong = MultiDocValues.GetNumericValues(reader, "dvLong"); NumericDocValues dvPacked = MultiDocValues.GetNumericValues(reader, "dvPacked"); NumericDocValues dvShort = MultiDocValues.GetNumericValues(reader, "dvShort"); SortedSetDocValues dvSortedSet = null; if (is42Index) { dvSortedSet = MultiDocValues.GetSortedSetValues(reader, "dvSortedSet"); } for (int i = 0; i < 35; i++) { int id = Convert.ToInt32(reader.Document(i).Get("id")); Assert.AreEqual(id, dvByte.Get(i)); sbyte[] bytes = new sbyte[] { (sbyte)((int)((uint)id >> 24)), (sbyte)((int)((uint)id >> 16)), (sbyte)((int)((uint)id >> 8)), (sbyte)id }; BytesRef expectedRef = new BytesRef((byte[])(Array)bytes); BytesRef scratch = new BytesRef(); dvBytesDerefFixed.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesDerefVar.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesSortedFixed.Get(i, scratch); 
Assert.AreEqual(expectedRef, scratch); dvBytesSortedVar.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesStraightFixed.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); dvBytesStraightVar.Get(i, scratch); Assert.AreEqual(expectedRef, scratch); Assert.AreEqual((double)id, BitConverter.Int64BitsToDouble(dvDouble.Get(i)), 0D); Assert.AreEqual((float)id, Number.IntBitsToFloat((int)dvFloat.Get(i)), 0F); Assert.AreEqual(id, dvInt.Get(i)); Assert.AreEqual(id, dvLong.Get(i)); Assert.AreEqual(id, dvPacked.Get(i)); Assert.AreEqual(id, dvShort.Get(i)); if (is42Index) { dvSortedSet.Document = i; long ord = dvSortedSet.NextOrd(); Assert.AreEqual(SortedSetDocValues.NO_MORE_ORDS, dvSortedSet.NextOrd()); dvSortedSet.LookupOrd(ord, scratch); Assert.AreEqual(expectedRef, scratch); } } } ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs; // First document should be #0 Document doc = searcher.IndexReader.Document(hits[0].Doc); assertEquals("didn't get the right document first", "0", doc.Get("id")); DoTestHits(hits, 34, searcher.IndexReader); if (is40Index) { hits = searcher.Search(new TermQuery(new Term("content5", "aaa")), null, 1000).ScoreDocs; DoTestHits(hits, 34, searcher.IndexReader); hits = searcher.Search(new TermQuery(new Term("content6", "aaa")), null, 1000).ScoreDocs; DoTestHits(hits, 34, searcher.IndexReader); } hits = searcher.Search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).ScoreDocs; Assert.AreEqual(34, hits.Length); hits = searcher.Search(new TermQuery(new Term("utf8", "lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).ScoreDocs; Assert.AreEqual(34, hits.Length); hits = searcher.Search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).ScoreDocs; Assert.AreEqual(34, hits.Length); reader.Dispose(); }
internal override bool Start(IndexableField[] fields, int count) { DoCall = Consumer.Start(fields, count); BytesHash.Reinit(); if (NextPerField != null) { DoNextCall = NextPerField.Start(fields, count); } return DoCall || DoNextCall; }