/// <summary>
/// Creates a Lucene field configured with the supplied storage, indexing
/// and term-vector options, with the given boost applied.
/// </summary>
/// <param name="name">The field name.</param>
/// <param name="value">The field value.</param>
/// <param name="storageType">How the value is stored in the index.</param>
/// <param name="indexType">How the value is indexed.</param>
/// <param name="vectorType">How term vectors are recorded for the field.</param>
/// <param name="boost">The boost factor to apply to the field.</param>
/// <returns>The configured field as an <see cref="AbstractField"/>.</returns>
protected AbstractField CreateField(string name, string value, LuceneField.Store storageType, LuceneField.Index indexType, LuceneField.TermVector vectorType, float boost)
{
    var luceneField = new LuceneField(name, value, storageType, indexType, vectorType);
    luceneField.SetBoost(boost);

    return luceneField;
}
/// <summary>
/// Initializes a new <see cref="IndexFieldInfo"/> describing one field
/// to be written to the index: its name/value pair plus the Lucene
/// storage, indexing and term-vector settings.
/// </summary>
/// <param name="name">The field name.</param>
/// <param name="value">The field value.</param>
/// <param name="type">The kind of field (string, numeric, ...).</param>
/// <param name="store">Whether/how the value is stored.</param>
/// <param name="index">Whether/how the value is indexed.</param>
/// <param name="termVector">Whether/how term vectors are kept.</param>
public IndexFieldInfo(string name, string value, FieldInfoType type, Field.Store store, Field.Index index, Field.TermVector termVector)
{
    this.Name = name;
    this.Value = value;
    this.Type = type;
    this.Store = store;
    this.Index = index;
    this.TermVector = termVector;
}
/// <summary>
/// Adds a field to the document unless the key/value is empty or the
/// field would be neither stored nor indexed. The value is lower-cased
/// before being added.
/// </summary>
/// <param name="doc">The document to add the field to.</param>
/// <param name="fieldKey">The field name.</param>
/// <param name="fieldValue">The field value.</param>
/// <param name="storage">Whether/how the value is stored.</param>
/// <param name="index">Whether/how the value is indexed.</param>
/// <param name="vector">Whether/how term vectors are kept.</param>
/// <param name="boost">The boost factor applied to the field.</param>
protected virtual void ProcessField(Document doc, string fieldKey, string fieldValue, LuceneField.Store storage, LuceneField.Index index, LuceneField.TermVector vector, float boost)
{
    // Nothing to do without both a key and a value.
    if (fieldKey.IsNullOrEmpty() || fieldValue.IsNullOrEmpty())
    {
        return;
    }

    // A field that is neither indexed nor stored would be invisible; skip it.
    if (index == LuceneField.Index.NO && storage == LuceneField.Store.NO)
    {
        return;
    }

    doc.Add(this.CreateField(fieldKey, fieldValue.ToLowerInvariant(), storage, index, vector, boost));
}
/// <summary>
/// Adds a field to the document with the default boost of 1.0.
/// </summary>
/// <param name="doc">The document to add the field to.</param>
/// <param name="fieldKey">The field name.</param>
/// <param name="fieldValue">The field value.</param>
/// <param name="storage">Whether/how the value is stored.</param>
/// <param name="index">Whether/how the value is indexed.</param>
/// <param name="vector">Whether/how term vectors are kept.</param>
protected virtual void ProcessField(Document doc, string fieldKey, string fieldValue, LuceneField.Store storage, LuceneField.Index index, LuceneField.TermVector vector)
{
    // Delegate to the full overload with a neutral boost.
    this.ProcessField(doc, fieldKey, fieldValue, storage, index, vector, 1f);
}
/// <summary>
/// Adds a field to the document without term vectors and with the
/// default boost of 1.0.
/// </summary>
/// <param name="doc">The document to add the field to.</param>
/// <param name="fieldKey">The field name.</param>
/// <param name="fieldValue">The field value.</param>
/// <param name="storage">Whether/how the value is stored.</param>
/// <param name="index">Whether/how the value is indexed.</param>
protected virtual void ProcessField(Document doc, string fieldKey, string fieldValue, LuceneField.Store storage, LuceneField.Index index)
{
    // Delegate to the term-vector overload, disabling term vectors.
    this.ProcessField(doc, fieldKey, fieldValue, storage, index, LuceneField.TermVector.NO);
}
/// <summary>
/// Reads the stored fields of document number <paramref name="n"/> from the
/// fields stream and reconstructs them as a <see cref="Document"/>.
/// The read order below mirrors the on-disk stored-fields format exactly;
/// do not reorder the stream reads.
/// </summary>
/// <param name="n">The document number to load.</param>
/// <returns>The reconstructed document with all stored fields.</returns>
public /*internal*/ Document Doc(int n)
{
    // The index stream holds one fixed-width 8-byte pointer per document;
    // seek to entry n and follow it into the fields stream.
    indexStream.Seek(n * 8L);
    long position = indexStream.ReadLong();
    fieldsStream.Seek(position);

    Document doc = new Document();
    int numFields = fieldsStream.ReadVInt();
    for (int i = 0; i < numFields; i++)
    {
        // Each field record starts with its field number and a flags byte.
        int fieldNumber = fieldsStream.ReadVInt();
        FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
        byte bits = fieldsStream.ReadByte();
        bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
        bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
        if ((bits & FieldsWriter.FIELD_IS_BINARY) != 0)
        {
            // Binary field: a VInt length followed by that many raw bytes.
            byte[] b = new byte[fieldsStream.ReadVInt()];
            fieldsStream.ReadBytes(b, 0, b.Length);
            if (compressed)
            {
                doc.Add(new Field(fi.name, Uncompress(b), Field.Store.COMPRESS));
            }
            else
            {
                doc.Add(new Field(fi.name, b, Field.Store.YES));
            }
        }
        else
        {
            // Text field: derive index/term-vector settings from the
            // per-field metadata, then read the value itself.
            Field.Index index;
            Field.Store store = Field.Store.YES;
            if (fi.isIndexed && tokenize)
            {
                index = Field.Index.TOKENIZED;
            }
            else if (fi.isIndexed && !tokenize)
            {
                index = Field.Index.UN_TOKENIZED;
            }
            else
            {
                index = Field.Index.NO;
            }
            Field.TermVector termVector = null;
            if (fi.storeTermVector)
            {
                if (fi.storeOffsetWithTermVector)
                {
                    if (fi.storePositionWithTermVector)
                    {
                        termVector = Field.TermVector.WITH_POSITIONS_OFFSETS;
                    }
                    else
                    {
                        termVector = Field.TermVector.WITH_OFFSETS;
                    }
                }
                else if (fi.storePositionWithTermVector)
                {
                    termVector = Field.TermVector.WITH_POSITIONS;
                }
                else
                {
                    termVector = Field.TermVector.YES;
                }
            }
            else
            {
                termVector = Field.TermVector.NO;
            }
            if (compressed)
            {
                store = Field.Store.COMPRESS;
                // Compressed text is stored as length-prefixed bytes;
                // decompress, then decode as UTF-8.
                byte[] b = new byte[fieldsStream.ReadVInt()];
                fieldsStream.ReadBytes(b, 0, b.Length);
                Field f = new Field(fi.name, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index, termVector);
                f.SetOmitNorms(fi.omitNorms);
                doc.Add(f);
            }
            else
            {
                Field f = new Field(fi.name, fieldsStream.ReadString(), store, index, termVector);
                f.SetOmitNorms(fi.omitNorms);
                doc.Add(f);
            }
        }
    }
    return(doc);
}
/// <summary>
/// Queues a double-valued field for indexing. The value is rendered with
/// the invariant culture so the stored text is culture-independent.
/// </summary>
/// <param name="name">The field name.</param>
/// <param name="value">The numeric value.</param>
/// <param name="store">Whether/how the value is stored.</param>
/// <param name="isIndexed">Whether the field should be indexed (analyzed, no norms).</param>
public void AddField(string name, double value, Field.Store store, bool isIndexed)
{
    var fieldIndex = isIndexed ? Field.Index.ANALYZED_NO_NORMS : Field.Index.NO;
    var formattedValue = value.ToString(CultureInfo.InvariantCulture);

    fields.Add(new IndexFieldInfo(name, formattedValue, FieldInfoType.DoubleField, store, fieldIndex, Field.TermVector.NO));
}
/// <summary>
/// Queues a string-valued field for indexing with explicit storage,
/// indexing and term-vector settings.
/// </summary>
/// <param name="name">The field name.</param>
/// <param name="value">The field value.</param>
/// <param name="store">Whether/how the value is stored.</param>
/// <param name="index">Whether/how the value is indexed.</param>
/// <param name="termVector">Whether/how term vectors are kept.</param>
public void AddField(string name, string value, Field.Store store, Field.Index index, Field.TermVector termVector)
{
    var info = new IndexFieldInfo(name, value, FieldInfoType.StringField, store, index, termVector);
    fields.Add(info);
}
/// <summary>
/// Queues a string-valued field for indexing without term vectors.
/// </summary>
/// <param name="name">The field name.</param>
/// <param name="value">The field value.</param>
/// <param name="store">Whether/how the value is stored.</param>
/// <param name="index">Whether/how the value is indexed.</param>
public void AddField(string name, string value, Field.Store store, Field.Index index)
{
    // Delegate to the full overload, disabling term vectors.
    this.AddField(name, value, store, index, Field.TermVector.NO);
}
/// <summary>
/// Reads document number <paramref name="n"/> but materializes only the
/// fields named in <paramref name="fields"/>; every other field's bytes
/// are skipped in the stream without allocating Field objects.
/// The read order below mirrors the on-disk stored-fields format exactly;
/// do not reorder the stream reads.
/// </summary>
/// <param name="n">The document number to load.</param>
/// <param name="fields">
/// The field names to load; null or empty loads the full document via
/// <see cref="Doc(int)"/>.
/// </param>
/// <returns>A document containing only the requested stored fields.</returns>
public /*internal*/ Document Doc(int n, string[] fields)
{
    if (fields == null || fields.Length == 0)
    {
        return(Doc(n));
    }

    // FIXME: use Hashset
    // (ArrayList.Contains is a linear scan per field record.)
    ArrayList field_list = new ArrayList(fields);
    int num_required_fields = field_list.Count;

    // The index stream holds one fixed-width 8-byte pointer per document;
    // seek to entry n and follow it into the fields stream.
    indexStream.Seek(n * 8L);
    long position = indexStream.ReadLong();
    fieldsStream.Seek(position);

    Document doc = new Document();
    int numFields = fieldsStream.ReadVInt();
    // Stop early once every requested field has been found.
    for (int i = 0; i < numFields && num_required_fields > 0; i++)
    {
        int fieldNumber = fieldsStream.ReadVInt();
        FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
        if (field_list.Contains(fi.name))
        {
            num_required_fields--;
            // Requested field: decode it exactly as Doc(int) does.
            byte bits = fieldsStream.ReadByte();
            bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
            bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
            if ((bits & FieldsWriter.FIELD_IS_BINARY) != 0)
            {
                // Binary field: VInt length followed by that many raw bytes.
                byte[] b = new byte[fieldsStream.ReadVInt()];
                fieldsStream.ReadBytes(b, 0, b.Length);
                if (compressed)
                {
                    doc.Add(new Field(fi.name, Uncompress(b), Field.Store.COMPRESS));
                }
                else
                {
                    doc.Add(new Field(fi.name, b, Field.Store.YES));
                }
            }
            else
            {
                // Text field: derive index/term-vector settings from the
                // per-field metadata, then read the value itself.
                Field.Index index;
                Field.Store store = Field.Store.YES;
                if (fi.isIndexed && tokenize)
                {
                    index = Field.Index.TOKENIZED;
                }
                else if (fi.isIndexed && !tokenize)
                {
                    index = Field.Index.UN_TOKENIZED;
                }
                else
                {
                    index = Field.Index.NO;
                }
                Field.TermVector termVector = null;
                if (fi.storeTermVector)
                {
                    if (fi.storeOffsetWithTermVector)
                    {
                        if (fi.storePositionWithTermVector)
                        {
                            termVector = Field.TermVector.WITH_POSITIONS_OFFSETS;
                        }
                        else
                        {
                            termVector = Field.TermVector.WITH_OFFSETS;
                        }
                    }
                    else if (fi.storePositionWithTermVector)
                    {
                        termVector = Field.TermVector.WITH_POSITIONS;
                    }
                    else
                    {
                        termVector = Field.TermVector.YES;
                    }
                }
                else
                {
                    termVector = Field.TermVector.NO;
                }
                if (compressed)
                {
                    store = Field.Store.COMPRESS;
                    // Compressed text is stored as length-prefixed bytes;
                    // decompress, then decode as UTF-8.
                    byte[] b = new byte[fieldsStream.ReadVInt()];
                    fieldsStream.ReadBytes(b, 0, b.Length);
                    Field f = new Field(fi.name, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index, termVector);
                    f.SetOmitNorms(fi.omitNorms);
                    doc.Add(f);
                }
                else
                {
                    Field f = new Field(fi.name, fieldsStream.ReadString(), store, index, termVector);
                    f.SetOmitNorms(fi.omitNorms);
                    doc.Add(f);
                }
            }
        }
        else
        {
            // Unwanted field: consume its bytes so the stream stays
            // positioned at the next field record, but build nothing.
            byte bits = fieldsStream.ReadByte();
            bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
            bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
            if ((bits & FieldsWriter.FIELD_IS_BINARY) != 0)
            {
                //byte[] b = new byte[fieldsStream.ReadVInt()];
                //fieldsStream.ReadBytes(b, 0, b.Length);
                // Skip a binary field: VInt length, then that many bytes.
                int length = fieldsStream.ReadVInt();
                for (int j = 0; j < length; j++)
                {
                    fieldsStream.ReadByte();
                }
            }
            else
            {
                if (compressed)
                {
                    //byte[] b = new byte[fieldsStream.ReadVInt()];
                    //fieldsStream.ReadBytes(b, 0, b.Length);
                    // Skip a compressed text field: stored like binary,
                    // VInt byte length then the bytes.
                    int length = fieldsStream.ReadVInt();
                    for (int j = 0; j < length; j++)
                    {
                        fieldsStream.ReadByte();
                    }
                }
                else
                {
                    //fieldsStream.ReadString ();
                    // Skip an uncompressed string. The VInt prefix is a
                    // CHARACTER count (ReadString's format), so each
                    // character's UTF-8 byte length must be decoded from
                    // its lead byte: 1 byte for ASCII (high bit clear),
                    // 2 bytes when the lead byte is not in the 0xE0 class,
                    // otherwise 3 bytes.
                    int length = fieldsStream.ReadVInt();
                    for (int j = 0; j < length; j++)
                    {
                        byte b = fieldsStream.ReadByte();
                        if ((b & 0x80) == 0)
                        {
                            continue;
                        }
                        else if ((b & 0xE0) != 0xE0)
                        {
                            fieldsStream.ReadByte();
                        }
                        else
                        {
                            fieldsStream.ReadByte();
                            fieldsStream.ReadByte();
                        }
                    }
                }
            }
        }
    }
    return(doc);
}