		internal void SetField(FieldInfo fieldInfo)
		{
			this.fieldInfo = fieldInfo;
			omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
			storePayloads = fieldInfo.storePayloads;
			posWriter.SetField(fieldInfo);
		}
Example #2
		public DocInverterPerField(DocInverterPerThread perThread, FieldInfo fieldInfo)
		{
			this.perThread = perThread;
			this.fieldInfo = fieldInfo;
			docState = perThread.docState;
			fieldState = perThread.fieldState;
			this.consumer = perThread.consumer.AddField(this, fieldInfo);
			this.endConsumer = perThread.endConsumer.AddField(this, fieldInfo);
		}
		public TermVectorsTermsWriterPerField(TermsHashPerField termsHashPerField, TermVectorsTermsWriterPerThread perThread, FieldInfo fieldInfo)
		{
			this.termsHashPerField = termsHashPerField;
			this.perThread = perThread;
			this.termsWriter = perThread.termsWriter;
			this.fieldInfo = fieldInfo;
			docState = termsHashPerField.docState;
			fieldState = termsHashPerField.fieldState;
		}
Example #4
		public TermsHashPerField(DocInverterPerField docInverterPerField, TermsHashPerThread perThread, TermsHashPerThread nextPerThread, FieldInfo fieldInfo)
		{
			InitBlock();
			this.perThread = perThread;
			intPool = perThread.intPool;
			charPool = perThread.charPool;
			bytePool = perThread.bytePool;
			docState = perThread.docState;
			fieldState = docInverterPerField.fieldState;
			this.consumer = perThread.consumer.AddField(this, fieldInfo);
			streamCount = consumer.GetStreamCount();
			numPostingInt = 2 * streamCount;
			this.fieldInfo = fieldInfo;
			if (nextPerThread != null)
				nextPerField = (TermsHashPerField) nextPerThread.AddField(docInverterPerField, fieldInfo);
			else
				nextPerField = null;
		}
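The constructor above fans one field out to a chain of consumers: this TermsHash's consumer gets its own per-field object, and the same field is then registered with the next TermsHash via nextPerThread.AddField, with null ending the chain. A reduced sketch of that chaining shape, hypothetical names throughout:

		using System;

		sealed class PerFieldChain
		{
			public readonly string FieldName;
			public readonly PerFieldChain Next; // null at the end of the pipeline

			private PerFieldChain(string fieldName, PerFieldChain next)
			{
				FieldName = fieldName;
				Next = next;
			}

			// Mirrors `nextPerField = nextPerThread.AddField(...)` above:
			// each stage registers the field with the stage behind it.
			public static PerFieldChain AddField(string name, int stages)
			{
				return stages == 0 ? null : new PerFieldChain(name, AddField(name, stages - 1));
			}

			static void Main()
			{
				var chain = PerFieldChain.AddField("body", stages: 2);
				Console.WriteLine(chain.Next != null);      // True: a second consumer exists
				Console.WriteLine(chain.Next.Next == null); // True: the chain ends
			}
		}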
		internal void SetField(FieldInfo fieldInfo)
		{
			omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
			// Payloads live in the positions stream, so they are forced off
			// whenever positions are omitted.
			storePayloads = omitTermFreqAndPositions ? false : fieldInfo.storePayloads;
		}
Example #6
		public override DocFieldConsumerPerField AddField(FieldInfo fi)
		{
			return new DocInverterPerField(this, fi);
		}
		/// <summary>Adds a new field.</summary>
		internal override FormatPostingsTermsConsumer AddField(FieldInfo field)
		{
			termsWriter.SetField(field);
			return termsWriter;
		}
		public DocFieldProcessorPerField(DocFieldProcessorPerThread perThread, FieldInfo fieldInfo)
		{
			this.consumer = perThread.consumer.AddField(fieldInfo);
			this.fieldInfo = fieldInfo;
		}
Example #9
		internal void WriteField(FieldInfo fi, IFieldable field)
		{
			fieldsStream.WriteVInt(fi.number);
			byte bits = 0;
			if (field.IsTokenized)
				bits |= FieldsWriter.FIELD_IS_TOKENIZED;
			if (field.IsBinary)
				bits |= FieldsWriter.FIELD_IS_BINARY;
			
			fieldsStream.WriteByte(bits);
			
			// compression is disabled for the current field
			if (field.IsBinary)
			{
				byte[] data = field.GetBinaryValue();
				int len = field.BinaryLength;
				int offset = field.BinaryOffset;
					
				fieldsStream.WriteVInt(len);
				fieldsStream.WriteBytes(data, offset, len);
			}
			else
			{
				fieldsStream.WriteString(field.StringValue);
			}
		}
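The record written above is the field number as a VInt, a single flags byte, then the value (length-prefixed bytes for binary fields, a string otherwise). As a self-contained illustration of how that flags byte packs and unpacks (the constant values mirror FieldsWriter's flags, but treat them as assumptions here):

		using System;

		static class FieldBitsDemo
		{
			const byte FIELD_IS_TOKENIZED = 0x1;
			const byte FIELD_IS_BINARY = 0x2;

			static void Main()
			{
				byte bits = 0;
				bits |= FIELD_IS_TOKENIZED;                          // tokenized, not binary
				Console.WriteLine((bits & FIELD_IS_TOKENIZED) != 0); // True
				Console.WriteLine((bits & FIELD_IS_BINARY) != 0);    // False
			}
		}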
Example #10
		private FieldInfo AddInternal(String name, bool isIndexed, bool storeTermVector, bool storePositionWithTermVector, bool storeOffsetWithTermVector, bool omitNorms, bool storePayloads, bool omitTermFreqAndPositions)
		{
			name = StringHelper.Intern(name);
			var fi = new FieldInfo(name, isIndexed, byNumber.Count, storeTermVector, storePositionWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
			byNumber.Add(fi);
			byName[name] = fi;
			return fi;
		}
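AddInternal keeps two views of the same field set: byNumber is a list whose index is the field number (hence byNumber.Count supplies the next number) and byName is a dictionary for fast lookup, with the name interned first so identity comparisons stay cheap. A minimal sketch of the same dual-registry idea, hypothetical names throughout:

		using System;
		using System.Collections.Generic;

		sealed class FieldRegistry
		{
			private readonly List<string> byNumber = new List<string>();
			private readonly Dictionary<string, int> byName = new Dictionary<string, int>();

			public int Add(string name)
			{
				int number = byNumber.Count; // the next list index doubles as the field number
				byNumber.Add(name);
				byName[name] = number;
				return number;
			}

			static void Main()
			{
				var r = new FieldRegistry();
				Console.WriteLine(r.Add("title")); // 0
				Console.WriteLine(r.Add("body"));  // 1
			}
		}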
		internal void SetField(FieldInfo fieldInfo)
		{
			this.fieldInfo = fieldInfo;
			docsWriter.SetField(fieldInfo);
		}
Example #12
		// Adds the size of the field as a byte[] holding the 4 bytes of the integer byte size (high-order byte first; char = 2 bytes).
		// Reads just the size -- the caller must skip the field content to continue reading fields.
		// Returns the size in bytes or chars, depending on the field type.
		private int AddFieldSize(Document.Document doc, FieldInfo fi, bool binary, bool compressed)
		{
			int size = fieldsStream.ReadVInt();
			int bytesize = binary || compressed ? size : 2 * size; // chars are 2 bytes each
			var sizebytes = new byte[4];
			sizebytes[0] = (byte) (Number.URShift(bytesize, 24));
			sizebytes[1] = (byte) (Number.URShift(bytesize, 16));
			sizebytes[2] = (byte) (Number.URShift(bytesize, 8));
			sizebytes[3] = (byte) bytesize;
			doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
			return size;
		}
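The four Number.URShift lines above encode bytesize as big-endian bytes. A standalone sketch of the same packing using plain C# shifts (after the cast to byte, an arithmetic shift keeps the same low-order bits an unsigned shift would):

		using System;

		static class SizeBytesDemo
		{
			static byte[] ToBigEndian(int value)
			{
				return new[]
				{
					(byte) (value >> 24), // high-order byte first
					(byte) (value >> 16),
					(byte) (value >> 8),
					(byte) value
				};
			}

			static void Main()
			{
				// A 10-char text field is reported as 20 bytes (char = 2 bytes).
				Console.WriteLine(BitConverter.ToString(ToBigEndian(20))); // 00-00-00-14
			}
		}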
Example #13
		private void AddField(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			//we have a binary stored field, and it may be compressed
			if (binary)
			{
				int toRead = fieldsStream.ReadVInt();
				var b = new byte[toRead];
				fieldsStream.ReadBytes(b, 0, b.Length);
				doc.Add(compressed ? new Field(fi.name, Uncompress(b), Field.Store.YES) : new Field(fi.name, b, Field.Store.YES));
			}
			else
			{
				const Field.Store store = Field.Store.YES;
				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
				
				AbstractField f;
				if (compressed)
				{
					int toRead = fieldsStream.ReadVInt();
					
					var b = new byte[toRead];
					fieldsStream.ReadBytes(b, 0, b.Length);
					f = new Field(fi.name, false, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index,
					              termVector) {OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
				}
				else
				{
					f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector)
					    	{OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
				}

				doc.Add(f);
			}
		}
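FieldExtensions.ToIndex above maps the pair (isIndexed, tokenize) onto Lucene's indexing modes. A hedged sketch of that mapping, with a hypothetical enum standing in for Field.Index:

		using System;

		enum IndexMode { No, Analyzed, NotAnalyzed }

		static class IndexModeDemo
		{
			static IndexMode ToIndex(bool isIndexed, bool tokenize)
			{
				if (!isIndexed)
					return IndexMode.No; // stored only, never searched
				return tokenize ? IndexMode.Analyzed : IndexMode.NotAnalyzed;
			}

			static void Main()
			{
				Console.WriteLine(ToIndex(true, true));   // Analyzed
				Console.WriteLine(ToIndex(true, false));  // NotAnalyzed
				Console.WriteLine(ToIndex(false, false)); // No
			}
		}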
Example #14
		private void AddFieldLazy(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			if (binary)
			{
				int toRead = fieldsStream.ReadVInt();
				long pointer = fieldsStream.FilePointer;
				//was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
				doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));

				//Need to move the pointer ahead by toRead positions
				fieldsStream.Seek(pointer + toRead);
			}
			else
			{
				const Field.Store store = Field.Store.YES;
				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
				
				AbstractField f;
				if (compressed)
				{
					int toRead = fieldsStream.ReadVInt();
					long pointer = fieldsStream.FilePointer;
					f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
					//skip over the part that we aren't loading
					fieldsStream.Seek(pointer + toRead);
					f.OmitNorms = fi.omitNorms;
					f.OmitTermFreqAndPositions = fi.omitTermFreqAndPositions;
				}
				else
				{
					int length = fieldsStream.ReadVInt();
					long pointer = fieldsStream.FilePointer;
					//Skip ahead of where we are by the length of what is stored
					if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
					{
						fieldsStream.Seek(pointer + length);
					}
					else
					{
						fieldsStream.SkipChars(length);
					}
					f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed)
					    	{OmitNorms = fi.omitNorms, OmitTermFreqAndPositions = fi.omitTermFreqAndPositions};
				}

				doc.Add(f);
			}
		}
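The lazy path above never materializes the value: it records the current file pointer and the length, seeks past the content so the reader can continue, and only reads the bytes when the field is actually accessed. A minimal, self-contained sketch of that idiom over a plain Stream (all names hypothetical):

		using System;
		using System.IO;

		sealed class LazyValue
		{
			private readonly Stream stream;
			private readonly long pointer;
			private readonly int length;

			public LazyValue(Stream stream, long pointer, int length)
			{
				this.stream = stream;
				this.pointer = pointer;
				this.length = length;
			}

			public byte[] Load()
			{
				stream.Seek(pointer, SeekOrigin.Begin); // jump back to the recorded position
				var b = new byte[length];
				stream.Read(b, 0, length);
				return b;
			}

			static void Main()
			{
				var s = new MemoryStream(new byte[] {1, 2, 3, 4, 5});
				var lazy = new LazyValue(s, pointer: 1, length: 3);
				s.Seek(4, SeekOrigin.Begin);                      // the reader moves on past the field
				Console.WriteLine(string.Join(",", lazy.Load())); // 2,3,4
			}
		}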
		internal bool VectorFieldsInOrder(FieldInfo fi)
		{
			try
			{
				if (lastVectorFieldName != null)
					return String.CompareOrdinal(lastVectorFieldName, fi.name) < 0;
				else
					return true;
			}
			finally
			{
				lastVectorFieldName = fi.name;
			}
		}
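VectorFieldsInOrder leans on try/finally ordering: the comparison against the previous field name is evaluated first, then the finally block records the new name, so a single call both checks and advances the state. A standalone sketch of the idiom:

		using System;

		sealed class OrderChecker
		{
			private string last;

			public bool InOrder(string name)
			{
				try
				{
					return last == null || string.CompareOrdinal(last, name) < 0;
				}
				finally
				{
					last = name; // runs after the return value is computed
				}
			}

			static void Main()
			{
				var c = new OrderChecker();
				Console.WriteLine(c.InOrder("author")); // True (first field)
				Console.WriteLine(c.InOrder("title"));  // True ("author" < "title")
				Console.WriteLine(c.InOrder("body"));   // False (out of order)
			}
		}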
		public override TermsHashConsumerPerField AddField(TermsHashPerField termsHashPerField, FieldInfo fieldInfo)
		{
			return new TermVectorsTermsWriterPerField(termsHashPerField, this, fieldInfo);
		}