Example #1
		public NormsWriterPerField(DocInverterPerField docInverterPerField, NormsWriterPerThread perThread, FieldInfo fieldInfo)
		{
			this.perThread = perThread;
			this.fieldInfo = fieldInfo;
			docState = perThread.docState;
			fieldState = docInverterPerField.fieldState;
		}
		internal void SetField(FieldInfo fieldInfo)
		{
			this.fieldInfo = fieldInfo;
			omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
			storePayloads = fieldInfo.storePayloads;
			posWriter.SetField(fieldInfo);
		}
		public FreqProxTermsWriterPerField(TermsHashPerField termsHashPerField, FreqProxTermsWriterPerThread perThread, FieldInfo fieldInfo)
		{
			this.termsHashPerField = termsHashPerField;
			this.perThread = perThread;
			this.fieldInfo = fieldInfo;
			docState = termsHashPerField.docState;
			fieldState = termsHashPerField.fieldState;
			omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
		}
		public TermVectorsTermsWriterPerField(TermsHashPerField termsHashPerField, TermVectorsTermsWriterPerThread perThread, FieldInfo fieldInfo)
		{
			this.termsHashPerField = termsHashPerField;
			this.perThread = perThread;
			this.termsWriter = perThread.termsWriter;
			this.fieldInfo = fieldInfo;
			docState = termsHashPerField.docState;
			fieldState = termsHashPerField.fieldState;
		}
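All three constructors above follow the same wiring pattern: the per-field object borrows the shared docState and fieldState from its per-thread parent rather than owning copies. A minimal, self-contained sketch of that pattern follows; the type names are hypothetical stand-ins, not the real Lucene.NET classes.

	public class DocStateSketch { public int docID; }
	public class FieldStateSketch { public int position; }

	public class PerThreadSketch
	{
		public readonly DocStateSketch docState = new DocStateSketch();
		public readonly FieldStateSketch fieldState = new FieldStateSketch();
	}

	public class PerFieldSketch
	{
		private readonly DocStateSketch docState;
		private readonly FieldStateSketch fieldState;

		public PerFieldSketch(PerThreadSketch perThread)
		{
			// Borrow, don't copy: every per-field object created by the same
			// per-thread parent sees the same docID and position state while
			// a document is being indexed.
			docState = perThread.docState;
			fieldState = perThread.fieldState;
		}
	}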
Example #5
		public DocInverterPerField(DocInverterPerThread perThread, FieldInfo fieldInfo)
		{
			this.perThread = perThread;
			this.fieldInfo = fieldInfo;
			docState = perThread.docState;
			fieldState = perThread.fieldState;
			this.consumer = perThread.consumer.AddField(this, fieldInfo);
			this.endConsumer = perThread.endConsumer.AddField(this, fieldInfo);
		}
		public void AddField(Fieldable field, FieldInfo fieldInfo)
		{
			if (doc == null)
			{
				doc = storedFieldsWriter.GetPerDoc();
				doc.docID = docState.docID;
				localFieldsWriter.SetFieldsStream(doc.fdt);
				System.Diagnostics.Debug.Assert(doc.numStoredFields == 0, "doc.numStoredFields=" + doc.numStoredFields);
				System.Diagnostics.Debug.Assert(0 == doc.fdt.Length());
				System.Diagnostics.Debug.Assert(0 == doc.fdt.GetFilePointer());
			}
			
			localFieldsWriter.WriteField(fieldInfo, field);
			System.Diagnostics.Debug.Assert(docState.TestPoint("StoredFieldsWriterPerThread.processFields.writeField"));
			doc.numStoredFields++;
		}
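AddField above allocates its per-document state lazily: the first stored field of a document fetches a recycled per-doc object, and the asserts check that the recycled instance really was reset. A small, self-contained sketch of the same allocate-on-first-field idea, using hypothetical names (PerDocSketch, the pool, and FinishDocument are illustrative, not the real API):

	using System.Collections.Generic;

	public class PerDocSketch
	{
		public int numStoredFields;
		public List<string> fields = new List<string>();
		public void Reset() { numStoredFields = 0; fields.Clear(); }
	}

	public class StoredFieldsSketch
	{
		private readonly Stack<PerDocSketch> pool = new Stack<PerDocSketch>();
		private PerDocSketch doc; // null between documents

		public void AddField(string value)
		{
			if (doc == null)
			{
				// First field of this document: reuse a pooled instance and
				// assert it was reset before it went back to the pool.
				doc = pool.Count > 0 ? pool.Pop() : new PerDocSketch();
				System.Diagnostics.Debug.Assert(doc.numStoredFields == 0);
			}
			doc.fields.Add(value);
			doc.numStoredFields++;
		}

		public void FinishDocument()
		{
			if (doc == null)
				return;
			doc.Reset();    // reset before recycling, so the assert above holds
			pool.Push(doc);
			doc = null;
		}
	}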
Example #7
		public TermsHashPerField(DocInverterPerField docInverterPerField, TermsHashPerThread perThread, TermsHashPerThread nextPerThread, FieldInfo fieldInfo)
		{
			InitBlock();
			this.perThread = perThread;
			intPool = perThread.intPool;
			charPool = perThread.charPool;
			bytePool = perThread.bytePool;
			docState = perThread.docState;
			fieldState = docInverterPerField.fieldState;
			this.consumer = perThread.consumer.AddField(this, fieldInfo);
			streamCount = consumer.GetStreamCount();
			numPostingInt = 2 * streamCount;
			this.fieldInfo = fieldInfo;
			if (nextPerThread != null)
				nextPerField = (TermsHashPerField) nextPerThread.AddField(docInverterPerField, fieldInfo);
			else
				nextPerField = null;
		}
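The nextPerThread/nextPerField links above chain a second consumer behind the first; in this version of the codebase the primary TermsHash feeds FreqProxTermsWriter and forwards to a secondary TermsHash feeding TermVectorsTermsWriter, so both see the same tokens. A minimal sketch of such a forwarding chain, with illustrative names (ITokenSinkSketch and ChainedPerFieldSketch are not the real API):

	public interface ITokenSinkSketch
	{
		void Add(string token);
	}

	public class ChainedPerFieldSketch : ITokenSinkSketch
	{
		private readonly string name;
		private readonly ITokenSinkSketch next; // null terminates the chain

		public ChainedPerFieldSketch(string name, ITokenSinkSketch next)
		{
			this.name = name;
			this.next = next;
		}

		public void Add(string token)
		{
			System.Console.WriteLine(name + " consumed: " + token);
			if (next != null)
				next.Add(token); // the secondary consumer sees the same token
		}
	}

	// Usage: both consumers observe every token exactly once, in order.
	//   var chain = new ChainedPerFieldSketch("freqProx",
	//       new ChainedPerFieldSketch("termVectors", null));
	//   chain.Add("lucene");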
Example #8
		// in merge mode we don't uncompress the data of a compressed field
		private void AddFieldForMerge(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			System.Object data;
			
			if (binary || compressed)
			{
				int toRead = fieldsStream.ReadVInt();
				byte[] b = new byte[toRead];
				fieldsStream.ReadBytes(b, 0, b.Length);
				data = b;
			}
			else
			{
				data = fieldsStream.ReadString();
			}
			
			doc.Add(new FieldForMerge(data, fi, binary, compressed, tokenize));
		}
		public abstract DocFieldConsumerPerField AddField(FieldInfo fi);
		/// <summary>Add a new field.</summary>
		internal override FormatPostingsTermsConsumer AddField(FieldInfo field)
		{
			termsWriter.SetField(field);
			return termsWriter;
		}
		internal abstract InvertedDocConsumerPerField AddField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo);
Example #12
		internal override InvertedDocConsumerPerField AddField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo)
		{
			return new TermsHashPerField(docInverterPerField, this, nextPerThread, fieldInfo);
		}
Example #13
		internal override InvertedDocEndConsumerPerField AddField(DocInverterPerField docInverterPerField, FieldInfo fieldInfo)
		{
			return new NormsWriterPerField(docInverterPerField, this, fieldInfo);
		}
		public override TermsHashConsumerPerField AddField(TermsHashPerField termsHashPerField, FieldInfo fieldInfo)
		{
			return new FreqProxTermsWriterPerField(termsHashPerField, this, fieldInfo);
		}
		public override DocFieldConsumerPerField AddField(FieldInfo fi)
		{
			return new DocFieldConsumersPerField(this, one.AddField(fi), two.AddField(fi));
		}
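The DocFieldConsumersPerField returned above tees each call out to two downstream consumers (`one` and `two`). A self-contained sketch of the tee, with hypothetical interface and class names:

	public interface IFieldConsumerSketch
	{
		void ProcessField(string name, string value);
	}

	public class TeePerFieldSketch : IFieldConsumerSketch
	{
		private readonly IFieldConsumerSketch one;
		private readonly IFieldConsumerSketch two;

		public TeePerFieldSketch(IFieldConsumerSketch one, IFieldConsumerSketch two)
		{
			this.one = one;
			this.two = two;
		}

		public void ProcessField(string name, string value)
		{
			// Both branches see every field exactly once, in the same order.
			one.ProcessField(name, value);
			two.ProcessField(name, value);
		}
	}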
Example #16
		private void AddFieldLazy(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			if (binary)
			{
				int toRead = fieldsStream.ReadVInt();
				long pointer = fieldsStream.GetFilePointer();
				if (compressed)
				{
					//was: doc.add(new Fieldable(fi.name, uncompress(b), Fieldable.Store.COMPRESS));
					doc.Add(new LazyField(this, fi.name, Field.Store.COMPRESS, toRead, pointer, binary));
				}
				else
				{
					//was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
					doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary));
				}
				//Need to move the pointer ahead by toRead positions
				fieldsStream.Seek(pointer + toRead);
			}
			else
			{
				Field.Store store = Field.Store.YES;
				Field.Index index = GetIndexType(fi, tokenize);
				Field.TermVector termVector = GetTermVectorType(fi);
				
				AbstractField f;
				if (compressed)
				{
					store = Field.Store.COMPRESS;
					int toRead = fieldsStream.ReadVInt();
					long pointer = fieldsStream.GetFilePointer();
					f = new LazyField(this, fi.name, store, toRead, pointer, binary);
					//skip over the part that we aren't loading
					fieldsStream.Seek(pointer + toRead);
					f.SetOmitNorms(fi.omitNorms);
					f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
				}
				else
				{
					int length = fieldsStream.ReadVInt();
					long pointer = fieldsStream.GetFilePointer();
					//Skip ahead of where we are by the length of what is stored
					if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
						fieldsStream.Seek(pointer + length);
					else
						fieldsStream.SkipChars(length);
					f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary);
					f.SetOmitNorms(fi.omitNorms);
					f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
				}
				doc.Add(f);
			}
		}
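AddFieldLazy defers the actual read: it records only where a value lives (pointer + length), seeks past it, and lets the LazyField fetch the bytes on demand. A simplified, self-contained sketch of that deferred-read idea; LazyValueSketch is a hypothetical stand-in for LazyField:

	using System.IO;

	public class LazyValueSketch
	{
		private readonly Stream stream;
		private readonly long pointer;
		private readonly int length;

		public LazyValueSketch(Stream stream, long pointer, int length)
		{
			this.stream = stream;
			this.pointer = pointer;
			this.length = length;
		}

		public byte[] Load()
		{
			// Deferred read: the stored bytes are only touched on request.
			// (Assumes the stream really holds `length` bytes at `pointer`.)
			stream.Seek(pointer, SeekOrigin.Begin);
			byte[] b = new byte[length];
			int read = 0;
			while (read < length)
				read += stream.Read(b, read, length - read);
			return b;
		}
	}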
		internal bool VectorFieldsInOrder(FieldInfo fi)
		{
			try
			{
				// Return whether this field sorts after the previous vector
				// field; the finally block updates lastVectorFieldName only
				// after the return value has been computed.
				if (lastVectorFieldName != null)
					return String.CompareOrdinal(lastVectorFieldName, fi.name) < 0;
				else
					return true;
			}
			finally
			{
				lastVectorFieldName = fi.name;
			}
		}
Example #18
		internal void WriteField(FieldInfo fi, Fieldable field)
		{
			// if the field is an instance of FieldsReader.FieldForMerge, we're in merge mode
			// and field.GetBinaryValue() already returns the compressed value for a field
			// with IsCompressed()==true, so we disable compression in that case
			bool disableCompression = (field is FieldsReader.FieldForMerge);
			fieldsStream.WriteVInt(fi.number);
			byte bits = 0;
			if (field.IsTokenized())
				bits |= FieldsWriter.FIELD_IS_TOKENIZED;
			if (field.IsBinary())
				bits |= FieldsWriter.FIELD_IS_BINARY;
			if (field.IsCompressed())
				bits |= FieldsWriter.FIELD_IS_COMPRESSED;
			
			fieldsStream.WriteByte(bits);
			
			if (field.IsCompressed())
			{
				// compression is enabled for the current field
				byte[] data;
				int len;
				int offset;
				if (disableCompression)
				{
					// optimized case for merging, the data
					// is already compressed
					data = field.GetBinaryValue();
					System.Diagnostics.Debug.Assert(data != null);
					len = field.GetBinaryLength();
					offset = field.GetBinaryOffset();
				}
				else
				{
					// check if it is a binary field
					if (field.IsBinary())
					{
						data = CompressionTools.Compress(field.GetBinaryValue(), field.GetBinaryOffset(), field.GetBinaryLength());
					}
					else
					{
						byte[] x = System.Text.Encoding.UTF8.GetBytes(field.StringValue());
						data = CompressionTools.Compress(x, 0, x.Length);
					}
					len = data.Length;
					offset = 0;
				}
				
				fieldsStream.WriteVInt(len);
				fieldsStream.WriteBytes(data, offset, len);
			}
			else
			{
				// compression is disabled for the current field
				if (field.IsBinary())
				{
					byte[] data;
					int len;
					int offset;
					data = field.GetBinaryValue();
					len = field.GetBinaryLength();
					offset = field.GetBinaryOffset();
					
					fieldsStream.WriteVInt(len);
					fieldsStream.WriteBytes(data, offset, len);
				}
				else
				{
					fieldsStream.WriteString(field.StringValue());
				}
			}
		}
		abstract public TermsHashConsumerPerField AddField(TermsHashPerField termsHashPerField, FieldInfo fieldInfo);
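WriteField above packs three booleans into a single flags byte before the value. A reader recovers them by masking; the constant values below (0x1, 0x2, 0x4) match Lucene 2.x's FieldsWriter, but treat them as an assumption on other versions:

	public static class FieldFlagsSketch
	{
		public const byte FIELD_IS_TOKENIZED = 0x1;
		public const byte FIELD_IS_BINARY = 0x2;
		public const byte FIELD_IS_COMPRESSED = 0x4;

		// Decoding counterpart of the flags byte written in WriteField.
		public static void Decode(byte bits, out bool tokenized, out bool binary, out bool compressed)
		{
			tokenized = (bits & FIELD_IS_TOKENIZED) != 0;
			binary = (bits & FIELD_IS_BINARY) != 0;
			compressed = (bits & FIELD_IS_COMPRESSED) != 0;
		}
	}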
Example #20
		private void AddField(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			
			//we have a binary stored field, and it may be compressed
			if (binary)
			{
				int toRead = fieldsStream.ReadVInt();
				byte[] b = new byte[toRead];
				fieldsStream.ReadBytes(b, 0, b.Length);
				if (compressed)
					doc.Add(new Field(fi.name, Uncompress(b), Field.Store.COMPRESS));
				else
					doc.Add(new Field(fi.name, b, Field.Store.YES));
			}
			else
			{
				Field.Store store = Field.Store.YES;
				Field.Index index = GetIndexType(fi, tokenize);
				Field.TermVector termVector = GetTermVectorType(fi);
				
				AbstractField f;
				if (compressed)
				{
					store = Field.Store.COMPRESS;
					int toRead = fieldsStream.ReadVInt();
					
					byte[] b = new byte[toRead];
					fieldsStream.ReadBytes(b, 0, b.Length);
					f = new Field(fi.name, false, System.Text.Encoding.UTF8.GetString(Uncompress(b)), store, index, termVector);
					f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
					f.SetOmitNorms(fi.omitNorms);
				}
				else
				{
					f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector);
					f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
					f.SetOmitNorms(fi.omitNorms);
				}
				doc.Add(f);
			}
		}
Example #21
		internal void SetField(FieldInfo fieldInfo)
		{
			this.fieldInfo = fieldInfo;
			docsWriter.SetField(fieldInfo);
		}
Example #22
		// Add the size of the field as a byte[] containing the 4 bytes of the integer byte size (high order byte first; char = 2 bytes)
		// Read just the size -- the caller must skip the field content to continue reading fields
		// Return the size in bytes or chars, depending on the field type
		private int AddFieldSize(Document doc, FieldInfo fi, bool binary, bool compressed)
		{
			int size = fieldsStream.ReadVInt();
			int bytesize = (binary || compressed) ? size : 2 * size;
			byte[] sizebytes = new byte[4];
			sizebytes[0] = (byte) (SupportClass.Number.URShift(bytesize, 24));
			sizebytes[1] = (byte) (SupportClass.Number.URShift(bytesize, 16));
			sizebytes[2] = (byte) (SupportClass.Number.URShift(bytesize, 8));
			sizebytes[3] = (byte) bytesize;
			doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
			return size;
		}
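Since the four size bytes are written high order byte first, a reader rebuilds the int like this. A small decoding sketch, not part of the original API:

	public static class FieldSizeSketch
	{
		// Inverse of the encoding above: big-endian byte order.
		public static int Decode(byte[] sizebytes)
		{
			return (sizebytes[0] << 24) | (sizebytes[1] << 16) | (sizebytes[2] << 8) | sizebytes[3];
		}
	}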
Example #23
		public DocFieldProcessorPerField(DocFieldProcessorPerThread perThread, FieldInfo fieldInfo)
		{
			this.consumer = perThread.consumer.AddField(fieldInfo);
			this.fieldInfo = fieldInfo;
		}
Example #24
		private Field.TermVector GetTermVectorType(FieldInfo fi)
		{
			Field.TermVector termVector = null;
			if (fi.storeTermVector)
			{
				if (fi.storeOffsetWithTermVector)
				{
					if (fi.storePositionWithTermVector)
					{
						termVector = Field.TermVector.WITH_POSITIONS_OFFSETS;
					}
					else
					{
						termVector = Field.TermVector.WITH_OFFSETS;
					}
				}
				else if (fi.storePositionWithTermVector)
				{
					termVector = Field.TermVector.WITH_POSITIONS;
				}
				else
				{
					termVector = Field.TermVector.YES;
				}
			}
			else
			{
				termVector = Field.TermVector.NO;
			}
			return termVector;
		}
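The mapping above folds the two booleans (store positions, store offsets) into five enum-like values. For illustration, the inverse mapping; Kind is a hypothetical stand-in for Field.TermVector:

	public static class TermVectorSketch
	{
		// Hypothetical stand-in for Field.TermVector.
		public enum Kind { NO, YES, WITH_POSITIONS, WITH_OFFSETS, WITH_POSITIONS_OFFSETS }

		public static void Decode(Kind tv, out bool positions, out bool offsets)
		{
			positions = tv == Kind.WITH_POSITIONS || tv == Kind.WITH_POSITIONS_OFFSETS;
			offsets = tv == Kind.WITH_OFFSETS || tv == Kind.WITH_POSITIONS_OFFSETS;
		}
	}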
Example #25
		public override DocFieldConsumerPerField AddField(FieldInfo fi)
		{
			return new DocInverterPerField(this, fi);
		}
Example #26
		private Field.Index GetIndexType(FieldInfo fi, bool tokenize)
		{
			Field.Index index;
			if (fi.isIndexed && tokenize)
				index = Field.Index.ANALYZED;
			else if (fi.isIndexed && !tokenize)
				index = Field.Index.NOT_ANALYZED;
			else
				index = Field.Index.NO;
			return index;
		}
		internal void SetField(FieldInfo fieldInfo)
		{
			omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
			storePayloads = omitTermFreqAndPositions ? false : fieldInfo.storePayloads;
		}
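The ternary above encodes a real constraint: payloads are attached to positions, so a field that omits term frequencies and positions cannot store payloads. An illustrative restatement of the guard, not part of the original API:

	public static class PayloadRuleSketch
	{
		public static bool EffectiveStorePayloads(bool omitTermFreqAndPositions, bool storePayloads)
		{
			// Payloads are stored per position; with positions omitted there
			// is nowhere to attach them, so the request is silently dropped.
			return !omitTermFreqAndPositions && storePayloads;
		}
	}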
Example #28
			public FieldForMerge(System.Object value_Renamed, FieldInfo fi, bool binary, bool compressed, bool tokenize)
			{
				this.isStored = true;
				this.fieldsData = value_Renamed;
				this.isCompressed = compressed;
				this.isBinary = binary;
				if (binary)
					binaryLength = ((byte[]) value_Renamed).Length;
				
				this.isTokenized = tokenize;
				
				this.name = StringHelper.Intern(fi.name);
				this.isIndexed = fi.isIndexed;
				this.omitNorms = fi.omitNorms;
				this.omitTermFreqAndPositions = fi.omitTermFreqAndPositions;
				this.storeOffsetWithTermVector = fi.storeOffsetWithTermVector;
				this.storePositionWithTermVector = fi.storePositionWithTermVector;
				this.storeTermVector = fi.storeTermVector;
			}
Example #29
		private FieldInfo AddInternal(System.String name, bool isIndexed, bool storeTermVector, bool storePositionWithTermVector, bool storeOffsetWithTermVector, bool omitNorms, bool storePayloads, bool omitTermFreqAndPositions)
		{
			name = StringHelper.Intern(name);
			FieldInfo fi = new FieldInfo(name, isIndexed, byNumber.Count, storeTermVector, storePositionWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
			byNumber.Add(fi);
			byName[name] = fi;
			return fi;
		}
		/// <summary>Add a new field.</summary>
		internal abstract FormatPostingsTermsConsumer AddField(FieldInfo field);
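AddInternal above maintains a dual index: each field gets a dense number (its position in byNumber) and is also reachable by interned name. A self-contained sketch of that registry pattern, with hypothetical stand-in names:

	using System.Collections.Generic;

	public class FieldEntrySketch
	{
		public readonly string name;
		public readonly int number;
		public FieldEntrySketch(string name, int number) { this.name = name; this.number = number; }
	}

	public class FieldRegistrySketch
	{
		private readonly List<FieldEntrySketch> byNumber = new List<FieldEntrySketch>();
		private readonly Dictionary<string, FieldEntrySketch> byName = new Dictionary<string, FieldEntrySketch>();

		public FieldEntrySketch Add(string name)
		{
			name = string.Intern(name); // mirrors StringHelper.Intern above
			FieldEntrySketch existing;
			if (byName.TryGetValue(name, out existing))
				return existing;
			// The field number is simply the next slot in byNumber.
			FieldEntrySketch fi = new FieldEntrySketch(name, byNumber.Count);
			byNumber.Add(fi);
			byName[name] = fi;
			return fi;
		}
	}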