// Adds field fi to doc as a LazyField: the stored value is NOT read now; only
// its length and the current stream position are captured so the value can be
// fetched on demand. The stream is always advanced past the stored value so
// the next field can be read.
private void AddFieldLazy(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
{
    if (binary)
    {
        // Length-prefixed binary value: record (length, position), then skip it.
        int toRead = fieldsStream.ReadVInt();
        long pointer = fieldsStream.FilePointer;
        //was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
        doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));
        //Need to move the pointer ahead by toRead positions
        fieldsStream.Seek(pointer + toRead);
    }
    else
    {
        const Field.Store store = Field.Store.YES;
        Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
        Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
        AbstractField f;
        if (compressed)
        {
            // Compressed text is stored as a length-prefixed byte blob, so a
            // direct seek past it is always valid.
            int toRead = fieldsStream.ReadVInt();
            long pointer = fieldsStream.FilePointer;
            f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
            //skip over the part that we aren't loading
            fieldsStream.Seek(pointer + toRead);
            f.OmitNorms = fi.omitNorms;
            f.OmitTermFreqAndPositions = fi.omitTermFreqAndPositions;
        }
        else
        {
            int length = fieldsStream.ReadVInt();
            long pointer = fieldsStream.FilePointer;
            //Skip ahead of where we are by the length of what is stored
            // Newer formats record the length in BYTES, so we can seek directly;
            // older formats recorded a CHAR count, so we must skip char-by-char.
            if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
            {
                fieldsStream.Seek(pointer + length);
            }
            else
            {
                fieldsStream.SkipChars(length);
            }
            f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed)
                { OmitNorms = fi.omitNorms, OmitTermFreqAndPositions = fi.omitTermFreqAndPositions };
        }
        doc.Add(f);
    }
}
// Assembles the stored-fields view of document n by appending, in order, the
// fields contributed by each reader in storedFieldReaders. A reader is skipped
// entirely when the selector would NO_LOAD every field it holds.
public override Document.Document Document(int n, FieldSelector fieldSelector)
{
    EnsureOpen();
    var merged = new Document.Document();
    foreach (IndexReader reader in storedFieldReaders)
    {
        bool wanted = fieldSelector == null;
        if (!wanted)
        {
            // Probe the reader's known field names; one acceptable field is enough.
            foreach (var fieldName in readerToFields[reader])
            {
                if (fieldSelector.Accept(fieldName) != FieldSelectorResult.NO_LOAD)
                {
                    wanted = true;
                    break;
                }
            }
        }
        if (!wanted)
        {
            continue;
        }
        foreach (var storedField in reader.Document(n, fieldSelector).GetFields())
        {
            merged.Add(storedField);
        }
    }
    return merged;
}
// Adds a synthetic stored field whose value is the field's size encoded as a
// 4-byte big-endian integer (high-order byte first). The size is in bytes for
// binary/compressed fields and 2 * chars for text fields (char = 2 bytes).
// Only the size prefix is consumed here — the caller must skip the field's
// actual content to continue reading fields.
// Returns the raw size read from the stream (bytes or chars per field type).
private int AddFieldSize(Document.Document doc, FieldInfo fi, bool binary, bool compressed)
{
    int size = fieldsStream.ReadVInt();
    int byteSize = (binary || compressed) ? size : 2 * size;
    var encoded = new byte[]
    {
        (byte) Number.URShift(byteSize, 24),
        (byte) Number.URShift(byteSize, 16),
        (byte) Number.URShift(byteSize, 8),
        (byte) byteSize
    };
    doc.Add(new Field(fi.name, encoded, Field.Store.YES));
    return size;
}
// Reads the stored value of field fi from fieldsStream and adds it to doc as a
// fully materialized Field (contrast with AddFieldLazy, which only records the
// value's position). Binary and compressed values arrive as length-prefixed
// byte blobs; compressed ones are inflated via Uncompress before use.
private void AddField(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
{
    //we have a binary stored field, and it may be compressed
    if (binary)
    {
        int toRead = fieldsStream.ReadVInt();
        var b = new byte[toRead];
        fieldsStream.ReadBytes(b, 0, b.Length);
        doc.Add(compressed
                    ? new Field(fi.name, Uncompress(b), Field.Store.YES)
                    : new Field(fi.name, b, Field.Store.YES));
    }
    else
    {
        const Field.Store store = Field.Store.YES;
        Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
        Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
        AbstractField f;
        if (compressed)
        {
            int toRead = fieldsStream.ReadVInt();
            var b = new byte[toRead];
            fieldsStream.ReadBytes(b, 0, b.Length);
            // Use the cached Encoding.UTF8 singleton rather than looking the
            // encoding up by name on every call; GetString output is identical.
            f = new Field(fi.name, false, System.Text.Encoding.UTF8.GetString(Uncompress(b)), store, index, termVector)
                { OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms };
        }
        else
        {
            f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector)
                { OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms };
        }
        doc.Add(f);
    }
}
// Collects document n's stored fields from every underlying reader, appending
// them into one result document. Readers whose fields the selector would all
// NO_LOAD are not asked for the document at all.
public override Document.Document Document(int n, FieldSelector fieldSelector)
{
    EnsureOpen();
    Document.Document combined = new Document.Document();
    foreach (IndexReader storedReader in storedFieldReaders)
    {
        bool loadFromReader;
        if (fieldSelector == null)
        {
            // No selector: every reader contributes.
            loadFromReader = true;
        }
        else
        {
            loadFromReader = false;
            foreach (var candidate in readerToFields[storedReader])
            {
                if (fieldSelector.Accept(candidate) != FieldSelectorResult.NO_LOAD)
                {
                    loadFromReader = true;
                    break;
                }
            }
        }
        if (loadFromReader)
        {
            foreach (var contributed in storedReader.Document(n, fieldSelector).GetFields())
            {
                combined.Add(contributed);
            }
        }
    }
    return combined;
}