public AnonymousClassEnumeration(Document enclosingInstance)
{
    InitBlock(enclosingInstance);
}

private void InitBlock(Document enclosingInstance)
{
    this.enclosingInstance = enclosingInstance;
    iter = Enclosing_Instance.fields.GetEnumerator();
}
// Add the size of the field as a byte[] containing the 4 bytes of the integer byte size
// (high order byte first; char = 2 bytes).
// Read just the size -- the caller must skip the field content to continue reading fields.
// Return the size in bytes or chars, depending on field type.
private int AddFieldSize(Document doc, FieldInfo fi, bool binary, bool compressed)
{
    int size = fieldsStream.ReadVInt();
    int bytesize = binary || compressed ? size : 2 * size;
    byte[] sizebytes = new byte[4];
    sizebytes[0] = (byte) SupportClass.Number.URShift(bytesize, 24);
    sizebytes[1] = (byte) SupportClass.Number.URShift(bytesize, 16);
    sizebytes[2] = (byte) SupportClass.Number.URShift(bytesize, 8);
    sizebytes[3] = (byte) bytesize;
    doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
    return size;
}
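// Illustrative sketch (not part of the original source): a caller that requested
// FieldSelectorResult.SIZE could recover the stored size from the 4-byte big-endian
// value roughly like this, assuming the field's binary value is fetched by name:
//
//   byte[] sizebytes = doc.GetBinaryValue(fi.name);
//   int storedSize = (sizebytes[0] << 24) | (sizebytes[1] << 16)
//                  | (sizebytes[2] << 8) | sizebytes[3];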
private void AddField(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
{
    // We have a binary stored field, and it may be compressed.
    if (binary)
    {
        int toRead = fieldsStream.ReadVInt();
        byte[] b = new byte[toRead];
        fieldsStream.ReadBytes(b, 0, b.Length);
        if (compressed)
            doc.Add(new Field(fi.name, Uncompress(b), Field.Store.COMPRESS));
        else
            doc.Add(new Field(fi.name, b, Field.Store.YES));
    }
    else
    {
        Field.Store store = Field.Store.YES;
        Field.Index index = GetIndexType(fi, tokenize);
        Field.TermVector termVector = GetTermVectorType(fi);

        AbstractField f;
        if (compressed)
        {
            store = Field.Store.COMPRESS;
            int toRead = fieldsStream.ReadVInt();
            byte[] b = new byte[toRead];
            fieldsStream.ReadBytes(b, 0, b.Length);
            f = new Field(fi.name, false, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index, termVector);
            f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
            f.SetOmitNorms(fi.omitNorms);
        }
        else
        {
            f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector);
            f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
            f.SetOmitNorms(fi.omitNorms);
        }
        doc.Add(f);
    }
}
// In merge mode we don't uncompress the data of a compressed field.
private void AddFieldForMerge(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
{
    System.Object data;

    if (binary || compressed)
    {
        int toRead = fieldsStream.ReadVInt();
        byte[] b = new byte[toRead];
        fieldsStream.ReadBytes(b, 0, b.Length);
        data = b;
    }
    else
    {
        data = fieldsStream.ReadString();
    }

    doc.Add(new FieldForMerge(data, fi, binary, compressed, tokenize));
}
private void AddFieldLazy(Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
{
    if (binary)
    {
        int toRead = fieldsStream.ReadVInt();
        long pointer = fieldsStream.GetFilePointer();
        if (compressed)
        {
            //was: doc.add(new Fieldable(fi.name, uncompress(b), Fieldable.Store.COMPRESS));
            doc.Add(new LazyField(this, fi.name, Field.Store.COMPRESS, toRead, pointer, binary));
        }
        else
        {
            //was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
            doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary));
        }
        // Need to move the pointer ahead by toRead positions.
        fieldsStream.Seek(pointer + toRead);
    }
    else
    {
        Field.Store store = Field.Store.YES;
        Field.Index index = GetIndexType(fi, tokenize);
        Field.TermVector termVector = GetTermVectorType(fi);

        AbstractField f;
        if (compressed)
        {
            store = Field.Store.COMPRESS;
            int toRead = fieldsStream.ReadVInt();
            long pointer = fieldsStream.GetFilePointer();
            f = new LazyField(this, fi.name, store, toRead, pointer, binary);
            // Skip over the part that we aren't loading.
            fieldsStream.Seek(pointer + toRead);
            f.SetOmitNorms(fi.omitNorms);
            f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
        }
        else
        {
            int length = fieldsStream.ReadVInt();
            long pointer = fieldsStream.GetFilePointer();
            // Skip ahead of where we are by the length of what is stored.
            if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
                fieldsStream.Seek(pointer + length);
            else
                fieldsStream.SkipChars(length);
            f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary);
            f.SetOmitNorms(fi.omitNorms);
            f.SetOmitTermFreqAndPositions(fi.omitTermFreqAndPositions);
        }
        doc.Add(f);
    }
}
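// Note (added comment, not in the original source): in every branch above the fields
// stream is advanced past the unread value -- via Seek(pointer + toRead),
// Seek(pointer + length), or SkipChars(length) -- while the LazyField keeps only the
// recorded pointer and length. The value can therefore be read back later on demand
// without disturbing the sequential parse of the remaining fields in Doc().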
public /*internal*/ Document Doc(int n, FieldSelector fieldSelector)
{
    SeekIndex(n);
    long position = indexStream.ReadLong();
    fieldsStream.Seek(position);

    Document doc = new Document();
    int numFields = fieldsStream.ReadVInt();
    for (int i = 0; i < numFields; i++)
    {
        int fieldNumber = fieldsStream.ReadVInt();
        FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
        FieldSelectorResult acceptField = fieldSelector == null ? FieldSelectorResult.LOAD : fieldSelector.Accept(fi.name);

        byte bits = fieldsStream.ReadByte();
        System.Diagnostics.Debug.Assert(bits <= FieldsWriter.FIELD_IS_COMPRESSED + FieldsWriter.FIELD_IS_TOKENIZED + FieldsWriter.FIELD_IS_BINARY);

        bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
        bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
        bool binary = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;

        // TODO: Find an alternative approach here if this list continues to grow beyond the
        // list of 5 or 6 currently here. See LUCENE-762 for discussion.
        if (acceptField.Equals(FieldSelectorResult.LOAD))
        {
            AddField(doc, fi, binary, compressed, tokenize);
        }
        else if (acceptField.Equals(FieldSelectorResult.LOAD_FOR_MERGE))
        {
            AddFieldForMerge(doc, fi, binary, compressed, tokenize);
        }
        else if (acceptField.Equals(FieldSelectorResult.LOAD_AND_BREAK))
        {
            AddField(doc, fi, binary, compressed, tokenize);
            break; // Get out of this loop.
        }
        else if (acceptField.Equals(FieldSelectorResult.LAZY_LOAD))
        {
            AddFieldLazy(doc, fi, binary, compressed, tokenize);
        }
        else if (acceptField.Equals(FieldSelectorResult.SIZE))
        {
            SkipField(binary, compressed, AddFieldSize(doc, fi, binary, compressed));
        }
        else if (acceptField.Equals(FieldSelectorResult.SIZE_AND_BREAK))
        {
            AddFieldSize(doc, fi, binary, compressed);
            break;
        }
        else
        {
            SkipField(binary, compressed);
        }
    }

    return doc;
}
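// Illustrative sketch (assumption, not part of the original source): a typical caller
// might load a single stored field eagerly and stop, letting every other field be
// skipped, with a selector along these lines. "TitleOnlySelector" and the "title"
// field name are hypothetical helpers used only for illustration:
//
//   private class TitleOnlySelector : FieldSelector
//   {
//       public FieldSelectorResult Accept(System.String fieldName)
//       {
//           return fieldName.Equals("title")
//               ? FieldSelectorResult.LOAD_AND_BREAK
//               : FieldSelectorResult.NO_LOAD;
//       }
//   }
//
//   Document doc = fieldsReader.Doc(docId, new TitleOnlySelector());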