Example #1
            public virtual bool MoveNext()
            {
                // LUCENENET specific - Since there is no way to check for a next element
                // without calling this method in .NET, the assert is redundant and ineffective.
                //if (Debugging.AssertsEnabled) Debugging.Assert(input.Position < input.Length); // Has next
                if (input.Position < input.Length) // LUCENENET specific: Renamed from getFilePointer() to match FileStream
                {
                    try
                    {
                        int code = input.ReadVInt32();
                        if ((code & 1) != 0)
                        {
                            field = input.ReadString();
                        }
                        int prefix = code.TripleShift(1);
                        int suffix = input.ReadVInt32();
                        bytes.Grow(prefix + suffix);
                        input.ReadBytes(bytes.Bytes, prefix, suffix);
                        bytes.Length = prefix + suffix;
                        term.Set(field, bytes);
                    }
                    catch (Exception e) when (e.IsIOException())
                    {
                        throw RuntimeException.Create(e);
                    }

                    return true;
                }
                return false;
            }
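Example #1 above (and its older variants in Examples #2, #3, and #6 below) decodes a prefix-compressed term stream: a VInt code whose low bit signals that a new field name follows, whose remaining bits (the code shifted right unsigned by 1, via TripleShift/URShift) give the length of the prefix shared with the previous term, followed by a VInt suffix length and the suffix bytes. The sketch below reconstructs that decoding over plain .NET streams; the VInt layout, the VInt-length-prefixed UTF-8 field name, and all type names here are assumptions made for illustration, not the Lucene.NET IndexInput API.

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

static class PrefixTermDecoder
{
    // Variable-length int: 7 payload bits per byte, high bit set while more bytes follow
    // (assumed layout for this sketch, not read from the Lucene.NET sources).
    private static int ReadVInt(BinaryReader reader)
    {
        int value = 0, shift = 0;
        byte b;
        do
        {
            b = reader.ReadByte();
            value |= (b & 0x7F) << shift;
            shift += 7;
        } while ((b & 0x80) != 0);
        return value;
    }

    // Yields (field, term) pairs from a stream written as:
    //   code = (sharedPrefixLength << 1) | newFieldFlag, [field string], VInt suffixLength, suffix bytes.
    public static IEnumerable<(string Field, string Term)> Decode(Stream input)
    {
        using var reader = new BinaryReader(input, Encoding.UTF8, leaveOpen: true);
        string field = null;
        var bytes = new List<byte>();
        while (input.Position < input.Length)
        {
            int code = ReadVInt(reader);
            if ((code & 1) != 0)
            {
                // Assumed encoding: VInt byte length followed by the UTF-8 field name.
                field = Encoding.UTF8.GetString(reader.ReadBytes(ReadVInt(reader)));
            }
            int prefix = (int)((uint)code >> 1);             // unsigned shift, as TripleShift/URShift above
            int suffix = ReadVInt(reader);
            bytes.RemoveRange(prefix, bytes.Count - prefix); // keep the shared prefix from the previous term
            bytes.AddRange(reader.ReadBytes(suffix));        // append this term's suffix
            yield return (field, Encoding.UTF8.GetString(bytes.ToArray()));
        }
    }
}

Enumerating PrefixTermDecoder.Decode(stream) would then yield the terms in the order they were written.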
Example #2
            public virtual bool MoveNext()
            {
                // LUCENENET specific - Since there is no way to check for a next element
                // without calling this method in .NET, the assert is redundant and ineffective.
                //if (Debugging.AssertsEnabled) Debugging.Assert(input.GetFilePointer() < input.Length); // Has next
                if (input.GetFilePointer() < input.Length)
                {
                    try
                    {
                        int code = input.ReadVInt32();
                        if ((code & 1) != 0)
                        {
                            field = input.ReadString();
                        }
                        int prefix = code.TripleShift(1);
                        int suffix = input.ReadVInt32();
                        bytes.Grow(prefix + suffix);
                        input.ReadBytes(bytes.Bytes, prefix, suffix);
                        bytes.Length = prefix + suffix;
                        term.Set(field, bytes);
                    }
                    catch (IOException e)
                    {
                        throw new Exception(e.ToString(), e);
                    }

                    return true;
                }
                return false;
            }
Example #3
            public virtual bool MoveNext()
            {
                if (input.GetFilePointer() < input.Length)
                {
                    try
                    {
                        int code = input.ReadVInt32();
                        if ((code & 1) != 0)
                        {
                            field = input.ReadString();
                        }
                        int prefix = Number.URShift(code, 1);
                        int suffix = input.ReadVInt32();
                        bytes.Grow(prefix + suffix);
                        input.ReadBytes(bytes.Bytes, prefix, suffix);
                        bytes.Length = prefix + suffix;
                        term.Set(field, bytes);
                    }
                    catch (IOException e)
                    {
                        throw new Exception(e.ToString(), e);
                    }

                    return true;
                }
                return false;
            }
Example #4
        public CompoundFileReader(Directory dir, System.String name, int readBufferSize)
        {
            directory           = dir;
            fileName            = name;
            this.readBufferSize = readBufferSize;

            bool success = false;

            try
            {
                stream = dir.OpenInput(name, readBufferSize);

                // read the directory and init files
                int       count = stream.ReadVInt();
                FileEntry entry = null;
                for (int i = 0; i < count; i++)
                {
                    long          offset = stream.ReadLong();
                    System.String id     = stream.ReadString();

                    if (entry != null)
                    {
                        // set length of the previous entry
                        entry.length = offset - entry.offset;
                    }

                    entry = new FileEntry {
                        offset = offset
                    };
                    entries[id] = entry;
                }

                // set the length of the final entry
                if (entry != null)
                {
                    entry.length = stream.Length() - entry.offset;
                }

                success = true;
            }
            finally
            {
                if (!success && (stream != null))
                {
                    try
                    {
                        stream.Close();
                    }
                    catch (System.IO.IOException)
                    {
                    }
                }
            }
        }
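The loop in Example #4 computes entry lengths lazily: an entry's length only becomes known once the next entry's offset has been read, and the final entry runs to the end of the compound file. Below is a minimal standalone sketch of that bookkeeping; TocEntry and ComputeLengths are illustrative names, not part of the Lucene.NET API.

using System.Collections.Generic;

public sealed class TocEntry
{
    public string Id { get; set; }
    public long Offset { get; set; }
    public long Length { get; set; }
}

public static class CompoundToc
{
    // entries must be in file order; fileLength is the total length of the compound file.
    public static void ComputeLengths(IList<TocEntry> entries, long fileLength)
    {
        for (int i = 0; i < entries.Count; i++)
        {
            // Each entry extends up to the next entry's offset, or to the end of the file.
            long next = (i + 1 < entries.Count) ? entries[i + 1].Offset : fileLength;
            entries[i].Length = next - entries[i].Offset;
        }
    }
}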
Example #5
        private void  Read(IndexInput input, String fileName)
        {
            int firstInt = input.ReadVInt();

            if (firstInt < 0)
            {
                // This is a real format
                format = firstInt;
            }
            else
            {
                format = FORMAT_PRE;
            }

            if (format != FORMAT_PRE && format != FORMAT_START)
            {
                throw new CorruptIndexException("unrecognized format " + format + " in file \"" + fileName + "\"");
            }

            int size;

            if (format == FORMAT_PRE)
            {
                size = firstInt;
            }
            else
            {
                size = input.ReadVInt();                 //read in the size
            }

            for (int i = 0; i < size; i++)
            {
                String name            = StringHelper.Intern(input.ReadString());
                byte   bits            = input.ReadByte();
                bool   isIndexed       = (bits & IS_INDEXED) != 0;
                bool   storeTermVector = (bits & STORE_TERMVECTOR) != 0;
                bool   storePositionsWithTermVector = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
                bool   storeOffsetWithTermVector    = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
                bool   omitNorms                = (bits & OMIT_NORMS) != 0;
                bool   storePayloads            = (bits & STORE_PAYLOADS) != 0;
                bool   omitTermFreqAndPositions = (bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0;

                AddInternal(name, isIndexed, storeTermVector, storePositionsWithTermVector, storeOffsetWithTermVector, omitNorms, storePayloads, omitTermFreqAndPositions);
            }

            if (input.FilePointer != input.Length())
            {
                throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.FilePointer + " vs size " + input.Length());
            }
        }
Example #6
 public bool MoveNext()
 {
     if (input.FilePointer < input.Length())
     {
         int code = input.ReadVInt();
         if ((code & 1) != 0)
         {
             field = input.ReadString();
         }
         int prefix = Number.URShift(code, 1);
         int suffix = input.ReadVInt();
         bytes.Grow(prefix + suffix);
         input.ReadBytes(bytes.Bytes, prefix, suffix);
         bytes.Length = prefix + suffix;
         term.Set(field, bytes);
          return true;
      }
      return false;
 }
Example #7
        private SegmentInfo ReadUpgradedSegmentInfo(string name, Directory dir, IndexInput input)
        {
            CodecUtil.CheckHeader(input, Lucene3xSegmentInfoFormat.UPGRADED_SI_CODEC_NAME, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_START, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_CURRENT);
            string version = input.ReadString();

            int docCount = input.ReadInt();

            IDictionary <string, string> attributes = input.ReadStringStringMap();

            bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;

            IDictionary <string, string> diagnostics = input.ReadStringStringMap();

            ISet <string> files = input.ReadStringSet();

            SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, null, diagnostics, CollectionsHelper.UnmodifiableMap(attributes));

            info.Files = files;
            return info;
        }
Example #8
        public override SegmentInfo Read(Directory dir, string segment, IOContext context)
        {
            string     fileName = IndexFileNames.SegmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
            IndexInput input    = dir.OpenInput(fileName, context);
            bool       success  = false;

            try
            {
                CodecUtil.CheckHeader(input, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_START, Lucene40SegmentInfoFormat.VERSION_CURRENT);
                string version  = input.ReadString();
                int    docCount = input.ReadInt32();
                if (docCount < 0)
                {
                    throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
                }
                bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;
                IDictionary <string, string> diagnostics = input.ReadStringStringMap();
                input.ReadStringStringMap(); // read deprecated attributes
                ISet <string> files = input.ReadStringSet();

                CodecUtil.CheckEOF(input);

                SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics);
                si.SetFiles(files);

                success = true;

                return si;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(input);
                }
                else
                {
                    input.Dispose();
                }
            }
        }
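Example #8 (and the FieldInfos readers later in this list) follows a consistent cleanup discipline: track a success flag, dispose the input normally on the happy path, and on failure dispose it in a way that cannot mask the original exception (IOUtils.CloseWhileHandlingException / DisposeWhileHandlingException). The sketch below captures that pattern over a plain IDisposable; SafeRead and With are assumed names for illustration, not Lucene.NET types.

using System;

public static class SafeRead
{
    // open() creates the resource; read() consumes it and returns the parsed result.
    public static TResult With<TResource, TResult>(Func<TResource> open, Func<TResource, TResult> read)
        where TResource : IDisposable
    {
        TResource input = open();
        bool success = false;
        try
        {
            TResult result = read(input);
            success = true;
            return result;
        }
        finally
        {
            if (success)
            {
                input.Dispose();            // normal path: let any dispose error surface
            }
            else
            {
                try { input.Dispose(); }    // failure path: clean up without hiding the original exception
                catch { /* swallowed, mirroring IOUtils.CloseWhileHandlingException */ }
            }
        }
    }
}

A caller would use it roughly as SafeRead.With(() => dir.OpenInput(fileName, context), input => { /* parse */ }).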
Example #9
        private void AddField(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
        {
            //we have a binary stored field, and it may be compressed
            if (binary)
            {
                int toRead = fieldsStream.ReadVInt();
                var b      = new byte[toRead];
                fieldsStream.ReadBytes(b, 0, b.Length);
                doc.Add(compressed ? new Field(fi.name, Uncompress(b), Field.Store.YES) : new Field(fi.name, b, Field.Store.YES));
            }
            else
            {
                const Field.Store store      = Field.Store.YES;
                Field.Index       index      = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
                Field.TermVector  termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);

                AbstractField f;
                if (compressed)
                {
                    int toRead = fieldsStream.ReadVInt();

                    var b = new byte[toRead];
                    fieldsStream.ReadBytes(b, 0, b.Length);
                    f = new Field(fi.name, false, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index,
                                  termVector)
                    {
                        OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms
                    };
                }
                else
                {
                    f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector)
                    {
                        OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms
                    };
                }

                doc.Add(f);
            }
        }
Example #10
        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
        {
            string     fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
            IndexInput input    = directory.OpenInput(fileName, iocontext);

            bool success = false;

            try
            {
                CodecUtil.CheckHeader(input, Lucene42FieldInfosFormat.CODEC_NAME,
                                      Lucene42FieldInfosFormat.FORMAT_START,
                                      Lucene42FieldInfosFormat.FORMAT_CURRENT);

                int         size  = input.ReadVInt32(); //read in the size
                FieldInfo[] infos = new FieldInfo[size];

                for (int i = 0; i < size; i++)
                {
                    string       name            = input.ReadString();
                    int          fieldNumber     = input.ReadVInt32();
                    sbyte        bits            = (sbyte)input.ReadByte();
                    bool         isIndexed       = (bits & Lucene42FieldInfosFormat.IS_INDEXED) != 0;
                    bool         storeTermVector = (bits & Lucene42FieldInfosFormat.STORE_TERMVECTOR) != 0;
                    bool         omitNorms       = (bits & Lucene42FieldInfosFormat.OMIT_NORMS) != 0;
                    bool         storePayloads   = (bits & Lucene42FieldInfosFormat.STORE_PAYLOADS) != 0;
                    IndexOptions indexOptions;
                    if (!isIndexed)
                    {
                        indexOptions = IndexOptions.NONE;
                    }
                    else if ((bits & Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_ONLY;
                    }
                    else if ((bits & Lucene42FieldInfosFormat.OMIT_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS;
                    }
                    else if ((bits & Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
                    }
                    else
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                    }

                    // DV Types are packed in one byte
                    sbyte         val                       = (sbyte)input.ReadByte();
                    DocValuesType docValuesType             = GetDocValuesType(input, (byte)(val & 0x0F));
                    DocValuesType normsType                 = GetDocValuesType(input, (byte)((val.TripleShift(4)) & 0x0F));
                    IDictionary <string, string> attributes = input.ReadStringStringMap();
                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
                                             omitNorms, storePayloads, indexOptions, docValuesType, normsType, attributes.AsReadOnly());
                }

                CodecUtil.CheckEOF(input);
                FieldInfos fieldInfos = new FieldInfos(infos);
                success = true;
                return fieldInfos;
            }
            finally
            {
                if (success)
                {
                    input.Dispose();
                }
                else
                {
                    IOUtils.DisposeWhileHandlingException(input);
                }
            }
        }
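Example #10 packs several per-field booleans plus the index options into a single flag byte, read before the separate doc-values byte. The sketch below shows the shape of that decoding; the bit positions and enum names here are invented for the illustration and do not match the real Lucene42FieldInfosFormat constants.

using System;

[Flags]
public enum FieldBits : byte
{
    // Illustrative bit assignments only; the on-disk format defines its own constants.
    IsIndexed                = 1 << 0,
    StoreTermVector          = 1 << 1,
    StoreOffsetsInPostings   = 1 << 2,
    OmitNorms                = 1 << 4,
    StorePayloads            = 1 << 5,
    OmitTermFreqAndPositions = 1 << 6,
    OmitPositions            = 1 << 7,
}

public enum IndexOptionsSketch
{
    None,
    DocsOnly,
    DocsAndFreqs,
    DocsAndFreqsAndPositions,
    DocsAndFreqsAndPositionsAndOffsets,
}

public static class FieldBitsDecoder
{
    // Mirrors the if/else ladder above: a missing IsIndexed bit wins, the omit flags
    // narrow the options, and offsets-in-postings selects the widest option.
    public static IndexOptionsSketch ToIndexOptions(FieldBits bits)
    {
        if ((bits & FieldBits.IsIndexed) == 0) return IndexOptionsSketch.None;
        if ((bits & FieldBits.OmitTermFreqAndPositions) != 0) return IndexOptionsSketch.DocsOnly;
        if ((bits & FieldBits.OmitPositions) != 0) return IndexOptionsSketch.DocsAndFreqs;
        if ((bits & FieldBits.StoreOffsetsInPostings) != 0) return IndexOptionsSketch.DocsAndFreqsAndPositionsAndOffsets;
        return IndexOptionsSketch.DocsAndFreqsAndPositions;
    }
}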
Example #11
        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
        {
            string     fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene40FieldInfosFormat.FIELD_INFOS_EXTENSION);
            IndexInput input    = directory.OpenInput(fileName, iocontext);

            bool success = false;

            try
            {
                CodecUtil.CheckHeader(input, Lucene40FieldInfosFormat.CODEC_NAME, Lucene40FieldInfosFormat.FORMAT_START, Lucene40FieldInfosFormat.FORMAT_CURRENT);

                int         size  = input.ReadVInt(); //read in the size
                FieldInfo[] infos = new FieldInfo[size];

                for (int i = 0; i < size; i++)
                {
                    string name            = input.ReadString();
                    int    fieldNumber     = input.ReadVInt();
                    byte   bits            = input.ReadByte();
                    bool   isIndexed       = (bits & Lucene40FieldInfosFormat.IS_INDEXED) != 0;
                    bool   storeTermVector = (bits & Lucene40FieldInfosFormat.STORE_TERMVECTOR) != 0;
                    bool   omitNorms       = (bits & Lucene40FieldInfosFormat.OMIT_NORMS) != 0;
                    bool   storePayloads   = (bits & Lucene40FieldInfosFormat.STORE_PAYLOADS) != 0;
                    FieldInfo.IndexOptions indexOptions;
                    if (!isIndexed)
                    {
                        indexOptions = default(FieldInfo.IndexOptions);
                    }
                    else if ((bits & Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
                    {
                        indexOptions = FieldInfo.IndexOptions.DOCS_ONLY;
                    }
                    else if ((bits & Lucene40FieldInfosFormat.OMIT_POSITIONS) != 0)
                    {
                        indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS;
                    }
                    else if ((bits & Lucene40FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS) != 0)
                    {
                        indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
                    }
                    else
                    {
                        indexOptions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                    }

                    // LUCENE-3027: past indices were able to write
                    // storePayloads=true when omitTFAP is also true,
                    // which is invalid.  We correct that, here:
                    if (isIndexed && indexOptions.CompareTo(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0)
                    {
                        storePayloads = false;
                    }
                    // DV Types are packed in one byte
                    byte val = input.ReadByte();
                    LegacyDocValuesType          oldValuesType = GetDocValuesType((sbyte)(val & 0x0F));
                    LegacyDocValuesType          oldNormsType  = GetDocValuesType((sbyte)(((int)((uint)val >> 4)) & 0x0F));
                    IDictionary <string, string> attributes    = input.ReadStringStringMap();
                    if (oldValuesType.Mapping != null)
                    {
                        attributes[LEGACY_DV_TYPE_KEY] = oldValuesType.Name;
                    }
                    if (oldNormsType.Mapping != null)
                    {
                        if (oldNormsType.Mapping != FieldInfo.DocValuesType_e.NUMERIC)
                        {
                            throw new CorruptIndexException("invalid norm type: " + oldNormsType + " (resource=" + input + ")");
                        }
                        attributes[LEGACY_NORM_TYPE_KEY] = oldNormsType.Name;
                    }
                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, oldValuesType.Mapping, oldNormsType.Mapping, attributes);
                }
                CodecUtil.CheckEOF(input);
                FieldInfos fieldInfos = new FieldInfos(infos);
                success = true;
                return fieldInfos;
            }
            finally
            {
                if (success)
                {
                    input.Dispose();
                }
                else
                {
                    IOUtils.CloseWhileHandlingException(input);
                }
            }
        }
Example #12
        private SegmentInfo ReadUpgradedSegmentInfo(string name, Directory dir, IndexInput input)
        {
            CodecUtil.CheckHeader(input, Lucene3xSegmentInfoFormat.UPGRADED_SI_CODEC_NAME, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_START, Lucene3xSegmentInfoFormat.UPGRADED_SI_VERSION_CURRENT);
            string version = input.ReadString();

            int docCount = input.ReadInt();

            IDictionary<string, string> attributes = input.ReadStringStringMap();

            bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;

            IDictionary<string, string> diagnostics = input.ReadStringStringMap();

            ISet<string> files = input.ReadStringSet();

            SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, null, diagnostics, CollectionsHelper.UnmodifiableMap(attributes));
            info.Files = files;
            return info;
        }
Example #13
        /// <summary>
        /// reads from legacy 3.x segments_N </summary>
        private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, IndexInput input)
        {
            // check that it is a format we can understand
            if (format > Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS)
            {
                throw new IndexFormatTooOldException(input, format, Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS, Lucene3xSegmentInfoFormat.FORMAT_3_1);
            }
            if (format < Lucene3xSegmentInfoFormat.FORMAT_3_1)
            {
                throw new IndexFormatTooNewException(input, format, Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS, Lucene3xSegmentInfoFormat.FORMAT_3_1);
            }
            string version;
            if (format <= Lucene3xSegmentInfoFormat.FORMAT_3_1)
            {
                version = input.ReadString();
            }
            else
            {
                version = null;
            }

            string name = input.ReadString();

            int docCount = input.ReadInt();
            long delGen = input.ReadLong();

            int docStoreOffset = input.ReadInt();
            IDictionary<string, string> attributes = new Dictionary<string, string>();

            // parse the docstore stuff and shove it into attributes
            string docStoreSegment;
            bool docStoreIsCompoundFile;
            if (docStoreOffset != -1)
            {
                docStoreSegment = input.ReadString();
                docStoreIsCompoundFile = input.ReadByte() == SegmentInfo.YES;
                attributes[Lucene3xSegmentInfoFormat.DS_OFFSET_KEY] = Convert.ToString(docStoreOffset);
                attributes[Lucene3xSegmentInfoFormat.DS_NAME_KEY] = docStoreSegment;
                attributes[Lucene3xSegmentInfoFormat.DS_COMPOUND_KEY] = Convert.ToString(docStoreIsCompoundFile);
            }
            else
            {
                docStoreSegment = name;
                docStoreIsCompoundFile = false;
            }

            // pre-4.0 indexes write a byte if there is a single norms file
            byte b = input.ReadByte();

            //System.out.println("version=" + version + " name=" + name + " docCount=" + docCount + " delGen=" + delGen + " dso=" + docStoreOffset + " dss=" + docStoreSegment + " dssCFs=" + docStoreIsCompoundFile + " b=" + b + " format=" + format);

            Debug.Assert(1 == b, "expected 1 but was: " + b + " format: " + format);
            int numNormGen = input.ReadInt();
            IDictionary<int, long> normGen;
            if (numNormGen == SegmentInfo.NO)
            {
                normGen = null;
            }
            else
            {
                normGen = new Dictionary<int, long>();
                for (int j = 0; j < numNormGen; j++)
                {
                    normGen[j] = input.ReadLong();
                }
            }
            bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;

            int delCount = input.ReadInt();
            Debug.Assert(delCount <= docCount);

            bool hasProx = input.ReadByte() == 1;

            IDictionary<string, string> diagnostics = input.ReadStringStringMap();

            if (format <= Lucene3xSegmentInfoFormat.FORMAT_HAS_VECTORS)
            {
                // NOTE: unused
                int hasVectors = input.ReadByte();
            }

            // Replicate logic from 3.x's SegmentInfo.files():
            ISet<string> files = new HashSet<string>();
            if (isCompoundFile)
            {
                files.Add(IndexFileNames.SegmentFileName(name, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
            }
            else
            {
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xFieldInfosReader.FIELD_INFOS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.FREQ_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.PROX_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.TERMS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xNormsProducer.NORMS_EXTENSION));
            }

            if (docStoreOffset != -1)
            {
                if (docStoreIsCompoundFile)
                {
                    files.Add(IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION));
                }
                else
                {
                    files.Add(IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
                    files.Add(IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
                    AddIfExists(dir, files, IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
                    AddIfExists(dir, files, IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
                    AddIfExists(dir, files, IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
                }
            }
            else if (!isCompoundFile)
            {
                files.Add(IndexFileNames.SegmentFileName(name, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
                files.Add(IndexFileNames.SegmentFileName(name, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
            }

            // parse the normgen stuff and shove it into attributes
            if (normGen != null)
            {
                attributes[Lucene3xSegmentInfoFormat.NORMGEN_KEY] = Convert.ToString(numNormGen);
                foreach (KeyValuePair<int, long> ent in normGen)
                {
                    long gen = ent.Value;
                    if (gen >= SegmentInfo.YES)
                    {
                        // Definitely a separate norm file, with generation:
                        files.Add(IndexFileNames.FileNameFromGeneration(name, "s" + ent.Key, gen));
                        attributes[Lucene3xSegmentInfoFormat.NORMGEN_PREFIX + ent.Key] = Convert.ToString(gen);
                    }
                    else if (gen == SegmentInfo.NO)
                    {
                        // No separate norm
                    }
                    else
                    {
                        // We should have already hit indexformat too old exception
                        Debug.Assert(false);
                    }
                }
            }

            SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, null, diagnostics, CollectionsHelper.UnmodifiableMap(attributes));
            info.Files = files;

            SegmentCommitInfo infoPerCommit = new SegmentCommitInfo(info, delCount, delGen, -1);
            return infoPerCommit;
        }
Example #14
        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
        {
            string     fileName = IndexFileNames.SegmentFileName(segmentName, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION);
            IndexInput input    = directory.OpenInput(fileName, iocontext);

            try
            {
                int format = input.ReadVInt32();

                if (format > FORMAT_MINIMUM)
                {
                    throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
                }
                if (format < PreFlexRWFieldInfosWriter.FORMAT_CURRENT && format != PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW)
                {
                    throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
                }

                int         size  = input.ReadVInt32(); //read in the size
                FieldInfo[] infos = new FieldInfo[size];

                for (int i = 0; i < size; i++)
                {
                    string       name            = input.ReadString();
                    int          fieldNumber     = format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW ? input.ReadInt32() : i;
                    byte         bits            = input.ReadByte();
                    bool         isIndexed       = (bits & PreFlexRWFieldInfosWriter.IS_INDEXED) != 0;
                    bool         storeTermVector = (bits & PreFlexRWFieldInfosWriter.STORE_TERMVECTOR) != 0;
                    bool         omitNorms       = (bits & PreFlexRWFieldInfosWriter.OMIT_NORMS) != 0;
                    bool         storePayloads   = (bits & PreFlexRWFieldInfosWriter.STORE_PAYLOADS) != 0;
                    IndexOptions indexOptions;
                    if (!isIndexed)
                    {
                        indexOptions = IndexOptions.NONE;
                    }
                    else if ((bits & PreFlexRWFieldInfosWriter.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_ONLY;
                    }
                    else if ((bits & PreFlexRWFieldInfosWriter.OMIT_POSITIONS) != 0)
                    {
                        if (format <= PreFlexRWFieldInfosWriter.FORMAT_OMIT_POSITIONS)
                        {
                            indexOptions = IndexOptions.DOCS_AND_FREQS;
                        }
                        else
                        {
                            throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format + " (resource: " + input + ")");
                        }
                    }
                    else
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                    }

                    // LUCENE-3027: past indices were able to write
                    // storePayloads=true when omitTFAP is also true,
                    // which is invalid.  We correct that, here:
                    if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
                    {
                        storePayloads = false;
                    }

                    DocValuesType normType = isIndexed && !omitNorms ? DocValuesType.NUMERIC : DocValuesType.NONE;
                    if (format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW && normType != DocValuesType.NONE)
                    {
                        // RW can have norms but doesn't write them
                        normType = input.ReadByte() != 0 ? DocValuesType.NUMERIC : DocValuesType.NONE;
                    }

                    infos[i] = new FieldInfo(name,
                                             isIndexed,
                                             fieldNumber,
                                             storeTermVector,
                                             omitNorms,
                                             storePayloads,
                                             indexOptions,
                                             DocValuesType.NONE,
                                             normType,
                                             null);
                }

                if (input.GetFilePointer() != input.Length)
                {
                    throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.GetFilePointer() + " vs size " + input.Length + " (resource: " + input + ")");
                }
                return new FieldInfos(infos);
            }
            finally
            {
                input.Dispose();
            }
        }
Example #15
        /// <summary> Construct a new SegmentInfo instance by reading a
        /// previously saved SegmentInfo from input.
        ///
        /// </summary>
        /// <param name="dir">directory to load from
        /// </param>
        /// <param name="format">format of the segments info file
        /// </param>
        /// <param name="input">input handle to read segment info from
        /// </param>
        internal SegmentInfo(Directory dir, int format, IndexInput input)
        {
            this.dir = dir;
            name     = input.ReadString();
            docCount = input.ReadInt();
            if (format <= SegmentInfos.FORMAT_LOCKLESS)
            {
                delGen = input.ReadLong();
                if (format <= SegmentInfos.FORMAT_SHARED_DOC_STORE)
                {
                    docStoreOffset = input.ReadInt();
                    if (docStoreOffset != -1)
                    {
                        docStoreSegment        = input.ReadString();
                        docStoreIsCompoundFile = (1 == input.ReadByte());
                    }
                    else
                    {
                        docStoreSegment        = name;
                        docStoreIsCompoundFile = false;
                    }
                }
                else
                {
                    docStoreOffset         = -1;
                    docStoreSegment        = name;
                    docStoreIsCompoundFile = false;
                }
                if (format <= SegmentInfos.FORMAT_SINGLE_NORM_FILE)
                {
                    hasSingleNormFile = (1 == input.ReadByte());
                }
                else
                {
                    hasSingleNormFile = false;
                }
                int numNormGen = input.ReadInt();
                if (numNormGen == NO)
                {
                    normGen = null;
                }
                else
                {
                    normGen = new long[numNormGen];
                    for (int j = 0; j < numNormGen; j++)
                    {
                        normGen[j] = input.ReadLong();
                    }
                }
                isCompoundFile = (sbyte)input.ReadByte();
                preLockless    = (isCompoundFile == CHECK_DIR);
                if (format <= SegmentInfos.FORMAT_DEL_COUNT)
                {
                    delCount = input.ReadInt();
                    System.Diagnostics.Debug.Assert(delCount <= docCount);
                }
                else
                {
                    delCount = -1;
                }
                if (format <= SegmentInfos.FORMAT_HAS_PROX)
                {
                    hasProx = input.ReadByte() == 1;
                }
                else
                {
                    hasProx = true;
                }

                if (format <= SegmentInfos.FORMAT_DIAGNOSTICS)
                {
                    diagnostics = input.ReadStringStringMap();
                }
                else
                {
                    diagnostics = new Dictionary <string, string>();
                }
            }
            else
            {
                delGen                 = CHECK_DIR;
                normGen                = null;
                isCompoundFile         = (sbyte)(CHECK_DIR);
                preLockless            = true;
                hasSingleNormFile      = false;
                docStoreOffset         = -1;
                docStoreIsCompoundFile = false;
                docStoreSegment        = null;
                delCount               = -1;
                hasProx                = true;
                diagnostics            = new Dictionary <string, string>();
            }
        }
Example #16
        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
        {
            string     fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
            IndexInput input    = directory.OpenInput(fileName, iocontext);

            bool success = false;

            try
            {
                int format = input.ReadVInt32();

                if (format > FORMAT_MINIMUM)
                {
                    throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }
                if (format < FORMAT_CURRENT)
                {
                    throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }

                int         size  = input.ReadVInt32(); //read in the size
                FieldInfo[] infos = new FieldInfo[size];

                for (int i = 0; i < size; i++)
                {
                    string       name            = input.ReadString();
                    int          fieldNumber     = i;
                    byte         bits            = input.ReadByte();
                    bool         isIndexed       = (bits & IS_INDEXED) != 0;
                    bool         storeTermVector = (bits & STORE_TERMVECTOR) != 0;
                    bool         omitNorms       = (bits & OMIT_NORMS) != 0;
                    bool         storePayloads   = (bits & STORE_PAYLOADS) != 0;
                    IndexOptions indexOptions;
                    if (!isIndexed)
                    {
                        indexOptions = IndexOptions.NONE;
                    }
                    else if ((bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_ONLY;
                    }
                    else if ((bits & OMIT_POSITIONS) != 0)
                    {
                        if (format <= FORMAT_OMIT_POSITIONS)
                        {
                            indexOptions = IndexOptions.DOCS_AND_FREQS;
                        }
                        else
                        {
                            throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format + " (resource: " + input + ")");
                        }
                    }
                    else
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                    }

                    // LUCENE-3027: past indices were able to write
                    // storePayloads=true when omitTFAP is also true,
                    // which is invalid.  We correct that, here:
                    if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
                    {
                        storePayloads = false;
                    }
                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
                                             omitNorms, storePayloads, indexOptions, DocValuesType.NONE,
                                             isIndexed && !omitNorms ? DocValuesType.NUMERIC : DocValuesType.NONE,
                                             Collections.EmptyMap <string, string>());
                }

                if (input.GetFilePointer() != input.Length)
                {
                    throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.GetFilePointer() + " vs size " + input.Length + " (resource: " + input + ")");
                }
                FieldInfos fieldInfos = new FieldInfos(infos);
                success = true;
                return fieldInfos;
            }
            finally
            {
                if (success)
                {
                    input.Dispose();
                }
                else
                {
                    IOUtils.DisposeWhileHandlingException(input);
                }
            }
        }
Example #17
        /// <summary>
        /// reads from legacy 3.x segments_N </summary>
        private SegmentCommitInfo ReadLegacySegmentInfo(Directory dir, int format, IndexInput input)
        {
            // check that it is a format we can understand
            if (format > Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS)
            {
                throw new IndexFormatTooOldException(input, format, Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS, Lucene3xSegmentInfoFormat.FORMAT_3_1);
            }
            if (format < Lucene3xSegmentInfoFormat.FORMAT_3_1)
            {
                throw new IndexFormatTooNewException(input, format, Lucene3xSegmentInfoFormat.FORMAT_DIAGNOSTICS, Lucene3xSegmentInfoFormat.FORMAT_3_1);
            }
            string version;

            if (format <= Lucene3xSegmentInfoFormat.FORMAT_3_1)
            {
                version = input.ReadString();
            }
            else
            {
                version = null;
            }

            string name = input.ReadString();

            int  docCount = input.ReadInt();
            long delGen   = input.ReadLong();

            int docStoreOffset = input.ReadInt();
            IDictionary <string, string> attributes = new Dictionary <string, string>();

            // parse the docstore stuff and shove it into attributes
            string docStoreSegment;
            bool   docStoreIsCompoundFile;

            if (docStoreOffset != -1)
            {
                docStoreSegment        = input.ReadString();
                docStoreIsCompoundFile = input.ReadByte() == SegmentInfo.YES;
                attributes[Lucene3xSegmentInfoFormat.DS_OFFSET_KEY]   = Convert.ToString(docStoreOffset);
                attributes[Lucene3xSegmentInfoFormat.DS_NAME_KEY]     = docStoreSegment;
                attributes[Lucene3xSegmentInfoFormat.DS_COMPOUND_KEY] = Convert.ToString(docStoreIsCompoundFile);
            }
            else
            {
                docStoreSegment        = name;
                docStoreIsCompoundFile = false;
            }

            // pre-4.0 indexes write a byte if there is a single norms file
            byte b = input.ReadByte();

            //System.out.println("version=" + version + " name=" + name + " docCount=" + docCount + " delGen=" + delGen + " dso=" + docStoreOffset + " dss=" + docStoreSegment + " dssCFs=" + docStoreIsCompoundFile + " b=" + b + " format=" + format);

            Debug.Assert(1 == b, "expected 1 but was: " + b + " format: " + format);
            int numNormGen = input.ReadInt();
            IDictionary <int, long> normGen;

            if (numNormGen == SegmentInfo.NO)
            {
                normGen = null;
            }
            else
            {
                normGen = new Dictionary <int, long>();
                for (int j = 0; j < numNormGen; j++)
                {
                    normGen[j] = input.ReadLong();
                }
            }
            bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;

            int delCount = input.ReadInt();

            Debug.Assert(delCount <= docCount);

            bool hasProx = input.ReadByte() == 1;

            IDictionary <string, string> diagnostics = input.ReadStringStringMap();

            if (format <= Lucene3xSegmentInfoFormat.FORMAT_HAS_VECTORS)
            {
                // NOTE: unused
                int hasVectors = input.ReadByte();
            }

            // Replicate logic from 3.x's SegmentInfo.files():
            ISet <string> files = new HashSet <string>();

            if (isCompoundFile)
            {
                files.Add(IndexFileNames.SegmentFileName(name, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
            }
            else
            {
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xFieldInfosReader.FIELD_INFOS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.FREQ_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.PROX_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.TERMS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xNormsProducer.NORMS_EXTENSION));
            }

            if (docStoreOffset != -1)
            {
                if (docStoreIsCompoundFile)
                {
                    files.Add(IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION));
                }
                else
                {
                    files.Add(IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
                    files.Add(IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
                    AddIfExists(dir, files, IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
                    AddIfExists(dir, files, IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
                    AddIfExists(dir, files, IndexFileNames.SegmentFileName(docStoreSegment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
                }
            }
            else if (!isCompoundFile)
            {
                files.Add(IndexFileNames.SegmentFileName(name, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
                files.Add(IndexFileNames.SegmentFileName(name, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
                AddIfExists(dir, files, IndexFileNames.SegmentFileName(name, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
            }

            // parse the normgen stuff and shove it into attributes
            if (normGen != null)
            {
                attributes[Lucene3xSegmentInfoFormat.NORMGEN_KEY] = Convert.ToString(numNormGen);
                foreach (KeyValuePair <int, long> ent in normGen)
                {
                    long gen = ent.Value;
                    if (gen >= SegmentInfo.YES)
                    {
                        // Definitely a separate norm file, with generation:
                        files.Add(IndexFileNames.FileNameFromGeneration(name, "s" + ent.Key, gen));
                        attributes[Lucene3xSegmentInfoFormat.NORMGEN_PREFIX + ent.Key] = Convert.ToString(gen);
                    }
                    else if (gen == SegmentInfo.NO)
                    {
                        // No separate norm
                    }
                    else
                    {
                        // We should have already hit indexformat too old exception
                        Debug.Assert(false);
                    }
                }
            }

            SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, null, diagnostics, CollectionsHelper.UnmodifiableMap(attributes));

            info.Files = files;

            SegmentCommitInfo infoPerCommit = new SegmentCommitInfo(info, delCount, delGen, -1);

            return infoPerCommit;
        }