Example #1
 protected virtual void Dispose(bool disposing)
 {
     if (disposing)
     {
         input?.Dispose(); // LUCENENET specific - call dispose on input
     }
 }
Example #2
 protected override void Dispose(bool disposing)
 {
     if (disposing)
     {
         input?.Dispose();
     }
 }
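For context, Examples #1 and #2 only show the managed-resource branch; in the standard .NET dispose pattern, a public non-virtual Dispose() forwards to this overload. A minimal sketch of that surrounding pattern follows (the class and field names are illustrative, not taken from any example on this page):

 public class WrappedInputHolder : IDisposable
 {
     private IndexInput input; // hypothetical wrapped resource

     // Public entry point: dispose deterministically and suppress finalization.
     public void Dispose()
     {
         Dispose(true);
         GC.SuppressFinalize(this);
     }

     protected virtual void Dispose(bool disposing)
     {
         if (disposing)
         {
             // Only touch managed objects when called from Dispose(), not from a finalizer.
             input?.Dispose();
             input = null;
         }
     }
 }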
Example #3
        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);

            if (disposing)
            {
                proxStream?.Dispose();
            }
        }
Example #4
        /// <summary>
        /// When the IndexOutput is closed, we ensure that the file is flushed and written locally, and also persisted to master storage.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            _fileMutex.WaitOne();

            try
            {
                var fileName = _name;

                //make sure it's all written out
                //we are only checking for null here in case Close is called multiple times
                if (_cacheDirIndexOutput != null)
                {
                    _cacheDirIndexOutput.Flush();
                    _cacheDirIndexOutput.Dispose();

                    IndexInput cacheInput = null;
                    try
                    {
                        cacheInput = CacheDirectory.OpenInput(fileName);
                    }
                    catch (IOException)
                    {
                        // This occurs if the file doesn't exist. We previously threw in that case, so we keep
                        // doing that for now; it is quicker than checking for existence before opening.
                        throw;
                    }

                    if (cacheInput != null)
                    {
                        IndexOutput masterOutput = null;
                        try
                        {
                            masterOutput = MasterDirectory.CreateOutput(fileName);
                            cacheInput.CopyTo(masterOutput, fileName);
                        }
                        finally
                        {
                            masterOutput?.Dispose();
                            cacheInput?.Dispose();
                        }
                    }

#if FULLDEBUG
                    Trace.WriteLine($"CLOSED WRITESTREAM {_name}");
#endif

                    // clean up
                    _cacheDirIndexOutput = null;
                }

                GC.SuppressFinalize(this);
            }
            finally
            {
                _fileMutex.ReleaseMutex();
            }
        }
        private static void UnidirectionalSync(AzureDirectory sourceDirectory, Directory destinationDirectory)
        {
            var sourceFiles = sourceDirectory.ListAll();

            var fileNameFilter = IndexFileNameFilter.Filter;

            byte[] buffer = new byte[16384];

            foreach (string sourceFile in sourceFiles)
            {
                // only copy file if it is accepted by Lucene's default filter
                // and it does not already exist (except for segment map files, we always want those)
                if (fileNameFilter.Accept(null, sourceFile) && (!destinationDirectory.FileExists(sourceFile) || sourceFile.StartsWith("segment")))
                {
                    IndexOutput indexOutput = null;
                    IndexInput  indexInput  = null;
                    try
                    {
                        indexOutput = destinationDirectory.CreateOutput(sourceFile);
                        indexInput  = sourceDirectory.OpenInput(sourceFile);

                        long length   = indexInput.Length();
                        long position = 0;
                        while (position < length)
                        {
                            int bytesToRead = position + 16384L > length ? (int)(length - position) : 16384;
                            indexInput.ReadBytes(buffer, 0, bytesToRead);
                            indexOutput.WriteBytes(buffer, bytesToRead);

                            position += bytesToRead;
                        }
                    }
                    finally
                    {
                        try
                        {
                            indexOutput?.Dispose();
                        }
                        finally
                        {
                            indexInput?.Dispose();
                        }
                    }
                }
            }

            // we'll remove old files from both AzureDirectory's cache directory, as well as our destination directory
            // (only when older than 45 minutes - old files may still have active searches on them so we need a margin)
            var referenceTimestamp = LuceneTimestampFromDateTime(DateTime.UtcNow.AddMinutes(-45));

            // remove old files from AzureDirectory cache directory
            RemoveOldFiles(sourceDirectory.CacheDirectory, sourceFiles, referenceTimestamp);

            // remove old files from destination directory
            RemoveOldFiles(destinationDirectory, sourceFiles, referenceTimestamp);
        }
Example #6
 protected override void Dispose(bool disposing)
 {
     if (disposing)
     {
         try
         {
             IOUtils.Dispose(openFiles);
         }
         finally
         {
             norms.Clear();
             openFiles.Clear();
             singleNormStream?.Dispose(); // LUCENENET: Dispose singleNormStream and set to null
             singleNormStream = null;
         }
     }
 }
Example #7
        public virtual void TestOverflow()
        {
            BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("testOverflow"));

            if (dir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)dir).Throttling = Throttling.NEVER;
            }
            int blockBits = TestUtil.NextInt32(Random, 14, 28);
            int blockSize = 1 << blockBits;
            var arr       = new byte[TestUtil.NextInt32(Random, blockSize / 2, blockSize * 2)];

            for (int i = 0; i < arr.Length; ++i)
            {
                arr[i] = (byte)(sbyte)i;
            }
            long numBytes = (1L << 31) + TestUtil.NextInt32(Random, 1, blockSize * 3);
            var  p        = new PagedBytes(blockBits);
            var  @out     = dir.CreateOutput("foo", IOContext.DEFAULT);

            for (long i = 0; i < numBytes;)
            {
                Assert.AreEqual(i, @out.GetFilePointer());
                int len = (int)Math.Min(arr.Length, numBytes - i);
                @out.WriteBytes(arr, len);
                i += len;
            }
            Assert.AreEqual(numBytes, @out.GetFilePointer());
            @out.Dispose();
            IndexInput @in = dir.OpenInput("foo", IOContext.DEFAULT);

            p.Copy(@in, numBytes);
            PagedBytes.Reader reader = p.Freeze(Random.NextBoolean());

            foreach (long offset in new long[] { 0L, int.MaxValue, numBytes - 1, TestUtil.NextInt64(Random, 1, numBytes - 2) })
            {
                BytesRef b = new BytesRef();
                reader.FillSlice(b, offset, 1);
                Assert.AreEqual(arr[(int)(offset % arr.Length)], b.Bytes[b.Offset]);
            }
            @in.Dispose();
            dir.Dispose();
        }
Example #8
        protected override void Dispose(bool disposing)
        {
            _fileMutex.WaitOne();
            try
            {
#if FULLDEBUG
                Trace.WriteLine($"CLOSED READSTREAM local {_name}");
#endif
                _indexInput.Dispose();
                _indexInput     = null;
                _azureDirectory = null;
                _blobContainer  = null;
                _blob           = null;
                GC.SuppressFinalize(this);
            }
            finally
            {
                _fileMutex.ReleaseMutex();
            }
        }
Example #9
 protected override void Dispose(bool disposing)
 {
     if (disposing)
     {
         try
         {
             if (freqIn != null)
             {
                 freqIn.Dispose();
             }
         }
         finally
         {
             if (proxIn != null)
             {
                 proxIn.Dispose();
             }
         }
     }
 }
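Example #9 nests try/finally so that a failure while disposing freqIn still disposes proxIn. The IOUtils helper used in Example #6 expresses the same intent more compactly; a hedged equivalent, assuming the overload of IOUtils.Dispose that accepts multiple IDisposable arguments:

 protected override void Dispose(bool disposing)
 {
     if (disposing)
     {
         // Disposes each argument in turn, deferring any exception until all have been attempted.
         IOUtils.Dispose(freqIn, proxIn);
     }
 }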
Example #10
        public virtual void TestAddExternalFile()
        {
            CreateSequenceFile(Dir, "d1", (sbyte)0, 15);

            Directory newDir = NewDirectory();
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
            Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
            csw.Dispose();

            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
            IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
            IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
            AssertSameStreams("d1", expected, actual);
            AssertSameSeekBehavior("d1", expected, actual);
            expected.Dispose();
            actual.Dispose();
            csr.Dispose();

            newDir.Dispose();
        }
        protected override void Dispose(bool disposing)
        {
            _fileMutex.WaitOne();
            try
            {
#if FULLDEBUG
                Debug.WriteLine(String.Format("CLOSED READSTREAM local {0}", _name));
#endif
                _indexInput.Dispose();
                _indexInput     = null;
                _azureDirectory = null;
                _blobContainer  = null;
                _blob           = null;
                GC.SuppressFinalize(this);
            }
            finally
            {
                _fileMutex.ReleaseMutex();
            }
        }
        public override SegmentInfo Read(Directory dir, string segment, IOContext context)
        {
            string     fileName = IndexFileNames.SegmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
            IndexInput input    = dir.OpenInput(fileName, context);
            bool       success  = false;

            try
            {
                CodecUtil.CheckHeader(input, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_START, Lucene40SegmentInfoFormat.VERSION_CURRENT);
                string version  = input.ReadString();
                int    docCount = input.ReadInt32();
                if (docCount < 0)
                {
                    throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
                }
                bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;
                IDictionary <string, string> diagnostics = input.ReadStringStringMap();
                input.ReadStringStringMap(); // read deprecated attributes
                ISet <string> files = input.ReadStringSet();

                CodecUtil.CheckEOF(input);

                SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics);
                si.SetFiles(files);

                success = true;

                return si;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(input);
                }
                else
                {
                    input.Dispose();
                }
            }
        }
Example #13
        public virtual void TestReadPastEOF()
        {
            SetUp_2();
            var        cr  = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), false);
            IndexInput @is = cr.OpenInput("f2", NewIOContext(Random));

            @is.Seek(@is.Length - 10);
            var b = new byte[100];

            @is.ReadBytes(b, 0, 10);

            try
            {
                @is.ReadByte();
                Assert.Fail("Single byte read past end of file");
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                /* success */
                //System.out.println("SUCCESS: single byte read past end of file: " + e);
            }

            @is.Seek(@is.Length - 10);
            try
            {
                @is.ReadBytes(b, 0, 50);
                Assert.Fail("Block read past end of file");
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                /* success */
                //System.out.println("SUCCESS: block read past end of file: " + e);
            }

            @is.Dispose();
            cr.Dispose();
        }
Example #14
        public virtual void TestSingleFile()
        {
            int[] data = new int[] { 0, 1, 10, 100 };
            for (int i = 0; i < data.Length; i++)
            {
                string name = "t" + data[i];
                CreateSequenceFile(Dir, name, (sbyte)0, data[i]);
                CompoundFileDirectory csw = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random), true);
                Dir.Copy(csw, name, name, NewIOContext(Random));
                csw.Dispose();

                CompoundFileDirectory csr      = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random), false);
                IndexInput            expected = Dir.OpenInput(name, NewIOContext(Random));
                IndexInput            actual   = csr.OpenInput(name, NewIOContext(Random));
                AssertSameStreams(name, expected, actual);
                AssertSameSeekBehavior(name, expected, actual);
                expected.Dispose();
                actual.Dispose();
                csr.Dispose();
            }
        }
Example #15
        private void Demo_FSIndexInputBug(Directory fsdir, string file)
        {
            // Setup the test file - we need more than 1024 bytes
            IndexOutput os = fsdir.CreateOutput(file, IOContext.DEFAULT);

            for (int i = 0; i < 2000; i++)
            {
                os.WriteByte((byte)(sbyte)i);
            }
            os.Dispose();

            IndexInput @in = fsdir.OpenInput(file, IOContext.DEFAULT);

            // this read primes the buffer in IndexInput
            @in.ReadByte();

            // Close the file
            @in.Dispose();

            // ERROR: this call should fail, but succeeds because the buffer
            // is still filled
            @in.ReadByte();

            // ERROR: this call should fail, but succeeds for some reason as well
            @in.Seek(1099);

            try
            {
                // OK: this call correctly fails. We are now past the 1024 internal
                // buffer, so an actual IO is attempted, which fails
                @in.ReadByte();
                Assert.Fail("expected readByte() to throw exception");
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                // expected exception
            }
        }
Example #16
        public virtual void TestDoubleClose()
        {
            Directory newDir = NewDirectory();
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
            IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
            @out.WriteInt(0);
            @out.Dispose();

            csw.Dispose();
            // close a second time - must have no effect according to IDisposable
            csw.Dispose();

            csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
            IndexInput openInput = csw.OpenInput("d.xyz", NewIOContext(Random()));
            Assert.AreEqual(0, openInput.ReadInt());
            openInput.Dispose();
            csw.Dispose();
            // close a second time - must have no effect according to IDisposable
            csw.Dispose();

            newDir.Dispose();
        }
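TestDoubleClose exercises the IDisposable contract that a second Dispose call must have no effect. A minimal sketch of the guard that typically makes a Dispose(bool) override idempotent (the isDisposed field and output resource are illustrative, not from the test above):

        private bool isDisposed; // illustrative guard field

        protected override void Dispose(bool disposing)
        {
            if (isDisposed)
            {
                return; // second and later calls become no-ops
            }
            isDisposed = true;

            if (disposing)
            {
                output?.Dispose(); // hypothetical wrapped resource
            }
        }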
Example #17
        /// <summary>
        /// Verifies that the code version which wrote the segment is supported. </summary>
        public static void CheckCodeVersion(Directory dir, string segment)
        {
            string     indexStreamFN = IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
            IndexInput idxStream     = dir.OpenInput(indexStreamFN, IOContext.DEFAULT);

            try
            {
                int format = idxStream.ReadInt32();
                if (format < FORMAT_MINIMUM)
                {
                    throw new IndexFormatTooOldException(idxStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }
                if (format > FORMAT_CURRENT)
                {
                    throw new IndexFormatTooNewException(idxStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }
            }
            finally
            {
                idxStream.Dispose();
            }
        }
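As its summary notes, CheckCodeVersion validates the stored-fields format version of a single segment and throws IndexFormatTooOldException or IndexFormatTooNewException on a mismatch. A hedged usage sketch (the segment name "_0" is only illustrative):

        // Throws if the segment was written by an unsupported (too old or too new) format version.
        CheckCodeVersion(dir, "_0"); // "_0" is a typical first segment name, used here for illustration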
Example #18
        internal virtual FST <T> DoTest(int prune1, int prune2, bool allowRandomSuffixSharing)
        {
            if (LuceneTestCase.VERBOSE)
            {
                Console.WriteLine("\nTEST: prune1=" + prune1 + " prune2=" + prune2);
            }

            bool willRewrite = Random.NextBoolean();

            Builder <T> builder = new Builder <T>(InputMode == 0 ? FST.INPUT_TYPE.BYTE1 : FST.INPUT_TYPE.BYTE4, prune1, prune2, prune1 == 0 && prune2 == 0, allowRandomSuffixSharing ? Random.NextBoolean() : true, allowRandomSuffixSharing ? TestUtil.NextInt(Random, 1, 10) : int.MaxValue, Outputs, null, willRewrite, PackedInts.DEFAULT, true, 15);

            if (LuceneTestCase.VERBOSE)
            {
                if (willRewrite)
                {
                    Console.WriteLine("TEST: packed FST");
                }
                else
                {
                    Console.WriteLine("TEST: non-packed FST");
                }
            }

            foreach (InputOutput <T> pair in Pairs)
            {
                if (pair.Output is IList)
                {
                    IList <long>     longValues    = (IList <long>)pair.Output;
                    Builder <object> builderObject = builder as Builder <object>;
                    foreach (long value in longValues)
                    {
                        builderObject.Add(pair.Input, value);
                    }
                }
                else
                {
                    builder.Add(pair.Input, pair.Output);
                }
            }
            FST <T> fst = builder.Finish();

            if (Random.NextBoolean() && fst != null && !willRewrite)
            {
                IOContext   context = LuceneTestCase.NewIOContext(Random);
                IndexOutput @out    = Dir.CreateOutput("fst.bin", context);
                fst.Save(@out);
                @out.Dispose();
                IndexInput @in = Dir.OpenInput("fst.bin", context);
                try
                {
                    fst = new FST <T>(@in, Outputs);
                }
                finally
                {
                    @in.Dispose();
                    Dir.DeleteFile("fst.bin");
                }
            }

            if (LuceneTestCase.VERBOSE && Pairs.Count <= 20 && fst != null)
            {
                TextWriter w = new StreamWriter(new FileStream("out.dot", FileMode.OpenOrCreate), IOUtils.CHARSET_UTF_8);
                Util.toDot(fst, w, false, false);
                w.Close();
                Console.WriteLine("SAVED out.dot");
            }

            if (LuceneTestCase.VERBOSE)
            {
                if (fst == null)
                {
                    Console.WriteLine("  fst has 0 nodes (fully pruned)");
                }
                else
                {
                    Console.WriteLine("  fst has " + fst.NodeCount + " nodes and " + fst.ArcCount + " arcs");
                }
            }

            if (prune1 == 0 && prune2 == 0)
            {
                VerifyUnPruned(InputMode, fst);
            }
            else
            {
                VerifyPruned(InputMode, fst, prune1, prune2);
            }

            return fst;
        }
Example #19
        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
        {
            string     fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
            IndexInput input    = directory.OpenInput(fileName, iocontext);

            bool success = false;

            try
            {
                int format = input.ReadVInt32();

                if (format > FORMAT_MINIMUM)
                {
                    throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }
                if (format < FORMAT_CURRENT)
                {
                    throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }

                int         size  = input.ReadVInt32(); //read in the size
                FieldInfo[] infos = new FieldInfo[size];

                for (int i = 0; i < size; i++)
                {
                    string       name            = input.ReadString();
                    int          fieldNumber     = i;
                    byte         bits            = input.ReadByte();
                    bool         isIndexed       = (bits & IS_INDEXED) != 0;
                    bool         storeTermVector = (bits & STORE_TERMVECTOR) != 0;
                    bool         omitNorms       = (bits & OMIT_NORMS) != 0;
                    bool         storePayloads   = (bits & STORE_PAYLOADS) != 0;
                    IndexOptions indexOptions;
                    if (!isIndexed)
                    {
                        indexOptions = IndexOptions.NONE;
                    }
                    else if ((bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_ONLY;
                    }
                    else if ((bits & OMIT_POSITIONS) != 0)
                    {
                        if (format <= FORMAT_OMIT_POSITIONS)
                        {
                            indexOptions = IndexOptions.DOCS_AND_FREQS;
                        }
                        else
                        {
                            throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format + " (resource: " + input + ")");
                        }
                    }
                    else
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                    }

                    // LUCENE-3027: past indices were able to write
                    // storePayloads=true when omitTFAP is also true,
                    // which is invalid.  We correct that, here:
                    if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
                    {
                        storePayloads = false;
                    }
                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
                                             omitNorms, storePayloads, indexOptions, DocValuesType.NONE,
                                             isIndexed && !omitNorms ? DocValuesType.NUMERIC : DocValuesType.NONE,
                                             Collections.EmptyMap <string, string>());
                }

                if (input.GetFilePointer() != input.Length)
                {
                    throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.GetFilePointer() + " vs size " + input.Length + " (resource: " + input + ")");
                }
                FieldInfos fieldInfos = new FieldInfos(infos);
                success = true;
                return fieldInfos;
            }
            finally
            {
                if (success)
                {
                    input.Dispose();
                }
                else
                {
                    IOUtils.DisposeWhileHandlingException(input);
                }
            }
        }
        public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor,
            IComparer<BytesRef> termComp, String segmentSuffix, IOContext context)
        {
            _termComp = termComp;

            Debug.Assert(indexDivisor == -1 || indexDivisor > 0);

            _input =
                dir.OpenInput(
                    IndexFileNames.SegmentFileName(segment, segmentSuffix,
                        FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION),
                    context);

            var success = false;

            try
            {

                _version = ReadHeader(_input);

                if (_version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM)
                    CodecUtil.ChecksumEntireFile(_input);
                
                indexInterval = _input.ReadInt();
                
                if (indexInterval < 1)
                {
                    throw new CorruptIndexException(String.Format("Invalid indexInterval: {0}, Resource: {1}",
                        indexInterval, _input));
                }

                _indexDivisor = indexDivisor;

                if (indexDivisor < 0)
                {
                    _totalIndexInterval = indexInterval;
                }
                else
                {
                    // In case terms index gets loaded, later, on demand
                    _totalIndexInterval = indexInterval*indexDivisor;
                }

                Debug.Assert(_totalIndexInterval > 0);

                SeekDir(_input, _dirOffset);

                // Read directory
                int numFields = _input.ReadVInt();

                if (numFields < 0)
                    throw new CorruptIndexException(String.Format("Invalid numFields: {0}, Resource: {1}", numFields,
                        _input));

                for (int i = 0; i < numFields; i++)
                {
                    int field = _input.ReadVInt();
                    int numIndexTerms = _input.ReadVInt();
                    if (numIndexTerms < 0)
                        throw new CorruptIndexException(String.Format("Invalid numIndexTerms: {0}, Resource: {1}",
                            numIndexTerms,
                            _input));

                    long termsStart = _input.ReadVLong();
                    long indexStart = _input.ReadVLong();
                    long packedIndexStart = _input.ReadVLong();
                    long packedOffsetsStart = _input.ReadVLong();

                    if (packedIndexStart < indexStart)
                        throw new CorruptIndexException(
                            String.Format(
                                "Invalid packedIndexStart: {0}, IndexStart: {1}, NumIndexTerms: {2}, Resource: {3}",
                                packedIndexStart,
                                indexStart, numIndexTerms, _input));

                    FieldInfo fieldInfo = fieldInfos.FieldInfo(field);

                    try
                    {
                        _fields.Add(fieldInfo,
                            new FieldIndexData(numIndexTerms, indexStart, termsStart, packedIndexStart,
                                packedOffsetsStart, this));
                    }
                    catch (ArgumentException)
                    {
                        throw new CorruptIndexException(String.Format("Duplicate field: {0}, Resource {1}",
                            fieldInfo.Name,
                            _input));
                    }


                }
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(_input);
                }
                if (indexDivisor > 0)
                {
                    _input.Dispose();
                    _input = null;
                    if (success)
                        _indexLoaded = true;

                    _termBytesReader = _termBytes.Freeze(true);
                }
            }
        }
Example #21
 public void Dispose()
 {
     input.Dispose();
 }
Example #22
        public virtual void TestDataInputOutput()
        {
            Random random = Random();

            for (int iter = 0; iter < 5 * RANDOM_MULTIPLIER; iter++)
            {
                BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("testOverflow"));
                if (dir is MockDirectoryWrapper)
                {
                    ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
                }
                int         blockBits = TestUtil.NextInt(random, 1, 20);
                int         blockSize = 1 << blockBits;
                PagedBytes  p         = new PagedBytes(blockBits);
                IndexOutput @out      = dir.CreateOutput("foo", IOContext.DEFAULT);
                int         numBytes  = TestUtil.NextInt(Random(), 2, 10000000);

                byte[] answer = new byte[numBytes];
                Random().NextBytes(answer);
                int written = 0;
                while (written < numBytes)
                {
                    if (Random().Next(10) == 7)
                    {
                        @out.WriteByte(answer[written++]);
                    }
                    else
                    {
                        int chunk = Math.Min(Random().Next(1000), numBytes - written);
                        @out.WriteBytes(answer, written, chunk);
                        written += chunk;
                    }
                }

                @out.Dispose();
                IndexInput input = dir.OpenInput("foo", IOContext.DEFAULT);
                DataInput  @in   = (DataInput)input.Clone();

                p.Copy(input, input.Length());
                PagedBytes.Reader reader = p.Freeze(random.NextBoolean());

                byte[] verify = new byte[numBytes];
                int    read   = 0;
                while (read < numBytes)
                {
                    if (Random().Next(10) == 7)
                    {
                        verify[read++] = @in.ReadByte();
                    }
                    else
                    {
                        int chunk = Math.Min(Random().Next(1000), numBytes - read);
                        @in.ReadBytes(verify, read, chunk);
                        read += chunk;
                    }
                }
                Assert.IsTrue(Arrays.Equals(answer, verify));

                BytesRef slice = new BytesRef();
                for (int iter2 = 0; iter2 < 100; iter2++)
                {
                    int pos = random.Next(numBytes - 1);
                    int len = random.Next(Math.Min(blockSize + 1, numBytes - pos));
                    reader.FillSlice(slice, pos, len);
                    for (int byteUpto = 0; byteUpto < len; byteUpto++)
                    {
                        Assert.AreEqual(answer[pos + byteUpto], (byte)slice.Bytes[slice.Offset + byteUpto]);
                    }
                }
                input.Dispose();
                dir.Dispose();
            }
        }
Example #23
        /// <summary>
        /// Reads the snapshots information from the given <seealso cref="Directory"/>. this
        /// method can be used if the snapshots information is needed, however you
        /// cannot instantiate the deletion policy (because e.g., some other process
        /// keeps a lock on the snapshots directory).
        /// </summary>
        private void LoadPriorSnapshots()
        {
            lock (this)
            {
                long           genLoaded     = -1;
                IOException    ioe           = null;
                IList <string> snapshotFiles = new List <string>();
                foreach (string file in dir.ListAll())
                {
                    if (file.StartsWith(SNAPSHOTS_PREFIX, StringComparison.Ordinal))
                    {
                        long gen = Convert.ToInt64(file.Substring(SNAPSHOTS_PREFIX.Length));
                        if (genLoaded == -1 || gen > genLoaded)
                        {
                            snapshotFiles.Add(file);
                            IDictionary <long, int> m = new Dictionary <long, int>();
                            IndexInput @in            = dir.OpenInput(file, IOContext.DEFAULT);
                            try
                            {
                                CodecUtil.CheckHeader(@in, CODEC_NAME, VERSION_START, VERSION_START);
                                int count = @in.ReadVInt32();
                                for (int i = 0; i < count; i++)
                                {
                                    long commitGen = @in.ReadVInt64();
                                    int  refCount  = @in.ReadVInt32();
                                    m[commitGen] = refCount;
                                }
                            }
                            catch (IOException ioe2)
                            {
                                // Save first exception & throw in the end
                                if (ioe == null)
                                {
                                    ioe = ioe2;
                                }
                            }
                            finally
                            {
                                @in.Dispose();
                            }

                            genLoaded = gen;
                            m_refCounts.Clear();
                            m_refCounts.PutAll(m);
                        }
                    }
                }

                if (genLoaded == -1)
                {
                    // Nothing was loaded...
                    if (ioe != null)
                    {
                        // ... not for lack of trying:
                        throw ioe;
                    }
                }
                else
                {
                    if (snapshotFiles.Count > 1)
                    {
                        // Remove any broken / old snapshot files:
                        string curFileName = SNAPSHOTS_PREFIX + genLoaded;
                        foreach (string file in snapshotFiles)
                        {
                            if (!curFileName.Equals(file, StringComparison.Ordinal))
                            {
                                dir.DeleteFile(file);
                            }
                        }
                    }
                    nextWriteGen = 1 + genLoaded;
                }
            }
        }
Example #24
        internal virtual FST <T> DoTest(int prune1, int prune2, bool allowRandomSuffixSharing)
        {
            if (LuceneTestCase.VERBOSE)
            {
                Console.WriteLine("\nTEST: prune1=" + prune1 + " prune2=" + prune2);
            }

            bool willRewrite = random.NextBoolean();

            Builder <T> builder = new Builder <T>(inputMode == 0 ? FST.INPUT_TYPE.BYTE1 : FST.INPUT_TYPE.BYTE4,
                                                  prune1, prune2,
                                                  prune1 == 0 && prune2 == 0,
                                                  allowRandomSuffixSharing ? random.NextBoolean() : true,
                                                  allowRandomSuffixSharing ? TestUtil.NextInt32(random, 1, 10) : int.MaxValue,
                                                  outputs,
                                                  null,
                                                  willRewrite,
                                                  PackedInt32s.DEFAULT,
                                                  true,
                                                  15);

            if (LuceneTestCase.VERBOSE)
            {
                if (willRewrite)
                {
                    Console.WriteLine("TEST: packed FST");
                }
                else
                {
                    Console.WriteLine("TEST: non-packed FST");
                }
            }

            foreach (InputOutput <T> pair in pairs)
            {
                if (pair.Output is IEnumerable)
                {
                    Builder <object> builderObject = builder as Builder <object>;
                    var values = pair.Output as IEnumerable;
                    foreach (object value in values)
                    {
                        builderObject.Add(pair.Input, value);
                    }
                }
                else
                {
                    builder.Add(pair.Input, pair.Output);
                }
            }
            FST <T> fst = builder.Finish();

            if (random.NextBoolean() && fst != null && !willRewrite)
            {
                IOContext context = LuceneTestCase.NewIOContext(random);
                using (IndexOutput @out = dir.CreateOutput("fst.bin", context))
                {
                    fst.Save(@out);
                }
                IndexInput @in = dir.OpenInput("fst.bin", context);
                try
                {
                    fst = new FST <T>(@in, outputs);
                }
                finally
                {
                    @in.Dispose();
                    dir.DeleteFile("fst.bin");
                }
            }

            if (LuceneTestCase.VERBOSE && pairs.Count <= 20 && fst != null)
            {
                using (TextWriter w = new StreamWriter(new FileStream("out.dot", FileMode.OpenOrCreate), Encoding.UTF8))
                {
                    Util.ToDot(fst, w, false, false);
                }
                Console.WriteLine("SAVED out.dot");
            }

            if (LuceneTestCase.VERBOSE)
            {
                if (fst == null)
                {
                    Console.WriteLine("  fst has 0 nodes (fully pruned)");
                }
                else
                {
                    Console.WriteLine("  fst has " + fst.NodeCount + " nodes and " + fst.ArcCount + " arcs");
                }
            }

            if (prune1 == 0 && prune2 == 0)
            {
                VerifyUnPruned(inputMode, fst);
            }
            else
            {
                VerifyPruned(inputMode, fst, prune1, prune2);
            }

            return fst;
        }
Example #25
        public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info,
            PostingsReaderBase postingsReader, IOContext context,
            String segmentSuffix)
        {
            _postingsReader = postingsReader;

            _input =
                dir.OpenInput(
                    IndexFileNames.SegmentFileName(info.Name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
                    context);

            var success = false;
            try
            {
                _version = ReadHeader(_input);

                // Have PostingsReader init itself
                postingsReader.Init(_input);

                // Read per-field details
                SeekDir(_input, _dirOffset);

                int numFields = _input.ReadVInt();
                if (numFields < 0)
                {
                    throw new CorruptIndexException(String.Format("Invalid number of fields: {0}, Resource: {1}",
                        numFields, _input));
                }

                for (var i = 0; i < numFields; i++)
                {
                    var field = _input.ReadVInt();
                    var numTerms = _input.ReadVLong();

                    Debug.Assert(numTerms >= 0);

                    var termsStartPointer = _input.ReadVLong();
                    var fieldInfo = fieldInfos.FieldInfo(field);
                    var sumTotalTermFreq = fieldInfo.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY
                        ? -1
                        : _input.ReadVLong();
                    var sumDocFreq = _input.ReadVLong();
                    var docCount = _input.ReadVInt();
                    var longsSize = _version >= BlockTermsWriter.VERSION_META_ARRAY ? _input.ReadVInt() : 0;

                    if (docCount < 0 || docCount > info.DocCount)
                    {
                        // #docs with field must be <= #docs
                        throw new CorruptIndexException(
                            String.Format("Invalid DocCount: {0}, MaxDoc: {1}, Resource: {2}", docCount, info.DocCount,
                                _input));
                    }

                    if (sumDocFreq < docCount)
                    {
                        // #postings must be >= #docs with field
                        throw new CorruptIndexException(
                            String.Format("Invalid sumDocFreq: {0}, DocCount: {1}, Resource: {2}", sumDocFreq, docCount,
                                _input));
                    }

                    if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq)
                    {
                        // #positions must be >= #postings
                        throw new CorruptIndexException(
                            String.Format("Invalid sumTotalTermFreq: {0}, sumDocFreq: {1}, Resource: {2}",
                                sumTotalTermFreq, sumDocFreq, _input));
                    }

                    try
                    {
                        _fields.Add(fieldInfo.Name,
                            new FieldReader(fieldInfo, this, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq,
                                docCount,
                                longsSize));
                    }
                    catch (ArgumentException)
                    {
                        throw new CorruptIndexException(String.Format("Duplicate fields: {0}, Resource: {1}",
                            fieldInfo.Name, _input));
                    }

                }
                success = true;
            }
            finally
            {
                if (!success)
                {
                    _input.Dispose();
                }
            }

            _indexReader = indexReader;
        }
        public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor,
            String segmentSuffix, IOContext context)
        {
            _input =
                dir.OpenInput(
                    IndexFileNames.SegmentFileName(segment, segmentSuffix,
                        VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), new IOContext(context, true));
            var success = false;

            Debug.Assert(indexDivisor == -1 || indexDivisor > 0);

            try
            {

                _version = ReadHeader(_input);
                _indexDivisor = indexDivisor;

                if (_version >= VariableGapTermsIndexWriter.VERSION_CHECKSUM)
                    CodecUtil.ChecksumEntireFile(_input);
                
                SeekDir(_input, _dirOffset);

                // Read directory
                var numFields = _input.ReadVInt();
                if (numFields < 0)
                {
                    throw new CorruptIndexException("invalid numFields: " + numFields + " (resource=" + _input + ")");
                }

                for (var i = 0; i < numFields; i++)
                {
                    var field = _input.ReadVInt();
                    var indexStart = _input.ReadVLong();
                    var fieldInfo = fieldInfos.FieldInfo(field);
                    
                    try
                    {
                        _fields.Add(fieldInfo, new FieldIndexData(indexStart, this));
                    }
                    catch (ArgumentException)
                    {
                        throw new CorruptIndexException(String.Format("Duplicate Field: {0}, Resource: {1}",
                            fieldInfo.Name, _input));
                    }
                }
                success = true;
            }
            finally
            {
                if (indexDivisor > 0)
                {
                    _input.Dispose();
                    _input = null;
                    if (success)
                    {
                        _indexLoaded = true;
                    }
                }
            }
        }
Example #27
        // private string segment;

        public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, SegmentInfo info,
                                PostingsReaderBase postingsReader, IOContext context,
                                string segmentSuffix)
        {
            this.postingsReader = postingsReader;

            // this.segment = segment;
            input = dir.OpenInput(IndexFileNames.SegmentFileName(info.Name, segmentSuffix, BlockTermsWriter.TERMS_EXTENSION),
                                  context);

            bool success = false;

            try
            {
                version = ReadHeader(input);

                // Have PostingsReader init itself
                postingsReader.Init(input);

                // Read per-field details
                SeekDir(input, dirOffset);

                int numFields = input.ReadVInt32();
                if (numFields < 0)
                {
                    throw new CorruptIndexException("invalid number of fields: " + numFields + " (resource=" + input + ")");
                }
                for (int i = 0; i < numFields; i++)
                {
                    int  field    = input.ReadVInt32();
                    long numTerms = input.ReadVInt64();
                    Debug.Assert(numTerms >= 0);
                    long      termsStartPointer = input.ReadVInt64();
                    FieldInfo fieldInfo         = fieldInfos.FieldInfo(field);
                    long      sumTotalTermFreq  = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY ? -1 : input.ReadVInt64();
                    long      sumDocFreq        = input.ReadVInt64();
                    int       docCount          = input.ReadVInt32();
                    int       longsSize         = version >= BlockTermsWriter.VERSION_META_ARRAY ? input.ReadVInt32() : 0;
                    if (docCount < 0 || docCount > info.DocCount)
                    { // #docs with field must be <= #docs
                        throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + info.DocCount + " (resource=" + input + ")");
                    }
                    if (sumDocFreq < docCount)
                    {  // #postings must be >= #docs with field
                        throw new CorruptIndexException("invalid sumDocFreq: " + sumDocFreq + " docCount: " + docCount + " (resource=" + input + ")");
                    }
                    if (sumTotalTermFreq != -1 && sumTotalTermFreq < sumDocFreq)
                    { // #positions must be >= #postings
                        throw new CorruptIndexException("invalid sumTotalTermFreq: " + sumTotalTermFreq + " sumDocFreq: " + sumDocFreq + " (resource=" + input + ")");
                    }
                    FieldReader previous = fields.Put(fieldInfo.Name, new FieldReader(this, fieldInfo, numTerms, termsStartPointer, sumTotalTermFreq, sumDocFreq, docCount, longsSize));
                    if (previous != null)
                    {
                        throw new CorruptIndexException("duplicate fields: " + fieldInfo.Name + " (resource=" + input + ")");
                    }
                }
                success = true;
            }
            finally
            {
                if (!success)
                {
                    input.Dispose();
                }
            }

            this.indexReader = indexReader;
        }
Example #28
        public virtual void TestRandomAccessClones()
        {
            SetUp_2();
            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), false);

            // Open two files
            IndexInput e1 = cr.OpenInput("f11", NewIOContext(Random));
            IndexInput e2 = cr.OpenInput("f3", NewIOContext(Random));

            IndexInput a1 = (IndexInput)e1.Clone();
            IndexInput a2 = (IndexInput)e2.Clone();

            // Seek the first pair
            e1.Seek(100);
            a1.Seek(100);
            Assert.AreEqual(100, e1.GetFilePointer());
            Assert.AreEqual(100, a1.GetFilePointer());
            byte be1 = e1.ReadByte();
            byte ba1 = a1.ReadByte();

            Assert.AreEqual(be1, ba1);

            // Now seek the second pair
            e2.Seek(1027);
            a2.Seek(1027);
            Assert.AreEqual(1027, e2.GetFilePointer());
            Assert.AreEqual(1027, a2.GetFilePointer());
            byte be2 = e2.ReadByte();
            byte ba2 = a2.ReadByte();

            Assert.AreEqual(be2, ba2);

            // Now make sure the first one didn't move
            Assert.AreEqual(101, e1.GetFilePointer());
            Assert.AreEqual(101, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            // Now move the first one again, past the buffer length
            e1.Seek(1910);
            a1.Seek(1910);
            Assert.AreEqual(1910, e1.GetFilePointer());
            Assert.AreEqual(1910, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            // Now make sure the second set didn't move
            Assert.AreEqual(1028, e2.GetFilePointer());
            Assert.AreEqual(1028, a2.GetFilePointer());
            be2 = e2.ReadByte();
            ba2 = a2.ReadByte();
            Assert.AreEqual(be2, ba2);

            // Move the second set back, again cross the buffer size
            e2.Seek(17);
            a2.Seek(17);
            Assert.AreEqual(17, e2.GetFilePointer());
            Assert.AreEqual(17, a2.GetFilePointer());
            be2 = e2.ReadByte();
            ba2 = a2.ReadByte();
            Assert.AreEqual(be2, ba2);

            // Finally, make sure the first set didn't move
            Assert.AreEqual(1911, e1.GetFilePointer());
            Assert.AreEqual(1911, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            e1.Dispose();
            e2.Dispose();
            a1.Dispose();
            a2.Dispose();
            cr.Dispose();
        }
Example #29
 public override void Dispose()
 {
     @delegate.Dispose();
 }
                public CoreFieldIndex(FieldIndexData outerInstance, long indexStart, long termsStart, long packedIndexStart, long packedOffsetsStart,
                                      int numIndexTerms)
                {
                    this.termsStart = termsStart;
                    termBytesStart  = outerInstance.outerInstance.termBytes.GetPointer();

                    IndexInput clone = (IndexInput)outerInstance.outerInstance.input.Clone();

                    clone.Seek(indexStart);

                    // -1 is passed to mean "don't load term index", but
                    // if we are then later loaded it's overwritten with
                    // a real value
                    if (Debugging.AssertsEnabled)
                    {
                        Debugging.Assert(outerInstance.outerInstance.indexDivisor > 0);
                    }

                    this.numIndexTerms = 1 + (numIndexTerms - 1) / outerInstance.outerInstance.indexDivisor;

                    if (Debugging.AssertsEnabled)
                    {
                        Debugging.Assert(this.numIndexTerms > 0, "numIndexTerms={0} indexDivisor={1}", numIndexTerms, outerInstance.outerInstance.indexDivisor);
                    }

                    if (outerInstance.outerInstance.indexDivisor == 1)
                    {
                        // Default (load all index terms) is fast -- slurp in the images from disk:

                        try
                        {
                            long numTermBytes = packedIndexStart - indexStart;
                            outerInstance.outerInstance.termBytes.Copy(clone, numTermBytes);

                            // records offsets into main terms dict file
                            termsDictOffsets = PackedInt32s.GetReader(clone);
                            if (Debugging.AssertsEnabled)
                            {
                                Debugging.Assert(termsDictOffsets.Count == numIndexTerms);
                            }

                            // records offsets into byte[] term data
                            termOffsets = PackedInt32s.GetReader(clone);
                            if (Debugging.AssertsEnabled)
                            {
                                Debugging.Assert(termOffsets.Count == 1 + numIndexTerms);
                            }
                        }
                        finally
                        {
                            clone.Dispose();
                        }
                    }
                    else
                    {
                        // Get packed iterators
                        IndexInput clone1 = (IndexInput)outerInstance.outerInstance.input.Clone();
                        IndexInput clone2 = (IndexInput)outerInstance.outerInstance.input.Clone();

                        try
                        {
                            // Subsample the index terms
                            clone1.Seek(packedIndexStart);
                            PackedInt32s.IReaderIterator termsDictOffsetsIter = PackedInt32s.GetReaderIterator(clone1, PackedInt32s.DEFAULT_BUFFER_SIZE);

                            clone2.Seek(packedOffsetsStart);
                            PackedInt32s.IReaderIterator termOffsetsIter = PackedInt32s.GetReaderIterator(clone2, PackedInt32s.DEFAULT_BUFFER_SIZE);

                            // TODO: often we can get by w/ fewer bits per
                            // value, below... but this'd be more complex:
                            // we'd have to try @ fewer bits and then grow
                            // if we overflowed it.

                            PackedInt32s.Mutable termsDictOffsetsM = PackedInt32s.GetMutable(this.numIndexTerms, termsDictOffsetsIter.BitsPerValue, PackedInt32s.DEFAULT);
                            PackedInt32s.Mutable termOffsetsM      = PackedInt32s.GetMutable(this.numIndexTerms + 1, termOffsetsIter.BitsPerValue, PackedInt32s.DEFAULT);

                            termsDictOffsets = termsDictOffsetsM;
                            termOffsets      = termOffsetsM;

                            int upto = 0;

                            long termOffsetUpto = 0;

                            while (upto < this.numIndexTerms)
                            {
                                // main file offset copies straight over
                                termsDictOffsetsM.Set(upto, termsDictOffsetsIter.Next());

                                termOffsetsM.Set(upto, termOffsetUpto);

                                long termOffset     = termOffsetsIter.Next();
                                long nextTermOffset = termOffsetsIter.Next();
                                int  numTermBytes   = (int)(nextTermOffset - termOffset);

                                clone.Seek(indexStart + termOffset);
                                if (Debugging.AssertsEnabled)
                                {
                                    Debugging.Assert(indexStart + termOffset < clone.Length, "indexStart={0} termOffset={1} len={2}", indexStart, termOffset, clone.Length);
                                    Debugging.Assert(indexStart + termOffset + numTermBytes < clone.Length);
                                }

                                outerInstance.outerInstance.termBytes.Copy(clone, numTermBytes);
                                termOffsetUpto += numTermBytes;

                                upto++;
                                if (upto == this.numIndexTerms)
                                {
                                    break;
                                }

                                // skip terms:
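                                // (termOffsetsIter is already one value ahead here: the loop body
                                // read both the current and the next term offset above, so when
                                // subsampling it skips one fewer entry than termsDictOffsetsIter.)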
                                termsDictOffsetsIter.Next();
                                for (int i = 0; i < outerInstance.outerInstance.indexDivisor - 2; i++)
                                {
                                    termOffsetsIter.Next();
                                    termsDictOffsetsIter.Next();
                                }
                            }
                            termOffsetsM.Set(upto, termOffsetUpto);
                        }
                        finally
                        {
                            clone1.Dispose();
                            clone2.Dispose();
                            clone.Dispose();
                        }
                    }
                }
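
For readers unfamiliar with the packed-ints API used above (GetReaderIterator, GetMutable, Set), here is a minimal, self-contained sketch of writing and reading a PackedInt32s.Mutable. All values are made up for illustration and are unrelated to the terms-index format:

using Lucene.Net.Util.Packed;

// Minimal sketch of PackedInt32s usage (illustration only; values are made up):
// store small non-negative longs using just enough bits per value, then read
// one back. This mirrors the GetMutable/Set calls in the loop above.
public static long PackAndReadBack()
{
    long[] values = { 0, 17, 113, 4096 };

    // enough bits to hold the largest value we intend to store
    int bitsPerValue = PackedInt32s.BitsRequired(4096);

    PackedInt32s.Mutable packed = PackedInt32s.GetMutable(values.Length, bitsPerValue, PackedInt32s.DEFAULT);
    for (int i = 0; i < values.Length; i++)
    {
        packed.Set(i, values[i]);
    }

    return packed.Get(2); // 113
}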
Example #31
0
        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
        {
            string     fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
            IndexInput input    = directory.OpenInput(fileName, iocontext);

            bool success = false;

            try
            {
                CodecUtil.CheckHeader(input, Lucene42FieldInfosFormat.CODEC_NAME,
                                      Lucene42FieldInfosFormat.FORMAT_START,
                                      Lucene42FieldInfosFormat.FORMAT_CURRENT);

                int         size  = input.ReadVInt32(); //read in the size
                FieldInfo[] infos = new FieldInfo[size];

                for (int i = 0; i < size; i++)
                {
                    string       name            = input.ReadString();
                    int          fieldNumber     = input.ReadVInt32();
                    sbyte        bits            = (sbyte)input.ReadByte();
                    bool         isIndexed       = (bits & Lucene42FieldInfosFormat.IS_INDEXED) != 0;
                    bool         storeTermVector = (bits & Lucene42FieldInfosFormat.STORE_TERMVECTOR) != 0;
                    bool         omitNorms       = (bits & Lucene42FieldInfosFormat.OMIT_NORMS) != 0;
                    bool         storePayloads   = (bits & Lucene42FieldInfosFormat.STORE_PAYLOADS) != 0;
                    IndexOptions indexOptions;
                    if (!isIndexed)
                    {
                        indexOptions = IndexOptions.NONE;
                    }
                    else if ((bits & Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_ONLY;
                    }
                    else if ((bits & Lucene42FieldInfosFormat.OMIT_POSITIONS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS;
                    }
                    else if ((bits & Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS) != 0)
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
                    }
                    else
                    {
                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                    }

                    // DV Types are packed in one byte
                    sbyte         val                       = (sbyte)input.ReadByte();
                    DocValuesType docValuesType             = GetDocValuesType(input, (byte)(val & 0x0F));
                    DocValuesType normsType                 = GetDocValuesType(input, (byte)((val.TripleShift(4)) & 0x0F));
                    IDictionary <string, string> attributes = input.ReadStringStringMap();
                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
                                             omitNorms, storePayloads, indexOptions, docValuesType, normsType, attributes.AsReadOnly());
                }

                CodecUtil.CheckEOF(input);
                FieldInfos fieldInfos = new FieldInfos(infos);
                success = true;
                return(fieldInfos);
            }
            finally
            {
                if (success)
                {
                    input.Dispose();
                }
                else
                {
                    IOUtils.DisposeWhileHandlingException(input);
                }
            }
        }
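
Example #31 relies on the usual Lucene.NET success-flag idiom: the input is disposed normally on the success path, and via IOUtils.DisposeWhileHandlingException on the failure path so the original exception is not masked by a Dispose failure. A minimal, hedged sketch of that idiom in isolation (the directory, file name, and return value are placeholders, not part of the field-infos format):

using Lucene.Net.Store;
using Lucene.Net.Util;

// Minimal sketch of the success-flag dispose idiom (hedged; `dir` and
// `fileName` are placeholders): dispose normally when everything worked,
// but use DisposeWhileHandlingException when an exception is already in
// flight so it is not replaced by a Dispose error.
public static int ReadSingleVInt32(Directory dir, string fileName)
{
    IndexInput input = dir.OpenInput(fileName, IOContext.DEFAULT);
    bool success = false;
    try
    {
        int value = input.ReadVInt32(); // any read may throw
        success = true;
        return value;
    }
    finally
    {
        if (success)
        {
            input.Dispose();
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(input);
        }
    }
}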
Example #32
0
        public override FieldsProducer FieldsProducer(SegmentReadState state)
        {
            string     seedFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, SEED_EXT);
            IndexInput @in          = state.Directory.OpenInput(seedFileName, state.Context);
            long       seed         = @in.ReadInt64();

            if (LuceneTestCase.Verbose)
            {
                Console.WriteLine("MockRandomCodec: reading from seg=" + state.SegmentInfo.Name + " formatID=" + state.SegmentSuffix + " seed=" + seed);
            }
            @in.Dispose();

            Random random = new J2N.Randomizer(seed);

            int readBufferSize = TestUtil.NextInt32(random, 1, 4096);

            if (LuceneTestCase.Verbose)
            {
                Console.WriteLine("MockRandomCodec: readBufferSize=" + readBufferSize);
            }

            PostingsReaderBase postingsReader;

            if (random.NextBoolean())
            {
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine("MockRandomCodec: reading Sep postings");
                }
                postingsReader = new SepPostingsReader(state.Directory, state.FieldInfos, state.SegmentInfo,
                                                       state.Context, new MockInt32StreamFactory(random), state.SegmentSuffix);
            }
            else
            {
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine("MockRandomCodec: reading Standard postings");
                }
                postingsReader = new Lucene41PostingsReader(state.Directory, state.FieldInfos, state.SegmentInfo, state.Context, state.SegmentSuffix);
            }

            if (random.NextBoolean())
            {
                int totTFCutoff = TestUtil.NextInt32(random, 1, 20);
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine("MockRandomCodec: reading pulsing postings with totTFCutoff=" + totTFCutoff);
                }
                postingsReader = new PulsingPostingsReader(state, postingsReader);
            }

            FieldsProducer fields;
            int            t1 = random.Next(4);

            if (t1 == 0)
            {
                bool success = false;
                try
                {
                    fields  = new FSTTermsReader(state, postingsReader);
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        postingsReader.Dispose();
                    }
                }
            }
            else if (t1 == 1)
            {
                bool success = false;
                try
                {
                    fields  = new FSTOrdTermsReader(state, postingsReader);
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        postingsReader.Dispose();
                    }
                }
            }
            else if (t1 == 2)
            {
                // Use BlockTree terms dict
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine("MockRandomCodec: reading BlockTree terms dict");
                }

                bool success = false;
                try
                {
                    fields = new BlockTreeTermsReader(state.Directory,
                                                      state.FieldInfos,
                                                      state.SegmentInfo,
                                                      postingsReader,
                                                      state.Context,
                                                      state.SegmentSuffix,
                                                      state.TermsIndexDivisor);
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        postingsReader.Dispose();
                    }
                }
            }
            else
            {
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine("MockRandomCodec: reading Block terms dict");
                }
                TermsIndexReaderBase indexReader;
                bool success = false;
                try
                {
                    bool doFixedGap = random.NextBoolean();

                    // randomness diverges from writer, here:
                    if (state.TermsIndexDivisor != -1)
                    {
                        state.TermsIndexDivisor = TestUtil.NextInt32(random, 1, 10);
                    }

                    if (doFixedGap)
                    {
                        // if termsIndexDivisor is set to -1, we should not touch it. It means a
                        // test explicitly instructed not to load the terms index.
                        if (LuceneTestCase.Verbose)
                        {
                            Console.WriteLine("MockRandomCodec: fixed-gap terms index (divisor=" + state.TermsIndexDivisor + ")");
                        }
                        indexReader = new FixedGapTermsIndexReader(state.Directory,
                                                                   state.FieldInfos,
                                                                   state.SegmentInfo.Name,
                                                                   state.TermsIndexDivisor,
                                                                   BytesRef.UTF8SortedAsUnicodeComparer,
                                                                   state.SegmentSuffix, state.Context);
                    }
                    else
                    {
                        int n2 = random.Next(3);
                        if (n2 == 1)
                        {
                            random.Next();
                        }
                        else if (n2 == 2)
                        {
                            random.NextInt64();
                        }
                        if (LuceneTestCase.Verbose)
                        {
                            Console.WriteLine("MockRandomCodec: variable-gap terms index (divisor=" + state.TermsIndexDivisor + ")");
                        }
                        indexReader = new VariableGapTermsIndexReader(state.Directory,
                                                                      state.FieldInfos,
                                                                      state.SegmentInfo.Name,
                                                                      state.TermsIndexDivisor,
                                                                      state.SegmentSuffix, state.Context);
                    }

                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        postingsReader.Dispose();
                    }
                }

                success = false;
                try
                {
                    fields = new BlockTermsReader(indexReader,
                                                  state.Directory,
                                                  state.FieldInfos,
                                                  state.SegmentInfo,
                                                  postingsReader,
                                                  state.Context,
                                                  state.SegmentSuffix);
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        try
                        {
                            postingsReader.Dispose();
                        }
                        finally
                        {
                            indexReader.Dispose();
                        }
                    }
                }
            }

            return(fields);
        }
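
The failure paths in Example #32 follow one rule: until the composite terms reader is built, the caller still owns postingsReader (and, in the last branch, indexReader) and must dispose them, using a nested finally so the second Dispose runs even if the first throws. A rough sketch of the same ownership hand-off, assuming hypothetical factory delegates rather than the real codec constructors:

using System;
using Lucene.Net.Util;

// Rough sketch only: build a composite reader from parts the caller owns; if
// construction fails, dispose every part without masking the exception that
// is already propagating. `openIndexReader` and `buildTermsReader` are
// hypothetical factories standing in for the real codec constructors.
public static IDisposable BuildOrDispose(IDisposable postingsReader,
                                         Func<IDisposable> openIndexReader,
                                         Func<IDisposable, IDisposable> buildTermsReader)
{
    IDisposable indexReader = null;
    bool success = false;
    try
    {
        indexReader = openIndexReader();
        IDisposable termsReader = buildTermsReader(indexReader);
        success = true;
        return termsReader; // on success, the terms reader takes ownership of both parts
    }
    finally
    {
        if (!success)
        {
            // null entries are skipped; Dispose() errors are suppressed so the
            // original exception keeps propagating
            IOUtils.DisposeWhileHandlingException(postingsReader, indexReader);
        }
    }
}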
        public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, string segment, int indexDivisor,
                                        IComparer <BytesRef> termComp, string segmentSuffix, IOContext context)
        {
            this.termComp = termComp;

            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(indexDivisor == -1 || indexDivisor > 0);
            }

            input = dir.OpenInput(IndexFileNames.SegmentFileName(segment, segmentSuffix, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context);

            bool success = false;

            try
            {
                version = ReadHeader(input);

                if (version >= FixedGapTermsIndexWriter.VERSION_CHECKSUM)
                {
                    CodecUtil.ChecksumEntireFile(input);
                }

                indexInterval = input.ReadInt32();
                if (indexInterval < 1)
                {
                    throw new CorruptIndexException("invalid indexInterval: " + indexInterval + " (resource=" + input + ")");
                }
                this.indexDivisor = indexDivisor;

                if (indexDivisor < 0)
                {
                    totalIndexInterval = indexInterval;
                }
                else
                {
                    // In case terms index gets loaded, later, on demand
                    totalIndexInterval = indexInterval * indexDivisor;
                }
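                // Illustration only (numbers are hypothetical): with indexInterval = 32
                // read from the file and indexDivisor = 4, totalIndexInterval = 128, i.e.
                // only every 4th on-disk index term is kept in memory and each kept term
                // covers 128 terms of the main dictionary.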
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(totalIndexInterval > 0);
                }

                SeekDir(input, dirOffset);

                // Read directory
                int numFields = input.ReadVInt32();
                if (numFields < 0)
                {
                    throw new CorruptIndexException("invalid numFields: " + numFields + " (resource=" + input + ")");
                }
                //System.out.println("FGR: init seg=" + segment + " div=" + indexDivisor + " nF=" + numFields);
                for (int i = 0; i < numFields; i++)
                {
                    int field         = input.ReadVInt32();
                    int numIndexTerms = input.ReadVInt32();
                    if (numIndexTerms < 0)
                    {
                        throw new CorruptIndexException("invalid numIndexTerms: " + numIndexTerms + " (resource=" + input + ")");
                    }
                    long termsStart         = input.ReadVInt64();
                    long indexStart         = input.ReadVInt64();
                    long packedIndexStart   = input.ReadVInt64();
                    long packedOffsetsStart = input.ReadVInt64();
                    if (packedIndexStart < indexStart)
                    {
                        throw new CorruptIndexException("invalid packedIndexStart: " + packedIndexStart + " indexStart: " + indexStart + "numIndexTerms: " + numIndexTerms + " (resource=" + input + ")");
                    }
                    FieldInfo      fieldInfo = fieldInfos.FieldInfo(field);
                    FieldIndexData previous  = fields.Put(fieldInfo, new FieldIndexData(this, /* fieldInfo, // LUCENENET: Not referenced */
                                                                                        numIndexTerms, indexStart, termsStart, packedIndexStart, packedOffsetsStart));
                    if (previous != null)
                    {
                        throw new CorruptIndexException("duplicate field: " + fieldInfo.Name + " (resource=" + input + ")");
                    }
                }
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(input);
                }
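                // indexDivisor > 0: the whole terms index was loaded eagerly above, so the
                // file handle can be released here; with indexDivisor == -1 the input is
                // kept open (see "loaded, later, on demand" above) and disposed separately.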
                if (indexDivisor > 0)
                {
                    input.Dispose();
                    input = null;
                    if (success)
                    {
                        indexLoaded = true;
                    }
                    termBytesReader = termBytes.Freeze(true);
                }
            }
        }