Example #1
public static void ReadLegacyInfos(SegmentInfos infos, Directory directory, IndexInput input, int format)
        {
            infos.Version = input.ReadLong(); // read version
            infos.Counter = input.ReadInt(); // read counter
            Lucene3xSegmentInfoReader reader = new Lucene3xSegmentInfoReader();
            for (int i = input.ReadInt(); i > 0; i--) // read segmentInfos
            {
                SegmentCommitInfo siPerCommit = reader.ReadLegacySegmentInfo(directory, format, input);
                SegmentInfo si = siPerCommit.Info;

                if (si.Version == null)
                {
                    // Could be a 3.0 - try to open the doc stores - if it fails, it's a
                    // 2.x segment, and an IndexFormatTooOldException will be thrown,
                    // which is what we want.
                    Directory dir = directory;
                    if (Lucene3xSegmentInfoFormat.GetDocStoreOffset(si) != -1)
                    {
                        if (Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(si))
                        {
                            dir = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(Lucene3xSegmentInfoFormat.GetDocStoreSegment(si), "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION), IOContext.READONCE, false);
                        }
                    }
                    else if (si.UseCompoundFile)
                    {
                        dir = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(si.Name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), IOContext.READONCE, false);
                    }

                    try
                    {
                        Lucene3xStoredFieldsReader.CheckCodeVersion(dir, Lucene3xSegmentInfoFormat.GetDocStoreSegment(si));
                    }
                    finally
                    {
                        // If we opened the directory, close it
                        if (dir != directory)
                        {
                            dir.Dispose();
                        }
                    }

                    // Above call succeeded, so it's a 3.0 segment. Upgrade it so the next
                    // time the segment is read, its version won't be null and we won't
                    // need to open FieldsReader every time for each such segment.
                    si.Version = "3.0";
                }
                else if (si.Version.Equals("2.x"))
                {
                    // If it's a 3x index touched by 3.1+ code, then segments record their
                    // version, whether they are 2.x ones or not. We detect that and throw
                    // appropriate exception.
                    throw new IndexFormatTooOldException("segment " + si.Name + " in resource " + input, si.Version);
                }
                infos.Add(siPerCommit);
            }

            infos.UserData = input.ReadStringStringMap();
        }
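For context, a minimal sketch of how this legacy reader might be reached — assuming the Lucene.NET 4.x convention in which the segments_N dispatch inspects the leading format int and hands pre-4.0 files to this method (the dispatch shown is illustrative, not the actual SegmentInfos code):

            // Illustrative dispatch for a pre-4.0 segments_N file, whose header
            // starts with a negative legacy format id:
            IndexInput input = directory.OpenInput(segmentsFileName, IOContext.READ);
            try
            {
                int format = input.ReadInt();
                if (format < 0) // legacy 3.x header
                {
                    SegmentInfos infos = new SegmentInfos();
                    Lucene3xSegmentInfoReader.ReadLegacyInfos(infos, directory, input, format);
                }
            }
            finally
            {
                input.Dispose();
            }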
Example #2
 // Used only by clone
 private Lucene3xStoredFieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, int format, int docStoreOffset, IndexInput fieldsStream, IndexInput indexStream)
 {
     this.FieldInfos     = fieldInfos;
     this.NumTotalDocs   = numTotalDocs;
     this.Size           = size;
     this.Format         = format;
     this.DocStoreOffset = docStoreOffset;
     this.FieldsStream   = fieldsStream;
     this.IndexStream    = indexStream;
     this.StoreCFSReader = null;
 }
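A plausible Clone() counterpart for this private constructor — a sketch assuming the usual Lucene.NET pattern of cloning the underlying streams while sharing the immutable state (not necessarily the exact method body):

 public override object Clone()
 {
     EnsureOpen();
     // Clone the two streams; everything else is shared with the original reader.
     return new Lucene3xStoredFieldsReader(FieldInfos, NumTotalDocs, Size, Format,
         DocStoreOffset, (IndexInput)FieldsStream.Clone(), (IndexInput)IndexStream.Clone());
 }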
Example #3
        public virtual void TestEmptyCFS()
        {
            Directory newDir = NewDirectory();
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
            csw.Dispose();

            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
            Assert.AreEqual(0, csr.ListAll().Length);
            csr.Dispose();

            newDir.Dispose();
        }
Example #4
        /// <summary>
        /// Setup a larger compound file with a number of components, each of
        ///  which is a sequential file (so that we can easily tell that we are
        ///  reading in the right byte). The method sets up 20 files - f0 to f19;
        ///  the size of each file is 2000 bytes.
        /// </summary>
        private void SetUp_2()
        {
            CompoundFileDirectory cw = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), true);

            for (int i = 0; i < 20; i++)
            {
                CreateSequenceFile(Dir, "f" + i, (sbyte)0, 2000);
                string fileName = "f" + i;
                Dir.Copy(cw, fileName, fileName, NewIOContext(Random));
            }
            cw.Dispose();
        }
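SetUp_2 leans on a CreateSequenceFile helper; a minimal sketch of what such a helper looks like (assumed implementation — the real one lives elsewhere in the test class):

        private void CreateSequenceFile(Directory dir, string name, sbyte start, int size)
        {
            IndexOutput os = dir.CreateOutput(name, NewIOContext(Random));
            for (int i = 0; i < size; i++)
            {
                os.WriteByte((byte)start); // byte at position i equals start + i,
                start++;                   // so a misread is immediately detectable
            }
            os.Dispose();
        }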
Example #5
 // used by clone
 internal Lucene3xTermVectorsReader(FieldInfos fieldInfos, IndexInput tvx, IndexInput tvd, IndexInput tvf, int size, int numTotalDocs, int docStoreOffset, int format)
 {
     this.FieldInfos     = fieldInfos;
     this.Tvx            = tvx;
     this.Tvd            = tvd;
     this.Tvf            = tvf;
     this.Size_Renamed   = size;
     this.NumTotalDocs   = numTotalDocs;
     this.DocStoreOffset = docStoreOffset;
     this.Format         = format;
     this.StoreCFSReader = null;
 }
Example #6
        public virtual void TestRandomFiles()
        {
            // Setup the test segment
            string segment = "test";
            int    chunk   = 1024; // internal buffer size used by the stream

            CreateRandomFile(Dir, segment + ".zero", 0);
            CreateRandomFile(Dir, segment + ".one", 1);
            CreateRandomFile(Dir, segment + ".ten", 10);
            CreateRandomFile(Dir, segment + ".hundred", 100);
            CreateRandomFile(Dir, segment + ".big1", chunk);
            CreateRandomFile(Dir, segment + ".big2", chunk - 1);
            CreateRandomFile(Dir, segment + ".big3", chunk + 1);
            CreateRandomFile(Dir, segment + ".big4", 3 * chunk);
            CreateRandomFile(Dir, segment + ".big5", 3 * chunk - 1);
            CreateRandomFile(Dir, segment + ".big6", 3 * chunk + 1);
            CreateRandomFile(Dir, segment + ".big7", 1000 * chunk);

            // Setup extraneous files
            CreateRandomFile(Dir, "onetwothree", 100);
            CreateRandomFile(Dir, segment + ".notIn", 50);
            CreateRandomFile(Dir, segment + ".notIn2", 51);

            // Now test
            CompoundFileDirectory csw = new CompoundFileDirectory(Dir, "test.cfs", NewIOContext(Random), true);

            string[] data = new string[] { ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7" };
            for (int i = 0; i < data.Length; i++)
            {
                string fileName = segment + data[i];
                Dir.Copy(csw, fileName, fileName, NewIOContext(Random));
            }
            csw.Dispose();

            CompoundFileDirectory csr = new CompoundFileDirectory(Dir, "test.cfs", NewIOContext(Random), false);

            for (int i = 0; i < data.Length; i++)
            {
                IndexInput check = Dir.OpenInput(segment + data[i], NewIOContext(Random));
                IndexInput test  = csr.OpenInput(segment + data[i], NewIOContext(Random));
                AssertSameStreams(data[i], check, test);
                AssertSameSeekBehavior(data[i], check, test);
                test.Dispose();
                check.Dispose();
            }
            csr.Dispose();
        }
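Similarly, TestRandomFiles assumes a CreateRandomFile helper; one plausible shape (illustrative — the payload is arbitrary, since the test only compares the compound copy against the original):

        private void CreateRandomFile(Directory dir, string name, int size)
        {
            IndexOutput os = dir.CreateOutput(name, NewIOContext(Random));
            for (int i = 0; i < size; i++)
            {
                os.WriteByte((byte)Random.Next(256)); // random payload of exactly `size` bytes
            }
            os.Dispose();
        }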
Example #7
        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
        {
            string fileName = IndexFileNames.SegmentFileName(Lucene3xSegmentInfoFormat.GetDocStoreSegment(segmentInfo), "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION);

            // Unfortunately, for 3.x indices, each segment's
            // FieldInfos can lie about hasVectors (claim it's true
            // when really it's false).... so we have to carefully
            // check if the files really exist before trying to open
            // them (4.x has fixed this):
            bool exists;

            if (Lucene3xSegmentInfoFormat.GetDocStoreOffset(segmentInfo) != -1 && Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(segmentInfo))
            {
                string cfxFileName = IndexFileNames.SegmentFileName(Lucene3xSegmentInfoFormat.GetDocStoreSegment(segmentInfo), "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION);
                if (segmentInfo.Dir.FileExists(cfxFileName))
                {
                    Directory cfsDir = new CompoundFileDirectory(segmentInfo.Dir, cfxFileName, context, false);
                    try
                    {
                        exists = cfsDir.FileExists(fileName);
                    }
                    finally
                    {
                        cfsDir.Dispose();
                    }
                }
                else
                {
                    exists = false;
                }
            }
            else
            {
                exists = directory.FileExists(fileName);
            }

            if (!exists)
            {
                // 3x's FieldInfos sometimes lies and claims a segment
                // has vectors when it doesn't:
                return null;
            }
            else
            {
                return new Lucene3xTermVectorsReader(directory, segmentInfo, fieldInfos, context);
            }
        }
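Because this method can legitimately return null, any consumer has to guard for it. A hypothetical caller (names such as dir, segmentInfo, and docId are illustrative):

            TermVectorsReader reader = codec.TermVectorsFormat.VectorsReader(dir, segmentInfo, fieldInfos, IOContext.READ);
            if (reader != null) // null means the 3.x hasVectors flag lied
            {
                try
                {
                    int docId = 0; // illustrative
                    Fields vectors = reader.Get(docId); // term vectors for one document
                }
                finally
                {
                    reader.Dispose();
                }
            }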
Example #8
        public virtual void TestAppend()
        {
            Directory             newDir = NewDirectory();
            CompoundFileDirectory csw    = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
            int size = 5 + Random.Next(128);

            for (int j = 0; j < 2; j++)
            {
                IndexOutput os = csw.CreateOutput("seg_" + j + "_foo.txt", NewIOContext(Random));
                for (int i = 0; i < size; i++)
                {
                    os.WriteInt32(i * j);
                }
                os.Dispose();
                string[] listAll = newDir.ListAll();
                Assert.AreEqual(1, listAll.Length);
                Assert.AreEqual("d.cfs", listAll[0]);
            }
            CreateSequenceFile(Dir, "d1", (sbyte)0, 15);
            Dir.Copy(csw, "d1", "d1", NewIOContext(Random));
            string[] listAll_ = newDir.ListAll();
            Assert.AreEqual(1, listAll_.Length);
            Assert.AreEqual("d.cfs", listAll_[0]);
            csw.Dispose();
            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);

            for (int j = 0; j < 2; j++)
            {
                IndexInput openInput = csr.OpenInput("seg_" + j + "_foo.txt", NewIOContext(Random));
                Assert.AreEqual(size * 4, openInput.Length);
                for (int i = 0; i < size; i++)
                {
                    Assert.AreEqual(i * j, openInput.ReadInt32());
                }

                openInput.Dispose();
            }
            IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random));
            IndexInput actual   = csr.OpenInput("d1", NewIOContext(Random));

            AssertSameStreams("d1", expected, actual);
            AssertSameSeekBehavior("d1", expected, actual);
            expected.Dispose();
            actual.Dispose();
            csr.Dispose();
            newDir.Dispose();
        }
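The size * 4 length assertion above holds because WriteInt32 always emits exactly four big-endian bytes per call; a tiny sketch:

            // Each WriteInt32 appends 4 bytes, so a file holding one int is 4 bytes long:
            IndexOutput os = newDir.CreateOutput("four.bytes", NewIOContext(Random));
            os.WriteInt32(1); // bytes: 0x00 0x00 0x00 0x01 (big-endian)
            os.Dispose();
            // newDir.FileLength("four.bytes") == 4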
Example #9
        public virtual void TestClonedStreamsClosing()
        {
            SetUp_2();
            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), false);

            // basic clone
            IndexInput expected = Dir.OpenInput("f11", NewIOContext(Random));

            // this test only works for FSIndexInput
            Assert.IsTrue(TestHelper.IsSimpleFSIndexInput(expected));
            Assert.IsTrue(TestHelper.IsSimpleFSIndexInputOpen(expected));

            IndexInput one = cr.OpenInput("f11", NewIOContext(Random));

            IndexInput two = (IndexInput)one.Clone();

            AssertSameStreams("basic clone one", expected, one);
            expected.Seek(0);
            AssertSameStreams("basic clone two", expected, two);

            // Now close the first stream
            one.Dispose();

            // The following should really fail since we shouldn't expect to
            // access a file once close has been called on it (regardless of
            // buffering and/or clone magic)
            expected.Seek(0);
            two.Seek(0);
            AssertSameStreams("basic clone two/2", expected, two);

            // Now close the compound reader
            cr.Dispose();

            // The following may also fail since the compound stream is closed
            expected.Seek(0);
            two.Seek(0);
            //assertSameStreams("basic clone two/3", expected, two);

            // Now close the second clone
            two.Dispose();
            expected.Seek(0);
            two.Seek(0);
            //assertSameStreams("basic clone two/4", expected, two);

            expected.Dispose();
        }
Example #10
        public virtual void TestAppendTwice()
        {
            Directory newDir = NewDirectory();
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
            CreateSequenceFile(newDir, "d1", (sbyte)0, 15);
            IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
            @out.WriteInt(0);
            @out.Dispose();
            Assert.AreEqual(1, csw.ListAll().Length);
            Assert.AreEqual("d.xyz", csw.ListAll()[0]);

            csw.Dispose();

            CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
            Assert.AreEqual(1, cfr.ListAll().Length);
            Assert.AreEqual("d.xyz", cfr.ListAll()[0]);
            cfr.Dispose();
            newDir.Dispose();
        }
Example #11
        public virtual void TestFileNotFound()
        {
            SetUp_2();
            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random()), false);

            // Opening a non-existent file must fail
            try
            {
                cr.OpenInput("bogus", NewIOContext(Random()));
                Assert.Fail("Expected an exception for a missing file");
            }
            catch (Exception)
            {
                /* success */
            }

            cr.Dispose();
        }
Example #12
        public virtual void TestAddExternalFile()
        {
            CreateSequenceFile(Dir, "d1", (sbyte)0, 15);

            Directory newDir = NewDirectory();
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
            Dir.Copy(csw, "d1", "d1", NewIOContext(Random()));
            csw.Dispose();

            CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
            IndexInput expected = Dir.OpenInput("d1", NewIOContext(Random()));
            IndexInput actual = csr.OpenInput("d1", NewIOContext(Random()));
            AssertSameStreams("d1", expected, actual);
            AssertSameSeekBehavior("d1", expected, actual);
            expected.Dispose();
            actual.Dispose();
            csr.Dispose();

            newDir.Dispose();
        }
Example #13
        public virtual void TestManySubFiles()
        {
            Directory d          = NewFSDirectory(CreateTempDir("CFSManySubFiles"));
            int       FILE_COUNT = AtLeast(500);

            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
            {
                IndexOutput @out = d.CreateOutput("file." + fileIdx, NewIOContext(Random));
                @out.WriteByte((byte)(sbyte)fileIdx);
                @out.Dispose();
            }

            CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random), true);

            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
            {
                string fileName = "file." + fileIdx;
                d.Copy(cfd, fileName, fileName, NewIOContext(Random));
            }
            cfd.Dispose();

            IndexInput[]          ins = new IndexInput[FILE_COUNT];
            CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random), false);

            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
            {
                ins[fileIdx] = cfr.OpenInput("file." + fileIdx, NewIOContext(Random));
            }

            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
            {
                Assert.AreEqual((byte)fileIdx, ins[fileIdx].ReadByte());
            }

            for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
            {
                ins[fileIdx].Dispose();
            }
            cfr.Dispose();
            d.Dispose();
        }
Example #14
        public virtual void TestReadPastEOF()
        {
            SetUp_2();
            var        cr  = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), false);
            IndexInput @is = cr.OpenInput("f2", NewIOContext(Random));

            @is.Seek(@is.Length - 10);
            var b = new byte[100];

            @is.ReadBytes(b, 0, 10);

            try
            {
                @is.ReadByte();
                Assert.Fail("Single byte read past end of file");
            }
            catch (IOException)
            {
                /* success */
            }

            @is.Seek(@is.Length - 10);
            try
            {
                @is.ReadBytes(b, 0, 50);
                Assert.Fail("Block read past end of file");
            }
            catch (IOException)
            {
                /* success */
            }

            @is.Dispose();
            cr.Dispose();
        }
Example #15
        public virtual void TestSingleFile()
        {
            int[] data = new int[] { 0, 1, 10, 100 };
            for (int i = 0; i < data.Length; i++)
            {
                string name = "t" + data[i];
                CreateSequenceFile(Dir, name, (sbyte)0, data[i]);
                CompoundFileDirectory csw = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random), true);
                Dir.Copy(csw, name, name, NewIOContext(Random));
                csw.Dispose();

                CompoundFileDirectory csr      = new CompoundFileDirectory(Dir, name + ".cfs", NewIOContext(Random), false);
                IndexInput            expected = Dir.OpenInput(name, NewIOContext(Random));
                IndexInput            actual   = csr.OpenInput(name, NewIOContext(Random));
                AssertSameStreams(name, expected, actual);
                AssertSameSeekBehavior(name, expected, actual);
                expected.Dispose();
                actual.Dispose();
                csr.Dispose();
            }
        }
Example #16
 private void CheckHeaders(Directory dir)
 {
     foreach (string file in dir.ListAll())
     {
         if (file.Equals(IndexWriter.WRITE_LOCK_NAME, StringComparison.Ordinal))
         {
             continue; // write.lock has no header, that's ok
         }
         if (file.Equals(IndexFileNames.SEGMENTS_GEN, StringComparison.Ordinal))
         {
             continue; // segments.gen has no header, that's ok
         }
         if (file.EndsWith(IndexFileNames.COMPOUND_FILE_EXTENSION, StringComparison.Ordinal))
         {
             CompoundFileDirectory cfsDir = new CompoundFileDirectory(dir, file, NewIOContext(Random()), false);
             CheckHeaders(cfsDir); // recurse into cfs
             cfsDir.Dispose();
         }
         IndexInput @in     = null;
         bool       success = false;
         try
         {
             @in = dir.OpenInput(file, NewIOContext(Random()));
             int val = @in.ReadInt32();
             Assert.AreEqual(CodecUtil.CODEC_MAGIC, val, file + " has no codec header, instead found: " + val);
             success = true;
         }
         finally
         {
             if (success)
             {
                 IOUtils.Dispose(@in);
             }
             else
             {
                 IOUtils.DisposeWhileHandlingException(@in);
             }
         }
     }
 }
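CheckHeaders only verifies the leading magic because it walks arbitrary files; when the expected codec name and version range are known, the standard helper performs the full check. A sketch (the codec name and version bounds here are illustrative):

     using (IndexInput input = dir.OpenInput(fileName, NewIOContext(Random())))
     {
         // Verifies CODEC_MAGIC, the codec name string, and a supported version range:
         CodecUtil.CheckHeader(input, "ExampleCodecName", 0, 1);
     }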
Example #17
        public virtual void TestDoubleClose()
        {
            Directory newDir = NewDirectory();
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);
            IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
            @out.WriteInt(0);
            @out.Dispose();

            csw.Dispose();
            // close a second time - must have no effect according to IDisposable
            csw.Dispose();

            csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
            IndexInput openInput = csw.OpenInput("d.xyz", NewIOContext(Random()));
            Assert.AreEqual(0, openInput.ReadInt());
            openInput.Dispose();
            csw.Dispose();
            // close a second time - must have no effect according to IDisposable
            csw.Dispose();

            newDir.Dispose();
        }
Example #18
        public virtual void TestReadNestedCFP()
        {
            Directory             newDir = NewDirectory();
            CompoundFileDirectory csw    = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
            CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", NewIOContext(Random), true);
            IndexOutput           @out   = nested.CreateOutput("b.xyz", NewIOContext(Random));
            IndexOutput           out1   = nested.CreateOutput("b_1.xyz", NewIOContext(Random));

            @out.WriteInt32(0);
            out1.WriteInt32(1);
            @out.Dispose();
            out1.Dispose();
            nested.Dispose();
            newDir.Copy(csw, "b.cfs", "b.cfs", NewIOContext(Random));
            newDir.Copy(csw, "b.cfe", "b.cfe", NewIOContext(Random));
            newDir.DeleteFile("b.cfs");
            newDir.DeleteFile("b.cfe");
            csw.Dispose();

            Assert.AreEqual(2, newDir.ListAll().Length);
            csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);

            Assert.AreEqual(2, csw.ListAll().Length);
            nested = new CompoundFileDirectory(csw, "b.cfs", NewIOContext(Random), false);

            Assert.AreEqual(2, nested.ListAll().Length);
            IndexInput openInput = nested.OpenInput("b.xyz", NewIOContext(Random));

            Assert.AreEqual(0, openInput.ReadInt32());
            openInput.Dispose();
            openInput = nested.OpenInput("b_1.xyz", NewIOContext(Random));
            Assert.AreEqual(1, openInput.ReadInt32());
            openInput.Dispose();
            nested.Dispose();
            csw.Dispose();
            newDir.Dispose();
        }
Example #19
        public static void Main(string[] args)
        {
            string filename = null;
            bool   extract  = false;
            string dirImpl  = null;

            int j = 0;

            while (j < args.Length)
            {
                string arg = args[j];
                if ("-extract".Equals(arg))
                {
                    extract = true;
                }
                else if ("-dir-impl".Equals(arg))
                {
                    if (j == args.Length - 1)
                    {
                        Console.WriteLine("ERROR: missing value for -dir-impl option");
                        Environment.Exit(1);
                    }
                    j++;
                    dirImpl = args[j];
                }
                else if (filename == null)
                {
                    filename = arg;
                }
                j++;
            }

            if (filename == null)
            {
                Console.WriteLine("Usage: org.apache.lucene.index.CompoundFileExtractor [-extract] [-dir-impl X] <cfsfile>");
                return;
            }

            Directory             dir     = null;
            CompoundFileDirectory cfr     = null;
            IOContext             context = IOContext.READ;

            try
            {
                FileInfo file    = new FileInfo(filename);
                string   dirname = file.DirectoryName;
                filename = file.Name;
                if (dirImpl == null)
                {
                    dir = FSDirectory.Open(new DirectoryInfo(dirname));
                }
                else
                {
                    dir = CommandLineUtil.NewFSDirectory(dirImpl, new DirectoryInfo(dirname));
                }

                cfr = new CompoundFileDirectory(dir, filename, IOContext.DEFAULT, false);

                string[] files = cfr.ListAll();
                ArrayUtil.TimSort(files);   // sort the array of filenames so that the output is more readable

                for (int i = 0; i < files.Length; ++i)
                {
                    long len = cfr.FileLength(files[i]);

                    if (extract)
                    {
                        Console.WriteLine("extract " + files[i] + " with " + len + " bytes to local directory...");
                        IndexInput ii = cfr.OpenInput(files[i], context);

                        FileStream f = new FileStream(files[i], FileMode.Create, FileAccess.Write);

                        // read and write with a small buffer, which is more effective than reading byte by byte
                        byte[] buffer = new byte[1024];
                        int    chunk  = buffer.Length;
                        while (len > 0)
                        {
                            int bufLen = (int)Math.Min(chunk, len);
                            ii.ReadBytes(buffer, 0, bufLen);
                            f.Write(buffer, 0, bufLen);
                            len -= bufLen;
                        }

                        f.Dispose();
                        ii.Dispose();
                    }
                    else
                    {
                        Console.WriteLine(files[i] + ": " + len + " bytes");
                    }
                }
            }
            catch (IOException ioe)
            {
                Console.WriteLine(ioe.ToString());
                Console.Write(ioe.StackTrace);
            }
            finally
            {
                try
                {
                    if (dir != null)
                    {
                        dir.Dispose();
                    }
                    if (cfr != null)
                    {
                        cfr.Dispose();
                    }
                }
                catch (IOException ioe)
                {
                    Console.WriteLine(ioe.ToString());
                    Console.Write(ioe.StackTrace);
                }
            }
        }
Example #20
        internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInfo si, IOContext context, int termsIndexDivisor)
        {
            fieldsReaderLocal = new AnonymousFieldsReaderLocal(this);
            termVectorsLocal  = new AnonymousTermVectorsLocal(this);

            if (termsIndexDivisor == 0)
            {
                throw new System.ArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)");
            }

            Codec     codec = si.Info.Codec;
            Directory cfsDir; // confusing name: if (cfs) it's the cfsDir, otherwise it's the segment's directory.

            bool success = false;

            try
            {
                if (si.Info.UseCompoundFile)
                {
                    cfsDir = cfsReader = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(si.Info.Name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context, false);
                }
                else
                {
                    cfsReader = null;
                    cfsDir    = dir;
                }

                FieldInfos fieldInfos = owner.FieldInfos;

                this.termsIndexDivisor = termsIndexDivisor;
                PostingsFormat   format           = codec.PostingsFormat;
                SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.Info, fieldInfos, context, termsIndexDivisor);
                // Ask codec for its Fields
                fields = format.FieldsProducer(segmentReadState);
                Debug.Assert(fields != null);
                // ask codec for its Norms:
                // TODO: since we don't write any norms file if there are no norms,
                // kinda shaky to assume the codec handles the case of no norms file at all gracefully?!

                if (fieldInfos.HasNorms)
                {
                    normsProducer = codec.NormsFormat.NormsProducer(segmentReadState);
                    Debug.Assert(normsProducer != null);
                }
                else
                {
                    normsProducer = null;
                }

                // LUCENENET TODO: EXCEPTIONS Not sure why this catch block is swallowing AccessViolationException,
                // because it didn't exist in Lucene. Is it really needed? AVE is for protected memory...could
                // this be needed because we are using unchecked??

#if !NETSTANDARD
                try
                {
#endif
                    fieldsReaderOrig = si.Info.Codec.StoredFieldsFormat.FieldsReader(cfsDir, si.Info, fieldInfos, context);
#if !NETSTANDARD
                }
                catch (System.AccessViolationException)
                {
                    // swallowed; see the LUCENENET TODO above
                }
#endif

                if (fieldInfos.HasVectors) // open term vector files only as needed
                {
                    termVectorsReaderOrig = si.Info.Codec.TermVectorsFormat.VectorsReader(cfsDir, si.Info, fieldInfos, context);
                }
                else
                {
                    termVectorsReaderOrig = null;
                }

                success = true;
            }
            finally
            {
                if (!success)
                {
                    DecRef();
                }
            }
        }
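The termsIndexDivisor guard above reflects the contract that -1 means "don't load the terms index at all" and N > 0 means "load every Nth indexed term"; 0 is the one invalid value. A sketch of how a caller picks a value, assuming the standard DirectoryReader overload:

            // -1: skip the terms index entirely (e.g. for merge-only access);
            // larger positive divisors trade lookup speed for memory:
            DirectoryReader reader = DirectoryReader.Open(dir, -1);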
Example #21
        public virtual void TestRandomAccessClones()
        {
            SetUp_2();
            CompoundFileDirectory cr = new CompoundFileDirectory(Dir, "f.comp", NewIOContext(Random), false);

            // Open two files
            IndexInput e1 = cr.OpenInput("f11", NewIOContext(Random));
            IndexInput e2 = cr.OpenInput("f3", NewIOContext(Random));

            IndexInput a1 = (IndexInput)e1.Clone();
            IndexInput a2 = (IndexInput)e2.Clone();

            // Seek the first pair
            e1.Seek(100);
            a1.Seek(100);
            Assert.AreEqual(100, e1.GetFilePointer());
            Assert.AreEqual(100, a1.GetFilePointer());
            byte be1 = e1.ReadByte();
            byte ba1 = a1.ReadByte();

            Assert.AreEqual(be1, ba1);

            // Now seek the second pair
            e2.Seek(1027);
            a2.Seek(1027);
            Assert.AreEqual(1027, e2.GetFilePointer());
            Assert.AreEqual(1027, a2.GetFilePointer());
            byte be2 = e2.ReadByte();
            byte ba2 = a2.ReadByte();

            Assert.AreEqual(be2, ba2);

            // Now make sure the first one didn't move
            Assert.AreEqual(101, e1.GetFilePointer());
            Assert.AreEqual(101, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            // Now move the first one again, past the buffer length
            e1.Seek(1910);
            a1.Seek(1910);
            Assert.AreEqual(1910, e1.GetFilePointer());
            Assert.AreEqual(1910, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            // Now make sure the second set didn't move
            Assert.AreEqual(1028, e2.GetFilePointer());
            Assert.AreEqual(1028, a2.GetFilePointer());
            be2 = e2.ReadByte();
            ba2 = a2.ReadByte();
            Assert.AreEqual(be2, ba2);

            // Move the second set back, again cross the buffer size
            e2.Seek(17);
            a2.Seek(17);
            Assert.AreEqual(17, e2.GetFilePointer());
            Assert.AreEqual(17, a2.GetFilePointer());
            be2 = e2.ReadByte();
            ba2 = a2.ReadByte();
            Assert.AreEqual(be2, ba2);

            // Finally, make sure the first set didn't move
            Assert.AreEqual(1911, e1.GetFilePointer());
            Assert.AreEqual(1911, a1.GetFilePointer());
            be1 = e1.ReadByte();
            ba1 = a1.ReadByte();
            Assert.AreEqual(be1, ba1);

            e1.Dispose();
            e2.Dispose();
            a1.Dispose();
            a2.Dispose();
            cr.Dispose();
        }
Example #22
        public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldInfos, IOContext context)
        {
            string segment        = Lucene3xSegmentInfoFormat.GetDocStoreSegment(si);
            int    docStoreOffset = Lucene3xSegmentInfoFormat.GetDocStoreOffset(si);
            int    size           = si.DocCount;

            bool success = false;

            try
            {
                if (docStoreOffset != -1 && Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(si))
                {
                    d = StoreCFSReader = new CompoundFileDirectory(si.Dir, IndexFileNames.SegmentFileName(segment, "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION), context, false);
                }
                else
                {
                    StoreCFSReader = null;
                }
                string idxName = IndexFileNames.SegmentFileName(segment, "", VECTORS_INDEX_EXTENSION);
                Tvx    = d.OpenInput(idxName, context);
                Format = CheckValidFormat(Tvx);
                string fn = IndexFileNames.SegmentFileName(segment, "", VECTORS_DOCUMENTS_EXTENSION);
                Tvd = d.OpenInput(fn, context);
                int tvdFormat = CheckValidFormat(Tvd);
                fn  = IndexFileNames.SegmentFileName(segment, "", VECTORS_FIELDS_EXTENSION);
                Tvf = d.OpenInput(fn, context);
                int tvfFormat = CheckValidFormat(Tvf);

                Debug.Assert(Format == tvdFormat);
                Debug.Assert(Format == tvfFormat);

                NumTotalDocs = (int)(Tvx.Length() >> 4);

                if (-1 == docStoreOffset)
                {
                    this.DocStoreOffset = 0;
                    this.Size_Renamed   = NumTotalDocs;
                    Debug.Assert(size == 0 || NumTotalDocs == size);
                }
                else
                {
                    this.DocStoreOffset = docStoreOffset;
                    this.Size_Renamed   = size;
                    // Verify the file is long enough to hold all of our
                    // docs
                    Debug.Assert(NumTotalDocs >= size + docStoreOffset, "numTotalDocs=" + NumTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset);
                }

                this.FieldInfos = fieldInfos;
                success         = true;
            }
            finally
            {
                // With lock-less commits, it's entirely possible (and
                // fine) to hit a FileNotFound exception above. In
                // this case, we want to explicitly close any subset
                // of things that were opened so that we don't have to
                // wait for a GC to do so.
                if (!success)
                {
                    try
                    {
                        Dispose();
                    } // keep our original exception
                    catch (Exception)
                    {
                    }
                }
            }
        }
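A note on NumTotalDocs = (int)(Tvx.Length() >> 4): after its 4-byte format header, the .tvx file stores two 8-byte pointers per document (one into .tvd, one into .tvf), i.e. 16 bytes per doc; since 4 < 16, the integer shift by four bits discards the header automatically. A sketch of reading one per-document index record (assumed layout, per the 3.x term-vector format):

                // Per-document .tvx record, 16 bytes:
                long tvdPointer = Tvx.ReadLong(); // where this doc's data starts in .tvd
                long tvfPointer = Tvx.ReadLong(); // where this doc's fields start in .tvf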
Example #23
        public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
        {
            string segment        = Lucene3xSegmentInfoFormat.GetDocStoreSegment(si);
            int    docStoreOffset = Lucene3xSegmentInfoFormat.GetDocStoreOffset(si);
            int    size           = si.DocCount;
            bool   success        = false;

            FieldInfos = fn;
            try
            {
                if (docStoreOffset != -1 && Lucene3xSegmentInfoFormat.GetDocStoreIsCompoundFile(si))
                {
                    d = StoreCFSReader = new CompoundFileDirectory(si.Dir, IndexFileNames.SegmentFileName(segment, "", Lucene3xCodec.COMPOUND_FILE_STORE_EXTENSION), context, false);
                }
                else
                {
                    StoreCFSReader = null;
                }
                FieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
                string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
                IndexStream = d.OpenInput(indexStreamFN, context);

                Format = IndexStream.ReadInt();

                if (Format < FORMAT_MINIMUM)
                {
                    throw new IndexFormatTooOldException(IndexStream, Format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }
                if (Format > FORMAT_CURRENT)
                {
                    throw new IndexFormatTooNewException(IndexStream, Format, FORMAT_MINIMUM, FORMAT_CURRENT);
                }

                long indexSize = IndexStream.Length() - FORMAT_SIZE;

                if (docStoreOffset != -1)
                {
                    // We read only a slice out of this shared fields file
                    this.DocStoreOffset = docStoreOffset;
                    this.Size           = size;

                    // Verify the file is long enough to hold all of our
                    // docs
                    Debug.Assert(((int)(indexSize / 8)) >= size + this.DocStoreOffset, "indexSize=" + indexSize + " size=" + size + " docStoreOffset=" + docStoreOffset);
                }
                else
                {
                    this.DocStoreOffset = 0;
                    this.Size           = (int)(indexSize >> 3);
                    // Verify two sources of "maxDoc" agree:
                    if (this.Size != si.DocCount)
                    {
                        throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.Size + " but segmentInfo shows " + si.DocCount);
                    }
                }
                NumTotalDocs = (int)(indexSize >> 3);
                success      = true;
            }
            finally
            {
                // With lock-less commits, it's entirely possible (and
                // fine) to hit a FileNotFound exception above. In
                // this case, we want to explicitly close any subset
                // of things that were opened so that we don't have to
                // wait for a GC to do so.
                if (!success)
                {
                    try
                    {
                        Dispose();
                    } // keep our original exception
                    catch (Exception)
                    {
                    }
                }
            }
        }
Example #25
        internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInfo si, IOContext context, int termsIndexDivisor)
        {
            fieldsReaderLocal = new AnonymousFieldsReaderLocal(this);
            termVectorsLocal  = new AnonymousTermVectorsLocal(this);

            if (termsIndexDivisor == 0)
            {
                throw new ArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)");
            }

            Codec     codec = si.Info.Codec;
            Directory cfsDir; // confusing name: if (cfs) it's the cfsDir, otherwise it's the segment's directory.

            bool success = false;

            try
            {
                if (si.Info.UseCompoundFile)
                {
                    cfsDir = cfsReader = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(si.Info.Name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context, false);
                }
                else
                {
                    cfsReader = null;
                    cfsDir    = dir;
                }

                FieldInfos fieldInfos = owner.FieldInfos;

                this.termsIndexDivisor = termsIndexDivisor;
                PostingsFormat   format           = codec.PostingsFormat;
                SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.Info, fieldInfos, context, termsIndexDivisor);
                // Ask codec for its Fields
                fields = format.FieldsProducer(segmentReadState);
                Debug.Assert(fields != null);
                // ask codec for its Norms:
                // TODO: since we don't write any norms file if there are no norms,
                // kinda shaky to assume the codec handles the case of no norms file at all gracefully?!

                if (fieldInfos.HasNorms)
                {
                    normsProducer = codec.NormsFormat.NormsProducer(segmentReadState);
                    Debug.Assert(normsProducer != null);
                }
                else
                {
                    normsProducer = null;
                }

                fieldsReaderOrig = si.Info.Codec.StoredFieldsFormat.FieldsReader(cfsDir, si.Info, fieldInfos, context);

                if (fieldInfos.HasVectors) // open term vector files only as needed
                {
                    termVectorsReaderOrig = si.Info.Codec.TermVectorsFormat.VectorsReader(cfsDir, si.Info, fieldInfos, context);
                }
                else
                {
                    termVectorsReaderOrig = null;
                }

                success = true;
            }
            finally
            {
                if (!success)
                {
                    DecRef();
                }
            }
        }
Example #26
        /// <summary>
        /// Prints the filename and size of each file within a given compound file.
        /// Add the -extract flag to extract files to the current working directory.
        /// In order to make the extracted version of the index work, you have to copy
        /// the segments file from the compound index into the directory where the extracted files are stored. </summary>
        ///// <param name="args"> Usage: org.apache.lucene.index.IndexReader [-extract] &lt;cfsfile&gt; </param>
        public static void Main(string[] args)
        {
            string filename = null;
            bool   extract  = false;
            string dirImpl  = null;

            int j = 0;

            while (j < args.Length)
            {
                string arg = args[j];
                if ("-extract".Equals(arg, StringComparison.Ordinal))
                {
                    extract = true;
                }
                else if ("-dir-impl".Equals(arg, StringComparison.Ordinal))
                {
                    if (j == args.Length - 1)
                    {
                        // LUCENENET specific - our wrapper console shows the correct usage
                        throw new ArgumentException("ERROR: missing value for --directory-type option");
                        //Console.WriteLine("ERROR: missing value for -dir-impl option");
                        //Environment.Exit(1);
                    }
                    j++;
                    dirImpl = args[j];
                }
                else if (filename == null)
                {
                    filename = arg;
                }
                j++;
            }

            if (filename == null)
            {
                // LUCENENET specific - our wrapper console shows the correct usage
                throw new ArgumentException("ERROR: CFS-FILE is required");
                //Console.WriteLine("Usage: org.apache.lucene.index.CompoundFileExtractor [-extract] [-dir-impl X] <cfsfile>");
                //return;
            }

            Store.Directory       dir     = null;
            CompoundFileDirectory cfr     = null;
            IOContext             context = IOContext.READ;

            try
            {
                FileInfo file    = new FileInfo(filename);
                string   dirname = file.DirectoryName;
                filename = file.Name;
                if (dirImpl == null)
                {
                    dir = FSDirectory.Open(new DirectoryInfo(dirname));
                }
                else
                {
                    dir = CommandLineUtil.NewFSDirectory(dirImpl, new DirectoryInfo(dirname));
                }

                cfr = new CompoundFileDirectory(dir, filename, IOContext.DEFAULT, false);

                string[] files = cfr.ListAll();
                ArrayUtil.TimSort(files); // sort the array of filename so that the output is more readable

                for (int i = 0; i < files.Length; ++i)
                {
                    long len = cfr.FileLength(files[i]);

                    if (extract)
                    {
                        Console.WriteLine("extract " + files[i] + " with " + len + " bytes to local directory...");
                        using (IndexInput ii = cfr.OpenInput(files[i], context))
                        {
                            using (FileStream f = new FileStream(files[i], FileMode.Create, FileAccess.Write))
                            {
                                // read and write with a small buffer, which is more effective than reading byte by byte
                                byte[] buffer = new byte[1024];
                                int    chunk  = buffer.Length;
                                while (len > 0)
                                {
                                    int bufLen = (int)Math.Min(chunk, len);
                                    ii.ReadBytes(buffer, 0, bufLen);
                                    f.Write(buffer, 0, bufLen);
                                    len -= bufLen;
                                }
                            }
                        }
                    }
                    else
                    {
                        Console.WriteLine(files[i] + ": " + len + " bytes");
                    }
                }
            }
            catch (IOException ioe)
            {
                Console.WriteLine(ioe.ToString());
                //Console.Write(ioe.StackTrace);
            }
            finally
            {
                try
                {
                    if (dir != null)
                    {
                        dir.Dispose();
                    }
                    if (cfr != null)
                    {
                        cfr.Dispose();
                    }
                }
                catch (IOException ioe)
                {
                    Console.WriteLine(ioe.ToString());
                    //Console.Write(ioe.StackTrace);
                }
            }
        }
Example #29
        /// <summary>
        /// NOTE: this method creates a compound file for all files returned by
        /// info.Files. While, generally, this may include separate norms and
        /// deletion files, this SegmentInfo must not reference such files when this
        /// method is called, because they are not allowed within a compound file.
        /// </summary>
        public static ICollection<string> CreateCompoundFile(InfoStream infoStream, Directory directory, CheckAbort checkAbort, SegmentInfo info, IOContext context)
        {
            string fileName = Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_EXTENSION);
            if (infoStream.IsEnabled("IW"))
            {
                infoStream.Message("IW", "create compound file " + fileName);
            }
            Debug.Assert(Lucene3xSegmentInfoFormat.GetDocStoreOffset(info) == -1);
            // Now merge all added files
            ICollection<string> files = info.Files;
            CompoundFileDirectory cfsDir = new CompoundFileDirectory(directory, fileName, context, true);
            IOException prior = null;
            try
            {
                foreach (string file in files)
                {
                    directory.Copy(cfsDir, file, file, context);
                    checkAbort.Work(directory.FileLength(file));
                }
            }
            catch (System.IO.IOException ex)
            {
                prior = ex;
            }
            finally
            {
                bool success = false;
                try
                {
                    IOUtils.CloseWhileHandlingException(prior, cfsDir);
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        try
                        {
                            directory.DeleteFile(fileName);
                        }
                        catch (Exception)
                        {
                        }
                        try
                        {
                            directory.DeleteFile(Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
                        }
                        catch (Exception)
                        {
                        }
                    }
                }
            }

            // Replace all previous files with the CFS/CFE files:
            HashSet<string> siFiles = new HashSet<string>();
            siFiles.Add(fileName);
            siFiles.Add(Lucene.Net.Index.IndexFileNames.SegmentFileName(info.Name, "", Lucene.Net.Index.IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
            info.Files = siFiles;

            return files;
        }
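A hypothetical caller of CreateCompoundFile (assuming the static method is accessible from the call site; names like infoStream and segmentInfo are illustrative): after a merge, build the .cfs/.cfe pair, then delete the now-redundant originals the method returns:

            ICollection<string> oldFiles = IndexWriter.CreateCompoundFile(
                infoStream, directory, MergeState.CheckAbort.NONE, segmentInfo, IOContext.DEFAULT);
            foreach (string file in oldFiles)
            {
                directory.DeleteFile(file); // originals now live inside the compound file
            }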