Example #1
		internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
		{
			fieldInfos = fn;
			fieldsStream = d.CreateOutput(segment + ".fdt");
			indexStream = d.CreateOutput(segment + ".fdx");
			doClose = true;
		}
Example #2
        public PreFlexRWStoredFieldsWriter(Directory directory, string segment, IOContext context)
        {
            Debug.Assert(directory != null);
            this.Directory = directory;
            this.Segment = segment;

            bool success = false;
            try
            {
                FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION), context);
                IndexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION), context);

                FieldsStream.WriteInt(Lucene3xStoredFieldsReader.FORMAT_CURRENT);
                IndexStream.WriteInt(Lucene3xStoredFieldsReader.FORMAT_CURRENT);

                success = true;
            }
            finally
            {
                if (!success)
                {
                    Abort();
                }
            }
        }
Example #3
        public TermVectorsWriter(Directory directory, System.String segment, FieldInfos fieldInfos)
        {
            // Open files for TermVector storage
            tvx = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
            tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
            tvd = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
            tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
            tvf = directory.CreateOutput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
            tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);

            this.fieldInfos = fieldInfos;
        }
Example #4
        public TermVectorsWriter(Directory directory, System.String segment, FieldInfos fieldInfos)
        {
            // Open files for TermVector storage
            tvx = directory.CreateOutput(segment + TVX_EXTENSION);
            tvx.WriteInt(FORMAT_VERSION);
            tvd = directory.CreateOutput(segment + TVD_EXTENSION);
            tvd.WriteInt(FORMAT_VERSION);
            tvf = directory.CreateOutput(segment + TVF_EXTENSION);
            tvf.WriteInt(FORMAT_VERSION);

            this.fieldInfos = fieldInfos;
            fields = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(fieldInfos.Size()));
            terms = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
        }
Example #5
        /// <summary>
        /// Save a single segment's info. </summary>
        public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
        {
            string fileName = IndexFileNames.SegmentFileName(si.Name, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
            si.AddFile(fileName);

            IndexOutput output = dir.CreateOutput(fileName, ioContext);

            bool success = false;
            try
            {
                CodecUtil.WriteHeader(output, Lucene46SegmentInfoFormat.CODEC_NAME, Lucene46SegmentInfoFormat.VERSION_CURRENT);
                // Write the Lucene version that created this segment, since 3.1
                output.WriteString(si.Version);
                output.WriteInt(si.DocCount);

                output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
                output.WriteStringStringMap(si.Diagnostics);
                output.WriteStringSet(si.Files);
                CodecUtil.WriteFooter(output);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(output);
                    si.Dir.DeleteFile(fileName);
                }
                else
                {
                    output.Dispose();
                }
            }
        }
Example #6
 /// <summary>Creates a file of the specified size with random data. </summary>
 private void  CreateRandomFile(Directory dir, System.String name, int size)
 {
     IndexOutput os = dir.CreateOutput(name);
     System.Random random = new System.Random(); // create the generator once; a new Random per byte would repeat values
     for (int i = 0; i < size; i++)
     {
         byte b = (byte) random.Next(256);
         os.WriteByte(b);
     }
     os.Close();
 }
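A minimal, self-contained round-trip built on the same calls may help make the pattern above concrete. It is only a sketch: it assumes the older Lucene.Net API used in this example (single-argument CreateOutput/OpenInput and Close()), and RAMDirectory, the file name, and the byte count are illustrative choices.

 // Hedged sketch (not from the source above): write a few bytes with CreateOutput,
 // then read them back with OpenInput. Assumes the older API shown in Example #6.
 private void CreateOutputRoundTrip()
 {
     Lucene.Net.Store.Directory dir = new Lucene.Net.Store.RAMDirectory(); // in-memory directory, illustrative only
     IndexOutput os = dir.CreateOutput("demo.bin");
     try
     {
         for (int i = 0; i < 16; i++)
         {
             os.WriteByte((byte) i); // a few sequential bytes
         }
     }
     finally
     {
         os.Close(); // release the output even if a write failed
     }

     IndexInput input = dir.OpenInput("demo.bin");
     try
     {
         System.Diagnostics.Debug.Assert(input.Length() == 16);
         byte first = input.ReadByte(); // reads back 0
     }
     finally
     {
         input.Close();
     }
 }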
Example #7
		private void  Initialize(Directory directory, System.String segment, FieldInfos fis, int interval, bool isi)
		{
			indexInterval = interval;
			fieldInfos = fis;
			isIndex = isi;
			output = directory.CreateOutput(segment + (isIndex ? ".tii" : ".tis"));
			output.WriteInt(FORMAT); // write format
			output.WriteLong(0); // leave space for size
			output.WriteInt(indexInterval); // write indexInterval
			output.WriteInt(skipInterval); // write skipInterval
		}
Example #8
 public PreFlexRWTermVectorsWriter(Directory directory, string segment, IOContext context)
 {
     this.Directory = directory;
     this.Segment = segment;
     bool success = false;
     try
     {
         // Open files for TermVector storage
         Tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context);
         Tvx.WriteInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
         Tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
         Tvd.WriteInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
         Tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
         Tvf.WriteInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
         success = true;
     }
     finally
     {
         if (!success)
         {
             Abort();
         }
     }
 }
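Several of the IOContext-based constructors in this listing, including the one above, share one shape: open the outputs, write a header, and track a success flag so a finally block can clean up partially created files on any failure. The sketch below restates that pattern in condensed form; MyWriter, the "dmo" extension, and the codec name/version are hypothetical, while CreateOutput(name, context), IndexFileNames.SegmentFileName, CodecUtil.WriteHeader, and IOUtils.CloseWhileHandlingException are the same calls the examples themselves use.

 // Hedged sketch of the open-header-or-abort pattern used by several constructors above.
 // MyWriter, the "dmo" extension, and the codec name/version are hypothetical.
 using Lucene.Net.Codecs;
 using Lucene.Net.Index;
 using Lucene.Net.Store;
 using Lucene.Net.Util;

 internal sealed class MyWriter
 {
     private readonly Directory directory;
     private readonly string segment;
     private IndexOutput output;

     public MyWriter(Directory directory, string segment, IOContext context)
     {
         this.directory = directory;
         this.segment = segment;
         bool success = false;
         try
         {
             // SegmentFileName builds "<segment>.dmo"
             output = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", "dmo"), context);
             CodecUtil.WriteHeader(output, "DemoCodec", 0);
             success = true;
         }
         finally
         {
             if (!success)
             {
                 Abort(); // clean up whatever was partially written
             }
         }
     }

     private void Abort()
     {
         IOUtils.CloseWhileHandlingException(output);
         try
         {
             directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", "dmo"));
         }
         catch
         {
             // best effort: the partial file may not exist yet
         }
     }
 }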
Example #9
 public virtual void CopyFile(Directory dir, string src, string dest)
 {
     IndexInput @in = dir.OpenInput(src, NewIOContext(Random()));
     IndexOutput @out = dir.CreateOutput(dest, NewIOContext(Random()));
     var b = new byte[1024];
     long remainder = @in.Length();
     while (remainder > 0)
     {
         int len = (int)Math.Min(b.Length, remainder);
         @in.ReadBytes(b, 0, len);
         @out.WriteBytes(b, len);
         remainder -= len;
     }
     @in.Dispose();
     @out.Dispose();
 }
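CopyFile above streams src into dest in 1 KB chunks and disposes both ends. The following is a hedged usage sketch only: it is assumed to sit alongside CopyFile in the same test class, it uses IOContext.DEFAULT instead of the test framework's NewIOContext(Random()), and the directory and file names are illustrative.

 // Hypothetical companion to CopyFile above: write a source file larger than the 1 KB
 // buffer, copy it, and check that both files end up the same length.
 public virtual void CopyFileUsageSketch()
 {
     Directory dir = new RAMDirectory();                // illustrative in-memory directory
     using (IndexOutput src = dir.CreateOutput("src.bin", IOContext.DEFAULT))
     {
         for (int i = 0; i < 4000; i++)                 // more than one 1024-byte buffer
         {
             src.WriteByte((byte)(i & 0xFF));
         }
     }
     CopyFile(dir, "src.bin", "dst.bin");               // the method shown above
     Debug.Assert(dir.FileLength("src.bin") == dir.FileLength("dst.bin"));
     dir.Dispose();
 }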
Example #10
        public virtual FieldInfos CreateAndWriteFieldInfos(Directory dir, string filename)
        {
            //Positive test of FieldInfos
            Assert.IsTrue(TestDoc != null);
            FieldInfos.Builder builder = new FieldInfos.Builder();
            foreach (IndexableField field in TestDoc)
            {
                builder.AddOrUpdate(field.Name(), field.FieldType());
            }
            FieldInfos fieldInfos = builder.Finish();
            //Since the complement is stored as well in the fields map
            Assert.IsTrue(fieldInfos.Size() == DocHelper.All.Count); //this is all b/c we are using the no-arg constructor

            IndexOutput output = dir.CreateOutput(filename, NewIOContext(Random()));
            Assert.IsTrue(output != null);
            //Use a RAMOutputStream

            FieldInfosWriter writer = Codec.Default.FieldInfosFormat().FieldInfosWriter;
            writer.Write(dir, filename, "", fieldInfos, IOContext.DEFAULT);
            output.Dispose();
            return fieldInfos;
        }
Example #11
        private int LastFieldNumber = -1; // only for assert

        #endregion Fields

        #region Constructors

        public PreFlexRWNormsConsumer(Directory directory, string segment, IOContext context)
        {
            string normsFileName = IndexFileNames.SegmentFileName(segment, "", NORMS_EXTENSION);
            bool success = false;
            IndexOutput output = null;
            try
            {
                output = directory.CreateOutput(normsFileName, context);
                // output.WriteBytes(NORMS_HEADER, 0, NORMS_HEADER.Length);
                foreach (var @sbyte in NORMS_HEADER)
                {
                    output.WriteByte((byte)@sbyte);
                }
                @out = output;
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(output);
                }
            }
        }
Example #12
        internal void  FinishCommit(Directory dir)
        {
            if (pendingSegnOutput == null)
            {
                throw new System.SystemException("prepareCommit was not called");
            }
            bool success = false;

            try
            {
                pendingSegnOutput.FinishCommit();
                pendingSegnOutput.Close();
                pendingSegnOutput = null;
                success           = true;
            }
            finally
            {
                if (!success)
                {
                    RollbackCommit(dir);
                }
            }

            // NOTE: if we crash here, we have left a segments_N
            // file in the directory in a possibly corrupt state (if
            // some bytes made it to stable storage and others
            // didn't).  But, the segments_N file includes checksum
            // at the end, which should catch this case.  So when a
            // reader tries to read it, it will throw a
            // CorruptIndexException, which should cause the retry
            // logic in SegmentInfos to kick in and load the last
            // good (previous) segments_N-1 file.

            System.String fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
            success = false;
            try
            {
                dir.Sync(fileName);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    try
                    {
                        dir.DeleteFile(fileName);
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                }
            }

            lastGeneration = generation;

            try
            {
                IndexOutput genOutput = dir.CreateOutput(IndexFileNames.SEGMENTS_GEN);
                try
                {
                    genOutput.WriteInt(FORMAT_LOCKLESS);
                    genOutput.WriteLong(generation);
                    genOutput.WriteLong(generation);
                }
                finally
                {
                    genOutput.Close();
                }
            }
            catch (System.Exception t)
            {
                // It's OK if we fail to write this file since it's
                // used only as one of the retry fallbacks.
            }
        }
Example #13
        public void FinishCommit(Directory dir)
        {
            if (pendingOutput == null)
                throw new System.Exception("prepareCommit was not called");
            bool success = false;
            try
            {
                pendingOutput.FinishCommit();
                pendingOutput.Close();
                pendingOutput = null;
                success = true;
            }
            finally
            {
                if (!success)
                    RollbackCommit(dir);
            }

            // NOTE: if we crash here, we have left a segments_N
            // file in the directory in a possibly corrupt state (if
            // some bytes made it to stable storage and others
            // didn't).  But, the segments_N file includes checksum
            // at the end, which should catch this case.  So when a
            // reader tries to read it, it will throw a
            // CorruptIndexException, which should cause the retry
            // logic in SegmentInfos to kick in and load the last
            // good (previous) segments_N-1 file.

            String fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", generation);
            success = false;
            try
            {
                dir.Sync(fileName);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    try
                    {
                        dir.DeleteFile(fileName);
                    }
                    catch (System.Exception)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                }
            }

            lastGeneration = generation;

            try
            {
                IndexOutput genOutput = dir.CreateOutput(IndexFileNames.SEGMENTS_GEN);
                try
                {
                    genOutput.WriteInt(FORMAT_LOCKLESS);
                    genOutput.WriteLong(generation);
                    genOutput.WriteLong(generation);
                }
                finally
                {
                    genOutput.Close();
                }
            }
            catch (System.Exception)
            {
                // It's OK if we fail to write this file since it's
                // used only as one of the retry fallbacks.
            }
        }
Example #14
		public void  Write(Directory d, System.String name)
		{
			IndexOutput output = d.CreateOutput(name);
			try
			{
				Write(output);
			}
			finally
			{
				output.Close();
			}
		}
Example #15
        public CachedIndexInput(ICloudProvider CloudProvider, Directory CacheDirectory, string Name)
        {
            this.name = Name;

#if FULLDEBUG
            Debug.WriteLine("Opening " + this.name);
#endif
            this.fileMutex = BlobMutexManager.GrabMutex(this.name);
            this.fileMutex.WaitOne();
            try {
                bool         fFileNeeded   = false;
                FileMetadata cloudMetadata = CloudProvider.FileMetadata(this.name);
                if (!cloudMetadata.Exists)
                {
                    fFileNeeded = false;
                    // TODO: Delete local if it doesn't exist on cloud?

                    /*
                     * if (CacheDirectory.FileExists(this.name)) {
                     *      CacheDirectory.DeleteFile(this.name);
                     * }
                     */
                }
                else if (!CacheDirectory.FileExists(this.name))
                {
                    fFileNeeded = true;
                }
                else
                {
                    long cachedLength = CacheDirectory.FileLength(this.name);

                    long     blobLength          = cloudMetadata.Length;
                    DateTime blobLastModifiedUTC = cloudMetadata.LastModified.ToUniversalTime();

                    if (!cloudMetadata.Exists || cachedLength != blobLength)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
                        // Timestamps between the cache and blob storage can drift by a tick or so,
                        // so treat the copies as identical when the lengths match and the times are within one second.
                        DateTime cachedLastModifiedUTC = new DateTime(CacheDirectory.FileModified(this.name), DateTimeKind.Local).ToUniversalTime();
                        if (cachedLastModifiedUTC < blobLastModifiedUTC)
                        {
                            TimeSpan timeSpan = blobLastModifiedUTC.Subtract(cachedLastModifiedUTC);
                            if (timeSpan.TotalSeconds > 1)
                            {
                                fFileNeeded = true;
                            }
                            else
                            {
#if FULLDEBUG
                                Debug.WriteLine("Using cache for " + this.name + ": " + timeSpan.TotalSeconds);
#endif
                                // file not needed
                            }
                        }
                    }
                }

                // if the file does not exist
                // or if it exists and is older than the last-modified time in the blob properties (which always comes from blob storage)
                if (fFileNeeded)
                {
                    using (StreamOutput fileStream = new StreamOutput(CacheDirectory.CreateOutput(this.name))) {
                        Stream blobStream = CloudProvider.Download(this.name);
                        blobStream.CopyTo(fileStream);

                        fileStream.Flush();
                        Debug.WriteLine("GET {0} RETREIVED {1} bytes", this.name, fileStream.Length);
                    }
                }
                else
                {
#if FULLDEBUG
                    if (!cloudMetadata.Exists)
                    {
                        Debug.WriteLine("Cloud doesn't have " + this.name);
                    }
                    else
                    {
                        Debug.WriteLine("Using cached file for " + this.name);
                    }
#endif
                }

                // open the file in read only mode
                this.indexInput = CacheDirectory.OpenInput(this.name);
            } finally {
                this.fileMutex.ReleaseMutex();
            }
        }
Example #16
 /// <summary>
 /// Writes this vector to the file <code>name</code> in Directory
 ///  <code>d</code>, in a format that can be read by the constructor {@link
 ///  #BitVector(Directory, String, IOContext)}.
 /// </summary>
 public void Write(Directory d, string name, IOContext context)
 {
     Debug.Assert(!(d is CompoundFileDirectory));
     IndexOutput output = d.CreateOutput(name, context);
     try
     {
         output.WriteInt(-2);
         CodecUtil.WriteHeader(output, CODEC, VERSION_CURRENT);
         if (Sparse)
         {
             // sparse bit-set more efficiently saved as d-gaps.
             WriteClearedDgaps(output);
         }
         else
         {
             WriteBits(output);
         }
         CodecUtil.WriteFooter(output);
         Debug.Assert(VerifyCount());
     }
     finally
     {
         IOUtils.Close(output);
     }
 }
Example #17
 /// <summary>Writes this vector to the file <code>name</code> in Directory
 /// <code>d</code>, in a format that can be read by the constructor {@link
 /// #BitVector(Directory, String)}.  
 /// </summary>
 public void Write(Directory d, System.String name)
 {
     IndexOutput output = d.CreateOutput(name);
     try
     {
         if (IsSparse())
         {
             WriteDgaps(output); // sparse bit-set more efficiently saved as d-gaps.
         }
         else
         {
             WriteBits(output);
         }
     }
     finally
     {
         output.Close();
     }
 }
Example #18
		public void  Write(Directory directory)
		{
			
			System.String segmentFileName = GetNextSegmentFileName();
			
			// Always advance the generation on write:
			if (generation == - 1)
			{
				generation = 1;
			}
			else
			{
				generation++;
			}
			
			IndexOutput output = directory.CreateOutput(segmentFileName);
			
			bool success = false;
			
			try
			{
				output.WriteInt(CURRENT_FORMAT); // write FORMAT
				output.WriteLong(++version); // every write changes
				// the index
				output.WriteInt(counter); // write counter
				output.WriteInt(Count); // write infos
				for (int i = 0; i < Count; i++)
				{
					Info(i).Write(output);
				}
			}
			finally
			{
				try
				{
					output.Close();
					success = true;
				}
				finally
				{
					if (!success)
					{
						// Try not to leave a truncated segments_N file in
						// the index:
						directory.DeleteFile(segmentFileName);
					}
				}
			}
			
			try
			{
				output = directory.CreateOutput(IndexFileNames.SEGMENTS_GEN);
				try
				{
					output.WriteInt(FORMAT_LOCKLESS);
					output.WriteLong(generation);
					output.WriteLong(generation);
				}
				finally
				{
					output.Close();
				}
			}
			catch (System.IO.IOException e)
			{
				// It's OK if we fail to write this file since it's
				// used only as one of the retry fallbacks.
			}
			
			lastGeneration = generation;
		}
Example #19
        private void Initialize(Directory directory, string segment, FieldInfos fis, int interval, bool isi)
        {
            IndexInterval = interval;
            FieldInfos = fis;
            IsIndex = isi;
            Output = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)), IOContext.DEFAULT);
            bool success = false;
            try
            {
                Output.WriteInt(FORMAT_CURRENT); // write format
                Output.WriteLong(0); // leave space for size
                Output.WriteInt(IndexInterval); // write indexInterval
                Output.WriteInt(SkipInterval); // write skipInterval
                Output.WriteInt(MaxSkipLevels); // write maxSkipLevels
                Debug.Assert(InitUTF16Results());
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.CloseWhileHandlingException(Output);

                    try
                    {
                        directory.DeleteFile(IndexFileNames.SegmentFileName(segment, "", (IsIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
                    }
                    catch (IOException ignored)
                    {
                    }
                }
            }
        }
Example #20
 /// <summary>
 /// Sole constructor. </summary>
 public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext context)
 {
     this.Directory = directory;
     this.Segment = segment;
     bool success = false;
     try
     {
         // Open files for TermVector storage
         Tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), context);
         CodecUtil.WriteHeader(Tvx, Lucene40TermVectorsReader.CODEC_NAME_INDEX, Lucene40TermVectorsReader.VERSION_CURRENT);
         Tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
         CodecUtil.WriteHeader(Tvd, Lucene40TermVectorsReader.CODEC_NAME_DOCS, Lucene40TermVectorsReader.VERSION_CURRENT);
         Tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
         CodecUtil.WriteHeader(Tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT);
         Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == Tvx.FilePointer);
         Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == Tvd.FilePointer);
         Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == Tvf.FilePointer);
         success = true;
     }
     finally
     {
         if (!success)
         {
             Abort();
         }
     }
 }
Example #21
        /// <summary>
        /// Sole constructor. </summary>
        public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context)
        {
            Debug.Assert(directory != null);
            this.Directory = directory;
            this.Segment = segment;

            bool success = false;
            try
            {
                FieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
                IndexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION), context);

                CodecUtil.WriteHeader(FieldsStream, CODEC_NAME_DAT, VERSION_CURRENT);
                CodecUtil.WriteHeader(IndexStream, CODEC_NAME_IDX, VERSION_CURRENT);
                Debug.Assert(HEADER_LENGTH_DAT == FieldsStream.FilePointer);
                Debug.Assert(HEADER_LENGTH_IDX == IndexStream.FilePointer);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    Abort();
                }
            }
        }
Example #22
        /// <summary>Merge files with the extensions added up to now.
        /// All files with these extensions are combined sequentially into the
        /// compound stream. After successful merge, the source files
        /// are deleted.
        /// </summary>
        /// <throws>  IllegalStateException if close() had been called before or
        /// if no file has been added to this object </throws>
        public void Dispose()
        {
            // Extract into protected method if class ever becomes unsealed

            // TODO: Dispose shouldn't throw exceptions!
            if (merged)
            {
                throw new SystemException("Merge already performed");
            }

            if ((entries.Count == 0))
            {
                throw new SystemException("No entries to merge have been defined");
            }

            merged = true;

            // open the compound stream
            IndexOutput os = null;

            try
            {
                os = directory.CreateOutput(fileName);

                // Write the number of entries
                os.WriteVInt(entries.Count);

                // Write the directory with all offsets at 0.
                // Remember the positions of directory entries so that we can
                // adjust the offsets later
                long totalSize = 0;
                foreach (FileEntry fe in entries)
                {
                    fe.directoryOffset = os.FilePointer;
                    os.WriteLong(0); // for now
                    os.WriteString(fe.file);
                    totalSize += directory.FileLength(fe.file);
                }

                // Pre-allocate size of file as optimization --
                // this can potentially help IO performance as
                // we write the file and also later during
                // searching.  It also uncovers a disk-full
                // situation earlier and hopefully without
                // actually filling disk to 100%:
                long finalLength = totalSize + os.FilePointer;
                os.SetLength(finalLength);

                // Open the files and copy their data into the stream.
                // Remember the locations of each file's data section.
                var buffer = new byte[16384];
                foreach (FileEntry fe in entries)
                {
                    fe.dataOffset = os.FilePointer;
                    CopyFile(fe, os, buffer);
                }

                // Write the data offsets into the directory of the compound stream
                foreach (FileEntry fe in entries)
                {
                    os.Seek(fe.directoryOffset);
                    os.WriteLong(fe.dataOffset);
                }

                System.Diagnostics.Debug.Assert(finalLength == os.Length);

                // Close the output stream. Set the os to null before trying to
                // close so that if an exception occurs during the close, the
                // finally clause below will not attempt to close the stream
                // the second time.
                IndexOutput tmp = os;
                os = null;
                tmp.Dispose(); // AA:
            }
            finally
            {
                if (os != null)
                {
                    try
                    {
                        os.Dispose(); // AA:
                    }
                    catch (System.IO.IOException)
                    {
                    }
                }
            }
        }
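The Dispose above is where the merge actually happens. A hedged call-site sketch follows, assuming this is the classic CompoundFileWriter API (a constructor taking the Directory and the compound file name, plus AddFile to register entries); the segment file names are illustrative.

        // Hypothetical usage: register the files to merge, then Dispose() to build the compound file.
        var cfsWriter = new CompoundFileWriter(dir, "_1.cfs"); // assumes the classic CompoundFileWriter API
        cfsWriter.AddFile("_1.fdt");   // previously written stored-fields data
        cfsWriter.AddFile("_1.fdx");   // and its index
        cfsWriter.Dispose();           // runs the merge shown above; throws if nothing was added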
Example #23
        /// <summary>Merge files with the extensions added up to now.
        /// All files with these extensions are combined sequentially into the
        /// compound stream. After successful merge, the source files
        /// are deleted.
        /// </summary>
        /// <throws>  IllegalStateException if close() had been called before or
        /// if no file has been added to this object </throws>
        public void  Close()
        {
            if (merged)
            {
                throw new System.SystemException("Merge already performed");
            }

            if ((entries.Count == 0))
            {
                throw new System.SystemException("No entries to merge have been defined");
            }

            merged = true;

            // open the compound stream
            IndexOutput os = null;

            try
            {
                os = directory.CreateOutput(fileName);

                // Write the number of entries
                os.WriteVInt(entries.Count);

                // Write the directory with all offsets at 0.
                // Remember the positions of directory entries so that we can
                // adjust the offsets later
                System.Collections.IEnumerator it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    fe.directoryOffset = os.GetFilePointer();
                    os.WriteLong(0);                     // for now
                    os.WriteString(fe.file);
                }

                // Open the files and copy their data into the stream.
                // Remember the locations of each file's data section.
                byte[] buffer = new byte[16384];
                it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    fe.dataOffset = os.GetFilePointer();
                    CopyFile(fe, os, buffer);
                }

                // Write the data offsets into the directory of the compound stream
                it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    os.Seek(fe.directoryOffset);
                    os.WriteLong(fe.dataOffset);
                }

                // Close the output stream. Set the os to null before trying to
                // close so that if an exception occurs during the close, the
                // finally clause below will not attempt to close the stream
                // the second time.
                IndexOutput tmp = os;
                os = null;
                tmp.Close();
            }
            finally
            {
                if (os != null)
                {
                    try
                    {
                        os.Close();
                    }
                    catch (System.IO.IOException)
                    {
                    }
                }
            }
        }
Example #24
 private void Initialize(Directory directory, System.String segment, FieldInfos fis, int interval, bool isi)
 {
     indexInterval = interval;
     fieldInfos = fis;
     isIndex = isi;
     output = directory.CreateOutput(segment + (isIndex ? ".tii" : ".tis"));
     output.WriteInt(FORMAT_CURRENT); // write format
     output.WriteLong(0); // leave space for size
     output.WriteInt(indexInterval); // write indexInterval
     output.WriteInt(skipInterval); // write skipInterval
     output.WriteInt(maxSkipLevels); // write maxSkipLevels
     System.Diagnostics.Debug.Assert(InitUTF16Results());
 }
Example #25
        public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
        {
            string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
            IndexOutput output = directory.CreateOutput(fileName, context);
            bool success = false;
            try
            {
                CodecUtil.WriteHeader(output, Lucene42FieldInfosFormat.CODEC_NAME, Lucene42FieldInfosFormat.FORMAT_CURRENT);
                output.WriteVInt(infos.Size());
                foreach (FieldInfo fi in infos)
                {
                    FieldInfo.IndexOptions? indexOptions = fi.FieldIndexOptions;
                    sbyte bits = 0x0;
                    if (fi.HasVectors())
                    {
                        bits |= Lucene42FieldInfosFormat.STORE_TERMVECTOR;
                    }
                    if (fi.OmitsNorms())
                    {
                        bits |= Lucene42FieldInfosFormat.OMIT_NORMS;
                    }
                    if (fi.HasPayloads())
                    {
                        bits |= Lucene42FieldInfosFormat.STORE_PAYLOADS;
                    }
                    if (fi.Indexed)
                    {
                        bits |= Lucene42FieldInfosFormat.IS_INDEXED;
                        Debug.Assert(indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads());
                        if (indexOptions == FieldInfo.IndexOptions.DOCS_ONLY)
                        {
                            bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
                        }
                        else if (indexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
                        {
                            bits |= Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
                        }
                        else if (indexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS)
                        {
                            bits |= Lucene42FieldInfosFormat.OMIT_POSITIONS;
                        }
                    }
                    output.WriteString(fi.Name);
                    output.WriteVInt(fi.Number);
                    output.WriteByte((byte)bits);

                    // pack the DV types in one byte
                    var dv = DocValuesByte(fi.DocValuesType);
                    var nrm = DocValuesByte(fi.NormType);
                    Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
                    var val = unchecked((sbyte)(0xff & ((nrm << 4) | dv)));
                    output.WriteByte((byte)val);
                    output.WriteStringStringMap(fi.Attributes());
                }
                success = true;
            }
            finally
            {
                if (success)
                {
                    output.Dispose();
                }
                else
                {
                    IOUtils.CloseWhileHandlingException(output);
                }
            }
        }
Example #26
		internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
		{
			fieldInfos = fn;
			
			bool success = false;
			System.String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
			try
			{
				fieldsStream = d.CreateOutput(fieldsName);
				fieldsStream.WriteInt(FORMAT_CURRENT);
				success = true;
			}
			finally
			{
				if (!success)
				{
					try
					{
						Close();
					}
					catch (System.Exception t)
					{
						// Suppress so we keep throwing the original exception
					}
					try
					{
						d.DeleteFile(fieldsName);
					}
					catch (System.Exception t)
					{
						// Suppress so we keep throwing the original exception
					}
				}
			}
			
			success = false;
			System.String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
			try
			{
				indexStream = d.CreateOutput(indexName);
				indexStream.WriteInt(FORMAT_CURRENT);
				success = true;
			}
			finally
			{
				if (!success)
				{
					try
					{
						Close();
					}
					catch (System.IO.IOException ioe)
					{
					}
					try
					{
						d.DeleteFile(fieldsName);
					}
					catch (System.Exception t)
					{
						// Suppress so we keep throwing the original exception
					}
					try
					{
						d.DeleteFile(indexName);
					}
					catch (System.Exception t)
					{
						// Suppress so we keep throwing the original exception
					}
				}
			}
			
			doClose = true;
		}
Example #27
 internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
 {
     fieldInfos   = fn;
     fieldsStream = d.CreateOutput(segment + ".fdt");
     indexStream  = d.CreateOutput(segment + ".fdx");
 }
Example #28
        private void  WritePostings(Posting[] postings, System.String segment)
        {
            IndexOutput       freq = null, prox = null;
            TermInfosWriter   tis              = null;
            TermVectorsWriter termVectorWriter = null;

            try
            {
                //open files for inverse index storage
                freq = directory.CreateOutput(segment + ".frq");
                prox = directory.CreateOutput(segment + ".prx");
                tis  = new TermInfosWriter(directory, segment, fieldInfos, termIndexInterval);
                TermInfo      ti           = new TermInfo();
                System.String currentField = null;

                for (int i = 0; i < postings.Length; i++)
                {
                    Posting posting = postings[i];

                    // add an entry to the dictionary with pointers to prox and freq files
                    ti.Set(1, freq.GetFilePointer(), prox.GetFilePointer(), -1);
                    tis.Add(posting.term, ti);

                    // add an entry to the freq file
                    int postingFreq = posting.freq;
                    if (postingFreq == 1)
                    {
                        freq.WriteVInt(1);                         // optimize freq=1 (sets the low bit of the doc num)
                    }
                    else
                    {
                        freq.WriteVInt(0);                         // the document number
                        freq.WriteVInt(postingFreq);               // frequency in doc
                    }

                    int   lastPosition = 0;                   // write positions
                    int[] positions    = posting.positions;
                    for (int j = 0; j < postingFreq; j++)
                    {
                        // use delta-encoding
                        int position = positions[j];
                        prox.WriteVInt(position - lastPosition);
                        lastPosition = position;
                    }
                    // check to see if we switched to a new field
                    System.String termField = posting.term.Field();
                    if (currentField != termField)
                    {
                        // changing field - see if there is something to save
                        currentField = termField;
                        FieldInfo fi = fieldInfos.FieldInfo(currentField);
                        if (fi.storeTermVector)
                        {
                            if (termVectorWriter == null)
                            {
                                termVectorWriter = new TermVectorsWriter(directory, segment, fieldInfos);
                                termVectorWriter.OpenDocument();
                            }
                            termVectorWriter.OpenField(currentField);
                        }
                        else if (termVectorWriter != null)
                        {
                            termVectorWriter.CloseField();
                        }
                    }
                    if (termVectorWriter != null && termVectorWriter.IsFieldOpen())
                    {
                        termVectorWriter.AddTerm(posting.term.Text(), postingFreq, posting.positions, posting.offsets);
                    }
                }
                if (termVectorWriter != null)
                {
                    termVectorWriter.CloseDocument();
                }
            }
            finally
            {
                // make an effort to close all streams we can but remember and re-throw
                // the first exception encountered in this process
                System.IO.IOException keep = null;
                if (freq != null)
                {
                    try
                    {
                        freq.Close();
                    }
                    catch (System.IO.IOException e)
                    {
                        if (keep == null)
                        {
                            keep = e;
                        }
                    }
                }
                if (prox != null)
                {
                    try
                    {
                        prox.Close();
                    }
                    catch (System.IO.IOException e)
                    {
                        if (keep == null)
                        {
                            keep = e;
                        }
                    }
                }
                if (tis != null)
                {
                    try
                    {
                        tis.Close();
                    }
                    catch (System.IO.IOException e)
                    {
                        if (keep == null)
                        {
                            keep = e;
                        }
                    }
                }
                if (termVectorWriter != null)
                {
                    try
                    {
                        termVectorWriter.Close();
                    }
                    catch (System.IO.IOException e)
                    {
                        if (keep == null)
                        {
                            keep = e;
                        }
                    }
                }
                if (keep != null)
                {
                    throw keep; // re-throw the first exception encountered while closing
                }
            }
        }
Example #29
 public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
 {
     string fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
     IndexOutput output = directory.CreateOutput(fileName, context);
     bool success = false;
     try
     {
         output.WriteVInt(FORMAT_PREFLEX_RW);
         output.WriteVInt(infos.Size());
         foreach (FieldInfo fi in infos)
         {
             sbyte bits = 0x0;
             if (fi.HasVectors())
             {
                 bits |= STORE_TERMVECTOR;
             }
             if (fi.OmitsNorms())
             {
                 bits |= OMIT_NORMS;
             }
             if (fi.HasPayloads())
             {
                 bits |= STORE_PAYLOADS;
             }
             if (fi.Indexed)
             {
                 bits |= IS_INDEXED;
                 Debug.Assert(fi.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads());
                 if (fi.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_ONLY)
                 {
                     bits |= OMIT_TERM_FREQ_AND_POSITIONS;
                 }
                 else if (fi.FieldIndexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS)
                 {
                     bits |= OMIT_POSITIONS;
                 }
             }
             output.WriteString(fi.Name);
             /*
              * we need to write the field number since IW tries
              * to stabilize the field numbers across segments so the
              * FI ordinal is not necessarily equivalent to the field number
              */
             output.WriteInt(fi.Number);
             output.WriteByte((byte)bits);
             if (fi.Indexed && !fi.OmitsNorms())
             {
                 // to allow null norm types we need to indicate if norms are written
                 // only in RW case
                 output.WriteByte((byte)(sbyte)(fi.NormType == null ? 0 : 1));
             }
             Debug.Assert(fi.Attributes() == null); // not used or supported
         }
         success = true;
     }
     finally
     {
         if (success)
         {
             output.Dispose();
         }
         else
         {
             IOUtils.CloseWhileHandlingException(output);
         }
     }
 }
Example #30
        public CachedIndexInput( ICloudProvider CloudProvider, Directory CacheDirectory, string Name )
        {
            this.name = Name;

            #if FULLDEBUG
            Debug.WriteLine( "Opening " + this.name );
            #endif
            this.fileMutex = BlobMutexManager.GrabMutex( this.name );
            this.fileMutex.WaitOne();
            try {

                bool fFileNeeded = false;
                FileMetadata cloudMetadata = CloudProvider.FileMetadata( this.name );
                if ( !cloudMetadata.Exists ) {
                    fFileNeeded = false;
                    // TODO: Delete local if it doesn't exist on cloud?
                    /*
                    if (CacheDirectory.FileExists(this.name)) {
                        CacheDirectory.DeleteFile(this.name);
                    }
                    */
                } else if ( !CacheDirectory.FileExists( this.name ) ) {
                    fFileNeeded = true;
                } else {
                    long cachedLength = CacheDirectory.FileLength( this.name );

                    long blobLength = cloudMetadata.Length;
                    DateTime blobLastModifiedUTC = cloudMetadata.LastModified.ToUniversalTime();

                    if ( !cloudMetadata.Exists || cachedLength != blobLength ) {
                        fFileNeeded = true;
                    } else {
                        // Timestamps between the cache and blob storage can drift by a tick or so,
                        // so treat the copies as identical when the lengths match and the times are within one second.
                        DateTime cachedLastModifiedUTC = new DateTime( CacheDirectory.FileModified( this.name ), DateTimeKind.Local ).ToUniversalTime();
                        if ( cachedLastModifiedUTC < blobLastModifiedUTC ) {
                            TimeSpan timeSpan = blobLastModifiedUTC.Subtract( cachedLastModifiedUTC );
                            if ( timeSpan.TotalSeconds > 1 ) {
                                fFileNeeded = true;
                            } else {
            #if FULLDEBUG
                                Debug.WriteLine( "Using cache for " + this.name + ": " + timeSpan.TotalSeconds );
            #endif
                                // file not needed
                            }
                        }
                    }
                }

                // if the file does not exist
                // or if it exists and is older than the last-modified time in the blob properties (which always comes from blob storage)
                if ( fFileNeeded ) {
                    using ( StreamOutput fileStream = new StreamOutput( CacheDirectory.CreateOutput( this.name ) ) ) {

                        Stream blobStream = CloudProvider.Download( this.name );
                        blobStream.CopyTo( fileStream );

                        fileStream.Flush();
                        Debug.WriteLine( "GET {0} RETREIVED {1} bytes", this.name, fileStream.Length );

                    }
                } else {
            #if FULLDEBUG
                    if ( !cloudMetadata.Exists ) {
                        Debug.WriteLine( "Cloud doesn't have " + this.name );
                    } else {
                        Debug.WriteLine( "Using cached file for " + this.name );
                    }
            #endif
                }

                // open the file in read only mode
                this.indexInput = CacheDirectory.OpenInput( this.name );
            } finally {
                this.fileMutex.ReleaseMutex();
            }
        }
Example #31
 private void  Demo_FSIndexInputBug(Directory fsdir, System.String file)
 {
     // Setup the test file - we need more than 1024 bytes
     IndexOutput os = fsdir.CreateOutput(file);
     for (int i = 0; i < 2000; i++)
     {
         os.WriteByte((byte) i);
     }
     os.Close();
     
     IndexInput in_Renamed = fsdir.OpenInput(file);
     
     // This read primes the buffer in IndexInput
     byte b = in_Renamed.ReadByte();
     
     // Close the file
     in_Renamed.Close();
     
     // ERROR: this call should fail, but succeeds because the buffer
     // is still filled
     b = in_Renamed.ReadByte();
     
     // ERROR: this call should fail, but succeeds for some reason as well
     in_Renamed.Seek(1099);
     
     // OK: this call correctly fails. We are now past the 1024 internal
     // buffer, so an actual IO is attempted, which fails
     Assert.Throws<NullReferenceException>(() => in_Renamed.ReadByte(), "expected readByte() to throw exception");
 }
Example #32
		public void  Write(Directory directory)
		{
			IndexOutput output = directory.CreateOutput("segments.new");
			try
			{
				output.WriteInt(FORMAT); // write FORMAT
				output.WriteLong(++version); // every write changes the index
				output.WriteInt(counter); // write counter
				output.WriteInt(Count); // write infos
				for (int i = 0; i < Count; i++)
				{
					SegmentInfo si = Info(i);
					output.WriteString(si.name);
					output.WriteInt(si.docCount);
				}
			}
			finally
			{
				output.Close();
			}
			
			// install new segment info
			directory.RenameFile("segments.new", IndexFileNames.SEGMENTS);
		}
Example #33
 /// <summary>Creates a file of the specified size with sequential data. The first
 /// byte is written as the start byte provided. All subsequent bytes are
 /// computed as start + offset where offset is the number of the byte.
 /// </summary>
 private void  CreateSequenceFile(Directory dir, System.String name, byte start, int size)
 {
     IndexOutput os = dir.CreateOutput(name);
     for (int i = 0; i < size; i++)
     {
         os.WriteByte(start);
         start++;
     }
     os.Close();
 }
Example #34
 /// <summary>Writes this vector to the file <code>name</code> in Directory
 /// <code>d</code>, in a format that can be read by the constructor {@link
 /// #BitVector(Directory, String)}.  
 /// </summary>
 public void Write(Directory d, System.String name)
 {
     IndexOutput output = d.CreateOutput(name);
     try
     {
         output.WriteInt(Size()); // write size
         output.WriteInt(Count()); // write count
         output.WriteBytes(bits, bits.Length); // write bits
     }
     finally
     {
         output.Close();
     }
 }
Example #35
        internal FieldsWriter(Directory d, System.String segment, FieldInfos fn)
        {
            fieldInfos = fn;

            bool success = false;

            System.String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
            try
            {
                fieldsStream = d.CreateOutput(fieldsName);
                fieldsStream.WriteInt(FORMAT_CURRENT);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    try
                    {
                        Close();
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                    try
                    {
                        d.DeleteFile(fieldsName);
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                }
            }

            success = false;
            System.String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
            try
            {
                indexStream = d.CreateOutput(indexName);
                indexStream.WriteInt(FORMAT_CURRENT);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    try
                    {
                        Close();
                    }
                    catch (System.IO.IOException ioe)
                    {
                    }
                    try
                    {
                        d.DeleteFile(fieldsName);
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                    try
                    {
                        d.DeleteFile(indexName);
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                }
            }

            doClose = true;
        }
Example #36
		public virtual void  CopyFile(Directory dir, System.String src, System.String dest)
		{
			IndexInput in_Renamed = dir.OpenInput(src);
			IndexOutput out_Renamed = dir.CreateOutput(dest);
			byte[] b = new byte[1024];
			long remainder = in_Renamed.Length();
			while (remainder > 0)
			{
				int len = (int) System.Math.Min(b.Length, remainder);
				in_Renamed.ReadBytes(b, 0, len);
				out_Renamed.WriteBytes(b, len);
				remainder -= len;
			}
			in_Renamed.Close();
			out_Renamed.Close();
		}
Example #37
        private void Write(Directory directory)
        {
            System.String segmentFileName = GetNextSegmentFileName();

            // Always advance the generation on write:
            if (generation == -1)
            {
                generation = 1;
            }
            else
            {
                generation++;
            }

            ChecksumIndexOutput output = new ChecksumIndexOutput(directory.CreateOutput(segmentFileName));

            bool success = false;

            try
            {
                output.WriteInt(CURRENT_FORMAT); // write FORMAT
                output.WriteLong(++version); // every write changes
                // the index
                output.WriteInt(counter); // write counter
                output.WriteInt(Count); // write infos
                for (int i = 0; i < Count; i++)
                {
                    Info(i).Write(output);
                }
                output.PrepareCommit();
                success = true;
                pendingOutput = output;
            }
            finally
            {
                if (!success)
                {
                    // we hit an exception above; try to close the file but suppress any exception:
                    try
                    {
                        output.Close();
                    }
                    catch (System.Exception)
                    {
                        // suppress so we keep throwing the original exception
                    }
                    try
                    {
                        // try not to leave a truncated segments_N file in the index
                        directory.DeleteFile(segmentFileName);
                    }
                    catch (System.Exception)
                    {
                        // suppress so we keep throwing the original exception
                    }
                }
            }
        }
Example #38
        private void  Write(Directory directory)
        {
            System.String segmentFileName = GetNextSegmentFileName();

            // Always advance the generation on write:
            if (generation == -1)
            {
                generation = 1;
            }
            else
            {
                generation++;
            }

            ChecksumIndexOutput segnOutput = new ChecksumIndexOutput(directory.CreateOutput(segmentFileName));

            bool success = false;

            try
            {
                segnOutput.WriteInt(CURRENT_FORMAT);        // write FORMAT
                segnOutput.WriteLong(++version);            // every write changes
                // the index
                segnOutput.WriteInt(counter);               // write counter
                segnOutput.WriteInt(Count);                 // write infos
                for (int i = 0; i < Count; i++)
                {
                    Info(i).Write(segnOutput);
                }
                segnOutput.WriteStringStringMap(userData);
                segnOutput.PrepareCommit();
                success           = true;
                pendingSegnOutput = segnOutput;
            }
            finally
            {
                if (!success)
                {
                    // We hit an exception above; try to close the file
                    // but suppress any exception:
                    try
                    {
                        segnOutput.Close();
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                    try
                    {
                        // Try not to leave a truncated segments_N file in
                        // the index:
                        directory.DeleteFile(segmentFileName);
                    }
                    catch (System.Exception t)
                    {
                        // Suppress so we keep throwing the original exception
                    }
                }
            }
        }
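Taken together, these examples show two generations of the same call: the older API takes only a file name, while the 4.x API also takes an IOContext describing the kind of I/O being done. The two signatures come from different library versions and are not interchangeable; the contrast below is only illustrative, with dir standing for whichever Directory implementation the version at hand provides.

        // Older signature, as in Examples #6, #14, #17 and #32:
        IndexOutput legacyOut = dir.CreateOutput("_1.demo");

        // 4.x signature, as in Examples #9, #10 and #19: the IOContext hints at the kind of I/O.
        IndexOutput modernOut = dir.CreateOutput("_1.demo", IOContext.DEFAULT);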