/// <summary> Save this segment's info.</summary>
        internal void Write(IndexOutput output)
        {
            output.WriteString(name);
            output.WriteInt(docCount);
            output.WriteLong(delGen);
            output.WriteInt(docStoreOffset);
            if (docStoreOffset != -1)
            {
                output.WriteString(docStoreSegment);
                output.WriteByte((byte)(docStoreIsCompoundFile?1:0));
            }

            output.WriteByte((byte)(hasSingleNormFile?1:0));
            if (normGen == null)
            {
                output.WriteInt(NO);
            }
            else
            {
                output.WriteInt(normGen.Length);
                for (int j = 0; j < normGen.Length; j++)
                {
                    output.WriteLong(normGen[j]);
                }
            }
            output.WriteByte((byte)isCompoundFile);
            output.WriteInt(delCount);
            output.WriteByte((byte)(hasProx?1:0));
            output.WriteStringStringMap(diagnostics);
        }
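The calls above define a fixed on-disk record: the matching reader must consume the same fields in the same order and widths. Below is a minimal sketch of that read side, assuming an IndexInput with the mirror-image getters; it is illustrative, not the shipped Lucene.NET reader.

        // Hypothetical mirror of the Write above; field order and widths must match exactly.
        internal void Read(IndexInput input)
        {
            name = input.ReadString();
            docCount = input.ReadInt();
            delGen = input.ReadLong();
            docStoreOffset = input.ReadInt();
            if (docStoreOffset != -1)
            {
                docStoreSegment = input.ReadString();
                docStoreIsCompoundFile = input.ReadByte() == 1;
            }
            hasSingleNormFile = input.ReadByte() == 1;
            int numNormGen = input.ReadInt();
            if (numNormGen == NO)
            {
                normGen = null;
            }
            else
            {
                normGen = new long[numNormGen];
                for (int j = 0; j < numNormGen; j++)
                {
                    normGen[j] = input.ReadLong();
                }
            }
            isCompoundFile = (sbyte)input.ReadByte(); // single-byte flag, written raw above
            delCount = input.ReadInt();
            hasProx = input.ReadByte() == 1;
            diagnostics = input.ReadStringStringMap();
        }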
Example #2
        internal void WriteField(FieldInfo fi, IFieldable field)
        {
            fieldsStream.WriteVInt(fi.number);
            byte bits = 0;

            if (field.IsTokenized)
            {
                bits |= FieldsWriter.FIELD_IS_TOKENIZED;
            }
            if (field.IsBinary)
            {
                bits |= FieldsWriter.FIELD_IS_BINARY;
            }

            fieldsStream.WriteByte(bits);

            // compression is disabled for the current field
            if (field.IsBinary)
            {
                byte[] data   = field.GetBinaryValue();
                int    len    = field.BinaryLength;
                int    offset = field.BinaryOffset;

                fieldsStream.WriteVInt(len);
                fieldsStream.WriteBytes(data, offset, len);
            }
            else
            {
                fieldsStream.WriteString(field.StringValue);
            }
        }
Example #3
        public void Write(Directory directory)
        {
            IndexOutput output = directory.CreateOutput("segments.new");

            try
            {
                output.WriteInt(FORMAT);                // write FORMAT
                output.WriteLong(++version);            // every write changes the index
                output.WriteInt(counter);               // write counter
                output.WriteInt(Count);                 // write infos
                for (int i = 0; i < Count; i++)
                {
                    SegmentInfo si = Info(i);
                    output.WriteString(si.name);
                    output.WriteInt(si.docCount);
                }
            }
            finally
            {
                output.Close();
            }

            // install new segment info
            directory.RenameFile("segments.new", IndexFileNames.SEGMENTS);
        }
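What makes this update safe is the idiom, not the payload: the complete replacement file is written under a temporary name and only then renamed over the live segments file, so a crash mid-write leaves the old file intact. The same idiom, factored into a hypothetical helper (the name and signature are illustrative, not Lucene API):

        // Write-then-rename sketch: readers see either the old file or the
        // complete new one, never a partially written file.
        private static void WriteAtomically(Directory directory, string finalName,
                                            System.Action<IndexOutput> writeBody)
        {
            string tempName = finalName + ".new";
            IndexOutput output = directory.CreateOutput(tempName);
            try
            {
                writeBody(output);
            }
            finally
            {
                output.Close();
            }
            directory.RenameFile(tempName, finalName);
        }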
Example #4
 public void Write(IndexOutput output)
 {
     output.WriteVInt(Size());
     for (int i = 0; i < Size(); i++)
     {
         FieldInfo fi   = FieldInfo(i);
         byte      bits = (byte)(0x0);
         if (fi.isIndexed)
         {
             bits |= IS_INDEXED;
         }
         if (fi.storeTermVector)
         {
             bits |= STORE_TERMVECTOR;
         }
         if (fi.storePositionWithTermVector)
         {
             bits |= STORE_POSITIONS_WITH_TERMVECTOR;
         }
         if (fi.storeOffsetWithTermVector)
         {
             bits |= STORE_OFFSET_WITH_TERMVECTOR;
         }
         if (fi.omitNorms)
         {
             bits |= OMIT_NORMS;
         }
         output.WriteString(fi.name);
         output.WriteByte(bits);
     }
 }
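Each per-field option occupies one bit of the trailing flag byte, which is why the writer ORs mask constants together. The reader recovers the options with the same masks; a short, illustrative sketch of decoding one entry:

        // Decode one FieldInfo entry written above: name string, then flag byte.
        string fieldName = input.ReadString();
        byte bits = input.ReadByte();
        bool isIndexed = (bits & IS_INDEXED) != 0;
        bool storeTermVector = (bits & STORE_TERMVECTOR) != 0;
        bool storePositionsWithTermVector = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
        bool storeOffsetWithTermVector = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
        bool omitNorms = (bits & OMIT_NORMS) != 0;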
Example #5
        private void WriteDeleteableFiles(System.Collections.ArrayList files)
        {
            IndexOutput output = directory.CreateOutput("deleteable.new");

            try
            {
                output.WriteInt(files.Count);
                for (int i = 0; i < files.Count; i++)
                {
                    output.WriteString((System.String)files[i]);
                }
            }
            finally
            {
                output.Close();
            }
            directory.RenameFile("deleteable.new", IndexFileNames.DELETABLE);
        }
Example #6
 /// <summary> Save this segment's info.</summary>
 internal void Write(IndexOutput output)
 {
     output.WriteString(name);
     output.WriteInt(docCount);
     output.WriteLong(delGen);
     output.WriteByte((byte)(hasSingleNormFile ? 1 : 0));
     if (normGen == null)
     {
         output.WriteInt(-1);
     }
     else
     {
         output.WriteInt(normGen.Length);
         for (int j = 0; j < normGen.Length; j++)
         {
             output.WriteLong(normGen[j]);
         }
     }
     output.WriteByte((byte)isCompoundFile);
 }
Example #7
        public void Write(IndexOutput output)
        {
            output.WriteVInt(CURRENT_FORMAT);
            output.WriteVInt(Size());
            for (int i = 0; i < Size(); i++)
            {
                FieldInfo fi   = FieldInfo(i);
                var       bits = (byte)(0x0);
                if (fi.isIndexed)
                {
                    bits |= IS_INDEXED;
                }
                if (fi.storeTermVector)
                {
                    bits |= STORE_TERMVECTOR;
                }
                if (fi.storePositionWithTermVector)
                {
                    bits |= STORE_POSITIONS_WITH_TERMVECTOR;
                }
                if (fi.storeOffsetWithTermVector)
                {
                    bits |= STORE_OFFSET_WITH_TERMVECTOR;
                }
                if (fi.omitNorms)
                {
                    bits |= OMIT_NORMS;
                }
                if (fi.storePayloads)
                {
                    bits |= STORE_PAYLOADS;
                }
                if (fi.omitTermFreqAndPositions)
                {
                    bits |= OMIT_TERM_FREQ_AND_POSITIONS;
                }

                output.WriteString(fi.name);
                output.WriteByte(bits);
            }
        }
Example #8
        /// <summary>
        /// Save a single segment's info. </summary>
        public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
        {
            string fileName = IndexFileNames.SegmentFileName(si.Name, "", Lucene46SegmentInfoFormat.SI_EXTENSION);

            si.AddFile(fileName);

            IndexOutput output = dir.CreateOutput(fileName, ioContext);

            bool success = false;

            try
            {
                CodecUtil.WriteHeader(output, Lucene46SegmentInfoFormat.CODEC_NAME, Lucene46SegmentInfoFormat.VERSION_CURRENT);
                // Write the Lucene version that created this segment, since 3.1
                output.WriteString(si.Version);
                output.WriteInt32(si.DocCount);

                output.WriteByte((byte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
                output.WriteStringStringMap(si.Diagnostics);
                output.WriteStringSet(si.GetFiles());
                CodecUtil.WriteFooter(output);
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(output);
                    si.Dir.DeleteFile(fileName);
                }
                else
                {
                    output.Dispose();
                }
            }
        }
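The success flag drives the cleanup: on any failure the output is disposed without letting a secondary exception mask the original one, and the partially written .si file is deleted. On the read side, Lucene.NET 4.x pairs WriteHeader/WriteFooter with CheckHeader/CheckFooter, which validate the codec name, the version range, and the trailing CRC32 checksum. A hedged sketch of that verification (field parsing abbreviated):

        // Sketch: verify the header and footer written by the method above.
        using (ChecksumIndexInput input = dir.OpenChecksumInput(fileName, ioContext))
        {
            CodecUtil.CheckHeader(input, Lucene46SegmentInfoFormat.CODEC_NAME,
                Lucene46SegmentInfoFormat.VERSION_START, Lucene46SegmentInfoFormat.VERSION_CURRENT);
            string version = input.ReadString();
            int docCount = input.ReadInt32();
            // ... compound-file flag, diagnostics map, file set ...
            CodecUtil.CheckFooter(input); // throws if the stored CRC32 does not match
        }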
Example #9
        /// <summary> Save this segment's info.</summary>
        internal void Write(IndexOutput output)
        {
            output.WriteString(name);
            output.WriteInt(docCount);
            output.WriteLong(delGen);
            output.WriteInt(docStoreOffset);
            if (docStoreOffset != -1)
            {
                output.WriteString(docStoreSegment);
                output.WriteByte((byte)(docStoreIsCompoundFile ? 1 : 0));
            }

            output.WriteByte((byte)(hasSingleNormFile ? 1 : 0));
            if (normGen == null)
            {
                output.WriteInt(NO);
            }
            else
            {
                output.WriteInt(normGen.Length);
                for (int j = 0; j < normGen.Length; j++)
                {
                    output.WriteLong(normGen[j]);
                }
            }
            output.WriteByte((byte)isCompoundFile);
            output.WriteInt(delCount);
            output.WriteByte((byte)(hasProx ? 1 : 0));
            output.WriteStringStringMap(diagnostics);
        }
Example #10
        internal void WriteField(FieldInfo fi, Fieldable field)
        {
            // if the field is an instance of FieldsReader.FieldForMerge, we're in merge mode
            // and field.binaryValue() already returns the compressed value for a field
            // with isCompressed()==true, so we disable compression in that case
            bool disableCompression = (field is FieldsReader.FieldForMerge);

            fieldsStream.WriteVInt(fi.number);
            byte bits = 0;

            if (field.IsTokenized())
            {
                bits |= FieldsWriter.FIELD_IS_TOKENIZED;
            }
            if (field.IsBinary())
            {
                bits |= FieldsWriter.FIELD_IS_BINARY;
            }
            if (field.IsCompressed())
            {
                bits |= FieldsWriter.FIELD_IS_COMPRESSED;
            }

            fieldsStream.WriteByte(bits);

            if (field.IsCompressed())
            {
                // compression is enabled for the current field
                byte[] data;
                int    len;
                int    offset;

                if (disableCompression)
                {
                    // optimized case for merging, the data
                    // is already compressed
                    data = field.GetBinaryValue();
                    System.Diagnostics.Debug.Assert(data != null);
                    len    = field.GetBinaryLength();
                    offset = field.GetBinaryOffset();
                }
                else
                {
                    // check if it is a binary field
                    if (field.IsBinary())
                    {
                        data = Compress(field.GetBinaryValue(), field.GetBinaryOffset(), field.GetBinaryLength());
                    }
                    else
                    {
                        byte[] x = System.Text.Encoding.UTF8.GetBytes(field.StringValue());
                        data = Compress(x, 0, x.Length);
                    }
                    len    = data.Length;
                    offset = 0;
                }

                fieldsStream.WriteVInt(len);
                fieldsStream.WriteBytes(data, offset, len);
            }
            else
            {
                // compression is disabled for the current field
                if (field.IsBinary())
                {
                    int length = field.GetBinaryLength();
                    fieldsStream.WriteVInt(length);
                    fieldsStream.WriteBytes(field.BinaryValue(), field.GetBinaryOffset(), length);
                }
                else
                {
                    fieldsStream.WriteString(field.StringValue());
                }
            }
        }
Example #11
        public void Write(IndexOutput output)
        {
            output.WriteVInt(CURRENT_FORMAT);
            output.WriteVInt(Size());
            for (int i = 0; i < Size(); i++)
            {
                FieldInfo fi = FieldInfo(i);
                byte bits = (byte)(0x0);
                if (fi.isIndexed)
                    bits |= IS_INDEXED;
                if (fi.storeTermVector)
                    bits |= STORE_TERMVECTOR;
                if (fi.storePositionWithTermVector)
                    bits |= STORE_POSITIONS_WITH_TERMVECTOR;
                if (fi.storeOffsetWithTermVector)
                    bits |= STORE_OFFSET_WITH_TERMVECTOR;
                if (fi.omitNorms)
                    bits |= OMIT_NORMS;
                if (fi.storePayloads)
                    bits |= STORE_PAYLOADS;
                if (fi.omitTermFreqAndPositions)
                    bits |= OMIT_TERM_FREQ_AND_POSITIONS;

                output.WriteString(fi.name);
                output.WriteByte(bits);
            }
        }
Example #12
        /// <summary>Merge files with the extensions added up to now.
        /// All files with these extensions are combined sequentially into the
        /// compound stream. After a successful merge, the source files
        /// are deleted.
        /// </summary>
        /// <exception cref="System.SystemException">if Close() has already been called,
        /// or if no file has been added to this object.</exception>
        public void Close()
        {
            if (merged)
            {
                throw new System.SystemException("Merge already performed");
            }

            if (entries.Count == 0)
            {
                throw new System.SystemException("No entries to merge have been defined");
            }

            merged = true;

            // open the compound stream
            IndexOutput os = null;

            try
            {
                os = directory.CreateOutput(fileName);

                // Write the number of entries
                os.WriteVInt(entries.Count);

                // Write the directory with all offsets at 0.
                // Remember the positions of directory entries so that we can
                // adjust the offsets later
                System.Collections.IEnumerator it = entries.GetEnumerator();
                long totalSize = 0;
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    fe.directoryOffset = os.GetFilePointer();
                    os.WriteLong(0);                     // for now
                    os.WriteString(fe.file);
                    totalSize += directory.FileLength(fe.file);
                }

                // Pre-allocate size of file as optimization --
                // this can potentially help IO performance as
                // we write the file and also later during
                // searching.  It also uncovers a disk-full
                // situation earlier and hopefully without
                // actually filling disk to 100%:
                long finalLength = totalSize + os.GetFilePointer();
                os.SetLength(finalLength);

                // Open the files and copy their data into the stream.
                // Remember the locations of each file's data section.
                byte[] buffer = new byte[16384];
                it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    fe.dataOffset = os.GetFilePointer();
                    CopyFile(fe, os, buffer);
                }

                // Write the data offsets into the directory of the compound stream
                it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    os.Seek(fe.directoryOffset);
                    os.WriteLong(fe.dataOffset);
                }

                System.Diagnostics.Debug.Assert(finalLength == os.Length());

                // Close the output stream. Set the os to null before trying to
                // close so that if an exception occurs during the close, the
                // finally clause below will not attempt to close the stream
                // the second time.
                IndexOutput tmp = os;
                os = null;
                tmp.Close();
            }
            finally
            {
                if (os != null)
                {
                    try
                    {
                        os.Close();
                    }
                    catch (System.IO.IOException)
                    {
                        // ignored so that a failure on Close cannot mask the original exception
                    }
                }
            }
        }
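The WriteLong(0) placeholder is the key to this layout: the entry table is written first with zeroed offsets, the file contents are copied, and the writer then seeks back and patches each entry's real data offset into the table. A reader-side sketch of parsing that table of contents (illustrative; the shipped CompoundFileReader differs in detail):

        // Hypothetical parse of the compound-file directory written above.
        IndexInput stream = directory.OpenInput(fileName);
        int count = stream.ReadVInt();          // number of entries
        var dataOffsets = new long[count];
        var names = new string[count];
        for (int i = 0; i < count; i++)
        {
            dataOffsets[i] = stream.ReadLong(); // the patched-in offset, no longer zero
            names[i] = stream.ReadString();
        }
        // Entry i's bytes run from dataOffsets[i] to dataOffsets[i + 1]
        // (or to the end of the file for the last entry).
        stream.Close();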
Example #13
        internal void AddDocument(Document doc)
        {
            indexStream.WriteLong(fieldsStream.GetFilePointer());

            int storedCount = 0;

            System.Collections.IEnumerator fieldIterator = doc.GetFields().GetEnumerator();
            while (fieldIterator.MoveNext())
            {
                Fieldable field = (Fieldable)fieldIterator.Current;
                if (field.IsStored())
                {
                    storedCount++;
                }
            }
            fieldsStream.WriteVInt(storedCount);

            fieldIterator = doc.GetFields().GetEnumerator();
            while (fieldIterator.MoveNext())
            {
                Fieldable field = (Fieldable)fieldIterator.Current;
                // if the field is an instance of FieldsReader.FieldForMerge, we're in merge mode
                // and field.binaryValue() already returns the compressed value for a field
                // with isCompressed()==true, so we disable compression in that case
                bool disableCompression = (field is FieldsReader.FieldForMerge);
                if (field.IsStored())
                {
                    fieldsStream.WriteVInt(fieldInfos.FieldNumber(field.Name()));

                    byte bits = 0;
                    if (field.IsTokenized())
                    {
                        bits |= FieldsWriter.FIELD_IS_TOKENIZED;
                    }
                    if (field.IsBinary())
                    {
                        bits |= FieldsWriter.FIELD_IS_BINARY;
                    }
                    if (field.IsCompressed())
                    {
                        bits |= FieldsWriter.FIELD_IS_COMPRESSED;
                    }

                    fieldsStream.WriteByte(bits);

                    if (field.IsCompressed())
                    {
                        // compression is enabled for the current field
                        byte[] data = null;

                        if (disableCompression)
                        {
                            // optimized case for merging, the data
                            // is already compressed
                            data = field.BinaryValue();
                        }
                        else
                        {
                            // check if it is a binary field
                            if (field.IsBinary())
                            {
                                data = Compress(field.BinaryValue());
                            }
                            else
                            {
                                data = Compress(System.Text.Encoding.GetEncoding("UTF-8").GetBytes(field.StringValue()));
                            }
                        }
                        int len = data.Length;
                        fieldsStream.WriteVInt(len);
                        fieldsStream.WriteBytes(data, len);
                    }
                    else
                    {
                        // compression is disabled for the current field
                        if (field.IsBinary())
                        {
                            byte[] data = field.BinaryValue();
                            int    len  = data.Length;
                            fieldsStream.WriteVInt(len);
                            fieldsStream.WriteBytes(data, len);
                        }
                        else
                        {
                            fieldsStream.WriteString(field.StringValue());
                        }
                    }
                }
            }
        }
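The Compress helper called above is not shown on this page. The sketch below is a stand-in built on System.IO.Compression; the shipped Lucene.NET code used its own zlib-style support class, so treat the exact body (including the raw-deflate framing) as an assumption.

        // Illustrative stand-in for the Compress(byte[]) helper referenced above.
        private static byte[] Compress(byte[] input)
        {
            using (var buffer = new System.IO.MemoryStream())
            {
                using (var deflate = new System.IO.Compression.DeflateStream(
                           buffer, System.IO.Compression.CompressionMode.Compress, leaveOpen: true))
                {
                    deflate.Write(input, 0, input.Length);
                }
                return buffer.ToArray();
            }
        }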
Example #14
        /// <summary> Save this segment's info.</summary>
        internal void Write(IndexOutput output)
        {
            output.WriteString(name);
            output.WriteInt(docCount);
            output.WriteLong(delGen);
            output.WriteByte((byte)(hasSingleNormFile ? 1 : 0));
            if (normGen == null)
            {
                output.WriteInt(-1);
            }
            else
            {
                output.WriteInt(normGen.Length);
                for (int j = 0; j < normGen.Length; j++)
                {
                    output.WriteLong(normGen[j]);
                }
            }
            output.WriteByte((byte)isCompoundFile);
        }
Example #15
        internal void AddDocument(Document doc)
        {
            indexStream.WriteLong(fieldsStream.GetFilePointer());

            int storedCount = 0;

            foreach (Field field in doc.Fields())
            {
                if (field.IsStored())
                {
                    storedCount++;
                }
            }
            fieldsStream.WriteVInt(storedCount);

            foreach (Field field in doc.Fields())
            {
                if (field.IsStored())
                {
                    fieldsStream.WriteVInt(fieldInfos.FieldNumber(field.Name()));

                    byte bits = 0;
                    if (field.IsTokenized())
                    {
                        bits |= FieldsWriter.FIELD_IS_TOKENIZED;
                    }
                    if (field.IsBinary())
                    {
                        bits |= FieldsWriter.FIELD_IS_BINARY;
                    }
                    if (field.IsCompressed())
                    {
                        bits |= FieldsWriter.FIELD_IS_COMPRESSED;
                    }

                    fieldsStream.WriteByte(bits);

                    if (field.IsCompressed())
                    {
                        // compression is enabled for the current field
                        byte[] data = null;
                        // check if it is a binary field
                        if (field.IsBinary())
                        {
                            data = Compress(field.BinaryValue());
                        }
                        else
                        {
                            data = Compress(System.Text.Encoding.GetEncoding("UTF-8").GetBytes(field.StringValue()));
                        }
                        int len = data.Length;
                        fieldsStream.WriteVInt(len);
                        fieldsStream.WriteBytes(data, len);
                    }
                    else
                    {
                        // compression is disabled for the current field
                        if (field.IsBinary())
                        {
                            byte[] data = field.BinaryValue();
                            int    len  = data.Length;
                            fieldsStream.WriteVInt(len);
                            fieldsStream.WriteBytes(data, len);
                        }
                        else
                        {
                            fieldsStream.WriteString(field.StringValue());
                        }
                    }
                }
            }
        }
Example #16
        /// <summary>Merge files with the extensions added up to now.
        /// All files with these extensions are combined sequentially into the
        /// compound stream. After a successful merge, the source files
        /// are deleted.
        /// </summary>
        /// <exception cref="SystemException">if Dispose() has already been called,
        /// or if no file has been added to this object.</exception>
        public void Dispose()
        {
            // Extract into protected method if class ever becomes unsealed

            // TODO: Dispose shouldn't throw exceptions!
            if (merged)
                throw new SystemException("Merge already performed");

            if (entries.Count == 0)
                throw new SystemException("No entries to merge have been defined");

            merged = true;

            // open the compound stream
            IndexOutput os = null;
            try
            {
                os = directory.CreateOutput(fileName);

                // Write the number of entries
                os.WriteVInt(entries.Count);

                // Write the directory with all offsets at 0.
                // Remember the positions of directory entries so that we can
                // adjust the offsets later
                long totalSize = 0;
                foreach (FileEntry fe in entries)
                {
                    fe.directoryOffset = os.FilePointer;
                    os.WriteLong(0); // for now
                    os.WriteString(fe.file);
                    totalSize += directory.FileLength(fe.file);
                }

                // Pre-allocate size of file as optimization --
                // this can potentially help IO performance as
                // we write the file and also later during
                // searching.  It also uncovers a disk-full
                // situation earlier and hopefully without
                // actually filling disk to 100%:
                long finalLength = totalSize + os.FilePointer;
                os.SetLength(finalLength);

                // Open the files and copy their data into the stream.
                // Remember the locations of each file's data section.
                var buffer = new byte[16384];
                foreach (FileEntry fe in entries)
                {
                    fe.dataOffset = os.FilePointer;
                    CopyFile(fe, os, buffer);
                }

                // Write the data offsets into the directory of the compound stream
                foreach (FileEntry fe in entries)
                {
                    os.Seek(fe.directoryOffset);
                    os.WriteLong(fe.dataOffset);
                }

                System.Diagnostics.Debug.Assert(finalLength == os.Length);

                // Close the output stream. Set the os to null before trying to
                // close so that if an exception occurs during the close, the
                // finally clause below will not attempt to close the stream
                // the second time.
                IndexOutput tmp = os;
                os = null;
                tmp.Close();
            }
            finally
            {
                if (os != null)
                    try
                    {
                        os.Close();
                    }
                    catch (System.IO.IOException)
                    {
                    }
            }
        }
Example #17
        internal void WriteField(FieldInfo fi, Fieldable field)
        {
            // if the field is an instance of FieldsReader.FieldForMerge, we're in merge mode
            // and field.binaryValue() already returns the compressed value for a field
            // with isCompressed()==true, so we disable compression in that case
            bool disableCompression = (field is FieldsReader.FieldForMerge);

            fieldsStream.WriteVInt(fi.number);
            byte bits = 0;

            if (field.IsTokenized())
            {
                bits |= FieldsWriter.FIELD_IS_TOKENIZED;
            }
            if (field.IsBinary())
            {
                bits |= FieldsWriter.FIELD_IS_BINARY;
            }
            if (field.IsCompressed())
            {
                bits |= FieldsWriter.FIELD_IS_COMPRESSED;
            }

            fieldsStream.WriteByte(bits);

            if (field.IsCompressed())
            {
                // compression is enabled for the current field
                byte[] data = null;

                if (disableCompression)
                {
                    // optimized case for merging, the data
                    // is already compressed
                    data = field.BinaryValue();
                }
                else
                {
                    // check if it is a binary field
                    if (field.IsBinary())
                    {
                        data = Compress(field.BinaryValue());
                    }
                    else
                    {
                        data = Compress(System.Text.Encoding.GetEncoding("UTF-8").GetBytes(field.StringValue()));
                    }
                }
                int len = data.Length;
                fieldsStream.WriteVInt(len);
                fieldsStream.WriteBytes(data, len);
            }
            else
            {
                // compression is disabled for the current field
                if (field.IsBinary())
                {
                    byte[] data = field.BinaryValue();
                    int    len  = data.Length;
                    fieldsStream.WriteVInt(len);
                    fieldsStream.WriteBytes(data, len);
                }
                else
                {
                    fieldsStream.WriteString(field.StringValue());
                }
            }
        }
Example #18
        /// <summary>Merge files with the extensions added up to now.
        /// All files with these extensions are combined sequentially into the
        /// compound stream. After a successful merge, the source files
        /// are deleted.
        /// </summary>
        /// <exception cref="System.SystemException">if Close() has already been called,
        /// or if no file has been added to this object.</exception>
        public void Close()
        {
            if (merged)
            {
                throw new System.SystemException("Merge already performed");
            }

            if (entries.Count == 0)
            {
                throw new System.SystemException("No entries to merge have been defined");
            }

            merged = true;

            // open the compound stream
            IndexOutput os = null;

            try
            {
                os = directory.CreateOutput(fileName);

                // Write the number of entries
                os.WriteVInt(entries.Count);

                // Write the directory with all offsets at 0.
                // Remember the positions of directory entries so that we can
                // adjust the offsets later
                System.Collections.IEnumerator it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    fe.directoryOffset = os.GetFilePointer();
                    os.WriteLong(0);                     // for now
                    os.WriteString(fe.file);
                }

                // Open the files and copy their data into the stream.
                // Remember the locations of each file's data section.
                byte[] buffer = new byte[1024];
                it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    fe.dataOffset = os.GetFilePointer();
                    CopyFile(fe, os, buffer);
                }

                // Write the data offsets into the directory of the compound stream
                it = entries.GetEnumerator();
                while (it.MoveNext())
                {
                    FileEntry fe = (FileEntry)it.Current;
                    os.Seek(fe.directoryOffset);
                    os.WriteLong(fe.dataOffset);
                }

                // Close the output stream. Set the os to null before trying to
                // close so that if an exception occurs during the close, the
                // finally clause below will not attempt to close the stream
                // the second time.
                IndexOutput tmp = os;
                os = null;
                tmp.Close();
            }
            finally
            {
                if (os != null)
                {
                    try
                    {
                        os.Close();
                    }
                    catch (System.IO.IOException)
                    {
                    }
                }
            }
        }