/// <summary>
/// Merges the term dictionaries, frequency (.frq) and proximity (.prx)
/// data of all readers into the target segment's postings files.
/// </summary>
private void MergeTerms()
{
    try
    {
        freqOutput = directory.CreateFile(segment + ".frq");
        proxOutput = directory.CreateFile(segment + ".prx");
        termInfosWriter = new TermInfosWriter(directory, segment, fieldInfos);
        skipInterval = termInfosWriter.skipInterval;
        queue = new SegmentMergeQueue(readers.Count);

        MergeTermInfos();
    }
    finally
    {
        // BUGFIX: previously a failure in an earlier Close() skipped the
        // remaining Close() calls, leaking the other file handles. Close
        // each resource independently, remember the first IOException,
        // and rethrow it once everything has been released.
        System.IO.IOException keep = null;
        if (freqOutput != null)
        {
            try { freqOutput.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (proxOutput != null)
        {
            try { proxOutput.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (termInfosWriter != null)
        {
            try { termInfosWriter.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (queue != null)
        {
            try { queue.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (keep != null)
        {
            throw keep;
        }
    }
}
/// <summary>
/// Concatenates the norms of every reader, field by field, into one
/// norm file per indexed field (&lt;segment&gt;.f&lt;n&gt;), skipping
/// norms that belong to deleted documents.
/// </summary>
private void MergeNorms()
{
    for (int fieldNum = 0; fieldNum < fieldInfos.Size(); fieldNum++)
    {
        FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNum);
        if (!fieldInfo.isIndexed)
            continue;

        OutputStream normsOut = directory.CreateFile(segment + ".f" + fieldNum);
        try
        {
            for (int readerNum = 0; readerNum < readers.Count; readerNum++)
            {
                Monodoc.Lucene.Net.Index.IndexReader reader =
                    (Monodoc.Lucene.Net.Index.IndexReader) readers[readerNum];
                byte[] norms = reader.Norms(fieldInfo.name);
                int docCount = reader.MaxDoc();
                for (int docNum = 0; docNum < docCount; docNum++)
                {
                    // A missing norms array is treated as all-zero norms.
                    byte normValue = norms != null ? norms[docNum] : (byte) 0;
                    if (!reader.IsDeleted(docNum))
                    {
                        normsOut.WriteByte(normValue);
                    }
                }
            }
        }
        finally
        {
            normsOut.Close();
        }
    }
}
/// <summary>
/// Persists the segment infos: writes everything to a temporary
/// "segments.new" file and then renames it over the live "segments"
/// file, so readers never observe a partially written file.
/// </summary>
public void Write(Directory directory)
{
    OutputStream output = directory.CreateFile("segments.new");
    try
    {
        output.WriteInt(FORMAT);     // file format identifier
        output.WriteLong(++version); // every write bumps the index version
        output.WriteInt(counter);

        int segmentCount = Count;
        output.WriteInt(segmentCount);
        for (int i = 0; i < segmentCount; i++)
        {
            SegmentInfo info = Info(i);
            output.WriteString(info.name);
            output.WriteInt(info.docCount);
        }
    }
    finally
    {
        output.Close();
    }

    // install new segment info
    directory.RenameFile("segments.new", "segments");
}
/// <summary>Called to complete TermInfos creation. </summary>
public /*internal*/ void Close()
{
    // Backfill the term count at offset 4 — just past the format int
    // written when the file was opened — now that the final size is known.
    output.Seek(4); // write size after format
    output.WriteLong(size);
    output.Close();

    // This writer and its index writer close each other; only the
    // non-index writer propagates the Close to its companion.
    if (!isIndex)
    {
        other.Close();
    }
}
/// <summary>
/// Serializes this object into a newly created file <c>name</c> in
/// Directory <c>d</c>, closing the stream even if writing fails.
/// </summary>
public void Write(Directory d, System.String name)
{
    OutputStream stream = d.CreateFile(name);
    try
    {
        Write(stream);
    }
    finally
    {
        stream.Close();
    }
}
/// <summary>Writes this vector to the file <code>name</code> in Directory
/// <code>d</code>, in a format that can be read by the constructor {@link
/// #BitVector(Directory, String)}.
/// </summary>
public void Write(Directory d, System.String name)
{
    OutputStream stream = d.CreateFile(name);
    try
    {
        stream.WriteInt(Size());              // total number of bits
        stream.WriteInt(Count());             // number of set bits
        stream.WriteBytes(bits, bits.Length); // the raw bit data
    }
    finally
    {
        stream.Close();
    }
}
public int number; // private -> public

/// <summary>
/// Flushes the in-memory norm bytes back to disk: writes them to a
/// temporary ".tmp" file and then renames it over the field's norm
/// file, so a failed write never corrupts the existing norms.
/// </summary>
public void ReWrite() // private -> public
{
    // NOTE: norms are re-written in regular directory, not cfs
    System.String tmpName = Enclosing_Instance.segment + ".tmp";
    OutputStream tmpOut = Enclosing_Instance.Directory().CreateFile(tmpName);
    try
    {
        tmpOut.WriteBytes(bytes, Enclosing_Instance.MaxDoc());
    }
    finally
    {
        tmpOut.Close();
    }
    Enclosing_Instance.Directory().RenameFile(
        tmpName, Enclosing_Instance.segment + ".f" + number);
    this.dirty = false;
}
/// <summary>
/// Persists the list of deletable file names: writes a count followed by
/// each name into a temporary file, then renames it into place so the
/// on-disk list is never left half-written.
/// NOTE(review): the differing spellings "deleteable.new" vs "deletable"
/// appear to be the historical on-disk names — do not "fix" them.
/// </summary>
private void WriteDeleteableFiles(System.Collections.ArrayList files)
{
    OutputStream output = directory.CreateFile("deleteable.new");
    try
    {
        output.WriteInt(files.Count);
        foreach (System.String file in files)
        {
            output.WriteString(file);
        }
    }
    finally
    {
        output.Close();
    }
    directory.RenameFile("deleteable.new", "deletable");
}
/// <summary>
/// Writes one single-byte norm file (&lt;segment&gt;.f&lt;n&gt;) for each
/// indexed field, encoding the field's boost combined with its length norm.
/// </summary>
private void WriteNorms(Document doc, System.String segment)
{
    for (int fieldNum = 0; fieldNum < fieldInfos.Size(); fieldNum++)
    {
        FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNum);
        if (!fieldInfo.isIndexed)
            continue;

        float norm = fieldBoosts[fieldNum]
            * similarity.LengthNorm(fieldInfo.name, fieldLengths[fieldNum]);
        OutputStream normOut = directory.CreateFile(segment + ".f" + fieldNum);
        try
        {
            normOut.WriteByte(Monodoc.Lucene.Net.Search.Similarity.EncodeNorm(norm));
        }
        finally
        {
            normOut.Close();
        }
    }
}
/// <summary>Closes both underlying streams.</summary>
internal void Close()
{
    // BUGFIX: previously, if closing fieldsStream threw, indexStream was
    // never closed and its handle leaked. The try/finally guarantees both
    // Close() calls are attempted, in the original order.
    try
    {
        fieldsStream.Close();
    }
    finally
    {
        indexStream.Close();
    }
}
/// <summary>Merge files with the extensions added up to now.
/// All files with these extensions are combined sequentially into the
/// compound stream. After successful merge, the source files
/// are deleted.
/// </summary>
public void Close()
{
    // Close() is a one-shot operation: it performs the merge itself.
    if (merged)
    {
        throw new System.SystemException("Merge already performed");
    }

    if ((entries.Count == 0))
    {
        throw new System.SystemException("No entries to merge have been defined");
    }

    merged = true;

    // open the compound stream
    OutputStream os = null;
    try
    {
        os = directory.CreateFile(fileName);

        // Write the number of entries
        os.WriteVInt(entries.Count);

        // Write the directory with all offsets at 0.
        // Remember the positions of directory entries so that we can
        // adjust the offsets later
        System.Collections.IEnumerator it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry) it.Current;
            fe.directoryOffset = os.GetFilePointer();
            os.WriteLong(0); // placeholder; the real data offset is patched in below
            os.WriteString(fe.file);
        }

        // Open the files and copy their data into the stream.
        // Remember the location of each file's data section.
        byte[] buffer = new byte[1024];
        it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry) it.Current;
            fe.dataOffset = os.GetFilePointer();
            CopyFile(fe, os, buffer);
        }

        // Second pass: seek back and write the real data offsets into the
        // directory entries recorded earlier.
        it = entries.GetEnumerator();
        while (it.MoveNext())
        {
            FileEntry fe = (FileEntry) it.Current;
            os.Seek(fe.directoryOffset);
            os.WriteLong(fe.dataOffset);
        }

        // Close the output stream. Set the os to null before trying to
        // close so that if an exception occurs during the close, the
        // finally clause below will not attempt to close the stream
        // a second time.
        OutputStream tmp = os;
        os = null;
        tmp.Close();
    }
    finally
    {
        // Only reached with a non-null os when an exception escaped above;
        // the close failure is deliberately ignored so the original
        // exception is the one that propagates.
        if (os != null)
        {
            try
            {
                os.Close();
            }
            catch (System.IO.IOException)
            {
            }
        }
    }
}
/// <summary>
/// Merges the term dictionaries, frequency (.frq) and proximity (.prx)
/// data of all readers into the target segment's postings files.
/// </summary>
private void MergeTerms()
{
    try
    {
        freqOutput = directory.CreateFile(segment + ".frq");
        proxOutput = directory.CreateFile(segment + ".prx");
        termInfosWriter = new TermInfosWriter(directory, segment, fieldInfos);
        skipInterval = termInfosWriter.skipInterval;
        queue = new SegmentMergeQueue(readers.Count);

        MergeTermInfos();
    }
    finally
    {
        // BUGFIX: previously a failure in an earlier Close() skipped the
        // remaining Close() calls, leaking the other file handles. Close
        // each resource independently, remember the first IOException,
        // and rethrow it once everything has been released.
        System.IO.IOException keep = null;
        if (freqOutput != null)
        {
            try { freqOutput.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (proxOutput != null)
        {
            try { proxOutput.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (termInfosWriter != null)
        {
            try { termInfosWriter.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (queue != null)
        {
            try { queue.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (keep != null)
        {
            throw keep;
        }
    }
}
/// <summary>
/// Writes the inverted postings for one segment: term dictionary entries
/// (via TermInfosWriter), frequency data (.frq), position data (.prx),
/// and, for fields that request it, term vectors.
/// </summary>
private void WritePostings(Posting[] postings, System.String segment)
{
    OutputStream freq = null, prox = null;
    TermInfosWriter tis = null;
    TermVectorsWriter termVectorWriter = null;
    try
    {
        // open files for inverse index storage
        freq = directory.CreateFile(segment + ".frq");
        prox = directory.CreateFile(segment + ".prx");
        tis = new TermInfosWriter(directory, segment, fieldInfos);
        TermInfo ti = new TermInfo();
        System.String currentField = null;

        for (int i = 0; i < postings.Length; i++)
        {
            Posting posting = postings[i];

            // add an entry to the dictionary with pointers to prox and freq files
            ti.Set(1, freq.GetFilePointer(), prox.GetFilePointer(), -1);
            tis.Add(posting.term, ti);

            // add an entry to the freq file
            int postingFreq = posting.freq;
            if (postingFreq == 1)
            {
                // optimize freq=1: set low bit of doc num to mean "freq is 1"
                freq.WriteVInt(1);
            }
            else
            {
                freq.WriteVInt(0);           // the document number
                freq.WriteVInt(postingFreq); // frequency in doc
            }

            // write positions, delta-encoded against the previous position
            int lastPosition = 0;
            int[] positions = posting.positions;
            for (int j = 0; j < postingFreq; j++)
            {
                int position = positions[j];
                prox.WriteVInt(position - lastPosition);
                lastPosition = position;
            }

            // check to see if we switched to a new Field
            // (reference comparison: postings of one field share the
            // interned/same field string)
            System.String termField = posting.term.Field();
            if ((System.Object) currentField != (System.Object) termField)
            {
                // changing Field - see if there is something to save
                currentField = termField;
                FieldInfo fi = fieldInfos.FieldInfo(currentField);
                if (fi.storeTermVector)
                {
                    if (termVectorWriter == null)
                    {
                        termVectorWriter = new TermVectorsWriter(directory, segment, fieldInfos);
                        termVectorWriter.OpenDocument();
                    }
                    termVectorWriter.OpenField(currentField);
                }
                else if (termVectorWriter != null)
                {
                    termVectorWriter.CloseField();
                }
            }
            if (termVectorWriter != null && termVectorWriter.IsFieldOpen())
            {
                termVectorWriter.AddTerm(posting.term.Text(), postingFreq);
            }
        }
        if (termVectorWriter != null)
        {
            termVectorWriter.CloseDocument();
        }
    }
    finally
    {
        // make an effort to close all streams we can, but remember and
        // re-throw the first exception encountered in this process
        System.IO.IOException keep = null;
        if (freq != null)
        {
            try { freq.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (prox != null)
        {
            try { prox.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (tis != null)
        {
            try { tis.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        if (termVectorWriter != null)
        {
            try { termVectorWriter.Close(); }
            catch (System.IO.IOException e) { if (keep == null) keep = e; }
        }
        // BUGFIX: rethrow the remembered exception itself instead of
        // wrapping its stack-trace text in a new IOException, which
        // discarded the original message and exception details.
        if (keep != null)
        {
            throw keep;
        }
    }
}