        /// <summary>
        /// Called if we hit an exception at a bad time (when
        ///  updating the index files) and must discard all
        ///  currently buffered docs. This resets our state,
        ///  discarding any docs added since the last flush.
        /// </summary>
        internal virtual void Abort(ISet<string> createdFiles)
        {
            //System.out.println(Thread.currentThread().getName() + ": now abort seg=" + segmentInfo.name);
            HasAborted = Aborting = true;
            try
            {
                if (InfoStream.IsEnabled("DWPT"))
                {
                    InfoStream.Message("DWPT", "now abort");
                }
                try
                {
                    Consumer.Abort();
                }
                catch (Exception)
                {
                    // Intentionally swallow any exception from the consumer's
                    // abort: we are already aborting and must reset our
                    // buffered state regardless.
                }

                PendingUpdates.Clear();
                CollectionsHelper.AddAll(createdFiles, Directory.CreatedFiles);
            }
            finally
            {
                Aborting = false;
                if (InfoStream.IsEnabled("DWPT"))
                {
                    InfoStream.Message("DWPT", "done abort");
                }
            }
        }
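        // A hedged caller-side sketch, not taken from this file: "dwpt" is an
        // assumed local owned by the indexing chain. The caller passes in a
        // set that collects the file names the aborted segment created, so it
        // can schedule them for deletion afterwards.
        ISet<string> abortedFiles = new HashSet<string>();
        try
        {
            dwpt.Abort(abortedFiles);
        }
        finally
        {
            // abortedFiles now holds every file the discarded segment wrote.
        }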
Example 2
 /// <summary>
 /// Expert: create a ParallelCompositeReader based on the provided
 ///  readers and storedFieldReaders; when a document is
 ///  loaded, only storedFieldReaders will be used.
 /// </summary>
 public ParallelCompositeReader(bool closeSubReaders, CompositeReader[] readers, CompositeReader[] storedFieldReaders)
     : base(PrepareSubReaders(readers, storedFieldReaders))
 {
     this.CloseSubReaders = closeSubReaders;
     CollectionsHelper.AddAll(CompleteReaderSet, readers);
     CollectionsHelper.AddAll(CompleteReaderSet, storedFieldReaders);
     // update ref-counts (like MultiReader):
     if (!closeSubReaders)
     {
         foreach (IndexReader reader in CompleteReaderSet)
         {
             reader.IncRef();
         }
     }
     // finally add our own synthetic readers, so we close or decRef them, too (it does not matter what we do)
     CollectionsHelper.AddAll(CompleteReaderSet, GetSequentialSubReaders());
 }
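 // A minimal usage sketch, assuming "mainDir" and "storedDir" hold indexes
 // built in lockstep so docIDs line up; stored fields are then served only
 // by the reader listed in the second array.
 CompositeReader main = DirectoryReader.Open(mainDir);
 CompositeReader stored = DirectoryReader.Open(storedDir);
 var parallel = new ParallelCompositeReader(
     true, // closeSubReaders: dispose both sub-readers with the parallel reader
     new CompositeReader[] { main, stored },
     new CompositeReader[] { stored });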
Example 3
        /// <summary>
        /// Returns all files in use by this segment.
        /// </summary>
        public virtual ICollection<string> Files()
        {
            // Start from the wrapped info's files:
            ISet<string> files = new HashSet<string>(Info.Files);

            // TODO we could rely on TrackingDir.getCreatedFiles() (like we do for
            // updates) and then maybe even be able to remove LiveDocsFormat.files().

            // Must separately add any live docs files:
            Info.Codec.LiveDocsFormat().Files(this, files);

            // Must separately add any field updates files:
            foreach (ISet<string> updateFiles in GenUpdatesFiles_Renamed.Values)
            {
                CollectionsHelper.AddAll(files, updateFiles);
            }

            return files;
        }
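        // A short sketch, assuming an existing SegmentCommitInfo "commitInfo":
        // the returned collection covers the wrapped info's files plus the
        // live-docs and field-update files added above.
        foreach (string fileName in commitInfo.Files())
        {
            Console.WriteLine(fileName);
        }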
Example 4
        /// <summary>
        /// Seals the <seealso cref="SegmentInfo"/> for the new flushed segment and persists
        /// the deleted documents <seealso cref="MutableBits"/>.
        /// </summary>
        internal virtual void SealFlushedSegment(FlushedSegment flushedSegment)
        {
            Debug.Assert(flushedSegment != null);

            SegmentCommitInfo newSegment = flushedSegment.SegmentInfo;

            IndexWriter.SetDiagnostics(newSegment.Info, IndexWriter.SOURCE_FLUSH);

            IOContext context = new IOContext(new FlushInfo(newSegment.Info.DocCount, newSegment.SizeInBytes()));

            bool success = false;

            try
            {
                if (IndexWriterConfig.UseCompoundFile)
                {
                    CollectionsHelper.AddAll(FilesToDelete, IndexWriter.CreateCompoundFile(InfoStream, Directory, MergeState.CheckAbort.NONE, newSegment.Info, context));
                    newSegment.Info.UseCompoundFile = true;
                }

                // Have codec write SegmentInfo.  Must do this after
                // creating CFS so that 1) .si isn't slurped into CFS,
                // and 2) .si reflects useCompoundFile=true change
                // above:
                Codec.SegmentInfoFormat().SegmentInfoWriter.Write(Directory, newSegment.Info, flushedSegment.FieldInfos, context);

                // TODO: ideally we would freeze newSegment here!!
                // because any changes after writing the .si will be
                // lost...

                // Must write deleted docs after the CFS so we don't
                // slurp the del file into CFS:
                if (flushedSegment.LiveDocs != null)
                {
                    int delCount = flushedSegment.DelCount;
                    Debug.Assert(delCount > 0);
                    if (InfoStream.IsEnabled("DWPT"))
                    {
                        InfoStream.Message("DWPT", "flush: write " + delCount + " deletes gen=" + flushedSegment.SegmentInfo.DelGen);
                    }

                    // TODO: we should prune the segment if it's 100%
                    // deleted... but merge will also catch it.

                    // TODO: in the NRT case it'd be better to hand
                    // this del vector over to the
                    // shortly-to-be-opened SegmentReader and let it
                    // carry the changes; there's no reason to use
                    // filesystem as intermediary here.

                    SegmentCommitInfo info  = flushedSegment.SegmentInfo;
                    Codec             codec = info.Info.Codec;
                    codec.LiveDocsFormat().WriteLiveDocs(flushedSegment.LiveDocs, Directory, info, delCount, context);
                    newSegment.DelCount = delCount;
                    newSegment.AdvanceDelGen();
                }

                success = true;
            }
            finally
            {
                if (!success)
                {
                    if (InfoStream.IsEnabled("DWPT"))
                    {
                        InfoStream.Message("DWPT", "hit exception creating compound file for newly flushed segment " + newSegment.Info.Name);
                    }
                }
            }
        }
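        // A hedged sketch of the knob SealFlushedSegment consults; the
        // LuceneVersion constant, the settable UseCompoundFile property, and
        // the "analyzer" local are assumptions about this port. Turning the
        // flag off makes the sealing step above skip the compound-file build.
        var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
        {
            UseCompoundFile = false
        };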
Example 5
        /// <summary>
        /// Expert: create a ParallelAtomicReader based on the provided
        ///  readers and storedFieldsReaders; when a document is
        ///  loaded, only storedFieldsReaders will be used.
        /// </summary>
        public ParallelAtomicReader(bool closeSubReaders, AtomicReader[] readers, AtomicReader[] storedFieldsReaders)
        {
            if (!InstanceFieldsInitialized)
            {
                InitializeInstanceFields();
                InstanceFieldsInitialized = true;
            }
            this.CloseSubReaders = closeSubReaders;
            if (readers.Length == 0 && storedFieldsReaders.Length > 0)
            {
                throw new System.ArgumentException("There must be at least one main reader if storedFieldsReaders are used.");
            }
            this.ParallelReaders     = (AtomicReader[])readers.Clone();
            this.StoredFieldsReaders = (AtomicReader[])storedFieldsReaders.Clone();
            if (ParallelReaders.Length > 0)
            {
                AtomicReader first = ParallelReaders[0];
                this.maxDoc       = first.MaxDoc;
                this.numDocs      = first.NumDocs;
                this.hasDeletions = first.HasDeletions;
            }
            else
            {
                this.maxDoc       = this.numDocs = 0;
                this.hasDeletions = false;
            }
            CollectionsHelper.AddAll(CompleteReaderSet, this.ParallelReaders);
            CollectionsHelper.AddAll(CompleteReaderSet, this.StoredFieldsReaders);

            // check compatibility:
            foreach (AtomicReader reader in CompleteReaderSet)
            {
                if (reader.MaxDoc != maxDoc)
                {
                    throw new System.ArgumentException("All readers must have same maxDoc: " + maxDoc + "!=" + reader.MaxDoc);
                }
            }

            // TODO: make this read-only in a cleaner way?
            FieldInfos.Builder builder = new FieldInfos.Builder();
            // build FieldInfos and fieldToReader map:
            foreach (AtomicReader reader in this.ParallelReaders)
            {
                FieldInfos readerFieldInfos = reader.FieldInfos;
                foreach (FieldInfo fieldInfo in readerFieldInfos)
                {
                    // NOTE: first reader having a given field "wins":
                    if (!FieldToReader.ContainsKey(fieldInfo.Name))
                    {
                        builder.Add(fieldInfo);
                        FieldToReader[fieldInfo.Name] = reader;
                        if (fieldInfo.HasVectors())
                        {
                            TvFieldToReader[fieldInfo.Name] = reader;
                        }
                    }
                }
            }
            FieldInfos_Renamed = builder.Finish();

            // build Fields instance
            foreach (AtomicReader reader in this.ParallelReaders)
            {
                Fields readerFields = reader.Fields;
                if (readerFields != null)
                {
                    foreach (string field in readerFields)
                    {
                        // only add if the reader responsible for that field name is the current:
                        if (FieldToReader[field].Equals(reader))
                        {
                            this.Fields_Renamed.AddField(field, readerFields.Terms(field));
                        }
                    }
                }
            }

            // do this finally so any exceptions that occurred before don't affect ref-counts:
            foreach (AtomicReader reader in CompleteReaderSet)
            {
                if (!closeSubReaders)
                {
                    reader.IncRef();
                }
                reader.RegisterParentReader(this);
            }
        }
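        // A hedged usage sketch, assuming AtomicReaders "fast" and "slow" over
        // indexes built in lockstep with disjoint fields: the first reader
        // listed wins any field both declare, and stored fields come only from
        // readers in the third argument.
        var parallel = new ParallelAtomicReader(
            true, // closeSubReaders
            new AtomicReader[] { fast, slow },
            new AtomicReader[] { fast });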