CloseReaders() private method

Closes all IndexReaders that have been added. Should not be called before Merge().
private void CloseReaders()
Returns: void
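
Every example below follows the same call order: create a SegmentMerger, Add() each IndexReader, call Merge(), and only then call CloseReaders(). The short sketch that follows illustrates that order; it is not taken from the Lucene.Net sources, it assumes access to the internal SegmentMerger/SegmentReader API (as the tests below have), and dir, si1, si2 and mergedName are hypothetical placeholders.

        // Minimal sketch of the Add() -> Merge() -> CloseReaders() order.
        // Types are Lucene.Net.Store.Directory and the internal
        // Lucene.Net.Index.SegmentInfo/SegmentReader/SegmentMerger classes,
        // exactly as used in the examples below.
        private static int MergeAndCloseExample(Directory dir, SegmentInfo si1, SegmentInfo si2, System.String mergedName)
        {
            SegmentReader r1 = SegmentReader.Get(si1);
            SegmentReader r2 = SegmentReader.Get(si2);

            SegmentMerger merger = new SegmentMerger(dir, mergedName);
            merger.Add(r1);
            merger.Add(r2);

            int docsMerged = merger.Merge();  // merge the added readers first...
            merger.CloseReaders();            // ...then close them; never before Merge()

            return docsMerged;
        }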
Example #1
        public virtual void  TestMerge()
        {
            //System.out.println("----------------TestMerge------------------");
            SegmentMerger merger = new SegmentMerger(mergedDir, mergedSegment, false);

            merger.Add(reader1);
            merger.Add(reader2);
            try
            {
                int docsMerged = merger.Merge();
                merger.CloseReaders();
                Assert.IsTrue(docsMerged == 2);
                //Should be able to open a new SegmentReader against the new directory
                SegmentReader mergedReader = new SegmentReader(new SegmentInfo(mergedSegment, docsMerged, mergedDir));
                Assert.IsTrue(mergedReader != null);
                Assert.IsTrue(mergedReader.NumDocs() == 2);
                Document newDoc1 = mergedReader.Document(0);
                Assert.IsTrue(newDoc1 != null);
                //There are 2 unstored fields on the document
                Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - 2);
                Document newDoc2 = mergedReader.Document(1);
                Assert.IsTrue(newDoc2 != null);
                Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - 2);

                TermDocs termDocs = mergedReader.TermDocs(new Term(DocHelper.TEXT_FIELD_2_KEY, "Field"));
                Assert.IsTrue(termDocs != null);
                Assert.IsTrue(termDocs.Next() == true);

                System.Collections.ICollection stored = mergedReader.GetIndexedFieldNames(true);
                Assert.IsTrue(stored != null);
                //System.out.println("stored size: " + stored.size());
                Assert.IsTrue(stored.Count == 2);

                TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
                Assert.IsTrue(vector != null);
                System.String[] terms = vector.GetTerms();
                Assert.IsTrue(terms != null);
                //System.out.println("Terms size: " + terms.length);
                Assert.IsTrue(terms.Length == 3);
                int[] freqs = vector.GetTermFrequencies();
                Assert.IsTrue(freqs != null);
                //System.out.println("Freqs size: " + freqs.length);

                for (int i = 0; i < terms.Length; i++)
                {
                    System.String term = terms[i];
                    int           freq = freqs[i];
                    //System.out.println("Term: " + term + " Freq: " + freq);
                    Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != -1);
                    Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
                }
            }
            catch (System.IO.IOException e)
            {
                System.Console.Error.WriteLine(e.StackTrace);
                Assert.IsTrue(false);
            }
            //System.out.println("---------------------end TestMerge-------------------");
        }
Example #2
		public virtual void  TestMerge()
		{
			//System.out.println("----------------TestMerge------------------");
			SegmentMerger merger = new SegmentMerger(mergedDir, mergedSegment, false);
			merger.Add(reader1);
			merger.Add(reader2);
			try
			{
				int docsMerged = merger.Merge();
				merger.CloseReaders();
				Assert.IsTrue(docsMerged == 2);
				//Should be able to open a new SegmentReader against the new directory
				SegmentReader mergedReader = new SegmentReader(new SegmentInfo(mergedSegment, docsMerged, mergedDir));
				Assert.IsTrue(mergedReader != null);
				Assert.IsTrue(mergedReader.NumDocs() == 2);
				Document newDoc1 = mergedReader.Document(0);
				Assert.IsTrue(newDoc1 != null);
				//There are 2 unstored fields on the document
				Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - 2);
				Document newDoc2 = mergedReader.Document(1);
				Assert.IsTrue(newDoc2 != null);
				Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - 2);
				
				TermDocs termDocs = mergedReader.TermDocs(new Term(DocHelper.TEXT_FIELD_2_KEY, "Field"));
				Assert.IsTrue(termDocs != null);
				Assert.IsTrue(termDocs.Next() == true);
				
				System.Collections.ICollection stored = mergedReader.GetIndexedFieldNames(true);
				Assert.IsTrue(stored != null);
				//System.out.println("stored size: " + stored.size());
				Assert.IsTrue(stored.Count == 2);
				
				TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
				Assert.IsTrue(vector != null);
				System.String[] terms = vector.GetTerms();
				Assert.IsTrue(terms != null);
				//System.out.println("Terms size: " + terms.length);
				Assert.IsTrue(terms.Length == 3);
				int[] freqs = vector.GetTermFrequencies();
				Assert.IsTrue(freqs != null);
				//System.out.println("Freqs size: " + freqs.length);
				
				for (int i = 0; i < terms.Length; i++)
				{
					System.String term = terms[i];
					int freq = freqs[i];
					//System.out.println("Term: " + term + " Freq: " + freq);
					Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != - 1);
					Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
				}
			}
			catch (System.IO.IOException e)
			{
                System.Console.Error.WriteLine(e.StackTrace);
				Assert.IsTrue(false);
			}
			//System.out.println("---------------------end TestMerge-------------------");
		}
Example #3
        /// <summary>Merges the named range of segments, replacing them in the stack with a
        /// single segment.
        /// </summary>
        private void  MergeSegments(int minSegment, int end)
        {
            System.String mergedName = NewSegmentName();
            if (infoStream != null)
            {
                infoStream.Write("merging segments");
            }
            SegmentMerger merger = new SegmentMerger(this, mergedName);

            System.Collections.ArrayList segmentsToDelete = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
            for (int i = minSegment; i < end; i++)
            {
                SegmentInfo si = segmentInfos.Info(i);
                if (infoStream != null)
                {
                    infoStream.Write(" " + si.name + " (" + si.docCount + " docs)");
                }
                IndexReader reader = SegmentReader.Get(si);
                merger.Add(reader);
                if ((reader.Directory() == this.directory) || (reader.Directory() == this.ramDirectory))
                {
                    segmentsToDelete.Add(reader);                     // queue segment for deletion
                }
            }

            int mergedDocCount = merger.Merge();

            if (infoStream != null)
            {
                infoStream.WriteLine(" into " + mergedName + " (" + mergedDocCount + " docs)");
            }

            for (int i = end - 1; i >= minSegment; i--)
            {
                // remove old infos & add new
                segmentInfos.RemoveAt(i);
            }
            segmentInfos.Add(new SegmentInfo(mergedName, mergedDocCount, directory));

            // close readers before we attempt to delete now-obsolete segments
            merger.CloseReaders();

            lock (directory)
            {
                // in- & inter-process sync
                new AnonymousClassWith3(segmentsToDelete, this, directory.MakeLock(COMMIT_LOCK_NAME), COMMIT_LOCK_TIMEOUT).Run();
            }

            if (useCompoundFile)
            {
                System.Collections.ArrayList filesToDelete = merger.CreateCompoundFile(mergedName + ".tmp");
                lock (directory)
                {
                    // in- & inter-process sync
                    new AnonymousClassWith4(mergedName, filesToDelete, this, directory.MakeLock(COMMIT_LOCK_NAME), COMMIT_LOCK_TIMEOUT).Run();
                }
            }
        }
Example #4
        public virtual void  TestMerge()
        {
            SegmentMerger merger = new SegmentMerger(mergedDir, mergedSegment);

            merger.Add(reader1);
            merger.Add(reader2);
            int docsMerged = merger.Merge();

            merger.CloseReaders();
            Assert.IsTrue(docsMerged == 2);
            //Should be able to open a new SegmentReader against the new directory
            SegmentReader mergedReader = SegmentReader.Get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true));

            Assert.IsTrue(mergedReader != null);
            Assert.IsTrue(mergedReader.NumDocs() == 2);
            Document newDoc1 = mergedReader.Document(0);

            Assert.IsTrue(newDoc1 != null);
            //There are 2 unstored fields on the document
            Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
            Document newDoc2 = mergedReader.Document(1);

            Assert.IsTrue(newDoc2 != null);
            Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);

            TermDocs termDocs = mergedReader.TermDocs(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));

            Assert.IsTrue(termDocs != null);
            Assert.IsTrue(termDocs.Next() == true);

            System.Collections.Generic.ICollection <string> stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
            Assert.IsTrue(stored != null);
            //System.out.println("stored size: " + stored.size());
            Assert.IsTrue(stored.Count == 4, "We do not have 4 fields that were indexed with term vector");

            TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);

            Assert.IsTrue(vector != null);
            System.String[] terms = vector.GetTerms();
            Assert.IsTrue(terms != null);
            //System.out.println("Terms size: " + terms.length);
            Assert.IsTrue(terms.Length == 3);
            int[] freqs = vector.GetTermFrequencies();
            Assert.IsTrue(freqs != null);
            //System.out.println("Freqs size: " + freqs.length);
            Assert.IsTrue(vector is TermPositionVector == true);

            for (int i = 0; i < terms.Length; i++)
            {
                System.String term = terms[i];
                int           freq = freqs[i];
                //System.out.println("Term: " + term + " Freq: " + freq);
                Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != -1);
                Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
            }

            TestSegmentReader.CheckNorms(mergedReader);
        }
Example #5
		internal static void  Merge(System.String seg1, System.String seg2, System.String merged)
		{
			Directory directory = FSDirectory.GetDirectory("test", false);
			
			SegmentReader r1 = new SegmentReader(new SegmentInfo(seg1, 1, directory));
			SegmentReader r2 = new SegmentReader(new SegmentInfo(seg2, 1, directory));
			
			SegmentMerger merger = new SegmentMerger(directory, merged, false);
			merger.Add(r1);
			merger.Add(r2);
			merger.Merge();
			merger.CloseReaders();
			
			directory.Close();
		}
Example #6
        private void  Merge(System.String seg1, System.String seg2, System.String merged, bool useCompoundFile)
        {
            Directory directory = FSDirectory.GetDirectory(indexDir, false);

            SegmentReader r1 = new SegmentReader(new SegmentInfo(seg1, 1, directory));
            SegmentReader r2 = new SegmentReader(new SegmentInfo(seg2, 1, directory));

            SegmentMerger merger = new SegmentMerger(directory, merged, useCompoundFile);

            merger.Add(r1);
            merger.Add(r2);
            merger.Merge();
            merger.CloseReaders();

            directory.Close();
        }
Example #7
        internal static void  Merge(System.String seg1, System.String seg2, System.String merged)
        {
            Directory directory = FSDirectory.GetDirectory("test", false);

            SegmentReader r1 = new SegmentReader(new SegmentInfo(seg1, 1, directory));
            SegmentReader r2 = new SegmentReader(new SegmentInfo(seg2, 1, directory));

            SegmentMerger merger = new SegmentMerger(directory, merged, false);

            merger.Add(r1);
            merger.Add(r2);
            merger.Merge();
            merger.CloseReaders();

            directory.Close();
        }
Example #8
        private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
        {
            SegmentReader r1 = SegmentReader.Get(si1);
            SegmentReader r2 = SegmentReader.Get(si2);

            SegmentMerger merger = new SegmentMerger(si1.dir, merged);

            merger.Add(r1);
            merger.Add(r2);
            merger.Merge();
            merger.CloseReaders();

            if (useCompoundFile)
            {
                System.Collections.IList filesToDelete = merger.CreateCompoundFile(merged + ".cfs");
                for (System.Collections.IEnumerator iter = filesToDelete.GetEnumerator(); iter.MoveNext();)
                {
                    si1.dir.DeleteFile((System.String)iter.Current);
                }
            }

            return(new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir, useCompoundFile, true));
        }
Example #9
        private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
        {
            SegmentReader r1 = SegmentReader.Get(true, si1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);
            SegmentReader r2 = SegmentReader.Get(true, si2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, null);

            SegmentMerger merger = new SegmentMerger(si1.dir, merged);

            merger.Add(r1);
            merger.Add(r2);
            merger.Merge(null);
            merger.CloseReaders();

            if (useCompoundFile)
            {
                System.Collections.Generic.ICollection <string> filesToDelete = merger.CreateCompoundFile(merged + ".cfs");
                for (System.Collections.IEnumerator iter = filesToDelete.GetEnumerator(); iter.MoveNext();)
                {
                    si1.dir.DeleteFile((System.String)iter.Current, null);
                }
            }

            return(new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir, useCompoundFile, true));
        }
Example #10
		private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
		{
			SegmentReader r1 = SegmentReader.Get(si1);
			SegmentReader r2 = SegmentReader.Get(si2);
			
			SegmentMerger merger = new SegmentMerger(si1.dir, merged);
			
			merger.Add(r1);
			merger.Add(r2);
			merger.Merge();
			merger.CloseReaders();
			
			if (useCompoundFile)
			{
				System.Collections.Generic.ICollection<string> filesToDelete = merger.CreateCompoundFile(merged + ".cfs");
				for (System.Collections.IEnumerator iter = filesToDelete.GetEnumerator(); iter.MoveNext(); )
				{
					si1.dir.DeleteFile((System.String) iter.Current);
				}
			}
			
			return new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir, useCompoundFile, true);
		}
Example #11
		public virtual void  TestMerge()
		{
			SegmentMerger merger = new SegmentMerger(mergedDir, mergedSegment);
			merger.Add(reader1);
			merger.Add(reader2);
			int docsMerged = merger.Merge();
			merger.CloseReaders();
			Assert.IsTrue(docsMerged == 2);
			//Should be able to open a new SegmentReader against the new directory
			SegmentReader mergedReader = SegmentReader.Get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true));
			Assert.IsTrue(mergedReader != null);
			Assert.IsTrue(mergedReader.NumDocs() == 2);
			Lucene.Net.Documents.Document newDoc1 = mergedReader.Document(0);
			Assert.IsTrue(newDoc1 != null);
			//There are 2 unstored fields on the document
			Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
			Lucene.Net.Documents.Document newDoc2 = mergedReader.Document(1);
			Assert.IsTrue(newDoc2 != null);
			Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);
			
			TermDocs termDocs = mergedReader.TermDocs(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));
			Assert.IsTrue(termDocs != null);
			Assert.IsTrue(termDocs.Next() == true);
			
			System.Collections.ICollection stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
			Assert.IsTrue(stored != null);
			//System.out.println("stored size: " + stored.size());
			Assert.IsTrue(stored.Count == 4, "We do not have 4 fields that were indexed with term vector");
			
			TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
			Assert.IsTrue(vector != null);
			System.String[] terms = vector.GetTerms();
			Assert.IsTrue(terms != null);
			//System.out.println("Terms size: " + terms.length);
			Assert.IsTrue(terms.Length == 3);
			int[] freqs = vector.GetTermFrequencies();
			Assert.IsTrue(freqs != null);
			//System.out.println("Freqs size: " + freqs.length);
			Assert.IsTrue(vector is TermPositionVector == true);
			
			for (int i = 0; i < terms.Length; i++)
			{
				System.String term = terms[i];
				int freq = freqs[i];
				//System.out.println("Term: " + term + " Freq: " + freq);
				Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != - 1);
				Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
			}
			
			TestSegmentReader.CheckNorms(mergedReader);
		}
Example #12
        /** Does the actual (time-consuming) work of the merge,
         *  but without holding synchronized lock on IndexWriter
         *  instance */
        private int MergeMiddle(MergePolicy.OneMerge merge)
        {
            merge.CheckAborted(directory);

            string mergedName = merge.info.name;

            SegmentMerger merger = null;

            int mergedDocCount = 0;

            SegmentInfos sourceSegments = merge.segments;
            SegmentInfos sourceSegmentsClone = merge.segmentsClone;
            int numSegments = sourceSegments.Count;

            if (infoStream != null)
                Message("merging " + merge.SegString(directory));

            merger = new SegmentMerger(this, mergedName, merge);

            bool success = false;

            // This is try/finally to make sure merger's readers are
            // closed:
            try
            {
                int totDocCount = 0;

                for (int i = 0; i < numSegments; i++)
                {
                    SegmentInfo si = sourceSegmentsClone.Info(i);
                    IndexReader reader = SegmentReader.Get(true, si, MERGE_READ_BUFFER_SIZE, merge.mergeDocStores); // no need to set deleter (yet)
                    merger.Add(reader);
                    totDocCount += reader.NumDocs();
                }
                if (infoStream != null)
                {
                    Message("merge: total " + totDocCount + " docs");
                }

                merge.CheckAborted(directory);

                // This is where all the work happens:
                mergedDocCount = merge.info.docCount = merger.Merge(merge.mergeDocStores);

                System.Diagnostics.Debug.Assert(mergedDocCount == totDocCount);

                success = true;

            }
            finally
            {
                // close readers before we attempt to delete
                // now-obsolete segments
                if (merger != null)
                {
                    merger.CloseReaders();
                }
            }

            if (!CommitMerge(merge, merger, mergedDocCount))
                // commitMerge will return false if this merge was aborted
                return 0;

            if (merge.useCompoundFile)
            {

                // Maybe force a sync here to allow reclaiming of the
                // disk space used by the segments we just merged:
                if (autoCommit && DoCommitBeforeMergeCFS(merge))
                {
                    long size;
                    lock (this)
                    {
                        size = merge.info.SizeInBytes();
                    }
                    Commit(size);
                }

                success = false;
                string compoundFileName = mergedName + "." + IndexFileNames.COMPOUND_FILE_EXTENSION;

                try
                {
                    merger.CreateCompoundFile(compoundFileName);
                    success = true;
                }
                catch (System.IO.IOException ioe)
                {
                    lock (this)
                    {
                        if (merge.IsAborted())
                        {
                            // This can happen if rollback or close(false)
                            // is called -- fall through to logic below to
                            // remove the partially created CFS:
                            success = true;
                        }
                        else
                            HandleMergeException(ioe, merge);
                    }
                }
                catch (System.Exception t)
                {
                    HandleMergeException(t, merge);
                }
                finally
                {
                    if (!success)
                    {
                        if (infoStream != null)
                            Message("hit exception creating compound file during merge");
                        lock (this)
                        {
                            deleter.DeleteFile(compoundFileName);
                        }
                    }
                }

                if (merge.IsAborted())
                {
                    if (infoStream != null)
                        Message("abort merge after building CFS");
                    deleter.DeleteFile(compoundFileName);
                    return 0;
                }

                lock (this)
                {
                    if (segmentInfos.IndexOf(merge.info) == -1 || merge.IsAborted())
                    {
                        // Our segment (committed in non-compound
                        // format) got merged away while we were
                        // building the compound format.
                        deleter.DeleteFile(compoundFileName);
                    }
                    else
                    {
                        merge.info.SetUseCompoundFile(true);
                        Checkpoint();
                    }
                }
            }

            // Force a sync after commiting the merge.  Once this
            // sync completes then all index files referenced by the
            // current segmentInfos are on stable storage so if the
            // OS/machine crashes, or power cord is yanked, the
            // index will be intact.  Note that this is just one
            // (somewhat arbitrary) policy; we could try other
            // policies like only sync if it's been > X minutes or
            // more than Y bytes have been written, etc.
            if (autoCommit)
            {
                long size;
                lock (this)
                {
                    size = merge.info.SizeInBytes();
                }
                Commit(size);
            }

            return mergedDocCount;
        }
Example #13
		private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
		{
			Directory directory = FSDirectory.GetDirectory(indexDir, false);
			
			SegmentReader r1 = SegmentReader.Get(si1);
			SegmentReader r2 = SegmentReader.Get(si2);
			
			SegmentMerger merger = new SegmentMerger(directory, merged);
			
			merger.Add(r1);
			merger.Add(r2);
			merger.Merge();
			merger.CloseReaders();
			
			if (useCompoundFile)
			{
				System.Collections.ArrayList filesToDelete = merger.CreateCompoundFile(merged + ".cfs");
				for (System.Collections.IEnumerator iter = filesToDelete.GetEnumerator(); iter.MoveNext(); )
				{
					directory.DeleteFile((System.String) iter.Current);
				}
			}
			
			directory.Close();
			return new SegmentInfo(merged, si1.docCount + si2.docCount, directory, useCompoundFile, true);
		}
Example #14
		/// <summary>Merges the named range of segments, replacing them in the stack with a
		/// single segment. 
		/// </summary>
		private void  MergeSegments(int minSegment, int end)
		{
			System.String mergedName = NewSegmentName();
			if (infoStream != null)
				infoStream.Write("merging segments");
			SegmentMerger merger = new SegmentMerger(this, mergedName);
			
			System.Collections.ArrayList segmentsToDelete = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
			for (int i = minSegment; i < end; i++)
			{
				SegmentInfo si = segmentInfos.Info(i);
				if (infoStream != null)
					infoStream.Write(" " + si.name + " (" + si.docCount + " docs)");
				IndexReader reader = SegmentReader.Get(si);
				merger.Add(reader);
				if ((reader.Directory() == this.directory) || (reader.Directory() == this.ramDirectory))
					segmentsToDelete.Add(reader); // queue segment for deletion
			}
			
			int mergedDocCount = merger.Merge();
			
			if (infoStream != null)
			{
				infoStream.WriteLine(" into " + mergedName + " (" + mergedDocCount + " docs)");
			}
			
			for (int i = end - 1; i >= minSegment; i--)
				// remove old infos & add new
				segmentInfos.RemoveAt(i);
			segmentInfos.Add(new SegmentInfo(mergedName, mergedDocCount, directory));
			
			// close readers before we attempt to delete now-obsolete segments
			merger.CloseReaders();
			
			lock (directory)
			{
				// in- & inter-process sync
				new AnonymousClassWith3(segmentsToDelete, this, directory.MakeLock(COMMIT_LOCK_NAME), COMMIT_LOCK_TIMEOUT).Run();
			}
			
			if (useCompoundFile)
			{
				System.Collections.ArrayList filesToDelete = merger.CreateCompoundFile(mergedName + ".tmp");
				lock (directory)
				{
					// in- & inter-process sync
					new AnonymousClassWith4(mergedName, filesToDelete, this, directory.MakeLock(COMMIT_LOCK_NAME), COMMIT_LOCK_TIMEOUT).Run();
				}
			}
		}
Example #15
		private void  Merge(System.String seg1, System.String seg2, System.String merged, bool useCompoundFile)
		{
			Directory directory = FSDirectory.GetDirectory(indexDir, false);
			
			SegmentReader r1 = new SegmentReader(new SegmentInfo(seg1, 1, directory));
			SegmentReader r2 = new SegmentReader(new SegmentInfo(seg2, 1, directory));
			
			SegmentMerger merger = new SegmentMerger(directory, merged, useCompoundFile);
			
			merger.Add(r1);
			merger.Add(r2);
			merger.Merge();
			merger.CloseReaders();
			
			directory.Close();
		}
Example #16
		/// <summary>Does the actual (time-consuming) work of the merge,
		/// but without holding synchronized lock on IndexWriter
		/// instance 
		/// </summary>
		private int MergeMiddle(MergePolicy.OneMerge merge)
		{
			
			merge.CheckAborted(directory);
			
			System.String mergedName = merge.info.name;
			
			SegmentMerger merger = null;
			
			int mergedDocCount = 0;
			
			SegmentInfos sourceSegments = merge.segments;
			SegmentInfos sourceSegmentsClone = merge.segmentsClone;
			int numSegments = sourceSegments.Count;
			
			if (infoStream != null)
				Message("merging " + merge.SegString(directory));
			
			merger = new SegmentMerger(this, mergedName, merge);
			
			// This is try/finally to make sure merger's readers are
			// closed:
			
			bool success = false;
			
			try
			{
				int totDocCount = 0;
				
				for (int i = 0; i < numSegments; i++)
				{
					SegmentInfo si = sourceSegmentsClone.Info(i);
					IndexReader reader = SegmentReader.Get(si, MERGE_READ_BUFFER_SIZE, merge.mergeDocStores); // no need to set deleter (yet)
					merger.Add(reader);
					totDocCount += reader.NumDocs();
				}
				if (infoStream != null)
				{
					Message("merge: total " + totDocCount + " docs");
				}
				
				merge.CheckAborted(directory);
				
				mergedDocCount = merge.info.docCount = merger.Merge(merge.mergeDocStores);
				
				System.Diagnostics.Debug.Assert(mergedDocCount == totDocCount);
				
				success = true;
			}
			finally
			{
				// close readers before we attempt to delete
				// now-obsolete segments
				if (merger != null)
				{
					merger.CloseReaders();
				}
				if (!success)
				{
					if (infoStream != null)
						Message("hit exception during merge; now refresh deleter on segment " + mergedName);
					lock (this)
					{
						AddMergeException(merge);
						deleter.Refresh(mergedName);
					}
				}
			}
			
			if (!CommitMerge(merge))
			// commitMerge will return false if this merge was aborted
				return 0;
			
			if (merge.useCompoundFile)
			{
				
				success = false;
				bool skip = false;
				System.String compoundFileName = mergedName + "." + IndexFileNames.COMPOUND_FILE_EXTENSION;
				
				try
				{
					try
					{
						merger.CreateCompoundFile(compoundFileName);
						success = true;
					}
					catch (System.IO.IOException ioe)
					{
						lock (this)
						{
							if (segmentInfos.IndexOf(merge.info) == - 1)
							{
								// If another merge kicked in and merged our
								// new segment away while we were trying to
								// build the compound file, we can hit a
								// FileNotFoundException and possibly
								// IOException over NFS.  We can tell this has
								// happened because our SegmentInfo is no
								// longer in the segments; if this has
								// happened it is safe to ignore the exception
								// & skip finishing/committing our compound
								// file creating.
								if (infoStream != null)
									Message("hit exception creating compound file; ignoring it because our info (segment " + merge.info.name + ") has been merged away");
								skip = true;
							}
							else
								throw ioe;
						}
					}
				}
				finally
				{
					if (!success)
					{
						if (infoStream != null)
							Message("hit exception creating compound file during merge: skip=" + skip);
						
						lock (this)
						{
							if (!skip)
								AddMergeException(merge);
							deleter.DeleteFile(compoundFileName);
						}
					}
				}
				
				if (!skip)
				{
					
					lock (this)
					{
						if (skip || segmentInfos.IndexOf(merge.info) == - 1 || merge.IsAborted())
						{
							// Our segment (committed in non-compound
							// format) got merged away while we were
							// building the compound format.
							deleter.DeleteFile(compoundFileName);
						}
						else
						{
							success = false;
							try
							{
								merge.info.SetUseCompoundFile(true);
								Checkpoint();
								success = true;
							}
							finally
							{
								if (!success)
								{
									if (infoStream != null)
										Message("hit exception checkpointing compound file during merge");
									
									// Must rollback:
									AddMergeException(merge);
									merge.info.SetUseCompoundFile(false);
									DeletePartialSegmentsFile();
									deleter.DeleteFile(compoundFileName);
								}
							}
							
							// Give deleter a chance to remove files now.
							deleter.Checkpoint(segmentInfos, autoCommit);
						}
					}
				}
			}
			
			return mergedDocCount;
		}