Exemplo n.º 1
0
        internal int[] docMap = null; // maps doc numbers around deletions; null when the reader has none

        internal SegmentMergeInfo(int b, TermEnum te, Monodoc.Lucene.Net.Index.IndexReader r)
        {
            base_Renamed = b;
            reader = r;
            termEnum = te;
            term = te.Term();
            postings = reader.TermPositions();

            // Precompute a remapping of this segment's document numbers that
            // skips deleted slots: surviving docs get consecutive new numbers,
            // deleted docs are marked with -1.
            if (reader.HasDeletions())
            {
                int maxDoc = reader.MaxDoc();
                docMap = new int[maxDoc];
                int nextDoc = 0;
                for (int doc = 0; doc < maxDoc; doc++)
                {
                    docMap[doc] = reader.IsDeleted(doc) ? -1 : nextDoc++;
                }
            }
        }
 /// <summary> Writes the merged norms: one ".f" file per indexed field,
 /// containing a norm byte for every non-deleted document of every reader,
 /// in reader order. A reader without norms contributes zero bytes.</summary>
 /// <throws>  IOException </throws>
 private void MergeNorms()
 {
     for (int fieldNum = 0; fieldNum < fieldInfos.Size(); fieldNum++)
     {
         FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNum);
         if (!fieldInfo.isIndexed)
         {
             continue; // only indexed fields carry norms
         }

         OutputStream output = directory.CreateFile(segment + ".f" + fieldNum);
         try
         {
             for (int readerNum = 0; readerNum < readers.Count; readerNum++)
             {
                 Monodoc.Lucene.Net.Index.IndexReader reader = (Monodoc.Lucene.Net.Index.IndexReader) readers[readerNum];
                 byte[] norms = reader.Norms(fieldInfo.name);
                 int maxDoc = reader.MaxDoc();
                 for (int doc = 0; doc < maxDoc; doc++)
                 {
                     // Deleted docs are squeezed out of the merged segment,
                     // so their norm bytes are dropped as well.
                     if (!reader.IsDeleted(doc))
                     {
                         output.WriteByte(norms != null ? norms[doc] : (byte) 0);
                     }
                 }
             }
         }
         finally
         {
             output.Close();
         }
     }
 }
Exemplo n.º 3
0
        public MultiTermEnum(Monodoc.Lucene.Net.Index.IndexReader[] readers, int[] starts, Term t)
        {
            queue = new SegmentMergeQueue(readers.Length);
            for (int i = 0; i < readers.Length; i++)
            {
                Monodoc.Lucene.Net.Index.IndexReader reader = readers[i];

                // Position each sub-enumerator: at term t when one is given,
                // otherwise at the start of the reader's term dictionary.
                TermEnum termEnum = (t != null) ? reader.Terms(t) : reader.Terms();

                SegmentMergeInfo smi = new SegmentMergeInfo(starts[i], termEnum, reader);

                // A seeked enum is already positioned, so just inspect its term;
                // an unseeked one must first be advanced onto its first term.
                bool hasTerm = (t == null) ? smi.Next() : (termEnum.Term() != null);
                if (hasTerm)
                {
                    queue.Put(smi); // initialize queue
                }
                else
                {
                    smi.Close(); // nothing to contribute
                }
            }

            // When seeking, advance once so the merged enum is positioned.
            if (t != null && queue.Size() > 0)
            {
                Next();
            }
        }
 /// <summary> Closes every Monodoc.Lucene.Net.Index.IndexReader that has been added.
 /// Should not be called before merge().
 /// </summary>
 /// <throws>  IOException </throws>
 public /*internal*/ void CloseReaders()
 {
     foreach (object entry in readers)
     {
         ((Monodoc.Lucene.Net.Index.IndexReader) entry).Close();
     }
 }
        /// <summary> Merges the term dictionaries of all added readers into the
        /// new segment, visiting terms in sorted order via a priority queue of
        /// per-segment enumerators.</summary>
        /// <throws>  IOException </throws>
        private void  MergeTermInfos()
        {
            // Running doc-number offset: each reader's documents are renumbered
            // to start where the previous reader's undeleted docs ended.
            int base_Renamed = 0;

            for (int i = 0; i < readers.Count; i++)
            {
                Monodoc.Lucene.Net.Index.IndexReader reader = (Monodoc.Lucene.Net.Index.IndexReader)readers[i];
                TermEnum         termEnum = reader.Terms();
                SegmentMergeInfo smi      = new SegmentMergeInfo(base_Renamed, termEnum, reader);
                base_Renamed += reader.NumDocs();
                if (smi.Next())
                {
                    queue.Put(smi);
                }
                // initialize queue
                else
                {
                    smi.Close();
                }
            }

            // Scratch array reused each round; holds every segment currently
            // positioned on the smallest term.
            SegmentMergeInfo[] match = new SegmentMergeInfo[readers.Count];

            while (queue.Size() > 0)
            {
                int matchSize = 0; // pop matching terms
                match[matchSize++] = (SegmentMergeInfo)queue.Pop();
                Term             term = match[0].term;
                SegmentMergeInfo top  = (SegmentMergeInfo)queue.Top();

                // Gather every other segment sitting on the same term so their
                // postings can be merged into a single dictionary entry.
                while (top != null && term.CompareTo(top.term) == 0)
                {
                    match[matchSize++] = (SegmentMergeInfo)queue.Pop();
                    top = (SegmentMergeInfo)queue.Top();
                }

                MergeTermInfo(match, matchSize); // add new TermInfo

                // Advance each matched segment to its next term and re-insert
                // it; segments with no terms left are closed instead.
                while (matchSize > 0)
                {
                    SegmentMergeInfo smi = match[--matchSize];
                    if (smi.Next())
                    {
                        queue.Put(smi);
                    }
                    // restore queue
                    else
                    {
                        smi.Close(); // done with a segment
                    }
                }
            }
        }
        /// <summary> Creates a new <code>MultipleTermPositions</code> instance
        /// that merges the positions of all the supplied terms.
        /// </summary>
        /// <param name="indexReader">an <code>Monodoc.Lucene.Net.Index.IndexReader</code> value
        /// </param>
        /// <param name="terms">a <code>Term[]</code> value
        /// </param>
        /// <exception cref="System.IO.IOException"> if an error occurs
        /// </exception>
        public MultipleTermPositions(Monodoc.Lucene.Net.Index.IndexReader indexReader, Term[] terms)
        {
            // One TermPositions enumerator per term; the queue interleaves them.
            System.Collections.IList termPositions = new System.Collections.ArrayList();
            foreach (Term term in terms)
            {
                termPositions.Add(indexReader.TermPositions(term));
            }

            _termPositionsQueue = new TermPositionsQueue(termPositions);
            _posList = new IntQueue();
        }
        /// <summary> Merge the TermVectors from each of the segments into the new one,
        /// skipping deleted documents.</summary>
        /// <throws>  IOException </throws>
        private void  MergeVectors()
        {
            TermVectorsWriter termVectorsWriter = new TermVectorsWriter(directory, segment, fieldInfos);

            try
            {
                for (int r = 0; r < readers.Count; r++)
                {
                    Monodoc.Lucene.Net.Index.IndexReader reader = (Monodoc.Lucene.Net.Index.IndexReader)readers[r];
                    int maxDoc = reader.MaxDoc();
                    for (int docNum = 0; docNum < maxDoc; docNum++)
                    {
                        // skip deleted docs
                        if (reader.IsDeleted(docNum))
                        {
                            continue;
                        }
                        termVectorsWriter.OpenDocument();

                        // get all term vectors
                        TermFreqVector[] sourceTermVector = reader.GetTermFreqVectors(docNum);

                        if (sourceTermVector != null)
                        {
                            for (int f = 0; f < sourceTermVector.Length; f++)
                            {
                                // translate Field numbers
                                TermFreqVector termVector = sourceTermVector[f];
                                termVectorsWriter.OpenField(termVector.GetField());
                                System.String[] terms = termVector.GetTerms();
                                int[]           freqs = termVector.GetTermFrequencies();

                                // Copy each term and its frequency into the merged vectors.
                                for (int t = 0; t < terms.Length; t++)
                                {
                                    termVectorsWriter.AddTerm(terms[t], freqs[t]);
                                }
                            }
                            // NOTE(review): CloseDocument() is only reached when the doc
                            // actually has term vectors; an OpenDocument() for a doc with
                            // no vectors is never paired with a close — confirm that
                            // TermVectorsWriter tolerates this unbalanced open/close.
                            termVectorsWriter.CloseDocument();
                        }
                    }
                }
            }
            finally
            {
                termVectorsWriter.Close();
            }
        }
Exemplo n.º 8
0
			public override System.Object DoBody()
			{
				// Read the segment table; an optimized index (single segment)
				// is served by a plain SegmentReader, otherwise all segments
				// are wrapped in a MultiReader.
				SegmentInfos infos = new SegmentInfos();
				infos.Read(directory);
				if (infos.Count == 1)
				{
					// index is optimized
					return new SegmentReader(infos, infos.Info(0), closeDirectory);
				}
				Monodoc.Lucene.Net.Index.IndexReader[] readers = new Monodoc.Lucene.Net.Index.IndexReader[infos.Count];
				for (int i = 0; i < infos.Count; i++)
				{
					readers[i] = new SegmentReader(infos.Info(i));
				}
				return new MultiReader(directory, infos, closeDirectory, readers);
			}
Exemplo n.º 9
0
 /// <summary> Returns the union, across all sub-readers, of the indexed field
 /// names matching the given term-vector flag; each name appears once.</summary>
 public override System.Collections.ICollection GetIndexedFieldNames(bool storedTermVector)
 {
     // The Hashtable doubles as a set: key == value == field-name entry.
     System.Collections.Hashtable fieldSet = new System.Collections.Hashtable();
     foreach (Monodoc.Lucene.Net.Index.IndexReader reader in subReaders)
     {
         System.Collections.ICollection names = reader.GetIndexedFieldNames(storedTermVector);
         foreach (object name in names)
         {
             if (!fieldSet.ContainsKey(name))
             {
                 fieldSet.Add(name, name);
             }
         }
     }
     return fieldSet;
 }
Exemplo n.º 10
0
            public override System.Object DoBody()
            {
                // Load the segment table; an optimized index (one segment) is
                // opened directly, otherwise every segment is combined behind
                // a MultiReader.
                SegmentInfos infos = new SegmentInfos();
                infos.Read(directory);

                if (infos.Count == 1)
                {
                    // index is optimized
                    return new SegmentReader(infos, infos.Info(0), closeDirectory);
                }

                Monodoc.Lucene.Net.Index.IndexReader[] readers = new Monodoc.Lucene.Net.Index.IndexReader[infos.Count];
                for (int i = 0; i < infos.Count; i++)
                {
                    readers[i] = new SegmentReader(infos.Info(i));
                }
                return new MultiReader(directory, infos, closeDirectory, readers);
            }
Exemplo n.º 11
0
 /// <seealso cref="Monodoc.Lucene.Net.Index.IndexReader#GetFieldNames(boolean)">
 /// </seealso>
 public override System.Collections.ICollection GetFieldNames(bool indexed)
 {
     // Union of field names across all sub-readers; the Hashtable is used
     // as a set (key == value == name).
     System.Collections.Hashtable fieldSet = new System.Collections.Hashtable();
     foreach (Monodoc.Lucene.Net.Index.IndexReader reader in subReaders)
     {
         System.Collections.ICollection names = reader.GetFieldNames(indexed);
         // NOTE(review): each element is assumed to be a DictionaryEntry
         // (i.e. the sub-reader returns a Hashtable) — confirm against the
         // sub-reader implementations.
         foreach (System.Collections.DictionaryEntry fi in names)
         {
             System.String s = fi.Key.ToString();
             if (!fieldSet.ContainsKey(s))
             {
                 fieldSet.Add(s, s);
             }
         }
     }
     return fieldSet;
 }
        /// <summary> Merges the field definitions and stored fields of all
        /// added readers into the new segment.</summary>
        /// <returns> The number of documents in all of the readers
        /// </returns>
        /// <throws>  IOException </throws>
        private int MergeFields()
        {
            // First pass: union every reader's field definitions and persist
            // them as the new segment's ".fnm" file.
            fieldInfos = new FieldInfos(); // merge Field names
            foreach (object entry in readers)
            {
                Monodoc.Lucene.Net.Index.IndexReader reader = (Monodoc.Lucene.Net.Index.IndexReader) entry;
                fieldInfos.AddIndexed(reader.GetIndexedFieldNames(true), true);
                fieldInfos.AddIndexed(reader.GetIndexedFieldNames(false), false);
                fieldInfos.Add(reader.GetFieldNames(false), false);
            }
            fieldInfos.Write(directory, segment + ".fnm");

            // Second pass: copy every non-deleted document into the new
            // segment's stored-fields files, counting as we go.
            int docCount = 0;
            FieldsWriter fieldsWriter = new FieldsWriter(directory, segment, fieldInfos);
            try
            {
                foreach (object entry in readers)
                {
                    Monodoc.Lucene.Net.Index.IndexReader reader = (Monodoc.Lucene.Net.Index.IndexReader) entry;
                    int maxDoc = reader.MaxDoc();
                    for (int doc = 0; doc < maxDoc; doc++)
                    {
                        if (reader.IsDeleted(doc))
                        {
                            continue; // skip deleted docs
                        }
                        fieldsWriter.AddDocument(reader.Document(doc));
                        docCount++;
                    }
                }
            }
            finally
            {
                fieldsWriter.Close();
            }
            return docCount;
        }
Exemplo n.º 13
0
		internal int[] docMap = null; // maps doc numbers around deletions; null when the reader has none
		
		internal SegmentMergeInfo(int b, TermEnum te, Monodoc.Lucene.Net.Index.IndexReader r)
		{
			base_Renamed = b;
			reader = r;
			termEnum = te;
			term = te.Term();
			postings = reader.TermPositions();
			
			// With deletions present, build a remap table: surviving docs get
			// consecutive new numbers, deleted slots are marked with -1.
			if (reader.HasDeletions())
			{
				int maxDoc = reader.MaxDoc();
				docMap = new int[maxDoc];
				int nextDoc = 0;
				for (int doc = 0; doc < maxDoc; doc++)
				{
					docMap[doc] = reader.IsDeleted(doc) ? - 1 : nextDoc++;
				}
			}
		}
 /// <summary> Adds an Monodoc.Lucene.Net.Index.IndexReader to the collection of readers that are to be merged.</summary>
 /// <param name="reader">the reader to include in the merge
 /// </param>
 public /*internal*/ void Add(Monodoc.Lucene.Net.Index.IndexReader reader)
 {
     readers.Add(reader);
 }
Exemplo n.º 15
0
 /// <summary> <p>Constructs a FilterIndexReader that delegates to the specified
 /// base reader. Directory locking for delete, undeleteAll, and setNorm
 /// operations is left to the base reader.</p>
 /// <p>Note that the base reader is closed when this FilterIndexReader is
 /// closed.</p>
 /// </summary>
 /// <param name="in">specified base reader.
 /// </param>
 public FilterIndexReader(Monodoc.Lucene.Net.Index.IndexReader in_Renamed)
     : base(in_Renamed.Directory())
 {
     this.in_Renamed = in_Renamed;
 }
Exemplo n.º 16
0
 /// <summary> Forwards the lock parameters to the base class and captures the
 /// enclosing reader instance.</summary>
 internal AnonymousClassWith1(Monodoc.Lucene.Net.Index.IndexReader enclosingInstance, Lucene.Net.Store.Lock Param1, long Param2)
     : base(Param1, Param2)
 {
     InitBlock(enclosingInstance);
 }
Exemplo n.º 17
0
 /// <summary> Stores the enclosing reader so inner-class members can reach it.</summary>
 private void InitBlock(Monodoc.Lucene.Net.Index.IndexReader enclosingInstance)
 {
     this.enclosingInstance = enclosingInstance;
 }
Exemplo n.º 18
0
 /// <summary> Obtains the TermDocs enumerator used for scoring; virtual so
 /// subclasses can substitute a different enumerator.</summary>
 protected internal virtual TermDocs TermDocs(Monodoc.Lucene.Net.Index.IndexReader reader)
 {
     return reader.TermDocs();
 }
Exemplo n.º 19
0
 /// <summary> Supplies a positions-based enumerator (obtained via
 /// TermPositions()) in place of the plain TermDocs.</summary>
 protected internal override TermDocs TermDocs(Monodoc.Lucene.Net.Index.IndexReader reader)
 {
     return (TermDocs) reader.TermPositions();
 }
		/// <summary> <p>Constructs a FilterIndexReader on top of the specified
		/// base reader. Directory locking for delete, undeleteAll, and setNorm
		/// operations is left to the base reader.</p>
		/// <p>Note that the base reader is closed when this FilterIndexReader is
		/// closed.</p>
		/// </summary>
		/// <param name="in">specified base reader.
		/// </param>
		public FilterIndexReader(Monodoc.Lucene.Net.Index.IndexReader in_Renamed)
			: base(in_Renamed.Directory())
		{
			this.in_Renamed = in_Renamed;
		}