Example #1
			public override System.Object DoBody()
			{
				// Read the current segments file from the directory.
				SegmentInfos infos = new SegmentInfos();
				infos.Read(directory);
				if (infos.Count == 1)
				{
					// Single segment: the index is optimized, so one SegmentReader is enough.
					return new SegmentReader(infos, infos.Info(0), closeDirectory);
				}
				else
				{
					// Multiple segments: open a SegmentReader per segment and combine them in a MultiReader.
					Monodoc.Lucene.Net.Index.IndexReader[] readers = new Monodoc.Lucene.Net.Index.IndexReader[infos.Count];
					for (int i = 0; i < infos.Count; i++)
						readers[i] = new SegmentReader(infos.Info(i));
					return new MultiReader(directory, infos, closeDirectory, readers);
				}
			}
Example #2
            public override System.Object DoBody()
            {
                SegmentInfos infos = new SegmentInfos();

                infos.Read(directory);
                if (infos.Count == 1)
                {
                    // index is optimized
                    return new SegmentReader(infos, infos.Info(0), closeDirectory);
                }
                else
                {
                    Monodoc.Lucene.Net.Index.IndexReader[] readers = new Monodoc.Lucene.Net.Index.IndexReader[infos.Count];
                    for (int i = 0; i < infos.Count; i++)
                    {
                        readers[i] = new SegmentReader(infos.Info(i));
                    }
                    return new MultiReader(directory, infos, closeDirectory, readers);
                }
            }
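
Both DoBody bodies above back the library's reader-opening path; callers never invoke them directly. A minimal usage sketch, assuming the Monodoc port keeps the stock Lucene.Net 1.4 method names (IndexReader.Open, NumDocs, Close) and using a hypothetical index path:

    // Hypothetical path; Open decides internally whether to hand back a
    // SegmentReader (single-segment, optimized index) or a MultiReader.
    Monodoc.Lucene.Net.Index.IndexReader reader =
        Monodoc.Lucene.Net.Index.IndexReader.Open("/tmp/example-index");
    int docs = reader.NumDocs(); // number of live (non-deleted) documents
    reader.Close();
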
 /// <summary>Returns the number of documents currently in this index. </summary>
 public virtual int DocCount()
 {
     lock (this)
     {
         int count = 0;
         for (int i = 0; i < segmentInfos.Count; i++)
         {
             SegmentInfo si = segmentInfos.Info(i);
             count += si.docCount;
         }
         return count;
     }
 }
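
A small usage sketch for DocCount, assuming it sits on the port's IndexWriter (Monodoc.Lucene.Net.Index.IndexWriter) and that the stock Lucene.Net 1.4 document API (Document, Field.Text, StandardAnalyzer) is available under the port's namespaces; the path and field name are hypothetical:

    // Build a tiny index, then ask the writer how many documents it holds.
    IndexWriter writer = new IndexWriter("/tmp/example-index", new StandardAnalyzer(), true);
    Document doc = new Document();
    doc.Add(Field.Text("contents", "hello world"));
    writer.AddDocument(doc);
    int total = writer.DocCount(); // sums docCount over all current segments
    writer.Close();
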
 /// <summary>Merges all segments from an array of indexes into this index.
 ///
 /// <p>This may be used to parallelize batch indexing.  A large document
 /// collection can be broken into sub-collections.  Each sub-collection can be
 /// indexed in parallel, on a different thread, process or machine.  The
 /// complete index can then be created by merging sub-collection indexes
 /// with this method.
 ///
 /// <p>After this completes, the index is optimized.
 /// </summary>
 public virtual void  AddIndexes(Directory[] dirs)
 {
     lock (this)
     {
         Optimize(); // start with zero or 1 seg
         for (int i = 0; i < dirs.Length; i++)
         {
             SegmentInfos sis = new SegmentInfos(); // read infos from dir
             sis.Read(dirs[i]);
             for (int j = 0; j < sis.Count; j++)
             {
                 segmentInfos.Add(sis.Info(j)); // add each info
             }
         }
         Optimize(); // final cleanup
     }
 }
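
The doc comment above describes the parallel batch-indexing workflow; here is a minimal sketch of the final merge step, assuming the stock Lucene.Net 1.4 API surface (FSDirectory.GetDirectory and the IndexWriter(string, Analyzer, bool) constructor) and hypothetical paths:

    // Each sub-index was built separately (different thread, process, or machine).
    Directory[] subIndexes = new Directory[]
    {
        FSDirectory.GetDirectory("/tmp/index-part1", false),
        FSDirectory.GetDirectory("/tmp/index-part2", false)
    };

    // Merge into a fresh target index; AddIndexes leaves the result optimized.
    IndexWriter writer = new IndexWriter("/tmp/index-merged", new StandardAnalyzer(), true);
    writer.AddIndexes(subIndexes);
    writer.Close();
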