Example #1
File: Hits.cs Project: vernon016/mono
        public /*internal*/ bool debugCheckedForDeletions = false; // for test purposes.

        internal Hits(Searcher s, Query q, Filter f)
        {
            weight     = q.Weight(s);
            searcher   = s;
            filter     = f;
            nDeletions = CountDeletions(s);
            GetMoreDocs(50); // retrieves 100 initially: GetMoreDocs doubles the requested minimum
            lengthAtStart = length;
        }
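A minimal usage sketch for context, assuming the Lucene.Net 2.x API: user code never calls this constructor directly; Searcher.Search(Query) builds the Hits instance internally. The index path and field names below are hypothetical.

        // Hypothetical usage sketch (Lucene.Net 2.x API assumed).
        using Lucene.Net.Documents;
        using Lucene.Net.Index;
        using Lucene.Net.Search;

        IndexSearcher searcher = new IndexSearcher("/path/to/index"); // hypothetical path
        Query query = new TermQuery(new Term("contents", "lucene"));
        Hits hits = searcher.Search(query); // internally: new Hits(searcher, query, null)
        for (int i = 0; i < hits.Length(); i++)
        {
            Document doc = hits.Doc(i); // fetches (and caches) the stored document
        }
        searcher.Close();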
Example #2
        /// <summary> Creates a Weight in a multiple-index scenario.
        ///
        /// Distributed query processing is done in the following steps:
        /// 1. rewrite query
        /// 2. extract necessary terms
        /// 3. collect dfs for these terms from the Searchables
        /// 4. create query weight using aggregate dfs.
        /// 5. distribute that weight to Searchables
        /// 6. merge results
        ///
        /// Steps 1-4 are done here, steps 5 and 6 in the Search() methods.
        ///
        /// </summary>
        /// <returns> the aggregate Weight built from the rewritten query
        /// </returns>
        public /*protected internal*/ override Weight CreateWeight(Query original)
        {
            // step 1
            Query rewrittenQuery = Rewrite(original);

            // step 2
            System.Collections.Hashtable terms = new System.Collections.Hashtable();
            rewrittenQuery.ExtractTerms(terms);

            // step 3
            Term[] allTermsArray = new Term[terms.Count];
            int    index         = 0;

            System.Collections.IEnumerator e = terms.Keys.GetEnumerator();
            while (e.MoveNext())
            {
                allTermsArray[index++] = e.Current as Term;
            }
            int[] aggregatedDfs = new int[terms.Count];
            for (int i = 0; i < searchables.Length; i++)
            {
                int[] dfs = searchables[i].DocFreqs(allTermsArray);
                for (int j = 0; j < aggregatedDfs.Length; j++)
                {
                    aggregatedDfs[j] += dfs[j];
                }
            }

            System.Collections.Hashtable dfMap = new System.Collections.Hashtable();
            for (int i = 0; i < allTermsArray.Length; i++)
            {
                dfMap[allTermsArray[i]] = (System.Int32)aggregatedDfs[i];
            }

            // step 4
            int            numDocs  = MaxDoc();
            CachedDfSource cacheSim = new CachedDfSource(dfMap, numDocs, GetSimilarity());

            return rewrittenQuery.Weight(cacheSim);
        }
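A sketch of the calling side, on the assumption that this override comes from Lucene.Net's MultiSearcher (the aggregate-df logic matches that class). Searching through the MultiSearcher triggers CreateWeight, so every sub-searcher scores against the same global document frequencies; the index paths are made up.

        // Hypothetical sketch, assuming Lucene.Net 2.x MultiSearcher.
        Searchable[] shards = new Searchable[]
        {
            new IndexSearcher("/path/to/index1"), // hypothetical paths
            new IndexSearcher("/path/to/index2")
        };
        MultiSearcher multi = new MultiSearcher(shards);
        // Search() goes through CreateWeight(): steps 1-4 aggregate DocFreqs across
        // the shards; steps 5 and 6 distribute the weight and merge the results.
        Hits hits = multi.Search(new TermQuery(new Term("contents", "lucene")));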
Example #3
        /// <summary> Creates a Weight for <code>query</code>.</summary>
        /// <returns> new weight
        /// </returns>
        public /*protected internal*/ virtual Weight CreateWeight(Query query)
        {
            return query.Weight(this);
        }
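To show what the returned Weight is for, a hedged sketch of the consuming side, assuming the Lucene.Net 2.9-era Weight/Scorer contract (Scorer(IndexReader, bool, bool), NextDoc(), NO_MORE_DOCS); searcher, query, and reader are assumed to exist.

        // Hypothetical sketch (Lucene.Net 2.9 signatures assumed).
        Weight weight = searcher.CreateWeight(query);
        Scorer scorer = weight.Scorer(reader, true, false); // in-order, not top-level
        int doc;
        while ((doc = scorer.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
        {
            float score = scorer.Score(); // score for the current document
        }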
Example #4
        public override DocIdSet GetDocIdSet(IndexReader reader)
        {
            // Weight the wrapped query against a throwaway searcher over this reader.
            Weight weight = query.Weight(new IndexSearcher(reader));

            return new AnonymousClassDocIdSet(weight, reader, this);
        }
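A short usage sketch, assuming this override is Lucene.Net 2.9's QueryWrapperFilter, which obtains its DocIdSet exactly this way: by weighting the wrapped query against a throwaway IndexSearcher. userQuery, searcher, and the field/value pair are hypothetical.

        // Hypothetical sketch, assuming QueryWrapperFilter (Lucene.Net 2.9).
        Filter filter = new QueryWrapperFilter(new TermQuery(new Term("type", "pdf")));
        // The searcher asks filter.GetDocIdSet(reader) for the allowed documents.
        Hits hits = searcher.Search(userQuery, filter);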