////////////////////////////////////////////////////////////////

// Run the full low-level query pipeline: assemble Lucene queries from
// the QueryParts, execute them against the primary (and, if present,
// secondary) index, and push matching hits into 'result'.
//
// search_subset_uris - optional collection of internal Uris restricting
//                      the search to a subset of documents (may be null).
// query_part_hook    - optional backend hook that may rewrite each part.
// hit_filter         - extra filter ANDed with all per-part hit filters.
public void DoQuery (Query query, IQueryResult result,
		     ICollection search_subset_uris, // should be internal uris
		     QueryPartHook query_part_hook,
		     HitFilter hit_filter)
{
	if (Debug)
		Log.Debug ("###### {0}: Starting low-level queries", IndexName);

	// One stopwatch per pipeline phase, for the timing breakdown
	// logged at the end.  (This is the project's own Stopwatch type,
	// with an ElapsedTime property.)
	Stopwatch total, a, b, c, d, e, f;
	total = new Stopwatch ();
	a = new Stopwatch ();
	b = new Stopwatch ();
	c = new Stopwatch ();
	d = new Stopwatch ();
	e = new Stopwatch ();
	f = new Stopwatch ();

	total.Start ();

	a.Start ();

	ArrayList primary_required_part_queries;
	ArrayList secondary_required_part_queries;
	LNS.BooleanQuery primary_prohibited_part_query;
	LNS.BooleanQuery secondary_prohibited_part_query;
	AndHitFilter all_hit_filters;
	ArrayList term_list;

	// Assemble all of the parts into a bunch of Lucene queries
	term_list = AssembleQuery (query, query_part_hook, hit_filter,
				   out primary_required_part_queries,
				   out secondary_required_part_queries,
				   out primary_prohibited_part_query,
				   out secondary_prohibited_part_query,
				   out all_hit_filters);

	a.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Building queries took {1}", IndexName, a);

	// If we have no required parts, give up.
	if (primary_required_part_queries == null)
		return;

	b.Start ();

	//
	// Now that we have all of these nice queries, let's execute them!
	//

	IndexReader primary_reader;
	LNS.IndexSearcher primary_searcher;
	IndexReader secondary_reader;
	LNS.IndexSearcher secondary_searcher;

	// Create the searchers that we will need.
	if (! BuildSearchers (out primary_reader, out primary_searcher,
			      out secondary_reader, out secondary_searcher))
		return;

	b.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Readers/searchers built in {1}", IndexName, b);

	// Build whitelists and blacklists for search subsets.
	c.Start ();

	// Possibly create our whitelists from the search subset.
	LuceneBitArray primary_whitelist, secondary_whitelist;
	CreateQueryWhitelists (search_subset_uris,
			       primary_searcher, secondary_searcher,
			       primary_prohibited_part_query,
			       secondary_prohibited_part_query,
			       out primary_whitelist, out secondary_whitelist);

	c.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Whitelists and blacklists built in {1}", IndexName, c);

	// Now run the low level queries against our indexes.
	d.Start ();

	// primary_required_part_queries is guaranteed non-null here (we
	// returned early above), so the old redundant null re-check around
	// this dispatch has been dropped.
	BetterBitArray primary_matches;
	if (secondary_searcher != null)
		primary_matches = DoRequiredQueries_TwoIndex (primary_searcher, secondary_searcher,
							      primary_required_part_queries,
							      secondary_required_part_queries,
							      primary_whitelist, secondary_whitelist);
	else
		primary_matches = DoRequiredQueries (primary_searcher,
						     primary_required_part_queries,
						     primary_whitelist);

	d.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Low-level queries finished in {1}", IndexName, d);

	e.Start ();

	// Only generate results if we got some matches
	if (primary_matches != null && primary_matches.ContainsTrue ()) {
		GenerateQueryResults (primary_reader, secondary_reader,
				      primary_matches, result, term_list,
				      query.MaxHits,
				      new HitFilter (all_hit_filters.HitFilter),
				      IndexName);
	}

	e.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Query results generated in {1}", IndexName, e);

	//
	// Finally, we clean up after ourselves.
	//
	f.Start ();
	CloseSearchers (primary_reader, primary_searcher,
			secondary_reader, secondary_searcher);
	f.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Readers/searchers released in {1}", IndexName, f);

	total.Stop ();

	if (Debug) {
		Log.Debug ("###### {0}: Query time breakdown:", IndexName);
		Log.Debug ("###### {0}: Build queries {1,6} ({2:0.0}%)", IndexName, a, 100 * a.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Got readers {1,6} ({2:0.0}%)", IndexName, b, 100 * b.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Whitelists {1,6} ({2:0.0}%)", IndexName, c, 100 * c.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Queries {1,6} ({2:0.0}%)", IndexName, d, 100 * d.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Gen'd Results {1,6} ({2:0.0}%)", IndexName, e, 100 * e.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Reader cleanup {1,6} ({2:0.0}%)", IndexName, f, 100 * f.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: TOTAL {1,6}", IndexName, total);
		Log.Debug ("###### {0}: Total query run in {1}", IndexName, total);
	}
}
// Convert a single QueryPart into Lucene queries.
//
// abstract_part            - the part to translate; may be null (no-op).
// only_build_primary_query - when true, skip building secondary_query.
// term_list                - if non-null, collects the Terms used (the
//                            caller uses these to calculate scores);
//                            passed as null for prohibited parts.
// query_part_hook          - optional backend hook that may rewrite
//                            (or suppress) the part.
// primary_query            - resulting query against the primary index,
//                            or null if the part produced no query.
// secondary_query          - resulting query against the secondary
//                            index, or null.
// hit_filter               - post-query refinement filter; null for
//                            prohibited parts (which are not allowed
//                            inside OR queries).
static protected void QueryPartToQuery (QueryPart abstract_part,
					bool only_build_primary_query,
					ArrayList term_list,
					QueryPartHook query_part_hook,
					out LNS.Query primary_query,
					out LNS.Query secondary_query,
					out HitFilter hit_filter)
{
	primary_query = null;
	secondary_query = null;

	// By default, we assume that our lucene queries will return exactly the
	// matching set of objects.  We need to set the hit filter if further
	// refinement of the search results is required.  (As in the case of
	// date range queries, for example.)  We essentially have to do this
	// to make OR queries work correctly.
	hit_filter = true_hit_filter;

	// BUGFIX: the null check must come before we touch
	// abstract_part.Logic below; the original code dereferenced
	// abstract_part first and would throw a NullReferenceException
	// on a null part.
	if (abstract_part == null)
		return;

	// The exception is when dealing with a prohibited part.  Just return
	// null for the hit filter in that case.  This works since
	// prohibited parts are not allowed inside of OR queries.
	if (abstract_part.Logic == QueryPartLogic.Prohibited)
		hit_filter = null;

	// Run the backend hook first.
	// This gives a chance to modify create new queries based on
	// backend specific properties
	if (query_part_hook != null)
		abstract_part = query_part_hook (abstract_part);

	if (abstract_part == null)
		return;

	if (abstract_part is QueryPart_Text) {
		QueryPart_Text part = (QueryPart_Text) abstract_part;

		if (! (part.SearchFullText || part.SearchTextProperties))
			return;

		LNS.BooleanQuery p_query = new LNS.BooleanQuery ();
		LNS.BooleanQuery s_query = new LNS.BooleanQuery ();

		bool added_subquery = false;

		if (part.SearchFullText) {
			LNS.Query subquery;
			subquery = StringToQuery ("Text", part.Text, term_list);
			if (subquery != null) {
				p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
				added_subquery = true;
			}

			// FIXME: HotText is ignored for now!
			// subquery = StringToQuery ("HotText", part.Text);
			// if (subquery != null) {
			//	p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
			//	added_subquery = true;
			// }
		}

		if (part.SearchTextProperties) {
			LNS.Query subquery;
			subquery = StringToQuery ("PropertyText", part.Text, term_list);
			if (subquery != null) {
				p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
				// Properties can live in either index
				if (! only_build_primary_query)
					s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
				added_subquery = true;
			}

			// The "added_subquery" check is to handle the situation where
			// a part of the text is a stop word.  Normally, a search for
			// "hello world" would break down into this query:
			//
			// (Text:hello OR PropertyText:hello OR PropertyKeyword:hello)
			// AND (Text:world OR PropertText:world OR PropertyKeyword:world)
			//
			// This fails with stop words, though.  Let's assume that "world"
			// is a stop word.  You would end up with:
			//
			// (Text:hello OR PropertyText:hello OR PropertyKeyword:hello)
			// AND (PropertyKeyword:world)
			//
			// Which is not what we want.  We'd want to match documents that
			// had only "hello" without also having a keyword "world".  In
			// this case, don't create the PropertyKeyword part of the query,
			// since it would be included in the larger set if it weren't
			// required anyway.
			if (added_subquery) {
				Term term;
				term = new Term ("PropertyKeyword", part.Text.ToLower ()); // make sure text is lowercased
				// FIXME: terms are already added in term_list. But they may have been tokenized
				// The term here is non-tokenized version. Should this be added to term_list ?
				// term_list is used to calculate scores
				if (term_list != null)
					term_list.Add (term);
				subquery = new LNS.TermQuery (term);
				p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
				// Properties can live in either index
				if (! only_build_primary_query)
					s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
			} else {
				// Reset these so we return a null query
				p_query = null;
				s_query = null;
			}
		}

		primary_query = p_query;

		if (! only_build_primary_query)
			secondary_query = s_query;

		return;
	}

	if (abstract_part is QueryPart_Wildcard) {
		QueryPart_Wildcard part = (QueryPart_Wildcard) abstract_part;

		LNS.BooleanQuery p_query = new LNS.BooleanQuery ();
		LNS.BooleanQuery s_query = new LNS.BooleanQuery ();

		Term term;
		LNS.Query subquery;

		// Lower case the terms for searching
		string query_string_lower = part.QueryString.ToLower ();

		// Search text content
		if (! part.PropertyOnly) {
			term = new Term ("Text", query_string_lower);
			subquery = new LNS.WildcardQuery (term);
			p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
			term_list.Add (term);
		}

		// Search text properties
		term = new Term ("PropertyText", query_string_lower);
		subquery = new LNS.WildcardQuery (term);
		p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
		// Properties can live in either index
		if (! only_build_primary_query)
			s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
		term_list.Add (term);

		if (! part.PropertyOnly) {
			// Search property keywords
			term = new Term ("PropertyKeyword", query_string_lower);
			term_list.Add (term);
			subquery = new LNS.WildcardQuery (term);
			p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
			// Properties can live in either index
			if (! only_build_primary_query)
				s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
		}

		primary_query = p_query;

		if (! only_build_primary_query)
			secondary_query = s_query;

		return;
	}

	if (abstract_part is QueryPart_DateRange) {

		QueryPart_DateRange part = (QueryPart_DateRange) abstract_part;

		// FIXME: We don't handle prohibited queries with sub-date
		// accuracy.  For example, if we say we prohibit matches
		// between 5 May 2007 at 2 PM and 8 May at 5 AM, we'll
		// miss any matches that happen between midnight and 2 PM
		// on 5 May 2007 and between midnight and 5 AM on 8 May.
		primary_query = GetDateRangeQuery (part, out hit_filter);
		// Date properties can live in either index
		if (! only_build_primary_query && primary_query != null)
			secondary_query = primary_query.Clone () as LNS.Query;

		return;
	}

	if (abstract_part is QueryPart_Or) {
		QueryPart_Or part = (QueryPart_Or) abstract_part;

		// Assemble a new BooleanQuery combining all of the sub-parts.
		LNS.BooleanQuery p_query;
		p_query = new LNS.BooleanQuery ();

		LNS.BooleanQuery s_query = null;
		if (! only_build_primary_query)
			s_query = new LNS.BooleanQuery ();

		primary_query = p_query;
		secondary_query = s_query;

		OrHitFilter or_hit_filter = null;

		foreach (QueryPart sub_part in part.SubParts) {
			LNS.Query p_subq, s_subq;
			HitFilter sub_hit_filter; // FIXME: This is (and must be) ignored
			// FIXME: Any subpart in an OR which has a hit filter won't work
			// correctly, because we can't tell which part of an OR we matched
			// against to filter correctly.  This affects date range queries.
			QueryPartToQuery (sub_part, only_build_primary_query,
					  term_list, query_part_hook,
					  out p_subq, out s_subq, out sub_hit_filter);
			if (p_subq != null)
				p_query.Add (p_subq, LNS.BooleanClause.Occur.SHOULD);
			if (s_subq != null)
				s_query.Add (s_subq, LNS.BooleanClause.Occur.SHOULD);
			if (sub_hit_filter != null) {
				if (or_hit_filter == null)
					or_hit_filter = new OrHitFilter ();
				or_hit_filter.Add (sub_hit_filter);
			}
		}

		if (or_hit_filter != null)
			hit_filter = new HitFilter (or_hit_filter.HitFilter);

		return;
	}

	if (abstract_part is QueryPart_Uri) {
		QueryPart_Uri part = (QueryPart_Uri) abstract_part;

		// Do a term query on the Uri field.
		// This is probably less efficient that using a TermEnum;
		// but this is required for the query API where the uri query
		// can be part of a prohibited query or a boolean or query.
		Term term;
		term = new Term ("Uri", UriFu.UriToEscapedString (part.Uri));
		if (term_list != null)
			term_list.Add (term);
		primary_query = new LNS.TermQuery (term);

		// Query only the primary index
		return;
	}

	if (abstract_part is QueryPart_Property) {
		QueryPart_Property part = (QueryPart_Property) abstract_part;

		string field_name;
		if (part.Key == QueryPart_Property.AllProperties)
			field_name = TypeToWildcardField (part.Type);
		else
			field_name = PropertyToFieldName (part.Type, part.Key);

		// Details of the conversion here depends on BeagrepAnalyzer::TokenStream
		if (part.Type == PropertyType.Text)
			primary_query = StringToQuery (field_name, part.Value, term_list);
		else {
			Term term;
			// FIXME: Handle date queries for other date fields
			if (part.Type == PropertyType.Internal || field_name.StartsWith ("prop:k:" + Property.PrivateNamespace))
				term = new Term (field_name, part.Value);
			else
				term = new Term (field_name, part.Value.ToLower ());
			if (term_list != null)
				term_list.Add (term);
			primary_query = new LNS.TermQuery (term);
		}

		// Properties can live in either index
		if (! only_build_primary_query && primary_query != null)
			secondary_query = primary_query.Clone () as LNS.Query;

		return;
	}

	throw new Exception ("Unhandled QueryPart type! " + abstract_part.ToString ());
}
////////////////////////////////////////////////////////////////

// Count the documents matching 'query' without generating hits.
// Returns the number of primary-index matches, or 0 when the query has
// no required parts or the searchers cannot be built.  Note the count
// is NOT passed through the uri-filter or other validation checks (see
// FIXME below).
public int DoCountMatchQuery (Query query, QueryPartHook query_part_hook)
{
	if (Debug)
		Logger.Log.Debug ("###### {0}: Starting low-level queries", IndexName);

	Stopwatch total;
	total = new Stopwatch ();
	total.Start ();

	ArrayList primary_required_part_queries;
	ArrayList secondary_required_part_queries;
	LNS.BooleanQuery primary_prohibited_part_query;
	LNS.BooleanQuery secondary_prohibited_part_query;
	AndHitFilter all_hit_filters;
	ArrayList term_list;

	// No external hit filter is supplied for a count query.
	term_list = AssembleQuery (query, query_part_hook, null,
				   out primary_required_part_queries,
				   out secondary_required_part_queries,
				   out primary_prohibited_part_query,
				   out secondary_prohibited_part_query,
				   out all_hit_filters);

	// If we have no required parts, give up.
	if (primary_required_part_queries == null)
		return 0;

	IndexReader primary_reader;
	LNS.IndexSearcher primary_searcher;
	IndexReader secondary_reader;
	LNS.IndexSearcher secondary_searcher;

	if (! BuildSearchers (out primary_reader, out primary_searcher,
			      out secondary_reader, out secondary_searcher))
		return 0;

	// Build whitelists and blacklists for search subsets.
	// (null search subset: the count covers the whole index.)
	LuceneBitArray primary_whitelist, secondary_whitelist;
	CreateQueryWhitelists (null,
			       primary_searcher, secondary_searcher,
			       primary_prohibited_part_query,
			       secondary_prohibited_part_query,
			       out primary_whitelist, out secondary_whitelist);

	// Now run the low level queries against our indexes.
	// primary_required_part_queries is guaranteed non-null here (we
	// returned early above), so the old redundant null re-check has
	// been dropped.
	BetterBitArray primary_matches;
	if (secondary_searcher != null)
		primary_matches = DoRequiredQueries_TwoIndex (primary_searcher, secondary_searcher,
							      primary_required_part_queries,
							      secondary_required_part_queries,
							      primary_whitelist, secondary_whitelist);
	else
		primary_matches = DoRequiredQueries (primary_searcher,
						     primary_required_part_queries,
						     primary_whitelist);

	int result = 0;
	// FIXME: Pass the count through uri-filter and other validation checks
	if (primary_matches != null)
		result = primary_matches.TrueCount;

	CloseSearchers (primary_reader, primary_searcher,
			secondary_reader, secondary_searcher);

	total.Stop ();
	if (Debug)
		Logger.Log.Debug ("###### {0}: Total query run in {1}", IndexName, total);

	return result;
}
////////////////////////////////////////////////////////////////

// Walk the parts of 'query' and translate each into Lucene queries,
// sorting the results into the required and prohibited buckets the
// caller executes later.  Per-part hit filters are ANDed into
// 'all_hit_filters' (prohibited parts are wrapped in a NOT first).
// Returns the list of terms used by the required parts.
private ArrayList AssembleQuery (Query query, QueryPartHook query_part_hook, HitFilter hit_filter,
				 out ArrayList primary_required_part_queries,
				 out ArrayList secondary_required_part_queries,
				 out LNS.BooleanQuery primary_prohibited_part_query,
				 out LNS.BooleanQuery secondary_prohibited_part_query,
				 out AndHitFilter all_hit_filters)
{
	primary_required_part_queries = null;
	secondary_required_part_queries = null;
	primary_prohibited_part_query = null;
	secondary_prohibited_part_query = null;

	// Seed the combined filter with the caller-supplied one, if any.
	all_hit_filters = new AndHitFilter ();
	if (hit_filter != null)
		all_hit_filters.Add (hit_filter);

	ArrayList terms = new ArrayList ();

	foreach (QueryPart part in query.Parts) {
		LNS.Query p_query;
		LNS.Query s_query;
		HitFilter part_filter;

		// Only required parts contribute their terms to the
		// score-term list; prohibited parts pass null.
		ArrayList term_sink = null;
		if (part.Logic == QueryPartLogic.Required)
			term_sink = terms;

		QueryPartToQuery (part,
				  false, // we want both primary and secondary queries
				  term_sink,
				  query_part_hook,
				  out p_query, out s_query, out part_filter);

		if (p_query == null)
			continue;

		if (part.Logic == QueryPartLogic.Required) {
			// Lazily create both buckets together so they
			// stay index-aligned.
			if (primary_required_part_queries == null) {
				primary_required_part_queries = new ArrayList ();
				secondary_required_part_queries = new ArrayList ();
			}
			primary_required_part_queries.Add (p_query);
			secondary_required_part_queries.Add (s_query);

			if (part_filter != null)
				all_hit_filters.Add (part_filter);

		} else if (part.Logic == QueryPartLogic.Prohibited) {
			if (primary_prohibited_part_query == null)
				primary_prohibited_part_query = new LNS.BooleanQuery ();
			primary_prohibited_part_query.Add (p_query, LNS.BooleanClause.Occur.SHOULD);

			if (s_query != null) {
				if (secondary_prohibited_part_query == null)
					secondary_prohibited_part_query = new LNS.BooleanQuery ();
				secondary_prohibited_part_query.Add (s_query, LNS.BooleanClause.Occur.SHOULD);
			}

			// A prohibited part's filter must be inverted
			// before being ANDed with the rest.
			if (part_filter != null) {
				NotHitFilter inverted = new NotHitFilter (part_filter);
				all_hit_filters.Add (new HitFilter (inverted.HitFilter));
			}
		}
	}

	return terms;
}
////////////////////////////////////////////////////////////////

// Run the full low-level query pipeline: assemble Lucene queries from
// the QueryParts, execute them against the primary (and, if present,
// secondary) index, and push matching hits into 'result'.
//
// search_subset_uris - optional collection of internal Uris restricting
//                      the search to a subset of documents (may be null).
// query_part_hook    - optional backend hook that may rewrite each part.
// hit_filter         - extra filter ANDed with all per-part hit filters.
public void DoQuery (Query query, IQueryResult result,
		     ICollection search_subset_uris, // should be internal uris
		     QueryPartHook query_part_hook,
		     HitFilter hit_filter)
{
	if (Debug)
		Log.Debug ("###### {0}: Starting low-level queries", IndexName);

	// One stopwatch per pipeline phase, for the timing breakdown
	// logged at the end.  (This is the project's own Stopwatch type,
	// with an ElapsedTime property.)
	Stopwatch total, a, b, c, d, e, f;
	total = new Stopwatch ();
	a = new Stopwatch ();
	b = new Stopwatch ();
	c = new Stopwatch ();
	d = new Stopwatch ();
	e = new Stopwatch ();
	f = new Stopwatch ();

	total.Start ();

	a.Start ();

	ArrayList primary_required_part_queries;
	ArrayList secondary_required_part_queries;
	LNS.BooleanQuery primary_prohibited_part_query;
	LNS.BooleanQuery secondary_prohibited_part_query;
	AndHitFilter all_hit_filters;
	ArrayList term_list;

	// Assemble all of the parts into a bunch of Lucene queries
	term_list = AssembleQuery (query, query_part_hook, hit_filter,
				   out primary_required_part_queries,
				   out secondary_required_part_queries,
				   out primary_prohibited_part_query,
				   out secondary_prohibited_part_query,
				   out all_hit_filters);

	a.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Building queries took {1}", IndexName, a);

	// If we have no required parts, give up.
	if (primary_required_part_queries == null)
		return;

	b.Start ();

	//
	// Now that we have all of these nice queries, let's execute them!
	//

	IndexReader primary_reader;
	LNS.IndexSearcher primary_searcher;
	IndexReader secondary_reader;
	LNS.IndexSearcher secondary_searcher;

	// Create the searchers that we will need.
	if (! BuildSearchers (out primary_reader, out primary_searcher,
			      out secondary_reader, out secondary_searcher))
		return;

	b.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Readers/searchers built in {1}", IndexName, b);

	// Build whitelists and blacklists for search subsets.
	c.Start ();

	// Possibly create our whitelists from the search subset.
	LuceneBitArray primary_whitelist, secondary_whitelist;
	CreateQueryWhitelists (search_subset_uris,
			       primary_searcher, secondary_searcher,
			       primary_prohibited_part_query,
			       secondary_prohibited_part_query,
			       out primary_whitelist, out secondary_whitelist);

	c.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Whitelists and blacklists built in {1}", IndexName, c);

	// Now run the low level queries against our indexes.
	d.Start ();

	// primary_required_part_queries is guaranteed non-null here (we
	// returned early above), so the old redundant null re-check around
	// this dispatch has been dropped.
	BetterBitArray primary_matches;
	if (secondary_searcher != null)
		primary_matches = DoRequiredQueries_TwoIndex (primary_searcher, secondary_searcher,
							      primary_required_part_queries,
							      secondary_required_part_queries,
							      primary_whitelist, secondary_whitelist);
	else
		primary_matches = DoRequiredQueries (primary_searcher,
						     primary_required_part_queries,
						     primary_whitelist);

	d.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Low-level queries finished in {1}", IndexName, d);

	e.Start ();

	// Only generate results if we got some matches
	if (primary_matches != null && primary_matches.ContainsTrue ()) {
		GenerateQueryResults (primary_reader, secondary_reader,
				      primary_matches, result, term_list,
				      query.MaxHits,
				      new HitFilter (all_hit_filters.HitFilter),
				      IndexName);
	}

	e.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Query results generated in {1}", IndexName, e);

	//
	// Finally, we clean up after ourselves.
	//
	f.Start ();
	CloseSearchers (primary_reader, primary_searcher,
			secondary_reader, secondary_searcher);
	f.Stop ();
	if (Debug)
		Log.Debug ("###### {0}: Readers/searchers released in {1}", IndexName, f);

	total.Stop ();

	if (Debug) {
		Log.Debug ("###### {0}: Query time breakdown:", IndexName);
		Log.Debug ("###### {0}: Build queries {1,6} ({2:0.0}%)", IndexName, a, 100 * a.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Got readers {1,6} ({2:0.0}%)", IndexName, b, 100 * b.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Whitelists {1,6} ({2:0.0}%)", IndexName, c, 100 * c.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Queries {1,6} ({2:0.0}%)", IndexName, d, 100 * d.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Gen'd Results {1,6} ({2:0.0}%)", IndexName, e, 100 * e.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: Reader cleanup {1,6} ({2:0.0}%)", IndexName, f, 100 * f.ElapsedTime / total.ElapsedTime);
		Log.Debug ("###### {0}: TOTAL {1,6}", IndexName, total);
		Log.Debug ("###### {0}: Total query run in {1}", IndexName, total);
	}
}
////////////////////////////////////////////////////////////////

// Count the documents matching 'query' without generating hits.
// Returns the number of primary-index matches, or 0 when the query has
// no required parts or the searchers cannot be built.  Note the count
// is NOT passed through the uri-filter or other validation checks (see
// FIXME below).
public int DoCountMatchQuery (Query query, QueryPartHook query_part_hook)
{
	if (Debug)
		Logger.Log.Debug ("###### {0}: Starting low-level queries", IndexName);

	Stopwatch total;
	total = new Stopwatch ();
	total.Start ();

	ArrayList primary_required_part_queries;
	ArrayList secondary_required_part_queries;
	LNS.BooleanQuery primary_prohibited_part_query;
	LNS.BooleanQuery secondary_prohibited_part_query;
	AndHitFilter all_hit_filters;
	ArrayList term_list;

	// No external hit filter is supplied for a count query.
	term_list = AssembleQuery ( query,
				    query_part_hook,
				    null,
				    out primary_required_part_queries,
				    out secondary_required_part_queries,
				    out primary_prohibited_part_query,
				    out secondary_prohibited_part_query,
				    out all_hit_filters);

	// If we have no required parts, give up.
	if (primary_required_part_queries == null)
		return 0;

	IndexReader primary_reader;
	LNS.IndexSearcher primary_searcher;
	IndexReader secondary_reader;
	LNS.IndexSearcher secondary_searcher;

	if (! BuildSearchers (out primary_reader, out primary_searcher,
			      out secondary_reader, out secondary_searcher))
		return 0;

	// Build whitelists and blacklists for search subsets.
	// (null search subset: the count covers the whole index.)
	LuceneBitArray primary_whitelist, secondary_whitelist;
	CreateQueryWhitelists (null,
			       primary_searcher, secondary_searcher,
			       primary_prohibited_part_query,
			       secondary_prohibited_part_query,
			       out primary_whitelist, out secondary_whitelist);

	// Now run the low level queries against our indexes.
	// primary_required_part_queries is guaranteed non-null here (we
	// returned early above), so the old redundant null re-check has
	// been dropped.
	BetterBitArray primary_matches;
	if (secondary_searcher != null)
		primary_matches = DoRequiredQueries_TwoIndex (primary_searcher, secondary_searcher,
							      primary_required_part_queries,
							      secondary_required_part_queries,
							      primary_whitelist, secondary_whitelist);
	else
		primary_matches = DoRequiredQueries (primary_searcher,
						     primary_required_part_queries,
						     primary_whitelist);

	int result = 0;
	// FIXME: Pass the count through uri-filter and other validation checks
	if (primary_matches != null)
		result = primary_matches.TrueCount;

	CloseSearchers (primary_reader, primary_searcher,
			secondary_reader, secondary_searcher);

	total.Stop ();
	if (Debug)
		Logger.Log.Debug ("###### {0}: Total query run in {1}", IndexName, total);

	return result;
}
////////////////////////////////////////////////////////////////

// Walk the parts of 'query' and translate each into Lucene queries,
// sorting the results into the required and prohibited buckets the
// caller executes later.  Per-part hit filters are ANDed into
// 'all_hit_filters' (prohibited parts are wrapped in a NOT first).
// Returns the list of terms used by the required parts.
private ArrayList AssembleQuery (Query query, QueryPartHook query_part_hook, HitFilter hit_filter,
				 out ArrayList primary_required_part_queries,
				 out ArrayList secondary_required_part_queries,
				 out LNS.BooleanQuery primary_prohibited_part_query,
				 out LNS.BooleanQuery secondary_prohibited_part_query,
				 out AndHitFilter all_hit_filters)
{
	primary_required_part_queries = null;
	secondary_required_part_queries = null;
	primary_prohibited_part_query = null;
	secondary_prohibited_part_query = null;

	// Seed the combined filter with the caller-supplied one, if any.
	all_hit_filters = new AndHitFilter ();
	if (hit_filter != null)
		all_hit_filters.Add (hit_filter);

	ArrayList terms = new ArrayList ();

	foreach (QueryPart part in query.Parts) {
		LNS.Query p_query;
		LNS.Query s_query;
		HitFilter part_filter;

		// Only required parts contribute their terms to the
		// score-term list; prohibited parts pass null.
		ArrayList term_sink = null;
		if (part.Logic == QueryPartLogic.Required)
			term_sink = terms;

		QueryPartToQuery (part,
				  false, // we want both primary and secondary queries
				  term_sink,
				  query_part_hook,
				  out p_query, out s_query, out part_filter);

		if (p_query == null)
			continue;

		if (part.Logic == QueryPartLogic.Required) {
			// Lazily create both buckets together so they
			// stay index-aligned.
			if (primary_required_part_queries == null) {
				primary_required_part_queries = new ArrayList ();
				secondary_required_part_queries = new ArrayList ();
			}
			primary_required_part_queries.Add (p_query);
			secondary_required_part_queries.Add (s_query);

			if (part_filter != null)
				all_hit_filters.Add (part_filter);

		} else if (part.Logic == QueryPartLogic.Prohibited) {
			if (primary_prohibited_part_query == null)
				primary_prohibited_part_query = new LNS.BooleanQuery ();
			primary_prohibited_part_query.Add (p_query, LNS.BooleanClause.Occur.SHOULD);

			if (s_query != null) {
				if (secondary_prohibited_part_query == null)
					secondary_prohibited_part_query = new LNS.BooleanQuery ();
				secondary_prohibited_part_query.Add (s_query, LNS.BooleanClause.Occur.SHOULD);
			}

			// A prohibited part's filter must be inverted
			// before being ANDed with the rest.
			if (part_filter != null) {
				NotHitFilter inverted = new NotHitFilter (part_filter);
				all_hit_filters.Add (new HitFilter (inverted.HitFilter));
			}
		}
	}

	return terms;
}