/// <summary>
/// Tests that a query matches an expected set of documents using a HitCollector.
/// <p/>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query,HitCollector)"/>
/// <seealso cref="CheckHits_Renamed_Method"/>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Sanity-check the query itself (equals/hashcode/rewrite invariants) before asserting hits.
    QueryUtils.Check(query, searcher);

    // Build the expected doc-id set (Hashtable used as a set in this legacy code).
    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(correct, (System.Int32)results[i]);
    }

    // Collect the actual doc ids via the Collector API.
    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    Collector c = new SetCollector(actual);

    searcher.Search(query, c);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Re-run with the searcher wrapped in various ways (i in -1..1 selects the wrapping mode)
    // to verify the query is insensitive to searcher composition.
    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapSearcher(searcher, i).Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Searcher " + i + ": " + query.ToString(defaultFieldName));
    }

    // Reader-level wrapping only applies to a plain IndexSearcher.
    if (!(searcher is IndexSearcher))
    {
        return;
    }

    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        QueryUtils.WrapUnderlyingReader((IndexSearcher)searcher, i).Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Tests that a query matches an expected set of documents using a
/// HitCollector.
/// <para>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </para>
/// </summary>
/// <param name="luceneTestCase"> The current test instance. </param>
/// <param name="random"> Source of randomness for the reader-wrapping checks. </param>
/// <param name="query"> The query to test. </param>
/// <param name="defaultFieldName"> Used for displaying the query in assertion messages. </param>
/// <param name="searcher"> The searcher to test the query against. </param>
/// <param name="results"> A list of documentIds that must match the query. </param>
/// <seealso cref="DoCheckHits(LuceneTestCase, Random, Query, string, IndexSearcher, int[])"/>
// LUCENENET specific
// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
// NOTE(review): the matching `#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION` for the
// `#endif` below (an alternate signature) is outside this chunk's view.
public static void CheckHitCollector(LuceneTestCase luceneTestCase, Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
#endif
{
    // Sanity-check the query before asserting hit sets.
    QueryUtils.Check(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        luceneTestCase,
#endif
        random, query, searcher);
    Trace.TraceInformation("Checked");

    // Expected doc ids, sorted for order-insensitive comparison.
    SortedSet<int?> correct = new SortedSet<int?>();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add(Convert.ToInt32(results[i], CultureInfo.InvariantCulture));
    }

    SortedSet<int?> actual = new SortedSet<int?>();
    ICollector c = new SetCollector(actual);

    searcher.Search(query, c);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Verify the same hit set when the underlying reader is wrapped in different ways.
    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        IndexSearcher s = QueryUtils.WrapUnderlyingReader(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
            luceneTestCase,
#endif
            random, searcher, i);
        s.Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Tests that a query matches an expected set of documents using a
/// HitCollector.
/// <p>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </p>
/// </summary>
/// <param name="random"> source of randomness for the reader-wrapping checks </param>
/// <param name="query"> the query to test </param>
/// <param name="defaultFieldName"> used for displaying the query in assertion messages </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <param name="similarity">
/// LUCENENET specific
/// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
/// </param>
/// <seealso cref="DoCheckHits(Random, Query, string, IndexSearcher, int[])"/>
public static void CheckHitCollector(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results, Similarity similarity)
{
    // Sanity-check the query before asserting hit sets.
    QueryUtils.Check(random, query, searcher, similarity);
    Trace.TraceInformation("Checked");

    // Expected doc ids, sorted for order-insensitive comparison.
    SortedSet<int?> correct = new SortedSet<int?>();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add(Convert.ToInt32(results[i], CultureInfo.InvariantCulture));
    }

    SortedSet<int?> actual = new SortedSet<int?>();
    ICollector c = new SetCollector(actual);

    searcher.Search(query, c);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Verify the same hit set when the underlying reader is wrapped in different ways.
    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        IndexSearcher s = QueryUtils.WrapUnderlyingReader(random, searcher, i, similarity);
        s.Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Tests that a query matches an expected set of documents using Hits.
/// <p/>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query)"/>
/// <seealso cref="CheckHitCollector"/>
public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Sanity-check the query first, but only for a plain IndexSearcher.
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, searcher);
    }

    ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;

    // Expected doc ids, sorted so the comparison is order-insensitive.
    System.Collections.ArrayList correct = new System.Collections.ArrayList();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(correct, results[i]);
    }
    correct.Sort();

    System.Collections.ArrayList actual = new System.Collections.ArrayList();
    for (int i = 0; i < hits.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(actual, hits[i].doc);
    }
    actual.Sort();

    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    // NOTE(review): QueryUtils.Check runs a second time here unconditionally, so for an
    // IndexSearcher the check executes twice — presumably intentional (checks after search
    // state as well); confirm before removing.
    QueryUtils.Check(query, searcher);
}
/// <summary>
/// Tests that every expected documentId appears in the Hits returned for
/// <paramref name="query"/>. This is a subset check only: extra hits beyond
/// <paramref name="results"/> do NOT fail the assertion (see the disabled
/// equality assert below).
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="results">documentIds that must all be present in the hits</param>
/// <param name="testCase">unused; kept for signature compatibility with callers</param>
public static void CheckHits_(Query query, System.String defaultFieldName, Searcher searcher, int[] results, TestCase testCase)
{
    Hits hits = searcher.Search(query);

    // Hashtables used as sets: keys are doc ids, values are ignored.
    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add((System.Int32) results[i], null);
    }

    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    for (int i = 0; i < hits.Length(); i++)
    {
        actual.Add((System.Int32) hits.Id(i), null);
    }

    //Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
    // Assert only that every expected id was found; stop at the first miss.
    // NOTE: when correct is empty, nothing is asserted at all.
    if (correct.Count != 0)
    {
        System.Collections.IDictionaryEnumerator iter = correct.GetEnumerator();
        bool status = false;
        while (iter.MoveNext())
        {
            status = actual.ContainsKey(iter.Key);
            if (status == false)
                break;
        }
        Assert.IsTrue(status, query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Tests that every expected documentId appears in the Hits returned for
/// <paramref name="query"/>. Subset check only — extra hits do not fail.
/// NOTE(review): this is a duplicate of the other CheckHits_ overload in this
/// file (differs only in brace style); consider consolidating.
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="results">documentIds that must all be present in the hits</param>
/// <param name="testCase">unused; kept for signature compatibility with callers</param>
public static void CheckHits_(Query query, System.String defaultFieldName, Searcher searcher, int[] results, TestCase testCase)
{
    Hits hits = searcher.Search(query);

    // Hashtables used as sets: keys are doc ids, values are ignored.
    System.Collections.Hashtable correct = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add((System.Int32)results[i], null);
    }

    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    for (int i = 0; i < hits.Length(); i++)
    {
        actual.Add((System.Int32)hits.Id(i), null);
    }

    //Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
    // Assert only that every expected id was found; stop at the first miss.
    // NOTE: when correct is empty, nothing is asserted at all.
    if (correct.Count != 0)
    {
        System.Collections.IDictionaryEnumerator iter = correct.GetEnumerator();
        bool status = false;
        while (iter.MoveNext())
        {
            status = actual.ContainsKey(iter.Key);
            if (status == false)
            {
                break;
            }
        }
        Assert.IsTrue(status, query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Searches the index with the given query text and returns the top 100 hits.
/// </summary>
/// <param name="querytext">Raw query text; quoted verbatim when <paramref name="toProcess"/> is false.</param>
/// <param name="query">Receives the parsed query, or null when the text is empty.</param>
/// <param name="toProcess">When false, the text is wrapped in quotes (phrase search) and not lowercased.</param>
/// <param name="toExpand">When true, the text is first normalized through a StandardAnalyzer parser and expanded with synonyms.</param>
/// <returns>The top 100 matching documents, or null when the query text is empty.</returns>
public TopDocs SearchForQuery(string querytext, out Lucene.Net.Search.Query query, bool toProcess, bool toExpand) // Searches index with query text
{
    Stopwatch stopwatch2 = Stopwatch.StartNew();
    if (!toProcess)
    {
        // Exact phrase search: quote the text verbatim.
        querytext = "\"" + querytext + "\"";
    }
    else
    {
        if (toExpand)
        {
            // Normalize through a standard parser, strip field prefixes, then add synonyms.
            var standardParser = new QueryParser(VERSION, URL_FN, new StandardAnalyzer(VERSION));
            querytext = standardParser.Parse(querytext).ToString().Replace("Url:", "").Replace("Query:", "");
            querytext = GetSynonyms(querytext);
        }
        // FIX: use the invariant culture so lowercasing is not affected by the
        // current thread's culture (e.g. Turkish 'I' -> 'ı' would break index terms).
        querytext = querytext.ToLowerInvariant();
    }
    // FIX: null-safe emptiness check (was `querytext != string.Empty`, which let
    // a null slip through to parser.Parse and throw).
    if (!string.IsNullOrEmpty(querytext))
    {
        query = parser.Parse(querytext);
        currentQuery = query;
        stopwatch2.Stop();
        queryTime = stopwatch2.Elapsed.TotalSeconds.ToString();
        finalQuery = query.ToString();
        TopDocs results = searcher.Search(query, 100);
        return (results);
    }
    else
    {
        query = null;
        return (null);
    }
}
/// <summary>
/// Tests that a query matches an expected set of documents using Hits.
/// <para>Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// </para>
/// </summary>
/// <param name="luceneTestCase"> The current test instance. </param>
/// <param name="random"> Source of randomness passed to the query checker. </param>
/// <param name="query"> the query to test </param>
/// <param name="defaultFieldName"> used for displaying the query in assertion messages </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <seealso cref="CheckHitCollector(LuceneTestCase, Random, Query, string, IndexSearcher, int[])"/>
// LUCENENET specific
// Removes dependency on <see cref="LuceneTestCase.ClassEnv.Similarity"/>
// NOTE(review): the matching `#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION` for the
// `#endif` below (an alternate signature) is outside this chunk's view.
public static void DoCheckHits(LuceneTestCase luceneTestCase, Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
#endif
{
    ScoreDoc[] hits = searcher.Search(query, 1000).ScoreDocs;

    // Expected doc ids, sorted for order-insensitive comparison.
    SortedSet<int?> correct = new SortedSet<int?>();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add(Convert.ToInt32(results[i], CultureInfo.InvariantCulture));
    }

    SortedSet<int?> actual = new SortedSet<int?>();
    for (int i = 0; i < hits.Length; i++)
    {
        actual.Add(Convert.ToInt32(hits[i].Doc, CultureInfo.InvariantCulture));
    }

    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    // Also run the query invariants checker (rarely in "deep" mode, driven by random).
    QueryUtils.Check(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        luceneTestCase,
#endif
        random, query, searcher, LuceneTestCase.Rarely(random));
}
/// <summary>
/// Creates an asserter that validates score explanations for <paramref name="q"/>
/// against searcher <paramref name="s"/>.
/// </summary>
/// <param name="q">the query whose explanations are checked</param>
/// <param name="defaultFieldName">used to render the query for assertion messages</param>
/// <param name="s">the searcher explanations are requested from</param>
/// <param name="deep">whether to recurse into explanation details</param>
public ExplanationAsserter(Query q, string defaultFieldName, IndexSearcher s, bool deep)
{
    this.q = q;
    this.s = s;
    this.deep = deep;
    // Pre-render the query once; reused in every assertion message.
    this.d = q.ToString(defaultFieldName);
}
/// <summary>
/// Creates a full-text query over the given Lucene query.
/// The <paramref name="classes"/> array restricts results to those entity types;
/// classes must be immutable.
/// </summary>
/// <param name="query">the Lucene query to execute; its string form becomes the HQL-side query text</param>
/// <param name="classes">entity types the results are restricted to</param>
/// <param name="session">the NHibernate session the query runs against</param>
/// <param name="parameterMetadata">parameter metadata forwarded to the base query</param>
public FullTextQueryImpl(Lucene.Net.Search.Query query, System.Type[] classes, ISession session, ParameterMetadata parameterMetadata)
    : base(query.ToString(), FlushMode.Unspecified, session.GetSessionImplementation(), parameterMetadata)
{
    luceneQuery = query;
    // -1 marks the result size as not-yet-computed.
    resultSize = -1;
    this.classes = classes;
    this.filterDefinitions = new Dictionary <string, FullTextFilterImpl>();
}
//====================================================================================== INodeQueryCompiler Members

/// <summary>
/// Compiles the given <see cref="NodeQuery"/> into its string form by walking the
/// query tree. This implementation produces no parameters (always an empty array).
/// </summary>
/// <param name="query">the node query to compile</param>
/// <param name="parameters">always receives an empty array</param>
/// <returns>the compiled query text</returns>
public string Compile(NodeQuery query, out NodeQueryParameter[] parameters)
{
    // Keep the query and its compiled form on the instance for later inspection.
    _nodeQuery = query;
    CompiledQuery = TreeWalker(query);
    parameters = new NodeQueryParameter[0];
    return CompiledQuery.ToString();
}
/// <summary>Prints a user-readable version of this query. </summary>
/// <param name="s">the default field name used to render the wrapped query</param>
/// <returns>a string of the form "filtered(query)->filter^boost"</returns>
public override System.String ToString(System.String s)
{
    // Simple concatenation; `filter` is rendered via its ToString, and the boost
    // suffix is empty when the boost is 1.0.
    return "filtered(" + query.ToString(s) + ")->" + filter + ToStringUtils.Boost(GetBoost());
}
/// <summary>
/// Indexes <paramref name="docs"/> with <paramref name="analyzer"/>, runs
/// <paramref name="q"/>, and asserts that exactly <paramref name="expected"/>
/// documents match.
/// </summary>
/// <param name="analyzer">analyzer used to build the temporary index</param>
/// <param name="docs">document bodies to index</param>
/// <param name="q">query to count hits for</param>
/// <param name="expected">expected total hit count</param>
private void CountHits(Analyzer analyzer, string[] docs, Query q, int expected)
{
    Directory d = GetDirectory(analyzer, docs);
    try
    {
        IndexReader r = DirectoryReader.Open(d);
        try
        {
            IndexSearcher s = new IndexSearcher(r);
            TotalHitCountCollector c = new TotalHitCountCollector();
            s.Search(q, c);
            Assert.AreEqual(expected, c.TotalHits, q.ToString());
        }
        finally
        {
            // FIX: dispose even when the assertion throws; previously a failing
            // assert leaked the reader and directory.
            r.Dispose();
        }
    }
    finally
    {
        d.Dispose();
    }
}
/// <summary>
/// Searches for <paramref name="query"/> restricted to documents of the given
/// <paramref name="type"/>, logging and rethrowing any failure.
/// </summary>
/// <param name="searcher">the searcher to run the query against</param>
/// <param name="type">document type used to build the type filter</param>
/// <param name="query">the query to execute</param>
/// <param name="filter">additional filter combined with the type filter</param>
/// <param name="numResults">maximum number of results to return</param>
/// <param name="sort">sort order for the results</param>
/// <returns>the top <paramref name="numResults"/> matching documents</returns>
public static TopDocs Search(this Searcher searcher, string type, Query query, Query filter, int numResults, Sort sort)
{
    try
    {
        return (searcher.Search(query, JsonMappingUtils.GetTypeFilter(type, filter), numResults, sort));
    }
    catch (Exception)
    {
        Log.Logger.ErrorFormat("Error while searching {0}, {1}", type, query.ToString());
        // FIX: rethrow with `throw;` — the original `throw ex;` reset the stack trace.
        throw;
    }
}
/// <summary>Prettyprint us.</summary>
/// <param name="field">the field to which we are applied</param>
/// <returns> a string that shows what we do, of the form "(disjunct1 | disjunct2 | ... | disjunctn)^boost" </returns>
public override System.String ToString(System.String field)
{
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    sb.Append("(");

    int count = disjuncts.Count;
    for (int i = 0; i < count; i++)
    {
        Query disjunct = (Query) disjuncts[i];
        // Sub-BooleanQueries get their own parentheses so precedence stays readable.
        bool parenthesize = disjunct is BooleanQuery;
        if (parenthesize)
        {
            sb.Append("(");
        }
        sb.Append(disjunct.ToString(field));
        if (parenthesize)
        {
            sb.Append(")");
        }
        if (i != count - 1)
        {
            sb.Append(" | ");
        }
    }
    sb.Append(")");

    // "~t" suffix only when the tie-breaker contributes.
    if (tieBreakerMultiplier != 0.0f)
    {
        sb.Append("~").Append(tieBreakerMultiplier);
    }
    // "^b" suffix only for a non-default boost.
    if (GetBoost() != 1.0)
    {
        sb.Append("^").Append(GetBoost());
    }
    return sb.ToString();
}
// Default page size for results; mirrors Environment.MaxResults.
private int _maxResults = 20; //Environment.MaxResults;

#region Constructors

/// <summary>
/// Creates a full-text query over the given Lucene query.
/// The <paramref name="classes"/> array restricts results to those entity types;
/// classes must be immutable.
/// </summary>
/// <param name="query">the Lucene query; its string form becomes the HQL-side query text. Must not be null.</param>
/// <param name="classes">entity types the results are restricted to</param>
/// <param name="session">the NHibernate session the query runs against. Must not be null.</param>
/// <param name="parameterMetadata">parameter metadata forwarded to the base query (not null-checked; see disabled guard below)</param>
/// <param name="log">optional logger; defaults to the logger for this type</param>
public FullTextQueryImpl(Lucene.Net.Search.Query query, System.Type[] classes, ISession session, ParameterMetadata parameterMetadata, INHibernateLogger log = null)
    : base(query?.ToString() ?? throw new ArgumentNullException(nameof(query)),
        FlushMode.Unspecified,
        session?.GetSessionImplementation() ?? throw new ArgumentNullException(nameof(session)),
        parameterMetadata) // ?? throw new ArgumentNullException(nameof(parameterMetadata)))
{
    this.log = log ?? NHibernateLogger.For(typeof(FullTextQueryImpl));
    luceneQuery = query;
    // -1 marks the result size as not-yet-computed.
    resultSize = -1;
    this.classes = classes;
    this.filterDefinitions = new Dictionary <string, FullTextFilterImpl>();
}
/// <summary>
/// Searches the index with the given query text and returns the top 100 hits.
/// </summary>
/// <param name="querytext">Raw query text; lowercased, and quoted when <paramref name="toProcess"/> is false.</param>
/// <param name="query">Receives the parsed query.</param>
/// <param name="toProcess">When false, the text is wrapped in quotes for a phrase search.</param>
/// <returns>The top 100 matching documents.</returns>
public TopDocs SearchForQuery(string querytext, out Lucene.Net.Search.Query query, bool toProcess) // Searches index with query text
{
    Stopwatch stopwatch2 = Stopwatch.StartNew();
    // FIX: use the invariant culture so lowercasing is not affected by the
    // current thread's culture (e.g. Turkish 'I' -> 'ı' would break index terms).
    querytext = querytext.ToLowerInvariant();
    if (!toProcess)
    {
        // Exact phrase search: quote the text verbatim.
        querytext = "\"" + querytext + "\"";
    }
    query = parser.Parse(querytext);
    stopwatch2.Stop();
    queryTime = stopwatch2.Elapsed.TotalSeconds.ToString();
    finalQuery = query.ToString();
    TopDocs results = searcher.Search(query, 100);
    return (results);
}
// LUCENENET specific - de-nested SetCollector

/// <summary>
/// Tests that a query matches an expected set of documents using Hits.
///
/// <para>Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// </para>
/// </summary>
/// <param name="random"> source of randomness passed to the query checker </param>
/// <param name="query"> the query to test </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="defaultFieldName"> used for displaying the query in assertion messages </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <seealso cref="CheckHitCollector(Random, Query, string, IndexSearcher, int[])"/>
public static void DoCheckHits(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    ScoreDoc[] hits = searcher.Search(query, 1000).ScoreDocs;

    // Expected doc ids, kept sorted so the comparison is order-insensitive.
    JCG.SortedSet<int> expected = new JCG.SortedSet<int>();
    foreach (int result in results)
    {
        expected.Add(Convert.ToInt32(result, CultureInfo.InvariantCulture));
    }

    // Doc ids actually returned by the search.
    JCG.SortedSet<int> observed = new JCG.SortedSet<int>();
    foreach (ScoreDoc hit in hits)
    {
        observed.Add(Convert.ToInt32(hit.Doc, CultureInfo.InvariantCulture));
    }

    Assert.AreEqual(expected, observed, aggressive: false, () => query.ToString(defaultFieldName));

    // Also run the query invariants checker (rarely in "deep" mode, driven by random).
    QueryUtils.Check(random, query, searcher, LuceneTestCase.Rarely(random));
}
/// <summary>
/// Asserts that two hit arrays are identical: same length, same doc ids in the
/// same order, and scores equal within a small tolerance.
/// </summary>
/// <param name="query">the query, used only for assertion messages</param>
/// <param name="hits1">first hit list</param>
/// <param name="hits2">second hit list</param>
public static void CheckEqual(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2)
{
    const float scoreTolerance = 1.0e-6f;
    if (hits1.Length != hits2.Length)
    {
        Assert.Fail("Unequal lengths: hits1=" + hits1.Length + ",hits2=" + hits2.Length);
    }
    for (int i = 0; i < hits1.Length; i++)
    {
        if (hits1[i].Doc != hits2[i].Doc)
        {
            Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
        }

        // FIX: the original re-tested `hits1[i].Doc != hits2[i].Doc` here, which is
        // unreachable — Assert.Fail above throws on any doc mismatch — so only the
        // score-tolerance comparison remains.
        if (Math.Abs(hits1[i].Score - hits2[i].Score) > scoreTolerance)
        {
            Assert.Fail("Hit " + i + ", doc nrs " + hits1[i].Doc + " and " + hits2[i].Doc + "\nunequal       : " + hits1[i].Score + "\n           and: " + hits2[i].Score + "\nfor query:" + query.ToString());
        }
    }
}
// Tolerance used when comparing explanation values to scores.
// NOTE: public mutable static — tests can (and do) adjust it.
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f;    // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set, have an explanation which indicates no match
/// (ie: Explanation value of 0.0f)
/// </summary>
/// <param name="q">the query whose non-matches are verified</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher explanations are requested from</param>
/// <param name="results">documentIds that DO match and are therefore skipped</param>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
    System.String d = q.ToString(defaultFieldName);

    // Set of doc ids that are expected matches — these are not checked.
    System.Collections.Hashtable ignore = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32) results[i]);
    }

    // Every other doc id must explain to exactly 0.0f (no match).
    int maxDoc = searcher.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (ignore.Contains((System.Int32) doc))
            continue;

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
        Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
/// <summary>
/// Tests that a query matches an expected set of documents using Hits.
/// <p>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// </p>
/// </summary>
/// <param name="random"> source of randomness passed to the query checker </param>
/// <param name="query"> the query to test </param>
/// <param name="defaultFieldName"> used for displaying the query in assertion messages </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <seealso cref= #checkHitCollector </seealso>
public static void DoCheckHits(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    ScoreDoc[] hits = searcher.Search(query, 1000).ScoreDocs;

    // Expected doc ids, kept sorted so the comparison is order-insensitive.
    SortedSet<int?> expected = new SortedSet<int?>();
    foreach (int result in results)
    {
        expected.Add(Convert.ToInt32(result));
    }

    // Doc ids actually returned by the search.
    SortedSet<int?> observed = new SortedSet<int?>();
    foreach (ScoreDoc hit in hits)
    {
        observed.Add(Convert.ToInt32(hit.Doc));
    }

    Assert.AreEqual(expected, observed, query.ToString(defaultFieldName));

    // Also run the query invariants checker (rarely in "deep" mode, driven by random).
    QueryUtils.Check(random, query, searcher, LuceneTestCase.Rarely(random));
}
/// <summary>
/// Tests that a query matches an expected set of documents using a
/// HitCollector.
/// <para>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </para>
/// </summary>
/// <param name="random"> Source of randomness for the reader-wrapping checks. </param>
/// <param name="query"> The query to test. </param>
/// <param name="defaultFieldName"> Used for displaying the query in assertion messages. </param>
/// <param name="searcher"> The searcher to test the query against. </param>
/// <param name="results"> A list of documentIds that must match the query. </param>
/// <seealso cref="DoCheckHits(Random, Query, string, IndexSearcher, int[])"/>
public static void CheckHitCollector(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    // Sanity-check the query before asserting hit sets.
    QueryUtils.Check(random, query, searcher);
    Trace.TraceInformation("Checked");

    JCG.SortedSet<int> expected = new JCG.SortedSet<int>(results);
    JCG.SortedSet<int> collected = new JCG.SortedSet<int>();
    ICollector collector = new SetCollector(collected);

    searcher.Search(query, collector);
    Assert.AreEqual(expected, collected, aggressive: false, () => "Simple: " + query.ToString(defaultFieldName));

    // Verify the same hit set when the underlying reader is wrapped in different ways
    // (the loop index selects the wrapping mode).
    for (int i = -1; i < 2; i++)
    {
        collected.Clear();
        IndexSearcher wrapped = QueryUtils.WrapUnderlyingReader(random, searcher, i);
        wrapped.Search(query, collector);
        Assert.AreEqual(expected, collected, aggressive: false, () => "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Tests that all documents up to maxDoc which are *not* in the
/// expected result set, have an explanation which indicates that
/// the document does not match
/// </summary>
/// <param name="q">the query whose non-matches are verified</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher explanations are requested from</param>
/// <param name="results">documentIds that DO match and are therefore skipped</param>
public static void CheckNoMatchExplanations(Query q, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    string d = q.ToString(defaultFieldName);

    // Set of doc ids that are expected matches — these are not checked.
    SortedSet<int?> ignore = new SortedSet<int?>();
    for (int i = 0; i < results.Length; i++)
    {
        ignore.Add(Convert.ToInt32(results[i]));
    }

    // Every other doc id must have a non-matching explanation.
    int maxDoc = searcher.IndexReader.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (ignore.Contains(Convert.ToInt32(doc)))
        {
            continue;
        }

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
        Assert.IsFalse(exp.IsMatch, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
// Tolerance used when comparing explanation values to scores.
// NOTE(review): duplicate of the other legacy CheckNoMatchExplanations block in
// this file (differs only in brace style); consider consolidating.
public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f;    // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

/// <summary> Tests that all documents up to maxDoc which are *not* in the
/// expected result set, have an explanation which indicates no match
/// (ie: Explanation value of 0.0f)
/// </summary>
/// <param name="q">the query whose non-matches are verified</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher explanations are requested from</param>
/// <param name="results">documentIds that DO match and are therefore skipped</param>
public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
{
    System.String d = q.ToString(defaultFieldName);

    // Set of doc ids that are expected matches — these are not checked.
    System.Collections.Hashtable ignore = new System.Collections.Hashtable();
    for (int i = 0; i < results.Length; i++)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32)results[i]);
    }

    // Every other doc id must explain to exactly 0.0f (no match).
    int maxDoc = searcher.MaxDoc();
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (ignore.Contains((System.Int32)doc))
        {
            continue;
        }

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
        Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
/// <summary>
/// Tests that all documents up to maxDoc which are *not* in the
/// expected result set, have an explanation which indicates that
/// the document does not match
/// </summary>
/// <param name="q">the query whose non-matches are verified</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher explanations are requested from</param>
/// <param name="results">documentIds that DO match and are therefore skipped</param>
public static void CheckNoMatchExplanations(Query q, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    string description = q.ToString(defaultFieldName);

    // Doc ids that are expected matches — these are skipped below.
    SortedSet<int?> expectedMatches = new SortedSet<int?>();
    foreach (int result in results)
    {
        expectedMatches.Add(Convert.ToInt32(result, CultureInfo.InvariantCulture));
    }

    // Every other doc id must have a non-matching explanation.
    int maxDoc = searcher.IndexReader.MaxDoc;
    for (int doc = 0; doc < maxDoc; doc++)
    {
        if (expectedMatches.Contains(Convert.ToInt32(doc, CultureInfo.InvariantCulture)))
        {
            continue;
        }

        Explanation exp = searcher.Explain(q, doc);
        Assert.IsNotNull(exp, "Explanation of [[" + description + "]] for #" + doc + " is null");
        Assert.IsFalse(exp.IsMatch, "Explanation of [[" + description + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
    }
}
/// <summary>
/// Tests that a query matches an expected set of documents using Hits.
/// <p>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// </p>
/// NOTE(review): duplicate of the other SortedSet-based DoCheckHits in this
/// file (differs only in whitespace); consider consolidating.
/// </summary>
/// <param name="random"> source of randomness passed to the query checker </param>
/// <param name="query"> the query to test </param>
/// <param name="defaultFieldName"> used for displaying the query in assertion messages </param>
/// <param name="searcher"> the searcher to test the query against </param>
/// <param name="results"> a list of documentIds that must match the query </param>
/// <seealso cref= #checkHitCollector </seealso>
public static void DoCheckHits(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    ScoreDoc[] hits = searcher.Search(query, 1000).ScoreDocs;

    // Expected doc ids, sorted for order-insensitive comparison.
    SortedSet<int?> correct = new SortedSet<int?>();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add(Convert.ToInt32(results[i]));
    }

    SortedSet<int?> actual = new SortedSet<int?>();
    for (int i = 0; i < hits.Length; i++)
    {
        actual.Add(Convert.ToInt32(hits[i].Doc));
    }

    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

    // Also run the query invariants checker (rarely in "deep" mode, driven by random).
    QueryUtils.Check(random, query, searcher, LuceneTestCase.Rarely(random));
}
/// <summary>
/// Runs the current query (from GetQuery) as a fuzzy search over all configured
/// non-increment index sets, combined through a MultiReader.
/// Returns null (and logs) when anything throws.
/// </summary>
/// <param name="query">receives the executed query, or null on early failure</param>
/// <returns>the search hits, or null if the search failed</returns>
// NOTE(review): the IndexReaders/MultiReader/IndexSearcher opened here are never
// closed — presumably because the returned Hits lazily reads from the searcher;
// confirm the caller's lifetime expectations before adding disposal.
public static Hits FuzzySearchEx(out Query query)
{
    Hits hits = null;
    query = null;
    try
    {
        // Open a reader per non-increment index set; prefer the explicit search
        // list, falling back to all configured index sets.
        List<IndexReader> readerList = new List<IndexReader>();
        if (searchIndexList.Count > 0)
        {
            foreach (IndexSet indexSet in searchIndexList)
            {
                if (indexSet.Type == IndexTypeEnum.Increment)
                    continue;
                readerList.Add(IndexReader.Open(indexSet.Path));
            }
        }
        else
        {
            foreach (IndexSet indexSet in indexFieldsDict.Keys)
            {
                if (indexSet.Type == IndexTypeEnum.Increment)
                    continue;
                readerList.Add(IndexReader.Open(indexSet.Path));
            }
        }

        MultiReader multiReader = new MultiReader(readerList.ToArray());
        IndexSearcher searcher = new IndexSearcher(multiReader);
        query = GetQuery();
#if DEBUG
        System.Console.WriteLine(query.ToString());
#endif
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, query.ToString());
        hits = searcher.Search(query);
    }
    catch (Exception e)
    {
        // Best-effort: log and fall through to return null.
        // NOTE(review): only the stack trace is logged — the exception message is lost.
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
    }
    return hits;
}
/// <summary>
/// Runs the current query over all configured non-increment index sets, builds
/// highlighted SearchField fragments for every stored field of every hit, and
/// groups result positions per index set in <paramref name="statistics"/>.
/// </summary>
/// <param name="query">receives the overall query (GetQuery()); per-index queries are built separately</param>
/// <param name="statistics">maps index-set caption (suffixed with a counter on collision) to the positions of its records in the returned list</param>
/// <returns>all matching records above the configured minimum score; empty on failure (errors are logged)</returns>
// NOTE(review): the two foreach branches below are copy-paste identical except for
// the source collection; the searchers/readers opened per index set are never
// closed — confirm before refactoring.
public static List<SearchRecord> HighLightSearch(out Query query, out Dictionary<string, List<int>> statistics)
{
    List<SearchRecord> recordList = new List<SearchRecord>();
    query = GetQuery();
    statistics = new Dictionary<string, List<int>>();
    try
    {
        if (searchIndexList.Count > 0)
        {
            // Branch 1: explicit search list configured.
            foreach (IndexSet indexSet in searchIndexList)
            {
                if (indexSet.Type == IndexTypeEnum.Increment)
                    continue;
                Query theQuery = GetQuery(indexSet);
                Source source = indexDict[indexSet];
                Dictionary<string, IndexField> fpDict = source.FieldDict;
                //IndexSearcher searcher = new IndexSearcher(indexSet.Path);
                IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
                ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
                System.Console.WriteLine(query.ToString());
#endif
                // Highlighter renders best-matching fragments per field.
                Highlighter highlighter = new Highlighter(new QueryScorer(theQuery));
                highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));
                TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                List<int> posList = new List<int>();
                for (int i = 0; i < scoreDocs.Length; i++)
                {
                    float score = scoreDocs[i].score;
                    if (score < searchSet.MinScore)
                        continue;
                    Document doc = searcher.Doc(scoreDocs[i].doc);
                    Field[] fields = new Field[doc.GetFields().Count];
                    doc.GetFields().CopyTo(fields, 0);
                    List<SearchField> sfList = new List<SearchField>();
                    foreach (Field field in fields)
                    {
                        string key = field.Name();
                        string value = field.StringValue();
                        string output = SupportClass.String.DropHTML(value);
                        TokenStream tokenStream = analyzer.TokenStream(key, new System.IO.StringReader(output));
                        string result = "";
                        result = highlighter.GetBestFragment(tokenStream, output);
                        // Use the highlighted fragment when one exists; otherwise fall back
                        // to the raw value (known fields) or the null/empty result (unknown fields).
                        if (result != null && string.IsNullOrEmpty(result.Trim()) == false)
                        {
                            if (fpDict.ContainsKey(key))
                                sfList.Add(new SearchField(key, fpDict[key].Caption, value, result, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                            else
                                sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
                        }
                        else
                        {
                            if (fpDict.ContainsKey(key))
                                sfList.Add(new SearchField(key, fpDict[key].Caption, value, value, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                            else
                                sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
                        }
                    }
                    recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
                    posList.Add(recordList.Count - 1);
                }
                // Duplicate captions get a numeric suffix; Add throwing is used as the
                // collision signal (exception-as-control-flow — left as-is).
                try
                {
                    statistics.Add(indexSet.Caption, posList);
                }
                catch (Exception)
                {
                    int i = 2;
                    while (statistics.ContainsKey(indexSet.Caption + i.ToString()))
                        i++;
                    statistics.Add(indexSet.Caption + i.ToString(), posList);
                }
            }
        }
        else
        {
            // Branch 2: no explicit list — search every configured index set.
            foreach (IndexSet indexSet in indexFieldsDict.Keys)
            {
                if (indexSet.Type == IndexTypeEnum.Increment)
                    continue;
                Query theQuery = GetQuery(indexSet);
                Source source = indexDict[indexSet];
                Dictionary<string, IndexField> fpDict = source.FieldDict;
                //IndexSearcher searcher = new IndexSearcher(indexSet.Path);
                IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
                ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
                System.Console.WriteLine(query.ToString());
#endif
                Highlighter highlighter = new Highlighter(new QueryScorer(theQuery));
                highlighter.SetTextFragmenter(new SimpleFragmenter(SupportClass.FRAGMENT_SIZE));
                TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                List<int> posList = new List<int>();
                for (int i = 0; i < scoreDocs.Length; i++)
                {
                    float score = scoreDocs[i].score;
                    if (score < searchSet.MinScore)
                        continue;
                    Document doc = searcher.Doc(scoreDocs[i].doc);
                    Field[] fields = new Field[doc.GetFields().Count];
                    doc.GetFields().CopyTo(fields, 0);
                    List<SearchField> sfList = new List<SearchField>();
                    foreach (Field field in fields)
                    {
                        string key = field.Name();
                        string value = field.StringValue();
                        string output = SupportClass.String.DropHTML(value);
                        TokenStream tokenStream = analyzer.TokenStream(key, new System.IO.StringReader(output));
                        string result = "";
                        result = highlighter.GetBestFragment(tokenStream, output);
                        if (result != null && string.IsNullOrEmpty(result.Trim()) == false)
                        {
                            if (fpDict.ContainsKey(key))
                                sfList.Add(new SearchField(key, fpDict[key].Caption, value, result, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                            else
                                sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
                        }
                        else
                        {
                            if (fpDict.ContainsKey(key))
                                sfList.Add(new SearchField(key, fpDict[key].Caption, value, value, field.GetBoost(), fpDict[key].IsTitle, true, fpDict[key].Order));
                            else
                                sfList.Add(new SearchField(key, key, value, result, field.GetBoost(), false, false, 0));
                        }
                    }
                    recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
                    posList.Add(recordList.Count - 1);
                }
                try
                {
                    statistics.Add(indexSet.Caption, posList);
                }
                catch (Exception)
                {
                    int i = 2;
                    while (statistics.ContainsKey(indexSet.Caption + i.ToString()))
                        i++;
                    statistics.Add(indexSet.Caption + i.ToString(), posList);
                }
            }
        }
    }
    catch (Exception e)
    {
        // Best-effort: log and return whatever was collected so far.
        // NOTE(review): only the stack trace is logged — the exception message is lost.
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
    }
    return recordList;
}
/// <summary>
/// Renders this clause as its occur marker immediately followed by the query text.
/// </summary>
public override System.String ToString()
{
    return string.Concat(occur.ToString(), query.ToString());
}
/// <summary>
/// Asserts that the hits of the constrained query (top2) are a subset of the
/// hits of the unconstrained query (top1), and that every shared document
/// scores the same (within the explain tolerance) in both result sets.
/// </summary>
/// <param name="q">the query, used only in assertion messages</param>
/// <param name="top1">hits of the unconstrained query</param>
/// <param name="top2">hits of the constrained query</param>
private void AssertSubsetOfSameScores(Query q, TopDocs top1, TopDocs top2)
{
    // The constrained query
    // should be a subset to the unconstrained query.
    if (top2.TotalHits > top1.TotalHits)
    {
        Assert.Fail("Constrained results not a subset:\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
    }

    // NOTE(review): TotalHits is used to index ScoreDocs below; assumes the
    // collectors returned every hit (TotalHits <= ScoreDocs.Length) — confirm.
    for (int hit = 0; hit < top2.TotalHits; hit++)
    {
        int id = top2.ScoreDocs[hit].Doc;
        float score = top2.ScoreDocs[hit].Score;
        bool found = false;

        // find this doc in other hits
        for (int other = 0; other < top1.TotalHits; other++)
        {
            if (top1.ScoreDocs[other].Doc == id)
            {
                found = true;
                float otherScore = top1.ScoreDocs[other].Score;
                // check if scores match
                Assert.AreEqual(score, otherScore, CheckHits.ExplainToleranceDelta(score, otherScore), "Doc " + id + " scores don't match\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
            }
        }

        // check if subset
        if (!found)
        {
            Assert.Fail("Doc " + id + " not found\n" + CheckHits.TopdocsString(top1, 0, 0) + CheckHits.TopdocsString(top2, 0, 0) + "for query:" + q.ToString());
        }
    }
}
/*
 * Returns the 'key' for the given query: the field name it targets.
 * When fieldMatch is disabled, every query shares the null key.
 */
private String GetKey(Query query)
{
    if (!fieldMatch)
        return null;

    if (query is TermQuery)
        return ((TermQuery)query).GetTerm().Field();

    if (query is PhraseQuery)
    {
        // All terms of a phrase share one field; the first term is enough.
        Term[] phraseTerms = ((PhraseQuery)query).GetTerms();
        return phraseTerms[0].Field();
    }

    // Only term and phrase queries are supported; composites must be
    // flattened before reaching this point.
    throw new System.ApplicationException("query \"" + query.ToString() + "\" must be flatten first.");
}
/// <summary>Delegates string rendering for the given field to the wrapped query.</summary>
public override string ToString(string field)
{
    string rendered = @in.ToString(field);
    return rendered;
}
/// <summary>
/// Serializes a page of search hits into a JSON response string. The per-hit
/// payload is taken verbatim from the "Data" stored field, so the response is
/// assembled with a StringBuilder rather than a JSON serializer.
/// </summary>
/// <param name="searcher">searcher used to load hit documents and commit metadata</param>
/// <param name="topDocs">the hits to serialize</param>
/// <param name="skip">number of leading hits to skip</param>
/// <param name="take">page size — NOTE(review): currently unused; the loop below
/// runs to the end of ScoreDocs instead of stopping at skip + take. Confirm
/// whether callers pre-truncate topDocs or whether this is a paging bug.</param>
/// <param name="includeExplanation">whether to embed the executed query and per-hit explanations</param>
/// <param name="query">the executed query (for explanations)</param>
/// <param name="elapsed">elapsed search time in milliseconds</param>
/// <param name="rankings">ranking data passed through to AddExplanation</param>
/// <param name="manager">provides index name and download counts</param>
private static string MakeResults(IndexSearcher searcher, TopDocs topDocs, int skip, int take, bool includeExplanation, Query query, long elapsed, IDictionary<string, int> rankings, PackageSearcherManager manager)
{
    // note the use of a StringBuilder because we have the response data already formatted as JSON in the fields in the index
    StringBuilder strBldr = new StringBuilder();

    string timestamp;
    if (!searcher.IndexReader.CommitUserData.TryGetValue("commit-time-stamp", out timestamp))
    {
        timestamp = null;
    }

    strBldr.AppendFormat("{{\"totalHits\":{0},\"timeTakenInMs\":{1},\"index\":\"{2}\"", topDocs.TotalHits, elapsed, manager.IndexName);
    if (!String.IsNullOrEmpty(timestamp))
    {
        strBldr.AppendFormat(",\"indexTimestamp\":\"{0}\"", timestamp);
    }
    if (includeExplanation)
    {
        // JsonConvert.Serialize does escaping and quoting.
        strBldr.AppendFormat(",\"executedQuery\":{0}", Newtonsoft.Json.JsonConvert.SerializeObject(query.ToString()));
    }
    strBldr.Append(",\"data\":[");

    bool hasResult = false;
    for (int i = skip; i < topDocs.ScoreDocs.Length; i++)
    {
        ScoreDoc scoreDoc = topDocs.ScoreDocs[i];
        Document doc = searcher.Doc(scoreDoc.Doc);
        string data = doc.Get("Data");
        string id = doc.Get("Id");
        NuGet.Versioning.NuGetVersion ngVersion = new Versioning.NuGetVersion(doc.Get("Version"));
        // NOTE(review): ngVersion is never null here (freshly constructed), so
        // the second half of this condition is always true.
        if (!String.IsNullOrEmpty(id) && ngVersion != null)
        {
            Tuple<int,int> countRecord = manager.GetDownloadCount(id,ngVersion.ToNormalizedString());
            if (countRecord != null)
            {
                // Patch the data in to the JSON
                JObject parsed = JObject.Parse(data);
                parsed["DownloadCount"] = countRecord.Item1;
                parsed["PackageRegistration"]["DownloadCount"] = countRecord.Item2;
                data = parsed.ToString(Formatting.None);
            }
        }
        if (includeExplanation)
        {
            data = AddExplanation(searcher, data, query, scoreDoc, rankings);
        }
        strBldr.Append(data);
        strBldr.Append(",");
        hasResult = true;
    }

    // Drop the trailing comma left by the loop, if any hits were written.
    if (hasResult)
    {
        strBldr.Remove(strBldr.Length - 1, 1);
    }
    strBldr.Append("]}");

    string result = strBldr.ToString();
    return result;
}
/// <summary>
/// Tests that a query matches an expected set of documents using a
/// HitCollector.
/// <para>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </para>
/// </summary>
/// <param name="random">randomness used when wrapping the underlying reader</param>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="CheckHits"/>
public static void CheckHitCollector(Random random, Query query, string defaultFieldName, IndexSearcher searcher, int[] results)
{
    QueryUtils.Check(random, query, searcher);

    Trace.TraceInformation("Checked");

    SortedSet<int?> correct = new SortedSet<int?>();
    for (int i = 0; i < results.Length; i++)
    {
        // results[i] is already an int; the previous Convert.ToInt32 call was a
        // culture-sensitive no-op (CA1305), so add the value directly.
        correct.Add(results[i]);
    }

    SortedSet<int?> actual = new SortedSet<int?>();
    Collector c = new SetCollector(actual);

    searcher.Search(query, c);
    Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

    // Re-run against wrapped readers at each supported wrap level (-1..1).
    for (int i = -1; i < 2; i++)
    {
        actual.Clear();
        IndexSearcher s = QueryUtils.WrapUnderlyingReader(random, searcher, i);
        s.Search(query, c);
        Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
    }
}
/// <summary>
/// Runs the combined query over all permanent (non-increment) indexes through
/// a single MultiReader and returns the hits at or above the minimum score.
/// </summary>
/// <param name="query">out: the executed query; stays null if GetQuery is never reached</param>
public static List<SearchRecord> ExactFastSearch(out Query query)
{
    List<SearchRecord> docList = new List<SearchRecord>();
    query = null;
    try
    {
        // One reader per permanent index; increment indexes are skipped.
        List<IndexReader> readerList = new List<IndexReader>();
        foreach (IndexSet indexSet in searchIndexList)
        {
            if (indexSet.Type == IndexTypeEnum.Increment)
                continue;
            readerList.Add(IndexReader.Open(indexSet.Path));
        }
        MultiReader multiReader = new MultiReader(readerList.ToArray());
        IndexSearcher searcher = new IndexSearcher(multiReader);
        query = GetQuery();
#if DEBUG
        System.Console.WriteLine(query.ToString());
#endif
        TopDocs topDocs = searcher.Search(query.Weight(searcher), null, searchSet.MaxMatches);
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        for (int i = 0; i < scoreDocs.Length; i++)
        {
            Document doc = searcher.Doc(scoreDocs[i].doc);
            float score = scoreDocs[i].score;
            if (score < searchSet.MinScore)
                continue;
            // NOTE(review): docList is List<SearchRecord> but a Document is added
            // here — presumably an implicit conversion exists; confirm it compiles
            // and produces the intended record.
            docList.Add(doc);
        }
        // NOTE(review): searcher/multiReader/readers are never closed on any
        // path — possible file-handle leak; confirm ownership semantics.
    }
    catch (Exception e)
    {
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
    }
    return docList;
}
/// <summary>
/// Verifies that the hits of the constrained query (top2) form a subset of the
/// hits of the unconstrained query (top1), and that every document appearing
/// in both scores identically within the explain tolerance.
/// </summary>
/// <param name="q">the query, used only for assertion messages</param>
/// <param name="top1">hits of the unconstrained query</param>
/// <param name="top2">hits of the constrained query</param>
private void AssertSubsetOfSameScores(Query q, TopDocs top1, TopDocs top2)
{
    // A constrained query can never match more documents than the unconstrained one.
    if (top2.TotalHits > top1.TotalHits)
    {
        Assert.Fail("Constrained results not a subset:\n" + CheckHits.TopDocsString(top1, 0, 0) + CheckHits.TopDocsString(top2, 0, 0) + "for query:" + q.ToString());
    }

    for (int constrainedIdx = 0; constrainedIdx < top2.TotalHits; constrainedIdx++)
    {
        int docId = top2.ScoreDocs[constrainedIdx].Doc;
        float constrainedScore = top2.ScoreDocs[constrainedIdx].Score;
        bool matched = false;

        // Scan the unconstrained hits for the same document and compare scores.
        for (int otherIdx = 0; otherIdx < top1.TotalHits; otherIdx++)
        {
            if (top1.ScoreDocs[otherIdx].Doc != docId)
            {
                continue;
            }
            matched = true;
            float unconstrainedScore = top1.ScoreDocs[otherIdx].Score;
            Assert.AreEqual(constrainedScore, unconstrainedScore, CheckHits.ExplainToleranceDelta(constrainedScore, unconstrainedScore), "Doc " + docId + " scores don't match\n" + CheckHits.TopDocsString(top1, 0, 0) + CheckHits.TopDocsString(top2, 0, 0) + "for query:" + q.ToString());
        }

        // Subset violation: the constrained hit is absent from the unconstrained set.
        if (!matched)
        {
            Assert.Fail("Doc " + docId + " not found\n" + CheckHits.TopDocsString(top1, 0, 0) + CheckHits.TopDocsString(top2, 0, 0) + "for query:" + q.ToString());
        }
    }
}
/// <summary>
/// Asserts that two queries are distinguishable: unequal string forms
/// (checked both ways) and unequal hash codes.
/// </summary>
public static void CheckUnequal(Query q1, Query q2)
{
    string first = q1.ToString();
    string second = q2.ToString();
    Assert.IsTrue(first != second);
    Assert.IsTrue(second != first);

    // possible this test can fail on a hash collision... if that
    // happens, please change test to use a different example.
    Assert.IsTrue(q1.GetHashCode() != q2.GetHashCode());
}
/// <summary>
/// Asserts that two Hits instances contain the same documents in the same
/// order, with scores equal within a small absolute tolerance.
/// </summary>
/// <param name="query">the query, used only in assertion messages</param>
/// <param name="hits1">first result set</param>
/// <param name="hits2">second result set</param>
public static void CheckEqual(Query query, Hits hits1, Hits hits2)
{
    float scoreTolerance = 1.0e-6f;
    if (hits1.Length() != hits2.Length())
    {
        Assert.Fail("Unequal lengths: hits1=" + hits1.Length() + ",hits2=" + hits2.Length());
    }
    for (int i = 0; i < hits1.Length(); i++)
    {
        if (hits1.Id(i) != hits2.Id(i))
        {
            Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
        }
        // The id mismatch case already failed above, so only the score delta
        // needs checking here (the original re-tested the ids redundantly —
        // that clause could never be true at this point).
        if (System.Math.Abs(hits1.Score(i) - hits2.Score(i)) > scoreTolerance)
        {
            Assert.Fail("Hit " + i + ", doc nrs " + hits1.Id(i) + " and " + hits2.Id(i) + "\nunequal : " + hits1.Score(i) + "\n and: " + hits2.Score(i) + "\nfor query:" + query.ToString());
        }
    }
}
/// <summary>Renders the match and context queries separated by a slash.</summary>
public override String ToString(String field)
{
    String matchText = match.ToString(field);
    String contextText = context.ToString(field);
    return matchText + "/" + contextText;
}
/// <summary> Tests that a query matches an expected set of documents using a
/// HitCollector.
///
/// <p>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// </p>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query,HitCollector)">
/// </seealso>
/// <seealso cref="checkHits">
/// </seealso>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // Expected doc ids (may contain duplicates; ArrayList preserves them).
    System.Collections.ArrayList correct = new System.Collections.ArrayList(results.Length);
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add(results[i]);
    }

    // Collect the actually-matched doc ids as Hashtable keys.
    System.Collections.Hashtable actual = new System.Collections.Hashtable();
    searcher.Search(query, new AnonymousClassHitCollector(actual));

    // NOTE(review): this only verifies that every collected doc is expected
    // (actual ⊆ correct); an expected doc that is never collected goes
    // undetected. Newer overloads compare the two sets for full equality —
    // confirm whether this asymmetry is intentional here.
    System.Collections.IDictionaryEnumerator e = actual.GetEnumerator();
    while (e.MoveNext())
    {
        Assert.Contains(e.Key, correct, query.ToString(defaultFieldName));
    }

    QueryUtils.Check(query, searcher);
}
/// <summary>
/// Tests that a query matches an expected set of documents using a
/// HitCollector.
/// <p/>
/// Note that when using the HitCollector API, documents will be collected
/// if they "match" regardless of what their score is.
/// <p/>
/// </summary>
/// <param name="query">the query to test</param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
/// <param name="searcher">the searcher to test the query against</param>
/// <param name="results">a list of documentIds that must match the query</param>
/// <seealso cref="Searcher.Search(Query,HitCollector)"/>
/// <seealso cref="checkHits"/>
public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    QueryUtils.Check(query, searcher);

    // Build the expected doc-id set.
    System.Collections.Hashtable expected = new System.Collections.Hashtable();
    foreach (int docId in results)
    {
        SupportClass.CollectionsHelper.AddIfNotContains(expected, docId);
    }

    System.Collections.Hashtable collected = new System.Collections.Hashtable();
    Collector collector = new SetCollector(collected);

    // Plain search must produce exactly the expected set.
    searcher.Search(query, collector);
    Assert.AreEqual(expected, collected, "Simple: " + query.ToString(defaultFieldName));

    // Repeat through wrapped searchers at each wrap level (-1..1).
    for (int level = -1; level < 2; level++)
    {
        collected.Clear();
        QueryUtils.WrapSearcher(searcher, level).Search(query, collector);
        Assert.AreEqual(expected, collected, "Wrap Searcher " + level + ": " + query.ToString(defaultFieldName));
    }

    // Reader wrapping only applies to IndexSearcher-backed searchers.
    if (searcher is IndexSearcher)
    {
        for (int level = -1; level < 2; level++)
        {
            collected.Clear();
            QueryUtils.WrapUnderlyingReader((IndexSearcher)searcher, level).Search(query, collector);
            Assert.AreEqual(expected, collected, "Wrap Reader " + level + ": " + query.ToString(defaultFieldName));
        }
    }
}
/// <summary>
/// Renders a query via ToStringVisitor; if the visitor fails, logs the error
/// and falls back to the query's own ToString with control characters
/// replaced by dots.
/// </summary>
private string QueryToString(Query query)
{
    try
    {
        var visitor = new ToStringVisitor();
        visitor.Visit(query);
        return visitor.ToString();
    }
    catch (Exception e)
    {
        Logger.WriteException(e);

        // Fallback: sanitize the raw query text so control characters do not
        // corrupt whatever consumes this string.
        var chars = query.ToString().ToCharArray();
        for (var index = 0; index < chars.Length; index++)
        {
            if (chars[index] < ' ')
            {
                chars[index] = '.';
            }
        }
        return new String(chars);
    }
}
/// <summary>Prints a user-readable version of this query. </summary>
/// <param name="field">the default field; sub-queries on this field omit the field prefix</param>
public override System.String ToString(System.String field)
{
    System.Text.StringBuilder buffer = new System.Text.StringBuilder();
    // Parenthesize when a boost or a minimum-should-match suffix follows.
    bool needParens = (GetBoost() != 1.0) || (GetMinimumNumberShouldMatch() > 0);
    if (needParens)
    {
        buffer.Append("(");
    }
    for (int i = 0; i < clauses.Count; i++)
    {
        BooleanClause c = (BooleanClause)clauses[i];
        // Occur marker: '-' for MUST_NOT, '+' for MUST, nothing for SHOULD.
        if (c.IsProhibited())
        {
            buffer.Append("-");
        }
        else if (c.IsRequired())
        {
            buffer.Append("+");
        }
        Query subQuery = c.GetQuery();
        if (subQuery != null)
        {
            if (subQuery is BooleanQuery)
            {
                // wrap sub-bools in parens
                buffer.Append("(");
                buffer.Append(subQuery.ToString(field));
                buffer.Append(")");
            }
            else
            {
                buffer.Append(subQuery.ToString(field));
            }
        }
        else
        {
            buffer.Append("null");
        }
        // Single space between clauses, none after the last.
        if (i != clauses.Count - 1)
        {
            buffer.Append(" ");
        }
    }
    if (needParens)
    {
        buffer.Append(")");
    }
    // '~N' suffix for minimum number of SHOULD clauses that must match.
    if (GetMinimumNumberShouldMatch() > 0)
    {
        buffer.Append('~');
        buffer.Append(GetMinimumNumberShouldMatch());
    }
    // '^boost' suffix when a non-default boost is set.
    if (GetBoost() != 1.0f)
    {
        buffer.Append(ToStringUtils.Boost(GetBoost()));
    }
    return(buffer.ToString());
}
/// <summary>
/// Executes a Lucene query against this instance's Index and maps the hits in
/// [start, end) to SkinnyItems.
/// </summary>
/// <param name="query">the query to run; null or empty queries yield an empty list</param>
/// <param name="showAllVersions">whether every item version is materialized from the results</param>
/// <param name="sorter">optional sort; when null a plain (or prepared) search is used</param>
/// <param name="start">zero-based index of the first hit to fetch</param>
/// <param name="end">index after the last hit to fetch; 0 or past-the-end means "through the last hit"</param>
/// <param name="totalResults">out: total hit count for the query</param>
public virtual List<SkinnyItem> RunQuery(Query query, bool showAllVersions, Sort sorter, int start, int end, out int totalResults)
{
    Assert.ArgumentNotNull(Index, "Index");
    var items = new List<SkinnyItem>();

    // Guard: nothing to do for a null/empty query.
    if (query == null || string.IsNullOrEmpty(query.ToString()))
    {
        Log.Debug("SitecoreSearchContrib: Attempt to execute an empty query.");
        totalResults = 0;
        return items;
    }

    try
    {
        using (var context = new IndexSearchContext(Index))
        {
            Log.Debug(string.Format("SitecoreSearchContrib: Executing query: {0}", query));

            // Sorted searches go straight to the searcher; unsorted ones go
            // through the context, optionally as a prepared query.
            SearchHits searchhits;
            if (sorter != null)
            {
                var hits = context.Searcher.Search(query, sorter);
                searchhits = new SearchHits(hits);
            }
            else
            {
                searchhits = this.UsePreparedQuery ? context.Search(new PreparedQuery(query)) : context.Search(query);
            }

            // NOTE(review): this path returns null while the empty-query path
            // returns an empty list — confirm callers handle both.
            if (searchhits == null)
            {
                totalResults = 0;
                return null;
            }

            totalResults = searchhits.Length;
            // end == 0 means "no explicit end"; also clamp to the hit count.
            if (end == 0 || end > searchhits.Length)
            {
                end = totalResults;
            }

            Log.Debug(string.Format("SitecoreSearchContrib: Total hits: {0}", totalResults));

            var resultCollection = searchhits.FetchResults(start, end - start);
            SearchHelper.GetItemsFromSearchResult(resultCollection, items, showAllVersions);

            Log.Debug(string.Format("SitecoreSearchContrib: Total results: {0}", resultCollection.Count));
        }
    }
    catch (Exception exception)
    {
        Log.Error("scSearchContrib.Searcher. There was a problem while running a search query. Details: " + exception.Message, this);
        Log.Error(exception.StackTrace, this);
        throw;
    }

    return items;
}
/// <summary>
/// Performs a search across the Spanish, English and Hebrew indexes with a
/// custom score boost on the article_id field, returning one page of issues.
/// </summary>
/// <param name="query">the base query</param>
/// <param name="startIndex">zero-based index of the first result to return</param>
/// <param name="blockSize">maximum number of results in the returned page</param>
/// <param name="indexDirEs">Spanish index directory (disposed on success)</param>
/// <param name="indexDirEn">English index directory (disposed on success)</param>
/// <param name="indexDirHe">Hebrew index directory (disposed on success)</param>
/// <param name="sortBy">sort key (currently unused by this implementation)</param>
/// <returns>the issue documents for the requested page</returns>
public List<IssueDocument> MedesSearch(Query query, int startIndex, int blockSize, Directory indexDirEs, Directory indexDirEn, Directory indexDirHe, string sortBy)
{
#if DEBUG
    T.TraceMessage(string.Format("Begin search , query: '{0}'", query.ToString()));
#endif
    List<IssueDocument> result = new List<IssueDocument>();
    try
    {
        // build a multi searcher across the 2 indexes
        MultiSearcher mSearcher = CombineSearchers(indexDirEs, indexDirEn, indexDirHe);

        TopDocs tDocs = null;
        int iterateLast = startIndex + blockSize;

        // Boost results by the float value stored in article_id.
        string customScoreField = "article_id";
        FieldScoreQuery dateBooster = new FieldScoreQuery(customScoreField, FieldScoreQuery.Type.FLOAT);
        CustomScoreQuery customQuery = new CustomScoreQuery(query, dateBooster);

        tDocs = mSearcher.Search(customQuery, 1000);

        // Clamp the page to the number of hits actually returned.
        if (startIndex + blockSize > tDocs.TotalHits)
            iterateLast = tDocs.TotalHits;

        for (int i = startIndex; i < iterateLast; i++)
        {
            Document hitDoc = mSearcher.Doc(i);
            result.Add(new IssueDocument() { Id = Int32.Parse(hitDoc.Get("issue_id").ToString())});
        }

        // close the searcher and indexes
        // NOTE(review): these are skipped when an exception is thrown above —
        // consider try/finally or using blocks if leaking handles matters here.
        mSearcher.Dispose();
        indexDirEs.Dispose();
        indexDirEn.Dispose();
        indexDirHe.Dispose();
    }
    catch (Exception ex)
    {
        T.TraceError("Error MedesSearch, query '{0}'", query.ToString());
        T.TraceError(ex);
        // Rethrow with `throw;` so the original stack trace is preserved
        // (the previous `throw ex;` reset it to this frame).
        throw;
    }
    return result;
}
/// <summary>
/// Prints a user-readable version of this query. </summary>
/// <param name="field">the default field; sub-queries on this field omit the field prefix</param>
public override string ToString(string field)
{
    StringBuilder buffer = new StringBuilder();
    // Parenthesize when a boost or minimum-should-match suffix follows.
    bool needParens = Boost != 1.0 || MinimumNumberShouldMatch > 0;
    if (needParens)
    {
        buffer.Append("(");
    }

    for (int i = 0; i < clauses.Count; i++)
    {
        BooleanClause c = clauses[i];
        // Occur marker: '-' for MUST_NOT, '+' for MUST, nothing for SHOULD.
        if (c.Prohibited)
        {
            buffer.Append("-");
        }
        else if (c.Required)
        {
            buffer.Append("+");
        }

        Query subQuery = c.Query;
        if (subQuery != null)
        {
            if (subQuery is BooleanQuery) // wrap sub-bools in parens
            {
                buffer.Append("(");
                buffer.Append(subQuery.ToString(field));
                buffer.Append(")");
            }
            else
            {
                buffer.Append(subQuery.ToString(field));
            }
        }
        else
        {
            buffer.Append("null");
        }

        // Single space between clauses, none after the last.
        if (i != clauses.Count - 1)
        {
            buffer.Append(" ");
        }
    }

    if (needParens)
    {
        buffer.Append(")");
    }

    // '~N' suffix for the minimum number of SHOULD clauses that must match.
    if (MinimumNumberShouldMatch > 0)
    {
        buffer.Append('~');
        buffer.Append(MinimumNumberShouldMatch);
    }

    // '^boost' suffix for a non-default boost.
    if (Boost != 1.0f)
    {
        buffer.Append(ToStringUtils.Boost(Boost));
    }

    return(buffer.ToString());
}
/// <summary>
/// Routes the query to the SOLR core matching the current Index's name and
/// appends the mapped results to <paramref name="items"/>. Unknown index
/// names (or a non-Index Index) leave the list untouched.
/// </summary>
/// <param name="query">the query whose string form is sent to SOLR</param>
/// <param name="items">receives the mapped search results</param>
private static void GetValue(Query query, List<SitecoreItem> items)
{
    // Cast once; the original re-evaluated `Index as Index` in every branch.
    var index = Index as Index;
    if (index == null)
    {
        return;
    }

    // Each index is backed by a differently-typed SOLR document, so each case
    // must resolve its own ISolrOperations<T>. Switching stops at the first
    // match instead of testing all six names every call.
    switch (index.Name)
    {
        case "itembuckets_templates":
        {
            var solr = ServiceLocator.Current.GetInstance<ISolrOperations<SolrTemplateItem>>();
            SolrQueryResults<SolrTemplateItem> remoteSearch = solr.Query(new SolrQuery(query.ToString()));
            SearchHelper.GetItemsFromSearchResultFromSOLR(remoteSearch, items);
            break;
        }
        case "itembuckets_buckets":
        {
            var solr = ServiceLocator.Current.GetInstance<ISolrOperations<SolrBucketItem>>();
            SolrQueryResults<SolrBucketItem> remoteSearch = solr.Query(new SolrQuery(query.ToString()));
            SearchHelper.GetItemsFromSearchResultFromSOLR(remoteSearch, items);
            break;
        }
        case "itembuckets_sitecore":
        {
            var solr = ServiceLocator.Current.GetInstance<ISolrOperations<SolrSitecoreItem>>();
            SolrQueryResults<SolrSitecoreItem> remoteSearch = solr.Query(new SolrQuery(query.ToString()));
            SearchHelper.GetItemsFromSearchResultFromSOLR(remoteSearch, items);
            break;
        }
        case "itembuckets_layoutsfolder":
        {
            var solr = ServiceLocator.Current.GetInstance<ISolrOperations<SolrLayoutItem>>();
            SolrQueryResults<SolrLayoutItem> remoteSearch = solr.Query(new SolrQuery(query.ToString()));
            SearchHelper.GetItemsFromSearchResultFromSOLR(remoteSearch, items);
            break;
        }
        case "itembuckets_systemfolder":
        {
            var solr = ServiceLocator.Current.GetInstance<ISolrOperations<SolrSystemItem>>();
            SolrQueryResults<SolrSystemItem> remoteSearch = solr.Query(new SolrQuery(query.ToString()));
            SearchHelper.GetItemsFromSearchResultFromSOLR(remoteSearch, items);
            break;
        }
        case "itembuckets_medialibrary":
        {
            var solr = ServiceLocator.Current.GetInstance<ISolrOperations<SolrMediaItem>>();
            SolrQueryResults<SolrMediaItem> remoteSearch = solr.Query(new SolrQuery(query.ToString()));
            SearchHelper.GetItemsFromSearchResultFromSOLR(remoteSearch, items);
            break;
        }
    }
}
/// <summary>
/// Registers the given query's terms in this phrase map. Supports TermQuery,
/// PrefixQuery and PhraseQuery; any other query type must have been flattened
/// into those forms beforehand.
/// </summary>
public void Add(Query query)
{
    if (query is TermQuery)
    {
        AddTerm(((TermQuery)query).Term.Text, query.Boost);
    }
    else if (query is PrefixQuery)
    {
        // Prefix terms are stored with a trailing '*' marker.
        AddTerm(((PrefixQuery)query).Prefix.Text + "*", query.Boost);
    }
    else if (query is PhraseQuery)
    {
        PhraseQuery pq = (PhraseQuery)query;
        Term[] terms = pq.GetTerms();
        HashMap<String, QueryPhraseMap> map = subMap;
        QueryPhraseMap qpm = null;
        // Descend/create one nested map level per phrase term.
        foreach (Term term in terms)
        {
            qpm = GetOrNewMap(map, term.Text);
            map = qpm.subMap;
        }
        // NOTE(review): if the phrase has zero terms, qpm is still null here
        // and this dereference throws NullReferenceException — confirm phrase
        // queries are guaranteed non-empty by the time they reach this method.
        qpm.MarkTerminal(pq.Slop, pq.Boost);
    }
    else
        throw new ApplicationException("query \"" + query.ToString() + "\" must be flatten first.");
}
/// <summary>
/// Runs the query against either the configured search indexes (when any are
/// selected) or all known indexes, one ParallelMultiSearcher per index, and
/// returns the matching records along with per-index position statistics.
/// </summary>
/// <param name="query">out: the combined query built from all indexes (display/debug use)</param>
/// <param name="statistics">out: per-index caption -> positions of that index's records in the returned list</param>
public static List<SearchRecord> SearchEx(out Query query,out Dictionary<string,List<int>> statistics)
{
    List<SearchRecord> recordList = new List<SearchRecord>();
    query = GetQuery();
    statistics = new Dictionary<string,List<int>>();
    try
    {
        if (searchIndexList.Count > 0)
        {
            // Search only the explicitly selected indexes.
            foreach (IndexSet indexSet in searchIndexList)
            {
                if (indexSet.Type == IndexTypeEnum.Increment) continue;
                // Per-index query; the combined `query` out-param is only for display.
                Query theQuery = GetQuery(indexSet);
                Source source = indexDict[indexSet];
                Dictionary<string, IndexField> fpDict = source.FieldDict;
                //IndexSearcher searcher = new IndexSearcher(indexSet.Path);
                IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
                ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
                // NOTE(review): this branch prints the combined `query` while the
                // branch below prints `theQuery` — confirm which is intended.
                System.Console.WriteLine(query.ToString());
#endif
                TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                List<int> posList = new List<int>();
                for (int i = 0; i < scoreDocs.Length; i++)
                {
                    Document doc = searcher.Doc(scoreDocs[i].doc);
                    float score = scoreDocs[i].score;
                    if (score < searchSet.MinScore) continue;
                    Field[] fields = new Field[doc.GetFields().Count];
                    doc.GetFields().CopyTo(fields, 0);
                    List<SearchField> sfList = new List<SearchField>();
                    foreach (Field field in fields)
                    {
                        // Attach field metadata when the source defines it.
                        if (fpDict.ContainsKey(field.Name())) sfList.Add(new SearchField(field, fpDict[field.Name()]));
                        else sfList.Add(new SearchField(field));
                    }
                    recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
                    posList.Add(recordList.Count - 1);
                }
                // Record positions under the index caption, de-duplicating
                // captions with a numeric suffix on collision.
                try
                {
                    statistics.Add(indexSet.Caption, posList);
                }
                catch (Exception)
                {
                    int i = 2;
                    while (statistics.ContainsKey(indexSet.Caption + i.ToString())) i++;
                    statistics.Add(indexSet.Caption + i.ToString(), posList);
                }
            }
        }
        else
        {
            // No selection: search every known index.
            foreach (IndexSet indexSet in indexFieldsDict.Keys)
            {
                if (indexSet.Type == IndexTypeEnum.Increment) continue;
                Query theQuery = GetQuery(indexSet);
                Source source = indexDict[indexSet];
                Dictionary<string, IndexField> fpDict = source.FieldDict;
                //IndexSearcher searcher = new IndexSearcher(indexSet.Path);
                IndexSearcher presearcher = new IndexSearcher(indexSet.Path);
                ParallelMultiSearcher searcher = new ParallelMultiSearcher(new IndexSearcher[] { presearcher });
#if DEBUG
                System.Console.WriteLine(theQuery.ToString());
#endif
                TopDocs topDocs = searcher.Search(theQuery.Weight(searcher), null, searchSet.MaxMatches);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                List<int> posList = new List<int>();
                for (int i = 0; i < scoreDocs.Length; i++)
                {
                    Document doc = searcher.Doc(scoreDocs[i].doc);
                    float score = scoreDocs[i].score;
                    if (score < searchSet.MinScore) continue;
                    Field[] fields = new Field[doc.GetFields().Count];
                    doc.GetFields().CopyTo(fields, 0);
                    List<SearchField> sfList = new List<SearchField>();
                    foreach (Field field in fields)
                    {
                        if (fpDict.ContainsKey(field.Name())) sfList.Add(new SearchField(field, fpDict[field.Name()]));
                        else sfList.Add(new SearchField(field));
                    }
                    recordList.Add(new SearchRecord(indexSet, sfList, indexDict[indexSet].PrimaryKey, score));
                    posList.Add(recordList.Count - 1);
                }
                try
                {
                    statistics.Add(indexSet.Caption, posList);
                }
                catch (Exception)
                {
                    int i = 2;
                    while (statistics.ContainsKey(indexSet.Caption + i.ToString())) i++;
                    statistics.Add(indexSet.Caption + i.ToString(), posList);
                }
            }
        }
    }
    catch (Exception e)
    {
        // Best-effort: failures are logged and whatever was collected is returned.
        SupportClass.FileUtil.WriteToLog(SupportClass.LogPath, e.StackTrace.ToString());
    }
    return recordList;
}
/// <summary>Asserts that two queries agree in string form and hash code.</summary>
public static void CheckEqual(Query q1, Query q2)
{
    string first = q1.ToString();
    string second = q2.ToString();
    Assert.AreEqual(first, second);
    Assert.AreEqual(q1.GetHashCode(), q2.GetHashCode());
}
/// <summary>
/// Executes the query, shows elapsed time in the status bar, and fills the
/// results ListView with score, doc id, and the configured index fields.
/// </summary>
/// <param name="q">the parsed query to run</param>
/// <param name="searcher">searcher over the currently open index</param>
internal void _Search(Query q, IndexSearcher searcher)
{
    textParsed.Text = q.ToString();
    DateTime start = DateTime.Now;
    TopDocs hits = searcher.Search(q, null, Int16.MaxValue);
    _luke.ShowStatus(((TimeSpan)(DateTime.Now - start)).TotalMilliseconds.ToString() + " ms");
    listSearch.BeginUpdate();
    listSearch.Items.Clear();
    try
    {
        if (hits == null || hits.totalHits == 0)
        {
            // No hits: ensure a third column exists to hold the localized
            // "no results" message, then show it.
            if (listSearch.Columns.Count < 3)
            {
                int width = listSearch.Width - listSearch.Columns[0].Width - listSearch.Columns[1].Width;
                listSearch.Columns.Add("", width, HorizontalAlignment.Left);
            }
            ListViewItem noResults = new ListViewItem();
            noResults.SubItems.AddRange(new String[] { "", _luke.resources.GetString("NoResults") });
            listSearch.Items.Add(noResults);
            labelSearchRes.Text = "0";
            return;
        }
        labelSearchRes.Text = hits.totalHits.ToString();
        searchedDocIds = new int[hits.scoreDocs.Length];
        for (int i = 0; i < hits.scoreDocs.Length; i++)
        {
            // First column: score scaled to one decimal place (score*100, rounded).
            ListViewItem item = new ListViewItem((Math.Round((double)1000 * hits.scoreDocs[i].score, 1) / 10).ToString());
            item.SubItems.Add(hits.scoreDocs[i].doc.ToString());
            Document doc = searcher.Doc( hits.scoreDocs[i].doc);
            searchedDocIds[i] = hits.scoreDocs[i].doc;
            // One sub-item per configured index field.
            for (int j = 0; j < _indexFields.Length; j++)
            {
                item.SubItems.Add(doc.Get(_indexFields[j]));
            }
            listSearch.Items.Add(item);
        }
        // Remember the last successfully displayed query.
        query = q;
    }
    finally
    {
        listSearch.EndUpdate();
    }
}
/// <summary>
/// Creates an asserter for the given query; the query's display form (using
/// the default field) is cached up front for assertion messages.
/// </summary>
/// <param name="q">the query whose explanations are asserted</param>
/// <param name="defaultFieldName">default field used when rendering the query</param>
/// <param name="s">the searcher to run the query against</param>
/// <param name="deep">whether to verify explanation details recursively</param>
public ExplanationAsserter(Query q, string defaultFieldName, IndexSearcher s, bool deep)
{
    this.q = q;
    this.s = s;
    this.Deep = deep;
    this.d = q.ToString(defaultFieldName);
}
/// <summary>
/// Renders this query as ConstantScore(inner)^boost, where inner is the
/// wrapped query's text for the given field, or the filter's text when no
/// query is wrapped.
/// </summary>
public override string ToString(string field)
{
    string inner = (m_query == null) ? m_filter.ToString() : m_query.ToString(field);
    return "ConstantScore(" + inner + ")" + ToStringUtils.Boost(Boost);
}
/// <summary>Renders the occur prefix followed by the query text.</summary>
public override string ToString()
{
    string prefix = ToString(occur);
    return prefix + query.ToString();
}
/// <summary>
/// Renders the occur prefix followed by the query text.
/// NOTE(review): not declared `override`, so if the enclosing type is a class
/// this hides Object.ToString — confirm that is intentional.
/// </summary>
public string ToString()
{
    string prefix = ToString(occur);
    return prefix + query.ToString();
}
/// <summary> Tests that a query matches an expected set of documents using Hits.
///
/// <p/>
/// Note that when using the Hits API, documents will only be returned
/// if they have a positive normalized score.
/// <p/>
/// </summary>
/// <param name="query">the query to test
/// </param>
/// <param name="searcher">the searcher to test the query against
/// </param>
/// <param name="defaultFieldName">used for displaying the query in assertion messages
/// </param>
/// <param name="results">a list of documentIds that must match the query
/// </param>
/// <seealso cref="Searcher.Search(Query)">
/// </seealso>
/// <seealso cref="checkHitCollector">
/// </seealso>
public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
{
    // NOTE(review): QueryUtils.Check is also called unconditionally at the end
    // of this method, so IndexSearchers are checked twice — confirm intended.
    if (searcher is IndexSearcher)
    {
        QueryUtils.Check(query, searcher);
    }
    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;

    // Expected doc ids, as a sorted set for order-independent comparison.
    SortedSet<int> correct = new SortedSet<int>();
    for (int i = 0; i < results.Length; i++)
    {
        correct.Add(results[i]);
    }
    // Actually returned doc ids.
    SortedSet<int> actual = new SortedSet<int>();
    for (int i = 0; i < hits.Length; i++)
    {
        actual.Add(hits[i].Doc);
    }
    Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
    QueryUtils.Check(query, searcher);
}