// Builds a StandardQueryParser over the supplied analyzer, falling back to a
// simple lowercasing MockAnalyzer when none is given; default operator is OR.
public StandardQueryParser GetParser(Analyzer a)
{
    if (a == null)
    {
        a = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
    }
    StandardQueryParser parser = new StandardQueryParser(a);
    parser.DefaultOperator = Operator.OR;
    return parser;
}
// Verify parsing of a query through a stopping analyzer, both via an
// instance parser configured with multiple fields and via the static
// QueryParserUtil overload; both must render to the same string.
private void assertStopQueryEquals(String qtxt, String expectedRes)
{
    String[] fields = { "b", "t" };
    // LUCENENET TODO: Make this Occur.Should instead of BooleanClause.Occur.Should
    BooleanClause.Occur[] occur = { BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD };
    TestQPHelper.QPTestAnalyzer analyzer = new TestQPHelper.QPTestAnalyzer();

    StandardQueryParser parser = new StandardQueryParser();
    parser.SetMultiFields(fields);
    parser.Analyzer = analyzer;

    Query parsed = parser.Parse(qtxt, null);
    assertEquals(expectedRes, parsed.toString());

    parsed = QueryParserUtil.Parse(qtxt, fields, occur, analyzer);
    assertEquals(expectedRes, parsed.toString());
}
/// <summary>
/// Parses a query which searches on the fields specified.
/// <para/>
/// If x fields are specified, this effectively constructs:
/// <code>
/// (field1:query1) (field2:query2) (field3:query3)...(fieldx:queryx)
/// </code>
/// </summary>
/// <param name="queries">Queries strings to parse</param>
/// <param name="fields">Fields to search on</param>
/// <param name="analyzer">Analyzer to use</param>
/// <exception cref="ArgumentException">
/// if the length of the queries array differs from the length of the
/// fields array
/// </exception>
public static Query Parse(string[] queries, string[] fields, Analyzer analyzer)
{
    if (queries.Length != fields.Length)
    {
        throw new ArgumentException("queries.length != fields.length");
    }

    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = analyzer;

    BooleanQuery result = new BooleanQuery();
    for (int i = 0; i < fields.Length; i++)
    {
        Query fieldQuery = parser.Parse(queries[i], fields[i]);
        // fieldQuery should never be null here; the check is purely defensive.
        // An empty BooleanQuery (e.g. all terms were stop words) is skipped so
        // it does not add a useless clause to the result.
        bool keep = fieldQuery != null
            && (!(fieldQuery is BooleanQuery) || ((BooleanQuery)fieldQuery).GetClauses().Count > 0);
        if (keep)
        {
            result.Add(fieldQuery, BooleanClause.Occur.SHOULD);
        }
    }
    return result;
}
// Verifies that field-specific date resolutions override the default one:
// "month" and "hour" get their own resolution, "default" uses MILLISECOND.
public void TestDateRange()
{
    String startDate = GetLocalizedDate(2002, 1, 1);
    String endDate = GetLocalizedDate(2002, 1, 4);
    //// we use the default Locale/TZ since LuceneTestCase randomizes it
    //Calendar endDateExpected = new GregorianCalendar(TimeZone.getDefault(), Locale.getDefault());
    //endDateExpected.clear();
    //endDateExpected.set(2002, 1, 4, 23, 59, 59);
    //endDateExpected.set(Calendar.MILLISECOND, 999);
    // we use the default Locale/TZ since LuceneTestCase randomizes it
    // (end of day, last millisecond, so the upper bound is inclusive)
    DateTime endDateExpected = new DateTime(2002, 1, 4, 23, 59, 59, 999, new GregorianCalendar());
    String defaultField = "default";
    String monthField = "month";
    String hourField = "hour";
    StandardQueryParser qp = new StandardQueryParser();
    IDictionary<string, DateTools.Resolution?> dateRes = new Dictionary<string, DateTools.Resolution?>();

    // set a field specific date resolution
    dateRes.Put(monthField, DateTools.Resolution.MONTH);
#pragma warning disable 612, 618
    qp.SetDateResolution(dateRes);
#pragma warning restore 612, 618

    // set default date resolution to MILLISECOND
    qp.SetDateResolution(DateTools.Resolution.MILLISECOND);

    // set second field specific date resolution
    dateRes.Put(hourField, DateTools.Resolution.HOUR);
#pragma warning disable 612, 618
    qp.SetDateResolution(dateRes);
#pragma warning restore 612, 618

    // for this field no field specific date resolution has been set,
    // so verify if the default resolution is used
    AssertDateRangeQueryEquals(qp, defaultField, startDate, endDate, endDateExpected/*.getTime()*/, DateTools.Resolution.MILLISECOND);

    // verify if field specific date resolutions are used for these two
    // fields
    AssertDateRangeQueryEquals(qp, monthField, startDate, endDate, endDateExpected/*.getTime()*/, DateTools.Resolution.MONTH);

    AssertDateRangeQueryEquals(qp, hourField, startDate, endDate, endDateExpected/*.getTime()*/, DateTools.Resolution.HOUR);
}
// Verifies boost handling: boosted stop words still parse to non-null
// queries, boosted terms/phrases carry the boost, and a query reduced to
// nothing by stop-word removal ends up empty with the default boost of 1.0.
public void TestBoost()
{
    // analyzer that stops only the single term "on"
    CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.MakeString("on"));
    Analyzer oneStopAnalyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet);

    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = oneStopAnalyzer;

    Query q = parser.Parse("on^1.0", "field");
    assertNotNull(q);

    q = parser.Parse("\"hello\"^2.0", "field");
    assertNotNull(q);
    assertEquals(q.Boost, (float)2.0, (float)0.5);

    q = parser.Parse("hello^2.0", "field");
    assertNotNull(q);
    assertEquals(q.Boost, (float)2.0, (float)0.5);

    q = parser.Parse("\"on\"^1.0", "field");
    assertNotNull(q);

    StandardQueryParser stopParser = new StandardQueryParser();
    stopParser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
    q = stopParser.Parse("the^3", "field");
    // "the" is a stop word so the result is an empty query:
    assertNotNull(q);
    assertEquals("", q.toString());
    assertEquals(1.0f, q.Boost, 0.01f);
}
// Verifies range-query syntax: default rewrite method, open-ended ranges,
// the >, >=, <, <= comparison operators, mixed include/exclude bounds,
// boosted ranges, and ranges combined with other clauses.
public void TestRange()
{
    AssertQueryEquals("[ a TO z]", null, "[a TO z]");
    // term ranges default to the constant-score auto rewrite method
    assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)GetQuery("[ a TO z]", null)).GetRewriteMethod());

    StandardQueryParser qp = new StandardQueryParser();

    qp.MultiTermRewriteMethod = (MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
    assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery)qp.Parse("[ a TO z]", "field")).GetRewriteMethod());

    // test open ranges
    AssertQueryEquals("[ a TO * ]", null, "[a TO *]");
    AssertQueryEquals("[ * TO z ]", null, "[* TO z]");
    AssertQueryEquals("[ * TO * ]", null, "[* TO *]");

    // comparison operators map onto half-open ranges
    AssertQueryEquals("field>=a", null, "[a TO *]");
    AssertQueryEquals("field>a", null, "{a TO *]");
    AssertQueryEquals("field<=a", null, "[* TO a]");
    AssertQueryEquals("field<a", null, "[* TO a}");

    // mixing exclude and include bounds
    AssertQueryEquals("{ a TO z ]", null, "{a TO z]");
    AssertQueryEquals("[ a TO z }", null, "[a TO z}");
    AssertQueryEquals("{ a TO * ]", null, "{a TO *]");
    AssertQueryEquals("[ * TO z }", null, "[* TO z}");

    AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
    AssertQueryEquals("{ a TO z}", null, "{a TO z}");
    AssertQueryEquals("{ a TO z }", null, "{a TO z}");
    AssertQueryEquals("{ a TO z }^2.0", null, "{a TO z}^2.0");
    AssertQueryEquals("[ a TO z] OR bar", null, "[a TO z] bar");
    AssertQueryEquals("[ a TO z] AND bar", null, "+[a TO z] +bar");
    AssertQueryEquals("( bar blar { a TO z}) ", null, "bar blar {a TO z}");
    AssertQueryEquals("gack ( bar blar { a TO z}) ", null, "gack (bar blar {a TO z})");
}
// Verifies the static QueryParserUtil.Parse(query, fields, flags, analyzer)
// overload: per-field MUST / MUST_NOT flags are applied to the expanded
// clauses, and a flags array shorter than the fields array throws.
public void TestStaticMethod2Old()
{
    String[] fields = { "b", "t" };
    BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT };
    StandardQueryParser parser = new StandardQueryParser();
    parser.SetMultiFields(fields);
    parser.Analyzer = (new MockAnalyzer(Random()));

    Query q = QueryParserUtil.Parse("one", fields, flags, new MockAnalyzer(Random()));
    assertEquals("+b:one -t:one", q.toString());

    q = QueryParserUtil.Parse("one two", fields, flags, new MockAnalyzer(Random()));
    assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());

    try
    {
        // flags2 is shorter than fields, so Parse must throw
        BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
        q = QueryParserUtil.Parse("blah", fields, flags2, new MockAnalyzer(Random()));
        fail();
    }
    // catch without a variable: no unused-variable warning, no pragma needed
    catch (ArgumentException)
    {
        // expected exception, array length differs
    }
}
// Indexes one document containing a stop word and verifies that an AND
// query mixing a stop word ("the") and a real term still finds the match.
public void TestStopWordSearching()
{
    Analyzer analyzer = new MockAnalyzer(Random());
    Store.Directory ramDir = NewDirectory();
    IndexWriter writer = new IndexWriter(ramDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
    Document doc = new Document();
    doc.Add(NewTextField("body", "blah the footest blah", Field.Store.NO));
    writer.AddDocument(doc);
    writer.Dispose();

    StandardQueryParser parser = new StandardQueryParser();
    parser.SetMultiFields(new String[] { "body" });
    parser.Analyzer = analyzer;
    parser.DefaultOperator = StandardQueryConfigHandler.Operator.AND;
    Query query = parser.Parse("the footest", null);

    IndexReader reader = DirectoryReader.Open(ramDir);
    IndexSearcher searcher = NewSearcher(reader);
    ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
    assertEquals(1, hits.Length);
    reader.Dispose();
    ramDir.Dispose();
}
/// <summary>
/// Create instance of engine pointing to Directory that has a catalog created with static method QLuceneEngine.CreateCatalog
/// </summary>
/// <param name="directory">A Lucene Directory object with the catalog you want to search over.</param>
public QLuceneEngine(Directory directory)
{
    _directory = directory;
    // open a searcher over the supplied directory and a parser bound to the
    // 4.8 standard analyzer
    _searcher = new IndexSearcher(DirectoryReader.Open(directory));
    _parser = new StandardQueryParser(new StandardAnalyzer(Lucene.Net.Util.LuceneVersion.LUCENE_48));
}
// Exercises an analyzer that emits multiple tokens at one position: the
// extra tokens appear as parenthesized groups in term queries and inside
// phrases, across explicit fields, slop and boost settings, and under both
// the default (OR) and AND operators.
public void TestMultiAnalyzer()
{
    StandardQueryParser qp = new StandardQueryParser();
    qp.Analyzer = (new MultiAnalyzer());

    // trivial, no multiple tokens:
    assertEquals("foo", qp.Parse("foo", "").toString());
    assertEquals("foo", qp.Parse("\"foo\"", "").toString());
    assertEquals("foo foobar", qp.Parse("foo foobar", "").toString());
    assertEquals("\"foo foobar\"", qp.Parse("\"foo foobar\"", "").toString());
    assertEquals("\"foo foobar blah\"", qp.Parse("\"foo foobar blah\"", "").toString());

    // two tokens at the same position:
    assertEquals("(multi multi2) foo", qp.Parse("multi foo", "").toString());
    assertEquals("foo (multi multi2)", qp.Parse("foo multi", "").toString());
    assertEquals("(multi multi2) (multi multi2)", qp.Parse("multi multi", "").toString());
    assertEquals("+(foo (multi multi2)) +(bar (multi multi2))", qp.Parse("+(foo multi) +(bar multi)", "").toString());
    assertEquals("+(foo (multi multi2)) field:\"bar (multi multi2)\"", qp.Parse("+(foo multi) field:\"bar multi\"", "").toString());

    // phrases:
    assertEquals("\"(multi multi2) foo\"", qp.Parse("\"multi foo\"", "").toString());
    assertEquals("\"foo (multi multi2)\"", qp.Parse("\"foo multi\"", "").toString());
    assertEquals("\"foo (multi multi2) foobar (multi multi2)\"", qp.Parse("\"foo multi foobar multi\"", "").toString());

    // fields:
    assertEquals("(field:multi field:multi2) field:foo", qp.Parse("field:multi field:foo", "").toString());
    assertEquals("field:\"(multi multi2) foo\"", qp.Parse("field:\"multi foo\"", "").toString());

    // three tokens at one position:
    assertEquals("triplemulti multi3 multi2", qp.Parse("triplemulti", "").toString());
    assertEquals("foo (triplemulti multi3 multi2) foobar", qp.Parse("foo triplemulti foobar", "").toString());

    // phrase with non-default slop:
    assertEquals("\"(multi multi2) foo\"~10", qp.Parse("\"multi foo\"~10", "").toString());

    // phrase with non-default boost:
    assertEquals("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2", "").toString());

    // phrase after changing default slop
#pragma warning disable 612, 618
    qp.SetDefaultPhraseSlop(99);
#pragma warning restore 612, 618
    assertEquals("\"(multi multi2) foo\"~99 bar", qp.Parse("\"multi foo\" bar", "").toString());
    assertEquals("\"(multi multi2) foo\"~99 \"foo bar\"~2", qp.Parse("\"multi foo\" \"foo bar\"~2", "").toString());
#pragma warning disable 612, 618
    qp.SetDefaultPhraseSlop(0);
#pragma warning restore 612, 618

    // non-default operator:
    qp.DefaultOperator = (StandardQueryConfigHandler.Operator.AND);
    assertEquals("+(multi multi2) +foo", qp.Parse("multi foo", "").toString());
}
// "*:*" must parse to MatchAllDocsQuery, both bare, parenthesized, and
// as individual clauses inside a boolean query.
public void TestMatchAllDocs()
{
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
    assertEquals(new MatchAllDocsQuery(), parser.Parse("*:*", "field"));
    assertEquals(new MatchAllDocsQuery(), parser.Parse("(*:*)", "field"));
    BooleanQuery booleanQuery = (BooleanQuery)parser.Parse("+*:* -*:*", "field");
    assertTrue(booleanQuery.GetClauses()[0].Query is MatchAllDocsQuery);
    assertTrue(booleanQuery.GetClauses()[1].Query is MatchAllDocsQuery);
}
// Parses the query against the "date" field under the English culture and
// asserts the number of hits the searcher returns.
private void AssertHits(int expected, String query, IndexSearcher @is)
{
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
    // (Locale.ENGLISH) — LUCENENET TODO: Fix API - we probably don't want to set Culture to a property
    parser.Locale = new CultureInfo("en");
    Query parsed = parser.Parse(query, "date");
    ScoreDoc[] hits = @is.Search(parsed, null, 1000).ScoreDocs;
    assertEquals(expected, hits.Length);
}
// Stop-word handling: a query made entirely of stop words collapses to an
// empty BooleanQuery, one surviving term collapses to a TermQuery, and a
// mixed complex query keeps only the clauses with real terms.
public void TestStopwords()
{
    StandardQueryParser parser = new StandardQueryParser();
    CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").ToAutomaton());
    parser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, stopSet);

    // both terms are stop words -> empty BooleanQuery
    Query result = parser.Parse("a:the OR a:foo", "a");
    assertNotNull("result is null and it shouldn't be", result);
    assertTrue("result is not a BooleanQuery", result is BooleanQuery);
    assertTrue(((BooleanQuery)result).Clauses.size() + " does not equal: " + 0,
        ((BooleanQuery)result).Clauses.size() == 0);

    // one real term remains -> single TermQuery
    result = parser.Parse("a:woo OR a:the", "a");
    assertNotNull("result is null and it shouldn't be", result);
    assertTrue("result is not a TermQuery", result is TermQuery);

    // stop-word-only sub-clause drops out, leaving two boolean clauses
    result = parser.Parse("(fieldX:xxxxx OR fieldy:xxxxxxxx)^2 AND (fieldx:the OR fieldy:foo)", "a");
    assertNotNull("result is null and it shouldn't be", result);
    assertTrue("result is not a BooleanQuery", result is BooleanQuery);
    if (VERBOSE) Console.WriteLine("Result: " + result);
    assertTrue(((BooleanQuery)result).Clauses.size() + " does not equal: " + 2,
        ((BooleanQuery)result).Clauses.size() == 2);
}
// With position increments enabled, stop words removed from a phrase leave
// gaps: the surviving terms keep their original (post-stop-word) positions.
public void TestPositionIncrement()
{
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
    parser.EnablePositionIncrements = true;

    String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
    // stop words sit at positions 0 2 5 7 8, so the kept terms land at:
    int[] expectedPositions = { 1, 3, 4, 6, 9 };
    PhraseQuery pq = (PhraseQuery)parser.Parse(qtxt, "a");

    Term[] terms = pq.Terms;
    int[] positions = pq.Positions;
    for (int i = 0; i < terms.Length; i++)
    {
        assertEquals("term " + i + " = " + terms[i] + " has wrong term-position!", expectedPositions[i], positions[i]);
    }
}
// Verifies /regex/ query syntax: lowercasing of expanded terms, boosts,
// rewrite-method propagation, escaped slashes inside the pattern, and the
// distinction between regexps and quoted/escaped plain terms.
public void TestRegexps()
{
    StandardQueryParser qp = new StandardQueryParser();
    String df = "field";
    RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
    assertEquals(q, qp.Parse("/[a-z][123]/", df));

    // with lowercasing enabled the uppercase pattern matches the same query
    qp.LowercaseExpandedTerms = (true);
    assertEquals(q, qp.Parse("/[A-Z][123]/", df));

    q.Boost = (0.5f);
    assertEquals(q, qp.Parse("/[A-Z][123]/^0.5", df));

    qp.MultiTermRewriteMethod = (MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
    // LUCENENET TODO: Inconsistent API between RegexpQuery and StandardQueryParser
    q.SetRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
    assertTrue(qp.Parse("/[A-Z][123]/^0.5", df) is RegexpQuery);
    assertEquals(q, qp.Parse("/[A-Z][123]/^0.5", df));
    assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((RegexpQuery)qp.Parse("/[A-Z][123]/^0.5", df)).GetRewriteMethod());
    qp.MultiTermRewriteMethod = (MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT);

    // escaped slashes/stars stay inside the regexp body
    Query escaped = new RegexpQuery(new Term("field", "[a-z]\\/[123]"));
    assertEquals(escaped, qp.Parse("/[a-z]\\/[123]/", df));
    Query escaped2 = new RegexpQuery(new Term("field", "[a-z]\\*[123]"));
    assertEquals(escaped2, qp.Parse("/[a-z]\\*[123]/", df));

    BooleanQuery complex = new BooleanQuery();
    complex.Add(new RegexpQuery(new Term("field", "[a-z]\\/[123]")), BooleanClause.Occur.MUST);
    complex.Add(new TermQuery(new Term("path", "/etc/init.d/")), BooleanClause.Occur.MUST);
    complex.Add(new TermQuery(new Term("field", "/etc/init[.]d/lucene/")), BooleanClause.Occur.SHOULD);
    assertEquals(complex, qp.Parse("/[a-z]\\/[123]/ AND path:\"/etc/init.d/\" OR \"/etc\\/init\\[.\\]d/lucene/\" ", df));

    Query re = new RegexpQuery(new Term("field", "http.*"));
    assertEquals(re, qp.Parse("field:/http.*/", df));
    assertEquals(re, qp.Parse("/http.*/", df));

    re = new RegexpQuery(new Term("field", "http~0.5"));
    assertEquals(re, qp.Parse("field:/http~0.5/", df));
    assertEquals(re, qp.Parse("/http~0.5/", df));

    re = new RegexpQuery(new Term("field", "boo"));
    assertEquals(re, qp.Parse("field:/boo/", df));
    assertEquals(re, qp.Parse("/boo/", df));

    // quoted or escaped slashes are plain terms, not regexps
    assertEquals(new TermQuery(new Term("field", "/boo/")), qp.Parse("\"/boo/\"", df));
    assertEquals(new TermQuery(new Term("field", "/boo/")), qp.Parse("\\/boo\\/", df));

    BooleanQuery two = new BooleanQuery();
    two.Add(new RegexpQuery(new Term("field", "foo")), BooleanClause.Occur.SHOULD);
    two.Add(new RegexpQuery(new Term("field", "bar")), BooleanClause.Occur.SHOULD);
    assertEquals(two, qp.Parse("field:/foo/ field:/bar/", df));
    assertEquals(two, qp.Parse("/foo/ /bar/", df));
}
// "A AND B OR C AND D" must parse identically to requiring all four terms
// (documents the parser's flat AND/OR precedence behavior).
public void TestPrecedence()
{
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
    Query andOrQuery = parser.Parse("A AND B OR C AND D", "field");
    Query allRequiredQuery = parser.Parse("+A +B +C +D", "field");
    assertEquals(andOrQuery, allRequiredQuery);
}
// Forces an artificially low clause limit so that parsing three SHOULD
// clauses overflows it and raises a QueryNodeException.
public void TestBooleanQuery()
{
    BooleanQuery.MaxClauseCount = (2);
    try
    {
        StandardQueryParser qp = new StandardQueryParser();
        qp.Analyzer = (new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));

        qp.Parse("one two three", "field");
        fail("ParseException expected due to too many boolean clauses");
    }
    // catch without a variable: no unused-variable warning, no pragma needed
    catch (QueryNodeException)
    {
        // too many boolean clauses, so ParseException is expected
    }
}
// Asserts both forms of a date range query on the given field: the
// inclusive form [start TO end] (whose upper bound is rendered from the
// rounded-up endDateInclusive) and the exclusive form {start TO end}
// (whose upper bound is rendered from the raw end date string).
public void AssertDateRangeQueryEquals(StandardQueryParser qp, String field,
    String startDate, String endDate, DateTime endDateInclusive,
    DateTools.Resolution resolution)
{
    String escapedStart = EscapeDateString(startDate);
    String escapedEnd = EscapeDateString(endDate);

    AssertQueryEquals(qp, field,
        field + ":[" + escapedStart + " TO " + escapedEnd + "]",
        "[" + GetDate(startDate, resolution) + " TO " + GetDate(endDateInclusive, resolution) + "]");

    AssertQueryEquals(qp, field,
        field + ":{" + escapedStart + " TO " + escapedEnd + "}",
        "{" + GetDate(startDate, resolution) + " TO " + GetDate(endDate, resolution) + "}");
}
// A quoted term analyzed by CannedAnalyzer must parse to a MultiPhraseQuery
// and match the single indexed document.
public void TestMultiPhraseQuery()
{
    Store.Directory dir = NewDirectory();
    IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new CannedAnalyzer()));
    Document doc = new Document();
    doc.Add(NewTextField("field", "", Field.Store.NO));
    writer.AddDocument(doc);
    IndexReader reader = DirectoryReader.Open(writer, true);
    IndexSearcher searcher = NewSearcher(reader);

    Query q = new StandardQueryParser(new CannedAnalyzer()).Parse("\"a\"", "field");
    assertTrue(q is MultiPhraseQuery);
    assertEquals(1, searcher.Search(q, 10).TotalHits);
    reader.Dispose();
    writer.Dispose();
    dir.Dispose();
}
// An escaped "\?" stays a literal '?' in the wildcard term while the bare
// '?' remains a single-character wildcard.
public void TestEscapedWildcard()
{
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
    WildcardQuery expected = new WildcardQuery(new Term("field", "foo\\?ba?r"));
    assertEquals(expected, parser.Parse("foo\\?ba?r", "field"));
}
// A bare /regex/ with multiple default fields expands into one RegexpQuery
// clause per field; an explicit field produces a single RegexpQuery.
public void TestRegexQueryParsing()
{
    String[] fields = { "b", "t" };

    StandardQueryParser parser = new StandardQueryParser();
    parser.SetMultiFields(fields);
    parser.DefaultOperator = StandardQueryConfigHandler.Operator.AND;
    parser.Analyzer = new MockAnalyzer(Random());

    BooleanQuery expected = new BooleanQuery();
    //TODO spezification? was "MUST"
    expected.Add(new BooleanClause(new RegexpQuery(new Term("b", "ab.+")), BooleanClause.Occur.SHOULD));
    //TODO spezification? was "MUST"
    expected.Add(new BooleanClause(new RegexpQuery(new Term("t", "ab.+")), BooleanClause.Occur.SHOULD));
    assertEquals(expected, parser.Parse("/ab.+/", null));

    RegexpQuery expectedRegexp = new RegexpQuery(new Term("test", "[abc]?[0-9]"));
    assertEquals(expectedRegexp, parser.Parse("test:/[abc]?[0-9]/", null));
}
// PosIncrementAnalyzer drops the leading stop word from term queries but
// leaves a position hole (rendered '?') at the start of phrase queries.
public void TestPosIncrementAnalyzer()
{
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = new PosIncrementAnalyzer();

    assertEquals("quick brown", parser.Parse("the quick brown", "").toString());
    assertEquals("\"? quick brown\"", parser.Parse("\"the quick brown\"", "").toString());

    assertEquals("quick brown fox", parser.Parse("the quick brown fox", "").toString());
    assertEquals("\"? quick brown fox\"", parser.Parse("\"the quick brown fox\"", "").toString());
}
// Multi-field expansion: with no explicit field every clause is duplicated
// over "b" and "t", preserving required/prohibited flags, boosts,
// fuzziness, wildcards, ranges, phrases and slop; explicitly-fielded terms
// are left untouched; finally the same under the AND operator.
public void TestSimple()
{
    String[] fields = { "b", "t" };
    StandardQueryParser mfqp = new StandardQueryParser();
    mfqp.SetMultiFields(fields);
    mfqp.Analyzer = (new MockAnalyzer(Random()));

    Query q = mfqp.Parse("one", null);
    assertEquals("b:one t:one", q.toString());

    q = mfqp.Parse("one two", null);
    assertEquals("(b:one t:one) (b:two t:two)", q.toString());

    q = mfqp.Parse("+one +two", null);
    assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());

    q = mfqp.Parse("+one -two -three", null);
    assertEquals("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.toString());

    q = mfqp.Parse("one^2 two", null);
    assertEquals("((b:one t:one)^2.0) (b:two t:two)", q.toString());

    q = mfqp.Parse("one~ two", null);
    assertEquals("(b:one~2 t:one~2) (b:two t:two)", q.toString());

    q = mfqp.Parse("one~0.8 two^2", null);
    assertEquals("(b:one~0 t:one~0) ((b:two t:two)^2.0)", q.toString());

    q = mfqp.Parse("one* two*", null);
    assertEquals("(b:one* t:one*) (b:two* t:two*)", q.toString());

    q = mfqp.Parse("[a TO c] two", null);
    assertEquals("(b:[a TO c] t:[a TO c]) (b:two t:two)", q.toString());

    q = mfqp.Parse("w?ldcard", null);
    assertEquals("b:w?ldcard t:w?ldcard", q.toString());

    q = mfqp.Parse("\"foo bar\"", null);
    assertEquals("b:\"foo bar\" t:\"foo bar\"", q.toString());

    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"", null);
    assertEquals("(b:\"aa bb cc\" t:\"aa bb cc\") (b:\"dd ee\" t:\"dd ee\")", q.toString());

    q = mfqp.Parse("\"foo bar\"~4", null);
    assertEquals("b:\"foo bar\"~4 t:\"foo bar\"~4", q.toString());

    // LUCENE-1213: QueryParser was ignoring slop when phrase
    // had a field.
    q = mfqp.Parse("b:\"foo bar\"~4", null);
    assertEquals("b:\"foo bar\"~4", q.toString());

    // make sure that terms which have a field are not touched:
    q = mfqp.Parse("one f:two", null);
    assertEquals("(b:one t:one) f:two", q.toString());

    // AND mode:
    mfqp.DefaultOperator = (StandardQueryConfigHandler.Operator.AND);
    q = mfqp.Parse("one two", null);
    assertEquals("+(b:one t:one) +(b:two t:two)", q.toString());
    q = mfqp.Parse("\"aa bb cc\" \"dd ee\"", null);
    assertEquals("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.toString());
}
// Per-field boosts (b^5, t^10) are applied to every expanded clause and
// multiply with per-term boosts.
public void TestBoostsSimple()
{
    // LUCENENET TODO: make this non-nullable..?
    IDictionary<String, float?> boosts = new Dictionary<String, float?>();
    boosts.Put("b", 5);
    boosts.Put("t", 10);
    String[] fields = { "b", "t" };

    StandardQueryParser parser = new StandardQueryParser();
    parser.SetMultiFields(fields);
    parser.FieldsBoost = boosts;
    parser.Analyzer = new MockAnalyzer(Random());

    // Check for simple
    Query q = parser.Parse("one", null);
    assertEquals("b:one^5.0 t:one^10.0", q.toString());

    // Check for AND
    q = parser.Parse("one AND two", null);
    assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.toString());

    // Check for OR
    q = parser.Parse("one OR two", null);
    assertEquals("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.toString());

    // Check for AND and a field
    q = parser.Parse("one AND two AND foo:test", null);
    assertEquals("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.toString());

    // per-term boosts wrap the per-field-boosted groups
    q = parser.Parse("one^3 AND two^4", null);
    assertEquals("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.toString());
}
// One-time fixture setup: picks a random locale/time zone/date style that
// survives a date-format round-trip sanity check (up to 100 attempts),
// draws non-zero random values for each numeric type, wires per-field
// NumericConfigs into the parser, and indexes documents carrying those
// values.
public void BeforeClass()
{
    ANALYZER = new MockAnalyzer(Random());

    qp = new StandardQueryParser(ANALYZER);

    HashMap<String, /*Number*/object> randomNumberMap = new HashMap<string, object>();

    /*SimpleDateFormat*/
    string dateFormat;
    long randomDate;
    bool dateFormatSanityCheckPass;
    int count = 0;
    do
    {
        if (count > 100)
        {
            fail("This test has problems to find a sane random DateFormat/NumberFormat. Stopped trying after 100 iterations.");
        }

        dateFormatSanityCheckPass = true;
        LOCALE = randomLocale(Random());
        TIMEZONE = randomTimeZone(Random());
        DATE_STYLE = randomDateStyle(Random());
        TIME_STYLE = randomDateStyle(Random());

        //// assumes localized date pattern will have at least year, month, day,
        //// hour, minute
        //dateFormat = (SimpleDateFormat)DateFormat.getDateTimeInstance(
        //    DATE_STYLE, TIME_STYLE, LOCALE);
        //// not all date patterns includes era, full year, timezone and second,
        //// so we add them here
        //dateFormat.applyPattern(dateFormat.toPattern() + " G s Z yyyy");
        //dateFormat.setTimeZone(TIMEZONE);
        DATE_FORMAT = new NumberDateFormat(DATE_STYLE, TIME_STYLE, LOCALE) { TimeZone = TIMEZONE };
        dateFormat = DATE_FORMAT.GetDateFormat();

        do
        {
            randomDate = Random().nextLong();

            // prune date value so it doesn't pass in insane values to some
            // calendars.
            randomDate = randomDate % 3400000000000L;

            // truncate to second
            randomDate = (randomDate / 1000L) * 1000L;

            // only positive values
            randomDate = Math.Abs(randomDate);
        } while (randomDate == 0L);

        // the chosen format must round-trip the random date, zero, and its negation
        dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, randomDate);
        dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, 0);
        dateFormatSanityCheckPass &= checkDateFormatSanity(dateFormat, -randomDate);

        count++;
    } while (!dateFormatSanityCheckPass);

    //NUMBER_FORMAT = NumberFormat.getNumberInstance(LOCALE);
    //NUMBER_FORMAT.setMaximumFractionDigits((Random().nextInt() & 20) + 1);
    //NUMBER_FORMAT.setMinimumFractionDigits((Random().nextInt() & 20) + 1);
    //NUMBER_FORMAT.setMaximumIntegerDigits((Random().nextInt() & 20) + 1);
    //NUMBER_FORMAT.setMinimumIntegerDigits((Random().nextInt() & 20) + 1);
    NUMBER_FORMAT = new NumberFormat(LOCALE);

    double randomDouble;
    long randomLong;
    int randomInt;
    float randomFloat;

    // draw a non-zero normalized random value for each numeric type
    while ((randomLong = Convert.ToInt64(NormalizeNumber(Math.Abs(Random().nextLong())))) == 0L)
        ;
    while ((randomDouble = Convert.ToDouble(NormalizeNumber(Math.Abs(Random().NextDouble())))) == 0.0)
        ;
    while ((randomFloat = Convert.ToSingle(NormalizeNumber(Math.Abs(Random().nextFloat())))) == 0.0f)
        ;
    while ((randomInt = Convert.ToInt32(NormalizeNumber(Math.Abs(Random().nextInt())))) == 0)
        ;

    randomNumberMap.Put(FieldType.NumericType.LONG.ToString(), randomLong);
    randomNumberMap.Put(FieldType.NumericType.INT.ToString(), randomInt);
    randomNumberMap.Put(FieldType.NumericType.FLOAT.ToString(), randomFloat);
    randomNumberMap.Put(FieldType.NumericType.DOUBLE.ToString(), randomDouble);
    randomNumberMap.Put(DATE_FIELD_NAME, randomDate);

    RANDOM_NUMBER_MAP = Collections.UnmodifiableMap(randomNumberMap);

    directory = NewDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(Random(), directory,
        NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))
            .SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000))
            .SetMergePolicy(NewLogMergePolicy()));

    Document doc = new Document();

    HashMap<String, NumericConfig> numericConfigMap = new HashMap<String, NumericConfig>();
    HashMap<String, Field> numericFieldMap = new HashMap<String, Field>();
    qp.NumericConfigMap = (numericConfigMap);

    // one stored, indexed numeric field (and config) per numeric type
    foreach (FieldType.NumericType type in Enum.GetValues(typeof(FieldType.NumericType)))
    {
        numericConfigMap.Put(type.ToString(), new NumericConfig(PRECISION_STEP, NUMBER_FORMAT, type));

        FieldType ft2 = new FieldType(IntField.TYPE_NOT_STORED);
        ft2.NumericTypeValue = (type);
        ft2.Stored = (true);
        ft2.NumericPrecisionStep = (PRECISION_STEP);
        ft2.Freeze();
        Field field;

        switch (type)
        {
            case FieldType.NumericType.INT:
                field = new IntField(type.ToString(), 0, ft2);
                break;
            case FieldType.NumericType.FLOAT:
                field = new FloatField(type.ToString(), 0.0f, ft2);
                break;
            case FieldType.NumericType.LONG:
                field = new LongField(type.ToString(), 0L, ft2);
                break;
            case FieldType.NumericType.DOUBLE:
                field = new DoubleField(type.ToString(), 0.0, ft2);
                break;
            default:
                fail();
                field = null;
                break;
        }
        numericFieldMap.Put(type.ToString(), field);
        doc.Add(field);
    }

    // dates are stored as longs formatted/parsed through DATE_FORMAT
    numericConfigMap.Put(DATE_FIELD_NAME, new NumericConfig(PRECISION_STEP, DATE_FORMAT, FieldType.NumericType.LONG));
    FieldType ft = new FieldType(LongField.TYPE_NOT_STORED);
    ft.Stored = (true);
    ft.NumericPrecisionStep = (PRECISION_STEP);
    LongField dateField = new LongField(DATE_FIELD_NAME, 0L, ft);
    numericFieldMap.Put(DATE_FIELD_NAME, dateField);
    doc.Add(dateField);

    // index one document per NumberType variant, reusing the same Document
    // with updated field values
    foreach (NumberType numberType in Enum.GetValues(typeof(NumberType)))
    {
        setFieldValues(numberType, numericFieldMap);
        if (VERBOSE) Console.WriteLine("Indexing document: " + doc);
        writer.AddDocument(doc);
    }

    reader = writer.Reader;
    searcher = NewSearcher(reader);
    writer.Dispose();
}
// When the analyzer yields no tokens for field f1, analyzed terms expand
// only over f2/f3; wildcard, fuzzy and range terms bypass analysis and
// still expand over all three fields.
public void TestAnalyzerReturningNull()
{
    String[] fields = new String[] { "f1", "f2", "f3" };
    StandardQueryParser parser = new StandardQueryParser();
    parser.SetMultiFields(fields);
    parser.Analyzer = new AnalyzerReturningNull();

    Query q = parser.Parse("bla AND blo", null);
    assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());

    // the following queries are not affected as their terms are not
    // analyzed anyway:
    q = parser.Parse("bla*", null);
    assertEquals("f1:bla* f2:bla* f3:bla*", q.toString());

    q = parser.Parse("bla~", null);
    assertEquals("f1:bla~2 f2:bla~2 f3:bla~2", q.toString());

    q = parser.Parse("[a TO c]", null);
    assertEquals("f1:[a TO c] f2:[a TO c] f3:[a TO c]", q.toString());
}
// Wildcard, prefix and term-range queries must all default to the
// constant-score auto rewrite method.
public void TestConstantScoreAutoRewrite()
{
    StandardQueryParser parser = new StandardQueryParser(new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false));

    Query q = parser.Parse("foo*bar", "field");
    assertTrue(q is WildcardQuery);
    assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery)q).GetRewriteMethod());

    q = parser.Parse("foo*", "field");
    assertTrue(q is PrefixQuery);
    assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery)q).GetRewriteMethod());

    q = parser.Parse("[a TO z]", "field");
    assertTrue(q is TermRangeQuery);
    assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery)q).GetRewriteMethod());
}
// Parses the query against the given field and fails with a descriptive
// message when its string rendering differs from the expected result.
public void AssertQueryEquals(StandardQueryParser qp, String field, String query, String result)
{
    Query parsed = qp.Parse(query, field);
    String rendered = parsed.ToString(field);
    if (!rendered.equals(result))
    {
        fail("Query /" + query + "/ yielded /" + rendered + "/, expecting /" + result + "/");
    }
}
/// <summary>
/// Convenience wrapper: parses <paramref name="query"/> against the test's
/// DefaultField using the supplied parser.
/// </summary>
public Query Parse(String query, StandardQueryParser qp)
{
    return qp.Parse(query, DefaultField);
}
// Parses the query with the Default Operator set to AND ("DOA"), falling
// back to a simple lowercasing MockAnalyzer when none is supplied.
public Query GetQueryDOA(String query, Analyzer a)
{
    Analyzer analyzer = a != null ? a : new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
    StandardQueryParser parser = new StandardQueryParser();
    parser.Analyzer = analyzer;
    parser.DefaultOperator = StandardQueryConfigHandler.Operator.AND;
    return parser.Parse(query, "field");
}
// Releases the static fixtures created in BeforeClass; the reader is
// disposed before its backing directory.
public static void AfterClass()
{
    searcher = null;
    reader.Dispose();
    reader = null;
    directory.Dispose();
    directory = null;
    qp = null;
}