/// <summary>
/// Replaces the open-range token ("*") on either bound of a
/// <see cref="TermRangeQueryNode"/> with an empty string, unless the token's
/// first character was escaped by the user (an escaped "\*" is a literal term).
/// Non-range nodes pass through unchanged.
/// </summary>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is TermRangeQueryNode rangeNode)
    {
        FieldQueryNode lowerNode = (FieldQueryNode)rangeNode.LowerBound;
        FieldQueryNode upperNode = (FieldQueryNode)rangeNode.UpperBound;
        ICharSequence lowerText = lowerNode.Text;
        ICharSequence upperText = upperNode.Text;

        // An unescaped "*" bound means "open ended"; represent it as empty text.
        bool upperIsOpen = OPEN_RANGE_TOKEN.Equals(upperNode.GetTextAsString(), StringComparison.Ordinal)
            && !(upperText is UnescapedCharSequence escapedUpper && escapedUpper.WasEscaped(0));
        if (upperIsOpen)
        {
            upperText = "".ToCharSequence();
        }

        bool lowerIsOpen = OPEN_RANGE_TOKEN.Equals(lowerNode.GetTextAsString(), StringComparison.Ordinal)
            && !(lowerText is UnescapedCharSequence escapedLower && escapedLower.WasEscaped(0));
        if (lowerIsOpen)
        {
            lowerText = "".ToCharSequence();
        }

        lowerNode.Text = lowerText;
        upperNode.Text = upperText;
    }

    return node;
}
/// <summary>
/// Builds a single-term <see cref="TermQuery"/> from the field and text of the
/// given <see cref="FieldQueryNode"/>.
/// </summary>
public virtual Query Build(IQueryNode queryNode)
{
    FieldQueryNode fieldNode = (FieldQueryNode)queryNode;
    Term term = new Term(fieldNode.GetFieldAsString(), fieldNode.GetTextAsString());
    return new TermQuery(term);
}
/// <summary>
/// Converts a field/fuzzy node whose text contains wildcard characters into a
/// <see cref="PrefixWildcardQueryNode"/> or <see cref="WildcardQueryNode"/>,
/// reproducing the old Lucene parser behavior.
/// </summary>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    // The old Lucene parser ignores FuzzyQueryNode that are also
    // PrefixWildcardQueryNode or WildcardQueryNode; we do the same here,
    // and also ignore empty terms.
    if (node is FieldQueryNode || node is FuzzyQueryNode)
    {
        FieldQueryNode fqn = (FieldQueryNode)node;
        string text = fqn.Text.ToString();

        // Do not process wildcards for TermRangeQueryNode children and
        // QuotedFieldQueryNode, to reproduce the old parser behavior;
        // empty terms are ignored as well.
        bool skip = fqn.Parent is TermRangeQueryNode
            || fqn is QuotedFieldQueryNode
            || text.Length <= 0;
        if (skip)
        {
            return node;
        }

        // Simulate the old Lucene parser behavior for wildcards.
        if (IsPrefixWildcard(text))
        {
            return new PrefixWildcardQueryNode(fqn);
        }
        if (IsWildcard(text))
        {
            return new WildcardQueryNode(fqn);
        }
    }

    return node;
}
/// <summary>
/// Builds a <see cref="TermRangeQuery"/> from a <see cref="TermRangeQueryNode"/>.
/// Empty bound text is translated to <c>null</c> (open-ended bound), and any
/// rewrite method tagged by <see cref="MultiTermRewriteMethodProcessor"/> is applied.
/// </summary>
public virtual Query Build(IQueryNode queryNode)
{
    TermRangeQueryNode rangeNode = (TermRangeQueryNode)queryNode;
    FieldQueryNode upper = (FieldQueryNode)rangeNode.UpperBound;
    FieldQueryNode lower = (FieldQueryNode)rangeNode.LowerBound;

    string field = StringUtils.ToString(rangeNode.Field);

    // Empty bound text means an open-ended range; TermRangeQuery expects null.
    string lowerText = lower.GetTextAsString();
    string upperText = upper.GetTextAsString();
    if (lowerText.Length == 0)
    {
        lowerText = null;
    }
    if (upperText.Length == 0)
    {
        upperText = null;
    }

    TermRangeQuery rangeQuery = TermRangeQuery.NewStringRange(
        field, lowerText, upperText,
        rangeNode.IsLowerInclusive, rangeNode.IsUpperInclusive);

    // Honor any rewrite method a previous processor tagged onto the node.
    MultiTermQuery.RewriteMethod method = (MultiTermQuery.RewriteMethod)queryNode
        .GetTag(MultiTermRewriteMethodProcessor.TAG_ID);
    if (method != null)
    {
        rangeQuery.MultiTermRewriteMethod = method;
    }

    return rangeQuery;
}
public void TestAddChildren()
{
    // Adding a list of children must append to the existing child list,
    // not replace it.
    IQueryNode nodeA = new FieldQueryNode("foo", "A", 0, 1);
    IQueryNode nodeB = new FieldQueryNode("foo", "B", 1, 2);

    BooleanQueryNode bq = new BooleanQueryNode(Arrays.AsList(nodeA));
    bq.Add(Arrays.AsList(nodeB));

    assertEquals(2, bq.GetChildren().size());
}
public void TestTags()
{
    IQueryNode node = new FieldQueryNode("foo", "A", 0, 1);

    // Tag lookups ignore case: set with one casing, query with another.
    node.SetTag("TaG", new Object());
    assertTrue(node.TagMap.size() > 0);
    assertTrue(node.ContainsTag("tAg"));
    assertTrue(node.GetTag("tAg") != null);
}
/// <summary>
/// Rewrites a "*:*" field node (any field, any term) into a
/// <see cref="MatchAllDocsQueryNode"/>; every other node passes through.
/// </summary>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is FieldQueryNode fqn
        && fqn.Field.ToString().Equals("*")
        && fqn.Text.ToString().Equals("*"))
    {
        return new MatchAllDocsQueryNode();
    }

    return node;
}
public void TestRemoveFromParent()
{
    BooleanQueryNode booleanNode = new BooleanQueryNode(Collections.EmptyList<IQueryNode>());
    FieldQueryNode fieldNode = new FieldQueryNode("foo", "A", 0, 1);

    // A freshly created node has no parent.
    assertNull(fieldNode.Parent);

    // Adding sets the parent; RemoveFromParent clears it again.
    booleanNode.Add(fieldNode);
    assertNotNull(fieldNode.Parent);

    fieldNode.RemoveFromParent();
    assertNull(fieldNode.Parent);

    // Re-adding re-parents; replacing the child list detaches the old children.
    booleanNode.Add(fieldNode);
    assertNotNull(fieldNode.Parent);

    booleanNode.Set(Collections.EmptyList<IQueryNode>());
    assertNull(fieldNode.Parent);
}
/// <summary>
/// Builds a <see cref="MultiPhraseQuery"/> from the node's children, grouping
/// terms that share the same position increment (synonyms at one position).
/// Each child must have been tagged with its <see cref="TermQuery"/> by the
/// query tree builder.
/// </summary>
public virtual Query Build(IQueryNode queryNode)
{
    MultiPhraseQueryNode phraseNode = (MultiPhraseQueryNode)queryNode;
    MultiPhraseQuery phraseQuery = new MultiPhraseQuery();
    IList<IQueryNode> children = phraseNode.GetChildren();

    if (children != null)
    {
        // Sorted so positions are added to the query in increasing order.
        IDictionary<int?, List<Term>> positionTermMap = new SortedDictionary<int?, List<Term>>();

        foreach (IQueryNode child in children)
        {
            FieldQueryNode termNode = (FieldQueryNode)child;
            // The TermQuery for each child was attached by the query tree builder.
            TermQuery termQuery = (TermQuery)termNode
                .GetTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);

            if (!positionTermMap.TryGetValue(termNode.PositionIncrement, out List<Term> termList) || termList == null)
            {
                termList = new List<Term>();
                positionTermMap[termNode.PositionIncrement] = termList;
            }
            termList.Add(termQuery.Term);
        }

        // Iterate the key/value pairs directly instead of enumerating Keys and
        // re-looking each key up through the indexer (avoids a second O(log n)
        // dictionary lookup per position).
        foreach (KeyValuePair<int?, List<Term>> pair in positionTermMap)
        {
            phraseQuery.Add(pair.Value.ToArray(), (int)pair.Key);
        }
    }

    return phraseQuery;
}
/// <summary>
/// Assembles a <see cref="PhraseQuery"/> from the pre-built
/// <see cref="TermQuery"/> attached to each child of a
/// <see cref="TokenizedPhraseQueryNode"/>, preserving each child's position increment.
/// </summary>
public virtual Query Build(IQueryNode queryNode)
{
    TokenizedPhraseQueryNode phraseNode = (TokenizedPhraseQueryNode)queryNode;
    PhraseQuery phraseQuery = new PhraseQuery();
    IList<IQueryNode> children = phraseNode.GetChildren();

    if (children != null)
    {
        foreach (IQueryNode child in children)
        {
            // The query tree builder tagged each child with its TermQuery.
            TermQuery termQuery = (TermQuery)child.GetTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
            FieldQueryNode termNode = (FieldQueryNode)child;
            phraseQuery.Add(termQuery.Term, termNode.PositionIncrement);
        }
    }

    return phraseQuery;
}
/// <summary>
/// Converts a <see cref="TermRangeQueryNode"/> into a
/// <see cref="NumericRangeQueryNode"/> when the node's field has a
/// <see cref="NumericConfig"/> attached; otherwise the node passes through unchanged.
/// Bound text is parsed with the config's <see cref="NumberFormat"/> and coerced
/// to the configured numeric type.
/// </summary>
/// <exception cref="QueryNodeParseException">if a non-empty bound cannot be parsed as a number</exception>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is TermRangeQueryNode)
    {
        QueryConfigHandler config = GetQueryConfigHandler();

        if (config != null)
        {
            TermRangeQueryNode termRangeNode = (TermRangeQueryNode)node;
            FieldConfig fieldConfig = config.GetFieldConfig(StringUtils
                .ToString(termRangeNode.Field));

            if (fieldConfig != null)
            {
                NumericConfig numericConfig = fieldConfig
                    .Get(ConfigurationKeys.NUMERIC_CONFIG);

                if (numericConfig != null)
                {
                    FieldQueryNode lower = (FieldQueryNode)termRangeNode.LowerBound;
                    FieldQueryNode upper = (FieldQueryNode)termRangeNode.UpperBound;

                    string lowerText = lower.GetTextAsString();
                    string upperText = upper.GetTextAsString();
                    NumberFormat numberFormat = numericConfig.NumberFormat;
                    /*Number*/ object lowerNumber = null, upperNumber = null;

                    // Empty bound text means an open-ended bound; leave it null.
                    if (lowerText.Length > 0)
                    {
                        try
                        {
                            lowerNumber = numberFormat.Parse(lowerText);
                        }
                        catch (FormatException e)
                        {
                            throw new QueryNodeParseException(new Message(
                                QueryParserMessages.COULD_NOT_PARSE_NUMBER, lower
                                .GetTextAsString(), numberFormat.ToString()), e);
                        }
                    }

                    if (upperText.Length > 0)
                    {
                        try
                        {
                            upperNumber = numberFormat.Parse(upperText);
                        }
                        catch (FormatException e)
                        {
                            throw new QueryNodeParseException(new Message(
                                QueryParserMessages.COULD_NOT_PARSE_NUMBER, upper
                                .GetTextAsString(), numberFormat.ToString()), e);
                        }
                    }

                    // Coerce the parsed values to the exact numeric type the
                    // field was configured with.
                    switch (numericConfig.Type)
                    {
                        case NumericType.INT64:
                            if (upperNumber != null)
                            {
                                upperNumber = Convert.ToInt64(upperNumber); // LUCENENET TODO: Find a way to pass culture
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = Convert.ToInt64(lowerNumber);
                            }
                            break;
                        case NumericType.INT32:
                            if (upperNumber != null)
                            {
                                upperNumber = Convert.ToInt32(upperNumber); // LUCENENET TODO: Find a way to pass culture
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = Convert.ToInt32(lowerNumber);
                            }
                            break;
                        case NumericType.DOUBLE:
                            if (upperNumber != null)
                            {
                                upperNumber = Convert.ToDouble(upperNumber); // LUCENENET TODO: Find a way to pass culture
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = Convert.ToDouble(lowerNumber);
                            }
                            break;
                        case NumericType.SINGLE:
                            if (upperNumber != null)
                            {
                                upperNumber = Convert.ToSingle(upperNumber); // LUCENENET TODO: Find a way to pass culture
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = Convert.ToSingle(lowerNumber);
                            }
                            break;
                    }

                    NumericQueryNode lowerNode = new NumericQueryNode(
                        termRangeNode.Field, lowerNumber, numberFormat);
                    NumericQueryNode upperNode = new NumericQueryNode(
                        termRangeNode.Field, upperNumber, numberFormat);

                    bool lowerInclusive = termRangeNode.IsLowerInclusive;
                    bool upperInclusive = termRangeNode.IsUpperInclusive;

                    return (new NumericRangeQueryNode(lowerNode, upperNode,
                        lowerInclusive, upperInclusive, numericConfig));
                }
            }
        }
    }
    return (node);
}
/// <summary>
/// Runs the configured analyzer over the text of every textable node (except
/// wildcard/fuzzy/regexp terms and children of range nodes) and rebuilds the
/// node based on how many tokens and positions the analyzer produced:
/// no tokens → <see cref="NoTokenFoundQueryNode"/>; one token → the original
/// node with analyzed text; synonyms at one position → a grouped boolean;
/// multiple positions unquoted → a boolean of positions; quoted with synonyms
/// → a multi-phrase; quoted, one token per position → a tokenized phrase.
/// </summary>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is ITextableQueryNode
        && !(node is WildcardQueryNode)
        && !(node is FuzzyQueryNode)
        && !(node is RegexpQueryNode)
        && !(node.Parent is IRangeQueryNode))
    {
        FieldQueryNode fieldNode = ((FieldQueryNode)node);
        string text = fieldNode.GetTextAsString();
        string field = fieldNode.GetFieldAsString();

        CachingTokenFilter buffer = null;
        IPositionIncrementAttribute posIncrAtt = null;
        int numTokens = 0;
        int positionCount = 0;
        bool severalTokensAtSamePosition = false;

        TokenStream source = null;
        try
        {
            source = this.analyzer.TokenStream(field, text);
            source.Reset();
            // Cache the stream so it can be rewound and consumed again below.
            buffer = new CachingTokenFilter(source);

            if (buffer.HasAttribute<IPositionIncrementAttribute>())
            {
                posIncrAtt = buffer.GetAttribute<IPositionIncrementAttribute>();
            }

            try
            {
                // First pass: count tokens and distinct positions; a zero
                // increment means a token shares a position with the previous
                // one (synonym).
                while (buffer.IncrementToken())
                {
                    numTokens++;
                    int positionIncrement = (posIncrAtt != null) ? posIncrAtt
                        .PositionIncrement : 1;
                    if (positionIncrement != 0)
                    {
                        positionCount += positionIncrement;
                    }
                    else
                    {
                        severalTokensAtSamePosition = true;
                    }
                }
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                // ignore
            }
        }
        catch (IOException e)
        {
            throw new Exception(e.Message, e);
        }
        finally
        {
            IOUtils.CloseWhileHandlingException(source);
        }

        // rewind the buffer stream
        buffer.Reset();

        if (!buffer.HasAttribute<ICharTermAttribute>())
        {
            return new NoTokenFoundQueryNode();
        }
        ICharTermAttribute termAtt = buffer.GetAttribute<ICharTermAttribute>();

        if (numTokens == 0)
        {
            // Analyzer dropped everything (e.g. stop words only).
            return new NoTokenFoundQueryNode();
        }
        else if (numTokens == 1)
        {
            // Single token: keep the original node, swap in the analyzed text.
            string term = null;
            try
            {
                bool hasNext;
                hasNext = buffer.IncrementToken();
                Debug.Assert(hasNext == true);
                term = termAtt.ToString();
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                // safe to ignore, because we know the number of tokens
            }
            fieldNode.Text = term.ToCharSequence();
            return fieldNode;
        }
        else if (severalTokensAtSamePosition || !(node is QuotedFieldQueryNode))
        {
            if (positionCount == 1 || !(node is QuotedFieldQueryNode))
            {
                // no phrase query:
                if (positionCount == 1)
                {
                    // simple case: only one position, with synonyms
                    List<IQueryNode> children = new List<IQueryNode>();
                    for (int i = 0; i < numTokens; i++)
                    {
                        string term = null;
                        try
                        {
                            bool hasNext = buffer.IncrementToken();
                            Debug.Assert(hasNext == true);
                            term = termAtt.ToString();
                        }
#pragma warning disable 168
                        catch (IOException e)
#pragma warning restore 168
                        {
                            // safe to ignore, because we know the number of tokens
                        }
                        children.Add(new FieldQueryNode(field, term, -1, -1));
                    }
                    return new GroupQueryNode(
                        new StandardBooleanQueryNode(children, positionCount == 1));
                }
                else
                {
                    // multiple positions
                    IQueryNode q = new StandardBooleanQueryNode(new List<IQueryNode>(), false);
                    IQueryNode currentQuery = null;
                    for (int i = 0; i < numTokens; i++)
                    {
                        string term = null;
                        try
                        {
                            bool hasNext = buffer.IncrementToken();
                            Debug.Assert(hasNext == true);
                            term = termAtt.ToString();
                        }
#pragma warning disable 168
                        catch (IOException e)
#pragma warning restore 168
                        {
                            // safe to ignore, because we know the number of tokens
                        }
                        if (posIncrAtt != null && posIncrAtt.PositionIncrement == 0)
                        {
                            // Synonym of the previous token: group both under a
                            // nested boolean node.
                            if (!(currentQuery is BooleanQueryNode))
                            {
                                IQueryNode t = currentQuery;
                                currentQuery = new StandardBooleanQueryNode(new List<IQueryNode>(), true);
                                ((BooleanQueryNode)currentQuery).Add(t);
                            }
                            ((BooleanQueryNode)currentQuery).Add(new FieldQueryNode(field, term, -1, -1));
                        }
                        else
                        {
                            // New position: flush the previous position's query,
                            // honoring the default operator.
                            if (currentQuery != null)
                            {
                                if (this.defaultOperator == Operator.OR)
                                {
                                    q.Add(currentQuery);
                                }
                                else
                                {
                                    q.Add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                                }
                            }
                            currentQuery = new FieldQueryNode(field, term, -1, -1);
                        }
                    }
                    // Flush the final position.
                    if (this.defaultOperator == Operator.OR)
                    {
                        q.Add(currentQuery);
                    }
                    else
                    {
                        q.Add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                    }

                    if (q is BooleanQueryNode)
                    {
                        q = new GroupQueryNode(q);
                    }
                    return q;
                }
            }
            else
            {
                // phrase query:
                MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();

                List<FieldQueryNode> multiTerms = new List<FieldQueryNode>();
                int position = -1;
                int i = 0;
                int termGroupCount = 0;
                for (; i < numTokens; i++)
                {
                    string term = null;
                    int positionIncrement = 1;
                    try
                    {
                        bool hasNext = buffer.IncrementToken();
                        Debug.Assert(hasNext == true);
                        term = termAtt.ToString();
                        if (posIncrAtt != null)
                        {
                            positionIncrement = posIncrAtt.PositionIncrement;
                        }
                    }
#pragma warning disable 168
                    catch (IOException e)
#pragma warning restore 168
                    {
                        // safe to ignore, because we know the number of tokens
                    }

                    if (positionIncrement > 0 && multiTerms.Count > 0)
                    {
                        // A new position starts: emit the accumulated synonym group.
                        foreach (FieldQueryNode termNode in multiTerms)
                        {
                            if (this.positionIncrementsEnabled)
                            {
                                termNode.PositionIncrement = position;
                            }
                            else
                            {
                                termNode.PositionIncrement = termGroupCount;
                            }
                            mpq.Add(termNode);
                        }

                        // Only increment once for each "group" of
                        // terms that were in the same position:
                        termGroupCount++;

                        multiTerms.Clear();
                    }

                    position += positionIncrement;
                    multiTerms.Add(new FieldQueryNode(field, term, -1, -1));
                }

                // Emit the last accumulated group.
                foreach (FieldQueryNode termNode in multiTerms)
                {
                    if (this.positionIncrementsEnabled)
                    {
                        termNode.PositionIncrement = position;
                    }
                    else
                    {
                        termNode.PositionIncrement = termGroupCount;
                    }
                    mpq.Add(termNode);
                }

                return mpq;
            }
        }
        else
        {
            // Quoted text, one token per position: a plain phrase query.
            TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();

            int position = -1;

            for (int i = 0; i < numTokens; i++)
            {
                string term = null;
                int positionIncrement = 1;
                try
                {
                    bool hasNext = buffer.IncrementToken();
                    Debug.Assert(hasNext == true);
                    term = termAtt.ToString();
                    if (posIncrAtt != null)
                    {
                        positionIncrement = posIncrAtt.PositionIncrement;
                    }
                }
#pragma warning disable 168
                catch (IOException e)
#pragma warning restore 168
                {
                    // safe to ignore, because we know the number of tokens
                }

                FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);
                if (this.positionIncrementsEnabled)
                {
                    position += positionIncrement;
                    newFieldNode.PositionIncrement = position;
                }
                else
                {
                    newFieldNode.PositionIncrement = i;
                }
                pq.Add(newFieldNode);
            }
            return pq;
        }
    }
    return node;
}
/// <summary>
/// Converts a <see cref="TermRangeQueryNode"/> into a
/// <see cref="NumericRangeQueryNode"/> when the node's field has a
/// <see cref="NumericConfig"/> attached; otherwise the node passes through unchanged.
/// Bound text is parsed with the config's <see cref="NumberFormat"/> and coerced
/// to the configured numeric type via J2N's boxed numeric wrappers.
/// </summary>
/// <exception cref="QueryNodeParseException">if a non-empty bound cannot be parsed as a number</exception>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is TermRangeQueryNode termRangeNode)
    {
        QueryConfigHandler config = GetQueryConfigHandler();

        if (config != null)
        {
            FieldConfig fieldConfig = config.GetFieldConfig(StringUtils
                .ToString(termRangeNode.Field));

            if (fieldConfig != null)
            {
                NumericConfig numericConfig = fieldConfig
                    .Get(ConfigurationKeys.NUMERIC_CONFIG);

                if (numericConfig != null)
                {
                    FieldQueryNode lower = (FieldQueryNode)termRangeNode.LowerBound;
                    FieldQueryNode upper = (FieldQueryNode)termRangeNode.UpperBound;

                    string lowerText = lower.GetTextAsString();
                    string upperText = upper.GetTextAsString();
                    NumberFormat numberFormat = numericConfig.NumberFormat;
                    J2N.Numerics.Number lowerNumber = null, upperNumber = null;

                    // Empty bound text means an open-ended bound; leave it null.
                    if (lowerText.Length > 0)
                    {
                        try
                        {
                            lowerNumber = numberFormat.Parse(lowerText);
                        }
                        catch (FormatException e) // LUCENENET: In .NET we are expecting the framework to throw FormatException, not ParseException
                        {
                            // LUCENENET: Factored out NLS/Message/IMessage so end users can optionally utilize the built-in .NET localization.
                            throw new QueryNodeParseException(string.Format(
                                QueryParserMessages.COULD_NOT_PARSE_NUMBER, lower
                                .GetTextAsString(), numberFormat.ToString()), e);
                        }
                    }

                    if (upperText.Length > 0)
                    {
                        try
                        {
                            upperNumber = numberFormat.Parse(upperText);
                        }
                        catch (FormatException e) // LUCENENET: In .NET we are expecting the framework to throw FormatException, not ParseException
                        {
                            // LUCENENET: Factored out NLS/Message/IMessage so end users can optionally utilize the built-in .NET localization.
                            throw new QueryNodeParseException(string.Format(
                                QueryParserMessages.COULD_NOT_PARSE_NUMBER, upper
                                .GetTextAsString(), numberFormat.ToString()), e);
                        }
                    }

                    // Coerce the parsed values to the exact numeric type the
                    // field was configured with.
                    switch (numericConfig.Type)
                    {
                        case NumericType.INT64:
                            if (upperNumber != null)
                            {
                                upperNumber = J2N.Numerics.Int64.GetInstance(upperNumber.ToInt64());
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = J2N.Numerics.Int64.GetInstance(lowerNumber.ToInt64());
                            }
                            break;
                        case NumericType.INT32:
                            if (upperNumber != null)
                            {
                                upperNumber = J2N.Numerics.Int32.GetInstance(upperNumber.ToInt32());
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = J2N.Numerics.Int32.GetInstance(lowerNumber.ToInt32());
                            }
                            break;
                        case NumericType.DOUBLE:
                            if (upperNumber != null)
                            {
                                upperNumber = J2N.Numerics.Double.GetInstance(upperNumber.ToDouble());
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = J2N.Numerics.Double.GetInstance(lowerNumber.ToDouble());
                            }
                            break;
                        case NumericType.SINGLE:
                            if (upperNumber != null)
                            {
                                upperNumber = J2N.Numerics.Single.GetInstance(upperNumber.ToSingle());
                            }
                            if (lowerNumber != null)
                            {
                                lowerNumber = J2N.Numerics.Single.GetInstance(lowerNumber.ToSingle());
                            }
                            break;
                    }

                    NumericQueryNode lowerNode = new NumericQueryNode(
                        termRangeNode.Field, lowerNumber, numberFormat);
                    NumericQueryNode upperNode = new NumericQueryNode(
                        termRangeNode.Field, upperNumber, numberFormat);

                    bool lowerInclusive = termRangeNode.IsLowerInclusive;
                    bool upperInclusive = termRangeNode.IsUpperInclusive;

                    return (new NumericRangeQueryNode(lowerNode, upperNode,
                        lowerInclusive, upperInclusive, numericConfig));
                }
            }
        }
    }
    return (node);
}
/// <summary>
/// Parses a single clause: a fielded comparison (":", "=", "&lt;", "&lt;=",
/// "&gt;", "&gt;="), a bare term, or a parenthesized sub-query, each with an
/// optional "^boost" suffix. The relational operators build one-sided
/// <see cref="TermRangeQueryNode"/>s with "*" on the open side.
/// NOTE(review): this method looks machine-generated by JavaCC — edit the
/// grammar, not this code.
/// </summary>
public IQueryNode Clause(string field)
{
    IQueryNode q;
    Token fieldToken = null, boost = null, @operator = null, term = null;
    FieldQueryNode qLower, qUpper;
    bool lowerInclusive, upperInclusive;

    bool group = false;
    if (Jj_2_2(3))
    {
        // Lookahead matched "TERM op ..." — a fielded clause.
        fieldToken = Jj_consume_token(RegexpToken.TERM);
        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
        {
            case RegexpToken.OP_COLON:
            case RegexpToken.OP_EQUAL:
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OP_COLON:
                        Jj_consume_token(RegexpToken.OP_COLON);
                        break;
                    case RegexpToken.OP_EQUAL:
                        Jj_consume_token(RegexpToken.OP_EQUAL);
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                q = Term(field);
                break;
            case RegexpToken.OP_LESSTHAN:
            case RegexpToken.OP_LESSTHANEQ:
            case RegexpToken.OP_MORETHAN:
            case RegexpToken.OP_MORETHANEQ:
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OP_LESSTHAN:
                        @operator = Jj_consume_token(RegexpToken.OP_LESSTHAN);
                        break;
                    case RegexpToken.OP_LESSTHANEQ:
                        @operator = Jj_consume_token(RegexpToken.OP_LESSTHANEQ);
                        break;
                    case RegexpToken.OP_MORETHAN:
                        @operator = Jj_consume_token(RegexpToken.OP_MORETHAN);
                        break;
                    case RegexpToken.OP_MORETHANEQ:
                        @operator = Jj_consume_token(RegexpToken.OP_MORETHANEQ);
                        break;
                    default:
                        jj_la1[8] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.TERM:
                        term = Jj_consume_token(RegexpToken.TERM);
                        break;
                    case RegexpToken.QUOTED:
                        term = Jj_consume_token(RegexpToken.QUOTED);
                        break;
                    case RegexpToken.NUMBER:
                        term = Jj_consume_token(RegexpToken.NUMBER);
                        break;
                    default:
                        jj_la1[9] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                // Strip the surrounding quotes from a quoted operand.
                if (term.kind == RegexpToken.QUOTED)
                {
                    term.image = term.image.Substring(1, (term.image.Length - 1) - 1);
                }
                // Build a one-sided range; "*" marks the open (unbounded) side.
                switch (@operator.kind)
                {
                    case RegexpToken.OP_LESSTHAN:
                        lowerInclusive = true;
                        upperInclusive = false;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_LESSTHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_MORETHAN:
                        lowerInclusive = false;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_MORETHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    default:
                        // Generated-code guard; the constant condition makes the
                        // section's end point unreachable, so no break is needed.
                        { if (true) throw new Exception("Unhandled case: operator=" + @operator.ToString()); }
                }
                q = new TermRangeQueryNode(qLower, qUpper, lowerInclusive, upperInclusive);
                break;
            default:
                jj_la1[10] = jj_gen;
                Jj_consume_token(-1);
                throw new ParseException();
        }
    }
    else
    {
        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
        {
            case RegexpToken.LPAREN:
            case RegexpToken.QUOTED:
            case RegexpToken.TERM:
            case RegexpToken.REGEXPTERM:
            case RegexpToken.RANGEIN_START:
            case RegexpToken.RANGEEX_START:
            case RegexpToken.NUMBER:
                if (Jj_2_1(2))
                {
                    // Optional "field:" / "field=" prefix.
                    fieldToken = Jj_consume_token(RegexpToken.TERM);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.OP_COLON:
                            Jj_consume_token(RegexpToken.OP_COLON);
                            break;
                        case RegexpToken.OP_EQUAL:
                            Jj_consume_token(RegexpToken.OP_EQUAL);
                            break;
                        default:
                            jj_la1[11] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                }
                else
                {
                    ;
                }
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.QUOTED:
                    case RegexpToken.TERM:
                    case RegexpToken.REGEXPTERM:
                    case RegexpToken.RANGEIN_START:
                    case RegexpToken.RANGEEX_START:
                    case RegexpToken.NUMBER:
                        q = Term(field);
                        break;
                    case RegexpToken.LPAREN:
                        // Parenthesized sub-query, optionally boosted with ^NUMBER.
                        Jj_consume_token(RegexpToken.LPAREN);
                        q = Query(field);
                        Jj_consume_token(RegexpToken.RPAREN);
                        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                        {
                            case RegexpToken.CARAT:
                                Jj_consume_token(RegexpToken.CARAT);
                                boost = Jj_consume_token(RegexpToken.NUMBER);
                                break;
                            default:
                                jj_la1[12] = jj_gen;
                                break;
                        }
                        group = true;
                        break;
                    default:
                        jj_la1[13] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                break;
            default:
                jj_la1[14] = jj_gen;
                Jj_consume_token(-1);
                throw new ParseException();
        }
    }
    if (boost != null)
    {
        float f = (float)1.0;
        try
        {
            f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null)
            {
                q = new BoostQueryNode(q, f);
            }
        }
#pragma warning disable 168
        catch (Exception ignored)
#pragma warning restore 168
        {
            /* Should this be handled somehow? (defaults to "no boost", if
             * boost number is invalid) */
        }
    }
    if (group)
    {
        q = new GroupQueryNode(q);
    }
    { if (true) return q; }
    throw new Exception("Missing return statement in function");
}
/// <summary>
/// Creates a prefix-wildcard node that copies the field, text, and begin/end
/// offsets from the given <see cref="FieldQueryNode"/>.
/// </summary>
/// <param name="fqn">the field node whose field, text, and offsets are reused</param>
public PrefixWildcardQueryNode(FieldQueryNode fqn)
    : this(fqn.Field, fqn.Text, fqn.Begin, fqn.End)
{
}
/// <summary>
/// Converts a plain <see cref="FieldQueryNode"/> on a numerically-configured
/// field into a degenerate (point) <see cref="NumericRangeQueryNode"/>:
/// [number..number] with both ends inclusive. Nodes that are children of a
/// range query are left alone — the range processor handles those.
/// </summary>
/// <exception cref="QueryNodeParseException">
/// if the text cannot be parsed as a number, or if the text is empty
/// </exception>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is FieldQueryNode && !(node.Parent is IRangeQueryNode))
    {
        QueryConfigHandler config = GetQueryConfigHandler();

        if (config != null)
        {
            FieldQueryNode fieldNode = (FieldQueryNode)node;
            FieldConfig fieldConfig = config.GetFieldConfig(fieldNode
                .GetFieldAsString());

            if (fieldConfig != null)
            {
                NumericConfig numericConfig = fieldConfig
                    .Get(ConfigurationKeys.NUMERIC_CONFIG);

                if (numericConfig != null)
                {
                    NumberFormat numberFormat = numericConfig.NumberFormat;
                    string text = fieldNode.GetTextAsString();
                    /*Number*/ object number = null;

                    if (text.Length > 0)
                    {
                        try
                        {
                            number = numberFormat.Parse(text);
                        }
                        catch (FormatException e)
                        {
                            throw new QueryNodeParseException(new MessageImpl(
                                QueryParserMessages.COULD_NOT_PARSE_NUMBER, fieldNode
                                .GetTextAsString(), numberFormat.GetType()
                                .AssemblyQualifiedName), e);
                        }

                        // Coerce the parsed value to the exact numeric type the
                        // field was configured with.
                        switch (numericConfig.Type)
                        {
                            case FieldType.NumericType.LONG:
                                number = Convert.ToInt64(number);
                                break;
                            case FieldType.NumericType.INT:
                                number = Convert.ToInt32(number);
                                break;
                            case FieldType.NumericType.DOUBLE:
                                number = Convert.ToDouble(number);
                                break;
                            case FieldType.NumericType.FLOAT:
                                number = Convert.ToSingle(number);
                                break;
                        }
                    }
                    else
                    {
                        // A numeric field with empty text cannot be queried.
                        throw new QueryNodeParseException(new MessageImpl(
                            QueryParserMessages.NUMERIC_CANNOT_BE_EMPTY,
                            fieldNode.GetFieldAsString()));
                    }

                    NumericQueryNode lowerNode = new NumericQueryNode(fieldNode
                        .Field, number, numberFormat);
                    NumericQueryNode upperNode = new NumericQueryNode(fieldNode
                        .Field, number, numberFormat);

                    return (new NumericRangeQueryNode(lowerNode, upperNode, true,
                        true, numericConfig));
                }
            }
        }
    }
    return (node);
}
/// <summary>
/// Creates a wildcard node that copies the field, text, and begin/end offsets
/// from the given <see cref="FieldQueryNode"/>.
/// </summary>
/// <param name="fqn">the field node whose field, text, and offsets are reused</param>
public WildcardQueryNode(FieldQueryNode fqn)
    : this(fqn.Field, fqn.Text, fqn.Begin, fqn.End)
{
}
/// <summary>
/// Parses a single term production: a bare, fuzzy ("~minSim"), or regexp
/// ("/.../") term; a "[..]"/"{..}" range ('[' inclusive, '{' exclusive); or a
/// quoted phrase with optional "~slop" — each optionally followed by "^boost".
/// NOTE(review): this method looks machine-generated by JavaCC — edit the
/// grammar, not this code.
/// </summary>
public IQueryNode Term(string field)
{
    Token term, boost = null, fuzzySlop = null, goop1, goop2;
    bool fuzzy = false;
    bool regexp = false;
    bool startInc = false;
    bool endInc = false;
    IQueryNode q = null;
    FieldQueryNode qLower, qUpper;
#pragma warning disable 612, 618
    float defaultMinSimilarity = Search.FuzzyQuery.DefaultMinSimilarity;
#pragma warning restore 612, 618
    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
    {
        case RegexpToken.TERM:
        case RegexpToken.REGEXPTERM:
        case RegexpToken.NUMBER:
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.TERM:
                    term = Jj_consume_token(RegexpToken.TERM);
                    q = new FieldQueryNode(field, EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                    break;
                case RegexpToken.REGEXPTERM:
                    term = Jj_consume_token(RegexpToken.REGEXPTERM);
                    regexp = true;
                    break;
                case RegexpToken.NUMBER:
                    term = Jj_consume_token(RegexpToken.NUMBER);
                    break;
                default:
                    jj_la1[15] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            // Optional "~minSim" fuzzy suffix.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.FUZZY_SLOP:
                    fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                    fuzzy = true;
                    break;
                default:
                    jj_la1[16] = jj_gen;
                    break;
            }
            // Optional "^boost", itself optionally followed by "~minSim".
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.CARAT:
                    Jj_consume_token(RegexpToken.CARAT);
                    boost = Jj_consume_token(RegexpToken.NUMBER);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.FUZZY_SLOP:
                            fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                            fuzzy = true;
                            break;
                        default:
                            jj_la1[17] = jj_gen;
                            break;
                    }
                    break;
                default:
                    jj_la1[18] = jj_gen;
                    break;
            }
            if (fuzzy)
            {
                // Fuzzy similarity must be >= 0, and values >= 1 must be whole
                // numbers (edit distances).
                float fms = defaultMinSimilarity;
                try
                {
                    fms = Convert.ToSingle(fuzzySlop.image.Substring(1), CultureInfo.InvariantCulture);
                }
#pragma warning disable 168
                catch (Exception ignored) { }
#pragma warning restore 168
                if (fms < 0.0f)
                {
                    { if (true) throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_LIMITS)); }
                }
                else if (fms >= 1.0f && fms != (int)fms)
                {
                    { if (true) throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_EDITS)); }
                }
                q = new FuzzyQueryNode(field, EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), fms, term.beginColumn, term.endColumn);
            }
            else if (regexp)
            {
                // Strip the surrounding '/' delimiters of the regexp term.
                string re = term.image.Substring(1, (term.image.Length - 1) - 1);
                q = new RegexpQueryNode(field, re, 0, re.Length);
            }
            break;
        case RegexpToken.RANGEIN_START:
        case RegexpToken.RANGEEX_START:
            // Range query: '[' is inclusive, '{' exclusive, on each end.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGEIN_START:
                    Jj_consume_token(RegexpToken.RANGEIN_START);
                    startInc = true;
                    break;
                case RegexpToken.RANGEEX_START:
                    Jj_consume_token(RegexpToken.RANGEEX_START);
                    break;
                default:
                    jj_la1[19] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGE_GOOP:
                    goop1 = Jj_consume_token(RegexpToken.RANGE_GOOP);
                    break;
                case RegexpToken.RANGE_QUOTED:
                    goop1 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
                    break;
                default:
                    jj_la1[20] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGE_TO:
                    Jj_consume_token(RegexpToken.RANGE_TO);
                    break;
                default:
                    jj_la1[21] = jj_gen;
                    break;
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGE_GOOP:
                    goop2 = Jj_consume_token(RegexpToken.RANGE_GOOP);
                    break;
                case RegexpToken.RANGE_QUOTED:
                    goop2 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
                    break;
                default:
                    jj_la1[22] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGEIN_END:
                    Jj_consume_token(RegexpToken.RANGEIN_END);
                    endInc = true;
                    break;
                case RegexpToken.RANGEEX_END:
                    Jj_consume_token(RegexpToken.RANGEEX_END);
                    break;
                default:
                    jj_la1[23] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.CARAT:
                    Jj_consume_token(RegexpToken.CARAT);
                    boost = Jj_consume_token(RegexpToken.NUMBER);
                    break;
                default:
                    jj_la1[24] = jj_gen;
                    break;
            }
            // Strip quotes from quoted range bounds before unescaping.
            if (goop1.kind == RegexpToken.RANGE_QUOTED)
            {
                goop1.image = goop1.image.Substring(1, (goop1.image.Length - 1) - 1);
            }
            if (goop2.kind == RegexpToken.RANGE_QUOTED)
            {
                goop2.image = goop2.image.Substring(1, (goop2.image.Length - 1) - 1);
            }
            qLower = new FieldQueryNode(field,
                EscapeQuerySyntaxImpl.DiscardEscapeChar(goop1.image), goop1.beginColumn, goop1.endColumn);
            qUpper = new FieldQueryNode(field,
                EscapeQuerySyntaxImpl.DiscardEscapeChar(goop2.image), goop2.beginColumn, goop2.endColumn);
            q = new TermRangeQueryNode(qLower, qUpper, startInc ? true : false, endInc ? true : false);
            break;
        case RegexpToken.QUOTED:
            // Quoted phrase with optional "~slop" and "^boost"; the quotes are
            // stripped and the offsets adjusted to the inner text.
            term = Jj_consume_token(RegexpToken.QUOTED);
            q = new QuotedFieldQueryNode(field, EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image.Substring(1, (term.image.Length - 1) - 1)), term.beginColumn + 1, term.endColumn - 1);
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.FUZZY_SLOP:
                    fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                    break;
                default:
                    jj_la1[25] = jj_gen;
                    break;
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.CARAT:
                    Jj_consume_token(RegexpToken.CARAT);
                    boost = Jj_consume_token(RegexpToken.NUMBER);
                    break;
                default:
                    jj_la1[26] = jj_gen;
                    break;
            }
            int phraseSlop = 0;
            if (fuzzySlop != null)
            {
                try
                {
                    phraseSlop = (int)Convert.ToSingle(fuzzySlop.image.Substring(1), CultureInfo.InvariantCulture);
                    q = new SlopQueryNode(q, phraseSlop);
                }
#pragma warning disable 168
                catch (Exception ignored)
#pragma warning restore 168
                {
                    /* Should this be handled somehow? (defaults to "no PhraseSlop", if
                     * slop number is invalid) */
                }
            }
            break;
        default:
            jj_la1[27] = jj_gen;
            Jj_consume_token(-1);
            throw new ParseException();
    }
    if (boost != null)
    {
        float f = (float)1.0;
        try
        {
            f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null)
            {
                q = new BoostQueryNode(q, f);
            }
        }
#pragma warning disable 168
        catch (Exception ignored)
#pragma warning restore 168
        {
            /* Should this be handled somehow? (defaults to "no boost", if
             * boost number is invalid) */
        }
    }
    { if (true) return q; }
    throw new Exception("Missing return statement in function");
}
/// <summary>
/// Rewrites the bounds of a <see cref="TermRangeQueryNode"/> that contain
/// parseable dates into Lucene's index date format
/// (<see cref="DateTools.DateToString"/>), honoring the configured locale and
/// per-field date resolution. An inclusive upper bound is pushed to the end of
/// its day so the whole day is included. Unparseable bounds are left unchanged
/// (the catch below deliberately swallows parse failures).
/// </summary>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is TermRangeQueryNode)
    {
        TermRangeQueryNode termRangeNode = (TermRangeQueryNode)node;
        FieldQueryNode upper = (FieldQueryNode)termRangeNode.UpperBound;
        FieldQueryNode lower = (FieldQueryNode)termRangeNode.LowerBound;
        // LUCENENET TODO: Add a NOT_SET value so we have a logical default?
        DateTools.Resolution dateRes = DateTools.Resolution.MINUTE /* = null*/;
        bool inclusive = false;
        CultureInfo locale = GetQueryConfigHandler().Get(ConfigurationKeys.LOCALE);

        if (locale == null)
        {
            locale = CultureInfo.CurrentCulture; //Locale.getDefault();
        }

        TimeZoneInfo timeZone = GetQueryConfigHandler().Get(ConfigurationKeys.TIMEZONE);

        if (timeZone == null)
        {
            timeZone = TimeZoneInfo.Local; //TimeZone.getDefault();
        }

        string field = termRangeNode.Field;
        string fieldStr = null;

        if (field != null)
        {
            fieldStr = field.ToString();
        }

        FieldConfig fieldConfig = GetQueryConfigHandler()
            .GetFieldConfig(fieldStr);

        if (fieldConfig != null)
        {
            dateRes = fieldConfig.Get(ConfigurationKeys.DATE_RESOLUTION);
        }

        if (termRangeNode.IsUpperInclusive)
        {
            inclusive = true;
        }

        string part1 = lower.GetTextAsString();
        string part2 = upper.GetTextAsString();

        try
        {
            //DateFormat df = DateFormat.GetDateInstance(DateFormat.SHORT, locale);
            //df.setLenient(true);

            if (part1.Length > 0)
            {
                //DateTime d1 = df.parse(part1);
                DateTime d1 = DateTime.Parse(part1, locale);
                part1 = DateTools.DateToString(d1, dateRes);
                lower.Text = new StringCharSequenceWrapper(part1);
            }

            if (part2.Length > 0)
            {
                //DateTime d2 = df.parse(part2);
                DateTime d2 = DateTime.Parse(part2, locale);
                if (inclusive)
                {
                    // The user can only specify the date, not the time, so make sure
                    // the time is set to the latest possible time of that date to
                    // really
                    // include all documents:
                    //Calendar cal = Calendar.getInstance(timeZone, locale);
                    //cal.setTime(d2);
                    //cal.set(Calendar.HOUR_OF_DAY, 23);
                    //cal.set(Calendar.MINUTE, 59);
                    //cal.set(Calendar.SECOND, 59);
                    //cal.set(Calendar.MILLISECOND, 999);
                    //d2 = cal.getTime();
                    // NOTE(review): unlike Java's Calendar.set, AddHours/AddMinutes/...
                    // ADD to d2 rather than setting fields; this matches the Java
                    // result only if DateTime.Parse yielded midnight — verify.
                    var cal = locale.Calendar;
                    d2 = cal.AddHours(d2, 23);
                    d2 = cal.AddMinutes(d2, 59);
                    d2 = cal.AddSeconds(d2, 59);
                    d2 = cal.AddMilliseconds(d2, 999);
                }

                part2 = DateTools.DateToString(d2, dateRes);
                upper.Text = new StringCharSequenceWrapper(part2);
            }
        }
#pragma warning disable 168
        catch (Exception e)
#pragma warning restore 168
        {
            // do nothing — unparseable dates leave the original text in place
        }
    }

    return (node);
}
/// <summary>
/// Runs the configured analyzer over the text of a textable query node and rewrites the
/// node according to how many tokens/positions the analyzer produced:
/// <list type="bullet">
///   <item><description>0 tokens → <see cref="NoTokenFoundQueryNode"/></description></item>
///   <item><description>1 token → the original <see cref="FieldQueryNode"/> with its text replaced</description></item>
///   <item><description>1 position with synonyms → grouped boolean of term nodes</description></item>
///   <item><description>multiple positions (non-quoted) → boolean combination per <c>defaultOperator</c></description></item>
///   <item><description>stacked tokens in a quoted phrase → <see cref="MultiPhraseQueryNode"/></description></item>
///   <item><description>plain quoted phrase → <see cref="TokenizedPhraseQueryNode"/></description></item>
/// </list>
/// Wildcard, fuzzy, regexp, and range-bound nodes are deliberately skipped.
/// </summary>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is ITextableQueryNode
        && !(node is WildcardQueryNode)
        && !(node is FuzzyQueryNode)
        && !(node is RegexpQueryNode)
        && !(node.Parent is IRangeQueryNode))
    {
        FieldQueryNode fieldNode = ((FieldQueryNode)node);
        string text = fieldNode.GetTextAsString();
        string field = fieldNode.GetFieldAsString();
        CachingTokenFilter buffer = null;
        IPositionIncrementAttribute posIncrAtt = null;
        int numTokens = 0;
        int positionCount = 0;
        bool severalTokensAtSamePosition = false;
        TokenStream source = null;

        // First pass: cache the token stream and count tokens/positions.
        // A position increment of 0 means the token is stacked on the previous
        // one (e.g. a synonym), which changes which node type we build below.
        try
        {
            source = this.analyzer.GetTokenStream(field, text);
            source.Reset();
            buffer = new CachingTokenFilter(source);

            if (buffer.HasAttribute<IPositionIncrementAttribute>())
            {
                posIncrAtt = buffer.GetAttribute<IPositionIncrementAttribute>();
            }

            try
            {
                while (buffer.IncrementToken())
                {
                    numTokens++;
                    int positionIncrement = (posIncrAtt != null) ? posIncrAtt.PositionIncrement : 1;
                    if (positionIncrement != 0)
                    {
                        positionCount += positionIncrement;
                    }
                    else
                    {
                        severalTokensAtSamePosition = true;
                    }
                }
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                // ignore — the tokens cached so far are still usable
            }
        }
        catch (IOException e)
        {
            // Failure to obtain/reset the stream is fatal for this node.
            throw new Exception(e.ToString(), e);
        }
        finally
        {
            IOUtils.DisposeWhileHandlingException(source);
        }

        // rewind the buffer stream so the second pass re-reads the cached tokens
        buffer.Reset();

        if (!buffer.HasAttribute<ICharTermAttribute>())
        {
            return (new NoTokenFoundQueryNode());
        }

        ICharTermAttribute termAtt = buffer.GetAttribute<ICharTermAttribute>();

        if (numTokens == 0)
        {
            return (new NoTokenFoundQueryNode());
        }
        else if (numTokens == 1)
        {
            // Single token: keep the node, just swap in the analyzed text.
            string term = null;
            try
            {
                bool hasNext;
                hasNext = buffer.IncrementToken();
                if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                term = termAtt.ToString();
            }
            catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
            {
                // safe to ignore, because we know the number of tokens
            }
            fieldNode.Text = term.AsCharSequence();
            return (fieldNode);
        }
        else if (severalTokensAtSamePosition || !(node is QuotedFieldQueryNode))
        {
            if (positionCount == 1 || !(node is QuotedFieldQueryNode))
            {
                // no phrase query:
                if (positionCount == 1)
                {
                    // simple case: only one position, with synonyms
                    List<IQueryNode> children = new List<IQueryNode>();

                    for (int i = 0; i < numTokens; i++)
                    {
                        string term = null;
                        try
                        {
                            bool hasNext = buffer.IncrementToken();
                            if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                            term = termAtt.ToString();
                        }
                        catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                        {
                            // safe to ignore, because we know the number of tokens
                        }
                        children.Add(new FieldQueryNode(field, term, -1, -1));
                    }
                    return (new GroupQueryNode(
                        new StandardBooleanQueryNode(children, positionCount == 1)));
                }
                else
                {
                    // multiple positions: build a boolean query combining the terms,
                    // grouping zero-increment (stacked) tokens into nested booleans.
                    IQueryNode q = new StandardBooleanQueryNode(Collections.EmptyList<IQueryNode>(), false);
                    IQueryNode currentQuery = null;

                    for (int i = 0; i < numTokens; i++)
                    {
                        string term = null;
                        try
                        {
                            bool hasNext = buffer.IncrementToken();
                            if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                            term = termAtt.ToString();
                        }
                        catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                        {
                            // safe to ignore, because we know the number of tokens
                        }
                        if (posIncrAtt != null && posIncrAtt.PositionIncrement == 0)
                        {
                            // Stacked token: merge with the previous term into a boolean group.
                            if (!(currentQuery is BooleanQueryNode))
                            {
                                IQueryNode t = currentQuery;
                                currentQuery = new StandardBooleanQueryNode(Collections.EmptyList<IQueryNode>(), true);
                                ((BooleanQueryNode)currentQuery).Add(t);
                            }
                            ((BooleanQueryNode)currentQuery).Add(new FieldQueryNode(field, term, -1, -1));
                        }
                        else
                        {
                            // New position: flush the previous term/group into the result.
                            if (currentQuery != null)
                            {
                                if (this.defaultOperator == Operator.OR)
                                {
                                    q.Add(currentQuery);
                                }
                                else
                                {
                                    q.Add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                                }
                            }
                            currentQuery = new FieldQueryNode(field, term, -1, -1);
                        }
                    }
                    // Flush the last pending term/group.
                    if (this.defaultOperator == Operator.OR)
                    {
                        q.Add(currentQuery);
                    }
                    else
                    {
                        q.Add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                    }

                    if (q is BooleanQueryNode)
                    {
                        q = new GroupQueryNode(q);
                    }
                    return (q);
                }
            }
            else
            {
                // phrase query: stacked tokens inside a quoted phrase become a multi-phrase
                MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();

                List<FieldQueryNode> multiTerms = new List<FieldQueryNode>();
                int position = -1;
                int i = 0;
                int termGroupCount = 0;
                for (; i < numTokens; i++)
                {
                    string term = null;
                    int positionIncrement = 1;
                    try
                    {
                        bool hasNext = buffer.IncrementToken();
                        if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                        term = termAtt.ToString();
                        if (posIncrAtt != null)
                        {
                            positionIncrement = posIncrAtt.PositionIncrement;
                        }
                    }
                    catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                    {
                        // safe to ignore, because we know the number of tokens
                    }

                    // A positive increment closes the current group of same-position terms.
                    if (positionIncrement > 0 && multiTerms.Count > 0)
                    {
                        foreach (FieldQueryNode termNode in multiTerms)
                        {
                            if (this.positionIncrementsEnabled)
                            {
                                termNode.PositionIncrement = position;
                            }
                            else
                            {
                                termNode.PositionIncrement = termGroupCount;
                            }
                            mpq.Add(termNode);
                        }

                        // Only increment once for each "group" of
                        // terms that were in the same position:
                        termGroupCount++;

                        multiTerms.Clear();
                    }

                    position += positionIncrement;
                    multiTerms.Add(new FieldQueryNode(field, term, -1, -1));
                }

                // Flush the final group of same-position terms.
                foreach (FieldQueryNode termNode in multiTerms)
                {
                    if (this.positionIncrementsEnabled)
                    {
                        termNode.PositionIncrement = position;
                    }
                    else
                    {
                        termNode.PositionIncrement = termGroupCount;
                    }
                    mpq.Add(termNode);
                }

                return (mpq);
            }
        }
        else
        {
            // Quoted phrase without stacked tokens: plain tokenized phrase.
            TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();

            int position = -1;

            for (int i = 0; i < numTokens; i++)
            {
                string term = null;
                int positionIncrement = 1;

                try
                {
                    bool hasNext = buffer.IncrementToken();
                    if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                    term = termAtt.ToString();

                    if (posIncrAtt != null)
                    {
                        positionIncrement = posIncrAtt.PositionIncrement;
                    }
                }
                catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                {
                    // safe to ignore, because we know the number of tokens
                }

                FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);

                if (this.positionIncrementsEnabled)
                {
                    position += positionIncrement;
                    newFieldNode.PositionIncrement = position;
                }
                else
                {
                    newFieldNode.PositionIncrement = i;
                }

                pq.Add(newFieldNode);
            }

            return (pq);
        }
    }

    return (node);
}
/// <summary>
/// Parses a single query clause: either <c>field:term</c> / <c>field=term</c>,
/// a relational range such as <c>field&lt;value</c> / <c>field&gt;=value</c>
/// (rewritten into a <see cref="TermRangeQueryNode"/> with a "*" open bound),
/// a bare term, or a parenthesized sub-query with an optional <c>^boost</c>.
/// </summary>
/// <param name="field">The default field in effect; overridden when the clause names its own field.</param>
/// <returns>The parsed query node, possibly wrapped in boost/group nodes.</returns>
/// <remarks>
/// JavaCC-style generated parser method (note the jj_* lookahead machinery) —
/// regenerate from the grammar rather than hand-editing the control flow.
/// </remarks>
public IQueryNode Clause(string field)
{
    IQueryNode q;
    Token fieldToken = null, boost = null, @operator = null, term = null;
    FieldQueryNode qLower, qUpper;
    bool lowerInclusive, upperInclusive;
    bool group = false;
    if (Jj_2_2(3))
    {
        // Lookahead matched "TERM op ..." — explicit field with an operator.
        fieldToken = Jj_consume_token(RegexpToken.TERM);
        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
        {
            case RegexpToken.OP_COLON:
            case RegexpToken.OP_EQUAL:
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OP_COLON:
                        Jj_consume_token(RegexpToken.OP_COLON);
                        break;
                    case RegexpToken.OP_EQUAL:
                        Jj_consume_token(RegexpToken.OP_EQUAL);
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                q = Term(field);
                break;
            case RegexpToken.OP_LESSTHAN:
            case RegexpToken.OP_LESSTHANEQ:
            case RegexpToken.OP_MORETHAN:
            case RegexpToken.OP_MORETHANEQ:
                // Relational operator: consume it, then the comparison value.
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OP_LESSTHAN:
                        @operator = Jj_consume_token(RegexpToken.OP_LESSTHAN);
                        break;
                    case RegexpToken.OP_LESSTHANEQ:
                        @operator = Jj_consume_token(RegexpToken.OP_LESSTHANEQ);
                        break;
                    case RegexpToken.OP_MORETHAN:
                        @operator = Jj_consume_token(RegexpToken.OP_MORETHAN);
                        break;
                    case RegexpToken.OP_MORETHANEQ:
                        @operator = Jj_consume_token(RegexpToken.OP_MORETHANEQ);
                        break;
                    default:
                        jj_la1[8] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.TERM:
                        term = Jj_consume_token(RegexpToken.TERM);
                        break;
                    case RegexpToken.QUOTED:
                        term = Jj_consume_token(RegexpToken.QUOTED);
                        break;
                    case RegexpToken.NUMBER:
                        term = Jj_consume_token(RegexpToken.NUMBER);
                        break;
                    default:
                        jj_la1[9] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                if (term.kind == RegexpToken.QUOTED)
                {
                    // Strip the surrounding quote characters.
                    term.image = term.image.Substring(1, (term.image.Length - 1) - 1);
                }
                // Translate the relational operator into a half-open range;
                // "*" stands in for the unbounded side.
                switch (@operator.kind)
                {
                    case RegexpToken.OP_LESSTHAN:
                        lowerInclusive = true;
                        upperInclusive = false;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_LESSTHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_MORETHAN:
                        lowerInclusive = false;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_MORETHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    default:
                        { if (true) { throw new Exception("Unhandled case: operator=" + @operator.ToString()); } }
                }
                q = new TermRangeQueryNode(qLower, qUpper, lowerInclusive, upperInclusive);
                break;
            default:
                jj_la1[10] = jj_gen;
                Jj_consume_token(-1);
                throw new ParseException();
        }
    }
    else
    {
        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
        {
            case RegexpToken.LPAREN:
            case RegexpToken.QUOTED:
            case RegexpToken.TERM:
            case RegexpToken.REGEXPTERM:
            case RegexpToken.RANGEIN_START:
            case RegexpToken.RANGEEX_START:
            case RegexpToken.NUMBER:
                if (Jj_2_1(2))
                {
                    // Optional "field:" / "field=" prefix before the term or group.
                    fieldToken = Jj_consume_token(RegexpToken.TERM);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.OP_COLON:
                            Jj_consume_token(RegexpToken.OP_COLON);
                            break;
                        case RegexpToken.OP_EQUAL:
                            Jj_consume_token(RegexpToken.OP_EQUAL);
                            break;
                        default:
                            jj_la1[11] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                }
                else
                {
                    ;
                }
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.QUOTED:
                    case RegexpToken.TERM:
                    case RegexpToken.REGEXPTERM:
                    case RegexpToken.RANGEIN_START:
                    case RegexpToken.RANGEEX_START:
                    case RegexpToken.NUMBER:
                        q = Term(field);
                        break;
                    case RegexpToken.LPAREN:
                        // Parenthesized sub-query, optionally boosted with ^NUMBER.
                        Jj_consume_token(RegexpToken.LPAREN);
                        q = Query(field);
                        Jj_consume_token(RegexpToken.RPAREN);
                        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                        {
                            case RegexpToken.CARAT:
                                Jj_consume_token(RegexpToken.CARAT);
                                boost = Jj_consume_token(RegexpToken.NUMBER);
                                break;
                            default:
                                jj_la1[12] = jj_gen;
                                break;
                        }
                        group = true;
                        break;
                    default:
                        jj_la1[13] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                break;
            default:
                jj_la1[14] = jj_gen;
                Jj_consume_token(-1);
                throw new ParseException();
        }
    }
    if (boost != null)
    {
        float f = (float)1.0;
        try
        {
            f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null)
            {
                q = new BoostQueryNode(q, f);
            }
        }
#pragma warning disable 168
        catch (Exception ignored)
#pragma warning restore 168
        {
            /* Should this be handled somehow? (defaults to "no boost", if
             * boost number is invalid) */
        }
    }
    if (group)
    {
        q = new GroupQueryNode(q);
    }
    { if (true) { return (q); } }
    throw new Exception("Missing return statement in function");
}
/// <summary>
/// If <paramref name="node"/> is a <see cref="TermRangeQueryNode"/>, attempts to parse its
/// lower/upper bound text as dates in the locale's short date pattern and rewrites each
/// bound into Lucene's index date format via <c>DateTools.DateToString</c>. Bounds that do
/// not match the pattern are left unchanged (<see cref="DateTime.TryParseExact(string, string, IFormatProvider, DateTimeStyles, out DateTime)"/>
/// simply returns <c>false</c>).
/// </summary>
/// <param name="node">The query node to post-process; non-range nodes pass through untouched.</param>
/// <returns>The same <paramref name="node"/> instance, with bound text possibly rewritten.</returns>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is TermRangeQueryNode)
    {
        TermRangeQueryNode termRangeNode = (TermRangeQueryNode)node;
        FieldQueryNode upper = (FieldQueryNode)termRangeNode.UpperBound;
        FieldQueryNode lower = (FieldQueryNode)termRangeNode.LowerBound;

        // LUCENENET specific - set to 0 (instead of null), since it doesn't correspond to any valid setting.
        // NOTE(review): if no FieldConfig supplies DATE_RESOLUTION, DateToString is called
        // with this sentinel — confirm DateTools' behavior for resolution 0 is acceptable.
        DateTools.Resolution dateRes = 0 /* = null*/;
        bool inclusive = false;

        // Resolve the culture used for date parsing; fall back to the current culture.
        CultureInfo locale = GetQueryConfigHandler().Get(ConfigurationKeys.LOCALE);
        if (locale == null)
        {
            locale = CultureInfo.CurrentCulture; //Locale.getDefault();
        }

        TimeZoneInfo timeZone = GetQueryConfigHandler().Get(ConfigurationKeys.TIMEZONE);
        if (timeZone == null)
        {
            timeZone = TimeZoneInfo.Local; //TimeZone.getDefault();
        }

        string field = termRangeNode.Field;
        string fieldStr = null;
        if (field != null)
        {
            fieldStr = field.ToString();
        }

        // A per-field configuration may override the date resolution.
        FieldConfig fieldConfig = GetQueryConfigHandler()
            .GetFieldConfig(fieldStr);
        if (fieldConfig != null)
        {
            dateRes = fieldConfig.Get(ConfigurationKeys.DATE_RESOLUTION);
        }

        if (termRangeNode.IsUpperInclusive)
        {
            inclusive = true;
        }

        string part1 = lower.GetTextAsString();
        string part2 = upper.GetTextAsString();

        try
        {
            string shortDateFormat = locale.DateTimeFormat.ShortDatePattern;
            // No initialization needed: 'out' parameters are definitely assigned by
            // TryParseExact on every path (the previous DateTime.MaxValue assignment
            // was dead code and its comment about the compiler was incorrect).
            DateTime d1;
            DateTime d2;

            if (DateTime.TryParseExact(part1, shortDateFormat, locale, DateTimeStyles.None, out d1))
            {
                part1 = DateTools.DateToString(d1, dateRes);
                lower.Text = new StringCharSequenceWrapper(part1);
            }

            if (DateTime.TryParseExact(part2, shortDateFormat, locale, DateTimeStyles.None, out d2))
            {
                if (inclusive)
                {
                    // The user can only specify the date, not the time, so make sure
                    // the time is set to the latest possible time of that date to
                    // really include all documents:
                    d2 = TimeZoneInfo.ConvertTime(d2, timeZone);
                    var cal = locale.Calendar;
                    d2 = cal.AddHours(d2, 23);
                    d2 = cal.AddMinutes(d2, 59);
                    d2 = cal.AddSeconds(d2, 59);
                    d2 = cal.AddMilliseconds(d2, 999);
                }

                part2 = DateTools.DateToString(d2, dateRes);
                upper.Text = new StringCharSequenceWrapper(part2);
            }
        }
        catch (Exception) // LUCENENET: no unused variable — #pragma 168 suppression no longer needed
        {
            // do nothing — a bound that is not a parseable date is used verbatim
        }
    }

    return (node);
}
/// <summary>
/// Parses a single term production: a bare term / regexp / number (with optional
/// <c>~fuzzy</c> slop and <c>^boost</c>), a bracketed range such as <c>[a TO b]</c> or
/// <c>{a TO b}</c>, or a quoted phrase (with optional <c>~slop</c> and <c>^boost</c>),
/// and builds the corresponding query node.
/// </summary>
/// <param name="field">The field this term applies to.</param>
/// <returns>The constructed query node, possibly wrapped in slop/boost nodes.</returns>
/// <exception cref="ParseException">On unexpected tokens or invalid fuzzy similarity values.</exception>
/// <remarks>
/// JavaCC-style generated parser method (note the jj_* lookahead machinery) —
/// regenerate from the grammar rather than hand-editing the control flow.
/// </remarks>
public IQueryNode Term(string field)
{
    Token term, boost = null, fuzzySlop = null, goop1, goop2;
    bool fuzzy = false;
    bool regexp = false;
    bool startInc = false;
    bool endInc = false;
    IQueryNode q = null;
    FieldQueryNode qLower, qUpper;
#pragma warning disable 612, 618
    float defaultMinSimilarity = Search.FuzzyQuery.DefaultMinSimilarity;
#pragma warning restore 612, 618
    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
    {
        case RegexpToken.TERM:
        case RegexpToken.REGEXPTERM:
        case RegexpToken.NUMBER:
            // Bare term, regexp term, or number.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.TERM:
                    term = Jj_consume_token(RegexpToken.TERM);
                    q = new FieldQueryNode(field,
                        EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                    break;
                case RegexpToken.REGEXPTERM:
                    term = Jj_consume_token(RegexpToken.REGEXPTERM);
                    regexp = true;
                    break;
                case RegexpToken.NUMBER:
                    term = Jj_consume_token(RegexpToken.NUMBER);
                    break;
                default:
                    jj_la1[15] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            // Optional fuzzy slop (~n) before a boost.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.FUZZY_SLOP:
                    fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                    fuzzy = true;
                    break;
                default:
                    jj_la1[16] = jj_gen;
                    break;
            }
            // Optional boost (^n), itself optionally followed by fuzzy slop.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.CARAT:
                    Jj_consume_token(RegexpToken.CARAT);
                    boost = Jj_consume_token(RegexpToken.NUMBER);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.FUZZY_SLOP:
                            fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                            fuzzy = true;
                            break;
                        default:
                            jj_la1[17] = jj_gen;
                            break;
                    }
                    break;
                default:
                    jj_la1[18] = jj_gen;
                    break;
            }
            if (fuzzy)
            {
                float fms = defaultMinSimilarity;
                try
                {
                    // Skip the leading '~' and parse the similarity value.
                    fms = Convert.ToSingle(fuzzySlop.image.Substring(1), CultureInfo.InvariantCulture);
                }
#pragma warning disable 168
                catch (Exception ignored) { }
#pragma warning restore 168
                // Validate: must be >= 0, and values >= 1 must be whole edit counts.
                if (fms < 0.0f)
                {
                    { if (true) { throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_LIMITS)); } }
                }
                else if (fms >= 1.0f && fms != (int)fms)
                {
                    { if (true) { throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_EDITS)); } }
                }
                q = new FuzzyQueryNode(field,
                    EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), fms, term.beginColumn, term.endColumn);
            }
            else if (regexp)
            {
                // Strip the surrounding '/' delimiters from the regexp term.
                string re = term.image.Substring(1, (term.image.Length - 1) - 1);
                q = new RegexpQueryNode(field, re, 0, re.Length);
            }
            break;
        case RegexpToken.RANGEIN_START:
        case RegexpToken.RANGEEX_START:
            // Bracketed range: '[' is inclusive, '{' is exclusive (same for the end).
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGEIN_START:
                    Jj_consume_token(RegexpToken.RANGEIN_START);
                    startInc = true;
                    break;
                case RegexpToken.RANGEEX_START:
                    Jj_consume_token(RegexpToken.RANGEEX_START);
                    break;
                default:
                    jj_la1[19] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGE_GOOP:
                    goop1 = Jj_consume_token(RegexpToken.RANGE_GOOP);
                    break;
                case RegexpToken.RANGE_QUOTED:
                    goop1 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
                    break;
                default:
                    jj_la1[20] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            // The "TO" keyword between the bounds is optional.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGE_TO:
                    Jj_consume_token(RegexpToken.RANGE_TO);
                    break;
                default:
                    jj_la1[21] = jj_gen;
                    break;
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGE_GOOP:
                    goop2 = Jj_consume_token(RegexpToken.RANGE_GOOP);
                    break;
                case RegexpToken.RANGE_QUOTED:
                    goop2 = Jj_consume_token(RegexpToken.RANGE_QUOTED);
                    break;
                default:
                    jj_la1[22] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.RANGEIN_END:
                    Jj_consume_token(RegexpToken.RANGEIN_END);
                    endInc = true;
                    break;
                case RegexpToken.RANGEEX_END:
                    Jj_consume_token(RegexpToken.RANGEEX_END);
                    break;
                default:
                    jj_la1[23] = jj_gen;
                    Jj_consume_token(-1);
                    throw new ParseException();
            }
            // Optional boost on the range.
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.CARAT:
                    Jj_consume_token(RegexpToken.CARAT);
                    boost = Jj_consume_token(RegexpToken.NUMBER);
                    break;
                default:
                    jj_la1[24] = jj_gen;
                    break;
            }
            // Strip surrounding quotes from quoted range bounds.
            if (goop1.kind == RegexpToken.RANGE_QUOTED)
            {
                goop1.image = goop1.image.Substring(1, (goop1.image.Length - 1) - 1);
            }
            if (goop2.kind == RegexpToken.RANGE_QUOTED)
            {
                goop2.image = goop2.image.Substring(1, (goop2.image.Length - 1) - 1);
            }
            qLower = new FieldQueryNode(field,
                EscapeQuerySyntaxImpl.DiscardEscapeChar(goop1.image), goop1.beginColumn, goop1.endColumn);
            qUpper = new FieldQueryNode(field,
                EscapeQuerySyntaxImpl.DiscardEscapeChar(goop2.image), goop2.beginColumn, goop2.endColumn);
            q = new TermRangeQueryNode(qLower, qUpper, startInc ? true : false, endInc ? true : false);
            break;
        case RegexpToken.QUOTED:
            // Quoted phrase: strip quotes, then accept optional ~slop and ^boost.
            term = Jj_consume_token(RegexpToken.QUOTED);
            q = new QuotedFieldQueryNode(field,
                EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image.Substring(1, (term.image.Length - 1) - 1)),
                term.beginColumn + 1, term.endColumn - 1);
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.FUZZY_SLOP:
                    fuzzySlop = Jj_consume_token(RegexpToken.FUZZY_SLOP);
                    break;
                default:
                    jj_la1[25] = jj_gen;
                    break;
            }
            switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
            {
                case RegexpToken.CARAT:
                    Jj_consume_token(RegexpToken.CARAT);
                    boost = Jj_consume_token(RegexpToken.NUMBER);
                    break;
                default:
                    jj_la1[26] = jj_gen;
                    break;
            }
            int phraseSlop = 0;
            if (fuzzySlop != null)
            {
                try
                {
                    phraseSlop = (int)Convert.ToSingle(fuzzySlop.image.Substring(1), CultureInfo.InvariantCulture);
                    q = new SlopQueryNode(q, phraseSlop);
                }
#pragma warning disable 168
                catch (Exception ignored)
#pragma warning restore 168
                {
                    /* Should this be handled somehow? (defaults to "no PhraseSlop", if
                     * slop number is invalid) */
                }
            }
            break;
        default:
            jj_la1[27] = jj_gen;
            Jj_consume_token(-1);
            throw new ParseException();
    }
    if (boost != null)
    {
        float f = (float)1.0;
        try
        {
            f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null)
            {
                q = new BoostQueryNode(q, f);
            }
        }
#pragma warning disable 168
        catch (Exception ignored)
#pragma warning restore 168
        {
            /* Should this be handled somehow? (defaults to "no boost", if
             * boost number is invalid) */
        }
    }
    { if (true) { return (q); } }
    throw new Exception("Missing return statement in function");
}