/// <summary>
/// Builds the <see cref="Query"/> for a <see cref="GroupQueryNode"/>.
/// The child node was already processed by the <see cref="QueryTreeBuilder"/>,
/// which stored its resulting <see cref="Query"/> under the builder tag; this
/// method simply unwraps and returns that pre-built query.
/// </summary>
public virtual Query Build(IQueryNode queryNode)
{
    GroupQueryNode groupNode = (GroupQueryNode)queryNode;
    IQueryNode child = groupNode.GetChild();

    // The tag holds the Query produced when the child was built earlier.
    object builtQuery = child.GetTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
    return (Query)builtQuery;
}
/// <summary>
/// Runs the configured <c>analyzer</c> over the text of every textable field
/// node (excluding wildcard/fuzzy/regexp nodes and range-query children) and
/// rewrites the node according to the tokens produced:
/// <list type="bullet">
///   <item><description>0 tokens → <see cref="NoTokenFoundQueryNode"/></description></item>
///   <item><description>1 token → the original <see cref="FieldQueryNode"/> with its text replaced</description></item>
///   <item><description>several tokens at one position (synonyms) → a grouped <see cref="StandardBooleanQueryNode"/></description></item>
///   <item><description>several positions, unquoted → boolean combination per <c>defaultOperator</c></description></item>
///   <item><description>quoted / overlapping positions → <see cref="TokenizedPhraseQueryNode"/> or <see cref="MultiPhraseQueryNode"/></description></item>
/// </list>
/// Nodes that do not match the filter are returned unchanged.
/// </summary>
/// <param name="node">the node being post-processed by the tree walker</param>
/// <returns>the (possibly replaced) node</returns>
protected override IQueryNode PostProcessNode(IQueryNode node)
{
    if (node is ITextableQueryNode
        && !(node is WildcardQueryNode)
        && !(node is FuzzyQueryNode)
        && !(node is RegexpQueryNode)
        && !(node.Parent is IRangeQueryNode))
    {
        FieldQueryNode fieldNode = ((FieldQueryNode)node);
        string text = fieldNode.GetTextAsString();
        string field = fieldNode.GetFieldAsString();
        CachingTokenFilter buffer = null;
        IPositionIncrementAttribute posIncrAtt = null;
        int numTokens = 0;        // total tokens emitted by the analyzer
        int positionCount = 0;    // number of distinct positions (sum of non-zero increments)
        bool severalTokensAtSamePosition = false; // true when any increment of 0 was seen (synonyms)
        TokenStream source = null;
        try
        {
            source = this.analyzer.GetTokenStream(field, text);
            source.Reset();
            // Cache the tokens so the stream can be replayed below after counting.
            buffer = new CachingTokenFilter(source);
            if (buffer.HasAttribute<IPositionIncrementAttribute>())
            {
                posIncrAtt = buffer.GetAttribute<IPositionIncrementAttribute>();
            }
            try
            {
                // First pass: count tokens and positions; tokens are cached by the filter.
                while (buffer.IncrementToken())
                {
                    numTokens++;
                    int positionIncrement = (posIncrAtt != null) ? posIncrAtt.PositionIncrement : 1;
                    if (positionIncrement != 0)
                    {
                        positionCount += positionIncrement;
                    }
                    else
                    {
                        severalTokensAtSamePosition = true;
                    }
                }
            }
#pragma warning disable 168
            catch (IOException e)
#pragma warning restore 168
            {
                // ignore
            }
        }
        catch (IOException e)
        {
            throw new Exception(e.ToString(), e);
        }
        finally
        {
            // Dispose the underlying stream; the cached tokens remain replayable.
            IOUtils.DisposeWhileHandlingException(source);
        }

        // rewind the buffer stream
        buffer.Reset();

        if (!buffer.HasAttribute<ICharTermAttribute>())
        {
            return (new NoTokenFoundQueryNode());
        }

        ICharTermAttribute termAtt = buffer.GetAttribute<ICharTermAttribute>();

        if (numTokens == 0)
        {
            // Analyzer removed everything (e.g. only stop words).
            return (new NoTokenFoundQueryNode());
        }
        else if (numTokens == 1)
        {
            // Single token: keep the original node, just swap in the analyzed text.
            string term = null;
            try
            {
                bool hasNext;
                hasNext = buffer.IncrementToken();
                if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                term = termAtt.ToString();
            }
            catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
            {
                // safe to ignore, because we know the number of tokens
            }
            fieldNode.Text = term.AsCharSequence();
            return (fieldNode);
        }
        else if (severalTokensAtSamePosition || !(node is QuotedFieldQueryNode))
        {
            if (positionCount == 1 || !(node is QuotedFieldQueryNode))
            {
                // no phrase query:
                if (positionCount == 1)
                {
                    // simple case: only one position, with synonyms
                    List<IQueryNode> children = new List<IQueryNode>();
                    for (int i = 0; i < numTokens; i++)
                    {
                        string term = null;
                        try
                        {
                            bool hasNext = buffer.IncrementToken();
                            if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                            term = termAtt.ToString();
                        }
                        catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                        {
                            // safe to ignore, because we know the number of tokens
                        }
                        children.Add(new FieldQueryNode(field, term, -1, -1));
                    }
                    return (new GroupQueryNode(
                        new StandardBooleanQueryNode(children, positionCount == 1)));
                }
                else
                {
                    // multiple positions
                    IQueryNode q = new StandardBooleanQueryNode(Collections.EmptyList<IQueryNode>(), false);
                    // Accumulates tokens that share a position into a nested boolean node.
                    IQueryNode currentQuery = null;
                    for (int i = 0; i < numTokens; i++)
                    {
                        string term = null;
                        try
                        {
                            bool hasNext = buffer.IncrementToken();
                            if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                            term = termAtt.ToString();
                        }
                        catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                        {
                            // safe to ignore, because we know the number of tokens
                        }
                        if (posIncrAtt != null && posIncrAtt.PositionIncrement == 0)
                        {
                            // Same position as the previous token: fold into a synonym group.
                            if (!(currentQuery is BooleanQueryNode))
                            {
                                IQueryNode t = currentQuery;
                                currentQuery = new StandardBooleanQueryNode(Collections.EmptyList<IQueryNode>(), true);
                                ((BooleanQueryNode)currentQuery).Add(t);
                            }
                            ((BooleanQueryNode)currentQuery).Add(new FieldQueryNode(field, term, -1, -1));
                        }
                        else
                        {
                            // New position: flush the previous group into the top-level boolean.
                            if (currentQuery != null)
                            {
                                if (this.defaultOperator == Operator.OR)
                                {
                                    q.Add(currentQuery);
                                }
                                else
                                {
                                    q.Add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                                }
                            }
                            currentQuery = new FieldQueryNode(field, term, -1, -1);
                        }
                    }
                    // Flush the final group.
                    if (this.defaultOperator == Operator.OR)
                    {
                        q.Add(currentQuery);
                    }
                    else
                    {
                        q.Add(new ModifierQueryNode(currentQuery, Modifier.MOD_REQ));
                    }

                    if (q is BooleanQueryNode)
                    {
                        q = new GroupQueryNode(q);
                    }
                    return (q);
                }
            }
            else
            {
                // phrase query:
                MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();

                List<FieldQueryNode> multiTerms = new List<FieldQueryNode>();
                int position = -1;
                int i = 0;
                int termGroupCount = 0;
                for (; i < numTokens; i++)
                {
                    string term = null;
                    int positionIncrement = 1;
                    try
                    {
                        bool hasNext = buffer.IncrementToken();
                        if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                        term = termAtt.ToString();
                        if (posIncrAtt != null)
                        {
                            positionIncrement = posIncrAtt.PositionIncrement;
                        }
                    }
                    catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                    {
                        // safe to ignore, because we know the number of tokens
                    }

                    if (positionIncrement > 0 && multiTerms.Count > 0)
                    {
                        // A new position starts: emit the accumulated same-position group.
                        foreach (FieldQueryNode termNode in multiTerms)
                        {
                            if (this.positionIncrementsEnabled)
                            {
                                termNode.PositionIncrement = position;
                            }
                            else
                            {
                                termNode.PositionIncrement = termGroupCount;
                            }
                            mpq.Add(termNode);
                        }

                        // Only increment once for each "group" of
                        // terms that were in the same position:
                        termGroupCount++;

                        multiTerms.Clear();
                    }

                    position += positionIncrement;
                    multiTerms.Add(new FieldQueryNode(field, term, -1, -1));
                }

                // Emit the trailing group left after the loop.
                foreach (FieldQueryNode termNode in multiTerms)
                {
                    if (this.positionIncrementsEnabled)
                    {
                        termNode.PositionIncrement = position;
                    }
                    else
                    {
                        termNode.PositionIncrement = termGroupCount;
                    }
                    mpq.Add(termNode);
                }

                return (mpq);
            }
        }
        else
        {
            // Quoted, non-overlapping tokens: plain tokenized phrase.
            TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();

            int position = -1;

            for (int i = 0; i < numTokens; i++)
            {
                string term = null;
                int positionIncrement = 1;
                try
                {
                    bool hasNext = buffer.IncrementToken();
                    if (Debugging.AssertsEnabled) { Debugging.Assert(hasNext == true); }
                    term = termAtt.ToString();
                    if (posIncrAtt != null)
                    {
                        positionIncrement = posIncrAtt.PositionIncrement;
                    }
                }
                catch (IOException) // LUCENENET: IDE0059: Remove unnecessary value assignment
                {
                    // safe to ignore, because we know the number of tokens
                }

                FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);

                if (this.positionIncrementsEnabled)
                {
                    position += positionIncrement;
                    newFieldNode.PositionIncrement = position;
                }
                else
                {
                    newFieldNode.PositionIncrement = i;
                }

                pq.Add(newFieldNode);
            }

            return (pq);
        }
    }

    return (node);
}
/// <summary>
/// Parses a single clause of the query grammar: either
/// <c>field (: | =) term</c>, a relational range clause
/// <c>field (&lt; | &lt;= | &gt; | &gt;=) term</c> producing a
/// <see cref="TermRangeQueryNode"/>, a bare term, or a parenthesized
/// sub-query with an optional <c>^boost</c>.
/// NOTE(review): this appears to be JavaCC-generated parser code (the
/// <c>Jj_*</c> helpers and <c>jj_la1</c>/<c>jj_gen</c> tables) — do not
/// hand-edit the control flow; regenerate from the grammar instead.
/// </summary>
/// <param name="field">the field to apply when the clause has no explicit field prefix</param>
/// <returns>the parsed query node</returns>
/// <exception cref="ParseException">if the token stream does not match the grammar</exception>
public IQueryNode Clause(string field)
{
    IQueryNode q;
    Token fieldToken = null, boost = null, @operator = null, term = null;
    FieldQueryNode qLower, qUpper;
    bool lowerInclusive, upperInclusive;

    bool group = false;
    // Lookahead of 3: does the input start with "field OP ..."?
    if (Jj_2_2(3))
    {
        fieldToken = Jj_consume_token(RegexpToken.TERM);
        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
        {
            case RegexpToken.OP_COLON:
            case RegexpToken.OP_EQUAL:
                // "field : term" or "field = term"
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OP_COLON:
                        Jj_consume_token(RegexpToken.OP_COLON);
                        break;
                    case RegexpToken.OP_EQUAL:
                        Jj_consume_token(RegexpToken.OP_EQUAL);
                        break;
                    default:
                        jj_la1[7] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                q = Term(field);
                break;
            case RegexpToken.OP_LESSTHAN:
            case RegexpToken.OP_LESSTHANEQ:
            case RegexpToken.OP_MORETHAN:
            case RegexpToken.OP_MORETHANEQ:
                // Relational operator clause → one-sided range query.
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.OP_LESSTHAN:
                        @operator = Jj_consume_token(RegexpToken.OP_LESSTHAN);
                        break;
                    case RegexpToken.OP_LESSTHANEQ:
                        @operator = Jj_consume_token(RegexpToken.OP_LESSTHANEQ);
                        break;
                    case RegexpToken.OP_MORETHAN:
                        @operator = Jj_consume_token(RegexpToken.OP_MORETHAN);
                        break;
                    case RegexpToken.OP_MORETHANEQ:
                        @operator = Jj_consume_token(RegexpToken.OP_MORETHANEQ);
                        break;
                    default:
                        jj_la1[8] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.TERM:
                        term = Jj_consume_token(RegexpToken.TERM);
                        break;
                    case RegexpToken.QUOTED:
                        term = Jj_consume_token(RegexpToken.QUOTED);
                        break;
                    case RegexpToken.NUMBER:
                        term = Jj_consume_token(RegexpToken.NUMBER);
                        break;
                    default:
                        jj_la1[9] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                if (term.kind == RegexpToken.QUOTED)
                {
                    // Strip the surrounding quote characters.
                    term.image = term.image.Substring(1, (term.image.Length - 1) - 1);
                }
                // Map the operator onto an open-ended ("*") range with the right inclusivity.
                switch (@operator.kind)
                {
                    case RegexpToken.OP_LESSTHAN:
                        lowerInclusive = true;
                        upperInclusive = false;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_LESSTHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_MORETHAN:
                        lowerInclusive = false;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    case RegexpToken.OP_MORETHANEQ:
                        lowerInclusive = true;
                        upperInclusive = true;
                        qLower = new FieldQueryNode(field,
                            EscapeQuerySyntaxImpl.DiscardEscapeChar(term.image), term.beginColumn, term.endColumn);
                        qUpper = new FieldQueryNode(field, "*", term.beginColumn, term.endColumn);
                        break;
                    default:
                        {
                            // if (true) keeps the generated code's definite-assignment analysis happy.
                            if (true)
                            {
                                throw new Exception("Unhandled case: operator=" + @operator.ToString());
                            }
                        }
                }
                q = new TermRangeQueryNode(qLower, qUpper, lowerInclusive, upperInclusive);
                break;
            default:
                jj_la1[10] = jj_gen;
                Jj_consume_token(-1);
                throw new ParseException();
        }
    }
    else
    {
        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
        {
            case RegexpToken.LPAREN:
            case RegexpToken.QUOTED:
            case RegexpToken.TERM:
            case RegexpToken.REGEXPTERM:
            case RegexpToken.RANGEIN_START:
            case RegexpToken.RANGEEX_START:
            case RegexpToken.NUMBER:
                // Optional "field:" / "field=" prefix (lookahead of 2).
                if (Jj_2_1(2))
                {
                    fieldToken = Jj_consume_token(RegexpToken.TERM);
                    switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                    {
                        case RegexpToken.OP_COLON:
                            Jj_consume_token(RegexpToken.OP_COLON);
                            break;
                        case RegexpToken.OP_EQUAL:
                            Jj_consume_token(RegexpToken.OP_EQUAL);
                            break;
                        default:
                            jj_la1[11] = jj_gen;
                            Jj_consume_token(-1);
                            throw new ParseException();
                    }
                    field = EscapeQuerySyntaxImpl.DiscardEscapeChar(fieldToken.image).ToString();
                }
                else
                {
                    ;
                }
                switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                {
                    case RegexpToken.QUOTED:
                    case RegexpToken.TERM:
                    case RegexpToken.REGEXPTERM:
                    case RegexpToken.RANGEIN_START:
                    case RegexpToken.RANGEEX_START:
                    case RegexpToken.NUMBER:
                        q = Term(field);
                        break;
                    case RegexpToken.LPAREN:
                        // Parenthesized sub-query, optionally followed by "^boost".
                        Jj_consume_token(RegexpToken.LPAREN);
                        q = Query(field);
                        Jj_consume_token(RegexpToken.RPAREN);
                        switch ((jj_ntk == -1) ? Jj_ntk() : jj_ntk)
                        {
                            case RegexpToken.CARAT:
                                Jj_consume_token(RegexpToken.CARAT);
                                boost = Jj_consume_token(RegexpToken.NUMBER);
                                break;
                            default:
                                jj_la1[12] = jj_gen;
                                break;
                        }
                        group = true;
                        break;
                    default:
                        jj_la1[13] = jj_gen;
                        Jj_consume_token(-1);
                        throw new ParseException();
                }
                break;
            default:
                jj_la1[14] = jj_gen;
                Jj_consume_token(-1);
                throw new ParseException();
        }
    }
    if (boost != null)
    {
        float f = (float)1.0;
        try
        {
            f = Convert.ToSingle(boost.image, CultureInfo.InvariantCulture);
            // avoid boosting null queries, such as those caused by stop words
            if (q != null)
            {
                q = new BoostQueryNode(q, f);
            }
        }
#pragma warning disable 168
        catch (Exception ignored)
#pragma warning restore 168
        {
            /* Should this be handled somehow? (defaults to "no boost", if
             * boost number is invalid) */
        }
    }
    if (group)
    {
        q = new GroupQueryNode(q);
    }
    { if (true) { return (q); } }
    throw new Exception("Missing return statement in function");
}