/// <summary>
/// Detects untokenized fields and sets as NotAnalyzed in analyzer
/// </summary>
/// <param name="query">Raw query text, possibly containing "[[...]]"-wrapped terms.</param>
/// <param name="queryParser">Parser that is told which field/term pairs must stay untokenized.</param>
/// <returns>The query with each "[[term]]" rewritten as a quoted "term".</returns>
private static string PreProcessUntokenizedTerms(string query, RangeQueryParser queryParser)
{
    var untokenizedMatches = untokenizedQuery.Matches(query);
    if (untokenizedMatches.Count < 1)
    {
        return(query);
    }

    var sb = new StringBuilder(query);
    // Lazily built only when a match lacks a group-2 capture and we must
    // attribute the term to a field found by a separate regex pass.
    MatchCollection fieldMatches = null;

    // process in reverse order to leverage match string indexes
    // (mutating sb only shifts text at/after the current match, so the
    // indexes of earlier, still-unprocessed matches remain valid)
    for (var i = untokenizedMatches.Count; i > 0; i--)
    {
        var match = untokenizedMatches[i - 1];

        // specify that term for this field should not be tokenized
        var value = match.Groups[2].Value;
        var term = match.Groups[2];
        string name = match.Groups[1].Value;

        if (string.IsNullOrEmpty(value))
        {
            // Group 2 was empty: the "[[...]]" token was captured by group 3
            // instead, and this match carries no usable field name.
            // NOTE(review): the exact group layout depends on the
            // untokenizedQuery regex, which is not visible in this chunk —
            // confirm against its definition.
            value = match.Groups[3].Value;
            term = match.Groups[3];
            if (fieldMatches == null)
            {
                fieldMatches = fieldQuery.Matches(query);
            }
            // Attribute the term to the nearest field declared at or before it.
            var lastField = fieldMatches.Cast<Match>().LastOrDefault(x => x.Index <= term.Index);
            if (lastField != null)
            {
                name = lastField.Groups[1].Value;
            }
        }

        // Strip the leading "[[" and trailing "]]" to get the bare term.
        var rawTerm = value.Substring(2, value.Length - 4);
        queryParser.SetUntokenized(name, Unescape(rawTerm));

        // introduce " " around the term
        var startIndex = term.Index;
        var length = term.Length - 2;
        if (sb[startIndex + length - 1] != '"')
        {
            // Insert the closing quote first so startIndex-relative
            // offsets used below are not disturbed.
            sb.Insert(startIndex + length, '"');
            length += 1;
        }
        if (sb[startIndex + 2] != '"')
        {
            sb.Insert(startIndex + 2, '"');
            length += 1;
        }

        // remove enclosing "[[" "]]" from term value (again in reverse order)
        sb.Remove(startIndex + length, 2);
        sb.Remove(startIndex, 2);
    }
    return(sb.ToString());
}
/// <summary>
/// Detects untokenized ("[[...]]"-wrapped) terms in the query, registers each
/// one with the parser as NotAnalyzed, and rewrites the wrapper as quotes.
/// </summary>
/// <param name="query">Raw query text, possibly containing "[[...]]"-wrapped terms.</param>
/// <param name="queryParser">Parser that records which field/term pairs stay untokenized.</param>
/// <returns>The query with each "[[term]]" rewritten as a quoted "term".</returns>
private static string PreProcessUntokenizedTerms(string query, RangeQueryParser queryParser)
{
    var matches = untokenizedQuery.Matches(query);
    if (matches.Count == 0)
    {
        return(query);
    }

    var rewritten = new StringBuilder(query);

    // Walk the matches back-to-front so edits to the builder never
    // invalidate the indexes of matches not yet processed.
    for (var matchIndex = matches.Count - 1; matchIndex >= 0; matchIndex--)
    {
        var match = matches[matchIndex];

        // Group 1 = field name, group 2 = the bracketed "[[term]]" token.
        var bracketed = match.Groups[2].Value;
        // Strip the surrounding "[[" / "]]" and mark the bare term as untokenized.
        var inner = bracketed.Substring(2, bracketed.Length - 4);
        queryParser.SetUntokenized(match.Groups[1].Value, Unescape(inner));

        var term = match.Groups[2];
        var start = term.Index;
        var span = term.Length - 2;

        // Wrap the inner term in quotes unless it already carries them;
        // the closing quote goes in first so 'start'-relative offsets
        // used below are not disturbed.
        if (rewritten[start + span - 1] != '"')
        {
            rewritten.Insert(start + span, '"');
            span += 1;
        }
        if (rewritten[start + 2] != '"')
        {
            rewritten.Insert(start + 2, '"');
            span += 1;
        }

        // Drop the enclosing "]]" then "[[" — trailing pair first, again so
        // earlier offsets are unaffected by the removal.
        rewritten.Remove(start + span, 2);
        rewritten.Remove(start, 2);
    }

    return(rewritten.ToString());
}
/// <summary>
/// Finds "[[...]]"-wrapped terms, marks each as NotAnalyzed on the parser,
/// and replaces the double-bracket wrapper with surrounding quotes.
/// </summary>
/// <param name="query">Raw query text, possibly containing "[[...]]"-wrapped terms.</param>
/// <param name="queryParser">Parser that records which field/term pairs stay untokenized.</param>
/// <returns>The query with each "[[term]]" rewritten as a quoted "term".</returns>
private static string PreProcessUntokenizedTerms(string query, RangeQueryParser queryParser)
{
    var hits = untokenizedQuery.Matches(query);
    if (hits.Count < 1)
        return query;

    var buffer = new StringBuilder(query);

    // Iterate from the last hit to the first: mutating the builder only
    // shifts text at or after the current hit, so the regex-captured
    // indexes of earlier hits remain correct.
    var remaining = hits.Count;
    while (remaining > 0)
    {
        remaining -= 1;
        var hit = hits[remaining];

        // Group 1 = field name, group 2 = the bracketed "[[term]]" token.
        var token = hit.Groups[2].Value;
        var bare = token.Substring(2, token.Length - 4);
        queryParser.SetUntokenized(hit.Groups[1].Value, Unescape(bare));

        var group = hit.Groups[2];
        var at = group.Index;
        var extent = group.Length - 2;

        // Quote the term if it is not already quoted; the closing quote is
        // inserted first so the opening insertion cannot shift its position.
        if (buffer[at + extent - 1] != '"')
        {
            buffer.Insert(at + extent, '"');
            extent += 1;
        }
        if (buffer[at + 2] != '"')
        {
            buffer.Insert(at + 2, '"');
            extent += 1;
        }

        // Strip "]]" first, then "[[", keeping 'at' valid throughout.
        buffer.Remove(at + extent, 2);
        buffer.Remove(at, 2);
    }

    return buffer.ToString();
}
/// <summary>
/// Detects untokenized fields and sets as NotAnalyzed in analyzer
/// </summary>
/// <param name="query">Raw query text, possibly containing "[[...]]"-wrapped terms.</param>
/// <param name="queryParser">Parser that is told which field/term pairs must stay untokenized.</param>
/// <returns>The query with each "[[term]]" rewritten as a quoted "term".</returns>
private static string PreProcessUntokenizedTerms(string query, RangeQueryParser queryParser)
{
    var untokenizedMatches = untokenizedQuery.Matches(query);
    if (untokenizedMatches.Count < 1)
        return query;

    var sb = new StringBuilder(query);
    // Lazily built only when a match lacks a group-2 capture and we must
    // attribute the term to a field found by a separate regex pass.
    MatchCollection fieldMatches = null;

    // process in reverse order to leverage match string indexes
    // (mutating sb only shifts text at/after the current match, so the
    // indexes of earlier, still-unprocessed matches remain valid)
    for (var i = untokenizedMatches.Count; i > 0; i--)
    {
        var match = untokenizedMatches[i - 1];

        // specify that term for this field should not be tokenized
        var value = match.Groups[2].Value;
        var term = match.Groups[2];
        string name = match.Groups[1].Value;

        if (string.IsNullOrEmpty(value))
        {
            // Group 2 was empty: the "[[...]]" token was captured by group 3
            // instead, and this match carries no usable field name.
            // NOTE(review): the exact group layout depends on the
            // untokenizedQuery regex, which is not visible in this chunk —
            // confirm against its definition.
            value = match.Groups[3].Value;
            term = match.Groups[3];
            if(fieldMatches == null)
                fieldMatches = fieldQuery.Matches(query);
            // Attribute the term to the nearest field declared at or before it.
            var lastField = fieldMatches.Cast<Match>().LastOrDefault(x => x.Index <= term.Index);
            if (lastField != null)
            {
                name = lastField.Groups[1].Value;
            }
        }

        // Strip the leading "[[" and trailing "]]" to get the bare term.
        var rawTerm = value.Substring(2, value.Length - 4);
        queryParser.SetUntokenized(name, Unescape(rawTerm));

        // introduce " " around the term
        var startIndex = term.Index;
        var length = term.Length - 2;
        if (sb[startIndex + length - 1] != '"')
        {
            // Insert the closing quote first so startIndex-relative
            // offsets used below are not disturbed.
            sb.Insert(startIndex + length, '"');
            length += 1;
        }
        if (sb[startIndex + 2] != '"')
        {
            sb.Insert(startIndex + 2, '"');
            length += 1;
        }

        // remove enclosing "[[" "]]" from term value (again in reverse order)
        sb.Remove(startIndex + length, 2);
        sb.Remove(startIndex, 2);
    }
    return sb.ToString();
}