/// <summary>
/// Parses: and-filter ::= negation-filter ("and" negation-filter)*
/// Returns the single operand unwrapped when no "and" is present.
/// </summary>
private Filter<T> MatchAndFilter(FilterLexer lexer)
{
    var operands = new List<Filter<T>>();
    operands.Add(MatchNegationFilter(lexer));

    for (FilterToken token = LookAhead(lexer, 1);
         token != null && token.Type == FilterTokenType.And;
         token = LookAhead(lexer, 1))
    {
        GetNextToken(lexer); // consume the "and" token
        operands.Add(MatchNegationFilter(lexer));
    }

    // Avoid wrapping a lone operand in an AndFilter.
    return operands.Count == 1 ? operands[0] : new AndFilter<T>(operands);
}
/// <summary>
/// Parses: value ::= regex-word ["i" modifier] | word
/// Regexes are always culture-invariant; the optional modifier token
/// additionally makes them case-insensitive.
/// </summary>
/// <exception cref="FilterParseException">Thrown when neither a regex nor a word follows.</exception>
private static Filter<string> MatchValue(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);
    if (token != null)
    {
        if (token.Type == FilterTokenType.RegexWord)
        {
            GetNextToken(lexer); // consume the regex token

            var options = RegexOptions.CultureInvariant;
            FilterToken modifier = LookAhead(lexer, 1);
            if (modifier != null && modifier.Type == FilterTokenType.CaseInsensitiveModifier)
            {
                options |= RegexOptions.IgnoreCase;
                GetNextToken(lexer); // consume the modifier
            }

            return new RegexFilter(new Regex(token.Text, options));
        }

        if (IsWord(token))
        {
            GetNextToken(lexer); // consume the word
            return new EqualityFilter<string>(token.Text);
        }
    }

    throw new FilterParseException(Resources.FilterParser_ValueExpected);
}
/// <summary>
/// Entry point for parsing a single filter expression.
/// An empty token stream is rejected up front.
/// </summary>
/// <exception cref="FilterParseException">Thrown when the lexer produced no tokens.</exception>
private Filter<T> MatchFilter(FilterLexer lexer)
{
    if (lexer.Tokens.Count == 0)
        throw new FilterParseException(Resources.FilterParser_EmptyFilterError);

    return MatchOrFilter(lexer);
}
public void EmptyExpressionsAreNotValid(string filter)
{
    // Lexing an empty/blank expression must produce no tokens.
    var lexer = new FilterLexer(filter);
    Assert.IsNotNull(lexer);
    Assert.Count(0, lexer.Tokens);

    // Both cursor-style and look-ahead access report exhaustion,
    // and repeated GetNextToken calls stay null.
    Assert.IsNull(lexer.GetNextToken());
    Assert.IsNull(lexer.LookAhead(1));
    Assert.IsNull(lexer.GetNextToken());
}
/// <summary>
/// Peeks at the token stream, converting lexer error tokens
/// into parse exceptions so callers never see them.
/// </summary>
/// <exception cref="FilterParseException">Thrown when the peeked token is an error token.</exception>
private static FilterToken LookAhead(FilterLexer lexer, int index)
{
    FilterToken token = lexer.LookAhead(index);

    if (token != null && token.Type == FilterTokenType.Error)
        throw new FilterParseException(token.Text);

    return token;
}
/// <summary>
/// Consumes a mandatory right-bracket token; anything else is a syntax error.
/// </summary>
/// <exception cref="FilterParseException">Thrown when the next token is missing or not a right bracket.</exception>
private static void MatchRightBracket(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);

    if (token == null || token.Type != FilterTokenType.RightBracket)
        throw new FilterParseException(Resources.FilterParser_RightBracketExpected);

    GetNextToken(lexer);
}
/// <summary>
/// Consumes a mandatory comma token.
/// </summary>
/// <exception cref="FilterParseException">
/// Should never be thrown in practice: callers only invoke this after
/// peeking a comma token, but the guard is kept for safety.
/// </exception>
private static void MatchComma(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);

    if (token == null || token.Type != FilterTokenType.Comma)
        throw new FilterParseException(Resources.FilterParser_CommaExpected);

    GetNextToken(lexer);
}
/// <summary>
/// Parses: negation-filter ::= "not" negation-filter | parenthesized-filter
/// Stacked "not" tokens are handled by recursion.
/// </summary>
private Filter<T> MatchNegationFilter(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);

    if (token != null && token.Type == FilterTokenType.Not)
    {
        GetNextToken(lexer); // consume the "not" token
        return new NotFilter<T>(MatchNegationFilter(lexer));
    }

    return MatchParenthesizedFilter(lexer);
}
/// <summary>
/// Consumes a word token and returns its text, to be used as a filter key.
/// </summary>
/// <exception cref="FilterParseException">
/// Should never be thrown in practice: callers only invoke this after
/// peeking a word token, but the guard is kept for safety.
/// </exception>
private static string MatchKey(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);

    if (token == null || IsNotWord(token))
        throw new FilterParseException(Resources.FilterParser_StringLiteralExpected);

    GetNextToken(lexer);
    return token.Text;
}
public void GetNextTokenLookAheadWithOneToken()
{
    // Single-token stream; LookAhead indexes relative to the cursor.
    var lexer = new FilterLexer("a");
    Assert.IsNotNull(lexer);
    Assert.Count(1, lexer.Tokens);

    // Before consuming anything, only LookAhead(1) sees the token.
    Assert.IsNull(lexer.LookAhead(0));
    Assert.IsNotNull(lexer.LookAhead(1));
    Assert.IsNull(lexer.LookAhead(2));

    // After consuming the token, LookAhead(0) refers to it and
    // there is nothing further ahead.
    Assert.IsNotNull(lexer.GetNextToken());
    Assert.IsNotNull(lexer.LookAhead(0));
    Assert.IsNull(lexer.LookAhead(1));

    // Once past the end, every access returns null.
    Assert.IsNull(lexer.GetNextToken());
    Assert.IsNull(lexer.LookAhead(0));
    Assert.IsNull(lexer.LookAhead(1));
}
/// <summary>
/// Parses: parenthesized-filter ::= "(" or-filter ")" | simple-filter
/// </summary>
private Filter<T> MatchParenthesizedFilter(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);

    // NOTE(review): on exhausted input this returns null rather than
    // throwing — callers appear to tolerate this; confirm before changing.
    if (token == null)
        return null;

    if (token.Type != FilterTokenType.LeftBracket)
        return MatchSimpleFilter(lexer);

    GetNextToken(lexer); // consume "("
    Filter<T> inner = MatchOrFilter(lexer);
    MatchRightBracket(lexer); // consume the mandatory ")"
    return inner;
}
/// <summary>
/// Appends a phrase to the result, quoting it when it is empty or
/// contains any character the lexer would not treat as a word character;
/// otherwise the phrase is emitted verbatim.
/// </summary>
private void AppendPhrase(string phrase)
{
    bool requiresQuoting = phrase.Length == 0;

    foreach (char c in phrase)
    {
        if (!FilterLexer.IsWordChar(c))
        {
            requiresQuoting = true;
            break;
        }
    }

    if (requiresQuoting)
        AppendQuotedString(phrase);
    else
        result.Append(phrase);
}
public void GetNextTokenAndLookAheadWithTwoTokens()
{
    // Two-token stream ("a" then ":"); LookAhead is cursor-relative.
    var lexer = new FilterLexer("a:");
    Assert.IsNotNull(lexer);
    Assert.Count(2, lexer.Tokens);

    // Before consuming: LookAhead(1) and LookAhead(2) see the two tokens.
    Assert.IsNull(lexer.LookAhead(0));
    Assert.IsNotNull(lexer.LookAhead(1));
    Assert.IsNotNull(lexer.LookAhead(2));
    Assert.IsNull(lexer.LookAhead(3));

    // After consuming the first token: LookAhead(0) is it,
    // LookAhead(1) is the second token.
    Assert.IsNotNull(lexer.GetNextToken());
    Assert.IsNotNull(lexer.LookAhead(0));
    Assert.IsNotNull(lexer.LookAhead(1));
    Assert.IsNull(lexer.LookAhead(2));

    // After consuming the second token: nothing remains ahead.
    Assert.IsNotNull(lexer.GetNextToken());
    Assert.IsNotNull(lexer.LookAhead(0));
    Assert.IsNull(lexer.LookAhead(1));
    Assert.IsNull(lexer.LookAhead(2));

    // Past the end, every access returns null.
    Assert.IsNull(lexer.GetNextToken());
    Assert.IsNull(lexer.LookAhead(0));
    Assert.IsNull(lexer.LookAhead(1));
}
/// <summary>
/// Parses: simple-filter ::= "*" | key ":" match-sequence
/// The "*" token yields a filter that matches anything; otherwise a
/// key/value filter is built via the factory.
/// </summary>
/// <exception cref="FilterParseException">Thrown when neither form is present.</exception>
private Filter<T> MatchSimpleFilter(FilterLexer lexer)
{
    FilterToken token = LookAhead(lexer, 1);
    if (token != null)
    {
        if (token.Type == FilterTokenType.Star)
        {
            GetNextToken(lexer); // consume "*"
            return new AnyFilter<T>();
        }

        if (IsWord(token))
        {
            string key = MatchKey(lexer);
            MatchColon(lexer);
            Filter<string> values = MatchMatchSequence(lexer);
            return factory.CreateFilter(key, values);
        }
    }

    throw new FilterParseException(Resources.FilterParser_FilterExpressionExpected);
}
/// <summary>
/// Parses a sequence of include/exclude rules, each wrapping an or-filter.
/// Only the very first rule may omit the include/exclude keyword, in which
/// case it defaults to inclusion.
/// </summary>
/// <exception cref="FilterParseException">Thrown when a later rule lacks its include/exclude separator.</exception>
private FilterSet<T> MatchFilterSet(FilterLexer lexer)
{
    var rules = new List<FilterRule<T>>();

    for (FilterToken token = LookAhead(lexer, 1); token != null; token = LookAhead(lexer, 1))
    {
        FilterRuleType ruleType;
        switch (token.Type)
        {
            case FilterTokenType.Include:
                ruleType = FilterRuleType.Inclusion;
                GetNextToken(lexer); // consume "include"
                break;

            case FilterTokenType.Exclude:
                ruleType = FilterRuleType.Exclusion;
                GetNextToken(lexer); // consume "exclude"
                break;

            default:
                // No keyword: legal only for the first rule, which
                // defaults to inclusion.
                if (rules.Count != 0)
                    throw new FilterParseException(Resources.FilterParser_RulesNotSeperated);
                ruleType = FilterRuleType.Inclusion;
                break;
        }

        rules.Add(new FilterRule<T>(ruleType, MatchOrFilter(lexer)));
    }

    return new FilterSet<T>(rules);
}
/// <summary>
/// Parses: match-sequence ::= value ("," value)*
/// Multiple values are combined with OR semantics; a single value
/// is returned without an OR wrapper.
/// </summary>
private static Filter<string> MatchMatchSequence(FilterLexer lexer)
{
    var values = new List<Filter<string>> { MatchValue(lexer) };

    for (FilterToken token = LookAhead(lexer, 1);
         token != null && token.Type == FilterTokenType.Comma;
         token = LookAhead(lexer, 1))
    {
        MatchComma(lexer);
        values.Add(MatchValue(lexer));
    }

    return values.Count == 1 ? values[0] : new OrFilter<string>(values);
}
public void DelimitedElementWithMissingEndDelimiter(string filter)
{
    // An unterminated delimited element lexes to a single error token
    // positioned at the last character, naming the missing delimiter.
    var lexer = new FilterLexer(filter);
    Assert.Count(1, lexer.Tokens);

    FilterToken errorToken = lexer.Tokens[0];
    Assert.AreEqual(errorToken.Type, FilterTokenType.Error);
    Assert.AreEqual(errorToken.Position, filter.Length - 1);
    Assert.IsTrue(errorToken.Text.StartsWith("Missing end " + filter[0]));
}
public void DelimitedElementWithEscapedDelimiter(string filter, string tokenType)
{
    // An escaped delimiter inside a delimited element still lexes
    // to a single token of the expected type with unquoted text.
    var lexer = new FilterLexer(filter);
    Assert.Count(1, lexer.Tokens);

    FilterToken token = lexer.Tokens[0];
    Assert.AreEqual(token.Type, ParseTokenType(tokenType));
    Assert.AreEqual(token.Position, 0);
    Assert.AreEqual(token.Text, GetUnquotedString(filter));
}
public void DelimitersAreUnescaped(string filter, string expected, string tokenType)
{
    // The token text carries the unescaped form supplied by the data row.
    var lexer = new FilterLexer(filter);
    Assert.Count(1, lexer.Tokens);

    FilterToken token = lexer.Tokens[0];
    Assert.AreEqual(token.Type, ParseTokenType(tokenType));
    Assert.AreEqual(token.Position, 0);
    Assert.AreEqual(token.Text, expected);
}
public void FilterWithOneValue(string filter, string text)
{
    // Expect exactly: Word("Type") Colon Word(text)
    var lexer = new FilterLexer(filter);
    Assert.Count(3, lexer.Tokens);

    FilterToken keyToken = lexer.Tokens[0];
    Assert.AreEqual(keyToken.Type, FilterTokenType.Word);
    Assert.AreEqual(keyToken.Position, 0);
    Assert.AreEqual(keyToken.Text, "Type");

    FilterToken colonToken = lexer.Tokens[1];
    Assert.AreEqual(colonToken.Type, FilterTokenType.Colon);
    Assert.AreEqual(colonToken.Position, 4);
    Assert.AreEqual(colonToken.Text, null); // punctuation tokens carry no text

    FilterToken valueToken = lexer.Tokens[2];
    Assert.AreEqual(valueToken.Type, FilterTokenType.Word);
    Assert.AreEqual(valueToken.Position, 5);
    Assert.AreEqual(valueToken.Text, text);
}
public void UnrecognizedElement(string filter)
{
    // An unrecognized element lexes to a single word token covering
    // the whole input verbatim.
    FilterLexer lexer = new FilterLexer(filter);

    // Consistency fix: use Assert.Count like the sibling tests instead of
    // Assert.AreEqual(lexer.Tokens.Count, 1).
    Assert.Count(1, lexer.Tokens);

    FilterToken filterToken = lexer.Tokens[0];
    Assert.AreEqual(filterToken.Type, FilterTokenType.Word);
    Assert.AreEqual(filterToken.Position, 0);
    Assert.AreEqual(filterToken.Text, filter);
}
public void CaseSensitiveRegex(string key, string text)
{
    // A regex without the case-insensitive modifier lexes to a RegexWord
    // followed by a plain Word token.
    string filter = key;
    var lexer = new FilterLexer(filter);
    Assert.Count(2, lexer.Tokens);

    FilterToken regexToken = lexer.Tokens[0];
    Assert.AreEqual(regexToken.Type, FilterTokenType.RegexWord);
    Assert.AreEqual(regexToken.Position, 0);
    Assert.AreEqual(regexToken.Text, text);

    FilterToken trailingToken = lexer.Tokens[1];
    Assert.AreEqual(trailingToken.Type, FilterTokenType.Word);
}
public void QuotedElements(string key, string colon, string value)
{
    // Quoted key and value lex to Word Colon Word with unquoted text;
    // positions follow the raw (quoted) input lengths.
    string filter = key + colon + value;
    var lexer = new FilterLexer(filter);
    Assert.Count(3, lexer.Tokens);

    FilterToken keyToken = lexer.Tokens[0];
    Assert.AreEqual(keyToken.Type, FilterTokenType.Word);
    Assert.AreEqual(keyToken.Position, 0);
    Assert.AreEqual(keyToken.Text, GetUnquotedString(key));

    FilterToken colonToken = lexer.Tokens[1];
    Assert.AreEqual(colonToken.Type, FilterTokenType.Colon);
    Assert.AreEqual(colonToken.Position, key.Length);
    Assert.AreEqual(colonToken.Text, null); // punctuation tokens carry no text

    FilterToken valueToken = lexer.Tokens[2];
    Assert.AreEqual(valueToken.Type, FilterTokenType.Word);
    Assert.AreEqual(valueToken.Position, key.Length + 1);
    Assert.AreEqual(valueToken.Text, GetUnquotedString(value));
}
public void FilterWithTwovalues(string filter)
{
    // Expect exactly: Word("Type") Colon Word("Fixture1") Comma Word("Fixture2")
    var lexer = new FilterLexer(filter);
    Assert.Count(5, lexer.Tokens);

    FilterToken keyToken = lexer.Tokens[0];
    Assert.AreEqual(keyToken.Type, FilterTokenType.Word);
    Assert.AreEqual(keyToken.Position, 0);
    Assert.AreEqual(keyToken.Text, "Type");

    FilterToken colonToken = lexer.Tokens[1];
    Assert.AreEqual(colonToken.Type, FilterTokenType.Colon);
    Assert.AreEqual(colonToken.Position, 4);
    Assert.AreEqual(colonToken.Text, null); // punctuation tokens carry no text

    FilterToken firstValueToken = lexer.Tokens[2];
    Assert.AreEqual(firstValueToken.Type, FilterTokenType.Word);
    Assert.AreEqual(firstValueToken.Position, 5);
    Assert.AreEqual(firstValueToken.Text, "Fixture1");

    FilterToken commaToken = lexer.Tokens[3];
    Assert.AreEqual(commaToken.Type, FilterTokenType.Comma);
    Assert.AreEqual(commaToken.Text, null);

    FilterToken secondValueToken = lexer.Tokens[4];
    Assert.AreEqual(secondValueToken.Type, FilterTokenType.Word);
    Assert.AreEqual(secondValueToken.Text, "Fixture2");
}
public void TwoElements(string filter, string type1, string type2)
{
    // Two adjacent punctuation elements lex to two text-less tokens whose
    // types are named by the data row.
    var lexer = new FilterLexer(filter);
    Assert.Count(2, lexer.Tokens);

    FilterToken firstToken = lexer.Tokens[0];
    Assert.AreEqual(firstToken.Type, Enum.Parse(typeof(FilterTokenType), type1));
    Assert.AreEqual(firstToken.Position, 0);
    Assert.AreEqual(firstToken.Text, null);

    FilterToken secondToken = lexer.Tokens[1];
    Assert.AreEqual(secondToken.Type, Enum.Parse(typeof(FilterTokenType), type2));
}
public void CaseInsensitiveRegex(string key, string text)
{
    // A regex with a trailing case-insensitive modifier lexes to a
    // RegexWord token followed by a text-less modifier token at the
    // modifier character's position.
    string filter = key;
    var lexer = new FilterLexer(filter);
    Assert.Count(2, lexer.Tokens);

    FilterToken regexToken = lexer.Tokens[0];
    Assert.AreEqual(regexToken.Type, FilterTokenType.RegexWord);
    Assert.AreEqual(regexToken.Position, 0);
    Assert.AreEqual(regexToken.Text, text);

    FilterToken modifierToken = lexer.Tokens[1];
    Assert.AreEqual(modifierToken.Type, FilterTokenType.CaseInsensitiveModifier);
    Assert.AreEqual(modifierToken.Position, key.Length - 1);
    Assert.AreEqual(modifierToken.Text, null);
}
public void WordsWithEscapedCharacters(string key, string text)
{
    // A word containing escape sequences lexes to a single Word token
    // whose text is the unescaped form supplied by the data row.
    string filter = key;
    FilterLexer lexer = new FilterLexer(filter);

    // Consistency fix: use Assert.Count like the sibling tests instead of
    // Assert.AreEqual(lexer.Tokens.Count, 1).
    Assert.Count(1, lexer.Tokens);

    FilterToken firstFilterToken = lexer.Tokens[0];
    Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
    Assert.AreEqual(firstFilterToken.Position, 0);
    Assert.AreEqual(firstFilterToken.Text, text);
}
/// <summary>
/// Advances the lexer by one token, discarding the result.
/// Exists so parser code reads symmetrically with the static LookAhead helper.
/// </summary>
private static void GetNextToken(FilterLexer lexer)
{
    lexer.GetNextToken();
}
public void BackslashesShouldBeFollowedByAnEscapableCharacter(string filter, int tokenCount)
{
    // A bad escape sequence makes the lexer emit an error token as the
    // final token; the total count is supplied by the data row.
    var lexer = new FilterLexer(filter);
    Assert.Count(tokenCount, lexer.Tokens);

    FilterToken lastToken = lexer.Tokens[tokenCount - 1];
    Assert.AreEqual(lastToken.Type, FilterTokenType.Error);
}
public void QuotedElementsAndMultipleValues(string key, string value1, string value2)
{
    // Expect: Word(key) Colon Word(value1) Comma Word(value2), with word
    // text unquoted and positions computed from the raw (quoted) lengths.
    string filter = key + ":" + value1 + "," + value2;
    var lexer = new FilterLexer(filter);
    Assert.Count(5, lexer.Tokens);

    FilterToken keyToken = lexer.Tokens[0];
    Assert.AreEqual(keyToken.Type, FilterTokenType.Word);
    Assert.AreEqual(keyToken.Position, 0);
    Assert.AreEqual(keyToken.Text, GetUnquotedString(key));

    FilterToken colonToken = lexer.Tokens[1];
    Assert.AreEqual(colonToken.Type, FilterTokenType.Colon);
    Assert.AreEqual(colonToken.Position, key.Length);
    Assert.AreEqual(colonToken.Text, null); // punctuation tokens carry no text

    FilterToken firstValueToken = lexer.Tokens[2];
    Assert.AreEqual(firstValueToken.Type, FilterTokenType.Word);
    Assert.AreEqual(firstValueToken.Position, key.Length + 1);
    Assert.AreEqual(firstValueToken.Text, GetUnquotedString(value1));

    FilterToken commaToken = lexer.Tokens[3];
    Assert.AreEqual(commaToken.Type, FilterTokenType.Comma);
    Assert.AreEqual(commaToken.Position, key.Length + value1.Length + 1);
    Assert.AreEqual(commaToken.Text, null);

    FilterToken secondValueToken = lexer.Tokens[4];
    Assert.AreEqual(secondValueToken.Type, FilterTokenType.Word);
    Assert.AreEqual(secondValueToken.Position, key.Length + value1.Length + 2);
    Assert.AreEqual(secondValueToken.Text, GetUnquotedString(value2));
}
public void SingleElement(string filter, string type)
{
    // A lone punctuation element lexes to one text-less token at position 0
    // whose type is named by the data row.
    var lexer = new FilterLexer(filter);
    Assert.Count(1, lexer.Tokens);

    FilterToken token = lexer.Tokens[0];
    Assert.AreEqual(token.Type, ParseTokenType(type));
    Assert.AreEqual(token.Position, 0);
    Assert.AreEqual(token.Text, null);
}