Example #1
        // Matches one or more negation filters joined by "and" tokens; multiple operands
        // are combined into an AndFilter.
        private Filter<T> MatchAndFilter(FilterLexer lexer)
        {
            var filters = new List<Filter<T>>();
            Filter<T> firstFilter = MatchNegationFilter(lexer);

            filters.Add(firstFilter);

            FilterToken nextToken = LookAhead(lexer, 1);

            while (nextToken != null && nextToken.Type == FilterTokenType.And)
            {
                GetNextToken(lexer);
                filters.Add(MatchNegationFilter(lexer));
                nextToken = LookAhead(lexer, 1);
            }

            if (filters.Count > 1)
            {
                return new AndFilter<T>(filters);
            }
            else
            {
                return firstFilter;
            }
        }
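
MatchOrFilter is called throughout these examples but is not itself included. A minimal sketch of what it might look like, assuming it mirrors MatchAndFilter one precedence level up and that FilterTokenType.Or and OrFilter<T> exist (only OrFilter<string> actually appears below):

        // Hypothetical sketch modeled on MatchAndFilter above; FilterTokenType.Or and
        // OrFilter<T> are assumptions, so treat this as illustration rather than library code.
        private Filter<T> MatchOrFilter(FilterLexer lexer)
        {
            var filters = new List<Filter<T>>();
            Filter<T> firstFilter = MatchAndFilter(lexer);

            filters.Add(firstFilter);

            FilterToken nextToken = LookAhead(lexer, 1);

            while (nextToken != null && nextToken.Type == FilterTokenType.Or)
            {
                GetNextToken(lexer);
                filters.Add(MatchAndFilter(lexer));
                nextToken = LookAhead(lexer, 1);
            }

            if (filters.Count > 1)
            {
                return new OrFilter<T>(filters);
            }
            else
            {
                return firstFilter;
            }
        }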
Example #2
        // Matches a single value: either a regex word (optionally followed by the
        // case-insensitivity modifier) or a plain word treated as an equality match.
        private static Filter<string> MatchValue(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken != null)
            {
                if (nextToken.Type == FilterTokenType.RegexWord)
                {
                    GetNextToken(lexer);
                    RegexOptions options = RegexOptions.CultureInvariant;
                    FilterToken caseInsensitiveToken = LookAhead(lexer, 1);
                    if (caseInsensitiveToken != null && caseInsensitiveToken.Type == FilterTokenType.CaseInsensitiveModifier)
                    {
                        options |= RegexOptions.IgnoreCase;
                        GetNextToken(lexer);
                    }
                    return new RegexFilter(new Regex(nextToken.Text, options));
                }
                else if (IsWord(nextToken))
                {
                    GetNextToken(lexer);
                    return new EqualityFilter<string>(nextToken.Text);
                }
            }

            throw new FilterParseException(Resources.FilterParser_ValueExpected);
        }
Example #3
        // Entry point for parsing a single filter expression; an empty token stream is an error.
        private Filter<T> MatchFilter(FilterLexer lexer)
        {
            if (lexer.Tokens.Count == 0)
            {
                throw new FilterParseException(Resources.FilterParser_EmptyFilterError);
            }

            return MatchOrFilter(lexer);
        }
Example #4
 public void EmptyExpressionsAreNotValid(string filter)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.IsNotNull(lexer);
     Assert.Count(0, lexer.Tokens);
     Assert.IsNull(lexer.GetNextToken());
     Assert.IsNull(lexer.LookAhead(1));
     Assert.IsNull(lexer.GetNextToken());
 }
Example #5
        // Looks ahead in the token stream and surfaces lexer errors as parse exceptions.
        private static FilterToken LookAhead(FilterLexer lexer, int index)
        {
            FilterToken token = lexer.LookAhead(index);

            if (token != null && token.Type == FilterTokenType.Error)
            {
                throw new FilterParseException(token.Text);
            }
            return token;
        }
Example #6
        private static void MatchRightBracket(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken == null || nextToken.Type != FilterTokenType.RightBracket)
            {
                throw new FilterParseException(Resources.FilterParser_RightBracketExpected);
            }
            GetNextToken(lexer);
        }
Example #7
        private static void MatchComma(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken == null || nextToken.Type != FilterTokenType.Comma)
            {
                // Should never happen because we call this method when we know a comma
                // token is next
                throw new FilterParseException(Resources.FilterParser_CommaExpected);
            }
            GetNextToken(lexer);
        }
Example #8
        // Matches zero or more leading "not" tokens, then a parenthesized or simple filter.
        private Filter<T> MatchNegationFilter(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken != null && nextToken.Type == FilterTokenType.Not)
            {
                GetNextToken(lexer);
                return new NotFilter<T>(MatchNegationFilter(lexer));
            }

            return MatchParenthesizedFilter(lexer);
        }
Example #9
        private static string MatchKey(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken == null || IsNotWord(nextToken))
            {
                // Should never happen because we call this method when we know a word
                // token is next
                throw new FilterParseException(Resources.FilterParser_StringLiteralExpected);
            }
            GetNextToken(lexer);

            return nextToken.Text;
        }
Example #10
 public void GetNextTokenLookAheadWithOneToken()
 {
     FilterLexer lexer = new FilterLexer("a");
     Assert.IsNotNull(lexer);
     Assert.Count(1, lexer.Tokens);
     Assert.IsNull(lexer.LookAhead(0));
     Assert.IsNotNull(lexer.LookAhead(1));
     Assert.IsNull(lexer.LookAhead(2));
     Assert.IsNotNull(lexer.GetNextToken());
     Assert.IsNotNull(lexer.LookAhead(0));
     Assert.IsNull(lexer.LookAhead(1));
     Assert.IsNull(lexer.GetNextToken());
     Assert.IsNull(lexer.LookAhead(0));
     Assert.IsNull(lexer.LookAhead(1));
 }
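
The assertions above imply the lexer's cursor contract: LookAhead(1) peeks at the next unconsumed token, LookAhead(0) re-reads the most recently consumed one, and GetNextToken advances. A small sketch restating that contract in plain code, assuming a single-character input lexes to one Word token as the counts above suggest:

 private static void DemonstrateLookAheadContract()
 {
     FilterLexer lexer = new FilterLexer("a");
     Console.WriteLine(lexer.LookAhead(0) == null);     // True: nothing has been consumed yet
     Console.WriteLine(lexer.LookAhead(1).Type);        // Word: peek at the next token without consuming it
     FilterToken token = lexer.GetNextToken();          // consume the "a" token
     Console.WriteLine(lexer.LookAhead(0).Type);        // Word: index 0 now refers to the consumed token
     Console.WriteLine(lexer.LookAhead(1) == null);     // True: no further tokens remain
     Console.WriteLine(lexer.GetNextToken() == null);   // True: the stream is exhausted
 }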
Example #11
        // Matches either a bracketed sub-expression or a simple key:value filter.
        private Filter<T> MatchParenthesizedFilter(FilterLexer lexer)
        {
            Filter<T> filter = null;
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken != null)
            {
                if (nextToken.Type == FilterTokenType.LeftBracket)
                {
                    GetNextToken(lexer);
                    filter = MatchOrFilter(lexer);
                    MatchRightBracket(lexer);
                }
                else
                {
                    filter = MatchSimpleFilter(lexer);
                }
            }

            return filter;
        }
Example #12
        private void AppendPhrase(string phrase)
        {
            if (phrase.Length == 0)
            {
                AppendQuotedString(phrase);
                return;
            }
            else
            {
                for (int i = 0; i < phrase.Length; i++)
                {
                    if (!FilterLexer.IsWordChar(phrase[i]))
                    {
                        AppendQuotedString(phrase);
                        return;
                    }
                }
            }

            result.Append(phrase);
        }
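
AppendQuotedString is referenced here but not shown. A minimal sketch, assuming the quoting convention the lexer tests below imply (embedded delimiters and backslashes escaped with a backslash inside a double-quoted string); the real formatter may choose its delimiter differently:

        // Hypothetical sketch: the delimiter character and escaping rules are assumptions,
        // inferred from the delimiter/escape tests later in this listing.
        private void AppendQuotedString(string phrase)
        {
            result.Append('"');
            foreach (char c in phrase)
            {
                if (c == '"' || c == '\\')
                {
                    result.Append('\\');   // escape embedded delimiters and backslashes
                }
                result.Append(c);
            }
            result.Append('"');
        }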
Example #13
 public void GetNextTokenAndLookAheadWithTwoTokens()
 {
     FilterLexer lexer = new FilterLexer("a:");
     Assert.IsNotNull(lexer);
     Assert.Count(2, lexer.Tokens);
     Assert.IsNull(lexer.LookAhead(0));
     Assert.IsNotNull(lexer.LookAhead(1));
     Assert.IsNotNull(lexer.LookAhead(2));
     Assert.IsNull(lexer.LookAhead(3));
     Assert.IsNotNull(lexer.GetNextToken());
     Assert.IsNotNull(lexer.LookAhead(0));
     Assert.IsNotNull(lexer.LookAhead(1));
     Assert.IsNull(lexer.LookAhead(2));
     Assert.IsNotNull(lexer.GetNextToken());
     Assert.IsNotNull(lexer.LookAhead(0));
     Assert.IsNull(lexer.LookAhead(1));
     Assert.IsNull(lexer.LookAhead(2));
     Assert.IsNull(lexer.GetNextToken());
     Assert.IsNull(lexer.LookAhead(0));
     Assert.IsNull(lexer.LookAhead(1));
 }
Example #14
        // Matches "*" (match anything) or a key:value expression built through the filter factory.
        private Filter<T> MatchSimpleFilter(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken != null)
            {
                if (nextToken.Type == FilterTokenType.Star)
                {
                    GetNextToken(lexer);
                    return new AnyFilter<T>();
                }
                if (IsWord(nextToken))
                {
                    string key = MatchKey(lexer);
                    MatchColon(lexer);
                    Filter<string> valueFilter = MatchMatchSequence(lexer);
                    return factory.CreateFilter(key, valueFilter);
                }
            }

            throw new FilterParseException(Resources.FilterParser_FilterExpressionExpected);
        }
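
MatchColon is called above but does not appear in these examples. Assuming it follows the same pattern as MatchComma and MatchRightBracket (the resource identifier FilterParser_ColonExpected is a guess, not taken from the library), it would look roughly like this:

        // Hypothetical sketch modeled on MatchComma/MatchRightBracket; the resource name is assumed.
        private static void MatchColon(FilterLexer lexer)
        {
            FilterToken nextToken = LookAhead(lexer, 1);

            if (nextToken == null || nextToken.Type != FilterTokenType.Colon)
            {
                throw new FilterParseException(Resources.FilterParser_ColonExpected);
            }
            GetNextToken(lexer);
        }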
Example #15
        // Matches a sequence of include/exclude rules; the first rule defaults to inclusion
        // when no explicit keyword is given.
        private FilterSet<T> MatchFilterSet(FilterLexer lexer)
        {
            var filterRules = new List<FilterRule<T>>();

            FilterToken nextToken = LookAhead(lexer, 1);

            while (nextToken != null)
            {
                FilterRuleType filterRuleType;
                if (nextToken.Type == FilterTokenType.Include)
                {
                    filterRuleType = FilterRuleType.Inclusion;
                    GetNextToken(lexer);
                }
                else if (nextToken.Type == FilterTokenType.Exclude)
                {
                    filterRuleType = FilterRuleType.Exclusion;
                    GetNextToken(lexer);
                }
                else if (filterRules.Count == 0)
                {
                    // default to include for first filter
                    filterRuleType = FilterRuleType.Inclusion;
                }
                else
                {
                    throw new FilterParseException(Resources.FilterParser_RulesNotSeperated);
                }

                Filter<T> filter = MatchOrFilter(lexer);
                filterRules.Add(new FilterRule<T>(filterRuleType, filter));

                nextToken = LookAhead(lexer, 1);
            }

            return new FilterSet<T>(filterRules);
        }
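
To make the rule structure concrete, here is a hedged illustration of the kind of FilterSet the loop above produces; the two inner filters are stand-ins (an AnyFilter and a bare EqualityFilter) rather than the key/value filters the factory would actually create:

        var rules = new List<FilterRule<string>>
        {
            new FilterRule<string>(FilterRuleType.Inclusion, new AnyFilter<string>()),                // e.g. a first rule with no keyword, which defaults to inclusion
            new FilterRule<string>(FilterRuleType.Exclusion, new EqualityFilter<string>("Fixture2")), // e.g. a rule introduced by the exclude keyword
        };
        var filterSet = new FilterSet<string>(rules);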
Example #16
        // Matches one or more comma-separated values; multiple values are combined with an OrFilter.
        private static Filter<string> MatchMatchSequence(FilterLexer lexer)
        {
            var valueFilters = new List<Filter<string>>();

            valueFilters.Add(MatchValue(lexer));

            FilterToken nextToken = LookAhead(lexer, 1);

            while (nextToken != null && nextToken.Type == FilterTokenType.Comma)
            {
                MatchComma(lexer);
                valueFilters.Add(MatchValue(lexer));
                nextToken = LookAhead(lexer, 1);
            }

            if (valueFilters.Count == 1)
            {
                return valueFilters[0];
            }
            else
            {
                return new OrFilter<string>(valueFilters);
            }
        }
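
As the lexer tests later show (Example #24), a value list such as "Fixture1, Fixture2" lexes into Word, Comma, Word after the colon. A hedged illustration of the filter that sequence collapses to, built by hand from the filter types used above:

        // Illustration (assumed, based on MatchValue and MatchMatchSequence above):
        // the value part "Fixture1, Fixture2" yields the equivalent of
        Filter<string> valueFilter = new OrFilter<string>(new List<Filter<string>>
        {
            new EqualityFilter<string>("Fixture1"),
            new EqualityFilter<string>("Fixture2"),
        });
        // whereas a single value such as "Fixture1" stays a plain EqualityFilter<string>.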
Example #17
 public void DelimitedElementWithMissingEndDelimiter(string filter)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(1, lexer.Tokens);
     FilterToken token = lexer.Tokens[0];
     Assert.AreEqual(token.Type, FilterTokenType.Error);
     Assert.AreEqual(token.Position, filter.Length - 1);
     Assert.IsTrue(token.Text.StartsWith("Missing end " + filter[0]));
 }
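
A hedged illustration of what this data-driven test asserts, assuming an unterminated quoted element such as "\"abc" is among the inputs supplied by the test rows:

 // Illustration only: the actual test rows are not shown in these examples.
 FilterLexer lexer = new FilterLexer("\"abc");
 FilterToken token = lexer.Tokens[0];
 // token.Type     -> FilterTokenType.Error
 // token.Position -> 3, i.e. filter.Length - 1
 // token.Text     -> starts with "Missing end " followed by the opening delimiter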
Example #18
 public void DelimitedElementWithEscapedDelimiter(string filter, string tokenType)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(1, lexer.Tokens);
     FilterToken firstFilterToken = lexer.Tokens[0];
     Assert.AreEqual(firstFilterToken.Type, ParseTokenType(tokenType));
     Assert.AreEqual(firstFilterToken.Position, 0);
     Assert.AreEqual(firstFilterToken.Text, GetUnquotedString(filter));
 }
Example #19
 public void DelimitersAreUnescaped(string filter, string expected, string tokenType)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(1, lexer.Tokens);
     FilterToken firstFilterToken = lexer.Tokens[0];
     Assert.AreEqual(firstFilterToken.Type, ParseTokenType(tokenType));
     Assert.AreEqual(firstFilterToken.Position, 0);
     Assert.AreEqual(firstFilterToken.Text, expected);
 }
Example #20
 public void FilterWithOneValue(string filter, string text)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(3, lexer.Tokens);
     {
         FilterToken firstFilterToken = lexer.Tokens[0];
         Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
         Assert.AreEqual(firstFilterToken.Position, 0);
         Assert.AreEqual(firstFilterToken.Text, "Type");
     }
     {
         FilterToken secondToken = lexer.Tokens[1];
         Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
         Assert.AreEqual(secondToken.Position, 4);
         Assert.AreEqual(secondToken.Text, null);
     }
     {
         FilterToken thirdToken = lexer.Tokens[2];
         Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
         Assert.AreEqual(thirdToken.Position, 5);
         Assert.AreEqual(thirdToken.Text, text);
     }
 }
Example #21
 public void UnrecognizedElement(string filter)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.AreEqual(lexer.Tokens.Count, 1);
     FilterToken filterToken = lexer.Tokens[0];
     Assert.AreEqual(filterToken.Type, FilterTokenType.Word);
     Assert.AreEqual(filterToken.Position, 0);
     Assert.AreEqual(filterToken.Text, filter);
 }
Example #22
        public void CaseSensitiveRegex(string key, string text)
        {
            string filter = key;
            FilterLexer lexer = new FilterLexer(filter);
            Assert.Count(2, lexer.Tokens);

            FilterToken firstFilterToken = lexer.Tokens[0];
            Assert.AreEqual(firstFilterToken.Type, FilterTokenType.RegexWord);
            Assert.AreEqual(firstFilterToken.Position, 0);
            Assert.AreEqual(firstFilterToken.Text, text);

            FilterToken secondToken = lexer.Tokens[1];
            Assert.AreEqual(secondToken.Type, FilterTokenType.Word);
        }
Example #23
 public void QuotedElements(string key, string colon, string value)
 {
     string filter = key + colon + value;
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(3, lexer.Tokens);
     {
         FilterToken firstFilterToken = lexer.Tokens[0];
         Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
         Assert.AreEqual(firstFilterToken.Position, 0);
         Assert.AreEqual(firstFilterToken.Text, GetUnquotedString(key));
     }
     {
         FilterToken secondToken = lexer.Tokens[1];
         Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
         Assert.AreEqual(secondToken.Position, key.Length);
         Assert.AreEqual(secondToken.Text, null);
     }
     {
         FilterToken thirdToken = lexer.Tokens[2];
         Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
         Assert.AreEqual(thirdToken.Position, key.Length + 1);
         Assert.AreEqual(thirdToken.Text, GetUnquotedString(value));
     }
 }
Example #24
        public void FilterWithTwoValues(string filter)
        {
            FilterLexer lexer = new FilterLexer(filter);
            Assert.Count(5, lexer.Tokens);

            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, "Type");
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
                Assert.AreEqual(secondToken.Position, 4);
                Assert.AreEqual(secondToken.Text, null);
            }
            {
                FilterToken thirdToken = lexer.Tokens[2];
                Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
                Assert.AreEqual(thirdToken.Position, 5);
                Assert.AreEqual(thirdToken.Text, "Fixture1");
            }
            {
                FilterToken fourthToken = lexer.Tokens[3];
                Assert.AreEqual(fourthToken.Type, FilterTokenType.Comma);
                Assert.AreEqual(fourthToken.Text, null);
            }
            {
                FilterToken fifthToken = lexer.Tokens[4];
                Assert.AreEqual(fifthToken.Type, FilterTokenType.Word);
                Assert.AreEqual(fifthToken.Text, "Fixture2");
            }
        }
Example #25
        public void TwoElements(string filter, string type1, string type2)
        {
            FilterLexer lexer = new FilterLexer(filter);
            Assert.Count(2, lexer.Tokens);

            FilterToken filterToken = lexer.Tokens[0];
            Assert.AreEqual(filterToken.Type, Enum.Parse(typeof(FilterTokenType), type1));
            Assert.AreEqual(filterToken.Position, 0);
            Assert.AreEqual(filterToken.Text, null);

            filterToken = lexer.Tokens[1];
            Assert.AreEqual(filterToken.Type, Enum.Parse(typeof(FilterTokenType), type2));
        }
Example #26
 public void CaseInsensitiveRegex(string key, string text)
 {
     string filter = key;
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(2, lexer.Tokens);
     {
         FilterToken firstFilterToken = lexer.Tokens[0];
         Assert.AreEqual(firstFilterToken.Type, FilterTokenType.RegexWord);
         Assert.AreEqual(firstFilterToken.Position, 0);
         Assert.AreEqual(firstFilterToken.Text, text);
     }
     {
         FilterToken secondToken = lexer.Tokens[1];
         Assert.AreEqual(secondToken.Type, FilterTokenType.CaseInsensitiveModifier);
         Assert.AreEqual(secondToken.Position, key.Length - 1);
         Assert.AreEqual(secondToken.Text, null);
     }
 }
Example #27
        public void WordsWithEscapedCharacters(string key, string text)
        {
            string filter = key;
            FilterLexer lexer = new FilterLexer(filter);
            Assert.AreEqual(lexer.Tokens.Count, 1);

            FilterToken firstFilterToken = lexer.Tokens[0];
            Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
            Assert.AreEqual(firstFilterToken.Position, 0);
            Assert.AreEqual(firstFilterToken.Text, text);
        }
Example #28
 private static void GetNextToken(FilterLexer lexer)
 {
     lexer.GetNextToken();
 }
Example #29
 public void BackslashesShouldBeFollowedByAnEscapableCharacter(string filter, int tokenCount)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(tokenCount, lexer.Tokens);
     {
         FilterToken errorToken = lexer.Tokens[tokenCount - 1];
         Assert.AreEqual(errorToken.Type, FilterTokenType.Error);
     }
 }
Example #30
        public void QuotedElementsAndMultipleValues(string key, string value1, string value2)
        {
            string filter = key + ":" + value1 + "," + value2;
            FilterLexer lexer = new FilterLexer(filter);
            Assert.Count(5, lexer.Tokens);

            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, GetUnquotedString(key));
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
                Assert.AreEqual(secondToken.Position, key.Length);
                Assert.AreEqual(secondToken.Text, null);
            }
            {
                FilterToken thirdToken = lexer.Tokens[2];
                Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
                Assert.AreEqual(thirdToken.Position, key.Length + 1);
                Assert.AreEqual(thirdToken.Text, GetUnquotedString(value1));
            }
            {
                FilterToken fourthToken = lexer.Tokens[3];
                Assert.AreEqual(fourthToken.Type, FilterTokenType.Comma);
                Assert.AreEqual(fourthToken.Position, key.Length + value1.Length + 1);
                Assert.AreEqual(fourthToken.Text, null);
            }
            {
                FilterToken fifthToken = lexer.Tokens[4];
                Assert.AreEqual(fifthToken.Type, FilterTokenType.Word);
                Assert.AreEqual(fifthToken.Position, key.Length + value1.Length + 2);
                Assert.AreEqual(fifthToken.Text, GetUnquotedString(value2));
            }
        }
Example #31
 public void SingleElement(string filter, string type)
 {
     FilterLexer lexer = new FilterLexer(filter);
     Assert.Count(1, lexer.Tokens);
     FilterToken filterToken = lexer.Tokens[0];
     Assert.AreEqual(filterToken.Type, ParseTokenType(type));
     Assert.AreEqual(filterToken.Position, 0);
     Assert.AreEqual(filterToken.Text, null);
 }