Example No. 1
        public void FilterWithTwovalues(string filter)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(5, lexer.Tokens);

            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, "Type");
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
                Assert.AreEqual(secondToken.Position, 4);
                Assert.AreEqual(secondToken.Text, null);
            }
            {
                FilterToken thirdToken = lexer.Tokens[2];
                Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
                Assert.AreEqual(thirdToken.Position, 5);
                Assert.AreEqual(thirdToken.Text, "Fixture1");
            }
            {
                FilterToken fourthToken = lexer.Tokens[3];
                Assert.AreEqual(fourthToken.Type, FilterTokenType.Comma);
                Assert.AreEqual(fourthToken.Text, null);
            }
            {
                FilterToken fifthToken = lexer.Tokens[4];
                Assert.AreEqual(fifthToken.Type, FilterTokenType.Word);
                Assert.AreEqual(fifthToken.Text, "Fixture2");
            }
        }
Example No. 2
        static void Try(string input)
        {
            var str = new AntlrInputStream(input);

            System.Console.WriteLine(input);
            var lexer           = new FilterLexer(str);
            var tokens          = new CommonTokenStream(lexer);
            var parser          = new FilterParser(tokens);
            var listener_lexer  = new ErrorListener <int>();
            var listener_parser = new ErrorListener <IToken>();

            lexer.AddErrorListener(listener_lexer);
            parser.AddErrorListener(listener_parser);
            var tree = parser.booleanAndExpression();

            if (listener_lexer.had_error || listener_parser.had_error)
            {
                System.Console.WriteLine("error in parse.");
            }
            else
            {
                System.Console.WriteLine("parse completed.");
            }
            foreach (var t in tokens.GetTokens())
            {
                System.Console.WriteLine(t);
            }
            System.Console.WriteLine(TreeOutput.OutputTree(tree, lexer, parser, tokens));
        }
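A minimal calling sketch for the Try helper above; the filter strings are hypothetical, since the ANTLR Filter grammar that defines the accepted syntax is not part of this snippet:

        static void Main()
        {
            // Hypothetical inputs; the real syntax is whatever the Filter grammar
            // behind FilterLexer/FilterParser accepts.
            Try("a and b");
            Try("a and");   // deliberately malformed, expected to print "error in parse."
        }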
Example No. 3
        public void QuotedElements(string key, string colon, string value)
        {
            string      filter = key + colon + value;
            FilterLexer lexer  = new FilterLexer(filter);

            Assert.Count(3, lexer.Tokens);
            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, GetUnquotedString(key));
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
                Assert.AreEqual(secondToken.Position, key.Length);
                Assert.AreEqual(secondToken.Text, null);
            }
            {
                FilterToken thirdToken = lexer.Tokens[2];
                Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
                Assert.AreEqual(thirdToken.Position, key.Length + 1);
                Assert.AreEqual(thirdToken.Text, GetUnquotedString(value));
            }
        }
Example No. 4
        private IList <FilterToken> Tokenize(string value)
        {
            FilterLexer tokenizer = new FilterLexer(value);

            IList <FilterToken> tokens = tokenizer.Tokenize();

            return(tokens);
        }
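A hedged usage sketch for the helper above; the FilterToken members (Type, Position, Text) are taken from the other examples on this page, and the input string is illustrative only:

        private void PrintTokens()
        {
            // Tokenize wraps FilterLexer.Tokenize and returns the full token list.
            foreach (FilterToken token in Tokenize("Type:Fixture1"))
            {
                System.Console.WriteLine($"{token.Type} @ {token.Position}: {token.Text}");
            }
        }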
Example No. 5
        public void BackslashesShouldBeFollowedByAnEscapableCharacter(string filter, int tokenCount)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(tokenCount, lexer.Tokens);
            {
                FilterToken errorToken = lexer.Tokens[tokenCount - 1];
                Assert.AreEqual(errorToken.Type, FilterTokenType.Error);
            }
        }
Example No. 6
        public void EmptyExpressionsAreNotValid(string filter)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.IsNotNull(lexer);
            Assert.Count(0, lexer.Tokens);
            Assert.IsNull(lexer.GetNextToken());
            Assert.IsNull(lexer.LookAhead(1));
            Assert.IsNull(lexer.GetNextToken());
        }
Example No. 7
        public void DelimitedElementWithMissingEndDelimiter(string filter)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(1, lexer.Tokens);
            FilterToken token = lexer.Tokens[0];

            Assert.AreEqual(token.Type, FilterTokenType.Error);
            Assert.AreEqual(token.Position, filter.Length - 1);
            Assert.IsTrue(token.Text.StartsWith("Missing end " + filter[0]));
        }
Example No. 8
        public void UnrecognizedElement(string filter)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.AreEqual(lexer.Tokens.Count, 1);
            FilterToken filterToken = lexer.Tokens[0];

            Assert.AreEqual(filterToken.Type, FilterTokenType.Word);
            Assert.AreEqual(filterToken.Position, 0);
            Assert.AreEqual(filterToken.Text, filter);
        }
Example No. 9
        public void DelimitersAreUnescaped(string filter, string expected, string tokenType)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(1, lexer.Tokens);
            FilterToken firstFilterToken = lexer.Tokens[0];

            Assert.AreEqual(firstFilterToken.Type, ParseTokenType(tokenType));
            Assert.AreEqual(firstFilterToken.Position, 0);
            Assert.AreEqual(firstFilterToken.Text, expected);
        }
Example No. 10
        public void DelimitedElementWithEscapedDelimiter(string filter, string tokenType)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(1, lexer.Tokens);
            FilterToken firstFilterToken = lexer.Tokens[0];

            Assert.AreEqual(firstFilterToken.Type, ParseTokenType(tokenType));
            Assert.AreEqual(firstFilterToken.Position, 0);
            Assert.AreEqual(firstFilterToken.Text, GetUnquotedString(filter));
        }
Example No. 11
        public void SingleElement(string filter, string type)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(1, lexer.Tokens);
            FilterToken filterToken = lexer.Tokens[0];

            Assert.AreEqual(filterToken.Type, ParseTokenType(type));
            Assert.AreEqual(filterToken.Position, 0);
            Assert.AreEqual(filterToken.Text, null);
        }
Example No. 12
        public void WordsWithEscapedCharacters(string key, string text)
        {
            string      filter = key;
            FilterLexer lexer  = new FilterLexer(filter);

            Assert.AreEqual(lexer.Tokens.Count, 1);

            FilterToken firstFilterToken = lexer.Tokens[0];

            Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
            Assert.AreEqual(firstFilterToken.Position, 0);
            Assert.AreEqual(firstFilterToken.Text, text);
        }
Example No. 13
        public void Regex(string key, string text)
        {
            string      filter = key;
            FilterLexer lexer  = new FilterLexer(filter);

            Assert.Count(1, lexer.Tokens);

            FilterToken firstFilterToken = lexer.Tokens[0];

            Assert.AreEqual(firstFilterToken.Type, FilterTokenType.RegexWord);
            Assert.AreEqual(firstFilterToken.Position, 0);
            Assert.AreEqual(firstFilterToken.Text, text);
        }
Example No. 14
        public static FilterDefinition <T> FilterString <T>(this FilterDefinitionBuilder <T> builder, string input)
        {
            ICharStream  stream = CharStreams.fromString(input);
            ITokenSource lexer  = new FilterLexer(stream);
            ITokenStream tokens = new CommonTokenStream(lexer);
            FilterParser parser = new FilterParser(tokens);

            parser.BuildParseTree = true;
            IParseTree tree = parser.filter();

            var filter = new FilterVisitorBuilder <T>();

            return(filter.Visit(tree));
        }
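A hedged usage sketch for the extension method above, assuming a MongoDB.Driver collection; the Item document type and the filter string are hypothetical, since the grammar's accepted syntax is not shown here:

        public static async Task<List<Item>> FindMatchingAsync(IMongoCollection<Item> collection)
        {
            // "Status:Active" is illustrative only; the actual syntax is defined by the
            // Filter grammar used to generate FilterLexer/FilterParser.
            FilterDefinition<Item> filter = Builders<Item>.Filter.FilterString("Status:Active");
            return await collection.Find(filter).ToListAsync();
        }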
Example No. 15
        public void TwoElements(string filter, string type1, string type2)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(2, lexer.Tokens);

            FilterToken filterToken = lexer.Tokens[0];

            Assert.AreEqual(filterToken.Type, Enum.Parse(typeof(FilterTokenType), type1));
            Assert.AreEqual(filterToken.Position, 0);
            Assert.AreEqual(filterToken.Text, null);

            filterToken = lexer.Tokens[1];
            Assert.AreEqual(filterToken.Type, Enum.Parse(typeof(FilterTokenType), type2));
        }
Example No. 16
        public void GetNextTokenLookAheadWithOneToken()
        {
            FilterLexer lexer = new FilterLexer("a");

            Assert.IsNotNull(lexer);
            Assert.Count(1, lexer.Tokens);
            Assert.IsNull(lexer.LookAhead(0));
            Assert.IsNotNull(lexer.LookAhead(1));
            Assert.IsNull(lexer.LookAhead(2));
            Assert.IsNotNull(lexer.GetNextToken());
            Assert.IsNotNull(lexer.LookAhead(0));
            Assert.IsNull(lexer.LookAhead(1));
            Assert.IsNull(lexer.GetNextToken());
            Assert.IsNull(lexer.LookAhead(0));
            Assert.IsNull(lexer.LookAhead(1));
        }
Example No. 17
        private Func <MockItem, bool> BuildQuery(string text)
        {
            var inputStream       = new AntlrInputStream(text);
            var filterLexer       = new FilterLexer(inputStream);
            var commonTokenStream = new CommonTokenStream(filterLexer);
            var filterParser      = new FilterParser(commonTokenStream);
            var context           = filterParser.query();
            var weekParser        = new WeekDatesBehavior();
            var dateParser        = new DateParser(new List <IDateParserBehavior>()
            {
                weekParser
            }, new DateParserConfig());
            var visitor    = new FilterVisitor <MockItem>(dateParser);
            var expression = visitor.Visit(context);

            return(expression.Compile());
        }
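A hedged sketch of applying the compiled predicate above; the MockItem collection and the filter text are assumptions, since neither the mock type nor the grammar appears in this snippet:

        private List<MockItem> Apply(IEnumerable<MockItem> items, string filterText)
        {
            // BuildQuery compiles the parsed filter into a Func<MockItem, bool>,
            // which works directly with LINQ to Objects.
            Func<MockItem, bool> predicate = BuildQuery(filterText);
            return items.Where(predicate).ToList();
        }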
Example No. 18
        public void CaseInsensitiveRegex(string key, string text)
        {
            string      filter = key;
            FilterLexer lexer  = new FilterLexer(filter);

            Assert.Count(2, lexer.Tokens);
            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.RegexWord);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, text);
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.CaseInsensitiveModifier);
                Assert.AreEqual(secondToken.Position, key.Length - 1);
                Assert.AreEqual(secondToken.Text, null);
            }
        }
Example No. 19
        /// <summary>
        /// Create expression from string
        /// </summary>
        public FilterExpression(string filterStatement)
        {
            // Parse
            var lexer = new FilterLexer(new AntlrInputStream(filterStatement));

            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(new RaiseException <int>());
            var parser = new FilterParser(new CommonTokenStream(lexer));

            parser.RemoveErrorListeners();
            parser.AddErrorListener(new RaiseException <IToken>());
            var context = parser.parse();

            // Fill in select and where clause
            SelectClause = new SimpleAttributeOperandCollection();
            WhereClause  = new ContentFilter();

            if (context.selectList().STAR() != null)
            {
                // Select all / default
            }
            else
            {
                foreach (var expr in context.selectList().selectexpr())
                {
                    expr.attr_op().GetText();

                    var nodeId          = expr.attr_op().nodeId().STRING_LITERAL().GetText();
                    var browsePathElems = expr.attr_op().nodeId().browsePathElement();
                    var attributeId     = Enum.Parse <NodeAttribute>(expr.attr_op().attributeId().GetText(), true);

                    var operand = new SimpleAttributeOperand {
                        // TypeDefinitionId = expr.attr_op()
                        // AttributeId = (field.InstanceDeclaration.NodeClass == NodeClass.Object) ? Attributes.NodeId : Attributes.Value,
                        // BrowsePath = field.InstanceDeclaration.BrowsePath
                    };
                    SelectClause.Add(operand);
                }
            }

            Evaluate(context.elem_op());
        }
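A hedged construction sketch for the class above; the statement text is purely illustrative, since the grammar behind parser.parse() (select list and where clause) is not included in this snippet:

        // Hypothetical statement text only; an invalid statement would presumably be
        // reported through the RaiseException error listeners registered in the constructor.
        var expression = new FilterExpression("select * where OfType(\"BaseEventType\")");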
Example No. 20
        public void GetNextTokenAndLookAheadWithTwoTokens()
        {
            FilterLexer lexer = new FilterLexer("a:");

            Assert.IsNotNull(lexer);
            Assert.Count(2, lexer.Tokens);
            Assert.IsNull(lexer.LookAhead(0));
            Assert.IsNotNull(lexer.LookAhead(1));
            Assert.IsNotNull(lexer.LookAhead(2));
            Assert.IsNull(lexer.LookAhead(3));
            Assert.IsNotNull(lexer.GetNextToken());
            Assert.IsNotNull(lexer.LookAhead(0));
            Assert.IsNotNull(lexer.LookAhead(1));
            Assert.IsNull(lexer.LookAhead(2));
            Assert.IsNotNull(lexer.GetNextToken());
            Assert.IsNotNull(lexer.LookAhead(0));
            Assert.IsNull(lexer.LookAhead(1));
            Assert.IsNull(lexer.LookAhead(2));
            Assert.IsNull(lexer.GetNextToken());
            Assert.IsNull(lexer.LookAhead(0));
            Assert.IsNull(lexer.LookAhead(1));
        }
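A short sketch of the look-ahead protocol exercised by this test and Example No. 16; the semantics below are inferred from the assertions, not from FilterLexer documentation:

        static void DrainTokens()
        {
            // Inferred behaviour: LookAhead(1) peeks at the next unconsumed token,
            // LookAhead(0) returns the most recently consumed token, and
            // GetNextToken() consumes and returns the next token (null at the end).
            FilterLexer lexer = new FilterLexer("a:");
            while (lexer.LookAhead(1) != null)
            {
                FilterToken token = lexer.GetNextToken();
                System.Console.WriteLine($"{token.Type} @ {token.Position}");
            }
        }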
Example No. 21
        public void QuotedElementsAndMultipleValues(string key, string value1, string value2)
        {
            string      filter = key + ":" + value1 + "," + value2;
            FilterLexer lexer  = new FilterLexer(filter);

            Assert.Count(5, lexer.Tokens);

            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, GetUnquotedString(key));
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
                Assert.AreEqual(secondToken.Position, key.Length);
                Assert.AreEqual(secondToken.Text, null);
            }
            {
                FilterToken thirdToken = lexer.Tokens[2];
                Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
                Assert.AreEqual(thirdToken.Position, key.Length + 1);
                Assert.AreEqual(thirdToken.Text, GetUnquotedString(value1));
            }
            {
                FilterToken fourthToken = lexer.Tokens[3];
                Assert.AreEqual(fourthToken.Type, FilterTokenType.Comma);
                Assert.AreEqual(fourthToken.Position, key.Length + value1.Length + 1);
                Assert.AreEqual(fourthToken.Text, null);
            }
            {
                FilterToken fifthToken = lexer.Tokens[4];
                Assert.AreEqual(fifthToken.Type, FilterTokenType.Word);
                Assert.AreEqual(fifthToken.Position, key.Length + value1.Length + 2);
                Assert.AreEqual(fifthToken.Text, GetUnquotedString(value2));
            }
        }
Example No. 22
        public void FilterWithOneValue(string filter, string text)
        {
            FilterLexer lexer = new FilterLexer(filter);

            Assert.Count(3, lexer.Tokens);
            {
                FilterToken firstFilterToken = lexer.Tokens[0];
                Assert.AreEqual(firstFilterToken.Type, FilterTokenType.Word);
                Assert.AreEqual(firstFilterToken.Position, 0);
                Assert.AreEqual(firstFilterToken.Text, "Type");
            }
            {
                FilterToken secondToken = lexer.Tokens[1];
                Assert.AreEqual(secondToken.Type, FilterTokenType.Colon);
                Assert.AreEqual(secondToken.Position, 4);
                Assert.AreEqual(secondToken.Text, null);
            }
            {
                FilterToken thirdToken = lexer.Tokens[2];
                Assert.AreEqual(thirdToken.Type, FilterTokenType.Word);
                Assert.AreEqual(thirdToken.Position, 5);
                Assert.AreEqual(thirdToken.Text, text);
            }
        }
Example No. 23
        public FilterParser(string input)
        {
            var lexer = new FilterLexer(input);

            tokens = lexer.Tokenize();
        }
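A hedged construction sketch; the filter text is illustrative, and the members of this FilterParser class that consume the stored token list are not shown in the snippet:

        // Illustrative input only; the constructor above eagerly tokenizes it via
        // FilterLexer.Tokenize, the same helper exercised in Example No. 4 and No. 24.
        var parser = new FilterParser("Type:Fixture1,Fixture2");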
Example No. 24
        private IList<FilterToken> Tokenize(string value)
        {
            FilterLexer tokenizer = new FilterLexer(value);

            IList<FilterToken> tokens = tokenizer.Tokenize();
            return tokens;
        }