Example No. 1
        public void Tokenize_WithUndefinedSymbol_ThrowsExceptionWithMessage()
        {
            //ARRANGE
            string source = "[[]";

            //ACT
            //ASSERT
            var ex = Assert.Throws <Exception>(() => _lexer.Tokenize(source).ToList());

            Assert.That(ex.Message, Is.EqualTo("Unrecognized symbol '['."));
        }
Example No. 2
        public void TestMethod3()
        {
            Lexer.Lexer lexer = new Lexer.Lexer();

            lexer.AddDefinition(new TokenDefinition {
                Regex = new Regex("[Aa][Dd][Dd]"), Type = TokenTyp.Add
            });
            lexer.AddDefinition(new TokenDefinition {
                Regex = new Regex("\\("), Type = TokenTyp.OpenParenthesis
            });
            lexer.AddDefinition(new TokenDefinition {
                Regex = new Regex("\\)"), Type = TokenTyp.CloseParenthesis
            });
            lexer.AddDefinition(new TokenDefinition {
                Regex = new Regex(","), Type = TokenTyp.Comma
            });
            lexer.AddDefinition(new TokenDefinition {
                Regex = new Regex(@"\d+"), Type = TokenTyp.Number
            });
            lexer.AddDefinition(new TokenDefinition {
                Regex = new Regex(@"\s+"), Type = TokenTyp.Whitespace, IsIgnored = true
            });

            LanguageParser parser = new LanguageParser();
            int            res    = parser.Parse(lexer.Tokenize("ADD(ADD(1,ADD(1,3)) ,ADD(1,5))").ToList());

            Assert.AreEqual(11, res);
        }
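
The Lexer type configured above is not included in these excerpts. As a rough sketch only, a table-driven lexer with the same AddDefinition/Tokenize surface might look like the following; the Token initializer members and the exception message are assumptions, not the library's actual implementation.

        // Sketch only: a definition-table lexer matching the AddDefinition/Tokenize
        // usage above. Token.Value and the exception text are assumed names.
        public class SketchLexer
        {
            private readonly List<TokenDefinition> _definitions = new List<TokenDefinition>();

            public void AddDefinition(TokenDefinition definition) => _definitions.Add(definition);

            public IEnumerable<Token> Tokenize(string source)
            {
                int position = 0;

                while (position < source.Length)
                {
                    TokenDefinition matched = null;
                    int             length  = 0;

                    // the first definition whose regex matches at the current position wins
                    foreach (var definition in _definitions)
                    {
                        var m = definition.Regex.Match(source, position);

                        if (m.Success && m.Index == position && m.Length > 0)
                        {
                            matched = definition;
                            length  = m.Length;
                            break;
                        }
                    }

                    if (matched == null)
                    {
                        throw new Exception($"Unrecognized symbol '{source[position]}'.");
                    }

                    if (!matched.IsIgnored)
                    {
                        yield return new Token { Type = matched.Type, Value = source.Substring(position, length) };
                    }

                    position += length;
                }
            }
        }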
Example No. 3
        /// <summary>
        /// Compiles the specified code and executes it.
        /// </summary>
        /// <param name="code">The code string to compile.</param>
        /// <returns>The result of the evaluation on success; otherwise null.</returns>
        public LuryObject Evaluate(string code)
        {
            var lexer           = new Lexer.Lexer(code + '\n');
            var succeedTokenize = lexer.Tokenize();

            lexer.Logger.CopyTo(this.OutputLogger);

            if (!succeedTokenize)
            {
                return(null);
            }

            var globalContext = new LuryContext();

            Intrinsic.SetBuiltInFunctions(globalContext);

            var     parser  = new Parser();
            Routine routine = null;

            // parsing
            try
            {
                routine = (Routine)parser.yyparse(new Lex2yyInput(lexer), new yydebug.yyDebugSimple());
            }
            catch (yyParser.yyException ex)
            {
                this.ReportyyException(ex, code);
                return(null);
            }

            // running
            if (routine == null)
            {
                return(null);
            }
            else
            {
                try
                {
                    var exit = routine.Evaluate(globalContext);

                    if (exit.ExitReason == StatementExitReason.Break)
                    {
                        throw new LuryException(LuryExceptionType.WrongBreak);
                    }

                    return(exit.ReturnValue);
                }
                catch (LuryException ex)
                {
                    Console.Error.WriteLine(ex.Message);
                    return(null);
                }
            }
        }
Example No. 4
        public void Tokenize__ConstantSum__Correct()
        {
            var expr   = "int.MaxValue + long.MaxValue";
            var lexer  = new Lexer.Lexer(expr);
            var tokens = lexer.Tokenize();

            Assert.Equal(TokenType.Constant, tokens[0].Type);
            Assert.Equal(int.MaxValue.ToString(), tokens[0].Value);
            Assert.Equal(TokenType.Plus, tokens[1].Type);
            Assert.Equal(TokenType.Constant, tokens[2].Type);
            Assert.Equal(long.MaxValue.ToString(), tokens[2].Value);
        }
Example No. 5
        public void Tokenize__ConstantMulWithExcessBrackets__Correct()
        {
            var expr   = "(int.MaxValue + (long.MaxValue))*(int.MinValue*long.MinValue)";
            var lexer  = new Lexer.Lexer(expr);
            var tokens = lexer.Tokenize();

            Assert.Equal(TokenType.Constant, tokens[1].Type);
            Assert.Equal(TokenType.Constant, tokens[4].Type);
            Assert.Equal(TokenType.Minus, tokens[9].Type);
            Assert.Equal(TokenType.Constant, tokens[10].Type);
            Assert.Equal(TokenType.Minus, tokens[12].Type);
            Assert.Equal(TokenType.Constant, tokens[13].Type);
        }
Example No. 6
        public void Tokenize__ConstantMul__Correct()
        {
            var expr   = "(int.MaxValue + long.MaxValue)*(int.MinValue*long.MinValue)";
            var lexer  = new Lexer.Lexer(expr);
            var tokens = lexer.Tokenize();

            Assert.Equal(TokenType.Constant, tokens[1].Type);
            Assert.Equal(TokenType.Constant, tokens[3].Type);
            Assert.Equal(TokenType.Minus, tokens[7].Type);
            Assert.Equal(TokenType.Constant, tokens[8].Type);
            Assert.Equal(TokenType.Minus, tokens[10].Type);
            Assert.Equal(TokenType.Constant, tokens[11].Type);
        }
Example No. 7
        public static IStatement[] GetParseResultStatements(string expression, MethodInfo[] methodInfos = null)
        {
            var lexer        = new Lexer.Lexer(expression);
            var readOnlyList = lexer.Tokenize();
            var context      = new ParserContext(
                readOnlyList,
                new Dictionary <string, CompilerType>
            {
                { "x", CompilerType.Long }, { "y", CompilerType.Long }, { "z", CompilerType.Long }
            },
                new Dictionary <string, FieldInfo>(),
                methodInfos?.ToDictionary(x => x.Name, x => x) ?? new Dictionary <string, MethodInfo>(),
                true);
            var parser = new Parser.Parser(context);
            var result = parser.Parse();

            return(result.Statements);
        }
Example No. 8
        public void Tokenize__BracketNumBracket__LexerDoesntReturnMethodCallExpression()
        {
            var randomExpr = @"x *(8 * (5))";
            var lexer      = new Lexer.Lexer(randomExpr);
            var tokens     = lexer.Tokenize();

            Assert.Equal(TokenType.Variable, tokens[0].Type);
            Assert.Equal("x", tokens[0].Value);
            Assert.Equal(TokenType.Star, tokens[1].Type);
            Assert.Equal(TokenType.LeftParent, tokens[2].Type);
            Assert.Equal(TokenType.Constant, tokens[3].Type);
            Assert.Equal("8", tokens[3].Value);
            Assert.Equal(TokenType.Star, tokens[4].Type);
            Assert.Equal(TokenType.LeftParent, tokens[5].Type);
            Assert.Equal(TokenType.Constant, tokens[6].Type);
            Assert.Equal("5", tokens[6].Value);
            Assert.Equal(TokenType.RightParent, tokens[7].Type);
            Assert.Equal(TokenType.RightParent, tokens[8].Type);
        }
Example No. 9
        public static IExpression GetParseResultExpression(string expression, bool constantFolding = true,
                                                           MethodInfo[] methodInfos = null)
        {
            var lexer        = new Lexer.Lexer(expression);
            var readOnlyList = lexer.Tokenize();
            var context      = new ParserContext(
                readOnlyList,
                new Dictionary <string, CompilerType>
            {
                { "x", CompilerType.Long }, { "y", CompilerType.Long }, { "z", CompilerType.Long }
            },
                null,
                methodInfos?.ToDictionary(x => x.Name, x => x) ?? new Dictionary <string, MethodInfo>(),
                constantFolding
                );
            var parser = new Parser.Parser(context);
            var result = parser.ParseExpression();

            return(result);
        }
Example No. 10
        public void Tokenize__IfElseStatement__Correct()
        {
            var expr = "if(1 == 1) {return 1} else {return 2}";

            var lexer  = new Lexer.Lexer(expr);
            var result = lexer.Tokenize();

            Assert.Equal(TokenType.IfWord, result[0].Type);
            Assert.Equal(TokenType.LeftParent, result[1].Type);
            Assert.Equal(TokenType.Constant, result[2].Type);
            Assert.Equal(TokenType.EqualTo, result[3].Type);
            Assert.Equal(TokenType.Constant, result[4].Type);
            Assert.Equal(TokenType.RightParent, result[5].Type);
            Assert.Equal(TokenType.LeftBrace, result[6].Type);
            Assert.Equal(TokenType.ReturnWord, result[7].Type);
            Assert.Equal(TokenType.Constant, result[8].Type);
            Assert.Equal(TokenType.RightBrace, result[9].Type);
            Assert.Equal(TokenType.ElseWord, result[10].Type);
            Assert.Equal(TokenType.LeftBrace, result[11].Type);
            Assert.Equal(TokenType.ReturnWord, result[12].Type);
            Assert.Equal(TokenType.Constant, result[13].Type);
            Assert.Equal(TokenType.RightBrace, result[14].Type);
        }
Example No. 11
        public void ParseAxisParameter_WithSubsequentIdentifiers_SucceedsReturnsAxisParameter()
        {
            //ARRANGE
            const string queryString = "[Aaa].[Bbb].[Ccc].FUNCTION(1, 2).FUNCTION";

            const string expectedString = "[Aaa].[Bbb].[Ccc].FUNCTION(1, 2).FUNCTION";

            //ACT
            MdxExpressionBase expression;
            bool isSucceeded = MdxParser.TryParseMember(_lexer.Tokenize(queryString).GetStatedTwoWayEnumerator(), out expression);

            //ASSERT
            Assert.That(isSucceeded, Is.True);
            Assert.That(expression, Is.InstanceOf <MdxMember>());
            Assert.That(expression.ToString(), Is.EqualTo(expectedString));
        }
Example No. 12
        private IReadOnlyList <Token> GetLexerResult(string expr)
        {
            var lexer = new Lexer.Lexer(expr);

            return(lexer.Tokenize());
        }
Example No. 13
        private ExpressionNode ParseInterpolatedString()
        {
            Expect(TokenType.INTERPOLATED_STRING);
            var pos = Position();
            var str = Current().Text;

            Next();

            var parts = new List <ExpressionNode>();

            bool inExpr = false;
            int  level  = 0;
            var  start  = 0;

            int i = 0;

            char peek() => str[i];
            string current() => str.Substring(start, i - start - 1);

            bool accept(char c)
            {
                if (peek() == c)
                {
                    i++;
                    return(true);
                }
                return(false);
            }

            void addString()
            {
                var cur = current();

                parts.Add(new StringNode(new SourcePosition(pos.Line, pos.Column + i - cur.Length + 1), cur));
            }

            void addExpression()
            {
                var cur    = current();
                var lexer  = new Lexer.Lexer(cur, new SourcePosition(pos.Line - 1, 0 /* @TODO wrong column number */));
                var parser = new Parser("<interpolated string expression>", lexer.Tokenize());

                parts.Add(parser.ParseExpression());
            }

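            // Scan the raw token text: runs of plain text become StringNode parts,
            // while brace-balanced "{...}" spans are re-lexed and parsed as expressions.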
            while (i < str.Length)
            {
                if (inExpr)
                {
                    if (accept('{'))
                    {
                        level++;
                    }
                    else if (accept('}'))
                    {
                        level--;
                    }

                    if (level == 0)
                    {
                        addExpression();
                        inExpr = false;
                        start  = i;
                    }
                    else
                    {
                        i++;
                    }
                }
                else
                {
                    if (accept('{'))
                    {
                        addString();
                        inExpr = true;
                        level  = 1;
                        start  = i;
                    }
                    else
                    {
                        i++;
                    }
                }
            }

            if (inExpr)
            {
                throw new ParseException("String interpolation expression not ended");
            }
            if (start != i)
            {
                i++;
                addString();
            }

            if (parts.Count == 1)
            {
                return(parts[0]);
            }
            else
            {
                var ret = new BinaryNode(pos /* @TODO: wrong column number */, BinaryOP.CONCAT, parts[0], parts[1]);

                for (var j = 2; j < parts.Count; j++)
                {
                    ret = new BinaryNode(pos /* @TODO: wrong column number */, BinaryOP.CONCAT, ret, parts[j]);
                }

                return(ret);
            }
        }
Example No. 14
        private void LexerTests()
        {
            Lexer.Lexer lex = new Lexer.Lexer();

            List <Token> tokens = lex.Tokenize("//uibutton");

            Debug.Assert(tokens.Count == 2);
            Debug.Assert(tokens [0].Content == "//");
            Debug.Assert(tokens [0].Desc == "ANCESTOR");
            Debug.Assert(tokens [1].Content == "uibutton");
            Debug.Assert(tokens [1].Desc == "IDENTIFIER");


            tokens = lex.Tokenize("/uibutton");

            Debug.Assert(tokens.Count == 2);
            Debug.Assert(tokens [0].Content == "/");
            Debug.Assert(tokens [0].Desc == "CHILD");
            Debug.Assert(tokens [1].Content == "uibutton");
            Debug.Assert(tokens [1].Desc == "IDENTIFIER");

            tokens = lex.Tokenize("//uilabel[3]");

            Debug.Assert(tokens.Count == 5);
            Debug.Assert(tokens [0].Content == "//");
            Debug.Assert(tokens [1].Content == "uilabel");
            Debug.Assert(tokens [2].Content == "[");
            Debug.Assert(tokens [3].Content == "3");
            Debug.Assert(tokens [4].Content == "]");
            Debug.Assert(tokens [2].Desc == "OPEN_PREDICATE");
            Debug.Assert(tokens [3].Desc == "NUMBER");


            tokens = lex.Tokenize("/ uibutton");

            Debug.Assert(tokens.Count == 3);
            Debug.Assert(tokens [0].Content == "/");
            Debug.Assert(tokens [0].Desc == "CHILD");
            Debug.Assert(tokens [2].Content == "uibutton");
            Debug.Assert(tokens [1].Content == " ");
            Debug.Assert(tokens [1].Desc == "WHITE_SPACE");
            Debug.Assert(tokens [2].Desc == "IDENTIFIER");

            tokens = lex.Tokenize("//uilabel/uibutton//altceva");

            Debug.Assert(tokens.Count == 6);
            Debug.Assert(tokens [3].Content == "uibutton");
            Debug.Assert(tokens [3].Desc == "IDENTIFIER");
            Debug.Assert(tokens [2].Desc == "CHILD");

            tokens = lex.Tokenize("//label[@text=\"ceva\"]");

            Debug.Assert(tokens.Count == 7);
            Debug.Assert(tokens [3].Content == "text");
            Debug.Assert(tokens [3].Desc == "ATTRIBUTE");
            Debug.Assert(tokens [4].Content == "=");
            Debug.Assert(tokens [4].Desc == "EQUAL");
            Debug.Assert(tokens [5].Content == "ceva");
            Debug.Assert(tokens [5].Desc == "STRING");

            tokens = lex.Tokenize("//label[@text=\"ceva\" and @tre=\"23232\"]");

            Debug.Assert(tokens.Count == 13);
            Debug.Assert(tokens [3].Content == "text");
            Debug.Assert(tokens [3].Desc == "ATTRIBUTE");
            Debug.Assert(tokens [4].Content == "=");
            Debug.Assert(tokens [4].Desc == "EQUAL");
            Debug.Assert(tokens [5].Content == "ceva");
            Debug.Assert(tokens [5].Desc == "STRING");
            Debug.Assert(tokens [7].Content == "and");
            Debug.Assert(tokens [7].Desc == "AND");
            Debug.Assert(tokens [11].Content == "23232");
            Debug.Assert(tokens [11].Desc == "STRING");

            tokens = lex.Tokenize("//label[@text=\"ceva\" and @tre=\"23232\" and @rez=\"ultim and\"]");

            Debug.Assert(tokens.Count == 19);
            Debug.Assert(tokens [3].Content == "text");
            Debug.Assert(tokens [3].Desc == "ATTRIBUTE");
            Debug.Assert(tokens [4].Content == "=");
            Debug.Assert(tokens [4].Desc == "EQUAL");
            Debug.Assert(tokens [5].Content == "ceva");
            Debug.Assert(tokens [5].Desc == "STRING");
            Debug.Assert(tokens [7].Content == "and");
            Debug.Assert(tokens [7].Desc == "AND");
            Debug.Assert(tokens [11].Content == "23232");
            Debug.Assert(tokens [11].Desc == "STRING");
            Debug.Assert(tokens [13].Content == "and");
            Debug.Assert(tokens [13].Desc == "AND");
            Debug.Assert(tokens [15].Content == "rez");
            Debug.Assert(tokens [15].Desc == "ATTRIBUTE");
            Debug.Assert(tokens [17].Content == "ultim and");
            Debug.Assert(tokens [17].Desc == "STRING");
        }
Example No. 15
        public List <XPathNode> Parse(string xPath)
        {
            List <XPathNode> ret = new List <XPathNode> ();

            //prepend // if the path doesn't already start with / or //
            if (xPath.StartsWith("/") == false && xPath.StartsWith("//") == false)
            {
                xPath = "//" + xPath;
            }

            List <Token> tokens = _lexer.Tokenize(xPath);

            int currentIndex = 0;

            while (currentIndex < tokens.Count)
            {
                //searching for ancestry
                XPathNode newNode = new XPathNode();
                newNode.IsChild = true;

                if (tokens [currentIndex].Desc == "ANCESTOR")
                {
                    newNode.IsChild = false;
                }

                currentIndex++;

                while (tokens [currentIndex].Desc == "WHITE_SPACE")
                {
                    currentIndex++;
                }

                newNode.TagName = tokens [currentIndex].Content;

                currentIndex++;

                while (currentIndex < tokens.Count && tokens [currentIndex].Desc == "WHITE_SPACE")
                {
                    currentIndex++;
                }

                //check for attributes
                if (currentIndex < tokens.Count && tokens [currentIndex].Desc == "OPEN_PREDICATE")
                {
                    //maybe the input is broken and we don't have
                    //a closing bracket
                    while (currentIndex < tokens.Count && tokens [currentIndex].Desc != "END_PREDICATE")
                    {
                        currentIndex++;

                        while (currentIndex < tokens.Count && tokens [currentIndex].Desc == "WHITE_SPACE")
                        {
                            currentIndex++;
                        }

                        //number
                        if (tokens [currentIndex].Desc == "NUMBER")
                        {
                            var numberPredicate = new XPathNumberPredicate();
                            numberPredicate.Number = int.Parse(tokens[currentIndex].Content);

                            newNode.predicates.Add(numberPredicate);

                            currentIndex++;

                            while (currentIndex < tokens.Count && tokens [currentIndex].Desc == "WHITE_SPACE")
                            {
                                currentIndex++;
                            }

                            //unexpected trailing tokens inside the predicate:
                            //keep what was parsed so far and bail out
                            if (tokens [currentIndex].Desc != "END_PREDICATE")
                            {
                                ret.Add(newNode);
                                break;
                            }
                        }

                        //attribute
                        if (tokens [currentIndex].Desc == "ATTRIBUTE")
                        {
                            XPathAttribute attribute = new XPathAttribute();
                            attribute.Name = tokens [currentIndex].Content;
                            newNode.predicates.Add(attribute);

                            currentIndex++;

                            while (currentIndex < tokens.Count && tokens [currentIndex].Desc == "WHITE_SPACE")
                            {
                                currentIndex++;
                            }

                            //if the predicate ends here this is an attribute-existence test; otherwise read the comparison
                            if (tokens [currentIndex].Desc != "END_PREDICATE")
                            {
                                //how to compare
                                if (tokens [currentIndex].Desc == "EQUAL")
                                {
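                                    // the EQUAL check is effectively a placeholder: whatever token
                                    // sits here is skipped below and the next token becomes ValueToMatch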
                                }

                                currentIndex++;

                                while (currentIndex < tokens.Count && tokens [currentIndex].Desc == "WHITE_SPACE")
                                {
                                    currentIndex++;
                                }

                                attribute.ValueToMatch = tokens [currentIndex].Content;

                                currentIndex++;

                                while (currentIndex < tokens.Count && tokens [currentIndex].Desc == "WHITE_SPACE")
                                {
                                    currentIndex++;
                                }

                                if (currentIndex < tokens.Count && tokens [currentIndex].Desc == "AND")
                                {
                                    currentIndex++;
                                }
                            }
                        }
                    }

                    currentIndex++;
                }

                ret.Add(newNode);
            }

            return(ret);
        }
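
For illustration only, parsing a simple path with the method above would be expected to yield a single node; XPathParser is a hypothetical name for the declaring type, and _lexer is assumed to be configured with the definitions exercised in LexerTests() above.

        // Hypothetical usage of Parse(); XPathParser is an assumed class name.
        var parser = new XPathParser();
        List <XPathNode> nodes = parser.Parse("//label[@text=\"ceva\"]");

        Debug.Assert(nodes.Count == 1);
        Debug.Assert(nodes [0].IsChild == false);     // "//" is the ANCESTOR token, so IsChild is false
        Debug.Assert(nodes [0].TagName == "label");

        XPathAttribute attr = (XPathAttribute)nodes [0].predicates [0];
        Debug.Assert(attr.Name == "text");
        Debug.Assert(attr.ValueToMatch == "ceva");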
Example No. 16
        /// <summary>
        /// Compiles the specified code and executes it.
        /// </summary>
        /// <param name="code">The code string to compile.</param>
        /// <returns>The result of the evaluation on success; otherwise null.</returns>
        public LuryObject Evaluate(string code)
        {
            var lexer = new Lexer.Lexer(code + '\n');
            var succeedTokenize = lexer.Tokenize();
            lexer.Logger.CopyTo(this.OutputLogger);

            if (!succeedTokenize)
                return null;

            var globalContext = new LuryContext();
            Intrinsic.SetBuiltInFunctions(globalContext);

            var parser = new Parser();
            Routine routine = null;

            // parsing
            try
            {
                routine = (Routine)parser.yyparse(new Lex2yyInput(lexer), new yydebug.yyDebugSimple());
            }
            catch (yyParser.yyException ex)
            {
                this.ReportyyException(ex, code);
                return null;
            }

            // running
            if (routine == null)
                return null;
            else
            {
                try
                {
                    var exit = routine.Evaluate(globalContext);

                    if (exit.ExitReason == StatementExitReason.Break)
                        throw new LuryException(LuryExceptionType.WrongBreak);

                    return exit.ReturnValue;
                }
                catch (LuryException ex)
                {
                    Console.Error.WriteLine(ex.Message);
                    return null;
                }
            }
        }