Exemple #1
0
 public void Initialize()
 {
     // Build a fresh lexer for the arithmetic grammar; patterns are
     // registered in order, most specific first.
     var arithmeticLexer = new Lexer <TokenType>();

     arithmeticLexer.AddToken(@"\d+", TokenType.Number);  // integer literals
     arithmeticLexer.AddToken(@"\+|-", TokenType.Add);    // additive operators
     arithmeticLexer.AddToken(@"\*|/", TokenType.Mul);    // multiplicative operators
     arithmeticLexer.AddToken(@"\(", TokenType.Open);     // left parenthesis
     arithmeticLexer.AddToken(@"\)", TokenType.Close);    // right parenthesis

     lexer = arithmeticLexer;
 }
Exemple #2
0
        protected override void EnterEndState(Lexer lexer)
        {
            // Map the single buffered operator character to its token type.
            // Characters outside the table are silently dropped (same as the
            // original switch with no default case).
            char opChar = tokenText[0];
            TokenType? kind = null;

            if (opChar == '+')
            {
                kind = TokenType.Plus;
            }
            else if (opChar == '-')
            {
                kind = TokenType.Minus;
            }
            else if (opChar == '*')
            {
                kind = TokenType.Star;
            }
            else if (opChar == '/')
            {
                kind = TokenType.Slash;
            }
            else if (opChar == '=')
            {
                kind = TokenType.Assignment;
            }
            else if (opChar == '<')
            {
                kind = TokenType.Smaller;
            }
            else if (opChar == '>')
            {
                kind = TokenType.Bigger;
            }

            if (kind.HasValue)
            {
                lexer.AddToken(new Token(kind.Value, opChar.ToString(), lexer.Line));
            }

            // Always reset the buffer and return to the start state.
            tokenText.Clear();
            lexer.LexerState = startLexerState;
        }
Exemple #3
0
        protected override void Separator(Lexer lexer)
        {
            // Remove the separator character from the buffer and take the
            // accumulated word.
            PopLastChar();
            string word = GetTokenStringText();

            // Keywords map to their own token types; the enum member name is
            // the keyword with its first letter capitalized.
            TokenType kind = TokenType.Identifier;
            if (IsKeyWord(word))
            {
                string enumName = word.Substring(0, 1).ToUpper() + word.Substring(1);
                kind = (TokenType)Enum.Parse(typeof(TokenType), enumName);
            }

            // Re-buffer the separator for the next state, then emit the token.
            tokenText.Add(ch);
            lexer.AddToken(new Token(kind, word, lexer.Line));
        }
Exemple #4
0
        protected override void Initialize(out Lexer <TokenType> lexer, out IParser <Lexer <TokenType> .Token, int, object> parser)
        {
            // --- lexical rules (registered most specific first) ---
            lexer = new Lexer <TokenType>();
            lexer.AddToken(@"\d+", TokenType.Number);
            lexer.AddToken(@"\+|-", TokenType.Add);
            lexer.AddToken(@"\*|/", TokenType.Mul);
            lexer.AddToken(@"\(", TokenType.Open);
            lexer.AddToken(@"\)", TokenType.Close);
            lexer.AddToken(@"\s+", TokenType.Blank);

            // --- terminal parsers ---
            var NUMBER = tk(TokenType.Number, "数字").Convert(t => int.Parse(t.Value));
            var ADD    = tk(TokenType.Add).Convert(t => t.Value);
            var MUL    = tk(TokenType.Mul).Convert(t => t.Value);
            var OPEN   = tk(TokenType.Open, "左括号");
            var CLOSE  = tk(TokenType.Close, "右括号");

            // --- grammar rules, declared first so they can refer to each other ---
            var factorRule = CreateRule();
            var termRule   = CreateRule();
            var expRule    = CreateRule();

            // factor ::= NUMBER | '(' exp ')'
            factorRule.Imply(Alt(NUMBER, OPEN.Right(expRule).Left(CLOSE)));
            // term ::= factor (('*'|'/') factor)*
            termRule.Imply(Seq(factorRule, Seq(MUL, factorRule).Loop()).Convert(Op));
            // exp ::= term (('+'|'-') term)*
            expRule.Imply(Seq(termRule, Seq(ADD, termRule).Loop()).Convert(Op));

            parser = expRule;
        }
Exemple #5
0
        protected override void Separator(Lexer lexer)
        {
            // The first '.' seen inside a number is the decimal point: keep
            // buffering instead of terminating the token.
            if (Commcount == 0 && ch == '.')
            {
                Commcount = 1;
                return;
            }

            // Otherwise the number is complete: drop the separator from the
            // buffer, build the token, then re-buffer the separator so the
            // next state sees it.
            PopLastChar();
            string digits      = GetTokenStringText();
            Token  numberToken = new Token(TokenType.Number, digits, lexer.Line);

            tokenText.Add(ch);
            lexer.AddToken(numberToken);
            Commcount = 0;
        }
Exemple #6
0
        /// <summary>
        /// Builds the lexer and the full expression grammar of the language
        /// (case/of, do-blocks, var bindings, lambdas, let-definitions and a
        /// precedence ladder of operators), returning the program parser.
        /// </summary>
        /// <param name="lexer">Receives the fully configured lexer.</param>
        /// <param name="parser">Receives the parser that produces a <c>Program</c>.</param>
        protected override void Initialize(out Lexer <TokenType> lexer, out IParser <Lexer <TokenType> .Token, Program, object> parser)
        {
            // One or more operator characters, e.g. "+", "<=", "||".
            string operatorRegex = @"([+\-*/\\<>=%:&|^!])+";

            // Token patterns. Judging by "=>" being registered before "=",
            // registration order appears to matter (first match wins).
            lexer = new Lexer <TokenType>();
            lexer.AddToken(@"case|of|end|let|do|var", TokenType.Keyword);
            lexer.AddToken(@"\d+\.\d+", TokenType.Float);
            lexer.AddToken(@"\d+", TokenType.Integer);
            // NOTE(review): the "\." alternative matches a literal dot, which
            // "[^""]" already covers; it was presumably meant to be "\\." so
            // that backslash-escaped characters (including \") are accepted
            // inside string literals — confirm against Escape().
            lexer.AddToken(@"""([^""]|\.)*""", TokenType.String);
            lexer.AddToken(@"'[a-zA-Z_]\w*", TokenType.Flag);
            // An identifier, or an operator wrapped in parentheses like "(+)".
            lexer.AddToken(@"([a-zA-Z_]\w*)|(\(" + operatorRegex + @"\))", TokenType.Identifier);
            lexer.AddToken(@"\(", TokenType.OpenBracket);
            lexer.AddToken(@"\)", TokenType.CloseBracket);
            lexer.AddToken(@"\[", TokenType.OpenSquare);
            lexer.AddToken(@"\]", TokenType.CloseSquare);
            // NOTE(review): ":" is registered before ":>"; if patterns are
            // tried in registration order, ":>" below may never match —
            // verify the lexer's matching semantics.
            lexer.AddToken(@":", TokenType.Colon);
            lexer.AddToken(@",", TokenType.Comma);
            lexer.AddToken(@"=>", TokenType.Infer);
            lexer.AddToken(@"=", TokenType.Equal);
            lexer.AddToken(@";", TokenType.Semicolon);
            lexer.AddToken(@"\\", TokenType.Lambda);
            lexer.AddToken(@":>", TokenType.Invoke);
            lexer.AddToken(operatorRegex, TokenType.Operator);
            lexer.AddToken(@"\s+", TokenType.Blank);

            // Grammar rules are created up front so they can reference each
            // other recursively; bodies are attached below via Imply().
            var simple    = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var primitive = new RuleParser <Lexer <TokenType> .Token, Expression, object>();

            var termExpr   = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var mulExpr    = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var addExpr    = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var andExpr    = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var orExpr     = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var compExpr   = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var bxorExpr   = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var bandExpr   = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var borExpr    = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var opExpr     = new RuleParser <Lexer <TokenType> .Token, Expression, object>();
            var invokeExpr = new RuleParser <Lexer <TokenType> .Token, Expression, object>();

            // The lowest-precedence rule is the general expression entry point.
            var expression = invokeExpr;
            var program    = new RuleParser <Lexer <TokenType> .Token, Program, object>();

            // Literal terminals. ("表达式" is the expected-symbol message,
            // i.e. "expression" in Chinese.)
            var INTEGER = tk(TokenType.Integer, "表达式").Convert(t => (Expression) new PrimitiveExpression()
            {
                TokenPosition = t,
                Value         = int.Parse(t.Value)
            });
            var FLOAT = tk(TokenType.Float, "表达式").Convert(t => (Expression) new PrimitiveExpression()
            {
                TokenPosition = t,
                Value         = double.Parse(t.Value)
            });
            var STRING = tk(TokenType.String, "表达式").Convert(t => (Expression) new PrimitiveExpression()
            {
                TokenPosition = t,
                // Strip the surrounding quotes before un-escaping.
                Value         = Escape(t.Value.Substring(1, t.Value.Length - 2))
            });
            var BOOLEAN = Alt(tk("true"), tk("false")).Convert(t => (Expression) new PrimitiveExpression()
            {
                TokenPosition = t,
                Value         = t.Value == "true"
            });

            var id = tk(TokenType.Identifier, "表达式").Convert(t => (Expression) new IdentifierExpression()
            {
                TokenPosition = t,
                Name          = t.Value
            });
            var flag = tk(TokenType.Flag, "表达式").Convert(t => (Expression) new FlagExpression()
            {
                TokenPosition = t,
                Name          = t.Value
            });

            // Array literal: "[ e , e , ... ]" (possibly empty).
            var array = Seq(
                tk("["),
                Seq(expression, tk(",").Right(expression).Loop()).Opt(),
                tk("]")
                )
                        .Convert(p =>
            {
                if (p.Value2.Count() == 0)
                {
                    // "[]" — empty array.
                    return((Expression) new ArrayExpression()
                    {
                        TokenPosition = p.Value1,
                        Elements = new List <Expression>()
                    });
                }
                else
                {
                    // Head element followed by the comma-separated tail.
                    return((Expression) new ArrayExpression()
                    {
                        TokenPosition = p.Value1,
                        Elements = new Expression[] { p.Value2.First().Value1 }.Concat(p.Value2.First().Value2).ToList()
                    });
                }
            });

            // "( e )" is just grouping; "( e : e : ... )" builds a list.
            var list = tk("(").Right(Seq(expression, tk(":").Right(expression).Loop())).Left(tk(")")).Convert(p =>
            {
                if (p.Value2.Count() == 0)
                {
                    return(p.Value1);
                }
                else
                {
                    return((Expression) new ListExpression()
                    {
                        TokenPosition = p.Value1.TokenPosition,
                        Elements = new Expression[] { p.Value1 }.Concat(p.Value2).ToList()
                    });
                }
            });

            simple.Imply(Alt(
                             INTEGER, FLOAT, STRING, BOOLEAN, id, flag, array, list));

            // Pattern match: "case e of pattern => e ; ... end".
            var match = Seq(
                tk("case").Right(expression).Left(tk("of")),
                Seq(expression, tk("=>").Right(expression)).Left(tk(";")).Loop().Left(tk("end"))
                )
                        .Convert(p =>
            {
                return((Expression) new CaseExpression()
                {
                    TokenPosition = p.Value1.TokenPosition,
                    Source = p.Value1,
                    Pairs = p.Value2.Select(pair => new CaseExpression.CasePair()
                    {
                        Pattern = pair.Value1,
                        Expression = pair.Value2
                    }).ToList()
                });
            });

            // Monadic block: "do [( provider )] e ; ... end".
            var monad = Seq(
                Seq(tk("do"), tk("(").Right(expression).Left(tk(")")).Opt()),
                expression.Left(tk(";")).Loop().Left(tk("end"))
                )
                        .Convert(p =>
            {
                return((Expression) new DoExpression()
                {
                    TokenPosition = p.Value1.Value1,
                    // The optional "(provider)" part; null when absent.
                    MonadProvider = p.Value1.Value2.Count() == 0 ? null : p.Value1.Value2.First(),
                    Expressions = p.Value2.ToList()
                });
            });

            // Binding inside a do-block: "var pattern = e".
            var monadvar = Seq(tk("var"), expression, tk("="), expression).Convert(p =>
            {
                return((Expression) new VarExpression()
                {
                    TokenPosition = p.Value1,
                    Pattern = p.Value2,
                    Expression = p.Value4
                });
            });

            // Lambda: "\ p , p , ... => e".
            var lambda = Seq(
                tk("\\").Right(Seq(id, tk(",").Right(id).Loop())),
                tk("=>").Right(expression)
                )
                         .Convert(p =>
            {
                return((Expression) new LambdaExpression()
                {
                    TokenPosition = p.Value1.Value1.TokenPosition,
                    Parameters = new string[] { (p.Value1.Value1 as IdentifierExpression).Name }
                    .Concat(p.Value1.Value2.Select(v => (v as IdentifierExpression).Name))
                    .ToList(),
                    Expression = p.Value2
                });
            });

            // Definition: "let name pattern* = e".
            var def = Seq(
                tk("let").Right(id),
                simple.Loop(),
                tk("=").Right(expression)
                )
                      .Convert(p =>
            {
                return((Expression) new DefinitionExpression()
                {
                    TokenPosition = p.Value1.TokenPosition,
                    Name = (p.Value1 as IdentifierExpression).Name,
                    Patterns = p.Value2.ToList(),
                    Expression = p.Value3
                });
            });

            primitive.Imply(Alt(
                                match, monad, monadvar, lambda, def, simple));

            // Juxtaposition is function application: "f x y".
            termExpr.Imply(Seq(primitive, primitive.Loop()).Convert(ToInvoke));

            // Operator precedence ladder, tightest first.
            // NOTE(review): '&' and '|' bind tighter than the comparison
            // operators here, which is unusual — confirm this is intended.
            mulExpr.Imply(Seq(termExpr, Seq(tks(new string[] { @"*", @"/", @"%" }), termExpr).Loop()).Convert(ToOperator));
            addExpr.Imply(Seq(mulExpr, Seq(tks(new string[] { @"+", @"-" }), mulExpr).Loop()).Convert(ToOperator));
            andExpr.Imply(Seq(addExpr, Seq(tk(@"&"), addExpr).Loop()).Convert(ToOperator));
            orExpr.Imply(Seq(andExpr, Seq(tk(@"|"), andExpr).Loop()).Convert(ToOperator));
            compExpr.Imply(Seq(orExpr, Seq(tks(new string[] { @"<", @">", @"<=", @">=", @"<>", @"==" }), orExpr).Loop()).Convert(ToOperator));
            bxorExpr.Imply(Seq(compExpr, Seq(tk(@"^"), compExpr).Loop()).Convert(ToOperator));
            bandExpr.Imply(Seq(bxorExpr, Seq(tk(@"&&"), bxorExpr).Loop()).Convert(ToOperator));
            borExpr.Imply(Seq(bandExpr, Seq(tk(@"||"), bandExpr).Loop()).Convert(ToOperator));
            opExpr.Imply(Seq(borExpr, Seq(tk(TokenType.Operator), borExpr).Loop()).Convert(ToOperator));
            // ":>" pipes a value into a function (reverse application).
            invokeExpr.Imply(Seq(opExpr, tk(@":>").Right(opExpr).Loop()).Convert(ToInvokeReverse));

            // A program is a sequence of ';'-terminated expressions.
            program.Imply(expression.Left(tk(";")).LoopToEnd().Convert(es => new Program()
            {
                Definitions = es.ToList()
            }));

            parser = program;
        }
Exemple #7
0
        /// <summary>
        /// Terminates the current symbol token in three phases:
        /// 1) the whole buffer is checked against the known two-character
        ///    symbols (each of these emits and returns immediately);
        /// 2) otherwise the FIRST buffered character is emitted as a
        ///    one-character token;
        /// 3) then the SECOND buffered character decides what happens next
        ///    (another token, or a switch into the number/identifier state).
        /// </summary>
        protected override void Separator(Lexer lexer)
        {
            string symbol = new string(tokenText.ToArray());

            // Phase 1: exact two-character symbols.
            switch (symbol)
            {
            case "++":
            {
                Token token = new Token(TokenType.DoublePlus, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "--":
            {
                Token token = new Token(TokenType.DoubleMinus, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "==":
            {
                Token token = new Token(TokenType.Equal, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "-=":
            {
                Token token = new Token(TokenType.MinusAssignment, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "+=":
            {
                Token token = new Token(TokenType.PlusAssignment, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "*=":
            {
                Token token = new Token(TokenType.StarAssignment, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "/=":
            {
                Token token = new Token(TokenType.SlashAssignment, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            // "//" starts a line comment: skip the rest of the line, no token.
            case "//":
            {
                lexer.NextLine();
                tokenText.Clear();
                lexer.LexerState = startLexerState;
                return;
            }

            // ".." may extend to "..." (vararg); peek one more character.
            case "..":
            {
                ReadChar(lexer);
                if (ch == '.')
                {
                    Token token1 = new Token(TokenType.Vararg, GetTokenStringText(), lexer.Line);
                    lexer.AddToken(token1);
                    lexer.LexerState = startLexerState;
                    return;
                }
                else
                {
                    // Not a third dot: push the lookahead back and restore ch.
                    lexer.Undo();
                    PopLastChar();
                    ch = tokenText.Last();
                }
                Token token = new Token(TokenType.Connect, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case ">=":
            {
                Token token = new Token(TokenType.BiggerEqual, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "<=":
            {
                Token token = new Token(TokenType.SmallerEqual, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "~=":
            {
                Token token = new Token(TokenType.NotEqual, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            case "::":
            {
                Token token = new Token(TokenType.DoubleLable, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                return;
            }

            default:
                break;
            }
            // Phase 2: not a two-character symbol — emit the first character
            // as its own token. Note: deliberately NO return here; control
            // falls through to the second-character dispatch below.
            char firstCh = tokenText[0];

            switch (firstCh)
            {
            case '+':
            {
                Token token = new Token(TokenType.Plus, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '-':
            {
                Token token = new Token(TokenType.Minus, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '*':
            {
                Token token = new Token(TokenType.Star, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '/':
            {
                Token token = new Token(TokenType.Slash, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '=':
            {
                Token token = new Token(TokenType.Assignment, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            // A lone '~' is invalid (only "~=" is legal): emit an Error token.
            case '~':
            {
                Token token = new Token(TokenType.Error, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case ':':
            {
                Token token = new Token(TokenType.SingleLable, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '.':
            {
                Token token = new Token(TokenType.Dawn, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '<':
            {
                Token token = new Token(TokenType.Smaller, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }

            case '>':
            {
                Token token = new Token(TokenType.Bigger, firstCh.ToString(), lexer.Line);
                lexer.AddToken(token);
                break;
            }
            }
            // Phase 3: dispatch on the second buffered character.
            // NOTE(review): assumes the buffer always holds at least two
            // characters when this state's Separator runs — confirm; if not,
            // this indexing throws ArgumentOutOfRangeException.
            char secondChar = tokenText[1];

            switch (secondChar)
            {
            // A digit: restart the buffer with it and lex a number.
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            {
                tokenText.Clear();
                tokenText.Add(secondChar);
                lexer.LexerState = numberLexerState;
                break;
            }

            // Single-character punctuation tokens.
            // NOTE(review): these use GetTokenStringText() for the token
            // text even though the first character was already emitted in
            // phase 2 — presumably it returns just the remaining text;
            // verify the token text is not the full two-character buffer.
            case ',':
            {
                Token token = new Token(TokenType.Comma, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case ';':
            {
                Token token = new Token(TokenType.SemiColon, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case '(':
            {
                Token token = new Token(TokenType.LeftParen, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case ')':
            {
                Token token = new Token(TokenType.RightParen, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case '[':
            {
                Token token = new Token(TokenType.LeftSquare, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case ']':
            {
                Token token = new Token(TokenType.RightSquare, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case '{':
            {
                Token token = new Token(TokenType.LeftBig, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case '}':
            {
                Token token = new Token(TokenType.RightBig, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            case '#':
            {
                Token token = new Token(TokenType.Len, GetTokenStringText(), lexer.Line);
                lexer.AddToken(token);
                lexer.LexerState = startLexerState;
                break;
            }

            // Anything else: restart the buffer with the character and lex
            // an identifier.
            default:
            {
                tokenText.Clear();
                tokenText.Add(secondChar);
                lexer.LexerState = identifierLexerState;
                break;
            }
            }
        }