private static void StatementList()
        {
            Grammar.Add(new GrammarReplaceRule(TokenEnum.StatementList, new CompositeToken
            {
                TokenEnum.StatementList,
                TokenEnum.NewLine(),
                TokenEnum.Statement
            }));
            Grammar.Add(new GrammarReplaceRule(TokenEnum.StatementList, new CompositeToken
            {
                TokenEnum.NewLine(),
                TokenEnum.Statement
            }));
            Grammar.Add(new GrammarReplaceRule(TokenEnum.Statement, new CompositeToken
            {
                TokenEnum.UnlabeledStatement
            }));
            Grammar.Add(new GrammarReplaceRule(TokenEnum.Statement, new CompositeToken
            {
                TokenEnum.Label(),
                TokenEnum.String(":"),
                TokenEnum.UnlabeledStatement
            }));


            Grammar.Add(new GrammarReplaceRule(TokenEnum.StatementList1, new CompositeToken
            {
                TokenEnum.StatementList,
                TokenEnum.NewLine()
            }));
        }
Example #2
        internal AssignmentStatement(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            // VARIABLE = EXPR;
            // VARIABLE += EXPR;
            // VARIABLE -= EXPR;
            // VARIABLE *= EXPR;
            // VARIABLE /= EXPR;
            // VARIABLE %= EXPR;
            // VARIABLE &= EXPR;
            // VARIABLE |= EXPR;
            // TODO: VARIABLE++ / VARIABLE--;
            // or
            // EXPR;

            _scope = scope;
            switch (lexer.TokenType)
            {
            case TokenEnum.VARIABLE:
            {
                if (Parser.TypeTokens[lexer.TokenContents] != null)
                {
                    DeclVariable d = new DeclVariable(scope, lexer);
                    _varExpr = d;
                    _varName = d.varName;
                }
                else
                {
                    _varName = lexer.TokenContents;
                    _varExpr = new IndexedExpression(scope, lexer).Get();
                }

                _assigntype = lexer.TokenType;
                if (_assigntype == TokenEnum.ASSIGN ||
                    _assigntype == TokenEnum.PLUSASSIGN ||
                    _assigntype == TokenEnum.MINUSASSIGN ||
                    _assigntype == TokenEnum.ASTERISKASSIGN ||
                    _assigntype == TokenEnum.SLASHASSIGN ||
                    _assigntype == TokenEnum.PERCENTASSIGN ||
                    _assigntype == TokenEnum.AMPASSIGN ||
                    _assigntype == TokenEnum.PIPEASSIGN
                    )
                {
                    lexer.Next(); //ASSIGN
                    _value = new Expression(scope, lexer).Get();
                }
                else
                {
                    _assigntype = TokenEnum.NOTHING;
                }
            }
            break;

            default:
            {
                _assigntype = TokenEnum.NOTHING;
                _value      = new Expression(scope, lexer).Get();
            }
            break;
            }
        }
        private static void DefinitionList()
        {
            Grammar.Add(new GrammarReplaceRule(TokenEnum.DefList, new CompositeToken
            {
                TokenEnum.DefList,
                TokenEnum.String(";"),
                TokenEnum.Def
            }));
            Grammar.Add(new GrammarReplaceRule(TokenEnum.DefList, new CompositeToken
            {
                TokenEnum.Def
            }));

            Grammar.Add(new GrammarReplaceRule(TokenEnum.DefList1, new CompositeToken
            {
                TokenEnum.DefList
            }));

            Grammar.Add(new GrammarReplaceRule(TokenEnum.Def, new CompositeToken
            {
                TokenEnum.IdList1,
                TokenEnum.String(":"),
                TokenEnum.String("float")
            }));
        }
        public static void InitGrammar()
        {
            //Program rule
            Grammar.Add(new GrammarReplaceRule(TokenEnum.Program, new CompositeToken
            {
                TokenEnum.ProgramName,
                TokenEnum.NewLine(),
                TokenEnum.String("var"),
                TokenEnum.DefList1,
                TokenEnum.NewLine(),
                TokenEnum.String("begin"),
                TokenEnum.StatementList1,
                TokenEnum.String("end")
            }));

            //Definition list
            DefinitionList();

            //Statement list
            StatementList();

            //Unlabeled operator
            UnlabeledOperator();

            IdList();

            //Expression
            FillExpression();

            FillLogicalExpression();
        }
Example #5
 public Token(TokenEnum kind, int start, int end, string value = "")
 {
     _kind = kind;
     _start = start;
     _end = end;
     _value = value;
 }
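 A minimal usage sketch for the constructor above; the NAME member appears in the GetKindDescription example below, and the offsets and text are illustrative assumptions.

 // Hypothetical token spanning source offsets 0..4 with the raw text "user".
 var token = new Token(TokenEnum.NAME, 0, 4, "user");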
Example #7
        public static string GetKindDescription(TokenEnum kind)
        {
            var tokenDictionary = new Dictionary <TokenEnum, string>();
            var output          = "";

            tokenDictionary.Add(TokenEnum.EOF, "EOF");
            tokenDictionary.Add(TokenEnum.BANG, "!");
            tokenDictionary.Add(TokenEnum.DOLLAR, "$");
            tokenDictionary.Add(TokenEnum.PAREN_L, "(");
            tokenDictionary.Add(TokenEnum.PAREN_R, ")");
            tokenDictionary.Add(TokenEnum.SPREAD, "...");
            tokenDictionary.Add(TokenEnum.COLON, ":");
            tokenDictionary.Add(TokenEnum.EQUALS, "=");
            tokenDictionary.Add(TokenEnum.AT, "@");
            tokenDictionary.Add(TokenEnum.BRACKET_L, "[");
            tokenDictionary.Add(TokenEnum.BRACKET_R, "]");
            tokenDictionary.Add(TokenEnum.BRACE_L, "{");
            tokenDictionary.Add(TokenEnum.PIPE, "|");
            tokenDictionary.Add(TokenEnum.BRACE_R, "}");
            tokenDictionary.Add(TokenEnum.NAME, "Name");
            tokenDictionary.Add(TokenEnum.VARIABLE, "Variable");
            tokenDictionary.Add(TokenEnum.INT, "Int");
            tokenDictionary.Add(TokenEnum.FLOAT, "Float");
            tokenDictionary.Add(TokenEnum.STRING, "String");

            return(tokenDictionary.TryGetValue(kind, out output) ? output : null);
        }
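A usage sketch for the lookup above; the results follow directly from the dictionary entries, and kinds without an entry yield null.

            string bang = GetKindDescription(TokenEnum.BANG); // "!"
            string name = GetKindDescription(TokenEnum.NAME); // "Name"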
Example #8
 /// <summary>
 /// Class constructor; assigns values to the attributes.
 /// </summary>
 /// <param name="enumToken">Token enum</param>
 /// <param name="lexema">Token lexeme</param>
 /// <param name="linha">Token line</param>
 /// <param name="coluna">Token column</param>
 public Token(TokenEnum enumToken, String lexema, int linha, int coluna)
 {
     EnumToken = enumToken;
     Lexema    = lexema;
     Linha     = linha;
     Coluna    = coluna;
 }
        public async Task <string> RetrieveToken(TokenEnum tokenEnum)
        {
            var(clientIdName, appKeyName, tenantName) = GetSecretNames(tokenEnum);
            var(clientId, appKey, tenant)             = await GetSecretValues(clientIdName, appKeyName, tenantName);

            return(await _graphApiService.AcquireToken(clientId, appKey, tenant));
        }
Example #10
        public static TokenDescriptionAttribute? GetTokenDescriptionAttribute(this TokenEnum tokenEnum)
        {
            var enumType  = typeof(TokenEnum);
            var EnumValue = enumType.GetMember(tokenEnum.ToString()).First();

            return(EnumValue.GetCustomAttribute <TokenDescriptionAttribute>());
        }
Example #12
 public override bool Equals(object? obj)
 {
     return(obj switch
     {
         Token t => this.GetTokenEnum() == t.GetTokenEnum(),
         TokenEnum k => this.GetTokenEnum() == k,
         _ => false,
     });
 }
Example #13
 public SectionRangeToken(ILocation location, ISection section, int idx, int length, TokenEnum code)
 {
     _location = location;
     _code = code;
     _section = section;
     _idx = idx;
     _length = length;
 }
Example #14
 internal ParseException(Lexer lexer, TokenEnum expected) : base(Resource.Strings.Error_ParseException_6
                                                                 .F(lexer.TokenContents.Replace("\n", "")
                                                                    , lexer.SourceName
                                                                    , lexer.LineNumber
                                                                    , lexer.TokenPosition
                                                                    , expected.GetEnumName()
                                                                    , lexer.LineText))
 {
 }
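 A hedged sketch of how the two-argument constructor above might be raised when a required token is missing; the call site is hypothetical, and BRACKETOPEN is borrowed from the PrimaryExpression example.

 // Hypothetical call site: '(' was required but something else was found.
 if (lexer.TokenType != TokenEnum.BRACKETOPEN)
 {
     throw new ParseException(lexer, TokenEnum.BRACKETOPEN);
 }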
 private static void Assignment()
 {
     Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, new CompositeToken
     {
         TokenEnum.Id(),
         TokenEnum.String("="),
         TokenEnum.Expression1
     }));
 }
Example #16
        internal PrimaryExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            TokenEnum t = lexer.TokenType;

            switch (t)
            {
            case TokenEnum.BRACKETOPEN:
                _child = new ParenthesizedExpression(scope, lexer).Get();
                break;

            case TokenEnum.FUNCTION:
                _child = new Function(scope, lexer).Get();
                break;

            case TokenEnum.VARIABLE:
                _child = new Variable(scope, lexer).Get();
                break;

            case TokenEnum.NEW:
                _child = new NewObjectExpression(scope, lexer).Get();
                break;

            //Literals
            case TokenEnum.NULLLITERAL:
                _child = new NullLiteral(scope, lexer).Get();
                break;

            case TokenEnum.BOOLEANLITERAL:
                _child = new BoolLiteral(scope, lexer).Get();
                break;

            case TokenEnum.DECIMALINTEGERLITERAL:
                _child = new IntLiteral(scope, lexer).Get();
                break;

            case TokenEnum.HEXINTEGERLITERAL:
                _child = new HexLiteral(scope, lexer).Get();
                break;

            case TokenEnum.REALLITERAL:
                _child = new SingleLiteral(scope, lexer).Get();
                break;

            case TokenEnum.STRINGLITERAL:
                _child = new StringLiteral(scope, lexer).Get();
                break;

            case TokenEnum.BRACEOPEN:
                _child = new ArrayLiteral(scope, lexer).Get();
                break;

            default:
                throw new ParseException(lexer);
            }
        }
 private static void If()
 {
     Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, new CompositeToken
     {
         TokenEnum.String("if"),
         TokenEnum.LogicalExpression1,
         TokenEnum.String("then"),
         TokenEnum.String("goto"),
         TokenEnum.Label()
     }));
 }
Example #18
 /// <summary>
 /// Checks whether the current token equals the tokenEnum parameter;
 /// if so, advances the input.
 /// </summary>
 /// <param name="tokenEnum">Token kind to compare against</param>
 /// <returns>true if the token matched and the input was advanced; otherwise false</returns>
 public bool Eat(TokenEnum tokenEnum)
 {
     if (Token.EnumToken == tokenEnum)
     {
         Advance();
         return(true);
     }
     else
     {
         return(false);
     }
 }
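 A hedged usage sketch for Eat; the parser variable and the ASSIGN member are illustrative assumptions, not taken from this snippet.

 // Hypothetical call site: consume an expected token or fall back to error handling.
 if (!parser.Eat(TokenEnum.ASSIGN))
 {
     // report a syntax error: '=' expected
 }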
Example #19
        // Accept

        /// <summary>
        /// If the next token matches tokenEnum, consumes it and returns true; otherwise returns false.
        /// </summary>
        /// <param name="tokenEnum"></param>
        /// <returns></returns>
        public bool Accept(TokenEnum tokenEnum)
        {
            if (Peek(tokenEnum))
            {
                _ = Read();
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #20
 public static void Write(this TokenEnum token, StringBuilder sb, Padding padding = Padding.NONE)
 {
     if ((padding & Padding.PREFIX) > 0)
     {
         sb.Append(" ");
     }
     sb.Append(Definitions[token]);
     if ((padding & Padding.SUFFIX) > 0)
     {
         sb.Append(" ");
     }
 }
Example #21
        private int LookAhead(out TokenEnum token, out string content, out int position, out LintType lint)
        {
            if (lineRemaining == null)
            {
                token    = 0;
                content  = "";
                position = Position;
                lint     = LintType.NONE;
                return(0);
            }
            foreach (var def in m_tokenDefinitions)
            {
                var matched = def.Matcher.Match(lineRemaining);
                if (matched > 0)
                {
                    position = Position + matched;
                    token    = def.Token;
                    lint     = def.Lint;
                    content  = lineRemaining.Substring(0, matched);

                    // special case for linting for type
                    if (lint == LintType.VARIABLE_OR_TYPE && token == TokenEnum.VARIABLE)
                    {
                        if (Parser.TypeTokens.Contains(content))
                        {
                            lint = LintType.TYPE;
                        }
                        else
                        {
                            lint = LintType.VARIABLE;
                        }
                    }

                    // whitespace elimination
                    if (content.Trim().Length == 0)
                    {
                        DoNext(matched, token, content, position);
                        return(LookAhead(out token, out content, out position, out lint));
                    }

                    // comment elimination
                    if (token == TokenEnum.COMMENT)
                    {
                        Linter.Add(new LintElement(LineNumber, Position, LintType.COMMENT));
                        nextLine();
                        return(LookAhead(out token, out content, out position, out lint));
                    }

                    return(matched);
                }
            }
            throw new Exception(Resource.Strings.Error_Lexer_InvalidToken.F(LineNumber, Position, lineRemaining));
        }
 private static void Loop()
 {
     Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, new CompositeToken
     {
         TokenEnum.String("do"),
         TokenEnum.Id(),
         TokenEnum.String("="),
         TokenEnum.Expression1,
         TokenEnum.String("to"),
         TokenEnum.Expression2,
         TokenEnum.StatementList1,
         TokenEnum.String("next")
     }));
 }
Example #23
        internal AddExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            // MULTIEXPR + MULTIEXPR ...
            // MULTIEXPR - MULTIEXPR ...

            _first = GetNext(scope, lexer);

            while (lexer.TokenType == TokenEnum.PLUS || // +
                   lexer.TokenType == TokenEnum.MINUS // -
                   )
            {
                TokenEnum _type = lexer.TokenType;
                lexer.Next(); //PLUS / MINUS
                _set.Add(GetNext(scope, lexer), _type);
            }
        }
Example #24
        public static string Write(this TokenEnum token, Padding padding = Padding.NONE)
        {
            switch (padding)
            {
            case Padding.PREFIX:
                return(" " + Definitions[token]);

            case Padding.SUFFIX:
                return(Definitions[token] + " ");

            case Padding.BOTH:
                return(" " + Definitions[token] + " ");

            default:
                return(Definitions[token]);
            }
        }
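A small usage sketch exercising both Write overloads (Examples #20 and #24); TokenEnum.PLUS and its Definitions entry are assumptions, not taken from these snippets.

        // Hypothetical usage, assuming Definitions maps TokenEnum.PLUS to "+".
        var sb = new StringBuilder();
        TokenEnum.PLUS.Write(sb, Padding.BOTH);                 // appends " + "
        string suffixed = TokenEnum.PLUS.Write(Padding.SUFFIX); // "+ "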
 private static void InputOutput()
 {
     Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, new CompositeToken
     {
         TokenEnum.String("readl"),
         TokenEnum.String("("),
         TokenEnum.IdList1,
         TokenEnum.String(")")
     }));
     Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, new CompositeToken
     {
         TokenEnum.String("writel"),
         TokenEnum.String("("),
         TokenEnum.IdList1,
         TokenEnum.String(")")
     }));
 }
Example #26
        internal MultiplyExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            // UNARYEXPR * UNARYEXPR ...
            // UNARYEXPR / UNARYEXPR ...
            // UNARYEXPR % UNARYEXPR ...

            _first = GetNext(scope, lexer);

            while (lexer.TokenType == TokenEnum.ASTERISK || // *
                   lexer.TokenType == TokenEnum.SLASH || // /
                   lexer.TokenType == TokenEnum.PERCENT // %
                   )
            {
                TokenEnum _type = lexer.TokenType;
                lexer.Next(); //ASTERISK / SLASH / PERCENT
                _set.Add(GetNext(scope, lexer), _type);
            }
        }
Example #27
        internal EqualityExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            // RELATEEXPR == RELATEEXPR ...
            // RELATEEXPR != RELATEEXPR ...

            _first = GetNext(scope, lexer);

            TokenEnum _type = lexer.TokenType;

            if (_type == TokenEnum.EQUAL // ==
                )
            {
                lexer.Next(); //EQUAL
                _second = GetNext(scope, lexer);
            }
            else if (_type == TokenEnum.NOTEQUAL // !=
                     )
            {
                lexer.Next(); //NOTEQUAL
                isUnequal = true;
                _second   = GetNext(scope, lexer);
            }
        }
 private static void IdList()
 {
     Grammar.Add(new GrammarReplaceRule(TokenEnum.IdList1, new CompositeToken
     {
         TokenEnum.IdList
     }));
     Grammar.Add(new GrammarReplaceRule(TokenEnum.IdList, new CompositeToken
     {
         TokenEnum.String(","),
         TokenEnum.Id()
     }));
     Grammar.Add(new GrammarReplaceRule(TokenEnum.IdList, new CompositeToken
     {
         TokenEnum.IdList,
         TokenEnum.String(","),
         TokenEnum.Id()
     }));
     Grammar.Add(new GrammarReplaceRule(TokenEnum.ProgramName, new CompositeToken
     {
         TokenEnum.String("program"),
         TokenEnum.Id()
     }));
 }
Example #29
        private static bool UpdateTokenData(TokenEnum tokenEnum, string tokenCode, ExtTokenData data)
        {
            bool result = false;

            try
            {
                if (!String.IsNullOrWhiteSpace(tokenCode))
                {
                    switch (tokenEnum)
                    {
                    case TokenEnum.ADD:
                        if (DIC_TOKEN_DATA.ContainsKey(tokenCode))
                        {
                            DIC_TOKEN_DATA.Remove(tokenCode);
                        }
                        DIC_TOKEN_DATA[tokenCode] = data;
                        break;

                    case TokenEnum.REMOVE:
                        if (DIC_TOKEN_DATA.ContainsKey(tokenCode))
                        {
                            DIC_TOKEN_DATA.Remove(tokenCode);
                        }
                        break;

                    default:
                        break;
                    }
                    result = true;
                }
            }
            catch (Exception ex)
            {
                LogSystem.Error(ex);
            }
            return(result);
        }
Example #30
 private bool DoNext(int matched, TokenEnum token, string content, int position)
 {
     TokenType     = token;
     TokenContents = content;
     TokenPosition = Position;
     Position      = position;
     if (LineNumber == 206)
     {
         // leftover debugging hook (breakpoint anchor); intentionally empty
     }
     if (matched > 0)
     {
         lineRemaining = lineRemaining.Substring(matched);
         if (lineRemaining.Length == 0)
         {
             nextLine();
         }
         return(true);
     }
     if (lineRemaining == null || lineRemaining.Length == 0)
     {
         nextLine();
     }
     return(false);
 }
Example #31
        internal UnaryExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            // + PRIMARY
            // - PRIMARY
            // ! PRIMARY
            // ~ PRIMARY // not supported yet
            // TODO: ++/-- PRIMARY;
            // ^

            _type = lexer.TokenType;
            if (_type == TokenEnum.PLUS || // +
                _type == TokenEnum.MINUS || // -
                _type == TokenEnum.NOT // !
                )
            {
                lexer.Next(); // PLUS / MINUS / NOT
                _primary = GetNext(scope, lexer);
            }
            else
            {
                _primary = GetNext(scope, lexer);
                _type    = TokenEnum.NOTHING;
            }
        }
        internal RelationalExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
        {
            // ADDEXPR < ADDEXPR
            // ADDEXPR > ADDEXPR
            // ADDEXPR <= ADDEXPR
            // ADDEXPR >= ADDEXPR

            _first = GetNext(scope, lexer);

            _type = lexer.TokenType;
            if (_type == TokenEnum.LESSTHAN || // <
                _type == TokenEnum.GREATERTHAN || // >
                _type == TokenEnum.LESSEQUAL || // <=
                _type == TokenEnum.GREATEREQUAL // >=
                )
            {
                lexer.Next(); //LESSTHAN / GREATERTHAN / LESSEQUAL / GREATEREQUAL
                _second = GetNext(scope, lexer);
            }
            else
            {
                _type = TokenEnum.NOTHING;
            }
        }
Example #33
 public TokenDefinition(string regex, TokenEnum token, LintType lint)
 {
     Matcher = new RegexMatcher(regex, RegexOptions.CultureInvariant);
     Token   = token;
     Lint    = lint;
 }
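 A hedged construction sketch for the definition type above, matching how LookAhead (Example #21) consumes Matcher, Token and Lint; the regex and the chosen members are illustrative assumptions.

 // Hypothetical table entry: a run of digits lexed as a decimal integer literal, no lint category.
 var digits = new TokenDefinition("^[0-9]+", TokenEnum.DECIMALINTEGERLITERAL, LintType.NONE);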
Example #34
 public FixedTextToken(string text, ILocation location, TokenEnum code)
 {
     _location = location;
     _code = code;
     _text = text;
 }
 public SingleCharTokenRecognizer(char c, TokenEnum code)
 {
     _c = c;
     _code = code;
 }