Example #1
        public static Token GetNextToken(string text, ref int index)
        {
            if (text == null || text.Length == 0 || index >= text.Length)
            {
                return(Token.Empty);
            }
            Token.Types context = Token.Types.None, newContext;
            // Skip characters that do not start a token; when this loop exits,
            // text[index - 1] is the first character of the next token (if any).
            do
            {
                context = GetCharToken(context, ref text, index, index);
                index++;
            } while (index < text.Length && context == Token.Types.None);
            int startIndex = index - 1;

            // Extend the token while GetCharToken keeps reporting the same type;
            // stop at the first character that starts a different, non-Invalid type.
            while (index < text.Length)
            {
                newContext = GetCharToken(context, ref text, startIndex, index);
                if (newContext != Token.Types.Invalid && context != newContext)
                {
                    break;
                }
                context = newContext;
                index++;
            }
            return(new Token(context, text.Substring(startIndex, index - startIndex), startIndex));
        }
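A note on the pattern in Example #1: GetNextToken advances a caller-owned cursor (ref int index) in two phases, first skipping characters that do not start a token and then extending the token while GetCharToken keeps reporting the same type. The standalone sketch below illustrates that same ref-cursor, two-phase scan with a simplified character classifier; the CharClass enum and the Classify/NextRun helpers are illustrative stand-ins, not part of the code above.

    using System;

    static class RefCursorScanDemo
    {
        enum CharClass { Separator, Letter, Digit, Symbol }

        static CharClass Classify(char ch) =>
            char.IsWhiteSpace(ch) ? CharClass.Separator :
            char.IsLetter(ch)     ? CharClass.Letter :
            char.IsDigit(ch)      ? CharClass.Digit :
                                    CharClass.Symbol;

        // Same shape as GetNextToken: skip separators, then extend while the class is stable.
        static string NextRun(string text, ref int index)
        {
            while (index < text.Length && Classify(text[index]) == CharClass.Separator)
            {
                index++;
            }
            if (index >= text.Length)
            {
                return string.Empty;
            }
            int start = index;
            CharClass cls = Classify(text[index]);
            while (index < text.Length && Classify(text[index]) == cls)
            {
                index++;
            }
            return text.Substring(start, index - start);
        }

        static void Main()
        {
            int i = 0;
            string run;
            while ((run = NextRun("foo 42 + bar", ref i)) != string.Empty)
            {
                Console.WriteLine(run);   // prints: foo, 42, +, bar (one per line)
            }
        }
    }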
Example #2
        public LexicalFormater(LexicalAnalyzer lexic)
        {
            text = new StringBuilder();
            if (lexic.Tokens.Count == 0)
            {
                return;
            }
            Token.Types context = Token.Types.None;
            foreach (Token token in lexic.Tokens)
            {
                // Insert a separator before this token unless the previous token was a
                // prefix (or there is no previous token) or this token is a suffix.
                switch (context)
                {
                case Token.Types.None:
                case Token.Types.PrefixSymbol:
                    break;

                default:
                    if (token.Type != Token.Types.SufixSymbol)
                    {
                        text.Append(LexicalRules.DisposableChar);
                    }
                    break;
                }
                text.Append(token.Text);
                context = token.Type;
            }
        }
Example #3
        private Token CreateToken(Token.Types Type, string Value)
        {
            int colNum = columnNumber - (Value.Length != 0 ? Value.Length - 1 : 0);

            currentToken = new Token(Type, lineNumber, colNum, Value);

            return(currentToken);
        }
Example #4
 void RequireTokenType(Token.Types type)
 {
     if (!this.TokenIs(type))
     {
         this.RequireTokenError(
             new Token.Types?[] { type },
             new string[] { },
             this.CurrentToken());
     }
 }
Example #5
 void RequireTokenExactly(Token.Types type, string content)
 {
     if (!this.TokenIs(type, content))
     {
         this.RequireTokenError(
             new Token.Types?[] { type },
             new string[] { content },
             this.CurrentToken());
     }
 }
Example #6
 bool TokenIs(Token.Types type, string content = null)
 {
     if (!Finished())
     {
         var token = this.CurrentToken();
         return(token.Type == type && (content == null || token.Content == content));
     }
     else
     {
         return(false);
     }
 }
Example #7
        Token TokenizePred(Token.Types type, Func <char, bool> pred)
        {
            int startPos = this.pos;

            while (!this.Finished() && pred(this.CurrentChar()))
            {
                this.NextChar();
            }
            return(new Token()
            {
                StartPos = startPos,
                EndPos = this.pos - 1,
                Type = type,
                Content = this.source.Content.Substring(startPos, this.pos - startPos)
            });
        }
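Example #7's TokenizePred is the usual predicate-driven scanning block: consume characters while a predicate holds, then cut a token out of the consumed span. A minimal standalone sketch of the same idea, assuming none of the surrounding lexer state, could look like this (ScanWhile and the tuple it returns are hypothetical, not part of the API above):

    using System;

    static class PredicateScanDemo
    {
        // Consume characters from pos while pred holds; return the span and the position after it.
        static (string Content, int NextPos) ScanWhile(string source, int pos, Func<char, bool> pred)
        {
            int start = pos;
            while (pos < source.Length && pred(source[pos]))
            {
                pos++;
            }
            return (source.Substring(start, pos - start), pos);
        }

        static void Main()
        {
            // Scan an identifier run, then a digit run, much as TokenizePred would
            // with matching predicates.
            var (ident, next) = ScanWhile("count42+", 0, char.IsLetter);
            var (number, _)   = ScanWhile("count42+", next, char.IsDigit);
            Console.WriteLine($"{ident} / {number}");   // count / 42
        }
    }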
Example #8
        public SyntaxAnalyzer(LexicalAnalyzer lexic, List <string> variables)
        {
            Tokens = new List <SyntaxToken>();
            Stack <Token> branches = new Stack <Token>();

            Token.Types context  = Token.Types.None;
            Token       previous = null;

            SyntaxToken.Qualifiers qualifiers;
            int groupLevel = 0;

            foreach (Token token in lexic.Tokens)
            {
                qualifiers = SyntaxToken.Qualifiers.Correct;
                switch (token.Type)
                {
                case Token.Types.Id:
                    if (variables.Contains(token.Text))
                    {
                        if (context == Token.Types.Id || context == Token.Types.SufixSymbol ||
                            context == Token.Types.GroupEnds)
                        {
                            qualifiers = SyntaxToken.Qualifiers.Unexpected;
                        }
                    }
                    else
                    {
                        qualifiers = SyntaxToken.Qualifiers.Wrong;
                    }
                    break;

                case Token.Types.OperatorSymbol:
                    if (context == Token.Types.GroupBegins || context == Token.Types.None ||
                        context == Token.Types.OperatorSymbol || context == Token.Types.PrefixSymbol)
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    break;

                case Token.Types.PrefixSymbol:
                    if (context == Token.Types.Id || context == Token.Types.SufixSymbol)
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    else if (context == Token.Types.PrefixSymbol && previous.Text != "!")
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    break;

                case Token.Types.SufixSymbol:
                    if (context != Token.Types.Id)
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    break;

                case Token.Types.Numeric:
                    if (context != Token.Types.OperatorSymbol)
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    else if (previous.Text != "=" && previous.Text != "!=")
                    {
                        // Numeric literals are only valid directly after "=" or "!=".
                        qualifiers = SyntaxToken.Qualifiers.InvalidUseOf;
                    }
                    break;

                case Token.Types.GroupBegins:
                    groupLevel++;
                    branches.Push(token);
                    if (context == Token.Types.SufixSymbol || context == Token.Types.Id ||
                        context == Token.Types.GroupEnds)
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    break;

                case Token.Types.GroupEnds:
                    groupLevel--;
                    if (branches.Count > 0)
                    {
                        // Pop the matching '(' so only unmatched ones remain for the error report.
                        branches.Pop();
                    }
                    if (groupLevel < 0 || context == Token.Types.PrefixSymbol ||
                        context == Token.Types.OperatorSymbol || context == Token.Types.None ||
                        context == Token.Types.GroupBegins)
                    {
                        qualifiers = SyntaxToken.Qualifiers.Unexpected;
                    }
                    break;
                }
                Tokens.Add(new SyntaxToken(token, qualifiers));
                context  = token.Type;
                previous = token;
            }
            if (groupLevel > 0)  // Missing ')' error
            {
                while (branches.Count > 0)
                {
                    Tokens.Add(new SyntaxToken(branches.Pop(), SyntaxToken.Qualifiers.NonClosed));
                }
            }
        }
Example #9
 private Token CreateToken(Token.Types Type, char Value)
 {
     return(CreateToken(Type, Value.ToString()));
 }
Example #10
 void ConsumeToken(Token.Types type, string content)
 {
     this.RequireTokenExactly(type, content);
     this.NextToken();
 }
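Examples #4, #5, #6, and #10 together form a typical expect/consume layer for a recursive-descent parser: TokenIs peeks at the current token, the Require* methods turn a failed check into an error, and ConsumeToken requires a token and then advances past it. The condensed standalone sketch below shows that layering with simplified types; the TokenType enum, Tok record, and the exception used for errors are illustrative, not the classes from the examples.

    using System;
    using System.Collections.Generic;

    enum TokenType { Id, Symbol }

    record Tok(TokenType Type, string Content);

    class MiniParser
    {
        readonly List<Tok> tokens;
        int pos;

        public MiniParser(List<Tok> tokens) => this.tokens = tokens;

        bool Finished() => pos >= tokens.Count;

        // Peek: does the current token match the type (and the content, if one is given)?
        bool TokenIs(TokenType type, string content = null) =>
            !Finished()
            && tokens[pos].Type == type
            && (content == null || tokens[pos].Content == content);

        // Require: the same check, but a mismatch becomes an error.
        void RequireTokenExactly(TokenType type, string content)
        {
            if (!TokenIs(type, content))
            {
                throw new InvalidOperationException($"expected {type} '{content}' at position {pos}");
            }
        }

        // Consume: require the token, then advance the cursor.
        public void ConsumeToken(TokenType type, string content)
        {
            RequireTokenExactly(type, content);
            pos++;
        }

        static void Main()
        {
            var parser = new MiniParser(new List<Tok> { new Tok(TokenType.Symbol, "(") });
            parser.ConsumeToken(TokenType.Symbol, "(");   // matches, so the cursor advances
            Console.WriteLine("consumed '('");
        }
    }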
Example #11
        public static Token.Types GetCharToken(Token.Types context, ref string text, int startIndex, int index)
        {
            // Given the token type accumulated so far (context), classify text[index]:
            // for a None context the result is the type a new token would start with;
            // otherwise the same type continues the token, None ends it, and Invalid flags an error.
            char ch = text[index];

            switch (context)
            {
            case Token.Types.None:
                if (ValidDisposableChar(ch))
                {
                    return(Token.Types.None);
                }
                if (ValidGroupBeginsChar(ch))
                {
                    return(Token.Types.GroupBegins);
                }
                if (ValidGroupEndsChar(ch))
                {
                    return(Token.Types.GroupEnds);
                }
                if (ValidStartIdExpressionChar(ch))
                {
                    return(Token.Types.Id);
                }
                if (PrefixSymbols.Any(str => str.First() == ch))
                {
                    return(Token.Types.PrefixSymbol);
                }
                if (SufixSymbols.Any(str => str.First() == ch))
                {
                    return(Token.Types.SufixSymbol);
                }
                if (OperatorSymbols.Any(str => str.First() == ch))
                {
                    return(Token.Types.OperatorSymbol);
                }
                if (ValidNumericExpressionChar(ch))
                {
                    return(Token.Types.Numeric);
                }
                return(Token.Types.Invalid);

            case Token.Types.GroupBegins:
            case Token.Types.GroupEnds:
                return(Token.Types.None);

            case Token.Types.Id:
                if (ValidIdExpressionChar(ch))
                {
                    return(context);
                }
                break;

            case Token.Types.Numeric:
                if (ValidNumericExpressionChar(ch))
                {
                    return(context);
                }
                if (ValidIdExpressionChar(ch))
                {
                    return(Token.Types.Invalid);
                }
                break;

            case Token.Types.PrefixSymbol:
                if (index - startIndex > 0 && ValidPrefixSymbol(text.Substring(startIndex, index - startIndex + 1)))
                {
                    return(Token.Types.PrefixSymbol);
                }
                break;

            case Token.Types.UnionSymbol:
                if (index - startIndex > 0 && ValidUnionSymbol(text.Substring(startIndex, index - startIndex + 1)))
                {
                    return(Token.Types.UnionSymbol);
                }
                break;

            case Token.Types.SufixSymbol:
                if (index - startIndex > 0 && ValidSufixSymbol(text.Substring(startIndex, index - startIndex + 1)))
                {
                    return(Token.Types.SufixSymbol);
                }
                break;

            case Token.Types.OperatorSymbol:
                if (index - startIndex > 0 && ValidOperatorSymbol(text.Substring(startIndex, index - startIndex + 1)))
                {
                    return(Token.Types.OperatorSymbol);
                }
                break;

            case Token.Types.Invalid:
                if (ValidDisposableChar(ch) || PrefixSymbols.Any(str => str.First() == ch) || SufixSymbols.Any(str => str.First() == ch) || OperatorSymbols.Any(str => str.First() == ch))
                {
                    return(Token.Types.None);
                }
                return(context);
            }
            return(Token.Types.None);
        }
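The PrefixSymbol, SufixSymbol, UnionSymbol, and OperatorSymbol cases in Example #11 all test whether the substring from the token's start up to and including the current character is still a valid multi-character symbol. The standalone sketch below shows that growing-prefix check against a small symbol table; the Symbols array and the IsPrefixOfAnySymbol helper are hypothetical stand-ins for the Valid*Symbol checks above, not their actual implementation.

    using System;
    using System.Linq;

    static class SymbolPrefixDemo
    {
        // Illustrative symbol table; the real symbol sets live elsewhere in the lexer.
        static readonly string[] Symbols = { "=", "!=", "&&", "||" };

        // True when candidate could still grow into (or already is) one of the symbols.
        static bool IsPrefixOfAnySymbol(string candidate) =>
            Symbols.Any(sym => sym.StartsWith(candidate, StringComparison.Ordinal));

        static void Main()
        {
            string text = "!=a";
            int start = 0, index = 0;

            // Grow the symbol one character at a time, as the OperatorSymbol case does
            // with text.Substring(startIndex, index - startIndex + 1).
            while (index < text.Length && IsPrefixOfAnySymbol(text.Substring(start, index - start + 1)))
            {
                index++;
            }
            Console.WriteLine(text.Substring(start, index - start));   // !=
        }
    }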
Example #12
    private bool IsOfType(Token.Types type)
    {
        Token token = Curr;

        return null != token && token.Type == type;
    }