Example #1
 public BinaryExpr(SourceLocation location, Expr left, TokenizerToken binaryOperatorToken, Expr right)
     : base(location, right.IsStatement, true)
 {
     Left = left;
     BinaryOperatorToken = binaryOperatorToken;
     Right = right;
 }
Example #2
 /// <summary>
 /// Matches a token. Forwards the head on success.
 /// </summary>
 /// <param name="token">Must be one of <see cref="TokenizerToken"/> value (not an Error one).</param>
 /// <returns>True if the given token matches.</returns>
 public bool Match(TokenizerToken token)
 {
     if (token < 0)
     {
         throw new ArgumentException("Token must not be an Error token.");
     }
     if (_token == (int)token)
     {
         Forward();
         return(true);
     }
     return(false);
 }
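A minimal usage sketch (not part of the original source): because Match both tests and consumes the current token, it reads naturally for optional tokens; Example #5 uses it this way for a trailing semicolon. Passing a negative (error) value triggers the ArgumentException above.

 // Sketch only: the _parser field and the SemiColon token name are assumed
 // from the surrounding examples. Match returns true and forwards the head
 // when the current token is a semicolon; otherwise it returns false.
 bool hasSemiColon = _parser.Match(TokenizerToken.SemiColon);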
Example #3
        Expr HandleAssign(AccessorExpr left, AccessorExpr leftSource, string unboundName = null)
        {
            Debug.Assert(left != null);
            var location = _parser.Location;

            // Plain assignment: left = <expression>.
            if (_parser.Match(TokenizerToken.Assign))
            {
                return(new AssignExpr(location, left, Expression(2)));
            }
            // Compound assignment (+=, -=, ...): make sure there is a readable
            // form of the left-hand side to use as the left operand.
            if (leftSource == null)
            {
                leftSource = new AccessorMemberExpr(_parser.Location, null, unboundName, false);
            }
            // Map the compound-assignment token to its binary operator and
            // desugar to: left = leftSource <op> <expression>.
            TokenizerToken binaryTokenType = _parser.CurrentToken.FromAssignOperatorToBinary();

            _parser.Forward();
            return(new AssignExpr(location, left, new BinaryExpr(location, leftSource, binaryTokenType, Expression(2))));
        }
Example #4
        /// <summary>
        /// Converts a string containing a mathematical function into individual logical blocks that can be iterated over.
        /// </summary>
        /// <param name="reader">A stream from which the function is read.</param>
        /// <param name="arrVariables">List of the variables that occur in the mathematical function.</param>
        /// <returns>An iterator over the logical blocks of the function.</returns>
        private static IEnumerable <Token> TokenizeInternal(TextReader reader, params string[] arrVariables)
        {
            StringBuilder sbNextToken = new StringBuilder(10);

            int   iNext;
            Token lastToken = new TokenizerToken(TokenType.Unknown, "");

            while ((iNext = reader.Peek()) != -1)
            {
                char c = (char)iNext;

                sbNextToken.Append(c);

                if (_dictToken.ContainsKey(sbNextToken.ToString()))
                {
                    lastToken = _dictToken[sbNextToken.ToString()];
                    reader.Read();
                }
                else if (arrVariables.Contains(sbNextToken.ToString()))
                {
                    lastToken = GetVariableToken(sbNextToken.ToString());
                    reader.Read();
                }
                else if (lastToken.Type != TokenType.Unknown)
                {
                    sbNextToken.Remove(sbNextToken.Length - 1, 1);
                    yield return(new TokenizerToken(lastToken.Type, sbNextToken.ToString(), lastToken.Associativity,
                                                    lastToken.Precedence));

                    lastToken = new TokenizerToken(TokenType.Unknown, "");
                    sbNextToken.Clear();
                }
                else
                {
                    reader.Read();
                }
            }

            yield return(new TokenizerToken(lastToken.Type, sbNextToken.ToString(), lastToken.Associativity));
        }
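A hedged usage sketch: TokenizeInternal is private, so the containing class presumably exposes a public entry point around it. The Tokenize wrapper below is an assumption for illustration (it does not appear in the snippet); it only relies on System.IO.StringReader and on the signature above.

        // Hypothetical wrapper, not part of the original source: feeds a
        // function string to TokenizeInternal through a StringReader.
        public static IEnumerable<Token> Tokenize(string function, params string[] variables)
        {
            using (var reader = new StringReader(function))
            {
                // Re-yield instead of returning the inner enumerable directly,
                // so the reader is only disposed once enumeration has finished.
                foreach (Token token in TokenizeInternal(reader, variables))
                {
                    yield return token;
                }
            }
        }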
Example #5
        Expr HandleCallOrIndex(Expr left, TokenizerToken closer)
        {
            SourceLocation loc        = _parser.PrevNonCommentLocation;
            IList <Expr>   parameters = null;
            IReadOnlyList <AccessorLetExpr> declaredFunctions = null;

            if (!_parser.Match(closer))
            {
                _scope.OpenScope();
                for ( ;;)
                {
                    Debug.Assert(TokenizerToken.Comma.PrecedenceLevel() == 2);
                    Expr e = Expression(2);
                    if (e is SyntaxErrorExpr)
                    {
                        return(e);
                    }
                    if (parameters == null)
                    {
                        parameters = new List <Expr>();
                    }
                    parameters.Add(e);

                    if (_parser.Match(closer))
                    {
                        break;
                    }
                    if (!_parser.Match(TokenizerToken.Comma) &&
                        !e.IsStatement &&
                        !_options.AllowSemiColonAsActualParameterSeparator)
                    {
                        return(new SyntaxErrorExpr(_parser.Location, $"Expected {closer} opened at {loc}.", loc));
                    }
                }
                declaredFunctions = _scope.CloseScope();
            }
            var arguments = parameters != null ? parameters.ToArray() : Expr.EmptyArray;

            return(new AccessorCallExpr(loc, left, arguments, declaredFunctions, _parser.Match(TokenizerToken.SemiColon), closer == TokenizerToken.CloseSquare));
        }
Example #6
 public UnaryExpr(SourceLocation location, TokenizerToken type, Expr e)
     : base(location, e.IsStatement, false)
 {
     TokenType  = type;
     Expression = e;
 }
Example #7
        /// <summary>
        /// Express this token as a sample string ("identifier" for <see cref="TokenizerToken.Identifier"/>,
        /// "6.02214129e+23" for <see cref="TokenizerToken.Float"/>, etc.).
        /// </summary>
        /// <param name="t">This token.</param>
        /// <returns>A sample string for the token type.</returns>
        public static string Explain(this TokenizerToken t)
        {
            if (t < 0)
            {
                return(((TokenizerError)t).ToString());
            }
            if ((t & TokenizerToken.IsAssignOperator) != 0)
            {
                return(_assignOperator[((int)t & 15) - 1]);
            }
            if ((t & TokenizerToken.IsBinaryOperator) != 0)
            {
                return(_binaryOperator[((int)t & 15) - 1]);
            }
            if ((t & TokenizerToken.IsCompareOperator) != 0)
            {
                return(_compareOperator[((int)t & 15) - 1]);
            }
            if ((t & TokenizerToken.IsPunctuation) != 0)
            {
                return(_punctuations[((int)t & 15) - 1]);
            }
            if ((t & TokenizerToken.IsUnaryOperator) != 0)
            {
                return(_unaryOperator[((int)t & 15) - 1]);
            }

            if (t == TokenizerToken.Identifier)
            {
                return("identifier");
            }
            if (t == TokenizerToken.And)
            {
                return("&&");
            }
            if (t == TokenizerToken.Or)
            {
                return("||");
            }
            if (t == TokenizerToken.PlusPlus)
            {
                return("++");
            }
            if (t == TokenizerToken.MinusMinus)
            {
                return("--");
            }

            if (t == TokenizerToken.String)
            {
                return("\"string\"");
            }

            if (t == TokenizerToken.Float)
            {
                return("6.02214129e+23");
            }
            if (t == TokenizerToken.Integer)
            {
                return("42");
            }
            if (t == TokenizerToken.HexNumber)
            {
                return("0x00CF12A4");
            }
            if (t == TokenizerToken.NaN)
            {
                return("NaN");
            }
            if (t == TokenizerToken.Infinity)
            {
                return("Infinity");
            }

            if (t == TokenizerToken.StarComment)
            {
                return("/* ... */");
            }
            if (t == TokenizerToken.LineComment)
            {
                return("// ..." + Environment.NewLine);
            }

            if (t == TokenizerToken.Regex)
            {
                return("/regex/gi");
            }

            if (t == TokenizerToken.OpenPar)
            {
                return("(");
            }
            if (t == TokenizerToken.ClosePar)
            {
                return(")");
            }
            if (t == TokenizerToken.OpenBracket)
            {
                return("[");
            }
            if (t == TokenizerToken.CloseBracket)
            {
                return("]");
            }
            if (t == TokenizerToken.OpenCurly)
            {
                return("{");
            }
            if (t == TokenizerToken.CloseCurly)
            {
                return("}");
            }


            return(TokenizerToken.None.ToString());
        }
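A small usage sketch; the expected outputs in the comments are the literals returned by the branches above.

            // Sample strings produced by Explain for a few token kinds.
            Console.WriteLine(TokenizerToken.Identifier.Explain()); // identifier
            Console.WriteLine(TokenizerToken.Integer.Explain());    // 42
            Console.WriteLine(TokenizerToken.HexNumber.Explain());  // 0x00CF12A4
            Console.WriteLine(TokenizerToken.OpenPar.Explain());    // (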
Example #8
 static internal TokenizerToken FromAssignOperatorToBinary(this TokenizerToken assignment)
 {
     Debug.Assert((assignment & TokenizerToken.IsAssignOperator) != 0 && assignment != TokenizerToken.Assign);
     return(_assignBinaryMap[((int)assignment & 15) - 2]);
 }
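A hedged note on intent: Example #3 calls this helper to desugar compound assignments before building the equivalent BinaryExpr. The token names below (PlusAssign, Plus) are hypothetical and used only to illustrate the mapping; the actual contents of _assignBinaryMap are not shown in these snippets.

 // Illustration only: PlusAssign and Plus are assumed member names.
 TokenizerToken binary = TokenizerToken.PlusAssign.FromAssignOperatorToBinary();
 Debug.Assert(binary == TokenizerToken.Plus);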
Example #9
 /// <summary>
 /// Computes the precedence, keeping a 1-bit provision to ease the handling of right-associative infix operators.
 /// </summary>
 /// <returns>An even precedence level between 2 and 30, or 0 if the token has the <see cref="TokenizerError.IsErrorOrEndOfInput"/> bit set.</returns>
 /// <remarks>
 /// This uses <see cref="TokenizerToken.OpLevelMask"/> and <see cref="TokenizerToken.OpLevelShift"/>.
 /// </remarks>
 public static int PrecedenceLevel(this TokenizerToken t)
 {
     return(t > 0 ? (((int)(t & TokenizerToken.OpLevelMask)) >> (int)TokenizerToken.OpLevelShift) << 1 : 0);
 }
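A minimal sketch of how the reserved bit can be used, assuming the usual Pratt-parser idiom rather than this parser's actual code: because every returned level is even, a right-associative operator can recurse with level - 1, so an operator of the same precedence on its right still binds to the recursive call.

 // Sketch only: 'token' and 'isRightAssociative' are illustrative names;
 // Expression is the recursive parse entry seen in the other examples.
 int level = token.PrecedenceLevel();                      // even value, 2..30
 int rightLevel = isRightAssociative ? level - 1 : level;  // odd => right-associative
 Expr right = Expression(rightLevel);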