Example 1
        /// <summary>
        /// Reads an interpolated string in format "${variable} some text ${othervariable + 2}."
        /// </summary>
        /// <param name="quote"></param>
        /// <param name="readLine">Whether or not to only read to the end of the line.</param>
        /// <param name="includeNewLine">Whether or not to include the new line in parsing.</param>
        /// <param name="setPositionAfterToken">Whether or not set the position of lexer after the token.</param>
        /// <returns></returns>
        public Token ReadInterpolatedString(char quote, bool readLine = false, bool includeNewLine = false, bool setPositionAfterToken = true)
        {
            var allTokens = new List<TokenData>();
            var interpolationCount = 0;
            // Only supporting the following inside interpolations:
            // 1. identifiers e.g. abcd, including "_"
            // 2. member access "."
            // 3. math ops ( + - / * % )
            // e.g. "name" 'name' "name\"s" 'name\'s'
            var buffer = new StringBuilder();
            var curr = _scanner.ReadChar();
            var next = _scanner.PeekChar();
            var matched = false;
            var escapeChar = '\\';
            Token token = null;
            while (_scanner.State.Pos <= _scanner.LAST_POSITION)
            {
                // End string " or '
                if (!readLine && curr == quote)
                {
                    matched = true;
                    _scanner.MoveChars(1);
                    break;
                }
                // End of line.
                if (readLine && ( curr == '\r' || curr == '\n' ))
                {
                    matched = true;
                    if (!includeNewLine) break;
                    var is2CharNewLine = _scanner.ScanNewLine(curr);
                    var newline = is2CharNewLine ? "\r\n" : "\n";
                    buffer.Append(newline);
                    token = Tokens.NewLine;
                    break;
                }
                // Interpolation.
                else if (curr == _interpolatedStartChar && next == '{')
                {
                    // Keep track of interpolations and their start positions.
                    interpolationCount++;
                    int interpolatedStringStartPos = _scanner.State.LineCharPosition + 2;
                    int interpolatedStringLinePos = _scanner.State.Line;

                    // Add any existing text before the interpolation as a token.
                    if (buffer.Length > 0)
                    {
                        string text = buffer.ToString();
                        token = Tokens.ToLiteralString(text);
                        var t = new TokenData() { Token = token, LineCharPos = 0, Line = _scanner.State.Line };
                        allTokens.Add(t);
                        buffer.Clear();
                    }
                    _scanner.MoveChars(1);
                    var tokens = ReadInterpolatedTokens();
                    token = Tokens.ToInterpolated(string.Empty, tokens);
                    var iTokenData = new TokenData() { Token = token, LineCharPos = interpolatedStringStartPos, Line = interpolatedStringLinePos };
                    allTokens.Add(iTokenData);
                }
                // Not a \ escape char, so just append.
                else if (curr != escapeChar)
                {
                    buffer.Append(curr);
                }
                // Escape \
                else if (curr == escapeChar)
                {
                    var result = _scanner.ScanEscape(quote, false);
                    buffer.Append(result.Text);
                    _scanner.MoveChars(1);
                }

                curr = _scanner.ReadChar();
                next = _scanner.PeekChar();
            }

            // Error: Unterminated string constant.
            if (!matched && !readLine && _scanner.State.Pos >= _scanner.LAST_POSITION)
            {
                throw new LangException("Syntax Error", "Unterminated string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
            }

            // At this point the position is already after the token.
            // If matched and the position should not be set after the token, move back 1 char.
            if (matched && !setPositionAfterToken) _scanner.MoveChars(-1);
            if (interpolationCount == 0)
            {
                var text = buffer.ToString();
                return Tokens.ToLiteralString(text);
            }
            if (buffer.Length > 0)
            {
                var text = buffer.ToString();
                token = Tokens.ToLiteralString(text);
                allTokens.Add(new TokenData() { Token = token, LineCharPos = 0, Line = _scanner.State.Line });
            }
            return Tokens.ToInterpolated(string.Empty, allTokens);
        }
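
A minimal usage sketch ( not from the source ), assuming a hypothetical lexer instance named lexer, configured with '$' as the interpolation start char, whose scanner sits just past the opening quote of: "total is ${count + 2}."

        // Hypothetical call: reads up to the closing double quote.
        var token = lexer.ReadInterpolatedString('"');

        // The input contains one "${...}" block, so the result is an interpolated
        // token made of the literal "total is ", the tokens of count + 2, and the
        // trailing literal ".".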
Example 2
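        /// <summary>
        /// Reads the tokens that make up a single "${ ... }" interpolation, stopping at the closing "}".
        /// </summary>
        /// <returns>The tokens of the interpolated expression.</returns>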
        private List<TokenData> ReadInterpolatedTokens()
        {
            var c = _scanner.ReadChar();
            var n = _scanner.PeekChar();
            var tokens = new List<TokenData>();

            while (c != '}' && !_scanner.IsAtEnd())
            {
                var pos = _scanner.State.Pos;
                // Variable
                if (_scanner.IsIdentStart(c))
                {
                    _lastToken = ReadWord();
                }
                // Empty space.
                else if (c == ' ' || c == '\t')
                {
                    _lastToken = Tokens.WhiteSpace;
                }
                else if (_scanner.IsOp(c) == true)
                {
                    _lastToken = ReadOperator();
                }
                else if (c == '(')
                {
                    _lastToken = Tokens.LeftParenthesis;
                }
                else if (c == ')')
                {
                    _lastToken = Tokens.RightParenthesis;
                }
                else if (c == '[')
                {
                    _lastToken = Tokens.LeftBracket;
                }
                else if (c == ']')
                {
                    _lastToken = Tokens.RightBracket;
                }
                else if (c == '.')
                {
                    _lastToken = Tokens.Dot;
                }
                else if (c == ',')
                {
                    _lastToken = Tokens.Comma;
                }
                else if (c == ':')
                {
                    _lastToken = Tokens.Colon;
                }
                else if (_scanner.IsNumeric(c))
                {
                    _lastToken = ReadNumber();
                }
                else if (c == '\r')
                {
                    bool is2CharNewline = n == '\n';
                    IncrementLine(is2CharNewline);
                }
                else
                {
                    throw new LangException("syntax", "unexpected text in string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
                }

                var t = new TokenData() { Token = _lastToken, Line = _scanner.State.Line, LineCharPos = _scanner.State.LineCharPosition, Pos = pos };
                tokens.Add(t);

                // Single char symbol - char advancement was not made.
                if ( (t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos  )
                    _scanner.ReadChar();
                c = _scanner.State.CurrentChar;
                n = _scanner.PeekChar();
            }
            return tokens;
        }
Example 3
        /// <summary>
        /// Reads the next token from the reader.
        /// </summary>
        /// <returns> The next token; the end token is returned when there are no more tokens. </returns>
        public TokenData NextToken()
        {
            // NOTE: the lexer always reads the next char.
            var c = _scanner.State.CurrentChar;
            var n = _scanner.PeekChar();
            //var tokenLengthCalcMode = TokenLengthCalcMode.Direct;

            var pos = _scanner.State.Pos;
            var line = _scanner.State.Line;
            var tokenLength = 0;
            var cpos = _scanner.State.LineCharPosition;

            if (_scanner.IsEnded())
            {
                _lastToken = Tokens.EndToken;
            }
            // Empty space.
            else if (c == ' ' || c == '\t')
            {
                _scanner.ConsumeWhiteSpace(false, true);
                _lastToken = Tokens.WhiteSpace;
                tokenLength = (_scanner.State.Pos - pos) + 1;
                //tokenLengthCalcMode = TokenLengthCalcMode.WhiteSpace;
            }
            // Variable
            else if (_scanner.IsIdentStart(c))
            {
                _lastToken = ReadWord();
            }
            // Single-line comment.
            else if (c == '/' && n == '/')
            {
                _scanner.MoveChars(2);
                var result = _scanner.ScanToNewLine(false, true);
                //tokenLengthCalcMode = TokenLengthCalcMode.String;
                tokenLength = (_scanner.State.Pos - pos) + 1;
                _lastToken = Tokens.ToComment(false, result.Text);
            }
            // Multi-line comment.
            else if (c == '/' && n == '*')
            {
                _scanner.MoveChars(2);
                var result = _scanner.ScanUntilChars(false, '*', '/', false, true);
                //tokenLengthCalcMode = TokenLengthCalcMode.MultilineComment;
                tokenLength = _scanner.State.LineCharPosition;
                _lastToken = Tokens.ToComment(true, result.Text);
            }
            else if (c == '|' && n != '|')
            {
                _lastToken = Tokens.Pipe;
            }
            // Operator ( Math, Compare, Increment ) * / + - < <= > >= ! =
            else if (_scanner.IsOp(c) == true)
            {
                _lastToken = ReadOperator();
            }
            else if (c == '(')
            {
                _lastToken = Tokens.LeftParenthesis;
            }
            else if (c == ')')
            {
                _lastToken = Tokens.RightParenthesis;
            }
            else if (c == '[')
            {
                _lastToken = Tokens.LeftBracket;
            }
            else if (c == ']')
            {
                _lastToken = Tokens.RightBracket;
            }
            else if (c == '.')
            {
                _lastToken = Tokens.Dot;
            }
            else if (c == ',')
            {
                _lastToken = Tokens.Comma;
            }
            else if (c == ':')
            {
                _lastToken = Tokens.Colon;
            }
            else if (c == '{')
            {
                _lastToken = Tokens.LeftBrace;
            }
            else if (c == '}')
            {
                _lastToken = Tokens.RightBrace;
            }
            else if (c == ';')
            {
                _lastToken = Tokens.Semicolon;
            }
            else if (c == '$')
            {
                _lastToken = Tokens.Dollar;
            }
            else if (c == '@')
            {
                _lastToken = Tokens.At;
            }
            else if (c == '#')
            {
                _lastToken = Tokens.Pound;
            }
            else if (c == '\\')
            {
                _lastToken = Tokens.BackSlash;
            }
            // String literal
            else if (c == '"' || c == '\'')
            {
                _lastToken = ReadString( c == '"');
                //tokenLengthCalcMode = TokenLengthCalcMode.String;
                if (_lastToken.Kind == TokenKind.Multi)
                {
                    tokenLength = (_scanner.State.Pos - pos) -2;
                    string text = _scanner.State.Text.Substring(pos + 1, tokenLength);
                    _lastToken.SetText(text);
                }
                else
                {
                    tokenLength = _lastToken.Text.Length + 2;
                }
            }
            else if (_scanner.IsNumeric(c))
            {
                _lastToken = ReadNumber();
            }
            else if (c == '\r')
            {
                bool is2CharNewline = n == '\n';
                IncrementLine(is2CharNewline);
            }
            else
            {
                _lastToken = Tokens.Unknown;
            }
            var t = new TokenData() { Token = _lastToken, Line = line, LineCharPos = cpos, Pos = pos };
            _lastTokenData = t;

            // Single char symbol - char advancement was not made.
            if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.Unknown || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos)
                _scanner.ReadChar();
            return t;
        }
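
A minimal tokenization loop over NextToken ( a sketch, not from the source ), assuming a hypothetical lexer instance already initialized with its source text:

        var tokens = new List<TokenData>();
        var t = lexer.NextToken();
        while (t.Token != Tokens.EndToken)
        {
            // Keep everything except whitespace tokens.
            if (t.Token.Type != TokenTypes.WhiteSpace)
                tokens.Add(t);
            t = lexer.NextToken();
        }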
Example 4
        /// <summary>
        /// Peeks at the next token.
        /// </summary>
        /// <param name="allowSpace">Whether or not whitespace tokens are returned instead of skipped.</param>
        /// <returns>The next token, without advancing the lexer.</returns>
        public TokenData PeekToken(bool allowSpace = false)
        {
            // Check if ended
            if (_scanner.State.Pos >= _scanner.State.Text.Length)
            {
                // Return the cached end-token data if it was already created.
                if (_endTokenData != null) return _endTokenData;

                // Create endToken data.
                _endTokenData = new TokenData() { Token = Tokens.EndToken, Line = _scanner.State.Line, Pos = _scanner.State.Pos, LineCharPos = _scanner.State.LineCharPosition };
                return _endTokenData;
            }

            var line = _scanner.State.Line;
            var linepos = _scanner.State.LineCharPosition;
            var lastToken = _lastToken;
            var lastTokenData = _lastTokenData;
            var iSc = _interpolatedStartChar;
            var pos = _scanner.State.Pos;

            // Get the next token.
            var token = NextToken();
            if (!allowSpace && token.Token == Tokens.WhiteSpace)
            {
                while (token.Token == Tokens.WhiteSpace)
                {
                    token = NextToken();
                }
            }
            // Restore the scanner and lexer state to what it was before the peek.
            _scanner.State.Line = line;
            _scanner.State.LineCharPosition = linepos;
            _lastToken = lastToken;
            _lastTokenData = lastTokenData;
            _interpolatedStartChar = iSc;
            _scanner.ResetPos(pos, true);
            return token;
        }
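
A short lookahead sketch contrasting PeekToken with NextToken on the same hypothetical lexer; PeekToken restores the scanner position, so the lexer does not advance:

        // Look ahead without consuming.
        var ahead = lexer.PeekToken();
        if (ahead.Token == Tokens.LeftParenthesis)
        {
            // The peek did not advance the lexer, so tokens are re-read from the same position.
            var consumed = lexer.NextToken();
        }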
Example 5
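        /// <summary>
        /// Builds a limit exception ( e.g. for max nested statements ) using the token's position for error reporting.
        /// </summary>
        /// <param name="token">The token at which the limit was exceeded.</param>
        /// <param name="error">The name of the limit that was exceeded.</param>
        /// <param name="limit">The configured limit value, or -1 if not applicable.</param>
        /// <returns>The limit exception with line and column information set.</returns>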
        private LangLimitException BuildLimitExceptionFromToken(TokenData token, string error, int limit = -1)
        {
            if (limit != -1)
                error = "Limit for : " + error + " reached at " + limit;

            string script = "";
            int lineNumber = 0;
            int charPos = 0;
            if (token != null)
            {
                lineNumber = token.Line;
                charPos = token.LineCharPos;
            }
            var ex = new LangLimitException(error, script, lineNumber);
            ex.Error.Column = charPos;
            return ex;
        }
Example 6
 /// <summary>
 /// Checks the nested statement depth.
 /// </summary>
 /// <param name="token"></param>
 /// <param name="statementNestedCount">The number of nested statements</param>
 internal void CheckParserStatementNested(TokenData token, int statementNestedCount)
 {
     if (_hasSettings && _ctx.Settings.MaxStatementsNested > 0 && statementNestedCount > _ctx.Settings.MaxStatementsNested)
         throw BuildLimitExceptionFromToken(token, "nested statements", _ctx.Settings.MaxStatementsNested);
 }
Example 7
 /// <summary>
 /// Checks the nested function call depth.
 /// </summary>
 /// <param name="token">The current token, used for error reporting.</param>
 /// <param name="functionCallCount">The number of nested function calls.</param>
 internal void CheckParserFuncCallNested(TokenData token, int functionCallCount)
 {
     if (_hasSettings && _ctx.Settings.MaxFuncCallNested > 0 && functionCallCount > _ctx.Settings.MaxFuncCallNested)
         throw BuildLimitExceptionFromToken(token, "nested function call", _ctx.Settings.MaxFuncCallNested);
 }
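
A hedged sketch of how these checks might be called from a recursive-descent parse routine; the ParseBlock method and nestedDepth parameter below are hypothetical and assume access to the checker:

 // Hypothetical recursive statement parsing routine.
 private void ParseBlock(TokenData current, int nestedDepth)
 {
     // Throws a LangLimitException when the configured MaxStatementsNested is exceeded.
     CheckParserStatementNested(current, nestedDepth);

     // ... parse child statements, passing nestedDepth + 1 downward.
 }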
Example 8
        /// <summary>
        /// Parses a file extension pattern ( e.g. *.xml .xml .doc *.doc ) into a single literal string token.
        /// </summary>
        /// <returns>The parsed token.</returns>
        public override Token[] Parse()
        {
            // *.xml .xml .doc *.doc
            var takeoverToken = _lexer.LastTokenData;
            var pretext = takeoverToken.Token.Text;
            var line = _lexer.State.Line;
            var pos = _lexer.State.LineCharPosition;

            // 1. Check if it starts with "*", in which case it's really "*."
            if (_lexer.State.CurrentChar == '.')
            {
                _lexer.Scanner.ReadChar();
                pretext += ".";
            }

            // 2. Get the file extension name.
            var lineTokenPart = _lexer.ReadWord();
            var finalText = pretext + lineTokenPart.Text;
            var token = TokenBuilder.ToLiteralString(finalText);
            var t = new TokenData() { Token = token, Line = line, LineCharPos = pos };
            _lexer.ParsedTokens.Add(t);
            return new Token[] { token };
        }
Example 9
 /// <summary>
 /// Parses a url / email style word ( e.g. http https ftp ftps www ) into a single literal string token.
 /// </summary>
 /// <returns>The parsed token.</returns>
 public override Token[] Parse()
 {
     // http https ftp ftps www
     var takeoverToken = _lexer.LastTokenData;
     var line = _lexer.State.Line;
     var pos = _lexer.State.LineCharPosition;
     var lineTokenPart = _lexer.ReadCustomWord('@', '.');
     var finalText = takeoverToken.Token.Text + lineTokenPart.Text;
     var lineToken = TokenBuilder.ToLiteralString(finalText);
     var t = new TokenData() { Token = lineToken, Line = line, LineCharPos = pos };
     _lexer.ParsedTokens.Add(t);
     return new Token[] { lineToken };
 }