Example #1
        /// <summary>
        /// Converts from C# datatypes to fluentscript datatypes.
        /// </summary>
        /// <param name="token">The literal token whose value should be converted.</param>
        public static LObject ConvertToLangLiteral(Token token)
        {
            if (token.Type == TokenTypes.Null)
                return LObjects.Null;

            var type = token.Type;
            var kind = token.Kind;
            if (type == TokenTypes.LiteralNumber)
                return new LNumber(Convert.ToDouble(token.Value));

            if (type == TokenTypes.LiteralString)
                return new LString(Convert.ToString(token.Value));

            if (type == TokenTypes.LiteralDate)
                return new LDate(Convert.ToDateTime(token.Value));

            if (type == TokenTypes.LiteralTime)
                return new LTime((TimeSpan)token.Value);

            if (type == TokenTypes.LiteralDay)
                return new LDayOfWeek((DayOfWeek)token.Value);

            if (kind == TokenKind.LiteralBool)
                return new LBool(Convert.ToBoolean(token.Value));

            return new LClass(token.Value);
        }
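A minimal usage sketch, assuming the four-argument Token(kind, type, text, value) constructor shown in Example #7 and that the call is made from (or qualified with) the declaring class:

// Hypothetical usage; TokenKind.LiteralNumber / TokenTypes.LiteralNumber are the
// constants referenced in the snippets, and the literal values are only illustrative.
var numToken = new Token(TokenKind.LiteralNumber, TokenTypes.LiteralNumber, "123", 123.0);
LObject result = ConvertToLangLiteral(numToken);   // an LNumber wrapping 123.0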
Example #2
        /// <summary>
        /// Whether or not this uri plugin can handle the current token.
        /// </summary>
        /// <param name="current"></param>
        /// <returns></returns>
        public override bool CanHandle(Token current)
        {
            var currentWord = _lexer.LastToken.Text;

            var peekResult = _lexer.Scanner.PeekCustomLimitedWord(false, '@', 25, '@', '.');
            if (!peekResult.Success) return false;

            var possibleEmail = currentWord + peekResult.Text;
            return Regex.IsMatch(possibleEmail, _emailRegex);
        }
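The _emailRegex field is not shown in this snippet; a minimal sketch of the kind of pattern it might hold, using only System.Text.RegularExpressions:

using System;
using System.Text.RegularExpressions;

// Hypothetical pattern; the real _emailRegex value is not part of this snippet.
const string emailPattern = @"^[\w.\-]+@[\w\-]+(\.[\w\-]+)+$";

Console.WriteLine(Regex.IsMatch("john.doe@example.com", emailPattern)); // True
Console.WriteLine(Regex.IsMatch("not-an-email", emailPattern));         // False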
Example #3
 /// <summary>
 /// Sets values from another token.
 /// </summary>
 /// <param name="t"></param>
 internal void SetValues(Token t)
 {
     _kind = t._kind;
     _type = t._type;
 }
Example #4
 /// <summary>
 /// Adds the token to the lookup
 /// </summary>
 /// <param name="token"></param>
 public static void AddToLookup(Token token)
 {
     AllTokens[token.Text] = token;
 }
Example #5
 /// <summary>
 /// Checks if the token supplied is a math op ( * / + - % )
 /// </summary>
 /// <param name="token"></param>
 /// <returns></returns>
 public static bool IsMath(Token token)
 {
     return MathTokens.ContainsKey(token.Text);
 }
Example #6
 /// <summary>
 /// Checks if the token is a comparison token ( < <= > >= == != ).
 /// </summary>
 /// <param name="token"></param>
 /// <returns></returns>
 public static bool IsCompare(Token token)
 {
     return CompareTokens.ContainsKey(token.Text);
 }
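IsMath and IsCompare both reduce to a dictionary lookup keyed on the operator's text. A self-contained sketch of that idea (the real MathTokens/CompareTokens dictionaries map text to Token instances; plain strings are used here only to keep the sketch runnable):

using System;
using System.Collections.Generic;

// Hypothetical stand-in for the MathTokens lookup.
var mathTokens = new Dictionary<string, string>
{
    ["*"] = "Multiply", ["/"] = "Divide", ["+"] = "Plus", ["-"] = "Minus", ["%"] = "Modulo"
};

bool IsMathText(string text) => mathTokens.ContainsKey(text);

Console.WriteLine(IsMathText("+"));   // True
Console.WriteLine(IsMathText("=="));  // False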
Example #7
 /// <summary>
 /// Creates a token from an existing token and position information.
 /// </summary>
 /// <param name="token">The token to copy from.</param>
 /// <param name="line">The line number</param>
 /// <param name="lineCharPos">The line char position</param>
 /// <param name="charPos">The char position</param>
 /// <returns></returns>
 public static Token ToToken(Token token, int line, int lineCharPos, int charPos)
 {
     var t = new Token(token.Kind, token.Type, token.Text, token.Value);
      // Note: the line, lineCharPos and charPos arguments are currently not applied;
      // the object initializer that would set them is commented out below.
      //{ Line = line, LineCharPos = lineCharPos, Pos = charPos };
     return t;
 }
Example #8
        /// <summary>
        /// Reads the tokens making up an interpolated expression inside a string, stopping at the closing '}'.
        /// </summary>
        /// <returns>The tokens read, with their position information.</returns>
        private List<TokenData> ReadInterpolatedTokens()
        {
            var c = _scanner.ReadChar();
            var n = _scanner.PeekChar();
            var tokens = new List<TokenData>();

            while (c != '}' && !_scanner.IsAtEnd())
            {
                var pos = _scanner.State.Pos;
                // Variable
                if (_scanner.IsIdentStart(c))
                {
                    _lastToken = ReadWord();
                }
                // Empty space.
                else if (c == ' ' || c == '\t')
                {
                    _lastToken = Tokens.WhiteSpace;
                }
                else if (_scanner.IsOp(c) == true)
                {
                    _lastToken = ReadOperator();
                }
                else if (c == '(')
                {
                    _lastToken = Tokens.LeftParenthesis;
                }
                else if (c == ')')
                {
                    _lastToken = Tokens.RightParenthesis;
                }
                else if (c == '[')
                {
                    _lastToken = Tokens.LeftBracket;
                }
                else if (c == ']')
                {
                    _lastToken = Tokens.RightBracket;
                }
                else if (c == '.')
                {
                    _lastToken = Tokens.Dot;
                }
                else if (c == ',')
                {
                    _lastToken = Tokens.Comma;
                }
                else if (c == ':')
                {
                    _lastToken = Tokens.Colon;
                }
                else if (_scanner.IsNumeric(c))
                {
                    _lastToken = ReadNumber();
                }
                else if (c == '\r')
                {
                    bool is2CharNewline = n == '\n';
                    IncrementLine(is2CharNewline);
                }
                else
                {
                    throw new LangException("syntax", "unexpected text in string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
                }

                var t = new TokenData() { Token = _lastToken, Line = _scanner.State.Line, LineCharPos = _scanner.State.LineCharPosition, Pos = pos };
                tokens.Add(t);

                // Single char symbol - char advancement was not made.
                if ( (t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos  )
                    _scanner.ReadChar();
                c = _scanner.State.CurrentChar;
                n = _scanner.PeekChar();
            }
            return tokens;
        }
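The loop above and NextToken below share the same dispatch-and-advance pattern: classify the current character, record a token with its starting position, then advance the scanner only if the branch itself did not move it (the "single char symbol - char advancement was not made" check). A reduced, self-contained sketch of that pattern; MiniToken and Tokenize are hypothetical names, not part of the fluentscript code:

using System;
using System.Collections.Generic;

foreach (var tok in Tokenize("user.names[0] }"))
    Console.WriteLine($"{tok.Kind,-10} '{tok.Text}' @ {tok.Pos}");

static List<MiniToken> Tokenize(string text)
{
    var tokens = new List<MiniToken>();
    var i = 0;
    while (i < text.Length && text[i] != '}')
    {
        var start = i;
        var c = text[i];
        MiniToken t;
        if (char.IsLetter(c) || c == '_')
        {
            // Identifier: this branch advances the position itself.
            while (i < text.Length && (char.IsLetterOrDigit(text[i]) || text[i] == '_')) i++;
            t = new MiniToken("Ident", text[start..i], start);
        }
        else if (char.IsDigit(c))
        {
            // Number literal: also advances the position itself.
            while (i < text.Length && char.IsDigit(text[i])) i++;
            t = new MiniToken("Number", text[start..i], start);
        }
        else if (c == '.' || c == ',' || c == '(' || c == ')' || c == '[' || c == ']')
        {
            // Single-char symbol: no advancement made yet.
            t = new MiniToken("Symbol", c.ToString(), start);
        }
        else if (c == ' ' || c == '\t')
        {
            t = new MiniToken("WhiteSpace", " ", start);
        }
        else
        {
            throw new Exception($"unexpected text in string at position {start}");
        }
        tokens.Add(t);

        // Mirror of the check in the original code: advance only if the branch did not.
        if (i == start) i++;
    }
    return tokens;
}

// Hypothetical token record used only by this sketch.
record MiniToken(string Kind, string Text, int Pos);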
Example #9
        /// <summary>
        /// Reads the next token from the reader.
        /// </summary>
        /// <returns> The next token; the end token is returned when there are no more tokens. </returns>
        public TokenData NextToken()
        {
            // NOTE: the lexer always reads the next char.
            var c = _scanner.State.CurrentChar;
            var n = _scanner.PeekChar();
            //var tokenLengthCalcMode = TokenLengthCalcMode.Direct;

            var pos = _scanner.State.Pos;
            var line = _scanner.State.Line;
            var tokenLength = 0;
            var cpos = _scanner.State.LineCharPosition;

            if (_scanner.IsEnded())
            {
                _lastToken = Tokens.EndToken;
            }
            // Empty space.
            else if (c == ' ' || c == '\t')
            {
                _scanner.ConsumeWhiteSpace(false, true);
                _lastToken = Tokens.WhiteSpace;
                tokenLength = (_scanner.State.Pos - pos) + 1;
                //tokenLengthCalcMode = TokenLengthCalcMode.WhiteSpace;
            }
            // Variable
            else if (_scanner.IsIdentStart(c))
            {
                _lastToken = ReadWord();
            }
            // Single-line comment
            else if (c == '/' && n == '/')
            {
                _scanner.MoveChars(2);
                var result = _scanner.ScanToNewLine(false, true);
                //tokenLengthCalcMode = TokenLengthCalcMode.String;
                tokenLength = (_scanner.State.Pos - pos) + 1;
                _lastToken = Tokens.ToComment(false, result.Text);
            }
            // Multi-line comment
            else if (c == '/' && n == '*')
            {
                _scanner.MoveChars(2);
                var result = _scanner.ScanUntilChars(false, '*', '/', false, true);
                //tokenLengthCalcMode = TokenLengthCalcMode.MultilineComment;
                tokenLength = _scanner.State.LineCharPosition;
                _lastToken = Tokens.ToComment(true, result.Text);
            }
            else if (c == '|' && n != '|')
            {
                _lastToken = Tokens.Pipe;
            }
            // Operator ( Math, Compare, Increment ) * / + - < <= > >= ! =
            else if (_scanner.IsOp(c) == true)
            {
                _lastToken = ReadOperator();
            }
            else if (c == '(')
            {
                _lastToken = Tokens.LeftParenthesis;
            }
            else if (c == ')')
            {
                _lastToken = Tokens.RightParenthesis;
            }
            else if (c == '[')
            {
                _lastToken = Tokens.LeftBracket;
            }
            else if (c == ']')
            {
                _lastToken = Tokens.RightBracket;
            }
            else if (c == '.')
            {
                _lastToken = Tokens.Dot;
            }
            else if (c == ',')
            {
                _lastToken = Tokens.Comma;
            }
            else if (c == ':')
            {
                _lastToken = Tokens.Colon;
            }
            else if (c == '{')
            {
                _lastToken = Tokens.LeftBrace;
            }
            else if (c == '}')
            {
                _lastToken = Tokens.RightBrace;
            }
            else if (c == ';')
            {
                _lastToken = Tokens.Semicolon;
            }
            else if (c == '$')
            {
                _lastToken = Tokens.Dollar;
            }
            else if (c == '@')
            {
                _lastToken = Tokens.At;
            }
            else if (c == '#')
            {
                _lastToken = Tokens.Pound;
            }
            else if (c == '\\')
            {
                _lastToken = Tokens.BackSlash;
            }
            // String literal
            else if (c == '"' || c == '\'')
            {
                _lastToken = ReadString( c == '"');
                //tokenLengthCalcMode = TokenLengthCalcMode.String;
                if (_lastToken.Kind == TokenKind.Multi)
                {
                    tokenLength = (_scanner.State.Pos - pos) -2;
                    string text = _scanner.State.Text.Substring(pos + 1, tokenLength);
                    _lastToken.SetText(text);
                }
                else
                {
                    tokenLength = _lastToken.Text.Length + 2;
                }
            }
            else if (_scanner.IsNumeric(c))
            {
                _lastToken = ReadNumber();
            }
            else if (c == '\r')
            {
                bool is2CharNewline = n == '\n';
                IncrementLine(is2CharNewline);
            }
            else
            {
                _lastToken = Tokens.Unknown;
            }
            var t = new TokenData() { Token = _lastToken, Line = line, LineCharPos = cpos, Pos = pos };
            _lastTokenData = t;

            // Single char symbol - char advancement was not made.
            if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.Unknown || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos)
                _scanner.ReadChar();
            return t;
        }
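A hypothetical driver for NextToken. The Lexer type name is an assumption (the declaring class is not named in these snippets); TokenData, Tokens.EndToken and the Line/Pos fields are the ones used above.

// Hypothetical helper; 'Lexer' here stands for the class declaring NextToken.
static void PrintAllTokens(Lexer lexer)
{
    TokenData t;
    do
    {
        t = lexer.NextToken();
        Console.WriteLine($"{t.Token.Text}  (line {t.Line}, pos {t.Pos})");
    } while (t.Token != Tokens.EndToken);
}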
Example #10
 /// <summary>
 /// Increments the line number
 /// </summary>
 /// <param name="is2CharNewLine"></param>
 public void IncrementLine(bool is2CharNewLine)
 {
     _scanner.IncrementLine(is2CharNewLine);
     _lastToken = Tokens.NewLine;
 }
Example #11
 /// <summary>
 /// Whether or not the supplied token matches this rule's token type ( e.g. @number, @time, @word ).
 /// </summary>
 /// <param name="token"></param>
 /// <returns></returns>
 public bool IsMatchingType(Token token)
 {
     if (string.IsNullOrEmpty(this.TokenType))
         return false;
     if (this.TokenType == "@number" && token.Kind == TokenKind.LiteralNumber)
         return true;
     if (this.TokenType == "@time" && token.Kind == TokenKind.LiteralTime)
         return true;
     if (this.TokenType == "@word" && token.Kind == TokenKind.Ident)
         return true;
     return false;
 }
Example #12
 /// <summary>
 /// Whether or not the supplied token's text is one of this rule's allowed values.
 /// A null or empty value list matches any token.
 /// </summary>
 /// <param name="token"></param>
 /// <returns></returns>
 public bool IsMatchingValue(Token token)
 {
     if (this.Values == null || this.Values.Length == 0)
         return true;
     return this.Values.Contains(token.Text);
 }
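A sketch combining the two checks. The declaring class name (TokenMatch) and its settable TokenType/Values properties are assumptions; the @number tag, the TokenKind/TokenTypes constants and the Token constructor come from the snippets above.

// Hypothetical usage; TokenMatch is an assumed name for the declaring class.
var rule = new TokenMatch { TokenType = "@number", Values = null };
var tok  = new Token(TokenKind.LiteralNumber, TokenTypes.LiteralNumber, "42", 42.0);

Console.WriteLine(rule.IsMatchingType(tok));   // True : "@number" matches a number literal token
Console.WriteLine(rule.IsMatchingValue(tok));  // True : a null/empty Values list matches any text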
Example #13
 /// <summary>
 /// Converts the token's numeric value to a negative number.
 /// </summary>
 /// <param name="token"></param>
 /// <returns></returns>
 public static LObject ConvertToLangNegativeNumber(Token token)
 {
     var num = Convert.ToDouble(token.Value, CultureInfo.InvariantCulture) * -1;
     return new LNumber(num);
 }
Example #14
        /// <summary>
        /// Whether or not this uri plugin can handle the current token.
        /// </summary>
        /// <param name="token"></param>
        /// <returns></returns>
        public override bool CanHandle(Token token)
        {
            var next = _lexer.Scanner.PeekChar();
            var curr = _lexer.State.CurrentChar;
            var last = ' ';
            if(_lexer.State.Pos - 2 >= 0)
                last = _lexer.State.Text[_lexer.State.Pos - 2];

            // [ ident.xml, 9.doc ]
            if (Char.IsLetterOrDigit(last)) return false;
            if (last == ')' || last == ']' ) return false;
            if (token.Text == "." && !Char.IsLetter(curr)) return false;

            // Wildcard extension pattern, e.g. *.xml
            if (token == ComLib.Lang.Core.Tokens.Multiply && curr == '.')
            {
                var result = _lexer.Scanner.PeekWord(true);
                if (!result.Success) return false;
                if (_extLookup.ContainsKey(result.Text))
                    return true;
            }
            else if (token == ComLib.Lang.Core.Tokens.Dot)
            {
                var result = _lexer.Scanner.PeekWord(false);
                if (!result.Success) return false;
                if (_extLookup.ContainsKey(result.Text))
                    return true;
            }
            return false;
        }
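_extLookup is not shown in this snippet; it is presumably a lookup keyed by the file extensions the plugin recognizes. A minimal, hypothetical stand-in:

using System;
using System.Collections.Generic;

// Hypothetical contents; the real _extLookup keys/values are not part of this snippet.
var extLookup = new Dictionary<string, bool>(StringComparer.OrdinalIgnoreCase)
{
    ["xml"] = true, ["doc"] = true, ["txt"] = true, ["csv"] = true
};

// If the keys are bare extension names, "*.xml" and "config.xml" pass the check above.
Console.WriteLine(extLookup.ContainsKey("xml"));  // True
Console.WriteLine(extLookup.ContainsKey("foo"));  // False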
Example #15
 /// <summary>
 /// Whether or not the token supplied is a new line.
 /// </summary>
 /// <param name="token"></param>
 /// <returns></returns>
 public static bool IsNewLine(Token token)
 {
     return token._type == TokenTypes.NewLine;
 }
Example #16
 /// <summary>
 /// Clones this instance of the token and returns a new instance with the same values.
 /// </summary>
 /// <returns></returns>
 public Token Clone()
 {
     var token = new Token(this.Kind, this.Type, this.Text, this.Value);
     return token;
 }
Example #17
        /// <summary>
        /// Peeks at the next token without consuming it.
        /// </summary>
        /// <param name="allowSpace">If true, a whitespace token can be returned; otherwise whitespace is skipped.</param>
        /// <returns></returns>
        public TokenData PeekToken(bool allowSpace = false)
        {
            // Check if ended
            if (_scanner.State.Pos >= _scanner.State.Text.Length)
            {
                // Return the cached end token data if it was already created.
                if (_endTokenData != null) return _endTokenData;

                // Create endToken data.
                _endTokenData = new TokenData() { Token = Tokens.EndToken, Line = _scanner.State.Line, Pos = _scanner.State.Pos, LineCharPos = _scanner.State.LineCharPosition };
                return _endTokenData;
            }

            var line = _scanner.State.Line;
            var linepos = _scanner.State.LineCharPosition;
            var lastToken = _lastToken;
            var lastTokenData = _lastTokenData;
            var iSc = _interpolatedStartChar;
            var pos = _scanner.State.Pos;

            // Get the next token.
            var token = NextToken();
            if (!allowSpace)
            {
                while (token.Token == Tokens.WhiteSpace)
                {
                    token = NextToken();
                }
            }
            // Reset the data back to the last token.
            _scanner.State.Line = line;
            _scanner.State.LineCharPosition = linepos;
            _lastToken = lastToken;
            _lastTokenData = lastTokenData;
            _interpolatedStartChar = iSc;
            _scanner.ResetPos(pos, true);
            return token;
        }
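PeekToken is essentially NextToken plus a state rollback: it snapshots line, position and last-token bookkeeping, reads ahead (skipping whitespace unless allowSpace is true), then restores the scanner with ResetPos. A hypothetical call site, using the same assumed Lexer name as above:

// Hypothetical helper; 'Lexer' stands for the class declaring PeekToken/NextToken.
static bool NextIsOpenParen(Lexer lexer)
{
    // Look ahead without consuming anything; the scanner state is restored
    // inside PeekToken before it returns.
    var ahead = lexer.PeekToken();
    return ahead.Token == Tokens.LeftParenthesis;
}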