/// <summary>
/// Can only handle print if no ( and " supplied.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>False when the next token is a "(" or a quote char; true otherwise.</returns>
public override bool CanHandle(Token current)
{
    var ahead = _lexer.PeekToken();
    var isParenOrQuote = ahead.Token == Tokens.LeftParenthesis || ahead.Token.Text == "\"";
    return !isParenOrQuote;
}
// </fs:plugin-autogenerate>
/// <summary>
/// Whether or not this parser can handle the supplied token.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the next token's text is "of" (case-insensitive).</returns>
public override bool CanHandle(Token current)
{
    var aheadText = _tokenIt.Peek().Token.Text;
    return string.Compare(aheadText, "of", StringComparison.InvariantCultureIgnoreCase) == 0;
}
/// <summary>
/// Whether or not this plugin can handle current token(s).
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the next token names a known function.</returns>
public override bool CanHandle(Token current)
{
    var ahead = _tokenIt.Peek(1, false);
    return _parser.Context.Symbols.IsFunc(ahead.Token.Text);
}
/// <summary>
/// This can not handle all idtoken based expressions — only multi-word
/// fluentscript function names whose function has the wildcard flag enabled.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True only when a matching wildcard-enabled script function exists.</returns>
public override bool CanHandle(Token current)
{
    if (!(current.Kind == TokenKind.Ident))
        return false;
    var next = _tokenIt.Peek(1, false);
    if (!(next.Token.Kind == TokenKind.Ident))
        return false;

    // Check if multi-word function name.
    // e.g. "refill inventory"
    // 1. Is it a function call?
    var tokens = _tokenIt.PeekConsequetiveIdsAppendedWithTokenCounts(true, _tokenIt.LLK);
    _result = FluentHelper.MatchFunctionName(_parser.Context, tokens);

    // Validate.
    // 1. The function must exist.
    if (!_result.Exists)
        return false;

    // 2. Only fluentscript functions support wildcard.
    if (_result.FunctionMode != MemberMode.FunctionScript)
        return false;

    // 3. Has wildcard flag must be turned on.
    // FIX: both "as" casts were dereferenced without null checks, which could
    // throw NullReferenceException if the symbol is missing or of another type.
    var sym = _parser.Context.Symbols.GetSymbol(_result.Name) as SymbolFunction;
    if (sym == null)
        return false;
    var func = sym.FuncExpr as FunctionExpr;
    if (func == null)
        return false;
    return func.Meta.HasWildCard;
}
/// <summary>
/// Converts from c# datatypes to fluentscript datatypes inside.
/// </summary>
/// <param name="token">Literal token whose Value is wrapped into an LObject.</param>
/// <returns>The fluentscript object wrapping the token's value.</returns>
public static LObject ConvertToLangLiteral(Token token)
{
    if (token.Type == TokenTypes.Null)
        return LObjects.Null;

    var tokenType = token.Type;
    if (tokenType == TokenTypes.LiteralNumber)
        return new LNumber(Convert.ToDouble(token.Value, CultureInfo.InvariantCulture));
    if (tokenType == TokenTypes.LiteralString)
        return new LString(Convert.ToString(token.Value));
    if (tokenType == TokenTypes.LiteralDate)
        return new LDate(Convert.ToDateTime(token.Value));
    if (tokenType == TokenTypes.LiteralTime)
        return new LTime((TimeSpan)token.Value);
    if (tokenType == TokenTypes.LiteralDay)
        return new LDayOfWeek((DayOfWeek)token.Value);
    if (token.Kind == TokenKind.LiteralBool)
        return new LBool(Convert.ToBoolean(token.Value));

    // Fallback: non-literal token kinds get a general class conversion.
    return LangTypeHelper.ConvertToLangClass(token.Value);
}
/// <summary>
/// Whether or not this uri plugin can handle the current token.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the next token is "if" or "endif".</returns>
public override bool CanHandle(Token current)
{
    var ahead = _lexer.PeekToken(false);
    var text = ahead.Token.Text;
    return text == "if" || text == "endif";
}
/// <summary>
/// Whether or not this can handle the current token.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True for the "var" keyword, or when the next token is an assignment.</returns>
public override bool CanHandle(Token current)
{
    if (current == Tokens.Var)
        return true;
    // Not a declaration — check for "name = ..." style assignment.
    return _tokenIt.Peek().Token == Tokens.Assignment;
}
/// <summary>
/// Whether or not this can handle the token supplied.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the next token's text is "words" (case-insensitive).</returns>
public override bool CanHandle(Token current)
{
    var aheadText = _tokenIt.Peek(1, false).Token.Text;
    return string.Compare(aheadText, "words", StringComparison.InvariantCultureIgnoreCase) == 0;
}
/// <summary>
/// Whether or not this parser can handle the supplied token.
/// Matches one-word holiday names, or two-word names formed with the next token.
/// </summary>
/// <param name="token">Current token being inspected.</param>
/// <returns>True when the token (alone or combined with the next token) is a known holiday.</returns>
public override bool CanHandle(Token token)
{
    // NOTE(review): ToLower() is culture-sensitive (e.g. Turkish 'I');
    // confirm _holidays keys are lowercased the same way, or consider ToLowerInvariant.
    string name = token.Text.ToLower();
    string name2 = _tokenIt.Peek().Token.Text.ToLower();
    // Single-word holiday e.g. "christmas".
    if (_holidays.ContainsKey(name)) return true;
    // Two-word holiday e.g. "independence day".
    if (_holidays.ContainsKey(name + " " + name2)) return true;
    return false;
}
/// <summary>
/// Whether or not the lexer can handle this token.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the next word is a registered marker.</returns>
public override bool CanHandle(Token current)
{
    var word = _lexer.Scanner.PeekWord(false);
    if (!word.Success)
        return false;
    return MarkerPlugin._markers.ContainsKey(word.Text);
}
/// <summary>
/// This can not handle all idtoken based expressions — only multi-word
/// identifiers that resolve to an existing function name.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the consecutive identifiers match a known function.</returns>
public override bool CanHandle(Token current)
{
    if (current.Kind != TokenKind.Ident)
        return false;
    if (_tokenIt.Peek(1, false).Token.Kind != TokenKind.Ident)
        return false;

    // Two idents in a row — possibly a multi-word function name e.g. "refill inventory".
    var ids = _tokenIt.PeekConsequetiveIdsAppendedWithTokenCounts(true, _tokenIt.LLK);
    _result = FluentHelper.MatchFunctionName(_parser.Context, ids);
    return _result.Exists;
}
/// <summary>
/// Whether or not this uri plugin can handle the current token.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the last word plus the upcoming chars form an email address.</returns>
public override bool CanHandle(Token current)
{
    var lastWord = _lexer.LastToken.Text;
    // Peek ahead for the "@domain.tld" portion (limited to 25 chars).
    var peeked = _lexer.Scanner.PeekCustomLimitedWord(false, '@', 25, '@', '.');
    if (!peeked.Success)
        return false;
    return Regex.IsMatch(lastWord + peeked.Text, _emailRegex);
}
/// <summary>
/// Whether or not this uri plugin can handle the current token.
/// Checks if the chars following the current token continue a date
/// (e.g. current "3" followed by "/10/2012"), recording the end position.
/// </summary>
/// <param name="current">Token expected to hold the first date part.</param>
/// <returns>True when current token text + following chars parse as a date.</returns>
public override bool CanHandle(Token current)
{
    _endPos = -1;
    var next = "";
    var pos = _lexer.State.Pos;
    // Check position.
    if (pos > _lexer.LAST_POSITION)
        return false;
    char n = _lexer.State.Text[pos];
    // Check that the next char is date part separator as in 3/10/2012 or 3-10-2012
    if (n != '-' && n != '/' && n != '\\')
        return false;
    // Accumulate digits and separators, normalizing every separator to '/'.
    while (pos <= _lexer.LAST_POSITION)
    {
        n = _lexer.State.Text[pos];
        if (Char.IsDigit(n))
        {
            next += n;
        }
        else if (n == '-' || n == '/' || n == '\\')
        {
            next += '/';
        }
        else break;
        pos++;
    }
    // No need to try parse the text if next length is < 5 (shortest suffix e.g. "/1/12")
    if (next.Length < 5)
        return false;
    var result = DateTime.MinValue;
    var combinedWord = current.Text + next;
    // NOTE(review): DateTime.TryParse uses the current culture's date format — confirm intended.
    if (DateTime.TryParse(combinedWord, out result))
    {
        // Remember the last char consumed so the parse step knows where the date ends.
        _endPos = pos - 1;
        return true;
    }
    return false;
}
/// <summary>
/// Whether or not this parser can handle the supplied token.
/// Matches ordinal references like "1st", "2nd" followed by a plural-izable symbol.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when an ordinal suffix is followed by an ident whose plural is a symbol.</returns>
public override bool CanHandle(Token current)
{
    var suffix = _tokenIt.Peek().Token.Text;
    var isOrdinalSuffix = suffix == "st" || suffix == "nd" || suffix == "rd" || suffix == "th";
    if (!isOrdinalSuffix)
        return false;

    var after = _tokenIt.Peek(2).Token;
    if (after.Kind != TokenKind.Ident)
        return false;

    // Finally check if there is a plural symbol that exists e.g. "item" -> "items".
    return this.Ctx.Symbols.Contains(after.Text + "s");
}
/// <summary>
/// Whether or not this plugin can handle current token(s).
/// Distinguishes a percent literal from the modulo operator.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the next token is '%' and it is NOT a modulo operation.</returns>
public override bool CanHandle(Token current)
{
    if (_tokenIt.Peek(1, false).Token != Tokens.Percent)
        return false;

    // A literal or ident after '%' indicates modulo instead of percent:
    // 1. 25 % 4
    // 2. 25 % result
    var afterPercent = _tokenIt.Peek(2, false).Token;
    if (afterPercent.IsLiteralAny() || afterPercent.Kind == TokenKind.Ident)
        return false;
    return true;
}
/// <summary>
/// Increments the line number on the underlying scanner and records
/// the newline as the last token read.
/// </summary>
/// <param name="is2CharNewLine">True when the newline is the 2-char "\r\n" sequence.</param>
public void IncrementLine(bool is2CharNewLine)
{
    _scanner.IncrementLine(is2CharNewLine);
    // Track the newline so token-dependent lexing logic sees it as the last token.
    _lastToken = Tokens.NewLine;
}
/// <summary>
/// Reads the tokens inside an interpolated section of a string,
/// consuming chars until the closing '}' or end of input.
/// </summary>
/// <returns>The tokens making up the interpolated expression, in order.</returns>
private List<TokenData> ReadInterpolatedTokens()
{
    var c = _scanner.ReadChar();
    var n = _scanner.PeekChar();
    var tokens = new List<TokenData>();
    while (c != '}' && !_scanner.IsAtEnd())
    {
        // Capture start position before any Read* call advances the scanner.
        var pos = _scanner.State.Pos;
        // Variable
        if (_scanner.IsIdentStart(c))
        {
            _lastToken = ReadWord();
        }
        // Empty space.
        else if (c == ' ' || c == '\t')
        {
            _lastToken = Tokens.WhiteSpace;
        }
        else if (_scanner.IsOp(c) == true)
        {
            _lastToken = ReadOperator();
        }
        else if (c == '(') { _lastToken = Tokens.LeftParenthesis; }
        else if (c == ')') { _lastToken = Tokens.RightParenthesis; }
        else if (c == '[') { _lastToken = Tokens.LeftBracket; }
        else if (c == ']') { _lastToken = Tokens.RightBracket; }
        else if (c == '.') { _lastToken = Tokens.Dot; }
        else if (c == ',') { _lastToken = Tokens.Comma; }
        else if (c == ':') { _lastToken = Tokens.Colon; }
        else if (_scanner.IsNumeric(c))
        {
            _lastToken = ReadNumber();
        }
        else if (c == '\r')
        {
            // Newline inside the interpolation: keep line tracking accurate.
            bool is2CharNewline = n == '\n';
            IncrementLine(is2CharNewline);
        }
        else
        {
            // Any other char is invalid inside an interpolated expression.
            throw new LangException("syntax", "unexpected text in string", string.Empty,
                _scanner.State.Line, _scanner.State.LineCharPosition);
        }
        var t = new TokenData()
        {
            Token = _lastToken,
            Line = _scanner.State.Line,
            LineCharPos = _scanner.State.LineCharPosition,
            Pos = pos
        };
        tokens.Add(t);

        // Single char symbol - char advancement was not made.
        if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.WhiteSpace)
            && _scanner.State.Pos == pos)
            _scanner.ReadChar();
        c = _scanner.State.CurrentChar;
        n = _scanner.PeekChar();
    }
    return tokens;
}
/// <summary>
/// Reads the next token from the reader.
/// </summary>
/// <returns> A token, or <c>null</c> if there are no more tokens. </returns>
public TokenData NextToken()
{
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    // LEXER ALWAYS READS NEXT CHAR
    var c = _scanner.State.CurrentChar;
    var n = _scanner.PeekChar();
    //var tokenLengthCalcMode = TokenLengthCalcMode.Direct;

    // Capture position/line info BEFORE consuming chars so the TokenData
    // built at the end points at the token's start.
    var pos = _scanner.State.Pos;
    var line = _scanner.State.Line;
    var tokenLength = 0;
    var cpos = _scanner.State.LineCharPosition;
    if (_scanner.IsEnded())
    {
        _lastToken = Tokens.EndToken;
    }
    // Empty space.
    else if (c == ' ' || c == '\t')
    {
        _scanner.ConsumeWhiteSpace(false, true);
        _lastToken = Tokens.WhiteSpace;
        tokenLength = (_scanner.State.Pos - pos) + 1;
        //tokenLengthCalcMode = TokenLengthCalcMode.WhiteSpace;
    }
    // Variable
    else if (_scanner.IsIdentStart(c))
    {
        _lastToken = ReadWord();
    }
    // Single line comment: "//"
    else if (c == '/' && n == '/')
    {
        _scanner.MoveChars(2);
        var result = _scanner.ScanToNewLine(false, true);
        //tokenLengthCalcMode = TokenLengthCalcMode.String;
        tokenLength = (_scanner.State.Pos - pos) + 1;
        _lastToken = TokenBuilder.ToComment(false, result.Text);
    }
    // Multi-line comment: "/* ... */"
    else if (c == '/' && n == '*')
    {
        _scanner.MoveChars(2);
        var result = _scanner.ScanUntilChars(false, '*', '/', false, true);
        //tokenLengthCalcMode = TokenLengthCalcMode.MultilineComment;
        tokenLength = _scanner.State.LineCharPosition;
        _lastToken = TokenBuilder.ToComment(true, result.Text);
    }
    // Single '|' pipe — "||" falls through to the operator branch below.
    else if (c == '|' && n != '|')
    {
        _lastToken = Tokens.Pipe;
    }
    // Operator ( Math, Compare, Increment ) * / + -, < <= > >= != =
    else if (_scanner.IsOp(c) == true)
    {
        _lastToken = ReadOperator();
    }
    else if (c == '(') { _lastToken = Tokens.LeftParenthesis; }
    else if (c == ')') { _lastToken = Tokens.RightParenthesis; }
    else if (c == '[') { _lastToken = Tokens.LeftBracket; }
    else if (c == ']') { _lastToken = Tokens.RightBracket; }
    else if (c == '.') { _lastToken = Tokens.Dot; }
    else if (c == ',') { _lastToken = Tokens.Comma; }
    else if (c == ':') { _lastToken = Tokens.Colon; }
    else if (c == '{') { _lastToken = Tokens.LeftBrace; }
    else if (c == '}') { _lastToken = Tokens.RightBrace; }
    else if (c == ';') { _lastToken = Tokens.Semicolon; }
    else if (c == '$') { _lastToken = Tokens.Dollar; }
    else if (c == '@') { _lastToken = Tokens.At; }
    else if (c == '#') { _lastToken = Tokens.Pound; }
    else if ( c== '?') { _lastToken = Tokens.Question; }
    else if (c == '\\') { _lastToken = Tokens.BackSlash; }
    // String literal
    else if (c == '"' || c == '\'')
    {
        _lastToken = ReadString( c == '"');
        //tokenLengthCalcMode = TokenLengthCalcMode.String;
        if (_lastToken.Kind == TokenKind.Multi)
        {
            // Multi-part (interpolated) string: recover raw text between the quotes.
            tokenLength = (_scanner.State.Pos - pos) -2;
            string text = _scanner.State.Text.Substring(pos + 1, tokenLength);
            _lastToken.SetText(text);
        }
        else
        {
            // +2 accounts for the two surrounding quote chars.
            tokenLength = _lastToken.Text.Length + 2;
        }
    }
    else if (_scanner.IsNumeric(c))
    {
        _lastToken = ReadNumber();
    }
    else if (c == '\r')
    {
        bool is2CharNewline = n == '\n';
        IncrementLine(is2CharNewline);
    }
    else
    {
        _lastToken = Tokens.Unknown;
    }
    var t = new TokenData() { Token = _lastToken, Line = line, LineCharPos = cpos, Pos = pos };
    _lastTokenData = t;

    // Single char symbol - char advancement was not made.
    if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.Unknown
        || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos)
        _scanner.ReadChar();
    return t;
}
/// <summary>
/// Peeks at the next token without permanently advancing the lexer:
/// all mutable lexer/scanner state is saved, the token is read via
/// NextToken(), then the state is restored.
/// </summary>
/// <param name="allowSpace">If false, whitespace tokens are skipped over.</param>
/// <returns>The next token (first non-whitespace token unless allowSpace is true).</returns>
public TokenData PeekToken(bool allowSpace = false)
{
    // Check if ended
    if (_scanner.State.Pos >= _scanner.State.Text.Length)
    {
        // Reuse the cached end-of-input token if already created.
        if (_endTokenData != null)
            return _endTokenData;

        // Create endToken data.
        _endTokenData = new TokenData()
        {
            Token = Tokens.EndToken,
            Line = _scanner.State.Line,
            Pos = _scanner.State.Pos,
            LineCharPos = _scanner.State.LineCharPosition
        };
        return _endTokenData;
    }

    // Save everything NextToken() mutates so it can be undone below.
    var line = _scanner.State.Line;
    var linepos = _scanner.State.LineCharPosition;
    var lastToken = _lastToken;
    var lastTokenData = _lastTokenData;
    var iSc = _interpolatedStartChar;
    var pos = _scanner.State.Pos;

    // Get the next token.
    var token = NextToken();
    if (!allowSpace && token.Token == Tokens.WhiteSpace)
    {
        while (token.Token == Tokens.WhiteSpace)
        {
            token = NextToken();
        }
    }

    // Reset the data back to the last token.
    _scanner.State.Line = line;
    _scanner.State.LineCharPosition = linepos;
    _lastToken = lastToken;
    _lastTokenData = lastTokenData;
    _interpolatedStartChar = iSc;
    _scanner.ResetPos(pos, true);
    return token;
}
/// <summary>
/// Whether the next token ends the current statement or ends the script.
/// </summary>
/// <param name="endOfStatementToken">Token that terminates a statement.</param>
/// <returns>True for either the statement terminator or the end-of-input token.</returns>
protected bool IsEndOfStatementOrEndOfScript(Token endOfStatementToken)
{
    var next = _tokenIt.NextToken.Token;
    return next == endOfStatementToken || next == Tokens.EndToken;
}
/// <summary>
/// Whether or not this parser can handle the supplied token.
/// Base implementation: plugins that are auto-matched are always eligible.
/// </summary>
/// <param name="current">Current token (not used by the base implementation).</param>
/// <returns>True when this plugin is auto-matched.</returns>
public virtual bool CanHandle(Token current)
{
    return IsAutoMatched;
}
/// <summary>
/// Whether or not this plugin can handle current token(s).
/// Matches "@id\" or "id\" — an (optionally '@'-prefixed) ident followed by a backslash.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True when the token after the ident is a backslash.</returns>
public override bool CanHandle(Token current)
{
    var isAtSign = current == Tokens.At;
    if (!isAtSign && current.Kind != TokenKind.Ident)
        return false;

    Token afterId = null;
    if (isAtSign)
    {
        // '@' prefix: the ident is the next token; the char after it is 2 ahead.
        _tokenIt.Peek();
        afterId = _tokenIt.Peek(2).Token;
    }
    else
    {
        afterId = _tokenIt.Peek(1).Token;
    }
    return afterId == Tokens.BackSlash;
}
/// <summary>
/// Handles a comment token: toggles collection of summary/scriptmeta
/// comments, stores the comment when collecting, and advances the iterator.
/// </summary>
/// <param name="tokenData">Token data (position info) of the comment token.</param>
/// <param name="token">The comment token itself.</param>
protected void HandleComment(TokenData tokenData, Token token)
{
    var text = token.Text;
    // Case 1: Summary tag for functions or scriptmeta tag for script info.
    // FIX: the extra Contains(" @summary") / Contains(" @scriptmeta-start")
    // checks were redundant — Contains("@summary") already matches any
    // occurrence, space-prefixed or not — so they have been removed.
    if (text.Contains("@summary") || text.Contains("@scriptmeta-start"))
    {
        _hasSummaryComments = true;
        _lastCommentToken = tokenData;
    }
    // Case 2: end of scriptmeta section stops collection.
    else if (text.Contains("@scriptmeta-end"))
    {
        this.ClearCommentHandling();
    }
    if (_hasSummaryComments)
        _comments.Add(token);

    // Finally advance the token.
    _tokenIt.Advance();
}
/// <summary>
/// Match the current token to the token supplied.
/// Delegates the multi-token expectation check to the token iterator.
/// </summary>
/// <param name="token1">The first token to expect</param>
/// <param name="token2">The second token to expect (optional)</param>
/// <param name="token3">The third token to expect (optional)</param>
protected void ExpectMany(Token token1, Token token2 = null, Token token3 = null)
{
    _tokenIt.ExpectMany(token1, token2, token3);
}
/// <summary>
/// Whether at end of statement.
/// </summary>
/// <param name="endOfStatementToken">Token that terminates a statement.</param>
/// <returns>True when the next token is the statement terminator.</returns>
protected bool IsEndOfStatement(Token endOfStatementToken)
{
    var next = _tokenIt.NextToken.Token;
    return next == endOfStatementToken;
}
/// <summary>
/// Returns whether or not the token represents a reference to a Class.
/// Also tries an uppercased-first-letter variant (e.g. "user" -> "User").
/// </summary>
/// <param name="token">Token whose text is checked against registered types.</param>
/// <returns>True when the text (or its capitalized form) names a known type.</returns>
public bool IsClass(Token token)
{
    var text = token.Text;
    if (_ctx.Types.Contains(text))
        return true;
    // A known variable takes precedence over a type name.
    if (_ctx.Symbols.IsVar(text))
        return false;
    // FIX: guard against empty token text before indexing text[0],
    // which previously threw IndexOutOfRangeException.
    if (string.IsNullOrEmpty(text))
        return false;
    // Try converting the first char to uppercase.
    // NOTE(review): Char.ToUpper is culture-sensitive — confirm invariant casing is not required.
    var name = Char.ToUpper(text[0]) + text.Substring(1);
    return _ctx.Types.Contains(name);
}
/// <summary>
/// Match the current token to the token supplied.
/// Delegates the expectation check (and any resulting error) to the token iterator.
/// </summary>
/// <param name="token">The token to match the current token against</param>
protected void Expect(Token token)
{
    _tokenIt.Expect(token);
}
/// <summary>
/// Returns whether or not the token represents a reference to a class instance.
/// </summary>
/// <param name="token">Token whose text is looked up as a symbol.</param>
/// <returns>True when the symbol exists and its data type is a registered type.</returns>
public bool IsInstance(Token token)
{
    var symbol = _ctx.Symbols.GetSymbol(token.Text);
    return symbol != null && _ctx.Types.Contains(symbol.DataTypeName);
}
/// <summary>
/// Checks whether or not this plugin can handle the current token.
/// </summary>
/// <param name="current">Current token being inspected.</param>
/// <returns>True for the "log" ident or any registered log-level ident.</returns>
public override bool CanHandle(Token current)
{
    if (current.Kind != TokenKind.Ident)
        return false;
    var text = current.Text;
    return text == "log" || LogPluginConstants.ContainsKey(text);
}
/// <summary>
/// Returns whether or not the token represents an argument.
/// </summary>
/// <param name="token">Token to check.</param>
/// <returns>True for any literal, or an ident that names a known variable.</returns>
public bool IsArg(Token token)
{
    if (token.IsLiteralAny())
        return true;
    return token.Kind == TokenKind.Ident && _ctx.Symbols.IsVar(token.Text);
}