/// <summary>
/// Parses a takeover statement of the form: [takeover-word] [marker-word] [rest-of-line | quoted string].
/// Reads a marker word after the taken-over token, then captures the remainder as either an
/// interpolated string (when quoted) or a raw line token.
/// </summary>
/// <returns>Two tokens: the taken-over token and the line/string token.</returns>
public override Token[] Parse()
{
    // print no quotes needed!
    var takeoverToken = _lexer.LastTokenData;
    int line = _lexer.State.Line;
    int pos = _lexer.State.LineCharPosition;

    // What is the next word? (the marker immediately following the takeover word)
    var marker = _lexer.ReadWord();
    var m = new TokenData() { Token = marker, Line = line, LineCharPos = pos };

    // Move past the char after the marker and skip spaces/tabs before the payload.
    _lexer.Scanner.ReadChar();
    _lexer.Scanner.ConsumeWhiteSpace(false, true);

    Token token = null;
    line = _lexer.State.Line;
    pos = _lexer.State.LineCharPosition;
    char c = _lexer.State.CurrentChar;

    // Quoted payload -> interpolated string; otherwise consume the rest of the line.
    if (c == '\'' || c == '"')
        token = _lexer.ReadInterpolatedString(c, false, false, true);
    else
        token = _lexer.ReadLine(false);

    var t = new TokenData() { Token = token, Line = line, LineCharPos = pos };

    // Record all three tokens in order: takeover word, marker, payload.
    _lexer.ParsedTokens.Add(takeoverToken);
    _lexer.ParsedTokens.Add(m);
    _lexer.ParsedTokens.Add(t);
    return new Token[] { takeoverToken.Token, token };
}
/// <summary>
/// Takes over the last token and captures the remainder of the current line as a single token.
/// </summary>
/// <returns>Two tokens: the taken-over token and the line token.</returns>
public override Token[] Parse()
{
    var takeoverToken = _lexer.LastTokenData;
    var startLine = _lexer.State.Line;
    var startCharPos = _lexer.State.LineCharPosition;

    // Everything up to (but not including) the newline becomes one token.
    var lineToken = _lexer.ReadLine(false);

    _lexer.ParsedTokens.Add(takeoverToken);
    _lexer.ParsedTokens.Add(new TokenData() { Token = lineToken, Line = startLine, LineCharPos = startCharPos });

    return new Token[] { takeoverToken.Token, lineToken };
}
/// <summary>
/// Captures a uri line (http, https, ftp, ftps, www) and records it as a single-line comment token.
/// </summary>
/// <returns>A single comment token containing the raw line text.</returns>
public override Token[] Parse()
{
    // Fix: removed the unused local `takeoverToken` (read but never added/returned)
    // and the commented-out ReadChar() call from the original.
    var line = _lexer.State.Line;
    var pos = _lexer.State.LineCharPosition;

    // Read the raw remainder of the line, then wrap it as a comment token.
    var raw = _lexer.ReadLineRaw(false);
    var token = TokenBuilder.ToComment(false, raw.Text);

    _lexer.ParsedTokens.Add(new TokenData() { Token = token, Line = line, LineCharPos = pos });
    return new Token[] { token };
}
/// <summary>
/// Parses the rest of the current line into a single token, preceded by the taken-over token.
/// </summary>
/// <param name="includeNewLine">Not consulted by this implementation; the newline is never consumed here. TODO confirm intended.</param>
/// <returns>Two tokens: the taken-over token and the line token.</returns>
protected Token[] ParseLine(bool includeNewLine)
{
    var takeoverToken = _lexer.LastTokenData;
    var startLine = _lexer.State.Line;
    var startCharPos = _lexer.State.LineCharPosition;

    // ReadLine stops on the last char before the newline.
    var lineToken = _lexer.ReadLine(false);

    _lexer.ParsedTokens.Add(takeoverToken);
    _lexer.ParsedTokens.Add(new TokenData() { Token = lineToken, Line = startLine, LineCharPos = startCharPos });

    return new Token[] { takeoverToken.Token, lineToken };
}
/// <summary>
/// Handles a comment token, tracking @summary / @scriptmeta comment blocks.
/// Starts collecting comments on "@summary" or "@scriptmeta-start", stops and
/// clears on "@scriptmeta-end", and always advances past the comment token.
/// </summary>
/// <param name="tokenData">Positional data for the comment token.</param>
/// <param name="token">The comment token itself.</param>
protected void HandleComment(TokenData tokenData, Token token)
{
    var text = token.Text;

    // Case 1: Summary tag for functions or scriptmeta tag for script info.
    // Fix: the original also checked Contains(" @summary") / Contains(" @scriptmeta-start"),
    // which are subsets of the plain Contains checks and therefore redundant.
    if (text.Contains("@summary") || text.Contains("@scriptmeta-start"))
    {
        _hasSummaryComments = true;
        _lastCommentToken = tokenData;
    }
    // Case 2: End of a scriptmeta block.
    else if (text.Contains("@scriptmeta-end"))
    {
        this.ClearCommentHandling();
    }

    if (_hasSummaryComments)
        _comments.Add(token);

    // Finally advance the token.
    _tokenIt.Advance();
}
/// <summary>
/// Reads the tokens inside an interpolation "${ ... }" until the closing "}" or end of input.
/// Supports identifiers, whitespace, operators, grouping symbols, ".", ",", ":", numbers and "\r" newlines;
/// anything else raises a syntax error.
/// </summary>
/// <returns>The list of tokens (with positions) found inside the interpolation.</returns>
private List<TokenData> ReadInterpolatedTokens()
{
    var c = _scanner.ReadChar();
    var n = _scanner.PeekChar();
    var tokens = new List<TokenData>();
    while (c != '}' && !_scanner.IsAtEnd())
    {
        var pos = _scanner.State.Pos;
        // Variable
        if (_scanner.IsIdentStart(c))
        {
            _lastToken = ReadWord();
        }
        // Empty space.
        else if (c == ' ' || c == '\t')
        {
            _lastToken = Tokens.WhiteSpace;
        }
        else if (_scanner.IsOp(c) == true)
        {
            _lastToken = ReadOperator();
        }
        else if (c == '(') { _lastToken = Tokens.LeftParenthesis; }
        else if (c == ')') { _lastToken = Tokens.RightParenthesis; }
        else if (c == '[') { _lastToken = Tokens.LeftBracket; }
        else if (c == ']') { _lastToken = Tokens.RightBracket; }
        else if (c == '.') { _lastToken = Tokens.Dot; }
        else if (c == ',') { _lastToken = Tokens.Comma; }
        else if (c == ':') { _lastToken = Tokens.Colon; }
        else if (_scanner.IsNumeric(c))
        {
            _lastToken = ReadNumber();
        }
        else if (c == '\r')
        {
            // Handle windows ("\r\n") vs single-char newline line counting.
            bool is2CharNewline = n == '\n';
            IncrementLine(is2CharNewline);
        }
        else
        {
            // Unsupported char inside an interpolation.
            throw new LangException("syntax", "unexpected text in string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
        }
        var t = new TokenData() { Token = _lastToken, Line = _scanner.State.Line, LineCharPos = _scanner.State.LineCharPosition, Pos = pos };
        tokens.Add(t);

        // Single char symbol - char advancement was not made by the branch above,
        // so move forward explicitly (detected by the scanner position not having moved).
        if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.WhiteSpace)
            && _scanner.State.Pos == pos)
            _scanner.ReadChar();

        c = _scanner.State.CurrentChar;
        n = _scanner.PeekChar();
    }
    return tokens;
}
/// <summary>
/// Peeks at the next token without consuming it: reads ahead via NextToken, then restores
/// all scanner/lexer state (position, line, last token, interpolation char) to where it was.
/// </summary>
/// <param name="allowSpace">When false, whitespace tokens are skipped and the next non-whitespace token is returned.</param>
/// <returns>The upcoming token.</returns>
public TokenData PeekToken(bool allowSpace = false)
{
    // Check if ended - return (and cache) the end-of-input token.
    if (_scanner.State.Pos >= _scanner.State.Text.Length)
    {
        // Store this perhaps?
        if (_endTokenData != null)
            return _endTokenData;

        // Create endToken data.
        _endTokenData = new TokenData()
        {
            Token = Tokens.EndToken,
            Line = _scanner.State.Line,
            Pos = _scanner.State.Pos,
            LineCharPos = _scanner.State.LineCharPosition
        };
        return _endTokenData;
    }

    // Snapshot all mutable lexer/scanner state so it can be restored after the read-ahead.
    var line = _scanner.State.Line;
    var linepos = _scanner.State.LineCharPosition;
    var lastToken = _lastToken;
    var lastTokenData = _lastTokenData;
    var iSc = _interpolatedStartChar;
    var pos = _scanner.State.Pos;

    // Get the next token.
    var token = NextToken();
    if (!allowSpace && token.Token == Tokens.WhiteSpace)
    {
        // Consume consecutive whitespace tokens until a real token appears.
        while (token.Token == Tokens.WhiteSpace)
        {
            token = NextToken();
        }
    }

    // Reset the data back to the last token (undo the read-ahead).
    _scanner.State.Line = line;
    _scanner.State.LineCharPosition = linepos;
    _lastToken = lastToken;
    _lastTokenData = lastTokenData;
    _interpolatedStartChar = iSc;
    _scanner.ResetPos(pos, true);
    return token;
}
/// <summary>
/// Captures an "@" variable reference inside a path, first flushing any accumulated
/// literal path text as a constant expression part.
/// </summary>
/// <param name="pathExps">Collected path part expressions; appended to in place.</param>
/// <param name="lastPathToken">Token associated with the accumulated literal path text.</param>
/// <param name="path">Literal path text gathered so far (may be empty).</param>
private void CaptureVariableReference(List<Expr> pathExps, TokenData lastPathToken, string path)
{
    // Flush any existing literal path text as a constant part.
    if (!string.IsNullOrEmpty(path))
        AppendPathPart(pathExps, lastPathToken, path, true);

    // Move past the "@".
    var next = _tokenIt.Advance();

    // CASE 1: "@{ expr }" - braces hold a full expression, e.g. @{file.name}
    if (next.Token == Tokens.LeftBrace)
    {
        _tokenIt.Advance();
        var braceExpr = _parser.ParseExpression(null, true, true, false, false);
        pathExps.Add(braceExpr);
        if (_tokenIt.NextToken.Token != Tokens.RightBrace)
            throw _tokenIt.BuildSyntaxExpectedTokenException(Tokens.RightBrace);
        return;
    }

    // CASE 2: "@name" - a bare identifier reference.
    AppendPathPart(pathExps, next, next.Token.Text, false);
}
/// <summary>
/// Builds a date literal token (e.g. 04/20/1979) by combining the previous token,
/// the current separator char, and the remaining date text.
/// </summary>
/// <returns>A single date literal token.</returns>
public override Token[] Parse()
{
    var lastToken = _lexer.LastTokenData;
    var line = _lexer.State.Line;
    var pos = _lexer.State.LineCharPosition;
    var separator = _lexer.State.CurrentChar;
    var textToken = _lexer.ReadToPosition(_endPos);

    // Normalize "-" and "\" separators to "/" before building the literal.
    var dateText = (lastToken.Token.Text + separator + textToken.Text)
        .Replace("-", "/")
        .Replace("\\", "/");

    var dateToken = TokenBuilder.ToLiteralDate(dateText);
    _lexer.ParsedTokens.Add(new TokenData() { Token = dateToken, Line = line, LineCharPos = pos });
    return new Token[] { dateToken };
}
/// <summary>
/// Creates a for-each expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="varname">Name of the loop variable.</param>
/// <param name="sourceExpr">Expression producing the collection to iterate.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured for-each expression.</returns>
public static Expr ForEach(string varname, Expr sourceExpr, TokenData token)
{
    var exp = new ForEachExpr { VarName = varname, SourceExpr = sourceExpr };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates a for-loop expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="start">Initializer expression.</param>
/// <param name="condition">Loop condition expression.</param>
/// <param name="increment">Increment expression run after each iteration.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured for-loop expression.</returns>
public static Expr For(Expr start, Expr condition, Expr increment, TokenData token)
{
    var exp = new ForExpr
    {
        Start = start,
        Condition = condition,
        Increment = increment
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates a multi-assignment expression (e.g. var a = 1, b = 2) with context,
/// symbol scope and script reference set.
/// </summary>
/// <param name="declare">Whether this is a declaration. NOTE(review): not stored on the expression — confirm intended.</param>
/// <param name="exprs">The individual assignment expressions.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured multi-assignment expression.</returns>
public static Expr AssignMulti(bool declare, List<AssignExpr> exprs, TokenData token)
{
    var exp = new AssignMultiExpr { Assignments = exprs };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates an assignment expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="declare">True when this assignment also declares the variable.</param>
/// <param name="left">Target (variable) expression.</param>
/// <param name="right">Value expression.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured assignment expression.</returns>
public static Expr Assign(bool declare, Expr left, Expr right, TokenData token)
{
    var exp = new AssignExpr
    {
        IsDeclaration = declare,
        VarExp = left,
        ValueExp = right
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates a negation expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="expr">Expression to negate.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured negation expression.</returns>
public static Expr Negate(Expr expr, TokenData token)
{
    var exp = new NegateExpr { Expression = expr };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates a unary expression (e.g. increment/decrement) with context,
/// symbol scope and script reference set.
/// </summary>
/// <param name="name">Name of the variable being operated on.</param>
/// <param name="incExpr">Expression providing the increment value, if any.</param>
/// <param name="incValue">Literal increment amount.</param>
/// <param name="op">The unary operator.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured unary expression.</returns>
public static Expr Unary(string name, Expr incExpr, double incValue, Operator op, TokenData token)
{
    var exp = new UnaryExpr
    {
        Name = name,
        Op = op,
        Increment = incValue,
        Expression = incExpr
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates a variable (identifier) expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="name">Name of the variable.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured variable expression.</returns>
public static Expr Ident(string name, TokenData token)
{
    var exp = new VariableExpr { Name = name };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Parses an environment variable reference in one of the forms:
/// env.&lt;ident&gt;, env.sys.&lt;ident&gt;, env.user.&lt;ident&gt;.
/// </summary>
/// <returns>A single EnvToken with scope ("", "sys" or "user") and variable name.</returns>
public override Token[] Parse()
{
    // Fix: removed the unused locals `takeoverToken` and `finalText` from the original.
    int line = _lexer.State.Line;
    int pos = _lexer.State.LineCharPosition;

    // Consume the first "." after "env".
    _lexer.Scanner.ReadChar();

    // Read the next part.
    // Case 1: variable        e.g. env.path
    // Case 2: sys or user     e.g. env.sys.path, env.user.path
    Token part = _lexer.ReadWord();
    string varName = part.Text;
    string scope = string.Empty;
    if (string.Compare(part.Text, "sys", StringComparison.InvariantCultureIgnoreCase) == 0
        || string.Compare(part.Text, "user", StringComparison.InvariantCultureIgnoreCase) == 0)
    {
        // Consume the second "." in "env.(sys|user)."
        _lexer.Scanner.ReadChar();
        scope = part.Text.ToLower();

        // Final variable name.
        part = _lexer.ReadWord();
        varName = part.Text;
    }

    EnvToken envToken = new EnvToken(scope, varName);
    _lexer.ParsedTokens.Add(new TokenData() { Token = envToken, Line = line, LineCharPos = pos });
    return new Token[] { envToken };
}
/// <summary>
/// Creates a condition (comparison) expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="left">Left operand expression.</param>
/// <param name="op">Comparison operator.</param>
/// <param name="right">Right operand expression.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured condition expression.</returns>
public static Expr Condition(Expr left, Operator op, Expr right, TokenData token)
{
    var exp = new ConditionExpr
    {
        Left = left,
        Op = op,
        Right = right
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Appends one path part (constant text or identifier reference) to the list of path expressions.
/// </summary>
/// <param name="pathExps">Collected path part expressions; appended to in place.</param>
/// <param name="token">Token used to set up the part's context.</param>
/// <param name="text">Part text; when empty, the token's own text is used.</param>
/// <param name="isConstant">True for a literal string part, false for a variable reference.</param>
private void AppendPathPart(List<Expr> pathExps, TokenData token, string text, bool isConstant)
{
    // Fall back to the token's text when no explicit text was supplied.
    var partText = string.IsNullOrEmpty(text) ? token.Token.Text : text;

    Expr part = isConstant
        ? Exprs.Const(new LString(partText), _tokenIt.NextToken)
        : Exprs.Ident(partText, _tokenIt.NextToken);

    _parser.SetupContext(part, token);
    pathExps.Add(part);
}
/// <summary>
/// Creates an expression that checks whether the named list variable has any items.
/// </summary>
/// <param name="name">Token holding the list variable's name.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured list-check expression.</returns>
public static Expr ListCheck(TokenData name, TokenData token)
{
    var exp = new ListCheckExpr { NameExp = Ident(name.Token.Text, name) };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Reads the next token from the reader: whitespace, identifier, comment, operator,
/// single-char symbol, string literal, number, newline, or Unknown as a fallback.
/// </summary>
/// <returns>The next token with its position data (Tokens.EndToken when input is exhausted).</returns>
public TokenData NextToken()
{
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    // LEXER ALWAYS READS NEXT CHAR
    var c = _scanner.State.CurrentChar;
    var n = _scanner.PeekChar();
    // Snapshot position info before any branch advances the scanner.
    var pos = _scanner.State.Pos;
    var line = _scanner.State.Line;
    // NOTE(review): tokenLength is assigned in several branches but never read
    // (its consumer appears to have been commented out) — confirm before removing.
    var tokenLength = 0;
    var cpos = _scanner.State.LineCharPosition;
    if (_scanner.IsEnded())
    {
        _lastToken = Tokens.EndToken;
    }
    // Empty space.
    else if (c == ' ' || c == '\t')
    {
        _scanner.ConsumeWhiteSpace(false, true);
        _lastToken = Tokens.WhiteSpace;
        tokenLength = (_scanner.State.Pos - pos) + 1;
    }
    // Variable
    else if (_scanner.IsIdentStart(c))
    {
        _lastToken = ReadWord();
    }
    // Single line comment: "//"
    else if (c == '/' && n == '/')
    {
        _scanner.MoveChars(2);
        var result = _scanner.ScanToNewLine(false, true);
        tokenLength = (_scanner.State.Pos - pos) + 1;
        _lastToken = TokenBuilder.ToComment(false, result.Text);
    }
    // Multi-line comment: "/* ... */"
    else if (c == '/' && n == '*')
    {
        _scanner.MoveChars(2);
        var result = _scanner.ScanUntilChars(false, '*', '/', false, true);
        tokenLength = _scanner.State.LineCharPosition;
        _lastToken = TokenBuilder.ToComment(true, result.Text);
    }
    // Single "|" is a pipe; "||" falls through to the operator branch.
    else if (c == '|' && n != '|')
    {
        _lastToken = Tokens.Pipe;
    }
    // Operator ( Math, Compare, Increment ) * / + -, < <= > >= ! =
    else if (_scanner.IsOp(c) == true)
    {
        _lastToken = ReadOperator();
    }
    // Single-char symbols.
    else if (c == '(') { _lastToken = Tokens.LeftParenthesis; }
    else if (c == ')') { _lastToken = Tokens.RightParenthesis; }
    else if (c == '[') { _lastToken = Tokens.LeftBracket; }
    else if (c == ']') { _lastToken = Tokens.RightBracket; }
    else if (c == '.') { _lastToken = Tokens.Dot; }
    else if (c == ',') { _lastToken = Tokens.Comma; }
    else if (c == ':') { _lastToken = Tokens.Colon; }
    else if (c == '{') { _lastToken = Tokens.LeftBrace; }
    else if (c == '}') { _lastToken = Tokens.RightBrace; }
    else if (c == ';') { _lastToken = Tokens.Semicolon; }
    else if (c == '$') { _lastToken = Tokens.Dollar; }
    else if (c == '@') { _lastToken = Tokens.At; }
    else if (c == '#') { _lastToken = Tokens.Pound; }
    else if ( c== '?') { _lastToken = Tokens.Question; }
    else if (c == '\\') { _lastToken = Tokens.BackSlash; }
    // String literal
    else if (c == '"' || c == '\'')
    {
        _lastToken = ReadString( c == '"');
        if (_lastToken.Kind == TokenKind.Multi)
        {
            // Multi-line string: recover the exact raw text from the scanner buffer.
            tokenLength = (_scanner.State.Pos - pos) -2;
            string text = _scanner.State.Text.Substring(pos + 1, tokenLength);
            _lastToken.SetText(text);
        }
        else
        {
            // +2 accounts for the surrounding quotes.
            tokenLength = _lastToken.Text.Length + 2;
        }
    }
    else if (_scanner.IsNumeric(c))
    {
        _lastToken = ReadNumber();
    }
    else if (c == '\r')
    {
        // Handle windows ("\r\n") vs single-char newline line counting.
        // NOTE(review): _lastToken is not reassigned in this branch — confirm intended.
        bool is2CharNewline = n == '\n';
        IncrementLine(is2CharNewline);
    }
    else
    {
        _lastToken = Tokens.Unknown;
    }
    var t = new TokenData() { Token = _lastToken, Line = line, LineCharPos = cpos, Pos = pos };
    _lastTokenData = t;

    // Single char symbol - char advancement was not made by the branch above,
    // so move forward explicitly (detected by the scanner position not having moved).
    if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.Unknown || t.Token.Type == TokenTypes.WhiteSpace)
        && _scanner.State.Pos == pos)
        _scanner.ReadChar();
    return t;
}
/// <summary>
/// Creates a named parameter expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="paramName">Name of the parameter.</param>
/// <param name="val">Value expression supplied for the parameter.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured named parameter expression.</returns>
public static Expr NamedParam(string paramName, Expr val, TokenData token)
{
    var exp = new NamedParameterExpr
    {
        Name = paramName,
        Value = val
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Reads an interpolated string in format "${variable} some text ${othervariable + 2}."
/// </summary>
/// <param name="quote">The quote char (' or ") that terminates the string (ignored when readLine is true).</param>
/// <param name="readLine">Whether or not to only read to the end of the line.</param>
/// <param name="includeNewLine">Whether or not to include the new line in parsing.</param>
/// <param name="setPositionAfterToken">Whether or not set the position of lexer after the token.</param>
/// <returns>A plain literal-string token when no interpolations occur; otherwise an interpolated token holding all parts.</returns>
public Token ReadInterpolatedString(char quote, bool readLine = false, bool includeNewLine = false, bool setPositionAfterToken = true)
{
    var allTokens = new List<TokenData>();
    var interpolationCount = 0;

    // Only supporting following:
    // 1. id's abcd with "_"
    // 2. "."
    // 3. math ops ( + - / * %)
    // "name" 'name' "name\"s" 'name\''
    var buffer = new StringBuilder();
    var curr = _scanner.ReadChar();
    var next = _scanner.PeekChar();
    var matched = false;
    var escapeChar = '\\';
    Token token = null;
    while (_scanner.State.Pos <= _scanner.LAST_POSITION)
    {
        // End string " or '
        if (!readLine && curr == quote)
        {
            matched = true;
            _scanner.MoveChars(1);
            break;
        }
        // End of line.
        if (readLine && ( curr == '\r' || curr == '\n' ))
        {
            matched = true;
            if (!includeNewLine) break;
            // Consume the newline (handling "\r\n") and keep it in the buffer.
            var is2CharNewLine = _scanner.ScanNewLine(curr);
            var newline = is2CharNewLine ? "\r\n" : "\n";
            buffer.Append(newline);
            token = Tokens.NewLine;
            break;
        }
        // Interpolation: start char (e.g. '$') followed by '{'.
        else if (curr == _interpolatedStartChar && next == '{')
        {
            // Keep track of interpolations and their start positions.
            interpolationCount++;
            int interpolatedStringStartPos = _scanner.State.LineCharPosition + 2;
            int interpolatedStringLinePos = _scanner.State.Line;

            // Add any existing text before the interpolation as a token.
            if (buffer.Length > 0)
            {
                string text = buffer.ToString();
                token = TokenBuilder.ToLiteralString(text);
                var t = new TokenData() { Token = token, LineCharPos = 0, Line = _scanner.State.Line };
                allTokens.Add(t);
                buffer.Clear();
            }

            // Move past the '{' and read the tokens inside the interpolation.
            _scanner.MoveChars(1);
            var tokens = ReadInterpolatedTokens();
            token = TokenBuilder.ToInterpolated(string.Empty, tokens);
            var iTokenData = new TokenData() { Token = token, LineCharPos = interpolatedStringStartPos, Line = interpolatedStringLinePos };
            allTokens.Add(iTokenData);
        }
        // Not an \ for escaping so just append.
        else if (curr != escapeChar)
        {
            buffer.Append(curr);
        }
        // Escape \
        else if (curr == escapeChar)
        {
            var result = _scanner.ScanEscape(quote, false);
            buffer.Append(result.Text);
            _scanner.MoveChars(1);
        }
        curr = _scanner.ReadChar();
        next = _scanner.PeekChar();
    }

    // Error: Unterminated string constant.
    if (!matched && !readLine && _scanner.State.Pos >= _scanner.LAST_POSITION)
    {
        throw new LangException("Syntax Error", "Unterminated string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
    }

    // At this point the pos is already after token.
    // If matched and need to set at end of token, move back 1 char.
    if (matched && !setPositionAfterToken) _scanner.MoveChars(-1);

    // No interpolations: the whole thing is a plain string literal.
    if (interpolationCount == 0)
    {
        var text = buffer.ToString();
        return TokenBuilder.ToLiteralString(text);
    }

    // Flush any trailing literal text after the last interpolation.
    if (buffer.Length > 0)
    {
        var text = buffer.ToString();
        token = TokenBuilder.ToLiteralString(text);
        allTokens.Add(new TokenData() { Token = token, LineCharPos = 0, Line = _scanner.State.Line });
    }
    return TokenBuilder.ToInterpolated(string.Empty, allTokens);
}
/// <summary>
/// Creates a function call expression with context, symbol scope and script reference set.
/// </summary>
/// <param name="nameExpr">Expression naming the function to call.</param>
/// <param name="parameters">Parameter expressions; null is treated as an empty list.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured function call expression.</returns>
public static Expr FunctionCall(Expr nameExpr, List<Expr> parameters, TokenData token)
{
    var funcExp = new FunctionCallExpr
    {
        NameExp = nameExpr,
        ParamListExpressions = parameters ?? new List<Expr>(),
        ParamList = new List<object>()
    };
    SetupContext(funcExp, token);
    return funcExp;
}
/// <summary>
/// Updates token diagnostics: total count, plus newline/whitespace breakdowns.
/// </summary>
/// <param name="tokenData">The token just produced.</param>
private void PerformDiagnostics(TokenData tokenData)
{
    var diagnostics = this.DiagnosticData;
    diagnostics.TotalTokens++;

    var tok = tokenData.Token;
    if (tok == Tokens.NewLine)
        diagnostics.TotalNewLineTokens++;
    else if (tok == Tokens.WhiteSpace)
        diagnostics.TotalWhiteSpaceTokens++;
}
/// <summary>
/// Creates a binding call expression (e.g. sys.&lt;binding&gt;.&lt;function&gt;) with context,
/// symbol scope and script reference set.
/// </summary>
/// <param name="bindingName">Name of the binding.</param>
/// <param name="functionName">Name of the function on the binding.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured binding call expression.</returns>
public static Expr BindingCall(string bindingName, string functionName, TokenData token)
{
    var bexpr = new BindingCallExpr();
    bexpr.Name = functionName;
    bexpr.FullName = "sys." + bindingName + "." + functionName;
    bexpr.ParamListExpressions = new List<Expr>();
    bexpr.ParamList = new List<object>();
    // Fix: every sibling factory wires up Ctx/SymScope/Token/Ref via SetupContext,
    // but this one accepted the token and never used it, leaving the expression
    // without context or a script reference.
    SetupContext(bexpr, token);
    return bexpr;
}
/// <summary>
/// Records an unexpected-token parse error at the supplied token's position.
/// The token stream itself is not advanced here.
/// </summary>
/// <param name="token">Token where the error occurred (provides line/char position).</param>
/// <param name="error">Description of the error.</param>
public void AddError(TokenData token, string error)
{
    // Capture the error with the script path and token location for later reporting.
    this._parseErrors.Add(
        new LangException("Parse", error, this._scriptPath, token.Line, token.LineCharPos));
}
/// <summary>
/// Creates a member access expression (e.g. obj.member) with context,
/// symbol scope and script reference set.
/// </summary>
/// <param name="nameExpr">Expression producing the object whose member is accessed.</param>
/// <param name="memberName">Name of the member.</param>
/// <param name="isAssignment">True when the access is the target of an assignment.</param>
/// <param name="token">Token used for source position info.</param>
/// <returns>The configured member access expression.</returns>
public static Expr MemberAccess(Expr nameExpr, string memberName, bool isAssignment, TokenData token)
{
    var exp = new MemberAccessExpr
    {
        IsAssignment = isAssignment,
        VarExp = nameExpr,
        MemberName = memberName
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Resets all summary-comment tracking state.
/// </summary>
private void ClearCommentHandling()
{
    _hasSummaryComments = false;
    _lastCommentToken = null;
    _comments.Clear();
}
/// <summary>
/// Sets up the context, symbol scope and script source reference for the expression supplied.
/// Only fields that are still null are filled in, so caller-assigned values are preserved.
/// </summary>
/// <param name="expr">Expression to initialize; a null expression is ignored.</param>
/// <param name="token">Token providing line/position info; when null, the token iterator's next token is used as a fallback (if an iterator is available).</param>
public static void SetupContext(Expr expr, TokenData token)
{
    if (expr == null) return;

    // Prefer the explicit token; fall back to the iterator's upcoming token
    // only when no token was supplied.
    var reftoken = (token == null && _tokenIt != null) ? _tokenIt.NextToken : token;
    expr.Ctx = _ctx;
    if(expr.SymScope == null) expr.SymScope = _ctx.Symbols.Current;
    if(expr.Token == null ) expr.Token = reftoken;
    // NOTE(review): Ref is only assigned when an explicit token was passed, so the
    // iterator fallback never contributes a script reference — confirm intended.
    if(expr.Ref == null && token != null ) expr.Ref = new ScriptRef(_scriptName, reftoken.Line, reftoken.LineCharPos);
}