private IncludeToken(Token parent, Scope scope, Span span, PreprocessorToken prepToken, string fileName, bool searchFileDir)
    : base(parent, scope, span)
{
    _prepToken = prepToken;
    _fileName = fileName;
    _searchFileDir = searchFileDir;
}
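/// <summary>
/// Parses a #replace ... #with ... #endreplace block. Tokens between #replace and #with are
/// collected as the 'old' tokens, and tokens between #with and #endreplace as the 'new' tokens.
/// Parsing stops at #endreplace or end of file.
/// </summary>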
public static ReplaceSetToken Parse(Token parent, Scope scope, PreprocessorToken replaceToken)
{
#if DEBUG
    if (replaceToken == null) throw new ArgumentNullException("replaceToken");
#endif
    var file = scope.File;
    var startPos = replaceToken.Span.Start;
    var scopeIndent = scope.Indent();

    var ret = new ReplaceSetToken(parent, scope, new Span(startPos, startPos));
    ret._replaceToken = new ReplaceToken(ret, scope, replaceToken.Span, ret);
    ret._tokens.Add(ret._replaceToken);

    var done = false;
    while (!file.EndOfFile && !done)
    {
        var token = file.ParseToken(ret, scopeIndent);
        if (token == null) break;

        if (token.GetType() == typeof(PreprocessorToken))
        {
            var prepText = (token as PreprocessorToken).Text;
            if (prepText == "#with" && ret._withToken == null)
            {
                ret._withToken = new ReplaceToken(ret, scope, token.Span, ret);
                ret._tokens.Add(ret._withToken);
            }
            else if (prepText == "#endreplace")
            {
                ret._endReplaceToken = new ReplaceToken(ret, scope, token.Span, ret);
                ret._tokens.Add(ret._endReplaceToken);
                done = true;
            }
            else
            {
                ret._tokens.Add(token);
                if (ret._withToken == null) ret._oldTokens.Add(token);
                else ret._newTokens.Add(token);
            }
        }
        else
        {
            ret._tokens.Add(token);
            if (ret._withToken == null) ret._oldTokens.Add(token);
            else ret._newTokens.Add(token);
        }
    }

    ret.Span = new Span(startPos, file.Position);
    return ret;
}
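/// <summary>
/// Parses the file name portion of an #include directive, reading to the end of the line.
/// Angle-bracket includes are matched with _rxAngleBrackets and quoted includes with _rxQuotes;
/// a quoted include also sets the flag to search the including file's directory.
/// </summary>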
public static IncludeToken Parse(Token parent, Scope scope, PreprocessorToken prepToken)
{
    var startPos = scope.File.Position;
    scope.File.SeekEndOfLine();
    var lineText = scope.File.GetText(new Span(startPos, scope.File.Position)).Trim();

    var fileName = "";
    var searchFileDir = false;

    var match = _rxAngleBrackets.Match(lineText);
    if (match.Success)
    {
        fileName = match.Groups[1].Value.Trim();
    }
    else if ((match = _rxQuotes.Match(lineText)).Success)
    {
        fileName = match.Groups[1].Value.Trim();
        searchFileDir = true;
    }

    return new IncludeToken(parent, scope, new Span(prepToken.Span.Start, scope.File.Position), prepToken, fileName, searchFileDir);
}
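/// <summary>
/// Parses an #insert ... #endinsert block, wrapping the opening and closing directives in
/// InsertBoundaryToken instances and collecting all tokens in between. Parsing stops at
/// #endinsert or end of file.
/// </summary>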
public static Token Parse(Token parent, Scope scope, PreprocessorToken insertToken)
{
    var ret = new InsertToken(parent, scope);
    var scopeIndent = scope.Indent();

    var tokens = new List<Token>();
    tokens.Add(ret._startToken = new InsertBoundaryToken(ret, scope, insertToken.Span, true));

    while (true)
    {
        var token = scope.File.ParseToken(parent, scopeIndent);
        if (token == null) break;

        if (token.GetType() == typeof(PreprocessorToken) && token.Text == "#endinsert")
        {
            tokens.Add(ret._endToken = new InsertBoundaryToken(ret, scope, token.Span, false));
            break;
        }
        else tokens.Add(token);
    }

    ret._tokens = tokens.ToArray();
    ret.Span = new Span(ret._tokens.First().Span.Start, ret._tokens.Last().Span.End);
    return ret;
}
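/// <summary>
/// Parses a #define directive: the name, an optional bracketed argument list, and a body that is
/// either a braces-enclosed block or the remainder of the line, with a trailing '\' token treated
/// as a line continuation. If no valid name follows on the same line, the file position is rewound
/// and the bare preprocessor token is returned instead.
/// </summary>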
public static Token Parse(Token parent, Scope scope, PreprocessorToken prepToken)
{
    var startPos = scope.File.Position;

    var defineScope = scope;
    defineScope.Hint |= ScopeHint.SuppressFunctionDefinition | ScopeHint.SuppressVarDecl | ScopeHint.SuppressFunctionCall;

    var lineNum = prepToken.Span.Start.LineNum;
    var tokens = new List<Token>();
    var bodyTokens = new List<Token>();
    tokens.Add(prepToken);

    // Define name
    var token = scope.File.ParseSingleToken(parent, defineScope);
    if (token == null || token.GetType() != typeof(IdentifierToken) || token.Span.Start.LineNum != lineNum)
    {
        scope.File.Position = startPos;
        return prepToken;
    }
    var nameToken = token as IdentifierToken;
    tokens.Add(nameToken);

    var resetPos = scope.File.Position;

    // Check for line break before next token.
    if (!scope.File.SkipWhiteSpaceAndComments() || scope.File.Position.LineNum != lineNum)
        return new DefineToken(parent, scope, tokens, nameToken, null, bodyTokens);

    // Arguments
    BracketsToken argsToken = null;
    token = scope.File.ParseToken(parent, defineScope);
    if (token == null || token.Span.Start.LineNum != lineNum)
    {
        scope.File.Position = resetPos;
        return new DefineToken(parent, scope, tokens, nameToken, null, bodyTokens);
    }
    if (token.GetType() == typeof(BracketsToken))
    {
        argsToken = token as BracketsToken;
        tokens.Add(argsToken);

        resetPos = scope.File.Position;

        // Check for line break before next token.
        if (!scope.File.SkipWhiteSpaceAndComments() || scope.File.Position.LineNum != lineNum)
            return new DefineToken(parent, scope, tokens, nameToken, argsToken, bodyTokens);

        token = scope.File.ParseToken(parent, defineScope);
        if (token == null || token.Span.Start.LineNum != lineNum)
        {
            scope.File.Position = resetPos;
            return new DefineToken(parent, scope, tokens, nameToken, argsToken, bodyTokens);
        }
    }

    // Body enclosed in { }
    if (token.GetType() == typeof(BracesToken))
    {
        tokens.Add(token);
        bodyTokens.Add(token);
        return new DefineToken(parent, scope, tokens, nameToken, argsToken, bodyTokens);
    }

    var done = false;
    while (!done)
    {
        if (token.GetType() == typeof(UnknownToken) && token.Text == "\\")
        {
            // Line end token
            lineNum++;
            tokens.Add(token);
        }
        else
        {
            tokens.Add(token);
            bodyTokens.Add(token);
        }

        if (!scope.File.SkipWhiteSpaceAndComments()) break;

        var pos = scope.File.Position;
        if (pos.LineNum != lineNum) break;

        token = scope.File.ParseSingleToken(parent, defineScope);
        if (token == null || token.Span.Start.LineNum != lineNum)
        {
            scope.File.Position = pos;
            done = true;
        }
    }

    return new DefineToken(parent, scope, tokens, nameToken, argsToken, bodyTokens);
}