// Advances the lexer to the next non-whitespace token, recording the token's
// start/end locations in 0-based coordinates (the lexer reports them 1-based —
// presumably; the -1 adjustment suggests so, confirm against SourceLocation).
private void Lex()
{
    int tok;
    do
    {
        // Remember where this lex attempt begins; on a whitespace token we
        // simply try again from the next position.
        start = new SourceLocation(lexer.CurrentLocation.line - 1, lexer.CurrentLocation.col - 1);
        tok = lexer.Lex();
    }
    while (tok == (int)Token.WHITE_SPACE && !lexer.IsEob());

    // The lexer now sits just past the token; capture the end and publish it.
    end = new SourceLocation(lexer.CurrentLocation.line - 1, lexer.CurrentLocation.col - 1);
    _token = tok;
}
/// <summary>
/// Scans <paramref name="src"/> and rebuilds the <c>pairs</c> (matched
/// delimiter spans) and <c>hiddenRegions</c> (collapsible spans) lists.
/// Multi-line comments pair their "/*" and "*/" delimiters; braces,
/// parentheses and brackets pair with their matching opener; a brace pair
/// that spans multiple lines also produces a collapsible region.
/// </summary>
/// <param name="scanner">Lexer used to tokenize the source text.</param>
/// <param name="src">The source text to scan.</param>
public void Parse(SquirrelLexer scanner, string src)
{
    Debug.WriteLine("Parse");

    // Openers seen so far, awaiting their matching close token.
    Stack<LexerTokenDesc> braces = new Stack<LexerTokenDesc>();
    scanner.SetSource(src, 0);

    LexerTokenDesc td = new LexerTokenDesc();
    bool hastokens = scanner.Lex(ref td);

    pairs.Clear();
    hiddenRegions.Clear();

    while (hastokens)
    {
        switch (td.token)
        {
            case (int)Token.MLINE_COMMENT:
            {
                // Pair the two-character delimiters of the comment. The +2
                // widths cover "/*" and "*/".
                // NOTE(review): the end span starts at td.span.iEndIndex and
                // extends 2 further — this assumes iEndIndex points at the
                // start of "*/"; confirm against the lexer's span convention.
                TextSpan open = new TextSpan();
                open.iStartIndex = td.span.iStartIndex;
                open.iStartLine = td.span.iStartLine;
                open.iEndIndex = open.iStartIndex + 2;
                open.iEndLine = open.iStartLine;

                TextSpan close = new TextSpan();
                close.iStartIndex = td.span.iEndIndex;
                close.iStartLine = td.span.iEndLine;
                close.iEndIndex = close.iStartIndex + 2;
                close.iEndLine = close.iStartLine;

                pairs.Add(new LexPair(td.token, open, close));

                // The whole comment is collapsible.
                hiddenRegions.Add(td.span);
            }
            break;

            case '{':
            case '(':
            case '[':
                braces.Push(td);
                break;

            case '}':
            {
                LexerTokenDesc opener;
                if (GetMatchingBracket('{', braces, out opener))
                {
                    // A brace pair that spans lines is also a collapsible region.
                    if (opener.span.iStartLine != td.span.iEndLine)
                    {
                        TextSpan hideSpan = new TextSpan();
                        hideSpan.iStartIndex = opener.span.iStartIndex;
                        hideSpan.iStartLine = opener.span.iStartLine;
                        hideSpan.iEndIndex = td.span.iEndIndex;
                        hideSpan.iEndLine = td.span.iEndLine;
                        hiddenRegions.Add(hideSpan);
                    }
                    pairs.Add(new LexPair(td.token, opener.span, td.span));
                }
            }
            break;

            case ')':
            {
                LexerTokenDesc opener;
                if (GetMatchingBracket('(', braces, out opener))
                {
                    pairs.Add(new LexPair(td.token, opener.span, td.span));
                }
            }
            break;

            case ']':
            {
                LexerTokenDesc opener;
                if (GetMatchingBracket('[', braces, out opener))
                {
                    pairs.Add(new LexPair(td.token, opener.span, td.span));
                }
            }
            break;
        }

        hastokens = scanner.Lex(ref td);
    }
}
/// <summary>
/// Re-parses the given buffer into this scope's declaration tree, then
/// refreshes the classification spans. The scanner is locked for the
/// duration so concurrent parses do not interleave.
/// </summary>
/// <param name="buffer">The source text to parse.</param>
/// <param name="versionnumber">Buffer version stored in <c>FileVersion</c>.</param>
public void Parse(string buffer, int versionnumber)
{
    FileVersion = versionnumber;

    lock (_scanner)
    {
        _classificationInfo.Clear();
        Children.Clear();
        _scanner.SetSource(buffer, 0);

        LexerTokenDesc td = new LexerTokenDesc();
        while (_scanner.Lex(ref td))
        {
            switch (td.token)
            {
                case ((int)Token.CLASS):
                    ParseClass(this, ref td);
                    break;

                case ((int)Token.FUNCTION):
                    ParseFunction(this, ref td);
                    break;

                case ((int)Token.STATIC):
                case ((int)Token.LOCAL):
                    // "static"/"local" introduce a declaration: record the
                    // keyword, then parse the following identifier (if any)
                    // as a variable in a temporary child scope.
                    CaptureKeyword(this, ref td);
                    if (LexSkipSpace(this, ref td) && td.token == (int)Token.IDENTIFIER)
                    {
                        SQScope temp = new SQScope() { Level = this.Level + 1 };
                        SQVariable v = ParseVariable(temp, ref td, SQDeclarationType.Variable);
                        LexSkipSpace(v, ref td);

                        // Only declarations followed by an initializer are
                        // re-parented into this scope's children.
                        if (td.token == (int)Token.EQ || td.token == (int)'=')
                        {
                            v.Parent = this;
                            this.Children.Add(new SQDeclare(v.Name, v));
                        }
                    }
                    break;

                case '{':
                    TryParseScope(this, ref td, false);
                    break;

                case ((int)Token.EQ):
                    // Skip over the initializer; if it opens with '{' (e.g. a
                    // table/scope), fall into the scope-parsing case above.
                    SkipToEndLine(this, ref td, ';', '{');
                    if (td.token == '{')
                    {
                        goto case '{';
                    }
                    break;

                case ((int)Token.ENUM):
                    ParseEnum(this, ref td);
                    break;

                default:
                    TryParseCommon(this, ref td);
                    break;
            }
        }

        GetSpans(_classificationInfo, this);
    }
}