protected LexicalInfo ToLexicalInfo(antlr.IToken token)
{
	int line = token.getLine();
	int startColumn = token.getColumn();
	int endColumn = token.getColumn() + token.getText().Length;
	String filename = token.getFilename();
	return new LexicalInfo(filename, line, startColumn, endColumn);
}
void OnParserError(antlr.RecognitionException error)
{
	var location = new LexicalInfo(error.getFilename(), error.getLine(), error.getColumn());
	var nvae = error as antlr.NoViableAltException;
	if (null != nvae)
		ParserError(location, nvae);
	else
		GenericParserError(location, error);
}
public static SourceLocation ToSourceLocation(antlr.IToken token)
{
	return new SourceLocation(token.getLine(), token.getColumn());
}
public static LexicalInfo ToLexicalInfo(antlr.IToken token)
{
	return new LexicalInfo(token.getFilename(), token.getLine(), token.getColumn());
}
public static SourceLocation ToEndSourceLocation(antlr.IToken token)
{
	return new SourceLocation(token.getLine(), token.getColumn() + token.getText().Length - 1);
}
public static SourceLocation ToEndSourceLocation(antlr.IToken token)
{
	string text = token.getText() ?? "";
	return new SourceLocation(token.getLine(), token.getColumn() + text.Length - 1);
}
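The conversion helpers above turn an ANTLR token into Boo's SourceLocation/LexicalInfo types; the end-location variants point at the last column covered by the token text, hence the trailing - 1. A minimal usage sketch, assuming only that SourceLocation exposes Line and Column; the DescribeSpan name is hypothetical and exists purely for illustration.

// Illustrative only: a hypothetical caller of the helpers above, assuming
// SourceLocation exposes Line and Column properties.
static string DescribeSpan(antlr.IToken token)
{
	SourceLocation start = ToSourceLocation(token);
	SourceLocation end = ToEndSourceLocation(token);
	// e.g. a token "foo" at line 3, column 5 yields "(3,5)-(3,7)"
	return string.Format("{0}({1},{2})-({3},{4})",
		token.getFilename(), start.Line, start.Column, end.Line, end.Column);
}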
void OnParserError(antlr.RecognitionException error)
{
	LexicalInfo data = new LexicalInfo(error.getFilename(), error.getLine(), error.getColumn());
	antlr.NoViableAltException nvae = error as antlr.NoViableAltException;
	if (null != nvae)
	{
		ParserError(data, nvae);
	}
	else
	{
		_context.Errors.Add(CompilerErrorFactory.GenericParserError(data, error));
	}
}
antlr.IToken CreateToken(antlr.IToken prototype, int newTokenType, string newTokenText)
{
	return new BooToken(newTokenType, newTokenText,
		prototype.getFilename(),
		prototype.getLine(),
		prototype.getColumn() + SafeGetLength(prototype.getText()));
}
void FlushBuffer(antlr.IToken token)
{
	if (0 == _buffer.Length) return;

	string text = _buffer.ToString();
	string[] lines = text.Split(NewLineCharArray);
	if (lines.Length > 1)
	{
		string lastLine = lines[lines.Length - 1];

		// Protect against mixed indentation issues
		if (String.Empty != lastLine)
		{
			if (null == _expectedIndent)
			{
				_expectedIndent = lastLine.Substring(0, 1);
			}
			if (String.Empty != lastLine.Replace(_expectedIndent, String.Empty))
			{
				string literal = _expectedIndent == "\t"
					? "tabs"
					: _expectedIndent == "\f"
						? "form feeds" // The lexer allows them :p
						: "spaces";
				throw new TokenStreamRecognitionException(
					new RecognitionException(
						"Mixed indentation, expected the use of " + literal,
						token.getFilename(),
						token.getLine(),
						// Point exactly to the first invalid char
						lastLine.Length - lastLine.TrimStart(_expectedIndent[0]).Length + 1));
			}
		}

		if (lastLine.Length > CurrentIndentLevel)
		{
			EnqueueIndent(token);
			_indentStack.Push(lastLine.Length);
		}
		else if (lastLine.Length < CurrentIndentLevel)
		{
			EnqueueEOS(token);
			do
			{
				EnqueueDedent();
				_indentStack.Pop();
			} while (lastLine.Length < CurrentIndentLevel);
		}
		else
		{
			EnqueueEOS(token);
		}
	}
}
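FlushBuffer drives the offside rule: when the buffered whitespace ends in a line indented deeper than the current level it enqueues an INDENT and pushes the new width, when it is shallower it enqueues an end-of-statement followed by DEDENTs until the indent stack matches, and when it is unchanged it enqueues only the end-of-statement. The sketch below reproduces that stack discipline in isolation; the method name and string events are illustrative, not the filter's actual API, and it assumes System.Collections.Generic is available.

// Illustrative sketch of the indent-stack discipline used by FlushBuffer,
// detached from the token filter above.
static IEnumerable<string> IndentEvents(IEnumerable<int> indentWidths)
{
	var stack = new Stack<int>();
	stack.Push(0); // module level
	foreach (int width in indentWidths)
	{
		if (width > stack.Peek())
		{
			stack.Push(width);
			yield return "INDENT";
		}
		else
		{
			yield return "EOS"; // end of statement
			while (width < stack.Peek())
			{
				stack.Pop();
				yield return "DEDENT";
			}
		}
	}
}

Feeding it the widths 4, 4, 8, 0 yields INDENT, EOS, INDENT, EOS, DEDENT, DEDENT, which is the same shape of token stream the filter enqueues.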
private Tuple<int, int> CalculateEndpoint(antlr.IToken token, int endLine, int endIndex, int delimiterLength)
{
	// Convert the token's 1-based ANTLR position to 0-based line/index coordinates.
	var startIndex = positionMap[token.getLine() - 1][token.getColumn() - 1];
	var startLine = token.getLine() - 1;

	// Any gap between the previous endpoint and this token is recorded as whitespace.
	if (startLine > endLine || startLine == endLine && startIndex > endIndex)
		whitespaces.Add(new TextSpan
		{
			iStartLine = endLine,
			iStartIndex = endIndex,
			iEndLine = startLine,
			iEndIndex = startIndex
		});

	// Walk the token text line by line to find where it ends.
	endLine = startLine - 1;
	endIndex = 0;
	var runningIndex = startIndex + delimiterLength;
	foreach (var part in token.getText().Split(new[] { "\r\n" }, StringSplitOptions.None))
	{
		endLine++;
		endIndex = runningIndex + part.Length;
		runningIndex = 0;
	}
	endIndex += delimiterLength;
	//endIndex = positionMap[endLine][endIndex];

	// Record the token span in the flat token map; entries must be added in strictly increasing order.
	var cluster = new MappedToken(
		startLine * lineSize + startIndex,
		endIndex - startIndex);
	if (tokenMap.Count > 0 && tokenMap[tokenMap.Count - 1].Index >= cluster.Index)
		throw new ArgumentException("Token Mapping order");
	tokenMap.Add(cluster);

	return new Tuple<int, int>(endLine, endIndex);
}
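CalculateEndpoint keys each MappedToken by the flat index startLine * lineSize + startIndex, which keeps tokenMap strictly ordered as long as lineSize exceeds any column value, presumably so a position can later be located in the map with a simple comparison or binary search. A small sketch of that indexing scheme follows; the constant value and helper names are assumptions for illustration only.

// Illustrative only: the flat (line, column) -> index scheme used by the
// mapping above. The lineSize value is an assumption; it only has to be
// larger than any column that can occur.
const int lineSize = 0x1000;

static int ToFlatIndex(int line, int column)
{
	return line * lineSize + column;
}

static int LineOf(int flatIndex)   { return flatIndex / lineSize; }
static int ColumnOf(int flatIndex) { return flatIndex % lineSize; }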