public void ResolveBooTokenStartAndEndIndex(antlr.CommonToken token, TokenInfo tokenInfo)
        {
            // Fills tokenInfo.StartIndex/EndIndex for a lexed Boo token.
            // antlr columns are 1-based; the indices we produce are 0-based.
            int oneCharBack = token.getColumn() - 1;
            // getText() can return null (e.g. synthetic tokens) — every length
            // computation below must go through this null-safe value.
            int lengthOfTokenText = token.getText() == null ? 0 : token.getText().Length;
            int oneCharAfterToken = token.getColumn() + lengthOfTokenText;

            // single or double quoted string: span includes both delimiters
            if (token.Type == BooLexer.SINGLE_QUOTED_STRING || token.Type == BooLexer.DOUBLE_QUOTED_STRING)
            {
                tokenInfo.StartIndex = oneCharBack;
                tokenInfo.EndIndex = oneCharAfterToken;
            }
            else if (token.Type == BooLexer.TRIPLE_QUOTED_STRING)
            {
                tokenInfo.StartIndex = oneCharBack;
                // 5 extra characters account for the triple-quote delimiters not
                // present in getText() — NOTE(review): confirm delimiter accounting.
                // (Original dereferenced token.getText() here unguarded — NRE when null.)
                tokenInfo.EndIndex = oneCharBack + 5 + lengthOfTokenText;
            }
            else if (token.Type == 1)
            {
                // antlr token type 1 is EOF — nothing to highlight.
                return;
            }
            else
            {
                tokenInfo.StartIndex = oneCharBack;
                // EndIndex points at the last character of the token.
                // (Original dereferenced token.getText() here unguarded — NRE when null.)
                tokenInfo.EndIndex = oneCharBack + (lengthOfTokenText - 1);
            }
        }
	/// <summary>
	/// Converts an antlr token's position into a <see cref="LexicalInfo"/>
	/// carrying filename, line, start column and end column.
	/// </summary>
	/// <param name="token">Source token; its text may be null.</param>
	protected LexicalInfo ToLexicalInfo(antlr.IToken token)
	{
		int line = token.getLine();
		int startColumn = token.getColumn();
		// getText() can be null (see the null-safe handling elsewhere in this
		// file); the original dereferenced it unguarded and could throw NRE.
		int length = token.getText() == null ? 0 : token.getText().Length;
		int endColumn = startColumn + length;
		String filename = token.getFilename();
		return new LexicalInfo(filename, line, startColumn, endColumn);
	}
 // Routes a recognition failure to the matching report hook:
 // no-viable-alternative errors get the specific handler, everything
 // else falls through to the generic one.
 void OnParserError(antlr.RecognitionException error)
 {
     LexicalInfo location = new LexicalInfo(error.getFilename(), error.getLine(), error.getColumn());
     antlr.NoViableAltException nvae = error as antlr.NoViableAltException;
     if (nvae == null)
     {
         GenericParserError(location, error);
     }
     else
     {
         ParserError(location, nvae);
     }
 }
 /// <summary>
 /// Maps an antlr token's position onto a Boo <see cref="SourceLocation"/>.
 /// </summary>
 public static SourceLocation ToSourceLocation(antlr.IToken token)
 {
     int line = token.getLine();
     int column = token.getColumn();
     return new SourceLocation(line, column);
 }
 /// <summary>
 /// Maps an antlr token's position (plus filename) onto a Boo
 /// <see cref="LexicalInfo"/>.
 /// </summary>
 public static LexicalInfo ToLexicalInfo(antlr.IToken token)
 {
     string filename = token.getFilename();
     int line = token.getLine();
     int column = token.getColumn();
     return new LexicalInfo(filename, line, column);
 }
 /// <summary>
 /// Returns the <see cref="SourceLocation"/> of the token's LAST character
 /// (column + text length - 1).
 /// </summary>
 /// <param name="token">Source token; its text may be null.</param>
 public static SourceLocation ToEndSourceLocation(antlr.IToken token)
 {
     // getText() may be null — the duplicate of this method later in the
     // file already guards it; the original here could throw NRE.
     string text = token.getText() ?? "";
     return new SourceLocation(token.getLine(), token.getColumn() + text.Length - 1);
 }
		/// <summary>
		/// Returns the <see cref="SourceLocation"/> of the token's LAST
		/// character; a null token text is treated as zero-length.
		/// </summary>
		public static SourceLocation ToEndSourceLocation(antlr.IToken token)
		{
			int length = token.getText() == null ? 0 : token.getText().Length;
			int endColumn = token.getColumn() + length - 1;
			return new SourceLocation(token.getLine(), endColumn);
		}
Example #8
0
        // Dispatches a parser failure: specific no-viable-alternative errors go
        // to ParserError, anything else is recorded on the compiler context as
        // a generic parser error.
        void OnParserError(antlr.RecognitionException error)
        {
            var location = new LexicalInfo(error.getFilename(), error.getLine(), error.getColumn());

            var noViableAlt = error as antlr.NoViableAltException;
            if (noViableAlt == null)
                _context.Errors.Add(CompilerErrorFactory.GenericParserError(location, error));
            else
                ParserError(location, noViableAlt);
        }
 // Builds a new BooToken positioned immediately after the prototype's text,
 // inheriting the prototype's filename and line.
 antlr.IToken CreateToken(antlr.IToken prototype, int newTokenType, string newTokenText)
 {
     int column = prototype.getColumn() + SafeGetLength(prototype.getText());
     return new BooToken(newTokenType, newTokenText, prototype.getFilename(), prototype.getLine(), column);
 }
Example #10
0
        /// <summary>
        /// Computes the (endLine, endIndex) position where <paramref name="token"/>
        /// ends, records any whitespace gap before it, and appends a
        /// <c>MappedToken</c> for it to <c>tokenMap</c>.
        /// </summary>
        /// <param name="token">Token whose endpoint is being calculated.</param>
        /// <param name="endLine">End line of the previous token (0-based).</param>
        /// <param name="endIndex">End index of the previous token.</param>
        /// <param name="delimiterLength">Length of the token's delimiter, added on both sides.</param>
        /// <exception cref="ArgumentException">Thrown when tokens are not appended in increasing index order.</exception>
        private Tuple<int, int> CalculateEndpoint(antlr.IToken token, int endLine, int endIndex, int delimiterLength)
        {
            // antlr positions are 1-based; positionMap translates to buffer indices.
            var startIndex = positionMap[token.getLine() - 1][token.getColumn() - 1];
            var startLine = token.getLine() - 1;

            // Any gap between the previous endpoint and this token's start is whitespace.
            if (startLine > endLine || startLine == endLine && startIndex > endIndex)
                whitespaces.Add(new TextSpan { iStartLine = endLine, iStartIndex = endIndex, iEndLine = startLine, iEndIndex = startIndex });

            endLine = startLine - 1;
            endIndex = 0;

            // Walk the token text line by line (CRLF-separated) to locate its end.
            // getText() may be null for synthetic tokens — the original would
            // throw NRE on Split; treat null as empty text instead.
            var runningIndex = startIndex + delimiterLength;
            foreach (var part in (token.getText() ?? "").Split(new[] { "\r\n" }, StringSplitOptions.None))
            {
                endLine++;
                endIndex = runningIndex + part.Length;
                runningIndex = 0; // continuation lines restart at column 0
            }
            endIndex += delimiterLength;

            //endIndex = positionMap[endLine][endIndex];

            var cluster = new MappedToken(
                startLine * lineSize + startIndex,
                endIndex - startIndex);

            // Tokens must arrive in strictly increasing index order.
            // Use the O(1) Count property rather than the LINQ Count() extension
            // the original called (twice) on this list.
            if (tokenMap.Count > 0
                && tokenMap[tokenMap.Count - 1].Index >= cluster.Index)
                throw new ArgumentException("Token Mapping order");

            tokenMap.Add(cluster);
            return new Tuple<int, int>(endLine, endIndex);
        }