/// <summary>
/// Parses <paramref name="codeTokenStream"/> into a syntax tree, optionally trying the
/// fast SLL prediction mode first and falling back to full LL parsing on failure.
/// </summary>
/// <param name="sourceCodeFile">Source file being parsed (kept for overload symmetry; not used directly here).</param>
/// <param name="errorListener">Listener that collects syntax errors during the tolerant (LL) parse.</param>
/// <param name="codeTokenStream">Token stream to parse.</param>
/// <param name="initParserFunc">Optional parser factory; defaults to <c>InitParser</c>.</param>
/// <param name="parseFunc">Optional parse entry point; defaults to <c>Parse(Parser)</c>.</param>
/// <returns>The root rule context of the parsed input.</returns>
protected ParserRuleContext ParseTokens(SourceCodeFile sourceCodeFile, AntlrMemoryErrorListener errorListener, BufferedTokenStream codeTokenStream, Func<ITokenStream, Parser> initParserFunc = null, Func<Parser, ParserRuleContext> parseFunc = null)
{
    Parser parser = initParserFunc != null ? initParserFunc(codeTokenStream) : InitParser(codeTokenStream);
    parser.RemoveErrorListeners();
    Parser = parser;
    ParserRuleContext syntaxTree;

    if (UseFastParseStrategyAtFirst)
    {
        // Two-stage parse: SLL prediction with a bail-out strategy is much faster on
        // well-formed input; if it bails, retry with the full LL algorithm and real
        // error reporting.
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.ErrorHandler = new BailErrorStrategy();
        parser.TrimParseTree = true;

        parserLock.EnterReadLock();
        try
        {
            try
            {
                syntaxTree = parseFunc != null ? parseFunc(parser) : Parse(parser);
            }
            catch (ParseCanceledException)
            {
                // Fast parse failed: reset and retry with the slow, tolerant strategy.
                // BUGFIX: the read lock is now held across the retry. Previously the
                // lock was released and re-acquired here, so any exception thrown in
                // between (e.g. from Reset()) made the finally below call
                // ExitReadLock without owning the lock, raising
                // SynchronizationLockException and masking the original error.
                parser.AddErrorListener(errorListener);
                codeTokenStream.Reset();
                parser.Reset();
                parser.Interpreter.PredictionMode = PredictionMode.Ll;
                parser.ErrorHandler = new DefaultErrorStrategy();
                syntaxTree = parseFunc != null ? parseFunc(parser) : Parse(parser);
            }
        }
        finally
        {
            parserLock.ExitReadLock();
        }
    }
    else
    {
        parser.AddErrorListener(errorListener);
        parserLock.EnterReadLock();
        try
        {
            syntaxTree = parseFunc != null ? parseFunc(parser) : Parse(parser);
        }
        finally
        {
            parserLock.ExitReadLock();
        }
    }

    ClearParserCacheIfRequired(parser);
#if DEBUG
    // Debug aid only: materialize the LISP-style tree string for inspection.
    var tree = syntaxTree.ToStringTree(parser);
#endif
    return syntaxTree;
}
/// <summary>
/// Lexes were already done by the caller; this parses the given tokens into a parse
/// tree and reports how long parsing took.
/// </summary>
/// <param name="tokens">All tokens of the source, including comment-channel tokens.</param>
/// <param name="parserTimeSpan">Elapsed parse time; <see cref="TimeSpan.Zero"/> if parsing failed before timing completed.</param>
/// <returns>The resulting parse tree, or <c>null</c> when parsing failed (the error is logged).</returns>
public ParseTree Parse(IList<IToken> tokens, out TimeSpan parserTimeSpan)
{
    if (SourceFile == null)
    {
        throw new ArgumentNullException(nameof(SourceFile));
    }

    if (ErrorListener == null)
    {
        ErrorListener = new AntlrMemoryErrorListener();
        ErrorListener.Logger = Logger;
        ErrorListener.LineOffset = LineOffset;
    }
    ErrorListener.SourceFile = SourceFile;

    // BUGFIX: the out parameter must be definitely assigned on every path that
    // returns; the catch path below swallows the exception and returns, which
    // previously left parserTimeSpan unassigned (CS0177).
    parserTimeSpan = TimeSpan.Zero;

    AntlrParseTree result = null;
    try
    {
        // Comment-channel tokens are collected separately so they survive parsing.
        var commentTokens = new List<IToken>();
        foreach (IToken token in tokens)
        {
            if (token.Channel == CommentsChannel)
            {
                commentTokens.Add(token);
            }
        }

        var stopwatch = Stopwatch.StartNew();
        var codeTokenSource = new ListTokenSource(tokens);
        var codeTokenStream = new CommonTokenStream(codeTokenSource);
        ParserRuleContext syntaxTree = ParseTokens(ErrorListener, codeTokenStream);
        stopwatch.Stop();
        parserTimeSpan = stopwatch.Elapsed;

        result = Create(syntaxTree);
        result.Tokens = tokens;
        result.Comments = commentTokens;
        result.SourceFile = SourceFile;
    }
    catch (Exception ex) when (!(ex is ThreadAbortException))
    {
        // Best-effort: log and return null rather than failing the whole run.
        Logger.LogError(new ParsingException(SourceFile, ex));
    }
    finally
    {
        HandleMemoryConsumption();
    }

    return result;
}
/// <summary>
/// Tokenizes <paramref name="sourceFile"/> with the language's lexer and reports how
/// long lexing took.
/// </summary>
/// <param name="sourceFile">File whose (preprocessed) text is lexed.</param>
/// <param name="lexerTimeSpan">Elapsed lexing time; <see cref="TimeSpan.Zero"/> if lexing failed before timing completed.</param>
/// <returns>All tokens on all channels; an empty list when lexing failed (the error is logged).</returns>
public IList<IToken> GetTokens(TextFile sourceFile, out TimeSpan lexerTimeSpan)
{
    SourceFile = sourceFile;
    if (ErrorListener == null)
    {
        ErrorListener = new AntlrMemoryErrorListener();
        ErrorListener.Logger = Logger;
        ErrorListener.LineOffset = LineOffset;
    }
    ErrorListener.SourceFile = sourceFile;

    // BUGFIX: the out parameter must be definitely assigned on every path that
    // returns; the catch path below returns an empty token list and previously
    // left lexerTimeSpan unassigned (CS0177).
    lexerTimeSpan = TimeSpan.Zero;

    var preprocessedText = PreprocessText(sourceFile);
    AntlrInputStream inputStream;
    if (Language.IsCaseInsensitive())
    {
        // Case-insensitive languages get a stream that normalizes case for the lexer.
        inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
    }
    else
    {
        inputStream = new AntlrInputStream(preprocessedText);
    }
    inputStream.name = sourceFile.RelativeName;

    IList<IToken> tokens;
    try
    {
        var stopwatch = Stopwatch.StartNew();
        Lexer lexer = InitLexer(inputStream);
        // Fresh simulator over a cached ATN keeps lexers independent across threads.
        lexer.Interpreter = new LexerATNSimulator(lexer, GetOrCreateAtn(LexerSerializedATN));
        lexer.RemoveErrorListeners();
        lexer.AddErrorListener(ErrorListener);
        tokens = lexer.GetAllTokens();
        stopwatch.Stop();
        lexerTimeSpan = stopwatch.Elapsed;
    }
    catch (Exception ex)
    {
        // Best-effort: log and fall back to an empty token list.
        Logger.LogError(new LexingException(SourceFile, ex));
        tokens = new List<IToken>();
    }
    finally
    {
        HandleMemoryConsumption();
    }

    return tokens;
}
/// <summary>
/// Builds a parser over <paramref name="codeTokenStream"/> and produces a syntax tree.
/// When <c>UseFastParseStrategyAtFirst</c> is set, the fast SLL prediction mode with a
/// bail-out strategy is attempted first; a <see cref="ParseCanceledException"/> triggers
/// a retry with the full LL algorithm and real error reporting.
/// </summary>
/// <param name="sourceCodeFile">Source file being parsed (kept for overload symmetry; not used directly here).</param>
/// <param name="errorListener">Listener attached for the tolerant parse.</param>
/// <param name="codeTokenStream">Token stream to parse.</param>
/// <param name="initParserFunc">Optional parser factory; defaults to <c>InitParser</c>.</param>
/// <param name="parseFunc">Optional parse entry point; defaults to <c>Parse(Parser)</c>.</param>
/// <returns>The root rule context of the parsed input.</returns>
protected ParserRuleContext ParseTokens(CodeFile sourceCodeFile, AntlrMemoryErrorListener errorListener, BufferedTokenStream codeTokenStream, Func<ITokenStream, Parser> initParserFunc = null, Func<Parser, ParserRuleContext> parseFunc = null)
{
    Parser parser = initParserFunc == null ? InitParser(codeTokenStream) : initParserFunc(codeTokenStream);
    parser.Interpreter = new ParserATNSimulator(parser, GetOrCreateAtn(false));
    parser.RemoveErrorListeners();
    Parser = parser;

    if (!UseFastParseStrategyAtFirst)
    {
        // Single-pass mode: full parse with error reporting from the start.
        parser.AddErrorListener(errorListener);
        return parseFunc == null ? Parse(parser) : parseFunc(parser);
    }

    // Fast path: SLL prediction bails out on the first ambiguity/error.
    parser.Interpreter.PredictionMode = PredictionMode.Sll;
    parser.ErrorHandler = new BailErrorStrategy();
    parser.TrimParseTree = true;

    ParserRuleContext syntaxTree = null;
    try
    {
        syntaxTree = parseFunc == null ? Parse(parser) : parseFunc(parser);
    }
    catch (ParseCanceledException)
    {
        // Fast parse gave up: rewind the stream, reset the parser, and reparse
        // with the tolerant LL strategy so errors are collected properly.
        parser.AddErrorListener(errorListener);
        codeTokenStream.Reset();
        parser.Reset();
        parser.Interpreter.PredictionMode = PredictionMode.Ll;
        parser.ErrorHandler = new DefaultErrorStrategy();
        syntaxTree = parseFunc == null ? Parse(parser) : parseFunc(parser);
    }

    return syntaxTree;
}
/// <summary>
/// Full pipeline for one file: preprocess, lex, split out comment tokens, parse, and
/// wrap everything in an <c>AntlrParseTree</c> with lexer/parser timings attached.
/// </summary>
/// <param name="sourceCodeFile">File to parse; a <c>null</c> <c>Code</c> yields an empty result tree.</param>
/// <returns>The resulting parse tree; never <c>null</c> (errors are logged and produce an empty tree).</returns>
public ParseTree Parse(CodeFile sourceCodeFile)
{
    AntlrParseTree result = null;
    var filePath = sourceCodeFile.RelativeName;
    if (sourceCodeFile.Code != null)
    {
        var errorListener = new AntlrMemoryErrorListener();
        errorListener.CodeFile = sourceCodeFile;
        errorListener.Logger = Logger;
        errorListener.LineOffset = LineOffset;
        try
        {
            var preprocessedText = PreprocessText(sourceCodeFile);
            AntlrInputStream inputStream;
            if (Language.IsCaseInsensitive)
            {
                // Case-insensitive languages get a stream that normalizes case for the lexer.
                inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
            }
            else
            {
                inputStream = new AntlrInputStream(preprocessedText);
            }
            inputStream.name = filePath;

            Lexer lexer = InitLexer(inputStream);
            lexer.Interpreter = new LexerATNSimulator(lexer, GetOrCreateAtn(true));
            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(errorListener);
            var commentTokens = new List<IToken>();

            var stopwatch = Stopwatch.StartNew();
            IList<IToken> tokens = lexer.GetAllTokens();
            stopwatch.Stop();
            // BUGFIX: Stopwatch.ElapsedTicks are frequency-dependent stopwatch ticks,
            // not 100 ns TimeSpan ticks; new TimeSpan(ElapsedTicks) produced wrong
            // durations on machines where Stopwatch.Frequency != TimeSpan.TicksPerSecond.
            // Use Stopwatch.Elapsed, which performs the conversion correctly.
            TimeSpan lexerTimeSpan = stopwatch.Elapsed;

            foreach (IToken token in tokens)
            {
                if (token.Channel == CommentsChannel)
                {
                    commentTokens.Add(token);
                }
            }

            stopwatch.Restart();
            var codeTokenSource = new ListTokenSource(tokens);
            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            ParserRuleContext syntaxTree = ParseTokens(sourceCodeFile, errorListener, codeTokenStream);
            stopwatch.Stop();
            TimeSpan parserTimeSpan = stopwatch.Elapsed;

            result = Create(syntaxTree);
            result.LexerTimeSpan = lexerTimeSpan;
            result.ParserTimeSpan = parserTimeSpan;
            result.Tokens = tokens;
            result.Comments = commentTokens;
        }
        catch (Exception ex) when (!(ex is ThreadAbortException))
        {
            // Best-effort: log and fall back to an empty tree for this file.
            Logger.LogError(new ParsingException(sourceCodeFile, ex));
            if (result == null)
            {
                result = Create(null);
            }
        }
        finally
        {
            // Progress counters are shared across worker threads.
            Interlocked.Increment(ref processedFilesCount);
            Interlocked.Add(ref processedBytesCount, sourceCodeFile.Code.Length);
        }
    }
    else
    {
        result = Create(null);
    }

    result.SourceCodeFile = sourceCodeFile;
    return result;
}
/// <summary>
/// Full pipeline for one file: preprocess, lex, split out comment tokens, parse, and
/// wrap everything in an <c>AntlrParseTree</c> with lexer/parser timings attached.
/// </summary>
/// <param name="sourceCodeFile">File to parse; a <c>null</c> <c>Code</c> yields an empty result tree.</param>
/// <returns>The resulting parse tree; never <c>null</c> (errors are logged and produce an empty tree).</returns>
protected virtual ParseTree TokenizeAndParse(SourceCodeFile sourceCodeFile)
{
    AntlrParseTree result = null;
    var filePath = Path.Combine(sourceCodeFile.RelativePath, sourceCodeFile.Name);
    if (sourceCodeFile.Code != null)
    {
        var errorListener = new AntlrMemoryErrorListener();
        errorListener.FileName = filePath;
        errorListener.FileData = sourceCodeFile.Code;
        errorListener.Logger = Logger;
        errorListener.LineOffset = sourceCodeFile.LineOffset;
        try
        {
            var preprocessedText = PreprocessText(sourceCodeFile);
            AntlrInputStream inputStream;
            if (Language.IsCaseInsensitive())
            {
                // Case-insensitive languages get a stream that normalizes case for the lexer.
                inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
            }
            else
            {
                inputStream = new AntlrInputStream(preprocessedText);
            }
            inputStream.name = filePath;

            Lexer lexer = InitLexer(inputStream);
            Lexer = lexer;
            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(errorListener);
            var commentTokens = new List<IToken>();

            var stopwatch = Stopwatch.StartNew();
            IList<IToken> tokens = GetAllTokens(lexer);
            stopwatch.Stop();
            // BUGFIX: Stopwatch.ElapsedTicks are frequency-dependent stopwatch ticks,
            // not 100 ns TimeSpan ticks; new TimeSpan(ElapsedTicks) produced wrong
            // durations on machines where Stopwatch.Frequency != TimeSpan.TicksPerSecond.
            // Use Stopwatch.Elapsed, which performs the conversion correctly.
            TimeSpan lexerTimeSpan = stopwatch.Elapsed;
#if DEBUG
            // Debug aid only: human-readable dump of the full token stream.
            var codeTokensStr = AntlrHelper.GetTokensString(tokens, Vocabulary, onlyDefaultChannel: false);
#endif
            ClearLexerCacheIfRequired(lexer);

            foreach (var token in tokens)
            {
                if (token.Channel == CommentsChannel)
                {
                    commentTokens.Add(token);
                }
            }

            stopwatch.Restart();
            var codeTokenSource = new ListTokenSource(tokens);
            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            ParserRuleContext syntaxTree = ParseTokens(sourceCodeFile, errorListener, codeTokenStream);
            stopwatch.Stop();
            TimeSpan parserTimeSpan = stopwatch.Elapsed;

            IncrementProcessedFilesCount();
            result = Create(syntaxTree);
            result.LexerTimeSpan = lexerTimeSpan;
            result.ParserTimeSpan = parserTimeSpan;
            result.Tokens = tokens;
            result.Comments = commentTokens;
        }
        catch (Exception ex)
        {
            // NOTE(review): unlike the sibling Parse(CodeFile) overload, this catch has
            // no "when (!(ex is ThreadAbortException))" filter — confirm whether thread
            // aborts should be swallowed here. Behavior kept as-is.
            Logger.LogError(new ParsingException(filePath, ex));
            if (result == null)
            {
                result = Create(null);
            }
        }
    }
    else
    {
        result = Create(null);
    }

    result.FileName = filePath;
    result.FileData = sourceCodeFile.Code;
    return result;
}