/// <summary>
/// Parses an already-lexed token list into a parse tree, reporting how long
/// the parser stage took via <paramref name="parserTimeSpan"/>.
/// </summary>
/// <param name="tokens">Tokens produced by a prior lexing stage.</param>
/// <param name="parserTimeSpan">Elapsed parser time; <see cref="TimeSpan.Zero"/> if parsing failed.</param>
/// <returns>The resulting parse tree, or null if parsing threw (the error is logged).</returns>
/// <exception cref="ArgumentNullException">If <see cref="SourceFile"/> has not been set.</exception>
public ParseTree Parse(IList<IToken> tokens, out TimeSpan parserTimeSpan)
{
    if (SourceFile == null)
    {
        throw new ArgumentNullException(nameof(SourceFile));
    }

    // BUG FIX: an out parameter must be definitely assigned on every exit
    // path. Previously it was only set inside the try block, so the
    // swallow-and-return catch path left it unassigned.
    parserTimeSpan = TimeSpan.Zero;

    // Lazily create the shared error listener on first use.
    if (ErrorListener == null)
    {
        ErrorListener = new AntlrMemoryErrorListener();
        ErrorListener.Logger = Logger;
        ErrorListener.LineOffset = LineOffset;
    }
    ErrorListener.SourceFile = SourceFile;

    AntlrParseTree result = null;
    try
    {
        // Collect comment-channel tokens so they can be attached to the tree.
        var commentTokens = new List<IToken>();
        foreach (IToken token in tokens)
        {
            if (token.Channel == CommentsChannel)
            {
                commentTokens.Add(token);
            }
        }

        var stopwatch = Stopwatch.StartNew();
        var codeTokenSource = new ListTokenSource(tokens);
        var codeTokenStream = new CommonTokenStream(codeTokenSource);
        ParserRuleContext syntaxTree = ParseTokens(ErrorListener, codeTokenStream);
        stopwatch.Stop();
        parserTimeSpan = stopwatch.Elapsed;

        result = Create(syntaxTree);
        result.Tokens = tokens;
        result.Comments = commentTokens;
        result.SourceFile = SourceFile;
    }
    catch (Exception ex) when (!(ex is ThreadAbortException))
    {
        // Log and continue; ThreadAbortException must propagate.
        Logger.LogError(new ParsingException(SourceFile, ex));
    }
    finally
    {
        HandleMemoryConsumption();
    }

    return result;
}
/// <summary>
/// Lexes and parses a source code file, recording lexer and parser timings
/// on the resulting tree. Always returns a non-null tree; on failure (or a
/// null <c>Code</c>) an empty tree is created and the error is logged.
/// </summary>
/// <param name="sourceCodeFile">The file to tokenize and parse.</param>
/// <returns>A parse tree with <c>SourceCodeFile</c> set.</returns>
public ParseTree Parse(CodeFile sourceCodeFile)
{
    AntlrParseTree result = null;
    var filePath = sourceCodeFile.RelativeName;
    if (sourceCodeFile.Code != null)
    {
        var errorListener = new AntlrMemoryErrorListener();
        errorListener.CodeFile = sourceCodeFile;
        errorListener.Logger = Logger;
        errorListener.LineOffset = LineOffset;
        try
        {
            var preprocessedText = PreprocessText(sourceCodeFile);
            AntlrInputStream inputStream;
            if (Language.IsCaseInsensitive)
            {
                inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
            }
            else
            {
                inputStream = new AntlrInputStream(preprocessedText);
            }
            inputStream.name = filePath;

            Lexer lexer = InitLexer(inputStream);
            lexer.Interpreter = new LexerATNSimulator(lexer, GetOrCreateAtn(true));
            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(errorListener);

            var commentTokens = new List<IToken>();

            var stopwatch = Stopwatch.StartNew();
            IList<IToken> tokens = lexer.GetAllTokens();
            stopwatch.Stop();
            // BUG FIX: Stopwatch.ElapsedTicks is expressed in
            // Stopwatch.Frequency units, which are NOT guaranteed to equal
            // TimeSpan's 100 ns ticks; new TimeSpan(ElapsedTicks) silently
            // mis-scales on such systems. Stopwatch.Elapsed converts correctly.
            TimeSpan lexerTimeSpan = stopwatch.Elapsed;

            // Collect comment-channel tokens so they can be attached to the tree.
            foreach (IToken token in tokens)
            {
                if (token.Channel == CommentsChannel)
                {
                    commentTokens.Add(token);
                }
            }

            stopwatch.Restart();
            var codeTokenSource = new ListTokenSource(tokens);
            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            ParserRuleContext syntaxTree = ParseTokens(sourceCodeFile, errorListener, codeTokenStream);
            stopwatch.Stop();
            TimeSpan parserTimeSpan = stopwatch.Elapsed;

            result = Create(syntaxTree);
            result.LexerTimeSpan = lexerTimeSpan;
            result.ParserTimeSpan = parserTimeSpan;
            result.Tokens = tokens;
            result.Comments = commentTokens;
        }
        catch (Exception ex) when (!(ex is ThreadAbortException))
        {
            // Log and fall through to an empty tree; ThreadAbortException propagates.
            Logger.LogError(new ParsingException(sourceCodeFile, ex));
            if (result == null)
            {
                result = Create(null);
            }
        }
        finally
        {
            // Processing statistics are updated whether or not parsing succeeded.
            Interlocked.Increment(ref processedFilesCount);
            Interlocked.Add(ref processedBytesCount, sourceCodeFile.Code.Length);
        }
    }
    else
    {
        result = Create(null);
    }
    result.SourceCodeFile = sourceCodeFile;
    return result;
}
/// <summary>
/// Tokenizes and parses a source code file, recording lexer and parser
/// timings on the resulting tree. Always returns a non-null tree; on
/// failure (or a null <c>Code</c>) an empty tree is created and the
/// error is logged.
/// </summary>
/// <param name="sourceCodeFile">The file to tokenize and parse.</param>
/// <returns>A parse tree with <c>FileName</c> and <c>FileData</c> set.</returns>
protected virtual ParseTree TokenizeAndParse(SourceCodeFile sourceCodeFile)
{
    AntlrParseTree result = null;
    var filePath = Path.Combine(sourceCodeFile.RelativePath, sourceCodeFile.Name);
    if (sourceCodeFile.Code != null)
    {
        var errorListener = new AntlrMemoryErrorListener();
        errorListener.FileName = filePath;
        errorListener.FileData = sourceCodeFile.Code;
        errorListener.Logger = Logger;
        errorListener.LineOffset = sourceCodeFile.LineOffset;
        try
        {
            var preprocessedText = PreprocessText(sourceCodeFile);
            AntlrInputStream inputStream;
            if (Language.IsCaseInsensitive())
            {
                inputStream = new AntlrCaseInsensitiveInputStream(preprocessedText, CaseInsensitiveType);
            }
            else
            {
                inputStream = new AntlrInputStream(preprocessedText);
            }
            inputStream.name = filePath;

            Lexer lexer = InitLexer(inputStream);
            Lexer = lexer;
            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(errorListener);

            var commentTokens = new List<IToken>();

            var stopwatch = Stopwatch.StartNew();
            IList<IToken> tokens = GetAllTokens(lexer);
            stopwatch.Stop();
            // BUG FIX: Stopwatch.ElapsedTicks is expressed in
            // Stopwatch.Frequency units, which are NOT guaranteed to equal
            // TimeSpan's 100 ns ticks; new TimeSpan(ElapsedTicks) silently
            // mis-scales on such systems. Stopwatch.Elapsed converts correctly.
            TimeSpan lexerTimeSpan = stopwatch.Elapsed;

#if DEBUG
            var codeTokensStr = AntlrHelper.GetTokensString(tokens, Vocabulary, onlyDefaultChannel: false);
#endif
            ClearLexerCacheIfRequired(lexer);

            // Collect comment-channel tokens so they can be attached to the tree.
            foreach (var token in tokens)
            {
                if (token.Channel == CommentsChannel)
                {
                    commentTokens.Add(token);
                }
            }

            stopwatch.Restart();
            var codeTokenSource = new ListTokenSource(tokens);
            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            ParserRuleContext syntaxTree = ParseTokens(sourceCodeFile, errorListener, codeTokenStream);
            stopwatch.Stop();
            TimeSpan parserTimeSpan = stopwatch.Elapsed;

            IncrementProcessedFilesCount();
            result = Create(syntaxTree);
            result.LexerTimeSpan = lexerTimeSpan;
            result.ParserTimeSpan = parserTimeSpan;
            result.Tokens = tokens;
            result.Comments = commentTokens;
        }
        // CONSISTENCY FIX: sibling Parse overloads exclude ThreadAbortException
        // from the catch-all; a bare catch here would swallow thread aborts.
        catch (Exception ex) when (!(ex is ThreadAbortException))
        {
            Logger.LogError(new ParsingException(filePath, ex));
            if (result == null)
            {
                result = Create(null);
            }
        }
    }
    else
    {
        result = Create(null);
    }
    result.FileName = filePath;
    result.FileData = sourceCodeFile.Code;
    return result;
}