/// <summary>
/// Runs the grammar-check stage over every file of the input grammar and
/// returns the accumulated result state.
/// </summary>
/// <param name="inputState">State from the previous workflow stage; supplies the grammar to check.</param>
/// <param name="cancellationToken">Token observed between files to abort processing early.</param>
/// <returns>
/// The populated <see cref="GrammarCheckedState"/>. On failure its
/// <c>Exception</c> property is set instead of the exception propagating.
/// </returns>
public GrammarCheckedState Check(InputState inputState, CancellationToken cancellationToken = default)
{
    var grammar = inputState.Grammar;
    _result = new GrammarCheckedState(inputState);

    try
    {
        var errorListener = new AntlrErrorListener();
        errorListener.ErrorEvent += ErrorEvent;
        errorListener.ErrorEvent += (source, parsingError) =>
        {
            // Errors can be raised from parsing callbacks; guard the shared list.
            lock (_result.Errors)
            {
                _result.Errors.Add(parsingError);
            }
        };

        foreach (string grammarFileName in grammar.Files)
        {
            ProcessGrammarFile(grammar, grammarFileName, errorListener, cancellationToken);
        }
    }
    catch (Exception ex)
    {
        _result.Exception = ex;

        // Cancellation is an expected shutdown path; only surface real failures.
        if (!(ex is OperationCanceledException))
        {
            ErrorEvent?.Invoke(this, new ParsingError(ex, WorkflowStage.GrammarChecked));
        }
    }

    return _result;
}
/// <summary>
/// Verifies that an <see cref="AntlrErrorListener"/> can be constructed with a
/// strict logger mock, i.e. construction performs no logger calls.
/// </summary>
public void CreateListener()
{
    // Arrange: strict mock fails the test on any unexpected ILogger call.
    var strictLoggerMock = new Mock<ILogger>(MockBehavior.Strict);

    // Act
    var listener = new AntlrErrorListener(strictLoggerMock.Object);

    // Assert
    strictLoggerMock.Verify();
    Assert.NotNull(listener);
}
/// <summary>
/// Smoke test: <see cref="AntlrErrorListener.SyntaxError"/> accepts a basic
/// error report (mocked recognizer/output, a real lexer-backed
/// <see cref="RecognitionException"/>) without throwing.
/// </summary>
public void LogSyntaxErrorBasic()
{
    // Arrange
    var loggerMock = new Mock<ILogger>();
    var outputMock = new Mock<TextWriter>();
    var recognizerMock = new Mock<IRecognizer>();
    var streamMock = new Mock<ICharStream>();
    var lexer = new ConfigReferenceLexer(streamMock.Object);
    var listener = new AntlrErrorListener(loggerMock.Object);

    // Act: line 4711, column 42 are arbitrary positions for the report.
    listener.SyntaxError(
        outputMock.Object,
        recognizerMock.Object,
        0,
        4711,
        42,
        "some message to write",
        new RecognitionException(lexer, streamMock.Object));

    // Assert
    Assert.NotNull(listener);
}
/// <summary>
/// Compiles (parses) a formula expression. On success the parse tree is cached
/// in <c>_context</c> for later evaluation; on failure <c>LastError</c>
/// describes the problem and <c>_context</c> is cleared.
/// </summary>
/// <param name="exp">The formula text to parse.</param>
/// <returns><c>true</c> when the whole expression parsed cleanly; otherwise <c>false</c>.</returns>
public bool Parse(string exp)
{
    // Guard clause: null/empty/whitespace input is rejected up front.
    if (string.IsNullOrWhiteSpace(exp))
    {
        LastError = "Parameter exp invalid !";
        return false;
    }

    var stream = new CaseChangingCharStream(new AntlrInputStream(exp));
    var lexer = new mathLexer(stream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new mathParser(tokens);
    var antlrErrorListener = new AntlrErrorListener();
    parser.RemoveErrorListeners();
    parser.AddErrorListener(antlrErrorListener);

    var context = parser.prog();

    // Check for syntax errors BEFORE touching context.Stop: on a failed parse
    // Stop can be null and dereferencing it threw NullReferenceException in
    // the original; this also surfaces the listener's message instead of the
    // generic one when both conditions hold.
    if (antlrErrorListener.IsError)
    {
        _context = null;
        LastError = antlrErrorListener.ErrorMsg;
        return false;
    }

    // Reject trailing text the grammar did not consume (partial match).
    var end = context.Stop?.StopIndex ?? -1;
    if (end + 1 < exp.Length)
    {
        _context = null;
        LastError = "Parameter exp invalid !";
        return false;
    }

    _context = context;
    return true;
}
/// <summary>
/// Reads one grammar file, registers its source text in the result state and,
/// for ANTLR grammar files (extension matching <c>Grammar.AntlrDotExt</c>),
/// parses it with the ANTLRv4 meta-grammar to collect rules, superclasses,
/// action insertions and comment-embedded options.
/// </summary>
/// <param name="grammar">Grammar descriptor; supplies the directory the file lives in.</param>
/// <param name="grammarFileName">File name (relative to <paramref name="grammar"/>'s directory) to process.</param>
/// <param name="antlrErrorListener">Receives lexer/parser errors; its CodeSource is redirected to this file.</param>
/// <param name="cancellationToken">Checked once after tokenization to allow early abort.</param>
private void ProcessGrammarFile(Grammar grammar, string grammarFileName, AntlrErrorListener antlrErrorListener, CancellationToken cancellationToken)
{
    string code = File.ReadAllText(Path.Combine(grammar.Directory, grammarFileName));
    var inputStream = new AntlrInputStream(code);
    var codeSource = new CodeSource(grammarFileName, inputStream.ToString());
    _result.GrammarFilesData.Add(grammarFileName, codeSource);

    // Non-ANTLR files are only recorded in GrammarFilesData, never parsed.
    string extension = Path.GetExtension(grammarFileName);
    if (extension != Grammar.AntlrDotExt)
    {
        return;
    }

    // Lex the grammar text; errors flow to the shared listener.
    antlrErrorListener.CodeSource = codeSource;
    var antlr4Lexer = new ANTLRv4Lexer(inputStream);
    antlr4Lexer.RemoveErrorListeners();
    antlr4Lexer.AddErrorListener(antlrErrorListener);
    var tokens = antlr4Lexer.GetAllTokens();
    var codeTokenSource = new ListTokenSource(tokens);

    cancellationToken.ThrowIfCancellationRequested();

    // Parse the token list with the ANTLRv4 meta-parser.
    var codeTokenStream = new CommonTokenStream(codeTokenSource);
    var antlr4Parser = new ANTLRv4Parser(codeTokenStream);
    antlr4Parser.RemoveErrorListeners();
    antlr4Parser.AddErrorListener(antlrErrorListener);
    var tree = antlr4Parser.grammarSpec();

    // Walk the tree to extract rules, superclass and action/code insertions.
    var grammarInfoCollectorListener = new GrammarInfoCollectorListener();
    grammarInfoCollectorListener.CollectInfo(antlrErrorListener.CodeSource, tree);
    var shortFileName = Path.GetFileNameWithoutExtension(grammarFileName); // NOTE(review): unused below — candidate for removal.
    _result.GrammarActionsTextSpan[grammarFileName] = grammarInfoCollectorListener.CodeInsertions;

    var grammarType = grammarInfoCollectorListener.GrammarType;
    // Lexer and combined grammars supply the lexer superclass; separated
    // (parser) and combined grammars supply the parser superclass and rules.
    if (grammarType == GrammarType.Lexer || grammarType == GrammarType.Combined)
    {
        _result.LexerSuperClass = grammarInfoCollectorListener.SuperClass;
    }
    if (grammarType == GrammarType.Separated || grammarType == GrammarType.Combined)
    {
        _result.ParserSuperClass = grammarInfoCollectorListener.SuperClass;
        _result.Rules = grammarInfoCollectorListener.Rules;
    }

    // Local callback shared by all matchers: forwards the error to
    // subscribers and records it in the result state.
    void ErrorAction(ParsingError parsingError)
    {
        ErrorEvent?.Invoke(this, parsingError);
        _result.Errors.Add(parsingError);
    }

    var caseInsensitiveTypeOptionMatcher = new CaseInsensitiveTypeOptionMatcher(codeSource, grammarType, ErrorAction);
    var runtimeOptionMatcher = new RuntimeOptionMatcher(codeSource, grammarType, ErrorAction);
    var visitorOptionMatcher = new VisitorOptionMatcher(codeSource, grammarType, ErrorAction);
    var listenerOptionMatcher = new ListenerOptionMatcher(codeSource, grammarType, ErrorAction);
    var packageOptionMatcher = new PackageOptionMatcher(codeSource, grammarType, ErrorAction);
    var rootOptionMatcher = new RootOptionMatcher(codeSource, grammarType, ErrorAction, _result.Rules);
    var predictionOptionMatcher = new PredictionModeOptionMatcher(codeSource, grammarType, ErrorAction);

    // Options are embedded in grammar comments; scan every comment token and
    // let the first matcher that recognizes it consume it (hence `continue`).
    foreach (IToken token in tokens)
    {
        if (token.Type == ANTLRv4Lexer.LINE_COMMENT || token.Type == ANTLRv4Lexer.BLOCK_COMMENT)
        {
            if (caseInsensitiveTypeOptionMatcher.Match(token, out var caseInsensitiveType))
            {
                _result.CaseInsensitiveType = caseInsensitiveType;
                continue;
            }
            if (runtimeOptionMatcher.Match(token, out Runtime runtime))
            {
                _result.Runtime = runtime;
                continue;
            }
            if (packageOptionMatcher.Match(token, out string package))
            {
                _result.Package = package;
                continue;
            }
            if (visitorOptionMatcher.Match(token, out bool generateVisitor))
            {
                _result.Visitor = generateVisitor;
                continue;
            }
            if (listenerOptionMatcher.Match(token, out bool generateListener))
            {
                _result.Listener = generateListener;
                continue;
            }
            if (rootOptionMatcher.Match(token, out string root))
            {
                _result.Root = root;
                continue;
            }
            if (predictionOptionMatcher.Match(token, out PredictionMode predictionMode))
            {
                _result.PredictionMode = predictionMode;
                continue;
            }
        }
    }
}
/// <summary>
/// Loads wiki articles from <c>articles.xml</c>, then either restores
/// previously extracted fragments from <c>fragments.xml</c> or parses every
/// article in parallel with the Wiki lexer/parser, persisting the resulting
/// fragments and writing an error log for articles that produced errors.
/// </summary>
void ParseWikiArticles()
{
    Console.Write("Loading articles...");
    XmlSerializer serializer = new XmlSerializer(typeof(Article[]), new XmlRootAttribute() { ElementName = "Articles" });
    // using: the original never disposed this reader, leaking the file handle.
    using (StreamReader reader = File.OpenText("articles.xml"))
    {
        articles = ((Article[])serializer.Deserialize(reader))
            .OrderBy(a => a.PageId)
            .ToDictionary(a => a.PageId, a => a);
    }
    Console.WriteLine(" Done");
    Console.WriteLine(" Articles: " + articles.Count);
    Console.WriteLine();

    int processed = 0;
    int lexerErrors = 0;
    int parserErrors = 0;
    fragments = new List<Fragment>();

    if (File.Exists("fragments.xml"))
    {
        // Fast path: reuse fragments produced by a previous run.
        Console.Write("Loading fragments...");
        serializer = new XmlSerializer(typeof(Fragment[]), new XmlRootAttribute() { ElementName = "Fragments" });
        using (StreamReader reader = File.OpenText("fragments.xml"))
        {
            fragments = ((Fragment[])serializer.Deserialize(reader)).ToList();
        }
        Console.WriteLine(" Done");
        Console.WriteLine(" Fragments: " + fragments.Count);
        Console.WriteLine();
    }
    else
    {
        Console.Write("Parsing articles");
        Stopwatch stopwatch = new Stopwatch();
        stopwatch.Start();
        Parallel.ForEach(articles, kv =>
        {
            Article article = kv.Value;
            // Each iteration gets its own listener/lexer/parser — only the
            // fragments list and the counters are shared across threads.
            AntlrErrorListener ael = new AntlrErrorListener();
            AntlrInputStream inputStream = new AntlrInputStream(article.WikiText);
            WikiLexer lexer = new WikiLexer(inputStream);
            lexer.RemoveErrorListeners();
            lexer.AddErrorListener(ael);
            CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);
            WikiParser parser = new WikiParser(commonTokenStream);
            parser.RemoveErrorListeners();
            parser.AddErrorListener(ael);
            WikiParser.InitContext initContext = parser.init();
            WikiVisitor visitor = new WikiVisitor(article.PageId);
            visitor.VisitInit(initContext);
            article.Errors = ael.ErrorList;
            // fragments is mutated from parallel iterations; serialize access.
            lock (fragments)
                fragments.AddRange(visitor.Fragments);
            Interlocked.Add(ref lexerErrors, ael.LexerErrors);
            Interlocked.Add(ref parserErrors, ael.ParserErrors);
            // Progress dot every 50 articles.
            if (Interlocked.Increment(ref processed) % 50 == 0)
                Console.Write('.');
        });
        stopwatch.Stop();

        // using replaces the manual Close() so the file is flushed and the
        // handle released even if serialization throws.
        serializer = new XmlSerializer(typeof(Fragment[]), new XmlRootAttribute() { ElementName = "Fragments" });
        using (StreamWriter writer = File.CreateText("fragments.xml"))
        {
            serializer.Serialize(writer, fragments.ToArray());
        }
        Console.WriteLine(" Done");
        Console.WriteLine(" Fragments: " + fragments.Count);
        Console.WriteLine(" Parser errors: " + parserErrors);
        Console.WriteLine(" Lexer errors: " + lexerErrors);
        Console.WriteLine(" Parsing time: " + stopwatch.Elapsed.TotalSeconds + " sec");
        Console.WriteLine();

        // Collect per-article errors into a single UTF-8 log file.
        List<string> errorLog = new List<string>();
        foreach (Article article in articles.Values)
        {
            if (article.Errors == null)
                continue;
            if (article.Errors.Count == 0)
                continue;
            errorLog.Add("Статья: " + article.Title);
            errorLog.AddRange(article.Errors);
        }
        File.WriteAllLines("error_log.txt", errorLog.ToArray(), Encoding.UTF8);
    }
}