/// <summary>
/// Builds a syntax processor from its collaborating parser and analyzer services.
/// </summary>
/// <param name="expressionParser">Parser used for expression-level constructs.</param>
/// <param name="tokenParser">Parser used for individual syntax tokens.</param>
/// <param name="syntaxAnalyzer">Analyzer applied to the parsed syntax.</param>
public SyntaxProcessor(IExpressionParser expressionParser, ISyntaxTokenParser tokenParser, ISyntaxAnalyzer syntaxAnalyzer)
{
    _syntaxAnalyzer = syntaxAnalyzer;
    _tokenParser = tokenParser;
    _expressionParser = expressionParser;
}
/// <summary>
/// Creates the callback invoked for each matched syntax node. The callback
/// forwards the underlying Roslyn context and the diagnostic id to the
/// analyzer's sink collection.
/// </summary>
/// <param name="analyzer">Analyzer whose sinks are collected per node.</param>
/// <returns>An action suitable for syntax-node registration.</returns>
public Action<PumaSyntaxNodeAnalysisContext> Report(ISyntaxAnalyzer analyzer)
{
    return context =>
    {
        // Hand the wrapped Roslyn context straight to the analyzer.
        analyzer.GetSinks(context.RosylnContext, context.DiagnosticId);
    };
}
/// <summary>
/// Creates the compilation-start callback that wires the given analyzer into
/// the pipeline: one syntax-node action for its sink kind and one
/// compilation-end action, both keyed by the analyzer's diagnostic id.
/// </summary>
/// <param name="syntaxAnalyzer">Analyzer whose actions are registered.</param>
/// <returns>An action suitable for compilation-start registration.</returns>
private Action<PumaCompilationStartAnalysisContext> RegisterPumaActions(ISyntaxAnalyzer syntaxAnalyzer)
{
    return startContext =>
    {
        var diagnosticId = syntaxAnalyzer.GetDiagnosticId();

        startContext.RegisterSyntaxNodeAction(
            _pumaSyntaxNodeAnalysisContextReporterService.Report(syntaxAnalyzer),
            syntaxAnalyzer.SinkKind,
            diagnosticId);

        startContext.RegisterCompilationEndAction(
            _pumaCompilationAnalysisReporterService.Report(syntaxAnalyzer),
            diagnosticId);
    };
}
/// <summary>
/// Wires the main form to the analysis pipeline: subscribes to view events,
/// installs syntax highlighting, and constructs the lexical, syntax and
/// poliz services.
/// </summary>
/// <param name="view">The main form abstraction this controller drives.</param>
public Controller(IMainForm view)
{
    this.view = view;

    // Route UI events to their handlers.
    view.SourceCodeAnalyzeRequired += View_SourceCodeAnalyzeRequired;
    view.GrammarAnalyzeRequired += View_GrammarAnalyzeRequired;
    view.OpenSourceCodeFileClick += View_OpenSourceCodeFileClick;
    view.SaveFileClick += View_SaveFileClick;
    view.BuildRequired += View_BuildRequired;

    // Highlight only reserved words, type definitions and constants.
    var colorizer = new Colorizer<Lexeme>(new LexemePainter())
    {
        SourceFilter = lexeme => lexeme.Is(LexemeFlags.Reserved | LexemeFlags.TypeDefinition | LexemeFlags.Const),
        WordSelector = lexeme => lexeme.Body
    };
    view.SetColorizer(colorizer);

    fileManager = new FileManager();

    // The poliz generator reads lexer state, so it is created last.
    _lexicalAnalyzer = new LexicalAnalyzer();
    _syntaxAnalyzer = new PDASyntaxAnalyzer(view.SetPDAOutput);
    _polizGenerator = new PolizGenerator(_lexicalAnalyzer);
}
/// <summary>
/// Runs the full analysis pipeline over <paramref name="source"/>: lexical
/// analysis, syntax analysis and poliz generation, updating the view after
/// each stage. On any error the run button is disabled and the poliz cleared.
/// </summary>
/// <param name="source">Raw source code text taken from the editor.</param>
private void View_SourceCodeAnalyzeRequired(string source)
{
    view.EnableRunButton();
    view.HideConsole();

    // Recreate the pipeline so no state leaks between analysis runs.
    _lexicalAnalyzer = new LexicalAnalyzer();
    _syntaxAnalyzer = new PDASyntaxAnalyzer(view.SetPDAOutput);
    _polizGenerator = new PolizGenerator(_lexicalAnalyzer);

    var lexemes = _lexicalAnalyzer.Analyze(source);
    view.HighlightSourceCode(lexemes);
    view.DisplayConstants(_lexicalAnalyzer.Constants.Select(x => new { Code = x.ConstCode, Body = x.Body, Type = x.Type }));
    view.DisplayIdentificators(_lexicalAnalyzer.Identificators.Select(x => new { Code = x.ConstCode, Body = x.Body, Type = x.Type }));
    view.DisplayLexemes(lexemes.Select(x => new { Body = x.Body, Type = x.Flags, Line = x.Line, Code = x.Code, ConstCode = x.ConstCode }));

    if (_lexicalAnalyzer.HasErrors)
    {
        ReportAnalysisFailure(_lexicalAnalyzer.Errors.Select(x => $"Lexical error: {x}"));
    }
    else
    {
        view.EnableRunButton();
        _syntaxAnalyzer.Analyze(lexemes);
        if (_syntaxAnalyzer.HasErrors)
        {
            ReportAnalysisFailure(_syntaxAnalyzer.Errors.Select(x => $"Syntax error: {x}"));
        }
        else
        {
            _polizGenerator.MakePoliz();
        }
    }

    view.SetPoliz(_polizGenerator.Poliz);
}

// Shared failure path for lexical and syntax errors: show them, block
// execution, and discard any previously generated poliz.
private void ReportAnalysisFailure(IEnumerable<string> errors)
{
    ShowErrors(errors);
    view.DisableRunButton();
    _polizGenerator.Clear();
}
/// <summary>
/// Creates the compilation-end callback: drains the analyzer's vulnerable
/// syntax nodes and reports a diagnostic for every non-suppressed node that
/// belongs to the compilation being finished.
/// </summary>
/// <param name="analyzer">Analyzer whose collected nodes are reported.</param>
/// <returns>An action suitable for compilation-end registration.</returns>
public Action<PumaCompilationAnalysisContext> Report(ISyntaxAnalyzer analyzer)
{
    return pumaContext =>
    {
        try
        {
            var context = pumaContext.RosylnContext;
            analyzer.OnCompilationEnd(pumaContext);

            while (!analyzer.VulnerableSyntaxNodes.IsEmpty)
            {
                VulnerableSyntaxNode node;
                if (!analyzer.VulnerableSyntaxNodes.TryPop(out node))
                {
                    continue;
                }

                // Skip nodes from other compilations and anything suppressed.
                if (!context.Compilation.SyntaxTrees.Contains(node.Sink.SyntaxTree) || node.Suppressed)
                {
                    continue;
                }

                var descriptor = analyzer.GetSupportedDiagnosticAttribute().GetDescriptor();
                var diagnosticInfo = new DiagnosticInfo(node.Sink.GetLocation(), node.MessageArgs);
                context.ReportDiagnostic(_diagnosticFactory.Create(descriptor, diagnosticInfo));
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): catching every exception and writing to the console
            // hides analyzer bugs; kept to preserve existing best-effort behavior.
            Console.WriteLine(ex);
        }
    };
}
/// <summary>
/// Private constructor: starts with an empty lexem list and reuses the
/// shared table-driven syntax analyzer instance.
/// </summary>
private Compiler()
{
    LexemList = new LexemList();
    SyntaxAnalyzer = SyntaxAnalyzerWithTable.sharedAnalyzer;
}
/// <summary>
/// Runs the root syntax analyzer over <paramref name="tokenList"/> and checks
/// that both the resulting tree and its errors match <paramref name="root"/>.
/// </summary>
/// <param name="tokenList">Token stream fed to the analyzer.</param>
/// <param name="root">Expected syntax tree, including its expected errors.</param>
private void AssertSyntax(IEnumerable<Token> tokenList, RootNode root)
{
    // Act
    _sys = new RootSyntaxAnalyzer(new Queue<Token>(tokenList));
    var result = _sys.Analyze();

    // Assert: the tree shape first, then the expected parse errors.
    result.Should().Be(root, BecauseObjectGraphsEqual);
    result.Errors.ShouldAllBeEquivalentTo(root.Errors);
}
/// <summary>
/// Runs the syntax analyzer over <paramref name="tokenList"/> and checks the
/// resulting tree against <paramref name="root"/> and the produced errors
/// against <paramref name="errors"/>.
/// </summary>
/// <param name="tokenList">Token stream fed to the analyzer.</param>
/// <param name="root">Expected syntax tree.</param>
/// <param name="errors">Expected parse errors; null means "no errors".</param>
private void AssertSyntax(IEnumerable<Token> tokenList, RootNode root, IList<ParseError> errors)
{
    // Act
    _sys = new SyntaxAnalyzer(new Queue<Token>(tokenList));
    var result = _sys.Analyze();

    // Assert Result
    result.Should().Be(root, BecauseObjectGraphsEqual);

    // Assert Any Expected Errors. Coalesce into a local instead of mutating
    // the parameter — we don't have null collections.
    var expectedErrors = errors ?? new List<ParseError>();
    result.Errors.ShouldAllBeEquivalentTo(expectedErrors);
}