/// <summary>
/// Exports the specified component to a temporary file, loads, and then parses the exported file.
/// </summary>
/// <param name="component">The VBE component whose attributes should be parsed.</param>
/// <returns>Attributes keyed by (member name, declaration type); empty when nothing was exported.</returns>
public IDictionary<Tuple<string, DeclarationType>, Attributes> Parse(VBComponent component)
{
    var exportedPath = _exporter.Export(component);
    if (!File.Exists(exportedPath))
    {
        // A document component without any code wouldn't be exported (file would be empty anyway).
        return new Dictionary<Tuple<string, DeclarationType>, Attributes>();
    }

    var source = File.ReadAllText(exportedPath);
    File.Delete(exportedPath);

    var declarationType = component.Type == vbext_ComponentType.vbext_ct_StdModule
        ? DeclarationType.Module
        : DeclarationType.Class;

    var listener = new AttributeListener(Tuple.Create(component.Name, declarationType));
    var parser = new VBAParser(new CommonTokenStream(new VBALexer(new AntlrInputStream(source))));

    // The parse tree isn't usable for declarations: line numbers are offset by the
    // module header and attributes, which don't show up in the VBE — that's why we
    // parse an exported file in the first place.
    var tree = parser.startRule();
    ParseTreeWalker.Default.Walk(listener, tree);
    return listener.Attributes;
}
/// <summary>
/// Lexes and parses the given QL source, builds the AST, and runs the type
/// checker when no lexer/parser errors were reported.
/// </summary>
/// <param name="inputString">QL source text; null is treated as an empty string.</param>
/// <returns>The root <see cref="Questionnaire"/> node of the AST.</returns>
public Questionnaire BuildAST(string inputString)
{
    // AntlrInputStream buffers the entire stream in its constructor, so the
    // MemoryStream can be disposed right away (the original leaked it).
    AntlrInputStream antlrInputStream;
    using (MemoryStream inputStream = new MemoryStream(Encoding.UTF8.GetBytes(inputString ?? "")))
    {
        antlrInputStream = new AntlrInputStream(inputStream);
    }

    QLLexer lexer = new QLLexer(antlrInputStream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    _parser = new QLParser(tokens);

    // Replace default lexer/parser error listeners with error collectors.
    lexer.RemoveErrorListeners();
    _parser.RemoveErrorListeners();
    lexer.AddErrorListener(new LexerErrorListener() { OnError = LexerErrors.Add });
    _parser.AddErrorListener(new ParserErrorListener() { OnError = ParserErrors.Add });

    // Set manager on partial parser class.
    _parser.SetIdManager(Memory);

    // Build AST.
    _parseTree = _parser.questionnaire();
    AST = _parser.GetAST();

    // Only type-check when the input lexed and parsed cleanly.
    if (!LexerErrors.Any() && !ParserErrors.Any())
    {
        TypeChecker.Run(AST);
    }
    return AST;
}
/// <summary>
/// Parses the units grammar from <paramref name="stream"/>, feeding a semantic
/// listener that populates the returned <see cref="ParserState"/>.
/// </summary>
/// <param name="stream">Character stream containing the grammar input.</param>
/// <param name="export">Export handle; a <see cref="MockExportHandle"/> is substituted when null.</param>
/// <returns>The populated parser state.</returns>
private static ParserState ParseGrammar(ICharStream stream, IExportHandle? export)
{
    var state = new ParserState() { Exporter = export ?? new MockExportHandle() };
    var lexer = new UnitsGrammarLexer(stream);
    var tokens = new Antlr4.Runtime.CommonTokenStream(lexer);
    var parser = new UnitsGrammarParser(tokens) { BuildParseTree = true };
    parser.AddParseListener(new SemanticUnitsListener(state));
    try
    {
        // The listener does all the work during the parse; the tree itself was
        // assigned to an unused local in the original — dropped here.
        parser.prog();
    }
    catch (HandleException he)
    {
        Console.WriteLine($"Error[#{he.ErrorCode}] on line {parser.CurrentToken.Line}: {he.Message}");
        throw;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Error on line {parser.CurrentToken.Line}: {ex.Message}");
        throw;
    }
    return state;
}
/// <summary>
/// Logs the evaluation context, then parses and evaluates this object's
/// <c>expressionValue</c> as an OCL expression against that context.
/// </summary>
/// <param name="c">Evaluation context: variable name to value specification.</param>
/// <returns>The boolean literal produced by the OCL parser.</returns>
public ValueSpecification evaluateExpression(Dictionary<string, ValueSpecification> c)
{
    MascaretApplication.Instance.VRComponentFactory.Log("Evaluate Expression");
    foreach (KeyValuePair<string, ValueSpecification> val in c)
    {
        string valueS = "NotDef";
        ValueSpecification value = val.Value;
        // Exact-type check (subclasses excluded), matching the original
        // GetType().ToString() == "Mascaret.InstanceValue" comparison but
        // without the fragile type-name string literal.
        if (value.GetType() == typeof(InstanceValue))
            valueS = ((InstanceValue)value).SpecValue.getFullName();
        else
            MascaretApplication.Instance.VRComponentFactory.Log(value.GetType().ToString());
        MascaretApplication.Instance.VRComponentFactory.Log("Context : " + val.Key + " = " + valueS);
    }

    OCLExpressionLexer lex = new OCLExpressionLexer(new AntlrInputStream(expressionValue));
    CommonTokenStream tokens = new CommonTokenStream(lex);
    OCLExpressionParser parser = new OCLExpressionParser(tokens);
    parser.context = c;
    parser.expression();

    MascaretApplication.Instance.VRComponentFactory.Log("Parsing : " + expressionValue);
    MascaretApplication.Instance.VRComponentFactory.Log("Nb Erreur : " + parser.NumberOfSyntaxErrors);
    return (LiteralBoolean)(parser.value);
}
/// <summary>
/// Entry point: reads a hard-coded IL file and exercises the MSIL lexer/parser
/// and <see cref="MsilVisitor"/> over several parser rules.
/// </summary>
public static void Main(string[] args)
{
    var path = @"C:\Users\alexander.higgins\source\repos\ILDisassembler\MSILParser\IlDisassembler.il";
    var source = File.ReadAllText(path);

    var charStream = new Antlr4.Runtime.AntlrInputStream(source);
    var lexer = new MSILLexer((ICharStream)charStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new MSILParser(tokenStream);
    parser.BuildParseTree = true;

    // NOTE(review): several rules are invoked in sequence against the same
    // token stream (decls, start twice, decl), mirroring the original
    // exploratory code exactly.
    var declsContext = parser.decls();
    MSILParser.StartContext startContext = parser.start();
    var secondStart = parser.start();

    var visitor = new MsilVisitor();
    visitor.Visit(startContext);
    var declContext = parser.decl();
    visitor.Visit(declContext);
}
/// <summary>
/// Wires up the ANTLR pipeline (input stream, lexer, token stream, parser)
/// for the given QL source text, storing each stage in its field.
/// </summary>
/// <param name="input">QL source text to prepare for parsing.</param>
protected void Build(string input)
{
    var charStream = new AntlrInputStream(input);
    var lexer = new QLLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);

    Inputstream = charStream;
    Lexer = lexer;
    Tokenstream = tokenStream;
    Parser = new QLParser(tokenStream);
}
/// <summary>
/// Parses <paramref name="text"/> as a Java compilation unit and collects the
/// statement subtrees and tokens associated with <paramref name="lineNumber"/>.
/// </summary>
/// <param name="text">Java source text; must not be null.</param>
/// <param name="lineNumber">Zero-based line of interest.</param>
/// <param name="statementTrees">Statement parse trees found for the line, or null on failure.</param>
/// <param name="tokens">All tokens from the lex, or null on failure.</param>
/// <returns>true when parsing succeeded; false (with null outputs) otherwise.</returns>
public static bool TryGetLineStatements(string text, int lineNumber, out IList<IParseTree> statementTrees, out IList<IToken> tokens)
{
    Contract.Requires<ArgumentNullException>(text != null, "text");
    Contract.Requires<ArgumentOutOfRangeException>(lineNumber >= 0);

    try
    {
        var input = new AntlrInputStream(text);
        var lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;

        JavaParser.CompilationUnitContext compilationUnit = parser.compilationUnit();

        statementTrees = null;
        tokens = tokenStream.GetTokens();

        var listener = new AssociatedTreeListener(lineNumber, tokens);
        ParseTreeWalker.Default.Walk(listener, compilationUnit);
        statementTrees = listener.StatementTrees;
        return true;
    }
    catch (Exception e)
    {
        // Critical exceptions (OOM, thread abort, ...) must never be swallowed.
        if (ErrorHandler.IsCriticalException(e))
            throw;

        statementTrees = null;
        tokens = null;
        return false;
    }
}
/// <summary>
/// Runs the CSV reader: lexes and parses the input, walks the parse tree with
/// a <see cref="DataBuilder"/>, and publishes the extracted header and rows.
/// </summary>
/// <returns>true when the input was parsed successfully; false on missing input or any parse failure.</returns>
public bool Process()
{
    if (_input == null)
    {
        return false;
    }

    try
    {
        otCSVLexer lexer = new otCSVLexer(_input);
        lexer.Delimiter = _delimiter;

        // wrap a token-stream around the lexer and create the parser
        Antlr4.Runtime.CommonTokenStream tokens = new Antlr4.Runtime.CommonTokenStream(lexer);
        otCSVParser parser = new otCSVParser(tokens);
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new ErrorListener());

        // parse and walk the tree to build the data
        otCSVParser.CsvbufferContext tree = parser.csvbuffer();
        DataBuilder builder = new DataBuilder(parser);
        Antlr4.Runtime.Tree.ParseTreeWalker.Default.Walk(builder, tree);

        // result
        _header = builder.Header;
        _rows = builder.Rows;
        return true;
    }
    catch (Exception)
    {
        // Deliberate best-effort behavior: any parse failure simply reports
        // false. (The original declared an unused 'ex' variable — removed.)
        return false;
    }
}
/// <summary>
/// Parse-button handler: builds the lexer/parser from the user's program text
/// and runs the assembler pipeline (program split, labels, instructions,
/// addresses, error check, output file). Unexpected failures are reported in
/// a message box.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    textBox2.Text = "";
    sPrograma = this.textBox1.Text;
    var entrada = sPrograma + Environment.NewLine;

    // Build the ANTLR pipeline from the user's input. The original also built
    // a byte[]/MemoryStream copy of the input that was never used (and never
    // disposed) — removed.
    var parametro1 = new AntlrInputStream(entrada);
    SIC_gramaticaLexer lex = new SIC_gramaticaLexer(parametro1);
    Antlr4.Runtime.CommonTokenStream tokens = new Antlr4.Runtime.CommonTokenStream(lex);
    SIC_gramaticaParser parser = new SIC_gramaticaParser(tokens);
    try
    {
        this.separarPrograma();
        this.separarEtiquetas();
        this.separarInstrucciones();
        this.separDirecciones();
        this.buscarErrores();
        this.crearArchivo();
    }
    catch (Exception error)
    {
        MessageBox.Show("A ocurrido un error inesperado " + error.Message);
    }
}
/// <summary>
/// Re-parses the current PHP snapshot, collecting parse errors and outlining
/// trees, then raises the parse-complete event with timing information.
/// </summary>
protected override void ReParseImpl()
{
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    Stopwatch stopwatch = Stopwatch.StartNew();

    ITextDocument textDocument = TextDocument;
    string filename = textDocument != null ? textDocument.FilePath : "<Unknown File>";

    var snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new PhpLexer(input);
    lexer.TokenFactory = new SnapshotTokenFactory(snapshot, lexer);

    var tokens = new CommonTokenStream(lexer);
    var parser = new PhpParser(tokens) { BuildParseTree = true };

    var errors = new List<ParseErrorEventArgs>();
    parser.AddErrorListener(new ErrorListener(filename, errors, outputWindow));

    var compileUnit = parser.compileUnit();

    var outliningListener = new OutliningTreesListener();
    ParseTreeWalker.Default.Walk(outliningListener, compileUnit);

    OnParseComplete(new PhpOutliningParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), compileUnit, outliningListener.OutliningTrees));
}
/// <summary>
/// Parses a grammar snapshot using a two-stage strategy: first the fast SLL
/// prediction mode with a bail-out error strategy; if that fails with a
/// recognition error, the same tokens are re-parsed with full LL prediction
/// and default error recovery.
/// </summary>
internal static AntlrParseResultEventArgs ParseSnapshot(ITextSnapshot snapshot)
{
    Stopwatch timer = Stopwatch.StartNew();

    ITokenSource tokenSource = new GrammarLexer(new AntlrInputStream(snapshot.GetText()));
    CommonTokenStream tokenStream = new CommonTokenStream(tokenSource);
    GrammarParser parser = new GrammarParser(tokenStream);
    List<ParseErrorEventArgs> errors = new List<ParseErrorEventArgs>();

    GrammarParser.GrammarSpecContext parseResult;
    try
    {
        // Fast path: SLL prediction, bail on the first syntax error rather
        // than attempting recovery.
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.RemoveErrorListeners();
        parser.BuildParseTree = true;
        parser.ErrorHandler = new BailErrorStrategy();
        parseResult = parser.grammarSpec();
    }
    catch (ParseCanceledException ex)
    {
        if (!(ex.InnerException is RecognitionException))
            throw;

        // Slow path: full LL prediction with default recovery over the same tokens.
        tokenStream.Reset();
        parser.Interpreter.PredictionMode = PredictionMode.Ll;
        parser.SetInputStream(tokenStream);
        parser.ErrorHandler = new DefaultErrorStrategy();
        parseResult = parser.grammarSpec();
    }

    return new AntlrParseResultEventArgs(snapshot, errors, timer.Elapsed, tokenStream.GetTokens(), parseResult);
}
/// <summary>
/// Verifies that the VBA parser accepts an empty UserForm module: the VERSION
/// header, the Begin/End form-designer block, and the VB_* attribute lines
/// should produce a non-null parse tree. The ExceptionErrorListener installed
/// below turns any syntax error into a thrown exception, so reaching the
/// assertion implies a clean parse.
/// </summary>
public void ParsesEmptyForm() { var code = @" VERSION 5.00 Begin {C62A69F0-16DC-11CE-9E98-00AA00574A4F} Form1 Caption = ""Form1"" ClientHeight = 2640 ClientLeft = 45 ClientTop = 375 ClientWidth = 4710 OleObjectBlob = ""Form1.frx"":0000 StartUpPosition = 1 'CenterOwner End Attribute VB_Name = ""Form1"" Attribute VB_GlobalNameSpace = False Attribute VB_Creatable = False Attribute VB_PredeclaredId = True Attribute VB_Exposed = False "; var stream = new AntlrInputStream(code); var lexer = new VBALexer(stream); var tokens = new CommonTokenStream(lexer); var parser = new VBAParser(tokens); parser.ErrorListeners.Clear(); parser.ErrorListeners.Add(new ExceptionErrorListener()); var tree = parser.startRule(); Assert.IsNotNull(tree); }
/// <summary>
/// Entry point: builds the calculator lexer/parser pipeline over standard
/// input. NOTE(review): no parser rule is invoked here, matching the original
/// — nothing is actually parsed yet.
/// </summary>
public static void Main(string[] args)
{
    var stdin = Console.OpenStandardInput();
    var charStream = new AntlrInputStream(stdin);
    var lexer = new CalculatorLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new CalculatorParser(tokenStream);
}
/// <summary>
/// Lexes the input at <paramref name="codePath"/> and writes each token,
/// formatted via <c>Format</c>, to <paramref name="output"/>.
/// </summary>
private static void Lex(string codePath, TextWriter output)
{
    var lexer = new AdamantLexer(codePath);
    var tokenStream = new CommonTokenStream(lexer);
    tokenStream.Fill();

    foreach (var token in tokenStream.GetTokens())
    {
        output.WriteLine(Format(token));
    }
}
/// <summary>
/// Parses the token stream with the expression grammar and converts the
/// resulting parse tree into an <see cref="Expression"/> via the visitor.
/// </summary>
private static Expression ParseTokens(CommonTokenStream input)
{
    var parser = new ExpressionParser(input);
    ExpressionParser.ExpressionContext root = parser.expression();
    return new ExpressionVisitor().Visit(root);
}
/// <summary>
/// Builds the estimating-expression parser for <paramref name="expression"/>
/// and immediately parses it, storing both the parser and the parse tree.
/// </summary>
public Expression(string expression)
{
    var charStream = new AntlrInputStream(expression);
    var lexer = new EstimatingExpressionEvaluatorLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);
    this.parser = new EstimatingExpressionEvaluatorParser(tokenStream);
    this.parseTree = parser.parse();
}
/// <summary>
/// Reads the propositional-calculus source from <c>_filename</c>, builds the
/// parser, and parses the <c>text</c> start rule into <c>_tree</c>.
/// </summary>
private void Init()
{
    // AntlrInputStream buffers the whole reader in its constructor, so the
    // reader can be disposed immediately (the original never disposed it).
    AntlrInputStream inputStream;
    using (var reader = new System.IO.StreamReader(_filename))
    {
        inputStream = new AntlrInputStream(reader);
    }

    var lexer = new PropositionalCalculusLexer(inputStream);
    var tokenStream = new CommonTokenStream(lexer);
    _parser = new PropositionalCalculusParser(tokenStream) { BuildParseTree = true };
    _tree = _parser.text();
}
/// <summary>
/// Sets up the Zeal CPU lexer/parser over the given character stream and
/// routes parser errors to a <see cref="CpuErrorListener"/> bound to this driver.
/// </summary>
private ZealCpuDriver(ICharStream antlrInputStream)
{
    _lexer = new ZealCpuLexer(antlrInputStream);
    _tokenStream = new CommonTokenStream(_lexer);
    _parser = new ZealCpuParser(_tokenStream);

    // Replace the default console error listener with our own reporting.
    _parser.RemoveErrorListeners();
    _parser.AddErrorListener(new CpuErrorListener(this));
}
/// <summary>
/// Parses a small fixed Clojure snippet and prints the CST produced by the builder.
/// </summary>
public void ParseClojure()
{
    var lexer = new ClojureLexer(new AntlrInputStream("(def lst '(a b c))"));
    var parser = new ClojureParser(new CommonTokenStream(lexer));

    var builder = new CstBuilderForAntlr4(parser);
    builder.Visit(parser.file());
    Console.WriteLine(builder.FinishParsing());
}
/// <summary>
/// Parses <paramref name="rule"/> with the rule-set grammar and returns the
/// Lisp-style string form of the resulting parse tree.
/// </summary>
public String GetParseTreeAsString(String rule)
{
    var lexer = new RuleSetLexer(new AntlrInputStream(rule));
    var parser = new RuleSetParser(new CommonTokenStream(lexer));
    return parser.rule_set().ToStringTree(parser);
}
/// <summary>
/// Parses a DSL grammar file and writes the generated C# source files next to
/// it, lazily yielding the path of each file written.
/// </summary>
/// <param name="file">Path to the grammar file.</param>
/// <returns>Paths of the generated source files, in generation order.</returns>
public static IEnumerable<string> GenerateFile(string file)
{
    var name = Path.GetFileNameWithoutExtension(file);
    var directory = Path.GetDirectoryName(file);

    File representation;
    using (var stream = System.IO.File.OpenRead(file))
    {
        var lexer = new GrammarLexer(new AntlrInputStream(stream));
        var program = new GrammarParser(new CommonTokenStream(lexer)).program();
        var dsl = new Dsl(name);
        dsl.Visit(program);
        representation = dsl.File;
    }

    var outputs = representation.GenerateSource();

    // Markers, messages, and enums are only emitted when there is any message
    // content at all.
    if (outputs.HasMessageContent)
    {
        if (outputs.HasMarkers)
        {
            var markersPath = Path.Combine(directory, $"{representation.Name}MarkerInterfaces.cs");
            System.IO.File.WriteAllText(markersPath, outputs.Markers);
            yield return markersPath;
        }
        if (outputs.HasMessages)
        {
            var messagesPath = Path.Combine(directory, $"{representation.Name}Messages.cs");
            System.IO.File.WriteAllText(messagesPath, outputs.Messages);
            yield return messagesPath;
        }
        if (outputs.HasEnumerations)
        {
            var enumsPath = Path.Combine(directory, $"{representation.Name}Enums.cs");
            System.IO.File.WriteAllText(enumsPath, outputs.Enumerations);
            yield return enumsPath;
        }
    }

    if (outputs.HasStateDefinitions)
    {
        var statePath = Path.Combine(directory, $"{representation.Name}StateSubscriptions.cs");
        System.IO.File.WriteAllText(statePath, outputs.StateDefinitions);
        yield return statePath;
    }

    if (outputs.HasEntities)
    {
        var entitiesPath = Path.Combine(directory, $"{representation.Name}Entities.cs");
        System.IO.File.WriteAllText(entitiesPath, outputs.Entities);
        yield return entitiesPath;
    }
}
/// <summary>
/// Parses the VB source at c:\example2.txt, extracts raw branch constraints
/// via the tree visitor, and AND-combines each leaf constraint with its
/// ancestors to produce the list of path constraints.
/// </summary>
/// <returns>One combined expression per leaf constraint carrying an expression.</returns>
public static List<Expression> getConstraints()
{
    List<Expression> pathConstraints = new List<Expression>();

    IParseTree tree;
    // 'using' guarantees the file handle is released; the original leaked the
    // FileStream. AntlrInputStream buffers the whole file in its constructor,
    // so closing the stream after parsing is safe.
    using (FileStream fileStream = new FileStream(@"c:\example2.txt", FileMode.Open, FileAccess.Read))
    {
        AntlrInputStream input = new AntlrInputStream(fileStream);
        VBGrammarLexer lexer = new VBGrammarLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        VBGrammarParser parser = new VBGrammarParser(tokens);
        tree = parser.startRule();
    }

    // Use visitor pattern to get "raw" constraints; the visit populates
    // eval.rawConstraints as a side effect (its return value was unused).
    VbaTreeVisitor eval = new VbaTreeVisitor();
    eval.Visit(tree);
    List<RawConstraint> rawConstraints = eval.rawConstraints;

    // Create negated expressions for ELSE blocks.
    RawConstraint.updateElseBlocks(rawConstraints);

    // Update constraints to transform into a binary tree and get leaf nodes.
    RawConstraint.updateParents(rawConstraints);
    List<RawConstraint> leafNodes = RawConstraint.getLeafNodes(rawConstraints);

    // For each leaf node, walk up the ancestry to generate path constraints.
    foreach (RawConstraint leaf in leafNodes)
    {
        var parentLine = leaf.ParentLineNumber;
        if (leaf.Expr == null)
        {
            continue;
        }

        RawConstraint parent = findParent(rawConstraints, parentLine);

        // AND the leaf with every ancestor that carries an expression.
        BinaryExpression binExp = (BinaryExpression)leaf.Expr;
        while (parent != null)
        {
            if (parent.Expr == null)
            {
                break;
            }
            binExp = BinaryExpression.And(binExp, parent.Expr);
            parent = findParent(rawConstraints, parent.ParentLineNumber);
        }

        pathConstraints.Add(binExp);
    }
    return pathConstraints;
}
/// <summary>
/// Demonstrates the calculator grammar: parses a fixed equation and prints
/// its parse tree. Based on
/// http://programming-pages.com/2013/12/14/antlr-4-with-c-and-visual-studio-2012/.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    var input = new AntlrInputStream("a = 10*10");
    var lexer = new calculatorLexer(input);
    var parser = new calculatorParser(new CommonTokenStream(lexer));
    var equation = parser.equation();
    Console.WriteLine(equation.ToStringTree(parser));
}
/// <summary>
/// Translates CQL source into a <see cref="Library"/> by parsing the
/// <c>logic</c> rule and visiting the resulting tree.
/// </summary>
public Library TranslateLibrary(string source)
{
    var lexer = new cqlLexer(new AntlrInputStream(source));
    var parser = new cqlParser(new CommonTokenStream(lexer));
    var logicTree = parser.logic();
    return new cqlTranslationVisitor().Visit(logicTree) as Library;
}
/// <summary>
/// Parses a tree-selector expression into its sequence of node selectors.
/// </summary>
public IEnumerable<NodeSelector> Parse(string source)
{
    var lexer = new TreeSelectorLexer(new AntlrInputStream(source));
    var parser = new TreeSelectorParser(new CommonTokenStream(lexer));
    var initContext = parser.init();
    return new TreeSelectorVisitor().VisitInit(initContext);
}
/// <summary>
/// Parses a Twee token stream and converts the parse tree into an object tree.
/// </summary>
public static ObjectTree.Tree Parse2(CommonTokenStream input)
{
    Logger.LogAnalyzer("Parse twee file ...");
    var start = new Twee(input).start();

    var visitor = new TweeVisitor();
    visitor.Visit(start);

    Logger.LogAnalyzer("Convert parse tree into object tree ...");
    return visitor.Tree;
}
/*
 * There's one fairly important difference between mod resolution in rustc and
 * what we do. Given the following code:
 *   mod bar { mod a; }  mod bar { mod b; }
 * we will merge this into mod bar { mod a; mod b; }, but rustc will error out.
 */
/// <summary>
/// Parses the module grammar from <paramref name="stream"/> and collects the
/// imports found in its body.
/// </summary>
public static ModuleImport ParseImports(ICharStream stream)
{
    var lexer = new ModuleLexer(stream);
    var parser = new ModuleParser(new CommonTokenStream(lexer));
    BodyContext root = parser.body();

    var imports = new ModuleImport();
    TraverseForImports(root, imports);
    return imports;
}
/// <summary>
/// Prepares the Objective-C parsing pipeline (lexer, token stream, parser,
/// listener, tree walker) over the given file content.
/// </summary>
public InterfaceExtruder(string objcFileContent)
{
    content = objcFileContent;

    var charStream = new AntlrInputStream(content);
    lexer = new ObjCLexer(charStream);
    tokens = new CommonTokenStream(lexer);
    parser = new ObjCParser(tokens);
    listner = new ObjCListnerImpl();
    walker = new ParseTreeWalker();
}
/// <summary>
/// Parses the specified input.
/// </summary>
/// <param name="input">The input.</param>
/// <returns>The AST for the input.</returns>
/// <exception cref="System.ArgumentNullException">input is null or empty.</exception>
public INode Parse(string input)
{
    if (string.IsNullOrEmpty(input))
        throw new ArgumentNullException("input");

    var lexer = new PNZRLexer(new AntlrInputStream(input));
    var parser = new PNZRParser(new CommonTokenStream(lexer));
    IParseTree tree = parser.program();
    return new PNZRVisitor().Visit(tree);
}
/// <summary>
/// Parses <paramref name="input"/> as a linear integer expression.
/// </summary>
/// <returns>The parsed expression, or null when the input contains syntax errors.</returns>
public static LinearIntegerExpression parseLinearExpression(String input)
{
    var lexer = new SymbolicStringsLexer(new AntlrInputStream(input));
    var parser = new SymbolicStringsParser(new CommonTokenStream(lexer)) { BuildParseTree = true };
    var integerContext = parser.integer();
    return parser.NumberOfSyntaxErrors == 0 ? integerContext.value : null;
}
/// <summary>
/// Tokenizes C++ source, replacing every identifier token's text with "ID"
/// and concatenating all other token text; occurrences of the &lt;EOF&gt;
/// marker text are removed from the result.
/// </summary>
/// <param name="fileContent">C++ source text.</param>
/// <returns>The normalized token string.</returns>
public string Tokenize(string fileContent)
{
    AntlrInputStream inputStream = new AntlrInputStream(fileContent);
    CPP14Lexer lexer = new CPP14Lexer(inputStream);
    Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);
    tokenStream.Fill();

    // StringBuilder instead of repeated string concatenation, which was
    // O(n^2) in the number of tokens.
    var result = new System.Text.StringBuilder();
    foreach (var token in tokenStream.GetTokens())
    {
        result.Append(token.Type == CPP14Lexer.Identifier ? "ID" : token.Text);
    }

    // Same semantics as the original: Replace removes every occurrence.
    return result.ToString().Replace("<EOF>", "");
}
/// <summary>
/// Lexes the text covered by <paramref name="snapshotRange"/> with the ANTLR
/// SBLexer and feeds each token to <paramref name="parseTarget"/>, unless the
/// target already has an initial context for this range.
/// </summary>
/// <returns>The text range that was processed.</returns>
public TextRange Parse(TextSnapshotRange snapshotRange, ActiproLex.ILexerTarget parseTarget)
{
    int startOffset = snapshotRange.StartOffset;
    int preParseOffset = startOffset;
    parseTarget.OnPreParse(ref preParseOffset);

    if (!parseTarget.HasInitialContext)
    {
        int length = snapshotRange.EndOffset - startOffset;
        ITextSnapshotReader reader = snapshotRange.Snapshot.GetReader(startOffset);
        if (reader.Offset != startOffset)
        {
            throw new System.Exception("What??!!?");
        }

        StringBuilder text = new StringBuilder();
        text.Append(reader.ReadText(length));

        var lexer = new SBLexer(new Antlr.AntlrInputStream(text.ToString()));
        var tokens = new Antlr.CommonTokenStream(lexer);
        tokens.Fill();
        foreach (var token in tokens.GetTokens())
        {
            // Tokens with negative types are skipped (EOF and similar synthetic tokens).
            if (token.Type >= 0)
            {
                parseTarget.OnTokenParsed(new SyntaxEditorAntlrToken(token, startOffset, snapshotRange.StartLine.Index), null);
            }
        }
    }

    parseTarget.OnPostParse(snapshotRange.EndOffset);
    return snapshotRange.TextRange;
}
/// <summary>
/// Re-parses the current PHP buffer snapshot and publishes outlining results
/// together with collected errors and elapsed time.
/// </summary>
protected override void ReParseImpl()
{
    var pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    var timer = Stopwatch.StartNew();

    ITextDocument document = TextDocument;
    string sourceName = document == null ? "<Unknown File>" : document.FilePath;

    var currentSnapshot = TextBuffer.CurrentSnapshot;
    var charStream = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
    var phpLexer = new PhpLexer(charStream);
    phpLexer.TokenFactory = new SnapshotTokenFactory(currentSnapshot, phpLexer);

    var tokenStream = new CommonTokenStream(phpLexer);
    var phpParser = new PhpParser(tokenStream);
    phpParser.BuildParseTree = true;

    var parseErrors = new List<ParseErrorEventArgs>();
    phpParser.AddErrorListener(new ErrorListener(sourceName, parseErrors, pane));

    var compileUnit = phpParser.compileUnit();

    var treesListener = new OutliningTreesListener();
    ParseTreeWalker.Default.Walk(treesListener, compileUnit);

    OnParseComplete(new PhpOutliningParseResultEventArgs(currentSnapshot, parseErrors, timer.Elapsed, tokenStream.GetTokens(), compileUnit, treesListener.OutliningTrees));
}
/// <summary>
/// Constructor: parses the PDDL domain and problem files and walks each parse
/// tree with its listener to populate <c>domain</c> and <c>problem</c>.
/// </summary>
/// <param name="domainfile">Path to the PDDL domain file.</param>
/// <param name="problemfile">Path to the PDDL problem file.</param>
public DomainProblem(string domainfile, string problemfile) : this()
{
    this.domain = new DomainListener();
    ParsePddlFile(domainfile, this.domain, parser => parser.domain());

    this.problem = new ProblemListener();
    ParsePddlFile(problemfile, this.problem, parser => parser.problem());
}

/// <summary>
/// Parses one PDDL file with the given start rule and walks the resulting
/// tree with <paramref name="listener"/>. Factored out of the constructor,
/// which previously duplicated this pipeline verbatim for domain and problem.
/// </summary>
/// <param name="path">Path of the PDDL file to parse.</param>
/// <param name="listener">Listener to walk over the parse tree.</param>
/// <param name="startRule">Selects the grammar start rule (domain or problem).</param>
private static void ParsePddlFile(string path, Antlr4.Runtime.Tree.IParseTreeListener listener, System.Func<pddlParser, Antlr4.Runtime.Tree.IParseTree> startRule)
{
    using (var reader = new StreamReader(path))
    {
        var inp = new Antlr4.Runtime.AntlrInputStream(reader);
        var lexer = new pddlLexer(inp);
        var stream = new Antlr4.Runtime.CommonTokenStream(lexer);
        var parser = new pddlParser(stream);
        var tree = startRule(parser);
        var walker = new Antlr4.Runtime.Tree.ParseTreeWalker();
        walker.Walk(listener, tree);
    }
}
// NOTE(review): left byte-identical apart from comments. The statement loop
// below interleaves parsing with mutation of several databases
// (ComponentDatabase, ConnectorDatabase, generator state), and the post-parse
// phase filters global variables and partitions channels by queue size —
// restyling risks disturbing that ordering. Known quirks kept as-is:
// Console.WriteLine used for progress output, and "assetion"/"Syncrhonous"
// spellings match existing member names elsewhere in the class.
/// <summary> /// Parse the specification from string input into objects /// </summary> /// <param name="spec">string input of the model</param> /// <param name="options">option for LTL parsing, usually it is an empty string</param> protected virtual void ParseSpec(string spec, string options) { System.Diagnostics.Debug.WriteLine("parsing spec... "); IsParsing = true; if (GlobalConstantDatabase == null) { GlobalConstantDatabase = new Dictionary <string, Expression>(); } // Start parsing ADL AntlrInputStream inputStream = new AntlrInputStream(spec); ADLLexer speakLexer = new ADLLexer(inputStream); Antlr4.Runtime.CommonTokenStream commonTokenStream = new Antlr4.Runtime.CommonTokenStream(speakLexer); ADL_Parser.ADLParser parser = new ADL_Parser.ADLParser(commonTokenStream); ADLVisitor visitor = new ADLVisitor(); Object element; StatementContext statement; CSPGenerator generator = new CSPGenerator(this); while (true) { statement = parser.statement(); // parsing architecture elements if ((element = statement.archelement()) != null) { element = visitor.Visit((ArchelementContext)element); if (element is Component) { Component comp = (Component)element; Console.WriteLine(comp.ToString()); ComponentDatabase.Add(comp.Name, comp); } else if (element is Connector) { Connector conn = ((Connector)element); Console.WriteLine(conn.ToString()); ConnectorDatabase.Add(conn.Name, conn); } else if (element is SystemConfig) { Console.WriteLine(((SystemConfig)element).ToString()); // Console.WriteLine("system:"+((SystemConfig)element).ToString()); generator.parse((SystemConfig)element); } // parsing assertion } else if ((element = statement.assertion()) != null) { AssertionExpr assertion = (AssertionExpr)visitor.VisitAssertion((AssertionContext)element); Console.WriteLine(assertion.ToString()); generator.AddAssertion(assertion, options); } else { break; } } // End parsing ADL StaticAnalysis(); CheckingConflictingEventsAndVariables(); foreach (KeyValuePair <string, Definition> pair in 
DefinitionDatabase) { List <string> gVar = pair.Value.GlobalVars; int i = 0; while (i < gVar.Count) { //Console.WriteLine("parsed " + gVar[i]); if (SpecValuation.Variables != null && !SpecValuation.Variables.ContainsKey(gVar[i])) { gVar.RemoveAt(i); } else { i++; } } } //get the relevant channels; if (ChannelDatabase.Count > 0) { SyncrhonousChannelNames = new List <string>(0); Dictionary <string, ChannelQueue> newChannelDatabase = new Dictionary <string, ChannelQueue>(); foreach (KeyValuePair <string, ChannelQueue> pair in ChannelDatabase) { if (pair.Value.Size == 0) { SyncrhonousChannelNames.Add(pair.Key); } else { newChannelDatabase.Add(pair.Key, pair.Value); } } SpecValuation.Channels = newChannelDatabase; HasSyncrhonousChannel = SyncrhonousChannelNames.Count > 0; } foreach (KeyValuePair <string, AssertionBase> entry in AssertionDatabase) { entry.Value.Initialize(this); } CheckVariableRange(); // Console.WriteLine("Finish parsing spec...................."); Console.WriteLine(this.GetSpecification()); }
/// <summary>
/// Lexes the given C# script, builds the token stream <c>Ts</c> over the
/// default token channel, and eagerly fills it.
/// </summary>
/// <param name="script">C# source text to tokenize.</param>
public void Lex(string script)
{
    var source = new Antlr4.Runtime.AntlrInputStream(script);
    _lexer = new CSharpLexer(source);
    Ts = new Antlr4.Runtime.CommonTokenStream(_lexer, CSharpLexer.DefaultTokenChannel);
    Ts.Fill();
}
/// <summary>
/// Builds a GraphQL <see cref="Document"/> from raw text, installing any
/// custom lexer/parser error listeners configured on the builder.
/// </summary>
/// <param name="antlrDocumentBuilder">Builder carrying the optional error-listener collections.</param>
/// <param name="data">GraphQL source text.</param>
private static Document Build(AntlrDocumentBuilder antlrDocumentBuilder, string data)
{
    using (var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(data)))
    using (var streamReader = new StreamReader(memoryStream))
    {
        var inputStream = new AntlrInputStream(streamReader);

        var lexer = new GraphQLLexer(inputStream);
        if (antlrDocumentBuilder.LexerErrorListeners != null)
        {
            // Swap the default console listener for the configured ones.
            lexer.RemoveErrorListeners();
            antlrDocumentBuilder.LexerErrorListeners.Apply(x => lexer.AddErrorListener(x));
        }

        var tokenStream = new CommonTokenStream(lexer);
        var parser = new GraphQLParser(tokenStream);
        if (antlrDocumentBuilder.ParserErrorListeners != null)
        {
            parser.RemoveErrorListeners();
            antlrDocumentBuilder.ParserErrorListeners.Apply(x => parser.AddErrorListener(x));
        }

        var visitor = new GraphQLVisitor();
        return visitor.Visit(parser.document()) as Document;
    }
}
/// <summary>
/// Opens the given C# file, parses it as a compilation unit, and walks the
/// tree with <see cref="CsharpParseTree"/> to collect results.
/// </summary>
/// <param name="fileName">Path to the file; null/blank or missing files yield null.</param>
/// <returns>The collected parse results, or null when the file cannot be read.</returns>
public static CsharpParseResults InvokeParse(string fileName)
{
    if (string.IsNullOrWhiteSpace(fileName))
        return null;
    if (!System.IO.File.Exists(fileName))
        return null;

    // 'using' guarantees the stream is closed even if lexing/parsing throws;
    // the original only closed it on the success path.
    using (var tr = System.IO.File.OpenRead(fileName))
    {
        var input = new AntlrInputStream(tr);
        var lexer = new CSharp4Lexer(input);
        var tokens = new CommonTokenStream(lexer);
        var parser = new CSharp4Parser(tokens);
        var tree = parser.compilation_unit();

        var walker = new ParseTreeWalker();
        var loader = new CsharpParseTree();
        walker.Walk(loader, tree);
        return loader.Results;
    }
}
/// <summary>
/// Compiles <paramref name="Script"/> into the command stack: clears prior
/// state, parses the script, and appends each command context found in the
/// compile unit. Failures are recorded in <c>_CompileErrorMessages</c> rather
/// than thrown.
/// </summary>
internal void LoadCommandStack(string Script)
{
    // Reset current state.
    this.Commands.Clear();
    this._CompileErrorMessages.Clear();

    // Lexical analysis over the raw script text.
    var textStream = new AntlrInputStream(Script);
    var scriptLexer = new HScriptLexer(textStream);

    // Parse, routing syntax problems through our own listener.
    var scriptTokens = new CommonTokenStream(scriptLexer);
    var scriptParser = new HScriptParser(scriptTokens);
    scriptParser.RemoveErrorListeners();
    scriptParser.AddErrorListener(new ParserErrorListener());

    // Executor object (created here as in the original, even though it is not
    // referenced below).
    CommandVisitor processor = new CommandVisitor(this.Home);

    // Load the call stack; any failure becomes a compile error message.
    try
    {
        foreach (HScriptParser.CommandContext context in scriptParser.compile_unit().command_set().command())
        {
            this.Commands.Add(context);
        }
    }
    catch (Exception e)
    {
        this._CompileErrorMessages.Add(e.Message);
    }
}
/// <summary>
/// Parse-button handler: lexes and parses the QL input text, routing
/// lexer/parser errors to <c>WriteError</c>, then appends the parse tree and
/// the visitor's output to the output box.
/// NOTE(review): visitor.Visit(tree) is invoked twice — once standalone and
/// once inside string.Format — so any side effects of the visit run twice;
/// left as-is because the visitor's purity cannot be confirmed from here.
/// </summary>
private void btnParse_Click(object sender, EventArgs e) { txtOutput.Clear(); string inputString = txtInput.Text; MemoryStream inputStream = new MemoryStream(Encoding.UTF8.GetBytes(inputString ?? "")); AntlrInputStream input = new AntlrInputStream(inputStream); QLLexer lexer = new QLLexer(input); CommonTokenStream tokens = new CommonTokenStream(lexer); QLParser parser = new QLParser(tokens); lexer.RemoveErrorListeners(); parser.RemoveErrorListeners(); lexer.AddErrorListener(new LexerErrorListener(){OnError = WriteError}); parser.AddErrorListener(new ParserErrorListener(){OnError = WriteError}); IParseTree tree = parser.questionnaire(); //if (parser.NumberOfSyntaxErrors > 0) // txtOutput.Text += string.Format("Parser errors found: {0}", parser.NumberOfSyntaxErrors); QLVisitor visitor = new QLVisitor(); visitor.Visit(tree); txtOutput.Text += string.Format(@"{0}{0} Generated parse tree: {0} {1} {0} {2}" , Environment.NewLine , tree.ToStringTree(parser) , visitor.Visit(tree)); }