/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state, then fires the <c>OnCreated</c> hook (presumably a partial-method
/// extension point in the generated lexer — confirm against the generated file).
/// </summary>
public NCalcLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
    OnCreated();
}
/// <summary>
/// Creates a token of the given <paramref name="type"/> on <paramref name="channel"/>,
/// bound to <paramref name="input"/> over the inclusive character range
/// [<paramref name="start"/>, <paramref name="stop"/>].
/// </summary>
public CommonToken(ICharStream input, int type, int channel, int start, int stop)
{
    this.input = input;
    this.type = type;
    this.channel = channel;
    this.start = start;
    this.stop = stop;
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state, then fires the <c>OnCreated</c> hook (presumably a partial-method
/// extension point in the generated lexer — confirm against the generated file).
/// </summary>
public AssemblerLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
    OnCreated();
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state, then fires the <c>OnCreated</c> hook (presumably a partial-method
/// extension point in the generated lexer — confirm against the generated file).
/// </summary>
public UniAspectLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
    OnCreated();
}
/// <summary>
/// Initializes the colorizer lexer over <paramref name="input"/> and keeps a
/// reference to the outer classifier lexer it cooperates with.
/// </summary>
/// <exception cref="ArgumentNullException">
/// If <paramref name="input"/> or <paramref name="lexer"/> is null.
/// </exception>
public AlloyColorizerLexer(ICharStream input, AlloyClassifierLexer lexer)
    : this(input)
{
    // Guard both arguments; the sibling classifier-lexer constructors
    // (e.g. AntlrGrammarClassifierLexer) validate input as well, and this
    // one previously only checked lexer.
    Contract.Requires<ArgumentNullException>(input != null, "input");
    Contract.Requires<ArgumentNullException>(lexer != null, "lexer");
    _lexer = lexer;
}
/// <summary>
/// Initializes the code-classifier lexer over <paramref name="input"/> and keeps
/// a reference to the outer classifier lexer it cooperates with.
/// </summary>
/// <exception cref="ArgumentNullException">
/// If <paramref name="input"/> or <paramref name="lexer"/> is null.
/// </exception>
public ChapelCodeClassifierLexer(ICharStream input, ChapelClassifierLexer lexer)
    : this(input)
{
    // Guard both arguments; the sibling classifier-lexer constructors
    // (e.g. AntlrGrammarClassifierLexer) validate input as well, and this
    // one previously only checked lexer.
    Contract.Requires<ArgumentNullException>(input != null, "input");
    Contract.Requires<ArgumentNullException>(lexer != null, "lexer");
    _lexer = lexer;
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state, then fires the <c>OnCreated</c> hook (presumably a partial-method
/// extension point in the generated lexer — confirm against the generated file).
/// </summary>
public MessageContractsLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
    OnCreated();
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> and stores the supplied
/// <paramref name="configuration"/> after validating it is non-null.
/// </summary>
public MatlabLexer(ICharStream input, Configuration configuration)
    : this(input)
{
    Checker.CheckNotNull(configuration);
    this.Configuration = configuration;
}
/// <summary>
/// Creates the preprocessing lexer: preprocessing starts enabled and the
/// command-line symbol definitions seed the macro-define table.
/// </summary>
public csLexerWithPreProcessor(ICharStream stream, IEnumerable<string> defines)
    : base(stream)
{
    // By default we are preprocessing input.
    Processing.Push(true);

    // Grab what's defined from the command line.
    MacroDefines.UnionWith(defines);
}
/// <summary>
/// Lexes, parses, semantically checks, and transforms a Tiger program read from
/// <paramref name="stream"/> into an <c>MAst</c>.
/// </summary>
/// <exception cref="SyntaxException">When the parser reports syntax errors.</exception>
/// <exception cref="SemanticException">When semantic checking reports errors.</exception>
public static MAst Compile(AstHelper runtime, ICharStream stream)
{
    var parser = new TigerParser(new CommonTokenStream(new TigerLexer(stream)));
    ProgramExpression programExpression = parser.parse();

    if (parser.NumberOfSyntaxErrors > 0)
    {
        IEnumerable<string> errors = parser.Errors.Select(e => e.ToString());
        throw new SyntaxException(errors);
    }

    // Fresh child scope with its own function/variable/type tables for this unit.
    AstHelper helper = runtime.CreateChild(function: true, variables: true, types: true);
    programExpression.CheckSemantics(helper);
    if (helper.Errors.HasErrors)
    {
        throw new SemanticException(helper.Errors);
    }

    return programExpression.Transform();
}
/// <summary>
/// Compiles assembler source from <paramref name="input"/> into bytecode,
/// storing the result in <c>_result</c>. Syntax and generation errors are
/// reported through <c>_listener</c> when one is attached.
/// </summary>
public void Compile(ICharStream input)
{
    try
    {
        AssemblerLexer lex = new AssemblerLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lex);
        AssemblerParser p = new AssemblerParser(tokens);
        BytecodeGenerator gen = new BytecodeGenerator(Defaults.SystemMethods.Values);
        p.SetGenerator(gen);
        p.TraceDestination = _traceDestination;
        p.program();
        if (p.NumberOfSyntaxErrors > 0)
        {
            // Previously, when _listener was null, syntax errors fell through and
            // gen.Result was still committed; never publish a result from an
            // errored parse.
            if (_listener != null)
            {
                _listener.Error(Convert.ToString(p.NumberOfSyntaxErrors) + " syntax error(s)");
            }
            return;
        }
        _result = gen.Result;
    }
    catch (GenerationException ex)
    {
        // Guard against a null listener here as well; the success path already
        // tolerates one, and an unguarded call would turn a reported generation
        // error into a NullReferenceException.
        if (_listener != null)
        {
            _listener.Error(ex.Message);
        }
    }
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state, then fires the <c>OnCreated</c> hook (presumably a partial-method
/// extension point in the generated lexer — confirm against the generated file).
/// </summary>
public FuncProtoToShimLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
    OnCreated();
}
/// <summary>
/// Runs the full FKVM compilation pipeline over <paramref name="input"/>:
/// lexical/syntactic analysis, semantic analysis, and code generation, writing
/// the generated output to <paramref name="pathSalida"/>. Sets
/// <c>compilacionOK</c> to true only when every phase completes without errors.
/// </summary>
private void compilacion(ICharStream input, string pathSalida)
{
    compilacionOK = false;

    // Load the code-generation templates from the embedded resource.
    // (Previously the reader leaked if StringTemplateGroup threw; the using
    // block guarantees disposal.)
    StringTemplateGroup templates;
    using (TextReader groupFileR = new StreamReader(
        System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream("FKVM.src.antlr.FkvmIL.stg")))
    {
        templates = new StringTemplateGroup(groupFileR);
    }

    // Lexical-syntactic analysis.
    Console.WriteLine("Análisis léxico-sintáctico...");
    FKVMLexer lexer = new FKVMLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FKVMParser parser = new FKVMParser(tokens);
    parser.TreeAdaptor = adaptor;
    parser.reportarError = re;
    FKVMParser.programa_return result = parser.programa();

    // Only proceed to semantic analysis when there are no lexical or syntactic errors.
    if (lexer.numErrors + parser.numErrors == 0)
    {
        // Semantic analysis over the AST produced by the parser.
        Console.WriteLine("Análisis semántico...");
        CommonTree t = ((CommonTree)result.Tree);
        CommonTreeNodeStream nodes2 = new CommonTreeNodeStream(t);
        nodes2.TokenStream = tokens;
        FKVMSem walker2 = new FKVMSem(nodes2);
        walker2.reportarError = re;
        walker2.programa(parser.symtable);

        // Only generate code when semantic analysis succeeded.
        if (walker2.numErrors == 0)
        {
            // Code generation via the string templates loaded above.
            Console.WriteLine("Generación de código...");
            CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
            nodes.TokenStream = tokens;
            FKVMGen walker = new FKVMGen(nodes);
            walker.TemplateLib = templates;
            FKVMGen.programa_return r2 = walker.programa(parser.numVars);

            // Emit the rendered template to the output file.
            // (using replaces the manual Flush/Close and also disposes on failure.)
            StringTemplate output = (StringTemplate)r2.Template;
            using (StreamWriter pw = new StreamWriter(pathSalida))
            {
                pw.WriteLine(output.ToString());
            }
            compilacionOK = true;
        }
    }
}
/// <summary>
/// Builds a recognition exception for the lexer-interpreter path, recording the
/// lexer as the recognizer and the stream being scanned.
/// </summary>
public RecognitionException(Lexer lexer, ICharStream input)
{
    // TODO: make a dummy recognizer for the interpreter to use?
    // The (ctx, input) pair should mirror what is in the recognizer, but that
    // does not work when interpreting.
    this.recognizer = lexer;
    this.input = input;
}
/// <summary>
/// Initializes the grammar-classifier lexer over <paramref name="input"/> and
/// keeps a reference to the outer classifier lexer it cooperates with.
/// </summary>
/// <exception cref="ArgumentNullException">
/// If <paramref name="input"/> or <paramref name="lexer"/> is null.
/// </exception>
internal AntlrGrammarClassifierLexer(ICharStream input, AntlrClassifierLexer lexer)
    : this(input)
{
    Contract.Requires<ArgumentNullException>(input != null, "input");
    Contract.Requires<ArgumentNullException>(lexer != null, "lexer");
    _lexer = lexer;
}
/// <summary>
/// Wraps <paramref name="source"/> and primes the one-character lookahead cache.
/// </summary>
/// <exception cref="ArgumentNullException">If <paramref name="source"/> is null.</exception>
public JavaUnicodeInputStream([NotNull] ICharStream source)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }

    this._source = source;
    this._la1 = source.La(1);
}
/// <summary>
/// Builds the lexer/token-stream/parser pipeline over the given input and
/// replaces the parser's default error listeners with this driver's own.
/// </summary>
private ZealCpuDriver(ICharStream antlrInputStream)
{
    _lexer = new ZealCpuLexer(antlrInputStream);
    _tokenStream = new CommonTokenStream(_lexer);
    _parser = new ZealCpuParser(_tokenStream);

    // Route parse errors through CpuErrorListener instead of the console default.
    _parser.RemoveErrorListeners();
    _parser.AddErrorListener(new CpuErrorListener(this));
}
/// <summary>
/// Creates a compiler front end, enables AST graph output, compiles
/// <paramref name="input"/>, and returns the front end for inspection.
/// </summary>
public InWorldz.Phlox.Glue.CompilerFrontend Compile(ICharStream input)
{
    // NOTE(review): the grammar directory is a hard-coded relative path —
    // presumably only valid when run from the build output tree; confirm.
    var frontEnd = new InWorldz.Phlox.Glue.CompilerFrontend(_listener, "..\\..\\..\\..\\grammar", true);
    frontEnd.OutputASTGraph = true;
    frontEnd.Compile(input);
    return frontEnd;
}
/*
 * There's one fairly important difference between mod resolution in rustc and
 * what we do. Given the following code:
 *     mod bar { mod a; }  mod bar { mod b; }
 * we will merge this to mod bar { mod a; mod b; }, but rustc will error out.
 */
/// <summary>
/// Parses module declarations from <paramref name="stream"/> and collects the
/// imports found by walking the parse tree.
/// </summary>
public static ModuleImport ParseImports(ICharStream stream)
{
    var parser = new ModuleParser(new CommonTokenStream(new ModuleLexer(stream)));
    BodyContext root = parser.body();

    var imports = new ModuleImport();
    TraverseForImports(root, imports);
    return imports;
}
/// <summary>
/// Delegates parse-tree construction to the configured tree builder, wiring the
/// supplied lexer and parser error listeners through.
/// </summary>
/// <exception cref="ArgumentNullException">If <paramref name="input"/> is null.</exception>
protected virtual IParseTree CreateTree(ICharStream input, IEnumerable<IAntlrErrorListener<int>> lexerErrorListeners, IEnumerable<IAntlrErrorListener<IToken>> errorListeners)
{
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }

    return _treeBuilder.CreateTree(input, lexerErrorListeners, errorListeners);
}
/// <summary>
/// Parses a single IQLang value expression from <paramref name="input"/> and
/// evaluates it in the given environment. Lexer/parser errors are routed to the
/// shared listener singletons instead of the console defaults.
/// </summary>
public static IQueryValue ParseValue(IEnvironment env, ICharStream input)
{
    var lexer = new IQLangLexer(input);
    lexer.RemoveErrorListeners();
    lexer.AddErrorListener(LexerErrorListener.Instance);

    var parser = new IQLangParser(new CommonTokenStream(lexer));
    parser.RemoveErrorListeners();
    parser.AddErrorListener(ParserErrorListener.Instance);

    return parser.value().expr.Evaluate(env);
}
/// <summary>
/// Creates the lexer for <paramref name="charStream"/> and attaches each
/// supplied error listener to it.
/// </summary>
public ITokenSource CreateTokenSource(ICharStream charStream, IEnumerable<IAntlrErrorListener<int>> errorListeners)
{
    var tokenSource = CreateLexer(charStream);

    // ToArray materializes once (and rejects a null sequence); the previous
    // Any() guard was redundant — iterating an empty array is already a no-op.
    foreach (var listener in errorListeners.ToArray())
    {
        tokenSource.AddErrorListener(listener);
    }

    return tokenSource;
}
/// <summary>
/// Builds a lexer interpreter for the given lexer ATN, snapshotting the rule
/// and mode names and installing a fresh ATN simulator.
/// </summary>
/// <exception cref="ArgumentException">If <paramref name="atn"/> is not a lexer ATN.</exception>
public LexerInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, IEnumerable<string> modeNames, ATN atn, ICharStream input)
    : base(input)
{
    if (atn.grammarType != ATNType.Lexer)
    {
        throw new ArgumentException("The ATN must be a lexer ATN.");
    }

    this.grammarFileName = grammarFileName;
    this.atn = atn;
    this.vocabulary = vocabulary;
    // Snapshot the name sequences so later mutation of the sources has no effect.
    this.ruleNames = ruleNames.ToArray();
    this.modeNames = modeNames.ToArray();
    this.Interpreter = new LexerATNSimulator(this, atn);
}
/// <summary>
/// Parses a complete IQLang compile unit from <paramref name="input"/> and
/// returns its statements. Lexer/parser errors are routed to the shared
/// listener singletons instead of the console defaults.
/// </summary>
public static IQueryStatement[] Parse(ICharStream input)
{
    var lexer = new IQLangLexer(input);
    lexer.RemoveErrorListeners();
    lexer.AddErrorListener(LexerErrorListener.Instance);

    var parser = new IQLangParser(new CommonTokenStream(lexer));
    parser.RemoveErrorListeners();
    parser.AddErrorListener(ParserErrorListener.Instance);

    return parser.compileUnit().list.ToArray();
}
/// <summary>
/// Parses a complete DSLang compile unit from <paramref name="input"/> and
/// returns its executable root. Lexer/parser errors are routed to the shared
/// listener singletons instead of the console defaults.
/// </summary>
public static IExecutable Parse(ICharStream input)
{
    var lexer = new DSLangLexer(input);
    lexer.RemoveErrorListeners();
    lexer.AddErrorListener(LexerErrorListener.Instance);

    var parser = new DSLangParser(new CommonTokenStream(lexer));
    parser.RemoveErrorListeners();
    parser.AddErrorListener(ParserErrorListener.Instance);

    return parser.compileUnit().executable;
}
/// <summary>
/// Parses a complete SLang compile unit from <paramref name="input"/> and
/// returns its root node. Lexer/parser errors are routed to the shared
/// listener singletons instead of the console defaults.
/// </summary>
public static ICompileNode Parse(ICharStream input)
{
    var lexer = new SLangLexer(input);
    lexer.RemoveErrorListeners();
    lexer.AddErrorListener(LexerErrorListener.Instance);

    var parser = new SLangParser(new CommonTokenStream(lexer));
    parser.RemoveErrorListeners();
    parser.AddErrorListener(ParserErrorListener.Instance);

    return parser.compileUnit().rootNode;
}
/// <summary>
/// Copy constructor: duplicates an existing token, additionally preserving the
/// start/stop character offsets when the source is a <c>CommonToken</c>.
/// </summary>
public CommonToken(IToken oldToken)
{
    // The previous version pre-set charPositionInLine and index to -1 here,
    // but both were immediately overwritten below; those dead stores are removed.
    this.text = oldToken.Text;
    this.type = oldToken.Type;
    this.line = oldToken.Line;
    this.index = oldToken.TokenIndex;
    this.charPositionInLine = oldToken.CharPositionInLine;
    this.channel = oldToken.Channel;
    this.input = oldToken.InputStream;

    // Single 'as' cast replaces the previous is-check plus two casts.
    CommonToken common = oldToken as CommonToken;
    if (common != null)
    {
        this.start = common.start;
        this.stop = common.stop;
    }
}
/// <summary>
/// Parses the specified input stream into a list of executable commands.
/// </summary>
/// <param name="input">The character stream to parse.</param>
/// <returns>A <c>CommandList</c> wrapping the parsed commands.</returns>
/// <exception cref="ParseException">When the parser reports syntax errors.</exception>
public static CommandList Parse(ICharStream input)
{
    var parser = new DScriptParser(new CommonTokenStream(new DScriptLexer(input)));
    parser.RemoveErrorListeners();
    parser.AddErrorListener(new LogErrorListener());

    List<ExecutableCommand> commands = parser.compileUnit().finalCommands;
    if (parser.NumberOfSyntaxErrors > 0)
    {
        throw new ParseException("Parser finished with syntax errors");
    }

    return new CommandList(commands);
}
/// <summary>
/// Builds a lexer interpreter for the given lexer ATN, allocating one DFA per
/// ATN decision and installing an ATN simulator that shares the context cache.
/// </summary>
/// <exception cref="ArgumentException">If <paramref name="atn"/> is not a lexer ATN.</exception>
public LexerInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, IEnumerable<string> modeNames, ATN atn, ICharStream input)
    : base(input)
{
    if (atn.grammarType != ATNType.Lexer)
    {
        throw new ArgumentException("The ATN must be a lexer ATN.");
    }

    this.grammarFileName = grammarFileName;
    this.atn = atn;
    this.vocabulary = vocabulary;
    // Snapshot the name sequences so later mutation of the sources has no effect.
    this.ruleNames = ruleNames.ToArray();
    this.modeNames = modeNames.ToArray();

    // One DFA per decision point in the ATN.
    this.decisionToDFA = new DFA[atn.NumberOfDecisions];
    for (int decision = 0; decision < decisionToDFA.Length; decision++)
    {
        decisionToDFA[decision] = new DFA(atn.GetDecisionState(decision), decision);
    }

    this.Interpreter = new LexerATNSimulator(this, atn, decisionToDFA, sharedContextCache);
}
/// <summary>
/// Builds a lexer interpreter for the given lexer ATN, materializing the
/// deprecated token-name table from the vocabulary for backward compatibility
/// and installing a fresh ATN simulator.
/// </summary>
/// <exception cref="ArgumentException">If <paramref name="atn"/> is not a lexer ATN.</exception>
public LexerInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, IEnumerable<string> modeNames, ATN atn, ICharStream input)
    : base(input)
{
    if (atn.grammarType != ATNType.Lexer)
    {
        throw new ArgumentException("The ATN must be a lexer ATN.");
    }

    this.grammarFileName = grammarFileName;
    this.atn = atn;

#pragma warning disable 612 // 'fieldName' is obsolete
    // Keep the legacy token-name table populated from the vocabulary.
    this.tokenNames = new string[atn.maxTokenType];
    for (int tokenType = 0; tokenType < tokenNames.Length; tokenType++)
    {
        tokenNames[tokenType] = vocabulary.GetDisplayName(tokenType);
    }
#pragma warning restore 612

    this.vocabulary = vocabulary;
    // Snapshot the name sequences so later mutation of the sources has no effect.
    this.ruleNames = ruleNames.ToArray();
    this.modeNames = modeNames.ToArray();
    this._interp = new LexerATNSimulator(this, atn);
}
/// <summary>
/// Initializes the base lexer and keeps a local reference to the input stream
/// for use by derived lexer rules.
/// </summary>
protected Java9LexerBase(ICharStream input, TextWriter output, TextWriter errorOutput)
    : base(input, output, errorOutput)
{
    _input = input;
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> and installs an ATN
/// simulator backed by the generated static ATN.
/// </summary>
public Combined1Lexer(ICharStream input)
    : base(input)
{
    _interp = new LexerATNSimulator(this, _ATN);
}
/// <summary>
/// Reinitializes the parser over <paramref name="stream"/> and switches the
/// token manager to the given lexical state.
/// </summary>
public void ReInit(ICharStream stream, int lexState)
{
    ReInit(stream);
    SwitchTo(lexState);
}
/// <summary>
/// Constructs a new CaseChangingCharStream wrapping the given
/// <paramref name="stream"/>, forcing all characters to upper case or lower case.
/// </summary>
/// <param name="stream">The stream to wrap.</param>
/// <param name="upper">If true force each symbol to upper case, otherwise force to lower.</param>
public CaseChangingCharStream(ICharStream stream, bool upper)
{
    this.stream = stream;
    this.upper = upper;
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> and installs an ATN
/// simulator backed by the generated static ATN.
/// </summary>
public PlanningLexer(ICharStream input)
    : base(input)
{
    _interp = new LexerATNSimulator(this, _ATN);
}
/// <summary>
/// Initializes the base lexer over <paramref name="input"/>; all behavior comes
/// from the ANTLR base class.
/// </summary>
public JavaScriptBaseLexer(ICharStream input)
    : base(input)
{
}
/// <summary>
/// Creates a token manager reading from the given character stream.
/// </summary>
public StandardSyntaxParserTokenManager(ICharStream stream)
{
    m_input_stream = stream;
}
/// <summary>
/// Initializes the lexer with explicit output writers and installs an ATN
/// simulator sharing the generated DFA tables and context cache.
/// </summary>
public command_lineLexer(ICharStream input, TextWriter output, TextWriter errorOutput)
    : base(input, output, errorOutput)
{
    Interpreter = new LexerATNSimulator(this, _ATN, decisionToDFA, sharedContextCache);
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> writing diagnostics
/// to the console's standard output and error streams.
/// </summary>
public InvertedPolishCalculatorLexer(ICharStream input)
    : this(input, Console.Out, Console.Error)
{
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/>; all behavior comes from
/// the ANTLR base class.
/// </summary>
public MySqlLexer(ICharStream input)
    : base(input)
{
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with explicit writers
/// for normal and error diagnostics.
/// </summary>
public MySqlLexer(ICharStream input, TextWriter output, TextWriter errorOutput)
    : base(input, output, errorOutput)
{
}
/// <summary>
/// Creates a token manager reading from <paramref name="stream"/> and switches
/// it to the given lexical state.
/// </summary>
public StandardSyntaxParserTokenManager(ICharStream stream, int lexState)
    : this(stream)
{
    SwitchTo(lexState);
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state; all behavior comes from the ANTLR base class.
/// </summary>
public DebugGrammarLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
}
/// <summary>
/// Pretty-prints a ZIL token stream to <paramref name="output"/>: tracks bracket
/// nesting (forms &lt;&gt;, lists (), vectors []) on a stack, inserts line breaks
/// and two-space-per-level indentation where NeedBreakBetween says so, and
/// single spaces where NeedSpaceBetween says so. Whitespace tokens from the
/// input are discarded; all other token text is echoed verbatim.
/// </summary>
public void Format(ICharStream inputStream, TextWriter output)
{
    ZilLexer lexer = new ZilLexer(inputStream);
    Stack <Nesting> nesting = new Stack <Nesting>();
    // last = type of the previously emitted token (-1 before any output);
    // width = current line width in characters (maintained but only indentation
    // uses it directly here).
    int last = -1, width = 0;
    nesting.Push(Nesting.TopLevel);
    // token.Type < 0 signals end of stream, terminating the loop.
    for (IToken token = lexer.NextToken(); token.Type >= 0; token = lexer.NextToken())
    {
        // Decide separation BEFORE classifying the token, based on the pair
        // (previous type, current type) and the current nesting.
        if (NeedBreakBetween(last, token.Type, nesting))
        {
            output.WriteLine();
            width = 0;
            // Indent one step per nesting level below top level.
            // NOTE(review): width advances by 2 per level but the literal below
            // shows a single space — presumably "  " lost in reformatting; confirm.
            for (int i = 1; i < nesting.Count; i++)
            {
                output.Write(" ");
                width += 2;
            }
        }
        else if (NeedSpaceBetween(last, token.Type))
        {
            output.Write(' ');
            width++;
        }
        switch (token.Type)
        {
            case ZilLexer.WS:
                // Input whitespace is ignored entirely; note this also skips the
                // token-emission and 'last' update below.
                continue;
            case ZilLexer.ATOM:
            case ZilLexer.NUM:
            case ZilLexer.STRING:
            case ZilLexer.CHAR:
                // Atomic tokens: emitted as-is, no nesting change.
                break;
            case ZilLexer.LANGLE:
                nesting.Push(Nesting.Form);
                break;
            case ZilLexer.RANGLE:
                // Only pop when the closer matches the innermost opener;
                // mismatched closers leave the stack untouched.
                if (nesting.Peek() == Nesting.Form)
                {
                    nesting.Pop();
                }
                break;
            case ZilLexer.LPAREN:
                nesting.Push(Nesting.List);
                break;
            case ZilLexer.RPAREN:
                if (nesting.Peek() == Nesting.List)
                {
                    nesting.Pop();
                }
                break;
            case ZilLexer.LSQUARE:
                nesting.Push(Nesting.Vector);
                break;
            case ZilLexer.RSQUARE:
                if (nesting.Peek() == Nesting.Vector)
                {
                    nesting.Pop();
                }
                break;
        }
        // Emit the token text and record it as the predecessor for the next
        // separation decision.
        output.Write(token.Text);
        width += token.Text.Length;
        last = token.Type;
    }
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> with a fresh
/// recognizer shared state.
/// </summary>
public DebugGrammarLexer(ICharStream input)
    : this(input, new RecognizerSharedState())
{
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> with a fresh
/// recognizer shared state.
/// </summary>
public ExprLexer(ICharStream input)
    : this(input, new RecognizerSharedState())
{
}
/// <summary>
/// Produces the Objective-C lexer for the given stream.
/// </summary>
protected override ITokenSource CreateLexer(ICharStream stream)
{
    return new ObjectiveCLexer(stream);
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> writing diagnostics
/// to the console's standard output and error streams.
/// </summary>
public DustLexer(ICharStream input)
    : this(input, Console.Out, Console.Error)
{
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> writing diagnostics
/// to the console's standard output and error streams.
/// </summary>
public FormulaLexer(ICharStream input)
    : this(input, Console.Out, Console.Error)
{
}
/// <summary>
/// Initializes the lexer over <paramref name="input"/> with a shared recognizer
/// state; all behavior comes from the ANTLR base class.
/// </summary>
public ExprLexer(ICharStream input, RecognizerSharedState state)
    : base(input, state)
{
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> writing diagnostics
/// to the console's standard output and error streams.
/// </summary>
public MipsAsmLexer(ICharStream input)
    : this(input, Console.Out, Console.Error)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CaseChangingCharStream"/> class
/// wrapping the given underlying stream.
/// </summary>
/// <param name="internalStream">The stream to wrap.</param>
public CaseChangingCharStream(ICharStream internalStream)
{
    InternalStream = internalStream;
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> writing diagnostics
/// to the console's standard output and error streams.
/// </summary>
public calculatorLexer(ICharStream input)
    : this(input, Console.Out, Console.Error)
{
}
/// <summary>
/// Initializes the lexer with explicit output writers and installs an ATN
/// simulator sharing the generated DFA tables and context cache.
/// </summary>
public InvertedPolishCalculatorLexer(ICharStream input, TextWriter output, TextWriter errorOutput)
    : base(input, output, errorOutput)
{
    Interpreter = new LexerATNSimulator(this, _ATN, decisionToDFA, sharedContextCache);
}
/// <summary>
/// Creates a lexer over the given text by wrapping it in an in-memory
/// character stream.
/// </summary>
public ExplicitCtorLexer(string text)
{
    this.source = new TextReaderCharStream(new StringReader(text));
}
/// <summary>
/// Convenience constructor: lexes <paramref name="input"/> writing diagnostics
/// to the console's standard output and error streams.
/// </summary>
public command_lineLexer(ICharStream input)
    : this(input, Console.Out, Console.Error)
{
}