protected override void ReParseImpl()
{
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    Stopwatch stopwatch = Stopwatch.StartNew();

    // Use the backing document's path for diagnostics when one is available.
    ITextDocument textDocument = TextDocument;
    string filename = textDocument != null ? textDocument.FilePath : "<Unknown File>";

    // Lex the current snapshot; the token factory produces tokens that carry
    // their snapshot spans so downstream consumers can map back to the buffer.
    var snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new PhpLexer(input);
    lexer.TokenFactory = new SnapshotTokenFactory(snapshot, lexer);
    var tokens = new CommonTokenStream(lexer);

    // Parse the buffer, routing syntax errors to the output window pane.
    var parser = new PhpParser(tokens);
    parser.BuildParseTree = true;
    var errors = new List<ParseErrorEventArgs>();
    parser.AddErrorListener(new ErrorListener(filename, errors, outputWindow));
    var result = parser.compileUnit();

    // Walk the tree to collect the regions eligible for outlining.
    var listener = new OutliningTreesListener();
    ParseTreeWalker.Default.Walk(listener, result);

    OnParseComplete(new PhpOutliningParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), result, listener.OutliningTrees));
}
public static bool TryGetLineStatements(string text, int lineNumber, out IList<IParseTree> statementTrees, out IList<IToken> tokens)
{
    Contract.Requires<ArgumentNullException>(text != null, "text");
    Contract.Requires<ArgumentOutOfRangeException>(lineNumber >= 0);

    try
    {
        // Parse the full compilation unit; SLL prediction is the fast path and
        // is sufficient here because any failure simply yields "no statements".
        var input = new AntlrInputStream(text);
        var lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;
        JavaParser.CompilationUnitContext compilationUnit = parser.compilationUnit();

        statementTrees = null;
        tokens = tokenStream.GetTokens();

        // Collect the statement subtrees that intersect the requested line.
        var listener = new AssociatedTreeListener(lineNumber, tokens);
        ParseTreeWalker.Default.Walk(listener, compilationUnit);
        statementTrees = listener.StatementTrees;
        return true;
    }
    catch (Exception e)
    {
        // Never swallow critical exceptions (OOM, thread abort, etc.).
        if (ErrorHandler.IsCriticalException(e))
            throw;

        statementTrees = null;
        tokens = null;
        return false;
    }
}
internal static AntlrParseResultEventArgs ParseSnapshot(ITextSnapshot snapshot)
{
    Stopwatch timer = Stopwatch.StartNew();

    var tokenStream = new CommonTokenStream(new GrammarLexer(new AntlrInputStream(snapshot.GetText())));
    var parser = new GrammarParser(tokenStream);

    // NOTE(review): this list is passed to the result but never populated —
    // no error listener is attached; verify this is intentional.
    var errors = new List<ParseErrorEventArgs>();

    GrammarParser.GrammarSpecContext parseResult;
    try
    {
        // Stage 1: fast SLL prediction with a bail-out strategy; most inputs
        // parse successfully here without error recovery.
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.RemoveErrorListeners();
        parser.BuildParseTree = true;
        parser.ErrorHandler = new BailErrorStrategy();
        parseResult = parser.grammarSpec();
    }
    catch (ParseCanceledException ex)
    {
        if (!(ex.InnerException is RecognitionException))
            throw;

        // Stage 2: rewind and retry with full LL prediction and default
        // (recovering) error handling.
        tokenStream.Reset();
        parser.Interpreter.PredictionMode = PredictionMode.Ll;
        parser.SetInputStream(tokenStream);
        parser.ErrorHandler = new DefaultErrorStrategy();
        parseResult = parser.grammarSpec();
    }

    return new AntlrParseResultEventArgs(snapshot, errors, timer.Elapsed, tokenStream.GetTokens(), parseResult);
}
private static void Lex(string codePath, TextWriter output)
{
    // Tokenize the entire file and emit one formatted token per line.
    var tokenStream = new CommonTokenStream(new AdamantLexer(codePath));
    tokenStream.Fill();
    foreach (var token in tokenStream.GetTokens())
    {
        output.WriteLine(Format(token));
    }
}
public void CanTokenizeSource()
{
    // Locate the Go source fixture relative to the test assembly.
    var specPath = Path.Combine(
        Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location),
        @"lexerSpecs\basicTypes.go");
    var goSource = File.ReadAllText(specPath);

    // The lexer can consume the string directly; the intermediate StringReader
    // the original created was unnecessary.
    var lexer = new LexerGo(new AntlrInputStream(goSource));
    var tokenStream = new CommonTokenStream(lexer);

    // BUG FIX: Fill() must run before reading the token list. The original
    // called GetTokens() first and only then Fill(), relying on the stream
    // handing back a live list reference — buffer first, then inspect.
    tokenStream.Fill();
    var toks = tokenStream.GetTokens();

    Assert.IsTrue(toks.Count > 0);
}
public string Tokenize(string fileContent)
{
    AntlrInputStream inputStream = new AntlrInputStream(fileContent);
    CPP14Lexer lexer = new CPP14Lexer(inputStream);
    Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);
    tokenStream.Fill();

    // PERF FIX: the original concatenated strings in a loop (O(n^2) over the
    // token count); StringBuilder makes this linear.
    System.Text.StringBuilder result = new System.Text.StringBuilder();
    foreach (Antlr4.Runtime.IToken token in tokenStream.GetTokens())
    {
        // BUG FIX: skip the synthetic end-of-file token (type -1) instead of
        // stripping "<EOF>" from the finished string, which also mangled a
        // genuine "<EOF>" sequence appearing inside the source text.
        if (token.Type < 0)
            continue;

        // Identifiers are normalized to "ID" so the result is name-invariant
        // (e.g. for clone/similarity comparison); all other tokens keep their text.
        result.Append(token.Type == CPP14Lexer.Identifier ? "ID" : token.Text);
    }

    return result.ToString();
}
public TextRange Parse(TextSnapshotRange snapshotRange, ActiproLex.ILexerTarget parseTarget)
{
    int index = snapshotRange.StartOffset;
    int ix = index;
    parseTarget.OnPreParse(ref ix);

    // IDIOM FIX: the original used `if (HasInitialContext) { } else { ... }`
    // with an empty then-branch; only a context-free range is lexed from scratch.
    if (!parseTarget.HasInitialContext)
    {
        int length = snapshotRange.EndOffset - index;
        ITextSnapshotReader reader = snapshotRange.Snapshot.GetReader(index);
        if (reader.Offset != index)
        {
            // BUG FIX: replaced `throw new System.Exception("What??!!?")` with a
            // specific exception type and a diagnosable message. A misaligned
            // reader would report every token at the wrong offset.
            throw new System.InvalidOperationException(
                $"Snapshot reader positioned at offset {reader.Offset}, expected {index}.");
        }

        // Read the requested span of the snapshot into a single string.
        StringBuilder text = new StringBuilder();
        text.Append(reader.ReadText(length));

        var lexer = new SBLexer(new Antlr.AntlrInputStream(text.ToString()));
        var tokens = new Antlr.CommonTokenStream(lexer);
        tokens.Fill();
        foreach (var token in tokens.GetTokens())
        {
            // Negative token types are synthetic (EOF); only real tokens are
            // forwarded to the SyntaxEditor parse target.
            if (token.Type >= 0)
            {
                parseTarget.OnTokenParsed(new SyntaxEditorAntlrToken(token, index, snapshotRange.StartLine.Index), null);
            }
        }
    }

    parseTarget.OnPostParse(snapshotRange.EndOffset);
    return snapshotRange.TextRange;
}
protected override void ReParseImpl()
{
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    Stopwatch timer = Stopwatch.StartNew();

    // Fall back to a placeholder name when the buffer has no backing document.
    string fileName = "<Unknown File>";
    ITextDocument document = TextDocument;
    if (document != null)
    {
        fileName = document.FilePath;
    }

    // Build a lexer over the whole current snapshot; tokens produced by the
    // factory remember the snapshot they came from.
    var currentSnapshot = TextBuffer.CurrentSnapshot;
    var charStream = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
    var phpLexer = new PhpLexer(charStream);
    phpLexer.TokenFactory = new SnapshotTokenFactory(currentSnapshot, phpLexer);
    var tokenStream = new CommonTokenStream(phpLexer);

    // Parse with an error listener that forwards diagnostics to the output window.
    var phpParser = new PhpParser(tokenStream);
    phpParser.BuildParseTree = true;
    var parseErrors = new List<ParseErrorEventArgs>();
    phpParser.AddErrorListener(new ErrorListener(fileName, parseErrors, outputWindow));
    var compileUnit = phpParser.compileUnit();

    // Gather outlining regions from the resulting parse tree.
    var outliningListener = new OutliningTreesListener();
    ParseTreeWalker.Default.Walk(outliningListener, compileUnit);

    OnParseComplete(new PhpOutliningParseResultEventArgs(currentSnapshot, parseErrors, timer.Elapsed, tokenStream.GetTokens(), compileUnit, outliningListener.OutliningTrees));
}
// System.out.println(Arrays.toString(elements));
// TODO: check for invalid token/rule names, bad syntax
/// <summary>
/// Tokenizes an XPath-like path expression and converts each element word —
/// together with any leading '/' (child), '//' (anywhere), and '!' (invert)
/// operators — into an <c>XPathElement</c> via <c>GetXPathElement</c>.
/// </summary>
/// <param name="path">The path expression to split, e.g. "//rule/!TOKEN".</param>
/// <returns>The path elements in order of appearance.</returns>
/// <exception cref="ArgumentException">
/// Thrown when the path cannot be read, cannot be tokenized, or contains an
/// unrecognized element.
/// </exception>
public virtual XPathElement[] Split(string path)
{
    AntlrInputStream @in;
    try
    {
        @in = new AntlrInputStream(new StringReader(path));
    }
    catch (IOException ioe)
    {
        throw new ArgumentException("Could not read path: " + path, ioe);
    }
    XPathLexer lexer = new _XPathLexer_87(@in);
    lexer.RemoveErrorListeners();
    lexer.AddErrorListener(new XPathLexerErrorListener());
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    try
    {
        // Buffer every token up front; lexical errors surface here as
        // LexerNoViableAltException rather than during iteration below.
        tokenStream.Fill();
    }
    catch (LexerNoViableAltException e)
    {
        int pos = lexer.Column;
        string msg = "Invalid tokens or characters at index " + pos + " in path '" + path + "'";
        throw new ArgumentException(msg, e);
    }
    IList<IToken> tokens = tokenStream.GetTokens();
    // System.out.println("path="+path+"=>"+tokens);
    IList<XPathElement> elements = new List<XPathElement>();
    int n = tokens.Count;
    int i = 0;
    while (i < n)
    {
        IToken el = tokens[i];
        IToken next = null;
        switch (el.Type)
        {
            case XPathLexer.Root:
            case XPathLexer.Anywhere:
            {
                // '/' or '//' prefix: the next token is the element word,
                // optionally preceded by '!' to invert the match.
                // NOTE(review): assumes a word always follows the operator; a
                // path ending in '/' or '!' would read the EOF token here —
                // verify the lexer/grammar rules out that input.
                bool anywhere = el.Type == XPathLexer.Anywhere;
                i++;
                next = tokens[i];
                bool invert = next.Type == XPathLexer.Bang;
                if (invert)
                {
                    i++;
                    next = tokens[i];
                }
                XPathElement pathElement = GetXPathElement(next, anywhere);
                pathElement.invert = invert;
                elements.Add(pathElement);
                i++;
                break;
            }
            case XPathLexer.TokenRef:
            case XPathLexer.RuleRef:
            case XPathLexer.Wildcard:
            {
                // Bare element word with no leading operator.
                elements.Add(GetXPathElement(el, false));
                i++;
                break;
            }
            case TokenConstants.Eof:
            {
                goto loop_break;
            }
            default:
            {
                // NOTE(review): "Unknowth" typo is inherited from the upstream
                // ANTLR runtime message; left unchanged here.
                throw new ArgumentException("Unknowth path element " + el);
            }
        }
    }
loop_break: ;
    return elements.ToArray();
}
private static void Tokenize(string codePath, string outputPath)
{
    // Write to the given file, or to the console when no path is supplied.
    var output = outputPath != null ? File.CreateText(outputPath) : Console.Out;
    try
    {
        // Tokenize the entire file and emit one formatted token per line.
        var stream = new AntlrFileStream(codePath);
        var lexer = new AdamantLexer(stream);
        var tokens = new CommonTokenStream(lexer);
        tokens.Fill();
        foreach (var token in tokens.GetTokens())
            output.WriteLine(Format(token));
    }
    finally
    {
        // BUG FIX: the original never disposed the StreamWriter created by
        // File.CreateText, so buffered output could be lost. Console.Out must
        // NOT be disposed, hence the outputPath check.
        if (outputPath != null)
            output.Dispose();
    }
}
/// <summary>
/// Attempts to map the debugger's current bytecode location (<c>_location</c>)
/// back to the specific source statement (parse tree) it came from, by parsing
/// the Java source file and — when several statements share the line —
/// disambiguating via bytecode evaluation-stack analysis.
/// </summary>
/// <param name="associatedTree">The matched statement tree, or null when no unambiguous match exists.</param>
/// <param name="tokens">The token list for the parsed source file, or null on failure.</param>
/// <returns>true when exactly one statement could be associated; otherwise false.</returns>
private bool TryGetAssociatedTree(out IParseTree associatedTree, out IList<IToken> tokens)
{
    try
    {
        // Without the original source file there is nothing to associate.
        string sourcePath = _location.GetSourcePath();
        if (!File.Exists(sourcePath))
        {
            associatedTree = null;
            tokens = null;
            return false;
        }

        // Parse the whole source file (SLL prediction is the fast path).
        string text = File.ReadAllText(sourcePath);
        AntlrInputStream input = new AntlrInputStream(text);
        JavaLexer lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;
        JavaParser.CompilationUnitContext result = parser.compilationUnit();
        associatedTree = null;
        tokens = tokenStream.GetTokens();

        // Collect every statement subtree that intersects the location's line.
        AssociatedTreeListener listener = new AssociatedTreeListener(_location, tokens);
        ParseTreeWalker.Default.Walk(listener, result);
        List<IParseTree> potentialTrees = listener.AssociatedTree;
        if (potentialTrees.Count == 1)
        {
            // Unambiguous: a single candidate statement on this line.
            associatedTree = potentialTrees[0];
        }
        else if (potentialTrees.Count > 1)
        {
            // Several statements share the line: disassemble the method and
            // find the statement-boundary offsets (evaluation stack depth 0),
            // then match the current code index against them positionally.
            byte[] bytecode = _location.GetMethod().GetBytecodes();
            DisassembledMethod disassembledMethod = BytecodeDisassembler.Disassemble(bytecode);
            var constantPool = _location.GetDeclaringType().GetConstantPool();
            ReadOnlyCollection<ExceptionTableEntry> exceptionTable;
            try
            {
                exceptionTable = _location.GetMethod().GetExceptionTable();
            }
            catch (DebuggerException)
            {
                // Best effort: continue with an empty exception table when the
                // debugger cannot provide one.
                exceptionTable = new ReadOnlyCollection<ExceptionTableEntry>(new ExceptionTableEntry[0]);
            }

            ImmutableList<int?> evaluationStackDepths = BytecodeDisassembler.GetEvaluationStackDepths(disassembledMethod, constantPool, exceptionTable);
            ReadOnlyCollection<ILocation> locations = _location.GetMethod().GetLineLocations();

            // find all bytecode offsets with evaluation stack depth 0 on the current line
            List<int> relevantOffsets = new List<int>();
            for (int i = 0; i < locations.Count; i++)
            {
                if (locations[i].GetLineNumber() != _location.GetLineNumber())
                    continue;

                // The current line's bytecode ends where the next line
                // location begins (or at the end of the method).
                long offsetLimit = i < locations.Count - 1 ? locations[i + 1].GetCodeIndex() : bytecode.Length;
                // start with the instruction for this bytecode offset
                for (int j = GetInstructionAtOffset(disassembledMethod, locations[i].GetCodeIndex());
                    j >= 0 && j < disassembledMethod.Instructions.Count && disassembledMethod.Instructions[j].Offset < offsetLimit;
                    j++)
                {
                    if (evaluationStackDepths[j] == 0)
                    {
                        // ignore unconditional branches
                        if (disassembledMethod.Instructions[j].OpCode.FlowControl == JavaFlowControl.Branch)
                            continue;

                        relevantOffsets.Add(disassembledMethod.Instructions[j].Offset);
                    }
                }
            }

            if (relevantOffsets.Count == potentialTrees.Count)
            {
                // heuristic: assume they appear in the same order as the source code on this line
                int treeIndex = relevantOffsets.IndexOf((int)_location.GetCodeIndex());
                if (treeIndex >= 0)
                    associatedTree = potentialTrees[treeIndex];
            }
        }

        // No match (zero candidates, or ambiguity the heuristic couldn't resolve).
        if (associatedTree == null)
        {
            tokens = null;
            return false;
        }

        return true;
    }
    catch (Exception e)
    {
        // Critical exceptions (OOM, thread abort, etc.) must not be swallowed.
        if (ErrorHandler.IsCriticalException(e))
            throw;

        associatedTree = null;
        tokens = null;
        return false;
    }
}
/// <summary>
/// Lexes (and optionally parses) one file on the given worker thread, reusing
/// per-thread lexer/parser instances according to the benchmark configuration
/// flags, and returns a CRC32 checksum of the observed token/parse events so
/// results can be compared across configurations.
/// </summary>
/// <param name="input">The character stream for the file to process.</param>
/// <param name="thread">Index of this worker's slot in the shared-state arrays.</param>
/// <returns>The accumulated CRC32 value (0 when COMPUTE_CHECKSUM is false).</returns>
public int parseFile(ICharStream input, int thread)
{
    Checksum checksum = new CRC32();

    Debug.Assert(thread >= 0 && thread < NUMBER_OF_THREADS);

    try
    {
        // Lazily create this thread's blank parse-tree listener.
        IParseTreeListener listener = sharedListeners[thread];
        if (listener == null)
        {
            listener = (IParseTreeListener)Activator.CreateInstance(listenerClass);
            sharedListeners[thread] = listener;
        }

        // Obtain a lexer: reuse the cached instance or build a fresh one,
        // choosing the ATN simulator per the DFA-caching flags.
        Lexer lexer = sharedLexers[thread];
        if (REUSE_LEXER && lexer != null)
        {
            lexer.SetInputStream(input);
        }
        else
        {
            lexer = (Lexer)lexerCtor.Invoke(new object[] { input });
            sharedLexers[thread] = lexer;
            if (!ENABLE_LEXER_DFA)
            {
                lexer.Interpreter = new NonCachingLexerATNSimulator(lexer, lexer.Atn);
            }
            else if (!REUSE_LEXER_DFA)
            {
                lexer.Interpreter = new LexerATNSimulator(lexer, sharedLexerATNs[thread]);
            }
        }

        lexer.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
        if (ENABLE_LEXER_DFA && !REUSE_LEXER_DFA)
        {
            lexer.Interpreter.atn.ClearDFA();
        }

        // Tokenize the whole file and record the token count.
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.Fill();
        Interlocked.Add(ref tokenCount, tokens.Size);

        if (COMPUTE_CHECKSUM)
        {
            foreach (IToken token in tokens.GetTokens())
            {
                updateChecksum(checksum, token);
            }
        }

        // Lexer-only benchmark mode stops here.
        if (!RUN_PARSER)
        {
            return (int)checksum.Value;
        }

        // Obtain a parser, mirroring the lexer reuse strategy above.
        Parser parser = sharedParsers[thread];
        if (REUSE_PARSER && parser != null)
        {
            parser.SetInputStream(tokens);
        }
        else
        {
            Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
            parser = newParser;
            sharedParsers[thread] = parser;
        }

        parser.RemoveErrorListeners();
        // In two-stage parsing, stage 1 stays silent; errors are reported only
        // by the LL retry below.
        if (!TWO_STAGE_PARSING)
        {
            parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
            parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
        }

        if (!ENABLE_PARSER_DFA)
        {
            parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
        }
        else if (!REUSE_PARSER_DFA)
        {
            parser.Interpreter = new ParserATNSimulator(parser, sharedParserATNs[thread]);
        }

        if (ENABLE_PARSER_DFA && !REUSE_PARSER_DFA)
        {
            parser.Interpreter.atn.ClearDFA();
        }

        // Stage 1 (or the only stage): SLL when two-stage parsing is enabled.
        parser.Interpreter.PredictionMode = TWO_STAGE_PARSING ? PredictionMode.Sll : PREDICTION_MODE;
        parser.Interpreter.force_global_context = FORCE_GLOBAL_CONTEXT && !TWO_STAGE_PARSING;
        parser.Interpreter.always_try_local_context = TRY_LOCAL_CONTEXT_FIRST || TWO_STAGE_PARSING;
        parser.Interpreter.optimize_ll1 = OPTIMIZE_LL1;
        parser.Interpreter.optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
        parser.Interpreter.optimize_hidden_conflicted_configs = OPTIMIZE_HIDDEN_CONFLICTED_CONFIGS;
        parser.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
        parser.Interpreter.tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
        parser.Interpreter.treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
        parser.BuildParseTree = BUILD_PARSE_TREES;
        if (!BUILD_PARSE_TREES && BLANK_LISTENER)
        {
            parser.AddParseListener(listener);
        }
        if (BAIL_ON_ERROR || TWO_STAGE_PARSING)
        {
            parser.ErrorHandler = new BailErrorStrategy();
        }

        // The grammar's entry rule is invoked reflectively by name.
        MethodInfo parseMethod = parserClass.GetMethod(entryPoint);
        object parseResult;

        IParseTreeListener checksumParserListener = null;

        try
        {
            if (COMPUTE_CHECKSUM)
            {
                checksumParserListener = new ChecksumParseTreeListener(checksum);
                parser.AddParseListener(checksumParserListener);
            }

            parseResult = parseMethod.Invoke(parser, null);
        }
        catch (TargetInvocationException ex)
        {
            // Reflection wraps the parser's exception; only a bail-out
            // (ParseCanceledException) during two-stage parsing is retried.
            if (!TWO_STAGE_PARSING)
            {
                throw;
            }

            string sourceName = tokens.SourceName;
            sourceName = !string.IsNullOrEmpty(sourceName) ? sourceName + ": " : "";
            Console.Error.WriteLine(sourceName + "Forced to retry with full context.");

            if (!(ex.InnerException is ParseCanceledException))
            {
                throw;
            }

            // Stage 2: rewind and reparse the same tokens with full prediction
            // and verbose error reporting.
            tokens.Reset();
            if (REUSE_PARSER && sharedParsers[thread] != null)
            {
                parser.SetInputStream(tokens);
            }
            else
            {
                Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
                parser = newParser;
                sharedParsers[thread] = parser;
            }

            parser.RemoveErrorListeners();
            parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
            parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
            if (!ENABLE_PARSER_DFA)
            {
                parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
            }
            parser.Interpreter.PredictionMode = PREDICTION_MODE;
            parser.Interpreter.force_global_context = FORCE_GLOBAL_CONTEXT;
            parser.Interpreter.always_try_local_context = TRY_LOCAL_CONTEXT_FIRST;
            parser.Interpreter.optimize_ll1 = OPTIMIZE_LL1;
            parser.Interpreter.optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
            parser.Interpreter.optimize_hidden_conflicted_configs = OPTIMIZE_HIDDEN_CONFLICTED_CONFIGS;
            parser.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
            parser.Interpreter.tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
            parser.Interpreter.treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
            parser.BuildParseTree = BUILD_PARSE_TREES;
            if (!BUILD_PARSE_TREES && BLANK_LISTENER)
            {
                parser.AddParseListener(listener);
            }
            if (BAIL_ON_ERROR)
            {
                parser.ErrorHandler = new BailErrorStrategy();
            }

            parseResult = parseMethod.Invoke(parser, null);
        }
        finally
        {
            // The checksum listener must not leak into the next file parsed by
            // a reused parser instance.
            if (checksumParserListener != null)
            {
                parser.RemoveParseListener(checksumParserListener);
            }
        }

        Assert.IsInstanceOfType(parseResult, typeof(IParseTree));
        if (BUILD_PARSE_TREES && BLANK_LISTENER)
        {
            ParseTreeWalker.Default.Walk(listener, (ParserRuleContext)parseResult);
        }
    }
    catch (Exception e)
    {
        // When syntax errors aren't being reported, a cancelled parse is an
        // expected outcome — return the checksum gathered so far.
        if (!REPORT_SYNTAX_ERRORS && e is ParseCanceledException)
        {
            return (int)checksum.Value;
        }

        throw;
    }

    return (int)checksum.Value;
}