/// <summary>
/// Re-parses the current text buffer contents as Go source: lexes the full snapshot,
/// runs the parser, reports each syntax error to the IntelliSense output pane, and
/// raises the parse-complete event with the collected errors, tokens, and parse result.
/// </summary>
protected override void ReParseImpl()
{
    // Diagnostic output pane; TryGetPane may return null, so every write below is guarded.
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    try
    {
        // Timing starts before lexing so the reported elapsed time covers the whole parse.
        Stopwatch stopwatch = Stopwatch.StartNew();
        var snapshot = TextBuffer.CurrentSnapshot;
        // Stream over the entire snapshot (position 0 through the full length).
        SnapshotCharStream input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
        GoLexer lexer = new GoLexer(input);
        // NOTE(review): presumably wraps the lexer to apply Go's automatic semicolon
        // insertion rules before tokens reach the parser — confirm against its source.
        GoSemicolonInsertionTokenSource tokenSource = new GoSemicolonInsertionTokenSource(lexer);
        CommonTokenStream tokens = new CommonTokenStream(tokenSource);
        GoParser parser = new GoParser(tokens);
        List<ParseErrorEventArgs> errors = new List<ParseErrorEventArgs>();
        // Collect every reported syntax error and echo it (file(line,col): message) to the pane.
        parser.ParseError += (sender, e) =>
            {
                errors.Add(e);

                string message = e.Message;

                ITextDocument document;
                if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null)
                {
                    string fileName = document.FilePath;
                    var line = snapshot.GetLineFromPosition(e.Span.Start);
                    // Line/column are converted to 1-based values for display.
                    message = string.Format("{0}({1},{2}): {3}", fileName, line.LineNumber + 1, e.Span.Start - line.Start.Position + 1, message);
                }

                // Truncate very long messages so the output pane stays readable.
                if (message.Length > 100)
                    message = message.Substring(0, 100) + " ...";

                if (outputWindow != null)
                    outputWindow.WriteLine(message);

                // Bail out of the parse entirely once more than 100 errors accumulate.
                if (errors.Count > 100)
                    throw new OperationCanceledException();
            };

        var result = parser.compilationUnit();
        OnParseComplete(new AntlrParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), result));
    }
    catch (Exception e)
    {
        // Critical exceptions (per VS ErrorHandler) must propagate; everything else —
        // including the deliberate OperationCanceledException above — is swallowed so a
        // failed background parse never takes down the editor.
        if (ErrorHandler.IsCriticalException(e))
            throw;

        try
        {
            // Best-effort logging of the failure; a broken output pane must not rethrow.
            if (outputWindow != null)
                outputWindow.WriteLine(e.Message);
        }
        catch (Exception ex2)
        {
            if (ErrorHandler.IsCriticalException(ex2))
                throw;
        }
    }
}
/// <summary>
/// Verifies that the lexer recognizes an empty delimited comment ("/**/") followed by
/// trailing whitespace, producing exactly the comment token and the EOF token.
/// </summary>
public void TestEmptyComment()
{
    const string source = "/**/ ";
    var stream = new ANTLRStringStream(source);
    var commentLexer = new PreprocessorLexer(stream);
    var buffered = new CommonTokenStream(commentLexer);
    buffered.Fill();

    List<IToken> produced = buffered.GetTokens();

    // Expect the comment token plus EOF — the trailing space yields no token here.
    Assert.AreEqual(2, produced.Count);
    Assert.AreEqual(PreprocessorLexer.DELIMITED_COMMENT, produced[0].Type);
    Assert.AreEqual("/**/", produced[0].Text);
    Assert.AreEqual(PreprocessorLexer.EOF, produced[1].Type);
}
/// <summary>
/// Re-parses the current text buffer contents as Java source: lexes the snapshot text,
/// runs the parser, writes each syntax error to the IntelliSense output pane in
/// "file(line,col): message" form, and raises the parse-complete event.
/// </summary>
protected override void ReParseImpl()
{
    var pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    Stopwatch timer = Stopwatch.StartNew();

    // Fall back to a placeholder name when no document is attached to the buffer.
    string reportedFileName = "<Unknown File>";
    ITextDocument document = TextDocument;
    if (document != null)
        reportedFileName = document.FilePath;

    var currentSnapshot = TextBuffer.CurrentSnapshot;
    ANTLRStringStream charStream = new ANTLRStringStream(currentSnapshot.GetText());
    Java2Lexer javaLexer = new Java2Lexer(new JavaUnicodeStream(charStream));
    CommonTokenStream tokenStream = new CommonTokenStream(javaLexer);
    Java2Parser javaParser = new Java2Parser(tokenStream);
    List<ParseErrorEventArgs> reportedErrors = new List<ParseErrorEventArgs>();

    // Record each syntax error, truncate oversized messages, and echo to the pane.
    javaParser.ParseError += (source, args) =>
        {
            reportedErrors.Add(args);

            string text = args.Message;
            if (text.Length > 100)
                text = text.Substring(0, 100) + " ...";

            ITextSnapshotLine errorLine = currentSnapshot.GetLineFromPosition(args.Span.Start);
            int lineIndex = errorLine.LineNumber;
            int columnIndex = args.Span.Start - errorLine.Start;
            if (pane != null)
                pane.WriteLine(string.Format("{0}({1},{2}): {3}", reportedFileName, lineIndex + 1, columnIndex + 1, text));

            // Abort the parse once more than 100 errors have been collected.
            if (reportedErrors.Count > 100)
                throw new OperationCanceledException();
        };

    var parseResult = javaParser.compilationUnit();
    OnParseComplete(new AntlrParseResultEventArgs(currentSnapshot, reportedErrors, timer.Elapsed, tokenStream.GetTokens(), parseResult));
}
/// <summary>
/// Lexes and parses each of the given ASN.1 source files.
/// </summary>
/// <param name="inputFiles">Paths of the ASN.1 files to parse.</param>
/// <returns>
/// One tuple per input file containing the parse tree, the file name, and the
/// array of tokens produced by the lexer for that file.
/// </returns>
public static List<Tuple<ITree, string, IToken[]>> ParseAsn1InputFiles(IEnumerable<string> inputFiles)
{
    var parsedInputFiles = new List<Tuple<ITree, string, IToken[]>>();
    foreach (var inFileName in inputFiles)
    {
        ICharStream input = new ANTLRFileStream(inFileName);
        asn1Lexer lexer = new asn1Lexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);

        // Snapshot the token list before parsing. The List<T>(IEnumerable<T>)
        // constructor replaces the original manual element-by-element copy loop.
        List<IToken> tokenslst = new List<IToken>(tokens.GetTokens());

        asn1Parser parser = new asn1Parser(tokens);
        asn1Parser.moduleDefinitions_return result = parser.moduleDefinitions();
        ITree tree = (CommonTree)result.Tree;

        // NOTE(review): 'nodes' is never read after this point; it seems to exist only
        // to associate the token stream with the tree stream — confirm whether callers
        // rely on any side effect before removing it.
        CommonTreeNodeStream nodes = new CommonTreeNodeStream(tree);
        nodes.TokenStream = tokens;

        parsedInputFiles.Add(Tuple.Create(tree, inFileName, tokenslst.ToArray()));
    }

    return parsedInputFiles;
}
/// <summary>
/// Re-parses the buffer to rebuild the editor navigation targets for Alloy source.
/// Scans the token stream for navigation-relevant keywords, runs a network interpreter
/// backward from each keyword to find a bounded start, parses the matched declaration
/// with <c>AlloyParser</c>, and publishes the extracted navigation targets.
/// </summary>
protected override void ReParseImpl()
{
    // lex the entire document to get the set of identifiers we'll need to process
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new AlloyLexer(input);
    var tokens = new CommonTokenStream(lexer);
    tokens.Fill();

    /* Want to collect information from the following:
     *  - module (name)
     * Want to provide navigation info for the following types:
     *  - sig
     *  - enum
     * Want to provide navigation info for the following members:
     *  - decl (within a sigBody)
     *  - fun
     *  - pred
     *  - nameList (within an enumBody)
     * Eventually should consider the following:
     *  - cmdDecl
     *  - fact
     *  - assert
     */

    // Single linear scan: remember every keyword token that can anchor a navigation target.
    List<IToken> navigationKeywords = new List<IToken>();
    while (tokens.LA(1) != CharStreamConstants.EndOfFile)
    {
        switch (tokens.LA(1))
        {
        case AlloyLexer.KW_MODULE:
        case AlloyLexer.KW_SIG:
        case AlloyLexer.KW_ENUM:
        case AlloyLexer.KW_FUN:
        case AlloyLexer.KW_PRED:
        //case AlloyLexer.KW_ASSERT:
        //case AlloyLexer.KW_FACT:
            navigationKeywords.Add(tokens.LT(1));
            break;

        case CharStreamConstants.EndOfFile:
            goto doneLexing;

        default:
            break;
        }

        tokens.Consume();
    }

doneLexing:

    List<IEditorNavigationTarget> navigationTargets = new List<IEditorNavigationTarget>();
    // Accumulates the parsed 'module' header so later declarations can be combined with it.
    AstParserRuleReturnScope<CommonTree, IToken> moduleTree = null;
    CommonTreeAdaptor treeAdaptor = new CommonTreeAdaptor();

    foreach (var token in navigationKeywords)
    {
        // Position the stream just past the keyword before interpreting backward.
        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);

        // Step backward until every surviving context has a bounded start (or none remain).
        while (interpreter.TryStepBackward())
        {
            if (interpreter.Contexts.Count == 0)
                break;

            if (interpreter.Contexts.All(context => context.BoundedStart))
                break;
        }

        interpreter.CombineBoundedStartContexts();

#if false // since we're using the AlloyParser, I don't think we need this.
        while (interpreter.TryStepForward())
        {
            if (interpreter.Contexts.Count == 0)
                break;

            if (interpreter.Contexts.All(context => context.BoundedEnd))
                break;
        }
#endif

        foreach (var context in interpreter.Contexts)
        {
            switch (token.Type)
            {
            case AlloyLexer.KW_MODULE:
                {
                    // Earliest transition with a concrete token index marks where to start parsing.
                    InterpretTraceTransition firstMatch = context.Transitions.FirstOrDefault(i => i.TokenIndex != null);
                    if (firstMatch == null)
                        continue;

                    tokens.Seek(firstMatch.TokenIndex.Value);
                    AlloyParser parser = new AlloyParser(tokens);
                    AstParserRuleReturnScope<CommonTree, IToken> result = parser.module();
                    // Skip this context on a failed or error-laden parse; try the next one.
                    if (result == null || parser.NumberOfSyntaxErrors > 0)
                        continue;

                    moduleTree = result;
                    break;
                }

            case AlloyLexer.KW_SIG:
            case AlloyLexer.KW_ENUM:
            case AlloyLexer.KW_FUN:
            case AlloyLexer.KW_PRED:
                {
                    InterpretTraceTransition firstMatch = context.Transitions.FirstOrDefault(i => i.TokenIndex != null);
                    if (firstMatch == null)
                        continue;

                    tokens.Seek(firstMatch.TokenIndex.Value);
                    AlloyParser parser = new AlloyParser(tokens);
                    AstParserRuleReturnScope<CommonTree, IToken> result = null;
                    // Dispatch to the parser rule matching the keyword kind.
                    switch (token.Type)
                    {
                    case AlloyLexer.KW_SIG:
                        result = parser.sigDeclNoBlock();
                        break;

                    case AlloyLexer.KW_ENUM:
                        result = parser.enumDecl();
                        break;

                    case AlloyLexer.KW_FUN:
                    case AlloyLexer.KW_PRED:
                        result = parser.funDeclGenericBody();
                        break;
                    }

                    if (result == null || parser.NumberOfSyntaxErrors > 0)
                        continue;

                    // Graft the declaration under a nil root together with the module header
                    // so the walker sees the module context for each declaration.
                    if (moduleTree != null)
                    {
                        object tree = treeAdaptor.Nil();
                        treeAdaptor.AddChild(tree, moduleTree.Tree);
                        treeAdaptor.AddChild(tree, result.Tree);
                        treeAdaptor.SetTokenBoundaries(tree, moduleTree.Start, result.Stop);
                        result.Start = moduleTree.Start;
                        result.Tree = (CommonTree)tree;
                    }

                    navigationTargets.AddRange(AlloyEditorNavigationSourceWalker.ExtractNavigationTargets(result, tokens.GetTokens().AsReadOnly(), _provider, snapshot));
                    break;
                }

            default:
                continue;
            }

            // A context produced a successful parse; ignore the remaining contexts for this keyword.
            break;

#if false
            InterpretTraceTransition firstBraceTransition = context.Transitions.FirstOrDefault(i => i.Symbol == AlloyLexer.LBRACE);
            InterpretTraceTransition lastBraceTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
            if (firstBraceTransition == null || lastBraceTransition == null)
                continue;

            if (token.Type == AlloyLexer.KW_SIG)
            {
                InterpretTraceTransition lastBodyBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.RBRACE && interpreter.Network.StateRules[i.Transition.SourceState.Id].Name == AlloyOutliningAtnBuilder.RuleNames.SigBody);
                if (lastBodyBraceTransition != lastBraceTransition)
                {
                    var bodySpan = OutlineBlock(firstBraceTransition.Token, lastBodyBraceTransition.Token, snapshot);
                    if (bodySpan != null)
                        navigationTargets.Add(bodySpan);

                    firstBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.LBRACE && i.TokenIndex > lastBodyBraceTransition.TokenIndex);
                }
            }

            var blockSpan = OutlineBlock(firstBraceTransition.Token, lastBraceTransition.Token, snapshot);
            if (blockSpan != null)
                navigationTargets.Add(blockSpan);
#endif
        }
    }

    // Publish the rebuilt targets and announce that the whole snapshot changed.
    _navigationTargets = navigationTargets;
    OnNavigationTargetsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
/// <summary>
/// Writes every token in the stream to the output, one per token, using each
/// token's <c>ToString</c> representation.
/// </summary>
/// <param name="tokens">The token stream whose tokens are printed.</param>
private static void PrintTokens(CommonTokenStream tokens)
{
    var allTokens = tokens.GetTokens();
    foreach (var current in allTokens)
    {
        Print(current.ToString());
    }
}
/// <summary>
/// Tokenizes the given source text, after stripping carriage returns, and
/// returns the resulting token list.
/// </summary>
/// <param name="input">The source text to tokenize.</param>
/// <returns>All tokens produced by the lexer.</returns>
public static List<IToken> GetTokens(string input)
{
    // Normalize line endings to '\n' only before lexing.
    string normalized = input.Replace("\r", "");
    var charStream = new ANTLRStringStream(normalized);
    var sugarLexer = new SugarCppLexer(charStream);
    var tokenStream = new CommonTokenStream(sugarLexer);
    return tokenStream.GetTokens();
}