protected override void ReParseImpl() {
    // Re-parse the whole buffer with the PHP grammar and publish the
    // resulting navigation trees. Diagnostics go to the IntelliSense pane
    // (may be null when the output window is unavailable).
    var pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    var timer = Stopwatch.StartNew();

    // Resolve the document path for error reporting, with a placeholder fallback.
    string documentPath = "<Unknown File>";
    ITextDocument document = TextDocument;
    if (document != null) {
        documentPath = document.FilePath;
    }

    // Lex and parse the entire current snapshot.
    var currentSnapshot = TextBuffer.CurrentSnapshot;
    var charStream = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
    var phpLexer = new PhpLexer(charStream);
    phpLexer.TokenFactory = new SnapshotTokenFactory(currentSnapshot, phpLexer);
    var tokenStream = new CommonTokenStream(phpLexer);
    var phpParser = new PhpParser(tokens: tokenStream);
    phpParser.BuildParseTree = true;

    // Collect syntax errors; the listener also echoes them to the pane.
    var syntaxErrors = new List<ParseErrorEventArgs>();
    phpParser.AddErrorListener(new ErrorListener(documentPath, syntaxErrors, pane));

    var tree = phpParser.compileUnit();

    // Walk the parse tree to build the editor navigation structures.
    var navigationListener = new NavigationTreesListener();
    ParseTreeWalker.Default.Walk(navigationListener, tree);

    OnParseComplete(new PhpEditorNavigationParseResultEventArgs(currentSnapshot, syntaxErrors, timer.Elapsed, tokenStream.GetTokens(), tree, navigationListener.NavigationTrees));
}
protected override void ReParseImpl() {
    // Re-render the Markdown buffer to HTML and publish the result.
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    try {
        var snapshot = TextBuffer.CurrentSnapshot;
        string content = GetHTMLText(snapshot.GetText(), true);
        OnParseComplete(new MarkdownParseResultEventArgs(snapshot, content));
    } catch (Exception e) when (!ErrorHandler.IsCriticalException(e)) {
        // Exception filter (rather than catch-check-rethrow) so critical
        // exceptions are never caught at all — consistent with the other
        // ReParseImpl implementations in this solution.
        try {
            if (outputWindow != null) {
                outputWindow.WriteLine(e.Message);
            }
        } catch (Exception e2) when (!ErrorHandler.IsCriticalException(e2)) {
            // Best-effort logging only; a failure to write the message is ignored.
        }
    }
}
public override void DisplayRecognitionError(string[] tokenNames, RecognitionException e) {
    // Write a "file(line,column): Type: message" diagnostic to the
    // IntelliSense output pane, then defer to the base implementation.
    // Fix: removed the unused local previously assigned from GetErrorHeader(e).
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    if (outputWindow != null) {
        string message = GetErrorMessage(e, tokenNames);

        // Span of the offending token in the snapshot; empty (start 0) when
        // the exception carries no token.
        Span span = new Span();
        if (e.Token != null) {
            span = Span.FromBounds(e.Token.StartIndex, e.Token.StopIndex + 1);
        }

        // Truncate long messages before the position prefix is added.
        if (message.Length > 100) {
            message = message.Substring(0, 100) + " ...";
        }

        ITextDocument document;
        if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null) {
            string fileName = document.FilePath;
            var line = Snapshot.GetLineFromPosition(span.Start);
            // 1-based line/column, matching the MSBuild-style diagnostic format.
            message = string.Format("{0}({1},{2}): {3}: {4}", fileName, line.LineNumber + 1, span.Start - line.Start.Position + 1, GetType().Name, message);
        }

        outputWindow.WriteLine(message);
    }

    base.DisplayRecognitionError(tokenNames, e);
}
protected override void ReParseImpl() {
    // Parse the whole buffer with the ANTLR 3 Java grammar and publish
    // tokens + parse result. Errors are echoed to the IntelliSense pane.
    var pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    var timer = Stopwatch.StartNew();

    string documentPath = "<Unknown File>";
    ITextDocument document = TextDocument;
    if (document != null) {
        documentPath = document.FilePath;
    }

    var currentSnapshot = TextBuffer.CurrentSnapshot;
    // JavaUnicodeStream handles \uXXXX escapes before the lexer sees the text.
    var charStream = new ANTLRStringStream(currentSnapshot.GetText());
    var javaLexer = new Java2Lexer(new JavaUnicodeStream(charStream));
    var tokenStream = new CommonTokenStream(javaLexer);
    var javaParser = new Java2Parser(tokenStream);

    var syntaxErrors = new List<ParseErrorEventArgs>();
    javaParser.ParseError += (sender, args) => {
        syntaxErrors.Add(args);

        // Truncate the raw message before the file-position prefix is applied.
        string text = args.Message;
        if (text.Length > 100) {
            text = text.Substring(0, 100) + " ...";
        }

        ITextSnapshotLine errorLine = currentSnapshot.GetLineFromPosition(args.Span.Start);
        int lineNumber = errorLine.LineNumber;
        int columnNumber = args.Span.Start - errorLine.Start;
        if (pane != null) {
            pane.WriteLine(string.Format("{0}({1},{2}): {3}", documentPath, lineNumber + 1, columnNumber + 1, text));
        }

        // Abort the parse once the error count suggests the input is hopeless.
        if (syntaxErrors.Count > 100) {
            throw new OperationCanceledException();
        }
    };

    var parseResult = javaParser.compilationUnit();
    OnParseComplete(new AntlrParseResultEventArgs(currentSnapshot, syntaxErrors, timer.Elapsed, tokenStream.GetTokens(), parseResult));
}
protected override void ReParseImpl() {
    // Parse the whole buffer with the Alloy grammar and publish the result.
    var pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    var timer = Stopwatch.StartNew();

    var currentSnapshot = TextBuffer.CurrentSnapshot;
    var charStream = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
    var alloyLexer = new AlloyLexer(charStream);
    var tokenStream = new CommonTokenStream(alloyLexer);
    var alloyParser = new AlloyParser(tokenStream);

    var syntaxErrors = new List<ParseErrorEventArgs>();
    alloyParser.ParseError += (sender, args) => {
        syntaxErrors.Add(args);

        // Prefix with file(line,column) when the buffer is backed by a document.
        string text = args.Message;
        ITextDocument document;
        if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null) {
            string fileName = document.FilePath;
            var errorLine = currentSnapshot.GetLineFromPosition(args.Span.Start);
            text = string.Format("{0}({1},{2}): {3}", fileName, errorLine.LineNumber + 1, args.Span.Start - errorLine.Start.Position + 1, text);
        }

        // Truncate the final message, prefix included.
        if (text.Length > 100) {
            text = text.Substring(0, 100) + " ...";
        }

        if (pane != null) {
            pane.WriteLine(text);
        }

        // Give up once the buffer produces an excessive number of errors.
        if (syntaxErrors.Count > 100) {
            throw new OperationCanceledException();
        }
    };

    var parseResult = alloyParser.compilationUnit();
    OnParseComplete(new AntlrParseResultEventArgs(currentSnapshot, syntaxErrors, timer.Elapsed, tokenStream.GetTokens(), parseResult));
}
protected override void ReParseImpl() {
    // NOTE(review): the entire parsing pipeline below is compiled out by
    // '#if false', so this method currently does nothing observable beyond
    // fetching the output pane and starting a stopwatch. OnParseComplete is
    // never raised from here. Confirm whether the disabled path is pending
    // a port to the current ANTLR APIs or should be deleted.
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    Stopwatch stopwatch = Stopwatch.StartNew();
    var snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
#if false
    // Inactive: parse an ANTLR grammar file, reporting errors to the pane
    // (capped at 100) and publishing tokens + the grammar_ rule result.
    var lexer = new AntlrErrorProvidingLexer(input);
    var tokens = new Antlr4ParserTokenStream(lexer);
    var parser = new AntlrErrorProvidingParser(tokens);
    lexer.Parser = parser;
    tokens.Parser = parser;
    List<ParseErrorEventArgs> errors = new List<ParseErrorEventArgs>();
    parser.ParseError += (sender, e) => {
        errors.Add(e);
        string message = e.Message;
        if (message.Length > 100) {
            message = message.Substring(0, 100) + " ...";
        }
        if (outputWindow != null) {
            outputWindow.WriteLine(message);
        }
        if (errors.Count > 100) {
            throw new OperationCanceledException();
        }
    };
    AntlrTool.ToolPathRoot = typeof(AntlrTool).Assembly.Location;
    ErrorManager.SetErrorListener(new AntlrErrorProvidingParser.ErrorListener());
    Grammar g = new Grammar();
    var result = parser.grammar_(g);
    OnParseComplete(new AntlrParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), result));
#endif
}
protected override void ReParseImpl() {
    // Parse a StringTemplate group file twice: once with the ANTLR 3
    // lexer/parser (used to build the template AST published to listeners)
    // and once with the ANTLR 4 parser (kept alongside for downstream
    // consumers). Any non-critical failure is logged and swallowed so a
    // broken buffer never takes down the background parse loop.
    var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    try {
        Stopwatch stopwatch = Stopwatch.StartNew();
        var snapshot = TextBuffer.CurrentSnapshot;
        SnapshotCharStream input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
        GroupLexer lexer = new GroupLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        GroupParserWrapper parser = new GroupParserWrapper(tokens);
        List<ParseErrorEventArgs> errors = new List<ParseErrorEventArgs>();
        parser.ParseError += (sender, e) => {
            errors.Add(e);
            // Prefix "file(line,column): " when a backing document exists,
            // then truncate the combined message to 100 characters.
            string message = e.Message;
            ITextDocument document;
            if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null) {
                string fileName = document.FilePath;
                var line = snapshot.GetLineFromPosition(e.Span.Start);
                message = string.Format("{0}({1},{2}): {3}", fileName, line.LineNumber + 1, e.Span.Start - line.Start.Position + 1, message);
            }
            if (message.Length > 100) {
                message = message.Substring(0, 100) + " ...";
            }
            if (outputWindow != null) {
                outputWindow.WriteLine(message);
            }
            // Cancel the parse outright after 100 errors.
            if (errors.Count > 100) {
                throw new OperationCanceledException();
            }
        };
        // '<' and '>' are the template expression delimiters; "/" is the
        // group's root directory prefix passed to the group rule.
        TemplateGroupWrapper group = new TemplateGroupWrapper('<', '>');
        parser.group(group, "/");
        TemplateGroupRuleReturnScope returnScope = BuiltAstForGroupTemplates(group);
        // Also parse the input using the V4 lexer/parser for downstream operations that make use of it
        IList<Antlr4.Runtime.IToken> v4tokens;
        TemplateParser.GroupFileContext v4result = ParseWithAntlr4(snapshot, out v4tokens);
        OnParseComplete(new StringTemplateParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), returnScope, v4tokens, v4result));
    } catch (Exception e) when (!ErrorHandler.IsCriticalException(e)) {
        // Best-effort diagnostics; a failure while logging is itself ignored.
        try {
            if (outputWindow != null) {
                outputWindow.WriteLine(e.Message);
            }
        } catch (Exception ex2) when (!ErrorHandler.IsCriticalException(ex2)) {
        }
    }
}
protected override void ReParseImpl() {
    // Classify every identifier in the buffer as a definition, a reference,
    // or unknown, then publish classification tags. Strategy: one linear
    // lexer pass collects candidate tokens; ATN network interpreters then
    // walk forward/backward from those candidates to decide their role.
    Stopwatch timer = Stopwatch.StartNew();

    // lex the entire document to get the set of identifiers we'll need to classify
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new GoLexer(input);
    // wraps the lexer to apply Go's automatic semicolon insertion
    var tokenSource = new GoSemicolonInsertionTokenSource(lexer);
    var tokens = new CommonTokenStream(tokenSource);
    tokens.Fill();

    // NOTE(review): the two comment blocks below describe Alloy constructs
    // (module/open/fact/sig/...), not Go — they look carried over from the
    // Alloy tagger this code appears to be derived from; confirm and prune.
    /* easy to handle the following definitions:
     * - module (name)
     * - open (external symbol reference) ... as (name)
     * - fact (name)?
     * - assert (name)?
     * - fun (ref.name | name)
     * - pred (ref.name | name)
     * - (name): run|check
     * - sig (namelist)
     * - enum (name)
     * moderate to handle the following definitions:
     * - decl name(s)
     * harder to handle the following definitions:
     */
    /* A single name follows the following keywords:
     * - KW_MODULE
     * - KW_OPEN
     * - KW_AS
     * - KW_ENUM
     * - KW_FACT (name is optional)
     * - KW_ASSERT (name is optional)
     */
    List<IToken> nameKeywords = new List<IToken>();
    List<IToken> declColons = new List<IToken>();
    List<IToken> identifiers = new List<IToken>();

    // Single scan: bucket identifiers, declaration-introducing keywords, and
    // ':=' / ':' tokens (short variable declarations and labels).
    while (tokens.LA(1) != CharStreamConstants.EndOfFile) {
        switch (tokens.LA(1)) {
        case GoLexer.IDENTIFIER:
            identifiers.Add(tokens.LT(1));
            break;

        case GoLexer.KW_PACKAGE:
        case GoLexer.KW_IMPORT:
        case GoLexer.KW_TYPE:
        case GoLexer.KW_VAR:
        case GoLexer.KW_FUNC:
        case GoLexer.KW_CONST:
        //case GoLexer.KW_MODULE:
        //case GoLexer.KW_OPEN:
        //case GoLexer.KW_AS:
        //case GoLexer.KW_ENUM:
        //case GoLexer.KW_FACT:
        //case GoLexer.KW_ASSERT:
        //case GoLexer.KW_RUN:
        //case GoLexer.KW_CHECK:
        //case GoLexer.KW_EXTENDS:
        //case GoLexer.KW_FUN:
        //case GoLexer.KW_PRED:
        //case GoLexer.KW_SIG:
            nameKeywords.Add(tokens.LT(1));
            break;

        case GoLexer.DEFEQ:
        case GoLexer.COLON:
            declColons.Add(tokens.LT(1));
            break;

        case CharStreamConstants.EndOfFile:
            // unreachable in practice: the loop condition already excludes EOF
            goto doneLexing;

        default:
            break;
        }

        tokens.Consume();
    }

doneLexing:
    // Sets keyed by token index so the same lexical token is never counted twice.
    HashSet<IToken> definitions = new HashSet<IToken>(TokenIndexEqualityComparer.Default);
    HashSet<IToken> references = new HashSet<IToken>(TokenIndexEqualityComparer.Default);

    // Pass 1: from each declaration keyword, step the top-level network
    // interpreter forward until all contexts are bounded (or the context
    // count explodes past 400), then harvest identifier transitions.
    foreach (var token in nameKeywords) {
        tokens.Seek(token.TokenIndex);
        NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens);
        while (interpreter.TryStepForward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }

            if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                break;
            }
        }

        interpreter.CombineBoundedEndContexts();

        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }

                switch (transition.Symbol) {
                case GoLexer.IDENTIFIER:
                //case GoLexer.KW_THIS:
                    // The rule owning the target state tells us whether this
                    // identifier position is a definition or a reference.
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    } else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }

                    break;

                default:
                    continue;
                }
            }
        }
    }

    // Pass 2: for each ':=' / ':' candidate, step backward first (pruning
    // contexts inconsistent with what pass 1 already decided); if the
    // contexts still disagree, step forward as well.
    foreach (var token in declColons) {
        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        if (token.Type == GoLexer.COLON) {
            // A ':' only interests us as a label declaration, i.e. when an
            // identifier immediately precedes it.
            IToken potentialLabel = tokens.LT(-2);
            if (potentialLabel.Type != GoLexer.IDENTIFIER) {
                continue;
            }
        }

        NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens);
        while (interpreter.TryStepBackward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }

            if (interpreter.Contexts.All(context => context.BoundedStart)) {
                break;
            }

            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
        }

        interpreter.CombineBoundedStartContexts();

        if (!AllAgree(interpreter.Contexts)) {
            while (interpreter.TryStepForward()) {
                if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                    break;
                }

                if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                    break;
                }

                interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
            }

            interpreter.CombineBoundedEndContexts();
        }

        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }

                switch (transition.Symbol) {
                case GoLexer.IDENTIFIER:
                //case GoLexer.KW_THIS:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    } else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }

                    break;

                default:
                    continue;
                }
            }
        }
    }

#if false
    // Disabled pass 3: full-network interpretation of every remaining
    // identifier (presumably too expensive — TODO confirm why it is off).
    foreach (var token in identifiers) {
        if (definitions.Contains(token) || references.Contains(token)) {
            continue;
        }

        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens);

        while (interpreter.TryStepBackward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }

            if (interpreter.Contexts.All(context => context.BoundedStart)) {
                break;
            }

            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
            if (AllAgree(interpreter.Contexts)) {
                break;
            }
        }

        interpreter.CombineBoundedStartContexts();

        while (interpreter.TryStepForward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }

            if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                break;
            }

            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
            if (AllAgree(interpreter.Contexts)) {
                break;
            }
        }

        interpreter.CombineBoundedEndContexts();

        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }

                switch (transition.Symbol) {
                case GoLexer.IDENTIFIER:
                //case GoLexer.KW_THIS:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    } else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }

                    break;

                default:
                    continue;
                }
            }
        }
    }
#endif

    // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
    HashSet<IToken> unknownIdentifiers = new HashSet<IToken>(definitions, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.IntersectWith(references);
    definitions.ExceptWith(unknownIdentifiers);

#if true
    // Treat every identifier not proven to be a definition as a reference.
    references = new HashSet<IToken>(identifiers, TokenIndexEqualityComparer.Default);
    references.ExceptWith(definitions);
    references.ExceptWith(unknownIdentifiers);
#else
    references.ExceptWith(unknownIdentifiers);

    // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
    unknownIdentifiers = new HashSet<IToken>(identifiers, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.ExceptWith(definitions);
    unknownIdentifiers.ExceptWith(references);
#endif

    // Materialize classification tags for the three buckets and publish them.
    List<ITagSpan<IClassificationTag>> tags = new List<ITagSpan<IClassificationTag>>();

    IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Definition);
    tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

    IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Reference);
    tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

    IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.UnknownIdentifier);
    tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

    _tags = tags;

    timer.Stop();

    IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    if (pane != null) {
        pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
    }

    // Invalidate the whole snapshot so consumers re-query the new tags.
    OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
protected override void ReParseImpl() {
    // Classify every Java identifier in the buffer as definition, reference,
    // or unknown. Cheap LL(2)/LL(3) lookahead-set checks settle most tokens
    // in one linear pass; only ambiguous declaration candidates fall back to
    // the (more expensive) ATN network interpreter.
    Stopwatch timer = Stopwatch.StartNew();

    // lex the entire document to get the set of identifiers we'll need to classify
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    // JavaUnicodeStream handles \uXXXX escapes before lexing.
    JavaUnicodeStream inputWrapper = new JavaUnicodeStream(input);
    var lexer = new Java2Lexer(inputWrapper);
    var tokens = new CommonTokenStream(lexer);
    tokens.Fill();

    List<IToken> nameKeywords = new List<IToken>();
    List<IToken> declColons = new List<IToken>();
    List<IToken> identifiers = new List<IToken>();

    // Sets keyed by token index so the same lexical token is never counted twice.
    HashSet<IToken> definitions = new HashSet<IToken>(TokenIndexEqualityComparer.Default);
    HashSet<IToken> references = new HashSet<IToken>(TokenIndexEqualityComparer.Default);

    // Ensures the precomputed LL(2) source/follow sets used below are built.
    GetLl2SymbolSets();

    while (tokens.LA(1) != CharStreamConstants.EndOfFile) {
        // Identifier preceded by a token that can ONLY start a definition
        // (or only a reference) is classified immediately.
        // covered by the double-sided check
        if (_definitionOnlySourceSet.Contains(tokens.LA(1))) {
            if (tokens.LA(2) == Java2Lexer.IDENTIFIER) {
                definitions.Add(tokens.LT(2));
            }
        } else if (_referenceOnlySourceSet.Contains(tokens.LA(1))) {
            if (tokens.LA(2) == Java2Lexer.IDENTIFIER) {
                references.Add(tokens.LT(2));
            }
        }

        // Same idea from the other side: a token that can ONLY follow a
        // definition (or only a reference) classifies the identifier before it.
        if (_definitionOnlyFollowSet.Contains(tokens.LA(1))) {
            IToken previous = tokens.LT(-1);
            if (previous != null && previous.Type == Java2Lexer.IDENTIFIER) {
                definitions.Add(previous);
            }
        } else if (_referenceOnlyFollowSet.Contains(tokens.LA(1))) {
            IToken previous = tokens.LT(-1);
            if (previous != null && previous.Type == Java2Lexer.IDENTIFIER) {
                references.Add(previous);
            }
        }

        // Two-sided context check: classify LT(2) when the (LA(1), LA(3))
        // pair is compatible with exactly one of the two roles.
        if (tokens.LA(2) == Java2Lexer.IDENTIFIER) {
            IntervalSet bothWaysFollowDefinition;
            IntervalSet bothWaysFollowReference;
            _definitionContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowDefinition);
            _referenceContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowReference);

            bool couldBeDef = bothWaysFollowDefinition != null && bothWaysFollowDefinition.Contains(tokens.LA(3));
            bool couldBeRef = bothWaysFollowReference != null && bothWaysFollowReference.Contains(tokens.LA(3));

            if (couldBeDef && !couldBeRef) {
                definitions.Add(tokens.LT(2));
            } else if (couldBeRef && !couldBeDef) {
                references.Add(tokens.LT(2));
            }
        }

        // Two-token source check: classify LT(3) when the (LA(1), LA(2))
        // prefix is compatible with exactly one role.
        if (tokens.LA(3) == Java2Lexer.IDENTIFIER && _definitionSourceSet.Contains(tokens.LA(2))) {
            IntervalSet sourceDefinition2;
            IntervalSet sourceReference2;
            _definitionSourceSet2.TryGetValue(tokens.LA(2), out sourceDefinition2);
            _referenceSourceSet2.TryGetValue(tokens.LA(2), out sourceReference2);

            bool couldBeDef = sourceDefinition2 != null && sourceDefinition2.Contains(tokens.LA(1));
            bool couldBeRef = sourceReference2 != null && sourceReference2.Contains(tokens.LA(1));

            if (couldBeDef && !couldBeRef) {
                definitions.Add(tokens.LT(3));
            } else if (couldBeRef && !couldBeDef) {
                references.Add(tokens.LT(3));
            }
        }

        // Remember tokens that may trail a declaration for the slow pass below.
        if (_definitionFollowSet.Contains(tokens.LA(1))) {
            declColons.Add(tokens.LT(1));
        }

        if (tokens.LA(1) == Java2Lexer.IDENTIFIER) {
            identifiers.Add(tokens.LT(1));
        }

        tokens.Consume();
    }

    // Slow pass: resolve the remaining declaration candidates with the ATN
    // network interpreter — backward first, forward only if contexts still
    // disagree about the candidate token.
    foreach (var token in declColons) {
        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        IToken potentialDeclaration = tokens.LT(-2);
        if (potentialDeclaration.Type != Java2Lexer.IDENTIFIER || definitions.Contains(potentialDeclaration) || references.Contains(potentialDeclaration)) {
            continue;
        }

        bool agree = false;
        NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens, token.Type);

        while (interpreter.TryStepBackward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }

            if (interpreter.Contexts.All(context => context.BoundedStart)) {
                break;
            }

            // Drop contexts that contradict classifications already made.
            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
            agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex);
            if (agree) {
                break;
            }
        }

        interpreter.CombineBoundedStartContexts();

        if (!agree) {
            while (interpreter.TryStepForward()) {
                if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                    break;
                }

                if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                    break;
                }

                interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
                agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex);
                if (agree) {
                    break;
                }
            }

            interpreter.CombineBoundedEndContexts();
        }

        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }

                switch (transition.Symbol) {
                case Java2Lexer.IDENTIFIER:
                //case Java2Lexer.KW_THIS:
                    // The rule owning the target state decides the role.
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    } else if (rule.Name == JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }

                    break;

                default:
                    continue;
                }
            }
        }
    }

    // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
    HashSet<IToken> unknownIdentifiers = new HashSet<IToken>(definitions, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.IntersectWith(references);
    definitions.ExceptWith(unknownIdentifiers);

#if true // set to true to mark all unknown identifiers as references (requires complete analysis of definitions)
    references = new HashSet<IToken>(identifiers, TokenIndexEqualityComparer.Default);
    references.ExceptWith(definitions);
    references.ExceptWith(unknownIdentifiers);
#else
    references.ExceptWith(unknownIdentifiers);

    // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
    unknownIdentifiers = new HashSet<IToken>(identifiers, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.ExceptWith(definitions);
    unknownIdentifiers.ExceptWith(references);
#endif

    // Materialize classification tags for the three buckets and publish them.
    List<ITagSpan<IClassificationTag>> tags = new List<ITagSpan<IClassificationTag>>();

    IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Definition);
    tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

    IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Reference);
    tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

    IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.UnknownIdentifier);
    tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

    _tags = tags;

    timer.Stop();

    IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    if (pane != null) {
        pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
    }

    // Invalidate the whole snapshot so consumers re-query the new tags.
    OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
private void ParseReferenceSourceFile(object state) {
    // Background work item: parse a Java source file on disk and merge its
    // declarations into the IntelliSense cache via UpdateFile.
    // 'state' carries the file path (method is shaped as a WaitCallback).
    // Fixes: the ~30-line error-reporting lambda was duplicated verbatim for
    // the parser and the tree walker — now shared; catch-check-rethrow
    // replaced by an exception filter consistent with the other parsers.
    string fileName = state as string;
    if (string.IsNullOrEmpty(fileName)) {
        return;
    }

    try {
        if (!File.Exists(fileName)) {
            return;
        }

        string sourceText = File.ReadAllText(fileName);
        var inputStream = new ANTLRStringStream(sourceText, fileName);
        var unicodeStream = new JavaUnicodeStream(inputStream);
        var lexer = new Java2Lexer(unicodeStream);
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new Java2Parser(tokenStream);

        // Line-start offsets, computed lazily on the first error and shared
        // by both error handlers below.
        int[] lineOffsets = null;
        var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);

        // Shared handler: record the error, log "file(line,column): message"
        // (message truncated to 100 chars), and cancel the current pass once
        // its error list exceeds 20 entries.
        Action<List<ParseErrorEventArgs>, ParseErrorEventArgs> reportError = (errorList, e) => {
            errorList.Add(e);

            string message = e.Message;
            if (message.Length > 100) {
                message = message.Substring(0, 100) + " ...";
            }

            if (lineOffsets == null) {
                lineOffsets = FindLineOffsets(sourceText);
            }

            // BinarySearch yields the bitwise complement of the insertion
            // point on a miss; -line - 2 converts that to the containing line.
            int line = Array.BinarySearch(lineOffsets, e.Span.Start);
            if (line < 0) {
                line = -line - 2;
            }

            int column;
            if (line >= lineOffsets.Length) {
                column = 0;
            } else {
                column = e.Span.Start - lineOffsets[line];
            }

            if (outputWindow != null) {
                outputWindow.WriteLine(string.Format("{0}({1},{2}): {3}", fileName, line + 1, column + 1, message));
            }

            if (errorList.Count > 20) {
                throw new OperationCanceledException();
            }
        };

        List<ParseErrorEventArgs> errors = new List<ParseErrorEventArgs>();
        parser.ParseError += (sender, e) => reportError(errors, e);

        var result = parser.compilationUnit();

        // Walk the resulting AST to extract the declarations for the cache.
        CodeFileBuilder fileBuilder = new CodeFileBuilder(fileName);
        var treeNodeStream = new CommonTreeNodeStream(result.Tree);
        treeNodeStream.TokenStream = tokenStream;
        var walker = new IntelliSenseCacheWalker(treeNodeStream);

        List<ParseErrorEventArgs> walkerErrors = new List<ParseErrorEventArgs>();
        walker.ParseError += (sender, e) => reportError(walkerErrors, e);

        walker.compilationUnit(fileBuilder);
        UpdateFile(fileBuilder);
    } catch (Exception e) when (!ErrorHandler.IsCriticalException(e)) {
        // Best-effort background parse: non-critical failures (including the
        // OperationCanceledException thrown above) are intentionally swallowed.
    }
}