protected override void ReParseImpl()
{
    // lex the entire document to find the keywords that can introduce an outlining region
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new GoLexer(input);
    var tokenSource = new GoSemicolonInsertionTokenSource(lexer);
    var tokens = new CommonTokenStream(tokenSource);
    tokens.Fill();

    /* Outline the blocks introduced by the following keywords:
     *  - import
     *  - const
     *  - struct
     *  - func
     *  - var
     * (type is currently disabled below)
     */
    List<IToken> outliningKeywords = new List<IToken>();
    while (tokens.LA(1) != CharStreamConstants.EndOfFile)
    {
        switch (tokens.LA(1))
        {
        case GoLexer.KW_IMPORT:
        ////case GoLexer.KW_TYPE:
        case GoLexer.KW_CONST:
        case GoLexer.KW_STRUCT:
        case GoLexer.KW_FUNC:
        case GoLexer.KW_VAR:
            outliningKeywords.Add(tokens.LT(1));
            break;

        case CharStreamConstants.EndOfFile:
            goto doneLexing;

        default:
            break;
        }

        tokens.Consume();
    }

doneLexing:

    List<ITagSpan<IOutliningRegionTag>> outliningRegions = new List<ITagSpan<IOutliningRegionTag>>();
    foreach (var token in outliningKeywords)
    {
        // position the stream immediately after the keyword, then walk the network backward
        // to the start of the declaration and forward to its end
        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);

        while (interpreter.TryStepBackward())
        {
            if (interpreter.Contexts.Count == 0)
                break;

            if (interpreter.Contexts.All(context => context.BoundedStart))
                break;
        }

        interpreter.CombineBoundedStartContexts();

        while (interpreter.TryStepForward())
        {
            if (interpreter.Contexts.Count == 0)
                break;

            if (interpreter.Contexts.All(context => context.BoundedEnd))
                break;
        }

        interpreter.CombineBoundedEndContexts();

        foreach (var context in interpreter.Contexts)
        {
            switch (token.Type)
            {
            case GoLexer.KW_IMPORT:
            case GoLexer.KW_VAR:
            case GoLexer.KW_CONST:
                {
                    // the first matched token is the keyword itself, so the second matched token
                    // must be '(' for this to be an outlinable parenthesized declaration block
                    InterpretTraceTransition firstTransition = context.Transitions.Where(i => i.Transition.IsMatch).ElementAtOrDefault(1);
                    InterpretTraceTransition lastTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                    if (firstTransition == null || lastTransition == null)
                        continue;

                    if (firstTransition.Symbol != GoLexer.LPAREN)
                        continue;

                    var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot);
                    if (blockSpan != null)
                        outliningRegions.Add(blockSpan);

                    break;
                }

            case GoLexer.KW_STRUCT:
            case GoLexer.KW_FUNC:
                {
                    // outline from the opening '{' of the body to the last matched token
                    InterpretTraceTransition firstTransition = context.Transitions.FirstOrDefault(i => i.Symbol == GoLexer.LBRACE);
                    InterpretTraceTransition lastTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                    if (firstTransition == null || lastTransition == null)
                        continue;

                    var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot);
                    if (blockSpan != null)
                        outliningRegions.Add(blockSpan);

                    break;
                }
            }
        }
    }

    _outliningRegions = outliningRegions;
    OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
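// OutlineBlock is called above but not shown in this section. The sketch below is a plausible
// implementation inferred from the call sites, not the actual one: it maps the span from the
// first to the last token onto a collapsible region, and skips blocks that fit on a single line.
private ITagSpan<IOutliningRegionTag> OutlineBlock(IToken firstToken, IToken lastToken, ITextSnapshot snapshot)
{
    Span span = Span.FromBounds(firstToken.StartIndex, lastToken.StopIndex + 1);

    // single-line blocks are not worth collapsing
    if (snapshot.GetLineNumberFromPosition(span.Start) == snapshot.GetLineNumberFromPosition(span.End))
        return null;

    SnapshotSpan snapshotSpan = new SnapshotSpan(snapshot, span);
    IOutliningRegionTag tag = new OutliningRegionTag("...", snapshotSpan.GetText());
    return new TagSpan<IOutliningRegionTag>(snapshotSpan, tag);
}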
protected override void ReParseImpl()
{
    Stopwatch timer = Stopwatch.StartNew();

    // lex the entire document to get the set of identifiers we'll need to classify
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new GoLexer(input);
    var tokenSource = new GoSemicolonInsertionTokenSource(lexer);
    var tokens = new CommonTokenStream(tokenSource);
    tokens.Fill();

    /* Declarations to handle, roughly by difficulty:
     * easy (a name or name list follows the keyword):
     *  - package (name)
     *  - import (alias? path)
     *  - type (name)
     *  - var (name list)
     *  - func (receiver? name)
     *  - const (name list)
     * moderate:
     *  - short variable declarations (name := value) and other declaring uses of ':'
     */
    List<IToken> nameKeywords = new List<IToken>();
    List<IToken> declColons = new List<IToken>();
    List<IToken> identifiers = new List<IToken>();
    while (tokens.LA(1) != CharStreamConstants.EndOfFile)
    {
        switch (tokens.LA(1))
        {
        case GoLexer.IDENTIFIER:
            identifiers.Add(tokens.LT(1));
            break;

        case GoLexer.KW_PACKAGE:
        case GoLexer.KW_IMPORT:
        case GoLexer.KW_TYPE:
        case GoLexer.KW_VAR:
        case GoLexer.KW_FUNC:
        case GoLexer.KW_CONST:
            nameKeywords.Add(tokens.LT(1));
            break;

        case GoLexer.DEFEQ:
        case GoLexer.COLON:
            declColons.Add(tokens.LT(1));
            break;

        case CharStreamConstants.EndOfFile:
            goto doneLexing;

        default:
            break;
        }

        tokens.Consume();
    }

doneLexing:

    HashSet<IToken> definitions = new HashSet<IToken>(TokenIndexEqualityComparer.Default);
    HashSet<IToken> references = new HashSet<IToken>(TokenIndexEqualityComparer.Default);

    foreach (var token in nameKeywords)
    {
        // walk forward from each declaration keyword; identifiers matched by the
        // SymbolDefinitionIdentifier and SymbolReferenceIdentifier rules are classified accordingly
        tokens.Seek(token.TokenIndex);
        NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens);
        while (interpreter.TryStepForward())
        {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                break;

            if (interpreter.Contexts.All(context => context.BoundedEnd))
                break;
        }

        interpreter.CombineBoundedEndContexts();

        foreach (var context in interpreter.Contexts)
        {
            foreach (var transition in context.Transitions)
            {
                if (!transition.Symbol.HasValue)
                    continue;

                switch (transition.Symbol)
                {
                case GoLexer.IDENTIFIER:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));

                    break;

                default:
                    continue;
                }
            }
        }
    }

    foreach (var token in declColons)
    {
        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        // a ':' can only take part in a declaration when it immediately follows an identifier
        // (e.g. a label), so skip other uses such as switch cases and slice expressions
        if (token.Type == GoLexer.COLON)
        {
            IToken potentialLabel = tokens.LT(-2);
            if (potentialLabel.Type != GoLexer.IDENTIFIER)
                continue;
        }

        NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens);

        while (interpreter.TryStepBackward())
        {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                break;

            if (interpreter.Contexts.All(context => context.BoundedStart))
                break;

            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
        }

        interpreter.CombineBoundedStartContexts();

        if (!AllAgree(interpreter.Contexts))
        {
            while (interpreter.TryStepForward())
            {
                if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    break;

                if (interpreter.Contexts.All(context => context.BoundedEnd))
                    break;

                interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
            }

            interpreter.CombineBoundedEndContexts();
        }

        foreach (var context in interpreter.Contexts)
        {
            foreach (var transition in context.Transitions)
            {
                if (!transition.Symbol.HasValue)
                    continue;

                switch (transition.Symbol)
                {
                case GoLexer.IDENTIFIER:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));

                    break;

                default:
                    continue;
                }
            }
        }
    }

#if false
    foreach (var token in identifiers)
    {
        if (definitions.Contains(token) || references.Contains(token))
            continue;

        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens);

        while (interpreter.TryStepBackward())
        {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                break;

            if (interpreter.Contexts.All(context => context.BoundedStart))
                break;

            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
            if (AllAgree(interpreter.Contexts))
                break;
        }

        interpreter.CombineBoundedStartContexts();

        while (interpreter.TryStepForward())
        {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                break;

            if (interpreter.Contexts.All(context => context.BoundedEnd))
                break;

            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
            if (AllAgree(interpreter.Contexts))
                break;
        }

        interpreter.CombineBoundedEndContexts();

        foreach (var context in interpreter.Contexts)
        {
            foreach (var transition in context.Transitions)
            {
                if (!transition.Symbol.HasValue)
                    continue;

                switch (transition.Symbol)
                {
                case GoLexer.IDENTIFIER:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));

                    break;

                default:
                    continue;
                }
            }
        }
    }
#endif
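    // NOTE: IsConsistentWithPreviousResult and AllAgree (used above) are defined elsewhere.
    // Judging from the call sites, the former presumably prunes interpreter contexts whose
    // identifier classifications contradict the 'definitions'/'references' sets accumulated
    // so far, and the latter reports whether all surviving contexts classify every identifier
    // the same way; neither implementation is shown in this section.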
    // tokens which appear in both the 'definitions' and 'references' sets are actually unknown
    HashSet<IToken> unknownIdentifiers = new HashSet<IToken>(definitions, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.IntersectWith(references);
    definitions.ExceptWith(unknownIdentifiers);

#if true
    // treat every identifier that wasn't classified as a definition (or left ambiguous) as a reference
    references = new HashSet<IToken>(identifiers, TokenIndexEqualityComparer.Default);
    references.ExceptWith(definitions);
    references.ExceptWith(unknownIdentifiers);
#else
    references.ExceptWith(unknownIdentifiers);

    // the full set of unknown identifiers is any that aren't explicitly classified as a definition or a reference
    unknownIdentifiers = new HashSet<IToken>(identifiers, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.ExceptWith(definitions);
    unknownIdentifiers.ExceptWith(references);
#endif

    List<ITagSpan<IClassificationTag>> tags = new List<ITagSpan<IClassificationTag>>();

    IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Definition);
    tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

    IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Reference);
    tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

    IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.UnknownIdentifier);
    tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

    _tags = tags;

    timer.Stop();

    IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    if (pane != null)
    {
        pane.WriteLine(string.Format(
            "Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown",
            identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
    }

    OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
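// TokenIndexEqualityComparer and ClassifyTokens are used above but defined elsewhere. The
// sketches below are inferred from the call sites and may differ from the real implementations:
// the comparer identifies tokens by their index in the token stream, and ClassifyTokens maps
// each token's character range onto a classification tag span.
private sealed class TokenIndexEqualityComparer : IEqualityComparer<IToken>
{
    public static readonly TokenIndexEqualityComparer Default = new TokenIndexEqualityComparer();

    public bool Equals(IToken x, IToken y)
    {
        if (x == null || y == null)
            return object.ReferenceEquals(x, y);

        // two tokens are "the same" when they occupy the same position in the token stream
        return x.TokenIndex == y.TokenIndex;
    }

    public int GetHashCode(IToken obj)
    {
        return obj.TokenIndex.GetHashCode();
    }
}

private static IEnumerable<ITagSpan<IClassificationTag>> ClassifyTokens(ITextSnapshot snapshot, IEnumerable<IToken> tokens, IClassificationTag tag)
{
    foreach (IToken token in tokens)
    {
        var span = new SnapshotSpan(snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1));
        yield return new TagSpan<IClassificationTag>(span, tag);
    }
}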