private NetworkInterpreter CreateVarDeclarationNetworkInterpreter(ITokenStream tokens)
{
    NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens);

    //// make sure we can handle forward and backward walking from ':'
    //interpreter.BoundaryRules.Add(interpreter.Network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ExprCaseClause));
    //interpreter.BoundaryRules.Add(interpreter.Network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeCaseClause));

    //interpreter.ExcludedStartRules.Add(interpreter.Network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ExprCaseClause));
    //interpreter.ExcludedStartRules.Add(interpreter.Network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeCaseClause));

    return interpreter;
}
private NetworkInterpreter CreateNetworkInterpreter(ITokenStream tokens)
{
    Network network = NetworkBuilder<AlloySimplifiedAtnBuilder>.GetOrBuildNetwork();
    NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

    RuleBinding memberSelectRule = network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.BinOpExpr18);
    interpreter.BoundaryRules.Add(memberSelectRule);

    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.LetDecl));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.QuantDecls));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Decl));
    ////interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.NameList));
    ////interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.NameListName));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Ref));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Open));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FactDecl));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.AssertDecl));
    ////interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FunDecl));
    ////interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FunctionName));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.CmdDecl));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Typescope));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.EnumDecl));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.ElseClause));
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Module));

    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.LetDecl));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.QuantDecls));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Decl));
    /* adding this rule definitely didn't help! */
    //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Expr));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Module));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FactDeclHeader));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.AssertDeclHeader));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FunFunctionName));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.PredFunctionName));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FunctionReturn));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.SigDeclHeader));
    interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.SigExt));

    //interpreter.ExcludedStartRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.CallArguments));

    return interpreter;
}
private NetworkInterpreter CreateNetworkInterpreter(ITokenStream tokens)
{
    Network network = NetworkBuilder<AlloyOutliningAtnBuilder>.GetOrBuildNetwork();
    NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

    interpreter.BoundaryRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.AssertDecl));
    interpreter.BoundaryRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.EnumDecl));
    interpreter.BoundaryRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.FactDecl));
    interpreter.BoundaryRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.FunDecl));
    interpreter.BoundaryRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.SigDecl));

    interpreter.ExcludedStartRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.SigBody));
    interpreter.ExcludedStartRules.Add(network.GetRule(AlloyOutliningAtnBuilder.RuleNames.Block));

    return interpreter;
}
private NetworkInterpreter CreateNetworkInterpreter(ITokenStream tokens)
{
    Network network = NetworkBuilder<GoOutliningAtnBuilder>.GetOrBuildNetwork();
    NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.ImportDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.TypeDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.ConstDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.FunctionDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.MethodDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.VarDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.StructType));

    interpreter.ExcludedStartRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.FunctionType));
    interpreter.ExcludedStartRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.TypeSwitchGuard));
    interpreter.ExcludedStartRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.Block));
    interpreter.ExcludedStartRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.Parameters));
    interpreter.ExcludedStartRules.Add(network.GetRule(GoOutliningAtnBuilder.RuleNames.Receiver));

    return interpreter;
}
private NetworkInterpreter CreateFullNetworkInterpreter(ITokenStream tokens)
{
    Network network = NetworkBuilder<GoReducedAtnBuilder>.GetOrBuildNetwork();
    NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

    // make sure we can handle forward walking from 'package'
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PackageClause));

    // make sure we can handle forward walking from 'import'
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ImportDecl));

    // make sure we can handle forward walking from 'type'
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeSwitchGuard));

    // make sure we can handle forward walking from 'const'
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ConstDecl));

    // make sure we can handle forward walking from 'var'
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.VarDecl));

    // make sure we can handle forward walking from 'func'
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionType));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionDeclHeader));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.MethodDeclHeader));

    // make sure we can handle forward and backward walking from ':='
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ShortVarDecl));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.SimpleStmt));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.RangeClause));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.CommCase));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Expression));
    interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PrimaryExpr));

    //// make sure we can handle forward and backward walking from ':'
    //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ExprCaseClause));
    //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeCaseClause));

    interpreter.ExcludedStartRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Block));

    return interpreter;
}
private NetworkInterpreter CreateNetworkInterpreter(CommonTokenStream tokens)
{
    Network network = NetworkBuilder<AntlrAtnBuilder>.GetOrBuildNetwork();
    NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

    IToken previousToken = tokens.LT(-1);
    if (previousToken == null)
    {
        return new NetworkInterpreter(network, new CommonTokenStream());
    }

    switch (previousToken.Type)
    {
    case ANTLRLexer.RULE_REF:
    case ANTLRLexer.TOKEN_REF:
    case ANTLRLexer.DOLLAR:
    case ANTLRLexer.ACTION:
    case ANTLRLexer.FORCED_ACTION:
    case ANTLRLexer.SEMPRED:
    case ANTLRLexer.ARG_ACTION:
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.Grammar));
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.Option));
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.DelegateGrammar));
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.TokenSpec));
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.AttrScope));
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.Action));
        interpreter.BoundaryRules.Add(network.GetRule(AntlrAtnBuilder.RuleNames.Rule));
        break;

    default:
        return new NetworkInterpreter(network, new CommonTokenStream());
    }

    return interpreter;
}
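Every factory above hands back an interpreter that its caller drives with the same loop: step backward until every surviving context is bounded at its start, combine the bounded-start contexts, then step forward until every context is bounded at its end. A minimal sketch of that shared pattern follows, written as hypothetical extension methods; the method names and the 400-context cap (borrowed from the Go and Java classifiers later in this section) are assumptions, and only NetworkInterpreter members that already appear in these snippets are used.

// Hypothetical helpers wrapping the stepping loop repeated throughout this section.
// Assumes only the NetworkInterpreter members shown in these snippets; requires System.Linq.
internal static class NetworkInterpreterExtensions
{
    public static void WalkToBoundedStart(this NetworkInterpreter interpreter, int contextLimit = 400)
    {
        while (interpreter.TryStepBackward())
        {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > contextLimit)
                break;

            if (interpreter.Contexts.All(context => context.BoundedStart))
                break;
        }

        interpreter.CombineBoundedStartContexts();
    }

    public static void WalkToBoundedEnd(this NetworkInterpreter interpreter, int contextLimit = 400)
    {
        while (interpreter.TryStepForward())
        {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > contextLimit)
                break;

            if (interpreter.Contexts.All(context => context.BoundedEnd))
                break;
        }

        interpreter.CombineBoundedEndContexts();
    }
}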
protected override void ReParseImpl() { // lex the entire document to get the set of identifiers we'll need to classify ITextSnapshot snapshot = TextBuffer.CurrentSnapshot; var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length)); var lexer = new AlloyLexer(input); var tokens = new CommonTokenStream(lexer); tokens.Fill(); /* Want to outline the following blocks: * - assert * - enum * - fact * - fun * - pred * - sig (block and body) */ List <IToken> outliningKeywords = new List <IToken>(); while (tokens.LA(1) != CharStreamConstants.EndOfFile) { switch (tokens.LA(1)) { case AlloyLexer.KW_ASSERT: case AlloyLexer.KW_ENUM: case AlloyLexer.KW_FACT: case AlloyLexer.KW_FUN: case AlloyLexer.KW_PRED: case AlloyLexer.KW_SIG: outliningKeywords.Add(tokens.LT(1)); break; case CharStreamConstants.EndOfFile: goto doneLexing; default: break; } tokens.Consume(); } doneLexing: List <ITagSpan <IOutliningRegionTag> > outliningRegions = new List <ITagSpan <IOutliningRegionTag> >(); foreach (var token in outliningKeywords) { tokens.Seek(token.TokenIndex); tokens.Consume(); NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } interpreter.CombineBoundedStartContexts(); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } foreach (var context in interpreter.Contexts) { InterpretTraceTransition firstBraceTransition = context.Transitions.FirstOrDefault(i => i.Symbol == AlloyLexer.LBRACE); InterpretTraceTransition lastBraceTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch); if (firstBraceTransition == null || lastBraceTransition == null) { continue; } if (token.Type == AlloyLexer.KW_SIG) { InterpretTraceTransition lastBodyBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.RBRACE && interpreter.Network.StateRules[i.Transition.SourceState.Id].Name == AlloyOutliningAtnBuilder.RuleNames.SigBody); if (lastBodyBraceTransition != lastBraceTransition && lastBodyBraceTransition != null) { var bodySpan = OutlineBlock(firstBraceTransition.Token, lastBodyBraceTransition.Token, snapshot); if (bodySpan != null) { outliningRegions.Add(bodySpan); } firstBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.LBRACE && i.TokenIndex > lastBodyBraceTransition.TokenIndex); } } var blockSpan = OutlineBlock(firstBraceTransition.Token, lastBraceTransition.Token, snapshot); if (blockSpan != null) { outliningRegions.Add(blockSpan); break; } } } _outliningRegions = outliningRegions; OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length)))); }
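The outlining taggers (this Alloy one and the Go one further down) both call an OutlineBlock helper that is not included in this section. The sketch below assumes it simply turns the span between the opening and closing tokens into a collapsible region and skips single-line blocks; the "..." collapsed form and the line-number guard are assumptions, and the real implementation may differ.

// Hedged sketch of the OutlineBlock helper used by the outlining taggers; uses
// Microsoft.VisualStudio.Text, Microsoft.VisualStudio.Text.Tagging, and Antlr.Runtime.
private static ITagSpan<IOutliningRegionTag> OutlineBlock(IToken firstToken, IToken lastToken, ITextSnapshot snapshot)
{
    if (firstToken == null || lastToken == null || lastToken.StopIndex < firstToken.StartIndex)
        return null;

    Span span = Span.FromBounds(firstToken.StartIndex, lastToken.StopIndex + 1);
    if (snapshot.GetLineNumberFromPosition(span.Start) == snapshot.GetLineNumberFromPosition(span.End))
        return null; // nothing worth collapsing on a single line (an assumption)

    SnapshotSpan snapshotSpan = new SnapshotSpan(snapshot, span);
    IOutliningRegionTag tag = new OutliningRegionTag(false, false, "...", snapshotSpan.GetText());
    return new TagSpan<IOutliningRegionTag>(snapshotSpan, tag);
}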
protected override void ReParseImpl() { Stopwatch timer = Stopwatch.StartNew(); // lex the entire document to get the set of identifiers we'll need to classify ITextSnapshot snapshot = TextBuffer.CurrentSnapshot; var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length)); var lexer = new GoLexer(input); var tokenSource = new GoSemicolonInsertionTokenSource(lexer); var tokens = new CommonTokenStream(tokenSource); tokens.Fill(); /* easy to handle the following definitions: * - module (name) * - open (external symbol reference) ... as (name) * - fact (name)? * - assert (name)? * - fun (ref.name | name) * - pred (ref.name | name) * - (name): run|check * - sig (namelist) * - enum (name) * moderate to handle the following definitions: * - decl name(s) * harder to handle the following definitions: */ /* A single name follows the following keywords: * - KW_MODULE * - KW_OPEN * - KW_AS * - KW_ENUM * - KW_FACT (name is optional) * - KW_ASSERT (name is optional) */ List <IToken> nameKeywords = new List <IToken>(); List <IToken> declColons = new List <IToken>(); List <IToken> identifiers = new List <IToken>(); while (tokens.LA(1) != CharStreamConstants.EndOfFile) { switch (tokens.LA(1)) { case GoLexer.IDENTIFIER: identifiers.Add(tokens.LT(1)); break; case GoLexer.KW_PACKAGE: case GoLexer.KW_IMPORT: case GoLexer.KW_TYPE: case GoLexer.KW_VAR: case GoLexer.KW_FUNC: case GoLexer.KW_CONST: //case GoLexer.KW_MODULE: //case GoLexer.KW_OPEN: //case GoLexer.KW_AS: //case GoLexer.KW_ENUM: //case GoLexer.KW_FACT: //case GoLexer.KW_ASSERT: //case GoLexer.KW_RUN: //case GoLexer.KW_CHECK: //case GoLexer.KW_EXTENDS: //case GoLexer.KW_FUN: //case GoLexer.KW_PRED: //case GoLexer.KW_SIG: nameKeywords.Add(tokens.LT(1)); break; case GoLexer.DEFEQ: case GoLexer.COLON: declColons.Add(tokens.LT(1)); break; case CharStreamConstants.EndOfFile: goto doneLexing; default: break; } tokens.Consume(); } doneLexing: HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default); HashSet <IToken> references = new HashSet <IToken>(TokenIndexEqualityComparer.Default); foreach (var token in nameKeywords) { tokens.Seek(token.TokenIndex); NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } interpreter.CombineBoundedEndContexts(); foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case GoLexer.IDENTIFIER: //case GoLexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) { references.Add(tokens.Get(transition.TokenIndex.Value)); } else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } foreach (var token in declColons) { tokens.Seek(token.TokenIndex); tokens.Consume(); if (token.Type == GoLexer.COLON) { IToken potentialLabel = tokens.LT(-2); if (potentialLabel.Type != GoLexer.IDENTIFIER) { continue; } } NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if 
(interpreter.Contexts.All(context => context.BoundedStart)) { break; } interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references)); } interpreter.CombineBoundedStartContexts(); if (!AllAgree(interpreter.Contexts)) { while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references)); } interpreter.CombineBoundedEndContexts(); } foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case GoLexer.IDENTIFIER: //case GoLexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) { references.Add(tokens.Get(transition.TokenIndex.Value)); } else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } #if false foreach (var token in identifiers) { if (definitions.Contains(token) || references.Contains(token)) { continue; } tokens.Seek(token.TokenIndex); tokens.Consume(); NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references)); if (AllAgree(interpreter.Contexts)) { break; } } interpreter.CombineBoundedStartContexts(); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references)); if (AllAgree(interpreter.Contexts)) { break; } } interpreter.CombineBoundedEndContexts(); foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case GoLexer.IDENTIFIER: //case GoLexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) { references.Add(tokens.Get(transition.TokenIndex.Value)); } else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } #endif // tokens which are in both the 'definitions' and 'references' sets are actually unknown. 
HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default); unknownIdentifiers.IntersectWith(references); definitions.ExceptWith(unknownIdentifiers); #if true references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default); references.ExceptWith(definitions); references.ExceptWith(unknownIdentifiers); #else references.ExceptWith(unknownIdentifiers); // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default); unknownIdentifiers.ExceptWith(definitions); unknownIdentifiers.ExceptWith(references); #endif List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >(); IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Definition); tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType))); IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Reference); tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType))); IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.UnknownIdentifier); tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType))); _tags = tags; timer.Stop(); IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense); if (pane != null) { pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count)); } OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length)))); }
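The classifier taggers finish by calling ClassifyTokens(snapshot, tokens, tag), which is not shown in this section. A plausible sketch follows, assuming it only wraps each token's character range in the supplied classification tag and skips tokens that no longer fit the snapshot; the real helper may be an instance method with additional filtering.

// Hedged sketch of the ClassifyTokens helper used by the classifiers above; emits one tag
// span per token. Requires Microsoft.VisualStudio.Text and Microsoft.VisualStudio.Text.Tagging.
private static IEnumerable<ITagSpan<IClassificationTag>> ClassifyTokens(ITextSnapshot snapshot, IEnumerable<IToken> tokens, ClassificationTag tag)
{
    foreach (IToken token in tokens)
    {
        if (token.StartIndex < 0 || token.StopIndex + 1 > snapshot.Length)
            continue; // stale or synthetic token; skipping it is an assumption

        SnapshotSpan span = new SnapshotSpan(snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1));
        yield return new TagSpan<IClassificationTag>(span, tag);
    }
}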
protected override void ReParseImpl() { Stopwatch timer = Stopwatch.StartNew(); // lex the entire document to get the set of identifiers we'll need to classify ITextSnapshot snapshot = TextBuffer.CurrentSnapshot; var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length)); JavaUnicodeStream inputWrapper = new JavaUnicodeStream(input); var lexer = new Java2Lexer(inputWrapper); var tokens = new CommonTokenStream(lexer); tokens.Fill(); List <IToken> nameKeywords = new List <IToken>(); List <IToken> declColons = new List <IToken>(); List <IToken> identifiers = new List <IToken>(); HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default); HashSet <IToken> references = new HashSet <IToken>(TokenIndexEqualityComparer.Default); GetLl2SymbolSets(); while (tokens.LA(1) != CharStreamConstants.EndOfFile) { // covered by the double-sided check if (_definitionOnlySourceSet.Contains(tokens.LA(1))) { if (tokens.LA(2) == Java2Lexer.IDENTIFIER) { definitions.Add(tokens.LT(2)); } } else if (_referenceOnlySourceSet.Contains(tokens.LA(1))) { if (tokens.LA(2) == Java2Lexer.IDENTIFIER) { references.Add(tokens.LT(2)); } } if (_definitionOnlyFollowSet.Contains(tokens.LA(1))) { IToken previous = tokens.LT(-1); if (previous != null && previous.Type == Java2Lexer.IDENTIFIER) { definitions.Add(previous); } } else if (_referenceOnlyFollowSet.Contains(tokens.LA(1))) { IToken previous = tokens.LT(-1); if (previous != null && previous.Type == Java2Lexer.IDENTIFIER) { references.Add(previous); } } if (tokens.LA(2) == Java2Lexer.IDENTIFIER) { IntervalSet bothWaysFollowDefinition; IntervalSet bothWaysFollowReference; _definitionContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowDefinition); _referenceContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowReference); bool couldBeDef = bothWaysFollowDefinition != null && bothWaysFollowDefinition.Contains(tokens.LA(3)); bool couldBeRef = bothWaysFollowReference != null && bothWaysFollowReference.Contains(tokens.LA(3)); if (couldBeDef && !couldBeRef) { definitions.Add(tokens.LT(2)); } else if (couldBeRef && !couldBeDef) { references.Add(tokens.LT(2)); } } if (tokens.LA(3) == Java2Lexer.IDENTIFIER && _definitionSourceSet.Contains(tokens.LA(2))) { IntervalSet sourceDefinition2; IntervalSet sourceReference2; _definitionSourceSet2.TryGetValue(tokens.LA(2), out sourceDefinition2); _referenceSourceSet2.TryGetValue(tokens.LA(2), out sourceReference2); bool couldBeDef = sourceDefinition2 != null && sourceDefinition2.Contains(tokens.LA(1)); bool couldBeRef = sourceReference2 != null && sourceReference2.Contains(tokens.LA(1)); if (couldBeDef && !couldBeRef) { definitions.Add(tokens.LT(3)); } else if (couldBeRef && !couldBeDef) { references.Add(tokens.LT(3)); } } if (_definitionFollowSet.Contains(tokens.LA(1))) { declColons.Add(tokens.LT(1)); } if (tokens.LA(1) == Java2Lexer.IDENTIFIER) { identifiers.Add(tokens.LT(1)); } tokens.Consume(); } foreach (var token in declColons) { tokens.Seek(token.TokenIndex); tokens.Consume(); IToken potentialDeclaration = tokens.LT(-2); if (potentialDeclaration.Type != Java2Lexer.IDENTIFIER || definitions.Contains(potentialDeclaration) || references.Contains(potentialDeclaration)) { continue; } bool agree = false; NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens, token.Type); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } 
interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references)); agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex); if (agree) { break; } } interpreter.CombineBoundedStartContexts(); if (!agree) { while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references)); agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex); if (agree) { break; } } interpreter.CombineBoundedEndContexts(); } foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case Java2Lexer.IDENTIFIER: //case Java2Lexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name == JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier) { references.Add(tokens.Get(transition.TokenIndex.Value)); } else if (rule.Name == JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } // tokens which are in both the 'definitions' and 'references' sets are actually unknown. HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default); unknownIdentifiers.IntersectWith(references); definitions.ExceptWith(unknownIdentifiers); #if true // set to true to mark all unknown identifiers as references (requires complete analysis of definitions) references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default); references.ExceptWith(definitions); references.ExceptWith(unknownIdentifiers); #else references.ExceptWith(unknownIdentifiers); // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default); unknownIdentifiers.ExceptWith(definitions); unknownIdentifiers.ExceptWith(references); #endif List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >(); IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Definition); tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType))); IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Reference); tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType))); IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.UnknownIdentifier); tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType))); _tags = tags; timer.Stop(); IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense); if (pane != null) { pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count)); } OnTagsChanged(new 
SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length)))); }
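Both classifiers key their definition and reference sets on TokenIndexEqualityComparer.Default, which does not appear in this section. Given how the sets are used (the same stream position can be re-fetched as a different IToken instance), it presumably compares tokens by TokenIndex alone; the sketch below is written under that assumption and may differ from the real class.

// Hedged sketch of TokenIndexEqualityComparer: token equality by stream position only.
// The real implementation may treat unassigned (negative) indexes differently.
internal sealed class TokenIndexEqualityComparer : IEqualityComparer<IToken>
{
    public static readonly TokenIndexEqualityComparer Default = new TokenIndexEqualityComparer();

    private TokenIndexEqualityComparer()
    {
    }

    public bool Equals(IToken x, IToken y)
    {
        if (ReferenceEquals(x, y))
            return true;

        if (x == null || y == null)
            return false;

        return x.TokenIndex == y.TokenIndex;
    }

    public int GetHashCode(IToken obj)
    {
        return obj != null ? obj.TokenIndex.GetHashCode() : 0;
    }
}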
private NetworkInterpreter CreateFullNetworkInterpreter(ITokenStream tokens, int startToken) { Network network = NetworkBuilder <JavaSimplifiedAtnBuilder> .GetOrBuildNetwork(); NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens); switch (startToken) { case Java2Lexer.CLASS: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ClassHeader)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.IdentifierSuffix)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Primary)); break; case Java2Lexer.INTERFACE: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InterfaceHeader)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AnnotationInterfaceHeader)); break; case Java2Lexer.ENUM: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.EnumHeader)); break; case Java2Lexer.COMMA: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeParameters)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.EnumBody)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.EnumConstants)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeList)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FieldDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeArguments)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.QualifiedNameList)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FormalParameterDecls)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ElementValuePairs)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ElementValueArrayInitializer)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.LocalVariableDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ExpressionList)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ArrayInitializer)); interpreter.ExcludedStartRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InterfaceFieldDeclaration)); break; case Java2Lexer.COLON: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Statement)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.SwitchLabel)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ForStatement)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AssignmentOperator)); break; case Java2Lexer.EQ: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.VariableDeclarator)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ElementValuePair)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AssignmentOperator)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.RelationalOp)); break; case Java2Lexer.EXTENDS: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.NormalClassExtends)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ExtendsTypeList)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeParameter)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeArgument)); break; case Java2Lexer.LPAREN: case Java2Lexer.RPAREN: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FormalParameters)); 
interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Annotation)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AnnotationMethodDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.CatchClause)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ForStatement)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ParExpression)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.CastExpression)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Arguments)); break; case Java2Lexer.GT: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeParameters)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeArguments)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AssignmentOperator)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.RelationalOp)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ShiftOp)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.NonWildcardTypeArguments)); break; case Java2Lexer.LBRACKET: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.MethodDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.VariableDeclarator)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Type)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.NormalParameterDecl)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FormalParameter)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Primary)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.IdentifierSuffix)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Selector)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ArrayCreator)); interpreter.ExcludedStartRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InterfaceMethodDeclaration)); break; case Java2Lexer.SEMI: interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.PackageDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ImportDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.EnumBodyDeclarations)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ClassBodyDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.MethodDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FieldDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InterfaceBodyDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InterfaceMethodDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InterfaceFieldDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ExplicitConstructorInvocation)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AnnotationTypeElementDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.AnnotationMethodDeclaration)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.LocalVariableDeclarationStatement)); 
interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Statement)); interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ForStatement)); break; default: break; } //// make sure we can handle forward walking from 'package' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.PackageClause)); //// make sure we can handle forward walking from 'import' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ImportDecl)); //// make sure we can handle forward walking from 'type' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeDecl)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeSwitchGuard)); //// make sure we can handle forward walking from 'const' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ConstDecl)); //// make sure we can handle forward walking from 'var' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.VarDecl)); //// make sure we can handle forward walking from 'func' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FunctionType)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.FunctionDeclHeader)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.MethodDeclHeader)); //// make sure we can handle forward and backward walking from ':=' //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ShortVarDecl)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.SimpleStmt)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.RangeClause)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.CommCase)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Expression)); //interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.PrimaryExpr)); return(interpreter); }
private NetworkInterpreter CreateVarDeclarationNetworkInterpreter(ITokenStream tokens, int startToken)
{
    NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens, startToken);
    return interpreter;
}
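The Go and Java classifiers also rely on an AllAgree check that is not shown here: it decides whether the surviving interpretation contexts already classify the identifier of interest consistently, so the stepping loop can stop early. The sketch below is heavily hedged: it takes the interpreter rather than the context list (so it can reach Network.StateRules), hard-codes the Java rule names, and only guesses at the real semantics.

// Heavily hedged sketch of an AllAgree-style check; signature and semantics are assumptions.
private static bool AllAgree(NetworkInterpreter interpreter, int tokenIndex)
{
    bool? classifiedAsDefinition = null;

    foreach (var context in interpreter.Contexts)
    {
        foreach (var transition in context.Transitions)
        {
            if (!transition.Symbol.HasValue || transition.TokenIndex != tokenIndex)
                continue;

            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
            bool isDefinition = rule.Name == JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier;
            bool isReference = rule.Name == JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier;
            if (!isDefinition && !isReference)
                continue;

            if (classifiedAsDefinition == null)
                classifiedAsDefinition = isDefinition;
            else if (classifiedAsDefinition != isDefinition)
                return false; // two contexts disagree about this identifier
        }
    }

    // only report agreement once at least one context has actually classified the token
    return classifiedAsDefinition != null;
}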
private static void GetLl2SymbolSets() { lock (_lookupTablesLock) { if (_definitionContextSet1 != null) { return; } Network setAnalysisNetwork = NetworkBuilder <JavaSimplifiedAtnBuilder> .GetOrBuildNetwork(); JavaSimplifiedAtnBuilder setAnalysisBuilder = (JavaSimplifiedAtnBuilder)setAnalysisNetwork.Builder; _definitionSourceSet = setAnalysisBuilder.DefinitionSourceSet; _referenceSourceSet = setAnalysisBuilder.ReferenceSourceSet; _definitionFollowSet = setAnalysisBuilder.DefinitionFollowSet; _referenceFollowSet = setAnalysisBuilder.ReferenceFollowSet; _definitionOnlySourceSet = setAnalysisBuilder.DefinitionOnlySourceSet; _referenceOnlySourceSet = setAnalysisBuilder.ReferenceOnlySourceSet; _definitionOnlyFollowSet = setAnalysisBuilder.DefinitionOnlyFollowSet; _referenceOnlyFollowSet = setAnalysisBuilder.ReferenceOnlyFollowSet; _definitionContextSet1 = new Dictionary <int, IntervalSet>(); _referenceContextSet1 = new Dictionary <int, IntervalSet>(); _definitionSourceSet2 = new Dictionary <int, IntervalSet>(); _referenceSourceSet2 = new Dictionary <int, IntervalSet>(); _definitionFollowSet2 = new Dictionary <int, IntervalSet>(); _referenceFollowSet2 = new Dictionary <int, IntervalSet>(); var sharedSourceTokens = setAnalysisBuilder.DefinitionSourceSet.Intersect(setAnalysisBuilder.ReferenceSourceSet); foreach (var sharedSourceToken in sharedSourceTokens) { CommonTokenStream analysisTokenStream = new CommonTokenStream(new ArrayTokenSource(sharedSourceToken, Java2Lexer.IDENTIFIER)); analysisTokenStream.Fill(); analysisTokenStream.Seek(1); // definition context set NetworkInterpreter ll2analyzer = new NetworkInterpreter(setAnalysisNetwork, analysisTokenStream); ll2analyzer.ExcludedStartRules.Add(setAnalysisNetwork.GetRule(JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier)); ll2analyzer.TryStepForward(); ll2analyzer.TryStepBackward(); _definitionContextSet1[sharedSourceToken] = ll2analyzer.GetFollowSet(); _definitionSourceSet2[sharedSourceToken] = ll2analyzer.GetSourceSet(); // reference context set ll2analyzer = new NetworkInterpreter(setAnalysisNetwork, analysisTokenStream); ll2analyzer.ExcludedStartRules.Add(setAnalysisNetwork.GetRule(JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier)); ll2analyzer.TryStepForward(); ll2analyzer.TryStepBackward(); _referenceContextSet1[sharedSourceToken] = ll2analyzer.GetFollowSet(); _referenceSourceSet2[sharedSourceToken] = ll2analyzer.GetSourceSet(); } var sharedFollowTokens = setAnalysisBuilder.DefinitionFollowSet.Intersect(setAnalysisBuilder.ReferenceFollowSet); foreach (var sharedFollowToken in sharedFollowTokens) { CommonTokenStream analysisTokenStream = new CommonTokenStream(new ArrayTokenSource(Java2Lexer.IDENTIFIER, sharedFollowToken)); analysisTokenStream.Fill(); analysisTokenStream.Seek(0); // definition follow set NetworkInterpreter ll2analyzer = new NetworkInterpreter(setAnalysisNetwork, analysisTokenStream); ll2analyzer.ExcludedStartRules.Add(setAnalysisNetwork.GetRule(JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier)); ll2analyzer.TryStepForward(); ll2analyzer.TryStepBackward(); _definitionFollowSet2[sharedFollowToken] = ll2analyzer.GetFollowSet(); // reference follow set ll2analyzer = new NetworkInterpreter(setAnalysisNetwork, analysisTokenStream); ll2analyzer.ExcludedStartRules.Add(setAnalysisNetwork.GetRule(JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier)); ll2analyzer.TryStepForward(); ll2analyzer.TryStepBackward(); _referenceFollowSet2[sharedFollowToken] = ll2analyzer.GetFollowSet(); } } }
public void AugmentQuickInfoSession(IQuickInfoSession session, IList <object> quickInfoContent, out ITrackingSpan applicableToSpan) { applicableToSpan = null; if (session == null || quickInfoContent == null) { return; } if (session.TextView.TextBuffer == this.TextBuffer) { ITextSnapshot currentSnapshot = this.TextBuffer.CurrentSnapshot; SnapshotPoint?triggerPoint = session.GetTriggerPoint(currentSnapshot); if (!triggerPoint.HasValue) { return; } #region experimental /* use the experimental model to locate and process the expression */ Stopwatch stopwatch = Stopwatch.StartNew(); // lex the entire document var input = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length)); var lexer = new GoLexer(input); var tokenSource = new GoSemicolonInsertionTokenSource(lexer); var tokens = new CommonTokenStream(tokenSource); tokens.Fill(); // locate the last token before the trigger point while (true) { IToken nextToken = tokens.LT(1); if (nextToken.Type == CharStreamConstants.EndOfFile) { break; } if (nextToken.StartIndex > triggerPoint.Value.Position) { break; } tokens.Consume(); } switch (tokens.LA(-1)) { case GoLexer.IDENTIFIER: //case GoLexer.KW_THIS: //case GoLexer.KW_UNIV: //case GoLexer.KW_IDEN: //case GoLexer.KW_INT2: //case GoLexer.KW_SEQINT: break; default: return; } Network network = NetworkBuilder <GoSimplifiedAtnBuilder> .GetOrBuildNetwork(); RuleBinding memberSelectRule = network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PrimaryExpr); #if false HashSet <Transition> memberSelectTransitions = new HashSet <Transition>(); GetReachableTransitions(memberSelectRule, memberSelectTransitions); #endif NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens); interpreter.BoundaryRules.Add(memberSelectRule); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Label)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeSwitchGuard)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FieldName)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Receiver)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionDecl)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.BaseTypeName)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeSpec)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.IdentifierList)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.MethodName)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ParameterDecl)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FieldIdentifierList)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PackageName)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeName)); interpreter.ExcludedStartRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Block)); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } /* we want all traces to start outside the binOpExpr18 rule, which means all * traces with a transition reachable from binOpExpr18 should contain a push * transition with binOpExpr18's start state as its target. 
*/ if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } interpreter.CombineBoundedStartContexts(); IOutputWindowPane pane = Provider.OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense); if (pane != null) { pane.WriteLine(string.Format("Located {0} QuickInfo expression(s) in {1}ms.", interpreter.Contexts.Count, stopwatch.ElapsedMilliseconds)); } HashSet <string> finalResult = new HashSet <string>(); SnapshotSpan? contextSpan = null; foreach (var context in interpreter.Contexts) { Span?span = null; //List<string> results = AnalyzeInterpreterTrace(context, memberSelectRule, out span); foreach (var transition in context.Transitions) { if (!transition.Transition.IsMatch) { continue; } IToken token = transition.Token; Span tokenSpan = new Span(token.StartIndex, token.StopIndex - token.StartIndex + 1); if (span == null) { span = tokenSpan; } else { span = Span.FromBounds(Math.Min(span.Value.Start, tokenSpan.Start), Math.Max(span.Value.End, tokenSpan.End)); } } if (span.HasValue && !span.Value.IsEmpty) { contextSpan = new SnapshotSpan(currentSnapshot, span.Value); } //if (results.Count > 0) //{ // finalResult.UnionWith(results); // applicableToSpan = currentSnapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeExclusive); //} } foreach (var result in finalResult) { quickInfoContent.Add(result); } #endregion #if false var selection = session.TextView.Selection.StreamSelectionSpan; if (selection.IsEmpty || !selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value))) { SnapshotSpan?expressionSpan = Provider.IntellisenseCache.GetExpressionSpan(triggerPoint.Value); if (expressionSpan.HasValue) { selection = new VirtualSnapshotSpan(expressionSpan.Value); } } #endif VirtualSnapshotSpan selection = new VirtualSnapshotSpan(); if (contextSpan.HasValue) { selection = new VirtualSnapshotSpan(contextSpan.Value); } if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value))) { applicableToSpan = selection.Snapshot.CreateTrackingSpan(selection.SnapshotSpan, SpanTrackingMode.EdgeExclusive); quickInfoContent.Add(selection.GetText()); //try //{ // Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection); // if (currentExpression != null) // { // SnapshotSpan? span = currentExpression.Span; // if (span.HasValue) // applicableToSpan = span.Value.Snapshot.CreateTrackingSpan(span.Value, SpanTrackingMode.EdgeExclusive); // quickInfoContent.Add(currentExpression.ToString()); // } // else // { // quickInfoContent.Add("Could not parse expression."); // } //} //catch (Exception ex) when (!ErrorHandler.IsCriticalException(ex)) //{ // quickInfoContent.Add(ex.Message); //} } } }
protected override void ReParseImpl() { // lex the entire document to get the set of identifiers we'll need to classify ITextSnapshot snapshot = TextBuffer.CurrentSnapshot; var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length)); var lexer = new AlloyLexer(input); var tokens = new CommonTokenStream(lexer); tokens.Fill(); /* easy to handle the following definitions: * - module (name) * - open (external symbol reference) ... as (name) * - fact (name)? * - assert (name)? * - fun (ref.name | name) * - pred (ref.name | name) * - (name): run|check * - sig (namelist) * - enum (name) * moderate to handle the following definitions: * - decl name(s) * harder to handle the following definitions: */ /* A single name follows the following keywords: * - KW_MODULE * - KW_OPEN * - KW_AS * - KW_ENUM * - KW_FACT (name is optional) * - KW_ASSERT (name is optional) */ List <IToken> nameKeywords = new List <IToken>(); List <IToken> declColons = new List <IToken>(); List <IToken> identifiers = new List <IToken>(); while (tokens.LA(1) != CharStreamConstants.EndOfFile) { switch (tokens.LA(1)) { case AlloyLexer.IDENTIFIER: identifiers.Add(tokens.LT(1)); break; case AlloyLexer.KW_MODULE: case AlloyLexer.KW_OPEN: case AlloyLexer.KW_AS: case AlloyLexer.KW_ENUM: case AlloyLexer.KW_FACT: case AlloyLexer.KW_ASSERT: case AlloyLexer.KW_RUN: case AlloyLexer.KW_CHECK: case AlloyLexer.KW_EXTENDS: case AlloyLexer.KW_FUN: case AlloyLexer.KW_PRED: case AlloyLexer.KW_SIG: nameKeywords.Add(tokens.LT(1)); break; case AlloyLexer.COLON: declColons.Add(tokens.LT(1)); break; case CharStreamConstants.EndOfFile: goto doneLexing; default: break; } tokens.Consume(); } doneLexing: HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default); HashSet <IToken> references = new HashSet <IToken>(TokenIndexEqualityComparer.Default); foreach (var token in nameKeywords) { tokens.Seek(token.TokenIndex); NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } interpreter.CombineBoundedEndContexts(); foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case AlloyLexer.IDENTIFIER: case AlloyLexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition) { references.Add(tokens.Get(transition.TokenIndex.Value)); } if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } foreach (var token in declColons) { tokens.Seek(token.TokenIndex); tokens.Consume(); NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } interpreter.CombineBoundedStartContexts(); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } interpreter.CombineBoundedEndContexts(); foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case 
AlloyLexer.IDENTIFIER: case AlloyLexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition) { references.Add(tokens.Get(transition.TokenIndex.Value)); } if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } foreach (var token in identifiers) { if (definitions.Contains(token) || references.Contains(token)) { continue; } tokens.Seek(token.TokenIndex); tokens.Consume(); NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } interpreter.CombineBoundedStartContexts(); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } interpreter.CombineBoundedEndContexts(); foreach (var context in interpreter.Contexts) { foreach (var transition in context.Transitions) { if (!transition.Symbol.HasValue) { continue; } switch (transition.Symbol) { case AlloyLexer.IDENTIFIER: case AlloyLexer.KW_THIS: RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id]; if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition) { references.Add(tokens.Get(transition.TokenIndex.Value)); } if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference) { definitions.Add(tokens.Get(transition.TokenIndex.Value)); } break; default: continue; } } } } // tokens which are in both the 'definitions' and 'references' sets are actually unknown. HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default); unknownIdentifiers.IntersectWith(references); definitions.ExceptWith(unknownIdentifiers); references.ExceptWith(unknownIdentifiers); // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default); unknownIdentifiers.ExceptWith(definitions); unknownIdentifiers.ExceptWith(references); List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >(); IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.Definition); tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType))); IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.Reference); tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType))); IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.UnknownIdentifier); tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType))); _tags = tags; OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length)))); }
protected override void ReParseImpl() { // lex the entire document to get the set of identifiers we'll need to classify ITextSnapshot snapshot = TextBuffer.CurrentSnapshot; var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length)); var lexer = new GoLexer(input); var tokenSource = new GoSemicolonInsertionTokenSource(lexer); var tokens = new CommonTokenStream(tokenSource); tokens.Fill(); /* Want to outline the following blocks: * - import * - type * - const * - func */ List <IToken> outliningKeywords = new List <IToken>(); while (tokens.LA(1) != CharStreamConstants.EndOfFile) { switch (tokens.LA(1)) { case GoLexer.KW_IMPORT: ////case GoLexer.KW_TYPE: case GoLexer.KW_CONST: case GoLexer.KW_STRUCT: case GoLexer.KW_FUNC: case GoLexer.KW_VAR: outliningKeywords.Add(tokens.LT(1)); break; case CharStreamConstants.EndOfFile: goto doneLexing; default: break; } tokens.Consume(); } doneLexing: List <ITagSpan <IOutliningRegionTag> > outliningRegions = new List <ITagSpan <IOutliningRegionTag> >(); foreach (var token in outliningKeywords) { tokens.Seek(token.TokenIndex); tokens.Consume(); NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } interpreter.CombineBoundedStartContexts(); while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } interpreter.CombineBoundedEndContexts(); foreach (var context in interpreter.Contexts) { switch (token.Type) { case GoLexer.KW_IMPORT: case GoLexer.KW_VAR: case GoLexer.KW_CONST: { InterpretTraceTransition firstTransition = context.Transitions.Where(i => i.Transition.IsMatch).ElementAtOrDefault(1); InterpretTraceTransition lastTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch); if (firstTransition == null || lastTransition == null) { continue; } if (firstTransition.Symbol != GoLexer.LPAREN) { continue; } var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot); if (blockSpan != null) { outliningRegions.Add(blockSpan); break; } break; } case GoLexer.KW_STRUCT: case GoLexer.KW_FUNC: { InterpretTraceTransition firstTransition = context.Transitions.FirstOrDefault(i => i.Symbol == GoLexer.LBRACE); InterpretTraceTransition lastTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch); if (firstTransition == null || lastTransition == null) { continue; } var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot); if (blockSpan != null) { outliningRegions.Add(blockSpan); break; } break; } } } } _outliningRegions = outliningRegions; OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length)))); }
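These taggers cache their results in _outliningRegions (or _tags) and raise OnTagsChanged; the ITagger side that replays the cache is not part of this section. A minimal sketch follows, assuming the cached spans are simply returned as-is; a real implementation would translate them to the requested snapshot and filter them to the requested ranges.

// Hedged sketch of the GetTags half of the outlining taggers above; requires System.Linq
// and Microsoft.VisualStudio.Text.Tagging.
public IEnumerable<ITagSpan<IOutliningRegionTag>> GetTags(NormalizedSnapshotSpanCollection spans)
{
    var regions = _outliningRegions;
    if (regions == null || spans.Count == 0)
        return Enumerable.Empty<ITagSpan<IOutliningRegionTag>>();

    // returning the cached list keeps the sketch short; translating each region's span to
    // spans[0].Snapshot would be the more careful approach
    return regions;
}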
private List <LabelInfo> FindLabelsInScope(SnapshotPoint triggerPoint) { List <LabelInfo> labels = new List <LabelInfo>(); /* use the experimental model to locate and process the expression */ Stopwatch stopwatch = Stopwatch.StartNew(); // lex the entire document var currentSnapshot = triggerPoint.Snapshot; var input = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length)); var lexer = new ANTLRLexer(input); var tokens = new CommonTokenStream(lexer); tokens.Fill(); // locate the last token before the trigger point while (true) { IToken nextToken = tokens.LT(1); if (nextToken.Type == CharStreamConstants.EndOfFile) { break; } if (nextToken.StartIndex > triggerPoint.Position) { break; } tokens.Consume(); } bool inAction = false; IToken triggerToken = tokens.LT(-1); switch (triggerToken.Type) { case ANTLRLexer.RULE_REF: case ANTLRLexer.TOKEN_REF: case ANTLRLexer.DOLLAR: break; case ANTLRLexer.ACTION: case ANTLRLexer.FORCED_ACTION: case ANTLRLexer.SEMPRED: case ANTLRLexer.ARG_ACTION: inAction = true; break; default: return(labels); } NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 4000) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } if (interpreter.Failed) { interpreter.Contexts.Clear(); } interpreter.CombineBoundedStartContexts(); HashSet <IToken> labelTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default); foreach (var context in interpreter.Contexts) { var tokenTransitions = context.Transitions.Where(i => i.TokenIndex != null).ToList(); for (int i = 1; i < tokenTransitions.Count - 1; i++) { if (tokenTransitions[i].Symbol != ANTLRLexer.TOKEN_REF && tokenTransitions[i].Symbol != ANTLRLexer.RULE_REF) { continue; } // we add explicit labels, plus implicit labels if we're in an action if (tokenTransitions[i + 1].Symbol == ANTLRLexer.ASSIGN || tokenTransitions[i + 1].Symbol == ANTLRLexer.PLUS_ASSIGN) { RuleBinding rule = interpreter.Network.StateRules[tokenTransitions[i + 1].Transition.SourceState.Id]; if (rule.Name == AntlrAtnBuilder.RuleNames.TreeRoot || rule.Name == AntlrAtnBuilder.RuleNames.ElementNoOptionSpec) { labelTokens.Add(tokenTransitions[i].Token); } } else if (inAction && tokenTransitions[i - 1].Symbol != ANTLRLexer.ASSIGN && tokenTransitions[i - 1].Symbol != ANTLRLexer.PLUS_ASSIGN) { RuleBinding rule = interpreter.Network.StateRules[tokenTransitions[i].Transition.SourceState.Id]; if (rule.Name == AntlrAtnBuilder.RuleNames.Terminal || rule.Name == AntlrAtnBuilder.RuleNames.NotTerminal || rule.Name == AntlrAtnBuilder.RuleNames.RuleRef) { labelTokens.Add(tokenTransitions[i].Token); } } } } foreach (var token in labelTokens) { labels.Add(new LabelInfo(token.Text, "(label) " + token.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1)), StandardGlyphGroup.GlyphGroupField, Enumerable.Empty <LabelInfo>())); } /* add scopes */ if (inAction) { /* add global scopes */ IList <IToken> tokensList = tokens.GetTokens(); for (int i = 0; i < tokensList.Count - 1; i++) { var token = tokensList[i]; /* all global scopes appear before the first rule. 
before the first rule, the only place a ':' can appear is * in the form '::' for things like @lexer::namespace{} */ if (token.Type == ANTLRLexer.COLON && tokensList[i + 1].Type == ANTLRLexer.COLON) { break; } if (token.Type == ANTLRLexer.SCOPE) { var nextToken = tokensList.Skip(i + 1).FirstOrDefault(t => t.Channel == TokenChannels.Default); if (nextToken != null && (nextToken.Type == ANTLRLexer.RULE_REF || nextToken.Type == ANTLRLexer.TOKEN_REF)) { // TODO: parse scope members IToken actionToken = tokensList.Skip(nextToken.TokenIndex + 1).FirstOrDefault(t => t.Channel == TokenChannels.Default); IEnumerable <LabelInfo> members = Enumerable.Empty <LabelInfo>(); if (actionToken != null && actionToken.Type == ANTLRLexer.ACTION) { IEnumerable <IToken> scopeMembers = ExtractScopeAttributes(nextToken); members = scopeMembers.Select(member => { string name = member.Text; SnapshotSpan definition = new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(member.StartIndex, member.StopIndex + 1)); StandardGlyphGroup glyph = StandardGlyphGroup.GlyphGroupField; IEnumerable <LabelInfo> nestedMembers = Enumerable.Empty <LabelInfo>(); return(new LabelInfo(name, string.Empty, definition, glyph, nestedMembers)); }); } labels.Add(new LabelInfo(nextToken.Text, "(global scope) " + nextToken.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(nextToken.StartIndex, nextToken.StopIndex + 1)), StandardGlyphGroup.GlyphGroupNamespace, members)); } } } /* add rule scopes */ // todo } /* add arguments and return values */ if (inAction) { HashSet <IToken> argumentTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default); foreach (var context in interpreter.Contexts) { var tokenTransitions = context.Transitions.Where(i => i.TokenIndex != null).ToList(); for (int i = 1; i < tokenTransitions.Count; i++) { if (tokenTransitions[i].Symbol == ANTLRLexer.RETURNS || tokenTransitions[i].Symbol == ANTLRLexer.COLON) { break; } if (tokenTransitions[i].Symbol == ANTLRLexer.ARG_ACTION) { argumentTokens.Add(tokenTransitions[i].Token); } } } foreach (var token in argumentTokens) { IEnumerable <IToken> arguments = ExtractArguments(token); foreach (var argument in arguments) { labels.Add(new LabelInfo(argument.Text, "(parameter) " + argument.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(argument.StartIndex, argument.StopIndex + 1)), StandardGlyphGroup.GlyphGroupVariable, Enumerable.Empty <LabelInfo>())); } } } /* add return values */ if (inAction) { HashSet <IToken> returnTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default); foreach (var context in interpreter.Contexts) { var tokenTransitions = context.Transitions.Where(i => i.TokenIndex != null).ToList(); for (int i = 1; i < tokenTransitions.Count - 1; i++) { if (tokenTransitions[i].Symbol == ANTLRLexer.COLON) { break; } if (tokenTransitions[i].Symbol == ANTLRLexer.RETURNS) { if (tokenTransitions[i + 1].Symbol == ANTLRLexer.ARG_ACTION) { returnTokens.Add(tokenTransitions[i + 1].Token); } break; } } } foreach (var token in returnTokens) { IEnumerable <IToken> returnValues = ExtractArguments(token); foreach (var returnValue in returnValues) { labels.Add(new LabelInfo(returnValue.Text, "(return value) " + returnValue.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(returnValue.StartIndex, returnValue.StopIndex + 1)), StandardGlyphGroup.GlyphGroupVariable, Enumerable.Empty <LabelInfo>())); } } } /* add intrinsic labels ($start, $type, $text, $enclosingRuleName) */ IToken ruleNameToken = null; HashSet <IToken> enclosingRuleNameTokens 
= new HashSet <IToken>(TokenIndexEqualityComparer.Default); foreach (var context in interpreter.Contexts) { var tokenTransitions = context.Transitions.Where(i => i.Symbol == ANTLRLexer.RULE_REF || i.Symbol == ANTLRLexer.TOKEN_REF).ToList(); if (!tokenTransitions.Any()) { continue; } ruleNameToken = tokenTransitions.First().Token; if (ruleNameToken != null) { enclosingRuleNameTokens.Add(ruleNameToken); } } foreach (var token in enclosingRuleNameTokens) { // TODO: add members labels.Add(new LabelInfo(token.Text, "(enclosing rule) " + token.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1)), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); } GrammarType grammarType = GrammarType.None; int mark = tokens.Mark(); try { tokens.Seek(0); bool hasGrammarType = false; while (!hasGrammarType) { int la1 = tokens.LA(1); switch (la1) { case ANTLRLexer.GRAMMAR: IToken previous = tokens.LT(-1); if (previous == null) { grammarType = GrammarType.Combined; } else if (previous.Type == ANTLRLexer.LEXER) { grammarType = GrammarType.Lexer; } else if (previous.Type == ANTLRLexer.PARSER) { grammarType = GrammarType.Parser; } else if (previous.Type == ANTLRLexer.TREE) { grammarType = GrammarType.TreeParser; } else { grammarType = GrammarType.None; } hasGrammarType = true; break; case CharStreamConstants.EndOfFile: hasGrammarType = true; break; default: break; } tokens.Consume(); } } finally { tokens.Rewind(mark); } if (inAction) { switch (grammarType) { case GrammarType.Combined: if (ruleNameToken == null) { goto default; } if (ruleNameToken.Type == ANTLRLexer.RULE_REF) { goto case GrammarType.Parser; } else { goto case GrammarType.Lexer; } case GrammarType.Lexer: labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("type", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("line", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("index", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("pos", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("channel", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("stop", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("int", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); break; case GrammarType.Parser: labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("stop", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("tree", string.Empty, new SnapshotSpan(), 
StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("st", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); break; case GrammarType.TreeParser: labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("tree", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("st", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); break; default: // if we're unsure about the grammar type, include all the possible options to make sure we're covered labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("type", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("line", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("index", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("pos", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("channel", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("stop", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("int", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("tree", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); labels.Add(new LabelInfo("st", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>())); break; } } return(labels); }
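// The tail of FindLabelsInScope adds the ANTLR intrinsic attributes ($text, $type, $start, ...)
// with one labels.Add call per attribute and per grammar type. The sketch below restates the
// same data as a lookup table so the per-grammar lists are easier to audit. KnownGrammarType is
// a local stand-in for the GrammarType enum used above, the attribute lists are copied from the
// switch above, and the class and method names are illustrative.
using System.Collections.Generic;

internal enum KnownGrammarType { None, Combined, Lexer, Parser, TreeParser }

internal static class IntrinsicAttributes
{
    private static readonly string[] LexerAttributes =
        { "text", "type", "line", "index", "pos", "channel", "start", "stop", "int" };

    private static readonly string[] ParserAttributes =
        { "text", "start", "stop", "tree", "st" };

    private static readonly string[] TreeParserAttributes =
        { "text", "start", "tree", "st" };

    // Matches the 'default' branch above: when unsure, offer every candidate.
    private static readonly string[] AllAttributes =
        { "text", "type", "line", "index", "pos", "channel", "start", "stop", "int", "tree", "st" };

    public static IList<string> For(KnownGrammarType grammarType, bool? ruleNameIsParserRule)
    {
        switch (grammarType)
        {
        case KnownGrammarType.Combined:
            // In a combined grammar the enclosing rule name decides:
            // RULE_REF => parser rule, TOKEN_REF => lexer rule, unknown => everything.
            if (ruleNameIsParserRule == null)
                return AllAttributes;
            return ruleNameIsParserRule.Value ? ParserAttributes : LexerAttributes;
        case KnownGrammarType.Lexer:
            return LexerAttributes;
        case KnownGrammarType.Parser:
            return ParserAttributes;
        case KnownGrammarType.TreeParser:
            return TreeParserAttributes;
        default:
            return AllAttributes;
        }
    }
}
// FindLabelsInScope could then build its intrinsic LabelInfo entries in a single loop over
// the returned list instead of repeating labels.Add per attribute.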
private NetworkInterpreter CreateTopLevelNetworkInterpreter(ITokenStream tokens) { Network network = NetworkBuilder <GoTopLevelSymbolTaggerAtnBuilder> .GetOrBuildNetwork(); NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens); //RuleBinding memberSelectRule = network.GetRule(GoSimplifiedAtnBuilder.RuleNames.BinOpExpr18); //interpreter.BoundaryRules.Add(memberSelectRule); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.LetDecl)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.QuantDecls)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Decl)); //////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.NameList)); //////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.NameListName)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Ref)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Open)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FactDecl)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.AssertDecl)); //////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunDecl)); //////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionName)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.CmdDecl)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Typescope)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.EnumDecl)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ElseClause)); ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Module)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.LetDecl)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.QuantDecls)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Decl)); ///* adding this rule definitely didn't help! 
*/ ////interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Expr)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Module)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FactDeclHeader)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.AssertDeclHeader)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunFunctionName)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PredFunctionName)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionReturn)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.SigDeclHeader)); //interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.SigExt)); // make sure we can handle forward walking from 'package' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PackageClause)); // make sure we can handle forward walking from 'import' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ImportDecl)); // make sure we can handle forward walking from 'type' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeDecl)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeSwitchGuard)); // make sure we can handle forward walking from 'const' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ConstDecl)); // make sure we can handle forward walking from 'var' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.VarDecl)); // make sure we can handle forward walking from 'func' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionType)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionDeclHeader)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.MethodDeclHeader)); // make sure we can handle forward and backward walking from ':=' interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ShortVarDecl)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.SimpleStmt)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.RangeClause)); interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.CommCase)); interpreter.ExcludedStartRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Block)); interpreter.ExcludedStartRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.LiteralValue)); return(interpreter); }
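// A usage sketch for the factory above, intended to live in the same tagger class so the
// private CreateTopLevelNetworkInterpreter method is in scope. It assumes the ANTLR 3 C#
// runtime (ANTLRStringStream, CommonTokenStream), the GoLexer/GoSemicolonInsertionTokenSource
// pair used in ReParseImpl earlier, a 'using System.Linq;' directive, and the hypothetical
// RunBoundedWalk helper sketched above. The method name is mine.
private void WalkFirstFuncDeclaration(string source)
{
    // Lex the snippet the same way ReParseImpl lexes the document, but from a plain string.
    var input = new Antlr.Runtime.ANTLRStringStream(source);
    var lexer = new GoLexer(input);
    var tokenSource = new GoSemicolonInsertionTokenSource(lexer);
    var tokens = new Antlr.Runtime.CommonTokenStream(tokenSource);
    tokens.Fill();

    // Seek to the first 'func' keyword, mirroring the outlining keyword scan.
    IToken funcToken = tokens.GetTokens().FirstOrDefault(t => t.Type == GoLexer.KW_FUNC);
    if (funcToken == null)
        return;

    tokens.Seek(funcToken.TokenIndex);
    tokens.Consume();

    NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens);
    interpreter.RunBoundedWalk();

    // interpreter.Contexts now holds traces bounded by FunctionDeclHeader and the other
    // boundary rules registered above.
}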
public void AugmentQuickInfoSession(IQuickInfoSession session, IList <object> quickInfoContent, out ITrackingSpan applicableToSpan) { applicableToSpan = null; if (session == null || quickInfoContent == null) { return; } if (session.TextView.TextBuffer == this.TextBuffer) { ITextSnapshot currentSnapshot = this.TextBuffer.CurrentSnapshot; SnapshotPoint?triggerPoint = session.GetTriggerPoint(currentSnapshot); if (!triggerPoint.HasValue) { return; } #region experimental /* use the experimental model to locate and process the expression */ Stopwatch stopwatch = Stopwatch.StartNew(); // lex the entire document var input = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length)); var lexer = new AlloyLexer(input); var tokens = new CommonTokenStream(lexer); tokens.Fill(); // locate the last token before the trigger point while (true) { IToken nextToken = tokens.LT(1); if (nextToken.Type == CharStreamConstants.EndOfFile) { break; } if (nextToken.StartIndex > triggerPoint.Value.Position) { break; } tokens.Consume(); } switch (tokens.LA(-1)) { case AlloyLexer.IDENTIFIER: case AlloyLexer.KW_THIS: case AlloyLexer.KW_UNIV: case AlloyLexer.KW_IDEN: case AlloyLexer.KW_INT2: case AlloyLexer.KW_SEQINT: case AlloyLexer.INTEGER: break; default: return; } Network network = NetworkBuilder <AlloySimplifiedAtnBuilder> .GetOrBuildNetwork(); RuleBinding memberSelectRule = network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.BinOpExpr18); #if DEBUG && false HashSet <Transition> memberSelectTransitions = new HashSet <Transition>(ObjectReferenceEqualityComparer <Transition> .Default); GetReachableTransitions(memberSelectRule, memberSelectTransitions); #endif NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens); interpreter.BoundaryRules.Add(memberSelectRule); //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.UnaryExpression)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.LetDecl)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.NameListName)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Ref)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Module)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Open)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FactDecl)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.AssertDecl)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FunctionName)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.CmdDecl)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Typescope)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.EnumDecl)); interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.ElseClause)); interpreter.ExcludedStartRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.CallArguments)); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } /* we want all traces to start outside the binOpExpr18 rule, which means all * traces with a transition reachable from binOpExpr18 should contain a push * transition with binOpExpr18's start state as its target. 
*/ if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } HashSet <InterpretTrace> contexts = new HashSet <InterpretTrace>(BoundedStartInterpretTraceEqualityComparer.Default); if (interpreter.Contexts.Count > 0) { contexts.UnionWith(interpreter.Contexts); } else { contexts.UnionWith(interpreter.BoundedStartContexts); } IOutputWindowPane pane = Provider.OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense); if (pane != null) { pane.WriteLine(string.Format("Located {0} QuickInfo expression(s) in {1}ms.", contexts.Count, stopwatch.ElapsedMilliseconds)); } HashSet <Span> spans = new HashSet <Span>(); foreach (var context in contexts) { Span?span = null; foreach (var transition in context.Transitions) { if (!transition.Transition.IsMatch) { continue; } IToken token = transition.Token; Span tokenSpan = new Span(token.StartIndex, token.StopIndex - token.StartIndex + 1); if (span == null) { span = tokenSpan; } else { span = Span.FromBounds(Math.Min(span.Value.Start, tokenSpan.Start), Math.Max(span.Value.End, tokenSpan.End)); } } if (span.HasValue) { spans.Add(span.Value); } } //List<Expression> expressions = new List<Expression>(); //HashSet<string> finalResult = new HashSet<string>(); //SnapshotSpan? contextSpan = null; bool foundInfo = false; foreach (var span in spans) { if (!span.IsEmpty) { VirtualSnapshotSpan selection = new VirtualSnapshotSpan(new SnapshotSpan(currentSnapshot, span)); if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value))) { try { Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection); if (currentExpression != null && currentExpression.Span.HasValue && currentExpression.Span.Value.Contains(triggerPoint.Value)) { applicableToSpan = currentExpression.Span.Value.Snapshot.CreateTrackingSpan(currentExpression.Span.Value, SpanTrackingMode.EdgeExclusive); quickInfoContent.Add(currentExpression.ToString()); foundInfo = true; } } catch (Exception ex) { if (ErrorHandler.IsCriticalException(ex)) { throw; } quickInfoContent.Add(ex.Message); } } //try //{ // SnapshotSpan contextSpan = new SnapshotSpan(currentSnapshot, span.Value); // Expression expression = Provider.IntellisenseCache.ParseExpression(contextSpan); // if (expression != null) // expressions.Add(expression); //} //catch (Exception e) //{ // if (ErrorHandler.IsCriticalException(e)) // throw; //} } //if (results.Count > 0) //{ // finalResult.UnionWith(results); // applicableToSpan = currentSnapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeExclusive); //} } if (!foundInfo && spans.Count > 0) { foreach (var span in spans) { if (!span.IsEmpty) { VirtualSnapshotSpan selection = new VirtualSnapshotSpan(new SnapshotSpan(currentSnapshot, span)); if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value))) { applicableToSpan = selection.Snapshot.CreateTrackingSpan(selection.SnapshotSpan, SpanTrackingMode.EdgeExclusive); break; } } } quickInfoContent.Add("Could not parse expression."); } //foreach (var result in finalResult) //{ // quickInfoContent.Add(result); //} #endregion #if false var selection = session.TextView.Selection.StreamSelectionSpan; if (selection.IsEmpty || !selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value))) { SnapshotSpan?expressionSpan = Provider.IntellisenseCache.GetExpressionSpan(triggerPoint.Value); if (expressionSpan.HasValue) { selection = new VirtualSnapshotSpan(expressionSpan.Value); } } #endif //VirtualSnapshotSpan selection = new VirtualSnapshotSpan(); 
//if (contextSpan.HasValue) // selection = new VirtualSnapshotSpan(contextSpan.Value); //if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value))) //{ // applicableToSpan = selection.Snapshot.CreateTrackingSpan(selection.SnapshotSpan, SpanTrackingMode.EdgeExclusive); // try // { // Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection); // if (currentExpression != null) // { // SnapshotSpan? span = currentExpression.Span; // if (span.HasValue) // applicableToSpan = span.Value.Snapshot.CreateTrackingSpan(span.Value, SpanTrackingMode.EdgeExclusive); // quickInfoContent.Add(currentExpression.ToString()); // } // else // { // quickInfoContent.Add("Could not parse expression."); // } // } // catch (Exception ex) // { // if (ErrorHandler.IsCriticalException(ex)) // throw; // quickInfoContent.Add(ex.Message); // } //} } }
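// For each candidate trace, AugmentQuickInfoSession above folds the extents of all matched
// tokens into a single Span via repeated Span.FromBounds(Math.Min(...), Math.Max(...)) calls.
// The sketch below isolates that folding step over plain integer bounds so it can be checked
// without the editor SDK; the names are illustrative.
using System;
using System.Collections.Generic;

internal static class SpanFolding
{
    // Item1 is the inclusive start, Item2 the exclusive end, matching Span.FromBounds semantics.
    public static Tuple<int, int> Cover(IEnumerable<Tuple<int, int>> tokenExtents)
    {
        Tuple<int, int> result = null;
        foreach (var extent in tokenExtents)
        {
            if (result == null)
                result = extent;
            else
                result = Tuple.Create(Math.Min(result.Item1, extent.Item1),
                                      Math.Max(result.Item2, extent.Item2));
        }

        return result;
    }
}
// Example: extents (3, 7) and (10, 14) fold to (3, 14); the covering span also includes the
// text between the matched tokens, which is exactly how the QuickInfo span behaves above.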
protected override void ReParseImpl() { // lex the entire document to get the set of identifiers we'll need to process ITextSnapshot snapshot = TextBuffer.CurrentSnapshot; var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length)); var lexer = new AlloyLexer(input); var tokens = new CommonTokenStream(lexer); tokens.Fill(); /* Want to collect information from the following: * - module (name) * Want to provide navigation info for the following types: * - sig * - enum * Want to provide navigation info for the following members: * - decl (within a sigBody) * - fun * - pred * - nameList (within an enumBody) * Eventually should consider the following: * - cmdDecl * - fact * - assert */ List <IToken> navigationKeywords = new List <IToken>(); while (tokens.LA(1) != CharStreamConstants.EndOfFile) { switch (tokens.LA(1)) { case AlloyLexer.KW_MODULE: case AlloyLexer.KW_SIG: case AlloyLexer.KW_ENUM: case AlloyLexer.KW_FUN: case AlloyLexer.KW_PRED: //case AlloyLexer.KW_ASSERT: //case AlloyLexer.KW_FACT: navigationKeywords.Add(tokens.LT(1)); break; case CharStreamConstants.EndOfFile: goto doneLexing; default: break; } tokens.Consume(); } doneLexing: List <IEditorNavigationTarget> navigationTargets = new List <IEditorNavigationTarget>(); AstParserRuleReturnScope <CommonTree, IToken> moduleTree = null; CommonTreeAdaptor treeAdaptor = new CommonTreeAdaptor(); foreach (var token in navigationKeywords) { tokens.Seek(token.TokenIndex); tokens.Consume(); NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens); while (interpreter.TryStepBackward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedStart)) { break; } } interpreter.CombineBoundedStartContexts(); #if false // since we're using the AlloyParser, I don't think we need this. 
while (interpreter.TryStepForward()) { if (interpreter.Contexts.Count == 0) { break; } if (interpreter.Contexts.All(context => context.BoundedEnd)) { break; } } #endif foreach (var context in interpreter.Contexts) { switch (token.Type) { case AlloyLexer.KW_MODULE: { InterpretTraceTransition firstMatch = context.Transitions.FirstOrDefault(i => i.TokenIndex != null); if (firstMatch == null) { continue; } tokens.Seek(firstMatch.TokenIndex.Value); AlloyParser parser = new AlloyParser(tokens); AstParserRuleReturnScope <CommonTree, IToken> result = parser.module(); if (result == null || parser.NumberOfSyntaxErrors > 0) { continue; } moduleTree = result; break; } case AlloyLexer.KW_SIG: case AlloyLexer.KW_ENUM: case AlloyLexer.KW_FUN: case AlloyLexer.KW_PRED: { InterpretTraceTransition firstMatch = context.Transitions.FirstOrDefault(i => i.TokenIndex != null); if (firstMatch == null) { continue; } tokens.Seek(firstMatch.TokenIndex.Value); AlloyParser parser = new AlloyParser(tokens); AstParserRuleReturnScope <CommonTree, IToken> result = null; switch (token.Type) { case AlloyLexer.KW_SIG: result = parser.sigDeclNoBlock(); break; case AlloyLexer.KW_ENUM: result = parser.enumDecl(); break; case AlloyLexer.KW_FUN: case AlloyLexer.KW_PRED: result = parser.funDeclGenericBody(); break; } if (result == null || parser.NumberOfSyntaxErrors > 0) { continue; } if (moduleTree != null) { object tree = treeAdaptor.Nil(); treeAdaptor.AddChild(tree, moduleTree.Tree); treeAdaptor.AddChild(tree, result.Tree); treeAdaptor.SetTokenBoundaries(tree, moduleTree.Start, result.Stop); result.Start = moduleTree.Start; result.Tree = (CommonTree)tree; } navigationTargets.AddRange(AlloyEditorNavigationSourceWalker.ExtractNavigationTargets(result, tokens.GetTokens().AsReadOnly(), _provider, snapshot)); break; } default: continue; } break; #if false InterpretTraceTransition firstBraceTransition = context.Transitions.FirstOrDefault(i => i.Symbol == AlloyLexer.LBRACE); InterpretTraceTransition lastBraceTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch); if (firstBraceTransition == null || lastBraceTransition == null) { continue; } if (token.Type == AlloyLexer.KW_SIG) { InterpretTraceTransition lastBodyBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.RBRACE && interpreter.Network.StateRules[i.Transition.SourceState.Id].Name == AlloyOutliningAtnBuilder.RuleNames.SigBody); if (lastBodyBraceTransition != lastBraceTransition) { var bodySpan = OutlineBlock(firstBraceTransition.Token, lastBodyBraceTransition.Token, snapshot); if (bodySpan != null) { navigationTargets.Add(bodySpan); } firstBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.LBRACE && i.TokenIndex > lastBodyBraceTransition.TokenIndex); } } var blockSpan = OutlineBlock(firstBraceTransition.Token, lastBraceTransition.Token, snapshot); if (blockSpan != null) { navigationTargets.Add(blockSpan); } #endif } } _navigationTargets = navigationTargets; OnNavigationTargetsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length)))); }
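// When both the module header and a member declaration parse cleanly, ReParseImpl above
// splices the two trees under a single nil root so the navigation walker sees one combined
// tree. The sketch below restates just that step; it uses only the ANTLR 3 tree calls already
// made above (Nil, AddChild, SetTokenBoundaries), assumes 'using Antlr.Runtime;' and
// 'using Antlr.Runtime.Tree;', and reuses the AstParserRuleReturnScope<CommonTree, IToken>
// result type from the code above. The class and method names are mine.
internal static class NavigationTreeStitching
{
    public static AstParserRuleReturnScope<CommonTree, IToken> Combine(
        AstParserRuleReturnScope<CommonTree, IToken> moduleResult,
        AstParserRuleReturnScope<CommonTree, IToken> memberResult,
        CommonTreeAdaptor treeAdaptor)
    {
        // Build a nil root holding both subtrees, then widen the member result to cover the
        // module header as well so downstream token-boundary queries stay correct.
        object tree = treeAdaptor.Nil();
        treeAdaptor.AddChild(tree, moduleResult.Tree);
        treeAdaptor.AddChild(tree, memberResult.Tree);
        treeAdaptor.SetTokenBoundaries(tree, moduleResult.Start, memberResult.Stop);

        memberResult.Start = moduleResult.Start;
        memberResult.Tree = (CommonTree)tree;
        return memberResult;
    }
}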