Example No. 1
        protected override void ReParseImpl()
        {
            var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);

            Stopwatch stopwatch = Stopwatch.StartNew();

            string        filename     = "<Unknown File>";
            ITextDocument textDocument = TextDocument;

            if (textDocument != null)
            {
                filename = textDocument.FilePath;
            }

            var snapshot = TextBuffer.CurrentSnapshot;
            var input    = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var lexer    = new PhpLexer(input);

            lexer.TokenFactory = new SnapshotTokenFactory(snapshot, lexer);
            var tokens = new CommonTokenStream(lexer);
            var parser = new PhpParser(tokens);

            parser.BuildParseTree = true;

            List <ParseErrorEventArgs> errors = new List <ParseErrorEventArgs>();

            parser.AddErrorListener(new ErrorListener(filename, errors, outputWindow));
            var result = parser.compileUnit();

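            // Walk the parse tree with a NavigationTreesListener to collect the navigation trees reported to the editor.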
            NavigationTreesListener listener = new NavigationTreesListener();

            ParseTreeWalker.Default.Walk(listener, result);

            OnParseComplete(new PhpEditorNavigationParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), result, listener.NavigationTrees));
        }
        protected virtual ICharStream CreateInputStream(SnapshotSpan span)
        {
            SnapshotCharStream input;

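            // For large spans, restrict the stream to the requested span; for small spans, stream the whole snapshot and seek to the span start.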
            if (span.Length > 1000)
            {
                input = new SnapshotCharStream(span.Snapshot, span.Span);
            }
            else
            {
                input = new SnapshotCharStream(span.Snapshot);
            }

            input.Seek(span.Start.Position);
            return input;
        }
Example No. 3
        protected override void ReParseImpl()
        {
            var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);

            Stopwatch stopwatch = Stopwatch.StartNew();

            var snapshot = TextBuffer.CurrentSnapshot;
            SnapshotCharStream         input  = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            AlloyLexer                 lexer  = new AlloyLexer(input);
            CommonTokenStream          tokens = new CommonTokenStream(lexer);
            AlloyParser                parser = new AlloyParser(tokens);
            List <ParseErrorEventArgs> errors = new List <ParseErrorEventArgs>();

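            // Report each syntax error to the output pane, prefixed with file, line, and column when the document is available, and abort after 100 errors.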
            parser.ParseError += (sender, e) =>
            {
                errors.Add(e);

                string message = e.Message;

                ITextDocument document;
                if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null)
                {
                    string fileName = document.FilePath;
                    var    line     = snapshot.GetLineFromPosition(e.Span.Start);
                    message = string.Format("{0}({1},{2}): {3}", fileName, line.LineNumber + 1, e.Span.Start - line.Start.Position + 1, message);
                }

                if (message.Length > 100)
                {
                    message = message.Substring(0, 100) + " ...";
                }

                if (outputWindow != null)
                {
                    outputWindow.WriteLine(message);
                }

                if (errors.Count > 100)
                {
                    throw new OperationCanceledException();
                }
            };

            var result = parser.compilationUnit();

            OnParseComplete(new AntlrParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), result));
        }
Example No. 4
        protected override void ReParseImpl()
        {
            var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);

            Stopwatch stopwatch = Stopwatch.StartNew();

            var snapshot = TextBuffer.CurrentSnapshot;
            var input    = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));

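            // The ANTLR 3 based reparse below is currently compiled out via '#if false'.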
#if false
            var lexer  = new AntlrErrorProvidingLexer(input);
            var tokens = new Antlr4ParserTokenStream(lexer);
            var parser = new AntlrErrorProvidingParser(tokens);

            lexer.Parser  = parser;
            tokens.Parser = parser;

            List <ParseErrorEventArgs> errors = new List <ParseErrorEventArgs>();
            parser.ParseError += (sender, e) =>
            {
                errors.Add(e);

                string message = e.Message;
                if (message.Length > 100)
                {
                    message = message.Substring(0, 100) + " ...";
                }

                if (outputWindow != null)
                {
                    outputWindow.WriteLine(message);
                }

                if (errors.Count > 100)
                {
                    throw new OperationCanceledException();
                }
            };

            AntlrTool.ToolPathRoot = typeof(AntlrTool).Assembly.Location;
            ErrorManager.SetErrorListener(new AntlrErrorProvidingParser.ErrorListener());
            Grammar g      = new Grammar();
            var     result = parser.grammar_(g);
            OnParseComplete(new AntlrParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), result));
#endif
        }
Example No. 5
        public void AugmentQuickInfoSession(IQuickInfoSession session, IList <object> quickInfoContent, out ITrackingSpan applicableToSpan)
        {
            applicableToSpan = null;
            if (session == null || quickInfoContent == null)
            {
                return;
            }

            if (session.TextView.TextBuffer == this.TextBuffer)
            {
                ITextSnapshot currentSnapshot = this.TextBuffer.CurrentSnapshot;
                SnapshotPoint? triggerPoint   = session.GetTriggerPoint(currentSnapshot);
                if (!triggerPoint.HasValue)
                {
                    return;
                }

                #region experimental
                /* use the experimental model to locate and process the expression */
                Stopwatch stopwatch = Stopwatch.StartNew();

                // lex the entire document
                var input  = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
                var lexer  = new AlloyLexer(input);
                var tokens = new CommonTokenStream(lexer);
                tokens.Fill();

                // locate the last token before the trigger point
                while (true)
                {
                    IToken nextToken = tokens.LT(1);
                    if (nextToken.Type == CharStreamConstants.EndOfFile)
                    {
                        break;
                    }

                    if (nextToken.StartIndex > triggerPoint.Value.Position)
                    {
                        break;
                    }

                    tokens.Consume();
                }

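                // Only provide QuickInfo when the token immediately before the trigger point can end an expression.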
                switch (tokens.LA(-1))
                {
                case AlloyLexer.IDENTIFIER:
                case AlloyLexer.KW_THIS:
                case AlloyLexer.KW_UNIV:
                case AlloyLexer.KW_IDEN:
                case AlloyLexer.KW_INT2:
                case AlloyLexer.KW_SEQINT:
                case AlloyLexer.INTEGER:
                    break;

                default:
                    return;
                }

                Network network = NetworkBuilder <AlloySimplifiedAtnBuilder>.GetOrBuildNetwork();

                RuleBinding memberSelectRule = network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.BinOpExpr18);
#if DEBUG && false
                HashSet <Transition> memberSelectTransitions = new HashSet <Transition>(ObjectReferenceEqualityComparer <Transition>.Default);
                GetReachableTransitions(memberSelectRule, memberSelectTransitions);
#endif

                NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

                interpreter.BoundaryRules.Add(memberSelectRule);
                //interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.UnaryExpression));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.LetDecl));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.NameListName));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Ref));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Module));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Open));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FactDecl));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.AssertDecl));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.FunctionName));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.CmdDecl));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.Typescope));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.EnumDecl));
                interpreter.BoundaryRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.ElseClause));

                interpreter.ExcludedStartRules.Add(network.GetRule(AlloySimplifiedAtnBuilder.RuleNames.CallArguments));

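                // Step backward through the simplified ATN until every remaining trace starts at one of the boundary rules above.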
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    /* we want all traces to start outside the binOpExpr18 rule, which means all
                     * traces with a transition reachable from binOpExpr18 should contain a push
                     * transition with binOpExpr18's start state as its target.
                     */
                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                HashSet <InterpretTrace> contexts = new HashSet <InterpretTrace>(BoundedStartInterpretTraceEqualityComparer.Default);
                if (interpreter.Contexts.Count > 0)
                {
                    contexts.UnionWith(interpreter.Contexts);
                }
                else
                {
                    contexts.UnionWith(interpreter.BoundedStartContexts);
                }

                IOutputWindowPane pane = Provider.OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
                if (pane != null)
                {
                    pane.WriteLine(string.Format("Located {0} QuickInfo expression(s) in {1}ms.", contexts.Count, stopwatch.ElapsedMilliseconds));
                }

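                // Merge the matched token spans of each surviving trace into candidate expression spans.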
                HashSet <Span> spans = new HashSet <Span>();
                foreach (var context in contexts)
                {
                    Span? span = null;
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Transition.IsMatch)
                        {
                            continue;
                        }

                        IToken token     = transition.Token;
                        Span   tokenSpan = new Span(token.StartIndex, token.StopIndex - token.StartIndex + 1);
                        if (span == null)
                        {
                            span = tokenSpan;
                        }
                        else
                        {
                            span = Span.FromBounds(Math.Min(span.Value.Start, tokenSpan.Start), Math.Max(span.Value.End, tokenSpan.End));
                        }
                    }

                    if (span.HasValue)
                    {
                        spans.Add(span.Value);
                    }
                }

                //List<Expression> expressions = new List<Expression>();
                //HashSet<string> finalResult = new HashSet<string>();
                //SnapshotSpan? contextSpan = null;

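                // Parse each candidate span that contains the trigger point; on success show the expression, otherwise fall back to an error message below.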
                bool foundInfo = false;
                foreach (var span in spans)
                {
                    if (!span.IsEmpty)
                    {
                        VirtualSnapshotSpan selection = new VirtualSnapshotSpan(new SnapshotSpan(currentSnapshot, span));
                        if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value)))
                        {
                            try
                            {
                                Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection);
                                if (currentExpression != null && currentExpression.Span.HasValue && currentExpression.Span.Value.Contains(triggerPoint.Value))
                                {
                                    applicableToSpan = currentExpression.Span.Value.Snapshot.CreateTrackingSpan(currentExpression.Span.Value, SpanTrackingMode.EdgeExclusive);
                                    quickInfoContent.Add(currentExpression.ToString());
                                    foundInfo = true;
                                }
                            }
                            catch (Exception ex)
                            {
                                if (ErrorHandler.IsCriticalException(ex))
                                {
                                    throw;
                                }

                                quickInfoContent.Add(ex.Message);
                            }
                        }

                        //try
                        //{
                        //    SnapshotSpan contextSpan = new SnapshotSpan(currentSnapshot, span.Value);
                        //    Expression expression = Provider.IntellisenseCache.ParseExpression(contextSpan);
                        //    if (expression != null)
                        //        expressions.Add(expression);
                        //}
                        //catch (Exception e)
                        //{
                        //    if (ErrorHandler.IsCriticalException(e))
                        //        throw;
                        //}
                    }

                    //if (results.Count > 0)
                    //{
                    //    finalResult.UnionWith(results);
                    //    applicableToSpan = currentSnapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeExclusive);
                    //}
                }

                if (!foundInfo && spans.Count > 0)
                {
                    foreach (var span in spans)
                    {
                        if (!span.IsEmpty)
                        {
                            VirtualSnapshotSpan selection = new VirtualSnapshotSpan(new SnapshotSpan(currentSnapshot, span));
                            if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value)))
                            {
                                applicableToSpan = selection.Snapshot.CreateTrackingSpan(selection.SnapshotSpan, SpanTrackingMode.EdgeExclusive);
                                break;
                            }
                        }
                    }

                    quickInfoContent.Add("Could not parse expression.");
                }

                //foreach (var result in finalResult)
                //{
                //    quickInfoContent.Add(result);
                //}

                #endregion

#if false
                var selection = session.TextView.Selection.StreamSelectionSpan;
                if (selection.IsEmpty || !selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value)))
                {
                    SnapshotSpan? expressionSpan = Provider.IntellisenseCache.GetExpressionSpan(triggerPoint.Value);
                    if (expressionSpan.HasValue)
                    {
                        selection = new VirtualSnapshotSpan(expressionSpan.Value);
                    }
                }
#endif
                //VirtualSnapshotSpan selection = new VirtualSnapshotSpan();
                //if (contextSpan.HasValue)
                //    selection = new VirtualSnapshotSpan(contextSpan.Value);

                //if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value)))
                //{
                //    applicableToSpan = selection.Snapshot.CreateTrackingSpan(selection.SnapshotSpan, SpanTrackingMode.EdgeExclusive);

                //    try
                //    {
                //        Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection);
                //        if (currentExpression != null)
                //        {
                //            SnapshotSpan? span = currentExpression.Span;
                //            if (span.HasValue)
                //                applicableToSpan = span.Value.Snapshot.CreateTrackingSpan(span.Value, SpanTrackingMode.EdgeExclusive);

                //            quickInfoContent.Add(currentExpression.ToString());
                //        }
                //        else
                //        {
                //            quickInfoContent.Add("Could not parse expression.");
                //        }
                //    }
                //    catch (Exception ex)
                //    {
                //        if (ErrorHandler.IsCriticalException(ex))
                //            throw;

                //        quickInfoContent.Add(ex.Message);
                //    }
                //}
            }
        }
Example No. 6
        protected override void ReParseImpl()
        {
            var outputWindow = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);

            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();

                var snapshot = TextBuffer.CurrentSnapshot;
                SnapshotCharStream         input  = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
                GroupLexer                 lexer  = new GroupLexer(input);
                CommonTokenStream          tokens = new CommonTokenStream(lexer);
                GroupParserWrapper         parser = new GroupParserWrapper(tokens);
                List <ParseErrorEventArgs> errors = new List <ParseErrorEventArgs>();
                parser.ParseError += (sender, e) =>
                {
                    errors.Add(e);

                    string message = e.Message;

                    ITextDocument document;
                    if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null)
                    {
                        string fileName = document.FilePath;
                        var    line     = snapshot.GetLineFromPosition(e.Span.Start);
                        message = string.Format("{0}({1},{2}): {3}", fileName, line.LineNumber + 1, e.Span.Start - line.Start.Position + 1, message);
                    }

                    if (message.Length > 100)
                    {
                        message = message.Substring(0, 100) + " ...";
                    }

                    if (outputWindow != null)
                    {
                        outputWindow.WriteLine(message);
                    }

                    if (errors.Count > 100)
                    {
                        throw new OperationCanceledException();
                    }
                };

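                // Parse the group file with the ANTLR 3 grammar, then build the template AST for the parse result.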
                TemplateGroupWrapper group = new TemplateGroupWrapper('<', '>');
                parser.group(group, "/");
                TemplateGroupRuleReturnScope returnScope = BuiltAstForGroupTemplates(group);

                // Also parse the input using the V4 lexer/parser for downstream operations that make use of it
                IList <Antlr4.Runtime.IToken>   v4tokens;
                TemplateParser.GroupFileContext v4result = ParseWithAntlr4(snapshot, out v4tokens);

                OnParseComplete(new StringTemplateParseResultEventArgs(snapshot, errors, stopwatch.Elapsed, tokens.GetTokens(), returnScope, v4tokens, v4result));
            }
            catch (Exception e) when(!ErrorHandler.IsCriticalException(e))
            {
                try
                {
                    if (outputWindow != null)
                    {
                        outputWindow.WriteLine(e.Message);
                    }
                }
                catch (Exception ex2) when(!ErrorHandler.IsCriticalException(ex2))
                {
                }
            }
        }
Example No. 7
        private List <LabelInfo> FindLabelsInScope(SnapshotPoint triggerPoint)
        {
            List <LabelInfo> labels = new List <LabelInfo>();

            /* use the experimental model to locate and process the expression */
            Stopwatch stopwatch = Stopwatch.StartNew();

            // lex the entire document
            var currentSnapshot = triggerPoint.Snapshot;
            var input           = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
            var lexer           = new ANTLRLexer(input);
            var tokens          = new CommonTokenStream(lexer);

            tokens.Fill();

            // locate the last token before the trigger point
            while (true)
            {
                IToken nextToken = tokens.LT(1);
                if (nextToken.Type == CharStreamConstants.EndOfFile)
                {
                    break;
                }

                if (nextToken.StartIndex > triggerPoint.Position)
                {
                    break;
                }

                tokens.Consume();
            }

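            // Determine whether the trigger point is inside an action block; implicit labels are only offered there.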
            bool   inAction     = false;
            IToken triggerToken = tokens.LT(-1);

            switch (triggerToken.Type)
            {
            case ANTLRLexer.RULE_REF:
            case ANTLRLexer.TOKEN_REF:
            case ANTLRLexer.DOLLAR:
                break;

            case ANTLRLexer.ACTION:
            case ANTLRLexer.FORCED_ACTION:
            case ANTLRLexer.SEMPRED:
            case ANTLRLexer.ARG_ACTION:
                inAction = true;
                break;

            default:
                return labels;
            }

            NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);

            while (interpreter.TryStepBackward())
            {
                if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 4000)
                {
                    break;
                }

                if (interpreter.Contexts.All(context => context.BoundedStart))
                {
                    break;
                }
            }

            if (interpreter.Failed)
            {
                interpreter.Contexts.Clear();
            }

            interpreter.CombineBoundedStartContexts();

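            // Collect label tokens from the interpreted traces: explicit '=' / '+=' labels always, implicit labels only inside actions.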
            HashSet <IToken> labelTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            foreach (var context in interpreter.Contexts)
            {
                var tokenTransitions = context.Transitions.Where(i => i.TokenIndex != null).ToList();
                for (int i = 1; i < tokenTransitions.Count - 1; i++)
                {
                    if (tokenTransitions[i].Symbol != ANTLRLexer.TOKEN_REF && tokenTransitions[i].Symbol != ANTLRLexer.RULE_REF)
                    {
                        continue;
                    }

                    // we add explicit labels, plus implicit labels if we're in an action
                    if (tokenTransitions[i + 1].Symbol == ANTLRLexer.ASSIGN || tokenTransitions[i + 1].Symbol == ANTLRLexer.PLUS_ASSIGN)
                    {
                        RuleBinding rule = interpreter.Network.StateRules[tokenTransitions[i + 1].Transition.SourceState.Id];
                        if (rule.Name == AntlrAtnBuilder.RuleNames.TreeRoot || rule.Name == AntlrAtnBuilder.RuleNames.ElementNoOptionSpec)
                        {
                            labelTokens.Add(tokenTransitions[i].Token);
                        }
                    }
                    else if (inAction && tokenTransitions[i - 1].Symbol != ANTLRLexer.ASSIGN && tokenTransitions[i - 1].Symbol != ANTLRLexer.PLUS_ASSIGN)
                    {
                        RuleBinding rule = interpreter.Network.StateRules[tokenTransitions[i].Transition.SourceState.Id];
                        if (rule.Name == AntlrAtnBuilder.RuleNames.Terminal || rule.Name == AntlrAtnBuilder.RuleNames.NotTerminal || rule.Name == AntlrAtnBuilder.RuleNames.RuleRef)
                        {
                            labelTokens.Add(tokenTransitions[i].Token);
                        }
                    }
                }
            }

            foreach (var token in labelTokens)
            {
                labels.Add(new LabelInfo(token.Text, "(label) " + token.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1)), StandardGlyphGroup.GlyphGroupField, Enumerable.Empty <LabelInfo>()));
            }

            /* add scopes */
            if (inAction)
            {
                /* add global scopes */
                IList <IToken> tokensList = tokens.GetTokens();
                for (int i = 0; i < tokensList.Count - 1; i++)
                {
                    var token = tokensList[i];

                    /* all global scopes appear before the first rule. before the first rule, the only place a ':' can appear is
                     * in the form '::' for things like @lexer::namespace{}
                     */
                    if (token.Type == ANTLRLexer.COLON && tokensList[i + 1].Type == ANTLRLexer.COLON)
                    {
                        break;
                    }

                    if (token.Type == ANTLRLexer.SCOPE)
                    {
                        var nextToken = tokensList.Skip(i + 1).FirstOrDefault(t => t.Channel == TokenChannels.Default);
                        if (nextToken != null && (nextToken.Type == ANTLRLexer.RULE_REF || nextToken.Type == ANTLRLexer.TOKEN_REF))
                        {
                            // TODO: parse scope members
                            IToken actionToken = tokensList.Skip(nextToken.TokenIndex + 1).FirstOrDefault(t => t.Channel == TokenChannels.Default);
                            IEnumerable <LabelInfo> members = Enumerable.Empty <LabelInfo>();

                            if (actionToken != null && actionToken.Type == ANTLRLexer.ACTION)
                            {
                                IEnumerable <IToken> scopeMembers = ExtractScopeAttributes(nextToken);
                                members = scopeMembers.Select(member =>
                                {
                                    string name              = member.Text;
                                    SnapshotSpan definition  = new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(member.StartIndex, member.StopIndex + 1));
                                    StandardGlyphGroup glyph = StandardGlyphGroup.GlyphGroupField;
                                    IEnumerable <LabelInfo> nestedMembers = Enumerable.Empty <LabelInfo>();
                                    return new LabelInfo(name, string.Empty, definition, glyph, nestedMembers);
                                });
                            }

                            labels.Add(new LabelInfo(nextToken.Text, "(global scope) " + nextToken.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(nextToken.StartIndex, nextToken.StopIndex + 1)), StandardGlyphGroup.GlyphGroupNamespace, members));
                        }
                    }
                }

                /* add rule scopes */
                // todo
            }

            /* add arguments and return values */
            if (inAction)
            {
                HashSet <IToken> argumentTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
                foreach (var context in interpreter.Contexts)
                {
                    var tokenTransitions = context.Transitions.Where(i => i.TokenIndex != null).ToList();
                    for (int i = 1; i < tokenTransitions.Count; i++)
                    {
                        if (tokenTransitions[i].Symbol == ANTLRLexer.RETURNS || tokenTransitions[i].Symbol == ANTLRLexer.COLON)
                        {
                            break;
                        }

                        if (tokenTransitions[i].Symbol == ANTLRLexer.ARG_ACTION)
                        {
                            argumentTokens.Add(tokenTransitions[i].Token);
                        }
                    }
                }

                foreach (var token in argumentTokens)
                {
                    IEnumerable <IToken> arguments = ExtractArguments(token);
                    foreach (var argument in arguments)
                    {
                        labels.Add(new LabelInfo(argument.Text, "(parameter) " + argument.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(argument.StartIndex, argument.StopIndex + 1)), StandardGlyphGroup.GlyphGroupVariable, Enumerable.Empty <LabelInfo>()));
                    }
                }
            }

            /* add return values */
            if (inAction)
            {
                HashSet <IToken> returnTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
                foreach (var context in interpreter.Contexts)
                {
                    var tokenTransitions = context.Transitions.Where(i => i.TokenIndex != null).ToList();
                    for (int i = 1; i < tokenTransitions.Count - 1; i++)
                    {
                        if (tokenTransitions[i].Symbol == ANTLRLexer.COLON)
                        {
                            break;
                        }

                        if (tokenTransitions[i].Symbol == ANTLRLexer.RETURNS)
                        {
                            if (tokenTransitions[i + 1].Symbol == ANTLRLexer.ARG_ACTION)
                            {
                                returnTokens.Add(tokenTransitions[i + 1].Token);
                            }

                            break;
                        }
                    }
                }

                foreach (var token in returnTokens)
                {
                    IEnumerable <IToken> returnValues = ExtractArguments(token);
                    foreach (var returnValue in returnValues)
                    {
                        labels.Add(new LabelInfo(returnValue.Text, "(return value) " + returnValue.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(returnValue.StartIndex, returnValue.StopIndex + 1)), StandardGlyphGroup.GlyphGroupVariable, Enumerable.Empty <LabelInfo>()));
                    }
                }
            }

            /* add intrinsic labels ($start, $type, $text, $enclosingRuleName) */
            IToken           ruleNameToken           = null;
            HashSet <IToken> enclosingRuleNameTokens = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            foreach (var context in interpreter.Contexts)
            {
                var tokenTransitions = context.Transitions.Where(i => i.Symbol == ANTLRLexer.RULE_REF || i.Symbol == ANTLRLexer.TOKEN_REF).ToList();
                if (!tokenTransitions.Any())
                {
                    continue;
                }

                ruleNameToken = tokenTransitions.First().Token;
                if (ruleNameToken != null)
                {
                    enclosingRuleNameTokens.Add(ruleNameToken);
                }
            }

            foreach (var token in enclosingRuleNameTokens)
            {
                // TODO: add members
                labels.Add(new LabelInfo(token.Text, "(enclosing rule) " + token.Text, new SnapshotSpan(triggerPoint.Snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1)), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
            }

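            /* determine the grammar type (combined, lexer, parser, or tree) from the token preceding the 'grammar' keyword */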
            GrammarType grammarType = GrammarType.None;

            int mark = tokens.Mark();

            try
            {
                tokens.Seek(0);
                bool hasGrammarType = false;
                while (!hasGrammarType)
                {
                    int la1 = tokens.LA(1);
                    switch (la1)
                    {
                    case ANTLRLexer.GRAMMAR:
                        IToken previous = tokens.LT(-1);
                        if (previous == null)
                        {
                            grammarType = GrammarType.Combined;
                        }
                        else if (previous.Type == ANTLRLexer.LEXER)
                        {
                            grammarType = GrammarType.Lexer;
                        }
                        else if (previous.Type == ANTLRLexer.PARSER)
                        {
                            grammarType = GrammarType.Parser;
                        }
                        else if (previous.Type == ANTLRLexer.TREE)
                        {
                            grammarType = GrammarType.TreeParser;
                        }
                        else
                        {
                            grammarType = GrammarType.None;
                        }

                        hasGrammarType = true;
                        break;

                    case CharStreamConstants.EndOfFile:
                        hasGrammarType = true;
                        break;

                    default:
                        break;
                    }

                    tokens.Consume();
                }
            }
            finally
            {
                tokens.Rewind(mark);
            }

            if (inAction)
            {
                switch (grammarType)
                {
                case GrammarType.Combined:
                    if (ruleNameToken == null)
                    {
                        goto default;
                    }
                    if (ruleNameToken.Type == ANTLRLexer.RULE_REF)
                    {
                        goto case GrammarType.Parser;
                    }
                    else
                    {
                        goto case GrammarType.Lexer;
                    }

                case GrammarType.Lexer:
                    labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("type", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("line", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("index", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("pos", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("channel", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("stop", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("int", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    break;

                case GrammarType.Parser:
                    labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("stop", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("tree", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("st", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    break;

                case GrammarType.TreeParser:
                    labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("tree", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("st", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    break;

                default:
                    // if we're unsure about the grammar type, include all the possible options to make sure we're covered
                    labels.Add(new LabelInfo("text", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("type", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("line", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("index", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("pos", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("channel", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("start", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("stop", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("int", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("tree", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    labels.Add(new LabelInfo("st", string.Empty, new SnapshotSpan(), StandardGlyphGroup.GlyphGroupIntrinsic, Enumerable.Empty <LabelInfo>()));
                    break;
                }
            }

            return labels;
        }
Example No. 8
        protected override void ReParseImpl()
        {
            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot snapshot    = TextBuffer.CurrentSnapshot;
            var           input       = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer       = new GoLexer(input);
            var           tokenSource = new GoSemicolonInsertionTokenSource(lexer);
            var           tokens      = new CommonTokenStream(tokenSource);

            tokens.Fill();

            /* Want to outline the following blocks:
             *  - import
             *  - type
             *  - const
             *  - func
             */

            List <IToken> outliningKeywords = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case GoLexer.KW_IMPORT:
                ////case GoLexer.KW_TYPE:
                case GoLexer.KW_CONST:
                case GoLexer.KW_STRUCT:
                case GoLexer.KW_FUNC:
                case GoLexer.KW_VAR:
                    outliningKeywords.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            List <ITagSpan <IOutliningRegionTag> > outliningRegions = new List <ITagSpan <IOutliningRegionTag> >();

            foreach (var token in outliningKeywords)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

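                // Interpret backward to find the start of the declaration, then forward to find its end.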
                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    switch (token.Type)
                    {
                    case GoLexer.KW_IMPORT:
                    case GoLexer.KW_VAR:
                    case GoLexer.KW_CONST:
                    {
                        InterpretTraceTransition firstTransition = context.Transitions.Where(i => i.Transition.IsMatch).ElementAtOrDefault(1);
                        InterpretTraceTransition lastTransition  = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                        if (firstTransition == null || lastTransition == null)
                        {
                            continue;
                        }

                        if (firstTransition.Symbol != GoLexer.LPAREN)
                        {
                            continue;
                        }

                        var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot);
                        if (blockSpan != null)
                        {
                            outliningRegions.Add(blockSpan);
                            break;
                        }

                        break;
                    }

                    case GoLexer.KW_STRUCT:
                    case GoLexer.KW_FUNC:
                    {
                        InterpretTraceTransition firstTransition = context.Transitions.FirstOrDefault(i => i.Symbol == GoLexer.LBRACE);
                        InterpretTraceTransition lastTransition  = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                        if (firstTransition == null || lastTransition == null)
                        {
                            continue;
                        }

                        var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot);
                        if (blockSpan != null)
                        {
                            outliningRegions.Add(blockSpan);
                            break;
                        }
                        break;
                    }
                    }
                }
            }

            _outliningRegions = outliningRegions;
            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
Example No. 9
        protected override void ReParseImpl()
        {
            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
            var           input    = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer    = new AlloyLexer(input);
            var           tokens   = new CommonTokenStream(lexer);

            tokens.Fill();

            /* Want to outline the following blocks:
             *  - assert
             *  - enum
             *  - fact
             *  - fun
             *  - pred
             *  - sig (block and body)
             */

            List <IToken> outliningKeywords = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case AlloyLexer.KW_ASSERT:
                case AlloyLexer.KW_ENUM:
                case AlloyLexer.KW_FACT:
                case AlloyLexer.KW_FUN:
                case AlloyLexer.KW_PRED:
                case AlloyLexer.KW_SIG:
                    outliningKeywords.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            List <ITagSpan <IOutliningRegionTag> > outliningRegions = new List <ITagSpan <IOutliningRegionTag> >();

            foreach (var token in outliningKeywords)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

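                // Outline from the first '{' to the last matched token; 'sig' declarations also get a separate region for their body block.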
                foreach (var context in interpreter.Contexts)
                {
                    InterpretTraceTransition firstBraceTransition = context.Transitions.FirstOrDefault(i => i.Symbol == AlloyLexer.LBRACE);
                    InterpretTraceTransition lastBraceTransition  = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                    if (firstBraceTransition == null || lastBraceTransition == null)
                    {
                        continue;
                    }

                    if (token.Type == AlloyLexer.KW_SIG)
                    {
                        InterpretTraceTransition lastBodyBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.RBRACE && interpreter.Network.StateRules[i.Transition.SourceState.Id].Name == AlloyOutliningAtnBuilder.RuleNames.SigBody);
                        if (lastBodyBraceTransition != lastBraceTransition && lastBodyBraceTransition != null)
                        {
                            var bodySpan = OutlineBlock(firstBraceTransition.Token, lastBodyBraceTransition.Token, snapshot);
                            if (bodySpan != null)
                            {
                                outliningRegions.Add(bodySpan);
                            }

                            firstBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.LBRACE && i.TokenIndex > lastBodyBraceTransition.TokenIndex);
                        }
                    }

                    var blockSpan = OutlineBlock(firstBraceTransition.Token, lastBraceTransition.Token, snapshot);
                    if (blockSpan != null)
                    {
                        outliningRegions.Add(blockSpan);
                        break;
                    }
                }
            }

            _outliningRegions = outliningRegions;
            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
Example No. 10
        protected override void ReParseImpl()
        {
            Stopwatch timer = Stopwatch.StartNew();

            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot snapshot    = TextBuffer.CurrentSnapshot;
            var           input       = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer       = new GoLexer(input);
            var           tokenSource = new GoSemicolonInsertionTokenSource(lexer);
            var           tokens      = new CommonTokenStream(tokenSource);

            tokens.Fill();

            /* easy to handle the following definitions:
             *  - module (name)
             *  - open (external symbol reference) ... as (name)
             *  - fact (name)?
             *  - assert (name)?
             *  - fun (ref.name | name)
             *  - pred (ref.name | name)
             *  - (name): run|check
             *  - sig (namelist)
             *  - enum (name)
             * moderate to handle the following definitions:
             *  - decl name(s)
             * harder to handle the following definitions:
             */

            /* A single name follows the following keywords:
             *  - KW_MODULE
             *  - KW_OPEN
             *  - KW_AS
             *  - KW_ENUM
             *  - KW_FACT (name is optional)
             *  - KW_ASSERT (name is optional)
             */
            List <IToken> nameKeywords = new List <IToken>();
            List <IToken> declColons   = new List <IToken>();

            List <IToken> identifiers = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case GoLexer.IDENTIFIER:
                    identifiers.Add(tokens.LT(1));
                    break;

                case GoLexer.KW_PACKAGE:
                case GoLexer.KW_IMPORT:
                case GoLexer.KW_TYPE:
                case GoLexer.KW_VAR:
                case GoLexer.KW_FUNC:
                case GoLexer.KW_CONST:
                    //case GoLexer.KW_MODULE:
                    //case GoLexer.KW_OPEN:
                    //case GoLexer.KW_AS:
                    //case GoLexer.KW_ENUM:
                    //case GoLexer.KW_FACT:
                    //case GoLexer.KW_ASSERT:
                    //case GoLexer.KW_RUN:
                    //case GoLexer.KW_CHECK:
                    //case GoLexer.KW_EXTENDS:

                    //case GoLexer.KW_FUN:
                    //case GoLexer.KW_PRED:

                    //case GoLexer.KW_SIG:
                    nameKeywords.Add(tokens.LT(1));
                    break;

                case GoLexer.DEFEQ:
                case GoLexer.COLON:
                    declColons.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

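            // Classify identifier tokens as definitions or references according to the ATN rule that matched them.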
            HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
            HashSet <IToken> references  = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            foreach (var token in nameKeywords)
            {
                tokens.Seek(token.TokenIndex);
                NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens);
                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case GoLexer.IDENTIFIER:
                            //case GoLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

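            // For ':' and ':=' tokens, interpret backward (and forward when results disagree) to classify the declared names.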
            foreach (var token in declColons)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                if (token.Type == GoLexer.COLON)
                {
                    IToken potentialLabel = tokens.LT(-2);
                    if (potentialLabel.Type != GoLexer.IDENTIFIER)
                    {
                        continue;
                    }
                }

                NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
                }

                interpreter.CombineBoundedStartContexts();

                if (!AllAgree(interpreter.Contexts))
                {
                    while (interpreter.TryStepForward())
                    {
                        if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                        {
                            break;
                        }

                        if (interpreter.Contexts.All(context => context.BoundedEnd))
                        {
                            break;
                        }

                        interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
                    }

                    interpreter.CombineBoundedEndContexts();
                }

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case GoLexer.IDENTIFIER:
                            //case GoLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

#if false
            foreach (var token in identifiers)
            {
                if (definitions.Contains(token) || references.Contains(token))
                {
                    continue;
                }

                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens);

                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));

                    if (AllAgree(interpreter.Contexts))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));

                    if (AllAgree(interpreter.Contexts))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case GoLexer.IDENTIFIER:
                            //case GoLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }
#endif

            // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
            HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.IntersectWith(references);
            definitions.ExceptWith(unknownIdentifiers);

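            // with the branch below enabled, every identifier that is not a definition and not
            // ambiguous is treated as a reference (requires complete analysis of definitions)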
#if true
            references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            references.ExceptWith(definitions);
            references.ExceptWith(unknownIdentifiers);
#else
            references.ExceptWith(unknownIdentifiers);

            // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
            unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.ExceptWith(definitions);
            unknownIdentifiers.ExceptWith(references);
#endif

            List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

            IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Definition);
            tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

            IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Reference);
            tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

            IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.UnknownIdentifier);
            tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

            _tags = tags;

            timer.Stop();

            IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
            if (pane != null)
            {
                pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
            }

            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
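
        // For context: the symbol taggers above convert each classified token set into tag spans
        // through a ClassifyTokens helper. The sketch below is a plausible minimal version,
        // assuming it simply projects each token's character range onto the snapshot; the actual
        // helper in the source may differ.
        private static IEnumerable<ITagSpan<IClassificationTag>> ClassifyTokens(ITextSnapshot snapshot, IEnumerable<IToken> tokens, ClassificationTag tag)
        {
            foreach (IToken token in tokens)
            {
                // token offsets are character positions in the snapshot; StopIndex is inclusive
                Span span = new Span(token.StartIndex, token.StopIndex - token.StartIndex + 1);
                yield return new TagSpan<IClassificationTag>(new SnapshotSpan(snapshot, span), tag);
            }
        }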
Example No. 11
        protected override ITokenSourceWithState <ClassAnchorState> CreateLexer(SnapshotSpan span, ClassAnchorState startState)
        {
            SnapshotCharStream input = new SnapshotCharStream(span);

            return(new JavaClassifierLexerWrapper <ClassAnchorState>(input, startState));
        }
Example No. 12
        protected override void ReParseImpl()
        {
            Stopwatch timer = Stopwatch.StartNew();

            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot     snapshot     = TextBuffer.CurrentSnapshot;
            var               input        = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            JavaUnicodeStream inputWrapper = new JavaUnicodeStream(input);
            var               lexer        = new Java2Lexer(inputWrapper);
            var               tokens       = new CommonTokenStream(lexer);

            tokens.Fill();

            List <IToken> nameKeywords = new List <IToken>();
            List <IToken> declColons   = new List <IToken>();

            List <IToken> identifiers = new List <IToken>();

            HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
            HashSet <IToken> references  = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            GetLl2SymbolSets();

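            // First pass: a cheap scan of the token stream using the precomputed LL(2) symbol sets.
            // Identifiers whose neighboring tokens only ever appear next to a definition (or only
            // next to a reference) are classified immediately; the rest are left for the ATN
            // interpreter below.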
            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                // covered by the double-sided check
                if (_definitionOnlySourceSet.Contains(tokens.LA(1)))
                {
                    if (tokens.LA(2) == Java2Lexer.IDENTIFIER)
                    {
                        definitions.Add(tokens.LT(2));
                    }
                }
                else if (_referenceOnlySourceSet.Contains(tokens.LA(1)))
                {
                    if (tokens.LA(2) == Java2Lexer.IDENTIFIER)
                    {
                        references.Add(tokens.LT(2));
                    }
                }

                if (_definitionOnlyFollowSet.Contains(tokens.LA(1)))
                {
                    IToken previous = tokens.LT(-1);
                    if (previous != null && previous.Type == Java2Lexer.IDENTIFIER)
                    {
                        definitions.Add(previous);
                    }
                }
                else if (_referenceOnlyFollowSet.Contains(tokens.LA(1)))
                {
                    IToken previous = tokens.LT(-1);
                    if (previous != null && previous.Type == Java2Lexer.IDENTIFIER)
                    {
                        references.Add(previous);
                    }
                }

                if (tokens.LA(2) == Java2Lexer.IDENTIFIER)
                {
                    IntervalSet bothWaysFollowDefinition;
                    IntervalSet bothWaysFollowReference;
                    _definitionContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowDefinition);
                    _referenceContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowReference);
                    bool couldBeDef = bothWaysFollowDefinition != null && bothWaysFollowDefinition.Contains(tokens.LA(3));
                    bool couldBeRef = bothWaysFollowReference != null && bothWaysFollowReference.Contains(tokens.LA(3));

                    if (couldBeDef && !couldBeRef)
                    {
                        definitions.Add(tokens.LT(2));
                    }
                    else if (couldBeRef && !couldBeDef)
                    {
                        references.Add(tokens.LT(2));
                    }
                }

                if (tokens.LA(3) == Java2Lexer.IDENTIFIER && _definitionSourceSet.Contains(tokens.LA(2)))
                {
                    IntervalSet sourceDefinition2;
                    IntervalSet sourceReference2;
                    _definitionSourceSet2.TryGetValue(tokens.LA(2), out sourceDefinition2);
                    _referenceSourceSet2.TryGetValue(tokens.LA(2), out sourceReference2);
                    bool couldBeDef = sourceDefinition2 != null && sourceDefinition2.Contains(tokens.LA(1));
                    bool couldBeRef = sourceReference2 != null && sourceReference2.Contains(tokens.LA(1));

                    if (couldBeDef && !couldBeRef)
                    {
                        definitions.Add(tokens.LT(3));
                    }
                    else if (couldBeRef && !couldBeDef)
                    {
                        references.Add(tokens.LT(3));
                    }
                }

                if (_definitionFollowSet.Contains(tokens.LA(1)))
                {
                    declColons.Add(tokens.LT(1));
                }

                if (tokens.LA(1) == Java2Lexer.IDENTIFIER)
                {
                    identifiers.Add(tokens.LT(1));
                }

                tokens.Consume();
            }

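            // Second pass: for each potential declaration site, interpret backward (and forward if
            // the contexts still disagree) to classify the identifier preceding the marker.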
            foreach (var token in declColons)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                IToken potentialDeclaration = tokens.LT(-2);
                if (potentialDeclaration.Type != Java2Lexer.IDENTIFIER || definitions.Contains(potentialDeclaration) || references.Contains(potentialDeclaration))
                {
                    continue;
                }

                bool agree = false;
                NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens, token.Type);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));

                    agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex);
                    if (agree)
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                if (!agree)
                {
                    while (interpreter.TryStepForward())
                    {
                        if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                        {
                            break;
                        }

                        if (interpreter.Contexts.All(context => context.BoundedEnd))
                        {
                            break;
                        }

                        interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));

                        agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex);
                        if (agree)
                        {
                            break;
                        }
                    }

                    interpreter.CombineBoundedEndContexts();
                }

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case Java2Lexer.IDENTIFIER:
                            //case Java2Lexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
            HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);

            unknownIdentifiers.IntersectWith(references);
            definitions.ExceptWith(unknownIdentifiers);

#if true // set to true to mark all unknown identifiers as references (requires complete analysis of definitions)
            references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            references.ExceptWith(definitions);
            references.ExceptWith(unknownIdentifiers);
#else
            references.ExceptWith(unknownIdentifiers);

            // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
            unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.ExceptWith(definitions);
            unknownIdentifiers.ExceptWith(references);
#endif

            List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

            IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Definition);
            tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

            IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Reference);
            tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

            IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.UnknownIdentifier);
            tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

            _tags = tags;

            timer.Stop();

            IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
            if (pane != null)
            {
                pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
            }

            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
Example No. 13
        public void AugmentQuickInfoSession(IQuickInfoSession session, IList <object> quickInfoContent, out ITrackingSpan applicableToSpan)
        {
            applicableToSpan = null;
            if (session == null || quickInfoContent == null)
            {
                return;
            }

            if (session.TextView.TextBuffer == this.TextBuffer)
            {
                ITextSnapshot currentSnapshot = this.TextBuffer.CurrentSnapshot;
                SnapshotPoint?triggerPoint    = session.GetTriggerPoint(currentSnapshot);
                if (!triggerPoint.HasValue)
                {
                    return;
                }

                #region experimental
                /* use the experimental model to locate and process the expression */
                Stopwatch stopwatch = Stopwatch.StartNew();

                // lex the entire document
                var input       = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
                var lexer       = new GoLexer(input);
                var tokenSource = new GoSemicolonInsertionTokenSource(lexer);
                var tokens      = new CommonTokenStream(tokenSource);
                tokens.Fill();

                // locate the last token before the trigger point
                while (true)
                {
                    IToken nextToken = tokens.LT(1);
                    if (nextToken.Type == CharStreamConstants.EndOfFile)
                    {
                        break;
                    }

                    if (nextToken.StartIndex > triggerPoint.Value.Position)
                    {
                        break;
                    }

                    tokens.Consume();
                }

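                // only continue if the token immediately before the trigger point is an identifier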
                switch (tokens.LA(-1))
                {
                case GoLexer.IDENTIFIER:
                    //case GoLexer.KW_THIS:
                    //case GoLexer.KW_UNIV:
                    //case GoLexer.KW_IDEN:
                    //case GoLexer.KW_INT2:
                    //case GoLexer.KW_SEQINT:
                    break;

                default:
                    return;
                }

                Network network = NetworkBuilder <GoSimplifiedAtnBuilder> .GetOrBuildNetwork();

                RuleBinding memberSelectRule = network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PrimaryExpr);
#if false
                HashSet <Transition> memberSelectTransitions = new HashSet <Transition>();
                GetReachableTransitions(memberSelectRule, memberSelectTransitions);
#endif

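                // The boundary rules registered below determine where a backward trace can be
                // considered complete (BoundedStart); the block rule is excluded as a start rule.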
                NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);

                interpreter.BoundaryRules.Add(memberSelectRule);
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Label));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeSwitchGuard));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FieldName));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Receiver));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FunctionDecl));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.BaseTypeName));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeSpec));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.IdentifierList));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.MethodName));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.ParameterDecl));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.FieldIdentifierList));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.PackageName));
                interpreter.BoundaryRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.TypeName));

                interpreter.ExcludedStartRules.Add(network.GetRule(GoSimplifiedAtnBuilder.RuleNames.Block));

                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    /* we want all traces to start outside the primaryExpr rule, which means all
                     * traces with a transition reachable from primaryExpr should contain a push
                     * transition with primaryExpr's start state as its target.
                     */
                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                IOutputWindowPane pane = Provider.OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
                if (pane != null)
                {
                    pane.WriteLine(string.Format("Located {0} QuickInfo expression(s) in {1}ms.", interpreter.Contexts.Count, stopwatch.ElapsedMilliseconds));
                }

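                // compute the union span of the matched tokens in each surviving trace; the last
                // non-empty span becomes the candidate span for the QuickInfo content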
                HashSet <string> finalResult = new HashSet <string>();
                SnapshotSpan?    contextSpan = null;
                foreach (var context in interpreter.Contexts)
                {
                    Span?span = null;
                    //List<string> results = AnalyzeInterpreterTrace(context, memberSelectRule, out span);

                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Transition.IsMatch)
                        {
                            continue;
                        }

                        IToken token     = transition.Token;
                        Span   tokenSpan = new Span(token.StartIndex, token.StopIndex - token.StartIndex + 1);
                        if (span == null)
                        {
                            span = tokenSpan;
                        }
                        else
                        {
                            span = Span.FromBounds(Math.Min(span.Value.Start, tokenSpan.Start), Math.Max(span.Value.End, tokenSpan.End));
                        }
                    }

                    if (span.HasValue && !span.Value.IsEmpty)
                    {
                        contextSpan = new SnapshotSpan(currentSnapshot, span.Value);
                    }

                    //if (results.Count > 0)
                    //{
                    //    finalResult.UnionWith(results);
                    //    applicableToSpan = currentSnapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeExclusive);
                    //}
                }

                foreach (var result in finalResult)
                {
                    quickInfoContent.Add(result);
                }

                #endregion

#if false
                var selection = session.TextView.Selection.StreamSelectionSpan;
                if (selection.IsEmpty || !selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value)))
                {
                    SnapshotSpan?expressionSpan = Provider.IntellisenseCache.GetExpressionSpan(triggerPoint.Value);
                    if (expressionSpan.HasValue)
                    {
                        selection = new VirtualSnapshotSpan(expressionSpan.Value);
                    }
                }
#endif
                VirtualSnapshotSpan selection = new VirtualSnapshotSpan();
                if (contextSpan.HasValue)
                {
                    selection = new VirtualSnapshotSpan(contextSpan.Value);
                }

                if (!selection.IsEmpty && selection.Contains(new VirtualSnapshotPoint(triggerPoint.Value)))
                {
                    applicableToSpan = selection.Snapshot.CreateTrackingSpan(selection.SnapshotSpan, SpanTrackingMode.EdgeExclusive);
                    quickInfoContent.Add(selection.GetText());

                    //try
                    //{
                    //    Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection);
                    //    if (currentExpression != null)
                    //    {
                    //        SnapshotSpan? span = currentExpression.Span;
                    //        if (span.HasValue)
                    //            applicableToSpan = span.Value.Snapshot.CreateTrackingSpan(span.Value, SpanTrackingMode.EdgeExclusive);

                    //        quickInfoContent.Add(currentExpression.ToString());
                    //    }
                    //    else
                    //    {
                    //        quickInfoContent.Add("Could not parse expression.");
                    //    }
                    //}
                    //catch (Exception ex) when (!ErrorHandler.IsCriticalException(ex))
                    //{
                    //    quickInfoContent.Add(ex.Message);
                    //}
                }
            }
        }
Example No. 14
        protected override void ReParseImpl()
        {
            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
            var           input    = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer    = new AlloyLexer(input);
            var           tokens   = new CommonTokenStream(lexer);

            tokens.Fill();

            /* easy to handle the following definitions:
             *  - module (name)
             *  - open (external symbol reference) ... as (name)
             *  - fact (name)?
             *  - assert (name)?
             *  - fun (ref.name | name)
             *  - pred (ref.name | name)
             *  - (name): run|check
             *  - sig (namelist)
             *  - enum (name)
             * moderate to handle the following definitions:
             *  - decl name(s)
             * harder to handle the following definitions:
             */

            /* A single name follows the following keywords:
             *  - KW_MODULE
             *  - KW_OPEN
             *  - KW_AS
             *  - KW_ENUM
             *  - KW_FACT (name is optional)
             *  - KW_ASSERT (name is optional)
             */
            List <IToken> nameKeywords = new List <IToken>();
            List <IToken> declColons   = new List <IToken>();

            List <IToken> identifiers = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case AlloyLexer.IDENTIFIER:
                    identifiers.Add(tokens.LT(1));
                    break;

                case AlloyLexer.KW_MODULE:
                case AlloyLexer.KW_OPEN:
                case AlloyLexer.KW_AS:
                case AlloyLexer.KW_ENUM:
                case AlloyLexer.KW_FACT:
                case AlloyLexer.KW_ASSERT:
                case AlloyLexer.KW_RUN:
                case AlloyLexer.KW_CHECK:
                case AlloyLexer.KW_EXTENDS:

                case AlloyLexer.KW_FUN:
                case AlloyLexer.KW_PRED:

                case AlloyLexer.KW_SIG:
                    nameKeywords.Add(tokens.LT(1));
                    break;

                case AlloyLexer.COLON:
                    declColons.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
            HashSet <IToken> references  = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

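            // Pass 1: from each keyword that introduces a name, interpret forward and record
            // whether the identifiers encountered act as definitions, references, or both.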
            foreach (var token in nameKeywords)
            {
                tokens.Seek(token.TokenIndex);
                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case AlloyLexer.IDENTIFIER:
                        case AlloyLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

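            // Pass 2: from each declaration colon, interpret backward to the start of the
            // declaration and then forward to its end before classifying the identifiers.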
            foreach (var token in declColons)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case AlloyLexer.IDENTIFIER:
                        case AlloyLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

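            // Pass 3: any identifier still unclassified gets the same bidirectional treatment.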
            foreach (var token in identifiers)
            {
                if (definitions.Contains(token) || references.Contains(token))
                {
                    continue;
                }

                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case AlloyLexer.IDENTIFIER:
                        case AlloyLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
            HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);

            unknownIdentifiers.IntersectWith(references);
            definitions.ExceptWith(unknownIdentifiers);
            references.ExceptWith(unknownIdentifiers);

            // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
            unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.ExceptWith(definitions);
            unknownIdentifiers.ExceptWith(references);

            List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

            IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.Definition);

            tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

            IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.Reference);

            tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

            IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.UnknownIdentifier);

            tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

            _tags = tags;

            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
Example No. 15
        protected override void ReParseImpl()
        {
            // lex the entire document to get the set of identifiers we'll need to process
            ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
            var           input    = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer    = new AlloyLexer(input);
            var           tokens   = new CommonTokenStream(lexer);

            tokens.Fill();

            /* Want to collect information from the following:
             *  - module (name)
             * Want to provide navigation info for the following types:
             *  - sig
             *  - enum
             * Want to provide navigation info for the following members:
             *  - decl (within a sigBody)
             *  - fun
             *  - pred
             *  - nameList (within an enumBody)
             * Eventually should consider the following:
             *  - cmdDecl
             *  - fact
             *  - assert
             */

            List <IToken> navigationKeywords = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case AlloyLexer.KW_MODULE:
                case AlloyLexer.KW_SIG:
                case AlloyLexer.KW_ENUM:
                case AlloyLexer.KW_FUN:
                case AlloyLexer.KW_PRED:
                    //case AlloyLexer.KW_ASSERT:
                    //case AlloyLexer.KW_FACT:
                    navigationKeywords.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            List <IEditorNavigationTarget> navigationTargets         = new List <IEditorNavigationTarget>();
            AstParserRuleReturnScope <CommonTree, IToken> moduleTree = null;
            CommonTreeAdaptor treeAdaptor = new CommonTreeAdaptor();

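            // For each keyword that can start a navigable declaration, interpret backward to find
            // where the declaration begins, then let the AlloyParser build the tree consumed by
            // the navigation-target walker.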
            foreach (var token in navigationKeywords)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

#if false // since we're using the AlloyParser, I don't think we need this.
                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }
#endif

                foreach (var context in interpreter.Contexts)
                {
                    switch (token.Type)
                    {
                    case AlloyLexer.KW_MODULE:
                    {
                        InterpretTraceTransition firstMatch = context.Transitions.FirstOrDefault(i => i.TokenIndex != null);
                        if (firstMatch == null)
                        {
                            continue;
                        }

                        tokens.Seek(firstMatch.TokenIndex.Value);
                        AlloyParser parser = new AlloyParser(tokens);
                        AstParserRuleReturnScope <CommonTree, IToken> result = parser.module();
                        if (result == null || parser.NumberOfSyntaxErrors > 0)
                        {
                            continue;
                        }

                        moduleTree = result;
                        break;
                    }

                    case AlloyLexer.KW_SIG:
                    case AlloyLexer.KW_ENUM:
                    case AlloyLexer.KW_FUN:
                    case AlloyLexer.KW_PRED:
                    {
                        InterpretTraceTransition firstMatch = context.Transitions.FirstOrDefault(i => i.TokenIndex != null);
                        if (firstMatch == null)
                        {
                            continue;
                        }

                        tokens.Seek(firstMatch.TokenIndex.Value);
                        AlloyParser parser = new AlloyParser(tokens);
                        AstParserRuleReturnScope <CommonTree, IToken> result = null;

                        switch (token.Type)
                        {
                        case AlloyLexer.KW_SIG:
                            result = parser.sigDeclNoBlock();
                            break;

                        case AlloyLexer.KW_ENUM:
                            result = parser.enumDecl();
                            break;

                        case AlloyLexer.KW_FUN:
                        case AlloyLexer.KW_PRED:
                            result = parser.funDeclGenericBody();
                            break;
                        }

                        if (result == null || parser.NumberOfSyntaxErrors > 0)
                        {
                            continue;
                        }

                        if (moduleTree != null)
                        {
                            object tree = treeAdaptor.Nil();
                            treeAdaptor.AddChild(tree, moduleTree.Tree);
                            treeAdaptor.AddChild(tree, result.Tree);
                            treeAdaptor.SetTokenBoundaries(tree, moduleTree.Start, result.Stop);

                            result.Start = moduleTree.Start;
                            result.Tree  = (CommonTree)tree;
                        }

                        navigationTargets.AddRange(AlloyEditorNavigationSourceWalker.ExtractNavigationTargets(result, tokens.GetTokens().AsReadOnly(), _provider, snapshot));
                        break;
                    }

                    default:
                        continue;
                    }

                    break;
#if false
                    InterpretTraceTransition firstBraceTransition = context.Transitions.FirstOrDefault(i => i.Symbol == AlloyLexer.LBRACE);
                    InterpretTraceTransition lastBraceTransition  = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                    if (firstBraceTransition == null || lastBraceTransition == null)
                    {
                        continue;
                    }

                    if (token.Type == AlloyLexer.KW_SIG)
                    {
                        InterpretTraceTransition lastBodyBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.RBRACE && interpreter.Network.StateRules[i.Transition.SourceState.Id].Name == AlloyOutliningAtnBuilder.RuleNames.SigBody);
                        if (lastBodyBraceTransition != lastBraceTransition)
                        {
                            var bodySpan = OutlineBlock(firstBraceTransition.Token, lastBodyBraceTransition.Token, snapshot);
                            if (bodySpan != null)
                            {
                                navigationTargets.Add(bodySpan);
                            }

                            firstBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.LBRACE && i.TokenIndex > lastBodyBraceTransition.TokenIndex);
                        }
                    }

                    var blockSpan = OutlineBlock(firstBraceTransition.Token, lastBraceTransition.Token, snapshot);
                    if (blockSpan != null)
                    {
                        navigationTargets.Add(blockSpan);
                    }
#endif
                }
            }

            _navigationTargets = navigationTargets;
            OnNavigationTargetsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }