Example #1
        protected override void ReParseImpl()
        {
            // lex the entire document to find the keywords that introduce the blocks we want to outline
            ITextSnapshot snapshot    = TextBuffer.CurrentSnapshot;
            var           input       = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer       = new GoLexer(input);
            var           tokenSource = new GoSemicolonInsertionTokenSource(lexer);
            var           tokens      = new CommonTokenStream(tokenSource);

            tokens.Fill();

            /* Want to outline the following blocks:
             *  - import
             *  - const
             *  - struct
             *  - func
             *  - var
             * (type blocks are currently disabled in the switch below)
             */
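
            /* Illustrative example (not part of the original source): in the Go code
             *
             *     import (
             *         "fmt"
             *     )
             *
             *     func main() {
             *         fmt.Println("hello")
             *     }
             *
             * the parenthesized import block and the function body are the regions this
             * method reports as collapsible outlining spans.
             */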

            List <IToken> outliningKeywords = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case GoLexer.KW_IMPORT:
                ////case GoLexer.KW_TYPE:
                case GoLexer.KW_CONST:
                case GoLexer.KW_STRUCT:
                case GoLexer.KW_FUNC:
                case GoLexer.KW_VAR:
                    outliningKeywords.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            List <ITagSpan <IOutliningRegionTag> > outliningRegions = new List <ITagSpan <IOutliningRegionTag> >();

            foreach (var token in outliningKeywords)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
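                // step the ATN interpreter backward from the keyword until every surviving
                // context is anchored at a rule start, then forward until each is anchored
                // at a rule end; the bounded contexts describe the block surrounding the keyword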
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    switch (token.Type)
                    {
                    case GoLexer.KW_IMPORT:
                    case GoLexer.KW_VAR:
                    case GoLexer.KW_CONST:
                    {
                        InterpretTraceTransition firstTransition = context.Transitions.Where(i => i.Transition.IsMatch).ElementAtOrDefault(1);
                        InterpretTraceTransition lastTransition  = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                        if (firstTransition == null || lastTransition == null)
                        {
                            continue;
                        }

                        if (firstTransition.Symbol != GoLexer.LPAREN)
                        {
                            continue;
                        }

                        var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot);
                        if (blockSpan != null)
                        {
                            outliningRegions.Add(blockSpan);
                            break;
                        }

                        break;
                    }

                    case GoLexer.KW_STRUCT:
                    case GoLexer.KW_FUNC:
                    {
                        InterpretTraceTransition firstTransition = context.Transitions.FirstOrDefault(i => i.Symbol == GoLexer.LBRACE);
                        InterpretTraceTransition lastTransition  = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
                        if (firstTransition == null || lastTransition == null)
                        {
                            continue;
                        }

                        var blockSpan = OutlineBlock(firstTransition.Token, lastTransition.Token, snapshot);
                        if (blockSpan != null)
                        {
                            outliningRegions.Add(blockSpan);
                            break;
                        }
                        break;
                    }
                    }
                }
            }

            _outliningRegions = outliningRegions;
            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
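
The OutlineBlock helper called in the loop above is not shown in this example. The sketch below is an assumed minimal implementation (only the name and parameters match the call sites; the body is illustrative): it collapses the character range from the opening token through the last matched token and skips ranges that fit on a single line.

        private ITagSpan<IOutliningRegionTag> OutlineBlock(IToken firstToken, IToken lastToken, ITextSnapshot snapshot)
        {
            // span from the first interesting token (e.g. '(' or '{') through the end of the last matched token
            Span span = Span.FromBounds(firstToken.StartIndex, lastToken.StopIndex + 1);

            // only multi-line blocks are worth collapsing
            if (snapshot.GetLineNumberFromPosition(span.Start) == snapshot.GetLineNumberFromPosition(span.End))
                return null;

            SnapshotSpan snapshotSpan = new SnapshotSpan(snapshot, span);

            // collapsed form shown in the editor, full text shown in the hover hint
            var tag = new OutliningRegionTag("...", snapshotSpan.GetText());
            return new TagSpan<IOutliningRegionTag>(snapshotSpan, tag);
        }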
Example #2
        protected override void ReParseImpl()
        {
            Stopwatch timer = Stopwatch.StartNew();

            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot     snapshot     = TextBuffer.CurrentSnapshot;
            var               input        = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            JavaUnicodeStream inputWrapper = new JavaUnicodeStream(input);
            var               lexer        = new Java2Lexer(inputWrapper);
            var               tokens       = new CommonTokenStream(lexer);

            tokens.Fill();

            List <IToken> nameKeywords = new List <IToken>();
            List <IToken> declColons   = new List <IToken>();

            List <IToken> identifiers = new List <IToken>();

            HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
            HashSet <IToken> references  = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            GetLl2SymbolSets();
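
            // illustrative note (not from the original source): with sets like these, a token
            // such as 'class' can only be followed by a defined identifier ("class Foo"), while
            // 'extends' can only be followed by a referenced one ("extends Bar"); the two-sided
            // sets consulted below disambiguate identifiers whose single neighbor is inconclusive
            // by also inspecting the token on the opposite side. The exact set contents are built
            // elsewhere (presumably by GetLl2SymbolSets above) and are not shown here.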

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                // covered by the double-sided check
                if (_definitionOnlySourceSet.Contains(tokens.LA(1)))
                {
                    if (tokens.LA(2) == Java2Lexer.IDENTIFIER)
                    {
                        definitions.Add(tokens.LT(2));
                    }
                }
                else if (_referenceOnlySourceSet.Contains(tokens.LA(1)))
                {
                    if (tokens.LA(2) == Java2Lexer.IDENTIFIER)
                    {
                        references.Add(tokens.LT(2));
                    }
                }

                if (_definitionOnlyFollowSet.Contains(tokens.LA(1)))
                {
                    IToken previous = tokens.LT(-1);
                    if (previous != null && previous.Type == Java2Lexer.IDENTIFIER)
                    {
                        definitions.Add(previous);
                    }
                }
                else if (_referenceOnlyFollowSet.Contains(tokens.LA(1)))
                {
                    IToken previous = tokens.LT(-1);
                    if (previous != null && previous.Type == Java2Lexer.IDENTIFIER)
                    {
                        references.Add(previous);
                    }
                }

                if (tokens.LA(2) == Java2Lexer.IDENTIFIER)
                {
                    IntervalSet bothWaysFollowDefinition;
                    IntervalSet bothWaysFollowReference;
                    _definitionContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowDefinition);
                    _referenceContextSet1.TryGetValue(tokens.LA(1), out bothWaysFollowReference);
                    bool couldBeDef = bothWaysFollowDefinition != null && bothWaysFollowDefinition.Contains(tokens.LA(3));
                    bool couldBeRef = bothWaysFollowReference != null && bothWaysFollowReference.Contains(tokens.LA(3));

                    if (couldBeDef && !couldBeRef)
                    {
                        definitions.Add(tokens.LT(2));
                    }
                    else if (couldBeRef && !couldBeDef)
                    {
                        references.Add(tokens.LT(2));
                    }
                }

                if (tokens.LA(3) == Java2Lexer.IDENTIFIER && _definitionSourceSet.Contains(tokens.LA(2)))
                {
                    IntervalSet sourceDefinition2;
                    IntervalSet sourceReference2;
                    _definitionSourceSet2.TryGetValue(tokens.LA(2), out sourceDefinition2);
                    _referenceSourceSet2.TryGetValue(tokens.LA(2), out sourceReference2);
                    bool couldBeDef = sourceDefinition2 != null && sourceDefinition2.Contains(tokens.LA(1));
                    bool couldBeRef = sourceReference2 != null && sourceReference2.Contains(tokens.LA(1));

                    if (couldBeDef && !couldBeRef)
                    {
                        definitions.Add(tokens.LT(3));
                    }
                    else if (couldBeRef && !couldBeDef)
                    {
                        references.Add(tokens.LT(3));
                    }
                }

                if (_definitionFollowSet.Contains(tokens.LA(1)))
                {
                    declColons.Add(tokens.LT(1));
                }

                if (tokens.LA(1) == Java2Lexer.IDENTIFIER)
                {
                    identifiers.Add(tokens.LT(1));
                }

                tokens.Consume();
            }

            foreach (var token in declColons)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                IToken potentialDeclaration = tokens.LT(-2);
                if (potentialDeclaration.Type != Java2Lexer.IDENTIFIER || definitions.Contains(potentialDeclaration) || references.Contains(potentialDeclaration))
                {
                    continue;
                }

                bool agree = false;
                NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens, token.Type);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));

                    agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex);
                    if (agree)
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                if (!agree)
                {
                    while (interpreter.TryStepForward())
                    {
                        if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                        {
                            break;
                        }

                        if (interpreter.Contexts.All(context => context.BoundedEnd))
                        {
                            break;
                        }

                        interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));

                        agree = AllAgree(interpreter.Contexts, potentialDeclaration.TokenIndex);
                        if (agree)
                        {
                            break;
                        }
                    }

                    interpreter.CombineBoundedEndContexts();
                }

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case Java2Lexer.IDENTIFIER:
                            //case Java2Lexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == JavaAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
            HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);

            unknownIdentifiers.IntersectWith(references);
            definitions.ExceptWith(unknownIdentifiers);

#if true // set to true to mark all unknown identifiers as references (requires complete analysis of definitions)
            references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            references.ExceptWith(definitions);
            references.ExceptWith(unknownIdentifiers);
#else
            references.ExceptWith(unknownIdentifiers);

            // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
            unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.ExceptWith(definitions);
            unknownIdentifiers.ExceptWith(references);
#endif

            List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

            IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Definition);
            tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

            IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.Reference);
            tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

            IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(JavaSymbolTaggerClassificationTypeNames.UnknownIdentifier);
            tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

            _tags = tags;

            timer.Stop();

            IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
            if (pane != null)
            {
                pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
            }

            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
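
The HashSet<IToken> collections in these examples are keyed with a TokenIndexEqualityComparer whose implementation is not shown. A minimal sketch consistent with its name and usage, treating tokens as equal when they occupy the same position in the token stream:

        internal sealed class TokenIndexEqualityComparer : IEqualityComparer<IToken>
        {
            public static readonly TokenIndexEqualityComparer Default = new TokenIndexEqualityComparer();

            private TokenIndexEqualityComparer()
            {
            }

            public bool Equals(IToken x, IToken y)
            {
                if (x == null || y == null)
                    return x == y;

                return x.TokenIndex == y.TokenIndex;
            }

            public int GetHashCode(IToken obj)
            {
                return obj.TokenIndex;
            }
        }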
Example #3
        protected override void ReParseImpl()
        {
            Stopwatch timer = Stopwatch.StartNew();

            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot snapshot    = TextBuffer.CurrentSnapshot;
            var           input       = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer       = new GoLexer(input);
            var           tokenSource = new GoSemicolonInsertionTokenSource(lexer);
            var           tokens      = new CommonTokenStream(tokenSource);

            tokens.Fill();

            /* Keywords that introduce named declarations (the defined name(s) follow
             * the keyword):
             *  - package (name)
             *  - import (optional alias name)
             *  - type (name)
             *  - var (name list)
             *  - const (name list)
             *  - func (name)
             *
             * In addition, ':=' (DEFEQ) introduces short variable declarations and ':'
             * (COLON) may follow a label, so those tokens are collected separately and
             * resolved with the declaration network interpreter further below.
             */
            List <IToken> nameKeywords = new List <IToken>();
            List <IToken> declColons   = new List <IToken>();

            List <IToken> identifiers = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case GoLexer.IDENTIFIER:
                    identifiers.Add(tokens.LT(1));
                    break;

                case GoLexer.KW_PACKAGE:
                case GoLexer.KW_IMPORT:
                case GoLexer.KW_TYPE:
                case GoLexer.KW_VAR:
                case GoLexer.KW_FUNC:
                case GoLexer.KW_CONST:
                    //case GoLexer.KW_MODULE:
                    //case GoLexer.KW_OPEN:
                    //case GoLexer.KW_AS:
                    //case GoLexer.KW_ENUM:
                    //case GoLexer.KW_FACT:
                    //case GoLexer.KW_ASSERT:
                    //case GoLexer.KW_RUN:
                    //case GoLexer.KW_CHECK:
                    //case GoLexer.KW_EXTENDS:

                    //case GoLexer.KW_FUN:
                    //case GoLexer.KW_PRED:

                    //case GoLexer.KW_SIG:
                    nameKeywords.Add(tokens.LT(1));
                    break;

                case GoLexer.DEFEQ:
                case GoLexer.COLON:
                    declColons.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
            HashSet <IToken> references  = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            foreach (var token in nameKeywords)
            {
                tokens.Seek(token.TokenIndex);
                NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens);
                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case GoLexer.IDENTIFIER:
                            //case GoLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            foreach (var token in declColons)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                if (token.Type == GoLexer.COLON)
                {
                    IToken potentialLabel = tokens.LT(-2);
                    if (potentialLabel.Type != GoLexer.IDENTIFIER)
                    {
                        continue;
                    }
                }

                NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
                }

                interpreter.CombineBoundedStartContexts();

                if (!AllAgree(interpreter.Contexts))
                {
                    while (interpreter.TryStepForward())
                    {
                        if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                        {
                            break;
                        }

                        if (interpreter.Contexts.All(context => context.BoundedEnd))
                        {
                            break;
                        }

                        interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
                    }

                    interpreter.CombineBoundedEndContexts();
                }

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case GoLexer.IDENTIFIER:
                            //case GoLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

#if false
            foreach (var token in identifiers)
            {
                if (definitions.Contains(token) || references.Contains(token))
                {
                    continue;
                }

                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens);

                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));

                    if (AllAgree(interpreter.Contexts))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }

                    interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));

                    if (AllAgree(interpreter.Contexts))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case GoLexer.IDENTIFIER:
                            //case GoLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }
#endif

            // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
            HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.IntersectWith(references);
            definitions.ExceptWith(unknownIdentifiers);

#if true
            references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            references.ExceptWith(definitions);
            references.ExceptWith(unknownIdentifiers);
#else
            references.ExceptWith(unknownIdentifiers);

            // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
            unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.ExceptWith(definitions);
            unknownIdentifiers.ExceptWith(references);
#endif

            List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

            IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Definition);
            tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

            IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Reference);
            tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

            IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.UnknownIdentifier);
            tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

            _tags = tags;

            timer.Stop();

            IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
            if (pane != null)
            {
                pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
            }

            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
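
The ClassifyTokens helper shared by examples #2, #3, and #4 (below) is not shown. A minimal sketch of an assumed implementation (the signature matches the call sites; the body is illustrative): it wraps each token's character range in a tag span carrying the supplied classification tag.

        private static IEnumerable<ITagSpan<IClassificationTag>> ClassifyTokens(ITextSnapshot snapshot, IEnumerable<IToken> tokens, ClassificationTag tag)
        {
            foreach (IToken token in tokens)
            {
                // skip imaginary tokens (e.g. inserted semicolons) that do not map back to text
                if (token.StartIndex < 0 || token.StopIndex < token.StartIndex)
                    continue;

                var span = new SnapshotSpan(snapshot, Span.FromBounds(token.StartIndex, token.StopIndex + 1));
                yield return new TagSpan<IClassificationTag>(span, tag);
            }
        }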
Example #4
        protected override void ReParseImpl()
        {
            // lex the entire document to get the set of identifiers we'll need to classify
            ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
            var           input    = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
            var           lexer    = new AlloyLexer(input);
            var           tokens   = new CommonTokenStream(lexer);

            tokens.Fill();

            /* easy to handle the following definitions:
             *  - module (name)
             *  - open (external symbol reference) ... as (name)
             *  - fact (name)?
             *  - assert (name)?
             *  - fun (ref.name | name)
             *  - pred (ref.name | name)
             *  - (name): run|check
             *  - sig (namelist)
             *  - enum (name)
             * moderate to handle the following definitions:
             *  - decl name(s)
             * harder to handle the following definitions:
             */

            /* A single name follows the following keywords:
             *  - KW_MODULE
             *  - KW_OPEN
             *  - KW_AS
             *  - KW_ENUM
             *  - KW_FACT (name is optional)
             *  - KW_ASSERT (name is optional)
             */
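
            /* Illustrative Alloy snippet (not part of the original source) showing the
             * constructs the keyword scan below picks up:
             *
             *     module family                        -- KW_MODULE followed by a name
             *     sig Person { father: lone Person }   -- KW_SIG; the decl uses COLON
             *     fact Parents { ... }                 -- KW_FACT with an optional name
             *     pred show() { ... }                  -- KW_PRED followed by a name
             *     run show                             -- KW_RUN referencing a name
             */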
            List <IToken> nameKeywords = new List <IToken>();
            List <IToken> declColons   = new List <IToken>();

            List <IToken> identifiers = new List <IToken>();

            while (tokens.LA(1) != CharStreamConstants.EndOfFile)
            {
                switch (tokens.LA(1))
                {
                case AlloyLexer.IDENTIFIER:
                    identifiers.Add(tokens.LT(1));
                    break;

                case AlloyLexer.KW_MODULE:
                case AlloyLexer.KW_OPEN:
                case AlloyLexer.KW_AS:
                case AlloyLexer.KW_ENUM:
                case AlloyLexer.KW_FACT:
                case AlloyLexer.KW_ASSERT:
                case AlloyLexer.KW_RUN:
                case AlloyLexer.KW_CHECK:
                case AlloyLexer.KW_EXTENDS:

                case AlloyLexer.KW_FUN:
                case AlloyLexer.KW_PRED:

                case AlloyLexer.KW_SIG:
                    nameKeywords.Add(tokens.LT(1));
                    break;

                case AlloyLexer.COLON:
                    declColons.Add(tokens.LT(1));
                    break;

                case CharStreamConstants.EndOfFile:
                    goto doneLexing;

                default:
                    break;
                }

                tokens.Consume();
            }

doneLexing:

            HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
            HashSet <IToken> references  = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

            foreach (var token in nameKeywords)
            {
                tokens.Seek(token.TokenIndex);
                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case AlloyLexer.IDENTIFIER:
                        case AlloyLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
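                            // note: a token whose rule is neither a name definition nor a name
                            // reference is added to both sets here; the set intersection near
                            // the end of this method later reclassifies it as 'unknown'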
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            foreach (var token in declColons)
            {
                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case AlloyLexer.IDENTIFIER:
                        case AlloyLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            foreach (var token in identifiers)
            {
                if (definitions.Contains(token) || references.Contains(token))
                {
                    continue;
                }

                tokens.Seek(token.TokenIndex);
                tokens.Consume();

                NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
                while (interpreter.TryStepBackward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedStart))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedStartContexts();

                while (interpreter.TryStepForward())
                {
                    if (interpreter.Contexts.Count == 0)
                    {
                        break;
                    }

                    if (interpreter.Contexts.All(context => context.BoundedEnd))
                    {
                        break;
                    }
                }

                interpreter.CombineBoundedEndContexts();

                foreach (var context in interpreter.Contexts)
                {
                    foreach (var transition in context.Transitions)
                    {
                        if (!transition.Symbol.HasValue)
                        {
                            continue;
                        }

                        switch (transition.Symbol)
                        {
                        case AlloyLexer.IDENTIFIER:
                        case AlloyLexer.KW_THIS:
                            RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameDefinition)
                            {
                                references.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            if (rule.Name != AlloySimplifiedAtnBuilder.RuleNames.NameReference)
                            {
                                definitions.Add(tokens.Get(transition.TokenIndex.Value));
                            }
                            break;

                        default:
                            continue;
                        }
                    }
                }
            }

            // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
            HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);

            unknownIdentifiers.IntersectWith(references);
            definitions.ExceptWith(unknownIdentifiers);
            references.ExceptWith(unknownIdentifiers);

            // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
            unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
            unknownIdentifiers.ExceptWith(definitions);
            unknownIdentifiers.ExceptWith(references);

            List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

            IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.Definition);

            tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

            IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.Reference);

            tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

            IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(AlloySymbolTaggerClassificationTypeNames.UnknownIdentifier);

            tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

            _tags = tags;

            OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
        }
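
Each example ends by caching its results in a field (_outliningRegions or _tags) and raising OnTagsChanged for the whole snapshot. How that cache is surfaced to the editor is not shown; the sketch below is an assumed, minimal ITagger<IClassificationTag>.GetTags implementation over the _tags field (the snapshot translation and filtering details are illustrative, not taken from the original source).

        public IEnumerable<ITagSpan<IClassificationTag>> GetTags(NormalizedSnapshotSpanCollection spans)
        {
            // capture the cached list so a concurrent re-parse cannot swap it mid-enumeration
            List<ITagSpan<IClassificationTag>> tags = _tags;
            if (tags == null || spans.Count == 0)
                yield break;

            foreach (ITagSpan<IClassificationTag> tagSpan in tags)
            {
                // bring the cached span forward to the requested snapshot before filtering
                SnapshotSpan span = tagSpan.Span.TranslateTo(spans[0].Snapshot, SpanTrackingMode.EdgeExclusive);
                if (spans.IntersectsWith(new NormalizedSnapshotSpanCollection(span)))
                    yield return new TagSpan<IClassificationTag>(span, tagSpan.Tag);
            }
        }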