        private bool ConstructTokenDefinition(Core.Config.ConfigSection section, Dictionary<string, TokenNFATable> destination, Dictionary<string, TokenNFATable> support)
        {
            Debug.Assert(!string.IsNullOrEmpty(section.Header.FirstOrDefault()), $"Token Definition {section.GetLocation()} cannot have an empty tag.");
            Debug.Assert(section.Body.Any(line => !string.IsNullOrEmpty(line)), $"Token Definition {section.GetLocation()} cannot have an empty body.");

            string tokenName    = section.Header.FirstOrDefault();
            var    finalRule    = new TokenNFATable();
            var    partialRules = new List <TokenNFATable>();

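            // Each non-empty line in the section body contributes one partial rule;
            // the components of a rule are separated by whitespace.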
            foreach (var rule in section.Body)
            {
                if (string.IsNullOrEmpty(rule))
                {
                    continue;
                }

                var ruleComponents = Regex.Split(rule, @"\s+");
                var partialRule    = new TokenNFATable();
                var ptr            = partialRule.CreateNode();
                partialRule.StartState = ptr;

                for (int componentIndex = 0; componentIndex < ruleComponents.Length; componentIndex++)
                {
                    var component = ruleComponents[componentIndex];

                    // Zero-or-more modifier: remember the loop point so the component we are
                    // about to add can be repeated (or skipped) via epsilon transitions.
                    Node? zeroOrMoreLoopPoint = null;
                    if (component.EndsWith(this.ConfigurationFile.GetRule(LexicalConfigurationFile.RULE_ZERO_OR_MORE_KEY)))
                    {
                        zeroOrMoreLoopPoint = ptr;
                        component = component[..^1];
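                    }

                    // NOTE: the rest of this method is missing from the excerpt; the continuation is a
                    // reconstruction. AddTransition, AddEpsilonTransition, the settable EndState, and
                    // InsertTableTransition's return value are assumed APIs, not confirmed ones.
                    Node next;
                    if (support.TryGetValue(component, out var subTable))
                    {
                        next = partialRule.InsertTableTransition(ptr, subTable); // splice in a defined subtoken
                    }
                    else
                    {
                        next = partialRule.CreateNode();
                        partialRule.AddTransition(ptr, next, component); // literal component
                    }

                    // Epsilon edges let a zero-or-more component repeat or be skipped entirely.
                    if (zeroOrMoreLoopPoint != null)
                    {
                        partialRule.AddEpsilonTransition(next, zeroOrMoreLoopPoint);
                        partialRule.AddEpsilonTransition(zeroOrMoreLoopPoint, next);
                    }
                    ptr = next;
                }

                partialRule.EndState = ptr;
                partialRules.Add(partialRule);
            }

            // Join the partial rules under a shared start state, one alternative per body line.
            finalRule.StartState = finalRule.CreateNode();
            foreach (var partialRule in partialRules)
            {
                finalRule.InsertTableTransition(finalRule.StartState, partialRule);
            }

            destination.Add(tokenName, finalRule);
            return true;
        }
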
        private NFATable ConstructLexerTable()
        {
            Log.WriteLineVerbose("Constructing subtokens...");
            var definedSubTokens = new Dictionary<string, TokenNFATable>();

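            // Subtokens may build on earlier subtokens, so the same dictionary serves as
            // both the destination and the support (lookup) table.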
            this.GetTokenDefinitions(this.ConfigurationFile.GetSections(LexicalConfigurationFile.SECTION_TAG_SUBTOKEN), definedSubTokens, definedSubTokens);

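            // Record each subtoken's automaton in the visualization report.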
            foreach (var entry in definedSubTokens)
            {
                this._subtokens_section.AddSection(new AutomataVisualizationSection(entry.Key, entry.Value));
            }

            Log.WriteLineVerbose("Constructing tokens...");
            var definedTokens = new Dictionary<string, TokenNFATable>();

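            // Token definitions may reference the subtokens built above, so the subtoken
            // table is passed as the support table this time.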
            this.GetTokenDefinitions(this.ConfigurationFile.GetSections(LexicalConfigurationFile.SECTION_TAG_TOKEN), definedTokens, definedSubTokens);

            foreach (var entry in definedTokens)
            {
                this._token_parts.Add(entry.Key, new ParentReportSection(entry.Key));
                this._token_parts[entry.Key].AddSection(new AutomataVisualizationSection("E-NFA", entry.Value)
                {
                    IncludeInTableOfContents = false
                });
            }

            Log.WriteLineVerbose("Minimizing tokens...");
            var minimizedTokens = this.GetMinimizedTokens(definedTokens);

            Log.WriteLineVerbose("Constructing final NFA...");
            var finalNFA = new TokenNFATable();
            var start    = finalNFA.CreateNode();

            finalNFA.StartState = start;

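            // Hang every minimized token automaton off the single shared start state.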
            foreach (var token in minimizedTokens)
            {
                finalNFA.InsertTableTransition(start, token);
            }

            this._section.AddSectionToTop(new AutomataVisualizationSection("Final NFA Parser", finalNFA));

            return finalNFA;
        }
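
        // Converts each token's e-NFA to a minimized DFA in isolation, then strips trap
        // states so the per-token automata can be recombined into the final NFA.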
        private IEnumerable<TokenNFATable> GetMinimizedTokens(Dictionary<string, TokenNFATable> definedTokens)
        {
            var tokenPriorities = this.GetTokenPriorities(this.ConfigurationFile.GetSections(LexicalConfigurationFile.SECTION_TAG_TOKEN));

            foreach (var entry in definedTokens)
            {
                string tokenName    = entry.Key;
                var    definedToken = entry.Value;

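                // Mark the accepting state and tag it with "name~priority"; the priority is
                // presumably used to break ties when several tokens match the same lexeme.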
                definedToken.EndState.IsFinal = true;
                definedToken.EndState.SetTag(Node.LABEL, $"{tokenName}~{tokenPriorities[tokenName]}");

                var section = this._token_parts[entry.Key];

                // It's much faster to convert the rules to DFAs and minimize them in isolation than to
                // merge them into one NFA and then convert/minimize, since they tend to deal with different symbols.

                Log.WriteLineVerbose($"Computing e-closure and NFA-To-DFA conversion for token {tokenName}...");
                var dfa = definedToken.ToDFATable();
                section.AddSection(new AutomataVisualizationSection("DFA", dfa)
                {
                    IncludeInTableOfContents = false
                });

                Log.WriteLineVerbose($"Minimizing the dfa for token {tokenName}...");
                var minDFA = dfa.Minimize();
                section.AddSection(new AutomataVisualizationSection("Min-DFA", minDFA)
                {
                    IncludeInTableOfContents = false
                });

                // Reconstruct it as an NFA by removing trap states, so a missing transition
                // simply means "no match" instead of leading into a dead state.
                Log.WriteLineVerbose($"Removing trap states for token {tokenName}...");
                var minNFA = new TokenNFATable(minDFA.RemoveTrapStates());
                section.AddSection(new AutomataVisualizationSection("Min-NFA", minNFA)
                {
                    IncludeInTableOfContents = false
                });

                yield return minNFA;
            }
        }