Пример #1
0
 /// <summary>
 /// Extracts the JSON payload carried by a terminal AST node.
 /// Return type is object so the choice of JSON implementation is not baked in here
 /// (currently the payload is a LightJson.JSonValue).
 /// </summary>
 /// <param name="N">AST node expected to be a TermNode whose token is a PTokJSON.</param>
 /// <returns>The token's JSON payload.</returns>
 /// <exception cref="NNShapeException">Thrown when N does not have the expected shape.</exception>
 public static object TermJSON(NamedNode N)      // again - don't want to carry JSonImplementation decision into this -- return type is currently LightJson.JSonValue
 {
     // Pattern matching replaces the previous cast-and-catch-everything approach:
     // exceptions are no longer used for control flow, and unrelated exceptions
     // are no longer silently converted into NNShapeException.
     if (N is TermNode termNode && termNode.tok is PTokJSON jSonTok)
     {
         return(jSonTok.payJSON);
     }
     throw new NNShapeException();
 }
Пример #2
0
        /// <summary>
        /// Walks up from NN_in and returns the console color of the first ancestor
        /// an indicator matches; falls back to defaultConsoleColor when nothing hits.
        /// </summary>
        public static ConsoleColor indicateCol(NamedNode NN_in)
        {
            // The lambda side-channels the last indicator result out of PathUpTo.
            // The init value is meaningless - PathUpTo never yields an empty sequence,
            // but the compiler cannot know that.
            indicatorRS lastIndicator = noHit;
            var hitNode = NN_in.PathUpTo(node => { lastIndicator = indicate(node); return(lastIndicator.hit); });

            return hitNode == null ? defaultConsoleColor : lastIndicator.col;
        }
        /// <summary>
        /// Builds the pattern matched by this action: an (optionally parenthesized)
        /// equality or inequality comparison of any expression against null,
        /// with operands in either order.
        /// </summary>
        public ConvertIfToNullCoalescingAction()
        {
            var comparand   = PatternHelper.OptionalParentheses(new AnyNode(comparedNodeGroupName));
            var nullLiteral = PatternHelper.OptionalParentheses(new NullReferenceExpression());

            // Either "x == null" or "x != null".
            var alternatives = new Choice();
            alternatives.Add(PatternHelper.CommutativeOperator(comparand, BinaryOperatorType.Equality, nullLiteral));
            alternatives.Add(PatternHelper.CommutativeOperator(comparand.Clone(), BinaryOperatorType.InEquality, nullLiteral.Clone()));

            ActionPattern = PatternHelper.OptionalParentheses(new NamedNode(expressionGroupName, alternatives));
        }
Пример #4
0
        /// <summary>
        /// Ensures a ProxyNode child for <paramref name="actualParent"/> exists under
        /// <paramref name="parent"/>, links it to the real node, marks it expanded
        /// and returns it.
        /// </summary>
        /// <param name="parent">Tree node to insert under.</param>
        /// <param name="actualParent">Real node the proxy stands in for; supplies the default name.</param>
        /// <param name="name">Display name; defaults to actualParent.Name when null.</param>
        /// <param name="existingNodeFinder">Optional predicate used to locate an existing proxy to reuse.</param>
        private static TreeNode InsertParent(
            TreeNode parent,
            NamedNode actualParent,
            string name = null,
            Func <ProxyNode, bool> existingNodeFinder = null)
        {
            name ??= actualParent.Name;

            ProxyNode proxy;

            if (existingNodeFinder != null)
            {
                // Reuse the first matching existing proxy; attach a fresh one otherwise.
                proxy = parent.Children.OfType <ProxyNode>().FirstOrDefault(existingNodeFinder);

                if (proxy == null)
                {
                    proxy = new ProxyNode {
                        Name = name
                    };
                    parent.AddChild(proxy);
                }
            }
            else
            {
                proxy = parent.GetOrCreateNodeWithName <ProxyNode>(name);
            }

            proxy.Original = actualParent;

            // Seed the highlight list with the display name on first use only.
            if (proxy.Highlights.Count == 0)
            {
                proxy.Highlights.Add(name);
            }

            proxy.IsExpanded = true;
            return(proxy);
        }
Пример #5
0
            // Holds the parsed JSON payload when type == typeE.json.
            // Declared as object so the JSON library decision (atm PatchedLJ.JsonValue) is not bound here.
            public object JSonVal;

            /// <summary>
            /// Classifies the right-hand side child (children[1]) and records its
            /// name/payload: a #-reference, a $-reference, or a JSON literal.
            /// </summary>
            /// <exception cref="NotImplementedException">Thrown for any other rhs shape.</exception>
            public override void build( )
            {
                NamedNode rhs = children[1];

                if (rhs is SharpRefNode sharpRef)
                {
                    name = sharpRef.name;
                    type = typeE.sharp;
                    return;
                }

                if (rhs is DollarRefNode dollarRef)
                {
                    name = dollarRef.name;
                    type = typeE.dollar;
                    return;
                }

                if (rhs is TermNode termNode && TermEnum(rhs) == PTokE.JSON)
                {
                    type    = typeE.json;
                    JSonVal = (termNode.tok as PTokJSON).payJSON;
                    return;
                }

                throw new NotImplementedException();
            }
Пример #6
0
    // adding more clusterfuck

    /// <summary>
    /// Parses toks with GE's start production and scopes the most greedy parse.
    /// Unconsumed trailing tokens are tolerated: scoping is done for the part
    /// that did yield a parse.
    /// </summary>
    /// <exception cref="Exception">Thrown when no parse can be produced at all.</exception>
    public static ParserComb.NamedNode ScopePartial(
        IEnumerable <PTok> toks,
        GrammarEntry GE,
        TranslateLHS TLHS)
    {
        var matches = ParserComb.Parser <PTok> .RUN_with_rest(GE.StartProd, toks);

        // Take the first match with a single lazy enumeration; the previous
        // Any()/First() pair enumerated the (potentially expensive) parse twice.
        foreach (var match in matches)
        {
            // don't care about whether there are unconsumed tokens for the most greedy match
            NamedNode AST_root = match.N;

            TranslationUnit TU = GE.TR_constructor(AST_root);     // <- TranslationUnit generation from RX_TUs fills in AC_typing callbacks as a side effect

            TU.scope(TLHS.scope);                                 // <- fills eventual holes in preCH chains from scope - if possible ( see AssignTR.scope() )

            // TU is thrown away, used only for its side effects; the TUs still linger
            // in memory due to references in preCHs - garbage collected when scope and AST_root are dumped
            return(AST_root);
        }
        throw new Exception("can't parse");
    }
Пример #7
0
        /// <summary>
        /// Build-event visitor: records <paramref name="project"/> under its target
        /// framework moniker in <paramref name="projects"/>, creating the per-TFM
        /// tree root on first sight. Projects other than the one being analyzed
        /// (spawned by nested MSBuild tasks) are ignored.
        /// </summary>
        private void ProjectVisitor(Project project, Dictionary <string, TreeNode> projects)
        {
            // Make sure this is the same project, nested MSBuild tasks may have spawned additional builds of other projects
            if (AnalyzerManager.NormalizePath(project.ProjectFile) != _projectFilePath)
            {
                return;
            }

            // Get the TFM for this project
            string tfm = project.GetProperty("TargetFrameworkMoniker");

            if (!string.IsNullOrWhiteSpace(tfm))
            {
                // Add this project to the tree for this TFM, creating the tree on
                // demand (out-var declaration replaces the redundant null pre-init).
                if (!projects.TryGetValue(tfm, out TreeNode tree))
                {
                    tree = new NamedNode();
                    projects.Add(tfm, tree);
                }
                tree.AddChild(project);
            }
        }
Пример #8
0
    /// <summary>
    /// Parses toksIN completely with StartProd, instantiates a TranslationUnit for
    /// the most greedy match and scopes it against scopeIN.
    /// </summary>
    /// <param name="TRU">Receives the instantiated TranslationUnit.</param>
    /// <returns>The AST root of the accepted parse.</returns>
    /// <exception cref="Exception">No match, or the most greedy match left unconsumed input.</exception>
    public static ParserComb.NamedNode Scope(
        IEnumerable <PTok> toksIN,
        CH_closedScope scopeIN,
        MG.PI StartProd, Func <NamedNode, TranslationUnit> TRInstantiate,
        out TranslationUnit TRU)
    {
        var matches = MG.RUN_with_rest(StartProd, toksIN).ToArray();

        // NLSend("matchlen") - presumably a debug/log tap that returns its receiver; verify.
        if (matches.Length.NLSend("matchlen") == 0 || matches[0].rest.Any())
        {
            // no match, or the most greedy match could not consume the whole input
            throw new Exception();
        }
        // MAJOR-TODO !!  ambigous grammars with epsilon consuming productions can yield
        //                an INFINITE number of alternatives , if there is a .ToArray() somewhere -> CRASH !!

        NamedNode       astRoot = matches[0].N;
        TranslationUnit unit    = TRInstantiate(astRoot);

        unit.scope(new preCH_deltaScope(scopeIN));

        TRU = unit;
        return(astRoot);
    }
Пример #9
0
        /// <summary>
        /// Inserts <paramref name="term"/> into the compressed (radix) trie rooted at
        /// <paramref name="curr"/>, splitting existing edges on partial prefix overlap
        /// and accumulating <paramref name="termFrequencyCount"/>. Every node visited
        /// is recorded in <paramref name="nodeList"/> so UpdateMaxCounts can refresh
        /// the max-count bookkeeping along the insertion path.
        /// NOTE(review): <paramref name="id"/> is not used in this body beyond being
        /// passed down recursively - confirm whether it is still needed.
        /// </summary>
        public void AddTerm(Node curr, String term, long termFrequencyCount, int id, int level, List <Node> nodeList)
        {
            try
            {
                nodeList.Add(curr);

                //test for common prefix (with possibly different suffix)
                int common = 0;
                if (curr.Children != null)
                {
                    for (int j = 0; j < curr.Children.Count; j++)
                    {
                        NamedNode nnode = curr.Children[j];
                        var       key   = nnode.key;
                        var       node  = nnode.node;

                        // length of the prefix shared between term and this child's edge key
                        for (int i = 0; i < Math.Min(term.Length, key.Length); i++)
                        {
                            if (term[i] == key[i])
                            {
                                common = i + 1;
                            }
                            else
                            {
                                break;
                            }
                        }

                        if (common > 0)
                        {
                            //term already existed
                            //existing ab
                            //new      ab
                            if ((common == term.Length) && (common == key.Length))
                            {
                                if (node.termFrequencyCount == 0)
                                {
                                    termCount++;
                                }
                                node.termFrequencyCount += termFrequencyCount;
                                UpdateMaxCounts(nodeList, node.termFrequencyCount);
                            }
                            //new is subkey
                            //existing abcd
                            //new      ab
                            //if new is shorter (== common), then node(count) and only 1. children add (clause2)
                            else if (common == term.Length)
                            {
                                //insert second part of oldKey as child
                                Node child = new Node(termFrequencyCount);
                                child.Children = new List <NamedNode>
                                {
                                    new NamedNode(key.Substring(common), node)
                                };
                                child.termFrequencyCountChildMax = Math.Max(node.termFrequencyCountChildMax, node.termFrequencyCount);
                                UpdateMaxCounts(nodeList, termFrequencyCount);

                                //insert first part as key, overwrite old node
                                curr.Children[j] = new NamedNode(term.Substring(0, common), child);
                                //sort children descending by termFrequencyCountChildMax to start lookup with most promising branch
                                curr.Children.Sort((x, y) => y.node.termFrequencyCountChildMax.CompareTo(x.node.termFrequencyCountChildMax));
                                //increment termcount by 1
                                termCount++;
                            }
                            //if oldkey shorter (==common), then recursive addTerm (clause1)
                            //existing: te
                            //new:      test
                            else if (common == key.Length)
                            {
                                AddTerm(node, term.Substring(common), termFrequencyCount, id, level + 1, nodeList);
                            }
                            //old and new have common substrings
                            //existing: test
                            //new:      team
                            else
                            {
                                //insert second part of oldKey and of s as child
                                Node child = new Node(0);//count
                                child.Children = new List <NamedNode>
                                {
                                    new NamedNode(key.Substring(common), node),
                                    new NamedNode(term.Substring(common), new Node(termFrequencyCount))
                                };
                                child.termFrequencyCountChildMax = Math.Max(node.termFrequencyCountChildMax, Math.Max(termFrequencyCount, node.termFrequencyCount));
                                UpdateMaxCounts(nodeList, termFrequencyCount);

                                //insert first part as key. overwrite old node
                                curr.Children[j] = new NamedNode(term.Substring(0, common), child);
                                //sort children descending by termFrequencyCountChildMax to start lookup with most promising branch
                                curr.Children.Sort((x, y) => y.node.termFrequencyCountChildMax.CompareTo(x.node.termFrequencyCountChildMax));
                                //increment termcount by 1
                                termCount++;
                            }
                            return;
                        }
                    }
                }

                // no child shares a prefix with term: append a fresh leaf
                // initialize dictionary if first key is inserted
                if (curr.Children == null)
                {
                    curr.Children = new List <NamedNode>
                    {
                        new NamedNode(term, new Node(termFrequencyCount))
                    };
                }
                else
                {
                    curr.Children.Add(new NamedNode(term, new Node(termFrequencyCount)));
                    //sort children descending by termFrequencyCountChildMax to start lookup with most promising branch
                    curr.Children.Sort((x, y) => y.node.termFrequencyCountChildMax.CompareTo(x.node.termFrequencyCountChildMax));
                }
                termCount++;
                UpdateMaxCounts(nodeList, termFrequencyCount);
            }
            // NOTE(review): catch-all that only writes to the console swallows real
            // errors and can leave the trie partially updated - consider rethrowing
            // or narrowing the exception type.
            catch (Exception e) { Console.WriteLine("exception: " + term + " " + e.Message); }
        }
Пример #10
0
        /// <summary>
        /// Recursively converts an XML node into a ByamlNode, interning element/attribute
        /// names, string values and binary path blobs into the shared <paramref name="nodes"/>,
        /// <paramref name="values"/> and <paramref name="data"/> tables.
        /// The "type" attribute selects the node kind: "array", "path", "null", "string",
        /// otherwise a named dictionary; leaf text is parsed as float ("f" suffix),
        /// int, or bool.
        /// </summary>
        /// <exception cref="InvalidDataException">Leaf text matches none of the supported value formats.</exception>
        public static ByamlNode FromXml(XmlDocument doc, XmlNode xmlNode, List <string> nodes, List <string> values, List <string> data)
        {
            // Skip leading comment nodes to find the first meaningful child.
            XmlNode child = xmlNode.FirstChild;

            while (child != null && child.NodeType == XmlNodeType.Comment)
            {
                child = child.NextSibling;
            }

            if (child == null || child.NodeType == XmlNodeType.Element)
            {
                if (xmlNode.Attributes["type"] != null && xmlNode.Attributes["type"].Value == "array")
                {
                    UnamedNode node = new UnamedNode();
                    foreach (XmlNode item in xmlNode.ChildNodes)
                    {
                        if (item.NodeType == XmlNodeType.Element)
                        {
                            node.Nodes.Add(FromXml(doc, item, nodes, values, data));
                        }
                    }
                    return(node);
                }
                else if (xmlNode.Attributes["type"] != null && xmlNode.Attributes["type"].Value == "path")
                {
                    // Serialize the <point> children (position xyz, normal nx/ny/nz, int val)
                    // into a binary blob; the node stores the blob's index in the data table.
                    // The Remove(Length - 1) calls strip the trailing "f" float suffix.
                    string value;
                    using (MemoryStream ms = new MemoryStream())
                    {
                        using (EndianBinaryWriter wr = new EndianBinaryWriter(ms))
                        {
                            foreach (XmlNode item in xmlNode.ChildNodes)
                            {
                                if (item.NodeType == XmlNodeType.Element && string.Equals(item.Name, "point", StringComparison.OrdinalIgnoreCase))
                                {
                                    wr.Write(float.Parse(item.Attributes["x"].Value.Remove(item.Attributes["x"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["y"].Value.Remove(item.Attributes["y"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["z"].Value.Remove(item.Attributes["z"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["nx"].Value.Remove(item.Attributes["nx"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["ny"].Value.Remove(item.Attributes["ny"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["nz"].Value.Remove(item.Attributes["nz"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(int.Parse(item.Attributes["val"].Value, CultureInfo.InvariantCulture));
                                }
                            }
                        }
                        value = Convert.ToBase64String(ms.ToArray());
                    }
                    if (!data.Contains(value))
                    {
                        data.Add(value);
                    }
                    return(new Data(data.IndexOf(value)));
                }
                else if (xmlNode.Attributes["type"] != null && xmlNode.Attributes["type"].Value == "null")
                {
                    return(new Null());
                }
                else
                {
                    // Default: a named dictionary node; both child elements and
                    // (non-xmlns, non-yamlconv) attributes become entries.
                    NamedNode node = new NamedNode();
                    foreach (XmlNode item in xmlNode.ChildNodes)
                    {
                        if (item.NodeType == XmlNodeType.Element)
                        {
                            if (!nodes.Contains(item.Name))
                            {
                                nodes.Add(item.Name);
                            }
                            node.Nodes.Add(new KeyValuePair <int, ByamlNode>(nodes.IndexOf(item.Name), FromXml(doc, item, nodes, values, data)));
                        }
                    }
                    foreach (XmlAttribute item in xmlNode.Attributes)
                    {
                        // Skip namespace declarations and yamlconv bookkeeping attributes.
                        if (item.Prefix != "xmlns" && item.NamespaceURI != "yamlconv")
                        {
                            if (!nodes.Contains(item.Name))
                            {
                                nodes.Add(item.Name);
                            }
                            node.Nodes.Add(new KeyValuePair <int, ByamlNode>(nodes.IndexOf(item.Name), FromXml(doc, item, nodes, values, data)));
                        }
                    }
                    return(node);
                }
            }
            else
            {
                // Leaf: interpret the inner text.
                if (xmlNode.Attributes != null && xmlNode.Attributes["type"] != null)
                {
                    if (xmlNode.Attributes["type"].Value == "string")
                    {
                        if (!values.Contains(xmlNode.InnerText))
                        {
                            values.Add(xmlNode.InnerText);
                        }
                        return(new String(values.IndexOf(xmlNode.InnerText)));
                    }
                }

                int  value_int;
                bool value_bool;

                // Trailing "f" marks a float literal; the suffix is stripped before parsing.
                if (xmlNode.InnerText.EndsWith("f", StringComparison.OrdinalIgnoreCase))
                {
                    return(new Single(float.Parse(xmlNode.InnerText.Remove(xmlNode.InnerText.Length - 1), CultureInfo.InvariantCulture)));
                }
                else if (int.TryParse(xmlNode.InnerText, out value_int))
                {
                    return(new Int(value_int));
                }
                else if (bool.TryParse(xmlNode.InnerText, out value_bool))
                {
                    return(new Boolean(value_bool));
                }
                else
                {
                    throw new InvalidDataException();
                }
            }
        }
Пример #11
0
 /// <summary>
 /// Visitor hook for NamedNode; not supported by this visitor.
 /// </summary>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 public object VisitNamedNode(NamedNode namedNode, object data) => throw new NotImplementedException();
Пример #12
0
        /*
         *
         *  GetAst
         *
         *  functor to decouple the intricacies of translation and scoping ( particularly scoping and choice of grammar start production )
         *  to be moved elsewhere later
         *
         *  passing in an already generated AST does not make sense because only this part is supposed to know how to react to errors ( how to pass them on to the interactive shell and so forth )
         */



        /// <summary>
        /// Autocomplete entry point: tokenizes the request line, locates the token
        /// under the cursor, parses the line via <paramref name="GetAst"/>, finds the
        /// AST terminal owning that token, walks up to the nearest ACable ancestor and
        /// dispatches to the matching completion routine (member / type / function name).
        /// Every failure path returns a NoAC response rather than throwing.
        /// </summary>
        public static ShellCommon.AC_Resp AC(ShellCommon.AC_Req shell_ac_request,
                                             Func <IEnumerable <PTokBase>, NamedNode> GetAst  // Shell needs to do its own tokenization
                                             )
        {
            var    RESPONDER  = new Responder(shell_ac_request);
            string str_in     = shell_ac_request.arg;
            int    cursor_pos = shell_ac_request.offs;

            /*
             *  atm error tokens are normal PTok's with E == PTokE.ErrT
             *  no special treatment in stripping needed - there are simply no productions that recognize token sequences with ErrT's among them
             */
            var l_toks = Lexer.Tokenize(str_in, relaxed: true);

            // Rebuild a token line that tracks both tokens and whitespace widths so
            // string positions can be mapped back to token positions.
            var TokL = new TokLinePlanB <PTok>();

            foreach (PTokBase tb in l_toks)
            {
                if (tb is PTok)
                {
                    TokL.AddTok((PTok)tb);
                }
                else
                {
                    TokL.AddWS((tb as PTokWhitespace).len);
                }
            }


            var CPos = TokL.CPosFromStringpos(cursor_pos);

            PTok AC_Tok = CPos.conflated_clusterF_AC_tok();

            if (AC_Tok == null)
            {
                return(RESPONDER.NoAC(" no acable tok ").ac_response);
            }
            NamedNode AST;

            try {
                AST = GetAst(l_toks);
            } catch (Exception e) {
                return(RESPONDER.NoAC(e.ToString()).ac_response);         // <-- this uses includes variable resolution and all kinds of other shenanigans, that are only needed for membAC, typeAC is a lot simpler - could do with only parsing
            }
            if (AST == null)
            {
                return(RESPONDER.NoAC(" GetAst() == null  ").ac_response);
            }



            // with epsilon consuming productions ( ex: DeclStar ) "all Leafs are TermNodes" can not be relied on anymore
            var TermLeafs = AST.Leafs().Where(nn => nn is MG.TermNode).Select(n => (MG.TermNode)n).ToArray().NLSendRec("term leafs");

            // Find the AST terminal that carries the token under the cursor
            // (reference equality against AC_Tok).
            NamedNode AC_Node = null;
            int       i       = 0;

            for (; i < TermLeafs.Length; i++)
            {
                if (TermLeafs[i].tok == AC_Tok)
                {
                    AC_Node = TermLeafs[i]; break;
                }
            }

            if (AC_Node == null)
            {
                return(RESPONDER.NoAC("ac beyond parsable").ac_response);                    // TODO :  D.Assert() that this is actually true with something like :  PToks.Skip(i).Where( term.tok == AC_Tok).Single
            }
            // Nearest ancestor that knows how to describe completions for this position.
            NamedNode descrNode = AC_Node.PathUpTo((n) => (n is MG.ACable));

            if (descrNode == null)
            {
                return(RESPONDER.NoAC(" descrNode == null  ").ac_response);
            }


            // Dispatch on the kind of ACable ancestor found.
            if (descrNode is MG.ACableMemb)
            {
                return(MembAcc_AC(TokL, CPos, (MGRX.MemANodeRX)descrNode, RESPONDER).ac_response);                                                   // <- todo cast not typesafe
            }
            else if (descrNode is MG.ACableTypeName)
            {
                return(Type_AC(TokL, CPos, descrNode, RESPONDER).ac_response);
            }
            else if (descrNode is MG.ACableFuncName)
            {
                return(FuncName_AC(TokL, CPos, (MGRX.FuncNameNodeRX)descrNode, RESPONDER).ac_response);
            }
            else
            {
                throw new NotImplementedException();
            }
        }
Пример #13
0
 /// <summary>
 /// Looks up the indicator callback registered for the node's runtime type and
 /// invokes it; returns noHit when no indicator is registered for that type.
 /// TryGetValue avoids the previous double dictionary lookup (ContainsKey + indexer).
 /// </summary>
 public static indicatorRS indicate(NamedNode nn) => indicatorsD.TryGetValue(nn.GetType(), out var indicator) ? indicator(nn) : noHit;
Пример #14
0
        /// <summary>
        /// Recursively converts an XML node into a ByamlNode, interning element/attribute
        /// names, string values and binary path blobs into the shared nodes/values/data
        /// tables. The "type" attribute selects "array", "path" or "string"; anything
        /// else with element children becomes a named dictionary, and leaf text is
        /// parsed as float ("f" suffix), int, or bool.
        /// </summary>
        /// <exception cref="InvalidDataException">Leaf text matches none of the supported value formats.</exception>
        public static ByamlNode FromXml(XmlDocument doc, XmlNode xmlNode, List<string> nodes, List<string> values, List<string> data)
        {
            // Skip leading comment nodes to find the first meaningful child.
            XmlNode child = xmlNode.FirstChild;
            while (child != null && child.NodeType == XmlNodeType.Comment)
                child = child.NextSibling;

            if (child == null || child.NodeType == XmlNodeType.Element)
            {
                if (xmlNode.Attributes["type"] != null && xmlNode.Attributes["type"].Value == "array")
                {
                    UnamedNode node = new UnamedNode();
                    foreach (XmlNode item in xmlNode.ChildNodes)
                        if (item.NodeType == XmlNodeType.Element)
                            node.Nodes.Add(FromXml(doc, item, nodes, values, data));
                    return node;
                }
                else if (xmlNode.Attributes["type"] != null && xmlNode.Attributes["type"].Value == "path")
                {
                    // Serialize the <point> children (position xyz, normal nx/ny/nz, int val)
                    // into a binary blob; the node stores the blob's index in the data table.
                    // The Remove(Length - 1) calls strip the trailing "f" float suffix.
                    string value;
                    using (MemoryStream ms = new MemoryStream())
                    {
                        using (EndianBinaryWriter wr = new EndianBinaryWriter(ms))
                        {
                            foreach (XmlNode item in xmlNode.ChildNodes)
                            {
                                if (item.NodeType == XmlNodeType.Element && string.Equals(item.Name, "point", StringComparison.OrdinalIgnoreCase))
                                {
                                    wr.Write(float.Parse(item.Attributes["x"].Value.Remove(item.Attributes["x"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["y"].Value.Remove(item.Attributes["y"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["z"].Value.Remove(item.Attributes["z"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["nx"].Value.Remove(item.Attributes["nx"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["ny"].Value.Remove(item.Attributes["ny"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(float.Parse(item.Attributes["nz"].Value.Remove(item.Attributes["nz"].Value.Length - 1), CultureInfo.InvariantCulture));
                                    wr.Write(int.Parse(item.Attributes["val"].Value, CultureInfo.InvariantCulture));
                                }
                            }
                        }
                        value = Convert.ToBase64String(ms.ToArray());
                    }
                    if (!data.Contains(value))
                        data.Add(value);
                    return new Data(data.IndexOf(value));
                }
                else
                {
                    // Default: a named dictionary node; both child elements and all
                    // attributes become entries.
                    // NOTE(review): unlike a sibling version of this converter, this one
                    // does not filter out xmlns/yamlconv attributes - confirm intended.
                    NamedNode node = new NamedNode();
                    foreach (XmlNode item in xmlNode.ChildNodes)
                    {
                        if (item.NodeType == XmlNodeType.Element)
                        {
                            if (!nodes.Contains(item.Name))
                                nodes.Add(item.Name);
                            node.Nodes.Add(new KeyValuePair<int, ByamlNode>(nodes.IndexOf(item.Name), FromXml(doc, item, nodes, values, data)));
                        }
                    }
                    foreach (XmlAttribute item in xmlNode.Attributes)
                    {
                        if (!nodes.Contains(item.Name))
                            nodes.Add(item.Name);
                        node.Nodes.Add(new KeyValuePair<int, ByamlNode>(nodes.IndexOf(item.Name), FromXml(doc, item, nodes, values, data)));
                    }
                    return node;
                }
            }
            else
            {
                // Leaf: interpret the inner text.
                if (xmlNode.Attributes != null && xmlNode.Attributes["type"] != null)
                {
                    if (xmlNode.Attributes["type"].Value == "string")
                    {
                        if (!values.Contains(xmlNode.InnerText))
                            values.Add(xmlNode.InnerText);
                        return new String(values.IndexOf(xmlNode.InnerText));
                    }
                }

                int value_int;
                bool value_bool;

                // Trailing "f" marks a float literal; the suffix is stripped before parsing.
                if (xmlNode.InnerText.EndsWith("f", StringComparison.OrdinalIgnoreCase))
                    return new Single(float.Parse(xmlNode.InnerText.Remove(xmlNode.InnerText.Length - 1), CultureInfo.InvariantCulture));
                else if (int.TryParse(xmlNode.InnerText, out value_int))
                    return new Int(value_int);
                else if (bool.TryParse(xmlNode.InnerText, out value_bool))
                    return new Boolean(value_bool);
                else
                    throw new InvalidDataException();
            }
        }
Пример #15
0
 /// <summary>
 /// Appends the given node to this tree's node collection.
 /// </summary>
 /// <param name="node">The node to add.</param>
 public void AddNode(NamedNode node) => nodes.Add(node);
Пример #16
0
 /// <summary>
 /// Returns the token carried by a terminal AST node.
 /// </summary>
 /// <param name="N">AST node expected to be a TermNode.</param>
 /// <exception cref="NNShapeException">Thrown when N is not a TermNode.</exception>
 public static PTok TermTok(NamedNode N)
 {
     // Pattern match instead of the previous as-cast + catch-everything:
     // no more NullReferenceException-driven control flow, and unrelated
     // exceptions are no longer masked as NNShapeException.
     if (N is TermNode termNode)
     {
         return(termNode.tok);
     }
     throw new NNShapeException();
 }
Пример #17
0
 /// <summary>
 /// Returns the payload string of the token carried by a terminal AST node.
 /// </summary>
 /// <param name="N">AST node expected to be a TermNode with a non-null token.</param>
 /// <exception cref="NNShapeException">Thrown when N is not a TermNode or has no token.</exception>
 public static string TermPay(NamedNode N)
 {
     // Pattern match instead of the previous as-cast + catch-everything:
     // no more NullReferenceException-driven control flow.
     if (N is TermNode termNode && termNode.tok != null)
     {
         return(termNode.tok.pay);
     }
     throw new NNShapeException();
 }
Пример #18
0
        public static FinalizedResponse Type_AC(TokLinePlanB <PTok> orig_TL, TokLinePlanB <PTok> .CPosC CPos, NamedNode n, Responder RESP)
        {
            Func <PTokE, bool> ON = tokE => { var tok = CPos.insideof_tok;            if (tok == null)
                                              {
                                                  return(false);
                                              }
                                              return(tok.E == tokE); };
            Func <PTokE, bool> AFTER = tokE => { var tok = CPos.immediateLAdj_tok; if (tok == null)
                                                 {
                                                     return(false);
                                                 }
                                                 return(tok.E == tokE); };

            MGRX.TypeNameNodeRX TNRX_node = (MGRX.TypeNameNodeRX)n;

            if (ON(PTokE.CS_name) || AFTER(PTokE.CS_name))
            {
                PTok targetTok = null;
                if ((CPos.insideof_tok != null) && (CPos.insideof_tok.E == PTokE.CS_name))
                {
                    targetTok = CPos.insideof_tok;
                }
                else
                {
                    targetTok = CPos.immediateLAdj_tok;   // todo: unsafe
                }
                var Largs = new List <string>();
                foreach (var tok in TNRX_node.nameToks)
                {
                    Largs.Add(tok.pay); if (tok == targetTok)
                    {
                        break;
                    }
                }                                                                                                  // targetTok might not be the last in sequence - collect all CS_names upto and including

                string prefix    = null;
                var    type_alts = SGA.QTN_AC(Largs.ToArray(), out prefix);

                if (prefix.Length > targetTok.pay.Length)
                {
                    var new_tok = new PTok {
                        E = PTokE.CS_name, pay = prefix
                    };
                    orig_TL.ReplaceTok(targetTok, new_tok);
                    return(RESP.TypeACWithSubst(type_alts, SerializeTokLine(orig_TL), orig_TL.CPosAtEndOfTok(new_tok).StringPos()));
                }
                else
                {
                    return(RESP.TypeACNoSubst(type_alts));
                }
            }
            else if (CPos.immediateLAdj_tok == null)        // abuse this as "on whitespace or EOL" - probably incomplete
            // problem :
            // TokLine was not designed with the possibility in mind that Tokens change without notice
            // thus: insert dummy token | do stuff | replace dummy token with the final one

            {
                PTok placeholderTok = new PTok {
                    E = PTokE.CS_name, pay = ""
                };
                var placeholderCpos = orig_TL.InsertAfterCPos(CPos, placeholderTok);

                // extra evil - accessing internal Node structure directly - wo way to iterate from CPosC yet
                PTok delim = null;  // non whitespace token to the left of insertion point
                var  cand  = placeholderCpos.N.left;
                while (true)
                {
                    if (cand is TokLinePlanB <PTok> .NodeTok)
                    {
                        delim = cand.tok; break;
                    }
                    cand = cand.left;
                }

                var Largs = new List <string>();
                foreach (var tok in TNRX_node.Leafs().Where(N => N is MG.TermNode).Select(MG.TermTok))        // have to iterate over all terminals instead of just CS_names , because delimiter might be some other kind
                {
                    if (tok.E == PTokE.CS_name)
                    {
                        Largs.Add(tok.pay);
                    }
                    if (tok == delim)
                    {
                        break;                  // collect inclusive delimiter
                    }
                }
                Largs.Add(""); // last arg to SuggTree is an empty prefix
                string prefix;
                var    alts = SGA.QTN_AC(Largs.ToArray(), out prefix);
                if (prefix.Length > 0)
                {
                    return(RESP.TypeACNoSubst(alts));
                }
                else
                {
                    var final_tok = new PTok {
                        E = PTokE.CS_name, pay = prefix
                    };
                    orig_TL.ReplaceTok(placeholderTok, final_tok);
                    return(RESP.TypeACWithSubst(alts, SerializeTokLine(orig_TL), orig_TL.CPosAtEndOfTok(final_tok).StringPos()));
                }
            }



            return(RESP.NoAC("kind of type AC not implemented atm"));
        }
Пример #19
0
        /*
         *  since there are few dependencies on the precise structure of the col_itm sequence,
         *  this is, for now, the simplest possible implementation:
         *
         *  every token gets exactly one col_itm
         *  these could be compressed ( consecutive items of the same color merged into one ) - i do not yet know what makes the console window so ridiculously slow
         *  - for laziness' sake, the old code fragment for the "not even partial parse possible" case is still present -> everything compressed into a single col_itm
         */

        /// <summary>
        /// Colorizes a lexed token stream according to the grammar: each input token yields
        /// exactly one col_itm whose color is taken from its node in the parse tree.
        /// If no parse is possible at all, the whole input is emitted as a single red item;
        /// if only a prefix parses, the unparsed remainder is emitted in the error color.
        /// </summary>
        /// <param name="_lexxed">lexer output: PTok payload tokens interleaved with PTokWhitespace runs</param>
        /// <returns>sequence of (color, string) items covering the input in order</returns>
        public static IEnumerable <col_itm> GrammarColorize(IEnumerable <PTokBase> _lexxed)
        {
            PTokBase [] lexxed   = _lexxed.ToArray();
            PTok     [] stripped = lexxed.OfType <PTok>().ToArray();    // parser input: payload tokens only, whitespace removed

            NamedNode rootNode = null;

            // attempt a (possibly partial) parse; any failure leaves rootNode null -> error path below
            try {
                rootNode = MG.RUN_with_rest(ColorizeStartProd, stripped).First().N;
            } catch (Exception) { }

            if (rootNode == null)
            {
                // not even a partial parse: concatenate everything into one red error item
                string err_str = "";
                foreach (var tok in lexxed)      // concatenate to a single error token -- maybe yield multiple ( for M-b,M-a style shortkeys ) ?
                {
                    if (tok is PTokWhitespace)
                    {
                        err_str += SUH.NSpace((tok as PTokWhitespace).len);
                    }
                    if (tok is PTok)
                    {
                        err_str += (tok as PTok).pay;
                    }
                }
                yield return(new col_itm {
                    col = ConsoleColor.Red, str = err_str
                });

                yield break;
            }
            // ------
            MG.TermNode [] TNs    = rootNode.Leafs().OfType <MG.TermNode>().ToArray();     // a NamedNode with STAR as topmost prod _CAN_ be a leaf ( TODO meditate on whether to allow such a construct in the final grammar )
            int            lexx_i = 0;      // index into lexxed ( payload tokens + whitespace )
            int            NN_i   = 0;      // index into TNs   ( parsed terminals only )

            // walk tokens and parsed terminals in lockstep; stops early when the parse covered only a prefix
            while (true)
            {
                if (lexx_i == lexxed.Length)
                {
                    break;
                }
                PTokBase tok_base = lexxed[lexx_i];

                if (tok_base is PTokWhitespace)
                {
                    #region Whitespace-Block
                    yield return(new col_itm {
                        str = SUH.NSpace((tok_base as PTokWhitespace).len), col = defaultConsoleColor
                    });

                    #endregion
                }
                else
                {
                    if (NN_i == TNs.Length)
                    {
                        break;      // partial parse: remaining tokens are handled by the error loop below
                    }
                    #region Token-Block
                    PTok        tok = (PTok)tok_base;
                    MG.TermNode TN  = TNs[NN_i];
                    D.Assert(TN.tok == tok);    // parsed terminals must line up 1:1 with non-whitespace tokens
                    // ----------

                    yield return(new col_itm {
                        col = indicateCol(TN), str = tok.pay
                    });

                    #endregion
                    NN_i++;
                }
                lexx_i++;
            }
            // anything past the parsed prefix is rendered in the error color
            // ( fixed: stray semicolons previously detached the else from its if and left the
            //   yield returns unterminated -- the original did not compile )
            for (; lexx_i < lexxed.Length; lexx_i++)
            {
                PTokBase tok = lexxed[lexx_i];
                if (tok is PTokWhitespace)
                {
                    yield return(new col_itm {
                        col = errorConsoleColor, str = SUH.NSpace((tok as PTokWhitespace).len)
                    });
                }
                else
                {
                    yield return(new col_itm {
                        col = errorConsoleColor, str = (tok as PTok).pay
                    });
                }
            }
        }