Example #1
0
 /// <summary>
 /// Builds the <see cref="ParseRuleSet"/> used to parse this command's arguments.
 /// The rule set is named after the command ("Cmd_" + <c>Name</c>), uses this command's
 /// argument delimiters, and falls back to the "default" context for delimiters it
 /// does not recognize.
 /// </summary>
 void GenerateArgumentParsingRuleset()
 {
     ParseRuleSet ruleset = new ParseRuleSet("Cmd_" + Name, GenerateArgumentDelims(), CodeRules.StandardWhitespace);
     ruleset.AddDelimiterFallback(ParseRuleSet.GetContext("default"));
     argumentParsingRuleset = ruleset;
 }
Example #2
0
        /// <summary>
        /// used to parse JSON-like objects, and output the results to a new object made of nested <see cref="Dictionary{string, object}"/>, <see cref="List{object}"/>, and primitive value objects
        /// </summary>
        /// <param name="text">JSON-like source text</param>
        /// <param name="data">output; null when parsing fails</param>
        /// <param name="scope">where to search for variables when resolving unescaped-string tokens</param>
        /// <param name="tokenizer">optional tokenizer, useful if you want to get errors</param>
        /// <param name="parsingRules">rules used to parse. if null, will use Default rules. another example rule set: CommandLine</param>
        /// <returns>true if <see cref="TryParseType"/> succeeded with <see cref="object"/> as the target type</returns>
        public static bool TryParse(string text, out object data, object scope = null, Tokenizer tokenizer = null, ParseRuleSet parsingRules = null)
        {
            // an out parameter may be handed to a ref parameter once definitely assigned,
            // so no intermediate local is needed here
            data = null;
            return TryParseType(typeof(object), text, ref data, scope, tokenizer, parsingRules);
        }
Example #3
0
        /// <summary>
        /// used to parse JSON-like objects, and output the results to a new object made of nested <see cref="Dictionary{string, object}"/>, <see cref="List{object}"/>, and primitive value objects
        /// </summary>
        /// <param name="text">JSON-like source text</param>
        /// <param name="data">output; null when parsing fails</param>
        /// <param name="scope">where to search for variables when resolving unescaped-string tokens</param>
        /// <param name="tokenizer">optional tokenizer, useful if you want to get errors</param>
        /// <param name="parsingRules">rules used to parse. if null, will use Default rules. another example rule set: CommandLine</param>
        /// <returns>always a list of objects or null. if text would be a single object, it's in a list of size 1</returns>
        public static bool TryParseArgs(string text, out List<object> data, object scope = null, Tokenizer tokenizer = null, ParseRuleSet parsingRules = null)
        {
            if (!TryParse(text, out object parsed, scope, tokenizer, parsingRules))
            {
                data = null;
                return false;
            }
            if (parsed is List<object> alreadyList)
            {
                // result is already the exact type callers want; hand it back directly
                data = alreadyList;
            }
            else if (parsed is IList<object> source)
            {
                // some other IList implementation: copy its elements into a real List
                data = new List<object>(source.Count);
                foreach (object element in source)
                {
                    data.Add(element);
                }
            }
            else
            {
                // a single value (possibly null) becomes a one-element list
                data = new List<object> { parsed };
            }
            return true;
        }
Example #4
0
 /// <summary>
 /// used to parse when Type is known
 /// </summary>
 /// <param name="type">the expected output type to parse the tokens into</param>
 /// <param name="text">source text to tokenize and parse</param>
 /// <param name="data">receives the parsed result on success</param>
 /// <param name="scope">where to search for variables when resolving unescaped-string tokens</param>
 /// <param name="tokenizer">optional tokenizer, useful if you want to get errors; created on demand if null</param>
 /// <param name="parsingRules">rules used to parse. if null, will use Default rules</param>
 /// <returns>false on null/whitespace-only text or tokenizer failure; otherwise the result of <see cref="TryParseTokens"/></returns>
 public static bool TryParseType(Type type, string text, ref object data, object scope, Tokenizer tokenizer = null, ParseRuleSet parsingRules = null)
 {
     if (string.IsNullOrWhiteSpace(text))
     {
         return false;
     }
     // create the tokenizer BEFORE the try block: previously it was created inside,
     // so a constructor failure would leave tokenizer null and the catch block's
     // tokenizer.AddError call would throw NullReferenceException, hiding the real error
     if (tokenizer == null)
     {
         tokenizer = new Tokenizer();
     }
     try {
         tokenizer.Tokenize(text, parsingRules);
     } catch (Exception e) {
         tokenizer.AddError("Tokenize: " + e + "\n" + tokenizer.DebugPrint());
         return false;
     }
     return TryParseTokens(type, tokenizer.tokens, ref data, scope, tokenizer);
 }