Example #1
0
            /// <summary>
            /// Parses the next JSON-like value from the tokenizer: a boolean, a number,
            /// a string, an array, or an object.
            /// </summary>
            /// <param name="tokenizer">The tokenizer supplying input tokens.</param>
            /// <param name="endToken">
            /// Optional terminating rule (e.g. end-of-array).  When the next token matches
            /// this rule instead of starting a value, the token is consumed and the method
            /// returns <c>null</c>.
            /// </param>
            /// <returns>
            /// The parsed value as <see cref="bool"/>, <see cref="double"/>,
            /// <see cref="string"/>, <c>object[]</c> (array), or
            /// <c>Dictionary&lt;string, object&gt;</c> (object); <c>null</c> when
            /// <paramref name="endToken"/> was matched.
            /// </returns>
            /// <exception cref="NotSupportedException">
            /// Thrown when a word token is neither a boolean nor a number literal.
            /// </exception>
            public object ParseValue(TextTokenizer tokenizer, ITokenRule endToken)
            {
                var required = endToken == null ? new [] { word, text, startObject, startArray } : new [] { word, text, startObject, startArray, endToken };

                var token = tokenizer.NextToken(required: required);

                if (token.Rule == word)
                {
                    // A bare word is either a boolean or a numeric literal.
                    if (bool.TryParse(token.Text, out var boolValue))
                    {
                        return boolValue;
                    }

                    // Parse with the invariant culture and NumberStyles.Float so a literal
                    // like "1.5" is accepted identically regardless of the current thread's
                    // locale (the culture-sensitive overload rejects '.' as a decimal
                    // separator in some cultures and accepts thousands separators).
                    if (double.TryParse(token.Text, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out var numberValue))
                    {
                        return numberValue;
                    }

                    throw new NotSupportedException($"not supported token: '{token.Text}'");
                }
                else if (token.Rule == text)
                {
                    return token.Text;
                }
                else if (token.Rule == startArray)
                {
                    var list = new List <object> ();

                    object val = null;

                    // A null element signals that the recursive call consumed [endArray],
                    // i.e. the array is empty (or ended after a trailing comma).
                    while ((val = ParseValue(tokenizer, endArray)) != null)
                    {
                        list.Add(val);

                        // After each element expect either the array terminator or a comma.
                        if (tokenizer.NextToken(required: new [] { endArray, comma }).Rule == endArray)
                        {
                            break;
                        }
                    }

                    return list.ToArray();
                }
                else if (token.Rule == startObject)
                {
                    var obj = new Dictionary <string, object> ();

                    do
                    {
                        // Property name, or the object terminator for an empty object
                        // (or after a trailing comma).
                        var prop = tokenizer.NextToken(new [] { word, endObject });

                        if (prop.Rule == endObject)
                        {
                            break;
                        }

                        tokenizer.NextToken(new [] { colon });

                        obj.Add(prop.Text, ParseValue(tokenizer, null));

                        token = tokenizer.NextToken(new [] { comma, endObject });
                    } while(token.Rule != endObject);

                    return obj;
                }

                // Reached only when endToken was supplied and matched: the terminator has
                // been consumed and there is no value.
                return null;
            }