private object _Parse(LiteJsonTokenizer tokenizer)
        {
            if (tokenizer == null)
            {
                return(null);
            }

            object topNode = null;
            var    type    = PeekAndGetType(tokenizer);

            if (type == TokenType.EOF || type == TokenType.LCURLY || type == TokenType.LSQUARE)
            {
                if (type == TokenType.EOF)
                {
                    topNode = ProduceJsonNull(tokenizer);
                }
                else if (type == TokenType.LCURLY)
                {
                    topNode = ProduceJsonObject(tokenizer);
                }
                else if (type == TokenType.LSQUARE)
                {
                    topNode = ProduceJsonArray(tokenizer);
                }
            }
            else
            {
                // A top-level JSON value must be an object or an array (EOF is treated as null).
                throw new DotJsonMiniException("JSON string should be an object or an array. Input tokenType = " + TokenTypes.GetDisplayName(type));
            }

            System.Diagnostics.Debug.WriteLine("topnNode = " + topNode);
            return(topNode);
        }
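        // Collects array elements until the closing ] is peeked; commas between elements are consumed along the way.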
        private IList <Object> ProduceJsonArrayElements(LiteJsonTokenizer tokenizer)
        {
            IList <Object> elements = new List <Object>();

            var type = PeekAndGetType(tokenizer);

            while (type != TokenType.RSQUARE)
            {
                object element = ProduceJsonArrayElement(tokenizer);
                if (element != null)
                {
                    elements.Add(element);
                }
                type = PeekAndGetType(tokenizer);

                // "consume" the comma.
                // Note: We are very lenient when it comes to extra/repeated commas...
                while (type == TokenType.COMMA)
                {
                    tokenizer.Next();
                    type = PeekAndGetType(tokenizer);
                }
            }

            System.Diagnostics.Debug.WriteLine("elements = " + elements);
            return(elements);
        }
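        // Reads a single "key": value pair. The key must be a string, followed by a colon
        // and any JSON value (null, boolean, number, string, object, or array).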
        private KeyValuePair <String, Object> ProduceJsonObjectMember(LiteJsonTokenizer tokenizer)
        {
            JsonToken keyToken = NextAndGetToken(tokenizer);
            var       keyType  = keyToken.Type;

            if (keyType != TokenType.STRING)
            {
                throw new DotJsonMiniException("JSON object member should start with a string key. keyType = " + keyType + "; ");
            }
            string key = (String)keyToken.Value;

            JsonToken colonToken = NextAndGetToken(tokenizer);               // "consume" the colon.
            var       colonType  = colonToken.Type;

            if (colonType != TokenType.COLON)
            {
                throw new DotJsonMiniException("JSON object member should include a colon (:). ");
            }

            object value = null;
            var    type  = PeekAndGetType(tokenizer);

            switch (type)
            {
            case TokenType.NULL:
                value = ProduceJsonNull(tokenizer);
                break;

            case TokenType.BOOLEAN:
                value = ProduceJsonBoolean(tokenizer);
                break;

            case TokenType.NUMBER:
                value = ProduceJsonNumber(tokenizer);
                break;

            case TokenType.STRING:
                value = ProduceJsonString(tokenizer);
                break;

            case TokenType.LCURLY:
                value = ProduceJsonObject(tokenizer);
                break;

            case TokenType.LSQUARE:
                value = ProduceJsonArray(tokenizer);
                break;

            default:
                // Unexpected token for an object member value.
                throw new DotJsonMiniException("JSON object member value not recognized: token = " + tokenizer.Peek() + "; ");
            }

            // TBD: Use type factory ???
            KeyValuePair <String, Object> member = new KeyValuePair <String, Object>(key, value);

            System.Diagnostics.Debug.WriteLine("member = " + member);
            return(member);
        }
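        // Advances the tokenizer and returns the next token, or throws if the token is invalid.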
        private JsonToken NextAndGetToken(LiteJsonTokenizer tokenizer)
        {
            JsonToken s = tokenizer.Next();

            if (JsonToken.IsInvalid(s))
            {
                throw new DotJsonMiniException("Failed to get the next json token. ");
            }
            return(s);
        }
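        // Returns the next HEAD_TRACE_LENGTH characters from the underlying tokenizer,
        // if it supports peeking into the character stream (DotJsonMiniTokenizer); otherwise null.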
        private char[] PeekCharStream(LiteJsonTokenizer tokenizer)
        {
            if (tokenizer is DotJsonMiniTokenizer)
            {
                return(((DotJsonMiniTokenizer)tokenizer).PeekCharStream(HEAD_TRACE_LENGTH));
            }
            else
            {
                return(null);
            }
        }
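        // Consumes the "null" literal token and returns the JsonNull.NULL sentinel.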
        private object ProduceJsonNull(LiteJsonTokenizer tokenizer)
        {
            object jNull = null;

            try {
                tokenizer.Next();                   // Consume the "null" literal.
                jNull = JsonNull.NULL;
            } catch (Exception) {
                throw new DotJsonMiniException("Failed to create a Null node. ");
            }
            return(jNull);
        }
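        // Consumes a boolean token and returns its value.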
        private bool ProduceJsonBoolean(LiteJsonTokenizer tokenizer)
        {
            bool jBoolean = false;               // default value; overwritten below.

            try {
                JsonToken t = tokenizer.Next();
                jBoolean = (bool)t.Value;
            } catch (Exception) {
                throw new DotJsonMiniException("Failed to create a Boolean node. ");
            }
            return(jBoolean);
        }
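        // Advances the tokenizer and returns the type of the next token, or throws if the token is invalid.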
        private TokenType NextAndGetType(LiteJsonTokenizer tokenizer)
        {
            JsonToken s = tokenizer.Next();

            if (JsonToken.IsInvalid(s))
            {
                throw new DotJsonMiniException("Failed to get the next json token. ");
            }
            var type = s.Type;

            return(type);
        }
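        // Consumes a string token and returns its value.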
        private string ProduceJsonString(LiteJsonTokenizer tokenizer)
        {
            string jString = null;

            try {
                JsonToken t = tokenizer.Next();
                jString = (String)t.Value;
            } catch (Exception) {
                throw new DotJsonMiniException("Failed to create a string node. ");
            }
            return(jString);
        }
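        // Wraps the reader in a tokenizer and delegates to _Parse(LiteJsonTokenizer).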
        private object _Parse(TextReader reader)
        {
            if (reader == null)
            {
                return(null);
            }

            // TBD: A new tokenizer is created per call.
            //      Does this make parsing thread safe???

            LiteJsonTokenizer jsonTokenizer = new DotJsonMiniTokenizer(reader);

            return(_Parse(jsonTokenizer));
        }
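        // Consumes a number token and returns its value, unwrapping the Number wrapper if present.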
        private object ProduceJsonNumber(LiteJsonTokenizer tokenizer)
        {
            object jNumber = null;

            try {
                JsonToken t = tokenizer.Next();
                jNumber = t.Value;
                // Unwrap the Number wrapper, if the tokenizer produced one.
                if (jNumber is Number)
                {
                    jNumber = ((Number)jNumber).Value;
                }
            } catch (Exception) {
                throw new DotJsonMiniException("Failed to create a Number node. ");
            }
            return(jNumber);
        }
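        // Parses a JSON array: consumes the leading [, the elements, and the trailing ],
        // and returns the elements as an IList<Object>.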
        private IList <Object> ProduceJsonArray(LiteJsonTokenizer tokenizer)
        {
            var lsq = NextAndGetType(tokenizer);                 // pop the leading [.
            if (lsq != TokenType.LSQUARE)
            {
                // this cannot happen.
                throw new DotJsonMiniException("JSON array should start with [. ");
            }

            IList <Object> list = new List <Object>();
            var            type = PeekAndGetType(tokenizer);

            if (type == TokenType.RSQUARE)
            {
                // empty array
                tokenizer.Next();                   // discard the trailing ].
            }
            else
            {
                IList <Object> elements = ProduceJsonArrayElements(tokenizer);

                var rsq = NextAndGetType(tokenizer);                  // discard the trailing ].
                if (rsq == TokenType.RSQUARE)
                {
                    // Done
                    foreach (var el in elements)
                    {
                        list.Add(el);
                    }
                }
                else
                {
                    // Should not happen: ProduceJsonArrayElements() stops at ].
                    throw new DotJsonMiniException("JSON array should end with ]. ");
                }
            }
            IList <Object> jArray = list;

            System.Diagnostics.Debug.WriteLine("jArray = " + jArray);
            return(jArray);
        }
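        // Parses a JSON object: consumes the leading {, the members, and the trailing },
        // and returns the members as an IDictionary<String, Object>.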
        private IDictionary <String, Object> ProduceJsonObject(LiteJsonTokenizer tokenizer)
        {
            var lcurl = NextAndGetType(tokenizer);               // pop the leading {.

            if (lcurl != TokenType.LCURLY)
            {
                // this cannot happen.
                throw new DotJsonMiniException("JSON object should start with {. ");
            }

            IDictionary <String, Object> map = new Dictionary <String, Object>();
            var type = PeekAndGetType(tokenizer);

            if (type == TokenType.RCURLY)
            {
                // empty object
                tokenizer.Next();                   // discard the trailing }.
            }
            else
            {
                IDictionary <String, Object> members = ProduceJsonObjectMembers(tokenizer);
                var rcurl = NextAndGetType(tokenizer);                  // discard the trailing }.
                if (rcurl == TokenType.RCURLY)
                {
                    // Done
                    foreach (var k in members.Keys)
                    {
                        map.Add(k, members[k]);
                    }
                }
                else
                {
                    // Should not happen: ProduceJsonObjectMembers() stops at }.
                    throw new DotJsonMiniException("JSON object should end with }. ");
                }
            }
            IDictionary <String, Object> jObject = map;

            System.Diagnostics.Debug.WriteLine("jObject = " + jObject);
            return(jObject);
        }
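        // Parses a single array element, which can be any JSON value
        // (null, boolean, number, string, object, or array).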
        private object ProduceJsonArrayElement(LiteJsonTokenizer tokenizer)
        {
            object element = null;
            var    type    = PeekAndGetType(tokenizer);

            switch (type)
            {
            case TokenType.NULL:
                element = ProduceJsonNull(tokenizer);
                break;

            case TokenType.BOOLEAN:
                element = ProduceJsonBoolean(tokenizer);
                break;

            case TokenType.NUMBER:
                element = ProduceJsonNumber(tokenizer);
                break;

            case TokenType.STRING:
                element = ProduceJsonString(tokenizer);
                break;

            case TokenType.LCURLY:
                element = ProduceJsonObject(tokenizer);
                break;

            case TokenType.LSQUARE:
                element = ProduceJsonArray(tokenizer);
                break;

            default:
                // Unexpected token for an array element.
                throw new DotJsonMiniException("JSON array element not recognized: token = " + tokenizer.Peek() + "; ");
            }

            System.Diagnostics.Debug.WriteLine("element = " + element);
            return(element);
        }
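        // Collects object members until the closing } is peeked; commas between members are consumed along the way.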
        private IDictionary <String, Object> ProduceJsonObjectMembers(LiteJsonTokenizer tokenizer)
        {
            IDictionary <String, Object> members = new Dictionary <String, Object>();

            var type = PeekAndGetType(tokenizer);

            while (type != TokenType.RCURLY)
            {
                KeyValuePair <String, Object> member = ProduceJsonObjectMember(tokenizer);
                members.Add(member.Key, member.Value);
                type = PeekAndGetType(tokenizer);

                // "consume" the comma.
                // Note: We are very lenient when it comes to extra/repeated commas...
                while (type == TokenType.COMMA)
                {
                    tokenizer.Next();
                    type = PeekAndGetType(tokenizer);
                }
            }

            System.Diagnostics.Debug.WriteLine("members = " + members);
            return(members);
        }