/// <summary>
/// Consumes the next token (expected to be a boolean literal) and builds a
/// <see cref="JsonBooleanNode"/> through the type factory.
/// </summary>
/// <param name="tokenizer">Token source positioned at a boolean token.</param>
/// <returns>The boolean node created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="UnknownParserException">
/// Wraps any failure while consuming the token or creating the node, attaching
/// the tail/peek character streams for diagnostics.
/// </exception>
private JsonBooleanNode ProduceJsonBoolean(JsonTokenizer tokenizer)
{
    try {
        JsonToken token = tokenizer.Next();           // consume the boolean literal token
        var boolValue = (bool?)token.Value;
        return (JsonBooleanNode)jsonTypeFactory.CreateBoolean(boolValue);
    } catch (Exception cause) {
        throw new UnknownParserException("Failed to create a Boolean node.", cause, GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }
}
/// <summary>
/// Parses a JSON object ("{" members "}") from the tokenizer and builds a
/// <see cref="JsonObjectNode"/> via the type factory.
/// </summary>
/// <param name="tokenizer">Token source positioned at the object's opening brace.</param>
/// <returns>The object node created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="InvalidJsonTokenException">
/// If the object does not start with '{' or does not end with '}'.
/// </exception>
private JsonObjectNode ProduceJsonObject(JsonTokenizer tokenizer)
{
    // Pop the leading '{'. Anything else indicates a caller/tokenizer inconsistency.
    if (NextAndGetType(tokenizer) != TokenType.LCURLY) {
        throw new InvalidJsonTokenException("JSON object should start with {.", GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }

    IDictionary<string, object> map = new Dictionary<string, object>();
    if (PeekAndGetType(tokenizer) == TokenType.RCURLY) {
        // Empty object: just discard the trailing '}'.
        tokenizer.Next();
    } else {
        IDictionary<string, JsonNode> members = ProduceJsonObjectMembers(tokenizer);
        // After the member list the only legal token is the closing '}'.
        if (NextAndGetType(tokenizer) != TokenType.RCURLY) {
            throw new InvalidJsonTokenException("JSON object should end with }.", GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
        }
        // Re-key the members into the (string -> object) map the factory expects.
        foreach (var member in members) {
            map[member.Key] = member.Value;
        }
    }

    return (JsonObjectNode)jsonTypeFactory.CreateObject(map);
}
/// <summary>
/// Consumes the "null" literal token and returns the factory's null node.
/// When tracing is enabled, the consumed token and the produced node are pushed
/// onto the tail buffers used for error reporting.
/// </summary>
/// <param name="tokenizer">Token source positioned at a null literal.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The null node created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="UnknownParserException">Wraps any failure during consumption/creation.</exception>
private object ProduceJsonNull(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    object nullNode = null;
    try {
        JsonToken token = tokenizer.Next();   // consume the "null" literal.
        if (TracingEnabled) {
            tokenTailBuffer.Push(token);
        }
        nullNode = jsonTypeFactory.CreateNull();
    } catch (Exception cause) {
        throw new UnknownParserException("Failed to Create a Null node. " + tokenTailBuffer.ToTraceString(), cause, GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }
    if (TracingEnabled) {
        nodeTailBuffer.Push(nullNode);
    }
    return nullNode;
}
/// <summary>
/// Parses a JSON array ("[" elements "]") from the tokenizer and builds a
/// <see cref="JsonArrayNode"/> via the type factory.
/// </summary>
/// <param name="tokenizer">Token source positioned at the array's opening bracket.</param>
/// <returns>The array node created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="InvalidJsonTokenException">
/// If the array does not start with '[' or does not end with ']'.
/// </exception>
private JsonArrayNode ProduceJsonArray(JsonTokenizer tokenizer)
{
    // Pop the leading '['. Anything else indicates a caller/tokenizer inconsistency.
    if (NextAndGetType(tokenizer) != TokenType.LSQUARE) {
        throw new InvalidJsonTokenException("JSON array should start with [.", GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }

    var list = new List<object>();
    if (PeekAndGetType(tokenizer) == TokenType.RSQUARE) {
        // Empty array: just discard the trailing ']'.
        tokenizer.Next();
    } else {
        IList<JsonNode> elements = ProduceJsonArrayElements(tokenizer);
        // After the element list the only legal token is the closing ']'.
        if (NextAndGetType(tokenizer) != TokenType.RSQUARE) {
            throw new InvalidJsonTokenException("JSON array should end with ].", GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
        }
        list.AddRange(elements);
    }

    return (JsonArrayNode)jsonTypeFactory.CreateArray(list);
}
/// <summary>
/// Trace-aware variant: consumes a boolean literal token and returns the value
/// produced by the type factory as a nullable bool.
/// </summary>
/// <param name="tokenizer">Token source positioned at a boolean token.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The boolean value created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="UnknownParserException">Wraps any failure during consumption/creation.</exception>
private bool?ProduceJsonBoolean(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    bool? result = null;
    try {
        JsonToken token = tokenizer.Next();
        if (TracingEnabled) {
            tokenTailBuffer.Push(token);
        }
        var rawValue = (bool)token.Value;
        result = (bool?)jsonTypeFactory.CreateBoolean((bool?)rawValue);
    } catch (Exception cause) {
        throw new UnknownParserException("Failed to Create a Boolean node. " + tokenTailBuffer.ToTraceString(), cause, GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }
    if (TracingEnabled) {
        nodeTailBuffer.Push(result);
    }
    return result;
}
/// <summary>
/// Trace-aware variant: consumes a number literal token and returns the value
/// produced by the type factory as a nullable Number.
/// </summary>
/// <param name="tokenizer">Token source positioned at a number token.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The number value created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="UnknownParserException">Wraps any failure during consumption/creation.</exception>
private Number?ProduceJsonNumber(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    Number? result = null;
    try {
        JsonToken token = tokenizer.Next();
        if (TracingEnabled) {
            tokenTailBuffer.Push(token);
        }
        var rawValue = (Number)token.Value;
        result = (Number?)jsonTypeFactory.CreateNumber((Number?)rawValue);
    } catch (Exception cause) {
        throw new UnknownParserException("Failed to Create a Number node. " + tokenTailBuffer.ToTraceString(), cause, GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }
    if (TracingEnabled) {
        nodeTailBuffer.Push(result);
    }
    return result;
}
/// <summary>
/// Trace-aware variant: consumes a string literal token and returns the value
/// produced by the type factory.
/// </summary>
/// <param name="tokenizer">Token source positioned at a string token.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The string value created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="UnknownParserException">Wraps any failure during consumption/creation.</exception>
private string ProduceJsonString(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    string result = null;
    try {
        JsonToken token = tokenizer.Next();
        if (TracingEnabled) {
            tokenTailBuffer.Push(token);
        }
        string rawValue = (string)token.Value;
        result = (string)jsonTypeFactory.CreateString(rawValue);
    } catch (Exception cause) {
        throw new UnknownParserException("Failed to Create a String node. " + tokenTailBuffer.ToTraceString(), cause, GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }
    if (TracingEnabled) {
        nodeTailBuffer.Push(result);
    }
    return result;
}
/// <summary>
/// Reads array elements until the closing ']' is peeked (the ']' itself is NOT consumed here;
/// the caller discards it). Commas between elements are consumed according to parserPolicy.
/// </summary>
/// <param name="tokenizer">Token source positioned just after the array's '['.</param>
/// <param name="depth">Remaining depth budget; at depth == 1 each element subtree is
/// serialized back to a JSON string instead of being kept as an object tree.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The parsed elements, in input order.</returns>
/// <exception cref="JsonParserException">If re-serializing a depth-1 element fails.</exception>
/// <exception cref="InvalidJsonTokenException">On a trailing comma when the policy forbids it.</exception>
private async Task <IList <object> > ProduceJsonArrayElementsAsync(JsonTokenizer tokenizer, int depth, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    IList <object> elements = new List <object>();
    var type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
    while (type != TokenType.RSQUARE) {
        object element = await ProduceJsonArrayElementAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);   // No depth change...
        // Null elements are skipped; presumably the factory returns a non-null sentinel
        // for JSON "null" — TODO confirm against ProduceJsonArrayElementAsync.
        if (element != null) {
            // This is a bit strange implementation, but it actually works...
            // The parser traverses the object tree (depth first) down to the leaf,
            // and on the way up, if it reaches depth==1, it converts the sub-tree below
            // depth==1 to a JSON string. The JSON string is then used as the node value
            // (instead of the object sub-tree). Above depth > 1, this does not happen.
            if (depth == 1) {
                try {
                    string jStr = await jsonBuilder.BuildAsync(element);
                    elements.Add(jStr);
                } catch (JsonBuilderException e) {
                    // Surface builder failures as parser failures to the caller.
                    throw new JsonParserException(e);
                }
            } else {
                elements.Add(element);
            }
        }
        type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
        // "Consume" the comma(s) separating elements.
        if (parserPolicy.AllowExtraCommas) {
            // Lenient mode: swallow any run of consecutive commas.
            while (type == TokenType.COMMA) {
                JsonToken t = tokenizer.Next();
                if (TracingEnabled) {
                    tokenTailBuffer.Push(t);
                }
                type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
            }
        } else {
            if (type == TokenType.COMMA) {
                JsonToken t = tokenizer.Next();
                if (TracingEnabled) {
                    tokenTailBuffer.Push(t);
                }
                type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
                if (parserPolicy.AllowTrailingComma) {
                    // Continue: ",]" is tolerated under this policy.
                } else {
                    // Invalid char sequence: ",]" — a trailing comma is a syntax error.
                    if (type == TokenType.RSQUARE) {
                        throw new InvalidJsonTokenException("Syntax error: Array has a trailing comma. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
                    }
                }
            }
        }
    }
    return(elements);
}
/// <summary>
/// Parses a JSON object ("{" members "}") asynchronously and builds the result map
/// via the type factory. Trace buffers are fed when tracing is enabled.
/// </summary>
/// <param name="tokenizer">Token source positioned at the object's opening brace.</param>
/// <param name="depth">Depth budget forwarded to the member parser.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The object map created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="InvalidJsonTokenException">
/// If the object does not start with '{' or does not end with '}'.
/// </exception>
private async Task <IDictionary <string, object> > ProduceJsonObjectAsync(JsonTokenizer tokenizer, int depth, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    // Pop the leading '{'. Anything else indicates a caller/tokenizer inconsistency.
    var lcurl = NextAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
    if (lcurl != TokenType.LCURLY) {
        throw new InvalidJsonTokenException("JSON object should start with {. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }

    IDictionary <string, object> map = new Dictionary <string, object>();
    if (PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer) == TokenType.RCURLY) {
        // Empty object: discard the trailing '}' (and trace it if enabled).
        JsonToken closing = tokenizer.Next();
        if (TracingEnabled) {
            tokenTailBuffer.Push(closing);
        }
    } else {
        IDictionary <string, object> members = await ProduceJsonObjectMembersAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);

        // Discard the trailing '}'; when tracing we need the whole token so it can be pushed.
        TokenType rcurl;
        if (TracingEnabled) {
            JsonToken closing = NextAndGetToken(tokenizer, tokenTailBuffer, nodeTailBuffer);
            tokenTailBuffer.Push(closing);
            rcurl = closing.Type;
        } else {
            rcurl = NextAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
        }
        if (rcurl != TokenType.RCURLY) {
            throw new InvalidJsonTokenException("JSON object should end with }. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
        }

        // Copy the parsed members into the map handed to the factory.
        foreach (var member in members) {
            map[member.Key] = member.Value;
        }
    }

    IDictionary <string, object> jObject = jsonTypeFactory.CreateObject(map);
    if (TracingEnabled) {
        nodeTailBuffer.Push(jObject);
    }
    return jObject;
}
/// <summary>
/// Top-level parse entry: dispatches on the first token. EOF yields the factory's null node;
/// '{' / '[' parse an object / array. Anything else is handled according to parserPolicy:
/// non-object/array literals (null/bool/number/string) when AllowNonObjectOrNonArray, or
/// skipping leading tokens until '{' / '[' when AllowLeadingJsonMarker; otherwise it throws.
/// </summary>
/// <param name="tokenizer">Token source for the whole input.</param>
/// <param name="depth">Depth budget forwarded to the object/array parsers.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The parsed top-level node.</returns>
/// <exception cref="InvalidJsonTokenException">On input the active policy does not permit.</exception>
private async Task <object> _parseAsync(JsonTokenizer tokenizer, int depth, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    object topNode = null;
    var type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
    if (type == TokenType.EOF || type == TokenType.LCURLY || type == TokenType.LSQUARE) {
        if (type == TokenType.EOF) {
            // Empty input is treated as a JSON null.
            topNode = ProduceJsonNull(tokenizer, tokenTailBuffer, nodeTailBuffer);
        } else if (type == TokenType.LCURLY) {
            topNode = await ProduceJsonObjectAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);
        } else if (type == TokenType.LSQUARE) {
            topNode = await ProduceJsonArrayAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);
        }
    } else {
        // TBD: Process it here if parserPolicy.AllowLeadingJsonMarker == true, ???
        if (parserPolicy.AllowNonObjectOrNonArray) {
            // This is actually an error according to the json.org JSON grammar,
            // but we allow a partial JSON string (a bare literal at the top level).
            switch (type) {
            case TokenType.NULL:
                topNode = ProduceJsonNull(tokenizer, tokenTailBuffer, nodeTailBuffer);
                break;
            case TokenType.BOOLEAN:
                topNode = ProduceJsonBoolean(tokenizer, tokenTailBuffer, nodeTailBuffer);
                break;
            case TokenType.NUMBER:
                topNode = ProduceJsonNumber(tokenizer, tokenTailBuffer, nodeTailBuffer);
                break;
            case TokenType.STRING:
                topNode = ProduceJsonString(tokenizer, tokenTailBuffer, nodeTailBuffer);
                break;
            default:
                // Not a recognized literal token at the top level.
                throw new InvalidJsonTokenException("JsonToken not recognized: tokenType = " + TokenTypes.GetDisplayName(type) + "; " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
            }
        } else {
            // TBD: this is a bit too lenient probably... there was some special char
            // sequence which some parsers allowed. For now AllowLeadingJsonMarker == true
            // is interpreted as "allow leading non-object/non-array chars":
            // --> we remove all leading tokens until we reach '{' or '['.
            if (parserPolicy.AllowLeadingJsonMarker) {
                while (type != TokenType.LCURLY && type != TokenType.LSQUARE) {
                    JsonToken t = tokenizer.Next();   // swallow one token.
                    if (TracingEnabled) {
                        tokenTailBuffer.Push(t);
                    }
                    type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
                }
                if (type == TokenType.LCURLY) {
                    topNode = await ProduceJsonObjectAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);
                } else if (type == TokenType.LSQUARE) {
                    topNode = await ProduceJsonArrayAsync(tokenizer, depth, tokenTailBuffer, nodeTailBuffer);
                } else {
                    // Unreachable in practice: the loop only exits on '{' or '['.
                    throw new InvalidJsonTokenException("Invalid input Json string. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
                }
            } else {
                // Strict mode: the top level must be an object or an array.
                throw new InvalidJsonTokenException("Json string should be Object or Array. Input tokenType = " + TokenTypes.GetDisplayName(type) + "; " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
            }
        }
    }
    return(topNode);
}
/// <summary>
/// Trace-aware variant: parses a JSON array ("[" elements "]") and builds the
/// result list via the type factory.
/// </summary>
/// <param name="tokenizer">Token source positioned at the array's opening bracket.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The array list created by <c>jsonTypeFactory</c>.</returns>
/// <exception cref="InvalidJsonTokenException">
/// If the array does not start with '[' or does not end with ']'.
/// </exception>
private IList <object> ProduceJsonArray(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    // Pop the leading '['; when tracing we need the whole token so it can be pushed.
    TokenType lsq;
    if (TracingEnabled) {
        JsonToken opening = NextAndGetToken(tokenizer, tokenTailBuffer, nodeTailBuffer);
        tokenTailBuffer.Push(opening);
        lsq = opening.Type;
    } else {
        lsq = NextAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
    }
    if (lsq != TokenType.LSQUARE) {
        // This cannot happen if the caller dispatched correctly.
        throw new InvalidJsonTokenException("JSON array should start with [. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
    }

    var list = new List <object>();
    if (PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer) == TokenType.RSQUARE) {
        // Empty array: discard the trailing ']' (and trace it if enabled).
        JsonToken closing = tokenizer.Next();
        if (TracingEnabled) {
            tokenTailBuffer.Push(closing);
        }
    } else {
        IList <object> elements = ProduceJsonArrayElements(tokenizer, tokenTailBuffer, nodeTailBuffer);
        // After the element list the only legal token is the closing ']'.
        var rsq = NextAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
        if (rsq != TokenType.RSQUARE) {
            throw new InvalidJsonTokenException("JSON array should end with ]. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
        }
        list.AddRange(elements);
    }

    IList <object> jArray = jsonTypeFactory.CreateArray(list);
    if (TracingEnabled) {
        nodeTailBuffer.Push(jArray);
    }
    return jArray;
}
/// <summary>
/// Reads object members until the closing '}' is peeked (the '}' itself is NOT
/// consumed here; the caller discards it). Commas between members are consumed
/// according to parserPolicy. Later duplicate keys overwrite earlier ones.
/// </summary>
/// <param name="tokenizer">Token source positioned just after the object's '{'.</param>
/// <param name="tokenTailBuffer">Recent-token trace buffer (used when tracing).</param>
/// <param name="nodeTailBuffer">Recent-node trace buffer (used when tracing).</param>
/// <returns>The parsed key/value members.</returns>
/// <exception cref="InvalidJsonTokenException">On a trailing comma when the policy forbids it.</exception>
private IDictionary <string, object> ProduceJsonObjectMembers(JsonTokenizer tokenizer, JsonTokenBuffer tokenTailBuffer, ObjectTailBuffer nodeTailBuffer)
{
    IDictionary <string, object> members = new Dictionary <string, object>();
    var type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
    while (type != TokenType.RCURLY) {
        var member = ProduceJsonObjectMember(tokenizer, tokenTailBuffer, nodeTailBuffer);
        if (member != null) {
            members[member.Key] = member.Value;
        }
        type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);

        // "Consume" the comma(s) separating members.
        if (parserPolicy.AllowExtraCommas) {
            // Lenient mode: swallow any run of consecutive commas.
            while (type == TokenType.COMMA) {
                JsonToken comma = tokenizer.Next();
                if (TracingEnabled) {
                    tokenTailBuffer.Push(comma);
                }
                type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
            }
        } else if (type == TokenType.COMMA) {
            JsonToken comma = tokenizer.Next();
            if (TracingEnabled) {
                tokenTailBuffer.Push(comma);
            }
            type = PeekAndGetType(tokenizer, tokenTailBuffer, nodeTailBuffer);
            // ",}" is a trailing comma: reject it unless the policy allows it.
            if (!parserPolicy.AllowTrailingComma && type == TokenType.RCURLY) {
                throw new InvalidJsonTokenException("Syntax error: Object has a trailing comma. " + tokenTailBuffer.ToTraceString(), GetTailCharStream(tokenizer), PeekCharStream(tokenizer));
            }
        }
    }
    return members;
}