/// <summary>
/// Attempts to consume a boolean keyword (primary or alternate form for
/// either truth value) at the tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted boolean literal token.</param>
/// <returns><c>true</c> if a boolean keyword was consumed; otherwise <c>false</c>.</returns>
private bool PullBoolean(TokenizeResult tokens)
{
    // Consumes `length` characters and emits `keyword` as a boolean literal.
    bool Emit(string keyword, int length)
    {
        Take(length);
        tokens.Add(Token.LiteralValue(keyword, LiteralTokenType.Boolean, this));
        return true;
    }

    if (Matches(JPlusConstants.TrueKeyword))
        return Emit(JPlusConstants.TrueKeyword, JPlusConstants.TrueKeywordLength);

    if (Matches(JPlusConstants.FalseKeyword))
        return Emit(JPlusConstants.FalseKeyword, JPlusConstants.FalseKeywordLength);

    if (Matches(JPlusConstants.AltTrueKeyword))
        return Emit(JPlusConstants.AltTrueKeyword, JPlusConstants.AltTrueKeywordLength);

    if (Matches(JPlusConstants.AltFalseKeyword))
        return Emit(JPlusConstants.AltFalseKeyword, JPlusConstants.AltFalseKeywordLength);

    return false;
}
/// <summary>
/// Parses the Json+ source code specified into structured objects.
/// </summary>
/// <param name="source">The source code that conforms to Json+ specification.</param>
/// <param name="resolveSubstitutions">Resolve substitution directives.</param>
/// <param name="resolveEnv">Try to resolve environment variables. Does nothing if <paramref name="resolveSubstitutions"/> is `false`.</param>
/// <param name="includeCallback">Callback used to resolve the `include` directive. When <c>null</c>, the previously assigned callback is kept.</param>
/// <returns>The root of the parsed structure together with the substitutions collected during parsing.</returns>
/// <exception cref="JsonPlusParserException">
/// The source is null/empty/whitespace, tokenization failed, or parsing failed.
/// </exception>
private JsonPlusRoot ParseSource(string source, bool resolveSubstitutions, bool resolveEnv, IncludeCallbackAsync includeCallback)
{
    if (string.IsNullOrWhiteSpace(source))
    {
        throw new JsonPlusParserException(string.Format(RS.SourceEmptyError, nameof(source)));
    }

    // A null callback deliberately retains whatever callback was set earlier.
    if (includeCallback != null)
    {
        _includeCallback = includeCallback;
    }

    try
    {
        _tokens = new JPlusTokenizer(source).Tokenize();
        _root = new JsonPlusValue(null);
        ParseTokens();
        if (resolveSubstitutions)
        {
            ResolveAllSubstitution(resolveEnv);
        }
    }
    catch (JsonPlusTokenizerException e)
    {
        // Tokenizer failures predate any parse position, so no path is supplied.
        throw JsonPlusParserException.Create(e, null, string.Format(RS.TokenizeError, e.Message), e);
    }
    catch (JsonPlusException e)
    {
        // Parse failures are reported against the current token and path.
        throw JsonPlusParserException.Create(_tokens.Current, Path, e.Message, e);
    }

    return (new JsonPlusRoot(_root, _substitutions));
}
/// <summary>
/// Attempts to consume a hexadecimal literal ("0x", "0X", "&amp;h" or "&amp;H"
/// prefix followed by hex digits) at the tokenizer's current position.
/// Note that the emitted token value is always normalized to a "0x" prefix,
/// regardless of which prefix appeared in the source.
/// </summary>
/// <param name="tokens">Receives the emitted hexadecimal literal token.</param>
/// <returns><c>true</c> if a valid hexadecimal literal was consumed; otherwise <c>false</c>.</returns>
private bool PullHexadecimal(TokenizeResult tokens)
{
    if (!Matches("0x", "0X", "&h", "&H"))
    {
        return (false);
    }

    PushIndex();

    StringBuilder sb = new StringBuilder();
    // All accepted prefixes are exactly two characters long.
    Take(2);
    sb.Append("0x");
    while (Peek.IsHexadecimal())
    {
        sb.Append(PeekAndTake());
    }

    // Validate the digits fit a signed 64-bit integer. Only conversion
    // failures roll back the tokenizer; unrelated exceptions (e.g.
    // OutOfMemoryException) must propagate, so the bare catch was narrowed.
    try
    {
        Convert.ToInt64(sb.ToString(), 16);
    }
    catch (Exception e) when (e is FormatException || e is OverflowException || e is ArgumentException)
    {
        ResetIndex();
        return (false);
    }

    PopIndex();
    tokens.Add(Token.LiteralValue(sb.ToString(), LiteralTokenType.Hexadecimal, this));
    return (true);
}
/// <summary>
/// Tokenizes the whole source and appends the terminating
/// <see cref="TokenType.EndOfFile"/> token.
/// </summary>
/// <returns>The complete token stream for the source.</returns>
public TokenizeResult Tokenize()
{
    TokenizeResult result = Tokenize(TokenType.EndOfFile);
    result.Add(new Token(string.Empty, TokenType.EndOfFile, this));
    return result;
}
/// <summary>
/// Attempts to consume an octal literal at the tokenizer's current position.
/// The first character is consumed unconditionally — the caller (see
/// <c>PullLiteral</c>) only dispatches here when positioned on '0'.
/// </summary>
/// <param name="tokens">Receives the emitted octal literal token.</param>
/// <returns><c>true</c> if a valid octal literal was consumed; otherwise <c>false</c>.</returns>
private bool PullOctet(TokenizeResult tokens)
{
    PushIndex();

    StringBuilder sb = new StringBuilder();
    sb.Append(PeekAndTake());
    while (Peek.IsOctet())
    {
        sb.Append(PeekAndTake());
    }

    // Validate the digits fit a signed 64-bit integer. Only conversion
    // failures roll back the tokenizer; unrelated exceptions (e.g.
    // OutOfMemoryException) must propagate, so the bare catch was narrowed.
    try
    {
        Convert.ToInt64(sb.ToString(), 8);
    }
    catch (Exception e) when (e is FormatException || e is OverflowException || e is ArgumentException)
    {
        ResetIndex();
        return (false);
    }

    PopIndex();
    tokens.Add(Token.LiteralValue(sb.ToString(), LiteralTokenType.Octet, this));
    return (true);
}
/// <summary>
/// Attempts to consume a substitution directive (optional or required form)
/// at the tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted <see cref="TokenType.Substitution"/> token.</param>
/// <returns><c>true</c> if a substitution was consumed; otherwise <c>false</c>.</returns>
/// <exception cref="JsonPlusTokenizerException">End of file reached before the closing brace.</exception>
private bool PullSubstitution(TokenizeResult tokens)
{
    bool isOptional;

    // The optional form is checked first; both forms share an opening sequence.
    if (Matches(JPlusConstants.OptionalSubstitutionOpenBrace))
    {
        Take(JPlusConstants.OptionalSubstitutionOpenBraceLength);
        isOptional = true;
    }
    else if (Matches(JPlusConstants.SubstitutionOpenBrace))
    {
        Take(JPlusConstants.SubstitutionOpenBraceLength);
        isOptional = false;
    }
    else
    {
        return false;
    }

    StringBuilder path = new StringBuilder();
    while (!EndOfFile && !Matches(JPlusConstants.SubstitutionCloseBrace))
    {
        path.Append(PeekAndTake());
    }

    if (EndOfFile)
    {
        throw new JsonPlusTokenizerException(RS.UnexpectedTokenEndOfSubstitutionVsEof, Token.Error(this));
    }

    // Consume the closing brace character.
    Take();
    tokens.Add(Token.Substitution(path.ToString().TrimWhitespace(), this, isOptional));
    return true;
}
/// <summary>
/// Retrieves a triple quoted <see cref="TokenType.LiteralValue"/> token from
/// the tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted triple-quoted literal token.</param>
/// <param name="quoteSequence">The opening/closing quote sequence to match.</param>
/// <returns>
/// <c>true</c> if a triple-quoted literal was consumed; otherwise <c>false</c>.
/// </returns>
/// <exception cref="JsonPlusTokenizerException">End of file reached before the closing sequence.</exception>
private bool PullTripleQuoted(TokenizeResult tokens, string quoteSequence)
{
    if (!Matches(quoteSequence))
    {
        return (false);
    }

    StringBuilder sb = new StringBuilder();
    // Consume exactly the quote sequence rather than a hard-coded 3, so the
    // method stays correct for any quote sequence the caller supplies.
    Take(quoteSequence.Length);
    while (!EndOfFile && !Matches(quoteSequence))
    {
        if (Matches(JPlusConstants.Escape))
        {
            sb.Append(PullEscapeSequence());
        }
        else
        {
            sb.Append(PeekAndTake());
        }
    }

    if (EndOfFile)
    {
        throw new JsonPlusTokenizerException(string.Format(RS.UnexpectedTokenExpectTripleQuote, TokenType.EndOfFile), Token.Error(this));
    }

    Take(quoteSequence.Length);
    tokens.Add(Token.TripleQuotedLiteralValue(sb.ToString(), this));
    return (true);
}
/// <summary>
/// Dispatches on the current character to consume the appropriate literal:
/// quoted/triple-quoted string, number, boolean, null, infinity, NaN, or an
/// unquoted literal as the final fallback.
/// </summary>
/// <param name="tokens">Receives the emitted literal token.</param>
/// <returns><c>true</c> if a literal token was produced.</returns>
/// <exception cref="JsonPlusTokenizerException">A quoted literal is missing its closing quote.</exception>
private bool PullLiteral(TokenizeResult tokens)
{
    // Do not change this without looking at `JPlusConstants`
    switch (Peek)
    {
        case JPlusConstants.AltQuoteChar:
            // Triple-quote is tried first because it shares the same first character.
            if (PullTripleQuoted(tokens, JPlusConstants.AltTripleQuote) || PullQuoted(tokens, JPlusConstants.AltQuoteChar))
            {
                return (true);
            }
            throw new JsonPlusTokenizerException(RS.CloseLiteralQuoteMissing, Token.Error(this));

        case JPlusConstants.QuoteChar:
            if (PullTripleQuoted(tokens, JPlusConstants.TripleQuote) || PullQuoted(tokens, JPlusConstants.QuoteChar))
            {
                return (true);
            }
            throw new JsonPlusTokenizerException(RS.CloseLiteralQuoteMissing, Token.Error(this));

        case '-':
        case '+':
            // A sign may begin infinity or a number; otherwise fall back to unquoted text.
            return (PullInfinity(tokens) || PullNumbers(tokens) || PullUnquoted(tokens));

        case '0':
            // '0' may begin a hex ("0x"/"0X") or octal literal before a plain number.
            return (PullHexadecimal(tokens) || PullOctet(tokens) || PullNumbers(tokens) || PullUnquoted(tokens));

        case '.':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
            return (PullNumbers(tokens) || PullUnquoted(tokens));

        case JPlusConstants.InfinityKeywordFirstChar:
            return (PullInfinity(tokens) || PullUnquoted(tokens));

        case JPlusConstants.NanKeywordFirstChar:
            return (PullNan(tokens) || PullUnquoted(tokens));

        case JPlusConstants.TrueKeywordFirstChar:    // true
        case JPlusConstants.FalseKeywordFirstChar:   // false
        case JPlusConstants.AltTrueKeywordFirstChar: // yes
            return (PullBoolean(tokens) || PullUnquoted(tokens));

        case JPlusConstants.NullKeywordFirstChar:    // null or no
            return (PullNull(tokens) || PullBoolean(tokens) || PullUnquoted(tokens));

        default:
            return (PullUnquoted(tokens));
    }
}
/// <summary>
/// Attempts to consume the null keyword at the tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted null literal token.</param>
/// <returns><c>true</c> if the null keyword was consumed; otherwise <c>false</c>.</returns>
private bool PullNull(TokenizeResult tokens)
{
    if (!Matches(JPlusConstants.NullKeyword))
    {
        return (false);
    }

    // Consume the keyword by its actual length instead of the magic number 4,
    // matching the style of the other keyword pulls (e.g. PullBoolean).
    Take(JPlusConstants.NullKeyword.Length);
    tokens.Add(Token.LiteralValue(JPlusConstants.NullKeyword, LiteralTokenType.Null, this));
    return (true);
}
/// <summary>
/// Retrieves a <see cref="TokenType.SelfAssignment"/> token from the
/// tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted self-assignment token.</param>
/// <returns><c>true</c> if the self-assignment operator was consumed; otherwise <c>false</c>.</returns>
private bool PullSelfAssignment(TokenizeResult tokens)
{
    if (Matches(JPlusConstants.SelfAssignmentOperator))
    {
        Take(JPlusConstants.SelfAssignmentOperatorLength);
        tokens.Add(new Token(JPlusConstants.SelfAssignmentOperator, TokenType.SelfAssignment, this));
        return true;
    }

    return false;
}
/// <summary>
/// Attempts to consume the NaN keyword at the tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted literal token.</param>
/// <returns><c>true</c> if the NaN keyword was consumed; otherwise <c>false</c>.</returns>
private bool PullNan(TokenizeResult tokens)
{
    if (!Matches(JPlusConstants.NanKeyword))
    {
        return false;
    }

    Take(JPlusConstants.NanKeywordLength);
    // NaN is represented as a decimal literal token.
    tokens.Add(Token.LiteralValue(JPlusConstants.NanKeyword, LiteralTokenType.Decimal, this));
    return true;
}
/// <summary>
/// Consumes a comment at the tokenizer's current position, discarding its
/// text and emitting an <see cref="TokenType.EndOfLine"/> token in its place.
/// </summary>
/// <param name="tokens">Receives the emitted end-of-line token.</param>
/// <returns><c>true</c> if a comment was consumed; otherwise <c>false</c>.</returns>
private bool PullComment(TokenizeResult tokens)
{
    if (!Matches(JPlusConstants.Comment, JPlusConstants.AltComment))
    {
        return false;
    }

    // The comment body is not preserved as a Comment token; the rest of the
    // line is read off and an end-of-line marker carrying that text is emitted.
    string discarded = DiscardRestOfLine();
    tokens.Add(new Token(discarded, TokenType.EndOfLine, this));
    return true;
}
/// <summary>
/// Consumes a run of whitespace characters (excluding newlines) at the
/// tokenizer's current position into a single whitespace literal token.
/// </summary>
/// <param name="tokens">Receives the emitted whitespace literal token.</param>
/// <returns><c>true</c> if any whitespace was consumed; otherwise <c>false</c>.</returns>
private bool PullNonNewLineWhitespace(TokenizeResult tokens)
{
    if (!Peek.IsJsonPlusWhitespaceExceptNewLine())
    {
        return false;
    }

    StringBuilder ws = new StringBuilder();
    // The guard above guarantees at least one character, so do/while is safe.
    do
    {
        ws.Append(PeekAndTake());
    } while (Peek.IsJsonPlusWhitespaceExceptNewLine());

    tokens.Add(Token.LiteralValue(ws.ToString(), LiteralTokenType.Whitespace, this));
    return true;
}
/// <summary>
/// Consumes a run of unquoted-literal characters at the tokenizer's current
/// position into a single unquoted literal token.
/// </summary>
/// <param name="tokens">Receives the emitted unquoted literal token.</param>
/// <returns><c>true</c> if an unquoted literal was consumed; otherwise <c>false</c>.</returns>
private bool PullUnquoted(TokenizeResult tokens)
{
    if (!IsUnquoted())
    {
        return false;
    }

    StringBuilder value = new StringBuilder();
    while (!EndOfFile && IsUnquoted())
    {
        value.Append(PeekAndTake());
    }

    string text = value.ToString();
    tokens.Add(Token.LiteralValue(text, LiteralTokenType.UnquotedLiteralValue, this));
    return true;
}
/// <summary>
/// Attempts to consume an include directive (required or optional form)
/// followed by a quoted file name. On failure the tokenizer position is
/// rolled back and nothing is added to <paramref name="tokens"/>.
/// </summary>
/// <param name="tokens">Receives the include token and the quoted file-name token on success.</param>
/// <returns><c>true</c> if a complete include directive was consumed; otherwise <c>false</c>.</returns>
private bool PullInclude(TokenizeResult tokens)
{
    // include? will also match this
    if (!Matches(JPlusConstants.IncludeKeyword))
    {
        return (false);
    }

    // Everything from here may be rolled back if no quoted path follows.
    PushIndex();

    // Tokens are staged locally so a failed attempt adds nothing to `tokens`.
    TokenizeResult includeTokens = new TokenizeResult();

    // now find out it its include? or include
    bool isOptional = false;
    if (Matches(JPlusConstants.IncludeOptionalKeyword))
    {
        isOptional = true;
    }

    if (isOptional)
    {
        Take(JPlusConstants.IncludeOptionalKeywordLength);
        includeTokens.Add(new Token(JPlusConstants.IncludeOptionalKeyword, TokenType.OptionalInclude, this));
    }
    else
    {
        Take(JPlusConstants.IncludeKeywordLength);
        includeTokens.Add(new Token(JPlusConstants.IncludeKeyword, TokenType.Include, this));
    }

    PullWhitespaces();

    // The keyword must be followed by a quoted path (either quote style).
    if (!PullQuoted(includeTokens, JPlusConstants.QuoteChar) && !PullQuoted(includeTokens, JPlusConstants.AltQuoteChar))
    {
        ResetIndex();
        return (false);
    }

    PopIndex();
    tokens.AddRange(includeTokens);
    return (true);
}
/// <summary>
/// Parses a key (a Json+ path) from the current token stream position.
/// Leading whitespace tokens and non-significant tokens are skipped; the
/// contiguous run of literal tokens is collected, trailing whitespace
/// tokens are trimmed from it, and the result is converted to a path.
/// </summary>
/// <returns>The parsed path, or <c>null</c> when no literal token follows the non-significant tokens.</returns>
/// <exception cref="JsonPlusParserException">The current token is not a literal value.</exception>
private JsonPlusPath ParseKey()
{
    // Skip any leading whitespace literal tokens.
    while (_tokens.Current.LiteralType == LiteralTokenType.Whitespace)
    {
        _tokens.Next();
    }

    // sanity check
    if (_tokens.Current.Type != TokenType.LiteralValue)
    {
        throw JsonPlusParserException.Create(_tokens.Current, Path, string.Format(RS.UnexpectedKeyType, TokenType.LiteralValue, _tokens.Current.Type));
    }

    if (_tokens.Current.IsNonSignificant())
    {
        ConsumeWhitelines();
    }
    if (_tokens.Current.Type != TokenType.LiteralValue)
    {
        return (null);
    }

    // Collect the contiguous run of literal tokens making up the key.
    TokenizeResult keyTokens = new TokenizeResult();
    while (_tokens.Current.Type == TokenType.LiteralValue)
    {
        keyTokens.Add(_tokens.Current);
        _tokens.Next();
    }

    // Trim trailing whitespace tokens by reversing, stripping from the
    // front, then reversing back.
    keyTokens.Reverse();
    while (keyTokens.Count > 0 && keyTokens[0].LiteralType == LiteralTokenType.Whitespace)
    {
        keyTokens.RemoveAt(0);
    }
    keyTokens.Reverse();

    // FromTokens expects a terminated stream.
    keyTokens.Add(new Token(string.Empty, TokenType.EndOfFile, null));

    return (JsonPlusPath.FromTokens(keyTokens));
}
/// <summary>
/// Attempts to consume one of the infinity keywords (bare, positive, or
/// negative form) at the tokenizer's current position.
/// </summary>
/// <param name="tokens">Receives the emitted literal token.</param>
/// <returns><c>true</c> if an infinity keyword was consumed; otherwise <c>false</c>.</returns>
private bool PullInfinity(TokenizeResult tokens)
{
    // Candidates are tried in declaration order.
    string[] candidates =
    {
        JPlusConstants.InfinityKeyword,
        JPlusConstants.InfinityPositiveKeyword,
        JPlusConstants.InfinityNegativeKeyword
    };

    foreach (string candidate in candidates)
    {
        if (!Matches(candidate))
        {
            continue;
        }

        Take(candidate.Length);
        // Infinity is represented as a decimal literal token.
        tokens.Add(Token.LiteralValue(candidate, LiteralTokenType.Decimal, this));
        return true;
    }

    return false;
}
/// <summary>
/// Builds a <see cref="JsonPlusPath"/> from a run of literal tokens.
/// Unquoted token values are split on '.' into separate path segments,
/// while quoted values contribute their text verbatim (a quoted key may
/// therefore contain dots). A segment may span multiple tokens; it is only
/// finalized when a dot — or the end of the token run — is reached.
/// </summary>
/// <param name="tokens">The token stream positioned at the first key token.</param>
/// <returns>The assembled path.</returns>
/// <exception cref="ArgumentNullException"><paramref name="tokens"/> is <c>null</c>.</exception>
/// <exception cref="JsonPlusParserException">A triple-quoted literal appears in the path.</exception>
internal static JsonPlusPath FromTokens(TokenizeResult tokens)
{
    if (tokens == null)
    {
        throw new ArgumentNullException(nameof(tokens));
    }

    List<string> result = new List<string>();
    // Accumulates the current (possibly multi-token) path segment.
    StringBuilder sb = new StringBuilder();
    while (tokens.Current.Type == TokenType.LiteralValue)
    {
        switch (tokens.Current.LiteralType)
        {
            case LiteralTokenType.TripleQuotedLiteralValue:
                throw JsonPlusParserException.Create(tokens.Current, null, RS.TripleQuoteUnsupportedInPath);

            case LiteralTokenType.QuotedLiteralValue:
                // Normalize quoted keys, remove the quotes if the key doesn't need them.
                //sb.Append(tokens.Current.Value.NeedQuotes() ? $"\"{tokens.Current.Value}\"" : tokens.Current.Value);
                sb.Append(tokens.Current.Value);
                break;

            default:
                // Each '.' closes the current segment; the trailing fragment
                // stays in the builder so following tokens can extend it.
                string[] split = tokens.Current.Value.Split('.');
                for (int i = 0; i < split.Length - 1; ++i)
                {
                    sb.Append(split[i]);
                    result.Add(sb.ToString());
                    sb.Clear();
                }
                sb.Append(split[split.Length - 1]);
                break;
        }
        tokens.Next();
    }
    // Flush the final segment.
    result.Add(sb.ToString());

    return (new JsonPlusPath(result));
}
/// <summary>
/// Attempts to consume a numeric literal at the tokenizer's current
/// position. Runs a three-state machine — coefficient (integer part),
/// significand (fractional part), exponent — and emits the longest valid
/// number found; each state uses Push/Reset/PopIndex to roll back partial
/// matches, so a trailing invalid portion is left unconsumed.
/// </summary>
/// <param name="tokens">Receives the emitted integer or decimal literal token.</param>
/// <returns><c>true</c> if any valid number was consumed; otherwise <c>false</c>.</returns>
private bool PullNumbers(TokenizeResult tokens)
{
    StringBuilder sb = new StringBuilder();

    // Parse numbers
    bool parsing = true;
    // Longest valid token produced so far; null means no number matched.
    Token lastValidToken = null;

    // coefficient, significand, exponent
    string state = "coefficient";
    while (parsing)
    {
        switch (state)
        {
            case "coefficient":
                // possible double number without coefficient
                if (Matches("-.", "+.", "."))
                {
                    state = "significand";
                    break;
                }

                PushIndex(); // long test index

                if (Matches('+', '-'))
                {
                    sb.Append(PeekAndTake());
                }

                // numbers could not start with a 0
                if (!Peek.IsDigit() || Peek == '0')
                {
                    ResetIndex(); // reset long test index
                    parsing = false;
                    break;
                }

                while (Peek.IsDigit())
                {
                    sb.Append(PeekAndTake());
                }

                if (!long.TryParse(sb.ToString(), out _))
                {
                    ResetIndex(); // reset long test index
                    parsing = false;
                    break;
                }

                PopIndex(); // end long test index
                lastValidToken = Token.LiteralValue(sb.ToString(), LiteralTokenType.Integer, this);
                state = "significand";
                break;

            case "significand":
                // short logic, no significand, but probably have an exponent
                if (!Matches("-.", "+.", "."))
                {
                    state = "exponent";
                    break;
                }

                PushIndex(); // validate significand in number test

                // A sign here belongs in front of the whole number.
                if (Matches('+', '-'))
                {
                    sb.Insert(0, PeekAndTake());
                }

                // Consume the '.' itself.
                sb.Append(PeekAndTake());

                if (!Peek.IsDigit())
                {
                    ResetIndex(); // reset validate significand in number test
                    parsing = false;
                    break;
                }

                while (Peek.IsDigit())
                {
                    sb.Append(PeekAndTake());
                }

                if (!double.TryParse(sb.ToString(), out _))
                {
                    ResetIndex(); // reset validate significand in number test
                    parsing = false;
                    break;
                }

                PopIndex(); // end validate significand in number test
                lastValidToken = Token.LiteralValue(sb.ToString(), LiteralTokenType.Decimal, this);
                state = "exponent";
                break;

            case "exponent":
                // short logic, check if number is a double with exponent
                if (!Matches('e', 'E'))
                {
                    parsing = false;
                    break;
                }

                PushIndex(); // validate exponent

                sb.Append(PeekAndTake());

                // check for signed exponent
                if (Matches('-', '+'))
                {
                    sb.Append(PeekAndTake());
                }

                if (!Peek.IsDigit())
                {
                    ResetIndex(); // reset validate exponent
                    parsing = false;
                    break;
                }

                while (Peek.IsDigit())
                {
                    sb.Append(PeekAndTake());
                }

                if (!double.TryParse(sb.ToString(), out _))
                {
                    ResetIndex(); // reset validate exponent
                    parsing = false;
                    break;
                }

                PopIndex(); // end validate exponent
                lastValidToken = Token.LiteralValue(sb.ToString(), LiteralTokenType.Decimal, this);
                parsing = false;
                break;
        }
    }

    if (lastValidToken == null)
    {
        return (false);
    }

    tokens.Add(lastValidToken);
    return (true);
}
/// <summary>
/// Tokenizes the source until <paramref name="closingTokenType"/> (or end of
/// file) is reached. Object and array openers recurse with the matching
/// closing token type, so mismatched brackets are detected structurally.
/// </summary>
/// <param name="closingTokenType">The token type expected to terminate this scope.</param>
/// <returns>The tokens collected for this scope, including the closing token.</returns>
/// <exception cref="JsonPlusTokenizerException">
/// A closing token mismatches the expected type, an invalid character is
/// encountered, or end of file is reached inside an unterminated scope.
/// </exception>
private TokenizeResult Tokenize(TokenType closingTokenType)
{
    TokenizeResult tokens = new TokenizeResult();
    while (!EndOfFile)
    {
        switch (Peek)
        {
            case JPlusConstants.StartOfObjectChar:
                Take();
                tokens.Add(new Token(JPlusConstants.StartOfObject, TokenType.StartOfObject, this));
                // Recurse: everything until the matching end-of-object.
                tokens.AddRange(Tokenize(TokenType.EndOfObject));
                continue;

            case JPlusConstants.EndOfObjectChar:
                Take();
                tokens.Add(new Token(JPlusConstants.EndOfObject, TokenType.EndOfObject, this));
                if (closingTokenType != tokens[tokens.Count - 1].Type)
                {
                    throw new JsonPlusTokenizerException(
                        string.Format(RS.UnexpectedToken, closingTokenType, tokens[tokens.Count - 1].Type),
                        tokens[tokens.Count - 1]);
                }
                return (tokens);

            case JPlusConstants.StartOfArrayChar:
                Take();
                tokens.Add(new Token(JPlusConstants.StartOfArray, TokenType.StartOfArray, this));
                // Recurse: everything until the matching end-of-array.
                tokens.AddRange(Tokenize(TokenType.EndOfArray));
                continue;

            case JPlusConstants.EndOfArrayChar:
                Take();
                tokens.Add(new Token(JPlusConstants.EndOfArray, TokenType.EndOfArray, this));
                if (closingTokenType != tokens[tokens.Count - 1].Type)
                {
                    throw new JsonPlusTokenizerException(
                        string.Format(RS.UnexpectedToken, closingTokenType, tokens[tokens.Count - 1].Type),
                        tokens[tokens.Count - 1]);
                }
                return (tokens);

            case JPlusConstants.ArraySeparatorChar:
                Take();
                tokens.Add(new Token(JPlusConstants.ArraySeparator, TokenType.ArraySeparator, LiteralTokenType.UnquotedLiteralValue, this));
                continue;

            case JPlusConstants.AssignmentOperatorChar:
            case JPlusConstants.AltAssignmentOperatorChar:
                char c = PeekAndTake();
                tokens.Add(new Token(c.ToString(), TokenType.Assignment, this));
                continue;

            // The remaining cases fall through to the literal/whitespace
            // handling below when the Pull* attempt does not match.
            case JPlusConstants.SelfAssignmentOperatorFirstChar:
                if (PullSelfAssignment(tokens))
                {
                    continue;
                }
                break;

            case JPlusConstants.CommentFirstChar:
            case JPlusConstants.AltCommentFirstChar:
                if (PullComment(tokens))
                {
                    continue;
                }
                break;

            case JPlusConstants.SubstitutionFirstChar:
                if (PullSubstitution(tokens))
                {
                    continue;
                }
                break;

            case JPlusConstants.NewLineChar:
                Take();
                tokens.Add(new Token(JPlusConstants.NewLine, TokenType.EndOfLine, this));
                continue;

            case JPlusConstants.IncludeKeywordFirstChar:
                if (PullInclude(tokens))
                {
                    continue;
                }
                break;
        }

        if (PullNonNewLineWhitespace(tokens))
        {
            continue;
        }
        if (PullLiteral(tokens))
        {
            continue;
        }

        throw new JsonPlusTokenizerException(string.Format(RS.InvalidTokenAtIndex, Index), Token.Error(this));
    }

    // End of file is only valid at the top level.
    if (closingTokenType != TokenType.EndOfFile)
    {
        throw new JsonPlusTokenizerException(
            string.Format(RS.UnexpectedToken, closingTokenType, TokenType.EndOfFile),
            tokens[tokens.Count - 1]);
    }

    return (tokens);
}