/// <summary>
/// Attempts to consume an <c>include</c> (or optional <c>include?</c>) directive followed by a
/// quoted resource name, appending the matched tokens to <paramref name="tokens"/>.
/// Returns <c>true</c> on success; on failure the read position is restored and
/// <paramref name="tokens"/> is left untouched.
/// </summary>
private bool PullInclude(TokenizeResult tokens)
{
    // A leading "include" prefix is required either way ("include?" also starts with it).
    if (!Matches(JPlusConstants.IncludeKeyword))
        return false;

    PushIndex();

    // Buffer locally so nothing is appended to the caller's result on a failed match.
    TokenizeResult buffered = new TokenizeResult();

    // Distinguish the optional form ("include?") from the plain form before consuming.
    if (Matches(JPlusConstants.IncludeOptionalKeyword))
    {
        Take(JPlusConstants.IncludeOptionalKeywordLength);
        buffered.Add(new Token(JPlusConstants.IncludeOptionalKeyword, TokenType.OptionalInclude, this));
    }
    else
    {
        Take(JPlusConstants.IncludeKeywordLength);
        buffered.Add(new Token(JPlusConstants.IncludeKeyword, TokenType.Include, this));
    }

    PullWhitespaces();

    // The include target must be a quoted string; try both quote styles in order.
    bool quotedTarget =
        PullQuoted(buffered, JPlusConstants.QuoteChar) ||
        PullQuoted(buffered, JPlusConstants.AltQuoteChar);

    if (!quotedTarget)
    {
        ResetIndex();
        return false;
    }

    PopIndex();
    tokens.AddRange(buffered);
    return true;
}
/// <summary>
/// Tokenizes the input until end-of-file or until the token matching
/// <paramref name="closingTokenType"/> (end of object/array) is consumed.
/// Object and array bodies are handled by recursive calls, each of which returns
/// once it consumes its own closing delimiter.
/// </summary>
/// <param name="closingTokenType">
/// The token type expected to terminate this scope; <see cref="TokenType.EndOfFile"/>
/// for the top level.
/// </param>
/// <returns>The tokens recognized in this scope, including the closing token.</returns>
/// <exception cref="JsonPlusTokenizerException">
/// Thrown on an unrecognizable character, or when the scope ends with a delimiter
/// (or EOF) that does not match <paramref name="closingTokenType"/>.
/// </exception>
private TokenizeResult Tokenize(TokenType closingTokenType)
{
    TokenizeResult tokens = new TokenizeResult();

    while (!EndOfFile)
    {
        switch (Peek)
        {
            case JPlusConstants.StartOfObjectChar:
                Take();
                tokens.Add(new Token(JPlusConstants.StartOfObject, TokenType.StartOfObject, this));
                // Recurse for the object body; the nested call consumes the matching '}'.
                tokens.AddRange(Tokenize(TokenType.EndOfObject));
                continue;

            case JPlusConstants.EndOfObjectChar:
                Take();
                tokens.Add(new Token(JPlusConstants.EndOfObject, TokenType.EndOfObject, this));
                // The token just added must be the one this scope was opened for.
                if (closingTokenType != tokens[tokens.Count - 1].Type)
                {
                    throw new JsonPlusTokenizerException(
                        string.Format(RS.UnexpectedToken, closingTokenType, tokens[tokens.Count - 1].Type),
                        tokens[tokens.Count - 1]);
                }
                return tokens;

            case JPlusConstants.StartOfArrayChar:
                Take();
                tokens.Add(new Token(JPlusConstants.StartOfArray, TokenType.StartOfArray, this));
                // Recurse for the array body; the nested call consumes the matching ']'.
                tokens.AddRange(Tokenize(TokenType.EndOfArray));
                continue;

            case JPlusConstants.EndOfArrayChar:
                Take();
                tokens.Add(new Token(JPlusConstants.EndOfArray, TokenType.EndOfArray, this));
                if (closingTokenType != tokens[tokens.Count - 1].Type)
                {
                    throw new JsonPlusTokenizerException(
                        string.Format(RS.UnexpectedToken, closingTokenType, tokens[tokens.Count - 1].Type),
                        tokens[tokens.Count - 1]);
                }
                return tokens;

            case JPlusConstants.ArraySeparatorChar:
                Take();
                // NOTE(review): unlike the other structural tokens, this uses the
                // Token overload taking LiteralTokenType.UnquotedLiteralValue —
                // presumably intentional; confirm against the Token constructor.
                tokens.Add(new Token(JPlusConstants.ArraySeparator, TokenType.ArraySeparator, LiteralTokenType.UnquotedLiteralValue, this));
                continue;

            case JPlusConstants.AssignmentOperatorChar:
            case JPlusConstants.AltAssignmentOperatorChar:
                // Preserve whichever assignment character was actually used.
                char c = PeekAndTake();
                tokens.Add(new Token(c.ToString(), TokenType.Assignment, this));
                continue;

            // For the prefix-triggered pulls below, a failed pull falls through to the
            // generic whitespace/literal handling after the switch.
            case JPlusConstants.SelfAssignmentOperatorFirstChar:
                if (PullSelfAssignment(tokens))
                    continue;
                break;

            case JPlusConstants.CommentFirstChar:
            case JPlusConstants.AltCommentFirstChar:
                if (PullComment(tokens))
                    continue;
                break;

            case JPlusConstants.SubstitutionFirstChar:
                if (PullSubstitution(tokens))
                    continue;
                break;

            case JPlusConstants.NewLineChar:
                Take();
                tokens.Add(new Token(JPlusConstants.NewLine, TokenType.EndOfLine, this));
                continue;

            case JPlusConstants.IncludeKeywordFirstChar:
                if (PullInclude(tokens))
                    continue;
                break;
        }

        if (PullNonNewLineWhitespace(tokens))
            continue;

        if (PullLiteral(tokens))
            continue;

        throw new JsonPlusTokenizerException(string.Format(RS.InvalidTokenAtIndex, Index), Token.Error(this));
    }

    // EOF reached without consuming the expected closing delimiter.
    if (closingTokenType != TokenType.EndOfFile)
    {
        // Fix: tokens can be empty here (e.g. input ending right after '{' or '['),
        // in which case indexing tokens[tokens.Count - 1] would itself throw
        // ArgumentOutOfRangeException instead of the intended tokenizer error.
        // Fall back to an error token at the current position, as the invalid-token
        // path above already does.
        Token lastToken = tokens.Count > 0 ? tokens[tokens.Count - 1] : Token.Error(this);
        throw new JsonPlusTokenizerException(
            string.Format(RS.UnexpectedToken, closingTokenType, TokenType.EndOfFile),
            lastToken);
    }

    return tokens;
}