Example No. 1
        private bool PullBoolean(TokenizeResult tokens)
        {
            if (Matches(JPlusConstants.TrueKeyword))
            {
                Take(JPlusConstants.TrueKeywordLength);
                tokens.Add(Token.LiteralValue(JPlusConstants.TrueKeyword, LiteralTokenType.Boolean, this));
                return(true);
            }
            if (Matches(JPlusConstants.FalseKeyword))
            {
                Take(JPlusConstants.FalseKeywordLength);
                tokens.Add(Token.LiteralValue(JPlusConstants.FalseKeyword, LiteralTokenType.Boolean, this));
                return(true);
            }

            if (Matches(JPlusConstants.AltTrueKeyword))
            {
                Take(JPlusConstants.AltTrueKeywordLength);
                tokens.Add(Token.LiteralValue(JPlusConstants.AltTrueKeyword, LiteralTokenType.Boolean, this));
                return(true);
            }
            if (Matches(JPlusConstants.AltFalseKeyword))
            {
                Take(JPlusConstants.AltFalseKeywordLength);
                tokens.Add(Token.LiteralValue(JPlusConstants.AltFalseKeyword, LiteralTokenType.Boolean, this));
                return(true);
            }

            return(false);
        }
Example No. 2
        public TokenizeResult Tokenize()
        {
            TokenizeResult tokens = Tokenize(TokenType.EndOfFile);

            tokens.Add(new Token(string.Empty, TokenType.EndOfFile, this));
            return(tokens);
        }
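
This public Tokenize() overload drives the private Tokenize(TokenType) overload shown in Example No. 16 and terminates the stream with an end-of-file token. A minimal usage sketch follows; the Tokenizer class name and its constructor taking the source text are assumptions made for illustration and do not appear in these examples.

        // Minimal usage sketch. The Tokenizer class name and its constructor taking the
        // source text are assumptions; only Tokenize(), Count and the indexer appear above.
        string source = "root { flag = true, port = 0x1F90 }";
        Tokenizer tokenizer = new Tokenizer(source);             // hypothetical constructor
        TokenizeResult tokens = tokenizer.Tokenize();

        Console.WriteLine(tokens.Count);                         // number of tokens produced
        Console.WriteLine(tokens[tokens.Count - 1].Type);        // TokenType.EndOfFile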
Example No. 3
        private bool PullHexadecimal(TokenizeResult tokens)
        {
            if (!Matches("0x", "0X", "&h", "&H"))
            {
                return(false);
            }

            PushIndex();

            StringBuilder sb = new StringBuilder();

            Take(2);
            sb.Append("0x");

            while (Peek.IsHexadecimal())
            {
                sb.Append(PeekAndTake());
            }

            try
            {
                Convert.ToInt64(sb.ToString(), 16);
            }
            catch
            {
                ResetIndex();
                return(false);
            }

            PopIndex();

            tokens.Add(Token.LiteralValue(sb.ToString(), LiteralTokenType.Hexadecimal, this));
            return(true);
        }
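
PullHexadecimal, PullOctet, PullInclude and PullNumbers all rely on the same backtracking idiom: PushIndex saves the current read position before a speculative parse, ResetIndex rewinds to it when that parse fails, and PopIndex discards it on success. A minimal, self-contained sketch of the idiom, written for illustration rather than taken from the library, looks like this:

        // Minimal sketch of the PushIndex/ResetIndex/PopIndex backtracking idiom.
        // Member names mirror the examples above, but this is illustrative code only.
        // Requires System.Collections.Generic.
        public class BacktrackingReader
        {
            private readonly string _source;
            private readonly Stack<int> _savedIndexes = new Stack<int>();

            public BacktrackingReader(string source)
            {
                _source = source;
            }

            public int Index { get; private set; }
            public bool EndOfFile => Index >= _source.Length;
            public char Peek => EndOfFile ? '\0' : _source[Index];

            public char PeekAndTake()
            {
                char c = Peek;
                if (!EndOfFile)
                {
                    Index++;
                }
                return c;
            }

            // Save the current position before a speculative parse.
            public void PushIndex() => _savedIndexes.Push(Index);

            // The speculative parse failed: rewind to the saved position.
            public void ResetIndex() => Index = _savedIndexes.Pop();

            // The speculative parse succeeded: discard the saved position.
            public void PopIndex() => _savedIndexes.Pop();
        }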
Example No. 4
        /// <summary>
        /// Retrieves a <see cref="TokenType.Substitution"/> token from the tokenizer's current position and adds it to <paramref name="tokens"/>.
        /// </summary>
        /// <returns><c>true</c> if a substitution token was retrieved; otherwise, <c>false</c>.</returns>
        private bool PullSubstitution(TokenizeResult tokens)
        {
            bool questionMarked = false;

            if (Matches(JPlusConstants.OptionalSubstitutionOpenBrace))
            {
                Take(JPlusConstants.OptionalSubstitutionOpenBraceLength);
                questionMarked = true;
            }
            else if (Matches(JPlusConstants.SubstitutionOpenBrace))
            {
                Take(JPlusConstants.SubstitutionOpenBraceLength);
            }
            else
            {
                return(false);
            }

            StringBuilder sb = new StringBuilder();

            while (!EndOfFile && !Matches(JPlusConstants.SubstitutionCloseBrace))
            {
                sb.Append(PeekAndTake());
            }

            if (EndOfFile)
            {
                throw new JsonPlusTokenizerException(RS.UnexpectedTokenEndOfSubstitutionVsEof, Token.Error(this));
            }

            Take();

            tokens.Add(Token.Substitution(sb.ToString().TrimWhitespace(), this, questionMarked));
            return(true);
        }
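
The two open braces distinguish a required substitution from an optional one; the questionMarked flag is carried on the emitted token so the parser can later decide whether an unresolved path is an error. A tiny illustrative sketch of the surface syntax this method accepts is shown below; the literal brace strings "${", "${?" and "}" are assumptions inferred from the JPlusConstants names, not taken from these examples.

        // Illustrative only: splits "${ a.b }" or "${? a.b }" into a trimmed path and an
        // optional flag, assuming "${", "${?" and "}" as the brace sequences.
        private static (string Path, bool Optional) ReadSubstitution(string text)
        {
            bool optional = text.StartsWith("${?");
            int start = optional ? 3 : 2;
            int end = text.IndexOf('}', start);
            if (end < 0)
            {
                throw new FormatException("Unexpected end of substitution: missing '}'.");
            }

            return (text.Substring(start, end - start).Trim(), optional);
        }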
Example No. 5
        private bool PullOctet(TokenizeResult tokens)
        {
            PushIndex();
            StringBuilder sb = new StringBuilder();

            sb.Append(PeekAndTake());
            while (Peek.IsOctet())
            {
                sb.Append(PeekAndTake());
            }

            try
            {
                Convert.ToInt64(sb.ToString(), 8);
            }
            catch
            {
                ResetIndex();
                return(false);
            }

            PopIndex();
            tokens.Add(Token.LiteralValue(sb.ToString(), LiteralTokenType.Octet, this));

            return(true);
        }
Example No. 6
        /// <summary>
        /// Retrieves a triple quoted <see cref="TokenType.LiteralValue"/> token from the tokenizer's current position.
        /// </summary>
        /// <returns>
        /// <c>true</c> if a triple quoted literal token was retrieved; otherwise, <c>false</c>.
        /// </returns>
        private bool PullTripleQuoted(TokenizeResult tokens, string quoteSequence)
        {
            if (!Matches(quoteSequence))
            {
                return(false);
            }

            StringBuilder sb = new StringBuilder();

            Take(3);
            while (!EndOfFile && !Matches(quoteSequence))
            {
                if (Matches(JPlusConstants.Escape))
                {
                    sb.Append(PullEscapeSequence());
                }
                else
                {
                    sb.Append(PeekAndTake());
                }
            }

            if (EndOfFile)
            {
                throw new JsonPlusTokenizerException(string.Format(RS.UnexpectedTokenExpectTripleQuote, TokenType.EndOfFile), Token.Error(this));
            }

            Take(3);

            tokens.Add(Token.TripleQuotedLiteralValue(sb.ToString(), this));
            return(true);
        }
Example No. 7
        private bool PullInclude(TokenizeResult tokens)
        {
            // the "include?" keyword also matches this check
            if (!Matches(JPlusConstants.IncludeKeyword))
            {
                return(false);
            }

            PushIndex();
            TokenizeResult includeTokens = new TokenizeResult();

            // now find out if it's "include?" or "include"
            bool isOptional = false;

            if (Matches(JPlusConstants.IncludeOptionalKeyword))
            {
                isOptional = true;
            }

            if (isOptional)
            {
                Take(JPlusConstants.IncludeOptionalKeywordLength);
                includeTokens.Add(new Token(JPlusConstants.IncludeOptionalKeyword, TokenType.OptionalInclude, this));
            }
            else
            {
                Take(JPlusConstants.IncludeKeywordLength);
                includeTokens.Add(new Token(JPlusConstants.IncludeKeyword, TokenType.Include, this));
            }
            PullWhitespaces();

            if (!PullQuoted(includeTokens, JPlusConstants.QuoteChar) &&
                !PullQuoted(includeTokens, JPlusConstants.AltQuoteChar))
            {
                ResetIndex();
                return(false);
            }

            PopIndex();
            tokens.AddRange(includeTokens);
            return(true);
        }
Example No. 8
        private bool PullNan(TokenizeResult tokens)
        {
            if (Matches(JPlusConstants.NanKeyword))
            {
                Take(JPlusConstants.NanKeywordLength);
                tokens.Add(Token.LiteralValue(JPlusConstants.NanKeyword, LiteralTokenType.Decimal, this));
                return(true);
            }

            return(false);
        }
Example No. 9
        /// <summary>
        /// Retrieves a <see cref="TokenType.SelfAssignment"/> token from the tokenizer's current position.
        /// </summary>
        /// <returns><c>true</c> if a self-assignment token was retrieved; otherwise, <c>false</c>.</returns>
        private bool PullSelfAssignment(TokenizeResult tokens)
        {
            if (!Matches(JPlusConstants.SelfAssignmentOperator))
            {
                return(false);
            }

            Take(JPlusConstants.SelfAssignmentOperatorLength);
            tokens.Add(new Token(JPlusConstants.SelfAssignmentOperator, TokenType.SelfAssignment, this));
            return(true);
        }
Example No. 10
        private bool PullNull(TokenizeResult tokens)
        {
            if (!Matches(JPlusConstants.NullKeyword))
            {
                return(false);
            }

            Take(4);
            tokens.Add(Token.LiteralValue(JPlusConstants.NullKeyword, LiteralTokenType.Null, this));
            return(true);
        }
Example No. 11
        // parse the key as a path value
        private JsonPlusPath ParseKey()
        {
            while (_tokens.Current.LiteralType == LiteralTokenType.Whitespace)
            {
                _tokens.Next();
            }

            // sanity check
            if (_tokens.Current.Type != TokenType.LiteralValue)
            {
                throw JsonPlusParserException.Create(_tokens.Current, Path, string.Format(RS.UnexpectedKeyType, TokenType.LiteralValue, _tokens.Current.Type));
            }

            if (_tokens.Current.IsNonSignificant())
            {
                ConsumeWhitelines();
            }
            if (_tokens.Current.Type != TokenType.LiteralValue)
            {
                return(null);
            }

            TokenizeResult keyTokens = new TokenizeResult();

            while (_tokens.Current.Type == TokenType.LiteralValue)
            {
                keyTokens.Add(_tokens.Current);
                _tokens.Next();
            }

            keyTokens.Reverse();
            while (keyTokens.Count > 0 && keyTokens[0].LiteralType == LiteralTokenType.Whitespace)
            {
                keyTokens.RemoveAt(0);
            }
            keyTokens.Reverse();

            keyTokens.Add(new Token(string.Empty, TokenType.EndOfFile, null));

            return(JsonPlusPath.FromTokens(keyTokens));
        }
Example No. 12
        /// <summary>
        /// Retrieves a <see cref="TokenType.Comment"/> token from the tokenizer's current position.
        /// </summary>
        /// <returns>A <see cref="TokenType.EndOfLine"/> token from the tokenizer's last position, discarding the comment.</returns>
        private bool PullComment(TokenizeResult tokens)
        {
            if (!Matches(JPlusConstants.Comment, JPlusConstants.AltComment))
            {
                return(false);
            }

            string comment = DiscardRestOfLine();

            //tokens.Add(new Token(TokenType.Comment, this, start, Index - start));
            tokens.Add(new Token(comment, TokenType.EndOfLine, this));
            return(true);
        }
Example No. 13
        private bool PullNonNewLineWhitespace(TokenizeResult tokens)
        {
            if (!Peek.IsJsonPlusWhitespaceExceptNewLine())
            {
                return(false);
            }

            StringBuilder sb = new StringBuilder();

            while (Peek.IsJsonPlusWhitespaceExceptNewLine())
            {
                sb.Append(PeekAndTake());
            }
            tokens.Add(Token.LiteralValue(sb.ToString(), LiteralTokenType.Whitespace, this));
            return(true);
        }
Example No. 14
        private bool PullUnquoted(TokenizeResult tokens)
        {
            if (!IsUnquoted())
            {
                return(false);
            }

            StringBuilder sb = new StringBuilder();

            while (!EndOfFile && IsUnquoted())
            {
                sb.Append(PeekAndTake());
            }

            tokens.Add(Token.LiteralValue(sb.ToString(), LiteralTokenType.UnquotedLiteralValue, this));
            return(true);
        }
Example No. 15
        private bool PullInfinity(TokenizeResult tokens)
        {
            string[] infinityKeywords = new string[]
            {
                JPlusConstants.InfinityKeyword,
                JPlusConstants.InfinityPositiveKeyword,
                JPlusConstants.InfinityNegativeKeyword
            };

            foreach (string keyword in infinityKeywords)
            {
                if (Matches(keyword))
                {
                    Take(keyword.Length);
                    tokens.Add(Token.LiteralValue(keyword, LiteralTokenType.Decimal, this));
                    return(true);
                }
            }

            return(false);
        }
Example No. 16
        private TokenizeResult Tokenize(TokenType closingTokenType)
        {
            TokenizeResult tokens = new TokenizeResult();

            while (!EndOfFile)
            {
                switch (Peek)
                {
                case JPlusConstants.StartOfObjectChar:
                    Take();
                    tokens.Add(new Token(JPlusConstants.StartOfObject, TokenType.StartOfObject, this));
                    tokens.AddRange(Tokenize(TokenType.EndOfObject));
                    continue;

                case JPlusConstants.EndOfObjectChar:
                    Take();
                    tokens.Add(new Token(JPlusConstants.EndOfObject, TokenType.EndOfObject, this));
                    if (closingTokenType != tokens[tokens.Count - 1].Type)
                    {
                        throw new JsonPlusTokenizerException(
                                  string.Format(RS.UnexpectedToken, closingTokenType, tokens[tokens.Count - 1].Type),
                                  tokens[tokens.Count - 1]);
                    }
                    return(tokens);

                case JPlusConstants.StartOfArrayChar:
                    Take();
                    tokens.Add(new Token(JPlusConstants.StartOfArray, TokenType.StartOfArray, this));
                    tokens.AddRange(Tokenize(TokenType.EndOfArray));
                    continue;

                case JPlusConstants.EndOfArrayChar:
                    Take();
                    tokens.Add(new Token(JPlusConstants.EndOfArray, TokenType.EndOfArray, this));
                    if (closingTokenType != tokens[tokens.Count - 1].Type)
                    {
                        throw new JsonPlusTokenizerException(
                                  string.Format(RS.UnexpectedToken, closingTokenType, tokens[tokens.Count - 1].Type),
                                  tokens[tokens.Count - 1]);
                    }
                    return(tokens);

                case JPlusConstants.ArraySeparatorChar:
                    Take();
                    tokens.Add(new Token(JPlusConstants.ArraySeparator, TokenType.ArraySeparator, LiteralTokenType.UnquotedLiteralValue, this));
                    continue;

                case JPlusConstants.AssignmentOperatorChar:
                case JPlusConstants.AltAssignmentOperatorChar:
                    char c = PeekAndTake();
                    tokens.Add(new Token(c.ToString(), TokenType.Assignment, this));
                    continue;

                case JPlusConstants.SelfAssignmentOperatorFirstChar:
                    if (PullSelfAssignment(tokens))
                    {
                        continue;
                    }
                    break;

                case JPlusConstants.CommentFirstChar:
                case JPlusConstants.AltCommentFirstChar:
                    if (PullComment(tokens))
                    {
                        continue;
                    }
                    break;

                case JPlusConstants.SubstitutionFirstChar:
                    if (PullSubstitution(tokens))
                    {
                        continue;
                    }
                    break;

                case JPlusConstants.NewLineChar:
                    Take();
                    tokens.Add(new Token(JPlusConstants.NewLine, TokenType.EndOfLine, this));
                    continue;

                case JPlusConstants.IncludeKeywordFirstChar:
                    if (PullInclude(tokens))
                    {
                        continue;
                    }
                    break;
                }

                if (PullNonNewLineWhitespace(tokens))
                {
                    continue;
                }
                if (PullLiteral(tokens))
                {
                    continue;
                }

                throw new JsonPlusTokenizerException(string.Format(RS.InvalidTokenAtIndex, Index), Token.Error(this));
            }

            if (closingTokenType != TokenType.EndOfFile)
            {
                throw new JsonPlusTokenizerException(
                          string.Format(RS.UnexpectedToken, closingTokenType, TokenType.EndOfFile),
                          tokens[tokens.Count - 1]);
            }
            return(tokens);
        }
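
Note how nesting is handled in this overload: '{' and '[' each recurse into Tokenize with the matching closing token type, and the recursion only returns once that closing token has been consumed, so a missing or mismatched delimiter surfaces as a JsonPlusTokenizerException. A reduced sketch of the same recursive pattern over plain brackets, independent of the library, is:

        // Reduced sketch of the recursive delimiter matching used by Tokenize(TokenType).
        // Illustrative only; it checks bracket nesting and ignores everything else.
        // Top-level call: MatchBrackets(text, 0, '\0').
        private static int MatchBrackets(string source, int index, char expectedClose)
        {
            while (index < source.Length)
            {
                char c = source[index++];
                if (c == '{')
                {
                    index = MatchBrackets(source, index, '}');
                }
                else if (c == '[')
                {
                    index = MatchBrackets(source, index, ']');
                }
                else if (c == '}' || c == ']')
                {
                    if (c != expectedClose)
                    {
                        throw new FormatException($"Expected '{expectedClose}' but found '{c}'.");
                    }
                    return index;
                }
            }

            if (expectedClose != '\0')
            {
                throw new FormatException($"Expected '{expectedClose}' but reached end of input.");
            }
            return index;
        }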
Example No. 17
        private bool PullNumbers(TokenizeResult tokens)
        {
            StringBuilder sb = new StringBuilder();
            // Parse numbers
            bool parsing = true;

            Token lastValidToken = null;

            // coefficient, significand, exponent
            string state = "coefficient";

            while (parsing)
            {
                switch (state)
                {
                case "coefficient":
                    // possible double number without coefficient
                    if (Matches("-.", "+.", "."))
                    {
                        state = "significand";
                        break;
                    }

                    PushIndex();     // long test index

                    if (Matches('+', '-'))
                    {
                        sb.Append(PeekAndTake());
                    }

                    // numbers cannot start with a 0
                    if (!Peek.IsDigit() || Peek == '0')
                    {
                        ResetIndex();     // reset long test index
                        parsing = false;
                        break;
                    }

                    while (Peek.IsDigit())
                    {
                        sb.Append(PeekAndTake());
                    }

                    if (!long.TryParse(sb.ToString(), out _))
                    {
                        ResetIndex();     // reset long test index
                        parsing = false;
                        break;
                    }
                    PopIndex();     // end long test index
                    lastValidToken = Token.LiteralValue(sb.ToString(), LiteralTokenType.Integer, this);
                    state          = "significand";
                    break;

                case "significand":
                    // short-circuit: no significand, but there may still be an exponent
                    if (!Matches("-.", "+.", "."))
                    {
                        state = "exponent";
                        break;
                    }

                    PushIndex();     // validate significand in number test

                    if (Matches('+', '-'))
                    {
                        sb.Insert(0, PeekAndTake());
                    }

                    sb.Append(PeekAndTake());

                    if (!Peek.IsDigit())
                    {
                        ResetIndex();     // reset validate significand in number test
                        parsing = false;
                        break;
                    }

                    while (Peek.IsDigit())
                    {
                        sb.Append(PeekAndTake());
                    }

                    if (!double.TryParse(sb.ToString(), out _))
                    {
                        ResetIndex();     // reset validate significand in number test
                        parsing = false;
                        break;
                    }

                    PopIndex();     // end validate significand in number test
                    lastValidToken = Token.LiteralValue(sb.ToString(), LiteralTokenType.Decimal, this);
                    state          = "exponent";
                    break;

                case "exponent":
                    // short-circuit: check whether the number is a double with an exponent
                    if (!Matches('e', 'E'))
                    {
                        parsing = false;
                        break;
                    }

                    PushIndex();     // validate exponent
                    sb.Append(PeekAndTake());

                    // check for signed exponent
                    if (Matches('-', '+'))
                    {
                        sb.Append(PeekAndTake());
                    }

                    if (!Peek.IsDigit())
                    {
                        ResetIndex();     // reset validate exponent
                        parsing = false;
                        break;
                    }

                    while (Peek.IsDigit())
                    {
                        sb.Append(PeekAndTake());
                    }

                    if (!double.TryParse(sb.ToString(), out _))
                    {
                        ResetIndex();     // reset validate exponent
                        parsing = false;
                        break;
                    }

                    PopIndex();     // end validate exponent
                    lastValidToken = Token.LiteralValue(sb.ToString(), LiteralTokenType.Decimal, this);
                    parsing        = false;
                    break;
                }
            }

            if (lastValidToken == null)
            {
                return(false);
            }

            tokens.Add(lastValidToken);
            return(true);
        }
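
PullNumbers walks three states in order (coefficient, significand, exponent), remembering the last token that parsed cleanly and rewinding the index whenever a later state fails, so "42" is kept as an Integer while "42.5" and "42.5e-3" upgrade to Decimal, and trailing junk such as "42." only keeps the integer part. A simplified, self-contained sketch of that longest-valid-prefix grammar, written for illustration rather than taken from the library, could look like this:

        // Simplified sketch of the coefficient/significand/exponent grammar behind PullNumbers.
        // Returns the longest valid number prefix starting at 'start' and whether it is an
        // integer or a decimal; illustrative only, not the library's code.
        private static (string Text, string Kind) ReadNumber(string source, int start)
        {
            int i = start;
            int lastGood = start;
            string kind = null;

            // coefficient: optional sign, then digits with no leading zero
            if (i < source.Length && (source[i] == '+' || source[i] == '-'))
            {
                i++;
            }
            if (i < source.Length && source[i] >= '1' && source[i] <= '9')
            {
                while (i < source.Length && char.IsDigit(source[i]))
                {
                    i++;
                }
                kind = "Integer";
                lastGood = i;
            }

            // significand: '.' followed by at least one digit
            if (i < source.Length && source[i] == '.' && i + 1 < source.Length && char.IsDigit(source[i + 1]))
            {
                i++;
                while (i < source.Length && char.IsDigit(source[i]))
                {
                    i++;
                }
                kind = "Decimal";
                lastGood = i;
            }

            // exponent: 'e' or 'E', optional sign, then at least one digit
            if (kind != null && i < source.Length && (source[i] == 'e' || source[i] == 'E'))
            {
                int j = i + 1;
                if (j < source.Length && (source[j] == '+' || source[j] == '-'))
                {
                    j++;
                }
                if (j < source.Length && char.IsDigit(source[j]))
                {
                    while (j < source.Length && char.IsDigit(source[j]))
                    {
                        j++;
                    }
                    kind = "Decimal";
                    lastGood = j;
                }
            }

            return kind == null ? (null, null) : (source.Substring(start, lastGood - start), kind);
        }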