Example #1
        private void Identifiers2()
        {
            AssertTokenizer t = AssertTokens();

            // The 'variable' non-terminal needs to set $<String>$ even for keywords;
            // otherwise the content of the previous token is left in the token value and interpreted as a string.
            t.Load("//\ntrue")[Tokens.RegexpBeg][Tokens.RegexpEnd][(Tokens)'\n'][Tokens.True].EOF();

            t.Expect();
        }
Example #2
        private void UnicodeEscapes2()
        {
            AssertTokenizer t = AssertTokens();

            t.Load(@":""\u{123456789}""")[Tokens.Symbeg][@"u{123456789}"][Tokens.StringEnd].EOF();
            t.Load(@":""\u123456789""")[Tokens.Symbeg][@"u123456789"][Tokens.StringEnd].EOF();
            t.Load(@"/\u1234/")[Tokens.RegexpBeg][@"\u1234"][Tokens.RegexpEnd].EOF();
            t.Load(@"/\u{101234}/")[Tokens.RegexpBeg][@"\u{101234}"][Tokens.RegexpEnd].EOF();

            t.Expect();
        }
Example #3
        private void Scenario_ParseRegex1()
        {
            AssertTokenizer t = AssertTokens();

            t.Load("//")[Tokens.RegexpBeg][Tokens.RegexpEnd].EOF();
            t.Load("/foo/")[Tokens.RegexpBeg]["foo"][Tokens.RegexpEnd].EOF();

            t.Load("/foo/aib").Skip(2).Read(RubyRegexOptions.IgnoreCase).Expect(Errors.UnknownRegexOption, Errors.UnknownRegexOption);
            t.Load("/foo/9").Skip(2).Read(Tokens.RegexpEnd); // TODO: unexpected token 9
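            // A long option string with repeated characters; Read asserts the combined RubyRegexOptions value: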
            t.Load("/foo/esuniiimmmxxxooo").Skip(2).
                Read(RubyRegexOptions.IgnoreCase | RubyRegexOptions.Multiline | RubyRegexOptions.Extended | RubyRegexOptions.FIXED | RubyRegexOptions.UTF8);

            t.Expect();
        }
Example #4
        private void Scenario_ParseInstanceClassVariables1()
        {
            AssertTokenizer t = AssertTokens();

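            // Bare '@' and '@@' produce a plain '@' token; a digit after them is reported as an
            // invalid instance/class variable name.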
            t.Load("@").Read((Tokens)'@');
            t.Load("@@").Read((Tokens)'@');
            t.Load("@1").Read((Tokens)'@').Expect(Errors.InvalidInstanceVariableName);
            t.Load("@@1").Read((Tokens)'@').Expect(Errors.InvalidClassVariableName);
            t.Load("@_").ReadSymbol(Tokens.InstanceVariable, "@_");
            t.Load("@@_").ReadSymbol(Tokens.ClassVariable, "@@_");
            t.Load("@aA1_").ReadSymbol(Tokens.InstanceVariable, "@aA1_");
            t.Load("@@aA1_").ReadSymbol(Tokens.ClassVariable, "@@aA1_");

            t.Expect();
        }
Example #5
        private void Escapes1()
        {
            AssertTokenizer t = AssertTokens();

            // hex:
            t.Load("\"\\x\n20\"")[Tokens.StringBeg]["?\n20"][Tokens.StringEnd].Expect(Errors.InvalidEscapeCharacter);
            t.Load("\"\\x2\n0\"")[Tokens.StringBeg]["\u0002\n0"][Tokens.StringEnd].EOF();
            t.Load("\"\\x20\n\"")[Tokens.StringBeg][" \n"][Tokens.StringEnd].EOF();

            // octal:
            t.Load("\"\\0\n40\"")[Tokens.StringBeg]["\0\n40"][Tokens.StringEnd].EOF();
            t.Load("\"\\04\n0\"")[Tokens.StringBeg]["\u0004\n0"][Tokens.StringEnd].EOF();
            t.Load("\"\\040\n\"")[Tokens.StringBeg][" \n"][Tokens.StringEnd].EOF();
            t.Load("\"\\123\"")[Tokens.StringBeg]["S"][Tokens.StringEnd].EOF();

            t.Expect();
        }
Example #6
        private void StringLiterals1()
        {
            AssertTokenizer t = AssertTokens();

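            // Probe each 7-bit character as a %-literal type character/delimiter; the skipped cases are the
            // recognized quote types (Q, q, W, w, x, r, s) and the opening brackets used as paired delimiters.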
            for (int i = 0; i < 128; i++)
            {
                switch (i)
                {
                case '(':
                case '{':
                case '[':
                case '<':
                case 'Q':
                case 'q':
                case 'W':
                case 'w':
                case 'x':
                case 'r':
                case 's':
                    break;

                default:
                    var str = "%" + (char)i + "foo" + (char)i;

                    if (Tokenizer.IsDecimalDigit(i))
                    {
                        t.Load(str)[(Tokens)'%'][Tokens.Integer][Tokens.Identifier].Expect(Errors.UnknownQuotedStringType).EOF();
                    }
                    else if (Tokenizer.IsUpperLetter(i))
                    {
                        t.Load(str)[(Tokens)'%'][Tokens.ConstantIdentifier].Expect(Errors.UnknownQuotedStringType).EOF();
                    }
                    else if (Tokenizer.IsLowerLetter(i))
                    {
                        t.Load(str)[(Tokens)'%'][Tokens.Identifier].Expect(Errors.UnknownQuotedStringType).EOF();
                    }
                    else
                    {
                        t.Load(str)[Tokens.StringBeg]["foo"][Tokens.StringEnd].EOF();
                    }
                    break;
                }
            }

            t.Expect();
        }
Example #7
        private void UnicodeIdentifiers1()
        {
            AssertTokenizer t = AssertTokens();

            //t.Load("Σ=@Σ=@@Σ=$Σ")
            //    [Tokens.Identifier][Tokens.Eq]
            //    [Tokens.InstanceVariable][Tokens.Eq]
            //    [Tokens.ClassVariable][Tokens.Eq]
            //    [Tokens.GlobalVariable].EOF();

            //t.Load("def Σ;end")
            //    [Tokens.Def][Tokens.Identifier][(Tokens)';'][Tokens.End].EOF();

            //t.Load("<<Σ\nhello\nΣ")
            //    [Tokens.StringBeg]["hello\n"][Tokens.StringEnd][(Tokens)'\n'].EOF();

            t.Expect();
        }
Example #8
        private void Heredoc1()
        {
            AssertTokenizer t = AssertTokens();

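            // The heredoc label may be bare, double-quoted, single-quoted, or backquoted;
            // a backquoted label yields a shell string.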
            t.Load("<<LABEL\nhello\nLABEL")
                [Tokens.StringBeg]["hello\n"][Tokens.StringEnd][(Tokens)'\n'].EOF();

            t.Load("<<\"LABEL\"\nhello\nLABEL")
                [Tokens.StringBeg]["hello\n"][Tokens.StringEnd][(Tokens)'\n'].EOF();

            t.Load("<<'LABEL'\nhello\nLABEL")
                [Tokens.StringBeg]["hello\n"][Tokens.StringEnd][(Tokens)'\n'].EOF();

            t.Load("<<`LABEL`\nhello\nLABEL")
                [Tokens.ShellStringBegin]["hello\n"][Tokens.StringEnd][(Tokens)'\n'].EOF();

            t.Expect();
        }
Example #9
        private void UnicodeEscapes1()
        {
            AssertTokenizer t = AssertTokens();

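            // Code points and the number of hex digits used to format each \u{...} escape below: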
            int[] values = new[] { 0x20, 0x102020, 0x20, 0x20, 0 };
            int[] width  = new[] { 2, 6, 6, 5, 1 };

            for (int i = 0; i < values.Length; i++)
            {
                t.Load(@"""\u{" + values[i].ToString("x" + width[i]) + @"}""")[Tokens.StringBeg][Char.ConvertFromUtf32(values[i])][Tokens.StringEnd].EOF();
            }

            t.Load(@":""\u{123456}""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.TooLargeUnicodeCodePoint);
            t.Load(@":""\u{0}""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.NullCharacterInSymbol);
            t.Load(@":""\u0000""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.NullCharacterInSymbol);
            t.Load(@":""\u111""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.InvalidEscapeCharacter);
            t.Load(@":""\u""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.InvalidEscapeCharacter);
            t.Load(@":""\u{123""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.InvalidEscapeCharacter);
            t.Load(@":""\u{123g}""")[Tokens.Symbeg][Tokens.StringContent].Expect(Errors.InvalidEscapeCharacter);

            // regex:
            t.Load(@"/\x20/")[Tokens.RegexpBeg][@"\x20"][Tokens.RegexpEnd].EOF();
            t.Load(@"/\u1234/")[Tokens.RegexpBeg][@"\u1234"][Tokens.RegexpEnd].EOF();
            t.Load(@"/\u{101234}/")[Tokens.RegexpBeg][@"\u{101234}"][Tokens.RegexpEnd].EOF();

            // braces:
            t.Load(@"%{{\u{05d0}}}")[Tokens.StringBeg]["{\u05d0}"][Tokens.StringEnd].EOF();

            // eoln in the middle of \u escape:
            t.Load("\"\\u0020\n\"")[Tokens.StringBeg][" \n"][Tokens.StringEnd].EOF();
            t.Load("\"\\u002\n0\"")[Tokens.StringBeg]["?002\n0"][Tokens.StringEnd].Expect(Errors.InvalidEscapeCharacter).EOF();
            t.Load("\"\\u00\n20\"")[Tokens.StringBeg]["?00\n20"][Tokens.StringEnd].Expect(Errors.InvalidEscapeCharacter).EOF();
            t.Load("\"\\u0\n020\"")[Tokens.StringBeg]["?0\n020"][Tokens.StringEnd].Expect(Errors.InvalidEscapeCharacter).EOF();
            t.Load("\"\\u\n0020\"")[Tokens.StringBeg]["?\n0020"][Tokens.StringEnd].Expect(Errors.InvalidEscapeCharacter).EOF();

            t.Expect();
        }
Example #10
        private void Heredoc1() {
            AssertTokenizer t = new AssertTokenizer(this) { Verbatim = false };
            t.Load("<<LABEL\nhello\nLABEL")
                [Tokens.StringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            t.Load("<<\"LABEL\"\nhello\nLABEL")
                [Tokens.StringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            t.Load("<<'LABEL'\nhello\nLABEL")
                [Tokens.StringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            t.Load("<<`LABEL`\nhello\nLABEL")
                [Tokens.ShellStringBegin]["hello\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            t.Load("<<LABEL\nLABEL123\nLABEL")
                [Tokens.StringBegin]["LABEL123\n"][Tokens.StringEnd][Tokens.NewLine].EOF();

            t.Load("puts <<L1, 1, <<L2, 2\naaa\nL1\nbbb\nL2\n3")
                [Tokens.Identifier, "puts"]
                [Tokens.StringBegin]["aaa\n"][Tokens.StringEnd]
                [Tokens.Comma][1][Tokens.Comma]
                [Tokens.StringBegin]["bbb\n"][Tokens.StringEnd]
                [Tokens.Comma][2]
                [Tokens.NewLine]
                [3].EOF();

            t.Load("puts <<A,1\\\n...\nA\n,2")
                [Tokens.Identifier, "puts"]
                [Tokens.StringBegin]
                ["...\n"]
                [Tokens.StringEnd].State(LexicalState.EXPR_END)
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [1].State(LexicalState.EXPR_END)    // \\n is treated as whitespace
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [2].State(LexicalState.EXPR_END).
            EOF();

            t.Load("puts <<A,(f\\\n...\nA\n())")
                [Tokens.Identifier, "puts"]
                [Tokens.StringBegin]
                ["...\n"]
                [Tokens.StringEnd].State(LexicalState.EXPR_END)
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [Tokens.LeftExprParenthesis].State(LexicalState.EXPR_BEG)  // CommandMode == true
                [Tokens.Identifier, "f"].State(LexicalState.EXPR_CMDARG)   // \\n is treated as whitespace, WhitespaceSeen == true
                [Tokens.LeftArgParenthesis].State(LexicalState.EXPR_BEG)
                [Tokens.RightParenthesis]
                [Tokens.RightParenthesis].
            EOF();

            t.Expect();

            AssertTokenizer vt = new AssertTokenizer(this) { Verbatim = true };

            vt.Load("puts <<A,1\\\n...\nA\n,2")
                [Tokens.Identifier, "puts"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin]
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [1].State(LexicalState.EXPR_END)    
                [Tokens.Whitespace]                             // \\n 
                [Tokens.StringContent, "...\n"]
                [Tokens.VerbatimHeredocEnd].State(LexicalState.EXPR_END) // A label
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [2].State(LexicalState.EXPR_END).
            EOF();

            vt.Load("puts <<A,(f\\\n...\nA\n())")
                [Tokens.Identifier, "puts"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin]
                [Tokens.Comma].State(LexicalState.EXPR_BEG)
                [Tokens.LeftExprParenthesis].State(LexicalState.EXPR_BEG)  // CommandMode == true
                [Tokens.Identifier, "f"].State(LexicalState.EXPR_CMDARG)   
                [Tokens.Whitespace]
                ["...\n"]
                [Tokens.VerbatimHeredocEnd].State(LexicalState.EXPR_CMDARG)       
                [Tokens.LeftArgParenthesis].State(LexicalState.EXPR_BEG)
                [Tokens.RightParenthesis]
                [Tokens.RightParenthesis].
            EOF();

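            // Heredocs nested via string interpolation: each <<X yields a VerbatimHeredocBegin where the
            // label appears, while its content and VerbatimHeredocEnd follow later in source order.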
            vt.Load(@"puts <<A,<<B
1
2#{f <<C,<<D}3#{g <<E}4
c
C
d#{f <<F}d
f
F
D
e
E
5
A
b
b
B")
                [Tokens.Identifier, "puts"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<A
                [Tokens.Comma]
                [Tokens.VerbatimHeredocBegin] // <<B
                [Tokens.EndOfLine]
                ["1\n2"]
                [Tokens.StringEmbeddedCodeBegin]
                [Tokens.Identifier, "f"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<C
                [Tokens.Comma]
                [Tokens.VerbatimHeredocBegin] // <<D
                [Tokens.StringEmbeddedCodeEnd]
                ["3"]
                [Tokens.StringEmbeddedCodeBegin]
                [Tokens.Identifier, "g"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<E
                [Tokens.StringEmbeddedCodeEnd]
                ["4\n"]
                ["c\n"]
                [Tokens.VerbatimHeredocEnd]   // C
                ["d"]
                [Tokens.StringEmbeddedCodeBegin]
                [Tokens.Identifier, "f"]
                [Tokens.Whitespace]
                [Tokens.VerbatimHeredocBegin] // <<F
                [Tokens.StringEmbeddedCodeEnd]
                ["d\n"]
                ["f\n"]
                [Tokens.VerbatimHeredocEnd]   // F
                [Tokens.VerbatimHeredocEnd]   // D
                ["e\n"]
                [Tokens.VerbatimHeredocEnd]   // E
                ["5\n"]
                [Tokens.VerbatimHeredocEnd]   // A
                ["b\nb\n"]
                [Tokens.VerbatimHeredocEnd]   // B
            .EOF();

            t.Expect();

            // index:                                111111111122 2222 222 2333 333 3 3
            //                             0123456789012345678901 2345 678 9012 345 6 7
            TestCategorizer(Engine, null, "puts <<L1, 1, <<L2, 2\naaa\nL1\nbbb\nL2\r\n3", 
            // column:                     1234567890123456789012 1234 123 1234 123 4 1 
            // line:                       1111111111111111111111 2222 333 4444 555 5 6
            // 
                // puts
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Identifier, TokenTriggers.None),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(4, 1, 5), new SourceLocation(5, 1, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<L1
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(9, 1, 10)), TokenCategory.StringLiteral, TokenTriggers.None),
                // ,
                new TokenInfo(new SourceSpan(new SourceLocation(9, 1, 10), new SourceLocation(10, 1, 11)), TokenCategory.Delimiter, TokenTriggers.ParameterNext),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(10, 1, 11), new SourceLocation(11, 1, 12)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // 1
                new TokenInfo(new SourceSpan(new SourceLocation(11, 1, 12), new SourceLocation(12, 1, 13)), TokenCategory.NumericLiteral, TokenTriggers.None),
                // ,
                new TokenInfo(new SourceSpan(new SourceLocation(12, 1, 13), new SourceLocation(13, 1, 14)), TokenCategory.Delimiter, TokenTriggers.ParameterNext),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(13, 1, 14), new SourceLocation(14, 1, 15)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<L2
                new TokenInfo(new SourceSpan(new SourceLocation(14, 1, 15), new SourceLocation(18, 1, 19)), TokenCategory.StringLiteral, TokenTriggers.None),
                // ,
                new TokenInfo(new SourceSpan(new SourceLocation(18, 1, 19), new SourceLocation(19, 1, 20)), TokenCategory.Delimiter, TokenTriggers.ParameterNext),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(19, 1, 20), new SourceLocation(20, 1, 21)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // 2
                new TokenInfo(new SourceSpan(new SourceLocation(20, 1, 21), new SourceLocation(21, 1, 22)), TokenCategory.NumericLiteral, TokenTriggers.None),
                // \n
                new TokenInfo(new SourceSpan(new SourceLocation(21, 1, 22), new SourceLocation(22, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // aaa\n
                new TokenInfo(new SourceSpan(new SourceLocation(22, 2, 1), new SourceLocation(26, 3, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // L1\n
                new TokenInfo(new SourceSpan(new SourceLocation(26, 3, 1), new SourceLocation(29, 4, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // bbb\n
                new TokenInfo(new SourceSpan(new SourceLocation(29, 4, 1), new SourceLocation(33, 5, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // L2\r\n
                new TokenInfo(new SourceSpan(new SourceLocation(33, 5, 1), new SourceLocation(37, 6, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // 3
                new TokenInfo(new SourceSpan(new SourceLocation(37, 6, 1), new SourceLocation(38, 6, 2)), TokenCategory.NumericLiteral, TokenTriggers.None)
            );

            // index:                                 1111
            //                             0123456789 0123
            TestCategorizer(Engine, null, "puts <<L1\naaa", 
            // column:                     1234567890 1234
            // line:                       1111111111 2222
            // 
                // puts
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Identifier, TokenTriggers.None),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(4, 1, 5), new SourceLocation(5, 1, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<L1
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(9, 1, 10)), TokenCategory.StringLiteral, TokenTriggers.None),
                // \n
                new TokenInfo(new SourceSpan(new SourceLocation(9, 1, 10), new SourceLocation(10, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // aaa\n
                new TokenInfo(new SourceSpan(new SourceLocation(10, 2, 1), new SourceLocation(13, 2, 4)), TokenCategory.StringLiteral, TokenTriggers.None),
                // <missing heredoc end>
                new TokenInfo(new SourceSpan(new SourceLocation(13, 2, 4), new SourceLocation(13, 2, 4)), TokenCategory.StringLiteral, TokenTriggers.None)
            );

            // index:                                1 1111 11111
            //                             01234567890 1234 56789
            TestCategorizer(Engine, null, "puts <<-L1\naaa\n  L1",
            // column:                     12345678901 1234 12345
            // line:                       11111111111 2222 33333
            // 
                // puts
                new TokenInfo(new SourceSpan(new SourceLocation(0, 1, 1), new SourceLocation(4, 1, 5)), TokenCategory.Identifier, TokenTriggers.None),
                // ' '
                new TokenInfo(new SourceSpan(new SourceLocation(4, 1, 5), new SourceLocation(5, 1, 6)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // <<-L1
                new TokenInfo(new SourceSpan(new SourceLocation(5, 1, 6), new SourceLocation(10, 1, 11)), TokenCategory.StringLiteral, TokenTriggers.None),
                // \n
                new TokenInfo(new SourceSpan(new SourceLocation(10, 1, 11), new SourceLocation(11, 2, 1)), TokenCategory.WhiteSpace, TokenTriggers.None),
                // aaa\n
                new TokenInfo(new SourceSpan(new SourceLocation(11, 2, 1), new SourceLocation(15, 3, 1)), TokenCategory.StringLiteral, TokenTriggers.None),
                // L1
                new TokenInfo(new SourceSpan(new SourceLocation(17, 3, 3), new SourceLocation(19, 3, 5)), TokenCategory.StringLiteral, TokenTriggers.None)
            );
        }
Example #11
        private void TokenizeStrings2() {
            AssertTokenizer t = new AssertTokenizer(this) {
                Verbatim = true
            };
            
            // string nested in a string:
            t.Load(@"""abc#{""x#@hello#{{}}y""}def""")
                [Tokens.StringBegin]
                [Tokens.StringContent]
                [Tokens.StringEmbeddedCodeBegin]
                    [Tokens.StringBegin]
                    [Tokens.StringContent]
                    [Tokens.StringEmbeddedVariableBegin]
                        [Tokens.InstanceVariable]
                    [Tokens.StringEmbeddedCodeBegin]
                        [Tokens.LeftBrace]
                        [Tokens.RightBrace]
                    [Tokens.StringEmbeddedCodeEnd]
                    [Tokens.StringContent]
                    [Tokens.StringEnd]
                [Tokens.StringEmbeddedCodeEnd]
                [Tokens.StringContent]
                [Tokens.StringEnd].
            EOF();

            // nested braces:
            t.Load(@"""a#{{{""#{{""#{1}""=>""c""}}""=>""#{2}""}=>""#{3}""}}a""")
                [Tokens.StringBegin]
                [Tokens.StringContent]
                [Tokens.StringEmbeddedCodeBegin]
                    [Tokens.LeftBrace]
                        [Tokens.LeftBrace]
                            [Tokens.StringBegin]
                            [Tokens.StringEmbeddedCodeBegin]
                                [Tokens.LeftBrace]
                                    [Tokens.StringBegin]
                                    [Tokens.StringEmbeddedCodeBegin]
                                        [1]
                                    [Tokens.StringEmbeddedCodeEnd]
                                    [Tokens.StringEnd]
                                [Tokens.DoubleArrow]
                                    [Tokens.StringBegin]
                                    [Tokens.StringContent]
                                    [Tokens.StringEnd]
                                [Tokens.RightBrace]
                            [Tokens.StringEmbeddedCodeEnd]
                            [Tokens.StringEnd]
                        [Tokens.DoubleArrow]
                            [Tokens.StringBegin]
                            [Tokens.StringEmbeddedCodeBegin]
                                [2]
                            [Tokens.StringEmbeddedCodeEnd]
                            [Tokens.StringEnd]
                        [Tokens.RightBrace]
                    [Tokens.DoubleArrow]
                        [Tokens.StringBegin]
                        [Tokens.StringEmbeddedCodeBegin]
                            [3]
                        [Tokens.StringEmbeddedCodeEnd]
                        [Tokens.StringEnd]
                    [Tokens.RightBrace]
                [Tokens.StringEmbeddedCodeEnd]
                [Tokens.StringContent]
                [Tokens.StringEnd].
            EOF();
            
            // =begin .. =end nested in a string:
            t.Load("\"hello#{\n=begin\nxxx\n=end\nidf\n}world\"")
                [Tokens.StringBegin]
                [Tokens.StringContent]
                    [Tokens.StringEmbeddedCodeBegin]
                    [Tokens.EndOfLine]
                    [Tokens.MultiLineComment]
                    [Tokens.Identifier, "idf"]
                    [Tokens.NewLine]
                    [Tokens.StringEmbeddedCodeEnd]
                [Tokens.StringContent]
                [Tokens.StringEnd].
            EOF();

            // braces nesting in word array:
            t.Load("%w{#$a #{b + 1} c}")
                [Tokens.VerbatimWordsBegin]
                [Tokens.StringContent, "#$a"][Tokens.WordSeparator]
                [Tokens.StringContent, "#{b"][Tokens.WordSeparator]
                [Tokens.StringContent, "+"][Tokens.WordSeparator]
                [Tokens.StringContent, "1}"][Tokens.WordSeparator]
                [Tokens.StringContent, "c"]
                [Tokens.StringEnd].
            EOF();
        }
Example #12
        private void Scenario_ParseNumbers1()
        {
            AssertTokenizer t = AssertTokens();

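            // Integer zero in decimal form, with leading zeros and underscore separators: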
            t.Load("0").Read(0);
            t.Load("0000").Read(0);
            t.Load("0_0_00000_0_0_00000000000000000000_00_00000000000000000000000000000").Read(0);

            t.Load("0777").Read(Convert.ToInt32("777", 8));
            t.Load("000000000000000000000000000000000000076541").Read(Convert.ToInt32("76541", 8));
            AssertTokenBigInteger("0100000000000000_000000000000000000000076541", 8);

            t.Load("0x0").Read(0);
            t.Load("0xa_F").Read(Convert.ToInt32("af", 16));
            t.Load("0x000000_0000000000000000000aF").Read(Convert.ToInt32("af", 16));
            t.Load("0x10000000000_00000000000000aF").ReadBigInteger("1000000000000000000000000aF", 16);

            t.Load("0b0").Read(0);
            t.Load("0b000000000000_000000000000000000000000000101").Read(Convert.ToInt32("101", 2));
            t.Load("0b10000000_0000000000000000000000000000000101").ReadBigInteger("100000000000000000000000000000000000000101", 2);

            t.Load("0d0").Read(0);
            t.Load("0d00000000_0000000000000000000000000000000101").Read(101);
            t.Load("0d10000000_0000000000000000000000_000000000101").ReadBigInteger("100000000000000000000000000000000000000101", 10);

            t.Load("0o0").Read(0);
            t.Load("0o000_000000000000000000000000000000000076541").Read(Convert.ToInt32("76541", 8));
            t.Load("0o100000000_000000000000000000000000000076541").ReadBigInteger("100000000000000000000000000000000000076541", 8);
            t.Load("0.0").Read(0.0D);

            t.Load("0e-000").Read(0.0D);
            t.Load("0e2").Read(0.0D);
            t.Load("0e+2").Read(0.0D);
            t.Load("1e2").Read(100.0D);
            t.Load("1e+2").Read(100.0D);
            t.Load("1e-2").Read(0.01D);

            t.Load("3_1_3_2_1_3_2_1_3_5_4_6_5_3_1_3_2.0").Read(31321321354653132.0D);
            t.Load("1_3_2.3_1_3_2_1_3").Read(132.313213D);

            t.Load("1.1e-0").Read(1.1D);
            t.Load("1.1e+0").Read(1.1D);
            t.Load("1.1e0").Read(1.1D);
            t.Load("1.1e-1").Read(0.11D);

            t.Load("1.1e-1020").Read(0.0D);
            t.Load("1.1e-1021").Read(0.0D).Expect(Errors.FloatOutOfRange);

            t.Load("1.1e1024").Read(Double.PositiveInfinity);
            t.Load("1.1e1025").Read(Double.PositiveInfinity).Expect(Errors.FloatOutOfRange);

            t.Load("1.1e-30000").Read(0.0D).Expect(Errors.FloatOutOfRange);
            t.Load("1.1e3_1_3_2_1_3_2_1_3_5_4_6_5_3_1_3_2").Read(Double.PositiveInfinity).Expect(Errors.FloatOutOfRange);
            t.Load("4.94065645841247e-324").Read(Double.Epsilon);
            t.Load("1_2.4_5e2_2").Read(12.45e22);
            t.Load("1._1").Read(1);
            t.Load("1.").Read(1);

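            // Edge cases around '.', '_' and base prefixes; most recover with the diagnostics asserted via Expect: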
            t.Load("122312.   1212").Read(122312);
            t.Load("01234e12").Read(Convert.ToInt32("1234", 8));
            t.Load("12__1212").Read(12).Expect(Errors.TrailingUnderscoreInNumber);
            t.Load("123_.123").Read(123).Expect(Errors.TrailingUnderscoreInNumber);
            t.Load("08").Read(8).Expect(Errors.IllegalOctalDigit);
            t.Load("0_8").Read(0).Expect(Errors.TrailingUnderscoreInNumber);
            t.Load("0_x").Read(0).Expect(Errors.TrailingUnderscoreInNumber);
            t.Load("0_").Read(0).Expect(Errors.TrailingUnderscoreInNumber);
            t.Load("0x_").Read(0).Expect(Errors.NumericLiteralWithoutDigits);
            t.Load("0x").Read(0).Expect(Errors.NumericLiteralWithoutDigits);
            t.Load("0x_1").Read(0).Expect(Errors.NumericLiteralWithoutDigits);

            t.Load(".20").Read((Tokens)'.', TokenValueType.None).Expect(Errors.NoFloatingLiteral);
            t.Load("1.e").Read(1);
            t.Load("1.2e").Read(1.2D).Expect(Errors.TrailingEInNumber);
            t.Load("1e").Read(1).Expect(Errors.TrailingEInNumber);
            t.Load("1e-").Read(1).Expect(Errors.TrailingMinusInNumber);
            t.Load("1e+").Read(1).Expect(Errors.TrailingPlusInNumber);

            t.Load("00.0").Read(0).Expect(Errors.NoFloatingLiteral);
            t.Load("00.foo").Read(0);
            t.Load("00.e-1").Read(0);
            t.Load("00.foo").Read(0);
            t.Load("0x.0").Read(0).Expect(Errors.NumericLiteralWithoutDigits, Errors.NoFloatingLiteral);
            t.Load("0x.foo").Read(0).Expect(Errors.NumericLiteralWithoutDigits);

            t.Expect();
        }