Example no. 1 (score: 0)
        public void ShouldHandleSignificantWhiteSpace()
        {
            // Whitespace between visible characters inside quotes is significant;
            // the expectation shows only the space before the closing quote is trimmed.
            var source   = "\"By  ;   e \" ";
            var expected = new[] { "\"By  ;   e\"" };

            var actual = Tokenizers.TokenizeString(source);

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 2 (score: 0)
        public void ShouldNotDelimitWhenInQuotes()
        {
            // A delimiter character appearing inside quotes must not split the token.
            var source   = "\"hello;world\"";
            var expected = new[] { "\"hello;world\"" };

            var actual = Tokenizers.TokenizeString(source).ToArray();

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 3 (score: 0)
        public void ShouldHandleComplexScenario()
        {
            // Combined case with an explicit ';' delimiter: per the expected values,
            // line breaks are stripped from tokens, quoted runs of spaces collapse,
            // and quoted delimiters with significant whitespace are preserved.
            var source   = "\r   Hel\nlo  ;  \"   Wo \rrld  \"  ;  \"By  ;   e \" ";
            var expected = new[] { "Hello", "\"Wo rld\"", "\"By  ;   e\"" };

            var actual = Tokenizers.TokenizeString(source, ';');

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 4 (score: 0)
        public void ShouldRemoveInsignificantWhiteSpaceInsideQuotes()
        {
            // Padding between the quotes and the quoted word is trimmed away.
            var source   = "hello;\"    world    \"";
            var expected = new[] { "hello", "\"world\"" };

            var actual = Tokenizers.TokenizeString(source).ToArray();

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 5 (score: 0)
        public void ShouldIgnoreInsignificantWhiteSpacePostToken()
        {
            // Whitespace surrounding a token (outside quotes) is not part of it.
            var source   = "   hello    ;world    ";
            var expected = new[] { "hello", "world" };

            var actual = Tokenizers.TokenizeString(source).ToArray();

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 6 (score: 0)
        public void ShouldNotIgnoreWhiteSpaceInToken()
        {
            // Whitespace flanked by non-space characters belongs to the token
            // and must survive tokenization intact.
            var source   = "   hell    o;worl    d";
            var expected = new[] { "hell    o", "worl    d" };

            var actual = Tokenizers.TokenizeString(source, ';').ToArray();

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 7 (score: 0)
        public void TestBasicTokenization()
        {
            // Simplest case: two plain tokens separated by the default delimiter.
            var source   = "hello;world";
            var expected = new[] { "hello", "world" };

            var actual = Tokenizers.TokenizeString(source);

            Assert.Equal(expected, actual);
        }
Example no. 8 (score: 0)
        public void TestBasicTokenizationWithTrailingToken()
        {
            // A trailing delimiter must not produce an empty final token.
            var source   = "hello;world;";
            var expected = new[] { "hello", "world" };

            var actual = Tokenizers.TokenizeString(source).ToArray();

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 9 (score: 0)
        public void ShouldNotDelimitWhenInQuotes()
        {
            // A delimiter character appearing inside quotes must not split the token.
            var source   = "\"hello;world\"";
            var expected = new[] { "\"hello;world\"" };

            var actual = Tokenizers.TokenizeString(source);

            Assert.Equal(expected, actual);
        }
Example no. 10 (score: 0)
        public void ShouldNotIgnoreWhiteSpaceInToken()
        {
            // Whitespace embedded between non-space characters is part of the token.
            var source   = "   hell    o;worl    d";
            var expected = new[] { "hell    o", "worl    d" };

            var actual = Tokenizers.TokenizeString(source, ';');

            Assert.Equal(expected, actual);
        }
Example no. 11 (score: 0)
        public void ShouldIgnoreInsignificantWhiteSpacePreToken()
        {
            // Leading whitespace before each token (outside quotes) is discarded.
            var source   = "   hello;   world";
            var expected = new[] { "hello", "world" };

            var actual = Tokenizers.TokenizeString(source);

            Assert.Equal(expected, actual);
        }
Example no. 12 (score: 0)
        public void ShouldSampleHpkpHeader()
        {
            // Real-world sample: an HTTP Public-Key-Pins header value with
            // quoted base64 pins and a trailing ';' that must not yield an
            // empty final token.
            var source = "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"; pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"; pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"; max-age=5184000;";
            var expected = new[]
            {
                "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"",
                "pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"",
                "pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"",
                "max-age=5184000"
            };

            var actual = Tokenizers.TokenizeString(source).ToArray();

            Assert.That(actual, Is.EqualTo(expected));
        }
Example no. 13 (score: 0)
        public void ShouldThrowExceptionOnMalformedQuotingAtEnd()
        {
            // An unmatched quote at the end of input must be rejected.
            // ToArray() forces enumeration so deferred tokenization actually runs.
            var source = "hello\"";

            Assert.That(() => Tokenizers.TokenizeString(source).ToArray(),
                        Throws.TypeOf<InvalidOperationException>());
        }
Example no. 14 (score: 0)
        public void ShouldThrowExceptionOnMalformedQuoting()
        {
            // An opening quote that is never closed must be rejected.
            // ToArray() forces enumeration so deferred tokenization actually runs.
            var source = "\"hello";

            Assert.Throws<InvalidOperationException>(
                () => Tokenizers.TokenizeString(source).ToArray());
        }