Example #1
        public void ShouldHandleSignificantWhiteSpace()
        {
            var input          = "\"By  ;   e \" ";
            var expectedTokens = new[] { "\"By  ;   e\"" };

            Assert.That(Tokenizers.TokenizeString(input), Is.EqualTo(expectedTokens));
        }
Example #2
        public void ShouldNotDelimitWhenInQuotes()
        {
            var input          = "\"hello;world\"";
            var expectedTokens = new[] { "\"hello;world\"" };

            Assert.That(Tokenizers.TokenizeString(input).ToArray(), Is.EqualTo(expectedTokens));
        }
Example #3
        public void ShouldHandleComplexScenario()
        {
            var input          = "\r   Hel\nlo  ;  \"   Wo \rrld  \"  ;  \"By  ;   e \" ";
            var expectedTokens = new[] { "Hello", "\"Wo rld\"", "\"By  ;   e\"" };

            Assert.That(Tokenizers.TokenizeString(input, ';'), Is.EqualTo(expectedTokens));
        }
Example #4
        public void ShouldRemoveInsignificantWhiteSpaceInsideQuotes()
        {
            var input          = "hello;\"    world    \"";
            var expectedTokens = new[] { "hello", "\"world\"" };

            Assert.That(Tokenizers.TokenizeString(input).ToArray(), Is.EqualTo(expectedTokens));
        }
Example #5
        public void ShouldIgnoreInsignificantWhiteSpacePostToken()
        {
            var input          = "   hello    ;world    ";
            var expectedTokens = new[] { "hello", "world" };

            Assert.That(Tokenizers.TokenizeString(input).ToArray(), Is.EqualTo(expectedTokens));
        }
Example #6
        public void ShouldNotIgnoreWhiteSpaceInToken()
        {
            var input          = "   hell    o;worl    d";
            var expectedTokens = new[] { "hell    o", "worl    d" };

            Assert.That(Tokenizers.TokenizeString(input, ';').ToArray(), Is.EqualTo(expectedTokens));
        }
Example #7
        public void TestBasicTokenization()
        {
            var input          = "hello;world";
            var expectedTokens = new[] { "hello", "world" };

            Assert.Equal(expectedTokens, Tokenizers.TokenizeString(input));
        }
Example #8
        public void TestBasicTokenizationWithTrailingToken()
        {
            var input          = "hello;world;";
            var expectedTokens = new[] { "hello", "world" };

            Assert.That(Tokenizers.TokenizeString(input).ToArray(), Is.EqualTo(expectedTokens));
        }
Example #9
        public void ShouldNotDelimitWhenInQuotes()
        {
            var input          = "\"hello;world\"";
            var expectedTokens = new[] { "\"hello;world\"" };

            Assert.Equal(expectedTokens, Tokenizers.TokenizeString(input));
        }
Example #10
        public void ShouldNotIgnoreWhiteSpaceInToken()
        {
            var input          = "   hell    o;worl    d";
            var expectedTokens = new[] { "hell    o", "worl    d" };

            Assert.Equal(expectedTokens, Tokenizers.TokenizeString(input, ';'));
        }
Example #11
        public void ShouldIgnoreInsignificantWhiteSpacePreToken()
        {
            var input          = "   hello;   world";
            var expectedTokens = new[] { "hello", "world" };

            Assert.Equal(expectedTokens, Tokenizers.TokenizeString(input));
        }
Example #12
        public void ShouldSampleHpkpHeader()
        {
            var input          = "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"; pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"; pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"; max-age=5184000;";
            var expectedTokens = new[]
            {
                "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"",
                "pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"",
                "pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"",
                "max-age=5184000"
            };

            Assert.That(Tokenizers.TokenizeString(input).ToArray(), Is.EqualTo(expectedTokens));
        }
Example #13
        public void ShouldThrowExceptionOnMalformedQuotingAtEnd()
        {
            var input = "hello\"";

            Assert.That(() => Tokenizers.TokenizeString(input).ToArray(), Throws.TypeOf<InvalidOperationException>());
        }
Example #14
        public void ShouldThrowExceptionOnMalformedQuoting()
        {
            var input = "\"hello";

            Assert.Throws<InvalidOperationException>(() => Tokenizers.TokenizeString(input).ToArray());
        }
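
The Tokenizers.TokenizeString implementation itself does not appear in these examples; only the class name, the method name, and the optional delimiter argument come from the tests above. As a hypothetical sketch of one way the asserted behaviour could be implemented (default ';' delimiter, lazy IEnumerable<string> result, line breaks discarded, whitespace trimmed around tokens and just inside quotes but kept within tokens, delimiters ignored inside quotes, empty trailing tokens dropped, InvalidOperationException on an unbalanced quote), the following is a sketch under those assumptions, not the project's actual code:

using System;
using System.Collections.Generic;
using System.Text;

public static class Tokenizers
{
    // Hypothetical sketch: behaviour reconstructed from the tests above,
    // not the repository's real implementation.
    public static IEnumerable<string> TokenizeString(string input, char delimiter = ';')
    {
        var token   = new StringBuilder();
        var pending = new StringBuilder();   // whitespace seen after token content, not yet committed

        void AppendContent(string content)
        {
            if (token.Length > 0) token.Append(pending.ToString());   // whitespace inside a token is significant
            pending.Clear();                                          // leading whitespace is not
            token.Append(content);
        }

        var i = 0;
        while (i < input.Length)
        {
            var c = input[i];

            if (c == '\r' || c == '\n') { i++; continue; }            // line breaks are never significant

            if (c == delimiter)
            {
                if (token.Length > 0) yield return token.ToString();  // drop empty (e.g. trailing) tokens
                token.Clear();
                pending.Clear();
                i++;
                continue;
            }

            if (c == '"')
            {
                var close = input.IndexOf('"', i + 1);
                if (close < 0) throw new InvalidOperationException("Unbalanced quote in input.");

                // Keep the quotes, strip line breaks, and trim the edges of the quoted content.
                var inner = input.Substring(i + 1, close - i - 1)
                                 .Replace("\r", string.Empty)
                                 .Replace("\n", string.Empty)
                                 .Trim(' ');
                AppendContent("\"" + inner + "\"");
                i = close + 1;
                continue;
            }

            if (char.IsWhiteSpace(c)) { pending.Append(c); i++; continue; }

            AppendContent(c.ToString());
            i++;
        }

        if (token.Length > 0) yield return token.ToString();
    }
}

Under this sketch, the Example #4 input produces the tokens hello and "world" (quotes preserved), and the exceptions in Examples #13 and #14 only surface once the lazy sequence is enumerated, which is why those tests call ToArray() inside the assertion.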