// Whitespace adjacent to the closing quote — both inside and outside the
// quotes — is trimmed, while the quoted delimiter character survives intact.
public void ShouldHandleSignificantWhiteSpace()
{
    const string input = "\"By ; e \" ";
    var expected = new[] { "\"By ; e\"" };

    var actual = Tokenizers.TokenizeString(input);

    Assert.That(actual, Is.EqualTo(expected));
}
// A delimiter character enclosed in double quotes must not split the token;
// the entire quoted run comes back as a single token.
public void ShouldNotDelimitWhenInQuotes()
{
    const string input = "\"hello;world\"";
    var expected = new[] { "\"hello;world\"" };

    var actual = Tokenizers.TokenizeString(input).ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
// Exercises several behaviors at once: CR/LF characters are stripped from
// unquoted text, whitespace around quoted sections is trimmed, and a
// delimiter inside quotes does not split the token.
public void ShouldHandleComplexScenario()
{
    const string input = "\r Hel\nlo ; \" Wo \rrld \" ; \"By ; e \" ";

    var actual = Tokenizers.TokenizeString(input, ';');

    var expected = new[] { "Hello", "\"Wo rld\"", "\"By ; e\"" };
    Assert.That(actual, Is.EqualTo(expected));
}
// Leading and trailing whitespace just inside the quote marks is treated as
// insignificant and removed from the quoted token.
public void ShouldRemoveInsignificantWhiteSpaceInsideQuotes()
{
    const string input = "hello;\" world \"";
    var expected = new[] { "hello", "\"world\"" };

    var actual = Tokenizers.TokenizeString(input).ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
// Whitespace after a token (before the delimiter or end of input) is
// insignificant and must be trimmed from the emitted tokens.
public void ShouldIgnoreInsignificantWhiteSpacePostToken()
{
    const string input = " hello ;world ";
    var expected = new[] { "hello", "world" };

    var actual = Tokenizers.TokenizeString(input).ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
// Interior whitespace is part of the token: only the whitespace at the
// token's edges is trimmed, never the spaces embedded within it.
public void ShouldNotIgnoreWhiteSpaceInToken()
{
    const string input = " hell o;worl d";
    var expected = new[] { "hell o", "worl d" };

    var actual = Tokenizers.TokenizeString(input, ';').ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
// Splits a simple delimited string into its two tokens using the default
// delimiter.
// NOTE(review): converted from xUnit's Assert.Equal to NUnit's constraint
// model (Assert.That / Is.EqualTo) for consistency with the other tests in
// this file, which all use NUnit assertions.
public void TestBasicTokenization()
{
    const string input = "hello;world";
    var expectedTokens = new[] { "hello", "world" };

    Assert.That(Tokenizers.TokenizeString(input), Is.EqualTo(expectedTokens));
}
// A trailing delimiter does not produce an extra empty token at the end of
// the sequence.
public void TestBasicTokenizationWithTrailingToken()
{
    const string input = "hello;world;";
    var expected = new[] { "hello", "world" };

    var actual = Tokenizers.TokenizeString(input).ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
// A quoted delimiter must not split the token.
// NOTE(review): converted from xUnit's Assert.Equal to NUnit's constraint
// model for consistency with the rest of this file. This test appears to
// duplicate the other ShouldNotDelimitWhenInQuotes — presumably the two
// live in different test classes; confirm and consolidate if not.
public void ShouldNotDelimitWhenInQuotes()
{
    const string input = "\"hello;world\"";
    var expectedTokens = new[] { "\"hello;world\"" };

    Assert.That(Tokenizers.TokenizeString(input), Is.EqualTo(expectedTokens));
}
// Interior whitespace is preserved; only edge whitespace is trimmed.
// NOTE(review): converted from xUnit's Assert.Equal to NUnit's constraint
// model for consistency with the rest of this file. This test appears to
// duplicate the other ShouldNotIgnoreWhiteSpaceInToken — presumably the two
// live in different test classes; confirm and consolidate if not.
public void ShouldNotIgnoreWhiteSpaceInToken()
{
    const string input = " hell o;worl d";
    var expectedTokens = new[] { "hell o", "worl d" };

    Assert.That(Tokenizers.TokenizeString(input, ';'), Is.EqualTo(expectedTokens));
}
// Whitespace before a token (after the delimiter or at start of input) is
// insignificant and must be trimmed from the emitted tokens.
// NOTE(review): converted from xUnit's Assert.Equal to NUnit's constraint
// model for consistency with the other tests in this file.
public void ShouldIgnoreInsignificantWhiteSpacePreToken()
{
    const string input = " hello; world";
    var expectedTokens = new[] { "hello", "world" };

    Assert.That(Tokenizers.TokenizeString(input), Is.EqualTo(expectedTokens));
}
// Realistic sample: an HTTP Public-Key-Pins header value. Quoted pin values
// containing '=' and '/' survive intact, inter-token whitespace is trimmed,
// and the trailing ';' yields no empty token.
public void ShouldSampleHpkpHeader()
{
    const string input =
        "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"; " +
        "pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"; " +
        "pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"; " +
        "max-age=5184000;";
    var expected = new[]
    {
        "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"",
        "pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"",
        "pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"",
        "max-age=5184000",
    };

    var actual = Tokenizers.TokenizeString(input).ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
// An unmatched opening quote at the end of the input is malformed and must
// raise InvalidOperationException when the result is enumerated (the
// explicit ToArray() forces enumeration of the — presumably lazy — sequence).
public void ShouldThrowExceptionOnMalformedQuotingAtEnd()
{
    const string input = "hello\"";

    Assert.That(
        () => Tokenizers.TokenizeString(input).ToArray(),
        Throws.TypeOf<InvalidOperationException>());
}
// A quote opened but never closed is malformed input and must raise
// InvalidOperationException when the result is enumerated (ToArray()
// forces enumeration of the — presumably lazy — sequence).
public void ShouldThrowExceptionOnMalformedQuoting()
{
    const string input = "\"hello";

    Assert.Throws<InvalidOperationException>(
        () => Tokenizers.TokenizeString(input).ToArray());
}