/// <summary>
/// Splits <paramref name="component"/> into a token stream by greedily matching
/// the longest registered tokenizer key at the current position; characters that
/// match no tokenizer become single-character <c>Unknown</c> tokens.
/// </summary>
/// <param name="component">The raw text to tokenize; may be empty.</param>
/// <returns>The tokens in source order; an empty sequence for an empty input.</returns>
private IEnumerable<Token> ToTokens(string component)
{
    // Rewritten iteratively: the original recursed once per consumed prefix,
    // which risks a StackOverflowException on long inputs, and it enumerated
    // the deferred 'matchingTokenizers' query twice (Any(), then First()).
    var tokens = new List<Token>();
    var index = 0;

    while (index < component.Length)
    {
        var remaining = component.Substring(index);

        // Materialize the matches once so the LINQ query is not re-evaluated.
        // Ordinal comparison: tokenizer keys are syntax, not linguistic text.
        var matches = Tokenizers
            .Where(tokenizer => remaining.StartsWith(tokenizer.Key, StringComparison.Ordinal))
            .ToList();

        if (matches.Count == 0)
        {
            // No tokenizer applies: emit the single character as Unknown and advance.
            tokens.Add(new Token { Type = TokenType.Unknown, Symbol = remaining.Substring(0, 1) });
            index++;
        }
        else
        {
            // Longest key wins so multi-character symbols beat their own prefixes.
            var longest = matches.OrderByDescending(tokenizer => tokenizer.Key.Length).First();
            tokens.Add(longest.Value(remaining.Substring(0, longest.Key.Length)));
            index += longest.Key.Length;
        }
    }

    return tokens;
}
/// <summary>A quoted section must stay one token even when it contains the delimiter.</summary>
public void ShouldNotDelimitWhenInQuotes()
{
    var quoted = "\"hello;world\"";

    var actual = Tokenizers.TokenizeString(quoted).ToArray();

    var expected = new[] { "\"hello;world\"" };
    Assert.That(actual, Is.EqualTo(expected));
}
/// <summary>
/// Interior whitespace inside quotes (around the ';') is kept, while the blank
/// just before the closing quote and the trailing blank after it are dropped.
/// </summary>
public void ShouldHandleSignificantWhiteSpace()
{
    var raw = "\"By ; e \" ";

    var tokens = Tokenizers.TokenizeString(raw);

    Assert.That(tokens, Is.EqualTo(new[] { "\"By ; e\"" }));
}
/// <summary>A single ';' splits the input into two plain tokens.</summary>
public void TestBasicTokenization()
{
    var expected = new[] { "hello", "world" };

    var actual = Tokenizers.TokenizeString("hello;world");

    Assert.Equal(expected, actual);
}
/// <summary>The delimiter inside a quoted section must not split the token.</summary>
public void ShouldNotDelimitWhenInQuotes()
{
    var quoted = "\"hello;world\"";

    var actual = Tokenizers.TokenizeString(quoted);

    Assert.Equal(new[] { "\"hello;world\"" }, actual);
}
/// <summary>
/// Spaces inside a token survive tokenization; only the leading space before
/// the first token is stripped.
/// </summary>
public void ShouldNotIgnoreWhiteSpaceInToken()
{
    var raw = " hell o;worl d";
    var expected = new[] { "hell o", "worl d" };

    var actual = Tokenizers.TokenizeString(raw, ';');

    Assert.Equal(expected, actual);
}
/// <summary>Whitespace preceding a token is discarded.</summary>
public void ShouldIgnoreInsignificantWhiteSpacePreToken()
{
    var padded = " hello; world";

    var actual = Tokenizers.TokenizeString(padded);

    Assert.Equal(new[] { "hello", "world" }, actual);
}
/// <summary>Padding just inside the opening/closing quotes is trimmed away.</summary>
public void ShouldRemoveInsignificantWhiteSpaceInsideQuotes()
{
    var raw = "hello;\" world \"";

    var actual = Tokenizers.TokenizeString(raw).ToArray();

    var expected = new[] { "hello", "\"world\"" };
    Assert.That(actual, Is.EqualTo(expected));
}
/// <summary>
/// Mixed scenario: CR/LF characters and padding outside quotes are removed,
/// while spaces and the delimiter inside quotes are preserved.
/// </summary>
public void ShouldHandleComplexScenario()
{
    var messy = "\r Hel\nlo ; \" Wo \rrld \" ; \"By ; e \" ";
    var expected = new[] { "Hello", "\"Wo rld\"", "\"By ; e\"" };

    var actual = Tokenizers.TokenizeString(messy, ';');

    Assert.That(actual, Is.EqualTo(expected));
}
/// <summary>Interior spaces belong to the token and must not be stripped.</summary>
public void ShouldNotIgnoreWhiteSpaceInToken()
{
    var raw = " hell o;worl d";

    var actual = Tokenizers.TokenizeString(raw, ';').ToArray();

    Assert.That(actual, Is.EqualTo(new[] { "hell o", "worl d" }));
}
/// <summary>Whitespace following a token (before the delimiter or end) is discarded.</summary>
public void ShouldIgnoreInsignificantWhiteSpacePostToken()
{
    var padded = " hello ;world ";

    var actual = Tokenizers.TokenizeString(padded).ToArray();

    var expected = new[] { "hello", "world" };
    Assert.That(actual, Is.EqualTo(expected));
}
/// <summary>A trailing delimiter must not produce an empty trailing token.</summary>
public void TestBasicTokenizationWithTrailingToken()
{
    var raw = "hello;world;";

    var actual = Tokenizers.TokenizeString(raw).ToArray();

    Assert.That(actual, Is.EqualTo(new[] { "hello", "world" }));
}
/// <summary>
/// "identifier = "value"" with padding: padding around '=' is dropped, interior
/// spaces are kept, and the quoted value is unwrapped with IsQuoted set.
/// </summary>
public void ShouldHandleSimpleIdentifierAndValueWhileIgnoringInsignificantWhiteSpaceWithQuotes()
{
    var raw = " hel lo = \"wor ld\" ";

    var result = Tokenizers.TokenizeIdentifiers(raw);

    Assert.That(result.Identifier, Is.EqualTo("hel lo"));
    Assert.That(result.IsQuoted, Is.True);
    Assert.That(result.Value, Is.EqualTo("wor ld"));
}
/// <summary>
/// A real HPKP directive: the base64 pin (including '=' padding) must come back
/// intact as the unquoted value.
/// </summary>
public void IdentiferAndValueForHPKP()
{
    var directive = "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"";

    var result = Tokenizers.TokenizeIdentifiers(directive);

    Assert.That(result.Identifier, Is.EqualTo("pin-sha256"));
    Assert.That(result.IsQuoted, Is.True);
    Assert.That(result.Value, Is.EqualTo("jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8="));
}
/// <summary>
/// Unquoted "identifier = value" with padding: padding around '=' is dropped,
/// interior spaces are kept, and IsQuoted is false.
/// </summary>
public void ShouldHandleSimpleIdentifierAndValueWhileIgnoringInsignificantWhiteSpace()
{
    var raw = " hel lo = wor ld ";

    var result = Tokenizers.TokenizeIdentifiers(raw);

    Assert.Equal("hel lo", result.Identifier);
    Assert.False(result.IsQuoted);
    Assert.Equal("wor ld", result.Value);
}
/// <summary>The simplest "key=value" pair splits into an unquoted identifier/value.</summary>
public void ShouldHandleSimpleIdentifierAndValue()
{
    var pair = "hello=world";

    var result = Tokenizers.TokenizeIdentifiers(pair);

    Assert.That(result.Identifier, Is.EqualTo("hello"));
    Assert.That(result.IsQuoted, Is.False);
    Assert.That(result.Value, Is.EqualTo("world"));
}
/// <summary>An identifier with no '=' yields a null value and is not quoted.</summary>
public void ShouldHandleValuelessIdentifier()
{
    var bareIdentifier = "hello";

    var result = Tokenizers.TokenizeIdentifiers(bareIdentifier);

    Assert.That(result.Identifier, Is.EqualTo("hello"));
    Assert.That(result.IsQuoted, Is.False);
    Assert.That(result.Value, Is.Null);
}
/// <summary>A bare identifier (no '=') produces a null, unquoted value.</summary>
public void ShouldHandleValuelessIdentifier()
{
    var bareIdentifier = "hello";

    var result = Tokenizers.TokenizeIdentifiers(bareIdentifier);

    Assert.Equal("hello", result.Identifier);
    Assert.False(result.IsQuoted);
    Assert.Null(result.Value);
}
/// <summary>
/// Serializes the "analysis" settings object: tokenizers, filters, char filters
/// and analyzers, in that fixed order, wrapped in a single JSON object.
/// </summary>
/// <param name="elasticsearchCrudJsonWriter">Destination writer; must be positioned where the "analysis" property belongs.</param>
public virtual void WriteJson(ElasticsearchCrudJsonWriter elasticsearchCrudJsonWriter)
{
    var jsonWriter = elasticsearchCrudJsonWriter.JsonWriter;

    jsonWriter.WritePropertyName("analysis");
    jsonWriter.WriteStartObject();

    // Each sub-section writes its own named property into the open object.
    Tokenizers.WriteJson(elasticsearchCrudJsonWriter);
    Filters.WriteJson(elasticsearchCrudJsonWriter);
    CharFilters.WriteJson(elasticsearchCrudJsonWriter);
    Analyzer.WriteJson(elasticsearchCrudJsonWriter);

    jsonWriter.WriteEndObject();
}
/// <summary>
/// A full HPKP header tokenizes into one token per directive; quoted pins keep
/// their internal ';'-free base64 intact and the trailing ';' adds no token.
/// </summary>
public void ShouldSampleHpkpHeader()
{
    var header = "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"; pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"; pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"; max-age=5184000;";
    var expected = new[]
    {
        "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"",
        "pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"",
        "pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"",
        "max-age=5184000"
    };

    var actual = Tokenizers.TokenizeString(header).ToArray();

    Assert.That(actual, Is.EqualTo(expected));
}
/// <summary>
/// Serializes this index definition to JSON. Always emits "name"; "fields" is
/// emitted as an array when non-empty, otherwise as an explicit JSON null.
/// All other sections are omitted entirely when null or empty.
/// </summary>
void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
{
    writer.WriteStartObject();

    // "name" is always written, even if Name is null.
    writer.WritePropertyName("name");
    writer.WriteStringValue(Name);

    // NOTE(review): unlike every other collection below, an empty Fields
    // collection writes an explicit null rather than being skipped — presumably
    // required by the service contract; confirm before changing.
    if (Fields.Any())
    {
        writer.WritePropertyName("fields");
        writer.WriteStartArray();
        foreach (var item in Fields)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }
    else
    {
        writer.WriteNull("fields");
    }

    // Optional collections: only emitted when non-null AND non-empty.
    if (ScoringProfiles != null && ScoringProfiles.Any())
    {
        writer.WritePropertyName("scoringProfiles");
        writer.WriteStartArray();
        foreach (var item in ScoringProfiles)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }

    // Optional scalars/objects: emitted when non-null.
    if (DefaultScoringProfile != null)
    {
        writer.WritePropertyName("defaultScoringProfile");
        writer.WriteStringValue(DefaultScoringProfile);
    }
    if (CorsOptions != null)
    {
        writer.WritePropertyName("corsOptions");
        writer.WriteObjectValue(CorsOptions);
    }
    if (Suggesters != null && Suggesters.Any())
    {
        writer.WritePropertyName("suggesters");
        writer.WriteStartArray();
        foreach (var item in Suggesters)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }
    if (Analyzers != null && Analyzers.Any())
    {
        writer.WritePropertyName("analyzers");
        writer.WriteStartArray();
        foreach (var item in Analyzers)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }
    if (Tokenizers != null && Tokenizers.Any())
    {
        writer.WritePropertyName("tokenizers");
        writer.WriteStartArray();
        foreach (var item in Tokenizers)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }
    if (TokenFilters != null && TokenFilters.Any())
    {
        writer.WritePropertyName("tokenFilters");
        writer.WriteStartArray();
        foreach (var item in TokenFilters)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }
    if (CharFilters != null && CharFilters.Any())
    {
        writer.WritePropertyName("charFilters");
        writer.WriteStartArray();
        foreach (var item in CharFilters)
        {
            writer.WriteObjectValue(item);
        }
        writer.WriteEndArray();
    }
    if (EncryptionKey != null)
    {
        writer.WritePropertyName("encryptionKey");
        writer.WriteObjectValue(EncryptionKey);
    }
    if (Similarity != null)
    {
        writer.WritePropertyName("similarity");
        writer.WriteObjectValue(Similarity);
    }

    // Concurrency token uses the OData-reserved property name.
    if (_etag != null)
    {
        writer.WritePropertyName("@odata.etag");
        writer.WriteStringValue(_etag);
    }

    writer.WriteEndObject();
}
/// <summary>An opening quote that is never closed must fail tokenization.</summary>
public void ShouldThrowExceptionOnMalformedQuoting()
{
    var unterminated = "\"hello";

    // ToArray() forces enumeration so the deferred tokenizer actually runs.
    Assert.Throws<InvalidOperationException>(() => Tokenizers.TokenizeString(unterminated).ToArray());
}
/// <summary>A stray closing quote with no opening quote must fail tokenization.</summary>
public void ShouldThrowExceptionOnMalformedQuotingAtEnd()
{
    var stray = "hello\"";

    // ToArray() forces enumeration so the deferred tokenizer actually runs.
    Assert.That(() => Tokenizers.TokenizeString(stray).ToArray(), Throws.TypeOf<InvalidOperationException>());
}
/// <summary>A value with no identifier before the '=' is rejected.</summary>
public void ShouldThrowErrorOnMissingKey()
{
    var keyless = "=hello";

    Assert.That(() => Tokenizers.TokenizeIdentifiers(keyless), Throws.TypeOf<InvalidOperationException>());
}