Esempio n. 1
0
        /// <summary>
        /// Recursively converts <paramref name="component"/> into a token stream by
        /// greedily matching the longest registered tokenizer key at the current
        /// position. Characters matched by no tokenizer are emitted as
        /// <see cref="TokenType.Unknown"/> tokens one character at a time.
        /// </summary>
        /// <param name="component">The remaining text to tokenize; may be empty.</param>
        /// <returns>The lazily-built sequence of tokens parsed from the input.</returns>
        private IEnumerable<Token> ToTokens(string component)
        {
            if (component.Length == 0)
            {
                return Enumerable.Empty<Token>();
            }

            // Materialize once: the previous deferred query was enumerated twice
            // (Any() and then OrderByDescending/First), re-running StartsWith over
            // every tokenizer. Ordinal comparison: keys are symbols, not prose (CA1310).
            var matchingTokenizers = Tokenizers
                                     .Where(tokenizer => component.StartsWith(tokenizer.Key, StringComparison.Ordinal))
                                     .ToList();

            if (matchingTokenizers.Count == 0)
            {
                // Nothing matches here: emit the first character as Unknown and recurse.
                return ToTokens(component.Substring(1))
                       .Prepend(new Token {
                    Type = TokenType.Unknown, Symbol = component.Substring(0, 1)
                });
            }
            else
            {
                // Longest match wins, so multi-character symbols beat their own prefixes.
                var longestMatchingTokenizer = matchingTokenizers
                                               .OrderByDescending(tokenizer => tokenizer.Key.Length)
                                               .First();

                var thisComponent = component.Substring(0, longestMatchingTokenizer.Key.Length);
                var nextComponent = component.Substring(longestMatchingTokenizer.Key.Length);
                return ToTokens(nextComponent)
                       .Prepend(longestMatchingTokenizer.Value(thisComponent));
            }
        }
Esempio n. 2
0
        /// <summary>A delimiter inside a quoted section must not split the token.</summary>
        public void ShouldNotDelimitWhenInQuotes()
        {
            const string quoted = "\"hello;world\"";

            var actual = Tokenizers.TokenizeString(quoted).ToArray();

            Assert.That(actual, Is.EqualTo(new[] { quoted }));
        }
Esempio n. 3
0
        /// <summary>
        /// Inner whitespace inside quotes is preserved; per the expected value, the
        /// space just before the closing quote and after it is dropped.
        /// </summary>
        public void ShouldHandleSignificantWhiteSpace()
        {
            var actual = Tokenizers.TokenizeString("\"By  ;   e \" ");

            Assert.That(actual, Is.EqualTo(new[] { "\"By  ;   e\"" }));
        }
Esempio n. 4
0
        /// <summary>A bare semicolon splits plain text into separate tokens.</summary>
        public void TestBasicTokenization()
        {
            var tokens = Tokenizers.TokenizeString("hello;world");

            Assert.Equal(new[] { "hello", "world" }, tokens);
        }
Esempio n. 5
0
        /// <summary>A delimiter inside a quoted section must not split the token.</summary>
        public void ShouldNotDelimitWhenInQuotes()
        {
            const string quoted = "\"hello;world\"";

            Assert.Equal(new[] { quoted }, Tokenizers.TokenizeString(quoted));
        }
Esempio n. 6
0
        /// <summary>Whitespace embedded inside a token is kept; leading whitespace is not.</summary>
        public void ShouldNotIgnoreWhiteSpaceInToken()
        {
            var tokens = Tokenizers.TokenizeString("   hell    o;worl    d", ';');

            Assert.Equal(new[] { "hell    o", "worl    d" }, tokens);
        }
Esempio n. 7
0
        /// <summary>Whitespace before a token is stripped from the output.</summary>
        public void ShouldIgnoreInsignificantWhiteSpacePreToken()
        {
            var tokens = Tokenizers.TokenizeString("   hello;   world");

            Assert.Equal(new[] { "hello", "world" }, tokens);
        }
Esempio n. 8
0
        /// <summary>Padding just inside the quote characters is trimmed away.</summary>
        public void ShouldRemoveInsignificantWhiteSpaceInsideQuotes()
        {
            var actual = Tokenizers.TokenizeString("hello;\"    world    \"").ToArray();

            Assert.That(actual, Is.EqualTo(new[] { "hello", "\"world\"" }));
        }
Esempio n. 9
0
        /// <summary>
        /// Mixed case: per the expected output, CR/LF and padding are removed from
        /// unquoted text, while quoted sections keep their inner spacing and delimiters.
        /// </summary>
        public void ShouldHandleComplexScenario()
        {
            const string input = "\r   Hel\nlo  ;  \"   Wo \rrld  \"  ;  \"By  ;   e \" ";

            var actual = Tokenizers.TokenizeString(input, ';');

            Assert.That(actual, Is.EqualTo(new[] { "Hello", "\"Wo rld\"", "\"By  ;   e\"" }));
        }
Esempio n. 10
0
        /// <summary>Whitespace embedded inside a token is kept; leading whitespace is not.</summary>
        public void ShouldNotIgnoreWhiteSpaceInToken()
        {
            var actual = Tokenizers.TokenizeString("   hell    o;worl    d", ';').ToArray();

            Assert.That(actual, Is.EqualTo(new[] { "hell    o", "worl    d" }));
        }
Esempio n. 11
0
        /// <summary>Whitespace after a token is stripped from the output.</summary>
        public void ShouldIgnoreInsignificantWhiteSpacePostToken()
        {
            var actual = Tokenizers.TokenizeString("   hello    ;world    ").ToArray();

            Assert.That(actual, Is.EqualTo(new[] { "hello", "world" }));
        }
Esempio n. 12
0
        /// <summary>A trailing delimiter does not produce an extra empty token.</summary>
        public void TestBasicTokenizationWithTrailingToken()
        {
            var actual = Tokenizers.TokenizeString("hello;world;").ToArray();

            Assert.That(actual, Is.EqualTo(new[] { "hello", "world" }));
        }
Esempio n. 13
0
        /// <summary>
        /// identifier = "value" with padding: padding around both sides is dropped,
        /// inner spaces survive, and the result is flagged as quoted.
        /// </summary>
        public void ShouldHandleSimpleIdentifierAndValueWhileIgnoringInsignificantWhiteSpaceWithQuotes()
        {
            var tokenized = Tokenizers.TokenizeIdentifiers("    hel lo  =   \"wor ld\"   ");

            Assert.That(tokenized.Identifier, Is.EqualTo("hel lo"));
            Assert.That(tokenized.IsQuoted, Is.True);
            Assert.That(tokenized.Value, Is.EqualTo("wor ld"));
        }
Esempio n. 14
0
        /// <summary>
        /// Real-world HPKP directive: a base64 value (containing '=') inside quotes
        /// is returned verbatim, minus the quotes, with IsQuoted set.
        /// </summary>
        public void IdentiferAndValueForHPKP()
        {
            var tokenized = Tokenizers.TokenizeIdentifiers("pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"");

            Assert.That(tokenized.Identifier, Is.EqualTo("pin-sha256"));
            Assert.That(tokenized.IsQuoted, Is.True);
            Assert.That(tokenized.Value, Is.EqualTo("jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8="));
        }
Esempio n. 15
0
        /// <summary>
        /// Unquoted identifier = value: surrounding padding is dropped, inner spaces
        /// survive, and the result is not flagged as quoted.
        /// </summary>
        public void ShouldHandleSimpleIdentifierAndValueWhileIgnoringInsignificantWhiteSpace()
        {
            var result = Tokenizers.TokenizeIdentifiers("    hel lo  =   wor ld   ");

            Assert.Equal("hel lo", result.Identifier);
            Assert.False(result.IsQuoted);
            Assert.Equal("wor ld", result.Value);
        }
Esempio n. 16
0
        /// <summary>The minimal identifier=value case splits on '='.</summary>
        public void ShouldHandleSimpleIdentifierAndValue()
        {
            var result = Tokenizers.TokenizeIdentifiers("hello=world");

            Assert.That(result.Identifier, Is.EqualTo("hello"));
            Assert.That(result.IsQuoted, Is.False);
            Assert.That(result.Value, Is.EqualTo("world"));
        }
Esempio n. 17
0
        /// <summary>An identifier with no '=' yields a null, unquoted Value.</summary>
        public void ShouldHandleValuelessIdentifier()
        {
            var result = Tokenizers.TokenizeIdentifiers("hello");

            Assert.That(result.Identifier, Is.EqualTo("hello"));
            Assert.That(result.IsQuoted, Is.False);
            Assert.That(result.Value, Is.Null);
        }
Esempio n. 18
0
        /// <summary>An identifier with no '=' yields a null, unquoted Value.</summary>
        public void ShouldHandleValuelessIdentifier()
        {
            var result = Tokenizers.TokenizeIdentifiers("hello");

            Assert.Equal("hello", result.Identifier);
            Assert.False(result.IsQuoted);
            Assert.Null(result.Value);
        }
Esempio n. 19
0
        /// <summary>
        /// Writes the "analysis" JSON object, delegating its contents to the
        /// Tokenizers, Filters, CharFilters and Analyzer writers in that order.
        /// </summary>
        /// <param name="elasticsearchCrudJsonWriter">Wrapper carrying the underlying JSON writer.</param>
        public virtual void WriteJson(ElasticsearchCrudJsonWriter elasticsearchCrudJsonWriter)
        {
            // Hoist the repeated .JsonWriter property access.
            var jsonWriter = elasticsearchCrudJsonWriter.JsonWriter;

            jsonWriter.WritePropertyName("analysis");
            jsonWriter.WriteStartObject();

            Tokenizers.WriteJson(elasticsearchCrudJsonWriter);
            Filters.WriteJson(elasticsearchCrudJsonWriter);
            CharFilters.WriteJson(elasticsearchCrudJsonWriter);
            Analyzer.WriteJson(elasticsearchCrudJsonWriter);

            jsonWriter.WriteEndObject();
        }
Esempio n. 20
0
        /// <summary>
        /// A full HPKP header value splits on unquoted semicolons; quoted pin values
        /// keep their quotes and the trailing empty directive is discarded.
        /// </summary>
        public void ShouldSampleHpkpHeader()
        {
            const string header = "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"; pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"; pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"; max-age=5184000;";

            var actual = Tokenizers.TokenizeString(header).ToArray();

            Assert.That(actual, Is.EqualTo(new[]
            {
                "pin-sha256=\"jV54RY1EPxNKwrQKIa5QMGDNPSbj3VwLPtXaHiEE8y8=\"",
                "pin-sha256=\"7qVfhXJFRlcy/9VpKFxHBuFzvQZSqajgfRwvsdx1oG8=\"",
                "pin-sha256=\"/sMEqQowto9yX5BozHLPdnciJkhDiL5+Ug0uil3DkUM=\"",
                "max-age=5184000"
            }));
        }
Esempio n. 21
0
 /// <summary>
 /// Serializes this index definition. "name" and "fields" are always emitted
 /// ("fields" as an explicit null when empty); every other property is written
 /// only when non-null and, for collections, non-empty.
 /// </summary>
 void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
 {
     writer.WriteStartObject();
     writer.WritePropertyName("name");
     writer.WriteStringValue(Name);

     if (Fields.Any())
     {
         WriteObjectArray(writer, "fields", Fields);
     }
     else
     {
         // Unlike the optional collections below, an empty "fields" is nulled, not omitted.
         writer.WriteNull("fields");
     }

     WriteObjectArrayIfAny(writer, "scoringProfiles", ScoringProfiles);
     if (DefaultScoringProfile != null)
     {
         writer.WritePropertyName("defaultScoringProfile");
         writer.WriteStringValue(DefaultScoringProfile);
     }
     if (CorsOptions != null)
     {
         writer.WritePropertyName("corsOptions");
         writer.WriteObjectValue(CorsOptions);
     }
     WriteObjectArrayIfAny(writer, "suggesters", Suggesters);
     WriteObjectArrayIfAny(writer, "analyzers", Analyzers);
     WriteObjectArrayIfAny(writer, "tokenizers", Tokenizers);
     WriteObjectArrayIfAny(writer, "tokenFilters", TokenFilters);
     WriteObjectArrayIfAny(writer, "charFilters", CharFilters);
     if (EncryptionKey != null)
     {
         writer.WritePropertyName("encryptionKey");
         writer.WriteObjectValue(EncryptionKey);
     }
     if (Similarity != null)
     {
         writer.WritePropertyName("similarity");
         writer.WriteObjectValue(Similarity);
     }
     if (_etag != null)
     {
         writer.WritePropertyName("@odata.etag");
         writer.WriteStringValue(_etag);
     }
     writer.WriteEndObject();
 }

 /// <summary>Writes <paramref name="items"/> unconditionally as a JSON array property.</summary>
 private static void WriteObjectArray<T>(Utf8JsonWriter writer, string name, IEnumerable<T> items)
 {
     writer.WritePropertyName(name);
     writer.WriteStartArray();
     foreach (var item in items)
     {
         writer.WriteObjectValue(item);
     }
     writer.WriteEndArray();
 }

 /// <summary>Writes the array property only when the collection is non-null and non-empty.</summary>
 private static void WriteObjectArrayIfAny<T>(Utf8JsonWriter writer, string name, IEnumerable<T> items)
 {
     if (items != null && items.Any())
     {
         WriteObjectArray(writer, name, items);
     }
 }
Esempio n. 22
0
        /// <summary>An opening quote with no closing quote must fail on enumeration.</summary>
        public void ShouldThrowExceptionOnMalformedQuoting()
        {
            Assert.Throws<InvalidOperationException>(
                () => Tokenizers.TokenizeString("\"hello").ToArray());
        }
Esempio n. 23
0
        /// <summary>A stray closing quote at the end of input must fail on enumeration.</summary>
        public void ShouldThrowExceptionOnMalformedQuotingAtEnd()
        {
            Assert.That(
                () => Tokenizers.TokenizeString("hello\"").ToArray(),
                Throws.TypeOf<InvalidOperationException>());
        }
Esempio n. 24
0
        /// <summary>'=' with nothing before it has no identifier and must be rejected.</summary>
        public void ShouldThrowErrorOnMissingKey()
        {
            Assert.That(
                () => Tokenizers.TokenizeIdentifiers("=hello"),
                Throws.TypeOf<InvalidOperationException>());
        }