/// <summary>
/// A token that carries no trailing data segment should still parse to a
/// non-null token instance.
/// </summary>
public void CanParseTokenWithoutData()
{
    // Arrange: tokenizer that only knows the target token type.
    var sut = new TransformationTextTokenizer(_services);
    sut.WithTokenType<TargetToken>();

    // Act
    var parsed = sut.ParseToken(0, TokenWithoutData);

    // Assert
    Assert.NotNull(parsed);
}
/// <summary>
/// Optional token data should be honoured: the parsed colour token reports
/// that it uses a pattern.
/// </summary>
public void CanParseTokenWithOptionalData()
{
    // Arrange: tokenizer that only knows the colour token type.
    var sut = new TransformationTextTokenizer(_services);
    sut.WithTokenType<ColourToken>();

    // Act
    var parsed = sut.ParseToken(0, TokenWithOptionalData) as ColourToken;

    // Assert — NotNull first, then the null-forgiving dereference is safe.
    Assert.NotNull(parsed);
    Assert.True(parsed!.UsePattern);
}
/// <summary>
/// Optional token data should populate the <c>Part</c> property of the
/// parsed colour token.
/// </summary>
/// <remarks>
/// Renamed from <c>CanParseTokenWithOptionalData</c>: a method with that
/// exact name already exists in this class (the <c>UsePattern</c> test),
/// which is a CS0111 duplicate-member compile error.
/// </remarks>
public void CanParseTokenWithOptionalDataPart()
{
    // Arrange: tokenizer that only knows the colour token type.
    var tokenizer = new TransformationTextTokenizer(_services);
    tokenizer.WithTokenType<ColourToken>();

    // Act
    var token = tokenizer.ParseToken(0, TokenWithOptionalData) as ColourToken;

    // Assert — NotNull first; the `!` matches the sibling test's style and
    // silences the nullable warning the original dereference produced.
    Assert.NotNull(token);
    Assert.Equal("base", token!.Part);
}
/// <summary>
/// The sample text should tokenize into exactly three tokens — one target
/// token followed by two colour tokens, in order of appearance.
/// </summary>
public void CanTokenizeText()
{
    // Arrange: tokenizer registered with both token types, fluently.
    var sut = new TransformationTextTokenizer(_services)
        .WithTokenType<TargetToken>()
        .WithTokenType<ColourToken>();

    // Act
    var result = sut.GetTokens(SampleText);

    // Assert
    Assert.Equal(3, result.Count);
    Assert.IsType<TargetToken>(result[0]);
    Assert.IsType<ColourToken>(result[1]);
    Assert.IsType<ColourToken>(result[2]);
}
/// <summary>
/// Each token extracted from the sample text should report the length of
/// its raw text span.
/// </summary>
public void ParsesTokenLengthCorrectly()
{
    // Arrange: tokenizer registered with both token types, fluently.
    var sut = new TransformationTextTokenizer(_services)
        .WithTokenType<TargetToken>()
        .WithTokenType<ColourToken>();

    // Act
    var result = sut.GetTokens(SampleText);

    // Assert — expected lengths come from the three tokens embedded in
    // SampleText (declared elsewhere in this class).
    Assert.Equal(9, result[0].Length);
    Assert.Equal(9, result[1].Length);
    Assert.Equal(14, result[2].Length);
}
/// <summary>
/// Each token extracted from the sample text should report the index at
/// which its raw text span begins.
/// </summary>
public void ParsesTokenStartIndexCorrectly()
{
    // Arrange: tokenizer registered with both token types, fluently.
    var sut = new TransformationTextTokenizer(_services)
        .WithTokenType<TargetToken>()
        .WithTokenType<ColourToken>();

    // Act
    var result = sut.GetTokens(SampleText);

    // Assert — expected offsets come from the token positions within
    // SampleText (declared elsewhere in this class).
    Assert.Equal(12, result[0].Start);
    Assert.Equal(28, result[1].Start);
    Assert.Equal(47, result[2].Start);
}