/// <summary>
/// Verifies that a complete CSS rule tokenizes into the exact expected token sequence,
/// covering hash names, class selectors, dimensions, percentages and delimiters.
/// </summary>
public void CssTokenize()
{
    const string value = @"#someid .someclass { font-size: 20px; width: 50%; text-align: center; -webkit-border-radius: 5px; }";
    StringTokenizer t = new StringTokenizer(value);
    List<TokenTypeEnum> expectedTokens = new List<TokenTypeEnum>
    {
        TokenTypeEnum.HashName, TokenTypeEnum.Whitespace, TokenTypeEnum.Operator, TokenTypeEnum.Identifier,
        TokenTypeEnum.Whitespace, TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace, TokenTypeEnum.Identifier,
        TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace, TokenTypeEnum.Dimension, TokenTypeEnum.Delimiter,
        TokenTypeEnum.Whitespace, TokenTypeEnum.Identifier, TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace,
        TokenTypeEnum.Percentage, TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace, TokenTypeEnum.Identifier,
        TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace, TokenTypeEnum.Identifier, TokenTypeEnum.Delimiter,
        TokenTypeEnum.Whitespace, TokenTypeEnum.Identifier, TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace,
        TokenTypeEnum.Dimension, TokenTypeEnum.Delimiter, TokenTypeEnum.Whitespace, TokenTypeEnum.Delimiter
    };

    int i = 0;
    while (t.MoveNext())
    {
        var token = t.Current;
        Assert.Equal(expectedTokens[i++], token.Type);
    }

    // Bug fix: previously a prematurely-stopped tokenizer passed silently because
    // only the tokens it emitted were compared. Assert the full sequence was produced.
    Assert.Equal(expectedTokens.Count, i);
}
/// <summary>
/// Verifies that the CDATA opening marker "&lt;!--" is recognized as a single CDataOpen token.
/// </summary>
public void CDataStartTokenize()
{
    const string value = @"<!--";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one CDataOpen token with the original text.
        Assert.Equal(TokenTypeEnum.CDataOpen, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a run of mixed whitespace (spaces, tab, CRLF) tokenizes as Whitespace.
/// </summary>
public void WhitespaceTokenize()
{
    const string value = " \t \r\n";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        Assert.Equal(TokenTypeEnum.Whitespace, tokenizer.Current.Type);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a "$"-prefixed name is recognized as a single Variable token.
/// </summary>
public void VariableTokenize()
{
    const string value = "$variableName";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one Variable token with the original text.
        Assert.Equal(TokenTypeEnum.Variable, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a url(...) form with a single-quoted argument is recognized as one Uri token.
/// </summary>
public void UrlWithSingleQuotesTokenize()
{
    const string value = "url('someurl')";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one Uri token with the original text.
        Assert.Equal(TokenTypeEnum.Uri, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a "//" single-line comment is recognized as one Comment token.
/// </summary>
public void SinglelineCommentsTokenize()
{
    const string value = @"// Singleline comment";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one Comment token with the original text.
        Assert.Equal(TokenTypeEnum.Comment, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a single-quoted literal is recognized as one String token.
/// </summary>
public void SingleQuotesStringTokenize()
{
    const string value = "'test'";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one String token with the original text.
        Assert.Equal(TokenTypeEnum.String, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that each character in "+-.*/" tokenizes as an Operator token.
/// </summary>
public void OperatorTokenize()
{
    const string value = @"+-.*/";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // Every emitted token should be classified as an operator.
        Assert.Equal(TokenTypeEnum.Operator, tokenizer.Current.Type);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a decimal number followed by '%' is recognized as one Percentage token.
/// </summary>
public void PercentageTokenize()
{
    const string value = @"123.45%";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one Percentage token with the original text.
        Assert.Equal(TokenTypeEnum.Percentage, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that with <c>TokenizerOptions.MultilineCommentBeginEndTokens</c> the comment
/// delimiters are emitted as distinct start/end tokens and the interior is tokenized normally.
/// </summary>
/// <remarks>
/// NOTE(review): method name has a typo ("Mutliline" -> "Multiline"); kept as-is to avoid
/// changing the externally visible test name.
/// </remarks>
public void MutlilineCommentsTokensTokenize()
{
    const string value = @"/*** ahoj **/";
    StringTokenizer t = new StringTokenizer(value, 0, TokenizerOptions.MultilineCommentBeginEndTokens);
    List<TokenTypeEnum> expectedTokens = new List<TokenTypeEnum>
    {
        TokenTypeEnum.MultiLineCommentStart,
        TokenTypeEnum.Operator,
        TokenTypeEnum.Operator,
        TokenTypeEnum.Whitespace,
        TokenTypeEnum.Identifier,
        TokenTypeEnum.Whitespace,
        TokenTypeEnum.Operator,
        TokenTypeEnum.MultiLineCommentEnd
    };

    int i = 0;
    while (t.MoveNext())
    {
        var token = t.Current;
        Assert.Equal(expectedTokens[i++], token.Type);
    }

    // Bug fix: previously a prematurely-stopped tokenizer passed silently because
    // only the tokens it emitted were compared. Assert the full sequence was produced.
    Assert.Equal(expectedTokens.Count, i);
}
/// <summary>
/// Verifies that a decimal number with a unit suffix is recognized as one Dimension token.
/// </summary>
public void NumberWithUnitTokenize()
{
    const string value = @"123.45px";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one Dimension token with the original text.
        Assert.Equal(TokenTypeEnum.Dimension, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that "/*" alone is emitted as a MultiLineCommentStart token when the
/// MultilineCommentBeginEndTokens option is enabled.
/// </summary>
public void MultilineCommentStartTokensTokenize()
{
    const string value = "/*";
    StringTokenizer tokenizer = new StringTokenizer(value, 0, TokenizerOptions.MultilineCommentBeginEndTokens);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one comment-start token with the original text.
        Assert.Equal(TokenTypeEnum.MultiLineCommentStart, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that an "@"-prefixed keyword is recognized as one AtIdentifier token.
/// </summary>
public void ImportClausuleTokenize()
{
    const string value = @"@import";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one AtIdentifier token with the original text.
        Assert.Equal(TokenTypeEnum.AtIdentifier, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that a "#"-prefixed id selector is recognized as one HashName token.
/// </summary>
public void HashTokenize()
{
    const string value = "#someid";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one HashName token with the original text.
        Assert.Equal(TokenTypeEnum.HashName, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that an identifier immediately followed by '(' is recognized as one Function token.
/// </summary>
public void FunctionTokenize()
{
    const string value = "functionName(";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // The whole input should come back as one Function token with the original text.
        Assert.Equal(TokenTypeEnum.Function, tokenizer.Current.Type);
        Assert.Equal(value, tokenizer.Current.Text);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}
/// <summary>
/// Verifies that each character in "{}()[]:;," tokenizes as a Delimiter token.
/// </summary>
public void DelimitersTokenize()
{
    const string value = @"{}()[]:;,";
    StringTokenizer tokenizer = new StringTokenizer(value);

    bool sawToken = false;
    while (tokenizer.MoveNext())
    {
        // Every emitted token should be classified as a delimiter.
        Assert.Equal(TokenTypeEnum.Delimiter, tokenizer.Current.Type);
        sawToken = true;
    }

    // Guard against the tokenizer producing nothing at all.
    Assert.True(sawToken);
}