/// <summary>
/// Tokenizes one whitespace-separated instance of each token type, plus a
/// trailing comment, and checks they come back in order with correct values.
/// </summary>
public void OneOfEachSpaced()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream(" [ <64 65> << true /Name 1 3.14 >> ] %comment"));
    tokenizer.IgnoreComments = false;

    TokenArrayOpen arrayOpen = tokenizer.GetToken() as TokenArrayOpen;
    Assert.NotNull(arrayOpen);

    TokenStringHex hexString = tokenizer.GetToken() as TokenStringHex;
    Assert.NotNull(hexString);
    Assert.True(hexString.Raw == "64 65");

    TokenDictionaryOpen dictOpen = tokenizer.GetToken() as TokenDictionaryOpen;
    Assert.NotNull(dictOpen);

    TokenKeyword keyword = tokenizer.GetToken() as TokenKeyword;
    Assert.NotNull(keyword);
    Assert.True(keyword.Value == ParseKeyword.True);

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "Name");

    TokenInteger integer = tokenizer.GetToken() as TokenInteger;
    Assert.NotNull(integer);
    Assert.True(integer.Value == 1);

    TokenReal real = tokenizer.GetToken() as TokenReal;
    Assert.NotNull(real);
    Assert.True(real.Value == 3.14f);

    TokenDictionaryClose dictClose = tokenizer.GetToken() as TokenDictionaryClose;
    Assert.NotNull(dictClose);

    TokenArrayClose arrayClose = tokenizer.GetToken() as TokenArrayClose;
    Assert.NotNull(arrayClose);

    TokenComment comment = tokenizer.GetToken() as TokenComment;
    Assert.NotNull(comment);
    Assert.True(comment.Value == "%comment");

    // The stream is exhausted after the comment.
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>
/// A comment preceded by every whitespace character (NUL, TAB, LF, FF, CR)
/// is still recognized as a single comment token.
/// </summary>
public void CommentMiddle()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream("\x00\x09\x0A\x0C\x0D%comment"));
    tokenizer.IgnoreComments = false;

    TokenComment comment = tokenizer.GetToken() as TokenComment;
    Assert.NotNull(comment);
    Assert.True(comment.Value == "%comment");

    // Nothing follows the comment.
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>
/// Three newline-separated comments are returned as three distinct comment
/// tokens, each carrying its own text including the leading '%'.
/// </summary>
public void CommentThree()
{
    Tokenizer tokenizer = new Tokenizer(StringToStream("%one\n%two\n%three"));
    tokenizer.IgnoreComments = false;

    foreach (string expected in new[] { "%one", "%two", "%three" })
    {
        TokenComment comment = tokenizer.GetToken() as TokenComment;
        Assert.NotNull(comment);
        Assert.True(comment.Value == expected);
    }

    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>Accumulates the comment token's content via AppendEverything and counts it as one item.</summary>
public void Visit(TokenComment tok) { AppendEverything(tok); NbItems++; }
/// <summary>
/// Intentionally a no-op: comment styling is currently disabled. The
/// commented-out code below documents the previously intended behavior
/// (skip tokens outside the visible line range, then apply comment styling)
/// — NOTE(review): confirm whether it should be re-enabled or removed.
/// </summary>
public void Visit(TokenComment tok) { //if (tok.Line < FromLine || tok.Line > ToLine) return; //Npp.StyleText((int)TextStyle.Comment, tok.StartPosition, tok.EndPosition); }
/// <summary>
/// Tries to read the next token from the input, testing each token type in a
/// fixed priority order (comment first; bool before keyword; float before int).
/// The first matching token is appended to m_Tokens.
/// </summary>
/// <returns>true when some token type matched; false otherwise.</returns>
private bool ParseToken()
{
    // NOTE: all candidates are constructed before any TryToGet call, in this
    // exact order — preserved from the original, in case construction or the
    // failed-match path is order-sensitive.
    TokenComment tokenComment = new TokenComment(this);
    TokenKeyword tokenKeyword = new TokenKeyword(this);
    TokenLeftBracket tokenLeftBracket = new TokenLeftBracket(this);
    TokenRightBracket tokenRightBracket = new TokenRightBracket(this);
    TokenString tokenString = new TokenString(this);
    TokenComma tokenComma = new TokenComma(this);
    TokenFloat tokenFloat = new TokenFloat(this);
    TokenInt tokenInt = new TokenInt(this);
    TokenBool tokenBool = new TokenBool(this);
    TokenLeftCurlyBracket tokenLeftCurlyBracket = new TokenLeftCurlyBracket(this);
    TokenRightCurlyBracket tokenRightCurlyBracket = new TokenRightCurlyBracket(this);

    if (tokenComment.TryToGet())
    {
        this.m_Tokens.Add(tokenComment);
        return true;
    }
    if (tokenBool.TryToGet())
    {
        this.m_Tokens.Add(tokenBool);
        return true;
    }
    if (tokenKeyword.TryToGet())
    {
        this.m_Tokens.Add(tokenKeyword);
        return true;
    }
    if (tokenLeftBracket.TryToGet())
    {
        this.m_Tokens.Add(tokenLeftBracket);
        return true;
    }
    if (tokenRightBracket.TryToGet())
    {
        this.m_Tokens.Add(tokenRightBracket);
        return true;
    }
    if (tokenString.TryToGet())
    {
        this.m_Tokens.Add(tokenString);
        return true;
    }
    if (tokenComma.TryToGet())
    {
        this.m_Tokens.Add(tokenComma);
        return true;
    }
    if (tokenFloat.TryToGet())
    {
        this.m_Tokens.Add(tokenFloat);
        return true;
    }
    if (tokenInt.TryToGet())
    {
        this.m_Tokens.Add(tokenInt);
        return true;
    }
    if (tokenLeftCurlyBracket.TryToGet())
    {
        this.m_Tokens.Add(tokenLeftCurlyBracket);
        return true;
    }
    if (tokenRightCurlyBracket.TryToGet())
    {
        this.m_Tokens.Add(tokenRightCurlyBracket);
        return true;
    }

    // No token type matched at the current position.
    return false;
}
/// <summary>Applies the comment style to the span covered by the comment token.</summary>
public void Visit(TokenComment tok)
{
    int spanLength = tok.EndPosition - tok.StartPosition;
    SetStyling(spanLength, SciStyleId.Comment);
}