public void Test_commentCharI()
{
    setTest("*comment \n / 8 'h' ");
    st.OrdinaryChar('/');
    st.CommentChar('*');
    assertEquals("nextToken() did not return the character / skipping the comments starting with *",
        47, st.NextToken());
    assertTrue("the next token returned should be the digit 8",
        st.NextToken() == StreamTokenizer.TokenType_Number && st.NumberValue == 8.0);
    assertTrue("the next token returned should be the quote character",
        st.NextToken() == 39 && st.StringValue.Equals("h", StringComparison.Ordinal));
}
public void Test_slashSlashComments_withSSOpen_NoComment()
{
    TextReader reader = new StringReader("// t");
    StreamTokenizer st = new StreamTokenizer(reader);
    st.SlashSlashComments = true;
    st.OrdinaryChar('/');
    assertEquals(StreamTokenizer.TokenType_EndOfStream, st.NextToken());
}
public void Test_slashSlashComments_withSSClosed()
{
    TextReader reader = new StringReader("// t");
    StreamTokenizer st = new StreamTokenizer(reader);
    st.SlashSlashComments = false;
    st.OrdinaryChar('/');
    assertEquals('/', st.NextToken());
    assertEquals('/', st.NextToken());
    assertEquals(StreamTokenizer.TokenType_Word, st.NextToken());
}
public void Test_nextToken()
{
    // SM.
    // J2N NOTE: The original test had \257 (octal), which is not supported in a
    // .NET string, so we convert it to decimal 175 here.
    // This also changes the semantics of the test, because in Java the original
    // expected the octal escape to register as a TokenType_Word.
    // So, as a result of the change above, we now expect a TokenType_Number.
    // Also, we don't need to escape single quotes in .NET.
    setTest("\r\n/* fje fje 43.4 f \r\n f g */ 456.459 \r\n"
        + "Hello / \r\n \r\n \n \r 175 Hi 'Hello World'");
    st.OrdinaryChar('/');
    st.SlashStarComments = true;

    st.NextToken();
    assertTrue("Wrong Token type1: " + (char)st.TokenType,
        st.TokenType == StreamTokenizer.TokenType_Number);
    st.NextToken();
    assertTrue("Wrong Token type2: " + st.TokenType,
        st.TokenType == StreamTokenizer.TokenType_Word);
    st.NextToken();
    assertTrue("Wrong Token type3: " + st.TokenType, st.TokenType == '/');
    st.NextToken();
    assertTrue("Wrong Token type4: " + st.TokenType,
        st.TokenType == StreamTokenizer.TokenType_Number);
    st.NextToken();
    assertTrue("Wrong Token type5: " + st.TokenType,
        st.TokenType == StreamTokenizer.TokenType_Word);
    st.NextToken();
    assertTrue("Wrong Token type6: " + st.TokenType, st.TokenType == '\'');
    assertTrue("Wrong Token type7: " + st.TokenType,
        st.StringValue.Equals("Hello World", StringComparison.Ordinal));
    st.NextToken();
    assertTrue("Wrong Token type8: " + st.TokenType, st.TokenType == -1);

    using (var pin = new MemoryStream(Encoding.UTF8.GetBytes("hello\n\r\r")))
    {
        StreamTokenizer s = new StreamTokenizer(pin);
        s.EndOfLineIsSignificant = true;
        assertTrue("Wrong token 1,1",
            s.NextToken() == StreamTokenizer.TokenType_Word
            && s.StringValue.Equals("hello", StringComparison.Ordinal));
        assertTrue("Wrong token 1,2", s.NextToken() == '\n');
        assertTrue("Wrong token 1,3", s.NextToken() == '\n');
        assertTrue("Wrong token 1,4", s.NextToken() == '\n');
        assertTrue("Wrong token 1,5", s.NextToken() == StreamTokenizer.TokenType_EndOfStream);
    }

    StreamTokenizer tokenizer = new StreamTokenizer(new StringReader("\n \r\n#"));
    tokenizer.OrdinaryChar('\n'); // make \n ordinary
    tokenizer.EndOfLineIsSignificant = true;
    assertTrue("Wrong token 2,1", tokenizer.NextToken() == '\n');
    assertTrue("Wrong token 2,2", tokenizer.NextToken() == '\n');
    assertEquals("Wrong token 2,3", '#', tokenizer.NextToken());
}
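// A minimal sketch (not part of the original Harmony suite) illustrating the J2N NOTE
// above: octal \257 equals decimal 175 (2*64 + 5*8 + 7), and because the replacement
// text "175" consists of digit characters, the tokenizer reports it as a
// TokenType_Number rather than the TokenType_Word the Java test expected.
// The method name is hypothetical; it reuses only API already exercised above.
public void Test_nextToken_DecimalReplacementForOctalEscape_Sketch()
{
    StreamTokenizer st = new StreamTokenizer(new StringReader("175"));
    assertTrue("expected the decimal replacement to tokenize as a number",
        st.NextToken() == StreamTokenizer.TokenType_Number && st.NumberValue == 175.0);
    assertEquals(StreamTokenizer.TokenType_EndOfStream, st.NextToken());
}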