/// <summary>
/// Tokenizes a mixed input (punctuation, integers, quoted strings, decimal
/// numbers, a block comment and a newline) and checks the total token count.
/// </summary>
public void TestTokenize()
{
    string input = "this,.is 123 \"test\"\"kk 456.789 test123 123.2 12*2 /* hello */ \n hello";
    TextReader reader = new StringReader(input);
    StreamTokenizer tokenizer = new StreamTokenizer(reader, true);

    // Prime with the first token, then count one token per fetch until Eof.
    int tokenCount = 0;
    for (TokenType token = tokenizer.NextToken(); token != TokenType.Eof; token = tokenizer.NextToken())
    {
        tokenCount++;
    }

    Assertion.AssertEquals("token count", 22, tokenCount);
}
/// <summary>
/// Tokenizes a sentence containing underscored and alphanumeric words and
/// verifies that eight tokens are produced.
/// </summary>
public void TestTokenize3()
{
    string input = "this is a very_long_word and long123 long123longer ok";
    TextReader reader = new StringReader(input);
    StreamTokenizer tokenizer = new StreamTokenizer(reader, true);

    // Prime with the first token, then count one token per fetch until Eof.
    int tokenCount = 0;
    for (TokenType token = tokenizer.NextToken(); token != TokenType.Eof; token = tokenizer.NextToken())
    {
        tokenCount++;
    }

    Assertion.AssertEquals("token count", 8, tokenCount);
}
/// <summary>
/// Verifies that GetNumericValue returns the parsed value when positioned on
/// a numeric token, and throws when the current token is not a number.
/// </summary>
public void TestReadNumber()
{
    string test1 = "this is a 123.5 notAnumber";
    TextReader reader = new StringReader(test1);
    StreamTokenizer tokenizer = new StreamTokenizer(reader, true);

    // Skip "this", "is", "a" to land on the numeric token. The original
    // stored these in unused locals (tokentype1..3); the values are not needed.
    tokenizer.NextToken();
    tokenizer.NextToken();
    tokenizer.NextToken();

    tokenizer.NextToken(); // 123.5
    double number = tokenizer.GetNumericValue();
    Assertion.AssertEquals("test1", 123.5, number);

    tokenizer.NextToken(); // notAnumber
    // BUG FIX: the original wrapped Assertion.Fail inside the try with a bare
    // 'catch { }', which also swallowed the AssertionException thrown by Fail —
    // so this test could never report a failure. Record the outcome in a flag
    // and assert afterwards instead.
    bool threw = false;
    try
    {
        tokenizer.GetNumericValue();
    }
    catch
    {
        // Expected: the current token is not numeric.
        threw = true;
    }
    Assertion.Assert("This should fail because the token is not a number.", threw);
}
/// <summary>
/// Tokenizes a string of signed and unsigned numbers ("-2.5 -2 4") and
/// verifies that exactly three tokens are produced.
/// </summary>
public void TestTokenize5()
{
    string input = "-2.5 -2 4";
    TextReader reader = new StringReader(input);
    StreamTokenizer tokenizer = new StreamTokenizer(reader, true);

    // Prime with the first token (-2.5), then count one token per fetch until Eof.
    int tokenCount = 0;
    for (TokenType token = tokenizer.NextToken(); token != TokenType.Eof; token = tokenizer.NextToken())
    {
        tokenCount++;
    }

    Assertion.AssertEquals("token count", 3, tokenCount);
}