// Runs the lexer over a single statement and dumps every token to the
// console. Literal tokens are printed with their decoded payload; scanning
// stops at EOF or on the first illegal token.
static void TestOneStatement(string stmt)
{
    using (MemoryStream input = CreateStringStream(stmt))
    {
        Lexer lexer = new Lexer(input);
        for (TokenType token = lexer.Scan(); token != TokenType.EOF; token = lexer.Scan())
        {
            if (token == TokenType.ILLEGAL)
            {
                // Report the position and the offending source line, then stop.
                Console.WriteLine($"ILLEGAL TOKEN: ({lexer.Line},{lexer.Column}) {lexer.GetCurrentLine()}");
                break;
            }

            if (token == TokenType.IDENTIFIER)
            {
                Console.WriteLine($"{token}: {lexer.StringValue}");
            }
            else if (token == TokenType.LITERAL)
            {
                // Pick the output format matching the literal's declared type.
                switch (lexer.LiteralType)
                {
                    case LiteralType.STRING:
                        Console.WriteLine($"{token} STRING: {lexer.StringValue}");
                        break;
                    case LiteralType.DOUBLE:
                        Console.WriteLine($"{token} DOUBLE: {lexer.DoubleValue.ToString("G")}");
                        break;
                    case LiteralType.INT:
                        Console.WriteLine($"{token} INT: {lexer.IntValue}");
                        break;
                    case LiteralType.LONG:
                        Console.WriteLine($"{token} LONG: {lexer.LongValue}");
                        break;
                    case LiteralType.DURATION:
                        // Show both the raw lexeme and the parsed duration.
                        Console.WriteLine($"{token} DURATION: {lexer.StringValue} => {lexer.DurationValue}");
                        break;
                    case LiteralType.REGEX:
                        Console.WriteLine($"{token} REGEX: {lexer.RegexValue} ({lexer.RegexValue.Options})");
                        break;
                }
            }
            else
            {
                // Punctuation/keyword tokens carry no payload.
                Console.WriteLine($"{token}");
            }
        }
    }
}
// Scans a single statement and prints each (token, value) pair. Stops at
// EOF, or on the first invalid token, which is reported with its source
// position and the offending code snippet.
static void TestOneStatement(string stmt)
{
    using (MemoryStream input = CreateStringStream(stmt))
    {
        Lexer lexer = new Lexer(input);
        for (TokenType token = lexer.Scan(); token != TokenType.EOF; token = lexer.Scan())
        {
            if (token == TokenType.INVALID)
            {
                Console.WriteLine($"SYNTAX ERROR: ({lexer.Context.Line}, {lexer.Context.Column}, \"{lexer.Context.Code}\")");
                break;
            }

            Console.WriteLine($"({token}, {lexer.Context.Value})");
        }
    }
}
// Copies the literal payload out of the lexer into the token, choosing the
// token field that corresponds to the lexer's current literal type.
// Non-literal tokens (no matching LiteralType case) are left untouched.
private void CopyValues(Token t, Lexer l)
{
    switch (l.LiteralType)
    {
        case LiteralType.STRING:
            t.StringValue = l.StringValue;
            break;
        case LiteralType.REGEX:
            t.RegexValue = l.RegexValue;
            break;
        case LiteralType.INT:
            t.IntValue = l.IntValue;
            break;
        case LiteralType.LONG:
            t.LongValue = l.LongValue;
            break;
        case LiteralType.DOUBLE:
            t.DoubleValue = l.DoubleValue;
            break;
        case LiteralType.DURATION:
            t.DurationValue = l.DurationValue;
            break;
    }
}
// Builds a tokenizer over the given stream: wraps it in a lexer and
// initializes the pushback stack and keyword table used during scanning.
public Tokenizer(Stream stream)
{
    _lexer = new Lexer(stream);
    _keywords = new KeywordCollection();
    _tokens = new Stack<Token>();
}