/// <summary>
/// Configures the lexer behavior and settings used when tokenizing: comments and
/// whitespace are skipped, double-quoted strings with escaping are enabled, every
/// <c>Keyword</c> enum member (except "_"-prefixed placeholders) is registered under
/// its lower-case spelling, and single-character punctuation is mapped to Symbol ids.
/// </summary>
public Parser()
{
    // SkipComments/SkipWhiteSpaces keep trivia out of the token stream.
    LB = LexerBehavior.SkipComments | LexerBehavior.SkipWhiteSpaces | LexerBehavior.Default;

    LS = LexerSettings.Default;
    LS.Options = LexerOptions.StringDoubleQuote | LexerOptions.StringEscaping;

    // Register keyword spellings. ToLowerInvariant (not ToLower) avoids
    // culture-sensitive casing surprises (e.g. the Turkish dotless 'i'), and
    // StartsWith gets an explicit ordinal comparison for the same reason.
    LS.Keywords = new Dictionary<string, int>();
    foreach (string name in Enum.GetNames(typeof(Keyword)))
    {
        if (name.StartsWith("_", StringComparison.Ordinal))
        {
            // "_"-prefixed enum members are placeholders, not real keywords.
            continue;
        }
        LS.Keywords.Add(name.ToLowerInvariant(), (int)(Keyword)Enum.Parse(typeof(Keyword), name));
    }

    // Punctuation the lexer should report as Symbol tokens.
    LS.Symbols = new Dictionary<string, int>
    {
        { "(", (int)Symbol.LParen },
        { ")", (int)Symbol.RParen },
        { "{", (int)Symbol.LCurvy },
        { "}", (int)Symbol.RCurvy },
        { ",", (int)Symbol.Comma },
        { ";", (int)Symbol.Semicolon },
        { "=", (int)Symbol.Assign },
    };
}
/// <summary>
/// Tokenizes <paramref name="source"/> with the shared <c>Lexer</c>, gluing the run of
/// tokens between two quote symbols back together into a single QuotedString token,
/// converting Decimal tokens to Number tokens, and rewriting the word-operators
/// and/or/not into Symbol tokens (the lexer reports those words as identifiers).
/// Whitespace tokens are dropped; everything else is passed through unchanged.
/// </summary>
/// <param name="source">Raw source text; leading/trailing whitespace is trimmed.</param>
private Parser(string source)
{
    // Initialize the static (reusable) lexer settings if we haven't already.
    if (lexerSettings == null)
    {
        Dictionary<string, int> lexerTokenDict = new Dictionary<string, int>();
        foreach (LuaTokenData token in lexerTokens)
        {
            lexerTokenDict.Add(token.symbol, token.id);
        }
        lexerSettings = new LexerSettings();
        lexerSettings.Symbols = lexerTokenDict;
    }

    Lexer lex = new Lexer(source.Trim(), lexerSettings);

    bool inQuote = false;
    int startPosition = 0;
    StringBuilder sb = null;
    foreach (var tok in lex)
    {
        if (inQuote)
        {
            sb.Append(tok.Text);

            // NOTE(review): Id == 2 is presumably the quote symbol entry in
            // lexerTokens — confirm against that table.
            if (tok.Type == TokenType.Symbol && tok.Id == 2)
            {
                // Closing quote: emit everything accumulated (both quote
                // characters included) as one QuotedString token spanning
                // from the opening quote to this token's end.
                string builtString = sb.ToStringAndRelease();
                sb = null;
                tokenList.Add(new Token(TokenType.QuotedString, builtString, builtString, 0,
                    startPosition, tok.EndPosition, tok.LineBegin, tok.LineNumber,
                    tok.EndLineBegin, tok.EndLineNumber));
                inQuote = false;
            }
        }
        else if (tok.Type == TokenType.Symbol && tok.Id == 2)
        {
            // Opening quote: start accumulating raw token text.
            inQuote = true;
            startPosition = tok.StartPosition;
            sb = StringBuilderCache.Acquire();
            sb.Append(tok.Text);
        }
        else if (tok.Type == TokenType.Decimal)
        {
            tokenList.Add(new Token(TokenType.Number, (double)(Decimal)tok.Value, tok.Text, 0, 0, 0, 0, 0, 0, 0));
        }
        else if (tok.Type == TokenType.Identifier)
        {
            // The Lexer won't tokenize keywords as symbols, so we have to
            // convert them here. NOTE(review): ids 64/65/66 are presumably the
            // and/or/not symbol ids used elsewhere in the parser — verify.
            switch (tok.Text)
            {
                case "and":
                    tokenList.Add(new Token(TokenType.Symbol, tok.Text, tok.Text, 64, 0, 0, 0, 0, 0, 0));
                    break;
                case "or":
                    tokenList.Add(new Token(TokenType.Symbol, tok.Text, tok.Text, 65, 0, 0, 0, 0, 0, 0));
                    break;
                case "not":
                    tokenList.Add(new Token(TokenType.Symbol, tok.Text, tok.Text, 66, 0, 0, 0, 0, 0, 0));
                    break;
                default:
                    tokenList.Add(tok);
                    break;
            }
        }
        else if (tok.Type != TokenType.WhiteSpace)
        {
            tokenList.Add(tok);
        }
    }

    // FIX: an unterminated quote previously leaked the StringBuilder acquired
    // from StringBuilderCache and silently dropped the partial string. Emit the
    // collected text instead (end positions are unknown at EOF, so use 0 like
    // the other synthesized tokens above).
    if (inQuote && sb != null)
    {
        string partial = sb.ToStringAndRelease();
        tokenList.Add(new Token(TokenType.QuotedString, partial, partial, 0, startPosition, 0, 0, 0, 0, 0));
    }
}