/// <summary>
///     Renders a disassembly line for the instruction at <paramref name="index" /> in
///     <paramref name="pCode" />, expanding any <c>$int</c>, <c>$float</c> or <c>$string</c>
///     placeholders found in the instruction's <c>Format</c> specification with operand
///     data read from the code buffer.
/// </summary>
/// <param name="pCode">The compiled code buffer the operands are read from.</param>
/// <param name="index">
///     Position of the instruction's opcode byte; advanced past all consumed operand bytes.
/// </param>
/// <returns>A string of the form <c>[offset:size] text</c>.</returns>
/// <exception cref="FormatException">Thrown if the format contains an unknown placeholder.</exception>
public string BuildString(byte[] pCode, ref int index)
{
    // The format specification itself is tokenized; the file-name argument is
    // reused as the content since there is no meaningful source file here.
    var lexer = new Lexer(Format, Format);
    // StringBuilder instead of string concatenation: the old `str +=` pattern
    // reallocates the whole string on every append.
    var builder = new System.Text.StringBuilder();
    var size = 1; // opcode byte itself
    var start = index;

    while (lexer.MoveNext())
    {
        if (lexer.Current.Is(TokenType.Token, "$"))
        {
            builder.Append(' ');
            lexer.AssertMoveNext();
            lexer.AssertToken(TokenType.Identifier);

            switch (lexer.Current.Value)
            {
                case "int":
                    // 4-byte little-endian integer operand follows the opcode.
                    builder.Append(BitConverter.ToInt32(pCode, index + 1)).Append(' ');
                    index += 4;
                    size += 4;
                    break;
                case "float":
                    // 4-byte single-precision float operand.
                    builder.Append(BitConverter.ToSingle(pCode, index + 1)).Append(' ');
                    index += 4;
                    size += 4;
                    break;
                case "string":
                    // NUL-terminated inline string operand.
                    builder.Append('"');
                    while (pCode[index + 1] != 0)
                    {
                        builder.Append((char) pCode[index++ + 1]);
                        size++;
                    }
                    builder.Append("\" ");
                    index++; // consume the terminating NUL
                    size++;
                    break;
                default:
                    // Was `throw new Exception()`; a typed, descriptive exception
                    // (still assignable to Exception) makes bad formats diagnosable.
                    throw new FormatException(
                        $"Unknown placeholder type `{lexer.Current.Value}` in format `{Format}`");
            }
        }
        else
        {
            builder.Append(lexer.Current.Value);
        }
    }

    return $"[{start:000000}:{size:00}] " + builder;
}
/// <summary>
///     Tokenizes <paramref name="fileContents" /> and stores every
///     <c>key "value"</c> pair into the values table, scoped under the most
///     recent <c>REFERENCE</c> directive combined with <paramref name="prefix" />.
/// </summary>
/// <param name="fileName">Name of the file (used for lexer diagnostics).</param>
/// <param name="fileContents">Raw contents to parse.</param>
/// <param name="prefix">Prefix prepended to every reference scope.</param>
public void LoadFromFile(string fileName, string fileContents, string prefix)
{
    var lexer = new Lexer(fileName, fileContents);
    string currentReference = null;

    while (lexer.MoveNext())
    {
        // A `REFERENCE <name>` directive re-scopes all subsequent entries.
        if (lexer.Current.Is(TokenType.Identifier, "REFERENCE"))
        {
            lexer.AssertMoveNext();
            lexer.AssertToken(TokenType.Identifier);
            currentReference = lexer.Current.Value;
            continue;
        }

        // Anything else must be an `<identifier> "<string>"` pair.
        lexer.AssertToken(TokenType.Identifier);
        var entryKey = lexer.Current.Value;

        lexer.AssertMoveNext();
        lexer.AssertToken(TokenType.StringLiteral);

        // Entries before any REFERENCE directive land under the bare prefix
        // (currentReference is still null and formats as an empty string).
        _values[new Tuple<string, string>(entryKey, $"{prefix}{currentReference}")] =
            lexer.Current.Value;
    }
}
/// <summary>
///     Compiles a textual grammar rule into its sequence of rule elements.
///     Recognized tokens: punctuation (matched literally), `OPTIONAL` (marks the
///     next element optional), upper-case token-type names optionally joined with
///     `|`, other identifiers (embedded sub-rules) and back-tick quoted literals.
/// </summary>
/// <param name="rule">The rule text to compile.</param>
/// <returns>The compiled elements, in rule order.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="rule" /> is null.</exception>
private static IGrammarRuleElement[] Compile(string rule)
{
    if (rule == null) throw new ArgumentNullException(nameof(rule));
    // Back-ticks act as string-literal quotes in rule text; normalize before lexing.
    var lexer = new Lexer("grammar_rule", rule.Replace('`', '"'));
    var conditions = new List<IGrammarRuleElement>();
    var nextConditionIsOptional = false;
    // Appends an element, wrapping it when the preceding token was `OPTIONAL`;
    // the flag is one-shot and cleared after each add.
    var add = new Action<IGrammarRuleElement>(element =>
    {
        conditions.Add(nextConditionIsOptional
            ? new GrammarRuleElementOptional(element)
            : element);
        nextConditionIsOptional = false;
    });
    // Map every TokenType to its upper-case name so rule text such as
    // `IDENTIFIER` can be resolved to the corresponding enum value.
    var types = typeof (TokenType).GetEnumValues()
        .OfType<TokenType>()
        .Select(value => new KeyValuePair<string, TokenType>(value.ToUpperString(), value))
        .ToDictionary(p => p.Key, p => p.Value);
    while (lexer.MoveNext())
    {
        switch (lexer.Current.Type)
        {
            case TokenType.Token:
                // Punctuation in a rule must match the same punctuation in input.
                add(new GrammarRuleElementLiteral(new[] {TokenType.Token}, lexer.Current.Value));
                break;
            case TokenType.Identifier:
                if (lexer.Current.Value == "OPTIONAL")
                {
                    // Consumed by `add` when the next element is appended.
                    nextConditionIsOptional = true;
                    break;
                }
                TokenType tokenType;
                if (types.TryGetValue(lexer.Current.Value, out tokenType))
                {
                    // Token-type name: gather any `A|B|C` alternatives into a
                    // single literal element using one-token lookahead.
                    var tokenTypes = new List<TokenType>(new[] {tokenType});
                    for (;;)
                    {
                        var previous = lexer.Current;
                        if (!lexer.MoveNext())
                        {
                            // End of rule text; flush the collected alternatives.
                            add(new GrammarRuleElementLiteral(tokenTypes.ToArray(), null));
                            break;
                        }
                        if (!lexer.Current.Is(TokenType.Token, "|"))
                        {
                            // Not a separator: un-read the lookahead token so the
                            // outer loop sees it again, then flush.
                            lexer.Push(previous);
                            add(new GrammarRuleElementLiteral(tokenTypes.ToArray(), null));
                            break;
                        }
                        if (!lexer.MoveNext())
                        {
                            // NOTE(review): a trailing `|` is silently ignored here
                            // rather than rejected — confirm that is intended.
                            add(new GrammarRuleElementLiteral(tokenTypes.ToArray(), null));
                            break;
                        }
                        lexer.AssertToken(TokenType.Identifier);
                        if (types.TryGetValue(lexer.Current.Value, out tokenType))
                            tokenTypes.Add(tokenType);
                        else
                            throw new Exception("Expected token type");
                    }
                    // Exits the switch case; the embed below is only reached when
                    // the identifier was NOT a token-type name.
                    break;
                }
                // Any other identifier references another grammar rule by name.
                add(new GrammarRuleElementEmbed(lexer.Current.Value));
                break;
            case TokenType.StringLiteral:
                // Quoted text matches an identifier with exactly this value.
                add(new GrammarRuleElementLiteral(new[] {TokenType.Identifier}, lexer.Current.Value));
                break;
            default:
                throw new Exception("Invalid token");
        }
    }
    return conditions.ToArray();
}
/// <summary>
///     Compiles an Earle script into an <see cref="EarleFile" /> by repeatedly
///     matching the file grammar: function declarations are compiled and added,
///     `#include` directives register included file paths.
/// </summary>
/// <param name="fileName">Name of the file being compiled; lower-cased for lookups.</param>
/// <param name="script">The script source text.</param>
/// <returns>The compiled file.</returns>
/// <exception cref="ArgumentNullException">Thrown if either argument is null.</exception>
/// <exception cref="ParseException">Thrown when top-level content is neither a function nor an include.</exception>
public virtual EarleFile CompileFile(string fileName, string script)
{
    if (fileName == null) throw new ArgumentNullException(nameof(fileName));
    if (script == null) throw new ArgumentNullException(nameof(script));
    // Normalize so file names compare case-insensitively elsewhere.
    fileName = fileName.ToLower();
    var lexer = new Lexer(fileName, script);
    var file = new EarleFile(_runtime, fileName);
    // Prime the lexer; an empty script leaves Current null and skips the loop.
    lexer.MoveNext();
    // Recursively look for function declarations
    while (lexer.Current != null)
    {
        var match = _fileGrammarProcessor.GetMatch(lexer, true);
        switch (match)
        {
            case "FUNCTION_DECLARATION":
                // Compile the function and add it to the file
                file.AddFunction(CompileFunction(lexer, file));
                break;
            case "INCLUDE":
                // `#include path\to\file;` — consume the directive prefix.
                lexer.SkipToken(TokenType.Token, "#");
                lexer.SkipToken(TokenType.Identifier, "include");
                // Path tokens alternate identifier/backslash; when the path starts
                // with an identifier, an implicit leading `\` is prepended.
                var identifier = !lexer.Current.Is(TokenType.Token, "\\");
                var path = identifier ? "\\" : string.Empty;
                do
                {
                    // check syntax
                    if (identifier)
                        lexer.AssertToken(TokenType.Identifier);
                    else
                        lexer.AssertToken(TokenType.Token, "\\");
                    identifier = !identifier;
                    path += lexer.Current.Value;
                    lexer.AssertMoveNext();
                } while (!lexer.Current.Is(TokenType.Token, ";"));
                lexer.SkipToken(TokenType.Token, ";");
                file.IncludeFile(path);
                break;
            default:
                throw new ParseException(lexer.Current,
                    $"Expected function, found {match} `{lexer.Current.Value}`");
        }
    }
    return file;
}