// TODO: make another method to compile from file and allow imports
/// <summary>
/// Lexes, parses, and compiles <paramref name="code"/> into the given environment.
/// When no environment is supplied, a fresh <see cref="ScriptEnvironment"/> is created
/// and populated with the classes, functions, and modules exported by the Eilang assembly.
/// </summary>
/// <param name="code">Eilang source text to compile.</param>
/// <param name="environment">Optional target environment; a default one is built when null.</param>
/// <returns>The environment the code was compiled into.</returns>
public static IEnvironment Compile(string code, IEnvironment environment = null) {
    var sourceReader = new ScriptReader(code, "compiled");
    var tokenizer = new ScriptLexer(sourceReader, new CommonLexer(sourceReader));
    var syntaxTree = new Parser(tokenizer).Parse();
    if (environment == null) {
        var defaultEnv = new ScriptEnvironment(new OperationCodeFactory(), new ValueFactory());
        // TODO: turn the following methods into extension methods on IEnvironment
        defaultEnv.AddClassesDerivedFromClassInAssembly(typeof(Eilang));
        defaultEnv.AddExportedFunctionsFromAssembly(typeof(Eilang));
        defaultEnv.AddExportedModulesFromAssembly(typeof(Eilang));
        environment = defaultEnv;
    }
    Compiler.Compile(environment, syntaxTree);
    return environment;
}
/// <summary>
/// Command-line entry point: tokenizes a script file and writes the serialized
/// token stream to an output file. Usage: -s [source file] -o [output file].
/// Always ends by prompting for a key press, whether it succeeded or not.
/// </summary>
static void Main(string[] args) {
    string source = "";
    string target = "";
    bool argsOk = true;
    try {
        // Scan for the -s/-o flags; reading args[i + 1] past the end is caught below.
        for (int i = 0; i < args.Length; ++i) {
            switch (args[i]) {
                case "-s":
                    source = args[i + 1];
                    break;
                case "-o":
                    target = args[i + 1];
                    break;
            }
        }
    } catch (System.Exception ex) {
        Console.WriteLine("参数出错 -s [源文件] -o [输出文件] error : " + ex.ToString());
        argsOk = false;
    }
    if (argsOk) {
        if (string.IsNullOrEmpty(source) || string.IsNullOrEmpty(target)) {
            Console.WriteLine("参数出错 -s [源文件] -o [输出文件] ");
        } else {
            // Resolve both paths relative to the current working directory.
            source = Path.Combine(Environment.CurrentDirectory, source);
            target = Path.Combine(Environment.CurrentDirectory, target);
            try {
                var lexer = new ScriptLexer(File.ReadAllText(source));
                File.WriteAllBytes(target, Serialize(lexer.GetTokens()));
            } catch (System.Exception ex) {
                Console.WriteLine("解析出错 error : " + ex.ToString());
            }
        }
    }
    Console.WriteLine("生成完成,请按任意键继续");
    Console.ReadKey();
}
/// <summary>
/// Lexes and loads a script buffer into the engine, optionally clearing the
/// stack-trace info first. Returns the engine's null object for an empty buffer.
/// Any failure is rewrapped as a <see cref="ScriptException"/> tagged with the breviary.
/// </summary>
/// <param name="strBreviary">Short source name used in diagnostics.</param>
/// <param name="strBuffer">Script source text.</param>
/// <param name="context">Execution context to load into.</param>
/// <param name="clearStack">When true, clears the accumulated stack-info stack.</param>
internal ScriptObject LoadString(string strBreviary, string strBuffer, ScriptContext context, bool clearStack) {
    try {
        if (Util.IsNullOrEmpty(strBuffer)) {
            return this.m_Null;
        }
        if (clearStack) {
            this.m_StackInfoStack.Clear();
        }
        var tokenizer = new ScriptLexer(strBuffer, strBreviary);
        return this.Load(tokenizer.GetBreviary(), tokenizer.GetTokens(), context);
    } catch (System.Exception exception) {
        throw new ScriptException("load buffer [" + strBreviary + "] is error : " + exception.ToString());
    }
}
/// <summary>
/// Tokenizes <paramref name="code"/>, primes the first token,
/// and collects the region blocks found in it.
/// </summary>
public List<RegionResult> Parse(string code) {
    _lexer = new ScriptLexer(code);
    _current = _lexer.GetToken();
    return GetRegionBlocks();
}
/// <summary>
/// Serializes the token stream of <paramref name="data"/> into a compact binary form.
/// Layout: a leading 0 byte (so loaders can tell binary from plain-text script files),
/// the token count, then per token an optional line marker plus a type byte and payload.
/// </summary>
/// <param name="breviary">Short source name passed to the lexer for diagnostics.</param>
/// <param name="data">Script source text to tokenize.</param>
/// <returns>The serialized bytes, or an empty array when no tokens were produced.</returns>
public static byte[] Serialize(String breviary, string data) {
    List<Token> tokens = new ScriptLexer(data, breviary).GetTokens();
    if (tokens.Count == 0) { return new byte[0]; }
    int sourceLine = 0;
    // using-blocks guarantee disposal on every path; the original leaked both objects
    // on an exception and disposed the stream BEFORE closing the writer. This also
    // removes the SCORPIO_UWP Close/Dispose #if dance and matches the sibling
    // Serialize(string, string) overload's resource handling.
    using (MemoryStream stream = new MemoryStream())
    using (BinaryWriter writer = new BinaryWriter(stream)) {
        writer.Write((sbyte)0); // 第一个字符写入一个null 以此判断文件是二进制文件还是字符串文件
        writer.Write(tokens.Count);
        for (int i = 0; i < tokens.Count; ++i) {
            var token = tokens[i];
            // Emit a line marker only when the source line changes, keeping output compact.
            if (sourceLine != token.SourceLine) {
                sourceLine = token.SourceLine;
                writer.Write(LineFlag);
                writer.Write(token.SourceLine);
            }
            writer.Write((sbyte)token.Type);
            switch (token.Type) {
                case TokenType.Boolean:
                    writer.Write((bool)token.Lexeme ? (sbyte)1 : (sbyte)0);
                    break;
                case TokenType.String:
                case TokenType.SimpleString:
                case TokenType.Identifier:
                    // Merged: all three carry a plain string payload.
                    Util.WriteString(writer, (string)token.Lexeme);
                    break;
                case TokenType.Number:
                    // Numbers carry a 1-byte tag: 1 = double payload, 2 = long payload.
                    if (token.Lexeme is double) {
                        writer.Write((sbyte)1);
                        writer.Write((double)token.Lexeme);
                    } else {
                        writer.Write((sbyte)2);
                        writer.Write((long)token.Lexeme);
                    }
                    break;
            }
        }
        return stream.ToArray();
    }
}
// Lexer error carrying a location-prefixed message: "<breviary> Line:<1-based line> Column:<char> : <message>".
// SourceLine is 0-based on the lexer, hence the +1 for human-readable output.
public LexerException(ScriptLexer lexer, string message) : base($"{lexer.Breviary} Line:{lexer.SourceLine + 1} Column:{lexer.SourceChar} : {message}") { }
/// <summary>
/// Serializes the token stream of <paramref name="data"/> to bytes: a leading 0 byte
/// (marks the file as binary rather than text), the token count, then per token an
/// optional source-line marker followed by a type byte and a type-specific payload.
/// </summary>
/// <param name="breviary">Short source name passed to the lexer for diagnostics.</param>
/// <param name="data">Script source text to tokenize.</param>
/// <returns>The serialized bytes, or an empty array when no tokens were produced.</returns>
public static byte[] Serialize(string breviary, string data) {
    List<Token> tokens = new ScriptLexer(data, breviary).GetTokens();
    if (tokens.Count == 0) {
        return new byte[0];
    }
    int lastLine = 0;
    using (MemoryStream stream = new MemoryStream())
    using (BinaryWriter writer = new BinaryWriter(stream)) {
        writer.Write((sbyte)0);
        writer.Write(tokens.Count);
        foreach (Token token in tokens) {
            // Only emit a line marker when the source line changes.
            if (lastLine != token.SourceLine) {
                lastLine = token.SourceLine;
                writer.Write(LineFlag);
                writer.Write(token.SourceLine);
            }
            writer.Write((sbyte)token.Type);
            switch (token.Type) {
                case Scorpio.Compiler.TokenType.Boolean:
                    writer.Write((bool)token.Lexeme ? (sbyte)1 : (sbyte)0);
                    break;
                case Scorpio.Compiler.TokenType.Number:
                    // Tag byte 1 = double payload, 2 = long payload.
                    if (token.Lexeme is double) {
                        writer.Write((sbyte)1);
                        writer.Write((double)token.Lexeme);
                    } else {
                        writer.Write((sbyte)2);
                        writer.Write((long)token.Lexeme);
                    }
                    break;
                case Scorpio.Compiler.TokenType.String:
                case Scorpio.Compiler.TokenType.SimpleString:
                case Scorpio.Compiler.TokenType.Identifier:
                    Util.WriteString(writer, (string)token.Lexeme);
                    break;
                // Null, Eval, and all other token types carry no payload.
            }
        }
        return stream.ToArray();
    }
}
// Placeholder constructor for a planned Pratt-parser rewrite; intentionally does
// nothing yet (the lexer argument is not stored). See the linked article for the
// intended expression-parsing approach.
public NewParser(ScriptLexer scriptLexer) { // https://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/ }