/// <summary>
/// Entry point: finds every Ruby entry script ("main.rb") in the machine-files
/// directory, parses it, and executes it on a fresh VM with the XNA assembly
/// registered.
/// </summary>
/// <param name="args">Optional; args[0] may override the scripts directory.</param>
public static void Main(string[] args)
{
    // Allow the directory to be supplied on the command line so the tool is not
    // tied to one developer's machine layout; fall back to the historical default.
    string directory = args.Length > 0
        ? args[0]
        : @"D:/OneDrive/文档/RubySharp/Src/RubySharp.Core.Tests/MachineFiles";

    foreach (var path in System.IO.Directory.GetFiles(directory))
    {
        // Only execute Ruby entry scripts; skip everything else.
        if (System.IO.Path.GetExtension(path) != ".rb")
        {
            continue;
        }
        if (!path.EndsWith("main.rb"))
        {
            continue;
        }

        Console.WriteLine(path);

        AstParser aparser = new AstParser(System.IO.File.ReadAllText(path));
        aparser.filepath = path;

        var vm = new VM();
        // Expose the XNA framework types to the scripts before evaluation.
        UserDataUtility.RegAssembly(vm, typeof(Microsoft.Xna.Framework.Game).Assembly);
        vm.Evaluate(aparser.Parse());

        Console.WriteLine();
    }
}
/// <summary>
/// Tokenizes the given source text and parses the resulting token stream
/// into an AST.
/// </summary>
private Ast ParseString(string source)
{
    var tokens = _tokenizer.Parse(source);
    var stream = new TokenStream(tokens);
    return AstParser.Parse(stream);
}
// Lexes the infix input, then asserts that AST parsing fails with the
// expected reduction error and remaining-value count.
public void TestCases(ExpressionReductionTestCase testCase)
{
    var tokens = Lexer.Process(DemoUtility.OperatorMap, testCase.Infix);

    var ex = Assert.Throws<ExpressionReductionException>(
        () => AstParser.Parse(DemoUtility.OperatorMap, tokens));

    Assert.AreEqual(testCase.ExpectedRemaining, ex.RemainingValues);
}
// Lexes the infix input, then asserts that AST parsing reports the
// expected missing-token type.
public void TestCases(MissingTokenTestCase testCase)
{
    var tokens = Lexer.Process(DemoUtility.OperatorMap, testCase.Infix);

    var ex = Assert.Throws<MissingTokenException>(
        () => AstParser.Parse(DemoUtility.OperatorMap, tokens));

    Assert.AreEqual(testCase.ExpectedType, ex.Type);
}
// Parses the infix input with the test operator map and compares the
// rendered tree against the expected node string.
public void TestCases(PrecedenceTestCase testCase)
{
    var tokens = Lexer.Process(s_testOperatorMap, testCase.Infix);
    var root = AstParser.Parse(s_testOperatorMap, tokens);

    Assert.AreEqual(testCase.ExpectedNodeString, root.ToString());
}
// Parses the infix input successfully, then asserts that compilation
// rejects the operator whose argument count does not match any overload.
public void TestCases(ArgumentMismatchTestCase testCase)
{
    var tokens = Lexer.Process(DemoUtility.OperatorMap, testCase.Infix);
    var root = AstParser.Parse(DemoUtility.OperatorMap, tokens);

    var ex = Assert.Throws<OverloadMismatchException>(
        () => ExpressionCompiler.Compile<double>(DemoUtility.CompilerFunctions, root));

    Assert.AreEqual(testCase.ExpectedOperator, ex.OperatorNode.OperatorInfo.Keyword);
    Assert.AreEqual(testCase.ActualArguments, ex.OperatorNode.Children.Count);
}
// Full pipeline check: lex -> parse -> verify tree shape -> compile ->
// evaluate against the reference function on the shared context.
public void TestCases(End2EndTestCase<double> testCase)
{
    var tokens = Lexer.Process(DemoUtility.OperatorMap, testCase.Infix);
    var root = AstParser.Parse(DemoUtility.OperatorMap, tokens);
    Assert.AreEqual(testCase.ExpectedNodeString, root.ToString());

    var compiled = ExpressionCompiler.Compile<Context<double>, double>(DemoUtility.CompilerFunctions, root);
    Assert.AreEqual(testCase.ExpectedFunction(s_ctx), compiled(s_ctx));
}
/// <summary>
/// Entry point: treats the whole command line as one infix expression,
/// echoes it, compiles it, and prints the evaluated result.
/// </summary>
static void Main(string[] args)
{
    string infix = string.Join(' ', args);
    Console.WriteLine(infix);

    var tokens = Lexer.Process(DemoUtility.OperatorMap, infix);
    var root = AstParser.Parse(DemoUtility.OperatorMap, tokens);
    var compiled = ExpressionCompiler.Compile<double>(DemoUtility.CompilerFunctions, root);

    Console.WriteLine(compiled());
}
/// <summary>
/// Builds a symbolic control-flow graph for the dummy architecture starting at
/// offset 0, then lifts it into an AST-form control-flow graph.
/// </summary>
private ControlFlowGraph<Statement<DummyInstruction>> ConstructAst(
    IEnumerable<DummyInstruction> instructions)
{
    var architecture = DummyArchitecture.Instance;
    var transitionResolver = new DummyTransitionResolver();

    var graphBuilder = new SymbolicFlowGraphBuilder<DummyInstruction>(
        architecture, instructions, transitionResolver);
    var controlFlowGraph = graphBuilder.ConstructFlowGraph(0);

    var lifter = new AstParser<DummyInstruction>(controlFlowGraph, transitionResolver.DataFlowGraph);
    return lifter.Parse();
}
// Static initializer: seeds the symbol table and parses the built-in
// foreign function declarations into the initial program AST.
static LllCompiler()
{
    SymTable = LllSymbolTable.CreateBasic();

    var tokenizer = Tokenizer.CreateBasic();
    tokenizer.FilePath = "<builtin>";

    // Built-in foreign declarations, one per line (order preserved).
    string[] declarations =
    {
        "foreign func alloc as 'malloc' (size: size) -> void*;",
        "foreign func free as 'free' (ptr: void*);",
        "foreign func sizeof as 'sizeof' () -> size;",
        "foreign func printf as 'printf' (fmt: char*, ...) -> i32;",
    };

    var foreigns = new StringBuilder();
    foreach (var declaration in declarations)
    {
        foreigns.AppendLine(declaration);
    }

    var tokens = tokenizer.Parse(foreigns.ToString());
    var stream = new TokenStream(tokens);
    _program = AstParser.Parse(stream);
}
/// <summary>
/// Parses shader source text into a <see cref="ShaderFile"/>: builds the syntax
/// tree, constructs the version-appropriate global scope, builds the semantic
/// model, and resolves symbol references.
/// </summary>
/// <param name="payload">The raw shader source text.</param>
/// <param name="globalScopeFactory">Optional factory; a default is used when null.</param>
public static ShaderFile CreateFromText(string payload, IGlobalScopeFactory globalScopeFactory = null)
{
    // Default to the standard factory when the caller does not inject one.
    var scopeFactory = globalScopeFactory ?? new GlobalScopeFactory();

    ShaderFile shaderFile = new ShaderFile();

    var parser = new AstParser();
    shaderFile.SyntaxTree = parser.Parse(payload);

    // The global scope depends on the version declared in the parsed tree.
    var globalScope = scopeFactory.Construct(shaderFile.SyntaxTree.Version);

    var modelBuilder = new SemanticModelBuilderVisitor();
    var modelContext = new SemanticModelBuilderContext();
    modelBuilder.Visit(shaderFile.SyntaxTree, modelContext);

    shaderFile.SemanticContext = new SemanticContext(modelContext.Result, globalScope);
    shaderFile.SemanticContext.ResolveSymbolReferences();

    return shaderFile;
}
// Reads InputLine into an IData tree: "{...}" spans become nested Group
// nodes, "<...>" spans become Garbage, and anything else becomes raw
// GarbageData text.
private IData ReadInput()
{
    var reader = new AstReader();
    // "!" marks an escape; Skip behavior presumably drops the escaped
    // character entirely — TODO confirm against AstReader.
    reader.Escape = "!";
    reader.EscapeBehavior = AstReader.EscapeHandling.Skip;
    reader.SequenceSplit = ",";
    // "{...}" is a normal (tokenized) group; "<...>" passes `false`, which
    // looks like it keeps the span's content un-tokenized — verify.
    var group = new GroupSymbol("{", "}");
    var garbage = new GroupSymbol("<", ">", false);
    reader.AddGroup(group).AddGroup(garbage);
    var parser = new AstParser<IData>();
    // "{...}" -> Group wrapping its already-parsed children.
    parser.Add(new TupleParser<IData>(group, (IList<IData> args, out IData result) => { result = new Group { Data = args.ToList() }; return(true); }));
    // "<...>" -> Garbage holding at most one GarbageData payload (null when empty).
    parser.Add(new TupleParser<IData>(garbage, (IList<IData> args, out IData result) => { result = new Garbage { Content = args.Count > 0 ? args[0] as GarbageData : null }; return(true); }));
    // Fallback: any bare token sequence -> raw GarbageData of its text.
    parser.Add(new SequenceParser<IData>((AstParser<IData> _, IList<AstNode> nodes, out IData result) => { result = new GarbageData(nodes.Str()); return(true); }));
    var nodes = reader.Read(InputLine, new TokenSettings { SingleLetters = true });
    return(parser.Parse(nodes));
}
/// <summary>
/// Compiles LLL source text to C: tokenizes, parses to an AST (timing and
/// logging each phase), runs semantic analysis and type checking, appends the
/// AST to the program, and emits the C output.
/// </summary>
public string CompileString(string source)
{
    var stopwatch = new Stopwatch();

    Console.WriteLine("Parsing sources into tokens...");
    stopwatch.Start();
    var tokens = _tokenizer.Parse(source);
    stopwatch.Stop();
    Console.WriteLine("Parsed to tokens in {0}ms", stopwatch.ElapsedMilliseconds);

    var stream = new TokenStream(tokens);

    Console.WriteLine("Parsing tokens into AST...");
    stopwatch.Restart();
    Ast = AstParser.Parse(stream);
    stopwatch.Stop();
    Console.WriteLine("Parsed to AST in {0}ms", stopwatch.ElapsedMilliseconds);

    // Validate before emitting: semantic pass first, then type check.
    SemanticAnalyzer.Analyze(Ast);
    TypeChecker.Check(Ast);

    _program.AppendAst(Ast);
    return _program.CompileToC();
}