Example #1
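This test tokenizes a filter query against a `Person` model, builds an RPN sequence and an AST with the shunting-yard parser, compiles the parsed expression, and then times 1,000 iterations each of tokenizing and AST construction. The snippet assumes `using System.Diagnostics;` (for `Debug` and `Stopwatch`) and `using System.Linq;` (for `ToList`).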
        public void PerformanceTest2()
        {
            var grammar = new Grammar(new QuerySyntax())
            {
                Strict = true
            };
            var lexer        = new Lexer(grammar);
            var parser       = new Parser(grammar);
            var source       = "Id!=null and FirstName like '%e%' and LastName like '%e%' and BornAt<date(now()) and Address.Street like 'e%' and Address.City like '%e' or Address.Number like '%0%'";
            var shuntingYard = new ShuntingYardParser(grammar);

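            // exercise each stage once up front so the timed loops below measure warmed-up (JIT-compiled) code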
            var tokens = lexer.Tokenize(source).ToList();
            var rpn    = shuntingYard.BuildRPN(tokens);
            var exp    = shuntingYard.BuildAST<Person, bool>(tokens);

            Debug.WriteLine(exp);
            var expression = parser.Parse<Person, bool>(source);
            var d          = expression.Compile();

            var sw = new Stopwatch();

            sw.Start();
            for (var i = 0; i < 1000; i++)
            {
                tokens = lexer.Tokenize(source).ToList();
            }
            sw.Stop();
            Debug.WriteLine("source->tokens: " + sw.ElapsedMilliseconds);
            sw = new Stopwatch();
            sw.Start();
            for (var i = 0; i < 1000; i++)
            {
                shuntingYard.BuildAST<Person, bool>(tokens);
            }
            sw.Stop();
            Debug.WriteLine("infix->AST: " + sw.ElapsedMilliseconds);
        }
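For reference, the compiled delegate `d` is a `Func<Person, bool>` that can be invoked directly. A minimal usage sketch, assuming a hypothetical `Person`/`Address` model with the members the query references (`Id`, `FirstName`, `LastName`, `BornAt`, `Address.Street`, `Address.City`, `Address.Number`):

            // hypothetical model instance; property names mirror the query string above
            var person = new Person
            {
                Id        = 1,
                FirstName = "Peter",
                LastName  = "Meier",
                BornAt    = new DateTime(1980, 5, 1),
                Address   = new Address { Street = "elm street", City = "little", Number = "10" }
            };
            Debug.WriteLine("matches: " + d(person));   // evaluates the compiled predicate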
Example #2
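This test pushes an arithmetic expression through the same pipeline and times 10,000 iterations of each stage separately: tokenizing, infix->postfix (RPN), infix->AST, a full source->AST parse, and compiling the expression tree to IL.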
        public void PerformanceTest()
        {
            // this test measures the relative cost of each stage:
            // tokenizing, infix->postfix, infix->AST, full source->AST parse, and AST->IL compilation
            var grammar = new Grammar(new QuerySyntax());
            var lexer   = new Lexer(grammar);
            var parser  = new Parser(grammar);
            var source  = "(1+3)*(5.0/0.4)-16.3e5";
            //var source = "(color=white or color=green) and wheels >=10";
            var shuntingYard = new ShuntingYardParser(grammar);

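            // run the full pipeline once up front as a warm-up before the timed loops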
            var tokens     = lexer.Tokenize(source).ToArray();
            var rpn        = shuntingYard.BuildRPN(tokens);
            var exp        = shuntingYard.BuildAST<object, double>(tokens);
            var expression = parser.Parse<object, double>(source);
            var d          = expression.Compile();

            var sw = new Stopwatch();

            sw.Start();
            for (var i = 0; i < 10000; i++)
            {
                lexer.Tokenize(source);
            }
            sw.Stop();
            Debug.WriteLine("tokenizing: " + sw.ElapsedMilliseconds);

            sw = new Stopwatch();
            sw.Start();
            for (var i = 0; i < 10000; i++)
            {
                shuntingYard.BuildRPN(tokens);
            }
            sw.Stop();
            Debug.WriteLine("infix->postfix: " + sw.ElapsedMilliseconds);

            sw = new Stopwatch();
            sw.Start();
            for (var i = 0; i < 10000; i++)
            {
                shuntingYard.BuildAST<object, double>(tokens);
            }
            sw.Stop();
            Debug.WriteLine("infix->AST: " + sw.ElapsedMilliseconds);

            sw = new Stopwatch();
            sw.Start();
            for (var i = 0; i < 10000; i++)
            {
                parser.Parse<object, double>(source);
            }
            sw.Stop();
            Debug.WriteLine("source->ast: " + sw.ElapsedMilliseconds);

            sw = new Stopwatch();
            sw.Start();
            for (var i = 0; i < 10000; i++)
            {
                expression.Compile();
            }
            sw.Stop();
            Debug.WriteLine("ast->IL: " + sw.ElapsedMilliseconds);
        }
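As a quick sanity check, the compiled delegate can be evaluated once: `(1+3)*(5.0/0.4)-16.3e5` works out to `4 * 12.5 - 1630000 = -1629950`. A minimal sketch (the `object` parameter is not used by the expression, so `null` is passed):

            var result = d(null);                   // evaluate the compiled arithmetic expression
            Debug.WriteLine("result: " + result);   // -1629950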