Example 1
        public void PerfTestWithFastTokenizerAndLargeQuery()
        {
            ITokenizer tokenizer = new PrecedenceBasedRegexTokenizer();
            string     query     = @"MATCH app = 'MyTestApp'
AND ex IN ('System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException')
AND sf = 'sadsdfsdfsdfsdfssdfjhsfjhsdfjhsdfjhsdfjhsdjfhsdjhfsdjfhsdhfsdjhfsdjhfjsdhfjsdhfjhsdjfhsdjfh'
AND sf = 'fggdfgdfgfdgdfgdfgggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggh'
AND sf = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
AND sf = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
AND sf = 'ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
AND sf = 'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
AND sf = '1eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee'
AND sf = '2sadsdfsdfsdfsdfssdfjhsfjhsdfjhsdfjhsdfjhsdjfhsdjhfsdjfhsdhfsdjhfsdjhfjsdhfjsdhfjhsdjfhsdjfh'
AND sf = '3fggdfgdfgfdgdfgdfgggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggh'
AND sf = '4aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
AND sf = '5bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'
AND sf = '6ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
AND sf = '7ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
AND sf = '8eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee'
AND ex IN ('System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException')
AND ex IN ('System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException')
AND ex IN ('System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException','System.NullReferenceException', 'System.FormatException')
BETWEEN 2016-01-01 00:00:00 AND 2016-02-01 00:00:00
LIMIT 100";

            PerfTest(tokenizer, query, "Fast tokenizer + large query");
        }
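Example 1 and Example 2 both delegate to a PerfTest helper that is not shown here. The following is a minimal, hypothetical sketch of what such a helper could look like, assuming ITokenizer.Tokenize(string) returns an enumerable token sequence (as the other examples suggest) and that System and System.Linq are in scope; the iteration count and output format are placeholders.

        // Hypothetical sketch only; not the original PerfTest implementation.
        private static void PerfTest(ITokenizer tokenizer, string query, string label)
        {
            const int iterations = 1000; // placeholder iteration count

            // Warm-up run so JIT compilation does not skew the timing.
            tokenizer.Tokenize(query).ToList();

            var stopwatch = System.Diagnostics.Stopwatch.StartNew();
            for (int i = 0; i < iterations; i++)
            {
                tokenizer.Tokenize(query).ToList();
            }
            stopwatch.Stop();

            Console.WriteLine($"{label}: {iterations} iterations in {stopwatch.ElapsedMilliseconds} ms");
        }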
Example 2
        public void PerfTestWithFastTokenizerAndSmallQuery()
        {
            ITokenizer tokenizer = new PrecedenceBasedRegexTokenizer();
            string     query     = @"MATCH app = 'MyTestApp'
AND ex IN ('System.NullReferenceException', 'System.FormatException')
BETWEEN 2016-01-01 00:00:00 AND 2016-02-01 00:00:00
LIMIT 100";

            PerfTest(tokenizer, query, "Fast tokenizer + small query");
        }
Example 3
        public void Run()
        {
            while (true)
            {
                Console.WriteLine("Press 1 : view inefficient version output");
                Console.WriteLine("Press 2 : Perf test of inefficient tokenizer with a small query");
                Console.WriteLine("Press 3 : Perf test of inefficient tokenizer with a large query");
                Console.WriteLine("Press 4 : view more efficient version output");
                Console.WriteLine("Press 5 : Perf test of more efficient tokenizer with a small query");
                Console.WriteLine("Press 6 : Perf test of more efficient tokenizer with a large query");

                var key = Console.ReadKey();
                Console.WriteLine("");

                switch (key.KeyChar.ToString())
                {
                case "1":
                    ITokenizer slowTokenizer = new SimpleRegexTokenizer();
                    RunOnceAndPrintOutput(slowTokenizer, "Run with inefficient tokenizer");
                    break;

                case "2":
                    PerfTestWithSlowTokenizerAndSmallQuery();
                    break;

                case "3":
                    PerfTestWithSlowTokenizerAndLargeQuery();
                    break;

                case "4":
                    ITokenizer fastTokenizer = new PrecedenceBasedRegexTokenizer();
                    RunOnceAndPrintOutput(fastTokenizer, "Run with faster tokenizer");
                    break;

                case "5":
                    PerfTestWithFastTokenizerAndSmallQuery();
                    break;

                case "6":
                    PerfTestWithFastTokenizerAndLargeQuery();
                    break;

                default:
                    Console.WriteLine("Press 1, 2, 3, 4, 5 or 6");
                    break;
                }

                Console.WriteLine("");
            }
        }
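The menu in Example 3 also calls RunOnceAndPrintOutput, which is not included above. Below is a hypothetical sketch under the same assumptions (ITokenizer.Tokenize(string) yields tokens whose ToString() is printable); the sample query is a placeholder.

        // Hypothetical sketch only; not the original RunOnceAndPrintOutput implementation.
        private static void RunOnceAndPrintOutput(ITokenizer tokenizer, string label)
        {
            // Placeholder query; the original helper presumably tokenizes its own sample text.
            string query = @"MATCH app = 'MyTestApp'
AND ex IN ('System.NullReferenceException')
LIMIT 100";

            Console.WriteLine(label);
            foreach (var token in tokenizer.Tokenize(query))
            {
                Console.WriteLine(token.ToString());
            }
        }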
Example 4
        static void Main(string[] args)
        {
            var main_code_text = File.ReadAllText(args[2]);

            //Console.WriteLine(main_code_text.GetHashCode());
            Console.WriteLine("-------------------------------------------------");
            Console.WriteLine("Сode received in the Gonozov's language: ");
            Console.WriteLine("- - - - - - - - - - - - - - - - - - - - - - - - -");
            Console.WriteLine(main_code_text);
            Console.WriteLine("-------------------------------------------------");

            VariableTable <Function> globalFuncTable = new VariableTable <Function>();

            PrecedenceBasedRegexTokenizer tokenizer = new PrecedenceBasedRegexTokenizer();
            PrecedenceBasedRPNParser      parser    = new PrecedenceBasedRPNParser(ref globalFuncTable);
            TriadsOptimizer triadsOptimizer         = new TriadsOptimizer();
            StackMachine    stackMachine            = new StackMachine(globalFuncTable);


            var tokenSequence = tokenizer.Tokenize(main_code_text).ToList();

            foreach (var token in tokenSequence)
            {
                Console.Write(token.ToString() + " , ");
            }
            Console.WriteLine("\n-------------------------------------------------");

            var parseSequence = parser.Parse(tokenSequence);

            foreach (var result in parseSequence)
            {
                Console.Write(result.ToString() + " , ");
            }
            Console.WriteLine("\n-------------------------------------------------");

            globalFuncTable.Print();

            //parseSequence = triadsOptimizer.Optimize(parseSequence);

            //Console.WriteLine("\n-------------------------------------------------");

            //foreach (var result in parseSequence)
            //{
            //    Console.Write(result.ToString() + " , ");
            //}

            //Console.WriteLine("\n-------------------------------------------------");

            stackMachine.Execute(parseSequence);
        }
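Example 4 wires the whole pipeline together by hand (tokenize, parse to RPN, execute on the stack machine) and interleaves console diagnostics. A condensed, hypothetical wrapper over the same steps, using only the types and calls visible above, might look like this.

        // Hypothetical helper; reuses only calls shown in Example 4, with diagnostics omitted.
        static void RunSource(string sourceText)
        {
            var funcTable = new VariableTable<Function>();

            var tokenizer    = new PrecedenceBasedRegexTokenizer();
            var parser       = new PrecedenceBasedRPNParser(ref funcTable);
            var stackMachine = new StackMachine(funcTable);

            var tokens = tokenizer.Tokenize(sourceText).ToList();
            var rpn    = parser.Parse(tokens);

            stackMachine.Execute(rpn);
        }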
Example 5
        public void Run()
        {
            while (true)
            {
                //Console.WriteLine("Press 1 : view inefficient version output");
                //Console.WriteLine("Press 2 : Perf test of inefficient tokenizer with a small query");
                //Console.WriteLine("Press 3 : Perf test of inefficient tokenizer with a large query");
                Console.WriteLine("Presione 1 : Ver salida");
                Console.WriteLine("Presione 2 : Prueba de rendimiento de tokenizer con una pequeña consulta");
                Console.WriteLine("Presione 3 : Prueba de rendimiento de tokenizer con una consulta grande");

                var key = Console.ReadKey();
                Console.WriteLine("");

                switch (key.KeyChar.ToString())
                {
                //case "1":
                //    ITokenizer slowTokenizer = new SimpleRegexTokenizer();
                //    RunOnceAndPrintOutput(slowTokenizer, "Run with inefficient tokenizer");
                //    break;
                //case "2":
                //    PerfTestWithSlowTokenizerAndSmallQuery();
                //    break;
                //case "3":
                //    PerfTestWithSlowTokenizerAndLargeQuery();
                //    break;
                case "1":
                    ITokenizer fastTokenizer = new PrecedenceBasedRegexTokenizer();
                    RunOnceAndPrintOutput(fastTokenizer, "Run with faster tokenizer");
                    break;

                case "2":
                    PerfTestWithFastTokenizerAndSmallQuery();
                    break;

                case "3":
                    PerfTestWithFastTokenizerAndLargeQuery();
                    break;

                default:
                    Console.WriteLine("Press 1, 2, 3");
                    break;
                }

                Console.WriteLine("");
            }
        }
Example 6
        public static QueryTree GetQuery(string query)
        {
            var tokens = new PrecedenceBasedRegexTokenizer().Tokenize(query).ToList();

            return new Parser().Parse(tokens);
        }
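Finally, a hedged usage sketch for the GetQuery helper in Example 6; the query text is a placeholder and nothing about QueryTree's API is assumed beyond its default ToString().

        // Hypothetical caller, assumed to live in the same class as GetQuery.
        static void Demo()
        {
            QueryTree tree = GetQuery(@"MATCH app = 'MyTestApp'
AND ex IN ('System.FormatException')
LIMIT 100");

            Console.WriteLine(tree); // no QueryTree members are assumed here
        }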