Example #1
        public AssertTokenizer /*!*/ Load(object /*!*/ source) // source: byte[] or string
        {
            _tests.Assert(_log.Errors.Count == 0, "Previous test case reported unexpected error/warning(s)");

            SourceUnit   sourceUnit;
            RubyEncoding encoding;

            byte[] binarySource = source as byte[];
            if (binarySource != null)
            {
                encoding   = RubyEncoding.Binary;
                sourceUnit = _context.CreateSourceUnit(new BinaryContentProvider(binarySource), null, encoding.Encoding, SourceCodeKind.File);
            }
            else
            {
                encoding   = DefaultEncoding;
                sourceUnit = _context.CreateSnippet((string)source, SourceCodeKind.File);
            }

            _tokenizer = new Tokenizer(false, DummyVariableResolver.AllMethodNames)
            {
                ErrorSink     = _log,
                Compatibility = Compatibility,
                Encoding      = encoding,
                Verbatim      = Verbatim,
            };

            _tokenizer.Initialize(sourceUnit);
            _allTokens = new List<Tokens>();
            _allValues = new List<object>();
            return this;
        }
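Load dispatches on the runtime type of source: a byte[] is wrapped in a BinaryContentProvider and tokenized as binary, while anything else is cast to string and loaded as a snippet in DefaultEncoding. A minimal, self-contained sketch of that as-cast dispatch pattern, using only BCL types (LoadPattern and Describe are illustrative stand-ins, not part of the API above):

using System;
using System.Text;

static class LoadPattern
{
    // Stand-in for Load's dispatch: byte[] takes the binary path,
    // everything else is cast to string, mirroring the method above.
    static string Describe(object source)
    {
        byte[] binary = source as byte[];
        if (binary != null)
        {
            return string.Format("binary source, {0} bytes", binary.Length);
        }
        string text = (string)source; // throws InvalidCastException for other types, as Load would
        return string.Format("text source, {0} chars", text.Length);
    }

    static void Main()
    {
        Console.WriteLine(Describe("1 + 2"));                          // text path
        Console.WriteLine(Describe(Encoding.ASCII.GetBytes("1 + 2"))); // binary path
    }
}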
Example #2
        public AssertTokenizer /*!*/ Load(byte[] /*!*/ source)
        {
            Tests.Assert(_log.Errors.Count == 0, "Previous test case reported unexpected error/warning(s)");

            _tokenizer = new Tokenizer(false, _log);
            _tokenizer.Compatibility = _context.RubyOptions.Compatibility;
            _tokenizer.Initialize(_context.CreateSourceUnit(
                new BinaryContentProvider(source), null, BinaryEncoding.Instance, SourceCodeKind.File));
            _allTokens = new List<Tokens>();
            _allValues = new List<object>();
            return this;
        }
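Unlike Example #1, this overload accepts only raw bytes, passes the error sink through the Tokenizer constructor, and reads Compatibility from the context's RubyOptions. A caller holding a string must encode it first; a short sketch of that conversion (the variable names and the commented-out call are illustrative, only Load itself appears above):

// Hypothetical caller-side conversion before invoking the byte[] overload.
// BinaryEncoding.Instance above preserves raw bytes, so ASCII input round-trips safely.
string code = "puts 'hello'";
byte[] bytes = System.Text.Encoding.ASCII.GetBytes(code);
// assertTokenizer.Load(bytes);  // hypothetical harness call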
Example #3
        private void Benchmark(List<string> /*!*/ files)
        {
            var       sources   = new List<SourceUnit>();
            Stopwatch readTime  = new Stopwatch();
            long      totalSize = 0;

            readTime.Start();
            foreach (string path in files)
            {
                try {
                    byte[] data = File.ReadAllBytes(path);
                    sources.Add(_context.CreateSourceUnit(new BinaryContentProvider(data), path, Encoding.Default, SourceCodeKind.File));
                    totalSize += data.Length;
                } catch (Exception) {
                    Console.WriteLine("Error: {0}", path);
                }
            }
            readTime.Stop();

            Console.WriteLine("Read: {0} kB in {1}", totalSize / 1024, readTime.Elapsed);

#if F // disabled: tokenization-only timing pass, kept for reference
            Stopwatch tokenizeTime = new Stopwatch();
            tokenizeTime.Start();
            foreach (var source in sources)
            {
                try {
                    var tokenizer = new Tokenizer();
                    tokenizer.Initialize(source);

                    Tokens token;
                    do
                    {
                        token = tokenizer.GetNextToken();
                    } while (token != Tokens.EndOfFile);
                } catch (Exception) {
                    Console.WriteLine("Tokenization error: {0}", source.Path);
                    break;
                }
            }
            tokenizeTime.Stop();
#endif
            //var stackSizes = new Dictionary<int, int>();

            var       options       = new RubyCompilerOptions();
            Stopwatch parseTime     = new Stopwatch();
            Stopwatch transformTime = new Stopwatch();
            foreach (var source in sources)
            {
                try {
                    parseTime.Start();
                    var parser = new Parser();
                    var rubyTree = parser.Parse(source, options, ErrorSink.Null); // capture the AST so the disabled transform block below stays consistent
                    //int mt;
                    //stackSizes[parser.StackMaxTop] = stackSizes.TryGetValue(parser.StackMaxTop, out mt) ? mt + 1 : 1;
                    parseTime.Stop();
#if F // disabled: AST-to-DLR transform timing
                    if (rubyTree != null)
                    {
                        transformTime.Start();
                        var lambda = _context.TransformTree<DlrMainCallTarget>(rubyTree, source, options);
                        transformTime.Stop();
                    }
                    else
                    {
                        Console.WriteLine("SyntaxError: {0}", source.Path);
                    }
#endif
                } catch (Exception e) {
                    Console.WriteLine("{0}: {1}: {2}", e.GetType().Name, source.Path, e.Message);
                    break;
                }
            }

            //  Console.WriteLine("Tokenize:        {0}", tokenizeTime.Elapsed);
            Console.WriteLine("Parse:           {0}", parseTime.Elapsed);
            //Console.WriteLine("Idf/Kwd/Loc: {0}/{1}/{2}", Tokenizer.IdfLength, Tokenizer.KwdLength, Tokenizer.LocLength);
            // Console.WriteLine("Transform:       {0}", transformTime.Elapsed);

            //PerfTrack.DumpHistogram(Parser.Reductions);
            //PerfTrack.DumpHistogram(stackSizes);
        }
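A sketch of a driver for Benchmark, collecting Ruby sources recursively before handing them to the method above (BenchmarkAll and its root parameter are hypothetical; Directory.GetFiles and List<string> are the only APIs used beyond what the class already references):

        // Hypothetical driver, assumed to live in the same class so Benchmark is in scope.
        private void BenchmarkAll(string root)
        {
            // Recursively collect Ruby sources; the search pattern is a placeholder.
            var files = new List<string>(
                Directory.GetFiles(root, "*.rb", SearchOption.AllDirectories));
            Benchmark(files);
        }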