Code Example #1
        public void DoubleFunctionDefinition()
        {
            //First parse the initial file and then request the rest
            //Let's lexicalize the file
            StreamReader sourceStream = new StreamReader("../../../../TestChecker/WaebricTestFiles/doublefunctiondefinition.wae");
            WaebricLexer lexer        = new WaebricLexer(sourceStream);

            lexer.LexicalizeStream();
            TokenIterator tokens = lexer.GetTokenIterator();

            //Let's parse the file
            WaebricParser parser = new WaebricParser(tokens);

            parser.Parse();

            SyntaxTree parsedTree = parser.GetTree();

            //Initialize ModuleCache with correct DirectoryPath
            ModuleCache.Instance.SetDirectoryPath("../../../../TestChecker/WaebricTestFiles/");

            //Let's check the tree
            WaebricChecker   checker           = new WaebricChecker();
            List <Exception> checkerExceptions = checker.CheckSyntaxTree(parsedTree);

            //A FunctionAlreadyDefined exception should be in the list
            Assert.AreEqual(1, checkerExceptions.Count);
            Assert.AreEqual(typeof(FunctionAlreadyDefined), checkerExceptions.ToArray()[0].GetType());
        }
Code Example #2
        /// <summary>
        /// Initialize test
        /// </summary>
        /// <param name="stream">Stream to lexicalize</param>
        /// <returns>TokenIterator</returns>
        private TokenIterator Init(String stream)
        {
            lexer = new WaebricLexer(new StringReader(stream));
            lexer.LexicalizeStream();

            return(lexer.GetTokenIterator());
        }
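For context, a test that uses this helper might look like the sketch below. The method name is hypothetical, and the expected token count of 2 is inferred from the import test further down, which asserts that "module test\n\nimport importtest" lexes into 4 tokens.

        public void InitHelperSketch()
        {
            //Hypothetical example: lexicalize a minimal module and inspect its tokens
            TokenIterator tokens = Init("module test");

            Assert.AreEqual(2, tokens.GetSize());                           //"module" keyword + identifier (inferred)
            Assert.AreEqual("test", tokens.Peek(2).GetValue().ToString());  //Peek is 1-based, as in the tests below
        }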
Code Example #3
        public void ModuleParserSiteTest()
        {
            SyntaxTree tree = new SyntaxTree();

            //Create lexer to tokenize stream
            WaebricLexer lexer = new WaebricLexer(new StringReader("module test\n\nsite\n  site/index.html : home() ; site/index2.html : home()\nend"));

            lexer.LexicalizeStream();

            //Parse tokenized stream
            ModuleParser parser = new ModuleParser(lexer.GetTokenIterator());

            tree.SetRoot(parser.ParseModule());

            //Check module
            Module module = tree.GetRoot();

            Assert.IsTrue(module.GetModuleId().ToString() == "test");
            Assert.AreEqual(0, module.GetImports().Count);             //No imports
            Assert.AreEqual(0, module.GetFunctionDefinitions().Count); //No function definitions
            Assert.AreEqual(1, module.GetSites().Count);               //One site

            //Check site
            Site site = (Site)module.GetSites().Get(0);

            Assert.AreEqual(2, site.GetMappings().Count);
        }
Code Example #4
        public void WaebricCheckerImportTest()
        {
            //First parse the initial file and then request the rest
            //Let's lexicalize the file
            StreamReader sourceStream = new StreamReader("../../../../TestChecker/WaebricTestFiles/home.wae");
            WaebricLexer lexer        = new WaebricLexer(sourceStream);

            lexer.LexicalizeStream();
            TokenIterator tokens = lexer.GetTokenIterator();

            //Let's parse the file
            WaebricParser parser = new WaebricParser(tokens);

            parser.Parse();

            SyntaxTree parsedTree = parser.GetTree();

            //Initialize ModuleCache with correct DirectoryPath
            ModuleCache.Instance.SetDirectoryPath("../../../../TestChecker/WaebricTestFiles/");

            //Let's check the tree
            WaebricChecker   checker           = new WaebricChecker();
            List <Exception> checkerExceptions = checker.CheckSyntaxTree(parsedTree);

            //Test output
            Assert.AreEqual(0, checkerExceptions.Count);

            //Test if all modules except tree root are in cache
            Assert.IsTrue(ModuleCache.Instance.ContainsModule("first"));
            Assert.IsTrue(ModuleCache.Instance.ContainsModule("second"));
            Assert.IsTrue(ModuleCache.Instance.ContainsModule("common"));
        }
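Note that CheckSyntaxTree reports semantic problems by returning them in a list rather than throwing, so a non-test caller has to surface them itself. A minimal sketch of such a reporting step follows; the helper name and message format are illustrative, not part of the project.

        //Illustrative helper: print every semantic error returned by the checker
        private static void ReportCheckerExceptions(List<Exception> checkerExceptions)
        {
            foreach (Exception checkerException in checkerExceptions)
            {
                Console.WriteLine("WaebricChecker: " + checkerException.Message);
            }
        }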
Code Example #5
        /// <summary>
        /// Request a specified module
        /// </summary>
        /// <param name="identifier">ModuleId of the requested module</param>
        /// <returns>Requested module if available</returns>
        public Module RequestModule(ModuleId identifier)
        {
            if (ModuleTable.ContainsKey(identifier))
            {   //Module already loaded so return instance of module
                return((Module)ModuleTable[identifier]);
            }

            //Module not cached, so load it
            StreamReader moduleStream = new StreamReader(GetPath(identifier));

            //Lexicalize and parse it
            WaebricLexer lexer = new WaebricLexer(moduleStream);

            lexer.LexicalizeStream();
            WaebricParser parser = new WaebricParser(lexer.GetTokenIterator());

            parser.Parse();

            //Get module of tree
            SyntaxTree tree = parser.GetTree();

            //Add module to hashtable
            Module requestedModule = tree.GetRoot();

            ModuleTable.Add(identifier, requestedModule);

            return(requestedModule);
        }
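GetPath(identifier) is called above but not included in this excerpt. A hypothetical sketch of what it might do, assuming the cache keeps the directory passed to SetDirectoryPath in a DirectoryPath field and that a module file is named after its identifier with a .wae extension; both are assumptions, not confirmed by the source.

        //Hypothetical sketch of the path lookup used by RequestModule
        private String GetPath(ModuleId identifier)
        {
            //Assumes DirectoryPath holds the value passed to SetDirectoryPath, including a trailing slash
            return DirectoryPath + identifier.ToString() + ".wae";
        }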
Code Example #6
        public void ComplexEmbeddingTest()
        {
            //Set up tokenizer
            WaebricLexer lexer = new WaebricLexer(new StringReader("\"<a(href=\"http://www.microsoft.com\") \"Microsoft Corp\">\""));

            lexer.LexicalizeStream();

            TokenIterator tokens = lexer.GetTokenIterator();

            //Test token
            Assert.AreEqual(1, tokens.GetSize());
            Assert.AreEqual(TokenType.EMBEDDING, tokens.Peek(1).GetType());

            //Test tokens in embedding
            EmbeddingToken embeddingToken  = (EmbeddingToken)tokens.NextToken();
            TokenIterator  embeddingTokens = embeddingToken.GetTokenIterator();

            Assert.AreEqual(12, embeddingTokens.GetSize());
            Assert.AreEqual("\"", embeddingTokens.Peek(1).GetValue().ToString());
            Assert.AreEqual("", embeddingTokens.Peek(2).GetValue().ToString());
            Assert.AreEqual("<", embeddingTokens.Peek(3).GetValue().ToString());
            Assert.AreEqual("a", embeddingTokens.Peek(4).GetValue().ToString());
            Assert.AreEqual("(", embeddingTokens.Peek(5).GetValue().ToString());
            Assert.AreEqual("href", embeddingTokens.Peek(6).GetValue().ToString());
            Assert.AreEqual("=", embeddingTokens.Peek(7).GetValue().ToString());
            Assert.AreEqual("http://www.microsoft.com", embeddingTokens.Peek(8).GetValue().ToString());
            Assert.AreEqual(")", embeddingTokens.Peek(9).GetValue().ToString());
            Assert.AreEqual("Microsoft Corp", embeddingTokens.Peek(10).GetValue().ToString());
            Assert.AreEqual(">", embeddingTokens.Peek(11).GetValue().ToString());
            Assert.AreEqual("\"", embeddingTokens.Peek(12).GetValue().ToString());
        }
Code Example #7
        public void EmbeddingTest()
        {
            //Set up tokenizer
            WaebricLexer lexer = new WaebricLexer(new StringReader("\"pre<\"\\\">\">post\""));

            lexer.LexicalizeStream();

            TokenIterator tokens = lexer.GetTokenIterator();

            //Test token
            Assert.AreEqual(1, tokens.GetSize());
            Assert.AreEqual(TokenType.EMBEDDING, tokens.Peek(1).GetType());

            //Get embedding and test inner tokens
            EmbeddingToken parsedToken     = (EmbeddingToken)tokens.NextToken();
            TokenIterator  embeddingTokens = parsedToken.GetTokenIterator();

            Assert.AreEqual(7, embeddingTokens.GetSize());
            Assert.AreEqual("\"", embeddingTokens.Peek(1).GetValue().ToString());
            Assert.AreEqual("pre", embeddingTokens.Peek(2).GetValue().ToString());
            Assert.AreEqual("<", embeddingTokens.Peek(3).GetValue().ToString());
            Assert.AreEqual("\\\">", embeddingTokens.Peek(4).GetValue().ToString());
            Assert.AreEqual(">", embeddingTokens.Peek(5).GetValue().ToString());
            Assert.AreEqual("post", embeddingTokens.Peek(6).GetValue().ToString());
            Assert.AreEqual("\"", embeddingTokens.Peek(7).GetValue().ToString());
        }
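When an embedding does not lex as expected, dumping its inner tokens can help pin down the failing position. The small helper below is built only from the GetTokenIterator, GetSize, Peek and GetValue calls exercised above; the helper itself is not part of the project.

        //Illustrative helper: print the inner tokens of an embedding (Peek is 1-based, as in the tests above)
        private static void DumpEmbeddingTokens(EmbeddingToken embeddingToken)
        {
            TokenIterator embeddingTokens = embeddingToken.GetTokenIterator();

            for (int position = 1; position <= embeddingTokens.GetSize(); position++)
            {
                Console.WriteLine(position + ": " + embeddingTokens.Peek(position).GetValue().ToString());
            }
        }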
Code Example #8
        public void TestComplexStream()
        {
            WaebricLexer lexer = new WaebricLexer(new StringReader("module test\n\nsite site/index.html : home()\nend"));

            lexer.LexicalizeStream();

            Assert.IsTrue(lexer.GetTokenList().Count == 13);
        }
Code Example #9
        public void MultipleLineCommentTest()
        {
            //Set up tokenizer
            WaebricLexer lexer = new WaebricLexer(new StringReader("/*this is a comment \n on multiple \n lines*/"));

            lexer.LexicalizeStream();

            TokenIterator tokens = lexer.GetTokenIterator();

            Assert.AreEqual(0, tokens.GetSize());
        }
Code Example #10
        public void SingleQuoteTest()
        {
            //Set up tokenizer
            WaebricLexer lexer = new WaebricLexer(new StringReader("\""));

            lexer.LexicalizeStream();

            TokenIterator tokens = lexer.GetTokenIterator();

            Assert.AreEqual(1, tokens.GetSize());
            Assert.AreEqual(TokenType.SYMBOL, tokens.Peek(1).GetType());
            Assert.AreEqual("\"", tokens.Peek(1).GetValue().ToString());
        }
Code Example #11
File: WaebricCompiler.cs Project: tvdstorm/waebric
        private static String Path; //Path of file to compile

        #endregion

        #region Public Methods

        public static void Main(string[] args)
        {
            Console.WriteLine("Waebric Compiler/Interpreter v1.0");
            Console.WriteLine("---------------------------------");


            if (args.Length == 1)
            {   //There is one file specified.
                Path = args[0];
            }
            else
            {
                Console.WriteLine("WeabricCompiler: no input file specified.");
                Console.Read(); //Testing purposes only
                return;
            }

            //Let's lexicalize the file
            StreamReader sourceStream = new StreamReader(Path);
            WaebricLexer lexer        = new WaebricLexer(sourceStream);


            lexer.LexicalizeStream();
            TokenIterator tokens = lexer.GetTokenIterator();

            if (tokens.GetSize() == 0)
            {           //No tokens parsed
                Console.WriteLine("WaebricCompiler: Empty file or comments only.");
                return; //Nothing to compile so end program
            }

            //Let's parse the file
            WaebricParser parser = new WaebricParser(tokens);

            parser.Parse();

            SyntaxTree parsedTree = parser.GetTree();

            //Initialize ModuleCache with correct DirectoryPath
            ModuleCache.Instance.SetDirectoryPath(GetDirectoryPath());

            //Let's check the tree
            WaebricChecker checker = new WaebricChecker();

            checker.CheckSyntaxTree(parsedTree);

            //Let's interpret the tree and generate XHTML
            WaebricInterpreter interpreter = new WaebricInterpreter();

            interpreter.InterpretAST(parsedTree);
        }
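GetDirectoryPath() is referenced in Main but not shown in this excerpt. A minimal sketch of what such a helper could look like, assuming it simply strips the file name from the static Path field; note that the field hides System.IO.Path, so the framework class must be fully qualified.

        //Hypothetical sketch: derive the module directory from the input file path
        private static String GetDirectoryPath()
        {
            //Fully qualified because the static field Path shadows System.IO.Path
            return System.IO.Path.GetDirectoryName(Path) + "/";
        }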
Code Example #12
        public void WaebricSymbolTokenTest()
        {
            //Set up tokenizer
            WaebricLexer lexer = new WaebricLexer(new StringReader("'test"));

            lexer.LexicalizeStream();

            TokenIterator tokens = lexer.GetTokenIterator();

            //Test token
            Assert.AreEqual(1, tokens.GetSize());
            Token token = tokens.NextToken();

            Assert.AreEqual(TokenType.WAEBRICSYMBOL, token.GetType());
            Assert.AreEqual("test", token.GetValue().ToString());
        }
Code Example #13
        public void ModuleParserComplexModuleNameTest()
        {
            SyntaxTree tree;

            //Create lexer to tokenize stream
            WaebricLexer lexer = new WaebricLexer(new StringReader("module test.test2.test3"));

            lexer.LexicalizeStream();

            //Retrieve the TokenIterator from the lexer and let's parse it
            WaebricParser parser = new WaebricParser(lexer.GetTokenIterator());

            parser.Parse();

            //Test that the tree root is the parsed module and that it has the right module id
            tree = parser.GetTree();

            Module module = tree.GetRoot();

            Assert.AreEqual(3, module.GetModuleId().GetIdentifiers().Count);
            Assert.AreEqual("test.test2.test3", module.GetModuleId().ToString());
        }
Code Example #14
        public void ModuleParserImportTest()
        {
            SyntaxTree tree;

            //Create lexer to tokenize stream
            WaebricLexer lexer = new WaebricLexer(new StringReader("module test\n\nimport importtest"));

            lexer.LexicalizeStream();

            //Test if stream is lexicalized into 4 tokens
            Assert.IsTrue(lexer.GetTokenIterator().GetSize() == 4);

            //Retrieve the TokenIterator from the lexer and let's parse it
            WaebricParser parser = new WaebricParser(lexer.GetTokenIterator());

            parser.Parse();

            //Test tree structure
            tree = parser.GetTree();

            Module module = tree.GetRoot();
        }
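The test above retrieves the root module but asserts nothing about it. A hedged sketch of the checks one might add, reusing only accessors already exercised in the site test; the expected counts follow from the single import line in the input, and the helper itself is hypothetical.

        //Illustrative helper (hypothetical): verify a module that declares a single import and nothing else
        private static void AssertSingleImportModule(Module module)
        {
            Assert.AreEqual(1, module.GetImports().Count);             //One import
            Assert.AreEqual(0, module.GetSites().Count);               //No sites
            Assert.AreEqual(0, module.GetFunctionDefinitions().Count); //No function definitions
        }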
Code Example #15
        public void ModuleParserSingleModuleTest()
        {
            SyntaxTree tree;

            //Create lexer to tokenize stream
            WaebricLexer lexer = new WaebricLexer(new StringReader("module test"));

            lexer.LexicalizeStream();

            //Retrieve the TokenIterator from the lexer and let's parse it
            WaebricParser parser = new WaebricParser(lexer.GetTokenIterator());

            parser.Parse();

            //Test that the tree root is the parsed module and that it has the right module id
            tree = parser.GetTree();

            Module module = tree.GetRoot();

            String[] identifiers = module.GetModuleId().GetIdentifiers().ToArray();
            Assert.AreEqual(1, identifiers.Length);
            Assert.AreEqual("test", identifiers[0]);
        }