Example #1
 public CodeGeneratorTest()
 {
     // Initialise lexer definitions
     foreach (var pair in TokenRegex.Instance.Regexes)
     {
         _lexer.AddDefinition(new TokenDefinition(pair.Key, pair.Value));
     }
 }
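Neither Lexer nor TokenDefinition is shown on this page, so as a rough mental model only, here is a hypothetical, self-contained sketch of what a regex-driven lexer of this shape usually reduces to: a list of (type, pattern) definitions plus a scan loop that takes the longest match at the current position. This is not the library code the examples above call.

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

// Hypothetical sketch only: illustrates the definition-list + match-loop idea,
// not the actual Lexer/TokenDefinition types used by the examples on this page.
enum SketchTokenType { Word, Number, Whitespace }

sealed class SketchDefinition
{
    public SketchDefinition(SketchTokenType type, string pattern)
    {
        Type  = type;
        Regex = new Regex(@"\G(?:" + pattern + ")"); // \G anchors the match at the scan position
    }

    public SketchTokenType Type { get; }
    public Regex Regex { get; }
}

sealed class SketchLexer
{
    private readonly List<SketchDefinition> _definitions = new List<SketchDefinition>();

    public void AddDefinition(SketchDefinition definition) => _definitions.Add(definition);

    public IEnumerable<(SketchTokenType Type, string Value)> Tokenize(string source)
    {
        var position = 0;
        while (position < source.Length)
        {
            Match bestMatch = null;
            SketchDefinition bestDefinition = null;
            foreach (var definition in _definitions)
            {
                var match = definition.Regex.Match(source, position);
                if (match.Success && (bestMatch == null || match.Length > bestMatch.Length))
                {
                    bestMatch = match;            // keep the longest match across all definitions
                    bestDefinition = definition;
                }
            }

            if (bestMatch == null || bestMatch.Length == 0)
            {
                throw new Exception($"Unrecognised input at position {position}");
            }

            yield return (bestDefinition.Type, bestMatch.Value);
            position += bestMatch.Length;
        }
    }
}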
Example #2
        public void TestABC()
        {
            var lexer = new Lexer();

            lexer.AddDefinition(new TokenDefinition("a", TokenType.classProperties));
            lexer.AddDefinition(new TokenDefinition("b", TokenType.classProperties));
            lexer.AddDefinition(new TokenDefinition("c", TokenType.classProperties));
            lexer.AddDefinition(new TokenDefinition(@"\s+", TokenType.whitespace));

            var result = new List<Token>();

            result.AddRange(lexer.Tokenize("abc"));

            Assert.Equal(3, result.Count);
            Assert.Equal("a", result[0].Value);
            Assert.Equal("b", result[1].Value);
            Assert.Equal("c", result[2].Value);
        }
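The input "abc" contains no whitespace, so this test does not pin down whether Tokenize returns whitespace tokens or swallows them. A hedged sketch one could add inside a test like the one above, assuming whitespace tokens do come back tagged with the definition's type and that Token exposes the Type property seen in the other examples (requires System.Linq); if the lexer already skips them, the filter is simply a no-op:

    // Assumption: Tokenize may emit whitespace tokens tagged TokenType.whitespace.
    var visible = lexer.Tokenize("a b c")
                       .Where(t => t.Type != TokenType.whitespace)
                       .ToList();

    Assert.Equal(3, visible.Count);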
Example #3
        private static void Main(string[] args)
        {
#if !DEBUG
            if (args.Length < 1 || args.Length > 2)
            {
                Logger.Instance.Error("Invalid arguments, correct usage: jampiler.exe {file} {ip}");
                return;
            }

            Logger.Instance.Debug(args[0]);
#endif

            CultureInfo.DefaultThreadCurrentCulture = CultureInfo.CreateSpecificCulture("en-GB");

            var lexer = new Lexer();

            foreach (var pair in TokenRegex.Instance.Regexes)
            {
                lexer.AddDefinition(new TokenDefinition(pair.Key, pair.Value));
            }

#if DEBUG
            var program = File.ReadAllText(@"../../test.jam");
#else
            var program = File.ReadAllText(args.ElementAtOrDefault(0));
#endif

            Logger.Instance.Debug(program);

            Token[] tokens = null;
            try
            {
                var lexTokens = lexer.Tokenize(program);
                tokens = lexTokens as Token[] ?? lexTokens.ToArray();
            }
            catch (Exception exception)
            {
                Logger.Instance.Error(exception.Message);
                return;
            }

            Logger.Instance.Debug("\n----- TOKENS -----");
            foreach (var token in tokens)
            {
                Logger.Instance.Debug(token.ToString());
            }
            Logger.Instance.Debug("--- END TOKENS ---");

            List<Node> nodes = null;
            try
            {
                var parser = new Parser();
                nodes = parser.Parse(tokens);
            }
            catch (Exception exception)
            {
                Logger.Instance.Error(exception.Message);
                return;
            }

            Logger.Instance.Debug("\n----- NODES -----");
            nodes.ForEach(n => n.Print());
            Logger.Instance.Debug("--- END NODES ---");

            Logger.Instance.Debug("\n----- OUTPUT -----");

            var codeGenOutput = "";
            try
            {
                var codeGenerator = new CodeGenerator();
                codeGenerator.Generate(nodes);
                codeGenOutput = codeGenerator.Output();
            }
            catch (Exception exception)
            {
                Logger.Instance.Error(exception.Message);
                return;
            }

            Logger.Instance.Debug(codeGenOutput);

            // Write assembly to file
            var file = new StreamWriter(@"jam.s");
            file.WriteLine(codeGenOutput);
            file.Close();

            Logger.Instance.Debug("--- END OUTPUT ---");

            // Run assembler and linker and copy to pi
            var directory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
            if (directory == null)
            {
                throw new Exception("Failed to find working directory");
            }

            var arguments = "/k arm-linux-gnueabihf-gcc -march=armv6 -mfloat-abi=hard -mfpu=vfp -o jam.out jam.s";
#if DEBUG
            arguments +=
                string.Format(
                    @" & pscp -pw raspberry jam.out pi@{0}:/home/pi & putty -pw raspberry -m chmod pi@{0}",
                    args.ElementAtOrDefault(1) ?? "192.168.1.34");
#else
            arguments +=
                string.Format(
                    @" & pscp -pw raspberry jam.out pi@{0}:/home/pi & " +
                    @"putty -pw raspberry -m chmod pi@{0}",
                    args.ElementAtOrDefault(1) ?? "192.168.1.34");
#endif

            var processStartInfo = new ProcessStartInfo()
            {
                //WindowStyle = ProcessWindowStyle.Hidden,
                FileName              = "cmd.exe",
                WorkingDirectory      = directory,
                Arguments             = arguments,
                RedirectStandardInput = true,
                UseShellExecute       = false
            };
            Process.Start(processStartInfo);

#if DEBUG
            Console.ReadLine();
#endif
        }
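The example drives the cross-compiler (plus the pscp/putty copy) through cmd.exe /k, which leaves a console window open and discards the compiler's exit code. A hypothetical variant for the compile step alone, assuming arm-linux-gnueabihf-gcc is on PATH; the copy-to-Pi step is left out of this sketch:

    // Hypothetical alternative (not from the original project): invoke gcc directly
    // so its exit code can be checked; pscp/putty are omitted here.
    var gccStartInfo = new ProcessStartInfo
    {
        FileName         = "arm-linux-gnueabihf-gcc",
        Arguments        = "-march=armv6 -mfloat-abi=hard -mfpu=vfp -o jam.out jam.s",
        WorkingDirectory = directory,
        UseShellExecute  = false
    };

    using (var gcc = Process.Start(gccStartInfo))
    {
        gcc.WaitForExit();
        Logger.Instance.Debug($"gcc exited with code {gcc.ExitCode}");
    }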
Example #4
        public ILexer GetLexer()
        {
            ILexer lexer = new Lexer();

            #region Numeric

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\d+"), TokenTypeEnum.Number));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\d+(\.\d{1,2})m?"), TokenTypeEnum.Real));

            #endregion

            #region Operator Tokens

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\^"), TokenTypeEnum.OperatorPotentiation));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\+"), TokenTypeEnum.OperatorSum));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\-"), TokenTypeEnum.OperatorSubtraction));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\*"), TokenTypeEnum.OperatorMultiplication));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\/"), TokenTypeEnum.OperatorDivision));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\="), TokenTypeEnum.Assignment));

            #endregion

            #region Relational Operators

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"=="), TokenTypeEnum.OperatorEquals));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"<="), TokenTypeEnum.OperatorLessEquals));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@">="), TokenTypeEnum.OperatorGreaterEquals));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\>"), TokenTypeEnum.OperatorBigger));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\<"), TokenTypeEnum.OperatorSmaller));

            #endregion

            #region Special Characters

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\' '"), TokenTypeEnum.Space, true));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\s+"), TokenTypeEnum.Tab, true));

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\t+"), TokenTypeEnum.Tab));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\n+"), TokenTypeEnum.LineBreak));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\r+"), TokenTypeEnum.LineBreak));

            #endregion

            #region Delimiters

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\{"), TokenTypeEnum.OpenKeys));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\}"), TokenTypeEnum.CloseKeys));

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\("), TokenTypeEnum.OpenParentheses));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\)"), TokenTypeEnum.CloseParentheses));

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\["), TokenTypeEnum.OpenBrackets));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\]"), TokenTypeEnum.CloseBrackets));

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\."), TokenTypeEnum.Dot));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\,"), TokenTypeEnum.Comma));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\;"), TokenTypeEnum.Semicolon));

            #endregion

            #region Reserved Words

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"main"), TokenTypeEnum.Main));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"var"), TokenTypeEnum.ReservedWordVar));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"while"), TokenTypeEnum.ReservedWordWhile));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"for"), TokenTypeEnum.ReservedWordFor));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"if"), TokenTypeEnum.ReservedWordIf));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"else"), TokenTypeEnum.ReservedWordElse));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"switch"), TokenTypeEnum.ReservedWordSwitch));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"case"), TokenTypeEnum.ReservedWordCase));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"return"), TokenTypeEnum.ReservedWordReturn));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"function"), TokenTypeEnum.ReservedWordFunction));

            #endregion

            #region Types

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"int"), TokenTypeEnum.TypeInt));
            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"real"), TokenTypeEnum.TypeReal));

            #endregion

            #region Identifiers

            lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"[A-Za-z_][a-zA-Z0-9_]*"), TokenTypeEnum.Identifier));

            #endregion

            return lexer;
        }
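One detail worth flagging in this table: bare patterns such as @"if", @"int" and @"for" also match the beginning of identifiers like ifCount or integral, so whether a keyword or an identifier wins depends on how the lexer resolves overlapping definitions. A common defensive tweak, hypothetical here but assuming the same Factory.Create API as above, is to anchor the reserved words on word boundaries:

    // Hypothetical word-boundary variants: "ifCount" then lexes as an identifier
    // rather than the reserved word "if" followed by "Count".
    lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\bif\b"), TokenTypeEnum.ReservedWordIf));
    lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\bint\b"), TokenTypeEnum.TypeInt));
    lexer.AddDefinition(TokenDefinition.Factory.Create(new Regex(@"\bfor\b"), TokenTypeEnum.ReservedWordFor));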
Example #5
        static void Main(string[] args)
        {
            Lexer  lexer  = new Lexer();
            Parser parser = new Parser();

            lexer.AddDefinition(new TokenDefinition(TokenType.accessor, "(public|private)"));                                    // ([^a-zA-Z0-9])

            lexer.AddDefinition(new TokenDefinition(TokenType.typeSpecifier, "(bool|byte|short|int|float|double|char|string)")); // ([^a-zA-Z0-9])
            lexer.AddDefinition(new TokenDefinition(TokenType.constant, @"\d*[\.]?\d+"));
            lexer.AddDefinition(new TokenDefinition(TokenType.@string, "\"(\\\\.|[^\"\\\\])*\""));

            lexer.AddDefinition(new TokenDefinition(TokenType.accessOperator, "[.]"));
            lexer.AddDefinition(new TokenDefinition(TokenType.preOperator, "[!]"));
            //lexer.AddDefinition(new TokenDefinition(TokenType.postOperator, ""));
            lexer.AddDefinition(new TokenDefinition(TokenType.prepostOperator, @"(\+\+|\-\-)"));
            lexer.AddDefinition(new TokenDefinition(TokenType.comma, "[,]"));
            lexer.AddDefinition(new TokenDefinition(TokenType.binaryOperator, "(\\+|-|=|\\*|\\/|\\<|\\>)"));
            lexer.AddDefinition(new TokenDefinition(TokenType.@new, "new"));

            lexer.AddDefinition(new TokenDefinition(TokenType.lineEnd, "[;]"));

            lexer.AddDefinition(new TokenDefinition(TokenType.rBraceOpen, "[(]"));
            lexer.AddDefinition(new TokenDefinition(TokenType.rBraceClose, "[)]"));
            lexer.AddDefinition(new TokenDefinition(TokenType.cBraceOpen, "[{]"));
            lexer.AddDefinition(new TokenDefinition(TokenType.cBraceClose, "[}]"));

            lexer.AddDefinition(new TokenDefinition(TokenType.@namespace, "namespace"));
            lexer.AddDefinition(new TokenDefinition(TokenType.@class, "class"));

            lexer.AddDefinition(new TokenDefinition(TokenType.ifStatement, "if"));
            lexer.AddDefinition(new TokenDefinition(TokenType.elseStatement, "else"));
            lexer.AddDefinition(new TokenDefinition(TokenType.doStatement, "do"));
            lexer.AddDefinition(new TokenDefinition(TokenType.whileStatement, "while"));
            lexer.AddDefinition(new TokenDefinition(TokenType.forStatement, "for"));
            lexer.AddDefinition(new TokenDefinition(TokenType.returnStatement, "return"));

            lexer.AddDefinition(new TokenDefinition(TokenType.identifier, "[a-zA-Z_][a-zA-Z0-9_]*"));
            //string source = "a = b + c.d";//"int a = 5;";

            //Console.WriteLine(source);

            //Console.WriteLine();
            //Console.WriteLine();
            //Console.WriteLine();

            //foreach (var token in lexer.Tokenize(source))
            //{
            //    Console.WriteLine($"{token.Value} {token.Type.ToString()}");
            //}

            //Console.WriteLine();
            //Console.WriteLine();
            //Console.WriteLine();

            //List<Token> tokens = lexer.Tokenize(source).ToList();
            //Console.WriteLine(parser._ParseExpression(tokens).ToString());

            string source = System.IO.File.ReadAllText(@"D:\C#\Shpoon\TestFile.txt"); //"namespace Shpoon{class TestClass{int temp(int v){return v + 5;}}}";

            Console.WriteLine(source);

            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine();
            }

            TokenString tokens = new TokenString();

            foreach (var token in lexer.Tokenize(source))
            {
                tokens.Add(token);
                Console.WriteLine($"{token.Value} {token.Type.ToString()}");
            }

            for (int i = 0; i < 3; i++)
            {
                Console.WriteLine();
            }


            int zero = 0;
            var node = parser.Parse(tokens, ref zero);

            zero = 0;

            Console.WriteLine(node);

            Console.ReadLine();
        }
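A small, hypothetical adjustment (not in the original): take the source path from args when one is given, so the machine-specific D:\C#\Shpoon\TestFile.txt is only a fallback:

    // Hypothetical: prefer a path supplied on the command line, falling back to
    // the hard-coded test file when no argument is given.
    string path   = args.Length > 0 ? args[0] : @"D:\C#\Shpoon\TestFile.txt";
    string source = System.IO.File.ReadAllText(path);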