public void Test1()
        {
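            // A small C++-style program, one source line per array element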
            string[] inputText
                = { "int globalInt;",
                    "int globalAfterMainX;",
                    "int globalAfterMainY;",
                    "bool globalBool;",
                    "void f(int integer, bool boolean) {",
                    "  cout << " + "\"" + "I AM A FUNCTION!\n" + "\"" + ";",
                    "  return;",
                    "}",
                    "int main() {",
                    "int x;",
                    "x = 5;",
                    "f();",
                    "cout << x;",
                    "return 0;",
                    "}" };

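            // Tokenize the sample and verify no invalid tokens were produced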
            var tokens = tokenizer.Tokenize(inputText);

            Assert.IsFalse(tokens.Any(t => t.TokenType == TokenType.Invalid));

            var parser = new Parser(new Grammar(new GrammarRulesProvider()));

            var (couldParse, syntaxTree) = parser.Parse(tokens);

            Assert.IsTrue(couldParse);
        }
Example #2
        public Analysis Analyze(string content, int tokenLimit)
        {
            ITokenizer          tokenizer = new Tokenizer.Tokenizer();
            IEnumerable<string> tokens    = tokenizer.Tokenize(content);

            Analysis analysis = new Analysis();

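            // Count, per language, how many tokens appear in that language's dictionary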
            foreach (string token in tokens)
            {
                foreach (LanguageDictionary dict in languageDictionaries)
                {
                    if (dict.Internal.ContainsKey(token))
                    {
                        if (!analysis.analysisMap.ContainsKey(dict.Langauge))
                        {
                            analysis.analysisMap.Add(dict.Langauge, 0.0d);
                        }
                        analysis.analysisMap[dict.Langauge] += 1;
                    }
                }
                if (--tokenLimit <= 0) // stop once the token limit is reached
                {
                    break;
                }
            }
            return analysis;
        }
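A minimal usage sketch for the method above (`LanguageAnalyzer` as the containing class and its parameterless constructor are assumptions, not part of the source):

            // Hypothetical driver code for the Analyze method shown above
            var analyzer = new LanguageAnalyzer();
            Analysis result = analyzer.Analyze("some sample text", tokenLimit: 100);
            foreach (var entry in result.analysisMap)
            {
                Console.WriteLine($"{entry.Key}: {entry.Value}");
            }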
Example #3
        public void TokenDefinition_IsOperator1()
        {
            string[] input =
            {
                "( a + b ) / c"
            };

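            // Build a token definition for syntax operators from the provider's pattern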
            var syntaxRegex = RegexWrapper.DefaultWrap(SyntaxProvider.GetPattern());

            var tokenDefinition = new TokenDefinition(TokenType.SyntaxOperator, syntaxRegex);
            var tokensGenerated = _tokenizer.Tokenize(input).ToList();

            var operatorTokens = tokensGenerated.Where(t => t.TokenType == TokenType.SyntaxOperator).ToList();

            Assert.IsTrue(operatorTokens.Any());
            Assert.AreEqual(2, operatorTokens.Count);
            Assert.IsNotNull(operatorTokens.FirstOrDefault(t => t.Value == "("));
            Assert.IsNotNull(operatorTokens.FirstOrDefault(t => t.Value == ")"));
        }
Example #4
        public void TokenDefinition_IsOperator_Logic()
        {
            string[] input =
            {
                "! true",
                "1 << 7"
            };

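            // Build a token definition for arithmetic/logic operators from the provider's pattern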
            var operatorRegex = RegexWrapper.DefaultWrap(OperatorProvider.GetPattern());

            var tokenDefinition = new TokenDefinition(TokenType.ArithmeticAndLogicOperator, operatorRegex);
            var tokensGenerated = _tokenizer.Tokenize(input).ToList();

            var operatorTokens = tokensGenerated.Where(t => t.TokenType == TokenType.ArithmeticAndLogicOperator).ToList();

            Assert.IsTrue(operatorTokens.Any());
            Assert.AreEqual(2, operatorTokens.Count);
            Assert.IsNotNull(operatorTokens.FirstOrDefault(t => t.Value == "!"));
            Assert.IsNotNull(operatorTokens.FirstOrDefault(t => t.Value == "<<"));
        }
Example #5
        static void Main(string[] args)
        {
            // Test input (wrapped in a MemoryStream below); predefined variables: foo = 10.5, bar = 20
            string test = "50 > 49.5 and 50 < 50.5";

            byte[]       byteArray = Encoding.ASCII.GetBytes(test);
            MemoryStream stream    = new MemoryStream(byteArray);

            var tokenizer = new Tokenizer.Tokenizer();

            // Token definitions (longer operators listed before their prefixes,
            // e.g. "<=" before "<", so a first-match tokenizer cannot split them)
            tokenizer.AddTokenDefinition(TokenType.Ignored, @" ");
            tokenizer.AddTokenDefinition(TokenType.NewLine, @"\r\n|\n|\r");
            tokenizer.AddTokenDefinition(TokenType.EndOfFile, "");
            tokenizer.AddTokenDefinition(TokenType.LogicalOr, @"\|\|");
            tokenizer.AddTokenDefinition(TokenType.LogicalOr, @"or");
            tokenizer.AddTokenDefinition(TokenType.LogicalAnd, @"&&");
            tokenizer.AddTokenDefinition(TokenType.LogicalAnd, @"and");
            tokenizer.AddTokenDefinition(TokenType.Bool, "true|TRUE|True");
            tokenizer.AddTokenDefinition(TokenType.Bool, "false|FALSE|False");
            tokenizer.AddTokenDefinition(TokenType.Ident, "[a-zA-Z][a-zA-Z0-9]*");
            //tokenizer.AddTokenDefinition(TokenType.Int, "(-)?[0-9]+");
            tokenizer.AddTokenDefinition(TokenType.Float, @"(-)?[0-9]+(\.[0-9]+)?(e\+[0-9]+)?");
            tokenizer.AddTokenDefinition(TokenType.Equals, @"==");
            tokenizer.AddTokenDefinition(TokenType.NotEquals, @"!=");
            tokenizer.AddTokenDefinition(TokenType.LessThanEquals, @"<=");
            tokenizer.AddTokenDefinition(TokenType.GreaterThanEquals, @">=");
            tokenizer.AddTokenDefinition(TokenType.LessThan, @"<");
            tokenizer.AddTokenDefinition(TokenType.GreaterThan, @">");
            tokenizer.AddTokenDefinition(TokenType.Addition, @"\+");
            tokenizer.AddTokenDefinition(TokenType.Subtraction, @"-");
            tokenizer.AddTokenDefinition(TokenType.Multiplication, @"\*");
            tokenizer.AddTokenDefinition(TokenType.Division, @"\/");
            tokenizer.AddTokenDefinition(TokenType.Modulo, @"%");
            tokenizer.AddTokenDefinition(TokenType.Not, @"!");
            tokenizer.AddTokenDefinition(TokenType.OpenBracket, @"\(");
            tokenizer.AddTokenDefinition(TokenType.CloseBracket, @"\)");

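            // Time the tokenization pass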
            Stopwatch sw = new Stopwatch();

            sw.Start();
            tokenizer.Tokenize(stream);
            long tokenizerMs = sw.ElapsedMilliseconds;

            // Tokenizer debug output
            //foreach (IToken token in tokenizer.Tokens)
            //    Console.WriteLine($"{token.TokenType}, {token.Text}, {token.LineNumber}:{token.ColumnNumber}");

            Dictionary<string, Variable> vars = new Dictionary<string, Variable>
            {
                { "foobar", new BoolVariable("foobar", true) },
                { "foo", new FloatVariable("foo", 10.5f) },
                { "bar", new FloatVariable("bar", 20) }
            };

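            // Compile the token stream into an evaluable expression, timing the pass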
            Compiler.Compiler compiler = new Compiler.Compiler();
            sw.Restart();
            Expression expr       = compiler.Compile(tokenizer.Tokens, vars);
            long       compilerMs = sw.ElapsedMilliseconds;

            sw.Stop();

            Console.WriteLine($"Tokenized in {tokenizerMs}ms");
            Console.WriteLine($"Compiled in {compilerMs}ms");
            Console.WriteLine($"Result: {(expr as IExpression<bool>).Evaluate()}");

            Console.ReadLine();
        }