// Smoke test for basic tokenization of a whitespace-separated digit string.
// DEFECT FIXED: the original version tokenized the input but asserted nothing,
// so it could never fail. "1 2 3 4 5 6 7 8 9 0" contains ten numbers and the
// sibling scope test demonstrates one token per number, so ten tokens are expected.
public void Nested()
{
    const string TestString = "1 2 3 4 5 6 7 8 9 0";

    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Tokenize(TestString).ToList();

    Assert.AreEqual(10, tokens.Count);
}
// Verifies TokenConsumer scope semantics: Commit() makes consumed tokens
// permanent, while disposing a scope without committing rolls the shared
// position back to the last committed point.
public void Nested()
{
    const string TestString = "1 2 3 4 5 6 7 8 9 0";

    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Tokenize(TestString).ToList();
    TokenConsumer consumer = new TokenConsumer(tokens);

    using (var firstScope = consumer.BeginScope())
    {
        int value;

        Assert.IsTrue(firstScope.EatInt(out value).Ok());
        Assert.AreEqual(1, value);

        // "1" becomes permanently consumed here.
        firstScope.Commit();

        // "2" is eaten but never committed, so disposing the scope rolls it back.
        Assert.IsTrue(firstScope.EatInt(out value).Ok());
        Assert.AreEqual(2, value);
    }

    using (var secondScope = consumer.BeginScope())
    {
        int value;

        // The uncommitted "2" must be readable again in a fresh scope.
        Assert.IsTrue(secondScope.EatInt(out value).Ok());
        Assert.AreEqual(2, value);

        Assert.IsTrue(secondScope.EatInt(out value).Ok());
        Assert.AreEqual(3, value);
    }
}
// A representative instruction line ("toggle 352,432 through 628,550")
// must split into exactly eight tokens.
public void TokenizeDefaultExpressionCountsTokensTo8()
{
    const string TokenString = "toggle 352,432 through 628,550";

    var sut = new Tokenizer();
    var result = sut.Tokenize(TokenString).ToList();

    Assert.AreEqual(8, result.Count);
}
// A CRLF between two tokens must bump the line index recorded on the
// token that follows the break.
public void LinebreaksCausesNewLineInTokens()
{
    const string TestString = "1\r\n2";

    var sut = new Tokenizer();
    var produced = sut.Tokenize(TestString).ToList();

    Assert.AreEqual(2, produced.Count);
    Assert.AreEqual(0, produced[0].Line); // before the break
    Assert.AreEqual(1, produced[1].Line); // after the break
}
// A quoted hex escape ("\x27") tokenizes to a single StringToken whose
// ActualLength covers the raw source while Content holds the one decoded
// character (an apostrophe).
public void EatStringHex()
{
    const string TestString = "\"\\x27\"";

    var sut = new Tokenizer();
    var produced = sut.Tokenize(TestString).ToList();

    Assert.AreEqual(1, produced.Count);

    StringToken stringToken = (StringToken)produced[0];
    Assert.AreEqual(TestString.Length, stringToken.ActualLength);
    Assert.AreEqual("'".Length, stringToken.Content.Length);
}
// A string literal containing a stray, unescaped inner quote is malformed
// and tokenization is expected to reject it.
// DEFECT FIXED: the original body had every assertion commented out, so the
// test tokenized the input and verified nothing — despite its name promising
// an exception check. The concrete exception type is not visible from this
// file, so any Exception is accepted; tighten the catch once the type is known.
public void EatInvalidStringThrowsException()
{
    const string TestString = "\"aaa\"aaa\"";
    var tokenizer = new Tokenizer();

    bool threw = false;
    try
    {
        // ToList() forces full enumeration so lazily-raised errors surface.
        tokenizer.Tokenize(TestString).ToList();
    }
    catch (Exception)
    {
        threw = true;
    }

    Assert.IsTrue(threw, "Tokenizing an invalid string literal should throw.");
}
// Parsing the embedded resource must yield exactly 28 routes.
public void TestMethod1()
{
    using (var stream = CreateResource())
    {
        var tokenStream = new Tokenizer().Tokenize(stream);
        var routes = new RouteParser().GetRoutes(tokenStream).ToList();

        Assert.AreEqual(28, routes.Count);
    }
}
// "NOT ax -> ay" must parse into a single assignment whose source is a
// 16-bit NOT of variable "ax" and whose target is variable "ay".
public void Test4()
{
    var source = new Tokenizer().Tokenize(@"NOT ax -> ay");
    var produced = new ExpressionParser().GenerateStatements(source).ToList();

    Assert.AreEqual(1, produced.Count);

    var expected = new AssignStatement(
        new NotExpression(16, new VariableExpression("ax")),
        new VariableExpression("ay"));
    Assert.AreEqual(expected, produced[0]);
}
// Two assignments on one input; the keyword-looking identifier "NOT" in the
// second statement must still be treated as a plain variable name when it
// appears in assignment-target position.
public void Test3()
{
    var source = new Tokenizer().Tokenize(@"fe -> a le -> NOT");
    var produced = new ExpressionParser().GenerateStatements(source).ToList();

    Assert.AreEqual(2, produced.Count);

    var expected = new AssignStatement(
        new VariableExpression("le"),
        new VariableExpression("NOT"));
    Assert.AreEqual(expected, produced[1]);
}
// Extension entry point: tokenizes the raw text and delegates to the
// scope-level ParseHappiness overload to build the happiness rows.
public static IEnumerable<PersonHappinessRow> ParseHappiness(this string text)
{
    var consumer = new TokenConsumer(new Tokenizer().Tokenize(text));

    using (var scope = consumer.BeginScope())
    {
        return scope.ParseHappiness();
    }
}
// End-to-end: parse all routes from the resource, enumerate every possible
// path, and verify the longest one has total distance 804.
public void TestMethod3()
{
    using (var stream = CreateResource())
    {
        var tokenStream = new Tokenizer().Tokenize(stream);
        var routes = new RouteParser().GetRoutes(tokenStream).ToList();

        var longest = new RouteCalculator()
            .CalculateAllRoutes(routes)
            .OrderByDescending(candidate => candidate.Item2)
            .First();

        Assert.AreEqual(804, longest.Item2);
    }
}
// "x AND y -> d" must produce an assignment of a 16-bit AND over variables
// "x" and "y" into variable "d".
public void CreateAssignAnd()
{
    var tokenStream = new Tokenizer().Tokenize("x AND y -> d");
    ExpressionParser parser = new ExpressionParser();
    TokenConsumer consumer = new TokenConsumer(tokenStream);

    using (var scope = consumer.BeginScope())
    {
        var actual = ExpressionParser.CreateAssignExpression(scope);

        var expected = new AssignStatement(
            new AndExpression(16, new VariableExpression("x"), new VariableExpression("y")),
            new VariableExpression("d"));
        Assert.AreEqual(expected, actual);
    }
}
// Sums the on-disk (escaped) length and in-memory (decoded) length of every
// string token in the resource; the difference must be 1350.
public void CalcLengthDiff()
{
    using (var stream = CreateResource())
    {
        var tokenStream = new Tokenizer().Tokenize(stream);

        int decodedTotal = 0;
        int encodedTotal = 0;
        foreach (StringToken stringToken in tokenStream)
        {
            decodedTotal += stringToken.Content.Length;
            encodedTotal += stringToken.ActualLength;
        }

        Assert.AreEqual(1350, encodedTotal - decodedTotal);
    }
}
// "a = 3" must tokenize into exactly three tokens: a literal, an equal
// sign, and a number — all positioned at line 0, column 0.
public void CreateEqualTokenTest()
{
    const string TestString = "a = 3";

    var sut = new Tokenizer();
    var produced = sut.Tokenize(TestString).ToList();

    Assert.AreEqual(3, produced.Count);
    Assert.AreEqual(new LiteralToken("a", 0, 0), produced[0]);
    Assert.AreEqual(new Token(TokenType.Equal, 0, 0), produced[1]);
    Assert.AreEqual(new NumberToken("3", 0, 0), produced[2]);
}
// Builds the full dependency tree for wire "a" from the resource's
// statements and checks its evaluated signal value.
public void ValueOfa()
{
    using (var stream = CreateResource())
    {
        var tokenStream = new Tokenizer().Tokenize(stream);
        var statements = new ExpressionParser().GenerateStatements(tokenStream).ToList();

        var expressionTree = new ExpressionDependecyBuilder()
            .BuildExpressionTree(statements, "a");

        Assert.AreEqual(46065, expressionTree.CalcTree());
    }
}
// Same tree as ValueOfa, but wire "b" is pre-seeded with the previous
// answer (46065) before evaluation, which changes "a" to 14134.
public void ValueOfaWithbOverrided()
{
    using (var stream = CreateResource())
    {
        var tokenStream = new Tokenizer().Tokenize(stream);
        var statements = new ExpressionParser().GenerateStatements(tokenStream).ToList();

        var expressionTree = new ExpressionDependecyBuilder()
            .BuildExpressionTree(statements, "a");

        // Override wire "b" with the result of part one.
        Dictionary<string, int> wireValues = new Dictionary<string, int>
        {
            { "b", 46065 }
        };

        Assert.AreEqual(14134, expressionTree.CalcTree(wireValues));
    }
}