public void Lex_AllTokens_ReturnsAllTokens()
{
    Tokens tokens = Lexer.Lex(new RawExpression("1x*/+-()^"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Number, tokens[0].Type);
    Assert.AreEqual(TokenType.Identifier, tokens[1].Type);
    Assert.AreEqual(TokenType.Multiply, tokens[2].Type);
    Assert.AreEqual(TokenType.Divide, tokens[3].Type);
    Assert.AreEqual(TokenType.Addition, tokens[4].Type);
    Assert.AreEqual(TokenType.Subtraction, tokens[5].Type);
    Assert.AreEqual(TokenType.LeftParentheses, tokens[6].Type);
    Assert.AreEqual(TokenType.RightParentheses, tokens[7].Type);
    Assert.AreEqual(TokenType.Exponent, tokens[8].Type);
    Assert.AreEqual("1", tokens[0].Value);
    Assert.AreEqual("x", tokens[1].Value);
    Assert.AreEqual(null, tokens[2].Value);
    Assert.AreEqual(null, tokens[3].Value);
    Assert.AreEqual(null, tokens[4].Value);
    Assert.AreEqual(null, tokens[5].Value);
    Assert.AreEqual(null, tokens[6].Value);
    Assert.AreEqual(null, tokens[7].Value);
    Assert.AreEqual(null, tokens[8].Value);
}
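// A minimal usage sketch, not part of the original test suite: it relies only on the
// members exercised by the tests above (Lexer.Lex, RawExpression, Tokens.CanRead, the
// Tokens indexer, and Token.Type/Token.Value). The input string and the helper name
// DescribeFirstToken are illustrative assumptions, not taken from the source.
private static string DescribeFirstToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("2x + 3"));

    if (!tokens.CanRead())
    {
        return "no tokens";
    }

    // For "2x + 3" the lexer yields a Number token with Value "2" first,
    // mirroring the assertions in Lex_SimpleExpression_ReturnsSimpleExpressionTokens.
    return $"{tokens[0].Type}: {tokens[0].Value}";
}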
public void Add_AddsToken_AddsTokenToList()
{
    Tokens tokens = new Tokens();
    tokens.Add(new Token(TokenType.Addition));

    Assert.True(tokens.CanRead());
}
public void Lex_IdentifierBackSlash_ReturnsIdentifierToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("\\asdf"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Identifier, tokens[0].Type);
    Assert.AreEqual("\\asdf", tokens[0].Value);
}
public void Lex_DecimalTensNumber_ReturnsNumberToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("11.1"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Number, tokens[0].Type);
    Assert.AreEqual("11.1", tokens[0].Value);
}
public void Lex_ExponentOperator_ReturnsExponentToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("^"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Exponent, tokens[0].Type);
    Assert.AreEqual(null, tokens[0].Value);
}
public void Lex_RightParentheses_ReturnsRightParenthesesToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression(")"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.RightParentheses, tokens[0].Type);
    Assert.AreEqual(null, tokens[0].Value);
}
public void Lex_SubtractionOperator_ReturnsSubtractionToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("-"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Subtraction, tokens[0].Type);
    Assert.AreEqual(null, tokens[0].Value);
}
public void Lex_MultiplyOperator_ReturnsMultiplyToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("*"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Multiply, tokens[0].Type);
    Assert.AreEqual(null, tokens[0].Value);
}
public void Lex_DivideOperator_ReturnsDivideToken()
{
    Tokens tokens = Lexer.Lex(new RawExpression("/"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Divide, tokens[0].Type);
    Assert.AreEqual(null, tokens[0].Value);
}
public void CanRead_Tokens_ReturnsTrue()
{
    Tokens tokens = new Tokens(new List<Token> { new Token(TokenType.Addition) });
    tokens.Add(new Token(TokenType.Addition));

    Assert.True(tokens.CanRead());
}
public void Lex_SimpleExpression_ReturnsSimpleExpressionTokens()
{
    Tokens tokens = Lexer.Lex(new RawExpression("1234.5x + 1.2345"));

    Assert.True(tokens.CanRead());
    Assert.AreEqual(TokenType.Number, tokens[0].Type);
    Assert.AreEqual(TokenType.Identifier, tokens[1].Type);
    Assert.AreEqual(TokenType.Addition, tokens[2].Type);
    Assert.AreEqual(TokenType.Number, tokens[3].Type);
    Assert.AreEqual("1234.5", tokens[0].Value);
    Assert.AreEqual("x", tokens[1].Value);
    Assert.AreEqual(null, tokens[2].Value);
    Assert.AreEqual("1.2345", tokens[3].Value);
}
public void Lex_WhiteSpace_ReturnsNoTokens()
{
    Tokens tokens = Lexer.Lex(new RawExpression("\t"));

    Assert.False(tokens.CanRead());
}
private bool NextTokenIsATermOperator()
{
    return tokens.CanRead() && tokens.IsPeekTypeOfOne(SyntaxTreeConstants.TermOperatorTypes);
}
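// A hypothetical sketch of how a recursive-descent parser might use this guard; only
// NextTokenIsATermOperator and the Tokens members it wraps come from the source. The
// names ExpressionNode, BinaryOperatorNode, ParseFactor, and ConsumeToken are
// illustrative assumptions, not the project's actual API.
private ExpressionNode ParseTerm()
{
    ExpressionNode left = ParseFactor();

    // Keep folding term-level operators while one is next in the token stream.
    while (NextTokenIsATermOperator())
    {
        Token operatorToken = ConsumeToken();
        left = new BinaryOperatorNode(operatorToken, left, ParseFactor());
    }

    return left;
}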