/// <summary>
/// Builds a token representing a function call, wiring in the evaluator
/// used later to produce the call's result.
/// </summary>
public FunctionCallToken(string text, TokenEvaluator evaluator, TokenPosition position)
    : base(text)
{
    // Function calls bind more tightly than ordinary operators.
    Precedence = 20;
    TokenType = TokenType.FunctionCall;
    TokenPosition = position;
    Evaluator = evaluator;
}
/// <summary>
/// Creates a token definition whose regex pattern is anchored to the
/// start of the remaining input.
/// </summary>
internal TokenDefinition(TokenType token, string pattern, ushort priority, TokenEvaluator evaluator)
{
    Token = token;
    Priority = priority;
    Evaluator = evaluator;
    // Anchor so the pattern can only match at the current scan position.
    Pattern = $"^({pattern})";
}
/// <summary>
/// Creates a matcher for an expression token with explicit precedence
/// and associativity.
/// </summary>
public ExpressionTokenMatcher(ITokenMatcher matcher, TokenType tokenType, int precedence, OperatorAssociativity associativity, TokenEvaluator tokenEvaluator)
{
    _matcher = matcher;
    _tokenType = tokenType;
    _precedence = precedence;
    _associativity = associativity;
    _tokenEvaluator = tokenEvaluator;
}
public void SubtractionOperator()
{
    // "1 - 4" must evaluate to -3.
    const string expression = " 1 - 4";

    List<Token> tokens = new Lexer().Parse(expression);
    Assert.IsTrue(new TokenVerifier().Verify(tokens));

    Assert.AreEqual(-3, new TokenEvaluator().Evaluate(tokens));
}
public void AdditionOperator()
{
    // Floating-point addition: " 1.5 + 4.0" must evaluate to 5.5.
    const string expression = " 1.5 + 4.0";
    const double expected = 5.5;

    List<Token> tokens = new Lexer().Parse(expression);
    Assert.IsTrue(new TokenVerifier().Verify(tokens));

    double actual = new TokenEvaluator().Evaluate(tokens);
    Assert.AreEqual(expected, actual);
}
/// <summary>
/// Builds an expression token, deriving its operand count from the
/// matcher's token type and copying over the matcher's metadata.
/// </summary>
public ExpressionToken(ExpressionTokenMatcher tokenMatcher, string text)
    : base(tokenMatcher, text)
{
    TokenType matchedType = tokenMatcher.TokenType;

    // Operand counts: ternary = 3, binary = 2, unary = 1.
    // Other token types leave _numTerms at its default.
    if (matchedType == TokenType.TernaryOperator)
    {
        _numTerms = 3;
    }
    else if (matchedType == TokenType.Operator)
    {
        _numTerms = 2;
    }
    else if (matchedType == TokenType.UnaryOperator)
    {
        _numTerms = 1;
    }

    _precedence = tokenMatcher.Precedence;
    _associativity = tokenMatcher.Associativity;
    _tokenType = matchedType;
    _evaluator = tokenMatcher.Evaluator;
}
public void MultiplicationOperator()
{
    // "5*10" (no whitespace) must evaluate to 50.
    const string expression = "5*10";
    const int expected = 50;

    List<Token> tokens = new Lexer().Parse(expression);
    Assert.IsTrue(new TokenVerifier().Verify(tokens));

    double actual = new TokenEvaluator().Evaluate(tokens);
    Assert.AreEqual(expected, actual);
}
public void AdditionAndDiceOperator()
{
    // The dice roll 1d5 yields 1..5, so "1d5 + 10" must land in [11, 15].
    const string expression = "1d5 + 10";
    const int lowBound = 11;
    const int highBound = 15;

    List<Token> tokens = new Lexer().Parse(expression);
    Assert.IsTrue(new TokenVerifier().Verify(tokens));

    double results = new TokenEvaluator().Evaluate(tokens);

    bool inRange = lowBound <= results && results <= highBound;
    if (!inRange)
    {
        Assert.Fail(String.Format("Result should have fallen within {0} and {1} but was instead {2}", lowBound, highBound, results));
    }
}
/// <summary>
/// Registers a token definition. A definition whose (non-empty) pattern
/// is already registered becomes the existing definition's alternate
/// rather than a second entry.
/// </summary>
private void AddTokenDefinition(TokenDefinition tokenDefinition, TokenEvaluator evaluator)
{
    tokenDefinition.Evaluator = evaluator;

    TokenDefinition duplicate = _TokenDefinitions.Find(
        td => td.Pattern == tokenDefinition.Pattern && !string.IsNullOrEmpty(td.Pattern));

    if (duplicate == null)
    {
        _TokenDefinitions.Add(tokenDefinition);
    }
    else
    {
        duplicate.Alternate = tokenDefinition;
    }
}
/// <summary>
/// Folds the RPN token list into a single expression tree using an
/// operand stack: terms push themselves; operators/functions pop their
/// arguments, evaluate, and push the result.
/// </summary>
public Expression Compile()
{
    var operandStack = new Stack<Expression>();
    Expression result = null;

    foreach (ExpressionToken token in _tokenList)
    {
        TokenEvaluator evaluator = token.Evaluator;

        if (token.IsTerm)
        {
            result = evaluator(token.Text, token.TokenPosition, null);
        }
        else
        {
            int argumentCount = token.NumTerms;
            if (token.IsFunction)
            {
                // Function calls carry one extra operand.
                argumentCount++;
            }

            // Operands were pushed left-to-right, so fill the array back-to-front.
            var arguments = new Expression[argumentCount];
            for (int i = argumentCount - 1; i >= 0; i--)
            {
                arguments[i] = operandStack.Pop();
            }

            result = evaluator(token.Text, token.TokenPosition, arguments);
        }

        operandStack.Push(result);
    }

    return result;
}
/// <summary>
/// Extracts the first FROM-clause table of a SELECT statement, or an
/// empty string for non-SELECT commands and unusable FROM items.
/// </summary>
public static string GetTableName(string commandText)
{
    var handler = new CommandTextHandler();
    new TokenEvaluator(handler).Evaluate(commandText);

    // Only SELECT statements carry a table name we report.
    if (handler.Command.Type != "Select")
    {
        return string.Empty;
    }

    var fromTable = handler.From.Items.FirstOrDefault();

    // Reject a missing item, or one ending in "," (part of a multi-table list).
    bool usable = !string.IsNullOrEmpty(fromTable) && !fromTable.EndsWith(",");
    return usable ? fromTable : string.Empty;
}
/// <summary>
/// Transliterates the given text: per-word rules are applied to every
/// transliterable word first, then per-sentence rules run over the
/// recompiled text.
/// </summary>
public string Transcribe(string text)
{
    var tokens = new TokenEvaluator(text);

    // First pass: rewrite each transliterable word in place.
    foreach (Token word in tokens.GetWordsToTransliterate())
    {
        foreach (var rule in perWordRules)
        {
            word.Value = rule.Execute(word.Value);
        }
    }

    // Second pass: sentence-level rules over the reassembled text.
    var result = tokens.Compile();
    foreach (var rule in perSentenceRules)
    {
        result = rule.Execute(result);
    }

    return result;
}
/// <summary>
/// Reduces the RPN token list to one expression. Terms become leaf
/// expressions; operators and functions pop their operands from the
/// stack, evaluate, and push the combined expression back.
/// </summary>
public Expression Compile()
{
    Stack<Expression> pending = new Stack<Expression>();
    Expression expression = null;

    foreach (ExpressionToken token in _tokenList)
    {
        TokenEvaluator evaluate = token.Evaluator;

        if (token.IsTerm)
        {
            expression = evaluate(token.Text, token.TokenPosition, null);
        }
        else
        {
            // Functions take their name as an extra leading parameter.
            int count = token.IsFunction ? token.NumTerms + 1 : token.NumTerms;

            // Pop in reverse so parameters end up in source order.
            Expression[] args = new Expression[count];
            for (int slot = count - 1; slot >= 0; slot--)
            {
                args[slot] = pending.Pop();
            }

            expression = evaluate(token.Text, token.TokenPosition, args);
        }

        pending.Push(expression);
    }

    return expression;
}
/// <summary>
/// Registers a ternary operator built from two partial matchers (e.g. the
/// "?" and ":" of a conditional). The root matcher carries the evaluator;
/// both partials link back to it via Root.
/// </summary>
public void AddTernaryTokenMatcher(ITokenMatcher matcher1, ITokenMatcher matcher2, int precedence, OperatorAssociativity associativity, TokenEvaluator tokenEvaluator)
{
    ExpressionTokenMatcher root = new ExpressionTokenMatcher(null, TokenType.TernaryOperator, tokenEvaluator);

    ExpressionTokenMatcher partial1 = new ExpressionTokenMatcher(matcher1, TokenType.TernaryOperator1, precedence, associativity, null);
    // BUG FIX: the second partial was registered as TernaryOperator1 as well;
    // it must be TernaryOperator2 so the parser can tell the two halves of the
    // operator apart (mirrors AddTernaryOperator's TernaryOperator1/2 pairing).
    ExpressionTokenMatcher partial2 = new ExpressionTokenMatcher(matcher2, TokenType.TernaryOperator2, precedence, associativity, null);

    partial1.Root = root;
    partial2.Root = root;

    AddTokenMatcher(partial1);
    AddTokenMatcher(partial2);
}
/// <summary>
/// Convenience overload: registers a matcher with left associativity.
/// </summary>
public void AddTokenMatcher(ITokenMatcher tokenMatcher, TokenType tokenType, int precedence, TokenEvaluator tokenEvaluator)
{
    AddTokenMatcher(tokenMatcher, tokenType, precedence, OperatorAssociativity.Left, tokenEvaluator);
}
/// <summary>
/// Wraps the raw matcher in an ExpressionTokenMatcher and registers it.
/// </summary>
public void AddTokenMatcher(ITokenMatcher tokenMatcher, TokenType tokenType, int precedence, OperatorAssociativity associativity, TokenEvaluator tokenEvaluator)
{
    var expressionMatcher = new ExpressionTokenMatcher(tokenMatcher, tokenType, precedence, associativity, tokenEvaluator);
    AddTokenMatcher(expressionMatcher);
}
/// <summary>
/// Registers a binary operator token definition with its evaluator.
/// </summary>
public void AddBinaryOperator(string pattern, int precedence, OperatorAssociativity associativity, TokenEvaluator evaluator)
{
    var definition = new TokenDefinition(TokenType.Operator, precedence, associativity, pattern);
    AddTokenDefinition(definition, evaluator);
}
/// <summary>
/// Creates a pattern-less token definition (delegates to the string
/// constructor with a null pattern) and attaches its evaluator.
/// </summary>
public TokenDefinition(TokenType tokenType, int precedence, TokenEvaluator evaluator)
    : this(tokenType, precedence, (string)null)
{
    _evaluator = evaluator;
}
/// <summary>
/// Registers a ternary operator from two patterns (e.g. "?" and ":").
/// The root definition carries the evaluator; the two partials recognize
/// the operator's symbols and link back to the shared root.
/// </summary>
public void AddTernaryOperator(string pattern1, string pattern2, int precedence, OperatorAssociativity associativity, TokenEvaluator evaluator)
{
    TokenDefinition root = new TokenDefinition(TokenType.TernaryOperator, evaluator);

    TokenDefinition partial1 = new TokenDefinition(TokenType.TernaryOperator1, precedence, associativity, pattern1);
    partial1.Root = root;
    AddTokenDefinition(partial1);

    TokenDefinition partial2 = new TokenDefinition(TokenType.TernaryOperator2, precedence, associativity, pattern2);
    partial2.Root = root;
    AddTokenDefinition(partial2);
}
/// <summary>
/// Creates a matcher with no explicit precedence or associativity.
/// </summary>
public ExpressionTokenMatcher(ITokenMatcher matcher, TokenType tokenType, TokenEvaluator tokenEvaluator)
{
    _matcher = matcher;
    _tokenEvaluator = tokenEvaluator;
    _tokenType = tokenType;
}
/// <summary>
/// Registers a term (leaf value) pattern with its evaluator.
/// </summary>
public void AddTerm(string pattern, TokenEvaluator evaluator)
{
    var definition = new TokenDefinition(TokenType.Term, pattern);
    AddTokenDefinition(definition, evaluator);
}
/// <summary>
/// Convenience overload: zero precedence, left associativity.
/// </summary>
public void AddTokenMatcher(ITokenMatcher tokenMatcher, TokenType tokenType, TokenEvaluator tokenEvaluator, int? numTerms = null)
{
    AddTokenMatcher(tokenMatcher, tokenType, 0, OperatorAssociativity.Left, tokenEvaluator, numTerms);
}
/// <summary>
/// Creates an RPN expression with the evaluator used for function calls
/// and the precedence assigned to function-call tokens.
/// </summary>
public RPNExpression(TokenEvaluator functionEvaluator, int functionCallPrecedence)
{
    _FunctionEvaluator = functionEvaluator;
    _FunctionCallPrecedence = functionCallPrecedence;
}
/// <summary>
/// Full registration overload; an explicit numTerms, when given,
/// overrides the term count derived from the token type.
/// </summary>
public void AddTokenMatcher(ITokenMatcher tokenMatcher, TokenType tokenType, int precedence, OperatorAssociativity associativity, TokenEvaluator tokenEvaluator, int? numTerms = null)
{
    var matcher = new ExpressionTokenMatcher(tokenMatcher, tokenType, precedence, associativity, tokenEvaluator);

    // Only override the matcher's term count when the caller supplied one.
    if (numTerms.HasValue)
    {
        matcher.NumTerms = numTerms;
    }

    AddTokenMatcher(matcher);
}
/// <summary>
/// Creates a compiler with the given token scrubber.
/// Precondition: scrubber must not be null.
/// </summary>
public Compiler(TokenEvaluator scrubber)
{
    Condition.Requires(scrubber).IsNotNull();
    this.TokenScrubber = scrubber;
}
/// <summary>
/// Registers a named operator token definition under the given priority.
/// </summary>
protected void AddOperator(string name, string pattern, ushort priority, TokenEvaluator evaluator)
{
    var definition = new TokenDefinition(TokenType.Operator, pattern, priority, evaluator);
    AddTokenDefinition(name, definition);
}
/// <summary>
/// Creates an RPN expression with the evaluator used for function calls.
/// </summary>
public RPNExpression(TokenEvaluator functionEvaluator)
{
    _functionEvaluator = functionEvaluator;
}
/// <summary>
/// Factory wrapper around the Compiler constructor.
/// </summary>
public static Compiler New(TokenEvaluator scrubber)
{
    return new Compiler(scrubber);
}