/// <summary>
/// Parses a (possibly absent) binary comparison. The token list is split at the
/// given comparison operator; with two operands both sides are parsed and
/// combined via <paramref name="comparisonExpression"/>, otherwise parsing is
/// delegated unchanged to <paramref name="parseOperation"/>.
/// </summary>
/// <param name="expression">Tokens forming the candidate comparison.</param>
/// <param name="context">Parsing context handed through to the inner operation.</param>
/// <param name="detailType">Token detail type of the comparison operator to split on.</param>
/// <param name="comparisonExpression">Factory combining the two operand expressions.</param>
/// <param name="parseOperation">Parser for each operand (and the no-operator fallback).</param>
/// <param name="dataType">Resulting data type: bool for a comparison, otherwise the inner operation's type.</param>
/// <exception cref="ParseException">More than two operands, or operand types differ.</exception>
private Expression ParseComparisonOperationExpression(
    List<IToken> expression,
    ParsingContext context,
    TokenDetailTypes detailType,
    Func<Expression, Expression, BinaryExpression> comparisonExpression,
    ParseOperation parseOperation,
    out Type dataType)
{
    var operands = SplitIntoExpressions(expression, true, detailType);

    if (operands.Count > 2)
    {
        throw new ParseException(expression, "Invalid comparison expression");
    }

    if (operands.Count < 2)
    {
        // No comparison operator present: hand the whole expression to the
        // inner operation and propagate its data type.
        return parseOperation(expression, context, out dataType);
    }

    var left = parseOperation(operands[0], context, out var leftType);
    var right = parseOperation(operands[1], context, out var rightType);

    // Both operands must produce the same CLR type before they can be compared.
    if (leftType != rightType)
    {
        throw new ParseException(expression, "Data types on both sides of the equation do not align.");
    }

    dataType = typeof(bool);
    return comparisonExpression(left, right);
}
/// <summary>
/// Splits a flat token list into sub-expressions at every token whose detail
/// type matches one of the splitter types.
/// </summary>
/// <param name="tokens">Tokens to split.</param>
/// <param name="removeSplitter">When true, splitter tokens are dropped from the
/// result; otherwise each splitter stays at the end of the sub-expression it
/// terminates.</param>
/// <param name="splitter">Primary detail type to split on.</param>
/// <param name="types">Additional detail types that also act as splitters.</param>
/// <returns>At least one (possibly empty) sub-expression list; no trailing empty
/// list is added for a splitter in final position.</returns>
private List<List<IToken>> SplitIntoExpressions(
    List<IToken> tokens,
    bool removeSplitter,
    TokenDetailTypes splitter,
    params TokenDetailTypes[] types)
{
    var expressions = new List<List<IToken>> { new List<IToken>() };

    // HashSet gives O(1) membership tests instead of List.Contains scans.
    var splitters = new HashSet<TokenDetailTypes>(types) { splitter };

    // Positional loop instead of tokens.IndexOf(token) inside the iteration:
    // IndexOf was an O(n) scan per token (O(n^2) overall) and returned the
    // FIRST occurrence, which gave a wrong "is this the last token" answer
    // whenever the same token instance appeared more than once in the list.
    for (var i = 0; i < tokens.Count; i++)
    {
        var token = tokens[i];
        var isSplitter = splitters.Contains(token.DetailType);

        if (!isSplitter || !removeSplitter)
        {
            expressions[expressions.Count - 1].Add(token);
        }

        // Open a new sub-expression after a splitter, unless it is the last token.
        if (isSplitter && i + 1 < tokens.Count)
        {
            expressions.Add(new List<IToken>());
        }
    }

    return expressions;
}
/// <summary>
/// Converts source code into a flat token list by repeatedly matching the
/// registered token definitions against the start of the remaining input.
/// Whitespace tokens are matched but not emitted; characters no definition
/// matches are silently skipped one at a time.
/// </summary>
/// <param name="code">Source code to tokenize.</param>
/// <returns>All recognized non-whitespace tokens, in input order.</returns>
public static List<IToken> Tokenize(string code)
{
    List<IToken> tokens = new List<IToken>();
    var remainingCode = code;
    while (remainingCode != string.Empty)
    {
        Token token = null;
        // First definition that matches wins, so the order of _tokenDefinitions matters.
        foreach (var tokenDefinition in _tokenDefinitions)
        {
            var match = Regex.Match(remainingCode, tokenDefinition.Pattern);
            if (match.Success)
            {
                // NOTE(review): assumes every Pattern is anchored to the start of the
                // input — otherwise Remove(0, value.Length) would drop characters
                // that were never part of the match. TODO confirm against the
                // definition table.
                var value = match.Captures.First().Value;
                remainingCode = remainingCode.Remove(0, value.Length);
                TokenDetailTypes detailTypes = tokenDefinition.DetailType;
                // if is a subtraction operator but:
                // 1. its the first token
                // 2. the previous token was neither a variable nor a value
                // => it is a unary negative instead
                if (tokenDefinition.DetailType == TokenDetailTypes.Subtraction && (tokens.Count == 0 || (tokens.Last().DetailType != TokenDetailTypes.VariableName && tokens.Last().Type != TokenTypes.Value)))
                {
                    detailTypes = TokenDetailTypes.Negative;
                }
                // Value is trimmed so patterns may legitimately capture surrounding whitespace.
                token = new Token() { Value = value.Trim(), DetailType = detailTypes, Type = tokenDefinition.Type };
                break;
            }
        }
        if (token == null)
        {
            // Nothing matched: advance past one character so the loop always terminates.
            remainingCode = remainingCode.Remove(0, 1);
        }
        else if (token.DetailType != TokenDetailTypes.Whitespace)
        {
            tokens.Add(token);
        }
    }
    return(tokens);
}
/// <summary>
/// Verifies that tokenizing input containing exactly one token yields a single
/// <see cref="Token"/> whose type, detail type and value match the expectation.
/// </summary>
public void Tokenize_SingleTokenParsing_ReturnToken(string value, TokenTypes type, TokenDetailTypes detailType)
{
    // Act
    var tokens = Lexer.Tokenize(value);

    // Assert: exactly one token, of the concrete Token type.
    Assert.That(tokens.Count, Is.EqualTo(1));
    Assert.That(tokens[0], Is.TypeOf<Token>());

    // Compare each property directly against the expected parameters.
    var actual = (Token)tokens[0];
    Assert.That(actual.Type, Is.EqualTo(type));
    Assert.That(actual.DetailType, Is.EqualTo(detailType));
    Assert.That(actual.Value, Is.EqualTo(value));
}
/// <summary>
/// Creates a token definition binding a regex pattern to the token type and
/// detail type it produces when matched.
/// </summary>
/// <param name="pattern">Regex pattern that recognizes this token.</param>
/// <param name="detailType">Fine-grained token classification.</param>
/// <param name="type">Coarse token category.</param>
public TokenDefinition(string pattern, TokenDetailTypes detailType, TokenTypes type)
{
    Type = type;
    DetailType = detailType;
    Pattern = pattern;
}