// Parses a single expression string into an AST.
// An expression may optionally be wrapped in '{' ... '}'; both delimiters are
// stripped before the main parse loop runs.
private ASTNode ParseInternal(string input) {
    // Tokenize into a pooled list and wrap the tokens in a stream.
    tokenStream = new TokenStream(ExpressionTokenizer.Tokenize(input, StructList<ExpressionToken>.Get()));

    // Lazily acquire the pooled working stacks on first use.
    if (expressionStack == null) {
        expressionStack = StackPool<ASTNode>.Get();
    }
    if (operatorStack == null) {
        operatorStack = StackPool<OperatorNode>.Get();
    }

    // Drop a leading '{' if present.
    if (tokenStream.Current == ExpressionTokenType.ExpressionOpen) {
        tokenStream.Advance();
    }

    if (!tokenStream.HasMoreTokens) {
        throw new ParseException("Failed trying to parse empty expression");
    }

    // Drop the matching trailing '}' if present.
    if (tokenStream.Last == ExpressionTokenType.ExpressionClose) {
        tokenStream.Chop();
    }

    ASTNode result = ParseLoop();
    Release();
    return result;
}
// Interactive REPL loop: read a line, tokenize, parse, evaluate, print.
// NOTE(review): this snippet is truncated in this view — the catch handler for
// the try block and the closing braces of the while loop and method are not
// visible here; leaving the code untouched.
static void Main() {
    Console.Write(Prompt);
    var line = Console.ReadLine();
    // A null line means EOF on stdin and ends the loop.
    while (line != null) {
        if (!string.IsNullOrWhiteSpace(line)) {
            try {
                var tokens = ExpressionTokenizer.TryTokenize(line);
                if (!tokens.HasValue) {
                    // Tokenizer failure: report message and source position.
                    WriteSyntaxError(tokens.ToString(), tokens.ErrorPosition);
                } else if (!ExpressionParser.TryParse(tokens.Value, out var expr, out var error, out var errorPosition)) {
                    // Parser failure: report message and source position.
                    WriteSyntaxError(error, errorPosition);
                } else {
                    var result = ExpressionEvaluator.Evaluate(expr);
                    Console.ForegroundColor = ConsoleColor.Cyan;
                    Console.WriteLine(result);
                }
            }
// Captures a single function argument: its name, positional index, runtime
// value, and the source positions immediately before and after the argument.
public FunctionArgument(string name, int index, object value, ExpressionTokenizer.Position beforeArgument, ExpressionTokenizer.Position afterArgument) {
    _name = name;
    _index = index;
    _value = value;
    _beforeArgument = beforeArgument;
    _afterArgument = afterArgument;
}
// Interactive loop: read an expression, tokenize it, and print its tokens
// until the user types "exit" (or stdin reaches EOF).
static void Main(string[] args) {
    Console.WriteLine("Expression tokenizer and evaluator");
    Console.WriteLine();
    Console.Write("Expression: ");
    string uInput = Console.ReadLine();
    // Also stop on null (EOF): previously a closed stdin looped forever.
    while (uInput != null && uInput != "exit") {
        try {
            et = new ExpressionTokenizer(uInput);
            // Print tokens only when construction succeeded. Previously this
            // foreach ran even after the catch, using a stale (or null)
            // tokenizer — a NullReferenceException on the first bad input.
            foreach (ExpressionToken token in et.GetTokens()) {
                Console.WriteLine(token);
            }
        } catch (Exception ex) {
            Console.WriteLine(ex.Message);
        }
        Console.WriteLine();
        Console.Write("Expression: ");
        uInput = Console.ReadLine();
    }
}
// Each arithmetic operator must tokenize to exactly one token of the
// matching type. Table-driven: inputs[i] pairs with expected[i].
public void Tokenize_Operators() {
    string[] inputs = { "+", "-", "*", "/", "%" };
    ExpressionTokenType[] expected = {
        ExpressionTokenType.Plus,
        ExpressionTokenType.Minus,
        ExpressionTokenType.Times,
        ExpressionTokenType.Divide,
        ExpressionTokenType.Mod
    };
    for (int i = 0; i < inputs.Length; i++) {
        StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize(inputs[i]);
        Assert.AreEqual(1, tokens.Count);
        Assert.AreEqual(expected[i], tokens[0].expressionTokenType);
    }
}
// Validates a candidate expression name. The trimmed name is returned via
// validatedName. Allowed characters: name tokens, spaces, and digits (digits
// only after the first character). An empty (post-trim) name is treated as
// valid; a name already present in Expressions is rejected.
public bool IsValidName(ReadOnlyString newName, out ReadOnlyString validatedName) {
    validatedName = newName.Chars.Trim();

    if (validatedName.IsEmpty) {
        return true;
    }

    var span = validatedName.Span;
    for (int i = 0; i < span.Length; i++) {
        char ch = span[i];
        bool allowed = ExpressionTokenizer.IsNameToken(ch)
                       || ExpressionTokenizer.IsSpaceToken(ch)
                       || (i > 0 && ExpressionTokenizer.IsDigitToken(ch));
        if (!allowed) {
            return false;
        }
    }

    // Reject duplicates of existing expression names.
    return !Expressions.ContainsKey(validatedName);
}
// Parses a type body: repeatedly consumes declarations until the token
// stream is exhausted. A hard iteration cap (10000) guards against runaway
// loops; a stuck stream (no token consumed) raises a ParseException.
public TypeBodyNode Parse(string input, string fileName, int lineStart) {
    tokenStream = new TokenStream(ExpressionTokenizer.Tokenize(input, StructList<ExpressionToken>.Get()));
    if (!tokenStream.HasMoreTokens) {
        throw new ParseException("Failed trying to parse empty expression");
    }

    TypeBodyNode body = new TypeBodyNode();
    int guard = 0;
    while (tokenStream.HasMoreTokens && guard < 10000) {
        guard++;
        ExpressionToken before = tokenStream.Current;
        ASTNode node = null;
        if (ParseDeclaration(ref node)) {
            body.nodes.Add(node);
            continue;
        }
        // No declaration parsed and no token consumed: we would spin forever.
        if (before == tokenStream.Current) {
            throw new ParseException($"Failed to parse {fileName}. Got stuck on {before.value}");
        }
    }
    return body;
}
// Tokenizes source and runs parser over the full token list (AtEnd demands
// every token be consumed). Returns true with value set on success; false
// with error set (and value defaulted) on tokenizer or parser failure.
// Throws ArgumentNullException when source is null.
public static bool TryParseAll <T>(TokenListParser <ExpressionToken, T> parser, string source, out T value, out string error) {
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    value = default;
    error = null;

    var tokenization = ExpressionTokenizer.TryTokenize(source);
    if (!tokenization.HasValue) {
        error = tokenization.ToString();
        return false;
    }

    var parsed = parser.AtEnd()(tokenization.Value);
    if (!parsed.HasValue) {
        error = parsed.ToString();
        return false;
    }

    value = parsed.Value;
    return true;
}
// A one-token path must produce exactly one token of the expected type
// (Single() throws if the count differs from one).
public void Tokenize_SingleToken_ReturnsExpectedToken(string path, ExpressionTokenType expectedType) {
    var actual = new ExpressionTokenizer().Tokenize(path).Single();

    Assert.AreEqual(expectedType, actual.Type);
}
// Each identifier must lex as a single Keyword token whose text round-trips,
// followed by EOF after one GetNextToken call.
// NOTE(review): "a.d" appears twice in this list — possibly one entry was
// meant to be a different string; confirm the intended coverage.
public void Keyword() {
    string[] identifiers = { "a.d", "a-d", "a.d", "a\\d", "a_d", "_ad", "ad5" };
    foreach (string identifier in identifiers) {
        ExpressionTokenizer et = new ExpressionTokenizer();
        et.InitTokenizer(identifier);
        Assert.AreEqual(identifier, et.TokenText, "#A1");
        Assert.AreEqual(ExpressionTokenizer.TokenType.Keyword, et.CurrentToken, "#A2:" + identifier);
        et.GetNextToken();
        Assert.AreEqual(identifier, et.TokenText, "#B1");
        Assert.AreEqual(ExpressionTokenizer.TokenType.EOF, et.CurrentToken, "#B2:" + identifier);
    }
}
// Parsing a query must yield a non-null expression and record no errors in
// the tokenizer context.
public void TestExpressionParserDbg(string query) {
    var context = TokenzierContext.FromText(query);

    var parsed = ExpressionTokenizer.ParseExpressionOrString(query, context);

    Assert.That(parsed, Is.Not.Null);
    Assert.That(context.Errors, Is.Empty, () => context.Errors.GetErrorText());
}
// A single-quoted literal yields one token whose value excludes the quotes.
public void Tokenize_String() {
    StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize("'some string'");

    Assert.AreEqual(1, tokens.Count);
    Assert.AreEqual("some string", tokens[0].value);
}
// Tokenizes the expression with the default parser chain, then folds the
// token sequence into an expression tree bound to the given variables.
static IExpressionTreeNode <double> BuildExpressionTree(string expression, IVariableProvider <double> variableProvider) {
    var tokens = new ExpressionTokenizer(AbstractTokenParser.DefaultTokenParserChain).Tokenize(expression);
    return ExpressionTreeBuilder.BuildTree(tokens, variableProvider);
}
// "isThing" starts with the keyword "is" but must still lex as one identifier.
public void AllowKeyWordAsIdentifierPart() {
    StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize("isThing");

    var expected = new List<ExpressionTokenType> { ExpressionTokenType.Identifier };
    AssertTokenTypes(expected, tokens);
}
// Tokenizing null must raise ArgumentNullException.
public void Tokenize_Null_ThrowsArgumentNullException() {
    var sut = new ExpressionTokenizer();

    // ToList forces full enumeration so the (lazy) tokenizer actually runs.
    // ReSharper disable once ReturnValueOfPureMethodIsNotUsed
    Assert.Throws<ArgumentNullException>(() => sut.Tokenize(null).ToList());
}
// "item.thing" splits into identifier, dot, identifier — three tokens whose
// values reproduce the source text.
public void TokenizeBasicString() {
    StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize("item.thing");

    Assert.AreEqual(3, tokens.Count);
    string[] expected = { "item", ".", "thing" };
    for (int i = 0; i < expected.Length; i++) {
        Assert.AreEqual(expected[i], tokens[i].value);
    }
}
// Illegal characters must surface as FormatException once the input is
// fully consumed.
public void Tokenize_IllegalCharacters_ThrowsFormatException(string input) {
    var sut = new ExpressionTokenizer();

    // ToList forces the lazy tokenizer to consume the entire string.
    // ReSharper disable once ReturnValueOfPureMethodIsNotUsed
    Assert.Throws<FormatException>(() => sut.Tokenize(input).ToList());
}
// A keyword ending in '.' must be rejected at tokenizer initialization.
public void Keyword_ShouldNotEndWithDot() {
    var tokenizer = new ExpressionTokenizer();
    try {
        tokenizer.InitTokenizer("abc.");
        Assert.Fail();
    } catch (ExpressionParseException) {
        // expected: trailing dot is invalid
    }
}
// Tokenize, parse, then evaluate; each stage's failure message doubles as
// the assertion message so a failing stage explains itself.
public void ValidResultsAreComputed(string source, string result) {
    var tokens = ExpressionTokenizer.TryTokenize(source);
    Assert.True(tokens.HasValue, tokens.ToString());

    Assert.True(ExpressionParser.TryParse(tokens.Value, out var expr, out var err), err);

    var evaluated = ExpressionEvaluator.Evaluate(expr);
    Assert.Equal(result, evaluated.ToString());
}
// A '$'-prefixed name lexes as an ordinary identifier within an expression.
public void Tokenize_SpecialIdentifier() {
    StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize("1 + $ident");

    var expected = new List<ExpressionTokenType> {
        ExpressionTokenType.Number,
        ExpressionTokenType.Plus,
        ExpressionTokenType.Identifier
    };
    AssertTokenTypes(expected, tokens);
}
// A syntax error inside a function body should still yield a
// FunctionDefinitionExpression node while recording two parse errors, the
// first of which names the unexpected character.
public void TestParseErrorInsideDefinitionExpressionTokenizer() {
    var group = new ExpressionGroup();
    var tokenizer = new ExpressionTokenizer(Tokenizer.CreateTokenizer("function func() { j = }"), group);
    tokenizer.Match("function");

    var definition = UserFunctionDefinitionExpression.Parse(tokenizer);

    Assert.That(definition, Is.InstanceOf<FunctionDefinitionExpression>());
    Assert.That(group.ParseErrors.Count(), Is.EqualTo(2));
    Assert.That(group.ParseErrors.First().Message, Is.EqualTo("Unexpected character: }"));
}
// A parenthesis character tokenizes to exactly one token of the given type.
public void CreatesParenthesis(string str, TokenType tokenType) {
    // arrange
    var sut = new ExpressionTokenizer();

    // act
    var result = sut.Tokenize(str);

    // assert
    Assert.Single(result);
    Assert.Equal(new Token(tokenType), result[0]);
}
// An operator character tokenizes to exactly one token wrapping the operator.
public void CreatesOperatorToken(string str, Operator @operator) {
    // arrange
    var sut = new ExpressionTokenizer();

    // act
    var result = sut.Tokenize(str);

    // assert
    Assert.Single(result);
    Assert.Equal(new Token(@operator), result[0]);
}
// '[' and ']' map to ArrayAccessOpen / ArrayAccessClose respectively.
// Table-driven: inputs[i] pairs with expected[i].
public void Tokenize_ArrayAccess() {
    string[] inputs = { "[", "]" };
    ExpressionTokenType[] expected = {
        ExpressionTokenType.ArrayAccessOpen,
        ExpressionTokenType.ArrayAccessClose
    };
    for (int i = 0; i < inputs.Length; i++) {
        StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize(inputs[i]);
        Assert.AreEqual(1, tokens.Count);
        Assert.AreEqual(expected[i], tokens[0].expressionTokenType);
    }
}
// "isThing" must lex as a single identifier even when embedded in a larger
// (ternary) expression.
public void AllowKeyWordAsIdentifierPartInExpression() {
    StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize("isThing ? 1 : 2");

    var expected = new List<ExpressionTokenType> {
        ExpressionTokenType.Identifier,
        ExpressionTokenType.QuestionMark,
        ExpressionTokenType.Number,
        ExpressionTokenType.Colon,
        ExpressionTokenType.Number
    };
    AssertTokenTypes(expected, tokens);
}
// A bare word tokenizes to a single variable token carrying its name.
public void CreatesVariableToken() {
    // arrange
    const string input = "bananas";
    var sut = new ExpressionTokenizer();

    // act
    var result = sut.Tokenize(input);

    // assert
    Assert.Single(result);
    Assert.Equal(new Token(input), result[0]);
}
// A numeric literal tokenizes to a single value token.
public void CreatesValueToken() {
    // arrange
    var str = "3.14159";
    var tokenizer = new ExpressionTokenizer();

    // act
    var tokens = tokenizer.Tokenize(str);

    // assert
    Assert.Single(tokens);
    // Parse the expected value with the invariant culture so the test does
    // not break on machines whose current culture uses ',' as the decimal
    // separator — plain float.Parse(str) is culture-sensitive (CA1305).
    // NOTE(review): assumes the tokenizer itself parses numbers
    // culture-invariantly — confirm against its implementation.
    Assert.Equal(new Token(float.Parse(str, System.Globalization.CultureInfo.InvariantCulture)), tokens[0]);
}
// Both boolean literals lex to a single token that preserves the source text.
public void Tokenize_Boolean() {
    foreach (string literal in new[] { "true", "false" }) {
        StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize(literal);
        Assert.AreEqual(1, tokens.Count);
        Assert.AreEqual(literal, tokens[0].value);
    }
}
// '{' and '}' map to ExpressionOpen / ExpressionClose respectively.
// Table-driven: inputs[i] pairs with expected[i].
public void Tokenize_ExpressionStatement() {
    string[] inputs = { "{", "}" };
    ExpressionTokenType[] expected = {
        ExpressionTokenType.ExpressionOpen,
        ExpressionTokenType.ExpressionClose
    };
    for (int i = 0; i < inputs.Length; i++) {
        StructList<ExpressionToken> tokens = ExpressionTokenizer.Tokenize(inputs[i]);
        Assert.AreEqual(1, tokens.Count);
        Assert.AreEqual(expected[i], tokens[0].expressionTokenType);
    }
}
// True when every character of Value is a digit token.
// Vacuously true for an empty Value.
public bool ConsistsOfDigits() {
    foreach (char c in Value.AsSpan()) {
        if (!ExpressionTokenizer.IsDigitToken(c)) {
            return false;
        }
    }
    return true;
}
// "()" tokenizes to exactly two tokens: left then right parenthesis.
public void HandlesMultipleTokens0() {
    // arrange
    var sut = new ExpressionTokenizer();

    // act
    var result = sut.Tokenize("()");

    // assert
    Assert.Equal(2, result.Count);
    Assert.Equal(new Token(TokenType.LeftParenthesis), result[0]);
    Assert.Equal(new Token(TokenType.RightParenthesis), result[1]);
}