public void TestTokenizer14()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Number, "1"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.Operator, "-"),
                GenerateTestToken(TokenType.Number, "3"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "4"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Number, "5"),
                GenerateTestToken(TokenType.Operator, "^"),
                GenerateTestToken(TokenType.Number, "6"),
                GenerateTestToken(TokenType.Operator, "^"),
                GenerateTestToken(TokenType.Number, "7"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "8"),
                GenerateTestToken(TokenType.Operator, "-"),
                GenerateTestToken(TokenType.Number, "9"),
            };
            var arrResult  = MathExpressionTokenizer.Tokenize("1 + 2 - 3*4 + 5^6^7*8 - 9", true).ToArray();
            var arrResultT = MathExpressionTokenizer.Tokenize("1 + 2 - 3 4 + 5^6^7*8 - 9", true).ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
            Assert.IsTrue(arrResultT.SequenceEqual(arrResult));
        }
        /// <summary>
        /// Filters a mathematical expression or term.
        /// </summary>
        /// <param name="tokens">A token iterator yielding the individual parts of the mathematical expression.</param>
        /// <returns>A new token iterator that allows iterating over the filtered term.</returns>
        public IEnumerable <Token> PostProcessTokens(IEnumerable <Token> tokens)
        {
            Queue <Token> lastTokens = new Queue <Token>(3);

            foreach (Token t in tokens)
            {
                lastTokens.Enqueue(t);

                if (lastTokens.Count == 3)
                {
                    Token t1 = lastTokens.Dequeue();
                    Token t2 = lastTokens.Dequeue();
                    Token t3 = lastTokens.Dequeue();

                    if (t1.Type == TokenType.Number && t2.Type == TokenType.DecimalSeparator && t3.Type == TokenType.Number)
                    {
                        yield return(MathExpressionTokenizer.GetModifiedToken(t1, t1.Value + t2.Value + t3.Value));
                    }
                    else
                    {
                        yield return(t1);

                        lastTokens.Enqueue(t2);
                        lastTokens.Enqueue(t3);
                    }
                }
            }

            while (lastTokens.Count > 0)
            {
                yield return(lastTokens.Dequeue());
            }
        }
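        // Hedged usage sketch (not part of the original source): the filter above merges the
        // pattern Number, DecimalSeparator, Number into a single Number token. The class name
        // DecimalSeparatorFilter and the test name are assumptions; the token construction
        // mirrors the GenerateTestToken helper further down in this file.
        public void DecimalSeparatorFilter_Merges_Number_Dot_Number_Sketch()
        {
            var filter = new DecimalSeparatorFilter();   // hypothetical name for the filter class above
            var input  = new[]
            {
                GenerateTestToken(TokenType.Number, "3"),
                GenerateTestToken(TokenType.DecimalSeparator, "."),
                GenerateTestToken(TokenType.Number, "1415"),
            };

            var arrResult = filter.PostProcessTokens(input).ToArray();

            // Expected result: a single Number token with the value "3.1415".
            Assert.AreEqual(1, arrResult.Length);
            Assert.AreEqual("3.1415", arrResult[0].Value);
        }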
        public void TestTokenizer08()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Number, "253"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Variable, "x"),
                GenerateTestToken(TokenType.Operator, "-"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Function, "tan"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "5"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "5"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "3"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.Operator, "/"),
                GenerateTestToken(TokenType.Variable, "y"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("253 * x-(tan(5 5) 3) / y", true, "x", "y").ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
        public void TestTokenizer05()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Function, "sqrt"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "9"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "9"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Function, "sqrt"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.ArgumentSeparator, ","),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.ArgumentSeparator, ","),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.Operator, "="),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "9"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("sqrt(9*9 + sqrt(2)) = (9 )", true).ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
        public void TestTokenizer16()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Function, "sqrt"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Variable, "a"),
                GenerateTestToken(TokenType.Operator, "^"),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.ArgumentSeparator, ","),
                GenerateTestToken(TokenType.Number, "3"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.Operator, "/"),
                GenerateTestToken(TokenType.Variable, "a"),
                GenerateTestToken(TokenType.Operator, "^"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "1"),
                GenerateTestToken(TokenType.Operator, "/"),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("sqrt(a^2, 3) / a^(1/2)", true, "a").ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
 public void MathExpressionTokenizer_Can_Tokenize_Modulus()
 {
     var tokenizer = new MathExpressionTokenizer();
     var result = tokenizer.Tokenize("4%2");
     Assert.AreEqual(3, result.Count);
     Assert.AreEqual(TokenType.Modulus, result[1].Type);
 }
 private Token GenerateTestToken(TokenType tt, string strValue)
 {
     if (tt == TokenType.Variable)
     {
         return(MathExpressionTokenizer.GetVariableToken(strValue));
     }
     return(MathExpressionTokenizer.GetModifiedToken(MathExpressionTokenizer.GetToken(tt), strValue));
 }
 public void WordRewriter_Will_Detect_And_Rewrite_Functions()
 {
     MathExpressionTokenizer tokenizer = new MathExpressionTokenizer();
     var tokens = tokenizer.Tokenize("sqrt(1)");
     WordRewriter rewriter = new WordRewriter();
     var result = rewriter.Rewrite(tokens);
     Assert.AreEqual(TokenType.Function, result[0].Type);
     Assert.AreEqual("sqrt", result[0].Value);
 }
 public void MathExpressionTokenizer_Can_Tokenize_Float_Values_With_Dot()
 {
     var tokenizer = new MathExpressionTokenizer();
     var result = tokenizer.Tokenize("3.23+1");
     Assert.AreEqual(3, result.Count);
     Assert.AreEqual(TokenType.Numeric, result[0].Type);
     Assert.AreEqual(3.23D, result[0].Value);
     Assert.AreEqual(TokenType.Addition, result[1].Type);
     Assert.AreEqual(TokenType.Numeric, result[2].Type);
     Assert.AreEqual(1D, result[2].Value);
 }
        public void TestTokenizer15()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Number, "5"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Number, "-5"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Number, "5"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("5 +-5+ 5", true).ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
        public void TestTokenizer11()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Number, "1.2042"),
                GenerateTestToken(TokenType.Operator, "-"),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "0"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("+1.2042 -2 0", true).ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
 private void FormatChild(StringBuilder builder, int iIndex)
 {
     // Wrap the child in parentheses when it is a composite sub-term (more than one
     // node), is not a function call, and this node's own token binds at least as
     // tightly as multiplication; otherwise the child can be appended as-is.
     if (this[iIndex].RecursiveCount() > 1 && this[iIndex].Type != TokenType.Function &&
         MathExpressionTokenizer.GetToken(Value)?.Precedence >= TokenPrecedence.Multiplication)
     {
         builder.Append("(");
         builder.Append(this[iIndex]);
         builder.Append(")");
     }
     else
     {
         builder.Append(this[iIndex]);
     }
 }
        /// <summary>
        /// Extends the knowledge base from a database file.
        /// </summary>
        /// <param name="strFile">The file that contains the database.</param>
        public static void LoadKnowledgeBaseFromFile(string strFile)
        {
            try
            {
                XmlDocument doc = new XmlDocument();
                doc.Load(strFile);
                XmlNode root = doc.DocumentElement;

                if (root?.Name != "knowledgebase")
                {
                    return;
                }

                XmlNode xmlRules = root.SelectSingleNode("tokens");

                if (xmlRules == null || !xmlRules.HasChildNodes ||
                    !(xmlRules.SelectNodes("token") is XmlNodeList listTokens))
                {
                    return;
                }

                foreach (XmlNode node in listTokens)
                {
                    XmlNode xmlType          = node.SelectSingleNode("type");
                    XmlNode xmlValue         = node.SelectSingleNode("value");
                    XmlNode xmlAssociativity = node.SelectSingleNode("associativity");
                    XmlNode xmlPrecedence    = node.SelectSingleNode("precedence");


                    if (xmlType != null && xmlValue != null && xmlAssociativity != null && xmlPrecedence != null &&
                        Enum.TryParse(typeof(TokenType), xmlType.InnerText, out object tt) &&
                        Enum.TryParse(typeof(TokenAssociativity), xmlAssociativity.InnerText, out object ta) &&
                        Enum.TryParse(typeof(TokenPrecedence), xmlPrecedence.InnerText, out object tp))
                    {
                        MathExpressionTokenizer.RegisterToken((TokenType)tt, xmlValue.InnerText,
                                                              (TokenAssociativity)ta, (TokenPrecedence)tp);
                    }
                    else
                    {
                        Console.WriteLine($"Invalid knowledge base entry: {node.InnerXml}");
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
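        // Hedged sketch (not part of the original source) of the XML shape this loader expects,
        // reconstructed from the element names used above. "Operator" and "Multiplication" appear
        // as enum members elsewhere in this file; "Left" and "%" are illustrative assumptions.
        //
        // <knowledgebase>
        //   <tokens>
        //     <token>
        //       <type>Operator</type>
        //       <value>%</value>
        //       <associativity>Left</associativity>
        //       <precedence>Multiplication</precedence>
        //     </token>
        //   </tokens>
        // </knowledgebase>
        //
        // Usage would then be a single call, e.g. LoadKnowledgeBaseFromFile("knowledgebase.xml");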
        public void TestTokenizer17()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "-21"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Number, "4"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.Operator, "+"),
                GenerateTestToken(TokenType.Number, "10"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("(- 21 + 4) + 10", true).ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
        public void TestTokenizer03()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Number, "1"),
                GenerateTestToken(TokenType.Number, "0"),
                GenerateTestToken(TokenType.WhiteSpace, " "),
                GenerateTestToken(TokenType.Operator, "-"),
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.WhiteSpace, " "),
                GenerateTestToken(TokenType.Number, "0"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("10 -2 0").ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
        /// <summary>
        /// Filters a mathematical expression or term.
        /// </summary>
        /// <param name="tokens">A token iterator yielding the individual parts of the mathematical expression.</param>
        /// <returns>A new token iterator that allows iterating over the filtered term.</returns>
        public IEnumerable <Token> PostProcessTokens(IEnumerable <Token> tokens)
        {
            Queue <Token> lastTokens = new Queue <Token>(3);

            foreach (Token t in tokens)
            {
                lastTokens.Enqueue(t);

                if (lastTokens.Count == 2)
                {
                    Token t1 = lastTokens.Dequeue();
                    Token t2 = lastTokens.Dequeue();

                    if (t1.Type == TokenType.Parenthesis && t1.Value == ")" &&
                        t2.Type == TokenType.Parenthesis && t2.Value == "(")
                    {
                        yield return(t1);

                        yield return(MathExpressionTokenizer.GetToken("*"));

                        yield return(t2);
                    }
                    else if ((t1.Type == TokenType.Number || t1.Type == TokenType.Variable || t1.Type == TokenType.Constant || (t1.Type == TokenType.Parenthesis && t1.Value == ")")) &&
                             (t2.Type == TokenType.Number || t2.Type == TokenType.Variable || t2.Type == TokenType.Constant || (t2.Type == TokenType.Parenthesis && t2.Value == "(") || t2.Type == TokenType.Function))
                    {
                        yield return(t1);

                        yield return(MathExpressionTokenizer.GetToken("*"));

                        yield return(t2);
                    }
                    else
                    {
                        yield return(t1);

                        lastTokens.Enqueue(t2);
                    }
                }
            }

            while (lastTokens.Count > 0)
            {
                yield return(lastTokens.Dequeue());
            }
        }
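        // Hedged usage sketch (not part of the original source): the filter above inserts an
        // implicit "*" between adjacent operands, e.g. between a number and a variable or
        // between ")" and "(". ImplicitMultiplicationFilter is an assumed class name; the token
        // construction mirrors the GenerateTestToken helper in this file.
        public void ImplicitMultiplicationFilter_Inserts_Asterisk_Sketch()
        {
            var filter = new ImplicitMultiplicationFilter();   // hypothetical name for the filter class above
            var input  = new[]
            {
                GenerateTestToken(TokenType.Number, "2"),
                GenerateTestToken(TokenType.Variable, "x"),
            };

            var arrResult = filter.PostProcessTokens(input).ToArray();

            // Expected result: Number "2", Operator "*", Variable "x".
            Assert.AreEqual(3, arrResult.Length);
            Assert.AreEqual("*", arrResult[1].Value);
        }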
 public void MathExpressionTokenizer_Can_Tokenize_Simple_Expression()
 {
     var tokenizer = new MathExpressionTokenizer();
     var result = tokenizer.Tokenize("4+(2*7)/2-7");
     Assert.AreEqual(11, result.Count);
     Assert.AreEqual(TokenType.Numeric, result[0].Type);
     Assert.AreEqual(4D, result[0].Value);
     Assert.AreEqual(TokenType.Addition, result[1].Type);
     Assert.AreEqual(TokenType.OpeningParenthesis, result[2].Type);
     Assert.AreEqual(TokenType.Numeric, result[3].Type);
     Assert.AreEqual(2D, result[3].Value);
     Assert.AreEqual(TokenType.Multiplication, result[4].Type);
     Assert.AreEqual(TokenType.Numeric, result[5].Type);
     Assert.AreEqual(7D, result[5].Value);
     Assert.AreEqual(TokenType.ClosingParenthesis, result[6].Type);
     Assert.AreEqual(TokenType.Division, result[7].Type);
     Assert.AreEqual(2D, result[8].Value);
     Assert.AreEqual(TokenType.Subtraction, result[9].Type);
     Assert.AreEqual(7D, result[10].Value);
 }
        public void TestTokenizer04()
        {
            var arrExpected = new[]
            {
                GenerateTestToken(TokenType.Function, "sqrt"),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "9"),
                GenerateTestToken(TokenType.Operator, "*"),
                GenerateTestToken(TokenType.Number, "9"),
                GenerateTestToken(TokenType.Parenthesis, ")"),
                GenerateTestToken(TokenType.WhiteSpace, " "),
                GenerateTestToken(TokenType.Operator, "="),
                GenerateTestToken(TokenType.WhiteSpace, " "),
                GenerateTestToken(TokenType.Parenthesis, "("),
                GenerateTestToken(TokenType.Number, "9"),
                GenerateTestToken(TokenType.WhiteSpace, " "),
                GenerateTestToken(TokenType.Parenthesis, ")"),
            };
            var arrResult = MathExpressionTokenizer.Tokenize("sqrt(9*9) = (9 )").ToArray();

            Assert.IsTrue(arrResult.SequenceEqual(arrExpected));
        }
        /// <summary>
        /// Groups consecutive tokens of the same type.
        /// </summary>
        /// <param name="tokens">The tokens that may need grouping.</param>
        /// <param name="tt">The type of the tokens to group.</param>
        /// <returns>A filtered list in which consecutive tokens of the given type have been merged into a single token.</returns>
        private IEnumerable <Token> GroupByType(IEnumerable <Token> tokens, TokenType tt)
        {
            Token lastToken = MathExpressionTokenizer.GetToken(TokenType.Unknown);

            foreach (Token t in tokens)
            {
                if (t.Type == tt && lastToken.Type == tt)
                {
                    lastToken = MathExpressionTokenizer.GetModifiedToken(t, lastToken.Value + t.Value);
                }
                else if (lastToken.Type == TokenType.Unknown)
                {
                    lastToken = t;
                }
                else
                {
                    yield return(lastToken);

                    lastToken = t;
                }
            }

            yield return(lastToken);
        }
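        // Hedged illustration (not part of the original source): for tt == TokenType.Number,
        // GroupByType collapses runs of single-digit tokens, e.g.
        //   in:  Number "1", Number "0", Operator "-", Number "2"
        //   out: Number "10", Operator "-", Number "2"
        // Compare TestTokenizer03 above, where Tokenize is called without post-processing and
        // the digits of "10" stay separate, with the tests that call Tokenize(..., true).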
 public void MathExpressionTokenizer_Can_Tokenize_Wikipedia_Example()
 {
     var tokenizer = new MathExpressionTokenizer();
     var result = tokenizer.Tokenize("3+4*2/(1-5)^2^3");
     Assert.AreEqual(15, result.Count);
 }
 private TokenStream GetPostfixTokenStream(string source)
 {
     MathExpressionTokenizer tokenizer = new MathExpressionTokenizer();
     TokenStream tokens = tokenizer.Tokenize(source);
     return new TokenPipeline().Process(tokens);
 }
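 // Hedged usage sketch (not part of the original source): converts an infix expression into a
 // postfix token stream via the TokenPipeline shown above, e.g.
 //   TokenStream postfix = GetPostfixTokenStream("3+4*2/(1-5)^2^3");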
        /// <summary>
        /// Extends the knowledge base from a database file.
        /// </summary>
        /// <param name="strFile">The file that contains the database.</param>
        public static void LoadKnowledgeBaseFromFile(string strFile)
        {
            try
            {
                XmlDocument doc = new XmlDocument();
                doc.Load(strFile);
                XmlNode root = doc.DocumentElement;

                if (root?.Name != "knowledgebase")
                {
                    return;
                }

                XmlNode xmlRules = root.SelectSingleNode("functions");

                if (xmlRules == null || !xmlRules.HasChildNodes ||
                    !(xmlRules.SelectNodes("function") is XmlNodeList listFunctions))
                {
                    return;
                }

                foreach (XmlNode node in listFunctions)
                {
                    if (!(node.SelectSingleNode("arguments")?.SelectNodes("argument") is XmlNodeList listArguments))
                    {
                        continue;
                    }
                    XmlNode xmlName = node.SelectSingleNode("name");

                    if (xmlName == null)
                    {
                        continue;
                    }

                    List <DefaultFunctionArgument> listDefaultArguments = new List <DefaultFunctionArgument>();

                    foreach (XmlNode nodeArgument in listArguments)
                    {
                        if (!(nodeArgument.SelectSingleNode("hasdefaultvalue") is XmlNode xmlHasDefaultArgument) ||
                            !bool.TryParse(xmlHasDefaultArgument.InnerText, out bool bHasDefaultArgument))
                        {
                            continue;
                        }

                        if (bHasDefaultArgument &&
                            nodeArgument.SelectSingleNode("defaultvalue") is XmlNode xmlDefaultArgument)
                        {
                            listDefaultArguments.Add(new DefaultFunctionArgument
                            {
                                HasDefaultValue = true,
                                DefaultValue    = MathExpressionTokenizer.GetToken(xmlDefaultArgument.InnerText)
                            });
                        }
                        else
                        {
                            listDefaultArguments.Add(new DefaultFunctionArgument
                            {
                                HasDefaultValue = false
                            });
                        }
                    }

                    FunctionArgumentFilter.RegisterFunction(xmlName.InnerText, listDefaultArguments);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
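        // Hedged sketch (not part of the original source) of the XML shape this loader expects,
        // reconstructed from the element names used above. The sqrt entry is illustrative; it
        // would match TestTokenizer05, where "sqrt(2)" is expanded to "sqrt(2, 2)" by a default
        // second argument.
        //
        // <knowledgebase>
        //   <functions>
        //     <function>
        //       <name>sqrt</name>
        //       <arguments>
        //         <argument>
        //           <hasdefaultvalue>false</hasdefaultvalue>
        //         </argument>
        //         <argument>
        //           <hasdefaultvalue>true</hasdefaultvalue>
        //           <defaultvalue>2</defaultvalue>
        //         </argument>
        //       </arguments>
        //     </function>
        //   </functions>
        // </knowledgebase>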
        /// <summary>
        /// Filters a mathematical expression or term.
        /// </summary>
        /// <param name="tokens">A token iterator yielding the individual parts of the mathematical expression.</param>
        /// <returns>A new token iterator that allows iterating over the filtered term.</returns>
        public IEnumerable <Token> PostProcessTokens(IEnumerable <Token> tokens)
        {
            bool          bIsStart   = true;
            Queue <Token> lastTokens = new Queue <Token>(3);

            foreach (Token t in tokens)
            {
                lastTokens.Enqueue(t);

                if (lastTokens.Count == 2 && bIsStart)
                {
                    bIsStart = false;

                    Token t1 = lastTokens.Dequeue();
                    Token t2 = lastTokens.Dequeue();

                    if ((t1.Type == TokenType.Operator && (t1.Value == "-" || t1.Value == "+")) &&
                        t2.Type == TokenType.Number)
                    {
                        yield return(MathExpressionTokenizer.GetModifiedToken(t2, (t1.Value != "+" ? t1.Value : "") + t2.Value));
                    }
                    else
                    {
                        if (t1.Type == TokenType.Parenthesis && t1.Value == "(")
                        {
                            lastTokens.Enqueue(t1);
                        }
                        else
                        {
                            yield return(t1);
                        }
                        lastTokens.Enqueue(t2);
                    }
                }

                if (lastTokens.Count == 3)
                {
                    Token t1 = lastTokens.Dequeue();
                    Token t2 = lastTokens.Dequeue();
                    Token t3 = lastTokens.Dequeue();

                    if ((t1.Type == TokenType.Operator || (t1.Type == TokenType.Parenthesis && t1.Value == "(")) &&
                        (t2.Type == TokenType.Operator && (t2.Value == "-" || t2.Value == "+")) &&
                        t3.Type == TokenType.Number)
                    {
                        yield return(t1);

                        yield return(MathExpressionTokenizer.GetModifiedToken(t3, (t2.Value != "+" ? t2.Value : "") + t3.Value));
                    }
                    else

                    {
                        yield return(t1);

                        lastTokens.Enqueue(t2);
                        lastTokens.Enqueue(t3);
                    }
                }
            }

            while (lastTokens.Count > 0)
            {
                yield return(lastTokens.Dequeue());
            }
        }
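        // Hedged usage sketch (not part of the original source): the filter above folds a
        // leading "+" or "-" sign into the following number, both at the start of an expression
        // and after an operator or "(". UnarySignFilter is an assumed class name; the token
        // construction mirrors the GenerateTestToken helper in this file.
        public void UnarySignFilter_Folds_Leading_Sign_Sketch()
        {
            var filter = new UnarySignFilter();   // hypothetical name for the filter class above
            var input  = new[]
            {
                GenerateTestToken(TokenType.Operator, "-"),
                GenerateTestToken(TokenType.Number, "21"),
            };

            var arrResult = filter.PostProcessTokens(input).ToArray();

            // Expected result: a single Number token "-21" (compare TestTokenizer17 above).
            Assert.AreEqual(1, arrResult.Length);
            Assert.AreEqual("-21", arrResult[0].Value);
        }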
 /// <summary>
 /// Builds a tree structure that represents a mathematical term.
 /// </summary>
 /// <param name="readerInfixTerm">The mathematical term in infix notation.</param>
 /// <param name="environment">The environment in whose context the mathematical term is valid.</param>
 /// <param name="arrVariables">A list of variables that occur in the term.</param>
 /// <returns>A tree structure that represents the mathematical term.</returns>
 public static Node Get(TextReader readerInfixTerm, MathEnvironment environment, params string[] arrVariables)
 {
     return(Get(MathExpressionTokenizer.Tokenize(readerInfixTerm, true, arrVariables), environment, true));
 }
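 // Hedged usage sketch (not part of the original source): the MathEnvironment constructor is an
 // assumption here; only the Get signature above is taken from the source.
 //   Node root = Get(new StringReader("x^2 + 1"), new MathEnvironment(), "x");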
 public void MathExpressionTokenizer_Can_Tokenize_Words()
 {
     var tokenizer = new MathExpressionTokenizer();
     var result = tokenizer.Tokenize("2+x*1");
     Assert.AreEqual(5, result.Count);
     Assert.AreEqual(TokenType.Numeric, result[0].Type);
     Assert.AreEqual(2D, result[0].Value);
     Assert.AreEqual(TokenType.Addition, result[1].Type);
     Assert.AreEqual(TokenType.Word, result[2].Type);
     Assert.AreEqual(TokenType.Multiplication, result[3].Type);
     Assert.AreEqual(TokenType.Numeric, result[4].Type);
     Assert.AreEqual(1D, result[4].Value);
 }
 /// <summary>
 /// Parses a mathematical term and returns it split into its individual tokens.
 /// </summary>
 /// <param name="strTerm">The mathematical term as a string, e.g. "5 + 5 - sqrt(9)".</param>
 /// <param name="arrVariables">A list of variables that occur in the term, e.g. "x", "y".</param>
 /// <returns>The parsed term as an array of tokens.</returns>
 public static Token[] TokenizeTerm(string strTerm, params string[] arrVariables)
 {
     return(MathExpressionTokenizer.Tokenize(strTerm, true, arrVariables).ToArray());
 }
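 // Hedged usage sketch (not part of the original source): the wrapper method name below is
 // illustrative; the TokenizeTerm calls mirror the signature above.
 public static void TokenizeTerm_UsageSketch()
 {
     Token[] arrTokens        = TokenizeTerm("5 + 5 - sqrt(9)");
     Token[] arrWithVariables = TokenizeTerm("x * y + 1", "x", "y");

     Console.WriteLine(arrTokens.Length);
     Console.WriteLine(arrWithVariables.Length);
 }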
 public void MathExpressionTokenizer_Unknown_Symbol_Throws_Exception()
 {
     var tokenizer = new MathExpressionTokenizer();
     var exception = Assert.Throws<MathExpressionException>(() => tokenizer.Tokenize("1$2"));
     Assert.AreEqual("Invalid expression.", exception.Message);
     Assert.AreEqual("Unknown token '$' in expression.", exception.Details);
 }
 public void MathExpressionTokenizer_Treats_Comma_As_Argument_Separator()
 {
     var tokenizer = new MathExpressionTokenizer();
     var result = tokenizer.Tokenize("3,23+1");
     Assert.AreEqual(5, result.Count);
     Assert.AreEqual(TokenType.Separator, result[1].Type);
 }
 /// <summary>
 /// Parses a mathematical term and returns it split into its individual tokens.
 /// </summary>
 /// <param name="reader">A StringReader from which the term to parse is read.</param>
 /// <param name="arrVariables">A list of variables that occur in the term, e.g. "x", "y".</param>
 /// <returns>The parsed term as an array of tokens.</returns>
 public static Token[] TokenizeTerm(StringReader reader, params string[] arrVariables)
 {
     return(MathExpressionTokenizer.Tokenize(reader, true, arrVariables).ToArray());
 }