Пример #1
0
        /// <summary>
        /// Scans "(1*2)" and verifies that the five tokens come back in order
        /// with the expected types and values.
        /// </summary>
        public void TestSimpleParenthesisExpression()
        {
            LexicalScanner scanner = new LexicalScanner("(1*2)");

            // Expected token stream, in scan order.
            var expectations = new[]
            {
                new { Type = typeof(OpenParenthesis), Value = "(" },
                new { Type = typeof(Integer), Value = "1" },
                new { Type = typeof(Operator), Value = "*" },
                new { Type = typeof(Integer), Value = "2" },
                new { Type = typeof(CloseParenthesis), Value = ")" }
            };

            foreach (var expected in expectations)
            {
                AbstractToken current = scanner.GetNextToken();

                Assert.IsInstanceOf(expected.Type, current);
                Assert.IsTrue(current.GetValue().Equals(expected.Value));
            }
        }
Пример #2
0
 /// <summary>
 /// Parses the expression-tail production: an additive operator followed by a
 /// term and another tail, an equals sign followed by a full expression, or
 /// empty. Adds an ExpressionAccent node under the current node and works
 /// inside it for the duration of the call.
 /// </summary>
 private void Expacc()
 {
     _currentNode.AddChild(new ParseNode(ParseEnum.ExpressionAccent));
     // Descend into the child that was just appended (last in the list).
     _currentNode = _currentNode.GetChildren()[_currentNode.GetChildren().Count - 1];
     if (_current is AddSub)
     {
         // Record the additive operator's value, advance past it, then parse Term Expacc.
         _currentNode.AddChild(new ParseNode(ParseEnum.Operator, _current.GetValue()));
         if (!_lex.EndOfInput)
         {
             _current = _lex.GetNextToken();
         }
         Term();
         Expacc();
     }
     else if (_current is Equals)
     {
         // NOTE(review): unlike the Operator branch, the token's value is dropped here —
         // presumably Equals needs no value; confirm against ParseNode's usage.
         _currentNode.AddChild(new ParseNode(ParseEnum.Equals));
         if (!_lex.EndOfInput)
         {
             _current = _lex.GetNextToken();
         }
         Expressie();
     }
     else
     {
         // Empty production: no operator follows, mark the tail as empty.
         _currentNode.AddChild(new ParseNode(ParseEnum.Empty));
     }
     // Ascend back to the parent before returning.
     _currentNode = _currentNode.GetParent();
 }
Пример #3
0
 /// <summary>
 /// Parses the Factor production: a parenthesized expression, a variable,
 /// or a number. Adds a Factor node under the current node and works inside
 /// it for the duration of the call. Anything else is reported as a syntax
 /// error and parsing is stopped.
 /// </summary>
 private void Factor()
 {
     _currentNode.AddChild(new ParseNode(ParseEnum.Factor));
     // Descend into the child that was just appended (last in the list).
     // Fixed casing: GetChildren, matching the accessor used by Expacc and Termacc.
     _currentNode = _currentNode.GetChildren()[_currentNode.GetChildren().Count - 1];
     if (!_lex.EndOfInput)
     {
         if (_current is OpenParenthesis)
         {
             _currentNode.AddChild(new ParseNode(ParseEnum.OpenParenthesis));
             _current = _lex.GetNextToken();
             Expressie();
             // NOTE(review): a missing closing parenthesis is silently accepted here —
             // confirm whether that should be a syntax error as well.
             if (_current is CloseParenthesis)
             {
                 _currentNode.AddChild(new ParseNode(ParseEnum.CloseParenthesis));
                 _current = _lex.GetNextToken();
             }
         }
         else if (_current is Variable)
         {
             _currentNode.AddChild(new ParseNode(ParseEnum.Variable, _current.GetValue()));
             _current = _lex.GetNextToken();
         }
         else if (_current is Number)
         {
             _currentNode.AddChild(new ParseNode(ParseEnum.Number, _current.GetValue()));
             _current = _lex.GetNextToken();
         }
         else
         {
             // Unrecognized token: report ("Syntaxfout." = Dutch for "syntax error") and abort.
             Console.WriteLine("Syntaxfout.");
             Stop();
         }
     }
     // Ascend back to the parent before returning.
     _currentNode = _currentNode.GetParent();
 }
Пример #4
0
        /// <summary>
        /// Scans "1.0" and verifies it is recognized as a single Float token.
        /// </summary>
        public void TestFloat()
        {
            LexicalScanner scanner = new LexicalScanner("1.0");

            AbstractToken current = scanner.GetNextToken();

            // The full literal, including the decimal point, is one Float token.
            Assert.IsInstanceOf(typeof(Float), current);
            Assert.IsTrue(current.GetValue().Equals("1.0"));
        }
Пример #5
0
        /// <summary>
        /// Factory: wraps the supplied token in a new Amount instance.
        /// </summary>
        /// <param name="token">The token the new Amount carries.</param>
        /// <returns>A new Amount whose Token is the supplied token.</returns>
        public static Amount CreateFromToken(AbstractToken token)
        {
            return new Amount { Token = token };
        }
Пример #6
0
        /// <summary>
        /// Consumes the next token and requires it to be an Equals token.
        /// </summary>
        /// <param name="tokenizer">Source of tokens; always advanced by one token.</param>
        /// <exception cref="ParseException">The next token is not an Equals token.</exception>
        private void Equals(Tokenizer.Tokenizer tokenizer)
        {
            AbstractToken next = tokenizer.NextToken();

            if (next.GetType() != typeof(Equals))
            {
                throw new ParseException("Expected type Equals but found: " + next.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
            }
        }
Пример #7
0
        /// <summary>
        /// Consumes the next token and requires it to close a tag value:
        /// either a ClosingBrace or a ValueQuote.
        /// </summary>
        /// <param name="tokenizer">Source of tokens; always advanced by one token.</param>
        /// <returns>The consumed closing token.</returns>
        /// <exception cref="ParseException">The next token is neither a ClosingBrace nor a ValueQuote.</exception>
        private AbstractToken ValueStop(Tokenizer.Tokenizer tokenizer)
        {
            AbstractToken next = tokenizer.NextToken();
            Type nextType = next.GetType();

            if (nextType != typeof(ClosingBrace) && nextType != typeof(ValueQuote))
            {
                throw new ParseException("Expected type ClosingBrace or ValueQuote but found: " + next.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
            }

            return next;
        }
Пример #8
0
        /// <summary>
        /// Consumes the next token and returns its value; the token must be
        /// either a Comma or a Text token.
        /// </summary>
        /// <param name="tokenizer">Source of tokens; always advanced by one token.</param>
        /// <returns>The string value of the consumed token.</returns>
        /// <exception cref="ParseException">The next token is neither a Comma nor a Text token.</exception>
        private String Text(Tokenizer.Tokenizer tokenizer)
        {
            AbstractToken token = tokenizer.NextToken();

            if (token.GetType() == typeof(Comma) || token.GetType() == typeof(Text))
            {
                return(token.GetValue());
            }

            // Fixed message: Comma is also accepted above, so report both types (cf. ValueStop).
            throw new ParseException("Expected type Comma or Text but found: " + token.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
        }
Пример #9
0
        /// <summary>
        /// Primes the parser with the first token and parses a full expression.
        /// </summary>
        /// <returns>The root of the parse tree (also returned after a lexical error).</returns>
        public ParseNode Parse()
        {
            _current = _lex.GetNextToken();

            try
            {
                Expressie();
            }
            catch (MatchException me)
            {
                // Lexical errors are reported to the console; the partial tree is still returned.
                Console.WriteLine(me.Message);
            }

            return _start;
        }
Пример #10
0
        /// <summary>
        /// Wraps a service token in a parser token: service results are parsed
        /// before being forwarded, faults are forwarded unchanged.
        /// </summary>
        /// <param name="serviceToken">The service token to adapt.</param>
        /// <returns>The parser token that receives the parsed result or the fault.</returns>
        public ParserToken <T> createToken(AbstractToken <object> serviceToken)
        {
            var wrapped = new ParserToken <T>();

            // Parse the raw service result before notifying the listener.
            // TODO: an error thrown while parsing should probably surface as a fault.
            serviceToken.result = delegate(object rawResult)
            {
                wrapped.serviceResult(parseResult(rawResult));
            };

            // Faults need no translation; pass them straight through.
            serviceToken.fault = wrapped.serviceFault;

            return wrapped;
        }
Пример #11
0
        /// <summary>
        /// Scans the whole input; every At token introduces a new bibtex entry,
        /// all other top-level tokens are skipped.
        /// </summary>
        /// <param name="tokenizer">Source of tokens, consumed to the end of input.</param>
        /// <returns>The bibtex file node holding all parsed entries.</returns>
        private ParseNode ParseInput(Tokenizer.Tokenizer tokenizer)
        {
            Nodes.BibtexFile file = new Nodes.BibtexFile();

            while (!tokenizer.EndOfInput)
            {
                if (tokenizer.NextToken().GetType() == typeof(At))
                {
                    file.Entries.Add(Entry(tokenizer));
                }
            }

            return file;
        }
Пример #12
0
        /// <summary>
        /// Verifies GetPreviousCharacters: it throws before enough input has been
        /// consumed and returns the preceding characters once it has.
        /// </summary>
        public void TestPrevious()
        {
            Tokenizer tokenizer = new Tokenizer(new ExpressionDictionary(), @"@book{ aaker:1912,
                                                                                author = { tes~est }
                                                                            }");

            // Nothing consumed yet, so looking back 10 characters is out of range.
            Assert.Throws <ArgumentOutOfRangeException>(() => tokenizer.GetPreviousCharacters(10));

            // Consume six tokens to move the stream pointer past the author key.
            // (The sixth return value was previously stored in an unused local.)
            for (int i = 0; i < 6; i++)
            {
                tokenizer.NextToken();
            }

            Assert.AreEqual("   author ", tokenizer.GetPreviousCharacters(10));
        }
Пример #13
0
        /// <summary>
        /// Consumes the next token when it is a NewLine. A missing NewLine is a
        /// parse error unless <paramref name="optional"/> is true.
        /// </summary>
        /// <param name="tokenizer">Source of tokens; only advanced when a NewLine is found.</param>
        /// <param name="optional">When true, a missing NewLine is silently accepted.</param>
        /// <exception cref="ParseException">The next token is not a NewLine and one is required.</exception>
        private void NewLine(Tokenizer.Tokenizer tokenizer, Boolean optional = false)
        {
            AbstractToken token = tokenizer.Peek();

            if (token.GetType() == typeof(NewLine))
            {
                tokenizer.NextToken();
                return;
            }

            if (optional)
            {
                return;
            }

            // Fixed copy-paste error: the message previously reported "Comma".
            throw new ParseException("Expected type NewLine but found: " + token.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
        }
Пример #14
0
        /// <summary>
        /// Scans "1+2" and verifies that the three tokens come back in order
        /// with the expected types and values.
        /// </summary>
        public void TestSimpleAddSubExpression()
        {
            LexicalScanner scanner = new LexicalScanner("1+2");

            // Expected token stream, in scan order.
            var expectations = new[]
            {
                new { Type = typeof(Integer), Value = "1" },
                new { Type = typeof(AddSub), Value = "+" },
                new { Type = typeof(Integer), Value = "2" }
            };

            foreach (var expected in expectations)
            {
                AbstractToken current = scanner.GetNextToken();

                Assert.IsInstanceOf(expected.Type, current);
                Assert.IsTrue(current.GetValue().Equals(expected.Value));
            }
        }
Пример #15
0
 /// <summary>
 /// Parses the term-tail production: a multiplicative operator followed by a
 /// factor and another tail, or empty. Adds a TermAccent node under the
 /// current node and works inside it for the duration of the call.
 /// </summary>
 private void Termacc()
 {
     _currentNode.AddChild(new ParseNode(ParseEnum.TermAccent));
     // Descend into the child that was just appended (last in the list).
     _currentNode = _currentNode.GetChildren()[_currentNode.GetChildren().Count - 1];
     if (_current is Operator)
     {
         // Record the operator's value, advance past it, then parse Factor Termacc.
         _currentNode.AddChild(new ParseNode(ParseEnum.Operator, _current.GetValue()));
         if (!_lex.EndOfInput)
         {
             _current = _lex.GetNextToken();
         }
         Factor();
         Termacc();
     }
     else
     {
         // Empty production: no operator follows, mark the tail as empty.
         _currentNode.AddChild(new ParseNode(ParseEnum.Empty));
     }
     // Ascend back to the parent before returning.
     _currentNode = _currentNode.GetParent();
 }
Пример #16
0
        /// <summary>
        /// Returns the next token without forwarding the stream pointer.
        /// </summary>
        /// <returns>The next token in the input.</returns>
        /// <exception cref="MatchException">No token definition matches the remaining input.</exception>
        public AbstractToken Peek()
        {
            // Try every token definition against the remaining input.
            foreach (KeyValuePair <Type, string> pair in _dictionary)
            {
                // TODO: See if substring does not impose a too harsh performance drop
                Match match = Regex.Match(_input.Substring(_counter), pair.Value);

                // Skip non-matches and dictionary entries that are not token types.
                if (!match.Success || !pair.Key.IsSubclassOf(typeof(AbstractToken)))
                {
                    continue;
                }

                // Instantiate the matched token type with the value and its position.
                return (AbstractToken)Activator.CreateInstance(pair.Key, new object[] { match.Value, _counter }, null);
            }

            throw new MatchException(_input[_counter].ToString(CultureInfo.InvariantCulture), _counter);
        }
Пример #17
0
        /// <summary>
        /// Consumes and returns the next token, advancing the stream pointer past it.
        /// Matches from dictionary entries that are not AbstractToken subclasses
        /// (e.g. things to skip) still advance the pointer but yield no token.
        /// </summary>
        /// <returns>The next token in the input.</returns>
        /// <exception cref="MatchException">No token definition matches the remaining input.</exception>
        public AbstractToken NextToken()
        {
            // Loop through all tokens and check if they match the input string.
            // NOTE(review): matching order follows dictionary enumeration order —
            // presumably the dictionary preserves insertion order; confirm.
            foreach (KeyValuePair <Type, string> pair in _dictionary)
            {
                Match match;

                if (pair.Key == typeof(Comment))
                {
                    // Comments may span line boundaries, so match them in multiline mode.
                    match = Regex.Match(_input.Substring(_counter), pair.Value, RegexOptions.Multiline);
                }
                else
                {
                    // TODO: See if substring does not impose a too harsh performance drop
                    match = Regex.Match(_input.Substring(_counter), pair.Value);
                }

                if (!match.Success)
                {
                    continue;
                }
                // Advance past the matched text even if no token is produced below.
                _counter += match.Value.Length;

                if (!pair.Key.IsSubclassOf(typeof(AbstractToken)))
                {
                    // Non-token match (skipped content); keep trying the remaining definitions
                    // at the new position.
                    continue;
                }

                // Create new instance of the specified type with the found value and
                // its start position (counter was already advanced above) as parameters.
                AbstractToken token = (AbstractToken)Activator.CreateInstance(pair.Key, new object[] { match.Value, _counter - match.Value.Length }, null);

                return(token);
            }

            throw new MatchException(_input, _input[_counter].ToString(CultureInfo.InvariantCulture), _counter);
        }
Пример #18
0
        /// <summary>
        /// Retrieves the tag values from the input, reading key/value pairs until
        /// the entry's closing brace is peeked. Values may be delimited by braces
        /// (with nesting) or by double quotes.
        /// </summary>
        /// <param name="tokenizer">Source of tokens, advanced past all tags read.</param>
        /// <returns>The tags parsed from the entry body.</returns>
        private ICollection <Tag> Tags(Tokenizer.Tokenizer tokenizer)
        {
            // This function needs some refactoring.
            List <Tag> tags = new List <Tag>();

            // Keep reading "key = value" pairs until the entry's closing brace appears.
            while (tokenizer.Peek().GetType() != typeof(ClosingBrace))
            {
                Tag tag = new Tag {
                    Key = Text(tokenizer)
                };
                Equals(tokenizer);
                // The opening delimiter determines how the value is terminated below.
                AbstractToken startToken = ValueStart(tokenizer);

                List <AbstractToken> tokens = new List <AbstractToken>();

                bool keepProcessing = true;
                // Brace nesting depth; 1 means we are at the value's outermost level.
                int  balance        = 1;

                while (keepProcessing)
                {
                    Type nextTokenType = tokenizer.Peek().GetType();

                    if (nextTokenType == typeof(OpeningBrace))
                    {
                        balance++;
                    }

                    // A closing brace at depth 1 terminates a brace-delimited value.
                    if ((startToken.GetType() == typeof(OpeningBrace) && nextTokenType == typeof(ClosingBrace)))
                    {
                        if (balance == 1)
                        {
                            keepProcessing = false;
                            ValueStop(tokenizer);
                        }
                    }

                    // Nested closing braces just reduce the depth.
                    if (nextTokenType == typeof(ClosingBrace))
                    {
                        if (balance > 1)
                        {
                            balance--;
                        }
                    }

                    // Double quotes are much more difficult to handle than the braces. The problem is that there is no distinction between
                    // start and stop quotes. This means we need to look forward to see what is behind the quote to see if it is a quote @ the end
                    // or the start of a new quote.
                    if (nextTokenType == typeof(ValueQuote))
                    {
                        AbstractToken quote = tokenizer.NextToken();

                        Type nextType = tokenizer.Peek().GetType();
                        if ((nextType == typeof(ClosingBrace) && balance == 1) ||
                            nextType == typeof(Comma))
                        {
                            // end of line found.
                            keepProcessing = false;
                        }
                        else
                        {
                            // The quote is part of the value; keep it and continue scanning.
                            tokens.Add(quote);
                            continue;
                        }
                    }

                    if (keepProcessing)
                    {
                        tokens.Add(tokenizer.NextToken());
                    }
                }

                // Reassemble the value from the raw text of all collected tokens.
                tag.Value = tokens.Aggregate("", (s, token) => s + token.RawValue);

                // Trailing comma and newline are both optional after a tag.
                Comma(tokenizer, true);
                NewLine(tokenizer, true);

                tags.Add(tag);
            }

            return(tags);
        }