public LexerToken ConsumeGet()
        {
            // Read the token at the current consume position, advance past it,
            // and hand the token back to the caller.
            LexerToken result = buffer.ElementAt(consumeOffset);
            Consume();
            return result;
        }
        private Signal ScanLeftUnary()
        {
            // Consume the prefix-operator token, scan the operand it applies to,
            // and build the corresponding prefix function signal.
            LexerToken op = tokenizer.ConsumeGet();
            Signal operand = ScanOperand();
            return context.Builder.Function(op.Text, InfixNotation.PreOperator, operand);
        }
        private void ScanCommand()
        {
            // Dispatch on the lookahead token to the matching command scanner.
            // Check order matters: keyword checks come before the identifier
            // assignment check, and a plain signal expression is the fallback.
            LexerToken ahead = tokenizer.LookaheadFistToken;

            if (ahead.IsType(TokenTypes.DefineKeyword))
            {
                ScanDefine();
                return;
            }
            if (ahead.IsType(TokenTypes.InstantiateKeyword))
            {
                ScanInstantiate();
                return;
            }
            if (ahead.IsType(TokenTypes.SignalKeyword))
            {
                ScanSignalDeclaration();
                return;
            }
            if (ahead.IsType(TokenTypes.BusKeyword))
            {
                ScanBusDeclaration();
                return;
            }
            // "name <- ..." (identifier followed by an assignment token).
            if (ahead.IsType(TokenTypes.TextIdentifier) && tokenizer.LookaheadToken(1).IsType(TokenTypes.Assignment))
            {
                ScanSignalAssignment();
                return;
            }
            ScanSignalExpression();
        }
        private Signal ScanOperandPart()
        {
            // Dispatch on the lookahead token to the scanner for that operand
            // kind. Check order is significant: encapsulations and prefix
            // operators are tried before the plain token-type checks.
            LexerToken next = tokenizer.LookaheadFistToken;

            if (IsBeginningEncapsulation(next))
            {
                return ScanEncapsulation();
            }
            if (IsLeftUnary(next))
            {
                return ScanLeftUnary();
            }
            if (next.IsType(TokenTypes.TextIdentifier) || next.IsType(TokenTypes.SymbolIdentifier))
            {
                return ScanIdentifierInExpression();
            }
            if (next.IsType(TokenTypes.Integer))
            {
                return ScanIntegerSignal();
            }
            if (next.IsType(TokenTypes.Real))
            {
                return ScanRealSignal();
            }
            if (next.IsType(TokenTypes.Literal))
            {
                return ScanLiteralSignal();
            }
            // Nothing matched: the token cannot start an operand.
            throw new ParsingException("Parsing failed. Parser Scanner detected unexpected operand part token '" + next.Text + "'");
        }
        private Signal ScanRealSignal()
        {
            // Match a real-number token, turn it into a constant signal,
            // and register that signal with the current system.
            LexerToken number = tokenizer.MatchGet(TokenTypes.Real);
            Signal constant = RealValue.ParseConstant(context, number.Text);
            system.AddSignal(constant);
            return constant;
        }
 /// <summary>Appends a token at the tail of the circular token queue.</summary>
 /// <param name="token">The token to enqueue.</param>
 private void Append(LexerToken token)
 {
     // Grow first when the ring buffer is full so the write below
     // always has a free slot available.
     if (count == buffer.Length)
     {
         Expand();
     }
     // (offset + count) wrapped by the power-of-two mask is the tail slot.
     buffer[(offset + count) & sizeLessOne] = token;
     count++;
 }
        public LexerToken MatchGet(TokenTypes expected)
        {
            // Peek the current token and fail with a descriptive parsing
            // exception if it is not of the expected type; otherwise consume
            // it and return it.
            LexerToken current = buffer.ElementAt(consumeOffset);

            if (!current.IsType(expected))
            {
                throw new ParsingException(string.Format(MathNet.Symbolics.Properties.Resources.ex_Parsing_Failed_TokenMismatch, expected.ToString(), current.Text, CurrentTokenNeighbourhood()));
            }
            Consume();
            return current;
        }
        private Signal ScanOperand()
        {
            // Scan the core operand, then fold any trailing postfix operators
            // around it, left to right.
            Signal result = ScanOperandPart();

            while (IsRightUnary(tokenizer.LookaheadFistToken))
            {
                LexerToken op = tokenizer.ConsumeGet();
                result = context.Builder.Function(op.Text, InfixNotation.PostOperator, result);
            }
            return result;
        }
 /// <summary>Scans either a fully qualified math identifier or a plain label.</summary>
 /// <param name="defaultToWorkDomain">When true, labels default to the "Work" domain
 /// instead of resolving the domain through the entity library.</param>
 private MathIdentifier ScanEntityMathIdentifierOrLabel(bool defaultToWorkDomain)
 {
     // Fully qualified identifier: parse it directly.
     if (tokenizer.LookaheadFistToken.IsType(TokenTypes.MathIdentifier))
     {
         LexerToken id = tokenizer.MatchGet(TokenTypes.MathIdentifier);
         return MathIdentifier.Parse(id.Text);
     }

     // Plain label: determine its domain, then build the identifier.
     LexerToken label = tokenizer.MatchGet(TokenTypes.TextIdentifier);
     string domain = defaultToWorkDomain ? "Work" : context.Library.Entities.FindDomainOfLabel(label.Text);
     return new MathIdentifier(label.Text, domain);
 }
// Beispiel #10 (extraction artifact: example-dump separator, not code)
 private LexerToken ScanMarkers()
 {
     // A marked identifier is the current character followed by a text
     // identifier, either directly or separated by a single '_'.
     bool underscoreForm = lexer.LookaheadCharacter(1) == '_' && IsTextIdentifierCharacter(lexer.LookaheadCharacter(2));
     bool directForm = lexer.LookaheadCharacter(1) != '_' && IsTextIdentifierCharacter(lexer.LookaheadCharacter(1));

     if (underscoreForm || directForm)
     {
         // Remember the marker character before consuming it, then prepend
         // it to the scanned identifier's text.
         char marker = lexer.LookaheadFistCharacter;
         lexer.Consume();
         LexerToken identifier = ScanTextIdentifier();
         identifier.Text = marker + identifier.Text;
         return identifier;
     }
     // Not a marked identifier: fall back to a plain symbol identifier.
     return ScanSymbolIdentifier();
 }
 /// <summary>Doubles the capacity of the circular token buffer.</summary>
 private void Expand()
 {
     int doubled = buffer.Length * 2;
     // Refuse to grow past the configured maximum (maxSize <= 0 means unbounded).
     if (maxSize > 0 && doubled > maxSize)
     {
         throw new ParsingException("Parsing failed. Maximum parser buffer size exceeded.");
     }
     // Copy the ring contents into a fresh buffer, linearized from index 0.
     LexerToken[] grown = new LexerToken[doubled];
     for (int i = 0; i < buffer.Length; i++)
     {
         grown[i] = buffer[(offset + i) & sizeLessOne];
     }
     buffer      = grown;
     sizeLessOne = grown.Length - 1;
     offset      = 0;
 }
        private Signal ScanFunction()
        {
            // Consume the function-name token, scan its argument list, and
            // resolve the entity by name and arity.
            LexerToken name = tokenizer.ConsumeGet();
            List <Signal> args = ScanList();
            Entity entity = ScanEntity(name, InfixNotation.None, args.Count);

            // Without an index suffix the call yields a single function signal.
            if (!tokenizer.LookaheadFistToken.IsType(TokenTypes.LeftVector))
            {
                return context.Builder.Function(entity, args);
            }

            // An optional [index] suffix selects one output of the function.
            tokenizer.Match(TokenTypes.LeftVector);
            long index = ScanInteger();
            tokenizer.Match(TokenTypes.RightVector);
            return context.Builder.Functions(entity, args)[(int)index];
        }
        private Signal ScanEncapsulation()
        {
            // Dispatch on the kind of opening bracket; a scalar is the fallback.
            LexerToken next = tokenizer.LookaheadFistToken;

            if (next.IsType(TokenTypes.LeftList))
            {
                return ScanParenthesisSignal();
            }
            if (next.IsType(TokenTypes.LeftVector))
            {
                return ScanVectorSignal();
            }
            return next.IsType(TokenTypes.LeftSet) ? ScanSetSignal() : ScanScalarSignal();
        }
 private Entity ScanEntity()
 {
     LexerToken ahead = tokenizer.LookaheadFistToken;

     // Literal and symbol identifiers both resolve through the symbol table;
     // the first match is taken.
     if (ahead.IsType(TokenTypes.Literal))
     {
         LexerToken literal = tokenizer.MatchGet(TokenTypes.Literal);
         return context.Library.Entities.LookupSymbol(literal.Text)[0];
     }
     if (ahead.IsType(TokenTypes.SymbolIdentifier))
     {
         LexerToken symbol = tokenizer.MatchGet(TokenTypes.SymbolIdentifier);
         return context.Library.Entities.LookupSymbol(symbol.Text)[0];
     }

     // Otherwise it is a label or a fully qualified identifier.
     MathIdentifier entityId = ScanEntityMathIdentifierOrLabel(false);
     return context.Library.LookupEntity(entityId);
 }
 /// <summary>Resolves the entity named by an already-consumed token.</summary>
 /// <param name="token">The token naming the entity.</param>
 /// <param name="notation">The infix notation the entity is used with.</param>
 /// <param name="inputs">The number of input signals the entity takes.</param>
 private Entity ScanEntity(LexerToken token, InfixNotation notation, int inputs)
 {
     // Fully qualified identifiers resolve directly by id.
     if (token.IsType(TokenTypes.MathIdentifier))
     {
         return context.Library.LookupEntity(MathIdentifier.Parse(token.Text));
     }

     // Symbols resolve by text together with notation and arity.
     if (token.IsType(TokenTypes.Literal) || token.IsType(TokenTypes.SymbolIdentifier))
     {
         return context.Library.LookupEntity(token.Text, notation, inputs);
     }

     // Text symbol or label: try a symbol lookup first, then fall back to
     // resolving the label's domain and looking up its full identifier.
     Entity resolved;
     if (context.Library.TryLookupEntity(token.Text, notation, inputs, out resolved))
     {
         return resolved;
     }
     string domain = context.Library.Entities.FindDomainOfLabel(token.Text);
     return context.Library.LookupEntity(new MathIdentifier(token.Text, domain));
 }
        private string ScanLiteral()
        {
            // Match a literal token and yield its raw text.
            return tokenizer.MatchGet(TokenTypes.Literal).Text;
        }
 private bool IsBinary(LexerToken token)
 {
     // Only identifier tokens can name binary operators.
     bool isIdentifier = token.IsType(TokenTypes.SymbolIdentifier) || token.IsType(TokenTypes.TextIdentifier);
     if (!isIdentifier)
     {
         return false;
     }
     // The symbol must be registered as an inner operator of either associativity.
     return context.Library.Entities.ContainsSymbol(token.Text, InfixNotation.LeftAssociativeInnerOperator)
            || context.Library.Entities.ContainsSymbol(token.Text, InfixNotation.RightAssociativeInnerOperator);
 }
        private string ScanEntitySymbol()
        {
            // Match a symbol-identifier token and yield its text.
            return tokenizer.MatchGet(TokenTypes.SymbolIdentifier).Text;
        }
        private long ScanInteger()
        {
            // Match an integer token and parse it using the context's number format.
            LexerToken number = tokenizer.MatchGet(TokenTypes.Integer);
            return long.Parse(number.Text, Context.NumberFormat);
        }
        private string ScanTextIdentifier()
        {
            // Match a text-identifier token and yield its text.
            return tokenizer.MatchGet(TokenTypes.TextIdentifier).Text;
        }
        private MathIdentifier ScanMathIdentifier()
        {
            // Match a math-identifier token and parse it into a MathIdentifier.
            return MathIdentifier.Parse(tokenizer.MatchGet(TokenTypes.MathIdentifier).Text);
        }
 private bool IsRightUnary(LexerToken token)
 {
     // Postfix operator: an identifier token registered with PostOperator notation.
     bool isIdentifier = token.IsType(TokenTypes.SymbolIdentifier) || token.IsType(TokenTypes.TextIdentifier);
     return isIdentifier && context.Library.Entities.ContainsSymbol(token.Text, InfixNotation.PostOperator);
 }
 private static bool IsBeginningEncapsulation(LexerToken token)
 {
     // Encapsulations always open with one of the left-bracket token types.
     return token.IsType(TokenTypes.Left);
 }
 private bool IsRightUnary(LexerToken token)
 {
     // True when the token is an identifier whose text names a postfix operator.
     if (!(token.IsType(TokenTypes.SymbolIdentifier) || token.IsType(TokenTypes.TextIdentifier)))
     {
         return false;
     }
     return context.Library.Entities.ContainsSymbol(token.Text, InfixNotation.PostOperator);
 }
 private static bool IsBeginningEncapsulation(LexerToken token)
 {
     // Any opening bracket token type starts an encapsulated expression.
     bool opensBracket = token.IsType(TokenTypes.Left);
     return opensBracket;
 }
 /// <summary>Grows the ring buffer to twice its current capacity.</summary>
 private void Expand()
 {
     // Refuse to grow past the configured maximum (maxSize <= 0 means unbounded).
     if (maxSize > 0 && buffer.Length * 2 > maxSize)
     {
         throw new ParsingException("Parsing failed. Maximum parser buffer size exceeded.");
     }
     LexerToken[] replacement = new LexerToken[buffer.Length * 2];
     // Unwrap the circular contents so the new buffer starts at slot 0.
     for (int i = 0; i < buffer.Length; i++)
     {
         replacement[i] = buffer[(offset + i) & sizeLessOne];
     }
     buffer = replacement;
     sizeLessOne = buffer.Length - 1;
     offset = 0;
 }
 private bool IsBinary(LexerToken token)
 {
     // Binary operators are identifier tokens registered as inner operators.
     if (!token.IsType(TokenTypes.SymbolIdentifier) && !token.IsType(TokenTypes.TextIdentifier))
     {
         return false;
     }
     bool leftAssociative = context.Library.Entities.ContainsSymbol(token.Text, InfixNotation.LeftAssociativeInnerOperator);
     return leftAssociative || context.Library.Entities.ContainsSymbol(token.Text, InfixNotation.RightAssociativeInnerOperator);
 }
        private Signal ScanSignalExpression()
        {
            /*
             *  This method is kind of a postfix machine, parsing
             *  infix expressions to postfix expressions (having
             *  regard to precedence) using the operator stack and
             *  evaluates the postfix term to an expression tree
             *  using the expression stack.
             */

            SignalStack expressionStack = new SignalStack(4, 4096);
            EntityStack operatorStack   = new EntityStack(4, 4096);

            // Seed the machine with the first (leftmost) operand.
            expressionStack.Push(ScanOperand());
            Entity lastEntity, topEntity;

            MathNet.Symbolics.Backend.Containers.EntityTable entityTable = context.Library.Entities;
            Library library = context.Library;

            while (true)
            {
                // Stop as soon as the lookahead is not an identifier token
                // that could name a binary operator.
                LexerToken ahead = tokenizer.LookaheadFistToken;
                if (!(ahead.IsType(TokenTypes.SymbolIdentifier) || ahead.IsType(TokenTypes.TextIdentifier)))
                {
                    break;
                }
                // Resolve the token to a binary inner-operator entity; the
                // left-associative form is preferred over the right-associative
                // one. If neither exists, the expression ends here.
                if (entityTable.ContainsSymbol(ahead.Text, InfixNotation.LeftAssociativeInnerOperator))
                {
                    lastEntity = library.LookupEntity(ahead.Text, InfixNotation.LeftAssociativeInnerOperator, 2, 1, 0);
                }
                else if (entityTable.ContainsSymbol(ahead.Text, InfixNotation.RightAssociativeInnerOperator))
                {
                    lastEntity = library.LookupEntity(ahead.Text, InfixNotation.RightAssociativeInnerOperator, 2, 1, 0);
                }
                else
                {
                    break;
                }
                tokenizer.Consume();

                // Reduce: while the stacked operator binds before the incoming
                // one, pop it and combine the two topmost operands. "swap" holds
                // the right operand so that argument order is preserved.
                while (operatorStack.Count > 0 && IsPrecedenceLeftFirst(operatorStack.Peek(), lastEntity))
                {
                    topEntity = operatorStack.Pop();
                    Signal swap = expressionStack.Pop();
                    Signal s    = context.Builder.Function(topEntity, expressionStack.Pop(), swap);
                    expressionStack.Push(s);
                }

                // Shift: push the new operator and scan its right-hand operand.
                operatorStack.Push(lastEntity);
                expressionStack.Push(ScanOperand());
            }

            // Drain the remaining operators; each pop combines the two topmost
            // operands into a function signal (right operand popped first).
            while (operatorStack.Count > 0)
            {
                topEntity = operatorStack.Pop();
                Signal swap = expressionStack.Pop();
                Signal s    = context.Builder.Function(topEntity, expressionStack.Pop(), swap);
                expressionStack.Push(s);
            }

            // The single remaining signal is the root of the expression tree.
            Signal ret = expressionStack.Pop();

            system.AddSignalTree(ret, false, false);
            return(ret);
        }
 private Entity ScanEntity(LexerToken token, InfixNotation notation, int inputs)
 {
     // Id tokens: parse the identifier and look the entity up directly.
     if (token.IsType(TokenTypes.MathIdentifier))
     {
         return context.Library.LookupEntity(MathIdentifier.Parse(token.Text));
     }

     // Symbol tokens: look up by symbol text, notation and input count.
     if (token.IsType(TokenTypes.Literal) || token.IsType(TokenTypes.SymbolIdentifier))
     {
         return context.Library.LookupEntity(token.Text, notation, inputs);
     }

     // Text symbol or label: prefer a direct symbol match; otherwise resolve
     // the label's domain and look the entity up by its full identifier.
     Entity match;
     if (context.Library.TryLookupEntity(token.Text, notation, inputs, out match))
     {
         return match;
     }
     return context.Library.LookupEntity(new MathIdentifier(token.Text, context.Library.Entities.FindDomainOfLabel(token.Text)));
 }
 /// <summary>Enqueues a token at the tail of the circular buffer.</summary>
 /// <param name="token">The token to enqueue.</param>
 private void Append(LexerToken token)
 {
     // Double the capacity first if the ring is already full.
     if (count == buffer.Length)
     {
         Expand();
     }
     int tail = (offset + count) & sizeLessOne;
     buffer[tail] = token;
     count++;
 }