/// <summary>Returns the token at the current consume position and advances past it.</summary>
/// <returns>The token that was consumed.</returns>
public LexerToken ConsumeGet()
{
    LexerToken current = buffer.ElementAt(consumeOffset);
    Consume();
    return current;
}
/// <summary>Scans a prefix unary operator together with the operand it applies to.</summary>
/// <returns>The signal produced by applying the prefix operator to the operand.</returns>
private Signal ScanLeftUnary()
{
    LexerToken operatorToken = tokenizer.ConsumeGet();
    Signal operand = ScanOperand();
    return builder.Function(operatorToken.Text, InfixNotation.PreOperator, operand);
}
/// <summary>Dispatches the upcoming statement to the matching scanner based on its leading token.</summary>
private void ScanCommand()
{
    LexerToken first = tokenizer.LookaheadFistToken;
    if (first.IsType(TokenTypes.DefineKeyword))
    {
        ScanDefine();
        return;
    }
    if (first.IsType(TokenTypes.InstantiateKeyword))
    {
        ScanInstantiate();
        return;
    }
    if (first.IsType(TokenTypes.SignalKeyword))
    {
        ScanSignalDeclaration();
        return;
    }
    if (first.IsType(TokenTypes.BusKeyword))
    {
        ScanBusDeclaration();
        return;
    }
    // An identifier directly followed by an assignment token is a signal assignment;
    // everything else falls through to a plain expression.
    if (first.IsType(TokenTypes.TextIdentifier) && tokenizer.LookaheadToken(1).IsType(TokenTypes.Assignment))
    {
        ScanSignalAssignment();
        return;
    }
    ScanSignalExpression();
}
/// <summary>Scans a single operand part: an encapsulation, a prefix-unary expression,
/// an identifier, or a numeric/literal constant.</summary>
/// <returns>The signal representing the scanned operand part.</returns>
/// <exception cref="MathNet.Symbolics.Exceptions.ParsingException">Thrown when the next token cannot begin an operand.</exception>
private Signal ScanOperandPart()
{
    LexerToken next = tokenizer.LookaheadFistToken;
    if (IsBeginningEncapsulation(next))
    {
        return ScanEncapsulation();
    }
    if (IsLeftUnary(next))
    {
        return ScanLeftUnary();
    }
    if (next.IsType(TokenTypes.TextIdentifier) || next.IsType(TokenTypes.SymbolIdentifier))
    {
        return ScanIdentifierInExpression();
    }
    if (next.IsType(TokenTypes.Integer))
    {
        return ScanIntegerSignal();
    }
    if (next.IsType(TokenTypes.Real))
    {
        return ScanRealSignal();
    }
    if (next.IsType(TokenTypes.Literal))
    {
        return ScanLiteralSignal();
    }
    throw new MathNet.Symbolics.Exceptions.ParsingException("Parsing failed. Parser Scanner detected unexpected operand part token '" + next.Text + "'");
}
/// <summary>Consumes a real-number token, builds a constant signal from it
/// and registers the signal with the system.</summary>
private Signal ScanRealSignal()
{
    LexerToken literal = tokenizer.MatchGet(TokenTypes.Real);
    Signal constant = RealValue.ParseConstant(literal.Text);
    system.AddSignal(constant);
    return constant;
}
/// <summary>Consumes an integer token, builds a constant signal from it
/// and registers the signal with the system.</summary>
private Signal ScanIntegerSignal()
{
    LexerToken literal = tokenizer.MatchGet(TokenTypes.Integer);
    Signal constant = IntegerValue.ParseConstant(literal.Text);
    system.AddSignal(constant);
    return constant;
}
/// <summary>Consumes a literal token, wraps it as a constant signal
/// and registers the signal with the system.</summary>
private Signal ScanLiteralSignal()
{
    LexerToken literal = tokenizer.MatchGet(TokenTypes.Literal);
    Signal constant = LiteralValue.Constant(literal.Text);
    system.AddSignal(constant);
    return constant;
}
/// <summary>Appends a token at the tail of the circular buffer, growing the buffer first when it is full.</summary>
/// <param name="token">The token to add.</param>
private void Append(LexerToken token)
{
    if (count == buffer.Length)
    {
        Expand();
    }
    // Ring-buffer tail: masking with sizeLessOne relies on the capacity being a power of two.
    int tail = (offset + count) & sizeLessOne;
    buffer[tail] = token;
    count++;
}
/// <summary>Scans an operand and folds any trailing postfix operators onto it, left to right.</summary>
private Signal ScanOperand()
{
    Signal result = ScanOperandPart();
    while (IsRightUnary(tokenizer.LookaheadFistToken))
    {
        LexerToken operatorToken = tokenizer.ConsumeGet();
        result = builder.Function(operatorToken.Text, InfixNotation.PostOperator, result);
    }
    return result;
}
/// <summary>Returns and consumes the current token if it has the expected type.</summary>
/// <param name="expected">The token type the current token must match.</param>
/// <returns>The consumed token.</returns>
/// <exception cref="Exceptions.ParsingException">Thrown when the current token does not match <paramref name="expected"/>.</exception>
public LexerToken MatchGet(TokenTypes expected)
{
    LexerToken current = buffer.ElementAt(consumeOffset);
    if (current.IsType(expected))
    {
        Consume();
        return current;
    }
    throw new Exceptions.ParsingException(string.Format(
        Config.UserCulture,
        Properties.Resources.ex_Parsing_Failed_TokenMismatch,
        expected.ToString(),
        current.Text,
        CurrentTokenNeighbourhood()));
}
/// <summary>Scans either a fully qualified math identifier or a plain label and
/// resolves it to a <c>MathIdentifier</c>.</summary>
/// <param name="defaultToWorkDomain">When true, a label is placed in the "Work" domain;
/// otherwise the domain is resolved through the library.</param>
private MathIdentifier ScanEntityMathIdentifierOrLabel(bool defaultToWorkDomain)
{
    // Fully qualified identifier: parse it directly.
    if (tokenizer.LookaheadFistToken.IsType(TokenTypes.MathIdentifier))
    {
        LexerToken idToken = tokenizer.MatchGet(TokenTypes.MathIdentifier);
        return MathIdentifier.Parse(idToken.Text);
    }

    // Plain label: determine the domain it lives in.
    LexerToken labelToken = tokenizer.MatchGet(TokenTypes.TextIdentifier);
    string domain;
    if (defaultToWorkDomain)
    {
        domain = "Work";
    }
    else
    {
        domain = library.FindEntityByLabel(labelToken.Text).Domain;
    }
    return new MathIdentifier(labelToken.Text, domain);
}
/// <summary>Scans a marker character: when it prefixes a text identifier (optionally via
/// a single underscore) the marker is merged into that identifier's text; otherwise the
/// input is scanned as a symbol identifier.</summary>
private LexerToken ScanMarkers()
{
    char next = lexer.LookaheadCharacter(1);
    // Underscore acts as a separator: look one character further in that case.
    bool marksTextIdentifier = next == '_'
        ? IsTextIdentifierCharacter(lexer.LookaheadCharacter(2))
        : IsTextIdentifierCharacter(next);
    if (marksTextIdentifier)
    {
        char marker = lexer.LookaheadFistCharacter;
        lexer.Consume();
        LexerToken identifier = ScanTextIdentifier();
        identifier.Text = marker + identifier.Text;
        return identifier;
    }
    return ScanSymbolIdentifier();
}
/// <summary>Doubles the capacity of the circular token buffer, linearizing its contents
/// so the ring restarts at index zero.</summary>
/// <exception cref="MathNet.Symbolics.Exceptions.ParsingException">Thrown when doubling would exceed the configured maximum size.</exception>
private void Expand()
{
    int doubled = buffer.Length * 2;
    if (maxSize > 0 && doubled > maxSize)
    {
        throw new MathNet.Symbolics.Exceptions.ParsingException("Parsing failed. Maximum parser buffer size exceeded.");
    }
    LexerToken[] grown = new LexerToken[doubled];
    // Copy in logical (ring) order, starting at the current offset.
    for (int i = 0; i < buffer.Length; i++)
    {
        grown[i] = buffer[(offset + i) & sizeLessOne];
    }
    buffer = grown;
    sizeLessOne = grown.Length - 1;
    offset = 0;
}
/// <summary>Scans a function application: the function token, its argument list, and an
/// optional vector-style index that selects one output of a multi-output function.</summary>
private Signal ScanFunction()
{
    LexerToken nameToken = tokenizer.ConsumeGet();
    List<Signal> arguments = ScanList();
    IEntity entity = ScanEntity(nameToken, InfixNotation.None, arguments.Count);
    if (!tokenizer.LookaheadFistToken.IsType(TokenTypes.LeftVector))
    {
        return builder.Function(entity, arguments);
    }
    // Bracketed integer after the call picks a single output signal.
    tokenizer.Match(TokenTypes.LeftVector);
    long index = ScanInteger();
    tokenizer.Match(TokenTypes.RightVector);
    return builder.Functions(entity, arguments)[(int)index];
}
/// <summary>Scans an encapsulated signal, choosing the scanner by the opening token:
/// list, vector, set, or scalar.</summary>
private Signal ScanEncapsulation()
{
    LexerToken opener = tokenizer.LookaheadFistToken;
    if (opener.IsType(TokenTypes.LeftList))
    {
        return ScanParenthesisSignal();
    }
    if (opener.IsType(TokenTypes.LeftVector))
    {
        return ScanVectorSignal();
    }
    if (opener.IsType(TokenTypes.LeftSet))
    {
        return ScanSetSignal();
    }
    return ScanScalarSignal();
}
/// <summary>Scans an entity reference given as a literal symbol, a symbol identifier,
/// or a label/qualified identifier, and resolves it through the library.</summary>
private IEntity ScanEntity()
{
    LexerToken ahead = tokenizer.LookaheadFistToken;
    if (ahead.IsType(TokenTypes.Literal))
    {
        // Symbol given as a literal.
        LexerToken symbol = tokenizer.MatchGet(TokenTypes.Literal);
        return library.LookupEntities(symbol.Text)[0];
    }
    if (ahead.IsType(TokenTypes.SymbolIdentifier))
    {
        // Symbol given as a symbol identifier.
        LexerToken symbol = tokenizer.MatchGet(TokenTypes.SymbolIdentifier);
        return library.LookupEntities(symbol.Text)[0];
    }
    // Label or fully qualified identifier.
    MathIdentifier entityId = ScanEntityMathIdentifierOrLabel(false);
    return library.LookupEntity(entityId);
}
/// <summary>Resolves an already-consumed token to an entity, using its notation and arity.</summary>
/// <param name="token">The token naming the entity.</param>
/// <param name="notation">The infix notation the entity is used in.</param>
/// <param name="inputs">The number of input signals.</param>
private IEntity ScanEntity(LexerToken token, InfixNotation notation, int inputs)
{
    if (token.IsType(TokenTypes.MathIdentifier))
    {
        // Fully qualified identifier.
        return library.LookupEntity(MathIdentifier.Parse(token.Text));
    }
    if (token.IsType(TokenTypes.Literal) || token.IsType(TokenTypes.SymbolIdentifier))
    {
        // Symbol: resolve by text, notation and arity.
        return library.LookupEntity(token.Text, notation, inputs);
    }
    // Text symbol or label: prefer a direct symbol lookup, fall back to label resolution.
    IEntity entity;
    if (library.TryLookupEntity(token.Text, notation, inputs, out entity))
    {
        return entity;
    }
    MathIdentifier labelId = library.FindEntityByLabel(token.Text);
    return library.LookupEntity(labelId);
}
/// <summary>Consumes a literal token and yields its raw text.</summary>
private string ScanLiteral()
{
    return tokenizer.MatchGet(TokenTypes.Literal).Text;
}
/// <summary>Determines whether the token denotes a binary infix operator known to the
/// library in either associative form.</summary>
private bool IsBinary(LexerToken token)
{
    bool isIdentifier = token.IsType(TokenTypes.SymbolIdentifier) || token.IsType(TokenTypes.TextIdentifier);
    if (!isIdentifier)
    {
        return false;
    }
    return library.ContainsEntity(token.Text, InfixNotation.LeftAssociativeInnerOperator)
        || library.ContainsEntity(token.Text, InfixNotation.RightAssociativeInnerOperator);
}
/// <summary>Consumes a symbol-identifier token and yields its text.</summary>
private string ScanEntitySymbol()
{
    return tokenizer.MatchGet(TokenTypes.SymbolIdentifier).Text;
}
/// <summary>Consumes an integer token and parses it using the internal number format.</summary>
private long ScanInteger()
{
    string digits = tokenizer.MatchGet(TokenTypes.Integer).Text;
    return long.Parse(digits, Config.InternalNumberFormat);
}
/// <summary>Consumes a math-identifier token and parses it into a <c>MathIdentifier</c>.</summary>
private MathIdentifier ScanMathIdentifier()
{
    string text = tokenizer.MatchGet(TokenTypes.MathIdentifier).Text;
    return MathIdentifier.Parse(text);
}
/// <summary>Consumes a text-identifier token and yields its text.</summary>
private string ScanTextIdentifier()
{
    return tokenizer.MatchGet(TokenTypes.TextIdentifier).Text;
}
/// <summary>Determines whether the token denotes a postfix operator known to the library.</summary>
private bool IsRightUnary(LexerToken token)
{
    // De Morgan of the original conjunction: bail out unless it is an identifier at all.
    if (!token.IsType(TokenTypes.SymbolIdentifier) && !token.IsType(TokenTypes.TextIdentifier))
    {
        return false;
    }
    return library.ContainsEntity(token.Text, InfixNotation.PostOperator);
}
/// <summary>Determines whether the token opens an encapsulation (any "left" bracket type).</summary>
private static bool IsBeginningEncapsulation(LexerToken token)
{
    return token.IsType(TokenTypes.Left);
}
/// <summary>Scans a full infix signal expression into an expression tree, honoring operator
/// precedence via a classic two-stack (operator/operand) postfix machine, then registers the
/// resulting tree with the system.</summary>
private Signal ScanSignalExpression() { /* * This method is kind of a postfix machine, parsing * infix expressions to postfix expressions (having * regard to precedence) using the operator stack and * evaluates the postfix term to an expression tree * using the expression stack. */ SignalStack expressionStack = new SignalStack(4, 4096); EntityStack operatorStack = new EntityStack(4, 4096); expressionStack.Push(ScanOperand()); IEntity lastEntity, topEntity; while (true) { LexerToken ahead = tokenizer.LookaheadFistToken; /* only symbol/text identifiers can denote an infix operator; anything else ends the expression */ if (!(ahead.IsType(TokenTypes.SymbolIdentifier) || ahead.IsType(TokenTypes.TextIdentifier))) { break; } // TODO: Optimize using TryMethods... /* resolve the operator entity; left-associative form wins when both exist */ if (library.ContainsEntity(ahead.Text, InfixNotation.LeftAssociativeInnerOperator)) { lastEntity = library.LookupEntity(ahead.Text, InfixNotation.LeftAssociativeInnerOperator, 2, 1, 0); } else if (library.ContainsEntity(ahead.Text, InfixNotation.RightAssociativeInnerOperator)) { lastEntity = library.LookupEntity(ahead.Text, InfixNotation.RightAssociativeInnerOperator, 2, 1, 0); } else { break; } tokenizer.Consume(); /* reduce while the stacked operator binds before the one just read; pop order matters: right operand first ('swap'), then left, so Function gets (left, right) */ while (operatorStack.Count > 0 && IsPrecedenceLeftFirst(operatorStack.Peek(), lastEntity)) { topEntity = operatorStack.Pop(); Signal swap = expressionStack.Pop(); Signal s = builder.Function(topEntity, expressionStack.Pop(), swap); expressionStack.Push(s); } operatorStack.Push(lastEntity); expressionStack.Push(ScanOperand()); } /* drain remaining operators; stack order already respects precedence */ while (operatorStack.Count > 0) { topEntity = operatorStack.Pop(); Signal swap = expressionStack.Pop(); Signal s = builder.Function(topEntity, expressionStack.Pop(), swap); expressionStack.Push(s); } Signal ret = expressionStack.Pop(); system.AddSignalTree(ret, false, false); return(ret); }