/// <summary>
/// Evaluates the next token on the stack as an optional match: when the token type does not
/// match, the stack is reverted and the rule still succeeds (the token is simply absent).
/// </summary>
/// <param name="stack">Token stream; left advanced by one on a type match, rewound otherwise.</param>
/// <param name="data">Accumulated rule data; updated with the match result in both cases.</param>
/// <returns>True when the token is absent or fully matches; false when the type matches but the pattern does not.</returns>
public override bool Eval(ref TokenStack<T> stack, ref RuleData data)
{
    Token<T> token = stack.Next();

    // Token type doesn't match: the optional token isn't present. Revert the stack and succeed.
    if (!IsTokenTypeMatch(token))
    {
        stack.Prev();
        UpdateData(ref data, token, false);
        return true;
    }

    // Token with the correct type exists; the pattern decides the final result.
    bool isMatch = IsPatternMatch(token);
    UpdateData(ref data, token, isMatch);
    return isMatch;
}
// Parses "identifier : value" into a VariableAssignment node.
// On a failed value parse, the already-consumed tokens are pushed back so the
// caller can try a different syntax.
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    if (tokens.ExpectSequence(TokenType.Identifier, TokenType.ColonSymbol))
    {
        // Record consumed tokens so they can be restored on failure.
        var queue = new Queue <Token>();
        var identifier = tokens.Pop();
        var colon = tokens.Pop();
        queue.Enqueue(identifier);
        queue.Enqueue(colon);

        var valueSyntax = new ValueSyntax();
        // Peeked before ValueSyntax consumes it, so it can be recorded as source on success.
        var valueToken = tokens.Peek();

        if (valueSyntax.TryParse(tokens, out GraphNode value))
        {
            queue.Enqueue(valueToken);
            node = new VariableAssignment(queue, identifier.Value, (Value)value);
            return(true);
        }
        else
        {
            // NOTE(review): tokens are pushed back in the order they were popped
            // (identifier first, then colon). If TokenStack.Push places items on top,
            // this restores them in reversed order — confirm TokenStack semantics.
            while (queue.Count > 0)
            {
                tokens.Push(queue.Dequeue());
            }
        }
    }

    node = null;
    return(false);
}
/// <summary>Throws when unconsumed tokens remain after an expression has been parsed.</summary>
protected void AssertTokenStackIsEmpty()
{
    bool hasRemainingTokens = TokenStack.Any();

    if (hasRemainingTokens)
    {
        throw new QueryParseException("End of expression expected.");
    }
}
/// <summary>
/// Evaluates every rule in <see cref="Rules"/> against the stack and, when all match,
/// constructs the result via <see cref="Construct"/>.
/// </summary>
/// <param name="stack">Token stream shared across all rules.</param>
/// <param name="result">The constructed value, or null when evaluation or construction fails.</param>
/// <returns>True when all rules matched and construction did not throw.</returns>
public bool Eval(ref TokenStack<T> stack, out V result)
{
    // Rewind so the trigger token itself is re-evaluated by the first rule when requested.
    // (The original also peeked the current token into an unused local; removed.)
    if (IncludeTriggerToken)
    {
        stack.Prev();
    }

    RuleData data = new RuleData();

    foreach (IRule<T> rule in Rules)
    {
        if (!rule.Eval(ref stack, ref data))
        {
            result = null;
            return false;
        }
    }

    if (CleanRuleData)
    {
        data.RemoveAll(x => x == null);
    }

    try
    {
        // Null-conditional already yields null when Construct is unset; no "?? null" needed.
        result = Construct?.Invoke(data);
        return true;
    }
    catch (Exception)
    {
        // Construction is best-effort by design: a throwing factory is reported as a
        // non-match rather than propagated to the caller.
        result = null;
        return false;
    }
}
/// <summary>
/// Consumes an optional (possibly negative) integer from the token stack.
/// Returns null when the next token is not numeric; throws when a minus sign
/// is not followed by digits.
/// </summary>
protected int? TryParseNumber()
{
    if (!TokenStack.TryPeek(out Token nextToken))
    {
        return null;
    }

    if (nextToken.Kind == TokenKind.Minus)
    {
        TokenStack.Pop();

        if (TokenStack.TryPop(out Token token) && token.Kind == TokenKind.Text && int.TryParse(token.Value, out int negated))
        {
            return -negated;
        }

        throw new QueryParseException("Digits expected.");
    }

    if (nextToken.Kind == TokenKind.Text && int.TryParse(nextToken.Value, out int number))
    {
        TokenStack.Pop();
        return number;
    }

    return null;
}
/// <summary>
/// Parses the entire token stream into a Script node composed of version
/// declarations and body sections; fails when a token run matches neither.
/// </summary>
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    var script = new Script();

    while (tokens.Count > 0)
    {
        var versionSyntax = new VersionDeclarationSyntax();
        if (versionSyntax.TryParse(tokens, out GraphNode versionDeclaration))
        {
            script.AddChildNode(versionDeclaration);
            continue;
        }

        var bodySyntax = new BodySyntax();
        if (bodySyntax.TryParse(tokens, out GraphNode body))
        {
            script.AddChildNode(body);
            continue;
        }

        // TODO: syntax error
        node = null;
        return false;
    }

    node = script;
    return true;
}
/// <summary>
/// Evaluates the inner rules once, then recurses while a separator token of the
/// expected type follows — matching a separated repetition (e.g. comma-separated items).
/// </summary>
/// <param name="stack">Token stream; the trailing look-ahead is rewound when no separator follows.</param>
/// <param name="data">Accumulated rule data; the separator itself is recorded only when includeSeperator is set.</param>
/// <returns>True when at least the current repetition of rules matched.</returns>
public override bool Eval(ref TokenStack<T> stack, ref RuleData data)
{
    foreach (Rule<T> rule in rules)
    {
        if (!rule.Eval(ref stack, ref data))
        {
            return false;
        }
    }

    // A separator follows: record it (optionally) and evaluate the next repetition.
    if (IsTokenTypeMatch(stack.Next()))
    {
        UpdateData(ref data, stack.Peek(), includeSeperator);
        return Eval(ref stack, ref data);
    }

    // No separator: rewind the look-ahead and finish successfully.
    stack.Prev();
    return true;
}
/// <summary>
/// Tries each known statement syntax in declaration order; succeeds on the first
/// match, or trivially when no tokens remain.
/// </summary>
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    node = null;

    if (tokens.Count == 0)
    {
        return true;
    }

    var candidates = new Syntax[]
    {
        new VariableAssignmentSyntax(),
        new OpenStatementSyntax(),
        new EnumDeclarationSyntax(),
        new BlockDeclarationSyntax(),
        new PropertyDeclarationSyntax()
    };

    foreach (Syntax candidate in candidates)
    {
        if (candidate.TryParse(tokens, out node))
        {
            return true;
        }
    }

    // TODO: syntax error
    return false;
}
/// <summary>
/// Binds the caller's execution context and maps the supplied argument values onto
/// the expected parameter names, then parses the function body.
/// </summary>
/// <param name="caller">The invoking function, or null for a top-level invocation.</param>
/// <param name="forFlag">Not used by this override; preserved for the base contract.</param>
public override void TryParse(TFunction caller, bool forFlag)
{
    ReturnBubble = null;
    ReturnFlag = false;

    if (caller != null)
    {
        BlindExecute = caller.BlindExecute;
        Tracer = caller.Tracer;
        Caller = caller;
        Extensions = caller.Extensions;
    }

    if (caller != null && caller.Arguments != null && ExpectedArgs != null && ExpectedArgs.Length > 0)
    {
        ProvidedArgs = new TokenStack();
        var args = caller.ReturnArgsArray();

        if (args.Length > 0)
        {
            // More arguments than parameters is a hard error; fewer is allowed.
            if (args.Length > ExpectedArgs.Length)
            {
                // Was an interpolated string with no interpolation holes; plain literal suffices.
                Compiler.ExceptionListener.Throw("The arguments supplied do not match the arguments expected!");
                return;
            }

            for (var i = 0; i < args.Length; i++)
            {
                // Normalize a "var name" declaration down to the bare parameter name.
                var expectedName = ExpectedArgs[i].Replace("var ", "").Replace(" ", "");
                ProvidedArgs.Add(new Token(expectedName, args[i], caller.Line));
            }
        }
    }

    Parse();
}
// Tokenizes the entire stream line by line: the first line seeds the stack, and each
// subsequent line is offered to the most recent token, which may replace itself with
// zero or more result tokens.
public Tokenized(Stream s)
{
    using (var sr = new StreamReader(s))
    {
        var stack = new TokenStack();

        if (!sr.EndOfStream)
        {
            // NOTE(review): Token.FromLine appears to be a nested type constructed per
            // line — confirm against the Token declaration.
            var start = new Token.FromLine(sr.ReadLine());
            stack.Push(start);

            while (!sr.EndOfStream)
            {
                // The current top-of-stack token decides how the next line is tokenized.
                var t = stack.Pop();

                foreach (var result in t.Consider(sr.ReadLine()))
                {
                    stack.Push(result);
                }
            }
        }

        // NOTE(review): Push appears to return the stack (fluent style) and Reverse to
        // restore document order after the stack-based construction — confirm TokenStack
        // semantics.
        tokens = stack.Push(new EndOfFile()).Reverse();
    }
}
// Parses the template stream character by character into a stencil tree: literal text
// becomes "positive space", tokens become spaces, and TokenStack/SpaceStack track the
// currently open complex (nested) scopes.
private IStencil Parse(IToken token)
{
    var stencil = new Stencil();
    _currentSpace = stencil;
    // NOTE(review): streamReader is never disposed; wrapping it in a using would also
    // close TemplateStream — confirm who owns the stream's lifetime before changing.
    var streamReader = new StreamReader(TemplateStream);
    var buffer = new StringBuilder();
    Reset(buffer, token);

    while (!streamReader.EndOfStream)
    {
        var currentCharacter = (char)streamReader.Read();
        buffer.Append(currentCharacter);
        var result = ProcessBuffer(buffer);

        switch (result.Type)
        {
            case TokenType.Simple:
            case TokenType.Interpolation:
                // Leaf token: flush preceding literal text, add the token's space, continue.
                AddPositiveSpace(_currentSpace, result.Payload);
                _currentSpace.Add(result.Token.CreateSpace());
                Reset(buffer);
                break;

            case TokenType.Complex:
                // Opening token: push a nested scope that receives subsequent content.
                AddPositiveSpace(_currentSpace, result.Payload);
                var complexSpace = result.Token.CreateSpace();
                _currentSpace.Add(complexSpace);
                TokenStack.Push(result);
                SpaceStack.Push(complexSpace);
                _currentSpace = complexSpace;
                Reset(buffer, result.Token);
                break;

            case TokenType.Terminator:
                // Closing token: pop the innermost scope and fall back to its parent,
                // or to the root stencil when no scopes remain open.
                AddPositiveSpace(_currentSpace, result.Payload);
                TokenStack.Pop();
                SpaceStack.Pop();
                // NOTE(review): the (object) cast makes ?? compile across differing
                // static types — confirm _currentSpace's declared type tolerates this.
                _currentSpace = (object)SpaceStack.FirstOrDefault() ?? stencil;
                Reset(buffer, TokenStack.Select(t => t.Token).FirstOrDefault() ?? token);
                break;

            default:
                break;
        }
    }

    // Any text remaining after the last token is trailing positive space on the root.
    AddPositiveSpace(stencil, buffer.ToString());

    // Release parse-time state; the parser instance is single-use.
    TemplateStream = null;
    TokenStack = null;
    SpaceStack = null;
    _prospectiveTokens = null;
    return(stencil);
}
/// <summary>
/// Creates the three named working stacks and seeds IS with the sequence-start
/// marker so parsing begins in a known state.
/// </summary>
public TokenSeqToTerm(OperatorTable opTable)
{
    inOpAtBoS = false;

    IS = new TokenStack("IS");
    OS = new TokenStack("OS"); // operator stack
    PS = new TokenStack("PS");

    IS.Push(SeqStartToken);
}
/// <summary>
/// Evaluates like the base rule but rewinds the stack on success, so the matched
/// token is effectively peeked rather than consumed.
/// </summary>
public override bool Eval(ref TokenStack<T> tokenStack, ref RuleData data)
{
    if (!base.Eval(ref tokenStack, ref data))
    {
        return false;
    }

    tokenStack.Prev();
    return true;
}
/// <summary>
/// Evaluates the rules in order against the shared stack and data; stops and
/// reports failure at the first rule that does not match.
/// </summary>
private static bool EvalRules(ref TokenStack<T> stack, ref RuleData data, List<Rule<T>> rules)
{
    for (int i = 0; i < rules.Count; i++)
    {
        if (!rules[i].Eval(ref stack, ref data))
        {
            return false;
        }
    }

    return true;
}
/// <summary>
/// Initializes the code and data stacks, copies the global method table, queues the
/// built-in bootstrap code and runs it to completion.
/// </summary>
public Executor()
{
    CodeStack = new TokenStack();
    DataStack = new TokenStack();
    Methods = new Dictionary<string, IFunction>(_GlobalMethods);

    // Lines are pushed in reverse so the first line ends up on top of the stack.
    foreach (var line in _InternalCoded.Reverse<string>())
    {
        CodeStack.PushRange(Lexer.Parse(line).Value);
    }

    while (CodeStack.Count > 0)
    {
        Cycle();
    }
}
/// <summary>
/// Smoke test: the bundled version-1 sample script must tokenize and parse with no
/// syntax errors.
/// </summary>
public void ParseScript()
{
    const string scriptPath = @"./Resources/SampleScript-Version1.bitx";

    var tokens = new TokenStack(scriptPath, Tokenizer.Tokenize(File.ReadAllText(scriptPath)));

    Assert.True(Parser.TryParse(tokens, out Script script));
}
/// <summary>
/// Evaluates the provided <paramref name="tokenStack"/> and returns true if the next token in the stack matches any of the <see cref="TokenTypes"/> and the token value matches the optional <see cref="Pattern"/>.
/// <br/>Note: Increments the provided <paramref name="tokenStack"/>. See <see cref="TokenStack{T}.Next"/>
/// </summary>
public virtual bool Eval(ref TokenStack<T> tokenStack, ref RuleData data)
{
    Token<T> token = tokenStack.Next();

    // Short-circuit: the pattern is only consulted when the token type already matches.
    bool isMatch = IsTokenTypeMatch(token) && IsPatternMatch(token);
    UpdateData(ref data, token, isMatch);
    return isMatch;
}
/// <summary>
/// Parses a single literal token into a Value node, using the token-type-to-value-kind
/// mapping to classify it.
/// </summary>
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    node = null;

    if (!tokens.ExpectAny(EnumMapping.Keys.ToArray()))
    {
        return false;
    }

    var token = tokens.Pop();
    node = new Value(new[] { token }, EnumMapping[token.TokenType], token.Value);
    return true;
}
/// <summary>
/// Parses the mandatory PROGRAM header token, emits the LSP setup instruction and
/// generates the variable storage area.
/// </summary>
private void Header()
{
    Token = Tokenization.GetToken();

    if (!Token.IsProgram())
    {
        throw new ExpectedException("PROGRAM", Token.Value, null);
    }

    TokenStack.Push(Token);

    // Emit LSP with operand 0x0010 — presumably stack-pointer initialization; confirm.
    AddCode(CmsCodeFactory.LSP(new CmsCode(0x0010)));

    GenerateVariableArea();
}
/// <summary>
/// Parses one type-name token (a built-in integer/bit/bitfield keyword or a user
/// identifier) into a TypeDeclaration node.
/// </summary>
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    node = null;

    bool isTypeToken = tokens.ExpectAny(
        TokenType.SignedIntegerTypeKeyword,
        TokenType.UnsignedIntegerTypeKeyword,
        TokenType.BitfieldTypeKeyword,
        TokenType.BitTypeKeyword,
        TokenType.Identifier);

    if (!isTypeToken)
    {
        // TODO: syntax error
        return false;
    }

    var token = tokens.Pop();
    node = new TypeDeclaration(new[] { token }, token.Value);
    return true;
}
/// <summary>
/// Parses a "version &lt;integer&gt;" declaration into a VersionDeclaration node.
/// </summary>
/// <param name="tokens">Token stream; exactly two tokens are consumed on success.</param>
/// <param name="node">The resulting VersionDeclaration, or null when the sequence is absent.</param>
/// <returns>True when the keyword/literal sequence was found and consumed.</returns>
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    if (tokens.ExpectSequence(TokenType.VersionKeyword, TokenType.IntegerLiteral))
    {
        var keyword = tokens.Pop();
        var integerLiteral = tokens.Pop();

        // Integer literals are machine-readable: parse culture-invariantly (CA1305)
        // so the result does not depend on the current thread's culture.
        int version = int.Parse(integerLiteral.Value, System.Globalization.CultureInfo.InvariantCulture);

        node = new VersionDeclaration(new[] { keyword, integerLiteral }, version);
        return true;
    }

    node = null;
    return false;
}
/// <summary>
/// Parses a comma-separated list of sort elements (at least one) into a SortExpression.
/// </summary>
protected SortExpression ParseSort()
{
    var elements = ParseSortElement().AsList();

    while (TokenStack.Any())
    {
        EatSingleCharacterToken(TokenKind.Comma);
        elements.Add(ParseSortElement());
    }

    return new SortExpression(elements);
}
/// <summary>
/// Parses an optional "count(fieldChain)" function call; returns null without
/// consuming anything when the next token is not the count keyword.
/// </summary>
protected CountExpression TryParseCount()
{
    bool isCountKeyword = TokenStack.TryPeek(out Token nextToken) && nextToken.Kind == TokenKind.Text && nextToken.Value == Keywords.Count;

    if (!isCountKeyword)
    {
        return null;
    }

    TokenStack.Pop();
    EatSingleCharacterToken(TokenKind.OpenParen);

    ResourceFieldChainExpression targetCollection = ParseFieldChain(FieldChainRequirements.EndsInToMany, null);

    EatSingleCharacterToken(TokenKind.CloseParen);
    return new CountExpression(targetCollection);
}
/// <summary>
/// Parses a comma-separated list of field names into a SparseFieldSetExpression;
/// duplicate names collapse onto the last occurrence. Returns null for an empty list.
/// </summary>
protected SparseFieldSetExpression ParseSparseFieldSet()
{
    var fields = new Dictionary<string, ResourceFieldAttribute>();

    bool isFirst = true;

    while (TokenStack.Any())
    {
        if (!isFirst)
        {
            EatSingleCharacterToken(TokenKind.Comma);
        }

        isFirst = false;

        ResourceFieldChainExpression chain = ParseFieldChain(FieldChainRequirements.EndsInAttribute, "Field name expected.");
        ResourceFieldAttribute field = chain.Fields.Single();
        fields[field.PublicName] = field;
    }

    return fields.Any() ? new SparseFieldSetExpression(fields.Values) : null;
}
/// <summary>
/// Parses one or more comma-separated pagination elements into a single expression.
/// </summary>
protected PaginationQueryStringValueExpression ParsePagination()
{
    var elements = new List<PaginationElementQueryStringValueExpression>
    {
        ParsePaginationElement()
    };

    while (TokenStack.Any())
    {
        EatSingleCharacterToken(TokenKind.Comma);
        elements.Add(ParsePaginationElement());
    }

    return new PaginationQueryStringValueExpression(elements);
}
/// <summary>
/// Parses comma-separated relationship chains, validates their nesting depth and
/// converts them into an include tree.
/// </summary>
protected IncludeExpression ParseInclude(int? maximumDepth)
{
    const string errorMessage = "Relationship name expected.";

    var chains = ParseFieldChain(FieldChainRequirements.IsRelationship, errorMessage).AsList();

    while (TokenStack.Any())
    {
        EatSingleCharacterToken(TokenKind.Comma);
        chains.Add(ParseFieldChain(FieldChainRequirements.IsRelationship, errorMessage));
    }

    ValidateMaximumIncludeDepth(maximumDepth, chains);
    return IncludeChainConverter.FromRelationshipChains(chains);
}
/// <summary>
/// Parses an optional constant: the null keyword or a quoted literal. Returns null
/// without consuming anything for any other token.
/// </summary>
protected IdentifierExpression TryParseConstantOrNull()
{
    if (!TokenStack.TryPeek(out Token nextToken))
    {
        return null;
    }

    switch (nextToken.Kind)
    {
        case TokenKind.Text when nextToken.Value == Keywords.Null:
            TokenStack.Pop();
            return new NullConstantExpression();

        case TokenKind.QuotedText:
            TokenStack.Pop();
            return new LiteralConstantExpression(nextToken.Value);

        default:
            return null;
    }
}
// Parses "enum <identifier> { <values> }" into an EnumDeclaration node.
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    if (tokens.ExpectSequence(TokenType.EnumKeyword, TokenType.Identifier, TokenType.OpenCurlyBraceSymbol))
    {
        // Source tokens recorded for the resulting node (keyword, name, braces).
        var source = new Queue <Token>();
        var enumKeyword = tokens.Pop();
        var identifier = tokens.Pop();
        var openBrace = tokens.Pop();
        source.Enqueue(enumKeyword);
        source.Enqueue(identifier);
        source.Enqueue(openBrace);

        var enumValues = new List <EnumValue>();
        var enumBodySyntax = new EnumValueSyntax();

        // Collect member values until the closing brace.
        // NOTE(review): Peek on an exhausted stack (i.e. a missing '}') and the lack
        // of a token-restore on failure (unlike sibling syntaxes) both look risky —
        // confirm TokenStack behavior and the caller's recovery expectations.
        while (tokens.Peek().TokenType != TokenType.CloseCurlyBraceSymbol)
        {
            if (enumBodySyntax.TryParse(tokens, out GraphNode enumValue))
            {
                enumValues.Add((EnumValue)enumValue);
            }
            else
            {
                // TODO: Set error message
                node = null;
                return(false);
            }
        }

        var closeBrace = tokens.Pop();
        source.Enqueue(closeBrace);

        node = new EnumDeclaration(source, identifier.Value, enumValues);
        return(true);
    }

    // TODO: Set error message
    node = null;
    return(false);
}
/// <summary>
/// Parses one or more comma-separated attribute names into a SparseFieldSetExpression;
/// duplicate names collapse onto the last occurrence.
/// </summary>
protected SparseFieldSetExpression ParseSparseFieldSet()
{
    const string errorMessage = "Attribute name expected.";
    var attributes = new Dictionary<string, AttrAttribute>();

    while (true)
    {
        ResourceFieldChainExpression chain = ParseFieldChain(FieldChainRequirements.EndsInAttribute, errorMessage);
        AttrAttribute attribute = chain.Fields.Cast<AttrAttribute>().Single();
        attributes[attribute.PublicName] = attribute;

        if (!TokenStack.Any())
        {
            break;
        }

        EatSingleCharacterToken(TokenKind.Comma);
    }

    return new SparseFieldSetExpression(attributes.Values);
}
/// <summary>
/// Parses "&lt;type&gt; &lt;identifier&gt; [as &lt;type&gt;]" into a PropertyDeclaration node;
/// the "as" clause supplies an optional second (storage/view) type.
/// </summary>
public override bool TryParse(TokenStack tokens, out GraphNode node)
{
    node = null;
    var typeDeclarationSyntax = new TypeDeclarationSyntax();

    if (!typeDeclarationSyntax.TryParse(tokens, out GraphNode typeDeclaration))
    {
        // TODO: syntax error
        return false;
    }

    if (!tokens.Expect(TokenType.Identifier))
    {
        // TODO: syntax error
        return false;
    }

    var identifier = tokens.Pop();

    if (!tokens.Expect(TokenType.AsKeyword))
    {
        // No "as" clause: the property uses the declared type directly.
        node = new PropertyDeclaration(new[] { identifier }, identifier.Value, (TypeDeclaration)typeDeclaration);
        return true;
    }

    tokens.Pop(); // consume the "as" keyword

    if (!typeDeclarationSyntax.TryParse(tokens, out GraphNode asTypeDeclaration))
    {
        // TODO: syntax error
        return false;
    }

    node = new PropertyDeclaration(new[] { identifier }, identifier.Value, (TypeDeclaration)typeDeclaration, (TypeDeclaration)asTypeDeclaration);
    return true;
}
/// <summary>
/// Pushing 99 tokens and popping them back must preserve LIFO order and keep Count
/// in sync after every operation.
/// </summary>
public void TokenStack_Push1()
{
    var tokens = new TokenStack();

    // A fresh stack is empty and cannot be popped.
    Assert.IsFalse(tokens.TryPop(out _));
    Assert.AreEqual(0, tokens.Count);

    const int total = 99;

    for (var value = 0; value < total; value++)
    {
        tokens.Push(new Token { Expression = Expression.Number(value.ToString()) });
        Assert.AreEqual(value + 1, tokens.Count);
    }

    for (var expected = total - 1; expected >= 0; expected--)
    {
        Assert.IsTrue(tokens.TryPop(out var token));
        Assert.AreEqual(expected, tokens.Count);
        Assert.AreEqual(expected.ToString(), ((ConstantExpression)token.Expression).Value);
    }
}