// Attempts to read exactly ONE token that consumes the entire input.
//
// Return value is tri-state:
//   true  - a delegate matched a token AND the input was fully consumed afterwards.
//   false - input remained after the matched token (or after scanning), i.e. "more than one token".
//   null  - no delegate ever matched; the scan exhausted the input without producing a token
//           (e.g. empty input). NOTE(review): callers should handle all three states - confirm.
// [NotNullWhen(true)] only guarantees firstToken is non-null on the `true` path.
internal bool? TryReadOnlyOneToken(IEnumerable<char> inputText, [NotNullWhen(true)] out Token? firstToken)
{
    firstToken = default;
    bool? result = null;
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(inputText, _LookaheadCount);
    var continueLoop = true;
    while (continueLoop && enumerator.MoveNext())
    {
        foreach (var del in Delegates)
        {
            // NOTE(review): a failing delegate still writes the out parameter, so firstToken
            // is overwritten on every probe; only a success ends the scan - confirm delegates
            // reset the out value to default on failure.
            if (del(enumerator, out firstToken))
            {
                continueLoop = false;
                result = true;
                break;
            }
        }
    }
    if (firstToken != default)
    {
        // Step past the last character the matched token consumed, so the completion
        // check below reflects whether the token covered the whole input.
        enumerator.MoveNext();
    }
    if (enumerator.State != EnumeratorState.Complete)
    {
        // Characters remain after the first token: the input was not "only one token".
        result = false;
    }
    return (result);
}
/// <summary>
/// Parses a stream of <see cref="ParsingNode"/>s into a single <see cref="TemplateNode"/>.
/// </summary>
/// <param name="environment">Environment supplying parsing configuration.</param>
/// <param name="lexer">Lexer used by the node parsers.</param>
/// <param name="source">The parsing nodes produced by the earlier parsing pass.</param>
/// <returns>The root template node wrapping every successfully parsed child node.</returns>
/// <exception cref="NotImplementedException">
/// Thrown when the source is empty, or when unparsed nodes remain after parsing stops.
/// </exception>
internal static TemplateNode ParseTemplate(JinjaEnvironment environment, Lexer lexer, IEnumerable<ParsingNode> source)
{
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(source, 10);

    // The original `while` loop could only ever run one iteration (every path inside
    // returned or threw); an `if` states the real intent: bail out on empty input.
    if (enumerator.MoveNext() == false)
    {
        throw new NotImplementedException(); // TODO: decide behavior for an empty template.
    }

    var nodes = ParseUntilFailure(environment, lexer, enumerator).ToArray();

    // A remaining node means ParseUntilFailure stopped early - there is input we could not parse.
    if (enumerator.TryGetNext(out _))
    {
        throw new NotImplementedException(); // TODO: surface which node failed to parse.
    }

    return (new TemplateNode(nodes));
}
/// <summary>
/// Streams tokens out of <paramref name="source"/>, emitting runs of unrecognized
/// characters as <see cref="TokenType.Unknown"/> tokens between real matches.
/// </summary>
/// <param name="source">The raw character stream to tokenize.</param>
/// <returns>A lazy sequence of tokens covering the whole input.</returns>
internal IEnumerable<Token> Tokenize(IEnumerable<char> source)
{
    // The lookahead window must be wide enough to match the longest known keyword.
    var longestKeyword = _KeywordLookups.Keys.Max(keyword => keyword.Length);
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(source, (byte)longestKeyword);

    var pendingUnknown = new Queue<char>();
    while (enumerator.MoveNext())
    {
        // Probe the delegates in order; the first successful match supplies the token
        // and no later delegate is invoked (mirrors the lazy FirstOrDefault semantics).
        Token? matched = default;
        foreach (var tokenizeDelegate in _Delegates)
        {
            if (tokenizeDelegate(enumerator, out var candidate))
            {
                matched = candidate;
                break;
            }
        }

        if (matched == default)
        {
            // No delegate recognized the current character; buffer it for an Unknown token.
            pendingUnknown.Enqueue(enumerator.Current);
            continue;
        }

        // Flush any buffered unrecognized characters ahead of the recognized token.
        if (pendingUnknown.Count != 0)
        {
            yield return (new Token(TokenType.Unknown, new string(pendingUnknown.ToArray())));
            pendingUnknown.Clear();
        }

        yield return (matched);
    }

    // Unrecognized characters at the very end of the input still produce a token.
    if (pendingUnknown.Count != 0)
    {
        yield return (new Token(TokenType.Unknown, new string(pendingUnknown.ToArray())));
    }
}
/// <summary>
/// Parses a token stream into a single AST root node, ignoring whitespace tokens.
/// </summary>
/// <param name="tokens">The tokens to parse; whitespace tokens are filtered out first.</param>
/// <returns>The parsed root node, or a null literal for an empty stream.</returns>
/// <exception cref="NotImplementedException">
/// Thrown when parsing fails, or when tokens remain after a successful parse.
/// </exception>
internal ASTNode Parse(IEnumerable<Token> tokens)
{
    // Whitespace carries no syntactic meaning at this stage of parsing.
    var significantTokens = tokens.Where(token => token.TokenType != TokenType.WhiteSpace);
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(significantTokens, 1, 1);

    // An empty stream parses to a null literal rather than an error.
    if (!enumerator.MoveNext())
    {
        return (LiteralNode.CreateNull());
    }

    if (!TryParse(enumerator, out var rootNode, AssignmentOperatorBehavior.Assign) || rootNode == default)
    {
        throw new NotImplementedException(); // Couldn't parse data!
    }

    // Step past the last consumed token; the stream must now be fully exhausted.
    enumerator.MoveNext();
    if (enumerator.State != EnumeratorState.Complete)
    {
        throw new NotImplementedException(); // We have unparsed data - why!?
    }

    return (rootNode);
}
/// <summary>
/// Streams tokens out of <paramref name="inputText"/>, pooling a StringBuilder to
/// accumulate runs of unrecognized characters as <see cref="TokenType.Unknown"/> tokens.
/// </summary>
/// <param name="inputText">The raw character stream to tokenize.</param>
/// <returns>A lazy sequence of tokens covering the whole input.</returns>
internal IEnumerable<Token> Tokenize(IEnumerable<char> inputText)
{
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(inputText, _LookaheadCount);

    // Fix: removed the dead `var DEBUG = enumerator as EnumerableLookaroundEnumerator<char>;`
    // local - a leftover debugging cast that was never read.

    // Pooled builder accumulates characters no delegate recognized; returned to the pool on exit.
    using var checkoutRecord_UnknownChars = StringBuilderPool.Instance.Checkout();
    var currentUnknownChars = checkoutRecord_UnknownChars.CheckedOutObject;
    var hasUnknown = false;

    while (enumerator.MoveNext())
    {
        // First matching delegate wins; Select/FirstOrDefault is lazy, so later
        // delegates are not invoked once one succeeds.
        var foundToken = Delegates.Select(del =>
        {
            var Success = del(enumerator, out var Result);
            return (new { Success, Result });
        }).FirstOrDefault(res => res.Success)?.Result;

        if (foundToken == null)
        {
            // Unrecognized character: buffer it into the pending Unknown run.
            hasUnknown = true;
            currentUnknownChars.Append(enumerator.Current);
        }
        else
        {
            // Flush the buffered unknown run before emitting the recognized token.
            if (hasUnknown)
            {
                // NOTE(review): the live StringBuilder is handed to Token and then cleared -
                // confirm Token snapshots its contents rather than holding the reference.
                yield return (new Token(TokenType.Unknown, null, currentUnknownChars));
                currentUnknownChars.Clear();
                hasUnknown = false;
            }

            yield return (foundToken.Value);
        }
    }

    // Trailing unrecognized characters still produce an Unknown token.
    if (hasUnknown)
    {
        yield return (new Token(TokenType.Unknown, null, currentUnknownChars));
    }
}
// Groups a flat token stream into ParsingNodes (statements, comments, expressions,
// whitespace, newlines), buffering everything unclassified into Output nodes.
// Side effects matter here: ReadOne/ReadWhile/ReadUntil all advance the shared
// enumerator, so case order and the continue/break choices are load-bearing.
internal static IEnumerable<ParsingNode> Parse(IEnumerable<Token> source)
{
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(source, 10);
    var pendingOutput = new Queue<Token>();
    var canBeLineStatement = true; // Line statements must have only whitespace prior to it.
    while (enumerator.MoveNext())
    {
        ParsingNode pendingNode;
        switch (enumerator.Current.TokenType)
        {
            case TokenType.LineStatement:
                pendingNode = LineStatementOrUnknown(canBeLineStatement, enumerator);
                canBeLineStatement = false;
                break;
            case TokenType.LineComment:
                // A line comment runs to (but not including) the next newline.
                pendingNode = new ParsingNode(ParsingNodeType.Comment, ReadWhile(enumerator, tokenType => tokenType != TokenType.NewLine));
                break;
            case TokenType.StatementStart:
                canBeLineStatement = false;
                pendingNode = new ParsingNode(ParsingNodeType.Statement, ReadUntil(enumerator, TokenType.StatementEnd));
                break;
            case TokenType.CommentStart:
                canBeLineStatement = false;
                pendingNode = new ParsingNode(ParsingNodeType.Comment, ReadUntil(enumerator, TokenType.CommentEnd));
                break;
            case TokenType.ExpressionStart:
                canBeLineStatement = false;
                pendingNode = new ParsingNode(ParsingNodeType.Expression, ReadUntil(enumerator, TokenType.ExpressionEnd));
                break;
            case TokenType.NewLine:
                pendingNode = new ParsingNode(ParsingNodeType.NewLine, ReadOne(enumerator));
                // A fresh line may begin with a line statement again.
                canBeLineStatement = true;
                break;
            case TokenType.WhiteSpace:
                // Consume the whole whitespace run first (this advances the enumerator),
                // then peek: if this leading whitespace is immediately followed by a line
                // statement/comment, the whitespace node is deliberately DROPPED (continue)
                // so the next iteration handles the line statement with canBeLineStatement
                // still true.
                var whiteSpaceNode = new ParsingNode(ParsingNodeType.WhiteSpace, ReadWhile(enumerator, tokenType => tokenType == TokenType.WhiteSpace));
                if (!(canBeLineStatement && enumerator.TryGetNext(out var nextToken) && (nextToken.TokenType == TokenType.LineStatement || nextToken.TokenType == TokenType.LineComment)))
                {
                    pendingNode = whiteSpaceNode;
                    break;
                }
                continue;
            default:
                // Anything unclassified accumulates into the pending Output node;
                // no node is emitted this iteration.
                canBeLineStatement = false;
                pendingOutput.Enqueue(enumerator.Current);
                continue;
        }
        // Flush buffered output tokens before the structured node that followed them.
        if (pendingOutput.Count > 0)
        {
            // NOTE(review): the queue is cleared right after being passed in - confirm
            // ParsingNode snapshots the tokens rather than keeping the queue reference.
            yield return (new ParsingNode(ParsingNodeType.Output, pendingOutput));
            pendingOutput.Clear();
        }
        yield return (pendingNode);
    }
    // Trailing unclassified tokens still form a final Output node.
    if (pendingOutput.Count > 0)
    {
        yield return (new ParsingNode(ParsingNodeType.Output, pendingOutput));
    }
}
// Tries to parse an `{% extends "template" %}`-style statement out of currentNode's tokens
// using a small state machine: StartJinja -> Keyword -> Template -> Done.
// Returns false as soon as a token breaks the expected shape; on success, the tokens
// collected between the keyword and StatementEnd are joined/trimmed into the template name.
internal static bool TryParse(JinjaEnvironment environment, ParsingNode currentNode, [NotNullWhen(true)] out ASTNode? parsedNode)
{
    using var enumerator = LookaroundEnumeratorFactory.CreateLookaroundEnumerator(currentNode.Tokens);
    parsedNode = default;
    var state = States.StartJinja;
    var templateQueue = new Queue<Token>();
    while (enumerator.MoveNext())
    {
        var token = enumerator.Current;
        switch (state)
        {
            case States.StartJinja:
                // Must open with a statement start; anything else is not an extends statement.
                switch (token.TokenType)
                {
                    case TokenType.StatementStart:
                        state = States.Keyword;
                        continue;
                    default:
                        return (false);
                }
            case States.Keyword:
                // Skip whitespace until the `extends` keyword; any other token disqualifies.
                switch (token.TokenType)
                {
                    case TokenType.WhiteSpace:
                        continue;
                    case TokenType.Keyword_Extends:
                        state = States.Template;
                        continue;
                    default:
                        return (false);
                }
            case States.Template:
                // Everything up to StatementEnd contributes to the template name.
                switch (token.TokenType)
                {
                    case TokenType.StatementEnd:
                        state = States.Done;
                        continue;
                    default:
                        templateQueue.Enqueue(token);
                        continue;
                }
            case States.Done:
                // Tokens after StatementEnd are unexpected. TODO: proper error handling.
                throw new NotImplementedException();
            default:
                throw new NotImplementedException();
        }
    }
    // NOTE(review): if the input ends while still in the Template state (no StatementEnd),
    // this still returns true with whatever was collected - confirm that is intended.
    var templateName = string.Join(string.Empty, templateQueue.Select(token => token.Value)).Trim();
    parsedNode = new ExtendsNode(templateName, ExpressionNode.FromString(environment, templateName), currentNode);
    return (true);
}