internal void SyntaxError(TokenzierContext context, CharacterLocationExtended location, string helpText)
{
	// Record an invalid-path syntax error for the current expression against the
	// tokenizer context's error collection.
	var error = new InvalidPathSyntaxError(location, SourceExpression, helpText);
	context.Errors.Add(error);
}
// NOTE(review): this block is truncated in the visible source — the method body is
// never closed here, so the documentation covers only the visible portion.
internal static TokenPair TokenizeVariableAssignment(
	string tokenValue,
	TokenzierContext context,
	TokenType type,
	IEnumerable<ITokenOption> options)
{
	var startOfExpression = context.CurrentLocation;
	// Only the #var and #let token types are valid here; anything else is reported
	// as a syntax error (but processing still continues below).
	switch (type)
	{
		case TokenType.VariableLet:
			break;
		case TokenType.VariableVar:
			break;
		default:
			context.Errors.Add(new MorestachioSyntaxError(
				context.CurrentLocation.AddWindow(new CharacterSnippedLocation(0, 0, tokenValue)),
				"#var", "", "#var name", "Expected #var or #let"));
			break;
	}
	// The token text must begin with the keyword ("var " / "let "); otherwise
	// report the malformed assignment and bail out.
	var strVarType = StringifyVariableAssignmentType(type) + " ";
	var variableNameIndex = tokenValue.IndexOf(strVarType, StringComparison.OrdinalIgnoreCase);
	if (variableNameIndex != 0)
	{
		context.Errors.Add(new MorestachioSyntaxError(
			context.CurrentLocation.AddWindow(new CharacterSnippedLocation(0, 0, tokenValue)),
			strVarType, "", strVarType + "name", "Expected " + strVarType));
		return (default);
public void TestExpressionParserDbg(string query)
{
	// Tokenize the raw query and parse it as an expression (or string literal);
	// the parse must succeed without producing tokenizer errors.
	var tokenizerContext = TokenzierContext.FromText(query);
	var parsed = ExpressionTokenizer.ParseExpressionOrString(query, tokenizerContext);

	Assert.That(parsed, Is.Not.Null);
	Assert.That(tokenizerContext.Errors, Is.Empty, () => tokenizerContext.Errors.GetErrorText());
}
/// <summary>
///	Runs the Tokenizer and returns all errors in the template if present
/// </summary>
/// <param name="template">The template text to validate.</param>
/// <returns>All errors collected while tokenizing; empty when the template is valid.</returns>
public static IEnumerable<IMorestachioError> Validate(string template)
{
	var options = new ParserOptions(template);
	var tokenzierContext = TokenzierContext.FromText(template);
	// Tokenize only for its side effect of populating the context's error list;
	// the returned token result itself is not needed for validation (the previous
	// code stored it in an unused local).
	Tokenizer.Tokenize(options, tokenzierContext);
	return tokenzierContext.Errors;
}
/// <summary>
///	Runs the Tokenizer and returns all errors in the template if present
/// </summary>
/// <param name="template">The template container to validate.</param>
/// <returns>All errors collected while tokenizing; empty when the template is valid.</returns>
public static async Task<IEnumerable<IMorestachioError>> Validate(ITemplateContainer template)
{
	var parserOptions = new ParserOptions(template);
	var tokenizerContext = new TokenzierContext(new List<int>(), null);
	// Tokenizing populates the context's error list as a side effect.
	await Tokenizer.Tokenize(parserOptions, tokenizerContext);
	return tokenizerContext.Errors;
}
internal TokenInfo(string token, TokenzierContext context, Stack<Tuple<string, int>> scopeStack)
{
	// Capture the raw token text together with the tokenizer state it was produced under.
	Token = token;
	TokenizerContext = context;
	ScopeStack = scopeStack;
	// Errors found while processing this token are collected separately.
	Errors = new List<IMorestachioError>();
}
internal TokenInfo(string token, TokenzierContext context, Stack<Tokenizer.ScopeStackItem> scopeStack, IList<ITokenOption> tokenOptions)
{
	// Capture the raw token text, the tokenizer state, the currently open scopes
	// and the per-token options.
	Token = token;
	TokenizerContext = context;
	ScopeStack = scopeStack;
	TokenOptions = tokenOptions;
	// Errors found while processing this token are collected separately.
	Errors = new List<IMorestachioError>();
}
public void TestNewExpressionTokenizer(string testExpression)
{
	// Round-trip: parse the expression, render it back to text, and expect the
	// rendered form to equal the original input.
	var context = TokenzierContext.FromText(testExpression);
	var parsedExpression = ExpressionParser.ParseExpression(testExpression, context);

	var visitor = new ToParsableStringExpressionVisitor();
	parsedExpression.Accept(visitor);

	Assert.That(visitor.StringBuilder.ToString(), Is.EqualTo(testExpression));
}
/// <summary>
///	Returns the current char position in relation to the context
/// </summary>
/// <param name="context">The tokenizer context (no longer consulted; parameter kept for API compatibility).</param>
/// <returns>The absolute character index of this location.</returns>
public int ToPosition(TokenzierContext context)
{
	// The previous body computed a position from Line/Character AFTER an
	// unconditional 'return _index;', so that fallback was unreachable dead code
	// and has been removed. The absolute index is authoritative.
	return _index;
}
/// <summary>
///	Scans <paramref name="template"/> and yields one <see cref="TokenMatch"/> per span
///	delimited by the context's prefix/suffix tokens, ignoring delimiters that occur
///	inside string literals (outside of comment blocks).
/// </summary>
private static IEnumerable<TokenMatch> MatchTokens(string template, TokenzierContext context)
{
	// Sliding window over the most recent 3 characters, used to detect the
	// prefix/suffix delimiter sequences.
	var lastChars = new RollingArray(3);
	// Start index of the currently open token, or -1 when no token is open.
	var elementIndex = -1;
	// Holds the active quote character while scanning inside a string literal.
	char? isInString = null;
	var stringEscape = false;
	for (int i = 0; i < template.Length; i++)
	{
		var c = template[i];
		lastChars.Add(c);
		if (isInString.HasValue && context.CommentIntend == 0)
		{
			// Inside a string literal: only look for the (possibly escaped) closing quote.
			// NOTE(review): stringEscape is set by '\' but only cleared when the next
			// delimiter character is seen — a non-delimiter escape (e.g. \n) leaves the
			// flag set, which can mis-treat a later closing quote as escaped. Confirm
			// whether this is intended.
			if (c == '\\')
			{
				stringEscape = true;
			}
			else if (stringEscape && c == isInString.Value)
			{
				stringEscape = false;
			}
			else if (!stringEscape && c == isInString.Value)
			{
				isInString = null;
			}
		}
		else if (lastChars.StartToken(context.PrefixToken))
		{
			// Prefix detected; the token is taken to start one character back from the
			// current position (assumes a two-character prefix — TODO confirm).
			elementIndex = i - 1;
		}
		else if (elementIndex != -1 && lastChars.EndToken(context.SuffixToken))
		{
			// Suffix detected: emit the full token including both delimiters.
			var token = template.Substring(elementIndex, i - elementIndex + 1);
			yield return (new TokenMatch(elementIndex, token));
			elementIndex = -1;
		}
		else if (elementIndex != -1 && IsStringDelimiter(c) && context.CommentIntend == 0)
		{
			// A quote inside an open token starts a string literal.
			isInString = c;
		}
	}
	// An unterminated string inside an open token at end of template: emit the
	// remainder so the caller can surface a syntax error instead of dropping it.
	if (isInString.HasValue && elementIndex != -1)
	{
		var token = template.Substring(elementIndex, template.Length - elementIndex);
		yield return (new TokenMatch(elementIndex, token));
	}
}
public void TestExpressionParser(string query)
{
	// Parse the query, then render the parsed expression back to text; both the
	// round-trip and the absence of tokenizer errors are verified.
	var tokenizerContext = TokenzierContext.FromText(query);
	var parsed = ExpressionTokenizer.ParseExpressionOrString(query, tokenizerContext);
	Assert.That(parsed, Is.Not.Null);

	var printer = new ToParsableStringExpressionVisitor();
	parsed.Accept(printer);
	Assert.That(printer.StringBuilder.ToString(), Is.EqualTo(query));
	Assert.That(tokenizerContext.Errors, Is.Empty, () => tokenizerContext.Errors.GetErrorText());
}
public async Task TestTokenizerTime(string variation, int modelDepth, int sizeOfTemplate, int inserts, int runs)
{
	// Build a template that references the generated model path, then grow it
	// until it reaches the requested size.
	var model = ConstructModelAndPath(modelDepth);
	var baseTemplate = Enumerable.Range(1, 5)
		.Aggregate("", (seed, current) => seed + " {{" + model.Item2 + "}}\r\n");
	while (baseTemplate.Length <= sizeOfTemplate)
	{
		baseTemplate += model.Item2;
	}

	// Tokenize the generated template once; output is discarded to /dev/null.
	var options = new ParserOptions(baseTemplate, () => Stream.Null);
	var tokenizerContext = new TokenzierContext(new List<int>(), options.CultureInfo);
	var tokenizerResult = await Tokenizer.Tokenize(options, tokenizerContext);
}
/// <summary>
///	Benchmarks expression parsing and evaluation over <paramref name="runs"/>
///	iterations and records the timings in the global performance counter list.
/// </summary>
public async Task TestExpressionRuns(string variation, int width, int depth, int noArguments, int runs)
{
	var expression = ConstructExpression("", width, depth, noArguments);
	// Warm-up parse so JIT cost does not distort the timed runs below.
	ExpressionParser.ParseExpression("data", TokenzierContext.FromText("data"));
	var data = new Dictionary<string, object>();
	// NOTE(review): this loop assigns the same "data" key on every iteration, so
	// only one entry survives regardless of width — confirm that is intended.
	for (int i = 0; i < width; i++)
	{
		data["data"] = new Dictionary<string, object>();
	}
	var totalTime = Stopwatch.StartNew();
	// Measure parsing only; the last parsed expression is reused below.
	var parseTime = Stopwatch.StartNew();
	IMorestachioExpression morestachioExpression = null;
	for (var i = 0; i < runs; i++)
	{
		morestachioExpression = ExpressionParser.ParseExpression(expression.Item1, TokenzierContext.FromText(expression.Item1));
	}
	parseTime.Stop();
	// Measure evaluation of the parsed expression against the test data.
	var executeTime = Stopwatch.StartNew();
	for (int i = 0; i < runs; i++)
	{
		var parserOptions = new ParserOptions("");
		await morestachioExpression.GetValue(new ContextObject(".", null, data) { }, new ScopeData(parserOptions));
	}
	executeTime.Stop();
	totalTime.Stop();
	// Publish the collected timings for this variation.
	PerformanceCounter.PerformanceCounters.Add(new PerformanceCounter.ExpressionPerformanceCounterEntity(variation)
	{
		TimePerRun = new TimeSpan(parseTime.ElapsedTicks / runs),
		RunOver = runs,
		Width = width,
		Depth = depth,
		NoArguments = noArguments,
		ParseTime = parseTime.Elapsed,
		TotalTime = totalTime.Elapsed,
		ExecuteTime = executeTime.Elapsed
	});
}
/// <summary>
///	Trims the token and records an error when it matches the negative path
///	pattern; the (trimmed) token is always returned unchanged otherwise.
/// </summary>
internal static string Validated(string token, TokenzierContext context)
{
	token = token.Trim();
	var match = NegativePathSpec.Match(token);
	if (match.Success)
	{
		// Report the invalid path but still hand the token back to the caller.
		context.Errors.Add(new InvalidPathSyntaxError(
			context.CurrentLocation.AddWindow(new CharacterSnippedLocation(1, match.Index, token)),
			token));
	}
	return token;
}
/// <summary>
///	Tokenizes the template from the supplied (sealed) options and builds the
///	document info; when tokenizing reports errors no document is produced.
/// </summary>
public static async MorestachioDocumentInfoPromise ParseWithOptionsAsync([NotNull] ParserOptions parsingOptions)
{
	if (parsingOptions == null)
	{
		throw new ArgumentNullException(nameof(parsingOptions));
	}
	parsingOptions.Seal();

	var context = TokenzierContext.FromText(parsingOptions.Template, parsingOptions.CultureInfo);
	var tokenizerResult = await Tokenizer.Tokenize(parsingOptions, context);

	//if there are any errors do not parse the template
	var document = context.Errors.Any() ? null : Parse(tokenizerResult, parsingOptions);
	return new MorestachioDocumentInfo(parsingOptions, document, context.Errors);
}
/// <summary>
///	Measures tokenize+parse throughput for one expression over many iterations and
///	reports the timing via Assert.Warn; the correctness assertions below are
///	currently disabled, so this test only reports performance.
/// </summary>
public void TestExpressionCanParseOperators(string query)
{
	var sw = new Stopwatches();
	IMorestachioExpression expressions = null;
	TokenzierContext context = null;
	// Repeat to obtain a stable average timing per parse.
	for (int i = 0; i < 50000; i++)
	{
		sw.Start();
		context = TokenzierContext.FromText(query);
		expressions = ExpressionParser.ParseExpression(query, context);
		sw.Stop();
	}
	Assert.Warn("Result: " + sw.Elapsed + " average: " + sw.ElapsedAverage);
	//TestContext.Out.WriteLine();
	//Assert.That(expressions, Is.Not.Null, () => context.Errors.GetErrorText());
	//Assert.That(context.Errors, Is.Empty, () => context.Errors.GetErrorText());
	//var visitor = new ToParsableStringExpressionVisitor();
	//expressions.Accept(visitor);
	//var actual = visitor.StringBuilder.ToString();
	//Assert.That(actual, Is.EqualTo(query));
	//var template = "{{" + query + "}}";
	//var data = new Dictionary<string, object>();
	//for (var index = 0; index < args.Length; index++)
	//{
	//	var arg = args[index];
	//	data.Add(((char)('A' + index)).ToString(), arg);
	//}
	//var result = await ParserFixture.CreateAndParseWithOptions(template, data, ParserOptionTypes.UseOnDemandCompile, options =>
	//{
	//	//options.Formatters.AddSingleGlobal<object, object>(f =>
	//	//{
	//	//	return f;
	//	//}, "Self");
	//});
	//Assert.That(result, Is.EqualTo((valExp).ToString()));
}
/// <summary>
///	Parses the Template with the given options
/// </summary>
/// <param name="parsingOptions">a set of options</param>
/// <returns></returns>
public static async MorestachioDocumentInfoPromise ParseWithOptionsAsync(ParserOptions parsingOptions)
{
	if (parsingOptions == null)
	{
		throw new ArgumentNullException(nameof(parsingOptions));
	}
	parsingOptions.Seal();
	parsingOptions.Logger?.LogDebug(LoggingFormatter.ParserEventId, "Parse new Template");

	var context = new TokenzierContext(new List<int>(), parsingOptions.CultureInfo);
	var tokenizerResult = await Tokenizer.Tokenize(parsingOptions, context);
	parsingOptions.Logger?.LogDebug(LoggingFormatter.ParserEventId, "Template Parsed",
		new Dictionary<string, object>() { { "Errors", context.Errors } });

	//if there are any errors do not parse the template
	var document = context.Errors.Any() ? null : Parse(tokenizerResult, parsingOptions);
	return new MorestachioDocumentInfo(parsingOptions, document, context.Errors);
}
public async Task CanNotParseUnclosedString()
{
	// A lone quote is an unterminated string literal; evaluation must yield null.
	const string text = "\"";
	var evaluated = await ExpressionParser.EvaluateExpression(text, new ParserOptions(), null, TokenzierContext.FromText(text));
	Assert.That(evaluated, Is.Null);
}
/// <summary>
///	Manual profiling entry point: builds a large template and times the tokenizer.
///	The parse/render/compile measurements below are currently disabled.
/// </summary>
public async Task ProfileTest()
{
	string variation = "Template Size";
	int modelDepth = 5;
	int sizeOfTemplate = 100000;
	int inserts = 5;
	int runs = 1;
	var model = PerfHarness.ConstructModelAndPath(modelDepth);
	// Grow the template until it reaches the requested size.
	var baseTemplate = Enumerable.Range(1, 5)
		.Aggregate("", (seed, current) => seed += " {{" + model.Item2 + "}}");
	while (baseTemplate.Length <= sizeOfTemplate)
	{
		baseTemplate += model.Item2 + "\r\n";
	}
	MorestachioDocumentInfo template = null;
	TokenizerResult tokenizerResult = null;
	//make sure this class is JIT'd before we start timing.
	//await Parser.ParseWithOptionsAsync(new ParserOptions("asdf"));
	var totalTime = Stopwatch.StartNew();
	// Time only the tokenization step; output is discarded to /dev/null.
	var tokenizingTime = Stopwatch.StartNew();
	for (var i = 0; i < runs; i++)
	{
		var options = new ParserOptions(baseTemplate, () => Stream.Null);
		var tokenzierContext = new TokenzierContext(new List<int>(), options.CultureInfo);
		tokenizerResult = await Tokenizer.Tokenize(options, tokenzierContext);
	}
	tokenizingTime.Stop();
	//var parseTime = Stopwatch.StartNew();
	//for (var i = 0; i < runs; i++)
	//{
	//	var options = new ParserOptions(baseTemplate, () => Stream.Null);
	//	template = new MorestachioDocumentInfo(options, Parser.Parse(tokenizerResult, options));
	//}
	//parseTime.Stop();
	//var tmp = await template.CreateAndStringifyAsync(model.Item1);
	//var renderTime = Stopwatch.StartNew();
	//for (var i = 0; i < runs; i++)
	//{
	//	var morestachioDocumentResult = await template.CreateAsync(model.Item1);
	//	morestachioDocumentResult.Stream.Dispose();
	//}
	//renderTime.Stop();
	//totalTime.Stop();
	//var compileTime = Stopwatch.StartNew();
	//CompilationResult compilationResult = null;
	//for (var i = 0; i < runs; i++)
	//{
	//	compilationResult = template.Compile();
	//}
	//compileTime.Stop();
	//var compiledRenderTime = Stopwatch.StartNew();
	//for (var i = 0; i < runs; i++)
	//{
	//	var morestachioDocumentResult = await compilationResult(model.Item1, CancellationToken.None);
	//	morestachioDocumentResult.Stream.Dispose();
	//}
	//compiledRenderTime.Stop();
}
/// <summary>
///	Goes through the template and evaluates all tokens that are enclosed by {{ }}.
/// </summary>
/// <param name="parserOptions">Supplies the template text, the partials store and custom document item providers.</param>
/// <param name="context">Collects errors and tracks the current character/line location.</param>
/// <returns>The recognized tokens wrapped in a <c>TokenizerResult</c>.</returns>
public static async TokenizerResultPromise Tokenize(ParserOptions parserOptions, TokenzierContext context)
{
	var templateString = parserOptions.Template;
	// Stack of currently open block scopes (#if, #each, ...) used to match closing tags.
	var scopestack = new Stack<ScopeStackItem>();
	// Names that are valid targets for {{#include}}; pre-seeded from the partials store.
	var partialsNames = new List<string>(parserOptions.PartialsStore?.GetNames() ?? new string[0]);
	context.SetLocation(0);
	var tokens = new List<TokenPair>();

	// Emits an Else token when the most recent non-content token closed an #if
	// block; otherwise records a syntax error.
	void BeginElse(TokenMatch match)
	{
		var firstNonContentToken = tokens
			.AsReadOnly()
			.Reverse()
			.FirstOrDefault(e => !e.Type.Equals(TokenType.Content));
		if (!firstNonContentToken.Type.Equals(TokenType.IfClose))
		{
			context.Errors
				.Add(new MorestachioSyntaxError(
					context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"find if block for else", firstNonContentToken.Value, "{{/if}}",
					"Could not find an /if block for this else"));
		}
		else
		{
			scopestack.Push(new ScopeStackItem(TokenType.Else, firstNonContentToken.Value, match.Index));
			tokens.Add(new TokenPair(TokenType.Else, firstNonContentToken.Value, context.CurrentLocation));
		}
	}

	// Closes the innermost #if/^if scope, or records an unopened-scope error.
	// NOTE(review): the 'expected' parameter is not used in the body — confirm
	// whether it was meant to feed into the error messages.
	void EndIf(TokenMatch match, string expected)
	{
		if (!scopestack.Any())
		{
			context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
					.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
				"if", "{{#if name}}"));
		}
		else
		{
			var item1 = scopestack.Peek();
			if (item1.TokenType == TokenType.If || item1.TokenType == TokenType.IfNot)
			{
				var token = scopestack.Pop().Value;
				tokens.Add(new TokenPair(TokenType.IfClose, token, context.CurrentLocation));
			}
			else
			{
				context.Errors.Add(new MorestachioUnopendScopeError(
					context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"if", "{{#if name}}"));
			}
		}
	}

	// Strips the leading marker character (default '#') and, when given, the
	// keyword from a tag body, returning the trimmed remainder.
	string TrimToken(string token, string keyword, char key = '#')
	{
		token = token.TrimStart(key);
		if (keyword != null)
		{
			token = token.Trim().Substring(keyword.Length);
		}
		return (token.Trim());
	}

	foreach (var match in MatchTokens(templateString, context))
	{
		var tokenValue = match.Value;
		// Strip the surrounding prefix/suffix delimiters from the matched token.
		var trimmedToken = tokenValue
			.Remove(0, context.PrefixToken.Length);
		trimmedToken = trimmedToken.Remove(trimmedToken.Length - context.SuffixToken.Length);
		if (context.CommentIntend > 0)
		{
			// Inside a block comment: only track nested {{!}}/{{/!}} pairs and drop
			// every other token on the floor.
			if (trimmedToken == "/!")
			{
				context.CommentIntend--;
				if (context.CommentIntend == 0)
				{
					//move forward in the string.
					if (context.Character > match.Index + match.Length)
					{
						throw new InvalidOperationException("Internal index location error");
					}
					context.SetLocation(match.Index + match.Length);
				}
			}
			else if (trimmedToken.Equals("!"))
			{
				context.CommentIntend++;
			}
		}
		else
		{
			//yield front content.
			if (match.Index > context.Character)
			{
				tokens.Add(new TokenPair(TokenType.Content, templateString.Substring(context.Character, match.Index - context.Character), context.CurrentLocation));
			}
			context.SetLocation(match.Index + context.PrefixToken.Length);
			if (trimmedToken.StartsWith("#declare ", true, CultureInfo.InvariantCulture))
			{
				// {{#declare name}} — opens a partial declaration scope.
				var token = TrimToken(trimmedToken, "declare ");
				scopestack.Push(new ScopeStackItem(TokenType.PartialDeclarationOpen, token, match.Index));
				if (string.IsNullOrWhiteSpace(token))
				{
					context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"open", "declare", "{{#declare name}}", " Missing the Name."));
				}
				else
				{
					partialsNames.Add(token);
					tokens.Add(new TokenPair(TokenType.PartialDeclarationOpen, token, context.CurrentLocation));
				}
			}
			else if (trimmedToken.Equals("/declare", StringComparison.CurrentCultureIgnoreCase))
			{
				if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.PartialDeclarationOpen)
				{
					var token = scopestack.Pop().Value;
					tokens.Add(new TokenPair(TokenType.PartialDeclarationClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"declare", "{{#declare name}}"));
				}
			}
			else if (trimmedToken.StartsWith("#include ", true, CultureInfo.InvariantCulture))
			{
				// {{#include name [WITH expr]}} — renders a previously declared partial,
				// optionally scoped to the given expression.
				var token = trimmedToken.TrimStart('#').Trim();
				var partialRegex = PartialIncludeRegEx.Match(token);
				var partialName = partialRegex.Groups[1].Value;
				var partialContext = partialRegex.Groups[2].Value;
				if (!string.IsNullOrWhiteSpace(partialContext))
				{
					partialContext = token.Substring(partialRegex.Groups[2].Index + "WITH ".Length);
				}
				if (string.IsNullOrWhiteSpace(partialName) || !partialsNames.Contains(partialName))
				{
					context.Errors.Add(new MorestachioSyntaxError(
						context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"use", "include", "{{#include name}}",
						$" There is no Partial declared '{partialName}'. Partial names are case sensitive and must be declared before an include."));
				}
				else
				{
					IMorestachioExpression exp = null;
					if (!string.IsNullOrWhiteSpace(partialContext))
					{
						exp = ExpressionParser.ParseExpression(partialContext, context);
					}
					var tokenPair = new TokenPair(TokenType.RenderPartial, partialName, context.CurrentLocation, exp);
					tokens.Add(tokenPair);
				}
			}
			else if (trimmedToken.StartsWith("#each ", true, CultureInfo.InvariantCulture))
			{
				// {{#each expr [AS alias]}} — opens a collection scope.
				var token = TrimToken(trimmedToken, "each");
				var eval = EvaluateNameFromToken(token);
				token = eval.Value;
				var alias = eval.Name;
				scopestack.Push(new ScopeStackItem(TokenType.CollectionOpen, alias ?? token, match.Index));
				if (token.Trim() != "")
				{
					token = token.Trim();
					ScopingBehavior? scopeBehavior = null;
					if (!string.IsNullOrWhiteSpace(alias))
					{
						if (token.EndsWith("NoScope", StringComparison.InvariantCultureIgnoreCase))
						{
							scopeBehavior = ScopingBehavior.DoNotScope;
						}
						if (token.EndsWith("WithScope", StringComparison.InvariantCultureIgnoreCase))
						{
							scopeBehavior = ScopingBehavior.ScopeAnyway;
						}
					}
					tokens.Add(new TokenPair(TokenType.CollectionOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context), scopeBehavior));
				}
				else
				{
					context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						""));
				}
				if (!string.IsNullOrWhiteSpace(alias))
				{
					context.AdvanceLocation("each ".Length + alias.Length);
					tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation));
				}
			}
			else if (trimmedToken.Equals("/each", StringComparison.InvariantCultureIgnoreCase))
			{
				if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.CollectionOpen)
				{
					var token = scopestack.Pop().Value;
					tokens.Add(new TokenPair(TokenType.CollectionClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"each", "{{#each name}}"));
				}
			}
			else if (trimmedToken.StartsWith("#while ", true, CultureInfo.InvariantCulture))
			{
				// {{#while expr}} — opens a while-loop scope.
				var token = TrimToken(trimmedToken, "while");
				scopestack.Push(new ScopeStackItem(TokenType.WhileLoopOpen, token, match.Index));
				if (token.Trim() != "")
				{
					token = token.Trim();
					tokens.Add(new TokenPair(TokenType.WhileLoopOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				}
				else
				{
					context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						""));
				}
			}
			else if (trimmedToken.Equals("/while", StringComparison.InvariantCultureIgnoreCase))
			{
				if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.WhileLoopOpen)
				{
					var token = scopestack.Pop().Value;
					tokens.Add(new TokenPair(TokenType.WhileLoopClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"while", "{{#while Expression}}"));
				}
			}
			else if (trimmedToken.StartsWith("#do ", true, CultureInfo.InvariantCulture))
			{
				// {{#do expr}} — opens a do-loop scope.
				var token = TrimToken(trimmedToken, "do");
				scopestack.Push(new ScopeStackItem(TokenType.DoLoopOpen, token, match.Index));
				if (token.Trim() != "")
				{
					token = token.Trim();
					tokens.Add(new TokenPair(TokenType.DoLoopOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				}
				else
				{
					context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						""));
				}
			}
			else if (trimmedToken.Equals("/do", StringComparison.InvariantCultureIgnoreCase))
			{
				if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.DoLoopOpen)
				{
					var token = scopestack.Pop().Value;
					tokens.Add(new TokenPair(TokenType.DoLoopClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"do", "{{#do Expression}}"));
				}
			}
			else if (trimmedToken.StartsWith("#repeat ", true, CultureInfo.InvariantCulture))
			{
				// {{#repeat expr}} — opens a repeat-loop scope.
				var token = TrimToken(trimmedToken, "repeat");
				scopestack.Push(new ScopeStackItem(TokenType.RepeatLoopOpen, token, match.Index));
				if (token.Trim() != "")
				{
					token = token.Trim();
					tokens.Add(new TokenPair(TokenType.RepeatLoopOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				}
				else
				{
					context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						""));
				}
			}
			else if (trimmedToken.Equals("/repeat", StringComparison.InvariantCultureIgnoreCase))
			{
				if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.RepeatLoopOpen)
				{
					var token = scopestack.Pop().Value;
					tokens.Add(new TokenPair(TokenType.RepeatLoopClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"repeat", "{{#repeat Expression}}"));
				}
			}
			else if (trimmedToken.StartsWith("#if ", true, CultureInfo.InvariantCulture))
			{
				// {{#if expr}} — conditional scope; an AS alias is not allowed here.
				var token = TrimToken(trimmedToken, "if");
				var eval = EvaluateNameFromToken(token);
				token = eval.Value;
				if (eval.Name != null)
				{
					context.Errors.Add(new MorestachioSyntaxError(
						context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"^if", "AS", "No Alias"));
				}
				scopestack.Push(new ScopeStackItem(TokenType.If, token, match.Index));
				if (token.Trim() != "")
				{
					token = token.Trim();
					tokens.Add(new TokenPair(TokenType.If, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				}
				else
				{
					context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						""));
				}
			}
			else if (trimmedToken.StartsWith("^if ", true, CultureInfo.InvariantCulture))
			{
				// {{^if expr}} — inverted conditional scope.
				var token = TrimToken(trimmedToken, "if", '^');
				var eval = EvaluateNameFromToken(token);
				token = eval.Value;
				if (eval.Name != null)
				{
					context.Errors.Add(new MorestachioSyntaxError(
						context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"^if", "AS", "No Alias"));
				}
				scopestack.Push(new ScopeStackItem(TokenType.IfNot, token, match.Index));
				if (token.Trim() != "")
				{
					token = token.Trim();
					tokens.Add(new TokenPair(TokenType.IfNot, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				}
				else
				{
					context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						""));
				}
			}
			else if (trimmedToken.Equals("/if", StringComparison.InvariantCultureIgnoreCase))
			{
				EndIf(match, "/If");
			}
			else if (trimmedToken.Equals("#ifelse", StringComparison.InvariantCultureIgnoreCase))
			{
				// {{#ifelse}} — closes the current if and immediately opens an else.
				EndIf(match, "#ifelse");
				BeginElse(match);
			}
			else if (trimmedToken.Equals("#else", StringComparison.InvariantCultureIgnoreCase))
			{
				BeginElse(match);
			}
			else if (trimmedToken.Equals("/else", StringComparison.InvariantCultureIgnoreCase))
			{
				if (scopestack.Any() && scopestack.Peek().TokenType == TokenType.Else)
				{
					var token = scopestack.Pop().Value;
					tokens.Add(new TokenPair(TokenType.ElseClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(
						context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"else", "{{#else name}}"));
				}
			}
			else if (trimmedToken.StartsWith("#var ", true, CultureInfo.InvariantCulture))
			{
				// {{#var name = expr}} — variable assignment token.
				tokens.Add(ExpressionParser.TokenizeVariableAssignment(trimmedToken, context, TokenType.VariableVar));
			}
			else if (trimmedToken.StartsWith("#let ", true, CultureInfo.InvariantCulture))
			{
				// {{#let name = expr}} — scoped variable assignment token.
				tokens.Add(ExpressionParser.TokenizeVariableAssignment(trimmedToken, context, TokenType.VariableLet));
			}
			else if (trimmedToken.StartsWith("^"))
			{
				//open inverted group
				var token = trimmedToken.TrimStart('^').Trim();
				var eval = EvaluateNameFromToken(token);
				token = eval.Value;
				var alias = eval.Name;
				scopestack.Push(new ScopeStackItem(TokenType.InvertedElementOpen, alias ?? token, match.Index));
				tokens.Add(new TokenPair(TokenType.InvertedElementOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				if (!string.IsNullOrWhiteSpace(alias))
				{
					context.AdvanceLocation(1 + alias.Length);
					tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation));
				}
			}
			else if (trimmedToken.StartsWith("&"))
			{
				//escaped single element
				var token = trimmedToken.TrimStart('&').Trim();
				tokens.Add(new TokenPair(TokenType.UnescapedSingleValue, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
			}
			else if (trimmedToken.StartsWith("!"))
			{
				//it's a comment drop this on the floor, no need to even yield it.
				if (trimmedToken.Equals("!"))
				{
					//except for when its a block comment then set the isCommentBlock flag
					context.CommentIntend++;
				}
			}
			else if (trimmedToken.Equals("#NL", StringComparison.InvariantCultureIgnoreCase))
			{
				tokens.Add(new TokenPair(TokenType.WriteLineBreak, trimmedToken, context.CurrentLocation));
			}
			else if (trimmedToken.Equals("#TNL", StringComparison.InvariantCultureIgnoreCase))
			{
				tokens.Add(new TokenPair(TokenType.TrimLineBreak, trimmedToken, context.CurrentLocation));
			}
			else if (trimmedToken.Equals("#TNLS", StringComparison.InvariantCultureIgnoreCase))
			{
				tokens.Add(new TokenPair(TokenType.TrimLineBreaks, trimmedToken, context.CurrentLocation));
			}
			else if (trimmedToken.Equals("#TRIMALL", StringComparison.InvariantCultureIgnoreCase))
			{
				tokens.Add(new TokenPair(TokenType.TrimEverything, trimmedToken, context.CurrentLocation));
			}
			else if (trimmedToken.StartsWith("#SET OPTION ", StringComparison.InvariantCultureIgnoreCase))
			{
				// {{#SET OPTION Name = Value}} — split on the first '=' into a name and an
				// expression, then apply the option to the context.
				var token = TrimToken(trimmedToken, "SET OPTION ");
				var expectEquals = false;
				string name = null;
				IMorestachioExpression value = null;
				for (int i = 0; i < token.Length; i++)
				{
					var c = token[i];
					if (IsWhiteSpaceDelimiter(c))
					{
						// Whitespace ends the name; the next non-space char must be '='.
						expectEquals = true;
						continue;
					}
					if (expectEquals || c == '=')
					{
						if (c != '=')
						{
							context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
									.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
								"/", "{{#SET OPTION Name = Value}}",
								$" Expected to find '=' or whitespace after name but found '{c}'"));
						}
						else
						{
							name = token.Substring(0, i - 1).Trim();
							value = ExpressionParser.ParseExpression(token.Substring(i + 1).Trim(), context);
							break;
						}
					}
				}
				if (string.IsNullOrWhiteSpace(name))
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"/", "{{#SET OPTION Name = Value}}", $" Expected to find '=' after name"));
					// Aborts the whole token loop, not just this tag.
					break;
				}
				if (value == null)
				{
					context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
						"/", "{{#SET OPTION Name = Value}}", $" Expected to find an expression after '='"));
					// Aborts the whole token loop, not just this tag.
					break;
				}
				await context.SetOption(name, value, parserOptions);
			}
			//else if (tokenValue.Equals("{{/TRIMALL}}", StringComparison.InvariantCultureIgnoreCase))
			//{
			//	tokens.Add(new TokenPair(TokenType.StopTrimEverything, tokenValue, context.CurrentLocation));
			//}
			else
			{
				//check for custom DocumentItem provider
				var customDocumentProvider = parserOptions.CustomDocumentItemProviders.FirstOrDefault(e => e.ShouldTokenize(trimmedToken));
				if (customDocumentProvider != null)
				{
					var tokenPairs = customDocumentProvider
						.Tokenize(new CustomDocumentItemProvider.TokenInfo(trimmedToken, context, scopestack), parserOptions);
					tokens.AddRange(tokenPairs);
				}
				else if (trimmedToken.StartsWith("#"))
				{
					//open group
					var token = trimmedToken.TrimStart('#').Trim();
					var eval = EvaluateNameFromToken(token);
					token = eval.Value;
					var alias = eval.Name;
					scopestack.Push(new ScopeStackItem(TokenType.ElementOpen, alias ?? token, match.Index));
					tokens.Add(new TokenPair(TokenType.ElementOpen, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
					if (!string.IsNullOrWhiteSpace(alias))
					{
						context.AdvanceLocation(3 + alias.Length);
						tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation));
					}
				}
				else if (trimmedToken.StartsWith("/"))
				{
					var token = trimmedToken.TrimStart('/').Trim();
					//close group
					if (!scopestack.Any())
					{
						context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
								.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
							"/", "{{#path}}", " There are more closing elements then open."));
					}
					else
					{
						var item = scopestack.Peek();
						// The closing tag must name the same path that opened the scope.
						if ((item.TokenType == TokenType.ElementOpen || item.TokenType == TokenType.InvertedElementOpen) && item.Value == token)
						{
							scopestack.Pop();
							tokens.Add(new TokenPair(TokenType.ElementClose, token, context.CurrentLocation));
						}
						else
						{
							context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
									.AddWindow(new CharacterSnippedLocation(1, 1, tokenValue)),
								"/", "{{#path}}", " There are more closing elements then open."));
						}
					}
				}
				else
				{
					//unsingle value.
					var token = trimmedToken.Trim();
					tokens.Add(new TokenPair(TokenType.EscapedSingleValue, token, context.CurrentLocation, ExpressionParser.ParseExpression(token, context)));
				}
			}
			//move forward in the string.
			if (context.Character > match.Index + match.Length)
			{
				throw new InvalidOperationException("Internal index location error");
			}
			context.SetLocation(match.Index + match.Length);
		}
	}
	// Emit any trailing literal content after the last matched token.
	if (context.Character < templateString.Length)
	{
		tokens.Add(new TokenPair(TokenType.Content, templateString.Substring(context.Character), context.CurrentLocation));
	}
	// Every scope left open (including those of custom providers) is reported as
	// an unclosed-scope error, innermost first.
	if (scopestack.Any() || parserOptions.CustomDocumentItemProviders.Any(f => f.ScopeStack.Any()))
	{
		foreach (var unclosedScope in scopestack
			.Concat(parserOptions.CustomDocumentItemProviders.SelectMany(f => f.ScopeStack))
			.Select(k =>
			{
				return (new
				{
					scope = k.TokenType.ToString(),
					location = HumanizeCharacterLocation(k.Index, context.Lines)
				});
			}).Reverse())
		{
			context.Errors.Add(new MorestachioUnclosedScopeError(unclosedScope.location
					.AddWindow(new CharacterSnippedLocation(1, -1, "")),
				unclosedScope.scope, ""));
		}
	}
	return (new TokenizerResult(tokens));
}
/// <summary>
///	Tokenizes the template held by <paramref name="parserOptions"/> into a flat list of <see cref="TokenPair"/>s,
///	profiling the token search and reporting syntax problems into the produced <see cref="TokenzierContext"/>.
/// </summary>
/// <param name="parserOptions">Supplies the template text, the partials store and any custom document item providers.</param>
/// <param name="profiler">Receives a "Find Tokens" measurement around the regex scan.</param>
/// <param name="tokenzierContext">Out: the context created for this run; carries the newline map and all collected errors.</param>
/// <returns>The token list (returned as <see cref="IEnumerable{T}"/>; materialized eagerly).</returns>
internal static IEnumerable<TokenPair> Tokenize(ParserOptions parserOptions,
	PerformanceProfiler profiler,
	out TokenzierContext tokenzierContext)
{
	var templateString = parserOptions.Template;
	MatchCollection matches;
	using (profiler.Begin("Find Tokens"))
	{
		matches = TokenFinder.Matches(templateString);
	}

	// Stack of currently open scopes: Item1 is a marker string (e.g. "#if<expr>"), Item2 the match index for error reporting.
	var scopestack = new Stack<Tuple<string, int>>();
	// Names of partials that are already known (pre-registered store names plus any {{#declare}} seen so far).
	var partialsNames = new List<string>(parserOptions.PartialsStore?.GetNames() ?? new string[0]);
	// Pre-compute the newline positions so character offsets can later be humanized into line/column locations.
	var context = new TokenzierContext(NewlineFinder.Matches(templateString).OfType<Match>().Select(k => k.Index).ToArray());
	tokenzierContext = context;
	context.SetLocation(0);
	var tokens = new List<TokenPair>();

	// Opens an {{#else}} scope. Valid only when the last non-content token was an {{/if}} close.
	void BeginElse(Match match)
	{
		var firstNonContentToken = tokens
			.AsReadOnly()
			.Reverse()
			.FirstOrDefault(e => !e.Type.Equals(TokenType.Content));
		if (firstNonContentToken == null || !firstNonContentToken.Type.Equals(TokenType.IfClose))
		{
			context.Errors
				.Add(new MorestachioSyntaxError(
					context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"find if block for else",
					firstNonContentToken?.Value,
					"{{/if}}",
					"Could not find an /if block for this else"));
		}
		else
		{
			scopestack.Push(Tuple.Create($"#else_{firstNonContentToken.Value}", match.Index));
			tokens.Add(new TokenPair(TokenType.Else, firstNonContentToken.Value, context.CurrentLocation));
		}
	}

	// Closes an if scope opened by {{#if}} or {{^if}}; 'expected' is the exact tag body (e.g. "/If") for validation.
	void EndIf(Match match, string expected)
	{
		if (!string.Equals(match.Value, "{{" + expected + "}}", StringComparison.InvariantCultureIgnoreCase))
		{
			context.Errors
				.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"close",
					expected,
					"{{" + expected + "}}"));
		}
		else
		{
			if (!scopestack.Any())
			{
				context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"if",
					"{{#if name}}"));
			}
			else
			{
				var item1 = scopestack.Peek().Item1;
				if (item1.StartsWith("#if") || item1.StartsWith("^if"))
				{
					var token = scopestack.Pop().Item1;
					tokens.Add(new TokenPair(TokenType.IfClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(
						context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
						"if",
						"{{#if name}}"));
				}
			}
		}
	}

	foreach (Match match in matches)
	{
		// Emit any plain content that sits between the previous token and this match.
		if (match.Index > context.Character)
		{
			tokens.Add(new TokenPair(TokenType.Content, templateString.Substring(context.Character, match.Index - context.Character), context.CurrentLocation));
		}

		// +2 skips the "{{" opening braces so locations point at the tag body.
		context.SetLocation(match.Index + 2);
		var tokenValue = match.Value;
		var trimmedToken = tokenValue.TrimStart('{').TrimEnd('}');
		if (tokenValue.StartsWith("{{#declare ", true, CultureInfo.InvariantCulture))
		{
			scopestack.Push(Tuple.Create(tokenValue, match.Index));
			var token = trimmedToken.TrimStart('#').Trim()
				.Substring("declare ".Length).Trim();
			if (string.IsNullOrWhiteSpace(token))
			{
				context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"open", "declare", "{{#declare name}}", " Missing the Name."));
			}
			else
			{
				// Register the partial name immediately so later {{#include}} tags can resolve it.
				partialsNames.Add(token);
				tokens.Add(new TokenPair(TokenType.PartialDeclarationOpen, token, context.CurrentLocation));
			}
		}
		else if (tokenValue.StartsWith("{{/declare", true, CultureInfo.InvariantCulture))
		{
			if (!string.Equals(tokenValue, "{{/declare}}", StringComparison.InvariantCultureIgnoreCase))
			{
				context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"close", "declare", "{{/declare}}"));
			}
			else if (scopestack.Any() && scopestack.Peek().Item1.StartsWith("{{#declare", StringComparison.InvariantCultureIgnoreCase))
			{
				var token = scopestack.Pop().Item1.TrimStart('{').TrimEnd('}').TrimStart('#').Trim()
					.Substring("declare".Length);
				tokens.Add(new TokenPair(TokenType.PartialDeclarationClose, token, context.CurrentLocation));
			}
			else
			{
				context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"declare", "{{#declare name}}"));
			}
		}
		else if (tokenValue.StartsWith("{{#include ", true, CultureInfo.InvariantCulture))
		{
			var token = trimmedToken.TrimStart('#').Trim()
				.Substring("include ".Length).Trim();
			// A partial must be declared before it can be included (declaration order matters).
			if (string.IsNullOrWhiteSpace(token) || !partialsNames.Contains(token))
			{
				context.Errors.Add(new MorestachioSyntaxError(
					context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"use",
					"include",
					"{{#include name}}",
					$" There is no Partial declared '{token}'. Partial names are case sensitive and must be declared before an include."));
			}
			else
			{
				tokens.Add(new TokenPair(TokenType.RenderPartial, token, context.CurrentLocation));
			}
		}
		else if (tokenValue.StartsWith("{{#each", true, CultureInfo.InvariantCulture))
		{
			var token = trimmedToken.TrimStart('#').Trim().Substring("each".Length);
			var eval = EvaluateNameFromToken(token);
			token = eval.Item1;
			var alias = eval.Item2;
			scopestack.Push(Tuple.Create($"#each{alias ?? token}", match.Index));
			// A leading space separates "each" from the expression; an all-whitespace token is invalid.
			if (token.StartsWith(" ") && token.Trim() != "")
			{
				token = token.Trim();
				tokens.Add(new TokenPair(TokenType.CollectionOpen, token, context.CurrentLocation)
				{
					MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
				});
			}
			else
			{
				context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					""));
			}
			if (!string.IsNullOrWhiteSpace(alias))
			{
				context.AdvanceLocation("each ".Length + alias.Length);
				tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation));
			}
		}
		else if (tokenValue.StartsWith("{{/each", true, CultureInfo.InvariantCulture))
		{
			if (!string.Equals(tokenValue, "{{/each}}", StringComparison.InvariantCultureIgnoreCase))
			{
				context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"close", "each", "{{/each}}"));
			}
			else if (scopestack.Any() && scopestack.Peek().Item1.StartsWith("#each"))
			{
				var token = scopestack.Pop().Item1;
				tokens.Add(new TokenPair(TokenType.CollectionClose, token, context.CurrentLocation));
			}
			else
			{
				context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"each", "{{#each name}}"));
			}
		}
		else if (tokenValue.StartsWith("{{#while", true, CultureInfo.InvariantCulture))
		{
			var token = trimmedToken.TrimStart('#')
				.Trim()
				.Substring("while".Length);
			scopestack.Push(Tuple.Create($"#while{token}", match.Index));
			if (token.StartsWith(" ") && token.Trim() != "")
			{
				token = token.Trim();
				tokens.Add(new TokenPair(TokenType.WhileLoopOpen, token, context.CurrentLocation)
				{
					MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
				});
			}
			else
			{
				context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					""));
			}
		}
		else if (tokenValue.StartsWith("{{/while", true, CultureInfo.InvariantCulture))
		{
			if (!string.Equals(tokenValue, "{{/while}}", StringComparison.InvariantCultureIgnoreCase))
			{
				context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"close", "while", "{{/while}}"));
			}
			else if (scopestack.Any() && scopestack.Peek().Item1.StartsWith("#while"))
			{
				var token = scopestack.Pop().Item1;
				tokens.Add(new TokenPair(TokenType.WhileLoopClose, token, context.CurrentLocation));
			}
			else
			{
				context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"while", "{{#while Expression}}"));
			}
		}
		else if (tokenValue.StartsWith("{{#do", true, CultureInfo.InvariantCulture))
		{
			var token = trimmedToken.TrimStart('#')
				.Trim()
				.Substring("do".Length);
			scopestack.Push(Tuple.Create($"#do{token}", match.Index));
			if (token.StartsWith(" ") && token.Trim() != "")
			{
				token = token.Trim();
				tokens.Add(new TokenPair(TokenType.DoLoopOpen, token, context.CurrentLocation)
				{
					MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
				});
			}
			else
			{
				context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					""));
			}
		}
		else if (tokenValue.StartsWith("{{/do", true, CultureInfo.InvariantCulture))
		{
			if (!string.Equals(tokenValue, "{{/do}}", StringComparison.InvariantCultureIgnoreCase))
			{
				context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"close", "do", "{{/do}}"));
			}
			else if (scopestack.Any() && scopestack.Peek().Item1.StartsWith("#do"))
			{
				var token = scopestack.Pop().Item1;
				tokens.Add(new TokenPair(TokenType.DoLoopClose, token, context.CurrentLocation));
			}
			else
			{
				context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"do", "{{#do Expression}}"));
			}
		}
		else if (tokenValue.StartsWith("{{#if ", true, CultureInfo.InvariantCulture))
		{
			var token = trimmedToken.TrimStart('#').Trim().Substring("if".Length);
			var eval = EvaluateNameFromToken(token);
			token = eval.Item1;
			// if-tags do not support an AS alias.
			// NOTE(review): the error labels this "^if" even though this is the "#if" branch — possibly a copy/paste slip; confirm intent.
			if (eval.Item2 != null)
			{
				context.Errors.Add(new MorestachioSyntaxError(
					context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"^if", "AS", "No Alias"));
			}
			scopestack.Push(Tuple.Create($"#if{token}", match.Index));
			if (token.StartsWith(" ") && token.Trim() != "")
			{
				token = token.Trim();
				tokens.Add(new TokenPair(TokenType.If, token, context.CurrentLocation)
				{
					MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
				});
			}
			else
			{
				context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					""));
			}
		}
		else if (tokenValue.StartsWith("{{^if ", true, CultureInfo.InvariantCulture))
		{
			var token = trimmedToken.TrimStart('^').Trim().Substring("if".Length);
			var eval = EvaluateNameFromToken(token);
			token = eval.Item1;
			if (eval.Item2 != null)
			{
				context.Errors.Add(new MorestachioSyntaxError(
					context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"^if", "AS", "No Alias"));
			}
			scopestack.Push(Tuple.Create($"^if{token}", match.Index));
			if (token.StartsWith(" ") && token.Trim() != "")
			{
				token = token.Trim();
				tokens.Add(new TokenPair(TokenType.IfNot, token, context.CurrentLocation)
				{
					MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
				});
			}
			else
			{
				context.Errors.Add(new InvalidPathSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					""));
			}
		}
		else if (tokenValue.StartsWith("{{/if", true, CultureInfo.InvariantCulture))
		{
			EndIf(match, "/If");
		}
		else if (tokenValue.StartsWith("{{#ifelse", true, CultureInfo.InvariantCulture))
		{
			// "#ifelse" closes the current if and immediately opens an else scope.
			EndIf(match, "#ifelse");
			BeginElse(match);
		}
		else if (tokenValue.Equals("{{#else}}", StringComparison.InvariantCultureIgnoreCase))
		{
			BeginElse(match);
		}
		else if (tokenValue.Equals("{{/else}}", StringComparison.InvariantCultureIgnoreCase))
		{
			// NOTE(review): this inner negated Equals can never be true — the outer branch already
			// matched "{{/else}}" exactly — so the syntax-error arm below is dead code.
			if (!string.Equals(tokenValue, "{{/else}}", StringComparison.InvariantCultureIgnoreCase))
			{
				context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"close", "else", "{{/else}}"));
			}
			else
			{
				if (scopestack.Any() && scopestack.Peek().Item1.StartsWith("#else_"))
				{
					var token = scopestack.Pop().Item1;
					tokens.Add(new TokenPair(TokenType.ElseClose, token, context.CurrentLocation));
				}
				else
				{
					context.Errors.Add(new MorestachioUnopendScopeError(
						context.CurrentLocation
							.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
						"else", "{{#else name}}"));
				}
			}
		}
		else if (tokenValue.StartsWith("{{#var ", true, CultureInfo.InvariantCulture))
		{
			tokens.AddRange(ExpressionTokenizer.TokenizeVariableAssignment(tokenValue.Trim('{', '}'), context));
		}
		else if (tokenValue.StartsWith("{{#"))
		{
			// Open a generic scope group: {{#path}} ... {{/path}}, with an optional AS alias.
			var token = trimmedToken.TrimStart('#').Trim();
			var eval = EvaluateNameFromToken(token);
			token = eval.Item1;
			var alias = eval.Item2;
			scopestack.Push(Tuple.Create(alias ?? token, match.Index));
			tokens.Add(new TokenPair(TokenType.ElementOpen, token, context.CurrentLocation)
			{
				MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
			});
			if (!string.IsNullOrWhiteSpace(alias))
			{
				context.AdvanceLocation(3 + alias.Length);
				tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation));
			}
		}
		else if (tokenValue.StartsWith("{{^"))
		{
			// Open an inverted scope group (rendered when the expression is falsy).
			var token = trimmedToken.TrimStart('^').Trim();
			var eval = EvaluateNameFromToken(token);
			token = eval.Item1;
			var alias = eval.Item2;
			scopestack.Push(Tuple.Create(alias ?? token, match.Index));
			tokens.Add(new TokenPair(TokenType.InvertedElementOpen, token, context.CurrentLocation)
			{
				MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
			});
			if (!string.IsNullOrWhiteSpace(alias))
			{
				context.AdvanceLocation(1 + alias.Length);
				tokens.Add(new TokenPair(TokenType.Alias, alias, context.CurrentLocation));
			}
		}
		else if (tokenValue.StartsWith("{{/"))
		{
			var token = trimmedToken.TrimStart('/').Trim();
			// Close a generic scope group; only valid if it matches the innermost open scope.
			if (scopestack.Any() && scopestack.Peek().Item1 == token)
			{
				scopestack.Pop();
				tokens.Add(new TokenPair(TokenType.ElementClose, token, context.CurrentLocation));
			}
			else
			{
				context.Errors.Add(new MorestachioUnopendScopeError(context.CurrentLocation
						.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
					"/", "{{#path}}", " There are more closing elements then open."));
			}
		}
		else if (tokenValue.StartsWith("{{{") || tokenValue.StartsWith("{{&"))
		{
			// Unescaped single value: {{{value}}} or {{&value}}.
			var token = trimmedToken.TrimStart('&').Trim();
			tokens.Add(new TokenPair(TokenType.UnescapedSingleValue, token, context.CurrentLocation)
			{
				MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
			});
		}
		else if (tokenValue.StartsWith("{{!"))
		{
			// It's a comment; drop this on the floor, no need to even yield it.
		}
		else if (tokenValue.StartsWith("#") || tokenValue.StartsWith("/"))
		{
			// A "{{"-less #/ prefix cannot be an expression: report and continue.
			context.Errors.Add(new MorestachioSyntaxError(context.CurrentLocation
					.AddWindow(new CharacterSnippedLocation(1, 1, match.Value)),
				$"Unexpected token. Expected an valid Expression but got '{tokenValue}'", tokenValue, ""));
		}
		else
		{
			// Give custom document item providers first refusal on the token; otherwise treat it as an escaped single value.
			var customDocumentProvider = parserOptions.CustomDocumentItemProviders.FirstOrDefault(e => e.ShouldTokenize(tokenValue));
			if (customDocumentProvider != null)
			{
				var tokenInfo = new CustomDocumentItemProvider.TokenInfo(tokenValue, context, scopestack);
				var tokenPairs = customDocumentProvider.Tokenize(tokenInfo, parserOptions);
				tokens.AddRange(tokenPairs);
			}
			else
			{
				var token = trimmedToken.Trim();
				tokens.Add(new TokenPair(TokenType.EscapedSingleValue, token, context.CurrentLocation)
				{
					MorestachioExpression = ExpressionTokenizer.ParseExpressionOrString(token, context)
				});
			}
		}

		// Move forward in the string; the cursor must never have run past the end of this match.
		if (context.Character > match.Index + match.Length)
		{
			throw new InvalidOperationException("Internal index location error");
		}
		context.SetLocation(match.Index + match.Length);
	}

	// Emit any trailing content after the last token.
	if (context.Character < templateString.Length)
	{
		tokens.Add(new TokenPair(TokenType.Content, templateString.Substring(context.Character), context.CurrentLocation));
	}

	// Every scope still open here (including scopes held by custom providers) is an unclosed-scope error.
	if (scopestack.Any() || parserOptions.CustomDocumentItemProviders.Any(f => f.ScopeStack.Any()))
	{
		foreach (var unclosedScope in scopestack
			.Concat(parserOptions.CustomDocumentItemProviders.SelectMany(f => f.ScopeStack))
			.Select(k =>
			{
				// Strip the tag decoration so the error names the scope the way the user wrote it.
				var value = k.Item1.Trim('{', '#', '}');
				if (value.StartsWith("each "))
				{
					value = value.Substring(5);
				}
				return (new
				{
					scope = value,
					location = HumanizeCharacterLocation(k.Item2, context.Lines)
				});
			}).Reverse())
		{
			context.Errors.Add(new MorestachioUnopendScopeError(unclosedScope.location
					.AddWindow(new CharacterSnippedLocation(1, -1, "")),
				unclosedScope.scope, ""));
		}
	}
	return (tokens);
}
/// <summary>
///	Registers the validation rules for <see cref="PrepareMailDataStepViewModel"/>:
///	each Morestachio expression field must parse and evaluate against the example mail data,
///	and the resulting address fields must look like valid e-mail addresses.
///	Side effect: evaluating a rule also writes the rendered preview back onto the
///	corresponding Example* property of the view model.
/// </summary>
public PrepareMailDataStepViewModelErrors()
{
	var invalidExpression = new UiLocalizableString("DataImport.PrepareStep.Errors.InvalidExpression");
	var invalidAddress = new UiLocalizableString("DataImport.PrepareStep.Errors.InvalidAddress");
	// E-mail pattern taken from:
	// https://haacked.com/archive/2007/08/21/i-knew-how-to-validate-an-email-address-until-i.aspx/
	var mailRegEx = new Regex(@"^(?!\.)(""([^""\r\\]|\\[""\r\\])*""|" +
		@"([-a-z0-9!#$%&'*+/=?^_`{|}~]|(?<!\.)\.)*)(?<!\.)" +
		@"@[a-z0-9][\w\.-]*[a-z0-9]\.[a-z][a-z\.]*[a-z]$",
		RegexOptions.IgnoreCase);
	// Cap expression evaluation so a pathological template cannot hang validation.
	var defaultOptions = new ParserOptions()
	{
		Timeout = TimeSpan.FromSeconds(3)
	};

	// Parses 'expressionValue' as a Morestachio expression and evaluates it against 'value'.
	// Returns null when the expression is blank, unparsable, or there is no sample value.
	async ValueTask<string> ExportValue(string expressionValue, object value)
	{
		if (string.IsNullOrWhiteSpace(expressionValue) || value == null)
		{
			return (null);
		}
		var context = TokenzierContext.FromText(expressionValue);
		var expression = ExpressionParser.ParseExpression(expressionValue, context);
		if (expression == null)
		{
			return (null);
		}
		return ((await expression.GetValue(new ContextObject(defaultOptions, "", null, value), new ScopeData()))
			.Value?.ToString());
	}

	// Recipient address: expression must evaluate (stores preview in ExampleAddress)...
	Add(new AsyncError<PrepareMailDataStepViewModel>(invalidExpression, async e =>
	{
		return ((e.ExampleAddress = await ExportValue(e.MExpressionAddress, e.ExampleMailData)) == null);
	}, nameof(MExpressionAddress), nameof(ExampleMailData)));
	// ...and the preview must match the e-mail regex.
	Add(new Error<PrepareMailDataStepViewModel>(invalidAddress, e =>
	{
		if (string.IsNullOrWhiteSpace(e.ExampleAddress))
		{
			return (true);
		}
		return (!mailRegEx.IsMatch(e.ExampleAddress));
	}, nameof(ExampleAddress), nameof(MExpressionAddress), nameof(ExampleMailData)));
	// Recipient display name expression.
	Add(new AsyncError<PrepareMailDataStepViewModel>(invalidExpression, async e =>
	{
		return ((e.ExampleName = await ExportValue(e.MExpressionName, e.ExampleMailData)) == null);;
	}, nameof(MExpressionName), nameof(ExampleMailData)));
	// Subject line expression.
	Add(new AsyncError<PrepareMailDataStepViewModel>(invalidExpression, async e =>
	{
		return ((e.ExampleSubject = await ExportValue(e.MExpressionSubject, e.ExampleMailData)) == null);
	}, nameof(MExpressionSubject), nameof(ExampleMailData)));
	// Sender display name expression.
	Add(new AsyncError<PrepareMailDataStepViewModel>(invalidExpression, async e =>
	{
		return ((e.ExampleFromName = await ExportValue(e.MExpressionFromName, e.ExampleMailData)) == null);
	}, nameof(MExpressionFromName), nameof(ExampleMailData)));
	// Sender address: expression must evaluate...
	Add(new AsyncError<PrepareMailDataStepViewModel>(invalidExpression, async e =>
	{
		return ((e.ExampleFromAddress = await ExportValue(e.MExpressionFromAddress, e.ExampleMailData)) == null);
	}, nameof(MExpressionFromAddress), nameof(ExampleMailData)));
	// ...and its preview must also match the e-mail regex.
	Add(new Error<PrepareMailDataStepViewModel>(invalidAddress, e =>
	{
		if (string.IsNullOrWhiteSpace(e.ExampleFromAddress))
		{
			return (true);
		}
		return (!mailRegEx.IsMatch(e.ExampleFromAddress));
	}, nameof(ExampleFromAddress), nameof(MExpressionFromAddress), nameof(ExampleMailData)));
}
/// <summary>
///	Benchmark: builds a template of roughly <paramref name="sizeOfTemplate"/> characters against a model
///	of <paramref name="modelDepth"/> nested levels, then times tokenizing, parsing, rendering,
///	compiling and compiled rendering over <paramref name="runs"/> iterations each, and records the
///	result in <see cref="PerformanceCounter.PerformanceCounters"/>.
/// </summary>
/// <param name="variation">Label under which the measurements are recorded.</param>
/// <param name="modelDepth">Nesting depth of the generated model.</param>
/// <param name="sizeOfTemplate">Minimum size of the generated template text.</param>
/// <param name="inserts">Reported substitution count (recorded, not used to build the template).</param>
/// <param name="runs">Number of iterations per timed phase.</param>
public async Task TestRuns(string variation, int modelDepth, int sizeOfTemplate, int inserts, int runs)
{
	var model = ConstructModelAndPath(modelDepth);
	var baseTemplate = Enumerable.Range(1, 5)
		.Aggregate("", (seed, current) => seed += " {{" + model.Item2 + "}}");
	// Pad the template until it reaches the requested size.
	while (baseTemplate.Length <= sizeOfTemplate)
	{
		baseTemplate += model.Item2 + "\r\n";
	}
	MorestachioDocumentInfo template = null;
	TokenizerResult tokenizerResult = null;
	// Make sure this class is JIT'd before we start timing.
	(await Parser.ParseWithOptionsAsync(new ParserOptions("asdf"))).Create(new object()).Stream.Dispose();
	var totalTime = Stopwatch.StartNew();

	// Phase 1: tokenizing only. The last run's result feeds the parse phase below.
	var tokenizingTime = Stopwatch.StartNew();
	for (var i = 0; i < runs; i++)
	{
		var options = new ParserOptions(baseTemplate, () => Stream.Null);
		var tokenzierContext = new TokenzierContext(new List<int>(), options.CultureInfo);
		tokenizerResult = await Tokenizer.Tokenize(options, tokenzierContext);
	}
	tokenizingTime.Stop();

	// Phase 2: parsing the pre-tokenized result.
	var parseTime = Stopwatch.StartNew();
	for (var i = 0; i < runs; i++)
	{
		var options = new ParserOptions(baseTemplate, () => Stream.Null);
		template = new MorestachioDocumentInfo(options, Parser.Parse(tokenizerResult, options));
	}
	parseTime.Stop();

	// Warm the render path once before timing it.
	var tmp = await template.CreateAndStringifyAsync(model.Item1);

	// Phase 3: interpreted rendering.
	var renderTime = Stopwatch.StartNew();
	for (var i = 0; i < runs; i++)
	{
		var morestachioDocumentResult = await template.CreateAsync(model.Item1);
		morestachioDocumentResult.Stream.Dispose();
	}
	renderTime.Stop();
	// Note: totalTime deliberately covers only tokenize + parse + render, not the compile phases below.
	totalTime.Stop();

	// Phase 4: compiling the document.
	var compileTime = Stopwatch.StartNew();
	CompilationResult compilationResult = null;
	for (var i = 0; i < runs; i++)
	{
		compilationResult = template.Compile();
	}
	compileTime.Stop();

	// Phase 5: rendering via the compiled delegate.
	var compiledRenderTime = Stopwatch.StartNew();
	for (var i = 0; i < runs; i++)
	{
		var morestachioDocumentResult = await compilationResult(model.Item1, CancellationToken.None);
		morestachioDocumentResult.Stream.Dispose();
	}
	compiledRenderTime.Stop();

	var modelPerformanceCounterEntity = new PerformanceCounter.ModelPerformanceCounterEntity(variation)
	{
		// Average per-run wall time over the three interpreted phases.
		TimePerRun = new TimeSpan((tokenizingTime.ElapsedTicks / runs) +
			(parseTime.ElapsedTicks / runs) +
			(renderTime.ElapsedTicks / runs)),
		RunOver = runs,
		ModelDepth = modelDepth,
		SubstitutionCount = inserts,
		TemplateSize = sizeOfTemplate,
		TokenizingTime = tokenizingTime.Elapsed,
		ParseTime = parseTime.Elapsed,
		RenderTime = renderTime.Elapsed,
		TotalTime = totalTime.Elapsed,
		CompilerTime = compileTime.Elapsed,
		CompiledRenderTime = compiledRenderTime.Elapsed
	};
	PerformanceCounter.PerformanceCounters.Add(modelPerformanceCounterEntity);
	//Console.WriteLine(PerformanceCounter.ModelPerformanceCounterEntity.Header(" | "));
	//Console.WriteLine(modelPerformanceCounterEntity.PrintAsCsv(" | "));
}
/// <summary>
///	Parses a fully qualified expression and appends the result to <see cref="ExpressionParts"/>.
/// </summary>
/// <param name="expression">The expression text to parse.</param>
/// <returns>This builder instance, to allow fluent chaining.</returns>
public MorestachioExpressionBuilder Parse(string expression)
{
	var tokenzierContext = TokenzierContext.FromText(expression);
	var parsedExpression = MorestachioExpression.ParseFrom(expression, tokenzierContext, out _);
	ExpressionParts.Add(parsedExpression);
	return this;
}
/// <inheritdoc />
/// <remarks>
///	Lazily scans <see cref="Template"/> for "{{...}}" token matches, tracking newline positions,
///	string literals (so delimiters inside strings do not end a token) and nested {{!}} comment blocks.
///	NOTE(review): Template appears to be a custom container (it exposes Length() as a method), not a
///	plain string — confirm its indexer/Substring semantics before refactoring.
/// </remarks>
public virtual IEnumerable<TokenMatch> Matches(TokenzierContext context)
{
	// (start index of the string literal we are inside, or -1; plus its delimiter character)
	var isInString = new InStringInfo(-1, ' ');
	var stringEscape = false;
	var templateString = Template;
	var index = 0;
	var preLastIndex = 0;
	var charComparer = new CharComparer();
	// Rolling window of the most recent characters, used to detect prefix/suffix sequences.
	var lastChars = new MorestachioDefaultRollingArray();
	// Use the IndexOf method for fast lookup of the next token start.
	var prefixToken = new string(context._prefixToken);
	while ((index = templateString.IndexOf(prefixToken, index)) != -1)
	{
		index += context._prefixToken.Length;
		while (templateString[index] == context._prefixToken[0]) // skip any excess prefix characters, e.g. "{{{"
		{
			index++;
		}
		// Plain content between the previous token and this one.
		var preText = templateString.Substring(preLastIndex, index - context._prefixToken.Length - preLastIndex);
		var startOfToken = index;
		var tokenCount = 0;
		var nlsIdx = 0;
		// Record all newlines in the skipped content for later character-location humanization.
		while ((nlsIdx = preText.IndexOf('\n', nlsIdx)) != -1)
		{
			context.Lines.Add(nlsIdx + preLastIndex);
			nlsIdx += 1;
		}
		// Walk forward until the matching suffix ("}}") is found or the template ends.
		while (index < templateString.Length())
		{
			var c = templateString[index];
			lastChars.Add(c);
			tokenCount++;
			if (c == '\n')
			{
				context.Lines.Add(index);
			}
			if (isInString.Index != -1 && context.CommentIntend == 0)
			{
				// Inside a string literal: only the (unescaped) matching delimiter ends it.
				if (c == '\\')
				{
					stringEscape = true;
				}
				else if (stringEscape && c == isInString.Delimiter)
				{
					stringEscape = false;
				}
				else if (!stringEscape && c == isInString.Delimiter)
				{
					isInString = new InStringInfo(-1, ' ');
				}
			}
			else
			{
				if (lastChars.EndsWith(context._prefixToken)) // something like "content {{"
				{
					// A new prefix inside a token restarts the token; the consumed chars become content.
					preText = preText ?? string.Empty;
					preText += c;
					tokenCount = 0;
				}
				else if (lastChars.EndsWith(context.SuffixToken, charComparer)) // something like "zzddata }}"
				{
					var tokenLength = tokenCount - context.SuffixToken.Length;
					var tokenContent = templateString.Substring(startOfToken, tokenLength);
					// Tags with a leading '!' are comments and are (mostly) dropped on the floor.
					if (tokenContent.StartsWith("!"))
					{
						if (preText != string.Empty)
						{
							yield return(new TokenMatch(preLastIndex, preText, null, preText.Length, true));
						}
						if (tokenContent.Equals("!"))
						{
							// Block comment {{!}} ... {{/!}}; supports nesting via CommentIntend.
							context.CommentIntend++;
							while (context.CommentIntend > 0)
							{
								var nextCommentIndex = templateString.IndexOf("{{!}}", index);
								var nextCommentCloseIndex = templateString.IndexOf("{{/!}}", index);
								if (nextCommentCloseIndex == -1 && nextCommentIndex == -1)
								{
									// Unterminated comment: swallow the rest of the template.
									yield break;
								}
								// NOTE(review): this condition can never hold — "x < y && x == -1" with y != -1
								// contradicts the IndexOf contract ordering; nested {{!}} openers may be mishandled. Confirm.
								if (nextCommentIndex < nextCommentCloseIndex && nextCommentIndex == -1)
								{
									context.CommentIntend++;
									index = nextCommentIndex + "{{!}}".Length - 1;
								}
								else
								{
									context.CommentIntend--;
									var commentCloseIndex = nextCommentCloseIndex;
									if (context.TokenizeComments && context.CommentIntend == 0)
									{
										var comment = templateString.Substring(index, commentCloseIndex - index);
										yield return(new TokenMatch(index, comment, null, comment.Length, false));
									}
									index = commentCloseIndex + "{{/!}}".Length - 1;
								}
							}
						}
						else if (tokenContent.Equals("!?"))
						{
							// {{!?}} ... {{/!?}}: emit the enclosed text verbatim as content.
							var nextCommentCloseIndex = templateString.IndexOf("{{/!?}}", index);
							if (nextCommentCloseIndex == -1)
							{
								preText = templateString.Substring(index + 1);
								yield return(new TokenMatch(preLastIndex, preText, null, preText.Length, true));
								yield break;
							}
							preText = templateString.Substring(index + 1, nextCommentCloseIndex - index - 1);
							yield return(new TokenMatch(preLastIndex, preText, null, preText.Length, true));
							index = nextCommentCloseIndex + "{{/!?}}".Length - 1;
						}
						else if (tokenContent.StartsWith("!="))
						{
							// {{!=...}}: re-emit the tag itself (minus the "!=") as literal content.
							preText = prefixToken + tokenContent.Substring("!=".Length) + new string(context.SuffixToken);
							yield return(new TokenMatch(preLastIndex, preText, null, preText.Length, true));
						}
						else if (context.TokenizeComments)
						{
							yield return(new TokenMatch(index, tokenContent, null, tokenContent.Length, false));
						}
						// Intentionally do nothing otherwise: all remaining tags with a leading '!' are considered comments.
					}
					else
					{
						// Regular token: report it together with its preceding content.
						yield return(new TokenMatch(startOfToken - context._prefixToken.Length, tokenContent, preText, tokenLength + context.SuffixToken.Length + context._prefixToken.Length, false));
					}
					break;
				}
				else if (Tokenizer.IsStringDelimiter(c) && context.CommentIntend == 0)
				{
					isInString = new InStringInfo(index, c);
				}
			}
			index++;
		}
		if (isInString.Index != -1)
		{
			// Token ended while still inside a string literal.
			context.Errors.Add(new MorestachioSyntaxError(
				context
					.Location(isInString.Index)
					.AddWindow(new CharacterSnippedLocation(0, 5, templateString.Substring(isInString.Index - 5, 10))),
				"string",
				isInString.Delimiter.ToString(),
				isInString.Delimiter.ToString(),
				"Expected an closing string delimiter"));
		}
		preLastIndex = index + 1;
	}
	// Emit whatever trails after the last token as content.
	if (preLastIndex < templateString.Length())
	{
		var substring = templateString.Substring(preLastIndex);
		if (isInString.Index != -1)
		{
			context.Errors.Add(new MorestachioSyntaxError(
				context
					.Location(isInString.Index)
					.AddWindow(new CharacterSnippedLocation(0, 5, templateString.Substring(isInString.Index - 5, 10))),
				"string",
				isInString.Delimiter.ToString(),
				isInString.Delimiter.ToString(),
				"Expected an closing string delimiter"));
			yield return(new TokenMatch(preLastIndex, substring, null, substring.Length, false));
		}
		else
		{
			yield return(new TokenMatch(preLastIndex, substring, null, substring.Length, true));
		}
	}
}
/// <summary>
///	Verifies that a formatter chain with nested sub-expressions — "d.(f.('d'), \"t\").('pl', by.(by, 'f'))" —
///	round-trips through the expression parser unchanged, and that all three registered formatters
///	(int→format string, DateTime→formatted date, string→padded string) are invoked with the expected
///	arguments, producing the padded formatted date as final output.
/// </summary>
public void ParserCanFormatArgumentWithSubExpressionMultiple()
{
	var dt = DateTime.Now;
	var dictionary = new Dictionary<string, object>
	{
		{ "d", dt },
		{ "f", 19191919 },
		{ "by", 10L }
	};
	var exp = "d.(f.('d'), \"t\").('pl', by.(by, 'f'))";
	// The parsed expression must stringify back to exactly its source text.
	Assert.That(exp, Is.EqualTo(MorestachioExpression.ParseFrom(exp, TokenzierContext.FromText(exp), out _).ToString()));
	var parsingOptions = new ParserOptions("{{" + exp + "}}", null, DefaultEncoding);
	var format = "yyyy.mm";
	var formatterCalled = false;
	var formatter2Called = false;
	var formatter3Called = false;
	// Formatter for f.('d'): returns the date format string used downstream.
	parsingOptions.Formatters.AddSingle<int, string, string>((sourceValue, testString) =>
	{
		Assert.That(testString, Is.EqualTo("d"));
		formatterCalled = true;
		return(format);
	});
	// Formatter for by.(by, 'f'): echoes the long back as an int.
	parsingOptions.Formatters.AddSingle(new Func<long, long, string, int>(
		(sourceValue, testString, f) =>
		{
			Assert.That(testString, Is.EqualTo(sourceValue));
			Assert.That(f, Is.EqualTo("f"));
			formatterCalled = true;
			return((int)sourceValue);
		}));
	// Formatter for d.(<format>, "t"): formats the date with the string produced by the first formatter.
	parsingOptions.Formatters.AddSingle(new Func<DateTime, string, string, string>(
		(sourceValue, testString2, shouldBed) =>
		{
			Assert.That(shouldBed, Is.EqualTo("t"));
			Assert.That(testString2, Is.EqualTo(format));
			formatter2Called = true;
			return(sourceValue.ToString(testString2));
		}));
	// Formatter for .('pl', <int>): left-pads the formatted date.
	parsingOptions.Formatters.AddSingle(new Func<string, string, int, string>(
		(sourceValue, name, number) =>
		{
			Assert.That(sourceValue, Is.EqualTo(dt.ToString(format)));
			Assert.That(name, Is.EqualTo("pl"));
			Assert.That(number, Is.EqualTo(dictionary["by"]));
			formatter3Called = true;
			return(sourceValue.PadLeft(number));
		}));
	var extendedParseInformation = Parser.ParseWithOptions(parsingOptions);
	var andStringify = extendedParseInformation.CreateAndStringify(dictionary);
	Assert.That(formatterCalled, Is.True, "The formatter was not called");
	Assert.That(formatter2Called, Is.True, "The Date formatter was not called");
	Assert.That(formatter3Called, Is.True, "The Pad formatter was not called");
	Assert.That(andStringify,
		Is.EqualTo(dt.ToString(format).PadLeft(int.Parse(dictionary["by"].ToString()))));
}