/// <summary>
///     Profiling entry point: builds a large template and currently times only the
///     tokenizer phase. The parse/render/compile phases below are kept for reference
///     but are switched off.
/// </summary>
public async Task ProfileTest()
{
    string variation = "Template Size";
    int modelDepth = 5;
    int sizeOfTemplate = 100000;
    int inserts = 5;
    int runs = 1;

    var model = PerfHarness.ConstructModelAndPath(modelDepth);

    // Seed the template with five expression tags, then pad with plain path text
    // until the requested template size is reached.
    var baseTemplate = Enumerable.Range(1, 5)
        .Aggregate("", (seed, current) => seed += " {{" + model.Item2 + "}}");
    while (baseTemplate.Length <= sizeOfTemplate)
    {
        baseTemplate += model.Item2 + "\r\n";
    }

    MorestachioDocumentInfo template = null;
    TokenizerResult tokenizerResult = null;

    //make sure this class is JIT'd before we start timing.
    //await Parser.ParseWithOptionsAsync(new ParserOptions("asdf"));

    var totalTime = Stopwatch.StartNew();

    var tokenizingTime = Stopwatch.StartNew();
    for (var run = 0; run < runs; run++)
    {
        var parserOptions = new ParserOptions(baseTemplate, () => Stream.Null);
        var tokenzierContext = new TokenzierContext(new List<int>(), parserOptions.CultureInfo);
        tokenizerResult = await Tokenizer.Tokenize(parserOptions, tokenzierContext);
    }
    tokenizingTime.Stop();

    //var parseTime = Stopwatch.StartNew();
    //for (var i = 0; i < runs; i++)
    //{
    //	var options = new ParserOptions(baseTemplate, () => Stream.Null);
    //	template = new MorestachioDocumentInfo(options, Parser.Parse(tokenizerResult, options));
    //}
    //parseTime.Stop();

    //var tmp = await template.CreateAndStringifyAsync(model.Item1);

    //var renderTime = Stopwatch.StartNew();
    //for (var i = 0; i < runs; i++)
    //{
    //	var morestachioDocumentResult = await template.CreateAsync(model.Item1);
    //	morestachioDocumentResult.Stream.Dispose();
    //}
    //renderTime.Stop();
    //totalTime.Stop();

    //var compileTime = Stopwatch.StartNew();
    //CompilationResult compilationResult = null;
    //for (var i = 0; i < runs; i++)
    //{
    //	compilationResult = template.Compile();
    //}
    //compileTime.Stop();

    //var compiledRenderTime = Stopwatch.StartNew();
    //for (var i = 0; i < runs; i++)
    //{
    //	var morestachioDocumentResult = await compilationResult(model.Item1, CancellationToken.None);
    //	morestachioDocumentResult.Stream.Dispose();
    //}
    //compiledRenderTime.Stop();
}
/// <summary>
///     Tokenizes the tracker's raw input by repeatedly trying the registered
///     <c>Tokenizers</c> in order until <paramref name="untilIndex"/> (or the
///     tracker's stop index) is reached.
/// </summary>
/// <param name="tracker">Source of raw text, position state, errors and reset points.</param>
/// <param name="untilIndex">
///     End index for tokenization; 0 (the default) or any value past the tracker's
///     stop index is clamped to <c>tracker.StopIndex</c>.
/// </param>
/// <returns>A snapshot copy of the tokens collected by the tracker.</returns>
/// <exception cref="TokenizeException">
///     Thrown when every tokenizer failed at the current position, or when the
///     tracker could not rewind to the last valid reset point.
/// </exception>
public List<Token> Tokenize(ITracker tracker, int untilIndex = 0)
{
    // Last position known to be fully tokenized; used to rewind after a failed attempt.
    Guid newestValidResetPoint = tracker.GetResetPoint();
    index = tracker.Index;
    Level = 0; // index into Tokenizers: which candidate is currently being tried
    if (0 == untilIndex || tracker.StopIndex < untilIndex)
    {
        untilIndex = tracker.StopIndex;
    }
    while (index < untilIndex)
    {
        // Every tokenizer was tried and failed at this position: surface the most
        // specific error collected so far.
        if (Level >= Tokenizers.Count)
        {
            // NOTE(review): Reverse() makes the most recently added error win ties
            // of equal Specificity (OrderBy is a stable sort) — confirm intentional.
            Errors.Reverse();
            TokenizeError specific = (from e in Errors orderby e.Specificity descending select e).First();
            throw new TokenizeException(tracker.Raw, specific.Region, tracker.Last, specific.Message);
        }
        // Cheap pre-filter: the current tokenizer does not accept this start character.
        if (!Tokenizers[Level].Selector(tracker.Raw[index]))
        {
            AddError("unknown char: '" + tracker.Raw[index] + "'", 0, new TextRegion(index), Tokenizers[Level]);
            Level++;
            continue;
        }
        // Reset errors from last iteration in the tracker
        if (!tracker.ResetErrorsToLastResetPoint(newestValidResetPoint))
        {
            throw new TokenizeException(tracker.Raw, new TextRegion(0, 0), tracker.Last, "Could not reset to last resetpoint - that should not be possible.");
        }
        // Try next Tokenizer
        TokenizerResult result = Tokenizers[Level].Tokenize(tracker);
        if (result == TokenizerResult.Success)
        {
            // Commit: advance the reset point and restart with the first tokenizer.
            newestValidResetPoint = tracker.GetResetPoint();
            index = tracker.Index;
            Level = 0;
            Errors.Clear();
        }
        else
        {
            // Record the failure, rewind the tracker and try the next tokenizer.
            if (!tracker.HasErrors)
            {
                AddError(Tokenizers[Level] + ": Error!", 0, new TextRegion(index), Tokenizers[Level]);
            }
            else
            {
                Errors.AddRange(tracker.Errors);
            }
            tracker.Reset(newestValidResetPoint);
            Level++;
        }
    }
    return (tracker.Tokens.ToList());
}
/// <summary>
///     Parses the Tokens into a Document.
/// </summary>
/// <param name="tokenizerResult">The result of an Tokenizer.Tokenize call.</param>
/// <param name="options">The ParserOptions</param>
/// <returns>The root document of the parsed template.</returns>
public static IDocumentItem Parse(TokenizerResult tokenizerResult, ParserOptions options)
{
    //this is the scope id that determines a scope that is using let or alias variables
    int variableScope = 1;
    var getScope = new Func<int>(() => variableScope++);

    //instead of recursive calling the parse function we stack the current document
    var buildStack = new Stack<DocumentScope>();
    buildStack.Push(new DocumentScope(new MorestachioDocument(), () => 0));

    // Innermost scope (the stack is enumerated top-down) that carries a variable scope id.
    DocumentScope GetVariableScope()
    {
        return buildStack.FirstOrDefault(e => e.VariableScopeNumber != -1);
    }

    foreach (var currentToken in tokenizerResult.Tokens)
    {
        var currentDocumentItem = buildStack.Peek(); //get the latest document

        // Attaches a new block to the current document and makes it the innermost scope.
        void OpenBlock(IDocumentItem nested)
        {
            buildStack.Push(new DocumentScope(nested, getScope));
            currentDocumentItem.Document.Add(nested);
        }

        var type = currentToken.Type;
        if (type.Equals(TokenType.Content))
        {
            currentDocumentItem.Document.Add(new ContentDocumentItem(currentToken.Value)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.If))
        {
            OpenBlock(new IfExpressionScopeDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.IfNot))
        {
            OpenBlock(new IfNotExpressionScopeDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.Else))
        {
            OpenBlock(new ElseExpressionScopeDocumentItem()
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.CollectionOpen))
        {
            OpenBlock(new EachDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.WhileLoopOpen))
        {
            OpenBlock(new WhileLoopDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.DoLoopOpen))
        {
            OpenBlock(new DoLoopDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.ElementOpen))
        {
            OpenBlock(new ExpressionScopeDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.RepeatLoopOpen))
        {
            OpenBlock(new RepeatDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.InvertedElementOpen))
        {
            OpenBlock(new InvertedExpressionScopeDocumentItem(currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.CollectionClose)
                 || type.Equals(TokenType.ElementClose)
                 || type.Equals(TokenType.IfClose)
                 || type.Equals(TokenType.ElseClose)
                 || type.Equals(TokenType.WhileLoopClose)
                 || type.Equals(TokenType.DoLoopClose)
                 || type.Equals(TokenType.RepeatLoopClose))
        {
            // remove the last document from the stack and go back to the parents
            var closedScope = buildStack.Pop();
            if (closedScope.HasAlias) //are we in a alias then remove it
            {
                foreach (var localVariable in closedScope.LocalVariables)
                {
                    closedScope.Document.Add(new RemoveAliasDocumentItem(localVariable, closedScope.VariableScopeNumber));
                }
            }
        }
        else if (type.Equals(TokenType.EscapedSingleValue) || type.Equals(TokenType.UnescapedSingleValue))
        {
            currentDocumentItem.Document.Add(new PathDocumentItem(currentToken.MorestachioExpression,
                type.Equals(TokenType.EscapedSingleValue))
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.PartialDeclarationOpen))
        {
            // currently same named partials will override each other
            // to allow recursive calls of partials we first have to declare the partial and then load it as we would parse
            // -the partial as a whole and then add it to the list would lead to unknown calls of partials inside the partial
            var partialBody = new MorestachioDocument();
            buildStack.Push(new DocumentScope(partialBody, getScope));
            currentDocumentItem.Document.Add(new PartialDocumentItem(currentToken.Value, partialBody)
            {
                ExpressionStart = currentToken.TokenLocation
            });
        }
        else if (type.Equals(TokenType.PartialDeclarationClose))
        {
            buildStack.Pop();
        }
        else if (type.Equals(TokenType.RenderPartial))
        {
            currentDocumentItem.Document.Add(new RenderPartialDocumentItem(currentToken.Value, currentToken.MorestachioExpression)
            {
                ExpressionStart = currentToken.TokenLocation,
            });
        }
        else if (type.Equals(TokenType.Alias))
        {
            var scope = GetVariableScope();
            var aliasDocumentItem = new AliasDocumentItem(currentToken.Value, scope.VariableScopeNumber)
            {
                ExpressionStart = currentToken.TokenLocation
            };
            currentDocumentItem.Document.Add(aliasDocumentItem);
            currentDocumentItem.LocalVariables.Add(currentToken.Value);
        }
        else if (type.Equals(TokenType.VariableVar))
        {
            currentDocumentItem.Document.Add(new EvaluateVariableDocumentItem(currentToken.Value,
                currentToken.MorestachioExpression));
        }
        else if (type.Equals(TokenType.WriteLineBreak))
        {
            currentDocumentItem.Document.Add(new TextEditDocumentItem(new AppendLineBreakTextOperation()));
        }
        else if (type.Equals(TokenType.TrimLineBreak))
        {
            currentDocumentItem.Document.Add(new TextEditDocumentItem(new TrimLineBreakTextOperation()
            {
                LineBreaks = 1
            }));
        }
        else if (type.Equals(TokenType.TrimLineBreaks))
        {
            currentDocumentItem.Document.Add(new TextEditDocumentItem(new TrimLineBreakTextOperation()
            {
                LineBreaks = -1
            }));
        }
        else if (type.Equals(TokenType.TrimEverything))
        {
            currentDocumentItem.Document.Add(new TextEditDocumentItem(new TrimAllWhitespacesTextOperation()));
        }
        else if (type.Equals(TokenType.VariableLet))
        {
            // #let inside of any open scope binds to that scope; at root it uses scope id 0
            var scope = buildStack.Count > 1 ? GetVariableScope().VariableScopeNumber : 0;
            currentDocumentItem.Document.Add(new EvaluateVariableDocumentItem(currentToken.Value,
                currentToken.MorestachioExpression, scope));
            if (buildStack.Count > 1)
            {
                currentDocumentItem.LocalVariables.Add(currentToken.Value);
            }
        }
        else if (type.Equals(TokenType.Comment) || type.Equals(TokenType.BlockComment))
        {
            //just ignore this part and print nothing
        }
        else
        {
            // Unknown token type: give any registered custom provider a chance to handle it.
            var provider = options.CustomDocumentItemProviders.FirstOrDefault(e => e.ShouldParse(currentToken, options));
            var customDocumentItem = provider?.Parse(currentToken, options, buildStack, getScope);
            if (customDocumentItem != null)
            {
                currentDocumentItem.Document.Add(customDocumentItem);
            }
        }
    }

    if (buildStack.Count != 1)
    {
        //var invalidScopedElements = buildStack
        //throw new MorestachioSyntaxError(new Tokenizer.CharacterLocation(){Character = }, );
        throw new InvalidOperationException(
            "There is an Error with the Parser. The Parser still contains unscoped builds: " +
            buildStack.Select(e => e.Document.GetType().Name).Aggregate((e, f) => e + ", " + f));
    }
    return buildStack.Pop().Document;
}
/// <summary>
///     Parses the Tokens into a Document.
/// </summary>
/// <param name="tokenizerResult">The result of an Tokenizer.Tokenize call.</param>
/// <param name="options">The ParserOptions</param>
/// <returns></returns>
public static IDocumentItem Parse(TokenizerResult tokenizerResult, ParserOptions options)
{
    var buildStack = new Stack<DocumentScope>();
    //this is the scope id that determines a scope that is using let or alias variables
    int variableScope = 1;
    var getScope = new Func<int>(() => variableScope++);
    //instead of recursive calling the parse function we stack the current document
    buildStack.Push(new DocumentScope(new MorestachioDocument(), () => 0));
    // Trim instructions produced by Trim* tokens; they are buffered here and
    // flushed into the NEXT content document item.
    var textEdits = new List<TextEditDocumentItem>();

    // Innermost scope (the stack enumerates top-down) that owns a variable scope id.
    DocumentScope GetVariableScope()
    {
        return (buildStack.FirstOrDefault(e => e.VariableScopeNumber != -1));
    }

    // Options flagged as persistent are copied into the produced document items;
    // returns null when there are none.
    IEnumerable<ITokenOption> GetPublicOptions(TokenPair token)
    {
        var publicOptions = token.TokenOptions?.Where(e => e.Persistent).ToArray();
        return (publicOptions?.Length > 0 ? publicOptions : null);
    }

    // Adds the child when the target can hold children; silently no-ops otherwise.
    bool TryAdd(IDocumentItem document, IDocumentItem child)
    {
        if (document is IBlockDocumentItem block)
        {
            block.Add(child);
            return (true);
        }
        return (false);
    }

    // Pops the innermost scope: transfers the closing token's public options onto
    // the block and emits remove-alias items for variables declared inside it.
    void CloseScope(Stack<DocumentScope> documentScopes, TokenPair currentToken, DocumentScope currentDocumentItem)
    {
        DocumentScope scope = documentScopes.Peek();
        if (!(scope.Document is IBlockDocumentItem blockDocument))
        {
            throw new InvalidOperationException(
                $"Closing an token '{currentToken.Type}' at '{currentToken.TokenLocation}'" +
                $" that is not of type '{typeof(IBlockDocumentItem)}' is not possible.");
        }
        blockDocument.BlockClosingOptions = GetPublicOptions(currentToken);
        if (scope.HasAlias) //are we in a alias then remove it
        {
            foreach (var scopeLocalVariable in scope.LocalVariables)
            {
                TryAdd(currentDocumentItem.Document,
                    new RemoveAliasDocumentItem(currentToken.TokenLocation, scopeLocalVariable,
                        scope.VariableScopeNumber, null));
            }
        }
        // remove the last document from the stack and go back to the parents
        documentScopes.Pop();
    }

    foreach (var currentToken in tokenizerResult)
    {
        var currentDocumentItem = buildStack.Peek(); //get the latest document
        if (currentToken.Type.Equals(TokenType.Content))
        {
            var contentDocumentItem = new ContentDocumentItem(currentToken.TokenLocation, currentToken.Value,
                GetPublicOptions(currentToken));
            TryAdd(currentDocumentItem.Document, contentDocumentItem);
            // The neighbouring tokens may carry embedded trim instructions that
            // apply to this content block.
            if (tokenizerResult.Previous.HasValue)
            {
                if (tokenizerResult.Previous.Value.FindOption<bool>("Embedded.TrimTailing"))
                {
                    TryAdd(contentDocumentItem, new TextEditDocumentItem(tokenizerResult.Previous.Value.TokenLocation,
                        new TrimLineBreakTextOperation()
                        {
                            LineBreaks = 0,
                            LineBreakTrimDirection = LineBreakTrimDirection.Begin
                        }, EmbeddedInstructionOrigin.Previous, GetPublicOptions(currentToken)));
                }
                if (tokenizerResult.Previous.Value.FindOption<bool>("Embedded.TrimAllTailing"))
                {
                    TryAdd(contentDocumentItem, new TextEditDocumentItem(tokenizerResult.Previous.Value.TokenLocation,
                        new TrimLineBreakTextOperation()
                        {
                            LineBreaks = -1,
                            LineBreakTrimDirection = LineBreakTrimDirection.Begin
                        }, EmbeddedInstructionOrigin.Previous, GetPublicOptions(currentToken)));
                }
            }
            if (tokenizerResult.Next.HasValue)
            {
                if (tokenizerResult.Next.Value.FindOption<bool>("Embedded.TrimLeading"))
                {
                    TryAdd(contentDocumentItem, new TextEditDocumentItem(tokenizerResult.Next.Value.TokenLocation,
                        new TrimLineBreakTextOperation()
                        {
                            LineBreaks = 0,
                            LineBreakTrimDirection = LineBreakTrimDirection.End
                        }, EmbeddedInstructionOrigin.Next, GetPublicOptions(currentToken)));
                }
                if (tokenizerResult.Next.Value.FindOption<bool>("Embedded.TrimAllLeading"))
                {
                    TryAdd(contentDocumentItem, new TextEditDocumentItem(tokenizerResult.Next.Value.TokenLocation,
                        new TrimLineBreakTextOperation()
                        {
                            LineBreaks = -1,
                            LineBreakTrimDirection = LineBreakTrimDirection.End
                        }, EmbeddedInstructionOrigin.Next, GetPublicOptions(currentToken)));
                }
            }
            // Flush trim instructions queued by earlier Trim* tokens.
            foreach (var textEditDocumentItem in textEdits)
            {
                TryAdd(contentDocumentItem, textEditDocumentItem);
            }
            textEdits.Clear();
        }
        else if (currentToken.Type.Equals(TokenType.If))
        {
            var nestedDocument = new IfExpressionScopeDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken), false);
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.IfNot))
        {
            // IfNot reuses the if-document with the inverted flag set.
            var nestedDocument = new IfExpressionScopeDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken), true);
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.Else))
        {
            var nestedDocument = new ElseExpressionScopeDocumentItem(currentToken.TokenLocation,
                GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            // NOTE(review): the else block is only attached when its parent is an
            // if-block; otherwise it remains on the stack without being added.
            if (currentDocumentItem.Document is IfExpressionScopeDocumentItem ifDocument)
            {
                ifDocument.Add(nestedDocument);
            }
        }
        else if (currentToken.Type.Equals(TokenType.ElseIf))
        {
            var nestedDocument = new ElseIfExpressionScopeDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            var documentScope = new DocumentScope(nestedDocument, getScope);
            buildStack.Push(documentScope);
            if (currentDocumentItem.Document is IfExpressionScopeDocumentItem ifDocument)
            {
                ifDocument.Add(nestedDocument);
            }
            //AddIfDocument(currentToken, documentScope);
        }
        else if (currentToken.Type.Equals(TokenType.CollectionOpen))
        {
            var nestedDocument = new EachDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.SwitchOpen))
        {
            var nestedDocument = new SwitchDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, currentToken.FindOption<bool>("ScopeTo"),
                GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.SwitchCaseOpen))
        {
            var nestedDocument = new SwitchCaseDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.SwitchDefaultOpen))
        {
            var nestedDocument = new SwitchDefaultDocumentItem(currentToken.TokenLocation,
                GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.WhileLoopOpen))
        {
            var nestedDocument = new WhileLoopDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.DoLoopOpen))
        {
            var nestedDocument = new DoLoopDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.ElementOpen))
        {
            var nestedDocument = new ExpressionScopeDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.RepeatLoopOpen))
        {
            var nestedDocument = new RepeatDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.InvertedElementOpen))
        {
            var nestedDocument = new InvertedExpressionScopeDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression, GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.CollectionClose) ||
                 currentToken.Type.Equals(TokenType.ElementClose) ||
                 currentToken.Type.Equals(TokenType.IfClose) ||
                 currentToken.Type.Equals(TokenType.ElseClose) ||
                 currentToken.Type.Equals(TokenType.ElseIfClose) ||
                 currentToken.Type.Equals(TokenType.WhileLoopClose) ||
                 currentToken.Type.Equals(TokenType.DoLoopClose) ||
                 currentToken.Type.Equals(TokenType.RepeatLoopClose) ||
                 currentToken.Type.Equals(TokenType.SwitchCaseClose) ||
                 currentToken.Type.Equals(TokenType.SwitchDefaultClose) ||
                 currentToken.Type.Equals(TokenType.SwitchClose))
        {
            CloseScope(buildStack, currentToken, currentDocumentItem);
        }
        else if (currentToken.Type.Equals(TokenType.EscapedSingleValue) ||
                 currentToken.Type.Equals(TokenType.UnescapedSingleValue))
        {
            var nestedDocument = new PathDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression,
                currentToken.Type.Equals(TokenType.EscapedSingleValue),
                GetPublicOptions(currentToken));
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.PartialDeclarationOpen))
        {
            // currently same named partials will override each other
            // to allow recursive calls of partials we first have to declare the partial and then load it as we would parse
            // -the partial as a whole and then add it to the list would lead to unknown calls of partials inside the partial
            var partialDocumentItem = new PartialDocumentItem(currentToken.TokenLocation,
                currentToken.Value,
                GetPublicOptions(currentToken));
            buildStack.Push(new DocumentScope(partialDocumentItem, getScope));
            TryAdd(currentDocumentItem.Document, partialDocumentItem);
        }
        else if (currentToken.Type.Equals(TokenType.PartialDeclarationClose))
        {
            CloseScope(buildStack, currentToken, currentDocumentItem);
            //buildStack.Pop();
        }
        else if (currentToken.Type.Equals(TokenType.RenderPartial))
        {
            TryAdd(currentDocumentItem.Document, new RenderPartialDocumentItem(currentToken.TokenLocation,
                currentToken.Value,
                currentToken.MorestachioExpression,
                GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.ImportPartial))
        {
            TryAdd(currentDocumentItem.Document, new ImportPartialDocumentItem(currentToken.TokenLocation,
                currentToken.MorestachioExpression,
                currentToken.FindOption<IMorestachioExpression>("Context"),
                GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.IsolationScopeOpen))
        {
            var nestedDocument = new IsolationScopeDocumentItem(currentToken.TokenLocation,
                currentToken.FindOption<IsolationOptions>("IsolationType"),
                currentToken.FindOption<IMorestachioExpression>("IsolationScopeArg"),
                GetPublicOptions(currentToken));
            TryAdd(currentDocumentItem.Document, nestedDocument);
            buildStack.Push(new DocumentScope(nestedDocument, getScope));
        }
        else if (currentToken.Type.Equals(TokenType.IsolationScopeClose))
        {
            CloseScope(buildStack, currentToken, currentDocumentItem);
        }
        else if (currentToken.Type.Equals(TokenType.Alias))
        {
            var scope = GetVariableScope();
            var nestedDocument = new AliasDocumentItem(currentToken.TokenLocation,
                currentToken.Value,
                scope.VariableScopeNumber,
                GetPublicOptions(currentToken));
            TryAdd(currentDocumentItem.Document, nestedDocument);
            currentDocumentItem.LocalVariables.Add(currentToken.Value);
        }
        else if (currentToken.Type.Equals(TokenType.VariableVar))
        {
            EvaluateVariableDocumentItem nestedDocument;
            // #var writes to the global scope unless an enclosing isolation scope
            // declares variable isolation; then the variable is bound to that scope.
            var isolationParent = buildStack.FirstOrDefault(e => e.Document is IsolationScopeDocumentItem doc &&
                doc.Isolation.HasFlag(IsolationOptions.VariableIsolation));
            if (isolationParent != null)
            {
                nestedDocument = new EvaluateVariableDocumentItem(currentToken.TokenLocation,
                    currentToken.Value,
                    currentToken.MorestachioExpression,
                    isolationParent.VariableScopeNumber,
                    GetPublicOptions(currentToken));
                isolationParent.LocalVariables.Add(currentToken.Value);
            }
            else
            {
                nestedDocument = new EvaluateVariableDocumentItem(currentToken.TokenLocation,
                    currentToken.Value,
                    currentToken.MorestachioExpression,
                    GetPublicOptions(currentToken));
            }
            TryAdd(currentDocumentItem.Document, nestedDocument);
        }
        else if (currentToken.Type.Equals(TokenType.VariableLet))
        {
            // #let at the root gets scope id 0; inside any open block it binds to
            // the innermost variable scope.
            var scope = 0;
            if (buildStack.Count > 1)
            {
                scope = GetVariableScope()
                    .VariableScopeNumber;
            }
            var nestedDocument = new EvaluateLetVariableDocumentItem(currentToken.TokenLocation,
                currentToken.Value,
                currentToken.MorestachioExpression,
                scope,
                GetPublicOptions(currentToken));
            TryAdd(currentDocumentItem.Document, nestedDocument);
            if (buildStack.Count > 1)
            {
                currentDocumentItem.LocalVariables.Add(currentToken.Value);
            }
        }
        else if (currentToken.Type.Equals(TokenType.WriteLineBreak))
        {
            TryAdd(currentDocumentItem.Document, new TextEditDocumentItem(currentToken.TokenLocation,
                new AppendLineBreakTextOperation(),
                currentToken.IsEmbeddedToken, GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.TrimLineBreak))
        {
            textEdits.Add(new TextEditDocumentItem(currentToken.TokenLocation,
                new TrimLineBreakTextOperation()
                {
                    LineBreaks = 1,
                    LineBreakTrimDirection = LineBreakTrimDirection.Begin
                }, currentToken.IsEmbeddedToken, GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.TrimLineBreaks))
        {
            textEdits.Add(new TextEditDocumentItem(currentToken.TokenLocation,
                new TrimLineBreakTextOperation()
                {
                    LineBreaks = currentToken.FindOption<bool>("All") ? -1 : 0,
                    LineBreakTrimDirection = LineBreakTrimDirection.Begin
                }, currentToken.IsEmbeddedToken, GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.TrimPrependedLineBreaks))
        {
            textEdits.Add(new TextEditDocumentItem(currentToken.TokenLocation,
                new TrimLineBreakTextOperation()
                {
                    LineBreaks = currentToken.FindOption<bool>("All") ? -1 : 0,
                    LineBreakTrimDirection = LineBreakTrimDirection.End
                }, currentToken.IsEmbeddedToken, GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.TrimEverything))
        {
            textEdits.Add(new TextEditDocumentItem(currentToken.TokenLocation,
                new TrimAllWhitespacesTextOperation(),
                currentToken.IsEmbeddedToken, GetPublicOptions(currentToken)));
        }
        else if (currentToken.Type.Equals(TokenType.Comment) || currentToken.Type.Equals(TokenType.BlockComment))
        {
            //just ignore this part and print nothing
            if (options.TokenizeComments)
            {
                TryAdd(currentDocumentItem.Document, new CommentDocumentItem(currentToken.TokenLocation,
                    currentToken.Value,
                    GetPublicOptions(currentToken),
                    currentToken.Type.Equals(TokenType.BlockComment)));
            }
        }
        else
        {
            // Unknown token type: give any registered custom provider a chance to handle it.
            var tokenOptions = GetPublicOptions(currentToken);
            var customDocumentItemProvider =
                options.CustomDocumentItemProviders.FindTokenProvider(currentToken, options, tokenOptions);
            var nestedDocument = customDocumentItemProvider?.Parse(currentToken, options, buildStack, getScope, tokenOptions);
            if (nestedDocument != null)
            {
                TryAdd(currentDocumentItem.Document, nestedDocument);
            }
        }
    }
    if (buildStack.Count != 1)
    {
        //var invalidScopedElements = buildStack
        //throw new MorestachioSyntaxError(new Tokenizer.CharacterLocation(){Character = }, );
        throw new InvalidOperationException(
            "There is an Error with the Parser. The Parser still contains unscoped builds: " +
            buildStack.Select(e => e.Document.GetType().Name).Aggregate((e, f) => e + ", " + f));
    }
    return (buildStack.Pop().Document);
}
/// <summary>
///     Runs the full template pipeline (tokenize, parse, render, compile, compiled
///     render) <paramref name="runs"/> times each and records the timings in the
///     shared performance counter list.
/// </summary>
public async Task TestRuns(string variation, int modelDepth, int sizeOfTemplate, int inserts, int runs)
{
    var model = ConstructModelAndPath(modelDepth);

    // Seed the template with five expression tags, then pad with plain path text
    // until the requested template size is reached.
    var baseTemplate = Enumerable.Range(1, 5)
        .Aggregate("", (seed, current) => seed += " {{" + model.Item2 + "}}");
    while (baseTemplate.Length <= sizeOfTemplate)
    {
        baseTemplate += model.Item2 + "\r\n";
    }

    MorestachioDocumentInfo template = null;
    TokenizerResult tokenizerResult = null;

    //make sure this class is JIT'd before we start timing.
    (await Parser.ParseWithOptionsAsync(new ParserOptions("asdf"))).Create(new object()).Stream.Dispose();

    var totalTime = Stopwatch.StartNew();

    var tokenizingTime = Stopwatch.StartNew();
    for (var run = 0; run < runs; run++)
    {
        var parserOptions = new ParserOptions(baseTemplate, () => Stream.Null);
        var tokenzierContext = new TokenzierContext(new List<int>(), parserOptions.CultureInfo);
        tokenizerResult = await Tokenizer.Tokenize(parserOptions, tokenzierContext);
    }
    tokenizingTime.Stop();

    var parseTime = Stopwatch.StartNew();
    for (var run = 0; run < runs; run++)
    {
        var parserOptions = new ParserOptions(baseTemplate, () => Stream.Null);
        template = new MorestachioDocumentInfo(parserOptions, Parser.Parse(tokenizerResult, parserOptions));
    }
    parseTime.Stop();

    // Warm the render path once before timing it.
    var tmp = await template.CreateAndStringifyAsync(model.Item1);

    var renderTime = Stopwatch.StartNew();
    for (var run = 0; run < runs; run++)
    {
        var morestachioDocumentResult = await template.CreateAsync(model.Item1);
        morestachioDocumentResult.Stream.Dispose();
    }
    renderTime.Stop();
    totalTime.Stop();

    var compileTime = Stopwatch.StartNew();
    CompilationResult compilationResult = null;
    for (var run = 0; run < runs; run++)
    {
        compilationResult = template.Compile();
    }
    compileTime.Stop();

    var compiledRenderTime = Stopwatch.StartNew();
    for (var run = 0; run < runs; run++)
    {
        var morestachioDocumentResult = await compilationResult(model.Item1, CancellationToken.None);
        morestachioDocumentResult.Stream.Dispose();
    }
    compiledRenderTime.Stop();

    var modelPerformanceCounterEntity = new PerformanceCounter.ModelPerformanceCounterEntity(variation)
    {
        // Average over the three interpreted phases; the compile timings are
        // reported through their own properties.
        TimePerRun = new TimeSpan((tokenizingTime.ElapsedTicks / runs) +
                                  (parseTime.ElapsedTicks / runs) +
                                  (renderTime.ElapsedTicks / runs)),
        RunOver = runs,
        ModelDepth = modelDepth,
        SubstitutionCount = inserts,
        TemplateSize = sizeOfTemplate,
        TokenizingTime = tokenizingTime.Elapsed,
        ParseTime = parseTime.Elapsed,
        RenderTime = renderTime.Elapsed,
        TotalTime = totalTime.Elapsed,
        CompilerTime = compileTime.Elapsed,
        CompiledRenderTime = compiledRenderTime.Elapsed
    };
    PerformanceCounter.PerformanceCounters.Add(modelPerformanceCounterEntity);
    //Console.WriteLine(PerformanceCounter.ModelPerformanceCounterEntity.Header(" | "));
    //Console.WriteLine(modelPerformanceCounterEntity.PrintAsCsv(" | "));
}
/// <summary>
///     Tokenizes the input string.
/// </summary>
/// <param name="input">String to tokenize.</param>
/// <param name="tokens">List of tokens; created on demand when null.</param>
/// <param name="error">Error description, or null when tokenization succeeded.</param>
/// <returns>Result of tokenization.</returns>
public static TokenizerResult Tokenize(string input, ref IList<Token> tokens, out string error)
{
    if (input is null)
    {
        error = "Input is null.";
        return TokenizerResult.Error;
    }
    tokens = tokens ?? new List<Token>();

    int position = 0;
    // Continue counting scopes from whatever the caller-supplied tokens already opened.
    int scopeLevel = GetScopeLevel(tokens);
    string failure = null;
    TokenizerResult outcome = TokenizerResult.Ok;

    while (position < input.Length)
    {
        var advanced = false;
        foreach (var definition in _defs)
        {
            int length = definition.Match(input, position);
            if (length <= 0)
            {
                continue;
            }
            // Ignorable definitions (e.g. whitespace) consume input without
            // producing a token.
            if (!definition.Ignore)
            {
                var value = definition.Process(input.Substring(position, length));
                var token = new Token(definition.GetTokenType(value), value, position);
                if (token == TokenType.BracketL)
                {
                    ++scopeLevel;
                }
                else if (token == TokenType.BracketR && --scopeLevel < 0)
                {
                    error = "Unexpected ')' without previous active scopes.";
                    tokens.Clear();
                    return TokenizerResult.Error;
                }
                tokens.Add(token);
            }
            position += length;
            advanced = true;
            break;
        }
        // No definition consumed anything: unknown character, abort.
        if (!advanced)
        {
            var what = position >= input.Length ? "end of input" : "character";
            failure = $"Unexpected {what} at offset {position}.";
            outcome = TokenizerResult.Error;
            tokens.Clear(); // Clear on error.
            scopeLevel = 0;
            break;
        }
    }

    // Do we need more tokens to produce a statement?
    if (scopeLevel > 0)
    {
        outcome = TokenizerResult.NeedsMore;
    }
    error = failure;
    return outcome;
}