/// <summary>
/// Synchronously parses a T4 template file into a ParsedDocument,
/// converting template parser errors into editor Error entries.
/// </summary>
public override ParsedDocument Parse (bool storeAst, string fileName, TextReader content, Project project = null)
{
	// Tokenise and parse the raw template text; parser failures are
	// recorded on the template rather than propagated.
	var template = new ParsedTemplate (fileName);
	try {
		template.ParseWithoutIncludes (new Tokeniser (fileName, content.ReadToEnd ()));
	} catch (ParserException ex) {
		template.LogError (ex.Message, ex.Location);
	}

	var doc = new T4ParsedDocument (fileName, template.RawSegments);
	doc.Flags |= ParsedDocumentFlags.NonSerializable;

	// Surface the template's CodeDom compiler errors to the editor.
	foreach (System.CodeDom.Compiler.CompilerError err in template.Errors) {
		var errorType = err.IsWarning ? ErrorType.Warning : ErrorType.Error;
		doc.Errors.Add (new Error (errorType, err.ErrorText, err.Line, err.Column));
	}

	return doc;
}
/// <summary>
/// Async-shaped parse entry point: parses the template content (without
/// resolving includes) and returns a completed task with the document.
/// </summary>
public override System.Threading.Tasks.Task<ParsedDocument> Parse (ParseOptions parseOptions, System.Threading.CancellationToken cancellationToken)
{
	var fileName = parseOptions.FileName;
	var template = new ParsedTemplate (fileName);
	var readOnlyDoc = TextEditorFactory.CreateNewReadonlyDocument (parseOptions.Content, fileName);

	// Tokenise the snapshot text; parser failures become template errors.
	try {
		template.ParseWithoutIncludes (new Tokeniser (fileName, readOnlyDoc.Text));
	} catch (ParserException ex) {
		template.LogError (ex.Message, ex.Location);
	}

	// Translate the template's CodeDom compiler errors into editor errors.
	var errors = new List<Error> ();
	foreach (System.CodeDom.Compiler.CompilerError err in template.Errors) {
		var errorType = err.IsWarning ? ErrorType.Warning : ErrorType.Error;
		errors.Add (new Error (errorType, err.ErrorText, new DocumentLocation (err.Line, err.Column)));
	}

	var doc = new T4ParsedDocument (fileName, template.RawSegments, errors);
	doc.Flags |= ParsedDocumentFlags.NonSerializable;

	// Parsing happened synchronously above, so wrap the result directly.
	return System.Threading.Tasks.Task.FromResult ((ParsedDocument)doc);
}
// Core T4 template parser loop. Walks the tokeniser's state machine, turning
// each token run into a segment (content, code block, expression, helper) or
// a directive. When parseIncludes is true, <#@ include #> directives are not
// added to the segment list but are resolved recursively via Import(). When
// isImport is true this call is parsing an included file, so helper segments
// are diverted into importedHelperSegments and appended only by the
// outermost (non-import) call.
void Parse(ITextTemplatingEngineHost host, Tokeniser tokeniser, bool parseIncludes, bool isImport)
{
	// 'skip' is set when the inner directive loop has already advanced the
	// tokeniser to a state the outer loop must handle without re-advancing.
	bool skip = false;
	bool addToImportedHelpers = false;
	while ((skip || tokeniser.Advance()) && tokeniser.State != State.EOF) {
		skip = false;
		ISegment seg = null;
		switch (tokeniser.State) {
		case State.Block:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Block, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Content:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Content, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Expression:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Expression, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Helper:
			// Helpers found while parsing an imported file are collected
			// separately so they end up after the importing file's segments.
			addToImportedHelpers = isImport;
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Helper, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Directive:
			Directive directive = null;
			string attName = null;
			// Inner loop consumes the directive name followed by attribute
			// name/value pairs until the tokeniser leaves the directive states.
			while (!skip && tokeniser.Advance()) {
				switch (tokeniser.State) {
				case State.DirectiveName:
					if (directive == null) {
						directive = new Directive(tokeniser.Value, tokeniser.Location);
						directive.TagStartLocation = tokeniser.TagStartLocation;
						// Include directives are resolved (below) instead of
						// being kept as segments when include processing is on.
						if (!parseIncludes || !string.Equals(directive.Name, "include", StringComparison.OrdinalIgnoreCase)) {
							segments.Add(directive);
						}
					} else {
						// Subsequent DirectiveName tokens are attribute names.
						attName = tokeniser.Value;
					}
					break;
				case State.DirectiveValue:
					if (attName != null && directive != null) {
						directive.Attributes[attName] = tokeniser.Value;
					} else {
						LogError("Directive value without name", tokeniser.Location);
					}
					attName = null;
					break;
				case State.Directive:
					if (directive != null) {
						directive.EndLocation = tokeniser.TagEndLocation;
					}
					break;
				default:
					// Tokeniser moved past the directive: let the outer loop
					// process this state without advancing again.
					skip = true;
					break;
				}
			}
			// Null check guards against a malformed/empty directive tag.
			if (parseIncludes && directive != null && string.Equals(directive.Name, "include", StringComparison.OrdinalIgnoreCase)) {
				// Resolve the include relative to the directory of the file
				// currently being tokenised.
				Import(host, directive,
					Path.GetDirectoryName(tokeniser.Location.FileName));
			}
			break;
		default:
			throw new InvalidOperationException();
		}
		if (seg != null) {
			seg.TagStartLocation = tokeniser.TagStartLocation;
			seg.EndLocation = tokeniser.TagEndLocation;
			if (addToImportedHelpers) {
				importedHelperSegments.Add(seg);
			} else {
				segments.Add(seg);
			}
		}
	}
	// Only the outermost call flushes the helpers gathered from imports.
	if (!isImport) {
		AppendAnyImportedHelperSegments();
	}
}
/// <summary>
/// Convenience overload: parses without treating the input as an import,
/// so helper segments are kept inline with the template's own segments.
/// </summary>
void Parse (ITextTemplatingEngineHost host, Tokeniser tokeniser, bool parseIncludes)
{
	Parse (host, tokeniser, parseIncludes, isImport: false);
}
/// <summary>
/// Parses the template without resolving include directives; no host is
/// needed because Import() is never invoked on this path.
/// </summary>
public void ParseWithoutIncludes (Tokeniser tokeniser)
{
	Parse (null, tokeniser, parseIncludes: false);
}
/// <summary>
/// Parses the template with include resolution enabled; included files are
/// located through the supplied templating host.
/// </summary>
public void Parse (ITextTemplatingEngineHost host, Tokeniser tokeniser)
{
	Parse (host, tokeniser, parseIncludes: true);
}
// Template parser loop (lower-casing variant: directive and attribute names
// are stored lower-cased). Walks the tokeniser's state machine, turning each
// token run into a segment or a directive; when parseIncludes is true,
// include directives are resolved via Import() instead of being kept.
void Parse(ITextTemplatingEngineHost host, Tokeniser tokeniser, bool parseIncludes)
{
	// 'skip' is set when the inner directive loop has already advanced the
	// tokeniser to a state the outer loop must handle without re-advancing.
	bool skip = false;
	while ((skip || tokeniser.Advance()) && tokeniser.State != State.EOF) {
		skip = false;
		ISegment seg = null;
		switch (tokeniser.State) {
		case State.Block:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Block, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Content:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Content, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Expression:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Expression, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Helper:
			if (!String.IsNullOrEmpty(tokeniser.Value)) {
				seg = new TemplateSegment(SegmentType.Helper, tokeniser.Value, tokeniser.Location);
			}
			break;
		case State.Directive:
			Directive directive = null;
			string attName = null;
			// Inner loop consumes the directive name followed by attribute
			// name/value pairs until the tokeniser leaves the directive states.
			while (!skip && tokeniser.Advance()) {
				switch (tokeniser.State) {
				case State.DirectiveName:
					if (directive == null) {
						directive = new Directive(tokeniser.Value.ToLower(), tokeniser.Location);
						directive.TagStartLocation = tokeniser.TagStartLocation;
						// Include directives are resolved (below) instead of
						// being kept as segments when include processing is on.
						if (!parseIncludes || directive.Name != "include") {
							segments.Add(directive);
						}
					} else {
						// Subsequent DirectiveName tokens are attribute names.
						attName = tokeniser.Value;
					}
					break;
				case State.DirectiveValue:
					if (attName != null && directive != null) {
						directive.Attributes[attName.ToLower()] = tokeniser.Value;
					} else {
						LogError("Directive value without name", tokeniser.Location);
					}
					attName = null;
					break;
				case State.Directive:
					if (directive != null) {
						directive.EndLocation = tokeniser.TagEndLocation;
					}
					break;
				default:
					// Tokeniser moved past the directive: let the outer loop
					// process this state without advancing again.
					skip = true;
					break;
				}
			}
			// FIX: a malformed/empty directive tag leaves 'directive' null
			// (the inner loop never reaches State.DirectiveName), which
			// previously threw a NullReferenceException here. Guard matches
			// the sibling Parse(host, tokeniser, parseIncludes, isImport).
			if (parseIncludes && directive != null && directive.Name == "include") {
				Import(host, directive);
			}
			break;
		default:
			throw new InvalidOperationException();
		}
		if (seg != null) {
			seg.TagStartLocation = tokeniser.TagStartLocation;
			seg.EndLocation = tokeniser.TagEndLocation;
			segments.Add(seg);
		}
	}
}