/// <summary>Block token representing an embedded Azure video.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="videoId">The identifier of the video to embed.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public AzureVideoBlockToken(IMarkdownRule rule, IMarkdownContext context, string videoId, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    VideoId = videoId;
}
/// <summary>Block token for a DFM section, carrying its raw attribute string.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="attributes">Raw attribute text captured from the section marker.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public DfmSectionBlockToken(IMarkdownRule rule, IMarkdownContext context, string attributes, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Attributes = attributes;
}
/// <summary>Block token for a DFM video element.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="link">The video link captured from the markdown.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public DfmVideoBlockToken(IMarkdownRule rule, IMarkdownContext context, string link, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Link = link;
}
/// <summary>
/// Runs the full markup pipeline for one markdown source: tokenize, group text
/// tokens into paragraphs, resolve two-phase tokens, fix heading ids, apply the
/// customized rewriter, validate, and render. The phase order is significant.
/// </summary>
/// <param name="sourceInfo">The markdown source (text, file, line) to process.</param>
/// <param name="context">Optional context to switch the parser into; null keeps the current context.</param>
/// <returns>The rendered output accumulated in a <see cref="StringBuffer"/>.</returns>
public StringBuffer Mark(SourceInfo sourceInfo, IMarkdownContext context)
{
    var result = StringBuffer.Empty;
    var parser = Parser;
    if (context != null)
    {
        parser.SwitchContext(context);
    }
    // Preprocess once and keep the rewritten SourceInfo so paragraph grouping
    // below sees the same text the tokenizer saw.
    var preprocessedSourceInfo = sourceInfo.Copy(Preprocess(sourceInfo.Markdown));
    var tokens = parser.Tokenize(preprocessedSourceInfo);
    // Paragraph grouping only applies when parsing at block level.
    if (parser.Context is MarkdownBlockContext)
    {
        tokens = TokenHelper.CreateParagraghs(
            parser,
            MarkdownParagraphBlockRule.Instance,
            tokens,
            true,
            preprocessedSourceInfo);
    }
    // resolve two phase token — repeatedly extract until stable, bounded by MaxExtractCount + 1 passes.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        new MarkdownRewriteEngine(
            this,
            MarkdownTokenRewriterFactory.Loop(
                MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, TwoPhaseBlockToken>(
                    (e, t) => t.Extract(parser)),
                MaxExtractCount + 1)));
    // fix id. idTable tracks duplicate heading ids so rewrites stay unique.
    var idTable = new Dictionary<string, int>();
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        new MarkdownRewriteEngine(
            this,
            MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, MarkdownHeadingBlockToken>(
                (e, t) => t.RewriteId(idTable))));
    // customized rewriter.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        RewriteEngine);
    if (TokenTreeValidator != null)
    {
        TokenTreeValidator.Validate(tokens);
    }
    var renderer = Renderer;
    foreach (var token in tokens)
    {
        result += renderer.Render(token);
    }
    return result;
}
/// <summary>Block token for a DFM note (e.g. NOTE/WARNING/TIP callout).</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="noteType">The callout kind captured from the marker.</param>
/// <param name="content">The note's raw content.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public DfmNoteBlockToken(IMarkdownRule rule, IMarkdownContext context, string noteType, string content, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    NoteType = noteType;
    Content = content;
}
/// <summary>Captures the mutable parsing state (remaining text, file, line) for one markdown source.</summary>
/// <param name="sourceInfo">The source whose markdown will be consumed during parsing.</param>
public MarkdownParsingContext(SourceInfo sourceInfo)
{
    var markdown = sourceInfo.Markdown;
    CurrentMarkdown = markdown;
    _markdownLength = markdown.Length;
    _file = sourceInfo.File;
    _lineNumber = sourceInfo.LineNumber;
    // Precompute the line index so positions can be mapped back to line numbers later.
    _lineIndexer = CreateLineIndexer(markdown);
}
/// <summary>Preprocesses the markdown and tokenizes it into an immutable token array.</summary>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The tokens produced by <see cref="TokenizeCore"/>.</returns>
public ImmutableArray<IMarkdownToken> Tokenize(SourceInfo sourceInfo)
{
    var preprocessed = Preprocess(sourceInfo.Markdown);
    // Only rebuild the SourceInfo when preprocessing actually changed the text.
    var effectiveSourceInfo = sourceInfo.Markdown == preprocessed
        ? sourceInfo
        : sourceInfo.Copy(preprocessed);
    return TokenizeCore(effectiveSourceInfo).ToImmutableArray();
}
/// <summary>Wraps a run of accumulated text content into a single two-phase token.</summary>
/// <remarks>A single trailing newline is stripped before wrapping.</remarks>
private static IMarkdownToken GroupTextTokens(IMarkdownParser parser, IMarkdownRule rule, bool wrapParagraph, StringBuffer textContent, SourceInfo si)
{
    var trimmed = textContent.EndsWith('\n')
        ? textContent.Substring(0, textContent.GetLength() - 1)
        : textContent;
    return CreateTwoPhaseToken(parser, rule, wrapParagraph, si.Copy(trimmed.ToString()));
}
/// <summary>Base token for Azure-migration include elements.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="name">Display name of the include.</param>
/// <param name="src">Path of the included file.</param>
/// <param name="title">Optional title text.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
protected AzureMigrationIncludeBasicToken(IMarkdownRule rule, IMarkdownContext context, string name, string src, string title, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Name = name;
    Src = src;
    Title = title;
}
/// <summary>
/// Resolves an include directive: validates the path, detects circular includes,
/// renders the included file (with caching), and reports its dependencies.
/// </summary>
/// <param name="adapter">The renderer requesting the include (not used directly here).</param>
/// <param name="currentPath">Relative path of the file to include.</param>
/// <param name="raw">The raw markdown of the include directive, used in error output.</param>
/// <param name="sourceInfo">Location of the directive, used in error output.</param>
/// <param name="context">Context carrying the file-path stack and dependency sink.</param>
/// <param name="engine">Engine used to render the included markdown.</param>
/// <returns>The rendered HTML, or an error node wrapped in comments on failure.</returns>
private string LoadCore(IMarkdownRenderer adapter, string currentPath, string raw, SourceInfo sourceInfo, IMarkdownContext context, DfmEngine engine)
{
    try
    {
        if (!PathUtility.IsRelativePath(currentPath))
        {
            return GenerateErrorNodeWithCommentWrapper("INCLUDE", $"Absolute path \"{currentPath}\" is not supported.", raw, sourceInfo);
        }

        // Always report original include file dependency
        var originalRelativePath = currentPath;
        context.ReportDependency(currentPath);

        var parents = context.GetFilePathStack();
        string parent = string.Empty;
        if (parents == null)
        {
            parents = ImmutableStack<string>.Empty;
        }
        // Update currentPath to be referencing to sourcePath
        else if (!parents.IsEmpty)
        {
            parent = parents.Peek();
            // Rebase the include path onto the including file's location.
            currentPath = ((RelativePath)currentPath).BasedOn((RelativePath)parent);
        }

        // A path already on the stack means we are including ourselves (directly or transitively).
        if (parents.Contains(currentPath, FilePathComparer.OSPlatformSensitiveComparer))
        {
            return GenerateErrorNodeWithCommentWrapper("INCLUDE", $"Unable to resolve {raw}: Circular dependency found in \"{parent}\"", raw, sourceInfo);
        }

        // Add current file path to chain when entering recursion
        parents = parents.Push(currentPath);

        string result;
        HashSet<string> dependency;
        // Cache hit requires BOTH the dependency set and the rendered result to be present.
        if (!_dependencyCache.TryGetValue(currentPath, out dependency) || !_cache.TryGet(currentPath, out result))
        {
            // NOTE(review): fallback resolution uses the ORIGINAL (un-rebased) path — presumably
            // intentional so fallback lookup is relative to the root; confirm against callers.
            var filePathWithStatus = DfmFallbackHelper.GetFilePathWithFallback(originalRelativePath, context);
            var src = File.ReadAllText(filePathWithStatus.Item1);
            dependency = new HashSet<string>();
            // Recursive render; the pushed stack and fresh dependency set flow through the context.
            src = engine.InternalMarkup(src, context.SetFilePathStack(parents).SetDependency(dependency).SetIsInclude());
            result = UpdateToHrefFromWorkingFolder(src, currentPath);
            result = GenerateNodeWithCommentWrapper("INCLUDE", $"Include content from \"{currentPath}\"", result);
            _cache.Add(currentPath, result);
            _dependencyCache[currentPath] = dependency;
        }

        // Re-express each transitive dependency relative to the including file.
        context.ReportDependency(
            from d in dependency
            select (string)((RelativePath)currentPath + (RelativePath)d - (RelativePath)parent));
        return result;
    }
    catch (Exception e)
    {
        // Best-effort: any failure is rendered inline as an error node rather than thrown.
        return GenerateErrorNodeWithCommentWrapper("INCLUDE", $"Unable to resolve {raw}:{e.Message}", raw, sourceInfo);
    }
}
/// <summary>Inline token for a DFM include directive.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="src">Path of the included file.</param>
/// <param name="name">Display name of the include.</param>
/// <param name="title">Optional title text.</param>
/// <param name="raw">The raw markdown of the directive.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public DfmIncludeInlineToken(IMarkdownRule rule, IMarkdownContext context, string src, string name, string title, string raw, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Src = src;
    Name = name;
    Title = title;
    Raw = raw;
}
/// <summary>Base token for Azure include elements, carrying pre-parsed child tokens.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="src">Path of the included file.</param>
/// <param name="name">Display name of the include.</param>
/// <param name="title">Optional title text.</param>
/// <param name="tokens">Tokens parsed from the included content.</param>
/// <param name="raw">The raw markdown of the directive.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
protected AzureIncludeBasicToken(IMarkdownRule rule, IMarkdownContext context, string src, string name, string title, ImmutableArray<IMarkdownToken> tokens, string raw, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Src = src;
    Name = name;
    Title = title;
    Raw = raw;
    Tokens = tokens;
}
/// <summary>Token for a DFM code-fences reference (external code snippet).</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="name">Display name of the snippet.</param>
/// <param name="path">Path of the referenced code file.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
/// <param name="lang">Optional language hint for highlighting.</param>
/// <param name="title">Optional title text.</param>
/// <param name="pathQueryOption">Optional query option selecting a region of the file.</param>
public DfmFencesToken(IMarkdownRule rule, IMarkdownContext context, string name, string path, SourceInfo sourceInfo, string lang, string title, IDfmFencesBlockPathQueryOption pathQueryOption)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Name = name;
    Path = path;
    Lang = lang;
    Title = title;
    PathQueryOption = pathQueryOption;
}
/// <summary>
/// Runs the markup pipeline for one markdown source using explicitly-managed
/// rewrite engines: resolve two-phase tokens, fix heading ids, apply the
/// customized rewriter, validate, then render. Each engine follows the
/// Initialize/Rewrite/Complete lifecycle; the phase order is significant.
/// </summary>
/// <param name="sourceInfo">The markdown source (text, file, line) to process.</param>
/// <param name="context">Optional context to switch the parser into; null keeps the current context.</param>
/// <returns>The rendered output accumulated in a <see cref="StringBuffer"/>.</returns>
public StringBuffer Mark(SourceInfo sourceInfo, IMarkdownContext context)
{
    var result = StringBuffer.Empty;
    var parser = Parser;
    if (context != null)
    {
        parser.SwitchContext(context);
    }
    var tokens = parser.Tokenize(sourceInfo.Copy(Preprocess(sourceInfo.Markdown)));
    // Phase 1: repeatedly extract two-phase tokens, bounded by MaxExtractCount + 1 passes.
    var internalRewriteEngine = new MarkdownRewriteEngine(
        this,
        MarkdownTokenRewriterFactory.Loop(
            MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, TwoPhaseBlockToken>(
                (e, t) => t.Extract(parser)),
            MaxExtractCount + 1));
    internalRewriteEngine.Initialize();
    tokens = internalRewriteEngine.Rewrite(tokens);
    internalRewriteEngine.Complete();
    // Phase 2: de-duplicate heading ids via the shared idTable.
    var idTable = new Dictionary<string, int>();
    var idRewriteEngine = new MarkdownRewriteEngine(
        this,
        MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, MarkdownHeadingBlockToken>(
            (e, t) => t.RewriteId(idTable)));
    idRewriteEngine.Initialize();
    tokens = idRewriteEngine.Rewrite(tokens);
    idRewriteEngine.Complete();
    // Phase 3: customized rewriter supplied by the engine configuration.
    var rewriteEngine = RewriteEngine;
    rewriteEngine.Initialize();
    tokens = rewriteEngine.Rewrite(tokens);
    rewriteEngine.Complete();
    if (TokenTreeValidator != null)
    {
        TokenTreeValidator.Validate(tokens);
    }
    var renderer = Renderer;
    foreach (var token in tokens)
    {
        result += renderer.Render(token);
    }
    return (result);
}
/// <summary>Block token for a markdown table: header row, column alignments, and body cells.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="header">Header cells, one per column.</param>
/// <param name="align">Alignment per column.</param>
/// <param name="cells">Body rows, each an array of cells.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownTableBlockToken(
    IMarkdownRule rule,
    IMarkdownContext context,
    ImmutableArray<MarkdownTableItemBlockToken> header,
    ImmutableArray<Align> align,
    ImmutableArray<ImmutableArray<MarkdownTableItemBlockToken>> cells,
    SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Header = header;
    Align = align;
    Cells = cells;
}
/// <summary>Consumes the whole markdown source, producing one token per rule match.</summary>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The tokens in source order.</returns>
/// <exception cref="MarkdownParsingException">When no rule matches the remaining text.</exception>
private List<IMarkdownToken> TokenizeCore(SourceInfo sourceInfo)
{
    var parsingContext = new MarkdownParsingContext(sourceInfo);
    var tokens = new List<IMarkdownToken>();
    // Each iteration must consume some input via a matching rule, or parsing cannot progress.
    while (parsingContext.CurrentMarkdown.Length > 0)
    {
        var token = ApplyRules(parsingContext);
        if (token == null)
        {
            throw new MarkdownParsingException("Cannot parse markdown: No rule match.", parsingContext.ToSourceInfo());
        }
        tokens.Add(token);
    }
    return tokens;
}
/// <summary>Consumes the whole markdown source by trying each rule in order until one matches.</summary>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The tokens in source order.</returns>
/// <exception cref="InvalidOperationException">When no rule matches the remaining text.</exception>
private List<IMarkdownToken> TokenizeCore(SourceInfo sourceInfo)
{
    var pc = new MarkdownParsingContext(sourceInfo);
    var tokens = new List<IMarkdownToken>();
    while (pc.CurrentMarkdown.Length > 0)
    {
        // First rule that matches wins; later rules are not attempted.
        IMarkdownToken token = null;
        foreach (var rule in Context.Rules)
        {
            token = rule.TryMatch(this, pc);
            if (token != null)
            {
                break;
            }
        }
        if (token == null)
        {
            throw new InvalidOperationException($"Cannot parse markdown for file {sourceInfo.File}, line {pc.LineNumber}.");
        }
        tokens.Add(token);
    }
    return tokens;
}
private ImmutableArray<IMarkdownToken> GetContent(IMarkdownParser parser, Match match, SourceInfo sourceInfo) { var emContent = new MarkdownEmInlineToken( this, parser.Context, parser.Tokenize(sourceInfo.Copy(match.Groups[2].Value)), sourceInfo.Copy("*" + match.Groups[1].Value + "*")); if (match.Groups[2].Length > 0) { return parser.Tokenize(sourceInfo.Copy(match.Groups[3].Value)).Insert(0, emContent); } else { return ImmutableArray.Create<IMarkdownToken>(emContent); } }
/// <summary>Tokenizes a source under the inline context derived from the parser's current block context.</summary>
/// <param name="parser">The parser; must currently hold a <see cref="MarkdownBlockContext"/>.</param>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The inline tokens wrapped in an <see cref="InlineContent"/>.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="parser"/> is null.</exception>
/// <exception cref="InvalidOperationException">When the parser is not in a block context.</exception>
public static InlineContent TokenizeInline(this IMarkdownParser parser, SourceInfo sourceInfo)
{
    if (parser == null)
    {
        throw new ArgumentNullException(nameof(parser));
    }
    var blockContext = parser.Context as MarkdownBlockContext;
    if (blockContext == null)
    {
        throw new InvalidOperationException($"{nameof(parser)}.{nameof(parser.Context)}(type:{parser.Context.GetType().FullName}) is invalid.");
    }
    // Temporarily switch to the inline context, tokenize, then restore the previous context.
    var previousContext = parser.SwitchContext(blockContext.GetInlineContext());
    var tokens = parser.Tokenize(sourceInfo);
    parser.SwitchContext(previousContext);
    return new InlineContent(tokens);
}
/// <summary>Consumes the first <paramref name="charCount"/> characters of the remaining markdown.</summary>
/// <param name="charCount">Number of characters to consume; assumed ≤ remaining length.</param>
/// <returns>A <see cref="SourceInfo"/> covering exactly the consumed text.</returns>
public SourceInfo Consume(int charCount)
{
    // Snapshot the line offset before mutating CurrentMarkdown.
    var lineOffset = CalcLineNumber();
    string consumed;
    if (charCount == CurrentMarkdown.Length)
    {
        consumed = CurrentMarkdown;
        CurrentMarkdown = string.Empty;
    }
    else
    {
        // Remove(n) keeps the first n characters; Substring(n) keeps the rest.
        consumed = CurrentMarkdown.Remove(charCount);
        CurrentMarkdown = CurrentMarkdown.Substring(charCount);
    }
    return SourceInfo.Create(consumed, _file, _lineNumber + lineOffset);
}
/// <summary>
/// Coalesces consecutive text tokens (joined by '\n') into single inline tokens,
/// drops single-character newline tokens and all other newline tokens, and keeps
/// remaining tokens as-is.
/// </summary>
/// <param name="parser">Parser used to re-tokenize grouped text.</param>
/// <param name="rule">Rule attributed to the generated tokens.</param>
/// <param name="blockTokens">The block tokens to transform.</param>
/// <param name="wrapParagraph">Whether grouped text becomes paragraph or non-paragraph tokens.</param>
/// <param name="sourceInfo">Template SourceInfo for the grouped text.</param>
public static ImmutableArray<IMarkdownToken> ParseInlineToken(
    IMarkdownParser parser,
    IMarkdownRule rule,
    ImmutableArray<IMarkdownToken> blockTokens,
    bool wrapParagraph,
    SourceInfo sourceInfo)
{
    var result = new List<IMarkdownToken>(blockTokens.Length);
    var pending = StringBuffer.Empty;
    foreach (var token in blockTokens)
    {
        if (token is MarkdownTextToken textToken)
        {
            // Join consecutive text tokens with a newline.
            if (pending != StringBuffer.Empty)
            {
                pending += "\n";
            }
            pending += textToken.Content;
            continue;
        }
        var newLine = token as MarkdownNewLineBlockToken;
        // A single-character newline is skipped WITHOUT flushing the pending text.
        if (newLine != null && newLine.SourceInfo.Markdown.Length == 1)
        {
            continue;
        }
        if (pending != StringBuffer.Empty)
        {
            result.Add(CreateToken(parser, rule, wrapParagraph, sourceInfo.Copy(pending.ToString())));
            pending = StringBuffer.Empty;
        }
        // Longer newline tokens flush but are not emitted themselves.
        if (newLine == null)
        {
            result.Add(token);
        }
    }
    if (pending != StringBuffer.Empty)
    {
        result.Add(CreateToken(parser, rule, wrapParagraph, sourceInfo.Copy(pending.ToString())));
    }
    return result.ToImmutableArray();
}
/// <summary>Creates a two-phase token whose second phase re-tokenizes the source as inline content.</summary>
/// <param name="parser">Parser supplying the context stored on the token.</param>
/// <param name="rule">Rule attributed to the token.</param>
/// <param name="wrapParagraph">True to resolve into a paragraph token; false for a non-paragraph token.</param>
/// <param name="sourceInfo">The source the token covers and later re-tokenizes.</param>
private static TwoPhaseBlockToken CreateTwoPhaseToken(IMarkdownParser parser, IMarkdownRule rule, bool wrapParagraph, SourceInfo sourceInfo)
{
    // FIX: removed `var inlineContent = parser.TokenizeInline(sourceInfo);` — the result was
    // never used; the resolver lambdas below re-tokenize lazily, so the eager call was pure
    // wasted work (and TokenizeInline restores the parser context, so dropping it has no
    // visible side effect).
    if (wrapParagraph)
    {
        return new TwoPhaseBlockToken(
            rule,
            parser.Context,
            sourceInfo,
            (p, t) => new MarkdownParagraphBlockToken(t.Rule, p.Context, p.TokenizeInline(t.SourceInfo), t.SourceInfo));
    }
    return new TwoPhaseBlockToken(
        rule,
        parser.Context,
        sourceInfo,
        (p, t) => new MarkdownNonParagraphBlockToken(t.Rule, p.Context, p.TokenizeInline(t.SourceInfo), t.SourceInfo));
}
/// <summary>
/// Groups consecutive text tokens into paragraph (or non-paragraph) tokens,
/// dropping newline tokens. (Name keeps the historical "Paragraghs" spelling
/// for public-API compatibility.)
/// </summary>
/// <param name="parser">Parser used to re-tokenize grouped text.</param>
/// <param name="rule">Rule attributed to the generated tokens.</param>
/// <param name="blockTokens">The block tokens to transform.</param>
/// <param name="wrapParagraph">Whether grouped text becomes paragraph tokens.</param>
/// <param name="sourceInfo">Fallback SourceInfo when no text token has been seen yet.</param>
public static ImmutableArray<IMarkdownToken> CreateParagraghs(
    IMarkdownParser parser,
    IMarkdownRule rule,
    ImmutableArray<IMarkdownToken> blockTokens,
    bool wrapParagraph,
    SourceInfo sourceInfo)
{
    var result = new List<IMarkdownToken>(blockTokens.Length);
    var pending = StringBuffer.Empty;
    var pendingSourceInfo = sourceInfo;
    foreach (var token in blockTokens)
    {
        if (token is MarkdownTextToken textToken)
        {
            // Remember where the first text token of the current run started.
            if (pending == StringBuffer.Empty)
            {
                pendingSourceInfo = textToken.SourceInfo;
            }
            pending += textToken.Content;
            continue;
        }
        if (pending != StringBuffer.Empty)
        {
            result.Add(GroupTextTokens(parser, rule, wrapParagraph, pending, pendingSourceInfo));
            pending = StringBuffer.Empty;
        }
        if (!(token is MarkdownNewLineBlockToken))
        {
            result.Add(token);
        }
    }
    if (pending != StringBuffer.Empty)
    {
        result.Add(GroupTextTokens(parser, rule, wrapParagraph, pending, pendingSourceInfo));
    }
    return result.ToImmutableArray();
}
/// <summary>
/// Coalesces runs of text tokens into grouped tokens and filters out newline
/// tokens, leaving other tokens untouched. (The "Paragraghs" spelling is a
/// historical public-API name and is preserved.)
/// </summary>
/// <param name="parser">Parser used to re-tokenize grouped text.</param>
/// <param name="rule">Rule attributed to the generated tokens.</param>
/// <param name="blockTokens">The block tokens to transform.</param>
/// <param name="wrapParagraph">Whether grouped text becomes paragraph tokens.</param>
/// <param name="sourceInfo">Fallback SourceInfo when no text token has been seen yet.</param>
public static ImmutableArray<IMarkdownToken> CreateParagraghs(
    IMarkdownParser parser,
    IMarkdownRule rule,
    ImmutableArray<IMarkdownToken> blockTokens,
    bool wrapParagraph,
    SourceInfo sourceInfo)
{
    var grouped = new List<IMarkdownToken>(blockTokens.Length);
    var buffer = StringBuffer.Empty;
    var bufferStart = sourceInfo;
    foreach (var token in blockTokens)
    {
        if (token is MarkdownTextToken textToken)
        {
            // An empty buffer means this text token starts a new run; record its position.
            if (buffer == StringBuffer.Empty)
            {
                bufferStart = textToken.SourceInfo;
            }
            buffer += textToken.Content;
            continue;
        }
        if (buffer != StringBuffer.Empty)
        {
            grouped.Add(GroupTextTokens(parser, rule, wrapParagraph, buffer, bufferStart));
            buffer = StringBuffer.Empty;
        }
        if (token is MarkdownNewLineBlockToken)
        {
            continue;
        }
        grouped.Add(token);
    }
    if (buffer != StringBuffer.Empty)
    {
        grouped.Add(GroupTextTokens(parser, rule, wrapParagraph, buffer, bufferStart));
    }
    return grouped.ToImmutableArray();
}
/// <summary>Consumes the whole markdown source, tracking paragraph state as rules match.</summary>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The tokens in source order.</returns>
/// <exception cref="MarkdownParsingException">When no rule matches the remaining text.</exception>
private List<IMarkdownToken> TokenizeCore(SourceInfo sourceInfo)
{
    var parsingContext = new MarkdownParsingContext(sourceInfo);
    var tokens = new List<IMarkdownToken>();
    while (parsingContext.CurrentMarkdown.Length > 0)
    {
        var token = ApplyRules(parsingContext);
        if (token == null)
        {
            throw new MarkdownParsingException("Cannot parse markdown: No rule match.", parsingContext.ToSourceInfo());
        }
        // The context is "in a paragraph" exactly when the last match came from the text-block rule.
        parsingContext.IsInParagraph = token.Rule is MarkdownTextBlockRule;
        tokens.Add(token);
    }
    return tokens;
}
/// <summary>Creates a two-phase token whose second phase tokenizes the given markdown as inline content.</summary>
/// <param name="parser">Parser supplying the context stored on the token.</param>
/// <param name="rule">Rule attributed to the token.</param>
/// <param name="markdown">The markdown the resolver will tokenize (captured by the lambda).</param>
/// <param name="wrapParagraph">True to resolve into a paragraph token; false for a non-paragraph token.</param>
/// <param name="sourceInfo">The source the token covers.</param>
private static TwoPhaseBlockToken CreateTwoPhaseToken(
    IMarkdownParser parser,
    IMarkdownRule rule,
    string markdown,
    bool wrapParagraph,
    SourceInfo sourceInfo)
{
    if (wrapParagraph)
    {
        return new TwoPhaseBlockToken(
            rule,
            parser.Context,
            sourceInfo,
            (p, t) => new MarkdownParagraphBlockToken(t.Rule, p.Context, p.TokenizeInline(sourceInfo.Copy(markdown)), t.SourceInfo));
    }
    return new TwoPhaseBlockToken(
        rule,
        parser.Context,
        sourceInfo,
        (p, t) => new MarkdownNonParagraphBlockToken(t.Rule, p.Context, p.TokenizeInline(sourceInfo.Copy(markdown)), t.SourceInfo));
}
/// <summary>
/// Builds the exception message: the base message, optional file and line
/// information, followed by up to 256 characters of the offending markdown,
/// each line quoted with "&gt; ".
/// </summary>
/// <param name="message">The base error message.</param>
/// <param name="sourceInfo">Source whose file/line/markdown are appended.</param>
private static string GetMessage(string message, SourceInfo sourceInfo)
{
    StringBuffer sb = message;
    if (sourceInfo.File != null)
    {
        sb = sb + " in " + sourceInfo.File;
    }
    if (sourceInfo.LineNumber > 0)
    {
        sb = sb + " at line " + sourceInfo.LineNumber.ToString();
    }
    sb += " with following markdown content:";
    sb += Environment.NewLine;
    var md = sourceInfo.Markdown;
    // Cap the excerpt so huge documents do not bloat the message.
    if (md.Length > 256)
    {
        md = md.Remove(256);
    }
    foreach (var line in md.Split('\n'))
    {
        // FIX: quoted lines were previously concatenated with no separator,
        // collapsing a multi-line excerpt into "> line1> line2…".
        sb = sb + "> " + line + Environment.NewLine;
    }
    return sb.ToString();
}
/// <summary>Creates the exception with the default message "Unable to parse markdown".</summary>
/// <param name="sourceInfo">Source location included in the final message.</param>
public MarkdownParsingException(SourceInfo sourceInfo)
    : this("Unable to parse markdown", sourceInfo) { }
/// <summary>Factory: tokenizes <paramref name="content"/> inline and wraps it in a paragraph block token.</summary>
/// <param name="rule">Rule attributed to the token.</param>
/// <param name="engine">Parser whose context and inline tokenizer are used.</param>
/// <param name="content">The markdown content of the paragraph.</param>
/// <param name="sourceInfo">Location and raw markdown of the paragraph.</param>
public static MarkdownParagraphBlockToken Create(IMarkdownRule rule, MarkdownParser engine, string content, SourceInfo sourceInfo)
{
    // Capture the context before tokenizing, preserving the original evaluation order.
    var context = engine.Context;
    var inlineContent = engine.TokenizeInline(sourceInfo.Copy(content));
    return new MarkdownParagraphBlockToken(rule, context, inlineContent, sourceInfo);
}
/// <summary>Creates the exception; the stored message is enriched with file, line, and a markdown excerpt by <see cref="GetMessage"/>.</summary>
/// <param name="message">The base error message.</param>
/// <param name="sourceInfo">Source location included in the final message.</param>
public MarkdownParsingException(string message, SourceInfo sourceInfo)
    : base(GetMessage(message, sourceInfo)) { }
/// <summary>Inline variant of <see cref="DfmFencesToken"/>; forwards everything to the base constructor.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="name">Display name of the snippet.</param>
/// <param name="path">Path of the referenced code file.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
/// <param name="lang">Optional language hint for highlighting.</param>
/// <param name="title">Optional title text.</param>
/// <param name="pathQueryOption">Optional query option selecting a region of the file.</param>
public DfmFencesInlineToken(IMarkdownRule rule, IMarkdownContext context, string name, string path, SourceInfo sourceInfo, string lang = null, string title = null, IDfmFencesBlockPathQueryOption pathQueryOption = null)
    : base(rule, context, name, path, sourceInfo, lang, title, pathQueryOption) { }
/// <summary>Convenience overload: delegates to the full constructor with a start index of 1.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="tokens">The list item tokens.</param>
/// <param name="ordered">True for an ordered list.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownListBlockToken(IMarkdownRule rule, IMarkdownContext context, ImmutableArray<IMarkdownToken> tokens, bool ordered, SourceInfo sourceInfo)
    : this(rule, context, tokens, ordered, 1, sourceInfo) { }
/// <summary>
/// Snapshots the remaining markdown as a SourceInfo, including the count of
/// lines not yet consumed (total indexed lines minus the current line offset).
/// </summary>
public SourceInfo ToSourceInfo()
    => SourceInfo.Create(CurrentMarkdown, _file, _lineNumber, _lineIndexer.Count - CalcLineNumber());
/// <summary>Inline variant of the Azure-migration include token; forwards everything to the base constructor.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="name">Display name of the include.</param>
/// <param name="src">Path of the included file.</param>
/// <param name="title">Optional title text.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public AzureMigrationIncludeInlineToken(IMarkdownRule rule, IMarkdownContext context, string name, string src, string title, SourceInfo sourceInfo)
    : base(rule, context, name, src, title, sourceInfo) { }
/// <summary>Block token carrying inline content that is NOT wrapped in a paragraph when rendered.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="content">The inline content.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownNonParagraphBlockToken(IMarkdownRule rule, IMarkdownContext context, InlineContent content, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Content = content;
}
/// <summary>Creates the exception with an inner exception; the message is enriched by <see cref="GetMessage"/>.</summary>
/// <param name="message">The base error message.</param>
/// <param name="sourceInfo">Source location included in the final message.</param>
/// <param name="innerException">The underlying cause.</param>
public MarkdownParsingException(string message, SourceInfo sourceInfo, Exception innerException)
    : base(GetMessage(message, sourceInfo), innerException) { }
/// <summary>Tokenizes the source inline and wraps it in a paragraph or non-paragraph block token.</summary>
/// <param name="parser">Parser whose context and inline tokenizer are used.</param>
/// <param name="rule">Rule attributed to the token.</param>
/// <param name="wrapParagraph">True to produce a paragraph token; false for a non-paragraph token.</param>
/// <param name="sourceInfo">Location and raw markdown of the grouped text.</param>
private static IMarkdownToken CreateToken(IMarkdownParser parser, IMarkdownRule rule, bool wrapParagraph, SourceInfo sourceInfo)
{
    var inlineContent = parser.TokenizeInline(sourceInfo);
    return wrapParagraph
        ? (IMarkdownToken)new MarkdownParagraphBlockToken(rule, parser.Context, inlineContent, sourceInfo)
        : new MarkdownNonParagraphBlockToken(rule, parser.Context, inlineContent, sourceInfo);
}
/// <summary>Block token for one item of a markdown list.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="tokens">The item's child tokens.</param>
/// <param name="loose">True when the item is "loose" (paragraph-separated).</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownListItemBlockToken(IMarkdownRule rule, IMarkdownContext context, ImmutableArray<IMarkdownToken> tokens, bool loose, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Tokens = tokens;
    Loose = loose;
}
/// <summary>Inline token for GFM strikethrough (del) content.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="content">The struck-through child tokens.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public GfmDelInlineToken(IMarkdownRule rule, IMarkdownContext context, ImmutableArray<IMarkdownToken> content, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Content = content;
}
/// <summary>
/// Runs the markup pipeline with token aggregation and optional id fixing:
/// tokenize, group paragraphs, resolve two-phase tokens, run each aggregator,
/// apply the customized rewriter, optionally fix heading ids, validate, render.
/// Note: unlike the sibling overloads, the id fix here runs AFTER the customized
/// rewriter and only when <c>Options.ShouldFixId</c> is set.
/// </summary>
/// <param name="sourceInfo">The markdown source (text, file, line) to process.</param>
/// <param name="context">Optional context to switch the parser into; null keeps the current context.</param>
/// <returns>The rendered output accumulated in a <see cref="StringBuffer"/>.</returns>
public StringBuffer Mark(SourceInfo sourceInfo, IMarkdownContext context)
{
    var result = StringBuffer.Empty;
    var parser = Parser;
    if (context != null)
    {
        parser.SwitchContext(context);
    }
    var preprocessedSourceInfo = sourceInfo.Copy(Preprocess(sourceInfo.Markdown));
    var tokens = parser.Tokenize(preprocessedSourceInfo);
    // Paragraph grouping only applies when parsing at block level.
    if (parser.Context is MarkdownBlockContext)
    {
        tokens = TokenHelper.CreateParagraghs(
            parser,
            MarkdownParagraphBlockRule.Instance,
            tokens,
            true,
            preprocessedSourceInfo);
    }
    // resolve two phase token — repeatedly extract until stable, bounded by MaxExtractCount + 1 passes.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        new MarkdownRewriteEngine(
            this,
            MarkdownTokenRewriterFactory.Loop(
                MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, TwoPhaseBlockToken>(
                    (e, t) => t.Extract(parser)),
                MaxExtractCount + 1)));
    // Aggregate tokens. Each configured aggregator gets its own pass, in order.
    foreach (var agg in TokenAggregators)
    {
        tokens = RewriteTokens(
            tokens,
            sourceInfo.File,
            new MarkdownAggregateEngine(
                this,
                agg));
    }
    // customized rewriter.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        RewriteEngine);
    if (Options.ShouldFixId)
    {
        // fix id. idTable tracks duplicate heading ids so rewrites stay unique.
        var idTable = new Dictionary<string, int>();
        tokens = RewriteTokens(
            tokens,
            sourceInfo.File,
            new MarkdownRewriteEngine(
                this,
                MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, MarkdownHeadingBlockToken>(
                    (e, t) => t.RewriteId(idTable))));
    }
    if (TokenTreeValidator != null)
    {
        TokenTreeValidator.Validate(tokens);
    }
    var renderer = Renderer;
    foreach (var token in tokens)
    {
        result += renderer.Render(token);
    }
    return (result);
}
/// <summary>Block token for a markdown heading.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="content">The heading's inline content.</param>
/// <param name="id">The heading's anchor id.</param>
/// <param name="depth">Heading level (1-6).</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownHeadingBlockToken(IMarkdownRule rule, IMarkdownContext context, InlineContent content, string id, int depth, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Content = content;
    Id = id;
    Depth = depth;
}
/// <summary>Inline token for a markdown link.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="href">Link target.</param>
/// <param name="title">Optional title attribute.</param>
/// <param name="content">The link's child tokens (its text).</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownLinkInlineToken(IMarkdownRule rule, IMarkdownContext context, string href, string title, ImmutableArray<IMarkdownToken> content, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Href = href;
    Title = title;
    Content = content;
}
/// <summary>Block token for a markdown list.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="tokens">The list item tokens.</param>
/// <param name="ordered">True for an ordered list.</param>
/// <param name="start">Starting number for ordered lists.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownListBlockToken(IMarkdownRule rule, IMarkdownContext context, ImmutableArray<IMarkdownToken> tokens, bool ordered, int start, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Tokens = tokens;
    Ordered = ordered;
    Start = start;
}
/// <summary>Snapshots the remaining markdown as a SourceInfo at the current file and line.</summary>
public SourceInfo ToSourceInfo()
    => SourceInfo.Create(CurrentMarkdown, _file, _lineNumber);
/// <summary>Wraps accumulated text into a two-phase token, stripping one trailing newline first.</summary>
private static IMarkdownToken GroupTextTokens(IMarkdownParser parser, IMarkdownRule rule, bool wrapParagraph, StringBuffer textContent, SourceInfo si)
{
    var body = textContent;
    if (body.EndsWith('\n'))
    {
        body = body.Substring(0, body.GetLength() - 1);
    }
    return CreateTwoPhaseToken(parser, rule, wrapParagraph, si.Copy(body.ToString()));
}
/// <summary>Logs the error and returns the html wrapped in an "ERROR {tag}" comment node.</summary>
/// <param name="tag">Node tag; prefixed with "ERROR " in the wrapper.</param>
/// <param name="comment">Human-readable error description.</param>
/// <param name="html">The original content to wrap.</param>
/// <param name="sourceInfo">Source whose line number is logged.</param>
private static string GenerateErrorNodeWithCommentWrapper(string tag, string comment, string html, SourceInfo sourceInfo)
{
    // FIX: the log message previously read "...commentat line N." — a separating
    // space was missing between the comment and "at line".
    Logger.LogError($"{comment} at line {sourceInfo.LineNumber}.");
    return GenerateNodeWithCommentWrapper("ERROR " + tag, comment, html);
}
/// <summary>Public entry point for include resolution; a thin pass-through to <c>LoadCore</c>.</summary>
public string Load(IMarkdownRenderer adapter, string currentPath, string raw, SourceInfo sourceInfo, IMarkdownContext context, DfmEngine engine)
    => LoadCore(adapter, currentPath, raw, sourceInfo, context, engine);
/// <summary>Block token for a paragraph of inline tokens.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="inlineTokens">The paragraph's inline content.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownParagraphBlockToken(IMarkdownRule rule, IMarkdownContext context, InlineContent inlineTokens, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    InlineTokens = inlineTokens;
}
/// <summary>Block token for a markdown blockquote.</summary>
/// <param name="rule">The rule that matched this token.</param>
/// <param name="context">The parsing context active at match time.</param>
/// <param name="tokens">The quoted child tokens.</param>
/// <param name="sourceInfo">Location and raw markdown of the match.</param>
public MarkdownBlockquoteBlockToken(IMarkdownRule rule, IMarkdownContext context, ImmutableArray<IMarkdownToken> tokens, SourceInfo sourceInfo)
{
    Rule = rule;
    Context = context;
    SourceInfo = sourceInfo;
    Tokens = tokens;
}
/// <summary>Builds the token list for an emphasis match: the em token first, then any trailing content.</summary>
/// <param name="parser">Parser used to tokenize the captured groups.</param>
/// <param name="match">The regex match; group 2 is the em body, group 3 the trailing text.</param>
/// <param name="sourceInfo">Source location used for derived SourceInfos.</param>
private ImmutableArray<IMarkdownToken> GetContent(IMarkdownParser parser, Match match, SourceInfo sourceInfo)
{
    // The em token's raw markdown re-surrounds group 1 with '*'.
    var emToken = new MarkdownEmInlineToken(
        this,
        parser.Context,
        parser.Tokenize(sourceInfo.Copy(match.Groups[2].Value)),
        sourceInfo.Copy("*" + match.Groups[1].Value + "*"));
    if (match.Groups[2].Length == 0)
    {
        return ImmutableArray.Create<IMarkdownToken>(emToken);
    }
    return parser.Tokenize(sourceInfo.Copy(match.Groups[3].Value)).Insert(0, emToken);
}
/// <summary>Normalizes the markdown, runs the full pipeline via <c>Mark</c>, and returns the rendered string.</summary>
/// <param name="markdown">The raw markdown text.</param>
/// <param name="file">The file the markdown came from (for diagnostics).</param>
public virtual string Markup(string markdown, string file)
    => Mark(SourceInfo.Create(Normalize(markdown), file), null).ToString();