/// <summary>
/// Tokenizes the markdown in <paramref name="sourceInfo"/>, runs the rewrite
/// pipeline (two-phase extraction, heading-id fixup, customized rewriters),
/// and renders the resulting tokens into a <see cref="StringBuffer"/>.
/// </summary>
/// <param name="sourceInfo">The markdown source to process.</param>
/// <param name="context">Optional context to switch the parser into before tokenizing; ignored when null.</param>
/// <returns>The rendered output accumulated over all tokens.</returns>
public StringBuffer Mark(SourceInfo sourceInfo, IMarkdownContext context)
{
    var parser = Parser;
    if (context != null)
    {
        parser.SwitchContext(context);
    }

    var tokens = parser.Tokenize(sourceInfo.Copy(Preprocess(sourceInfo.Markdown)));

    // Resolve two-phase tokens by repeatedly extracting them; the loop is
    // bounded by MaxExtractCount + 1 iterations.
    var extractEngine = new MarkdownRewriteEngine(
        this,
        MarkdownTokenRewriterFactory.Loop(
            MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, TwoPhaseBlockToken>(
                (e, t) => t.Extract(parser)),
            MaxExtractCount + 1));
    tokens = extractEngine.Rewrite(tokens);

    // Rewrite heading ids; the shared table tracks occurrences so duplicate
    // ids can be disambiguated by the token's RewriteId implementation.
    var headingIdTable = new Dictionary<string, int>();
    var headingIdEngine = new MarkdownRewriteEngine(
        this,
        MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, MarkdownHeadingBlockToken>(
            (e, t) => t.RewriteId(headingIdTable)));
    tokens = headingIdEngine.Rewrite(tokens);

    // Apply the customized rewrite engine last.
    tokens = RewriteEngine.Rewrite(tokens);

    var renderer = Renderer;
    var rendered = StringBuffer.Empty;
    foreach (var token in tokens)
    {
        rendered += renderer.Render(token);
    }
    return rendered;
}
/// <summary>
/// Tokenizes the markdown in <paramref name="sourceInfo"/>, groups block-context
/// tokens into paragraphs, runs the rewrite pipeline (two-phase extraction,
/// heading-id fixup, customized rewriters), optionally validates the token tree,
/// and renders the result into a <see cref="StringBuffer"/>.
/// </summary>
/// <param name="sourceInfo">The markdown source to process.</param>
/// <param name="context">Optional context to switch the parser into before tokenizing; ignored when null.</param>
/// <returns>The rendered output accumulated over all tokens.</returns>
public StringBuffer Mark(SourceInfo sourceInfo, IMarkdownContext context)
{
    var result = StringBuffer.Empty;
    var parser = Parser;
    if (context != null)
    {
        parser.SwitchContext(context);
    }
    var preprocessedSourceInfo = sourceInfo.Copy(Preprocess(sourceInfo.Markdown));
    var tokens = parser.Tokenize(preprocessedSourceInfo);
    if (parser.Context is MarkdownBlockContext)
    {
        tokens = TokenHelper.CreateParagraghs(
            parser,
            MarkdownParagraphBlockRule.Instance,
            tokens,
            true,
            preprocessedSourceInfo);
    }
    // Resolve two-phase tokens; the extraction loop is bounded by
    // MaxExtractCount + 1 iterations.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        new MarkdownRewriteEngine(
            this,
            MarkdownTokenRewriterFactory.Loop(
                MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, TwoPhaseBlockToken>(
                    (e, t) => t.Extract(parser)),
                MaxExtractCount + 1)));
    // Fix heading ids; the shared table tracks occurrences so duplicates
    // can be disambiguated.
    var idTable = new Dictionary<string, int>();
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        new MarkdownRewriteEngine(
            this,
            MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, MarkdownHeadingBlockToken>(
                (e, t) => t.RewriteId(idTable))));
    // Customized rewriter.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        RewriteEngine);
    // Null-conditional invocation reads the property once and is the
    // idiomatic replacement for the explicit null check.
    TokenTreeValidator?.Validate(tokens);
    var renderer = Renderer;
    foreach (var token in tokens)
    {
        result += renderer.Render(token);
    }
    return result;
}
/// <summary>
/// Coalesces consecutive <c>MarkdownTextToken</c>s into single inline tokens,
/// joining their contents with newlines, while dropping single-character
/// newline block tokens and passing all other tokens through unchanged.
/// </summary>
/// <param name="parser">Parser used when creating the coalesced tokens.</param>
/// <param name="rule">Rule associated with the created tokens.</param>
/// <param name="blockTokens">The tokens to scan.</param>
/// <param name="wrapParagraph">Whether created tokens should be wrapped as paragraphs.</param>
/// <param name="sourceInfo">Source info template; copied with the accumulated raw markdown.</param>
/// <returns>The transformed token sequence.</returns>
public static ImmutableArray<IMarkdownToken> ParseInlineToken(
    IMarkdownParser parser,
    IMarkdownRule rule,
    ImmutableArray<IMarkdownToken> blockTokens,
    bool wrapParagraph,
    SourceInfo sourceInfo)
{
    var output = new List<IMarkdownToken>(blockTokens.Length);
    var pending = StringBuffer.Empty;
    foreach (var token in blockTokens)
    {
        var textToken = token as MarkdownTextToken;
        if (textToken != null)
        {
            // Accumulate text, separating adjacent text tokens with '\n'.
            if (pending != StringBuffer.Empty)
            {
                pending += "\n";
            }
            pending += textToken.Content;
            continue;
        }
        var newLineToken = token as MarkdownNewLineBlockToken;
        if (newLineToken != null && newLineToken.SourceInfo.Markdown.Length == 1)
        {
            // A lone newline neither flushes nor interrupts the pending text.
            continue;
        }
        if (pending != StringBuffer.Empty)
        {
            // A non-text token ends the current text run: flush it.
            output.Add(CreateToken(parser, rule, wrapParagraph, sourceInfo.Copy(pending.ToString())));
            pending = StringBuffer.Empty;
        }
        if (newLineToken != null)
        {
            // Longer newline runs flush the pending text but are not emitted.
            continue;
        }
        output.Add(token);
    }
    if (pending != StringBuffer.Empty)
    {
        // Flush any text left at the end of the sequence.
        output.Add(CreateToken(parser, rule, wrapParagraph, sourceInfo.Copy(pending.ToString())));
    }
    return output.ToImmutableArray();
}
/// <summary>
/// Builds an emphasis token from capture group 2 and, when group 2 is
/// non-empty, prepends it to the tokens parsed from group 3; otherwise
/// returns the emphasis token alone.
/// </summary>
/// <param name="parser">Parser used to tokenize the captured substrings.</param>
/// <param name="match">Regex match supplying groups 1–3.</param>
/// <param name="sourceInfo">Source info template for the copied fragments.</param>
private ImmutableArray<IMarkdownToken> GetContent(IMarkdownParser parser, Match match, SourceInfo sourceInfo)
{
    var emToken = new MarkdownEmInlineToken(
        this,
        parser.Context,
        parser.Tokenize(sourceInfo.Copy(match.Groups[2].Value)),
        sourceInfo.Copy("*" + match.Groups[1].Value + "*"));
    return match.Groups[2].Length > 0
        ? parser.Tokenize(sourceInfo.Copy(match.Groups[3].Value)).Insert(0, emToken)
        : ImmutableArray.Create<IMarkdownToken>(emToken);
}
/// <summary>
/// Builds an emphasis token from capture group 2; when group 2 is empty the
/// emphasis token is returned by itself, otherwise it is prepended to the
/// tokens parsed from capture group 3.
/// </summary>
/// <param name="parser">Parser used to tokenize the captured substrings.</param>
/// <param name="match">Regex match supplying groups 1–3.</param>
/// <param name="sourceInfo">Source info template for the copied fragments.</param>
private ImmutableArray<IMarkdownToken> GetContent(IMarkdownParser parser, Match match, SourceInfo sourceInfo)
{
    var emphasis = new MarkdownEmInlineToken(
        this,
        parser.Context,
        parser.Tokenize(sourceInfo.Copy(match.Groups[2].Value)),
        sourceInfo.Copy("*" + match.Groups[1].Value + "*"));
    if (match.Groups[2].Length == 0)
    {
        return ImmutableArray.Create<IMarkdownToken>(emphasis);
    }
    return parser.Tokenize(sourceInfo.Copy(match.Groups[3].Value)).Insert(0, emphasis);
}
/// <summary>
/// Wraps accumulated text content in a two-phase token, trimming at most one
/// trailing newline first.
/// </summary>
/// <param name="parser">Parser passed through to the created token.</param>
/// <param name="rule">Rule associated with the created token.</param>
/// <param name="wrapParagraph">Whether the token should be wrapped as a paragraph.</param>
/// <param name="textContent">The accumulated text to wrap.</param>
/// <param name="si">Source info template; copied with the trimmed text.</param>
private static IMarkdownToken GroupTextTokens(IMarkdownParser parser, IMarkdownRule rule, bool wrapParagraph, StringBuffer textContent, SourceInfo si)
{
    // Drop a single trailing '\n' (if any) before building the token.
    var trimmed = textContent.EndsWith('\n')
        ? textContent.Substring(0, textContent.GetLength() - 1)
        : textContent;
    return CreateTwoPhaseToken(parser, rule, wrapParagraph, si.Copy(trimmed.ToString()));
}
/// <summary>
/// Preprocesses the markdown and tokenizes it, copying the source info only
/// when preprocessing actually changed the text.
/// </summary>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The tokens produced by <c>TokenizeCore</c>.</returns>
public ImmutableArray<IMarkdownToken> Tokenize(SourceInfo sourceInfo)
{
    var preprocessed = Preprocess(sourceInfo.Markdown);
    // Avoid an unnecessary copy when preprocessing was a no-op.
    var effectiveSourceInfo = preprocessed != sourceInfo.Markdown
        ? sourceInfo.Copy(preprocessed)
        : sourceInfo;
    return TokenizeCore(effectiveSourceInfo).ToImmutableArray();
}
/// <summary>
/// Runs the markdown through <c>Preprocess</c> and then tokenizes it via
/// <c>TokenizeCore</c>; the source info is re-copied only if preprocessing
/// modified the text.
/// </summary>
/// <param name="sourceInfo">The markdown source to tokenize.</param>
/// <returns>The resulting token array.</returns>
public ImmutableArray<IMarkdownToken> Tokenize(SourceInfo sourceInfo)
{
    var normalized = Preprocess(sourceInfo.Markdown);
    if (sourceInfo.Markdown != normalized)
    {
        // Preprocessing changed the text, so rebuild the source info around it.
        sourceInfo = sourceInfo.Copy(normalized);
    }
    return TokenizeCore(sourceInfo).ToImmutableArray();
}
/// <summary>
/// Builds a two-phase token from accumulated text, removing at most one
/// trailing newline before the token is created.
/// </summary>
/// <param name="parser">Parser passed through to the created token.</param>
/// <param name="rule">Rule associated with the created token.</param>
/// <param name="wrapParagraph">Whether the token should be wrapped as a paragraph.</param>
/// <param name="textContent">The accumulated text to wrap.</param>
/// <param name="si">Source info template; copied with the trimmed text.</param>
private static IMarkdownToken GroupTextTokens(IMarkdownParser parser, IMarkdownRule rule, bool wrapParagraph, StringBuffer textContent, SourceInfo si)
{
    if (textContent.EndsWith('\n'))
    {
        // Strip exactly one trailing newline.
        textContent = textContent.Substring(0, textContent.GetLength() - 1);
    }
    return CreateTwoPhaseToken(parser, rule, wrapParagraph, si.Copy(textContent.ToString()));
}
/// <summary>
/// Tokenizes the markdown in <paramref name="sourceInfo"/>, groups block-context
/// tokens into paragraphs, resolves two-phase tokens, runs token aggregators and
/// the customized rewriter, optionally fixes heading ids, validates the token
/// tree when a validator is configured, and renders the result.
/// </summary>
/// <param name="sourceInfo">The markdown source to process.</param>
/// <param name="context">Optional context to switch the parser into before tokenizing; ignored when null.</param>
/// <returns>The rendered output accumulated over all tokens.</returns>
public StringBuffer Mark(SourceInfo sourceInfo, IMarkdownContext context)
{
    var result = StringBuffer.Empty;
    var parser = Parser;
    if (context != null)
    {
        parser.SwitchContext(context);
    }
    var preprocessedSourceInfo = sourceInfo.Copy(Preprocess(sourceInfo.Markdown));
    var tokens = parser.Tokenize(preprocessedSourceInfo);
    if (parser.Context is MarkdownBlockContext)
    {
        tokens = TokenHelper.CreateParagraghs(
            parser,
            MarkdownParagraphBlockRule.Instance,
            tokens,
            true,
            preprocessedSourceInfo);
    }
    // Resolve two-phase tokens; the extraction loop is bounded by
    // MaxExtractCount + 1 iterations.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        new MarkdownRewriteEngine(
            this,
            MarkdownTokenRewriterFactory.Loop(
                MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, TwoPhaseBlockToken>(
                    (e, t) => t.Extract(parser)),
                MaxExtractCount + 1)));
    // Aggregate tokens.
    foreach (var agg in TokenAggregators)
    {
        tokens = RewriteTokens(
            tokens,
            sourceInfo.File,
            new MarkdownAggregateEngine(
                this,
                agg));
    }
    // Customized rewriter.
    tokens = RewriteTokens(
        tokens,
        sourceInfo.File,
        RewriteEngine);
    if (Options.ShouldFixId)
    {
        // Fix heading ids; the shared table tracks occurrences so duplicates
        // can be disambiguated.
        var idTable = new Dictionary<string, int>();
        tokens = RewriteTokens(
            tokens,
            sourceInfo.File,
            new MarkdownRewriteEngine(
                this,
                MarkdownTokenRewriterFactory.FromLambda<IMarkdownRewriteEngine, MarkdownHeadingBlockToken>(
                    (e, t) => t.RewriteId(idTable))));
    }
    // Null-conditional invocation reads the property once and is the
    // idiomatic replacement for the explicit null check.
    TokenTreeValidator?.Validate(tokens);
    var renderer = Renderer;
    foreach (var token in tokens)
    {
        result += renderer.Render(token);
    }
    return result;
}
/// <summary>
/// Creates a paragraph block token whose inline content is produced by
/// tokenizing <paramref name="content"/> with the engine's inline tokenizer.
/// </summary>
/// <param name="rule">Rule associated with the new token.</param>
/// <param name="engine">Parser supplying the context and inline tokenizer.</param>
/// <param name="content">Raw markdown content of the paragraph.</param>
/// <param name="sourceInfo">Source info for the token; also copied for the inline pass.</param>
public static MarkdownParagraphBlockToken Create(IMarkdownRule rule, MarkdownParser engine, string content, SourceInfo sourceInfo)
{
    var inlineTokens = engine.TokenizeInline(sourceInfo.Copy(content));
    return new MarkdownParagraphBlockToken(rule, engine.Context, inlineTokens, sourceInfo);
}
/// <summary>
/// Creates a two-phase block token whose second phase re-tokenizes
/// <paramref name="markdown"/> inline, producing either a paragraph or a
/// non-paragraph block token depending on <paramref name="wrapParagraph"/>.
/// </summary>
/// <param name="parser">Parser supplying the context captured by the token.</param>
/// <param name="rule">Rule associated with the new token.</param>
/// <param name="markdown">Raw markdown deferred to the second phase.</param>
/// <param name="wrapParagraph">True to produce a paragraph token in phase two.</param>
/// <param name="sourceInfo">Source info for the token and the deferred copy.</param>
private static TwoPhaseBlockToken CreateTwoPhaseToken(
    IMarkdownParser parser,
    IMarkdownRule rule,
    string markdown,
    bool wrapParagraph,
    SourceInfo sourceInfo)
{
    if (wrapParagraph)
    {
        return new TwoPhaseBlockToken(
            rule,
            parser.Context,
            sourceInfo,
            (p, t) => new MarkdownParagraphBlockToken(t.Rule, p.Context, p.TokenizeInline(sourceInfo.Copy(markdown)), t.SourceInfo));
    }
    return new TwoPhaseBlockToken(
        rule,
        parser.Context,
        sourceInfo,
        (p, t) => new MarkdownNonParagraphBlockToken(t.Rule, p.Context, p.TokenizeInline(sourceInfo.Copy(markdown)), t.SourceInfo));
}