// Runs every build step over every model, in parallel up to maxParallelism,
// folding per-step timings into a single aggregated performance scope.
private static void BuildArticle(HostService hostService, int maxParallelism)
{
    using (var aggregatedPerformanceScope = new AggregatedPerformanceScope())
    {
        hostService.Models.RunAll(
            m =>
            {
                // Scope log output to the file being built.
                using (new LoggerFileScope(m.LocalPathFromRoot))
                {
                    Logger.LogDiagnostic($"Processor {hostService.Processor.Name}: Building...");
                    BuildPhaseUtility.RunBuildSteps(
                        hostService.Processor.BuildSteps,
                        buildStep =>
                        {
                            Logger.LogDiagnostic($"Processor {hostService.Processor.Name}, step {buildStep.Name}: Building...");
                            // Diagnostic-level phase scope; timings feed the aggregated scope above.
                            using (new LoggerPhaseScope(buildStep.Name, LogLevel.Diagnostic, aggregatedPerformanceScope))
                            {
                                buildStep.Build(m, hostService);
                            }
                        });
                }
            },
            maxParallelism);
    }
}
// Runs the full pipeline (Prebuild -> Build -> Postbuild) for one processor's
// models, inside a phase scope named after the processor.
private static void BuildCore(HostService hostService, int maxParallelism)
{
    using (new LoggerPhaseScope(hostService.Processor.Name, true))
    {
        // Backfill display paths for models that were loaded without them.
        foreach (var m in hostService.Models)
        {
            if (m.LocalPathFromRepoRoot == null)
            {
                m.LocalPathFromRepoRoot = Path.Combine(m.BaseDir, m.File).ToDisplayPath();
            }
            if (m.LocalPathFromRoot == null)
            {
                m.LocalPathFromRoot = Path.Combine(m.BaseDir, m.File).ToDisplayPath();
            }
        }
        // Summarize the step chain (ordered by BuildOrder) for the info log line.
        var steps = string.Join("=>", hostService.Processor.BuildSteps.OrderBy(step => step.BuildOrder).Select(s => s.Name));
        Logger.LogInfo($"Building {hostService.Models.Count} file(s) in {hostService.Processor.Name}({steps})...");
        Logger.LogVerbose($"Processor {hostService.Processor.Name}: Prebuilding...");
        using (new LoggerPhaseScope("Prebuild", true))
        {
            Prebuild(hostService);
        }
        Logger.LogVerbose($"Processor {hostService.Processor.Name}: Building...");
        using (new LoggerPhaseScope("Build", true))
        {
            BuildArticle(hostService, maxParallelism);
        }
        Logger.LogVerbose($"Processor {hostService.Processor.Name}: Postbuilding...");
        using (new LoggerPhaseScope("Postbuild", true))
        {
            Postbuild(hostService);
        }
    }
}
// Creates a HostService for one processor: loads each input file into a
// model (Load may yield null for files it rejects) and wires the processor,
// template, validators and incremental-build flags.
public virtual HostService CreateHostService(
    DocumentBuildParameters parameters,
    TemplateProcessor templateProcessor,
    IMarkdownService markdownService,
    IEnumerable<IInputMetadataValidator> metadataValidator,
    IDocumentProcessor processor,
    IEnumerable<FileAndType> files)
{
    IEnumerable<FileModel> models;
    if (files == null)
    {
        models = new FileModel[0];
    }
    else
    {
        models = files
            .Select(file => Load(processor, parameters.Metadata, parameters.FileMetadata, file))
            .Where(model => model != null);
    }

    return new HostService(parameters.Files.DefaultBaseDir, models)
    {
        MarkdownService = markdownService,
        Processor = processor,
        Template = templateProcessor,
        Validators = metadataValidator?.ToImmutableList(),
        ShouldTraceIncrementalInfo = ShouldProcessorTraceInfo(processor),
        CanIncrementalBuild = CanProcessorIncremental(processor),
    };
}
// Warns about links pointing at files missing from the build's source set.
private void CheckFileLink(HostService hostService, SaveResult result)
{
    result.LinkToFiles.RunAll(fileLink =>
    {
        if (!hostService.SourceFiles.ContainsKey(fileLink))
        {
            // Prefer per-source reporting so each warning carries file/line info.
            if (result.FileLinkSources.TryGetValue(fileLink, out ImmutableList<LinkSourceInfo> list))
            {
                foreach (var fileLinkSourceFile in list)
                {
                    Logger.LogWarning(
                        $"Invalid file link:({fileLinkSourceFile.Target}{fileLinkSourceFile.Anchor}).",
                        null,
                        fileLinkSourceFile.SourceFile,
                        fileLinkSourceFile.LineNumber.ToString(),
                        WarningCodes.Build.InvalidFileLink);
                }
            }
            else
            {
                // No recorded source for this link; emit a generic warning.
                Logger.LogWarning($"Invalid file link:({fileLink}).", code: WarningCodes.Build.InvalidFileLink);
            }
        }
    });
}
// Creates a HostService for one processor via LoadModels, recording the
// files that failed to load so incremental reporting can account for them.
public virtual HostService CreateHostService(
    DocumentBuildParameters parameters,
    TemplateProcessor templateProcessor,
    IMarkdownService markdownService,
    IEnumerable<IInputMetadataValidator> metadataValidator,
    IDocumentProcessor processor,
    IEnumerable<FileAndType> files)
{
    var (models, invalidFiles) = LoadModels(files, parameters, processor);
    var hostService = new HostService(
        parameters.Files.DefaultBaseDir,
        models,
        parameters.VersionName,
        parameters.VersionDir,
        parameters.LruSize,
        parameters.GroupInfo,
        new BuildParameters(parameters.TagParameters))
    {
        MarkdownService = markdownService,
        Processor = processor,
        Template = templateProcessor,
        Validators = metadataValidator?.ToImmutableList(),
        ShouldTraceIncrementalInfo = ShouldProcessorTraceInfo(processor),
        CanIncrementalBuild = CanProcessorIncremental(processor),
        InvalidSourceFiles = invalidFiles.ToImmutableList(),
    };
    return (hostService);
}
// Wires build-wide lookups into the host, runs the build for this
// processor, then exports the resulting manifest items.
private IEnumerable<ManifestItemWithContext> BuildCore(HostService hostService, DocumentBuildContext context)
{
    // The two assignments are independent of each other.
    hostService.DependencyGraph = context.DependencyGraph;
    hostService.SourceFiles = context.AllSourceFiles;

    BuildCore(hostService, context.MaxParallelism);

    return ExportManifest(hostService, context);
}
// Creates a version-aware HostService for one processor; each input file is
// loaded into a model (nulls from Load are dropped).
public virtual HostService CreateHostService(
    DocumentBuildParameters parameters,
    TemplateProcessor templateProcessor,
    IMarkdownService markdownService,
    IEnumerable<IInputMetadataValidator> metadataValidator,
    IDocumentProcessor processor,
    IEnumerable<FileAndType> files)
{
    IEnumerable<FileModel> models;
    if (files == null)
    {
        models = Enumerable.Empty<FileModel>();
    }
    else
    {
        models = files
            .Select(file => Load(processor, parameters.Metadata, parameters.FileMetadata, file))
            .Where(model => model != null);
    }

    var hostService = new HostService(
        parameters.Files.DefaultBaseDir,
        models,
        parameters.VersionName,
        parameters.VersionDir)
    {
        MarkdownService = markdownService,
        Processor = processor,
        Template = templateProcessor,
        Validators = metadataValidator?.ToImmutableList(),
        ShouldTraceIncrementalInfo = ShouldProcessorTraceInfo(processor),
        CanIncrementalBuild = CanProcessorIncremental(processor),
    };
    return hostService;
}
// Emits a warning for every link whose target is not a known source file.
private static void CheckFileLink(HostService hostService, SaveResult result)
{
    result.LinkToFiles.RunAll(link =>
    {
        // Links resolving to a tracked source file are fine.
        if (hostService.SourceFiles.ContainsKey(link))
        {
            return;
        }
        Logger.LogWarning($"Invalid file link({link})");
    });
}
// Loads every enumerated file into a model, runs the processor's pipeline
// over them, and returns the built models.
public static ImmutableList<FileModel> Build(IDocumentProcessor processor, DocumentBuildParameters parameters)
{
    // Load may return null for files it rejects; drop those.
    var models = parameters.Files.EnumerateFiles()
        .Select(file => Load(processor, parameters.Metadata, parameters.FileMetadata, file))
        .Where(model => model != null);

    var hostService = new HostService(parameters.Files.DefaultBaseDir, models);
    BuildCore(processor, hostService, parameters.MaxParallelism);
    return hostService.Models;
}
// Records the model's output path and runs the post-save handlers before
// producing the manifest item for this model.
private InternalManifestItem HandleSaveResult(
    HostService hostService,
    FileModel model,
    SaveResult result)
{
    var workingFolderPath = ((RelativePath)model.File).GetPathFromWorkingFolder();
    Context.SetFilePath(model.Key, workingFolderPath);

    DocumentException.RunAll(
        () => CheckFileLink(model, hostService, result),
        () => HandleUids(result),
        () => RegisterXRefSpec(result));

    return GetManifestItem(model, result);
}
// Gives every build step a chance to post-process the complete model set.
private static void Postbuild(HostService hostService)
{
    RunBuildSteps(
        hostService.Processor.BuildSteps,
        step =>
        {
            Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Postprocessing...");
            using (new LoggerPhaseScope(step.Name))
            {
                step.Postbuild(hostService.Models, hostService);
            }
        });
}
// Reports which input files produced models (PreBuildBuild phase) and which
// were skipped (null phase), when incremental tracing is on.
private void PostCreate(HostService hostService, IEnumerable<FileAndType> files)
{
    using (new LoggerPhaseScope("ReportModelLoadInfo", true))
    {
        if (!hostService.ShouldTraceIncrementalInfo)
        {
            return;
        }

        var candidateFiles = files?.Select(f => f.File) ?? new string[0];
        var loadedFiles = hostService.Models.Select(model => model.FileAndType.File);

        // Candidates with no model were skipped; report them with a null phase.
        IncrementalContext.ReportModelLoadInfo(hostService, candidateFiles.Except(loadedFiles), null);
        IncrementalContext.ReportModelLoadInfo(hostService, loadedFiles, BuildPhase.PreBuildBuild);
    }
}
// Reports model-load outcomes for the Compile phase when incremental
// tracing is on; files that failed validation are excluded from "skipped".
private void PostCreate(HostService hostService, IEnumerable<FileAndType> files)
{
    using (new LoggerPhaseScope("ReportModelLoadInfo", LogLevel.Diagnostic))
    {
        if (!hostService.ShouldTraceIncrementalInfo)
        {
            return;
        }

        var candidates = files?.Select(f => f.File) ?? new string[0];
        var loaded = hostService.Models.Select(model => model.OriginalFileAndType.File);

        // Skipped = candidates that neither loaded nor were invalid sources.
        IncrementalContext.ReportModelLoadInfo(hostService, candidates.Except(loaded).Except(hostService.InvalidSourceFiles), null);
        IncrementalContext.ReportModelLoadInfo(hostService, loaded, BuildPhase.Compile);
    }
}
// Maps the model into the context's file map and runs the post-save
// handlers before producing the manifest item.
private ManifestItem HandleSaveResult(
    DocumentBuildContext context,
    HostService hostService,
    FileModel model,
    SaveResult result)
{
    var workingFolderPath = ((RelativePath)model.File).GetPathFromWorkingFolder();
    context.FileMap[model.Key] = workingFolderPath;

    DocumentException.RunAll(
        () => CheckFileLink(hostService, result),
        () => HandleUids(context, result),
        () => HandleToc(context, result),
        () => RegisterXRefSpec(context, result));

    return GetManifestItem(context, model, result);
}
// Yields files whose models were not loaded in this incremental build but
// still need log messages relayed, plus their include-dependency files.
// Note: may yield the same include file more than once (no de-duplication).
private IEnumerable<string> GetFilesToRelayMessages(HostService hs)
{
    foreach (var f in hs.GetUnloadedModelFiles(IncrementalContext))
    {
        yield return (f);
        // warnings from token file won't be delegated to article, so we need to add it manually
        var key = ((RelativePath)f).GetPathFromWorkingFolder();
        foreach (var item in CurrentBuildVersionInfo.Dependency.GetAllDependencyFrom(key))
        {
            if (item.Type == DependencyTypeName.Include)
            {
                yield return (((RelativePath)item.To).RemoveWorkingFolder());
            }
        }
    }
}
// Runs each step's Prebuild over the model set; when a step returns a
// different collection instance, the host reloads its models from it.
private static void Prebuild(HostService hostService)
{
    BuildPhaseUtility.RunBuildSteps(
        hostService.Processor.BuildSteps,
        step =>
        {
            Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Prebuilding...");
            using (new LoggerPhaseScope(step.Name, true))
            {
                var updated = step.Prebuild(hostService.Models, hostService);
                if (!ReferenceEquals(updated, hostService.Models))
                {
                    Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Reloading models...");
                    hostService.Reload(updated);
                }
            }
        });
}
// Runs each step's Prebuild over the model set; a step that returns a new
// collection instance causes the host to reload its models.
private static void Prebuild(HostService hostService)
{
    RunBuildSteps(
        hostService.Processor.BuildSteps,
        step =>
        {
            Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Preprocessing...");
            using (new LoggerPhaseScope(step.Name))
            {
                var updated = step.Prebuild(hostService.Models, hostService);
                if (!ReferenceEquals(updated, hostService.Models))
                {
                    Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Reloading models...");
                    hostService.Reload(updated);
                }
            }
        });
}
// Loads all enumerated files, builds them with the given processor and
// markdown service, and returns the resulting models.
public static ImmutableList<FileModel> Build(IDocumentProcessor processor, DocumentBuildParameters parameters, IMarkdownService markdownService)
{
    // Load may return null for rejected files; drop those.
    var models = parameters.Files.EnumerateFiles()
        .Select(file => Load(processor, parameters.Metadata, parameters.FileMetadata, file, false, null, null, null, null))
        .Where(model => model != null);

    var hostService = new HostService(parameters.Files.DefaultBaseDir, models)
    {
        Processor = processor,
        MarkdownService = markdownService,
        DependencyGraph = new DependencyGraph(),
    };
    BuildCore(hostService, parameters.MaxParallelism);
    return hostService.Models;
}
// Warns about links to files missing from the source set, unless a custom
// href generator can resolve the link to something valid.
private void CheckFileLink(FileModel model, HostService hostService, SaveResult result)
{
    result.LinkToFiles.RunAll(fileLink =>
    {
        if (!hostService.SourceFiles.ContainsKey(fileLink))
        {
            if (Context.ApplyTemplateSettings.HrefGenerator != null)
            {
                // Give the href generator a chance to resolve the link;
                // a non-null href means the link is not an error.
                var path = ((RelativePath)fileLink).RemoveWorkingFolder() - ((RelativePath)model.OriginalFileAndType.File);
                var fli = new FileLinkInfo
                {
                    FromFileInSource = model.OriginalFileAndType.File,
                    FromFileInDest = model.File,
                    ToFileInSource = ((RelativePath)fileLink).RemoveWorkingFolder().ToString(),
                    FileLinkInSource = path,
                    GroupInfo = Context.GroupInfo,
                };
                fli.Href = path.UrlEncode();
                if (Context.ApplyTemplateSettings.HrefGenerator.GenerateHref(fli) != null)
                {
                    return;
                }
            }
            // Prefer per-source reporting so each warning carries file/line info.
            if (result.FileLinkSources.TryGetValue(fileLink, out ImmutableList<LinkSourceInfo> list))
            {
                foreach (var fileLinkSourceFile in list)
                {
                    Logger.LogWarning(
                        $"Invalid file link:({fileLinkSourceFile.Target}{fileLinkSourceFile.Anchor}).",
                        null,
                        fileLinkSourceFile.SourceFile,
                        fileLinkSourceFile.LineNumber.ToString(),
                        WarningCodes.Build.InvalidFileLink);
                }
            }
            else
            {
                Logger.LogWarning($"Invalid file link:({fileLink}).", code: WarningCodes.Build.InvalidFileLink);
            }
        }
    });
}
// Saves every non-overwrite model through the processor and collects the
// resulting manifest items (with template context) for later apply.
private IEnumerable<ManifestItemWithContext> ExportManifest(HostService hostService)
{
    var manifestItems = new List<ManifestItemWithContext>();
    using (new LoggerPhaseScope("Save", LogLevel.Verbose))
    {
        hostService.Models.RunAll(m =>
        {
            // Overwrite documents are merged elsewhere; only real models are saved.
            if (m.Type != DocumentType.Overwrite)
            {
                using (new LoggerFileScope(m.LocalPathFromRoot))
                {
                    Logger.LogDiagnostic($"Processor {hostService.Processor.Name}: Saving...");
                    m.BaseDir = Context.BuildOutputFolder;
                    // Remap the file into its destination directory when they differ.
                    if (m.FileAndType.SourceDir != m.FileAndType.DestinationDir)
                    {
                        m.File = (RelativePath)m.FileAndType.DestinationDir + (((RelativePath)m.File) - (RelativePath)m.FileAndType.SourceDir);
                    }
                    // Version-scoped output goes under the version folder (if any).
                    m.File = Path.Combine(Context.VersionFolder ?? string.Empty, m.File);
                    var result = hostService.Processor.Save(m);
                    if (result != null)
                    {
                        // Apply the template-determined file extension, when one exists.
                        string extension = string.Empty;
                        if (hostService.Template != null)
                        {
                            if (hostService.Template.TryGetFileExtension(result.DocumentType, out extension))
                            {
                                m.File = result.FileWithoutExtension + extension;
                            }
                        }
                        var item = HandleSaveResult(hostService, m, result);
                        item.Extension = extension;
                        manifestItems.Add(new ManifestItemWithContext(item, m, hostService.Processor, hostService.Template?.GetTemplateBundle(result.DocumentType)));
                    }
                }
            }
        });
    }
    return (manifestItems);
}
// Reports model-load outcomes for the compile phase, including total and
// skipped file counts for the processor's incremental-info summary.
private void PostCreate(HostService hostService, IEnumerable<FileAndType> files)
{
    using (new LoggerPhaseScope("ReportModelLoadInfo", LogLevel.Diagnostic))
    {
        if (!hostService.ShouldTraceIncrementalInfo)
        {
            return;
        }

        var inputFiles = files?.Select(f => f.File)?.ToList() ?? new List<string>();
        var loadedFiles = hostService.Models.Select(model => model.OriginalFileAndType.File).ToList();
        // Skipped = inputs that neither loaded nor failed validation.
        var skippedFiles = inputFiles.Except(loadedFiles.Union(hostService.InvalidSourceFiles)).ToList();

        IncrementalContext.ReportModelLoadInfo(hostService, skippedFiles, null);
        IncrementalContext.ReportModelLoadInfo(hostService, loadedFiles, BuildPhase.Compile);
        IncrementalContext.IncrementalInfo.ReportProcessorFileCount(hostService.Processor.Name, inputFiles.Count, skippedFiles.Count);
        Logger.LogVerbose($"Processor {hostService.Processor.Name} (compile phase), total file count: {inputFiles.Count}, skipped file count: {skippedFiles.Count}.");
    }
}
// Saves every non-overwrite model through the processor and collects the
// resulting manifest items; uses the model's PathRewriter (when present)
// to remap output paths.
private IEnumerable<ManifestItemWithContext> ExportManifest(HostService hostService, DocumentBuildContext context)
{
    var manifestItems = new List<ManifestItemWithContext>();
    using (new LoggerPhaseScope("Save", true))
    {
        hostService.Models.RunAll(m =>
        {
            // Overwrite documents are merged elsewhere; only real models are saved.
            if (m.Type != DocumentType.Overwrite)
            {
                using (new LoggerFileScope(m.LocalPathFromRepoRoot))
                {
                    Logger.LogDiagnostic($"Processor {hostService.Processor.Name}: Saving...");
                    m.BaseDir = context.BuildOutputFolder;
                    if (m.PathRewriter != null)
                    {
                        m.File = m.PathRewriter(m.File);
                    }
                    var result = hostService.Processor.Save(m);
                    if (result != null)
                    {
                        // Apply the template-determined file extension, when one exists.
                        string extension = string.Empty;
                        if (hostService.Template != null)
                        {
                            if (hostService.Template.TryGetFileExtension(result.DocumentType, out extension))
                            {
                                m.File = result.FileWithoutExtension + extension;
                            }
                        }
                        var item = HandleSaveResult(context, hostService, m, result);
                        item.Extension = extension;
                        manifestItems.Add(new ManifestItemWithContext(item, m, hostService.Processor, hostService.Template?.GetTemplateBundle(result.DocumentType)));
                    }
                }
            }
        });
    }
    return (manifestItems);
}
// Registers, on the host's dependency graph, every dependency type exposed
// by build steps that support incremental builds.
private static void RegisterDependencyType(HostService hostService)
{
    RunBuildSteps(
        hostService.Processor.BuildSteps,
        buildStep =>
        {
            // Pattern match once instead of the original `is` check followed by
            // a redundant `as` cast (double type test).
            if (buildStep is ISupportIncrementalBuildStep incrementalStep)
            {
                Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {buildStep.Name}: Registering DependencyType...");
                using (new LoggerPhaseScope(buildStep.Name, true))
                {
                    var types = incrementalStep.GetDependencyTypesToRegister();
                    // Steps may legitimately have nothing to register.
                    if (types == null)
                    {
                        return;
                    }
                    hostService.DependencyGraph.RegisterDependencyType(types);
                }
            }
        });
}
// Runs every build step over every model, in parallel up to maxParallelism.
private static void BuildArticle(HostService hostService, int maxParallelism)
{
    hostService.Models.RunAll(
        model =>
        {
            // Scope log output to the file being built.
            using (new LoggerFileScope(model.LocalPathFromRepoRoot))
            {
                Logger.LogVerbose($"Processor {hostService.Processor.Name}: Building...");
                RunBuildSteps(
                    hostService.Processor.BuildSteps,
                    step =>
                    {
                        Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Building...");
                        using (new LoggerPhaseScope(step.Name))
                        {
                            step.Build(model, hostService);
                        }
                    });
            }
        },
        maxParallelism);
}
// Orchestrates Prebuild -> BuildArticle -> Postbuild for one processor
// (overload that takes the processor explicitly).
private static void BuildCore(IDocumentProcessor processor, HostService hostService, int maxParallelism)
{
    Logger.LogVerbose($"Processor {processor.Name}: Loading document...");
    using (new LoggerPhaseScope(processor.Name))
    {
        // Backfill display paths for models loaded without one.
        foreach (var m in hostService.Models)
        {
            if (m.LocalPathFromRepoRoot == null)
            {
                m.LocalPathFromRepoRoot = Path.Combine(m.BaseDir, m.File).ToDisplayPath();
            }
        }
        // Summarize the step chain (ordered by BuildOrder) for the info log line.
        var steps = string.Join("=>", processor.BuildSteps.OrderBy(step => step.BuildOrder).Select(s => s.Name));
        Logger.LogInfo($"Building {hostService.Models.Count} file(s) in {processor.Name}({steps})...");
        Logger.LogVerbose($"Processor {processor.Name}: Preprocessing...");
        Prebuild(processor, hostService);
        Logger.LogVerbose($"Processor {processor.Name}: Building...");
        BuildArticle(processor, hostService, maxParallelism);
        Logger.LogVerbose($"Processor {processor.Name}: Postprocessing...");
        Postbuild(processor, hostService);
        Logger.LogVerbose($"Processor {processor.Name}: Generating manifest...");
    }
}
// Partitions the input files among processors (highest processing priority
// wins) and creates one HostService per processor with its loaded models.
private IEnumerable<HostService> GetInnerContexts(
    DocumentBuildParameters parameters,
    IEnumerable<IDocumentProcessor> processors,
    TemplateProcessor templateProcessor,
    IMarkdownService markdownService,
    DocumentBuildContext context)
{
    // For each file, take the processor group with the highest priority;
    // files no processor supports end up grouped under a null key.
    var k = from fileItem in (
                from file in parameters.Files.EnumerateFiles()
                from p in (from processor in processors
                           let priority = processor.GetProcessingPriority(file)
                           where priority != ProcessingPriority.NotSupported
                           group processor by priority into ps
                           orderby ps.Key descending
                           select ps.ToList()).FirstOrDefault() ?? new List<IDocumentProcessor> { null }
                select new { file, p })
            group fileItem by fileItem.p;
    var toHandleItems = k.Where(s => s.Key != null);
    var notToHandleItems = k.Where(s => s.Key == null);
    // Warn once, listing every file nobody can process.
    foreach (var item in notToHandleItems)
    {
        var sb = new StringBuilder();
        sb.AppendLine("Cannot handle following file:");
        foreach (var f in item)
        {
            sb.Append("\t");
            sb.AppendLine(f.file.File);
        }
        Logger.LogWarning(sb.ToString());
    }
    // todo : revert until PreProcessor ready
    foreach (var pair in (from processor in processors
                          join item in toHandleItems on processor equals item.Key into g
                          from item in g.DefaultIfEmpty()
                          select new { processor, item, }).AsParallel().WithDegreeOfParallelism(parameters.MaxParallelism))
    {
        var incrementalContext = context.IncrementalBuildContext;
        var processorSupportIncremental = IsProcessorSupportIncremental(pair.processor);
        bool processorCanIncremental = processorSupportIncremental;
        if (processorSupportIncremental)
        {
            incrementalContext.CreateProcessorInfo(pair.processor);
            processorCanIncremental = incrementalContext.CanProcessorIncremental(pair.processor);
        }
        var hostService = new HostService(
            parameters.Files.DefaultBaseDir,
            pair.item == null ?
                new FileModel[0] :
                from file in pair.item
                select Load(pair.processor, parameters.Metadata, parameters.FileMetadata, file.file, processorCanIncremental, context) into model
                where model != null
                select model)
        {
            MarkdownService = markdownService,
            Processor = pair.processor,
            Template = templateProcessor,
            Validators = MetadataValidators.ToImmutableList(),
            ShouldTraceIncrementalInfo = processorSupportIncremental,
            CanIncrementalBuild = processorCanIncremental,
        };
        if (ShouldTraceIncrementalInfo)
        {
            using (new LoggerPhaseScope("ReportModelLoadInfo", true))
            {
                // Files that yielded no model are reported with a null phase (skipped).
                var allFiles = pair.item?.Select(f => f.file.File) ?? new string[0];
                var loadedFiles = hostService.Models.Select(m => m.FileAndType.File);
                incrementalContext.ReportModelLoadInfo(hostService, allFiles.Except(loadedFiles), null);
                incrementalContext.ReportModelLoadInfo(hostService, loadedFiles, BuildPhase.PreBuild);
            }
        }
        yield return (hostService);
    }
}
// Collects the files whose models were not loaded this build but still need
// log messages relayed, plus each file's include dependencies (de-duplicated).
private static IEnumerable<string> GetFilesToRelayMessages(IncrementalBuildContext context, HostService hs)
{
    var relayFiles = new HashSet<string>();
    var versionInfo = context.CurrentBuildVersionInfo;
    foreach (var file in hs.GetUnloadedModelFiles(context))
    {
        relayFiles.Add(file);
        // warnings from token file won't be delegated to article, so we need to add it manually
        var workingFolderKey = ((RelativePath)file).GetPathFromWorkingFolder();
        foreach (var dependency in versionInfo.Dependency.GetAllIncludeDependencyFrom(workingFolderKey))
        {
            relayFiles.Add(((RelativePath)dependency).RemoveWorkingFolder());
        }
    }
    return relayFiles;
}
// Loads all enumerated files, builds them with the given processor and
// markdown service, and returns the resulting models.
public static ImmutableList<FileModel> Build(IDocumentProcessor processor, DocumentBuildParameters parameters, IMarkdownService markdownService)
{
    // Load may return null for rejected files; drop those.
    var models = parameters.Files.EnumerateFiles()
        .Select(file => Load(processor, parameters.Metadata, parameters.FileMetadata, file, false, null))
        .Where(model => model != null);

    var hostService = new HostService(parameters.Files.DefaultBaseDir, models)
    {
        Processor = processor,
        MarkdownService = markdownService,
        DependencyGraph = new DependencyGraph(),
    };
    BuildCore(new List<HostService> { hostService }, parameters.MaxParallelism, null, null, null);
    return hostService.Models;
}
// Maps the model into the context's file map and runs the post-save
// handlers before producing the internal manifest item.
private InternalManifestItem HandleSaveResult(
    DocumentBuildContext context,
    HostService hostService,
    FileModel model,
    SaveResult result)
{
    var workingFolderPath = ((TypeForwardedToRelativePath)model.File).GetPathFromWorkingFolder();
    context.FileMap[model.Key] = workingFolderPath;

    DocumentException.RunAll(
        () => CheckFileLink(hostService, result),
        () => HandleUids(context, result),
        () => HandleToc(context, result),
        () => RegisterXRefSpec(context, result));

    return GetManifestItem(context, model, result);
}
// Disposes every model owned by the host once the build is done with them.
private void Cleanup(HostService hostService)
{
    hostService.Models.RunAll(model => model.Dispose());
}
// Releases the per-model resources held by this host's models.
private void Cleanup(HostService hostService)
{
    hostService.Models.RunAll(fileModel => fileModel.Dispose());
}
// Plain data holder capturing the three collaborators of an inner build.
public InnerBuildContext(HostService hostService, IDocumentProcessor processor, TemplateProcessor templateProcessor)
{
    Processor = processor;
    TemplateProcessor = templateProcessor;
    HostService = hostService;
}
// Partitions the input files among processors (highest processing priority
// wins) and creates one HostService per processor with its loaded models.
private IEnumerable<HostService> GetInnerContexts(
    DocumentBuildParameters parameters,
    IEnumerable<IDocumentProcessor> processors,
    TemplateProcessor templateProcessor,
    IMarkdownService markdownService,
    DocumentBuildContext context)
{
    // For each file, take the processor group with the highest priority;
    // files no processor supports end up grouped under a null key.
    var k = from fileItem in (
                from file in parameters.Files.EnumerateFiles()
                from p in (from processor in processors
                           let priority = processor.GetProcessingPriority(file)
                           where priority != ProcessingPriority.NotSupported
                           group processor by priority into ps
                           orderby ps.Key descending
                           select ps.ToList()).FirstOrDefault() ?? new List<IDocumentProcessor> { null }
                select new { file, p })
            group fileItem by fileItem.p;
    var toHandleItems = k.Where(s => s.Key != null);
    var notToHandleItems = k.Where(s => s.Key == null);
    // Warn once, listing every file nobody can process.
    foreach (var item in notToHandleItems)
    {
        var sb = new StringBuilder();
        sb.AppendLine("Cannot handle following file:");
        foreach (var f in item)
        {
            sb.Append("\t");
            sb.AppendLine(f.file.File);
        }
        Logger.LogWarning(sb.ToString());
    }
    // todo : revert until PreProcessor ready
    foreach (var pair in (from processor in processors
                          join item in toHandleItems on processor equals item.Key into g
                          from item in g.DefaultIfEmpty()
                          select new { processor, item, }).AsParallel().WithDegreeOfParallelism(parameters.MaxParallelism))
    {
        var incrementalContext = context.IncrementalBuildContext;
        var processorSupportIncremental = IsProcessorSupportIncremental(pair.processor);
        bool processorCanIncremental = processorSupportIncremental;
        if (processorSupportIncremental)
        {
            incrementalContext.CreateProcessorInfo(pair.processor);
            processorCanIncremental = incrementalContext.CanProcessorIncremental(pair.processor);
        }
        var hostService = new HostService(
            parameters.Files.DefaultBaseDir,
            pair.item == null ?
                new FileModel[0] :
                from file in pair.item
                select Load(pair.processor, parameters.Metadata, parameters.FileMetadata, file.file, processorCanIncremental, context) into model
                where model != null
                select model)
        {
            MarkdownService = markdownService,
            Processor = pair.processor,
            Template = templateProcessor,
            Validators = MetadataValidators.ToImmutableList(),
            ShouldTraceIncrementalInfo = processorSupportIncremental,
            CanIncrementalBuild = processorCanIncremental,
        };
        if (ShouldTraceIncrementalInfo)
        {
            using (new LoggerPhaseScope("ReportModelLoadInfo", true))
            {
                // Files that yielded no model are reported with a null phase (skipped).
                var allFiles = pair.item?.Select(f => f.file.File) ?? new string[0];
                var loadedFiles = hostService.Models.Select(m => m.FileAndType.File);
                incrementalContext.ReportModelLoadInfo(hostService, allFiles.Except(loadedFiles), null);
                incrementalContext.ReportModelLoadInfo(hostService, loadedFiles, BuildPhase.PreBuild);
            }
        }
        yield return hostService;
    }
}
// Warns about links pointing at files missing from the build's source set.
private static void CheckFileLink(HostService hostService, SaveResult result)
{
    result.LinkToFiles.RunAll(fileLink =>
    {
        if (!hostService.SourceFiles.ContainsKey(fileLink))
        {
            // Inline `out var` (C# 7) instead of a pre-declared out variable,
            // consistent with the TryGetValue usage elsewhere in this file.
            if (result.FileLinkSources.TryGetValue(fileLink, out var list))
            {
                // Report each source location that produced the dead link.
                foreach (var fileLinkSourceFile in list)
                {
                    Logger.LogWarning($"Invalid file link:({fileLinkSourceFile.Target}{fileLinkSourceFile.Anchor}).", null, fileLinkSourceFile.SourceFile, fileLinkSourceFile.LineNumber.ToString());
                }
            }
            else
            {
                // No recorded source for this link; emit a generic warning.
                Logger.LogWarning($"Invalid file link:({fileLink}).");
            }
        }
    });
}
// When incremental tracing is enabled, reports which input files produced
// models (PreBuildBuild phase) and which were skipped (null phase).
private void PostCreate(HostService hostService, IEnumerable<FileAndType> files)
{
    using (new LoggerPhaseScope("ReportModelLoadInfo", true))
    {
        if (!hostService.ShouldTraceIncrementalInfo)
        {
            return;
        }

        var inputFiles = files?.Select(f => f.File) ?? new string[0];
        var builtFiles = hostService.Models.Select(model => model.FileAndType.File);

        IncrementalContext.ReportModelLoadInfo(hostService, inputFiles.Except(builtFiles), null);
        IncrementalContext.ReportModelLoadInfo(hostService, builtFiles, BuildPhase.PreBuildBuild);
    }
}
// Registers, on the host's dependency graph, every dependency type exposed
// by build steps that support incremental builds.
private static void RegisterDependencyType(HostService hostService)
{
    RunBuildSteps(
        hostService.Processor.BuildSteps,
        buildStep =>
        {
            // Pattern match once instead of the original `is` check followed by
            // a redundant `as` cast (double type test).
            if (buildStep is ISupportIncrementalBuildStep incrementalStep)
            {
                Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {buildStep.Name}: Registering DependencyType...");
                using (new LoggerPhaseScope(buildStep.Name, true))
                {
                    var types = incrementalStep.GetDependencyTypesToRegister();
                    // Steps may legitimately have nothing to register.
                    if (types == null)
                    {
                        return;
                    }
                    hostService.DependencyGraph.RegisterDependencyType(types);
                }
            }
        });
}
// Runs every build step over every model, in parallel up to maxParallelism,
// with diagnostic-level per-file and per-step logging.
private static void BuildArticle(HostService hostService, int maxParallelism)
{
    hostService.Models.RunAll(
        model =>
        {
            using (new LoggerFileScope(model.LocalPathFromRoot))
            {
                Logger.LogDiagnostic($"Processor {hostService.Processor.Name}: Building...");
                RunBuildSteps(
                    hostService.Processor.BuildSteps,
                    step =>
                    {
                        Logger.LogDiagnostic($"Processor {hostService.Processor.Name}, step {step.Name}: Building...");
                        using (new LoggerPhaseScope(step.Name, true))
                        {
                            step.Build(model, hostService);
                        }
                    });
            }
        },
        maxParallelism);
}
// Saves every non-overwrite model through the processor and collects the
// resulting manifest items (with template context) for later apply.
private IEnumerable<ManifestItemWithContext> ExportManifest(HostService hostService, DocumentBuildContext context)
{
    var manifestItems = new List<ManifestItemWithContext>();
    using (new LoggerPhaseScope("Save", true))
    {
        hostService.Models.RunAll(m =>
        {
            // Overwrite documents are merged elsewhere; only real models are saved.
            if (m.Type != DocumentType.Overwrite)
            {
                using (new LoggerFileScope(m.LocalPathFromRoot))
                {
                    Logger.LogDiagnostic($"Processor {hostService.Processor.Name}: Saving...");
                    m.BaseDir = context.BuildOutputFolder;
                    // Remap the file into its destination directory when they differ.
                    if (m.FileAndType.SourceDir != m.FileAndType.DestinationDir)
                    {
                        m.File = (TypeForwardedToRelativePath)m.FileAndType.DestinationDir + (((TypeForwardedToRelativePath)m.File) - (TypeForwardedToRelativePath)m.FileAndType.SourceDir);
                    }
                    var result = hostService.Processor.Save(m);
                    if (result != null)
                    {
                        // Apply the template-determined file extension, when one exists.
                        string extension = string.Empty;
                        if (hostService.Template != null)
                        {
                            if (hostService.Template.TryGetFileExtension(result.DocumentType, out extension))
                            {
                                m.File = result.FileWithoutExtension + extension;
                            }
                        }
                        var item = HandleSaveResult(context, hostService, m, result);
                        item.Extension = extension;
                        manifestItems.Add(new ManifestItemWithContext(item, m, hostService.Processor, hostService.Template?.GetTemplateBundle(result.DocumentType)));
                    }
                }
            }
        });
    }
    return manifestItems;
}
// Runs each build step's Postbuild hook over the complete model set.
private static void Postbuild(HostService hostService)
{
    RunBuildSteps(
        hostService.Processor.BuildSteps,
        step =>
        {
            Logger.LogVerbose($"Processor {hostService.Processor.Name}, step {step.Name}: Postbuilding...");
            using (new LoggerPhaseScope(step.Name, true))
            {
                step.Postbuild(hostService.Models, hostService);
            }
        });
}
// Exercises MarkdownReader.ReadMarkdownAsOverwrite end-to-end against real
// files on disk: single overwrite document, two yaml headers, invalid
// second header, bare-LF line endings, and link/xref extraction.
// NOTE(review): the verbatim string literals below appear to have lost
// their internal line breaks in this copy of the file (the
// Regex.Replace("\r?\n", ...) normalization and the sourcestartlinenumber
// assertions presuppose multi-line content) -- confirm the literal text
// against version control before editing these fixtures.
public void TestReadMarkdownAsOverwrite()
{
    var content = @"--- uid: Test remarks: Hello --- This is unit test!";
    // Normalize to CRLF so the reader sees consistent line endings.
    content = Regex.Replace(content, "\r?\n", "\r\n");
    var html = DocfxFlavoredMarked.Markup(content);
    var baseDir = Directory.GetCurrentDirectory();
    var fileName = "ut_ReadMarkdownAsOverwrite.md";
    var fullPath = Path.Combine(baseDir, fileName);
    File.WriteAllText(fullPath, content);
    // Minimal host: no models, DFM markdown service, empty source-file map.
    var host = new HostService(null, Enumerable.Empty<FileModel>())
    {
        MarkdownService = new DfmServiceProvider().CreateMarkdownService(new MarkdownServiceParameters {BasePath = string.Empty}),
        SourceFiles = ImmutableDictionary.Create<string, FileAndType>()
    };
    var ft = new FileAndType(baseDir, fileName, DocumentType.Overwrite);
    var results = MarkdownReader.ReadMarkdownAsOverwrite(host, ft).ToList();
    Assert.NotNull(results);
    Assert.Equal(1, results.Count);
    Assert.Equal("Test", results[0].Uid);
    Assert.Equal("Hello", results[0].Metadata["remarks"]);
    Assert.Equal("<p sourcefile=\"ut_ReadMarkdownAsOverwrite.md\" sourcestartlinenumber=\"6\" sourceendlinenumber=\"6\">This is unit test!</p>\n", results[0].Conceptual);
    File.Delete(fileName);

    // Test conceptual content between two yamlheader
    content = @"--- uid: Test1 remarks: Hello --- This is unit test! 
--- uid: Test2 --- ";
    content = Regex.Replace(content, "\r?\n", "\r\n");
    html = DocfxFlavoredMarked.Markup(content);
    File.WriteAllText(fileName, content);
    results = MarkdownReader.ReadMarkdownAsOverwrite(host, ft).ToList();
    Assert.NotNull(results);
    Assert.Equal(2, results.Count);
    Assert.Equal("Test1", results[0].Uid);
    Assert.Equal("Test2", results[1].Uid);
    Assert.Equal("Hello", results[0].Metadata["remarks"]);
    Assert.Equal("<p sourcefile=\"ut_ReadMarkdownAsOverwrite.md\" sourcestartlinenumber=\"5\" sourceendlinenumber=\"5\">This is unit test!</p>\n", results[0].Conceptual);
    Assert.Equal(string.Empty, results[1].Conceptual);
    File.Delete(fileName);

    //invalid yamlheader is not supported
    content = @"--- uid: Test1 remarks: Hello --- This is unit test! --- uid: Test2 --- ";
    content = Regex.Replace(content, "\r?\n", "\r\n");
    html = DocfxFlavoredMarked.Markup(content);
    File.WriteAllText(fileName, content);
    results = MarkdownReader.ReadMarkdownAsOverwrite(host, ft).ToList();
    Assert.NotNull(results);
    Assert.Equal(1, results.Count);
    Assert.Equal("Test1", results[0].Uid);
    Assert.Equal("Hello", results[0].Metadata["remarks"]);
    // The malformed second header is treated as conceptual content (setext h2s).
    Assert.Equal("<h2 id=\"this-is-unit-test\" sourcefile=\"ut_ReadMarkdownAsOverwrite.md\" sourcestartlinenumber=\"5\" sourceendlinenumber=\"6\">This is unit test!</h2>\n<h2 id=\"uid-test2\" sourcefile=\"ut_ReadMarkdownAsOverwrite.md\" sourcestartlinenumber=\"7\" sourceendlinenumber=\"8\">uid: Test2</h2>\n", results[0].Conceptual);
    File.Delete(fileName);

    // Test conceptual content with extra empty line between two yamlheader
    content = @"--- uid: Test1 remarks: Hello --- This is unit test! 
--- uid: Test2 --- ";
    content = Regex.Replace(content, "\r?\n", "\r\n");
    html = DocfxFlavoredMarked.Markup(content);
    File.WriteAllText(fileName, content);
    results = MarkdownReader.ReadMarkdownAsOverwrite(host, ft).ToList();
    Assert.NotNull(results);
    Assert.Equal(2, results.Count);
    Assert.Equal("Test1", results[0].Uid);
    Assert.Equal("Test2", results[1].Uid);
    Assert.Equal("Hello", results[0].Metadata["remarks"]);
    Assert.Equal("<p sourcefile=\"ut_ReadMarkdownAsOverwrite.md\" sourcestartlinenumber=\"7\" sourceendlinenumber=\"7\">This is unit test!</p>\n", results[0].Conceptual);
    Assert.Equal(string.Empty, results[1].Conceptual);
    File.Delete(fileName);

    // Test different line ending
    content = "---\nuid: Test\nremarks: Hello\n---\nThis is unit test!\n";
    html = DocfxFlavoredMarked.Markup(content);
    File.WriteAllText(fileName, content);
    results = MarkdownReader.ReadMarkdownAsOverwrite(host, ft).ToList();
    Assert.NotNull(results);
    Assert.Equal(1, results.Count);
    Assert.Equal("Test", results[0].Uid);
    Assert.Equal("Hello", results[0].Metadata["remarks"]);
    Assert.Equal("<p sourcefile=\"ut_ReadMarkdownAsOverwrite.md\" sourcestartlinenumber=\"5\" sourceendlinenumber=\"5\">This is unit test!</p>\n", results[0].Conceptual);
    File.Delete(fileName);

    // Test link to files and Uids in overwrite document
    content = @"--- uid: Test remarks: Hello --- @NotExistUid [Not exist link](link.md) This is unit test!";
    content = Regex.Replace(content, "\r?\n", "\r\n");
    html = DocfxFlavoredMarked.Markup(content);
    File.WriteAllText(fileName, content);
    results = MarkdownReader.ReadMarkdownAsOverwrite(host, ft).ToList();
    Assert.NotNull(results);
    Assert.Equal(1, results.Count);
    Assert.Equal("Test", results[0].Uid);
    Assert.Equal("Hello", results[0].Metadata["remarks"]);
    // Links and xrefs inside the overwrite body must be surfaced on the model.
    Assert.Equal(1, results[0].LinkToFiles.Count);
    Assert.Equal("~/link.md", results[0].LinkToFiles.ElementAt(0));
    Assert.Equal(1, results[0].LinkToUids.Count);
    Assert.Equal("NotExistUid", results[0].LinkToUids.ElementAt(0));
    Assert.Equal(@"<p 
sourcefile=""ut_ReadMarkdownAsOverwrite.md"" sourcestartlinenumber=""5"" sourceendlinenumber=""5""><xref href=""NotExistUid"" data-throw-if-not-resolved=""False"" data-raw-source=""@NotExistUid"" sourcefile=""ut_ReadMarkdownAsOverwrite.md"" sourcestartlinenumber=""5"" sourceendlinenumber=""5""></xref></p> <p sourcefile=""ut_ReadMarkdownAsOverwrite.md"" sourcestartlinenumber=""7"" sourceendlinenumber=""7""><a href=""link.md"" data-raw-source=""[Not exist link](link.md)"" sourcefile=""ut_ReadMarkdownAsOverwrite.md"" sourcestartlinenumber=""7"" sourceendlinenumber=""7"">Not exist link</a></p> <p sourcefile=""ut_ReadMarkdownAsOverwrite.md"" sourcestartlinenumber=""9"" sourceendlinenumber=""9"">This is unit test!</p> ".Replace("\r\n", "\n"), results[0].Conceptual);
    File.Delete(fileName);
}