public void AddLink(string type, string path, string url = null)
{
    if (type == null)
    {
        throw new ArgumentNullException(nameof(type));
    }
    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    if (!UPath.TryParse(path, out _))
    {
        throw new ArgumentException($"Invalid path [{path}]", nameof(path));
    }

    if (url != null && !UPath.TryParse(url, out _))
    {
        throw new ArgumentException($"Invalid url [{url}]", nameof(url));
    }

    if (path.Contains("*") && (url == null || !url.EndsWith("/")))
    {
        throw new ArgumentException($"Invalid url [{url}] for path [{path}]. The url must end with a `/` if the path contains a wildcard.", nameof(url));
    }

    var link = new BundleLink(this, type, path, url);
    Links.Add(link);
}
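For reference, a minimal usage sketch of the public AddLink above. The bundle instance, link types, and concrete paths are hypothetical; the point is the contract enforced by the wildcard check: a path containing `*` must be paired with a url ending in `/`.

// Hypothetical calls against a BundleObject instance named `bundle`.
bundle.AddLink("js", "/modules/mylib/mylib.min.js");        // plain path, url is optional and resolved later
bundle.AddLink("css", "/modules/theme/css/*", "/css/");     // wildcard path: url must end with `/`
// bundle.AddLink("css", "/modules/theme/css/*", "/css/x"); // would throw ArgumentException (url lacks trailing `/`)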
protected override void ValidateItem(object item)
{
    if (item is string str)
    {
        if (!UPath.TryParse(str, out var path))
        {
            throw new ArgumentException($"Invalid path `{str}`. The path is malformed.", nameof(item));
        }
        if (!path.IsAbsolute)
        {
            throw new ArgumentException($"Invalid path `{str}`. Expecting an absolute path.", nameof(item));
        }
    }
    else
    {
        throw new ArgumentException($"Invalid path. Expecting a string instead of `{item.GetType().FullName}`.", nameof(item));
    }
}
private void AddLink(string kind, object resourceOrPath, string path, string url = null)
{
    if (resourceOrPath == null)
    {
        throw new ArgumentNullException(nameof(resourceOrPath));
    }

    if (resourceOrPath is ResourceObject resource)
    {
        if (path != null)
        {
            if (!UPath.TryParse(path, out var relativePath))
            {
                throw new ArgumentException($"Invalid path {path}.", nameof(path));
            }
            path = (string)(resource.Path / relativePath.ToRelative());
        }
        else
        {
            path = resource["main"]?.ToString();
            if (path == null)
            {
                throw new ArgumentNullException(nameof(path), "path cannot be null with a resource");
            }
        }
    }
    else if (resourceOrPath is string str)
    {
        if (path != null)
        {
            throw new ArgumentException("Parameter must be null if first argument is already a path.", nameof(path));
        }
        path = str;
    }
    else
    {
        throw new ArgumentException($"Invalid parameter type ({resourceOrPath.GetType()}) for {kind} function.", nameof(resourceOrPath));
    }

    AddLink(kind, path, url);
}
public override void Process(ProcessingStage stage)
{
    if (stage == ProcessingStage.BeforeLoadingContent)
    {
        if (!Plugin.Enable)
        {
            return;
        }

        if (Plugin.Url == null || !UPath.TryParse(Plugin.Url, out _outputUrl) || !_outputUrl.IsAbsolute)
        {
            Site.Error($"Invalid url `{Plugin.Url}` declared for search. Search will not be generated.");
            return;
        }

        _excludes.Clear();

        // Exclude any files that are globally excluded
        foreach (var excludeItem in Plugin.Excludes)
        {
            if (excludeItem is string str && UPath.TryParse(str, out var excludePath) && excludePath.IsAbsolute)
            {
                var searchPattern = excludePath.SearchPattern();
                _excludes.Add(searchPattern);
            }
        }

        try
        {
            Initialize();
            _isInitialized = true;
        }
        catch (Exception ex)
        {
            Site.Error($"Unable to initialize search processor `{Name}`. Reason: {ex.Message}");
        }
    }
    else if (stage == ProcessingStage.BeforeProcessingContent)
    {
        Terminate();
    }
}
public void TestNormalize(string pathAsText, string expectedResult)
{
    var path = new UPath(pathAsText);
    Assert.Equal(expectedResult, path.FullName);

    // Check Equatable
    var expectedPath = new UPath(expectedResult);
    Assert.Equal(expectedPath, path);
    Assert.True(expectedPath.Equals((object)path));
    Assert.Equal(expectedPath.GetHashCode(), path.GetHashCode());
    Assert.True(path == expectedPath);
    Assert.False(path != expectedPath);

    // Check TryParse
    Assert.True(UPath.TryParse(path.FullName, out var result));
}
public void ParsePath(string path, UPathParseMode parseMode, string expectedPath, string expectedException)
{
    if (expectedPath != null)
    {
        var p = new UPath(path, parseMode);
        p.ToString().Should().Be(expectedPath);

        bool result = UPath.TryParse(path, parseMode, out p);
        result.Should().BeTrue();
        p.ToString().Should().Be(expectedPath);
    }
    else
    {
        Action a = () =>
        {
            // ReSharper disable once ObjectCreationAsStatement
            new UPath(path, parseMode);
        };
        a.ShouldThrow<UPathFormatException>().Where(x => x.Message.StartsWith(expectedException));

        bool result = UPath.TryParse(path, parseMode, out var p);
        result.Should().BeFalse();
    }
}
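The two tests above exercise both entry points: the UPath constructor, which throws on malformed input, and UPath.TryParse, which reports failure through its return value. A minimal sketch of the non-throwing pattern the other snippets in this section rely on; `input`, `Use`, and `ReportError` are placeholders, not library members.

// Validate an untrusted string without letting a parsing exception escape,
// then additionally require an absolute path, as ValidateItem does above.
if (UPath.TryParse(input, out var parsed) && parsed.IsAbsolute)
{
    Use(parsed.FullName);                  // hypothetical consumer of the normalized absolute path
}
else
{
    ReportError($"Invalid path `{input}`"); // hypothetical error reporting
}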
private void DecodeMenu(object o, ContentObject menuFile, MenuObject parent = null, bool expectingMenuEntry = false)
{
    if (o is ScriptObject obj)
    {
        if (parent == null)
        {
            foreach (var keyPair in obj)
            {
                var menuName = keyPair.Key;
                var menuObject = new MenuObject { Path = (string)menuFile.Path, Name = menuName };
                _menus.Add(menuObject);
                Plugin.SetValue(menuName, menuObject);
                DecodeMenu(keyPair.Value, menuFile, menuObject, false);
            }
        }
        else
        {
            if (expectingMenuEntry)
            {
                var menuObject = new MenuObject { Parent = parent };
                _menus.Add(menuObject);
                parent.Children.Add(menuObject);

                foreach (var keyPair in obj)
                {
                    var key = keyPair.Key;
                    var value = keyPair.Value;
                    if (key == "path")
                    {
                        var valueAsStr = value?.ToString();
                        if (valueAsStr == null || !UPath.TryParse(valueAsStr, out _))
                        {
                            throw new LunetException($"The path value `{valueAsStr}` is not a valid path for key `{key}`.");
                        }
                        value = (string)(menuFile.Path.GetDirectory() / (UPath)valueAsStr);
                    }
                    menuObject[key] = value;
                }
            }
            else
            {
                foreach (var keyPair in obj)
                {
                    var key = keyPair.Key;
                    var value = keyPair.Value;
                    if (key == "items")
                    {
                        if (!(value is ScriptArray))
                        {
                            throw new LunetException($"The items of menu `{parent.Name}` must be an array. The type {value?.GetType()} is not valid for this element.");
                        }
                        DecodeMenu(value, menuFile, parent, true);
                    }
                    else
                    {
                        parent[key] = value;
                    }
                }
            }
        }
    }
    else if (o is ScriptArray array)
    {
        if (parent == null)
        {
            parent = new MenuObject { Path = (string)menuFile.Path };
            _menus.Add(parent);
        }
        foreach (var item in array)
        {
            DecodeMenu(item, menuFile, parent, true);
        }
    }
    else if (o is string str)
    {
        if (!UPath.TryParse(str, out var relPath))
        {
            throw new LunetException($"Error while parsing menu [{menuFile.Path}]. The string `{str}` is not a valid path.");
        }
        if (parent == null)
        {
            throw new LunetException($"Error while parsing menu [{menuFile.Path}]. The string `{str}` cannot be a root value.");
        }

        var menuPath = menuFile.Path.GetDirectory() / relPath;
        var menuObject = new MenuObject { Path = (string)menuPath, Parent = parent };
        _menus.Add(menuObject);
        parent.Children.Add(menuObject);
    }
}
private void ProcessBundleLinks(BundleObject bundle, Dictionary<UPath, ContentObject> staticFiles)
{
    Dictionary<string, ConcatGroup> concatBuilders = null;
    if (bundle.Concat)
    {
        concatBuilders = new Dictionary<string, ConcatGroup>();
        foreach (var type in bundle.UrlDestination)
        {
            if (!concatBuilders.ContainsKey(type.Key))
            {
                concatBuilders[type.Key] = new ConcatGroup();
            }
        }
    }

    // Expand wildcard * links
    for (int i = 0; i < bundle.Links.Count; i++)
    {
        var link = bundle.Links[i];
        var path = link.Path;
        var url = link.Url;
        if (!path.Contains("*"))
        {
            continue;
        }

        // Always remove the link
        bundle.Links.RemoveAt(i);

        var upath = (UPath)path;
        foreach (var file in Site.MetaFileSystem.EnumerateFileSystemEntries(upath.GetDirectory(), upath.GetName()))
        {
            var newLink = new BundleLink(bundle, link.Type, (string)file.Path, url + file.Path.GetName());
            bundle.Links.Insert(i++, newLink);
        }

        // Cancel the double i++
        i--;
    }

    // Collect minifier
    IContentMinifier minifier = null;
    if (bundle.Minify)
    {
        var minifierName = bundle.Minifier;
        foreach (var min in Minifiers)
        {
            if (minifierName == null || min.Name == minifierName)
            {
                minifier = min;
                break;
            }
        }

        if (minifier == null)
        {
            Site.Warning($"Minify is setup for bundle [{bundle.Name}] but no minifiers are registered (Minified requested: {minifierName ?? "default"})");
        }
    }

    // Process links
    for (int i = 0; i < bundle.Links.Count; i++)
    {
        var link = bundle.Links[i];
        var path = link.Path;
        var url = link.Url;
        if (url != null)
        {
            if (!UPath.TryParse(url, out _))
            {
                Site.Error($"Invalid absolute url [{url}] in bundle [{bundle.Name}]");
            }
        }

        if (path != null)
        {
            path = ((UPath)path).FullName;
            link.Path = path;

            var entry = new FileEntry(Site.MetaFileSystem, path);
            ContentObject currentContent;
            var isExistingContent = staticFiles.TryGetValue(entry.FullName, out currentContent);

            if (url == null)
            {
                var outputUrlDirectory = bundle.UrlDestination[link.Type];
                // If the file is private or meta, we need to copy to the output
                // bool isFilePrivateOrMeta = Site.IsFilePrivateOrMeta(entry.FullName);
                url = outputUrlDirectory + Path.GetFileName(path);
                link.Url = url;
            }

            // Process file by existing processors
            if (currentContent == null)
            {
                if (entry.Exists)
                {
                    currentContent = new ContentObject(Site, entry);
                }
                else
                {
                    Site.Error($"Unable to find content [{path}] in bundle [{bundle.Name}]");
                }
            }

            if (currentContent != null)
            {
                currentContent.Url = url;

                var listTemp = new PageCollection() { currentContent };
                Site.Content.ProcessPages(listTemp, false);
                link.ContentObject = currentContent;

                bool isRawContent = link.Type == BundleObjectProperties.ContentType;

                // If we require concat and/or minify, we preload the content of the file
                if (!isRawContent && (bundle.Concat || bundle.Minify))
                {
                    try
                    {
                        link.Content = currentContent.Content ?? entry.ReadAllText();

                        // Minify content separately
                        if (bundle.Minify && minifier != null)
                        {
                            Minify(minifier, link, bundle.MinifyExtension);
                        }
                    }
                    catch (Exception ex)
                    {
                        Site.Error($"Unable to load content [{path}] while trying to concatenate for bundle [{bundle.Name}]. Reason: {ex.GetReason()}");
                    }
                }

                // If we are concatenating
                if (!isRawContent && concatBuilders != null)
                {
                    currentContent.Discard = true;

                    // Remove this link from the list of links, as we are going to squash them after
                    bundle.Links.RemoveAt(i);
                    i--;

                    concatBuilders[link.Type].Pages.Add(currentContent);
                    concatBuilders[link.Type].Builder.AppendLine(link.Content);
                }
                else if (!isExistingContent)
                {
                    Site.StaticFiles.Add(currentContent);
                    staticFiles.Add(entry.FullName, currentContent);
                }
            }
        }
    }

    // Concatenate files if necessary
    if (concatBuilders != null)
    {
        foreach (var builderGroup in concatBuilders)
        {
            var builder = builderGroup.Value.Builder;
            if (builder.Length > 0)
            {
                var type = builderGroup.Key;
                var outputUrlDirectory = bundle.UrlDestination[type];

                // If the file is private or meta, we need to copy to the output
                // bool isFilePrivateOrMeta = Site.IsFilePrivateOrMeta(entry.FullName);
                var url = outputUrlDirectory + bundle.Name + "." + type;
                var newStaticFile = new ContentObject(Site)
                {
                    Url = url,
                    Content = builder.ToString()
                };
                Site.DynamicPages.Add(newStaticFile);

                // Add file dependencies
                foreach (var page in builderGroup.Value.Pages)
                {
                    newStaticFile.Dependencies.Add(new PageContentDependency(page));
                }

                var link = new BundleLink(bundle, type, null, url)
                {
                    Content = newStaticFile.Content,
                    ContentObject = newStaticFile
                };
                bundle.Links.Add(link);
            }
        }
    }

    foreach (var link in bundle.Links)
    {
        var contentObject = link.ContentObject;
        if (contentObject != null)
        {
            link.Url = contentObject.Url;
        }
    }
}
public override void Process(ProcessingStage stage)
{
    Debug.Assert(stage == ProcessingStage.BeforeProcessingContent);

    foreach (var taxonomy in List.ScriptObject)
    {
        var name = taxonomy.Key;
        var value = taxonomy.Value;

        string singular = null;
        string url = null;
        ScriptObject map = null;
        switch (value)
        {
            case string valueAsStr:
                singular = valueAsStr;
                break;
            case ScriptObject valueAsObj:
                singular = valueAsObj.GetSafeValue<string>("singular");
                url = valueAsObj.GetSafeValue<string>("url");
                map = valueAsObj.GetSafeValue<ScriptObject>("map");
                break;
            case IScriptCustomFunction _:
                // Skip functions (clear...etc.)
                continue;
        }

        if (string.IsNullOrWhiteSpace(singular))
        {
            Site.Error($"Invalid singular form [{singular}] of taxonomy [{name}]. Expecting a non-empty string");
            continue;
        }

        // TODO: verify that plural is a valid identifier
        var tax = Find(name);
        if (tax != null)
        {
            continue;
        }

        List.Add(new Taxonomy(this, name, singular, url, map));
    }

    // Convert taxonomies to readonly after initialization
    List.ScriptObject.Clear();
    foreach (var taxonomy in List)
    {
        List.ScriptObject.SetValue(taxonomy.Name, taxonomy, true);
    }

    foreach (var page in Site.Pages)
    {
        var dyn = (DynamicObject)page;
        foreach (var tax in List)
        {
            var termsObj = dyn[tax.Name];
            var terms = termsObj as ScriptArray;
            if (termsObj == null)
            {
                continue;
            }

            if (terms == null)
            {
                Site.Error("Invalid type");
                continue;
            }

            foreach (var termNameObj in terms)
            {
                var termName = termNameObj as string;
                if (termName == null)
                {
                    Site.Error("// TODO ERROR ON TERM");
                    continue;
                }

                object termObj;
                TaxonomyTerm term;
                if (!tax.Terms.TryGetValue(termName, out termObj))
                {
                    termObj = term = new TaxonomyTerm(tax, termName);
                    tax.Terms[termName] = termObj;
                }
                else
                {
                    term = (TaxonomyTerm)termObj;
                }

                term.Pages.Add(page);
            }
        }
    }

    // Update taxonomy computed
    foreach (var tax in List)
    {
        tax.Update();
    }

    // Generate taxonomy pages
    foreach (var tax in List)
    {
        UPath.TryParse(tax.Url, out var taxPath);
        var section = taxPath.GetFirstDirectory(out var pathInSection);

        bool hasTerms = false;

        // Generate a term page for each term in the current taxonomy
        foreach (var term in tax.Terms.Values.OfType<TaxonomyTerm>())
        {
            // term.Url
            var content = new DynamicContentObject(Site, term.Url, section)
            {
                ScriptObjectLocal = new ScriptObject(), // only used to let the layout processor run
                Layout = tax.Name,
                LayoutType = "term",
                ContentType = ContentType.Html
            };
            content.ScriptObjectLocal.SetValue("pages", term.Pages, true);
            content.ScriptObjectLocal.SetValue("taxonomy", tax, true);
            content.ScriptObjectLocal.SetValue("term", term, true);

            foreach (var page in term.Pages)
            {
                content.Dependencies.Add(new PageContentDependency(page));
            }

            content.Initialize();
            Site.DynamicPages.Add(content);
            hasTerms = true;
        }

        // Generate a terms page for the current taxonomy
        if (hasTerms)
        {
            var content = new DynamicContentObject(Site, tax.Url, section)
            {
                ScriptObjectLocal = new ScriptObject(), // only used to let the layout processor run
                Layout = tax.Name,
                LayoutType = "terms",
                ContentType = ContentType.Html
            };
            content.ScriptObjectLocal.SetValue("taxonomy", tax, true);
            content.Initialize();

            // TODO: Add dependencies
            Site.DynamicPages.Add(content);
        }
    }
}
public override ContentResult TryProcessContent(ContentObject file, ContentProcessingStage stage)
{
    var contentType = file.ContentType;

    // This plugin is only working on scss files
    if (contentType != ScssType)
    {
        return ContentResult.None;
    }

    if (file.Content == null)
    {
        file.Content = file.SourceFile.ReadAllText();
    }

    var content = file.Content;

    var options = new ScssOptions();
    options.InputFile = (string)file.Path;

    var includePaths = new List<DirectoryEntry>();
    foreach (var pathObj in Plugin.Includes)
    {
        var path = pathObj as string;
        if (path != null && UPath.TryParse(path, out var validPath) && Site.MetaFileSystem.DirectoryExists(validPath))
        {
            includePaths.Add(new DirectoryEntry(Site.MetaFileSystem, validPath));
        }
        else
        {
            Site.Error($"Invalid folder path `{pathObj}` found in site.scss.includes.");
        }
    }

    var tempIncludePaths = new List<DirectoryEntry>();
    var extensions = new string[] { ".scss", ".sass", ".css" };
    var includedFiles = new List<FileEntry>();

    options.TryImport = (ref string file, string parentpath, out string scss, out string map) =>
    {
        scss = null;
        map = null;

        // From: https://sass-lang.com/documentation/at-rules/import#load-paths
        // Imports will always be resolved relative to the current file first, though.
        // Load paths will only be used if no relative file exists that matches the import.
        // This ensures that you can’t accidentally mess up your relative imports when you add a new library.
        tempIncludePaths.Clear();

        UPath filePath = (UPath)file;
        var directoryName = ((UPath)parentpath).GetDirectory();
        if (!directoryName.IsNull && directoryName.IsAbsolute)
        {
            DirectoryEntry localDirEntry = null;
            if (Site.FileSystem.DirectoryExists(directoryName))
            {
                localDirEntry = new DirectoryEntry(Site.FileSystem, directoryName);
                if (!tempIncludePaths.Contains(localDirEntry))
                {
                    tempIncludePaths.Add(localDirEntry);
                }
            }

            if (Site.MetaFileSystem.DirectoryExists(directoryName))
            {
                localDirEntry = new DirectoryEntry(Site.MetaFileSystem, directoryName);
                if (!tempIncludePaths.Contains(localDirEntry))
                {
                    tempIncludePaths.Add(localDirEntry);
                }
            }
        }

        tempIncludePaths.AddRange(includePaths);

        // From libsass, order for ambiguous import:
        // (1) filename as given
        // (2) underscore + given
        // (3) underscore + given + extension
        // (4) given + extension
        // (5) given + _index.scss
        // (6) given + _index.sass
        var ufile = (UPath)file;
        var relativeFolder = ufile.GetDirectory();
        var filename = ufile.GetName();

        bool Resolve(FileEntry entry, out string scss, out string file)
        {
            scss = null;
            file = null;
            if (entry.Exists)
            {
                scss = entry.ReadAllText();
                file = (string)entry.Path;
                includedFiles.Add(entry);
                return true;
            }
            return false;
        }

        foreach (var dirEntry in tempIncludePaths)
        {
            var rootFolder = dirEntry.Path / relativeFolder;

            // (1) filename as given
            if (Resolve(new FileEntry(dirEntry.FileSystem, rootFolder / filename), out scss, out file))
            {
                return true;
            }

            // (2) underscore + given
            if (Resolve(new FileEntry(dirEntry.FileSystem, rootFolder / $"_{filename}"), out scss, out file))
            {
                return true;
            }

            // (3) underscore + given + extension
            foreach (var extension in extensions)
            {
                if (Resolve(new FileEntry(dirEntry.FileSystem, rootFolder / $"_{filename}{extension}"), out scss, out file))
                {
                    return true;
                }
            }

            // (4) given + extension
            foreach (var extension in extensions)
            {
                if (Resolve(new FileEntry(dirEntry.FileSystem, rootFolder / $"{filename}{extension}"), out scss, out file))
                {
                    return true;
                }
            }

            // (5) given + _index.scss
            if (Resolve(new FileEntry(dirEntry.FileSystem, rootFolder / $"{filename}" / "_index.scss"), out scss, out file))
            {
                return true;
            }

            // (6) given + _index.sass
            if (Resolve(new FileEntry(dirEntry.FileSystem, rootFolder / $"{filename}" / "_index.sass"), out scss, out file))
            {
                return true;
            }
        }

        return false;
    };

    var result = SharpScss.Scss.ConvertToCss(content, options);

    file.Content = result.Css;
    file.ChangeContentType(ContentType.Css);

    foreach (var includeFile in includedFiles)
    {
        file.Dependencies.Add(new FileContentDependency(includeFile));
    }

    return ContentResult.Continue;
}