/// <summary>
/// Minifies the CSS content carried by the context.
/// </summary>
/// <param name="fileProcessContext">Carries the file content to minify.</param>
/// <returns>A completed task holding the minified CSS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    using (var reader = new StringReader(fileProcessContext.FileContent))
    {
        var minified = Minify(reader);
        return Task.FromResult(minified);
    }
}
/// <summary>
/// Rewrites relative url() references in the CSS to absolute paths so the
/// stylesheet still resolves when served from the bundle location.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content and source file path.</param>
/// <returns>A completed task holding the rewritten CSS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var rewritten = ReplaceUrlsWithAbsolutePaths(
        fileProcessContext.FileContent,
        fileProcessContext.WebFile.FilePath,
        _http.HttpContext.Request);
    return Task.FromResult(rewritten);
}
/// <summary>
/// Minifies the CSS content carried by the context.
/// </summary>
/// <param name="fileProcessContext">Carries the file content to minify.</param>
/// <returns>A completed task holding the minified CSS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    string result;
    using (var reader = new StringReader(fileProcessContext.FileContent))
    {
        result = Minify(reader);
    }
    return Task.FromResult(result);
}
/// <summary>
/// Converts the relative URLs inside the CSS content to absolute paths.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content and source file path.</param>
/// <returns>A completed task holding the rewritten CSS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var request = _http.HttpContext.Request;
    var content = fileProcessContext.FileContent;
    var filePath = fileProcessContext.WebFile.FilePath;
    return Task.FromResult(ReplaceUrlsWithAbsolutePaths(content, filePath, request));
}
/// <summary>
/// Rewrites relative URLs in the CSS to absolute paths, updates the context
/// with the result, then hands off to the next pre-processor.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content and source file path.</param>
/// <param name="next">The next pre-processor in the pipeline.</param>
public Task ProcessAsync(FileProcessContext fileProcessContext, PreProcessorDelegate next)
{
    var rewritten = ReplaceUrlsWithAbsolutePaths(
        fileProcessContext.FileContent,
        fileProcessContext.WebFile.FilePath);
    fileProcessContext.Update(rewritten);
    // Continue with the remainder of the pipeline.
    return next(fileProcessContext);
}
/// <summary>
/// Runs JSMin over the file content, stores the result on the context and
/// invokes the next pre-processor.
/// </summary>
/// <param name="fileProcessContext">Carries the JS content to minify.</param>
/// <param name="next">The next pre-processor in the pipeline.</param>
public Task ProcessAsync(FileProcessContext fileProcessContext, PreProcessorDelegate next)
{
    // NOTE(review): jsMin.ProcessAsync is assumed to return the minified
    // string synchronously despite its name — confirm it does not return a
    // Task, otherwise the task object itself would be stored as the content.
    var minifier = new JsMin();
    var minified = minifier.ProcessAsync(fileProcessContext);
    fileProcessContext.Update(minified);
    return next(fileProcessContext);
}
/// <summary>
/// Runs every configured processor in order; each one sees the output of the
/// previous processor via the shared context.
/// </summary>
/// <param name="fileProcessContext">Mutable context threaded through all processors.</param>
/// <returns>The file content after all processors have run.</returns>
public async Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    foreach (var processor in Processors)
    {
        var output = await processor.ProcessAsync(fileProcessContext);
        fileProcessContext.FileContent = output;
    }
    return fileProcessContext.FileContent;
}
/// <summary>
/// Chains all processors sequentially, feeding each one's output back into
/// the shared context for the next.
/// </summary>
/// <param name="fileProcessContext">Mutable context threaded through all processors.</param>
/// <returns>The final processed file content.</returns>
public async Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    foreach (var step in Processors)
    {
        fileProcessContext.FileContent = await step.ProcessAsync(fileProcessContext);
    }

    return fileProcessContext.FileContent;
}
/// <summary>
/// Minifies CSS using a sequence of simple regex transforms. The order of
/// the replacements is significant and must not change.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content to minify.</param>
/// <returns>A completed task holding the minified CSS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var css = fileProcessContext.FileContent;
    // Remove line breaks together with any leading whitespace that follows.
    css = Regex.Replace(css, @"[\n\r]+\s*", string.Empty);
    // Collapse remaining runs of whitespace to a single space.
    css = Regex.Replace(css, @"\s+", " ");
    // Drop spaces around structural punctuation (: , ; { }).
    css = Regex.Replace(css, @"\s?([:,;{}])\s?", "$1");
    // Strip units from zero values, e.g. "0px" -> "0".
    css = Regex.Replace(css, @"([\s:]0)(px|pt|%|em)", "$1");
    // Remove /* ... */ comments (non-greedy, spans what used to be multiple lines).
    css = Regex.Replace(css, @"/\*[\d\D]*?\*/", string.Empty);
    return Task.FromResult(css);
}
/// <summary>
/// Minifies the CSS content via <c>CssMin</c>, stores the result on the
/// context, then continues the pipeline.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content to minify.</param>
/// <param name="next">The next pre-processor in the pipeline.</param>
public async Task ProcessAsync(FileProcessContext fileProcessContext, PreProcessorDelegate next)
{
    using (var reader = new StringReader(fileProcessContext.FileContent))
    {
        var minifier = new CssMin();
        fileProcessContext.Update(minifier.Minify(reader));
        await next(fileProcessContext);
    }
}
/// <summary>
/// Minifies the JS content by running the classic JSMin algorithm.
/// </summary>
/// <param name="fileProcessContext">Carries the JS content to minify.</param>
/// <returns>A completed task holding the minified JS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var output = new StringBuilder();
    // jsmin() reads from the 'sr' field and writes to the 'sw' field, so
    // both must be assigned before it runs.
    using (sr = new StringReader(fileProcessContext.FileContent))
    using (sw = new StringWriter(output))
    {
        jsmin();
    }
    return Task.FromResult(output.ToString());
}
/// <summary>
/// Inlines CSS @import statements: each imported stylesheet is resolved to an
/// absolute path, read from disk, run through its own pre-processor pipeline,
/// and prepended to the output ahead of the remaining (import-stripped) CSS.
/// The combined result is then passed to the next pre-processor.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content, the web file and the bundle context.</param>
/// <param name="next">The next pre-processor in the pipeline.</param>
public async Task ProcessAsync(FileProcessContext fileProcessContext, PreProcessorDelegate next)
{
    var sb = new StringBuilder();
    IEnumerable<string> importedPaths;
    // Strip the @import statements from the content and capture the paths they referenced.
    var removedImports = ParseImportStatements(fileProcessContext.FileContent, out importedPaths);
    //need to write the imported sheets first since these theoretically should *always* be at the top for browser to support them
    foreach (var importPath in importedPaths)
    {
        // Resolve the import relative to the current file's absolute URI.
        var uri = new Uri(fileProcessContext.WebFile.FilePath, UriKind.RelativeOrAbsolute).MakeAbsoluteUri(_siteInfo.GetBaseUrl());
        var absolute = uri.ToAbsolutePath(importPath);
        var path = _requestHelper.Content(absolute);
        //is it external?
        if (path.Contains(Constants.SchemeDelimiter))
        {
            //Pretty sure we just leave the external refs in there
            //TODO: Look in CDF, we have tests for this, pretty sure the ParseImportStatements removes that
        }
        else
        {
            //it's internal (in theory)
            var filePath = _fileSystemHelper.GetFileInfo(path);
            var content = await _fileSystemHelper.ReadContentsAsync(filePath);

            //This needs to be put back through the whole pre-processor pipeline before being added,
            // so we'll clone the original webfile with it's new path, this will inherit the whole pipeline,
            // and then we'll execute the pipeline for that file
            var clone = fileProcessContext.WebFile.Duplicate(path);
            var processed = await clone.Pipeline.ProcessAsync(new FileProcessContext(content, clone, fileProcessContext.BundleContext));
            sb.Append(processed);
            // NOTE(review): a missing file is not handled here — ReadContentsAsync is
            // presumably expected to throw or return empty; confirm and log if needed.
        }
    }
    // The import-stripped remainder of the original sheet goes after the inlined imports.
    sb.Append(removedImports);
    fileProcessContext.Update(sb.ToString());
    await next(fileProcessContext);
}
/// <summary>
/// Runs JSMin over the file content and guarantees the output ends with a
/// semicolon so concatenated bundle files cannot merge into one another.
/// </summary>
/// <remarks>
/// NOTE(review): despite the Async suffix this method is synchronous and
/// returns a plain string; renaming would break callers, so it is left as-is.
/// </remarks>
/// <param name="fileProcessContext">Carries the JS content to minify.</param>
/// <returns>The minified, semicolon-terminated JS.</returns>
public string ProcessAsync(FileProcessContext fileProcessContext)
{
    var output = new StringBuilder();
    // ExecuteJsMin() consumes the '_sr' field and writes to the '_sw' field,
    // so both are assigned before it runs.
    using (_sr = new StringReader(fileProcessContext.FileContent))
    {
        using (_sw = new StringWriter(output))
        {
            ExecuteJsMin();
        }
    }
    //ensure there's a semicolon
    output.Append(";");
    return output.ToString();
}
/// <summary>
/// Minifies the JS content with the JSMin algorithm and returns the result.
/// </summary>
/// <param name="fileProcessContext">Carries the JS content to minify.</param>
/// <returns>A completed task holding the minified JS.</returns>
public Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var minified = new StringBuilder();
    // jsmin() operates on the 'sr' (input) and 'sw' (output) fields.
    using (sr = new StringReader(fileProcessContext.FileContent))
    {
        using (sw = new StringWriter(minified))
        {
            jsmin();
        }
    }
    return Task.FromResult(minified.ToString());
}
/// <summary>
/// Inlines CSS @import statements: each imported stylesheet is resolved to an
/// absolute path, read from disk (if it exists), run through its own
/// pre-processor pipeline, and prepended ahead of the remaining
/// (import-stripped) CSS. External (scheme-qualified) imports are left alone.
/// </summary>
/// <param name="fileProcessContext">Carries the CSS content and the web file.</param>
/// <returns>The combined CSS with imports inlined first.</returns>
public async Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var sb = new StringBuilder();
    IEnumerable<string> importedPaths;
    // Strip the @import statements from the content and capture the referenced paths.
    var removedImports = ParseImportStatements(fileProcessContext.FileContent, out importedPaths);
    //need to write the imported sheets first since these theoretically should *always* be at the top for browser to support them
    foreach (var importPath in importedPaths)
    {
        // Resolve the import relative to the current file's absolute URI.
        var uri = new Uri(fileProcessContext.WebFile.FilePath, UriKind.RelativeOrAbsolute).MakeAbsoluteUri(_http.HttpContext.Request);
        var absolute = uri.ToAbsolutePath(importPath);
        var path = _fileSystemHelper.NormalizeWebPath(absolute, _http.HttpContext.Request);
        //is it external?
        if (path.Contains(Constants.SchemeDelimiter))
        {
            //Pretty sure we just leave the external refs in there
            //TODO: Look in CDF, we have tests for this, pretty sure the ParseImportStatements removes that
        }
        else
        {
            //it's internal (in theory)
            var filePath = _fileSystemHelper.MapPath(string.Format("~/{0}", path));
            if (System.IO.File.Exists(filePath))
            {
                var content = await _fileSystemHelper.ReadContentsAsync(filePath);

                //This needs to be put back through the whole pre-processor pipeline before being added,
                // so we'll clone the original webfile with it's new path, this will inherit the whole pipeline,
                // and then we'll execute the pipeline for that file
                var clone = fileProcessContext.WebFile.Duplicate(path);
                var processed = await clone.Pipeline.ProcessAsync(new FileProcessContext(content, clone));
                sb.Append(processed);
            }
            else
            {
                //TODO: Need to log this
            }
        }
    }
    // The import-stripped remainder of the original sheet goes after the inlined imports.
    sb.Append(removedImports);
    return sb.ToString();
}
/// <summary>
/// Drives the pre-processor pipeline over a queue of processors and returns
/// the resulting file content.
/// </summary>
/// <param name="fileProcessContext">Mutable context threaded through all processors.</param>
/// <returns>The file content after the pipeline completes or terminates early.</returns>
public async Task<string> ProcessAsync(FileProcessContext fileProcessContext)
{
    var pending = new Queue<IPreProcessor>(Processors);
    while (pending.Count > 0)
    {
        // ProcessNext recursively drains the queue; a false result means a
        // processor chose not to invoke its 'next' delegate, which
        // terminates the pipeline early.
        var executed = await ProcessNext(pending, fileProcessContext);
        if (!executed)
        {
            break;
        }
    }

    return fileProcessContext.FileContent;
}
/// <summary>
/// Recursively runs the next pre-processor until the queue is drained or a
/// processor terminates the pipeline by never invoking its continuation.
/// </summary>
/// <param name="queue">Remaining processors to run.</param>
/// <param name="fileProcessContext">Shared context passed through the pipeline.</param>
/// <returns>True when the pipeline ran to completion; false when it was terminated early.</returns>
private static async Task<bool> ProcessNext(Queue<IPreProcessor> queue, FileProcessContext fileProcessContext)
{
    // Nothing left to run — the pipeline completed.
    if (queue.Count == 0)
    {
        return true;
    }

    var processor = queue.Dequeue();
    var continued = false;
    // The continuation only runs if the processor invokes 'next'; if it never
    // does, 'continued' stays false and the pipeline stops here.
    await processor.ProcessAsync(fileProcessContext, async ctx =>
    {
        continued = await ProcessNext(queue, ctx);
    });
    return continued;
}
/// <summary>
/// Scans the content line by line for a sourceMappingURL directive and, when
/// found, rewrites the source map reference so its URL is the absolute path
/// of the map relative to this web file. All other lines (apart from the
/// edge cases noted below) are copied through unchanged. The rewritten
/// content is stored on the context before the pipeline continues.
/// </summary>
/// <param name="fileProcessContext">Carries the content to scan and the web file used to resolve paths.</param>
/// <param name="next">The next pre-processor in the pipeline.</param>
public Task ProcessAsync(FileProcessContext fileProcessContext, PreProcessorDelegate next)
{
    var sb = new StringBuilder();
    using (var reader = new StringReader(fileProcessContext.FileContent))
    {
        var line = reader.ReadLine();
        while (line != null)
        {
            // Compare the line (ignoring leading whitespace) char-by-char
            // against the sourceMappingURL marker.
            var isTrim = true;
            var foundIndex = 0;
            for (int i = 0; i < line.Length; i++)
            {
                char c = line[i];
                if (isTrim && char.IsWhiteSpace(c))
                {
                    continue;
                }
                isTrim = false;
                if (c == _sourceMappingUrl[foundIndex])
                {
                    foundIndex++;
                    if (foundIndex == _sourceMappingUrl.Length)
                    {
                        // found! parse it
                        var match = RegexStatements.SourceMap.Match(line);
                        if (match.Success)
                        {
                            var url = match.Groups[1].Value;

                            // convert to it's absolute path
                            var contentPath = _requestHelper.Content(fileProcessContext.WebFile.FilePath);
                            var uri = new Uri(contentPath, UriKind.RelativeOrAbsolute).MakeAbsoluteUri(_siteInfo.GetBaseUrl());
                            var absolute = uri.ToAbsolutePath(url);
                            var path = _requestHelper.Content(absolute);

                            // replace the source map with the correct url
                            WriteLine(sb, $"{_sourceMappingUrl}={path};");
                        }
                        else
                        {
                            // should have matched, perhaps the source map is formatted in a weird way, we're going to ignore
                            // it since if it's rendered without the correct path then other errors will occur.
                        }
                        break; // exit for loop
                    }
                }
                else
                {
                    // not found on this line
                    WriteLine(sb, line);
                    break; // exit for loop
                }
            }
            // NOTE(review): lines that are empty/whitespace-only, or that are a
            // strict prefix of the marker, exit this for-loop without any
            // WriteLine call and are therefore dropped from the output —
            // confirm this is intentional.

            // next
            line = reader.ReadLine();
        }
    }
    fileProcessContext.Update(sb.ToString());
    return (next(fileProcessContext));
}