/// <summary>
/// Minifies (and performs any other operation defined in the pipeline) for each file, caching the result.
/// </summary>
/// <param name="files">The web files to process.</param>
/// <param name="pipeline">The default pipeline assigned to any file that does not define its own.</param>
/// <returns>A task that completes once every file has been processed and cached.</returns>
private async Task ProcessWebFilesAsync(IEnumerable<IWebFile> files, PreProcessPipeline pipeline)
{
    //we need to do the minify on the original files
    foreach (var file in files)
    {
        //if the pipeline on the file is null, assign the default one passed in
        if (file.Pipeline == null)
        {
            file.Pipeline = pipeline;
        }

        //a non-folder path is processed directly
        if (!_fileSystemHelper.IsFolder(file.FilePath))
        {
            await _fileManager.ProcessAndCacheFileAsync(file);
            continue;
        }

        //the path is a folder, so expand it and process each contained file individually
        foreach (var containedPath in _fileSystemHelper.GetPathsForFilesInFolder(file.FilePath))
        {
            var expanded = new WebFile
            {
                FilePath = _fileSystemHelper.NormalizeWebPath(containedPath, _request),
                DependencyType = file.DependencyType,
                Pipeline = file.Pipeline
            };
            await _fileManager.ProcessAndCacheFileAsync(expanded);
        }
    }
}
/// <summary>
/// Get a collection of files that will be used to create the composite file(s), this will normalize all of the paths and ignore
/// any external requests.
/// </summary>
/// <param name="files">The dependency files to batch, in rendering order.</param>
/// <returns>A staggered collection of batches; each batch is rendered individually.</returns>
/// <remarks>
/// We need to get a collection of files that have their cached/hashed paths, this is used
/// to check if the composite file has already been created, if it is then we don't need to worry
/// about anything. If it is not, then we need to minify each of the files now. Then when the request
/// is made to get the composite file, that process is already complete and the composite handler just
/// needs to combine, compress and store the file.
///
/// The result of this method is a staggered collection of files. This will iterate over the files and when it comes across
/// an external dependency or a dependency that requires a different rendering output, it will close the current collection and
/// start another one. Each of these collections will be rendered individually.
/// </remarks>
internal IEnumerable<WebFileBatch> GetCompositeFileCollectionForUrlGeneration(IEnumerable<IWebFile> files)
{
    var current = new WebFileBatch();
    var result = new List<WebFileBatch>();
    foreach (var f in files)
    {
        var webPath = _requestHelper.Content(f.FilePath);
        //if this is an external path then we need to split and start new
        if (webPath.Contains(Constants.SchemeDelimiter))
        {
            //close off the batch in progress (if any) before adding the external file
            if (current.Any())
            {
                result.Add(current);
                current = new WebFileBatch();
            }
            //NOTE(review): mutates the incoming file's path in place — assumed intentional, verify callers don't rely on the original path
            f.FilePath = webPath;
            current.AddExternal(f);
            //add it to the result and split again - each batch can only contain a single external request
            result.Add(current);
            current = new WebFileBatch();
        }
        else if (_fileSystemHelper.IsFolder(f.FilePath))
        {
            //it's a folder so get all of its individual files and process them
            var filePaths = _fileSystemHelper.GetPathsForFilesInFolder(f.FilePath);
            foreach (var p in filePaths)
            {
                var subFile = f.Duplicate(_requestHelper.Content(p));
                //pair the normalized file with its hashed counterpart (the cached/hashed path)
                var hashedFile = subFile.Duplicate(_hasher.Hash(subFile.FilePath));
                //Duplicate presumably does not carry the pipeline across — TODO confirm
                hashedFile.Pipeline = f.Pipeline;
                current.AddInternal(subFile, hashedFile);
            }
        }
        else
        {
            //single internal file: pair the normalized file with its hashed counterpart
            var hashedFile = f.Duplicate(_hasher.Hash(webPath));
            current.AddInternal(f.Duplicate(webPath), hashedFile);
        }
    }
    //check if there's any left in current and add it
    if (current.Any())
    {
        result.Add(current);
    }
    return (result);
}
/// <summary>
/// Resolves the files registered for a named bundle, expanding any folder paths into their contained files.
/// </summary>
/// <param name="bundleName">The name the bundle was registered under.</param>
/// <param name="request">The current request, used to normalize web paths.</param>
/// <returns>The expanded file list for the bundle, or null when no bundle matches the name.</returns>
public IEnumerable<IWebFile> GetFiles(string bundleName, HttpRequest request)
{
    IEnumerable<IWebFile> files;
    if (!_bundles.TryGetValue(bundleName, out files))
    {
        return null;
    }

    var resolved = new List<IWebFile>();
    foreach (var file in files)
    {
        //assign the default pipeline for the dependency type when none was specified
        if (file.Pipeline == null)
        {
            file.Pipeline = _processorFactory.GetDefault(file.DependencyType);
        }
        file.FilePath = _fileSystemHelper.NormalizeWebPath(file.FilePath, request);

        //We need to check if this path is a folder, then iterate the files
        if (_fileSystemHelper.IsFolder(file.FilePath))
        {
            foreach (var contained in _fileSystemHelper.GetPathsForFilesInFolder(file.FilePath))
            {
                resolved.Add(new WebFile
                {
                    FilePath = _fileSystemHelper.NormalizeWebPath(contained, request),
                    DependencyType = file.DependencyType,
                    Pipeline = file.Pipeline
                });
            }
        }
        else
        {
            resolved.Add(file);
        }
    }
    return resolved;
}
/// <summary>
/// Returns the ordered file set for dynamically registered assets and ensures that all pre-processor pipelines are applied correctly
/// </summary>
/// <param name="files">The files to validate, normalize and order.</param>
/// <param name="pipeline">The default pipeline; a copy is assigned to any file that does not define its own.</param>
/// <returns>The default-ordered files followed by the custom-ordered files, with conventions applied and null results removed.</returns>
public IEnumerable<IWebFile> GetOrderedFileSet(IEnumerable<IWebFile> files, PreProcessPipeline pipeline)
{
    var customOrdered = new List<IWebFile>();
    var defaultOrdered = new List<IWebFile>();
    foreach (var file in files)
    {
        ValidateFile(file);
        if (file.Pipeline == null)
        {
            //each file gets its own copy so later per-file pipeline changes don't affect the shared default
            file.Pipeline = pipeline.Copy();
        }
        file.FilePath = _requestHelper.Content(file.FilePath);

        //files with an explicit Order are collected separately and appended after the default-ordered set
        var target = file.Order > 0 ? customOrdered : defaultOrdered;

        //We need to check if this path is a folder, then iterate the files
        if (_fileSystemHelper.IsFolder(file.FilePath))
        {
            var filePaths = _fileSystemHelper.GetPathsForFilesInFolder(file.FilePath);
            foreach (var f in filePaths)
            {
                target.Add(new WebFile
                {
                    FilePath = _requestHelper.Content(f),
                    DependencyType = file.DependencyType,
                    Pipeline = file.Pipeline,
                    Order = file.Order
                });
            }
        }
        else
        {
            target.Add(file);
        }
    }
    //add the custom ordered to the end of the list (OrderBy is a stable sort, so ties keep registration order)
    defaultOrdered.AddRange(customOrdered.OrderBy(x => x.Order));
    //apply conventions
    return defaultOrdered.Select(ApplyConventions).Where(x => x != null);
}