/// <summary>
/// Takes the url of a combined file, and returns its content, ready to be
/// sent to the browser.
/// The url does not relate to an actual file. The combined content only
/// lives in cache. If it is not in cache, this method finds out which files
/// are associated with the fileUrl, reads them, compresses the content and
/// stores that in cache (as well as returning it).
/// </summary>
/// <param name="context">Current HTTP context.</param>
/// <param name="combinedFileUrl">Url of the combined file, as requested by the browser.</param>
/// <param name="minifyCSS">Whether CSS content gets minified.</param>
/// <param name="minifyJavaScript">Whether JavaScript content gets minified.</param>
/// <param name="urlProcessor">Supplies the missing-file policy used when mapping urls to paths.</param>
/// <param name="fileType">Receives the file type (CSS or JavaScript) of the returned content.</param>
/// <returns>The content to send to the browser.</returns>
public static string Content(
    HttpContext context,
    string combinedFileUrl,
    bool minifyCSS,
    bool minifyJavaScript,
    UrlProcessor urlProcessor,
    out FileTypeUtilities.FileType fileType)
{
    // Derive the urls hash from the combined file url, then look up the
    // (possibly cached) compressed content for that hash.
    string urlsHash = UrlVersioner.UnversionedFilename(combinedFileUrl);

    string content;
    string versionId;
    GetContentVersion(
        context, urlsHash, urlProcessor, null, minifyCSS, minifyJavaScript,
        out content, out versionId, out fileType);

    if (content != null)
    {
        return content;
    }

    // combinedFileUrl matches an actual file on the server. Load that file
    // and return its content to the browser. Because this situation normally
    // only happens when a (already minified) library file could not be loaded
    // from a CDN (a rare event), or if we are in debug mode, there is no need
    // to minify the file.
    content = "";
    string physicalPath = MapPath(combinedFileUrl, urlProcessor.ThrowExceptionOnMissingFile);
    if (physicalPath != null)
    {
        content = File.ReadAllText(physicalPath);
    }

    fileType = FileTypeUtilities.FileTypeOfUrl(combinedFileUrl);
    return content;
}
/// <summary>
/// Finds all groups of script or link tags of one file type in the given head
/// html, and registers Replacements that swap those groups for versioned
/// and/or combined tags, depending on the combine option.
/// </summary>
/// <param name="headHtml">Html of the head section to scan for tag groups.</param>
/// <param name="groupRegexp">
/// Regexp matching one group of script or link tags; each url within the group
/// is captured in the named group "url".
/// </param>
/// <param name="fileType">File type (CSS or JavaScript) of the tags being processed.</param>
/// <param name="tagTemplate">Format string that turns a url into a script or link tag.</param>
/// <param name="totalFileNames">
/// Physical file names of all files processed get added to this list
/// (via CombinedFile.Url). Ignored if null.
/// </param>
/// <param name="combineFiles">Whether to combine nothing, each group, or all groups.</param>
/// <param name="placeCombinedFilesAtEnd">
/// This is only relevant if combineFiles equals All.
/// If placeCombinedFilesAtEnd is true, the tag loading the combined file
/// replaces the very last file group (important if you're loading js, because it means that if any
/// js is dependent on a library loaded from a CDN, all the js will load after that library.
///
/// If placeCombinedFilesAtEnd is false, the tag replaces the very first file group.
/// You'd use this with CSS, to get it load possibly sooner than the js.
/// </param>
/// <param name="minifyCSS">Whether CSS content gets minified.</param>
/// <param name="minifyJavaScript">Whether JavaScript content gets minified.</param>
/// <param name="urlProcessor">Supplies url processing and missing-file policy.</param>
private void ProcessFileType(
    string headHtml,
    string groupRegexp,
    FileTypeUtilities.FileType fileType,
    string tagTemplate,
    List<string> totalFileNames,
    ConfigSection.CombineOption combineFiles,
    bool placeCombinedFilesAtEnd,
    bool minifyCSS,
    bool minifyJavaScript,
    UrlProcessor urlProcessor)
{
    List<groupInfo> allGroups = new List<groupInfo>();
    List<Uri> totalFileUrlsList = new List<Uri>();

    Regex r = new Regex(groupRegexp, RegexOptions.IgnoreCase);
    Match m = r.Match(headHtml);

    // Visit each group of script or link tags. Record the html of each file group
    // and a list of the urls in the tags in that file group in allGroups.
    while (m.Success)
    {
        string fileGroup = m.Value;
        CaptureCollection fileUrls = m.Groups["url"].Captures;

        // Visit the src or href of each individual script or link tag in the group,
        // and add to a list of urls.
        List<Uri> fileUrlsList = new List<Uri>();
        for (int j = 0; j < fileUrls.Count; j++)
        {
            Uri fileUrl = new Uri(HttpContext.Current.Request.Url, fileUrls[j].Value);
            fileUrlsList.Add(fileUrl);
            totalFileUrlsList.Add(fileUrl);
        }

        allGroups.Add(new groupInfo() { fileGroup = fileGroup, fileUrlsList = fileUrlsList });
        m = m.NextMatch();
    }

    // Nothing to do when the head contains no tags of this file type.
    // Without this guard, the All case below would index an empty list
    // (allGroups[-1] or allGroups[0]) and throw ArgumentOutOfRangeException.
    if (allGroups.Count == 0)
    {
        return;
    }

    // Process each file group in allGroups
    switch (combineFiles)
    {
        case ConfigSection.CombineOption.None:
            // In each group, process all URLs individually into tags.
            // Note that CombinedFile.Url not only has the ability to combine urls, but also
            // to insert version info - and we still want that to be able to use far future cache expiry,
            // even if not combining files.
            // Concatenate the tags and replace the group with the concatenated tags.
            foreach (groupInfo g in allGroups)
            {
                StringBuilder tagsInGroup = new StringBuilder();
                foreach (Uri u in g.fileUrlsList)
                {
                    string versionedUrl = CombinedFile.Url(
                        HttpContext.Current, new List<Uri>(new Uri[] { u }), fileType,
                        minifyCSS, minifyJavaScript, urlProcessor, totalFileNames);
                    string versionedFileTag = string.Format(tagTemplate, versionedUrl);
                    tagsInGroup.Append(versionedFileTag);
                }

                // Be sure to trim the group before storing it (that is, remove space at the front and end).
                // If you don't, you may store a group with white space at either end, that then doesn't match
                // a group in some other file that is exactly the same, except for the white space at either end.
                Replacements.Add(new Replacement
                {
                    original = g.fileGroup.Trim(),
                    replacement = tagsInGroup.ToString()
                });
            }
            break;

        case ConfigSection.CombineOption.PerGroup:
            // In each group, process all URLs together into a combined tag.
            // Replace the group with that one tag.
            foreach (groupInfo g in allGroups)
            {
                string combinedFileUrl = CombinedFile.Url(
                    HttpContext.Current, g.fileUrlsList, fileType,
                    minifyCSS, minifyJavaScript, urlProcessor, totalFileNames);
                string combinedFileTag = string.Format(tagTemplate, combinedFileUrl);

                Replacements.Add(new Replacement
                {
                    original = g.fileGroup.Trim(),
                    replacement = combinedFileTag
                });
            }
            break;

        case ConfigSection.CombineOption.All:
            // Combine all urls into a single tag. Then insert that tag in the head.
            // Also, remove all groups.
            {
                string combinedFileUrl = CombinedFile.Url(
                    HttpContext.Current, totalFileUrlsList, fileType,
                    minifyCSS, minifyJavaScript, urlProcessor, totalFileNames);
                string combinedFileTag = string.Format(tagTemplate, combinedFileUrl);

                int idxFileGroupToReplace = placeCombinedFilesAtEnd ? (allGroups.Count - 1) : 0;
                Replacements.Add(
                    new Replacement
                    {
                        original = allGroups[idxFileGroupToReplace].fileGroup.Trim(),
                        replacement = combinedFileTag
                    });

                // Replace all file groups with empty string, except for the one
                // we just replaced with the tag.
                allGroups.RemoveAt(idxFileGroupToReplace);
                foreach (groupInfo g in allGroups)
                {
                    Replacements.Add(
                        new Replacement
                        {
                            original = g.fileGroup.Trim(),
                            replacement = ""
                        });
                }
            }
            break;

        default:
            throw new ArgumentException("ProcessFileType - combineFiles=" + combineFiles.ToString());
    }
}
/// <summary>
/// Takes the urls of a series of files (taken from the src or href attribute
/// of their script or link tags), and returns the url of the combined file.
/// That url will be placed in a single script or link tag that replaces the
/// individual script or link tags.
///
/// When the browser sends a request for this url, the content to return is
/// produced by calling Content.
/// </summary>
/// <param name="context">Current HTTP context.</param>
/// <param name="fileUrls">Urls of the files that make up the combined file.</param>
/// <param name="fileType">File type (CSS or JavaScript) of the files.</param>
/// <param name="minifyCSS">Whether CSS content gets minified.</param>
/// <param name="minifyJavaScript">Whether JavaScript content gets minified.</param>
/// <param name="urlProcessor">Supplies url processing and missing-file policy.</param>
/// <param name="totalFileNames">
/// The method adds the physical file names of the files making up the combined
/// file to this parameter. If this is null, nothing is done.
/// </param>
/// <returns>Url of the combined file.</returns>
public static string Url(
    HttpContext context,
    List<Uri> fileUrls,
    FileTypeUtilities.FileType fileType,
    bool minifyCSS,
    bool minifyJavaScript,
    UrlProcessor urlProcessor,
    List<string> totalFileNames)
{
    string urlsHash = UrlsHash(fileUrls);

    // Store the urls of the files under the hash, so GetContentVersion can
    // retrieve the urls if needed.
    StoreFileUrls(context, urlsHash, fileUrls, fileType);

    string content;
    string versionId;
    GetContentVersion(
        context, urlsHash, urlProcessor, totalFileNames, minifyCSS, minifyJavaScript,
        out content, out versionId, out fileType);

    return CombinedFileUrl(urlsHash, versionId, fileType, urlProcessor);
}
/// <summary>
/// Takes the hash identifying the urls of the files that make up a combined file.
/// Returns the compressed content of the combined files, and the version ID
/// of the combined files. The version ID is based on the last modified time of the
/// last modified file that goes into the combined file.
/// </summary>
/// <param name="context">Current HTTP context; its Cache and Application state are used.</param>
/// <param name="urlsHash">Hash under which the file urls were stored by StoreFileUrls.</param>
/// <param name="urlProcessor">Supplies the missing-file policy used when mapping urls to paths.</param>
/// <param name="totalFileNames">
/// The file names of the files read by this method get added to this list.
/// If this is null, nothing is done with this parameter.
/// </param>
/// <param name="minifyCSS">Whether CSS content gets minified.</param>
/// <param name="minifyJavaScript">Whether JavaScript content gets minified.</param>
/// <param name="combinedFileContent">
/// Content to be sent back to the browser.
/// Will be null if the content could not be retrieved, because the hash was not found in
/// the Application object. This means that the file tag that caused the browser to
/// request this file was generated in JavaScript or appeared outside the head tag
/// on the page. This will also happen in debug mode.
/// In this case, the name of the requested file matches an actual
/// file on the server.
/// </param>
/// <param name="versionId">Version id of the combined content; null when the hash was not found.</param>
/// <param name="fileType">File type stored with the urls; see RetrieveFileUrls.</param>
private static void GetContentVersion(
    HttpContext context,
    string urlsHash,
    UrlProcessor urlProcessor,
    List<string> totalFileNames,
    bool minifyCSS,
    bool minifyJavaScript,
    out string combinedFileContent,
    out string versionId,
    out FileTypeUtilities.FileType fileType)
{
    combinedFileContent = null;
    versionId = null;

    // Look up which file urls belong to this hash. If the hash is unknown,
    // leave the out parameters null so the caller falls back to a physical file.
    List<Uri> fileUrls;
    RetrieveFileUrls(context, urlsHash, out fileUrls, out fileType);
    if (fileUrls == null)
    {
        return;
    }

    CacheElement cacheElement = (CacheElement)context.Cache[urlsHash];
    if (cacheElement == null)
    {
        // Cache miss: read every file, concatenate (and for CSS, fix relative
        // url() properties), and track the most recent modification time for
        // the version id.
        StringBuilder combinedContentSb = new StringBuilder();
        DateTime mostRecentModifiedTime = DateTime.MinValue;
        List<string> fileNames = new List<string>();
        bool fileMissing = false;

        foreach (Uri fileUrl in fileUrls)
        {
            string filePath = MapPath(fileUrl.AbsolutePath, urlProcessor.ThrowExceptionOnMissingFile);
            string fileContent = null;
            if (filePath != null)
            {
                fileContent = File.ReadAllText(filePath);
                if (fileType == FileTypeUtilities.FileType.CSS)
                {
                    FixUrlProperties(ref fileContent, fileUrl, urlProcessor);
                }

                // NOTE(review): GetLastWriteTime uses local time, so the derived
                // version id could repeat across a DST change - confirm VersionId
                // tolerates this.
                DateTime lastModifiedTime = File.GetLastWriteTime(filePath);
                mostRecentModifiedTime = (mostRecentModifiedTime > lastModifiedTime) ? mostRecentModifiedTime : lastModifiedTime;

                fileNames.Add(filePath);
                if (totalFileNames != null)
                {
                    totalFileNames.Add(filePath);
                }
            }
            else
            {
                // A comment starting with /*! doesn't get removed by the minifier,
                // so the missing-file marker survives compression.
                fileContent = string.Format("\n/*!\n** Does not exist: {0}\n*/\n", fileUrl);
                fileMissing = true;
            }
            combinedContentSb.Append(fileContent);
        }

        string combinedContent = combinedContentSb.ToString();
        if (!string.IsNullOrEmpty(combinedContent))
        {
            cacheElement = new CacheElement();
            cacheElement.CombinedFileContent = combinedContent;

            // Minify according to file type; anything that isn't JavaScript is
            // treated as CSS here.
            if (fileType == FileTypeUtilities.FileType.JavaScript)
            {
                if (minifyJavaScript)
                {
                    cacheElement.CombinedFileContent = JavaScriptCompressor.Compress(combinedContent);
                }
            }
            else
            {
                if (minifyCSS)
                {
                    cacheElement.CombinedFileContent = CssCompressor.Compress(combinedContent);
                }
            }
            cacheElement.VersionId = VersionId(mostRecentModifiedTime);

            // Cache the newly created cacheElement
            //
            // Do not cache the cacheElement if one of the files couldn't be found.
            // That way, the package will keep checking the missing file, and pick it up
            // when someone puts the file there.
            if (!fileMissing)
            {
                // The CacheDependency evicts the entry as soon as any source file changes.
                CacheDependency cd = new CacheDependency(fileNames.ToArray());
                context.Cache.Insert(urlsHash, cacheElement, cd);
            }
        }
    }

    if (cacheElement == null)
    {
        // All source files were empty/missing and nothing was cached. In debug
        // mode surface this loudly; otherwise serve empty content.
        if (context.IsDebuggingEnabled)
        {
            throw new Exception("cacheElement == null");
        }
        combinedFileContent = "";
        versionId = "";
    }
    else
    {
        combinedFileContent = cacheElement.CombinedFileContent;
        versionId = cacheElement.VersionId;
    }
}
/// <summary>
/// Builds the url of a combined file from its urls hash and version id.
/// </summary>
/// <param name="urlsHash">
/// Hash based on the urls of the files that make up the combined file.
/// </param>
/// <param name="versionId">
/// A string that is different for each version of the files that make up
/// the combined file. This is used to make sure that a browser doesn't
/// pick up an outdated version from its internal browser cache.
/// </param>
/// <param name="fileType">Determines the extension used in the url.</param>
/// <param name="urlProcessor">Applies domain and version processing to the raw url.</param>
/// <returns>The processed combined file url.</returns>
private static string CombinedFileUrl(
    string urlsHash,
    string versionId,
    FileTypeUtilities.FileType fileType,
    UrlProcessor urlProcessor)
{
    string rawUrl = string.Concat("/", urlsHash, FileTypeUtilities.FileTypeToExtension(fileType));
    return urlProcessor.ProcessedUrl(rawUrl, false, false, null, versionId);
}
/// <summary>
/// Retrieves the file urls list stored in Application state under the given hash.
/// </summary>
/// <param name="context">Current HTTP context; its Application state is read.</param>
/// <param name="urlsHash">Hash under which the urls were stored.</param>
/// <param name="fileUrls">List of file urls. Null if the hash was not found.</param>
/// <param name="fileType">
/// File type stored with the urls. Defaults to JavaScript if the hash was not found.
/// </param>
private static void RetrieveFileUrls(
    HttpContext context,
    string urlsHash,
    out List<Uri> fileUrls,
    out FileTypeUtilities.FileType fileType)
{
    FileUrlsElement stored = (FileUrlsElement)context.Application[urlsHash];
    if (stored != null)
    {
        fileUrls = stored.fileUrls;
        fileType = stored.fileType;
    }
    else
    {
        fileUrls = null;
        fileType = FileTypeUtilities.FileType.JavaScript;
    }
}
/// <summary>
/// Stores the file urls list in Application state under the given hash,
/// together with the file type, so RetrieveFileUrls can find them later.
/// </summary>
/// <param name="context">Current HTTP context; its Application state is written.</param>
/// <param name="urlsHash">Hash under which to store the urls.</param>
/// <param name="fileUrls">Urls of the files that make up the combined file.</param>
/// <param name="fileType">File type (CSS or JavaScript) of those files.</param>
private static void StoreFileUrls(
    HttpContext context,
    string urlsHash,
    List<Uri> fileUrls,
    FileTypeUtilities.FileType fileType)
{
    context.Application[urlsHash] = new FileUrlsElement
    {
        fileType = fileType,
        fileUrls = fileUrls
    };
}