public void Save(int hashCode, CombinatorResource resource)
{
    var sliceCount = _fileRepository.Count(file => file.HashCode == hashCode);

    var fileRecord = new CombinedFileRecord()
    {
        HashCode = hashCode,
        Slice = ++sliceCount,
        Type = resource.Type,
        LastUpdatedUtc = _clock.UtcNow,
        Settings = _combinatorResourceManager.SerializeResourceSettings(resource)
    };

    if (!string.IsNullOrEmpty(resource.Content))
    {
        var path = MakePath(fileRecord);

        using (var stream = _storageProvider.CreateFile(path).OpenWrite())
        {
            var bytes = Encoding.UTF8.GetBytes(resource.Content);
            stream.Write(bytes, 0, bytes.Length);
        }
    }

    _fileRepository.Create(fileRecord);
}
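// Example of the slice numbering above: if _fileRepository already holds two records for this hash code,
// sliceCount starts at 2 and the new record is stored with Slice = 3, so every saved chunk of the same
// bundle gets its own file (named by MakePath() below).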
private static string MakePath(CombinedFileRecord file)
{
    // Other resource types may be added later, hence this branching structure.
    string extension = "";
    string folderPath = "";

    if (file.Type == ResourceType.JavaScript)
    {
        folderPath = _scriptsPath;
        extension = "js";
    }
    else if (file.Type == ResourceType.Style)
    {
        folderPath = _stylesPath;
        extension = "css";
    }

    return folderPath + file.GetFileName() + "." + extension;
}
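// For illustration only, a hedged sketch: assuming CombinedFileRecord.GetFileName() joins the record's
// identifier and slice number as "<identifier>-<Slice>" (an assumption, its implementation is not shown
// in this excerpt), a JavaScript record with identifier 1234 and Slice 2 would map to
// _scriptsPath + "1234-2" + ".js". The actual file name format depends on GetFileName().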
public void Save(string fingerprint, CombinatorResource resource, ICombinatorSettings settings)
{
    // With resource sharing enabled the save is delegated to the default shell, with sharing switched off
    // there to avoid recursing back into this branch.
    if (settings.EnableResourceSharing &&
        CallOnDefaultShell(cacheFileService =>
            cacheFileService.Save(fingerprint, resource, new CombinatorSettings(settings) { EnableResourceSharing = false })))
    {
        return;
    }

    var sliceCount = _fileRepository.Count(file => file.Fingerprint == ConvertFingerprintToStorageFormat(fingerprint));

    if (resource.LastUpdatedUtc == DateTime.MinValue)
    {
        resource.LastUpdatedUtc = _clock.UtcNow;
    }

    // Truncating the timestamp to whole seconds, because sub-second precision is not stored in the DB.
    // Otherwise a freshly created date time would differ from the same date time after a save-fetch
    // roundtrip, causing unwanted cache busting for the same resource.
    resource.LastUpdatedUtc = new DateTime(
        resource.LastUpdatedUtc.Year,
        resource.LastUpdatedUtc.Month,
        resource.LastUpdatedUtc.Day,
        resource.LastUpdatedUtc.Hour,
        resource.LastUpdatedUtc.Minute,
        resource.LastUpdatedUtc.Second);

    var fileRecord = new CombinedFileRecord()
    {
        Fingerprint = ConvertFingerprintToStorageFormat(fingerprint),
        Slice = ++sliceCount,
        Type = resource.Type,
        LastUpdatedUtc = resource.LastUpdatedUtc,
        Settings = _combinatorResourceManager.SerializeResourceSettings(resource)
    };

    _fileRepository.Create(fileRecord);

    if (!string.IsNullOrEmpty(resource.Content))
    {
        var path = MakePath(fileRecord);

        if (_storageProvider.FileExists(path)) _storageProvider.DeleteFile(path);

        using (var stream = _storageProvider.CreateFile(path).OpenWrite())
        {
            var bytes = Encoding.UTF8.GetBytes(resource.Content);
            stream.Write(bytes, 0, bytes.Length);
        }

        if (!resource.IsRemoteStorageResource)
        {
            // This is needed to adjust relative paths if the resource is stored in a remote storage provider.
            // Why the double saving? Before saving the file there is no reliable way to tell whether the
            // storage public URL will be a remote one or not...
            var testResource = _combinatorResourceManager.ResourceFactory(resource.Type);
            testResource.FillRequiredContext("TestCombinedResource", _storageProvider.GetPublicUrl(path));
            _combinatorResourceManager.DeserializeSettings(fileRecord.Settings, testResource);
            testResource.IsRemoteStorageResource =
                settings.RemoteStorageUrlPattern != null &&
                settings.RemoteStorageUrlPattern.IsMatch(testResource.AbsoluteUrl.ToString());

            if (testResource.IsRemoteStorageResource)
            {
                _storageProvider.DeleteFile(path);

                testResource.Content = resource.Content;
                var relativeUrlsBaseUri = settings.ResourceBaseUri ??
                    new Uri(_urlHelper.RequestContext.HttpContext.Request.Url, _urlHelper.Content("~/"));
                ResourceProcessingService.RegexConvertRelativeUrlsToAbsolute(testResource, relativeUrlsBaseUri);

                using (var stream = _storageProvider.CreateFile(path).OpenWrite())
                {
                    var bytes = Encoding.UTF8.GetBytes(testResource.Content);
                    stream.Write(bytes, 0, bytes.Length);
                }

                resource.IsRemoteStorageResource = true;
                fileRecord.Settings = _combinatorResourceManager.SerializeResourceSettings(resource);
            }
        }
    }

    _combinatorEventHandler.BundleChanged(fingerprint);
}
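// For illustration only, not part of the original service: a minimal sketch of how a caller inside this
// class might use Save(). The method name and the way the resource is produced are assumptions.
private void SaveCombinedStylesheet(string fingerprint, string combinedCss, ICombinatorSettings settings)
{
    // Build a style resource carrying the combined CSS; ResourceFactory() is used the same way as for
    // testResource in Save() above.
    var resource = _combinatorResourceManager.ResourceFactory(ResourceType.Style);
    resource.Content = combinedCss;

    // Save() stamps LastUpdatedUtc (truncated to whole seconds), persists the CombinedFileRecord, writes
    // the content through the storage provider and finally raises BundleChanged(fingerprint).
    Save(fingerprint, resource, settings);
}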