/// <summary>
/// Uploads the resource to S3 asynchronously.
/// </summary>
/// <param name="resourceToUpload"><see cref="ResourceUploadSettings"/> describing resource to upload to S3</param>
/// <returns>The <see cref="S3Artifact"/> describing the object uploaded to S3.</returns>
/// <exception cref="InvalidDataException">Metadata keys must be strings.</exception>
public async Task<S3Artifact> UploadResourceToS3Async(ResourceUploadSettings resourceToUpload)
{
    // Ensure the target bucket exists before attempting the upload.
    await this.CheckBucket(resourceToUpload.S3Artifact.BucketName);

    var req = new PutObjectRequest
                  {
                      BucketName = resourceToUpload.S3Artifact.BucketName,
                      Key = resourceToUpload.FullKey,
                      FilePath = resourceToUpload.File.FullName
                  };

    if (resourceToUpload.Metadata != null)
    {
        // Add user metadata. Keys come from an untyped collection, so verify each is a string.
        foreach (var k in resourceToUpload.Metadata.Keys)
        {
            if (!(k is string))
            {
                throw new InvalidDataException("Metadata keys must be strings.");
            }

            req.Metadata.Add(k.ToString(), resourceToUpload.Metadata[k].ToString());
        }
    }

    // Add our own hash so ObjectChangedAsync can later detect unchanged content.
    req.Metadata.Add(PackagerHashKey, resourceToUpload.Hash);

    await this.s3.PutObjectAsync(req);
    this.logger.LogVerbose($"Uploaded {resourceToUpload.File.Name} to {resourceToUpload.S3Artifact.Url}");
    return resourceToUpload.S3Artifact;
}
/// <summary>
/// Processes a nested stack.
/// </summary>
/// <param name="nestedStackResource">The nested stack.</param>
/// <param name="templatePath">The template path.</param>
/// <param name="workingDirectory">The working directory.</param>
/// <returns><c>true</c> if the containing template should be modified (to point to S3); else <c>false</c></returns>
/// <exception cref="FileNotFoundException">Nested stack resource '{nestedStackResource.Name}': TemplateURL cannot refer to a directory.</exception>
private async Task<bool> ProcessNestedStack(
    IResource nestedStackResource,
    string templatePath,
    string workingDirectory)
{
    var nestedTemplateLocation = ResolveFileSystemResource(
        this.pathResolver,
        templatePath,
        (string)nestedStackResource.GetResourcePropertyValue("TemplateURL"));

    switch (nestedTemplateLocation)
    {
        case null:

            // Value of TemplateURL is already a URL - nothing to do.
            return false;

        case FileInfo fi:

            // Referenced nested template is in the filesystem, therefore it must be uploaded
            // whether or not it was itself modified
            var processedTemplate = new FileInfo(await this.ProcessTemplate(fi.FullName, workingDirectory));
            var templateHash = processedTemplate.MD5();

            // Output intermediate templates to console if -Debug
            this.logger.LogDebug($"Processed template '{fi.FullName}', Hash: {templateHash}");
            this.logger.LogDebug("\n\n{0}", File.ReadAllText(processedTemplate.FullName));

            var resourceToUpload = new ResourceUploadSettings
                                       {
                                           File = processedTemplate,
                                           Hash = templateHash,
                                           KeyPrefix = this.s3Util.KeyPrefix,
                                           Metadata = this.s3Util.Metadata
                                       };

            // Only upload when the content hash differs from what is already in S3.
            if (await this.s3Util.ObjectChangedAsync(resourceToUpload))
            {
                await this.s3Util.UploadResourceToS3Async(resourceToUpload);
            }

            // Update resource to point to uploaded template
            nestedStackResource.UpdateResourceProperty("TemplateURL", resourceToUpload.S3Artifact.Url);
            break;

        default:

            // The path references a directory, which is illegal in this context.
            throw new FileNotFoundException(
                $"Nested stack resource '{nestedStackResource.Name}': TemplateURL cannot refer to a directory.");
    }

    return true;
}
/// <summary>
/// Determine whether we have created a new version of the resource in S3
/// </summary>
/// <param name="resourceToUpload">The resource to upload.</param>
/// <returns><c>true</c> if the object should be uploaded; else <c>false</c></returns>
/// <exception cref="FormatException">Unable to parse key name '{Path.GetFileName(latestVersion.Key)}'</exception>
public async Task<bool> ObjectChangedAsync(ResourceUploadSettings resourceToUpload)
{
    await this.CheckBucket(resourceToUpload.S3Artifact?.BucketName);

    var keyPrefix = resourceToUpload.KeyPrefix ?? string.Empty;
    var latestVersion = await this.GetLatestVersionOfObjectAsync(resourceToUpload.File, keyPrefix);

    if (latestVersion == null)
    {
        this.logger.LogDebug($"Version of {resourceToUpload.File} not found");
    }
    else
    {
        // Read metadata stored with the existing object and compare its hash to the local file's.
        var headResponse = await this.s3.GetObjectMetadataAsync(
                               new GetObjectMetadataRequest
                                   {
                                       BucketName = latestVersion.BucketName, Key = latestVersion.Key
                                   });

        var storedMetadata = headResponse.Metadata;

        if (!storedMetadata.Keys.Contains(AmzPackagerHashKey))
        {
            this.logger.LogDebug("- Object hash not found in metadata.");
        }
        else
        {
            var hash = storedMetadata[AmzPackagerHashKey];

            this.logger.LogDebug(
                $"Version of {resourceToUpload.File} exists: {ToS3Url(latestVersion.BucketName, latestVersion.Key)}, Hash: {hash}");

            if (hash == resourceToUpload.Hash)
            {
                // Existing S3 object is identical - point the caller at it and skip the upload.
                this.logger.LogDebug("- Hashes match. Object unchanged.");
                resourceToUpload.S3Artifact = new S3Artifact
                                                  {
                                                      BucketName = latestVersion.BucketName,
                                                      Key = latestVersion.Key
                                                  };
                return false;
            }

            this.logger.LogDebug("- Hashes don't match. Object will be uploaded.");
        }
    }

    // Compute new key
    string newObjectName;

    if (latestVersion == null)
    {
        // First upload of this object - start the version counter at zero.
        newObjectName = this.FileInfoToUnVersionedObjectName(resourceToUpload.File) + "-0000"
                        + Path.GetExtension(resourceToUpload.File.Name);
    }
    else
    {
        // Generate new key name so that CloudFormation will redeploy lambdas etc.
        var mc = ObjectVersionRegex.Match(Path.GetFileNameWithoutExtension(latestVersion.Key));

        if (!mc.Success)
        {
            throw new FormatException($"Unable to parse key name '{Path.GetFileName(latestVersion.Key)}'");
        }

        // We aren't going to run this package more than 10,000 times?
        newObjectName =
            $"{mc.Groups["name"].Value}-{(int.Parse(mc.Groups["version"].Value) + 1) % 10000:D4}{Path.GetExtension(latestVersion.Key)}";
    }

    // Set new object name on ResourceToUpload object
    resourceToUpload.S3Artifact = new S3Artifact
                                      {
                                          BucketName = this.BucketName,
                                          Key = (keyPrefix.Trim('/') + "/" + newObjectName).TrimStart('/')
                                      };

    return true;
}
/// <summary>
/// Package the artifact
/// </summary>
/// <param name="workingDirectory">Working directory to use for packaging</param>
/// <returns><see cref="ResourceUploadSettings"/>; else <c>null</c> if nothing to upload (hash sums match)</returns>
public async Task<ResourceUploadSettings> Package(string workingDirectory)
{
    this.ValidateHandler();

    var artifactType = this.LambdaArtifact.ArtifactType;

    // Template is unchanged if code is inline or already in S3
    if (artifactType == LambdaArtifactType.Inline || artifactType == LambdaArtifactType.FromS3)
    {
        return null;
    }

    if (artifactType == LambdaArtifactType.ZipFile)
    {
        // Already zipped
        FileInfo zipFile = this.LambdaArtifact;
        var uploadSettings = new ResourceUploadSettings { File = zipFile, Hash = zipFile.MD5() };

        // Template will always be modified, however the resource may not need upload.
        await this.S3.ObjectChangedAsync(uploadSettings);

        return uploadSettings;
    }

    var dependencies = this.LambdaArtifact.LoadDependencies();

    if (!dependencies.Any())
    {
        // No dependencies: package the raw code file or directory as-is.
        if (artifactType == LambdaArtifactType.CodeFile)
        {
            return await ArtifactPackager.PackageFile(
                       this.LambdaArtifact,
                       workingDirectory,
                       true,
                       this.S3,
                       this.Logger);
        }

        if (artifactType == LambdaArtifactType.Directory)
        {
            return await ArtifactPackager.PackageDirectory(
                       this.LambdaArtifact,
                       workingDirectory,
                       this.S3,
                       this.Logger);
        }
    }

    // If we get here, there are dependencies to process
    var packageDirectory = this.PreparePackage(workingDirectory);

    return await ArtifactPackager.PackageDirectory(
               new DirectoryInfo(packageDirectory),
               workingDirectory,
               this.S3,
               this.Logger);
}