//--- Methods ---

/// <summary>
/// Builds a C# Lambda function project into a content-addressed deployment zip package
/// (<c>function_{module}_{logicalId}_{hash}.zip</c>) and registers it as the function's
/// <c>PackageName</c> artifact. When <paramref name="forceBuild"/> is false, the build is
/// skipped entirely if an existing package is newer than all discovered project dependencies
/// and its embedded API mappings are unchanged.
/// </summary>
/// <param name="function">Function to build; supplies project path, handler, and validation flags.</param>
/// <param name="noCompile">When true, stop after project-file validation without compiling.</param>
/// <param name="noAssemblyValidation">When true, skip LambdaSharp package-reference validation.</param>
/// <param name="gitSha">Git SHA recorded into the package's git-info file.</param>
/// <param name="gitBranch">Git branch recorded into the package's git-info file.</param>
/// <param name="buildConfiguration">Build configuration (e.g. Release) passed to the publish step.</param>
/// <param name="forceBuild">When true, bypass the up-to-date check and clean build folders first.</param>
public void Build(
    IFunction function,
    bool noCompile,
    bool noAssemblyValidation,
    string gitSha,
    string gitBranch,
    string buildConfiguration,
    bool forceBuild
) {

    // collect sources with invoke methods
    var mappings = ExtractMappings(function);
    if(mappings == null) {

        // nothing to log since error was already logged
        return;
    }

    // check if a function package already exists
    if(!forceBuild) {

        // any previously built package for this function matches this name prefix (hash suffix varies)
        var functionPackage = Provider.ExistingPackages.FirstOrDefault(p =>
            Path.GetFileName(p).StartsWith($"function_{Provider.ModuleFullName}_{function.LogicalId}_", StringComparison.Ordinal)
            && p.EndsWith(".zip", StringComparison.Ordinal)
        );

        // to skip the build, we both need the function package and the function schema when mappings are present
        var schemaFile = Path.Combine(Provider.OutputDirectory, $"functionschema_{Provider.ModuleFullName}_{function.LogicalId}.json");
        if((functionPackage != null) && (!mappings.Any() || File.Exists(schemaFile))) {
            LogInfoVerbose($"=> Analyzing function {function.FullName} dependencies");

            // find all files used to create the function package
            var files = new HashSet<string>();
            CSharpProjectFile.DiscoverDependencies(
                files,
                function.Project,
                filePath => LogInfoVerbose($"... analyzing {filePath}"),
                (message, exception) => LogError(message, exception)
            );

            // check if any of the files has been modified more recently than the function package
            var functionPackageDate = File.GetLastWriteTime(functionPackage);
            var file = files.FirstOrDefault(f => File.GetLastWriteTime(f) > functionPackageDate);
            if(file == null) {
                var success = true;
                if(mappings.Any()) {

                    // apply function schema to generate REST API and WebSocket models
                    try {
                        if(!ApplyInvocationSchemas(function, mappings, schemaFile, silent: true)) {
                            success = false;

                            // reset the mappings as the call to ApplyInvocationSchemas() may have modified them
                            mappings = ExtractMappings(function);
                            if(mappings == null) {

                                // nothing to log since error was already logged
                                return;
                            }
                        }
                    } catch(Exception e) {
                        LogError("unable to read create-invoke-methods-schema output", e);
                        return;
                    }

                    // check if the mappings have changed by comparing the new data-structure to the one inside the zip file
                    if(success) {
                        var newMappingsJson = JsonSerializer.Serialize(new ApiGatewayInvocationMappings {
                            Mappings = mappings
                        }, _jsonOptions);
                        using(var zipArchive = ZipFile.Open(functionPackage, ZipArchiveMode.Read)) {
                            var entry = zipArchive.Entries.FirstOrDefault(entry => entry.FullName == API_MAPPINGS);
                            if(entry != null) {
                                using(var stream = entry.Open())
                                using(var reader = new StreamReader(stream)) {

                                    // NOTE: exact string comparison; assumes serializer output is deterministic for equal mappings
                                    if(newMappingsJson != reader.ReadToEnd()) {

                                        // module mappings have changed
                                        success = false;
                                        LogInfoVerbose($"... api mappings updated");
                                    }
                                }
                            } else {

                                // we now have mappings and we didn't use to
                                success = false;
                                LogInfoVerbose($"... api mappings updated");
                            }
                        }
                    }
                }

                // only skip compilation if we were able to apply the invocation schemas (or didn't have to)
                if(success) {
                    Provider.WriteLine($"=> Skipping function {Provider.InfoColor}{function.FullName}{Provider.ResetColor} (no changes found)");

                    // keep the existing package (removing it from ExistingPackages prevents later cleanup from deleting it)
                    Provider.ExistingPackages.Remove(functionPackage);

                    // set the module variable to the final package name
                    Provider.AddArtifact($"{function.FullName}::PackageName", functionPackage);
                    return;
                }
            } else {
                LogInfoVerbose($"... change detected in {file}");
            }
        }
    } else {
        LogInfoVerbose($"=> Analyzing function {function.FullName} dependencies");

        // find all files used to create the function package
        var files = new HashSet<string>();
        CSharpProjectFile.DiscoverDependencies(
            files,
            function.Project,
            filePath => LogInfoVerbose($"... analyzing {filePath}"),
            (message, exception) => LogError(message, exception)
        );

        // loop over all project folders
        new CleanBuildFolders(BuildEventsConfig).Do(files);
    }

    // read settings from project file
    var projectFile = new CSharpProjectFile(function.Project);

    // compile function project
    var isReadyToRunSupported = VersionInfoCompatibility.IsReadyToRunSupported(projectFile.TargetFramework);
    var isAmazonLinux2 = Provider.IsAmazonLinux2();

    // ReadyToRun images are architecture/OS specific, so only emit them when building on the Lambda-compatible host
    var isReadyToRun = isReadyToRunSupported && isAmazonLinux2;

    // a self-contained custom-runtime function is identified by an executable project producing a 'bootstrap' assembly
    var isSelfContained = (projectFile.OutputType == "Exe") && (projectFile.AssemblyName == "bootstrap");
    var isTopLevelMain = !isSelfContained && (projectFile.OutputType == "Exe");
    var readyToRunText = isReadyToRun ? ", ReadyToRun" : "";
    var selfContained = isSelfContained ? ", SelfContained" : "";
    Provider.WriteLine($"=> Building function {Provider.InfoColor}{function.FullName}{Provider.ResetColor} [{projectFile.TargetFramework}, {buildConfiguration}{readyToRunText}{selfContained}]");
    var projectDirectory = Path.Combine(Provider.WorkingDirectory, Path.GetFileNameWithoutExtension(function.Project));

    // check if the project contains an obsolete AWS Lambda Tools extension: <DotNetCliToolReference Include="Amazon.Lambda.Tools"/>
    if(projectFile.RemoveAmazonLambdaToolsReference()) {
        LogWarn($"removing obsolete AWS Lambda Tools extension from {Path.GetRelativePath(Provider.WorkingDirectory, function.Project)}");
        projectFile.Save(function.Project);
    }

    // validate the project is using the most recent lambdasharp assembly references
    if(
        !noAssemblyValidation
        && function.HasAssemblyValidation
        && !projectFile.ValidateLambdaSharpPackageReferences(Provider.ToolVersion, LogWarn, LogError)
    ) {
        return;
    }
    if(noCompile) {
        return;
    }

    // build project with AWS dotnet CLI lambda tool
    if(!DotNetPublish(projectFile.TargetFramework, buildConfiguration, projectDirectory, forceBuild, isReadyToRunSupported, isAmazonLinux2, isReadyToRun, isSelfContained, out var publishFolder)) {

        // nothing to do; error was already reported
        return;
    }

    // building a function with top-level statements also creates an ELF file we don't need
    if(isTopLevelMain) {
        var elfBinary = Path.Combine(publishFolder, Path.GetFileNameWithoutExtension(function.Project));
        try {
            File.Delete(elfBinary);
        } catch(Exception e) {

            // no harm in leaving the file; report error as a warning
            LogWarn($"Unable to delete unnecessary ELF binary at '{elfBinary}' (Error: {e})");
        }
    }

    // check if the assembly entry-point needs to be validated
    if(function.HasHandlerValidation) {
        if(isSelfContained || isTopLevelMain) {

            // nothing to do: executables supply their own entry point, so there is no handler method to resolve
        } else {

            // verify the function handler can be found in the compiled assembly
            if(function.Handler != null) {
                if(!ValidateEntryPoint(
                    publishFolder,
                    function.Handler
                )) {
                    return;
                }
            }
        }
    }

    // add api mappings JSON file(s)
    if(mappings.Any()) {

        // self-contained assemblies cannot be inspected
        if(isSelfContained) {
            LogError("API Gateway mappings are not supported for self-contained Lambda functions");
            return;
        }

        // create request/response schemas for invocation methods
        if(!LambdaSharpCreateInvocationSchemas(
            function,
            publishFolder,
            projectFile.RootNamespace,
            function.Handler,
            mappings
        )) {
            LogError($"'{Provider.Lash} util create-invoke-methods-schema' command failed");
            return;
        }

        // write api-mappings.json file to publish folder
        File.WriteAllText(Path.Combine(publishFolder, API_MAPPINGS), JsonSerializer.Serialize(new ApiGatewayInvocationMappings {
            Mappings = mappings
        }, _jsonOptions));
    }

    // compute hash of publish folder; files are enumerated in sorted order and paths normalized
    // to forward slashes so the hash is deterministic across runs and operating systems
    string hash;
    using(var md5 = MD5.Create())
    using(var hashStream = new CryptoStream(Stream.Null, md5, CryptoStreamMode.Write)) {
        foreach(var publishedFile in Directory.GetFiles(publishFolder, "*", SearchOption.AllDirectories).OrderBy(filePath => filePath)) {

            // hash file path
            var filePathBytes = Encoding.UTF8.GetBytes(Path.GetRelativePath(publishFolder, publishedFile).Replace('\\', '/'));
            hashStream.Write(filePathBytes, 0, filePathBytes.Length);

            // hash file contents
            using(var stream = File.OpenRead(publishedFile)) {
                stream.CopyTo(hashStream);
            }
        }
        hashStream.FlushFinalBlock();

        // Hash is non-null after FlushFinalBlock() finalizes the transform
        hash = md5.Hash !.ToHexString();
    }

    // generate function package with hash
    var package = Path.Combine(Provider.OutputDirectory, $"function_{Provider.ModuleFullName}_{function.LogicalId}_{hash}.zip");
    if(Provider.ExistingPackages.Remove(package)) {

        // remove old, existing package so we can create the new package in the same location (which also preserves the more recent build timestamp)
        File.Delete(package);
    }

    // write git-info.json file to publish folder; NOTE: written after the hash is computed,
    // so changing git metadata alone does not alter the package hash
    File.WriteAllText(Path.Combine(publishFolder, GIT_INFO_FILE), JsonSerializer.Serialize(new ModuleManifestGitInfo {
        SHA = gitSha,
        Branch = gitBranch
    }, _jsonOptions));

    // zip files in publishing folder
    new ZipTool(BuildEventsConfig).ZipFolderWithExecutable(package, publishFolder);

    // set the module variable to the final package name
    Provider.AddArtifact($"{function.FullName}::PackageName", package);
}
//--- Methods ---

/// <summary>
/// Builds a C# Lambda function project with the AWS dotnet CLI Lambda tool into a
/// content-addressed deployment zip package (<c>function_{module}_{logicalId}_{hash}.zip</c>)
/// and registers it as the function's <c>PackageName</c> artifact. When
/// <paramref name="forceBuild"/> is false, the build is skipped if an existing package is newer
/// than all discovered project dependencies and its embedded API mappings are unchanged.
/// </summary>
/// <param name="function">Function to build; supplies project path, handler, and validation flags.</param>
/// <param name="noCompile">When true, stop after project-file validation without compiling.</param>
/// <param name="noAssemblyValidation">When true, skip LambdaSharp package-reference validation.</param>
/// <param name="gitSha">Git SHA recorded into the package's git-info file.</param>
/// <param name="gitBranch">Git branch recorded into the package's git-info file.</param>
/// <param name="buildConfiguration">Build configuration (e.g. Release) passed to the package step.</param>
/// <param name="forceBuild">When true, bypass the up-to-date check and clean build folders first.</param>
public void Build(
    IFunction function,
    bool noCompile,
    bool noAssemblyValidation,
    string gitSha,
    string gitBranch,
    string buildConfiguration,
    bool forceBuild
) {

    // check if AWS Lambda Tools extension is installed
    if(!new AmazonLambdaTool(BuildEventsConfig).CheckIsInstalled()) {
        return;
    }

    // collect sources with invoke methods
    var mappings = ExtractMappings(function);
    if(mappings == null) {

        // nothing to log since error was already logged
        return;
    }

    // check if a function package already exists
    if(!forceBuild) {

        // any previously built package for this function matches this name prefix (hash suffix varies)
        var functionPackage = Provider.ExistingPackages.FirstOrDefault(p =>
            Path.GetFileName(p).StartsWith($"function_{Provider.ModuleFullName}_{function.LogicalId}_", StringComparison.Ordinal)
            && p.EndsWith(".zip", StringComparison.Ordinal)
        );

        // to skip the build, we both need the function package and the function schema when mappings are present
        var schemaFile = Path.Combine(Provider.OutputDirectory, $"functionschema_{Provider.ModuleFullName}_{function.LogicalId}.json");
        if((functionPackage != null) && (!mappings.Any() || File.Exists(schemaFile))) {
            LogInfoVerbose($"=> Analyzing function {function.FullName} dependencies");

            // find all files used to create the function package
            var files = new HashSet<string>();
            CSharpProjectFile.DiscoverDependencies(
                files,
                function.Project,
                filePath => LogInfoVerbose($"... analyzing {filePath}"),
                (message, exception) => LogError(message, exception)
            );

            // check if any of the files has been modified more recently than the function package
            var functionPackageDate = File.GetLastWriteTime(functionPackage);
            var file = files.FirstOrDefault(f => File.GetLastWriteTime(f) > functionPackageDate);
            if(file == null) {
                var success = true;
                if(mappings.Any()) {

                    // apply function schema to generate REST API and WebSocket models
                    try {
                        if(!ApplyInvocationSchemas(function, mappings, schemaFile, silent: true)) {
                            success = false;

                            // reset the mappings as the call to ApplyInvocationSchemas() may have modified them
                            mappings = ExtractMappings(function);
                            if(mappings == null) {

                                // nothing to log since error was already logged
                                return;
                            }
                        }
                    } catch(Exception e) {
                        LogError("unable to read create-invoke-methods-schema output", e);
                        return;
                    }

                    // check if the mappings have changed by comparing the new data-structure to the one inside the zip file
                    if(success) {
                        var newMappingsJson = JsonSerializer.Serialize(new ApiGatewayInvocationMappings {
                            Mappings = mappings
                        }, _jsonOptions);
                        using(var zipArchive = ZipFile.Open(functionPackage, ZipArchiveMode.Read)) {
                            var entry = zipArchive.Entries.FirstOrDefault(entry => entry.FullName == API_MAPPINGS);
                            if(entry != null) {
                                using(var stream = entry.Open())
                                using(var reader = new StreamReader(stream)) {

                                    // NOTE: exact string comparison; assumes serializer output is deterministic for equal mappings
                                    if(newMappingsJson != reader.ReadToEnd()) {

                                        // module mappings have changed
                                        success = false;
                                        LogInfoVerbose($"... api mappings updated");
                                    }
                                }
                            } else {

                                // we now have mappings and we didn't use to
                                success = false;
                                LogInfoVerbose($"... api mappings updated");
                            }
                        }
                    }
                }

                // only skip compilation if we were able to apply the invocation schemas (or didn't have to)
                if(success) {
                    Provider.WriteLine($"=> Skipping function {Provider.InfoColor}{function.FullName}{Provider.ResetColor} (no changes found)");

                    // keep the existing package (removing it from ExistingPackages prevents later cleanup from deleting it)
                    Provider.ExistingPackages.Remove(functionPackage);

                    // set the module variable to the final package name
                    Provider.AddArtifact($"{function.FullName}::PackageName", functionPackage);
                    return;
                }
            } else {
                LogInfoVerbose($"... change detected in {file}");
            }
        }
    } else {
        LogInfoVerbose($"=> Analyzing function {function.FullName} dependencies");

        // find all files used to create the function package
        var files = new HashSet<string>();
        CSharpProjectFile.DiscoverDependencies(
            files,
            function.Project,
            filePath => LogInfoVerbose($"... analyzing {filePath}"),
            (message, exception) => LogError(message, exception)
        );

        // loop over all project folders
        new CleanBuildFolders(BuildEventsConfig).Do(files);
    }

    // read settings from project file
    var projectFile = new CSharpProjectFile(function.Project);

    // compile function project
    // NOTE(review): this ordinal string comparison only matches frameworks sorting at or after
    // "netcoreapp3."; "net5.0"/"net6.0" sort BEFORE it and would be treated as not ReadyToRun
    // capable — confirm intended target-framework range (later revisions use a version helper instead)
    var isNetCore31OrLater = projectFile.TargetFramework.CompareTo("netcoreapp3.") >= 0;
    var isAmazonLinux2 = Provider.IsAmazonLinux2();

    // ReadyToRun images are architecture/OS specific, so only emit them when building on the Lambda-compatible host
    var isReadyToRun = isNetCore31OrLater && isAmazonLinux2;
    var readyToRunText = isReadyToRun ? ", ReadyToRun" : "";
    Provider.WriteLine($"=> Building function {Provider.InfoColor}{function.FullName}{Provider.ResetColor} [{projectFile.TargetFramework}, {buildConfiguration}{readyToRunText}]");
    var projectDirectory = Path.Combine(Provider.WorkingDirectory, Path.GetFileNameWithoutExtension(function.Project));

    // package is built under a temporary name first; it is renamed once the content hash is known
    var temporaryPackage = Path.Combine(Provider.OutputDirectory, $"function_{Provider.ModuleFullName}_{function.LogicalId}_temporary.zip");

    // check if the project contains an obsolete AWS Lambda Tools extension: <DotNetCliToolReference Include="Amazon.Lambda.Tools"/>
    if(projectFile.RemoveAmazonLambdaToolsReference()) {
        LogWarn($"removing obsolete AWS Lambda Tools extension from {Path.GetRelativePath(Provider.WorkingDirectory, function.Project)}");
        projectFile.Save(function.Project);
    }

    // validate the project is using the most recent lambdasharp assembly references
    if(
        !noAssemblyValidation
        && function.HasAssemblyValidation
        && !projectFile.ValidateLambdaSharpPackageReferences(Provider.ToolVersion, LogWarn, LogError)
    ) {
        return;
    }
    if(noCompile) {
        return;
    }

    // build project with AWS dotnet CLI lambda tool
    if(!DotNetLambdaPackage(projectFile.TargetFramework, buildConfiguration, temporaryPackage, projectDirectory, forceBuild, isNetCore31OrLater, isAmazonLinux2, isReadyToRun)) {

        // nothing to do; error was already reported
        return;
    }

    // verify the function handler can be found in the compiled assembly
    var buildFolder = Path.Combine(projectDirectory, "bin", buildConfiguration, projectFile.TargetFramework, "publish");
    if(function.HasHandlerValidation) {
        if(function.Handler != null) {
            if(!ValidateEntryPoint(
                buildFolder,
                function.Handler
            )) {
                return;
            }
        }
    }

    // create request/response schemas for invocation methods
    if(!LambdaSharpCreateInvocationSchemas(
        function,
        buildFolder,
        projectFile.RootNamespace,
        function.Handler,
        mappings
    )) {
        LogError($"'{Provider.Lash} util create-invoke-methods-schema' command failed");
        return;
    }

    // add api mappings JSON file(s)
    if(mappings.Any()) {
        using(var zipArchive = ZipFile.Open(temporaryPackage, ZipArchiveMode.Update)) {
            var entry = zipArchive.CreateEntry(API_MAPPINGS);

            // Set RW-R--R-- permissions attributes on non-Windows operating system
            // (high 16 bits of ExternalAttributes hold the Unix st_mode; this value is octal 100644, a regular file)
            if(!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
                entry.ExternalAttributes = 0b1_000_000_110_100_100 << 16;
            }
            using(var stream = entry.Open()) {
                stream.Write(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new ApiGatewayInvocationMappings {
                    Mappings = mappings
                }, _jsonOptions)));
            }
        }
    }

    // compute hash for zip contents; entries are enumerated in sorted order and paths normalized
    // to forward slashes so the hash is deterministic across runs and operating systems
    string hash;
    using(var zipArchive = ZipFile.OpenRead(temporaryPackage)) {
        using(var md5 = MD5.Create())
        using(var hashStream = new CryptoStream(Stream.Null, md5, CryptoStreamMode.Write)) {
            foreach(var entry in zipArchive.Entries.OrderBy(e => e.FullName)) {

                // hash file path
                var filePathBytes = Encoding.UTF8.GetBytes(entry.FullName.Replace('\\', '/'));
                hashStream.Write(filePathBytes, 0, filePathBytes.Length);

                // hash file contents
                using(var stream = entry.Open()) {
                    stream.CopyTo(hashStream);
                }
            }
            hashStream.FlushFinalBlock();

            // Hash is populated after FlushFinalBlock() finalizes the transform
            hash = md5.Hash.ToHexString();
        }
    }

    // rename function package with hash
    var package = Path.Combine(Provider.OutputDirectory, $"function_{Provider.ModuleFullName}_{function.LogicalId}_{hash}.zip");
    if(Provider.ExistingPackages.Remove(package)) {

        // remove old, existing package so we can move the new package into location (which also preserves the more recent build timestamp)
        File.Delete(package);
    }
    File.Move(temporaryPackage, package);

    // add git-info.json file; NOTE: added after the hash is computed, so changing git metadata
    // alone does not alter the package hash
    using(var zipArchive = ZipFile.Open(package, ZipArchiveMode.Update)) {
        var entry = zipArchive.CreateEntry(GIT_INFO_FILE);

        // Set RW-R--R-- permissions attributes on non-Windows operating system
        // (high 16 bits of ExternalAttributes hold the Unix st_mode; this value is octal 100644, a regular file)
        if(!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) {
            entry.ExternalAttributes = 0b1_000_000_110_100_100 << 16;
        }
        using(var stream = entry.Open()) {
            stream.Write(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new ModuleManifestGitInfo {
                SHA = gitSha,
                Branch = gitBranch
            }, _jsonOptions)));
        }
    }

    // set the module variable to the final package name
    Provider.AddArtifact($"{function.FullName}::PackageName", package);
}