// Builds a fresh pair of test job directories: a source tree with files at the
// root and in one sub-directory, mirrored into the working directory.
private void CreateTestJobDirectories()
{
    // Recreate the source directory from scratch.
    if (Directory.Exists(_testJobSourceDir))
    {
        Directory.Delete(_testJobSourceDir, true);
    }
    Directory.CreateDirectory(_testJobSourceDir);

    // Root-level content: three text files plus the job binary and its config.
    foreach (var fileName in new[] { "test1.txt", "test2.txt", "test3.txt" })
    {
        File.WriteAllText(Path.Combine(_testJobSourceDir, fileName), "test");
    }
    File.WriteAllText(Path.Combine(_testJobSourceDir, "job.exe"), "binary");
    File.WriteAllText(Path.Combine(_testJobSourceDir, "job.exe.config"), "<configuration></configuration>");

    // Same three text files inside a sub-directory.
    string subDirectory = Path.Combine(_testJobSourceDir, "subdir");
    Directory.CreateDirectory(subDirectory);
    foreach (var fileName in new[] { "test1.txt", "test2.txt", "test3.txt" })
    {
        File.WriteAllText(Path.Combine(subDirectory, fileName), "test");
    }

    // Mirror the freshly created source tree into the working directory.
    if (Directory.Exists(_testJobWorkingDir))
    {
        Directory.Delete(_testJobWorkingDir, true);
    }
    FileSystemHelpers.CopyDirectoryRecursive(_testJobSourceDir, _testJobWorkingDir);
}
/// <summary>
/// Runs the build step for this deployment: copies the manifest, then either
/// clean-deploys via the base (kudusync) builder or incrementally copies the
/// repository into the output path.
/// </summary>
/// <param name="context">Deployment context carrying logger, paths, and manifests.</param>
public override async Task Build(DeploymentContext context)
{
    context.Logger.Log($"Running build. Project type: {ProjectType}");

    // Start by copying the manifest as-is so that
    // manifest based deployments (Example: ZipDeploy) are unaffected
    context.Logger.Log("Copying the manifest");
    FileSystemHelpers.CopyFile(context.PreviousManifestFilePath, context.NextManifestFilePath);

    // If we want to clean up the target directory before copying
    // the new files, use kudusync so that only unnecessary files are
    // deleted. This has two benefits:
    // 1. This is faster than deleting the target directory before copying the source dir.
    // 2. Minimizes chances of failure in deleting a directory due to open handles.
    //    This is especially useful when a target directory is present in the source and
    //    need not be deleted.
    if (_deploymentInfo.CleanupTargetDirectory)
    {
        context.Logger.Log($"Clean deploying to {context.OutputPath}");

        // We do not want to use the manifest for OneDeploy. Set manifest paths to empty.
        // This way we don't interfere with manifest based deployments.
        context.PreviousManifestFilePath = context.NextManifestFilePath = string.Empty;

        // BUG FIX: the Task returned by base.Build was previously discarded, so this
        // method could report "completed" before the kudusync copy actually finished
        // (and any failure inside it was unobserved). Await it instead.
        await base.Build(context);
    }
    else
    {
        context.Logger.Log($"Incrementally deploying to {context.OutputPath}");
        FileSystemHelpers.CopyDirectoryRecursive(_repositoryPath, context.OutputPath);
    }

    context.Logger.Log("Build completed successfully.");
}
// Copies the WebJob binaries into a private per-instance working directory so the
// originals can be updated while the job runs. Skips the copy when the content
// hash of the source matches the hash of the current cached copy.
private void CacheJobBinaries(IJobLogger logger)
{
    if (WorkingDirectory != null)
    {
        try
        {
            // Identical hashes mean the cached copy is already current — nothing to do.
            int currentHash = CalculateHashForJob(JobBinariesPath);
            int lastHash = CalculateHashForJob(WorkingDirectory);

            if (lastHash == currentHash)
            {
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it as it's not critical to cache job binaries
            logger.LogError("Failed to calculate hash for WebJob: " + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    // Stop any running instances before their binaries are replaced.
    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }

    // DeleteDirectorySafe swallows errors, so re-check the directory to surface
    // a failed delete as a warning instead of failing the whole operation.
    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            // Copy into a uniquely-named temp folder, patch the app configs, then
            // publish it as the new working directory.
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());
            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath);
            WorkingDirectory = tempJobInstancePath;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        WorkingDirectory = null;
    }
}
// Pre-deployment hook: in an Azure environment, copies the site's SSH keys from
// %HOME%\.ssh to %USERPROFILE%\.ssh so tools resolving keys from the user profile
// (e.g. npm with a private ssh git dependency) can find them.
private void PreDeployment(ITracer tracer)
{
    if (Environment.IsAzureEnvironment() && FileSystemHelpers.DirectoryExists(_environment.SSHKeyPath))
    {
        // BUG FIX: GetEnvironmentVariable can return null when USERPROFILE is not
        // set, which would make Path.Combine throw ArgumentNullException. Treat a
        // missing profile as "nothing to copy" instead of crashing pre-deployment.
        string userProfile = System.Environment.GetEnvironmentVariable("USERPROFILE");
        if (String.IsNullOrEmpty(userProfile))
        {
            return;
        }

        string src = Path.GetFullPath(_environment.SSHKeyPath);
        string dst = Path.GetFullPath(Path.Combine(userProfile, Constants.SSHKeyPath));

        // Skip the copy when source and destination resolve to the same folder.
        if (!String.Equals(src, dst, StringComparison.OrdinalIgnoreCase))
        {
            // copy %HOME%\.ssh to %USERPROFILE%\.ssh key to workaround
            // npm with private ssh git dependency
            using (tracer.Step("Copying SSH keys"))
            {
                FileSystemHelpers.CopyDirectoryRecursive(src, dst, overwrite: true);
            }
        }
    }
}
// Prepares the WebJob's working directory: either runs the job in-place from its
// binaries folder, or caches the binaries into a private per-instance directory,
// skipping the copy when content hashes indicate nothing changed.
private void CacheJobBinaries(JobBase job, IJobLogger logger)
{
    // Node jobs default to running in-place; other hosts default to a cached copy.
    // NOTE(review): assumes GetIsInPlace treats its argument as the fallback default — confirm.
    bool isInPlaceDefault = job.ScriptHost.GetType() == typeof(NodeScriptHost);
    if (JobSettings.GetIsInPlace(isInPlaceDefault))
    {
        // In-place mode: run straight out of the binaries folder, no copy needed.
        _inPlaceWorkingDirectory = JobBinariesPath;
        SafeKillAllRunningJobInstances(logger);
        UpdateAppConfigs(WorkingDirectory);
        return;
    }

    _inPlaceWorkingDirectory = null;

    if (WorkingDirectory != null)
    {
        try
        {
            // Identical hashes mean the cached copy is already current — nothing to do.
            int currentHash = CalculateHashForJob(JobBinariesPath);
            int lastHash = CalculateHashForJob(WorkingDirectory);

            if (lastHash == currentHash)
            {
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it as it's not critical to cache job binaries
            logger.LogWarning("Failed to calculate hash for WebJob, continue to copy WebJob binaries (this will not affect WebJob run)\n" + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    // Stop any running instances before their binaries are replaced.
    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }

    // DeleteDirectorySafe swallows errors, so re-check the directory to surface
    // a failed delete as a warning instead of failing the whole operation.
    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            // Copy into a uniquely-named temp folder, patch the app configs, then
            // publish it as the new working directory.
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());
            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath);
            _workingDirectory = tempJobInstancePath;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        _workingDirectory = null;
    }
}
// Prepares the WebJob's working directory: either runs the job in-place from its
// binaries folder, or caches the binaries into a private per-instance directory.
// Uses a per-file map diff (rather than a single hash) to detect changes,
// including deletions, against the snapshot cached at the last copy.
private void CacheJobBinaries(JobBase job, IJobLogger logger)
{
    // Node jobs default to running in-place; other hosts default to a cached copy.
    // NOTE(review): assumes GetIsInPlace treats its argument as the fallback default — confirm.
    bool isInPlaceDefault = job.ScriptHost.GetType() == typeof(NodeScriptHost);
    if (JobSettings.GetIsInPlace(isInPlaceDefault))
    {
        // In-place mode: run straight out of the binaries folder, no copy needed.
        _inPlaceWorkingDirectory = JobBinariesPath;
        SafeKillAllRunningJobInstances(logger);
        UpdateAppConfigs(WorkingDirectory, _analytics);
        return;
    }

    _inPlaceWorkingDirectory = null;

    // Snapshot of the current source files, also cached below for future diffs.
    Dictionary <string, FileInfoBase> sourceDirectoryFileMap = GetJobDirectoryFileMap(JobBinariesPath);

    if (WorkingDirectory != null)
    {
        try
        {
            var workingDirectoryFileMap = GetJobDirectoryFileMap(WorkingDirectory);
            if (!JobDirectoryHasChanged(sourceDirectoryFileMap, workingDirectoryFileMap, _cachedSourceDirectoryFileMap, logger))
            {
                // no changes detected, so skip the cache/copy step below
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it, since this diff optimization isn't critical.
            // We'll just do a full copy in this case.
            logger.LogWarning("Failed to diff WebJob directories for changes. Continuing to copy WebJob binaries (this will not affect the WebJob run)\n" + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    // Stop any running instances before their binaries are replaced.
    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }

    // DeleteDirectorySafe swallows errors, so re-check the directory to surface
    // a failed delete as a warning instead of failing the whole operation.
    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            // Copy into a uniquely-named temp folder, patch the app configs, then
            // publish it as the new working directory.
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());
            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath, _analytics);
            _workingDirectory = tempJobInstancePath;

            // cache the file map snapshot for next time (to aid in detecting
            // file deletions)
            _cachedSourceDirectoryFileMap = sourceDirectoryFileMap;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        _workingDirectory = null;
    }
}