/// <summary>
/// Runs a single task step: loads the task definition, selects the appropriate
/// execution handler for the current stage/platform, prepares inputs, endpoints,
/// secure files and environment, and then executes the handler (with optional retries).
/// Sets ExecutionContext.Result to Skipped (and returns early) when execution must
/// not proceed (stopped target container, injected decorator inputs with secrets,
/// or secret-restricted PRs from forks).
/// </summary>
public async Task RunAsync()
{
    // Validate args.
    Trace.Entering();
    ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
    ArgUtil.NotNull(ExecutionContext.Variables, nameof(ExecutionContext.Variables));
    ArgUtil.NotNull(Task, nameof(Task));
    var taskManager = HostContext.GetService<ITaskManager>();
    var handlerFactory = HostContext.GetService<IHandlerFactory>();

    // Enable skip for string translator in case of checkout task.
    // It's required for support of multiple checkout tasks with repo alias "self" in container jobs. Reported in issue 3520.
    this.ExecutionContext.Variables.Set(Constants.Variables.Task.SkipTranslatorForCheckout, this.Task.IsCheckoutTask().ToString());

    // Set the task id and display name variable. The scope reverts these
    // task-level values when disposed at the end of this method.
    using (var scope = ExecutionContext.Variables.CreateScope())
    {
        scope.Set(Constants.Variables.Task.DisplayName, DisplayName);
        scope.Set(WellKnownDistributedTaskVariables.TaskInstanceId, Task.Id.ToString("D"));
        scope.Set(WellKnownDistributedTaskVariables.TaskDisplayName, DisplayName);
        scope.Set(WellKnownDistributedTaskVariables.TaskInstanceName, Task.Name);

        // Load the task definition and choose the handler.
        // TODO: Add a try catch here to give a better error message.
        Definition definition = taskManager.Load(Task);
        ArgUtil.NotNull(definition, nameof(definition));

        // Verify Signatures and Re-Extract Tasks if necessary
        await VerifyTask(taskManager, definition);

        // Print out task metadata
        PrintTaskMetaData(definition);

        // Select the execution data matching the current job stage (pre/main/post).
        ExecutionData currentExecution = null;
        switch (Stage)
        {
            case JobRunStage.PreJob:
                currentExecution = definition.Data?.PreJobExecution;
                break;
            case JobRunStage.Main:
                currentExecution = definition.Data?.Execution;
                break;
            case JobRunStage.PostJob:
                currentExecution = definition.Data?.PostJobExecution;
                break;
        }
        ;

        HandlerData handlerData = GetHandlerData(ExecutionContext, currentExecution, PlatformUtil.HostOS);

        if (handlerData == null)
        {
            // No handler supports the current host OS/architecture for this stage.
            if (PlatformUtil.RunningOnWindows)
            {
                throw new InvalidOperationException(StringUtil.Loc("SupportedTaskHandlerNotFoundWindows", $"{PlatformUtil.HostOS}({PlatformUtil.HostArchitecture})"));
            }

            throw new InvalidOperationException(StringUtil.Loc("SupportedTaskHandlerNotFoundLinux"));
        }
        Trace.Info($"Handler data is of type {handlerData}");

        Variables runtimeVariables = ExecutionContext.Variables;
        IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
        var stepTarget = ExecutionContext.StepTarget();

        // Setup container stephost and the right runtime variables for running job inside container.
        if (stepTarget is ContainerInfo containerTarget)
        {
            if (Stage == JobRunStage.PostJob && AgentKnobs.SkipPostExeceutionIfTargetContainerStopped.GetValue(ExecutionContext).AsBoolean())
            {
                try
                {
                    // Check that the target container is still running, if not Skip task execution
                    IDockerCommandManager dockerManager = HostContext.GetService<IDockerCommandManager>();
                    bool isContainerRunning = await dockerManager.IsContainerRunning(ExecutionContext, containerTarget.ContainerId);

                    if (!isContainerRunning)
                    {
                        ExecutionContext.Result = TaskResult.Skipped;
                        ExecutionContext.ResultCode = $"Target container - {containerTarget.ContainerName} has been stopped, task post-execution will be skipped";
                        return;
                    }
                }
                catch (Exception ex)
                {
                    // Best effort: a failure to query docker must not fail the post-job step itself.
                    ExecutionContext.Write(WellKnownTags.Warning, $"Failed to check container state for task post-execution. Exception: {ex}");
                }
            }

            if (handlerData is AgentPluginHandlerData)
            {
                // Plugin handler always runs on the Host; the runtime variables need to be
                // translated so that they work on the Host, ex: file path variable System.DefaultWorkingDirectory.
                Dictionary<string, VariableValue> variableCopy = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
                foreach (var publicVar in ExecutionContext.Variables.Public)
                {
                    variableCopy[publicVar.Key] = new VariableValue(stepTarget.TranslateToHostPath(publicVar.Value));
                }
                foreach (var secretVar in ExecutionContext.Variables.Private)
                {
                    // Second argument marks the copied value as a secret.
                    variableCopy[secretVar.Key] = new VariableValue(stepTarget.TranslateToHostPath(secretVar.Value), true);
                }

                List<string> expansionWarnings;
                runtimeVariables = new Variables(HostContext, variableCopy, out expansionWarnings);
                expansionWarnings?.ForEach(x => ExecutionContext.Warning(x));
            }
            else if (handlerData is BaseNodeHandlerData || handlerData is PowerShell3HandlerData)
            {
                // Only the node, node10, and powershell3 handlers support running inside container.
                // Make sure required container is already created.
                ArgUtil.NotNullOrEmpty(containerTarget.ContainerId, nameof(containerTarget.ContainerId));
                var containerStepHost = HostContext.CreateService<IContainerStepHost>();
                containerStepHost.Container = containerTarget;
                stepHost = containerStepHost;
            }
            else
            {
                throw new NotSupportedException(String.Format("Task '{0}' is using legacy execution handler '{1}' which is not supported in container execution flow.", definition.Data.FriendlyName, handlerData.GetType().ToString()));
            }
        }

        // Load the default input values from the definition.
        Trace.Verbose("Loading default inputs.");
        var inputs = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        foreach (var input in (definition.Data?.Inputs ?? new TaskInputDefinition[0]))
        {
            string key = input?.Name?.Trim() ?? string.Empty;
            if (!string.IsNullOrEmpty(key))
            {
                // Knob controls whether default values keep surrounding whitespace.
                if (AgentKnobs.DisableInputTrimming.GetValue(ExecutionContext).AsBoolean())
                {
                    inputs[key] = input.DefaultValue ?? string.Empty;
                }
                else
                {
                    inputs[key] = input.DefaultValue?.Trim() ?? string.Empty;
                }
            }
        }

        // Merge the instance inputs (values supplied by the pipeline override defaults).
        Trace.Verbose("Loading instance inputs.");
        foreach (var input in (Task.Inputs as IEnumerable<KeyValuePair<string, string>> ?? new KeyValuePair<string, string>[0]))
        {
            string key = input.Key?.Trim() ?? string.Empty;
            if (!string.IsNullOrEmpty(key))
            {
                if (AgentKnobs.DisableInputTrimming.GetValue(ExecutionContext).AsBoolean())
                {
                    inputs[key] = input.Value ?? string.Empty;
                }
                else
                {
                    inputs[key] = input.Value?.Trim() ?? string.Empty;
                }
            }
        }

        // Expand the inputs.
        Trace.Verbose("Expanding inputs.");
        runtimeVariables.ExpandValues(target: inputs);

        // We need to verify inputs of the tasks that were injected by decorators, to check if they contain secrets,
        // for security reasons execution of tasks in this case should be skipped.
        // Target task inputs could be injected into the decorator's tasks if the decorator has post-task-tasks or pre-task-tasks targets,
        // such tasks will have names that start with __system_pretargettask_ or __system_posttargettask_.
        var taskDecoratorManager = HostContext.GetService<ITaskDecoratorManager>();
        if (taskDecoratorManager.IsInjectedTaskForTarget(Task.Name) &&
            taskDecoratorManager.IsInjectedInputsContainsSecrets(inputs, out var inputsWithSecrets))
        {
            var inputsForReport = taskDecoratorManager.GenerateTaskResultMessage(inputsWithSecrets);
            ExecutionContext.Result = TaskResult.Skipped;
            ExecutionContext.ResultCode = StringUtil.Loc("SecretsAreNotAllowedInInjectedTaskInputs", inputsForReport);
            return;
        }

        VarUtil.ExpandEnvironmentVariables(HostContext, target: inputs);

        // Translate the server file path inputs to local paths.
        foreach (var input in definition.Data?.Inputs ?? new TaskInputDefinition[0])
        {
            if (string.Equals(input.InputType, TaskInputType.FilePath, StringComparison.OrdinalIgnoreCase))
            {
                Trace.Verbose($"Translating file path input '{input.Name}': '{inputs[input.Name]}'");
                inputs[input.Name] = stepHost.ResolvePathForStepHost(TranslateFilePathInput(inputs[input.Name] ?? string.Empty));
                Trace.Verbose($"Translated file path input '{input.Name}': '{inputs[input.Name]}'");
            }
        }

        // Load the task environment.
        Trace.Verbose("Loading task environment.");
        var environment = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
        foreach (var env in (Task.Environment ?? new Dictionary<string, string>(0)))
        {
            string key = env.Key?.Trim() ?? string.Empty;
            if (!string.IsNullOrEmpty(key))
            {
                environment[key] = env.Value?.Trim() ?? string.Empty;
            }
        }

        // Expand the inputs.
        Trace.Verbose("Expanding task environment.");
        runtimeVariables.ExpandValues(target: environment);
        VarUtil.ExpandEnvironmentVariables(HostContext, target: environment);

        // Expand the handler inputs.
        Trace.Verbose("Expanding handler inputs.");
        VarUtil.ExpandValues(HostContext, source: inputs, target: handlerData.Inputs);
        runtimeVariables.ExpandValues(target: handlerData.Inputs);

        // Get each endpoint ID referenced by the task (inputs typed "connectedService:*"
        // may hold a comma-separated list of endpoint GUIDs).
        var endpointIds = new List<Guid>();
        foreach (var input in definition.Data?.Inputs ?? new TaskInputDefinition[0])
        {
            if ((input.InputType ?? string.Empty).StartsWith("connectedService:", StringComparison.OrdinalIgnoreCase))
            {
                string inputKey = input?.Name?.Trim() ?? string.Empty;
                string inputValue;
                if (!string.IsNullOrEmpty(inputKey) &&
                    inputs.TryGetValue(inputKey, out inputValue) &&
                    !string.IsNullOrEmpty(inputValue))
                {
                    foreach (string rawId in inputValue.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
                    {
                        Guid parsedId;
                        if (Guid.TryParse(rawId.Trim(), out parsedId) && parsedId != Guid.Empty)
                        {
                            endpointIds.Add(parsedId);
                        }
                    }
                }
            }
        }

        // Forked-PR builds with secret restrictions must not use service endpoints.
        if (endpointIds.Count > 0 &&
            (runtimeVariables.GetBoolean(WellKnownDistributedTaskVariables.RestrictSecrets) ?? false) &&
            (runtimeVariables.GetBoolean(Microsoft.TeamFoundation.Build.WebApi.BuildVariables.IsFork) ?? false))
        {
            ExecutionContext.Result = TaskResult.Skipped;
            ExecutionContext.ResultCode = $"References service endpoint. PRs from repository forks are not allowed to access secrets in the pipeline. For more information see https://go.microsoft.com/fwlink/?linkid=862029 ";
            return;
        }

        // Get the endpoints referenced by the task.
        var endpoints = (ExecutionContext.Endpoints ?? new List<ServiceEndpoint>(0))
            .Join(inner: endpointIds,
                outerKeySelector: (ServiceEndpoint endpoint) => endpoint.Id,
                innerKeySelector: (Guid endpointId) => endpointId,
                resultSelector: (ServiceEndpoint endpoint, Guid endpointId) => endpoint)
            .ToList();

        // Add the system endpoint.
        foreach (ServiceEndpoint endpoint in (ExecutionContext.Endpoints ?? new List<ServiceEndpoint>(0)))
        {
            if (string.Equals(endpoint.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase))
            {
                endpoints.Add(endpoint);
                break;
            }
        }

        // Get each secure file ID referenced by the task.
        var secureFileIds = new List<Guid>();
        foreach (var input in definition.Data?.Inputs ?? new TaskInputDefinition[0])
        {
            if (string.Equals(input.InputType ?? string.Empty, "secureFile", StringComparison.OrdinalIgnoreCase))
            {
                string inputKey = input?.Name?.Trim() ?? string.Empty;
                string inputValue;
                if (!string.IsNullOrEmpty(inputKey) &&
                    inputs.TryGetValue(inputKey, out inputValue) &&
                    !string.IsNullOrEmpty(inputValue))
                {
                    foreach (string rawId in inputValue.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
                    {
                        Guid parsedId;
                        if (Guid.TryParse(rawId.Trim(), out parsedId) && parsedId != Guid.Empty)
                        {
                            secureFileIds.Add(parsedId);
                        }
                    }
                }
            }
        }

        // Same restriction as above, but for secure files.
        if (secureFileIds.Count > 0 &&
            (runtimeVariables.GetBoolean(WellKnownDistributedTaskVariables.RestrictSecrets) ?? false) &&
            (runtimeVariables.GetBoolean(Microsoft.TeamFoundation.Build.WebApi.BuildVariables.IsFork) ?? false))
        {
            ExecutionContext.Result = TaskResult.Skipped;
            ExecutionContext.ResultCode = $"References secure file. PRs from repository forks are not allowed to access secrets in the pipeline. For more information see https://go.microsoft.com/fwlink/?linkid=862029";
            return;
        }

        // Get the secure files referenced by the task.
        var secureFiles = (ExecutionContext.SecureFiles ?? new List<SecureFile>(0))
            .Join(inner: secureFileIds,
                outerKeySelector: (SecureFile secureFile) => secureFile.Id,
                innerKeySelector: (Guid secureFileId) => secureFileId,
                resultSelector: (SecureFile secureFile, Guid secureFileId) => secureFile)
            .ToList();

        // Set output variables.
        foreach (var outputVar in definition.Data?.OutputVariables ?? new OutputVariable[0])
        {
            if (outputVar != null && !string.IsNullOrEmpty(outputVar.Name))
            {
                ExecutionContext.OutputVariables.Add(outputVar.Name);
            }
        }

        // translate inputs
        inputs = inputs.ToDictionary(kvp => kvp.Key, kvp => ExecutionContext.TranslatePathForStepTarget(kvp.Value));

        // Create the handler.
        IHandler handler = handlerFactory.Create(
            ExecutionContext,
            Task.Reference,
            stepHost,
            endpoints,
            secureFiles,
            handlerData,
            inputs,
            environment,
            runtimeVariables,
            taskDirectory: definition.Directory);

        // Run the task, retrying with exponential delay if the task requested retries.
        int retryCount = this.Task.RetryCountOnTaskFailure;
        if (retryCount > 0)
        {
            if (retryCount > RetryCountOnTaskFailureLimit)
            {
                // Clamp the requested retry count to the agent-wide limit.
                ExecutionContext.Warning(StringUtil.Loc("RetryCountLimitExceeded", RetryCountOnTaskFailureLimit, retryCount));
                retryCount = RetryCountOnTaskFailureLimit;
            }

            RetryHelper rh = new RetryHelper(ExecutionContext, retryCount);
            await rh.RetryStep(async () => await handler.RunAsync(), RetryHelper.ExponentialDelay);
        }
        else
        {
            await handler.RunAsync();
        }
    }
}
/// <summary>
/// Creates a task-level execution context that inherits job-level state
/// (variables, endpoints, secure files, container info) from this context,
/// with its own timeline record, cancellation source and logger.
/// </summary>
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, Variables taskVariables = null)
{
    Trace.Entering();

    var childContext = new ExecutionContext();
    childContext.Initialize(HostContext);

    // Share job-level state with the new task-level context.
    childContext.Features = Features;
    childContext.Variables = Variables;
    childContext.Endpoints = Endpoints;
    childContext.SecureFiles = SecureFiles;
    childContext.TaskVariables = taskVariables;
    childContext.WriteDebug = WriteDebug;
    childContext.PrependPath = PrependPath;
    childContext.Container = Container;

    // Child-specific wiring.
    childContext._cancellationTokenSource = new CancellationTokenSource();
    childContext._parentExecutionContext = this;

    // the job timeline record is at order 1.
    childContext.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, _childExecutionContextCount + 2);

    childContext._logger = HostContext.CreateService<IPagingLogger>();
    childContext._logger.Setup(_mainTimelineId, recordId);

    _childExecutionContextCount++;
    return childContext;
}
/// <summary>
/// Initializes this execution context from a job request message: wires up the
/// cancellation token, copies endpoints/secure files, expands variables, applies
/// agent proxy settings, creates the job timeline record and the logger.
/// Note: ordering matters — the logger must exist before warnings are written.
/// </summary>
public void InitializeJob(JobRequestMessage message, CancellationToken token)
{
    // Validation
    Trace.Entering();
    ArgUtil.NotNull(message, nameof(message));
    ArgUtil.NotNull(message.Environment, nameof(message.Environment));
    ArgUtil.NotNull(message.Environment.SystemConnection, nameof(message.Environment.SystemConnection));
    ArgUtil.NotNull(message.Environment.Endpoints, nameof(message.Environment.Endpoints));
    ArgUtil.NotNull(message.Environment.Variables, nameof(message.Environment.Variables));
    ArgUtil.NotNull(message.Plan, nameof(message.Plan));

    // Cancellation of the caller's token cancels this context too.
    _cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);

    // Features
    Features = ApiUtil.GetFeatures(message.Plan);

    // Endpoints (the system connection is exposed as an additional endpoint).
    Endpoints = message.Environment.Endpoints;
    Endpoints.Add(message.Environment.SystemConnection);

    // SecureFiles
    SecureFiles = message.Environment.SecureFiles;

    // Variables (constructor performs initial recursive expansion)
    List<string> warnings;
    Variables = new Variables(HostContext, message.Environment.Variables, message.Environment.MaskHints, out warnings);

    // Prepend Path
    PrependPath = new List<string>();

    // Docker
    Container = new ContainerInfo()
    {
        ContainerImage = Variables.Get("_PREVIEW_VSTS_DOCKER_IMAGE"),
        ContainerName = $"VSTS_{Variables.System_HostType.ToString()}_{message.JobId.ToString("D")}",
    };

    // Proxy variables. The VSTS_HTTP_PROXY* environment variables are cleared after
    // being copied into (secret) variables — presumably to avoid leaking proxy
    // credentials to child processes via the environment; confirm against agent docs.
    var agentWebProxy = HostContext.GetService<IVstsAgentWebProxy>();
    if (!string.IsNullOrEmpty(agentWebProxy.ProxyAddress))
    {
        Variables.Set(Constants.Variables.Agent.ProxyUrl, agentWebProxy.ProxyAddress);
        Environment.SetEnvironmentVariable("VSTS_HTTP_PROXY", string.Empty);

        if (!string.IsNullOrEmpty(agentWebProxy.ProxyUsername))
        {
            Variables.Set(Constants.Variables.Agent.ProxyUsername, agentWebProxy.ProxyUsername);
            Environment.SetEnvironmentVariable("VSTS_HTTP_PROXY_USERNAME", string.Empty);
        }

        if (!string.IsNullOrEmpty(agentWebProxy.ProxyPassword))
        {
            // Third argument marks the proxy password as a secret.
            Variables.Set(Constants.Variables.Agent.ProxyPassword, agentWebProxy.ProxyPassword, true);
            Environment.SetEnvironmentVariable("VSTS_HTTP_PROXY_PASSWORD", string.Empty);
        }

        if (agentWebProxy.ProxyBypassList.Count > 0)
        {
            Variables.Set(Constants.Variables.Agent.ProxyBypassList, JsonUtility.ToString(agentWebProxy.ProxyBypassList));
        }
    }

    // Job timeline record.
    InitializeTimelineRecord(
        timelineId: message.Timeline.Id,
        timelineRecordId: message.JobId,
        parentTimelineRecordId: null,
        recordType: ExecutionContextType.Job,
        displayName: message.JobName,
        refName: message.JobRefName,
        order: 1); // The job timeline record must be at order 1.

    // Logger (must be initialized before writing warnings).
    _logger = HostContext.CreateService<IPagingLogger>();
    _logger.Setup(_mainTimelineId, _record.Id);

    // Log warnings from recursive variable expansion.
    warnings?.ForEach(x => this.Warning(x));

    // Verbosity (from system.debug).
    WriteDebug = Variables.System_Debug ?? false;

    // Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
    _jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
}
/// <summary>
/// Executes an agent task plugin out-of-process via Agent.PluginHost.
/// The serialized plugin context is passed over STDIN; plugin output flows back
/// through <paramref name="outputHandler"/>. Because plugins run on the host,
/// container paths in inputs and variables are translated back to host paths.
/// </summary>
/// <exception cref="NotSupportedException">Thrown when <paramref name="plugin"/> is not a known, allowed plugin.</exception>
public async Task RunPluginTaskAsync(IExecutionContext context, string plugin, Dictionary<string, string> inputs, Dictionary<string, string> environment, Variables runtimeVariables, EventHandler<ProcessDataReceivedEventArgs> outputHandler)
{
    ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));

    // Only allow plugins we defined
    if (!_taskPlugins.Contains(plugin))
    {
        throw new NotSupportedException(plugin);
    }

    // Resolve the working directory.
    string workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work);
    ArgUtil.Directory(workingDirectory, nameof(workingDirectory));

    // Agent.PluginHost
    string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), $"Agent.PluginHost{Util.IOUtil.ExeExtension}");
    ArgUtil.File(file, $"Agent.PluginHost{Util.IOUtil.ExeExtension}");

    // Agent.PluginHost's arguments
    string arguments = $"task \"{plugin}\"";

    // construct plugin context
    var target = context.StepTarget();
    Variables.TranslationMethod translateToHostPath = Variables.DefaultStringTranslator;
    ContainerInfo containerInfo = target as ContainerInfo;
    // Since plugins run on the host, but the inputs and variables have already been translated
    // to the container path, we need to convert them back to the host path
    // TODO: look to see if there is a better way to not have translate these back
    if (containerInfo != null)
    {
        var newInputs = new Dictionary<string, string>();
        foreach (var entry in inputs)
        {
            newInputs[entry.Key] = containerInfo.TranslateToHostPath(entry.Value);
        }
        inputs = newInputs;
        translateToHostPath = (string val) => { return (containerInfo.TranslateToHostPath(val)); };
    }

    AgentTaskPluginExecutionContext pluginContext = new AgentTaskPluginExecutionContext
    {
        Inputs = inputs,
        Repositories = context.Repositories,
        Endpoints = context.Endpoints,
        Container = containerInfo, //TODO: Figure out if this needs to have all the containers or just the one for the current step
        JobSettings = context.JobSettings,
    };

    // variables (copied with host-path translation applied)
    runtimeVariables.CopyInto(pluginContext.Variables, translateToHostPath);
    // NOTE(review): context.TaskVariables is dereferenced without a null check here —
    // presumably callers always supply task variables for plugin steps; verify.
    context.TaskVariables.CopyInto(pluginContext.TaskVariables, translateToHostPath);

    using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
    {
        // The plugin host reads its execution context as JSON from STDIN.
        var redirectStandardIn = new InputQueue<string>();
        redirectStandardIn.Enqueue(JsonUtility.ToString(pluginContext));

        processInvoker.OutputDataReceived += outputHandler;
        processInvoker.ErrorDataReceived += outputHandler;

        // Execute the process. Exit code 0 should always be returned.
        // A non-zero exit code indicates infrastructural failure.
        // Task failure should be communicated over STDOUT using ## commands.
        await processInvoker.ExecuteAsync(workingDirectory: workingDirectory,
                                          fileName: file,
                                          arguments: arguments,
                                          environment: environment,
                                          requireExitCodeZero: true,
                                          outputEncoding: Encoding.UTF8,
                                          killProcessOnCancel: false,
                                          redirectStandardIn: redirectStandardIn,
                                          cancellationToken: context.CancellationToken);
    }
}
/// <summary>
/// Initializes this execution context from a pipelines job request message:
/// wires up cancellation, copies resources (endpoints, secure files, repositories),
/// expands variables, configures job/sidecar containers, applies proxy, certificate
/// and runtime-option settings, and creates the job timeline record and logger.
/// Note: ordering matters — the logger must exist before warnings are written.
/// </summary>
public void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token)
{
    // Validation
    Trace.Entering();
    ArgUtil.NotNull(message, nameof(message));
    ArgUtil.NotNull(message.Resources, nameof(message.Resources));
    ArgUtil.NotNull(message.Variables, nameof(message.Variables));
    ArgUtil.NotNull(message.Plan, nameof(message.Plan));

    // Cancellation of the caller's token cancels this context too.
    _cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);

    // Features
    Features = PlanUtil.GetFeatures(message.Plan);

    // Endpoints
    Endpoints = message.Resources.Endpoints;

    // SecureFiles
    SecureFiles = message.Resources.SecureFiles;

    // Repositories
    Repositories = message.Resources.Repositories;

    // JobSettings: record whether the job has more than one checkout step.
    JobSettings = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    JobSettings[WellKnownJobSettings.HasMultipleCheckouts] = message.Steps?.Where(x => Pipelines.PipelineConstants.IsCheckoutTask(x)).Count() > 1 ? Boolean.TrueString : Boolean.FalseString;

    // Variables (constructor performs initial recursive expansion)
    List<string> warnings;
    Variables = new Variables(HostContext, message.Variables, out warnings);

    // Prepend Path
    PrependPath = new List<string>();

    // Docker (JobContainer): the preview image variable/env var can force a job
    // container when the message itself does not specify one.
    string imageName = Variables.Get("_PREVIEW_VSTS_DOCKER_IMAGE");
    if (string.IsNullOrEmpty(imageName))
    {
        imageName = Environment.GetEnvironmentVariable("_PREVIEW_VSTS_DOCKER_IMAGE");
    }
    if (!string.IsNullOrEmpty(imageName) && string.IsNullOrEmpty(message.JobContainer))
    {
        var dockerContainer = new Pipelines.ContainerResource()
        {
            Alias = "vsts_container_preview"
        };
        dockerContainer.Properties.Set("image", imageName);
        Container = HostContext.CreateContainerInfo(dockerContainer);
    }
    else if (!string.IsNullOrEmpty(message.JobContainer))
    {
        Container = HostContext.CreateContainerInfo(message.Resources.Containers.Single(x => string.Equals(x.Alias, message.JobContainer, StringComparison.OrdinalIgnoreCase)));
    }
    else
    {
        Container = null;
    }

    // Docker (Sidecar Containers): map of network alias -> container resource alias.
    SidecarContainers = new List<ContainerInfo>();
    foreach (var sidecar in message.JobSidecarContainers)
    {
        var networkAlias = sidecar.Key;
        var containerResourceAlias = sidecar.Value;
        var containerResource = message.Resources.Containers.Single(c => string.Equals(c.Alias, containerResourceAlias, StringComparison.OrdinalIgnoreCase));
        ContainerInfo containerInfo = HostContext.CreateContainerInfo(containerResource, isJobContainer: false);
        containerInfo.ContainerNetworkAlias = networkAlias;
        SidecarContainers.Add(containerInfo);
    }

    // Proxy variables. The VSTS_HTTP_PROXY* environment variables are cleared after
    // being copied into (secret) variables — presumably to avoid leaking proxy
    // credentials to child processes via the environment; confirm against agent docs.
    var agentWebProxy = HostContext.GetService<IVstsAgentWebProxy>();
    if (!string.IsNullOrEmpty(agentWebProxy.ProxyAddress))
    {
        Variables.Set(Constants.Variables.Agent.ProxyUrl, agentWebProxy.ProxyAddress);
        Environment.SetEnvironmentVariable("VSTS_HTTP_PROXY", string.Empty);

        if (!string.IsNullOrEmpty(agentWebProxy.ProxyUsername))
        {
            Variables.Set(Constants.Variables.Agent.ProxyUsername, agentWebProxy.ProxyUsername);
            Environment.SetEnvironmentVariable("VSTS_HTTP_PROXY_USERNAME", string.Empty);
        }

        if (!string.IsNullOrEmpty(agentWebProxy.ProxyPassword))
        {
            // Third argument marks the proxy password as a secret.
            Variables.Set(Constants.Variables.Agent.ProxyPassword, agentWebProxy.ProxyPassword, true);
            Environment.SetEnvironmentVariable("VSTS_HTTP_PROXY_PASSWORD", string.Empty);
        }

        if (agentWebProxy.ProxyBypassList.Count > 0)
        {
            Variables.Set(Constants.Variables.Agent.ProxyBypassList, JsonUtility.ToString(agentWebProxy.ProxyBypassList));
        }
    }

    // Certificate variables
    var agentCert = HostContext.GetService<IAgentCertificateManager>();
    if (agentCert.SkipServerCertificateValidation)
    {
        Variables.Set(Constants.Variables.Agent.SslSkipCertValidation, bool.TrueString);
    }

    if (!string.IsNullOrEmpty(agentCert.CACertificateFile))
    {
        Variables.Set(Constants.Variables.Agent.SslCAInfo, agentCert.CACertificateFile);
    }

    // Client certificate requires the cert, key and archive files all to be present.
    if (!string.IsNullOrEmpty(agentCert.ClientCertificateFile) &&
        !string.IsNullOrEmpty(agentCert.ClientCertificatePrivateKeyFile) &&
        !string.IsNullOrEmpty(agentCert.ClientCertificateArchiveFile))
    {
        Variables.Set(Constants.Variables.Agent.SslClientCert, agentCert.ClientCertificateFile);
        Variables.Set(Constants.Variables.Agent.SslClientCertKey, agentCert.ClientCertificatePrivateKeyFile);
        Variables.Set(Constants.Variables.Agent.SslClientCertArchive, agentCert.ClientCertificateArchiveFile);

        if (!string.IsNullOrEmpty(agentCert.ClientCertificatePassword))
        {
            // Third argument marks the certificate password as a secret.
            Variables.Set(Constants.Variables.Agent.SslClientCertPassword, agentCert.ClientCertificatePassword, true);
        }
    }

    // Runtime option variables
    var runtimeOptions = HostContext.GetService<IConfigurationStore>().GetAgentRuntimeOptions();
    if (runtimeOptions != null)
    {
        if (PlatformUtil.RunningOnWindows && runtimeOptions.GitUseSecureChannel)
        {
            Variables.Set(Constants.Variables.Agent.GitUseSChannel, runtimeOptions.GitUseSecureChannel.ToString());
        }
    }

    // Job timeline record.
    InitializeTimelineRecord(
        timelineId: message.Timeline.Id,
        timelineRecordId: message.JobId,
        parentTimelineRecordId: null,
        recordType: ExecutionContextType.Job,
        displayName: message.JobDisplayName,
        refName: message.JobName,
        order: null); // The job timeline record's order is set by server.

    // Logger (must be initialized before writing warnings).
    _logger = HostContext.CreateService<IPagingLogger>();
    _logger.Setup(_mainTimelineId, _record.Id);

    // Log warnings from recursive variable expansion.
    warnings?.ForEach(x => this.Warning(x));

    // Verbosity (from system.debug).
    WriteDebug = Variables.System_Debug ?? false;

    // Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
    _jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
}
/// <summary>
/// Creates a task-level execution context that inherits job-level state
/// (variables, endpoints, repositories, settings, containers) from this context,
/// with its own timeline record, cancellation source and logger.
/// </summary>
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, Variables taskVariables = null, bool outputForward = false)
{
    Trace.Entering();

    var childContext = new ExecutionContext();
    childContext.Initialize(HostContext);

    // Share job-level state with the new task-level context.
    childContext.Features = Features;
    childContext.Variables = Variables;
    childContext.Endpoints = Endpoints;
    childContext.Repositories = Repositories;
    childContext.JobSettings = JobSettings;
    childContext.SecureFiles = SecureFiles;
    childContext.TaskVariables = taskVariables;
    childContext.WriteDebug = WriteDebug;
    childContext.PrependPath = PrependPath;
    childContext.Container = Container;
    childContext.SidecarContainers = SidecarContainers;

    // Child-specific wiring.
    childContext._cancellationTokenSource = new CancellationTokenSource();
    childContext._parentExecutionContext = this;
    childContext._outputForward = outputForward;

    // Each child gets the next timeline record order.
    childContext.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);

    childContext._logger = HostContext.CreateService<IPagingLogger>();
    childContext._logger.Setup(_mainTimelineId, recordId);

    return childContext;
}
/// <summary>
/// Creates a scope over the given variable store; Names tracks the variable
/// names set through this scope so they can be reverted later.
/// </summary>
public VariableScope(Variables data)
{
    Names = new HashSet<string>();
    Data = data;
}
/// <summary>
/// Runs a single task step: loads the task definition, selects the best execution
/// handler for the current platform/stage, resolves inputs, environment, endpoints
/// and secure files, then delegates execution to the created handler.
/// </summary>
/// <exception cref="InvalidOperationException">No supported handler exists for this platform.</exception>
/// <exception cref="NotSupportedException">A legacy handler was requested inside a container.</exception>
public async Task RunAsync()
{
    // Validate args.
    Trace.Entering();
    ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
    ArgUtil.NotNull(ExecutionContext.Variables, nameof(ExecutionContext.Variables));
    ArgUtil.NotNull(Task, nameof(Task));
    var taskManager = HostContext.GetService<ITaskManager>();
    var handlerFactory = HostContext.GetService<IHandlerFactory>();

    // Set the task id and display name variable.
    ExecutionContext.Variables.Set(Constants.Variables.Task.DisplayName, DisplayName);
    ExecutionContext.Variables.Set(WellKnownDistributedTaskVariables.TaskInstanceId, Task.Id.ToString("D"));
    ExecutionContext.Variables.Set(WellKnownDistributedTaskVariables.TaskDisplayName, DisplayName);
    ExecutionContext.Variables.Set(WellKnownDistributedTaskVariables.TaskInstanceName, Task.Name);

    // Load the task definition and choose the handler.
    // TODO: Add a try catch here to give a better error message.
    Definition definition = taskManager.Load(Task);
    ArgUtil.NotNull(definition, nameof(definition));

    // Print out task metadata.
    PrintTaskMetaData(definition);

    // Pick the execution data that matches the current job stage.
    ExecutionData currentExecution = null;
    switch (Stage)
    {
        case JobRunStage.PreJob:
            currentExecution = definition.Data?.PreJobExecution;
            break;
        case JobRunStage.Main:
            currentExecution = definition.Data?.Execution;
            break;
        case JobRunStage.PostJob:
            currentExecution = definition.Data?.PostJobExecution;
            break;
    }

    // BUGFIX: the previous code called .Value on the nullable result of
    // `currentExecution?.All.Any(...)`, which throws InvalidOperationException
    // when currentExecution is null (e.g. a task with no pre/post-job execution).
    // Comparing against `true` treats null the same as false.
    if (currentExecution?.All.Any(x => x is PowerShell3HandlerData) == true &&
        currentExecution.All.Any(x => x is PowerShellHandlerData &&
                                      x.Platforms != null &&
                                      x.Platforms.Contains("windows", StringComparer.OrdinalIgnoreCase)))
    {
        // When task contains both PS and PS3 implementations, we will always prefer PS3 over PS regardless of the platform pinning.
        Trace.Info("Ignore platform pinning for legacy PowerShell execution handler.");
        var legacyPShandler = currentExecution.All.FirstOrDefault(x => x is PowerShellHandlerData);
        // BUGFIX: FirstOrDefault can return null; guard before clearing the pinning.
        if (legacyPShandler != null)
        {
            legacyPShandler.Platforms = null;
        }
    }

    // Choose the handler: platform-preferred handlers first, then by declared priority.
    HandlerData handlerData =
        currentExecution?
        .All
        .OrderBy(x => !x.PreferredOnCurrentPlatform()) // Sort true to false.
        .ThenBy(x => x.Priority)
        .FirstOrDefault();
    if (handlerData == null)
    {
#if OS_WINDOWS
        // BUGFIX: throw a specific exception type instead of the base Exception.
        throw new InvalidOperationException(StringUtil.Loc("SupportedTaskHandlerNotFoundWindows", $"{PlatformUtil.RunningOnOS}({PlatformUtil.RunningOnArchitecture})"));
#else
        throw new InvalidOperationException(StringUtil.Loc("SupportedTaskHandlerNotFoundLinux"));
#endif
    }

    Variables runtimeVariables = ExecutionContext.Variables;
    IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();

    // Setup container stephost and the right runtime variables for running job inside container.
    if (ExecutionContext.Container != null)
    {
        if (handlerData is AgentPluginHandlerData)
        {
            // The plugin handler always runs on the host, so the runtime variables must hold
            // host-side values (e.g. file path variables like System.DefaultWorkingDirectory
            // must be translated from container paths back to host paths).
            Dictionary<string, VariableValue> variableCopy = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
            foreach (var publicVar in ExecutionContext.Variables.Public)
            {
                variableCopy[publicVar.Key] = new VariableValue(ExecutionContext.Container.TranslateToHostPath(publicVar.Value));
            }
            foreach (var secretVar in ExecutionContext.Variables.Private)
            {
                variableCopy[secretVar.Key] = new VariableValue(ExecutionContext.Container.TranslateToHostPath(secretVar.Value), true);
            }

            List<string> expansionWarnings;
            runtimeVariables = new Variables(HostContext, variableCopy, out expansionWarnings);
            expansionWarnings?.ForEach(x => ExecutionContext.Warning(x));
        }
        else if (handlerData is NodeHandlerData || handlerData is Node10HandlerData || handlerData is PowerShell3HandlerData)
        {
            // Only the node, node10, and powershell3 handlers support running inside container.
            // Make sure required container is already created.
            ArgUtil.NotNullOrEmpty(ExecutionContext.Container.ContainerId, nameof(ExecutionContext.Container.ContainerId));
            var containerStepHost = HostContext.CreateService<IContainerStepHost>();
            containerStepHost.Container = ExecutionContext.Container;
            stepHost = containerStepHost;
        }
        else
        {
            throw new NotSupportedException(String.Format("Task '{0}' is using legacy execution handler '{1}' which is not supported in container execution flow.", definition.Data.FriendlyName, handlerData.GetType().ToString()));
        }
    }

    // Load the default input values from the definition.
    Trace.Verbose("Loading default inputs.");
    var inputs = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    foreach (var input in (definition.Data?.Inputs ?? new TaskInputDefinition[0]))
    {
        string key = input?.Name?.Trim() ?? string.Empty;
        if (!string.IsNullOrEmpty(key))
        {
            inputs[key] = input.DefaultValue?.Trim() ?? string.Empty;
        }
    }

    // Merge the instance inputs (values supplied by the pipeline override defaults).
    Trace.Verbose("Loading instance inputs.");
    foreach (var input in (Task.Inputs as IEnumerable<KeyValuePair<string, string>> ?? new KeyValuePair<string, string>[0]))
    {
        string key = input.Key?.Trim() ?? string.Empty;
        if (!string.IsNullOrEmpty(key))
        {
            inputs[key] = input.Value?.Trim() ?? string.Empty;
        }
    }

    // Expand the inputs.
    Trace.Verbose("Expanding inputs.");
    runtimeVariables.ExpandValues(target: inputs);
    VarUtil.ExpandEnvironmentVariables(HostContext, target: inputs);

    // Translate the server file path inputs to local paths.
    foreach (var input in definition.Data?.Inputs ?? new TaskInputDefinition[0])
    {
        if (string.Equals(input.InputType, TaskInputType.FilePath, StringComparison.OrdinalIgnoreCase))
        {
            Trace.Verbose($"Translating file path input '{input.Name}': '{inputs[input.Name]}'");
            inputs[input.Name] = stepHost.ResolvePathForStepHost(TranslateFilePathInput(inputs[input.Name] ?? string.Empty));
            Trace.Verbose($"Translated file path input '{input.Name}': '{inputs[input.Name]}'");
        }
    }

    // Load the task environment.
    Trace.Verbose("Loading task environment.");
    var environment = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
    foreach (var env in (Task.Environment ?? new Dictionary<string, string>(0)))
    {
        string key = env.Key?.Trim() ?? string.Empty;
        if (!string.IsNullOrEmpty(key))
        {
            environment[key] = env.Value?.Trim() ?? string.Empty;
        }
    }

    // Expand the task environment.
    Trace.Verbose("Expanding task environment.");
    runtimeVariables.ExpandValues(target: environment);
    VarUtil.ExpandEnvironmentVariables(HostContext, target: environment);

    // Expand the handler inputs.
    Trace.Verbose("Expanding handler inputs.");
    VarUtil.ExpandValues(HostContext, source: inputs, target: handlerData.Inputs);
    runtimeVariables.ExpandValues(target: handlerData.Inputs);

    // Get each endpoint ID referenced by the task (inputs typed "connectedService:*"
    // may hold a comma-separated list of endpoint GUIDs).
    var endpointIds = new List<Guid>();
    foreach (var input in definition.Data?.Inputs ?? new TaskInputDefinition[0])
    {
        if ((input.InputType ?? string.Empty).StartsWith("connectedService:", StringComparison.OrdinalIgnoreCase))
        {
            string inputKey = input?.Name?.Trim() ?? string.Empty;
            string inputValue;
            if (!string.IsNullOrEmpty(inputKey) &&
                inputs.TryGetValue(inputKey, out inputValue) &&
                !string.IsNullOrEmpty(inputValue))
            {
                foreach (string rawId in inputValue.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
                {
                    Guid parsedId;
                    if (Guid.TryParse(rawId.Trim(), out parsedId) && parsedId != Guid.Empty)
                    {
                        endpointIds.Add(parsedId);
                    }
                }
            }
        }
    }

    // Skip the task when a fork PR with restricted secrets references a service endpoint.
    if (endpointIds.Count > 0 &&
        (runtimeVariables.GetBoolean(WellKnownDistributedTaskVariables.RestrictSecrets) ?? false) &&
        (runtimeVariables.GetBoolean(Microsoft.TeamFoundation.Build.WebApi.BuildVariables.IsFork) ?? false))
    {
        ExecutionContext.Result = TaskResult.Skipped;
        ExecutionContext.ResultCode = "References service endpoint. PRs from repository forks are not allowed to access secrets in the pipeline. For more information see https://go.microsoft.com/fwlink/?linkid=862029 ";
        return;
    }

    // Get the endpoints referenced by the task.
    var endpoints = (ExecutionContext.Endpoints ?? new List<ServiceEndpoint>(0))
        .Join(inner: endpointIds,
            outerKeySelector: (ServiceEndpoint endpoint) => endpoint.Id,
            innerKeySelector: (Guid endpointId) => endpointId,
            resultSelector: (ServiceEndpoint endpoint, Guid endpointId) => endpoint)
        .ToList();

    // Add the system endpoint.
    foreach (ServiceEndpoint endpoint in (ExecutionContext.Endpoints ?? new List<ServiceEndpoint>(0)))
    {
        if (string.Equals(endpoint.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase))
        {
            endpoints.Add(endpoint);
            break;
        }
    }

    // Get each secure file ID referenced by the task.
    var secureFileIds = new List<Guid>();
    foreach (var input in definition.Data?.Inputs ?? new TaskInputDefinition[0])
    {
        if (string.Equals(input.InputType ?? string.Empty, "secureFile", StringComparison.OrdinalIgnoreCase))
        {
            string inputKey = input?.Name?.Trim() ?? string.Empty;
            string inputValue;
            if (!string.IsNullOrEmpty(inputKey) &&
                inputs.TryGetValue(inputKey, out inputValue) &&
                !string.IsNullOrEmpty(inputValue))
            {
                foreach (string rawId in inputValue.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
                {
                    Guid parsedId;
                    if (Guid.TryParse(rawId.Trim(), out parsedId) && parsedId != Guid.Empty)
                    {
                        secureFileIds.Add(parsedId);
                    }
                }
            }
        }
    }

    // Skip the task when a fork PR with restricted secrets references a secure file.
    if (secureFileIds.Count > 0 &&
        (runtimeVariables.GetBoolean(WellKnownDistributedTaskVariables.RestrictSecrets) ?? false) &&
        (runtimeVariables.GetBoolean(Microsoft.TeamFoundation.Build.WebApi.BuildVariables.IsFork) ?? false))
    {
        ExecutionContext.Result = TaskResult.Skipped;
        ExecutionContext.ResultCode = "References secure file. PRs from repository forks are not allowed to access secrets in the pipeline. For more information see https://go.microsoft.com/fwlink/?linkid=862029";
        return;
    }

    // Get the secure files referenced by the task.
    var secureFiles = (ExecutionContext.SecureFiles ?? new List<SecureFile>(0))
        .Join(inner: secureFileIds,
            outerKeySelector: (SecureFile secureFile) => secureFile.Id,
            innerKeySelector: (Guid secureFileId) => secureFileId,
            resultSelector: (SecureFile secureFile, Guid secureFileId) => secureFile)
        .ToList();

    // Set output variables.
    foreach (var outputVar in definition.Data?.OutputVariables ?? new OutputVariable[0])
    {
        if (outputVar != null && !string.IsNullOrEmpty(outputVar.Name))
        {
            ExecutionContext.OutputVariables.Add(outputVar.Name);
        }
    }

    // Create the handler.
    IHandler handler = handlerFactory.Create(
        ExecutionContext,
        Task.Reference,
        stepHost,
        endpoints,
        secureFiles,
        handlerData,
        inputs,
        environment,
        runtimeVariables,
        taskDirectory: definition.Directory);

    // Run the task.
    await handler.RunAsync();
}