public IHttpActionResult Post([FromBody] JobSpecification spec) { if (string.IsNullOrEmpty(spec.RootUrl)) { spec.RootUrl = Request.RequestUri.GetLeftPart(UriPartial.Authority); Request.Headers.TryGetValues("X-Forwarded-Proto", out IEnumerable <string> values); if (string.Equals(values?.FirstOrDefault(), Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase) && !spec.RootUrl.Contains("localhost")) { var uriBuilder = new UriBuilder(spec.RootUrl) { Scheme = Uri.UriSchemeHttps, Port = -1 // default port for scheme }; spec.RootUrl = uriBuilder.ToString(); } } if (spec.Specification == null) { throw new Exception("Specification expected"); } var status = XGenJobManager.Instance.StartNew(spec); return(Redirect(new Uri($"/clientapi/xgen/jobs/{status.Id}", UriKind.Relative))); }
/// <summary> /// Convert JobSpecification ToJob /// </summary> /// <param name="jobSpecification">Internal Job Specification</param> /// <param name="schedulerAllocationCmd">Scheduler command</param> /// <returns></returns> public override object ConvertJobSpecificationToJob(JobSpecification jobSpecification, object schedulerAllocationCmd = null) { var localHpcJobInfo = Convert.ToBase64String(Encoding.UTF8.GetBytes( jobSpecification.ConvertToLocalHPCInfo(LinuxLocalTaskState.Q.ToString(), LinuxLocalTaskState.Q.ToString())) ); StringBuilder commands = new(); StringBuilder taskCommandLine = new(); foreach (var task in jobSpecification.Tasks) { var commandParameterDictionary = CreateTemplateParameterValuesDictionary( jobSpecification, task, task.CommandTemplate.TemplateParameters, task.CommandParameterValues ); taskCommandLine.Append(ReplaceTemplateDirectivesInCommand($"{task.CommandTemplate.ExecutableFile} {task.CommandTemplate.CommandParameters}", commandParameterDictionary)); if (!string.IsNullOrEmpty(task.StandardOutputFile)) { taskCommandLine.Append($" 1>>{task.StandardOutputFile}"); } if (!string.IsNullOrEmpty(task.StandardErrorFile)) { taskCommandLine.Append($" 2>>{task.StandardErrorFile}"); } commands.Append(Convert.ToBase64String(Encoding.UTF8.GetBytes(taskCommandLine.ToString())) + " "); taskCommandLine.Clear(); } //preparation script, prepares job info file to the job directory at local linux "cluster" return($"{_linuxLocalCommandScripts.PrepareJobDirCmdPath} {jobSpecification.FileTransferMethod.Cluster.LocalBasepath}/{jobSpecification.Id}/ {localHpcJobInfo} \"{commands}\";"); }
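A minimal sketch (not part of the project above) of the Base64 round trip this converter relies on: each task command line, including the appended stdout/stderr redirections, is encoded so it can travel to the preparation script as a single safe argument and be decoded again on the cluster side. The command and file names below are illustrative only.

using System;
using System.Text;

class Base64CommandRoundTrip
{
    static void Main()
    {
        // Hypothetical task command line, mirroring how the converter appends redirections.
        string taskCommandLine = "mpirun -n 4 ./solver 1>>stdout.txt 2>>stderr.txt";

        // Encoding keeps quoting and whitespace intact when the command passes through the shell.
        string encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(taskCommandLine));
        Console.WriteLine(encoded);

        // The consumer (e.g. the run script in the job directory) is expected to decode it back.
        string decoded = Encoding.UTF8.GetString(Convert.FromBase64String(encoded));
        Console.WriteLine(decoded); // prints the original command line
    }
}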
public static void Main(string[] args) { Outcomes(); return; //Parser2(); //return; //XeroxParser.Configure(); var json = File.ReadAllText(@"C:\Temp\Xerox.js"); var root = JsonConvert.DeserializeObject <JObject>(json); //var p = new XeroxParser(); //p.ParseSegments(root["Segments"]); //return; //var gen = p.ParseDateGenerator(root["DateTest"]); var spec = new JobSpecification { VisitorCount = 100000, Specification = root }; using (var tmp = File.CreateText(@"C:\Temp\XeroxDates.txt")) { tmp.WriteLine("Date\tCount\tPct"); var visits = spec.CreateSimulator().NextVisitors(spec.VisitorCount).ToArray(); var abs = visits.GroupBy(v => v.GetVariable <string>("Test")); foreach (var g in abs.OrderBy(g => g.Key)) { System.Console.Out.WriteLine("{0}: {1:P2}", g.Key, g.Count() / (double)spec.VisitorCount); } var visitCounts = visits.GroupBy(v => v.Start.Date) .ToDictionary(g => g.Key, g => g.Count()); var start = visitCounts.Keys.Min(); var end = visitCounts.Keys.Max(); var current = start; while (current <= end) { int c; c = visitCounts.TryGetValue(current, out c) ? c : 0; tmp.WriteLine("{0}\t{1}\t{2:P2}", current, c, c / (double)spec.VisitorCount); current = current.AddDays(1); } } }
/// <summary> /// Validation /// </summary> /// <returns></returns> public override ValidationResult Validate() { string message = _validationObject switch { JobSpecification jobSpecification => ValidateJobSpecification(jobSpecification), _ => string.Empty }; return(new ValidationResult(string.IsNullOrEmpty(message), message)); }
public static JobSpecification ConvertExtToInt(this JobSpecificationExt jobSpecification) { var result = new JobSpecification { Name = jobSpecification.Name, Project = jobSpecification.Project, WaitingLimit = jobSpecification.WaitingLimit ?? 0, WalltimeLimit = jobSpecification.WalltimeLimit, NotificationEmail = jobSpecification.NotificationEmail, PhoneNumber = jobSpecification.PhoneNumber, NotifyOnAbort = jobSpecification.NotifyOnAbort, NotifyOnFinish = jobSpecification.NotifyOnFinish, NotifyOnStart = jobSpecification.NotifyOnStart, EnvironmentVariables = jobSpecification.EnvironmentVariables? .Select(s => s.ConvertExtToInt()) .ToList(), FileTransferMethodId = jobSpecification.FileTransferMethodId, ClusterId = jobSpecification.ClusterId ?? 0 }; //Same Reference for DependOn tasks Dictionary <TaskSpecificationExt, TaskSpecification> tasksSpecs = new(); foreach (var taskExt in jobSpecification.Tasks) { var convertedTaskSpec = taskExt.ConvertExtToInt(result); if (taskExt.DependsOn != null) { var taskDependency = new List <TaskDependency>(); foreach (var dependentTask in taskExt.DependsOn) { if (tasksSpecs.ContainsKey(dependentTask)) { taskDependency.Add(new TaskDependency { TaskSpecification = convertedTaskSpec, ParentTaskSpecification = tasksSpecs[dependentTask] }); } else { //throw new InputValidationException($"Depending task \"{dependentTask.Name}\" for task \"{taskExt.Name}\" contains wrong task dependency."); } } convertedTaskSpec.DependsOn = taskDependency; } tasksSpecs.Add(taskExt, convertedTaskSpec); } result.Tasks = tasksSpecs.Values.ToList(); //Agregation walltimelimit for tasks result.WalltimeLimit = result.Tasks.Sum(s => s.WalltimeLimit); return(result); }
public void TestListJobsByJobSchedule() { Action test = () => { using (BatchClient batchCli = TestUtilities.OpenBatchClientAsync(TestUtilities.GetCredentialsFromEnvironment()).Result) { string jobScheduleId = Microsoft.Azure.Batch.Constants.DefaultConveniencePrefix + TestUtilities.GetMyName() + "-TestListJobsByJobSchedule"; try { Schedule schedule = new Schedule() { DoNotRunAfter = DateTime.UtcNow.Add(TimeSpan.FromDays(1)), RecurrenceInterval = TimeSpan.FromMinutes(1) }; JobSpecification jobSpecification = new JobSpecification(new PoolInformation() { PoolId = "DummyPool" }); CloudJobSchedule unboundJobSchedule = batchCli.JobScheduleOperations.CreateJobSchedule(jobScheduleId, schedule, jobSpecification); unboundJobSchedule.Commit(); //List the jobs under this JobSchedule for (int i = 1; i <= 3; i++) { string expectedJobId = string.Format("{0}:job-{1}", jobScheduleId, i); CloudJobSchedule boundJobSchedule = TestUtilities.WaitForJobOnJobSchedule( batchCli.JobScheduleOperations, jobScheduleId, expectedJobId: expectedJobId, timeout: TimeSpan.FromSeconds(70)); List <CloudJob> jobs = boundJobSchedule.ListJobs().ToList(); Assert.Equal(i, jobs.Count); jobs = batchCli.JobScheduleOperations.ListJobs(jobScheduleId).ToList(); Assert.Equal(i, jobs.Count); //Terminate the current job to force a new job to be created batchCli.JobOperations.TerminateJob(expectedJobId); } } finally { // clean up TestUtilities.DeleteJobScheduleIfExistsAsync(batchCli, jobScheduleId).Wait(); } } }; SynchronizationContextHelper.RunTest(test, LongTestTimeout); }
/// <summary> /// Submit job to scheduler /// </summary> /// <param name="jobSpecification">Job specification</param> /// <param name="credentials">Credentials</param> /// <returns></returns> public IEnumerable <SubmittedTaskInfo> SubmitJob(JobSpecification jobSpecification, ClusterAuthenticationCredentials credentials) { ConnectionInfo schedulerConnection = _connectionPool.GetConnectionForUser(credentials); try { var tasks = _adapter.SubmitJob(schedulerConnection.Connection, jobSpecification, credentials); return(tasks); } finally { _connectionPool.ReturnConnection(schedulerConnection); } }
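The method above borrows a pooled SSH connection and returns it in a finally block, so the connection goes back to the pool even when submission throws. A generic sketch of that borrow/return pattern, with hypothetical pool and user types standing in for the real ones (the actual pool is keyed by ClusterAuthenticationCredentials), could look like this:

using System;

// Hypothetical pool interface mirroring the usage above.
public interface IConnectionPool<TConnection>
{
    TConnection GetConnectionForUser(string user);
    void ReturnConnection(TConnection connection);
}

public static class PooledConnectionUsage
{
    public static TResult WithConnection<TConnection, TResult>(
        IConnectionPool<TConnection> pool, string user, Func<TConnection, TResult> work)
    {
        TConnection connection = pool.GetConnectionForUser(user);
        try
        {
            return work(connection); // e.g. adapter.SubmitJob(connection, jobSpecification, credentials)
        }
        finally
        {
            pool.ReturnConnection(connection); // always runs, mirroring SubmitJob above
        }
    }
}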
private void ValidateRequestedCluster(JobSpecification job) { var clusterNodeType = LogicFactory.GetLogicFactory().CreateClusterInformationLogic(_unitOfWork) .GetClusterById(job.ClusterId); if (clusterNodeType == null) { _messageBuilder.AppendLine($"Requested Cluster with Id {job.ClusterId} does not exist in the system"); } if (job.FileTransferMethod?.ClusterId != job.ClusterId) { _messageBuilder.AppendLine($"Job {job.Name} has wrong FileTransferMethod"); } }
public IHttpActionResult Post([FromBody] JobSpecification spec) { if (string.IsNullOrEmpty(spec.RootUrl)) { spec.RootUrl = Request.RequestUri.GetLeftPart(UriPartial.Authority); } if (spec.Specification == null) { throw new Exception("Specification expected"); } var status = XGenJobManager.Instance.StartNew(spec); return(RedirectToRoute("ExperienceGeneratorJobsApi", new { id = status.Id })); }
public override byte[] DownloadFileFromClusterByAbsolutePath(JobSpecification jobSpecification, string absoluteFilePath) { ConnectionInfo connection = _connectionPool.GetConnectionForUser(jobSpecification.ClusterUser); try { var client = new SftpClientAdapter((ExtendedSftpClient)connection.Connection); using var stream = new MemoryStream(); var path = absoluteFilePath.Replace("~/", string.Empty).Replace("/~/", string.Empty); client.DownloadFile(path, stream); return(stream.ToArray()); } finally { _connectionPool.ReturnConnection(connection); } }
protected virtual void CreateSynchronizersForType(JobSpecification jobSpecification, SynchronizableFiles fileType) { _fileSynchronizers[fileType] = new Dictionary <string, IFileSynchronizer>(jobSpecification.Tasks.Count); foreach (TaskSpecification task in jobSpecification.Tasks) { string jobClusterDirectoryPath = FileSystemUtils.GetJobClusterDirectoryPath(_fileSystem.Cluster.LocalBasepath, jobSpecification); string taskClusterDirectoryPath = FileSystemUtils.GetTaskClusterDirectoryPath(jobClusterDirectoryPath, task); FullFileSpecification fileInfo = CreateSynchronizableFileInfoForType(task, taskClusterDirectoryPath, fileType); string sourceFilePath = FileSystemUtils.ConcatenatePaths(fileInfo.SourceDirectory, fileInfo.RelativePath); if (!_fileSynchronizers[fileType].ContainsKey(sourceFilePath)) { _fileSynchronizers[fileType][sourceFilePath] = CreateFileSynchronizer(fileInfo, jobSpecification.ClusterUser); } } }
public SubmittedJobInfoExt CreateJob(JobSpecificationExt specification, string sessionCode) { try { using (IUnitOfWork unitOfWork = UnitOfWorkFactory.GetUnitOfWorkFactory().CreateUnitOfWork()) { AdaptorUser loggedUser = UserAndLimitationManagementService.GetValidatedUserForSessionCode(sessionCode, unitOfWork, UserRoleType.Submitter); IJobManagementLogic jobLogic = LogicFactory.GetLogicFactory().CreateJobManagementLogic(unitOfWork); JobSpecification js = specification.ConvertExtToInt(); SubmittedJobInfo jobInfo = jobLogic.CreateJob(js, loggedUser, specification.IsExtraLong.Value); return(jobInfo.ConvertIntToExt()); } } catch (Exception exc) { ExceptionHandler.ThrowProperExternalException(exc); return(null); } }
/// <summary> /// Convert job specification to job /// </summary> /// <param name="jobSpecification">Job specification</param> /// <param name="schedulerAllocationCmd">Scheduler allocation command</param> /// <returns></returns> public virtual object ConvertJobSpecificationToJob(JobSpecification jobSpecification, object schedulerAllocationCmd) { ISchedulerJobAdapter jobAdapter = _conversionAdapterFactory.CreateJobAdapter(); jobAdapter.SetNotifications(jobSpecification.NotificationEmail, jobSpecification.NotifyOnStart, jobSpecification.NotifyOnFinish, jobSpecification.NotifyOnAbort); // Setting global parameters for all tasks var globalJobParameters = (string)jobAdapter.AllocationCmd; var tasks = new List <object>(); if (jobSpecification.Tasks is not null && jobSpecification.Tasks.Any()) { foreach (var task in jobSpecification.Tasks) { tasks.Add($"_{task.Id}=$({(string)ConvertTaskSpecificationToTask(jobSpecification, task, schedulerAllocationCmd)}{globalJobParameters});echo $_{task.Id};"); } } jobAdapter.SetTasks(tasks); return(jobAdapter.AllocationCmd); }
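For illustration only: with made-up task ids and allocation commands, the loop above yields shell fragments of the form _<taskId>=$(<allocation command>);echo $_<taskId>; so each task's scheduler id is captured into a shell variable named after the database task id and echoed back to the caller. A self-contained sketch of that composition:

using System;
using System.Collections.Generic;

class TaskCommandComposition
{
    static void Main()
    {
        // Hypothetical per-task allocation commands and empty global parameters, standing in
        // for ConvertTaskSpecificationToTask output and the job adapter's AllocationCmd.
        var taskAllocationCmds = new Dictionary<long, string>
        {
            [10] = "qsub -l select=1 task10.sh",
            [11] = "qsub -l select=1 task11.sh"
        };
        string globalJobParameters = string.Empty;

        var tasks = new List<string>();
        foreach (var task in taskAllocationCmds)
        {
            tasks.Add($"_{task.Key}=$({task.Value}{globalJobParameters});echo $_{task.Key};");
        }

        tasks.ForEach(Console.WriteLine);
        // _10=$(qsub -l select=1 task10.sh);echo $_10;
        // _11=$(qsub -l select=1 task11.sh);echo $_11;
    }
}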
public async Task CanCreateAndUpdateJobScheduleWithApplicationReferences() { var jobId = Guid.NewGuid().ToString(); const string newVersion = "2.0"; var poolInformation = new PoolInformation { AutoPoolSpecification = new AutoPoolSpecification { PoolSpecification = new PoolSpecification { ApplicationPackageReferences = new List <ApplicationPackageReference> { new ApplicationPackageReference { ApplicationId = ApplicationId, Version = Version } }, CloudServiceConfiguration = new CloudServiceConfiguration(PoolFixture.OSFamily), VirtualMachineSize = PoolFixture.VMSize, }, PoolLifetimeOption = PoolLifetimeOption.JobSchedule, KeepAlive = false, } }; Schedule schedule = new Schedule { DoNotRunAfter = DateTime.UtcNow.AddMinutes(5), RecurrenceInterval = TimeSpan.FromMinutes(2) }; JobSpecification jobSpecification = new JobSpecification(poolInformation); using BatchClient client = await TestUtilities.OpenBatchClientFromEnvironmentAsync().ConfigureAwait(false); CloudJobSchedule cloudJobSchedule = client.JobScheduleOperations.CreateJobSchedule(jobId, schedule, jobSpecification); async Task test() { CloudJobSchedule updatedBoundJobSchedule = null; try { await cloudJobSchedule.CommitAsync().ConfigureAwait(false); CloudJobSchedule boundJobSchedule = TestUtilities.WaitForJobOnJobSchedule(client.JobScheduleOperations, jobId); ApplicationPackageReference apr = boundJobSchedule.JobSpecification.PoolInformation.AutoPoolSpecification.PoolSpecification.ApplicationPackageReferences.First(); Assert.Equal(ApplicationId, apr.ApplicationId); Assert.Equal(Version, apr.Version); boundJobSchedule.JobSpecification.PoolInformation.AutoPoolSpecification.PoolSpecification.ApplicationPackageReferences = new[] { new ApplicationPackageReference() { ApplicationId = ApplicationId, Version = newVersion } }; await boundJobSchedule.CommitAsync().ConfigureAwait(false); await boundJobSchedule.RefreshAsync().ConfigureAwait(false); updatedBoundJobSchedule = await client.JobScheduleOperations.GetJobScheduleAsync(jobId).ConfigureAwait(false); ApplicationPackageReference updatedApr = updatedBoundJobSchedule.JobSpecification.PoolInformation.AutoPoolSpecification.PoolSpecification.ApplicationPackageReferences .First(); Assert.Equal(ApplicationId, updatedApr.ApplicationId); Assert.Equal(newVersion, updatedApr.Version); } finally { TestUtilities.DeleteJobScheduleIfExistsAsync(client, jobId).Wait(); } } await SynchronizationContextHelper.RunTestAsync(test, LongTestTimeout); }
/// <summary> /// Convert task specification to task /// </summary> /// <param name="jobSpecification">Job specification</param> /// <param name="taskSpecification">Task specification</param> /// <param name="schedulerAllocationCmd">Scheduler allocation cmd</param> /// <returns></returns> /// <exception cref="ApplicationException"></exception> public virtual object ConvertTaskSpecificationToTask(JobSpecification jobSpecification, TaskSpecification taskSpecification, object schedulerAllocationCmd) { ISchedulerTaskAdapter taskAdapter = _conversionAdapterFactory.CreateTaskAdapter(schedulerAllocationCmd); taskAdapter.DependsOn = taskSpecification.DependsOn; taskAdapter.SetEnvironmentVariablesToTask(taskSpecification.EnvironmentVariables); taskAdapter.IsExclusive = taskSpecification.IsExclusive; taskAdapter.SetRequestedResourceNumber(taskSpecification.ClusterNodeType.RequestedNodeGroups.Select(s => s.Name).ToList(), taskSpecification.RequiredNodes.Select(s => s.NodeName).ToList(), taskSpecification.PlacementPolicy, taskSpecification.TaskParalizationSpecifications, Convert.ToInt32(taskSpecification.MinCores), Convert.ToInt32(taskSpecification.MaxCores), taskSpecification.ClusterNodeType.CoresPerNode); // Do not change!!! Task name on the cluster is set as ID of the used task specification to enable pairing of cluster task info with DB task info. taskAdapter.Name = taskSpecification.Id.ToString(CultureInfo.InvariantCulture); if (Convert.ToInt32(taskSpecification.WalltimeLimit) > 0) { taskAdapter.Runtime = Convert.ToInt32(taskSpecification.WalltimeLimit); } string jobClusterDirectory = FileSystemUtils.GetJobClusterDirectoryPath(jobSpecification.FileTransferMethod.Cluster.LocalBasepath, jobSpecification); string workDirectory = FileSystemUtils.GetTaskClusterDirectoryPath(jobClusterDirectory, taskSpecification); string stdErrFilePath = FileSystemUtils.ConcatenatePaths(workDirectory, taskSpecification.StandardErrorFile); taskAdapter.StdErrFilePath = workDirectory.Equals(stdErrFilePath) ? string.Empty : stdErrFilePath; string stdInFilePath = FileSystemUtils.ConcatenatePaths(workDirectory, taskSpecification.StandardInputFile); taskAdapter.StdInFilePath = workDirectory.Equals(stdInFilePath) ? string.Empty : stdInFilePath; string stdOutFilePath = FileSystemUtils.ConcatenatePaths(workDirectory, taskSpecification.StandardOutputFile); taskAdapter.StdOutFilePath = workDirectory.Equals(stdOutFilePath) ? string.Empty : stdOutFilePath; taskAdapter.WorkDirectory = workDirectory; taskAdapter.JobArrays = taskSpecification.JobArrays; taskAdapter.IsRerunnable = !string.IsNullOrEmpty(taskSpecification.JobArrays) || taskSpecification.IsRerunnable; taskAdapter.Queue = taskSpecification.ClusterNodeType.Queue; taskAdapter.ClusterAllocationName = taskSpecification.ClusterNodeType.ClusterAllocationName; taskAdapter.CpuHyperThreading = taskSpecification.CpuHyperThreading ?? 
false; CommandTemplate template = taskSpecification.CommandTemplate; if (template is null) { throw new ApplicationException(@$"Command Template ""{taskSpecification.CommandTemplate.Name}"" for task ""{taskSpecification.Name}"" does not exist in the adaptor configuration."); } Dictionary <string, string> templateParameters = CreateTemplateParameterValuesDictionary(jobSpecification, taskSpecification, template.TemplateParameters, taskSpecification.CommandParameterValues); taskAdapter.SetPreparationAndCommand(workDirectory, ReplaceTemplateDirectivesInCommand(template.PreparationScript, templateParameters), ReplaceTemplateDirectivesInCommand($"{template.ExecutableFile} {template.CommandParameters}", templateParameters), stdOutFilePath, stdErrFilePath, CreateTaskDirectorySymlinkCommand(taskSpecification)); return(taskAdapter.AllocationCmd); }
public static void DisplayJobScheduleLong(ITestOutputHelper testOutputHelper, CloudJobSchedule curWI) { // job schedule top level simple properties testOutputHelper.WriteLine("Id: " + curWI.Id); testOutputHelper.WriteLine(" State: " + curWI.State.ToString()); testOutputHelper.WriteLine(" " + "URL: " + curWI.Url); testOutputHelper.WriteLine(" " + "LastModified: " + (curWI.LastModified.HasValue ? curWI.LastModified.Value.ToLongDateString() : "<null>")); // execution INFO { JobScheduleExecutionInformation wiExInfo = curWI.ExecutionInformation; testOutputHelper.WriteLine(" ExeInfo:"); testOutputHelper.WriteLine(" LastUpdateTime: " + (wiExInfo.EndTime.HasValue ? wiExInfo.EndTime.Value.ToLongDateString() : "<null>")); testOutputHelper.WriteLine(" NextRuntime: " + (wiExInfo.NextRunTime.HasValue ? wiExInfo.NextRunTime.Value.ToLongDateString() : "<null>")); testOutputHelper.WriteLine(" RecentJob:"); // RecentJob RecentJob rj = wiExInfo.RecentJob; if (null == rj) { testOutputHelper.WriteLine(" <null>"); } else { testOutputHelper.WriteLine(" Id: " + rj.Id); testOutputHelper.WriteLine(" Url: " + rj.Url); } } // JobSpecification JobSpecification jobSpec = curWI.JobSpecification; testOutputHelper.WriteLine(" JobSpecification:"); if (null == jobSpec) { testOutputHelper.WriteLine(" <null>"); } else { testOutputHelper.WriteLine(""); testOutputHelper.WriteLine(" Priority: " + (jobSpec.Priority.HasValue ? jobSpec.Priority.ToString() : "<null>")); JobConstraints jobCon = jobSpec.Constraints; testOutputHelper.WriteLine(" Constraints: "); if (null == jobCon) { testOutputHelper.WriteLine("null"); } else { testOutputHelper.WriteLine(""); testOutputHelper.WriteLine(" MaxTaskRetryCount: " + (jobCon.MaxTaskRetryCount.HasValue ? jobSpec.Constraints.MaxTaskRetryCount.Value.ToString() : "<null>")); testOutputHelper.WriteLine(" MaxWallClockTime: " + (jobCon.MaxWallClockTime.HasValue ? jobSpec.Constraints.MaxWallClockTime.Value.TotalMilliseconds.ToString() : "<null>")); } JobManagerTask ijm = jobSpec.JobManagerTask; if (null == ijm) { testOutputHelper.WriteLine("<null>"); } else { testOutputHelper.WriteLine(" JobManagerTask:"); testOutputHelper.WriteLine(" CommandLine : " + ijm.CommandLine); testOutputHelper.WriteLine(" KillJobOnCompletion: " + (ijm.KillJobOnCompletion.HasValue ? ijm.KillJobOnCompletion.Value.ToString() : "<null>")); testOutputHelper.WriteLine(" Id : " + ijm.Id); testOutputHelper.WriteLine(" RunExclusive : " + (ijm.RunExclusive.HasValue ? ijm.RunExclusive.Value.ToString() : "<null>")); IEnumerable <EnvironmentSetting> envSettings = ijm.EnvironmentSettings; if (null != envSettings) { List <EnvironmentSetting> envSettingsList = new List <EnvironmentSetting>(ijm.EnvironmentSettings); testOutputHelper.WriteLine(" EnvironmentSettings.count:" + envSettingsList.Count); } else { testOutputHelper.WriteLine(" EnvironmentSettings: <null>"); } IEnumerable <ResourceFile> resFilesProp = ijm.ResourceFiles; if (null != resFilesProp) { List <ResourceFile> resFiles = new List <ResourceFile>(); testOutputHelper.WriteLine(" ResourceFiles.count:" + resFiles.Count); } else { testOutputHelper.WriteLine(" ResourceFiles: <null>"); } TaskConstraints tc = ijm.Constraints; if (null == tc) { testOutputHelper.WriteLine(" TaskConstraints: <null>"); } else { testOutputHelper.WriteLine(" TaskConstraints: "); testOutputHelper.WriteLine(" MaxTaskRetryCount: " + (tc.MaxTaskRetryCount.HasValue ? tc.MaxTaskRetryCount.Value.ToString() : "<null>")); testOutputHelper.WriteLine(" MaxWallClockTime: " + (tc.MaxWallClockTime.HasValue ? 
tc.MaxWallClockTime.Value.TotalMilliseconds.ToString() : "<null>")); testOutputHelper.WriteLine(" RetentionTime: " + (tc.RetentionTime.HasValue ? tc.RetentionTime.Value.TotalMilliseconds.ToString() : "<null>")); } if (ijm.UserIdentity != null) { testOutputHelper.WriteLine(" UserIdentity: "); testOutputHelper.WriteLine(" UserName: ", ijm.UserIdentity.UserName); testOutputHelper.WriteLine(" ElevationLevel: ", ijm.UserIdentity.AutoUser?.ElevationLevel); testOutputHelper.WriteLine(" Scope: ", ijm.UserIdentity.AutoUser?.Scope); } } } // metadata { IEnumerable <MetadataItem> mdis = curWI.Metadata; testOutputHelper.WriteLine(" Metadata: "); if (null == mdis) { testOutputHelper.WriteLine("<null>"); } else { List <MetadataItem> meta = new List <MetadataItem>(curWI.Metadata); testOutputHelper.WriteLine(" count:" + meta.Count); } } // schedule Schedule sched = curWI.Schedule; if (null == sched) { testOutputHelper.WriteLine(" Schedule: <null>"); } else { testOutputHelper.WriteLine(" Schedule:"); testOutputHelper.WriteLine(" DoNotRunAfter:" + (sched.DoNotRunAfter.HasValue ? sched.DoNotRunAfter.Value.ToLongDateString() : "<null>")); testOutputHelper.WriteLine(" DoNotRunUntil: " + (sched.DoNotRunUntil.HasValue ? sched.DoNotRunUntil.Value.ToLongDateString() : "<null>")); testOutputHelper.WriteLine(" RecurrenceInterval: " + (sched.RecurrenceInterval.HasValue ? sched.RecurrenceInterval.Value.TotalMilliseconds.ToString() : "<null>")); testOutputHelper.WriteLine(" StartWindow :" + (sched.StartWindow.HasValue ? sched.StartWindow.Value.TotalMilliseconds.ToString() : "<null>")); } // stats JobScheduleStatistics stats = curWI.Statistics; if (null == stats) { testOutputHelper.WriteLine(" Stats: <null>"); } else { testOutputHelper.WriteLine(" Stats:"); testOutputHelper.WriteLine(" LastUpdateTime: " + stats.LastUpdateTime.ToLongDateString()); testOutputHelper.WriteLine(" KernelCPUTime: " + stats.KernelCpuTime.TotalMilliseconds.ToString()); testOutputHelper.WriteLine(" NumFailedTasks: " + stats.FailedTaskCount.ToString()); testOutputHelper.WriteLine(" NumTimesCalled : " + stats.TaskRetryCount); testOutputHelper.WriteLine(" NumSucceededTasks: " + stats.SucceededTaskCount); testOutputHelper.WriteLine(" ReadIOGiB : " + stats.ReadIOGiB); testOutputHelper.WriteLine(" ReadIOps : " + stats.ReadIOps); testOutputHelper.WriteLine(" StartTime : " + stats.StartTime.ToLongDateString()); testOutputHelper.WriteLine(" Url : " + stats.Url); testOutputHelper.WriteLine(" UserCpuTime : " + stats.UserCpuTime.TotalMilliseconds.ToString()); testOutputHelper.WriteLine(" WaitTime : " + stats.WaitTime.TotalMilliseconds.ToString()); testOutputHelper.WriteLine(" WallClockTime : " + stats.WallClockTime.TotalMilliseconds.ToString()); testOutputHelper.WriteLine(" WriteIOGiB : " + stats.WriteIOGiB); testOutputHelper.WriteLine(" WriteIOps : " + stats.WriteIOps); } }
/// <summary> /// Validate Job specification /// </summary> /// <param name="job">Job specification</param> /// <returns></returns> private string ValidateJobSpecificationInput(JobSpecification job) { if (!(job.FileTransferMethodId.HasValue)) { _messageBuilder.AppendLine("FileTransferMethod is empty."); } if (job.ClusterId <= 0) { _messageBuilder.AppendLine("ClusterId cannot be empty or <= 0."); } if (string.IsNullOrEmpty(job.Name)) { _messageBuilder.AppendLine("Job name cannot be empty."); } if (job.Name.Length > 50) { _messageBuilder.AppendLine("Job name cannot be longer than 50 characters."); } if (ContainsIllegalCharacters(job.Name)) { _messageBuilder.AppendLine("Job name contains illegal characters."); } if (!string.IsNullOrEmpty(job.Project)) { if (job.Project.Length > 50) { _messageBuilder.AppendLine("Project name cannot be longer than 50 characters."); } if (ContainsIllegalCharacters(job.Project)) { _messageBuilder.AppendLine("Project name contains illegal characters."); } } if (job.WaitingLimit < 0) { _messageBuilder.AppendLine("Waiting limit cannot be lower than 0."); } if (!string.IsNullOrEmpty(job.NotificationEmail)) { if (job.NotificationEmail.Length > 50) { _messageBuilder.AppendLine("Notification email cannot be longer than 50 characters."); } if (!IsEmailAddress(job.NotificationEmail)) { _messageBuilder.AppendLine("Notification email address is in a wrong format."); } } if (!string.IsNullOrEmpty(job.PhoneNumber)) { if (job.PhoneNumber.Length > 20) { _messageBuilder.AppendLine("Phone number cannot be longer than 20 characters."); } if (!IsPhoneNumber(job.PhoneNumber)) { _messageBuilder.AppendLine("Phone number is in a wrong format."); } } if (job.EnvironmentVariables != null) { foreach (EnvironmentVariable variable in job.EnvironmentVariables) { if (string.IsNullOrEmpty(variable.Name)) { _messageBuilder.AppendLine($"Environment variable's name for the job cannot be empty. ({variable.Name}={variable.Value})"); } } } if (job.Tasks == null || job.Tasks.Count == 0) { _messageBuilder.AppendLine("Each job has to contain at least one task."); } else { job.Tasks.ForEach(task => ValidateTaskSpecificationInput(task)); } return(_messageBuilder.ToString()); }
public void TestJobScheduleVerbs() { Action test = () => { using (BatchClient batchCli = TestUtilities.OpenBatchClientAsync(TestUtilities.GetCredentialsFromEnvironment()).Result) { string jobScheduleId = Microsoft.Azure.Batch.Constants.DefaultConveniencePrefix + TestUtilities.GetMyName() + "-TestEnableDisableDeleteJobSchedule"; try { Schedule schedule = new Schedule() { DoNotRunAfter = DateTime.UtcNow.Add(TimeSpan.FromDays(1)) }; JobSpecification jobSpecification = new JobSpecification(new PoolInformation() { PoolId = "DummyPool" }); CloudJobSchedule unboundJobSchedule = batchCli.JobScheduleOperations.CreateJobSchedule(jobScheduleId, schedule, jobSpecification); unboundJobSchedule.Commit(); CloudJobSchedule boundJobSchedule = batchCli.JobScheduleOperations.GetJobSchedule(jobScheduleId); //Disable the job schedule via instance boundJobSchedule.Disable(); boundJobSchedule.Refresh(); Assert.NotNull(boundJobSchedule.State); Assert.Equal(JobScheduleState.Disabled, boundJobSchedule.State); //Enable the job schedule via instance boundJobSchedule.Enable(); boundJobSchedule.Refresh(); Assert.NotNull(boundJobSchedule.State); Assert.Equal(JobScheduleState.Active, boundJobSchedule.State); //Disable the job schedule via operations batchCli.JobScheduleOperations.DisableJobSchedule(jobScheduleId); boundJobSchedule.Refresh(); Assert.NotNull(boundJobSchedule.State); Assert.Equal(JobScheduleState.Disabled, boundJobSchedule.State); //Enable the job schedule via instance batchCli.JobScheduleOperations.EnableJobSchedule(jobScheduleId); boundJobSchedule.Refresh(); Assert.NotNull(boundJobSchedule.State); Assert.Equal(JobScheduleState.Active, boundJobSchedule.State); //Terminate the job schedule batchCli.JobScheduleOperations.TerminateJobSchedule(jobScheduleId); boundJobSchedule.Refresh(); Assert.True(boundJobSchedule.State == JobScheduleState.Completed || boundJobSchedule.State == JobScheduleState.Terminating); //Delete the job schedule boundJobSchedule.Delete(); //Wait for deletion to take BatchException be = TestUtilities.AssertThrowsEventuallyAsync <BatchException>(() => boundJobSchedule.RefreshAsync(), TimeSpan.FromSeconds(30)).Result; Assert.NotNull(be.RequestInformation); Assert.NotNull(be.RequestInformation.BatchError); Assert.Equal("JobScheduleNotFound", be.RequestInformation.BatchError.Code); } finally { // clean up TestUtilities.DeleteJobScheduleIfExistsAsync(batchCli, jobScheduleId).Wait(); } } }; SynchronizationContextHelper.RunTest(test, TestTimeout); }
public static string GetJobClusterDirectoryPath(string basePath, JobSpecification jobSpecification) { return(ConcatenatePaths(basePath, jobSpecification.Id.ToString(CultureInfo.InvariantCulture))); }
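Illustration only, assuming FileSystemUtils.ConcatenatePaths joins its two segments with a single '/' (the stand-in below is hypothetical, not the project's helper): a cluster base path of /scratch/heappe and a job with Id 42 would resolve to /scratch/heappe/42.

using System;
using System.Globalization;

class JobClusterPathExample
{
    // Hypothetical stand-in for FileSystemUtils.ConcatenatePaths.
    static string ConcatenatePaths(string left, string right) =>
        $"{left.TrimEnd('/')}/{right.TrimStart('/')}";

    static void Main()
    {
        string basePath = "/scratch/heappe"; // illustrative Cluster.LocalBasepath
        long jobId = 42;                     // illustrative JobSpecification.Id

        // Mirrors GetJobClusterDirectoryPath above: base path plus culture-invariant job id.
        string jobDir = ConcatenatePaths(basePath, jobId.ToString(CultureInfo.InvariantCulture));
        Console.WriteLine(jobDir); // /scratch/heappe/42
    }
}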
/// <summary> /// Create template parameter values dictionary /// </summary> /// <param name="jobSpecification">Job specification</param> /// <param name="taskSpecification">Task specification</param> /// <param name="templateParameters">Template parameters</param> /// <param name="taskParametersValues">Task parameters values</param> /// <returns></returns> protected static Dictionary <string, string> CreateTemplateParameterValuesDictionary(JobSpecification jobSpecification, TaskSpecification taskSpecification, ICollection <CommandTemplateParameter> templateParameters, ICollection <CommandTemplateParameterValue> taskParametersValues) { var finalParameters = new Dictionary <string, string>(); foreach (CommandTemplateParameter templateParameter in templateParameters) { var taskParametersValue = taskParametersValues.Where(w => w.TemplateParameter.Identifier == templateParameter.Identifier) .FirstOrDefault(); if (taskParametersValue is not null) { // If taskParametersValue represent already escaped string of generic key-value pairs, don't escape it again. var isStringOfGenericParameters = templateParameter.CommandTemplate.IsGeneric && Regex.IsMatch(taskParametersValue.Value, @""".+""", RegexOptions.IgnoreCase | RegexOptions.Compiled); finalParameters.Add(templateParameter.Identifier, isStringOfGenericParameters ? taskParametersValue.Value : Regex.Escape(taskParametersValue.Value)); } else { string templateParameterValueFromQuery = templateParameter.Query; if (templateParameter.Query.StartsWith("Job.")) { templateParameterValueFromQuery = GetPropertyValueForQuery(jobSpecification, templateParameter.Query); } if (templateParameter.Query == "Task.Workdir") { string taskClusterDirectory = FileSystemUtils.GetJobClusterDirectoryPath(jobSpecification.FileTransferMethod.Cluster.LocalBasepath, jobSpecification); templateParameterValueFromQuery = FileSystemUtils.GetTaskClusterDirectoryPath(taskClusterDirectory, taskSpecification); } if (templateParameter.Query.StartsWith("Task.")) { templateParameterValueFromQuery = GetPropertyValueForQuery(taskSpecification, templateParameter.Query); } finalParameters.Add(templateParameter.Identifier, templateParameterValueFromQuery); } } return(finalParameters); }
public void SampleCreateJobScheduleAutoPool() { Action test = () => { using (BatchClient batchCli = TestUtilities.OpenBatchClientAsync(TestUtilities.GetCredentialsFromEnvironment()).Result) { string jsId = Microsoft.Azure.Batch.Constants.DefaultConveniencePrefix + TestUtilities.GetMyName() + "-CreateWiAutoPoolTest"; try { CloudJobSchedule newJobSchedule = batchCli.JobScheduleOperations.CreateJobSchedule(jsId, null, null); { newJobSchedule.Metadata = MakeMetaData("onCreateName", "onCreateValue"); PoolInformation poolInformation = new PoolInformation(); AutoPoolSpecification iaps = new AutoPoolSpecification(); Schedule schedule = new Schedule() { RecurrenceInterval = TimeSpan.FromMinutes(18) }; poolInformation.AutoPoolSpecification = iaps; iaps.AutoPoolIdPrefix = Microsoft.Azure.Batch.Constants.DefaultConveniencePrefix + TestUtilities.GetMyName(); iaps.PoolLifetimeOption = Microsoft.Azure.Batch.Common.PoolLifetimeOption.Job; iaps.KeepAlive = false; PoolSpecification ps = new PoolSpecification(); iaps.PoolSpecification = ps; ps.TargetDedicated = 1; ps.VirtualMachineSize = PoolFixture.VMSize; ps.CloudServiceConfiguration = new CloudServiceConfiguration(PoolFixture.OSFamily); ps.Metadata = MakeMetaData("pusMDIName", "pusMDIValue"); JobSpecification jobSpec = newJobSchedule.JobSpecification; Assert.Null(jobSpec); jobSpec = new JobSpecification(poolInformation); JobManagerTask jobMgr = jobSpec.JobManagerTask; Assert.Null(jobMgr); jobMgr = new JobManagerTask(TestUtilities.GetMyName() + "-JobManagerTest", "hostname"); jobMgr.KillJobOnCompletion = false; // set the JobManagerTask on the JobSpecification jobSpec.JobManagerTask = jobMgr; // set the JobSpecifcation on the Job Schedule newJobSchedule.JobSpecification = jobSpec; newJobSchedule.Schedule = schedule; newJobSchedule.Commit(); } CloudJobSchedule jobSchedule = batchCli.JobScheduleOperations.GetJobSchedule(jsId); { TestUtilities.DisplayJobScheduleLong(this.testOutputHelper, jobSchedule); List <MetadataItem> mdi = new List <MetadataItem>(jobSchedule.Metadata); // check the values specified for AddJobSchedule are correct. foreach (MetadataItem curIMDI in mdi) { Assert.Equal("onCreateName", curIMDI.Name); Assert.Equal("onCreateValue", curIMDI.Value); } // add metadata items mdi.Add(new MetadataItem("modifiedName", "modifiedValue")); jobSchedule.Metadata = mdi; jobSchedule.Commit(); // confirm metadata updated correctly CloudJobSchedule jsUpdated = batchCli.JobScheduleOperations.GetJobSchedule(jsId); { List <MetadataItem> updatedMDI = new List <MetadataItem>(jsUpdated.Metadata); Assert.Equal(2, updatedMDI.Count); Assert.Equal("onCreateName", updatedMDI[0].Name); Assert.Equal("onCreateValue", updatedMDI[0].Value); Assert.Equal("modifiedName", updatedMDI[1].Name); Assert.Equal("modifiedValue", updatedMDI[1].Value); } jobSchedule.Refresh(); TestUtilities.DisplayJobScheduleLong(this.testOutputHelper, jobSchedule); } } finally { // clean up TestUtilities.DeleteJobScheduleIfExistsAsync(batchCli, jsId).Wait(); } } }; SynchronizationContextHelper.RunTest(test, TestTimeout); }
private static TaskSpecification ConvertExtToInt(this TaskSpecificationExt taskSpecificationExt, JobSpecification jobSpecification) { var result = new TaskSpecification { Name = taskSpecificationExt.Name, MinCores = taskSpecificationExt.MinCores, MaxCores = taskSpecificationExt.MaxCores, WalltimeLimit = taskSpecificationExt.WalltimeLimit, PlacementPolicy = taskSpecificationExt.PlacementPolicy, RequiredNodes = taskSpecificationExt.RequiredNodes? .Select(s => new TaskSpecificationRequiredNode { NodeName = s }) .ToList(), Priority = taskSpecificationExt.Priority.ConvertExtToInt(), Project = jobSpecification.Project, JobArrays = taskSpecificationExt.JobArrays, IsExclusive = taskSpecificationExt.IsExclusive ?? false, IsRerunnable = !string.IsNullOrEmpty(taskSpecificationExt.JobArrays) || (taskSpecificationExt.IsRerunnable ?? false), StandardInputFile = taskSpecificationExt.StandardInputFile, StandardOutputFile = taskSpecificationExt.StandardOutputFile ?? "stdout.txt", StandardErrorFile = taskSpecificationExt.StandardErrorFile ?? "stderr.txt", ClusterTaskSubdirectory = taskSpecificationExt.ClusterTaskSubdirectory, ProgressFile = new FileSpecification { RelativePath = taskSpecificationExt.ProgressFile, NameSpecification = FileNameSpecification.FullName, SynchronizationType = FileSynchronizationType.IncrementalAppend }, LogFile = new FileSpecification { RelativePath = taskSpecificationExt.LogFile, NameSpecification = FileNameSpecification.FullName, SynchronizationType = FileSynchronizationType.IncrementalAppend }, ClusterNodeTypeId = taskSpecificationExt.ClusterNodeTypeId.Value, CommandTemplateId = taskSpecificationExt.CommandTemplateId ?? 0, EnvironmentVariables = taskSpecificationExt.EnvironmentVariables? .Select(s => s.ConvertExtToInt()) .ToList(), CpuHyperThreading = taskSpecificationExt.CpuHyperThreading, JobSpecification = jobSpecification, TaskParalizationSpecifications = taskSpecificationExt.TaskParalizationParameters? .Select(s => s.ConvertExtToInt()) .ToList(), CommandParameterValues = taskSpecificationExt.TemplateParameterValues? .Select(s => s.ConvertExtToInt()) .ToList(), }; result.DependsOn = taskSpecificationExt.DependsOn? .Select(s => new TaskDependency { TaskSpecification = result, ParentTaskSpecification = s.ConvertExtToInt(jobSpecification) }) .ToList(); return(result); }
/// <summary> /// Submit job to scheduler /// </summary> /// <param name="connectorClient">Connector</param> /// <param name="jobSpecification">Job specification</param> /// <param name="credentials">Credentials</param> /// <returns></returns> public virtual IEnumerable <SubmittedTaskInfo> SubmitJob(object connectorClient, JobSpecification jobSpecification, ClusterAuthenticationCredentials credentials) { var shellCommandSb = new StringBuilder(); SshCommandWrapper command = null; string shellCommand = (string)_convertor.ConvertJobSpecificationToJob(jobSpecification, null); _log.Info($"Submitting job \"{jobSpecification.Id}\", command \"{shellCommand}\""); string sshCommandBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(shellCommand)); command = SshCommandUtils.RunSshCommand(new SshClientAdapter((SshClient)connectorClient), $"{_commandScripts.ExecutieCmdPath} {sshCommandBase64}"); shellCommandSb.Clear(); //compose command with parameters of job and task IDs shellCommandSb.Append($"{_linuxLocalCommandScripts.RunLocalCmdPath} {jobSpecification.FileTransferMethod.Cluster.LocalBasepath}/{jobSpecification.Id}/"); jobSpecification.Tasks.ForEach(task => shellCommandSb.Append($" {task.Id}")); //log local HPC Run script to log file shellCommandSb.Append($" >> {jobSpecification.FileTransferMethod.Cluster.LocalBasepath}/{jobSpecification.Id}/job_log.txt &"); shellCommand = shellCommandSb.ToString(); sshCommandBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(shellCommand)); command = SshCommandUtils.RunSshCommand(new SshClientAdapter((SshClient)connectorClient), $"{_commandScripts.ExecutieCmdPath} {sshCommandBase64}"); return(GetActualTasksInfo(connectorClient, jobSpecification.Cluster, new string[] { $"{jobSpecification.Id}" })); }
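Illustration only, with made-up script path, base path, job id and task ids: the second command composed above (run script, job directory, task ids, log redirection, background execution) and the Base64 form that is actually sent over SSH can be reproduced like this:

using System;
using System.Text;

class LocalRunCommandSketch
{
    static void Main()
    {
        string runLocalCmdPath = "~/.scripts/run-local.sh"; // stands in for _linuxLocalCommandScripts.RunLocalCmdPath
        string localBasepath = "/home/hpc";                 // stands in for Cluster.LocalBasepath
        long jobId = 7;
        long[] taskIds = { 20, 21 };

        var sb = new StringBuilder();
        sb.Append($"{runLocalCmdPath} {localBasepath}/{jobId}/");
        foreach (long taskId in taskIds) sb.Append($" {taskId}");
        sb.Append($" >> {localBasepath}/{jobId}/job_log.txt &"); // log to the job directory, run in background

        string shellCommand = sb.ToString();
        Console.WriteLine(shellCommand);
        // Encoded exactly as in SubmitJob above before being handed to the execute script.
        Console.WriteLine(Convert.ToBase64String(Encoding.UTF8.GetBytes(shellCommand)));
    }
}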
private void Slice(GameObject subject, Vector4 planeInLocalSpace, int shatterStep, bool destroyOriginal) { if (subject == null) { throw new System.ArgumentNullException(); } //Sanity check: are we already slicing this? foreach (var jobState in jobStates) { if (object.ReferenceEquals(jobState.Specification.Subject, subject)) { return; // Debug.Log ("Turbo Slicer was asked to slice '{0}' but this target is already enqueued.", subject.name); } } //If names in the hierarchy are replicated anywhere than we have a problem. transformBuffer.Clear(); concatenateHierarchy(subject.transform, transformBuffer); nameBuffer.Clear(); foreach (var t in transformBuffer) { nameBuffer.Add(t.gameObject.name); } if (nameBuffer.Count != transformBuffer.Count) { Debug.LogWarning("Turbo Slicer needs each object under its hierarchy to have a unique name.", subject); return; } var sliceable = subject.GetComponent <Sliceable> (); bool channelTangents, channelNormals, channelUV2; if (sliceable != null) { channelNormals = sliceable.channelNormals; channelTangents = sliceable.channelTangents; channelUV2 = sliceable.channelUV2; } else { channelTangents = false; channelNormals = true; channelUV2 = false; } var renderers = subject.GetComponentsInChildren <Renderer>(); IEnumerable <MeshSnapshot> snapshots; var forwardPassAgent = subject.GetComponent <ForwardPassAgent> (); if (forwardPassAgent != null) { snapshots = forwardPassAgent.Snapshots; } else { var snapshotsBuilder = new List <MeshSnapshot>(); foreach (var renderer in renderers) { Mesh mesh; bool meshIsABufferMesh; if (renderer is MeshRenderer) { var filter = renderer.GetComponent <MeshFilter> (); mesh = filter.sharedMesh; meshIsABufferMesh = false; } else if (renderer is SkinnedMeshRenderer) { var smr = (SkinnedMeshRenderer)renderer; meshIsABufferMesh = true; mesh = new Mesh(); smr.BakeMesh(mesh); } else { throw new System.NotImplementedException("Turbo Slicer encountered a Renderer that is neither a MeshRenderer nor a SkinnedMeshRenderer"); } var rootToLocalTransform = renderer.transform.worldToLocalMatrix * subject.transform.localToWorldMatrix; var unitRect = new Rect(0.0f, 0.0f, 1.0f, 1.0f); var infillRects = new Rect[mesh.subMeshCount]; for (int i = 0; i < infillRects.Length; i++) { Rect?rect = null; if (renderer.sharedMaterials.Length > i) { var mat = renderer.sharedMaterials [i]; if (sliceable != null) { for (int j = 0; j < sliceable.infillers.Length; j++) { var ifc = sliceable.infillers [j]; if (object.ReferenceEquals(ifc.material, mat)) { rect = ifc.regionForInfill; } } } infillRects [i] = rect.GetValueOrDefault(unitRect); } } var isThisTheRoot = renderer.gameObject == subject; var key = isThisTheRoot ? MeshSnapshot.RootKey : renderer.gameObject.name; MeshSnapshot snapshot; if (preloadedMeshes.TryGetValue(mesh.GetInstanceID(), out snapshot) == false) { if (mesh.isReadable == false) { Debug.LogErrorFormat(subject, "Turbo Slicer needs to read mesh '{0}' on object '{1}', but cannot. If this object is " + "an original, go to its mesh and enable readability. If this object is a slice result, go to the " + "sliceable configuration on the original and turn on 'shreddable.", mesh.name, subject.name); return; } var indices = new int[mesh.subMeshCount][]; for (int i = 0; i < mesh.subMeshCount; i++) { indices [i] = mesh.GetIndices(i); } var coords = mesh.uv; if (coords == null || coords.Length < mesh.vertexCount) { //These might be null or empty but the core doesn't have a branch for UVs. So we'll put some junk in there if we need to. 
coords = new Vector2[mesh.vertexCount]; } snapshot = new MeshSnapshot( key, mesh.vertices, channelNormals ? mesh.normals : new Vector3[0], coords, channelUV2 ? mesh.uv2 : new Vector2[0], channelTangents ? mesh.tangents : new Vector4[0], indices, infillRects, rootToLocalTransform); } else { snapshot = new MeshSnapshot( key, snapshot.vertices, channelNormals ? snapshot.normals : new Vector3[0], snapshot.coords, channelUV2 ? snapshot.coords2 : new Vector2[0], channelTangents ? snapshot.tangents : new Vector4[0], snapshot.indices, infillRects, rootToLocalTransform); } if (meshIsABufferMesh) { GameObject.DestroyImmediate(mesh); } snapshotsBuilder.Add(snapshot); } snapshots = snapshotsBuilder; } var jobSpec = new JobSpecification(subject, snapshots, planeInLocalSpace, channelTangents, channelNormals, channelUV2, shatterStep, destroyOriginal); try { var jobState = new JobState(jobSpec); switch (workerThreadMode) { case WorkerThreadMode.Asynchronous: jobStates.Add(jobState); #if NETFX_CORE && !UNITY_EDITOR System.Threading.Tasks.Task.Factory.StartNew(ThreadSafeSlice.Slice, jobState); #else System.Threading.ThreadPool.QueueUserWorkItem(ThreadSafeSlice.Slice, jobState); #endif break; case WorkerThreadMode.Synchronous: ThreadSafeSlice.Slice(jobState); if (jobState.HasYield) { ConsumeJobYield(jobState.Specification, jobState.Yield); } else if (jobState.HasException) { throw jobState.Exception; } break; default: throw new System.NotImplementedException(); } } catch (System.Exception ex) { Debug.LogException(ex, subject); } }
/// <summary> /// This will take the basic data provided about the account, upload the necessary information to the account, and schedule a job. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void btnStart_Click(object sender, RoutedEventArgs e) { BatchCredentials credentials = new BatchCredentials(txtBAccountName.Text, txtBAccountKey.Text); IBatchClient bClient = BatchClient.Connect(SampleConstants.BatchSvcEndpoint, credentials); // Setting a retry policy adds robustness against an individual call timing out. When using a policy, by default all recoverable failures are retried. bClient.CustomBehaviors.Add(new SetRetryPolicy(new ExponentialRetry(TimeSpan.FromSeconds(5), 5))); // Create a unique workitem name; don't forget to delete these when you're done string workItemName = SampleConstants.WorkItemNamePrefix + Guid.NewGuid().ToString(); // Identify the pre-existing pool of VMs that will run the tasks. An Autopool specification // is fine but there is the delay associated with the creation of the pool along with waiting // for the VMs to reach Idle state before tasks are running. You can use Batch Explorer to // pre-create the pool and then resize it to the desired size and number of VMs. JobExecutionEnvironment jee = new JobExecutionEnvironment() { PoolName = PoolName }; // Next, create the JobManager instance describing the environment settings and resources it // needs to run JobManager jobMgr = new JobManager() { Name = "JM1", CommandLine = SampleConstants.JobManager, // NOTE: We do not in general recommend that customers put their secrets on the command line or as environmental variables, as // these are not a secure locations. This was done for the simplicity of the sample. EnvironmentSettings = new List<IEnvironmentSetting>() { { new EnvironmentSetting( SampleConstants.EnvWorkItemName, workItemName ) }, { new EnvironmentSetting( SampleConstants.EnvBatchAccountKeyName, txtBAccountKey.Text) } }, // In many cases you will want KillJobOnCompletion to be set to 'TRUE' - this allows the previous job to finish before // a recurrence is scheduled. As an alternative, you can set this to 'FALSE' and use MaxWallClockTime as shown below, // which will instead ensure that every recurrence happens. KillJobOnCompletion = true }; // Create a list of resource files that are needed to run JobManager.exe. A shared access signature key specifying // readonly access is used so the JobManager program will have access to the resource files when it is started // on a VM. var sasPrefix = Helpers.ConstructContainerSas( txtSAccountName.Text, txtSAccountKey.Text, "core.windows.net", txtSContainerName.Text); jobMgr.ResourceFiles = Helpers.GetResourceFiles(sasPrefix, SampleConstants.JobManagerFiles); // Create the job specification, identifying that this job has a job manager associated with it JobSpecification jobSpec = new JobSpecification() { JobManager = jobMgr }; // Set up the desired recurrence or start time schedule. WorkItemSchedule wiSchedule = new WorkItemSchedule(); if (rdoOnce.IsChecked == true) { // Set information if the task is to be run once. if (dpkDate.SelectedDate != null) { DateTime runOnce = (DateTime)(dpkDate.SelectedDate); runOnce = runOnce.AddHours(cbxHourO.SelectedIndex); runOnce = runOnce.AddMinutes(cbxMinuteO.SelectedIndex); wiSchedule.DoNotRunUntil = runOnce; } } else { // Set information if the task is to be recurring. 
TimeSpan recurring = new TimeSpan(cbxHourR.SelectedIndex, cbxMinuteR.SelectedIndex, 0); wiSchedule.RecurrenceInterval = recurring; TimeSpan countback = new TimeSpan(0, 0, 30); jobSpec.JobConstraints = new JobConstraints(recurring.Subtract(countback), null); } // Upload files and create workitem. UploadFiles(); using (IWorkItemManager wiMgr = bClient.OpenWorkItemManager()) { ICloudWorkItem workItem = wiMgr.CreateWorkItem(workItemName); workItem.JobExecutionEnvironment = jee; workItem.Schedule = wiSchedule; workItem.JobSpecification = jobSpec; try { workItem.Commit(); } catch (Exception ex) { MessageBox.Show(ex.ToString()); } } // Remember to clean up your workitems and jobs }
/// <summary> /// This will take the basic data provided about the account, upload the necessary information to the account, and schedule a job. /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void btnStart_Click(object sender, RoutedEventArgs e) { BatchCredentials credentials = new BatchCredentials(txtBAccountName.Text, txtBAccountKey.Text); IBatchClient bClient = BatchClient.Connect(SampleConstants.BatchSvcEndpoint, credentials); // Setting a retry policy adds robustness against an individual call timing out. When using a policy, by default all recoverable failures are retried. bClient.CustomBehaviors.Add(new SetRetryPolicy(new ExponentialRetry(TimeSpan.FromSeconds(5), 5))); // Create a unique workitem name; don't forget to delete these when you're done string workItemName = SampleConstants.WorkItemNamePrefix + Guid.NewGuid().ToString(); // Identify the pre-existing pool of VMs that will run the tasks. An Autopool specification // is fine but there is the delay associated with the creation of the pool along with waiting // for the VMs to reach Idle state before tasks are running. You can use Batch Explorer to // pre-create the pool and then resize it to the desired size and number of VMs. JobExecutionEnvironment jee = new JobExecutionEnvironment() { PoolName = PoolName }; // Next, create the JobManager instance describing the environment settings and resources it // needs to run JobManager jobMgr = new JobManager() { Name = "JM1", CommandLine = SampleConstants.JobManager, // NOTE: We do not in general recommend that customers put their secrets on the command line or as environmental variables, as // these are not a secure locations. This was done for the simplicity of the sample. EnvironmentSettings = new List <IEnvironmentSetting>() { { new EnvironmentSetting(SampleConstants.EnvWorkItemName, workItemName) }, { new EnvironmentSetting(SampleConstants.EnvBatchAccountKeyName, txtBAccountKey.Text) } }, // In many cases you will want KillJobOnCompletion to be set to 'TRUE' - this allows the previous job to finish before // a recurrence is scheduled. As an alternative, you can set this to 'FALSE' and use MaxWallClockTime as shown below, // which will instead ensure that every recurrence happens. KillJobOnCompletion = true }; // Create a list of resource files that are needed to run JobManager.exe. A shared access signature key specifying // readonly access is used so the JobManager program will have access to the resource files when it is started // on a VM. var sasPrefix = Helpers.ConstructContainerSas( txtSAccountName.Text, txtSAccountKey.Text, "core.windows.net", txtSContainerName.Text); jobMgr.ResourceFiles = Helpers.GetResourceFiles(sasPrefix, SampleConstants.JobManagerFiles); // Create the job specification, identifying that this job has a job manager associated with it JobSpecification jobSpec = new JobSpecification() { JobManager = jobMgr }; // Set up the desired recurrence or start time schedule. WorkItemSchedule wiSchedule = new WorkItemSchedule(); if (rdoOnce.IsChecked == true) { // Set information if the task is to be run once. DateTime runOnce = (DateTime)(dpkDate.SelectedDate); runOnce = runOnce.AddHours(cbxHourO.SelectedIndex); runOnce = runOnce.AddMinutes(cbxMinuteO.SelectedIndex); wiSchedule.DoNotRunUntil = runOnce; } else { // Set information if the task is to be recurring. 
TimeSpan recurring = new TimeSpan(cbxHourR.SelectedIndex, cbxMinuteR.SelectedIndex, 0); wiSchedule.RecurrenceInterval = recurring; TimeSpan countback = new TimeSpan(0, 0, 30); jobSpec.JobConstraints = new JobConstraints(recurring.Subtract(countback), null); } // Upload files and create workitem. UploadFiles(); using (IWorkItemManager wiMgr = bClient.OpenWorkItemManager()) { ICloudWorkItem workItem = wiMgr.CreateWorkItem(workItemName); workItem.JobExecutionEnvironment = jee; workItem.Schedule = wiSchedule; workItem.JobSpecification = jobSpec; try { workItem.Commit(); } catch (Exception ex) { MessageBox.Show(ex.ToString()); } } // Remember to clean up your workitems and jobs }
private void Slice(GameObject subject, Vector4 planeInLocalSpace, int shatterStep, bool destroyOriginal) { bool passSanityCheck = true; if (subject == null) { throw new System.ArgumentNullException(); } //Sanity check: are we already slicing this? foreach (var jobState in jobStates) { if (object.ReferenceEquals(jobState.Specification.Subject, subject)) { passSanityCheck = false; Debug.LogErrorFormat("Turbo Slicer was asked to slice '{0}' but this target is already enqueued.", subject.name); } } if (passSanityCheck) { var sliceable = subject.GetComponent <Sliceable> (); GameObject meshHolder; bool channelTangents, channelNormals, channelUV2; if (sliceable != null) { channelNormals = sliceable.channelNormals; channelTangents = sliceable.channelTangents; channelUV2 = sliceable.channelUV2; } else { channelTangents = false; channelNormals = true; channelUV2 = false; } if (sliceable != null && sliceable.explicitlySelectedMeshHolder != null) { meshHolder = sliceable.explicitlySelectedMeshHolder; } else { meshHolder = subject; } var renderer = meshHolder.GetComponent <Renderer> (); MeshSnapshot snapshot; var forwardPassAgent = meshHolder.GetComponent <ForwardPassAgent> (); if (forwardPassAgent != null) { snapshot = forwardPassAgent.Snapshot; } else { Mesh mesh; bool meshIsABufferMesh; if (renderer is MeshRenderer) { var filter = meshHolder.GetComponent <MeshFilter> (); mesh = filter.sharedMesh; meshIsABufferMesh = false; } else if (renderer is SkinnedMeshRenderer) { var smr = (SkinnedMeshRenderer)renderer; meshIsABufferMesh = true; mesh = new Mesh(); smr.BakeMesh(mesh); } else { throw new System.NotImplementedException("Turbo Slicer encountered a Renderer that is neither a MeshRenderer nor a SkinnedMeshRenderer"); } var unitRect = new Rect(0.0f, 0.0f, 1.0f, 1.0f); var infillRects = new Rect[mesh.subMeshCount]; for (int i = 0; i < infillRects.Length; i++) { Rect?rect = null; if (renderer.sharedMaterials.Length > i) { var mat = renderer.sharedMaterials [i]; if (sliceable != null) { for (int j = 0; j < sliceable.infillers.Length; j++) { var ifc = sliceable.infillers [j]; if (object.ReferenceEquals(ifc.material, mat)) { rect = ifc.regionForInfill; } } } infillRects [i] = rect.GetValueOrDefault(unitRect); } } if (preloadedMeshes.TryGetValue(mesh.GetInstanceID(), out snapshot) == false) { var indices = new int[mesh.subMeshCount][]; for (int i = 0; i < mesh.subMeshCount; i++) { indices [i] = mesh.GetIndices(i); } //Not that we don't pull the tangents at all. snapshot = new MeshSnapshot( mesh.vertices, channelNormals ? mesh.normals : new Vector3[0], mesh.uv, channelUV2 ? mesh.uv2 : new Vector2[0], channelTangents ? mesh.tangents : new Vector4[0], indices, infillRects); } else { snapshot = new MeshSnapshot( snapshot.vertices, channelNormals ? snapshot.normals : new Vector3[0], snapshot.coords, channelUV2 ? snapshot.coords2 : new Vector2[0], channelTangents ? 
snapshot.tangents : new Vector4[0], snapshot.indices, infillRects); } if (meshIsABufferMesh) { GameObject.DestroyImmediate(mesh); } } var jobSpec = new JobSpecification(subject, snapshot, planeInLocalSpace, channelTangents, channelNormals, channelUV2, shatterStep, destroyOriginal); try { var jobState = new JobState(jobSpec); switch (workerThreadMode) { case WorkerThreadMode.Asynchronous: jobStates.Add(jobState); System.Threading.ThreadPool.QueueUserWorkItem(ThreadSafeSlice.Slice, jobState); break; case WorkerThreadMode.Synchronous: ThreadSafeSlice.Slice(jobState); if (jobState.HasYield) { ConsumeJobYield(jobState.Specification, jobState.Yield); } else if (jobState.HasException) { throw jobState.Exception; } break; default: throw new System.NotImplementedException(); } } catch (System.Exception ex) { Debug.LogException(ex, subject); } } }
public void Bug1433008JobScheduleScheduleNewable() { Action test = () => { using (BatchClient batchCli = TestUtilities.OpenBatchClientAsync(TestUtilities.GetCredentialsFromEnvironment()).Result) { string jsId = Microsoft.Azure.Batch.Constants.DefaultConveniencePrefix + TestUtilities.GetMyName() + "-Bug1433008JobScheduleScheduleNewable"; try { DateTime unboundDNRU = DateTime.UtcNow.AddYears(1); CloudJobSchedule newJobSchedule = batchCli.JobScheduleOperations.CreateJobSchedule(jsId, null, null); { AutoPoolSpecification iaps = new AutoPoolSpecification(); PoolSpecification ips = new PoolSpecification(); JobSpecification jobSpecification = new JobSpecification(new PoolInformation() { AutoPoolSpecification = iaps }); iaps.PoolSpecification = ips; iaps.AutoPoolIdPrefix = Microsoft.Azure.Batch.Constants.DefaultConveniencePrefix + TestUtilities.GetMyName(); iaps.PoolLifetimeOption = Microsoft.Azure.Batch.Common.PoolLifetimeOption.Job; iaps.KeepAlive = false; PoolSpecification ps = iaps.PoolSpecification; ps.TargetDedicated = 1; ps.VirtualMachineSize = PoolFixture.VMSize; ps.CloudServiceConfiguration = new CloudServiceConfiguration(PoolFixture.OSFamily); Schedule sched = new Schedule(); sched.DoNotRunUntil = unboundDNRU; newJobSchedule.Schedule = sched; newJobSchedule.JobSpecification = jobSpecification; newJobSchedule.Commit(); } CloudJobSchedule jobSchedule = batchCli.JobScheduleOperations.GetJobSchedule(jsId); // confirm that the original value(s) are set TestUtilities.DisplayJobScheduleLong(this.testOutputHelper, jobSchedule); Assert.Equal(unboundDNRU, jobSchedule.Schedule.DoNotRunUntil); // now update the schedule and confirm DateTime boundDNRU = DateTime.UtcNow.AddYears(2); jobSchedule.Schedule.DoNotRunUntil = boundDNRU; jobSchedule.Commit(); jobSchedule.Refresh(); // confirm that the new value(s) are set TestUtilities.DisplayJobScheduleLong(this.testOutputHelper, jobSchedule); Assert.Equal(boundDNRU, jobSchedule.Schedule.DoNotRunUntil); } finally { // clean up TestUtilities.DeleteJobScheduleIfExistsAsync(batchCli, jsId).Wait(); } } }; SynchronizationContextHelper.RunTest(test, TestTimeout); }
/// <summary> /// Submit job to scheduler /// </summary> /// <param name="connectorClient">Connector</param> /// <param name="jobSpecification">Job specification</param> /// <param name="credentials">Credentials</param> /// <returns></returns> /// <exception cref="Exception"></exception> public virtual IEnumerable <SubmittedTaskInfo> SubmitJob(object connectorClient, JobSpecification jobSpecification, ClusterAuthenticationCredentials credentials) { var jobIdsWithJobArrayIndexes = new List <string>(); SshCommandWrapper command = null; string sshCommand = (string)_convertor.ConvertJobSpecificationToJob(jobSpecification, "qsub -koed"); _log.Info($"Submitting job \"{jobSpecification.Id}\", command \"{sshCommand}\""); string sshCommandBase64 = $"{_commands.InterpreterCommand} '{_commands.ExecutieCmdScriptPath} {Convert.ToBase64String(Encoding.UTF8.GetBytes(sshCommand))}'"; try { command = SshCommandUtils.RunSshCommand(new SshClientAdapter((SshClient)connectorClient), sshCommandBase64); var jobIds = _convertor.GetJobIds(command.Result).ToList(); for (int i = 0; i < jobSpecification.Tasks.Count; i++) { jobIdsWithJobArrayIndexes.AddRange(string.IsNullOrEmpty(jobSpecification.Tasks[i].JobArrays) ? new List <string> { jobIds[i] } : CombineScheduledJobIdWithJobArrayIndexes(jobIds[i], jobSpecification.Tasks[i].JobArrays)); } return(GetActualTasksInfo(connectorClient, jobSpecification.Cluster, jobIdsWithJobArrayIndexes)); } catch (FormatException e) { throw new Exception($"Exception thrown when submitting a job: \"{jobSpecification.Name}\" to the cluster: \"{jobSpecification.Cluster.Name}\". Submission script result: \"{command.Result}\".\nSubmission script error message: \"{command.Error}\".\nCommand line for job submission: \"{sshCommandBase64}\".\n", e); } }
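The sshCommandBase64 line above shields the generated scheduler command from shell quoting: the whole command is Base64-encoded and passed as a single argument to a wrapper script on the cluster, which is expected to decode and execute it. A hedged sketch of just that wrapping step, with placeholder interpreter and script paths standing in for the values held by _commands:

// Illustration only; paths are placeholders, not values from the source above.
// Uses System.Convert and System.Text.Encoding.
string schedulerCommand = "qsub -koed job.pbs";   // whatever the convertor produced
string encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(schedulerCommand));

// Mirrors $"{_commands.InterpreterCommand} '{_commands.ExecutieCmdScriptPath} {encoded}'"
string remoteInvocation = $"/bin/bash '/opt/scripts/run_command.sh {encoded}'";

// The wrapper script on the cluster side is then expected to do the equivalent of:
//   echo "$1" | base64 --decode | sh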
private void ConsumeJobYield(JobSpecification jobSpec, JobYield jobYield) { #if NOBLEMUFFINS var stopwatch = new System.Diagnostics.Stopwatch(); stopwatch.Start(); #endif var go = jobSpec.Subject; if (go == null) { throw new System.Exception("Turbo Slicer was asked to slice an object, but the object has been destroyed."); } var onlyHaveOne = false; { var sides = new [] { jobYield.Alfa, jobYield.Bravo }; for (int i = 0; i < sides.Length && !onlyHaveOne; i++) { var side = sides[i]; var indexCount = 0; foreach (var snapshot in side) { for (int j = 0; j < snapshot.indices.Length; j++) { indexCount += snapshot.indices[j].Length; } } onlyHaveOne |= indexCount == 0; } } if (onlyHaveOne) { //Do nothing } else { GameObject alfaObject, bravoObject; var sliceable = go.GetComponent <Sliceable> (); var goTransform = go.transform; Dictionary <string, Transform> transformByName; Dictionary <string, bool> alfaPresence, bravoPresence; determinePresence(goTransform, jobSpec.PlaneInLocalSpace, out transformByName, out alfaPresence, out bravoPresence); Object alfaSource, bravoSource; if (sliceable != null) { bool useAlternateForAlfa, useAlternateForBravo; if (sliceable.alternatePrefab == null) { useAlternateForAlfa = false; useAlternateForBravo = false; } else if (sliceable.alwaysCloneFromAlternate) { useAlternateForAlfa = true; useAlternateForBravo = true; } else { useAlternateForAlfa = sliceable.cloneAlternate(alfaPresence); useAlternateForBravo = sliceable.cloneAlternate(bravoPresence); } alfaSource = useAlternateForAlfa ? sliceable.alternatePrefab : go; bravoSource = useAlternateForBravo ? sliceable.alternatePrefab : go; } else { alfaSource = bravoSource = go; } alfaObject = (GameObject)GameObject.Instantiate(alfaSource); bravoObject = (GameObject)GameObject.Instantiate(bravoSource); bravoObject.name = alfaObject.name = alfaSource.name; handleHierarchy(alfaObject.transform, alfaPresence, transformByName); handleHierarchy(bravoObject.transform, bravoPresence, transformByName); var originalRigidBody = go.GetComponent <Rigidbody> (); var tuples = new [] { new ConsumptionTuple(alfaObject, jobYield.Alfa), new ConsumptionTuple(bravoObject, jobYield.Bravo) }; for (int i = 0; i < tuples.Length; i++) { var tuple = tuples [i]; var transform = tuple.root.GetComponent <Transform> (); transform.SetParent(goTransform.parent, false); transform.localPosition = goTransform.localPosition; transform.localRotation = goTransform.localRotation; transform.localScale = goTransform.localScale; tuple.root.layer = go.layer; if (originalRigidBody != null) { var rigidBody = tuple.root.GetComponent <Rigidbody> (); if (rigidBody != null) { rigidBody.angularVelocity = originalRigidBody.angularVelocity; rigidBody.velocity = originalRigidBody.velocity; } } var doColliders = sliceable != null && sliceable.refreshColliders; var doForwardPass = sliceable != null && sliceable.shreddable; snapshotByKeyBuffer.Clear(); foreach (var snapshot in tuple.snapshot) { snapshotByKeyBuffer[snapshot.key] = snapshot; } traversalBuffer.Clear(); targetBuffer.Clear(); traversalBuffer.Add(tuple.root.transform); while (traversalBuffer.Count > 0) { var lastIndex = traversalBuffer.Count - 1; var last = traversalBuffer[lastIndex]; traversalBuffer.RemoveAt(lastIndex); for (var childIndex = 0; childIndex < last.childCount; childIndex++) { var child = last.GetChild(childIndex); traversalBuffer.Add(child); } var isThisHolderTheRoot = last.gameObject == tuple.root; var key = isThisHolderTheRoot ? 
MeshSnapshot.RootKey : last.name; if (snapshotByKeyBuffer.ContainsKey(key)) { targetBuffer.Add(last.gameObject); } } for (int targetIndex = 0; targetIndex < targetBuffer.Count; targetIndex++) { var holder = targetBuffer[targetIndex]; var isThisHolderTheRoot = holder == tuple.root; var key = isThisHolderTheRoot ? MeshSnapshot.RootKey : holder.name; var snapshot = snapshotByKeyBuffer[key]; var mesh = new Mesh(); mesh.name = "Turbo Slicer mesh"; mesh.vertices = snapshot.vertices; mesh.uv = snapshot.coords; if (snapshot.normals.Length > 0) { mesh.normals = snapshot.normals; } if (snapshot.coords2.Length > 0) { mesh.uv2 = snapshot.coords2; } if (snapshot.tangents.Length > 0) { mesh.tangents = snapshot.tangents; } mesh.subMeshCount = snapshot.indices.Length; for (int j = 0; j < snapshot.indices.Length; j++) { int[] array; array = snapshot.indices [j]; mesh.SetIndices(array, MeshTopology.Triangles, j); } if (doColliders) { mesh.RecalculateBounds(); } SetMesh(holder, mesh); if (doColliders) { var collider = holder.GetComponent <Collider> (); if (collider != null) { var holderTransform = holder.GetComponent <Transform> (); var rootTransform = tuple.root.GetComponent <Transform> (); var bounds = mesh.bounds; var pointOne = bounds.min; var pointTwo = bounds.max; if (holder != tuple.root) { var matrix = holderTransform.localToWorldMatrix * rootTransform.worldToLocalMatrix; pointOne = matrix.MultiplyPoint3x4(pointOne); pointTwo = matrix.MultiplyPoint3x4(pointTwo); var center = (pointOne + pointTwo) * 0.5f; bounds = new Bounds(center, Vector3.zero); bounds.Encapsulate(pointOne); bounds.Encapsulate(pointTwo); } if (collider is BoxCollider) { var boxCollider = (BoxCollider)collider; boxCollider.center = bounds.center; boxCollider.size = bounds.extents * 2.0f; } else if (collider is SphereCollider) { var sphereCollider = (SphereCollider)collider; sphereCollider.center = bounds.center; sphereCollider.radius = bounds.extents.magnitude; } else if (collider is MeshCollider) { var mc = (MeshCollider)collider; mc.sharedMesh = mesh; } } } mesh.UploadMeshData(true); } if (doForwardPass) { var forwardPassAgent = tuple.root.GetComponent <ForwardPassAgent> (); if (forwardPassAgent == null) { forwardPassAgent = tuple.root.AddComponent <ForwardPassAgent> (); } forwardPassAgent.Snapshots = tuple.snapshot; } } if (jobSpec.ShatterStep == 0 && sliceable != null) { sliceable.RaiseSliced(alfaObject, bravoObject); } else { var nextShatterStep = jobSpec.ShatterStep - 1; Shatter(alfaObject, nextShatterStep); Shatter(bravoObject, nextShatterStep); } if (jobSpec.DestroyOriginal) { GameObject.Destroy(go); } } #if NOBLEMUFFINS stopwatch.Stop(); Debug.LogFormat("Slice result consumed in {0} ms", stopwatch.ElapsedMilliseconds.ToString()); #endif }
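For readability, here is the mesh-rebuild step from the loop above pulled out on its own: one MeshSnapshot (called snapshot below, standing for an entry of snapshotByKeyBuffer) is pushed back into a UnityEngine.Mesh. Only API calls already used in the method above appear; RecalculateBounds is only required when colliders are resized from mesh.bounds.

// Distilled restatement of the code above, with comments; not an independent implementation.
var mesh = new Mesh { name = "Turbo Slicer mesh" };
mesh.vertices = snapshot.vertices;
mesh.uv = snapshot.coords;
if (snapshot.normals.Length > 0) mesh.normals = snapshot.normals;      // optional channel
if (snapshot.coords2.Length > 0) mesh.uv2 = snapshot.coords2;          // optional channel
if (snapshot.tangents.Length > 0) mesh.tangents = snapshot.tangents;   // optional channel

mesh.subMeshCount = snapshot.indices.Length;
for (int sub = 0; sub < snapshot.indices.Length; sub++)
{
    // One triangle list per submesh, preserving the original material slots.
    mesh.SetIndices(snapshot.indices[sub], MeshTopology.Triangles, sub);
}

mesh.RecalculateBounds();    // before box/sphere colliders are resized from mesh.bounds
mesh.UploadMeshData(true);   // true releases the CPU-side copy once uploaded to the GPU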
/// <summary> /// Validate Job specification /// </summary> /// <param name="job">Job specification</param> /// <returns></returns> private string ValidateJobSpecification(JobSpecification job) { ValidateRequestedCluster(job); if (job.Id != 0 && _unitOfWork.JobSpecificationRepository.GetById(job.Id) == null) { _messageBuilder.AppendLine($"Job with Id {job.Id} does not exist in the system"); } if (job.Tasks.Count <= 0) { _messageBuilder.AppendLine("Job must have at least one task"); } //Task Dependency for (int i = 0; i < job.Tasks.Count; i++) { //Task Validation ValidateTaskSpecification(job.Tasks[i]); if (job.Tasks[i].CommandTemplate == null) { //_messageBuilder.AppendLine($"Command Template does not exist."); //this is validated in jobSpec break; } //Validation cluster in tasks if (job.Tasks[i].CommandTemplate.ClusterNodeType.Cluster.Id != job.Cluster.Id) { _messageBuilder.AppendLine($"Task \"{job.Tasks[i].Name}\" must use the same HPC Cluster as job " + $"\"{job.Name}\"."); } if (job.FileTransferMethodId != job.Tasks[i].CommandTemplate.ClusterNodeType.FileTransferMethodId) { _messageBuilder.AppendLine($"Command template \"{job.Tasks[i].CommandTemplate.Id}\" for task " + $"\"{job.Tasks[i].Name}\" has a different file transfer method " + $"\"{job.Tasks[i].CommandTemplate.ClusterNodeType.FileTransferMethodId}\" " + $"than the job's file transfer method \"{job.FileTransferMethodId}\"."); } if (job.Tasks[i].CommandTemplate.ClusterNodeType.Id != job.Tasks[i].ClusterNodeTypeId) { _messageBuilder.AppendLine($"Task \"{job.Tasks[i].Name}\" must use the same ClusterNodeTypeId " + $"\"{job.Tasks[i].ClusterNodeTypeId}\" as defined in its CommandTemplate " + $"(ClusterNodeTypeId=\"{job.Tasks[i].CommandTemplate.ClusterNodeType.Id}\")."); } if (job.Tasks[i].DependsOn != null && job.Tasks[i].DependsOn.Count > 0) { List <TaskSpecification> prevTasks = new List <TaskSpecification>(); foreach (var dependTask in job.Tasks[i].DependsOn) { if (dependTask.TaskSpecification == dependTask.ParentTaskSpecification) { //Self-reference _messageBuilder.AppendLine($"Depending task \"{dependTask.TaskSpecification.Name}\" for task " + $"\"{job.Tasks[i].Name}\" references itself."); } var prevTask = prevTasks.FirstOrDefault(w => ReferenceEquals(w, dependTask.ParentTaskSpecification)); if (prevTask is null) { prevTasks.Add(dependTask.ParentTaskSpecification); } else { //Duplicate dependency _messageBuilder.AppendLine($"Depending task \"{dependTask.ParentTaskSpecification.Name}\" for task " + $"\"{job.Tasks[i].Name}\" is referenced twice."); } bool previousTask = false; for (int j = (i - 1); j >= 0; j--) { if (dependTask.ParentTaskSpecification == job.Tasks[j]) { previousTask = true; } } if (!previousTask) { //Dependency must reference a previous task (prevents cycles) _messageBuilder.AppendLine( $"Depending task \"{dependTask.ParentTaskSpecification.Name}\" for task \"{job.Tasks[i].Name}\" " + $"can only reference a previous task."); } } } } return(_messageBuilder.ToString()); }
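The dependency checks above reduce to one rule: each entry in Tasks[i].DependsOn must point at a task that appears earlier in job.Tasks, must not point at the task itself, and must not be listed twice. Below is a hedged sketch of a specification that satisfies the rule; the TaskDependency type name and the settable Tasks/DependsOn collections are assumptions about the domain model, inferred only from the properties the validator reads.

// Hypothetical object model inferred from the validator above; verify names against the real entities.
var preprocess = new TaskSpecification { Name = "preprocess" };
var solve = new TaskSpecification { Name = "solve" };

// Valid: "solve" (index 1) depends on "preprocess" (index 0), listed exactly once.
solve.DependsOn = new List<TaskDependency>
{
    new TaskDependency { TaskSpecification = solve, ParentTaskSpecification = preprocess }
};

var job = new JobSpecification
{
    Name = "demo-job",
    Tasks = new List<TaskSpecification> { preprocess, solve }
};

// Reversing the order of Tasks, pointing ParentTaskSpecification at solve itself, or adding the
// same dependency twice would each trigger one of the validation messages built above.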