// Verifies that when two jobs of different media types are waiting and both a WFS
// and an FFmpeg plugin are idle, one Calculate() pass activates both jobs and
// routes each to its matching plugin (audio -> FFmpeg, video -> WFS).
public void PlanTwoJobsWithMultipleTranscodingPluginsTest()
{
    // Arrange: an idle WFS plugin that accepts any task.
    var wfsPluginMock = CreateWfsMock("1");
    wfsPluginMock.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    // ...and an idle FFmpeg plugin that accepts any task.
    var ffmpegPluginMock = CreateFFMpegMock("1");
    ffmpegPluginMock.SetupGet(p => p.Busy).Returns(false);
    ffmpegPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object, ffmpegPluginMock.Object }, JobRepository, Logging, _callBackService.Object);

    var audioJob = CreateNewFFMpegJob();
    var videoJob = CreateNewWfsJob();
    JobRepository.Add(audioJob);
    JobRepository.Add(videoJob);
    // Sanity check before planning: both jobs queued, none active.
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(2));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(0));

    planner.Calculate();

    // Each plugin should be probed exactly once — one job per plugin.
    ffmpegPluginMock.Verify(x => x.CheckAndEstimate(It.IsAny<ExecutionTask>()), Times.Once(), "ffmpeg plugin should only be called once");
    wfsPluginMock.Verify(x => x.CheckAndEstimate(It.IsAny<ExecutionTask>()), Times.Once(), "Wfs plugin should only be called once");
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(2));

    // The job whose first planned task targets the FFmpeg plugin must be the audio
    // job, and the one targeting the WFS plugin must be the video job.
    var dbAudioJob = JobRepository.ActiveJobs().First(j => j.Plan.Tasks.First().PluginUrn == ffmpegPluginMock.Object.Urn);
    var dbVideoJob = JobRepository.ActiveJobs().First(j => j.Plan.Tasks.First().PluginUrn == wfsPluginMock.Object.Urn);
    Assert.That(dbAudioJob.Urn, Is.EqualTo(audioJob.Urn));
    Assert.That(dbVideoJob.Urn, Is.EqualTo(videoJob.Urn));
}
/// <summary>
/// With two FFmpeg nodes available, the planner must not hold the last node back
/// for high-priority work: a medium- and a low-priority job must both be started.
/// </summary>
public void CalculateDoesNotReserveTheLastFFmpegPluginForHighPriorityOnly()
{
    // Fixed: locals were misleadingly named wfsPluginMock/wfsPluginMock2 although
    // they are created via CreateFFMpegMock and mock FFmpeg plugins.
    var ffmpegPluginMock = CreateFFMpegMock("1");
    ffmpegPluginMock.SetupGet(p => p.Busy).Returns(false);
    ffmpegPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var ffmpegPluginMock2 = CreateFFMpegMock("2");
    ffmpegPluginMock2.SetupGet(p => p.Busy).Returns(false);
    ffmpegPluginMock2.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { ffmpegPluginMock.Object, ffmpegPluginMock2.Object }, JobRepository, Logging, _callBackService.Object);

    // Queue one medium- and one low-priority FFmpeg job.
    var jobMedium = CreateNewFFMpegJob();
    jobMedium.Priority = Priority.medium;
    JobRepository.Add(jobMedium);
    var jobLow = CreateNewFFMpegJob();
    jobLow.Priority = Priority.low;
    JobRepository.Add(jobLow);

    planner.Calculate();

    // Both jobs should be running — no FFmpeg node is reserved for high priority.
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(2));
}
/// <summary>
/// When the destination specifies an essence filename template, the plan must get
/// a second task that targets the file-renamer plugin.
/// </summary>
public void PlannerUsesRenamePluginIfDestinationEssenceFilenameIsPresent()
{
    // One idle transcoder and one idle renamer, both willing to take any task.
    var transcoderMock = CreateWfsMock("1");
    transcoderMock.SetupGet(p => p.Busy).Returns(false);
    transcoderMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var renamerMock = CreateFileRenamerMock("1");
    renamerMock.SetupGet(p => p.Busy).Returns(false);
    renamerMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var sut = new SimplePlanner(new List<IPlugin> { transcoderMock.Object, renamerMock.Object }, JobRepository, Logging, _callBackService.Object);

    // A WFS job whose destination carries a filename template.
    var wfsJob = CreateNewWfsJob();
    wfsJob.Destination.Files = new List<EssenceFile> { EssenceFile.Template("NewName_%index%.%ext%") };
    var wfsJobUrn = wfsJob.Urn;
    JobRepository.Add(wfsJob);

    sut.Calculate();

    // Plan = transcode task + rename task, the latter bound to the renamer plugin.
    Assert.That(JobRepository.Get(wfsJobUrn).Plan.Tasks.Count, Is.EqualTo(2));
    Assert.That(JobRepository.Get(wfsJobUrn).Plan.Tasks[1].PluginUrn, Is.EqualTo(renamerMock.Object.Urn));
}
/// <summary>A job added through the repository is persisted and readable back.</summary>
public async Task CanCreateJob()
{
    // Arrange: the entity we expect to find in the database afterwards.
    var expectedJob = new Database.Entities.Job
    {
        Id = Guid.NewGuid(),
        LeagueCode = "LC",
        StatusCode = System.Net.HttpStatusCode.Created,
    };
    using var executionContext = new ExecutionContext<AppDbContext>(true);

    // Act: add and save inside its own context so the write is fully committed.
    using (var actionContext = await executionContext.CreateContextAsync())
    {
        var repository = new JobRepository(actionContext.Jobs);
        repository.Add(expectedJob);
        await actionContext.SaveChangesAsync();
    }

    // Assert: read back through a fresh context to bypass any change-tracker cache.
    using var assertionContext = await executionContext.CreateContextAsync();
    var createdJob = await assertionContext.Jobs
        .FirstOrDefaultAsync(job => job.Id == expectedJob.Id);
    createdJob.Should().NotBeNull();
    createdJob.Should().BeEquivalentTo(expectedJob);
}
// Verifies that a single audio-mux job is planned as a two-task pipeline:
// first an FFmpeg (mux) task, then a WFS (transcode) task.
public void PlanAJobWithMuxingTest()
{
    // Arrange: an idle WFS plugin and an idle FFmpeg plugin, both accepting any task.
    var wfsPluginMock = CreateWfsMock("1");
    wfsPluginMock.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var ffmpegPluginMock = CreateFFMpegMock("1");
    ffmpegPluginMock.SetupGet(p => p.Busy).Returns(false);
    ffmpegPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object, ffmpegPluginMock.Object }, JobRepository, Logging, _callBackService.Object);

    var audioJob = CreateNewAudioMuxJob();
    JobRepository.Add(audioJob);
    // Sanity check before planning: one queued job, none active.
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(1));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(0));

    planner.Calculate();

    // Each plugin is probed exactly once while building the two-task plan.
    ffmpegPluginMock.Verify(x => x.CheckAndEstimate(It.IsAny<ExecutionTask>()), Times.Exactly(1), "ffmpeg plugin should only be called once for muxing");
    wfsPluginMock.Verify(x => x.CheckAndEstimate(It.IsAny<ExecutionTask>()), Times.Exactly(1), "Wfs plugin should only be called once for transcoding");
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(1));

    // Task order: mux (FFmpeg) first, then transcode (WFS).
    var dbMuxJob = JobRepository.ActiveJobs().First();
    Assert.That(dbMuxJob.Plan.Tasks.Count(), Is.EqualTo(2));
    Assert.That(dbMuxJob.Plan.Tasks[0].PluginUrn, Is.EqualTo(ffmpegPluginMock.Object.Urn));
    Assert.That(dbMuxJob.Plan.Tasks[1].PluginUrn, Is.EqualTo(wfsPluginMock.Object.Urn));
}
/// <summary>
/// Creates a new job owned by the current user and returns 201 Created
/// pointing at the Get action for the new job's id.
/// </summary>
public IActionResult Post(Job job)
{
    // Stamp the authenticated user as the job's manager before persisting.
    job.ManagerId = GetCurrentUserProfile().Id;
    _jobRepository.Add(job);
    return CreatedAtAction("Get", new { id = job.Id }, job);
}
/// <summary>Performs execution of the command.</summary>
/// <remarks>
/// Generated cmdlet pipeline: signals lifecycle events to the attached
/// event listener, honors cancellation at each checkpoint, and either runs
/// the async work inline or wraps it in a PowerShell background job when
/// -AsJob was passed.
/// </remarks>
protected override void ProcessRecord()
{
    ((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Signal(Microsoft.Azure.AzConfig.Runtime.Events.CmdletProcessRecordStart).Wait();
    if (((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Token.IsCancellationRequested) { return; }
    __processRecordId = System.Guid.NewGuid().ToString();
    try
    {
        // work
        if (ShouldProcess($"Call remote 'ConfigurationStores_Update' operation"))
        {
            if (true == MyInvocation?.BoundParameters?.ContainsKey("AsJob"))
            {
                // -AsJob: run the operation on a clone of this cmdlet inside an
                // async job so the pipeline returns immediately with the job object.
                var instance = this.Clone();
                var job = new Microsoft.Azure.AzConfig.Runtime.PowerShell.AsyncJob(instance, this.MyInvocation.Line, this.MyInvocation.MyCommand.Name, this._cancellationTokenSource.Token, this._cancellationTokenSource.Cancel);
                JobRepository.Add(job);
                var task = instance.ProcessRecordAsync();
                job.Monitor(task);
                WriteObject(job);
            }
            else
            {
                // Synchronous path: pump the async work on a command runtime that
                // blocks this call until the operation completes or is cancelled.
                using (var asyncCommandRuntime = new Microsoft.Azure.AzConfig.Runtime.PowerShell.AsyncCommandRuntime(this, ((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Token))
                {
                    asyncCommandRuntime.Wait(ProcessRecordAsync(), ((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Token);
                }
            }
        }
    }
    catch (System.AggregateException aggregateException)
    {
        // unroll the inner exceptions to get the root cause
        foreach (var innerException in aggregateException.Flatten().InnerExceptions)
        {
            ((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Signal(Microsoft.Azure.AzConfig.Runtime.Events.CmdletException, $"{innerException.GetType().Name} - {innerException.Message} : {innerException.StackTrace}").Wait();
            if (((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Token.IsCancellationRequested) { return; }
            // Write exception out to error channel.
            WriteError(new System.Management.Automation.ErrorRecord(innerException, string.Empty, System.Management.Automation.ErrorCategory.NotSpecified, null));
        }
    }
    catch (System.Exception exception)
    {
        ((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Signal(Microsoft.Azure.AzConfig.Runtime.Events.CmdletException, $"{exception.GetType().Name} - {exception.Message} : {exception.StackTrace}").Wait();
        if (((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Token.IsCancellationRequested) { return; }
        // Write exception out to error channel.
        WriteError(new System.Management.Automation.ErrorRecord(exception, string.Empty, System.Management.Automation.ErrorCategory.NotSpecified, null));
    }
    finally
    {
        // Always signal end-of-record so listeners can balance the start event.
        ((Microsoft.Azure.AzConfig.Runtime.IEventListener)this).Signal(Microsoft.Azure.AzConfig.Runtime.Events.CmdletProcessRecordEnd).Wait();
    }
}
/// <inheritdoc/>
protected override void ProcessRecord()
{
    // Start one background ping job per target computer and emit it immediately.
    foreach (var computer in ComputerName)
    {
        var pingJob = new PingJob(MyInvocation.Line, Name, computer);
        JobRepository.Add(pingJob);
        pingJob.StartJob();
        WriteObject(pingJob);
    }
}
// Three-phase test of the "reserve the last WFS node for high priority" rule:
//   1) a medium-priority job DOES start when the single node is the only work,
//   2) with a high-priority job also queued, high wins the single node and medium waits,
//   3) with two nodes available, a medium-priority job starts normally.
public void OnlyHighPriorityJobsWillBeGivenTheLastWfsNode()
{
    // Phase 1: a single idle WFS node.
    var wfsPluginMock = CreateWfsMock("1");
    wfsPluginMock.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object }, JobRepository, Logging, _callBackService.Object);
    var jobMedium = CreateNewWfsJob();
    jobMedium.Priority = Priority.medium;
    JobRepository.Add(jobMedium);
    planner.Calculate();
    // Medium prio job will be started with only one wfs node
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(1));

    // Phase 2: same single node, but now a high-priority job competes.
    JobRepository.Reset();
    jobMedium = CreateNewWfsJob();
    jobMedium.Priority = Priority.medium;
    JobRepository.Add(jobMedium);
    var jobHigh = CreateNewWfsJob();
    jobHigh.Priority = Priority.high;
    JobRepository.Add(jobHigh);
    planner.Calculate();
    // High prio job will start with one wfs node
    Assert.That(JobRepository.ActiveJobs().First().Urn, Is.EqualTo(jobHigh.Urn));
    Assert.That(JobRepository.WaitingJobs().First().Urn, Is.EqualTo(jobMedium.Urn));
    // Finish the active job's current task before resetting for phase 3.
    JobRepository.ActiveJobs().First().Plan.GetCurrentTask().State = ExecutionState.Done;
    JobRepository.ActiveJobs().First().Plan.MoveToNextTask();
    JobRepository.Reset();

    // Phase 3: two idle WFS nodes — medium priority is no longer held back.
    var wfsPluginMock2 = CreateWfsMock("2");
    wfsPluginMock2.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock2.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object, wfsPluginMock2.Object }, JobRepository, Logging, _callBackService.Object);
    var jobMedium2 = CreateNewWfsJob();
    jobMedium2.Priority = Priority.medium;
    JobRepository.Add(jobMedium2);
    planner.Calculate();
    // Medium prio job will start with two wfs nodes
    Assert.That(JobRepository.ActiveJobs().First().Urn, Is.EqualTo(jobMedium2.Urn));
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
}
/// <summary>
/// Persists a copy of <paramref name="job"/> with a freshly generated id and the
/// current time as its update timestamp.
/// </summary>
/// <param name="job">Source of the field values to copy.</param>
/// <returns>Always true (the save either succeeds or throws).</returns>
public bool Add(Job job)
{
    // Copy field-by-field so the caller's instance is never tracked/persisted directly.
    var entity = new Job
    {
        ID = Guid.NewGuid(),
        JobCode = job.JobCode,
        JobName = job.JobName,
        Description = job.Description,
        IsActive = job.IsActive,
        UpdateTime = DateTime.Now,
    };
    JobRepository.Add(entity);
    JobRepository.SaveChanges();
    return true;
}
/// <summary>
/// Creates a pending job from the view model, snapshotting the originating
/// request's media, and persists it through the repository.
/// </summary>
/// <returns>Whatever the repository reports for the add operation.</returns>
public bool AddJob(AddJobViewModel viewModel)
{
    // The job records the request's media as its "before" state.
    var request = Context.Requests.Find(viewModel.RequestId);
    var model = new JobModel
    {
        Amount = viewModel.Amount,
        Description = viewModel.Description,
        MediaBefore = request.Media,
        Name = viewModel.Title,
        State = "Pending",
        StartDate = DateTime.Now,
        JobId = viewModel.RequestId,
    };
    return repository.Add(Context, model);
}
//Add a new job
/// <summary>Creates a job from the binding model and persists it.</summary>
/// <param name="dto">Incoming job fields; a null State defaults to ToDo.</param>
public void AddJob(NewJobBindingModel dto)
{
    var job = new Job()
    {
        Name = dto.Name,
        Description = dto.Description,
        Estimate = dto.Estimate,
        ContractorId = dto.ContractorId,
        // Fixed: no need to round-trip the compile-time literal "ToDo" through
        // Enum.Parse — use the enum member directly. A supplied State string is
        // still parsed (and still throws on an invalid value, as before).
        State = dto.State == null
            ? Project.Status.ToDo
            : (Project.Status)Enum.Parse(typeof(Project.Status), dto.State),
        Deadline = dto.Deadline,
        ProjectId = dto.ProjectId
    };
    _jobRepo.Add(job);
    _jobRepo.SaveChanges();
}
/// <summary>
/// Saves a job inside a fresh data context: inserts when the job has no id yet,
/// updates otherwise, and commits in either case.
/// </summary>
private static void InternalSaveJob(Job job)
{
    using (var context = new CoreContext())
    {
        var repository = new JobRepository(context);
        // A default (0) id means the row has never been persisted.
        if (job.ID == default(int))
        {
            repository.Add(job);
        }
        else
        {
            repository.Update(job);
        }
        context.SaveChanges();
    }
}
// Creates a job for the current session's company site and returns a JSON
// status string ("success"/"error").
// NOTE(review): the userid parameter is never used — Session["userid"] is passed
// to IJO.Add instead; confirm which is intended.
// NOTE(review): the bare catch swallows every exception (e.g. a missing/null
// Session entry making int.Parse/ToString throw) with no logging.
// NOTE(review): an invalid ModelState skips the save yet still returns "success" —
// confirm that is the desired contract.
public JsonResult Create(Job job, string userid)
{
    try
    {
        if (ModelState.IsValid)
        {
            job.COMPANYSITE = int.Parse(Session["companysite"].ToString());
            IJO.Add(job, Session["userid"].ToString());
        }
    }
    catch
    {
        return(Json("error"));
    }
    return(Json("success"));
}
/// <summary>
/// Persists a new job and redisplays the create view with a success message;
/// any failure falls back to the Index view.
/// </summary>
public ActionResult Create(Job job)
{
    try
    {
        _jobRepository = new JobRepository();
        if (_jobRepository.Add(job))
        {
            // Portuguese: "Job registered successfully".
            ViewBag.Mensagem = "Job cadastrado com Sucesso";
        }
        return View();
    }
    catch (Exception)
    {
        return View("Index");
    }
}
/// <summary>
/// Jobs of at most five minutes are treated like high-priority jobs: a short
/// medium-priority job may take the last WFS node, a long one may not.
/// </summary>
public void MaxFiveMinutesJobsWillBeTreatedAsHighPriorityJobsAndBeGivenTheLastWfsNode()
{
    // Two idle WFS nodes that accept any task.
    var wfsPluginMock = CreateWfsMock("1");
    wfsPluginMock.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var wfsPluginMock2 = CreateWfsMock("2");
    wfsPluginMock2.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock2.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object, wfsPluginMock2.Object }, JobRepository, Logging, _callBackService.Object);

    var jobHighPrio = CreateNewWfsJob();
    jobHighPrio.Priority = Priority.high;
    JobRepository.Add(jobHighPrio);
    // Duration just over the 5-minute (300000 ms) threshold.
    var jobMediumPrioLong = CreateNewWfsJob();
    jobMediumPrioLong.Priority = Priority.medium;
    jobMediumPrioLong.Source.Duration = 300001;
    JobRepository.Add(jobMediumPrioLong);
    planner.Calculate();
    // Medium prio long job (more than 5 min) will not start if one node left
    Assert.That(JobRepository.WaitingJobs().First().Urn, Is.EqualTo(jobMediumPrioLong.Urn));

    JobRepository.Reset();
    // Duration well under the threshold: treated like high priority.
    var jobMediumPrioShort = CreateNewWfsJob();
    jobMediumPrioShort.Priority = Priority.medium;
    jobMediumPrioShort.Source.Duration = 30000;
    JobRepository.Add(jobMediumPrioShort);
    JobRepository.Add(jobHighPrio);
    planner.Calculate();
    // Medium prio short job (less than 5 min) will start if one node left
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    // Fixed: the original asserted Select(...) Is.Not.Null, which a LINQ Select
    // can never fail — assert the short job is actually among the active jobs.
    Assert.That(JobRepository.ActiveJobs().Any(t => t.Urn == jobMediumPrioShort.Urn), Is.True);
}
/// <summary>
/// Handle normal step execution: register the step execution with the
/// repository, run the step, then persist the job-level execution context.
/// </summary>
/// <param name="step">The step to execute.</param>
/// <param name="execution">The owning job execution whose context is persisted after the step runs.</param>
/// <param name="currentStepExecution">The step execution record to register and run.</param>
private void HandleStepExecution(IStep step, JobExecution execution, StepExecution currentStepExecution)
{
    // Persist the step execution before running it, so its state is recorded
    // even if the step fails.
    JobRepository.Add(currentStepExecution);
    Logger.Info("Executing step: [ {0} ]", step.Name);
    try
    {
        step.Execute(currentStepExecution);
        // Flag the step's own context as executed.
        currentStepExecution.ExecutionContext.Put("batch.executed", true);
    }
    catch (JobInterruptedException)
    {
        // Ensure that the job gets the message that it is stopping
        // and can pass it on to other steps that are executing
        // concurrently.
        execution.Status = BatchStatus.Stopping;
        throw;
    }
    // Persist the (possibly updated) job execution context; skipped when the
    // step threw, except for the interruption case rethrown above.
    JobRepository.UpdateExecutionContext(execution);
}
/// <summary>A single waiting job is activated on the only available WFS node.</summary>
public void PlanOneJobTest()
{
    // One idle WFS plugin that accepts any task.
    var wfsMock = CreateWfsMock("1");
    wfsMock.SetupGet(p => p.Busy).Returns(false);
    wfsMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var sut = new SimplePlanner(new List<IPlugin> { wfsMock.Object }, JobRepository, Logging, _callBackService.Object);

    var newJob = CreateNewWfsJob();
    var expectedUrn = newJob.Urn;
    JobRepository.Add(newJob);

    // Before planning: the job is queued and nothing runs.
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(1));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(0));

    sut.Calculate();

    // After planning: the queue is drained and exactly our job is active.
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(1));
    Assert.That(JobRepository.ActiveJobs().First().Urn, Is.EqualTo(expectedUrn));
}
// Verifies that updating a job after advancing its plan does not leave orphaned
// essence rows behind in the database.
public void EssenceGetsCleanedOnUpdate()
{
    var originJob = ActiveJob();
    int essenceAmount;
    JobRepository.Add(originJob);
    // The freshly added job is active; nothing else exists.
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(1));
    Assert.That(JobRepository.DoneJobs(), Is.Empty);
    Assert.That(JobRepository.WaitingJobs(), Is.Empty);

    // Advance the plan one task and write the job back — this is the update
    // that must clean up superseded essence rows.
    var jobFromRepo = JobRepository.Get(originJob.Urn);
    jobFromRepo.Plan.GetCurrentTask().State = ExecutionState.Done;
    jobFromRepo.Plan.MoveToNextTask();
    JobRepository.Update(jobFromRepo);

    // Count essence rows directly in the backing store.
    using (var db = new MarvinEntities())
    {
        essenceAmount = db.essence.Count();
    }
    //There should only be four essencefiles (two job essence and two task essence)
    Assert.That(essenceAmount, Is.EqualTo(4));
}
/// <summary>
/// Adds a new job unless its JobCode is already taken.
/// </summary>
/// <param name="job">Source of the field values for the new job.</param>
/// <param name="strResult">Empty on success; otherwise a reason message.</param>
/// <returns>true when the job was persisted.</returns>
public bool Add(Job job, out string strResult)
{
    strResult = string.Empty;
    bool result = false;
    // Reject duplicate job codes up front.
    var jobExist = JobRepository.GetQueryable().FirstOrDefault(j => j.JobCode == job.JobCode);
    if (jobExist == null)
    {
        try
        {
            // Fixed: removed the always-true "jo != null" check — the freshly
            // constructed Job can never be null, so its else-branch (a "user not
            // found" message) was unreachable dead code.
            var jo = new Job();
            jo.ID = Guid.NewGuid();
            jo.JobCode = job.JobCode;
            jo.JobName = job.JobName;
            jo.Description = job.Description;
            jo.IsActive = job.IsActive;
            jo.UpdateTime = DateTime.Now;
            JobRepository.Add(jo);
            JobRepository.SaveChanges();
            result = true;
        }
        catch (Exception ex)
        {
            // "Reason:" + exception message.
            strResult = "原因:" + ex.Message;
        }
    }
    else
    {
        // "Reason: this code already exists!"
        strResult = "原因:该编号已存在!";
    }
    return result;
}
// Verifies that once a mux job has moved past its FFmpeg stage (now running on
// WFS), the freed FFmpeg node can immediately be planned for a new audio job.
public void PlanAnAudioJobWhileMuxingTest()
{
    // Arrange: one idle WFS plugin and one idle FFmpeg plugin.
    var wfsPluginMock = CreateWfsMock("1");
    wfsPluginMock.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var ffmpegPluginMock = CreateFFMpegMock("1");
    ffmpegPluginMock.SetupGet(p => p.Busy).Returns(false);
    ffmpegPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object, ffmpegPluginMock.Object }, JobRepository, Logging, _callBackService.Object);

    //Plan the mux job
    var muxJob = CreateNewAudioMuxJob();
    JobRepository.Add(muxJob);
    planner.Calculate();

    //Advance muxjob progress to wfs, leaving the ffmpeg plugin free
    var dbMuxJob = JobRepository.ActiveJobs().First();
    dbMuxJob.Plan.Tasks[0].State = ExecutionState.Done;
    dbMuxJob.Plan.Tasks[1].State = ExecutionState.Running;
    JobRepository.Update(dbMuxJob);

    //Plan the audio transcoding job
    var audioJob = CreateNewFFMpegJob();
    JobRepository.Add(audioJob);
    planner.Calculate();

    //Check both jobs are running
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(0));
    Assert.That(JobRepository.ActiveJobs().Count(), Is.EqualTo(2));
}
// Resolves the target AKS cluster, fetches its user kubeconfig, locates the
// kubernetes-dashboard pod via kubectl, and starts a background tunnel job
// (optionally popping a browser at ProxyUrl when the tunnel is up).
public override void ExecuteCmdlet()
{
    base.ExecuteCmdlet();
    // Normalize the parameter sets down to ResourceGroupName + Name.
    switch (ParameterSetName)
    {
        case IdParameterSet:
        {
            var resource = new ResourceIdentifier(Id);
            ResourceGroupName = resource.ResourceGroupName;
            Name = resource.ResourceName;
            break;
        }
        case InputObjectParameterSet:
        {
            var resource = new ResourceIdentifier(InputObject.Id);
            ResourceGroupName = resource.ResourceGroupName;
            Name = resource.ResourceName;
            break;
        }
    }
    RunCmdLet(() =>
    {
        // kubectl must be on PATH — we shell out to it below.
        if (!GeneralUtilities.Probe("kubectl"))
        {
            throw new CmdletInvocationException(Resources.KubectlIsRequriedToBeInstalledAndOnYourPathToExecute);
        }
        // Write the cluster-user kubeconfig (base64 in the access profile) to a temp file.
        var tmpFileName = Path.GetTempFileName();
        var encoded = Client.ManagedClusters.GetAccessProfiles(ResourceGroupName, Name, "clusterUser")
            .KubeConfig;
        AzureSession.Instance.DataStore.WriteFile(
            tmpFileName,
            Encoding.UTF8.GetString(Convert.FromBase64String(encoded)));
        WriteVerbose(string.Format(
            Resources.RunningKubectlGetPodsKubeconfigNamespaceSelector,
            tmpFileName));
        // Ask kubectl for the dashboard pod's name (output is like "pods/<name>").
        var proc = new Process
        {
            StartInfo = new ProcessStartInfo
            {
                FileName = "kubectl",
                Arguments = $"get pods --kubeconfig {tmpFileName} --namespace kube-system --output name --selector k8s-app=kubernetes-dashboard",
                UseShellExecute = false,
                RedirectStandardOutput = true,
                CreateNoWindow = true
            }
        };
        proc.Start();
        var dashPodName = proc.StandardOutput.ReadToEnd();
        proc.WaitForExit();
        // remove "pods/"
        // NOTE(review): Substring(5) assumes the "pods/" prefix exactly; the sibling
        // overload uses IndexOf('/') to also handle "pod/" — confirm which applies here.
        dashPodName = dashPodName.Substring(5).TrimEnd('\r', '\n');
        WriteVerbose(string.Format(
            Resources.RunningInBackgroundJobKubectlTunnel,
            tmpFileName, dashPodName));
        // Replace any previously started tunnel job of the same name.
        var exitingJob = JobRepository.Jobs.FirstOrDefault(j => j.Name == "Kubectl-Tunnel");
        if (exitingJob != null)
        {
            WriteVerbose(Resources.StoppingExistingKubectlTunnelJob);
            exitingJob.StopJob();
            JobRepository.Remove(exitingJob);
        }
        var job = new KubeTunnelJob(tmpFileName, dashPodName);
        if (!DisableBrowser)
        {
            // Open the dashboard once the tunnel job reports it has started.
            WriteVerbose(Resources.SettingUpBrowserPop);
            job.StartJobCompleted += (sender, evt) =>
            {
                WriteVerbose(string.Format(Resources.StartingBrowser, ProxyUrl));
                PopBrowser(ProxyUrl);
            };
        }
        JobRepository.Add(job);
        job.StartJob();
        WriteObject(job);
    });
}
// Resolves the target AKS cluster, fetches its admin-listed user kubeconfig,
// locates the kubernetes-dashboard pod and its container port via kubectl, then
// starts a background tunnel job from ListenPort to the dashboard port
// (optionally popping a browser at the derived dashboard URL).
public override void ExecuteCmdlet()
{
    base.ExecuteCmdlet();
    // Normalize the parameter sets down to ResourceGroupName + Name.
    switch (ParameterSetName)
    {
        case IdParameterSet:
        {
            var resource = new ResourceIdentifier(Id);
            ResourceGroupName = resource.ResourceGroupName;
            Name = resource.ResourceName;
            break;
        }
        case InputObjectParameterSet:
        {
            var resource = new ResourceIdentifier(InputObject.Id);
            ResourceGroupName = resource.ResourceGroupName;
            Name = resource.ResourceName;
            break;
        }
    }
    RunCmdLet(() =>
    {
        // kubectl must be on PATH — we shell out to it below.
        if (!GeneralUtilities.Probe("kubectl"))
        {
            throw new AzPSApplicationException(Resources.KubectlIsRequriedToBeInstalledAndOnYourPathToExecute);
        }
        // Write the clusterUser kubeconfig (raw bytes from the credential list) to a temp file.
        var tmpFileName = Path.GetTempFileName();
        var credentials = Client.ManagedClusters.ListClusterAdminCredentials(ResourceGroupName, Name).Kubeconfigs;
        var encoded = credentials.First(credential => credential.Name.Equals("clusterUser")).Value;
        AzureSession.Instance.DataStore.WriteFile(
            tmpFileName,
            Encoding.UTF8.GetString(encoded));
        WriteVerbose(string.Format(
            Resources.RunningKubectlGetPodsKubeconfigNamespaceSelector,
            tmpFileName));
        // Ask kubectl for the dashboard pod's name (output is like "pods/<name>" or "pod/<name>").
        var proc = new Process
        {
            StartInfo = new ProcessStartInfo
            {
                FileName = "kubectl",
                Arguments = $"get pods --kubeconfig {tmpFileName} --namespace kube-system --output name --selector k8s-app=kubernetes-dashboard",
                UseShellExecute = false,
                RedirectStandardOutput = true,
                CreateNoWindow = true
            }
        };
        proc.Start();
        var dashPodName = proc.StandardOutput.ReadToEnd();
        proc.WaitForExit();
        // remove "pods/" or "pod/"
        dashPodName = dashPodName.Substring(dashPodName.IndexOf('/') + 1).TrimEnd('\r', '\n');
        // Second kubectl call: read the dashboard container's port via jsonpath.
        var procDashboardPort = new Process
        {
            StartInfo = new ProcessStartInfo
            {
                FileName = "kubectl",
                Arguments = $"get pods --kubeconfig {tmpFileName} --namespace kube-system --selector k8s-app=kubernetes-dashboard --output jsonpath='{{.items[0].spec.containers[0].ports[0].containerPort}}'",
                UseShellExecute = false,
                RedirectStandardOutput = true,
                CreateNoWindow = true
            }
        };
        procDashboardPort.Start();
        var dashboardPortOutput = procDashboardPort.StandardOutput.ReadToEnd();
        procDashboardPort.WaitForExit();
        // jsonpath output is wrapped in single quotes — strip them before parsing.
        dashboardPortOutput = dashboardPortOutput.Replace("'", "");
        int dashboardPort = int.Parse(dashboardPortOutput);
        // Port 8443 implies the dashboard serves HTTPS; anything else is plain HTTP.
        string protocol = dashboardPort == 8443 ? "https" : "http";
        string dashboardUrl = $"{protocol}://{ListenAddress}:{ListenPort}";
        //TODO: check in cloudshell
        //TODO: support for --address {ListenAddress}
        WriteVerbose(string.Format(
            Resources.RunningInBackgroundJobKubectlTunnel,
            tmpFileName, dashPodName));
        // Replace any previously started tunnel job of the same name.
        var exitingJob = JobRepository.Jobs.FirstOrDefault(j => j.Name == "Kubectl-Tunnel");
        if (exitingJob != null)
        {
            WriteVerbose(Resources.StoppingExistingKubectlTunnelJob);
            exitingJob.StopJob();
            JobRepository.Remove(exitingJob);
        }
        var job = new KubeTunnelJob(tmpFileName, dashPodName, ListenPort, dashboardPort);
        if (!DisableBrowser)
        {
            // Open the dashboard once the tunnel job reports it has started.
            WriteVerbose(Resources.SettingUpBrowserPop);
            job.StartJobCompleted += (sender, evt) =>
            {
                WriteVerbose(string.Format(Resources.StartingBrowser, dashboardUrl));
                PopBrowser(dashboardUrl);
            };
        }
        JobRepository.Add(job);
        job.StartJob();
        WriteObject(job);
    });
}
// Verifies the planner's ordering across four jobs: with two WFS nodes, jobs are
// picked by (priority, due date): now/high, +1day/high, now/low, +1day/low.
// After each job is completed and Calculate() runs again, the next expected job
// must be the active one.
public void PrioritySortTest()
{
    // Four jobs spanning both priorities and two due dates.
    var jobDue1DayLow = CreateNewWfsJob(dueDate: TimeProvider.GetUtcNow().AddDays(1));
    jobDue1DayLow.Priority = Priority.low;
    JobRepository.Add(jobDue1DayLow);
    var jobDue1DayHigh = CreateNewWfsJob(dueDate: TimeProvider.GetUtcNow().AddDays(1));
    jobDue1DayHigh.Priority = Priority.high;
    JobRepository.Add(jobDue1DayHigh);
    var jobDueNowDayLow = CreateNewWfsJob();
    jobDueNowDayLow.Priority = Priority.low;
    JobRepository.Add(jobDueNowDayLow);
    var jobDueNowDayHigh = CreateNewWfsJob();
    jobDueNowDayHigh.Priority = Priority.high;
    JobRepository.Add(jobDueNowDayHigh);
    Assert.That(JobRepository.WaitingJobs().Count(), Is.EqualTo(4));

    // Two idle WFS nodes that accept any task.
    var wfsPluginMock = CreateWfsMock("1");
    var wfsPluginMock2 = CreateWfsMock("2");
    wfsPluginMock.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    wfsPluginMock2.SetupGet(p => p.Busy).Returns(false);
    wfsPluginMock2.Setup(p => p.CheckAndEstimate(It.IsAny<ExecutionTask>())).Returns(true);
    var planner = new SimplePlanner(new List<IPlugin> { wfsPluginMock.Object, wfsPluginMock2.Object }, JobRepository, Logging, _callBackService.Object);

    planner.Calculate();
    // 1st pick: due now + high priority.
    var activeJob = JobRepository.ActiveJobs().First(m => m.Id == jobDueNowDayHigh.Id);
    Assert.That(activeJob.Urn, Is.EqualTo(jobDueNowDayHigh.Urn));
    activeJob.Plan.GetCurrentTask().State = ExecutionState.Done;
    activeJob.Plan.MoveToNextTask();
    JobRepository.Update(activeJob);

    planner.Calculate();
    // 2nd pick: due tomorrow + high priority.
    activeJob = JobRepository.ActiveJobs().First(m => m.Id == jobDue1DayHigh.Id);
    Assert.That(activeJob.Urn, Is.EqualTo(jobDue1DayHigh.Urn));
    activeJob.Plan.GetCurrentTask().State = ExecutionState.Done;
    activeJob.Plan.MoveToNextTask();
    JobRepository.Update(activeJob);

    planner.Calculate();
    // 3rd pick: due now + low priority.
    activeJob = JobRepository.ActiveJobs().First(m => m.Id == jobDueNowDayLow.Id);
    Assert.That(activeJob.Urn, Is.EqualTo(jobDueNowDayLow.Urn));
    activeJob.Plan.GetCurrentTask().State = ExecutionState.Done;
    activeJob.Plan.MoveToNextTask();
    JobRepository.Update(activeJob);

    planner.Calculate();
    // 4th pick: due tomorrow + low priority.
    activeJob = JobRepository.ActiveJobs().First(m => m.Id == jobDue1DayLow.Id);
    Assert.That(activeJob.Urn, Is.EqualTo(jobDue1DayLow.Urn));
    activeJob.Plan.GetCurrentTask().State = ExecutionState.Done;
    activeJob.Plan.MoveToNextTask();
    JobRepository.Update(activeJob);

    // Everything processed: no waiting or active jobs remain, all four are done.
    Assert.That(JobRepository.WaitingJobs(), Is.Empty);
    Assert.That(JobRepository.ActiveJobs(), Is.Empty);
    Assert.That(JobRepository.DoneJobs().Count(), Is.EqualTo(4));
}