/// <summary>
/// Registers a job of the given type with the scheduler and attaches a cron-based trigger.
/// Throws <see cref="ArgumentException"/> when a job with the same name/group already
/// exists or when the cron expression does not parse.
/// </summary>
public void AddJob(string jobName, string jobGroup, string triggerName, string triggerGroup, string cronExpression, Type jobType, bool isDurable, IDictionary<string, object> jobProperties)
{
    if (IsJobExist(jobName, jobGroup))
    {
        throw new ArgumentException(string.Format("Job name {0} Group Name {1} already exists", jobName, jobGroup));
    }

    if (!JobUtil.IsValidCronExpression(cronExpression))
    {
        throw new ArgumentException(string.Format("Invalid Expression {0}", cronExpression));
    }

    var jobDetail = JobBuilder.Create()
        .OfType(jobType)
        .WithIdentity(jobName, jobGroup)
        .StoreDurably(isDurable)
        .RequestRecovery(true)
        .Build();

    // Build the trigger from the cron expression; extra job data is optional.
    var triggerBuilder = TriggerBuilder.Create()
        .WithIdentity(triggerName, triggerGroup)
        .WithCronSchedule(cronExpression);

    bool hasJobData = jobProperties != null && jobProperties.Count > 0;
    if (hasJobData)
    {
        triggerBuilder.UsingJobData(new JobDataMap(jobProperties));
    }

    _scheduler.ScheduleJob(jobDetail, triggerBuilder.Build());
}
/// <summary>
/// Creates a table entity describing the result of a single Jenkins build.
/// Pull request fields are populated only when <paramref name="prInfo"/> is supplied.
/// </summary>
public BuildResultEntity(
    BoundBuildId buildId,
    DateTimeOffset buildDateTime,
    TimeSpan duration,
    string jobKind,
    string machineName,
    BuildResultClassification classification,
    PullRequestInfo prInfo)
{
    JobName = buildId.JobId.Name;
    JobKind = jobKind;
    // BUG FIX: use the constructor argument, not the BuildId property. HostRaw and
    // BuildNumber are assigned below this line, so the computed BuildId property was
    // being read while the entity was only partially initialized. This also matches
    // the sibling BuildFailureEntity constructor, which uses buildId.JobId directly.
    ViewName = AzureUtil.GetViewName(buildId.JobId);
    BuildNumber = buildId.Number;
    HostRaw = buildId.Host.ToString();
    ClassificationKindRaw = classification.Kind.ToString();
    ClassificationName = classification.Name;
    // Normalize to UTC before storing; asserted at the bottom.
    BuildDateTime = buildDateTime.UtcDateTime;
    MachineName = machineName;
    IsPullRequest = JobUtil.IsPullRequestJobName(buildId.JobId);
    DurationSeconds = (int)duration.TotalSeconds;
    if (prInfo != null)
    {
        PullRequestId = prInfo.Id;
        PullRequestAuthor = prInfo.Author;
        PullRequestAuthorEmail = prInfo.AuthorEmail;
        PullRequestUrl = prInfo.PullUrl;
        PullRequestSha1 = prInfo.Sha1;
        Debug.Assert(HasPullRequestInfo);
        Debug.Assert(PullRequestInfo != null);
    }
    Debug.Assert(BuildDateTime.Kind == DateTimeKind.Utc);
}
/// <summary>
/// Builds the summary model: counts of non-succeeded builds grouped by job,
/// most-failing jobs first, optionally truncated by the filter's limit.
/// </summary>
private BuildResultSummaryModel GetBuildResultSummaryModel(BuildFilterModel filter)
{
    var model = new BuildResultSummaryModel() { Filter = filter, };

    var failureCountsByJob = _buildUtil
        .GetBuildResults(filter.StartDate, filter.ViewName)
        .Where(result => filter.IncludePullRequests || !JobUtil.IsPullRequestJobName(result.JobId.Name))
        .Where(result => result.ClassificationKind != ClassificationKind.Succeeded)
        .GroupBy(result => result.JobId)
        .Select(g => new { JobId = g.Key, Count = g.Count() })
        .OrderByDescending(g => g.Count)
        .AsEnumerable();

    if (filter.Limit.HasValue)
    {
        failureCountsByJob = failureCountsByJob.Take(filter.Limit.Value);
    }

    foreach (var item in failureCountsByJob)
    {
        model.Entries.Add(new BuildResultSummaryEntry() { JobId = item.JobId, Count = item.Count });
    }

    return model;
}
/// <summary>
/// A view of the builds grouped by the result.
/// </summary>
/// <returns></returns>
public ActionResult View(bool pr = false, DateTimeOffset? startDate = null, string viewName = AzureUtil.ViewNameRoslyn)
{
    var filter = CreateBuildFilter(actionName: nameof(View), viewName: viewName, startDate: startDate, pr: pr);

    var buildResults = _buildUtil.GetBuildResults(filter.StartDate, viewName)
        .Where(r => pr || !JobUtil.IsPullRequestJobName(r.JobId))
        .ToList();

    var succeededCount = buildResults.Count(r => r.ClassificationKind == ClassificationKind.Succeeded);

    // Only non-succeeded builds are broken out by classification name.
    var failureGroups = buildResults
        .Where(r => r.ClassificationKind != ClassificationKind.Succeeded)
        .GroupBy(r => r.ClassificationName)
        .Select(g => new BuildViewModel() { KindName = g.Key, Count = g.Count() })
        .ToList();

    var model = new BuildViewSummaryModel()
    {
        Filter = filter,
        TotalBuildCount = buildResults.Count,
        TotalSucceededCount = succeededCount,
        Builds = failureGroups
    };
    return View(viewName: "View", model: model);
}
/// <summary>
/// Shows builds of a single classification kind, bucketed by view name.
/// </summary>
public ActionResult KindByViewName(string name = null, bool pr = false, DateTime? startDate = null)
{
    var filter = CreateBuildFilter(actionName: nameof(KindByViewName), name: name, startDate: startDate, pr: pr);
    var startDateValue = startDate ?? DateTimeOffset.UtcNow - TimeSpan.FromDays(1);

    var results = _buildUtil
        .GetBuildResultsByKindName(startDateValue, name, AzureUtil.ViewNameAll)
        .Where(result => pr || !JobUtil.IsPullRequestJobName(result.JobId))
        .ToList();

    var buildsPerView = results
        .GroupBy(result => result.ViewName)
        .Select(g => new BuildViewNameModel() { ViewName = g.Key, Count = g.Count() })
        .ToList();

    var model = new BuildResultKindByViewNameModel()
    {
        Filter = filter,
        ClassificationKind = name,
        Builds = buildsPerView,
        TotalResultCount = results.Count
    };
    return View(viewName: "KindByViewName", model: model);
}
/// <summary>
/// Shows the elapsed time of each build of a single job, plus the total across
/// all of that job's builds in the window.
/// </summary>
public ActionResult JobElapsedTimePerBuild(bool pr = false, DateTime? startDate = null, string viewName = AzureUtil.ViewNameRoslyn, string jobName = "dotnet_coreclr/master/checked_windows_nt_bld")
{
    var startDateValue = startDate ?? DateTimeOffset.UtcNow - TimeSpan.FromDays(1);
    var filter = CreateBuildFilter(actionName: nameof(JobElapsedTimePerBuild), viewName: viewName, startDate: startDate, pr: pr);

    var results = _buildUtil
        .GetBuildResults(startDateValue, viewName)
        // BUG FIX: '&&' binds tighter than '||', so the original predicate
        // (pr || !IsPR(x) && x.JobId.Name == jobName) ignored the jobName filter
        // entirely whenever pr was true. The jobName restriction must always
        // apply; 'pr' only controls whether pull-request jobs are included.
        .Where(x => (pr || !JobUtil.IsPullRequestJobName(x.JobId)) && x.JobId.Name == jobName)
        .ToList();

    // Total elapsed time (seconds) across all builds of this job.
    var totalETOfCurrJob = results.Sum(x => x.DurationSeconds);

    var model = new JobElapsedTimePerBuildModel()
    {
        Filter = filter,
        TotalBuildCount = results.Count,
        TotalETOfCurrJob = totalETOfCurrJob,
        Entries = results
    };
    return View(viewName: "JobElapsedTimePerBuild", model: model);
}
/// <summary>
/// MVC endpoint that registers a new scheduled job from the posted view model.
/// Trigger properties are supplied as "key=value" strings; malformed entries are
/// skipped. Returns a JSON payload indicating success or the failure message.
/// </summary>
public ActionResult AddJob(AddJobViewModel model)
{
    try
    {
        var type = JobUtil.Create(model.NameSpace, string.Format("{0}.{1}", model.NameSpace, model.ClassName));
        var properties = new Dictionary<string, object>();
        if (model.TriggerProperties != null)
        {
            foreach (var keyVal in model.TriggerProperties)
            {
                // BUG FIX: the original wrapped this in an empty catch to paper over
                // malformed entries (missing '=') and duplicate keys. Validate
                // explicitly instead of swallowing exceptions.
                var splitted = keyVal.Split('=');
                if (splitted.Length < 2)
                {
                    continue; // not a key=value pair; skip, as before
                }
                var key = splitted[0].Replace(" ", "");
                if (!properties.ContainsKey(key))
                {
                    properties.Add(key, splitted[1]);
                }
            }
        }
        Scheduler.Instance.AddJob(model.JobName, model.JobGroup, model.JobName, model.JobGroup, model.CronExpression, type, model.IsDurable, properties);
    }
    catch (Exception ex)
    {
        return Json(new { success = false, responseMessage = ex.Message });
    }
    return Json(new { success = true });
}
/// <summary>
/// Increments the appropriate success/failure counter for the build, split by
/// whether the job is a pull-request job or a commit job.
/// </summary>
private async Task PopulateCounters(BuildResultEntity result)
{
    await _buildCounterUtil.UpdateAsync(counters =>
    {
        bool succeeded = result.ClassificationKind == ClassificationKind.Succeeded;
        bool isPullRequest = JobUtil.IsPullRequestJobName(result.JobName);

        if (isPullRequest && succeeded)
        {
            counters.PullRequestSucceededCount++;
        }
        else if (isPullRequest)
        {
            counters.PullRequestFailedCount++;
        }
        else if (succeeded)
        {
            counters.CommitSucceededCount++;
        }
        else
        {
            counters.CommitFailedCount++;
        }
    });
}
/// <summary>
/// Builds the summary of test-case failures grouped by test identifier,
/// most-failing tests first, optionally truncated by the filter's limit.
/// </summary>
private TestFailureSummaryModel GetTestFailureSummaryModel(BuildFilterModel filter)
{
    var summary = new TestFailureSummaryModel() { Filter = filter, };

    var failuresByTest = _buildUtil
        .GetTestCaseFailures(filter.StartDate, filter.ViewName)
        .Where(f => filter.IncludePullRequests || !JobUtil.IsPullRequestJobName(f.BuildId.JobName))
        .GroupBy(f => f.Identifier)
        .Select(g => new { Key = g.Key, Count = g.Count() })
        .OrderByDescending(g => g.Count)
        .AsEnumerable();

    if (filter.Limit.HasValue)
    {
        failuresByTest = failuresByTest.Take(filter.Limit.Value);
    }

    foreach (var failure in failuresByTest)
    {
        summary.Entries.Add(new TestFailureSummaryEntry() { Name = failure.Key, Count = failure.Count });
    }

    return summary;
}
/// <summary>
/// Schedules an <c>HttpRequestJob</c> for the given task, triggered by the
/// task's cron expression. The task id and timeout travel in the job data map.
/// </summary>
/// <param name="model">Task definition: name, group, cron, timeout, etc.</param>
/// <exception cref="ArgumentException">
/// Thrown when a job with the same name/group already exists or when the cron
/// expression does not parse.
/// </exception>
public async Task AddJob(Model.Task model)
{
    if (await IsJobExist(model.Name, model.JobGroup))
    {
        throw new ArgumentException(string.Format("Job name {0} Group Name {1} already exists", model.Name, model.JobGroup));
    }
    if (!JobUtil.IsValidCronExpression(model.Cron))
    {
        throw new ArgumentException(string.Format("Invalid Expression {0}", model.Cron));
    }

    // NOTE(review): durability and recovery are both keyed off IsDeleted == 1,
    // which reads oddly (a *deleted* task being durable/recoverable) — confirm
    // the intended semantics of IsDeleted before changing.
    var jobDetail = JobBuilder.Create()
        .OfType(typeof(HttpRequestJob))
        .WithIdentity(model.Name, model.JobGroup)
        .StoreDurably(model.IsDeleted == 1)
        .RequestRecovery(model.IsDeleted == 1)
        .UsingJobData("TaskId", model.Id)
        .UsingJobData("Timeout", model.Timeout)
        .Build();

    //build trigger by cron expression
    var triggerBuilder = TriggerBuilder.Create()
        .WithIdentity(model.Name, model.JobGroup)
        .WithCronSchedule(model.Cron);
    var trigger = triggerBuilder.Build();

    await _scheduler.ScheduleJob(jobDetail, trigger);
}
/// <summary>
/// Returns, for every failing test in the window, how many failures came from
/// pull-request jobs versus commit jobs.
/// </summary>
public List<TestFailureData> GetTestFailures([FromUri] DateTimeOffset? startDate = null, [FromUri] string viewName = AzureUtil.ViewNameAll)
{
    var startDateValue = startDate ?? DateTimeOffset.UtcNow - TimeSpan.FromDays(1);
    var list = new List<TestFailureData>();

    foreach (var group in _buildUtil.GetTestCaseFailures(startDateValue, viewName).GroupBy(x => x.Identifier))
    {
        // Split each test's failures into PR-triggered and commit-triggered.
        var prFailures = group.Count(e => JobUtil.IsPullRequestJobName(e.BuildId.JobName));
        var commitFailures = group.Count() - prFailures;

        list.Add(new TestFailureData()
        {
            Name = group.Key,
            TotalFailures = commitFailures + prFailures,
            CommitFailures = commitFailures,
            PullRequestFailures = prFailures
        });
    }

    return list;
}
/// <summary>
/// Method is processing integration config or integration folder item.
/// </summary>
/// <param name="integrationConfigDataSource">The item which contains integration config data.</param>
/// <param name="processIntegrationItemsOptions">The process integration items options.</param>
public static void ProcessTree([NotNull] Item integrationConfigDataSource, [NotNull] ProcessIntegrationItemsOptions processIntegrationItemsOptions)
{
    Assert.ArgumentNotNull(integrationConfigDataSource, "integrationConfigDataSource");
    Assert.ArgumentNotNull(processIntegrationItemsOptions, "processIntegrationItemsOptions");

    CacheableIntegrationConfigData cachedConfigData = IntegrationCache.GetIntegrationConfigData(integrationConfigDataSource.ID);

    // Skip when integration is disabled, or when cached data is still fresh
    // and no forced refresh was requested.
    bool needsProcessing = processIntegrationItemsOptions.Force || cachedConfigData == null || cachedConfigData.IsExpired;
    if (IntegrationDisabler.CurrentValue || !needsProcessing)
    {
        return;
    }

    if (!processIntegrationItemsOptions.AsyncIntegration)
    {
        Instance.ProcessTree(processIntegrationItemsOptions, integrationConfigDataSource);
        return;
    }

    // Hand off to a background job when asynchronous integration is requested.
    string jobName = string.Format("SharePoint_Integration_{0}", integrationConfigDataSource.ID);
    var parameters = new object[] { processIntegrationItemsOptions, integrationConfigDataSource };
    JobUtil.StartJob(jobName, Instance, "ProcessTree", parameters);
}
/// <summary>
/// Validates that the attributed value is a parseable cron expression.
/// Null values are considered valid; pair with [Required] to reject them.
/// </summary>
protected override ValidationResult IsValid(object value, ValidationContext validationContext)
{
    // BUG FIX: value.ToString() threw NullReferenceException for null input.
    // By convention a validation attribute treats null as valid and leaves
    // null-checking to [Required].
    if (value == null)
    {
        return ValidationResult.Success;
    }

    bool isValid = JobUtil.IsValidCronExpression(value.ToString());
    return isValid
        ? ValidationResult.Success
        : new ValidationResult(FormatErrorMessage(validationContext.DisplayName));
}
/// <summary>
/// Update the table storage to contain the result of the specified build.
/// Fetches the build info and job kind, optionally resolves pull-request
/// metadata, classifies the build state, and returns the resulting entity.
/// </summary>
private async Task<BuildResultEntity> GetBuildFailureEntity(BuildId id)
{
    var buildInfo = await _client.GetBuildInfoAsync(id);
    var jobKind = await _client.GetJobKindAsync(id.JobId);

    // PR metadata is best-effort: only PR-named jobs have it, and even then
    // the lookup can fail (see the TODO below).
    PullRequestInfo prInfo = null;
    if (JobUtil.IsPullRequestJobName(id.JobId.Name))
    {
        try
        {
            prInfo = await _client.GetPullRequestInfoAsync(id);
        }
        catch (Exception ex)
        {
            // TODO: Flow builds don't have the PR directly in the triggered jobs. Have to walk
            // back up to the parent job. For now swallow this error so we don't trigger false
            // positives in the error detection.
            _textWriter.WriteLine($"Error pulling PR info for {id}: {ex.Message}");
        }
    }

    // Map the Jenkins build state onto our classification. Only the Failed
    // state requires the deeper (async) failure analysis.
    BuildResultClassification classification;
    switch (buildInfo.State)
    {
        case BuildState.Succeeded:
            classification = BuildResultClassification.Succeeded;
            break;

        case BuildState.Aborted:
            classification = BuildResultClassification.Aborted;
            break;

        case BuildState.Failed:
            classification = await PopulateFailedBuildResult(buildInfo, jobKind, prInfo);
            break;

        case BuildState.Running:
            // Build still in progress; its classification is not yet known.
            classification = BuildResultClassification.Unknown;
            break;

        default:
            throw new Exception($"Invalid enum: {buildInfo.State} for {id.JobName} - {id.Number}");
    }

    return (new BuildResultEntity(
        buildInfo.Id,
        buildInfo.Date,
        buildInfo.Duration,
        jobKind: jobKind,
        machineName: buildInfo.MachineName,
        classification: classification,
        prInfo: prInfo));
}
/// <summary>
/// Calculates the video bitrate needed to hit the desired final file size for a
/// video job that carries bitrate-calculation info (automated / one-click
/// encoding, where audio jobs are linked to a video job). The sizes of all
/// linked audio tracks are subtracted from the desired output size, taking
/// container overhead into account, and the resulting bitrate is applied to
/// every video job in the chain.
/// </summary>
/// <param name="mainForm">the main application window (unused here but kept for the postprocessor signature)</param>
/// <param name="ajob">the job to process; ignored unless it is a VideoJob with BitrateCalculationInfo</param>
/// <returns>a log of the calculation, or null when the job is not applicable</returns>
public static LogItem calculateBitrate(MainForm mainForm, Job ajob)
{
    // Only video jobs that explicitly request bitrate calculation are handled.
    if (!(ajob is VideoJob))
    {
        return(null);
    }
    VideoJob job = (VideoJob)ajob;
    if (job.BitrateCalculationInfo == null)
    {
        return(null);
    }

    BitrateCalculationInfo b = job.BitrateCalculationInfo;
    LogItem log = new LogItem("Bitrate calculation for video");

    // Collect the audio streams whose sizes must be subtracted from the target.
    List<AudioBitrateCalculationStream> audioStreams = new List<AudioBitrateCalculationStream>();
    foreach (string s in b.AudioFiles)
    {
        audioStreams.Add(new AudioBitrateCalculationStream(s));
    }

    double framerate;
    ulong framecount;
    JobUtil.getInputProperties(out framecount, out framerate, job.Input);

    CalcData data = new CalcData((long)framecount, (decimal)framerate, b.Container, job.Settings.Codec, job.Settings.NbBframes > 0, audioStreams.ToArray());
    data.TotalSize = b.DesiredSize;

    try
    {
        data.CalcByTotalSize();
    }
    catch (Exception e)
    {
        log.LogValue("Calculation failed", e, ImageType.Error);
        return(log);
    }

    log.LogValue("Desired size after subtracting audio", Math.Floor(data.VideoSize.KBExact) + " KBs");
    log.LogValue("Calculated desired bitrate", (int)data.VideoBitrate + " kbit/s");

    // Apply the calculated bitrate to every linked video job; non-video jobs
    // in the list are skipped.
    foreach (TaggedJob t in b.VideoJobs)
    {
        if (t.Job is VideoJob)
        {
            ((VideoJob)t.Job).Settings.BitrateQuantizer = (int)data.VideoBitrate;
        }
    }

    return(log);
}
/// <summary>
/// default constructor
/// initializes all the GUI components, initializes the internal objects and makes a default selection for all the GUI dropdowns
/// In addition, all the jobs and profiles are being loaded from the harddisk
/// </summary>
public MeGUIInfo()
{
    // Core helper objects.
    this.codecs = new CodecManager();
    this.gen = new CommandLineGenerator();
    this.path = System.Windows.Forms.Application.StartupPath;
    this.jobs = new Dictionary<string, Job>();
    this.skipJobs = new List<Job>();
    this.logBuilder = new StringBuilder();
    this.jobUtil = new JobUtil(this);
    this.settings = new MeGUISettings();
    this.calc = new BitrateCalculator();

    // Two audio stream slots, both initialized empty.
    audioStreams = new AudioStream[2];
    audioStreams[0].path = "";
    audioStreams[0].output = "";
    audioStreams[0].settings = null;
    audioStreams[1].path = "";
    audioStreams[1].output = "";
    audioStreams[1].settings = null;

    this.videoEncoderProvider = new VideoEncoderProvider();
    this.audioEncoderProvider = new AudioEncoderProvider();

    // Load persisted profiles, settings and jobs from disk.
    this.profileManager = new ProfileManager(this.path);
    this.profileManager.LoadProfiles(videoProfile, audioProfile);
    this.loadSettings();
    this.loadJobs();
    this.dialogManager = new DialogManager(this);

#warning refactor menus
    // Append a menu entry for every registered muxer package.
    int index = menuItem1.MenuItems.Count;
    foreach (IMuxing muxer in PackageSystem.MuxerProviders.Values)
    {
        MenuItem newMenuItem = new MenuItem();
        newMenuItem.Text = muxer.Name;
        newMenuItem.Tag = muxer;
        newMenuItem.Index = index;
        index++;
        menuItem1.MenuItems.Add(newMenuItem);
        newMenuItem.Click += new System.EventHandler(this.mnuMuxer_Click);
    }

    // Append a menu entry for every registered tool package.
    index = mnuTools.MenuItems.Count;
    foreach (ITool tool in PackageSystem.Tools.Values)
    {
        MenuItem newMenuItem = new MenuItem();
        newMenuItem.Text = tool.Name;
        newMenuItem.Tag = tool;
        newMenuItem.Index = index;
        index++;
        mnuTools.MenuItems.Add(newMenuItem);
        newMenuItem.Click += new System.EventHandler(this.mnuTool_Click);
    }

    //MessageBox.Show(String.Join("|", this.GetType().Assembly.GetManifestResourceNames()));
    // Load the changelog text from the embedded resource into the UI.
    using (TextReader r = new StreamReader(this.GetType().Assembly.GetManifestResourceStream("MeGUI.Changelog.txt")))
    {
        mainForm.Changelog.Text = r.ReadToEnd();
    }
}
/// <summary>
/// Deletes unprocessed-build entries that have been stale for more than 12
/// hours (3 days for GC stress jobs, which legitimately run long) and returns
/// an email message describing what was removed, or null when nothing was stale.
/// </summary>
internal async Task<SendGridMessage> Clean(CancellationToken cancellationToken)
{
    var limit = DateTimeOffset.UtcNow - TimeSpan.FromHours(12);
    var filter = FilterUtil.Column(
        nameof(UnprocessedBuildEntity.LastUpdate),
        limit,
        ColumnOperator.LessThanOrEqual);
    var query = new TableQuery<UnprocessedBuildEntity>().Where(filter);
    var list = await AzureUtil.QueryAsync(_unprocessedBuildTable, query, cancellationToken);
    if (list.Count == 0)
    {
        return null;
    }

    var textBuilder = new StringBuilder();
    var htmlBuilder = new StringBuilder();
    foreach (var entity in list)
    {
        var boundBuildId = entity.BoundBuildId;
        var buildId = boundBuildId.BuildId;

        // GC Stress jobs can correctly execute for up to 3 days. This is a bit of an outlier but one we
        // need to handle.
        if (JobUtil.IsGCStressJob(buildId.JobId))
        {
            var stressLimit = DateTimeOffset.UtcNow - TimeSpan.FromDays(3);
            if (entity.LastUpdate >= stressLimit)
            {
                continue;
            }
        }

        _logger.WriteLine($"Deleting stale data {boundBuildId.Uri}");

        // BUG FIX: use AppendLine (Append ran every message together on one
        // line in the plain-text email) and fix the "Eror" typo.
        textBuilder.AppendLine($"Deleting stale data: {boundBuildId.Uri}");
        textBuilder.AppendLine($"Error: {entity.StatusText}");
        htmlBuilder.Append($@"<div>");
        htmlBuilder.Append($@"<div>Build <a href=""{boundBuildId.Uri}"">{buildId.JobName} {buildId.Number}</a></div>");
        htmlBuilder.Append($@"<div>Error: {WebUtility.HtmlEncode(entity.StatusText)}</div>");
        htmlBuilder.Append($@"</div>");
    }

    await AzureUtil.DeleteBatchUnordered(_unprocessedBuildTable, list);
    return new SendGridMessage()
    {
        Text = textBuilder.ToString(),
        Html = htmlBuilder.ToString()
    };
}
/// <summary>
/// Calculates the video bitrate needed to hit the desired final file size for a
/// video job that carries bitrate-calculation info (automated / one-click
/// encoding, where audio jobs are linked to a video job). The sizes of all
/// linked audio tracks are subtracted from the desired output size, taking
/// container overhead into account, and the resulting bitrate is applied to
/// every video job in the chain.
/// </summary>
/// <param name="mainForm">the main application window (unused here but kept for the postprocessor signature)</param>
/// <param name="ajob">the job to process; ignored unless it is a VideoJob with BitrateCalculationInfo</param>
/// <returns>a log of the calculation, or null when the job is not applicable</returns>
public static LogItem calculateBitrate(MainForm mainForm, Job ajob)
{
    // Only video jobs that explicitly request bitrate calculation are handled.
    if (!(ajob is VideoJob))
    {
        return null;
    }
    VideoJob job = (VideoJob)ajob;
    if (job.BitrateCalculationInfo == null)
    {
        return null;
    }

    BitrateCalculationInfo b = job.BitrateCalculationInfo;
    LogItem log = new LogItem("Bitrate calculation for video");

    // Collect the audio streams whose sizes must be subtracted from the target.
    List<AudioBitrateCalculationStream> audioStreams = new List<AudioBitrateCalculationStream>();
    foreach (string s in b.AudioFiles)
    {
        audioStreams.Add(new AudioBitrateCalculationStream(s));
    }

    double framerate;
    ulong framecount;
    JobUtil.getInputProperties(out framecount, out framerate, job.Input);

    int bitrateKBits;
    ulong videoSizeKB = 0;
    try
    {
        bitrateKBits = BitrateCalculator.CalculateBitrateKBits(job.Settings.Codec, job.Settings.NbBframes > 0, b.Container, audioStreams.ToArray(), b.DesiredSize.Bytes, framecount, framerate, out videoSizeKB);
    }
    catch (CalculationException e)
    {
        log.LogValue("Calculation failed", e, ImageType.Error);
        return log;
    }

    log.LogValue("Desired size after subtracting audio", videoSizeKB + "KBs");
    log.LogValue("Calculated desired bitrate", bitrateKBits + "kbit/s");

    foreach (TaggedJob t in b.VideoJobs)
    {
        // BUG FIX: guard the cast as the sibling overload does; a non-video job
        // in the list would otherwise throw InvalidCastException.
        if (t.Job is VideoJob)
        {
            ((VideoJob)t.Job).Settings.BitrateQuantizer = bitrateKBits;
        }
    }

    return log;
}
/// <summary>
/// Creates a Jenkins client for the given URL, authenticating with the GitHub
/// connection string only when the job heuristically requires it.
/// </summary>
internal static JenkinsClient CreateJenkinsClient(Uri jenkinsUrl, JobId jobId)
{
    // TODO: don't authenticate when it's not https
    if (!JobUtil.IsAuthNeededHeuristic(jobId))
    {
        return new JenkinsClient(jenkinsUrl);
    }

    var githubConnectionString = CloudConfigurationManager.GetSetting(SharedConstants.GithubConnectionStringName);
    return new JenkinsClient(jenkinsUrl, githubConnectionString);
}
/// <summary>
/// Creates a Jenkins client for the build's host. When the job heuristically
/// requires authentication, the connection is forced to HTTPS and the GitHub
/// connection string is supplied.
/// </summary>
internal static JenkinsClient CreateJenkinsClient(BoundBuildId buildId)
{
    if (!JobUtil.IsAuthNeededHeuristic(buildId.JobId))
    {
        return new JenkinsClient(buildId.Host);
    }

    var githubConnectionString = CloudConfigurationManager.GetSetting(SharedConstants.GithubConnectionStringName);
    var host = buildId.GetHostUri(useHttps: true);
    return new JenkinsClient(host, githubConnectionString);
}
/// <summary>
/// Calculates the video bitrate needed to hit the desired final file size for a
/// video job that carries bitrate-calculation info (automated / one-click
/// encoding, where audio jobs are linked to a video job). The sizes of all
/// linked audio tracks are subtracted from the desired output size, taking
/// container overhead into account, and the resulting bitrate is applied to
/// every video job in the chain. Progress is reported via the main form's log.
/// </summary>
/// <param name="mainForm">the main application window, used for logging</param>
/// <param name="ajob">the job to process; ignored unless it is a VideoJob with BitrateCalculationInfo</param>
public static void calculateBitrate(MainForm mainForm, Job ajob)
{
    // Only video jobs that explicitly request bitrate calculation are handled.
    if (!(ajob is VideoJob))
    {
        return;
    }
    VideoJob job = (VideoJob)ajob;
    if (job.BitrateCalculationInfo == null)
    {
        return;
    }

    BitrateCalculationInfo b = job.BitrateCalculationInfo;
    mainForm.addToLog("Doing bitrate calculation...");

    // Collect the audio streams whose sizes must be subtracted from the target.
    List<AudioBitrateCalculationStream> audioStreams = new List<AudioBitrateCalculationStream>();
    foreach (string s in b.AudioFiles)
    {
        audioStreams.Add(new AudioBitrateCalculationStream(s));
    }

    double framerate;
    ulong framecount;
    JobUtil.getInputProperties(out framecount, out framerate, job.Input);

    int bitrateKBits;
    ulong videoSizeKB = 0;
    try
    {
        bitrateKBits = BitrateCalculator.CalculateBitrateKBits(job.Settings.Codec, job.Settings.NbBframes > 0, b.Container, audioStreams.ToArray(), b.DesiredSize.Bytes, framecount, framerate, out videoSizeKB);
    }
    catch (CalculationException e)
    {
        mainForm.addToLog("Calculation failed with message '{0}'", e);
        return;
    }

    mainForm.addToLog("Desired video size after subtracting audio size is {0}KBs. Setting the desired bitrate of the subsequent video jobs to {1} kbit/s.", videoSizeKB, bitrateKBits);

    foreach (TaggedJob t in b.VideoJobs)
    {
        // BUG FIX: guard the cast as the sibling overload does; a non-video job
        // in the list would otherwise throw InvalidCastException.
        if (t.Job is VideoJob)
        {
            ((VideoJob)t.Job).Settings.BitrateQuantizer = bitrateKBits;
        }
    }
}
/// <summary>
/// Raises an employee's job level inside a transaction. The raise is rejected
/// when the employee's salary has already been computed this month, or when the
/// requested level does not exist for the employee's job.
/// </summary>
/// <param name="empId">employee id</param>
/// <param name="jobLevel">the new job level to assign</param>
/// <returns>a DTO describing the recorded job-history detail</returns>
public JobHistoryDetailDTO DoSalaryIncreasing(long empId, int jobLevel)
{
    // BUG FIX: dispose the transaction deterministically via 'using'; the
    // original leaked it on every code path.
    using (var transaction = _humanManagerContext.Database.BeginTransaction())
    {
        try
        {
            DateTime today = System.DateTime.Today;
            EmployeeEntity employeeEntity = _humanManagerContext.Employees.Where(e => e.Id == empId)
                .Include(e => e.Job)
                .ThenInclude(j => j.JobLevel)
                .SingleOrDefault();
            // BUG FIX: SingleOrDefault can return null; fail with a clear
            // message instead of a NullReferenceException below.
            if (employeeEntity == null)
            {
                throw new Exception(string.Format("Employee {0} not found", empId));
            }

            JobHistoryEntity jobHistoryEntity = _humanManagerContext.JobHistorys.Where(jh => jh.EmployeeId == employeeEntity.Id && jh.JobId == employeeEntity.JobId).SingleOrDefault();

            // Reject the raise when this month's salary has already been computed.
            bool haveCounted = _humanManagerContext.SalaryHistories.Where(sh => sh.EmployeeId == empId && sh.CountedDate.Month == today.Month && sh.CountedDate.Year == today.Year).Any();
            if (haveCounted)
            {
                throw new Exception(SalaryMessageContant.SALARY_COUNTED);
            }

            // Validate and apply the new job level.
            JobLevelEntity jobLevelEntity = employeeEntity.Job.JobLevel;
            if (!JobUtil.IsLevelExit(jobLevelEntity, jobLevel))
            {
                throw new Exception(SalaryMessageContant.JOB_LEVEL_NOT_EXITS);
            }
            employeeEntity.JobLevel = jobLevel;

            // Record the level change in the job history.
            _humanManagerContext.JobHistoryDetails.Add(new JobHistoryDetailEntity(jobLevel, today, jobHistoryEntity));
            _humanManagerContext.SaveChanges();
            transaction.Commit();

            // NOTE(review): Firstname is concatenated with itself below; this
            // probably should be Firstname + Lastname — confirm before changing.
            return new JobHistoryDetailDTO(employeeEntity.Id, employeeEntity.Firstname + employeeEntity.Firstname, employeeEntity.JobId, employeeEntity.Job.JobTitle, jobLevel, System.DateTime.Today);
        }
        catch (Exception)
        {
            transaction.Rollback();
            // BUG FIX: 'throw ex;' reset the stack trace; a bare rethrow preserves it.
            throw;
        }
    }
}
/// <summary>
/// Lists all builds of a single classification kind within the time window.
/// </summary>
public ActionResult Kind(string name = null, bool pr = false, DateTime? startDate = null, string viewName = AzureUtil.ViewNameRoslyn)
{
    var filter = CreateBuildFilter(nameof(Kind), name, viewName, pr, startDate);
    var startDateValue = startDate ?? DateTimeOffset.UtcNow - TimeSpan.FromDays(1);

    var entries = _buildUtil
        .GetBuildResultsByKindName(startDateValue, name, viewName)
        .Where(result => pr || !JobUtil.IsPullRequestJobName(result.JobName))
        .ToList();

    var model = new BuildResultKindModel()
    {
        Filter = filter,
        ClassificationKind = name,
        Entries = entries,
    };
    return View(viewName: "Kind", model: model);
}
/// <summary>
/// Builds the model listing a single job's non-succeeded builds, ordered by
/// build number.
/// </summary>
private BuildResultModel GetBuildResultModel(string jobName, BuildFilterModel filter)
{
    var model = new BuildResultModel()
    {
        Filter = filter,
        JobId = JobId.ParseName(jobName),
    };

    var failedBuilds = _buildUtil
        .GetBuildResults(filter.StartDate, jobName, filter.ViewName)
        .Where(build => filter.IncludePullRequests || !JobUtil.IsPullRequestJobName(build.JobId.Name))
        .Where(build => build.ClassificationKind != ClassificationKind.Succeeded)
        .OrderBy(build => build.BuildNumber);

    model.Entries.AddRange(failedBuilds);
    return model;
}
/// <summary>
/// Creates a Jenkins client for the host. Authentication defaults to the
/// job-based heuristic when <paramref name="auth"/> is not specified; when
/// authenticating, the scheme is forced to HTTPS and the GitHub connection
/// string from app settings is used.
/// </summary>
internal static JenkinsClient CreateClient(Uri host, JobId jobId = null, bool? auth = null)
{
    bool needsAuth = auth ?? (jobId != null && JobUtil.IsAuthNeededHeuristic(jobId));

    var builder = new UriBuilder(host);
    if (!needsAuth)
    {
        return new JenkinsClient(builder.Uri);
    }

    builder.Scheme = Uri.UriSchemeHttps;
    var text = ConfigurationManager.AppSettings[SharedConstants.GithubConnectionStringName];
    return new JenkinsClient(builder.Uri, text);
}
/// <summary>
/// Aggregates every builds.csv under the root into a single all.csv, one line
/// per build: path, number, PR flag, result name and result category.
/// </summary>
private static void CollectData()
{
    var lines = Directory
        .EnumerateFiles(Root, "builds.csv", SearchOption.AllDirectories)
        .SelectMany(file => ReadBuildData(file))
        .Select(buildData =>
        {
            var path = JenkinsUtil.ConvertJobIdToPath(buildData.BuildId.JobId);
            var isPr = JobUtil.IsPullRequestJobName(buildData.BuildId.JobId);
            return $"{path},{buildData.BuildId.Number},{isPr},{buildData.ResultName},{buildData.ResultCategory}";
        })
        .ToArray();

    File.WriteAllLines(
        Path.Combine(Root, "all.csv"),
        lines);
}
/// <summary>
/// Returns details for a single failing test: every build it failed in, split
/// into pull-request and commit failures.
/// </summary>
public TestFailureData GetTestFailure([FromUri] string name, [FromUri] DateTimeOffset? startDate = null, string viewName = AzureUtil.ViewNameRoslyn)
{
    var startDateValue = startDate ?? DateTimeOffset.UtcNow - TimeSpan.FromDays(1);
    var builds = new List<BuildData>();
    var prCount = 0;
    var commitCount = 0;

    foreach (var entity in _buildUtil.GetTestCaseFailures(startDateValue, name, viewName))
    {
        var jobId = entity.BuildId.JobId;
        if (JobUtil.IsPullRequestJobName(jobId.Name))
        {
            prCount++;
        }
        else
        {
            commitCount++;
        }

        builds.Add(new BuildData()
        {
            JobName = jobId.Name,
            JobShortName = jobId.ShortName,
            JobUri = entity.BoundBuildId.JobUri.ToString(),
            MachineName = entity.MachineName,
            // NOTE(review): this reports the query's start date rather than the
            // build's own date — confirm whether that is intentional.
            DateTime = startDateValue,
        });
    }

    return new TestFailureData()
    {
        Name = name,
        Builds = builds,
        TotalFailures = prCount + commitCount,
        CommitFailures = commitCount,
        PullRequestFailures = prCount
    };
}
/// <summary>
/// A view of the elapsed time grouped by the result. Builds are bucketed into a
/// histogram by the number of decimal digits in their elapsed seconds.
/// </summary>
/// <returns></returns>
public ActionResult ElapsedTime(bool pr = false, DateTimeOffset? startDate = null, string viewName = AzureUtil.ViewNameRoslyn)
{
    var filter = CreateBuildFilter(actionName: nameof(ElapsedTime), viewName: viewName, startDate: startDate, pr: pr);
    var results = _buildUtil.GetBuildResults(filter.StartDate, viewName)
        .Where(x => pr || !JobUtil.IsPullRequestJobName(x.JobId))
        .ToList();
    var totalCount = results.Count;
    var totalSucceeded = results.Count(x => x.ClassificationKind == ClassificationKind.Succeeded);
    var runCounts = results
        .Select(x => new ElapsedTimeModel() { JobId = x.JobId, JobName = x.JobName, ElapsedTime = x.DurationSeconds })
        .ToList();

    // Histogram of builds per elapsed-time range (bucketed by digit count).
    List<int> runsPerETRange = new List<int>();
    for (int i = 0; i < _ETRangeCount; i++)
    {
        runsPerETRange.Add(0);
    }

    foreach (var runElapsedTime in runCounts)
    {
        int ETDigits = runElapsedTime.ElapsedTime.ToString().Length;
        // BUG FIX: clamp into the valid bucket range. The original indexed with
        // ETDigits - 1 unchecked, which threw IndexOutOfRangeException when an
        // elapsed time had more digits than _ETRangeCount (or when a negative
        // duration added a '-' character).
        int bucket = Math.Max(0, Math.Min(ETDigits, _ETRangeCount) - 1);
        runsPerETRange[bucket] = runsPerETRange[bucket] + 1;
    }

    var model = new ElapsedTimeSummaryModel()
    {
        Filter = filter,
        TotalBuildCount = totalCount,
        TotalSucceededCount = totalSucceeded,
        RunCountsPerETRange = runsPerETRange
    };
    return View(viewName: "ElapsedTime", model: model);
}
/// <summary>
/// Builds the model listing every build in which the filtered test failed,
/// optionally excluding pull-request builds.
/// </summary>
private TestFailureModel GetTestFailureModel(BuildFilterModel filter)
{
    var model = new TestFailureModel()
    {
        Filter = filter,
        Name = filter.Name,
    };

    var failures = _buildUtil.GetTestCaseFailures(filter.StartDate, filter.Name, filter.ViewName);
    foreach (var entity in failures)
    {
        bool isPullRequest = JobUtil.IsPullRequestJobName(entity.BuildId.JobName);
        if (isPullRequest && !filter.IncludePullRequests)
        {
            continue;
        }
        model.Builds.Add(entity);
    }

    return model;
}
/// <summary>
/// Creates a table entity describing a single failure (identified by
/// <paramref name="identifier"/>) within a Jenkins build. Pull request fields
/// are populated only when <paramref name="prInfo"/> is supplied.
/// </summary>
public BuildFailureEntity(BuildId buildId, string identifier, DateTimeOffset buildDate, BuildFailureKind kind, string jobKind, string machineName, PullRequestInfo prInfo)
{
    JobName = buildId.JobName;
    JobKind = jobKind;
    ViewName = AzureUtil.GetViewName(buildId.JobId);
    BuildNumber = buildId.Number;
    Identifier = identifier;
    BuildFailureKindRaw = kind.ToString();
    // Normalized to UTC for storage.
    BuildDateTime = buildDate.UtcDateTime;
    IsPullRequest = JobUtil.IsPullRequestJobName(buildId.JobId);
    MachineName = machineName;
    // Only PR-triggered builds carry pull-request metadata.
    if (prInfo != null)
    {
        PullRequestId = prInfo.Id;
        PullRequestAuthor = prInfo.Author;
        PullRequestAuthorEmail = prInfo.AuthorEmail;
        PullRequestUrl = prInfo.PullUrl;
        PullRequestSha1 = prInfo.Sha1;
        Debug.Assert(HasPullRequestInfo);
        Debug.Assert(PullRequestInfo != null);
    }
}
/// <summary>
/// default constructor
/// initializes all the GUI components, initializes the internal objects and makes a default selection for all the GUI dropdowns
/// In addition, all the jobs and profiles are being loaded from the harddisk
/// </summary>
public MeGUIInfo()
{
    // Core helper objects.
    this.codecs = new CodecManager();
    this.gen = new CommandLineGenerator();
    this.path = System.Windows.Forms.Application.StartupPath;
    this.jobs = new Dictionary<string, Job>();
    this.skipJobs = new List<Job>();
    this.logBuilder = new StringBuilder();
    this.jobUtil = new JobUtil(this);
    this.settings = new MeGUISettings();
    this.calc = new BitrateCalculator();

    // Two audio stream slots, both initialized empty.
    audioStreams = new AudioStream[2];
    audioStreams[0].path = "";
    audioStreams[0].output = "";
    audioStreams[0].settings = null;
    audioStreams[1].path = "";
    audioStreams[1].output = "";
    audioStreams[1].settings = null;

    this.videoEncoderProvider = new VideoEncoderProvider();
    this.audioEncoderProvider = new AudioEncoderProvider();

    // Load persisted profiles, settings and jobs from disk.
    this.profileManager = new ProfileManager(this.path);
    this.profileManager.LoadProfiles(videoProfile, audioProfile);
    this.loadSettings();
    this.loadJobs();
    this.dialogManager = new DialogManager(this);

    // Append a menu entry for every registered muxer package.
    int index = menuItem1.MenuItems.Count;
    foreach (IMuxing muxer in PackageSystem.MuxerProviders.Values)
    {
        MenuItem newMenuItem = new MenuItem();
        newMenuItem.Text = muxer.Name;
        newMenuItem.Tag = muxer;
        newMenuItem.Index = index;
        index++;
        menuItem1.MenuItems.Add(newMenuItem);
        newMenuItem.Click += new System.EventHandler(this.mnuMuxer_Click);
    }

    // Append a menu entry for every registered tool package.
    index = mnuTools.MenuItems.Count;
    foreach (ITool tool in PackageSystem.Tools.Values)
    {
        MenuItem newMenuItem = new MenuItem();
        newMenuItem.Text = tool.Name;
        newMenuItem.Tag = tool;
        newMenuItem.Index = index;
        index++;
        mnuTools.MenuItems.Add(newMenuItem);
        newMenuItem.Click += new System.EventHandler(this.mnuTool_Click);
    }

    //MessageBox.Show(String.Join("|", this.GetType().Assembly.GetManifestResourceNames()));
    // Load the changelog text from the embedded resource into the UI.
    using (TextReader r = new StreamReader(this.GetType().Assembly.GetManifestResourceStream("MeGUI.Changelog.txt")))
    {
        mainForm.Changelog.Text = r.ReadToEnd();
    }
}