/// <summary>
/// Determines whether an issue can be merged into another issue that first occurred in the same job
/// </summary>
/// <param name="Source">The source issue</param>
/// <param name="Target">The target issue</param>
/// <returns>True if the two issues can be merged</returns>
public virtual bool CanMergeInitialJob(Issue Source, Issue Target)
{
	return Source.Category == Target.Category;
}
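// Illustrative example (not part of the original source): a derived matcher could override
// CanMergeInitialJob to require more than a matching category before merging issues from the
// same initial job. Comparing Issue.References here is only an assumption for the sketch; any
// per-issue data the matcher tracks could be compared instead.
//
//   public override bool CanMergeInitialJob(Issue Source, Issue Target)
//   {
//       // Only merge issues from the same job if they also share at least one reference
//       return Source.Category == Target.Category && Source.References.Overlaps(Target.References);
//   }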
/// <summary>
/// Main command entry point
/// </summary>
/// <returns>Exit code for the process</returns>
public override int Execute()
{
	// Build a mapping from category name to the corresponding matcher
	Dictionary<string, Matcher> CategoryNameToMatcher = new Dictionary<string, Matcher>();
	foreach (Matcher Matcher in Matchers)
	{
		CategoryNameToMatcher[Matcher.Category] = Matcher;
	}

	// Complete any interrupted operation to update the state file
	CompleteStateTransaction(StateFile);

	// Read the persistent data file
	PersistentState State;
	if (!bClean && FileReference.Exists(StateFile))
	{
		Log.TraceInformation("Reading persistent data from {0}", StateFile);
		State = DeserializeJson<PersistentState>(StateFile);
	}
	else
	{
		Log.TraceInformation("Creating new persistent data");
		State = new PersistentState();
	}

	// Fixup any issues loaded from disk
	foreach (Issue Issue in State.Issues)
	{
		if (Issue.References == null)
		{
			Issue.References = new SortedSet<string>();
		}
	}

	// Create the Perforce connection
	PerforceConnection Perforce = new PerforceConnection(PerforcePort, PerforceUser, null);

	// Process the input data
	if (InputFile != null)
	{
		// Parse the input file
		Log.TraceInformation("Reading build results from {0}", InputFile);
		InputData InputData = DeserializeJson<InputData>(InputFile);

		// Parse all the builds and add them to the persistent data
		List<InputJob> InputJobs = InputData.Jobs.OrderBy(x => x.Change).ThenBy(x => x.Stream).ToList();

		Stopwatch Timer = Stopwatch.StartNew();
		foreach (InputJob InputJob in InputJobs)
		{
			// Add a new build for each job step
			foreach (InputJobStep InputJobStep in InputJob.Steps)
			{
				IssueBuild NewBuild = new IssueBuild(InputJob.Change, InputJob.Name, InputJob.Url, InputJobStep.Name, InputJobStep.Url, null);
				State.AddBuild(InputJob.Stream, NewBuild);
			}

			// Add all the job steps which have diagnostics
			List<InputJobStep> InputJobSteps = InputJob.Steps.OrderBy(x => x.Name).ToList();
			foreach (InputJobStep InputJobStep in InputJobSteps)
			{
				if (InputJobStep.Diagnostics != null && InputJobStep.Diagnostics.Count > 0)
				{
					AddStep(Perforce, State, InputJob, InputJobStep);
				}
			}

			// Remove any steps which are empty
			InputJob.Steps.RemoveAll(x => x.Diagnostics == null || x.Diagnostics.Count == 0);
		}
		InputJobs.RemoveAll(x => x.Steps.Count == 0);
		Log.TraceInformation("Added jobs in {0}s", Timer.Elapsed.TotalSeconds);

		// If there are any unmatched issues, save out the current state and remaining input
		if (SaveUnmatchedDir != null && InputJobs.Count > 0)
		{
			DirectoryReference.CreateDirectory(SaveUnmatchedDir);
			if (FileReference.Exists(StateFile))
			{
				FileReference.Copy(StateFile, FileReference.Combine(SaveUnmatchedDir, "State.json"), true);
			}
			SerializeJson(FileReference.Combine(SaveUnmatchedDir, "Input.json"), InputData);
		}

		// Try to find the next successful build for each stream, so we can close it as part of updating the server
		for (int Idx = 0; Idx < State.Issues.Count; Idx++)
		{
			Issue Issue = State.Issues[Idx];
			foreach (string Stream in Issue.Streams.Keys)
			{
				Dictionary<string, IssueHistory> StepNameToHistory = Issue.Streams[Stream];
				foreach (string StepName in StepNameToHistory.Keys)
				{
					IssueHistory IssueHistory = StepNameToHistory[StepName];
					if (IssueHistory.FailedBuilds.Count > 0 && IssueHistory.NextSuccessfulBuild == null)
					{
						// Find the successful build after this change
						IssueBuild LastFailedBuild = IssueHistory.FailedBuilds[IssueHistory.FailedBuilds.Count - 1];
						IssueHistory.NextSuccessfulBuild = State.FindBuildAfter(Stream, LastFailedBuild.Change, StepName);
					}
				}
			}
		}

		// Find the change two days before the latest change being added
		if (InputData.Jobs.Count > 0 && !bKeepHistory)
		{
			// Find all the unique change numbers for each stream
			SortedSet<int> ChangeNumbers = new SortedSet<int>();
			foreach (List<IssueBuild> Builds in State.Streams.Values)
			{
				ChangeNumbers.UnionWith(Builds.Select(x => x.Change));
			}

			// Get the latest change record
			int LatestChangeNumber = InputData.Jobs.Min(x => x.Change);
			ChangeRecord LatestChangeRecord = Perforce.GetChange(GetChangeOptions.None, LatestChangeNumber).Data;

			// Step forward through all the changelists until we get to one we don't want to delete
			int DeleteChangeNumber = -1;
			foreach (int ChangeNumber in ChangeNumbers)
			{
				ChangeRecord ChangeRecord = Perforce.GetChange(GetChangeOptions.None, ChangeNumber).Data;
				if (ChangeRecord.Date > LatestChangeRecord.Date - TimeSpan.FromDays(2))
				{
					break;
				}
				DeleteChangeNumber = ChangeNumber;
			}

			// Remove any builds we no longer want to track
			foreach (List<IssueBuild> Builds in State.Streams.Values)
			{
				Builds.RemoveAll(x => x.Change <= DeleteChangeNumber);
			}
		}
	}

	// Mark any issues as resolved
	foreach (Issue Issue in State.Issues)
	{
		if (Issue.IsResolved())
		{
			if (!Issue.ResolvedAt.HasValue)
			{
				Issue.ResolvedAt = DateTime.UtcNow;
			}
		}
		else
		{
			if (Issue.ResolvedAt.HasValue)
			{
				Issue.ResolvedAt = null;
			}
		}
	}

	// If we're in read-only mode, don't write anything out
	if (bReadOnly)
	{
		return 0;
	}

	// Save the persistent data
	Log.TraceInformation("Writing persistent data to {0}", StateFile);
	DirectoryReference.CreateDirectory(StateFile.Directory);
	WriteState(StateFile, State);

	// Synchronize with the server
	if (ServerUrl != null)
	{
		// Post any issue updates
		foreach (Issue Issue in State.Issues)
		{
			Matcher Matcher;
			if (!CategoryNameToMatcher.TryGetValue(Issue.Category, out Matcher))
			{
				continue;
			}

			string Summary = Matcher.GetSummary(Issue);
			if (Issue.Id == -1)
			{
				Log.TraceInformation("Adding issue: {0}", Issue);
				if (Issue.PendingWatchers.Count == 0)
				{
					Log.TraceWarning("(No possible causers)");
				}

				ApiTypes.AddIssue IssueBody = new ApiTypes.AddIssue();
				IssueBody.Project = Issue.Project;
				IssueBody.Summary = Summary;
				if (Issue.PendingWatchers.Count == 1)
				{
					IssueBody.Owner = Issue.PendingWatchers.First();
				}

				using (HttpWebResponse Response = SendHttpRequest(String.Format("{0}/api/issues", ServerUrl), "POST", IssueBody))
				{
					int ResponseCode = (int)Response.StatusCode;
					if (!(ResponseCode >= 200 && ResponseCode <= 299))
					{
						throw new Exception("Unable to add issue");
					}
					Issue.Id = ParseHttpResponse<ApiTypes.AddIssueResponse>(Response).Id;
				}

				Issue.PostedSummary = Summary;
				WriteState(StateFile, State);
			}
			else if (Issue.PostedSummary == null || !String.Equals(Issue.PostedSummary, Summary, StringComparison.Ordinal))
			{
				Log.TraceInformation("Updating issue {0}", Issue.Id);

				ApiTypes.UpdateIssue IssueBody = new ApiTypes.UpdateIssue();
				IssueBody.Summary = Summary;

				using (HttpWebResponse Response = SendHttpRequest(String.Format("{0}/api/issues/{1}", ServerUrl, Issue.Id), "PUT", IssueBody))
				{
					int ResponseCode = (int)Response.StatusCode;
					if (!(ResponseCode >= 200 && ResponseCode <= 299))
					{
						throw new Exception("Unable to update issue");
					}
				}

				Issue.PostedSummary = Summary;
				WriteState(StateFile, State);
			}
		}

		// Add any new builds associated with issues
		Dictionary<string, long> JobStepUrlToId = new Dictionary<string, long>(StringComparer.Ordinal);
		foreach (Issue Issue in State.Issues)
		{
			foreach (KeyValuePair<string, Dictionary<string, IssueHistory>> StreamPair in Issue.Streams)
			{
				foreach (IssueHistory StreamHistory in StreamPair.Value.Values)
				{
					foreach (IssueBuild Build in StreamHistory.Builds)
					{
						if (!Build.bPostedToServer)
						{
							Log.TraceInformation("Adding {0} to issue {1}", Build.JobStepUrl, Issue.Id);

							ApiTypes.AddBuild AddBuild = new ApiTypes.AddBuild();
							AddBuild.Stream = StreamPair.Key;
							AddBuild.Change = Build.Change;
							AddBuild.JobName = Build.JobName;
							AddBuild.JobUrl = Build.JobUrl;
							AddBuild.JobStepName = Build.JobStepName;
							AddBuild.JobStepUrl = Build.JobStepUrl;
							AddBuild.ErrorUrl = Build.ErrorUrl;
							AddBuild.Outcome = (Build == StreamHistory.PrevSuccessfulBuild || Build == StreamHistory.NextSuccessfulBuild) ? ApiTypes.Outcome.Success : ApiTypes.Outcome.Error;

							using (HttpWebResponse Response = SendHttpRequest(String.Format("{0}/api/issues/{1}/builds", ServerUrl, Issue.Id), "POST", AddBuild))
							{
								int ResponseCode = (int)Response.StatusCode;
								if (!(ResponseCode >= 200 && ResponseCode <= 299))
								{
									throw new Exception("Unable to add build");
								}
								Build.Id = ParseHttpResponse<ApiTypes.AddBuildResponse>(Response).Id;
							}

							Build.bPostedToServer = true;
							WriteState(StateFile, State);
						}
						if (Build.Id != -1)
						{
							JobStepUrlToId[Build.JobStepUrl] = Build.Id;
						}
					}
				}
			}
		}

		// Add any new diagnostics
		foreach (Issue Issue in State.Issues)
		{
			foreach (IssueDiagnostic Diagnostic in Issue.Diagnostics)
			{
				if (!Diagnostic.bPostedToServer)
				{
					string Summary = Diagnostic.Message;

					const int MaxLength = 40;
					if (Summary.Length > MaxLength)
					{
						Summary = Summary.Substring(0, MaxLength).TrimEnd();
					}

					Log.TraceInformation("Adding diagnostic '{0}' to issue {1}", Summary, Issue.Id);

					ApiTypes.AddDiagnostic AddDiagnostic = new ApiTypes.AddDiagnostic();

					long BuildId;
					if (Diagnostic.JobStepUrl != null && JobStepUrlToId.TryGetValue(Diagnostic.JobStepUrl, out BuildId))
					{
						AddDiagnostic.BuildId = BuildId;
					}
					else
					{
						Log.TraceWarning("Unable to find build id for diagnostic job step {0}", Diagnostic.JobStepUrl);
					}
					AddDiagnostic.Message = Diagnostic.Message;
					AddDiagnostic.Url = Diagnostic.ErrorUrl;

					using (HttpWebResponse Response = SendHttpRequest(String.Format("{0}/api/issues/{1}/diagnostics", ServerUrl, Issue.Id), "POST", AddDiagnostic))
					{
						int ResponseCode = (int)Response.StatusCode;
						if (!(ResponseCode >= 200 && ResponseCode <= 299))
						{
							throw new Exception("Unable to add diagnostic");
						}
					}

					Diagnostic.bPostedToServer = true;
					WriteState(StateFile, State);
				}
			}
		}

		// Close any issues which are complete
		for (int Idx = 0; Idx < State.Issues.Count; Idx++)
		{
			Issue Issue = State.Issues[Idx];
			if (Issue.ResolvedAt.HasValue != Issue.bPostedResolved)
			{
				Log.TraceInformation("Setting issue {0} resolved flag to {1}", Issue.Id, Issue.ResolvedAt.HasValue);

				ApiTypes.UpdateIssue UpdateBody = new ApiTypes.UpdateIssue();
				UpdateBody.Resolved = Issue.ResolvedAt.HasValue;

				using (HttpWebResponse Response = SendHttpRequest(String.Format("{0}/api/issues/{1}", ServerUrl, Issue.Id), "PUT", UpdateBody))
				{
					int ResponseCode = (int)Response.StatusCode;
					if (!(ResponseCode >= 200 && ResponseCode <= 299))
					{
						throw new Exception("Unable to update issue");
					}
				}

				Issue.bPostedResolved = Issue.ResolvedAt.HasValue;
				WriteState(StateFile, State);
			}
		}

		// Update watchers on any open issues
		foreach (Issue Issue in State.Issues)
		{
			while (Issue.PendingWatchers.Count > 0)
			{
				ApiTypes.Watcher Watcher = new ApiTypes.Watcher();
				Watcher.UserName = Issue.PendingWatchers.First();

				using (HttpWebResponse Response = SendHttpRequest(String.Format("{0}/api/issues/{1}/watchers", ServerUrl, Issue.Id), "POST", Watcher))
				{
					int ResponseCode = (int)Response.StatusCode;
					if (!(ResponseCode >= 200 && ResponseCode <= 299))
					{
						throw new Exception("Unable to add watcher");
					}
				}

				Issue.PendingWatchers.Remove(Watcher.UserName);
				Issue.Watchers.Add(Watcher.UserName);
				WriteState(StateFile, State);
			}
		}
	}

	// Remove any issues which have been resolved for 24 hours. We have to keep information about issues that have been fixed for some time; we may be updating the same job
	// multiple times while other steps are running, and we don't want to keep opening new issues for it. Also, it can take time for changes to propagate between streams.
	DateTime RemoveIssueTime = DateTime.UtcNow - TimeSpan.FromHours(24.0);
	for (int Idx = 0; Idx < State.Issues.Count; Idx++)
	{
		Issue Issue = State.Issues[Idx];
		if (Issue.ResolvedAt.HasValue && Issue.ResolvedAt.Value < RemoveIssueTime)
		{
			State.Issues.RemoveAt(Idx--);
			WriteState(StateFile, State);
			continue;
		}
	}

	// TODO: VERIFY ISSUES ARE CLOSED
	return 0;
}
/// <summary>
/// Gets the summary for an issue
/// </summary>
/// <param name="Issue">The issue to summarize</param>
/// <returns>The summary text for this issue</returns>
public abstract string GetSummary(Issue Issue);
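// Illustrative example (not part of the original source): a concrete matcher might build the
// summary from the issue's first diagnostic. The exact format string below is an assumption
// for the sketch; each matcher defines its own summary text.
//
//   public override string GetSummary(Issue Issue)
//   {
//       // Use the first diagnostic message, if any, to describe the issue
//       IssueDiagnostic FirstDiagnostic = Issue.Diagnostics.FirstOrDefault();
//       return (FirstDiagnostic == null) ? Category : String.Format("{0}: {1}", Category, FirstDiagnostic.Message);
//   }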
/// <summary>
/// Attempts to merge a new issue into an existing issue that is already being tracked
/// </summary>
/// <param name="Perforce">Perforce connection used to find possible causers</param>
/// <param name="State">The current set of tracked issues</param>
/// <param name="InputJob">Job containing the step to add</param>
/// <param name="InputJobStep">The job step to add</param>
/// <param name="InputIssue">The new issue to merge</param>
/// <param name="LazyChanges">Lazily evaluated list of potentially causing changes</param>
/// <returns>The issue that the new issue was merged into, or null if it could not be merged</returns>
Issue MergeIntoExistingIssue(PerforceConnection Perforce, PersistentState State, InputJob InputJob, InputJobStep InputJobStep, Issue InputIssue, Lazy<IReadOnlyList<ChangeInfo>> LazyChanges)
{
	// Find the pattern matcher for this fingerprint
	Matcher Matcher = CategoryToMatcher[InputIssue.Category];

	// Check if it can be added to an existing open issue
	foreach (Issue Issue in State.Issues)
	{
		// Check whether this issue already exists in the current stream
		Dictionary<string, IssueHistory> StepNameToHistory;
		if (!Issue.Streams.TryGetValue(InputJob.Stream, out StepNameToHistory))
		{
			continue;
		}

		// Check that this issue has not already been closed
		IssueHistory History;
		if (StepNameToHistory.TryGetValue(InputJobStep.Name, out History))
		{
			if (!History.CanAddFailedBuild(InputJob.Change))
			{
				continue;
			}
		}
		else
		{
			if (!StepNameToHistory.Values.Any(x => x.CanAddFailedBuild(InputJob.Change)))
			{
				continue;
			}
		}

		// Try to merge the fingerprint
		if (!Matcher.CanMerge(InputIssue, Issue))
		{
			continue;
		}

		// Add the new build
		Matcher.Merge(InputIssue, Issue);
		return Issue;
	}

	// Check if this issue can be merged with an issue built in another stream
	IReadOnlyList<ChangeInfo> Changes = LazyChanges.Value;
	if (Changes != null && Changes.Count > 0)
	{
		SortedSet<int> SourceChanges = new SortedSet<int>(Changes.SelectMany(x => x.SourceChanges));
		foreach (Issue Issue in State.Issues)
		{
			// Check if this issue does not already contain this stream, but contains one of the causing changes
			if (Issue.Streams.ContainsKey(InputJob.Stream))
			{
				continue;
			}
			if (!SourceChanges.Any(x => Issue.SourceChanges.Contains(x)))
			{
				continue;
			}
			if (!Matcher.CanMerge(InputIssue, Issue))
			{
				continue;
			}

			// Merge the issue
			Matcher.Merge(InputIssue, Issue);
			return Issue;
		}
	}

	// Check if it can be merged into an issue that's been created for this job. We only do this after exhausting all other options.
	foreach (Issue Issue in State.Issues)
	{
		if (Issue.InitialJobUrl == InputIssue.InitialJobUrl && Matcher.CanMergeInitialJob(InputIssue, Issue))
		{
			Matcher.Merge(InputIssue, Issue);
			return Issue;
		}
	}

	return null;
}
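// Illustrative call site (a sketch only; the real AddStep implementation is not shown here).
// The names NewIssue and FindSourceChanges are hypothetical placeholders:
//
//   Lazy<IReadOnlyList<ChangeInfo>> LazyChanges = new Lazy<IReadOnlyList<ChangeInfo>>(() => FindSourceChanges(Perforce, InputJob));
//   Issue ExistingIssue = MergeIntoExistingIssue(Perforce, State, InputJob, InputJobStep, NewIssue, LazyChanges);
//   if (ExistingIssue == null)
//   {
//       // No open issue matched; start tracking the new issue
//       State.Issues.Add(NewIssue);
//   }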