/// <summary>
/// Force the recomputation of the cluster job list.
/// </summary>
/// <param name="virtualCluster">Virtual cluster to use (defined only for some cluster types).</param>
/// <param name="manager">Communication manager.</param>
protected override void RecomputeClusterJobList(string virtualCluster, CommManager manager)
{
    this.clusterJobs = new Dictionary<string, ClusterJobInformation>();
    var jobs = this.config.AzureClient.ExpandFileOrDirectory(AzureDfsFile.UriFromPath(this.config, "")).ToList();
    int done = 0;
    foreach (var job in jobs)
    {
        manager.Token.ThrowIfCancellationRequested();
        string jobRootFolder = AzureDfsFile.PathFromUri(this.config, job);
        ClusterJobInformation info = this.GetJobInfo(jobRootFolder);
        if (info != null)
        {
            // ReSharper disable once AssignNullToNotNullAttribute
            this.clusterJobs.Add(job.AbsolutePath, info);
        }
        manager.Progress(100 * done++ / jobs.Count);
    }
    manager.Progress(100);
}
/// <summary>
/// Extract the job information from a job folder with logs in Azure storage.
/// </summary>
/// <param name="jobRootFolder">Folder with logs for the specified job.</param>
/// <returns>The job information, or null if not found.</returns>
private ClusterJobInformation GetJobInfo(string jobRootFolder)
{
    DateTime date = DateTime.MinValue;
    DateTime lastHeartBeat = DateTime.MinValue;
    ClusterJobInformation.ClusterJobStatus status = ClusterJobInformation.ClusterJobStatus.Unknown;
    bool found = false;

    Uri uri = AzureDfsFile.UriFromPath(this.config, jobRootFolder);
    var jobsFolders = this.config.AzureClient.ExpandFileOrDirectory(uri).ToList();
    jobRootFolder = GetBlobName(this.config.Container, jobRootFolder);
    string jobName = jobRootFolder;

    foreach (var file in jobsFolders)
    {
        if (file.AbsolutePath.EndsWith("heartbeat"))
        {
            string blobName = GetBlobName(this.config.Container, file.AbsolutePath);
            var blob = this.config.AzureClient.Container.GetPageBlobReference(blobName);
            blob.FetchAttributes();
            var props = blob.Metadata;
            if (props.ContainsKey("status"))
            {
                var st = props["status"];
                switch (st)
                {
                    case "failure":
                        status = ClusterJobInformation.ClusterJobStatus.Failed;
                        break;
                    case "success":
                        status = ClusterJobInformation.ClusterJobStatus.Succeeded;
                        break;
                    case "running":
                        status = ClusterJobInformation.ClusterJobStatus.Running;
                        break;
                    case "killed":
                        status = ClusterJobInformation.ClusterJobStatus.Cancelled;
                        break;
                    default:
                        Console.WriteLine("Unknown status " + st);
                        break;
                }
            }

            if (props.ContainsKey("heartbeat"))
            {
                var hb = props["heartbeat"];
                if (DateTime.TryParse(hb, out lastHeartBeat))
                {
                    lastHeartBeat = lastHeartBeat.ToLocalTime();
                    if (status == ClusterJobInformation.ClusterJobStatus.Running &&
                        DateTime.Now - lastHeartBeat > TimeSpan.FromSeconds(40))
                    {
                        // job has in fact crashed
                        status = ClusterJobInformation.ClusterJobStatus.Failed;
                    }
                }
            }

            if (props.ContainsKey("jobname"))
            {
                jobName = props["jobname"];
            }

            if (props.ContainsKey("starttime"))
            {
                var t = props["starttime"];
                if (DateTime.TryParse(t, out date))
                {
                    date = date.ToLocalTime();
                }
            }

            found = true;
        }
        else if (file.AbsolutePath.Contains("DryadLinqProgram__") &&
            // newer heartbeats contain the date
            date != DateTime.MinValue)
        {
            var blob = this.config.AzureClient.Container.GetBlockBlobReference(AzureDfsFile.PathFromUri(this.config, file));
            blob.FetchAttributes();
            var props = blob.Properties;
            if (props.LastModified.HasValue)
            {
                date = props.LastModified.Value.DateTime;
                date = date.ToLocalTime();
            }
        }
    }

    if (!found)
    {
        return null;
    }

    TimeSpan running = TimeSpan.Zero;
    if (date != DateTime.MinValue && lastHeartBeat != DateTime.MinValue)
    {
        running = lastHeartBeat - date;
    }

    var info = new ClusterJobInformation(this.config.Name, "", jobRootFolder, jobName, Environment.UserName, date, running, status);
    return info;
}
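
// For reference, the "heartbeat" page blob read above is expected to carry its state in blob
// metadata under the keys "status", "heartbeat", "jobname", and "starttime" (the keys GetJobInfo
// checks). The following is a minimal, illustrative sketch of how a job process could publish
// that metadata with the Azure storage client; the WriteHeartbeat helper, its parameters, and the
// blob naming are hypothetical and are not part of this class.
private static void WriteHeartbeat(Microsoft.WindowsAzure.Storage.Blob.CloudBlobContainer container,
                                   string heartbeatBlobName, string jobName, string status)
{
    var blob = container.GetPageBlobReference(heartbeatBlobName);
    if (!blob.Exists())
    {
        // Page blobs must be created with a size that is a multiple of 512 bytes.
        blob.Create(512);
    }
    blob.Metadata["status"] = status;                           // e.g. "running", "success", "failure", "killed"
    blob.Metadata["heartbeat"] = DateTime.UtcNow.ToString("o"); // parsed with DateTime.TryParse above
    blob.Metadata["jobname"] = jobName;
    if (!blob.Metadata.ContainsKey("starttime"))
    {
        blob.Metadata["starttime"] = DateTime.UtcNow.ToString("o");
    }
    blob.SetMetadata();
}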