/// <exception cref="System.Exception"/> public virtual void TestGetInvalidJob() { RunningJob runJob = new JobClient(GetJobConf()).GetJob(((JobID)JobID.ForName("job_0_0" ))); NUnit.Framework.Assert.IsNull(runJob); }
public virtual void TestListAttemptIdsWithValidInput()
{
    JobID jobId = JobID.ForName(jobIdStr);
    Cluster mockCluster = Org.Mockito.Mockito.Mock<Cluster>();
    Job job = Org.Mockito.Mockito.Mock<Job>();
    CLI cli = Org.Mockito.Mockito.Spy(new CLI());
    Org.Mockito.Mockito.DoReturn(mockCluster).When(cli).CreateCluster();
    Org.Mockito.Mockito.When(job.GetTaskReports(TaskType.Map)).ThenReturn(GetTaskReports(jobId, TaskType.Map));
    Org.Mockito.Mockito.When(job.GetTaskReports(TaskType.Reduce)).ThenReturn(GetTaskReports(jobId, TaskType.Reduce));
    Org.Mockito.Mockito.When(mockCluster.GetJob(jobId)).ThenReturn(job);
    int retCode_MAP = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "MAP", "running" });
    // testing case-insensitive behavior
    int retCode_map = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "map", "running" });
    int retCode_REDUCE = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "REDUCE", "running" });
    int retCode_completed = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "REDUCE", "completed" });
    NUnit.Framework.Assert.AreEqual("MAP is a valid input, exit code should be 0", 0, retCode_MAP);
    NUnit.Framework.Assert.AreEqual("map is a valid input, exit code should be 0", 0, retCode_map);
    NUnit.Framework.Assert.AreEqual("REDUCE is a valid input, exit code should be 0", 0, retCode_REDUCE);
    NUnit.Framework.Assert.AreEqual("REDUCE and completed are valid inputs to -list-attempt-ids, exit code should be 0", 0, retCode_completed);
    Org.Mockito.Mockito.Verify(job, Org.Mockito.Mockito.Times(2)).GetTaskReports(TaskType.Map);
    Org.Mockito.Mockito.Verify(job, Org.Mockito.Mockito.Times(2)).GetTaskReports(TaskType.Reduce);
}
public virtual void TestJobHistoryFileNameBackwardsCompatible()
{
    JobID oldJobId = JobID.ForName(JobId);
    JobId jobId = TypeConverter.ToYarn(oldJobId);
    long submitTime = long.Parse(SubmitTime);
    long finishTime = long.Parse(FinishTime);
    int numMaps = System.Convert.ToInt32(NumMaps);
    int numReduces = System.Convert.ToInt32(NumReduces);
    string jobHistoryFile = string.Format(OldJobHistoryFileFormatter, JobId, SubmitTime, UserName, JobName, FinishTime, NumMaps, NumReduces, JobStatus);
    JobIndexInfo info = FileNameIndexUtils.GetIndexInfo(jobHistoryFile);
    NUnit.Framework.Assert.AreEqual("Job id incorrect after decoding old history file", jobId, info.GetJobId());
    NUnit.Framework.Assert.AreEqual("Submit time incorrect after decoding old history file", submitTime, info.GetSubmitTime());
    NUnit.Framework.Assert.AreEqual("User incorrect after decoding old history file", UserName, info.GetUser());
    NUnit.Framework.Assert.AreEqual("Job name incorrect after decoding old history file", JobName, info.GetJobName());
    NUnit.Framework.Assert.AreEqual("Finish time incorrect after decoding old history file", finishTime, info.GetFinishTime());
    NUnit.Framework.Assert.AreEqual("Num maps incorrect after decoding old history file", numMaps, info.GetNumMaps());
    NUnit.Framework.Assert.AreEqual("Num reduces incorrect after decoding old history file", numReduces, info.GetNumReduces());
    NUnit.Framework.Assert.AreEqual("Job status incorrect after decoding old history file", JobStatus, info.GetJobStatus());
    NUnit.Framework.Assert.IsNull("Queue name incorrect after decoding old history file", info.GetQueueName());
}
public virtual void SetMapredJobID(string mapredJobID)
{
    lock (this)
    {
        SetAssignedJobID((JobID)JobID.ForName(mapredJobID));
    }
}
/// <exception cref="System.IO.IOException"/> /// <exception cref="System.Exception"/> private Job MockJob(Cluster mockCluster, string jobId, JobStatus.State jobState) { Job mockJob = Org.Mockito.Mockito.Mock <Job>(); Org.Mockito.Mockito.When(mockCluster.GetJob(JobID.ForName(jobId))).ThenReturn(mockJob ); JobStatus status = new JobStatus(null, 0, 0, 0, 0, jobState, JobPriority.High, null , null, null, null); Org.Mockito.Mockito.When(mockJob.GetStatus()).ThenReturn(status); return(mockJob); }
public virtual void SetDatum(object oDatum)
{
    this.datum = (JobFinished)oDatum;
    this.jobId = JobID.ForName(datum.jobid.ToString());
    this.finishTime = datum.finishTime;
    this.finishedMaps = datum.finishedMaps;
    this.finishedReduces = datum.finishedReduces;
    this.failedMaps = datum.failedMaps;
    this.failedReduces = datum.failedReduces;
    this.mapCounters = EventReader.FromAvro(datum.mapCounters);
    this.reduceCounters = EventReader.FromAvro(datum.reduceCounters);
    this.totalCounters = EventReader.FromAvro(datum.totalCounters);
}
protected internal override Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job CreateJob(Configuration conf, JobStateInternal forcedState, string diagnostic)
{
    JobImpl jobImpl = Org.Mockito.Mockito.Mock<JobImpl>();
    Org.Mockito.Mockito.When(jobImpl.GetInternalState()).ThenReturn(this.jobStateInternal);
    Org.Mockito.Mockito.When(jobImpl.GetAllCounters()).ThenReturn(new Counters());
    JobID jobID = JobID.ForName("job_1234567890000_0001");
    JobId jobId = TypeConverter.ToYarn(jobID);
    Org.Mockito.Mockito.When(jobImpl.GetID()).ThenReturn(jobId);
    ((AppContext)this.GetContext()).GetAllJobs()[jobImpl.GetID()] = jobImpl;
    return jobImpl;
}
public virtual void TestHistoryFileInfoSummaryFileNotExist()
{
    TestHistoryFileManager.HistoryFileManagerTest hmTest = new TestHistoryFileManager.HistoryFileManagerTest();
    string job = "job_1410889000000_123456";
    Path summaryFile = new Path(job + ".summary");
    JobIndexInfo jobIndexInfo = new JobIndexInfo();
    jobIndexInfo.SetJobId(TypeConverter.ToYarn(JobID.ForName(job)));
    Configuration conf = dfsCluster.GetConfiguration(0);
    conf.Set(JHAdminConfig.MrHistoryDoneDir, "/" + UUID.RandomUUID());
    conf.Set(JHAdminConfig.MrHistoryIntermediateDoneDir, "/" + UUID.RandomUUID());
    hmTest.ServiceInit(conf);
    HistoryFileManager.HistoryFileInfo info = hmTest.GetHistoryFileInfo(null, null, summaryFile, jobIndexInfo, false);
    info.MoveToDone();
    NUnit.Framework.Assert.IsFalse(info.DidMoveFail());
}
public virtual void TestQueueNamePercentEncoding()
{
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.ForName(JobId);
    JobId jobId = TypeConverter.ToYarn(oldJobId);
    info.SetJobId(jobId);
    info.SetSubmitTime(long.Parse(SubmitTime));
    info.SetUser(UserName);
    info.SetJobName(JobName);
    info.SetFinishTime(long.Parse(FinishTime));
    info.SetNumMaps(System.Convert.ToInt32(NumMaps));
    info.SetNumReduces(System.Convert.ToInt32(NumReduces));
    info.SetJobStatus(JobStatus);
    info.SetQueueName(QueueNameWithDelimiter);
    info.SetJobStartTime(long.Parse(JobStartTime));
    string jobHistoryFile = FileNameIndexUtils.GetDoneFileName(info);
    NUnit.Framework.Assert.IsTrue("Queue name not encoded correctly into job history file", jobHistoryFile.Contains(QueueNameWithDelimiterEscape));
}
public virtual void TestFailedJobHistoryWithoutDiagnostics()
{
    Path histPath = new Path(GetType().GetClassLoader().GetResource("job_1393307629410_0001-1393307687476-user-Sleep+job-1393307723835-0-0-FAILED-default-1393307693920.jhist").GetFile());
    FileSystem lfs = FileSystem.GetLocal(new Configuration());
    FSDataInputStream fsdis = lfs.Open(histPath);
    try
    {
        JobHistoryParser parser = new JobHistoryParser(fsdis);
        JobHistoryParser.JobInfo info = parser.Parse();
        NUnit.Framework.Assert.AreEqual("History parsed jobId incorrectly", info.GetJobId(), JobID.ForName("job_1393307629410_0001"));
        NUnit.Framework.Assert.AreEqual("Default diagnostics incorrect", string.Empty, info.GetErrorInfo());
    }
    finally
    {
        fsdis.Close();
    }
}
public virtual void TestEncodingDecodingEquivalence()
{
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.ForName(JobId);
    JobId jobId = TypeConverter.ToYarn(oldJobId);
    info.SetJobId(jobId);
    info.SetSubmitTime(long.Parse(SubmitTime));
    info.SetUser(UserName);
    info.SetJobName(JobName);
    info.SetFinishTime(long.Parse(FinishTime));
    info.SetNumMaps(System.Convert.ToInt32(NumMaps));
    info.SetNumReduces(System.Convert.ToInt32(NumReduces));
    info.SetJobStatus(JobStatus);
    info.SetQueueName(QueueName);
    info.SetJobStartTime(long.Parse(JobStartTime));
    string jobHistoryFile = FileNameIndexUtils.GetDoneFileName(info);
    JobIndexInfo parsedInfo = FileNameIndexUtils.GetIndexInfo(jobHistoryFile);
    NUnit.Framework.Assert.AreEqual("Job id different after encoding and decoding", info.GetJobId(), parsedInfo.GetJobId());
    NUnit.Framework.Assert.AreEqual("Submit time different after encoding and decoding", info.GetSubmitTime(), parsedInfo.GetSubmitTime());
    NUnit.Framework.Assert.AreEqual("User different after encoding and decoding", info.GetUser(), parsedInfo.GetUser());
    NUnit.Framework.Assert.AreEqual("Job name different after encoding and decoding", info.GetJobName(), parsedInfo.GetJobName());
    NUnit.Framework.Assert.AreEqual("Finish time different after encoding and decoding", info.GetFinishTime(), parsedInfo.GetFinishTime());
    NUnit.Framework.Assert.AreEqual("Num maps different after encoding and decoding", info.GetNumMaps(), parsedInfo.GetNumMaps());
    NUnit.Framework.Assert.AreEqual("Num reduces different after encoding and decoding", info.GetNumReduces(), parsedInfo.GetNumReduces());
    NUnit.Framework.Assert.AreEqual("Job status different after encoding and decoding", info.GetJobStatus(), parsedInfo.GetJobStatus());
    NUnit.Framework.Assert.AreEqual("Queue name different after encoding and decoding", info.GetQueueName(), parsedInfo.GetQueueName());
    NUnit.Framework.Assert.AreEqual("Job start time different after encoding and decoding", info.GetJobStartTime(), parsedInfo.GetJobStartTime());
}
public virtual void TestListAttemptIdsWithInvalidInputs()
{
    JobID jobId = JobID.ForName(jobIdStr);
    Cluster mockCluster = Org.Mockito.Mockito.Mock<Cluster>();
    Job job = Org.Mockito.Mockito.Mock<Job>();
    CLI cli = Org.Mockito.Mockito.Spy(new CLI());
    Org.Mockito.Mockito.DoReturn(mockCluster).When(cli).CreateCluster();
    Org.Mockito.Mockito.When(mockCluster.GetJob(jobId)).ThenReturn(job);
    int retCode_JOB_SETUP = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "JOB_SETUP", "running" });
    int retCode_JOB_CLEANUP = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "JOB_CLEANUP", "running" });
    int retCode_invalidTaskState = cli.Run(new string[] { "-list-attempt-ids", jobIdStr, "REDUCE", "complete" });
    NUnit.Framework.Assert.AreEqual("JOB_SETUP is an invalid input, exit code should be -1", -1, retCode_JOB_SETUP);
    NUnit.Framework.Assert.AreEqual("JOB_CLEANUP is an invalid input, exit code should be -1", -1, retCode_JOB_CLEANUP);
    NUnit.Framework.Assert.AreEqual("complete is an invalid input, exit code should be -1", -1, retCode_invalidTaskState);
}
/// <summary>Get the Job ID</summary>
public virtual JobID GetJobId()
{
    return JobID.ForName(datum.jobid.ToString());
}
public virtual void Test()
{
    TestMRClientService.MRAppWithClientService app = new TestMRClientService.MRAppWithClientService(this, 1, 0, false);
    Configuration conf = new Configuration();
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.Submit(conf);
    app.WaitForState(job, JobState.Running);
    NUnit.Framework.Assert.AreEqual("Num tasks not correct", 1, job.GetTasks().Count);
    IEnumerator<Task> it = job.GetTasks().Values.GetEnumerator();
    Task task = it.Next();
    app.WaitForState(task, TaskState.Running);
    TaskAttempt attempt = task.GetAttempts().Values.GetEnumerator().Next();
    app.WaitForState(attempt, TaskAttemptState.Running);
    // send the diagnostic
    string diagnostic1 = "Diagnostic1";
    string diagnostic2 = "Diagnostic2";
    app.GetContext().GetEventHandler().Handle(new TaskAttemptDiagnosticsUpdateEvent(attempt.GetID(), diagnostic1));
    // build the status update
    TaskAttemptStatusUpdateEvent.TaskAttemptStatus taskAttemptStatus = new TaskAttemptStatusUpdateEvent.TaskAttemptStatus();
    taskAttemptStatus.id = attempt.GetID();
    taskAttemptStatus.progress = 0.5f;
    taskAttemptStatus.stateString = "RUNNING";
    taskAttemptStatus.taskState = TaskAttemptState.Running;
    taskAttemptStatus.phase = Phase.Map;
    // send the status update
    app.GetContext().GetEventHandler().Handle(new TaskAttemptStatusUpdateEvent(attempt.GetID(), taskAttemptStatus));
    // verify that all objects are fully populated by invoking RPCs
    YarnRPC rpc = YarnRPC.Create(conf);
    MRClientProtocol proxy = (MRClientProtocol)rpc.GetProxy(typeof(MRClientProtocol), app.clientService.GetBindAddress(), conf);
    GetCountersRequest gcRequest = recordFactory.NewRecordInstance<GetCountersRequest>();
    gcRequest.SetJobId(job.GetID());
    NUnit.Framework.Assert.IsNotNull("Counters is null", proxy.GetCounters(gcRequest).GetCounters());
    GetJobReportRequest gjrRequest = recordFactory.NewRecordInstance<GetJobReportRequest>();
    gjrRequest.SetJobId(job.GetID());
    JobReport jr = proxy.GetJobReport(gjrRequest).GetJobReport();
    VerifyJobReport(jr);
    GetTaskAttemptCompletionEventsRequest gtaceRequest = recordFactory.NewRecordInstance<GetTaskAttemptCompletionEventsRequest>();
    gtaceRequest.SetJobId(job.GetID());
    gtaceRequest.SetFromEventId(0);
    gtaceRequest.SetMaxEvents(10);
    NUnit.Framework.Assert.IsNotNull("TaskCompletionEvents is null", proxy.GetTaskAttemptCompletionEvents(gtaceRequest).GetCompletionEventList());
    GetDiagnosticsRequest gdRequest = recordFactory.NewRecordInstance<GetDiagnosticsRequest>();
    gdRequest.SetTaskAttemptId(attempt.GetID());
    NUnit.Framework.Assert.IsNotNull("Diagnostics is null", proxy.GetDiagnostics(gdRequest).GetDiagnosticsList());
    GetTaskAttemptReportRequest gtarRequest = recordFactory.NewRecordInstance<GetTaskAttemptReportRequest>();
    gtarRequest.SetTaskAttemptId(attempt.GetID());
    TaskAttemptReport tar = proxy.GetTaskAttemptReport(gtarRequest).GetTaskAttemptReport();
    VerifyTaskAttemptReport(tar);
    GetTaskReportRequest gtrRequest = recordFactory.NewRecordInstance<GetTaskReportRequest>();
    gtrRequest.SetTaskId(task.GetID());
    NUnit.Framework.Assert.IsNotNull("TaskReport is null", proxy.GetTaskReport(gtrRequest).GetTaskReport());
    GetTaskReportsRequest gtreportsRequest = recordFactory.NewRecordInstance<GetTaskReportsRequest>();
    gtreportsRequest.SetJobId(job.GetID());
    gtreportsRequest.SetTaskType(TaskType.Map);
    NUnit.Framework.Assert.IsNotNull("TaskReports for map is null", proxy.GetTaskReports(gtreportsRequest).GetTaskReportList());
    gtreportsRequest = recordFactory.NewRecordInstance<GetTaskReportsRequest>();
    gtreportsRequest.SetJobId(job.GetID());
    gtreportsRequest.SetTaskType(TaskType.Reduce);
    NUnit.Framework.Assert.IsNotNull("TaskReports for reduce is null", proxy.GetTaskReports(gtreportsRequest).GetTaskReportList());
    IList<string> diag = proxy.GetDiagnostics(gdRequest).GetDiagnosticsList();
    NUnit.Framework.Assert.AreEqual("Num diagnostics not correct", 1, diag.Count);
    NUnit.Framework.Assert.AreEqual("Diag 1 not correct", diagnostic1, diag[0].ToString());
    TaskReport taskReport = proxy.GetTaskReport(gtrRequest).GetTaskReport();
    NUnit.Framework.Assert.AreEqual("Num diagnostics not correct", 1, taskReport.GetDiagnosticsCount());
    // send the done signal to the task
    app.GetContext().GetEventHandler().Handle(new TaskAttemptEvent(task.GetAttempts().Values.GetEnumerator().Next().GetID(), TaskAttemptEventType.TaDone));
    app.WaitForState(job, JobState.Succeeded);
    // for an invalid jobid, an IOException should be thrown
    gtreportsRequest = recordFactory.NewRecordInstance<GetTaskReportsRequest>();
    gtreportsRequest.SetJobId(TypeConverter.ToYarn(JobID.ForName("job_1415730144495_0001")));
    gtreportsRequest.SetTaskType(TaskType.Reduce);
    try
    {
        proxy.GetTaskReports(gtreportsRequest);
        NUnit.Framework.Assert.Fail("IOException not thrown for invalid job id");
    }
    catch (IOException)
    {
        // expected
    }
}
// Test reports of JobHistoryServer. The history server should get log files from MRApp and read them.
/// <exception cref="System.Exception"/>
public virtual void TestReports()
{
    Configuration config = new Configuration();
    config.SetClass(CommonConfigurationKeysPublic.NetTopologyNodeSwitchMappingImplKey, typeof(TestJobHistoryParsing.MyResolver), typeof(DNSToSwitchMapping));
    RackResolver.Init(config);
    MRApp app = new TestJobHistoryEvents.MRAppWithHistory(1, 1, true, this.GetType().FullName, true);
    app.Submit(config);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.GetContext().GetAllJobs().Values.GetEnumerator().Next();
    app.WaitForState(job, JobState.Succeeded);
    historyServer = new JobHistoryServer();
    historyServer.Init(config);
    historyServer.Start();
    // search for the JobHistory service
    JobHistory jobHistory = null;
    foreach (Org.Apache.Hadoop.Service.Service service in historyServer.GetServices())
    {
        if (service is JobHistory)
        {
            jobHistory = (JobHistory)service;
        }
    }
    IDictionary<JobId, Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job> jobs = jobHistory.GetAllJobs();
    NUnit.Framework.Assert.AreEqual(1, jobs.Count);
    NUnit.Framework.Assert.AreEqual("job_0_0000", jobs.Keys.GetEnumerator().Next().ToString());
    Task task = job.GetTasks().Values.GetEnumerator().Next();
    TaskAttempt attempt = task.GetAttempts().Values.GetEnumerator().Next();
    HistoryClientService historyService = historyServer.GetClientService();
    MRClientProtocol protocol = historyService.GetClientHandler();
    GetTaskAttemptReportRequest gtarRequest = recordFactory.NewRecordInstance<GetTaskAttemptReportRequest>();
    // test getTaskAttemptReport
    TaskAttemptId taId = attempt.GetID();
    taId.SetTaskId(task.GetID());
    taId.GetTaskId().SetJobId(job.GetID());
    gtarRequest.SetTaskAttemptId(taId);
    GetTaskAttemptReportResponse response = protocol.GetTaskAttemptReport(gtarRequest);
    NUnit.Framework.Assert.AreEqual("container_0_0000_01_000000", response.GetTaskAttemptReport().GetContainerId().ToString());
    NUnit.Framework.Assert.IsTrue(response.GetTaskAttemptReport().GetDiagnosticInfo().IsEmpty());
    // counters
    NUnit.Framework.Assert.IsNotNull(response.GetTaskAttemptReport().GetCounters().GetCounter(TaskCounter.PhysicalMemoryBytes));
    NUnit.Framework.Assert.AreEqual(taId.ToString(), response.GetTaskAttemptReport().GetTaskAttemptId().ToString());
    // test getTaskReport
    GetTaskReportRequest request = recordFactory.NewRecordInstance<GetTaskReportRequest>();
    TaskId taskId = task.GetID();
    taskId.SetJobId(job.GetID());
    request.SetTaskId(taskId);
    GetTaskReportResponse reportResponse = protocol.GetTaskReport(request);
    NUnit.Framework.Assert.AreEqual(string.Empty, reportResponse.GetTaskReport().GetDiagnosticsList().GetEnumerator().Next());
    // progress
    NUnit.Framework.Assert.AreEqual(1.0f, reportResponse.GetTaskReport().GetProgress(), 0.01);
    // report has the corrected taskId
    NUnit.Framework.Assert.AreEqual(taskId.ToString(), reportResponse.GetTaskReport().GetTaskId().ToString());
    // task state should be SUCCEEDED
    NUnit.Framework.Assert.AreEqual(TaskState.Succeeded, reportResponse.GetTaskReport().GetTaskState());
    // for an invalid jobid, an IOException should be thrown
    GetTaskReportsRequest gtreportsRequest = recordFactory.NewRecordInstance<GetTaskReportsRequest>();
    gtreportsRequest.SetJobId(TypeConverter.ToYarn(JobID.ForName("job_1415730144495_0001")));
    gtreportsRequest.SetTaskType(TaskType.Reduce);
    try
    {
        protocol.GetTaskReports(gtreportsRequest);
        NUnit.Framework.Assert.Fail("IOException not thrown for invalid job id");
    }
    catch (IOException)
    {
        // expected
    }
    // test getTaskAttemptCompletionEvents
    GetTaskAttemptCompletionEventsRequest taskAttemptRequest = recordFactory.NewRecordInstance<GetTaskAttemptCompletionEventsRequest>();
    taskAttemptRequest.SetJobId(job.GetID());
    GetTaskAttemptCompletionEventsResponse taskAttemptCompletionEventsResponse = protocol.GetTaskAttemptCompletionEvents(taskAttemptRequest);
    NUnit.Framework.Assert.AreEqual(0, taskAttemptCompletionEventsResponse.GetCompletionEventCount());
    // test getDiagnostics
    GetDiagnosticsRequest diagnosticRequest = recordFactory.NewRecordInstance<GetDiagnosticsRequest>();
    diagnosticRequest.SetTaskAttemptId(taId);
    GetDiagnosticsResponse diagnosticResponse = protocol.GetDiagnostics(diagnosticRequest);
    // it is strange: why one empty string?
    NUnit.Framework.Assert.AreEqual(1, diagnosticResponse.GetDiagnosticsCount());
    NUnit.Framework.Assert.AreEqual(string.Empty, diagnosticResponse.GetDiagnostics(0));
}
public static JobId ToJobID(string jid)
{
    return TypeConverter.ToYarn(JobID.ForName(jid));
}
/// <exception cref="System.Exception"/> public virtual int Run(string[] argv) { int exitCode = -1; if (argv.Length < 1) { DisplayUsage(string.Empty); return(exitCode); } // process arguments string cmd = argv[0]; string submitJobFile = null; string jobid = null; string taskid = null; string historyFile = null; string counterGroupName = null; string counterName = null; JobPriority jp = null; string taskType = null; string taskState = null; int fromEvent = 0; int nEvents = 0; bool getStatus = false; bool getCounter = false; bool killJob = false; bool listEvents = false; bool viewHistory = false; bool viewAllHistory = false; bool listJobs = false; bool listAllJobs = false; bool listActiveTrackers = false; bool listBlacklistedTrackers = false; bool displayTasks = false; bool killTask = false; bool failTask = false; bool setJobPriority = false; bool logs = false; if ("-submit".Equals(cmd)) { if (argv.Length != 2) { DisplayUsage(cmd); return(exitCode); } submitJobFile = argv[1]; } else { if ("-status".Equals(cmd)) { if (argv.Length != 2) { DisplayUsage(cmd); return(exitCode); } jobid = argv[1]; getStatus = true; } else { if ("-counter".Equals(cmd)) { if (argv.Length != 4) { DisplayUsage(cmd); return(exitCode); } getCounter = true; jobid = argv[1]; counterGroupName = argv[2]; counterName = argv[3]; } else { if ("-kill".Equals(cmd)) { if (argv.Length != 2) { DisplayUsage(cmd); return(exitCode); } jobid = argv[1]; killJob = true; } else { if ("-set-priority".Equals(cmd)) { if (argv.Length != 3) { DisplayUsage(cmd); return(exitCode); } jobid = argv[1]; try { jp = JobPriority.ValueOf(argv[2]); } catch (ArgumentException iae) { Log.Info(iae); DisplayUsage(cmd); return(exitCode); } setJobPriority = true; } else { if ("-events".Equals(cmd)) { if (argv.Length != 4) { DisplayUsage(cmd); return(exitCode); } jobid = argv[1]; fromEvent = System.Convert.ToInt32(argv[2]); nEvents = System.Convert.ToInt32(argv[3]); listEvents = true; } else { if ("-history".Equals(cmd)) { if (argv.Length != 2 && !(argv.Length == 3 && "all".Equals(argv[1]))) { DisplayUsage(cmd); return(exitCode); } viewHistory = true; if (argv.Length == 3 && "all".Equals(argv[1])) { viewAllHistory = true; historyFile = argv[2]; } else { historyFile = argv[1]; } } else { if ("-list".Equals(cmd)) { if (argv.Length != 1 && !(argv.Length == 2 && "all".Equals(argv[1]))) { DisplayUsage(cmd); return(exitCode); } if (argv.Length == 2 && "all".Equals(argv[1])) { listAllJobs = true; } else { listJobs = true; } } else { if ("-kill-task".Equals(cmd)) { if (argv.Length != 2) { DisplayUsage(cmd); return(exitCode); } killTask = true; taskid = argv[1]; } else { if ("-fail-task".Equals(cmd)) { if (argv.Length != 2) { DisplayUsage(cmd); return(exitCode); } failTask = true; taskid = argv[1]; } else { if ("-list-active-trackers".Equals(cmd)) { if (argv.Length != 1) { DisplayUsage(cmd); return(exitCode); } listActiveTrackers = true; } else { if ("-list-blacklisted-trackers".Equals(cmd)) { if (argv.Length != 1) { DisplayUsage(cmd); return(exitCode); } listBlacklistedTrackers = true; } else { if ("-list-attempt-ids".Equals(cmd)) { if (argv.Length != 4) { DisplayUsage(cmd); return(exitCode); } jobid = argv[1]; taskType = argv[2]; taskState = argv[3]; displayTasks = true; if (!taskTypes.Contains(StringUtils.ToUpperCase(taskType))) { System.Console.Out.WriteLine("Error: Invalid task-type: " + taskType); DisplayUsage(cmd); return(exitCode); } if (!taskStates.Contains(StringUtils.ToLowerCase(taskState))) { System.Console.Out.WriteLine("Error: Invalid task-state: " + 
taskState); DisplayUsage(cmd); return(exitCode); } } else { if ("-logs".Equals(cmd)) { if (argv.Length == 2 || argv.Length == 3) { logs = true; jobid = argv[1]; if (argv.Length == 3) { taskid = argv[2]; } else { taskid = null; } } else { DisplayUsage(cmd); return(exitCode); } } else { DisplayUsage(cmd); return(exitCode); } } } } } } } } } } } } } } // initialize cluster cluster = CreateCluster(); // Submit the request try { if (submitJobFile != null) { Job job = Job.GetInstance(new JobConf(submitJobFile)); job.Submit(); System.Console.Out.WriteLine("Created job " + job.GetJobID()); exitCode = 0; } else { if (getStatus) { Job job = cluster.GetJob(JobID.ForName(jobid)); if (job == null) { System.Console.Out.WriteLine("Could not find job " + jobid); } else { Counters counters = job.GetCounters(); System.Console.Out.WriteLine(); System.Console.Out.WriteLine(job); if (counters != null) { System.Console.Out.WriteLine(counters); } else { System.Console.Out.WriteLine("Counters not available. Job is retired."); } exitCode = 0; } } else { if (getCounter) { Job job = cluster.GetJob(JobID.ForName(jobid)); if (job == null) { System.Console.Out.WriteLine("Could not find job " + jobid); } else { Counters counters = job.GetCounters(); if (counters == null) { System.Console.Out.WriteLine("Counters not available for retired job " + jobid); exitCode = -1; } else { System.Console.Out.WriteLine(GetCounter(counters, counterGroupName, counterName)); exitCode = 0; } } } else { if (killJob) { Job job = cluster.GetJob(JobID.ForName(jobid)); if (job == null) { System.Console.Out.WriteLine("Could not find job " + jobid); } else { JobStatus jobStatus = job.GetStatus(); if (jobStatus.GetState() == JobStatus.State.Failed) { System.Console.Out.WriteLine("Could not mark the job " + jobid + " as killed, as it has already failed." ); exitCode = -1; } else { if (jobStatus.GetState() == JobStatus.State.Killed) { System.Console.Out.WriteLine("The job " + jobid + " has already been killed."); exitCode = -1; } else { if (jobStatus.GetState() == JobStatus.State.Succeeded) { System.Console.Out.WriteLine("Could not kill the job " + jobid + ", as it has already succeeded." 
); exitCode = -1; } else { job.KillJob(); System.Console.Out.WriteLine("Killed job " + jobid); exitCode = 0; } } } } } else { if (setJobPriority) { Job job = cluster.GetJob(JobID.ForName(jobid)); if (job == null) { System.Console.Out.WriteLine("Could not find job " + jobid); } else { job.SetPriority(jp); System.Console.Out.WriteLine("Changed job priority."); exitCode = 0; } } else { if (viewHistory) { ViewHistory(historyFile, viewAllHistory); exitCode = 0; } else { if (listEvents) { ListEvents(cluster.GetJob(JobID.ForName(jobid)), fromEvent, nEvents); exitCode = 0; } else { if (listJobs) { ListJobs(cluster); exitCode = 0; } else { if (listAllJobs) { ListAllJobs(cluster); exitCode = 0; } else { if (listActiveTrackers) { ListActiveTrackers(cluster); exitCode = 0; } else { if (listBlacklistedTrackers) { ListBlacklistedTrackers(cluster); exitCode = 0; } else { if (displayTasks) { DisplayTasks(cluster.GetJob(JobID.ForName(jobid)), taskType, taskState); exitCode = 0; } else { if (killTask) { TaskAttemptID taskID = TaskAttemptID.ForName(taskid); Job job = cluster.GetJob(taskID.GetJobID()); if (job == null) { System.Console.Out.WriteLine("Could not find job " + jobid); } else { if (job.KillTask(taskID, false)) { System.Console.Out.WriteLine("Killed task " + taskid); exitCode = 0; } else { System.Console.Out.WriteLine("Could not kill task " + taskid); exitCode = -1; } } } else { if (failTask) { TaskAttemptID taskID = TaskAttemptID.ForName(taskid); Job job = cluster.GetJob(taskID.GetJobID()); if (job == null) { System.Console.Out.WriteLine("Could not find job " + jobid); } else { if (job.KillTask(taskID, true)) { System.Console.Out.WriteLine("Killed task " + taskID + " by failing it"); exitCode = 0; } else { System.Console.Out.WriteLine("Could not fail task " + taskid); exitCode = -1; } } } else { if (logs) { try { JobID jobID = JobID.ForName(jobid); TaskAttemptID taskAttemptID = TaskAttemptID.ForName(taskid); LogParams logParams = cluster.GetLogParams(jobID, taskAttemptID); LogCLIHelpers logDumper = new LogCLIHelpers(); logDumper.SetConf(GetConf()); exitCode = logDumper.DumpAContainersLogs(logParams.GetApplicationId(), logParams. GetContainerId(), logParams.GetNodeId(), logParams.GetOwner()); } catch (IOException e) { if (e is RemoteException) { throw; } System.Console.Out.WriteLine(e.Message); } } } } } } } } } } } } } } } } } catch (RemoteException re) { IOException unwrappedException = re.UnwrapRemoteException(); if (unwrappedException is AccessControlException) { System.Console.Out.WriteLine(unwrappedException.Message); } else { throw; } } finally { cluster.Close(); } return(exitCode); }
/// <summary>
/// Parses the provided job history file name to construct a
/// JobIndexInfo object which is returned.
/// </summary>
/// <param name="jhFileName">the job history filename.</param>
/// <returns>a JobIndexInfo object built from the filename.</returns>
/// <exception cref="System.IO.IOException"/>
public static JobIndexInfo GetIndexInfo(string jhFileName)
{
    string fileName = Sharpen.Runtime.Substring(jhFileName, 0, jhFileName.IndexOf(JobHistoryUtils.JobHistoryFileExtension));
    JobIndexInfo indexInfo = new JobIndexInfo();
    string[] jobDetails = fileName.Split(Delimiter);
    JobID oldJobId = JobID.ForName(DecodeJobHistoryFileName(jobDetails[JobIdIndex]));
    JobId jobId = TypeConverter.ToYarn(oldJobId);
    indexInfo.SetJobId(jobId);
    // Do not fail if there are some minor parse errors
    try
    {
        try
        {
            indexInfo.SetSubmitTime(long.Parse(DecodeJobHistoryFileName(jobDetails[SubmitTimeIndex])));
        }
        catch (FormatException e)
        {
            Log.Warn("Unable to parse submit time from job history file " + jhFileName + " : " + e);
        }
        indexInfo.SetUser(DecodeJobHistoryFileName(jobDetails[UserIndex]));
        indexInfo.SetJobName(DecodeJobHistoryFileName(jobDetails[JobNameIndex]));
        try
        {
            indexInfo.SetFinishTime(long.Parse(DecodeJobHistoryFileName(jobDetails[FinishTimeIndex])));
        }
        catch (FormatException e)
        {
            Log.Warn("Unable to parse finish time from job history file " + jhFileName + " : " + e);
        }
        try
        {
            indexInfo.SetNumMaps(System.Convert.ToInt32(DecodeJobHistoryFileName(jobDetails[NumMapsIndex])));
        }
        catch (FormatException e)
        {
            Log.Warn("Unable to parse num maps from job history file " + jhFileName + " : " + e);
        }
        try
        {
            indexInfo.SetNumReduces(System.Convert.ToInt32(DecodeJobHistoryFileName(jobDetails[NumReducesIndex])));
        }
        catch (FormatException e)
        {
            Log.Warn("Unable to parse num reduces from job history file " + jhFileName + " : " + e);
        }
        indexInfo.SetJobStatus(DecodeJobHistoryFileName(jobDetails[JobStatusIndex]));
        indexInfo.SetQueueName(DecodeJobHistoryFileName(jobDetails[QueueNameIndex]));
        try
        {
            if (jobDetails.Length <= JobStartTimeIndex)
            {
                // older file names carry no start time; fall back to the submit time
                indexInfo.SetJobStartTime(indexInfo.GetSubmitTime());
            }
            else
            {
                indexInfo.SetJobStartTime(long.Parse(DecodeJobHistoryFileName(jobDetails[JobStartTimeIndex])));
            }
        }
        catch (FormatException e)
        {
            Log.Warn("Unable to parse start time from job history file " + jhFileName + " : " + e);
        }
    }
    catch (IndexOutOfRangeException)
    {
        Log.Warn("Parsing job history file with partial data encoded into name: " + jhFileName);
    }
    return indexInfo;
}
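// A minimal sketch (not part of the original sources) illustrating the round trip that
// GetIndexInfo and the tests above rely on: FileNameIndexUtils.GetDoneFileName encodes a
// JobIndexInfo into a history file name, and GetIndexInfo parses that name back. The job id,
// user, queue, and timestamps below are made-up sample values chosen only for illustration.
public static void DemonstrateIndexInfoRoundTrip()
{
    JobIndexInfo info = new JobIndexInfo();
    info.SetJobId(TypeConverter.ToYarn(JobID.ForName("job_1410889000000_0001")));
    info.SetSubmitTime(1410889000000L);
    info.SetUser("someuser");
    info.SetJobName("word count");
    info.SetFinishTime(1410889001000L);
    info.SetNumMaps(1);
    info.SetNumReduces(1);
    info.SetJobStatus("SUCCEEDED");
    info.SetQueueName("default");
    info.SetJobStartTime(1410889000100L);
    // Encode to a done-file name, then decode it again; the parsed fields should match what was set.
    string fileName = FileNameIndexUtils.GetDoneFileName(info);
    JobIndexInfo parsed = FileNameIndexUtils.GetIndexInfo(fileName);
    System.Console.Out.WriteLine(fileName);
    System.Console.Out.WriteLine(parsed.GetJobId() + " " + parsed.GetQueueName());
}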