/// <summary>test some methods of CompletedTaskAttempt</summary>
public virtual void TestCompletedTaskAttempt()
{
    // Mock a TaskAttemptInfo with a known rack, shuffle/sort finish times and shuffle port.
    JobHistoryParser.TaskAttemptInfo attemptInfo = Org.Mockito.Mockito.Mock<JobHistoryParser.TaskAttemptInfo>();
    Org.Mockito.Mockito.When(attemptInfo.GetRackname()).ThenReturn("Rackname");
    Org.Mockito.Mockito.When(attemptInfo.GetShuffleFinishTime()).ThenReturn(11L);
    Org.Mockito.Mockito.When(attemptInfo.GetSortFinishTime()).ThenReturn(12L);
    Org.Mockito.Mockito.When(attemptInfo.GetShufflePort()).ThenReturn(10);
    JobID jobId = new JobID("12345", 0);
    TaskID taskId = new TaskID(jobId, TaskType.Reduce, 0);
    TaskAttemptID taskAttemptId = new TaskAttemptID(taskId, 0);
    Org.Mockito.Mockito.When(attemptInfo.GetAttemptId()).ThenReturn(taskAttemptId);
    // A CompletedTaskAttempt should report the mocked values, the Cleanup phase and a finished state.
    CompletedTaskAttempt taskAttempt = new CompletedTaskAttempt(null, attemptInfo);
    NUnit.Framework.Assert.AreEqual("Rackname", taskAttempt.GetNodeRackName());
    NUnit.Framework.Assert.AreEqual(Phase.Cleanup, taskAttempt.GetPhase());
    NUnit.Framework.Assert.IsTrue(taskAttempt.IsFinished());
    NUnit.Framework.Assert.AreEqual(11L, taskAttempt.GetShuffleFinishTime());
    NUnit.Framework.Assert.AreEqual(12L, taskAttempt.GetSortFinishTime());
    NUnit.Framework.Assert.AreEqual(10, taskAttempt.GetShufflePort());
}
/// <exception cref="System.Exception"/>
public virtual void TestHistoryParsingForFailedAttempts()
{
    Log.Info("STARTING testHistoryParsingForFailedAttempts");
    try
    {
        Configuration conf = new Configuration();
        conf.SetClass(CommonConfigurationKeysPublic.NetTopologyNodeSwitchMappingImplKey, typeof(TestJobHistoryParsing.MyResolver), typeof(DNSToSwitchMapping));
        RackResolver.Init(conf);
        MRApp app = new TestJobHistoryParsing.MRAppWithHistoryWithFailedAttempt(2, 1, true, this.GetType().FullName, true);
        app.Submit(conf);
        Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.GetContext().GetAllJobs().Values.GetEnumerator().Next();
        JobId jobId = job.GetID();
        app.WaitForState(job, JobState.Succeeded);
        // make sure all events are flushed
        app.WaitForState(Service.STATE.Stopped);
        JobHistory jobHistory = new JobHistory();
        jobHistory.Init(conf);
        HistoryFileManager.HistoryFileInfo fileInfo = jobHistory.GetJobFileInfo(jobId);
        JobHistoryParser parser;
        JobHistoryParser.JobInfo jobInfo;
        lock (fileInfo)
        {
            Path historyFilePath = fileInfo.GetHistoryFile();
            FSDataInputStream @in = null;
            FileContext fc = null;
            try
            {
                fc = FileContext.GetFileContext(conf);
                @in = fc.Open(fc.MakeQualified(historyFilePath));
            }
            catch (IOException ioe)
            {
                Log.Info("Can not open history file: " + historyFilePath, ioe);
                throw new Exception("Can not open History File");
            }
            parser = new JobHistoryParser(@in);
            jobInfo = parser.Parse();
        }
        Exception parseException = parser.GetParseException();
        NUnit.Framework.Assert.IsNull("Caught an unexpected exception " + parseException, parseException);
        int noOffailedAttempts = 0;
        IDictionary<TaskID, JobHistoryParser.TaskInfo> allTasks = jobInfo.GetAllTasks();
        foreach (Task task in job.GetTasks().Values)
        {
            JobHistoryParser.TaskInfo taskInfo = allTasks[TypeConverter.FromYarn(task.GetID())];
            foreach (TaskAttempt taskAttempt in task.GetAttempts().Values)
            {
                JobHistoryParser.TaskAttemptInfo taskAttemptInfo = taskInfo.GetAllTaskAttempts()[TypeConverter.FromYarn((taskAttempt.GetID()))];
                // Verify rack-name for all task attempts
                NUnit.Framework.Assert.AreEqual("rack-name is incorrect", taskAttemptInfo.GetRackname(), RackName);
                if (taskAttemptInfo.GetTaskStatus().Equals("FAILED"))
                {
                    noOffailedAttempts++;
                }
            }
        }
        NUnit.Framework.Assert.AreEqual("Number of failed attempts doesn't match.", 2, noOffailedAttempts);
    }
    finally
    {
        Log.Info("FINISHED testHistoryParsingForFailedAttempts");
    }
}
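Both tests above register TestJobHistoryParsing.MyResolver as the DNSToSwitchMapping before calling RackResolver.Init, and later assert that every attempt's GetRackname() equals RackName. Neither the resolver nor the RackName constant is part of this excerpt; the following is only a minimal sketch of the shape such a resolver plausibly has (every host mapped to one fixed rack), and the actual class may differ.

internal class MyResolver : DNSToSwitchMapping
{
    // Resolve every host to the same fixed rack so the rack-name assertions are deterministic.
    public virtual IList<string> Resolve(IList<string> names)
    {
        IList<string> racks = new List<string>();
        racks.Add(RackName); // RackName is assumed to be a constant defined elsewhere in TestJobHistoryParsing
        return racks;
    }

    public virtual void ReloadCachedMappings()
    {
        // Nothing is cached in this sketch.
    }
}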
public virtual string GetNodeRackName()
{
    return attemptInfo.GetRackname();
}
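GetNodeRackName above simply delegates to the attempt's parsed TaskAttemptInfo, which is exactly what TestCompletedTaskAttempt asserts. The other accessors exercised by that test are not shown in this excerpt; the sketch below assumes they follow the same delegation pattern, with completed attempts reported as finished and in the Cleanup phase. That is consistent with the assertions above but not confirmed by this excerpt.

public virtual Phase GetPhase()
{
    // A completed attempt is past all phases; the test expects Cleanup.
    return Phase.Cleanup;
}

public virtual bool IsFinished()
{
    return true;
}

public virtual long GetShuffleFinishTime()
{
    return attemptInfo.GetShuffleFinishTime();
}

public virtual long GetSortFinishTime()
{
    return attemptInfo.GetSortFinishTime();
}

public virtual int GetShufflePort()
{
    return attemptInfo.GetShufflePort();
}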
/// <exception cref="System.Exception"/>
private void CheckHistoryParsing(int numMaps, int numReduces, int numSuccessfulMaps)
{
    Configuration conf = new Configuration();
    conf.Set(MRJobConfig.UserName, Runtime.GetProperty("user.name"));
    long amStartTimeEst = Runtime.CurrentTimeMillis();
    conf.SetClass(CommonConfigurationKeysPublic.NetTopologyNodeSwitchMappingImplKey, typeof(TestJobHistoryParsing.MyResolver), typeof(DNSToSwitchMapping));
    RackResolver.Init(conf);
    MRApp app = new TestJobHistoryEvents.MRAppWithHistory(numMaps, numReduces, true, this.GetType().FullName, true);
    app.Submit(conf);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.GetContext().GetAllJobs().Values.GetEnumerator().Next();
    JobId jobId = job.GetID();
    Log.Info("JOBID is " + TypeConverter.FromYarn(jobId).ToString());
    app.WaitForState(job, JobState.Succeeded);
    // make sure all events are flushed
    app.WaitForState(Service.STATE.Stopped);
    string jobhistoryDir = JobHistoryUtils.GetHistoryIntermediateDoneDirForUser(conf);
    FileContext fc = null;
    try
    {
        fc = FileContext.GetFileContext(conf);
    }
    catch (IOException ioe)
    {
        Log.Info("Can not get FileContext", ioe);
        throw new Exception("Can not get File Context");
    }
    if (numMaps == numSuccessfulMaps)
    {
        // Verify the job summary file written to the intermediate done directory.
        string summaryFileName = JobHistoryUtils.GetIntermediateSummaryFileName(jobId);
        Path summaryFile = new Path(jobhistoryDir, summaryFileName);
        string jobSummaryString = GetJobSummary(fc, summaryFile);
        NUnit.Framework.Assert.IsNotNull(jobSummaryString);
        NUnit.Framework.Assert.IsTrue(jobSummaryString.Contains("resourcesPerMap=100"));
        NUnit.Framework.Assert.IsTrue(jobSummaryString.Contains("resourcesPerReduce=100"));
        IDictionary<string, string> jobSummaryElements = new Dictionary<string, string>();
        StringTokenizer strToken = new StringTokenizer(jobSummaryString, ",");
        while (strToken.HasMoreTokens())
        {
            string keypair = strToken.NextToken();
            jobSummaryElements[keypair.Split("=")[0]] = keypair.Split("=")[1];
        }
        NUnit.Framework.Assert.AreEqual("JobId does not match", jobId.ToString(), jobSummaryElements["jobId"]);
        NUnit.Framework.Assert.AreEqual("JobName does not match", "test", jobSummaryElements["jobName"]);
        NUnit.Framework.Assert.IsTrue("submitTime should not be 0", long.Parse(jobSummaryElements["submitTime"]) != 0);
        NUnit.Framework.Assert.IsTrue("launchTime should not be 0", long.Parse(jobSummaryElements["launchTime"]) != 0);
        NUnit.Framework.Assert.IsTrue("firstMapTaskLaunchTime should not be 0", long.Parse(jobSummaryElements["firstMapTaskLaunchTime"]) != 0);
        NUnit.Framework.Assert.IsTrue("firstReduceTaskLaunchTime should not be 0", long.Parse(jobSummaryElements["firstReduceTaskLaunchTime"]) != 0);
        NUnit.Framework.Assert.IsTrue("finishTime should not be 0", long.Parse(jobSummaryElements["finishTime"]) != 0);
        NUnit.Framework.Assert.AreEqual("Mismatch in num map slots", numSuccessfulMaps, System.Convert.ToInt32(jobSummaryElements["numMaps"]));
        NUnit.Framework.Assert.AreEqual("Mismatch in num reduce slots", numReduces, System.Convert.ToInt32(jobSummaryElements["numReduces"]));
        NUnit.Framework.Assert.AreEqual("User does not match", Runtime.GetProperty("user.name"), jobSummaryElements["user"]);
        NUnit.Framework.Assert.AreEqual("Queue does not match", "default", jobSummaryElements["queue"]);
        NUnit.Framework.Assert.AreEqual("Status does not match", "SUCCEEDED", jobSummaryElements["status"]);
    }
    JobHistory jobHistory = new JobHistory();
    jobHistory.Init(conf);
    HistoryFileManager.HistoryFileInfo fileInfo = jobHistory.GetJobFileInfo(jobId);
    JobHistoryParser.JobInfo jobInfo;
    long numFinishedMaps;
    lock (fileInfo)
    {
        Path historyFilePath = fileInfo.GetHistoryFile();
        FSDataInputStream @in = null;
        Log.Info("JobHistoryFile is: " + historyFilePath);
        try
        {
            @in = fc.Open(fc.MakeQualified(historyFilePath));
        }
        catch (IOException ioe)
        {
            Log.Info("Can not open history file: " + historyFilePath, ioe);
            throw new Exception("Can not open History File");
        }
        JobHistoryParser parser = new JobHistoryParser(@in);
        EventReader realReader = new EventReader(@in);
        EventReader reader = Org.Mockito.Mockito.Mock<EventReader>();
        if (numMaps == numSuccessfulMaps)
        {
            reader = realReader;
        }
        else
        {
            // Hack! Use a mocked EventReader whose Answer truncates the event stream
            // (a hedged sketch of _Answer_257 follows after this method).
            AtomicInteger numFinishedEvents = new AtomicInteger(0);
            Org.Mockito.Mockito.When(reader.GetNextEvent()).ThenAnswer(new _Answer_257(realReader, numFinishedEvents, numSuccessfulMaps));
        }
        jobInfo = parser.Parse(reader);
        numFinishedMaps = ComputeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
        if (numFinishedMaps != numMaps)
        {
            Exception parseException = parser.GetParseException();
            NUnit.Framework.Assert.IsNotNull("Didn't get expected parse exception", parseException);
        }
    }
    NUnit.Framework.Assert.AreEqual("Incorrect username ", Runtime.GetProperty("user.name"), jobInfo.GetUsername());
    NUnit.Framework.Assert.AreEqual("Incorrect jobName ", "test", jobInfo.GetJobname());
    NUnit.Framework.Assert.AreEqual("Incorrect queuename ", "default", jobInfo.GetJobQueueName());
    NUnit.Framework.Assert.AreEqual("incorrect conf path", "test", jobInfo.GetJobConfPath());
    NUnit.Framework.Assert.AreEqual("incorrect finishedMap ", numSuccessfulMaps, numFinishedMaps);
    NUnit.Framework.Assert.AreEqual("incorrect finishedReduces ", numReduces, jobInfo.GetFinishedReduces());
    NUnit.Framework.Assert.AreEqual("incorrect uberized ", job.IsUber(), jobInfo.GetUberized());
    IDictionary<TaskID, JobHistoryParser.TaskInfo> allTasks = jobInfo.GetAllTasks();
    int totalTasks = allTasks.Count;
    NUnit.Framework.Assert.AreEqual("total number of tasks is incorrect ", (numMaps + numReduces), totalTasks);
    // Verify aminfo
    NUnit.Framework.Assert.AreEqual(1, jobInfo.GetAMInfos().Count);
    NUnit.Framework.Assert.AreEqual(MRApp.NmHost, jobInfo.GetAMInfos()[0].GetNodeManagerHost());
    JobHistoryParser.AMInfo amInfo = jobInfo.GetAMInfos()[0];
    NUnit.Framework.Assert.AreEqual(MRApp.NmPort, amInfo.GetNodeManagerPort());
    NUnit.Framework.Assert.AreEqual(MRApp.NmHttpPort, amInfo.GetNodeManagerHttpPort());
    NUnit.Framework.Assert.AreEqual(1, amInfo.GetAppAttemptId().GetAttemptId());
    NUnit.Framework.Assert.AreEqual(amInfo.GetAppAttemptId(), amInfo.GetContainerId().GetApplicationAttemptId());
    NUnit.Framework.Assert.IsTrue(amInfo.GetStartTime() <= Runtime.CurrentTimeMillis() && amInfo.GetStartTime() >= amStartTimeEst);
    ContainerId fakeCid = MRApp.NewContainerId(-1, -1, -1, -1);
    // Assert at taskAttempt level
    foreach (JobHistoryParser.TaskInfo taskInfo in allTasks.Values)
    {
        int taskAttemptCount = taskInfo.GetAllTaskAttempts().Count;
        NUnit.Framework.Assert.AreEqual("total number of task attempts ", 1, taskAttemptCount);
        JobHistoryParser.TaskAttemptInfo taInfo = taskInfo.GetAllTaskAttempts().Values.GetEnumerator().Next();
        NUnit.Framework.Assert.IsNotNull(taInfo.GetContainerId());
        // Verify the wrong ctor is not being used. Remove after mrv1 is removed.
        NUnit.Framework.Assert.IsFalse(taInfo.GetContainerId().Equals(fakeCid));
    }
    // Deep compare Job and JobInfo
    foreach (Task task in job.GetTasks().Values)
    {
        JobHistoryParser.TaskInfo taskInfo_1 = allTasks[TypeConverter.FromYarn(task.GetID())];
        NUnit.Framework.Assert.IsNotNull("TaskInfo not found", taskInfo_1);
        foreach (TaskAttempt taskAttempt in task.GetAttempts().Values)
        {
            JobHistoryParser.TaskAttemptInfo taskAttemptInfo = taskInfo_1.GetAllTaskAttempts()[TypeConverter.FromYarn((taskAttempt.GetID()))];
            NUnit.Framework.Assert.IsNotNull("TaskAttemptInfo not found", taskAttemptInfo);
            NUnit.Framework.Assert.AreEqual("Incorrect shuffle port for task attempt", taskAttempt.GetShufflePort(), taskAttemptInfo.GetShufflePort());
            if (numMaps == numSuccessfulMaps)
            {
                NUnit.Framework.Assert.AreEqual(MRApp.NmHost, taskAttemptInfo.GetHostname());
                NUnit.Framework.Assert.AreEqual(MRApp.NmPort, taskAttemptInfo.GetPort());
                // Verify rack-name
                NUnit.Framework.Assert.AreEqual("rack-name is incorrect", taskAttemptInfo.GetRackname(), RackName);
            }
        }
    }
    // test output for HistoryViewer
    TextWriter stdps = System.Console.Out;
    try
    {
        Runtime.SetOut(new TextWriter(outContent));
        HistoryViewer viewer;
        lock (fileInfo)
        {
            viewer = new HistoryViewer(fc.MakeQualified(fileInfo.GetHistoryFile()).ToString(), conf, true);
        }
        viewer.Print();
        foreach (JobHistoryParser.TaskInfo taskInfo_1 in allTasks.Values)
        {
            string test = (taskInfo_1.GetTaskStatus() == null ? string.Empty : taskInfo_1.GetTaskStatus()) + " " + taskInfo_1.GetTaskType() + " task list for " + taskInfo_1.GetTaskId().GetJobID();
            NUnit.Framework.Assert.IsTrue(outContent.ToString().IndexOf(test) > 0);
            NUnit.Framework.Assert.IsTrue(outContent.ToString().IndexOf(taskInfo_1.GetTaskId().ToString()) > 0);
        }
    }
    finally
    {
        Runtime.SetOut(stdps);
    }
}
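CheckHistoryParsing relies on two members that are referenced but not included in this excerpt: ComputeFinishedMaps, which derives the number of finished maps from the parsed JobInfo, and _Answer_257, the mocked Answer that cuts the event stream short so the parser hits an error path when numMaps != numSuccessfulMaps. The sketches below are assumptions inferred from how they are used above, including the converted Mockito Answer/InvocationOnMock surface; they are not the actual implementations.

// Hedged sketch: when every map succeeded, trust the parsed counter; otherwise count
// the tasks that the (possibly truncated) history reports as SUCCEEDED. The real code
// may compare against a TaskState constant instead of the literal string.
private static long ComputeFinishedMaps(JobHistoryParser.JobInfo jobInfo, int numMaps, int numSuccessfulMaps)
{
    if (numMaps == numSuccessfulMaps)
    {
        return jobInfo.GetFinishedMaps();
    }
    long numFinishedMaps = 0;
    IDictionary<TaskID, JobHistoryParser.TaskInfo> taskInfos = jobInfo.GetAllTasks();
    foreach (JobHistoryParser.TaskInfo taskInfo in taskInfos.Values)
    {
        if ("SUCCEEDED".Equals(taskInfo.GetTaskStatus()))
        {
            ++numFinishedMaps;
        }
    }
    return numFinishedMaps;
}

// Hedged sketch of the mocked Answer: forward events from the real reader, count the
// task-finished events, and fail the stream once the expected number of successful maps
// has been seen, so JobHistoryParser records the parse exception the test asserts on.
private sealed class _Answer_257 : Answer<HistoryEvent>
{
    private readonly EventReader realReader;
    private readonly AtomicInteger numFinishedEvents;
    private readonly int numSuccessfulMaps;

    public _Answer_257(EventReader realReader, AtomicInteger numFinishedEvents, int numSuccessfulMaps)
    {
        this.realReader = realReader;
        this.numFinishedEvents = numFinishedEvents;
        this.numSuccessfulMaps = numSuccessfulMaps;
    }

    /// <exception cref="System.IO.IOException"/>
    public HistoryEvent Answer(InvocationOnMock invocation)
    {
        HistoryEvent @event = realReader.GetNextEvent();
        if (@event is TaskFinishedEvent)
        {
            numFinishedEvents.IncrementAndGet();
        }
        if (numFinishedEvents.Get() <= numSuccessfulMaps)
        {
            return @event;
        }
        // Simulate a truncated/corrupt history file past this point.
        throw new IOException("test");
    }
}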