public virtual void TestTaskIdXML()
{
    WebResource r = Resource();
    IDictionary<JobId, Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job> jobsMap = appContext.GetAllJobs();
    foreach (JobId id in jobsMap.Keys)
    {
        string jobId = MRApps.ToString(id);
        foreach (Task task in jobsMap[id].GetTasks().Values)
        {
            string tid = MRApps.ToString(task.GetID());
            ClientResponse response = r.Path("ws").Path("v1").Path("mapreduce").Path("jobs")
                .Path(jobId).Path("tasks").Path(tid).Accept(MediaType.ApplicationXml)
                .Get<ClientResponse>();
            NUnit.Framework.Assert.AreEqual(MediaType.ApplicationXmlType, response.GetType());
            string xml = response.GetEntity<string>();
            DocumentBuilderFactory dbf = DocumentBuilderFactory.NewInstance();
            DocumentBuilder db = dbf.NewDocumentBuilder();
            InputSource @is = new InputSource();
            @is.SetCharacterStream(new StringReader(xml));
            Document dom = db.Parse(@is);
            NodeList nodes = dom.GetElementsByTagName("task");
            for (int i = 0; i < nodes.GetLength(); i++)
            {
                Element element = (Element)nodes.Item(i);
                VerifyAMSingleTaskXML(element, task);
            }
        }
    }
}
public TaskInfo(Task task)
{
    TaskType ttype = task.GetType();
    this.type = ttype.ToString();
    TaskReport report = task.GetReport();
    this.startTime = report.GetStartTime();
    this.finishTime = report.GetFinishTime();
    this.state = report.GetTaskState();
    this.elapsedTime = Times.Elapsed(this.startTime, this.finishTime, this.state == TaskState.Running);
    if (this.elapsedTime == -1)
    {
        this.elapsedTime = 0;
    }
    this.progress = report.GetProgress() * 100;
    this.status = report.GetStatus();
    this.id = MRApps.ToString(task.GetID());
    this.taskNum = task.GetID().GetId();
    this.successful = GetSuccessfulAttempt(task);
    if (successful != null)
    {
        this.successfulAttempt = MRApps.ToString(successful.GetID());
    }
    else
    {
        this.successfulAttempt = string.Empty;
    }
}
public virtual void TestTasksXML()
{
    WebResource r = Resource();
    IDictionary<JobId, Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job> jobsMap = appContext.GetAllJobs();
    foreach (JobId id in jobsMap.Keys)
    {
        string jobId = MRApps.ToString(id);
        ClientResponse response = r.Path("ws").Path("v1").Path("mapreduce").Path("jobs")
            .Path(jobId).Path("tasks").Accept(MediaType.ApplicationXml).Get<ClientResponse>();
        NUnit.Framework.Assert.AreEqual(MediaType.ApplicationXmlType, response.GetType());
        string xml = response.GetEntity<string>();
        DocumentBuilderFactory dbf = DocumentBuilderFactory.NewInstance();
        DocumentBuilder db = dbf.NewDocumentBuilder();
        InputSource @is = new InputSource();
        @is.SetCharacterStream(new StringReader(xml));
        Document dom = db.Parse(@is);
        NodeList tasks = dom.GetElementsByTagName("tasks");
        NUnit.Framework.Assert.AreEqual("incorrect number of elements", 1, tasks.GetLength());
        NodeList task = dom.GetElementsByTagName("task");
        VerifyAMTaskXML(task, jobsMap[id]);
    }
}
public virtual void SetUp()
{
    AppContext context = Org.Mockito.Mockito.Mock<AppContext>();
    Org.Mockito.Mockito.When(context.GetApplicationID()).ThenReturn(ApplicationId.NewInstance(0, 0));
    Org.Mockito.Mockito.When(context.GetApplicationName()).ThenReturn("AppName");
    Org.Mockito.Mockito.When(context.GetUser()).ThenReturn("User");
    Org.Mockito.Mockito.When(context.GetStartTime()).ThenReturn(Runtime.CurrentTimeMillis());
    job = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job>();
    Task task = Org.Mockito.Mockito.Mock<Task>();
    Org.Mockito.Mockito.When(job.GetTask(Any<TaskId>())).ThenReturn(task);
    JobId jobID = MRApps.ToJobID("job_01_01");
    Org.Mockito.Mockito.When(context.GetJob(jobID)).ThenReturn(job);
    Org.Mockito.Mockito.When(job.CheckAccess(Any<UserGroupInformation>(), Any<JobACL>())).ThenReturn(true);
    Org.Apache.Hadoop.Mapreduce.V2.App.Webapp.App app =
        new Org.Apache.Hadoop.Mapreduce.V2.App.Webapp.App(context);
    Configuration configuration = new Configuration();
    ctx = Org.Mockito.Mockito.Mock<Controller.RequestContext>();
    appController = new AppControllerForTest(app, configuration, ctx);
    appController.GetProperty()[AMParams.JobId] = "job_01_01";
    appController.GetProperty()[AMParams.TaskId] = "task_01_01_m01_01";
}
/// <exception cref="System.IO.IOException"/> /// <exception cref="System.Exception"/> public virtual void TestMRAppMasterMaxAppAttempts() { // No matter what's the maxAppAttempt or attempt id, the isLastRetry always // equals to false bool[] expectedBools = new bool[] { false, false, false }; string applicationAttemptIdStr = "appattempt_1317529182569_0004_000002"; string containerIdStr = "container_1317529182569_0004_000002_1"; string userName = "******"; ApplicationAttemptId applicationAttemptId = ConverterUtils.ToApplicationAttemptId (applicationAttemptIdStr); ContainerId containerId = ConverterUtils.ToContainerId(containerIdStr); JobConf conf = new JobConf(); conf.Set(MRJobConfig.MrAmStagingDir, stagingDir); FilePath stagingDir = new FilePath(MRApps.GetStagingAreaDir(conf, userName).ToString ()); stagingDir.Mkdirs(); for (int i = 0; i < expectedBools.Length; ++i) { MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId, containerId , "host", -1, -1, Runtime.CurrentTimeMillis(), false, true); MRAppMaster.InitAndStartAppMaster(appMaster, conf, userName); NUnit.Framework.Assert.AreEqual("isLastAMRetry is correctly computed.", expectedBools [i], appMaster.IsLastAMRetry()); } }
/// <exception cref="Org.Codehaus.Jettison.Json.JSONException"/> public virtual void VerifyHsJobTaskAttemptCounters(JSONObject info, TaskAttempt att ) { NUnit.Framework.Assert.AreEqual("incorrect number of elements", 2, info.Length()); WebServicesTestUtils.CheckStringMatch("id", MRApps.ToString(att.GetID()), info.GetString ("id")); // just do simple verification of fields - not data is correct // in the fields JSONArray counterGroups = info.GetJSONArray("taskAttemptCounterGroup"); for (int i = 0; i < counterGroups.Length(); i++) { JSONObject counterGroup = counterGroups.GetJSONObject(i); string name = counterGroup.GetString("counterGroupName"); NUnit.Framework.Assert.IsTrue("name not set", (name != null && !name.IsEmpty())); JSONArray counters = counterGroup.GetJSONArray("counter"); for (int j = 0; j < counters.Length(); j++) { JSONObject counter = counters.GetJSONObject(j); string counterName = counter.GetString("name"); NUnit.Framework.Assert.IsTrue("name not set", (counterName != null && !counterName .IsEmpty())); long value = counter.GetLong("value"); NUnit.Framework.Assert.IsTrue("value >= 0", value >= 0); } } }
public virtual void TestTaskAttemptIdCounters()
{
    WebResource r = Resource();
    IDictionary<JobId, Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job> jobsMap = appContext.GetAllJobs();
    foreach (JobId id in jobsMap.Keys)
    {
        string jobId = MRApps.ToString(id);
        foreach (Task task in jobsMap[id].GetTasks().Values)
        {
            string tid = MRApps.ToString(task.GetID());
            foreach (TaskAttempt att in task.GetAttempts().Values)
            {
                TaskAttemptId attemptid = att.GetID();
                string attid = MRApps.ToString(attemptid);
                ClientResponse response = r.Path("ws").Path("v1").Path("history").Path("mapreduce")
                    .Path("jobs").Path(jobId).Path("tasks").Path(tid).Path("attempts").Path(attid)
                    .Path("counters").Accept(MediaType.ApplicationJson).Get<ClientResponse>();
                NUnit.Framework.Assert.AreEqual(MediaType.ApplicationJsonType, response.GetType());
                JSONObject json = response.GetEntity<JSONObject>();
                NUnit.Framework.Assert.AreEqual("incorrect number of elements", 1, json.Length());
                JSONObject info = json.GetJSONObject("jobTaskAttemptCounters");
                VerifyHsJobTaskAttemptCounters(info, att);
            }
        }
    }
}
public virtual void TestTaskAttemptIdXMLCounters()
{
    WebResource r = Resource();
    IDictionary<JobId, Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job> jobsMap = appContext.GetAllJobs();
    foreach (JobId id in jobsMap.Keys)
    {
        string jobId = MRApps.ToString(id);
        foreach (Task task in jobsMap[id].GetTasks().Values)
        {
            string tid = MRApps.ToString(task.GetID());
            foreach (TaskAttempt att in task.GetAttempts().Values)
            {
                TaskAttemptId attemptid = att.GetID();
                string attid = MRApps.ToString(attemptid);
                ClientResponse response = r.Path("ws").Path("v1").Path("history").Path("mapreduce")
                    .Path("jobs").Path(jobId).Path("tasks").Path(tid).Path("attempts").Path(attid)
                    .Path("counters").Accept(MediaType.ApplicationXml).Get<ClientResponse>();
                NUnit.Framework.Assert.AreEqual(MediaType.ApplicationXmlType, response.GetType());
                string xml = response.GetEntity<string>();
                DocumentBuilderFactory dbf = DocumentBuilderFactory.NewInstance();
                DocumentBuilder db = dbf.NewDocumentBuilder();
                InputSource @is = new InputSource();
                @is.SetCharacterStream(new StringReader(xml));
                Document dom = db.Parse(@is);
                NodeList nodes = dom.GetElementsByTagName("jobTaskAttemptCounters");
                VerifyHsTaskCountersXML(nodes, att);
            }
        }
    }
}
/// <exception cref="Org.Codehaus.Jettison.Json.JSONException"/> public virtual void VerifyHsTaskAttempts(JSONObject json, Task task) { NUnit.Framework.Assert.AreEqual("incorrect number of elements", 1, json.Length()); JSONObject attempts = json.GetJSONObject("taskAttempts"); NUnit.Framework.Assert.AreEqual("incorrect number of elements", 1, json.Length()); JSONArray arr = attempts.GetJSONArray("taskAttempt"); foreach (TaskAttempt att in task.GetAttempts().Values) { TaskAttemptId id = att.GetID(); string attid = MRApps.ToString(id); bool found = false; for (int i = 0; i < arr.Length(); i++) { JSONObject info = arr.GetJSONObject(i); if (attid.Matches(info.GetString("id"))) { found = true; VerifyHsTaskAttempt(info, att, task.GetType()); } } NUnit.Framework.Assert.IsTrue("task attempt with id: " + attid + " not in web service output" , found); } }
/// <summary>Convert a job id string to an actual job and handle all the error checking.</summary>
/// <exception cref="Org.Apache.Hadoop.Yarn.Webapp.NotFoundException"/>
public static Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job GetJobFromJobIdString(string jid, AppContext appCtx)
{
    JobId jobId;
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job;
    try
    {
        jobId = MRApps.ToJobID(jid);
    }
    catch (YarnRuntimeException e)
    {
        // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here
        // anymore but keeping it for now just in case other stuff starts failing.
        // Also, the webservice should ideally return BadRequest (HTTP:400) when
        // the id is malformed instead of NotFound (HTTP:404). The webserver on
        // top of which AMWebServices is built seems to automatically do that for
        // unhandled exceptions.
        throw new NotFoundException(e.Message);
    }
    catch (ArgumentException e)
    {
        throw new NotFoundException(e.Message);
    }
    if (jobId == null)
    {
        throw new NotFoundException("job, " + jid + ", is not found");
    }
    job = appCtx.GetJob(jobId);
    if (job == null)
    {
        throw new NotFoundException("job, " + jid + ", is not found");
    }
    return job;
}
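// Illustrative sketch, not part of the original source: a hypothetical REST
// method showing the intended calling pattern for GetJobFromJobIdString. The
// method name GetJobInfoSketch and the appCtx field are assumptions; only the
// helper call and the JobInfo type come from the surrounding code. A malformed
// or unknown id surfaces as NotFoundException, which the web layer maps to 404.
public virtual JobInfo GetJobInfoSketch(string jid)
{
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job =
        AMWebServices.GetJobFromJobIdString(jid, appCtx);
    // hasAccess is hardwired to true here purely for illustration
    return new JobInfo(job, true);
}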
/// <exception cref="System.Exception"/> public virtual Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job Submit(Configuration conf , bool mapSpeculative, bool reduceSpeculative) { string user = conf.Get(MRJobConfig.UserName, UserGroupInformation.GetCurrentUser( ).GetShortUserName()); conf.Set(MRJobConfig.UserName, user); conf.Set(MRJobConfig.MrAmStagingDir, testAbsPath.ToString()); conf.SetBoolean(MRJobConfig.MrAmCreateJhIntermediateBaseDir, true); // TODO: fix the bug where the speculator gets events with // not-fully-constructed objects. For now, disable speculative exec conf.SetBoolean(MRJobConfig.MapSpeculative, mapSpeculative); conf.SetBoolean(MRJobConfig.ReduceSpeculative, reduceSpeculative); Init(conf); Start(); DefaultMetricsSystem.Shutdown(); Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = GetContext().GetAllJobs().Values .GetEnumerator().Next(); if (assignedQueue != null) { job.SetQueueName(assignedQueue); } // Write job.xml string jobFile = MRApps.GetJobFile(conf, user, TypeConverter.FromYarn(job.GetID() )); Log.Info("Writing job conf to " + jobFile); new FilePath(jobFile).GetParentFile().Mkdirs(); conf.WriteXml(new FileOutputStream(jobFile)); return(job); }
public virtual void VerifyHsTaskCountersXML(NodeList nodes, TaskAttempt att)
{
    for (int i = 0; i < nodes.GetLength(); i++)
    {
        Element element = (Element)nodes.Item(i);
        WebServicesTestUtils.CheckStringMatch("id", MRApps.ToString(att.GetID()),
            WebServicesTestUtils.GetXmlString(element, "id"));
        // Just do simple verification that the fields are set - not that the
        // data in the fields is correct.
        NodeList groups = element.GetElementsByTagName("taskAttemptCounterGroup");
        for (int j = 0; j < groups.GetLength(); j++)
        {
            Element counters = (Element)groups.Item(j);
            NUnit.Framework.Assert.IsNotNull("should have counters in the web service info", counters);
            string name = WebServicesTestUtils.GetXmlString(counters, "counterGroupName");
            NUnit.Framework.Assert.IsTrue("name not set", (name != null && !name.IsEmpty()));
            NodeList counterArr = counters.GetElementsByTagName("counter");
            for (int z = 0; z < counterArr.GetLength(); z++)
            {
                Element counter = (Element)counterArr.Item(z);
                string counterName = WebServicesTestUtils.GetXmlString(counter, "name");
                NUnit.Framework.Assert.IsTrue("counter name not set",
                    (counterName != null && !counterName.IsEmpty()));
                long value = WebServicesTestUtils.GetXmlLong(counter, "value");
                NUnit.Framework.Assert.IsTrue("value not >= 0", value >= 0);
            }
        }
    }
}
// acls not being checked since we are using mock job instead of CompletedJob
public static void VerifyHsJobGeneric(Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job,
    string id, string user, string name, string state, string queue, long startTime,
    long finishTime, int mapsTotal, int mapsCompleted, int reducesTotal, int reducesCompleted)
{
    JobReport report = job.GetReport();
    WebServicesTestUtils.CheckStringMatch("id", MRApps.ToString(job.GetID()), id);
    WebServicesTestUtils.CheckStringMatch("user", job.GetUserName().ToString(), user);
    WebServicesTestUtils.CheckStringMatch("name", job.GetName(), name);
    WebServicesTestUtils.CheckStringMatch("state", job.GetState().ToString(), state);
    WebServicesTestUtils.CheckStringMatch("queue", job.GetQueueName(), queue);
    NUnit.Framework.Assert.AreEqual("startTime incorrect", report.GetStartTime(), startTime);
    NUnit.Framework.Assert.AreEqual("finishTime incorrect", report.GetFinishTime(), finishTime);
    NUnit.Framework.Assert.AreEqual("mapsTotal incorrect", job.GetTotalMaps(), mapsTotal);
    NUnit.Framework.Assert.AreEqual("mapsCompleted incorrect", job.GetCompletedMaps(), mapsCompleted);
    NUnit.Framework.Assert.AreEqual("reducesTotal incorrect", job.GetTotalReduces(), reducesTotal);
    NUnit.Framework.Assert.AreEqual("reducesCompleted incorrect", job.GetCompletedReduces(), reducesCompleted);
}
/// <exception cref="System.Exception"/> public virtual void TestCommandLine() { TestMapReduceChildJVM.MyMRApp app = new TestMapReduceChildJVM.MyMRApp(1, 0, true, this.GetType().FullName, true); Configuration conf = new Configuration(); conf.SetBoolean(MRConfig.MapreduceAppSubmissionCrossPlatform, true); Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.Submit(conf); app.WaitForState(job, JobState.Succeeded); app.VerifyCompleted(); NUnit.Framework.Assert.AreEqual("[" + MRApps.CrossPlatformify("JAVA_HOME") + "/bin/java" + " -Djava.net.preferIPv4Stack=true" + " -Dhadoop.metrics.log.level=WARN" + " -Xmx200m -Djava.io.tmpdir=" + MRApps.CrossPlatformify("PWD") + "/tmp" + " -Dlog4j.configuration=container-log4j.properties" + " -Dyarn.app.container.log.dir=<LOG_DIR>" + " -Dyarn.app.container.log.filesize=0" + " -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog" + " org.apache.hadoop.mapred.YarnChild 127.0.0.1" + " 54321" + " attempt_0_0000_m_000000_0" + " 0" + " 1><LOG_DIR>/stdout" + " 2><LOG_DIR>/stderr ]" , app.myCommandLine); NUnit.Framework.Assert.IsTrue("HADOOP_ROOT_LOGGER not set for job", app.cmdEnvironment .Contains("HADOOP_ROOT_LOGGER")); NUnit.Framework.Assert.AreEqual("INFO,console", app.cmdEnvironment["HADOOP_ROOT_LOGGER" ]); NUnit.Framework.Assert.IsTrue("HADOOP_CLIENT_OPTS not set for job", app.cmdEnvironment .Contains("HADOOP_CLIENT_OPTS")); NUnit.Framework.Assert.AreEqual(string.Empty, app.cmdEnvironment["HADOOP_CLIENT_OPTS" ]); }
public virtual void TestAMStandardEnv()
{
    string AdminLibPath = "foo";
    string UserLibPath = "bar";
    string UserShell = "shell";
    JobConf jobConf = new JobConf();
    jobConf.Set(MRJobConfig.MrAmAdminUserEnv, "LD_LIBRARY_PATH=" + AdminLibPath);
    jobConf.Set(MRJobConfig.MrAmEnv, "LD_LIBRARY_PATH=" + UserLibPath);
    jobConf.Set(MRJobConfig.MapredAdminUserShell, UserShell);
    YARNRunner yarnRunner = new YARNRunner(jobConf);
    ApplicationSubmissionContext appSubCtx = BuildSubmitContext(yarnRunner, jobConf);
    // make sure PWD is first in the lib path
    ContainerLaunchContext clc = appSubCtx.GetAMContainerSpec();
    IDictionary<string, string> env = clc.GetEnvironment();
    string libPath = env[ApplicationConstants.Environment.LdLibraryPath.ToString()];
    NUnit.Framework.Assert.IsNotNull("LD_LIBRARY_PATH not set", libPath);
    string cps = jobConf.GetBoolean(MRConfig.MapreduceAppSubmissionCrossPlatform,
        MRConfig.DefaultMapreduceAppSubmissionCrossPlatform)
        ? ApplicationConstants.ClassPathSeparator : FilePath.pathSeparator;
    NUnit.Framework.Assert.AreEqual("Bad AM LD_LIBRARY_PATH setting",
        MRApps.CrossPlatformifyMREnv(conf, ApplicationConstants.Environment.Pwd) +
        cps + AdminLibPath + cps + UserLibPath, libPath);
    // make sure SHELL is set
    string shell = env[ApplicationConstants.Environment.Shell.ToString()];
    NUnit.Framework.Assert.IsNotNull("SHELL not set", shell);
    NUnit.Framework.Assert.AreEqual("Bad SHELL setting", UserShell, shell);
}
/// <summary>Ensure that a TASK_ID was passed into the page.</summary>
public virtual void RequireTask()
{
    if ($(TaskId).IsEmpty())
    {
        BadRequest("missing task ID");
        throw new RuntimeException("missing task ID");
    }
    TaskId taskID = MRApps.ToTaskID($(TaskId));
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.context.GetJob(taskID.GetJobId());
    app.SetJob(job);
    if (app.GetJob() == null)
    {
        NotFound(MRApps.ToString(taskID.GetJobId()));
        throw new RuntimeException("Not Found: " + $(JobId));
    }
    else
    {
        app.SetTask(app.GetJob().GetTask(taskID));
        if (app.GetTask() == null)
        {
            NotFound($(TaskId));
            throw new RuntimeException("Not Found: " + $(TaskId));
        }
    }
    if (!CheckAccess(job))
    {
        AccessDenied("User " + Request().GetRemoteUser() + " does not have " +
            " permission to view job " + $(JobId));
        throw new RuntimeException("Access denied: User " + Request().GetRemoteUser() +
            " does not have permission to view job " + $(JobId));
    }
}
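// Illustrative sketch, not part of the original source: how a page body might
// use the precondition above. RenderTaskSketch is a hypothetical method; the
// point is simply that RequireTask() throws before any task accessor is
// reached when the TASK_ID parameter is absent, unresolvable, or not viewable
// by the caller.
public virtual void RenderTaskSketch()
{
    RequireTask();                              // throws RuntimeException on failure
    Task task = app.GetTask();                  // non-null once RequireTask() returns
    string tid = MRApps.ToString(task.GetID()); // safe to render from here on
}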
public virtual TasksInfo GetJobTasks(HttpServletRequest hsr, string jid, string type)
{
    Init();
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = AMWebServices.GetJobFromJobIdString(jid, ctx);
    CheckAccess(job, hsr);
    TasksInfo allTasks = new TasksInfo();
    foreach (Task task in job.GetTasks().Values)
    {
        TaskType ttype = null;
        if (type != null && !type.IsEmpty())
        {
            try
            {
                ttype = MRApps.TaskType(type);
            }
            catch (YarnRuntimeException)
            {
                throw new BadRequestException("tasktype must be either m or r");
            }
        }
        if (ttype != null && task.GetType() != ttype)
        {
            continue;
        }
        allTasks.Add(new TaskInfo(task));
    }
    return allTasks;
}
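// Illustrative sketch, not part of the original source: GetJobTasks serves
// requests of the form GET ws/v1/mapreduce/jobs/{jobid}/tasks?type=m. A
// hypothetical test-style caller, mirroring the Jersey client pattern used in
// the tests above (the job id value is only an example):
public virtual void QueryMapTasksSketch()
{
    WebResource r = Resource(); // assumes the same test helper used elsewhere
    ClientResponse response = r.Path("ws").Path("v1").Path("mapreduce").Path("jobs")
        .Path("job_1317529182569_0004").Path("tasks")
        .QueryParam("type", "m") // "m" selects map tasks, "r" reduce tasks;
                                 // anything else yields BadRequestException (400)
        .Accept(MediaType.ApplicationJson).Get<ClientResponse>();
}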
protected override void Render(HtmlBlock.Block html)
{
    string rmweb = $(AMParams.RmWeb);
    Hamlet.DIV<Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet> nav = html.Div("#nav")
        .H3("Cluster").Ul()
            .Li().A(Url(rmweb, "cluster", "cluster"), "About").()
            .Li().A(Url(rmweb, "cluster", "apps"), "Applications").()
            .Li().A(Url(rmweb, "cluster", "scheduler"), "Scheduler").().()
        .H3("Application").Ul()
            .Li().A(Url("app/info"), "About").()
            .Li().A(Url("app"), "Jobs").().();
    if (app.GetJob() != null)
    {
        string jobid = MRApps.ToString(app.GetJob().GetID());
        IList<AMInfo> amInfos = app.GetJob().GetAMInfos();
        AMInfo thisAmInfo = amInfos[amInfos.Count - 1];
        string nodeHttpAddress = thisAmInfo.GetNodeManagerHost() + ":" +
            thisAmInfo.GetNodeManagerHttpPort();
        nav.H3("Job").Ul()
            .Li().A(Url("job", jobid), "Overview").()
            .Li().A(Url("jobcounters", jobid), "Counters").()
            .Li().A(Url("conf", jobid), "Configuration").()
            .Li().A(Url("tasks", jobid, "m"), "Map tasks").()
            .Li().A(Url("tasks", jobid, "r"), "Reduce tasks").()
            .Li().A(".logslink", Url(MRWebAppUtil.GetYARNWebappScheme(), nodeHttpAddress,
                "node", "containerlogs", thisAmInfo.GetContainerId().ToString(),
                app.GetJob().GetUserName()), "AM Logs").().();
        if (app.GetTask() != null)
        {
            string taskid = MRApps.ToString(app.GetTask().GetID());
            nav.H3("Task").Ul()
                .Li().A(Url("task", taskid), "Task Overview").()
                .Li().A(Url("taskcounters", taskid), "Counters").().();
        }
    }
    nav.H3("Tools").Ul()
        .Li().A("/conf", "Configuration").()
        .Li().A("/logs", "Local logs").()
        .Li().A("/stacks", "Server stacks").()
        .Li().A("/jmx?qry=Hadoop:*", "Server metrics").().().();
}
/// <summary>Ensure that a JOB_ID was passed into the page.</summary>
public virtual void RequireJob()
{
    if ($(JobId).IsEmpty())
    {
        BadRequest("missing job ID");
        throw new RuntimeException("Bad Request: Missing job ID");
    }
    JobId jobID = MRApps.ToJobID($(JobId));
    app.SetJob(app.context.GetJob(jobID));
    if (app.GetJob() == null)
    {
        NotFound($(JobId));
        throw new RuntimeException("Not Found: " + $(JobId));
    }
    /* check for acl access */
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.context.GetJob(jobID);
    if (!CheckAccess(job))
    {
        AccessDenied("User " + Request().GetRemoteUser() + " does not have " +
            " permission to view job " + $(JobId));
        throw new RuntimeException("Access denied: User " + Request().GetRemoteUser() +
            " does not have permission to view job " + $(JobId));
    }
}
public virtual void TestDeletionofStaging()
{
    conf.Set(MRJobConfig.MapreduceJobDir, stagingJobDir);
    fs = Org.Mockito.Mockito.Mock<FileSystem>();
    Org.Mockito.Mockito.When(fs.Delete(Matchers.Any<Path>(), Matchers.AnyBoolean())).ThenReturn(true);
    // Staging dir exists
    string user = UserGroupInformation.GetCurrentUser().GetShortUserName();
    Path stagingDir = MRApps.GetStagingAreaDir(conf, user);
    Org.Mockito.Mockito.When(fs.Exists(stagingDir)).ThenReturn(true);
    ApplicationId appId = ApplicationId.NewInstance(Runtime.CurrentTimeMillis(), 0);
    ApplicationAttemptId attemptId = ApplicationAttemptId.NewInstance(appId, 1);
    JobId jobid = recordFactory.NewRecordInstance<JobId>();
    jobid.SetAppId(appId);
    ContainerAllocator mockAlloc = Org.Mockito.Mockito.Mock<ContainerAllocator>();
    NUnit.Framework.Assert.IsTrue(MRJobConfig.DefaultMrAmMaxAttempts > 1);
    MRAppMaster appMaster = new TestStagingCleanup.TestMRApp(this, attemptId, mockAlloc,
        JobStateInternal.Running, MRJobConfig.DefaultMrAmMaxAttempts);
    appMaster.Init(conf);
    appMaster.Start();
    appMaster.ShutDownJob();
    // Test whether notifyIsLastAMRetry was called
    NUnit.Framework.Assert.AreEqual(true,
        ((TestStagingCleanup.TestMRApp)appMaster).GetTestIsLastAMRetry());
    Org.Mockito.Mockito.Verify(fs).Delete(stagingJobPath, true);
}
/// <returns>
/// The end of the JS map that is the jquery datatable config for the
/// attempts table.
/// </returns>
private string AttemptsTableInit()
{
    TaskType type = null;
    string symbol = $(AMParams.TaskType);
    if (!symbol.IsEmpty())
    {
        type = MRApps.TaskType(symbol);
    }
    else
    {
        TaskId taskID = MRApps.ToTaskID($(AMParams.TaskId));
        type = taskID.GetTaskType();
    }
    // The logs column (4) is not searchable because it includes a container ID
    // that could pollute searches. Column numbers differ for maps and reduces,
    // and the table sorts by id upon page load.
    StringBuilder b = JQueryUI.TableInit()
        .Append(", 'aaData': attemptsTableData")
        .Append(", bDeferRender: true")
        .Append(", bProcessing: true")
        .Append("\n,aoColumnDefs:[\n")
        .Append("\n{'aTargets': [ 4 ]")
        .Append(", 'bSearchable': false }")
        .Append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
        .Append(", 'mRender': parseHadoopAttemptID }")
        .Append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
        .Append(type == TaskType.Reduce ? ", 7, 8" : string.Empty)
        .Append(" ], 'mRender': renderHadoopDate }")
        .Append("\n, {'sType':'numeric', 'aTargets': [")
        .Append(type == TaskType.Reduce ? "9, 10, 11, 12" : "7")
        .Append(" ], 'mRender': renderHadoopElapsedTime }]")
        .Append("\n, aaSorting: [[0, 'asc']]")
        .Append("}");
    return b.ToString();
}
// FIXME:
// Disabled this test because currently, when the shutdown hook is triggered
// at the last retry in the RM's view, cleanup is not performed. This should
// be supported once YARN-2261 is completed.
// @Test (timeout = 30000)
/// <exception cref="System.IO.IOException"/>
public virtual void TestDeletionofStagingOnKillLastTry()
{
    conf.Set(MRJobConfig.MapreduceJobDir, stagingJobDir);
    fs = Org.Mockito.Mockito.Mock<FileSystem>();
    Org.Mockito.Mockito.When(fs.Delete(Matchers.Any<Path>(), Matchers.AnyBoolean())).ThenReturn(true);
    // Staging dir exists
    string user = UserGroupInformation.GetCurrentUser().GetShortUserName();
    Path stagingDir = MRApps.GetStagingAreaDir(conf, user);
    Org.Mockito.Mockito.When(fs.Exists(stagingDir)).ThenReturn(true);
    ApplicationId appId = ApplicationId.NewInstance(Runtime.CurrentTimeMillis(), 0);
    ApplicationAttemptId attemptId = ApplicationAttemptId.NewInstance(appId, 1);
    JobId jobid = recordFactory.NewRecordInstance<JobId>();
    jobid.SetAppId(appId);
    ContainerAllocator mockAlloc = Org.Mockito.Mockito.Mock<ContainerAllocator>();
    MRAppMaster appMaster = new TestStagingCleanup.TestMRApp(this, attemptId, mockAlloc); // no retry
    appMaster.Init(conf);
    NUnit.Framework.Assert.IsTrue("appMaster.isLastAMRetry() is false", appMaster.IsLastAMRetry());
    // Simulate the process being killed
    MRAppMaster.MRAppMasterShutdownHook hook = new MRAppMaster.MRAppMasterShutdownHook(appMaster);
    hook.Run();
    NUnit.Framework.Assert.IsTrue("MRAppMaster isn't stopped",
        appMaster.IsInState(Service.STATE.Stopped));
    Org.Mockito.Mockito.Verify(fs).Delete(stagingJobPath, true);
}
/// <exception cref="System.Exception"/> protected override void ServiceInit(Configuration conf) { try { //Create the staging directory if it does not exist string user = UserGroupInformation.GetCurrentUser().GetShortUserName(); Path stagingDir = MRApps.GetStagingAreaDir(conf, user); FileSystem fs = GetFileSystem(conf); fs.Mkdirs(stagingDir); } catch (Exception e) { throw new YarnRuntimeException("Error creating staging dir", e); } base.ServiceInit(conf); if (this.clusterInfo != null) { GetContext().GetClusterInfo().SetMaxContainerCapability(this.clusterInfo.GetMaxContainerCapability ()); } else { GetContext().GetClusterInfo().SetMaxContainerCapability(Resource.NewInstance(10240 , 1)); } }
/// <exception cref="System.Exception"/> private void TestReduceCommandLine(Configuration conf) { TestMapReduceChildJVM.MyMRApp app = new TestMapReduceChildJVM.MyMRApp(0, 1, true, this.GetType().FullName, true); conf.SetBoolean(MRConfig.MapreduceAppSubmissionCrossPlatform, true); Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.Submit(conf); app.WaitForState(job, JobState.Succeeded); app.VerifyCompleted(); long shuffleLogSize = conf.GetLong(MRJobConfig.ShuffleLogKb, 0L) * 1024L; int shuffleBackups = conf.GetInt(MRJobConfig.ShuffleLogBackups, 0); string appenderName = shuffleLogSize > 0L && shuffleBackups > 0 ? "shuffleCRLA" : "shuffleCLA"; NUnit.Framework.Assert.AreEqual("[" + MRApps.CrossPlatformify("JAVA_HOME") + "/bin/java" + " -Djava.net.preferIPv4Stack=true" + " -Dhadoop.metrics.log.level=WARN" + " -Xmx200m -Djava.io.tmpdir=" + MRApps.CrossPlatformify("PWD") + "/tmp" + " -Dlog4j.configuration=container-log4j.properties" + " -Dyarn.app.container.log.dir=<LOG_DIR>" + " -Dyarn.app.container.log.filesize=0" + " -Dhadoop.root.logger=INFO,CLA -Dhadoop.root.logfile=syslog" + " -Dyarn.app.mapreduce.shuffle.logger=INFO," + appenderName + " -Dyarn.app.mapreduce.shuffle.logfile=syslog.shuffle" + " -Dyarn.app.mapreduce.shuffle.log.filesize=" + shuffleLogSize + " -Dyarn.app.mapreduce.shuffle.log.backups=" + shuffleBackups + " org.apache.hadoop.mapred.YarnChild 127.0.0.1" + " 54321" + " attempt_0_0000_r_000000_0" + " 0" + " 1><LOG_DIR>/stdout" + " 2><LOG_DIR>/stderr ]", app.myCommandLine); NUnit.Framework.Assert.IsTrue("HADOOP_ROOT_LOGGER not set for job", app.cmdEnvironment .Contains("HADOOP_ROOT_LOGGER")); NUnit.Framework.Assert.AreEqual("INFO,console", app.cmdEnvironment["HADOOP_ROOT_LOGGER" ]); NUnit.Framework.Assert.IsTrue("HADOOP_CLIENT_OPTS not set for job", app.cmdEnvironment .Contains("HADOOP_CLIENT_OPTS")); NUnit.Framework.Assert.AreEqual(string.Empty, app.cmdEnvironment["HADOOP_CLIENT_OPTS" ]); }
public static IDictionary<string, string> GetJobParams(AppContext appContext)
{
    JobId jobId = appContext.GetAllJobs().GetEnumerator().Next().Key;
    IDictionary<string, string> @params = new Dictionary<string, string>();
    @params[AMParams.JobId] = MRApps.ToString(jobId);
    return @params;
}
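// Illustrative sketch, not part of the original source: GetJobParams is meant
// to seed a view's property map in tests, pairing with the AMParams keys used
// in SetUp above. GetJobParamsUsageSketch is a hypothetical method; appContext
// is assumed to be a (possibly mocked) AppContext that already holds a job.
public static void GetJobParamsUsageSketch(AppContext appContext)
{
    IDictionary<string, string> @params = GetJobParams(appContext);
    string jobId = @params[AMParams.JobId]; // e.g. "job_01_01"
}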
/*
 * (non-Javadoc)
 * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
 */
protected override void Render(HtmlBlock.Block html)
{
    string jid = $(AMParams.JobId);
    if (jid.IsEmpty())
    {
        html.P().("Sorry, can't do anything without a JobID.").();
        return;
    }
    JobId jobID = MRApps.ToJobID(jid);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = appContext.GetJob(jobID);
    if (job == null)
    {
        html.P().("Sorry, ", jid, " not found.").();
        return;
    }
    Path confPath = job.GetConfFile();
    try
    {
        ConfInfo info = new ConfInfo(job);
        html.Div().H3(confPath.ToString()).();
        Hamlet.TBODY<Hamlet.TABLE<Org.Apache.Hadoop.Yarn.Webapp.Hamlet.Hamlet>> tbody = html
            .Table("#conf").Thead().Tr()
                .Th(JQueryUI.Th, "key")
                .Th(JQueryUI.Th, "value")
                .Th(JQueryUI.Th, "source chain").().().Tbody();
        // Configuration properties table
        foreach (ConfEntryInfo entry in info.GetProperties())
        {
            StringBuilder buffer = new StringBuilder();
            string[] sources = entry.GetSource();
            // Skip the last entry, because it is always the same HDFS file, and
            // output the rest in reverse order so the most recent is output first
            bool first = true;
            for (int i = (sources.Length - 2); i >= 0; i--)
            {
                if (!first)
                {
                    // \u2B05 is a leftwards arrow
                    buffer.Append(" \u2B05 ");
                }
                first = false;
                buffer.Append(sources[i]);
            }
            tbody.Tr().Td(entry.GetName()).Td(entry.GetValue()).Td(buffer.ToString()).();
        }
        tbody.().Tfoot().Tr()
            .Th().Input("search_init").$type(HamletSpec.InputType.text)
                .$name("key").$value("key").().()
            .Th().Input("search_init").$type(HamletSpec.InputType.text)
                .$name("value").$value("value").().()
            .Th().Input("search_init").$type(HamletSpec.InputType.text)
                .$name("source chain").$value("source chain").().()
            .().().();
    }
    catch (IOException e)
    {
        Log.Error("Error while reading " + confPath, e);
        html.P().("Sorry, got an error while reading the conf file. ", confPath);
    }
}
/// <exception cref="System.IO.IOException"/> /// <exception cref="System.Exception"/> public virtual string GetStagingAreaDir() { // Path path = new Path(MRJobConstants.JOB_SUBMIT_DIR); string user = UserGroupInformation.GetCurrentUser().GetShortUserName(); Path path = MRApps.GetStagingAreaDir(conf, user); Log.Debug("getStagingAreaDir: dir=" + path); return(path.ToString()); }
/// <summary>Gets the configured directory prefix for in-progress history files.</summary>
/// <param name="conf">the configuration for the job</param>
/// <param name="jobId">the id of the job the history file is for.</param>
/// <returns>A string representation of the prefix.</returns>
/// <exception cref="System.IO.IOException"/>
public static string GetConfiguredHistoryStagingDirPrefix(Configuration conf, string jobId)
{
    string user = UserGroupInformation.GetCurrentUser().GetShortUserName();
    Path stagingPath = MRApps.GetStagingAreaDir(conf, user);
    Path path = new Path(stagingPath, jobId);
    string logDir = path.ToString();
    return EnsurePathInDefaultFileSystem(logDir, conf);
}
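// Illustrative sketch, not part of the original source: the job id value is
// only an example and the Configuration is assumed to carry the usual
// staging-dir setting (MRJobConfig.MrAmStagingDir). The returned prefix is
// roughly <staging-root>/<user>/.staging/<jobId>, qualified against the
// default file system by EnsurePathInDefaultFileSystem.
public static void HistoryStagingPrefixSketch()
{
    Configuration conf = new Configuration();
    string prefix = GetConfiguredHistoryStagingDirPrefix(conf, "job_1317529182569_0004");
    Log.Debug("history staging prefix=" + prefix); // Log assumed from the enclosing class
}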
public JobInfo(Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job)
{
    this.id = MRApps.ToString(job.GetID());
    JobReport report = job.GetReport();
    this.mapsTotal = job.GetTotalMaps();
    this.mapsCompleted = job.GetCompletedMaps();
    this.reducesTotal = job.GetTotalReduces();
    this.reducesCompleted = job.GetCompletedReduces();
    this.submitTime = report.GetSubmitTime();
    this.startTime = report.GetStartTime();
    this.finishTime = report.GetFinishTime();
    this.name = job.GetName().ToString();
    this.queue = job.GetQueueName();
    this.user = job.GetUserName();
    this.state = job.GetState().ToString();
    this.acls = new AList<ConfEntryInfo>();
    if (job is CompletedJob)
    {
        avgMapTime = 0L;
        avgReduceTime = 0L;
        avgShuffleTime = 0L;
        avgMergeTime = 0L;
        failedReduceAttempts = 0;
        killedReduceAttempts = 0;
        successfulReduceAttempts = 0;
        failedMapAttempts = 0;
        killedMapAttempts = 0;
        successfulMapAttempts = 0;
        CountTasksAndAttempts(job);
        this.uberized = job.IsUber();
        this.diagnostics = string.Empty;
        IList<string> diagnostics = job.GetDiagnostics();
        if (diagnostics != null && !diagnostics.IsEmpty())
        {
            StringBuilder b = new StringBuilder();
            foreach (string diag in diagnostics)
            {
                b.Append(diag);
            }
            this.diagnostics = b.ToString();
        }
        IDictionary<JobACL, AccessControlList> allacls = job.GetJobACLs();
        if (allacls != null)
        {
            foreach (KeyValuePair<JobACL, AccessControlList> entry in allacls)
            {
                this.acls.AddItem(new ConfEntryInfo(entry.Key.GetAclName(), entry.Value.GetAclString()));
            }
        }
    }
}
public JobInfo(Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job, bool hasAccess)
{
    // The fields set unconditionally below are ok for any user to see; the
    // diagnostics, task/attempt counts, and acls guarded by hasAccess should
    // only be seen if the acls allow it.
    this.id = MRApps.ToString(job.GetID());
    JobReport report = job.GetReport();
    this.startTime = report.GetStartTime();
    this.finishTime = report.GetFinishTime();
    this.elapsedTime = Times.Elapsed(this.startTime, this.finishTime);
    if (this.elapsedTime == -1)
    {
        this.elapsedTime = 0;
    }
    this.name = job.GetName().ToString();
    this.user = job.GetUserName();
    this.state = job.GetState();
    this.mapsTotal = job.GetTotalMaps();
    this.mapsCompleted = job.GetCompletedMaps();
    this.mapProgress = report.GetMapProgress() * 100;
    this.mapProgressPercent = StringHelper.Percent(report.GetMapProgress());
    this.reducesTotal = job.GetTotalReduces();
    this.reducesCompleted = job.GetCompletedReduces();
    this.reduceProgress = report.GetReduceProgress() * 100;
    this.reduceProgressPercent = StringHelper.Percent(report.GetReduceProgress());
    this.acls = new AList<ConfEntryInfo>();
    if (hasAccess)
    {
        this.diagnostics = string.Empty;
        CountTasksAndAttempts(job);
        this.uberized = job.IsUber();
        IList<string> diagnostics = job.GetDiagnostics();
        if (diagnostics != null && !diagnostics.IsEmpty())
        {
            StringBuilder b = new StringBuilder();
            foreach (string diag in diagnostics)
            {
                b.Append(diag);
            }
            this.diagnostics = b.ToString();
        }
        IDictionary<JobACL, AccessControlList> allacls = job.GetJobACLs();
        if (allacls != null)
        {
            foreach (KeyValuePair<JobACL, AccessControlList> entry in allacls)
            {
                this.acls.AddItem(new ConfEntryInfo(entry.Key.GetAclName(), entry.Value.GetAclString()));
            }
        }
    }
}