/// <summary>Builds a fully-stubbed mock Job for web-UI block tests.</summary>
/// <remarks>
/// All getters return fixed, deterministic values (3 total / 2 completed maps,
/// 2 total / 1 completed reduces, SUCCEEDED state) so rendered pages can be
/// asserted against literal text.
/// </remarks>
/// <returns>A Mockito mock of <c>Job</c> with every getter stubbed.</returns>
private Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job GetJob()
{
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job>();
    JobId jobId = new JobIdPBImpl();
    // Application id is seeded with the current time so repeated runs do not collide.
    ApplicationId appId = ApplicationIdPBImpl.NewInstance(Runtime.CurrentTimeMillis(), 4);
    jobId.SetAppId(appId);
    jobId.SetId(1);
    Org.Mockito.Mockito.When(job.GetID()).ThenReturn(jobId);
    JobReport report = Org.Mockito.Mockito.Mock<JobReport>();
    Org.Mockito.Mockito.When(report.GetStartTime()).ThenReturn(100010L);
    Org.Mockito.Mockito.When(report.GetFinishTime()).ThenReturn(100015L);
    Org.Mockito.Mockito.When(job.GetReport()).ThenReturn(report);
    Org.Mockito.Mockito.When(job.GetName()).ThenReturn("JobName");
    Org.Mockito.Mockito.When(job.GetUserName()).ThenReturn("UserName");
    Org.Mockito.Mockito.When(job.GetQueueName()).ThenReturn("QueueName");
    Org.Mockito.Mockito.When(job.GetState()).ThenReturn(JobState.Succeeded);
    Org.Mockito.Mockito.When(job.GetTotalMaps()).ThenReturn(3);
    Org.Mockito.Mockito.When(job.GetCompletedMaps()).ThenReturn(2);
    Org.Mockito.Mockito.When(job.GetTotalReduces()).ThenReturn(2);
    // The original stubbed GetCompletedReduces() twice with the same value; one stub suffices.
    Org.Mockito.Mockito.When(job.GetCompletedReduces()).ThenReturn(1);
    return job;
}
/// <summary>
/// Runs a 2-map / 1-reduce MRApp to completion with history logging enabled,
/// then replays the logged events through a fresh JobHistory and verifies the
/// parsed job: counts, user, task/attempt breakdown, and final state.
/// </summary>
public virtual void TestHistoryEvents()
{
    Configuration conf = new Configuration();
    MRApp app = new TestJobHistoryEvents.MRAppWithHistory(2, 1, true, this.GetType().FullName, true);
    app.Submit(conf);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.GetContext().GetAllJobs().Values.GetEnumerator().Next();
    JobId jobId = job.GetID();
    Log.Info("JOBID is " + TypeConverter.FromYarn(jobId).ToString());
    app.WaitForState(job, JobState.Succeeded);
    // Make sure all events are flushed before reading them back.
    app.WaitForState(Service.STATE.Stopped);
    /*
     * Use HistoryContext to read logged events and verify the number of
     * completed maps.
     */
    HistoryContext context = new JobHistory();
    // Test start and stop states of the JobHistory service itself.
    ((JobHistory)context).Init(conf);
    ((JobHistory)context).Start();
    NUnit.Framework.Assert.IsTrue(context.GetStartTime() > 0);
    NUnit.Framework.Assert.AreEqual(((JobHistory)context).GetServiceState(), Service.STATE.Started);
    // Get the job before stopping JobHistory; the parsed snapshot stays valid afterwards.
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job parsedJob = context.GetJob(jobId);
    // Stop JobHistory and confirm the service transitioned.
    ((JobHistory)context).Stop();
    NUnit.Framework.Assert.AreEqual(((JobHistory)context).GetServiceState(), Service.STATE.Stopped);
    NUnit.Framework.Assert.AreEqual("CompletedMaps not correct", 2, parsedJob.GetCompletedMaps());
    NUnit.Framework.Assert.AreEqual(Runtime.GetProperty("user.name"), parsedJob.GetUserName());
    IDictionary<TaskId, Task> tasks = parsedJob.GetTasks();
    NUnit.Framework.Assert.AreEqual("No of tasks not correct", 3, tasks.Count);
    foreach (Task task in tasks.Values)
    {
        VerifyTask(task);
    }
    IDictionary<TaskId, Task> maps = parsedJob.GetTasks(TaskType.Map);
    NUnit.Framework.Assert.AreEqual("No of maps not correct", 2, maps.Count);
    IDictionary<TaskId, Task> reduces = parsedJob.GetTasks(TaskType.Reduce);
    NUnit.Framework.Assert.AreEqual("No of reduces not correct", 1, reduces.Count);
    NUnit.Framework.Assert.AreEqual("CompletedReduce not correct", 1, parsedJob.GetCompletedReduces());
    // Message fixed: original read "Job state not currect" and the literal was split across lines.
    NUnit.Framework.Assert.AreEqual("Job state not correct", JobState.Succeeded, parsedJob.GetState());
}
/// <summary>
/// Submits a 2-map / 2-reduce MRApp, waits for SUCCEEDED, and checks that the
/// job's user name matches the current process user.
/// </summary>
public virtual void TestMapReduce()
{
    MRApp mrApp = new MRApp(2, 2, true, this.GetType().FullName, true);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job submittedJob = mrApp.Submit(new Configuration());
    mrApp.WaitForState(submittedJob, JobState.Succeeded);
    mrApp.VerifyCompleted();
    string expectedUser = Runtime.GetProperty("user.name");
    NUnit.Framework.Assert.AreEqual(expectedUser, submittedJob.GetUserName());
}
/// <summary>
/// Populates a history-server job info DTO from a <c>Job</c>.
/// </summary>
/// <remarks>
/// Copies identity, counts, times, and names from the job/report. When the job
/// is a <c>CompletedJob</c>, also aggregates per-attempt statistics, uber mode,
/// diagnostics text, and ACL entries.
/// </remarks>
/// <param name="job">Source job; attempt statistics are only computed for completed jobs.</param>
public JobInfo(Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job)
{
    this.id = MRApps.ToString(job.GetID());
    JobReport report = job.GetReport();
    this.mapsTotal = job.GetTotalMaps();
    this.mapsCompleted = job.GetCompletedMaps();
    this.reducesTotal = job.GetTotalReduces();
    this.reducesCompleted = job.GetCompletedReduces();
    this.submitTime = report.GetSubmitTime();
    this.startTime = report.GetStartTime();
    this.finishTime = report.GetFinishTime();
    this.name = job.GetName().ToString();
    this.queue = job.GetQueueName();
    this.user = job.GetUserName();
    this.state = job.GetState().ToString();
    this.acls = new AList<ConfEntryInfo>();
    if (job is CompletedJob)
    {
        // Uppercase 'L' suffix: the original used '0l', which is easily misread as '01'.
        avgMapTime = 0L;
        avgReduceTime = 0L;
        avgShuffleTime = 0L;
        avgMergeTime = 0L;
        failedReduceAttempts = 0;
        killedReduceAttempts = 0;
        successfulReduceAttempts = 0;
        failedMapAttempts = 0;
        killedMapAttempts = 0;
        successfulMapAttempts = 0;
        CountTasksAndAttempts(job);
        this.uberized = job.IsUber();
        // Concatenate all diagnostic strings into a single display string.
        this.diagnostics = string.Empty;
        IList<string> diagnostics = job.GetDiagnostics();
        if (diagnostics != null && !diagnostics.IsEmpty())
        {
            StringBuilder b = new StringBuilder();
            foreach (string diag in diagnostics)
            {
                b.Append(diag);
            }
            this.diagnostics = b.ToString();
        }
        // Expose each job ACL as a name/value config entry.
        IDictionary<JobACL, AccessControlList> allacls = job.GetJobACLs();
        if (allacls != null)
        {
            foreach (KeyValuePair<JobACL, AccessControlList> entry in allacls)
            {
                this.acls.AddItem(new ConfEntryInfo(entry.Key.GetAclName(), entry.Value.GetAclString()));
            }
        }
    }
}
/// <summary>
/// Populates an AM web-services job info DTO from a <c>Job</c>.
/// </summary>
/// <param name="job">Source job to copy fields from.</param>
/// <param name="hasAccess">
/// When true the caller passed the job ACL check, so restricted fields
/// (diagnostics, attempt counts, uber flag, ACL list) are filled in as well.
/// </param>
public JobInfo(Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job, bool hasAccess)
{
    // Fields below are ok for any user to see.
    this.id = MRApps.ToString(job.GetID());
    JobReport jobReport = job.GetReport();
    this.startTime = jobReport.GetStartTime();
    this.finishTime = jobReport.GetFinishTime();
    this.elapsedTime = Times.Elapsed(this.startTime, this.finishTime);
    if (this.elapsedTime == -1)
    {
        // Times.Elapsed signals "not finished yet" with -1; report 0 instead.
        this.elapsedTime = 0;
    }
    this.name = job.GetName().ToString();
    this.user = job.GetUserName();
    this.state = job.GetState();
    this.mapsTotal = job.GetTotalMaps();
    this.mapsCompleted = job.GetCompletedMaps();
    this.mapProgress = jobReport.GetMapProgress() * 100;
    this.mapProgressPercent = StringHelper.Percent(jobReport.GetMapProgress());
    this.reducesTotal = job.GetTotalReduces();
    this.reducesCompleted = job.GetCompletedReduces();
    this.reduceProgress = jobReport.GetReduceProgress() * 100;
    this.reduceProgressPercent = StringHelper.Percent(jobReport.GetReduceProgress());
    this.acls = new AList<ConfEntryInfo>();
    // Fields below should only be seen if acls allow.
    if (hasAccess)
    {
        this.diagnostics = string.Empty;
        CountTasksAndAttempts(job);
        this.uberized = job.IsUber();
        IList<string> diagList = job.GetDiagnostics();
        if (diagList != null && !diagList.IsEmpty())
        {
            // Flatten the diagnostic messages into one display string.
            StringBuilder sb = new StringBuilder();
            foreach (string message in diagList)
            {
                sb.Append(message);
            }
            this.diagnostics = sb.ToString();
        }
        IDictionary<JobACL, AccessControlList> aclMap = job.GetJobACLs();
        if (aclMap != null)
        {
            foreach (KeyValuePair<JobACL, AccessControlList> acl in aclMap)
            {
                this.acls.AddItem(new ConfEntryInfo(acl.Key.GetAclName(), acl.Value.GetAclString()));
            }
        }
    }
}
/// <summary>
/// REST endpoint: returns the application-master attempts for the given job id.
/// </summary>
/// <param name="jid">Job id string as it appears in the URL path.</param>
/// <returns>An <c>AMAttemptsInfo</c> holding one entry per AM attempt.</returns>
public virtual AMAttemptsInfo GetJobAttempts(string jid)
{
    Init();
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = AMWebServices.GetJobFromJobIdString(jid, ctx);
    AMAttemptsInfo attemptsInfo = new AMAttemptsInfo();
    string jobIdString = MRApps.ToString(job.GetID());
    string baseUri = uriInfo.GetBaseUri().ToString();
    foreach (AMInfo info in job.GetAMInfos())
    {
        AMAttemptInfo attemptInfo = new AMAttemptInfo(info, jobIdString, job.GetUserName(), baseUri, webapp.Name());
        attemptsInfo.Add(attemptInfo);
    }
    return attemptsInfo;
}
/// <summary>
/// Wraps a (typically mocked) <c>Job</c> in a <c>CompletedJob</c> shell: the base
/// constructor is fed a fresh Configuration plus the wrapped job's id and user,
/// with null history file, confFile, and aclsMgr arguments.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public MockCompletedJob(Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job)
    : base(new Configuration(), job.GetID(), null, true, job.GetUserName(), null, null)
{
    // Keep the delegate so overridden getters can forward to it.
    this.job = job;
}
/// <summary>Forwards to the wrapped job's user name.</summary>
public override string GetUserName()
{
    return job.GetUserName();
}
/// <summary>Forwards to the mock job's user name.</summary>
public virtual string GetUserName()
{
    return mockJob.GetUserName();
}
/// <summary>
/// Renders the AttemptsBlock for a mocked reduce-task attempt and verifies that
/// the output contains the attempt id, state, timing columns, and that the
/// attempt's state string is HTML/JS-escaped rather than emitted raw.
/// </summary>
public virtual void TestAttemptsBlock()
{
    AppContext ctx = Org.Mockito.Mockito.Mock<AppContext>();
    AppForTest app = new AppForTest(ctx);
    Task task = GetTask(0);
    IDictionary<TaskAttemptId, TaskAttempt> attempts = new Dictionary<TaskAttemptId, TaskAttempt>();
    TaskAttempt attempt = Org.Mockito.Mockito.Mock<TaskAttempt>();
    TaskAttemptId taId = new TaskAttemptIdPBImpl();
    taId.SetId(0);
    taId.SetTaskId(task.GetID());
    Org.Mockito.Mockito.When(attempt.GetID()).ThenReturn(taId);
    Org.Mockito.Mockito.When(attempt.GetNodeHttpAddress()).ThenReturn("Node address");
    ApplicationId appId = ApplicationIdPBImpl.NewInstance(0, 5);
    ApplicationAttemptId appAttemptId = ApplicationAttemptIdPBImpl.NewInstance(appId, 1);
    ContainerId containerId = ContainerIdPBImpl.NewContainerId(appAttemptId, 1);
    Org.Mockito.Mockito.When(attempt.GetAssignedContainerID()).ThenReturn(containerId);
    Org.Mockito.Mockito.When(attempt.GetAssignedContainerMgrAddress()).ThenReturn("assignedContainerMgrAddress");
    Org.Mockito.Mockito.When(attempt.GetNodeRackName()).ThenReturn("nodeRackName");
    // Fixed timestamps that the rendered page is asserted against below.
    long taStartTime = 100002L;
    long taFinishTime = 100012L;
    long taShuffleFinishTime = 100010L;
    long taSortFinishTime = 100011L;
    TaskAttemptState taState = TaskAttemptState.Succeeded;
    Org.Mockito.Mockito.When(attempt.GetLaunchTime()).ThenReturn(taStartTime);
    Org.Mockito.Mockito.When(attempt.GetFinishTime()).ThenReturn(taFinishTime);
    Org.Mockito.Mockito.When(attempt.GetShuffleFinishTime()).ThenReturn(taShuffleFinishTime);
    Org.Mockito.Mockito.When(attempt.GetSortFinishTime()).ThenReturn(taSortFinishTime);
    Org.Mockito.Mockito.When(attempt.GetState()).ThenReturn(taState);
    TaskAttemptReport taReport = Org.Mockito.Mockito.Mock<TaskAttemptReport>();
    Org.Mockito.Mockito.When(taReport.GetStartTime()).ThenReturn(taStartTime);
    Org.Mockito.Mockito.When(taReport.GetFinishTime()).ThenReturn(taFinishTime);
    Org.Mockito.Mockito.When(taReport.GetShuffleFinishTime()).ThenReturn(taShuffleFinishTime);
    Org.Mockito.Mockito.When(taReport.GetSortFinishTime()).ThenReturn(taSortFinishTime);
    Org.Mockito.Mockito.When(taReport.GetContainerId()).ThenReturn(containerId);
    Org.Mockito.Mockito.When(taReport.GetProgress()).ThenReturn(1.0f);
    // State string deliberately contains markup and a newline to exercise escaping.
    Org.Mockito.Mockito.When(taReport.GetStateString()).ThenReturn("Processed 128/128 records <p> \n");
    Org.Mockito.Mockito.When(taReport.GetTaskAttemptState()).ThenReturn(taState);
    Org.Mockito.Mockito.When(taReport.GetDiagnosticInfo()).ThenReturn(string.Empty);
    Org.Mockito.Mockito.When(attempt.GetReport()).ThenReturn(taReport);
    attempts[taId] = attempt;
    Org.Mockito.Mockito.When(task.GetAttempts()).ThenReturn(attempts);
    app.SetTask(task);
    Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job>();
    Org.Mockito.Mockito.When(job.GetUserName()).ThenReturn("User");
    app.SetJob(job);
    TestBlocks.AttemptsBlockForTest block = new TestBlocks.AttemptsBlockForTest(this, app);
    block.AddParameter(AMParams.TaskType, "r");
    PrintWriter pWriter = new PrintWriter(data);
    HtmlBlock.Block html = new BlockForTest(new TestBlocks.HtmlBlockForTest(this), pWriter, 0, false);
    block.Render(html);
    pWriter.Flush();
    // Rendered output should contain information about the attempt.
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("0 attempt_0_0001_r_000000_0"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("SUCCEEDED"));
    // Raw state string must NOT appear; the JS-escaped form must.
    NUnit.Framework.Assert.IsFalse(data.ToString().Contains("Processed 128/128 records <p> \n"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("Processed 128\\/128 records <p> \\n"));
    // NOTE(review): the next two asserts were corrupted by a secret-scrubber
    // ("******" replaced the close of one assert and the open of the next);
    // reconstructed to match the pattern of the timestamp asserts below — verify
    // against upstream Hadoop TestBlocks.testAttemptsBlock.
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("_0005_01_000001:attempt_0_0001_r_000000_0:User:"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100002"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100010"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100011"));
    NUnit.Framework.Assert.IsTrue(data.ToString().Contains("100012"));
}