/// <summary>Tests the deprecated constructors and pattern helpers of TaskID.</summary>
/// <remarks>
/// NOTE(review): the method name "TestDepricatedMethods" misspells "Deprecated",
/// but it is kept as-is so external references/test discovery still resolve.
/// </remarks>
/// <exception cref="System.IO.IOException"/>
public virtual void TestDepricatedMethods()
{
    JobID jid = new JobID();
    // Deprecated boolean ctor flag: true => map task, false => reduce task.
    // NUnit convention: expected value first, then actual.
    TaskID test = new TaskID(jid, true, 1);
    NUnit.Framework.Assert.AreEqual(TaskType.Map, test.GetTaskType());
    test = new TaskID(jid, false, 1);
    NUnit.Framework.Assert.AreEqual(TaskType.Reduce, test.GetTaskType());
    test = new TaskID("001", 1, false, 1);
    NUnit.Framework.Assert.AreEqual(TaskType.Reduce, test.GetTaskType());
    test = new TaskID("001", 1, true, 1);
    NUnit.Framework.Assert.AreEqual(TaskType.Map, test.GetTaskType());
    // Round-trip through Write/Read must preserve the string form of the id.
    ByteArrayOutputStream @out = new ByteArrayOutputStream();
    test.Write(new DataOutputStream(@out));
    TaskID ti = TaskID.Read(new DataInputStream(new ByteArrayInputStream(@out.ToByteArray())));
    NUnit.Framework.Assert.AreEqual(test.ToString(), ti.ToString());
    // Deprecated pattern-building helpers.
    NUnit.Framework.Assert.AreEqual("task_001_0001_m_000002",
        TaskID.GetTaskIDsPattern("001", 1, true, 2));
    NUnit.Framework.Assert.AreEqual("task_003_0001_m_000004",
        TaskID.GetTaskIDsPattern("003", 1, TaskType.Map, 4));
    NUnit.Framework.Assert.AreEqual("003_0001_m_000004",
        TaskID.GetTaskIDsPatternWOPrefix("003", 1, TaskType.Map, 4).ToString());
}
/// <summary>Converts a new-API (mapreduce) TaskReport into the old-API (mapred) type.</summary>
/// <param name="report">the new-API report to downgrade</param>
/// <returns>an equivalent old-API TaskReport carrying the same fields</returns>
internal static Org.Apache.Hadoop.Mapred.TaskReport Downgrade(Org.Apache.Hadoop.Mapreduce.TaskReport report)
{
    // Downgrade the nested id and counters first, then copy the scalar fields over.
    TaskID oldTaskId = TaskID.Downgrade(report.GetTaskID());
    Counters oldCounters = Counters.Downgrade(report.GetTaskCounters());
    return new Org.Apache.Hadoop.Mapred.TaskReport(oldTaskId, report.GetProgress(),
        report.GetState(), report.GetDiagnostics(), report.GetCurrentStatus(),
        report.GetStartTime(), report.GetFinishTime(), oldCounters);
}
/// <summary>
/// Builds a pattern string for task attempt ids (without the "attempt" prefix):
/// the task-id pattern, the separator, then either the attempt number or a
/// "[0-9]*" wildcard.
/// </summary>
/// <param name="jtIdentifier">the jobtracker identifier</param>
/// <param name="jobId">the job number</param>
/// <param name="type">the task type</param>
/// <param name="taskId">the task number</param>
/// <param name="attemptId">the attempt number, or null to match any attempt</param>
/// <returns>the pattern builder</returns>
internal static StringBuilder GetTaskAttemptIDsPatternWOPrefix(string jtIdentifier
    , int jobId, TaskType type, int taskId, int? attemptId)
{
    StringBuilder builder = new StringBuilder();
    // BUG FIX: the original parameter was a non-nullable int, so
    // "attemptId != null" was always true and the "[0-9]*" wildcard branch was
    // dead (a Java Integer mistranslation). Using int? restores the intended
    // "null matches any attempt" semantics and stays source-compatible, since
    // int arguments implicitly convert to int?.
    builder.Append(TaskID.GetTaskIDsPatternWOPrefix(jtIdentifier, jobId, type, taskId))
        .Append(Separator)
        .Append(attemptId.HasValue ? attemptId.Value.ToString() : "[0-9]*");
    return builder;
}
/// <summary>Downgrade a new TaskAttemptID to an old one</summary>
/// <param name="old">the new id</param>
/// <returns>either old or a new TaskAttemptID constructed to match old</returns>
public static Org.Apache.Hadoop.Mapred.TaskAttemptID Downgrade(Org.Apache.Hadoop.Mapreduce.TaskAttemptID old)
{
    // Already the old-API type: return it unchanged.
    if (old is Org.Apache.Hadoop.Mapred.TaskAttemptID)
    {
        return (Org.Apache.Hadoop.Mapred.TaskAttemptID)old;
    }
    // Otherwise rebuild from the downgraded TaskID plus the attempt number.
    return new Org.Apache.Hadoop.Mapred.TaskAttemptID(
        TaskID.Downgrade(old.GetTaskID()), old.GetId());
}
/// <summary>
/// Verifies that a downgraded TaskID inside a TaskReport renders the expected
/// "task_..." string form through both GetTaskId() and GetTaskID().ToString().
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
public virtual void TestTaskID()
{
    JobID jobid = new JobID("1014873536921", 6);
    TaskID tid = new TaskID(jobid, TaskType.Map, 0);
    TaskID tid1 = TaskID.Downgrade(tid);
    TaskReport treport = new TaskReport(tid1, 0.0f, JobStatus.State.Failed.ToString(),
        null, TIPStatus.Failed, 100, 100, new Counters());
    // NUnit convention: expected value first, then actual (was reversed).
    NUnit.Framework.Assert.AreEqual("task_1014873536921_0006_m_000000", treport.GetTaskId());
    NUnit.Framework.Assert.AreEqual("task_1014873536921_0006_m_000000", treport.GetTaskID().ToString());
}
/// <summary>
/// Creates the OutputCommitter for the job, resolving it through the new
/// (mapreduce) OutputFormat or the old (mapred) configuration key depending
/// on <paramref name="newApiCommitter"/>.
/// </summary>
/// <param name="newApiCommitter">true to resolve the committer via the new-API OutputFormat</param>
/// <param name="jobId">the job the committer belongs to</param>
/// <param name="conf">the job configuration</param>
/// <returns>the instantiated OutputCommitter</returns>
/// <exception cref="System.Exception"/>
private OutputCommitter CreateOutputCommitter(bool newApiCommitter, JobID jobId, Configuration conf)
{
    // Single definition of the old-API committer-class config key (was duplicated).
    const string CommitterClassKey = "mapred.output.committer.class";
    OutputCommitter committer = null;
    LocalJobRunner.Log.Info("OutputCommitter set in config " + conf.Get(CommitterClassKey));
    if (newApiCommitter)
    {
        // New API: the OutputFormat supplies the committer, using a synthetic
        // attempt 0 of map task 0 as the context.
        TaskID taskId = new TaskID(jobId, TaskType.Map, 0);
        TaskAttemptID taskAttemptID = new TaskAttemptID(taskId, 0);
        TaskAttemptContext taskContext = new TaskAttemptContextImpl(conf, taskAttemptID);
        OutputFormat outputFormat = ReflectionUtils.NewInstance(taskContext.GetOutputFormatClass(), conf);
        committer = outputFormat.GetOutputCommitter(taskContext);
    }
    else
    {
        // Old API: instantiate the configured committer class directly,
        // defaulting to FileOutputCommitter.
        committer = ReflectionUtils.NewInstance(conf.GetClass<OutputCommitter>(CommitterClassKey,
            typeof(FileOutputCommitter)), conf);
    }
    LocalJobRunner.Log.Info("OutputCommitter is " + committer.GetType().FullName);
    return committer;
}
/// <summary>
/// Not supported by this implementation; always throws.
/// </summary>
/// <param name="mapId">a map task id (unused)</param>
/// <param name="size">the size of the file (unused)</param>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.NotSupportedException">always thrown</exception>
public override Path GetInputFileForWrite(TaskID mapId, long size)
{
    throw new NotSupportedException();
}
/// <summary>
/// Constructs a TaskAttemptID object from given
/// <see cref="TaskID"/>
/// .
/// </summary>
/// <remarks>Delegates straight to the base-class constructor; no extra state is kept here.</remarks>
/// <param name="taskId">TaskID that this task belongs to</param>
/// <param name="id">the task attempt number</param>
public TaskAttemptID(TaskID taskId, int id)
    : base(taskId, id)
{
}
/// <summary>Create a local reduce input file name.</summary>
/// <param name="mapId">a map task id</param>
/// <param name="size">the size of the file</param>
/// <returns>path</returns>
/// <exception cref="System.IO.IOException"/>
public override Path GetInputFileForWrite(TaskID mapId, long size)
{
    // Build the relative file name from the output dir constant and the map id,
    // then let the local dir allocator pick a volume with enough space.
    string relativePath = string.Format(ReduceInputFileFormatString,
        MRJobConfig.Output, mapId.GetId());
    return lDirAlloc.GetLocalPathForWrite(relativePath, size, GetConf());
}
/// <summary>The id of the task.</summary>
/// <returns>the base task id downgraded to the old-API TaskID type</returns>
public override TaskID GetTaskID()
{
    var newApiTaskId = base.GetTaskID();
    return TaskID.Downgrade(newApiTaskId);
}
/// <summary>The string of the task id.</summary>
/// <returns>the string form of the downgraded (old-API) task id</returns>
public override string GetTaskId()
{
    var downgraded = TaskID.Downgrade(base.GetTaskID());
    return downgraded.ToString();
}
/// <summary>Creates a new TaskReport object</summary>
/// <param name="taskid">id of the task this report describes</param>
/// <param name="progress">task progress value</param>
/// <param name="state">task state string</param>
/// <param name="diagnostics">diagnostic messages for the task</param>
/// <param name="currentStatus">current TIP status of the task</param>
/// <param name="startTime">task start time</param>
/// <param name="finishTime">task finish time</param>
/// <param name="counters">task counters; defensively copied via new Counters(counters)</param>
internal TaskReport(TaskID taskid, float progress, string state, string[] diagnostics
    , TIPStatus currentStatus, long startTime, long finishTime, Counters counters)
    : base(taskid, progress, state, diagnostics, currentStatus, startTime, finishTime
    , new Counters(counters))
{
}
/// <summary>
/// Creates a new TaskReport without a current TIP status; delegates to the
/// full constructor, passing null for currentStatus.
/// </summary>
/// <param name="taskid">id of the task this report describes</param>
/// <param name="progress">task progress value</param>
/// <param name="state">task state string</param>
/// <param name="diagnostics">diagnostic messages for the task</param>
/// <param name="startTime">task start time</param>
/// <param name="finishTime">task finish time</param>
/// <param name="counters">task counters</param>
internal TaskReport(TaskID taskid, float progress, string state, string[] diagnostics
    , long startTime, long finishTime, Counters counters)
    : this(taskid, progress, state, diagnostics, null, startTime, finishTime, counters
    )
{
}
/// <summary>Create a local reduce input file name.</summary>
/// <remarks>Implementations choose where and how the reduce input file is allocated.</remarks>
/// <param name="mapId">a map task id</param>
/// <param name="size">the size of the file</param>
/// <returns>path</returns>
/// <exception cref="System.IO.IOException"/>
public abstract Path GetInputFileForWrite(TaskID mapId, long size);
/// <summary>
/// End-to-end redirect check: starts RM, AM, and history services, fetches
/// counters through the client, then exercises every Job call across an AM
/// restart and finally verifies the history server is consulted once the AM
/// is gone for good.
/// </summary>
public virtual void TestRedirect()
{
    Configuration conf = new YarnConfiguration();
    conf.Set(MRConfig.FrameworkName, MRConfig.YarnFrameworkName);
    conf.Set(YarnConfiguration.RmAddress, Rmaddress);
    conf.Set(JHAdminConfig.MrHistoryAddress, Hshostaddress);
    // Start the RM.
    TestClientRedirect.RMService rmService = new TestClientRedirect.RMService(this, "test");
    rmService.Init(conf);
    rmService.Start();
    // Start the AM.
    TestClientRedirect.AMService amService = new TestClientRedirect.AMService(this);
    amService.Init(conf);
    amService.Start(conf);
    // Start the HS.
    TestClientRedirect.HistoryService historyService = new TestClientRedirect.HistoryService(this);
    historyService.Init(conf);
    historyService.Start(conf);
    Log.Info("services started");
    // First fetch goes through the live AM.
    Cluster cluster = new Cluster(conf);
    JobID jobID = new JobID("201103121733", 1);
    Counters counters = cluster.GetJob(jobID).GetCounters();
    ValidateCounters(counters);
    NUnit.Framework.Assert.IsTrue(amContact);
    Log.Info("Sleeping for 5 seconds before stop for" + " the client socket to not get EOF immediately..");
    Sharpen.Thread.Sleep(5000);
    // Bring down the AM service.
    amService.Stop();
    Log.Info("Sleeping for 5 seconds after stop for" + " the server to exit cleanly..");
    Sharpen.Thread.Sleep(5000);
    amRestarting = true;
    // Same client; results are returned from the fake (not started) job.
    counters = cluster.GetJob(jobID).GetCounters();
    NUnit.Framework.Assert.AreEqual(0, counters.CountCounters());
    Job job = cluster.GetJob(jobID);
    TaskID taskId = new TaskID(jobID, TaskType.Map, 0);
    TaskAttemptID tId = new TaskAttemptID(taskId, 0);
    // Invoke all methods to check that no exception is thrown while restarting.
    job.KillJob();
    job.KillTask(tId);
    job.FailTask(tId);
    job.GetTaskCompletionEvents(0, 100);
    job.GetStatus();
    job.GetTaskDiagnostics(tId);
    job.GetTaskReports(TaskType.Map);
    job.GetTrackingURL();
    // Bring a fresh AM back up and verify it is contacted again.
    amRestarting = false;
    amService = new TestClientRedirect.AMService(this);
    amService.Init(conf);
    amService.Start(conf);
    amContact = false;
    // reset before re-checking
    counters = cluster.GetJob(jobID).GetCounters();
    ValidateCounters(counters);
    NUnit.Framework.Assert.IsTrue(amContact);
    // Stop the AM. It is not even restarting. So it should be treated as
    // completed.
    amService.Stop();
    // Same client; the request should now be served by the history server.
    counters = cluster.GetJob(jobID).GetCounters();
    ValidateCounters(counters);
    NUnit.Framework.Assert.IsTrue(hsContact);
    rmService.Stop();
    historyService.Stop();
}
/// <summary>Create a local reduce input file name.</summary>
/// <param name="mapId">a map task id</param>
/// <param name="size">the size of the file</param>
/// <returns>path</returns>
/// <exception cref="System.IO.IOException"/>
public override Path GetInputFileForWrite(TaskID mapId, long size)
{
    // Name the file under this attempt's output dir, then ask the allocator
    // for a local path with enough room.
    string fileName = string.Format(ReduceInputFileFormatString,
        GetAttemptOutputDir().ToString(), mapId.GetId());
    return lDirAlloc.GetLocalPathForWrite(fileName, size, conf);
}