/// <summary>Creates a new TaskReport object.</summary>
/// <param name="taskid">ID of the task this report describes; stored verbatim.</param>
/// <param name="progress">progress value reported for the task; stored verbatim.</param>
/// <param name="state">state string for the task; stored verbatim.</param>
/// <param name="diagnostics">diagnostic messages associated with the task; stored verbatim.</param>
/// <param name="currentStatus">current <see cref="TIPStatus"/> of the task; stored verbatim.</param>
/// <param name="startTime">task start time; stored verbatim (presumably epoch millis — confirm against callers).</param>
/// <param name="finishTime">task finish time; stored verbatim (presumably epoch millis — confirm against callers).</param>
/// <param name="counters">counters recorded for the task; stored verbatim.</param>
public TaskReport(TaskID taskid, float progress, string state, string[] diagnostics
    , TIPStatus currentStatus, long startTime, long finishTime, Counters counters)
{
    // Plain copy constructor: every argument is stored as-is with no validation
    // or defensive copying (diagnostics and counters are shared by reference).
    this.taskid = taskid;
    this.progress = progress;
    this.state = state;
    this.diagnostics = diagnostics;
    this.currentStatus = currentStatus;
    this.startTime = startTime;
    this.finishTime = finishTime;
    this.counters = counters;
}
/// <summary>
/// Downgrades a new-API
/// <see cref="Org.Apache.Hadoop.Mapreduce.Counters"/>
/// instance to the old-API counters representation.
/// </summary>
/// <param name="newCounters">new Counters</param>
/// <returns>old Counters instance corresponding to newCounters</returns>
internal static Org.Apache.Hadoop.Mapred.Counters Downgrade(Org.Apache.Hadoop.Mapreduce.Counters
     newCounters)
{
    // Wrap rather than copy: the old-API Counters constructor adapts the
    // new-API instance directly.
    Org.Apache.Hadoop.Mapred.Counters oldCounters =
        new Org.Apache.Hadoop.Mapred.Counters(newCounters);
    return oldCounters;
}
/// <summary>
/// End-to-end test of a combiner with a custom combiner-key grouping comparator:
/// runs a local-framework MapReduce job over a small generated input file, then
/// asserts that the combiner actually ran (reduced record count) and that the
/// reducer output matches the expected grouped sums.
/// </summary>
public virtual void TestCombiner()
{
    if (!new FilePath(TestRootDir).Mkdirs())
    {
        throw new RuntimeException("Could not create test dir: " + TestRootDir);
    }
    FilePath @in = new FilePath(TestRootDir, "input");
    // FIX: the source text here was corrupted by email-address obfuscation
    // ("[email protected]()"); restored the negated Mkdirs() guard, mirroring the
    // TestRootDir check above.
    if (!@in.Mkdirs())
    {
        throw new RuntimeException("Could not create test dir: " + @in);
    }
    FilePath @out = new FilePath(TestRootDir, "output");
    // Five input records: keys group as A|* and B|* via the grouping comparator.
    PrintWriter pw = new PrintWriter(new FileWriter(new FilePath(@in, "data.txt")));
    pw.WriteLine("A|a,1");
    pw.WriteLine("A|b,2");
    pw.WriteLine("B|a,3");
    pw.WriteLine("B|b,4");
    pw.WriteLine("B|c,5");
    pw.Close();
    JobConf conf = new JobConf();
    conf.Set("mapreduce.framework.name", "local");
    Job job = new Job(conf);
    TextInputFormat.SetInputPaths(job, new Path(@in.GetPath()));
    TextOutputFormat.SetOutputPath(job, new Path(@out.GetPath()));
    job.SetMapperClass(typeof(TestNewCombinerGrouping.Map));
    job.SetReducerClass(typeof(TestNewCombinerGrouping.Reduce));
    job.SetInputFormatClass(typeof(TextInputFormat));
    job.SetMapOutputKeyClass(typeof(Text));
    job.SetMapOutputValueClass(typeof(LongWritable));
    job.SetOutputFormatClass(typeof(TextOutputFormat));
    job.SetGroupingComparatorClass(typeof(TestNewCombinerGrouping.GroupComparator));
    // Same comparator for combiner-side grouping — the feature under test.
    job.SetCombinerKeyGroupingComparatorClass(typeof(TestNewCombinerGrouping.GroupComparator
        ));
    job.SetCombinerClass(typeof(TestNewCombinerGrouping.Combiner));
    // Force the combiner to run even with few spills.
    job.GetConfiguration().SetInt("min.num.spills.for.combine", 0);
    job.Submit();
    job.WaitForCompletion(false);
    if (job.IsSuccessful())
    {
        Counters counters = job.GetCounters();
        long combinerInputRecords = counters.FindCounter("org.apache.hadoop.mapreduce.TaskCounter"
            , "COMBINE_INPUT_RECORDS").GetValue();
        long combinerOutputRecords = counters.FindCounter("org.apache.hadoop.mapreduce.TaskCounter"
            , "COMBINE_OUTPUT_RECORDS").GetValue();
        // The combiner must have seen records and must have reduced their number.
        NUnit.Framework.Assert.IsTrue(combinerInputRecords > 0);
        NUnit.Framework.Assert.IsTrue(combinerInputRecords > combinerOutputRecords);
        BufferedReader br = new BufferedReader(new FileReader(new FilePath(@out, "part-r-00000"
            )));
        // Collect "<group-key><sum>" pairs from the first character of the key
        // and the digit at column 4 of each output line; order-independent via set.
        ICollection<string> output = new HashSet<string>();
        string line = br.ReadLine();
        NUnit.Framework.Assert.IsNotNull(line);
        output.AddItem(Sharpen.Runtime.Substring(line, 0, 1) + Sharpen.Runtime.Substring(
            line, 4, 5));
        line = br.ReadLine();
        NUnit.Framework.Assert.IsNotNull(line);
        output.AddItem(Sharpen.Runtime.Substring(line, 0, 1) + Sharpen.Runtime.Substring(
            line, 4, 5));
        // Exactly two output records expected — one per group.
        line = br.ReadLine();
        NUnit.Framework.Assert.IsNull(line);
        br.Close();
        ICollection<string> expected = new HashSet<string>();
        expected.AddItem("A2");
        expected.AddItem("B5");
        NUnit.Framework.Assert.AreEqual(expected, output);
    }
    else
    {
        NUnit.Framework.Assert.Fail("Job failed");
    }
}
/// <summary>
/// Constructs an old-API Counters instance from a new-API
/// <see cref="Org.Apache.Hadoop.Mapreduce.Counters"/> instance,
/// delegating to the base-class constructor together with the shared
/// groupFactory declared elsewhere in this class.
/// </summary>
/// <param name="newCounters">new-API counters to adapt</param>
public Counters(Org.Apache.Hadoop.Mapreduce.Counters newCounters)
    : base(newCounters, groupFactory)
{
}