        /// <exception cref="System.IO.IOException"/>
        public static bool LaunchJob(URI fileSys, JobConf conf, int numMaps, int numReduces
                                     )
        {
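            // Write a single input file, run an identity map/reduce job with the
            // special-characters output format, and verify the expected output file exists.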
            Path       inDir  = new Path("/testing/input");
            Path       outDir = new Path("/testing/output");
            FileSystem fs     = FileSystem.Get(fileSys, conf);

            fs.Delete(outDir, true);
            if (!fs.Mkdirs(inDir))
            {
                Log.Warn("Can't create " + inDir);
                return(false);
            }
            // generate an input file
            DataOutputStream file = fs.Create(new Path(inDir, "part-0"));

            file.WriteBytes("foo foo2 foo3");
            file.Close();
            // use WordCount example
            FileSystem.SetDefaultUri(conf, fileSys);
            conf.SetJobName("foo");
            conf.SetInputFormat(typeof(TextInputFormat));
            conf.SetOutputFormat(typeof(TestSpecialCharactersInOutputPath.SpecialTextOutputFormat
                                        ));
            conf.SetOutputKeyClass(typeof(LongWritable));
            conf.SetOutputValueClass(typeof(Text));
            conf.SetMapperClass(typeof(IdentityMapper));
            conf.SetReducerClass(typeof(IdentityReducer));
            FileInputFormat.SetInputPaths(conf, inDir);
            FileOutputFormat.SetOutputPath(conf, outDir);
            conf.SetNumMapTasks(numMaps);
            conf.SetNumReduceTasks(numReduces);
            // run job and wait for completion
            RunningJob runningJob = JobClient.RunJob(conf);

            try
            {
                NUnit.Framework.Assert.IsTrue(runningJob.IsComplete());
                NUnit.Framework.Assert.IsTrue(runningJob.IsSuccessful());
                NUnit.Framework.Assert.IsTrue("Output folder not found!", fs.Exists(new Path("/testing/output/"
                                                                                             + OutputFilename)));
            }
            catch (ArgumentNullException)
            {
                // This NPE should no longer happen
                Fail("An NPE should not have happened.");
            }
            // return job result
            Log.Info("job is complete: " + runningJob.IsSuccessful());
            return(runningJob.IsSuccessful());
        }
        /// <exception cref="System.Exception"/>
        private void MrRun()
        {
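            // Write a one-line input file and run a minimal single-map job,
            // asserting that it completes and succeeds.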
            FileSystem fs       = FileSystem.Get(GetJobConf());
            Path       inputDir = new Path("input");

            fs.Mkdirs(inputDir);
            TextWriter writer = new OutputStreamWriter(fs.Create(new Path(inputDir, "data.txt"
                                                                          )));

            writer.Write("hello");
            writer.Close();
            Path    outputDir = new Path("output", "output");
            JobConf jobConf   = new JobConf(GetJobConf());

            jobConf.SetInt("mapred.map.tasks", 1);
            jobConf.SetInt("mapred.map.max.attempts", 1);
            jobConf.SetInt("mapred.reduce.max.attempts", 1);
            jobConf.Set("mapred.input.dir", inputDir.ToString());
            jobConf.Set("mapred.output.dir", outputDir.ToString());
            JobClient  jobClient = new JobClient(jobConf);
            RunningJob runJob    = jobClient.SubmitJob(jobConf);

            runJob.WaitForCompletion();
            NUnit.Framework.Assert.IsTrue(runJob.IsComplete());
            NUnit.Framework.Assert.IsTrue(runJob.IsSuccessful());
        }
        /// <exception cref="System.Exception"/>
        public static Counters RunJob(JobConf conf)
        {
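            // Configure the MapMB/MBValidate job with a single reducer and no task
            // retries, run it, and return its counters (cleaning up /out afterwards).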
            conf.SetMapperClass(typeof(TestReduceFetchFromPartialMem.MapMB));
            conf.SetReducerClass(typeof(TestReduceFetchFromPartialMem.MBValidate));
            conf.SetOutputKeyClass(typeof(Org.Apache.Hadoop.IO.Text));
            conf.SetOutputValueClass(typeof(Org.Apache.Hadoop.IO.Text));
            conf.SetNumReduceTasks(1);
            conf.SetInputFormat(typeof(TestReduceFetchFromPartialMem.FakeIF));
            conf.SetNumTasksToExecutePerJvm(1);
            conf.SetInt(JobContext.MapMaxAttempts, 0);
            conf.SetInt(JobContext.ReduceMaxAttempts, 0);
            FileInputFormat.SetInputPaths(conf, new Path("/in"));
            Path outp = new Path("/out");

            FileOutputFormat.SetOutputPath(conf, outp);
            RunningJob job = null;

            try
            {
                job = JobClient.RunJob(conf);
                NUnit.Framework.Assert.IsTrue(job.IsSuccessful());
            }
            finally
            {
                FileSystem fs = dfsCluster.GetFileSystem();
                if (fs.Exists(outp))
                {
                    fs.Delete(outp, true);
                }
            }
            return(job.GetCounters());
        }
        /// <exception cref="System.Exception"/>
        private void CheckCompression(bool compressMapOutputs, SequenceFile.CompressionType
                                      redCompression, bool includeCombine)
        {
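            // Run a small MyMap/MyReduce job with the requested map-output and
            // reduce-output compression settings, then verify that the SequenceFile
            // output is compressed exactly when a compression type was requested.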
            JobConf    conf    = new JobConf(typeof(TestMapRed));
            Path       testdir = new Path(TestDir.GetAbsolutePath());
            Path       inDir   = new Path(testdir, "in");
            Path       outDir  = new Path(testdir, "out");
            FileSystem fs      = FileSystem.Get(conf);

            fs.Delete(testdir, true);
            FileInputFormat.SetInputPaths(conf, inDir);
            FileOutputFormat.SetOutputPath(conf, outDir);
            conf.SetMapperClass(typeof(TestMapRed.MyMap));
            conf.SetReducerClass(typeof(TestMapRed.MyReduce));
            conf.SetOutputKeyClass(typeof(Text));
            conf.SetOutputValueClass(typeof(Text));
            conf.SetOutputFormat(typeof(SequenceFileOutputFormat));
            conf.Set(MRConfig.FrameworkName, MRConfig.LocalFrameworkName);
            if (includeCombine)
            {
                conf.SetCombinerClass(typeof(IdentityReducer));
            }
            conf.SetCompressMapOutput(compressMapOutputs);
            SequenceFileOutputFormat.SetOutputCompressionType(conf, redCompression);
            try
            {
                if (!fs.Mkdirs(testdir))
                {
                    throw new IOException("Mkdirs failed to create " + testdir.ToString());
                }
                if (!fs.Mkdirs(inDir))
                {
                    throw new IOException("Mkdirs failed to create " + inDir.ToString());
                }
                Path             inFile = new Path(inDir, "part0");
                DataOutputStream f      = fs.Create(inFile);
                f.WriteBytes("Owen was here\n");
                f.WriteBytes("Hadoop is fun\n");
                f.WriteBytes("Is this done, yet?\n");
                f.Close();
                RunningJob rj = JobClient.RunJob(conf);
                NUnit.Framework.Assert.IsTrue("job was complete", rj.IsComplete());
                NUnit.Framework.Assert.IsTrue("job was successful", rj.IsSuccessful());
                Path output = new Path(outDir, Task.GetOutputName(0));
                NUnit.Framework.Assert.IsTrue("reduce output exists " + output, fs.Exists(output)
                                              );
                SequenceFile.Reader rdr = new SequenceFile.Reader(fs, output, conf);
                NUnit.Framework.Assert.AreEqual("is reduce output compressed " + output, redCompression
                                                != SequenceFile.CompressionType.None, rdr.IsCompressed());
                rdr.Close();
            }
            finally
            {
                fs.Delete(testdir, true);
            }
        }
        public virtual void TestDefaultMRComparator()
        {
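            // Default sort comparator: AscendingKeysReducer checks that keys arrive
            // in ascending order, and the job must complete successfully.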
            conf.SetMapperClass(typeof(TestComparators.IdentityMapper));
            conf.SetReducerClass(typeof(TestComparators.AscendingKeysReducer));
            RunningJob r_job = jc.SubmitJob(conf);

            while (!r_job.IsComplete())
            {
                Sharpen.Thread.Sleep(1000);
            }
            if (!r_job.IsSuccessful())
            {
                NUnit.Framework.Assert.Fail("Oops! The job broke due to an unexpected error");
            }
        }
        public virtual void TestReporterProgressForMapOnlyJob()
        {
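            // Map-only job (zero reduces) with a progress-reporting mapper and a
            // single map attempt; the job must succeed.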
            Path    test = new Path(testRootTempDir, "testReporterProgressForMapOnlyJob");
            JobConf conf = new JobConf();

            conf.SetMapperClass(typeof(TestReporter.ProgressTesterMapper));
            conf.SetMapOutputKeyClass(typeof(Org.Apache.Hadoop.IO.Text));
            // fail early
            conf.SetMaxMapAttempts(1);
            conf.SetMaxReduceAttempts(0);
            RunningJob job = UtilsForTests.RunJob(conf, new Path(test, "in"), new Path(test,
                                                                                       "out"), 1, 0, Input);

            job.WaitForCompletion();
            NUnit.Framework.Assert.IsTrue("Job failed", job.IsSuccessful());
        }
        public virtual void TestNoMismatch()
        {
            //  Set good MapOutputKeyClass and MapOutputValueClass
            conf.SetMapOutputKeyClass(typeof(Text));
            conf.SetMapOutputValueClass(typeof(Text));
            RunningJob r_job = jc.SubmitJob(conf);

            while (!r_job.IsComplete())
            {
                Sharpen.Thread.Sleep(1000);
            }
            if (!r_job.IsSuccessful())
            {
                NUnit.Framework.Assert.Fail("Oops! The job broke due to an unexpected error");
            }
        }
        public virtual void TestValueMismatch()
        {
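            // Declare IntWritable map output values to force a type mismatch;
            // the job is expected to fail.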
            conf.SetMapOutputKeyClass(typeof(Text));
            conf.SetMapOutputValueClass(typeof(IntWritable));
            RunningJob r_job = jc.SubmitJob(conf);

            while (!r_job.IsComplete())
            {
                Sharpen.Thread.Sleep(1000);
            }
            if (r_job.IsSuccessful())
            {
                NUnit.Framework.Assert.Fail("Oops! The job was supposed to break due to an exception"
                                            );
            }
        }
        public virtual void TestUserValueGroupingComparator()
        {
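            // Use a custom value-grouping comparator (CompositeIntGroupFn);
            // the job must complete successfully.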
            conf.SetMapperClass(typeof(TestComparators.RandomGenMapper));
            conf.SetReducerClass(typeof(TestComparators.AscendingGroupReducer));
            conf.SetOutputValueGroupingComparator(typeof(TestComparators.CompositeIntGroupFn)
                                                  );
            RunningJob r_job = jc.SubmitJob(conf);

            while (!r_job.IsComplete())
            {
                Sharpen.Thread.Sleep(1000);
            }
            if (!r_job.IsSuccessful())
            {
                NUnit.Framework.Assert.Fail("Oops! The job broke due to an unexpected error");
            }
        }
        public virtual void TestAllUserComparators()
        {
            conf.SetMapperClass(typeof(TestComparators.RandomGenMapper));
            // use a decreasing comparator so keys are sorted in reverse order
            conf.SetOutputKeyComparatorClass(typeof(TestComparators.DecreasingIntComparator));
            conf.SetReducerClass(typeof(TestComparators.DescendingGroupReducer));
            conf.SetOutputValueGroupingComparator(typeof(TestComparators.CompositeIntReverseGroupFn
                                                         ));
            RunningJob r_job = jc.SubmitJob(conf);

            while (!r_job.IsComplete())
            {
                Sharpen.Thread.Sleep(1000);
            }
            if (!r_job.IsSuccessful())
            {
                NUnit.Framework.Assert.Fail("Oops! The job broke due to an unexpected error");
            }
        }
        // set up heap options, target value for memory loader and the output
        // directory before running the job
        /// <exception cref="System.IO.IOException"/>
        private static RunningJob RunHeapUsageTestJob(JobConf conf, Path testRootDir, string
                                                      heapOptions, long targetMapValue, long targetReduceValue, FileSystem fs, JobClient
                                                      client, Path inDir)
        {
            // define a job
            JobConf jobConf = new JobConf(conf);

            // configure the job
            jobConf.SetNumMapTasks(1);
            jobConf.SetNumReduceTasks(1);
            jobConf.SetMapperClass(typeof(TestJobCounters.MemoryLoaderMapper));
            jobConf.SetReducerClass(typeof(TestJobCounters.MemoryLoaderReducer));
            jobConf.SetInputFormat(typeof(TextInputFormat));
            jobConf.SetOutputKeyClass(typeof(LongWritable));
            jobConf.SetOutputValueClass(typeof(Org.Apache.Hadoop.IO.Text));
            jobConf.SetMaxMapAttempts(1);
            jobConf.SetMaxReduceAttempts(1);
            jobConf.Set(JobConf.MapredTaskJavaOpts, heapOptions);
            // set the targets
            jobConf.SetLong(TestJobCounters.MemoryLoaderMapper.TargetValue, targetMapValue);
            jobConf.SetLong(TestJobCounters.MemoryLoaderReducer.TargetValue, targetReduceValue
                            );
            // set the input directory for the job
            FileInputFormat.SetInputPaths(jobConf, inDir);
            // define job output folder
            Path outDir = new Path(testRootDir, "out");

            fs.Delete(outDir, true);
            FileOutputFormat.SetOutputPath(jobConf, outDir);
            // run the job
            RunningJob job = client.SubmitJob(jobConf);

            job.WaitForCompletion();
            JobID jobID = job.GetID();

            NUnit.Framework.Assert.IsTrue("Job " + jobID + " failed!", job.IsSuccessful());
            return(job);
        }
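        // Like MrRun above, but with SSL keystores configured and encrypted shuffle
        // enabled; the cluster is started before and stopped after the job.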
        /// <exception cref="System.Exception"/>
        private void EncryptedShuffleWithCerts(bool useClientCerts)
        {
            try
            {
                Configuration conf         = new Configuration();
                string        keystoresDir = new FilePath(Basedir).GetAbsolutePath();
                string        sslConfsDir  = KeyStoreTestUtil.GetClasspathDir(typeof(TestEncryptedShuffle));
                KeyStoreTestUtil.SetupSSLConfig(keystoresDir, sslConfsDir, conf, useClientCerts);
                conf.SetBoolean(MRConfig.ShuffleSslEnabledKey, true);
                StartCluster(conf);
                FileSystem fs       = FileSystem.Get(GetJobConf());
                Path       inputDir = new Path("input");
                fs.Mkdirs(inputDir);
                TextWriter writer = new OutputStreamWriter(fs.Create(new Path(inputDir, "data.txt")));
                writer.Write("hello");
                writer.Close();
                Path    outputDir = new Path("output", "output");
                JobConf jobConf   = new JobConf(GetJobConf());
                jobConf.SetInt("mapred.map.tasks", 1);
                jobConf.SetInt("mapred.map.max.attempts", 1);
                jobConf.SetInt("mapred.reduce.max.attempts", 1);
                jobConf.Set("mapred.input.dir", inputDir.ToString());
                jobConf.Set("mapred.output.dir", outputDir.ToString());
                JobClient  jobClient = new JobClient(jobConf);
                RunningJob runJob    = jobClient.SubmitJob(jobConf);
                runJob.WaitForCompletion();
                NUnit.Framework.Assert.IsTrue(runJob.IsComplete());
                NUnit.Framework.Assert.IsTrue(runJob.IsSuccessful());
            }
            finally
            {
                StopCluster();
            }
        }
        /// <exception cref="System.IO.IOException"/>
        internal static void RunProgram(MiniMRCluster mr, MiniDFSCluster dfs, Path program
                                        , Path inputPath, Path outputPath, int numMaps, int numReduces, string[] expectedResults
                                        , JobConf conf)
        {
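            // Copy the pipes executable into DFS, run it with Java record reader and
            // writer, check that WORDCOUNT counters were produced, and compare each
            // reduce output file against expectedResults.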
            Path    wordExec = new Path("testing/bin/application");
            JobConf job      = null;

            if (conf == null)
            {
                job = mr.CreateJobConf();
            }
            else
            {
                job = new JobConf(conf);
            }
            job.SetNumMapTasks(numMaps);
            job.SetNumReduceTasks(numReduces);
            {
                FileSystem fs = dfs.GetFileSystem();
                fs.Delete(wordExec.GetParent(), true);
                fs.CopyFromLocalFile(program, wordExec);
                Submitter.SetExecutable(job, fs.MakeQualified(wordExec).ToString());
                Submitter.SetIsJavaRecordReader(job, true);
                Submitter.SetIsJavaRecordWriter(job, true);
                FileInputFormat.SetInputPaths(job, inputPath);
                FileOutputFormat.SetOutputPath(job, outputPath);
                RunningJob rJob = null;
                if (numReduces == 0)
                {
                    rJob = Submitter.JobSubmit(job);
                    while (!rJob.IsComplete())
                    {
                        try
                        {
                            Sharpen.Thread.Sleep(1000);
                        }
                        catch (Exception ie)
                        {
                            throw new RuntimeException(ie);
                        }
                    }
                }
                else
                {
                    rJob = Submitter.RunJob(job);
                }
                NUnit.Framework.Assert.IsTrue("pipes job failed", rJob.IsSuccessful());
                Counters       counters          = rJob.GetCounters();
                Counters.Group wordCountCounters = counters.GetGroup("WORDCOUNT");
                int            numCounters       = 0;
                foreach (Counters.Counter c in wordCountCounters)
                {
                    System.Console.Out.WriteLine(c);
                    ++numCounters;
                }
                NUnit.Framework.Assert.IsTrue("No counters found!", (numCounters > 0));
            }
            IList<string> results = new AList<string>();

            foreach (Path p in FileUtil.Stat2Paths(dfs.GetFileSystem().ListStatus(outputPath,
                                                                                  new Utils.OutputFileUtils.OutputFilesFilter())))
            {
                results.AddItem(MapReduceTestUtil.ReadOutput(p, job));
            }
            NUnit.Framework.Assert.AreEqual("number of reduces is wrong", expectedResults.Length
                                            , results.Count);
            for (int i = 0; i < results.Count; i++)
            {
                NUnit.Framework.Assert.AreEqual("pipes program " + program + " output " + i + " wrong"
                                                , expectedResults[i], results[i]);
            }
        }
        public virtual void TestCombiner()
        {
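            // Run an old-API job with a combiner and custom grouping comparators,
            // then check the combiner counters and the grouped output ("A2", "B5").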
            if (!new FilePath(TestRootDir).Mkdirs())
            {
                throw new RuntimeException("Could not create test dir: " + TestRootDir);
            }
            FilePath @in = new FilePath(TestRootDir, "input");

            if (!@in.Mkdirs())
            {
                throw new RuntimeException("Could not create test dir: " + @in);
            }
            FilePath    @out = new FilePath(TestRootDir, "output");
            PrintWriter pw   = new PrintWriter(new FileWriter(new FilePath(@in, "data.txt")));

            pw.WriteLine("A|a,1");
            pw.WriteLine("A|b,2");
            pw.WriteLine("B|a,3");
            pw.WriteLine("B|b,4");
            pw.WriteLine("B|c,5");
            pw.Close();
            JobConf job = new JobConf();

            job.Set("mapreduce.framework.name", "local");
            TextInputFormat.SetInputPaths(job, new Path(@in.GetPath()));
            TextOutputFormat.SetOutputPath(job, new Path(@out.GetPath()));
            job.SetMapperClass(typeof(TestOldCombinerGrouping.Map));
            job.SetReducerClass(typeof(TestOldCombinerGrouping.Reduce));
            job.SetInputFormat(typeof(TextInputFormat));
            job.SetMapOutputKeyClass(typeof(Text));
            job.SetMapOutputValueClass(typeof(LongWritable));
            job.SetOutputFormat(typeof(TextOutputFormat));
            job.SetOutputValueGroupingComparator(typeof(TestOldCombinerGrouping.GroupComparator
                                                        ));
            job.SetCombinerClass(typeof(TestOldCombinerGrouping.Combiner));
            job.SetCombinerKeyGroupingComparator(typeof(TestOldCombinerGrouping.GroupComparator
                                                        ));
            job.SetInt("min.num.spills.for.combine", 0);
            JobClient  client     = new JobClient(job);
            RunningJob runningJob = client.SubmitJob(job);

            runningJob.WaitForCompletion();
            if (runningJob.IsSuccessful())
            {
                Counters counters             = runningJob.GetCounters();
                long     combinerInputRecords = counters.GetGroup("org.apache.hadoop.mapreduce.TaskCounter"
                                                                  ).GetCounter("COMBINE_INPUT_RECORDS");
                long combinerOutputRecords = counters.GetGroup("org.apache.hadoop.mapreduce.TaskCounter"
                                                               ).GetCounter("COMBINE_OUTPUT_RECORDS");
                NUnit.Framework.Assert.IsTrue(combinerInputRecords > 0);
                NUnit.Framework.Assert.IsTrue(combinerInputRecords > combinerOutputRecords);
                BufferedReader br = new BufferedReader(new FileReader(new FilePath(@out, "part-00000"
                                                                                   )));
                ICollection<string> output = new HashSet<string>();
                string line = br.ReadLine();
                NUnit.Framework.Assert.IsNotNull(line);
                output.AddItem(Sharpen.Runtime.Substring(line, 0, 1) + Sharpen.Runtime.Substring(
                                   line, 4, 5));
                line = br.ReadLine();
                NUnit.Framework.Assert.IsNotNull(line);
                output.AddItem(Sharpen.Runtime.Substring(line, 0, 1) + Sharpen.Runtime.Substring(
                                   line, 4, 5));
                line = br.ReadLine();
                NUnit.Framework.Assert.IsNull(line);
                br.Close();
                ICollection<string> expected = new HashSet<string>();
                expected.AddItem("A2");
                expected.AddItem("B5");
                NUnit.Framework.Assert.AreEqual(expected, output);
            }
            else
            {
                NUnit.Framework.Assert.Fail("Job failed");
            }
        }
        /// <exception cref="System.Exception"/>
        public virtual void TestCustomFile()
        {
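            // Run a two-map, one-reduce job whose output format names files "test-*"
            // and verify that the expected map and reduce output files exist.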
            Path inDir  = new Path("testing/fileoutputformat/input");
            Path outDir = new Path("testing/fileoutputformat/output");

            // Hack for local FS that does not have the concept of a 'mounting point'
            if (IsLocalFS())
            {
                string localPathRoot = Runtime.GetProperty("test.build.data", "/tmp").Replace(' '
                                                                                              , '+');
                inDir  = new Path(localPathRoot, inDir);
                outDir = new Path(localPathRoot, outDir);
            }
            JobConf    conf = CreateJobConf();
            FileSystem fs   = FileSystem.Get(conf);

            fs.Delete(outDir, true);
            if (!fs.Mkdirs(inDir))
            {
                throw new IOException("Mkdirs failed to create " + inDir.ToString());
            }
            DataOutputStream file = fs.Create(new Path(inDir, "part-0"));

            file.WriteBytes("a\nb\n\nc\nd\ne");
            file.Close();
            file = fs.Create(new Path(inDir, "part-1"));
            file.WriteBytes("a\nb\n\nc\nd\ne");
            file.Close();
            conf.SetJobName("fof");
            conf.SetInputFormat(typeof(TextInputFormat));
            conf.SetMapOutputKeyClass(typeof(LongWritable));
            conf.SetMapOutputValueClass(typeof(Text));
            conf.SetOutputFormat(typeof(TextOutputFormat));
            conf.SetOutputKeyClass(typeof(LongWritable));
            conf.SetOutputValueClass(typeof(Text));
            conf.SetMapperClass(typeof(TestFileOutputFormat.TestMap));
            conf.SetReducerClass(typeof(TestFileOutputFormat.TestReduce));
            conf.Set(MRConfig.FrameworkName, MRConfig.LocalFrameworkName);
            FileInputFormat.SetInputPaths(conf, inDir);
            FileOutputFormat.SetOutputPath(conf, outDir);
            JobClient  jc  = new JobClient(conf);
            RunningJob job = jc.SubmitJob(conf);

            while (!job.IsComplete())
            {
                Sharpen.Thread.Sleep(100);
            }
            NUnit.Framework.Assert.IsTrue(job.IsSuccessful());
            bool map0   = false;
            bool map1   = false;
            bool reduce = false;

            FileStatus[] statuses = fs.ListStatus(outDir);
            foreach (FileStatus status in statuses)
            {
                map0   = map0 || status.GetPath().GetName().Equals("test-m-00000");
                map1   = map1 || status.GetPath().GetName().Equals("test-m-00001");
                reduce = reduce || status.GetPath().GetName().Equals("test-r-00000");
            }
            NUnit.Framework.Assert.IsTrue(map0);
            NUnit.Framework.Assert.IsTrue(map1);
            NUnit.Framework.Assert.IsTrue(reduce);
        }
        /// <exception cref="System.Exception"/>
        private void ValidateOutput(JobConf conf, RunningJob runningJob, IList<string> mapperBadRecords
                                    , IList<string> redBadRecords)
        {
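            // Verify skip-mode counters, read the skipped records back from the skip
            // output path, and compare the remaining records in the job output against
            // the expected processed lists.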
            Log.Info(runningJob.GetCounters().ToString());
            NUnit.Framework.Assert.IsTrue(runningJob.IsSuccessful());
            //validate counters
            Counters counters = runningJob.GetCounters();

            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.MapSkippedRecords
                                                                 ).GetCounter(), mapperBadRecords.Count);
            int mapRecs = input.Count - mapperBadRecords.Count;

            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.MapInputRecords)
                                            .GetCounter(), mapRecs);
            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.MapOutputRecords
                                                                 ).GetCounter(), mapRecs);
            int redRecs = mapRecs - redBadRecords.Count;

            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.ReduceSkippedRecords
                                                                 ).GetCounter(), redBadRecords.Count);
            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.ReduceSkippedGroups
                                                                 ).GetCounter(), redBadRecords.Count);
            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.ReduceInputGroups
                                                                 ).GetCounter(), redRecs);
            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.ReduceInputRecords
                                                                 ).GetCounter(), redRecs);
            NUnit.Framework.Assert.AreEqual(counters.FindCounter(TaskCounter.ReduceOutputRecords
                                                                 ).GetCounter(), redRecs);
            //validate skipped records
            Path skipDir = SkipBadRecords.GetSkipOutputPath(conf);

            NUnit.Framework.Assert.IsNotNull(skipDir);
            Path[]        skips      = FileUtil.Stat2Paths(GetFileSystem().ListStatus(skipDir));
            IList<string> mapSkipped = new AList<string>();
            IList<string> redSkipped = new AList<string>();

            foreach (Path skipPath in skips)
            {
                Log.Info("skipPath: " + skipPath);
                SequenceFile.Reader reader = new SequenceFile.Reader(GetFileSystem(), skipPath, conf
                                                                     );
                object key   = ReflectionUtils.NewInstance(reader.GetKeyClass(), conf);
                object value = ReflectionUtils.NewInstance(reader.GetValueClass(), conf);
                key = reader.Next(key);
                while (key != null)
                {
                    value = reader.GetCurrentValue(value);
                    Log.Debug("key:" + key + " value:" + value.ToString());
                    if (skipPath.GetName().Contains("_r_"))
                    {
                        redSkipped.AddItem(value.ToString());
                    }
                    else
                    {
                        mapSkipped.AddItem(value.ToString());
                    }
                    key = reader.Next(key);
                }
                reader.Close();
            }
            NUnit.Framework.Assert.IsTrue(mapSkipped.ContainsAll(mapperBadRecords));
            NUnit.Framework.Assert.IsTrue(redSkipped.ContainsAll(redBadRecords));
            Path[] outputFiles = FileUtil.Stat2Paths(GetFileSystem().ListStatus(GetOutputDir(
                                                                                    ), new Utils.OutputFileUtils.OutputFilesFilter()));
            IList<string> mapperOutput = GetProcessed(input, mapperBadRecords);

            Log.Debug("mapperOutput " + mapperOutput.Count);
            IList<string> reducerOutput = GetProcessed(mapperOutput, redBadRecords);

            Log.Debug("reducerOutput " + reducerOutput.Count);
            if (outputFiles.Length > 0)
            {
                InputStream    @is     = GetFileSystem().Open(outputFiles[0]);
                BufferedReader reader  = new BufferedReader(new InputStreamReader(@is));
                string         line    = reader.ReadLine();
                int            counter = 0;
                while (line != null)
                {
                    counter++;
                    StringTokenizer tokeniz = new StringTokenizer(line, "\t");
                    string          key     = tokeniz.NextToken();
                    string          value   = tokeniz.NextToken();
                    Log.Debug("Output: key:" + key + "  value:" + value);
                    NUnit.Framework.Assert.IsTrue(value.Contains("hello"));
                    NUnit.Framework.Assert.IsTrue(reducerOutput.Contains(value));
                    line = reader.ReadLine();
                }
                reader.Close();
                NUnit.Framework.Assert.AreEqual(reducerOutput.Count, counter);
            }
        }
        /// <exception cref="System.Exception"/>
        private void Run(bool ioEx, bool rtEx)
        {
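            // Run a job through MultithreadedMapRunner with two threads, optionally
            // injecting an IOException or RuntimeException; the job should succeed
            // exactly when no exception was injected.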
            Path inDir  = new Path("testing/mt/input");
            Path outDir = new Path("testing/mt/output");

            // Hack for local FS that does not have the concept of a 'mounting point'
            if (IsLocalFS())
            {
                string localPathRoot = Runtime.GetProperty("test.build.data", "/tmp").Replace(' '
                                                                                              , '+');
                inDir  = new Path(localPathRoot, inDir);
                outDir = new Path(localPathRoot, outDir);
            }
            JobConf    conf = CreateJobConf();
            FileSystem fs   = FileSystem.Get(conf);

            fs.Delete(outDir, true);
            if (!fs.Mkdirs(inDir))
            {
                throw new IOException("Mkdirs failed to create " + inDir.ToString());
            }
            {
                DataOutputStream file = fs.Create(new Path(inDir, "part-0"));
                file.WriteBytes("a\nb\n\nc\nd\ne");
                file.Close();
            }
            conf.SetJobName("mt");
            conf.SetInputFormat(typeof(TextInputFormat));
            conf.SetOutputKeyClass(typeof(LongWritable));
            conf.SetOutputValueClass(typeof(Text));
            conf.SetMapOutputKeyClass(typeof(LongWritable));
            conf.SetMapOutputValueClass(typeof(Text));
            conf.SetOutputFormat(typeof(TextOutputFormat));
            conf.SetOutputKeyClass(typeof(LongWritable));
            conf.SetOutputValueClass(typeof(Text));
            conf.SetMapperClass(typeof(TestMultithreadedMapRunner.IDMap));
            conf.SetReducerClass(typeof(TestMultithreadedMapRunner.IDReduce));
            FileInputFormat.SetInputPaths(conf, inDir);
            FileOutputFormat.SetOutputPath(conf, outDir);
            conf.SetMapRunnerClass(typeof(MultithreadedMapRunner));
            conf.SetInt(MultithreadedMapper.NumThreads, 2);
            if (ioEx)
            {
                conf.SetBoolean("multithreaded.ioException", true);
            }
            if (rtEx)
            {
                conf.SetBoolean("multithreaded.runtimeException", true);
            }
            JobClient  jc  = new JobClient(conf);
            RunningJob job = jc.SubmitJob(conf);

            while (!job.IsComplete())
            {
                Sharpen.Thread.Sleep(100);
            }
            if (job.IsSuccessful())
            {
                NUnit.Framework.Assert.IsFalse(ioEx || rtEx);
            }
            else
            {
                NUnit.Framework.Assert.IsTrue(ioEx || rtEx);
            }
        }
        /// <exception cref="System.Exception"/>
        public virtual void Configure(string keySpec, int expect)
        {
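            // Run a job sorted with KeyFieldBasedComparator using the given key spec
            // and verify the two output lines appear in the expected order.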
            Path       testdir = new Path(TestDir.GetAbsolutePath());
            Path       inDir   = new Path(testdir, "in");
            Path       outDir  = new Path(testdir, "out");
            FileSystem fs      = GetFileSystem();

            fs.Delete(testdir, true);
            conf.SetInputFormat(typeof(TextInputFormat));
            FileInputFormat.SetInputPaths(conf, inDir);
            FileOutputFormat.SetOutputPath(conf, outDir);
            conf.SetOutputKeyClass(typeof(Text));
            conf.SetOutputValueClass(typeof(LongWritable));
            conf.SetNumMapTasks(1);
            conf.SetNumReduceTasks(1);
            conf.SetOutputFormat(typeof(TextOutputFormat));
            conf.SetOutputKeyComparatorClass(typeof(KeyFieldBasedComparator));
            conf.SetKeyFieldComparatorOptions(keySpec);
            conf.SetKeyFieldPartitionerOptions("-k1.1,1.1");
            conf.Set(JobContext.MapOutputKeyFieldSeperator, " ");
            conf.SetMapperClass(typeof(InverseMapper));
            conf.SetReducerClass(typeof(IdentityReducer));
            if (!fs.Mkdirs(testdir))
            {
                throw new IOException("Mkdirs failed to create " + testdir.ToString());
            }
            if (!fs.Mkdirs(inDir))
            {
                throw new IOException("Mkdirs failed to create " + inDir.ToString());
            }
            // set up two lines of input data in a single file
            Path             inFile = new Path(inDir, "part0");
            FileOutputStream fos    = new FileOutputStream(inFile.ToString());

            fos.Write(Sharpen.Runtime.GetBytesForString((line1 + "\n")));
            fos.Write(Sharpen.Runtime.GetBytesForString((line2 + "\n")));
            fos.Close();
            JobClient  jc    = new JobClient(conf);
            RunningJob r_job = jc.SubmitJob(conf);

            while (!r_job.IsComplete())
            {
                Sharpen.Thread.Sleep(1000);
            }
            if (!r_job.IsSuccessful())
            {
                Fail("Oops! The job broke due to an unexpected error");
            }
            Path[] outputFiles = FileUtil.Stat2Paths(GetFileSystem().ListStatus(outDir, new Utils.OutputFileUtils.OutputFilesFilter
                                                                                    ()));
            if (outputFiles.Length > 0)
            {
                InputStream    @is    = GetFileSystem().Open(outputFiles[0]);
                BufferedReader reader = new BufferedReader(new InputStreamReader(@is));
                string         line   = reader.ReadLine();
                //make sure we get what we expect as the first line, and also
                //that we have two lines
                if (expect == 1)
                {
                    NUnit.Framework.Assert.IsTrue(line.StartsWith(line1));
                }
                else
                {
                    if (expect == 2)
                    {
                        NUnit.Framework.Assert.IsTrue(line.StartsWith(line2));
                    }
                }
                line = reader.ReadLine();
                if (expect == 1)
                {
                    NUnit.Framework.Assert.IsTrue(line.StartsWith(line2));
                }
                else
                {
                    if (expect == 2)
                    {
                        NUnit.Framework.Assert.IsTrue(line.StartsWith(line1));
                    }
                }
                reader.Close();
            }
        }