Example #1
        /// <exception cref="System.Exception"/>
        public virtual void TestValueIteratorWithCompression()
        {
            Path          tmpDir = new Path("build/test/test.reduce.task.compression");
            Configuration conf   = new Configuration();
            DefaultCodec  codec  = new DefaultCodec();

            codec.SetConf(conf);
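            // testCases (arrays of key/value Pairs) and RunValueIterator are
            // fixture members of the enclosing TestReduceTask class, not shown here.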
            foreach (TestReduceTask.Pair[] testCase in testCases)
            {
                RunValueIterator(tmpDir, testCase, conf, codec);
            }
        }
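Example #1 only configures the codec and hands it to a helper. As a minimal sketch of the compress-and-read-back round trip such a helper performs, the snippet below reuses the API surface visible in Example #2 (CreateInputStream, GetDefaultExtension, LineReader); CreateOutputStream and the Sharpen.Runtime.GetBytesForString helper mirror the Java originals and are assumptions about this port, not calls taken from these examples.

        // Minimal sketch (assumed API), not part of the original test classes.
        public virtual void RoundTripWithDefaultCodec()
        {
            Configuration conf  = new Configuration();
            DefaultCodec  codec = new DefaultCodec();

            codec.SetConf(conf);   // DefaultCodec is Configurable; configure it before use
            Path       file = new Path("build/test/roundtrip" + codec.GetDefaultExtension());
            FileSystem fs   = file.GetFileSystem(conf);

            // Write one compressed line (CreateOutputStream is assumed to mirror
            // Java's CompressionCodec.createOutputStream).
            CompressionOutputStream cout = codec.CreateOutputStream(fs.Create(file));
            cout.Write(Sharpen.Runtime.GetBytesForString("hello\n"));
            cout.Close();

            // Read it back the same way Example #2 reads its expected file.
            CompressionInputStream cin    = codec.CreateInputStream(fs.Open(file));
            LineReader             reader = new LineReader(cin);
            Org.Apache.Hadoop.IO.Text line = new Org.Apache.Hadoop.IO.Text();

            reader.ReadLine(line);   // line now holds "hello"
            reader.Close();
        }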
Example #2
        public virtual void TestCompress()
        {
            JobConf job = new JobConf();

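            // attempt (a task attempt id string), workDir and localFs are
            // fixture fields of the enclosing test class.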
            job.Set(JobContext.TaskAttemptId, attempt);
            job.Set(FileOutputFormat.Compress, "true");
            FileOutputFormat.SetOutputPath(job, workDir.GetParent().GetParent());
            FileOutputFormat.SetWorkOutputPath(job, workDir);
            FileSystem fs = workDir.GetFileSystem(job);

            if (!fs.Mkdirs(workDir))
            {
                NUnit.Framework.Assert.Fail("Failed to create output directory");
            }
            string file = "test_compress.txt";
            // A reporter that does nothing
            Reporter reporter = Reporter.Null;
            TextOutputFormat<object, object> theOutputFormat = new TextOutputFormat<object, object>();
            RecordWriter<object, object>     theRecordWriter = theOutputFormat.GetRecordWriter(localFs, job, file, reporter);

            Org.Apache.Hadoop.IO.Text key1 = new Org.Apache.Hadoop.IO.Text("key1");
            Org.Apache.Hadoop.IO.Text key2 = new Org.Apache.Hadoop.IO.Text("key2");
            Org.Apache.Hadoop.IO.Text val1 = new Org.Apache.Hadoop.IO.Text("val1");
            Org.Apache.Hadoop.IO.Text val2 = new Org.Apache.Hadoop.IO.Text("val2");
            NullWritable nullWritable      = NullWritable.Get();

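            // TextOutputFormat skips null and NullWritable parts: a pair where both
            // key and value are null/NullWritable writes nothing; a null/NullWritable
            // key drops the key and the tab; a null/NullWritable value drops the value.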
            try
            {
                theRecordWriter.Write(key1, val1);
                theRecordWriter.Write(null, nullWritable);
                theRecordWriter.Write(null, val1);
                theRecordWriter.Write(nullWritable, val2);
                theRecordWriter.Write(key2, nullWritable);
                theRecordWriter.Write(key1, null);
                theRecordWriter.Write(null, null);
                theRecordWriter.Write(key2, val2);
            }
            finally
            {
                theRecordWriter.Close(reporter);
            }
            StringBuilder expectedOutput = new StringBuilder();

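            // One line per write above that actually produced output.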
            expectedOutput.Append(key1).Append("\t").Append(val1).Append("\n");
            expectedOutput.Append(val1).Append("\n");
            expectedOutput.Append(val2).Append("\n");
            expectedOutput.Append(key2).Append("\n");
            expectedOutput.Append(key1).Append("\n");
            expectedOutput.Append(key2).Append("\t").Append(val2).Append("\n");
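            // Reopen the compressed output with the same codec and compare line by line.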
            DefaultCodec codec = new DefaultCodec();

            codec.SetConf(job);
            Path                   expectedFile = new Path(workDir, file + codec.GetDefaultExtension());
            FileInputStream        istream      = new FileInputStream(expectedFile.ToString());
            CompressionInputStream cistream     = codec.CreateInputStream(istream);
            LineReader             reader       = new LineReader(cistream);
            string                 output       = string.Empty;

            Org.Apache.Hadoop.IO.Text @out = new Org.Apache.Hadoop.IO.Text();
            while (reader.ReadLine(@out) > 0)
            {
                output += @out;
                output += "\n";
            }
            reader.Close();
            NUnit.Framework.Assert.AreEqual(expectedOutput.ToString(), output);
        }