        /// <summary>
        /// Tests that the class loader set by
        /// <see cref="Org.Apache.Hadoop.Conf.Configuration.SetClassLoader(Sharpen.ClassLoader)"/>
        /// is inherited by any
        /// <see cref="WrappedRecordReader{K, U}"/>s created by
        /// <see cref="CompositeRecordReader{K, V, X}"/>.
        /// </summary>
        /// <exception cref="System.Exception"/>
        public virtual void TestClassLoader()
        {
            Configuration conf = new Configuration();

            TestWrappedRRClassloader.Fake_ClassLoader classLoader =
                new TestWrappedRRClassloader.Fake_ClassLoader();
            conf.SetClassLoader(classLoader);
            NUnit.Framework.Assert.IsTrue(
                conf.GetClassLoader() is TestWrappedRRClassloader.Fake_ClassLoader);
            FileSystem fs = FileSystem.Get(conf);
            Path testdir = new Path(Runtime.GetProperty("test.build.data", "/tmp")).MakeQualified(fs);
            Path @base = new Path(testdir, "/empty");

            Path[] src = new Path[] { new Path(@base, "i0"), new Path("i1"), new Path("i2") };
            conf.Set(CompositeInputFormat.JoinExpr, CompositeInputFormat.Compose(
                "outer", typeof(TestWrappedRRClassloader.IF_ClassLoaderChecker), src));
            CompositeInputFormat<NullWritable> inputFormat = new CompositeInputFormat<NullWritable>();
            // create dummy TaskAttemptID
            TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.Map, 0, 0);

            conf.Set(MRJobConfig.TaskAttemptId, tid.ToString());
            inputFormat.CreateRecordReader(inputFormat.GetSplits(Job.GetInstance(conf))[0],
                new TaskAttemptContextImpl(conf, tid));
        }
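Note: the Compose call above builds the textual join expression that the test stores under CompositeInputFormat.JoinExpr. A minimal sketch of that call in isolation, assuming the Hadoop join grammar op(tbl(<format>,"<path>"),...) carries over unchanged to this Sharpen port:

        // Illustrative sketch only, not part of the scraped test.
        Configuration conf = new Configuration();
        Path[] src = new Path[] { new Path("i0"), new Path("i1"), new Path("i2") };
        string expr = CompositeInputFormat.Compose(
            "outer", typeof(TestWrappedRRClassloader.IF_ClassLoaderChecker), src);
        // expr has the shape:
        //   outer(tbl(<checker format type>,"i0"),tbl(<checker format type>,"i1"),tbl(<checker format type>,"i2"))
        conf.Set(CompositeInputFormat.JoinExpr, expr);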
Example #2
        /// <exception cref="System.Exception"/>
        public virtual int TestFormat(Configuration conf, int tupleSize, bool firstTuple,
                                      bool secondTuple, TestJoinProperties.TestType ttype)
        {
            Job job = Job.GetInstance(conf);
            CompositeInputFormat format = new CompositeInputFormat();
            int count = 0;

            foreach (InputSplit split in (IList <InputSplit>)format.GetSplits(job))
            {
                TaskAttemptContext context = MapReduceTestUtil.CreateDummyMapTaskAttemptContext(conf);
                RecordReader reader = format.CreateRecordReader(split, context);
                MapContext mcontext = new MapContextImpl(conf, context.GetTaskAttemptID(), reader,
                    null, null, MapReduceTestUtil.CreateDummyReporter(), split);
                reader.Initialize(split, mcontext);
                WritableComparable key   = null;
                Writable           value = null;
                while (reader.NextKeyValue())
                {
                    key   = (WritableComparable)reader.GetCurrentKey();
                    value = (Writable)reader.GetCurrentValue();
                    ValidateKeyValue(key, value, tupleSize, firstTuple, secondTuple, ttype);
                    count++;
                }
            }
            return count;
        }
Example #3
        /// <exception cref="System.Exception"/>
        private static void JoinAs(string jointype, Type map, Type reduce)
        {
            int           srcs  = 4;
            Configuration conf  = new Configuration();
            Path          @base = cluster.GetFileSystem().MakeQualified(new Path("/" + jointype));

            Path[] src = WriteSimpleSrc(@base, conf, srcs);
            conf.Set(CompositeInputFormat.JoinExpr, CompositeInputFormat.Compose(
                jointype, typeof(SequenceFileInputFormat), src));
            conf.SetInt("testdatamerge.sources", srcs);
            Job job = Job.GetInstance(conf);

            job.SetInputFormatClass(typeof(CompositeInputFormat));
            FileOutputFormat.SetOutputPath(job, new Path(@base, "out"));
            job.SetMapperClass(map);
            job.SetReducerClass(reduce);
            job.SetOutputFormatClass(typeof(SequenceFileOutputFormat));
            job.SetOutputKeyClass(typeof(IntWritable));
            job.SetOutputValueClass(typeof(IntWritable));
            job.WaitForCompletion(true);
            NUnit.Framework.Assert.IsTrue("Job failed", job.IsSuccessful());
            if ("outer".Equals(jointype))
            {
                CheckOuterConsistency(job, src);
            }
            @base.GetFileSystem(conf).Delete(@base, true);
        }
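A hypothetical invocation of this helper for the different join types; the checker mapper/reducer names are assumed here, mirroring the classes used by the original Hadoop TestJoinDatamerge suite:

        // Assumed names, shown only to illustrate how JoinAs is driven.
        JoinAs("inner", typeof(TestJoinDatamerge.InnerJoinMapChecker),
               typeof(TestJoinDatamerge.InnerJoinReduceChecker));
        JoinAs("outer", typeof(TestJoinDatamerge.OuterJoinMapChecker),
               typeof(TestJoinDatamerge.OuterJoinReduceChecker));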
Example #4
        /// <exception cref="System.Exception"/>
        public virtual void TestEmptyJoin()
        {
            Configuration conf  = new Configuration();
            Path          @base = cluster.GetFileSystem().MakeQualified(new Path("/empty"));

            Path[] src = new Path[] { new Path(@base, "i0"), new Path("i1"), new Path("i2") };
            conf.Set(CompositeInputFormat.JoinExpr, CompositeInputFormat.Compose(
                "outer", typeof(MapReduceTestUtil.Fake_IF), src));
            MapReduceTestUtil.Fake_IF.SetKeyClass(conf, typeof(MapReduceTestUtil.IncomparableKey));
            Job job = Job.GetInstance(conf);

            job.SetInputFormatClass(typeof(CompositeInputFormat));
            FileOutputFormat.SetOutputPath(job, new Path(@base, "out"));
            job.SetMapperClass(typeof(Mapper));
            job.SetReducerClass(typeof(Reducer));
            job.SetOutputKeyClass(typeof(MapReduceTestUtil.IncomparableKey));
            job.SetOutputValueClass(typeof(NullWritable));
            job.WaitForCompletion(true);
            NUnit.Framework.Assert.IsTrue(job.IsSuccessful());
            @base.GetFileSystem(conf).Delete(@base, true);
        }
Example #5
        private string C()
        {
            return CompositeInputFormat.Compose(typeof(SequenceFileInputFormat), src[2].ToString());
        }
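A leaf expression like the one returned by C() is normally combined with sibling leaves into a larger join expression. A hypothetical sketch, assuming companion helpers A() and B() exist that wrap src[0] and src[1] the same way C() wraps src[2]:

        // Hypothetical helper name; A() and B() are assumed companions of C().
        private string OuterOfAllSources()
        {
            return "outer(" + A() + "," + B() + "," + C() + ")";
        }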
Example #6
        /// <exception cref="System.Exception"/>
        public virtual void TestNestedJoin()
        {
            // outer(inner(S1,...,Sn),outer(S1,...,Sn))
            int           Sources = 3;
            int           Items   = (Sources + 1) * (Sources + 1);
            Configuration conf    = new Configuration();
            Path          @base   = cluster.GetFileSystem().MakeQualified(new Path("/nested"));

            int[][] source = new int[Sources][];
            for (int i = 0; i < Sources; ++i)
            {
                source[i] = new int[Items];
                for (int j = 0; j < Items; ++j)
                {
                    source[i][j] = (i + 2) * (j + 1);
                }
            }
            Path[] src = new Path[Sources];
            SequenceFile.Writer[] @out = CreateWriters(@base, conf, Sources, src);
            IntWritable           k    = new IntWritable();

            for (int i_1 = 0; i_1 < Sources; ++i_1)
            {
                IntWritable v = new IntWritable();
                v.Set(i_1);
                for (int j = 0; j < Items; ++j)
                {
                    k.Set(source[i_1][j]);
                    @out[i_1].Append(k, v);
                }
                @out[i_1].Close();
            }
            @out = null;
            StringBuilder sb = new StringBuilder();

            sb.Append("outer(inner(");
            for (int i_2 = 0; i_2 < Sources; ++i_2)
            {
                sb.Append(CompositeInputFormat.Compose(typeof(SequenceFileInputFormat),
                    src[i_2].ToString()));
                if (i_2 + 1 != Sources)
                {
                    sb.Append(",");
                }
            }
            sb.Append("),outer(");
            sb.Append(CompositeInputFormat.Compose(typeof(MapReduceTestUtil.Fake_IF), "foobar"
                                                   ));
            sb.Append(",");
            for (int i_3 = 0; i_3 < Sources; ++i_3)
            {
                sb.Append(CompositeInputFormat.Compose(typeof(SequenceFileInputFormat),
                    src[i_3].ToString()));
                sb.Append(",");
            }
            sb.Append(CompositeInputFormat.Compose(typeof(MapReduceTestUtil.Fake_IF), "raboof"
                                                   ) + "))");
            conf.Set(CompositeInputFormat.JoinExpr, sb.ToString());
            MapReduceTestUtil.Fake_IF.SetKeyClass(conf, typeof(IntWritable));
            MapReduceTestUtil.Fake_IF.SetValClass(conf, typeof(IntWritable));
            Job  job  = Job.GetInstance(conf);
            Path outf = new Path(@base, "out");

            FileOutputFormat.SetOutputPath(job, outf);
            job.SetInputFormatClass(typeof(CompositeInputFormat));
            job.SetMapperClass(typeof(Mapper));
            job.SetReducerClass(typeof(Reducer));
            job.SetNumReduceTasks(0);
            job.SetOutputKeyClass(typeof(IntWritable));
            job.SetOutputValueClass(typeof(TupleWritable));
            job.SetOutputFormatClass(typeof(SequenceFileOutputFormat));
            job.WaitForCompletion(true);
            NUnit.Framework.Assert.IsTrue("Job failed", job.IsSuccessful());
            FileStatus[] outlist = cluster.GetFileSystem().ListStatus(outf,
                new Utils.OutputFileUtils.OutputFilesFilter());
            NUnit.Framework.Assert.AreEqual(1, outlist.Length);
            NUnit.Framework.Assert.IsTrue(0 < outlist[0].GetLen());
            SequenceFile.Reader r = new SequenceFile.Reader(cluster.GetFileSystem(),
                outlist[0].GetPath(), conf);
            TupleWritable v_1 = new TupleWritable();

            while (r.Next(k, v_1))
            {
                NUnit.Framework.Assert.IsFalse(((TupleWritable)v_1.Get(1)).Has(0));
                NUnit.Framework.Assert.IsFalse(((TupleWritable)v_1.Get(1)).Has(Sources + 1));
                bool chk = true;
                int  ki  = k.Get();
                for (int i_4 = 2; i_4 < Sources + 2; ++i_4)
                {
                    if ((ki % i_4) == 0 && ki <= i_4 * Items)
                    {
                        NUnit.Framework.Assert.AreEqual(i_4 - 2,
                            ((IntWritable)((TupleWritable)v_1.Get(1)).Get(i_4 - 1)).Get());
                    }
                    else
                    {
                        chk = false;
                    }
                }
                if (chk)
                {
                    // present in all sources; chk inner
                    NUnit.Framework.Assert.IsTrue(v_1.Has(0));
                    for (int i_5 = 0; i_5 < Sources; ++i_5)
                    {
                        NUnit.Framework.Assert.IsTrue(((TupleWritable)v_1.Get(0)).Has(i_5));
                    }
                }
                else
                {
                    // should not be present in inner join
                    NUnit.Framework.Assert.IsFalse(v_1.Has(0));
                }
            }
            r.Close();
            @base.GetFileSystem(conf).Delete(@base, true);
        }