/// <exception cref="System.Exception"/>
public virtual void TestEmptyJoin()
{
    // An outer join over sources with no records should still run to completion
    // and produce an (empty) output directory.
    JobConf job = new JobConf();
    Path @base = cluster.GetFileSystem().MakeQualified(new Path("/empty"));
    Path[] src = new Path[] { new Path(@base, "i0"), new Path("i1"), new Path("i2") };
    job.Set("mapreduce.join.expr", CompositeInputFormat.Compose("outer", typeof(TestDatamerge.Fake_IF), src));
    job.SetInputFormat(typeof(CompositeInputFormat));
    FileOutputFormat.SetOutputPath(job, new Path(@base, "out"));
    job.SetMapperClass(typeof(IdentityMapper));
    job.SetReducerClass(typeof(IdentityReducer));
    job.SetOutputKeyClass(typeof(IncomparableKey));
    job.SetOutputValueClass(typeof(NullWritable));
    JobClient.RunJob(job);
    @base.GetFileSystem(job).Delete(@base, true);
}
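// Sketch (assumption, based on the upstream Hadoop join expression syntax): Compose
// renders each source as a tbl(<input format class>,"<path>") term, so the expression
// set above is shaped roughly like
//   outer(tbl(Fake_IF,".../empty/i0"),tbl(Fake_IF,"i1"),tbl(Fake_IF,"i2"))
// with fully qualified class names in place of Fake_IF.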
/// <exception cref="System.Exception"/>
private static void JoinAs(string jointype, Type c)
{
    // Runs a join of the given type over four generated sources, using c as both
    // mapper and reducer to verify the merged output.
    int srcs = 4;
    Configuration conf = new Configuration();
    JobConf job = new JobConf(conf, c);
    Path @base = cluster.GetFileSystem().MakeQualified(new Path("/" + jointype));
    Path[] src = WriteSimpleSrc(@base, conf, srcs);
    job.Set("mapreduce.join.expr", CompositeInputFormat.Compose(jointype, typeof(SequenceFileInputFormat), src));
    job.SetInt("testdatamerge.sources", srcs);
    job.SetInputFormat(typeof(CompositeInputFormat));
    FileOutputFormat.SetOutputPath(job, new Path(@base, "out"));
    job.SetMapperClass(c);
    job.SetReducerClass(c);
    job.SetOutputKeyClass(typeof(IntWritable));
    job.SetOutputValueClass(typeof(IntWritable));
    JobClient.RunJob(job);
    @base.GetFileSystem(job).Delete(@base, true);
}
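// Usage sketch (assumption; mirrors the upstream Hadoop TestDatamerge drivers, whose
// checker classes are not shown in this excerpt):
/// <exception cref="System.Exception"/>
public virtual void TestSimpleInnerJoin()
{
    JoinAs("inner", typeof(TestDatamerge.InnerJoinChecker));
}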
/// <summary>
/// Tests that the class loader set by
/// <see cref="Org.Apache.Hadoop.Conf.Configuration.SetClassLoader(Sharpen.ClassLoader)"/>
/// is inherited by any
/// <see cref="WrappedRecordReader{K, U}"/>s created by
/// <see cref="CompositeRecordReader{K, V, X}"/>.
/// </summary>
/// <exception cref="System.Exception"/>
public virtual void TestClassLoader()
{
    JobConf job = new JobConf();
    TestWrappedRecordReaderClassloader.Fake_ClassLoader classLoader = new TestWrappedRecordReaderClassloader.Fake_ClassLoader();
    job.SetClassLoader(classLoader);
    NUnit.Framework.Assert.IsTrue(job.GetClassLoader() is TestWrappedRecordReaderClassloader.Fake_ClassLoader);
    FileSystem fs = FileSystem.Get(job);
    Path testdir = new Path(Runtime.GetProperty("test.build.data", "/tmp")).MakeQualified(fs);
    Path @base = new Path(testdir, "/empty");
    Path[] src = new Path[] { new Path(@base, "i0"), new Path("i1"), new Path("i2") };
    job.Set("mapreduce.join.expr", CompositeInputFormat.Compose("outer", typeof(TestWrappedRecordReaderClassloader.IF_ClassLoaderChecker), src));
    // Creating the composite record reader exercises the class-loader propagation;
    // the checker input format performs the assertions.
    CompositeInputFormat<NullWritable> inputFormat = new CompositeInputFormat<NullWritable>();
    inputFormat.GetRecordReader(inputFormat.GetSplits(job, 1)[0], job, Reporter.Null);
}
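// For reference (assumption; the real definitions live in TestWrappedRecordReaderClassloader):
// Fake_ClassLoader is an empty marker subclass of Sharpen.ClassLoader, and
// IF_ClassLoaderChecker's record reader asserts that JobConf.GetClassLoader() returns
// that marker type for every wrapped reader the composite creates.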
/// <exception cref="System.Exception"/>
public virtual void TestNestedJoin()
{
    // Builds and checks the expression outer(inner(S1,...,Sn),outer(S1,...,Sn)).
    int Sources = 3;
    int Items = (Sources + 1) * (Sources + 1);
    JobConf job = new JobConf();
    Path @base = cluster.GetFileSystem().MakeQualified(new Path("/nested"));
    // Source i holds Items multiples of (i + 2).
    int[][] source = new int[Sources][];
    for (int i = 0; i < Sources; ++i)
    {
        source[i] = new int[Items];
        for (int j = 0; j < Items; ++j)
        {
            source[i][j] = (i + 2) * (j + 1);
        }
    }
    Path[] src = new Path[Sources];
    SequenceFile.Writer[] @out = CreateWriters(@base, job, Sources, src);
    IntWritable k = new IntWritable();
    for (int i_1 = 0; i_1 < Sources; ++i_1)
    {
        IntWritable v = new IntWritable();
        v.Set(i_1);
        for (int j = 0; j < Items; ++j)
        {
            k.Set(source[i_1][j]);
            @out[i_1].Append(k, v);
        }
        @out[i_1].Close();
    }
    @out = null;
    // Compose the nested join expression; the Fake_IF sources contribute no records.
    StringBuilder sb = new StringBuilder();
    sb.Append("outer(inner(");
    for (int i_2 = 0; i_2 < Sources; ++i_2)
    {
        sb.Append(CompositeInputFormat.Compose(typeof(SequenceFileInputFormat), src[i_2].ToString()));
        if (i_2 + 1 != Sources)
        {
            sb.Append(",");
        }
    }
    sb.Append("),outer(");
    sb.Append(CompositeInputFormat.Compose(typeof(TestDatamerge.Fake_IF), "foobar"));
    sb.Append(",");
    for (int i_3 = 0; i_3 < Sources; ++i_3)
    {
        sb.Append(CompositeInputFormat.Compose(typeof(SequenceFileInputFormat), src[i_3].ToString()));
        sb.Append(",");
    }
    sb.Append(CompositeInputFormat.Compose(typeof(TestDatamerge.Fake_IF), "raboof") + "))");
    job.Set("mapreduce.join.expr", sb.ToString());
    job.SetInputFormat(typeof(CompositeInputFormat));
    Path outf = new Path(@base, "out");
    FileOutputFormat.SetOutputPath(job, outf);
    TestDatamerge.Fake_IF.SetKeyClass(job, typeof(IntWritable));
    TestDatamerge.Fake_IF.SetValClass(job, typeof(IntWritable));
    job.SetMapperClass(typeof(IdentityMapper));
    job.SetReducerClass(typeof(IdentityReducer));
    job.SetNumReduceTasks(0);
    job.SetOutputKeyClass(typeof(IntWritable));
    job.SetOutputValueClass(typeof(TupleWritable));
    job.SetOutputFormat(typeof(SequenceFileOutputFormat));
    JobClient.RunJob(job);
    FileStatus[] outlist = cluster.GetFileSystem().ListStatus(outf, new Utils.OutputFileUtils.OutputFilesFilter());
    NUnit.Framework.Assert.AreEqual(1, outlist.Length);
    NUnit.Framework.Assert.IsTrue(0 < outlist[0].GetLen());
    SequenceFile.Reader r = new SequenceFile.Reader(cluster.GetFileSystem(), outlist[0].GetPath(), job);
    TupleWritable v_1 = new TupleWritable();
    while (r.Next(k, v_1))
    {
        // The Fake_IF positions (0 and Sources + 1) of the outer tuple are always empty.
        NUnit.Framework.Assert.IsFalse(((TupleWritable)v_1.Get(1)).Has(0));
        NUnit.Framework.Assert.IsFalse(((TupleWritable)v_1.Get(1)).Has(Sources + 1));
        bool chk = true;
        int ki = k.Get();
        // Key ki appears in source (i_4 - 2) iff it is a multiple of i_4 within range.
        for (int i_4 = 2; i_4 < Sources + 2; ++i_4)
        {
            if ((ki % i_4) == 0 && ki <= i_4 * Items)
            {
                NUnit.Framework.Assert.AreEqual(i_4 - 2, ((IntWritable)((TupleWritable)v_1.Get(1)).Get(i_4 - 1)).Get());
            }
            else
            {
                chk = false;
            }
        }
        if (chk)
        {
            // Present in all sources; check the inner join.
            NUnit.Framework.Assert.IsTrue(v_1.Has(0));
            for (int i_5 = 0; i_5 < Sources; ++i_5)
            {
                NUnit.Framework.Assert.IsTrue(((TupleWritable)v_1.Get(0)).Has(i_5));
            }
        }
        else
        {
            // Should not be present in the inner join.
            NUnit.Framework.Assert.IsFalse(v_1.Has(0));
        }
    }
    r.Close();
    @base.GetFileSystem(job).Delete(@base, true);
}
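// Worked example of the membership arithmetic above (sketch; the helper name is
// hypothetical): with Sources = 3 the sources hold multiples of 2, 3, and 4, and
// Items = 16. A key such as 12 is divisible by all three and within each source's
// range (12 <= 2*16, 3*16, and 4*16), so it must appear in the inner-join tuple;
// a key such as 9 (a multiple of 3 only) must not.
private static bool InAllSourcesSketch(int key, int sources, int items)
{
    for (int d = 2; d < sources + 2; ++d)
    {
        if (key % d != 0 || key > d * items)
        {
            return false;
        }
    }
    return true;
}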