/// <summary>
/// Verifies the expected configuration value was propagated to the task,
/// then records that this mapper's setup phase ran.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    NUnit.Framework.Assert.AreEqual(prop, jobConf.Get("a"));
    WriteFlag(jobConf, "map.setup." + name);
}
/// <summary>
/// Emits (value, 1); deliberately slows down the very first map attempt when
/// map-side speculative execution is under test.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Map(object key, Text value, Mapper.Context context)
{
    TaskAttemptID attempt = context.GetTaskAttemptID();
    Configuration jobConf = context.GetConfiguration();
    bool speculateMaps = jobConf.GetBoolean(MRJobConfig.MapSpeculative, false);
    // IF TESTING MAPPER SPECULATIVE EXECUTION:
    // Make the "*_m_000000_0" attempt take much longer than the others.
    // When speculative execution is enabled, this should cause the attempt
    // to be killed and restarted. At that point, the attempt ID will be
    // "*_m_000000_1", so the sleep will drop back to 100ms.
    bool firstAttemptOfFirstMap = (attempt.GetTaskType() == TaskType.Map)
        && (attempt.GetTaskID().GetId() == 0)
        && (attempt.GetId() == 0);
    long napMillis = (speculateMaps && firstAttemptOfFirstMap) ? 10000 : 100;
    try
    {
        Sharpen.Thread.Sleep(napMillis);
    }
    catch (Exception)
    {
        // Ignore interruption; proceed with the emit.
    }
    context.Write(value, new IntWritable(1));
}
/// <summary>Reads the regex pattern and capture-group index from the job configuration.</summary>
protected internal override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    group = jobConf.GetInt(Group, 0);
    pattern = Sharpen.Pattern.Compile(jobConf.Get(Pattern));
}
/// <summary>
/// Instantiates the delegate Mapper named by the TaggedInputSplit for this task.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected internal override void Setup(Mapper.Context context)
{
    // The actual mapper class to run is carried inside the TaggedInputSplit.
    TaggedInputSplit taggedSplit = (TaggedInputSplit)context.GetInputSplit();
    mapper = (Mapper<K1, V1, K2, V2>)ReflectionUtils.NewInstance(
        taggedSplit.GetMapperClass(), context.GetConfiguration());
}
/// <summary>Save the configuration value that we need to write the data.</summary>
protected override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    // Defaults: 1 GiB of output, keys of 5-10 words, values of 10-100 words.
    numBytesToWrite = jobConf.GetLong(BytesPerMap, 1 * 1024 * 1024 * 1024);
    minWordsInKey = jobConf.GetInt(MinKey, 5);
    wordsInKeyRange = jobConf.GetInt(MaxKey, 10) - minWordsInKey;
    minWordsInValue = jobConf.GetInt(MinValue, 10);
    wordsInValueRange = jobConf.GetInt(MaxValue, 100) - minWordsInValue;
}
/// <summary>
/// Save the values out of the configuration that we need to write
/// the data.
/// </summary>
protected override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    // Defaults: 1 GiB of output, keys of 10-1000 bytes, values of 0-20000 bytes.
    numBytesToWrite = jobConf.GetLong(BytesPerMap, 1 * 1024 * 1024 * 1024);
    minKeySize = jobConf.GetInt(MinKey, 10);
    keySizeRange = jobConf.GetInt(MaxKey, 1000) - minKeySize;
    minValueSize = jobConf.GetInt(MinValue, 0);
    valueSizeRange = jobConf.GetInt(MaxValue, 20000) - minValueSize;
}
/// <summary>
/// Runs the base setup, then fails the task if the io.sort.mb value the task
/// actually sees does not match the expected test value.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Setup(Mapper.Context context)
{
    base.Setup(context);
    string ioSortMb = context.GetConfiguration().Get(MRJobConfig.IoSortMb);
    if (!TestIoSortMb.Equals(ioSortMb))
    {
        throw new IOException("io.sort.mb expected: " + TestIoSortMb + ", actual: " + ioSortMb);
    }
}
/// <summary>
/// Reads the board dimensions and pentomino implementation class from the job
/// configuration, then initializes the solver and hooks up the solution catcher.
/// </summary>
protected override void Setup(Mapper.Context context)
{
    this.context = context;
    Configuration jobConf = context.GetConfiguration();
    depth = jobConf.GetInt(Pentomino.Depth, PentDepth);
    width = jobConf.GetInt(Pentomino.Width, PentWidth);
    height = jobConf.GetInt(Pentomino.Height, PentHeight);
    // Default to the one-sided variant unless the job names a specific class.
    pent = (Pentomino)ReflectionUtils.NewInstance(
        jobConf.GetClass(Pentomino.Class, typeof(OneSidedPentomino)), jobConf);
    pent.Initialize(width, height);
    pent.SetPrinter(new DistributedPentomino.PentMap.SolutionCatcher(this));
}
/// <summary>Partitions sigma into parts</summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Map(NullWritable nw, SummationWritable sigma, Mapper.Context context)
{
    int partCount = context.GetConfiguration().GetInt(NParts, 0);
    Summation[] pieces = sigma.GetElement().Partition(partCount);
    int index = 0;
    foreach (Summation piece in pieces)
    {
        context.Write(new IntWritable(index), new SummationWritable(piece));
        Log.Info("parts[" + index + "] = " + piece);
        ++index;
    }
}
/// <summary>
/// Sets a 1000-character task status and verifies the framework truncated it
/// to the configured progress-status length limit.
/// </summary>
/// <exception cref="System.IO.IOException">if the status was not truncated</exception>
protected override void Map(LongWritable key, Text value, Mapper.Context context)
{
    // Presize to the exact final length; the original capacity of 512 forced a
    // guaranteed mid-loop reallocation while appending 1000 characters.
    StringBuilder sb = new StringBuilder(1000);
    for (int i = 0; i < 1000; i++)
    {
        sb.Append("a");
    }
    context.SetStatus(sb.ToString());
    int progressStatusLength = context.GetConfiguration().GetInt(
        MRConfig.ProgressStatusLenLimitKey, MRConfig.ProgressStatusLenLimitDefault);
    if (context.GetStatus().Length > progressStatusLength)
    {
        throw new IOException("Status is not truncated");
    }
}
/// <summary>
/// Reads the field separator and key/value output spec from the configuration,
/// decides whether the input key should be ignored (plain text input), and
/// parses the output spec into key/value field lists.
/// </summary>
/// <exception cref="System.IO.IOException">if the input format class cannot be loaded</exception>
/// <exception cref="System.Exception"/>
protected internal override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    this.fieldSeparator = jobConf.Get(FieldSelectionHelper.DataFieldSeperator, "\t");
    this.mapOutputKeyValueSpec = jobConf.Get(FieldSelectionHelper.MapOutputKeyValueSpec, "0-:");
    try
    {
        // TextInputFormat keys are byte offsets, not data — ignore them.
        string textInputFormatName = typeof(TextInputFormat).GetCanonicalName();
        this.ignoreInputKey = textInputFormatName.Equals(
            context.GetInputFormatClass().GetCanonicalName());
    }
    catch (TypeLoadException e)
    {
        throw new IOException("Input format class not found", e);
    }
    allMapValueFieldsFrom = FieldSelectionHelper.ParseOutputKeyValueSpec(
        mapOutputKeyValueSpec, mapOutputKeyFieldList, mapOutputValueFieldList);
    Log.Info(FieldSelectionHelper.SpecToString(fieldSeparator, mapOutputKeyValueSpec,
        allMapValueFieldsFrom, mapOutputKeyFieldList, mapOutputValueFieldList)
        + "\nignoreInputKey:" + ignoreInputKey);
}
/// <summary>
/// Caches the flags that tell this test mapper whether to raise an
/// IOException or a runtime exception during the run.
/// </summary>
protected override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    ioEx = jobConf.GetBoolean("multithreaded.ioException", false);
    rtEx = jobConf.GetBoolean("multithreaded.runtimeException", false);
}
/// <summary>
/// Wires up one worker thread: creates a fresh mapper instance and builds a
/// per-thread MapContext backed by the SubMap* reader/writer/reporter adapters.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
internal MapRunner(MultithreadedMapper <K1, V1, K2, V2> _enclosing, Mapper.Context context)
{
    this._enclosing = _enclosing;
    // Reader adapter through which this runner pulls its share of the input.
    reader = new MultithreadedMapper.SubMapRecordReader(this);
    // Each runner gets its own mapper instance so no mapper state is shared
    // across threads.
    this.mapper = ReflectionUtils.NewInstance(this._enclosing.mapClass, context.GetConfiguration());
    // Build the context the sub-mapper will see: configuration, attempt ID and
    // input split come from the outer context; I/O and reporting go through the
    // per-runner adapters created above.
    MapContext <K1, V1, K2, V2> mapContext = new MapContextImpl <K1, V1, K2, V2>(
        this._enclosing.outer.GetConfiguration(),
        this._enclosing.outer.GetTaskAttemptID(),
        this.reader,
        new MultithreadedMapper.SubMapRecordWriter(this),
        context.GetOutputCommitter(),
        new MultithreadedMapper.SubMapStatusReporter(this),
        this._enclosing.outer.GetInputSplit());
    this.subcontext = new WrappedMapper <K1, V1, K2, V2>().GetMapContext(mapContext);
    // Initialize the reader last, once the surrounding plumbing is in place.
    this.reader.Initialize(context.GetInputSplit(), context);
}
/// <summary>
/// Builds the map-side chain (the <c>true</c> flag selects the map phase)
/// and configures it from the job configuration.
/// </summary>
protected internal override void Setup(Mapper.Context context)
{
    this.chain = new Org.Apache.Hadoop.Mapreduce.Lib.Chain.Chain(true);
    this.chain.Setup(context.GetConfiguration());
}
// my sleep class
/// <summary>attempts to access tokenCache as from client</summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Map(IntWritable key, IntWritable value, Mapper.Context context)
{
    // get context token storage:
    Credentials contextCredentials = context.GetCredentials();
    ICollection <Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> > contextTokenCollection = contextCredentials.GetAllTokens();
    foreach (Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> t in contextTokenCollection)
    {
        System.Console.Out.WriteLine("Context token: [" + t + "]");
    }
    if (contextTokenCollection.Count != 2)
    {
        // one job token and one delegation token
        // fail the test:
        throw new RuntimeException("Exactly 2 tokens are expected in the contextTokenCollection: "
            + "one job token and one delegation token, but was found " + contextTokenCollection.Count
            + " tokens.");
    }
    // The delegation token must be retrievable by its well-known key.
    Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> dt = contextCredentials.GetToken(new Text(DelegationTokenKey));
    if (dt == null)
    {
        throw new RuntimeException("Token for key [" + DelegationTokenKey + "] not found in the job context.");
    }
    // The binary-credentials config key should have been scrubbed from the
    // task's configuration by the framework.
    string tokenFile0 = context.GetConfiguration().Get(MRJobConfig.MapreduceJobCredentialsBinary);
    if (tokenFile0 != null)
    {
        throw new RuntimeException("Token file key [" + MRJobConfig.MapreduceJobCredentialsBinary
            + "] found in the configuration. It should have been removed from the configuration.");
    }
    // The test passes the token file name via its own configuration key instead.
    string tokenFile = context.GetConfiguration().Get(KeySecurityTokenFileName);
    if (tokenFile == null)
    {
        throw new RuntimeException("Token file key [" + KeySecurityTokenFileName + "] not found in the job configuration."
            );
    }
    // Read the single token back out of the binary credentials file.
    Credentials binaryCredentials = new Credentials();
    binaryCredentials.ReadTokenStorageStream(new DataInputStream(new FileInputStream(tokenFile)));
    ICollection <Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> > binaryTokenCollection = binaryCredentials.GetAllTokens();
    if (binaryTokenCollection.Count != 1)
    {
        throw new RuntimeException("The token collection read from file [" + tokenFile + "] must have size = 1.");
    }
    Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> binTok = binaryTokenCollection.GetEnumerator().Next();
    System.Console.Out.WriteLine("The token read from binary file: t = [" + binTok + "]");
    // Verify that dt is same as the token in the file:
    if (!dt.Equals(binTok))
    {
        throw new RuntimeException("Delegation token in job is not same as the token passed in file:"
            + " tokenInFile=[" + binTok + "], dt=[" + dt + "].");
    }
    // Now test the user tokens.
    UserGroupInformation ugi = UserGroupInformation.GetCurrentUser();
    // Print all the UGI tokens for diagnostic purposes:
    ICollection <Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> > ugiTokenCollection = ugi.GetTokens();
    foreach (Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> t_1 in ugiTokenCollection)
    {
        System.Console.Out.WriteLine("UGI token: [" + t_1 + "]");
    }
    // The current user's credentials must also carry the delegation token...
    Org.Apache.Hadoop.Security.Token.Token <TokenIdentifier> ugiToken = ugi.GetCredentials().GetToken(new Text(DelegationTokenKey));
    if (ugiToken == null)
    {
        throw new RuntimeException("Token for key [" + DelegationTokenKey + "] not found among the UGI tokens."
            );
    }
    // ...and it must match the one passed in via the binary file.
    if (!ugiToken.Equals(binTok))
    {
        throw new RuntimeException("UGI token is not same as the token passed in binary file:"
            + " tokenInBinFile=[" + binTok + "], ugiTok=[" + ugiToken + "].");
    }
    // All checks passed; delegate to the base (sleep) mapper behavior.
    base.Map(key, value, context);
}
/// <summary>
/// Reads the configured source count and fails fast when it is not positive.
/// </summary>
protected override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    srcs = jobConf.GetInt("testdatamerge.sources", 0);
    NUnit.Framework.Assert.IsTrue("Invalid src count: " + srcs, srcs > 0);
}
/// <summary>
/// Records that this mapper saw the value, then re-emits the value with this
/// mapper's name appended.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Map(LongWritable key, Text value, Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    WriteFlag(jobConf, "map." + name + ".value." + value);
    context.Write(key, new Text(value + name));
}
/// <summary>Delegates aggregator initialization to the shared job-base setup.</summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected internal override void Setup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    ValueAggregatorJobBase.Setup(jobConf);
}
/// <summary>
/// Asserts that the distributed-cache files, archives, symlinks and the job
/// jar were all localized, extracted and put on the task classpath correctly.
/// </summary>
/// <exception cref="System.IO.IOException"/>
protected override void Setup(Mapper.Context context)
{
    Configuration conf = context.GetConfiguration();
    Path[] localFiles = context.GetLocalCacheFiles();
    URI[] files = context.GetCacheFiles();
    Path[] localArchives = context.GetLocalCacheArchives();
    URI[] archives = context.GetCacheArchives();
    // Check that 4 (2 + appjar + DistrubutedCacheChecker jar) files
    // and 2 archives are present
    NUnit.Framework.Assert.AreEqual(4, localFiles.Length);
    NUnit.Framework.Assert.AreEqual(4, files.Length);
    NUnit.Framework.Assert.AreEqual(2, localArchives.Length);
    NUnit.Framework.Assert.AreEqual(2, archives.Length);
    // Check lengths of the files
    IDictionary <string, Path> filesMap = PathsToMap(localFiles);
    NUnit.Framework.Assert.IsTrue(filesMap.Contains("distributed.first.symlink"));
    // The symlinked first file is expected to be exactly 1 byte long.
    NUnit.Framework.Assert.AreEqual(1, localFs.GetFileStatus(filesMap["distributed.first.symlink"]).GetLen());
    NUnit.Framework.Assert.IsTrue(filesMap.Contains("distributed.second.jar"));
    NUnit.Framework.Assert.IsTrue(localFs.GetFileStatus(filesMap["distributed.second.jar"]).GetLen() > 1);
    // Check extraction of the archive
    IDictionary <string, Path> archivesMap = PathsToMap(localArchives);
    NUnit.Framework.Assert.IsTrue(archivesMap.Contains("distributed.third.jar"));
    NUnit.Framework.Assert.IsTrue(localFs.Exists(new Path(archivesMap["distributed.third.jar"], "distributed.jar.inside3")));
    NUnit.Framework.Assert.IsTrue(archivesMap.Contains("distributed.fourth.jar"));
    NUnit.Framework.Assert.IsTrue(localFs.Exists(new Path(archivesMap["distributed.fourth.jar"], "distributed.jar.inside4")));
    // Check the class loaders
    Log.Info("Java Classpath: " + Runtime.GetProperty("java.class.path"));
    ClassLoader cl = Sharpen.Thread.CurrentThread().GetContextClassLoader();
    // Both the file and the archive should have been added to classpath, so
    // both should be reachable via the class loader.
    NUnit.Framework.Assert.IsNotNull(cl.GetResource("distributed.jar.inside2"));
    NUnit.Framework.Assert.IsNotNull(cl.GetResource("distributed.jar.inside3"));
    NUnit.Framework.Assert.IsNotNull(cl.GetResource("distributed.jar.inside4"));
    // The Job Jar should have been extracted to a folder named "job.jar" and
    // added to the classpath; the two jar files in the lib folder in the Job
    // Jar should have also been added to the classpath
    NUnit.Framework.Assert.IsNotNull(cl.GetResource("job.jar/"));
    NUnit.Framework.Assert.IsNotNull(cl.GetResource("job.jar/lib/lib1.jar"));
    NUnit.Framework.Assert.IsNotNull(cl.GetResource("job.jar/lib/lib2.jar"));
    // Check that the symlink for the renaming was created in the cwd;
    FilePath symlinkFile = new FilePath("distributed.first.symlink");
    NUnit.Framework.Assert.IsTrue(symlinkFile.Exists());
    NUnit.Framework.Assert.AreEqual(1, symlinkFile.Length());
    // Check that the symlink for the Job Jar was created in the cwd and
    // points to the extracted directory
    FilePath jobJarDir = new FilePath("job.jar");
    if (Shell.Windows)
    {
        // Windows has no native symlinks; use the platform-specific check.
        NUnit.Framework.Assert.IsTrue(IsWindowsSymlinkedDirectory(jobJarDir));
    }
    else
    {
        NUnit.Framework.Assert.IsTrue(FileUtils.IsSymlink(jobJarDir));
        NUnit.Framework.Assert.IsTrue(jobJarDir.IsDirectory());
    }
}
/// <summary>Records that this mapper's cleanup phase ran.</summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Cleanup(Mapper.Context context)
{
    Configuration jobConf = context.GetConfiguration();
    WriteFlag(jobConf, "map.cleanup." + name);
}