/// <summary>
/// Anonymous-class-style mock: forwards every argument unchanged to the base
/// TaskImpl constructor and adds no state of its own.
/// </summary>
public _MockTaskImpl_715(JobId baseArg1, int baseArg2, EventHandler baseArg3, Path baseArg4,
    JobConf baseArg5, TaskAttemptListener baseArg6,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> baseArg7,
    Credentials baseArg8, Clock baseArg9, int baseArg10, MRAppMetrics baseArg11,
    AppContext baseArg12, TaskType baseArg13)
    : base(baseArg1, baseArg2, baseArg3, baseArg4, baseArg5, baseArg6, baseArg7,
        baseArg8, baseArg9, baseArg10, baseArg11, baseArg12, baseArg13)
{
}
/// <summary>
/// Builds a reduce task: delegates to the base task constructor with
/// <c>TaskType.Reduce</c> and remembers how many map tasks feed this reducer.
/// </summary>
public ReduceTaskImpl(JobId jobId, int partition, EventHandler eventHandler, Path jobFile,
    JobConf conf, int numMapTasks, TaskAttemptListener taskAttemptListener,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock, int appAttemptId, MRAppMetrics metrics,
    AppContext appContext)
    : base(jobId, TaskType.Reduce, partition, eventHandler, jobFile, conf,
        taskAttemptListener, jobToken, credentials, clock, appAttemptId, metrics, appContext)
{
    // Retained so attempts created by this task know the map-output count.
    this.numMapTasks = numMapTasks;
}
/// <summary>
/// Builds a map task attempt: passes the split's host locations to the base
/// attempt constructor and keeps the split metadata for later use.
/// </summary>
public MapTaskAttemptImpl(TaskId taskId, int attempt, EventHandler eventHandler, Path jobFile,
    int partition, JobSplit.TaskSplitMetaInfo splitInfo, JobConf conf,
    TaskAttemptListener taskAttemptListener,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock, AppContext appContext)
    : base(taskId, attempt, eventHandler, taskAttemptListener, jobFile, partition, conf,
        splitInfo.GetLocations(), jobToken, credentials, clock, appContext)
{
    this.splitInfo = splitInfo;
}
/// <summary>
/// Mock task attempt bound to an enclosing <see cref="TestTaskImpl"/> instance.
/// Uses the outer test's shared <c>dataLocations</c> array as the attempt's
/// host locations and records the task type for the mock.
/// </summary>
public MockTaskAttemptImpl(TestTaskImpl _enclosing, TaskId taskId, int id,
    EventHandler eventHandler, TaskAttemptListener taskAttemptListener, Path jobFile,
    int partition, JobConf conf,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock, AppContext appContext, TaskType taskType)
    // BUGFIX: the original wrote "this._enclosing.dataLocations" here, but `this`
    // is not accessible in a constructor initializer (CS0027) and the field is not
    // yet assigned at this point. Read from the `_enclosing` parameter instead.
    : base(taskId, id, eventHandler, taskAttemptListener, jobFile, partition, conf,
        _enclosing.dataLocations, jobToken, credentials, clock, appContext)
{
    this._enclosing = _enclosing;
    this.taskType = taskType;
}
/// <summary>
/// Mock task bound to an enclosing <see cref="TestTaskImpl"/> instance.
/// Delegates to the base task constructor with the caller-supplied task type,
/// then stores the outer reference and the type.
/// </summary>
public MockTaskImpl(TestTaskImpl _enclosing, JobId jobId, int partition,
    EventHandler eventHandler, Path remoteJobConfFile, JobConf conf,
    TaskAttemptListener taskAttemptListener,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock, int startCount, MRAppMetrics metrics,
    AppContext appContext, TaskType taskType)
    : base(jobId, taskType, partition, eventHandler, remoteJobConfFile, conf,
        taskAttemptListener, jobToken, credentials, clock, startCount, metrics, appContext)
{
    this._enclosing = _enclosing;
    this.taskType = taskType;
}
/// <summary>
/// Verifies that the container launch context built for a map task attempt
/// carries service data for both auxiliary shuffle handlers configured on the
/// job, and that the service-data map has the expected total size.
/// </summary>
public virtual void TestShuffleProviders()
{
    // --- Mocked identifiers and collaborators for a single map attempt. ---
    ApplicationId applicationId = ApplicationId.NewInstance(1, 1);
    JobId jobId = MRBuilderUtils.NewJobId(applicationId, 1);
    TaskId taskId = MRBuilderUtils.NewTaskId(jobId, 1, TaskType.Map);
    Path jobFile = Org.Mockito.Mockito.Mock<Path>();
    EventHandler eventHandler = Org.Mockito.Mockito.Mock<EventHandler>();
    TaskAttemptListener attemptListener = Org.Mockito.Mockito.Mock<TaskAttemptListener>();
    Org.Mockito.Mockito.When(attemptListener.GetAddress()).ThenReturn(
        new IPEndPoint("localhost", 0));
    // --- Job configuration: stub the local FS and register both test shuffle
    // handlers as NM aux services and as job-level shuffle providers. ---
    JobConf jobConf = new JobConf();
    jobConf.SetClass("fs.file.impl", typeof(TestShuffleProvider.StubbedFS), typeof(FileSystem));
    jobConf.SetBoolean("fs.file.impl.disable.cache", true);
    jobConf.Set(JobConf.MapredMapTaskEnv, string.Empty);
    string handler1Id = TestShuffleProvider.TestShuffleHandler1.MapreduceTestShuffleServiceid;
    string handler2Id = TestShuffleProvider.TestShuffleHandler2.MapreduceTestShuffleServiceid;
    jobConf.Set(YarnConfiguration.NmAuxServices, handler1Id + "," + handler2Id);
    jobConf.Set(string.Format(YarnConfiguration.NmAuxServiceFmt, handler1Id),
        typeof(TestShuffleProvider.TestShuffleHandler1).FullName);
    jobConf.Set(string.Format(YarnConfiguration.NmAuxServiceFmt, handler2Id),
        typeof(TestShuffleProvider.TestShuffleHandler2).FullName);
    jobConf.Set(MRJobConfig.MapreduceJobShuffleProviderServices, handler1Id + "," + handler2Id);
    // --- Security material for the attempt. ---
    Credentials credentials = new Credentials();
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken =
        new Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier>(
            Sharpen.Runtime.GetBytesForString(("tokenid")),
            Sharpen.Runtime.GetBytesForString(("tokenpw")),
            new Text("tokenkind"), new Text("tokenservice"));
    // --- Build the attempt and its container launch context. ---
    TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
        Org.Mockito.Mockito.Mock<JobSplit.TaskSplitMetaInfo>(), jobConf, attemptListener,
        jobToken, credentials, new SystemClock(), null);
    jobConf.Set(MRJobConfig.ApplicationAttemptId, taImpl.GetID().ToString());
    ContainerLaunchContext launchCtx = TaskAttemptImpl.CreateContainerLaunchContext(null,
        jobConf, jobToken, taImpl.CreateRemoteTask(), TypeConverter.FromYarn(jobId),
        Org.Mockito.Mockito.Mock<WrappedJvmID>(), attemptListener, credentials);
    // --- Both handlers must appear in the launch context's service data. ---
    IDictionary<string, ByteBuffer> serviceDataMap = launchCtx.GetServiceData();
    NUnit.Framework.Assert.IsNotNull("TestShuffleHandler1 is missing",
        serviceDataMap[TestShuffleProvider.TestShuffleHandler1.MapreduceTestShuffleServiceid]);
    NUnit.Framework.Assert.IsNotNull("TestShuffleHandler2 is missing",
        serviceDataMap[TestShuffleProvider.TestShuffleHandler2.MapreduceTestShuffleServiceid]);
    // 3 = the two test handlers plus the default entry already in the map.
    // NOTE(review): count of 3 is taken from the original assertion — confirm
    // against CreateContainerLaunchContext if handler wiring changes.
    NUnit.Framework.Assert.IsTrue("mismatch number of services in map",
        serviceDataMap.Count == 3);
}
/// <summary>
/// Test job bound to an enclosing <see cref="MRApp"/> instance. Pulls the
/// start count, completed-task map, metrics, and AM info list from the outer
/// app and forwards them to the base JobImpl constructor, then builds the
/// local state machine.
/// </summary>
public TestJob(MRApp _enclosing, JobId jobId, ApplicationAttemptId applicationAttemptId,
    Configuration conf, EventHandler eventHandler, TaskAttemptListener taskAttemptListener,
    Clock clock, OutputCommitter committer, bool newApiCommitter, string user,
    AppContext appContext, JobStateInternal forcedState, string diagnostic)
    // BUGFIX: the original used "this._enclosing.…" four times inside this base
    // initializer; `this` is not accessible in a constructor initializer (CS0027)
    // and the field is unassigned here. Read from the `_enclosing` parameter.
    : base(jobId, MRApp.GetApplicationAttemptId(MRApp.applicationId,
        _enclosing.GetStartCount()), conf, eventHandler, taskAttemptListener,
        new JobTokenSecretManager(), new Credentials(), clock,
        _enclosing.GetCompletedTaskFromPreviousRun(), _enclosing.metrics, committer,
        newApiCommitter, user, Runtime.CurrentTimeMillis(), _enclosing.GetAllAMInfos(),
        appContext, forcedState, diagnostic)
{
    this._enclosing = _enclosing;
    // This "this leak" is okay because the retained pointer is in an
    // instance variable.
    this.localStateMachine = this.localFactory.Make(this);
}
/// <summary>
/// Per-test fixture setup: resets the dispatcher, bumps the attempt start
/// count, and replaces every collaborator field with a fresh mock or stub.
/// </summary>
public virtual void Setup()
{
    dispatcher = new InlineDispatcher();
    // Each test run gets a distinct application attempt number.
    ++startCount;
    conf = new JobConf();
    taskAttemptListener = Org.Mockito.Mockito.Mock<TaskAttemptListener>();
    jobToken = (Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier>)
        Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Security.Token.Token>();
    remoteJobConfFile = Org.Mockito.Mockito.Mock<Path>();
    credentials = null;
    clock = new SystemClock();
    metrics = Org.Mockito.Mockito.Mock<MRAppMetrics>();
    // Single (null) data location; handed to every mock attempt via the stubbed
    // split metadata below.
    dataLocations = new string[1];
    appId = ApplicationId.NewInstance(Runtime.CurrentTimeMillis(), 1);
    jobId = Org.Apache.Hadoop.Yarn.Util.Records.NewRecord<JobId>();
    jobId.SetId(1);
    jobId.SetAppId(appId);
    appContext = Org.Mockito.Mockito.Mock<AppContext>();
    taskSplitMetaInfo = Org.Mockito.Mockito.Mock<JobSplit.TaskSplitMetaInfo>();
    Org.Mockito.Mockito.When(taskSplitMetaInfo.GetLocations()).ThenReturn(dataLocations);
    taskAttempts = new AList<TestTaskImpl.MockTaskAttemptImpl>();
}
/// <summary>
/// Builds a reduce task attempt: reducers have no data locality, so an empty
/// host-location array is passed to the base attempt constructor; the number
/// of map tasks to fetch from is retained.
/// </summary>
public ReduceTaskAttemptImpl(TaskId id, int attempt, EventHandler eventHandler, Path jobFile,
    int partition, int numMapTasks, JobConf conf, TaskAttemptListener taskAttemptListener,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock, AppContext appContext)
    : base(id, attempt, eventHandler, taskAttemptListener, jobFile, partition, conf,
        new string[] { }, jobToken, credentials, clock, appContext)
{
    this.numMapTasks = numMapTasks;
}
/// <summary>
/// Builds a map task: delegates to the base task constructor with
/// <c>TaskType.Map</c> and keeps the split metadata for attempt creation.
/// </summary>
public MapTaskImpl(JobId jobId, int partition, EventHandler eventHandler,
    Path remoteJobConfFile, JobConf conf, JobSplit.TaskSplitMetaInfo taskSplitMetaInfo,
    TaskAttemptListener taskAttemptListener,
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken,
    Credentials credentials, Clock clock, int appAttemptId, MRAppMetrics metrics,
    AppContext appContext)
    : base(jobId, TaskType.Map, partition, eventHandler, remoteJobConfFile, conf,
        taskAttemptListener, jobToken, credentials, clock, appAttemptId, metrics, appContext)
{
    this.taskSplitMetaInfo = taskSplitMetaInfo;
}
/// <summary>
/// Verifies that the container launch context built for a map task attempt
/// preserves the application ACLs, every token in the attempt's credentials,
/// and the credentials' secret key.
/// </summary>
public virtual void TestAttemptContainerRequest()
{
    Text secretKeyAlias = new Text("secretkeyalias");
    byte[] secretKey = Sharpen.Runtime.GetBytesForString(("secretkey"));
    IDictionary<ApplicationAccessType, string> acls =
        new Dictionary<ApplicationAccessType, string>(1);
    acls[ApplicationAccessType.ViewApp] = "otheruser";
    // --- Mocked identifiers and collaborators for a single map attempt. ---
    ApplicationId appId = ApplicationId.NewInstance(1, 1);
    JobId jobId = MRBuilderUtils.NewJobId(appId, 1);
    TaskId taskId = MRBuilderUtils.NewTaskId(jobId, 1, TaskType.Map);
    Path jobFile = Org.Mockito.Mockito.Mock<Path>();
    EventHandler eventHandler = Org.Mockito.Mockito.Mock<EventHandler>();
    TaskAttemptListener taListener = Org.Mockito.Mockito.Mock<TaskAttemptListener>();
    Org.Mockito.Mockito.When(taListener.GetAddress()).ThenReturn(
        new IPEndPoint("localhost", 0));
    JobConf jobConf = new JobConf();
    jobConf.SetClass("fs.file.impl",
        typeof(TestTaskAttemptContainerRequest.StubbedFS), typeof(FileSystem));
    jobConf.SetBoolean("fs.file.impl.disable.cache", true);
    jobConf.Set(JobConf.MapredMapTaskEnv, string.Empty);
    // setup UGI for security so tokens and keys are preserved
    jobConf.Set(CommonConfigurationKeysPublic.HadoopSecurityAuthentication, "kerberos");
    UserGroupInformation.SetConfiguration(jobConf);
    Credentials credentials = new Credentials();
    credentials.AddSecretKey(secretKeyAlias, secretKey);
    Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier> jobToken =
        new Org.Apache.Hadoop.Security.Token.Token<JobTokenIdentifier>(
            Sharpen.Runtime.GetBytesForString(("tokenid")),
            Sharpen.Runtime.GetBytesForString(("tokenpw")),
            new Text("tokenkind"), new Text("tokenservice"));
    // --- Build the attempt and its container launch context. ---
    TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
        Org.Mockito.Mockito.Mock<JobSplit.TaskSplitMetaInfo>(), jobConf, taListener,
        jobToken, credentials, new SystemClock(), null);
    jobConf.Set(MRJobConfig.ApplicationAttemptId, taImpl.GetID().ToString());
    ContainerLaunchContext launchCtx = TaskAttemptImpl.CreateContainerLaunchContext(acls,
        jobConf, jobToken, taImpl.CreateRemoteTask(), TypeConverter.FromYarn(jobId),
        Org.Mockito.Mockito.Mock<WrappedJvmID>(), taListener, credentials);
    NUnit.Framework.Assert.AreEqual("ACLs mismatch", acls, launchCtx.GetApplicationACLs());
    // Round-trip the serialized tokens out of the launch context.
    Credentials launchCredentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.Reset(launchCtx.GetTokens());
    launchCredentials.ReadTokenStorageStream(dibb);
    // verify all tokens specified for the task attempt are in the launch context
    foreach (Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> token
        in credentials.GetAllTokens())
    {
        Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> launchToken =
            launchCredentials.GetToken(token.GetService());
        NUnit.Framework.Assert.IsNotNull(
            "Token " + token.GetService() + " is missing", launchToken);
        NUnit.Framework.Assert.AreEqual(
            "Token " + token.GetService() + " mismatch", token, launchToken);
    }
    // verify the secret key is in the launch context
    NUnit.Framework.Assert.IsNotNull("Secret key missing",
        launchCredentials.GetSecretKey(secretKeyAlias));
    NUnit.Framework.Assert.IsTrue("Secret key mismatch",
        Arrays.Equals(secretKey, launchCredentials.GetSecretKey(secretKeyAlias)));
}