//@Test (timeout = 60000)
/// <summary>
/// Runs the sleep job against a mini cluster configured for Kerberos security
/// and logs the current user's tokens before executing the job via doAs.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
/// <exception cref="System.TypeLoadException"/>
public virtual void TestSleepJobWithSecurityOn()
{
    Log.Info("\n\n\nStarting testSleepJobWithSecurityOn().");
    // Silently skip when the MR app jar is not present in this build.
    if (!new FilePath(MiniMRYarnCluster.Appjar).Exists())
    {
        return;
    }
    mrCluster.GetConfig().Set(CommonConfigurationKeysPublic.HadoopSecurityAuthentication, "kerberos");
    mrCluster.GetConfig().Set(YarnConfiguration.RmKeytab, "/etc/krb5.keytab");
    mrCluster.GetConfig().Set(YarnConfiguration.NmKeytab, "/etc/krb5.keytab");
    mrCluster.GetConfig().Set(YarnConfiguration.RmPrincipal, "rm/sightbusy-lx@LOCALHOST");
    mrCluster.GetConfig().Set(YarnConfiguration.NmPrincipal, "nm/sightbusy-lx@LOCALHOST");
    UserGroupInformation.SetConfiguration(mrCluster.GetConfig());
    // Keep it in here instead of after RM/NM as multiple user logins happen in
    // the same JVM.
    UserGroupInformation currentUser = UserGroupInformation.GetCurrentUser();
    Log.Info("User name is " + currentUser.GetUserName());
    foreach (Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> tok in currentUser.GetTokens())
    {
        Log.Info("Token is " + tok.EncodeToUrlString());
    }
    currentUser.DoAs(new _PrivilegedExceptionAction_552());
}
/// <summary>Create a redirection URL</summary>
/// <param name="ugi">caller's UGI; supplies the short user name and, when security is on, the delegation token for the query string</param>
/// <param name="host">datanode to redirect to; hostname is preferred when full DatanodeInfo is available, otherwise the raw IP is used</param>
/// <param name="request">the incoming request, used for the scheme and raw path</param>
/// <param name="nn">namenode whose address is appended as a URL parameter</param>
/// <returns>the redirect URL pointing at the datanode's /getFileChecksum servlet</returns>
/// <exception cref="System.IO.IOException"/>
private Uri CreateRedirectURL(UserGroupInformation ugi, DatanodeID host, HttpServletRequest request, NameNode nn)
{
    string hostname = host is DatanodeInfo ? host.GetHostName() : host.GetIpAddr();
    string scheme = request.GetScheme();
    int port = host.GetInfoPort();
    if ("https".Equals(scheme))
    {
        // BUG FIX: the servlet-context attribute may be absent. The original code
        // unboxed straight to int ((int)GetAttribute(...)), which throws
        // NullReferenceException when the attribute is null, and its subsequent
        // "!= null" check on a non-nullable int was always true (dead code).
        // Retrieve as object, null-check, then unbox.
        object portObject = GetServletContext().GetAttribute(DFSConfigKeys.DfsDatanodeHttpsPortKey);
        if (portObject != null)
        {
            port = (int)portObject;
        }
    }
    string encodedPath = ServletUtil.GetRawPath(request, "/fileChecksum");
    string dtParam = string.Empty;
    if (UserGroupInformation.IsSecurityEnabled())
    {
        // Propagate the first delegation token of the caller to the datanode.
        string tokenString = ugi.GetTokens().GetEnumerator().Next().EncodeToUrlString();
        dtParam = JspHelper.GetDelegationTokenUrlParam(tokenString);
    }
    string addr = nn.GetNameNodeAddressHostPortString();
    string addrParam = JspHelper.GetUrlParam(JspHelper.NamenodeAddress, addr);
    return new Uri(scheme, hostname, port, "/getFileChecksum" + encodedPath + '?' +
        "ugi=" + ServletUtil.EncodeQueryValue(ugi.GetShortUserName()) + dtParam + addrParam);
}
/// <summary>
/// Resolves the UGI for the given request and asserts that the service of its
/// first token equals the expected string.
/// </summary>
/// <exception cref="System.IO.IOException"/>
private void VerifyServiceInToken(ServletContext context, HttpServletRequest request, string expected)
{
    UserGroupInformation ugi = JspHelper.GetUGI(context, request, conf);
    Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> firstToken =
        ugi.GetTokens().GetEnumerator().Next();
    NUnit.Framework.Assert.AreEqual(expected, firstToken.GetService().ToString());
}
/// <summary>
/// Picks the delegation token matching this instance's service name out of the
/// given UGI's token set, or null when none matches.
/// </summary>
internal Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> SelectDelegationToken(UserGroupInformation ugi)
{
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> candidates = ugi.GetTokens();
    return dtSelector.SelectToken(serviceName, candidates);
}
/// <summary>
/// Verifies that HAUtil clones a logical-URI delegation token once per
/// physical NN address, that the clones are selectable by IPC service, and
/// that recloning after switching to host-based services restores matches.
/// </summary>
/// <exception cref="System.Exception"/>
public virtual void TestHAUtilClonesDelegationTokens()
{
    Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> token =
        GetDelegationToken(fs, "JobTracker");
    UserGroupInformation ugi = UserGroupInformation.CreateRemoteUser("test");
    URI haUri = new URI("hdfs://my-ha-uri/");
    token.SetService(HAUtil.BuildTokenServiceForLogicalUri(haUri, HdfsConstants.HdfsUriScheme));
    ugi.AddToken(token);
    // Physical addresses of the two NNs behind the logical URI.
    ICollection<IPEndPoint> nnAddrs = new HashSet<IPEndPoint>();
    nnAddrs.AddItem(new IPEndPoint("localhost", nn0.GetNameNodeAddress().Port));
    nnAddrs.AddItem(new IPEndPoint("localhost", nn1.GetNameNodeAddress().Port));
    HAUtil.CloneDelegationTokenForLogicalUri(ugi, haUri, nnAddrs);
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> tokens = ugi.GetTokens();
    // Expect the logical token plus one clone per NN.
    NUnit.Framework.Assert.AreEqual(3, tokens.Count);
    Log.Info("Tokens:\n" + Joiner.On("\n").Join(tokens));
    DelegationTokenSelector dts = new DelegationTokenSelector();
    // check that the token selected for one of the physical IPC addresses
    // matches the one we received
    foreach (IPEndPoint nnAddr in nnAddrs)
    {
        Text ipcDtService = SecurityUtil.BuildTokenService(nnAddr);
        Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> selected =
            dts.SelectToken(ipcDtService, ugi.GetTokens());
        NUnit.Framework.Assert.IsNotNull(selected);
        Assert.AssertArrayEquals(token.GetIdentifier(), selected.GetIdentifier());
        Assert.AssertArrayEquals(token.GetPassword(), selected.GetPassword());
    }
    // switch to host-based tokens, shouldn't match existing tokens
    SecurityUtilTestHelper.SetTokenServiceUseIp(false);
    foreach (IPEndPoint nnAddr in nnAddrs)
    {
        Text ipcDtService = SecurityUtil.BuildTokenService(nnAddr);
        Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> selected =
            dts.SelectToken(ipcDtService, ugi.GetTokens());
        NUnit.Framework.Assert.IsNull(selected);
    }
    // reclone the tokens, and see if they match now
    HAUtil.CloneDelegationTokenForLogicalUri(ugi, haUri, nnAddrs);
    foreach (IPEndPoint nnAddr in nnAddrs)
    {
        Text ipcDtService = SecurityUtil.BuildTokenService(nnAddr);
        Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> selected =
            dts.SelectToken(ipcDtService, ugi.GetTokens());
        NUnit.Framework.Assert.IsNotNull(selected);
        Assert.AssertArrayEquals(token.GetIdentifier(), selected.GetIdentifier());
        Assert.AssertArrayEquals(token.GetPassword(), selected.GetPassword());
    }
}
/// <summary>
/// Verifies that the LocalContainerAllocator replaces the AM's old AMRM token
/// in the UGI with the updated token handed out by the (mock) RM, leaving
/// exactly one AMRM token whose identifier, password, and service match.
/// </summary>
public virtual void TestAMRMTokenUpdate()
{
    Configuration conf = new Configuration();
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.NewInstance(ApplicationId.NewInstance(1, 1), 1);
    AMRMTokenIdentifier oldTokenId = new AMRMTokenIdentifier(attemptId, 1);
    AMRMTokenIdentifier newTokenId = new AMRMTokenIdentifier(attemptId, 2);
    Org.Apache.Hadoop.Security.Token.Token<AMRMTokenIdentifier> oldToken =
        new Org.Apache.Hadoop.Security.Token.Token<AMRMTokenIdentifier>(
            oldTokenId.GetBytes(), Sharpen.Runtime.GetBytesForString("oldpassword"),
            oldTokenId.GetKind(), new Text());
    Org.Apache.Hadoop.Security.Token.Token<AMRMTokenIdentifier> newToken =
        new Org.Apache.Hadoop.Security.Token.Token<AMRMTokenIdentifier>(
            newTokenId.GetBytes(), Sharpen.Runtime.GetBytesForString("newpassword"),
            newTokenId.GetKind(), new Text());
    TestLocalContainerAllocator.MockScheduler scheduler =
        new TestLocalContainerAllocator.MockScheduler();
    scheduler.amToken = newToken;
    LocalContainerAllocator lca =
        new TestLocalContainerAllocator.StubbedLocalContainerAllocator(scheduler);
    lca.Init(conf);
    lca.Start();
    UserGroupInformation testUgi =
        UserGroupInformation.CreateUserForTesting("someuser", new string[0]);
    testUgi.AddToken(oldToken);
    testUgi.DoAs(new _PrivilegedExceptionAction_144(lca));
    lca.Close();
    // verify there is only one AMRM token in the UGI and it matches the
    // updated token from the RM
    int tokenCount = 0;
    Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> ugiToken = null;
    foreach (Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> candidate in testUgi.GetTokens())
    {
        if (AMRMTokenIdentifier.KindName.Equals(candidate.GetKind()))
        {
            ugiToken = candidate;
            ++tokenCount;
        }
    }
    NUnit.Framework.Assert.AreEqual("too many AMRM tokens", 1, tokenCount);
    Assert.AssertArrayEquals("token identifier not updated",
        newToken.GetIdentifier(), ugiToken.GetIdentifier());
    Assert.AssertArrayEquals("token password not updated",
        newToken.GetPassword(), ugiToken.GetPassword());
    NUnit.Framework.Assert.AreEqual("AMRM token service not updated",
        new Text(ClientRMProxy.GetAMRMTokenService(conf)), ugiToken.GetService());
}
/// <summary>
/// Verifies UGI token handling: added tokens and credentials are visible via
/// GetTokens()/GetCredentials(), the token view is unmodifiable, and tokens
/// flow through DoAs.
/// </summary>
/// <exception cref="System.Exception"/>
public virtual void TestUGITokens<T>()
    where T : TokenIdentifier
{
    // from Mockito mocks
    UserGroupInformation ugi = UserGroupInformation.CreateUserForTesting("TheDoctor",
        new string[] { "TheTARDIS" });
    Org.Apache.Hadoop.Security.Token.Token<T> mockTok1 =
        Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Security.Token.Token>();
    Org.Mockito.Mockito.When(mockTok1.GetService()).ThenReturn(new Text("t1"));
    Org.Apache.Hadoop.Security.Token.Token<T> mockTok2 =
        Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Security.Token.Token>();
    Org.Mockito.Mockito.When(mockTok2.GetService()).ThenReturn(new Text("t2"));
    Credentials creds = new Credentials();
    byte[] secretKey = new byte[] { };
    Text secretName = new Text("shhh");
    creds.AddSecretKey(secretName, secretKey);
    ugi.AddToken(mockTok1);
    ugi.AddToken(mockTok2);
    ugi.AddCredentials(creds);
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> tokenView = ugi.GetTokens();
    Assert.True(tokenView.Contains(mockTok1));
    Assert.True(tokenView.Contains(mockTok2));
    Assert.Equal(2, tokenView.Count);
    Credentials ugiCreds = ugi.GetCredentials();
    NUnit.Framework.Assert.AreSame(secretKey, ugiCreds.GetSecretKey(secretName));
    Assert.Equal(1, ugiCreds.NumberOfSecretKeys());
    // Can't modify tokens: the collection returned by GetTokens() must be read-only.
    try
    {
        tokenView.Remove(mockTok1);
        NUnit.Framework.Assert.Fail("Shouldn't be able to modify token collection from UGI");
    }
    catch (NotSupportedException)
    {
    }
    // ensure that the tokens are passed through doAs
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> otherSet =
        ugi.DoAs(new _PrivilegedExceptionAction_612());
    Assert.True(otherSet.Contains(mockTok1));
    Assert.True(otherSet.Contains(mockTok2));
}
/// <summary>
/// Asserts that the UGI's token collection and its credentials contain exactly
/// the given tokens — no more, no fewer — keyed by each token's service.
/// </summary>
private void CheckTokens(UserGroupInformation ugi,
    params Org.Apache.Hadoop.Security.Token.Token<object>[] tokens)
{
    // check the ugi's token collection
    ICollection<Org.Apache.Hadoop.Security.Token.Token<object>> ugiTokens = ugi.GetTokens();
    foreach (Org.Apache.Hadoop.Security.Token.Token<object> expected in tokens)
    {
        Assert.True(ugiTokens.Contains(expected));
    }
    Assert.Equal(tokens.Length, ugiTokens.Count);
    // check the ugi's credentials
    Credentials ugiCreds = ugi.GetCredentials();
    foreach (Org.Apache.Hadoop.Security.Token.Token<object> expected in tokens)
    {
        // Credentials must hold the very same token instance under its service key.
        NUnit.Framework.Assert.AreSame(expected, ugiCreds.GetToken(expected.GetService()));
    }
    Assert.Equal(tokens.Length, ugiCreds.NumberOfTokens());
}
// my sleep class
/// <summary>attempts to access tokenCache as from client</summary>
/// <remarks>
/// Verifies, from inside a running task, that: the context credentials hold
/// exactly one job token and one delegation token; the binary credentials key
/// was scrubbed from the task configuration; the token read back from the
/// shipped binary file equals the context's delegation token; and the current
/// UGI carries that same token. Any mismatch fails the job via RuntimeException.
/// </remarks>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
protected override void Map(IntWritable key, IntWritable value, Mapper.Context context)
{
    // get context token storage:
    Credentials contextCredentials = context.GetCredentials();
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> contextTokenCollection = contextCredentials.GetAllTokens();
    // Dump every token visible to the task, for diagnostics.
    foreach (Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> t in contextTokenCollection)
    {
        System.Console.Out.WriteLine("Context token: [" + t + "]");
    }
    if (contextTokenCollection.Count != 2)
    {
        // one job token and one delegation token
        // fail the test:
        throw new RuntimeException("Exactly 2 tokens are expected in the contextTokenCollection: " + "one job token and one delegation token, but was found " + contextTokenCollection.Count + " tokens.");
    }
    Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> dt = contextCredentials.GetToken(new Text(DelegationTokenKey));
    if (dt == null)
    {
        throw new RuntimeException("Token for key [" + DelegationTokenKey + "] not found in the job context.");
    }
    // The binary-credentials config key must have been removed from the task config.
    string tokenFile0 = context.GetConfiguration().Get(MRJobConfig.MapreduceJobCredentialsBinary);
    if (tokenFile0 != null)
    {
        throw new RuntimeException("Token file key [" + MRJobConfig.MapreduceJobCredentialsBinary + "] found in the configuration. It should have been removed from the configuration.");
    }
    string tokenFile = context.GetConfiguration().Get(KeySecurityTokenFileName);
    if (tokenFile == null)
    {
        throw new RuntimeException("Token file key [" + KeySecurityTokenFileName + "] not found in the job configuration.");
    }
    // Read the credentials back from the binary token file shipped with the job.
    Credentials binaryCredentials = new Credentials();
    binaryCredentials.ReadTokenStorageStream(new DataInputStream(new FileInputStream(tokenFile)));
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> binaryTokenCollection = binaryCredentials.GetAllTokens();
    if (binaryTokenCollection.Count != 1)
    {
        throw new RuntimeException("The token collection read from file [" + tokenFile + "] must have size = 1.");
    }
    Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> binTok = binaryTokenCollection.GetEnumerator().Next();
    System.Console.Out.WriteLine("The token read from binary file: t = [" + binTok + "]");
    // Verify that dt is same as the token in the file:
    if (!dt.Equals(binTok))
    {
        throw new RuntimeException("Delegation token in job is not same as the token passed in file:" + " tokenInFile=[" + binTok + "], dt=[" + dt + "].");
    }
    // Now test the user tokens.
    UserGroupInformation ugi = UserGroupInformation.GetCurrentUser();
    // Print all the UGI tokens for diagnostic purposes:
    ICollection<Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier>> ugiTokenCollection = ugi.GetTokens();
    foreach (Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> t_1 in ugiTokenCollection)
    {
        System.Console.Out.WriteLine("UGI token: [" + t_1 + "]");
    }
    Org.Apache.Hadoop.Security.Token.Token<TokenIdentifier> ugiToken = ugi.GetCredentials().GetToken(new Text(DelegationTokenKey));
    if (ugiToken == null)
    {
        throw new RuntimeException("Token for key [" + DelegationTokenKey + "] not found among the UGI tokens.");
    }
    if (!ugiToken.Equals(binTok))
    {
        throw new RuntimeException("UGI token is not same as the token passed in binary file:" + " tokenInBinFile=[" + binTok + "], ugiTok=[" + ugiToken + "].");
    }
    // All checks passed; fall through to the normal sleep-mapper behavior.
    base.Map(key, value, context);
}
/// <summary>
/// Locate a delegation token associated with the given HA cluster URI, and if
/// one is found, clone it to also represent the underlying namenode address.
/// </summary>
/// <param name="ugi">the UGI to modify</param>
/// <param name="haUri">the logical URI for the cluster</param>
/// <param name="nnAddrs">
/// collection of NNs in the cluster to which the token
/// applies
/// </param>
public static void CloneDelegationTokenForLogicalUri(UserGroupInformation ugi, URI haUri,
    ICollection<IPEndPoint> nnAddrs)
{
    // this cloning logic is only used by hdfs
    Text haService = Org.Apache.Hadoop.Hdfs.HAUtil.BuildTokenServiceForLogicalUri(haUri,
        HdfsConstants.HdfsUriScheme);
    Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> haToken =
        tokenSelector.SelectToken(haService, ugi.GetTokens());
    if (haToken == null)
    {
        Log.Debug("No HA service delegation token found for logical URI " + haUri);
        return;
    }
    foreach (IPEndPoint singleNNAddr in nnAddrs)
    {
        // this is a minor hack to prevent physical HA tokens from being
        // exposed to the user via UGI.getCredentials(), otherwise these
        // cloned tokens may be inadvertently propagated to jobs
        Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> specificToken =
            new Token.PrivateToken<DelegationTokenIdentifier>(haToken);
        SecurityUtil.SetTokenService(specificToken, singleNNAddr);
        Text alias = new Text(BuildTokenServicePrefixForLogicalUri(HdfsConstants.HdfsUriScheme) +
            "//" + specificToken.GetService());
        ugi.AddToken(alias, specificToken);
        Log.Debug("Mapped HA service delegation token for logical URI " + haUri +
            " to namenode " + singleNNAddr);
    }
}