/// <summary>General-purpose http PUT command to the httpfs server.</summary>
/// <param name="filename">The file to operate upon</param>
/// <param name="command">The command to perform (SETXATTR, etc)</param>
/// <param name="params">Parameters</param>
/// <exception cref="System.Exception"/>
private void PutCmd(string filename, string command, string @params)
{
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    // Remove leading / from filename so it composes into the URL template.
    if (filename[0] == '/')
    {
        filename = Sharpen.Runtime.Substring(filename, 1);
    }
    string pathOps = MessageFormat.Format("/webhdfs/v1/{0}?user.name={1}{2}{3}&op={4}",
        filename, user,
        (@params == null) ? string.Empty : "&",
        (@params == null) ? string.Empty : @params,
        command);
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.SetRequestMethod("PUT");
    conn.Connect();
    int resp = conn.GetResponseCode();
    // XAttr support is disabled on this cluster, so the server must reject the op.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpInternalError, resp);
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.GetErrorStream()));
    try
    {
        string err = reader.ReadLine();
        NUnit.Framework.Assert.IsTrue(err.Contains("RemoteException"));
        NUnit.Framework.Assert.IsTrue(err.Contains("XAttr"));
        NUnit.Framework.Assert.IsTrue(err.Contains("rejected"));
    }
    finally
    {
        // Fix: the reader was previously leaked (never closed).
        reader.Close();
    }
}
/// <summary>Talks to the http interface to create a file.</summary>
/// <param name="filename">The file to create</param>
/// <param name="perms">The permission field, if any (may be null)</param>
/// <exception cref="System.Exception"/>
private void CreateWithHttp(string filename, string perms)
{
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    // Strip any leading slash so the path slots cleanly into the URL template.
    if (filename[0] == '/')
    {
        filename = Sharpen.Runtime.Substring(filename, 1);
    }
    // Include the permission query parameter only when the caller supplied one.
    string pathOps = (perms == null)
        ? MessageFormat.Format("/webhdfs/v1/{0}?user.name={1}&op=CREATE", filename, user)
        : MessageFormat.Format("/webhdfs/v1/{0}?user.name={1}&permission={2}&op=CREATE",
            filename, user, perms);
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.AddRequestProperty("Content-Type", "application/octet-stream");
    conn.SetRequestMethod("PUT");
    conn.Connect();
    // The server should report that the file was created.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpCreated, conn.GetResponseCode());
}
/// <summary>Fire up our own hand-rolled MiniDFSCluster.</summary>
/// <remarks>
/// Fire up our own hand-rolled MiniDFSCluster. We do this here instead
/// of relying on TestHdfsHelper because we don't want to turn on XAttr
/// support.
/// </remarks>
/// <exception cref="System.Exception"/>
private void StartMiniDFS()
{
    FilePath rootDir = TestDirHelper.GetTestDir();
    // Point the cluster's log and data directories under the test dir,
    // unless the environment already configured them.
    if (Runtime.GetProperty("hadoop.log.dir") == null)
    {
        FilePath logDir = new FilePath(rootDir, "hadoop-log");
        Runtime.SetProperty("hadoop.log.dir", logDir.GetAbsolutePath());
    }
    if (Runtime.GetProperty("test.build.data") == null)
    {
        FilePath dataDir = new FilePath(rootDir, "hadoop-data");
        Runtime.SetProperty("test.build.data", dataDir.GetAbsolutePath());
    }
    Configuration clusterConf = HadoopUsersConfTestHelper.GetBaseConf();
    HadoopUsersConfTestHelper.AddUserConf(clusterConf);
    clusterConf.Set("fs.hdfs.impl.disable.cache", "true");
    clusterConf.Set("dfs.block.access.token.enable", "false");
    clusterConf.Set("dfs.permissions", "true");
    clusterConf.Set("hadoop.security.authentication", "simple");
    // Explicitly turn off XAttr support
    clusterConf.SetBoolean(DFSConfigKeys.DfsNamenodeXattrsEnabledKey, false);
    MiniDFSCluster.Builder clusterBuilder = new MiniDFSCluster.Builder(clusterConf);
    clusterBuilder.NumDataNodes(2);
    miniDfs = clusterBuilder.Build();
    // Remember the namenode configuration so the HttpFS server can reach it.
    nnConf = miniDfs.GetConfiguration(0);
}
/// <summary>
/// Exercises the "instrumentation" operation: an unauthorized user is
/// rejected, an admin user gets a JSON counters document, and the op is
/// rejected on a non-root path.
/// </summary>
public virtual void Instrumentation()
{
    CreateHttpFSServer(false);
    // "nobody" is not an admin, so the request must be refused.
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1?user.name={0}&op=instrumentation", "nobody"));
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    // Fix: Assert.AreEqual takes (expected, actual) — arguments were swapped,
    // producing misleading failure messages.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpUnauthorized, conn.GetResponseCode());
    // An admin user should receive the instrumentation JSON.
    url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1?user.name={0}&op=instrumentation",
        HadoopUsersConfTestHelper.GetHadoopUsers()[0]));
    conn = (HttpURLConnection)url.OpenConnection();
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, conn.GetResponseCode());
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.GetInputStream()));
    string line = reader.ReadLine();
    reader.Close();
    NUnit.Framework.Assert.IsTrue(line.Contains("\"counters\":{"));
    // The instrumentation op is only valid on the root path.
    url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1/foo?user.name={0}&op=instrumentation",
        HadoopUsersConfTestHelper.GetHadoopUsers()[0]));
    conn = (HttpURLConnection)url.OpenConnection();
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpBadRequest, conn.GetResponseCode());
}
/// <summary>Create an HttpFS Server to talk to the MiniDFSCluster we created.</summary>
/// <exception cref="System.Exception"/>
private void CreateHttpFSServer()
{
    FilePath homeDir = TestDirHelper.GetTestDir();
    NUnit.Framework.Assert.IsTrue(new FilePath(homeDir, "conf").Mkdir());
    NUnit.Framework.Assert.IsTrue(new FilePath(homeDir, "log").Mkdir());
    NUnit.Framework.Assert.IsTrue(new FilePath(homeDir, "temp").Mkdir());
    HttpFSServerWebApp.SetHomeDirForCurrentThread(homeDir.GetAbsolutePath());
    // Write the authentication signature secret the server will load.
    FilePath secretFile = new FilePath(new FilePath(homeDir, "conf"), "secret");
    TextWriter w = new FileWriter(secretFile);
    w.Write("secret");
    w.Close();
    // HDFS configuration
    FilePath hadoopConfDir = new FilePath(new FilePath(homeDir, "conf"), "hadoop-conf");
    if (!hadoopConfDir.Mkdirs())
    {
        // Fix: previously threw a bare IOException with no message, which made
        // setup failures undiagnosable.
        throw new IOException("Failed to create hadoop conf dir: " + hadoopConfDir);
    }
    string fsDefaultName = nnConf.Get(CommonConfigurationKeysPublic.FsDefaultNameKey);
    Configuration conf = new Configuration(false);
    conf.Set(CommonConfigurationKeysPublic.FsDefaultNameKey, fsDefaultName);
    // Explicitly turn off XAttr support
    conf.SetBoolean(DFSConfigKeys.DfsNamenodeXattrsEnabledKey, false);
    FilePath hdfsSite = new FilePath(hadoopConfDir, "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.WriteXml(os);
    os.Close();
    // HTTPFS configuration
    conf = new Configuration(false);
    conf.Set("httpfs.hadoop.config.dir", hadoopConfDir.ToString());
    conf.Set("httpfs.proxyuser." + HadoopUsersConfTestHelper.GetHadoopProxyUser() + ".groups",
        HadoopUsersConfTestHelper.GetHadoopProxyUserGroups());
    conf.Set("httpfs.proxyuser." + HadoopUsersConfTestHelper.GetHadoopProxyUser() + ".hosts",
        HadoopUsersConfTestHelper.GetHadoopProxyUserHosts());
    conf.Set("httpfs.authentication.signature.secret.file", secretFile.GetAbsolutePath());
    FilePath httpfsSite = new FilePath(new FilePath(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.WriteXml(os);
    os.Close();
    // Deploy the HttpFS webapp into the test Jetty server.
    ClassLoader cl = Sharpen.Thread.CurrentThread().GetContextClassLoader();
    Uri url = cl.GetResource("webapp");
    if (url == null)
    {
        // Fix: previously threw a bare IOException with no message.
        throw new IOException("webapp resource not found on classpath");
    }
    WebAppContext context = new WebAppContext(url.AbsolutePath, "/webhdfs");
    Org.Mortbay.Jetty.Server server = TestJettyHelper.GetJettyServer();
    server.AddHandler(context);
    server.Start();
}
/// <summary>
/// Runs the operation body as a proxy user impersonating the first
/// configured Hadoop user.
/// </summary>
public virtual void TestOperationDoAs()
{
    CreateHttpFSServer();
    // Build a proxy-user UGI for the first test user on top of the current user.
    string proxiedUser = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    UserGroupInformation proxyUgi = UserGroupInformation.CreateProxyUser(
        proxiedUser, UserGroupInformation.GetCurrentUser());
    proxyUgi.DoAs(new _PrivilegedExceptionAction_928(this));
}
/// <summary>A PUT request with no "op" parameter must be rejected as a bad request.</summary>
public virtual void TestPutNoOperation()
{
    CreateHttpFSServer(false);
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1/foo?user.name={0}", user));
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.SetDoInput(true);
    conn.SetDoOutput(true);
    conn.SetRequestMethod("PUT");
    // Fix: Assert.AreEqual takes (expected, actual) — arguments were swapped,
    // producing misleading failure messages.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpBadRequest, conn.GetResponseCode());
}
/// <summary>Basic smoke test: LISTSTATUS on the root path succeeds for a valid user.</summary>
public virtual void TestHdfsAccess()
{
    CreateHttpFSServer(false);
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1/?user.name={0}&op=liststatus", user));
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    // Fix: Assert.AreEqual takes (expected, actual) — arguments were swapped,
    // producing misleading failure messages.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, conn.GetResponseCode());
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.GetInputStream()));
    // Drain one line to confirm the response body is readable.
    reader.ReadLine();
    reader.Close();
}
/// <summary>
/// Returns the group list for a user: "root" maps to "admin", "nobody" to
/// "nobody", and everyone else to their configured Hadoop groups (or an
/// empty list when none are configured).
/// </summary>
/// <exception cref="System.IO.IOException"/>
public override IList<string> GetGroups(string user)
{
    // Hard-coded mappings for the two special test identities.
    if (user.Equals("root"))
    {
        return Arrays.AsList("admin");
    }
    if (user.Equals("nobody"))
    {
        return Arrays.AsList("nobody");
    }
    // Fall back to the configured test groups; empty list when unknown.
    string[] configuredGroups = HadoopUsersConfTestHelper.GetHadoopUserGroups(user);
    if (configuredGroups == null)
    {
        return Sharpen.Collections.EmptyList;
    }
    return Arrays.AsList(configuredGroups);
}
/// <summary>General-purpose http PUT command to the httpfs server.</summary>
/// <param name="filename">The file to operate upon</param>
/// <param name="command">The command to perform (SETACL, etc)</param>
/// <param name="params">Parameters, like "aclspec=..."</param>
/// <exception cref="System.Exception"/>
private void PutCmd(string filename, string command, string @params)
{
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    // Strip any leading slash so the path slots cleanly into the URL template.
    if (filename[0] == '/')
    {
        filename = Sharpen.Runtime.Substring(filename, 1);
    }
    // When params is null, both the separator and the parameter text collapse
    // to the empty string.
    string separator = (@params == null) ? string.Empty : "&";
    string paramText = (@params == null) ? string.Empty : @params;
    string pathOps = MessageFormat.Format("/webhdfs/v1/{0}?user.name={1}{2}{3}&op={4}",
        filename, user, separator, paramText, command);
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.SetRequestMethod("PUT");
    conn.Connect();
    // The command is expected to succeed.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, conn.GetResponseCode());
}
/// <summary>LISTSTATUS with a glob filter ("f*") on a directory succeeds.</summary>
public virtual void TestGlobFilter()
{
    CreateHttpFSServer(false);
    FileSystem fs = FileSystem.Get(TestHdfsHelper.GetHdfsConf());
    fs.Mkdirs(new Path("/tmp"));
    fs.Create(new Path("/tmp/foo.txt")).Close();
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1/tmp?user.name={0}&op=liststatus&filter=f*", user));
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    // Fix: Assert.AreEqual takes (expected, actual) — arguments were swapped,
    // producing misleading failure messages.
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, conn.GetResponseCode());
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.GetInputStream()));
    // Drain one line to confirm the response body is readable.
    reader.ReadLine();
    reader.Close();
}
/// <summary>
/// Talks to the http interface to get the json output of a *STATUS command
/// on the given file.
/// </summary>
/// <param name="filename">The file to query.</param>
/// <param name="command">Either GETFILESTATUS, LISTSTATUS, or ACLSTATUS</param>
/// <returns>A string containing the JSON output describing the file.</returns>
/// <exception cref="System.Exception"/>
private string GetStatus(string filename, string command)
{
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    // Remove leading / from filename so it composes into the URL template.
    if (filename[0] == '/')
    {
        filename = Sharpen.Runtime.Substring(filename, 1);
    }
    string pathOps = MessageFormat.Format("/webhdfs/v1/{0}?user.name={1}&op={2}",
        filename, user, command);
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.Connect();
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, conn.GetResponseCode());
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.GetInputStream()));
    try
    {
        return reader.ReadLine();
    }
    finally
    {
        // Fix: the reader was previously leaked (returned without closing).
        reader.Close();
    }
}
/// <summary>
/// Sets a new owner/group on a file through the HttpFS file system and
/// verifies the change through a direct (proxied) file system.
/// </summary>
/// <exception cref="System.Exception"/>
private void TestSetOwner()
{
    // SetOwner is only meaningful against a real HDFS, not the local FS.
    if (!IsLocalFS())
    {
        FileSystem fs = FileSystem.Get(GetProxiedFSConf());
        fs.Mkdirs(GetProxiedFSTestDir());
        Path path = new Path(GetProxiedFSTestDir(), "foo.txt");
        OutputStream os = fs.Create(path);
        os.Write(1);
        os.Close();
        fs.Close();
        // Change ownership through the HttpFS gateway.
        fs = GetHttpFSFileSystem();
        string user = HadoopUsersConfTestHelper.GetHadoopUsers()[1];
        string group = HadoopUsersConfTestHelper.GetHadoopUserGroups(user)[0];
        fs.SetOwner(path, user, group);
        fs.Close();
        // Re-read the status directly from HDFS to verify the change stuck.
        fs = FileSystem.Get(GetProxiedFSConf());
        FileStatus status1 = fs.GetFileStatus(path);
        fs.Close();
        // Fix: Assert.AreEqual takes (expected, actual) — arguments were
        // swapped, producing misleading failure messages.
        NUnit.Framework.Assert.AreEqual(user, status1.GetOwner());
        NUnit.Framework.Assert.AreEqual(group, status1.GetGroup());
    }
}
/// <summary>
/// Writes a 4-byte file and verifies that OPEN with offset=1&amp;length=2
/// returns exactly bytes 1 and 2.
/// </summary>
public virtual void TestOpenOffsetLength()
{
    CreateHttpFSServer(false);
    byte[] array = new byte[] { 0, 1, 2, 3 };
    FileSystem fs = FileSystem.Get(TestHdfsHelper.GetHdfsConf());
    fs.Mkdirs(new Path("/tmp"));
    OutputStream os = fs.Create(new Path("/tmp/foo"));
    os.Write(array);
    os.Close();
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), MessageFormat.Format(
        "/webhdfs/v1/tmp/foo?user.name={0}&op=open&offset=1&length=2", user));
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, conn.GetResponseCode());
    InputStream @is = conn.GetInputStream();
    try
    {
        // Only bytes at offsets 1 and 2 should come back, then EOF.
        NUnit.Framework.Assert.AreEqual(1, @is.Read());
        NUnit.Framework.Assert.AreEqual(2, @is.Read());
        NUnit.Framework.Assert.AreEqual(-1, @is.Read());
    }
    finally
    {
        // Fix: the input stream was previously leaked (never closed).
        @is.Close();
    }
}
/// <summary>
/// Talks to the http interface to get the json output of a *STATUS command
/// on the given file.
/// </summary>
/// <param name="filename">The file to query.</param>
/// <param name="command">Either GETFILESTATUS, LISTSTATUS, or ACLSTATUS</param>
/// <param name="expectOK">Is this operation expected to succeed?</param>
/// <exception cref="System.Exception"/>
private void GetStatus(string filename, string command, bool expectOK)
{
    string user = HadoopUsersConfTestHelper.GetHadoopUsers()[0];
    // Remove leading / from filename so it composes into the URL template.
    if (filename[0] == '/')
    {
        filename = Sharpen.Runtime.Substring(filename, 1);
    }
    string pathOps = MessageFormat.Format("/webhdfs/v1/{0}?user.name={1}&op={2}",
        filename, user, command);
    Uri url = new Uri(TestJettyHelper.GetJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.Connect();
    int resp = conn.GetResponseCode();
    BufferedReader reader;
    if (expectOK)
    {
        NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpOk, resp);
        reader = new BufferedReader(new InputStreamReader(conn.GetInputStream()));
        try
        {
            // basic sanity check: ACLs are disabled, so no aclBit should appear.
            string res = reader.ReadLine();
            NUnit.Framework.Assert.IsTrue(!res.Contains("aclBit"));
            NUnit.Framework.Assert.IsTrue(res.Contains("owner"));
        }
        finally
        {
            // Fix: the reader was previously leaked (never closed).
            reader.Close();
        }
    }
    else
    {
        NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpInternalError, resp);
        reader = new BufferedReader(new InputStreamReader(conn.GetErrorStream()));
        try
        {
            string res = reader.ReadLine();
            NUnit.Framework.Assert.IsTrue(res.Contains("AclException"));
            NUnit.Framework.Assert.IsTrue(res.Contains("Support for ACLs has been disabled"));
        }
        finally
        {
            // Fix: the reader was previously leaked (never closed).
            reader.Close();
        }
    }
}
/// <summary>Returns the configured Hadoop groups for the given user.</summary>
/// <exception cref="System.IO.IOException"/>
public virtual IList<string> GetGroups(string user)
{
    // Delegate straight to the test-helper configuration.
    string[] groups = HadoopUsersConfTestHelper.GetHadoopUserGroups(user);
    return Arrays.AsList(groups);
}
/// <summary>
/// Creates and starts an HttpFS server inside the test Jetty instance,
/// wired to the shared test HDFS, with ACLs and XAttrs enabled.
/// </summary>
/// <param name="addDelegationTokenAuthHandler">
/// Whether to install the Kerberos-for-testing authentication handler.
/// </param>
/// <exception cref="System.Exception"/>
private void CreateHttpFSServer(bool addDelegationTokenAuthHandler)
{
    FilePath homeDir = TestDirHelper.GetTestDir();
    NUnit.Framework.Assert.IsTrue(new FilePath(homeDir, "conf").Mkdir());
    NUnit.Framework.Assert.IsTrue(new FilePath(homeDir, "log").Mkdir());
    NUnit.Framework.Assert.IsTrue(new FilePath(homeDir, "temp").Mkdir());
    HttpFSServerWebApp.SetHomeDirForCurrentThread(homeDir.GetAbsolutePath());
    // Write the authentication signature secret the server will load.
    FilePath secretFile = new FilePath(new FilePath(homeDir, "conf"), "secret");
    TextWriter w = new FileWriter(secretFile);
    w.Write("secret");
    w.Close();
    //HDFS configuration
    FilePath hadoopConfDir = new FilePath(new FilePath(homeDir, "conf"), "hadoop-conf");
    hadoopConfDir.Mkdirs();
    string fsDefaultName = TestHdfsHelper.GetHdfsConf().Get(
        CommonConfigurationKeysPublic.FsDefaultNameKey);
    Configuration conf = new Configuration(false);
    conf.Set(CommonConfigurationKeysPublic.FsDefaultNameKey, fsDefaultName);
    conf.SetBoolean(DFSConfigKeys.DfsNamenodeAclsEnabledKey, true);
    conf.SetBoolean(DFSConfigKeys.DfsNamenodeXattrsEnabledKey, true);
    FilePath hdfsSite = new FilePath(hadoopConfDir, "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.WriteXml(os);
    os.Close();
    //HTTPFS configuration
    conf = new Configuration(false);
    if (addDelegationTokenAuthHandler)
    {
        conf.Set("httpfs.authentication.type",
            typeof(HttpFSKerberosAuthenticationHandlerForTesting).FullName);
    }
    conf.Set("httpfs.services.ext", typeof(TestHttpFSServer.MockGroups).FullName);
    conf.Set("httpfs.admin.group", HadoopUsersConfTestHelper.GetHadoopUserGroups(
        HadoopUsersConfTestHelper.GetHadoopUsers()[0])[0]);
    conf.Set("httpfs.proxyuser." + HadoopUsersConfTestHelper.GetHadoopProxyUser() + ".groups",
        HadoopUsersConfTestHelper.GetHadoopProxyUserGroups());
    conf.Set("httpfs.proxyuser." + HadoopUsersConfTestHelper.GetHadoopProxyUser() + ".hosts",
        HadoopUsersConfTestHelper.GetHadoopProxyUserHosts());
    conf.Set("httpfs.authentication.signature.secret.file", secretFile.GetAbsolutePath());
    conf.Set("httpfs.hadoop.config.dir", hadoopConfDir.ToString());
    FilePath httpfsSite = new FilePath(new FilePath(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.WriteXml(os);
    os.Close();
    // Deploy the HttpFS webapp into the test Jetty server.
    ClassLoader cl = Sharpen.Thread.CurrentThread().GetContextClassLoader();
    Uri url = cl.GetResource("webapp");
    if (url == null)
    {
        // Fix: the sibling CreateHttpFSServer() checks this; without the check
        // a missing resource surfaced as a NullReferenceException below.
        throw new IOException("webapp resource not found on classpath");
    }
    WebAppContext context = new WebAppContext(url.AbsolutePath, "/webhdfs");
    Org.Mortbay.Jetty.Server server = TestJettyHelper.GetJettyServer();
    server.AddHandler(context);
    server.Start();
    if (addDelegationTokenAuthHandler)
    {
        HttpFSServerWebApp.Get().SetAuthority(TestJettyHelper.GetAuthority());
    }
}