public virtual void TestLazyTokenFetchForWebhdfs() {
    // for any(Token.class)
    MiniDFSCluster cluster = null;
    WebHdfsFileSystem fs = null;
    try {
        Configuration clusterConf = new HdfsConfiguration(conf);
        SecurityUtil.SetAuthenticationMethod(UserGroupInformation.AuthenticationMethod.Simple, clusterConf);
        clusterConf.SetBoolean(DFSConfigKeys.DfsNamenodeDelegationTokenAlwaysUseKey, true);
        // trick the NN into thinking security is enabled w/o it trying
        // to login from a keytab
        UserGroupInformation.SetConfiguration(clusterConf);
        cluster = new MiniDFSCluster.Builder(clusterConf).NumDataNodes(1).Build();
        cluster.WaitActive();
        SecurityUtil.SetAuthenticationMethod(UserGroupInformation.AuthenticationMethod.Kerberos, clusterConf);
        UserGroupInformation.SetConfiguration(clusterConf);
        uri = DFSUtil.CreateUri("webhdfs", cluster.GetNameNode().GetHttpAddress());
        ValidateLazyTokenFetch(clusterConf);
    } finally {
        IOUtils.Cleanup(null, fs);
        if (cluster != null) {
            cluster.Shutdown();
        }
    }
}
/// <exception cref="System.IO.IOException"/>
public static IDictionary<object, object> ConnectAndGetJson(HttpURLConnection conn, int expectedResponseCode) {
    conn.Connect();
    NUnit.Framework.Assert.AreEqual(expectedResponseCode, conn.GetResponseCode());
    return WebHdfsFileSystem.JsonParse(conn, false);
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestCaseInsensitive() {
    Path p = new Path("/test/testCaseInsensitive");
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
    PutOpParam.OP op = PutOpParam.OP.Mkdirs;
    // replace the query with mixed-case letters
    Uri url = webhdfs.ToUrl(op, p);
    WebHdfsFileSystem.Log.Info("url = " + url);
    Uri replaced = new Uri(url.ToString().Replace(op.ToQueryString(), "Op=mkDIrs"));
    WebHdfsFileSystem.Log.Info("replaced = " + replaced);
    // connect with the replaced URL
    HttpURLConnection conn = (HttpURLConnection)replaced.OpenConnection();
    conn.SetRequestMethod(op.GetType().ToString());
    conn.Connect();
    BufferedReader @in = new BufferedReader(new InputStreamReader(conn.GetInputStream()));
    for (string line; (line = @in.ReadLine()) != null;) {
        WebHdfsFileSystem.Log.Info("> " + line);
    }
    // check that the command succeeds
    NUnit.Framework.Assert.IsTrue(fs.GetFileStatus(p).IsDirectory());
}
public virtual void TestSecureHAToken() {
    Configuration conf = DFSTestUtil.NewHAConfiguration(LogicalName);
    conf.SetBoolean(DFSConfigKeys.DfsNamenodeDelegationTokenAlwaysUseKey, true);
    MiniDFSCluster cluster = null;
    WebHdfsFileSystem fs = null;
    try {
        cluster = new MiniDFSCluster.Builder(conf).NnTopology(topo).NumDataNodes(0).Build();
        HATestUtil.SetFailoverConfigurations(cluster, conf, LogicalName);
        cluster.WaitActive();
        fs = Org.Mockito.Mockito.Spy((WebHdfsFileSystem)FileSystem.Get(WebhdfsUri, conf));
        FileSystemTestHelper.AddFileSystemForTesting(WebhdfsUri, conf, fs);
        cluster.TransitionToActive(0);
        Org.Apache.Hadoop.Security.Token.Token<object> token = ((Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier>)fs.GetDelegationToken(null));
        cluster.ShutdownNameNode(0);
        cluster.TransitionToActive(1);
        token.Renew(conf);
        token.Cancel(conf);
        Org.Mockito.Mockito.Verify(fs).RenewDelegationToken(token);
        Org.Mockito.Mockito.Verify(fs).CancelDelegationToken(token);
    } finally {
        IOUtils.Cleanup(null, fs);
        if (cluster != null) {
            cluster.Shutdown();
        }
    }
}
public virtual void TestMultipleNamespacesConfigured() {
    Configuration conf = DFSTestUtil.NewHAConfiguration(LogicalName);
    MiniDFSCluster cluster = null;
    WebHdfsFileSystem fs = null;
    try {
        cluster = new MiniDFSCluster.Builder(conf).NnTopology(topo).NumDataNodes(1).Build();
        HATestUtil.SetFailoverConfigurations(cluster, conf, LogicalName);
        cluster.WaitActive();
        DFSTestUtil.AddHAConfiguration(conf, LogicalName + "remote");
        DFSTestUtil.SetFakeHttpAddresses(conf, LogicalName + "remote");
        fs = (WebHdfsFileSystem)FileSystem.Get(WebhdfsUri, conf);
        NUnit.Framework.Assert.AreEqual(2, fs.GetResolvedNNAddr().Length);
    } finally {
        IOUtils.Cleanup(null, fs);
        if (cluster != null) {
            cluster.Shutdown();
        }
    }
}
// Do Nothing
/// <exception cref="System.Exception"/>
public virtual void TestWebHdfsDoAs() {
    WebHdfsTestUtil.Log.Info("START: testWebHdfsDoAs()");
    WebHdfsTestUtil.Log.Info("ugi.getShortUserName()=" + ugi.GetShortUserName());
    WebHdfsFileSystem webhdfs = WebHdfsTestUtil.GetWebHdfsFileSystemAs(ugi, config, WebHdfsFileSystem.Scheme);
    Path root = new Path("/");
    cluster.GetFileSystem().SetPermission(root, new FsPermission((short)0x1ff));
    Whitebox.SetInternalState(webhdfs, "ugi", proxyUgi);
    {
        Path responsePath = webhdfs.GetHomeDirectory();
        WebHdfsTestUtil.Log.Info("responsePath=" + responsePath);
        NUnit.Framework.Assert.AreEqual(webhdfs.GetUri() + "/user/" + ProxyUser, responsePath.ToString());
    }
    Path f = new Path("/testWebHdfsDoAs/a.txt");
    {
        FSDataOutputStream @out = webhdfs.Create(f);
        @out.Write(Sharpen.Runtime.GetBytesForString("Hello, webhdfs user!"));
        @out.Close();
        FileStatus status = webhdfs.GetFileStatus(f);
        WebHdfsTestUtil.Log.Info("status.getOwner()=" + status.GetOwner());
        NUnit.Framework.Assert.AreEqual(ProxyUser, status.GetOwner());
    }
    {
        FSDataOutputStream @out = webhdfs.Append(f);
        @out.Write(Sharpen.Runtime.GetBytesForString("\nHello again!"));
        @out.Close();
        FileStatus status = webhdfs.GetFileStatus(f);
        WebHdfsTestUtil.Log.Info("status.getOwner()=" + status.GetOwner());
        WebHdfsTestUtil.Log.Info("status.getLen() =" + status.GetLen());
        NUnit.Framework.Assert.AreEqual(ProxyUser, status.GetOwner());
    }
}
public virtual void TestDelegationTokenWebHdfsApi() {
    ((Log4JLogger)NamenodeWebHdfsMethods.Log).GetLogger().SetLevel(Level.All);
    string uri = WebHdfsFileSystem.Scheme + "://" + config.Get(DFSConfigKeys.DfsNamenodeHttpAddressKey);
    // get file system as JobTracker
    UserGroupInformation ugi = UserGroupInformation.CreateUserForTesting("JobTracker", new string[] { "user" });
    WebHdfsFileSystem webhdfs = ugi.DoAs(new _PrivilegedExceptionAction_180(this, uri));
    {
        // test addDelegationTokens(..)
        Credentials creds = new Credentials();
        Org.Apache.Hadoop.Security.Token.Token<object>[] tokens = webhdfs.AddDelegationTokens("JobTracker", creds);
        NUnit.Framework.Assert.AreEqual(1, tokens.Length);
        NUnit.Framework.Assert.AreEqual(1, creds.NumberOfTokens());
        NUnit.Framework.Assert.AreSame(tokens[0], creds.GetAllTokens().GetEnumerator().Next());
        CheckTokenIdentifier(ugi, tokens[0]);
        Org.Apache.Hadoop.Security.Token.Token<object>[] tokens2 = webhdfs.AddDelegationTokens("JobTracker", creds);
        NUnit.Framework.Assert.AreEqual(0, tokens2.Length);
    }
}
public virtual void TestJsonParseClosesInputStream() {
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fileSystem;
    Path file = GetTestRootPath(fSys, "test/hadoop/file");
    CreateFile(file);
    HttpOpParam.OP op = GetOpParam.OP.Gethomedirectory;
    Uri url = webhdfs.ToUrl(op, file);
    HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
    conn.SetRequestMethod(op.GetType().ToString());
    conn.Connect();
    InputStream myIn = new _InputStream_184(this, conn);
    HttpURLConnection spyConn = Org.Mockito.Mockito.Spy(conn);
    Org.Mockito.Mockito.DoReturn(myIn).When(spyConn).GetInputStream();
    try {
        NUnit.Framework.Assert.IsFalse(closedInputStream);
        WebHdfsFileSystem.JsonParse(spyConn, false);
        NUnit.Framework.Assert.IsTrue(closedInputStream);
    } catch (IOException) {
        TestCase.Fail();
    }
    conn.Disconnect();
}
/// <exception cref="System.IO.IOException"/>
public static Uri ToUrl(WebHdfsFileSystem webhdfs, HttpOpParam.OP op, Path fspath, params Param<object, object>[] parameters) {
    Uri url = webhdfs.ToUrl(op, fspath, parameters);
    WebHdfsTestUtil.Log.Info("url=" + url);
    return url;
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestSecureProxyAuthParamsInUrl() {
    Configuration conf = new Configuration();
    // fake turning on security so api thinks it should use tokens
    SecurityUtil.SetAuthenticationMethod(UserGroupInformation.AuthenticationMethod.Kerberos, conf);
    UserGroupInformation.SetConfiguration(conf);
    UserGroupInformation ugi = UserGroupInformation.CreateRemoteUser("test-user");
    ugi.SetAuthenticationMethod(UserGroupInformation.AuthenticationMethod.Kerberos);
    ugi = UserGroupInformation.CreateProxyUser("test-proxy-user", ugi);
    UserGroupInformation.SetLoginUser(ugi);
    WebHdfsFileSystem webhdfs = GetWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");
    string tokenString = webhdfs.GetDelegationToken().EncodeToUrlString();
    // send real+effective
    Uri getTokenUrl = webhdfs.ToUrl(GetOpParam.OP.Getdelegationtoken, fsPath);
    CheckQueryParams(new string[] { GetOpParam.OP.Getdelegationtoken.ToQueryString(), new UserParam(ugi.GetRealUser().GetShortUserName()).ToString(), new DoAsParam(ugi.GetShortUserName()).ToString() }, getTokenUrl);
    // send real+effective
    Uri renewTokenUrl = webhdfs.ToUrl(PutOpParam.OP.Renewdelegationtoken, fsPath, new TokenArgumentParam(tokenString));
    CheckQueryParams(new string[] { PutOpParam.OP.Renewdelegationtoken.ToQueryString(), new UserParam(ugi.GetRealUser().GetShortUserName()).ToString(), new DoAsParam(ugi.GetShortUserName()).ToString(), new TokenArgumentParam(tokenString).ToString() }, renewTokenUrl);
    // send token
    Uri cancelTokenUrl = webhdfs.ToUrl(PutOpParam.OP.Canceldelegationtoken, fsPath, new TokenArgumentParam(tokenString));
    CheckQueryParams(new string[] { PutOpParam.OP.Canceldelegationtoken.ToQueryString(), new UserParam(ugi.GetRealUser().GetShortUserName()).ToString(), new DoAsParam(ugi.GetShortUserName()).ToString(), new TokenArgumentParam(tokenString).ToString() }, cancelTokenUrl);
    // send token
    Uri fileStatusUrl = webhdfs.ToUrl(GetOpParam.OP.Getfilestatus, fsPath);
    CheckQueryParams(new string[] { GetOpParam.OP.Getfilestatus.ToQueryString(), new DelegationParam(tokenString).ToString() }, fileStatusUrl);
    // wipe out internal token to simulate auth always required
    webhdfs.SetDelegationToken(null);
    // send real+effective
    cancelTokenUrl = webhdfs.ToUrl(PutOpParam.OP.Canceldelegationtoken, fsPath, new TokenArgumentParam(tokenString));
    CheckQueryParams(new string[] { PutOpParam.OP.Canceldelegationtoken.ToQueryString(), new UserParam(ugi.GetRealUser().GetShortUserName()).ToString(), new DoAsParam(ugi.GetShortUserName()).ToString(), new TokenArgumentParam(tokenString).ToString() }, cancelTokenUrl);
    // send real+effective
    fileStatusUrl = webhdfs.ToUrl(GetOpParam.OP.Getfilestatus, fsPath);
    CheckQueryParams(new string[] { GetOpParam.OP.Getfilestatus.ToQueryString(), new UserParam(ugi.GetRealUser().GetShortUserName()).ToString(), new DoAsParam(ugi.GetShortUserName()).ToString() }, fileStatusUrl);
}
public virtual void TestWebImageViewer() {
    WebImageViewer viewer = new WebImageViewer(NetUtils.CreateSocketAddr("localhost:0"));
    try {
        viewer.InitServer(originalFsimage.GetAbsolutePath());
        int port = viewer.GetPort();
        // create a WebHdfsFileSystem instance
        URI uri = new URI("webhdfs://localhost:" + port.ToString());
        Configuration conf = new Configuration();
        WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.Get(uri, conf);
        // verify the number of directories
        FileStatus[] statuses = webhdfs.ListStatus(new Path("/"));
        // contains the empty and xattr directories
        NUnit.Framework.Assert.AreEqual(NumDirs + 3, statuses.Length);
        // verify the number of files in the directory
        statuses = webhdfs.ListStatus(new Path("/dir0"));
        NUnit.Framework.Assert.AreEqual(FilesPerDir, statuses.Length);
        // compare a file
        FileStatus status = webhdfs.ListStatus(new Path("/dir0/file0"))[0];
        FileStatus expected = writtenFiles["/dir0/file0"];
        CompareFile(expected, status);
        // LISTSTATUS operation to an empty directory
        statuses = webhdfs.ListStatus(new Path("/emptydir"));
        NUnit.Framework.Assert.AreEqual(0, statuses.Length);
        // LISTSTATUS operation to an invalid path
        Uri url = new Uri("http://localhost:" + port + "/webhdfs/v1/invalid/?op=LISTSTATUS");
        VerifyHttpResponseCode(HttpURLConnection.HttpNotFound, url);
        // LISTSTATUS operation to an invalid prefix
        url = new Uri("http://localhost:" + port + "/foo");
        VerifyHttpResponseCode(HttpURLConnection.HttpNotFound, url);
        // GETFILESTATUS operation
        status = webhdfs.GetFileStatus(new Path("/dir0/file0"));
        CompareFile(expected, status);
        // GETFILESTATUS operation to an invalid path
        url = new Uri("http://localhost:" + port + "/webhdfs/v1/invalid/?op=GETFILESTATUS");
        VerifyHttpResponseCode(HttpURLConnection.HttpNotFound, url);
        // invalid operation
        url = new Uri("http://localhost:" + port + "/webhdfs/v1/?op=INVALID");
        VerifyHttpResponseCode(HttpURLConnection.HttpBadRequest, url);
        // invalid method
        url = new Uri("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS");
        HttpURLConnection connection = (HttpURLConnection)url.OpenConnection();
        connection.SetRequestMethod("POST");
        connection.Connect();
        NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpBadMethod, connection.GetResponseCode());
    } finally {
        // shutdown the viewer
        viewer.Close();
    }
}
/// <exception cref="System.IO.IOException"/>
private WebHdfsFileSystem SpyWebhdfsInSecureSetup() {
    WebHdfsFileSystem fsOrig = new WebHdfsFileSystem();
    fsOrig.Initialize(URI.Create("webhdfs://127.0.0.1:0"), conf);
    WebHdfsFileSystem fs = Org.Mockito.Mockito.Spy(fsOrig);
    return fs;
}
public virtual void TestWebHdfsCustomUriPortWithCustomDefaultPorts() {
    URI uri = URI.Create("webhdfs://localhost:789");
    WebHdfsFileSystem fs = (WebHdfsFileSystem)FileSystem.Get(uri, conf);
    NUnit.Framework.Assert.AreEqual(123, fs.GetDefaultPort());
    NUnit.Framework.Assert.AreEqual(uri, fs.GetUri());
    NUnit.Framework.Assert.AreEqual("127.0.0.1:789", fs.GetCanonicalServiceName());
}
public static void BeforeClassSetup() {
    Configuration conf = new HdfsConfiguration();
    conf.SetBoolean(DFSConfigKeys.DfsWebhdfsEnabledKey, true);
    conf.Set(FsPermission.UmaskLabel, "000");
    conf.SetInt(DFSConfigKeys.DfsNamenodeMaxComponentLengthKey, 0);
    cluster = new MiniDFSCluster.Builder(conf).Build();
    webhdfs = WebHdfsTestUtil.GetWebHdfsFileSystem(conf, WebHdfsFileSystem.Scheme);
    dfs = cluster.GetFileSystem();
}
/// <summary>
/// Test get with offset and length parameters that combine to request a length
/// greater than actual file length.
/// </summary>
/// <exception cref="System.IO.IOException"/>
public virtual void TestOffsetPlusLengthParamsLongerThanFile() {
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
    Path dir = new Path("/test");
    NUnit.Framework.Assert.IsTrue(webhdfs.Mkdirs(dir));
    // Create a file with some content.
    Path testFile = new Path("/test/testOffsetPlusLengthParamsLongerThanFile");
    string content = "testOffsetPlusLengthParamsLongerThanFile";
    FSDataOutputStream testFileOut = webhdfs.Create(testFile);
    try {
        testFileOut.Write(Sharpen.Runtime.GetBytesForString(content, "US-ASCII"));
    } finally {
        IOUtils.CloseStream(testFileOut);
    }
    // Open the file, but request offset starting at 1 and length equal to file
    // length. Considering the offset, this is longer than the actual content.
    HttpOpParam.OP op = GetOpParam.OP.Open;
    Uri url = webhdfs.ToUrl(op, testFile, new LengthParam(Sharpen.Extensions.ValueOf(content.Length)), new OffsetParam(1L));
    HttpURLConnection conn = null;
    InputStream @is = null;
    try {
        conn = (HttpURLConnection)url.OpenConnection();
        conn.SetRequestMethod(op.GetType().ToString());
        conn.SetDoOutput(op.GetDoOutput());
        conn.SetInstanceFollowRedirects(true);
        // Expect OK response and Content-Length header equal to actual length.
        NUnit.Framework.Assert.AreEqual(HttpServletResponse.ScOk, conn.GetResponseCode());
        NUnit.Framework.Assert.AreEqual((content.Length - 1).ToString(), conn.GetHeaderField("Content-Length"));
        // Check content matches.
        byte[] respBody = new byte[content.Length - 1];
        @is = conn.GetInputStream();
        IOUtils.ReadFully(@is, respBody, 0, content.Length - 1);
        NUnit.Framework.Assert.AreEqual(Sharpen.Runtime.Substring(content, 1), Sharpen.Runtime.GetStringForBytes(respBody, "US-ASCII"));
    } finally {
        IOUtils.CloseStream(@is);
        if (conn != null) {
            conn.Disconnect();
        }
    }
}
public virtual void SetUp() {
    Configuration conf = WebHdfsTestUtil.CreateConf();
    serverSocket = Sharpen.Extensions.CreateServerSocket(0, ConnectionBacklog);
    nnHttpAddress = new IPEndPoint("localhost", serverSocket.GetLocalPort());
    conf.Set(DFSConfigKeys.DfsNamenodeHttpAddressKey, "localhost:" + serverSocket.GetLocalPort());
    fs = WebHdfsTestUtil.GetWebHdfsFileSystem(conf, WebHdfsFileSystem.Scheme);
    fs.connectionFactory = connectionFactory;
    clients = new AList<SocketChannel>();
    serverThread = null;
}
public virtual void TestAuditWebHdfsOpen() {
    Path file = new Path(fnames[0]);
    fs.SetPermission(file, new FsPermission((short)0x1a4));
    fs.SetOwner(file, "root", null);
    SetupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.GetWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.Scheme);
    webfs.Open(file);
    VerifyAuditLogsCheckPattern(true, 3, webOpenPattern);
}
/// <exception cref="System.IO.IOException"/>
/// <exception cref="Sharpen.URISyntaxException"/>
public virtual void TestEncodedPathUrl() {
    Configuration conf = new Configuration();
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.Get(uri, conf);
    // Construct a file path that contains a percentage-encoded string
    string pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
    Path fsPath = new Path(pathName);
    Uri encodedPathUrl = webhdfs.ToUrl(PutOpParam.OP.Create, fsPath);
    // We should get back the original file path after cycling back and decoding
    NUnit.Framework.Assert.AreEqual(WebHdfsFileSystem.PathPrefix + pathName, encodedPathUrl.ToURI().GetPath());
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestSimpleAuthParamsInUrl() {
    Configuration conf = new Configuration();
    UserGroupInformation ugi = UserGroupInformation.CreateRemoteUser("test-user");
    UserGroupInformation.SetLoginUser(ugi);
    WebHdfsFileSystem webhdfs = GetWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/");
    // send user+token
    Uri fileStatusUrl = webhdfs.ToUrl(GetOpParam.OP.Getfilestatus, fsPath);
    CheckQueryParams(new string[] { GetOpParam.OP.Getfilestatus.ToQueryString(), new UserParam(ugi.GetShortUserName()).ToString() }, fileStatusUrl);
}
/// <exception cref="System.IO.IOException"/>
private void CheckNoTokenForOperation(HttpOpParam.OP op) {
    WebHdfsFileSystem fs = SpyWebhdfsInSecureSetup();
    Org.Mockito.Mockito.DoReturn(null).When(fs).GetDelegationToken(null);
    fs.Initialize(URI.Create("webhdfs://127.0.0.1:0"), conf);
    // do not get a token!
    fs.ToUrl(op, null);
    Org.Mockito.Mockito.Verify(fs, Org.Mockito.Mockito.Never()).GetDelegationToken();
    Org.Mockito.Mockito.Verify(fs, Org.Mockito.Mockito.Never()).GetDelegationToken(null);
    Org.Mockito.Mockito.Verify(fs, Org.Mockito.Mockito.Never()).SetDelegationToken((Org.Apache.Hadoop.Security.Token.Token<object>)Matchers.Any<Org.Apache.Hadoop.Security.Token.Token>());
}
public virtual void TestAuditWebHdfsStat() {
    Path file = new Path(fnames[0]);
    fs.SetPermission(file, new FsPermission((short)0x1a4));
    fs.SetOwner(file, "root", null);
    SetupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.GetWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.Scheme);
    FileStatus st = webfs.GetFileStatus(file);
    VerifyAuditLogs(true);
    NUnit.Framework.Assert.IsTrue("failed to stat file", st != null && st.IsFile());
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestCheckAccessUrl() {
    Configuration conf = new Configuration();
    UserGroupInformation ugi = UserGroupInformation.CreateRemoteUser("test-user");
    UserGroupInformation.SetLoginUser(ugi);
    WebHdfsFileSystem webhdfs = GetWebHdfsFileSystem(ugi, conf);
    Path fsPath = new Path("/p1");
    Uri checkAccessUrl = webhdfs.ToUrl(GetOpParam.OP.Checkaccess, fsPath, new FsActionParam(FsAction.ReadWrite));
    CheckQueryParams(new string[] { GetOpParam.OP.Checkaccess.ToQueryString(), new UserParam(ugi.GetShortUserName()).ToString(), FsActionParam.Name + "=" + FsAction.ReadWrite.Symbol }, checkAccessUrl);
}
public virtual void TestAuditWebHdfs() {
    Path file = new Path(fnames[0]);
    fs.SetPermission(file, new FsPermission((short)0x1a4));
    fs.SetOwner(file, "root", null);
    SetupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.GetWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.Scheme);
    InputStream istream = webfs.Open(file);
    int val = istream.Read();
    istream.Close();
    VerifyAuditLogsRepeat(true, 3);
    NUnit.Framework.Assert.IsTrue("failed to read from file", val >= 0);
}
public virtual void TestWebImageViewerForAcl() {
    WebImageViewer viewer = new WebImageViewer(NetUtils.CreateSocketAddr("localhost:0"));
    try {
        viewer.InitServer(originalFsimage.GetAbsolutePath());
        int port = viewer.GetPort();
        // create a WebHdfsFileSystem instance
        URI uri = new URI("webhdfs://localhost:" + port.ToString());
        Configuration conf = new Configuration();
        WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.Get(uri, conf);
        // GETACLSTATUS operation to a directory without an ACL
        AclStatus acl = webhdfs.GetAclStatus(new Path("/dirWithNoAcl"));
        NUnit.Framework.Assert.AreEqual(writtenAcls["/dirWithNoAcl"], acl);
        // GETACLSTATUS operation to a directory with a default ACL
        acl = webhdfs.GetAclStatus(new Path("/dirWithDefaultAcl"));
        NUnit.Framework.Assert.AreEqual(writtenAcls["/dirWithDefaultAcl"], acl);
        // GETACLSTATUS operation to a file without an ACL
        acl = webhdfs.GetAclStatus(new Path("/noAcl"));
        NUnit.Framework.Assert.AreEqual(writtenAcls["/noAcl"], acl);
        // GETACLSTATUS operation to a file with an ACL
        acl = webhdfs.GetAclStatus(new Path("/withAcl"));
        NUnit.Framework.Assert.AreEqual(writtenAcls["/withAcl"], acl);
        // GETACLSTATUS operation to a file with several ACL entries
        acl = webhdfs.GetAclStatus(new Path("/withSeveralAcls"));
        NUnit.Framework.Assert.AreEqual(writtenAcls["/withSeveralAcls"], acl);
        // GETACLSTATUS operation to an invalid path
        Uri url = new Uri("http://localhost:" + port + "/webhdfs/v1/invalid/?op=GETACLSTATUS");
        HttpURLConnection connection = (HttpURLConnection)url.OpenConnection();
        connection.SetRequestMethod("GET");
        connection.Connect();
        NUnit.Framework.Assert.AreEqual(HttpURLConnection.HttpNotFound, connection.GetResponseCode());
    } finally {
        // shutdown the viewer
        viewer.Close();
    }
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestTokenForNonTokenOp() {
    WebHdfsFileSystem fs = SpyWebhdfsInSecureSetup();
    Org.Apache.Hadoop.Security.Token.Token<object> token = Org.Mockito.Mockito.Mock<Org.Apache.Hadoop.Security.Token.Token>();
    Org.Mockito.Mockito.DoReturn(token).When(fs).GetDelegationToken(null);
    // should get/set/renew token
    fs.ToUrl(GetOpParam.OP.Open, null);
    Org.Mockito.Mockito.Verify(fs).GetDelegationToken();
    Org.Mockito.Mockito.Verify(fs).GetDelegationToken(null);
    Org.Mockito.Mockito.Verify(fs).SetDelegationToken(token);
    Org.Mockito.Mockito.Reset(fs);
    // should return prior token
    fs.ToUrl(GetOpParam.OP.Open, null);
    Org.Mockito.Mockito.Verify(fs).GetDelegationToken();
    Org.Mockito.Mockito.Verify(fs, Org.Mockito.Mockito.Never()).GetDelegationToken(null);
    Org.Mockito.Mockito.Verify(fs, Org.Mockito.Mockito.Never()).SetDelegationToken(token);
}
/// <exception cref="System.IO.IOException"/>
private void InitWebHdfs(Configuration conf) {
    if (WebHdfsFileSystem.IsEnabled(conf, HttpServer2.Log)) {
        // set user pattern based on configuration file
        UserParam.SetUserPattern(conf.Get(DFSConfigKeys.DfsWebhdfsUserPatternKey, DFSConfigKeys.DfsWebhdfsUserPatternDefault));
        // add authentication filter for webhdfs
        string className = conf.Get(DFSConfigKeys.DfsWebhdfsAuthenticationFilterKey, DFSConfigKeys.DfsWebhdfsAuthenticationFilterDefault);
        string name = className;
        string pathSpec = WebHdfsFileSystem.PathPrefix + "/*";
        IDictionary<string, string> @params = GetAuthFilterParams(conf);
        HttpServer2.DefineFilter(httpServer.GetWebAppContext(), name, className, @params, new string[] { pathSpec });
        HttpServer2.Log.Info("Added filter '" + name + "' (class=" + className + ")");
        // add webhdfs packages
        httpServer.AddJerseyResourcePackage(typeof(NamenodeWebHdfsMethods).Assembly.GetName() + ";" + typeof(Param).Assembly.GetName(), pathSpec);
    }
}
public virtual void TestAuditWebHdfsDenied() {
    Path file = new Path(fnames[0]);
    fs.SetPermission(file, new FsPermission((short)0x180));
    fs.SetOwner(file, "root", null);
    SetupAuditLogs();
    try {
        WebHdfsFileSystem webfs = WebHdfsTestUtil.GetWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.Scheme);
        InputStream istream = webfs.Open(file);
        int val = istream.Read();
        NUnit.Framework.Assert.Fail("open+read must not succeed, got " + val);
    } catch (AccessControlException) {
        System.Console.Out.WriteLine("got access denied, as expected.");
    }
    VerifyAuditLogsRepeat(false, 2);
}
public virtual void TestSetTokenServiceAndKind() {
    MiniDFSCluster cluster = null;
    try {
        Configuration clusterConf = new HdfsConfiguration(conf);
        SecurityUtil.SetAuthenticationMethod(UserGroupInformation.AuthenticationMethod.Simple, clusterConf);
        clusterConf.SetBoolean(DFSConfigKeys.DfsNamenodeDelegationTokenAlwaysUseKey, true);
        // trick the NN into thinking security is enabled w/o it trying
        // to login from a keytab
        UserGroupInformation.SetConfiguration(clusterConf);
        cluster = new MiniDFSCluster.Builder(clusterConf).NumDataNodes(0).Build();
        cluster.WaitActive();
        SecurityUtil.SetAuthenticationMethod(UserGroupInformation.AuthenticationMethod.Kerberos, clusterConf);
        WebHdfsFileSystem fs = WebHdfsTestUtil.GetWebHdfsFileSystem(clusterConf, "webhdfs");
        Whitebox.SetInternalState(fs, "canRefreshDelegationToken", true);
        URLConnectionFactory factory = new _URLConnectionFactory_268(new _ConnectionConfigurator_262());
        Whitebox.SetInternalState(fs, "connectionFactory", factory);
        Org.Apache.Hadoop.Security.Token.Token<object> token1 = fs.GetDelegationToken();
        NUnit.Framework.Assert.AreEqual(new Text("bar"), token1.GetKind());
        HttpOpParam.OP op = GetOpParam.OP.Getdelegationtoken;
        Org.Apache.Hadoop.Security.Token.Token<DelegationTokenIdentifier> token2 = new _FsPathResponseRunner_281(op, null, new RenewerParam(null)).Run();
        NUnit.Framework.Assert.AreEqual(new Text("bar"), token2.GetKind());
        NUnit.Framework.Assert.AreEqual(new Text("foo"), token2.GetService());
    } finally {
        if (cluster != null) {
            cluster.Shutdown();
        }
    }
}
public virtual void TestWebHdfsOffsetAndLength() {
    MiniDFSCluster cluster = null;
    Configuration conf = WebHdfsTestUtil.CreateConf();
    int Offset = 42;
    int Length = 512;
    string Path = "/foo";
    byte[] Contents = new byte[1024];
    Random.NextBytes(Contents);
    try {
        cluster = new MiniDFSCluster.Builder(conf).NumDataNodes(1).Build();
        WebHdfsFileSystem fs = WebHdfsTestUtil.GetWebHdfsFileSystem(conf, WebHdfsFileSystem.Scheme);
        using (OutputStream os = fs.Create(new Path(Path))) {
            os.Write(Contents);
        }
        IPEndPoint addr = cluster.GetNameNode().GetHttpAddress();
        Uri url = new Uri("http", addr.GetHostString(), addr.Port, WebHdfsFileSystem.PathPrefix + Path + "?op=OPEN" + Param.ToSortedString("&", new OffsetParam((long)Offset), new LengthParam((long)Length)));
        HttpURLConnection conn = (HttpURLConnection)url.OpenConnection();
        conn.SetInstanceFollowRedirects(true);
        NUnit.Framework.Assert.AreEqual(Length, conn.GetContentLength());
        byte[] subContents = new byte[Length];
        byte[] realContents = new byte[Length];
        System.Array.Copy(Contents, Offset, subContents, 0, Length);
        IOUtils.ReadFully(conn.GetInputStream(), realContents);
        Assert.AssertArrayEquals(subContents, realContents);
    } finally {
        if (cluster != null) {
            cluster.Shutdown();
        }
    }
}
/// <exception cref="System.IO.IOException"/>
public virtual void TestRootDir() {
    Path root = new Path("/");
    WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
    Uri url = webhdfs.ToUrl(GetOpParam.OP.Null, root);
    WebHdfsFileSystem.Log.Info("null url=" + url);
    NUnit.Framework.Assert.IsTrue(url.ToString().Contains("v1"));
    // test root permission
    FileStatus status = fs.GetFileStatus(root);
    NUnit.Framework.Assert.IsTrue(status != null);
    NUnit.Framework.Assert.AreEqual(0x1ff, status.GetPermission().ToShort());
    // delete root
    NUnit.Framework.Assert.IsFalse(fs.Delete(root, true));
    // create file using root path
    try {
        FSDataOutputStream @out = fs.Create(root);
        @out.Write(1);
        @out.Close();
        Fail();
    } catch (IOException e) {
        WebHdfsFileSystem.Log.Info("This is expected.", e);
    }
    // open file using root path
    try {
        FSDataInputStream @in = fs.Open(root);
        @in.Read();
        Fail();
    } catch (IOException e) {
        WebHdfsFileSystem.Log.Info("This is expected.", e);
    }
}