Use of org.apache.hadoop.hdfs.web.resources.UserParam in project hadoop by apache.
From the class TestWebHdfsUrl, method testCheckAccessUrl.
@Test(timeout = 60000)
public void testCheckAccessUrl() throws IOException {
  Configuration conf = new Configuration();
  // Run as a simple (non-proxy) remote user.
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  UserGroupInformation.setLoginUser(ugi);
  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/p1");
  // Build the CHECKACCESS URL and verify its query string carries the
  // operation, the user.name parameter, and the requested FsAction.
  URL checkAccessUrl = webhdfs.toUrl(GetOpParam.Op.CHECKACCESS, fsPath,
      new FsActionParam(FsAction.READ_WRITE));
  checkQueryParams(new String[] {
      GetOpParam.Op.CHECKACCESS.toQueryString(),
      new UserParam(ugi.getShortUserName()).toString(),
      FsActionParam.NAME + "=" + FsAction.READ_WRITE.SYMBOL
  }, checkAccessUrl);
}
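For reference, a minimal standalone sketch (not part of the test above) of the query-string fragment that UserParam contributes; the literal "test-user" value is assumed here to mirror the remote user created in the test.

import org.apache.hadoop.hdfs.web.resources.UserParam;

// UserParam renders as "user.name=<value>", which is exactly the string
// that checkQueryParams compares against the generated URL's query.
UserParam user = new UserParam("test-user");
System.out.println(user.toString()); // prints: user.name=test-user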
Use of org.apache.hadoop.hdfs.web.resources.UserParam in project hadoop by apache.
From the class TestWebHdfsUrl, method testSimpleProxyAuthParamsInUrl.
@Test(timeout = 60000)
public void testSimpleProxyAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  // Wrap the real user in a proxy (doAs) user.
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);
  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  // send real+effective
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(new String[] {
      GetOpParam.Op.GETFILESTATUS.toQueryString(),
      new UserParam(ugi.getRealUser().getShortUserName()).toString(),
      new DoAsParam(ugi.getShortUserName()).toString()
  }, fileStatusUrl);
}
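A minimal sketch (values assumed to match the test above) of the real/effective split this test asserts: for a proxy user, UserParam carries the real (authenticating) user while DoAsParam carries the effective user being impersonated.

import org.apache.hadoop.hdfs.web.resources.DoAsParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;

// Real user goes into user.name; the impersonated user goes into doas.
UserParam realUser = new UserParam("test-user");            // "user.name=test-user"
DoAsParam effectiveUser = new DoAsParam("test-proxy-user"); // "doas=test-proxy-user"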