Use of java.util.HashSet in project hadoop by apache.
The class TestWebDelegationToken, method doAsKerberosUser.
public static <T> T doAsKerberosUser(String principal, String keytab,
    final Callable<T> callable) throws Exception {
  LoginContext loginContext = null;
  try {
    // Build a Subject that carries the Kerberos principal to log in as.
    Set<Principal> principals = new HashSet<Principal>();
    principals.add(new KerberosPrincipal(principal));
    Subject subject = new Subject(false, principals, new HashSet<Object>(),
        new HashSet<Object>());
    // An empty login-context name is sufficient here: the custom
    // KerberosConfiguration supplies the keytab-based JAAS entry directly.
    loginContext = new LoginContext("", subject, null,
        new KerberosConfiguration(principal, keytab));
    loginContext.login();
    subject = loginContext.getSubject();
    // Execute the callable with the authenticated subject's credentials.
    return Subject.doAs(subject, new PrivilegedExceptionAction<T>() {

      @Override
      public T run() throws Exception {
        return callable.call();
      }
    });
  } catch (PrivilegedActionException ex) {
    // Unwrap the original exception thrown by the callable.
    throw ex.getException();
  } finally {
    if (loginContext != null) {
      loginContext.logout();
    }
  }
}
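A rough usage sketch of this helper (the principal name, keytab path, and returned value below are illustrative placeholders, not part of the Hadoop test):

  // Hypothetical caller: run an action with the test principal's credentials.
  String result = doAsKerberosUser("client/localhost@EXAMPLE.COM",
      "/tmp/client.keytab", new Callable<String>() {

        @Override
        public String call() throws Exception {
          // Anything here executes inside Subject.doAs with the
          // freshly logged-in Kerberos subject.
          return "action ran";
        }
      });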
Use of java.util.HashSet in project hadoop by apache.
The class DFSTestUtil, method setFederatedConfiguration.
public static void setFederatedConfiguration(MiniDFSCluster cluster, Configuration conf) {
  Set<String> nameservices = new HashSet<String>();
  for (NameNodeInfo info : cluster.getNameNodeInfos()) {
    assert info.nameserviceId != null;
    nameservices.add(info.nameserviceId);
    // Publish both the client RPC and the service RPC address of each
    // NameNode under its nameservice-suffixed configuration key.
    conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, info.nameserviceId),
        DFSUtil.createUri(HdfsConstants.HDFS_URI_SCHEME,
            info.nameNode.getNameNodeAddress()).toString());
    conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, info.nameserviceId),
        DFSUtil.createUri(HdfsConstants.HDFS_URI_SCHEME,
            info.nameNode.getNameNodeAddress()).toString());
  }
  // List every collected nameservice id under dfs.nameservices.
  conf.set(DFSConfigKeys.DFS_NAMESERVICES, Joiner.on(",").join(nameservices));
}
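A sketch of how a test might use this helper, assuming a federated MiniDFSCluster built with the usual test topology helpers (the two-nameservice topology below is illustrative):

  // Illustrative setup: a two-nameservice federated mini cluster.
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
      .nnTopology(MiniDFSNNTopology.simpleFederatedTopology(2))
      .build();
  DFSTestUtil.setFederatedConfiguration(cluster, conf);
  // conf now holds dfs.namenode.rpc-address.<nsId> and
  // dfs.namenode.servicerpc-address.<nsId> for each nameservice,
  // plus dfs.nameservices with the comma-joined ids.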
Use of java.util.HashSet in project hadoop by apache.
The class TestBalancer, method testBalancerWithIncludeList.
/**
 * Tests a cluster that starts with an even distribution: three nodes are
 * added to the cluster, then the balancer is run with one of the new
 * nodes in the include list.
 */
@Test(timeout = 100000)
public void testBalancerWithIncludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // Only datanodeY is placed in the include list.
  Set<String> includeHosts = new HashSet<String>();
  includeHosts.add("datanodeY");
  doTest(conf, new long[] { CAPACITY, CAPACITY }, new String[] { RACK0, RACK1 },
      CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] { "datanodeX", "datanodeY", "datanodeZ" },
          BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts),
      false, false);
}
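Outside of this test harness, an include list reaches the balancer through BalancerParameters; a minimal sketch, assuming the Builder API that accompanies the BalancerParameters.DEFAULT instance referenced above:

  // Sketch (assumed Builder API): when the include set is non-empty,
  // only the named datanodes take part in balancing.
  Set<String> include = new HashSet<String>();
  include.add("datanodeY");
  BalancerParameters params = new BalancerParameters.Builder()
      .setIncludedNodes(include)
      .build();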
Use of java.util.HashSet in project hadoop by apache.
The class TestBalancer, method testBalancerCliWithIncludeListInAFile.
/**
 * Tests a cluster that starts with an even distribution: three nodes are
 * added to the cluster, then the balancer CLI is run with one of the new
 * nodes listed in an include file.
 */
@Test(timeout = 100000)
public void testBalancerCliWithIncludeListInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts = new HashSet<String>();
  includeHosts.add("datanodeY");
  // Per the test name, the trailing (true, true) flags ask doTest to
  // drive the balancer through its CLI and pass the include list in a file.
  doTest(conf, new long[] { CAPACITY, CAPACITY }, new String[] { RACK0, RACK1 },
      CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] { "datanodeX", "datanodeY", "datanodeZ" },
          BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts),
      true, true);
}
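When the list travels through a file, it is one hostname per line; a sketch of producing such a file (the temp-file name is arbitrary):

  // Sketch: write the include list in the hostname-per-line format the
  // balancer's "-include -f <hosts-file>" option reads.
  File hostsFile = File.createTempFile("include-hosts", ".txt");
  try (PrintWriter out = new PrintWriter(hostsFile)) {
    for (String host : includeHosts) {
      out.println(host);
    }
  }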
Use of java.util.HashSet in project hadoop by apache.
The class TestBalancer, method testBalancerWithExcludeList.
/**
 * Tests a cluster that starts with an even distribution: three nodes are
 * added to the cluster, then the balancer is run with two of the new
 * nodes in the exclude list.
 */
@Test(timeout = 100000)
public void testBalancerWithExcludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // datanodeY and datanodeZ are excluded, so only datanodeX may be used.
  Set<String> excludeHosts = new HashSet<String>();
  excludeHosts.add("datanodeY");
  excludeHosts.add("datanodeZ");
  doTest(conf, new long[] { CAPACITY, CAPACITY }, new String[] { RACK0, RACK1 },
      CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] { "datanodeX", "datanodeY", "datanodeZ" },
          excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes()),
      false, false);
}
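The same exclusion can be expressed through the balancer's command-line entry point; a rough sketch, assuming Balancer.Cli is the Tool behind the hdfs balancer command (TestBalancer lives in the same package, so the class is reachable):

  // Sketch (assumed CLI wiring): exclude two datanodes when the balancer
  // is driven as a command-line tool via ToolRunner.
  int rc = ToolRunner.run(conf, new Balancer.Cli(),
      new String[] { "-exclude", "datanodeY,datanodeZ" });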