Use of org.apache.hadoop.security.authorize.PolicyProvider in project hadoop by apache.
From the class AdminService, method refreshServiceAcls.
private void refreshServiceAcls() throws IOException, YarnException {
  PolicyProvider policyProvider = RMPolicyProvider.getInstance();
  // Reload the policy file (hadoop-policy.xml) and re-apply the service ACLs.
  Configuration conf = getConfiguration(new Configuration(false),
      YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
  refreshServiceAcls(conf, policyProvider);
}
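RMPolicyProvider.getInstance() above returns the provider that maps the ResourceManager's ACL configuration keys to the protocol interfaces they guard. Below is a minimal sketch of what such a PolicyProvider subclass typically looks like; the class ExamplePolicyProvider, the ExampleProtocol interface, and the ACL key are illustrative placeholders, not the actual RMPolicyProvider source.
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.Service;

// Hypothetical protocol interface, present only so the sketch is self-contained.
interface ExampleProtocol {
}

// A PolicyProvider simply declares which configuration property holds the
// access-control list for each protocol the server exposes.
public class ExamplePolicyProvider extends PolicyProvider {

  private static final Service[] SERVICES = new Service[] {
      // "security.example.protocol.acl" is an illustrative key; real providers
      // such as RMPolicyProvider use the keys documented in hadoop-policy.xml.
      new Service("security.example.protocol.acl", ExampleProtocol.class)
  };

  @Override
  public Service[] getServices() {
    return SERVICES;
  }
}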
Use of org.apache.hadoop.security.authorize.PolicyProvider in project apex-core by apache.
From the class StreamingContainerParent, method startRpcServer.
protected void startRpcServer() {
  Configuration conf = getConfig();
  LOG.info("Config: " + conf);
  LOG.info("Listener thread count " + listenerThreadCount);
  try {
    server = new RPC.Builder(conf)
        .setProtocol(StreamingContainerUmbilicalProtocol.class)
        .setInstance(this)
        .setBindAddress("0.0.0.0")
        .setPort(0)
        .setNumHandlers(listenerThreadCount)
        .setSecretManager(tokenSecretManager)
        .setVerbose(false)
        .build();
    // Enable service authorization?
    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
      //refreshServiceAcls(conf, new MRAMPolicyProvider());
      // Restrict access to the umbilical protocol with an inline PolicyProvider
      // keyed by the protocol's class name.
      server.refreshServiceAcl(conf, new PolicyProvider() {
        @Override
        public Service[] getServices() {
          return new Service[] {
              new Service(StreamingContainerUmbilicalProtocol.class.getName(),
                  StreamingContainerUmbilicalProtocol.class)
          };
        }
      });
    }
    server.start();
    this.address = NetUtils.getConnectAddress(server);
    LOG.info("Container callback server listening at " + this.address);
  } catch (IOException e) {
    throw new YarnRuntimeException(e);
  }
}
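The inline provider above keys its ACL on the protocol's fully qualified class name, so that is the configuration property an operator would set, assuming Hadoop's usual behavior of looking the ACL up directly under the Service key. A hedged sketch of the corresponding configuration follows; the helper class, the fully qualified class name, and the "appuser hadoop" value are placeholders.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

// Illustrative helper, not apex-core source.
public class UmbilicalAclConfigSketch {

  public static Configuration withUmbilicalAcl() {
    Configuration conf = new Configuration();
    // Turn on service-level authorization so the refreshServiceAcl() branch
    // above is taken when the RPC server is built.
    conf.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
    // The property name must match the Service key registered by the provider,
    // i.e. StreamingContainerUmbilicalProtocol.class.getName(); the name below
    // and the "appuser hadoop" value ("users groups" in AccessControlList
    // syntax) are placeholders.
    conf.set("com.example.StreamingContainerUmbilicalProtocol", "appuser hadoop");
    return conf;
  }
}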
Use of org.apache.hadoop.security.authorize.PolicyProvider in project hbase by apache.
From the class TestTokenAuthentication, method setupBeforeClass.
@BeforeClass
public static void setupBeforeClass() throws Exception {
  TEST_UTIL = new HBaseTestingUtility();
  TEST_UTIL.startMiniZKCluster();
  // register token type for protocol
  SecurityInfo.addInfo(AuthenticationProtos.AuthenticationService.getDescriptor().getName(),
      new SecurityInfo("hbase.test.kerberos.principal",
          AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN));
  // security settings only added after startup so that ZK does not require SASL
  Configuration conf = TEST_UTIL.getConfiguration();
  conf.set("hadoop.security.authentication", "kerberos");
  conf.set("hbase.security.authentication", "kerberos");
  conf.setBoolean(HADOOP_SECURITY_AUTHORIZATION, true);
  server = new TokenServer(conf);
  serverThread = new Thread(server);
  Threads.setDaemonThreadRunning(serverThread,
      "TokenServer:" + server.getServerName().toString());
  // wait for startup
  while (!server.isStarted() && !server.isStopped()) {
    Thread.sleep(10);
  }
  // Authorize the token service's RPC interface via an inline PolicyProvider
  // keyed by the "security.client.protocol.acl" property.
  server.rpcServer.refreshAuthManager(new PolicyProvider() {
    @Override
    public Service[] getServices() {
      return new Service[] {
          new Service("security.client.protocol.acl",
              AuthenticationProtos.AuthenticationService.BlockingInterface.class)
      };
    }
  });
  ZKClusterId.setClusterId(server.getZooKeeper(), clusterId);
  secretManager = (AuthenticationTokenSecretManager) server.getSecretManager();
  // wait for the secret manager to generate its first key
  while (secretManager.getCurrentKey() == null) {
    Thread.sleep(1);
  }
}
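The Service key registered above, "security.client.protocol.acl", also serves as the configuration property the authorization manager reads the ACL from, assuming Hadoop's ServiceAuthorizationManager behavior of resolving the ACL directly under the Service key. A small sketch of granting that service to all callers; the helper class is illustrative and not part of the original test.
import org.apache.hadoop.conf.Configuration;

// Illustrative helper, not part of TestTokenAuthentication.
public class TokenServiceAclSketch {

  // Grant the AuthenticationService to every caller: "*" is the
  // AccessControlList wildcard matching any user and any group.
  public static void allowAllCallers(Configuration conf) {
    conf.set("security.client.protocol.acl", "*");
  }
}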
Use of org.apache.hadoop.security.authorize.PolicyProvider in project hadoop by apache.
From the class AdminService, method refreshActiveServicesAcls.
private void refreshActiveServicesAcls() throws IOException, YarnException {
  PolicyProvider policyProvider = RMPolicyProvider.getInstance();
  Configuration conf = getConfiguration(new Configuration(false),
      YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
  // Re-apply the reloaded ACLs to each of the RM's active RPC services.
  rmContext.getClientRMService().refreshServiceAcls(conf, policyProvider);
  rmContext.getApplicationMasterService().refreshServiceAcls(conf, policyProvider);
  rmContext.getResourceTrackerService().refreshServiceAcls(conf, policyProvider);
}
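Each service touched above exposes the same refreshServiceAcls(Configuration, PolicyProvider) entry point. A plausible sketch of that shared pattern, assuming each service wraps an org.apache.hadoop.ipc.Server; the class and field names are illustrative, not the YARN source.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.security.authorize.PolicyProvider;

// Illustrative base class: an RPC-backed service keeps a reference to its
// ipc.Server and delegates the ACL refresh to it.
abstract class RpcBackedServiceSketch {

  protected Server server; // built with RPC.Builder when the service starts

  public void refreshServiceAcls(Configuration conf, PolicyProvider provider) {
    // Re-reads the ACL entries declared by the provider from conf and installs
    // them in the server's service authorization manager.
    server.refreshServiceAcl(conf, provider);
  }
}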