Use of org.apache.hadoop.ipc.RPC in the Apache Hadoop project: class DataNode, method initIpcServer.
/**
 * Starts the DataNode's IPC server and registers every RPC protocol the
 * DataNode implements: ClientDatanodeProtocol (which seeds the builder),
 * then ReconfigurationProtocol, InterDatanodeProtocol and TraceAdminProtocol,
 * all multiplexed onto the same server instance.
 *
 * @throws IOException if the server cannot be created or an ACL refresh fails
 */
private void initIpcServer() throws IOException {
  InetSocketAddress rpcAddr = NetUtils.createSocketAddr(
      getConf().getTrimmed(DFS_DATANODE_IPC_ADDRESS_KEY));

  // ClientDatanodeProtocol is registered first; it is the protocol the
  // RPC.Builder is created with, and the server binds to the configured
  // DataNode IPC address using the block-pool token secret manager.
  RPC.setProtocolEngine(getConf(), ClientDatanodeProtocolPB.class,
      ProtobufRpcEngine.class);
  BlockingService clientDatanodeService =
      ClientDatanodeProtocolService.newReflectiveBlockingService(
          new ClientDatanodeProtocolServerSideTranslatorPB(this));
  ipcServer = new RPC.Builder(getConf())
      .setProtocol(ClientDatanodeProtocolPB.class)
      .setInstance(clientDatanodeService)
      .setBindAddress(rpcAddr.getHostName())
      .setPort(rpcAddr.getPort())
      .setNumHandlers(getConf().getInt(DFS_DATANODE_HANDLER_COUNT_KEY,
          DFS_DATANODE_HANDLER_COUNT_DEFAULT))
      .setVerbose(false)
      .setSecretManager(blockPoolTokenSecretManager)
      .build();

  // The remaining protocols piggy-back on the already-built server.
  BlockingService reconfigurationService =
      ReconfigurationProtocolService.newReflectiveBlockingService(
          new ReconfigurationProtocolServerSideTranslatorPB(this));
  DFSUtil.addPBProtocol(getConf(), ReconfigurationProtocolPB.class,
      reconfigurationService, ipcServer);

  BlockingService interDatanodeService =
      InterDatanodeProtocolService.newReflectiveBlockingService(
          new InterDatanodeProtocolServerSideTranslatorPB(this));
  DFSUtil.addPBProtocol(getConf(), InterDatanodeProtocolPB.class,
      interDatanodeService, ipcServer);

  BlockingService traceAdminService =
      TraceAdminService.newReflectiveBlockingService(
          new TraceAdminProtocolServerSideTranslatorPB(this));
  DFSUtil.addPBProtocol(getConf(), TraceAdminProtocolPB.class,
      traceAdminService, ipcServer);

  LOG.info("Opened IPC server at " + ipcServer.getListenerAddress());

  // set service-level authorization security policy
  if (getConf().getBoolean(
      CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
    ipcServer.refreshServiceAcl(getConf(), new HDFSPolicyProvider());
  }
}
Use of org.apache.hadoop.ipc.RPC in the Apache Tez project: class TezTaskCommunicatorImpl, method startRpcServer.
/**
 * Brings up the task umbilical RPC server on an ephemeral port bound to all
 * interfaces, secured with a per-job token secret manager, then records the
 * externally connectable address in {@code this.address}.
 *
 * @throws TezUncheckedException wrapping any IOException from server setup
 */
protected void startRpcServer() {
  try {
    // Secret manager keyed by this job's session token.
    JobTokenSecretManager secretManager = new JobTokenSecretManager();
    secretManager.addTokenForJob(tokenIdentifier, sessionToken);

    int handlerCount = conf.getInt(
        TezConfiguration.TEZ_AM_TASK_LISTENER_THREAD_COUNT,
        TezConfiguration.TEZ_AM_TASK_LISTENER_THREAD_COUNT_DEFAULT);

    // Port 0 lets the OS pick a free port within the configured range.
    server = new RPC.Builder(conf)
        .setProtocol(TezTaskUmbilicalProtocol.class)
        .setBindAddress("0.0.0.0")
        .setPort(0)
        .setInstance(taskUmbilical)
        .setNumHandlers(handlerCount)
        .setPortRangeConfig(TezConfiguration.TEZ_AM_TASK_AM_PORT_RANGE)
        .setSecretManager(secretManager)
        .build();

    // Enable service authorization?
    if (conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
      refreshServiceAcls(conf, new TezAMPolicyProvider());
    }

    server.start();

    // Publish the canonical host + actual bound port for clients to dial.
    InetSocketAddress boundAddress = NetUtils.getConnectAddress(server);
    this.address = NetUtils.createSocketAddrForHost(
        boundAddress.getAddress().getCanonicalHostName(),
        boundAddress.getPort());
    LOG.info("Instantiated TezTaskCommunicator RPC at " + this.address);
  } catch (IOException e) {
    throw new TezUncheckedException(e);
  }
}
Use of org.apache.hadoop.ipc.RPC in the Apache Hive project: class LlapUtil, method startProtocolServer.
/**
 * Creates and starts an RPC server for the given protocol on the requested
 * port, optionally publishing the resolved canonical bind address through
 * {@code bindAddress}.
 *
 * @param srvPort        port to listen on (0 selects an ephemeral port)
 * @param numHandlers    number of RPC handler threads
 * @param bindAddress    out-parameter for the resolved address; may be null
 * @param conf           Hadoop configuration for the server
 * @param impl           protobuf blocking service implementation
 * @param protocolClass  RPC protocol interface being served
 * @param secretManager  token secret manager, passed through to the server
 * @param provider       service-level authorization policy provider
 * @param aclVars        ACL configuration variables to apply
 * @return the started {@link RPC.Server}
 * @throws RuntimeException wrapping any IOException from server startup
 */
public static RPC.Server startProtocolServer(int srvPort, int numHandlers,
    AtomicReference<InetSocketAddress> bindAddress, Configuration conf,
    BlockingService impl, Class<?> protocolClass,
    SecretManager<?> secretManager, PolicyProvider provider,
    ConfVars... aclVars) {
  RPC.Server server;
  try {
    server = createRpcServer(protocolClass, new InetSocketAddress(srvPort),
        conf, numHandlers, impl, secretManager, provider, aclVars);
    server.start();
  } catch (IOException e) {
    LOG.error("Failed to run RPC Server on port: " + srvPort, e);
    throw new RuntimeException(e);
  }

  // Resolve the actual bound address to a canonical host:port pair.
  InetSocketAddress connectAddress = NetUtils.getConnectAddress(server);
  InetSocketAddress resolvedAddress = NetUtils.createSocketAddrForHost(
      connectAddress.getAddress().getCanonicalHostName(),
      connectAddress.getPort());
  if (bindAddress != null) {
    bindAddress.set(resolvedAddress);
  }
  LOG.info("Instantiated " + protocolClass.getSimpleName() + " at "
      + resolvedAddress);
  return server;
}
Use of org.apache.hadoop.ipc.RPC in the Apache Hive project: class LlapProtocolServerImpl, method startProtocolServer.
/**
 * Creates and starts an RPC server for the given protocol on the requested
 * port, publishing the resolved canonical bind address through
 * {@code bindAddress} when one is supplied.
 *
 * @param srvPort        port to listen on (0 selects an ephemeral port)
 * @param numHandlers    number of RPC handler threads
 * @param bindAddress    out-parameter for the resolved address; may be null
 * @param conf           Hadoop configuration for the server
 * @param impl           protobuf blocking service implementation
 * @param protocolClass  RPC protocol interface being served
 * @param aclVars        ACL configuration variables to apply
 * @return the started {@link RPC.Server}
 * @throws RuntimeException wrapping any IOException from server startup
 */
private RPC.Server startProtocolServer(int srvPort, int numHandlers,
    AtomicReference<InetSocketAddress> bindAddress, Configuration conf,
    BlockingService impl, Class<?> protocolClass, ConfVars... aclVars) {
  InetSocketAddress addr = new InetSocketAddress(srvPort);
  RPC.Server server;
  try {
    server = createServer(protocolClass, addr, conf, numHandlers, impl,
        aclVars);
    server.start();
  } catch (IOException e) {
    LOG.error("Failed to run RPC Server on port: " + srvPort, e);
    throw new RuntimeException(e);
  }

  // Resolve the actual bound address to a canonical host:port pair.
  InetSocketAddress serverBindAddress = NetUtils.getConnectAddress(server);
  InetSocketAddress resolvedAddress = NetUtils.createSocketAddrForHost(
      serverBindAddress.getAddress().getCanonicalHostName(),
      serverBindAddress.getPort());
  // Guard against a null out-parameter, matching
  // LlapUtil.startProtocolServer; the original dereferenced unconditionally
  // and would NPE after the server had already been started.
  if (bindAddress != null) {
    bindAddress.set(resolvedAddress);
  }
  // Log the resolved value directly rather than going through the
  // AtomicReference, so the message is correct even when bindAddress is null.
  LOG.info("Instantiated " + protocolClass.getSimpleName() + " at "
      + resolvedAddress);
  return server;
}
Aggregations