Use of org.apache.accumulo.server.rpc.TimedProcessor in project accumulo by apache.
Class Proxy, method createProxyServer:
public static ServerAddress createProxyServer(HostAndPort address, TProtocolFactory protocolFactory, Properties properties, ClientConfiguration clientConf) throws Exception {
  final int numThreads = Integer.parseInt(properties.getProperty(THRIFT_THREAD_POOL_SIZE_KEY, THRIFT_THREAD_POOL_SIZE_DEFAULT));
  final long maxFrameSize = ConfigurationTypeHelper.getFixedMemoryAsBytes(properties.getProperty(THRIFT_MAX_FRAME_SIZE_KEY, THRIFT_MAX_FRAME_SIZE_DEFAULT));
  final int simpleTimerThreadpoolSize = Integer.parseInt(Property.GENERAL_SIMPLETIMER_THREADPOOL_SIZE.getDefaultValue());
  // How frequently to try to resize the thread pool
  final long threadpoolResizeInterval = 1000L * 5;
  // No timeout
  final long serverSocketTimeout = 0L;
  // Use the new hadoop metrics2 support
  final MetricsFactory metricsFactory = new MetricsFactory(false);
  final String serverName = "Proxy", threadName = "Accumulo Thrift Proxy";
  // Create the implementation of the proxy interface
  ProxyServer impl = new ProxyServer(properties);
  // Wrap the implementation -- translate some exceptions
  AccumuloProxy.Iface wrappedImpl = RpcWrapper.service(impl);
  // Create the processor from the implementation
  TProcessor processor = new AccumuloProxy.Processor<>(wrappedImpl);
  // Get the type of thrift server to instantiate
  final String serverTypeStr = properties.getProperty(THRIFT_SERVER_TYPE, THRIFT_SERVER_TYPE_DEFAULT);
  ThriftServerType serverType = DEFAULT_SERVER_TYPE;
  if (!THRIFT_SERVER_TYPE_DEFAULT.equals(serverTypeStr)) {
    serverType = ThriftServerType.get(serverTypeStr);
  }
  SslConnectionParams sslParams = null;
  SaslServerConnectionParams saslParams = null;
  switch (serverType) {
    case SSL:
      sslParams = SslConnectionParams.forClient(ClientContext.convertClientConfig(clientConf));
      break;
    case SASL:
      if (!clientConf.hasSasl()) {
        // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
        log.error("FATAL: SASL thrift server was requested but it is disabled in client configuration");
        throw new RuntimeException("SASL is not enabled in configuration");
      }
      // Kerberos needs to be enabled to use it
      if (!UserGroupInformation.isSecurityEnabled()) {
        // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
        log.error("FATAL: Hadoop security is not enabled");
        throw new RuntimeException();
      }
      // Login via principal and keytab
      final String kerberosPrincipal = properties.getProperty(KERBEROS_PRINCIPAL, ""),
          kerberosKeytab = properties.getProperty(KERBEROS_KEYTAB, "");
      if (StringUtils.isBlank(kerberosPrincipal) || StringUtils.isBlank(kerberosKeytab)) {
        // ACCUMULO-3651 Changed level to error and added FATAL to message for slf4j capability
        log.error("FATAL: Kerberos principal and keytab must be provided");
        throw new RuntimeException();
      }
      UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, kerberosKeytab);
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      log.info("Logged in as {}", ugi.getUserName());
      // The kerberosPrimary set in the SASL server needs to match the principal we're logged in as.
      final String shortName = ugi.getShortUserName();
      log.info("Setting server primary to {}", shortName);
      clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY, shortName);
      KerberosToken token = new KerberosToken();
      saslParams = new SaslServerConnectionParams(clientConf, token, null);
      processor = new UGIAssumingProcessor(processor);
      break;
    default:
      // Nothing to do -- no extra configuration necessary
      break;
  }
  // Wrap the processor so thrift calls are timed and reported through the metrics system
  TimedProcessor timedProcessor = new TimedProcessor(metricsFactory, processor, serverName, threadName);
  // Create and start the thrift server with our processor and properties
  ServerAddress serverAddr = TServerUtils.startTServer(serverType, timedProcessor, protocolFactory, serverName, threadName, numThreads, simpleTimerThreadpoolSize, threadpoolResizeInterval, maxFrameSize, sslParams, saslParams, serverSocketTimeout, address);
  return serverAddr;
}
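
For context, a minimal sketch of how this factory method might be wired up from the proxy's main entry point. The properties file name, listen address, port, and choice of protocol factory below are illustrative assumptions, not taken from the listing above; ClientConfiguration.loadDefault() and the public server/address members of ServerAddress are assumed to be available as in the Accumulo 1.x proxy.

public static void main(String[] args) throws Exception {
  // Hypothetical wiring -- file name, host, and port are illustrative only.
  Properties props = new Properties();
  try (InputStream in = Files.newInputStream(Paths.get("proxy.properties"))) {
    props.load(in); // supplies the proxy.* keys read by createProxyServer
  }
  ServerAddress server = createProxyServer(
      HostAndPort.fromParts("0.0.0.0", 42424), // listen address (assumed)
      new TCompactProtocol.Factory(),          // thrift wire protocol (assumed)
      props,
      ClientConfiguration.loadDefault());      // assumed default client config
  // TServerUtils.startTServer runs the TServer in its own thread, so the caller
  // only needs to wait until it is serving and then keep the JVM alive.
  while (!server.server.isServing()) {
    Thread.sleep(100);
  }
  log.info("Proxy server started on {}", server.address);
  while (server.server.isServing()) {
    Thread.sleep(1000); // block for the lifetime of the server
  }
}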