Use of org.apache.thrift.transport.TTransportException in project storm by apache.
From the class KerberosSaslTransportPlugin, method connect:
@Override
public TTransport connect(TTransport transport, String serverHost, String asUser) throws TTransportException, IOException {
    // create an authentication callback handler
    ClientCallbackHandler client_callback_handler = new ClientCallbackHandler(login_conf);
    // login our user: check the cache first, then double-check under the lock before creating a new Login
    LoginCacheKey key = new LoginCacheKey(login_conf, AuthUtils.LOGIN_CONTEXT_CLIENT);
    Login login = loginCache.get(key);
    if (login == null) {
        LOG.debug("Kerberos Login was not found in the Login Cache, attempting to contact the Kerberos Server");
        synchronized (loginCache) {
            login = loginCache.get(key);
            if (login == null) {
                try {
                    // specify the configuration object to be used
                    Configuration.setConfiguration(login_conf);
                    // now login
                    login = new Login(AuthUtils.LOGIN_CONTEXT_CLIENT, client_callback_handler);
                    login.startThreadIfNeeded();
                    loginCache.put(key, login);
                } catch (LoginException ex) {
                    LOG.error("Client failed to login: " + ex, ex);
                    throw new RuntimeException(ex);
                }
            }
        }
    }
    final Subject subject = login.getSubject();
    if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) {
        // error
        throw new RuntimeException("Failed to verify user principal with section \"" + AuthUtils.LOGIN_CONTEXT_CLIENT + "\" in login configuration file " + login_conf);
    }
    final String principal = StringUtils.isBlank(asUser) ? getPrincipal(subject) : asUser;
    String serviceName = AuthUtils.get(login_conf, AuthUtils.LOGIN_CONTEXT_CLIENT, "serviceName");
    if (serviceName == null) {
        serviceName = AuthUtils.SERVICE;
    }
    Map<String, String> props = new TreeMap<String, String>();
    props.put(Sasl.QOP, "auth");
    props.put(Sasl.SERVER_AUTH, "false");
    LOG.debug("SASL GSSAPI client transport is being established");
    final TTransport saslTransport = new TSaslClientTransport(KERBEROS, principal, serviceName, serverHost, props, null, transport);
    // open the SASL transport with the login credential
    try {
        Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
            public Void run() {
                try {
                    LOG.debug("do as: " + principal);
                    saslTransport.open();
                } catch (Exception e) {
                    LOG.error("Client failed to open SaslClientTransport to interact with a server during session initiation: " + e, e);
                }
                return null;
            }
        });
    } catch (PrivilegedActionException e) {
        throw new RuntimeException(e);
    }
    return saslTransport;
}
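For context, a caller supplies an already-constructed TTransport and optionally an asUser principal. A minimal usage sketch; the endpoint and the preparation step are assumptions, only the connect signature comes from the snippet above:

// Hypothetical usage sketch: wrap a plain socket transport in the Kerberos
// SASL transport. Host and port are illustrative; the plugin is assumed to
// have been prepared with a JAAS login configuration beforehand.
TTransport underlying = new TSocket("nimbus.example.com", 6627);
KerberosSaslTransportPlugin plugin = new KerberosSaslTransportPlugin();
// ... plugin preparation with the JAAS login configuration happens here ...
TTransport saslTransport = plugin.connect(underlying, "nimbus.example.com", null);
// the returned transport is authenticated; use it to build a Thrift client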
Use of org.apache.thrift.transport.TTransportException in project storm by apache.
From the class BlobStoreUtils, method downloadMissingBlob:
// Download missing blobs from potential nimbus nodes
public static boolean downloadMissingBlob(Map<String, Object> conf, BlobStore blobStore, String key, Set<NimbusInfo> nimbusInfos) throws TTransportException {
    ReadableBlobMeta rbm;
    ClientBlobStore remoteBlobStore;
    InputStreamWithMeta in;
    boolean isSuccess = false;
    LOG.debug("Download blob NimbusInfos {}", nimbusInfos);
    for (NimbusInfo nimbusInfo : nimbusInfos) {
        if (isSuccess) {
            break;
        }
        LOG.debug("Download blob key: {}, NimbusInfo {}", key, nimbusInfo);
        try (NimbusClient client = new NimbusClient(conf, nimbusInfo.getHost(), nimbusInfo.getPort(), null)) {
            rbm = client.getClient().getBlobMeta(key);
            remoteBlobStore = new NimbusBlobStore();
            remoteBlobStore.setClient(conf, client);
            in = remoteBlobStore.getBlob(key);
            blobStore.createBlob(key, in, rbm.get_settable(), getNimbusSubject());
            // verify the key now exists locally; createBlob throws
            // KeyAlreadyExistsException if it was already present
            Iterator<String> keyIterator = blobStore.listKeys();
            while (keyIterator.hasNext()) {
                if (keyIterator.next().equals(key)) {
                    LOG.debug("Success creating key, {}", key);
                    isSuccess = true;
                    break;
                }
            }
        } catch (IOException | AuthorizationException exception) {
            throw new RuntimeException(exception);
        } catch (KeyAlreadyExistsException kae) {
            LOG.info("KeyAlreadyExistsException Key: {} {}", key, kae);
        } catch (KeyNotFoundException knf) {
            // Catching and logging KeyNotFoundException because, if
            // there is a subsequent update and delete, the non-leader
            // nimbus nodes might throw an exception.
            LOG.info("KeyNotFoundException Key: {} {}", key, knf);
        } catch (Exception exp) {
            // Log any other exception raised while the client is connecting
            LOG.error("Exception {}", exp);
        }
    }
    if (!isSuccess) {
        LOG.error("Could not download the blob with key: {}", key);
    }
    return isSuccess;
}
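A hedged caller sketch for the method above; the wrapper name and the failure handling are assumptions, only downloadMissingBlob's signature comes from the snippet:

// Hypothetical helper: fail loudly if the blob cannot be fetched from any
// of the candidate nimbus nodes.
static void ensureBlobPresent(Map<String, Object> conf, BlobStore blobStore,
                              String key, Set<NimbusInfo> nimbusInfos) throws TTransportException {
    if (!BlobStoreUtils.downloadMissingBlob(conf, blobStore, key, nimbusInfos)) {
        throw new IllegalStateException("Blob " + key + " unavailable on all nimbus nodes");
    }
}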
Use of org.apache.thrift.transport.TTransportException in project storm by apache.
From the class BlobStoreUtils, method downloadUpdatedBlob:
// Download updated blobs from potential nimbus nodes
public static boolean downloadUpdatedBlob(Map<String, Object> conf, BlobStore blobStore, String key, Set<NimbusInfo> nimbusInfos) throws TTransportException {
    ClientBlobStore remoteBlobStore;
    InputStreamWithMeta in;
    AtomicOutputStream out;
    boolean isSuccess = false;
    LOG.debug("Download blob NimbusInfos {}", nimbusInfos);
    for (NimbusInfo nimbusInfo : nimbusInfos) {
        if (isSuccess) {
            break;
        }
        try (NimbusClient client = new NimbusClient(conf, nimbusInfo.getHost(), nimbusInfo.getPort(), null)) {
            remoteBlobStore = new NimbusBlobStore();
            remoteBlobStore.setClient(conf, client);
            in = remoteBlobStore.getBlob(key);
            out = blobStore.updateBlob(key, getNimbusSubject());
            byte[] buffer = new byte[2048];
            int len;
            while ((len = in.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
            out.close();
            isSuccess = true;
        } catch (IOException | AuthorizationException exception) {
            throw new RuntimeException(exception);
        } catch (KeyNotFoundException knf) {
            // Catching and logging KeyNotFoundException because, if
            // there is a subsequent update and delete, the non-leader
            // nimbus nodes might throw an exception.
            LOG.info("KeyNotFoundException {}", knf);
        } catch (Exception exp) {
            // Log any other exception raised while the client is connecting
            LOG.error("Exception {}", exp);
        }
    }
    if (!isSuccess) {
        LOG.error("Could not update the blob with key: {}", key);
    }
    return isSuccess;
}
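One thing worth noting: the copy loop above closes out only on the success path, so in always leaks and out leaks if the copy throws mid-stream. A defensive variant as a sketch, assuming both stream types behave as ordinary Closeables; the real AtomicOutputStream may prefer an explicit cancel() on failure rather than a close:

// Sketch: same copy, but try-with-resources guarantees both streams close
// even if read or write throws.
try (InputStreamWithMeta in = remoteBlobStore.getBlob(key);
     AtomicOutputStream out = blobStore.updateBlob(key, getNimbusSubject())) {
    byte[] buffer = new byte[2048];
    int len;
    while ((len = in.read(buffer)) > 0) {
        out.write(buffer, 0, len);
    }
}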
Use of org.apache.thrift.transport.TTransportException in project storm by apache.
From the class ReturnResultsReducer, method complete:
@Override
public void complete(ReturnResultsState state, TridentCollector collector) {
    // only one of the multireducers will receive the tuples
    if (state.returnInfo != null) {
        String result = JSONValue.toJSONString(state.results);
        Map retMap = null;
        try {
            retMap = (Map) JSONValue.parseWithException(state.returnInfo);
        } catch (ParseException e) {
            collector.reportError(e);
            return;
        }
        final String host = (String) retMap.get("host");
        final int port = Utils.getInt(retMap.get("port"));
        String id = (String) retMap.get("id");
        DistributedRPCInvocations.Iface client;
        if (local) {
            client = (DistributedRPCInvocations.Iface) ServiceRegistry.getService(host);
        } else {
            List server = new ArrayList() {
                {
                    add(host);
                    add(port);
                }
            };
            if (!_clients.containsKey(server)) {
                try {
                    _clients.put(server, new DRPCInvocationsClient(conf, host, port));
                } catch (TTransportException ex) {
                    throw new RuntimeException(ex);
                }
            }
            client = _clients.get(server);
        }
        try {
            client.result(id, result);
        } catch (AuthorizationException aze) {
            collector.reportError(aze);
        } catch (TException e) {
            collector.reportError(e);
        }
    }
}
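The returnInfo parsed above is a small JSON map written by the DRPC server. An illustrative round-trip; the field names come from the retMap.get calls above, the concrete values are made up:

// Illustrative returnInfo payload and parse; 3773 is only an example port.
String returnInfo = "{\"host\":\"drpc1.example.com\",\"port\":3773,\"id\":\"42\"}";
Map retMap = (Map) JSONValue.parseWithException(returnInfo);
String host = (String) retMap.get("host");   // "drpc1.example.com"
int port = Utils.getInt(retMap.get("port")); // 3773
String id = (String) retMap.get("id");       // "42"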
Use of org.apache.thrift.transport.TTransportException in project hive by apache.
From the class HiveAuthFactory, method getAuthTransFactory:
public TTransportFactory getAuthTransFactory() throws LoginException {
    TTransportFactory transportFactory;
    TSaslServerTransport.Factory serverTransportFactory;
    if (isSASLWithKerberizedHadoop()) {
        try {
            serverTransportFactory = saslServer.createSaslServerTransportFactory(getSaslProperties());
        } catch (TTransportException e) {
            throw new LoginException(e.getMessage());
        }
        if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
            // no-op
        } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())
                || authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())
                || authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())
                || authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
            try {
                serverTransportFactory.addServerDefinition("PLAIN", authTypeStr, null, new HashMap<String, String>(), new PlainSaslHelper.PlainServerCallbackHandler(authTypeStr));
            } catch (AuthenticationException e) {
                throw new LoginException("Error setting callback handler: " + e);
            }
        } else {
            throw new LoginException("Unsupported authentication type " + authTypeStr);
        }
        transportFactory = saslServer.wrapTransportFactory(serverTransportFactory);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())
            || authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())
            || authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())
            || authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
        transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
        transportFactory = new TTransportFactory();
    } else {
        throw new LoginException("Unsupported authentication type " + authTypeStr);
    }
    return transportFactory;
}
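The returned factory is typically handed to a Thrift server. A hedged wiring sketch; TThreadPoolServer, TServerSocket, and TBinaryProtocol are standard libthrift classes, while the processor and port are assumptions, not taken from the snippet:

// Hypothetical wiring: plug the auth-aware transport factory into a
// thread-pool Thrift server.
static void startServer(HiveAuthFactory hiveAuthFactory, TProcessor processor)
        throws TTransportException, LoginException {
    TServerSocket serverSocket = new TServerSocket(10000); // illustrative port
    TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverSocket)
            .processor(processor)
            .transportFactory(hiveAuthFactory.getAuthTransFactory())
            .protocolFactory(new TBinaryProtocol.Factory());
    new TThreadPoolServer(args).serve();
}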