Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
The class KafkaBrokers, method getBrokerEndpoints.
@Lock(LockType.READ)
public Set<String> getBrokerEndpoints() throws IOException, KeeperException, InterruptedException {
  try {
    String zkConnectionString = getZookeeperConnectionString();
    final ZooKeeper zk = new ZooKeeper(zkConnectionString, Settings.ZOOKEEPER_SESSION_TIMEOUT_MS,
        new Watcher() {
          @Override
          public void process(WatchedEvent watchedEvent) {
            // NOOP
          }
        });
    try {
      return zk.getChildren("/brokers/ids", false).stream()
          .map(bi -> getBrokerInfo(zk, bi))
          .filter(StringUtils::isNoneEmpty)
          .map(bi -> bi.split(KafkaConst.DLIMITER))
          .flatMap(Arrays::stream)
          .filter(this::isValidBrokerInfo)
          .collect(Collectors.toSet());
    } finally {
      zk.close();
    }
  } catch (ServiceDiscoveryException ex) {
    throw new IOException(ex);
  } catch (RuntimeException ex) {
    if (ex.getCause() instanceof KeeperException) {
      throw (KeeperException) ex.getCause();
    }
    if (ex.getCause() instanceof InterruptedException) {
      throw (InterruptedException) ex.getCause();
    }
    throw ex;
  }
}
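The getBrokerInfo helper called inside the stream is not shown on this page. A minimal sketch of what it might look like, assuming it reads each broker's registration znode under /brokers/ids (where Kafka brokers publish their endpoints) and wraps checked ZooKeeper exceptions in RuntimeException, which would explain the unwrapping in the catch block above. It also assumes java.nio.charset.StandardCharsets is imported; this is illustrative, not the hopsworks implementation.

// Hypothetical helper, for illustration only.
private String getBrokerInfo(ZooKeeper zk, String brokerId) {
  try {
    // Kafka brokers register their endpoint data under /brokers/ids/<id>.
    byte[] data = zk.getData("/brokers/ids/" + brokerId, false, null);
    return data == null ? "" : new String(data, StandardCharsets.UTF_8);
  } catch (KeeperException | InterruptedException ex) {
    // Wrapping checked exceptions keeps the stream pipeline clean;
    // getBrokerEndpoints unwraps them from the RuntimeException cause.
    throw new RuntimeException(ex);
  }
}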
Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
The class LivyController, method deleteLivySession.
/**
 * Delete the Livy session with the given id.
 *
 * @param sessionId the id of the Livy session to delete
 * @return the HTTP status code of the delete request: 404 if the session does not exist,
 *         500 if the Livy service could not be discovered
 */
public int deleteLivySession(int sessionId) {
  Client client = ClientBuilder.newClient();
  Response res;
  try {
    WebTarget target = client.target(getLivyURL()).path("/sessions/" + sessionId);
    res = target.request().delete();
  } catch (ServiceDiscoveryException ex) {
    LOGGER.log(Level.SEVERE, null, ex);
    return Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
  } catch (NotFoundException e) {
    return Response.Status.NOT_FOUND.getStatusCode();
  } finally {
    client.close();
  }
  return res.getStatus();
}
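A hypothetical call site, to illustrate how the status-code return value might be consumed; the receiver name livyController is illustrative, not from the hopsworks source.

// 404 means the session is already gone, which a caller may treat as success;
// 500 signals that the Livy service could not be discovered.
int status = livyController.deleteLivySession(sessionId);
if (status == Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()) {
  throw new IllegalStateException("Livy service unavailable");
}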
Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
The class FeaturestoreController, method createOfflineJdbcConnector.
public FeaturestoreStorageConnectorDTO createOfflineJdbcConnector(String databaseName)
    throws FeaturestoreException {
  String hiveEndpoint = "";
  try {
    hiveEndpoint = serviceDiscoveryController.constructServiceFQDNWithPort(
        ServiceDiscoveryController.HopsworksService.HIVE_SERVER_TLS);
  } catch (ServiceDiscoveryException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.CONNECTOR_NOT_FOUND, Level.SEVERE,
        "Could not create Hive connection string", e.getMessage(), e);
  }
  String connectionString = HiveController.HIVE_JDBC_PREFIX + hiveEndpoint + "/" + databaseName
      + ";auth=noSasl;ssl=true;twoWay=true;";
  List<OptionDTO> arguments = FeaturestoreConstants.OFFLINE_JDBC_CONNECTOR_ARGS.stream()
      .map(arg -> new OptionDTO(arg, null))
      .collect(Collectors.toList());
  FeaturestoreJdbcConnectorDTO featurestoreJdbcConnectorDTO = new FeaturestoreJdbcConnectorDTO();
  featurestoreJdbcConnectorDTO.setStorageConnectorType(FeaturestoreConnectorType.JDBC);
  featurestoreJdbcConnectorDTO.setName(databaseName);
  featurestoreJdbcConnectorDTO.setDescription("JDBC connector for the Offline Feature Store");
  featurestoreJdbcConnectorDTO.setConnectionString(connectionString);
  featurestoreJdbcConnectorDTO.setArguments(arguments);
  return featurestoreJdbcConnectorDTO;
}
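For a sense of the output, assuming HiveController.HIVE_JDBC_PREFIX is the standard jdbc:hive2:// prefix (the constant's value is not shown on this page) and using hypothetical endpoint and database values, the connector would carry a connection string of this shape:

// Hypothetical values, for illustration only.
// databaseName  = "demo_featurestore"
// hiveEndpoint  = "hive.example.com:9085"  (resolved via service discovery)
// connectionString:
//   jdbc:hive2://hive.example.com:9085/demo_featurestore;auth=noSasl;ssl=true;twoWay=true;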
Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
The class SparkController, method createSparkJob.
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
private SparkJob createSparkJob(String username, Jobs job, Users user)
    throws JobException, GenericException, ServiceException {
  SparkJob sparkjob = null;
  try {
    // Set Hopsworks consul service domain, don't use the address, use the name
    String hopsworksRestEndpoint = "https://" + serviceDiscoveryController.constructServiceFQDNWithPort(
        ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
    UserGroupInformation proxyUser = ugiService.getProxyUser(username);
    try {
      sparkjob = proxyUser.doAs((PrivilegedExceptionAction<SparkJob>) () ->
          new SparkJob(job, submitter, user, settings.getHadoopSymbolicLinkDir(),
              hdfsUsersBean.getHdfsUserName(job.getProject(), user), settings,
              kafkaBrokers.getKafkaBrokersString(), hopsworksRestEndpoint, servingConfig,
              serviceDiscoveryController));
    } catch (InterruptedException ex) {
      LOGGER.log(Level.SEVERE, null, ex);
    }
  } catch (IOException ex) {
    throw new JobException(RESTCodes.JobErrorCode.PROXY_ERROR, Level.SEVERE,
        "job: " + job.getId() + ", user:" + user.getUsername(), ex.getMessage(), ex);
  } catch (ServiceDiscoveryException ex) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_NOT_FOUND, Level.SEVERE,
        "job: " + job.getId() + ", user:" + user.getUsername(), ex.getMessage(), ex);
  }
  if (sparkjob == null) {
    throw new GenericException(RESTCodes.GenericErrorCode.UNKNOWN_ERROR, Level.WARNING,
        "Could not instantiate job with name: " + job.getName() + " and id: " + job.getId(),
        "sparkjob object was null");
  }
  return sparkjob;
}
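The proxy-user pattern above comes from Hadoop's org.apache.hadoop.security.UserGroupInformation API. A minimal standalone sketch of the same doAs idiom, assuming a Hadoop login user is available; the ugiService wrapper in the method above is hopsworks-specific and not shown here.

// Run an action as "someuser" while authenticating as the service's login user.
UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
    "someuser", UserGroupInformation.getLoginUser());
String whoami = proxyUser.doAs(
    (PrivilegedExceptionAction<String>) () ->
        UserGroupInformation.getCurrentUser().getUserName());
// whoami is "someuser": code inside doAs sees the proxied identity.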
Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
The class JupyterController, method convertIPythonNotebook.
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public String convertIPythonNotebook(String hdfsUsername, String notebookPath, Project project,
    String pyPath, NotebookConversion notebookConversion) throws ServiceException {
  File baseDir = new File(settings.getStagingDir() + settings.CONVERSION_DIR);
  if (!baseDir.exists()) {
    baseDir.mkdir();
  }
  File conversionDir = new File(baseDir,
      DigestUtils.sha256Hex(Integer.toString(ThreadLocalRandom.current().nextInt())));
  conversionDir.mkdir();
  HdfsUsers user = hdfsUsersFacade.findByName(hdfsUsername);
  try {
    String prog = settings.getSudoersDir() + "/convert-ipython-notebook.sh";
    ProcessDescriptor processDescriptor = new ProcessDescriptor.Builder()
        .addCommand("/usr/bin/sudo")
        .addCommand(prog)
        .addCommand(notebookPath)
        .addCommand(hdfsUsername)
        .addCommand(settings.getAnacondaProjectDir())
        .addCommand(pyPath)
        .addCommand(conversionDir.getAbsolutePath())
        .addCommand(notebookConversion.name())
        .addCommand(projectUtils.getFullDockerImageName(project, true))
        // on a TLS VM the timeout needs to be greater than 20s
        .setWaitTimeout(60L, TimeUnit.SECONDS)
        .redirectErrorStream(true)
        .build();
    LOGGER.log(Level.FINE, processDescriptor.toString());
    certificateMaterializer.materializeCertificatesLocalCustomDir(user.getUsername(), project.getName(),
        conversionDir.getAbsolutePath());
    ProcessResult processResult = osProcessExecutor.execute(processDescriptor);
    if (!processResult.processExited() || processResult.getExitCode() != 0) {
      LOGGER.log(Level.WARNING, "error code: " + processResult.getExitCode(),
          "Failed to convert " + notebookPath + "\nstderr: " + processResult.getStderr()
              + "\nstdout: " + processResult.getStdout());
      throw new ServiceException(RESTCodes.ServiceErrorCode.IPYTHON_CONVERT_ERROR, Level.SEVERE,
          "error code: " + processResult.getExitCode(),
          "Failed to convert " + notebookPath + "\nstderr: " + processResult.getStderr()
              + "\nstdout: " + processResult.getStdout());
    }
    String stdOut = processResult.getStdout();
    if (!Strings.isNullOrEmpty(stdOut) && notebookConversion.equals(NotebookConversion.HTML)) {
      // Return only the rendered document: drop anything the conversion
      // script printed to stdout before the opening <html> tag.
      StringBuilder renderedNotebookSB = new StringBuilder(stdOut);
      int startIndex = renderedNotebookSB.indexOf("<html>");
      int stopIndex = renderedNotebookSB.length();
      return renderedNotebookSB.substring(startIndex, stopIndex);
    }
    return null;
  } catch (IOException | ServiceDiscoveryException ex) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.IPYTHON_CONVERT_ERROR, Level.SEVERE, null,
        ex.getMessage(), ex);
  } finally {
    certificateMaterializer.removeCertificatesLocalCustomDir(user.getUsername(), project.getName(),
        conversionDir.getAbsolutePath());
  }
}
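A hypothetical call site for the method above; the username, paths, and receiver name are illustrative, not from the hopsworks source. NotebookConversion.HTML is the branch that returns the rendered document; other conversions return null.

// Convert a notebook to HTML and obtain the rendered document.
String html = jupyterController.convertIPythonNotebook(
    "demo__meb10000",                            // hypothetical HDFS username
    "/Projects/demo/Jupyter/analysis.ipynb",     // hypothetical notebook path
    project,
    "/srv/hops/anaconda/envs/demo/bin/python",   // hypothetical python binary
    NotebookConversion.HTML);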