Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class FlinkController, method startJob.
public Execution startJob(final Jobs job, final Users user)
    throws GenericException, JobException, ServiceException {
  // First: some parameter checking.
  if (job == null) {
    throw new NullPointerException("Cannot run a null job.");
  } else if (user == null) {
    throw new NullPointerException("Cannot run a job as a null user.");
  } else if (job.getJobType() != JobType.FLINK) {
    throw new IllegalArgumentException("Job configuration is not a Flink job configuration.");
  }
  // Set the Hopsworks consul service domain; use the service name, not the address.
  String username = hdfsUsersBean.getHdfsUserName(job.getProject(), user);
  FlinkJob flinkjob = null;
  try {
    String hopsworksRestEndpoint = "https://" + serviceDiscoveryController
        .constructServiceFQDNWithPort(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
    UserGroupInformation proxyUser = ugiService.getProxyUser(username);
    try {
      flinkjob = proxyUser.doAs((PrivilegedExceptionAction<FlinkJob>) () ->
          new FlinkJob(job, submitter, user,
              hdfsUsersBean.getHdfsUserName(job.getProject(), job.getCreator()),
              settings, kafkaBrokers.getKafkaBrokersString(), hopsworksRestEndpoint,
              servingConfig, serviceDiscoveryController));
    } catch (InterruptedException ex) {
      LOGGER.log(Level.SEVERE, null, ex);
    }
  } catch (IOException ex) {
    throw new JobException(RESTCodes.JobErrorCode.PROXY_ERROR, Level.SEVERE,
        "job: " + job.getId() + ", user:" + user.getUsername(), ex.getMessage(), ex);
  } catch (ServiceDiscoveryException ex) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_NOT_FOUND, Level.SEVERE,
        "job: " + job.getId() + ", user:" + user.getUsername(), ex.getMessage(), ex);
  }
  if (flinkjob == null) {
    throw new GenericException(RESTCodes.GenericErrorCode.UNKNOWN_ERROR, Level.WARNING,
        "Could not instantiate job with name: " + job.getName() + " and id: " + job.getId(),
        "flinkjob object was null");
  }
  Execution execution = flinkjob.requestExecutionId();
  submitter.startExecution(flinkjob);
  activityFacade.persistActivity(ActivityFacade.RAN_JOB, job.getProject(), user.asUser(), ActivityFlag.JOB);
  return execution;
}
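Callers of startJob have to handle three checked exceptions, each tied to a specific RESTCodes error in the method above. A minimal, hypothetical caller sketch follows; flinkController, job, user and LOGGER are assumed to be in scope and are not part of the excerpt.

try {
  Execution execution = flinkController.startJob(job, user);
  LOGGER.log(Level.INFO, "Started Flink execution with id: {0}", execution.getId());
} catch (ServiceException ex) {
  // SERVICE_NOT_FOUND: service discovery could not resolve the Hopsworks endpoint.
  LOGGER.log(Level.SEVERE, "Service discovery failed", ex);
} catch (JobException ex) {
  // PROXY_ERROR: the HDFS proxy user could not be created.
  LOGGER.log(Level.SEVERE, "Could not impersonate the user", ex);
} catch (GenericException ex) {
  // UNKNOWN_ERROR: the FlinkJob object could not be instantiated.
  LOGGER.log(Level.SEVERE, "Could not instantiate the Flink job", ex);
}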
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class SecretsController, method getShared.
/**
 * Gets a decrypted shared Secret depending on its Visibility. It throws an exception
 * if the Visibility was set to PRIVATE or the caller is not a member of the Project
 * the Secret is shared with.
 *
 * @param caller the user who requested the Secret
 * @param ownerUser the owner of the Secret
 * @param secretName identifier of the Secret
 * @return the decrypted Secret
 * @throws UserException
 * @throws ServiceException
 * @throws ProjectException
 */
public SecretPlaintext getShared(Users caller, Users ownerUser, String secretName)
    throws UserException, ServiceException, ProjectException {
  checkIfUserIsNull(caller);
  checkIfNameIsNullOrEmpty(secretName);
  checkIfUserIsNull(ownerUser);
  Secret storedSecret = secretsFacade.findById(new SecretId(ownerUser.getUid(), secretName));
  checkIfSecretIsNull(storedSecret, secretName, ownerUser);
  if (storedSecret.getVisibilityType() == null
      || storedSecret.getVisibilityType().equals(VisibilityType.PRIVATE)) {
    throw new UserException(RESTCodes.UserErrorCode.ACCESS_CONTROL, Level.FINE, "Secret is Private",
        "User " + caller.getUsername() + " requested PRIVATE secret <" + ownerUser.getUid() + ", " + secretName + ">");
  }
  Integer projectId = storedSecret.getProjectIdScope();
  if (projectId == null) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_GENERIC_ERROR, Level.WARNING,
        "Visibility's Project ID is empty",
        "Secret " + secretName + " visibility is PROJECT but Project ID is null");
  }
  Project project = projectFacade.find(projectId);
  if (project == null) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE,
        "Project with ID " + projectId + " does not exist!",
        "User " + caller.getUsername() + " requested shared Secret " + secretName
            + " but Project with ID " + projectId + " does not exist");
  }
  // Check if the caller is a member of the Project the Secret is shared with
  for (ProjectTeam projectTeam : project.getProjectTeamCollection()) {
    if (caller.getUid().equals(projectTeam.getUser().getUid())) {
      try {
        return decrypt(ownerUser, storedSecret);
      } catch (IOException | GeneralSecurityException ex) {
        throw new UserException(RESTCodes.UserErrorCode.SECRET_ENCRYPTION_ERROR, Level.SEVERE,
            "Error decrypting Secret", "Could not decrypt Secret " + secretName, ex);
      }
    }
  }
  // The caller is not a member of the Project the Secret is shared with
  throw new UserException(RESTCodes.UserErrorCode.ACCESS_CONTROL, Level.FINE,
      "Not authorized to access Secret " + secretName,
      "User " + caller.getUsername() + " tried to access shared Secret " + secretName
          + " but they are not a member of Project " + project.getName());
}
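The visibility and membership checks above translate into three distinct failure modes for a caller. A minimal, hypothetical caller sketch; secretsController, caller and ownerUser are assumed to be in scope, the secret name is a placeholder, and getPlaintext() is an assumed accessor that may differ from the actual SecretPlaintext API.

try {
  SecretPlaintext plaintext = secretsController.getShared(caller, ownerUser, "jdbc_password");
  // Use the decrypted value (accessor name is an assumption for this sketch).
  String value = plaintext.getPlaintext();
} catch (UserException ex) {
  // The Secret is PRIVATE, the caller is not a member of the Project, or decryption failed.
} catch (ServiceException ex) {
  // The Secret is PROJECT-visible but its Project ID scope is missing.
} catch (ProjectException ex) {
  // The Project the Secret is scoped to no longer exists.
}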
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class ClusterUtilisationService, method metrics.
@GET
@Path("/metrics")
@Produces(MediaType.APPLICATION_JSON)
public Response metrics() throws ServiceException {
  Service rm = null;
  try {
    rm = serviceDiscoveryController.getAnyAddressOfServiceWithDNS(
        ServiceDiscoveryController.HopsworksService.HTTPS_RESOURCEMANAGER);
  } catch (ServiceDiscoveryException e) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_DISCOVERY_ERROR, Level.FINE);
  }
  HttpHost rmHost = new HttpHost(rm.getAddress(), rm.getPort(), "https");
  HttpGet getRequest = new HttpGet(METRICS_ENDPOINT);
  // defined as string as we don't really need to look inside it
  String response = null;
  try {
    response = httpClient.execute(rmHost, getRequest, new HttpClient.StringResponseHandler());
  } catch (IOException e) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.RM_METRICS_ERROR, Level.FINE);
  }
  JSONObject jsonObject = new JSONObject(response);
  jsonObject.put("deploying", hostsFacade.countUnregistered());
  return Response.ok().entity(jsonObject.toString()).build();
}
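The endpoint proxies the YARN ResourceManager metrics and adds a "deploying" count of hosts that have not yet registered. It can be exercised end to end with a plain HTTP client; a minimal standalone sketch, where the host name, REST path and API key are placeholders that depend on the installation:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ClusterMetricsClientSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder host, path and API key; adjust them to your Hopsworks installation.
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("https://hopsworks.example.com/hopsworks-api/api/clusterUtilisation/metrics"))
        .header("Authorization", "ApiKey " + System.getenv("HOPSWORKS_API_KEY"))
        .GET()
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    // The body is the ResourceManager metrics JSON, extended with the "deploying" count.
    System.out.println(response.body());
  }
}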
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class FeaturegroupController, method convertFeaturegrouptoDTO.
/**
 * Convert a featuregroup entity to a DTO representation.
 *
 * @param featuregroup the entity to convert
 * @param project the project making the request
 * @param user the user making the request
 * @return a DTO representation of the entity
 * @throws FeaturestoreException
 * @throws ServiceException
 */
private FeaturegroupDTO convertFeaturegrouptoDTO(Featuregroup featuregroup, Project project, Users user)
    throws FeaturestoreException, ServiceException {
  String featurestoreName = featurestoreFacade.getHiveDbName(featuregroup.getFeaturestore().getHiveDbId());
  switch (featuregroup.getFeaturegroupType()) {
    case CACHED_FEATURE_GROUP:
      CachedFeaturegroupDTO cachedFeaturegroupDTO =
          cachedFeaturegroupController.convertCachedFeaturegroupToDTO(featuregroup, project, user);
      cachedFeaturegroupDTO.setFeaturestoreName(featurestoreName);
      return cachedFeaturegroupDTO;
    case ON_DEMAND_FEATURE_GROUP:
      FeaturestoreStorageConnectorDTO storageConnectorDTO = connectorController.convertToConnectorDTO(
          user, project, featuregroup.getOnDemandFeaturegroup().getFeaturestoreConnector());
      OnDemandFeaturegroupDTO onDemandFeaturegroupDTO =
          new OnDemandFeaturegroupDTO(featurestoreName, featuregroup, storageConnectorDTO);
      try {
        String path = getFeatureGroupLocation(featuregroup);
        String location = featurestoreUtils.prependNameNode(path);
        onDemandFeaturegroupDTO.setLocation(location);
      } catch (ServiceDiscoveryException e) {
        throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_NOT_FOUND, Level.SEVERE);
      }
      return onDemandFeaturegroupDTO;
    default:
      throw new IllegalArgumentException(RESTCodes.FeaturestoreErrorCode.ILLEGAL_FEATUREGROUP_TYPE.getMessage()
          + ", Recognized Feature group types are: " + FeaturegroupType.ON_DEMAND_FEATURE_GROUP
          + ", and: " + FeaturegroupType.CACHED_FEATURE_GROUP
          + ". The provided feature group type was not recognized: " + featuregroup.getFeaturegroupType());
  }
}
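The conversion is typically applied to every feature group of a feature store before returning results to a client. A minimal sketch of such a loop; featuregroupFacade.findByFeaturestore is a hypothetical lookup, and featurestore, project and user are assumed to be in scope.

List<FeaturegroupDTO> featuregroupDTOs = new ArrayList<>();
for (Featuregroup featuregroup : featuregroupFacade.findByFeaturestore(featurestore)) {
  // Each entity is converted according to its type (cached or on-demand), as shown above.
  featuregroupDTOs.add(convertFeaturegrouptoDTO(featuregroup, project, user));
}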
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class OfflineFeatureGroupController, method openMetastoreClient.
// Here we can't use the HiveMetaStoreClient.java wrapper, as we would need to export environment variables and so on;
// instead we assemble the Thrift client directly, which is what HiveMetaStoreClient does behind the scenes.
private ThriftHiveMetastore.Client openMetastoreClient(Project project, Users user)
    throws ServiceException, IOException {
  String hdfsUsername = hdfsUsersController.getHdfsUserName(project, user);
  ThriftHiveMetastore.Client client = null;
  try {
    certificateMaterializer.materializeCertificatesLocal(user.getUsername(), project.getName());
    CertificateMaterializer.CryptoMaterial userMaterial =
        certificateMaterializer.getUserMaterial(user.getUsername(), project.getName());
    // read Password
    String password = String.copyValueOf(userMaterial.getPassword());
    // Get metastore service information from consul
    Service metastoreService = serviceDiscoveryController.getAnyAddressOfServiceWithDNS(
        ServiceDiscoveryController.HopsworksService.HIVE_METASTORE);
    TTransport transport;
    if (settings.getHopsRpcTls()) {
      // Setup secure connection with the Hive metastore.
      TSSLTransportFactory.TSSLTransportParameters params =
          new TSSLTransportFactory.TSSLTransportParameters();
      params.setTrustStore(certificateMaterializer.getUserTransientTruststorePath(project, user), password);
      params.setKeyStore(certificateMaterializer.getUserTransientKeystorePath(project, user), password);
      transport = TSSLTransportFactory.getClientSocket(metastoreService.getAddress(),
          metastoreService.getPort(), CONNECTION_TIMEOUT, params);
    } else {
      transport = new TSocket(TConfiguration.DEFAULT, metastoreService.getAddress(),
          metastoreService.getPort(), CONNECTION_TIMEOUT);
    }
    TProtocol protocol = new TBinaryProtocol(transport);
    client = new ThriftHiveMetastore.Client(protocol);
    // Open transport
    if (!transport.isOpen()) {
      transport.open();
    }
    // Set the UGI on the metastore side
    client.set_ugi(hdfsUsername, new ArrayList<>());
    if (settings.getHopsRpcTls()) {
      // Send the certificate to the metastore so it can operate with the fs.
      client.set_crypto(userMaterial.getKeyStore(), password, userMaterial.getTrustStore(), password, false);
    }
  } catch (CryptoPasswordNotFoundException | ServiceDiscoveryException | TException e) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.METASTORE_CONNECTION_ERROR, Level.SEVERE,
        "Hive metastore connection error", e.getMessage(), e);
  }
  return client;
}
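openMetastoreClient only opens the connection; the caller performs the actual metastore operation and is responsible for closing the underlying Thrift transport. A minimal, hypothetical usage sketch, where project, user, dbName and tableName are assumed to be in scope:

ThriftHiveMetastore.Client client = null;
try {
  client = openMetastoreClient(project, user);
  // Example metastore call: fetch a table definition from the project's Hive database.
  Table table = client.get_table(dbName, tableName);
} catch (TException e) {
  throw new ServiceException(RESTCodes.ServiceErrorCode.METASTORE_CONNECTION_ERROR, Level.SEVERE,
      "Hive metastore operation failed", e.getMessage(), e);
} finally {
  if (client != null) {
    // Close the Thrift transport opened by openMetastoreClient.
    client.getInputProtocol().getTransport().close();
  }
}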