Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class NodesBean, method restartAgentsInternal:
private Map<Hosts, Future<RemoteCommandResult>> restartAgentsInternal(List<Hosts> hosts) {
  Map<Hosts, Future<RemoteCommandResult>> asyncResults = new HashMap<>(hosts.size());
  for (Hosts host : hosts) {
    try {
      Future<RemoteCommandResult> asyncResult = agentLivenessMonitor.restartAsync(host);
      asyncResults.put(host, asyncResult);
    } catch (ServiceException ex) {
      // a null value marks a host whose agent restart could not be scheduled
      asyncResults.put(host, null);
    }
  }
  return asyncResults;
}
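A minimal caller sketch (not from the Hopsworks source) for consuming the returned map: a null Future marks a host whose restart could not be scheduled, while a non-null Future is awaited with an arbitrary timeout. The Hosts.getHostname() and RemoteCommandResult.getExitCode() accessors and the 30-second timeout are assumptions for illustration; the usual java.util and java.util.concurrent imports are assumed.

private List<String> collectFailedHosts(Map<Hosts, Future<RemoteCommandResult>> asyncResults) {
  List<String> failed = new ArrayList<>();
  for (Map.Entry<Hosts, Future<RemoteCommandResult>> entry : asyncResults.entrySet()) {
    Future<RemoteCommandResult> future = entry.getValue();
    if (future == null) {
      // restart could not even be scheduled (ServiceException was caught above)
      failed.add(entry.getKey().getHostname()); // assumes Hosts exposes getHostname()
      continue;
    }
    try {
      RemoteCommandResult result = future.get(30, TimeUnit.SECONDS); // timeout is an arbitrary choice
      if (result.getExitCode() != 0) { // assumes RemoteCommandResult exposes getExitCode()
        failed.add(entry.getKey().getHostname());
      }
    } catch (InterruptedException | ExecutionException | TimeoutException ex) {
      failed.add(entry.getKey().getHostname());
    }
  }
  return failed;
}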
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class LibraryResource, method uninstall:
@ApiOperation(value = "Uninstall a python library from the environment")
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("{library}")
@AllowedProjectRoles({AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST})
@JWTRequired(acceptedTokens = {Audience.API}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER"})
@ApiKeyRequired(acceptedScopes = {ApiScope.PYTHON}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER"})
public Response uninstall(@Context SecurityContext sc, @PathParam("library") String library)
    throws ServiceException, GenericException, PythonException {
  validatePattern(library);
  Users user = jwtHelper.getUserPrincipal(sc);
  environmentController.checkCondaEnabled(project, pythonVersion, true);
  if (settings.getImmutablePythonLibraryNames().contains(library)) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.ANACONDA_DEP_REMOVE_FORBIDDEN, Level.INFO,
        "library: " + library);
  }
  environmentController.checkCondaEnvExists(project, user);
  commandsController.deleteCommands(project, library);
  libraryController.uninstallLibrary(project, user, library);
  return Response.noContent().build();
}
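A hypothetical JAX-RS client sketch showing how this DELETE endpoint might be invoked from outside the server. The base URL, resource path, and API-key header scheme are illustrative assumptions, not the documented Hopsworks REST paths.

void uninstallLibraryViaRest(String apiKey) {
  Client client = ClientBuilder.newClient();
  try {
    Response response = client
        .target("https://hopsworks.example.com/hopsworks-api/api")    // assumed base URL
        .path("project/119/python/environments/3.7/libraries/pandas") // assumed resource path
        .request(MediaType.APPLICATION_JSON)
        .header("Authorization", "ApiKey " + apiKey)                  // assumed auth scheme
        .delete();
    // a 204 No Content matches the Response.noContent() returned on success
    if (response.getStatus() != 204) {
      System.err.println("Uninstall failed with status " + response.getStatus());
    }
  } finally {
    client.close();
  }
}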
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class ProjectService, method example:
@POST
@Path("starterProject/{type}")
@Produces(MediaType.APPLICATION_JSON)
public Response example(@PathParam("type") String type, @Context HttpServletRequest req, @Context SecurityContext sc)
    throws DatasetException, GenericException, KafkaException, ProjectException, UserException, ServiceException,
    HopsSecurityException, FeaturestoreException, JobException, IOException, ElasticException, SchemaException,
    ProvenanceException {
  TourProjectType demoType;
  try {
    demoType = TourProjectType.fromString(type);
  } catch (IllegalArgumentException e) {
    throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
  }
  ProjectDTO projectDTO = new ProjectDTO();
  Project project = null;
  projectDTO.setDescription("A demo project for getting started with " + demoType.getDescription());
  Users user = jWTHelper.getUserPrincipal(sc);
  String username = user.getUsername();
  List<String> projectServices = new ArrayList<>();
  // save the project
  String readMeMessage = null;
  switch (demoType) {
    case KAFKA:
      // It's a Kafka guide
      projectDTO.setProjectName("demo_" + TourProjectType.KAFKA.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.KAFKA);
      readMeMessage = "jar file to demonstrate Kafka streaming";
      break;
    case SPARK:
      // It's a Spark guide
      projectDTO.setProjectName("demo_" + TourProjectType.SPARK.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.SPARK);
      readMeMessage = "jar file to demonstrate the creation of a spark batch job";
      break;
    case FS:
      // It's a Featurestore guide
      projectDTO.setProjectName("demo_" + TourProjectType.FS.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.FS);
      readMeMessage = "Dataset containing a jar file and data that can be used to run a sample spark-job for "
          + "inserting data in the feature store.";
      break;
    case ML:
      // It's a TensorFlow guide
      projectDTO.setProjectName("demo_" + TourProjectType.ML.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.ML);
      readMeMessage = "Jupyter notebooks and training data for demonstrating how to run Deep Learning";
      break;
    default:
      throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
  }
  projectDTO.setServices(projectServices);
  DistributedFileSystemOps dfso = null;
  DistributedFileSystemOps udfso = null;
  try {
    project = projectController.createProject(projectDTO, user, req.getSession().getId());
    dfso = dfs.getDfsOps();
    username = hdfsUsersBean.getHdfsUserName(project, user);
    udfso = dfs.getDfsOps(username);
    ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
    String tourFilesDataset = projectController.addTourFilesToProject(user.getEmail(), project, dfso, dfso,
        demoType, projectMetaStatus);
    // TestJob dataset
    datasetController.generateReadme(udfso, tourFilesDataset, readMeMessage, project.getName());
  } catch (Exception ex) {
    projectController.cleanup(project, req.getSession().getId());
    throw ex;
  } finally {
    if (dfso != null) {
      dfso.close();
    }
    if (udfso != null) {
      dfs.closeDfsClient(udfso);
    }
  }
  return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(project).build();
}
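The resource validates {type} via TourProjectType.fromString and converts an unknown value into an IllegalArgumentException. Below is a stand-in enum sketch of that parsing pattern; the constant names mirror the switch above, but the class itself and the string values are assumptions, not the real TourProjectType (java.util.Arrays is assumed imported).

public enum DemoTourType {
  KAFKA("kafka"), SPARK("spark"), FS("fs"), ML("ml");

  private final String tourName;

  DemoTourType(String tourName) {
    this.tourName = tourName;
  }

  public String getTourName() {
    return tourName;
  }

  public static DemoTourType fromString(String type) {
    for (DemoTourType t : values()) {
      if (t.tourName.equalsIgnoreCase(type)) {
        return t;
      }
    }
    // mirrors the resource method: an unknown type becomes an IllegalArgumentException
    throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(values()));
  }
}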
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class ElasticController, method globalSearchHighLevel:
public SearchHit[] globalSearchHighLevel(String searchTerm) throws ServiceException, ElasticException {
  // check if the index is up and running
  if (!elasticClientCtrl.mngIndexExists(Settings.META_INDEX)) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.ELASTIC_INDEX_NOT_FOUND, Level.SEVERE,
        "index: " + Settings.META_INDEX);
  }
  LOG.log(Level.FINE, "Found elastic index, now executing the query.");
  SearchResponse response = executeSearchQuery(globalSearchQuery(searchTerm.toLowerCase()));
  if (response.status().getStatus() == 200) {
    if (response.getHits().getHits().length > 0) {
      return response.getHits().getHits();
    }
    return new SearchHit[0];
  }
  // a non-OK status indicates a problem with elasticsearch rather than a bad query
  throw new ElasticException(RESTCodes.ElasticErrorCode.ELASTIC_QUERY_ERROR, Level.INFO,
      "Error while executing query, code: " + response.status().getStatus());
}
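A hedged caller sketch (not from the Hopsworks source) showing how the returned hits might be consumed and how the two exception types could be handled. getId() and getSourceAsMap() are standard org.elasticsearch.search.SearchHit accessors; the LOG and elasticController references and the recovery choices are assumptions.

try {
  SearchHit[] hits = elasticController.globalSearchHighLevel(searchTerm);
  for (SearchHit hit : hits) {
    // each hit carries the indexed document as a map of fields
    Map<String, Object> source = hit.getSourceAsMap();
    LOG.log(Level.FINE, "hit id: {0}, source: {1}", new Object[]{hit.getId(), source});
  }
} catch (ServiceException ex) {
  // index missing: surface as a server-side error or trigger re-indexing
  LOG.log(Level.SEVERE, "Search index not available", ex);
} catch (ElasticException ex) {
  // query-level failure reported by elasticsearch
  LOG.log(Level.WARNING, "Search query failed", ex);
}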
Use of io.hops.hopsworks.exceptions.ServiceException in project hopsworks by logicalclocks.
The class ElasticController, method datasetSearchHighLevel:
public SearchHit[] datasetSearchHighLevel(Integer projectId, String datasetName, String searchTerm)
    throws ServiceException, ElasticException {
  // check if the indices are up and running
  if (!elasticClientCtrl.mngIndexExists(Settings.META_INDEX)) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.ELASTIC_INDEX_NOT_FOUND, Level.SEVERE,
        "index: " + Settings.META_INDEX);
  }
  String dsName = datasetName;
  Project project;
  if (datasetName.contains(Settings.SHARED_FILE_SEPARATOR)) {
    // a shared dataset is addressed as <owningProject><separator><datasetName>
    String[] sharedDS = datasetName.split(Settings.SHARED_FILE_SEPARATOR);
    dsName = sharedDS[1];
    project = projectFacade.findByName(sharedDS[0]);
  } else {
    project = projectFacade.find(projectId);
  }
  Dataset dataset = datasetController.getByProjectAndDsName(project, null, dsName);
  final long datasetId = dataset.getInodeId();
  SearchResponse response = executeSearchQuery(datasetSearchQuery(datasetId, searchTerm.toLowerCase()));
  if (response.status().getStatus() == 200) {
    if (response.getHits().getHits().length > 0) {
      return response.getHits().getHits();
    }
    return new SearchHit[0];
  }
  // a non-OK status indicates a problem with elasticsearch rather than a bad query
  throw new ElasticException(RESTCodes.ElasticErrorCode.ELASTIC_QUERY_ERROR, Level.INFO,
      "Error while executing query, code: " + response.status().getStatus());
}
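A minimal standalone sketch of the shared-dataset name handling above, assuming the separator is "::"; the real value comes from Settings.SHARED_FILE_SEPARATOR and may differ.

static final String SHARED_FILE_SEPARATOR = "::"; // assumed value for illustration

static String[] resolveProjectAndDataset(Integer callerProjectId, String datasetName) {
  if (datasetName.contains(SHARED_FILE_SEPARATOR)) {
    // a shared dataset names its owning project explicitly, so the caller's projectId is ignored
    String[] parts = datasetName.split(SHARED_FILE_SEPARATOR);
    return new String[]{parts[0], parts[1]}; // {owning project name, dataset name}
  }
  // otherwise the dataset belongs to the caller's own project
  return new String[]{String.valueOf(callerProjectId), datasetName};
}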