Use of io.hops.hopsworks.exceptions.KafkaException in the hopsworks project by logicalclocks.
Example from the class ProjectService, method example.
@POST
@Path("starterProject/{type}")
@Produces(MediaType.APPLICATION_JSON)
/**
 * Creates a pre-populated "starter" (tour/demo) project of the requested type for the
 * authenticated user, including the tour files dataset and a generated README.
 *
 * @param type tour type string; must parse via {@link TourProjectType#fromString}
 * @param req  servlet request (session id is used for project creation/cleanup)
 * @param sc   security context carrying the caller's JWT principal
 * @return 201 CREATED with the created {@code Project} entity
 * @throws IllegalArgumentException if {@code type} is not a known tour type
 */
public Response example(@PathParam("type") String type, @Context HttpServletRequest req, @Context SecurityContext sc) throws DatasetException, GenericException, KafkaException, ProjectException, UserException, ServiceException, HopsSecurityException, FeaturestoreException, JobException, IOException, ElasticException, SchemaException, ProvenanceException {
TourProjectType demoType;
try {
demoType = TourProjectType.fromString(type);
} catch (IllegalArgumentException e) {
// Preserve the original exception as the cause so the offending input is not lost.
throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()), e);
}
Users user = jWTHelper.getUserPrincipal(sc);
String username = user.getUsername();
// Project name and active services depend only on demoType; the only per-tour
// difference is the README text, so the switch selects just that.
String readMeMessage;
switch(demoType) {
case KAFKA:
readMeMessage = "jar file to demonstrate Kafka streaming";
break;
case SPARK:
readMeMessage = "jar file to demonstrate the creation of a spark batch job";
break;
case FS:
readMeMessage = "Dataset containing a jar file and data that can be used to run a sample spark-job for " + "inserting data in the feature store.";
break;
case ML:
readMeMessage = "Jupyter notebooks and training data for demonstrating how to run Deep Learning";
break;
default:
// Defensive: fromString already validates, but guard against new enum values.
throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
}
ProjectDTO projectDTO = new ProjectDTO();
projectDTO.setDescription("A demo project for getting started with " + demoType.getDescription());
projectDTO.setProjectName("demo_" + demoType.getTourName() + "_" + username);
List<String> projectServices = new ArrayList<>();
populateActiveServices(projectServices, demoType);
projectDTO.setServices(projectServices);
Project project = null;
DistributedFileSystemOps dfso = null;
DistributedFileSystemOps udfso = null;
try {
project = projectController.createProject(projectDTO, user, req.getSession().getId());
dfso = dfs.getDfsOps();
// Reuse the variable for the project-scoped HDFS username from here on.
username = hdfsUsersBean.getHdfsUserName(project, user);
udfso = dfs.getDfsOps(username);
ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
// NOTE(review): dfso is passed for both filesystem arguments; confirm the
// second was not intended to be udfso (the per-user handle).
String tourFilesDataset = projectController.addTourFilesToProject(user.getEmail(), project, dfso, dfso, demoType, projectMetaStatus);
// Write the README into the tour files dataset as the project user.
datasetController.generateReadme(udfso, tourFilesDataset, readMeMessage, project.getName());
} catch (Exception ex) {
// Roll back the partially created project before propagating the failure.
projectController.cleanup(project, req.getSession().getId());
throw ex;
} finally {
if (dfso != null) {
dfso.close();
}
if (udfso != null) {
dfs.closeDfsClient(udfso);
}
}
return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(project).build();
}
Use of io.hops.hopsworks.exceptions.KafkaException in the hopsworks project by logicalclocks.
Example from the class KafkaController, method addFullPermissionAclsToTopic.
/**
 * Grants every member of the named project a full-permission (wildcard) ACL on the topic.
 *
 * @throws ProjectException if no project with {@code aclProjectName} exists
 */
private void addFullPermissionAclsToTopic(String aclProjectName, String topicName, Integer projectId) throws ProjectException, KafkaException, UserException {
Project aclProject = projectFacade.findByName(aclProjectName);
if (aclProject == null) {
throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "Could not find project: " + aclProjectName);
}
// Collect each team member's email, then add one wildcard ACL per member.
List<String> memberEmails = aclProject.getProjectTeamCollection().stream().map(member -> member.getUser().getEmail()).collect(Collectors.toList());
for (String email : memberEmails) {
addAclsToTopic(topicName, projectId, new AclDTO(aclProject.getName(), email, "allow", Settings.KAFKA_ACL_WILDCARD, Settings.KAFKA_ACL_WILDCARD, Settings.KAFKA_ACL_WILDCARD));
}
}
Use of io.hops.hopsworks.exceptions.KafkaException in the hopsworks project by logicalclocks.
Example from the class KafkaController, method removeAclFromTopic.
/**
 * Removes an ACL by id, verifying that it actually belongs to the given topic.
 *
 * @throws KafkaException if the ACL does not exist or is attached to a different topic
 */
public void removeAclFromTopic(String topicName, Integer aclId) throws KafkaException {
TopicAcls acl = topicAclsFacade.find(aclId);
if (acl == null) {
throw new KafkaException(RESTCodes.KafkaErrorCode.ACL_NOT_FOUND, Level.FINE, "topic: " + topicName);
}
// Guard against deleting an ACL that was created for another topic.
String aclTopicName = acl.getProjectTopics().getTopicName();
if (!aclTopicName.equals(topicName)) {
throw new KafkaException(RESTCodes.KafkaErrorCode.ACL_NOT_FOR_TOPIC, Level.FINE, "topic: " + topicName);
}
topicAclsFacade.remove(acl);
}
Use of io.hops.hopsworks.exceptions.KafkaException in the hopsworks project by logicalclocks.
Example from the class KafkaController, method getSubjectForTopic.
/**
 * Returns the schema subject for a topic visible to the given project. If the project
 * does not own the topic, the topic must have been shared with it, in which case the
 * lookup is retried against the topic's owning project.
 *
 * @throws KafkaException   if the topic is neither owned by nor shared with the project
 * @throws ProjectException if the sharing project no longer exists
 */
public SubjectDTO getSubjectForTopic(Project project, String topic) throws KafkaException, ProjectException {
Optional<ProjectTopics> projectTopic = projectTopicsFacade.findTopicByNameAndProject(project, topic);
if (!projectTopic.isPresent()) {
// Not owned by this project — resolve it through the sharing relationship.
SharedTopics sharedTopic = sharedTopicsFacade.findSharedTopicByProjectAndTopic(project.getId(), topic).orElseThrow(() -> new KafkaException(RESTCodes.KafkaErrorCode.TOPIC_NOT_SHARED, Level.FINE, "topic: " + topic + ", project: " + project.getName()));
Project owningProject = projectFacade.findById(sharedTopic.getProjectId()).orElseThrow(() -> new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "projectId: " + sharedTopic.getSharedTopicsPK().getProjectId()));
projectTopic = projectTopicsFacade.findTopicByNameAndProject(owningProject, topic);
}
ProjectTopics resolvedTopic = projectTopic.orElseThrow(() -> new KafkaException(RESTCodes.KafkaErrorCode.TOPIC_NOT_FOUND, Level.FINE, "project=" + project.getName() + ", topic=" + topic));
return new SubjectDTO(resolvedTopic.getSubjects());
}
Use of io.hops.hopsworks.exceptions.KafkaException in the hopsworks project by logicalclocks.
Example from the class KafkaController, method addAclsToTopic.
/**
 * Adds (or finds an existing) ACL on a topic for a specific user of the selected project.
 *
 * @return the ACL paired with {@code OK} if it already existed, or {@code CREATED} if new
 * @throws IllegalArgumentException if {@code topicName} or {@code userEmail} is missing
 * @throws ProjectException         if the owning or selected project does not exist
 * @throws KafkaException           if the topic does not exist, or a wildcard user is given
 * @throws UserException            if no user matches {@code userEmail}
 */
private Pair<TopicAcls, Response.Status> addAclsToTopic(String topicName, Integer projectId, String selectedProjectName, String userEmail, String permissionType, String operationType, String host, String role) throws ProjectException, KafkaException, UserException {
if (Strings.isNullOrEmpty(topicName) || userEmail == null) {
throw new IllegalArgumentException("Topic and userEmail must be provided.");
}
// Resolve the project that owns the topic.
Project topicOwnerProject = Optional.ofNullable(projectFacade.find(projectId)).orElseThrow(() -> new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "projectId: " + projectId));
// When the ACL targets a project other than the owner, that project must exist.
// (Fixed message: a space was missing between "topic" and the topic name.)
if (!topicOwnerProject.getName().equals(selectedProjectName) && projectFacade.findByName(selectedProjectName) == null) {
throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_NOT_FOUND, Level.FINE, "The specified project " + "for the topic " + topicName + " was not found");
}
ProjectTopics pt = projectTopicsFacade.findTopicByNameAndProject(topicOwnerProject, topicName).orElseThrow(() -> new KafkaException(RESTCodes.KafkaErrorCode.TOPIC_NOT_FOUND, Level.FINE, "Topic: " + topicName));
// Reject the wildcard user: callers must not create ACLs for every user at once.
if (userEmail.equals("*")) {
throw new KafkaException(RESTCodes.KafkaErrorCode.ACL_FOR_ANY_USER, Level.FINE, "topic: " + topicName);
}
// Fetch the user from the database by email.
Users user = Optional.ofNullable(userFacade.findByEmail(userEmail)).orElseThrow(() -> new UserException(RESTCodes.UserErrorCode.USER_WAS_NOT_FOUND, Level.FINE, "user: " + userEmail));
String principalName = KafkaConst.buildPrincipalName(selectedProjectName, user.getUsername());
// Idempotency: return the existing ACL (with OK) instead of creating a duplicate.
Optional<TopicAcls> optionalAcl = topicAclsFacade.getTopicAcls(topicName, principalName, permissionType, operationType, host, role);
if (optionalAcl.isPresent()) {
return Pair.of(optionalAcl.get(), Response.Status.OK);
}
TopicAcls acl = topicAclsFacade.addAclsToTopic(pt, user, permissionType, operationType, host, role, principalName);
return Pair.of(acl, Response.Status.CREATED);
}
Aggregations