Use of io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO in project hopsworks by logicalclocks.
Example from the class ProjectService, method example.
@POST
@Path("starterProject/{type}")
@Produces(MediaType.APPLICATION_JSON)
public Response example(@PathParam("type") String type, @Context HttpServletRequest req,
    @Context SecurityContext sc)
    throws DatasetException, GenericException, KafkaException, ProjectException, UserException,
    ServiceException, HopsSecurityException, FeaturestoreException, JobException, IOException,
    ElasticException, SchemaException, ProvenanceException {
  TourProjectType demoType;
  try {
    demoType = TourProjectType.fromString(type);
  } catch (IllegalArgumentException e) {
    throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
  }
  ProjectDTO projectDTO = new ProjectDTO();
  Project project = null;
  projectDTO.setDescription("A demo project for getting started with " + demoType.getDescription());
  Users user = jWTHelper.getUserPrincipal(sc);
  String username = user.getUsername();
  List<String> projectServices = new ArrayList<>();
  // save the project
  String readMeMessage = null;
  switch (demoType) {
    case KAFKA:
      // It's a Kafka guide
      projectDTO.setProjectName("demo_" + TourProjectType.KAFKA.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.KAFKA);
      readMeMessage = "jar file to demonstrate Kafka streaming";
      break;
    case SPARK:
      // It's a Spark guide
      projectDTO.setProjectName("demo_" + TourProjectType.SPARK.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.SPARK);
      readMeMessage = "jar file to demonstrate the creation of a spark batch job";
      break;
    case FS:
      // It's a Featurestore guide
      projectDTO.setProjectName("demo_" + TourProjectType.FS.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.FS);
      readMeMessage = "Dataset containing a jar file and data that can be used to run a sample spark-job for "
          + "inserting data in the feature store.";
      break;
    case ML:
      // It's a TensorFlow guide
      projectDTO.setProjectName("demo_" + TourProjectType.ML.getTourName() + "_" + username);
      populateActiveServices(projectServices, TourProjectType.ML);
      readMeMessage = "Jupyter notebooks and training data for demonstrating how to run Deep Learning";
      break;
    default:
      throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
  }
  projectDTO.setServices(projectServices);
  DistributedFileSystemOps dfso = null;
  DistributedFileSystemOps udfso = null;
  try {
    project = projectController.createProject(projectDTO, user, req.getSession().getId());
    dfso = dfs.getDfsOps();
    username = hdfsUsersBean.getHdfsUserName(project, user);
    udfso = dfs.getDfsOps(username);
    ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
    String tourFilesDataset = projectController.addTourFilesToProject(user.getEmail(), project, dfso, dfso,
        demoType, projectMetaStatus);
    // TestJob dataset
    datasetController.generateReadme(udfso, tourFilesDataset, readMeMessage, project.getName());
  } catch (Exception ex) {
    projectController.cleanup(project, req.getSession().getId());
    throw ex;
  } finally {
    if (dfso != null) {
      dfso.close();
    }
    if (udfso != null) {
      dfs.closeDfsClient(udfso);
    }
  }
  return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(project).build();
}
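A minimal sketch of how a client might invoke the endpoint above with the standard JAX-RS client API. The base URL, the "project" resource prefix, and the bearer token are assumptions, not taken from the Hopsworks source; only the starterProject/{type} path, the POST verb, and the JSON response come from the resource method shown above.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class StarterProjectClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    Response response = client
        // hypothetical Hopsworks host and API prefix
        .target("https://hopsworks.example.com/hopsworks-api/api")
        // hypothetical resource prefix; "spark" is parsed server-side by TourProjectType.fromString
        .path("project/starterProject/spark")
        .request(MediaType.APPLICATION_JSON)
        // hypothetical JWT; the resource resolves the user from the SecurityContext
        .header("Authorization", "Bearer <token>")
        .post(Entity.json(""));
    // the resource returns 201 CREATED with the new Project entity as JSON
    System.out.println(response.getStatus() + ": " + response.readEntity(String.class));
    client.close();
  }
}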
Use of io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO in project hopsworks by logicalclocks.
Example from the class ProjectController, method createProject.
/**
 * Creates a new project (project), the related DIR, the different services in the project,
 * and the master of the project.
 * <p>
 * This needs to be an atomic operation (all or nothing): REQUIRES_NEW will make sure a new
 * transaction is created even if this method is called from within a transaction.
 *
 * @param projectDTO
 * @param owner
 * @param sessionId
 * @return
 */
public Project createProject(ProjectDTO projectDTO, Users owner, String sessionId)
    throws DatasetException, GenericException, KafkaException, ProjectException, UserException,
    HopsSecurityException, ServiceException, FeaturestoreException, ElasticException, SchemaException,
    IOException {
  Long startTime = System.currentTimeMillis();
  // check that the project name is ok
  String projectName = projectDTO.getProjectName();
  FolderNameValidator.isValidProjectName(projectUtils, projectName);
  List<ProjectServiceEnum> projectServices = new ArrayList<>();
  if (projectDTO.getServices() != null) {
    for (String s : projectDTO.getServices()) {
      ProjectServiceEnum se = ProjectServiceEnum.valueOf(s.toUpperCase());
      projectServices.add(se);
    }
  }
  LOGGER.log(Level.FINE, () -> "PROJECT CREATION TIME. Step 1: " + (System.currentTimeMillis() - startTime));
  DistributedFileSystemOps dfso = null;
  Project project = null;
  try {
    dfso = dfs.getDfsOps();
    /*
     * create a project in the database.
     * if the creation goes through it means that there is no other project with the same name.
     * this project creation acts like a lock: no other project can be created with the same name
     * until this project is removed from the database.
     */
    try {
      project = createProject(projectName, owner, projectDTO.getDescription(), dfso);
    } catch (EJBException ex) {
      LOGGER.log(Level.WARNING, null, ex);
      Path dummy = new Path("/tmp/" + projectName);
      try {
        dfso.rm(dummy, true);
      } catch (IOException e) {
        LOGGER.log(Level.SEVERE, null, e);
      }
      throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_EXISTS, Level.SEVERE,
          "project: " + projectName, ex.getMessage(), ex);
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 2 (hdfs): {0}", System.currentTimeMillis() - startTime);
    verifyProject(project, dfso, sessionId);
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 3 (verify): {0}", System.currentTimeMillis() - startTime);
    // Run the pre-create handlers.
    try {
      ProjectHandler.runProjectPreCreateHandlers(projectHandlers, project);
    } catch (ProjectException ex) {
      cleanup(project, sessionId, null, true, owner);
      throw ex;
    }
    List<Future<?>> projectCreationFutures = new ArrayList<>();
    // This is an async call
    try {
      projectCreationFutures.add(certificatesController.generateCertificates(project, owner));
    } catch (Exception ex) {
      cleanup(project, sessionId, projectCreationFutures, true, owner);
      throw new HopsSecurityException(RESTCodes.SecurityErrorCode.CERT_CREATION_ERROR, Level.SEVERE,
          "project: " + project.getName() + "owner: " + owner.getUsername(), ex.getMessage(), ex);
    }
    String username = hdfsUsersController.getHdfsUserName(project, owner);
    if (username == null || username.isEmpty()) {
      cleanup(project, sessionId, projectCreationFutures, true, owner);
      throw new UserException(RESTCodes.UserErrorCode.USER_WAS_NOT_FOUND, Level.SEVERE,
          "project: " + project.getName() + "owner: " + owner.getUsername());
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 4 (certs): {0}", System.currentTimeMillis() - startTime);
    // all the verifications have passed, we can now create the project
    // create the project folder
    ProvTypeDTO provType = settings.getProvType().dto;
    try {
      mkProjectDIR(projectName, dfso);
      fsProvController.updateProjectProvType(project, provType, dfso);
    } catch (IOException | EJBException | ProvenanceException ex) {
      cleanup(project, sessionId, projectCreationFutures, true, owner);
      throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_FOLDER_NOT_CREATED, Level.SEVERE,
          "project: " + projectName, ex.getMessage(), ex);
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 5 (folders): {0}", System.currentTimeMillis() - startTime);
    // update the project with the project folder inode
    try {
      setProjectInode(project, dfso);
    } catch (IOException | EJBException ex) {
      cleanup(project, sessionId, projectCreationFutures, true, owner);
      throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_INODE_CREATION_ERROR, Level.SEVERE,
          "project: " + projectName, ex.getMessage(), ex);
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 6 (inodes): {0}", System.currentTimeMillis() - startTime);
    // set payment and quotas
    try {
      setProjectOwnerAndQuotas(project, dfso, owner);
    } catch (IOException | EJBException ex) {
      cleanup(project, sessionId, projectCreationFutures, true, owner);
      throw new ProjectException(RESTCodes.ProjectErrorCode.QUOTA_ERROR, Level.SEVERE,
          "project: " + project.getName(), ex.getMessage(), ex);
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 7 (quotas): {0}", System.currentTimeMillis() - startTime);
    try {
      hdfsUsersController.addProjectFolderOwner(project, dfso);
      createProjectLogResources(owner, project, dfso);
    } catch (IOException | EJBException ex) {
      cleanup(project, sessionId, projectCreationFutures);
      throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_SET_PERMISSIONS_ERROR, Level.SEVERE,
          "project: " + projectName, ex.getMessage(), ex);
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 8 (logs): {0}", System.currentTimeMillis() - startTime);
    // delete any old Elasticsearch indices and saved objects left over from a previous
    // project with the same name, to avoid inconsistencies
    try {
      elasticController.deleteProjectIndices(project);
      elasticController.deleteProjectSavedObjects(projectName);
      LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 9 (elastic cleanup): {0}",
          System.currentTimeMillis() - startTime);
    } catch (ElasticException ex) {
      LOGGER.log(Level.FINE, "Error while cleaning old project indices", ex);
    }
    logProject(project, OperationType.Add);
    // enable services
    for (ProjectServiceEnum service : projectServices) {
      try {
        projectCreationFutures.addAll(addService(project, service, owner, dfso, provType));
      } catch (RESTException | IOException ex) {
        cleanup(project, sessionId, projectCreationFutures);
        throw ex;
      }
    }
    try {
      for (Future f : projectCreationFutures) {
        if (f != null) {
          f.get();
        }
      }
    } catch (InterruptedException | ExecutionException ex) {
      LOGGER.log(Level.SEVERE, "Error while waiting for the certificate generation thread to finish. Will try to "
          + "cleanup...", ex);
      cleanup(project, sessionId, projectCreationFutures);
      throw new HopsSecurityException(RESTCodes.SecurityErrorCode.CERT_CREATION_ERROR, Level.SEVERE);
    }
    // Run the post-create handlers.
    try {
      ProjectHandler.runProjectPostCreateHandlers(projectHandlers, project);
    } catch (ProjectException ex) {
      cleanup(project, sessionId, projectCreationFutures);
      throw ex;
    }
    try {
      project = environmentController.createEnv(project, owner);
    } catch (PythonException | EJBException ex) {
      cleanup(project, sessionId, projectCreationFutures);
      throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_ANACONDA_ENABLE_ERROR, Level.SEVERE,
          "project: " + projectName, ex.getMessage(), ex);
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 10 (env): {0}", System.currentTimeMillis() - startTime);
    return project;
  } finally {
    if (dfso != null) {
      dfso.close();
    }
    LOGGER.log(Level.FINE, "PROJECT CREATION TIME. Step 11 (close): {0}", System.currentTimeMillis() - startTime);
  }
}
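The Javadoc for createProject notes that REQUIRES_NEW guarantees the all-or-nothing behavior. A minimal, self-contained sketch of that transaction pattern in a Java EE stateless bean; the bean and method names are hypothetical and not part of the Hopsworks source.

import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;

@Stateless
public class NewTransactionSketchBean {  // hypothetical bean
  // REQUIRES_NEW suspends any caller transaction and starts a fresh one,
  // so a rollback here never taints the caller's transaction (and vice versa).
  @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
  public void runInOwnTransaction() {
    // persistence work placed here is committed or rolled back on its own
  }
}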
Use of io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO in project hopsworks by logicalclocks.
Example from the class DelaDatasetController, method createDataset.
public Dataset createDataset(Users user, Project project, String name, String description)
    throws DatasetException, HopsSecurityException, ProvenanceException {
  DistributedFileSystemOps dfso = dfs.getDfsOps();
  try {
    ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
    datasetCtrl.createDataset(user, project, name, description, projectMetaStatus, false,
        DatasetAccessPermission.EDITABLE, dfso);
    return datasetController.getByProjectAndDsName(project, null, name);
  } finally {
    if (dfso != null) {
      dfso.close();
    }
  }
}
Use of io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO in project hopsworks by logicalclocks.
Example from the class ProjectService, method updateProject.
@PUT
@Path("{projectId}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER })
public Response updateProject(ProjectDTO projectDTO, @PathParam("projectId") Integer id,
    @Context SecurityContext sc)
    throws ProjectException, DatasetException, HopsSecurityException, ServiceException, FeaturestoreException,
    ElasticException, SchemaException, KafkaException, ProvenanceException, IOException, UserException {
  RESTApiJsonResponse json = new RESTApiJsonResponse();
  Users user = jWTHelper.getUserPrincipal(sc);
  Project project = projectController.findProjectById(id);
  boolean updated = false;
  if (projectController.updateProjectDescription(project, projectDTO.getDescription(), user)) {
    json.setSuccessMessage(ResponseMessages.PROJECT_DESCRIPTION_CHANGED);
    updated = true;
  }
  if (projectController.updateProjectRetention(project, projectDTO.getRetentionPeriod(), user)) {
    json.setSuccessMessage(json.getSuccessMessage() + "\n" + ResponseMessages.PROJECT_RETENTON_CHANGED);
    updated = true;
  }
  if (!projectDTO.getServices().isEmpty()) {
    // Create dfso here and pass them to the different controllers
    DistributedFileSystemOps dfso = dfs.getDfsOps();
    DistributedFileSystemOps udfso = dfs.getDfsOps(hdfsUsersBean.getHdfsUserName(project, user));
    for (String s : projectDTO.getServices()) {
      ProjectServiceEnum se = ProjectServiceEnum.valueOf(s.toUpperCase());
      ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
      List<Future<?>> serviceFutureList = projectController.addService(project, se, user, dfso, udfso,
          projectMetaStatus);
      if (serviceFutureList != null) {
        // Wait for the futures
        for (Future f : serviceFutureList) {
          try {
            f.get();
          } catch (InterruptedException | ExecutionException e) {
            throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_GENERIC_ERROR, Level.SEVERE,
                "service: " + s, e.getMessage(), e);
          }
        }
        // Service successfully enabled
        json.setSuccessMessage(json.getSuccessMessage() + "\n" + ResponseMessages.PROJECT_SERVICE_ADDED + s);
        updated = true;
      }
    }
    // close dfsos
    if (dfso != null) {
      dfso.close();
    }
    if (udfso != null) {
      dfs.closeDfsClient(udfso);
    }
  }
  if (!updated) {
    json.setSuccessMessage(ResponseMessages.NOTHING_TO_UPDATE);
  }
  return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(json).build();
}
Use of io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO in project hopsworks by logicalclocks.
Example from the class DatasetResource, method postByPath.
@POST
@Path("{path: .+}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Post an action on a file, dir or dataset.")
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.DATASET_CREATE }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response postByPath(@Context UriInfo uriInfo, @Context SecurityContext sc, @PathParam("path") String path,
    @QueryParam("type") DatasetType datasetType, @QueryParam("target_project") String targetProjectName,
    @QueryParam("action") DatasetActions.Post action, @QueryParam("description") String description,
    @QueryParam("searchable") Boolean searchable, @QueryParam("generate_readme") Boolean generateReadme,
    @QueryParam("destination_path") String destPath, @QueryParam("destination_type") DatasetType destDatasetType,
    @DefaultValue("READ_ONLY") @QueryParam("permission") DatasetAccessPermission permission)
    throws DatasetException, ProjectException, HopsSecurityException, ProvenanceException, MetadataException,
    SchematizedTagException {
  Users user = jwtHelper.getUserPrincipal(sc);
  DatasetPath datasetPath;
  DatasetPath distDatasetPath;
  Project project = this.getProject();
  switch (action == null ? DatasetActions.Post.CREATE : action) {
    case CREATE:
      if (datasetType != null && !datasetType.equals(DatasetType.DATASET)) {
        // can only create dataset
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_OPERATION_INVALID, Level.FINE);
      }
      datasetPath = datasetHelper.getNewDatasetPath(project, path, DatasetType.DATASET);
      if (datasetPath.isTopLevelDataset()) {
        checkIfDataOwner(project, user);
      }
      if (datasetPath.isTopLevelDataset() && !datasetHelper.isBasicDatasetProjectParent(project, datasetPath)) {
        // fake shared dataset with :: in dataset name at dataset creation
        throw new DatasetException(RESTCodes.DatasetErrorCode.DATASET_NAME_INVALID, Level.FINE);
      }
      ProvTypeDTO projectProvCore = fsProvenanceController.getMetaStatus(user, project, searchable);
      ResourceRequest resourceRequest;
      if (datasetPath.isTopLevelDataset()) {
        datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(),
            datasetPath.isTopLevelDataset(), description,
            Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
        resourceRequest = new ResourceRequest(ResourceRequest.Name.DATASET);
        Dataset ds = datasetController.getByProjectAndFullPath(project, datasetPath.getFullPath().toString());
        datasetHelper.updateDataset(project, datasetPath, ds);
        datasetPath.setInode(ds.getInode());
        DatasetDTO dto = datasetBuilder.build(uriInfo, resourceRequest, user, datasetPath, null, null, false);
        return Response.created(dto.getHref()).entity(dto).build();
      } else {
        datasetHelper.checkIfDatasetExists(project, datasetPath);
        datasetHelper.updateDataset(project, datasetPath);
        datasetController.createDirectory(project, user, datasetPath.getFullPath(), datasetPath.getDatasetName(),
            datasetPath.isTopLevelDataset(), description,
            Provenance.getDatasetProvCore(projectProvCore, Provenance.MLType.DATASET), generateReadme, permission);
        resourceRequest = new ResourceRequest(ResourceRequest.Name.INODES);
        Inode inode = inodeController.getInodeAtPath(datasetPath.getFullPath().toString());
        datasetPath.setInode(inode);
        InodeDTO dto = inodeBuilder.buildStat(uriInfo, resourceRequest, user, datasetPath, inode);
        return Response.created(dto.getHref()).entity(dto).build();
      }
    case COPY:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
      datasetController.copy(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(),
          datasetPath.getDataset(), distDatasetPath.getDataset());
      break;
    case MOVE:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
      datasetController.move(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath(),
          datasetPath.getDataset(), distDatasetPath.getDataset());
      break;
    case SHARE:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.share(targetProjectName, datasetPath.getFullPath().toString(), permission, project, user);
      break;
    case ACCEPT:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.acceptShared(project, user, datasetPath.getDatasetSharedWith());
      break;
    case ZIP:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      if (destPath != null) {
        distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
        datasetController.zip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
      } else {
        datasetController.zip(project, user, datasetPath.getFullPath(), null);
      }
      break;
    case UNZIP:
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      if (destPath != null) {
        distDatasetPath = datasetHelper.getDatasetPath(project, destPath, destDatasetType);
        datasetController.unzip(project, user, datasetPath.getFullPath(), distDatasetPath.getFullPath());
      } else {
        datasetController.unzip(project, user, datasetPath.getFullPath(), null);
      }
      break;
    case REJECT:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.rejectShared(datasetPath.getDatasetSharedWith());
      break;
    case PUBLISH:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.shareWithCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
      break;
    case UNPUBLISH:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.unshareFromCluster(project, datasetPath.getDataset(), user, datasetPath.getFullPath());
      break;
    case IMPORT:
      checkIfDataOwner(project, user);
      Project srcProject = projectController.findProjectByName(targetProjectName);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(srcProject, path, datasetType);
      datasetController.share(project.getName(), datasetPath.getFullPath().toString(),
          DatasetAccessPermission.READ_ONLY, srcProject, user);
      break;
    case UNSHARE_ALL:
      checkIfDataOwner(project, user);
      datasetPath = datasetHelper.getDatasetPathIfFileExist(project, path, datasetType);
      datasetController.unshareAll(datasetPath.getDataset(), user);
      break;
    default:
      throw new WebApplicationException("Action not valid.", Response.Status.NOT_FOUND);
  }
  return Response.noContent().build();
}
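As a usage illustration, a hedged sketch of driving one of the actions above (ZIP) through this endpoint with the JAX-RS client API. The host, the project/dataset resource prefix, the file path, and the token are assumptions; the action and destination_path query parameters are those declared on postByPath.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class DatasetZipClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    Response response = client
        // hypothetical base URL and dataset sub-resource prefix for a project with id 119
        .target("https://hopsworks.example.com/hopsworks-api/api/project/119/dataset")
        .path("Resources/logs")                                  // hypothetical path inside the project
        .queryParam("action", "ZIP")                             // selects the DatasetActions.Post.ZIP branch
        .queryParam("destination_path", "Resources/logs.zip")    // optional; omitting it passes a null destPath
        .request(MediaType.APPLICATION_JSON)
        .header("Authorization", "Bearer <token>")               // hypothetical JWT
        .post(Entity.json(""));
    System.out.println(response.getStatus());                    // non-CREATE actions return 204 No Content
    client.close();
  }
}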