Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobsResource, method delete.
@ApiOperation(value = "Delete the job with the given ID")
@DELETE
@Path("{name}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response delete(@ApiParam(value = "id", required = true) @PathParam("name") String name, @Context SecurityContext sc, @Context UriInfo uriInfo) throws JobException {
Users user = jWTHelper.getUserPrincipal(sc);
Jobs job = jobController.getJob(project, name);
if (job.getJobConfig().getSchedule() != null) {
jobController.unscheduleJob(job);
}
switch(job.getJobType()) {
case SPARK:
case PYSPARK:
case FLINK:
jobController.deleteJob(job, user);
break;
default:
throw new JobException(RESTCodes.JobErrorCode.JOB_TYPE_UNSUPPORTED, Level.FINEST, job.getJobType().toString());
}
return Response.noContent().build();
}
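For orientation, a minimal client-side sketch of calling this endpoint with the standard JAX-RS client. The base URL, project id, and bearer token are hypothetical placeholders, not values taken from the Hopsworks source.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Response;

public class DeleteJobClient {
  public static void main(String[] args) {
    // Hypothetical deployment URL, project id, and token -- adjust for a real cluster.
    Client client = ClientBuilder.newClient();
    Response response = client
        .target("https://hopsworks.example.com/hopsworks-api/api")
        .path("project/119/jobs/my_spark_job")
        .request()
        .header("Authorization", "Bearer <jwt-token>")
        .delete();
    // A successful delete returns 204 No Content, matching Response.noContent() above.
    System.out.println("Status: " + response.getStatus());
    client.close();
  }
}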
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobsResource, method getJob.
@ApiOperation(value = "Get the job with requested ID", response = JobDTO.class)
@GET
@Path("{name}")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response getJob(@PathParam("name") String name, @BeanParam JobsBeanParam jobsBeanParam, @Context UriInfo uriInfo, @Context SecurityContext sc) throws JobException {
Jobs job = jobController.getJob(project, name);
ResourceRequest resourceRequest = new ResourceRequest(ResourceRequest.Name.JOBS);
resourceRequest.setExpansions(jobsBeanParam.getExpansions().getResources());
JobDTO dto = jobsBuilder.build(uriInfo, resourceRequest, job);
return Response.ok().entity(dto).build();
}
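A similar hedged sketch for fetching a job. The expand=executions query parameter is an assumption about how JobsBeanParam surfaces expansions, shown for illustration only; the URL and token are again placeholders.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;

public class GetJobClient {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    // "expand=executions" assumes the usual Hopsworks expansion query parameter;
    // treat it as an illustration, not a documented contract.
    String body = client
        .target("https://hopsworks.example.com/hopsworks-api/api")
        .path("project/119/jobs/my_spark_job")
        .queryParam("expand", "executions")
        .request("application/json")
        .header("Authorization", "Bearer <jwt-token>")
        .get(String.class);
    System.out.println(body);
    client.close();
  }
}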
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class FsJobManagerController, method setupStatisticsJob.
public Jobs setupStatisticsJob(Project project, Users user, Featurestore featurestore,
    Featuregroup featureGroup, TrainingDataset trainingDataset)
    throws FeaturestoreException, JobException, GenericException, ProjectException, ServiceException {
  DistributedFileSystemOps udfso = dfs.getDfsOps(hdfsUsersController.getHdfsUserName(project, user));
  Map<String, String> jobConfiguration = new HashMap<>();
  try {
    // The job can target either a feature group or a training dataset.
    String entityName = featureGroup != null ? featureGroup.getName() : trainingDataset.getName();
    Integer entityVersion = featureGroup != null ? featureGroup.getVersion() : trainingDataset.getVersion();
    String jobConfigurationPath = getJobConfigurationPath(project, entityName, entityVersion, "statistics");
    jobConfiguration.put("feature_store",
        featurestoreController.getOfflineFeaturestoreDbName(featurestore.getProject()));
    jobConfiguration.put("type", featureGroup != null ? "fg" : "td");
    jobConfiguration.put("name", entityName);
    jobConfiguration.put("version", String.valueOf(entityVersion));
    String jobConfigurationStr = objectMapper.writeValueAsString(jobConfiguration);
    writeToHDFS(jobConfigurationPath, jobConfigurationStr, udfso);
    String jobArgs = getJobArgs(COMPUTE_STATS_OP, jobConfigurationPath);
    Jobs statisticsJob = configureJob(user, project, null,
        getJobName(COMPUTE_STATS_OP, Utils.getFeatureStoreEntityName(entityName, entityVersion)), jobArgs);
    // Unlike the ingestion job, no further client action is required at this stage,
    // so the job can be started directly.
    executionController.start(statisticsJob, jobArgs, user);
    return statisticsJob;
  } catch (IOException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ERROR_JOB_SETUP, Level.SEVERE,
        "Error setting up statistics job", e.getMessage(), e);
  } finally {
    dfs.closeDfsClient(udfso);
  }
}
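The configuration handed to the statistics job is a flat string map serialized with Jackson before being written to HDFS. A self-contained sketch of just that serialization step, with made-up values:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.HashMap;
import java.util.Map;

public class StatisticsJobConfigDemo {
  public static void main(String[] args) throws Exception {
    // Mirrors the map assembled above; the values here are invented for illustration.
    Map<String, String> jobConfiguration = new HashMap<>();
    jobConfiguration.put("feature_store", "demo_featurestore");
    jobConfiguration.put("type", "fg"); // "fg" for a feature group, "td" for a training dataset
    jobConfiguration.put("name", "sales_features");
    jobConfiguration.put("version", "1");
    // Same serialization step the controller performs before writeToHDFS.
    String json = new ObjectMapper().writeValueAsString(jobConfiguration);
    System.out.println(json);
  }
}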
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class ModelsController, method versionProgram.
public String versionProgram(Accessor accessor, String jobName, String kernelId,
    String modelName, int modelVersion) throws JobException, ServiceException {
  if (!Strings.isNullOrEmpty(jobName)) {
    // Model trained in a job: version the job's application program.
    Jobs experimentJob = jobController.getJob(accessor.experimentProject, jobName);
    switch (experimentJob.getJobType()) {
      case SPARK:
      case PYSPARK: {
        SparkJobConfiguration sparkJobConf = (SparkJobConfiguration) experimentJob.getJobConfig();
        // Keep the original program's file extension (e.g. ".py" or ".jar").
        String suffix = sparkJobConf.getAppPath().substring(sparkJobConf.getAppPath().lastIndexOf("."));
        String relativePath =
            Settings.HOPS_MODELS_DATASET + "/" + modelName + "/" + modelVersion + "/program" + suffix;
        Path path = new Path(Utils.getProjectPath(accessor.modelProject.getName()) + relativePath);
        jobController.versionProgram(sparkJobConf.getAppPath(), accessor.udfso, path);
        return relativePath;
      }
      case PYTHON: {
        throw new IllegalArgumentException("Python jobs are not available in the community edition");
      }
      default:
        throw new IllegalArgumentException("Cannot version program for job type: " + experimentJob.getJobType());
    }
  } else {
    // Model trained in Jupyter: version the notebook instead.
    String relativePath = Settings.HOPS_MODELS_DATASET + "/" + modelName + "/" + modelVersion + "/program.ipynb";
    Path path = new Path(Utils.getProjectPath(accessor.modelProject.getName()) + relativePath);
    jupyterController.versionProgram(accessor.hdfsUser, kernelId, path, accessor.udfso);
    return relativePath;
  }
}
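Note that the suffix extraction above assumes appPath always contains a dot: substring(lastIndexOf(".")) throws StringIndexOutOfBoundsException on a path without an extension, since lastIndexOf returns -1. A hypothetical guarded variant of the same idea:

public class SuffixDemo {
  // Hypothetical helper, not part of the Hopsworks source; returns "" instead of
  // throwing when the path has no file extension.
  static String fileSuffix(String appPath) {
    int dot = appPath.lastIndexOf('.');
    return dot >= 0 ? appPath.substring(dot) : "";
  }

  public static void main(String[] args) {
    System.out.println(fileSuffix("/Projects/demo/Resources/train.py")); // ".py"
    System.out.println(fileSuffix("/Projects/demo/Resources/job.jar"));  // ".jar"
    System.out.println(fileSuffix("no_extension"));                      // "" rather than an exception
  }
}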
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobFacade, method findByProject.
public CollectionInfo findByProject(Integer offset, Integer limit,
    Set<? extends AbstractFacade.FilterBy> filters, Set<? extends AbstractFacade.SortBy> sorts,
    Project project) {
  // If a filter or sort targets the executions subresource, add an inner join.
  String join = "";
  if (sorts != null) {
    for (SortBy sort : sorts) {
      if (sort.getValue().equals(Sorts.FINALSTATUS.getValue())
          || sort.getValue().equals(Sorts.PROGRESS.getValue())
          || sort.getValue().equals(Sorts.STATE.getValue())
          || sort.getValue().equals(Sorts.SUBMISSIONTIME.getValue())
          || sort.getValue().equals(Sorts.DURATION.getValue())) {
        join = JPQL_EXECUTIONS;
        break;
      }
    }
  }
  if (filters != null) {
    for (FilterBy filterBy : filters) {
      if (filterBy.getValue().equals(Filters.LATEST_EXECUTION.getValue())) {
        join = JPQL_EXECUTIONS;
        break;
      }
    }
  }
  String queryStr = buildQuery("SELECT j FROM Jobs j " + join, filters, sorts, "j.project = :project ");
  String queryCountStr =
      buildQuery("SELECT COUNT(DISTINCT j.id) FROM Jobs j " + join, filters, sorts, "j.project = :project ");
  Query query = em.createQuery(queryStr, Jobs.class).setParameter("project", project);
  // Typed as Long, since the count query returns a Long rather than a Jobs entity.
  Query queryCount = em.createQuery(queryCountStr, Long.class).setParameter("project", project);
  setFilter(filters, query);
  setFilter(filters, queryCount);
  setOffsetAndLim(offset, limit, query);
  return new CollectionInfo((Long) queryCount.getSingleResult(), query.getResultList());
}
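setOffsetAndLim is not shown here. A minimal sketch, assuming it wraps the standard JPA pagination calls; the facade's actual helper may differ in its null and bounds handling.

import javax.persistence.Query;

public class PaginationSketch {
  // Hypothetical reconstruction of the pagination helper used above.
  static void setOffsetAndLim(Integer offset, Integer limit, Query query) {
    if (offset != null && offset > 0) {
      query.setFirstResult(offset); // skip the first <offset> rows
    }
    if (limit != null && limit > 0) {
      query.setMaxResults(limit);   // cap the page size
    }
  }
}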