Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobFacade, method removeJob:
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void removeJob(Jobs job) {
  try {
    // Re-fetch by id to get a managed instance, merge it (in case the
    // argument is detached), then remove and flush in this transaction.
    Jobs managedJob = em.find(Jobs.class, job.getId());
    em.remove(em.merge(managedJob));
    em.flush();
  } catch (SecurityException | IllegalStateException ex) {
    LOGGER.log(Level.SEVERE, "Could not delete job:" + job.getId());
    throw ex;
  }
}
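The merge-before-remove pattern lets callers hand in a detached Jobs instance, and REQUIRES_NEW isolates the delete in its own transaction. A minimal caller sketch follows; the service class is hypothetical and not part of Hopsworks (Hopsworks imports elided):

import javax.ejb.EJB;
import javax.ejb.Stateless;

@Stateless
public class JobCleanupService { // hypothetical, for illustration only

  @EJB
  private JobFacade jobFacade;

  public void deleteJob(Jobs job) {
    // The delete commits (or fails) in its own REQUIRES_NEW transaction,
    // independent of any transaction this caller is running in.
    jobFacade.removeJob(job);
  }
}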
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class FsJobManagerController, method setupTrainingDatasetJob:
public Jobs setupTrainingDatasetJob(Project project, Users user, TrainingDataset trainingDataset,
    QueryDTO queryDTO, Boolean overwrite, Map<String, String> writeOptions,
    SparkJobConfiguration sparkJobConfiguration)
    throws FeaturestoreException, JobException, GenericException, ProjectException, ServiceException {
  DistributedFileSystemOps udfso = dfs.getDfsOps(hdfsUsersController.getHdfsUserName(project, user));
  try {
    String jobConfigurationPath =
        getJobConfigurationPath(project, trainingDataset.getName(), trainingDataset.getVersion(), "td");
    // Build the configuration the Spark job reads to materialize the training dataset.
    Map<String, Object> jobConfiguration = new HashMap<>();
    jobConfiguration.put("feature_store",
        featurestoreController.getOfflineFeaturestoreDbName(trainingDataset.getFeaturestore().getProject()));
    jobConfiguration.put("name", trainingDataset.getName());
    jobConfiguration.put("version", String.valueOf(trainingDataset.getVersion()));
    jobConfiguration.put("query", queryDTO);
    jobConfiguration.put("write_options", writeOptions);
    jobConfiguration.put("overwrite", overwrite);
    String jobConfigurationStr = objectMapper.writeValueAsString(jobConfiguration);
    writeToHDFS(jobConfigurationPath, jobConfigurationStr, udfso);
    String jobArgs = getJobArgs(TRAINING_DATASET_OP, jobConfigurationPath);
    Jobs trainingDatasetJob = configureJob(user, project, sparkJobConfiguration,
        getJobName(TRAINING_DATASET_OP, Utils.getTrainingDatasetName(trainingDataset)), jobArgs);
    // Unlike the ingestion job below, the training dataset job is started immediately.
    executionController.start(trainingDatasetJob, jobArgs, user);
    return trainingDatasetJob;
  } catch (IOException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ERROR_JOB_SETUP, Level.SEVERE,
        "Error setting up training dataset job", e.getMessage(), e);
  } finally {
    dfs.closeDfsClient(udfso);
  }
}
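For orientation, the objectMapper.writeValueAsString call above turns the configuration map into a JSON document along these lines; all values here are illustrative, and the query field expands into the serialized QueryDTO:

{
  "feature_store": "demo_featurestore",
  "name": "sales_model_td",
  "version": "1",
  "query": { ... },
  "write_options": { "format": "csv" },
  "overwrite": false
}

The Spark job started by executionController.start reads this file from HDFS to know which training dataset to materialize.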
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class FsJobManagerController, method setupIngestionJob:
public IngestionJob setupIngestionJob(Project project, Users user, Featuregroup featureGroup,
    SparkJobConfiguration sparkJobConfiguration, IngestionDataFormat dataFormat,
    Map<String, String> writeOptions, Map<String, String> dataOptions)
    throws FeaturestoreException, DatasetException, HopsSecurityException, JobException {
  DistributedFileSystemOps udfso = dfs.getDfsOps(hdfsUsersController.getHdfsUserName(project, user));
  try {
    String dataPath = getIngestionPath(project, user, featureGroup, udfso);
    String jobConfigurationPath =
        getJobConfigurationPath(project, featureGroup.getName(), featureGroup.getVersion(), "ingestion");
    // Build the configuration the ingestion job reads to insert data into the feature group.
    Map<String, Object> jobConfiguration = new HashMap<>();
    jobConfiguration.put("feature_store",
        featurestoreController.getOfflineFeaturestoreDbName(featureGroup.getFeaturestore().getProject()));
    jobConfiguration.put("name", featureGroup.getName());
    jobConfiguration.put("version", String.valueOf(featureGroup.getVersion()));
    jobConfiguration.put("data_path", dataPath);
    jobConfiguration.put("data_format", dataFormat.toString());
    jobConfiguration.put("data_options", dataOptions);
    jobConfiguration.put("write_options", writeOptions);
    String jobConfigurationStr = objectMapper.writeValueAsString(jobConfiguration);
    writeToHDFS(jobConfigurationPath, jobConfigurationStr, udfso);
    Jobs ingestionJob = configureJob(user, project, sparkJobConfiguration,
        getJobName(INSERT_FG_OP, Utils.getFeaturegroupName(featureGroup)),
        getJobArgs(INSERT_FG_OP, jobConfigurationPath));
    // The job is only configured here; the client triggers it once the data upload is done.
    return new IngestionJob(dataPath, ingestionJob);
  } catch (IOException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.ERROR_JOB_SETUP, Level.SEVERE,
        "Error setting up feature group import job", e.getMessage(), e);
  } finally {
    dfs.closeDfsClient(udfso);
  }
}
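A hedged sketch of how a caller consumes the returned IngestionJob: the client needs the HDFS upload path before the job can be triggered. The accessor names and the enum constant below are assumptions for illustration:

// fsJobManagerController is assumed to be an injected FsJobManagerController.
IngestionJob ingestion = fsJobManagerController.setupIngestionJob(
    project, user, featureGroup, sparkJobConfiguration,
    IngestionDataFormat.CSV, writeOptions, dataOptions); // CSV constant assumed
String uploadTarget = ingestion.getDataPath(); // getter name assumed
// Once the client finishes uploading data to uploadTarget, it triggers the
// configured job (ingestion.getJob()) through the jobs API.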
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobsResource, method unscheduleJob:
/**
 * Remove the schedule for the job with this name. On success the endpoint
 * returns 204 No Content.
 * <p>
 * @param name job name
 * @return Response
 */
@ApiOperation(value = "Cancel a job's schedule.")
@DELETE
@Path("{name}/schedule")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response unscheduleJob(@PathParam("name") String name, @Context SecurityContext sc) throws JobException {
  if (Strings.isNullOrEmpty(name)) {
    throw new IllegalArgumentException("job name was not provided or it was not set.");
  }
  Jobs job = jobFacade.findByProjectAndName(project, name);
  if (job == null) {
    throw new JobException(RESTCodes.JobErrorCode.JOB_NOT_FOUND, Level.FINEST);
  }
  jobController.unscheduleJob(job);
  return Response.noContent().build();
}
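Over HTTP this maps to DELETE .../jobs/{name}/schedule. A minimal JAX-RS client sketch follows; the host, base path, project id, job name, and API key are placeholders, and the exact project path is an assumption about the deployment:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Response;

public class UnscheduleJobClient {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    // Base path and project id are assumptions; adjust to your deployment.
    Response r = client.target("https://hopsworks.example.com/hopsworks-api/api")
        .path("project/{projectId}/jobs/{name}/schedule")
        .resolveTemplate("projectId", 119)
        .resolveTemplate("name", "my_spark_job")
        .request()
        .header("Authorization", "ApiKey " + System.getenv("HOPSWORKS_API_KEY"))
        .delete();
    System.out.println(r.getStatus()); // 204 indicates the schedule was removed
    client.close();
  }
}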
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobsResource, method updateSchedule:
@ApiOperation(value = "Create/Update job's schedule.")
@PUT
@Path("{name}/schedule")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response updateSchedule(ScheduleDTO schedule, @PathParam("name") String name,
    @Context SecurityContext sc, @Context UriInfo uriInfo) throws JobException {
  if (schedule == null) {
    throw new IllegalArgumentException("Schedule parameter was not provided.");
  }
  Jobs job = jobController.getJob(project, name);
  Users user = jWTHelper.getUserPrincipal(sc);
  jobController.updateSchedule(project, job, schedule, user);
  return Response.noContent().build();
}
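The PUT side is symmetric. A companion client sketch; the JSON field names (start, number, unit) mirror what ScheduleDTO appears to carry and should be treated as assumptions, as should the base path and placeholders:

import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class UpdateScheduleClient {
  public static void main(String[] args) {
    // Hypothetical schedule body: start timestamp (ms), run every 1 HOUR.
    String body = "{\"start\": 1609459200000, \"number\": 1, \"unit\": \"HOUR\"}";
    Response r = ClientBuilder.newClient()
        .target("https://hopsworks.example.com/hopsworks-api/api")
        .path("project/{projectId}/jobs/{name}/schedule")
        .resolveTemplate("projectId", 119)
        .resolveTemplate("name", "my_spark_job")
        .request()
        .header("Authorization", "ApiKey " + System.getenv("HOPSWORKS_API_KEY"))
        .put(Entity.json(body));
    System.out.println(r.getStatus()); // 204 indicates the schedule was stored
  }
}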