Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class CodeController, method saveJob.
private Path saveJob(Project project, Users user, String entityId, Path dirPath, String applicationId)
    throws FeaturestoreException {
  // get job path
  Jobs job = jobFacade.findByProjectAndName(project, entityId);
  // Currently we can save code only for (Py)Spark and Python jobs
  String appPath = null;
  if (job.getJobType() == JobType.SPARK || job.getJobType() == JobType.PYSPARK) {
    appPath = ((SparkJobConfiguration) job.getJobConfig()).getAppPath();
  }
  // generate file path
  String extension = Utils.getExtension(appPath).orElse("");
  Path path = new Path(dirPath, applicationId + "." + extension);
  // read job and save to file path
  String projectUsername = hdfsUsersController.getHdfsUserName(project, user);
  DistributedFileSystemOps udfso = null;
  try {
    udfso = dfs.getDfsOps(projectUsername);
    String notebookString = udfso.cat(appPath);
    udfso.create(path, notebookString);
  } catch (IOException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.CODE_READ_ERROR, Level.WARNING,
        e.getMessage(), e.getMessage(), e);
  } finally {
    dfs.closeDfsClient(udfso);
  }
  return path;
}
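A hedged sketch of how saveJob might be invoked from elsewhere in CodeController (the method is private); the project, user, job name, target directory, and application id are hypothetical placeholders:

// Hypothetical caller: snapshot the application file of the Spark job
// "sales_model_training" under a per-application code directory.
Path codeDir = new Path("/Projects/demo/Resources/code");
Path saved = saveJob(project, user, "sales_model_training", codeDir, "application_1609869724000_0001");
// If the job's appPath ends in ".py", saved now points at
// /Projects/demo/Resources/code/application_1609869724000_0001.py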
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class TrainingDatasetService, method compute.
@POST
@Path("/{trainingDatasetId}/compute")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Setup a job to compute and write a training dataset", response = JobDTO.class)
@AllowedProjectRoles({AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST})
@JWTRequired(acceptedTokens = {Audience.API, Audience.JOB}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER"})
@ApiKeyRequired(acceptedScopes = {ApiScope.DATASET_VIEW, ApiScope.FEATURESTORE}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER"})
public Response compute(@Context UriInfo uriInfo, @Context SecurityContext sc,
    @PathParam("trainingDatasetId") Integer trainingDatasetId, TrainingDatasetJobConf trainingDatasetJobConf)
    throws FeaturestoreException, ServiceException, JobException, ProjectException, GenericException {
  verifyIdProvided(trainingDatasetId);
  Users user = jWTHelper.getUserPrincipal(sc);
  TrainingDataset trainingDataset = trainingDatasetController.getTrainingDatasetById(featurestore, trainingDatasetId);
  Map<String, String> writeOptions = null;
  if (trainingDatasetJobConf.getWriteOptions() != null) {
    writeOptions = trainingDatasetJobConf.getWriteOptions().stream()
        .collect(Collectors.toMap(OptionDTO::getName, OptionDTO::getValue));
  }
  Jobs job = fsJobManagerController.setupTrainingDatasetJob(project, user, trainingDataset,
      trainingDatasetJobConf.getQuery(), trainingDatasetJobConf.getOverwrite(), writeOptions,
      trainingDatasetJobConf.getSparkJobConfiguration());
  JobDTO jobDTO = jobsBuilder.build(uriInfo, new ResourceRequest(ResourceRequest.Name.JOBS), job);
  return Response.created(jobDTO.getHref()).entity(jobDTO).build();
}
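The writeOptions handling above is a plain stream-to-map collection. A minimal, self-contained illustration with a stand-in Option class (OptionDTO's real shape lives in hopsworks; this sketch only shows the collection step, which throws IllegalStateException if two options share a name):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class Option {
  private final String name;
  private final String value;
  Option(String name, String value) { this.name = name; this.value = value; }
  String getName() { return name; }
  String getValue() { return value; }
}

public class WriteOptionsDemo {
  public static void main(String[] args) {
    List<Option> options = Arrays.asList(new Option("header", "true"), new Option("delimiter", ","));
    Map<String, String> writeOptions = options.stream()
        .collect(Collectors.toMap(Option::getName, Option::getValue));
    System.out.println(writeOptions); // e.g. {header=true, delimiter=,} (map order unspecified)
  }
}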
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobFacade, method put.
/**
 * Create a new Jobs instance, or update the configuration of an existing one.
 * <p/>
 * @param creator The creator of the job.
 * @param project The project in which this job is defined.
 * @param config The job configuration.
 * @param job An existing Jobs entity to update, or null to create a new one.
 * @return The persisted Jobs entity instance.
 */
// REQUIRES_NEW seems to ensure that the entity is actually created and can later
// be found using em.find().
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Jobs put(Users creator, Project project, JobConfiguration config, Jobs job) {
  // Argument checking
  if (creator == null || project == null || config == null) {
    throw new IllegalArgumentException("Owner, project and config must be non-null.");
  }
  if (job == null) {
    // First: create a job object
    job = new Jobs(config, project, creator, config.getAppName());
  } else {
    job.setJobConfig(config);
  }
  // Finally: persist it, getting the assigned id.
  job = em.merge(job);
  // To get the id.
  em.flush();
  return job;
}
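A hedged sketch of the two ways put can be called; creator, project, config, and newConfig are placeholders, and getId() on the Jobs entity is assumed:

// Create: a null job argument builds a fresh Jobs entity named after the config's app name.
Jobs created = jobFacade.put(creator, project, config, null);
// The generated id is already available, because put() flushes after em.merge().
Integer jobId = created.getId();

// Update: passing an existing entity back swaps in the new configuration instead.
Jobs updated = jobFacade.put(creator, project, newConfig, created);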
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobFacade, method updateJobSchedule.
public boolean updateJobSchedule(int jobId, ScheduleDTO schedule) {
  boolean status = false;
  try {
    Jobs managedJob = em.find(Jobs.class, jobId);
    JobConfiguration config = managedJob.getJobConfig();
    config.setSchedule(schedule);
    TypedQuery<Jobs> q = em.createNamedQuery("Jobs.updateConfig", Jobs.class);
    q.setParameter("id", jobId);
    q.setParameter("jobconfig", config);
    int result = q.executeUpdate();
    LOGGER.log(Level.INFO, "Updated entity count = {0}", result);
    if (result == 1) {
      status = true;
    }
  } catch (SecurityException | IllegalArgumentException ex) {
    LOGGER.log(Level.SEVERE, "Could not update job with id: " + jobId);
    throw ex;
  }
  return status;
}
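A hedged usage sketch; the job id is made up, and buildSchedule() is a hypothetical helper standing in for the project-specific ScheduleDTO construction:

ScheduleDTO schedule = buildSchedule(); // hypothetical helper returning a populated DTO
if (!jobFacade.updateJobSchedule(42, schedule)) {
  // executeUpdate() did not report exactly one affected row.
  LOGGER.log(Level.WARNING, "Schedule update did not affect exactly one row for job 42");
}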
Use of io.hops.hopsworks.persistence.entity.jobs.description.Jobs in project hopsworks by logicalclocks.
The class JobScheduler, method timeout.
/**
 * Execute the job whose id was attached to the timer object as extra info.
 * <p/>
 * @param timer The expired timer; its info payload holds the id of the job to run.
 */
@Timeout
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public void timeout(Timer timer) {
  Serializable jobId = timer.getInfo();
  try {
    // Valid id?
    if (!(jobId instanceof Integer)) {
      logger.log(Level.WARNING, "Trying to run a scheduled execution, but the timer info is not an Integer job id: "
          + (jobId == null ? "null" : jobId.getClass().getSimpleName()));
      return;
    }
    // Valid job?
    Jobs job = jobFacade.find(jobId);
    // Make sure the job is valid (still exists in DB and user still in the project where the job is)
    if (job == null) {
      logger.log(Level.WARNING, "Trying to run a job with non-existing id, canceling timer.");
      timer.cancel();
      return;
    } else if (projectTeamFacade.findCurrentRole(job.getProject(), job.getCreator()) == null) {
      logger.log(Level.INFO, "Trying to run a job created by a user no longer in this project, canceling timer.");
      timer.cancel();
      return;
    }
    // Run scheduled job
    executionController.start(job, null, job.getCreator());
  } catch (Exception e) {
    logger.log(Level.SEVERE, "Failed to start an execution for job " + jobId, e);
  }
}
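For completeness, a hedged sketch of how a single-job timer could be registered with the standard javax.ejb.TimerService so that timeout(Timer) receives the job id as its info payload; this is not necessarily how JobScheduler itself creates timers, and the delay values are hypothetical:

@Resource
private TimerService timerService;

public void scheduleJob(Jobs job) {
  // Store the job id as the timer's info; timeout(Timer) checks that it is an
  // Integer and re-fetches the job before every run.
  long initialDelayMs = 60_000L;  // first run after one minute (hypothetical)
  long intervalMs = 3_600_000L;   // then hourly (hypothetical)
  timerService.createTimer(initialDelayMs, intervalMs, job.getId());
}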