Use of io.hops.hopsworks.persistence.entity.jobs.configuration.JobConfiguration in project hopsworks by logicalclocks.
In the class JobFacade, the method updateJobSchedule:
public boolean updateJobSchedule(int jobId, ScheduleDTO schedule) {
  boolean status = false;
  try {
    // Load the managed job entity and attach the new schedule to its configuration.
    Jobs managedJob = em.find(Jobs.class, jobId);
    JobConfiguration config = managedJob.getJobConfig();
    config.setSchedule(schedule);
    // Persist the updated configuration through the named update query.
    TypedQuery<Jobs> q = em.createNamedQuery("Jobs.updateConfig", Jobs.class);
    q.setParameter("id", jobId);
    q.setParameter("jobconfig", config);
    int result = q.executeUpdate();
    LOGGER.log(Level.INFO, "Updated entity count = {0}", result);
    if (result == 1) {
      status = true;
    }
  } catch (SecurityException | IllegalArgumentException ex) {
    // Log the exception itself so the stack trace is not lost before rethrowing.
    LOGGER.log(Level.SEVERE, "Could not update job with id: " + jobId, ex);
    throw ex;
  }
  return status;
}
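The method relies on a named query, Jobs.updateConfig, declared elsewhere on the Jobs entity. As a minimal sketch of the same load-then-update pattern in plain JPA, assuming a hypothetical JobConfigUpdater class and reusing only the named-query and parameter names from the snippet above (nothing here is taken verbatim from hopsworks):

import javax.persistence.EntityManager;
import javax.persistence.Query;

// Hypothetical helper; the entity mapping and the Jobs.updateConfig named
// query are assumed to be defined elsewhere, as in the snippet above.
public final class JobConfigUpdater {

  private final EntityManager em;

  public JobConfigUpdater(EntityManager em) {
    this.em = em;
  }

  // Returns true when exactly one row was updated, mirroring the
  // result == 1 check in updateJobSchedule.
  public boolean updateConfig(int jobId, Object newConfig) {
    Query q = em.createNamedQuery("Jobs.updateConfig");
    q.setParameter("id", jobId);
    q.setParameter("jobconfig", newConfig);
    return q.executeUpdate() == 1;
  }
}

Treating the updated row count as the success signal avoids a second round-trip to re-read the entity after the update.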
Use of io.hops.hopsworks.persistence.entity.jobs.configuration.JobConfiguration in project hopsworks by logicalclocks.
In the class JobController, the method inspectProgram:
@TransactionAttribute(TransactionAttributeType.NEVER)
public JobConfiguration inspectProgram(String path, Project project, Users user, JobType jobType)
    throws JobException {
  DistributedFileSystemOps udfso = null;
  try {
    // Perform HDFS operations as the project-specific HDFS user.
    String username = hdfsUsersBean.getHdfsUserName(project, user);
    udfso = dfs.getDfsOps(username);
    LOGGER.log(Level.FINE, "Inspecting executable job program by {0} at path: {1}",
        new Object[] {username, path});
    JobConfiguration jobConf = getConfiguration(project, jobType, true);
    switch (jobType) {
      case SPARK:
      case PYSPARK:
        // Spark and PySpark programs must point to a .jar, .py, or .ipynb file.
        if (Strings.isNullOrEmpty(path)
            || !(path.endsWith(".jar") || path.endsWith(".py") || path.endsWith(".ipynb"))) {
          throw new IllegalArgumentException("Path does not point to a .jar, .py or .ipynb file.");
        }
        return sparkController.inspectProgram((SparkJobConfiguration) jobConf, path, udfso);
      case FLINK:
        // Flink programs need no further inspection; return the default configuration.
        return jobConf;
      default:
        throw new IllegalArgumentException("Job type not supported: " + jobType);
    }
  } finally {
    // Always release the DFS client, even when inspection fails.
    if (udfso != null) {
      dfs.closeDfsClient(udfso);
    }
  }
}
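The SPARK and PYSPARK branch only accepts programs ending in .jar, .py, or .ipynb. The same check, pulled out as a standalone helper for clarity (this helper does not exist in hopsworks; it assumes Guava's Strings is on the classpath, as the method above already implies):

import com.google.common.base.Strings;

// Illustrative helper, not part of hopsworks: mirrors the validation applied
// to SPARK and PYSPARK programs in inspectProgram.
final class ProgramPaths {

  private ProgramPaths() {
  }

  static boolean isInspectableSparkProgram(String path) {
    return !Strings.isNullOrEmpty(path)
        && (path.endsWith(".jar") || path.endsWith(".py") || path.endsWith(".ipynb"));
  }
}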
Use of io.hops.hopsworks.persistence.entity.jobs.configuration.JobConfiguration in project hopsworks by logicalclocks.
In the class JobsResource, the method inspect:
@ApiOperation(value = "Inspect user program and return a JobConfiguration",
    response = SparkJobConfiguration.class)
@GET
@Path("{jobtype : python|docker|spark|pyspark|flink}/inspection")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST})
@JWTRequired(acceptedTokens = {Audience.API, Audience.JOB},
    allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER"})
@ApiKeyRequired(acceptedScopes = {ApiScope.JOB}, allowedUserRoles = {"HOPS_ADMIN", "HOPS_USER"})
public Response inspect(
    @ApiParam(value = "job type", example = "spark") @PathParam("jobtype") JobType jobtype,
    @ApiParam(value = "path", example = "/Projects/demo_spark_admin000/Resources/spark-examples.jar",
        required = true) @QueryParam("path") String path,
    @Context SecurityContext sc) throws JobException {
  // Resolve the calling user from the JWT, inspect the program, and return
  // the resulting JobConfiguration as JSON.
  Users user = jWTHelper.getUserPrincipal(sc);
  JobConfiguration config = jobController.inspectProgram(path, project, user, jobtype);
  return Response.ok().entity(config).build();
}
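A hypothetical client for this endpoint using java.net.http (Java 11+); the base URL, project id, and API key are illustrative assumptions, while the jobtype path segment and path query parameter come from the annotations above:

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class InspectJobExample {

  public static void main(String[] args) throws Exception {
    // Base URL and project id are placeholders; only the trailing
    // "{jobtype}/inspection?path=..." part is defined by the resource above.
    String base = "https://hopsworks.example.com/hopsworks-api/api/project/119/jobs";
    String programPath = URLEncoder.encode(
        "/Projects/demo_spark_admin000/Resources/spark-examples.jar", StandardCharsets.UTF_8);
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create(base + "/spark/inspection?path=" + programPath))
        .header("Authorization", "ApiKey <api-key>") // key with ApiScope.JOB assumed
        .GET()
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.body()); // serialized SparkJobConfiguration JSON
  }
}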