Example 11 with Execution

use of io.hops.hopsworks.persistence.entity.jobs.history.Execution in project hopsworks by logicalclocks.

the class SparkController method startJob.

/**
 * Start the Spark job as the given user.
 * <p/>
 * @param job the job to start
 * @param args runtime arguments for the application
 * @param user the user starting the job
 * @return the Execution created for this run of the Spark job
 * @throws IllegalStateException If Spark is not set up properly.
 * @throws IOException If starting the Spark job fails.
 */
public Execution startJob(final Jobs job, String args, final Users user) throws ServiceException, GenericException, JobException, ProjectException {
    // First: some parameter checking.
    sanityCheck(job, user);
    String username = hdfsUsersBean.getHdfsUserName(job.getProject(), user);
    SparkJobConfiguration sparkConfig = (SparkJobConfiguration) job.getJobConfig();
    String appPath = sparkConfig.getAppPath();
    if (job.getJobType().equals(JobType.PYSPARK)) {
        if (job.getProject().getPythonEnvironment() == null) {
            // Throw error in Hopsworks UI to notify user to enable Anaconda
            throw new JobException(RESTCodes.JobErrorCode.JOB_START_FAILED, Level.SEVERE, "PySpark job needs to have Python Anaconda environment enabled");
        }
    }
    SparkJob sparkjob = createSparkJob(username, job, user);
    Execution exec = sparkjob.requestExecutionId(args);
    if (job.getJobType().equals(JobType.PYSPARK) && appPath.endsWith(".ipynb")) {
        submitter.getExecutionFacade().updateState(exec, JobState.CONVERTING_NOTEBOOK);
        String pyAppPath = HopsUtils.prepJupyterNotebookConversion(exec, username, dfs);
        sparkConfig.setAppPath(pyAppPath);
        jupyterController.convertIPythonNotebook(username, appPath, job.getProject(), pyAppPath, jupyterController.getNotebookConversionType(appPath, user, job.getProject()));
    }
    submitter.startExecution(sparkjob, args);
    activityFacade.persistActivity(ActivityFacade.RAN_JOB + job.getName(), job.getProject(), user.asUser(), ActivityFlag.JOB);
    return exec;
}
Also used : JobException(io.hops.hopsworks.exceptions.JobException) Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution) SparkJobConfiguration(io.hops.hopsworks.persistence.entity.jobs.configuration.spark.SparkJobConfiguration)
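A caller of startJob typically resolves the Jobs entity and the acting Users first, then hands the id of the returned Execution back to the client for polling. The sketch below is illustrative only: the wrapper class, the injection point, and the package names of SparkController and Jobs are assumptions not shown on this page, and the broad throws clause stands in for the real ServiceException, GenericException, JobException and ProjectException.

// Hypothetical caller sketch -- the two "assumed package" imports are guesses, not taken from this page.
import io.hops.hopsworks.common.jobs.spark.SparkController;        // assumed package
import io.hops.hopsworks.persistence.entity.jobs.description.Jobs; // assumed package
import io.hops.hopsworks.persistence.entity.jobs.history.Execution;
import io.hops.hopsworks.persistence.entity.user.Users;
import javax.ejb.EJB;
import javax.ejb.Stateless;

@Stateless
public class SparkJobRunner {

    @EJB
    private SparkController sparkController;

    /**
     * Starts the given Spark job and returns the id of the created Execution.
     * Declared with a broad throws clause for brevity; the real startJob throws
     * ServiceException, GenericException, JobException and ProjectException.
     */
    public Integer runAndTrack(Jobs job, Users user, String args) throws Exception {
        Execution exec = sparkController.startJob(job, args, user);
        // The execution is already persisted; its id can be polled for state changes.
        return exec.getId();
    }
}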

Example 12 with Execution

use of io.hops.hopsworks.persistence.entity.jobs.history.Execution in project hopsworks by logicalclocks.

the class JobController method deleteJob.

@TransactionAttribute(TransactionAttributeType.NEVER)
public void deleteJob(Jobs job, Users user) throws JobException {
    // Kill running execution of this job (if any)
    executionController.stop(job);
    // Wait till execution is in a final state
    List<Execution> nonFinishedExecutions = executionFacade.findByJobAndNotFinished(job);
    LOGGER.log(Level.FINE, "nonFinishedExecutions:" + nonFinishedExecutions);
    int sleep = 2000;
    int timeout = 60;
    int retries = timeout * 1000 / sleep;
    int i = 0;
    while (!nonFinishedExecutions.isEmpty() && i < retries) {
        LOGGER.log(Level.INFO, "waiting for executions:" + nonFinishedExecutions);
        // Wait a few seconds till execution in final state
        try {
            Thread.sleep(sleep);
        } catch (InterruptedException ex) {
            throw new JobException(RESTCodes.JobErrorCode.JOB_DELETION_ERROR, Level.WARNING, "Interrupted while waiting to stop the job's executions. Job: " + job.getName(), ex.getMessage(), ex);
        }
        nonFinishedExecutions = executionFacade.findByJobAndNotFinished(job);
        i++;
    }
    try {
        LOGGER.log(Level.FINE, "Request to delete job name ={0} job id ={1}", new Object[] { job.getName(), job.getId() });
        jobFacade.removeJob(job);
        String username = hdfsUsersBean.getHdfsUserName(job.getProject(), user);
        HopsUtils.cleanupJobDatasetResources(job, username, dfs);
        LOGGER.log(Level.FINE, "Deleted job name ={0} job id ={1}", new Object[] { job.getName(), job.getId() });
        String activityMessage = ActivityFacade.DELETED_JOB + job.getName();
        if (!nonFinishedExecutions.isEmpty()) {
            activityMessage += " with pending executions: " + nonFinishedExecutions;
        }
        activityFacade.persistActivity(activityMessage, job.getProject(), user.getEmail(), ActivityFlag.JOB);
    } catch (DatabaseException ex) {
        LOGGER.log(Level.SEVERE, "Job cannot be deleted job name ={0} job id ={1}", new Object[] { job.getName(), job.getId() });
        throw new JobException(RESTCodes.JobErrorCode.JOB_DELETION_ERROR, Level.SEVERE, ex.getMessage(), null, ex);
    }
}
Also used : JobException(io.hops.hopsworks.exceptions.JobException) Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution) DatabaseException(org.eclipse.persistence.exceptions.DatabaseException) TransactionAttribute(javax.ejb.TransactionAttribute)
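The notable part of deleteJob is the bounded wait before removal: the retry budget is derived as retries = timeout * 1000 / sleep (30 iterations of 2 seconds for the 60-second timeout above), and the loop re-queries the facade on every pass. Below is a minimal, self-contained sketch of that polling pattern, with a plain BooleanSupplier standing in for executionFacade.findByJobAndNotFinished(job).isEmpty(); the helper names are mine, not Hopsworks'.

import java.util.function.BooleanSupplier;

public class BoundedPoller {

    /**
     * Polls the supplied condition until it holds or the retry budget is spent.
     * Mirrors the arithmetic used in deleteJob: retries = timeoutSeconds * 1000 / sleepMillis.
     *
     * @return true if the condition became true within the timeout.
     */
    public static boolean waitUntil(BooleanSupplier done, int sleepMillis, int timeoutSeconds)
            throws InterruptedException {
        int retries = timeoutSeconds * 1000 / sleepMillis;
        int i = 0;
        while (!done.getAsBoolean() && i < retries) {
            Thread.sleep(sleepMillis);
            i++;
        }
        return done.getAsBoolean();
    }

    public static void main(String[] args) throws InterruptedException {
        long deadline = System.currentTimeMillis() + 5000;
        // Example condition: becomes true after roughly five seconds.
        boolean finished = waitUntil(() -> System.currentTimeMillis() >= deadline, 2000, 60);
        System.out.println("finished in time: " + finished);
    }
}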

Example 13 with Execution

use of io.hops.hopsworks.persistence.entity.jobs.history.Execution in project hopsworks by logicalclocks.

the class ExecutionsResource method stopExecution.

@ApiOperation(value = "Stop an execution(run) of the job", notes = "Stops an execution of a job by providing the status.", response = ExecutionDTO.class)
@PUT
@Path("{id}/status")
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_SCIENTIST, AllowedProjectRoles.DATA_OWNER })
@JWTRequired(acceptedTokens = { Audience.API, Audience.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
@ApiKeyRequired(acceptedScopes = { ApiScope.JOB }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response stopExecution(@ApiParam(value = "Id of execution.", required = true) @PathParam("id") Integer id, @ApiParam(value = "status to set.", required = true) Status status, @Context SecurityContext sc, @Context UriInfo uriInfo) throws JobException {
    Execution exec = executionController.stopExecution(id);
    ResourceRequest resourceRequest = new ResourceRequest(ResourceRequest.Name.EXECUTIONS);
    return Response.accepted().entity(executionsBuilder.build(uriInfo, resourceRequest, exec)).build();
}
Also used : Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution) ResourceRequest(io.hops.hopsworks.common.api.ResourceRequest) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) JWTRequired(io.hops.hopsworks.jwt.annotation.JWTRequired) ApiOperation(io.swagger.annotations.ApiOperation) ApiKeyRequired(io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles) PUT(javax.ws.rs.PUT)
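From the client side this endpoint is a PUT against the execution's status sub-resource. The sketch below is a hedged illustration only: the base URL, the full resource path, the Authorization header format, and the JSON shape of the Status payload are assumptions not confirmed by this page; only the {id}/status suffix, the API-key scope (ApiScope.JOB), and the 202 Accepted response carrying an ExecutionDTO come from the code above.

// Illustrative JAX-RS client call; everything marked "assumed" below is a guess.
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class StopExecutionClient {

    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        try {
            Response response = client
                .target("https://hopsworks.example.com/hopsworks-api/api")  // assumed base URL
                .path("project/119/jobs/myjob/executions/42/status")        // assumed full resource path
                .request(MediaType.APPLICATION_JSON)
                .header("Authorization", "ApiKey <key>")                    // assumed header format
                .put(Entity.json("{\"status\": \"stopped\"}"));             // assumed payload shape
            // stopExecution responds with 202 Accepted and an ExecutionDTO body.
            System.out.println(response.getStatus() + ": " + response.readEntity(String.class));
        } finally {
            client.close();
        }
    }
}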

Example 14 with Execution

use of io.hops.hopsworks.persistence.entity.jobs.history.Execution in project hopsworks by logicalclocks.

the class ExecutionFacade method create.

public Execution create(Jobs job, Users user, JobState state, String stdoutPath, String stderrPath, JobFinalStatus finalStatus, float progress, String hdfsUser, String args) {
    // Check if state is ok
    if (state == null) {
        state = JobState.INITIALIZING;
    }
    if (finalStatus == null) {
        finalStatus = JobFinalStatus.UNDEFINED;
    }
    // Create new object
    Execution exec = new Execution(state, job, user, new java.util.Date(), stdoutPath, stderrPath, finalStatus, progress, hdfsUser, args);
    // And persist it
    em.persist(exec);
    em.flush();
    return exec;
}
Also used : Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution) Date(java.util.Date)
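create normalizes its nullable enum parameters (JobState defaults to INITIALIZING, JobFinalStatus to UNDEFINED) before constructing the entity, and the em.flush() after em.persist() forces the INSERT so the generated id is usable immediately. The defaulting step on its own can be written with Optional; the sketch below uses local stand-in enums rather than the Hopsworks types.

import java.util.Optional;

public class Defaults {

    // Stand-in enums for illustration only; not the Hopsworks JobState/JobFinalStatus.
    enum JobState { INITIALIZING, RUNNING, FINISHED }
    enum JobFinalStatus { UNDEFINED, SUCCEEDED, FAILED }

    // Same defaulting as in create(), expressed with Optional.
    static JobState stateOrDefault(JobState state) {
        return Optional.ofNullable(state).orElse(JobState.INITIALIZING);
    }

    static JobFinalStatus finalStatusOrDefault(JobFinalStatus status) {
        return Optional.ofNullable(status).orElse(JobFinalStatus.UNDEFINED);
    }

    public static void main(String[] args) {
        System.out.println(stateOrDefault(null));                        // INITIALIZING
        System.out.println(finalStatusOrDefault(JobFinalStatus.SUCCEEDED));
    }
}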

Example 15 with Execution

use of io.hops.hopsworks.persistence.entity.jobs.history.Execution in project hopsworks by logicalclocks.

the class ExecutionFacade method getExecution.

private Execution getExecution(Execution exec) {
    // Find the updated execution object
    Execution obj = em.find(Execution.class, exec.getId());
    int count = 0;
    while (obj == null && count < 10) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException ex) {
            logger.log(Level.SEVERE, null, ex);
        }
        logger.info("Trying to get the Execution Object");
        obj = em.find(Execution.class, exec.getId());
        count++;
    }
    if (obj == null) {
        throw new IllegalStateException("Unable to find Execution object with id " + exec.getId());
    }
    return obj;
}
Also used : Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution)
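getExecution is a retry-until-non-null lookup: it re-runs em.find up to ten times, one second apart, to tolerate the window between an execution being requested and its row becoming visible, and fails with IllegalStateException otherwise. The same shape extracted into a reusable helper is sketched below; the names are mine, not Hopsworks'.

import java.util.function.Supplier;

public class RetryingLookup {

    /**
     * Calls the supplier until it returns a non-null value or maxAttempts is reached.
     * Throws IllegalStateException if nothing was found, mirroring getExecution.
     */
    public static <T> T findWithRetry(Supplier<T> lookup, int maxAttempts, long sleepMillis) {
        T result = lookup.get();
        int attempts = 1;
        while (result == null && attempts < maxAttempts) {
            try {
                Thread.sleep(sleepMillis);
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();   // preserve the interrupt flag
            }
            result = lookup.get();
            attempts++;
        }
        if (result == null) {
            throw new IllegalStateException("Unable to find object after " + maxAttempts + " attempts");
        }
        return result;
    }
}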

Aggregations

Execution (io.hops.hopsworks.persistence.entity.jobs.history.Execution): 17 uses
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 6 uses
ApiKeyRequired (io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired): 6 uses
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 6 uses
ApiOperation (io.swagger.annotations.ApiOperation): 6 uses
Path (javax.ws.rs.Path): 5 uses
Produces (javax.ws.rs.Produces): 5 uses
JobException (io.hops.hopsworks.exceptions.JobException): 4 uses
IOException (java.io.IOException): 4 uses
ResourceRequest (io.hops.hopsworks.common.api.ResourceRequest): 3 uses
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 3 uses
JobLogDTO (io.hops.hopsworks.common.jobs.JobLogDTO): 2 uses
YarnClientWrapper (io.hops.hopsworks.common.yarn.YarnClientWrapper): 2 uses
GenericException (io.hops.hopsworks.exceptions.GenericException): 2 uses
SparkJobConfiguration (io.hops.hopsworks.persistence.entity.jobs.configuration.spark.SparkJobConfiguration): 2 uses
Users (io.hops.hopsworks.persistence.entity.user.Users): 2 uses
TransactionAttribute (javax.ejb.TransactionAttribute): 2 uses
GET (javax.ws.rs.GET): 2 uses
POST (javax.ws.rs.POST): 2 uses
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2 uses