Search in sources :

Example 1 with JobException

use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.

In the class JupyterService, the method startNotebookServer.

@POST
@Path("/start")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@AllowedProjectRoles({ AllowedProjectRoles.DATA_OWNER, AllowedProjectRoles.DATA_SCIENTIST })
@JWTRequired(acceptedTokens = { Audience.API }, allowedUserRoles = { "HOPS_ADMIN", "HOPS_USER" })
public Response startNotebookServer(JupyterSettings jupyterSettings, @Context HttpServletRequest req, @Context SecurityContext sc, @Context UriInfo uriInfo) throws ProjectException, HopsSecurityException, ServiceException, GenericException, JobException {
    Users hopsworksUser = jWTHelper.getUserPrincipal(sc);
    String hdfsUser = hdfsUsersController.getHdfsUserName(project, hopsworksUser);
    // Set the current user if it was not sent from the front-end
    if (jupyterSettings.getUsers() == null) {
        jupyterSettings.setUsers(hopsworksUser);
    }
    if (project.getPaymentType().equals(PaymentType.PREPAID)) {
        YarnProjectsQuota projectQuota = yarnProjectsQuotaFacade.findByProjectName(project.getName());
        if (projectQuota == null || projectQuota.getQuotaRemaining() <= 0) {
            throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_QUOTA_ERROR, Level.FINE);
        }
    }
    if (project.getPythonEnvironment() == null) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.ANACONDA_NOT_ENABLED, Level.FINE);
    }
    if (jupyterSettings.getMode() == null) {
        // set default mode for jupyter if mode is null
        jupyterSettings.setMode(JupyterMode.JUPYTER_LAB);
    }
    // Jupyter Git works only for JupyterLab
    if (jupyterSettings.isGitBackend() && jupyterSettings.getMode().equals(JupyterMode.JUPYTER_CLASSIC)) {
        throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.FINE, "Git support available only in JupyterLab");
    }
    // Do not allow auto push on shutdown if api key is missing
    GitConfig gitConfig = jupyterSettings.getGitConfig();
    if (jupyterSettings.isGitBackend() && gitConfig.getShutdownAutoPush() && Strings.isNullOrEmpty(gitConfig.getApiKeyName())) {
        throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.FINE, "Auto push not supported if api key is not configured.");
    }
    // Verify that the API token has write access on the repo if shutdownAutoPush is enabled
    if (jupyterSettings.isGitBackend() && gitConfig.getShutdownAutoPush() && !jupyterNbVCSController.hasWriteAccess(hopsworksUser, gitConfig.getApiKeyName(), gitConfig.getRemoteGitURL(), gitConfig.getGitBackend())) {
        throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.FINE, "API token " + gitConfig.getApiKeyName() + " does not have write access on " + gitConfig.getRemoteGitURL());
    }
    JupyterProject jp = jupyterFacade.findByUser(hdfsUser);
    if (jp == null) {
        HdfsUsers user = hdfsUsersFacade.findByName(hdfsUser);
        String configSecret = DigestUtils.sha256Hex(Integer.toString(ThreadLocalRandom.current().nextInt()));
        JupyterDTO dto = null;
        DistributedFileSystemOps dfso = dfsService.getDfsOps();
        String allowOriginHost = uriInfo.getBaseUri().getHost();
        int allowOriginPort = uriInfo.getBaseUri().getPort();
        String allowOriginPortStr = allowOriginPort != -1 ? ":" + allowOriginPort : "";
        String allowOrigin = settings.getJupyterOriginScheme() + "://" + allowOriginHost + allowOriginPortStr;
        try {
            jupyterSettingsFacade.update(jupyterSettings);
            // Inspect dependencies
            sparkController.inspectDependencies(project, hopsworksUser, (SparkJobConfiguration) jupyterSettings.getJobConfig());
            dto = jupyterManager.startJupyterServer(project, configSecret, hdfsUser, hopsworksUser, jupyterSettings, allowOrigin);
            jupyterJWTManager.materializeJWT(hopsworksUser, project, jupyterSettings, dto.getCid(), dto.getPort(), JUPYTER_JWT_AUD);
            HopsUtils.materializeCertificatesForUserCustomDir(project.getName(), user.getUsername(), settings.getHdfsTmpCertDir(), dfso, certificateMaterializer, settings, dto.getCertificatesDir());
            jupyterManager.waitForStartup(project, hopsworksUser);
        } catch (ServiceException | TimeoutException ex) {
            if (dto != null) {
                jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
            }
            throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.SEVERE, ex.getMessage(), null, ex);
        } catch (IOException ex) {
            if (dto != null) {
                jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
            }
            throw new HopsSecurityException(RESTCodes.SecurityErrorCode.CERT_MATERIALIZATION_ERROR, Level.SEVERE, ex.getMessage(), null, ex);
        } finally {
            if (dfso != null) {
                dfsService.closeDfsClient(dfso);
            }
        }
        String externalIp = Ip.getHost(req.getRequestURL().toString());
        try {
            Date expirationDate = new Date();
            Calendar cal = Calendar.getInstance();
            cal.setTime(expirationDate);
            cal.add(Calendar.HOUR_OF_DAY, jupyterSettings.getShutdownLevel());
            expirationDate = cal.getTime();
            jp = jupyterFacade.saveServer(externalIp, project, configSecret, dto.getPort(), user.getId(), dto.getToken(), dto.getCid(), expirationDate, jupyterSettings.isNoLimit());
            // set minutes left until notebook server is killed
            Duration durationLeft = Duration.between(new Date().toInstant(), jp.getExpires().toInstant());
            jp.setMinutesUntilExpiration(durationLeft.toMinutes());
        } catch (Exception e) {
            LOGGER.log(Level.SEVERE, "Failed to save Jupyter notebook settings", e);
            jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
        }
        if (jp == null) {
            throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_SAVE_SETTINGS_ERROR, Level.SEVERE);
        }
        if (jupyterSettings.isGitBackend()) {
            try {
                // Init is idempotent, calling it on an already initialized repo won't affect it
                jupyterNbVCSController.init(jp, jupyterSettings);
                if (jupyterSettings.getGitConfig().getStartupAutoPull()) {
                    jupyterNbVCSController.pull(jp, jupyterSettings);
                }
            } catch (ServiceException ex) {
                jupyterController.shutdownQuietly(project, hdfsUser, hopsworksUser, configSecret, dto.getCid(), dto.getPort());
                throw ex;
            }
        }
    } else {
        throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_SERVER_ALREADY_RUNNING, Level.FINE);
    }
    return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(jp).build();
}
Also used : DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) Calendar(java.util.Calendar) JupyterProject(io.hops.hopsworks.persistence.entity.jupyter.JupyterProject) Duration(java.time.Duration) HdfsUsers(io.hops.hopsworks.persistence.entity.hdfs.user.HdfsUsers) Users(io.hops.hopsworks.persistence.entity.user.Users) IOException(java.io.IOException) Date(java.util.Date) TimeoutException(java.util.concurrent.TimeoutException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) JobException(io.hops.hopsworks.exceptions.JobException) GenericException(io.hops.hopsworks.exceptions.GenericException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) ElasticException(io.hops.hopsworks.exceptions.ElasticException) ServiceException(io.hops.hopsworks.exceptions.ServiceException) GitConfig(io.hops.hopsworks.persistence.entity.jupyter.config.GitConfig) YarnProjectsQuota(io.hops.hopsworks.persistence.entity.jobs.quota.YarnProjectsQuota) JupyterDTO(io.hops.hopsworks.common.dao.jupyter.config.JupyterDTO) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST) Consumes(javax.ws.rs.Consumes) Produces(javax.ws.rs.Produces) JWTRequired(io.hops.hopsworks.jwt.annotation.JWTRequired) AllowedProjectRoles(io.hops.hopsworks.api.filter.AllowedProjectRoles)
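
The cleanup-on-failure pattern in this endpoint (each catch block calls shutdownQuietly on the partially started server before rethrowing) can be isolated into a small sketch. The Manager interface, ServerHandle, and startOrCleanUp below are hypothetical placeholders, not Hopsworks APIs; they only assume that startup returns a handle and that shutdown never throws.

// Minimal sketch of the start-then-clean-up-on-failure pattern above.
// All names here are illustrative, not part of Hopsworks.
public final class StartWithCleanup {

    static final class ServerHandle {
        final String cid;
        final int port;
        ServerHandle(String cid, int port) {
            this.cid = cid;
            this.port = port;
        }
    }

    interface Manager {
        ServerHandle startServer() throws Exception;
        // further setup that may fail after resources were already allocated
        void finishStartup(ServerHandle handle) throws Exception;
        // must never throw; mirrors jupyterController.shutdownQuietly
        void shutdownQuietly(ServerHandle handle);
    }

    static ServerHandle startOrCleanUp(Manager manager) throws Exception {
        ServerHandle handle = null;
        try {
            handle = manager.startServer();
            manager.finishStartup(handle);
            return handle;
        } catch (Exception ex) {
            if (handle != null) {
                // release what was already started before propagating the failure
                manager.shutdownQuietly(handle);
            }
            throw ex;
        }
    }
}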

Example 2 with JobException

use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.

In the class ProjectService, the method example.

@POST
@Path("starterProject/{type}")
@Produces(MediaType.APPLICATION_JSON)
public Response example(@PathParam("type") String type, @Context HttpServletRequest req, @Context SecurityContext sc) throws DatasetException, GenericException, KafkaException, ProjectException, UserException, ServiceException, HopsSecurityException, FeaturestoreException, JobException, IOException, ElasticException, SchemaException, ProvenanceException {
    TourProjectType demoType;
    try {
        demoType = TourProjectType.fromString(type);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
    }
    ProjectDTO projectDTO = new ProjectDTO();
    Project project = null;
    projectDTO.setDescription("A demo project for getting started with " + demoType.getDescription());
    Users user = jWTHelper.getUserPrincipal(sc);
    String username = user.getUsername();
    List<String> projectServices = new ArrayList<>();
    // save the project
    String readMeMessage = null;
    switch(demoType) {
        case KAFKA:
            // It's a Kafka guide
            projectDTO.setProjectName("demo_" + TourProjectType.KAFKA.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.KAFKA);
            readMeMessage = "jar file to demonstrate Kafka streaming";
            break;
        case SPARK:
            // It's a Spark guide
            projectDTO.setProjectName("demo_" + TourProjectType.SPARK.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.SPARK);
            readMeMessage = "jar file to demonstrate the creation of a spark batch job";
            break;
        case FS:
            // It's a Featurestore guide
            projectDTO.setProjectName("demo_" + TourProjectType.FS.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.FS);
            readMeMessage = "Dataset containing a jar file and data that can be used to run a sample spark-job for " + "inserting data in the feature store.";
            break;
        case ML:
            // It's a TensorFlow guide
            projectDTO.setProjectName("demo_" + TourProjectType.ML.getTourName() + "_" + username);
            populateActiveServices(projectServices, TourProjectType.ML);
            readMeMessage = "Jupyter notebooks and training data for demonstrating how to run Deep Learning";
            break;
        default:
            throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(TourProjectType.values()));
    }
    projectDTO.setServices(projectServices);
    DistributedFileSystemOps dfso = null;
    DistributedFileSystemOps udfso = null;
    try {
        project = projectController.createProject(projectDTO, user, req.getSession().getId());
        dfso = dfs.getDfsOps();
        username = hdfsUsersBean.getHdfsUserName(project, user);
        udfso = dfs.getDfsOps(username);
        ProvTypeDTO projectMetaStatus = fsProvenanceController.getProjectProvType(user, project);
        String tourFilesDataset = projectController.addTourFilesToProject(user.getEmail(), project, dfso, dfso, demoType, projectMetaStatus);
        // TestJob dataset
        datasetController.generateReadme(udfso, tourFilesDataset, readMeMessage, project.getName());
    } catch (Exception ex) {
        projectController.cleanup(project, req.getSession().getId());
        throw ex;
    } finally {
        if (dfso != null) {
            dfso.close();
        }
        if (udfso != null) {
            dfs.closeDfsClient(udfso);
        }
    }
    return noCacheResponse.getNoCacheResponseBuilder(Response.Status.CREATED).entity(project).build();
}
Also used : TourProjectType(io.hops.hopsworks.common.project.TourProjectType) ProjectDTO(io.hops.hopsworks.common.project.ProjectDTO) Project(io.hops.hopsworks.persistence.entity.project.Project) DistributedFileSystemOps(io.hops.hopsworks.common.hdfs.DistributedFileSystemOps) ArrayList(java.util.ArrayList) Users(io.hops.hopsworks.persistence.entity.user.Users) ProvTypeDTO(io.hops.hopsworks.common.provenance.core.dto.ProvTypeDTO) DatasetException(io.hops.hopsworks.exceptions.DatasetException) FeaturestoreException(io.hops.hopsworks.exceptions.FeaturestoreException) ElasticException(io.hops.hopsworks.exceptions.ElasticException) IOException(java.io.IOException) ServiceException(io.hops.hopsworks.exceptions.ServiceException) UserException(io.hops.hopsworks.exceptions.UserException) ExecutionException(java.util.concurrent.ExecutionException) ProjectException(io.hops.hopsworks.exceptions.ProjectException) JobException(io.hops.hopsworks.exceptions.JobException) GenericException(io.hops.hopsworks.exceptions.GenericException) KafkaException(io.hops.hopsworks.exceptions.KafkaException) HopsSecurityException(io.hops.hopsworks.exceptions.HopsSecurityException) ProvenanceException(io.hops.hopsworks.exceptions.ProvenanceException) SchemaException(io.hops.hopsworks.exceptions.SchemaException) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces)
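
The type parsing at the top of the method (TourProjectType.fromString, rethrown with the list of accepted values) can be shown in isolation. DemoType below is a hypothetical stand-in for TourProjectType, assuming only a case-insensitive lookup by tour name.

import java.util.Arrays;

// Hypothetical stand-in for TourProjectType: look the enum constant up by tour name
// and report the accepted values when the input does not match.
public enum DemoType {
    KAFKA("kafka"), SPARK("spark"), FS("fs"), ML("ml");

    private final String tourName;

    DemoType(String tourName) {
        this.tourName = tourName;
    }

    public String getTourName() {
        return tourName;
    }

    public static DemoType fromString(String value) {
        for (DemoType t : values()) {
            if (t.tourName.equalsIgnoreCase(value)) {
                return t;
            }
        }
        throw new IllegalArgumentException("Type must be one of: " + Arrays.toString(values()));
    }
}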

Example 3 with JobException

use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.

In the class LocalHostJupyterProcessMgr, the method startJupyterServer.

@Override
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public JupyterDTO startJupyterServer(Project project, String secretConfig, String hdfsUser, Users user, JupyterSettings js, String allowOrigin) throws ServiceException, JobException {
    String prog = settings.getSudoersDir() + "/jupyter.sh";
    Integer port = ThreadLocalRandom.current().nextInt(40000, 59999);
    JupyterPaths jp = jupyterConfigFilesGenerator.generateConfiguration(project, secretConfig, hdfsUser, user, js, port, allowOrigin);
    String secretDir = settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret();
    String token = TokenGenerator.generateToken(TOKEN_LENGTH);
    String cid = "";
    // The Jupyter Notebook is running at: http://localhost:8888/?token=c8de56fa4deed24899803e93c227592aef6538f93025fe01
    int maxTries = 5;
    // Retry starting the server up to maxTries times; any running server for this user is killed first
    while (maxTries > 0) {
        try {
            // use pidfile to kill any running servers
            ProcessDescriptor processDescriptor = new ProcessDescriptor.Builder()
                .addCommand("/usr/bin/sudo")
                .addCommand(prog)
                .addCommand("start")
                .addCommand(jp.getNotebookPath())
                .addCommand(settings.getHadoopSymbolicLinkDir() + "-" + settings.getHadoopVersion())
                .addCommand(hdfsUser)
                .addCommand(settings.getAnacondaProjectDir())
                .addCommand(port.toString())
                .addCommand(HopsUtils.getJupyterLogName(hdfsUser, port))
                .addCommand(secretDir)
                .addCommand(jp.getCertificatesDir())
                .addCommand(hdfsUser)
                .addCommand(token)
                .addCommand(js.getMode().getValue())
                .addCommand(projectUtils.getFullDockerImageName(project, false))
                .addCommand(Boolean.toString(js.isGitBackend()))
                .redirectErrorStream(true)
                .setCurrentWorkingDirectory(new File(jp.getNotebookPath()))
                .setWaitTimeout(60L, TimeUnit.SECONDS)
                .build();
            String pidfile = jp.getRunDirPath() + "/jupyter.pid";
            ProcessResult processResult = osProcessExecutor.execute(processDescriptor);
            if (processResult.getExitCode() != 0) {
                String errorMsg = "Could not start Jupyter server. Exit code: " + processResult.getExitCode() + " Error: stdout: " + processResult.getStdout() + " stderr: " + processResult.getStderr();
                LOGGER.log(Level.SEVERE, errorMsg);
                throw new IOException(errorMsg);
            }
            // Read the pid for Jupyter Notebook
            cid = com.google.common.io.Files.readFirstLine(new File(pidfile), Charset.defaultCharset());
            return new JupyterDTO(port, token, cid, secretConfig, jp.getCertificatesDir());
        } catch (Exception ex) {
            LOGGER.log(Level.SEVERE, "Problem executing shell script to start Jupyter server", ex);
            maxTries--;
        }
    }
    String errorMsg = "Failed to start Jupyter";
    throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_START_ERROR, Level.SEVERE, errorMsg, errorMsg + " for project " + project);
}
Also used : ServiceException(io.hops.hopsworks.exceptions.ServiceException) JupyterPaths(io.hops.hopsworks.common.dao.jupyter.config.JupyterPaths) URIBuilder(org.apache.http.client.utils.URIBuilder) ProcessResult(io.hops.hopsworks.common.util.ProcessResult) ProcessDescriptor(io.hops.hopsworks.common.util.ProcessDescriptor) IOException(java.io.IOException) File(java.io.File) ClientProtocolException(org.apache.http.client.ClientProtocolException) URISyntaxException(java.net.URISyntaxException) TimeoutException(java.util.concurrent.TimeoutException) FileNotFoundException(java.io.FileNotFoundException) JobException(io.hops.hopsworks.exceptions.JobException) JupyterDTO(io.hops.hopsworks.common.dao.jupyter.config.JupyterDTO) TransactionAttribute(javax.ejb.TransactionAttribute)
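
The bounded retry around the shell invocation reduces to a generic helper: try an operation up to maxTries times, return the first successful result, and fail with a descriptive message once the attempts are exhausted. The Retry class below is illustrative only and not part of Hopsworks.

import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;

// Illustrative bounded-retry helper mirroring the while (maxTries > 0) loop above.
public final class Retry {

    private static final Logger LOGGER = Logger.getLogger(Retry.class.getName());

    public static <T> T retry(Callable<T> attempt, int maxTries, String failureMessage) throws Exception {
        while (maxTries > 0) {
            try {
                return attempt.call();
            } catch (Exception ex) {
                LOGGER.log(Level.SEVERE, "Attempt failed, retrying", ex);
                maxTries--;
            }
        }
        // all attempts exhausted
        throw new Exception(failureMessage);
    }
}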

Example 4 with JobException

use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.

In the class FlinkController, the method startJob.

public Execution startJob(final Jobs job, final Users user) throws GenericException, JobException, ServiceException {
    // First: some parameter checking.
    if (job == null) {
        throw new NullPointerException("Cannot run a null job.");
    } else if (user == null) {
        throw new NullPointerException("Cannot run a job as a null user.");
    } else if (job.getJobType() != JobType.FLINK) {
        throw new IllegalArgumentException("Job configuration is not a Flink job configuration.");
    }
    // Use the Consul service name (not the address) when building the Hopsworks REST endpoint below
    String username = hdfsUsersBean.getHdfsUserName(job.getProject(), user);
    FlinkJob flinkjob = null;
    try {
        String hopsworksRestEndpoint = "https://" + serviceDiscoveryController.constructServiceFQDNWithPort(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
        UserGroupInformation proxyUser = ugiService.getProxyUser(username);
        try {
            flinkjob = proxyUser.doAs((PrivilegedExceptionAction<FlinkJob>) () -> new FlinkJob(job, submitter, user, hdfsUsersBean.getHdfsUserName(job.getProject(), job.getCreator()), settings, kafkaBrokers.getKafkaBrokersString(), hopsworksRestEndpoint, servingConfig, serviceDiscoveryController));
        } catch (InterruptedException ex) {
            LOGGER.log(Level.SEVERE, null, ex);
        }
    } catch (IOException ex) {
        throw new JobException(RESTCodes.JobErrorCode.PROXY_ERROR, Level.SEVERE, "job: " + job.getId() + ", user:" + user.getUsername(), ex.getMessage(), ex);
    } catch (ServiceDiscoveryException ex) {
        throw new ServiceException(RESTCodes.ServiceErrorCode.SERVICE_NOT_FOUND, Level.SEVERE, "job: " + job.getId() + ", user:" + user.getUsername(), ex.getMessage(), ex);
    }
    if (flinkjob == null) {
        throw new GenericException(RESTCodes.GenericErrorCode.UNKNOWN_ERROR, Level.WARNING, "Could not instantiate job with name: " + job.getName() + " and id: " + job.getId(), "flinkjob object was null");
    }
    Execution execution = flinkjob.requestExecutionId();
    submitter.startExecution(flinkjob);
    activityFacade.persistActivity(ActivityFacade.RAN_JOB, job.getProject(), user.asUser(), ActivityFlag.JOB);
    return execution;
}
Also used : PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) IOException(java.io.IOException) GenericException(io.hops.hopsworks.exceptions.GenericException) JobException(io.hops.hopsworks.exceptions.JobException) Execution(io.hops.hopsworks.persistence.entity.jobs.history.Execution) ServiceException(io.hops.hopsworks.exceptions.ServiceException) ServiceDiscoveryException(com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
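
The job is constructed inside UserGroupInformation.doAs so that it runs with the proxied user's Hadoop credentials. A stripped-down sketch of that call shape is below; it assumes the Hadoop client libraries on the classpath and an already-resolved proxy user, and the runAsProxy helper itself is hypothetical.

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

// Sketch of the doAs call shape used above; the helper is not a Hopsworks API.
public final class ProxyRun {

    public static <T> T runAsProxy(UserGroupInformation proxyUser, PrivilegedExceptionAction<T> action)
            throws Exception {
        try {
            // executes the action with the proxied user's credentials
            return proxyUser.doAs(action);
        } catch (InterruptedException ex) {
            // preserve the interrupt flag before propagating
            Thread.currentThread().interrupt();
            throw ex;
        }
    }
}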

Example 5 with JobException

use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.

In the class SparkController, the method inspectProgram.

public SparkJobConfiguration inspectProgram(SparkJobConfiguration existingConfig, String path, DistributedFileSystemOps udfso) throws JobException {
    SparkJobConfiguration sparkConfig = null;
    if (existingConfig == null) {
        sparkConfig = new SparkJobConfiguration();
    } else {
        sparkConfig = existingConfig;
    }
    // If the main program is in a jar, try to set main class from it
    if (path.endsWith(".jar")) {
        try (JarInputStream jis = new JarInputStream(udfso.open(path))) {
            Manifest mf = jis.getManifest();
            if (mf != null) {
                Attributes atts = mf.getMainAttributes();
                if (atts.containsKey(Attributes.Name.MAIN_CLASS)) {
                    sparkConfig.setMainClass(atts.getValue(Attributes.Name.MAIN_CLASS));
                } else {
                    sparkConfig.setMainClass(null);
                }
            }
        } catch (IOException ex) {
            throw new JobException(RESTCodes.JobErrorCode.JAR_INSPECTION_ERROR, Level.SEVERE, "Failed to inspect jar at:" + path, ex.getMessage(), ex);
        }
    } else {
        // The main program is a Python file: only set the experimentType when no existing config was passed, so a user-provided value is not overridden
        if (existingConfig == null) {
            sparkConfig.setExperimentType(ExperimentType.EXPERIMENT);
        }
        sparkConfig.setMainClass(Settings.SPARK_PY_MAINCLASS);
    }
    sparkConfig.setAppPath(path);
    return sparkConfig;
}
Also used : JobException(io.hops.hopsworks.exceptions.JobException) JarInputStream(java.util.jar.JarInputStream) SparkJobConfiguration(io.hops.hopsworks.persistence.entity.jobs.configuration.spark.SparkJobConfiguration) Attributes(java.util.jar.Attributes) IOException(java.io.IOException) Manifest(java.util.jar.Manifest)
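
The jar inspection relies only on the standard library, so a standalone version is easy to sketch: open the jar, read its manifest, and return the Main-Class attribute if present. The JarMainClass helper below is not part of Hopsworks and reads from the local file system rather than HDFS.

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Optional;
import java.util.jar.Attributes;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;

// Standalone sketch of the manifest inspection above, for a jar on the local file system.
public final class JarMainClass {

    public static Optional<String> readMainClass(String jarPath) throws IOException {
        try (JarInputStream jis = new JarInputStream(new FileInputStream(jarPath))) {
            Manifest mf = jis.getManifest();
            if (mf == null) {
                // the jar has no manifest at all
                return Optional.empty();
            }
            Attributes atts = mf.getMainAttributes();
            return Optional.ofNullable(atts.getValue(Attributes.Name.MAIN_CLASS));
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(readMainClass(args[0]).orElse("<no Main-Class>"));
    }
}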

Aggregations

JobException (io.hops.hopsworks.exceptions.JobException): 23
IOException (java.io.IOException): 11
Path (javax.ws.rs.Path): 8
Produces (javax.ws.rs.Produces): 8
AllowedProjectRoles (io.hops.hopsworks.api.filter.AllowedProjectRoles): 7
JWTRequired (io.hops.hopsworks.jwt.annotation.JWTRequired): 7
GenericException (io.hops.hopsworks.exceptions.GenericException): 6
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 6
Users (io.hops.hopsworks.persistence.entity.user.Users): 6
DistributedFileSystemOps (io.hops.hopsworks.common.hdfs.DistributedFileSystemOps): 5
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 4
SparkJobConfiguration (io.hops.hopsworks.persistence.entity.jobs.configuration.spark.SparkJobConfiguration): 4
Execution (io.hops.hopsworks.persistence.entity.jobs.history.Execution): 4
ApiOperation (io.swagger.annotations.ApiOperation): 4
TransactionAttribute (javax.ejb.TransactionAttribute): 4
ApiKeyRequired (io.hops.hopsworks.api.filter.apiKey.ApiKeyRequired): 3
YarnAppUrlsDTO (io.hops.hopsworks.common.dao.jobs.description.YarnAppUrlsDTO): 3
DatasetException (io.hops.hopsworks.exceptions.DatasetException): 3
ArrayList (java.util.ArrayList): 3
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 3