
Example 31 with Job

Use of io.fabric8.kubernetes.api.model.batch.v1.Job in project quick by bakdata.

In the class JobCleanerTest, the method shouldRunWithoutErrorForJobsWithoutStatus:

@Test
void shouldRunWithoutErrorForJobsWithoutStatus() {
    final JobCleaner jobCleaner = new JobCleaner(this.client);
    final KubernetesResources resources = new KubernetesResources();
    // Create a deletion job; at this point it carries no status block yet.
    final Job deletionJob = resources.createDeletionJob("test", "image", List.of("--key", "value"));
    this.kubernetesServer.getClient().batch().v1().jobs().create(deletionJob);
    // The cleaner must tolerate jobs whose status is still null.
    assertThatNoException().isThrownBy(jobCleaner::deleteJobs);
}
Also used : KubernetesResources(com.bakdata.quick.manager.k8s.KubernetesResources) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) Test(org.junit.jupiter.api.Test) KubernetesTest(com.bakdata.quick.manager.k8s.KubernetesTest)
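
The deletion logic itself lives in the quick project, but the shape of such a cleaner is easy to sketch against the plain fabric8 API. The following is a minimal, self-contained sketch, not the real JobCleaner: it assumes a recent fabric8 client (KubernetesClientBuilder) and uses a hypothetical "delete only succeeded jobs" rule. The important detail, which the test above verifies, is tolerating jobs whose status is still null.

import java.util.List;

import io.fabric8.kubernetes.api.model.batch.v1.Job;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientBuilder;

public class JobCleanupSketch {
    public static void main(String[] args) {
        try (KubernetesClient client = new KubernetesClientBuilder().build()) {
            // List all batch/v1 jobs in the current namespace.
            final List<Job> jobs = client.batch().v1().jobs().list().getItems();
            for (final Job job : jobs) {
                // A freshly created job may have no status block at all, so every
                // access must be null-safe; exactly what the test above checks.
                final boolean succeeded = job.getStatus() != null
                    && job.getStatus().getSucceeded() != null
                    && job.getStatus().getSucceeded() > 0;
                if (succeeded) { // hypothetical cleanup rule for illustration
                    client.batch().v1().jobs().withName(job.getMetadata().getName()).delete();
                }
            }
        }
    }
}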

Example 32 with Job

Use of io.pravega.test.system.framework.metronome.model.v1.Job in project pravega by pravega.

In the class RemoteSequential, the method newJob:

private Job newJob(String id, String className, String methodName) {
    Map<String, String> labels = new HashMap<>(1);
    labels.put("testMethodName", methodName);
    // This can be used to set environment variables while executing the job on Metronome.
    Map<String, String> env = new HashMap<>(2);
    env.put("masterIP", System.getProperty("masterIP"));
    env.put("env2", "value102");
    Artifact art = new Artifact();
    // It caches the artifacts, disabling it for now.
    art.setCache(false);
    // jar is not executable.
    art.setExecutable(false);
    art.setExtract(false);
    art.setUri(System.getProperty("testArtifactUrl", "InvalidTestArtifactURL"));
    Restart restart = new Restart();
    // The tests are expected to finish within 2 minutes; raise this deadline if required.
    restart.setActiveDeadlineSeconds(120);
    restart.setPolicy("NEVER");
    Run run = new Run();
    run.setArtifacts(Collections.singletonList(art));
    run.setCmd("docker run --rm -v $(pwd):/data " + System.getProperty("dockerImageRegistry") + "/java:8 java"
        + " -DmasterIP=" + LoginClient.MESOS_MASTER
        + " -DskipServiceInstallation=" + Utils.isSkipServiceInstallationEnabled()
        + " -cp /data/pravega-test-system-" + System.getProperty("testVersion")
        + ".jar io.pravega.test.system.SingleJUnitTestRunner "
        + className + "#" + methodName
        + " > server.log 2>&1; exit $?");
    // CPU shares.
    run.setCpus(0.5);
    // amount of memory required for running test in MB.
    run.setMem(512.0);
    run.setDisk(50.0);
    run.setEnv(env);
    run.setMaxLaunchDelay(3600);
    run.setRestart(restart);
    run.setUser("root");
    Job job = new Job();
    job.setId(id);
    job.setDescription(id);
    job.setLabels(labels);
    job.setRun(run);
    return job;
}
Also used : HashMap(java.util.HashMap) Run(io.pravega.test.system.framework.metronome.model.v1.Run) Restart(io.pravega.test.system.framework.metronome.model.v1.Restart) Job(io.pravega.test.system.framework.metronome.model.v1.Job) Artifact(io.pravega.test.system.framework.metronome.model.v1.Artifact)
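
Once built, a Metronome Job like this is submitted to a Metronome endpoint. The pravega framework wraps this in its own client, but against the public DC/OS Metronome REST API the submission amounts to two calls: POST /v1/jobs to register the definition and POST /v1/jobs/{id}/runs to start it. Below is a minimal sketch using java.net.http; the host URL and the inline job JSON are placeholder assumptions, not values from the project.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class MetronomeSubmitSketch {
    public static void main(String[] args) throws Exception {
        // In practice this would be the serialized form of the Job built by newJob(...).
        final String jobJson =
            "{\"id\":\"test-job\",\"run\":{\"cmd\":\"echo hello\",\"cpus\":0.5,\"mem\":512,\"disk\":50}}";
        final HttpClient http = HttpClient.newHttpClient();
        // Register the job definition (Metronome REST API: POST /v1/jobs).
        final HttpRequest create = HttpRequest.newBuilder()
            .uri(URI.create("http://metronome.example:9000/v1/jobs")) // placeholder host
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(jobJson))
            .build();
        http.send(create, HttpResponse.BodyHandlers.ofString());
        // Trigger one run of the registered job (POST /v1/jobs/{id}/runs).
        final HttpRequest run = HttpRequest.newBuilder()
            .uri(URI.create("http://metronome.example:9000/v1/jobs/test-job/runs"))
            .POST(HttpRequest.BodyPublishers.noBody())
            .build();
        http.send(run, HttpResponse.BodyHandlers.ofString());
    }
}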

Example 33 with Job

Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

In the class EmbeddedVersionCheckSystemListener, the method deleteJobIfNecessary:

protected void deleteJobIfNecessary() throws SchedulerException {
    // $NON-NLS-1$
    IScheduler scheduler = PentahoSystem.get(IScheduler.class, "IScheduler2", null);
    IJobFilter filter = new IJobFilter() {

        public boolean accept(Job job) {
            return job.getJobName().contains(EmbeddedVersionCheckSystemListener.VERSION_CHECK_JOBNAME);
        }
    };
    // Like old code - remove the existing job and replace it
    List<Job> matchingJobs = scheduler.getJobs(filter);
    if (matchingJobs != null && !matchingJobs.isEmpty()) {
        for (Job verCkJob : matchingJobs) {
            scheduler.removeJob(verCkJob.getJobId());
        }
    }
}
Also used : IJobFilter(org.pentaho.platform.api.scheduler2.IJobFilter) Job(org.pentaho.platform.api.scheduler2.Job) IScheduler(org.pentaho.platform.api.scheduler2.IScheduler)
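
If IJobFilter declares only the single accept(Job) method shown above, the anonymous class collapses to a lambda on Java 8+. A sketch under that assumption; the job-name constant below is a stand-in, since the real value of VERSION_CHECK_JOBNAME lives in EmbeddedVersionCheckSystemListener.

import java.util.List;

import org.pentaho.platform.api.scheduler2.IScheduler;
import org.pentaho.platform.api.scheduler2.Job;
import org.pentaho.platform.api.scheduler2.SchedulerException;

public class VersionCheckCleanupSketch {
    // Stand-in for EmbeddedVersionCheckSystemListener.VERSION_CHECK_JOBNAME.
    private static final String VERSION_CHECK_JOBNAME = "VersionCheck";

    // Same behavior as deleteJobIfNecessary above, with the filter as a lambda.
    static void deleteVersionCheckJobs(final IScheduler scheduler) throws SchedulerException {
        final List<Job> matching =
            scheduler.getJobs(job -> job.getJobName().contains(VERSION_CHECK_JOBNAME));
        if (matching != null) {
            for (final Job job : matching) {
                scheduler.removeJob(job.getJobId());
            }
        }
    }
}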

Example 34 with Job

Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

In the class SchedulerService, the method updateJob:

public Job updateJob(JobScheduleRequest scheduleRequest) throws IllegalAccessException, IOException, SchedulerException {
    Job job = getScheduler().getJob(scheduleRequest.getJobId());
    if (job != null) {
        scheduleRequest.getJobParameters().add(new JobScheduleParam(QuartzScheduler.RESERVEDMAPKEY_ACTIONUSER, job.getUserName()));
    }
    Job newJob = createJob(scheduleRequest);
    removeJob(scheduleRequest.getJobId());
    return newJob;
}
Also used : JobScheduleParam(org.pentaho.platform.web.http.api.resources.JobScheduleParam) Job(org.pentaho.platform.api.scheduler2.Job)
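
Worth noting in updateJob: the replacement is created before the old job is removed, so a failure inside createJob leaves the existing schedule untouched. A caller-side sketch follows; the JobScheduleRequest setters and the SchedulerService package are assumptions mirroring the getters read by updateJob and createJob.

import org.pentaho.platform.api.scheduler2.Job;
import org.pentaho.platform.web.http.api.resources.JobScheduleRequest;
// Assumed package for the service class quoted above.
import org.pentaho.platform.web.http.api.resources.services.SchedulerService;

public class UpdateJobSketch {
    static Job reschedule(final SchedulerService schedulerService, final Job existing) throws Exception {
        final JobScheduleRequest request = new JobScheduleRequest();
        // Point the request at the job to replace; updateJob reads getJobId().
        request.setJobId(existing.getJobId());
        request.setJobName(existing.getJobName());
        return schedulerService.updateJob(request);
    }
}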

Example 35 with Job

Use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

In the class SchedulerService, the method createJob:

public Job createJob(JobScheduleRequest scheduleRequest) throws IOException, SchedulerException, IllegalAccessException {
    // Used to determine if created by a RunInBackgroundCommand
    boolean runInBackground = scheduleRequest.getSimpleJobTrigger() == null && scheduleRequest.getComplexJobTrigger() == null && scheduleRequest.getCronJobTrigger() == null;
    if (!runInBackground && !getPolicy().isAllowed(SchedulerAction.NAME)) {
        throw new SecurityException();
    }
    boolean hasInputFile = !StringUtils.isEmpty(scheduleRequest.getInputFile());
    RepositoryFile file = null;
    if (hasInputFile) {
        try {
            file = getRepository().getFile(scheduleRequest.getInputFile());
        } catch (UnifiedRepositoryException ure) {
            hasInputFile = false;
            logger.warn(ure.getMessage(), ure);
        }
    }
    // If we have an input file and no job name was passed in, derive the job name from it.
    if (hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
        // $NON-NLS-1$
        scheduleRequest.setJobName(file.getName().substring(0, file.getName().lastIndexOf(".")));
    } else if (!StringUtils.isEmpty(scheduleRequest.getActionClass())) {
        String actionClass = scheduleRequest.getActionClass().substring(scheduleRequest.getActionClass().lastIndexOf(".") + 1);
        // $NON-NLS-1$
        scheduleRequest.setJobName(actionClass);
    } else if (!hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
        // just make up a name
        // $NON-NLS-1$
        scheduleRequest.setJobName("" + System.currentTimeMillis());
    }
    if (hasInputFile) {
        if (file == null) {
            logger.error("Cannot find input source file " + scheduleRequest.getInputFile() + ". Aborting schedule...");
            throw new SchedulerException(new ServiceException("Cannot find input source file " + scheduleRequest.getInputFile()));
        }
        Map<String, Serializable> metadata = getRepository().getFileMetadata(file.getId());
        if (metadata.containsKey(RepositoryFile.SCHEDULABLE_KEY)) {
            boolean schedulable = BooleanUtils.toBoolean((String) metadata.get(RepositoryFile.SCHEDULABLE_KEY));
            if (!schedulable) {
                throw new IllegalAccessException();
            }
        }
    }
    if (scheduleRequest.getTimeZone() != null) {
        updateStartDateForTimeZone(scheduleRequest);
    }
    Job job = null;
    IJobTrigger jobTrigger = SchedulerResourceUtil.convertScheduleRequestToJobTrigger(scheduleRequest, scheduler);
    HashMap<String, Serializable> parameterMap = new HashMap<>();
    for (JobScheduleParam param : scheduleRequest.getJobParameters()) {
        parameterMap.put(param.getName(), param.getValue());
    }
    if (isPdiFile(file)) {
        parameterMap = handlePDIScheduling(file, parameterMap, scheduleRequest.getPdiParameters());
    }
    parameterMap.put(LocaleHelper.USER_LOCALE_PARAM, LocaleHelper.getLocale());
    if (hasInputFile) {
        SchedulerOutputPathResolver outputPathResolver = getSchedulerOutputPathResolver(scheduleRequest);
        String outputFile = outputPathResolver.resolveOutputFilePath();
        String actionId = SchedulerResourceUtil.resolveActionId(scheduleRequest.getInputFile());
        final String inputFile = scheduleRequest.getInputFile();
        parameterMap.put(ActionUtil.QUARTZ_STREAMPROVIDER_INPUT_FILE, inputFile);
        job = getScheduler().createJob(scheduleRequest.getJobName(), actionId, parameterMap, jobTrigger, new RepositoryFileStreamProvider(inputFile, outputFile, getAutoCreateUniqueFilename(scheduleRequest), getAppendDateFormat(scheduleRequest)));
    } else {
        // need to locate actions from plugins if done this way too (but for now, we're just on main)
        String actionClass = scheduleRequest.getActionClass();
        try {
            @SuppressWarnings("unchecked") Class<IAction> iaction = getAction(actionClass);
            job = getScheduler().createJob(scheduleRequest.getJobName(), iaction, parameterMap, jobTrigger);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    }
    return job;
}
Also used : JobScheduleParam(org.pentaho.platform.web.http.api.resources.JobScheduleParam) Serializable(java.io.Serializable) SchedulerException(org.pentaho.platform.api.scheduler2.SchedulerException) IAction(org.pentaho.platform.api.action.IAction) HashMap(java.util.HashMap) SchedulerOutputPathResolver(org.pentaho.platform.web.http.api.resources.SchedulerOutputPathResolver) ServiceException(org.pentaho.platform.api.engine.ServiceException) RepositoryFileStreamProvider(org.pentaho.platform.web.http.api.resources.RepositoryFileStreamProvider) IJobTrigger(org.pentaho.platform.api.scheduler2.IJobTrigger) UnifiedRepositoryException(org.pentaho.platform.api.repository2.unified.UnifiedRepositoryException) RepositoryFile(org.pentaho.platform.api.repository2.unified.RepositoryFile) Job(org.pentaho.platform.api.scheduler2.Job)
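
The decision tree above hinges on the request object: no trigger at all means a run-in-background job, while a simple, complex, or cron trigger makes it a schedule. Below is a request-building sketch; the setter names and the SimpleJobTrigger constructor shape are assumptions mirroring the getters used in createJob, and the repository path is hypothetical.

import java.util.Date;

import org.pentaho.platform.api.scheduler2.SimpleJobTrigger;
import org.pentaho.platform.web.http.api.resources.JobScheduleParam;
import org.pentaho.platform.web.http.api.resources.JobScheduleRequest;

public class CreateJobRequestSketch {
    static JobScheduleRequest dailyReportRequest() {
        final JobScheduleRequest request = new JobScheduleRequest();
        request.setInputFile("/public/reports/sales.prpt"); // hypothetical repository path
        request.setJobName("DailySalesReport");
        // Assumed constructor shape: (start, end, repeatCount, repeatIntervalSeconds);
        // a repeatCount of -1 is taken to mean "repeat forever".
        request.setSimpleJobTrigger(new SimpleJobTrigger(new Date(), null, -1, 24 * 60 * 60));
        request.getJobParameters().add(new JobScheduleParam("outputFormat", "pdf"));
        return request;
    }
}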

Aggregations

Job (org.pentaho.platform.api.scheduler2.Job) 94
Test (org.junit.Test) 87
Job (io.fabric8.kubernetes.api.model.batch.v1.Job) 38
Serializable (java.io.Serializable) 25
ArrayList (java.util.ArrayList) 24
SimpleJobTrigger (org.pentaho.platform.api.scheduler2.SimpleJobTrigger) 21
Job (com.google.cloud.talent.v4beta1.Job) 20
HashMap (java.util.HashMap) 20
JobScheduleRequest (org.pentaho.platform.web.http.api.resources.JobScheduleRequest) 19
ComplexJobTrigger (org.pentaho.platform.api.scheduler2.ComplexJobTrigger) 18
SchedulerException (org.pentaho.platform.api.scheduler2.SchedulerException) 17
JobServiceClient (com.google.cloud.talent.v4beta1.JobServiceClient) 16
Date (java.util.Date) 14
IJobFilter (org.pentaho.platform.api.scheduler2.IJobFilter) 14
Job (com.google.cloud.video.transcoder.v1.Job) 13
TranscoderServiceClient (com.google.cloud.video.transcoder.v1.TranscoderServiceClient) 13
JobBuilder (io.fabric8.kubernetes.api.model.batch.v1.JobBuilder) 13
IJobTrigger (org.pentaho.platform.api.scheduler2.IJobTrigger) 12
Map (java.util.Map) 11
Test (org.junit.jupiter.api.Test) 10