
Example 91 with Job

use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

the class SchedulerServiceTest method testGetContentCleanerJobException.

@Test
public void testGetContentCleanerJobException() throws Exception {
    IJobFilter jobFilter = mock(IJobFilter.class);
    List<Job> jobs = new ArrayList<>();
    IPentahoSession session = mock(IPentahoSession.class);
    doReturn(session).when(schedulerService).getSession();
    doReturn("sessionName").when(session).getName();
    doReturn(true).when(schedulerService.policy).isAllowed(AdministerSecurityAction.NAME);
    doReturn(jobFilter).when(schedulerService).getJobFilter(anyBoolean(), nullable(String.class));
    doThrow(new SchedulerException("")).when(schedulerService.scheduler).getJobs(any(IJobFilter.class));
    try {
        schedulerService.getContentCleanerJob();
        fail();
    } catch (SchedulerException e) {
        // expected: the scheduler failure propagates out of getContentCleanerJob
    }
    verify(schedulerService).getSession();
    verify(session).getName();
    verify(schedulerService.policy).isAllowed(AdministerSecurityAction.NAME);
    verify(schedulerService.scheduler).getJobs(any(IJobFilter.class));
}
Also used: IJobFilter(org.pentaho.platform.api.scheduler2.IJobFilter), SchedulerException(org.pentaho.platform.api.scheduler2.SchedulerException), IPentahoSession(org.pentaho.platform.api.engine.IPentahoSession), ArrayList(java.util.ArrayList), Job(org.pentaho.platform.api.scheduler2.Job), Test(org.junit.Test)
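
For contrast, a happy-path sketch of the same fixture: the throwing stub is swapped for an empty job list so getContentCleanerJob completes normally. This is a minimal sketch assuming the same partially mocked schedulerService; the method name and the no-assertion shape are illustrative, not taken from the source.

@Test
public void testGetContentCleanerJobNoJobs() throws Exception {
    IJobFilter jobFilter = mock(IJobFilter.class);
    IPentahoSession session = mock(IPentahoSession.class);
    doReturn(session).when(schedulerService).getSession();
    doReturn("sessionName").when(session).getName();
    doReturn(true).when(schedulerService.policy).isAllowed(AdministerSecurityAction.NAME);
    doReturn(jobFilter).when(schedulerService).getJobFilter(anyBoolean(), nullable(String.class));
    // Return an empty list instead of throwing, so no SchedulerException is expected.
    doReturn(new ArrayList<Job>()).when(schedulerService.scheduler).getJobs(any(IJobFilter.class));
    schedulerService.getContentCleanerJob();
    verify(schedulerService.scheduler).getJobs(any(IJobFilter.class));
}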

Example 92 with Job

use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

the class SolutionImportHandler method importSchedules.

protected void importSchedules(List<JobScheduleRequest> scheduleList) throws PlatformImportException {
    if (CollectionUtils.isNotEmpty(scheduleList)) {
        SchedulerResource schedulerResource = new SchedulerResource();
        schedulerResource.pause();
        for (JobScheduleRequest jobScheduleRequest : scheduleList) {
            boolean jobExists = false;
            List<Job> jobs = getAllJobs(schedulerResource);
            if (jobs != null) {
                // copy the request's parameters into a Map<String, Serializable> for the lineage-id lookup below
                Map<String, Serializable> mapParamsRequest = new HashMap<>();
                for (JobScheduleParam paramRequest : jobScheduleRequest.getJobParameters()) {
                    mapParamsRequest.put(paramRequest.getName(), paramRequest.getValue());
                }
                for (Job job : jobs) {
                    if ((mapParamsRequest.get(RESERVEDMAPKEY_LINEAGE_ID) != null) && (mapParamsRequest.get(RESERVEDMAPKEY_LINEAGE_ID).equals(job.getJobParams().get(RESERVEDMAPKEY_LINEAGE_ID)))) {
                        jobExists = true;
                    }
                    if (overwriteFile && jobExists) {
                        JobRequest jobRequest = new JobRequest();
                        jobRequest.setJobId(job.getJobId());
                        schedulerResource.removeJob(jobRequest);
                        jobExists = false;
                        break;
                    }
                }
            }
            if (!jobExists) {
                try {
                    Response response = createSchedulerJob(schedulerResource, jobScheduleRequest);
                    if (response.getStatus() == Response.Status.OK.getStatusCode()) {
                        if (response.getEntity() != null) {
                            // get the schedule job id from the response and add it to the import session
                            ImportSession.getSession().addImportedScheduleJobId(response.getEntity().toString());
                        }
                    }
                } catch (Exception e) {
                    // if the input or output path contains spaces, retry with the spaces replaced by underscores
                    if (jobScheduleRequest.getInputFile().contains(" ") || jobScheduleRequest.getOutputFile().contains(" ")) {
                        getLogger().info(Messages.getInstance().getString("SolutionImportHandler.SchedulesWithSpaces", jobScheduleRequest.getInputFile()));
                        File inFile = new File(jobScheduleRequest.getInputFile());
                        File outFile = new File(jobScheduleRequest.getOutputFile());
                        String inputFileName = inFile.getParent() + RepositoryFile.SEPARATOR + inFile.getName().replace(" ", "_");
                        String outputFileName = outFile.getParent() + RepositoryFile.SEPARATOR + outFile.getName().replace(" ", "_");
                        jobScheduleRequest.setInputFile(inputFileName);
                        jobScheduleRequest.setOutputFile(outputFileName);
                        try {
                            if (!File.separator.equals(RepositoryFile.SEPARATOR)) {
                                // on windows systems, the backslashes will result in the file not being found in the repository
                                jobScheduleRequest.setInputFile(inputFileName.replace(File.separator, RepositoryFile.SEPARATOR));
                                jobScheduleRequest.setOutputFile(outputFileName.replace(File.separator, RepositoryFile.SEPARATOR));
                            }
                            Response response = createSchedulerJob(schedulerResource, jobScheduleRequest);
                            if (response.getStatus() == Response.Status.OK.getStatusCode()) {
                                if (response.getEntity() != null) {
                                    // get the schedule job id from the response and add it to the import session
                                    ImportSession.getSession().addImportedScheduleJobId(response.getEntity().toString());
                                }
                            }
                        } catch (Exception ex) {
                            // log it and keep going; we shouldn't stop processing all schedules just because one fails.
                            getLogger().error(Messages.getInstance().getString("SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", e.getMessage()), ex);
                        }
                    } else {
                        // log it and keep going; we shouldn't stop processing all schedules just because one fails.
                        getLogger().error(Messages.getInstance().getString("SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", e.getMessage()));
                    }
                }
            } else {
                getLogger().info(Messages.getInstance().getString("DefaultImportHandler.ERROR_0009_OVERWRITE_CONTENT", jobScheduleRequest.toString()));
            }
        }
        schedulerResource.start();
    }
}
Also used: JobScheduleParam(org.pentaho.platform.web.http.api.resources.JobScheduleParam), Serializable(java.io.Serializable), HashMap(java.util.HashMap), AlreadyExistsException(org.pentaho.platform.api.engine.security.userroledao.AlreadyExistsException), DomainStorageException(org.pentaho.metadata.repository.DomainStorageException), DomainIdNullException(org.pentaho.metadata.repository.DomainIdNullException), DomainAlreadyExistsException(org.pentaho.metadata.repository.DomainAlreadyExistsException), IOException(java.io.IOException), Response(javax.ws.rs.core.Response), JobRequest(org.pentaho.platform.web.http.api.resources.JobRequest), SchedulerResource(org.pentaho.platform.web.http.api.resources.SchedulerResource), Job(org.pentaho.platform.api.scheduler2.Job), JobScheduleRequest(org.pentaho.platform.web.http.api.resources.JobScheduleRequest), ManifestFile(org.pentaho.platform.plugin.services.importexport.ImportSession.ManifestFile), RepositoryFile(org.pentaho.platform.api.repository2.unified.RepositoryFile), File(java.io.File)
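
The space-handling retry above rewrites the input and output paths identically, so it could be factored into one helper. Below is a minimal sketch under the same RepositoryFile.SEPARATOR convention; the name sanitizeFilePath is hypothetical, and, like the original, it does not guard against a null parent directory.

private static String sanitizeFilePath(String path) {
    File file = new File(path);
    // Replace spaces in the file name with underscores, mirroring the retry above.
    String sanitized = file.getParent() + RepositoryFile.SEPARATOR + file.getName().replace(" ", "_");
    if (!File.separator.equals(RepositoryFile.SEPARATOR)) {
        // On Windows, backslashes would keep the file from being found in the repository.
        sanitized = sanitized.replace(File.separator, RepositoryFile.SEPARATOR);
    }
    return sanitized;
}

The retry branch would then reduce to jobScheduleRequest.setInputFile(sanitizeFilePath(jobScheduleRequest.getInputFile())) plus the matching call for the output file.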

Example 93 with Job

use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

the class SchedulerResourceTest method testAddBlockout.

@Test
public void testAddBlockout() throws Exception {
    JobScheduleRequest mockJobScheduleRequest = mock(JobScheduleRequest.class);
    Job mockJob = mock(Job.class);
    doReturn(mockJob).when(schedulerResource.schedulerService).addBlockout(mockJobScheduleRequest);
    String jobId = "jobId";
    doReturn(jobId).when(mockJob).getJobId();
    Response mockJobResponse = mock(Response.class);
    doReturn(mockJobResponse).when(schedulerResource).buildPlainTextOkResponse(jobId);
    Response testResponse = schedulerResource.addBlockout(mockJobScheduleRequest);
    assertEquals(mockJobResponse, testResponse);
    verify(schedulerResource.schedulerService, times(1)).addBlockout(mockJobScheduleRequest);
    verify(mockJob, times(1)).getJobId();
    verify(schedulerResource, times(1)).buildPlainTextOkResponse(jobId);
}
Also used: Response(javax.ws.rs.core.Response), Job(org.pentaho.platform.api.scheduler2.Job), Test(org.junit.Test)
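
Read back from the stubs and verifications, the method under test has roughly the following shape. This is an inference from the test, not the actual SchedulerResource source, and error handling is omitted.

public Response addBlockout(JobScheduleRequest jobScheduleRequest) {
    // Delegate to the service, then wrap the new job's id in a plain-text 200 response.
    Job job = schedulerService.addBlockout(jobScheduleRequest);
    return buildPlainTextOkResponse(job.getJobId());
}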

Example 94 with Job

use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

the class SchedulerResourceTest method testUpdateBlockout.

@Test
public void testUpdateBlockout() throws Exception {
    String jobId = "jobId";
    JobScheduleRequest mockJobScheduleRequest = mock(JobScheduleRequest.class);
    doReturn(true).when(schedulerResource.schedulerService).isScheduleAllowed();
    JobRequest mockJobRequest = mock(JobRequest.class);
    doReturn(mockJobRequest).when(schedulerResource).getJobRequest();
    Job mockJob = mock(Job.class);
    doReturn(mockJob).when(schedulerResource.schedulerService).updateBlockout(jobId, mockJobScheduleRequest);
    doReturn(jobId).when(mockJob).getJobId();
    Response mockResponse = mock(Response.class);
    doReturn(mockResponse).when(schedulerResource).buildPlainTextOkResponse(jobId);
    Response testResponse = schedulerResource.updateBlockout(jobId, mockJobScheduleRequest);
    assertEquals(mockResponse, testResponse);
    verify(schedulerResource.schedulerService, times(1)).updateBlockout(jobId, mockJobScheduleRequest);
    verify(mockJob, times(1)).getJobId();
}
Also used: Response(javax.ws.rs.core.Response), Job(org.pentaho.platform.api.scheduler2.Job), Test(org.junit.Test)
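
The extra stubs here (isScheduleAllowed, getJobRequest) point at a permission check before the update. The sketch below is inferred from the test; the unauthorized branch is an assumption, since the test only exercises the permitted path, and the role of getJobRequest is not shown.

public Response updateBlockout(String blockoutId, JobScheduleRequest jobScheduleRequest) {
    if (!schedulerService.isScheduleAllowed()) {
        // Assumed guard: reject callers without scheduling permission.
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }
    Job job = schedulerService.updateBlockout(blockoutId, jobScheduleRequest);
    return buildPlainTextOkResponse(job.getJobId());
}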

Example 95 with Job

use of org.pentaho.platform.api.scheduler2.Job in project pentaho-platform by pentaho.

the class SchedulerResourceTest method testCreateJob.

@Test
public void testCreateJob() throws Exception {
    JobScheduleRequest mockRequest = mock(JobScheduleRequest.class);
    Job mockJob = mock(Job.class);
    doReturn(mockJob).when(schedulerResource.schedulerService).createJob(mockRequest);
    String jobId = "jobId";
    doReturn(jobId).when(mockJob).getJobId();
    Response mockResponse = mock(Response.class);
    doReturn(mockResponse).when(schedulerResource).buildPlainTextOkResponse(jobId);
    Response testResponse = schedulerResource.createJob(mockRequest);
    assertEquals(mockResponse, testResponse);
    verify(schedulerResource.schedulerService, times(1)).createJob(mockRequest);
    verify(mockJob, times(1)).getJobId();
    verify(schedulerResource, times(1)).buildPlainTextOkResponse(jobId);
}
Also used: Response(javax.ws.rs.core.Response), Job(org.pentaho.platform.api.scheduler2.Job), Test(org.junit.Test)
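
The three SchedulerResourceTest methods above repeat the same stub-call-verify shape. A hedged refactoring sketch follows; the helper name expectPlainTextOkFor is hypothetical.

private Response expectPlainTextOkFor(Job mockJob, String jobId) {
    // Stub the mock job's id and the resource's plain-text wrapper, returning
    // the canned Response the production code is expected to pass through.
    doReturn(jobId).when(mockJob).getJobId();
    Response mockResponse = mock(Response.class);
    doReturn(mockResponse).when(schedulerResource).buildPlainTextOkResponse(jobId);
    return mockResponse;
}

With this helper, testCreateJob shrinks to stubbing createJob, calling the resource, and asserting the returned Response matches the helper's canned value.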

Aggregations

Types co-occurring with Job across these examples (usage counts):

Job (org.pentaho.platform.api.scheduler2.Job): 94
Test (org.junit.Test): 69
Serializable (java.io.Serializable): 25
SimpleJobTrigger (org.pentaho.platform.api.scheduler2.SimpleJobTrigger): 21
HashMap (java.util.HashMap): 20
ArrayList (java.util.ArrayList): 19
JobScheduleRequest (org.pentaho.platform.web.http.api.resources.JobScheduleRequest): 19
ComplexJobTrigger (org.pentaho.platform.api.scheduler2.ComplexJobTrigger): 18
SchedulerException (org.pentaho.platform.api.scheduler2.SchedulerException): 17
Date (java.util.Date): 14
IJobFilter (org.pentaho.platform.api.scheduler2.IJobFilter): 14
Job (com.google.cloud.video.transcoder.v1.Job): 13
TranscoderServiceClient (com.google.cloud.video.transcoder.v1.TranscoderServiceClient): 13
IPentahoSession (org.pentaho.platform.api.engine.IPentahoSession): 12
IJobTrigger (org.pentaho.platform.api.scheduler2.IJobTrigger): 12
AudioStream (com.google.cloud.video.transcoder.v1.AudioStream): 8
JobConfig (com.google.cloud.video.transcoder.v1.JobConfig): 8
VideoStream (com.google.cloud.video.transcoder.v1.VideoStream): 8
Map (java.util.Map): 8
CronJobTrigger (org.pentaho.platform.api.scheduler2.CronJobTrigger): 8