Example usage of org.pentaho.platform.web.http.api.resources.RepositoryFileStreamProvider in the pentaho-platform project (by pentaho): the createJob method of the SchedulerService class.
/**
 * Creates a scheduler job from the given request, or a run-once "background" job when the
 * request carries no trigger at all.
 *
 * Job-name resolution, input-file validation, and stream-provider wiring all depend on
 * whether the request names a repository input file, so the {@code hasInputFile} flag is
 * threaded through the whole method (and may be cleared if the repository lookup fails).
 *
 * @param scheduleRequest the schedule definition: input file or action class, trigger,
 *                        parameters, time zone, and output options
 * @return the job created by the scheduler
 * @throws SecurityException      if a trigger is present but the user lacks the scheduler permission
 * @throws SchedulerException     if the named input file cannot be found in the repository
 * @throws IllegalAccessException if the input file is flagged as not schedulable
 * @throws IOException            declared for callers; not thrown directly in this body
 */
public Job createJob(JobScheduleRequest scheduleRequest) throws IOException, SchedulerException, IllegalAccessException {
// Used to determine if created by a RunInBackgroundCommand
// No trigger of any kind means "run now in the background" rather than a recurring schedule.
boolean runInBackground = scheduleRequest.getSimpleJobTrigger() == null && scheduleRequest.getComplexJobTrigger() == null && scheduleRequest.getCronJobTrigger() == null;
// Only true scheduling requires the scheduler permission; run-in-background does not.
if (!runInBackground && !getPolicy().isAllowed(SchedulerAction.NAME)) {
throw new SecurityException();
}
boolean hasInputFile = !StringUtils.isEmpty(scheduleRequest.getInputFile());
RepositoryFile file = null;
if (hasInputFile) {
try {
file = getRepository().getFile(scheduleRequest.getInputFile());
} catch (UnifiedRepositoryException ure) {
// Best effort: if the repository lookup blows up, log it and treat the request
// as file-less instead of failing the whole scheduling call.
hasInputFile = false;
logger.warn(ure.getMessage(), ure);
}
}
// if we have an inputfile, generate job name based on that if the name is not passed in
if (hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
// Job name = file name minus its extension.
// NOTE(review): file may still be null here if getFile() returned null without
// throwing — that case would NPE; the explicit null check happens further below.
// $NON-NLS-1$
scheduleRequest.setJobName(file.getName().substring(0, file.getName().lastIndexOf(".")));
} else if (!StringUtils.isEmpty(scheduleRequest.getActionClass())) {
// Job name = the simple (unqualified) action class name.
String actionClass = scheduleRequest.getActionClass().substring(scheduleRequest.getActionClass().lastIndexOf(".") + 1);
// $NON-NLS-1$
scheduleRequest.setJobName(actionClass);
} else if (!hasInputFile && StringUtils.isEmpty(scheduleRequest.getJobName())) {
// just make up a name
// $NON-NLS-1$
scheduleRequest.setJobName("" + System.currentTimeMillis());
}
if (hasInputFile) {
// getFile() can return null without throwing; reject that explicitly.
if (file == null) {
logger.error("Cannot find input source file " + scheduleRequest.getInputFile() + " Aborting schedule...");
throw new SchedulerException(new ServiceException("Cannot find input source file " + scheduleRequest.getInputFile()));
}
// Honor the repository's per-file "schedulable" metadata flag, if present.
Map<String, Serializable> metadata = getRepository().getFileMetadata(file.getId());
if (metadata.containsKey(RepositoryFile.SCHEDULABLE_KEY)) {
boolean schedulable = BooleanUtils.toBoolean((String) metadata.get(RepositoryFile.SCHEDULABLE_KEY));
if (!schedulable) {
throw new IllegalAccessException();
}
}
}
if (scheduleRequest.getTimeZone() != null) {
updateStartDateForTimeZone(scheduleRequest);
}
Job job = null;
IJobTrigger jobTrigger = SchedulerResourceUtil.convertScheduleRequestToJobTrigger(scheduleRequest, scheduler);
// Flatten the request's parameter list into the map the scheduler consumes.
HashMap<String, Serializable> parameterMap = new HashMap<>();
for (JobScheduleParam param : scheduleRequest.getJobParameters()) {
parameterMap.put(param.getName(), param.getValue());
}
// PDI (.ktr/.kjb) content gets its parameters massaged separately.
// NOTE(review): file may be null here when hasInputFile is false — presumably
// isPdiFile handles null; confirm.
if (isPdiFile(file)) {
parameterMap = handlePDIScheduling(file, parameterMap, scheduleRequest.getPdiParameters());
}
parameterMap.put(LocaleHelper.USER_LOCALE_PARAM, LocaleHelper.getLocale());
if (scheduleRequest.getUseWorkerNodes() != null && !scheduleRequest.getUseWorkerNodes().trim().isEmpty()) {
parameterMap.put("useWorkerNodes", scheduleRequest.getUseWorkerNodes().trim());
}
if (hasInputFile) {
// File-based job: resolve the output path and hand the scheduler a stream provider
// that reads the input file and writes to the resolved output location.
SchedulerOutputPathResolver outputPathResolver = getSchedulerOutputPathResolver(scheduleRequest);
String outputFile = outputPathResolver.resolveOutputFilePath();
String actionId = SchedulerResourceUtil.resolveActionId(scheduleRequest.getInputFile());
final String inputFile = scheduleRequest.getInputFile();
parameterMap.put(ActionUtil.QUARTZ_STREAMPROVIDER_INPUT_FILE, inputFile);
job = getScheduler().createJob(scheduleRequest.getJobName(), actionId, parameterMap, jobTrigger, new RepositoryFileStreamProvider(inputFile, outputFile, getAutoCreateUniqueFilename(scheduleRequest), getAppendDateFormat(scheduleRequest)));
} else {
// need to locate actions from plugins if done this way too (but for now, we're just on main)
// Class-based job: load the IAction implementation by name and schedule it directly.
String actionClass = scheduleRequest.getActionClass();
try {
@SuppressWarnings("unchecked") Class<IAction> iaction = getAction(actionClass);
job = getScheduler().createJob(scheduleRequest.getJobName(), iaction, parameterMap, jobTrigger);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
return job;
}
Example usage of org.pentaho.platform.web.http.api.resources.RepositoryFileStreamProvider in the pentaho-platform project (by pentaho): the createJobScheduleRequest method of the ScheduleExportUtil class.
/**
 * Builds an exportable {@link JobScheduleRequest} mirroring the state of an existing
 * scheduler job: name, state, input/output files, parameters, and trigger.
 *
 * @param job the scheduler job to convert; must not be null
 * @return a populated JobScheduleRequest
 * @throws IllegalArgumentException if {@code job} is null or its trigger type is unsupported
 */
public static JobScheduleRequest createJobScheduleRequest(Job job) {
  if (job == null) {
    throw new IllegalArgumentException(Messages.getInstance().getString("ScheduleExportUtil.JOB_MUST_NOT_BE_NULL"));
  }

  JobScheduleRequest request = new JobScheduleRequest();
  request.setJobName(job.getJobName());
  request.setDuration(job.getJobTrigger().getDuration());
  request.setJobState(job.getState());

  Map<String, Serializable> jobParams = job.getJobParams();

  // Prefer an explicit stream provider stored in the job params, whether it survived
  // as an object or was flattened to its string form.
  Object rawProvider = jobParams.get(QuartzScheduler.RESERVEDMAPKEY_STREAMPROVIDER);
  RepositoryFileStreamProvider fileStreamProvider = null;
  if (rawProvider instanceof RepositoryFileStreamProvider) {
    fileStreamProvider = (RepositoryFileStreamProvider) rawProvider;
  } else if (rawProvider instanceof String) {
    // Serialized form looks like: "input file = <path> : output file = <path>"
    String[] parts = ((String) rawProvider).split(":");
    if (!ArrayUtils.isEmpty(parts) && parts.length == 2) {
      String parsedInput = parts[0].split("=")[1].trim();
      String parsedOutput = parts[1].split("=")[1].trim();
      fileStreamProvider = new RepositoryFileStreamProvider(parsedInput, parsedOutput, true);
    }
  }

  if (fileStreamProvider != null) {
    request.setInputFile(fileStreamProvider.getInputFilePath());
    request.setOutputFile(fileStreamProvider.getOutputFilePath());
  } else {
    // No provider available: reconstruct the paths from the PDI directory/transformation/job params.
    String directory = (String) jobParams.get("directory");
    String transName = (String) jobParams.get("transformation");
    String jobName = (String) jobParams.get("job");
    String artifact = transName == null ? jobName : transName;
    if (directory != null && artifact != null) {
      // Output is the artifact path with a wildcard; input adds the PDI extension.
      request.setOutputFile(RepositoryFilenameUtils.concat(directory, artifact) + "*");
      String extension = artifact.equals(jobName) ? ".kjb" : ".ktr";
      request.setInputFile(RepositoryFilenameUtils.concat(directory, artifact + extension));
    }
  }

  // Copy the job parameters, special-casing PDI run parameters and the reserved
  // action-class / time-zone keys (which are also kept as regular parameters).
  for (Map.Entry<String, Serializable> entry : jobParams.entrySet()) {
    String key = entry.getKey();
    Serializable value = entry.getValue();
    if (RUN_PARAMETERS_KEY.equals(key)) {
      if (request.getPdiParameters() == null) {
        request.setPdiParameters(new HashMap<String, String>());
      }
      request.getPdiParameters().putAll((Map<String, String>) value);
      continue;
    }
    JobScheduleParam param = null;
    if (value instanceof String) {
      String stringValue = (String) value;
      if (QuartzScheduler.RESERVEDMAPKEY_ACTIONCLASS.equals(key)) {
        request.setActionClass(stringValue);
      } else if (IBlockoutManager.TIME_ZONE_PARAM.equals(key)) {
        request.setTimeZone(stringValue);
      }
      param = new JobScheduleParam(key, stringValue);
    } else if (value instanceof Number) {
      param = new JobScheduleParam(key, (Number) value);
    } else if (value instanceof Date) {
      param = new JobScheduleParam(key, (Date) value);
    } else if (value instanceof Boolean) {
      param = new JobScheduleParam(key, (Boolean) value);
    }
    if (param != null) {
      request.getJobParameters().add(param);
    }
  }

  // Translate the trigger; complex triggers are exported through their cron representation.
  if (job.getJobTrigger() instanceof SimpleJobTrigger) {
    request.setSimpleJobTrigger((SimpleJobTrigger) job.getJobTrigger());
  } else if (job.getJobTrigger() instanceof ComplexJobTrigger) {
    ComplexJobTrigger complexTrigger = (ComplexJobTrigger) job.getJobTrigger();
    // force it to a cron trigger to get the auto-parsing of the complex trigger
    CronJobTrigger cron = new CronJobTrigger();
    cron.setCronString(complexTrigger.getCronString());
    cron.setStartTime(complexTrigger.getStartTime());
    cron.setEndTime(complexTrigger.getEndTime());
    cron.setDuration(complexTrigger.getDuration());
    cron.setUiPassParam(complexTrigger.getUiPassParam());
    request.setCronJobTrigger(cron);
  } else if (job.getJobTrigger() instanceof CronJobTrigger) {
    request.setCronJobTrigger((CronJobTrigger) job.getJobTrigger());
  } else {
    // don't know what this is, can't export it
    throw new IllegalArgumentException(Messages.getInstance().getString("PentahoPlatformExporter.UNSUPPORTED_JobTrigger", job.getJobTrigger().getClass().getName()));
  }
  return request;
}
Example usage of org.pentaho.platform.web.http.api.resources.RepositoryFileStreamProvider in the pentaho-platform project (by pentaho): the testCreateJobScheduleRequest_StreamProviderJobParam method of the ScheduleExportUtilTest class.
/**
 * When the reserved stream-provider job param holds an actual RepositoryFileStreamProvider,
 * its input/output paths must be copied onto the request and no regular job parameters
 * should be produced.
 */
@Test
public void testCreateJobScheduleRequest_StreamProviderJobParam() throws Exception {
  final String jobName = "JOB";
  final String inputPath = "/input/path/to/file.ext";
  final String outputPath = "/output/path/location.*";

  RepositoryFileStreamProvider streamProvider = mock(RepositoryFileStreamProvider.class);
  when(streamProvider.getInputFilePath()).thenReturn(inputPath);
  when(streamProvider.getOutputFilePath()).thenReturn(outputPath);

  Map<String, Serializable> params = new HashMap<>();
  params.put(QuartzScheduler.RESERVEDMAPKEY_STREAMPROVIDER, streamProvider);

  Job job = mock(Job.class);
  when(job.getJobName()).thenReturn(jobName);
  when(job.getJobParams()).thenReturn(params);
  when(job.getJobTrigger()).thenReturn(mock(CronJobTrigger.class));

  JobScheduleRequest result = ScheduleExportUtil.createJobScheduleRequest(job);

  assertEquals(inputPath, result.getInputFile());
  assertEquals(outputPath, result.getOutputFile());
  assertEquals(0, result.getJobParameters().size());
}
Example usage of org.pentaho.platform.web.http.api.resources.RepositoryFileStreamProvider in the pentaho-platform project (by pentaho): the getStreamProviderTest method of the LocalActionInvokerTest class.
/**
 * A provider stored under INVOKER_STREAMPROVIDER must be returned unchanged
 * by getStreamProvider.
 */
@Test
public void getStreamProviderTest() {
  RepositoryFileStreamProvider expectedProvider = new RepositoryFileStreamProvider();
  Map<String, Serializable> params = new HashMap<>();
  params.put(ActionUtil.INVOKER_STREAMPROVIDER, expectedProvider);

  IBackgroundExecutionStreamProvider actualProvider = defaultActionInvoker.getStreamProvider(params);

  Assert.assertEquals(expectedProvider, actualProvider);
}
Example usage of org.pentaho.platform.web.http.api.resources.RepositoryFileStreamProvider in the pentaho-platform project (by pentaho): the getStreamProviderWithInputAndOutputFileTest method of the LocalActionInvokerTest class.
/**
 * When input/output file entries accompany the provider in the param map,
 * getStreamProvider must still return the stored provider.
 *
 * Fixes vs. the original: the BufferedWriter is now closed (the original leaked it,
 * so "TEST TEXT" might never be flushed to disk), the temp file is deleted so repeated
 * runs start clean, and the assertEquals arguments are in JUnit's (expected, actual)
 * order, consistent with getStreamProviderTest.
 */
@Test
public void getStreamProviderWithInputAndOutputFileTest() throws IOException {
  Map<String, Serializable> paramMap = new HashMap<>();
  RepositoryFileStreamProvider repositoryFileStreamProvider = new RepositoryFileStreamProvider();
  File inputFile = new File("example.txt");
  // try-with-resources guarantees the writer is flushed and closed.
  try (BufferedWriter output = new BufferedWriter(new FileWriter(inputFile))) {
    output.write("TEST TEXT");
  }
  try {
    paramMap.put(ActionUtil.INVOKER_STREAMPROVIDER, repositoryFileStreamProvider);
    paramMap.put(ActionUtil.INVOKER_STREAMPROVIDER_INPUT_FILE, inputFile);
    paramMap.put(ActionUtil.INVOKER_STREAMPROVIDER_OUTPUT_FILE_PATTERN, inputFile);
    paramMap.put(ActionUtil.INVOKER_STREAMPROVIDER_UNIQUE_FILE_NAME, true);
    IBackgroundExecutionStreamProvider iBackgroundExecutionStreamProvider = defaultActionInvoker.getStreamProvider(paramMap);
    Assert.assertEquals(repositoryFileStreamProvider, iBackgroundExecutionStreamProvider);
  } finally {
    // Clean up the scratch file regardless of assertion outcome.
    inputFile.delete();
  }
}
Aggregations