Usage example of org.pentaho.di.job.Job from the pentaho-kettle project (by Pentaho): the execute method of the JobEntrySetVariables class.
/**
 * Sets the configured variables, optionally loading additional ones from a properties
 * file, into the scope selected per variable (JVM, root job, current job, or parent job).
 *
 * @param result the result of the previous job entry execution; updated in place
 * @param nr     the job entry number (not used by this implementation)
 * @return the same result object; success is set to false and the error count to 1
 *         if anything goes wrong
 * @throws KettleException declared for interface compatibility; errors are caught,
 *         logged, and reflected in the returned result instead
 */
public Result execute(Result result, int nr) throws KettleException {
  result.setResult(true);
  result.setNrErrors(0);
  try {
    List<String> variables = new ArrayList<>();
    List<String> variableValues = new ArrayList<>();
    List<Integer> variableTypes = new ArrayList<>();
    String realFilename = environmentSubstitute(filename);
    if (!Utils.isEmpty(realFilename)) {
      // Load extra variables from the configured properties file, read as UTF-8.
      try (InputStream is = KettleVFS.getInputStream(realFilename);
          InputStreamReader isr = new InputStreamReader(is, "UTF-8");
          BufferedReader reader = new BufferedReader(isr)) {
        Properties properties = new Properties();
        properties.load(reader);
        for (Object key : properties.keySet()) {
          variables.add((String) key);
          variableValues.add((String) properties.get(key));
          variableTypes.add(fileVariableType);
        }
      } catch (Exception e) {
        // Preserve the original exception as the cause so the root failure is not lost.
        throw new KettleException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableReadPropertiesFile", realFilename), e);
      }
    }
    // Append the variables configured directly on this job entry.
    for (int i = 0; i < variableName.length; i++) {
      variables.add(variableName[i]);
      variableValues.add(variableValue[i]);
      variableTypes.add(variableType[i]);
    }
    for (int i = 0; i < variables.size(); i++) {
      String varname = variables.get(i);
      String value = variableValues.get(i);
      int type = variableTypes.get(i);
      if (replaceVars) {
        // Resolve ${...} references in both the name and the value.
        varname = environmentSubstitute(varname);
        value = environmentSubstitute(value);
      }
      // OK, where do we set this value...
      switch(type) {
        case VARIABLE_TYPE_JVM:
          // JVM scope: system property plus every job up the parent chain.
          System.setProperty(varname, value);
          setVariable(varname, value);
          Job parentJobTraverse = parentJob;
          while (parentJobTraverse != null) {
            parentJobTraverse.setVariable(varname, value);
            parentJobTraverse = parentJobTraverse.getParentJob();
          }
          break;
        case VARIABLE_TYPE_ROOT_JOB:
          // Set variable in this job entry and in every job up to the root.
          setVariable(varname, value);
          Job rootJob = parentJob;
          while (rootJob != null) {
            rootJob.setVariable(varname, value);
            rootJob = rootJob.getParentJob();
          }
          break;
        case VARIABLE_TYPE_CURRENT_JOB:
          // Remember the previous value so it can be restored later.
          changedInitialVariables.put(varname, getVariable(varname));
          setVariable(varname, value);
          if (parentJob != null) {
            parentJob.setVariable(varname, value);
          } else {
            throw new KettleJobException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableSetVariableCurrentJob", varname));
          }
          break;
        case VARIABLE_TYPE_PARENT_JOB:
          setVariable(varname, value);
          if (parentJob != null) {
            parentJob.setVariable(varname, value);
            Job gpJob = parentJob.getParentJob();
            if (gpJob != null) {
              gpJob.setVariable(varname, value);
            } else {
              throw new KettleJobException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableSetVariableParentJob", varname));
            }
          } else {
            throw new KettleJobException(BaseMessages.getString(PKG, "JobEntrySetVariables.Error.UnableSetVariableCurrentJob", varname));
          }
          break;
        default:
          break;
      }
      // ok we can process this line
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntrySetVariables.Log.SetVariableToValue", varname, value));
      }
    }
  } catch (Exception e) {
    result.setResult(false);
    result.setNrErrors(1);
    logError(BaseMessages.getString(PKG, "JobEntrySetVariables.UnExcpectedError", e.getMessage()));
  }
  return result;
}
Usage example of org.pentaho.di.job.Job from the pentaho-kettle project (by Pentaho): the testLogfileWritesFromRemote method of the JobEntryJobIT class.
@Test
public void testLogfileWritesFromRemote() throws Exception {
  // Spy the entry so only execute() runs for real; everything else is stubbed.
  JobEntryJob job = spy(new JobEntryJob(JOB_ENTRY_JOB_NAME));
  doCallRealMethod().when(job).execute(any(Result.class), anyInt());
  Job parentJob = mock(Job.class);
  JobMeta parentJobMeta = mock(JobMeta.class);
  JobMeta jobMeta = mock(JobMeta.class);
  SlaveServer slaveServer = mock(SlaveServer.class);
  LogChannelInterface log = mock(LogChannelInterface.class);
  SlaveServerJobStatus status = mock(SlaveServerJobStatus.class);
  // Wire the mocks so the entry resolves the remote slave server and gets a
  // successful job status whose logging string is LOG.
  when(parentJob.getLogLevel()).thenReturn(LogLevel.BASIC);
  when(parentJobMeta.getRepositoryDirectory()).thenReturn(null);
  when(jobMeta.getRepositoryDirectory()).thenReturn(mock(RepositoryDirectoryInterface.class));
  when(jobMeta.getName()).thenReturn(JOB_META_NAME);
  when(parentJob.getJobMeta()).thenReturn(parentJobMeta);
  when(parentJobMeta.findSlaveServer(REMOTE_SLAVE_SERVER_NAME)).thenReturn(slaveServer);
  when(slaveServer.getLogChannel()).thenReturn(log);
  when(log.getLogLevel()).thenReturn(LogLevel.BASIC);
  when(slaveServer.sendXML(anyString(), anyString())).thenReturn(REPLY);
  when(slaveServer.execService(anyString())).thenReturn(REPLY);
  when(slaveServer.getJobStatus(anyString(), anyString(), anyInt())).thenReturn(status);
  when(status.getResult()).thenReturn(mock(Result.class));
  when(status.getLoggingString()).thenReturn(LOG);
  // The log file the entry should write the remote log into.
  file = Files.createTempFile("file", "");
  doReturn(LOG_FILE_NAME).when(job).getLogFilename();
  doReturn(file.toString()).when(job).environmentSubstitute(LOG_FILE_NAME);
  doReturn(REMOTE_SLAVE_SERVER_NAME).when(job).environmentSubstitute(REMOTE_SLAVE_SERVER_NAME);
  doReturn(jobMeta).when(job).getJobMeta(any(Repository.class), any(VariableSpace.class));
  doNothing().when(job).copyVariablesFrom(anyObject());
  doNothing().when(job).setParentVariableSpace(anyObject());
  job.setLogfile = true;
  job.createParentFolder = false;
  job.logFileLevel = LogLevel.BASIC;
  job.execPerRow = false;
  job.paramsFromPrevious = false;
  job.argFromPrevious = false;
  job.waitingToFinish = true;
  job.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
  job.setRemoteSlaveServerName(REMOTE_SLAVE_SERVER_NAME);
  job.setParentJob(parentJob);
  job.setParentJobMeta(parentJobMeta);
  job.execute(new Result(), 0);
  // readAllLines closes the file, unlike the bare Files.lines() stream,
  // which would leak an open file handle if not closed explicitly.
  String result = String.join("", Files.readAllLines(file));
  assertTrue(result.contains(LOG));
}
Usage example of org.pentaho.di.job.Job from the pentaho-kettle project (by Pentaho): the processFile_ReturnsTrue_OnSuccess method of the JobEntryZipFileIT class.
@Test
public void processFile_ReturnsTrue_OnSuccess() throws Exception {
final String zipPath = createTempZipFileName("pdi-15013");
final String content = "temp file";
final File tempFile = createTempFile(content);
tempFile.deleteOnExit();
try {
Result result = new Result();
JobEntryZipFile entry = new JobEntryZipFile();
assertTrue(entry.processRowFile(new Job(), result, zipPath, null, null, tempFile.getAbsolutePath(), null, false));
boolean isTrue = true;
FileObject zip = KettleVFS.getFileObject(zipPath);
assertTrue("Zip archive should be created", zip.exists());
ByteArrayOutputStream os = new ByteArrayOutputStream();
IOUtils.copy(zip.getContent().getInputStream(), os);
ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(os.toByteArray()));
ZipEntry zipEntry = zis.getNextEntry();
assertEquals("Input file should be put into the archive", tempFile.getName(), zipEntry.getName());
os.reset();
IOUtils.copy(zis, os);
assertEquals("File's content should be equal to original", content, new String(os.toByteArray()));
} finally {
tempFile.delete();
File tempZipFile = new File(zipPath);
tempZipFile.delete();
}
}
Usage example of org.pentaho.di.job.Job from the pentaho-kettle project (by Pentaho): the getLogRecord method of the JobLogTable class.
/**
 * Builds the log record row for a job, calculating the value of every enabled
 * log table field from the job and its result.
 *
 * @param status
 *          the log status to record
 * @param subject
 *          the {@link Job} being logged, or null
 * @param parent
 *          the parent object (not used by this implementation)
 * @return the row of log field values, or null if subject is neither null nor a Job
 */
public RowMetaAndData getLogRecord(LogStatus status, Object subject, Object parent) {
  if (subject == null || subject instanceof Job) {
    // Cast of null simply yields a null job; every field then stays null.
    Job job = (Job) subject;
    Result result = null;
    if (job != null) {
      result = job.getResult();
    }
    RowMetaAndData row = new RowMetaAndData();
    for (LogTableField field : fields) {
      if (field.isEnabled()) {
        Object value = null;
        if (job != null) {
          switch(ID.valueOf(field.getId())) {
            case ID_JOB:
              // Long.valueOf instead of the deprecated Long constructor.
              value = Long.valueOf(job.getBatchId());
              break;
            case CHANNEL_ID:
              value = job.getLogChannelId();
              break;
            case JOBNAME:
              value = job.getJobname();
              break;
            case STATUS:
              value = status.getStatus();
              break;
            case LINES_READ:
              // Line counters come from the result, which may be absent.
              value = result == null ? null : Long.valueOf(result.getNrLinesRead());
              break;
            case LINES_WRITTEN:
              value = result == null ? null : Long.valueOf(result.getNrLinesWritten());
              break;
            case LINES_INPUT:
              value = result == null ? null : Long.valueOf(result.getNrLinesInput());
              break;
            case LINES_OUTPUT:
              value = result == null ? null : Long.valueOf(result.getNrLinesOutput());
              break;
            case LINES_UPDATED:
              value = result == null ? null : Long.valueOf(result.getNrLinesUpdated());
              break;
            case LINES_REJECTED:
              value = result == null ? null : Long.valueOf(result.getNrLinesRejected());
              break;
            case ERRORS:
              value = result == null ? null : Long.valueOf(result.getNrErrors());
              break;
            case STARTDATE:
              value = job.getStartDate();
              break;
            case LOGDATE:
              value = job.getLogDate();
              break;
            case ENDDATE:
              value = job.getEndDate();
              break;
            case DEPDATE:
              value = job.getDepDate();
              break;
            case REPLAYDATE:
              value = job.getCurrentDate();
              break;
            case LOG_FIELD:
              value = getLogBuffer(job, job.getLogChannelId(), status, logSizeLimit);
              break;
            case EXECUTING_SERVER:
              value = job.getExecutingServer();
              break;
            case EXECUTING_USER:
              value = job.getExecutingUser();
              break;
            case START_JOB_ENTRY:
              value = job.getStartJobEntryCopy() != null ? job.getStartJobEntryCopy().getName() : null;
              break;
            case CLIENT:
              value = KettleClientEnvironment.getInstance().getClient() != null ? KettleClientEnvironment.getInstance().getClient().toString() : "unknown";
              break;
            default:
              break;
          }
        }
        row.addValue(field.getFieldName(), field.getDataType(), value);
        // Propagate the configured field length onto the value metadata just added.
        row.getRowMeta().getValueMeta(row.size() - 1).setLength(field.getLength());
      }
    }
    return row;
  } else {
    return null;
  }
}
Usage example of org.pentaho.di.job.Job from the pentaho-kettle project (by Pentaho): the setUp method of the JobEntryFilesExistTest class.
@Before
public void setUp() throws Exception {
job = new Job(null, new JobMeta());
entry = new JobEntryFilesExist();
job.getJobMeta().addJobEntry(new JobEntryCopy(entry));
entry.setParentJob(job);
JobMeta mockJobMeta = mock(JobMeta.class);
entry.setParentJobMeta(mockJobMeta);
job.setStopped(false);
existingFile1 = TestUtils.createRamFile(getClass().getSimpleName() + "/existingFile1.ext", entry);
existingFile2 = TestUtils.createRamFile(getClass().getSimpleName() + "/existingFile2.ext", entry);
}
Aggregations