Use of org.pentaho.di.job.Job in the pentaho-metaverse project (by pentaho):
class JobEntryExternalResourceListenerTest, method testBeforeAfterExecution.
@Test
public void testBeforeAfterExecution() throws Exception {
  // Wire up the job hierarchy: a job entry whose parent job exposes a JobMeta.
  JobMeta mockJobMeta = mock( JobMeta.class );
  Job job = mock( Job.class );
  when( job.getJobMeta() ).thenReturn( mockJobMeta );
  JobEntryInterface jobEntryInterface = mock( JobEntryInterface.class );
  when( jobEntryInterface.getParentJob() ).thenReturn( job );

  // The entry declares a single FILE resource dependency.
  ResourceEntry fileEntry = new ResourceEntry( "myFile", ResourceEntry.ResourceType.FILE );
  ResourceReference fileReference = new ResourceReference( null, Collections.singletonList( fileEntry ) );
  when( jobEntryInterface.getResourceDependencies( mockJobMeta ) )
    .thenReturn( Collections.singletonList( fileReference ) );

  // Register an execution profile for the job so the listener can record into it.
  IExecutionProfile executionProfile = mock( IExecutionProfile.class );
  IExecutionData executionData = mock( IExecutionData.class );
  when( executionProfile.getExecutionData() ).thenReturn( executionData );
  JobLineageHolderMap.getInstance().getLineageHolder( job ).setExecutionProfile( executionProfile );

  // A Result carrying one result file backed by a mocked VFS file.
  FileName mockFilename = mock( FileName.class );
  when( mockFilename.getPath() ).thenReturn( "/path/to/file" );
  FileObject mockFile = mock( FileObject.class );
  when( mockFile.getName() ).thenReturn( mockFilename );
  ResultFile resultFile = mock( ResultFile.class );
  when( resultFile.getFile() ).thenReturn( mockFile );
  List<ResultFile> resultFiles = Collections.singletonList( resultFile );
  Result result = mock( Result.class );
  when( result.getResultFilesList() ).thenReturn( resultFiles );

  IJobEntryExternalResourceConsumer consumer = mock( IJobEntryExternalResourceConsumer.class );
  JobEntryExternalResourceListener listener = new JobEntryExternalResourceListener( consumer );

  // Call beforeExecution for coverage
  listener.beforeExecution( null, null, null );
  listener.afterExecution( job, mock( JobEntryCopy.class ), jobEntryInterface, result );
}
Use of org.pentaho.di.job.Job in the pentaho-metaverse project (by pentaho):
class JobEntryBaseJsonSerializerTest, method testWriteExternalResources.
@Test
public void testWriteExternalResources() throws Exception {
  // Build one resource dependency containing a single FILE resource entry.
  ResourceEntry fileEntry = mock( ResourceEntry.class );
  when( fileEntry.getResource() ).thenReturn( "path/to/my/resource" );
  when( fileEntry.getResourcetype() ).thenReturn( ResourceEntry.ResourceType.FILE );

  List<ResourceEntry> entries = new ArrayList<ResourceEntry>();
  entries.add( fileEntry );

  ResourceReference reference = mock( ResourceReference.class );
  when( reference.getEntries() ).thenReturn( entries );

  List<ResourceReference> dependencies = new ArrayList<ResourceReference>();
  dependencies.add( reference );

  // The job entry (class field "meta") reports the dependency via its parent job's meta.
  Job parentJob = mock( Job.class );
  JobMeta jobMeta = mock( JobMeta.class );
  when( parentJob.getJobMeta() ).thenReturn( jobMeta );
  when( meta.getParentJob() ).thenReturn( parentJob );
  when( meta.getResourceDependencies( jobMeta ) ).thenReturn( dependencies );

  serializer.writeExternalResources( meta, json, provider );

  // The serializer must emit an array of external resources around the entry.
  verify( json ).writeArrayFieldStart( JobEntryBaseJsonSerializer.JSON_PROPERTY_EXTERNAL_RESOURCES );
  verify( json ).writeObject( any( IExternalResourceInfo.class ) );
  verify( json ).writeEndArray();
}
Use of org.pentaho.di.job.Job in the pentaho-platform project (by pentaho):
class StatsDatabaseCheck, method executeJob.
/**
 * Executes the given job synchronously, waiting for it to finish.
 *
 * @param jobMeta         the job definition to run; if {@code null}, nothing is executed
 *                        and {@code true} is returned (treated as nothing-to-do)
 * @param jobFileFullPath full path of the job file, used only in the error message
 * @return {@code true} if the job ran without throwing (or {@code jobMeta} was null),
 *         {@code false} if execution raised a {@link KettleException}
 */
protected boolean executeJob(JobMeta jobMeta, String jobFileFullPath) {
  if (jobMeta != null) {
    Job job = new Job(null, jobMeta);
    Result result = new Result();
    try {
      job.execute(0, result);
      // Block until the job completes so the caller gets a definitive outcome.
      job.waitUntilFinished();
    } catch (KettleException ke) {
      // Pass the exception itself as well, so the full stack trace is preserved
      // in the log instead of only the (possibly empty) message string.
      Logger.error("Error executing " + jobFileFullPath, ke.getMessage(), ke);
      return false;
    }
  }
  return true;
}
Use of org.pentaho.di.job.Job in the pentaho-metaverse project (by pentaho):
class JobMetaJsonSerializer, method serializeSteps.
/**
 * Serializes each job entry of the given JobMeta into the JSON "steps" array.
 * <p>
 * Each entry is first persisted to the lineage repository (so that lineage
 * lookups by object id work later), then written to the JSON stream.
 *
 * @param meta the job metadata whose entries are serialized
 * @param json the generator to write the JSON array into
 * @throws IOException if writing to the generator fails
 */
@Override
protected void serializeSteps(JobMeta meta, JsonGenerator json) throws IOException {
  json.writeArrayFieldStart(JSON_PROPERTY_STEPS);
  // Loop-invariant work hoisted out of the loop: the repository, the job's
  // object id, and the parent Job wrapper are the same for every entry.
  LineageRepository repo = getLineageRepository();
  ObjectId jobId = meta.getObjectId() == null ? new StringObjectId(meta.getName()) : meta.getObjectId();
  Job job = new Job(null, meta);
  int numberOfEntries = meta.nrJobEntries();
  for (int i = 0; i < numberOfEntries; i++) {
    JobEntryCopy jobEntry = meta.getJobEntry(i);
    // Fall back to a name-based id when the entry was never saved to a repo.
    ObjectId entryId =
      jobEntry.getObjectId() == null ? new StringObjectId(jobEntry.getName()) : jobEntry.getObjectId();
    JobEntryInterface jobEntryInterface = jobEntry.getEntry();
    JobEntryBase jobEntryBase = getJobEntryBase(jobEntryInterface);
    jobEntryBase.setParentJob(job);
    jobEntryInterface.setObjectId(entryId);
    try {
      jobEntryInterface.saveRep(repo, null, jobId);
    } catch (KettleException e) {
      // Serialization continues even if one entry cannot be saved to the repo.
      LOGGER.warn(Messages.getString("INFO.Serialization.Trans.Step", jobEntry.getName()), e);
    }
    json.writeObject(jobEntryBase);
  }
  json.writeEndArray();
}
Use of org.pentaho.di.job.Job in the pentaho-metaverse project (by pentaho):
class ExternalResourceConsumerIT, method testExternalResourceConsumer.
@Test
public void testExternalResourceConsumer() throws Exception {
  Variables vars = new Variables();
  for (String key : variables.keySet()) {
    vars.setVariable(key, variables.get(key));
  }
  // try-with-resources: the original code leaked the stream on both branches
  // (it was never closed, and is not even read on the .kjb path).
  try (FileInputStream xmlStream = new FileInputStream(transOrJobPath)) {
    // run the trans or job
    if (transOrJobPath.endsWith(".ktr")) {
      KettleClientEnvironment.getInstance().setClient(KettleClientEnvironment.ClientType.PAN);
      TransMeta tm = new TransMeta(xmlStream, null, true, vars, null);
      tm.setFilename(tm.getName());
      Trans trans = new Trans(tm, null, tm.getName(), REPO_PATH, transOrJobPath);
      for (String var : vars.listVariables()) {
        trans.setVariable(var, vars.getVariable(var));
      }
      trans.execute(null);
      trans.waitUntilFinished();
    } else {
      KettleClientEnvironment.getInstance().setClient(KettleClientEnvironment.ClientType.KITCHEN);
      JobMeta jm = new JobMeta(new Variables(), transOrJobPath, null, null, null);
      jm.setFilename(jm.getName());
      Job job = new Job(null, jm);
      // Renamed from "variables" — the original local shadowed the class field
      // of the same name that is read at the top of this method.
      Variables jobVars = new Variables();
      jobVars.initializeVariablesFrom(job.getParentJob());
      jm.setInternalKettleVariables(jobVars);
      for (String var : vars.listVariables()) {
        jm.setVariable(var, vars.getVariable(var));
      }
      job.copyParametersFrom(jm);
      job.copyVariablesFrom(jm);
      job.activateParameters();
      // We have to call the extension point ourselves -- don't ask :(
      ExtensionPointHandler.callExtensionPoint(job.getLogChannel(), KettleExtensionPoint.JobStart.id, job);
      job.execute(0, null);
      job.fireJobFinishListeners();
    }
  }
}
Aggregations