Usage of org.pentaho.di.job.Job in the pentaho-metaverse project (Pentaho): class JobAnalyzer, method analyze.
/**
 * Analyzes a PDI job document and builds its metaverse node, including standard metadata
 * (description, dates, users, version, status), job parameters, job entries, and hops.
 *
 * @param descriptor describes the component (namespace, context) being analyzed
 * @param document   the document whose content is either the job XML (String) or a JobMeta
 * @return the metaverse node created for the job
 * @throws MetaverseAnalyzerException if the XML cannot be parsed, the document content is not
 *                                    a job, or a parameter lookup fails unexpectedly
 */
@Override
public synchronized IMetaverseNode analyze(IComponentDescriptor descriptor, IDocument document) throws MetaverseAnalyzerException {
  validateState(document);
  Object repoObject = document.getContent();
  JobMeta jobMeta = null;
  if (repoObject instanceof String) {
    // hydrate the job from its XML representation
    try {
      String content = (String) repoObject;
      // Use an explicit charset: the no-arg getBytes() depends on the platform default
      // and could corrupt non-ASCII job metadata on some systems.
      ByteArrayInputStream xmlStream =
        new ByteArrayInputStream(content.getBytes(java.nio.charset.StandardCharsets.UTF_8));
      jobMeta = new JobMeta(xmlStream, null, null);
    } catch (KettleXMLException e) {
      throw new MetaverseAnalyzerException(e);
    }
  } else if (repoObject instanceof JobMeta) {
    jobMeta = (JobMeta) repoObject;
  }
  // Fail fast with a meaningful exception rather than an NPE below when the document
  // content is neither an XML string nor a JobMeta.
  if (jobMeta == null) {
    throw new MetaverseAnalyzerException("Document content is not a job: "
      + (repoObject == null ? "null" : repoObject.getClass().getName()));
  }
  // construct a dummy job based on our JobMeta so we get our VariableSpace set properly
  jobMeta.setFilename(document.getStringID());
  Job j = new Job(null, jobMeta);
  j.setInternalKettleVariables(jobMeta);
  IComponentDescriptor documentDescriptor = new MetaverseComponentDescriptor(document.getStringID(), DictionaryConst.NODE_TYPE_JOB, new Namespace(descriptor.getLogicalId()), descriptor.getContext());
  // Create a metaverse node and start filling in details
  IMetaverseNode node = metaverseObjectFactory.createNodeObject(document.getNamespace(), jobMeta.getName(), DictionaryConst.NODE_TYPE_JOB);
  node.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
  // pull out the standard fields; each is optional, so only set the property when present
  String description = jobMeta.getDescription();
  if (description != null) {
    node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
  }
  String extendedDescription = jobMeta.getExtendedDescription();
  if (extendedDescription != null) {
    node.setProperty("extendedDescription", extendedDescription);
  }
  Date createdDate = jobMeta.getCreatedDate();
  if (createdDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
  }
  String createdUser = jobMeta.getCreatedUser();
  if (createdUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
  }
  Date lastModifiedDate = jobMeta.getModifiedDate();
  if (lastModifiedDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
  }
  String lastModifiedUser = jobMeta.getModifiedUser();
  if (lastModifiedUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
  }
  String version = jobMeta.getJobversion();
  if (version != null) {
    node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
  }
  // A missing message key comes back as "!key!", so skip those
  String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(jobMeta.getJobstatus()));
  if (status != null && !status.startsWith("!")) {
    node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
  }
  node.setProperty(DictionaryConst.PROPERTY_PATH, document.getProperty(DictionaryConst.PROPERTY_PATH));
  // Process job parameters
  String[] parameters = jobMeta.listParameters();
  if (parameters != null) {
    for (String parameter : parameters) {
      try {
        // Determine parameter properties and add them to a map, then the map to the list
        String defaultParameterValue = jobMeta.getParameterDefault(parameter);
        String parameterValue = jobMeta.getParameterValue(parameter);
        String parameterDescription = jobMeta.getParameterDescription(parameter);
        PropertiesHolder paramProperties = new PropertiesHolder();
        paramProperties.setProperty("defaultValue", defaultParameterValue);
        paramProperties.setProperty("value", parameterValue);
        paramProperties.setProperty("description", parameterDescription);
        node.setProperty("parameter_" + parameter, paramProperties.toString());
      } catch (UnknownParamException upe) {
        // This shouldn't happen as we're using the list provided by the meta
        throw new MetaverseAnalyzerException(upe);
      }
    }
  }
  // handle the entries: delegate each to a registered analyzer, or fall back to the generic one
  for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
    JobEntryCopy entry = jobMeta.getJobEntry(i);
    try {
      if (entry != null) {
        entry.getEntry().setParentJob(j);
        IMetaverseNode jobEntryNode = null;
        JobEntryInterface jobEntryInterface = entry.getEntry();
        IComponentDescriptor entryDescriptor = new MetaverseComponentDescriptor(entry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY, node, descriptor.getContext());
        Set<IJobEntryAnalyzer> jobEntryAnalyzers = getJobEntryAnalyzers(jobEntryInterface);
        if (jobEntryAnalyzers != null && !jobEntryAnalyzers.isEmpty()) {
          for (IJobEntryAnalyzer jobEntryAnalyzer : jobEntryAnalyzers) {
            jobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
            jobEntryNode = (IMetaverseNode) jobEntryAnalyzer.analyze(entryDescriptor, entry.getEntry());
          }
        } else {
          GenericJobEntryMetaAnalyzer defaultJobEntryAnalyzer = new GenericJobEntryMetaAnalyzer();
          defaultJobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
          jobEntryNode = defaultJobEntryAnalyzer.analyze(entryDescriptor, jobEntryInterface);
        }
        if (jobEntryNode != null) {
          metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, jobEntryNode);
        }
      }
    } catch (Throwable mae) {
      // Don't throw an exception, just log and carry on; one bad entry must not abort
      // analysis of the whole job
      log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", entry.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
    }
  }
  // Model the hops between entries
  int numHops = jobMeta.nrJobHops();
  for (int i = 0; i < numHops; i++) {
    JobHopMeta hop = jobMeta.getJobHop(i);
    JobEntryCopy fromEntry = hop.getFromEntry();
    JobEntryCopy toEntry = hop.getToEntry();
    INamespace childNs = new Namespace(node.getLogicalId());
    // process legitimate hops only (both endpoints present)
    if (fromEntry != null && toEntry != null) {
      IMetaverseNode fromEntryNode = metaverseObjectFactory.createNodeObject(childNs, fromEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
      IMetaverseNode toEntryNode = metaverseObjectFactory.createNodeObject(childNs, toEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
      metaverseBuilder.addLink(fromEntryNode, DictionaryConst.LINK_HOPSTO, toEntryNode);
    }
  }
  metaverseBuilder.addNode(node);
  addParentLink(documentDescriptor, node);
  return node;
}
Usage of org.pentaho.di.job.Job in the pentaho-metaverse project (Pentaho): class JobRuntimeExtensionPoint, method callExtensionPoint.
/**
 * Callback invoked when a job is about to be started. If runtime lineage collection is
 * enabled, creates an execution profile for the job, registers this extension point as a
 * job listener, and submits an asynchronous lineage analysis of the job's metadata.
 *
 * @param logChannelInterface A reference to the log in this context (the Job object's log)
 * @param o The object being operated on (a Job in this case); other types are ignored
 * @throws org.pentaho.di.core.exception.KettleException
 */
@Override
public void callExtensionPoint(LogChannelInterface logChannelInterface, Object o) throws KettleException {
// Job Started listeners get called after the extension point is invoked, so just add a job listener
if (o instanceof Job) {
Job job = ((Job) o);
// If runtime lineage collection is disabled, don't run any lineage processes/methods
if (!isRuntimeEnabled()) {
return;
}
// Create and populate an execution profile with what we know so far
ExecutionProfile executionProfile = new ExecutionProfile();
populateExecutionProfile(executionProfile, job);
IMetaverseBuilder builder = JobLineageHolderMap.getInstance().getMetaverseBuilder(job);
// Add the job finished listener
job.addJobListener(this);
// Analyze the current job (if a document analyzer has been configured)
if (documentAnalyzer != null) {
documentAnalyzer.setMetaverseBuilder(builder);
// Create a locator node for the execution engine and a document for the job
final String clientName = executionProfile.getExecutionEngine().getName();
final INamespace namespace = new Namespace(clientName);
final IMetaverseNode designNode = builder.getMetaverseObjectFactory().createNodeObject(clientName, clientName, DictionaryConst.NODE_TYPE_LOCATOR);
builder.addNode(designNode);
final JobMeta jobMeta = job.getJobMeta();
// The variables and parameters in the Job may not have been set on the meta, so we do it here
// to ensure the job analyzer will have access to the parameter values.
jobMeta.copyParametersFrom(job);
jobMeta.activateParameters();
job.copyVariablesFrom(jobMeta);
// Propagate the repository (if any) so repository-based lookups work during analysis
if (job.getRep() != null) {
jobMeta.setRepository(job.getRep());
}
// Build the document id from the filename, ensuring it carries the job extension (.kjb)
String id = getFilename(jobMeta);
if (!id.endsWith(jobMeta.getDefaultExtension())) {
id += "." + jobMeta.getDefaultExtension();
}
IDocument metaverseDocument = builder.getMetaverseObjectFactory().createDocumentObject();
metaverseDocument.setNamespace(namespace);
metaverseDocument.setContent(jobMeta);
metaverseDocument.setStringID(id);
metaverseDocument.setName(jobMeta.getName());
metaverseDocument.setExtension("kjb");
metaverseDocument.setMimeType(URLConnection.getFileNameMap().getContentTypeFor("job.kjb"));
metaverseDocument.setContext(new AnalysisContext(DictionaryConst.CONTEXT_RUNTIME));
// Normalize the path for the document property; fall back to the raw id on failure
String normalizedPath;
try {
normalizedPath = KettleAnalyzerUtil.normalizeFilePath(id);
} catch (MetaverseException e) {
normalizedPath = id;
}
metaverseDocument.setProperty(DictionaryConst.PROPERTY_NAME, job.getName());
metaverseDocument.setProperty(DictionaryConst.PROPERTY_PATH, normalizedPath);
metaverseDocument.setProperty(DictionaryConst.PROPERTY_NAMESPACE, namespace.getNamespaceId());
// Submit the analysis asynchronously; completion is tracked by the completion service
Runnable analyzerRunner = MetaverseUtil.getAnalyzerRunner(documentAnalyzer, metaverseDocument);
MetaverseCompletionService.getInstance().submit(analyzerRunner, id);
}
// Save the lineage objects for later (consumed when the job finishes)
LineageHolder holder = JobLineageHolderMap.getInstance().getLineageHolder(job);
holder.setExecutionProfile(executionProfile);
holder.setMetaverseBuilder(builder);
}
}
Usage of org.pentaho.di.job.Job in the pentaho-metaverse project (Pentaho): class JobRuntimeExtensionPoint, method createLineGraph.
/**
 * Finalizes lineage collection for a finished job: waits for any in-flight lineage task,
 * records the failure count from the job result on the execution profile, and writes the
 * execution profile and (for top-level jobs only) the lineage graph via the lineage writer.
 * All failures are logged rather than thrown, so job completion is never disrupted.
 *
 * @param job the job whose lineage artifacts should be written
 */
protected void createLineGraph(final Job job) {
  try {
    // Get the lineage holder (execution profile, builder, pending task) for this job
    LineageHolder holder = JobLineageHolderMap.getInstance().getLineageHolder(job);
    Future lineageTask = holder.getLineageTask();
    if (lineageTask != null) {
      try {
        lineageTask.get();
      } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe it,
        // then log through the class logger instead of printStackTrace()
        Thread.currentThread().interrupt();
        log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", job.getName(), Const.NVL(e.getLocalizedMessage(), "Unspecified")));
        log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
      } catch (ExecutionException e) {
        // The lineage task failed; log and continue with whatever profile data we have
        log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", job.getName(), Const.NVL(e.getLocalizedMessage(), "Unspecified")));
        log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
      }
    }
    // Get the current execution profile for this job
    IExecutionProfile executionProfile = JobLineageHolderMap.getInstance().getLineageHolder(job).getExecutionProfile();
    if (executionProfile == null) {
      // Something's wrong here, the transStarted method didn't properly store the execution
      // profile. We should know the same info, so populate a new ExecutionProfile using the
      // current Job
      executionProfile = new ExecutionProfile();
      populateExecutionProfile(executionProfile, job);
    }
    ExecutionData executionData = (ExecutionData) executionProfile.getExecutionData();
    Result result = job.getResult();
    if (result != null) {
      executionData.setFailureCount(result.getNrErrors());
    }
    // Export the lineage info (execution profile, lineage graph, etc.)
    try {
      if (lineageWriter != null && !"none".equals(lineageWriter.getOutputStrategy())) {
        // clearOutput right before the first call to outputXYZ().
        if ("latest".equals(lineageWriter.getOutputStrategy())) {
          lineageWriter.cleanOutput(holder);
        }
        lineageWriter.outputExecutionProfile(holder);
      }
    } catch (IOException e) {
      log.warn(Messages.getString("ERROR.CouldNotWriteExecutionProfile", job.getName(), e.getMessage()));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
    }
    // Only the top-level job (no parent job or transformation) writes the lineage graph,
    // so nested executions are captured once in the root's graph
    try {
      Job parentJob = job.getParentJob();
      Trans parentTrans = job.getParentTrans();
      if (parentJob == null && parentTrans == null) {
        // Add the execution profile information to the lineage graph
        addRuntimeLineageInfo(holder);
        if (lineageWriter != null && !"none".equals(lineageWriter.getOutputStrategy())) {
          lineageWriter.outputLineageGraph(holder);
        }
      }
    } catch (IOException e) {
      log.warn(Messages.getString("ERROR.CouldNotWriteLineageGraph", job.getName(), Const.NVL(e.getLocalizedMessage(), "Unspecified")));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
    }
  } catch (Throwable t) {
    // Catch-all: lineage output must never break job completion
    log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", job.getName(), Const.NVL(t.getLocalizedMessage(), "Unspecified")));
    log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), t);
  }
}
Usage of org.pentaho.di.job.Job in the pentaho-metaverse project (Pentaho): class JobRuntimeExtensionPointTest, method testJobFinished.
@Test
public void testJobFinished() throws Exception {
  // Spy on the extension point so interactions with it can be verified
  JobRuntimeExtensionPoint extensionPointSpy = spy(jobExtensionPoint);
  // A null job must be ignored entirely: no execution profile is populated
  extensionPointSpy.jobFinished(null);
  verify(extensionPointSpy, never()).populateExecutionProfile(Mockito.any(IExecutionProfile.class), Mockito.any(Job.class));
  // A real job without a result should complete without error
  extensionPointSpy.jobFinished(job);
  // The logic in jobFinished() is now in a thread, so we can't verify methods were called
  // A job that reports a Result should also complete without error
  Job jobWithResult = spy(job);
  Result mockResult = mock(Result.class);
  when(jobWithResult.getResult()).thenReturn(mockResult);
  extensionPointSpy.jobFinished(jobWithResult);
  // The logic in jobFinished() is now in a thread, so we can't verify methods were called
  // Exception handling test removed because jobFinished() logic is in a thread and can't throw checked exceptions
}
Usage of org.pentaho.di.job.Job in the pentaho-metaverse project (Pentaho): class JobRuntimeExtensionPointTest, method setUp.
// Builds the shared test fixture: an enabled extension point, a spied JobMeta with
// known metadata, and a Job carrying a variable, a parameter, and arguments.
@Before
public void setUp() throws Exception {
jobExtensionPoint = new JobRuntimeExtensionPoint();
// Enable runtime lineage collection so the extension point logic actually runs in tests
jobExtensionPoint.setRuntimeEnabled(true);
// Spy the meta so getUsedVariables() can be stubbed below
jobMeta = spy(new JobMeta());
jobMeta.setName(TEST_JOB_NAME);
jobMeta.setFilename(TEST_JOB_PATH);
jobMeta.setDescription(TEST_JOB_DESCRIPTION);
job = new Job(null, jobMeta);
job.setExecutingServer(TEST_SERVER);
job.setExecutingUser(TEST_USER);
job.setVariable(TEST_VAR_NAME, TEST_VAR_VALUE);
// Pretend the job uses exactly one variable so profile population has something to record
when(jobMeta.getUsedVariables()).thenReturn(Collections.singletonList(TEST_VAR_NAME));
job.addParameterDefinition(TEST_PARAM_NAME, TEST_PARAM_DEFAULT_VALUE, TEST_PARAM_DESCRIPTION);
job.setParameterValue(TEST_PARAM_NAME, TEST_PARAM_VALUE);
job.setArguments(new String[] { "arg0", "arg1" });
}
Aggregations