Use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.
The class JobAnalyzer, method analyze.
@Override
public synchronized IMetaverseNode analyze(final IComponentDescriptor documentDescriptor, final AbstractMeta meta,
    final IMetaverseNode node, final String documentPath) throws MetaverseAnalyzerException {
  final JobMeta jobMeta = (JobMeta) meta;
  Job j = new Job(null, jobMeta);
  j.setInternalKettleVariables(jobMeta);
  // pull out the standard fields
  String description = jobMeta.getDescription();
  if (description != null) {
    node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
  }
  String extendedDescription = jobMeta.getExtendedDescription();
  if (extendedDescription != null) {
    node.setProperty("extendedDescription", extendedDescription);
  }
  Date createdDate = jobMeta.getCreatedDate();
  if (createdDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
  }
  String createdUser = jobMeta.getCreatedUser();
  if (createdUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
  }
  Date lastModifiedDate = jobMeta.getModifiedDate();
  if (lastModifiedDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
  }
  String lastModifiedUser = jobMeta.getModifiedUser();
  if (lastModifiedUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
  }
  String version = jobMeta.getJobversion();
  if (version != null) {
    node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
  }
  String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(jobMeta.getJobstatus()));
  if (status != null && !status.startsWith("!")) {
    node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
  }
  node.setProperty(DictionaryConst.PROPERTY_PATH, documentPath);
  // Process job parameters
  String[] parameters = jobMeta.listParameters();
  if (parameters != null) {
    for (String parameter : parameters) {
      try {
        // Gather the parameter's default value, current value and description,
        // then store them on the job node as a single property
        String defaultParameterValue = jobMeta.getParameterDefault(parameter);
        String parameterValue = jobMeta.getParameterValue(parameter);
        String parameterDescription = jobMeta.getParameterDescription(parameter);
        PropertiesHolder paramProperties = new PropertiesHolder();
        paramProperties.setProperty("defaultValue", defaultParameterValue);
        paramProperties.setProperty("value", parameterValue);
        paramProperties.setProperty("description", parameterDescription);
        node.setProperty("parameter_" + parameter, paramProperties.toString());
      } catch (UnknownParamException upe) {
        // This shouldn't happen as we're using the list provided by the meta
        throw new MetaverseAnalyzerException(upe);
      }
    }
  }
  // handle the entries
  for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
    JobEntryCopy entry = jobMeta.getJobEntry(i);
    try {
      if (entry != null) {
        entry.getEntry().setParentJob(j);
        IMetaverseNode jobEntryNode = null;
        JobEntryInterface jobEntryInterface = entry.getEntry();
        IComponentDescriptor entryDescriptor = new MetaverseComponentDescriptor(entry.getName(),
            DictionaryConst.NODE_TYPE_JOB_ENTRY, node, documentDescriptor.getContext());
        Set<IJobEntryAnalyzer> jobEntryAnalyzers = getJobEntryAnalyzers(jobEntryInterface);
        if (jobEntryAnalyzers != null && !jobEntryAnalyzers.isEmpty()) {
          for (IJobEntryAnalyzer jobEntryAnalyzer : jobEntryAnalyzers) {
            // clone the analyzer, if possible, so that its state cannot
            // change while the job is being analyzed
            if (jobEntryAnalyzer instanceof IClonableJobEntryAnalyzer) {
              jobEntryAnalyzer = ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).cloneAnalyzer();
              ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).setDocumentAnalyzer(this);
              ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).setDocumentDescriptor(documentDescriptor);
              ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).setDocumentPath(documentPath);
            } else {
              log.debug(Messages.getString("WARNING.CannotCloneAnalyzer"), jobEntryAnalyzer);
            }
            jobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
            jobEntryNode = (IMetaverseNode) jobEntryAnalyzer.analyze(entryDescriptor, entry.getEntry());
          }
        } else if (new AnnotatedClassFields(jobEntryInterface,
            jobEntryInterface.getParentJobMeta()).hasMetaverseAnnotations()) {
          AnnotationDrivenJobAnalyzer annotationDrivenJobAnalyzer = new AnnotationDrivenJobAnalyzer(jobEntryInterface);
          annotationDrivenJobAnalyzer.setMetaverseBuilder(metaverseBuilder);
          annotationDrivenJobAnalyzer.setDocumentAnalyzer(this);
          annotationDrivenJobAnalyzer.setDocumentDescriptor(documentDescriptor);
          annotationDrivenJobAnalyzer.setDocumentPath(documentPath);
          jobEntryNode = annotationDrivenJobAnalyzer.analyze(entryDescriptor, jobEntryInterface);
        } else {
          GenericJobEntryMetaAnalyzer defaultJobEntryAnalyzer = new GenericJobEntryMetaAnalyzer();
          defaultJobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
          jobEntryNode = defaultJobEntryAnalyzer.analyze(entryDescriptor, jobEntryInterface);
        }
        if (jobEntryNode != null) {
          metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, jobEntryNode);
        }
      }
    } catch (Exception mae) {
      // Don't throw an exception, just log and carry on
      log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", entry.getName(),
          Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
    }
  }
  // Model the hops between job entries
  int numHops = jobMeta.nrJobHops();
  for (int i = 0; i < numHops; i++) {
    JobHopMeta hop = jobMeta.getJobHop(i);
    JobEntryCopy fromEntry = hop.getFromEntry();
    JobEntryCopy toEntry = hop.getToEntry();
    INamespace childNs = new Namespace(node.getLogicalId());
    // process legitimate hops only (both endpoints must be present)
    if (fromEntry != null && toEntry != null) {
      IMetaverseNode fromEntryNode = metaverseObjectFactory.createNodeObject(childNs, fromEntry.getName(),
          DictionaryConst.NODE_TYPE_JOB_ENTRY);
      IMetaverseNode toEntryNode = metaverseObjectFactory.createNodeObject(childNs, toEntry.getName(),
          DictionaryConst.NODE_TYPE_JOB_ENTRY);
      metaverseBuilder.addLink(fromEntryNode, DictionaryConst.LINK_HOPSTO, toEntryNode);
    }
  }
  metaverseBuilder.addNode(node);
  addParentLink(documentDescriptor, node);
  return node;
}
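
The hop-modeling loop above is where JobHopMeta actually appears: each hop contributes a "hops to" link between two job-entry nodes. As a standalone illustration, here is a minimal sketch that walks a job's hops the same way. It uses only accessors that appear in the snippets on this page (nrJobHops, getJobHop, getFromEntry, getToEntry, isEnabled, getName); the class and method names are hypothetical.

import org.pentaho.di.job.JobHopMeta;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryCopy;

public class HopWalker {
  // Print "from --> to" for every complete, enabled hop in a job,
  // mirroring the null checks the analyzer applies to "legitimate" hops
  public static void printHops(JobMeta jobMeta) {
    for (int i = 0; i < jobMeta.nrJobHops(); i++) {
      JobHopMeta hop = jobMeta.getJobHop(i);
      JobEntryCopy fromEntry = hop.getFromEntry();
      JobEntryCopy toEntry = hop.getToEntry();
      if (fromEntry != null && toEntry != null && hop.isEnabled()) {
        System.out.println(fromEntry.getName() + " --> " + toEntry.getName());
      }
    }
  }
}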
Use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.
The class JobAnalyzerTest, method testAnalyzerJobWithEntriesAndHop.
@Test
public void testAnalyzerJobWithEntriesAndHop() throws MetaverseAnalyzerException {
  JobEntryCopy mockToEntryMeta = mock(JobEntryCopy.class);
  when(mockToEntryMeta.getEntry()).thenReturn(mockJobEntryInterface);
  when(mockToEntryMeta.getParentJobMeta()).thenReturn(mockContent);
  when(mockContent.nrJobEntries()).thenReturn(2);
  when(mockContent.getJobEntry(0)).thenReturn(mockJobEntry);
  when(mockContent.getJobEntry(1)).thenReturn(mockToEntryMeta);
  when(mockContent.nrJobHops()).thenReturn(1);
  final JobHopMeta hop = new JobHopMeta(mockJobEntry, mockToEntryMeta);
  when(mockContent.getJobHop(0)).thenReturn(hop);
  IMetaverseNode node = analyzer.analyze(descriptor, mockJobDoc);
  assertNotNull(node);
}
Use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.
The class JobMetaJsonSerializer, method serializeHops.
@Override
protected void serializeHops(JobMeta meta, JsonGenerator json) throws IOException {
  // Hops
  json.writeArrayFieldStart(JSON_PROPERTY_HOPS);
  int numberOfHops = meta.nrJobHops();
  for (int i = 0; i < numberOfHops; i++) {
    JobHopMeta hopMeta = meta.getJobHop(i);
    HopInfo hopInfo = new HopInfo(hopMeta);
    json.writeObject(hopInfo);
  }
  json.writeEndArray();
}
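
Two details matter when driving serializeHops directly: writeArrayFieldStart must be called inside an open JSON object, and writeObject needs a codec on the generator. Below is a minimal harness sketch; it assumes a JobMetaJsonSerializer instance is reachable from the same package or a subclass (the method is protected), and the harness class and variable names are illustrative.

import java.io.IOException;
import java.io.StringWriter;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.pentaho.di.job.JobMeta;

class SerializeHopsHarness {
  // Serializes just the hops of a job to a JSON string
  static String hopsJson(JobMetaJsonSerializer serializer, JobMeta jobMeta) throws IOException {
    StringWriter out = new StringWriter();
    // an ObjectMapper-backed generator carries a codec, which
    // json.writeObject(hopInfo) requires to serialize the HopInfo beans
    JsonGenerator json = new ObjectMapper().getFactory().createGenerator(out);
    json.writeStartObject(); // writeArrayFieldStart needs an enclosing object
    serializer.serializeHops(jobMeta, json);
    json.writeEndObject();
    json.close();
    return out.toString();
  }
}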
Use of org.pentaho.di.job.JobHopMeta in project pentaho-metaverse by pentaho.
The class JobMetaJsonSerializerTest, method testSerializeHops.
@Test
public void testSerializeHops() throws Exception {
  JobHopMeta jobHopMeta = mock(JobHopMeta.class);
  JobEntryCopy fromJobEntry = mock(JobEntryCopy.class);
  JobEntryCopy toJobEntry = mock(JobEntryCopy.class);
  when(meta.nrJobHops()).thenReturn(2);
  when(meta.getJobHop(anyInt())).thenReturn(jobHopMeta);
  when(jobHopMeta.getFromEntry()).thenReturn(fromJobEntry);
  when(jobHopMeta.getToEntry()).thenReturn(toJobEntry);
  when(jobHopMeta.isEnabled()).thenReturn(true);
  when(fromJobEntry.getName()).thenReturn("from");
  when(toJobEntry.getName()).thenReturn("to");
  serializer.serializeHops(meta, json);
  verify(json, times(2)).writeObject(any(HopInfo.class));
}
Use of org.pentaho.di.job.JobHopMeta in project pentaho-kettle by pentaho.
The class KettleDatabaseRepositoryJobDelegate, method loadJobMeta.
/**
 * Loads a job from a directory in the repository.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          an optional progress monitor (may be null)
 * @return the loaded JobMeta
 * @throws KettleException
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir,
    ProgressMonitorListener monitor) throws KettleException {
  JobMeta jobMeta = new JobMeta();
  synchronized (repository) {
    try {
      // Clear everything...
      jobMeta.clear();
      jobMeta.setRepositoryDirectory(repdir);
      // Get the job id
      jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
      // If no valid id is available in the database, then give error...
      if (jobMeta.getObjectId() != null) {
        // Load the notes...
        ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
        ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
        ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
        int nrWork = 2 + noteids.length + jecids.length + hopid.length;
        if (monitor != null) {
          monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob")
              + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
        }
        //
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
        }
        RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
        jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
        jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
        jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
        jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
        jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
        jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
        jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
        jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
        jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
        long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
        if (id_logdb > 0) {
          // Get the logconnection
          //
          DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
          jobMeta.getJobLogTable().setConnectionName(logDb.getName());
          // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
        }
        jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
        jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
        jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
        jobMeta.getJobLogTable().setLogSizeLimit(
            getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
        jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
        // Load all the log tables for the job...
        //
        RepositoryAttributeInterface attributeInterface =
            new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
        for (LogTableInterface logTable : jobMeta.getLogTables()) {
          logTable.loadFromRepository(attributeInterface);
        }
        if (monitor != null) {
          monitor.worked(1);
        }
        //
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
        }
        // Read objects from the shared XML file & the repository
        try {
          jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
          jobMeta.setSharedObjects(repository != null
              ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
        } catch (Exception e) {
          log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
          //
          log.logError(Const.getStackTracker(e));
        }
        if (monitor != null) {
          monitor.worked(1);
        }
        if (log.isDetailed()) {
          log.logDetailed("Loading " + noteids.length + " notes");
        }
        for (int i = 0; i < noteids.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
          }
          NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
          if (jobMeta.indexOfNote(ni) < 0) {
            jobMeta.addNote(ni);
          }
          if (monitor != null) {
            monitor.worked(1);
          }
        }
        // Load the group attributes map
        //
        jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
        // Load the job entries...
        //
        // Keep a unique list of job entries to facilitate in the loading.
        //
        List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
        if (log.isDetailed()) {
          log.logDetailed("Loading " + jecids.length + " job entries");
        }
        for (int i = 0; i < jecids.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + jecids.length);
          }
          JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i],
              jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
          if (jec.isMissing()) {
            jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
          }
          // Also set the copy number...
          // We count the number of job entry copies that use the job entry
          //
          int copyNr = 0;
          for (JobEntryCopy copy : jobMeta.getJobCopies()) {
            if (jec.getEntry() == copy.getEntry()) {
              copyNr++;
            }
          }
          jec.setNr(copyNr);
          int idx = jobMeta.indexOfJobEntry(jec);
          if (idx < 0) {
            if (jec.getName() != null && jec.getName().length() > 0) {
              jobMeta.addJobEntry(jec);
            }
          } else {
            // replace it!
            jobMeta.setJobEntry(idx, jec);
          }
          if (monitor != null) {
            monitor.worked(1);
          }
        }
        // Load the hops...
        if (log.isDetailed()) {
          log.logDetailed("Loading " + hopid.length + " job hops");
        }
        for (int i = 0; i < hopid.length; i++) {
          if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + hopid.length);
          }
          JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
          jobMeta.getJobhops().add(hi);
          if (monitor != null) {
            monitor.worked(1);
          }
        }
        loadRepParameters(jobMeta);
        // Finally, clear the changed flags...
        jobMeta.clearChanged();
        if (monitor != null) {
          monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
        }
        if (monitor != null) {
          monitor.done();
        }
        // close prepared statements, minimize locking etc.
        //
        repository.connectionDelegate.closeAttributeLookupPreparedStatements();
        return jobMeta;
      } else {
        throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
      }
    } catch (KettleException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
    } finally {
      jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
      jobMeta.setInternalKettleVariables();
    }
  }
}
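
For context, a hedged usage sketch of the delegate: only loadJobMeta's signature comes from the snippet above. The repository and directory lookup below uses the usual Kettle repository calls (loadRepositoryDirectoryTree, findDirectory), but the directory path and job name are made up for illustration. Passing null for the monitor is safe because every monitor use in loadJobMeta is null-guarded.

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.repository.kdr.delegates.KettleDatabaseRepositoryJobDelegate;

class LoadJobDemo {
  // 'repository' must already be connected; 'delegate' is its job delegate
  static JobMeta loadDailyEtl(KettleDatabaseRepository repository,
      KettleDatabaseRepositoryJobDelegate delegate) throws KettleException {
    RepositoryDirectoryInterface jobsDir =
        repository.loadRepositoryDirectoryTree().findDirectory("/public/jobs"); // hypothetical path
    return delegate.loadJobMeta("daily_etl", jobsDir, null); // null monitor: no progress reporting
  }
}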