Use of org.pentaho.metaverse.api.analyzer.kettle.annotations.AnnotatedClassFields in project pentaho-metaverse by pentaho.
The class JobAnalyzer, method analyze:
@Override
public synchronized IMetaverseNode analyze(final IComponentDescriptor documentDescriptor, final AbstractMeta meta, final IMetaverseNode node, final String documentPath) throws MetaverseAnalyzerException {
  final JobMeta jobMeta = (JobMeta) meta;
  Job j = new Job(null, jobMeta);
  j.setInternalKettleVariables(jobMeta);
  // pull out the standard fields
  String description = jobMeta.getDescription();
  if (description != null) {
    node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
  }
  String extendedDescription = jobMeta.getExtendedDescription();
  if (extendedDescription != null) {
    node.setProperty("extendedDescription", extendedDescription);
  }
  Date createdDate = jobMeta.getCreatedDate();
  if (createdDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
  }
  String createdUser = jobMeta.getCreatedUser();
  if (createdUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
  }
  Date lastModifiedDate = jobMeta.getModifiedDate();
  if (lastModifiedDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
  }
  String lastModifiedUser = jobMeta.getModifiedUser();
  if (lastModifiedUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
  }
  String version = jobMeta.getJobversion();
  if (version != null) {
    node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
  }
  String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(jobMeta.getJobstatus()));
  if (status != null && !status.startsWith("!")) {
    node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
  }
  node.setProperty(DictionaryConst.PROPERTY_PATH, documentPath);
  // Process job parameters
  String[] parameters = jobMeta.listParameters();
  if (parameters != null) {
    for (String parameter : parameters) {
      try {
        // Determine parameter properties and add them to a map, then the map to the list
        String defaultParameterValue = jobMeta.getParameterDefault(parameter);
        String parameterValue = jobMeta.getParameterValue(parameter);
        String parameterDescription = jobMeta.getParameterDescription(parameter);
        PropertiesHolder paramProperties = new PropertiesHolder();
        paramProperties.setProperty("defaultValue", defaultParameterValue);
        paramProperties.setProperty("value", parameterValue);
        paramProperties.setProperty("description", parameterDescription);
        node.setProperty("parameter_" + parameter, paramProperties.toString());
      } catch (UnknownParamException upe) {
        // This shouldn't happen as we're using the list provided by the meta
        throw new MetaverseAnalyzerException(upe);
      }
    }
  }
  // handle the entries
  for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
    JobEntryCopy entry = jobMeta.getJobEntry(i);
    try {
      if (entry != null) {
        entry.getEntry().setParentJob(j);
        IMetaverseNode jobEntryNode = null;
        JobEntryInterface jobEntryInterface = entry.getEntry();
        IComponentDescriptor entryDescriptor = new MetaverseComponentDescriptor(entry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY, node, documentDescriptor.getContext());
        Set<IJobEntryAnalyzer> jobEntryAnalyzers = getJobEntryAnalyzers(jobEntryInterface);
        if (jobEntryAnalyzers != null && !jobEntryAnalyzers.isEmpty()) {
          for (IJobEntryAnalyzer jobEntryAnalyzer : jobEntryAnalyzers) {
            // clone the analyzer, if possible, so that any state it keeps is local to this analysis and does not change while the job is being analyzed
            if (jobEntryAnalyzer instanceof IClonableJobEntryAnalyzer) {
              jobEntryAnalyzer = ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).cloneAnalyzer();
              ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).setDocumentAnalyzer(this);
              ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).setDocumentDescriptor(documentDescriptor);
              ((IClonableJobEntryAnalyzer) jobEntryAnalyzer).setDocumentPath(documentPath);
            } else {
              log.debug(Messages.getString("WARNING.CannotCloneAnalyzer"), jobEntryAnalyzer);
            }
            jobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
            jobEntryNode = (IMetaverseNode) jobEntryAnalyzer.analyze(entryDescriptor, entry.getEntry());
          }
        } else if (new AnnotatedClassFields(jobEntryInterface, jobEntryInterface.getParentJobMeta()).hasMetaverseAnnotations()) {
          AnnotationDrivenJobAnalyzer annotationDrivenJobAnalyzer = new AnnotationDrivenJobAnalyzer(jobEntryInterface);
          annotationDrivenJobAnalyzer.setMetaverseBuilder(metaverseBuilder);
          annotationDrivenJobAnalyzer.setDocumentAnalyzer(this);
          annotationDrivenJobAnalyzer.setDocumentDescriptor(documentDescriptor);
          annotationDrivenJobAnalyzer.setDocumentPath(documentPath);
          jobEntryNode = annotationDrivenJobAnalyzer.analyze(entryDescriptor, jobEntryInterface);
        } else {
          GenericJobEntryMetaAnalyzer defaultJobEntryAnalyzer = new GenericJobEntryMetaAnalyzer();
          defaultJobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
          jobEntryNode = defaultJobEntryAnalyzer.analyze(entryDescriptor, jobEntryInterface);
        }
        if (jobEntryNode != null) {
          metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, jobEntryNode);
        }
      }
    } catch (Exception mae) {
      // Don't throw an exception, just log and carry on
      log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", entry.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
    }
  }
  // Model the hops between job entries
  int numHops = jobMeta.nrJobHops();
  for (int i = 0; i < numHops; i++) {
    JobHopMeta hop = jobMeta.getJobHop(i);
    JobEntryCopy fromEntry = hop.getFromEntry();
    JobEntryCopy toEntry = hop.getToEntry();
    INamespace childNs = new Namespace(node.getLogicalId());
    // process legitimate hops
    if (fromEntry != null && toEntry != null) {
      IMetaverseNode fromEntryNode = metaverseObjectFactory.createNodeObject(childNs, fromEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
      IMetaverseNode toEntryNode = metaverseObjectFactory.createNodeObject(childNs, toEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
      metaverseBuilder.addLink(fromEntryNode, DictionaryConst.LINK_HOPSTO, toEntryNode);
    }
  }
  metaverseBuilder.addNode(node);
  addParentLink(documentDescriptor, node);
  return node;
}
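
The branch that actually exercises AnnotatedClassFields is the else-if above: registered IJobEntryAnalyzer implementations take priority, the annotation-driven analyzer handles entries whose classes carry metaverse annotations, and GenericJobEntryMetaAnalyzer is the fallback. Below is a minimal sketch of just that check, using only the constructor and method visible in the listing; the wrapper class and method name are illustrative, not part of the project.

import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.metaverse.api.analyzer.kettle.annotations.AnnotatedClassFields;

// Illustrative helper (not in pentaho-metaverse): isolates the check that JobAnalyzer.analyze
// uses to decide whether AnnotationDrivenJobAnalyzer can handle a job entry.
public class JobEntryAnnotationCheck {

  // True when the entry's class declares metaverse lineage annotations, mirroring the
  // else-if branch above; the parent JobMeta is passed as the second constructor argument,
  // exactly as in the listing.
  public static boolean supportsAnnotationDrivenAnalysis(JobEntryInterface jobEntryInterface) {
    return new AnnotatedClassFields(jobEntryInterface, jobEntryInterface.getParentJobMeta())
      .hasMetaverseAnnotations();
  }
}
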
Use of org.pentaho.metaverse.api.analyzer.kettle.annotations.AnnotatedClassFields in project pentaho-metaverse by pentaho.
The class TransformationAnalyzer, method analyze:
@Override
public synchronized IMetaverseNode analyze(final IComponentDescriptor documentDescriptor, final AbstractMeta meta, final IMetaverseNode node, final String documentPath) throws MetaverseAnalyzerException {
  final TransMeta transMeta = (TransMeta) meta;
  Trans t = new Trans(transMeta);
  t.setInternalKettleVariables(transMeta);
  // pull out the standard fields
  String description = transMeta.getDescription();
  if (description != null) {
    node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
  }
  String extendedDescription = transMeta.getExtendedDescription();
  if (extendedDescription != null) {
    node.setProperty("extendedDescription", extendedDescription);
  }
  Date createdDate = transMeta.getCreatedDate();
  if (createdDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
  }
  String createdUser = transMeta.getCreatedUser();
  if (createdUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
  }
  Date lastModifiedDate = transMeta.getModifiedDate();
  if (lastModifiedDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
  }
  String lastModifiedUser = transMeta.getModifiedUser();
  if (lastModifiedUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
  }
  String version = transMeta.getTransversion();
  if (version != null) {
    node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
  }
  String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(transMeta.getTransstatus()));
  if (status != null && !status.startsWith("!")) {
    node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
  }
  node.setProperty(DictionaryConst.PROPERTY_PATH, documentPath);
  String[] parameters = transMeta.listParameters();
  if (parameters != null) {
    for (String parameter : parameters) {
      try {
        // Determine parameter properties and add them to a map, then the map to the list
        String defaultParameterValue = transMeta.getParameterDefault(parameter);
        String parameterValue = transMeta.getParameterValue(parameter);
        String parameterDescription = transMeta.getParameterDescription(parameter);
        PropertiesHolder paramProperties = new PropertiesHolder();
        paramProperties.setProperty("defaultValue", defaultParameterValue);
        paramProperties.setProperty("value", parameterValue);
        paramProperties.setProperty("description", parameterDescription);
        node.setProperty("parameter_" + parameter, paramProperties.toString());
      } catch (UnknownParamException upe) {
        // This shouldn't happen as we're using the list provided by the meta
        throw new MetaverseAnalyzerException(upe);
      }
    }
  }
  final List<AnalyzerHolder> analyzerHolders = new ArrayList<>();
  // handle the step
  for (int stepNr = 0; stepNr < transMeta.nrSteps(); stepNr++) {
    StepMeta stepMeta = transMeta.getStep(stepNr);
    try {
      if (stepMeta != null) {
        if (stepMeta.getParentTransMeta() == null) {
          stepMeta.setParentTransMeta(transMeta);
        }
        IMetaverseNode stepNode = null;
        IComponentDescriptor stepDescriptor = new MetaverseComponentDescriptor(stepMeta.getName(), DictionaryConst.NODE_TYPE_TRANS_STEP, node, documentDescriptor.getContext());
        Set<IStepAnalyzer> stepAnalyzers = getStepAnalyzers(stepMeta);
        final BaseStepMeta baseStepMeta = getBaseStepMetaFromStepMeta(stepMeta);
        if (stepAnalyzers != null && !stepAnalyzers.isEmpty()) {
          for (IStepAnalyzer stepAnalyzer : stepAnalyzers) {
            // clone the analyzer, if possible, so that its state is local to this transformation execution and does not change while the transformation is being analyzed
            if (stepAnalyzer instanceof IClonableStepAnalyzer) {
              stepAnalyzer = ((IClonableStepAnalyzer) stepAnalyzer).cloneAnalyzer();
              ((IClonableStepAnalyzer) stepAnalyzer).setDocumentAnalyzer(this);
              ((IClonableStepAnalyzer) stepAnalyzer).setDocumentDescriptor(documentDescriptor);
              ((IClonableStepAnalyzer) stepAnalyzer).setDocumentPath(documentPath);
            } else {
              log.debug(Messages.getString("WARNING.CannotCloneAnalyzer"), stepAnalyzer);
            }
            stepAnalyzer.setMetaverseBuilder(metaverseBuilder);
            stepNode = (IMetaverseNode) stepAnalyzer.analyze(stepDescriptor, baseStepMeta);
            analyzerHolders.add(new AnalyzerHolder(stepAnalyzer, baseStepMeta, stepNode));
          }
        } else if ((new AnnotatedClassFields(baseStepMeta)).hasMetaverseAnnotations()) {
          AnnotationDrivenStepMetaAnalyzer annotationDrivenStepMetaAnalyzer = new AnnotationDrivenStepMetaAnalyzer(baseStepMeta);
          annotationDrivenStepMetaAnalyzer.setMetaverseBuilder(metaverseBuilder);
          annotationDrivenStepMetaAnalyzer.setDocumentAnalyzer(this);
          annotationDrivenStepMetaAnalyzer.setDocumentDescriptor(documentDescriptor);
          annotationDrivenStepMetaAnalyzer.setDocumentPath(documentPath);
          stepNode = annotationDrivenStepMetaAnalyzer.analyze(stepDescriptor, baseStepMeta);
          analyzerHolders.add(new AnalyzerHolder(annotationDrivenStepMetaAnalyzer, baseStepMeta, stepNode));
        } else {
          GenericStepMetaAnalyzer defaultStepAnalyzer = new GenericStepMetaAnalyzer();
          defaultStepAnalyzer.setMetaverseBuilder(metaverseBuilder);
          stepNode = defaultStepAnalyzer.analyze(stepDescriptor, getBaseStepMetaFromStepMeta(stepMeta));
        }
        if (stepNode != null) {
          metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, stepNode);
        }
      }
    } catch (Throwable mae) {
      // Don't throw an exception, just log and carry on
      log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", stepMeta.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
    }
  }
  // Model the hops between steps
  int numHops = transMeta.nrTransHops();
  for (int i = 0; i < numHops; i++) {
    TransHopMeta hop = transMeta.getTransHop(i);
    StepMeta fromStep = hop.getFromStep();
    StepMeta toStep = hop.getToStep();
    INamespace childNs = new Namespace(node.getLogicalId());
    // process legitimate hops
    if (fromStep != null && toStep != null) {
      IMetaverseNode fromStepNode = metaverseObjectFactory.createNodeObject(childNs, fromStep.getName(), DictionaryConst.NODE_TYPE_TRANS_STEP);
      IMetaverseNode toStepNode = metaverseObjectFactory.createNodeObject(childNs, toStep.getName(), DictionaryConst.NODE_TYPE_TRANS_STEP);
      // Create and decorate the link between the steps
      IMetaverseLink link = metaverseObjectFactory.createLinkObject();
      link.setFromNode(fromStepNode);
      link.setLabel(DictionaryConst.LINK_HOPSTO);
      link.setToNode(toStepNode);
      // Is this hop enabled?
      link.setProperty(DictionaryConst.PROPERTY_ENABLED, hop.isEnabled());
      // Add metadata about the type of stream (target, error, info) it is. Default to "target".
      String linkType = "target";
      if (fromStep.isSendingErrorRowsToStep(toStep)) {
        linkType = "error";
      } else {
        String[] infoStepnames = toStep.getStepMetaInterface().getStepIOMeta().getInfoStepnames();
        // If the "from" step is the source of an info stream to the "to" step, it's an "info" hop
        if (Const.indexOfString(fromStep.getName(), infoStepnames) >= 0) {
          linkType = "info";
        }
      }
      link.setProperty(DictionaryConst.PROPERTY_TYPE, linkType);
      metaverseBuilder.addLink(link);
    }
  }
  metaverseBuilder.addNode(node);
  addParentLink(documentDescriptor, node);
  // perform any necessary post processing - currently only supported on IClonableStepAnalyzers
  for (final AnalyzerHolder analyzerHolder : analyzerHolders) {
    if (analyzerHolder.getAnalyzer() instanceof IClonableStepAnalyzer) {
      final IClonableStepAnalyzer clonableAnalyzer = (IClonableStepAnalyzer) analyzerHolder.getAnalyzer();
      clonableAnalyzer.postAnalyze(analyzerHolder.getMeta());
    }
  }
  return node;
}
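
On the transformation side, AnnotatedClassFields is built directly from the step's BaseStepMeta (the single-argument constructor in the else-if above) to decide between AnnotationDrivenStepMetaAnalyzer and GenericStepMetaAnalyzer. A comparable sketch of that check, again with an illustrative wrapper class and method name that are not part of the project:

import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.metaverse.api.analyzer.kettle.annotations.AnnotatedClassFields;

// Illustrative helper (not in pentaho-metaverse): isolates the check that
// TransformationAnalyzer.analyze uses to decide whether AnnotationDrivenStepMetaAnalyzer
// can handle a step.
public class StepAnnotationCheck {

  // True when the step meta class declares metaverse lineage annotations, mirroring
  // the else-if branch above.
  public static boolean supportsAnnotationDrivenAnalysis(BaseStepMeta baseStepMeta) {
    return new AnnotatedClassFields(baseStepMeta).hasMetaverseAnnotations();
  }
}
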