Use of org.pentaho.metaverse.api.IMetaverseNode in project pentaho-metaverse by pentaho.
The class BaseDocumentAnalyzer, method addParentLink.
/**
 * This method creates the document node's relationships with the system-level
 * structural and data nodes in the graph.
 *
 * @param descriptor the component descriptor whose string ID identifies the locator node
 * @param child      the document node
 * @throws MetaverseAnalyzerException
 */
public void addParentLink(IComponentDescriptor descriptor, IMetaverseNode child) throws MetaverseAnalyzerException {
  // The document is always a child of the locator. If this is not the case, then do not
  // subclass this document analyzer.
  //
  // This will create a virtual node that will line up with the correct
  // locator node for this document in the graph.
  IMetaverseNode locatorNode = metaverseObjectFactory.createNodeObject(descriptor.getStringID());
  metaverseBuilder.addLink(locatorNode, DictionaryConst.LINK_CONTAINS, child);
}
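A minimal usage sketch follows. The namespace name, node type, IDs, and the assumption that this runs inside a BaseDocumentAnalyzer subclass (so metaverseObjectFactory is in scope) are illustrative, not taken from the project.
// Hedged usage sketch: link an illustrative document node under its locator.
INamespace namespace = new Namespace("FileSystemLocator");  // assumed locator name
IComponentDescriptor descriptor =
  new MetaverseComponentDescriptor("sample.ktr", DictionaryConst.NODE_TYPE_TRANS, namespace);
IMetaverseNode documentNode =
  metaverseObjectFactory.createNodeObject("file:///tmp/sample.ktr", "sample.ktr", DictionaryConst.NODE_TYPE_TRANS);
// Creates a virtual locator node from descriptor.getStringID() and links the document under it.
addParentLink(descriptor, documentNode);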
Use of org.pentaho.metaverse.api.IMetaverseNode in project pentaho-metaverse by pentaho.
The class TransExtensionPointUtil, method addLineageGraph.
public static void addLineageGraph(final TransMeta transMeta) throws MetaverseException {
  if (transMeta == null) {
    throw new MetaverseException(Messages.getString("ERROR.Document.IsNull"));
  }
  // Get the "natural" filename (repo-based if in repository, filesystem-based otherwise)
  String filename = getFilename(transMeta);
  final Graph graph = new TinkerGraph();
  final IMetaverseBuilder metaverseBuilder = new MetaverseBuilder(graph);
  final IMetaverseObjectFactory objFactory = MetaverseUtil.getDocumentController().getMetaverseObjectFactory();
  // Add the client design node
  final String clientName = KettleClientEnvironment.getInstance().getClient().toString();
  final INamespace namespace = new Namespace(clientName);
  final IMetaverseNode designNode =
    objFactory.createNodeObject(clientName, clientName, DictionaryConst.NODE_TYPE_LOCATOR);
  metaverseBuilder.addNode(designNode);
  // Create a document object containing the transMeta
  final IDocument document = MetaverseUtil.createDocument(namespace, transMeta, filename, transMeta.getName(),
    "ktr", URLConnection.getFileNameMap().getContentTypeFor("trans.ktr"));
  MetaverseUtil.addLineageGraph(document, graph);
}
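A minimal caller sketch, assuming the Kettle environment has already been initialized and using an illustrative .ktr path:
public static void main(String[] args) throws Exception {
  // Hedged example: the path below is illustrative; loading the .ktr requires an initialized Kettle environment.
  KettleEnvironment.init();
  TransMeta transMeta = new TransMeta("/tmp/sample_trans.ktr");
  TransExtensionPointUtil.addLineageGraph(transMeta);  // throws MetaverseException if transMeta is null
}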
Use of org.pentaho.metaverse.api.IMetaverseNode in project pentaho-metaverse by pentaho.
The class TransformationRuntimeExtensionPoint, method startAnalyzer.
protected void startAnalyzer(Trans trans) throws KettleException {
  if (trans == null) {
    return;
  }
  // Create and populate an execution profile with what we know so far
  ExecutionProfile executionProfile = new ExecutionProfile();
  populateExecutionProfile(executionProfile, trans);
  IMetaverseBuilder builder = TransLineageHolderMap.getInstance().getMetaverseBuilder(trans);
  // Analyze the current transformation
  if (documentAnalyzer != null) {
    documentAnalyzer.setMetaverseBuilder(builder);
    // Create a document for the Trans
    final String clientName = executionProfile.getExecutionEngine().getName();
    final INamespace namespace = new Namespace(clientName);
    final IMetaverseNode designNode =
      builder.getMetaverseObjectFactory().createNodeObject(clientName, clientName, DictionaryConst.NODE_TYPE_LOCATOR);
    builder.addNode(designNode);
    final TransMeta transMeta = trans.getTransMeta();
    String id = TransExtensionPointUtil.getFilename(transMeta);
    IDocument metaverseDocument = builder.getMetaverseObjectFactory().createDocumentObject();
    metaverseDocument.setNamespace(namespace);
    metaverseDocument.setContent(transMeta);
    metaverseDocument.setStringID(id);
    metaverseDocument.setName(transMeta.getName());
    metaverseDocument.setExtension("ktr");
    metaverseDocument.setMimeType(URLConnection.getFileNameMap().getContentTypeFor("trans.ktr"));
    metaverseDocument.setContext(new AnalysisContext(DictionaryConst.CONTEXT_RUNTIME));
    String normalizedPath;
    try {
      normalizedPath = KettleAnalyzerUtil.normalizeFilePath(id);
    } catch (MetaverseException e) {
      normalizedPath = id;
    }
    metaverseDocument.setProperty(DictionaryConst.PROPERTY_NAME, trans.getName());
    metaverseDocument.setProperty(DictionaryConst.PROPERTY_PATH, normalizedPath);
    metaverseDocument.setProperty(DictionaryConst.PROPERTY_NAMESPACE, namespace.getNamespaceId());
    Runnable analyzerRunner = MetaverseUtil.getAnalyzerRunner(documentAnalyzer, metaverseDocument);
    MetaverseCompletionService.getInstance().submit(analyzerRunner, id);
  }
  // Save the lineage objects for later
  LineageHolder holder = TransLineageHolderMap.getInstance().getLineageHolder(trans);
  holder.setExecutionProfile(executionProfile);
  holder.setMetaverseBuilder(builder);
}
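This method is presumably driven from the extension-point callback when a transformation starts. A hedged sketch of such a callback is below; the instanceof guard and the exact wiring are assumptions, since only startAnalyzer itself is shown above.
@Override
public void callExtensionPoint(LogChannelInterface log, Object object) throws KettleException {
  // Assumed wiring: Kettle passes the running Trans to the extension point, which starts the runtime analyzer.
  if (object instanceof Trans) {
    startAnalyzer((Trans) object);
  }
}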
Use of org.pentaho.metaverse.api.IMetaverseNode in project pentaho-metaverse by pentaho.
The class JobJobEntryAnalyzer, method customAnalyze.
@Override
protected void customAnalyze(JobEntryJob entry, IMetaverseNode rootNode) throws MetaverseAnalyzerException {
  JobMeta subJobMeta = null;
  JobMeta parentJobMeta = entry.getParentJob().getJobMeta();
  // For some reason the JobMeta's variables have been reset by now, so re-activate them
  parentJobMeta.activateParameters();
  Repository repo = parentJobMeta.getRepository();
  String jobPath = null;
  switch (entry.getSpecificationMethod()) {
    case FILENAME:
      try {
        jobPath = parentJobMeta.environmentSubstitute(entry.getFilename());
        String normalized = KettleAnalyzerUtil.normalizeFilePath(jobPath);
        subJobMeta = getSubJobMeta(normalized);
        jobPath = normalized;
      } catch (Exception e) {
        throw new MetaverseAnalyzerException("Sub job can not be found - " + jobPath, e);
      }
      break;
    case REPOSITORY_BY_NAME:
      if (repo != null) {
        String dir = parentJobMeta.environmentSubstitute(entry.getDirectory());
        String file = parentJobMeta.environmentSubstitute(entry.getJobName());
        try {
          RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
          subJobMeta = repo.loadJob(file, rdi, null, null);
          String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
          jobPath = filename + "." + subJobMeta.getDefaultExtension();
        } catch (KettleException e) {
          throw new MetaverseAnalyzerException("Sub job can not be found in repository - " + file, e);
        }
      } else {
        throw new MetaverseAnalyzerException("Not connected to a repository, can't get the job");
      }
      break;
    case REPOSITORY_BY_REFERENCE:
      if (repo != null) {
        try {
          subJobMeta = repo.loadJob(entry.getJobObjectId(), null);
          String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
          jobPath = filename + "." + subJobMeta.getDefaultExtension();
        } catch (KettleException e) {
          throw new MetaverseAnalyzerException("Sub job can not be found by reference - " + entry.getJobObjectId(), e);
        }
      } else {
        throw new MetaverseAnalyzerException("Not connected to a repository, can't get the job");
      }
      break;
  }
  IComponentDescriptor ds = new MetaverseComponentDescriptor(subJobMeta.getName(), DictionaryConst.NODE_TYPE_JOB,
    descriptor.getNamespace().getParentNamespace());
  IMetaverseNode jobNode = createNodeFromDescriptor(ds);
  jobNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
  jobNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
  jobNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
  metaverseBuilder.addLink(rootNode, DictionaryConst.LINK_EXECUTES, jobNode);
}
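getSubJobMeta is not shown in this snippet. A minimal sketch of what the FILENAME case might look like, assuming a Kettle VFS load with no repository or prompter:
protected JobMeta getSubJobMeta(String filePath) throws KettleException {
  // Hedged sketch only: load the sub-job metadata from the normalized file path resolved above.
  InputStream xmlStream = KettleVFS.getInputStream(filePath);  // resolve the path via Kettle VFS
  return new JobMeta(xmlStream, null, null);                   // no repository, no prompter
}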
Use of org.pentaho.metaverse.api.IMetaverseNode in project pentaho-metaverse by pentaho.
The class TransJobEntryAnalyzer, method customAnalyze.
@Override
protected void customAnalyze(JobEntryTrans entry, IMetaverseNode rootNode) throws MetaverseAnalyzerException {
  TransMeta subTransMeta = null;
  JobMeta parentJobMeta = entry.getParentJob().getJobMeta();
  // For some reason the JobMeta's variables have been reset by now, so re-activate them
  parentJobMeta.activateParameters();
  Repository repo = parentJobMeta.getRepository();
  String transPath = null;
  switch (entry.getSpecificationMethod()) {
    case FILENAME:
      try {
        transPath = parentJobMeta.environmentSubstitute(entry.getFilename());
        String normalized = KettleAnalyzerUtil.normalizeFilePath(transPath);
        subTransMeta = getSubTransMeta(normalized);
        transPath = normalized;
      } catch (Exception e) {
        throw new MetaverseAnalyzerException("Sub transformation can not be found - " + transPath, e);
      }
      break;
    case REPOSITORY_BY_NAME:
      if (repo != null) {
        String dir = parentJobMeta.environmentSubstitute(entry.getDirectory());
        String file = parentJobMeta.environmentSubstitute(entry.getTransname());
        try {
          RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
          subTransMeta = repo.loadTransformation(file, rdi, null, true, null);
          transPath = subTransMeta.getPathAndName() + "." + subTransMeta.getDefaultExtension();
        } catch (KettleException e) {
          throw new MetaverseAnalyzerException("Sub transformation can not be found in repository - " + file, e);
        }
      } else {
        throw new MetaverseAnalyzerException("Not connected to a repository, can't get the transformation");
      }
      break;
    case REPOSITORY_BY_REFERENCE:
      if (repo != null) {
        try {
          subTransMeta = repo.loadTransformation(entry.getTransObjectId(), null);
          transPath = subTransMeta.getPathAndName() + "." + subTransMeta.getDefaultExtension();
        } catch (KettleException e) {
          throw new MetaverseAnalyzerException("Sub transformation can not be found by reference - "
            + entry.getTransObjectId(), e);
        }
      } else {
        throw new MetaverseAnalyzerException("Not connected to a repository, can't get the transformation");
      }
      break;
  }
  IComponentDescriptor ds = new MetaverseComponentDescriptor(subTransMeta.getName(), DictionaryConst.NODE_TYPE_TRANS,
    descriptor.getNamespace().getParentNamespace());
  IMetaverseNode transformationNode = createNodeFromDescriptor(ds);
  transformationNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
  transformationNode.setProperty(DictionaryConst.PROPERTY_PATH, transPath);
  transformationNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
  metaverseBuilder.addLink(rootNode, DictionaryConst.LINK_EXECUTES, transformationNode);
}
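As with the job entry analyzer above, getSubTransMeta is not shown here. An analogous hedged sketch for the FILENAME case, again assuming a Kettle VFS load with no repository:
protected TransMeta getSubTransMeta(String filePath) throws KettleException {
  // Hedged sketch only: load the sub-transformation metadata from the normalized file path resolved above.
  InputStream xmlStream = KettleVFS.getInputStream(filePath);  // resolve the path via Kettle VFS
  return new TransMeta(xmlStream, null, false, null, null);    // no repository, internal variables, or prompter
}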