
Example 41 with Job

use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

the class JobAnalyzer method analyze.

@Override
public synchronized IMetaverseNode analyze(IComponentDescriptor descriptor, IDocument document) throws MetaverseAnalyzerException {
    validateState(document);
    Object repoObject = document.getContent();
    JobMeta jobMeta = null;
    if (repoObject instanceof String) {
        // hydrate the job
        try {
            String content = (String) repoObject;
            ByteArrayInputStream xmlStream = new ByteArrayInputStream(content.getBytes());
            jobMeta = new JobMeta(xmlStream, null, null);
        } catch (KettleXMLException e) {
            throw new MetaverseAnalyzerException(e);
        }
    } else if (repoObject instanceof JobMeta) {
        jobMeta = (JobMeta) repoObject;
    }
    // construct a dummy job based on our JobMeta so we get our VariableSpace set properly
    jobMeta.setFilename(document.getStringID());
    Job j = new Job(null, jobMeta);
    j.setInternalKettleVariables(jobMeta);
    IComponentDescriptor documentDescriptor = new MetaverseComponentDescriptor(document.getStringID(), DictionaryConst.NODE_TYPE_JOB, new Namespace(descriptor.getLogicalId()), descriptor.getContext());
    // Create a metaverse node and start filling in details
    IMetaverseNode node = metaverseObjectFactory.createNodeObject(document.getNamespace(), jobMeta.getName(), DictionaryConst.NODE_TYPE_JOB);
    node.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
    // pull out the standard fields
    String description = jobMeta.getDescription();
    if (description != null) {
        node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
    }
    String extendedDescription = jobMeta.getExtendedDescription();
    if (extendedDescription != null) {
        node.setProperty("extendedDescription", extendedDescription);
    }
    Date createdDate = jobMeta.getCreatedDate();
    if (createdDate != null) {
        node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
    }
    String createdUser = jobMeta.getCreatedUser();
    if (createdUser != null) {
        node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
    }
    Date lastModifiedDate = jobMeta.getModifiedDate();
    if (lastModifiedDate != null) {
        node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
    }
    String lastModifiedUser = jobMeta.getModifiedUser();
    if (lastModifiedUser != null) {
        node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
    }
    String version = jobMeta.getJobversion();
    if (version != null) {
        node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
    }
    String status = Messages.getString("INFO.JobOrTrans.Status_" + Integer.toString(jobMeta.getJobstatus()));
    if (status != null && !status.startsWith("!")) {
        node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
    }
    node.setProperty(DictionaryConst.PROPERTY_PATH, document.getProperty(DictionaryConst.PROPERTY_PATH));
    // Process job parameters
    String[] parameters = jobMeta.listParameters();
    if (parameters != null) {
        for (String parameter : parameters) {
            try {
                // Determine parameter properties and add them to a map, then the map to the list
                String defaultParameterValue = jobMeta.getParameterDefault(parameter);
                String parameterValue = jobMeta.getParameterValue(parameter);
                String parameterDescription = jobMeta.getParameterDescription(parameter);
                PropertiesHolder paramProperties = new PropertiesHolder();
                paramProperties.setProperty("defaultValue", defaultParameterValue);
                paramProperties.setProperty("value", parameterValue);
                paramProperties.setProperty("description", parameterDescription);
                node.setProperty("parameter_" + parameter, paramProperties.toString());
            } catch (UnknownParamException upe) {
                // This shouldn't happen as we're using the list provided by the meta
                throw new MetaverseAnalyzerException(upe);
            }
        }
    }
    // handle the entries
    for (int i = 0; i < jobMeta.nrJobEntries(); i++) {
        JobEntryCopy entry = jobMeta.getJobEntry(i);
        try {
            if (entry != null) {
                entry.getEntry().setParentJob(j);
                IMetaverseNode jobEntryNode = null;
                JobEntryInterface jobEntryInterface = entry.getEntry();
                IComponentDescriptor entryDescriptor = new MetaverseComponentDescriptor(entry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY, node, descriptor.getContext());
                Set<IJobEntryAnalyzer> jobEntryAnalyzers = getJobEntryAnalyzers(jobEntryInterface);
                if (jobEntryAnalyzers != null && !jobEntryAnalyzers.isEmpty()) {
                    for (IJobEntryAnalyzer jobEntryAnalyzer : jobEntryAnalyzers) {
                        jobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
                        jobEntryNode = (IMetaverseNode) jobEntryAnalyzer.analyze(entryDescriptor, entry.getEntry());
                    }
                } else {
                    GenericJobEntryMetaAnalyzer defaultJobEntryAnalyzer = new GenericJobEntryMetaAnalyzer();
                    defaultJobEntryAnalyzer.setMetaverseBuilder(metaverseBuilder);
                    jobEntryNode = defaultJobEntryAnalyzer.analyze(entryDescriptor, jobEntryInterface);
                }
                if (jobEntryNode != null) {
                    metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, jobEntryNode);
                }
            }
        } catch (Throwable mae) {
            // Don't throw an exception, just log and carry on
            log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", entry.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
        }
    }
    // Model the hops between job entries
    int numHops = jobMeta.nrJobHops();
    for (int i = 0; i < numHops; i++) {
        JobHopMeta hop = jobMeta.getJobHop(i);
        JobEntryCopy fromEntry = hop.getFromEntry();
        JobEntryCopy toEntry = hop.getToEntry();
        INamespace childNs = new Namespace(node.getLogicalId());
        // process legitimate hops
        if (fromEntry != null && toEntry != null) {
            IMetaverseNode fromEntryNode = metaverseObjectFactory.createNodeObject(childNs, fromEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
            IMetaverseNode toEntryNode = metaverseObjectFactory.createNodeObject(childNs, toEntry.getName(), DictionaryConst.NODE_TYPE_JOB_ENTRY);
            metaverseBuilder.addLink(fromEntryNode, DictionaryConst.LINK_HOPSTO, toEntryNode);
        }
    }
    metaverseBuilder.addNode(node);
    addParentLink(documentDescriptor, node);
    return node;
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) PropertiesHolder(org.pentaho.metaverse.api.PropertiesHolder) JobHopMeta(org.pentaho.di.job.JobHopMeta) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) IJobEntryAnalyzer(org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryAnalyzer) INamespace(org.pentaho.metaverse.api.INamespace) Namespace(org.pentaho.metaverse.api.Namespace) Date(java.util.Date) MetaverseComponentDescriptor(org.pentaho.metaverse.api.MetaverseComponentDescriptor) IComponentDescriptor(org.pentaho.metaverse.api.IComponentDescriptor) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) INamespace(org.pentaho.metaverse.api.INamespace) ByteArrayInputStream(java.io.ByteArrayInputStream) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) GenericJobEntryMetaAnalyzer(org.pentaho.metaverse.analyzer.kettle.jobentry.GenericJobEntryMetaAnalyzer) Job(org.pentaho.di.job.Job)
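
Before any of the node construction above, analyze() hydrates a JobMeta from raw KJB XML and wraps it in a throwaway Job so the internal variable space is set. A minimal standalone sketch of that hydration step, assuming a .kjb file on disk and an already-initialized Kettle environment (the class name and path handling are hypothetical; the Kettle calls mirror the ones in the method above):

import java.io.ByteArrayInputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;

public class JobMetaHydrationSketch {

    // Mirrors the hydration at the top of JobAnalyzer.analyze(): parse the KJB XML
    // into a JobMeta, then attach a dummy Job so internal Kettle variables
    // (filename, variable space) are populated before analysis.
    public static Job hydrate(String kjbPath) throws Exception {
        byte[] xml = Files.readAllBytes(Paths.get(kjbPath));
        JobMeta jobMeta;
        try {
            jobMeta = new JobMeta(new ByteArrayInputStream(xml), null, null);
        } catch (KettleXMLException e) {
            throw new Exception("Could not parse KJB XML: " + kjbPath, e);
        }
        jobMeta.setFilename(kjbPath);
        Job job = new Job(null, jobMeta);
        job.setInternalKettleVariables(jobMeta);
        return job;
    }
}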

Example 42 with Job

use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

the class JobRuntimeExtensionPoint method callExtensionPoint.

/**
 * Callback when a job is about to be started
 *
 * @param logChannelInterface A reference to the log in this context (the Job object's log)
 * @param o                   The object being operated on (Job in this case)
 * @throws org.pentaho.di.core.exception.KettleException
 */
@Override
public void callExtensionPoint(LogChannelInterface logChannelInterface, Object o) throws KettleException {
    // Job Started listeners get called after the extension point is invoked, so just add a job listener
    if (o instanceof Job) {
        Job job = ((Job) o);
        // If runtime lineage collection is disabled, don't run any lineage processes/methods
        if (!isRuntimeEnabled()) {
            return;
        }
        // Create and populate an execution profile with what we know so far
        ExecutionProfile executionProfile = new ExecutionProfile();
        populateExecutionProfile(executionProfile, job);
        IMetaverseBuilder builder = JobLineageHolderMap.getInstance().getMetaverseBuilder(job);
        // Add the job finished listener
        job.addJobListener(this);
        // Analyze the current job
        if (documentAnalyzer != null) {
            documentAnalyzer.setMetaverseBuilder(builder);
            // Create a document for the Job
            final String clientName = executionProfile.getExecutionEngine().getName();
            final INamespace namespace = new Namespace(clientName);
            final IMetaverseNode designNode = builder.getMetaverseObjectFactory().createNodeObject(clientName, clientName, DictionaryConst.NODE_TYPE_LOCATOR);
            builder.addNode(designNode);
            final JobMeta jobMeta = job.getJobMeta();
            // The variables and parameters in the Job may not have been set on the meta, so we do it here
            // to ensure the job analyzer will have access to the parameter values.
            jobMeta.copyParametersFrom(job);
            jobMeta.activateParameters();
            job.copyVariablesFrom(jobMeta);
            if (job.getRep() != null) {
                jobMeta.setRepository(job.getRep());
            }
            String id = getFilename(jobMeta);
            if (!id.endsWith(jobMeta.getDefaultExtension())) {
                id += "." + jobMeta.getDefaultExtension();
            }
            IDocument metaverseDocument = builder.getMetaverseObjectFactory().createDocumentObject();
            metaverseDocument.setNamespace(namespace);
            metaverseDocument.setContent(jobMeta);
            metaverseDocument.setStringID(id);
            metaverseDocument.setName(jobMeta.getName());
            metaverseDocument.setExtension("kjb");
            metaverseDocument.setMimeType(URLConnection.getFileNameMap().getContentTypeFor("job.kjb"));
            metaverseDocument.setContext(new AnalysisContext(DictionaryConst.CONTEXT_RUNTIME));
            String normalizedPath;
            try {
                normalizedPath = KettleAnalyzerUtil.normalizeFilePath(id);
            } catch (MetaverseException e) {
                normalizedPath = id;
            }
            metaverseDocument.setProperty(DictionaryConst.PROPERTY_NAME, job.getName());
            metaverseDocument.setProperty(DictionaryConst.PROPERTY_PATH, normalizedPath);
            metaverseDocument.setProperty(DictionaryConst.PROPERTY_NAMESPACE, namespace.getNamespaceId());
            Runnable analyzerRunner = MetaverseUtil.getAnalyzerRunner(documentAnalyzer, metaverseDocument);
            MetaverseCompletionService.getInstance().submit(analyzerRunner, id);
        }
        // Save the lineage objects for later
        LineageHolder holder = JobLineageHolderMap.getInstance().getLineageHolder(job);
        holder.setExecutionProfile(executionProfile);
        holder.setMetaverseBuilder(builder);
    }
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) AnalysisContext(org.pentaho.metaverse.api.AnalysisContext) ExecutionProfile(org.pentaho.metaverse.impl.model.ExecutionProfile) IExecutionProfile(org.pentaho.metaverse.api.model.IExecutionProfile) INamespace(org.pentaho.metaverse.api.INamespace) Namespace(org.pentaho.metaverse.api.Namespace) INamespace(org.pentaho.metaverse.api.INamespace) Job(org.pentaho.di.job.Job) IMetaverseBuilder(org.pentaho.metaverse.api.IMetaverseBuilder) IDocument(org.pentaho.metaverse.api.IDocument) MetaverseException(org.pentaho.metaverse.api.MetaverseException) LineageHolder(org.pentaho.metaverse.api.model.LineageHolder)
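
The extension point registers itself as a job listener (job.addJobListener(this)) so lineage can be written out when the job finishes. A sketch of that contract, assuming the standard org.pentaho.di.job.JobListener interface with jobStarted/jobFinished callbacks (the listener class name is hypothetical):

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobListener;

public class LineageJobListenerSketch implements JobListener {

    @Override
    public void jobStarted(Job job) throws KettleException {
        // callExtensionPoint() has already captured the execution profile by this point
    }

    @Override
    public void jobFinished(Job job) throws KettleException {
        // write the lineage artifacts once the job completes, as createLineGraph() in the next example does
    }
}

// Hookup, mirroring job.addJobListener(this) in the extension point:
// job.addJobListener(new LineageJobListenerSketch());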

Example 43 with Job

use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

the class JobRuntimeExtensionPoint method createLineGraph.

protected void createLineGraph(final Job job) {
    try {
        // Get the current execution profile for this transformation
        LineageHolder holder = JobLineageHolderMap.getInstance().getLineageHolder(job);
        Future lineageTask = holder.getLineageTask();
        if (lineageTask != null) {
            try {
                lineageTask.get();
            } catch (InterruptedException e) {
                // TODO logger?
                e.printStackTrace();
            } catch (ExecutionException e) {
                // TODO logger?
                e.printStackTrace();
            }
        }
        // Get the current execution profile for this job
        IExecutionProfile executionProfile = JobLineageHolderMap.getInstance().getLineageHolder(job).getExecutionProfile();
        if (executionProfile == null) {
            // Something's wrong here, the job-start extension point didn't properly store the execution profile. We should know
            // the same info, so populate a new ExecutionProfile using the current Job
            executionProfile = new ExecutionProfile();
            populateExecutionProfile(executionProfile, job);
        }
        ExecutionData executionData = (ExecutionData) executionProfile.getExecutionData();
        Result result = job.getResult();
        if (result != null) {
            executionData.setFailureCount(result.getNrErrors());
        }
        // Export the lineage info (execution profile, lineage graph, etc.)
        try {
            if (lineageWriter != null && !"none".equals(lineageWriter.getOutputStrategy())) {
                // With the "latest" strategy, call cleanOutput right before the first call to outputXYZ().
                if ("latest".equals(lineageWriter.getOutputStrategy())) {
                    lineageWriter.cleanOutput(holder);
                }
                lineageWriter.outputExecutionProfile(holder);
            }
        } catch (IOException e) {
            log.warn(Messages.getString("ERROR.CouldNotWriteExecutionProfile", job.getName(), e.getMessage()));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
        }
        // If this is a root job (no parent job or transformation), write the lineage information into its own graph
        try {
            Job parentJob = job.getParentJob();
            Trans parentTrans = job.getParentTrans();
            if (parentJob == null && parentTrans == null) {
                // Add the execution profile information to the lineage graph
                addRuntimeLineageInfo(holder);
                if (lineageWriter != null && !"none".equals(lineageWriter.getOutputStrategy())) {
                    lineageWriter.outputLineageGraph(holder);
                }
            }
        } catch (IOException e) {
            log.warn(Messages.getString("ERROR.CouldNotWriteLineageGraph", job.getName(), Const.NVL(e.getLocalizedMessage(), "Unspecified")));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
        }
    } catch (Throwable t) {
        log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", job.getName(), Const.NVL(t.getLocalizedMessage(), "Unspecified")));
        log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), t);
    }
}
Also used : IExecutionProfile(org.pentaho.metaverse.api.model.IExecutionProfile) Future(java.util.concurrent.Future) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) Job(org.pentaho.di.job.Job) ExecutionProfile(org.pentaho.metaverse.impl.model.ExecutionProfile) IExecutionProfile(org.pentaho.metaverse.api.model.IExecutionProfile) Trans(org.pentaho.di.trans.Trans) LineageHolder(org.pentaho.metaverse.api.model.LineageHolder) IExecutionData(org.pentaho.metaverse.api.model.IExecutionData) ExecutionData(org.pentaho.metaverse.impl.model.ExecutionData) Result(org.pentaho.di.core.Result)
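
The two printStackTrace() calls above carry TODOs about logging. An interrupt-safe way to wait on the lineage Future would look roughly like this (the helper class and exception wrapping are hypothetical):

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

final class LineageTaskWaiter {

    // Block until the asynchronous lineage analysis finishes, without swallowing
    // the thread's interrupt status or discarding the underlying failure.
    static void await(Future<?> lineageTask) {
        if (lineageTask == null) {
            return;
        }
        try {
            lineageTask.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
        } catch (ExecutionException e) {
            throw new IllegalStateException("Lineage analysis failed", e.getCause());
        }
    }
}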

Example 44 with Job

use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

the class JobRuntimeExtensionPointTest method testJobFinished.

@Test
public void testJobFinished() throws Exception {
    JobRuntimeExtensionPoint ext = spy(jobExtensionPoint);
    ext.jobFinished(null);
    verify(ext, never()).populateExecutionProfile(Mockito.any(IExecutionProfile.class), Mockito.any(Job.class));
    ext.jobFinished(job);
    // The logic in jobFinished() is now in a thread, so we can't verify methods were called
    Job mockJob = spy(job);
    Result result = mock(Result.class);
    when(mockJob.getResult()).thenReturn(result);
    ext.jobFinished(mockJob);
// The logic in jobFinished() is now in a thread, so we can't verify methods were called
// Exception handling test removed because jobFinished() logic is in a thread and can't throw checked exceptions
}
Also used : IExecutionProfile(org.pentaho.metaverse.api.model.IExecutionProfile) Job(org.pentaho.di.job.Job) Result(org.pentaho.di.core.Result) Test(org.junit.Test)
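
The comments above note that the jobFinished() work now runs on a background thread, so a plain verify() cannot observe it. If verification were still wanted, Mockito's timeout() mode is one option; a hypothetical extra test against the same fixture as setUp() below (the wait time is arbitrary):

import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;

@Test
public void testJobFinishedAsync() throws Exception {
    JobRuntimeExtensionPoint ext = spy(jobExtensionPoint);
    ext.jobFinished(job);
    // timeout() re-checks until the background thread makes the call or 5 seconds elapse
    verify(ext, timeout(5000)).populateExecutionProfile(
        Mockito.any(IExecutionProfile.class), Mockito.any(Job.class));
}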

Example 45 with Job

use of org.pentaho.di.job.Job in project pentaho-metaverse by pentaho.

the class JobRuntimeExtensionPointTest method setUp.

@Before
public void setUp() throws Exception {
    jobExtensionPoint = new JobRuntimeExtensionPoint();
    jobExtensionPoint.setRuntimeEnabled(true);
    jobMeta = spy(new JobMeta());
    jobMeta.setName(TEST_JOB_NAME);
    jobMeta.setFilename(TEST_JOB_PATH);
    jobMeta.setDescription(TEST_JOB_DESCRIPTION);
    job = new Job(null, jobMeta);
    job.setExecutingServer(TEST_SERVER);
    job.setExecutingUser(TEST_USER);
    job.setVariable(TEST_VAR_NAME, TEST_VAR_VALUE);
    when(jobMeta.getUsedVariables()).thenReturn(Collections.singletonList(TEST_VAR_NAME));
    job.addParameterDefinition(TEST_PARAM_NAME, TEST_PARAM_DEFAULT_VALUE, TEST_PARAM_DESCRIPTION);
    job.setParameterValue(TEST_PARAM_NAME, TEST_PARAM_VALUE);
    job.setArguments(new String[] { "arg0", "arg1" });
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) Job(org.pentaho.di.job.Job) Before(org.junit.Before)
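
As a quick check on this fixture, the parameter set in setUp() can be read back through the NamedParams accessors already used in Example 41. A hypothetical extra test method (the constants are the ones referenced above; UnknownParamException is declared on the accessors):

import static org.junit.Assert.assertEquals;

import org.pentaho.di.core.parameters.UnknownParamException;

@Test
public void testParameterRoundTrip() throws UnknownParamException {
    // Reads back the parameter defined in setUp(), mirroring the getParameter* calls
    // made by JobAnalyzer.analyze() when it records job parameters.
    assertEquals(TEST_PARAM_VALUE, job.getParameterValue(TEST_PARAM_NAME));
    assertEquals(TEST_PARAM_DEFAULT_VALUE, job.getParameterDefault(TEST_PARAM_NAME));
    assertEquals(TEST_PARAM_DESCRIPTION, job.getParameterDescription(TEST_PARAM_NAME));
}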

Aggregations

Job (org.pentaho.di.job.Job): 95
JobMeta (org.pentaho.di.job.JobMeta): 44
Test (org.junit.Test): 35
Result (org.pentaho.di.core.Result): 22
KettleException (org.pentaho.di.core.exception.KettleException): 20
PrintWriter (java.io.PrintWriter): 17
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 17
Trans (org.pentaho.di.trans.Trans): 14
IOException (java.io.IOException): 11
Before (org.junit.Before): 11
Point (org.pentaho.di.core.gui.Point): 11
LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface): 11
JobExecutionConfiguration (org.pentaho.di.job.JobExecutionConfiguration): 10
Repository (org.pentaho.di.repository.Repository): 10
ArrayList (java.util.ArrayList): 9
ServletException (javax.servlet.ServletException): 9
HttpServletRequest (javax.servlet.http.HttpServletRequest): 9
HttpServletResponse (javax.servlet.http.HttpServletResponse): 9
SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject): 9
JobConfiguration (org.pentaho.di.job.JobConfiguration): 9