Example 86 with Result

use of org.pentaho.di.core.Result in project pentaho-platform by pentaho.

the class StatsDatabaseCheck method executeJob.

protected boolean executeJob(JobMeta jobMeta, String jobFileFullPath) {
    if (jobMeta != null) {
        Job job = new Job(null, jobMeta);
        Result result = new Result();
        try {
            // Execute the job's entries, accumulating status and error counts into 'result'
            job.execute(0, result);
            job.waitUntilFinished();
        } catch (KettleException ke) {
            Logger.error("Error executing " + jobFileFullPath, ke.getMessage());
            return false;
        }
    }
    return true;
}
Also used : KettleException (org.pentaho.di.core.exception.KettleException), Job (org.pentaho.di.job.Job), Result (org.pentaho.di.core.Result)
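
Note that executeJob returns true even when the job finishes with errors, because the populated Result is never inspected. A minimal sketch (illustrative, not the platform's code; the method name is hypothetical) that also checks the outcome:

protected boolean executeJobChecked(JobMeta jobMeta, String jobFileFullPath) {
    if (jobMeta == null) {
        return true;
    }
    Job job = new Job(null, jobMeta);
    Result result = new Result();
    try {
        job.execute(0, result);
        job.waitUntilFinished();
    } catch (KettleException ke) {
        Logger.error("Error executing " + jobFileFullPath, ke.getMessage());
        return false;
    }
    // Treat entry-level failures as a failed execution as well
    return result.getNrErrors() == 0 && result.getResult();
}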

Example 87 with Result

use of org.pentaho.di.core.Result in project pentaho-metaverse by pentaho.

the class JobRuntimeExtensionPoint method createLineGraph.

protected void createLineGraph(final Job job) {
    try {
        // Get the lineage holder for this job and wait for any in-flight lineage task
        LineageHolder holder = JobLineageHolderMap.getInstance().getLineageHolder(job);
        Future lineageTask = holder.getLineageTask();
        if (lineageTask != null) {
            try {
                lineageTask.get();
            } catch (InterruptedException e) {
                // TODO logger?
                e.printStackTrace();
            } catch (ExecutionException e) {
                // TODO logger?
                e.printStackTrace();
            }
        }
        // Get the current execution profile for this job
        IExecutionProfile executionProfile = JobLineageHolderMap.getInstance().getLineageHolder(job).getExecutionProfile();
        if (executionProfile == null) {
            // Something's wrong here: the job-start callback didn't store the execution profile.
            // We know the same info, so populate a new ExecutionProfile from the current Job.
            executionProfile = new ExecutionProfile();
            populateExecutionProfile(executionProfile, job);
        }
        ExecutionData executionData = (ExecutionData) executionProfile.getExecutionData();
        Result result = job.getResult();
        if (result != null) {
            executionData.setFailureCount(result.getNrErrors());
        }
        // Export the lineage info (execution profile, lineage graph, etc.)
        try {
            if (lineageWriter != null && !"none".equals(lineageWriter.getOutputStrategy())) {
                // For the "latest" strategy, clean previous output right before the first write
                if ("latest".equals(lineageWriter.getOutputStrategy())) {
                    lineageWriter.cleanOutput(holder);
                }
                lineageWriter.outputExecutionProfile(holder);
            }
        } catch (IOException e) {
            log.warn(Messages.getString("ERROR.CouldNotWriteExecutionProfile", job.getName(), e.getMessage()));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
        }
        // Only a top-level job (no parent job or transformation) writes the lineage graph;
        // otherwise the parent merges this job's lineage information into its own graph
        try {
            Job parentJob = job.getParentJob();
            Trans parentTrans = job.getParentTrans();
            if (parentJob == null && parentTrans == null) {
                // Add the execution profile information to the lineage graph
                addRuntimeLineageInfo(holder);
                if (lineageWriter != null && !"none".equals(lineageWriter.getOutputStrategy())) {
                    lineageWriter.outputLineageGraph(holder);
                }
            }
        } catch (IOException e) {
            log.warn(Messages.getString("ERROR.CouldNotWriteLineageGraph", job.getName(), Const.NVL(e.getLocalizedMessage(), "Unspecified")));
            log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), e);
        }
    } catch (Throwable t) {
        log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", job.getName(), Const.NVL(t.getLocalizedMessage(), "Unspecified")));
        log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), t);
    }
}
Also used : IExecutionProfile (org.pentaho.metaverse.api.model.IExecutionProfile), Future (java.util.concurrent.Future), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), Job (org.pentaho.di.job.Job), ExecutionProfile (org.pentaho.metaverse.impl.model.ExecutionProfile), Trans (org.pentaho.di.trans.Trans), LineageHolder (org.pentaho.metaverse.api.model.LineageHolder), IExecutionData (org.pentaho.metaverse.api.model.IExecutionData), ExecutionData (org.pentaho.metaverse.impl.model.ExecutionData), Result (org.pentaho.di.core.Result)
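
The two printStackTrace blocks above are flagged with TODO comments in the source. A sketch of a common replacement (a hypothetical helper, not the project's code, assuming the class's 'log' field accepts a Throwable argument) that restores the interrupt flag and routes through the logger:

private void waitForLineageTask(Future lineageTask) {
    if (lineageTask == null) {
        return;
    }
    try {
        lineageTask.get();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();  // restore the interrupt status
        log.warn("Interrupted while waiting for the lineage task", e);
    } catch (ExecutionException e) {
        log.warn("Lineage task failed", e.getCause());
    }
}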

Example 88 with Result

use of org.pentaho.di.core.Result in project pentaho-metaverse by pentaho.

the class JobRuntimeExtensionPointTest method testJobFinished.

@Test
public void testJobFinished() throws Exception {
    JobRuntimeExtensionPoint ext = spy(jobExtensionPoint);
    ext.jobFinished(null);
    verify(ext, never()).populateExecutionProfile(Mockito.any(IExecutionProfile.class), Mockito.any(Job.class));
    ext.jobFinished(job);
    // The logic in jobFinished() is now in a thread, so we can't verify methods were called
    Job mockJob = spy(job);
    Result result = mock(Result.class);
    when(mockJob.getResult()).thenReturn(result);
    ext.jobFinished(mockJob);
    // The logic in jobFinished() is now in a thread, so we can't verify methods were called
    // The exception-handling test was removed because jobFinished() runs its logic in a thread and can't throw checked exceptions
}
Also used : IExecutionProfile (org.pentaho.metaverse.api.model.IExecutionProfile), Job (org.pentaho.di.job.Job), Result (org.pentaho.di.core.Result), Test (org.junit.Test)
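
Since the threaded jobFinished() logic can't be verified directly, the stubbed Result is the main test seam. A hypothetical extension of the same pattern (not in the test class), driving the failure-count branch:

Result failedResult = mock(Result.class);
when(failedResult.getNrErrors()).thenReturn(3L);   // Result.getNrErrors() returns a long
when(failedResult.getResult()).thenReturn(false);
when(mockJob.getResult()).thenReturn(failedResult);
ext.jobFinished(mockJob);  // createLineGraph should record the stubbed failure count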

Example 89 with Result

use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

the class FixedTimeStreamWindow method sendBufferToSubtrans.

private Result sendBufferToSubtrans(List<I> input) throws KettleException {
    final List<RowMetaAndData> rows = input.stream()
        .map(row -> row.toArray(new Object[0]))
        .map(objects -> new RowMetaAndData(rowMeta, objects))
        .collect(Collectors.toList());
    Optional<Result> optionalRes = subtransExecutor.execute(rows);
    return optionalRes.orElse(new Result());
}
Also used : RowMetaAndData (org.pentaho.di.core.RowMetaAndData), Result (org.pentaho.di.core.Result), List (java.util.List), StreamWindow (org.pentaho.di.trans.streaming.api.StreamWindow), SubtransExecutor (org.pentaho.di.trans.SubtransExecutor), KettleException (org.pentaho.di.core.exception.KettleException), Optional (java.util.Optional), Observable (io.reactivex.Observable), Schedulers (io.reactivex.schedulers.Schedulers), MILLISECONDS (java.util.concurrent.TimeUnit.MILLISECONDS), Collectors (java.util.stream.Collectors), RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface)
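
Result objects are designed to accumulate across executions. A sketch (assuming Result's standard add(Result) counter-merging method; 'windows' is a hypothetical list of batches) of folding per-window results into a running total:

Result total = new Result();
for (List<I> window : windows) {
    total.add(sendBufferToSubtrans(window));  // merges error counts, row counts, result files
}
if (total.getNrErrors() > 0) {
    throw new KettleException(total.getNrErrors() + " window(s) finished with errors");
}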

Example 90 with Result

use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

the class TextFileInput method init.

@Override
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (TextFileInputMeta) smi;
    data = (TextFileInputData) sdi;
    if (super.init(smi, sdi)) {
        initErrorHandling();
        initReplayFactory();
        data.setFiles(meta.getTextFileList(this));
        data.filterProcessor = new TextFileFilterProcessor(meta.getFilter());
        // If there are missing files, fail unless errors are ignored
        Result previousResult = getTrans().getPreviousResult();
        Map<String, ResultFile> resultFiles = (previousResult != null) ? previousResult.getResultFiles() : null;
        if ((previousResult == null || resultFiles == null || resultFiles.size() == 0)
                && data.getFiles().nrOfMissingFiles() > 0
                && !meta.isAcceptingFilenames() && !meta.isErrorIgnored()) {
            logError(BaseMessages.getString(PKG, "TextFileInput.Log.Error.NoFilesSpecified"));
            return false;
        }
        String clusterSize = getVariable(Const.INTERNAL_VARIABLE_CLUSTER_SIZE);
        if (!Utils.isEmpty(clusterSize) && Integer.valueOf(clusterSize) > 1) {
            // TODO: add metadata to configure this.
            String nr = getVariable(Const.INTERNAL_VARIABLE_SLAVE_SERVER_NUMBER);
            if (log.isDetailed()) {
                logDetailed("Running on slave server #" + nr + " : assuming that each slave reads a dedicated part of the same file(s).");
            }
        }
        // calculate the file format type in advance so we can use a switch
        data.fileFormatType = meta.getFileFormatTypeNr();
        // calculate the file type in advance CSV or Fixed?
        data.fileType = meta.getFileTypeNr();
        // Handle the possibility of a variable substitution
        data.separator = environmentSubstitute(meta.getSeparator());
        data.enclosure = environmentSubstitute(meta.getEnclosure());
        data.escapeCharacter = environmentSubstitute(meta.getEscapeCharacter());
        // Add additional fields
        if (!Utils.isEmpty(meta.getShortFileNameField())) {
            data.addShortFilename = true;
        }
        if (!Utils.isEmpty(meta.getPathField())) {
            data.addPath = true;
        }
        if (!Utils.isEmpty(meta.getExtensionField())) {
            data.addExtension = true;
        }
        if (!Utils.isEmpty(meta.getSizeField())) {
            data.addSize = true;
        }
        if (!Utils.isEmpty(meta.isHiddenField())) {
            data.addIsHidden = true;
        }
        if (!Utils.isEmpty(meta.getLastModificationDateField())) {
            data.addLastModificationDate = true;
        }
        if (!Utils.isEmpty(meta.getUriField())) {
            data.addUri = true;
        }
        if (!Utils.isEmpty(meta.getRootUriField())) {
            data.addRootUri = true;
        }
        return true;
    }
    return false;
}
Also used : ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString), ResultFile (org.pentaho.di.core.ResultFile), Result (org.pentaho.di.core.Result)
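
The previousResult/resultFiles guard above is the consuming side of Kettle's result-file passing. A minimal sketch (illustrative, reusing the same accessors) that lists the files inherited from the previous entry:

Result previous = getTrans().getPreviousResult();
if (previous != null && previous.getResultFiles() != null) {
    for (ResultFile resultFile : previous.getResultFiles().values()) {
        // ResultFile wraps an Apache Commons VFS FileObject
        logBasic("Inherited result file: " + resultFile.getFile().getName().getURI());
    }
}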

Aggregations

Result (org.pentaho.di.core.Result): 192
Test (org.junit.Test): 75
KettleException (org.pentaho.di.core.exception.KettleException): 75
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 64
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 57
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 40
FileObject (org.apache.commons.vfs2.FileObject): 34
Job (org.pentaho.di.job.Job): 32
IOException (java.io.IOException): 24
ResultFile (org.pentaho.di.core.ResultFile): 20
File (java.io.File): 17
ArrayList (java.util.ArrayList): 16
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 15
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 12
Pattern (java.util.regex.Pattern): 10
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 10
Database (org.pentaho.di.core.database.Database): 9
Date (java.util.Date): 8
Trans (org.pentaho.di.trans.Trans): 8
Matcher (java.util.regex.Matcher): 7