Example 91 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class TransExecutor, method executeTransformation().

private void executeTransformation() throws KettleException {
    TransExecutorData transExecutorData = getData();
    // If we got 0 rows on input we don't really want to execute the transformation
    if (transExecutorData.groupBuffer.isEmpty()) {
        return;
    }
    transExecutorData.groupTimeStart = System.currentTimeMillis();
    if (first) {
        discardLogLines(transExecutorData);
    }
    Trans executorTrans = createInternalTrans();
    transExecutorData.setExecutorTrans(executorTrans);
    // Pass parameter values
    passParametersToTrans();
    // keep track for drill down in Spoon...
    getTrans().addActiveSubTransformation(getStepname(), executorTrans);
    Result result = new Result();
    result.setRows(transExecutorData.groupBuffer);
    executorTrans.setPreviousResult(result);
    try {
        executorTrans.prepareExecution(getTrans().getArguments());
        // run transformation
        executorTrans.startThreads();
        // Inform the parent transformation we started something here...
        for (DelegationListener delegationListener : getTrans().getDelegationListeners()) {
            // TODO: copy some settings in the transformation execution configuration, not strictly needed
            // but the execution configuration information is useful in case of a transformation re-start on Carte
            delegationListener.transformationDelegationStarted(executorTrans, new TransExecutionConfiguration());
        }
        // Wait a while until we're done with the transformation
        executorTrans.waitUntilFinished();
        result = executorTrans.getResult();
    } catch (KettleException e) {
        log.logError("An error occurred executing the transformation: ", e);
        result.setResult(false);
        result.setNrErrors(1);
    }
    if (result.isSafeStop()) {
        getTrans().safeStop();
    } else if (result.getNrErrors() > 0) {
        getTrans().stopAll();
    }
    collectTransResults(result);
    collectExecutionResults(result);
    collectExecutionResultFiles(result);
    transExecutorData.groupBuffer.clear();
}
Also used: TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration), KettleException (org.pentaho.di.core.exception.KettleException), Trans (org.pentaho.di.trans.Trans), Result (org.pentaho.di.core.Result), DelegationListener (org.pentaho.di.job.DelegationListener)
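
The heart of this example is the Result round trip: rows buffered by the parent step are seeded into the sub-transformation through setPreviousResult(), and the outcome is read back after the run. A minimal sketch of that flow, assuming executorTrans is an already-created Trans and bufferedRows is the List<RowMetaAndData> accumulated upstream (both hypothetical names):

// Seed the sub-transformation with the buffered rows.
Result seed = new Result();
seed.setRows(bufferedRows);
executorTrans.setPreviousResult(seed);

// Run it and wait for completion.
executorTrans.prepareExecution(null); // no command-line arguments in this sketch
executorTrans.startThreads();
executorTrans.waitUntilFinished();

// Read the outcome back.
Result outcome = executorTrans.getResult();
if (outcome.getNrErrors() > 0) {
    // React to the failure, e.g. stop the parent transformation.
}

The example above reuses one Result variable for both roles; keeping the seed and the outcome separate, as here, makes the two directions of the hand-off easier to see.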

Example 92 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class TableAgileMart, method dropTable().

@Override
public boolean dropTable() {
    TableOutputMeta meta = getMeta();
    TableOutputData data = getData();
    String schema = meta.getSchemaName();
    String table = meta.getTableName();
    if (schema != null && !schema.equals("")) {
        table = schema + "." + table;
    }
    String sql = "drop table " + table + ";";
    try {
        Result result = data.db.execStatement(sql);
        int status = result.getExitStatus();
        if (status == 0) {
            util.updateMetadata(meta, -1);
        }
        return status == 0;
    } catch (KettleDatabaseException e) {
        message = "Could not drop table: " + table;
        logError(message, e);
    }
    return false;
}
Also used: TableOutputData (org.pentaho.di.trans.steps.tableoutput.TableOutputData), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), TableOutputMeta (org.pentaho.di.trans.steps.tableoutput.TableOutputMeta), Result (org.pentaho.di.core.Result)
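
This example shows Result in its other common role: as the return value of Database.execStatement(), where an exit status of 0 indicates the statement ran cleanly. A hedged sketch of the same check, assuming db is a connected Database and table holds a trusted identifier (hypothetical names):

// Execute the DDL and check the exit status (0 means success).
Result result = db.execStatement("drop table " + table + ";");
boolean dropped = result.getExitStatus() == 0;

In production code the schema and table names would normally be quoted through DatabaseMeta rather than concatenated by hand, since the raw string built above breaks on identifiers that need quoting.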

Example 93 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class Mapping, method processRow().

/**
 * Process a single row. In our case, we send one row of data to a sub-transformation. Inside that transformation, we
 * look up the MappingInput step and send our rows to it. As a consequence, for the time being, there can only be one
 * MappingInput and one MappingOutput step in the Mapping.
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    try {
        meta = (MappingMeta) smi;
        setData((MappingData) sdi);
        MappingInput[] mappingInputs = getData().getMappingTrans().findMappingInput();
        MappingOutput[] mappingOutputs = getData().getMappingTrans().findMappingOutput();
        getData().wasStarted = true;
        switch(getData().mappingTransMeta.getTransformationType()) {
            case Normal:
            case SerialSingleThreaded:
                // Before we start, let's see if there are loose ends to tie up...
                // 
                List<RowSet> inputRowSets = getInputRowSets();
                if (!inputRowSets.isEmpty()) {
                    for (RowSet rowSet : inputRowSets) {
                        // Pass this row set down to a mapping input step in the sub-transformation...
                        if (mappingInputs.length == 1) {
                            // Simple case: only one input mapping. Move the RowSet over
                            // 
                            mappingInputs[0].addRowSetToInputRowSets(rowSet);
                        } else {
                            // More than one mapping input: we can't tell which step should receive this row set.
                            throw new KettleException("Unsupported situation detected where more than one Mapping Input step needs to be handled.  " + "To solve it, insert a dummy step before the mapping step.");
                        }
                    }
                    clearInputRowSets();
                }
                // Do the same thing for remote input steps...
                if (!getRemoteInputSteps().isEmpty()) {
                    // Move each remote input step over to the mapping input step...
                    for (RemoteStep remoteStep : getRemoteInputSteps()) {
                        // 
                        if (mappingInputs.length == 1) {
                            // Simple case: only one input mapping. Move the remote step over
                            // 
                            mappingInputs[0].getRemoteInputSteps().add(remoteStep);
                        } else {
                            // More than one mapping input: no way to know which one the remote step targets.
                            throw new KettleException("Unsupported situation detected where a remote input step is expecting data " + "to end up in a particular Mapping Input step of a sub-transformation.  " + "To solve it, insert a dummy step before the mapping.");
                        }
                    }
                    getRemoteInputSteps().clear();
                }
                // Do the same thing for output row sets
                // 
                List<RowSet> outputRowSets = getOutputRowSets();
                if (!outputRowSets.isEmpty()) {
                    for (RowSet rowSet : outputRowSets) {
                        // Hand this output row set over to the mapping output step of the sub-transformation...
                        if (mappingOutputs.length == 1) {
                            // Simple case: only one output mapping. Move the RowSet over
                            // 
                            mappingOutputs[0].addRowSetToOutputRowSets(rowSet);
                        } else {
                            // More than one mapping output: we can't tell which step should feed this row set.
                            throw new KettleException("Unsupported situation detected where more than one Mapping Output step needs to be handled.  " + "To solve it, insert a dummy step after the mapping step.");
                        }
                    }
                    clearOutputRowSets();
                }
                // Do the same thing for remote output steps...
                if (!getRemoteOutputSteps().isEmpty()) {
                    // Move each remote output step over to the mapping output step...
                    for (RemoteStep remoteStep : getRemoteOutputSteps()) {
                        // 
                        if (mappingOutputs.length == 1) {
                            // Simple case: only one output mapping. Move the remote step over
                            // 
                            mappingOutputs[0].getRemoteOutputSteps().add(remoteStep);
                        } else {
                            // More than one mapping output: no way to know which one the remote step targets.
                            throw new KettleException("Unsupported situation detected where a remote output step is expecting data " + "to end up in a particular Mapping Output step of a sub-transformation.  " + "To solve it, insert a dummy step after the mapping.");
                        }
                    }
                    getRemoteOutputSteps().clear();
                }
                // Start the mapping/sub-transformation threads
                // 
                getData().getMappingTrans().startThreads();
                // For a Normal transformation, block until the sub-transformation finishes...
                if (getTransMeta().getTransformationType() == TransformationType.Normal) {
                    getData().getMappingTrans().waitUntilFinished();
                    // Set some statistics from the mapping...
                    // This will show up in Spoon, etc.
                    // 
                    Result result = getData().getMappingTrans().getResult();
                    setErrors(result.getNrErrors());
                    setLinesRead(result.getNrLinesRead());
                    setLinesWritten(result.getNrLinesWritten());
                    setLinesInput(result.getNrLinesInput());
                    setLinesOutput(result.getNrLinesOutput());
                    setLinesUpdated(result.getNrLinesUpdated());
                    setLinesRejected(result.getNrLinesRejected());
                }
                return false;
            case SingleThreaded:
                if (mappingInputs.length > 1 || mappingOutputs.length > 1) {
                    throw new KettleException("Multiple input or output steps are not supported for a single threaded mapping.");
                }
                if ((log != null) && log.isDebug()) {
                    List<RowSet> mappingInputRowSets = mappingInputs[0].getInputRowSets();
                    log.logDebug("# of input buffers: " + mappingInputRowSets.size());
                    if (mappingInputRowSets.size() > 0) {
                        log.logDebug("Input buffer 0 size: " + mappingInputRowSets.get(0).size());
                    }
                }
                // Now execute one batch...
                // 
                boolean result = getData().singleThreadedTransExcecutor.oneIteration();
                if (!result) {
                    getData().singleThreadedTransExcecutor.dispose();
                    setOutputDone();
                    return false;
                }
                return true;
            default:
                throw new KettleException("Transformation type '" + getData().mappingTransMeta.getTransformationType().getDescription() + "' is an unsupported transformation type for a mapping");
        }
    } catch (Throwable t) {
        // Something went wrong: stop the mapping transformation in all cases...
        if (getData().getMappingTrans() != null) {
            getData().getMappingTrans().stopAll();
        }
        // ... and pass the exception on to the parent transformation.
        throw new KettleException(t);
    }
}
Also used: MappingInput (org.pentaho.di.trans.steps.mappinginput.MappingInput), RemoteStep (org.pentaho.di.trans.step.RemoteStep), KettleException (org.pentaho.di.core.exception.KettleException), RowSet (org.pentaho.di.core.RowSet), MappingOutput (org.pentaho.di.trans.steps.mappingoutput.MappingOutput), Result (org.pentaho.di.core.Result)
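
After waitUntilFinished(), the mapping's Result doubles as a statistics carrier: the parent step copies the sub-transformation's counters onto itself so they show up in Spoon. A trimmed sketch of that hand-off, assuming it runs inside a BaseStep subclass and mappingTrans is the finished sub-transformation (a hypothetical variable):

// Copy the sub-transformation's run statistics onto this step.
Result stats = mappingTrans.getResult();
setErrors(stats.getNrErrors());
setLinesRead(stats.getNrLinesRead());
setLinesWritten(stats.getNrLinesWritten());
setLinesInput(stats.getNrLinesInput());
setLinesOutput(stats.getNrLinesOutput());
setLinesUpdated(stats.getNrLinesUpdated());
setLinesRejected(stats.getNrLinesRejected());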

Example 94 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class BaseFileInputStep, method init().

/**
 * Initialize step before execute.
 */
@Override
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (M) smi;
    data = (D) sdi;
    if (!super.init(smi, sdi)) {
        return false;
    }
    // Set Embedded NamedCluster Metastore Provider Key so that it can be passed to VFS
    if (getTransMeta().getNamedClusterEmbedManager() != null) {
        getTransMeta().getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, getTransMeta().getEmbeddedMetastoreProviderKey());
    }
    initErrorHandling();
    meta.additionalOutputFields.normalize();
    data.files = meta.getFileInputList(this);
    data.currentFileIndex = 0;
    // If there are missing files, fail unless errors are being ignored.
    //
    Result previousResult = getTrans().getPreviousResult();
    Map<String, ResultFile> resultFiles = (previousResult != null) ? previousResult.getResultFiles() : null;
    if ((previousResult == null || resultFiles == null || resultFiles.size() == 0) && data.files.nrOfMissingFiles() > 0 && !meta.inputFiles.acceptingFilenames && !meta.errorHandling.errorIgnored) {
        logError(BaseMessages.getString(PKG, "TextFileInput.Log.Error.NoFilesSpecified"));
        return false;
    }
    String clusterSize = getVariable(Const.INTERNAL_VARIABLE_CLUSTER_SIZE);
    if (!Utils.isEmpty(clusterSize) && Integer.valueOf(clusterSize) > 1) {
        // TODO: add metadata to configure this.
        String nr = getVariable(Const.INTERNAL_VARIABLE_SLAVE_SERVER_NUMBER);
        if (log.isDetailed()) {
            logDetailed("Running on slave server #" + nr + " : assuming that each slave reads a dedicated part of the same file(s).");
        }
    }
    return init();
}
Also used: ResultFile (org.pentaho.di.core.ResultFile), Result (org.pentaho.di.core.Result)
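
The guard in init() is worth isolating: missing input files are fatal only when no result files were inherited from a previous entry, the step isn't accepting filenames from a field, and errors aren't being ignored. A minimal sketch of the inherited-files part of that decision, assuming trans is the running Trans and nrMissing comes from the step's file list (hypothetical names):

// Result files handed down by a previous job entry, if any.
Result previous = trans.getPreviousResult();
Map<String, ResultFile> inherited = (previous != null) ? previous.getResultFiles() : null;
boolean noInheritedFiles = (inherited == null || inherited.isEmpty());
if (noInheritedFiles && nrMissing > 0) {
    // Fail initialization: required files were not found.
}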

Example 95 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From class JobExecutor, method executeJob().

private void executeJob() throws KettleException {
    // If we got 0 rows on input we don't really want to execute the job
    if (data.groupBuffer.isEmpty()) {
        return;
    }
    data.groupTimeStart = System.currentTimeMillis();
    if (first) {
        discardLogLines(data);
    }
    data.executorJob = createJob(meta.getRepository(), data.executorJobMeta, this);
    data.executorJob.shareVariablesWith(data.executorJobMeta);
    data.executorJob.setParentTrans(getTrans());
    data.executorJob.setLogLevel(getLogLevel());
    data.executorJob.setInternalKettleVariables(this);
    data.executorJob.copyParametersFrom(data.executorJobMeta);
    data.executorJob.setArguments(getTrans().getArguments());
    // data.executorJob.setInteractive(); TODO: pass interactivity through the transformation too for drill-down.
    // TODO
    /*
     * if (data.executorJob.isInteractive()) {
     * data.executorJob.getJobEntryListeners().addAll(parentJob.getJobEntryListeners()); }
     */
    // Pass the accumulated rows
    // 
    data.executorJob.setSourceRows(data.groupBuffer);
    // Pass parameter values
    // 
    passParametersToJob();
    // keep track for drill down in Spoon...
    // 
    getTrans().getActiveSubjobs().put(getStepname(), data.executorJob);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobStart.id, data.executorJob);
    data.executorJob.beginProcessing();
    Result result = new Result();
    // Inform the parent transformation we started a job here...
    for (DelegationListener delegationListener : getTrans().getDelegationListeners()) {
        // TODO: copy some settings in the job execution configuration, not strictly needed
        // but the execution configuration information is useful in case of a job re-start on Carte
        // 
        delegationListener.jobDelegationStarted(data.executorJob, new JobExecutionConfiguration());
    }
    // Execute the job and wait for it to finish...
    try {
        result = data.executorJob.execute(0, result);
    } catch (KettleException e) {
        log.logError("An error occurred executing the job: ", e);
        result.setResult(false);
        result.setNrErrors(1);
    } finally {
        try {
            ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobFinish.id, data.executorJob);
            data.executorJob.fireJobFinishListeners();
        } catch (KettleException e) {
            result.setNrErrors(1);
            result.setResult(false);
            log.logError(BaseMessages.getString(PKG, "JobExecutor.Log.ErrorExecJob", e.getMessage()), e);
        }
    }
    // Send the execution results to the output row, if an execution results target step is configured...
    if (meta.getExecutionResultTargetStepMeta() != null) {
        Object[] outputRow = RowDataUtil.allocateRowData(data.executionResultsOutputRowMeta.size());
        int idx = 0;
        if (!Utils.isEmpty(meta.getExecutionTimeField())) {
            outputRow[idx++] = Long.valueOf(System.currentTimeMillis() - data.groupTimeStart);
        }
        if (!Utils.isEmpty(meta.getExecutionResultField())) {
            outputRow[idx++] = Boolean.valueOf(result.getResult());
        }
        if (!Utils.isEmpty(meta.getExecutionNrErrorsField())) {
            outputRow[idx++] = Long.valueOf(result.getNrErrors());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesReadField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesRead());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesWrittenField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesWritten());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesInputField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesInput());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesOutputField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesOutput());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesRejectedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesRejected());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesUpdatedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesUpdated());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesDeletedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesDeleted());
        }
        if (!Utils.isEmpty(meta.getExecutionFilesRetrievedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrFilesRetrieved());
        }
        if (!Utils.isEmpty(meta.getExecutionExitStatusField())) {
            outputRow[idx++] = Long.valueOf(result.getExitStatus());
        }
        if (!Utils.isEmpty(meta.getExecutionLogTextField())) {
            String channelId = data.executorJob.getLogChannelId();
            String logText = KettleLogStore.getAppender().getBuffer(channelId, false).toString();
            outputRow[idx++] = logText;
        }
        if (!Utils.isEmpty(meta.getExecutionLogChannelIdField())) {
            outputRow[idx++] = data.executorJob.getLogChannelId();
        }
        putRowTo(data.executionResultsOutputRowMeta, outputRow, data.executionResultRowSet);
    }
    // Pass the result rows on to the configured target step, if any...
    if (meta.getResultRowsTargetStepMeta() != null && result.getRows() != null) {
        for (RowMetaAndData row : result.getRows()) {
            Object[] targetRow = RowDataUtil.allocateRowData(data.resultRowsOutputRowMeta.size());
            for (int i = 0; i < meta.getResultRowsField().length; i++) {
                ValueMetaInterface valueMeta = row.getRowMeta().getValueMeta(i);
                if (valueMeta.getType() != meta.getResultRowsType()[i]) {
                    throw new KettleException(BaseMessages.getString(PKG, "JobExecutor.IncorrectDataTypePassed", valueMeta.getTypeDesc(), ValueMetaFactory.getValueMetaName(meta.getResultRowsType()[i])));
                }
                targetRow[i] = row.getData()[i];
            }
            putRowTo(data.resultRowsOutputRowMeta, targetRow, data.resultRowsRowSet);
        }
    }
    if (meta.getResultFilesTargetStepMeta() != null && result.getResultFilesList() != null) {
        for (ResultFile resultFile : result.getResultFilesList()) {
            Object[] targetRow = RowDataUtil.allocateRowData(data.resultFilesOutputRowMeta.size());
            int idx = 0;
            targetRow[idx++] = resultFile.getFile().getName().toString();
            // TODO: time, origin, ...
            putRowTo(data.resultFilesOutputRowMeta, targetRow, data.resultFilesRowSet);
        }
    }
    data.groupBuffer.clear();
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), ResultFile (org.pentaho.di.core.ResultFile), JobExecutionConfiguration (org.pentaho.di.job.JobExecutionConfiguration), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint), Result (org.pentaho.di.core.Result), DelegationListener (org.pentaho.di.job.DelegationListener), ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface)
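
The long middle block of executeJob() is a flattening step: each configured field pulls one metric off the Result and appends it to the output row in order. A reduced sketch of the same idea with a hypothetical two-field layout (errors, then lines read):

// Flatten selected Result metrics into an output row (hypothetical layout).
Object[] outputRow = RowDataUtil.allocateRowData(2);
int idx = 0;
outputRow[idx++] = Long.valueOf(result.getNrErrors());
outputRow[idx++] = Long.valueOf(result.getNrLinesRead());
// putRowTo(rowMeta, outputRow, targetRowSet); // forward to the configured target step

Because idx only advances for fields that are actually configured, the row layout in the real method stays aligned with data.executionResultsOutputRowMeta no matter which metrics are enabled.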

Aggregations

Result (org.pentaho.di.core.Result): 192 usages
Test (org.junit.Test): 75
KettleException (org.pentaho.di.core.exception.KettleException): 75
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 64
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 57
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 40
FileObject (org.apache.commons.vfs2.FileObject): 34
Job (org.pentaho.di.job.Job): 32
IOException (java.io.IOException): 24
ResultFile (org.pentaho.di.core.ResultFile): 20
File (java.io.File): 17
ArrayList (java.util.ArrayList): 16
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 15
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 12
Pattern (java.util.regex.Pattern): 10
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 10
Database (org.pentaho.di.core.database.Database): 9
Date (java.util.Date): 8
Trans (org.pentaho.di.trans.Trans): 8
Matcher (java.util.regex.Matcher): 7