
Example 1 with DelegationListener

Use of org.pentaho.di.job.DelegationListener in project pentaho-kettle by pentaho.

From the class JobEntryTrans, method execute:

/**
 * Execute this job entry and return the result. In this case that simply means setting the result
 * boolean in the Result class.
 *
 * @param result The result of the previous execution
 * @param nr     The job entry number
 * @return The Result of the execution.
 */
@Override
public Result execute(Result result, int nr) throws KettleException {
    result.setEntryNr(nr);
    LogChannelFileWriter logChannelFileWriter = null;
    LogLevel transLogLevel = parentJob.getLogLevel();
    // Set the Embedded NamedCluster MetaStore Provider Key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    String realLogFilename = "";
    if (setLogfile) {
        transLogLevel = logFileLevel;
        realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobTrans.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        // create parent folder?
        if (!FileUtil.createParentFolder(PKG, realLogFilename, createParentFolder, this.getLogChannel(), this)) {
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename, this), setAppendLogfile);
            logChannelFileWriter.startLogging();
        } catch (KettleException e) {
            logError(BaseMessages.getString(PKG, "JobTrans.Error.UnableOpenAppender", realLogFilename, e.toString()));
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
    }
    // Log where the transformation will be loaded from...
    switch(specificationMethod) {
        case FILENAME:
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTrans", environmentSubstitute(getFilename())));
            }
            break;
        case REPOSITORY_BY_NAME:
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTransInDirec", environmentSubstitute(getFilename()), environmentSubstitute(directory)));
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTransByReference", transObjectId));
            }
            break;
        default:
            break;
    }
    // Load the transformation only once for the complete loop!
    // Throws an exception if it was not possible to load the transformation, for example when the
    // XML file doesn't exist or the repository is down.
    // In that case, log the stack trace and return an error condition from this job entry.
    // 
    TransMeta transMeta = null;
    try {
        transMeta = getTransMeta(rep, metaStore, this);
    } catch (KettleException e) {
        logError(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToRunJob", parentJobMeta.getName(), getName(), StringUtils.trim(e.getMessage())), e);
        result.setNrErrors(1);
        result.setResult(false);
        return result;
    }
    int iteration = 0;
    String[] args1 = arguments;
    if (args1 == null || args1.length == 0) {
        // No arguments set, look at the parent job.
        args1 = parentJob.getArguments();
    }
    // initializeVariablesFrom(parentJob);
    // 
    // For the moment only do variable translation at the start of a job, not
    // for every input row (if that would be switched on). This is for safety,
    // the real argument setting is later on.
    // 
    String[] args = null;
    if (args1 != null) {
        args = new String[args1.length];
        for (int idx = 0; idx < args1.length; idx++) {
            args[idx] = environmentSubstitute(args1[idx]);
        }
    }
    RowMetaAndData resultRow = null;
    boolean first = true;
    List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
    while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0) && !parentJob.isStopped()) {
        // 
        if (execPerRow) {
            result.getRows().clear();
        }
        if (rows != null && execPerRow) {
            resultRow = rows.get(iteration);
        } else {
            resultRow = null;
        }
        NamedParams namedParam = new NamedParamsDefault();
        if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
                if (!Utils.isEmpty(parameters[idx])) {
                    // We have a parameter
                    // 
                    namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
                    if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                        // There is no field name specified.
                        // 
                        String value = Const.NVL(environmentSubstitute(parameterValues[idx]), "");
                        namedParam.setParameterValue(parameters[idx], value);
                    } else {
                        // Something was entered in the field column...
                        // 
                        String value = "";
                        if (resultRow != null) {
                            value = resultRow.getString(parameterFieldNames[idx], "");
                        }
                        namedParam.setParameterValue(parameters[idx], value);
                    }
                }
            }
        }
        first = false;
        Result previousResult = result;
        try {
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.StartingTrans", getFilename(), getName(), getDescription()));
            }
            if (clearResultRows) {
                previousResult.setRows(new ArrayList<RowMetaAndData>());
            }
            if (clearResultFiles) {
                previousResult.getResultFiles().clear();
            }
            /*
         * Set one or more "result" rows on the transformation...
         */
            if (execPerRow) {
                if (argFromPrevious) {
                    // Copy the input row to the (command line) arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Just pass a single row
                    List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                    newList.add(resultRow);
                    // This previous result rows list can be either empty or not.
                    // Depending on the checkbox "clear result rows"
                    // In this case, it would execute the transformation with one extra row each time
                    // Can't figure out a real use-case for it, but hey, who am I to decide that, right?
                    // :-)
                    // 
                    previousResult.getRows().addAll(newList);
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            } else {
                if (argFromPrevious) {
                    // Only put the first Row on the arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                }
                if (paramsFromPrevious) {
                    // Copy the input to the parameters
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            }
            // Handle the parameters...
            // 
            transMeta.clearParameters();
            String[] parameterNames = transMeta.listParameters();
            StepWithMappingMeta.activateParams(transMeta, transMeta, this, parameterNames, parameters, parameterValues);
            boolean doFallback = true;
            SlaveServer remoteSlaveServer = null;
            TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration();
            if (!Utils.isEmpty(runConfiguration)) {
                log.logBasic(BaseMessages.getString(PKG, "JobTrans.RunConfig.Message"), runConfiguration);
                runConfiguration = environmentSubstitute(runConfiguration);
                executionConfiguration.setRunConfiguration(runConfiguration);
                try {
                    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), transMeta, rep });
                    if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely() && !executionConfiguration.isExecutingClustered()) {
                        result.setResult(true);
                        return result;
                    }
                    clustering = executionConfiguration.isExecutingClustered();
                    remoteSlaveServer = executionConfiguration.getRemoteServer();
                    doFallback = false;
                } catch (KettleException e) {
                    log.logError(e.getMessage(), getName());
                    result.setNrErrors(1);
                    result.setResult(false);
                    return result;
                }
            }
            if (doFallback) {
                // 
                if (!Utils.isEmpty(remoteSlaveServerName)) {
                    String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
                    remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
                    if (remoteSlaveServer == null) {
                        throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
                    }
                }
            }
            // Execute this transformation across a cluster of servers...
            if (clustering) {
                executionConfiguration.setClusterPosting(true);
                executionConfiguration.setClusterPreparing(true);
                executionConfiguration.setClusterStarting(true);
                executionConfiguration.setClusterShowingTransformation(false);
                executionConfiguration.setSafeModeEnabled(false);
                executionConfiguration.setRepository(rep);
                executionConfiguration.setLogLevel(transLogLevel);
                executionConfiguration.setPreviousResult(previousResult);
                // Also pass the variables from the transformation into the execution configuration
                // That way it can go over the HTTP connection to the slave server.
                // 
                executionConfiguration.setVariables(transMeta);
                // Also set the arguments...
                // 
                executionConfiguration.setArgumentStrings(args);
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    executionConfiguration.setPassedBatchId(parentJob.getPassedBatchId());
                }
                TransSplitter transSplitter = null;
                long errors = 0;
                try {
                    transSplitter = Trans.executeClustered(transMeta, executionConfiguration);
                    // Monitor the running transformations, wait until they are done.
                    // Also kill them all if anything goes bad
                    // Also clean up afterwards...
                    // 
                    errors += Trans.monitorClusteredTransformation(log, transSplitter, parentJob);
                } catch (Exception e) {
                    logError("Error during clustered execution. Cleaning up clustered execution.", e);
                    // In case something goes wrong, make sure to clean up afterwards!
                    // 
                    errors++;
                    if (transSplitter != null) {
                        Trans.cleanupCluster(log, transSplitter);
                    } else {
                        // Try to clean anyway...
                        // 
                        SlaveServer master = null;
                        for (StepMeta stepMeta : transMeta.getSteps()) {
                            if (stepMeta.isClustered()) {
                                for (SlaveServer slaveServer : stepMeta.getClusterSchema().getSlaveServers()) {
                                    if (slaveServer.isMaster()) {
                                        master = slaveServer;
                                        break;
                                    }
                                }
                            }
                        }
                        if (master != null) {
                            master.deAllocateServerSockets(transMeta.getName(), null);
                        }
                    }
                }
                result.clear();
                if (transSplitter != null) {
                    Result clusterResult = Trans.getClusteredTransformationResult(log, transSplitter, parentJob, executionConfiguration.isLogRemoteExecutionLocally());
                    result.add(clusterResult);
                }
                result.setNrErrors(result.getNrErrors() + errors);
            } else if (remoteSlaveServer != null) {
                // Execute this transformation remotely
                // 
                // Make sure we can parameterize the slave server connection
                // 
                remoteSlaveServer.shareVariablesWith(this);
                // Remote execution...
                // 
                executionConfiguration.setPreviousResult(previousResult.clone());
                executionConfiguration.setArgumentStrings(args);
                executionConfiguration.setVariables(this);
                executionConfiguration.setRemoteServer(remoteSlaveServer);
                executionConfiguration.setLogLevel(transLogLevel);
                executionConfiguration.setRepository(rep);
                executionConfiguration.setLogFileName(realLogFilename);
                executionConfiguration.setSetAppendLogfile(setAppendLogfile);
                executionConfiguration.setSetLogfile(setLogfile);
                Map<String, String> params = executionConfiguration.getParams();
                for (String param : transMeta.listParameters()) {
                    String value = Const.NVL(transMeta.getParameterValue(param), Const.NVL(transMeta.getParameterDefault(param), transMeta.getVariable(param)));
                    params.put(param, value);
                }
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    executionConfiguration.setPassedBatchId(parentJob.getPassedBatchId());
                }
                // Send the XML over to the slave server
                // Also start the transformation over there...
                // 
                String carteObjectId = Trans.sendToSlaveServer(transMeta, executionConfiguration, rep, metaStore);
                // Now start the monitoring...
                // 
                SlaveServerTransStatus transStatus = null;
                while (!parentJob.isStopped() && waitingToFinish) {
                    try {
                        transStatus = remoteSlaveServer.getTransStatus(transMeta.getName(), carteObjectId, 0);
                        if (!transStatus.isRunning()) {
                            // The transformation is finished, get the result...
                            // 
                            // get the status with the result (we don't do it above because of change PDI-15781)
                            transStatus = remoteSlaveServer.getTransStatus(transMeta.getName(), carteObjectId, 0, true);
                            Result remoteResult = transStatus.getResult();
                            result.clear();
                            result.add(remoteResult);
                            // 
                            if (remoteResult.isStopped()) {
                                // 
                                result.setNrErrors(result.getNrErrors() + 1);
                            }
                            // Make sure to clean up : write a log record etc, close any left-over sockets etc.
                            // 
                            remoteSlaveServer.cleanupTransformation(transMeta.getName(), carteObjectId);
                            break;
                        }
                    } catch (Exception e1) {
                        logError(BaseMessages.getString(PKG, "JobTrans.Error.UnableContactSlaveServer", "" + remoteSlaveServer, transMeta.getName()), e1);
                        result.setNrErrors(result.getNrErrors() + 1L);
                        // Stop looking too, chances are too low the server will come back on-line
                        break;
                    }
                    // sleep for 2 seconds
                    try {
                        Thread.sleep(2000);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                if (parentJob.isStopped()) {
                    // 
                    if (transStatus == null || transStatus.isRunning()) {
                        // Try a remote abort (using carteObjectId, since transStatus may still be null here) ...
                        // 
                        remoteSlaveServer.stopTransformation(transMeta.getName(), carteObjectId);
                        // And a cleanup...
                        // 
                        remoteSlaveServer.cleanupTransformation(transMeta.getName(), carteObjectId);
                        // Set an error state!
                        // 
                        result.setNrErrors(result.getNrErrors() + 1L);
                    }
                }
            } else {
                // Execute this transformation on the local machine
                // 
                // Create the transformation from meta-data
                // 
                // trans = new Trans( transMeta, this );
                final TransMeta meta = transMeta;
                trans = new TransSupplier(transMeta, log, () -> new Trans(meta)).get();
                trans.setParent(this);
                // Pass the socket repository as early as possible...
                // 
                trans.setSocketRepository(parentJob.getSocketRepository());
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    trans.setPassedBatchId(parentJob.getPassedBatchId());
                }
                // set the parent job on the transformation, variables are taken from here...
                // 
                trans.setParentJob(parentJob);
                trans.setParentVariableSpace(parentJob);
                trans.setLogLevel(transLogLevel);
                trans.setPreviousResult(previousResult);
                trans.setArguments(arguments);
                // Mappings need the repository to load from
                // 
                trans.setRepository(rep);
                // inject the metaStore
                trans.setMetaStore(metaStore);
                // First get the root job
                // 
                Job rootJob = parentJob;
                while (rootJob.getParentJob() != null) {
                    rootJob = rootJob.getParentJob();
                }
                // Get the start and end-date from the root job...
                // 
                trans.setJobStartDate(rootJob.getStartDate());
                trans.setJobEndDate(rootJob.getEndDate());
                // Inform the parent job that we started a transformation here...
                for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
                    // TODO: copy some settings in the job execution configuration, not strictly needed
                    // but the execution configuration information is useful in case of a job re-start
                    // 
                    delegationListener.transformationDelegationStarted(trans, new TransExecutionConfiguration());
                }
                try {
                    // Start execution...
                    // 
                    trans.execute(args);
                    // TODO is it possible to implement Observer pattern to avoid Thread.sleep here?
                    while (!trans.isFinished() && trans.getErrors() == 0) {
                        if (parentJob.isStopped()) {
                            trans.stopAll();
                            break;
                        } else {
                            try {
                                Thread.sleep(0, 500);
                            } catch (InterruptedException e) {
                            // Ignore errors
                            }
                        }
                    }
                    trans.waitUntilFinished();
                    if (parentJob.isStopped() || trans.getErrors() != 0) {
                        trans.stopAll();
                        result.setNrErrors(1);
                    }
                    Result newResult = trans.getResult();
                    // clear only the numbers, NOT the files or rows.
                    result.clear();
                    result.add(newResult);
                    // Set the result rows too, if any ...
                    if (!Utils.isEmpty(newResult.getRows())) {
                        result.setRows(newResult.getRows());
                    }
                    if (setLogfile) {
                        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, KettleVFS.getFileObject(realLogFilename, this), parentJob.getJobname(), toString());
                        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                    }
                } catch (KettleException e) {
                    logError(BaseMessages.getString(PKG, "JobTrans.Error.UnablePrepareExec"), e);
                    result.setNrErrors(1);
                }
            }
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "JobTrans.ErrorUnableOpenTrans", e.getMessage()));
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
        }
        iteration++;
    }
    if (setLogfile) {
        if (logChannelFileWriter != null) {
            logChannelFileWriter.stopLogging();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
            // Check whether anything went wrong while writing the log file...
            if (logChannelFileWriter.getException() != null) {
                logError("Unable to open log file [" + getLogFilename() + "] : ");
                logError(Const.getStackTracker(logChannelFileWriter.getException()));
                result.setNrErrors(1);
                result.setResult(false);
                return result;
            }
        }
    }
    if (result.getNrErrors() == 0) {
        result.setResult(true);
    } else {
        result.setResult(false);
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) NamedParams(org.pentaho.di.core.parameters.NamedParams) TransMeta(org.pentaho.di.trans.TransMeta) ArrayList(java.util.ArrayList) SlaveServer(org.pentaho.di.cluster.SlaveServer) LogLevel(org.pentaho.di.core.logging.LogLevel) Result(org.pentaho.di.core.Result) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) Job(org.pentaho.di.job.Job) Trans(org.pentaho.di.trans.Trans) NamedParamsDefault(org.pentaho.di.core.parameters.NamedParamsDefault) LogChannelFileWriter(org.pentaho.di.core.logging.LogChannelFileWriter) ResultFile(org.pentaho.di.core.ResultFile) StepMeta(org.pentaho.di.trans.step.StepMeta) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) TransSupplier(org.pentaho.di.trans.TransSupplier) TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) SlaveServerTransStatus(org.pentaho.di.www.SlaveServerTransStatus) TransSplitter(org.pentaho.di.trans.cluster.TransSplitter) Map(java.util.Map) DelegationListener(org.pentaho.di.job.DelegationListener)
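
The loop over parentJob.getDelegationListeners() near the end of Example 1 is the hook this page documents: every registered DelegationListener is told that a sub-transformation has been started. A minimal sketch of a custom listener follows. It assumes the interface declares exactly the two callbacks invoked in these examples (jobDelegationStarted and transformationDelegationStarted); the class name LoggingDelegationListener and the log output are illustrative, not part of pentaho-kettle.

import org.pentaho.di.job.DelegationListener;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransExecutionConfiguration;

// Hypothetical listener that logs every delegated execution.
public class LoggingDelegationListener implements DelegationListener {

    @Override
    public void jobDelegationStarted(Job delegatedJob, JobExecutionConfiguration jobExecutionConfiguration) {
        // Invoked when a sub-job is started (see Examples 3 and 4 below).
        System.out.println("Sub-job started: " + delegatedJob.getJobname());
    }

    @Override
    public void transformationDelegationStarted(Trans delegatedTrans, TransExecutionConfiguration transExecutionConfiguration) {
        // Invoked when a sub-transformation is started (see Example 1 above and Example 2 below).
        System.out.println("Sub-transformation started: " + delegatedTrans.getName());
    }
}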

Example 2 with DelegationListener

Use of org.pentaho.di.job.DelegationListener in project pentaho-kettle by pentaho.

From the class TransExecutor, method executeTransformation:

private void executeTransformation() throws KettleException {
    TransExecutorData transExecutorData = getData();
    // If we got 0 rows on input we don't really want to execute the transformation
    if (transExecutorData.groupBuffer.isEmpty()) {
        return;
    }
    transExecutorData.groupTimeStart = System.currentTimeMillis();
    if (first) {
        discardLogLines(transExecutorData);
    }
    Trans executorTrans = createInternalTrans();
    transExecutorData.setExecutorTrans(executorTrans);
    // Pass parameter values
    passParametersToTrans();
    // keep track for drill down in Spoon...
    getTrans().addActiveSubTransformation(getStepname(), executorTrans);
    Result result = new Result();
    result.setRows(transExecutorData.groupBuffer);
    executorTrans.setPreviousResult(result);
    try {
        executorTrans.prepareExecution(getTrans().getArguments());
        // run transformation
        executorTrans.startThreads();
        // Inform the parent transformation we started something here...
        for (DelegationListener delegationListener : getTrans().getDelegationListeners()) {
            // TODO: copy some settings in the transformation execution configuration, not strictly needed
            // but the execution configuration information is useful in case of a transformation re-start on Carte
            delegationListener.transformationDelegationStarted(executorTrans, new TransExecutionConfiguration());
        }
        // Wait a while until we're done with the transformation
        executorTrans.waitUntilFinished();
        result = executorTrans.getResult();
    } catch (KettleException e) {
        log.logError("An error occurred executing the transformation: ", e);
        result.setResult(false);
        result.setNrErrors(1);
    }
    if (result.isSafeStop()) {
        getTrans().safeStop();
    } else if (result.getNrErrors() > 0) {
        getTrans().stopAll();
    }
    collectTransResults(result);
    collectExecutionResults(result);
    collectExecutionResultFiles(result);
    transExecutorData.groupBuffer.clear();
}
Also used : TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) KettleException(org.pentaho.di.core.exception.KettleException) Trans(org.pentaho.di.trans.Trans) Result(org.pentaho.di.core.Result) DelegationListener(org.pentaho.di.job.DelegationListener)
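
Example 2 reads the listeners through getTrans().getDelegationListeners(), so a listener must be registered on the parent Trans before the step runs for the callback to fire. A hedged usage sketch; it assumes an addDelegationListener registration method that pairs with the getDelegationListeners() accessor used above:

// Registering the hypothetical LoggingDelegationListener from the earlier sketch
// on a parent transformation before executing it.
Trans parentTrans = new Trans(transMeta);
parentTrans.addDelegationListener(new LoggingDelegationListener());
parentTrans.execute(null);       // same execute(String[]) entry point as in Example 1
parentTrans.waitUntilFinished(); // the listener fires when a sub-execution is delegated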

Example 3 with DelegationListener

Use of org.pentaho.di.job.DelegationListener in project pentaho-kettle by pentaho.

From the class JobExecutor, method executeJob:

private void executeJob() throws KettleException {
    // If we got 0 rows on input we don't really want to execute the job
    if (data.groupBuffer.isEmpty()) {
        return;
    }
    data.groupTimeStart = System.currentTimeMillis();
    if (first) {
        discardLogLines(data);
    }
    data.executorJob = createJob(meta.getRepository(), data.executorJobMeta, this);
    data.executorJob.shareVariablesWith(data.executorJobMeta);
    data.executorJob.setParentTrans(getTrans());
    data.executorJob.setLogLevel(getLogLevel());
    data.executorJob.setInternalKettleVariables(this);
    data.executorJob.copyParametersFrom(data.executorJobMeta);
    data.executorJob.setArguments(getTrans().getArguments());
    // data.executorJob.setInteractive(); TODO: pass interactivity through the transformation too for drill-down.
    // TODO
    /*
     * if (data.executorJob.isInteractive()) {
     * data.executorJob.getJobEntryListeners().addAll(parentJob.getJobEntryListeners()); }
     */
    // Pass the accumulated rows
    // 
    data.executorJob.setSourceRows(data.groupBuffer);
    // Pass parameter values
    // 
    passParametersToJob();
    // keep track for drill down in Spoon...
    // 
    getTrans().getActiveSubjobs().put(getStepname(), data.executorJob);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobStart.id, data.executorJob);
    data.executorJob.beginProcessing();
    Result result = new Result();
    // Inform the parent transformation that we started a job here...
    for (DelegationListener delegationListener : getTrans().getDelegationListeners()) {
        // TODO: copy some settings in the job execution configuration, not strictly needed
        // but the execution configuration information is useful in case of a job re-start on Carte
        // 
        delegationListener.jobDelegationStarted(data.executorJob, new JobExecutionConfiguration());
    }
    // 
    try {
        result = data.executorJob.execute(0, result);
    } catch (KettleException e) {
        log.logError("An error occurred executing the job: ", e);
        result.setResult(false);
        result.setNrErrors(1);
    } finally {
        try {
            ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobFinish.id, data.executorJob);
            data.executorJob.fireJobFinishListeners();
        } catch (KettleException e) {
            result.setNrErrors(1);
            result.setResult(false);
            log.logError(BaseMessages.getString(PKG, "JobExecutor.Log.ErrorExecJob", e.getMessage()), e);
        }
    }
    // Optionally pass the execution results to a target step...
    if (meta.getExecutionResultTargetStepMeta() != null) {
        Object[] outputRow = RowDataUtil.allocateRowData(data.executionResultsOutputRowMeta.size());
        int idx = 0;
        if (!Utils.isEmpty(meta.getExecutionTimeField())) {
            outputRow[idx++] = Long.valueOf(System.currentTimeMillis() - data.groupTimeStart);
        }
        if (!Utils.isEmpty(meta.getExecutionResultField())) {
            outputRow[idx++] = Boolean.valueOf(result.getResult());
        }
        if (!Utils.isEmpty(meta.getExecutionNrErrorsField())) {
            outputRow[idx++] = Long.valueOf(result.getNrErrors());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesReadField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesRead());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesWrittenField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesWritten());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesInputField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesInput());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesOutputField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesOutput());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesRejectedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesRejected());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesUpdatedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesUpdated());
        }
        if (!Utils.isEmpty(meta.getExecutionLinesDeletedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrLinesDeleted());
        }
        if (!Utils.isEmpty(meta.getExecutionFilesRetrievedField())) {
            outputRow[idx++] = Long.valueOf(result.getNrFilesRetrieved());
        }
        if (!Utils.isEmpty(meta.getExecutionExitStatusField())) {
            outputRow[idx++] = Long.valueOf(result.getExitStatus());
        }
        if (!Utils.isEmpty(meta.getExecutionLogTextField())) {
            String channelId = data.executorJob.getLogChannelId();
            String logText = KettleLogStore.getAppender().getBuffer(channelId, false).toString();
            outputRow[idx++] = logText;
        }
        if (!Utils.isEmpty(meta.getExecutionLogChannelIdField())) {
            outputRow[idx++] = data.executorJob.getLogChannelId();
        }
        putRowTo(data.executionResultsOutputRowMeta, outputRow, data.executionResultRowSet);
    }
    // Optionally pass the result rows to a target step...
    if (meta.getResultRowsTargetStepMeta() != null && result.getRows() != null) {
        for (RowMetaAndData row : result.getRows()) {
            Object[] targetRow = RowDataUtil.allocateRowData(data.resultRowsOutputRowMeta.size());
            for (int i = 0; i < meta.getResultRowsField().length; i++) {
                ValueMetaInterface valueMeta = row.getRowMeta().getValueMeta(i);
                if (valueMeta.getType() != meta.getResultRowsType()[i]) {
                    throw new KettleException(BaseMessages.getString(PKG, "JobExecutor.IncorrectDataTypePassed", valueMeta.getTypeDesc(), ValueMetaFactory.getValueMetaName(meta.getResultRowsType()[i])));
                }
                targetRow[i] = row.getData()[i];
            }
            putRowTo(data.resultRowsOutputRowMeta, targetRow, data.resultRowsRowSet);
        }
    }
    if (meta.getResultFilesTargetStepMeta() != null && result.getResultFilesList() != null) {
        for (ResultFile resultFile : result.getResultFilesList()) {
            Object[] targetRow = RowDataUtil.allocateRowData(data.resultFilesOutputRowMeta.size());
            int idx = 0;
            targetRow[idx++] = resultFile.getFile().getName().toString();
            // TODO: time, origin, ...
            putRowTo(data.resultFilesOutputRowMeta, targetRow, data.resultFilesRowSet);
        }
    }
    data.groupBuffer.clear();
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) ResultFile(org.pentaho.di.core.ResultFile) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) Result(org.pentaho.di.core.Result) DelegationListener(org.pentaho.di.job.DelegationListener) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface)
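
Example 3 flattens the delegated job's Result into an output row one optional field at a time; the idx++ pattern means only the fields configured in the step metadata consume a slot, so the row layout always matches data.executionResultsOutputRowMeta. The same counters can be read straight off a Result; a small sketch using only accessors that appear above (the jobResult variable and the surrounding call site are illustrative):

// Inspecting the Result of a delegated job; every getter below appears in Example 3.
Result jobResult = data.executorJob.getResult(); // assumes the sub-job has finished
boolean ok = jobResult.getResult();
long errors = jobResult.getNrErrors();
long linesRead = jobResult.getNrLinesRead();
long linesWritten = jobResult.getNrLinesWritten();
System.out.println("ok=" + ok + " errors=" + errors + " read=" + linesRead + " written=" + linesWritten);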

Example 4 with DelegationListener

Use of org.pentaho.di.job.DelegationListener in project pentaho-kettle by pentaho.

From the class JobEntryJob, method execute:

@Override
public Result execute(Result result, int nr) throws KettleException {
    result.setEntryNr(nr);
    LogChannelFileWriter logChannelFileWriter = null;
    LogLevel jobLogLevel = parentJob.getLogLevel();
    // Set the Embedded NamedCluster MetaStore Provider Key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    if (setLogfile) {
        String realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobJob.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        // create parent folder?
        if (!createParentFolder(realLogFilename)) {
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename), setAppendLogfile);
            logChannelFileWriter.startLogging();
        } catch (KettleException e) {
            logError("Unable to open file appender for file [" + getLogFilename() + "] : " + e.toString());
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        jobLogLevel = logFileLevel;
    }
    try {
        // First load the job, outside of the loop...
        if (parentJob.getJobMeta() != null) {
            // reset the internal variables again.
            // Maybe we should split up the variables even more like in UNIX shells.
        // The internal variables need to be reset to be able to use them properly
            // in 2 sequential sub jobs.
            parentJob.getJobMeta().setInternalKettleVariables();
        }
        // 
        switch(specificationMethod) {
            case REPOSITORY_BY_NAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository : [" + directory + " : " + environmentSubstitute(jobname) + "]");
                }
                break;
            case FILENAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from XML file : [" + environmentSubstitute(filename) + "]");
                }
                break;
            case REPOSITORY_BY_REFERENCE:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository by reference : [" + jobObjectId + "]");
                }
                break;
            default:
                break;
        }
        JobMeta jobMeta = getJobMeta(rep, this);
        // 
        if (jobMeta == null) {
            throw new KettleException("Unable to load the job: please specify the name and repository directory OR a filename");
        }
        verifyRecursiveExecution(parentJob, jobMeta);
        int iteration = 0;
        String[] args1 = arguments;
        // No arguments? Check the parent job's arguments.
        if (args1 == null || args1.length == 0) {
            args1 = parentJob.getArguments();
        }
        copyVariablesFrom(parentJob);
        setParentVariableSpace(parentJob);
        // 
        // For the moment only do variable translation at the start of a job, not
        // for every input row (if that would be switched on)
        // 
        String[] args = null;
        if (args1 != null) {
            args = new String[args1.length];
            for (int idx = 0; idx < args1.length; idx++) {
                args[idx] = environmentSubstitute(args1[idx]);
            }
        }
        RowMetaAndData resultRow = null;
        boolean first = true;
        List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
        while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
            first = false;
            // 
            if (execPerRow) {
                result.getRows().clear();
            }
            if (rows != null && execPerRow) {
                resultRow = rows.get(iteration);
            } else {
                resultRow = null;
            }
            NamedParams namedParam = new NamedParamsDefault();
            // 
            if (paramsFromPrevious) {
                String[] parentParameters = parentJob.listParameters();
                for (int idx = 0; idx < parentParameters.length; idx++) {
                    String par = parentParameters[idx];
                    String def = parentJob.getParameterDefault(par);
                    String val = parentJob.getParameterValue(par);
                    String des = parentJob.getParameterDescription(par);
                    namedParam.addParameterDefinition(par, def, des);
                    namedParam.setParameterValue(par, val);
                }
            }
            // 
            if (parameters != null) {
                for (int idx = 0; idx < parameters.length; idx++) {
                    if (!Utils.isEmpty(parameters[idx])) {
                        // 
                        if (Const.indexOfString(parameters[idx], namedParam.listParameters()) < 0) {
                            // We have a parameter
                            try {
                                namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
                            } catch (DuplicateParamException e) {
                                // Should never happen
                                // 
                                logError("Duplicate parameter definition for " + parameters[idx]);
                            }
                        }
                        if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                            namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                        } else {
                            // Something was entered in the field column...
                            // 
                            String value = "";
                            if (resultRow != null) {
                                value = resultRow.getString(parameterFieldNames[idx], "");
                            }
                            namedParam.setParameterValue(parameters[idx], value);
                        }
                    }
                }
            }
            Result oneResult = new Result();
            List<RowMetaAndData> sourceRows = null;
            if (execPerRow) {
                if (argFromPrevious) {
                    // Copy the input row to the (command line) arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Just pass a single row
                    List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                    newList.add(resultRow);
                    sourceRows = newList;
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            } else {
                if (argFromPrevious) {
                    // Only put the first Row on the arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Keep it as it was...
                    sourceRows = result.getRows();
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            }
            boolean doFallback = true;
            SlaveServer remoteSlaveServer = null;
            JobExecutionConfiguration executionConfiguration = new JobExecutionConfiguration();
            if (!Utils.isEmpty(runConfiguration)) {
                log.logBasic(BaseMessages.getString(PKG, "JobJob.RunConfig.Message"), runConfiguration);
                runConfiguration = environmentSubstitute(runConfiguration);
                executionConfiguration.setRunConfiguration(runConfiguration);
                try {
                    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), jobMeta, rep });
                    if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely()) {
                        result.setResult(true);
                        return result;
                    }
                    remoteSlaveServer = executionConfiguration.getRemoteServer();
                    doFallback = false;
                } catch (KettleException e) {
                    log.logError(e.getMessage(), getName());
                    result.setNrErrors(1);
                    result.setResult(false);
                    return result;
                }
            }
            if (doFallback) {
                // 
                if (!Utils.isEmpty(remoteSlaveServerName)) {
                    String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
                    remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
                    if (remoteSlaveServer == null) {
                        throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
                    }
                }
            }
            if (remoteSlaveServer == null) {
                // Local execution...
                // 
                // Create a new job
                // 
                job = new Job(rep, jobMeta, this);
                job.setParentJob(parentJob);
                job.setLogLevel(jobLogLevel);
                job.shareVariablesWith(this);
                job.setInternalKettleVariables(this);
                job.copyParametersFrom(jobMeta);
                job.setInteractive(parentJob.isInteractive());
                if (job.isInteractive()) {
                    job.getJobEntryListeners().addAll(parentJob.getJobEntryListeners());
                }
                // Pass the socket repository all around.
                // 
                job.setSocketRepository(parentJob.getSocketRepository());
                // Set the parameters calculated above on this instance.
                // 
                job.clearParameters();
                String[] parameterNames = job.listParameters();
                for (int idx = 0; idx < parameterNames.length; idx++) {
                    // Grab the parameter value set in the job entry
                    // 
                    String thisValue = namedParam.getParameterValue(parameterNames[idx]);
                    if (!Utils.isEmpty(thisValue)) {
                        // Set the value as specified by the user in the job entry
                        // 
                        job.setParameterValue(parameterNames[idx], thisValue);
                    } else {
                        // 
                        if (isPassingAllParameters()) {
                            String parentValue = parentJob.getParameterValue(parameterNames[idx]);
                            if (!Utils.isEmpty(parentValue)) {
                                job.setParameterValue(parameterNames[idx], parentValue);
                            }
                        }
                    }
                }
                job.activateParameters();
                // Set the source rows we calculated above...
                // 
                job.setSourceRows(sourceRows);
                // Don't forget the logging...
                job.beginProcessing();
                // Link the job with the sub-job
                parentJob.getJobTracker().addJobTracker(job.getJobTracker());
                // Link both ways!
                job.getJobTracker().setParentJobTracker(parentJob.getJobTracker());
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    job.setPassedBatchId(parentJob.getBatchId());
                }
                job.setArguments(args);
                // Inform the parent job that we started a sub-job here...
                for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
                    // TODO: copy some settings in the job execution configuration, not strictly needed
                    // but the execution configuration information is useful in case of a job re-start
                    // 
                    delegationListener.jobDelegationStarted(job, new JobExecutionConfiguration());
                }
                JobEntryJobRunner runner = new JobEntryJobRunner(job, result, nr, log);
                Thread jobRunnerThread = new Thread(runner);
                // PDI-6518
                // added UUID to thread name, otherwise threads share names if job entries are executed in parallel in a
                // parent job
                // if that happens, contained transformations start closing each other's connections
                jobRunnerThread.setName(Const.NVL(job.getJobMeta().getName(), job.getJobMeta().getFilename()) + " UUID: " + UUID.randomUUID().toString());
                jobRunnerThread.start();
                // 
                while (!runner.isFinished() && !parentJob.isStopped()) {
                    try {
                        Thread.sleep(0, 1);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // if the parent-job was stopped, stop the sub-job too...
                if (parentJob.isStopped()) {
                    job.stopAll();
                    // Wait until finished!
                    runner.waitUntilFinished();
                }
                oneResult = runner.getResult();
            } else {
                // Make sure we can parameterize the slave server connection
                // 
                remoteSlaveServer.shareVariablesWith(this);
                // Remote execution...
                // 
                JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
                // lightClone() because rows are overwritten in the next line.
                jobExecutionConfiguration.setPreviousResult(result.lightClone());
                jobExecutionConfiguration.getPreviousResult().setRows(sourceRows);
                jobExecutionConfiguration.setArgumentStrings(args);
                jobExecutionConfiguration.setVariables(this);
                jobExecutionConfiguration.setRemoteServer(remoteSlaveServer);
                jobExecutionConfiguration.setRepository(rep);
                jobExecutionConfiguration.setLogLevel(jobLogLevel);
                jobExecutionConfiguration.setPassingExport(passingExport);
                jobExecutionConfiguration.setExpandingRemoteJob(expandingRemoteJob);
                for (String param : namedParam.listParameters()) {
                    String defValue = namedParam.getParameterDefault(param);
                    String value = namedParam.getParameterValue(param);
                    jobExecutionConfiguration.getParams().put(param, Const.NVL(value, defValue));
                }
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    jobExecutionConfiguration.setPassedBatchId(parentJob.getBatchId());
                }
                // Send the XML over to the slave server
                // Also start the job over there...
                // 
                String carteObjectId = null;
                try {
                    carteObjectId = Job.sendToSlaveServer(jobMeta, jobExecutionConfiguration, rep, metaStore);
                } catch (KettleException e) {
                    // Perhaps the job exists on the remote server, carte is down, etc.
                    // This is an abort situation, stop the parent job...
                    // We want this in case we are running in parallel. The other job
                    // entries can stop running now.
                    // 
                    parentJob.stopAll();
                    // 
                    throw e;
                }
                // Now start the monitoring...
                // 
                SlaveServerJobStatus jobStatus = null;
                while (!parentJob.isStopped() && waitingToFinish) {
                    try {
                        jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
                        if (jobStatus.getResult() != null) {
                            // The job is finished, get the result...
                            // 
                            oneResult = jobStatus.getResult();
                            break;
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to verify the status of job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop looking too, chances are too low the server will come back on-line
                        break;
                    }
                    // sleep for 1 second
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // Write log from carte to file
                if (setLogfile && jobStatus != null) {
                    String logFromCarte = jobStatus.getLoggingString();
                    if (!Utils.isEmpty(logFromCarte)) {
                        FileObject logfile = logChannelFileWriter.getLogFile();
                        OutputStream logFileOutputStream = null;
                        try {
                            logFileOutputStream = KettleVFS.getOutputStream(logfile, setAppendLogfile);
                            logFileOutputStream.write(logFromCarte.getBytes());
                            logFileOutputStream.flush();
                        } catch (Exception e) {
                            logError("There was an error logging to file '" + logfile + "'", e);
                        } finally {
                            try {
                                if (logFileOutputStream != null) {
                                    logFileOutputStream.close();
                                    logFileOutputStream = null;
                                }
                            } catch (Exception e) {
                                logError("There was an error closing log file file '" + logfile + "'", e);
                            }
                        }
                    }
                }
                if (!waitingToFinish) {
                    // Since the job was posted successfully, the result is true...
                    // 
                    oneResult = new Result();
                    oneResult.setResult(true);
                }
                if (parentJob.isStopped()) {
                    try {
                        // 
                        if (jobStatus == null || jobStatus.isRunning()) {
                            // Try a remote abort ...
                            // 
                            remoteSlaveServer.stopJob(jobMeta.getName(), carteObjectId);
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to stop job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop looking too, chances are too low the server will come back on-line
                        break;
                    }
                }
            }
            // clear only the numbers, NOT the files or rows.
            result.clear();
            result.add(oneResult);
            // Set the result rows too, if any ...
            if (!Utils.isEmpty(oneResult.getRows())) {
                result.setRows(new ArrayList<RowMetaAndData>(oneResult.getRows()));
            }
            // 
            if (!oneResult.getResult()) {
                result.setNrErrors(result.getNrErrors() + 1);
            }
            iteration++;
        }
    } catch (KettleException ke) {
        logError("Error running job entry 'job' : ", ke);
        result.setResult(false);
        result.setNrErrors(1L);
    }
    if (setLogfile) {
        if (logChannelFileWriter != null) {
            logChannelFileWriter.stopLogging();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
            // Check whether anything went wrong while writing the log file...
            if (logChannelFileWriter.getException() != null) {
                logError("Unable to open log file [" + getLogFilename() + "] : ");
                logError(Const.getStackTracker(logChannelFileWriter.getException()));
                result.setNrErrors(1);
                result.setResult(false);
                return result;
            }
        }
    }
    if (result.getNrErrors() > 0) {
        result.setResult(false);
    } else {
        result.setResult(true);
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) NamedParams(org.pentaho.di.core.parameters.NamedParams) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) SlaveServer(org.pentaho.di.cluster.SlaveServer) LogLevel(org.pentaho.di.core.logging.LogLevel) Result(org.pentaho.di.core.Result) SlaveServerJobStatus(org.pentaho.di.www.SlaveServerJobStatus) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) FileObject(org.apache.commons.vfs2.FileObject) Job(org.pentaho.di.job.Job) NamedParamsDefault(org.pentaho.di.core.parameters.NamedParamsDefault) LogChannelFileWriter(org.pentaho.di.core.logging.LogChannelFileWriter) ResultFile(org.pentaho.di.core.ResultFile) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) DelegationListener(org.pentaho.di.job.DelegationListener)

Aggregations

Result (org.pentaho.di.core.Result): 4
KettleException (org.pentaho.di.core.exception.KettleException): 4
DelegationListener (org.pentaho.di.job.DelegationListener): 4
ResultFile (org.pentaho.di.core.ResultFile): 3
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 3
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 3
ArrayList (java.util.ArrayList): 2
SlaveServer (org.pentaho.di.cluster.SlaveServer): 2
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 2
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 2
LogChannelFileWriter (org.pentaho.di.core.logging.LogChannelFileWriter): 2
LogLevel (org.pentaho.di.core.logging.LogLevel): 2
NamedParams (org.pentaho.di.core.parameters.NamedParams): 2
NamedParamsDefault (org.pentaho.di.core.parameters.NamedParamsDefault): 2
Job (org.pentaho.di.job.Job): 2
JobExecutionConfiguration (org.pentaho.di.job.JobExecutionConfiguration): 2
Trans (org.pentaho.di.trans.Trans): 2
TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration): 2
OutputStream (java.io.OutputStream): 1
Map (java.util.Map): 1