Search in sources:

Example 1 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From the class JobEntryTrans, method execute().

/**
 * Execute this job entry and return the result. In this case it simply means setting the result boolean in the
 * Result class.
 *
 * @param result The result of the previous execution
 * @param nr     the job entry number
 * @return The Result of the execution.
 */
@Override
public Result execute(Result result, int nr) throws KettleException {
    result.setEntryNr(nr);
    LogChannelFileWriter logChannelFileWriter = null;
    LogLevel transLogLevel = parentJob.getLogLevel();
    // Set the embedded NamedCluster metastore provider key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    String realLogFilename = "";
    if (setLogfile) {
        transLogLevel = logFileLevel;
        realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobTrans.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        // create parent folder?
        if (!FileUtil.createParentFolder(PKG, realLogFilename, createParentFolder, this.getLogChannel(), this)) {
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename, this), setAppendLogfile);
            logChannelFileWriter.startLogging();
        } catch (KettleException e) {
            logError(BaseMessages.getString(PKG, "JobTrans.Error.UnableOpenAppender", realLogFilename, e.toString()));
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
    }
    // 
    switch(specificationMethod) {
        case FILENAME:
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTrans", environmentSubstitute(getFilename())));
            }
            break;
        case REPOSITORY_BY_NAME:
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTransInDirec", environmentSubstitute(getFilename()), environmentSubstitute(directory)));
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTransByReference", transObjectId));
            }
            break;
        default:
            break;
    }
    // Load the transformation only once for the complete loop!
    // Throws an exception if it was not possible to load the transformation, for example if the XML file doesn't
    // exist or the repository is down.
    // Log the stack trace and return an error condition from this job entry.
    // 
    TransMeta transMeta = null;
    try {
        transMeta = getTransMeta(rep, metaStore, this);
    } catch (KettleException e) {
        logError(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToRunJob", parentJobMeta.getName(), getName(), StringUtils.trim(e.getMessage())), e);
        result.setNrErrors(1);
        result.setResult(false);
        return result;
    }
    int iteration = 0;
    String[] args1 = arguments;
    if (args1 == null || args1.length == 0) {
        // No arguments set, look at the parent job.
        args1 = parentJob.getArguments();
    }
    // initializeVariablesFrom(parentJob);
    // 
    // For the moment only do variable translation at the start of a job, not
    // for every input row (if that would be switched on). This is for safety,
    // the real argument setting is later on.
    // 
    String[] args = null;
    if (args1 != null) {
        args = new String[args1.length];
        for (int idx = 0; idx < args1.length; idx++) {
            args[idx] = environmentSubstitute(args1[idx]);
        }
    }
    RowMetaAndData resultRow = null;
    boolean first = true;
    List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
    while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0) && !parentJob.isStopped()) {
        // 
        if (execPerRow) {
            result.getRows().clear();
        }
        if (rows != null && execPerRow) {
            resultRow = rows.get(iteration);
        } else {
            resultRow = null;
        }
        NamedParams namedParam = new NamedParamsDefault();
        if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
                if (!Utils.isEmpty(parameters[idx])) {
                    // We have a parameter
                    // 
                    namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
                    if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                        // There is no field name specified.
                        // 
                        String value = Const.NVL(environmentSubstitute(parameterValues[idx]), "");
                        namedParam.setParameterValue(parameters[idx], value);
                    } else {
                        // A field name was specified, so take the parameter value from that field of the result row...
                        // 
                        String value = "";
                        if (resultRow != null) {
                            value = resultRow.getString(parameterFieldNames[idx], "");
                        }
                        namedParam.setParameterValue(parameters[idx], value);
                    }
                }
            }
        }
        first = false;
        Result previousResult = result;
        try {
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobTrans.StartingTrans", getFilename(), getName(), getDescription()));
            }
            if (clearResultRows) {
                previousResult.setRows(new ArrayList<RowMetaAndData>());
            }
            if (clearResultFiles) {
                previousResult.getResultFiles().clear();
            }
            /*
             * Set one or more "result" rows on the transformation...
             */
            if (execPerRow) {
                if (argFromPrevious) {
                    // Copy the input row to the (command line) arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Just pass a single row
                    List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                    newList.add(resultRow);
                    // This previous result rows list can be either empty or not.
                    // Depending on the checkbox "clear result rows"
                    // In this case, it would execute the transformation with one extra row each time
                    // Can't figure out a real use-case for it, but hey, who am I to decide that, right?
                    // :-)
                    // 
                    previousResult.getRows().addAll(newList);
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            } else {
                if (argFromPrevious) {
                    // Only put the first Row on the arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                }
                if (paramsFromPrevious) {
                    // Copy the input parameters
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            }
            // Handle the parameters...
            // 
            transMeta.clearParameters();
            String[] parameterNames = transMeta.listParameters();
            StepWithMappingMeta.activateParams(transMeta, transMeta, this, parameterNames, parameters, parameterValues);
            boolean doFallback = true;
            SlaveServer remoteSlaveServer = null;
            TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration();
            if (!Utils.isEmpty(runConfiguration)) {
                log.logBasic(BaseMessages.getString(PKG, "JobTrans.RunConfig.Message"), runConfiguration);
                runConfiguration = environmentSubstitute(runConfiguration);
                executionConfiguration.setRunConfiguration(runConfiguration);
                try {
                    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), transMeta, rep });
                    if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely() && !executionConfiguration.isExecutingClustered()) {
                        result.setResult(true);
                        return result;
                    }
                    clustering = executionConfiguration.isExecutingClustered();
                    remoteSlaveServer = executionConfiguration.getRemoteServer();
                    doFallback = false;
                } catch (KettleException e) {
                    log.logError(e.getMessage(), getName());
                    result.setNrErrors(1);
                    result.setResult(false);
                    return result;
                }
            }
            if (doFallback) {
                // 
                if (!Utils.isEmpty(remoteSlaveServerName)) {
                    String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
                    remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
                    if (remoteSlaveServer == null) {
                        throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
                    }
                }
            }
            // 
            if (clustering) {
                executionConfiguration.setClusterPosting(true);
                executionConfiguration.setClusterPreparing(true);
                executionConfiguration.setClusterStarting(true);
                executionConfiguration.setClusterShowingTransformation(false);
                executionConfiguration.setSafeModeEnabled(false);
                executionConfiguration.setRepository(rep);
                executionConfiguration.setLogLevel(transLogLevel);
                executionConfiguration.setPreviousResult(previousResult);
                // Also pass the variables from the transformation into the execution configuration
                // That way it can go over the HTTP connection to the slave server.
                // 
                executionConfiguration.setVariables(transMeta);
                // Also set the arguments...
                // 
                executionConfiguration.setArgumentStrings(args);
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    executionConfiguration.setPassedBatchId(parentJob.getPassedBatchId());
                }
                TransSplitter transSplitter = null;
                long errors = 0;
                try {
                    transSplitter = Trans.executeClustered(transMeta, executionConfiguration);
                    // Monitor the running transformations, wait until they are done.
                    // Also kill them all if anything goes bad
                    // Also clean up afterwards...
                    // 
                    errors += Trans.monitorClusteredTransformation(log, transSplitter, parentJob);
                } catch (Exception e) {
                    logError("Error during clustered execution. Cleaning up clustered execution.", e);
                    // In case something goes wrong, make sure to clean up afterwards!
                    // 
                    errors++;
                    if (transSplitter != null) {
                        Trans.cleanupCluster(log, transSplitter);
                    } else {
                        // Try to clean anyway...
                        // 
                        SlaveServer master = null;
                        for (StepMeta stepMeta : transMeta.getSteps()) {
                            if (stepMeta.isClustered()) {
                                for (SlaveServer slaveServer : stepMeta.getClusterSchema().getSlaveServers()) {
                                    if (slaveServer.isMaster()) {
                                        master = slaveServer;
                                        break;
                                    }
                                }
                            }
                        }
                        if (master != null) {
                            master.deAllocateServerSockets(transMeta.getName(), null);
                        }
                    }
                }
                result.clear();
                if (transSplitter != null) {
                    Result clusterResult = Trans.getClusteredTransformationResult(log, transSplitter, parentJob, executionConfiguration.isLogRemoteExecutionLocally());
                    result.add(clusterResult);
                }
                result.setNrErrors(result.getNrErrors() + errors);
            } else if (remoteSlaveServer != null) {
                // Execute this transformation remotely
                // 
                // Make sure we can parameterize the slave server connection
                // 
                remoteSlaveServer.shareVariablesWith(this);
                // Remote execution...
                // 
                executionConfiguration.setPreviousResult(previousResult.clone());
                executionConfiguration.setArgumentStrings(args);
                executionConfiguration.setVariables(this);
                executionConfiguration.setRemoteServer(remoteSlaveServer);
                executionConfiguration.setLogLevel(transLogLevel);
                executionConfiguration.setRepository(rep);
                executionConfiguration.setLogFileName(realLogFilename);
                executionConfiguration.setSetAppendLogfile(setAppendLogfile);
                executionConfiguration.setSetLogfile(setLogfile);
                Map<String, String> params = executionConfiguration.getParams();
                for (String param : transMeta.listParameters()) {
                    String value = Const.NVL(transMeta.getParameterValue(param), Const.NVL(transMeta.getParameterDefault(param), transMeta.getVariable(param)));
                    params.put(param, value);
                }
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    executionConfiguration.setPassedBatchId(parentJob.getPassedBatchId());
                }
                // Send the XML over to the slave server
                // Also start the transformation over there...
                // 
                String carteObjectId = Trans.sendToSlaveServer(transMeta, executionConfiguration, rep, metaStore);
                // Now start the monitoring...
                // 
                SlaveServerTransStatus transStatus = null;
                while (!parentJob.isStopped() && waitingToFinish) {
                    try {
                        transStatus = remoteSlaveServer.getTransStatus(transMeta.getName(), carteObjectId, 0);
                        if (!transStatus.isRunning()) {
                            // The transformation is finished, get the result...
                            // 
                            // Get the status including the result (we don't do it above because of PDI-15781)
                            transStatus = remoteSlaveServer.getTransStatus(transMeta.getName(), carteObjectId, 0, true);
                            Result remoteResult = transStatus.getResult();
                            result.clear();
                            result.add(remoteResult);
                            // 
                            if (remoteResult.isStopped()) {
                                // 
                                result.setNrErrors(result.getNrErrors() + 1);
                            }
                            // Make sure to clean up : write a log record etc, close any left-over sockets etc.
                            // 
                            remoteSlaveServer.cleanupTransformation(transMeta.getName(), carteObjectId);
                            break;
                        }
                    } catch (Exception e1) {
                        logError(BaseMessages.getString(PKG, "JobTrans.Error.UnableContactSlaveServer", "" + remoteSlaveServer, transMeta.getName()), e1);
                        result.setNrErrors(result.getNrErrors() + 1L);
                        // Stop looking too, chances are too low the server will come back on-line
                        break;
                    }
                    // sleep for 2 seconds
                    try {
                        Thread.sleep(2000);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                if (parentJob.isStopped()) {
                    // 
                    if (transStatus == null || transStatus.isRunning()) {
                        // Try a remote abort ...
                        // (use carteObjectId here: transStatus may still be null at this point)
                        remoteSlaveServer.stopTransformation(transMeta.getName(), carteObjectId);
                        // And a cleanup...
                        // 
                        remoteSlaveServer.cleanupTransformation(transMeta.getName(), carteObjectId);
                        // Set an error state!
                        // 
                        result.setNrErrors(result.getNrErrors() + 1L);
                    }
                }
            } else {
                // Execute this transformation on the local machine
                // 
                // Create the transformation from meta-data
                // 
                // trans = new Trans( transMeta, this );
                final TransMeta meta = transMeta;
                trans = new TransSupplier(transMeta, log, () -> new Trans(meta)).get();
                trans.setParent(this);
                // Pass the socket repository as early as possible...
                // 
                trans.setSocketRepository(parentJob.getSocketRepository());
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    trans.setPassedBatchId(parentJob.getPassedBatchId());
                }
                // set the parent job on the transformation, variables are taken from here...
                // 
                trans.setParentJob(parentJob);
                trans.setParentVariableSpace(parentJob);
                trans.setLogLevel(transLogLevel);
                trans.setPreviousResult(previousResult);
                trans.setArguments(arguments);
                // Mappings need the repository to load from
                // 
                trans.setRepository(rep);
                // inject the metaStore
                trans.setMetaStore(metaStore);
                // First get the root job
                // 
                Job rootJob = parentJob;
                while (rootJob.getParentJob() != null) {
                    rootJob = rootJob.getParentJob();
                }
                // Get the start and end-date from the root job...
                // 
                trans.setJobStartDate(rootJob.getStartDate());
                trans.setJobEndDate(rootJob.getEndDate());
                // 
                for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
                    // TODO: copy some settings in the job execution configuration, not strictly needed
                    // but the execution configuration information is useful in case of a job re-start
                    // 
                    delegationListener.transformationDelegationStarted(trans, new TransExecutionConfiguration());
                }
                try {
                    // Start execution...
                    // 
                    trans.execute(args);
                    // TODO is it possible to implement Observer pattern to avoid Thread.sleep here?
                    while (!trans.isFinished() && trans.getErrors() == 0) {
                        if (parentJob.isStopped()) {
                            trans.stopAll();
                            break;
                        } else {
                            try {
                                Thread.sleep(0, 500);
                            } catch (InterruptedException e) {
                            // Ignore errors
                            }
                        }
                    }
                    trans.waitUntilFinished();
                    if (parentJob.isStopped() || trans.getErrors() != 0) {
                        trans.stopAll();
                        result.setNrErrors(1);
                    }
                    Result newResult = trans.getResult();
                    // clear only the numbers, NOT the files or rows.
                    result.clear();
                    result.add(newResult);
                    // Set the result rows too, if any ...
                    if (!Utils.isEmpty(newResult.getRows())) {
                        result.setRows(newResult.getRows());
                    }
                    if (setLogfile) {
                        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, KettleVFS.getFileObject(realLogFilename, this), parentJob.getJobname(), toString());
                        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                    }
                } catch (KettleException e) {
                    logError(BaseMessages.getString(PKG, "JobTrans.Error.UnablePrepareExec"), e);
                    result.setNrErrors(1);
                }
            }
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "JobTrans.ErrorUnableOpenTrans", e.getMessage()));
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
        }
        iteration++;
    }
    if (setLogfile) {
        if (logChannelFileWriter != null) {
            logChannelFileWriter.stopLogging();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
            // 
            if (logChannelFileWriter.getException() != null) {
                logError("Unable to open log file [" + getLogFilename() + "] : ");
                logError(Const.getStackTracker(logChannelFileWriter.getException()));
                result.setNrErrors(1);
                result.setResult(false);
                return result;
            }
        }
    }
    if (result.getNrErrors() == 0) {
        result.setResult(true);
    } else {
        result.setResult(false);
    }
    return result;
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), NamedParams (org.pentaho.di.core.parameters.NamedParams), TransMeta (org.pentaho.di.trans.TransMeta), ArrayList (java.util.ArrayList), SlaveServer (org.pentaho.di.cluster.SlaveServer), LogLevel (org.pentaho.di.core.logging.LogLevel), Result (org.pentaho.di.core.Result), RowMetaAndData (org.pentaho.di.core.RowMetaAndData), Job (org.pentaho.di.job.Job), Trans (org.pentaho.di.trans.Trans), NamedParamsDefault (org.pentaho.di.core.parameters.NamedParamsDefault), LogChannelFileWriter (org.pentaho.di.core.logging.LogChannelFileWriter), ResultFile (org.pentaho.di.core.ResultFile), StepMeta (org.pentaho.di.trans.step.StepMeta), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint), KettleXMLException (org.pentaho.di.core.exception.KettleXMLException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), TransSupplier (org.pentaho.di.trans.TransSupplier), TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration), SlaveServerTransStatus (org.pentaho.di.www.SlaveServerTransStatus), TransSplitter (org.pentaho.di.trans.cluster.TransSplitter), Map (java.util.Map), DelegationListener (org.pentaho.di.job.DelegationListener)
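
The entry above repeats the same failure bookkeeping before every early return: set the error count, clear the boolean outcome, and hand the same Result back. As a minimal sketch (the helper below is invented for illustration and is not part of the Kettle sources), that pattern factors out like this:

private static Result fail(Result result) {
    // Mirror the early-return bookkeeping used above: record one error and clear the boolean outcome.
    result.setNrErrors(1);
    result.setResult(false);
    return result;
}

With such a helper, each of the early returns above collapses to return fail(result);.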

Example 2 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From the class JobEntryWriteToFile, method execute().

public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    result.setResult(false);
    result.setNrErrors(1);
    String realFilename = getRealFilename();
    if (!Utils.isEmpty(realFilename)) {
        // Set the embedded NamedCluster metastore provider key so that it can be passed to VFS
        if (parentJobMeta.getNamedClusterEmbedManager() != null) {
            parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
        }
        String content = environmentSubstitute(getContent());
        String encoding = environmentSubstitute(getEncoding());
        OutputStreamWriter osw = null;
        OutputStream os = null;
        try {
            // Create parent folder if needed
            createParentFolder(realFilename);
            // Create / open file for writing
            os = KettleVFS.getOutputStream(realFilename, this, isAppendFile());
            if (Utils.isEmpty(encoding)) {
                if (isDebug()) {
                    logDebug(BaseMessages.getString(PKG, "JobWriteToFile.Log.WritingToFile", realFilename));
                }
                osw = new OutputStreamWriter(os);
            } else {
                if (isDebug()) {
                    logDebug(BaseMessages.getString(PKG, "JobWriteToFile.Log.WritingToFileWithEncoding", realFilename, encoding));
                }
                osw = new OutputStreamWriter(os, encoding);
            }
            osw.write(content);
            result.setResult(true);
            result.setNrErrors(0);
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "JobWriteToFile.Error.WritingFile", realFilename, e.getMessage()));
        } finally {
            if (osw != null) {
                try {
                    osw.flush();
                    osw.close();
                } catch (Exception ex) {
                /* Ignore */
                }
            }
            if (os != null) {
                try {
                    os.flush();
                    os.close();
                } catch (Exception ex) {
                /* Ignore */
                }
            }
        }
    } else {
        logError(BaseMessages.getString(PKG, "JobWriteToFile.Error.MissinfgFile"));
    }
    return result;
}
Also used: OutputStream (java.io.OutputStream), OutputStreamWriter (java.io.OutputStreamWriter), KettleException (org.pentaho.di.core.exception.KettleException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), KettleXMLException (org.pentaho.di.core.exception.KettleXMLException), Result (org.pentaho.di.core.Result)
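
For contrast, here is a hedged sketch of the same open-write-close flow using try-with-resources; it assumes only the KettleVFS.getOutputStream call and the fields shown above, and it is not the code that ships in Kettle. Closing the writer flushes and closes the underlying stream, so the explicit finally block becomes unnecessary:

try (OutputStream os = KettleVFS.getOutputStream(realFilename, this, isAppendFile());
     OutputStreamWriter osw = Utils.isEmpty(encoding)
         ? new OutputStreamWriter(os)
         : new OutputStreamWriter(os, encoding)) {
    // Write the substituted content, then flip the pessimistic defaults set at the top.
    osw.write(content);
    result.setResult(true);
    result.setNrErrors(0);
} catch (Exception e) {
    logError(BaseMessages.getString(PKG, "JobWriteToFile.Error.WritingFile", realFilename, e.getMessage()));
}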

Example 3 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From the class JobEntryZipFile, method execute().

public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();
    // reset values
    String realZipfilename;
    String realWildcard = null;
    String realWildcardExclude = null;
    String realTargetdirectory;
    String realMovetodirectory = environmentSubstitute(movetoDirectory);
    // Set the embedded NamedCluster metastore provider key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    // Sanity check
    boolean sanityControlOK = true;
    if (afterZip == 2) {
        if (Utils.isEmpty(realMovetodirectory)) {
            sanityControlOK = false;
            logError(BaseMessages.getString(PKG, "JobZipFiles.AfterZip_No_DestinationFolder_Defined.Label"));
        } else {
            FileObject moveToDirectory = null;
            try {
                moveToDirectory = KettleVFS.getFileObject(realMovetodirectory, this);
                if (moveToDirectory.exists()) {
                    if (moveToDirectory.getType() == FileType.FOLDER) {
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobZipFiles.Log.MoveToFolderExist", realMovetodirectory));
                        }
                    } else {
                        sanityControlOK = false;
                        logError(BaseMessages.getString(PKG, "JobZipFiles.Log.MoveToFolderNotFolder", realMovetodirectory));
                    }
                } else {
                    if (log.isDetailed()) {
                        logDetailed(BaseMessages.getString(PKG, "JobZipFiles.Log.MoveToFolderNotNotExist", realMovetodirectory));
                    }
                    if (createMoveToDirectory) {
                        moveToDirectory.createFolder();
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobZipFiles.Log.MoveToFolderCreaterd", realMovetodirectory));
                        }
                    } else {
                        sanityControlOK = false;
                        logError(BaseMessages.getString(PKG, "JobZipFiles.Log.MoveToFolderNotNotExist", realMovetodirectory));
                    }
                }
            } catch (Exception e) {
                sanityControlOK = false;
                logError(BaseMessages.getString(PKG, "JobZipFiles.ErrorGettingMoveToFolder.Label", realMovetodirectory), e);
            } finally {
                if (moveToDirectory != null) {
                    realMovetodirectory = KettleVFS.getFilename(moveToDirectory);
                    try {
                        moveToDirectory.close();
                    } catch (Exception e) {
                        logError("Error moving to directory", e);
                        sanityControlOK = false;
                    }
                }
            }
        }
    }
    if (!sanityControlOK) {
        return errorResult(result);
    }
    if (isFromPrevious) {
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobZipFiles.ArgFromPrevious.Found", (rows != null ? rows.size() : 0) + ""));
        }
    }
    if (isFromPrevious && rows != null) {
        try {
            for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
                // get arguments from previous job entry
                RowMetaAndData resultRow = rows.get(iteration);
                // get target directory
                realTargetdirectory = resultRow.getString(0, null);
                if (!Utils.isEmpty(realTargetdirectory)) {
                    // get wildcard to include
                    if (!Utils.isEmpty(resultRow.getString(1, null))) {
                        realWildcard = resultRow.getString(1, null);
                    }
                    // get wildcard to exclude
                    if (!Utils.isEmpty(resultRow.getString(2, null))) {
                        realWildcardExclude = resultRow.getString(2, null);
                    }
                    // get destination zip file
                    realZipfilename = resultRow.getString(3, null);
                    if (!Utils.isEmpty(realZipfilename)) {
                        if (!processRowFile(parentJob, result, realZipfilename, realWildcard, realWildcardExclude, realTargetdirectory, realMovetodirectory, createParentFolder)) {
                            return errorResult(result);
                        }
                    } else {
                        logError("destination zip filename is empty! Ignoring row...");
                    }
                } else {
                    logError("Target directory is empty! Ignoring row...");
                }
            }
        } catch (Exception e) {
            logError("Erreur during process!", e);
            result.setResult(false);
            result.setNrErrors(1);
        }
    } else if (!isFromPrevious) {
        if (!Utils.isEmpty(sourceDirectory)) {
            // get values from job entry
            realZipfilename = getFullFilename(environmentSubstitute(zipFilename), addDate, addTime, specifyFormat, dateTimeFormat);
            realWildcard = environmentSubstitute(wildCard);
            realWildcardExclude = environmentSubstitute(excludeWildCard);
            realTargetdirectory = environmentSubstitute(sourceDirectory);
            boolean success = processRowFile(parentJob, result, realZipfilename, realWildcard, realWildcardExclude, realTargetdirectory, realMovetodirectory, createParentFolder);
            if (success) {
                result.setResult(true);
            } else {
                errorResult(result);
            }
        } else {
            logError("Source folder/file is empty! Ignoring row...");
        }
    }
    // End
    return result;
}
Also used: RowMetaAndData (org.pentaho.di.core.RowMetaAndData), FileObject (org.apache.commons.vfs2.FileObject), KettleXMLException (org.pentaho.di.core.exception.KettleXMLException), KettleFileException (org.pentaho.di.core.exception.KettleFileException), FileSystemException (org.apache.commons.vfs2.FileSystemException), KettleException (org.pentaho.di.core.exception.KettleException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), IOException (java.io.IOException), Result (org.pentaho.di.core.Result)
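
When isFromPrevious is set, the entry reads four positional fields from each row handed over by the previous job entry. A short illustrative fragment of that expected row layout (the local variable names are invented for clarity; the indices come from the loop above, which runs inside a try block because getString can throw):

RowMetaAndData row = rows.get(iteration);
String targetDirectory = row.getString(0, null); // folder or file to zip
String wildcardInclude = row.getString(1, null); // filename pattern to include
String wildcardExclude = row.getString(2, null); // filename pattern to exclude
String zipFilename     = row.getString(3, null); // destination zip file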

Example 4 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From the class KitchenCommandExecutor, method execute().

public int execute(String repoName, String noRepo, String username, String trustUser, String password, String dirName, String filename, String jobName, String listJobs, String listDirs, String exportRepo, String initialDir, String listRepos, String listParams, NamedParams params, NamedParams customParams, String[] arguments) throws Throwable {
    getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Starting"));
    Date start = Calendar.getInstance().getTime();
    logDebug("Kitchen.Log.AllocateNewJob");
    Job job = null;
    // In case we use a repository...
    Repository repository = null;
    try {
        if (getMetaStore() == null) {
            setMetaStore(createDefaultMetastore());
        }
        // Read kettle job specified on command-line?
        if (!Utils.isEmpty(repoName) || !Utils.isEmpty(filename)) {
            logDebug("Kitchen.Log.ParsingCommandLine");
            if (!Utils.isEmpty(repoName) && !YES.equalsIgnoreCase(noRepo)) {
                /**
                 * if set, _trust_user_ needs to be considered. See pur-plugin's:
                 *
                 * @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/PurRepositoryConnector.java#L97-L101
                 * @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/WebServiceManager.java#L130-L133
                 */
                if (YES.equalsIgnoreCase(trustUser)) {
                    System.setProperty("pentaho.repository.client.attemptTrust", YES);
                }
                // In case we use a repository...
                // some commands load a Job from the repo; others merely print repo-related information
                RepositoryMeta repositoryMeta = loadRepositoryConnection(repoName, "Kitchen.Log.LoadingRep", "Kitchen.Error.NoRepDefinied", "Kitchen.Log.FindingRep");
                repository = establishRepositoryConnection(repositoryMeta, username, password, RepositoryOperation.EXECUTE_JOB);
                job = executeRepositoryBasedCommand(repository, repositoryMeta, dirName, jobName, listJobs, listDirs);
            }
            // Try to load it from file anyway.
            if (!Utils.isEmpty(filename) && job == null) {
                // Try to load the job from file, even if it failed to load from the repository
                job = executeFilesystemBasedCommand(initialDir, filename);
            }
        } else if (YES.equalsIgnoreCase(listRepos)) {
            // list the repositories placed at repositories.xml
            printRepositories(loadRepositoryInfo("Kitchen.Log.ListRep", "Kitchen.Error.NoRepDefinied"));
        }
    } catch (KettleException e) {
        job = null;
        if (repository != null) {
            repository.disconnect();
        }
        System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.StopProcess", e.getMessage()));
    }
    if (job == null) {
        if (!YES.equalsIgnoreCase(listJobs) && !YES.equalsIgnoreCase(listDirs) && !YES.equalsIgnoreCase(listRepos)) {
            System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.canNotLoadJob"));
        }
        return CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode();
    }
    if (!Utils.isEmpty(exportRepo)) {
        try {
            // Export the resources linked to the currently loaded file...
            TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(exportRepo, job.getJobMeta(), job, repository, getMetaStore());
            String launchFile = topLevelResource.getResourceName();
            String message = ResourceUtil.getExplanation(exportRepo, launchFile, job.getJobMeta());
            System.out.println();
            System.out.println(message);
            // Setting the list parameters option will make kitchen exit below in the parameters section
            listParams = YES;
        } catch (Exception e) {
            System.out.println(Const.getStackTracker(e));
            return CommandExecutorCodes.Kitchen.UNEXPECTED_ERROR.getCode();
        }
    }
    Result result = null;
    int returnCode = CommandExecutorCodes.Kitchen.SUCCESS.getCode();
    try {
        // Set the command line arguments on the job ...
        job.setArguments(arguments);
        job.initializeVariablesFrom(null);
        job.setLogLevel(getLog().getLogLevel());
        job.getJobMeta().setInternalKettleVariables(job);
        job.setRepository(repository);
        job.getJobMeta().setRepository(repository);
        job.getJobMeta().setMetaStore(getMetaStore());
        // Map the command line named parameters to the actual named parameters. Skip for
        // the moment any extra command line parameter not known in the job.
        String[] jobParams = job.getJobMeta().listParameters();
        for (String param : jobParams) {
            String value = params.getParameterValue(param);
            if (value != null) {
                job.getJobMeta().setParameterValue(param, value);
            }
        }
        job.copyParametersFrom(job.getJobMeta());
        // Put the parameters over the already defined variable space. Parameters get priority.
        job.activateParameters();
        // Set custom options in the job extension map as Strings
        for (String optionName : customParams.listParameters()) {
            String optionValue = customParams.getParameterValue(optionName);
            if (optionName != null && optionValue != null) {
                job.getExtensionDataMap().put(optionName, optionValue);
            }
        }
        // List the parameters defined in this job, then simply exit...
        if (YES.equalsIgnoreCase(listParams)) {
            printJobParameters(job);
            // same as the other list options
            return CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode();
        }
        job.start();
        job.waitUntilFinished();
        // Execute the selected job.
        result = job.getResult();
    } finally {
        if (repository != null) {
            repository.disconnect();
        }
        if (YES.equalsIgnoreCase(trustUser)) {
            // we set it, now we sanitize it
            System.clearProperty("pentaho.repository.client.attemptTrust");
        }
    }
    getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Finished"));
    if (result != null && result.getNrErrors() != 0) {
        getLog().logError(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.FinishedWithErrors"));
        returnCode = CommandExecutorCodes.Kitchen.ERRORS_DURING_PROCESSING.getCode();
    }
    Date stop = Calendar.getInstance().getTime();
    calculateAndPrintElapsedTime(start, stop, "Kitchen.Log.StartStop", "Kitchen.Log.ProcessEndAfter", "Kitchen.Log.ProcessEndAfterLong", "Kitchen.Log.ProcessEndAfterLonger", "Kitchen.Log.ProcessEndAfterLongest");
    return returnCode;
}
Also used: RepositoryMeta (org.pentaho.di.repository.RepositoryMeta), TopLevelResource (org.pentaho.di.resource.TopLevelResource), KettleException (org.pentaho.di.core.exception.KettleException), Repository (org.pentaho.di.repository.Repository), Job (org.pentaho.di.job.Job), Date (java.util.Date), UnknownParamException (org.pentaho.di.core.parameters.UnknownParamException), ExecutionException (java.util.concurrent.ExecutionException), Result (org.pentaho.di.core.Result)
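
Kitchen derives its process exit code from the Result of the finished job. Condensed into a hedged sketch that uses only the constants already referenced above:

int returnCode = CommandExecutorCodes.Kitchen.SUCCESS.getCode();
if (result != null && result.getNrErrors() != 0) {
    // Any error recorded on the job's Result downgrades the exit code.
    returnCode = CommandExecutorCodes.Kitchen.ERRORS_DURING_PROCESSING.getCode();
}
return returnCode;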

Example 5 with Result

Use of org.pentaho.di.core.Result in project pentaho-kettle by pentaho.

From the class JobPainter, method drawJobEntryCopy().

protected void drawJobEntryCopy(JobEntryCopy jobEntryCopy) {
    if (!jobEntryCopy.isDrawn()) {
        return;
    }
    int alpha = gc.getAlpha();
    Point pt = jobEntryCopy.getLocation();
    if (pt == null) {
        pt = new Point(50, 50);
    }
    Point screen = real2screen(pt.x, pt.y);
    int x = screen.x;
    int y = screen.y;
    String name = jobEntryCopy.getName();
    if (jobEntryCopy.isSelected()) {
        gc.setLineWidth(3);
    } else {
        gc.setLineWidth(1);
    }
    gc.setBackground(EColor.BACKGROUND);
    gc.fillRoundRectangle(x - 1, y - 1, iconsize + 1, iconsize + 1, 7, 7);
    gc.drawJobEntryIcon(x, y, jobEntryCopy, magnification);
    if (!shadow) {
        areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_ICON, x, y, iconsize, iconsize, offset, subject, jobEntryCopy));
    }
    gc.setForeground(jobEntryCopy.isMissing() ? EColor.RED : EColor.CRYSTAL);
    gc.drawRoundRectangle(x - 1, y - 1, iconsize + 1, iconsize + 1, 7, 7);
    gc.setForeground(EColor.CRYSTAL);
    Point textsize = gc.textExtent("" + name);
    gc.setBackground(EColor.BACKGROUND);
    gc.setLineWidth(1);
    int xpos = x + (iconsize / 2) - (textsize.x / 2);
    int ypos = y + iconsize + 5;
    gc.setForeground(EColor.BLACK);
    gc.drawText(name, xpos, ypos, true);
    if (activeJobEntries != null && activeJobEntries.contains(jobEntryCopy)) {
        gc.setForeground(EColor.BLUE);
        int iconX = (x + iconsize) - (MINI_ICON_SIZE / 2);
        int iconY = y - (MINI_ICON_SIZE / 2);
        gc.drawImage(EImage.BUSY, iconX, iconY, magnification);
        areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_BUSY, iconX, iconY, MINI_ICON_SIZE, MINI_ICON_SIZE, offset, subject, jobEntryCopy));
    } else {
        gc.setForeground(EColor.BLACK);
    }
    JobEntryResult jobEntryResult = findJobEntryResult(jobEntryCopy);
    if (jobEntryResult != null) {
        Result result = jobEntryResult.getResult();
        int iconX = (x + iconsize) - (MINI_ICON_SIZE / 2);
        int iconY = y - (MINI_ICON_SIZE / 2);
        // 
        if (jobEntryResult.isCheckpoint()) {
            gc.drawImage(EImage.CHECKPOINT, iconX, iconY, magnification);
            areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_RESULT_CHECKPOINT, iconX, iconY, MINI_ICON_SIZE, MINI_ICON_SIZE, offset, jobEntryCopy, jobEntryResult));
        } else {
            if (result.getResult()) {
                gc.drawImage(EImage.TRUE, iconX, iconY, magnification);
                areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_RESULT_SUCCESS, iconX, iconY, MINI_ICON_SIZE, MINI_ICON_SIZE, offset, jobEntryCopy, jobEntryResult));
            } else {
                gc.drawImage(EImage.FALSE, iconX, iconY, magnification);
                areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_RESULT_FAILURE, iconX, iconY, MINI_ICON_SIZE, MINI_ICON_SIZE, offset, jobEntryCopy, jobEntryResult));
            }
        }
    }
    // 
    if (mouseOverEntries.contains(jobEntryCopy)) {
        gc.setTransform(translationX, translationY, 0, BasePainter.FACTOR_1_TO_1);
        EImage[] miniIcons = new EImage[] { EImage.INPUT, EImage.EDIT, EImage.CONTEXT_MENU, EImage.OUTPUT };
        // First draw the mini-icons balloon below the job entry
        // 
        int totalHeight = 0;
        int totalIconsWidth = 0;
        int totalWidth = 2 * MINI_ICON_MARGIN;
        for (EImage miniIcon : miniIcons) {
            Point bounds = gc.getImageBounds(miniIcon);
            totalWidth += bounds.x + MINI_ICON_MARGIN;
            totalIconsWidth += bounds.x + MINI_ICON_MARGIN;
            if (bounds.y > totalHeight) {
                totalHeight = bounds.y;
            }
        }
        totalHeight += 2 * MINI_ICON_MARGIN;
        gc.setFont(EFont.SMALL);
        String trimmedName = jobEntryCopy.getName().length() < 30 ? jobEntryCopy.getName() : jobEntryCopy.getName().substring(0, 30);
        Point nameExtent = gc.textExtent(trimmedName);
        nameExtent.y += 2 * MINI_ICON_MARGIN;
        nameExtent.x += 3 * MINI_ICON_MARGIN;
        totalHeight += nameExtent.y;
        if (nameExtent.x > totalWidth) {
            totalWidth = nameExtent.x;
        }
        int areaX = translateToCurrentScale(x) + translateToCurrentScale(iconsize) / 2 - totalWidth / 2 + MINI_ICON_SKEW;
        int areaY = translateToCurrentScale(y) + translateToCurrentScale(iconsize) + MINI_ICON_DISTANCE + BasePainter.CONTENT_MENU_INDENT;
        gc.setForeground(EColor.CRYSTAL);
        gc.setBackground(EColor.CRYSTAL);
        gc.setLineWidth(1);
        gc.fillRoundRectangle(areaX, areaY, totalWidth, totalHeight, BasePainter.CORNER_RADIUS_5, BasePainter.CORNER_RADIUS_5);
        gc.setBackground(EColor.WHITE);
        gc.fillRoundRectangle(areaX, areaY + nameExtent.y, totalWidth, (totalHeight - nameExtent.y), BasePainter.CORNER_RADIUS_5, BasePainter.CORNER_RADIUS_5);
        gc.fillRectangle(areaX, areaY + nameExtent.y, totalWidth, (totalHeight - nameExtent.y) / 2);
        gc.drawRoundRectangle(areaX, areaY, totalWidth, totalHeight, BasePainter.CORNER_RADIUS_5, BasePainter.CORNER_RADIUS_5);
        gc.setForeground(EColor.WHITE);
        gc.drawText(trimmedName, areaX + (totalWidth - nameExtent.x) / 2 + MINI_ICON_MARGIN, areaY + MINI_ICON_MARGIN, true);
        gc.setForeground(EColor.CRYSTAL);
        gc.setBackground(EColor.CRYSTAL);
        gc.setFont(EFont.GRAPH);
        areaOwners.add(new AreaOwner(AreaType.MINI_ICONS_BALLOON, translateTo1To1(areaX), translateTo1To1(areaY), translateTo1To1(totalWidth), translateTo1To1(totalHeight), offset, jobMeta, jobEntryCopy));
        gc.fillPolygon(new int[] { areaX + totalWidth / 2 - MINI_ICON_TRIANGLE_BASE / 2 + 1, areaY + 2, areaX + totalWidth / 2 + MINI_ICON_TRIANGLE_BASE / 2, areaY + 2, areaX + totalWidth / 2 - MINI_ICON_SKEW, areaY - MINI_ICON_DISTANCE - 3 });
        gc.setBackground(EColor.WHITE);
        // Put on the icons...
        // 
        int xIcon = areaX + (totalWidth - totalIconsWidth) / 2 + MINI_ICON_MARGIN;
        int yIcon = areaY + 5 + nameExtent.y;
        for (int i = 0; i < miniIcons.length; i++) {
            EImage miniIcon = miniIcons[i];
            Point bounds = gc.getImageBounds(miniIcon);
            boolean enabled = false;
            switch (i) {
                case 0: // INPUT
                    enabled = !jobEntryCopy.isStart();
                    areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_MINI_ICON_INPUT, translateTo1To1(xIcon), translateTo1To1(yIcon), translateTo1To1(bounds.x), translateTo1To1(bounds.y), offset, jobMeta, jobEntryCopy));
                    break;
                case 1: // EDIT
                    enabled = true;
                    areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_MINI_ICON_EDIT, translateTo1To1(xIcon), translateTo1To1(yIcon), translateTo1To1(bounds.x), translateTo1To1(bounds.y), offset, jobMeta, jobEntryCopy));
                    break;
                case 2: // Job entry context menu
                    enabled = true;
                    areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_MINI_ICON_CONTEXT, translateTo1To1(xIcon), translateTo1To1(yIcon), translateTo1To1(bounds.x), translateTo1To1(bounds.y), offset, jobMeta, jobEntryCopy));
                    break;
                case 3: // OUTPUT
                    enabled = true;
                    areaOwners.add(new AreaOwner(AreaType.JOB_ENTRY_MINI_ICON_OUTPUT, translateTo1To1(xIcon), translateTo1To1(yIcon), translateTo1To1(bounds.x), translateTo1To1(bounds.y), offset, jobMeta, jobEntryCopy));
                    break;
                default:
                    break;
            }
            if (enabled) {
                gc.setAlpha(255);
            } else {
                gc.setAlpha(100);
            }
            gc.drawImage(miniIcon, xIcon, yIcon, BasePainter.FACTOR_1_TO_1);
            xIcon += bounds.x + 5;
        }
        gc.setTransform(translationX, translationY, 0, magnification);
    }
    // Restore the previous alpha value
    // 
    gc.setAlpha(alpha);
}
Also used: EImage (org.pentaho.di.core.gui.PrimitiveGCInterface.EImage), AreaOwner (org.pentaho.di.core.gui.AreaOwner), Point (org.pentaho.di.core.gui.Point), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint), Result (org.pentaho.di.core.Result)
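
The status overlay reduces to a three-way choice: a checkpoint icon wins, otherwise Result.getResult() selects the success or failure mini-icon. A compact, illustrative restatement of that decision using the EImage constants and drawImage call from the code above:

EImage statusIcon;
if (jobEntryResult.isCheckpoint()) {
    statusIcon = EImage.CHECKPOINT; // a checkpoint overrides success/failure
} else if (jobEntryResult.getResult().getResult()) {
    statusIcon = EImage.TRUE; // boolean result true: success mini-icon
} else {
    statusIcon = EImage.FALSE; // boolean result false: failure mini-icon
}
gc.drawImage(statusIcon, iconX, iconY, magnification);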

Aggregations

Result (org.pentaho.di.core.Result): 192
Test (org.junit.Test): 75
KettleException (org.pentaho.di.core.exception.KettleException): 75
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 64
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 57
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 40
FileObject (org.apache.commons.vfs2.FileObject): 34
Job (org.pentaho.di.job.Job): 32
IOException (java.io.IOException): 24
ResultFile (org.pentaho.di.core.ResultFile): 20
File (java.io.File): 17
ArrayList (java.util.ArrayList): 16
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 15
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 12
Pattern (java.util.regex.Pattern): 10
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 10
Database (org.pentaho.di.core.database.Database): 9
Date (java.util.Date): 8
Trans (org.pentaho.di.trans.Trans): 8
Matcher (java.util.regex.Matcher): 7