
Example 11 with LogLevel

Use of org.pentaho.di.core.logging.LogLevel in project pentaho-kettle by pentaho.

The class AbstractMessages, method get().

public String get(String key, String... params) {
    String res = null;
    String notFoundKey = "!" + key + "!";
    for (String pName : packageNames) {
        // Kettle will generate an exception message if there is a
        // failed message search. Since we are searching over multiple
        // packages, we don't want this message generated unless we
        // cannot find the message in any of the packages.
        LogLevel logLevel = DefaultLogLevel.getLogLevel();
        DefaultLogLevel.setLogLevel(LogLevel.NOTHING);
        try {
            res = BaseMessages.getString(pName, key);
        } finally {
            DefaultLogLevel.setLogLevel(logLevel);
        }
        if (!res.equals(notFoundKey)) {
            return res;
        }
    }
    // Not found in any package: repeat the lookup on the first package with normal logging so the usual error is generated.
    return BaseMessages.getString(packageNames.get(0), key);
}
Also used : LogLevel(org.pentaho.di.core.logging.LogLevel) DefaultLogLevel(org.pentaho.di.core.logging.DefaultLogLevel)
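For orientation, here is a minimal sketch of the suppress-then-restore pattern used above, extracted into a standalone helper. QuietLookup and its method name are hypothetical; only the DefaultLogLevel and BaseMessages calls come from the example.

import org.pentaho.di.core.logging.DefaultLogLevel;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.i18n.BaseMessages;

// Hypothetical helper: probe one package quietly, restoring the previous log
// level afterwards so a final, deliberate lookup can still log its error.
public class QuietLookup {
    public static String tryGet(String packageName, String key) {
        LogLevel previous = DefaultLogLevel.getLogLevel();
        DefaultLogLevel.setLogLevel(LogLevel.NOTHING); // silence the missing-key message
        try {
            return BaseMessages.getString(packageName, key);
        } finally {
            DefaultLogLevel.setLogLevel(previous); // always restore the old level
        }
    }
}

A caller can then compare the returned value against "!" + key + "!" to detect a miss, exactly as get() does above.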

Example 12 with LogLevel

Use of org.pentaho.di.core.logging.LogLevel in project pentaho-kettle by pentaho.

The class BasePluginType, method getTranslation().

protected static String getTranslation(String string, String packageName, String altPackageName, Class<?> resourceClass) {
    if (string == null) {
        return null;
    }
    if (string.startsWith("i18n:")) {
        String[] parts = string.split(":");
        if (parts.length != 3) {
            return string;
        } else {
            return BaseMessages.getString(parts[1], parts[2]);
        }
    } else {
        // Try the default package name
        // 
        String translation;
        if (!Utils.isEmpty(packageName)) {
            LogLevel oldLogLevel = DefaultLogLevel.getLogLevel();
            // avoid i18n messages for missing locale
            // 
            DefaultLogLevel.setLogLevel(LogLevel.BASIC);
            translation = BaseMessages.getString(packageName, string, resourceClass);
            if (translation.startsWith("!") && translation.endsWith("!")) {
                translation = BaseMessages.getString(PKG, string, resourceClass);
            }
            // restore the log level; if the last alternative fails as well, the miss is logged at the restored level
            // 
            DefaultLogLevel.setLogLevel(oldLogLevel);
            if (!Utils.isEmpty(altPackageName)) {
                if (translation.startsWith("!") && translation.endsWith("!")) {
                    translation = BaseMessages.getString(altPackageName, string, resourceClass);
                }
            }
        } else {
            // Translations are not supported, simply keep the original text.
            // 
            translation = string;
        }
        return translation;
    }
}
Also used : LogLevel(org.pentaho.di.core.logging.LogLevel) DefaultLogLevel(org.pentaho.di.core.logging.DefaultLogLevel)
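For orientation, a hedged sketch of the three input shapes getTranslation() distinguishes. The package, key, and class names are invented for illustration (and the method is protected, so real calls come from BasePluginType subclasses); only the "i18n:package:key" format follows from the split(":") logic above.

// Explicit reference: resolved via BaseMessages.getString(parts[1], parts[2]).
String explicit = getTranslation("i18n:org.pentaho.di.example.messages:ExampleStep.Name",
    "org.pentaho.di.example", null, ExampleStep.class); // ExampleStep is hypothetical

// Plain key: tried against packageName, then PKG, then altPackageName.
String implicit = getTranslation("ExampleStep.Name",
    "org.pentaho.di.example", "org.pentaho.di.example.alt", ExampleStep.class);

// Malformed "i18n:" reference (not exactly three parts): returned unchanged.
String unchanged = getTranslation("i18n:only-two-parts", null, null, null);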

Example 13 with LogLevel

Use of org.pentaho.di.core.logging.LogLevel in project pentaho-kettle by pentaho.

The class JobEntryJob, method execute().

@Override
public Result execute(Result result, int nr) throws KettleException {
    result.setEntryNr(nr);
    LogChannelFileWriter logChannelFileWriter = null;
    LogLevel jobLogLevel = parentJob.getLogLevel();
    // Set the embedded NamedCluster MetaStore provider key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    if (setLogfile) {
        String realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobJob.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        // create parent folder?
        if (!createParentFolder(realLogFilename)) {
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename), setAppendLogfile);
            logChannelFileWriter.startLogging();
        } catch (KettleException e) {
            logError("Unable to open file appender for file [" + getLogFilename() + "] : " + e.toString());
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        jobLogLevel = logFileLevel;
    }
    try {
        // First load the job, outside of the loop...
        if (parentJob.getJobMeta() != null) {
            // reset the internal variables again.
            // Maybe we should split up the variables even more like in UNIX shells.
            // The internal variables need to be reset to be able to use them properly
            // in two sequential sub-jobs.
            parentJob.getJobMeta().setInternalKettleVariables();
        }
        // Log, at detailed level, where the job is being loaded from, depending on the specification method.
        switch(specificationMethod) {
            case REPOSITORY_BY_NAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository : [" + directory + " : " + environmentSubstitute(jobname) + "]");
                }
                break;
            case FILENAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from XML file : [" + environmentSubstitute(filename) + "]");
                }
                break;
            case REPOSITORY_BY_REFERENCE:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository by reference : [" + jobObjectId + "]");
                }
                break;
            default:
                break;
        }
        JobMeta jobMeta = getJobMeta(rep, this);
        // Without the job metadata we cannot continue.
        if (jobMeta == null) {
            throw new KettleException("Unable to load the job: please specify the name and repository directory OR a filename");
        }
        verifyRecursiveExecution(parentJob, jobMeta);
        int iteration = 0;
        String[] args1 = arguments;
        // No arguments? Check the parent job's arguments.
        if (args1 == null || args1.length == 0) {
            args1 = parentJob.getArguments();
        }
        copyVariablesFrom(parentJob);
        setParentVariableSpace(parentJob);
        // 
        // For the moment only do variable translation at the start of a job, not
        // for every input row (if that would be switched on)
        // 
        String[] args = null;
        if (args1 != null) {
            args = new String[args1.length];
            for (int idx = 0; idx < args1.length; idx++) {
                args[idx] = environmentSubstitute(args1[idx]);
            }
        }
        RowMetaAndData resultRow = null;
        boolean first = true;
        List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
        while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
            first = false;
            // When executing once per input row, clear the result rows between iterations.
            if (execPerRow) {
                result.getRows().clear();
            }
            if (rows != null && execPerRow) {
                resultRow = rows.get(iteration);
            } else {
                resultRow = null;
            }
            NamedParams namedParam = new NamedParamsDefault();
            // Copy the parameter definitions and values from the parent job, if requested.
            if (paramsFromPrevious) {
                String[] parentParameters = parentJob.listParameters();
                for (int idx = 0; idx < parentParameters.length; idx++) {
                    String par = parentParameters[idx];
                    String def = parentJob.getParameterDefault(par);
                    String val = parentJob.getParameterValue(par);
                    String des = parentJob.getParameterDescription(par);
                    namedParam.addParameterDefinition(par, def, des);
                    namedParam.setParameterValue(par, val);
                }
            }
            // Apply the parameters configured on this job entry.
            if (parameters != null) {
                for (int idx = 0; idx < parameters.length; idx++) {
                    if (!Utils.isEmpty(parameters[idx])) {
                        // 
                        if (Const.indexOfString(parameters[idx], namedParam.listParameters()) < 0) {
                            // We have a parameter
                            try {
                                namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
                            } catch (DuplicateParamException e) {
                                // Should never happen
                                // 
                                logError("Duplicate parameter definition for " + parameters[idx]);
                            }
                        }
                        if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                            namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                        } else {
                            // A field name was supplied in the field column: take the value from the input row.
                            // 
                            String value = "";
                            if (resultRow != null) {
                                value = resultRow.getString(parameterFieldNames[idx], "");
                            }
                            namedParam.setParameterValue(parameters[idx], value);
                        }
                    }
                }
            }
            Result oneResult = new Result();
            List<RowMetaAndData> sourceRows = null;
            if (execPerRow) {
                if (argFromPrevious) {
                    // Copy the input row to the (command line) arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Just pass a single row
                    List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                    newList.add(resultRow);
                    sourceRows = newList;
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            } else {
                if (argFromPrevious) {
                    // Only put the first row in the arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Keep it as it was...
                    sourceRows = result.getRows();
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            }
            boolean doFallback = true;
            SlaveServer remoteSlaveServer = null;
            JobExecutionConfiguration executionConfiguration = new JobExecutionConfiguration();
            if (!Utils.isEmpty(runConfiguration)) {
                log.logBasic(BaseMessages.getString(PKG, "JobJob.RunConfig.Message"), runConfiguration);
                runConfiguration = environmentSubstitute(runConfiguration);
                executionConfiguration.setRunConfiguration(runConfiguration);
                try {
                    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), jobMeta, rep });
                    if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely()) {
                        result.setResult(true);
                        return result;
                    }
                    remoteSlaveServer = executionConfiguration.getRemoteServer();
                    doFallback = false;
                } catch (KettleException e) {
                    log.logError(e.getMessage(), getName());
                    result.setNrErrors(1);
                    result.setResult(false);
                    return result;
                }
            }
            if (doFallback) {
                // No run configuration in play: fall back to the statically configured remote slave server, if any.
                if (!Utils.isEmpty(remoteSlaveServerName)) {
                    String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
                    remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
                    if (remoteSlaveServer == null) {
                        throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
                    }
                }
            }
            if (remoteSlaveServer == null) {
                // Local execution...
                // 
                // Create a new job
                // 
                job = new Job(rep, jobMeta, this);
                job.setParentJob(parentJob);
                job.setLogLevel(jobLogLevel);
                job.shareVariablesWith(this);
                job.setInternalKettleVariables(this);
                job.copyParametersFrom(jobMeta);
                job.setInteractive(parentJob.isInteractive());
                if (job.isInteractive()) {
                    job.getJobEntryListeners().addAll(parentJob.getJobEntryListeners());
                }
                // Pass the socket repository all around.
                // 
                job.setSocketRepository(parentJob.getSocketRepository());
                // Set the parameters calculated above on this instance.
                // 
                job.clearParameters();
                String[] parameterNames = job.listParameters();
                for (int idx = 0; idx < parameterNames.length; idx++) {
                    // Grab the parameter value set in the job entry
                    // 
                    String thisValue = namedParam.getParameterValue(parameterNames[idx]);
                    if (!Utils.isEmpty(thisValue)) {
                        // Set the value as specified by the user in the job entry
                        // 
                        job.setParameterValue(parameterNames[idx], thisValue);
                    } else {
                    // Otherwise, inherit the parent job's value when passing all parameters.
                        if (isPassingAllParameters()) {
                            String parentValue = parentJob.getParameterValue(parameterNames[idx]);
                            if (!Utils.isEmpty(parentValue)) {
                                job.setParameterValue(parameterNames[idx], parentValue);
                            }
                        }
                    }
                }
                job.activateParameters();
                // Set the source rows we calculated above...
                // 
                job.setSourceRows(sourceRows);
                // Don't forget the logging...
                job.beginProcessing();
                // Link the job with the sub-job
                parentJob.getJobTracker().addJobTracker(job.getJobTracker());
                // Link both ways!
                job.getJobTracker().setParentJobTracker(parentJob.getJobTracker());
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    job.setPassedBatchId(parentJob.getBatchId());
                }
                job.setArguments(args);
                // Inform the delegation listeners that a sub-job has been started.
                for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
                    // TODO: copy some settings in the job execution configuration, not strictly needed
                    // but the execution configuration information is useful in case of a job re-start
                    // 
                    delegationListener.jobDelegationStarted(job, new JobExecutionConfiguration());
                }
                JobEntryJobRunner runner = new JobEntryJobRunner(job, result, nr, log);
                Thread jobRunnerThread = new Thread(runner);
                // PDI-6518: add a UUID to the thread name, otherwise threads share names
                // when job entries are executed in parallel in a parent job; if that
                // happens, the contained transformations start closing each other's connections.
                jobRunnerThread.setName(Const.NVL(job.getJobMeta().getName(), job.getJobMeta().getFilename()) + " UUID: " + UUID.randomUUID().toString());
                jobRunnerThread.start();
                // Wait for the sub-job to finish, or for the parent job to be stopped.
                while (!runner.isFinished() && !parentJob.isStopped()) {
                    try {
                        Thread.sleep(0, 1);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // if the parent-job was stopped, stop the sub-job too...
                if (parentJob.isStopped()) {
                    job.stopAll();
                    // Wait until finished!
                    runner.waitUntilFinished();
                }
                oneResult = runner.getResult();
            } else {
                // Make sure we can parameterize the slave server connection
                // 
                remoteSlaveServer.shareVariablesWith(this);
                // Remote execution...
                // 
                JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
                // lightClone() because the rows are overwritten on the next line.
                jobExecutionConfiguration.setPreviousResult(result.lightClone());
                jobExecutionConfiguration.getPreviousResult().setRows(sourceRows);
                jobExecutionConfiguration.setArgumentStrings(args);
                jobExecutionConfiguration.setVariables(this);
                jobExecutionConfiguration.setRemoteServer(remoteSlaveServer);
                jobExecutionConfiguration.setRepository(rep);
                jobExecutionConfiguration.setLogLevel(jobLogLevel);
                jobExecutionConfiguration.setPassingExport(passingExport);
                jobExecutionConfiguration.setExpandingRemoteJob(expandingRemoteJob);
                for (String param : namedParam.listParameters()) {
                    String defValue = namedParam.getParameterDefault(param);
                    String value = namedParam.getParameterValue(param);
                    jobExecutionConfiguration.getParams().put(param, Const.NVL(value, defValue));
                }
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    jobExecutionConfiguration.setPassedBatchId(parentJob.getBatchId());
                }
                // Send the XML over to the slave server
                // Also start the job over there...
                // 
                String carteObjectId = null;
                try {
                    carteObjectId = Job.sendToSlaveServer(jobMeta, jobExecutionConfiguration, rep, metaStore);
                } catch (KettleException e) {
                    // Perhaps the job exists on the remote server, carte is down, etc.
                    // This is an abort situation, stop the parent job...
                    // We want this in case we are running in parallel. The other job
                    // entries can stop running now.
                    // 
                    parentJob.stopAll();
                    // Re-throw so this job entry fails as well.
                    throw e;
                }
                // Now start the monitoring...
                // 
                SlaveServerJobStatus jobStatus = null;
                while (!parentJob.isStopped() && waitingToFinish) {
                    try {
                        jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
                        if (jobStatus.getResult() != null) {
                            // The job is finished, get the result...
                            // 
                            oneResult = jobStatus.getResult();
                            break;
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to verify the status of job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop looking too; chances are too low the server will come back on-line.
                        break;
                    }
                    // sleep for 1 second
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // Write log from carte to file
                if (setLogfile && jobStatus != null) {
                    String logFromCarte = jobStatus.getLoggingString();
                    if (!Utils.isEmpty(logFromCarte)) {
                        FileObject logfile = logChannelFileWriter.getLogFile();
                        OutputStream logFileOutputStream = null;
                        try {
                            logFileOutputStream = KettleVFS.getOutputStream(logfile, setAppendLogfile);
                            logFileOutputStream.write(logFromCarte.getBytes());
                            logFileOutputStream.flush();
                        } catch (Exception e) {
                            logError("There was an error logging to file '" + logfile + "'", e);
                        } finally {
                            try {
                                if (logFileOutputStream != null) {
                                    logFileOutputStream.close();
                                    logFileOutputStream = null;
                                }
                            } catch (Exception e) {
                                logError("There was an error closing log file file '" + logfile + "'", e);
                            }
                        }
                    }
                }
                if (!waitingToFinish) {
                    // Since the job was posted successfully, the result is true...
                    // 
                    oneResult = new Result();
                    oneResult.setResult(true);
                }
                if (parentJob.isStopped()) {
                    try {
                        // If the remote job is still running, try to stop it.
                        if (jobStatus == null || jobStatus.isRunning()) {
                            // Try a remote abort ...
                            // 
                            remoteSlaveServer.stopJob(jobMeta.getName(), carteObjectId);
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to stop job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop looking too; chances are too low the server will come back on-line.
                        break;
                    }
                }
            }
            // clear only the numbers, NOT the files or rows.
            result.clear();
            result.add(oneResult);
            // Set the result rows too, if any ...
            if (!Utils.isEmpty(oneResult.getRows())) {
                result.setRows(new ArrayList<RowMetaAndData>(oneResult.getRows()));
            }
            // If this iteration failed, add it to the error count.
            if (!oneResult.getResult()) {
                result.setNrErrors(result.getNrErrors() + 1);
            }
            iteration++;
        }
    } catch (KettleException ke) {
        logError("Error running job entry 'job' : ", ke);
        result.setResult(false);
        result.setNrErrors(1L);
    }
    if (setLogfile) {
        if (logChannelFileWriter != null) {
            logChannelFileWriter.stopLogging();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
            // If anything went wrong while writing the log file, flag an error.
            if (logChannelFileWriter.getException() != null) {
                logError("Unable to open log file [" + getLogFilename() + "] : ");
                logError(Const.getStackTracker(logChannelFileWriter.getException()));
                result.setNrErrors(1);
                result.setResult(false);
                return result;
            }
        }
    }
    if (result.getNrErrors() > 0) {
        result.setResult(false);
    } else {
        result.setResult(true);
    }
    return result;
}
Also used :
KettleException (org.pentaho.di.core.exception.KettleException)
JobMeta (org.pentaho.di.job.JobMeta)
DuplicateParamException (org.pentaho.di.core.parameters.DuplicateParamException)
NamedParams (org.pentaho.di.core.parameters.NamedParams)
OutputStream (java.io.OutputStream)
ArrayList (java.util.ArrayList)
SlaveServer (org.pentaho.di.cluster.SlaveServer)
LogLevel (org.pentaho.di.core.logging.LogLevel)
Result (org.pentaho.di.core.Result)
SlaveServerJobStatus (org.pentaho.di.www.SlaveServerJobStatus)
RowMetaAndData (org.pentaho.di.core.RowMetaAndData)
FileObject (org.apache.commons.vfs2.FileObject)
Job (org.pentaho.di.job.Job)
NamedParamsDefault (org.pentaho.di.core.parameters.NamedParamsDefault)
LogChannelFileWriter (org.pentaho.di.core.logging.LogChannelFileWriter)
ResultFile (org.pentaho.di.core.ResultFile)
JobExecutionConfiguration (org.pentaho.di.job.JobExecutionConfiguration)
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException)
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)
DelegationListener (org.pentaho.di.job.DelegationListener)
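The log-file handling is the part of this method that is easiest to reuse in isolation. Below is a reduced sketch with a hypothetical helper name; the LogChannelFileWriter calls mirror the example above.

import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannelFileWriter;
import org.pentaho.di.core.vfs.KettleVFS;

// Hypothetical helper: capture everything written to one log channel in a file
// while a body of work (e.g. a sub-job) runs.
public class SubJobLogCapture {
    public static void runWithLogFile(String logChannelId, String filename, Runnable body)
            throws KettleException {
        FileObject logFile = KettleVFS.getFileObject(filename);
        LogChannelFileWriter writer = new LogChannelFileWriter(logChannelId, logFile, false);
        writer.startLogging(); // start appending the channel's output to the file
        try {
            body.run(); // e.g. start the sub-job and wait for it to finish
        } finally {
            writer.stopLogging(); // flush and detach the file writer
            if (writer.getException() != null) {
                // surface I/O failures the writer collected while logging
                throw new KettleException("Error writing log file", writer.getException());
            }
        }
    }
}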

Example 14 with LogLevel

Use of org.pentaho.di.core.logging.LogLevel in project pentaho-kettle by pentaho.

The class JobEntryShell, method execute().

public Result execute(Result result, int nr) throws KettleException {
    FileLoggingEventListener loggingEventListener = null;
    LogLevel shellLogLevel = parentJob.getLogLevel();
    if (setLogfile) {
        String realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobEntryShell.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            loggingEventListener = new FileLoggingEventListener(getLogChannelId(), realLogFilename, setAppendLogfile);
            KettleLogStore.getAppender().addLoggingEventListener(loggingEventListener);
        } catch (KettleException e) {
            logError(BaseMessages.getString(PKG, "JobEntryShell.Error.UnableopenAppenderFile", getLogFilename(), e.toString()));
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        shellLogLevel = logFileLevel;
    }
    log.setLogLevel(shellLogLevel);
    result.setEntryNr(nr);
    // "Translate" the arguments for later
    String[] substArgs = null;
    if (arguments != null) {
        substArgs = new String[arguments.length];
        for (int idx = 0; idx < arguments.length; idx++) {
            substArgs[idx] = environmentSubstitute(arguments[idx]);
        }
    }
    int iteration = 0;
    String[] args = substArgs;
    RowMetaAndData resultRow = null;
    boolean first = true;
    List<RowMetaAndData> rows = result.getRows();
    if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryShell.Log.FoundPreviousRows", "" + (rows != null ? rows.size() : 0)));
    }
    while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
        first = false;
        if (rows != null && execPerRow) {
            resultRow = rows.get(iteration);
        } else {
            resultRow = null;
        }
        List<RowMetaAndData> cmdRows = null;
        if (execPerRow) {
            if (argFromPrevious) {
                if (resultRow != null) {
                    args = new String[resultRow.size()];
                    for (int i = 0; i < resultRow.size(); i++) {
                        args[i] = resultRow.getString(i, null);
                    }
                }
            } else {
                // Just pass a single row
                List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                newList.add(resultRow);
                cmdRows = newList;
            }
        } else {
            if (argFromPrevious) {
                // Only put the first row in the arguments
                args = null;
                if (resultRow != null) {
                    args = new String[resultRow.size()];
                    for (int i = 0; i < resultRow.size(); i++) {
                        args[i] = resultRow.getString(i, null);
                    }
                } else {
                    cmdRows = rows;
                }
            } else {
                // Keep it as it was...
                cmdRows = rows;
            }
        }
        executeShell(result, cmdRows, args);
        iteration++;
    }
    if (setLogfile) {
        if (loggingEventListener != null) {
            KettleLogStore.getAppender().removeLoggingEventListener(loggingEventListener);
            loggingEventListener.close();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, loggingEventListener.getFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        }
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) FileLoggingEventListener(org.pentaho.di.core.logging.FileLoggingEventListener) ArrayList(java.util.ArrayList) ResultFile(org.pentaho.di.core.ResultFile) LogLevel(org.pentaho.di.core.logging.LogLevel)
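The listener-based capture here is the shell-entry counterpart of the LogChannelFileWriter pattern in Example 13. A reduced sketch with a hypothetical helper name; the FileLoggingEventListener and KettleLogStore calls mirror the example above.

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.FileLoggingEventListener;
import org.pentaho.di.core.logging.KettleLogStore;

// Hypothetical helper: write all log events for one channel to a file while
// a body of work (e.g. a shell command) runs.
public class ShellLogCapture {
    public static void runWithLogFile(String logChannelId, String filename, Runnable body)
            throws KettleException {
        FileLoggingEventListener listener =
            new FileLoggingEventListener(logChannelId, filename, false);
        KettleLogStore.getAppender().addLoggingEventListener(listener); // start capturing
        try {
            body.run(); // e.g. execute the shell command
        } finally {
            KettleLogStore.getAppender().removeLoggingEventListener(listener);
            listener.close(); // flush and release the file handle
        }
    }
}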

Example 15 with LogLevel

Use of org.pentaho.di.core.logging.LogLevel in project pentaho-kettle by pentaho.

The class StepMockHelper, method redirectLog().

/**
 *  In case you need to use log methods during tests, call the
 *  redirectLog method after creating a new StepMockHelper object.
 *  Examples:
 *    stepMockHelper.redirectLog( System.out, LogLevel.ROWLEVEL );
 *    stepMockHelper.redirectLog( new FileOutputStream("log.txt"), LogLevel.BASIC );
 */
public void redirectLog(final OutputStream out, LogLevel channelLogLevel) {
    final LogChannel log = spy(new LogChannel(this.getClass().getName(), true));
    log.setLogLevel(channelLogLevel);
    when(logChannelInterfaceFactory.create(any(), any(LoggingObjectInterface.class))).thenReturn(log);
    doAnswer(new Answer<Object>() {

        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            LogLevel logLevel = (LogLevel) args[1];
            LogLevel channelLogLevel = log.getLogLevel();
            if (!logLevel.isVisible(channelLogLevel)) {
                // not for our eyes.
                return null;
            }
            if (channelLogLevel.getLevel() >= logLevel.getLevel()) {
                LogMessageInterface logMessage = (LogMessageInterface) args[0];
                out.write(logMessage.getMessage().getBytes());
                out.write('\n');
                out.write('\r');
                out.flush();
                return true;
            }
            return false;
        }
    }).when(log).println((LogMessageInterface) anyObject(), (LogLevel) anyObject());
}
Also used : InvocationOnMock(org.mockito.invocation.InvocationOnMock) LogMessageInterface(org.pentaho.di.core.logging.LogMessageInterface) LogChannel(org.pentaho.di.core.logging.LogChannel) Matchers.anyObject(org.mockito.Matchers.anyObject) LoggingObjectInterface(org.pentaho.di.core.logging.LoggingObjectInterface) LogLevel(org.pentaho.di.core.logging.LogLevel)
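A hedged test sketch based on the Javadoc above. The step classes, constructor arguments, and the cleanUp() call follow the usual StepMockHelper pattern in the Kettle test suite, but are assumptions here rather than part of this example.

import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.trans.steps.sort.SortRowsData;
import org.pentaho.di.trans.steps.sort.SortRowsMeta;

// Assumed constructor shape: (step name, meta class, data class).
StepMockHelper<SortRowsMeta, SortRowsData> helper =
    new StepMockHelper<>("Sort rows", SortRowsMeta.class, SortRowsData.class);
helper.redirectLog(System.out, LogLevel.ROWLEVEL); // row-level and above go to stdout

// ... construct and run the step under test; its log output now appears on System.out ...

helper.cleanUp(); // assumed teardown that unregisters the mocked log channel factory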

Aggregations

LogLevel (org.pentaho.di.core.logging.LogLevel): 19 usages
KettleException (org.pentaho.di.core.exception.KettleException): 9 usages
Job (org.pentaho.di.job.Job): 5 usages
IOException (java.io.IOException): 4 usages
PrintWriter (java.io.PrintWriter): 4 usages
ServletException (javax.servlet.ServletException): 4 usages
SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject): 4 usages
Repository (org.pentaho.di.repository.Repository): 4 usages
Trans (org.pentaho.di.trans.Trans): 4 usages
ArrayList (java.util.ArrayList): 3 usages
ResultFile (org.pentaho.di.core.ResultFile): 3 usages
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 3 usages
DefaultLogLevel (org.pentaho.di.core.logging.DefaultLogLevel): 3 usages
JobExecutionConfiguration (org.pentaho.di.job.JobExecutionConfiguration): 3 usages
JobMeta (org.pentaho.di.job.JobMeta): 3 usages
Matchers.anyObject (org.mockito.Matchers.anyObject): 2 usages
InvocationOnMock (org.mockito.invocation.InvocationOnMock): 2 usages
SlaveServer (org.pentaho.di.cluster.SlaveServer): 2 usages
Result (org.pentaho.di.core.Result): 2 usages
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 2 usages