
Example 36 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

From the class JobLogDelegate, the method showErrors:

public void showErrors() {
    String all = jobLogText.getText();
    ArrayList<String> err = new ArrayList<String>();
    int i = 0;
    int startpos = 0;
    int crlen = Const.CR.length();
    String line = null;
    String lineUpper = null;
    while (i < all.length() - crlen) {
        if (all.substring(i, i + crlen).equalsIgnoreCase(Const.CR)) {
            line = all.substring(startpos, i);
            lineUpper = line.toUpperCase();
            if (lineUpper.indexOf(BaseMessages.getString(PKG, "JobLog.System.ERROR")) >= 0 || lineUpper.indexOf(BaseMessages.getString(PKG, "JobLog.System.EXCEPTION")) >= 0) {
                err.add(line);
            }
            // New start of line
            startpos = i + crlen;
        }
        i++;
    }
    line = all.substring(startpos);
    lineUpper = line.toUpperCase();
    if (lineUpper.indexOf(BaseMessages.getString(PKG, "JobLog.System.ERROR")) >= 0 || lineUpper.indexOf(BaseMessages.getString(PKG, "JobLog.System.EXCEPTION")) >= 0) {
        err.add(line);
    }
    if (err.size() > 0) {
        String[] err_lines = new String[err.size()];
        for (i = 0; i < err_lines.length; i++) {
            err_lines[i] = err.get(i);
        }
        EnterSelectionDialog esd = new EnterSelectionDialog(jobGraph.getShell(), err_lines, BaseMessages.getString(PKG, "JobLog.Dialog.ErrorLines.Title"), BaseMessages.getString(PKG, "JobLog.Dialog.ErrorLines.Message"));
        line = esd.open();
        if (line != null) {
            JobMeta jobMeta = jobGraph.getManagedObject();
            for (i = 0; i < jobMeta.nrJobEntries(); i++) {
                JobEntryCopy entryCopy = jobMeta.getJobEntry(i);
                if (line.indexOf(entryCopy.getName()) >= 0) {
                    spoon.editJobEntry(jobMeta, entryCopy);
                }
            }
        // System.out.println("Error line selected: "+line);
        }
    }
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) ArrayList(java.util.ArrayList) EnterSelectionDialog(org.pentaho.di.ui.core.dialog.EnterSelectionDialog)
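The entry lookup at the end of showErrors() relies on JobMeta's indexed access to its job entry copies. A minimal sketch of that pattern, assuming a jobMeta instance and a text to search are already available; the helper name findEntryByName is hypothetical and only illustrates the calls used above:

// Hypothetical helper mirroring the lookup in showErrors(): walk the job entry
// copies and return the first one whose name occurs in the given text.
private JobEntryCopy findEntryByName( JobMeta jobMeta, String text ) {
    for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) {
        JobEntryCopy entryCopy = jobMeta.getJobEntry( i );
        if ( text.indexOf( entryCopy.getName() ) >= 0 ) {
            return entryCopy;
        }
    }
    return null; // no matching entry found
}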

Example 37 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

From the class RepositoryImporter, the method importAll:

@Override
public synchronized void importAll(RepositoryImportFeedbackInterface feedback, String fileDirectory, String[] filenames, RepositoryDirectoryInterface baseDirectory, boolean overwrite, boolean continueOnError, String versionComment) {
    this.baseDirectory = baseDirectory;
    this.overwrite = overwrite;
    this.continueOnError = continueOnError;
    this.versionComment = versionComment;
    String importPathCompatibility = System.getProperty(Const.KETTLE_COMPATIBILITY_IMPORT_PATH_ADDITION_ON_VARIABLES, "N");
    this.needToCheckPathForVariables = "N".equalsIgnoreCase(importPathCompatibility);
    askReplace = Props.getInstance().askAboutReplacingDatabaseConnections();
    if (askReplace) {
        if (feedback instanceof HasOverwritePrompter) {
            Props.getInstance().setProperty(IMPORT_ASK_ABOUT_REPLACE_CS, "Y");
            Props.getInstance().setProperty(IMPORT_ASK_ABOUT_REPLACE_DB, "Y");
            Props.getInstance().setProperty(IMPORT_ASK_ABOUT_REPLACE_PS, "Y");
            Props.getInstance().setProperty(IMPORT_ASK_ABOUT_REPLACE_SS, "Y");
            this.overwritePrompter = ((HasOverwritePrompter) feedback).getOverwritePrompter();
        } else {
            this.overwritePrompter = new OverwritePrompter() {

                @Override
                public boolean overwritePrompt(String arg0, String arg1, String arg2) {
                    throw new RuntimeException(BaseMessages.getString(PKG, "RepositoryImporter.CannotPrompt.Label"));
                }
            };
        }
    } else {
        final boolean replaceExisting = Props.getInstance().replaceExistingDatabaseConnections();
        this.overwritePrompter = new OverwritePrompter() {

            @Override
            public boolean overwritePrompt(String arg0, String arg1, String arg2) {
                return replaceExisting;
            }
        };
    }
    referencingObjects = new ArrayList<RepositoryObject>();
    feedback.setLabel(BaseMessages.getString(PKG, "RepositoryImporter.ImportXML.Label"));
    try {
        loadSharedObjects();
        RepositoryImportLocation.setRepositoryImportLocation(baseDirectory);
        for (int ii = 0; ii < filenames.length; ++ii) {
            final String filename = (!Utils.isEmpty(fileDirectory)) ? fileDirectory + Const.FILE_SEPARATOR + filenames[ii] : filenames[ii];
            if (log.isBasic()) {
                log.logBasic("Import objects from XML file [" + filename + "]");
            }
            feedback.addLog(BaseMessages.getString(PKG, "RepositoryImporter.WhichFile.Log", filename));
            // To where?
            feedback.setLabel(BaseMessages.getString(PKG, "RepositoryImporter.WhichDir.Label"));
            // 
            try {
                RepositoryExportSaxParser parser = new RepositoryExportSaxParser(filename, feedback);
                parser.parse(this);
            } catch (Exception e) {
                addException(e);
                feedback.showError(BaseMessages.getString(PKG, "RepositoryImporter.ErrorGeneral.Title"), BaseMessages.getString(PKG, "RepositoryImporter.ErrorGeneral.Message"), e);
            }
        }
        // Correct those jobs and transformations that contain references to other objects.
        for (RepositoryObject repoObject : referencingObjects) {
            switch(repoObject.getObjectType()) {
                case TRANSFORMATION:
                    TransMeta transMeta = rep.loadTransformation(repoObject.getObjectId(), null);
                    saveTransformationToRepo(transMeta, feedback);
                    break;
                case JOB:
                    JobMeta jobMeta = rep.loadJob(repoObject.getObjectId(), null);
                    saveJobToRepo(jobMeta, feedback);
                    break;
                default:
                    throw new KettleException(BaseMessages.getString(PKG, "RepositoryImporter.ErrorDetectFileType"));
            }
        }
        feedback.addLog(BaseMessages.getString(PKG, "RepositoryImporter.ImportFinished.Log"));
    } catch (Exception e) {
        addException(e);
        feedback.showError(BaseMessages.getString(PKG, "RepositoryImporter.ErrorGeneral.Title"), BaseMessages.getString(PKG, "RepositoryImporter.ErrorGeneral.Message"), e);
    } finally {
        // set the repository import location to null when done!
        RepositoryImportLocation.setRepositoryImportLocation(null);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) TransMeta(org.pentaho.di.trans.TransMeta) OverwritePrompter(org.pentaho.di.core.gui.OverwritePrompter) HasOverwritePrompter(org.pentaho.di.core.gui.HasOverwritePrompter) KettleException(org.pentaho.di.core.exception.KettleException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleMissingPluginsException(org.pentaho.di.core.exception.KettleMissingPluginsException) LookupReferencesException(org.pentaho.di.core.exception.LookupReferencesException) SAXParseException(org.xml.sax.SAXParseException) HasOverwritePrompter(org.pentaho.di.core.gui.HasOverwritePrompter)
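A hedged usage sketch of importAll(), following the signature shown above. The importer, feedback and targetDir objects are assumed to already exist, and the directory and file names are illustrative only:

// Assumes 'importer' (RepositoryImporter), 'feedback' (RepositoryImportFeedbackInterface)
// and 'targetDir' (RepositoryDirectoryInterface) were obtained elsewhere.
String[] exportFiles = new String[] { "repository_export.xml" }; // illustrative file name
importer.importAll( feedback, "/tmp/exports", exportFiles, targetDir,
    true,   // overwrite existing objects
    false,  // do not continue on error
    "Imported from XML export" );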

Example 38 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

From the class JobEntryJob, the method exportResources:

/**
 * Exports the object to a flat-file system, adding content with filename keys to a set of definitions. The supplied
 * resource naming interface allows the object to name its resources appropriately without worrying about
 * implementation-specific details.
 *
 * @param space
 *          The variable space to resolve (environment) variables with.
 * @param definitions
 *          The map containing the filenames and content
 * @param namingInterface
 *          The resource naming interface allows the object to be named appropriately
 * @param repository
 *          The repository to load resources from
 * @param metaStore
 *          the metaStore to load external metadata from
 *
 * @return The filename for this object. (also contained in the definitions map)
 * @throws KettleException
 *           in case something goes wrong during the export
 */
@Override
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore) throws KettleException {
    // Try to load the job from the repository or from a file.
    // Modify this recursively too...
    // 
    // AGAIN: there is no need to clone this job entry because the caller is
    // responsible for this.
    // 
    // First load the job meta data...
    // 
    // To make sure variables are available.
    copyVariablesFrom(space);
    JobMeta jobMeta = getJobMeta(repository, metaStore, space);
    // Also go down into the job and export the files there. (going down
    // recursively)
    // 
    String proposedNewFilename = jobMeta.exportResources(jobMeta, definitions, namingInterface, repository, metaStore);
    // To get a relative path to it, we inject
    // ${Internal.Entry.Current.Directory}
    // 
    String newFilename = "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + proposedNewFilename;
    // Set the filename in the job
    // 
    jobMeta.setFilename(newFilename);
    // exports always reside in the root directory, in case we want to turn this
    // into a file repository...
    // 
    jobMeta.setRepositoryDirectory(new RepositoryDirectory());
    // export to filename ALWAYS (this allows the exported XML to be executed remotely)
    // 
    setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
    // change it in the job entry
    // 
    filename = newFilename;
    return proposedNewFilename;
}
Also used : JobMeta(org.pentaho.di.job.JobMeta) RepositoryDirectory(org.pentaho.di.repository.RepositoryDirectory)
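A minimal sketch of invoking exportResources() on a job entry, assuming the surrounding objects (space, namingInterface, repository, metaStore) already exist and that java.util.Map/HashMap and ResourceDefinition are imported; the definitions map simply collects exported content keyed by filename:

// Assumes 'jobEntryJob', 'space', 'namingInterface', 'repository' and 'metaStore'
// are available from the surrounding export logic.
Map<String, ResourceDefinition> definitions = new HashMap<String, ResourceDefinition>();
String exportedFilename =
    jobEntryJob.exportResources( space, definitions, namingInterface, repository, metaStore );
// 'definitions' now also contains the exported job XML, keyed by 'exportedFilename'.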

Example 39 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

From the class KettleDatabaseRepositoryJobDelegate, the method loadJobMeta:

/**
 * Load a job from a repository directory.
 *
 * @param jobname
 *          the name of the job
 * @param repdir
 *          the directory in which the job resides
 * @param monitor
 *          an optional progress monitor (may be null)
 * @return the loaded JobMeta
 * @throws KettleException
 *           in case the job cannot be found or read
 */
public JobMeta loadJobMeta(String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor) throws KettleException {
    JobMeta jobMeta = new JobMeta();
    synchronized (repository) {
        try {
            // Clear everything...
            jobMeta.clear();
            jobMeta.setRepositoryDirectory(repdir);
            // Get the transformation id
            jobMeta.setObjectId(getJobID(jobname, repdir.getObjectId()));
            // If no valid id is available in the database, then give error...
            if (jobMeta.getObjectId() != null) {
                // Load the notes...
                ObjectId[] noteids = repository.getJobNoteIDs(jobMeta.getObjectId());
                ObjectId[] jecids = repository.getJobEntryCopyIDs(jobMeta.getObjectId());
                ObjectId[] hopid = repository.getJobHopIDs(jobMeta.getObjectId());
                int nrWork = 2 + noteids.length + jecids.length + hopid.length;
                if (monitor != null) {
                    monitor.beginTask(BaseMessages.getString(PKG, "JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobInformation"));
                }
                RowMetaAndData jobRow = getJob(jobMeta.getObjectId());
                jobMeta.setName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_NAME, null));
                jobMeta.setDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null));
                jobMeta.setExtendedDescription(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null));
                jobMeta.setJobversion(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null));
                jobMeta.setJobstatus(Const.toInt(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null), -1));
                jobMeta.setCreatedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null));
                jobMeta.setCreatedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date()));
                jobMeta.setModifiedUser(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null));
                jobMeta.setModifiedDate(jobRow.getDate(KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date()));
                long id_logdb = jobRow.getInteger(KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0);
                if (id_logdb > 0) {
                    // Get the logconnection
                    // 
                    DatabaseMeta logDb = repository.loadDatabaseMeta(new LongObjectId(id_logdb), null);
                    jobMeta.getJobLogTable().setConnectionName(logDb.getName());
                // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith(jobMeta);
                }
                jobMeta.getJobLogTable().setTableName(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null));
                jobMeta.getJobLogTable().setBatchIdUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false));
                jobMeta.getJobLogTable().setLogFieldUsed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false));
                jobMeta.getJobLogTable().setLogSizeLimit(getJobAttributeString(jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT));
                jobMeta.setBatchIdPassed(jobRow.getBoolean(KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false));
                // Load all the log tables for the job...
                // 
                RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute(repository.connectionDelegate, jobMeta.getObjectId());
                for (LogTableInterface logTable : jobMeta.getLogTables()) {
                    logTable.loadFromRepository(attributeInterface);
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                // 
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository"));
                }
                // Read objects from the shared XML file & the repository
                try {
                    jobMeta.setSharedObjectsFile(jobRow.getString(KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null));
                    jobMeta.setSharedObjects(repository != null ? repository.readJobMetaSharedObjects(jobMeta) : jobMeta.readSharedObjects());
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString()));
                    // 
                    log.logError(Const.getStackTracker(e));
                }
                if (monitor != null) {
                    monitor.worked(1);
                }
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + noteids.length + " notes");
                }
                for (int i = 0; i < noteids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length);
                    }
                    NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
                    if (jobMeta.indexOfNote(ni) < 0) {
                        jobMeta.addNote(ni);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the group attributes map
                // 
                jobMeta.setAttributesMap(loadJobAttributesMap(jobMeta.getObjectId()));
                // Load the job entries...
                // 
                // Keep a unique list of job entries to facilitate in the loading.
                // 
                List<JobEntryInterface> jobentries = new ArrayList<JobEntryInterface>();
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + jecids.length + " job entries");
                }
                for (int i = 0; i < jecids.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length));
                    }
                    JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy(jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname);
                    if (jec.isMissing()) {
                        jobMeta.addMissingEntry((MissingEntry) jec.getEntry());
                    }
                    // Also set the copy number...
                    // We count the number of job entry copies that use the job
                    // entry
                    // 
                    int copyNr = 0;
                    for (JobEntryCopy copy : jobMeta.getJobCopies()) {
                        if (jec.getEntry() == copy.getEntry()) {
                            copyNr++;
                        }
                    }
                    jec.setNr(copyNr);
                    int idx = jobMeta.indexOfJobEntry(jec);
                    if (idx < 0) {
                        if (jec.getName() != null && jec.getName().length() > 0) {
                            jobMeta.addJobEntry(jec);
                        }
                    } else {
                        // replace it!
                        jobMeta.setJobEntry(idx, jec);
                    }
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                // Load the hops...
                if (log.isDetailed()) {
                    log.logDetailed("Loading " + hopid.length + " job hops");
                }
                for (int i = 0; i < hopid.length; i++) {
                    if (monitor != null) {
                        monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + hopid.length);
                    }
                    JobHopMeta hi = loadJobHopMeta(hopid[i], jobMeta.getJobCopies());
                    jobMeta.getJobhops().add(hi);
                    if (monitor != null) {
                        monitor.worked(1);
                    }
                }
                loadRepParameters(jobMeta);
                // Finally, clear the changed flags...
                jobMeta.clearChanged();
                if (monitor != null) {
                    monitor.subTask(BaseMessages.getString(PKG, "JobMeta.Monitor.FinishedLoadOfJob"));
                }
                if (monitor != null) {
                    monitor.done();
                }
                // close prepared statements, minimize locking etc.
                // 
                repository.connectionDelegate.closeAttributeLookupPreparedStatements();
                return jobMeta;
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.CanNotFindJob") + jobname);
            }
        } catch (KettleException dbe) {
            throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
        } finally {
            jobMeta.initializeVariablesFrom(jobMeta.getParentVariableSpace());
            jobMeta.setInternalKettleVariables();
        }
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) JobHopMeta(org.pentaho.di.job.JobHopMeta) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) LongObjectId(org.pentaho.di.repository.LongObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ArrayList(java.util.ArrayList) LongObjectId(org.pentaho.di.repository.LongObjectId) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) ValueMetaDate(org.pentaho.di.core.row.value.ValueMetaDate) RepositoryAttributeInterface(org.pentaho.di.repository.RepositoryAttributeInterface) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) NotePadMeta(org.pentaho.di.core.NotePadMeta)
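A short usage sketch of loadJobMeta(), following the signature above. The delegate instance and the target directory are assumed to come from an open KettleDatabaseRepository connection, and the job name is illustrative:

// Assumes 'jobDelegate' is the KettleDatabaseRepositoryJobDelegate of an open repository
// and 'repdir' is the RepositoryDirectoryInterface that contains the job.
JobMeta loaded = jobDelegate.loadJobMeta( "daily_load", repdir, null ); // no progress monitor
// The returned JobMeta has its notes, job entry copies and hops fully populated.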

Example 40 with JobMeta

Use of org.pentaho.di.job.JobMeta in project pentaho-kettle by pentaho.

From the class KettleFileRepository, the method save:

public void save(RepositoryElementInterface repositoryElement, String versionComment, ProgressMonitorListener monitor, ObjectId parentId, boolean used) throws KettleException {
    try {
        if (!(repositoryElement instanceof XMLInterface) && !(repositoryElement instanceof SharedObjectInterface)) {
            throw new KettleException("Class [" + repositoryElement.getClass().getName() + "] needs to implement the XML Interface in order to save it to disk");
        }
        if (!Utils.isEmpty(versionComment)) {
            insertLogEntry("Save repository element : " + repositoryElement.toString() + " : " + versionComment);
        }
        ObjectId objectId = new StringObjectId(calcObjectId(repositoryElement));
        FileObject fileObject = getFileObject(repositoryElement);
        String xml = ((XMLInterface) repositoryElement).getXML();
        OutputStream os = KettleVFS.getOutputStream(fileObject, false);
        os.write(xml.getBytes(Const.XML_ENCODING));
        os.close();
        if (repositoryElement instanceof ChangedFlagInterface) {
            ((ChangedFlagInterface) repositoryElement).clearChanged();
        }
        // 
        if (repositoryElement.getObjectId() != null && !repositoryElement.getObjectId().equals(objectId)) {
            delObject(repositoryElement.getObjectId());
        }
        repositoryElement.setObjectId(objectId);
        // 
        if (repositoryElement instanceof TransMeta) {
            ((TransMeta) repositoryElement).saveMetaStoreObjects(this, metaStore);
        }
        if (repositoryElement instanceof JobMeta) {
            ((JobMeta) repositoryElement).saveMetaStoreObjects(this, metaStore);
        }
    } catch (Exception e) {
        throw new KettleException("Unable to save repository element [" + repositoryElement + "] to XML file : " + calcFilename(repositoryElement), e);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) StringObjectId(org.pentaho.di.repository.StringObjectId) ObjectId(org.pentaho.di.repository.ObjectId) ChangedFlagInterface(org.pentaho.di.core.changed.ChangedFlagInterface) OutputStream(java.io.OutputStream) TransMeta(org.pentaho.di.trans.TransMeta) SharedObjectInterface(org.pentaho.di.shared.SharedObjectInterface) FileObject(org.apache.commons.vfs2.FileObject) XMLInterface(org.pentaho.di.core.xml.XMLInterface) StringObjectId(org.pentaho.di.repository.StringObjectId) KettleFileException(org.pentaho.di.core.exception.KettleFileException) FileSystemException(org.apache.commons.vfs2.FileSystemException) KettleException(org.pentaho.di.core.exception.KettleException) IOException(java.io.IOException)
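A hedged sketch of calling save() on the file repository, using the five-argument signature shown above. The repository and jobMeta instances are assumed to exist already, and the version comment is illustrative:

// Assumes 'fileRepository' (KettleFileRepository) and 'jobMeta' (JobMeta) already exist.
// No progress monitor or parent object id is supplied here.
fileRepository.save( jobMeta, "Initial import of job", null, null, false );
// The job is serialized via getXML() and written as an XML file in the repository location.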

Aggregations

JobMeta (org.pentaho.di.job.JobMeta): 254
Test (org.junit.Test): 88
TransMeta (org.pentaho.di.trans.TransMeta): 69
KettleException (org.pentaho.di.core.exception.KettleException): 62
JobEntryCopy (org.pentaho.di.job.entry.JobEntryCopy): 48
Job (org.pentaho.di.job.Job): 45
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 27
Repository (org.pentaho.di.repository.Repository): 25
RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface): 25
Point (org.pentaho.di.core.gui.Point): 24
ArrayList (java.util.ArrayList): 23
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 22
SlaveServer (org.pentaho.di.cluster.SlaveServer): 17
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 17
FileObject (org.apache.commons.vfs2.FileObject): 16
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 16
LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface): 15
SimpleLoggingObject (org.pentaho.di.core.logging.SimpleLoggingObject): 15
PrintWriter (java.io.PrintWriter): 12
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 12