Search in sources :

Example 51 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class JobMeta method getStringList.

/**
 * Get a list of all the strings used in this job.
 *
 * @return A list of StringSearchResult with strings used in the job
 */
public List<StringSearchResult> getStringList(boolean searchSteps, boolean searchDatabases, boolean searchNotes) {
    List<StringSearchResult> stringList = new ArrayList<StringSearchResult>();
    if (searchSteps) {
        // Loop over all job entries and collect their names, descriptions and metadata strings...
        for (int i = 0; i < nrJobEntries(); i++) {
            JobEntryCopy entryMeta = getJobEntry(i);
            stringList.add(new StringSearchResult(entryMeta.getName(), entryMeta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.JobEntryName")));
            if (entryMeta.getDescription() != null) {
                stringList.add(new StringSearchResult(entryMeta.getDescription(), entryMeta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.JobEntryDescription")));
            }
            JobEntryInterface metaInterface = entryMeta.getEntry();
            StringSearcher.findMetaData(metaInterface, 1, stringList, entryMeta, this);
        }
    }
    // Loop over all database connections and collect their connection attributes...
    if (searchDatabases) {
        for (int i = 0; i < nrDatabases(); i++) {
            DatabaseMeta meta = getDatabase(i);
            stringList.add(new StringSearchResult(meta.getName(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabaseConnectionName")));
            if (meta.getHostname() != null) {
                stringList.add(new StringSearchResult(meta.getHostname(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabaseHostName")));
            }
            if (meta.getDatabaseName() != null) {
                stringList.add(new StringSearchResult(meta.getDatabaseName(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabaseName")));
            }
            if (meta.getUsername() != null) {
                stringList.add(new StringSearchResult(meta.getUsername(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabaseUsername")));
            }
            if (meta.getPluginId() != null) {
                stringList.add(new StringSearchResult(meta.getPluginId(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabaseTypeDescription")));
            }
            if (meta.getDatabasePortNumberString() != null) {
                stringList.add(new StringSearchResult(meta.getDatabasePortNumberString(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabasePort")));
            }
            if (meta.getServername() != null) {
                stringList.add(new StringSearchResult(meta.getServername(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabaseServer")));
            }
            // Passwords are searched as well.
            if (meta.getPassword() != null) {
                stringList.add(new StringSearchResult(meta.getPassword(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.DatabasePassword")));
            }
        }
    }
    // Loop over all notes and collect their text...
    if (searchNotes) {
        for (int i = 0; i < nrNotes(); i++) {
            NotePadMeta meta = getNote(i);
            if (meta.getNote() != null) {
                stringList.add(new StringSearchResult(meta.getNote(), meta, this, BaseMessages.getString(PKG, "JobMeta.SearchMetadata.NotepadText")));
            }
        }
    }
    return stringList;
}
Also used : JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) JobEntryInterface(org.pentaho.di.job.entry.JobEntryInterface) ArrayList(java.util.ArrayList) StringSearchResult(org.pentaho.di.core.reflection.StringSearchResult) NotePadMeta(org.pentaho.di.core.NotePadMeta) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint)
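
As a quick usage sketch (not part of the project sources): load a job file and print every string the search returns. The path my_job.kjb is a placeholder, and the getFieldName()/getString() accessors on StringSearchResult are assumed to mirror the constructor arguments used above.

import java.util.List;

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.reflection.StringSearchResult;
import org.pentaho.di.job.JobMeta;

public class JobStringSearchSketch {
    public static void main(String[] args) throws Exception {
        // Register core plugins and systems before loading any job metadata.
        KettleEnvironment.init();
        // Placeholder job file, no repository (null).
        JobMeta jobMeta = new JobMeta("my_job.kjb", null);
        // Search job entries and database connections, skip notes.
        List<StringSearchResult> results = jobMeta.getStringList(true, true, false);
        for (StringSearchResult result : results) {
            // Each result pairs the found string with the field it came from.
            System.out.println(result.getFieldName() + " = " + result.getString());
        }
    }
}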

Example 52 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class JobMeta method getXML.

/*
   * (non-Javadoc)
   *
   * @see org.pentaho.di.core.xml.XMLInterface#getXML()
   */
public String getXML() {
    // Clear the embedded named clusters.  We will be repopulating from steps that used named clusters
    getNamedClusterEmbedManager().clear();
    Props props = null;
    if (Props.isInitialized()) {
        props = Props.getInstance();
    }
    StringBuilder retval = new StringBuilder(500);
    retval.append(XMLHandler.openTag(XML_TAG)).append(Const.CR);
    retval.append("  ").append(XMLHandler.addTagValue("name", getName()));
    retval.append("  ").append(XMLHandler.addTagValue("description", description));
    retval.append("  ").append(XMLHandler.addTagValue("extended_description", extendedDescription));
    retval.append("  ").append(XMLHandler.addTagValue("job_version", jobVersion));
    if (jobStatus >= 0) {
        retval.append("  ").append(XMLHandler.addTagValue("job_status", jobStatus));
    }
    retval.append("  ").append(XMLHandler.addTagValue("directory", (directory != null ? directory.getPath() : RepositoryDirectory.DIRECTORY_SEPARATOR)));
    retval.append("  ").append(XMLHandler.addTagValue("created_user", createdUser));
    retval.append("  ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(createdDate)));
    retval.append("  ").append(XMLHandler.addTagValue("modified_user", modifiedUser));
    retval.append("  ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(modifiedDate)));
    retval.append("    ").append(XMLHandler.openTag(XML_TAG_PARAMETERS)).append(Const.CR);
    String[] parameters = listParameters();
    for (int idx = 0; idx < parameters.length; idx++) {
        retval.append("      ").append(XMLHandler.openTag("parameter")).append(Const.CR);
        retval.append("        ").append(XMLHandler.addTagValue("name", parameters[idx]));
        try {
            retval.append("        ").append(XMLHandler.addTagValue("default_value", getParameterDefault(parameters[idx])));
            retval.append("        ").append(XMLHandler.addTagValue("description", getParameterDescription(parameters[idx])));
        } catch (UnknownParamException e) {
        // skip the default value and/or description. This exception should never happen because we use listParameters()
        // above.
        }
        retval.append("      ").append(XMLHandler.closeTag("parameter")).append(Const.CR);
    }
    retval.append("    ").append(XMLHandler.closeTag(XML_TAG_PARAMETERS)).append(Const.CR);
    Set<DatabaseMeta> usedDatabaseMetas = getUsedDatabaseMetas();
    // Save the database connections...
    for (int i = 0; i < nrDatabases(); i++) {
        DatabaseMeta dbMeta = getDatabase(i);
        if (props != null && props.areOnlyUsedConnectionsSavedToXML()) {
            if (usedDatabaseMetas.contains(dbMeta)) {
                retval.append(dbMeta.getXML());
            }
        } else {
            retval.append(dbMeta.getXML());
        }
    }
    // The slave servers...
    // 
    retval.append("    ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
    for (int i = 0; i < slaveServers.size(); i++) {
        SlaveServer slaveServer = slaveServers.get(i);
        retval.append(slaveServer.getXML());
    }
    retval.append("    ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR);
    // Append the log table definitions...
    for (LogTableInterface logTable : getLogTables()) {
        retval.append(logTable.getXML());
    }
    retval.append("   ").append(XMLHandler.addTagValue("pass_batchid", batchIdPassed));
    retval.append("   ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile));
    retval.append("  ").append(XMLHandler.openTag("entries")).append(Const.CR);
    for (int i = 0; i < nrJobEntries(); i++) {
        JobEntryCopy jge = getJobEntry(i);
        jge.getEntry().setRepository(repository);
        retval.append(jge.getXML());
    }
    retval.append("  ").append(XMLHandler.closeTag("entries")).append(Const.CR);
    retval.append("  ").append(XMLHandler.openTag("hops")).append(Const.CR);
    for (JobHopMeta hi : jobhops) {
        // Look at all the hops
        retval.append(hi.getXML());
    }
    retval.append("  ").append(XMLHandler.closeTag("hops")).append(Const.CR);
    retval.append("  ").append(XMLHandler.openTag("notepads")).append(Const.CR);
    for (int i = 0; i < nrNotes(); i++) {
        NotePadMeta ni = getNote(i);
        retval.append(ni.getXML());
    }
    retval.append("  ").append(XMLHandler.closeTag("notepads")).append(Const.CR);
    // Also store the attribute groups
    // 
    retval.append(AttributesUtil.getAttributesXml(attributesMap));
    retval.append(XMLHandler.closeTag(XML_TAG)).append(Const.CR);
    return XMLFormatter.format(retval.toString());
}
Also used : Props(org.pentaho.di.core.Props) SlaveServer(org.pentaho.di.cluster.SlaveServer) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) JobEntryCopy(org.pentaho.di.job.entry.JobEntryCopy) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) NotePadMeta(org.pentaho.di.core.NotePadMeta)
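
A minimal round-trip sketch, assuming a local my_job.kjb and the usual KettleEnvironment bootstrap: it re-serializes the loaded job with getXML() and writes the result back to disk, prefixed with the standard XML header.

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.job.JobMeta;

public class JobXmlExportSketch {
    public static void main(String[] args) throws Exception {
        KettleEnvironment.init();
        // Placeholder job file, no repository (null).
        JobMeta jobMeta = new JobMeta("my_job.kjb", null);
        // getXML() covers parameters, connections, slave servers, log tables, entries, hops and notes.
        String xml = XMLHandler.getXMLHeader() + jobMeta.getXML();
        Files.write(Paths.get("my_job_copy.kjb"), xml.getBytes(StandardCharsets.UTF_8));
    }
}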

Example 53 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class Job method beginProcessing.

/**
 * Handle logging at start
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
    currentDate = new Date();
    logDate = new Date();
    startDate = Const.MIN_DATE;
    endDate = currentDate;
    resetErrors();
    final JobLogTable jobLogTable = jobMeta.getJobLogTable();
    int intervalInSeconds = Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1);
    if (jobLogTable.isDefined()) {
        DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
        String schemaName = environmentSubstitute(jobMeta.getJobLogTable().getActualSchemaName());
        String tableName = environmentSubstitute(jobMeta.getJobLogTable().getActualTableName());
        String schemaAndTable = jobMeta.getJobLogTable().getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);
        Database ldb = new Database(this, logcon);
        ldb.shareVariablesWith(this);
        ldb.connect();
        ldb.setCommit(logCommitSize);
        try {
            // See if we have to add a batch id...
            Long id_batch = 1L;
            if (jobMeta.getJobLogTable().isBatchIdUsed()) {
                id_batch = logcon.getNextBatchId(ldb, schemaName, tableName, jobLogTable.getKeyField().getFieldName());
                setBatchId(id_batch.longValue());
                if (getPassedBatchId() <= 0) {
                    setPassedBatchId(id_batch.longValue());
                }
            }
            Object[] lastr = ldb.getLastLogDate(schemaAndTable, jobMeta.getName(), true, LogStatus.END);
            if (!Utils.isEmpty(lastr)) {
                Date last;
                try {
                    last = ldb.getReturnRowMeta().getDate(lastr, 0);
                } catch (KettleValueException e) {
                    throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.ConversionError", "" + tableName), e);
                }
                if (last != null) {
                    startDate = last;
                }
            }
            depDate = currentDate;
            ldb.writeLogRecord(jobMeta.getJobLogTable(), LogStatus.START, this, null);
            if (!ldb.isAutoCommit()) {
                ldb.commitLog(true, jobMeta.getJobLogTable());
            }
            ldb.disconnect();
            // If interval logging is enabled, install a timer that periodically writes a log record...
            if (intervalInSeconds > 0) {
                final Timer timer = new Timer(getName() + " - interval logging timer");
                TimerTask timerTask = new TimerTask() {

                    @Override
                    public void run() {
                        try {
                            endProcessing();
                        } catch (Exception e) {
                            log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformIntervalLogging"), e);
                            // Also stop the show...
                            // 
                            errors.incrementAndGet();
                            stopAll();
                        }
                    }
                };
                timer.schedule(timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000);
                addJobListener(new JobAdapter() {

                    @Override
                    public void jobFinished(Job job) {
                        timer.cancel();
                    }
                });
            }
            // Add a listener at the end of the job to take of writing the final job
            // log record...
            // 
            addJobListener(new JobAdapter() {

                @Override
                public void jobFinished(Job job) throws KettleException {
                    try {
                        endProcessing();
                    } catch (KettleJobException e) {
                        log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToWriteToLoggingTable", jobLogTable.toString()), e);
                        // the job fails if the final log record cannot be written!
                        throw new KettleException(e);
                    }
                }
            });
        } catch (KettleDatabaseException dbe) {
            // This is even before actual execution
            addErrors(1);
            throw new KettleJobException(BaseMessages.getString(PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName), dbe);
        } finally {
            ldb.disconnect();
        }
    }
    // If we need to write out the job entry logging information, do so at the end of the job:
    // 
    JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
    if (jobEntryLogTable.isDefined()) {
        addJobListener(new JobAdapter() {

            @Override
            public void jobFinished(Job job) throws KettleException {
                try {
                    writeJobEntryLogInformation();
                } catch (KettleException e) {
                    throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd"), e);
                }
            }
        });
    }
    // If we need to write the log channel hierarchy and lineage information,
    // add a listener for that too...
    // 
    ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
    if (channelLogTable.isDefined()) {
        addJobListener(new JobAdapter() {

            @Override
            public void jobFinished(Job job) throws KettleException {
                try {
                    writeLogChannelInformation();
                } catch (KettleException e) {
                    throw new KettleException(BaseMessages.getString(PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd"), e);
                }
            }
        });
    }
    JobExecutionExtension extension = new JobExecutionExtension(this, result, null, false);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeginProcessing.id, extension);
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobLogTable(org.pentaho.di.core.logging.JobLogTable) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleJobException(org.pentaho.di.core.exception.KettleJobException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) KettleJobException(org.pentaho.di.core.exception.KettleJobException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Timer(java.util.Timer) TimerTask(java.util.TimerTask) ChannelLogTable(org.pentaho.di.core.logging.ChannelLogTable) Database(org.pentaho.di.core.database.Database) FileObject(org.apache.commons.vfs2.FileObject) JobEntryLogTable(org.pentaho.di.core.logging.JobEntryLogTable) KettleValueException(org.pentaho.di.core.exception.KettleValueException) JobEntryJob(org.pentaho.di.job.entries.job.JobEntryJob)
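
A hedged setup sketch showing how a job gets a defined JobLogTable in the first place, so that beginProcessing() has somewhere to write its START record; the connection details and table name are placeholders.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.logging.JobLogTable;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;

public class JobLoggingSetupSketch {
    public static void main(String[] args) throws Exception {
        KettleEnvironment.init();
        JobMeta jobMeta = new JobMeta("my_job.kjb", null);
        // Placeholder logging connection (name, type, access, host, db, port, user, password).
        DatabaseMeta logDb = new DatabaseMeta("log_connection", "MYSQL", "Native", "localhost", "logs", "3306", "etl_user", "etl_password");
        jobMeta.addDatabase(logDb);
        // Point the job log table at that connection; beginProcessing() will then
        // write the START record (and a batch id, if enabled) before the first entry runs.
        JobLogTable jobLogTable = jobMeta.getJobLogTable();
        jobLogTable.setConnectionName(logDb.getName());
        jobLogTable.setTableName("job_log");
        Job job = new Job(null, jobMeta);
        job.start();
        job.waitUntilFinished();
    }
}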

Example 54 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class DatabaseConfigurationImportRule method verifyRule.

@Override
public List<ImportValidationFeedback> verifyRule(Object subject) {
    List<ImportValidationFeedback> feedback = new ArrayList<ImportValidationFeedback>();
    if (!isEnabled()) {
        return feedback;
    }
    if (databaseMeta == null) {
        feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "This rule contains no database to validate against."));
        return feedback;
    }
    DatabaseMeta verify = null;
    if (subject instanceof HasDatabasesInterface) {
        HasDatabasesInterface dbs = (HasDatabasesInterface) subject;
        verify = dbs.findDatabase(databaseMeta.getName());
    } else if (subject instanceof DatabaseMeta) {
        // When the subject is a single connection, match it against the rule by name.
        if (databaseMeta.getName().equals(((DatabaseMeta) subject).getName())) {
            verify = (DatabaseMeta) subject;
        }
    }
    if (verify == null) {
        return feedback;
    }
    // Verify the database name if the rule specifies one...
    if (!Utils.isEmpty(databaseMeta.getDatabaseName())) {
        if (!databaseMeta.getDatabaseName().equals(verify.getDatabaseName())) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The name of the database is not set to the expected value '" + databaseMeta.getDatabaseName() + "'."));
        }
    }
    // Verify the host name...
    if (!Utils.isEmpty(databaseMeta.getHostname())) {
        if (!databaseMeta.getHostname().equals(verify.getHostname())) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The host name of the database is not set to the expected value '" + databaseMeta.getHostname() + "'."));
        }
    }
    // Verify the database port...
    if (!Utils.isEmpty(databaseMeta.getDatabasePortNumberString())) {
        if (!databaseMeta.getDatabasePortNumberString().equals(verify.getDatabasePortNumberString())) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The database port of the database is not set to the expected value '" + databaseMeta.getDatabasePortNumberString() + "'."));
        }
    }
    // Verify the username...
    if (!Utils.isEmpty(databaseMeta.getUsername())) {
        if (!databaseMeta.getUsername().equals(verify.getUsername())) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The username of the database is not set to the expected value '" + databaseMeta.getUsername() + "'."));
        }
    }
    // Verify the password...
    if (!Utils.isEmpty(databaseMeta.getPassword())) {
        if (!databaseMeta.getPassword().equals(verify.getPassword())) {
            feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.ERROR, "The password of the database is not set to the expected value."));
        }
    }
    if (feedback.isEmpty()) {
        feedback.add(new ImportValidationFeedback(this, ImportValidationResultType.APPROVAL, "The database connection was found and verified."));
    }
    return feedback;
}
Also used : ImportValidationFeedback(org.pentaho.di.imp.rule.ImportValidationFeedback) ArrayList(java.util.ArrayList) HasDatabasesInterface(org.pentaho.di.trans.HasDatabasesInterface) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta)
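
A sketch of applying this rule to an imported job; the org.pentaho.di.imp.rules package, the setEnabled()/setDatabaseMeta() setters and the getResultType()/getComment() accessors are assumptions inferred from the code above, not confirmed against the project.

import java.util.List;

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.imp.rule.ImportValidationFeedback;
import org.pentaho.di.imp.rules.DatabaseConfigurationImportRule;
import org.pentaho.di.job.JobMeta;

public class DatabaseRuleSketch {
    public static void verify(JobMeta importedJob) {
        // Reference connection the imported job must match (placeholder values; no password check).
        DatabaseMeta expected = new DatabaseMeta("dwh", "ORACLE", "Native", "dwh.example.com", "DWH", "1521", "etl_user", null);
        DatabaseConfigurationImportRule rule = new DatabaseConfigurationImportRule();
        rule.setEnabled(true);
        rule.setDatabaseMeta(expected);
        // JobMeta implements HasDatabasesInterface, so verifyRule() takes the first
        // branch and looks the connection up by name.
        List<ImportValidationFeedback> feedback = rule.verifyRule(importedJob);
        for (ImportValidationFeedback f : feedback) {
            System.out.println(f.getResultType() + ": " + f.getComment());
        }
    }
}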

Example 55 with DatabaseMeta

use of org.pentaho.di.core.database.DatabaseMeta in project pentaho-kettle by pentaho.

the class IngresVectorwiseLoader method createCommandLine.

/**
 * Create the command line for a sql process depending on the meta information supplied.
 *
 * @param meta
 *          The meta data to create the command line from
 *
 * @return The string to execute.
 *
 * @throws KettleException
 *           Upon any exception
 */
public String createCommandLine(IngresVectorwiseLoaderMeta meta) throws KettleException {
    StringBuilder sb = new StringBuilder(300);
    if (!Utils.isEmpty(meta.getSqlPath())) {
        try {
            FileObject fileObject = KettleVFS.getFileObject(environmentSubstitute(meta.getSqlPath()), getTransMeta());
            String sqlexec = Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject));
            sb.append(sqlexec);
        // sql @tc-dwh-test.timocom.net,tcp_ip,VW[ingres,pwd]::dwh
        } catch (KettleFileException ex) {
            throw new KettleException("Error retrieving command string", ex);
        }
    } else {
        if (meta.isUsingVwload()) {
            if (isDetailed()) {
                logDetailed("vwload defaults to system path");
            }
            sb.append("vwload");
        } else {
            if (isDetailed()) {
                logDetailed("sql defaults to system path");
            }
            sb.append("sql");
        }
    }
    DatabaseMeta dm = meta.getDatabaseMeta();
    if (dm != null) {
        String databaseName = environmentSubstitute(Const.NVL(dm.getDatabaseName(), ""));
        String password = Encr.decryptPasswordOptionallyEncrypted(environmentSubstitute(Const.NVL(dm.getDatabaseInterface().getPassword(), "")));
        String port = environmentSubstitute(Const.NVL(dm.getDatabasePortNumberString(), "")).replace("7", "");
        String username = environmentSubstitute(Const.NVL(dm.getDatabaseInterface().getUsername(), ""));
        String hostname = environmentSubstitute(Const.NVL(dm.getDatabaseInterface().getHostname(), ""));
        String schemaTable = dm.getQuotedSchemaTableCombination(null, environmentSubstitute(meta.getTableName()));
        String encoding = environmentSubstitute(Const.NVL(meta.getEncoding(), ""));
        String fifoFile = Const.optionallyQuoteStringByOS(environmentSubstitute(Const.NVL(meta.getFifoFileName(), "")));
        String errorFile = Const.optionallyQuoteStringByOS(environmentSubstitute(Const.NVL(meta.getErrorFileName(), "")));
        int maxNrErrors = Const.toInt(environmentSubstitute(Const.NVL(meta.getMaxNrErrors(), "0")), 0);
        if (meta.isUsingVwload()) {
            sb.append(" -u ").append(username);
            sb.append(" -P ").append(password);
            sb.append(" -f ").append(meta.getDelimiter()).append("");
            sb.append(" -t ").append(schemaTable);
            if (!Utils.isEmpty(encoding)) {
                sb.append(" -C ").append(encoding);
            }
            if (!Utils.isEmpty(errorFile)) {
                sb.append(" -l ").append(errorFile);
            }
            if (maxNrErrors > 0) {
                // multiply by two because every rejected row produces two error
                // messages, which is not obvious
                sb.append(" -x ").append(maxNrErrors * 2);
            }
            sb.append(" ").append(databaseName);
            sb.append(" ").append(fifoFile);
        } else if (meta.isUseDynamicVNode()) {
            // the logical port name used by JDBC carries a 7; it is stripped above for the command line
            sb.append(" @").append(hostname).append(",").append(port).append("[").append(username).append(",").append(password).append("]::").append(databaseName);
        } else {
            // Database Name
            // 
            sb.append(" ").append(databaseName);
            if (meta.isUseAuthentication()) {
                sb.append("-P").append(password);
            }
        }
    } else {
        throw new KettleException("No connection specified");
    }
    return sb.toString();
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) FileObject(org.apache.commons.vfs2.FileObject) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta)
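
For orientation, a small sketch of the connection object this step reads from; the "VECTORWISE" type id and every connection value are placeholders, not taken from the project.

import org.pentaho.di.core.database.DatabaseMeta;

public class VectorwiseConnectionSketch {
    public static void main(String[] args) {
        // Placeholder connection (name, type, access, host, db, port, user, password).
        DatabaseMeta dm = new DatabaseMeta("vw", "VECTORWISE", "Native", "dwh-host", "dwh", "VW7", "ingres", "secret");
        // These are the accessors createCommandLine() combines (after variable
        // substitution and optional quoting) into the vwload or sql invocation.
        System.out.println("host = " + dm.getDatabaseInterface().getHostname());
        System.out.println("db   = " + dm.getDatabaseName());
        System.out.println("port = " + dm.getDatabasePortNumberString());
        System.out.println("user = " + dm.getDatabaseInterface().getUsername());
    }
}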

Aggregations

DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 522
Test (org.junit.Test): 133
KettleException (org.pentaho.di.core.exception.KettleException): 131
Database (org.pentaho.di.core.database.Database): 88
MessageBox (org.eclipse.swt.widgets.MessageBox): 66
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 63
TransMeta (org.pentaho.di.trans.TransMeta): 57
StepMeta (org.pentaho.di.trans.step.StepMeta): 54
ArrayList (java.util.ArrayList): 53
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 48
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 44
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 42
SlaveServer (org.pentaho.di.cluster.SlaveServer): 33
IMetaStore (org.pentaho.metastore.api.IMetaStore): 30
ObjectId (org.pentaho.di.repository.ObjectId): 29
DatabaseExplorerDialog (org.pentaho.di.ui.core.database.dialog.DatabaseExplorerDialog): 29
JobMeta (org.pentaho.di.job.JobMeta): 26
TransHopMeta (org.pentaho.di.trans.TransHopMeta): 26
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 24
PluginRegistry (org.pentaho.di.core.plugins.PluginRegistry): 24