Example 76 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class JobEntryXSDValidator, method saveRep.

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "xmlfilename", xmlfilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "xsdfilename", xsdfilename);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to save job entry of type 'xsdvalidator' to the repository for id_job=" + id_job, dbe);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)
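
For context, the read side of this save pattern is symmetric. Below is a minimal sketch (not quoted from the project) of what the matching loadRep counterpart typically looks like, assuming the standard Repository.getJobEntryAttributeString accessor:

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
        // Each attribute is read back under the same key it was saved with.
        xmlfilename = rep.getJobEntryAttributeString(id_jobentry, "xmlfilename");
        xsdfilename = rep.getJobEntryAttributeString(id_jobentry, "xsdfilename");
    } catch (KettleDatabaseException dbe) {
        // Wrap and chain the cause, mirroring the save path above.
        throw new KettleException("Unable to load job entry of type 'xsdvalidator' from the repository for id_jobentry=" + id_jobentry, dbe);
    }
}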

Example 77 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class JobEntrySSH2GET, method saveRep.

@Override
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "servername", serverName);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "username", userName);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "password", Encr.encryptPasswordIfNotUsingVariables(password));
        rep.saveJobEntryAttribute(id_job, getObjectId(), "serverport", serverPort);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "ftpdirectory", ftpDirectory);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "localdirectory", localDirectory);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "wildcard", wildcard);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "only_new", onlyGettingNewFiles);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "usehttpproxy", usehttpproxy);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "httpproxyhost", httpProxyHost);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "httpproxyport", httpproxyport);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "httpproxyusername", httpproxyusername);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "httpproxypassword", httpProxyPassword);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "publicpublickey", publicpublickey);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "keyfilename", keyFilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "keyfilepass", keyFilePass);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "usebasicauthentication", useBasicAuthentication);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "afterftpput", afterFtpPut);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "destinationfolder", destinationfolder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "createdestinationfolder", createdestinationfolder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "cachehostkey", cachehostkey);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "timeout", timeout);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "createtargetfolder", createtargetfolder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "includeSubFolders", includeSubFolders);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException(BaseMessages.getString(PKG, "JobSSH2GET.Log.UnableSaveRep", "" + id_job, dbe.getMessage()), dbe);
    }
}
Also used: KettleException (org.pentaho.di.core.exception.KettleException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)
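
One detail worth noting in this example is the password handling: the value is run through Encr.encryptPasswordIfNotUsingVariables(...) before being written. As a hedged sketch (assuming the standard org.pentaho.di.core.encryption.Encr helper used elsewhere in pentaho-kettle), the read side reverses this:

// Sketch only: reading the encrypted attribute back in a loadRep counterpart.
String stored = rep.getJobEntryAttributeString(id_jobentry, "password");
// decryptPasswordOptionallyEncrypted() returns the value unchanged when it is
// a variable expression rather than an encrypted literal, so variable-based
// passwords keep working.
password = Encr.decryptPasswordOptionallyEncrypted(stored);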

Example 78 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class TransMeta, method checkSteps.

/**
 * Checks all the steps and fills a List of (CheckResult) remarks.
 *
 * @param remarks
 *          The remarks list to add to.
 * @param only_selected
 *          true to check only the selected steps, false for all steps
 * @param monitor
 *          a progress monitor listener to be updated as the SQL statements are generated
 */
public void checkSteps(List<CheckResultInterface> remarks, boolean only_selected, ProgressMonitorListener monitor, VariableSpace space, Repository repository, IMetaStore metaStore) {
    try {
        // Start with a clean slate...
        remarks.clear();
        Map<ValueMetaInterface, String> values = new Hashtable<>();
        String[] stepnames;
        StepMeta[] steps;
        List<StepMeta> selectedSteps = getSelectedSteps();
        if (!only_selected || selectedSteps.isEmpty()) {
            stepnames = getStepNames();
            steps = getStepsArray();
        } else {
            stepnames = getSelectedStepNames();
            steps = selectedSteps.toArray(new StepMeta[selectedSteps.size()]);
        }
        ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.BeforeCheckSteps.id, new CheckStepsExtension(remarks, space, this, steps, repository, metaStore));
        boolean stop_checking = false;
        if (monitor != null) {
            monitor.beginTask(BaseMessages.getString(PKG, "TransMeta.Monitor.VerifyingThisTransformationTask.Title"), steps.length + 2);
        }
        for (int i = 0; i < steps.length && !stop_checking; i++) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.VerifyingStepTask.Title", stepnames[i]));
            }
            StepMeta stepMeta = steps[i];
            int nrinfo = findNrInfoSteps(stepMeta);
            StepMeta[] infostep = null;
            if (nrinfo > 0) {
                infostep = getInfoStep(stepMeta);
            }
            RowMetaInterface info = null;
            if (infostep != null) {
                try {
                    info = getStepFields(infostep);
                } catch (KettleStepException kse) {
                    info = null;
                    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.ErrorOccurredGettingStepInfoFields.Description", "" + stepMeta, Const.CR + kse.getMessage()), stepMeta);
                    remarks.add(cr);
                }
            }
            // The previous fields from non-informative steps:
            RowMetaInterface prev = null;
            try {
                prev = getPrevStepFields(stepMeta);
            } catch (KettleStepException kse) {
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.ErrorOccurredGettingInputFields.Description", "" + stepMeta, Const.CR + kse.getMessage()), stepMeta);
                remarks.add(cr);
                // This is a severe error: stop checking...
                // Otherwise we wind up checking time & time again because nothing gets put in the database
                // cache, the timeout of certain databases is very long... (Oracle)
                stop_checking = true;
            }
            if (isStepUsedInTransHops(stepMeta) || getSteps().size() == 1) {
                // Get the input & output steps!
                // Copy to arrays:
                String[] input = getPrevStepNames(stepMeta);
                String[] output = getNextStepNames(stepMeta);
                // Check step specific info...
                ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.BeforeCheckStep.id, new CheckStepsExtension(remarks, space, this, new StepMeta[] { stepMeta }, repository, metaStore));
                stepMeta.check(remarks, this, prev, input, output, info, space, repository, metaStore);
                ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.AfterCheckStep.id, new CheckStepsExtension(remarks, space, this, new StepMeta[] { stepMeta }, repository, metaStore));
                // See if illegal characters etc. were used in field-names...
                if (prev != null) {
                    for (int x = 0; x < prev.size(); x++) {
                        ValueMetaInterface v = prev.getValueMeta(x);
                        String name = v.getName();
                        if (name == null) {
                            values.put(v, BaseMessages.getString(PKG, "TransMeta.Value.CheckingFieldName.FieldNameIsEmpty.Description"));
                        } else if (name.indexOf(' ') >= 0) {
                            values.put(v, BaseMessages.getString(PKG, "TransMeta.Value.CheckingFieldName.FieldNameContainsSpaces.Description"));
                        } else {
                            char[] list = new char[] { '.', ',', '-', '/', '+', '*', '\'', '\t', '"', '|', '@', '(', ')', '{', '}', '!', '^' };
                            for (int c = 0; c < list.length; c++) {
                                if (name.indexOf(list[c]) >= 0) {
                                    values.put(v, BaseMessages.getString(PKG, "TransMeta.Value.CheckingFieldName.FieldNameContainsUnfriendlyCodes.Description", String.valueOf(list[c])));
                                }
                            }
                        }
                    }
                    // Check if 2 steps with the same name are entering the step...
                    if (prev.size() > 1) {
                        String[] fieldNames = prev.getFieldNames();
                        String[] sortedNames = Const.sortStrings(fieldNames);
                        String prevName = sortedNames[0];
                        for (int x = 1; x < sortedNames.length; x++) {
                            // Checking for doubles
                            if (prevName.equalsIgnoreCase(sortedNames[x])) {
                                // Give a warning!!
                                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultWarning.HaveTheSameNameField.Description", prevName), stepMeta);
                                remarks.add(cr);
                            } else {
                                prevName = sortedNames[x];
                            }
                        }
                    }
                } else {
                    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.CannotFindPreviousFields.Description") + stepMeta.getName(), stepMeta);
                    remarks.add(cr);
                }
            } else {
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultWarning.StepIsNotUsed.Description"), stepMeta);
                remarks.add(cr);
            }
            // Also check for mixing rows...
            try {
                checkRowMixingStatically(stepMeta, null);
            } catch (KettleRowException e) {
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, e.getMessage(), stepMeta);
                remarks.add(cr);
            }
            if (monitor != null) {
                // progress bar...
                monitor.worked(1);
                if (monitor.isCanceled()) {
                    stop_checking = true;
                }
            }
        }
        // Also, check the logging table of the transformation...
        if (monitor == null || !monitor.isCanceled()) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.CheckingTheLoggingTableTask.Title"));
            }
            if (transLogTable.getDatabaseMeta() != null) {
                Database logdb = new Database(this, transLogTable.getDatabaseMeta());
                logdb.shareVariablesWith(this);
                try {
                    logdb.connect();
                    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.ConnectingWorks.Description"), null);
                    remarks.add(cr);
                    if (transLogTable.getTableName() != null) {
                        if (logdb.checkTableExists(transLogTable.getTableName())) {
                            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.LoggingTableExists.Description", transLogTable.getTableName()), null);
                            remarks.add(cr);
                            RowMetaInterface fields = transLogTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
                            String sql = logdb.getDDL(transLogTable.getTableName(), fields);
                            if (sql == null || sql.length() == 0) {
                                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.CorrectLayout.Description"), null);
                                remarks.add(cr);
                            } else {
                                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.LoggingTableNeedsAdjustments.Description") + Const.CR + sql, null);
                                remarks.add(cr);
                            }
                        } else {
                            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.LoggingTableDoesNotExist.Description"), null);
                            remarks.add(cr);
                        }
                    } else {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.LogTableNotSpecified.Description"), null);
                        remarks.add(cr);
                    }
                } catch (KettleDatabaseException dbe) {
                // Ignore errors
                } finally {
                    logdb.disconnect();
                }
            }
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.CheckingForDatabaseUnfriendlyCharactersInFieldNamesTask.Title"));
        }
        if (values.size() > 0) {
            for (ValueMetaInterface v : values.keySet()) {
                String message = values.get(v);
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultWarning.Description", v.getName(), message, v.getOrigin()), findStep(v.getOrigin()));
                remarks.add(cr);
            }
        } else {
            CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.Description"), null);
            remarks.add(cr);
        }
        if (monitor != null) {
            monitor.worked(1);
        }
        ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.AfterCheckSteps.id, new CheckStepsExtension(remarks, space, this, steps, repository, metaStore));
    } catch (Exception e) {
        log.logError(Const.getStackTracker(e));
        throw new RuntimeException(e);
    }
}
Also used: KettleStepException (org.pentaho.di.core.exception.KettleStepException), Hashtable (java.util.Hashtable), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface), StepMeta (org.pentaho.di.trans.step.StepMeta), Point (org.pentaho.di.core.gui.Point), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint), KettleXMLException (org.pentaho.di.core.exception.KettleXMLException), KettleRowException (org.pentaho.di.core.exception.KettleRowException), FileSystemException (org.apache.commons.vfs2.FileSystemException), IOException (java.io.IOException), KettleMissingPluginsException (org.pentaho.di.core.exception.KettleMissingPluginsException), KettleFileException (org.pentaho.di.core.exception.KettleFileException), KettleException (org.pentaho.di.core.exception.KettleException), ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface), CheckResult (org.pentaho.di.core.CheckResult), Database (org.pentaho.di.core.database.Database)
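
To see how checkSteps is typically driven, here is a hedged caller sketch; transMeta, repository, and metaStore stand in for objects obtained elsewhere, and the TransMeta itself is passed as the VariableSpace (it implements that interface via AbstractMeta):

List<CheckResultInterface> remarks = new ArrayList<>();
// Check all steps, with no progress monitor attached.
transMeta.checkSteps(remarks, false, null, transMeta, repository, metaStore);
for (CheckResultInterface remark : remarks) {
    if (remark.getType() == CheckResultInterface.TYPE_RESULT_ERROR) {
        System.out.println("Error at " + remark.getSourceInfo() + ": " + remark.getText());
    }
}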

Example 79 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class TransMeta, method getSQLStatements.

/**
 * Builds a list of all the SQL statements that this transformation needs in order to work properly.
 *
 * @param monitor
 *          a progress monitor listener to be updated as the SQL statements are generated
 * @return An ArrayList of SQLStatement objects.
 * @throws KettleStepException
 *           if any errors occur during SQL statement generation
 */
public List<SQLStatement> getSQLStatements(ProgressMonitorListener monitor) throws KettleStepException {
    if (monitor != null) {
        monitor.beginTask(BaseMessages.getString(PKG, "TransMeta.Monitor.GettingTheSQLForTransformationTask.Title"), nrSteps() + 1);
    }
    List<SQLStatement> stats = new ArrayList<>();
    for (int i = 0; i < nrSteps(); i++) {
        StepMeta stepMeta = getStep(i);
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.GettingTheSQLForStepTask.Title", "" + stepMeta));
        }
        RowMetaInterface prev = getPrevStepFields(stepMeta);
        SQLStatement sqlCompat = compatibleStepMetaGetSQLStatements(stepMeta.getStepMetaInterface(), stepMeta, prev);
        if (sqlCompat.getSQL() != null || sqlCompat.hasError()) {
            stats.add(sqlCompat);
        }
        SQLStatement sql = stepMeta.getStepMetaInterface().getSQLStatements(this, stepMeta, prev, repository, metaStore);
        if (sql.getSQL() != null || sql.hasError()) {
            stats.add(sql);
        }
        if (monitor != null) {
            monitor.worked(1);
        }
    }
    // Also check the SQL needed for the transformation's logging tables...
    if (monitor != null) {
        monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.GettingTheSQLForTransformationTask.Title2"));
    }
    if (transLogTable.getDatabaseMeta() != null && (!Utils.isEmpty(transLogTable.getTableName()) || !Utils.isEmpty(performanceLogTable.getTableName()))) {
        try {
            for (LogTableInterface logTable : new LogTableInterface[] { transLogTable, performanceLogTable, channelLogTable, stepLogTable }) {
                if (logTable.getDatabaseMeta() != null && !Utils.isEmpty(logTable.getTableName())) {
                    Database db = null;
                    try {
                        db = new Database(this, transLogTable.getDatabaseMeta());
                        db.shareVariablesWith(this);
                        db.connect();
                        RowMetaInterface fields = logTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
                        String schemaTable = logTable.getDatabaseMeta().getQuotedSchemaTableCombination(logTable.getSchemaName(), logTable.getTableName());
                        String sql = db.getDDL(schemaTable, fields);
                        if (!Utils.isEmpty(sql)) {
                            SQLStatement stat = new SQLStatement("<this transformation>", transLogTable.getDatabaseMeta(), sql);
                            stats.add(stat);
                        }
                    } catch (Exception e) {
                        throw new KettleDatabaseException("Unable to connect to logging database [" + logTable.getDatabaseMeta() + "]", e);
                    } finally {
                        if (db != null) {
                            db.disconnect();
                        }
                    }
                }
            }
        } catch (KettleDatabaseException dbe) {
            SQLStatement stat = new SQLStatement("<this transformation>", transLogTable.getDatabaseMeta(), null);
            stat.setError(BaseMessages.getString(PKG, "TransMeta.SQLStatement.ErrorDesc.ErrorObtainingTransformationLogTableInfo") + dbe.getMessage());
            stats.add(stat);
        }
    }
    if (monitor != null) {
        monitor.worked(1);
    }
    if (monitor != null) {
        monitor.done();
    }
    return stats;
}
Also used: LogTableInterface (org.pentaho.di.core.logging.LogTableInterface), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), ArrayList (java.util.ArrayList), Database (org.pentaho.di.core.database.Database), RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface), SQLStatement (org.pentaho.di.core.SQLStatement), StepMeta (org.pentaho.di.trans.step.StepMeta), Point (org.pentaho.di.core.gui.Point), KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint), KettleXMLException (org.pentaho.di.core.exception.KettleXMLException), KettleRowException (org.pentaho.di.core.exception.KettleRowException), FileSystemException (org.apache.commons.vfs2.FileSystemException), KettleStepException (org.pentaho.di.core.exception.KettleStepException), IOException (java.io.IOException), KettleMissingPluginsException (org.pentaho.di.core.exception.KettleMissingPluginsException), KettleFileException (org.pentaho.di.core.exception.KettleFileException), KettleException (org.pentaho.di.core.exception.KettleException)
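
A hedged usage sketch for this method: collect the statements, report generation errors, and print the SQL that still needs to be run (transMeta is a placeholder for a loaded transformation):

List<SQLStatement> statements = transMeta.getSQLStatements(null);
for (SQLStatement stat : statements) {
    if (stat.hasError()) {
        // SQL generation itself failed for this step or log table.
        System.err.println("SQL generation error: " + stat.getError());
    } else if (stat.getSQL() != null) {
        // A statement that must be executed before the transformation runs.
        System.out.println(stat.getSQL());
    }
}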

Example 80 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class DimensionLookup, method setDimLookup.

/**
 * table: the dimension table.
 * keys[]: which dimension fields do we use to look up the key?
 * retval: the name of the key to return.
 * datefield: do we have a date field?
 * datefrom, dateto: the date range, if any.
 */
private void setDimLookup(RowMetaInterface rowMeta) throws KettleDatabaseException {
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();
    data.lookupRowMeta = new RowMeta();
    /*
     * DEFAULT, SYSDATE, START_TRANS, COLUMN_VALUE :
     *
     * SELECT <tk>, <version>, ... , FROM <table> WHERE key1=keys[1] AND key2=keys[2] ... AND <datefrom> <= <datefield>
     * AND <dateto> > <datefield> ;
     *
     * NULL :
     *
     * SELECT <tk>, <version>, ... , FROM <table> WHERE key1=keys[1] AND key2=keys[2] ... AND ( <datefrom> is null OR
     * <datefrom> <= <datefield> ) AND <dateto> >= <datefield>
     */
    String sql = "SELECT " + databaseMeta.quoteField(meta.getKeyField()) + ", " + databaseMeta.quoteField(meta.getVersionField());
    if (!Utils.isEmpty(meta.getFieldLookup())) {
        for (int i = 0; i < meta.getFieldLookup().length; i++) {
            // Don't retrieve the fields without input
            if (!Utils.isEmpty(meta.getFieldLookup()[i]) && !DimensionLookupMeta.isUpdateTypeWithoutArgument(meta.isUpdate(), meta.getFieldUpdate()[i])) {
                sql += ", " + databaseMeta.quoteField(meta.getFieldLookup()[i]);
                if (!Utils.isEmpty(meta.getFieldStream()[i]) && !meta.getFieldLookup()[i].equals(meta.getFieldStream()[i])) {
                    sql += " AS " + databaseMeta.quoteField(meta.getFieldStream()[i]);
                }
            }
        }
    }
    if (meta.getCacheSize() >= 0) {
        sql += ", " + databaseMeta.quoteField(meta.getDateFrom()) + ", " + databaseMeta.quoteField(meta.getDateTo());
    }
    sql += " FROM " + data.schemaTable + " WHERE ";
    for (int i = 0; i < meta.getKeyLookup().length; i++) {
        if (i != 0) {
            sql += " AND ";
        }
        sql += databaseMeta.quoteField(meta.getKeyLookup()[i]) + " = ? ";
        data.lookupRowMeta.addValueMeta(rowMeta.getValueMeta(data.keynrs[i]));
    }
    String dateFromField = databaseMeta.quoteField(meta.getDateFrom());
    String dateToField = databaseMeta.quoteField(meta.getDateTo());
    if (meta.isUsingStartDateAlternative() && (meta.getStartDateAlternative() == DimensionLookupMeta.START_DATE_ALTERNATIVE_NULL) || (meta.getStartDateAlternative() == DimensionLookupMeta.START_DATE_ALTERNATIVE_COLUMN_VALUE)) {
        // Null as a start date is possible...
        // 
        sql += " AND ( " + dateFromField + " IS NULL OR " + dateFromField + " <= ? )" + Const.CR;
        sql += " AND " + dateToField + " > ?" + Const.CR;
        data.lookupRowMeta.addValueMeta(new ValueMetaDate(meta.getDateFrom()));
        data.lookupRowMeta.addValueMeta(new ValueMetaDate(meta.getDateTo()));
    } else {
        // Null as a start date is NOT possible
        // 
        sql += " AND ? >= " + dateFromField + Const.CR;
        sql += " AND ? < " + dateToField + Const.CR;
        data.lookupRowMeta.addValueMeta(new ValueMetaDate(meta.getDateFrom()));
        data.lookupRowMeta.addValueMeta(new ValueMetaDate(meta.getDateTo()));
    }
    try {
        logDetailed("Dimension Lookup setting preparedStatement to [" + sql + "]");
        data.prepStatementLookup = data.db.getConnection().prepareStatement(databaseMeta.stripCR(sql));
        if (databaseMeta.supportsSetMaxRows()) {
            // always get only 1 line back!
            data.prepStatementLookup.setMaxRows(1);
        }
        if (databaseMeta.getDatabaseInterface().isMySQLVariant()) {
            // Make sure to DISABLE Streaming Result sets
            data.prepStatementLookup.setFetchSize(0);
        }
        logDetailed("Finished preparing dimension lookup statement.");
    } catch (SQLException ex) {
        throw new KettleDatabaseException("Unable to prepare dimension lookup", ex);
    }
}
Also used: RowMeta (org.pentaho.di.core.row.RowMeta), SQLException (java.sql.SQLException), KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException), DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta), ValueMetaDate (org.pentaho.di.core.row.value.ValueMetaDate)
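
For completeness, a sketch of how the prepared lookup statement built here is usually driven later during row processing. The setValues/getLookup calls follow the Kettle Database API, but treat this as an illustration rather than the project's exact processRow code:

// Allocate a row matching the lookup parameter layout built above:
// the key fields first, then the two date-range bounds.
Object[] lookupRow = RowDataUtil.allocateRowData(data.lookupRowMeta.size());
// ... fill in the key values and the date bounds in that order ...
data.db.setValues(data.lookupRowMeta, lookupRow, data.prepStatementLookup);
Object[] add = data.db.getLookup(data.prepStatementLookup);
if (add == null) {
    // No dimension record matched these keys within the date range.
}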

Aggregations

KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 279
KettleException (org.pentaho.di.core.exception.KettleException): 176
SQLException (java.sql.SQLException): 69
Database (org.pentaho.di.core.database.Database): 46
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 41
KettleValueException (org.pentaho.di.core.exception.KettleValueException): 39
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 37
KettleDatabaseBatchException (org.pentaho.di.core.exception.KettleDatabaseBatchException): 33
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 31
BatchUpdateException (java.sql.BatchUpdateException): 27
ResultSet (java.sql.ResultSet): 27
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface): 26
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 25
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 24
RowMeta (org.pentaho.di.core.row.RowMeta): 22
FileObject (org.apache.commons.vfs2.FileObject): 18
LongObjectId (org.pentaho.di.repository.LongObjectId): 17
Savepoint (java.sql.Savepoint): 16
ArrayList (java.util.ArrayList): 16
Test (org.junit.Test): 14