Example 6 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class GPBulkLoaderDialog method setTableFieldCombo.

private void setTableFieldCombo() {
    Runnable fieldLoader = new Runnable() {

        public void run() {
            if (!wTable.isDisposed() && !wConnection.isDisposed() && !wSchema.isDisposed()) {
                final String tableName = wTable.getText(), connectionName = wConnection.getText(), schemaName = wSchema.getText();
                // clear
                for (ColumnInfo colInfo : tableFieldColumns) {
                    colInfo.setComboValues(new String[] {});
                }
                if (!Utils.isEmpty(tableName)) {
                    DatabaseMeta ci = transMeta.findDatabase(connectionName);
                    if (ci != null) {
                        Database db = new Database(loggingObject, ci);
                        try {
                            db.connect();
                            String schemaTable = ci.getQuotedSchemaTableCombination(transMeta.environmentSubstitute(schemaName), transMeta.environmentSubstitute(tableName));
                            RowMetaInterface r = db.getTableFields(schemaTable);
                            if (null != r) {
                                String[] fieldNames = r.getFieldNames();
                                if (null != fieldNames) {
                                    for (ColumnInfo colInfo : tableFieldColumns) {
                                        colInfo.setComboValues(fieldNames);
                                    }
                                }
                            }
                        } catch (Exception e) {
                            // Ignore any errors here: the drop-downs will not be
                            // filled, but that is no problem for the user.
                            for (ColumnInfo colInfo : tableFieldColumns) {
                                colInfo.setComboValues(new String[] {});
                            }
                        }
                    }
                }
            }
        }
    };
    shell.getDisplay().asyncExec(fieldLoader);
}
Also used : Database(org.pentaho.di.core.database.Database) ColumnInfo(org.pentaho.di.ui.core.widget.ColumnInfo) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) KettleException(org.pentaho.di.core.exception.KettleException)
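
The dialog boils down to a handful of Database calls: connect, look up the table layout, read the field names. A minimal standalone sketch of the same lookup, assuming a valid DatabaseMeta and LoggingObjectInterface are already available; the helper name, its parameters and the added finally/disconnect are illustrative, not taken from the project:

// Sketch only: resolve the field names of a table through the Database API.
private String[] lookUpTableFields(LoggingObjectInterface loggingObject, DatabaseMeta databaseMeta, String schema, String table) {
    Database db = new Database(loggingObject, databaseMeta);
    try {
        db.connect();
        // Quote the schema/table combination according to the database dialect.
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schema, table);
        RowMetaInterface rowMeta = db.getTableFields(schemaTable);
        return rowMeta != null ? rowMeta.getFieldNames() : new String[] {};
    } catch (KettleDatabaseException e) {
        // As in the dialog above, a failed lookup is not fatal: callers just get no field names.
        return new String[] {};
    } finally {
        db.disconnect();
    }
}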

Example 7 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class TransMeta method checkSteps.

/**
 * Checks all the steps and fills a List of (CheckResult) remarks.
 *
 * @param remarks
 *          The remarks list to add to.
 * @param only_selected
 *          true to check only the selected steps, false for all steps
 * @param monitor
 *          a progress monitor listener to be updated as the steps are checked
 * @param space
 *          the variable space to use for variable substitution
 * @param repository
 *          the repository to load referenced objects from, if any
 * @param metaStore
 *          the metastore to load referenced objects from, if any
 */
public void checkSteps(List<CheckResultInterface> remarks, boolean only_selected, ProgressMonitorListener monitor, VariableSpace space, Repository repository, IMetaStore metaStore) {
    try {
        // Start with a clean slate...
        remarks.clear();
        Map<ValueMetaInterface, String> values = new Hashtable<>();
        String[] stepnames;
        StepMeta[] steps;
        List<StepMeta> selectedSteps = getSelectedSteps();
        if (!only_selected || selectedSteps.isEmpty()) {
            stepnames = getStepNames();
            steps = getStepsArray();
        } else {
            stepnames = getSelectedStepNames();
            steps = selectedSteps.toArray(new StepMeta[selectedSteps.size()]);
        }
        ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.BeforeCheckSteps.id, new CheckStepsExtension(remarks, space, this, steps, repository, metaStore));
        boolean stop_checking = false;
        if (monitor != null) {
            monitor.beginTask(BaseMessages.getString(PKG, "TransMeta.Monitor.VerifyingThisTransformationTask.Title"), steps.length + 2);
        }
        for (int i = 0; i < steps.length && !stop_checking; i++) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.VerifyingStepTask.Title", stepnames[i]));
            }
            StepMeta stepMeta = steps[i];
            int nrinfo = findNrInfoSteps(stepMeta);
            StepMeta[] infostep = null;
            if (nrinfo > 0) {
                infostep = getInfoStep(stepMeta);
            }
            RowMetaInterface info = null;
            if (infostep != null) {
                try {
                    info = getStepFields(infostep);
                } catch (KettleStepException kse) {
                    info = null;
                    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.ErrorOccurredGettingStepInfoFields.Description", "" + stepMeta, Const.CR + kse.getMessage()), stepMeta);
                    remarks.add(cr);
                }
            }
            // The previous fields from non-informative steps:
            RowMetaInterface prev = null;
            try {
                prev = getPrevStepFields(stepMeta);
            } catch (KettleStepException kse) {
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.ErrorOccurredGettingInputFields.Description", "" + stepMeta, Const.CR + kse.getMessage()), stepMeta);
                remarks.add(cr);
                // This is a severe error: stop checking...
                // Otherwise we wind up checking time and time again, because nothing gets put in the database
                // cache; the timeout of certain databases (Oracle, for instance) is very long...
                stop_checking = true;
            }
            if (isStepUsedInTransHops(stepMeta) || getSteps().size() == 1) {
                // Get the input & output steps!
                // Copy to arrays:
                String[] input = getPrevStepNames(stepMeta);
                String[] output = getNextStepNames(stepMeta);
                // Check step specific info...
                ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.BeforeCheckStep.id, new CheckStepsExtension(remarks, space, this, new StepMeta[] { stepMeta }, repository, metaStore));
                stepMeta.check(remarks, this, prev, input, output, info, space, repository, metaStore);
                ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.AfterCheckStep.id, new CheckStepsExtension(remarks, space, this, new StepMeta[] { stepMeta }, repository, metaStore));
                // See if illegal characters etc. were used in field-names...
                if (prev != null) {
                    for (int x = 0; x < prev.size(); x++) {
                        ValueMetaInterface v = prev.getValueMeta(x);
                        String name = v.getName();
                        if (name == null) {
                            values.put(v, BaseMessages.getString(PKG, "TransMeta.Value.CheckingFieldName.FieldNameIsEmpty.Description"));
                        } else if (name.indexOf(' ') >= 0) {
                            values.put(v, BaseMessages.getString(PKG, "TransMeta.Value.CheckingFieldName.FieldNameContainsSpaces.Description"));
                        } else {
                            char[] list = new char[] { '.', ',', '-', '/', '+', '*', '\'', '\t', '"', '|', '@', '(', ')', '{', '}', '!', '^' };
                            for (int c = 0; c < list.length; c++) {
                                if (name.indexOf(list[c]) >= 0) {
                                    values.put(v, BaseMessages.getString(PKG, "TransMeta.Value.CheckingFieldName.FieldNameContainsUnfriendlyCodes.Description", String.valueOf(list[c])));
                                }
                            }
                        }
                    }
                    // Check if 2 steps with the same name are entering the step...
                    if (prev.size() > 1) {
                        String[] fieldNames = prev.getFieldNames();
                        String[] sortedNames = Const.sortStrings(fieldNames);
                        String prevName = sortedNames[0];
                        for (int x = 1; x < sortedNames.length; x++) {
                            // Checking for doubles
                            if (prevName.equalsIgnoreCase(sortedNames[x])) {
                                // Give a warning!!
                                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultWarning.HaveTheSameNameField.Description", prevName), stepMeta);
                                remarks.add(cr);
                            } else {
                                prevName = sortedNames[x];
                            }
                        }
                    }
                } else {
                    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.CannotFindPreviousFields.Description") + stepMeta.getName(), stepMeta);
                    remarks.add(cr);
                }
            } else {
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultWarning.StepIsNotUsed.Description"), stepMeta);
                remarks.add(cr);
            }
            // Also check for mixing rows...
            try {
                checkRowMixingStatically(stepMeta, null);
            } catch (KettleRowException e) {
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, e.getMessage(), stepMeta);
                remarks.add(cr);
            }
            if (monitor != null) {
                // progress bar...
                monitor.worked(1);
                if (monitor.isCanceled()) {
                    stop_checking = true;
                }
            }
        }
        // Also, check the logging table of the transformation...
        if (monitor == null || !monitor.isCanceled()) {
            if (monitor != null) {
                monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.CheckingTheLoggingTableTask.Title"));
            }
            if (transLogTable.getDatabaseMeta() != null) {
                Database logdb = new Database(this, transLogTable.getDatabaseMeta());
                logdb.shareVariablesWith(this);
                try {
                    logdb.connect();
                    CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.ConnectingWorks.Description"), null);
                    remarks.add(cr);
                    if (transLogTable.getTableName() != null) {
                        if (logdb.checkTableExists(transLogTable.getSchemaName(), transLogTable.getTableName())) {
                            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.LoggingTableExists.Description", transLogTable.getTableName()), null);
                            remarks.add(cr);
                            RowMetaInterface fields = transLogTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
                            String sql = logdb.getDDL(transLogTable.getTableName(), fields);
                            if (sql == null || sql.length() == 0) {
                                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.CorrectLayout.Description"), null);
                                remarks.add(cr);
                            } else {
                                cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.LoggingTableNeedsAdjustments.Description") + Const.CR + sql, null);
                                remarks.add(cr);
                            }
                        } else {
                            cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.LoggingTableDoesNotExist.Description"), null);
                            remarks.add(cr);
                        }
                    } else {
                        cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultError.LogTableNotSpecified.Description"), null);
                        remarks.add(cr);
                    }
                } catch (KettleDatabaseException dbe) {
                // Ignore errors
                } finally {
                    logdb.disconnect();
                }
            }
            if (monitor != null) {
                monitor.worked(1);
            }
        }
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.CheckingForDatabaseUnfriendlyCharactersInFieldNamesTask.Title"));
        }
        if (values.size() > 0) {
            for (Map.Entry<ValueMetaInterface, String> entry : values.entrySet()) {
                String message = entry.getValue();
                ValueMetaInterface v = entry.getKey();
                CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultWarning.Description", v.getName(), message, v.getOrigin()), findStep(v.getOrigin()));
                remarks.add(cr);
            }
        } else {
            CheckResult cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "TransMeta.CheckResult.TypeResultOK.Description"), null);
            remarks.add(cr);
        }
        if (monitor != null) {
            monitor.worked(1);
        }
        ExtensionPointHandler.callExtensionPoint(getLogChannel(), KettleExtensionPoint.AfterCheckSteps.id, new CheckStepsExtension(remarks, space, this, steps, repository, metaStore));
    } catch (Exception e) {
        log.logError(Const.getStackTracker(e));
        throw new RuntimeException(e);
    }
}
Also used : KettleStepException(org.pentaho.di.core.exception.KettleStepException) Hashtable(java.util.Hashtable) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) StepMeta(org.pentaho.di.trans.step.StepMeta) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleRowException(org.pentaho.di.core.exception.KettleRowException) FileSystemException(org.apache.commons.vfs2.FileSystemException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) IOException(java.io.IOException) KettleMissingPluginsException(org.pentaho.di.core.exception.KettleMissingPluginsException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) CheckResult(org.pentaho.di.core.CheckResult) KettleRowException(org.pentaho.di.core.exception.KettleRowException) Database(org.pentaho.di.core.database.Database) Map(java.util.Map) HashMap(java.util.HashMap)
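
A hedged sketch of driving this check from calling code. The method name and file-name parameter are illustrative, the TransMeta itself is reused as the VariableSpace, passing null for the monitor, repository and metastore is an assumption worth verifying, and the TransMeta constructor signature may differ between Kettle versions:

// Sketch only: load a transformation and print any verification errors.
public void verifyTransformation(String filename) throws KettleException {
    KettleEnvironment.init();
    TransMeta transMeta = new TransMeta(filename);
    List<CheckResultInterface> remarks = new ArrayList<>();
    transMeta.checkSteps(remarks, false, null, transMeta, null, null);
    for (CheckResultInterface remark : remarks) {
        if (remark.getType() == CheckResultInterface.TYPE_RESULT_ERROR) {
            System.out.println("ERROR: " + remark.getText());
        }
    }
}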

Example 8 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class TransMeta method getSQLStatements.

/**
 * Builds a list of all the SQL statements that this transformation needs in order to work properly.
 *
 * @param monitor
 *          a progress monitor listener to be updated as the SQL statements are generated
 * @return An ArrayList of SQLStatement objects.
 * @throws KettleStepException
 *           if any errors occur during SQL statement generation
 */
public List<SQLStatement> getSQLStatements(ProgressMonitorListener monitor) throws KettleStepException {
    if (monitor != null) {
        monitor.beginTask(BaseMessages.getString(PKG, "TransMeta.Monitor.GettingTheSQLForTransformationTask.Title"), nrSteps() + 1);
    }
    List<SQLStatement> stats = new ArrayList<>();
    for (int i = 0; i < nrSteps(); i++) {
        StepMeta stepMeta = getStep(i);
        if (monitor != null) {
            monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.GettingTheSQLForStepTask.Title", "" + stepMeta));
        }
        RowMetaInterface prev = getPrevStepFields(stepMeta);
        SQLStatement sqlCompat = compatibleStepMetaGetSQLStatements(stepMeta.getStepMetaInterface(), stepMeta, prev);
        if (sqlCompat.getSQL() != null || sqlCompat.hasError()) {
            stats.add(sqlCompat);
        }
        SQLStatement sql = stepMeta.getStepMetaInterface().getSQLStatements(this, stepMeta, prev, repository, metaStore);
        if (sql.getSQL() != null || sql.hasError()) {
            stats.add(sql);
        }
        if (monitor != null) {
            monitor.worked(1);
        }
    }
    // Also generate the SQL for the transformation's log tables, if any are configured.
    if (monitor != null) {
        monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.GettingTheSQLForTransformationTask.Title2"));
    }
    if (transLogTable.getDatabaseMeta() != null && (!Utils.isEmpty(transLogTable.getTableName()) || !Utils.isEmpty(performanceLogTable.getTableName()))) {
        try {
            for (LogTableInterface logTable : new LogTableInterface[] { transLogTable, performanceLogTable, channelLogTable, stepLogTable }) {
                if (logTable.getDatabaseMeta() != null && !Utils.isEmpty(logTable.getTableName())) {
                    Database db = null;
                    try {
                        db = new Database(this, transLogTable.getDatabaseMeta());
                        db.shareVariablesWith(this);
                        db.connect();
                        RowMetaInterface fields = logTable.getLogRecord(LogStatus.START, null, null).getRowMeta();
                        String schemaTable = logTable.getDatabaseMeta().getQuotedSchemaTableCombination(logTable.getSchemaName(), logTable.getTableName());
                        String sql = db.getDDL(schemaTable, fields);
                        if (!Utils.isEmpty(sql)) {
                            SQLStatement stat = new SQLStatement("<this transformation>", transLogTable.getDatabaseMeta(), sql);
                            stats.add(stat);
                        }
                    } catch (Exception e) {
                        throw new KettleDatabaseException("Unable to connect to logging database [" + logTable.getDatabaseMeta() + "]", e);
                    } finally {
                        if (db != null) {
                            db.disconnect();
                        }
                    }
                }
            }
        } catch (KettleDatabaseException dbe) {
            SQLStatement stat = new SQLStatement("<this transformation>", transLogTable.getDatabaseMeta(), null);
            stat.setError(BaseMessages.getString(PKG, "TransMeta.SQLStatement.ErrorDesc.ErrorObtainingTransformationLogTableInfo") + dbe.getMessage());
            stats.add(stat);
        }
    }
    if (monitor != null) {
        monitor.worked(1);
    }
    if (monitor != null) {
        monitor.done();
    }
    return stats;
}
Also used : LogTableInterface(org.pentaho.di.core.logging.LogTableInterface) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) ArrayList(java.util.ArrayList) Database(org.pentaho.di.core.database.Database) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) SQLStatement(org.pentaho.di.core.SQLStatement) StepMeta(org.pentaho.di.trans.step.StepMeta) Point(org.pentaho.di.core.gui.Point) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleRowException(org.pentaho.di.core.exception.KettleRowException) FileSystemException(org.apache.commons.vfs2.FileSystemException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) IOException(java.io.IOException) KettleMissingPluginsException(org.pentaho.di.core.exception.KettleMissingPluginsException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
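
Calling code usually just walks the returned list. A minimal sketch; the helper name is illustrative, and getError() is assumed as the counterpart of the setError() call used above:

// Sketch only: collect the SQL a transformation needs and report any generation problems.
public void printRequiredSql(TransMeta transMeta) throws KettleStepException {
    List<SQLStatement> statements = transMeta.getSQLStatements(null); // null: no progress monitor
    for (SQLStatement statement : statements) {
        if (statement.hasError()) {
            System.err.println("SQL generation problem: " + statement.getError());
        } else if (statement.getSQL() != null) {
            System.out.println(statement.getSQL());
        }
    }
}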

Example 9 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class Trans method writeStepLogInformation.

/**
 * Writes step information to a step logging table (if one has been configured).
 *
 * @throws KettleException if any errors occur during logging
 */
protected void writeStepLogInformation() throws KettleException {
    Database db = null;
    StepLogTable stepLogTable = getTransMeta().getStepLogTable();
    try {
        db = createDataBase(stepLogTable.getDatabaseMeta());
        db.shareVariablesWith(this);
        db.connect();
        db.setCommit(logCommitSize);
        for (StepMetaDataCombi combi : getSteps()) {
            db.writeLogRecord(stepLogTable, LogStatus.START, combi, null);
        }
        db.cleanupLogRecords(stepLogTable, getName());
    } catch (Exception e) {
        throw new KettleException(BaseMessages.getString(PKG, "Trans.Exception.UnableToWriteStepInformationToLogTable"), e);
    } finally {
        disconnectDb(db);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) StepLogTable(org.pentaho.di.core.logging.StepLogTable) Database(org.pentaho.di.core.database.Database) StepMetaDataCombi(org.pentaho.di.trans.step.StepMetaDataCombi) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleTransException(org.pentaho.di.core.exception.KettleTransException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
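
writeStepLogInformation() is protected and is invoked by Trans itself while the transformation runs; from calling code the useful check is whether a step log table is actually configured. A minimal sketch using only getters that already appear in the snippets on this page (the helper name is illustrative):

// Sketch only: step log records are only written when both a connection and a table name are set.
public boolean isStepLoggingConfigured(TransMeta transMeta) {
    StepLogTable stepLogTable = transMeta.getStepLogTable();
    return stepLogTable.getDatabaseMeta() != null && !Utils.isEmpty(stepLogTable.getTableName());
}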

Example 10 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class Trans method calculateBatchIdAndDateRange.

/**
 * Calculate the batch id and date range for the transformation.
 *
 * @throws KettleTransException if there are any errors during calculation
 */
public void calculateBatchIdAndDateRange() throws KettleTransException {
    TransLogTable transLogTable = transMeta.getTransLogTable();
    currentDate = new Date();
    logDate = new Date();
    startDate = Const.MIN_DATE;
    endDate = currentDate;
    DatabaseMeta logConnection = transLogTable.getDatabaseMeta();
    String logTable = environmentSubstitute(transLogTable.getActualTableName());
    String logSchema = environmentSubstitute(transLogTable.getActualSchemaName());
    try {
        if (logConnection != null) {
            String logSchemaAndTable = logConnection.getQuotedSchemaTableCombination(logSchema, logTable);
            if (Utils.isEmpty(logTable)) {
                // It makes no sense to start database logging without a table
                // to log to.
                throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.NoLogTableDefined"));
            }
            if (Utils.isEmpty(transMeta.getName()) && logTable != null) {
                throw new KettleException(BaseMessages.getString(PKG, "Trans.Exception.NoTransnameAvailableForLogging"));
            }
            transLogTableDatabaseConnection = new Database(this, logConnection);
            transLogTableDatabaseConnection.shareVariablesWith(this);
            if (log.isDetailed()) {
                log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.OpeningLogConnection", "" + logConnection));
            }
            transLogTableDatabaseConnection.connect();
            transLogTableDatabaseConnection.setCommit(logCommitSize);
            // Get a new batch id from the logging connection, if batch ids are used.
            if (transLogTable.isBatchIdUsed()) {
                Long id_batch = logConnection.getNextBatchId(transLogTableDatabaseConnection, logSchema, logTable, transLogTable.getKeyField().getFieldName());
                setBatchId(id_batch.longValue());
            }
            // 
            // Get the date range from the logging table: from the last end_date to now. (currentDate)
            // 
            Object[] lastr = transLogTableDatabaseConnection.getLastLogDate(logSchemaAndTable, transMeta.getName(), false, LogStatus.END);
            if (lastr != null && lastr.length > 0) {
                startDate = (Date) lastr[0];
                if (log.isDetailed()) {
                    log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.StartDateFound") + startDate);
                }
            }
            // Determine the end date by looking at the max-date connection and table, if they are configured.
            if (transMeta.getMaxDateConnection() != null && transMeta.getMaxDateTable() != null && transMeta.getMaxDateTable().length() > 0 && transMeta.getMaxDateField() != null && transMeta.getMaxDateField().length() > 0) {
                if (log.isDetailed()) {
                    log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.LookingForMaxdateConnection", "" + transMeta.getMaxDateConnection()));
                }
                DatabaseMeta maxcon = transMeta.getMaxDateConnection();
                if (maxcon != null) {
                    Database maxdb = new Database(this, maxcon);
                    maxdb.shareVariablesWith(this);
                    try {
                        if (log.isDetailed()) {
                            log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.OpeningMaximumDateConnection"));
                        }
                        maxdb.connect();
                        maxdb.setCommit(logCommitSize);
                        // 
                        // Determine the endDate by looking at a field in a table...
                        // 
                        String sql = "SELECT MAX(" + transMeta.getMaxDateField() + ") FROM " + transMeta.getMaxDateTable();
                        RowMetaAndData r1 = maxdb.getOneRow(sql);
                        if (r1 != null) {
                            // OK, we have a value, what's the offset?
                            Date maxvalue = r1.getRowMeta().getDate(r1.getData(), 0);
                            if (maxvalue != null) {
                                if (log.isDetailed()) {
                                    log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.LastDateFoundOnTheMaxdateConnection") + r1);
                                }
                                endDate.setTime((long) (maxvalue.getTime() + (transMeta.getMaxDateOffset() * 1000)));
                            }
                        } else {
                            if (log.isDetailed()) {
                                log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.NoLastDateFoundOnTheMaxdateConnection"));
                            }
                        }
                    } catch (KettleException e) {
                        throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.ErrorConnectingToDatabase", "" + transMeta.getMaxDateConnection()), e);
                    } finally {
                        maxdb.disconnect();
                    }
                } else {
                    throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.MaximumDateConnectionCouldNotBeFound", "" + transMeta.getMaxDateConnection()));
                }
            }
            // Get the maximum in depdate...
            if (transMeta.nrDependencies() > 0) {
                if (log.isDetailed()) {
                    log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.CheckingForMaxDependencyDate"));
                }
                // 
                // Maybe one of the tables where this transformation is dependent on has changed?
                // If so we need to change the start-date!
                // 
                depDate = Const.MIN_DATE;
                Date maxdepdate = Const.MIN_DATE;
                if (lastr != null && lastr.length > 0) {
                    // #1: last depdate
                    Date dep = (Date) lastr[1];
                    if (dep != null) {
                        maxdepdate = dep;
                        depDate = dep;
                    }
                }
                for (int i = 0; i < transMeta.nrDependencies(); i++) {
                    TransDependency td = transMeta.getDependency(i);
                    DatabaseMeta depcon = td.getDatabase();
                    if (depcon != null) {
                        Database depdb = new Database(this, depcon);
                        try {
                            depdb.connect();
                            depdb.setCommit(logCommitSize);
                            String sql = "SELECT MAX(" + td.getFieldname() + ") FROM " + td.getTablename();
                            RowMetaAndData r1 = depdb.getOneRow(sql);
                            if (r1 != null) {
                                // OK, we have a row, get the result!
                                Date maxvalue = (Date) r1.getData()[0];
                                if (maxvalue != null) {
                                    if (log.isDetailed()) {
                                        log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.FoundDateFromTable", td.getTablename(), "." + td.getFieldname(), " = " + maxvalue.toString()));
                                    }
                                    if (maxvalue.getTime() > maxdepdate.getTime()) {
                                        maxdepdate = maxvalue;
                                    }
                                } else {
                                    throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.UnableToGetDependencyInfoFromDB", td.getDatabase().getName() + ".", td.getTablename() + ".", td.getFieldname()));
                                }
                            } else {
                                throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.UnableToGetDependencyInfoFromDB", td.getDatabase().getName() + ".", td.getTablename() + ".", td.getFieldname()));
                            }
                        } catch (KettleException e) {
                            throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.ErrorInDatabase", "" + td.getDatabase()), e);
                        } finally {
                            depdb.disconnect();
                        }
                    } else {
                        throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.ConnectionCouldNotBeFound", "" + td.getDatabase()));
                    }
                    if (log.isDetailed()) {
                        log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.Maxdepdate") + (XMLHandler.date2string(maxdepdate)));
                    }
                }
                // If one of the dependency tables changed after the previous run, reset the start date.
                if (maxdepdate.getTime() > depDate.getTime()) {
                    depDate = maxdepdate;
                    startDate = Const.MIN_DATE;
                }
            } else {
                depDate = currentDate;
            }
        }
        // OK, now we have a date-range. See if we need to set a maximum!
        // Do we have a maximum date difference specified, and is the start date past the minimum?
        if (transMeta.getMaxDateDifference() > 0.0 && startDate.getTime() > Const.MIN_DATE.getTime()) {
            // See if the end date is larger than the start date + the maximum difference.
            Date maxdesired = new Date(startDate.getTime() + ((long) transMeta.getMaxDateDifference() * 1000));
            // If so, lower the end date to that maximum.
            if (endDate.compareTo(maxdesired) > 0) {
                endDate = maxdesired;
            }
        }
    } catch (KettleException e) {
        throw new KettleTransException(BaseMessages.getString(PKG, "Trans.Exception.ErrorCalculatingDateRange", logTable), e);
    }
// Be careful: we deliberately do NOT close the trans log table database connection here!
// It's closed later in beginProcessing() to prevent excessive connect/disconnect repetitions.
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) Date(java.util.Date) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) Database(org.pentaho.di.core.database.Database) TransLogTable(org.pentaho.di.core.logging.TransLogTable) KettleTransException(org.pentaho.di.core.exception.KettleTransException) FileObject(org.apache.commons.vfs2.FileObject)
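
This method is normally invoked by Trans itself while execution is being prepared; a direct call only makes sense when the transformation log table is fully set up. A hedged sketch, where the helper name and the getBatchId() getter (assumed as the counterpart of the setBatchId() call above) are not taken from the project:

// Sketch only: assumes transMeta defines a transformation log table (connection, table name, batch-id key field).
public long openBatch(TransMeta transMeta) throws KettleTransException {
    Trans trans = new Trans(transMeta);
    trans.calculateBatchIdAndDateRange();
    // The log table connection opened above is deliberately left open; it is closed later in beginProcessing().
    return trans.getBatchId();
}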

Aggregations

Database (org.pentaho.di.core.database.Database): 238 usages
KettleException (org.pentaho.di.core.exception.KettleException): 135 usages
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 90 usages
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 82 usages
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 62 usages
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface): 46 usages
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog): 32 usages
KettleStepException (org.pentaho.di.core.exception.KettleStepException): 30 usages
MessageBox (org.eclipse.swt.widgets.MessageBox): 28 usages
CheckResult (org.pentaho.di.core.CheckResult): 25 usages
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 25 usages
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 24 usages
RowMeta (org.pentaho.di.core.row.RowMeta): 22 usages
SQLStatement (org.pentaho.di.core.SQLStatement): 21 usages
EnterSelectionDialog (org.pentaho.di.ui.core.dialog.EnterSelectionDialog): 21 usages
Test (org.junit.Test): 20 usages
ArrayList (java.util.ArrayList): 18 usages
KettleValueException (org.pentaho.di.core.exception.KettleValueException): 17 usages
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 16 usages
ColumnInfo (org.pentaho.di.ui.core.widget.ColumnInfo): 15 usages