Search in sources:

Example 1 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTruncateTables, the method loadRep:

/**
 * Restores this job entry's configuration from the repository.
 *
 * <p>Reads the database connection, the "argument from previous result" flag,
 * and every stored table-name / schema-name pair.</p>
 *
 * @param rep          repository to read from
 * @param metaStore    metastore (unused here, part of the interface contract)
 * @param id_jobentry  id of this job entry in the repository
 * @param databases    shared database connections to resolve against
 * @param slaveServers shared slave servers (unused here)
 * @throws KettleException if the repository read fails
 */
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
        connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
        this.argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
        // Determine how many table entries were persisted, then size the arrays to match.
        int nrArguments = rep.countNrJobEntryAttributes(id_jobentry, "name");
        allocate(nrArguments);
        // Load each table name together with its schema name.
        for (int idx = 0; idx < nrArguments; idx++) {
            this.arguments[idx] = rep.getJobEntryAttributeString(id_jobentry, idx, "name");
            this.schemaname[idx] = rep.getJobEntryAttributeString(id_jobentry, idx, "schemaname");
        }
    } catch (KettleDatabaseException e) {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryTruncateTables.UnableLoadRep", "" + id_jobentry), e);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)

Example 2 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTruncateTables, the method saveRep:

/**
 * Persists this job entry's configuration to the repository.
 *
 * <p>Stores the database connection, the "argument from previous result" flag,
 * and every table-name / schema-name pair currently configured.</p>
 *
 * @param rep       repository to write to
 * @param metaStore metastore (unused here, part of the interface contract)
 * @param id_job    id of the owning job in the repository
 * @throws KettleException if the repository write fails
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", this.argFromPrevious);
        // Persist each configured table; a null array simply means there is nothing to save.
        int nrArguments = (this.arguments == null) ? 0 : this.arguments.length;
        for (int idx = 0; idx < nrArguments; idx++) {
            rep.saveJobEntryAttribute(id_job, getObjectId(), idx, "name", this.arguments[idx]);
            rep.saveJobEntryAttribute(id_job, getObjectId(), idx, "schemaname", this.schemaname[idx]);
        }
    } catch (KettleDatabaseException e) {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryTruncateTables.UnableSaveRep", "" + id_job), e);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)

Example 3 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryWaitForFile, the method saveRep:

/**
 * Persists this "wait for file" job entry's settings to the repository.
 *
 * @param rep       repository to write to
 * @param metaStore metastore (unused here, part of the interface contract)
 * @param id_job    id of the owning job in the repository
 * @throws KettleException if the repository write fails
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        // Each attribute is stored under the key the corresponding loadRep expects.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "maximum_timeout", maximumTimeout);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "check_cycle_time", checkCycleTime);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "success_on_timeout", successOnTimeout);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "file_size_check", fileSizeCheck);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_filename_result", addFilenameToResult);
    } catch (KettleDatabaseException e) {
        throw new KettleException("Unable to save job entry of type 'wait for file' to the repository for id_job=" + id_job, e);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)

Example 4 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryWaitForSQL, the method saveRep:

/**
 * Persists this "wait for SQL" job entry's settings to the repository.
 *
 * @param rep       repository to write to
 * @param metaStore metastore (unused here, part of the interface contract)
 * @param id_job    id of the owning job in the repository
 * @throws KettleException if the repository write fails
 */
@Override
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
        // The success condition is stored as its symbolic code, not its numeric value.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "success_condition", getSuccessConditionCode(successCondition));
        rep.saveJobEntryAttribute(id_job, getObjectId(), "rows_count_value", rowsCountValue);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "custom_sql", customSQL);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "is_custom_sql", iscustomSQL);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "is_usevars", isUseVars);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_rows_result", isAddRowsResult);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "maximum_timeout", maximumTimeout);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "check_cycle_time", checkCycleTime);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "success_on_timeout", successOnTimeout);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "clear_result_rows", isClearResultList);
    } catch (KettleDatabaseException e) {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryWaitForSQL.UnableSaveRep", "" + id_job), e);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)

Example 5 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryWaitForSQL, the method SQLDataOK:

/**
 * Runs the configured SQL (either the user-supplied custom statement or a
 * generated COUNT over the target table) and evaluates the row count against
 * the configured success condition.
 *
 * <p>On success, when {@code isAddRowsResult} and {@code iscustomSQL} are both
 * set, the returned rows are appended to the job {@link Result}.</p>
 *
 * <p>A database error while executing is logged and treated as "not yet
 * successful" (returns {@code false}) so the surrounding wait loop can retry.</p>
 *
 * @param result         job result to optionally append rows to
 * @param nrRowsLimit    row-count threshold used by the success condition
 * @param realSchemaName resolved schema name (may be empty)
 * @param realTableName  resolved table name
 * @param customSQL      resolved custom SQL statement (may be null)
 * @return true when the success condition is met
 * @throws KettleException if querying the result metadata fails
 */
protected boolean SQLDataOK(Result result, long nrRowsLimit, String realSchemaName, String realTableName, String customSQL) throws KettleException {
    String countStatement = null;
    long rowsCount = 0;
    boolean successOK = false;
    List<Object[]> ar = null;
    RowMetaInterface rowMeta = null;
    Database db = new Database(this, connection);
    db.shareVariablesWith(this);
    try {
        db.connect(parentJob.getTransactionId(), null);
        // Pick the statement: custom SQL verbatim, or a COUNT over the
        // (optionally schema-qualified) quoted table name.
        if (iscustomSQL) {
            countStatement = customSQL;
        } else if (!Utils.isEmpty(realSchemaName)) {
            countStatement = selectCount + db.getDatabaseMeta().getQuotedSchemaTableCombination(realSchemaName, realTableName);
        } else {
            countStatement = selectCount + db.getDatabaseMeta().quoteField(realTableName);
        }
        // countStatement can still be null when a null custom SQL was supplied.
        if (countStatement != null) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.RunSQLStatement", countStatement));
            }
            if (iscustomSQL) {
                // Custom SQL: the "count" is simply the number of rows returned.
                ar = db.getRows(countStatement, 0);
                if (ar != null) {
                    rowsCount = ar.size();
                } else {
                    if (log.isDebug()) {
                        logDebug(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.customSQLreturnedNothing", countStatement));
                    }
                }
            } else {
                // Generated COUNT statement: read the single value from the first row.
                RowMetaAndData row = db.getOneRow(countStatement);
                if (row != null) {
                    rowsCount = row.getInteger(0);
                }
            }
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Log.NrRowsReturned", "" + rowsCount));
            }
            switch(successCondition) {
                case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_EQUAL:
                    successOK = (rowsCount == nrRowsLimit);
                    break;
                case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_DIFFERENT:
                    successOK = (rowsCount != nrRowsLimit);
                    break;
                case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER:
                    successOK = (rowsCount < nrRowsLimit);
                    break;
                case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_SMALLER_EQUAL:
                    successOK = (rowsCount <= nrRowsLimit);
                    break;
                case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER:
                    successOK = (rowsCount > nrRowsLimit);
                    break;
                case JobEntryWaitForSQL.SUCCESS_CONDITION_ROWS_COUNT_GREATER_EQUAL:
                    successOK = (rowsCount >= nrRowsLimit);
                    break;
                default:
                    break;
            }
        }
    } catch (KettleDatabaseException dbe) {
        // Deliberate: a failing query means "condition not met yet"; the caller retries.
        logError(BaseMessages.getString(PKG, "JobEntryWaitForSQL.Error.RunningEntry", dbe.getMessage()));
    } finally {
        try {
            // Fetch the row metadata before disconnecting; it is needed to attach rows below.
            if (isAddRowsResult && iscustomSQL && ar != null) {
                rowMeta = db.getQueryFields(countStatement, false);
            }
        } finally {
            // BUGFIX: always disconnect — previously a KettleDatabaseException from
            // getQueryFields() skipped disconnect() and leaked the connection.
            db.disconnect();
        }
    }
    // Add the returned rows to the job result when requested and available.
    if (successOK && isAddRowsResult && iscustomSQL && ar != null) {
        List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
        for (int i = 0; i < ar.size(); i++) {
            rows.add(new RowMetaAndData(rowMeta, ar.get(i)));
        }
        result.getRows().addAll(rows);
    }
    return successOK;
}
Also used : RowMetaAndData(org.pentaho.di.core.RowMetaAndData) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) ArrayList(java.util.ArrayList) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface)

Aggregations

KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)279 KettleException (org.pentaho.di.core.exception.KettleException)176 SQLException (java.sql.SQLException)69 Database (org.pentaho.di.core.database.Database)46 ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString)41 KettleValueException (org.pentaho.di.core.exception.KettleValueException)39 DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)37 KettleDatabaseBatchException (org.pentaho.di.core.exception.KettleDatabaseBatchException)33 RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface)31 BatchUpdateException (java.sql.BatchUpdateException)27 ResultSet (java.sql.ResultSet)27 ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface)26 KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)25 RowMetaAndData (org.pentaho.di.core.RowMetaAndData)24 RowMeta (org.pentaho.di.core.row.RowMeta)22 FileObject (org.apache.commons.vfs2.FileObject)18 LongObjectId (org.pentaho.di.repository.LongObjectId)17 Savepoint (java.sql.Savepoint)16 ArrayList (java.util.ArrayList)16 Test (org.junit.Test)14