
Example 16 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class JobEntryCreateFolder, method saveRep:

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "foldername", foldername);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "fail_of_folder_exists", failOfFolderExists);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to save job entry of type 'create Folder' to the repository for id_job=" + id_job, dbe);
    }
}
Also used: KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
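
For context, here is a minimal sketch of the read-side counterpart, which is not part of this listing. The loadRep signature and the getJobEntryAttributeString/getJobEntryAttributeBoolean accessors follow the standard Kettle repository pattern for job entries, so treat the details as an assumption rather than code copied from this class:

public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
        // Read back the same attribute keys that saveRep wrote.
        foldername = rep.getJobEntryAttributeString(id_jobentry, "foldername");
        failOfFolderExists = rep.getJobEntryAttributeBoolean(id_jobentry, "fail_of_folder_exists");
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to load job entry of type 'create Folder' from the repository for id_jobentry=" + id_jobentry, dbe);
    }
}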

Example 17 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class JobEntryDeleteFile, method saveRep:

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "fail_if_file_not_exists", failIfFileNotExists);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryDeleteFile.ERROR_0003_Unable_To_Save_Job_To_Repository", id_job), dbe);
    }
}
Also used: KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
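
Unlike Example 16, the error text here is localized through BaseMessages. A quick sketch of how such a parameterized key resolves; the PKG field shown is an assumption modeled on the usual Kettle i18n convention, not copied from this class:

// The i18n anchor class; the key lives in a messages.properties bundle in the same package.
private static final Class<?> PKG = JobEntryDeleteFile.class;

// The {0} placeholder in the bundle entry is replaced by the job id.
String msg = BaseMessages.getString(PKG, "JobEntryDeleteFile.ERROR_0003_Unable_To_Save_Job_To_Repository", String.valueOf(id_job));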

Example 18 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class Trans, method closeUniqueDatabaseConnections:

/**
 * Close unique database connections. If there are errors in the Result, perform a rollback.
 *
 * @param result
 *          the result of the transformation execution
 */
private void closeUniqueDatabaseConnections(Result result) {
    // Don't close the connections if the parent job shares this transaction; the parent will commit or roll back.
    // 
    if (parentJob != null && transactionId != null && parentJob.getTransactionId() != null && transactionId.equals(parentJob.getTransactionId())) {
        return;
    }
    // Likewise, skip if the parent transformation uses unique connections and shares this transaction.
    // 
    if (parentTrans != null && parentTrans.getTransMeta().isUsingUniqueConnections() && transactionId != null && parentTrans.getTransactionId() != null && transactionId.equals(parentTrans.getTransactionId())) {
        return;
    }
    // First we get all the database connections ...
    // 
    DatabaseConnectionMap map = DatabaseConnectionMap.getInstance();
    synchronized (map) {
        List<Database> databaseList = new ArrayList<>(map.getMap().values());
        for (Database database : databaseList) {
            if (database.getConnectionGroup().equals(getTransactionId())) {
                try {
                    // Roll back on error, commit otherwise.
                    // 
                    if (result.getNrErrors() > 0) {
                        try {
                            database.rollback(true);
                            log.logBasic(BaseMessages.getString(PKG, "Trans.Exception.TransactionsRolledBackOnConnection", database.toString()));
                        } catch (Exception e) {
                            throw new KettleDatabaseException(BaseMessages.getString(PKG, "Trans.Exception.ErrorRollingBackUniqueConnection", database.toString()), e);
                        }
                    } else {
                        try {
                            database.commit(true);
                            log.logBasic(BaseMessages.getString(PKG, "Trans.Exception.TransactionsCommittedOnConnection", database.toString()));
                        } catch (Exception e) {
                            throw new KettleDatabaseException(BaseMessages.getString(PKG, "Trans.Exception.ErrorCommittingUniqueConnection", database.toString()), e);
                        }
                    }
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "Trans.Exception.ErrorHandlingTransformationTransaction", database.toString()), e);
                    result.setNrErrors(result.getNrErrors() + 1);
                } finally {
                    try {
                        // This database connection belongs to this transformation.
                        database.closeConnectionOnly();
                    } catch (Exception e) {
                        log.logError(BaseMessages.getString(PKG, "Trans.Exception.ErrorHandlingTransformationTransaction", database.toString()), e);
                        result.setNrErrors(result.getNrErrors() + 1);
                    } finally {
                        // Remove the database from the list...
                        // 
                        map.removeConnection(database.getConnectionGroup(), database.getPartitionId(), database);
                    }
                }
            }
        }
        // Who else needs to be informed of the rollback or commit?
        // 
        List<DatabaseTransactionListener> transactionListeners = map.getTransactionListeners(getTransactionId());
        if (result.getNrErrors() > 0) {
            for (DatabaseTransactionListener listener : transactionListeners) {
                try {
                    listener.rollback();
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "Trans.Exception.ErrorHandlingTransactionListenerRollback"), e);
                    result.setNrErrors(result.getNrErrors() + 1);
                }
            }
        } else {
            for (DatabaseTransactionListener listener : transactionListeners) {
                try {
                    listener.commit();
                } catch (Exception e) {
                    log.logError(BaseMessages.getString(PKG, "Trans.Exception.ErrorHandlingTransactionListenerCommit"), e);
                    result.setNrErrors(result.getNrErrors() + 1);
                }
            }
        }
    }
}
Also used: DatabaseConnectionMap(org.pentaho.di.core.database.map.DatabaseConnectionMap) DatabaseTransactionListener(org.pentaho.di.core.database.DatabaseTransactionListener) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) ArrayList(java.util.ArrayList) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleTransException(org.pentaho.di.core.exception.KettleTransException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) KettleException(org.pentaho.di.core.exception.KettleException)
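
The final loop informs registered DatabaseTransactionListener instances of the outcome. A rough sketch of how such a listener might be registered; the addTransactionListener name and the listener methods throwing KettleDatabaseException are inferred from the getTransactionListeners call above, so treat both as assumptions:

DatabaseConnectionMap map = DatabaseConnectionMap.getInstance();
map.addTransactionListener(trans.getTransactionId(), new DatabaseTransactionListener() {
    @Override
    public void commit() throws KettleDatabaseException {
        // Reached when the transformation finished without errors.
    }

    @Override
    public void rollback() throws KettleDatabaseException {
        // Reached instead of commit() when result.getNrErrors() > 0.
    }
});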

Example 19 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class Trans, method endProcessing:

/**
 * End processing. Also handles any logging operations associated with the end of a transformation.
 *
 * @return true if all end processing is successful, false otherwise
 * @throws KettleException
 *           if any errors occur during processing
 */
private synchronized boolean endProcessing() throws KettleException {
    LogStatus status;
    if (isFinished()) {
        if (isStopped()) {
            status = LogStatus.STOP;
        } else {
            status = LogStatus.END;
        }
    } else if (isPaused()) {
        status = LogStatus.PAUSED;
    } else {
        status = LogStatus.RUNNING;
    }
    TransLogTable transLogTable = transMeta.getTransLogTable();
    int intervalInSeconds = Const.toInt(environmentSubstitute(transLogTable.getLogInterval()), -1);
    logDate = new Date();
    // OK, we have some logging to do...
    // 
    DatabaseMeta logcon = transMeta.getTransLogTable().getDatabaseMeta();
    String logTable = transMeta.getTransLogTable().getActualTableName();
    if (logcon != null) {
        Database ldb = null;
        try {
            // Connect to the logging database, or reuse the shared connection that wrote earlier log records.
            // 
            if (transLogTableDatabaseConnection == null) {
                ldb = new Database(this, logcon);
                ldb.shareVariablesWith(this);
                ldb.connect();
                ldb.setCommit(logCommitSize);
                transLogTableDatabaseConnection = ldb;
            } else {
                ldb = transLogTableDatabaseConnection;
            }
            // Write the log record for the current status to the log table.
            // 
            if (!Utils.isEmpty(logTable)) {
                ldb.writeLogRecord(transLogTable, status, this, null);
            }
            // At the end of the transformation, clean up old log records as configured on the log table.
            // 
            if (status.equals(LogStatus.END) || status.equals(LogStatus.STOP)) {
                ldb.cleanupLogRecords(transLogTable);
            }
            // Commit the log record explicitly if the connection is not auto-committing.
            // 
            if (!ldb.isAutoCommit()) {
                ldb.commitLog(true, transMeta.getTransLogTable());
            }
        } catch (KettleDatabaseException e) {
            // PDI-9790: an error writing to the log table is treated as a transformation error, not a fatal exception
            log.logError(BaseMessages.getString(PKG, "Database.Error.WriteLogTable", logTable), e);
            errors.incrementAndGet();
        // end PDI-9790
        } catch (Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "Trans.Exception.ErrorWritingLogRecordToTable", transMeta.getTransLogTable().getActualTableName()), e);
        } finally {
            if (intervalInSeconds <= 0 || (status.equals(LogStatus.END) || status.equals(LogStatus.STOP))) {
                ldb.disconnect();
                // The shared log table connection is now disconnected; drop the reference.
                transLogTableDatabaseConnection = null;
            }
        }
    }
    return true;
}
Also used: LogStatus(org.pentaho.di.core.logging.LogStatus) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) TransLogTable(org.pentaho.di.core.logging.TransLogTable) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) DatabaseMeta(org.pentaho.di.core.database.DatabaseMeta) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) Date(java.util.Date) UnknownParamException(org.pentaho.di.core.parameters.UnknownParamException) KettleValueException(org.pentaho.di.core.exception.KettleValueException) KettleTransException(org.pentaho.di.core.exception.KettleTransException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
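
Note the PDI-9790 branch: a KettleDatabaseException from the log table write is logged and counted rather than rethrown, so callers observe it only through the error count. A minimal sketch of that caller's perspective, assuming a prepared transMeta and surrounding code that handles KettleException:

Trans trans = new Trans(transMeta);
trans.execute(null);        // start the transformation
trans.waitUntilFinished();  // block until all steps are done
if (trans.getErrors() > 0) {
    // A failed log table write (among other errors) surfaces here,
    // not as a thrown exception, per the PDI-9790 handling above.
}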

Example 20 with KettleDatabaseException

Use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

The class JobEntryPGPEncryptFiles, method saveRep:

public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "gpglocation", gpglocation);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", arg_from_previous);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "include_subfolders", include_subfolders);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_result_filesname", add_result_filesname);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "destination_is_a_file", destination_is_a_file);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "create_destination_folder", create_destination_folder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "nr_errors_less_than", nr_errors_less_than);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "success_condition", success_condition);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_date", add_date);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_time", add_time);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "SpecifyFormat", SpecifyFormat);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "date_time_format", date_time_format);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "AddDateBeforeExtension", AddDateBeforeExtension);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "DoNotKeepFolderStructure", DoNotKeepFolderStructure);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "iffileexists", iffileexists);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "destinationFolder", destinationFolder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "ifmovedfileexists", ifmovedfileexists);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "moved_date_time_format", moved_date_time_format);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_moved_date", add_moved_date);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_moved_time", add_moved_time);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "SpecifyMoveFormat", SpecifyMoveFormat);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "create_move_to_folder", create_move_to_folder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "AddMovedDateBeforeExtension", AddMovedDateBeforeExtension);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "asciiMode", asciiMode);
        // save the arguments...
        if (source_filefolder != null) {
            for (int i = 0; i < source_filefolder.length; i++) {
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "action_type", getActionTypeCode(action_type[i]));
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "source_filefolder", source_filefolder[i]);
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "userid", userid[i]);
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "destination_filefolder", destination_filefolder[i]);
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "wildcard", wildcard[i]);
            }
        }
    } catch (KettleDatabaseException dbe) {
        throw new KettleException(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Error.Exception.UnableSaveRep") + id_job, dbe);
    }
}
Also used: KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)
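
The loop over source_filefolder uses the indexed saveJobEntryAttribute overload, which pairs with indexed getters on the read side. A hedged sketch of how a matching loadRep typically restores such arrays; countNrJobEntryAttributes and the indexed getJobEntryAttributeString are the standard Repository pattern, assumed here rather than copied from this class:

// Inside a matching loadRep: size the arrays from one of the indexed keys ...
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "source_filefolder");
source_filefolder = new String[argnr];
wildcard = new String[argnr];
// ... then read each row back with the indexed getter.
for (int i = 0; i < argnr; i++) {
    source_filefolder[i] = rep.getJobEntryAttributeString(id_jobentry, i, "source_filefolder");
    wildcard[i] = rep.getJobEntryAttributeString(id_jobentry, i, "wildcard");
}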

Aggregations

KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 279 usages
KettleException (org.pentaho.di.core.exception.KettleException): 176 usages
SQLException (java.sql.SQLException): 69 usages
Database (org.pentaho.di.core.database.Database): 46 usages
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 41 usages
KettleValueException (org.pentaho.di.core.exception.KettleValueException): 39 usages
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta): 37 usages
KettleDatabaseBatchException (org.pentaho.di.core.exception.KettleDatabaseBatchException): 33 usages
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface): 31 usages
BatchUpdateException (java.sql.BatchUpdateException): 27 usages
ResultSet (java.sql.ResultSet): 27 usages
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface): 26 usages
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 25 usages
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 24 usages
RowMeta (org.pentaho.di.core.row.RowMeta): 22 usages
FileObject (org.apache.commons.vfs2.FileObject): 18 usages
LongObjectId (org.pentaho.di.repository.LongObjectId): 17 usages
Savepoint (java.sql.Savepoint): 16 usages
ArrayList (java.util.ArrayList): 16 usages
Test (org.junit.Test): 14 usages