Search in sources :

Example 31 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTableExists, the method execute:

/**
 * Executes this job entry: checks whether the configured table exists on the
 * configured database connection.
 *
 * @param previousResult the result of the previous job entry; mutated in place
 * @param nr             the job entry number (unused in this implementation)
 * @return the (same) result object, with {@code result} set to {@code true}
 *         when the table exists and {@code nrErrors} incremented on failure
 */
public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    result.setResult(false);
    if (connection != null) {
        Database db = new Database(this, connection);
        // Share the job's variable space so the substitutions below resolve correctly.
        db.shareVariablesWith(this);
        try {
            db.connect(parentJob.getTransactionId(), null);
            String realTablename = environmentSubstitute(tablename);
            String realSchemaname = environmentSubstitute(schemaname);
            if (!Utils.isEmpty(realSchemaname)) {
                // Build the fully quoted "schema"."table" combination for this dialect.
                realTablename = db.getDatabaseMeta().getQuotedSchemaTableCombination(realSchemaname, realTablename);
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "TableExists.Log.SchemaTable", realTablename));
                }
            } else {
                realTablename = db.getDatabaseMeta().quoteField(realTablename);
            }
            if (db.checkTableExists(realTablename)) {
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "TableExists.Log.TableExists", realTablename));
                }
                result.setResult(true);
            } else {
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "TableExists.Log.TableNotExists", realTablename));
                }
            }
        } catch (KettleDatabaseException dbe) {
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "TableExists.Error.RunningJobEntry", dbe.getMessage()));
        } finally {
            // db is assigned before the try and can never be null here, so the
            // former null check was dead code and has been removed.
            try {
                db.disconnect();
            } catch (Exception ignored) {
                // Best-effort disconnect: a failure while closing must not mask the outcome.
            }
        }
    } else {
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "TableExists.Error.NoConnectionDefined"));
    }
    return result;
}
Also used : KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) Result(org.pentaho.di.core.Result)

Example 32 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTableExists, the method saveRep:

/**
 * Persists this job entry's attributes (table name, schema name, and the
 * database connection reference) into the repository.
 *
 * @param rep       the repository to save into
 * @param metaStore the metastore (unused by this entry)
 * @param id_job    the id of the owning job
 * @throws KettleException when the repository rejects the save
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    final ObjectId entryId = getObjectId();
    try {
        rep.saveJobEntryAttribute(id_job, entryId, "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, entryId, "schemaname", schemaname);
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
    } catch (KettleDatabaseException dbe) {
        // Wrap the low-level database failure, preserving it as the cause.
        throw new KettleException(BaseMessages.getString(PKG, "TableExists.Meta.UnableSaveRep", "" + id_job), dbe);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)

Example 33 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTalendJobExec, the method saveRep:

/**
 * Persists this job entry's attributes (file name and class name) into the
 * repository.
 *
 * @param rep       the repository to save into
 * @param metaStore the metastore (unused by this entry)
 * @param id_job    the id of the owning job
 * @throws KettleException when the repository rejects the save
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    final ObjectId entryId = getObjectId();
    try {
        rep.saveJobEntryAttribute(id_job, entryId, "filename", filename);
        rep.saveJobEntryAttribute(id_job, entryId, "class_name", className);
    } catch (KettleDatabaseException dbe) {
        // Wrap the low-level database failure, preserving it as the cause.
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryTalendJobExec.ERROR_0003_Cannot_Save_Job_Entry", id_job), dbe);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException)

Example 34 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTrans, the method loadRep:

// Load the jobentry from repository
// 
@Override
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
        String method = rep.getJobEntryAttributeString(id_jobentry, "specification_method");
        specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
        String transId = rep.getJobEntryAttributeString(id_jobentry, "trans_object_id");
        transObjectId = Utils.isEmpty(transId) ? null : new StringObjectId(transId);
        transname = rep.getJobEntryAttributeString(id_jobentry, "name");
        directory = rep.getJobEntryAttributeString(id_jobentry, "dir_path");
        filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
        // Backward compatibility check for object specification
        // 
        checkObjectLocationSpecificationMethod();
        argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
        paramsFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "params_from_previous");
        execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
        clearResultRows = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_rows", true);
        clearResultFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_files", true);
        setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
        addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
        addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
        logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
        logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
        logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
        clustering = rep.getJobEntryAttributeBoolean(id_jobentry, "cluster");
        createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "create_parent_folder");
        remoteSlaveServerName = rep.getJobEntryAttributeString(id_jobentry, "slave_server_name");
        setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
        waitingToFinish = rep.getJobEntryAttributeBoolean(id_jobentry, "wait_until_finished", true);
        followingAbortRemotely = rep.getJobEntryAttributeBoolean(id_jobentry, "follow_abort_remote");
        loggingRemoteWork = rep.getJobEntryAttributeBoolean(id_jobentry, "logging_remote_work");
        runConfiguration = rep.getJobEntryAttributeString(id_jobentry, "run_configuration");
        // How many arguments?
        int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
        allocateArgs(argnr);
        // Read all arguments...
        for (int a = 0; a < argnr; a++) {
            arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
        }
        // How many arguments?
        int parameternr = rep.countNrJobEntryAttributes(id_jobentry, "parameter_name");
        allocateParams(parameternr);
        // Read all parameters ...
        for (int a = 0; a < parameternr; a++) {
            parameters[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_name");
            parameterFieldNames[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_stream_name");
            parameterValues[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_value");
        }
        passingAllParameters = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_all_parameters", true);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to load job entry of type 'trans' from the repository for id_jobentry=" + id_jobentry, dbe);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) StringObjectId(org.pentaho.di.repository.StringObjectId) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint)

Example 35 with KettleDatabaseException

use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.

In the class JobEntryTrans, the method saveRep:

// Save the attributes of this job entry
// 
@Override
/**
 * Saves this transformation job entry's attributes to the repository —
 * the mirror of {@code loadRep}: specification method, transformation
 * reference, execution/logging flags, and all indexed arguments and
 * parameters.
 *
 * @param rep       the repository to save into
 * @param metaStore the metastore (unused by this entry)
 * @param id_job    the id of the owning job
 * @throws KettleException wrapping any repository write failure
 */
@Override
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        // null-safe: store null codes rather than calling getCode() on a null enum/id.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "specification_method", specificationMethod == null ? null : specificationMethod.getCode());
        rep.saveJobEntryAttribute(id_job, getObjectId(), "trans_object_id", transObjectId == null ? null : transObjectId.toString());
        rep.saveJobEntryAttribute(id_job, getObjectId(), "name", getTransname());
        // Directory is normalized to "" so the attribute is never stored as null.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "dir_path", getDirectory() != null ? getDirectory() : "");
        rep.saveJobEntryAttribute(id_job, getObjectId(), "file_name", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "params_from_previous", paramsFromPrevious);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "exec_per_row", execPerRow);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "clear_rows", clearResultRows);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "clear_files", clearResultFiles);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "set_logfile", setLogfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_date", addDate);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "add_time", addTime);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "logfile", logfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "logext", logext);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "loglevel", logFileLevel != null ? logFileLevel.getCode() : null);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "cluster", clustering);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "slave_server_name", remoteSlaveServerName);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "set_append_logfile", setAppendLogfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "wait_until_finished", waitingToFinish);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "follow_abort_remote", followingAbortRemotely);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "create_parent_folder", createParentFolder);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "logging_remote_work", loggingRemoteWork);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "run_configuration", runConfiguration);
        // Save the arguments... (one indexed attribute per argument)
        if (arguments != null) {
            for (int i = 0; i < arguments.length; i++) {
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "argument", arguments[i]);
            }
        }
        // Save the parameters... (name, stream field name, and value per index;
        // field names and values are normalized to "" so nulls are never stored)
        if (parameters != null) {
            for (int i = 0; i < parameters.length; i++) {
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "parameter_name", parameters[i]);
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "parameter_stream_name", Const.NVL(parameterFieldNames[i], ""));
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "parameter_value", Const.NVL(parameterValues[i], ""));
            }
        }
        rep.saveJobEntryAttribute(id_job, getObjectId(), "pass_all_parameters", passingAllParameters);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to save job entry of type 'trans' to the repository for id_job=" + id_job, dbe);
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint)

Aggregations

KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)279 KettleException (org.pentaho.di.core.exception.KettleException)176 SQLException (java.sql.SQLException)69 Database (org.pentaho.di.core.database.Database)46 ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString)41 KettleValueException (org.pentaho.di.core.exception.KettleValueException)39 DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta)37 KettleDatabaseBatchException (org.pentaho.di.core.exception.KettleDatabaseBatchException)33 RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface)31 BatchUpdateException (java.sql.BatchUpdateException)27 ResultSet (java.sql.ResultSet)27 ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface)26 KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)25 RowMetaAndData (org.pentaho.di.core.RowMetaAndData)24 RowMeta (org.pentaho.di.core.row.RowMeta)22 FileObject (org.apache.commons.vfs2.FileObject)18 LongObjectId (org.pentaho.di.repository.LongObjectId)17 Savepoint (java.sql.Savepoint)16 ArrayList (java.util.ArrayList)16 Test (org.junit.Test)14